summaryrefslogtreecommitdiffstats
path: root/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts
diff options
context:
space:
mode:
Diffstat (limited to 'src/VBox/Devices/EFI/Firmware/BaseTools/Scripts')
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/BinToPcd.py220
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertFceToStructurePcd.py734
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertMasmToNasm.py1005
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertUni.py125
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/DetectNotUsedItem.py198
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/FormatDosFiles.py107
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GccBase.lds79
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetMaintainer.py194
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetUtcDateTime.py44
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/MemoryProfileSymbolGen.py276
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/Readme.md19
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/__init__.py6
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py424
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedocapp.pyw1060
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/__init__.py6
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/__init__.py6
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py445
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py606
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py475
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/inidocview.py17
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py46
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/__init__.py6
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/__init__.py6
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py928
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py313
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py1084
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py1086
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py195
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py335
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/__init__.py6
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PatchCheck.py770
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefile.py172
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefileSample.mak37
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align32.sct19
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align4K.sct19
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetVisualStudio.bat102
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetupGit.py213
-rw-r--r--src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ShowEnvironment.bat213
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SmiHandlerProfileSymbolGen.py307
-rwxr-xr-xsrc/VBox/Devices/EFI/Firmware/BaseTools/Scripts/UpdateBuildVersions.py398
40 files changed, 12301 insertions, 0 deletions
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/BinToPcd.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/BinToPcd.py
new file mode 100755
index 00000000..91d1d055
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/BinToPcd.py
@@ -0,0 +1,220 @@
+## @file
+# Convert a binary file to a VOID* PCD value or DSC file VOID* PCD statement.
+#
+# Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+BinToPcd
+'''
+from __future__ import print_function
+
+import sys
+import argparse
+import re
+import xdrlib
+
#
# Globals for help information
#
# Program name, copyright, and description strings consumed by the
# argparse.ArgumentParser construction below.
__prog__ = 'BinToPcd'
__copyright__ = 'Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.'
__description__ = 'Convert one or more binary files to a VOID* PCD value or DSC file VOID* PCD statement.\n'
+
+if __name__ == '__main__':
def ValidateUnsignedInteger (Argument):
    """Argparse type checker: parse Argument as a non-negative integer.

    Accepts any base-prefixed literal supported by int(x, 0), e.g. '10',
    '0x10', '0o17', '0b101'.  Raises argparse.ArgumentTypeError when the
    string is not a valid integer literal or is negative.
    """
    try:
        Value = int (Argument, 0)
    except ValueError:
        # Only a malformed literal is a user input error; anything else
        # (the original used a bare 'except:') should propagate as a bug.
        Message = '{Argument} is not a valid integer value.'.format (Argument = Argument)
        raise argparse.ArgumentTypeError (Message)
    if Value < 0:
        Message = '{Argument} is a negative value.'.format (Argument = Argument)
        raise argparse.ArgumentTypeError (Message)
    return Value
+
def ValidatePcdName (Argument):
    """Argparse type checker: require <PcdTokenSpaceGuidCName>.<PcdCName>.

    Both halves must be C identifiers ([A-Za-z_][A-Za-z0-9_]*) joined by
    a single '.'.  Returns Argument unchanged when valid, otherwise
    raises argparse.ArgumentTypeError.
    """
    # re.fullmatch replaces the original "re.split(...) != ['', '']"
    # whole-string-match trick, and the raw string drops the invalid
    # '\_' escapes that newer Python versions warn about ('_' needs no
    # escaping inside a regex).
    if re.fullmatch (r'[a-zA-Z_][a-zA-Z0-9_]*\.[a-zA-Z_][a-zA-Z0-9_]*', Argument) is None:
        Message = '{Argument} is not in the form <PcdTokenSpaceGuidCName>.<PcdCName>'.format (Argument = Argument)
        raise argparse.ArgumentTypeError (Message)
    return Argument
+
def ValidateGuidName (Argument):
    """Argparse type checker: require a valid GUID C identifier.

    The name must match [A-Za-z_][A-Za-z0-9_]* in full.  Returns
    Argument unchanged when valid, otherwise raises
    argparse.ArgumentTypeError.
    """
    # Full-string match via re.fullmatch (clearer than the original
    # re.split comparison) with a raw string to avoid the deprecated
    # '\_' escape sequence.
    if re.fullmatch (r'[a-zA-Z_][a-zA-Z0-9_]*', Argument) is None:
        Message = '{Argument} is not a valid GUID C name'.format (Argument = Argument)
        raise argparse.ArgumentTypeError (Message)
    return Argument
+
def ByteArray (Buffer, Xdr = False):
    """Convert a list of byte buffers to a C-style PCD value string.

    Buffer is a list of bytes objects (one per input file).  Returns a
    tuple (PcdValue, PcdSize) where PcdValue has the form
    '{0x01, 0x02, ...}' and PcdSize is the encoded length in bytes.
    """
    import struct
    if Xdr:
        #
        # If Xdr flag is set then encode data using the Variable-Length Opaque
        # Data format of RFC 4506 External Data Representation Standard (XDR):
        # each item is a 4-byte big-endian length followed by the data,
        # zero-padded to a multiple of 4 bytes.  struct is used instead of
        # xdrlib, which was removed from the standard library in Python 3.13
        # (PEP 594); the byte stream produced is identical.
        #
        Encoded = bytearray ()
        for Item in Buffer:
            Encoded += struct.pack ('>L', len (Item))
            Encoded += Item
            Encoded += b'\x00' * ((4 - len (Item) % 4) % 4)
        Buffer = Encoded
    else:
        #
        # If Xdr flag is not set, then concatenate all the data
        #
        Buffer = bytearray (b''.join (Buffer))
    #
    # Return a PCD value of the form '{0x01, 0x02, ...}' along with the PCD length in bytes
    #
    return '{' + (', '.join (['0x{Byte:02X}'.format (Byte = Item) for Item in Buffer])) + '}', len (Buffer)
+
    #
    # Create command line argument parser object
    #
    parser = argparse.ArgumentParser (prog = __prog__,
                                      description = __description__ + __copyright__,
                                      conflict_handler = 'resolve')
    parser.add_argument ("-i", "--input", dest = 'InputFile', type = argparse.FileType ('rb'), action='append', required = True,
                         help = "Input binary filename. Multiple input files are combined into a single PCD.")
    parser.add_argument ("-o", "--output", dest = 'OutputFile', type = argparse.FileType ('w'),
                         help = "Output filename for PCD value or PCD statement")
    parser.add_argument ("-p", "--pcd", dest = 'PcdName', type = ValidatePcdName,
                         help = "Name of the PCD in the form <PcdTokenSpaceGuidCName>.<PcdCName>")
    parser.add_argument ("-t", "--type", dest = 'PcdType', default = None, choices = ['VPD', 'HII'],
                         help = "PCD statement type (HII or VPD). Default is standard.")
    parser.add_argument ("-m", "--max-size", dest = 'MaxSize', type = ValidateUnsignedInteger,
                         help = "Maximum size of the PCD. Ignored with --type HII.")
    parser.add_argument ("-f", "--offset", dest = 'Offset', type = ValidateUnsignedInteger,
                         help = "VPD offset if --type is VPD. UEFI Variable offset if --type is HII. Must be 8-byte aligned.")
    parser.add_argument ("-n", "--variable-name", dest = 'VariableName',
                         help = "UEFI variable name. Only used with --type HII.")
    parser.add_argument ("-g", "--variable-guid", type = ValidateGuidName, dest = 'VariableGuid',
                         help = "UEFI variable GUID C name. Only used with --type HII.")
    parser.add_argument ("-x", "--xdr", dest = 'Xdr', action = "store_true",
                         help = "Encode PCD using the Variable-Length Opaque Data format of RFC 4506 External Data Representation Standard (XDR)")
    parser.add_argument ("-v", "--verbose", dest = 'Verbose', action = "store_true",
                         help = "Increase output messages")
    parser.add_argument ("-q", "--quiet", dest = 'Quiet', action = "store_true",
                         help = "Reduce output messages")
    parser.add_argument ("--debug", dest = 'Debug', type = int, metavar = '[0-9]', choices = range (0, 10), default = 0,
                         help = "Set debug level")

    #
    # Parse command line arguments
    #
    args = parser.parse_args ()

    #
    # Read all binary input files
    #
    Buffer = []
    for File in args.InputFile:
        try:
            Buffer.append (File.read ())
            File.close ()
        except:
            # NOTE(review): bare except also masks KeyboardInterrupt/SystemExit;
            # presumably only I/O errors are expected here -- confirm.
            print ('BinToPcd: error: can not read binary input file {File}'.format (File = File))
            sys.exit (1)

    #
    # Convert PCD to an encoded string of hex values and determine the size of
    # the encoded PCD in bytes.
    #
    PcdValue, PcdSize = ByteArray (Buffer, args.Xdr)

    #
    # Convert binary buffer to a DSC file PCD statement
    #
    if args.PcdName is None:
        #
        # If PcdName is None, then only a PCD value is being requested.
        #
        Pcd = PcdValue
        if args.Verbose:
            print ('BinToPcd: Convert binary file to PCD Value')
    elif args.PcdType is None:
        #
        # If --type is neither VPD nor HII, then use PCD statement syntax that is
        # compatible with [PcdsFixedAtBuild], [PcdsPatchableInModule],
        # [PcdsDynamicDefault], and [PcdsDynamicExDefault].
        #
        if args.MaxSize is None:
            #
            # If --max-size is not provided, then do not generate the syntax that
            # includes the maximum size.
            #
            Pcd = ' {Name}|{Value}'.format (Name = args.PcdName, Value = PcdValue)
        elif args.MaxSize < PcdSize:
            print ('BinToPcd: error: argument --max-size is smaller than input file.')
            sys.exit (1)
        else:
            Pcd = ' {Name}|{Value}|VOID*|{Size}'.format (Name = args.PcdName, Value = PcdValue, Size = args.MaxSize)

        if args.Verbose:
            print ('BinToPcd: Convert binary file to PCD statement compatible with PCD sections:')
            print (' [PcdsFixedAtBuild]')
            print (' [PcdsPatchableInModule]')
            print (' [PcdsDynamicDefault]')
            print (' [PcdsDynamicExDefault]')
    elif args.PcdType == 'VPD':
        if args.MaxSize is None:
            #
            # If --max-size is not provided, then set maximum size to the size of the
            # binary input file
            #
            args.MaxSize = PcdSize
        if args.MaxSize < PcdSize:
            print ('BinToPcd: error: argument --max-size is smaller than input file.')
            sys.exit (1)
        if args.Offset is None:
            #
            # if --offset is not provided, then set offset field to '*' so build
            # tools will compute offset of PCD in VPD region.
            #
            Pcd = ' {Name}|*|{Size}|{Value}'.format (Name = args.PcdName, Size = args.MaxSize, Value = PcdValue)
        else:
            #
            # --offset value must be 8-byte aligned
            #
            if (args.Offset % 8) != 0:
                print ('BinToPcd: error: argument --offset must be 8-byte aligned.')
                sys.exit (1)
            #
            # Use the --offset value provided.
            #
            Pcd = ' {Name}|{Offset}|{Size}|{Value}'.format (Name = args.PcdName, Offset = args.Offset, Size = args.MaxSize, Value = PcdValue)
        if args.Verbose:
            print ('BinToPcd: Convert binary file to PCD statement compatible with PCD sections')
            print (' [PcdsDynamicVpd]')
            print (' [PcdsDynamicExVpd]')
    elif args.PcdType == 'HII':
        if args.VariableGuid is None or args.VariableName is None:
            print ('BinToPcd: error: arguments --variable-guid and --variable-name are required for --type HII.')
            sys.exit (1)
        if args.Offset is None:
            #
            # Use UEFI Variable offset of 0 if --offset is not provided
            #
            args.Offset = 0
        #
        # --offset value must be 8-byte aligned
        #
        if (args.Offset % 8) != 0:
            print ('BinToPcd: error: argument --offset must be 8-byte aligned.')
            sys.exit (1)
        Pcd = ' {Name}|L"{VarName}"|{VarGuid}|{Offset}|{Value}'.format (Name = args.PcdName, VarName = args.VariableName, VarGuid = args.VariableGuid, Offset = args.Offset, Value = PcdValue)
        if args.Verbose:
            print ('BinToPcd: Convert binary file to PCD statement compatible with PCD sections')
            print (' [PcdsDynamicHii]')
            print (' [PcdsDynamicExHii]')

    #
    # Write PCD value or PCD statement to the output file
    #
    try:
        args.OutputFile.write (Pcd)
        args.OutputFile.close ()
    except:
        #
        # If output file is not specified or it can not be written, then write the
        # PCD value or PCD statement to the console
        #
        # NOTE(review): this relies on AttributeError when --output was omitted
        # (args.OutputFile is None) -- any other write failure is also silently
        # redirected to stdout.
        #
        print (Pcd)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertFceToStructurePcd.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertFceToStructurePcd.py
new file mode 100755
index 00000000..995fcfce
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertFceToStructurePcd.py
@@ -0,0 +1,734 @@
+#!/usr/bin/python
+## @file
+# Firmware Configuration Editor (FCE) from https://firmware.intel.com/develop
+# can parse BIOS image and generate Firmware Configuration file.
+# This script bases on Firmware Configuration file, and generate the structure
+# PCD setting in DEC/DSC/INF files.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+ConvertFceToStructurePcd
+'''
+
+import re
+import os
+import datetime
+import argparse
+
#
# Globals for help information
#
# Program identity strings consumed by the argparse setup in main().
__prog__ = 'ConvertFceToStructurePcd'
__version__ = '%s Version %s' % (__prog__, '0.1 ')
__copyright__ = 'Copyright (c) 2018, Intel Corporation. All rights reserved.'
__description__ = 'Generate Structure PCD in DEC/DSC/INF based on Firmware Configuration.\n'


# Boilerplate written at the top of the generated StructurePcd.dsc file.
dscstatement='''[Defines]
 VPD_TOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E08

[SkuIds]
 0|DEFAULT # The entry: 0|DEFAULT is reserved and always required.

[DefaultStores]
 0|STANDARD # UEFI Standard default 0|STANDARD is reserved.
 1|MANUFACTURING # UEFI Manufacturing default 1|MANUFACTURING is reserved.

[PcdsDynamicExVpd.common.DEFAULT]
 gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer|*
'''

# Boilerplate written at the top of the generated StructurePcd.dec file.
decstatement = '''[Guids]
 gStructPcdTokenSpaceGuid = {0x3f1406f4, 0x2b, 0x487a, {0x8b, 0x69, 0x74, 0x29, 0x1b, 0x36, 0x16, 0xf4}}

[PcdsFixedAtBuild,PcdsPatchableInModule,PcdsDynamic,PcdsDynamicEx]
'''

# Boilerplate written at the top of the generated StructurePcd.inf file.
infstatement = '''[Pcd]
'''

# DSC section name and PCD name prefix used when emitting generated PCDs.
SECTION='PcdsDynamicHii'
PCD_NAME='gStructPcdTokenSpaceGuid.Pcd'
# Column at which trailing '# comment' text is aligned in DSC info lines.
Max_Pcd_Len = 100

# Collected diagnostics, printed / written to ERROR.log at the end of main().
WARNING=[]
ERRORMSG=[]
+
class parser_lst(object):
    """Parse EDK2 VFR-compiler .lst files.

    Extracts C struct layouts (field offset -> field name) and the
    efivarstore declarations (UEFI variable name -> struct type) that the
    generated StructurePcd files are built from.
    """

    def __init__(self,filelist):
        # Scalar typedefs that must never be treated as user structs.
        self._ignore=['BOOLEAN', 'UINT8', 'UINT16', 'UINT32', 'UINT64']
        self.file=filelist
        # Read every file exactly once; the original called megre_lst()
        # twice and therefore re-read all files for each attribute.
        self.text, self.content = self.megre_lst()

    def megre_lst(self):
        """Return (concatenated text of all files, {filename: file text})."""
        alltext=''
        content={}
        for file in self.file:
            with open(file,'r') as f:
                read =f.read()
                alltext += read
                content[file]=read
        return alltext,content

    def struct_lst(self):
        """Return {struct name: lst file that defines it}."""
        structs_file={}
        name_format = re.compile(r'(?<!typedef)\s+struct (\w+) {.*?;', re.S)
        for i in list(self.content.keys()):
            structs= name_format.findall(self.content[i])
            if structs:
                for j in structs:
                    if j not in self._ignore:
                        structs_file[j]=i
            else:
                print("%s"%structs)
        return structs_file

    def struct(self):
        """Return {struct name: {field offset: field name}}.

        Array fields of scalar type are expanded element by element;
        fields whose type is itself a struct are resolved afterwards via
        nameISstruct().
        """
        unit_num = re.compile(r'(\d+)')
        offset1_re = re.compile(r'(\d+)\[')
        pcdname_num_re = re.compile(r'\w+\[(\S+)\]')
        pcdname_re = re.compile(r'\](.*)\<')
        pcdname2_re = re.compile(r'(\w+)\[')
        uint_re = re.compile(r'\<(\S+)\>')
        name_format = re.compile(r'(?<!typedef)\s+struct (\w+) {.*?;', re.S)
        name=name_format.findall(self.text)
        info={}
        unparse=[]
        if name:
            # De-duplicate while preserving first-seen order, then reverse so
            # embedded structs are processed before the structs embedding them.
            tmp_n = [n for n in name if n not in self._ignore]
            name = list(set(tmp_n))
            name.sort(key = tmp_n.index)
            name.reverse()
            for struct in name:
                s_re = re.compile(r'struct %s :(.*?)};'% struct, re.S)
                content = s_re.search(self.text)
                if content:
                    tmp_dict = {}
                    text = content.group().split('+')
                    for line in text[1:]:
                        offset = offset1_re.findall(line)
                        t_name = pcdname_re.findall(line)
                        uint = uint_re.findall(line)
                        if offset and uint:
                            offset = offset[0]
                            uint = uint[0]
                            if t_name:
                                t_name = t_name[0].strip()
                                # Fix: the original tested '"\\" in name' (the list
                                # of struct names) where the field name was intended.
                                if (' ' in t_name) or ("=" in t_name) or (";" in t_name) or("\\" in t_name) or (t_name ==''):
                                    WARNING.append("Warning:Invalid Pcd name '%s' for Offset %s in struct %s" % (t_name,offset, struct))
                                else:
                                    if '[' in t_name:
                                        if uint in ['UINT8', 'UINT16', 'UINT32', 'UINT64']:
                                            # Scalar array: emit one entry per element.
                                            offset = int(offset, 10)
                                            tmp_name = pcdname2_re.findall(t_name)[0] + '[0]'
                                            tmp_dict[offset] = tmp_name
                                            pcdname_num = int(pcdname_num_re.findall(t_name)[0],10)
                                            uint = int(unit_num.findall(uint)[0],10)
                                            bit = uint // 8
                                            for i in range(1, pcdname_num):
                                                offset += bit
                                                tmp_name = pcdname2_re.findall(t_name)[0] + '[%s]' % i
                                                tmp_dict[offset] = tmp_name
                                        else:
                                            # Array of structs: defer to nameISstruct().
                                            tmp_name = pcdname2_re.findall(t_name)[0]
                                            pcdname_num = pcdname_num_re.findall(t_name)[0]
                                            line = [offset,tmp_name,pcdname_num,uint]
                                            line.append(struct)
                                            unparse.append(line)
                                    else:
                                        if uint not in ['UINT8', 'UINT16', 'UINT32', 'UINT64']:
                                            # Single embedded struct: defer as well.
                                            line = [offset, t_name, 0, uint]
                                            line.append(struct)
                                            unparse.append(line)
                                        else:
                                            offset = int(offset,10)
                                            tmp_dict[offset] = t_name
                    info[struct] = tmp_dict
            if len(unparse) != 0:
                # Resolve deferred struct-typed fields now that all plain
                # struct layouts are known.
                for u in unparse:
                    if u[3] in list(info.keys()):
                        unpar = self.nameISstruct(u,info[u[3]])
                        info[u[4]]= dict(list(info[u[4]].items())+list(unpar[u[4]].items()))
        else:
            print("ERROR: No struct name found in %s" % self.file)
            ERRORMSG.append("ERROR: No struct name found in %s" % self.file)
        return info

    def nameISstruct(self,line,key_dict):
        """Expand a struct-typed field into dotted per-member entries.

        line is [offset, field name, element count (0 for non-array),
        struct type, enclosing struct]; key_dict maps member offsets to
        member names of the field's struct type.
        """
        field_map={}
        wrapper={}
        s_re = re.compile(r'struct %s :(.*?)};' % line[3], re.S)
        size_re = re.compile(r'mTotalSize \[(\S+)\]')
        content = s_re.search(self.text)
        if content:
            s_size = size_re.findall(content.group())[0]
        else:
            s_size = '0'
            print("ERROR: Struct %s not define mTotalSize in lst file" %line[3])
            ERRORMSG.append("ERROR: Struct %s not define mTotalSize in lst file" %line[3])
        size = int(line[0], 10)
        if line[2] != 0:
            # Array of structs: repeat the member map once per element,
            # advancing by the struct's total size (hex) each time.
            for j in range(0, int(line[2], 10)):
                for k in list(key_dict.keys()):
                    offset = size + k
                    name ='%s.%s' %((line[1]+'[%s]'%j),key_dict[k])
                    field_map[offset] = name
                size = int(s_size,16)+size
        elif line[2] == 0:
            for k in list(key_dict.keys()):
                offset = size + k
                name = '%s.%s' % (line[1], key_dict[k])
                field_map[offset] = name
        wrapper[line[4]] = field_map
        return wrapper

    def efivarstore_parser(self):
        """Return {variable name: struct type} from efivarstore statements."""
        efivarstore_format = re.compile(r'efivarstore.*?;', re.S)
        struct_re = re.compile(r'efivarstore(.*?),',re.S)
        name_re = re.compile(r'name=(\w+)')
        efivarstore_dict={}
        efitxt = efivarstore_format.findall(self.text)
        for i in efitxt:
            struct = struct_re.findall(i.replace(' ',''))
            if struct[0] in self._ignore:
                continue
            name = name_re.findall(i.replace(' ',''))
            if struct and name:
                efivarstore_dict[name[0]]=struct[0]
            else:
                print("ERROR: Can't find Struct or name in lst file, please check have this format:efivarstore XXXX, name=xxxx")
                ERRORMSG.append("ERROR: Can't find Struct or name in lst file, please check have this format:efivarstore XXXX, name=xxxx")
        return efivarstore_dict
+
class Config(object):
    """Parse the FCE-generated firmware configuration (.config) file."""

    def __init__(self,Config):
        self.config=Config

    def config_parser(self):
        """Parse the .config file into per-ID question lists.

        Returns {default/platform-id string (e.g. '0_0'):
        [(offset, name, guid, value, attribute, comment), ...]}.
        The original also built local 'part'/'info' lists that were never
        returned and parsed every section twice; both removed.
        """
        ids_re =re.compile(r'_ID:(\d+)',re.S)
        info_dict={}
        with open(self.config, 'r') as text:
            read = text.read()
        if 'DEFAULT_ID:' in read:
            all_txt = read.split('FCEKEY DEFAULT')
            for i in all_txt[1:]:
                # Join all numeric IDs in this block into a single key
                # such as '0_0' or '1_0_2_0'.
                str_id=''
                ids = ids_re.findall(i.replace(' ',''))
                for m in ids:
                    str_id +=m+'_'
                str_id=str_id[:-1]
                # Split on '\nQ' so each element after the first is one
                # question/setting block.
                section = i.split('\nQ')
                info_dict[str_id] = self.section_parser(section)
        else:
            # No DEFAULT_ID sections: everything belongs to the implicit
            # '0_0' (default SKU / standard store) key.
            section = read.split('\nQ')
            info_dict['0_0'] = self.section_parser(section)
        return info_dict

    def eval_id(self,id):
        """Turn an ID key like '1_0' into its DSC section header string."""
        id = id.split("_")
        default_id=id[0:len(id)//2]
        platform_id=id[len(id)//2:]
        text=''
        for i in range(len(default_id)):
            text +="%s.common.%s.%s,"%(SECTION,self.id_name(platform_id[i],'PLATFORM'),self.id_name(default_id[i],'DEFAULT'))
        return '\n[%s]\n'%text[:-1]

    def id_name(self,ID, flag):
        """Map a numeric ID to its DSC SKU or default-store name.

        Returns None for an unknown flag (same as the original).
        """
        if flag == "PLATFORM":
            return {'0': 'DEFAULT'}.get(ID, 'SKUID%s' % ID)
        if flag == 'DEFAULT':
            return {'0': 'STANDARD', '1': 'MANUFACTURING'}.get(ID, 'DEFAULTID%s' % ID)
        return None

    def section_parser(self,section):
        """Parse question blocks (split on '\\nQ').

        Returns a list of (offset, name, guid, value, attribute, comment)
        tuples for questions with attribute 0x3 or 0x7.  Duplicates that
        differ only in their comment are merged with ' | '.
        """
        offset_re = re.compile(r'offset=(\w+)')
        name_re = re.compile(r'name=(\S+)')
        guid_re = re.compile(r'guid=(\S+)')
        attribute_re=re.compile(r'attribute=(\w+)')
        value_re = re.compile(r'(//.*)')
        part = []
        part_without_comment = []
        for x in section[1:]:
            line=x.split('\n')[0]
            comment_list = value_re.findall(line) # the trailing //... comment in the "Q...." line
            comment_list[0] = comment_list[0].replace('//', '')
            comment = comment_list[0].strip()
            line=value_re.sub('',line) #delete //... in "Q...." line
            list1=line.split(' ')
            value=self.value_parser(list1)
            offset = offset_re.findall(x.replace(' ',''))
            name = name_re.findall(x.replace(' ',''))
            guid = guid_re.findall(x.replace(' ',''))
            attribute =attribute_re.findall(x.replace(' ',''))
            if offset and name and guid and value and attribute:
                if attribute[0] in ['0x3','0x7']:
                    offset = int(offset[0], 16)
                    text_without_comment = offset, name[0], guid[0], value, attribute[0]
                    if text_without_comment in part_without_comment:
                        # Same Pcd seen with a different comment: merge the
                        # comments into one line separated by "|".
                        dupl_index = part_without_comment.index(text_without_comment)
                        part[dupl_index] = list(part[dupl_index])
                        if comment not in part[dupl_index][-1]:
                            part[dupl_index][-1] += " | " + comment
                        part[dupl_index] = tuple(part[dupl_index])
                    else:
                        text = offset, name[0], guid[0], value, attribute[0], comment
                        part_without_comment.append(text_without_comment)
                        part.append(text)
        return(part)

    def value_parser(self, list1):
        """Decode the value field of one question line.

        STRING values become L"..." (or a zero pair when empty),
        ORDERED_LIST values become a {0x..,..} byte list, everything
        else is formatted as a hex integer.
        """
        list1 = [t for t in list1 if t != ''] # remove '' from list
        first_num = int(list1[0], 16)
        if list1[first_num + 1] == 'STRING': # parser STRING
            if list1[-1] == '""':
                value = "{0x0, 0x0}"
            else:
                value = 'L%s' % list1[-1]
        elif list1[first_num + 1] == 'ORDERED_LIST': # parser ORDERED_LIST
            value_total = int(list1[first_num + 2])
            list2 = list1[-value_total:]
            tmp = []
            line = ''
            for i in list2:
                # Multi-byte entries are split into little-endian bytes.
                if len(i) % 2 == 0 and len(i) != 2:
                    for m in range(0, len(i) // 2):
                        tmp.append('0x%02x' % (int('0x%s' % i, 16) >> m * 8 & 0xff))
                else:
                    tmp.append('0x%s' % i)
            for i in tmp:
                line += '%s,' % i
            value = '{%s}' % line[:-1]
        else:
            value = "0x%01x" % int(list1[-1], 16)
        return value
+
+
+#parser Guid file, get guid name form guid value
class GUID(object):
    """Resolve GUID values to GUID C names via the build's Guid.xref file."""

    def __init__(self,path):
        self.path = path
        self.guidfile = self.gfile()
        self.guiddict = self.guid_dict()

    def gfile(self):
        """Locate Fv/Guid.xref under the build output tree.

        Exits the script when an FV directory is found without the
        expected Guid.xref file inside it.
        """
        for root, dirnames, _ in os.walk(self.path, topdown=True, followlinks=False):
            if 'FV' not in dirnames:
                continue
            candidate = os.path.join(root,'Fv','Guid.xref')
            if os.path.isfile(candidate):
                return candidate
            print("ERROR: Guid.xref file not found")
            ERRORMSG.append("ERROR: Guid.xref file not found")
            exit()

    def guid_dict(self):
        """Parse Guid.xref lines of the form '<GUID> <CName>' into a dict."""
        mapping = {}
        with open(self.guidfile,'r') as handle:
            entries = handle.readlines()
        for line in entries:
            fields = line.strip().split(' ')
            if not fields:
                # Unreachable in practice (''.split(' ') yields ['']),
                # kept for parity with the original.
                print("ERROR: No data in %s" %self.guidfile)
                ERRORMSG.append("ERROR: No data in %s" %self.guidfile)
            elif len(fields) > 1:
                mapping[fields[0].upper()] = fields[1]
            elif fields[0] != '':
                print("Error: line %s can't be parser in %s"%(line.strip(),self.guidfile))
                ERRORMSG.append("Error: line %s can't be parser in %s"%(line.strip(),self.guidfile))
        return mapping

    def guid_parser(self,guid):
        """Return the C name for *guid*, or the GUID itself when unknown."""
        key = guid.upper()
        if key in self.guiddict:
            return self.guiddict[key]
        print("ERROR: GUID %s not found in file %s"%(guid, self.guidfile))
        ERRORMSG.append("ERROR: GUID %s not found in file %s"%(guid, self.guidfile))
        return guid
+
class PATH(object):
    """Index a platform build output tree.

    Pairs every DEBUG/*.lst file with its module's OUTPUT/*.inf file and
    remembers which module directories were useful.
    """

    def __init__(self,path):
        self.path=path
        self.rootdir=self.get_root_dir()
        self.usefuldir=set()
        # {lst file path: inf file path}
        self.lstinf = {}
        for path in self.rootdir:
            for o_root, o_dir, o_file in os.walk(os.path.join(path, "OUTPUT"), topdown=True, followlinks=False):
                for INF in o_file:
                    if os.path.splitext(INF)[1] == '.inf':
                        for l_root, l_dir, l_file in os.walk(os.path.join(path, "DEBUG"), topdown=True,
                                                             followlinks=False):
                            for LST in l_file:
                                if os.path.splitext(LST)[1] == '.lst':
                                    self.lstinf[os.path.join(l_root, LST)] = os.path.join(o_root, INF)
                                    self.usefuldir.add(path)

    def get_root_dir(self):
        """Return the unique module build dirs (parents of any OUTPUT dir)."""
        rootdir=[]
        for root,dir,file in os.walk(self.path,topdown=True,followlinks=False):
            if "OUTPUT" in root:
                updir=root.split("OUTPUT",1)[0]
                rootdir.append(updir)
        rootdir=list(set(rootdir))
        return rootdir

    def lst_inf(self):
        """Return the {lst: inf} mapping built in __init__."""
        return self.lstinf

    def package(self):
        """Return {inf path: text of its [Packages.*] section}."""
        package={}
        package_re=re.compile(r'Packages\.\w+]\n(.*)',re.S)
        for i in list(self.lstinf.values()):
            with open(i,'r') as inf:
                read=inf.read()
                section=read.split('[')
                for j in section:
                    p=package_re.findall(j)
                    if p:
                        package[i]=p[0].rstrip()
        return package

    def header(self,struct):
        """Return {struct: header path} for the header declaring *struct*.

        Finds the typedef ending in '} <struct>;' in a .lst file, recovers
        the header name from the following '#line' directive, then resolves
        it to a full path through the deps.txt index (headerfileset).
        """
        header={}
        head_re = re.compile('typedef.*} %s;[\n]+(.*)(?:typedef|formset)'%struct,re.M|re.S)
        head_re2 = re.compile(r'#line[\s\d]+"(\S+h)"')
        for i in list(self.lstinf.keys()):
            with open(i,'r') as lst:
                read = lst.read()
                h = head_re.findall(read)
                if h:
                    head=head_re2.findall(h[0])
                    if head:
                        # Normalize Windows-style separators before taking the basename.
                        format = head[0].replace('\\\\','/').replace('\\','/')
                        name =format.split('/')[-1]
                        head = self.headerfileset.get(name)
                        if head:
                            head = head.replace('\\','/')
                            header[struct] = head
        return header
    @property
    def headerfileset(self):
        """Map header basename -> full path for every entry in any deps.txt."""
        headerset = dict()
        for root,dirs,files in os.walk(self.path):
            for file in files:
                if os.path.basename(file) == 'deps.txt':
                    with open(os.path.join(root,file),"r") as fr:
                        for line in fr.readlines():
                            headerset[os.path.basename(line).strip()] = line.strip()
        return headerset

    def makefile(self,filename):
        """Return the package-relative path of *filename* from a module
        Makefile's DEBUG_DIR line, or None when not found."""
        re_format = re.compile(r'DEBUG_DIR.*(?:\S+Pkg)\\(.*\\%s)'%filename)
        for i in self.usefuldir:
            with open(os.path.join(i,'Makefile'),'r') as make:
                read = make.read()
                dir = re_format.findall(read)
                if dir:
                    return dir[0]
        return None
+
class mainprocess(object):
    """Combine .config, .lst and build-tree data into StructurePcd
    DEC/DSC/INF content and write the three output files."""

    def __init__(self,InputPath,Config,OutputPath):
        # Base value for the auto-assigned token numbers used by plus().
        self.init = 0xFCD00000
        self.inputpath = os.path.abspath(InputPath)
        self.outputpath = os.path.abspath(OutputPath)
        self.LST = PATH(self.inputpath)
        self.lst_dict = self.LST.lst_inf()
        self.Config = Config
        # Variable attribute bits -> DSC attribute string.
        self.attribute_dict = {'0x3': 'NV, BS', '0x7': 'NV, BS, RT'}
        self.guid = GUID(self.inputpath)
        self.header={}

    def main(self):
        """Build the generated-file content.

        Returns (ids, title_all, info_list, header_list, inf_list) where
        ids are the sorted config ID keys and the remaining lists hold
        DSC title lines, per-ID DSC value lines, DEC header entries and
        INF Pcd entries respectively.
        """
        conf=Config(self.Config)
        config_dict=conf.config_parser() #get {'0_0':[offset,name,guid,value,attribute]...,'1_0':....}
        lst=parser_lst(list(self.lst_dict.keys()))
        efi_dict=lst.efivarstore_parser() #get {name:struct} form lst file
        keys=sorted(config_dict.keys())
        all_struct=lst.struct()
        stru_lst=lst.struct_lst()
        title_list=[]
        info_list=[]
        header_list=[]
        inf_list =[]
        for i in stru_lst:
            tmp = self.LST.header(i)
            self.header.update(tmp)
        for id_key in keys:
            tmp_id=[id_key] #['0_0',[(struct,[name...]),(struct,[name...])]]
            tmp_info={} #{name:struct}
            for section in config_dict[id_key]:
                c_offset,c_name,c_guid,c_value,c_attribute,c_comment = section
                if c_name in efi_dict:
                    struct = efi_dict[c_name]
                    title='%s%s|L"%s"|%s|0x00||%s\n'%(PCD_NAME,c_name,c_name,self.guid.guid_parser(c_guid),self.attribute_dict[c_attribute])
                    if struct in all_struct:
                        lstfile = stru_lst[struct]
                        struct_dict=all_struct[struct]
                        try:
                            title2 = '%s%s|{0}|%s|0xFCD00000{\n <HeaderFiles>\n %s\n <Packages>\n%s\n}\n' % (PCD_NAME, c_name, struct, self.header[struct], self.LST.package()[self.lst_dict[lstfile]])
                        except KeyError:
                            # No header was resolved for this struct; emit the
                            # entry with an empty <HeaderFiles> body.
                            WARNING.append("Warning: No <HeaderFiles> for struct %s"%struct)
                            title2 = '%s%s|{0}|%s|0xFCD00000{\n <HeaderFiles>\n %s\n <Packages>\n%s\n}\n' % (PCD_NAME, c_name, struct, '', self.LST.package()[self.lst_dict[lstfile]])
                        header_list.append(title2)
                    elif struct not in lst._ignore:
                        struct_dict ={}
                        print("ERROR: Struct %s can't found in lst file" %struct)
                        ERRORMSG.append("ERROR: Struct %s can't found in lst file" %struct)
                    if c_offset in struct_dict:
                        offset_name=struct_dict[c_offset]
                        info = "%s%s.%s|%s\n"%(PCD_NAME,c_name,offset_name,c_value)
                        # Pad so the trailing '# comment' aligns at Max_Pcd_Len.
                        blank_length = Max_Pcd_Len - len(info)
                        if blank_length <= 0:
                            info_comment = "%s%s.%s|%s%s# %s\n"%(PCD_NAME,c_name,offset_name,c_value," ",c_comment)
                        else:
                            info_comment = "%s%s.%s|%s%s# %s\n"%(PCD_NAME,c_name,offset_name,c_value,blank_length*" ",c_comment)
                        inf = "%s%s\n"%(PCD_NAME,c_name)
                        inf_list.append(inf)
                        tmp_info[info_comment]=title
                    else:
                        print("ERROR: Can't find offset %s with struct name %s"%(c_offset,struct))
                        ERRORMSG.append("ERROR: Can't find offset %s with name %s"%(c_offset,struct))
                else:
                    print("ERROR: Can't find name %s in lst file"%(c_name))
                    ERRORMSG.append("ERROR: Can't find name %s in lst file"%(c_name))
            tmp_id.append(list(self.reverse_dict(tmp_info).items()))
            id,tmp_title_list,tmp_info_list = self.read_list(tmp_id)
            title_list +=tmp_title_list
            info_list.append(tmp_info_list)
        inf_list = self.del_repeat(inf_list)
        header_list = self.plus(self.del_repeat(header_list))
        title_all=list(set(title_list))
        info_list = self.remove_bracket(self.del_repeat(info_list))
        # Drop ID groups that became empty after de-duplication.
        for i in range(len(info_list)-1,-1,-1):
            if len(info_list[i]) == 0:
                info_list.remove(info_list[i])
        for i in (inf_list, title_all, header_list):
            i.sort()
        return keys,title_all,info_list,header_list,inf_list

    def correct_sort(self, PcdString):
        """Sort key helper: return (pcd name, array index).

        Sorts first by Pcd name, then numerically by the [index] part when
        the Pcd is an array element.
        """
        if ("]|") in PcdString:
            Pcdname = PcdString.split("[")[0]
            Pcdindex = int(PcdString.split("[")[1].split("]")[0])
        else:
            Pcdname = PcdString.split("|")[0]
            Pcdindex = 0
        return Pcdname, Pcdindex

    def remove_bracket(self,List):
        """Strip '[0]' suffixes from string-valued PCD lines, then sort
        each per-ID list with correct_sort()."""
        for i in List:
            for j in i:
                tmp = j.split("|")
                if (('L"' in j) and ("[" in j)) or (tmp[1].strip() == '{0x0, 0x0}'):
                    tmp[0] = tmp[0][:tmp[0].index('[')]
                    List[List.index(i)][i.index(j)] = "|".join(tmp)
                else:
                    List[List.index(i)][i.index(j)] = j
        for i in List:
            if type(i) == type([0,0]):
                i.sort(key = lambda x:(self.correct_sort(x)[0], self.correct_sort(x)[1]))
        return List

    def write_all(self):
        """Generate StructurePcd.dec/.dsc/.inf in the output directory."""
        title_flag=1
        info_flag=1
        if not os.path.isdir(self.outputpath):
            os.makedirs(self.outputpath)
        decwrite = write2file(os.path.join(self.outputpath,'StructurePcd.dec'))
        dscwrite = write2file(os.path.join(self.outputpath,'StructurePcd.dsc'))
        infwrite = write2file(os.path.join(self.outputpath, 'StructurePcd.inf'))
        conf = Config(self.Config)
        ids,title,info,header,inf=self.main()
        decwrite.add2file(decstatement)
        decwrite.add2file(header)
        infwrite.add2file(infstatement)
        infwrite.add2file(inf)
        dscwrite.add2file(dscstatement)
        for id in ids:
            dscwrite.add2file(conf.eval_id(id))
            # Titles are emitted once, under the first ID section only.
            if title_flag:
                dscwrite.add2file(title)
                title_flag=0
            if len(info) == 1:
                dscwrite.add2file(info)
            elif len(info) == 2:
                if info_flag:
                    dscwrite.add2file(info[0])
                    info_flag =0
                else:
                    dscwrite.add2file(info[1])

    def del_repeat(self,List):
        """De-duplicate.

        For a list of lists, remove from each sublist any entry already
        present in an earlier sublist; for a flat list of strings, just
        drop duplicates (order unspecified).
        """
        if len(List) == 1 or len(List) == 0:
            return List
        else:
            if type(List[0]) != type('xxx'):
                alist=[]
                for i in range(len(List)):
                    if i == 0:
                        alist.append(List[0])
                    else:
                        plist = []
                        for j in range(i):
                            plist += List[j]
                        alist.append(self.__del(list(set(plist)), List[i]))
                return alist
            else:
                return list(set(List))

    def __del(self,list1,list2):
        # Elements of list2 not present in list1 (order unspecified).
        return list(set(list2).difference(set(list1)))

    def reverse_dict(self,dict):
        """Invert a dict into {value: [keys...]}."""
        data={}
        for i in list(dict.items()):
            if i[1] not in list(data.keys()):
                data[i[1]]=[i[0]]
            else:
                data[i[1]].append(i[0])
        return data

    def read_list(self,list):
        """Unpack ['id', [(title, [info...]), ...]] into (id, titles, infos)."""
        title_list=[]
        info_list=[]
        for i in list[1]:
            title_list.append(i[0])
            for j in i[1]:
                info_list.append(j)
        return list[0],title_list,info_list

    def plus(self,list):
        """Replace the 0xFCD00000 placeholder in each entry with a unique,
        monotonically increasing token value."""
        nums=[]
        for i in list:
            if type(i) != type([0]):
                self.init += 1
                num = "0x%01x" % self.init
                j=i.replace('0xFCD00000',num.upper())
                nums.append(j)
        return nums
+
class write2file(object):
    """Append-only text writer that flattens nested str/list/tuple/dict
    content into a single string per add2file() call."""

    def __init__(self,Output):
        self.output=Output
        self.text=''
        # Start from a clean file: drop any stale output of a previous run.
        if os.path.exists(self.output):
            os.remove(self.output)

    def add2file(self,content):
        """Flatten *content* to a string and append it to the output file."""
        self.text = ''
        with open(self.output,'a+') as handle:
            handle.write(self.__render(content))

    def __render(self,content):
        # Dispatch on the concrete type: strings pass through untouched,
        # lists/tuples are flattened, dicts flatten their (key, value) pairs.
        if type(content) is str:
            return content
        if type(content) in (list, tuple):
            return self.__flatten(content)
        if type(content) is dict:
            return self.__flatten(list(content.items()))

    def __flatten(self,items):
        # Accumulate every string found anywhere in the nested sequence.
        for element in items:
            if type(element) in (list, tuple):
                self.__flatten(element)
            elif type(element) is str:
                self.text += element
        return self.text
+
def stamp():
    """Capture the current local wall-clock time for duration reporting."""
    return datetime.datetime.now()
+
+def dtime(start,end,id=None):
+ if id:
+ pass
+ print("%s time:%s" % (id,str(end - start)))
+ else:
+ print("Total time:%s" %str(end-start)[:-7])
+
+
def main():
    """Command-line entry point: parse arguments and drive the conversion."""
    start = stamp()
    parser = argparse.ArgumentParser(prog = __prog__,
                                     description = __description__ + __copyright__,
                                     conflict_handler = 'resolve')
    parser.add_argument('-v', '--version', action = 'version',version = __version__, help="show program's version number and exit")
    parser.add_argument('-p', '--path', metavar='PATH', dest='path', help="platform build output directory")
    parser.add_argument('-c', '--config',metavar='FILENAME', dest='config', help="firmware configuration file")
    parser.add_argument('-o', '--outputdir', metavar='PATH', dest='output', help="output directoy")
    options = parser.parse_args()
    # Validate the three required options in the same order as the original
    # nested checks, so each missing option keeps its original message.
    if not options.config:
        print('Error command, no output file, use -h for help')
    elif not options.path:
        print('Error command, no build path input, use -h for help')
    elif not options.output:
        print('Error command, no output path, use -h for help')
    else:
        run = mainprocess(options.path, options.config, options.output)
        print("Running...")
        run.write_all()
        if WARNING:
            for note in list(set(WARNING)):
                print(note)
        if ERRORMSG:
            collected = list(set(ERRORMSG))
            with open("ERROR.log", 'w+') as error:
                for msg in collected:
                    error.write(msg + '\n')
            print("Some error find, error log in ERROR.log")
        print('Finished, Output files in directory %s'%os.path.abspath(options.output))
    end = stamp()
    dtime(start, end)
+
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertMasmToNasm.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertMasmToNasm.py
new file mode 100755
index 00000000..7c286a06
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertMasmToNasm.py
@@ -0,0 +1,1005 @@
+# @file ConvertMasmToNasm.py
+# This script assists with conversion of MASM assembly syntax to NASM
+#
+# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+
+#
+# Import Modules
+#
+import argparse
+import io
+import os.path
+import re
+import subprocess
+import sys
+
+
class UnsupportedConversion(Exception):
    """Raised when a MASM construct has no supported NASM translation."""
+
+
class NoSourceFile(Exception):
    """Raised when the .asm source referenced by an INF is missing on disk."""
+
+
class UnsupportedArch(Exception):
    """Raised for sources under an architecture directory this tool does not convert."""

    # Lower-case directory names whose contents are skipped.
    unsupported = ('aarch64', 'arm', 'ebc', 'ipf')
+
+
class CommonUtils:
    """Option parsing, path helpers and git plumbing shared by the
    conversion classes below.

    Pass another instance as 'clone' to reuse its parsed arguments and
    git discovery instead of re-reading the command line.
    """

    # Version and Copyright
    VersionNumber = "0.01"
    __version__ = "%prog Version " + VersionNumber
    __copyright__ = "Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved."
    __usage__ = "%prog [options] source.asm [destination.nasm]"

    def __init__(self, clone=None):
        if clone is None:
            self.args = self.ProcessCommandLine()
        else:
            self.args = clone.args

        self.unsupportedSyntaxSeen = False
        self.src = self.args.source
        self.keep = self.args.keep
        assert(os.path.exists(self.src))
        self.dirmode = os.path.isdir(self.src)
        srcExt = os.path.splitext(self.src)[1]
        # A .nasm input makes no sense for a MASM->NASM conversion.
        assert (self.dirmode or srcExt != '.nasm')
        self.infmode = not self.dirmode and srcExt == '.inf'
        self.diff = self.args.diff
        self.git = self.args.git
        self.force = self.args.force

        if clone is None:
            self.rootdir = os.getcwd()
            self.DetectGit()
        else:
            self.rootdir = clone.rootdir
            self.gitdir = clone.gitdir
            self.gitemail = clone.gitemail

    def ProcessCommandLine(self):
        """Build the argument parser and return the parsed command line."""
        parser = argparse.ArgumentParser(description=self.__copyright__)
        parser.add_argument('--version', action='version',
                            version='%(prog)s ' + self.VersionNumber)
        parser.add_argument("-q", "--quiet", action="store_true",
                            help="Disable all messages except FATAL ERRORS.")
        parser.add_argument("--git", action="store_true",
                            help="Use git to create commits for each file converted")
        parser.add_argument("--keep", action="append", choices=('asm', 's'),
                            default=[],
                            help="Don't remove files with this extension")
        parser.add_argument("--diff", action="store_true",
                            help="Show diff of conversion")
        parser.add_argument("-f", "--force", action="store_true",
                            help="Force conversion even if unsupported")
        parser.add_argument('source', help='MASM input file')
        parser.add_argument('dest', nargs='?',
                            help='NASM output file (default=input.nasm; - for stdout)')

        return parser.parse_args()

    def RootRelative(self, path):
        """Return 'path' relative to self.rootdir with leading separators stripped."""
        result = path
        if result.startswith(self.rootdir):
            result = result[len(self.rootdir):]
        while len(result) > 0 and result[0] in '/\\':
            result = result[1:]
        return result

    def MatchAndSetMo(self, regexp, string):
        """Match 'regexp' at the start of 'string'; keep the match object in self.mo."""
        self.mo = regexp.match(string)
        return self.mo is not None

    def SearchAndSetMo(self, regexp, string):
        """Search 'regexp' anywhere in 'string'; keep the match object in self.mo."""
        self.mo = regexp.search(string)
        return self.mo is not None

    def ReplacePreserveSpacing(self, string, find, replace):
        """Replace 'find' with 'replace' while trying to keep the columns of
        any text following the replacement aligned."""
        if len(find) >= len(replace):
            # Replacement is not longer: pad it to the width of 'find'.
            padded = replace + (' ' * (len(find) - len(replace)))
            return string.replace(find, padded)
        elif find.find(replace) >= 0:
            return string.replace(find, replace)
        else:
            # Replacement is longer: absorb as many spaces following 'find'
            # as are available, trying the longest padding first.
            lenDiff = len(replace) - len(find)
            result = string
            for i in range(lenDiff, -1, -1):
                padded = find + (' ' * i)
                result = result.replace(padded, replace)
            return result

    def DetectGit(self):
        """Walk up from the source path looking for a .git directory.

        Sets self.gitdir and self.gitemail; both end up None/None when no
        repository is found before the filesystem root.
        """
        lastpath = os.path.realpath(self.src)
        self.gitdir = None
        while True:
            path = os.path.split(lastpath)[0]
            if path == lastpath:
                # Reached the filesystem root without finding a repository.
                self.gitemail = None
                return
            candidate = os.path.join(path, '.git')
            if os.path.isdir(candidate):
                self.gitdir = candidate
                self.gitemail = self.FormatGitEmailAddress()
                return
            lastpath = path

    def FormatGitEmailAddress(self):
        """Return the configured git author as 'Name <email>' (name quoted
        when it contains a comma); '' when git use is disabled."""
        if not self.git or not self.gitdir:
            return ''

        cmd = ('git', 'config', 'user.name')
        name = self.RunAndCaptureOutput(cmd).strip()
        cmd = ('git', 'config', 'user.email')
        email = self.RunAndCaptureOutput(cmd).strip()
        if name.find(',') >= 0:
            name = '"' + name + '"'
        return name + ' <' + email + '>'

    def RunAndCaptureOutput(self, cmd, checkExitCode=True, pipeIn=None):
        """Run 'cmd', optionally feeding bytes 'pipeIn' to its stdin, and
        return stdout decoded as utf-8.

        When checkExitCode is True a non-zero exit status is reported and
        asserted on.
        """
        if pipeIn:
            subpStdin = subprocess.PIPE
        else:
            subpStdin = None
        p = subprocess.Popen(args=cmd, stdout=subprocess.PIPE, stdin=subpStdin)
        (stdout, stderr) = p.communicate(pipeIn)
        if checkExitCode:
            if p.returncode != 0:
                print('command:', ' '.join(cmd))
                print('stdout:', stdout)
                print('stderr:', stderr)
                print('return:', p.returncode)
            assert p.returncode == 0
        return stdout.decode('utf-8', 'ignore')

    def FileUpdated(self, path):
        """Stage 'path' in git (no-op when git use is disabled)."""
        if not self.git or not self.gitdir:
            return

        cmd = ('git', 'add', path)
        self.RunAndCaptureOutput(cmd)

    def FileAdded(self, path):
        """New files are staged exactly like updated ones."""
        self.FileUpdated(path)

    def RemoveFile(self, path):
        """git-rm 'path' unless it matches a --keep extension or git is disabled."""
        if not self.git or not self.gitdir:
            return

        if self.ShouldKeepFile(path):
            return

        cmd = ('git', 'rm', path)
        self.RunAndCaptureOutput(cmd)

    def ShouldKeepFile(self, path):
        """True when the file's extension (without the dot) was listed with --keep."""
        ext = os.path.splitext(path)[1].lower()
        if ext.startswith('.'):
            ext = ext[1:]
        return ext in self.keep

    def FileConversionFinished(self, pkg, module, src, dst):
        """Commit the staged conversion of src->dst with a standard message.

        No-op when git use is disabled; prefixes the subject with 'ERROR!'
        if unsupported syntax was seen during the conversion.
        """
        if not self.git or not self.gitdir:
            return

        if not self.args.quiet:
            print('Committing: Conversion of', dst)

        prefix = ' '.join(filter(lambda a: a, [pkg, module]))
        message = ''
        if self.unsupportedSyntaxSeen:
            message += 'ERROR! '
        message += '%s: Convert %s to NASM\n' % (prefix, src)
        message += '\n'
        message += 'The %s script was used to convert\n' % sys.argv[0]
        message += '%s to %s\n' % (src, dst)
        message += '\n'
        message += 'Contributed-under: TianoCore Contribution Agreement 1.0\n'
        assert(self.gitemail is not None)
        message += 'Signed-off-by: %s\n' % self.gitemail
        message = message.encode('utf-8', 'ignore')

        cmd = ('git', 'commit', '-F', '-')
        self.RunAndCaptureOutput(cmd, pipeIn=message)
+
+
class ConvertAsmFile(CommonUtils):
    """Translate a single MASM .asm file into NASM syntax.

    The conversion is line oriented: each input line is split into
    indentation, assembly text and trailing ';' comment, translated by
    TranslateAsm, and written to the buffered output (file, stdout, or
    diff display).
    """

    def __init__(self, src, dst, clone):
        CommonUtils.__init__(self, clone)
        self.ConvertAsmFile(src, dst)
        self.FileAdded(dst)
        self.RemoveFile(src)

    def ConvertAsmFile(self, inputFile, outputFile=None):
        """Convert 'inputFile' to 'outputFile' (default: same name with .nasm).

        Raises UnsupportedArch when the file's parent directory names an
        architecture listed in UnsupportedArch.unsupported.
        """
        self.globals = set()
        self.unsupportedSyntaxSeen = False
        self.inputFilename = inputFile
        if not outputFile:
            outputFile = os.path.splitext(inputFile)[0] + '.nasm'
        self.outputFilename = outputFile

        # The parent directory name doubles as the architecture hint.
        fullSrc = os.path.realpath(inputFile)
        srcParentDir = os.path.basename(os.path.split(fullSrc)[0])
        maybeArch = srcParentDir.lower()
        if maybeArch in UnsupportedArch.unsupported:
            raise UnsupportedArch
        self.ia32 = maybeArch == 'ia32'
        self.x64 = maybeArch == 'x64'

        self.inputFileBase = os.path.basename(self.inputFilename)
        self.outputFileBase = os.path.basename(self.outputFilename)
        # Output is buffered so stdout mode ('-') can emit it in one piece.
        self.output = io.BytesIO()
        if not self.args.quiet:
            dirpath, src = os.path.split(self.inputFilename)
            dirpath = self.RootRelative(dirpath)
            dst = os.path.basename(self.outputFilename)
            print('Converting:', dirpath, src, '->', dst)
        lines = io.open(self.inputFilename).readlines()
        self.Convert(lines)
        if self.outputFilename == '-' and not self.diff:
            output_data = self.output.getvalue()
            if sys.version_info >= (3, 0):
                output_data = output_data.decode('utf-8', 'ignore')
            sys.stdout.write(output_data)
            self.output.close()
        else:
            f = io.open(self.outputFilename, 'wb')
            f.write(self.output.getvalue())
            f.close()
            self.output.close()

    # Trailing whitespace plus an optional ';' comment at the end of a line.
    endOfLineRe = re.compile(r'''
        \s* ( ; .* )? \n $
        ''',
        re.VERBOSE | re.MULTILINE
    )
    # Leading whitespace (the line's indentation).
    begOfLineRe = re.compile(r'''
        \s*
        ''',
        re.VERBOSE
    )

    def Convert(self, lines):
        """Translate every input line, tracking blank-line runs so the
        output never accumulates consecutive empty lines."""
        self.proc = None
        self.anonLabelCount = -1
        output = self.output  # NOTE(review): unused local, kept as-is
        self.oldAsmEmptyLineCount = 0
        self.newAsmEmptyLineCount = 0
        for line in lines:
            # Split the line into indent / asm text / end-of-line comment.
            mo = self.begOfLineRe.search(line)
            assert mo is not None
            self.indent = mo.group()
            lineWithoutBeginning = line[len(self.indent):]
            mo = self.endOfLineRe.search(lineWithoutBeginning)
            if mo is None:
                endOfLine = ''
            else:
                endOfLine = mo.group()
            oldAsm = line[len(self.indent):len(line) - len(endOfLine)]
            self.originalLine = line.rstrip()
            if line.strip() == '':
                self.oldAsmEmptyLineCount += 1
            self.TranslateAsm(oldAsm, endOfLine)
            if line.strip() != '':
                self.oldAsmEmptyLineCount = 0

    # 'name PROC [NEAR|FAR] [C] [PUBLIC|PRIVATE] [USES reg ...]'
    procDeclRe = re.compile(r'''
        (?: ASM_PFX \s* [(] \s* )?
        ([\w@][\w@0-9]*) \s*
        [)]? \s+
        PROC
        (?: \s+ NEAR | FAR )?
        (?: \s+ C )?
        (?: \s+ (PUBLIC | PRIVATE) )?
        (?: \s+ USES ( (?: \s+ \w[\w0-9]* )+ ) )?
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'name ENDP'
    procEndRe = re.compile(r'''
        ([\w@][\w@0-9]*) \s+
        ENDP
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'name[:type]' fragment used by the PUBLIC directive.
    varAndTypeSubRe = r' (?: [\w@][\w@0-9]* ) (?: \s* : \s* \w+ )? '
    publicRe = re.compile(r'''
        PUBLIC \s+
        ( %s (?: \s* , \s* %s )* )
        \s* $
        ''' % (varAndTypeSubRe, varAndTypeSubRe),
        re.VERBOSE | re.IGNORECASE
    )

    varAndTypeSubRe = re.compile(varAndTypeSubRe, re.VERBOSE | re.IGNORECASE)

    # 'name MACRO'
    macroDeclRe = re.compile(r'''
        ([\w@][\w@0-9]*) \s+
        MACRO
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'name SECTION' / 'name ENDS'
    sectionDeclRe = re.compile(r'''
        ([\w@][\w@0-9]*) \s+
        ( SECTION | ENDS )
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'EXTRN/EXTERN [C] name:type'
    externRe = re.compile(r'''
        EXTE?RN \s+ (?: C \s+ )?
        ([\w@][\w@0-9]*) \s* : \s* (\w+)
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'EXTERNDEF [C] name:type'
    externdefRe = re.compile(r'''
        EXTERNDEF \s+ (?: C \s+ )?
        ([\w@][\w@0-9]*) \s* : \s* (\w+)
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'name PROTO ...'
    protoRe = re.compile(r'''
        ([\w@][\w@0-9]*) \s+
        PROTO
        (?: \s+ .* )?
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'name db|dw|dd|dq value'
    defineDataRe = re.compile(r'''
        ([\w@][\w@0-9]*) \s+
        ( db | dw | dd | dq ) \s+
        ( .*? )
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'name EQU value'
    equRe = re.compile(r'''
        ([\w@][\w@0-9]*) \s+ EQU \s+ (\S.*?)
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # MASM directives that have no NASM equivalent and are dropped.
    ignoreRe = re.compile(r'''
        \. (?: const |
               mmx |
               model |
               xmm |
               x?list |
               [3-6]86p?
           ) |
        page
        (?: \s+ .* )?
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    whitespaceRe = re.compile(r'\s+', re.MULTILINE)

    def TranslateAsm(self, oldAsm, endOfLine):
        """Translate one line of assembly text (indentation and trailing
        comment already stripped off) and emit the result."""
        assert(oldAsm.strip() == oldAsm)

        # Comments may reference the source file name; retarget them.
        endOfLine = endOfLine.replace(self.inputFileBase, self.outputFileBase)

        oldOp = oldAsm.split()
        if len(oldOp) >= 1:
            oldOp = oldOp[0]
        else:
            oldOp = ''

        if oldAsm == '':
            newAsm = oldAsm
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldOp in ('#include', ):
            # Pre-processor includes pass through untouched.
            newAsm = oldAsm
            self.EmitLine(oldAsm + endOfLine)
        elif oldOp.lower() in ('end', 'title', 'text'):
            # Directives with no NASM equivalent are dropped.
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldAsm.lower() == '@@:':
            # Anonymous label: emit the next numbered local label.
            self.anonLabelCount += 1
            self.EmitLine(self.anonLabel(self.anonLabelCount) + ':')
        elif self.MatchAndSetMo(self.ignoreRe, oldAsm):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldAsm.lower() == 'ret':
            # Restore USES registers (reverse order) before returning.
            # NOTE(review): self.uses is only assigned by the PROC branch
            # below; a bare 'ret' before any PROC would raise
            # AttributeError - confirm inputs always declare PROC first.
            for i in range(len(self.uses) - 1, -1, -1):
                register = self.uses[i]
                self.EmitNewContent('pop ' + register)
            newAsm = 'ret'
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
            self.uses = tuple()
        elif oldOp.lower() == 'lea':
            newAsm = self.ConvertLea(oldAsm)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldAsm.lower() == 'end':
            # NOTE(review): unreachable - the ('end', 'title', 'text')
            # branch above already matches a bare 'end' line.
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
            self.uses = tuple()
        elif self.MatchAndSetMo(self.equRe, oldAsm):
            equ = self.mo.group(1)
            newAsm = '%%define %s %s' % (equ, self.mo.group(2))
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.externRe, oldAsm) or \
                self.MatchAndSetMo(self.protoRe, oldAsm):
            extern = self.mo.group(1)
            self.NewGlobal(extern)
            newAsm = 'extern ' + extern
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.externdefRe, oldAsm):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.macroDeclRe, oldAsm):
            newAsm = '%%macro %s 0' % self.mo.group(1)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldOp.lower() == 'endm':
            newAsm = r'%endmacro'
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.sectionDeclRe, oldAsm):
            name = self.mo.group(1)
            ty = self.mo.group(2)
            if ty.lower() == 'section':
                newAsm = '.' + name
            else:
                newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.procDeclRe, oldAsm):
            # PROC: emit a 'global' declaration (unless PRIVATE), a label,
            # and pushes for any USES registers.
            proc = self.proc = self.mo.group(1)
            visibility = self.mo.group(2)
            if visibility is None:
                visibility = ''
            else:
                visibility = visibility.lower()
            if visibility != 'private':
                self.NewGlobal(self.proc)
                proc = 'ASM_PFX(' + proc + ')'
                self.EmitNewContent('global ' + proc)
            newAsm = proc + ':'
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
            uses = self.mo.group(3)
            if uses is not None:
                uses = tuple(filter(None, uses.split()))
            else:
                uses = tuple()
            self.uses = uses
            for register in self.uses:
                self.EmitNewContent(' push ' + register)
        elif self.MatchAndSetMo(self.procEndRe, oldAsm):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.publicRe, oldAsm):
            # PUBLIC a, b, c -> one 'global ASM_PFX(x)' line per symbol.
            publics = re.findall(self.varAndTypeSubRe, self.mo.group(1))
            publics = tuple(map(lambda p: p.split(':')[0].strip(), publics))
            for i in range(len(publics) - 1):
                name = publics[i]
                self.EmitNewContent('global ASM_PFX(%s)' % publics[i])
                self.NewGlobal(name)
            name = publics[-1]
            self.NewGlobal(name)
            newAsm = 'global ASM_PFX(%s)' % name
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.defineDataRe, oldAsm):
            name = self.mo.group(1)
            ty = self.mo.group(2)
            value = self.mo.group(3)
            if value == '?':
                # MASM's uninitialized marker becomes an explicit zero.
                value = 0
            newAsm = '%s: %s %s' % (name, ty, value)
            newAsm = self.CommonConversions(newAsm)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        else:
            newAsm = self.CommonConversions(oldAsm)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)

    def NewGlobal(self, name):
        """Register 'name' so later references to it get wrapped in ASM_PFX()."""
        regex = re.compile(r'(?<![_\w\d])(?<!ASM_PFX\()(' + re.escape(name) +
                           r')(?![_\w\d])')
        self.globals.add(regex)

    def ConvertAnonymousLabels(self, oldAsm):
        """Rewrite @b/@f back/forward references to the numbered local labels."""
        newAsm = oldAsm
        anonLabel = self.anonLabel(self.anonLabelCount)
        newAsm = newAsm.replace('@b', anonLabel)
        newAsm = newAsm.replace('@B', anonLabel)
        anonLabel = self.anonLabel(self.anonLabelCount + 1)
        newAsm = newAsm.replace('@f', anonLabel)
        newAsm = newAsm.replace('@F', anonLabel)
        return newAsm

    def anonLabel(self, count):
        """NASM local-label name for anonymous label number 'count'."""
        return '.%d' % count

    def EmitString(self, string):
        """Append raw text to the buffered output."""
        self.output.write(string.encode('utf-8', 'ignore'))

    def EmitLineWithDiff(self, old, new):
        """Write one output line (CRLF terminated), optionally printing a
        diff against 'old' ('old' is None for newly inserted content)."""
        newLine = (self.indent + new).rstrip()
        if self.diff:
            if old is None:
                print('+%s' % newLine)
            elif newLine != old:
                print('-%s' % old)
                print('+%s' % newLine)
            else:
                print('', newLine)
        if newLine != '':
            self.newAsmEmptyLineCount = 0
        self.EmitString(newLine + '\r\n')

    def EmitLine(self, string):
        """Emit a line that replaces the current input line."""
        self.EmitLineWithDiff(self.originalLine, string)

    def EmitNewContent(self, string):
        """Emit a line with no counterpart in the input."""
        self.EmitLineWithDiff(None, string)

    def EmitAsmReplaceOp(self, oldAsm, oldOp, newOp, endOfLine):
        """Emit 'oldAsm' with its first occurrence of 'oldOp' swapped for 'newOp'."""
        newAsm = oldAsm.replace(oldOp, newOp, 1)
        self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)

    # MASM hex literal ('0deadh'): capture the digits for 0x rewriting.
    hexNumRe = re.compile(r'0*((?=[\da-f])\d*(?<=\d)[\da-f]*)h', re.IGNORECASE)

    def EmitAsmWithComment(self, oldAsm, newAsm, endOfLine):
        """Apply ASM_PFX and hex-literal rewrites, then emit the line unless
        doing so would create a second consecutive blank output line."""
        for glblRe in self.globals:
            newAsm = glblRe.sub(r'ASM_PFX(\1)', newAsm)

        newAsm = self.hexNumRe.sub(r'0x\1', newAsm)

        newLine = newAsm + endOfLine
        # Emit non-blank results, and blanks only when the input was blank.
        emitNewLine = ((newLine.strip() != '') or
                       ((oldAsm + endOfLine).strip() == ''))
        if emitNewLine and newLine.strip() == '':
            self.newAsmEmptyLineCount += 1
            if self.newAsmEmptyLineCount > 1:
                emitNewLine = False
        if emitNewLine:
            self.EmitLine(newLine.rstrip())
        elif self.diff:
            print('-%s' % self.originalLine)

    # 'lea dst, src'
    leaRe = re.compile(r'''
        (lea \s+) ([\w@][\w@0-9]*) \s* , \s* (\S (?:.*\S)?)
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    def ConvertLea(self, oldAsm):
        """NASM requires the lea source to be a bracketed memory operand."""
        newAsm = oldAsm
        if self.MatchAndSetMo(self.leaRe, oldAsm):
            lea = self.mo.group(1)
            dst = self.mo.group(2)
            src = self.mo.group(3)
            if src.find('[') < 0:
                src = '[' + src + ']'
            newAsm = lea + dst + ', ' + src
        newAsm = self.CommonConversions(newAsm)
        return newAsm

    # '<size> ptr' operand size prefixes.
    ptrRe = re.compile(r'''
        (?<! \S )
        ([dfq]?word|byte) \s+ (?: ptr ) (\s*)
        (?= [[\s] )
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    def ConvertPtr(self, oldAsm):
        """Drop the MASM 'ptr' keyword; 'fword' has no NASM size keyword and
        is removed entirely."""
        newAsm = oldAsm
        while self.SearchAndSetMo(self.ptrRe, newAsm):
            ty = self.mo.group(1)
            if ty.lower() == 'fword':
                ty = ''
            else:
                ty += self.mo.group(2)
            newAsm = newAsm[:self.mo.start(0)] + ty + newAsm[self.mo.end(0):]
        return newAsm

    # 'name label <size>' declarations.
    labelByteRe = re.compile(r'''
        (?: \s+ label \s+ (?: [dfq]?word | byte ) )
        (?! \S )
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    def ConvertLabelByte(self, oldAsm):
        """Convert 'name label <size>' into a plain 'name:' label."""
        newAsm = oldAsm
        if self.SearchAndSetMo(self.labelByteRe, newAsm):
            newAsm = newAsm[:self.mo.start(0)] + ':' + newAsm[self.mo.end(0):]
        return newAsm

    # MASM textual operators in expressions and their NASM spellings.
    unaryBitwiseOpRe = re.compile(r'''
        ( NOT )
        (?= \s+ \S )
        ''',
        re.VERBOSE | re.IGNORECASE
    )
    binaryBitwiseOpRe = re.compile(r'''
        ( \S \s+ )
        ( AND | OR | SHL | SHR )
        (?= \s+ \S )
        ''',
        re.VERBOSE | re.IGNORECASE
    )
    bitwiseOpReplacements = {
        'not': '~',
        'and': '&',
        'shl': '<<',
        'shr': '>>',
        'or': '|',
    }

    def ConvertBitwiseOp(self, oldAsm):
        """Replace textual bitwise operators with their symbolic forms."""
        newAsm = oldAsm
        while self.SearchAndSetMo(self.binaryBitwiseOpRe, newAsm):
            prefix = self.mo.group(1)
            op = self.bitwiseOpReplacements[self.mo.group(2).lower()]
            newAsm = newAsm[:self.mo.start(0)] + prefix + op + \
                newAsm[self.mo.end(0):]
        while self.SearchAndSetMo(self.unaryBitwiseOpRe, newAsm):
            op = self.bitwiseOpReplacements[self.mo.group(1).lower()]
            newAsm = newAsm[:self.mo.start(0)] + op + newAsm[self.mo.end(0):]
        return newAsm

    # '.code' / '.data' directives.
    sectionRe = re.compile(r'''
        \. ( code |
             data
           )
        (?: \s+ .* )?
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    # 'code SEGMENT' / 'data SEGMENT' directives.
    segmentRe = re.compile(r'''
        ( code |
          data )
        (?: \s+ SEGMENT )
        (?: \s+ .* )?
        \s* $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    def ConvertSection(self, oldAsm):
        """Map MASM section/segment directives to NASM SECTION directives;
        X64 code sections also get RIP-relative addressing by default."""
        newAsm = oldAsm
        if self.MatchAndSetMo(self.sectionRe, newAsm) or \
           self.MatchAndSetMo(self.segmentRe, newAsm):
            name = self.mo.group(1).lower()
            if name == 'code':
                if self.x64:
                    self.EmitLine('DEFAULT REL')
                name = 'text'
            newAsm = 'SECTION .' + name
        return newAsm

    fwordRe = re.compile(r'''
        (?<! \S )
        fword
        (?! \S )
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    def FwordUnsupportedCheck(self, oldAsm):
        """Any remaining 'fword' use has no NASM equivalent; flag it."""
        newAsm = oldAsm
        if self.SearchAndSetMo(self.fwordRe, newAsm):
            newAsm = self.Unsupported(newAsm, 'fword used')
        return newAsm

    # Conversions applied to every line that is not handled by a
    # dedicated TranslateAsm branch (order matters: ptr runs before the
    # fword check so sized operands are consumed first).
    __common_conversion_routines__ = (
        ConvertAnonymousLabels,
        ConvertPtr,
        FwordUnsupportedCheck,
        ConvertBitwiseOp,
        ConvertLabelByte,
        ConvertSection,
    )

    def CommonConversions(self, oldAsm):
        """Run the generic conversion pipeline over one line of asm."""
        newAsm = oldAsm
        for conv in self.__common_conversion_routines__:
            newAsm = conv(self, newAsm)
        return newAsm

    def Unsupported(self, asm, message=None):
        """Raise UnsupportedConversion, or - with --force - emit a NASM
        %error line carrying the diagnostic instead."""
        if not self.force:
            raise UnsupportedConversion

        self.unsupportedSyntaxSeen = True
        newAsm = '%error conversion unsupported'
        if message:
            newAsm += '; ' + message
        newAsm += ': ' + asm
        return newAsm
+
+
class ConvertInfFile(CommonUtils):
    """Update one INF: convert its MASM sources to NASM and rewrite the
    [Sources] references accordingly.

    Fixes over the original: the NoSourceFile handler printed the
    undefined name 'reldst' (raising NameError) instead of 'dst', and the
    final report said 'Unabled to convert'.
    """

    def __init__(self, inf, clone):
        CommonUtils.__init__(self, clone)
        self.inf = inf
        self.ScanInfAsmFiles()
        if self.infmode:
            self.ConvertInfAsmFiles()

    # A [Sources] line referencing a .asm/.s file, with optional '| family'
    # qualifier and trailing '#' comment.
    infSrcRe = re.compile(r'''
        \s*
        ( [\w@][\w@0-9/]* \.(asm|s) )
        \s* (?: \| [^#]* )?
        \s* (?: \# .* )?
        $
        ''',
        re.VERBOSE | re.IGNORECASE
    )

    def GetInfAsmFileMapping(self):
        """Map each referenced MASM source to its .nasm destination name.

        'order' preserves first-reference order; sources whose .nasm file
        already exists on disk are skipped.
        """
        srcToDst = {'order': []}
        for line in self.lines:
            line = line.rstrip()
            if self.MatchAndSetMo(self.infSrcRe, line):
                src = self.mo.group(1)
                dst = os.path.splitext(src)[0] + '.nasm'
                fullDst = os.path.join(self.dir, dst)
                if src not in srcToDst and not os.path.exists(fullDst):
                    srcToDst[src] = dst
                    srcToDst['order'].append(src)
        return srcToDst

    def ScanInfAsmFiles(self):
        """Read the INF, derive module/package names from the path, and
        build the src<->dst mappings used during conversion."""
        src = self.inf
        assert os.path.isfile(src)
        f = io.open(src, 'rt')
        self.lines = f.readlines()
        f.close()

        path = os.path.realpath(self.inf)
        (self.dir, inf) = os.path.split(path)
        parent = os.path.normpath(self.dir)
        (lastpath, self.moduleName) = os.path.split(parent)
        # Walk up until a '*Pkg' directory names the package.
        self.packageName = None
        while True:
            lastpath = os.path.normpath(lastpath)
            (parent, basename) = os.path.split(lastpath)
            if parent == lastpath:
                break
            if basename.endswith('Pkg'):
                self.packageName = basename
                break
            lastpath = parent

        self.srcToDst = self.GetInfAsmFileMapping()

        # Invert the mapping: several sources (e.g. .asm and .s variants)
        # may share one .nasm destination.
        self.dstToSrc = {'order': []}
        for src in self.srcToDst['order']:
            dst = self.srcToDst[src]
            if dst not in self.dstToSrc:
                self.dstToSrc[dst] = [src]
                self.dstToSrc['order'].append(dst)
            else:
                self.dstToSrc[dst].append(src)

    def __len__(self):
        """Number of .nasm destinations this INF references."""
        return len(self.dstToSrc['order'])

    def __iter__(self):
        """Iterate the .nasm destinations in first-reference order."""
        return iter(self.dstToSrc['order'])

    def ConvertInfAsmFiles(self):
        """Convert every destination this INF references, reporting files
        that could not be converted."""
        notConverted = []
        unsupportedArchCount = 0
        for dst in self:
            didSomething = False
            try:
                self.UpdateInfAsmFile(dst)
                didSomething = True
            except UnsupportedConversion:
                if not self.args.quiet:
                    print('MASM=>NASM conversion unsupported for', dst)
                notConverted.append(dst)
            except NoSourceFile:
                # Bug fix: was 'reldst', an undefined name here (NameError).
                if not self.args.quiet:
                    print('Source file missing for', dst)
                notConverted.append(dst)
            except UnsupportedArch:
                unsupportedArchCount += 1
            else:
                if didSomething:
                    self.ConversionFinished(dst)
        if len(notConverted) > 0 and not self.args.quiet:
            for dst in notConverted:
                reldst = self.RootRelative(dst)
                # Typo fix: was 'Unabled to convert'.
                print('Unable to convert', reldst)
        if unsupportedArchCount > 0 and not self.args.quiet:
            print('Skipped', unsupportedArchCount, 'files based on architecture')

    def UpdateInfAsmFile(self, dst, IgnoreMissingAsm=False):
        """Convert the .asm source for 'dst' (when present) and rewrite every
        [Sources] reference in the INF from the old names to 'dst'.

        Raises NoSourceFile when the .asm is missing (unless
        IgnoreMissingAsm) and UnsupportedArch for skipped architectures.
        """
        infPath = os.path.split(os.path.realpath(self.inf))[0]
        asmSrc = os.path.splitext(dst)[0] + '.asm'
        fullSrc = os.path.join(infPath, asmSrc)
        fullDst = os.path.join(infPath, dst)
        srcParentDir = os.path.basename(os.path.split(fullSrc)[0])
        if srcParentDir.lower() in UnsupportedArch.unsupported:
            raise UnsupportedArch
        elif not os.path.exists(fullSrc):
            if not IgnoreMissingAsm:
                raise NoSourceFile
        else:  # not os.path.exists(fullDst):
            conv = ConvertAsmFile(fullSrc, fullDst, self)
            self.unsupportedSyntaxSeen = conv.unsupportedSyntaxSeen

        fileChanged = False
        recentSources = list()
        i = 0
        while i < len(self.lines):
            line = self.lines[i].rstrip()
            updatedLine = line
            lineChanged = False
            preserveOldSource = False
            for src in self.dstToSrc[dst]:
                assert self.srcToDst[src] == dst
                updatedLine = self.ReplacePreserveSpacing(
                    updatedLine, src, dst)
                lineChanged = updatedLine != line
                if lineChanged:
                    preserveOldSource = self.ShouldKeepFile(src)
                    break

            if lineChanged:
                if preserveOldSource:
                    # Keep the old source line; insert the .nasm reference
                    # above it once per run of related lines.
                    if updatedLine.strip() not in recentSources:
                        self.lines.insert(i, updatedLine + '\n')
                        recentSources.append(updatedLine.strip())
                        i += 1
                        if self.diff:
                            print('+%s' % updatedLine)
                    if self.diff:
                        print('', line)
                else:
                    if self.diff:
                        print('-%s' % line)
                    if updatedLine.strip() in recentSources:
                        # Duplicate reference; mark the line for removal.
                        self.lines[i] = None
                    else:
                        self.lines[i] = updatedLine + '\n'
                        recentSources.append(updatedLine.strip())
                        if self.diff:
                            print('+%s' % updatedLine)
            else:
                if len(recentSources) > 0:
                    recentSources = list()
                if self.diff:
                    print('', line)

            fileChanged |= lineChanged
            i += 1

        if fileChanged:
            # Drop lines marked for removal above.
            self.lines = list(filter(lambda l: l is not None, self.lines))

        # Non-.asm variants (.s) of the same destination are removed too.
        for src in self.dstToSrc[dst]:
            if not src.endswith('.asm'):
                fullSrc = os.path.join(infPath, src)
                if os.path.exists(fullSrc):
                    self.RemoveFile(fullSrc)

        if fileChanged:
            f = io.open(self.inf, 'w', newline='\r\n')
            f.writelines(self.lines)
            f.close()
            self.FileUpdated(self.inf)

    def ConversionFinished(self, dst):
        """Commit the conversion of 'dst' via the shared git helper."""
        asmSrc = os.path.splitext(dst)[0] + '.asm'
        self.FileConversionFinished(
            self.packageName, self.moduleName, asmSrc, dst)
+
+
class ConvertInfFiles(CommonUtils):
    """Convert the ASM sources referenced by a set of INF files.

    A destination referenced from several INFs is converted once; every
    referencing INF is still updated.  Fix over the original: the final
    report said 'Unabled to convert'.
    """

    def __init__(self, infs, clone):
        CommonUtils.__init__(self, clone)
        infs = map(lambda i: ConvertInfFile(i, self), infs)
        infs = filter(lambda i: len(i) > 0, infs)
        # Map each absolute destination to the (inf, relative dst) pairs
        # referencing it; 'order' preserves first-seen order.
        dstToInfs = {'order': []}
        for inf in infs:
            for dst in inf:
                fulldst = os.path.realpath(os.path.join(inf.dir, dst))
                pair = (inf, dst)
                if fulldst in dstToInfs:
                    dstToInfs[fulldst].append(pair)
                else:
                    dstToInfs['order'].append(fulldst)
                    dstToInfs[fulldst] = [pair]

        notConverted = []
        unsupportedArchCount = 0
        for dst in dstToInfs['order']:
            didSomething = False
            try:
                # Only the first INF triggers the actual conversion; the
                # rest just get their references rewritten.
                for inf, reldst in dstToInfs[dst]:
                    inf.UpdateInfAsmFile(reldst, IgnoreMissingAsm=didSomething)
                    didSomething = True
            except UnsupportedConversion:
                if not self.args.quiet:
                    print('MASM=>NASM conversion unsupported for', reldst)
                notConverted.append(dst)
            except NoSourceFile:
                if not self.args.quiet:
                    print('Source file missing for', reldst)
                notConverted.append(dst)
            except UnsupportedArch:
                unsupportedArchCount += 1
            else:
                if didSomething:
                    inf.ConversionFinished(reldst)
        if len(notConverted) > 0 and not self.args.quiet:
            for dst in notConverted:
                reldst = self.RootRelative(dst)
                # Typo fix: was 'Unabled to convert'.
                print('Unable to convert', reldst)
        if unsupportedArchCount > 0 and not self.args.quiet:
            print('Skipped', unsupportedArchCount, 'files based on architecture')
+
+
class ConvertDirectories(CommonUtils):
    """Recursively convert every INF (and its ASM sources) under a set of
    directory trees."""

    def __init__(self, paths, clone):
        CommonUtils.__init__(self, clone)
        self.paths = paths
        self.ConvertInfAndAsmFiles()

    def ConvertInfAndAsmFiles(self):
        """Collect every .inf below self.paths (pruning SCM metadata
        directories from the walk) and hand them to ConvertInfFiles."""
        for path in self.paths:
            assert(os.path.exists(path))
        discovered = []
        for path in self.paths:
            for root, dirs, files in os.walk(path):
                # Skip version-control metadata directories.
                for scm in ('.svn', '.git'):
                    if scm in dirs:
                        dirs.remove(scm)
                discovered.extend(
                    os.path.realpath(os.path.join(root, name))
                    for name in files
                    if name.lower().endswith('.inf'))

        ConvertInfFiles(discovered, self)
+
+
class ConvertAsmApp(CommonUtils):
    """Top-level driver: dispatch to INF, directory, or single-file conversion
    based on the kind of source argument given."""

    def __init__(self):
        CommonUtils.__init__(self)

        src = self.args.source
        dst = self.args.dest
        if self.infmode:
            ConvertInfFiles((src,), self)
        elif self.dirmode:
            ConvertDirectories((src,), self)
        else:
            # Single-file mode.  The original guarded this with
            # 'elif not self.dirmode', which is always true at this point.
            ConvertAsmFile(src, dst, self)
+
# Run the conversion only when executed as a script; the guard keeps
# 'import ConvertMasmToNasm' free of side effects (the original called
# ConvertAsmApp() unconditionally at import time).
if __name__ == '__main__':
    ConvertAsmApp()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertUni.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertUni.py
new file mode 100755
index 00000000..c099aa4a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ConvertUni.py
@@ -0,0 +1,125 @@
+## @file
+# Check a patch for various format issues
+#
+# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+
+VersionNumber = '0.1'
+__copyright__ = "Copyright (c) 2015, Intel Corporation All rights reserved."
+
+import argparse
+import codecs
+import os
+import sys
+
class ConvertOneArg:
    """Converts utf-16 to utf-8 for one command line argument.

    This could be a single file, or a directory.
    """

    def __init__(self, utf8, source):
        # utf8 True: convert utf-16 -> utf-8; False: the other direction.
        self.utf8 = utf8
        self.source = source
        self.ok = True

        if not os.path.exists(source):
            self.ok = False
        elif os.path.isdir(source):
            self._convert_tree(source)
        else:
            self.ok &= self.convert_one_file(source)

    def _convert_tree(self, directory):
        """Convert every .uni file below 'directory', stopping at the first failure."""
        for root, dirs, files in os.walk(directory):
            for filename in files:
                if not filename.endswith('.uni'):
                    continue
                self.ok &= self.convert_one_file(os.path.join(root, filename))
                if not self.ok:
                    return

    def convert_one_file(self, source):
        """Re-encode 'source' in place; no-op (with a message) when the file
        already appears to be in the target encoding."""
        if self.utf8:
            new_enc, old_enc = 'utf-8', 'utf-16'
        else:
            new_enc, old_enc = 'utf-16', 'utf-8'

        with open(source, mode='rb') as f:
            file_content = f.read()

        # A UTF-16 Byte Order Mark at the start marks the file as utf-16.
        bom = (file_content.startswith(codecs.BOM_UTF16_BE) or
               file_content.startswith(codecs.BOM_UTF16_LE))
        if bom != self.utf8:
            print("%s: already %s" % (source, new_enc))
            return True

        # Decode with the old encoding, re-encode with the new one.
        new_content = file_content.decode(old_enc, 'ignore').encode(new_enc, 'ignore')

        with open(source, mode='wb') as f:
            f.write(new_content)

        print(source + ": converted, size", len(file_content), '=>', len(new_content))
        return True
+
+
class ConvertUniApp:
    """Converts .uni files between utf-16 and utf-8."""

    def __init__(self):
        self.parse_options()
        self.ok = True
        for source in self.args.source:
            self.process_one_arg(source)
        # Exit status: 0 when every argument converted, -1 otherwise.
        self.retval = 0 if self.ok else -1

    def process_one_arg(self, arg):
        """Convert one path argument and fold its result into self.ok."""
        self.ok &= ConvertOneArg(self.utf8, arg).ok

    def parse_options(self):
        """Parse the command line; sets self.args and the direction flag
        self.utf8 (utf-16 -> utf-8 is the default)."""
        parser = argparse.ArgumentParser(description=__copyright__)
        parser.add_argument('--version', action='version',
                            version='%(prog)s ' + VersionNumber)
        parser.add_argument('source', nargs='+',
                            help='[uni file | directory]')
        group = parser.add_mutually_exclusive_group()
        group.add_argument("--utf-8",
                           action="store_true",
                           help="Convert from utf-16 to utf-8 [default]")
        group.add_argument("--utf-16",
                           action="store_true",
                           help="Convert from utf-8 to utf-16")
        self.args = parser.parse_args()
        self.utf8 = not self.args.utf_16
+
# Script entry point: the process exit status mirrors conversion success
# (0 when every argument converted, -1 otherwise).
if __name__ == "__main__":
    sys.exit(ConvertUniApp().retval)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/DetectNotUsedItem.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/DetectNotUsedItem.py
new file mode 100755
index 00000000..1c77ed77
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/DetectNotUsedItem.py
@@ -0,0 +1,198 @@
+## @file
+# Detect unreferenced PCD and GUID/Protocols/PPIs.
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+DetectNotUsedItem
+'''
+import re
+import os
+import sys
+import argparse
+
+#
+# Globals for help information
+#
+__prog__ = 'DetectNotUsedItem'
+__version__ = '%s Version %s' % (__prog__, '0.1')
+__copyright__ = 'Copyright (c) 2019, Intel Corporation. All rights reserved.'
+__description__ = "Detect unreferenced PCD and GUID/Protocols/PPIs.\n"
+
# DEC section names whose items are checked for references.
SectionList = ["LibraryClasses", "Guids", "Ppis", "Protocols", "Pcd"]
+
+
class PROCESS(object):
    """Scan a DEC file plus the DSC/FDF/INF files under the given
    directories and report DEC items never referenced by any of them."""

    def __init__(self, DecPath, InfDirs):
        # DecPath: the package .dec file to check.
        # InfDirs: directories searched for .dsc/.fdf/.inf files.
        self.Dec = DecPath
        self.InfPath = InfDirs
        # Accumulated report text, one string per line.
        self.Log = []

    def ParserDscFdfInfFile(self):
        """Return the comment-stripped content lines of every DSC/FDF/INF
        file found under the search directories."""
        AllContentList = []
        for File in self.SearchbyExt([".dsc", ".fdf", ".inf"]):
            AllContentList += self.ParseDscFdfInfContent(File)
        return AllContentList

    # Search File by extension name
    def SearchbyExt(self, ExtList):
        """Return paths of files under self.InfPath whose names end with
        one of ExtList (a single extension string or a list of them)."""
        # Normalize to a tuple so one walk loop serves both call styles
        # (the original duplicated the whole os.walk body for str vs list).
        if isinstance(ExtList, str):
            Exts = (ExtList,)
        elif isinstance(ExtList, list):
            Exts = tuple(ExtList)
        else:
            return []
        FileList = []
        for path in self.InfPath:
            for root, _, files in os.walk(path, topdown=True, followlinks=False):
                for filename in files:
                    if filename.endswith(Exts):
                        FileList.append(os.path.join(root, filename))
        return FileList

    # Parse DEC file to get Line number and Name
    # return section name, the Item Name and comments line number
    def ParseDecContent(self):
        """Parse the DEC file.

        Returns (ItemName, Comments): ItemName maps a 0-based line index
        to the item defined on that line; Comments maps the same index to
        the list of line indexes (comments plus the definition line
        itself) that belong to that item."""
        SectionRE = re.compile(r'\[(.*)\]')
        Flag = False
        Comments = {}
        Comment_Line = []
        ItemName = {}
        with open(self.Dec, 'r') as F:
            for Index, content in enumerate(F):
                NotComment = not content.strip().startswith("#")
                Section = SectionRE.findall(content)
                if Section and NotComment:
                    # Only track lines inside sections listed in SectionList.
                    Flag = self.IsNeedParseSection(Section[0])
                if Flag:
                    # Collect the pending comment block; it is attached to
                    # the next definition line and then reset.
                    Comment_Line.append(Index)
                    if NotComment:
                        if content != "\n" and content != "\r\n":
                            ItemName[Index] = content.split('=')[0].split('|')[0].split('#')[0].strip()
                            Comments[Index] = Comment_Line
                            Comment_Line = []
        return ItemName, Comments

    def IsNeedParseSection(self, SectionName):
        """Return True when SectionName contains any tracked section tag."""
        for item in SectionList:
            if item in SectionName:
                return True
        return False

    # Parse DSC, FDF, INF File, remove comments, return Lines list
    def ParseDscFdfInfContent(self, File):
        """Return File's lines with blank/comment-only lines dropped and
        inline '#' comments stripped."""
        with open(File, 'r') as F:
            lines = F.readlines()
        Content = []
        for line in lines:
            if line.strip().startswith("#") or line in ("\n", "\r\n"):
                continue
            if "#" in line:
                Content.append(line.split("#")[0].strip())
            else:
                Content.append(line.strip())
        return Content

    def DetectNotUsedItem(self):
        """Compare DEC items against all DSC/FDF/INF content; print and
        return the unused items with their DEC comment locations."""
        NotUsedItem = {}
        DecItem, DecComments = self.ParseDecContent()
        InfDscFdfContent = self.ParserDscFdfInfFile()
        for LineNum in list(DecItem.keys()):
            DecItemName = DecItem[LineNum]
            # Negative look-around keeps e.g. "PcdFoo" from matching "PcdFooBar".
            Match_reg = re.compile("(?<![a-zA-Z0-9_-])%s(?![a-zA-Z0-9_-])" % DecItemName)
            MatchFlag = False
            for Line in InfDscFdfContent:
                if Match_reg.search(Line):
                    MatchFlag = True
                    break
            if not MatchFlag:
                NotUsedItem[LineNum] = DecItemName
        self.Display(NotUsedItem)
        return NotUsedItem, DecComments

    def Display(self, UnuseDict):
        """Print the unused-item report and append it to self.Log."""
        print("DEC File:\n%s\n%s%s" % (self.Dec, "{:<15}".format("Line Number"), "{:<0}".format("Unused Item")))
        self.Log.append(
            "DEC File:\n%s\n%s%s\n" % (self.Dec, "{:<15}".format("Line Number"), "{:<0}".format("Unused Item")))
        for num in list(sorted(UnuseDict.keys())):
            ItemName = UnuseDict[num]
            # num is 0-based; report 1-based line numbers.
            print("%s%s%s" % (" " * 3, "{:<12}".format(num + 1), "{:<1}".format(ItemName)))
            self.Log.append(("%s%s%s\n" % (" " * 3, "{:<12}".format(num + 1), "{:<1}".format(ItemName))))

    def Clean(self, UnUseDict, Comments):
        """Rewrite the DEC file with the unused items (and their attached
        comment lines) removed."""
        removednum = []
        for num in list(UnUseDict.keys()):
            if num in list(Comments.keys()):
                removednum += Comments[num]
        with open(self.Dec, 'r') as Dec:
            lines = Dec.readlines()
        try:
            with open(self.Dec, 'w+') as T:
                for linenum in range(len(lines)):
                    if linenum in removednum:
                        continue
                    else:
                        T.write(lines[linenum])
            print("DEC File has been clean: %s" % (self.Dec))
        except Exception as err:
            print(err)
+
+
class Main(object):
    """Driver object: run detection, optionally clean the DEC file, and
    persist the report log."""

    def mainprocess(self, Dec, Dirs, Isclean, LogPath):
        """Validate Dirs, detect unused items in Dec, optionally remove
        them, then write the accumulated log when LogPath is set."""
        for dir in Dirs:
            if not os.path.exists(dir):
                print("Error: Invalid path for '--dirs': %s" % dir)
                sys.exit(1)
        Pa = PROCESS(Dec, Dirs)
        unuse, comment = Pa.DetectNotUsedItem()
        if Isclean:
            Pa.Clean(unuse, comment)
        self.Logging(Pa.Log, LogPath)

    def Logging(self, content, LogPath):
        """Write the report lines to LogPath (no-op when LogPath is falsy)."""
        if LogPath:
            try:
                # Fix: create the log file's parent directory when it is
                # missing. The original tested os.path.isdir(LogPath)
                # itself, so makedirs could only run when the target was
                # already an existing directory (whose parent trivially
                # exists) and writing into a new directory always failed.
                FilePath = os.path.dirname(LogPath)
                if FilePath and not os.path.exists(FilePath):
                    os.makedirs(FilePath)
                with open(LogPath, 'w+') as log:
                    for line in content:
                        log.write(line)
                print("Log save to file: %s" % LogPath)
            except Exception as e:
                print("Save log Error: %s" % e)
+
+
def main():
    """Command-line entry point: parse arguments, validate the DEC input
    and run detection/cleaning."""
    parser = argparse.ArgumentParser(prog=__prog__,
                                     description=__description__ + __copyright__,
                                     conflict_handler='resolve')
    parser.add_argument('-i', '--input', metavar="", dest='InputDec', help="Input DEC file name.")
    parser.add_argument('--dirs', metavar="", action='append', dest='Dirs',
                        help="The package directory. To specify more directories, please repeat this option.")
    parser.add_argument('--clean', action='store_true', default=False, dest='Clean',
                        help="Clean the unreferenced items from DEC file.")
    parser.add_argument('--log', metavar="", dest="Logfile", default=False,
                        help="Put log in specified file as well as on console.")
    options = parser.parse_args()
    if options.InputDec:
        if not (os.path.exists(options.InputDec) and options.InputDec.endswith(".dec")):
            print("Error: Invalid DEC file input: %s" % options.InputDec)
            # Fix: the original only printed the error and then fell
            # through to process the invalid file anyway; stop here.
            sys.exit(1)
        if options.Dirs:
            M = Main()
            M.mainprocess(options.InputDec, options.Dirs, options.Clean, options.Logfile)
        else:
            print("Error: the following argument is required:'--dirs'.")
    else:
        print("Error: the following argument is required:'-i/--input'.")


if __name__ == '__main__':
    main()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/FormatDosFiles.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/FormatDosFiles.py
new file mode 100755
index 00000000..748aaad3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/FormatDosFiles.py
@@ -0,0 +1,107 @@
+# @file FormatDosFiles.py
+# This script format the source files to follow dos style.
+# It supports Python2.x and Python3.x both.
+#
+# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+#
+# Import Modules
+#
+from __future__ import print_function
+import argparse
+import os
+import os.path
+import re
+import sys
+import copy
+
+__prog__ = 'FormatDosFiles'
+__version__ = '%s Version %s' % (__prog__, '0.10 ')
+__copyright__ = 'Copyright (c) 2018-2019, Intel Corporation. All rights reserved.'
+__description__ = 'Convert source files to meet the EDKII C Coding Standards Specification.\n'
+DEFAULT_EXT_LIST = ['.h', '.c', '.nasm', '.nasmb', '.asm', '.S', '.inf', '.dec', '.dsc', '.fdf', '.uni', '.asl', '.aslc', '.vfr', '.idf', '.txt', '.bat', '.py']
+
+#For working in python2 and python3 environment, re pattern should use binary string, which is bytes type in python3.
+#Because in python3,read from file in binary mode will return bytes type,and in python3 bytes type can not be mixed with str type.
def FormatFile(FilePath, Args):
    """Rewrite FilePath in DOS style: CRLF line endings, final newline,
    no trailing blanks, tabs expanded to two spaces."""
    # Work on raw bytes so the same code behaves under Python 2 and 3.
    with open(FilePath, 'rb') as stream:
        data = stream.read()
    # (pattern, replacement, flags) applied in order:
    #  1. bare LF (not preceded by CR) -> CRLF
    #  2. LF at start of a line -> CRLF
    #  3. ensure the file ends with a line ending
    #  4. drop trailing blanks before each line ending
    #  5. expand tabs to two spaces
    transforms = (
        (br'([^\r])\n', br'\1\r\n', 0),
        (br'^\n', br'\r\n', re.MULTILINE),
        (br'([^\r\n])$', br'\1\r\n', 0),
        (br'[ \t]+(\r\n)', br'\1', re.MULTILINE),
        (b'\t', b'  ', 0),
    )
    for pattern, replacement, flags in transforms:
        data = re.sub(pattern, replacement, data, flags=flags)
    with open(FilePath, 'wb') as stream:
        stream.write(data)
    if not Args.Quiet:
        print(FilePath)
+
def FormatFilesInDir(DirPath, ExtList, Args):
    """Recursively format every file under DirPath whose name ends with
    one of ExtList, honoring the names/paths in Args.Exclude."""
    FileList = []
    ExcludeDir = DirPath
    for DirPath, DirNames, FileNames in os.walk(DirPath):
        if Args.Exclude:
            # Prune directories/files excluded by bare name.
            DirNames[:] = [d for d in DirNames if d not in Args.Exclude]
            FileNames[:] = [f for f in FileNames if f not in Args.Exclude]
            Continue = False
            for Path in Args.Exclude:
                Path = Path.strip('\\').strip('/')
                # Resolve relative exclude entries against the walk root.
                if not os.path.isdir(Path) and not os.path.isfile(Path):
                    Path = os.path.join(ExcludeDir, Path)
                if os.path.isdir(Path) and Path.endswith(DirPath):
                    # Whole directory excluded: stop descending into it.
                    DirNames[:] = []
                    Continue = True
                elif os.path.isfile(Path):
                    # Fix: iterate over a copy. The original aliased
                    # FilePaths = FileNames and removed from FileNames
                    # while iterating it, which skips the entry after
                    # every removal.
                    for ItemPath in list(FileNames):
                        FilePath = os.path.join(DirPath, ItemPath)
                        if Path.endswith(FilePath):
                            FileNames.remove(ItemPath)
            if Continue:
                continue
        for FileName in [f for f in FileNames if any(f.endswith(ext) for ext in ExtList)]:
            FileList.append(os.path.join(DirPath, FileName))
    for File in FileList:
        FormatFile(File, Args)
+
if __name__ == "__main__":
    # Command-line driver: build the parser, compute the effective
    # extension filter, then format every given file or directory.
    parser = argparse.ArgumentParser(prog=__prog__, description=__description__ + __copyright__, conflict_handler = 'resolve')

    parser.add_argument('Path', nargs='+',
                        help='the path for files to be converted.It could be directory or file path.')
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument('--append-extensions', dest='AppendExt', nargs='+',
                        help='append file extensions filter to default extensions. (Example: .txt .c .h)')
    parser.add_argument('--override-extensions', dest='OverrideExt', nargs='+',
                        help='override file extensions filter on default extensions. (Example: .txt .c .h)')
    parser.add_argument('-v', '--verbose', dest='Verbose', action='store_true',
                        help='increase output messages')
    parser.add_argument('-q', '--quiet', dest='Quiet', action='store_true',
                        help='reduce output messages')
    parser.add_argument('--debug', dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0,
                        help='set debug level')
    parser.add_argument('--exclude', dest='Exclude', nargs='+', help="directory name or file name which will be excluded")
    args = parser.parse_args()
    # Start from the default list; --override-extensions replaces it
    # wholesale, --append-extensions merges (set() de-duplicates, so the
    # resulting order is unspecified).
    DefaultExt = copy.copy(DEFAULT_EXT_LIST)

    if args.OverrideExt is not None:
        DefaultExt = args.OverrideExt
    if args.AppendExt is not None:
        DefaultExt = list(set(DefaultExt + args.AppendExt))

    for Path in args.Path:
        if not os.path.exists(Path):
            print("not exists path: {0}".format(Path))
            sys.exit(1)
        if os.path.isdir(Path):
            FormatFilesInDir(Path, DefaultExt, args)
        elif os.path.isfile(Path):
            FormatFile(Path, args)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GccBase.lds b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GccBase.lds
new file mode 100644
index 00000000..4e27334a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GccBase.lds
@@ -0,0 +1,79 @@
+/** @file
+
+ Unified linker script for GCC based builds
+
+ Copyright (c) 2010 - 2015, Intel Corporation. All rights reserved.<BR>
+ Copyright (c) 2015, Linaro Ltd. All rights reserved.<BR>
+ (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
SECTIONS {

  /*
   * The PE/COFF binary consists of DOS and PE/COFF headers, and a sequence of
   * section headers adding up to PECOFF_HEADER_SIZE bytes (which differs
   * between 32-bit and 64-bit builds). The actual start of the .text section
   * will be rounded up based on its actual alignment.
   */
  . = PECOFF_HEADER_SIZE;

  .text : ALIGN(CONSTANT(COMMONPAGESIZE)) {
    *(.text .text.* .stub .gnu.linkonce.t.*)
    *(.rodata .rodata.* .gnu.linkonce.r.*)
    *(.got .got.*)

    /*
     * The contents of AutoGen.c files are mostly constant from the POV of the
     * program, but most of it ends up in .data or .bss by default since few of
     * the variable definitions that get emitted are declared as CONST.
     * Unfortunately, we cannot pull it into the .text section entirely, since
     * patchable PCDs are also emitted here, but we can at least move all of the
     * emitted GUIDs here.
     */
    *:AutoGen.obj(.data.g*Guid)
  }

  /*
   * The alignment of the .data section should be less than or equal to the
   * alignment of the .text section. This ensures that the relative offset
   * between these sections is the same in the ELF and the PE/COFF versions of
   * this binary.
   */
  .data ALIGN(ALIGNOF(.text)) : ALIGN(CONSTANT(COMMONPAGESIZE)) {
    *(.data .data.* .gnu.linkonce.d.*)
    *(.bss .bss.*)
  }

  .eh_frame ALIGN(CONSTANT(COMMONPAGESIZE)) : {
    KEEP (*(.eh_frame))
  }

  /*
   * Relocation entries are kept in a non-allocated (INFO) section so they
   * remain available to post-processing without being loaded at run time.
   * NOTE(review): presumably consumed by GenFw when producing the PE/COFF
   * relocation tables -- confirm.
   */
  .rela (INFO) : {
    *(.rela .rela.*)
  }

  .hii : ALIGN(CONSTANT(COMMONPAGESIZE)) {
    KEEP (*(.hii))
  }

  /*
   * Retain the GNU build id but in a non-allocatable section so GenFw
   * does not copy it into the PE/COFF image.
   */
  .build-id (INFO) : { *(.note.gnu.build-id) }

  /* Input sections that have no place in the PE/COFF image. */
  /DISCARD/ : {
    *(.note.GNU-stack)
    *(.gnu_debuglink)
    *(.interp)
    *(.dynsym)
    *(.dynstr)
    *(.dynamic)
    *(.hash .gnu.hash)
    *(.comment)
    *(COMMON)
  }
}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetMaintainer.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetMaintainer.py
new file mode 100755
index 00000000..2ec550e8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetMaintainer.py
@@ -0,0 +1,194 @@
+## @file
+# Retrieves the people to request review from on submission of a commit.
+#
+# Copyright (c) 2019, Linaro Ltd. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+from collections import defaultdict
+from collections import OrderedDict
+import argparse
+import os
+import re
+import SetupGit
+
# One compiled regex per Maintainers.txt tag line (e.g. "M: Name <email>");
# the named group in each pattern captures the tag's payload.
EXPRESSIONS = {
    'exclude': re.compile(r'^X:\s*(?P<exclude>.*?)\r*$'),
    'file': re.compile(r'^F:\s*(?P<file>.*?)\r*$'),
    'list': re.compile(r'^L:\s*(?P<list>.*?)\r*$'),
    'maintainer': re.compile(r'^M:\s*(?P<maintainer>.*<.*?>)\r*$'),
    'reviewer': re.compile(r'^R:\s*(?P<reviewer>.*?)\r*$'),
    'status': re.compile(r'^S:\s*(?P<status>.*?)\r*$'),
    'tree': re.compile(r'^T:\s*(?P<tree>.*?)\r*$'),
    'webpage': re.compile(r'^W:\s*(?P<webpage>.*?)\r*$')
}
+
def printsection(section):
    """Prints out the dictionary describing a Maintainers.txt section."""
    print('===')
    for key, entries in section.items():
        print("Key: %s" % key)
        for entry in entries:
            print(' %s' % entry)
+
def pattern_to_regex(pattern):
    """Takes a string containing regular UNIX path wildcards
    and returns a string suitable for matching with regex."""
    # Escape literal dots, then map the shell wildcards onto regex.
    regex = pattern.replace('.', r'\.').replace('?', r'.').replace('*', r'.*')

    if regex.endswith('/'):
        # Directory pattern: match anything below it.
        return regex + r'.*'
    if regex.endswith('.*'):
        # Trailing wildcard: restrict the match to the same directory.
        return regex[:-2] + r'(?!.*?/.*?)'
    return regex
+
def path_in_section(path, section):
    """Returns True or False indicating whether the path is covered by
    the current section."""
    if 'file' not in section:
        return False

    for file_pattern in section['file']:
        if not re.match(pattern_to_regex(file_pattern), path):
            continue
        # Matched a file pattern; honor any exclude pattern that also
        # applies before declaring coverage.
        for exclude_pattern in section['exclude']:
            if re.match(pattern_to_regex(exclude_pattern), path):
                return False
        return True

    return False
+
def get_section_maintainers(path, section):
    """Returns a list with email addresses to any M: and R: entries
    matching the provided path in the provided section."""
    maintainers = []
    lists = []
    nowarn_status = ['Supported', 'Maintained']

    if path_in_section(path, section):
        # Warn when the section is not actively maintained.
        for status in section['status']:
            if status not in nowarn_status:
                print('WARNING: Maintained status for "%s" is \'%s\'!' % (path, status))
        for address in section['maintainer'], section['reviewer']:
            # Convert to list if necessary
            if isinstance(address, list):
                maintainers += address
            else:
                # Fix: a bare M:/R: entry is still a maintainer address;
                # the original appended it to the mailing-list bucket.
                maintainers += [address]
        for address in section['list']:
            # Convert to list if necessary
            if isinstance(address, list):
                lists += address
            else:
                lists += [address]

    return maintainers, lists
+
def get_maintainers(path, sections, level=0):
    """For 'path', iterates over all sections, returning maintainers
    for matching ones (None when no maintainers can be found at all)."""
    maintainers = []
    lists = []
    for section in sections:
        tmp_maint, tmp_lists = get_section_maintainers(path, section)
        if tmp_maint:
            maintainers += tmp_maint
        if tmp_lists:
            lists += tmp_lists

    if not maintainers:
        # If no match found, look for match for (nonexistent) file
        # REPO.working_dir/<default>
        print('"%s": no maintainers found, looking for default' % path)
        if level == 0:
            maintainers = get_maintainers('<default>', sections, level=level + 1)
            # Fix: the recursive call may return None; the original then
            # crashed on "None + lists" below.
            if maintainers is None:
                maintainers = []
        else:
            print("No <default> maintainers set for project.")
            if not maintainers:
                return None

    return maintainers + lists
+
def parse_maintainers_line(line):
    """Parse one line of Maintainers.txt, returning any match group and its key."""
    for tag, regex in EXPRESSIONS.items():
        result = regex.match(line)
        if result is not None:
            return tag, result.group(tag)
    return None, None
+
def parse_maintainers_file(filename):
    """Parse the Maintainers.txt from top-level of repo and
    return a list containing dictionaries of all sections."""
    with open(filename, 'r') as text:
        line = text.readline()
        sectionlist = []
        # Accumulates tag -> [values] for the section currently being read.
        section = defaultdict(list)
        while line:
            key, value = parse_maintainers_line(line)
            if key and value:
                section[key].append(value)

            line = text.readline()
            # If end of section (end of file, or non-tag line encountered)...
            if not key or not value or not line:
                # ...if non-empty, append section to list.
                if section:
                    sectionlist.append(section.copy())
                    section.clear()

    return sectionlist
+
def get_modified_files(repo, args):
    """Returns a list of the files modified by the commit specified in 'args'."""
    target = repo.commit(args.commit)
    return target.stats.files
+
if __name__ == '__main__':
    PARSER = argparse.ArgumentParser(
        description='Retrieves information on who to cc for review on a given commit')
    PARSER.add_argument('commit',
                        action="store",
                        help='git revision to examine (default: HEAD)',
                        nargs='?',
                        default='HEAD')
    PARSER.add_argument('-l', '--lookup',
                        help='Find section matches for path LOOKUP',
                        required=False)
    ARGS = PARSER.parse_args()

    # Locate the enclosing git repository (helper from SetupGit.py).
    REPO = SetupGit.locate_repo()

    # Maintainers.txt lives at the top level of the repository.
    CONFIG_FILE = os.path.join(REPO.working_dir, 'Maintainers.txt')

    SECTIONS = parse_maintainers_file(CONFIG_FILE)

    # Either look up the single path given with -l/--lookup, or every
    # file touched by the specified commit.
    if ARGS.lookup:
        FILES = [ARGS.lookup]
    else:
        FILES = get_modified_files(REPO, ARGS)

    ADDRESSES = []

    for file in FILES:
        print(file)
        addresslist = get_maintainers(file, SECTIONS)
        if addresslist:
            ADDRESSES += addresslist

    # De-duplicate while preserving first-seen order.
    for address in list(OrderedDict.fromkeys(ADDRESSES)):
        print(' %s' % address)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetUtcDateTime.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetUtcDateTime.py
new file mode 100644
index 00000000..c23a3a10
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/GetUtcDateTime.py
@@ -0,0 +1,44 @@
+## @file
+# Get current UTC date and time information and output as ascii code.
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+VersionNumber = '0.1'
+import sys
+import datetime
+import argparse
+
+def Main():
+ PARSER = argparse.ArgumentParser(
+ description='Retrieves UTC date and time information (output ordering: year, date, time) - Version ' + VersionNumber)
+ PARSER.add_argument('--year',
+ action='store_true',
+ help='Return UTC year of now. [Example output (2019): 39313032]')
+ PARSER.add_argument('--date',
+ action='store_true',
+ help='Return UTC date MMDD of now. [Example output (7th August): 37303830]')
+ PARSER.add_argument('--time',
+ action='store_true',
+ help='Return 24-hour-format UTC time HHMM of now. [Example output (14:25): 35323431]')
+
+ ARGS = PARSER.parse_args()
+ if len(sys.argv) == 1:
+ print ("ERROR: At least one argument is required!\n")
+ PARSER.print_help()
+
+ today = datetime.datetime.utcnow()
+ if ARGS.year:
+ ReversedNumber = str(today.year)[::-1]
+ print (''.join(hex(ord(HexString))[2:] for HexString in ReversedNumber))
+ if ARGS.date:
+ ReversedNumber = str(today.strftime("%m%d"))[::-1]
+ print (''.join(hex(ord(HexString))[2:] for HexString in ReversedNumber))
+ if ARGS.time:
+ ReversedNumber = str(today.strftime("%H%M"))[::-1]
+ print (''.join(hex(ord(HexString))[2:] for HexString in ReversedNumber))
+
+if __name__ == '__main__':
+ Main()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/MemoryProfileSymbolGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/MemoryProfileSymbolGen.py
new file mode 100755
index 00000000..5e171297
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/MemoryProfileSymbolGen.py
@@ -0,0 +1,276 @@
+##
+# Generate symbal for memory profile info.
+#
+# This tool depends on DIA2Dump.exe (VS) or nm (gcc) to parse debug entry.
+#
+# Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+from __future__ import print_function
+import os
+import re
+import sys
+from optparse import OptionParser
+
+versionNumber = "1.1"
+__copyright__ = "Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved."
+
class Symbols:
    """Line-number/symbol information parsed for one driver image.

    Filled in by parse_debug_file() ("nm -l" output, GCC builds) or
    parse_pdb_file() (Dia2Dump -l output, MSVC builds); getSymbol() then
    maps a relative virtual address back to "func() - source:line".
    """
    def __init__(self):
        # Sorted [rva, function, line, source] records.
        self.listLineAddress = []
        self.pdbName = ""
        # Fix: initialize so getSymbol() before any parse_* call returns
        # " (unknown)" instead of raising AttributeError.
        self.lineCount = 0
        # Cache for function
        self.functionName = ""
        # Cache for line
        self.sourceName = ""

    def getSymbol(self, rva):
        """Return " (func() - source:line)" for the record whose address
        range covers rva, or " (unknown)" when none does."""
        index = 0
        while index + 1 < self.lineCount:
            if self.listLineAddress[index][0] <= rva < self.listLineAddress[index + 1][0]:
                functionName = self.listLineAddress[index][1]
                lineName = self.listLineAddress[index][2]
                sourceName = self.listLineAddress[index][3]
                if lineName == 0:
                    # No source line information for this symbol.
                    return " (" + functionName + "() - " + ")"
                return " (" + functionName + "() - " + sourceName + ":" + str(lineName) + ")"
            index += 1

        return " (unknown)"

    def parse_debug_file(self, driverName, pdbName):
        """Populate listLineAddress by running "nm -l" on pdbName."""
        # Fix: "cmp" is Python 2 only; use plain comparisons throughout.
        if pdbName == "":
            return
        self.pdbName = pdbName

        try:
            nmCommand = "nm"
            nmLineOption = "-l"
            print("parsing (debug) - " + pdbName)
            os.system('%s %s %s > nmDump.line.log' % (nmCommand, nmLineOption, pdbName))
        except Exception:
            print('ERROR: nm command not available. Please verify PATH')
            return

        #
        # parse line
        #
        with open("nmDump.line.log") as linefile:
            reportLines = linefile.readlines()

        # 000113ca T AllocatePool c:\home\edk-ii\MdePkg\Library\UefiMemoryAllocationLib\MemoryAllocationLib.c:399
        patchLineFileMatchString = "([0-9a-fA-F]*)\s+[T|D|t|d]\s+(\w+)\s*((?:[a-zA-Z]:)?[\w+\-./_a-zA-Z0-9\\\\]*):?([0-9]*)"

        for reportLine in reportLines:
            match = re.match(patchLineFileMatchString, reportLine)
            if match is not None:
                rva = int(match.group(1), 16)
                functionName = match.group(2)
                sourceName = match.group(3)
                if match.group(4) != "":
                    lineName = int(match.group(4))
                else:
                    lineName = 0
                self.listLineAddress.append([rva, functionName, lineName, sourceName])

        self.lineCount = len(self.listLineAddress)

        self.listLineAddress = sorted(self.listLineAddress, key=lambda symbolAddress: symbolAddress[0])

    def parse_pdb_file(self, driverName, pdbName):
        """Populate listLineAddress by running Dia2Dump -l on pdbName."""
        if pdbName == "":
            return
        self.pdbName = pdbName

        try:
            DIA2DumpCommand = "Dia2Dump.exe"
            DIA2LinesOption = "-l"
            print("parsing (pdb) - " + pdbName)
            os.system('%s %s %s > DIA2Dump.line.log' % (DIA2DumpCommand, DIA2LinesOption, pdbName))
        except Exception:
            print('ERROR: DIA2Dump command not available. Please verify PATH')
            return

        #
        # parse line
        #
        with open("DIA2Dump.line.log") as linefile:
            reportLines = linefile.readlines()

        #  ** GetDebugPrintErrorLevel
        #  line 32 at [0000C790][0001:0000B790], len = 0x3 c:\home\edk-ii\mdepkg\library\basedebugprinterrorlevellib\basedebugprinterrorlevellib.c (MD5: 687C0AE564079D35D56ED5D84A6164CC)
        #  line 36 at [0000C793][0001:0000B793], len = 0x5
        #  line 37 at [0000C798][0001:0000B798], len = 0x2

        patchLineFileMatchString = "\s+line ([0-9]+) at \[([0-9a-fA-F]{8})\]\[[0-9a-fA-F]{4}\:[0-9a-fA-F]{8}\], len = 0x[0-9a-fA-F]+\s*([\w+\-\:./_a-zA-Z0-9\\\\]*)\s*"
        patchLineFileMatchStringFunc = "\*\*\s+(\w+)\s*"

        for reportLine in reportLines:
            match = re.match(patchLineFileMatchString, reportLine)
            if match is not None:
                # A line record; the source path only appears on the first
                # record of each file, so cache it for the following ones.
                if match.group(3) != "":
                    self.sourceName = match.group(3)
                sourceName = self.sourceName
                functionName = self.functionName

                rva = int(match.group(2), 16)
                lineName = int(match.group(1))
                self.listLineAddress.append([rva, functionName, lineName, sourceName])
            else:
                match = re.match(patchLineFileMatchStringFunc, reportLine)
                if match is not None:
                    # "** FunctionName" header: cache for following records.
                    self.functionName = match.group(1)

        self.lineCount = len(self.listLineAddress)
        self.listLineAddress = sorted(self.listLineAddress, key=lambda symbolAddress: symbolAddress[0])
+
class SymbolsFile:
    """Container mapping driver names to their parsed Symbols objects."""
    def __init__(self):
        # driver name -> Symbols instance
        self.symbolsTable = dict()
+
# Global SymbolsFile instance; created in main() and consulted by
# getSymbolName() for RVA-to-symbol lookups.
symbolsFile = ""

# Parser state shared across processLine() calls: the driver currently
# being processed and the RVA/symbol taken from the current line.
driverName = ""
rvaName = ""
symbolName = ""
+
def getSymbolName(driverName, rva):
    """Return the " (func() - source:line)" annotation for rva in the
    named driver, or " (???)" when the lookup fails for any reason."""
    global symbolsFile

    try:
        symbols = symbolsFile.symbolsTable[driverName]
        if symbols is None:
            return " (???)"
        return symbols.getSymbol(rva)
    except Exception:
        # Unknown driver, unparsed table, or any lookup failure.
        return " (???)"
+
def processLine(newline):
    """Process one line of the memory profile dump.

    Tracks the current driver ("Driver - ..." header lines trigger symbol
    parsing of the referenced PDB/debug file) and appends a resolved
    symbol annotation to allocation entry lines (those containing "<==").
    Returns the possibly-annotated line.
    """
    global driverName
    global rvaName

    # get driver name
    # Fix: the original used Python-2-only cmp() for all the string
    # comparisons in this function; replaced with direct comparisons.
    if newline.startswith("Driver - "):
        driverlineList = newline.split(" ")
        driverName = driverlineList[2]

        # EDKII application output
        pdbMatchString = "Driver - \w* \(Usage - 0x[0-9a-fA-F]+\) \(Pdb - ([:\-.\w\\\\/]*)\)\s*"
        pdbName = ""
        match = re.match(pdbMatchString, newline)
        if match is not None:
            pdbName = match.group(1)

        symbolsFile.symbolsTable[driverName] = Symbols()

        # Dispatch on the debug-file flavor: MSVC .pdb vs GCC debug image.
        if pdbName[-3:] == "pdb":
            symbolsFile.symbolsTable[driverName].parse_pdb_file(driverName, pdbName)
        else:
            symbolsFile.symbolsTable[driverName].parse_debug_file(driverName, pdbName)

    elif newline == "":
        # Blank line ends the current driver's block.
        driverName = ""

    # check entry line
    if newline.find("<==") != -1:
        entry_list = newline.split(" ")
        rvaName = entry_list[4]
        symbolName = getSymbolName(driverName, int(rvaName, 16))
    else:
        rvaName = ""
        symbolName = ""

    if rvaName == "":
        return newline
    return newline + symbolName
+
def myOptionParser():
    """Build the command-line parser and return the validated options."""
    usage = "%prog [--version] [-h] [--help] [-i inputfile [-o outputfile]]"
    parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))
    parser.add_option("-i", "--inputfile", dest="inputfilename", type="string", help="The input memory profile info file output from MemoryProfileInfo application in MdeModulePkg")
    parser.add_option("-o", "--outputfile", dest="outputfilename", type="string", help="The output memory profile info file with symbol, MemoryProfileInfoSymbol.txt will be used if it is not specified")

    options, _ = parser.parse_args()
    if options.inputfilename is None:
        parser.error("no input file specified")
    # Default output name when -o is omitted.
    if options.outputfilename is None:
        options.outputfilename = "MemoryProfileInfoSymbol.txt"
    return options
+
def main():
    """Read the memory profile dump, annotate each entry line with symbol
    information, and write the result to the output file.

    Returns 1 when either file cannot be opened."""
    global symbolsFile
    global Options
    Options = myOptionParser()

    symbolsFile = SymbolsFile()

    # "infile"/"outfile" instead of the original "file", which shadowed
    # the builtin.
    try:
        infile = open(Options.inputfilename)
    except Exception:
        print("fail to open " + Options.inputfilename)
        return 1
    try:
        outfile = open(Options.outputfilename, "w")
    except Exception:
        print("fail to open " + Options.outputfilename)
        # Fix: the original leaked the input handle on this error path.
        infile.close()
        return 1

    try:
        while True:
            line = infile.readline()
            if not line:
                break
            # Strip the trailing newline. NOTE(review): like the
            # original, a final line without '\n' loses its last char.
            newline = processLine(line[:-1])

            outfile.write(newline)
            outfile.write("\n")
    finally:
        infile.close()
        outfile.close()

if __name__ == '__main__':
    sys.exit(main())
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/Readme.md b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/Readme.md
new file mode 100644
index 00000000..4f7de7d2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/Readme.md
@@ -0,0 +1,19 @@
+Prerequisite Tools:
+1. Install Python 2.7.3 from https://www.python.org/download/releases/2.7.3/
+2. Install wxPython 2.8.12.1 from https://sourceforge.net/projects/wxpython/files/wxPython/2.8.12.1/
+ generally the libraries will be installed at python's subfolder, for example in windows: c:\python27\Lib\site-packages\
+3. Install DoxyGen 1.8.6 from https://sourceforge.net/projects/doxygen/files/rel-1.8.6/
+4. (Windows only) Install Htmlhelp tool from https://msdn.microsoft.com/en-us/library/windows/desktop/ms669985(v=vs.85).aspx
+
+Limitation:
+1. The current tool doesn't work with the latest wxPython and DoxyGen releases. Please use the specific versions listed above.
+
+Run the Tool:
+a) Run with GUI:
+ 1. Enter src folder, double click "packagedocapp.pyw" or run command "python packagedocapp.pyw" to open the GUI.
+ 2. Make sure all the information in blank are correct.
+ 3. Click "Generate Package Document!"
+b) Run with command line:
+ 1. Open command line window
+ 2. Enter src folder, for example: "cd C:\PackageDocumentTools\src"
+ 3. Run "python packagedoc_cli.py --help" for detail command.
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/__init__.py
new file mode 100644
index 00000000..a7909346
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/__init__.py
@@ -0,0 +1,6 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
new file mode 100755
index 00000000..7e7dc1c8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedoc_cli.py
@@ -0,0 +1,424 @@
+## @file
+# This module provide command line entry for generating package document!
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+import os, sys, logging, traceback, subprocess
+from optparse import OptionParser
+
+from plugins.EdkPlugins.edk2.model import baseobject
+from plugins.EdkPlugins.edk2.model import doxygengen
+
+# Preprocessor macro sets fed to doxygen, keyed by "ARCH_TOOLCHAIN" tag.
+# (The name keeps the historical "Marco" spelling of "Macro" because it is
+# referenced by option parsing below and possibly by external callers.)
+gArchMarcoDict = {'ALL'      : 'MDE_CPU_IA32 MDE_CPU_X64 MDE_CPU_EBC MDE_CPU_IPF _MSC_EXTENSIONS __GNUC__ __INTEL_COMPILER',
+                  'IA32_MSFT': 'MDE_CPU_IA32 _MSC_EXTENSIONS',
+                  'IA32_GNU' : 'MDE_CPU_IA32 __GNUC__',
+                  'X64_MSFT' : 'MDE_CPU_X64 _MSC_EXTENSIONS ASM_PFX= OPTIONAL= ',
+                  'X64_GNU'  : 'MDE_CPU_X64 __GNUC__ ASM_PFX= OPTIONAL= ',
+                  'IPF_MSFT' : 'MDE_CPU_IPF _MSC_EXTENSIONS ASM_PFX= OPTIONAL= ',
+                  'IPF_GNU'  : 'MDE_CPU_IPF __GNUC__ ASM_PFX= OPTIONAL= ',
+                  'EBC_INTEL': 'MDE_CPU_EBC __INTEL_COMPILER ASM_PFX= OPTIONAL= '}
+
+def parseCmdArgs():
+ parser = OptionParser(version="Package Document Generation Tools - Version 0.1")
+ parser.add_option('-w', '--workspace', action='store', type='string', dest='WorkspacePath',
+ help='Specify workspace absolute path. For example: c:\\tianocore')
+ parser.add_option('-p', '--decfile', action='store', dest='PackagePath',
+ help='Specify the absolute path for package DEC file. For example: c:\\tianocore\\MdePkg\\MdePkg.dec')
+ parser.add_option('-x', '--doxygen', action='store', dest='DoxygenPath',
+ help='Specify the absolute path of doxygen tools installation. For example: C:\\Program Files\\doxygen\bin\doxygen.exe')
+ parser.add_option('-o', '--output', action='store', dest='OutputPath',
+ help='Specify the document output path. For example: c:\\docoutput')
+ parser.add_option('-a', '--arch', action='store', dest='Arch', choices=list(gArchMarcoDict.keys()),
+ help='Specify the architecture used in preprocess package\'s source. For example: -a IA32_MSFT')
+ parser.add_option('-m', '--mode', action='store', dest='DocumentMode', choices=['CHM', 'HTML'],
+ help='Specify the document mode from : CHM or HTML')
+ parser.add_option('-i', '--includeonly', action='store_true', dest='IncludeOnly',
+ help='Only generate document for package\'s public interfaces produced by include folder. ')
+ parser.add_option('-c', '--htmlworkshop', dest='HtmlWorkshopPath',
+ help='Specify the absolute path for Microsoft HTML Workshop\'s hhc.exe file. For example: C:\\Program Files\\HTML Help Workshop\\hhc.exe')
+ (options, args) = parser.parse_args()
+
+ # validate the options
+ errors = []
+ if options.WorkspacePath is None:
+ errors.append('- Please specify workspace path via option -w!')
+ elif not os.path.exists(options.WorkspacePath):
+ errors.append("- Invalid workspace path %s! The workspace path should be exist in absolute path!" % options.WorkspacePath)
+
+ if options.PackagePath is None:
+ errors.append('- Please specify package DEC file path via option -p!')
+ elif not os.path.exists(options.PackagePath):
+ errors.append("- Invalid package's DEC file path %s! The DEC path should be exist in absolute path!" % options.PackagePath)
+
+ default = "C:\\Program Files\\doxygen\\bin\\doxygen.exe"
+ if options.DoxygenPath is None:
+ if os.path.exists(default):
+ print("Warning: Assume doxygen tool is installed at %s. If not, please specify via -x" % default)
+ options.DoxygenPath = default
+ else:
+ errors.append('- Please specify the path of doxygen tool installation via option -x! or install it in default path %s' % default)
+ elif not os.path.exists(options.DoxygenPath):
+ errors.append("- Invalid doxygen tool path %s! The doxygen tool path should be exist in absolute path!" % options.DoxygenPath)
+
+ if options.OutputPath is not None:
+ if not os.path.exists(options.OutputPath):
+ # create output
+ try:
+ os.makedirs(options.OutputPath)
+ except:
+ errors.append('- Fail to create the output directory %s' % options.OutputPath)
+ else:
+ if options.PackagePath is not None and os.path.exists(options.PackagePath):
+ dirpath = os.path.dirname(options.PackagePath)
+ default = os.path.join (dirpath, "Document")
+ print('Warning: Assume document output at %s. If not, please specify via option -o' % default)
+ options.OutputPath = default
+ if not os.path.exists(default):
+ try:
+ os.makedirs(default)
+ except:
+ errors.append('- Fail to create default output directory %s! Please specify document output diretory via option -o' % default)
+ else:
+ errors.append('- Please specify document output path via option -o!')
+
+ if options.Arch is None:
+ options.Arch = 'ALL'
+ print("Warning: Assume arch is \"ALL\". If not, specify via -a")
+
+ if options.DocumentMode is None:
+ options.DocumentMode = "HTML"
+ print("Warning: Assume document mode is \"HTML\". If not, specify via -m")
+
+ if options.IncludeOnly is None:
+ options.IncludeOnly = False
+ print("Warning: Assume generate package document for all package\'s source including publich interfaces and implementation libraries and modules.")
+
+ if options.DocumentMode.lower() == 'chm':
+ default = "C:\\Program Files\\HTML Help Workshop\\hhc.exe"
+ if options.HtmlWorkshopPath is None:
+ if os.path.exists(default):
+ print('Warning: Assume the installation path of Microsoft HTML Workshop is %s. If not, specify via option -c.' % default)
+ options.HtmlWorkshopPath = default
+ else:
+ errors.append('- Please specify the installation path of Microsoft HTML Workshop via option -c!')
+ elif not os.path.exists(options.HtmlWorkshopPath):
+ errors.append('- The installation path of Microsoft HTML Workshop %s does not exists. ' % options.HtmlWorkshopPath)
+
+ if len(errors) != 0:
+ print('\n')
+ parser.error('Fail to start due to following reasons: \n%s' %'\n'.join(errors))
+ return (options.WorkspacePath, options.PackagePath, options.DoxygenPath, options.OutputPath,
+ options.Arch, options.DocumentMode, options.IncludeOnly, options.HtmlWorkshopPath)
+
+def createPackageObject(wsPath, pkgPath):
+ try:
+ pkgObj = baseobject.Package(None, wsPath)
+ pkgObj.Load(pkgPath)
+ except:
+ logging.getLogger().error ('Fail to create package object!')
+ return None
+
+ return pkgObj
+
+def callbackLogMessage(msg, level):
+    # Log callback handed to PackageDocumentAction: echoes the message to
+    # stdout; the level argument is intentionally ignored.
+    print(msg.strip())
+
+def callbackCreateDoxygenProcess(doxPath, configPath):
+    """Run doxygen on the given config file and wait for it to finish.
+
+    On Windows the executable path is quoted so spaces in e.g.
+    "Program Files" survive; shell=True is needed because the command is
+    passed as a single string.
+    """
+    if sys.platform == 'win32':
+        cmd = '"%s" %s' % (doxPath, configPath)
+    else:
+        cmd = '%s %s' % (doxPath, configPath)
+    print(cmd)
+    # NOTE(review): configPath is never quoted, so a config path containing
+    # spaces would break on both platforms — confirm whether that can occur.
+    subprocess.call(cmd, shell=True)
+
+
+def DocumentFixup(outPath, arch):
+ # find BASE_LIBRARY_JUMP_BUFFER structure reference page
+
+ print('\n >>> Start fixup document \n')
+
+ for root, dirs, files in os.walk(outPath):
+ for dir in dirs:
+ if dir.lower() in ['.svn', '_svn', 'cvs']:
+ dirs.remove(dir)
+ for file in files:
+ if not file.lower().endswith('.html'): continue
+ fullpath = os.path.join(outPath, root, file)
+ try:
+ f = open(fullpath, 'r')
+ text = f.read()
+ f.close()
+ except:
+ logging.getLogger().error('\nFail to open file %s\n' % fullpath)
+ continue
+ if arch.lower() == 'all':
+ if text.find('BASE_LIBRARY_JUMP_BUFFER Struct Reference') != -1:
+ FixPageBASE_LIBRARY_JUMP_BUFFER(fullpath, text)
+ if text.find('MdePkg/Include/Library/BaseLib.h File Reference') != -1:
+ FixPageBaseLib(fullpath, text)
+ if text.find('IA32_IDT_GATE_DESCRIPTOR Union Reference') != -1:
+ FixPageIA32_IDT_GATE_DESCRIPTOR(fullpath, text)
+ if text.find('MdePkg/Include/Library/UefiDriverEntryPoint.h File Reference') != -1:
+ FixPageUefiDriverEntryPoint(fullpath, text)
+ if text.find('MdePkg/Include/Library/UefiApplicationEntryPoint.h File Reference') != -1:
+ FixPageUefiApplicationEntryPoint(fullpath, text)
+
+ print(' >>> Finish all document fixing up! \n')
+
+def FixPageBaseLib(path, text):
+ print(' >>> Fixup BaseLib file page at file %s \n' % path)
+ lines = text.split('\n')
+ lastBaseJumpIndex = -1
+ lastIdtGateDescriptor = -1
+ for index in range(len(lines) - 1, -1, -1):
+ line = lines[index]
+ if line.strip() == '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;4 </td>':
+ lines[index] = '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;4&nbsp;[IA32] </td>'
+ if line.strip() == '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;0x10 </td>':
+ lines[index] = '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;0x10&nbsp;[IPF] </td>'
+ if line.strip() == '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;8 </td>':
+ lines[index] = '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;9&nbsp;[EBC, x64] </td>'
+ if line.find('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;4') != -1:
+ lines[index] = lines[index].replace('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;4',
+ 'BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;4&nbsp;[IA32]')
+ if line.find('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;0x10') != -1:
+ lines[index] = lines[index].replace('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;0x10',
+ 'BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;0x10&nbsp;[IPF]')
+ if line.find('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;8') != -1:
+ lines[index] = lines[index].replace('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;8',
+ 'BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;8&nbsp;[x64, EBC]')
+ if line.find('>BASE_LIBRARY_JUMP_BUFFER</a>') != -1:
+ if lastBaseJumpIndex != -1:
+ del lines[lastBaseJumpIndex]
+ lastBaseJumpIndex = index
+ if line.find('>IA32_IDT_GATE_DESCRIPTOR</a></td>') != -1:
+ if lastIdtGateDescriptor != -1:
+ del lines[lastIdtGateDescriptor]
+ lastIdtGateDescriptor = index
+ try:
+ f = open(path, 'w')
+ f.write('\n'.join(lines))
+ f.close()
+ except:
+ logging.getLogger().error(" <<< Fail to fixup file %s\n" % path)
+ return
+ print(" <<< Finish to fixup file %s\n" % path)
+
+def FixPageIA32_IDT_GATE_DESCRIPTOR(path, text):
+    """Fix up the doxygen page for the IA32_IDT_GATE_DESCRIPTOR union.
+
+    Inserts "Data Fields For X64"/"Data Fields For IA32" section headers
+    in front of the two anonymous-struct field tables, then rewrites the
+    file in place.
+    """
+    print(' >>> Fixup structure reference IA32_IDT_GATE_DESCRIPTOR at file %s \n' % path)
+    lines = text.split('\n')
+    # Scan backwards so the insertions do not shift indices that are still
+    # to be visited.
+    for index in range(len(lines) - 1, -1, -1):
+        line = lines[index].strip()
+        if line.find('struct {</td>') != -1 and lines[index - 2].find('>Uint64</a></td>') != -1:
+            lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For X64</h2></td></tr>')
+        if line.find('struct {</td>') != -1 and lines[index - 1].find('Data Fields') != -1:
+            lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For IA32</h2></td></tr>')
+    try:
+        f = open(path, 'w')
+        f.write('\n'.join(lines))
+        f.close()
+    except:
+        logging.getLogger().error("     <<< Fail to fixup file %s\n" % path)
+        return
+    print("    <<< Finish to fixup file %s\n" % path)
+
+def FixPageBASE_LIBRARY_JUMP_BUFFER(path, text):
+    """Fix up the doxygen page for the BASE_LIBRARY_JUMP_BUFFER structure.
+
+    Merges the per-architecture description lines into one combined line,
+    and inserts per-architecture "Data Fields For ..." headers ahead of the
+    first field that is unique to each architecture (R0=EBC, Rbx=X64,
+    F2=IPF, Ebx=IA32). Rewrites the file in place.
+    """
+    print(' >>> Fixup structure reference BASE_LIBRARY_JUMP_BUFFER at file %s \n' % path)
+    lines = text.split('\n')
+    # Scanning backwards: bInDetail is True until the 'Detailed Description'
+    # heading is reached, i.e. "not bInDetail" marks lines ABOVE the heading.
+    bInDetail = True
+    bNeedRemove = False
+    for index in range(len(lines) - 1, -1, -1):
+        line = lines[index]
+        if line.find('Detailed Description') != -1:
+            bInDetail = False
+        if line.startswith('EBC context buffer used by') and lines[index - 1].startswith('x64 context buffer'):
+            # Collapse the four per-arch description lines into one.
+            lines[index] = "IA32/IPF/X64/" + line
+            bNeedRemove  = True
+        if line.startswith("x64 context buffer") or line.startswith('IPF context buffer used by') or \
+           line.startswith('IA32 context buffer used by'):
+            if bNeedRemove:
+                lines.remove(line)
+        if line.find('>R0</a>') != -1 and not bInDetail:
+            if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For EBC</h2></td></tr>':
+                lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For EBC</h2></td></tr>')
+        if line.find('>Rbx</a>') != -1 and not bInDetail:
+            if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For X64</h2></td></tr>':
+                lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For X64</h2></td></tr>')
+        if line.find('>F2</a>') != -1 and not bInDetail:
+            if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For IPF</h2></td></tr>':
+                lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For IPF</h2></td></tr>')
+        if line.find('>Ebx</a>') != -1 and not bInDetail:
+            if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For IA32</h2></td></tr>':
+                lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For IA32</h2></td></tr>')
+    try:
+        f = open(path, 'w')
+        f.write('\n'.join(lines))
+        f.close()
+    except:
+        logging.getLogger().error("     <<< Fail to fixup file %s" % path)
+        return
+    print("    <<< Finish to fixup file %s\n" % path)
+
+def FixPageUefiDriverEntryPoint(path, text):
+    """Fix up the doxygen page for UefiDriverEntryPoint.h.
+
+    For the _ModuleEntryPoint and EfiMain sections, deletes the lines
+    between the first '</dl>' after the section start and the
+    '<p>References <a' line, then rewrites the file in place.
+    """
+    print(' >>> Fixup file reference MdePkg/Include/Library/UefiDriverEntryPoint.h at file %s \n' % path)
+    lines = text.split('\n')
+    bInModuleEntry = False
+    bInEfiMain = False
+    ModuleEntryDlCount  = 0
+    ModuleEntryDelStart = 0
+    ModuleEntryDelEnd   = 0
+    EfiMainDlCount      = 0
+    EfiMainDelStart     = 0
+    EfiMainDelEnd       = 0
+
+    # First pass: locate the [DelStart, DelEnd] ranges for both sections.
+    for index in range(len(lines)):
+        line = lines[index].strip()
+        if line.find('EFI_STATUS</a> EFIAPI _ModuleEntryPoint </td>') != -1:
+            bInModuleEntry = True
+        if line.find('EFI_STATUS</a> EFIAPI EfiMain </td>') != -1:
+            bInEfiMain = True
+        if line.startswith('<p>References <a'):
+            if bInModuleEntry:
+                ModuleEntryDelEnd = index - 1
+                bInModuleEntry = False
+            elif bInEfiMain:
+                EfiMainDelEnd = index - 1
+                bInEfiMain = False
+        if bInModuleEntry:
+            if line.startswith('</dl>'):
+                ModuleEntryDlCount = ModuleEntryDlCount + 1
+            if ModuleEntryDlCount == 1:
+                ModuleEntryDelStart = index + 1
+        if bInEfiMain:
+            if line.startswith('</dl>'):
+                EfiMainDlCount = EfiMainDlCount + 1
+            if EfiMainDlCount == 1:
+                EfiMainDelStart = index + 1
+
+    # Second pass: delete backwards so indices stay valid while deleting.
+    if EfiMainDelEnd > EfiMainDelStart:
+        for index in range(EfiMainDelEnd, EfiMainDelStart, -1):
+            del lines[index]
+    if ModuleEntryDelEnd > ModuleEntryDelStart:
+        for index in range(ModuleEntryDelEnd, ModuleEntryDelStart, -1):
+            del lines[index]
+
+    try:
+        f = open(path, 'w')
+        f.write('\n'.join(lines))
+        f.close()
+    except:
+        logging.getLogger().error("     <<< Fail to fixup file %s" % path)
+        return
+    print("    <<< Finish to fixup file %s\n" % path)
+
+
+def FixPageUefiApplicationEntryPoint(path, text):
+    """Fix up the doxygen page for UefiApplicationEntryPoint.h.
+
+    NOTE(review): this is a byte-for-byte duplicate of
+    FixPageUefiDriverEntryPoint except for the message text — a shared
+    helper would remove the duplication. For the _ModuleEntryPoint and
+    EfiMain sections, deletes the lines between the first '</dl>' after the
+    section start and the '<p>References <a' line, then rewrites the file.
+    """
+    print(' >>> Fixup file reference MdePkg/Include/Library/UefiApplicationEntryPoint.h at file %s \n' % path)
+    lines = text.split('\n')
+    bInModuleEntry = False
+    bInEfiMain = False
+    ModuleEntryDlCount  = 0
+    ModuleEntryDelStart = 0
+    ModuleEntryDelEnd   = 0
+    EfiMainDlCount      = 0
+    EfiMainDelStart     = 0
+    EfiMainDelEnd       = 0
+
+    # First pass: locate the [DelStart, DelEnd] ranges for both sections.
+    for index in range(len(lines)):
+        line = lines[index].strip()
+        if line.find('EFI_STATUS</a> EFIAPI _ModuleEntryPoint </td>') != -1:
+            bInModuleEntry = True
+        if line.find('EFI_STATUS</a> EFIAPI EfiMain </td>') != -1:
+            bInEfiMain = True
+        if line.startswith('<p>References <a'):
+            if bInModuleEntry:
+                ModuleEntryDelEnd = index - 1
+                bInModuleEntry = False
+            elif bInEfiMain:
+                EfiMainDelEnd = index - 1
+                bInEfiMain = False
+        if bInModuleEntry:
+            if line.startswith('</dl>'):
+                ModuleEntryDlCount = ModuleEntryDlCount + 1
+            if ModuleEntryDlCount == 1:
+                ModuleEntryDelStart = index + 1
+        if bInEfiMain:
+            if line.startswith('</dl>'):
+                EfiMainDlCount = EfiMainDlCount + 1
+            if EfiMainDlCount == 1:
+                EfiMainDelStart = index + 1
+
+    # Second pass: delete backwards so indices stay valid while deleting.
+    if EfiMainDelEnd > EfiMainDelStart:
+        for index in range(EfiMainDelEnd, EfiMainDelStart, -1):
+            del lines[index]
+    if ModuleEntryDelEnd > ModuleEntryDelStart:
+        for index in range(ModuleEntryDelEnd, ModuleEntryDelStart, -1):
+            del lines[index]
+
+    try:
+        f = open(path, 'w')
+        f.write('\n'.join(lines))
+        f.close()
+    except:
+        logging.getLogger().error("     <<< Fail to fixup file %s" % path)
+        return
+    print("    <<< Finish to fixup file %s\n" % path)
+
+if __name__ == '__main__':
+    # Entry point: parse options, build the package model, run doxygen,
+    # post-process the HTML, then optionally compile a CHM file.
+    wspath, pkgpath, doxpath, outpath, archtag, docmode, isinc, hwpath = parseCmdArgs()
+
+    # configure logging system
+    logfilepath = os.path.join(outpath, 'log.txt')
+    # NOTE(review): logfilepath is computed but never passed to basicConfig,
+    # so logging goes to stderr only — confirm whether a log file was meant.
+    logging.basicConfig(format='%(levelname)-8s %(message)s', level=logging.DEBUG)
+
+    # create package model object firstly
+    pkgObj = createPackageObject(wspath, pkgpath)
+    if pkgObj is None:
+        sys.exit(-1)
+
+    # create doxygen action model
+    # Split an "ARCH_TOOLCHAIN" tag such as IA32_MSFT into its two parts;
+    # 'ALL' maps to arch/tooltag 'all'.
+    arch    = None
+    tooltag = None
+    if archtag.lower() != 'all':
+        arch    = archtag.split('_')[0]
+        tooltag = archtag.split('_')[1]
+    else:
+        arch    = 'all'
+        tooltag = 'all'
+
+    # preprocess package and call doxygen
+    try:
+        action = doxygengen.PackageDocumentAction(doxpath,
+                                                  hwpath,
+                                                  outpath,
+                                                  pkgObj,
+                                                  docmode,
+                                                  callbackLogMessage,
+                                                  arch,
+                                                  tooltag,
+                                                  isinc,
+                                                  True)
+        action.RegisterCallbackDoxygenProcess(callbackCreateDoxygenProcess)
+        action.Generate()
+    except:
+        message = traceback.format_exception(*sys.exc_info())
+        logging.getLogger().error('Fail to create doxygen action! \n%s' % ''.join(message))
+        sys.exit(-1)
+
+    DocumentFixup(outpath, arch)
+
+    # generate CHM is necessary
+    if docmode.lower() == 'chm':
+        indexpath = os.path.join(outpath, 'html', 'index.hhp')
+        if sys.platform == 'win32':
+            cmd = '"%s" %s' % (hwpath, indexpath)
+        else:
+            cmd = '%s %s' % (hwpath, indexpath)
+        # NOTE(review): unlike callbackCreateDoxygenProcess this call omits
+        # shell=True, so the single-string command fails on non-Windows —
+        # confirm intended platforms.
+        subprocess.call(cmd)
+        print('\nFinish to generate package document! Please open %s for review' % os.path.join(outpath, 'html', 'index.chm'))
+    else:
+        print('\nFinish to generate package document! Please open %s for review' % os.path.join(outpath, 'html', 'index.html'))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedocapp.pyw b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedocapp.pyw
new file mode 100755
index 00000000..d764c1a1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/packagedocapp.pyw
@@ -0,0 +1,1060 @@
+## @file
+# This file is used to define common string related functions used in parsing
+# process
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import os, sys, wx, logging
+
+import wx.stc
+import wx.lib.newevent
+import wx.lib.agw.genericmessagedialog as GMD
+from plugins.EdkPlugins.edk2.model import baseobject
+from plugins.EdkPlugins.edk2.model import doxygengen
+
+if hasattr(sys, "frozen"):
+ appPath = os.path.abspath(os.path.dirname(sys.executable))
+else:
+ appPath = os.path.abspath(os.path.dirname(__file__))
+
+AppCallBackEvent, EVT_APP_CALLBACK = wx.lib.newevent.NewEvent()
+LogEvent, EVT_LOG = wx.lib.newevent.NewEvent()
+
+class PackageDocApp(wx.App):
+
+ def OnInit(self):
+ logfile = os.path.join(appPath, 'log.txt')
+ logging.basicConfig(format='%(name)-8s %(levelname)-8s %(message)s',
+ filename=logfile, level=logging.ERROR)
+
+ self.SetAppName('Package Doxygen Generate Application')
+ frame = PackageDocMainFrame(None, "Package Document Generation Application!")
+ self.SetTopWindow(frame)
+
+ frame.Show(True)
+
+ EVT_APP_CALLBACK( self, self.OnAppCallBack)
+ return True
+
+ def GetLogger(self):
+ return logging.getLogger('')
+
+ def ForegroundProcess(self, function, args):
+ wx.PostEvent(self, AppCallBackEvent(callback=function, args=args))
+
+ def OnAppCallBack(self, event):
+ try:
+ event.callback(*event.args)
+ except:
+ self._logger.exception( 'OnAppCallBack<%s.%s>\n' %
+ (event.callback.__module__, event.callback.__name__ ))
+
+class PackageDocMainFrame(wx.Frame):
+    """Main window: collects workspace/package/tool paths and options,
+    persists recent values via wx config, and launches document generation.
+
+    NOTE(review): uses wxPython 2.8 "classic" APIs throughout
+    (wx.EVT_BUTTON called as a function, wx.ConfigBase_Get,
+    wx.ArtProvider_GetBitmap) — matches the Readme's pinned wxPython 2.8.12.1.
+    """
+    def __init__(self, parent, title):
+        wx.Frame.__init__(self, parent, -1, title, size=(550, 290), style=wx.MINIMIZE_BOX|wx.SYSTEM_MENU|wx.CAPTION|wx.CLOSE_BOX )
+
+        panel = wx.Panel(self)
+        sizer = wx.BoxSizer(wx.VERTICAL)
+
+        subsizer = wx.GridBagSizer(5, 10)
+        subsizer.AddGrowableCol(1)
+        subsizer.Add(wx.StaticText(panel, -1, "Workspace Location : "), (0, 0), flag=wx.ALIGN_CENTER_VERTICAL)
+        self._workspacePathCtrl = wx.ComboBox(panel, -1)
+        # Pre-fill the combo with recently used values; the most recent one
+        # (last in the stored list) becomes the current value.
+        list = self.GetConfigure("WorkspacePath")
+        if len(list) != 0:
+            for item in list:
+                self._workspacePathCtrl.Append(item)
+            self._workspacePathCtrl.SetValue(list[len(list) - 1])
+
+        subsizer.Add(self._workspacePathCtrl, (0, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        self._workspacePathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
+        subsizer.Add(self._workspacePathBt, (0, 2), flag=wx.ALIGN_CENTER_VERTICAL)
+        wx.EVT_BUTTON(self._workspacePathBt, self._workspacePathBt.GetId(), self.OnBrowsePath)
+
+        subsizer.Add(wx.StaticText(panel, -1, "Package DEC Location : "), (1, 0), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        self._packagePathCtrl = wx.ComboBox(panel, -1)
+        list = self.GetConfigure("PackagePath")
+        if len(list) != 0:
+            for item in list:
+                self._packagePathCtrl.Append(item)
+            self._packagePathCtrl.SetValue(list[len(list) - 1])
+        subsizer.Add(self._packagePathCtrl, (1, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        self._packagePathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
+        subsizer.Add(self._packagePathBt, (1, 2), flag=wx.ALIGN_CENTER_VERTICAL)
+        wx.EVT_BUTTON(self._packagePathBt, self._packagePathBt.GetId(), self.OnBrowsePath)
+
+        subsizer.Add(wx.StaticText(panel, -1, "Doxygen Tool Location : "), (2, 0), flag=wx.ALIGN_CENTER_VERTICAL)
+        self._doxygenPathCtrl = wx.TextCtrl(panel, -1)
+        list = self.GetConfigure('DoxygenPath')
+        if len(list) != 0:
+            self._doxygenPathCtrl.SetValue(list[0])
+        else:
+            # Platform-specific default install locations.
+            if wx.Platform == '__WXMSW__':
+                self._doxygenPathCtrl.SetValue('C:\\Program Files\\Doxygen\\bin\\doxygen.exe')
+            else:
+                self._doxygenPathCtrl.SetValue('/usr/bin/doxygen')
+
+        self._doxygenPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
+        subsizer.Add(self._doxygenPathCtrl, (2, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        subsizer.Add(self._doxygenPathBt, (2, 2), flag=wx.ALIGN_CENTER_VERTICAL)
+        wx.EVT_BUTTON(self._doxygenPathBt, self._doxygenPathBt.GetId(), self.OnBrowsePath)
+
+        subsizer.Add(wx.StaticText(panel, -1, "CHM Tool Location : "), (3, 0), flag=wx.ALIGN_CENTER_VERTICAL)
+        self._chmPathCtrl = wx.TextCtrl(panel, -1)
+        list = self.GetConfigure('CHMPath')
+        if len(list) != 0:
+            self._chmPathCtrl.SetValue(list[0])
+        else:
+            self._chmPathCtrl.SetValue('C:\\Program Files\\HTML Help Workshop\\hhc.exe')
+
+        self._chmPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
+        subsizer.Add(self._chmPathCtrl, (3, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        subsizer.Add(self._chmPathBt, (3, 2), flag=wx.ALIGN_CENTER_VERTICAL)
+        wx.EVT_BUTTON(self._chmPathBt, self._chmPathBt.GetId(), self.OnBrowsePath)
+
+        subsizer.Add(wx.StaticText(panel, -1, "Output Location : "), (4, 0), flag=wx.ALIGN_CENTER_VERTICAL)
+        self._outputPathCtrl = wx.ComboBox(panel, -1)
+        list = self.GetConfigure("OutputPath")
+        if len(list) != 0:
+            for item in list:
+                self._outputPathCtrl.Append(item)
+            self._outputPathCtrl.SetValue(list[len(list) - 1])
+
+        subsizer.Add(self._outputPathCtrl, (4, 1), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        self._outputPathBt = wx.BitmapButton(panel, -1, bitmap=wx.ArtProvider_GetBitmap(wx.ART_FILE_OPEN))
+        subsizer.Add(self._outputPathBt, (4, 2), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        wx.EVT_BUTTON(self._outputPathBt, self._outputPathBt.GetId(), self.OnBrowsePath)
+
+        subsizer.Add(wx.StaticText(panel, -1, "Architecture Specified : "), (5, 0), flag=wx.ALIGN_CENTER_VERTICAL)
+        self._archCtrl = wx.ComboBox(panel, -1, value='ALL', choices=['ALL', 'IA32/MSFT', 'IA32/GNU', 'X64/INTEL', 'X64/GNU', 'IPF/MSFT', 'IPF/GNU', 'EBC/INTEL'],
+                                     style=wx.CB_READONLY)
+        self._archCtrl.Bind(wx.EVT_COMBOBOX, self.OnArchtectureSelectChanged)
+        subsizer.Add(self._archCtrl, (5, 1), (1, 2), flag=wx.ALIGN_CENTER_VERTICAL|wx.EXPAND)
+        sizer.Add(subsizer, 0, wx.EXPAND|wx.TOP|wx.BOTTOM|wx.LEFT|wx.RIGHT, 5)
+
+        sizer6 = wx.BoxSizer(wx.HORIZONTAL)
+        self._modesel = wx.RadioBox(panel, -1, 'Generated Document Mode', majorDimension=2, choices=['CHM', 'HTML'], style=wx.RA_SPECIFY_COLS)
+        self._modesel.SetStringSelection('HTML')
+
+        self._includeonlysel = wx.CheckBox(panel, -1, 'Only document public include')
+
+        sizer6.Add(self._modesel, 0 , wx.EXPAND)
+        sizer6.Add(self._includeonlysel, 0, wx.EXPAND|wx.LEFT, 5)
+
+        sizer.Add(sizer6, 0, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT, 5)
+
+        self._generateBt = wx.Button(panel, -1, "Generate Package Document!")
+        self._generateBt.Bind(wx.EVT_BUTTON, self.OnGenerate)
+        sizer.Add(self._generateBt, 0, wx.EXPAND|wx.TOP|wx.LEFT|wx.RIGHT, 5)
+
+        panel.SetSizer(sizer)
+        panel.Layout()
+        panel.SetAutoLayout(True)
+        self.CenterOnScreen()
+
+    def SaveConfigure(self, name, value):
+        """Append value to the semicolon-separated MRU list stored under
+        name, keeping at most 10 entries and moving duplicates to the end."""
+        # NOTE(review): "value ==None" should be "value is None" per PEP 8;
+        # behavior is the same for strings, so left unchanged here.
+        if value ==None or len(value) == 0:
+            return
+        config = wx.ConfigBase_Get()
+        oldvalues = config.Read(name, '').split(';')
+        if len(oldvalues) >= 10:
+            oldvalues.remove(oldvalues[0])
+        if value not in oldvalues:
+            oldvalues.append(value)
+        else:
+            oldvalues.remove(value)
+            oldvalues.append(value)
+
+        config.Write(name, ';'.join(oldvalues))
+
+    def GetConfigure(self, name):
+        """Return the stored MRU list for name as a list of non-empty
+        strings (oldest first)."""
+        config = wx.ConfigBase_Get()
+        values = config.Read(name, '').split(';')
+        list = []
+        for item in values:
+            if len(item) != 0:
+                list.append(item)
+        return list
+
+    def OnBrowsePath(self, event):
+        """Shared handler for all five browse buttons: pick the matching
+        control by button id, show a file or directory dialog, store the
+        chosen path back into the control."""
+        id = event.GetId()
+        editctrl = None
+        startdir = ''
+        isFile = False
+        if id == self._packagePathBt.GetId():
+            dlgTitle = "Choose package path:"
+            editctrl = self._packagePathCtrl
+            isFile = True
+            if os.path.exists(self.GetWorkspace()):
+                startdir = self.GetWorkspace()
+        elif id == self._workspacePathBt.GetId():
+            dlgTitle = "Choose workspace path:"
+            editctrl = self._workspacePathCtrl
+            startdir = editctrl.GetValue()
+        elif id == self._doxygenPathBt.GetId():
+            isFile = True
+            dlgTitle = "Choose doxygen installation path:"
+            editctrl = self._doxygenPathCtrl
+            startdir = editctrl.GetValue()
+        elif id == self._outputPathBt.GetId():
+            dlgTitle = "Choose document output path:"
+            editctrl = self._outputPathCtrl
+            # NOTE(review): the workspace-based startdir set here is
+            # immediately overwritten by the next line — confirm intent.
+            if os.path.exists(self.GetWorkspace()):
+                startdir = self.GetWorkspace()
+            startdir = editctrl.GetValue()
+        elif id == self._chmPathBt.GetId():
+            isFile = True
+            dlgTitle = "Choose installation path for Microsoft HTML workshop software"
+            editctrl = self._chmPathCtrl
+            startdir = editctrl.GetValue()
+        else:
+            return
+
+        if not isFile:
+            dlg = wx.DirDialog(self, dlgTitle, defaultPath=startdir)
+        else:
+            dlg = wx.FileDialog(self, dlgTitle, defaultDir=startdir)
+
+        if dlg.ShowModal() == wx.ID_OK:
+            editctrl.SetValue(dlg.GetPath())
+        dlg.Destroy()
+
+    def OnArchtectureSelectChanged(self, event):
+        """Combo-box change handler.
+
+        NOTE(review): the macro string built below is never used or stored —
+        this whole body is dead code (as is OnMacroText); confirm whether a
+        macro control was removed at some point.
+        """
+        str = ''
+        selarch = self._archCtrl.GetValue()
+        if selarch == 'ALL':
+            str += 'MDE_CPU_IA32 MDE_CPU_X64 MDE_CPU_EBC MDE_CPU_IPF _MSC_EXTENSIONS __GNUC__ __INTEL_COMPILER'
+        elif selarch == 'IA32/MSFT':
+            str += 'MDE_CPU_IA32 _MSC_EXTENSIONS'
+        elif selarch == 'IA32/GNU':
+            str += 'MDE_CPU_IA32 __GNUC__'
+        elif selarch == 'X64/MSFT':
+            str += 'MDE_CPU_X64 _MSC_EXTENSIONS'
+        elif selarch == 'X64/GNU':
+            str += 'MDE_CPU_X64 __GNUC__'
+        elif selarch == 'IPF/MSFT':
+            str += 'MDE_CPU_IPF _MSC_EXTENSIONS'
+        elif selarch == 'IPF/GNU':
+            str += 'MDE_CPU_IPF __GNUC__'
+        elif selarch == 'EBC/INTEL':
+            str += 'MDE_CPU_EBC __INTEL_COMPILER'
+
+        str += ' ASM_PFX= OPTIONAL= '
+
+    def OnMacroText(self, event):
+        """NOTE(review): duplicate of OnArchtectureSelectChanged and equally
+        dead — the computed string is discarded."""
+        str = ''
+        selarch = self._archCtrl.GetValue()
+        if selarch == 'ALL':
+            str += 'MDE_CPU_IA32 MDE_CPU_X64 MDE_CPU_EBC MDE_CPU_IPF _MSC_EXTENSIONS __GNUC__ __INTEL_COMPILER'
+        elif selarch == 'IA32/MSFT':
+            str += 'MDE_CPU_IA32 _MSC_EXTENSIONS'
+        elif selarch == 'IA32/GNU':
+            str += 'MDE_CPU_IA32 __GNUC__'
+        elif selarch == 'X64/MSFT':
+            str += 'MDE_CPU_X64 _MSC_EXTENSIONS'
+        elif selarch == 'X64/GNU':
+            str += 'MDE_CPU_X64 __GNUC__'
+        elif selarch == 'IPF/MSFT':
+            str += 'MDE_CPU_IPF _MSC_EXTENSIONS'
+        elif selarch == 'IPF/GNU':
+            str += 'MDE_CPU_IPF __GNUC__'
+        elif selarch == 'EBC/INTEL':
+            str += 'MDE_CPU_EBC __INTEL_COMPILER'
+
+        str += ' ASM_PFX= OPTIONAL= '
+
+    def OnGenerate(self, event):
+        """Validate inputs, then run generation inside a modal progress
+        dialog."""
+        if not self.CheckInput(): return
+
+        dlg = ProgressDialog(self)
+        dlg.ShowModal()
+        dlg.Destroy()
+
+    def CheckInput(self):
+        """Validate all user inputs, persisting good values to the MRU
+        config; returns True when generation may proceed."""
+        pPath = self.GetPackagePath()
+        wPath = self.GetWorkspace()
+        dPath = self.GetDoxygenToolPath()
+        cPath = self.GetChmToolPath()
+        oPath = self.GetOutputPath()
+
+        if len(wPath) == 0 or not os.path.exists(wPath):
+            self._Error("Please input existing workspace path!")
+            return False
+        else:
+            self.SaveConfigure('WorkspacePath', wPath)
+
+        if len(pPath) == 0 or not os.path.exists(pPath) or not pPath.lower().endswith('.dec'):
+            self._Error("Please input existing package file location!")
+            return False
+        elif pPath.lower().find(wPath.lower()) == -1:
+            self._Error("Package patch should starts with workspace path, such as if workspace path is c:\\edk2, package patch could be c:\\edk2\MdePkg")
+            return False
+        else:
+            self.SaveConfigure('PackagePath', pPath)
+
+        if len(dPath) == 0 or not os.path.exists(dPath):
+            self._Error("Can not find doxygen tool from path %s! Please download it from www.stack.nl/~dimitri/doxygen/download.html" % dPath)
+            return False
+        else:
+            self.SaveConfigure('DoxygenPath', dPath)
+
+        if self._modesel.GetStringSelection() == 'CHM':
+            if (len(cPath) == 0 or not os.path.exists(cPath)):
+                self._Error("You select CHM mode to generate document, but can not find software of Microsoft HTML Help Workshop.\nPlease\
+ download it from http://www.microsoft.com/downloads/details.aspx?FamilyID=00535334-c8a6-452f-9aa0-d597d16580cc&displaylang=en\n\
+and install!")
+                return False
+            else:
+                self.SaveConfigure('CHMPath', cPath)
+
+        if len(oPath) == 0:
+            self._Error("You must specific document output path")
+            return False
+        else:
+            self.SaveConfigure('OutputPath', oPath)
+
+        if os.path.exists(oPath):
+            # add checking whether there is old doxygen config file here
+            files = os.listdir(oPath)
+            for file in files:
+                if os.path.isfile(os.path.join(oPath,file)):
+                    basename, ext = os.path.splitext(file)
+                    if ext.lower() == '.doxygen_config':
+                        dlg = GMD.GenericMessageDialog(self, "Existing doxygen document in output directory will be overwritten\n, Are you sure?",
+                                                       "Info", wx.ICON_WARNING|wx.YES_NO)
+                        if dlg.ShowModal() == wx.ID_YES:
+                            break
+                        else:
+                            return False
+        else:
+            try:
+                os.makedirs(oPath)
+            except:
+                self._Error("Fail to create output directory, please select another output directory!")
+                return False
+
+        return True
+
+    def _Error(self, message):
+        # Modal error popup helper.
+        dlg = GMD.GenericMessageDialog(self, message,
+                                       "Error", wx.ICON_ERROR|wx.OK)
+        dlg.ShowModal()
+        dlg.Destroy()
+
+    def GetWorkspace(self):
+        """Normalized workspace path from the combo box."""
+        return os.path.normpath(self._workspacePathCtrl.GetValue())
+
+    def GetPackagePath(self):
+        """Normalized package DEC file path from the combo box."""
+        return os.path.normpath(self._packagePathCtrl.GetValue())
+
+    def GetOutputPath(self):
+        """Normalized document output path from the combo box."""
+        return os.path.normpath(self._outputPathCtrl.GetValue())
+
+    def GetDoxygenToolPath(self):
+        """Normalized doxygen executable path from the text control."""
+        return os.path.normpath(self._doxygenPathCtrl.GetValue())
+
+    def GetChmToolPath(self):
+        """Normalized hhc.exe path from the text control."""
+        return os.path.normpath(self._chmPathCtrl.GetValue())
+
+    def GetDocumentMode(self):
+        """'CHM' or 'HTML' per the radio box."""
+        return self._modesel.GetStringSelection()
+
+    def GetArchitecture(self):
+        # 'ARCH/TOOLCHAIN' selection -> architecture part ('ALL' stays 'ALL').
+        value = self._archCtrl.GetValue()
+        return value.split('/')[0]
+
+    def GetToolTag(self):
+        # 'ARCH/TOOLCHAIN' selection -> toolchain part; 'ALL' has no '/'.
+        value = self._archCtrl.GetValue()
+        if value == 'ALL':
+            return 'ALL'
+        return value.split('/')[1]
+
+    def GetIsOnlyDocumentInclude(self):
+        """True when only the public include folder should be documented."""
+        return self._includeonlysel.IsChecked()
+
class ProgressDialog(wx.Dialog):
    """Modal progress window that drives the whole document-generation run.

    It builds the package object model, generates the doxygen config, spawns
    the doxygen (and optionally the CHM compiler) child process asynchronously
    via wx.Execute, streams the child's output into a read-only text control,
    and post-processes the generated HTML.  Log records from the 'edk', root
    and 'app' loggers are routed here through a ResultHandler.
    """

    def __init__(self, parent, id=wx.ID_ANY):
        # The parent dialog supplies all paths/settings via its Get* accessors.
        title = "Generate Document for " + parent.GetPackagePath()
        wx.Dialog.__init__(self, parent, id, title=title, style=wx.CAPTION, size=(600, 300))
        self.Freeze()
        sizer = wx.BoxSizer(wx.VERTICAL)
        self._textCtrl = wx.StaticText(self, -1, "Start launching!")
        self._gaugeCtrl = wx.Gauge(self, -1, 100, size=(-1, 10))
        self._resultCtrl = wx.stc.StyledTextCtrl(self, -1)
        self._closeBt = wx.Button(self, -1, "Close")
        self._gotoOuputBt = wx.Button(self, -1, "Goto Output")

        # clear all margin
        self._resultCtrl.SetMarginWidth(0, 0)
        self._resultCtrl.SetMarginWidth(1, 0)
        self._resultCtrl.SetMarginWidth(2, 0)

        sizer.Add(self._textCtrl, 0, wx.EXPAND|wx.LEFT|wx.TOP|wx.RIGHT, 5)
        sizer.Add(self._gaugeCtrl, 0, wx.EXPAND|wx.LEFT|wx.TOP|wx.RIGHT, 5)
        sizer.Add(self._resultCtrl, 1, wx.EXPAND|wx.LEFT|wx.TOP|wx.RIGHT, 5)
        btsizer = wx.BoxSizer(wx.HORIZONTAL)
        # NOTE(review): wx.LEFT appears twice in these flag masks; the second
        # one was probably meant to be wx.RIGHT — confirm intended spacing.
        btsizer.Add(self._gotoOuputBt, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.LEFT|wx.TOP|wx.LEFT|wx.BOTTOM, 5)
        btsizer.Add(self._closeBt, 0, wx.ALIGN_CENTER_HORIZONTAL|wx.LEFT|wx.TOP|wx.LEFT|wx.BOTTOM, 5)
        sizer.Add(btsizer, 0, wx.ALIGN_CENTER_HORIZONTAL)

        self.SetSizer(sizer)
        self.CenterOnScreen()
        self.Thaw()

        # Route all relevant loggers into this dialog's output pane.
        self._logger = logging.getLogger('')
        self._loghandle = ResultHandler(self)
        logging.getLogger('edk').addHandler(self._loghandle)
        logging.getLogger('').addHandler(self._loghandle)
        logging.getLogger('app').addHandler(self._loghandle)

        wx.EVT_BUTTON(self._closeBt, self._closeBt.GetId(), self.OnButtonClose)
        wx.EVT_UPDATE_UI(self, self._closeBt.GetId(), self.OnUpdateCloseButton)
        wx.EVT_BUTTON(self._gotoOuputBt, self._gotoOuputBt.GetId(), self.OnGotoOutput)
        EVT_LOG(self, self.OnPostLog)

        # Child-process plumbing; populated by CreateDoxygeProcess /
        # CreateCHMProcess and torn down by the OnTerminate* callbacks.
        self._process = None
        self._pid = None
        self._input = None
        self._output = None
        self._error = None
        self._inputThread = None
        self._errorThread = None
        self._isBusy = True
        self._pObj = None

        # Kick off generation once the event loop is running.
        wx.CallAfter(self.GenerateAction)

    def OnUpdateCloseButton(self, event):
        # Close is only enabled once the pipeline is idle.
        self._closeBt.Enable(not self._isBusy)
        return True

    def OnButtonClose(self, event):
        """Tear down threads, child process and log handlers, then end the dialog."""
        if self._isBusy:
            self._InfoDialog("Please don't close in progressing...")
            return

        if self._process != None:
            self._process.CloseOutput()

        if self._inputThread:
            self._inputThread.Terminate()
        if self._errorThread:
            self._errorThread.Terminate()

        if self._pid != None:
            # Kill the whole child process tree if it is still alive.
            wx.Process.Kill(self._pid, wx.SIGKILL, wx.KILL_CHILDREN)

        logging.getLogger('edk').removeHandler(self._loghandle)
        logging.getLogger('').removeHandler(self._loghandle)
        logging.getLogger('app').removeHandler(self._loghandle)

        if self._pObj != None:
            self._pObj.Destroy()

        self.EndModal(0)

    def OnGotoOutput(self, event):
        """Open the output directory in Explorer (Windows) or the web browser."""
        output = self.GetParent().GetOutputPath()
        if os.path.exists(output):
            if wx.Platform == '__WXMSW__':
                os.startfile(self.GetParent().GetOutputPath())
            else:
                import webbrowser
                webbrowser.open(self.GetParent().GetOutputPath())
        else:
            self._ErrorDialog("Output directory does not exist!")

    def _ErrorDialog(self, message):
        """Show a modal error message box."""
        dlg = GMD.GenericMessageDialog(self, message,
                                       "Error", wx.ICON_ERROR|wx.OK)
        dlg.ShowModal()
        dlg.Destroy()

    def _InfoDialog(self, message):
        """Show a modal informational message box."""
        dlg = GMD.GenericMessageDialog(self, message,
                                       "Info", wx.ICON_INFORMATION|wx.OK)
        dlg.ShowModal()
        dlg.Destroy()

    def _LogStep(self, index, message):
        """Update the step label, log it, and advance the gauge (6 steps total)."""
        stepstr = "Step %d: %s" % (index, message)
        self._textCtrl.SetLabel(stepstr)
        self.LogMessage(os.linesep + stepstr + os.linesep)
        # NOTE(review): on Python 3 '/' is true division and yields a float;
        # wx.Gauge.SetValue may require an int — confirm ('index * 100 // 6').
        self._gaugeCtrl.SetValue(index * 100 / 6 )

    def OnPostLog(self, event):
        # Log text marshalled from worker threads via a custom LogEvent.
        self.LogMessage(event.message)

    def GenerateAction(self):
        """Steps 1-2: build the package model and the doxygen config, then
        hand off to the doxygen process via the registered callback."""
        self._LogStep(1, "Create Package Object Model")
        wsPath = self.GetParent().GetWorkspace()
        # Assumes the package path is located under the workspace root.
        pkPath = self.GetParent().GetPackagePath()[len(wsPath) + 1:]

        try:
            pObj = baseobject.Package(None, self.GetParent().GetWorkspace())
            pObj.Load(pkPath)
        except:
            self._ErrorDialog("Fail to create package object model! Please check log.txt under this application folder!")
            self._isBusy = False
            return
        self._pObj = pObj

        self.LogMessage(str(pObj.GetPcds()))

        self._LogStep(2, "Preprocess and Generate Doxygen Config File")
        try:
            action = doxygengen.PackageDocumentAction(self.GetParent().GetDoxygenToolPath(),
                                                      self.GetParent().GetChmToolPath(),
                                                      self.GetParent().GetOutputPath(),
                                                      pObj,
                                                      self.GetParent().GetDocumentMode(),
                                                      self.LogMessage,
                                                      self.GetParent().GetArchitecture(),
                                                      self.GetParent().GetToolTag(),
                                                      self.GetParent().GetIsOnlyDocumentInclude(),
                                                      True)
        except:
            self._ErrorDialog("Fail to preprocess! Please check log.txt under this application folder!")
            self._isBusy = False
            return

        # The action will call back into CreateDoxygeProcess when ready.
        action.RegisterCallbackDoxygenProcess(self.CreateDoxygeProcess)

        try:
            if not action.Generate():
                self._isBusy = False
                self.LogMessage("Fail to generate package document! Please check log.txt under this application folder!", 'error')
        except:
            import traceback
            message = traceback.format_exception(*sys.exc_info())
            logging.getLogger('').error(''.join(message))
            self._isBusy = False
            self._ErrorDialog("Fail to generate package document! Please check log.txt under this application folder!")

    def LogMessage(self, message, level='info'):
        """Append *message* to the read-only output pane; forward errors to
        the application logger as well."""
        self._resultCtrl.DocumentEnd()
        self._resultCtrl.SetReadOnly(False)
        self._resultCtrl.AppendText(message)
        self._resultCtrl.Home()
        self._resultCtrl.Home()
        self._resultCtrl.SetReadOnly(True)
        if level == 'error':
            wx.GetApp().GetLogger().error(message)

    def CreateDoxygeProcess(self, doxPath, configFile):
        """Step 3: launch doxygen asynchronously and start pipe-reader threads.

        Returns True when the process was started, False on failure.
        """
        self._LogStep(3, "Launch Doxygen Tool and Generate Package Document")

        cmd = '"%s" %s' % (doxPath, configFile)
        try:
            self._process = DoxygenProcess()
            self._process.SetParent(self)
            self._process.Redirect()
            self._pid = wx.Execute(cmd, wx.EXEC_ASYNC, self._process)
            self._input = self._process.GetInputStream()
            self._output = self._process.GetOutputStream()
            self._error = self._process.GetErrorStream()
        except:
            self._ErrorDialog('Fail to launch doxygen cmd %s! Please check log.txt under this application folder!' % cmd)
            self._isBusy = False
            return False

        # Stream the child's stdout/stderr into the log pane.
        self._inputThread = MonitorThread(self._input, self.LogMessage)
        self._errorThread = MonitorThread(self._error, self.LogMessage)
        self._inputThread.start()
        self._errorThread.start()
        return True

    def OnTerminateDoxygenProcess(self):
        """Called when doxygen exits: drain pipes, fix up the HTML, then either
        finish (HTML mode) or launch the CHM compiler (CHM mode)."""
        if self._inputThread:
            self._inputThread.Terminate()
            self._inputThread = None
        if self._errorThread:
            self._errorThread.Terminate()
            self._errorThread = None

        # Drain any output still buffered in the pipes.
        if self._error:
            while self._error.CanRead():
                text = self._error.read()
                self.LogMessage(text)

        if self._input:
            while self._input.CanRead():
                text = self._input.read()
                self.LogMessage(text)
        self._process.Detach()

        self._process.CloseOutput()
        self._process = None
        self._pid = None

        self.DocumentFixup()

        if self.GetParent().GetDocumentMode().lower() == 'chm':
            hhcfile = os.path.join(self.GetParent().GetOutputPath(), 'html', 'index.hhc')
            hhpfile = os.path.join(self.GetParent().GetOutputPath(), 'html', 'index.hhp')
            self.FixDecDoxygenFileLink(hhcfile, None)
            if not self.CreateCHMProcess(self.GetParent().GetChmToolPath(), hhpfile):
                self._ErrorDialog("Fail to Create %s process for %s" % (self.GetParent().GetChmToolPath(), hhpfile))
                self._isBusy = False
        else:
            self._LogStep(6, "Finished Document Generation!")
            self._isBusy = False
            indexpath = os.path.realpath(os.path.join(self.GetParent().GetOutputPath(), 'html', 'index.html'))
            if wx.Platform == '__WXMSW__':
                os.startfile(indexpath)
            else:
                import webbrowser
                webbrowser.open(indexpath)

            self._InfoDialog('Success create HTML doxgen document %s' % indexpath)

    def CreateCHMProcess(self, chmPath, hhpfile):
        """Launch the Microsoft HTML Help compiler asynchronously.

        Returns True when the process was started, False on failure.
        """
        self.LogMessage(" >>>>>> Start Microsoft HTML workshop process...Zzz...\n")
        cmd = '"%s" %s' % (chmPath, hhpfile)
        try:
            self._process = CHMProcess()
            self._process.SetParent(self)
            self._process.Redirect()
            self._pid = wx.Execute(cmd, wx.EXEC_ASYNC, self._process)
            self._input = self._process.GetInputStream()
            self._output = self._process.GetOutputStream()
            self._error = self._process.GetErrorStream()
        except:
            self.LogMessage('\nFail to launch hhp cmd %s!\n' % cmd)
            self._isBusy = False
            return False
        self._inputThread = MonitorThread(self._input, self.LogMessage)
        self._errorThread = MonitorThread(self._error, self.LogMessage)
        self._inputThread.start()
        self._errorThread.start()
        return True

    def OnTerminateCHMProcess(self):
        """Called when the CHM compiler exits: drain pipes, open the result."""
        if self._inputThread:
            self._inputThread.Terminate()
            self._inputThread = None
        if self._errorThread:
            self._errorThread.Terminate()
            self._errorThread = None

        if self._error:
            while self._error.CanRead():
                text = self._error.read()
                self.LogMessage(text)
        if self._input:
            while self._input.CanRead():
                text = self._input.read()
                self.LogMessage(text)
        self._process.Detach()

        self._process.CloseOutput()
        self._process = None
        self._pid = None
        self._isBusy = False
        indexpath = os.path.realpath(os.path.join(self.GetParent().GetOutputPath(), 'html', 'index.chm'))
        if os.path.exists(indexpath):
            if wx.Platform == '__WXMSW__':
                os.startfile(indexpath)
            else:
                import webbrowser
                webbrowser.open(indexpath)

        self._LogStep(6, "Finished Document Generation!")
        self.LogMessage('\nSuccess create CHM doxgen document %s\n' % indexpath)
        self._InfoDialog('Success create CHM doxgen document %s' % indexpath)

    def DocumentFixup(self):
        # find BASE_LIBRARY_JUMP_BUFFER structure reference page
        # Step 4: walk every generated HTML file and patch known doxygen
        # rendering problems for multi-architecture EDK2 headers.
        self._LogStep(4, "Fixup Package Document!")
        self.LogMessage('\n >>> Start fixup document \n')

        for root, dirs, files in os.walk(os.path.join(self.GetParent().GetOutputPath(), 'html')):
            for dir in dirs:
                if dir.lower() in ['.svn', '_svn', 'cvs']:
                    dirs.remove(dir)
            for file in files:
                wx.YieldIfNeeded()
                if not file.lower().endswith('.html'): continue
                # NOTE(review): 'root' from os.walk is already rooted at the
                # output path, so the first join argument is discarded by
                # os.path.join — harmless but redundant; confirm.
                fullpath = os.path.join(self.GetParent().GetOutputPath(), root, file)
                try:
                    f = open(fullpath, 'r')
                    text = f.read()
                    f.close()
                except:
                    self.LogMessage('\nFail to open file %s\n' % fullpath)
                    continue
                if text.find('BASE_LIBRARY_JUMP_BUFFER Struct Reference') != -1 and self.GetParent().GetArchitecture() == 'ALL':
                    self.FixPageBASE_LIBRARY_JUMP_BUFFER(fullpath, text)
                if text.find('MdePkg/Include/Library/BaseLib.h File Reference') != -1 and self.GetParent().GetArchitecture() == 'ALL':
                    self.FixPageBaseLib(fullpath, text)
                if text.find('IA32_IDT_GATE_DESCRIPTOR Union Reference') != -1 and self.GetParent().GetArchitecture() == 'ALL':
                    self.FixPageIA32_IDT_GATE_DESCRIPTOR(fullpath, text)
                if text.find('MdePkg/Include/Library/UefiDriverEntryPoint.h File Reference') != -1:
                    self.FixPageUefiDriverEntryPoint(fullpath, text)
                if text.find('MdePkg/Include/Library/UefiApplicationEntryPoint.h File Reference') != -1:
                    self.FixPageUefiApplicationEntryPoint(fullpath, text)
                if text.lower().find('.s.dox') != -1 or \
                   text.lower().find('.asm.dox') != -1 or \
                   text.lower().find('.uni.dox') != -1:
                    self.FixDoxFileLink(fullpath, text)

        self.RemoveFileList()
        self.LogMessage(' >>> Finish all document fixing up! \n')

    def RemoveFileList(self):
        """Comment out the 'File List' entry in the generated HTML tree view.

        The equivalent CHM fixup is disabled (kept below as a string literal).
        """
        path_html = os.path.join(self.GetParent().GetOutputPath(), "html", "tree.html")
        path_chm = os.path.join(self.GetParent().GetOutputPath(), "html", "index.hhc")
        if os.path.exists(path_html):
            self.LogMessage(' >>>Remove FileList item from generated HTML document.\n');
            lines = []
            f = open (path_html, "r")
            lines = f.readlines()
            f.close()
            bfound = False
            for index in range(len(lines)):
                if lines[index].find('<a class="el" href="files.html" target="basefrm">File List</a>') != -1:
                    # Open an HTML comment before the File List entry ...
                    lines[index] = "<!-- %s" % lines[index]
                    bfound = True
                    continue
                if bfound:
                    # ... and close it at the enclosing </div>.
                    if lines[index].find('</div>') != -1:
                        lines[index] = "%s -->" % lines[index]
                        break
            if bfound:
                f = open(path_html, "w")
                f.write("".join(lines))
                f.close()
            else:
                self.LogMessage (' !!!Can not found FileList item in HTML document!\n')

        if os.path.exists(path_chm):
            self.LogMessage(" >>>Warning: Can not remove FileList for CHM files!\n");
            """
            self.LogMessage(' >>>Remove FileList item from generated CHM document!\n');
            lines = []
            f = open (path_chm, "r")
            lines = f.readlines()
            f.close()
            bfound = False
            for index in xrange(len(lines)):
                if not bfound:
                    if lines[index].find('<param name="Local" value="files.html">') != -1:
                        lines[index] = '<!-- %s' % lines[index]
                        bfound = True
                        continue
                if bfound:
                    if lines[index].find('</UL>') != -1:
                        lines[index] = '%s -->\n' % lines[index].rstrip()
                        break
            if bfound:
                f = open(path_chm, "w")
                f.write("".join(lines))
                f.close()
                import time
                time.sleep(2)
            else:
                self.LogMessage(' !!!Can not found the FileList item in CHM document!')
            """
    def FixPageBaseLib(self, path, text):
        """Annotate BaseLib.h alignment macros with their per-arch values and
        drop duplicated cross-arch entries (keeping only the last one)."""
        self.LogMessage(' >>> Fixup BaseLib file page at file %s \n' % path)
        lines = text.split('\n')
        lastBaseJumpIndex = -1
        lastIdtGateDescriptor = -1
        # Iterate backwards so deletions do not shift unprocessed indexes.
        for index in range(len(lines) - 1, -1, -1):
            line = lines[index]
            if line.strip() == '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;4 </td>':
                lines[index] = '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;4&nbsp;[IA32] </td>'
            if line.strip() == '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;0x10 </td>':
                lines[index] = '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;0x10&nbsp;[IPF] </td>'
            if line.strip() == '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;8 </td>':
                # NOTE(review): the replacement writes '9' where the matched
                # line documents the value 8 — looks like a typo; confirm.
                lines[index] = '<td class="memname">#define BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT&nbsp;&nbsp;&nbsp;9&nbsp;[EBC, x64] </td>'
            if line.find('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;4') != -1:
                lines[index] = lines[index].replace('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;4',
                                     'BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;4&nbsp;[IA32]')
            if line.find('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;0x10') != -1:
                lines[index] = lines[index].replace('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;0x10',
                                     'BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;0x10&nbsp;[IPF]')
            if line.find('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;8') != -1:
                lines[index] = lines[index].replace('BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;8',
                                     'BASE_LIBRARY_JUMP_BUFFER_ALIGNMENT</a>&nbsp;&nbsp;&nbsp;8&nbsp;[x64, EBC]')
            if line.find('>BASE_LIBRARY_JUMP_BUFFER</a>') != -1:
                if lastBaseJumpIndex != -1:
                    del lines[lastBaseJumpIndex]
                lastBaseJumpIndex = index
            if line.find('>IA32_IDT_GATE_DESCRIPTOR</a></td>') != -1:
                if lastIdtGateDescriptor != -1:
                    del lines[lastIdtGateDescriptor]
                lastIdtGateDescriptor = index
        try:
            f = open(path, 'w')
            f.write('\n'.join(lines))
            f.close()
        except:
            self._isBusy = False
            self.LogMessage(" <<< Fail to fixup file %s\n" % path)
        self.LogMessage(" <<< Finish to fixup file %s\n" % path)

    def FixPageIA32_IDT_GATE_DESCRIPTOR(self, path, text):
        """Insert per-architecture 'Data Fields' headings into the
        IA32_IDT_GATE_DESCRIPTOR union reference page."""
        self.LogMessage(' >>> Fixup structure reference IA32_IDT_GATE_DESCRIPTOR at file %s \n' % path)
        lines = text.split('\n')
        # Backwards iteration keeps earlier indexes valid across inserts.
        for index in range(len(lines) - 1, -1, -1):
            line = lines[index].strip()
            if line.find('struct {</td>') != -1 and lines[index - 2].find('>Uint64</a></td>') != -1:
                lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For X64</h2></td></tr>')
            if line.find('struct {</td>') != -1 and lines[index - 1].find('Data Fields') != -1:
                lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For IA32</h2></td></tr>')
        try:
            f = open(path, 'w')
            f.write('\n'.join(lines))
            f.close()
        except:
            self._isBusy = False
            self.LogMessage(" <<< Fail to fixup file %s\n" % path)
        self.LogMessage(" <<< Finish to fixup file %s\n" % path)

    def FixPageBASE_LIBRARY_JUMP_BUFFER(self, path, text):
        """Merge the per-arch descriptions of BASE_LIBRARY_JUMP_BUFFER into one
        and add per-architecture 'Data Fields' headings."""
        self.LogMessage(' >>> Fixup structure reference BASE_LIBRARY_JUMP_BUFFER at file %s \n' % path)
        lines = text.split('\n')
        bInDetail = True
        bNeedRemove = False
        for index in range(len(lines) - 1, -1, -1):
            line = lines[index]
            if line.find('Detailed Description') != -1:
                bInDetail = False
            if line.startswith('EBC context buffer used by') and lines[index - 1].startswith('x64 context buffer'):
                lines[index] = "IA32/IPF/X64/" + line
                bNeedRemove = True
            if line.startswith("x64 context buffer") or line.startswith('IPF context buffer used by') or \
               line.startswith('IA32 context buffer used by'):
                if bNeedRemove:
                    lines.remove(line)
            if line.find('>R0</a>') != -1 and not bInDetail:
                if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For EBC</h2></td></tr>':
                    lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For EBC</h2></td></tr>')
            if line.find('>Rbx</a>') != -1 and not bInDetail:
                if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For X64</h2></td></tr>':
                    lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For X64</h2></td></tr>')
            if line.find('>F2</a>') != -1 and not bInDetail:
                if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For IPF</h2></td></tr>':
                    lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For IPF</h2></td></tr>')
            if line.find('>Ebx</a>') != -1 and not bInDetail:
                if lines[index - 1] != '<tr><td colspan="2"><br><h2>Data Fields For IA32</h2></td></tr>':
                    lines.insert(index, '<tr><td colspan="2"><br><h2>Data Fields For IA32</h2></td></tr>')
        try:
            f = open(path, 'w')
            f.write('\n'.join(lines))
            f.close()
        except:
            self._isBusy = False
            self.LogMessage(" <<< Fail to fixup file %s" % path)
        self.LogMessage(" <<< Finish to fixup file %s\n" % path)

    def FixPageUefiDriverEntryPoint(self, path, text):
        """Trim the spurious reference lists doxygen emits between the
        _ModuleEntryPoint/EfiMain documentation and their 'References' lines."""
        self.LogMessage(' >>> Fixup file reference MdePkg/Include/Library/UefiDriverEntryPoint.h at file %s \n' % path)
        lines = text.split('\n')
        bInModuleEntry = False
        bInEfiMain = False
        ModuleEntryDlCount = 0
        ModuleEntryDelStart = 0
        ModuleEntryDelEnd = 0
        EfiMainDlCount = 0
        EfiMainDelStart = 0
        EfiMainDelEnd = 0

        # First pass: locate the line ranges to delete for each entry point.
        for index in range(len(lines)):
            line = lines[index].strip()
            if line.find('EFI_STATUS</a> EFIAPI _ModuleEntryPoint </td>') != -1:
                bInModuleEntry = True
            if line.find('EFI_STATUS</a> EFIAPI EfiMain </td>') != -1:
                bInEfiMain = True
            if line.startswith('<p>References <a'):
                if bInModuleEntry:
                    ModuleEntryDelEnd = index - 1
                    bInModuleEntry = False
                elif bInEfiMain:
                    EfiMainDelEnd = index - 1
                    bInEfiMain = False
            if bInModuleEntry:
                if line.startswith('</dl>'):
                    ModuleEntryDlCount = ModuleEntryDlCount + 1
                if ModuleEntryDlCount == 1:
                    ModuleEntryDelStart = index + 1
            if bInEfiMain:
                if line.startswith('</dl>'):
                    EfiMainDlCount = EfiMainDlCount + 1
                if EfiMainDlCount == 1:
                    EfiMainDelStart = index + 1

        # Second pass: delete from the end so indexes stay valid.
        if EfiMainDelEnd > EfiMainDelStart:
            for index in range(EfiMainDelEnd, EfiMainDelStart, -1):
                del lines[index]
        if ModuleEntryDelEnd > ModuleEntryDelStart:
            for index in range(ModuleEntryDelEnd, ModuleEntryDelStart, -1):
                del lines[index]

        try:
            f = open(path, 'w')
            f.write('\n'.join(lines))
            f.close()
        except:
            self._isBusy = False
            self.LogMessage(" <<< Fail to fixup file %s" % path)
        self.LogMessage(" <<< Finish to fixup file %s\n" % path)

    def FixPageUefiApplicationEntryPoint(self, path, text):
        """Same fixup as FixPageUefiDriverEntryPoint, applied to the
        UefiApplicationEntryPoint.h reference page."""
        self.LogMessage(' >>> Fixup file reference MdePkg/Include/Library/UefiApplicationEntryPoint.h at file %s \n' % path)
        lines = text.split('\n')
        bInModuleEntry = False
        bInEfiMain = False
        ModuleEntryDlCount = 0
        ModuleEntryDelStart = 0
        ModuleEntryDelEnd = 0
        EfiMainDlCount = 0
        EfiMainDelStart = 0
        EfiMainDelEnd = 0

        for index in range(len(lines)):
            line = lines[index].strip()
            if line.find('EFI_STATUS</a> EFIAPI _ModuleEntryPoint </td>') != -1:
                bInModuleEntry = True
            if line.find('EFI_STATUS</a> EFIAPI EfiMain </td>') != -1:
                bInEfiMain = True
            if line.startswith('<p>References <a'):
                if bInModuleEntry:
                    ModuleEntryDelEnd = index - 1
                    bInModuleEntry = False
                elif bInEfiMain:
                    EfiMainDelEnd = index - 1
                    bInEfiMain = False
            if bInModuleEntry:
                if line.startswith('</dl>'):
                    ModuleEntryDlCount = ModuleEntryDlCount + 1
                if ModuleEntryDlCount == 1:
                    ModuleEntryDelStart = index + 1
            if bInEfiMain:
                if line.startswith('</dl>'):
                    EfiMainDlCount = EfiMainDlCount + 1
                if EfiMainDlCount == 1:
                    EfiMainDelStart = index + 1

        if EfiMainDelEnd > EfiMainDelStart:
            for index in range(EfiMainDelEnd, EfiMainDelStart, -1):
                del lines[index]
        if ModuleEntryDelEnd > ModuleEntryDelStart:
            for index in range(ModuleEntryDelEnd, ModuleEntryDelStart, -1):
                del lines[index]

        try:
            f = open(path, 'w')
            f.write('\n'.join(lines))
            f.close()
        except:
            self._isBusy = False
            self.LogMessage(" <<< Fail to fixup file %s" % path)
        self.LogMessage(" <<< Finish to fixup file %s\n" % path)


    def FixDoxFileLink(self, path, text):
        """Strip the '.dox' suffix doxygen appends to .s/.asm/.uni file links."""
        self.LogMessage(' >>> Fixup .dox postfix for file %s \n' % path)
        try:
            fd = open(path, 'r')
            text = fd.read()
            fd.close()
        except Exception as e:
            self.LogMessage (" <<<Fail to open file %s" % path)
            return
        text = text.replace ('.s.dox', '.s')
        text = text.replace ('.S.dox', '.S')
        text = text.replace ('.asm.dox', '.asm')
        text = text.replace ('.Asm.dox', '.Asm')
        text = text.replace ('.uni.dox', '.uni')
        text = text.replace ('.Uni.dox', '.Uni')
        try:
            fd = open(path, 'w')
            fd.write(text)
            fd.close()
        except Exception as e:
            self.LogMessage (" <<<Fail to fixup file %s" % path)
            return
        self.LogMessage(' >>> Finish to fixup .dox postfix for file %s \n' % path)

    def FixDecDoxygenFileLink(self, path, text):
        """Remove the first line referencing '.decdoxygen' from the CHM index."""
        self.LogMessage(' >>> Fixup .decdoxygen postfix for file %s \n' % path)
        try:
            fd = open(path, 'r')
            lines = fd.readlines()
            fd.close()
        except Exception as e:
            self.LogMessage (" <<<Fail to open file %s" % path)
            return
        for line in lines:
            if line.find('.decdoxygen') != -1:
                lines.remove(line)
                break
        try:
            fd = open(path, 'w')
            fd.write("".join(lines))
            fd.close()
        except Exception as e:
            self.LogMessage (" <<<Fail to fixup file %s" % path)
            return
        self.LogMessage(' >>> Finish to fixup .decdoxygen postfix for file %s \n' % path)
+
+import threading
class MonitorThread(threading.Thread):
    """Background reader for a wx child-process pipe.

    Polls the pipe until cancelled or exhausted and forwards every non-blank
    chunk to *callback* on the GUI thread via wx.GetApp().ForegroundProcess.
    """

    def __init__(self, pipe, callback):
        threading.Thread.__init__(self)
        self._pipe = pipe
        self._callback = callback
        self._isCancel = False

    def run(self):
        # Loop until Terminate() is called or the pipe has nothing left.
        while not self._isCancel:
            self._pipe.Peek()
            if self._pipe.LastRead() == 0:
                break
            chunk = self._pipe.read()
            if chunk.strip():
                wx.GetApp().ForegroundProcess(self._callback, (chunk,))

    def Terminate(self):
        """Flush the pipe and ask the polling loop to stop."""
        self._pipe.flush()
        self._isCancel = True
+
class DoxygenProcess(wx.Process):
    """wx.Process subclass that notifies the owning dialog when the spawned
    doxygen child process terminates."""

    def OnTerminate(self, id, status):
        # Invoked by wx when the child exits; hand control back to the dialog.
        self._parent.OnTerminateDoxygenProcess()

    def SetParent(self, parent):
        # Remember the ProgressDialog so OnTerminate can call back into it.
        self._parent = parent
+
class CHMProcess(wx.Process):
    """wx.Process subclass that notifies the owning dialog when the spawned
    HTML Help compiler child process terminates."""

    def OnTerminate(self, id, status):
        # Invoked by wx when the child exits; hand control back to the dialog.
        self._parent.OnTerminateCHMProcess()

    def SetParent(self, parent):
        # Remember the ProgressDialog so OnTerminate can call back into it.
        self._parent = parent
+
class ResultHandler:
    """Duck-typed logging handler that routes log records into the dialog.

    Implements just enough of the logging.Handler interface (level, emit,
    handle, acquire, release) to be registered via Logger.addHandler.
    """

    def __init__(self, parent):
        self._parent = parent
        self.level = 0

    def emit(self, record):
        # Forward straight to the dialog's log pane.
        self._parent.LogMessage(record)

    def handle(self, record):
        # Marshal the formatted message onto the GUI thread as a LogEvent.
        wx.PostEvent(self._parent, LogEvent(message=record.getMessage()))

    def acquire(self):
        # No locking needed; required by the logging framework's interface.
        pass

    def release(self):
        # Counterpart of acquire(); intentionally a no-op.
        pass
+
if __name__ == '__main__':
    # Entry point: start the wx application (defined earlier in this file);
    # redirect=False keeps stdout/stderr on the console rather than in a
    # wx output window.
    app = PackageDocApp(redirect=False)
    app.MainLoop()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/__init__.py
new file mode 100644
index 00000000..a7909346
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/__init__.py
@@ -0,0 +1,6 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/__init__.py
new file mode 100644
index 00000000..a7909346
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/__init__.py
@@ -0,0 +1,6 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
new file mode 100644
index 00000000..b3198d5b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/doxygen.py
@@ -0,0 +1,445 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+from __future__ import absolute_import
+import os
+
+from .message import *
+
class BaseDoxygeItem:
    """Base class for doxygen documentation fragments.

    Carries a name, an optional tag, an accumulated description string, and
    the generated markup lines (mText), which __str__ joins with newlines.
    """

    def __init__(self, name, tag=''):
        self.mName = name
        self.mTag = tag
        self.mDescription = ''
        self.mText = []

    def AddDescription(self, desc):
        """Append *desc* to the accumulated description text."""
        self.mDescription = '{0}{1}'.format(self.mDescription, desc)

    def __str__(self):
        """Return the generated markup as a single newline-joined string."""
        return '\n'.join(self.mText)

    def Generate(self):
        """This interface need to be override"""
+
class Section(BaseDoxygeItem):
    """Renders a doxygen \section fragment from name, tag and description."""

    def Generate(self):
        """Build and return the markup lines for this section."""
        if len(self.mTag) != 0:
            header = ' \section %s %s' % (self.mName, self.mTag)
        else:
            header = ' \section %s' % self.mName
        self.mText.append(header)
        self.mText.append(self.mDescription)
        return self.mText
+
class Page(BaseDoxygeItem):
    """A doxygen \page (or \mainpage) node holding sections and sub-pages.

    Generate() renders this page and, recursively, its children to doxygen
    markup accumulated in self.mText.
    """

    def __init__(self, name, tag=None, isSort=True):
        BaseDoxygeItem.__init__(self, name, tag)
        self.mSubPages = []       # child Page objects
        self.mIsMainPage = False  # True only for the root DoxygenFile page
        self.mSections = []       # Section objects attached to this page
        self.mIsSort = isSort     # sort sub-pages alphabetically when rendering

    def GetSubpageCount(self):
        """Return the number of direct sub-pages."""
        return len(self.mSubPages)

    def AddPage(self, subpage):
        """Append *subpage* and return it, enabling chained use."""
        self.mSubPages.append(subpage)
        return subpage

    def AddPages(self, pageArray):
        """Append every page in *pageArray*; a None argument is tolerated."""
        if pageArray is None:
            return
        for page in pageArray:
            self.AddPage(page)

    def AddSection(self, section):
        """Append *section*, keeping sections sorted by name (case-insensitive)."""
        self.mSections.append(section)
        self.mSections.sort(key=lambda x: x.mName.lower())

    def Generate(self):
        """Render this page to markup lines and return self.mText.

        Sub-pages are listed in an index <ul> and their own markup is appended
        after it; endIndex tracks where index entries are inserted.
        """
        if self.mIsMainPage:
            self.mText.append('/** \mainpage %s' % self.mName)
            # The main page keeps insertion order for its index.
            self.mIsSort = False
        else:
            self.mText.append('/** \page %s %s' % (self.mTag, self.mName))

        if len(self.mDescription) != 0:
            self.mText.append(self.mDescription)
        endIndex = len(self.mText)

        self.mSections.sort(key=lambda x: x.mName.lower())
        for sect in self.mSections:
            self.mText += sect.Generate()

        endIndex = len(self.mText)

        if len(self.mSubPages) != 0:
            self.mText.insert(endIndex, "<p> \section content_index INDEX")
            endIndex = len(self.mText)
            self.mText.insert(endIndex, '<ul>')
            endIndex += 1
            if self.mIsSort:
                self.mSubPages.sort(key=lambda x: x.mName.lower())
            for page in self.mSubPages:
                # Index entry goes at endIndex; the sub-page's own markup is
                # appended at the end, after the index block.
                self.mText.insert(endIndex, '<li>\subpage %s \"%s\" </li>' % (page.mTag, page.mName))
                endIndex += 1
                self.mText += page.Generate()
            self.mText.insert(endIndex, '</ul>')
            endIndex += 1
        self.mText.insert(endIndex, ' **/')
        return self.mText
+
class DoxygenFile(Page):
    """Root doxygen page that owns the output file name and can serialize the
    whole page tree to disk."""

    def __init__(self, name, file):
        Page.__init__(self, name)
        self.mFilename = file
        # The root page is rendered as the doxygen \mainpage.
        self.mIsMainPage = True

    def GetFilename(self):
        """Return the file name with forward slashes flipped to backslashes."""
        return self.mFilename.replace('/', '\\')

    def Save(self):
        """Generate the page tree and write it to self.mFilename.

        Returns True on success, False when the file cannot be written.
        """
        # Renamed from 'str', which shadowed the builtin of the same name.
        lines = self.Generate()
        try:
            # 'with' guarantees the handle is closed even if write() raises;
            # the original leaked the handle on a failed write.
            with open(self.mFilename, 'w') as f:
                f.write('\n'.join(lines))
        except IOError:
            ErrorMsg ('Fail to write file %s' % self.mFilename)
            return False

        return True
+
# Template for the generated doxygen configuration file (Doxyfile).  The
# %(...)s placeholders are substituted by DoxygenConfigFile.Generate() with
# the project name/version, output directory, input file list, patterns,
# include paths, predefined macros and the HTML-help/tree-view switches.
doxygenConfigTemplate = """
DOXYFILE_ENCODING      = UTF-8
PROJECT_NAME           = %(ProjectName)s
PROJECT_NUMBER         = %(ProjectVersion)s
OUTPUT_DIRECTORY       = %(OutputDir)s
CREATE_SUBDIRS         = YES
OUTPUT_LANGUAGE        = English
BRIEF_MEMBER_DESC      = YES
REPEAT_BRIEF           = YES
ABBREVIATE_BRIEF       = "The $name class " \\
                         "The $name widget " \\
                         "The $name file " \\
                         is \\
                         provides \\
                         specifies \\
                         contains \\
                         represents \\
                         a \\
                         an \\
                         the
ALWAYS_DETAILED_SEC    = NO
INLINE_INHERITED_MEMB  = NO
FULL_PATH_NAMES        = YES
STRIP_FROM_PATH        = %(StripPath)s
STRIP_FROM_INC_PATH    =
SHORT_NAMES            = YES
JAVADOC_AUTOBRIEF      = NO
QT_AUTOBRIEF           = NO
MULTILINE_CPP_IS_BRIEF = NO
DETAILS_AT_TOP         = YES
INHERIT_DOCS           = YES
SEPARATE_MEMBER_PAGES  = NO
TAB_SIZE               = 1
ALIASES                =
OPTIMIZE_OUTPUT_FOR_C  = YES
OPTIMIZE_OUTPUT_JAVA   = NO
BUILTIN_STL_SUPPORT    = NO
CPP_CLI_SUPPORT        = NO
SIP_SUPPORT            = NO
DISTRIBUTE_GROUP_DOC   = YES
SUBGROUPING            = YES
TYPEDEF_HIDES_STRUCT   = NO

EXTRACT_ALL            = YES
EXTRACT_PRIVATE        = NO
EXTRACT_STATIC         = NO
EXTRACT_LOCAL_CLASSES  = NO
EXTRACT_LOCAL_METHODS  = NO
EXTRACT_ANON_NSPACES   = NO
HIDE_UNDOC_MEMBERS     = NO
HIDE_UNDOC_CLASSES     = NO
HIDE_FRIEND_COMPOUNDS  = NO
HIDE_IN_BODY_DOCS      = NO
INTERNAL_DOCS          = NO
CASE_SENSE_NAMES       = NO
HIDE_SCOPE_NAMES       = NO
SHOW_INCLUDE_FILES     = NO
INLINE_INFO            = YES
SORT_MEMBER_DOCS       = YES
SORT_BRIEF_DOCS        = NO
SORT_BY_SCOPE_NAME     = YES
GENERATE_TODOLIST      = YES
GENERATE_TESTLIST      = YES
GENERATE_BUGLIST       = YES
GENERATE_DEPRECATEDLIST= YES
ENABLED_SECTIONS       =
MAX_INITIALIZER_LINES  = 30
SHOW_USED_FILES        = NO
SHOW_DIRECTORIES       = NO
FILE_VERSION_FILTER    =

QUIET                  = NO
WARNINGS               = YES
WARN_IF_UNDOCUMENTED   = YES
WARN_IF_DOC_ERROR      = YES
WARN_NO_PARAMDOC       = YES
WARN_FORMAT            = "$file:$line: $text "
WARN_LOGFILE           = %(WarningFile)s

INPUT                  = %(FileList)s
INPUT_ENCODING         = UTF-8
FILE_PATTERNS          = %(Pattern)s
RECURSIVE              = NO
EXCLUDE                = *.svn
EXCLUDE_SYMLINKS       = NO
EXCLUDE_PATTERNS       = .svn
EXCLUDE_SYMBOLS        =
EXAMPLE_PATH           = %(ExamplePath)s
EXAMPLE_PATTERNS       = *
EXAMPLE_RECURSIVE      = NO
IMAGE_PATH             =
INPUT_FILTER           =
FILTER_PATTERNS        =
FILTER_SOURCE_FILES    = NO

SOURCE_BROWSER         = NO
INLINE_SOURCES         = NO
STRIP_CODE_COMMENTS    = YES
REFERENCED_BY_RELATION = YES
REFERENCES_RELATION    = YES
REFERENCES_LINK_SOURCE = NO
USE_HTAGS              = NO
VERBATIM_HEADERS       = NO

ALPHABETICAL_INDEX     = NO
COLS_IN_ALPHA_INDEX    = 5
IGNORE_PREFIX          =

GENERATE_HTML          = YES
HTML_OUTPUT            = html
HTML_FILE_EXTENSION    = .html
HTML_HEADER            =
HTML_FOOTER            =
HTML_STYLESHEET        =
HTML_ALIGN_MEMBERS     = YES
GENERATE_HTMLHELP      = %(WhetherGenerateHtmlHelp)s
HTML_DYNAMIC_SECTIONS  = NO
CHM_FILE               = index.chm
HHC_LOCATION           =
GENERATE_CHI           = NO
BINARY_TOC             = NO
TOC_EXPAND             = NO
DISABLE_INDEX          = NO
ENUM_VALUES_PER_LINE   = 4
GENERATE_TREEVIEW      = %(WhetherGenerateTreeView)s
TREEVIEW_WIDTH         = 250

GENERATE_LATEX         = NO
LATEX_OUTPUT           = latex
LATEX_CMD_NAME         = latex
MAKEINDEX_CMD_NAME     = makeindex
COMPACT_LATEX          = NO
PAPER_TYPE             = a4wide
EXTRA_PACKAGES         =
LATEX_HEADER           =
PDF_HYPERLINKS         = YES
USE_PDFLATEX           = YES
LATEX_BATCHMODE        = NO
LATEX_HIDE_INDICES     = NO

GENERATE_RTF           = NO
RTF_OUTPUT             = rtf
COMPACT_RTF            = NO
RTF_HYPERLINKS         = NO
RTF_STYLESHEET_FILE    =
RTF_EXTENSIONS_FILE    =

GENERATE_MAN           = NO
MAN_OUTPUT             = man
MAN_EXTENSION          = .3
MAN_LINKS              = NO

GENERATE_XML           = NO
XML_OUTPUT             = xml
XML_SCHEMA             =
XML_DTD                =
XML_PROGRAMLISTING     = YES

GENERATE_AUTOGEN_DEF   = NO

GENERATE_PERLMOD       = NO
PERLMOD_LATEX          = NO
PERLMOD_PRETTY         = YES
PERLMOD_MAKEVAR_PREFIX =

ENABLE_PREPROCESSING   = YES
MACRO_EXPANSION        = YES
EXPAND_ONLY_PREDEF     = YES
SEARCH_INCLUDES        = YES
INCLUDE_PATH           = %(IncludePath)s
INCLUDE_FILE_PATTERNS  = *.h
PREDEFINED             = %(PreDefined)s
EXPAND_AS_DEFINED      =
SKIP_FUNCTION_MACROS   = NO

TAGFILES               =
GENERATE_TAGFILE       =
ALLEXTERNALS           = NO
EXTERNAL_GROUPS        = YES
PERL_PATH              = /usr/bin/perl

CLASS_DIAGRAMS         = NO
MSCGEN_PATH            =
HIDE_UNDOC_RELATIONS   = YES
HAVE_DOT               = NO
CLASS_GRAPH            = YES
COLLABORATION_GRAPH    = YES
GROUP_GRAPHS           = YES
UML_LOOK               = NO
TEMPLATE_RELATIONS     = NO
INCLUDE_GRAPH          = YES
INCLUDED_BY_GRAPH      = YES
CALL_GRAPH             = NO
CALLER_GRAPH           = NO
GRAPHICAL_HIERARCHY    = YES
DIRECTORY_GRAPH        = YES
DOT_IMAGE_FORMAT       = png
DOT_PATH               =
DOTFILE_DIRS           =
DOT_GRAPH_MAX_NODES    = 50
MAX_DOT_GRAPH_DEPTH    = 1000
DOT_TRANSPARENT        = YES
DOT_MULTI_TARGETS      = NO
GENERATE_LEGEND        = YES
DOT_CLEANUP            = YES

SEARCHENGINE           = NO

"""
class DoxygenConfigFile:
    """Collects settings for a doxygen run and renders them into a Doxyfile
    through doxygenConfigTemplate (defined above in this module)."""

    def __init__(self):
        self.mProjectName = ''
        self.mOutputDir = ''
        self.mFileList = []      # input files, stored with forward slashes
        self.mIncludeList = []   # include search paths, forward slashes
        self.mStripPath = ''
        self.mExamplePath = ''
        # Default doxygen FILE_PATTERNS.  Note: '*.nasm' was previously the
        # typo '.nasm', a glob that doxygen would never match.
        self.mPattern = ['*.c', '*.h',
                         '*.asm', '*.s', '*.nasm', '*.html', '*.dox']
        self.mMode = 'HTML'      # 'HTML' or 'CHM'
        self.mWarningFile = ''
        self.mPreDefined = []    # PREDEFINED macros
        self.mProjectVersion = 0.1

    def SetChmMode(self):
        """Generate HTML-help (CHM) output."""
        self.mMode = 'CHM'

    def SetHtmlMode(self):
        """Generate plain HTML output with a tree view (the default)."""
        self.mMode = 'HTML'

    # NOTE: the following setters keep the historical parameter name 'str'
    # (which shadows the builtin) to preserve the keyword-call interface.
    def SetProjectName(self, str):
        self.mProjectName = str

    def SetProjectVersion(self, str):
        self.mProjectVersion = str

    def SetOutputDir(self, str):
        self.mOutputDir = str

    def SetStripPath(self, str):
        self.mStripPath = str

    def SetExamplePath(self, str):
        self.mExamplePath = str

    def SetWarningFilePath(self, str):
        self.mWarningFile = str.replace('\\', '/')

    def FileExists(self, path):
        """Case-insensitively check whether *path* is already an input file."""
        if path is None:
            return False
        if len(path) == 0:
            return False

        lowered = path.lower()
        return any(lowered == p.lower() for p in self.mFileList)

    def AddFile(self, path):
        """Add an input file, normalizing to forward slashes and skipping
        duplicates (case-insensitive); None/empty paths are ignored."""
        if path is None:
            return

        if len(path) == 0:
            return
        path = path.replace('\\', '/')
        if not self.FileExists(path):
            self.mFileList.append(path)

    def AddIncludePath(self, path):
        """Add an include search path (forward slashes, deduplicated)."""
        path = path.replace('\\', '/')
        if path not in self.mIncludeList:
            self.mIncludeList.append(path)

    def AddPattern(self, pattern):
        """Add an extra FILE_PATTERNS glob."""
        self.mPattern.append(pattern)

    def AddPreDefined(self, macro):
        """Add a PREDEFINED macro definition."""
        self.mPreDefined.append(macro)

    def Generate(self, path):
        """Render the template with the collected settings and write it to
        *path*.  Returns True on success, False on I/O failure."""
        files = ' \\\n'.join(self.mFileList)
        includes = ' \\\n'.join(self.mIncludeList)
        patterns = ' \\\n'.join(self.mPattern)
        if self.mMode.lower() == 'html':
            sHtmlHelp = 'NO'
            sTreeView = 'YES'
        else:
            sHtmlHelp = 'YES'
            sTreeView = 'NO'

        text = doxygenConfigTemplate % {'ProjectName':self.mProjectName,
                                        'OutputDir':self.mOutputDir,
                                        'StripPath':self.mStripPath,
                                        'ExamplePath':self.mExamplePath,
                                        'FileList':files,
                                        'Pattern':patterns,
                                        'WhetherGenerateHtmlHelp':sHtmlHelp,
                                        'WhetherGenerateTreeView':sTreeView,
                                        'IncludePath':includes,
                                        'WarningFile':self.mWarningFile,
                                        'PreDefined':' '.join(self.mPreDefined),
                                        'ProjectVersion':self.mProjectVersion}
        try:
            # 'with' closes the handle even when write() raises; the original
            # leaked the handle in that case.
            with open(path, 'w') as f:
                f.write(text)
        except IOError:
            ErrorMsg ('Fail to generate doxygen config file %s' % path)
            return False

        return True
+
########################################################################
# TEST CODE
########################################################################
if __name__ == '__main__':
    # Raw string: the original 'm:\tree' embedded an unintended TAB via the
    # '\t' escape sequence.
    df = DoxygenFile('Platform Document', r'm:\tree')
    df.AddPage(Page('Module', 'module'))
    p = df.AddPage(Page('Library', 'library'))
    # 'desc' was previously an undefined name and raised NameError here.
    p.AddDescription('Sample library description.')
    p.AddPage(Page('PCD', 'pcds'))

    df.Generate()
    print(df)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
new file mode 100755
index 00000000..44ab7511
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/efibinary.py
@@ -0,0 +1,606 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+import array
+import uuid
+import re
+import os
+import logging
+import core.pe as pe
+
def GetLogger():
    """Return the shared logger used for EFI binary parsing messages."""
    logger = logging.getLogger('EFI Binary File')
    return logger
+
class EFIBinaryError(Exception):
    """Raised when an EFI binary image cannot be parsed."""

    def __init__(self, message):
        Exception.__init__(self)
        # Human-readable failure description, retrieved via GetMessage().
        self._message = message

    def GetMessage(self):
        """Return the failure description passed at construction."""
        return self._message
+
class EfiFd(object):
    """A firmware device image: an ordered collection of firmware volumes."""

    # Minimum number of bytes required to attempt parsing a volume header.
    EFI_FV_HEADER_SIZE = 0x48

    def __init__(self):
        self._fvs = []

    def Load(self, fd, size):
        """Read consecutive firmware volumes from *fd* until *size* bytes
        of the device image are consumed."""
        position = fd.tell()
        while position + self.EFI_FV_HEADER_SIZE < size:
            volume = EfiFv(self)
            volume.Load(fd)
            self._fvs.append(volume)
            # Volumes are packed back-to-back at 8-byte alignment.
            position = align(position + volume.GetHeader().GetFvLength(), 8)
            fd.seek(position)

    def GetFvs(self):
        """Return the list of EfiFv objects parsed so far."""
        return self._fvs
+
class EfiFv(object):
    """A firmware volume: header, block map and contained FFS files."""

    # GUID identifying the standard FFSv2 file system.
    FILE_SYSTEM_GUID = uuid.UUID('{8c8ce578-8a3d-4f1c-9935-896185c32dd3}')

    def __init__(self, parent=None):
        self._size = 0
        self._filename = None
        self._fvheader = None
        self._blockentries = []
        self._ffs = []

        # following field is for FV in FD
        self._parent = parent
        self._offset = 0
        self._raw = array.array('B')

    def Load(self, fd):
        """Parse one firmware volume starting at the current file position.

        Captures the whole volume's raw bytes in self._raw; for a volume
        with an unrecognized file system only the raw bytes are kept.

        @param fd  an open binary file positioned at the volume start
        @raise EFIBinaryError if the header length disagrees with the map
        """
        self._offset = fd.tell()
        self._filename = fd.name

        # get file header
        self._fvheader = EfiFirmwareVolumeHeader.Read(fd)

        self._size = self._fvheader.GetFvLength()

        # Unknown file system: keep the raw bytes and skip FFS parsing.
        if self._fvheader.GetFileSystemGuid() != self.FILE_SYSTEM_GUID:
            fd.seek(self._offset)
            self._raw.fromfile(fd, self.GetHeader().GetFvLength())
            return

        # Read the block map, including its all-zero terminator entry.
        # (Previously the first entry was appended twice and the terminator
        # dropped, corrupting GetBlockEntries()/GetHeaderRawData(); the
        # list length - and so the consistency check below - is unchanged.)
        blockentry = BlockMapEntry.Read(fd)
        self._blockentries.append(blockentry)
        while (blockentry.GetNumberBlocks() != 0 and blockentry.GetLength() != 0):
            blockentry = BlockMapEntry.Read(fd)
            self._blockentries.append(blockentry)

        if self._fvheader.GetSize() + (len(self._blockentries)) * 8 != \
           self._fvheader.GetHeaderLength():
            raise EFIBinaryError("Volume Header length not consistent with block map!")

        # Walk FFS files until the volume end or an invalid name GUID.
        index = align(fd.tell(), 8)
        while ((index + EfiFfs.FFS_HEADER_SIZE) < self._size):
            ffs = EfiFfs.Read(fd, self)
            if not isValidGuid(ffs.GetNameGuid()):
                break
            self._ffs.append(ffs)
            index = align(fd.tell(), 8)

        # Capture the raw bytes of the whole volume.
        fd.seek(self._offset)
        self._raw.fromfile(fd, self.GetHeader().GetFvLength())

    def GetFfs(self):
        """Return the parsed EfiFfs files of this volume."""
        return self._ffs

    def GetHeader(self):
        """Return the parsed EfiFirmwareVolumeHeader."""
        return self._fvheader

    def GetBlockEntries(self):
        """Return the block map entries (terminator included)."""
        return self._blockentries

    def GetHeaderRawData(self):
        """Return header + block-map bytes as a flat list of ints."""
        ret = []
        ret += self._fvheader.GetRawData()
        for block in self._blockentries:
            ret += block.GetRawData()
        return ret

    def GetOffset(self):
        # NOTE(review): returns 0 rather than self._offset - confirm callers
        # depend on this before changing it.
        return 0

    def GetRawData(self):
        """Return the volume's raw bytes as a list of ints."""
        return self._raw.tolist()
+
class BinaryItem(object):
    """Base class for fixed-size binary structures read from a file.

    Subclasses implement GetSize(); the raw bytes live in self._arr.
    """

    def __init__(self, parent=None):
        self._size = 0
        self._arr = array.array('B')
        self._parent = parent

    @classmethod
    def Read(cls, fd, parent=None):
        """Construct an instance and fill it from the current position of *fd*."""
        instance = cls(parent)
        instance.fromfile(fd)
        return instance

    def Load(self, fd):
        """Fill this instance from the current position of *fd*."""
        self.fromfile(fd)

    def GetSize(self):
        """should be implemented by inherited class"""

    def fromfile(self, fd):
        """Read exactly GetSize() bytes from *fd* into the byte array."""
        self._arr.fromfile(fd, self.GetSize())

    def GetParent(self):
        """Return the owning object passed at construction (may be None)."""
        return self._parent
+
class EfiFirmwareVolumeHeader(BinaryItem):
    """Parsed fixed portion (56 bytes) of an EFI firmware volume header."""

    def GetSize(self):
        # Fixed header size, excluding the trailing block map.
        return 56

    def GetSigunature(self):
        """Return the 4-character signature field (bytes 40-43)."""
        raw = self._arr.tolist()
        return ''.join(chr(ch) for ch in raw[40:44])

    def GetAttribute(self):
        """Return the 32-bit attributes field (bytes 44-47)."""
        return list2int(self._arr.tolist()[44:48])

    def GetErasePolarity(self):
        """True when the volume uses inverted (erase-polarity) bit encoding."""
        return 'EFI_FVB2_ERASE_POLARITY' in self.GetAttrStrings()

    def GetAttrStrings(self):
        """Decode the attributes field into a list of EFI_FVB2_* names.

        Flag names come first (ascending bit order), then at most one
        alignment name - matching the original decode order.
        """
        value = self.GetAttribute()
        flag_names = (
            (0x00000001, 'EFI_FVB2_READ_DISABLED_CAP'),
            (0x00000002, 'EFI_FVB2_READ_ENABLED_CAP'),
            (0x00000004, 'EFI_FVB2_READ_STATUS'),
            (0x00000008, 'EFI_FVB2_WRITE_DISABLED_CAP'),
            (0x00000010, 'EFI_FVB2_WRITE_ENABLED_CAP'),
            (0x00000020, 'EFI_FVB2_WRITE_STATUS'),
            (0x00000040, 'EFI_FVB2_LOCK_CAP'),
            (0x00000080, 'EFI_FVB2_LOCK_STATUS'),
            (0x00000200, 'EFI_FVB2_STICKY_WRITE'),
            (0x00000400, 'EFI_FVB2_MEMORY_MAPPED'),
            (0x00000800, 'EFI_FVB2_ERASE_POLARITY'),
            (0x00001000, 'EFI_FVB2_READ_LOCK_CAP'),
            (0x00002000, 'EFI_FVB2_READ_LOCK_STATUS'),
            (0x00004000, 'EFI_FVB2_WRITE_LOCK_CAP'),
            (0x00008000, 'EFI_FVB2_WRITE_LOCK_STATUS'),
        )
        names = [name for bit, name in flag_names if (value & bit) != 0]

        if value == 0:
            names.append('EFI_FVB2_ALIGNMENT_1')
        alignment_names = {
            0x00010000: 'EFI_FVB2_ALIGNMENT_2',
            0x00020000: 'EFI_FVB2_ALIGNMENT_4',
            0x00030000: 'EFI_FVB2_ALIGNMENT_8',
            0x00040000: 'EFI_FVB2_ALIGNMENT_16',
            0x00050000: 'EFI_FVB2_ALIGNMENT_32',
            0x00060000: 'EFI_FVB2_ALIGNMENT_64',
            0x00070000: 'EFI_FVB2_ALIGNMENT_128',
            0x00080000: 'EFI_FVB2_ALIGNMENT_256',
            0x00090000: 'EFI_FVB2_ALIGNMENT_512',
            0x000A0000: 'EFI_FVB2_ALIGNMENT_1K',
            0x000B0000: 'EFI_FVB2_ALIGNMENT_2K',
            0x000C0000: 'EFI_FVB2_ALIGNMENT_4K',
            0x000D0000: 'EFI_FVB2_ALIGNMENT_8K',
            0x000E0000: 'EFI_FVB2_ALIGNMENT_16K',
            0x000F0000: 'EFI_FVB2_ALIGNMENT_32K',
            0x00100000: 'EFI_FVB2_ALIGNMENT_64K',
            0x00110000: 'EFI_FVB2_ALIGNMENT_128K',
            0x00120000: 'EFI_FVB2_ALIGNMENT_256K',
            0x00130000: 'EFI_FVB2_ALIGNMENT_512K',
        }
        alignment_field = value & 0x001F0000
        if alignment_field in alignment_names:
            names.append(alignment_names[alignment_field])

        return names

    def GetHeaderLength(self):
        """Return the 16-bit total header length (bytes 48-49)."""
        return list2int(self._arr.tolist()[48:50])

    def Dump(self):
        """Print the header fields for debugging."""
        print('Signature: %s' % self.GetSigunature())
        print('Attribute: 0x%X' % self.GetAttribute())
        print('Header Length: 0x%X' % self.GetHeaderLength())
        print('File system Guid: ', self.GetFileSystemGuid())
        print('Revision: 0x%X' % self.GetRevision())
        print('FvLength: 0x%X' % self.GetFvLength())

    def GetFileSystemGuid(self):
        """Return the file-system GUID (bytes 16-31)."""
        return list2guid(self._arr.tolist()[16:32])

    def GetRevision(self):
        """Return the revision byte (offset 55)."""
        return int(self._arr.tolist()[55])

    def GetFvLength(self):
        """Return the 64-bit volume length (bytes 32-39, little-endian)."""
        return list2int(self._arr.tolist()[32:40])

    def GetRawData(self):
        """Return the header bytes as a list of ints."""
        return self._arr.tolist()
+
class BlockMapEntry(BinaryItem):
    """Parsed 8-byte firmware-volume block map entry (count + length)."""

    def GetSize(self):
        # Fixed entry size in bytes.
        return 8

    def GetNumberBlocks(self):
        """Return the block count (bytes 0-3, little-endian)."""
        return list2int(self._arr.tolist()[0:4])

    def GetLength(self):
        """Return the per-block length in bytes (bytes 4-7, little-endian)."""
        return list2int(self._arr.tolist()[4:8])

    def GetRawData(self):
        """Return the entry's bytes as a list of ints."""
        return self._arr.tolist()

    def __str__(self):
        return '[BlockEntry] Number = 0x%X, length=0x%X' % (self.GetNumberBlocks(), self.GetLength())
+
class EfiFfs(object):
    """One FFS (firmware file system) file inside a firmware volume."""

    # Size in bytes of the FFS file header.
    FFS_HEADER_SIZE = 24

    def __init__(self, parent=None):
        self._header = None

        # following field is for FFS in FV file.
        self._parent = parent       # owning EfiFv
        self._offset = 0            # absolute offset of this file's header
        self._sections = []         # parsed EfiSection children

    def Load(self, fd):
        """Read one FFS file at the current (8-byte aligned) position.

        Parses contained sections, then repositions the file pointer at
        the next 8-byte aligned offset past this file.
        """
        self._offset = align(fd.tell(), 8)

        self._header = EfiFfsHeader.Read(fd, self)

        # An all-FF name GUID marks erased/padding space - nothing to parse.
        if not isValidGuid(self.GetNameGuid()):
            return

        index = self._offset
        fileend = self._offset + self.GetSize()
        while (index + EfiSection.EFI_SECTION_HEADER_SIZE < fileend):
            section = EfiSection(self)
            section.Load(fd)
            # A zero-size, zero-type section means no further real sections.
            if section.GetSize() == 0 and section.GetHeader().GetType() == 0:
                break
            self._sections.append(section)
            index = fd.tell()

        # rebase file pointer to next ffs file
        index = self._offset + self._header.GetFfsSize()
        index = align(index, 8)
        fd.seek(index)

    def GetOffset(self):
        """Absolute offset of this file within the volume's stream."""
        return self._offset

    def GetSize(self):
        """Total file size in bytes, header included."""
        return self._header.GetFfsSize()

    @classmethod
    def Read(cls, fd, parent=None):
        """Construct an EfiFfs and load it from the current position of *fd*."""
        item = cls(parent)
        item.Load(fd)
        return item

    def GetNameGuid(self):
        """Return this file's name GUID from the header."""
        return self._header.GetNameGuid()

    def DumpContent(self):
        # NOTE(review): self._content is never assigned anywhere in this
        # class (only EfiSection has _contents), so calling this raises
        # AttributeError - looks like dead or unfinished code; confirm.
        list = self._content.tolist()
        line = []
        count = 0
        for item in list:
            if count < 32:
                line.append('0x%X' % int(item))
                count += 1
            else:
                print(' '.join(line))
                count = 0
                line = []
                line.append('0x%X' % int(item))
                count += 1

    def GetHeader(self):
        """Return the parsed EfiFfsHeader."""
        return self._header

    def GetParent(self):
        """Return the owning EfiFv (may be None)."""
        return self._parent

    def GetSections(self):
        """Return the parsed EfiSection list."""
        return self._sections
+
class EfiFfsHeader(BinaryItem):
    """Parsed 24-byte FFS file header."""

    # State bit -> symbolic EFI_FILE_* name.
    ffs_state_map = {0x01:'EFI_FILE_HEADER_CONSTRUCTION',
                     0x02:'EFI_FILE_HEADER_VALID',
                     0x04:'EFI_FILE_DATA_VALID',
                     0x08:'EFI_FILE_MARKED_FOR_UPDATE',
                     0x10:'EFI_FILE_DELETED',
                     0x20:'EFI_FILE_HEADER_INVALID'}

    def GetSize(self):
        # Fixed header size in bytes.
        return 24

    def GetNameGuid(self):
        """Return the file's name GUID (bytes 0-15)."""
        return list2guid(self._arr.tolist()[0:16])

    def GetType(self):
        """Return the raw file-type byte (offset 18)."""
        return int(self._arr.tolist()[18])

    def GetTypeString(self):
        """Map the type byte onto its EFI_FV_FILETYPE_* name."""
        type_names = {
            0x01: 'EFI_FV_FILETYPE_RAW',
            0x02: 'EFI_FV_FILETYPE_FREEFORM',
            0x03: 'EFI_FV_FILETYPE_SECURITY_CORE',
            0x04: 'EFI_FV_FILETYPE_PEI_CORE',
            0x05: 'EFI_FV_FILETYPE_DXE_CORE',
            0x06: 'EFI_FV_FILETYPE_PEIM',
            0x07: 'EFI_FV_FILETYPE_DRIVER',
            0x08: 'EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER',
            0x09: 'EFI_FV_FILETYPE_APPLICATION',
            0x0B: 'EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE',
            0xc0: 'EFI_FV_FILETYPE_OEM_MIN',
            0xdf: 'EFI_FV_FILETYPE_OEM_MAX',
            0xe0: 'EFI_FV_FILETYPE_DEBUG_MIN',
            0xef: 'EFI_FV_FILETYPE_DEBUG_MAX',
            0xf0: 'EFI_FV_FILETYPE_FFS_PAD',
            0xff: 'EFI_FV_FILETYPE_FFS_MAX',
        }
        return type_names.get(self.GetType(), 'Unknown FFS Type')

    def GetAttributes(self):
        """Return the raw attributes byte (offset 19)."""
        return int(self._arr.tolist()[19])

    def GetFfsSize(self):
        """Return the 24-bit little-endian total file size (bytes 20-22)."""
        return list2int(self._arr.tolist()[20:23])

    def GetState(self):
        """Return the highest set bit of the state byte (offset 23).

        The byte is inverted first when the enclosing firmware volume
        uses erase-polarity (inverted) encoding.
        """
        state = int(self._arr.tolist()[23])
        if self.GetParent().GetParent().GetHeader().GetErasePolarity():
            state = (~state) & 0xFF
        bit = 0x80
        while bit and not (bit & state):
            bit >>= 1
        return bit

    def GetStateString(self):
        """Map GetState() onto its EFI_FILE_* name."""
        return self.ffs_state_map.get(self.GetState(), 'Unknown Ffs State')

    def Dump(self):
        """Print the header fields for debugging."""
        print("FFS name: ", self.GetNameGuid())
        print("FFS type: ", self.GetType())
        print("FFS attr: 0x%X" % self.GetAttributes())
        print("FFS size: 0x%X" % self.GetFfsSize())
        print("FFS state: 0x%X" % self.GetState())

    def GetRawData(self):
        """Return the header bytes as a list of ints."""
        return self._arr.tolist()
+
+
class EfiSection(object):
    """One section inside an FFS file (PE32, UI, raw, ...)."""

    # Size in bytes of the common section header (3-byte size + 1-byte type).
    EFI_SECTION_HEADER_SIZE = 4

    def __init__(self, parent=None):
        self._size = 0
        self._parent = parent           # owning EfiFfs
        self._offset = 0                # absolute offset of the section header
        self._contents = array.array('B')

    def Load(self, fd):
        """Read one section at the current (4-byte aligned) position and
        leave the file pointer at the next aligned section."""
        self._offset = align(fd.tell(), 4)

        self._header = EfiSectionHeader.Read(fd, self)

        # PE32 payloads get an extra parse pass via the pe module.
        if self._header.GetTypeString() == "EFI_SECTION_PE32":
            pefile = pe.PEFile(self)
            pefile.Load(fd, self.GetContentSize())

        # NOTE(review): this seeks back to the section *start* yet reads
        # GetContentSize() (= size - 4) bytes, so _contents holds the header
        # plus all-but-4 payload bytes. Confirm whether the intent was to
        # seek to _offset + EFI_SECTION_HEADER_SIZE instead.
        fd.seek(self._offset)
        self._contents.fromfile(fd, self.GetContentSize())

        # rebase file pointer to next section
        index = self._offset + self.GetSize()
        index = align(index, 4)
        fd.seek(index)

    def GetContentSize(self):
        """Payload size: total section size minus the 4-byte header."""
        return self.GetSize() - self.EFI_SECTION_HEADER_SIZE

    def GetContent(self):
        """Return the captured section bytes as a list of ints."""
        return self._contents.tolist()

    def GetSize(self):
        """Total section size in bytes, from the section header."""
        return self._header.GetSectionSize()

    def GetHeader(self):
        """Return the parsed EfiSectionHeader."""
        return self._header

    def GetSectionOffset(self):
        """Absolute offset of the payload (just past the header)."""
        return self._offset + self.EFI_SECTION_HEADER_SIZE
+
class EfiSectionHeader(BinaryItem):
    """Parsed 4-byte common section header (24-bit size + 8-bit type)."""

    # Type byte -> symbolic EFI_SECTION_* name.
    section_type_map = {0x01: 'EFI_SECTION_COMPRESSION',
                        0x02: 'EFI_SECTION_GUID_DEFINED',
                        0x10: 'EFI_SECTION_PE32',
                        0x11: 'EFI_SECTION_PIC',
                        0x12: 'EFI_SECTION_TE',
                        0x13: 'EFI_SECTION_DXE_DEPEX',
                        0x14: 'EFI_SECTION_VERSION',
                        0x15: 'EFI_SECTION_USER_INTERFACE',
                        0x16: 'EFI_SECTION_COMPATIBILITY16',
                        0x17: 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE',
                        0x18: 'EFI_SECTION_FREEFORM_SUBTYPE_GUID',
                        0x19: 'EFI_SECTION_RAW',
                        0x1B: 'EFI_SECTION_PEI_DEPEX'}

    def GetSize(self):
        # Fixed header size in bytes.
        return 4

    def GetSectionSize(self):
        """Return the 24-bit little-endian section size (bytes 0-2)."""
        return list2int(self._arr.tolist()[0:3])

    def GetType(self):
        """Return the raw type byte (offset 3)."""
        return int(self._arr.tolist()[3])

    def GetTypeString(self):
        """Map the type byte onto its EFI_SECTION_* name."""
        return self.section_type_map.get(self.GetType(), 'Unknown Section Type')

    def Dump(self):
        """Print the header fields for debugging."""
        print('size = 0x%X' % self.GetSectionSize())
        print('type = 0x%X' % self.GetType())
+
+
+
# Matches a module record line of an FV .map file, e.g.
#   "PeiCore (BaseAddress=00001000, EntryPoint=00001234, GUID=8c8c...)"
# Raw string: the original non-raw literal relied on invalid escape
# sequences ("\w", "\("), which raise SyntaxWarning on modern Python.
rMapEntry = re.compile(r'^(\w+)[ \(\w\)]* \(BaseAddress=([0-9a-fA-F]+), EntryPoint=([0-9a-fA-F]+), GUID=([0-9a-fA-F\-]+)')

class EfiFvMapFile(object):
    """Parser for a firmware-volume .map file: module GUID -> entry info."""

    def __init__(self):
        self._mapentries = {}

    def Load(self, path):
        """Parse the map file at *path*.

        @return True when the file was read (even with zero matches),
                False when it is missing or unreadable.
        """
        if not os.path.exists(path):
            return False

        try:
            # with-statement closes the handle even if readlines() fails
            # (the original leaked it); the narrowed except replaces a
            # bare `except:` that hid unrelated errors.
            with open(path, 'r') as handle:
                lines = handle.readlines()
        except (OSError, UnicodeDecodeError):
            return False

        for line in lines:
            # Entry records start in column 0; detail lines are indented.
            # (line[:1] also tolerates an empty string, unlike line[0].)
            if line[:1] != ' ':
                ret = rMapEntry.match(line)
                if ret is not None:
                    name = ret.groups()[0]
                    baseaddr = int(ret.groups()[1], 16)
                    entry = int(ret.groups()[2], 16)
                    guidstr = '{' + ret.groups()[3] + '}'
                    guid = uuid.UUID(guidstr)
                    self._mapentries[guid] = EfiFvMapFileEntry(name, baseaddr, entry, guid)
        return True

    def GetEntry(self, guid):
        """Return the EfiFvMapFileEntry for *guid*, or None if unknown."""
        return self._mapentries.get(guid)
+
class EfiFvMapFileEntry(object):
    """One module record from an FV map file (name, addresses, GUID)."""

    def __init__(self, name, baseaddr, entry, guid):
        self._name = name            # module name from the map line
        self._baseaddr = baseaddr    # image base address (int)
        self._entry = entry          # entry-point address (int)
        self._guid = guid            # module GUID (uuid.UUID)

    def GetName(self):
        """Return the module name."""
        return self._name

    def GetBaseAddress(self):
        """Return the image base address."""
        return self._baseaddr

    def GetEntryPoint(self):
        """Return the entry-point address."""
        return self._entry
+
def list2guid(list):
    """Convert 16 bytes in EFI GUID layout into a uuid.UUID.

    Data1/Data2/Data3 are little-endian; the trailing 8 bytes are taken
    in file order.
    """
    data1 = 0
    for byte in reversed(list[0:4]):
        data1 = (data1 << 8) | int(byte)
    data2 = 0
    for byte in reversed(list[4:6]):
        data2 = (data2 << 8) | int(byte)
    data3 = 0
    for byte in reversed(list[6:8]):
        data3 = (data3 << 8) | int(byte)
    tail = 0
    for byte in list[8:16]:
        tail = (tail << 8) | int(byte)

    combined = (data1 << 96) | (data2 << 80) | (data3 << 64) | tail
    return uuid.UUID(int=combined)
+
def list2int(list):
    """Interpret a list of byte values as a little-endian integer."""
    value = 0
    for byte in reversed(list):
        value = (value << 8) | int(byte)
    return value
+
def align(value, alignment):
    """Round *value* up to the next multiple of *alignment*.

    *alignment* must be a power of two (the mask arithmetic relies on it).
    """
    padding = (alignment - value) & (alignment - 1)
    return value + padding
+
# All-FF GUID marking erased flash / padding space.
gInvalidGuid = uuid.UUID(int=0xffffffffffffffffffffffffffffffff)
def isValidGuid(guid):
    """Return False for the all-FF (erased-space) GUID, True otherwise."""
    return guid != gInvalidGuid
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
new file mode 100755
index 00000000..6d61b7a2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/ini.py
@@ -0,0 +1,475 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import absolute_import
+from .message import *
+import re
+import os
+
+section_re = re.compile(r'^\[([\w., "]+)\]')
+
class BaseINIFile(object):
    """In-memory model of an EDK2 INI-style meta file (DSC/INF/DEC).

    Instances are cached per normalized filename (see __new__), so
    constructing the class twice with the same path yields one object.
    """

    # Class-wide cache: normalized filename -> instance.
    _objs = {}

    def __new__(cls, *args, **kwargs):
        """Maintain only a single instance of this object
        @return: instance of this class

        """
        if len(args) == 0: return object.__new__(cls)
        filename = args[0]
        parent = None
        if len(args) > 1:
            parent = args[1]

        key = os.path.normpath(filename)
        if key not in cls._objs.keys():
            cls._objs[key] = object.__new__(cls)

        if parent is not None:
            cls._objs[key].AddParent(parent)

        return cls._objs[key]

    def __init__(self, filename=None, parent=None):
        # NOTE(review): __init__ also runs when __new__ returns a cached
        # instance, resetting its state - confirm this is intended.
        self._lines = []        # raw text lines of the file
        self._sections = {}     # lower-cased section name -> list of sections
        self._filename = filename
        self._globals = []      # objects appearing before the first section
        self._isModify = True   # dirty flag; True forces Parse() to re-read

    def AddParent(self, parent):
        """Record a file object that references (includes) this file."""
        if parent is None: return
        if not hasattr(self, "_parents"):
            self._parents = []

        if parent in self._parents:
            ErrorMsg("Duplicate parent is found for INI file %s" % self._filename)
            return
        self._parents.append(parent)

    def GetFilename(self):
        """Return the normalized path of this file."""
        return os.path.normpath(self._filename)

    def IsModified(self):
        """Return True when the file needs (re-)parsing."""
        return self._isModify

    def Modify(self, modify=True, obj=None):
        """Set the dirty flag and, when dirtying, propagate to parents.

        NOTE(review): assumes AddParent() was called at least once;
        otherwise self._parents does not exist and this raises.
        """
        if modify == self._isModify: return
        self._isModify = modify
        if modify:
            for parent in self._parents:
                parent.Modify(True, self)

    def _ReadLines(self, filename):
        #
        # try to open file
        #
        if not os.path.exists(filename):
            return False

        try:
            handle = open(filename, 'r')
            self._lines = handle.readlines()
            handle.close()
        except:
            # Any read failure is surfaced as an EdkException.
            raise EdkException("Fail to open file %s" % filename)

        return True

    def GetSectionInstance(self, parent, name, isCombined=False):
        """Factory hook: subclasses return their concrete section class."""
        return BaseINISection(parent, name, isCombined)

    def GetSectionByName(self, name):
        """Return all sections whose base name contains *name*
        (case-insensitive); keys containing '.private' are skipped."""
        arr = []
        for key in self._sections.keys():
            if '.private' in key:
                continue
            for item in self._sections[key]:
                if item.GetBaseName().lower().find(name.lower()) != -1:
                    arr.append(item)
        return arr

    def GetSectionObjectsByName(self, name):
        """Return every parsed object from every section matching *name*."""
        arr = []
        sects = self.GetSectionByName(name)
        for sect in sects:
            for obj in sect.GetObjects():
                arr.append(obj)
        return arr

    def Parse(self):
        """Split the file into sections and parse each; returns success.

        A header like "[SecA, SecB]" opens several sections at once; all
        of them share the same line range until the next header.
        """
        if not self._isModify: return True
        if not self._ReadLines(self._filename): return False

        sObjs = []
        inGlobal = True
        # process line
        for index in range(len(self._lines)):
            templine = self._lines[index].strip()
            # skip comments
            if len(templine) == 0: continue
            if re.match("^\[=*\]", templine) or re.match("^#", templine) or \
               re.match("\*+/", templine):
                continue

            m = section_re.match(templine)
            if m is not None: # found a section
                inGlobal = False
                # Finish the latest section first
                if len(sObjs) != 0:
                    for sObj in sObjs:
                        sObj._end = index - 1
                        if not sObj.Parse():
                            ErrorMsg("Fail to parse section %s" % sObj.GetBaseName(),
                                     self._filename,
                                     sObj._start)

                # start new section
                sname_arr = m.groups()[0].split(',')
                sObjs = []
                for name in sname_arr:
                    sObj = self.GetSectionInstance(self, name, (len(sname_arr) > 1))
                    sObj._start = index
                    sObjs.append(sObj)
                    if name.lower() not in self._sections:
                        self._sections[name.lower()] = [sObj]
                    else:
                        self._sections[name.lower()].append(sObj)
            elif inGlobal: # not start any section and find global object
                gObj = BaseINIGlobalObject(self)
                gObj._start = index
                gObj.Parse()
                self._globals.append(gObj)

        # Finish the last section
        # (reuses `index`, the last value of the loop variable above)
        if len(sObjs) != 0:
            for sObj in sObjs:
                sObj._end = index
                if not sObj.Parse():
                    ErrorMsg("Fail to parse section %s" % sObj.GetBaseName(),
                             self._filename,
                             sObj._start)

        self._isModify = False
        return True

    def Destroy(self, parent):
        """Drop *parent*'s reference; free everything once unreferenced."""

        # check referenced parent
        if parent is not None:
            assert parent in self._parents, "when destory ini object, can not found parent reference!"
            self._parents.remove(parent)

        if len(self._parents) != 0: return

        for sects in self._sections.values():
            for sect in sects:
                sect.Destroy()

        # dereference from _objs array
        assert self.GetFilename() in self._objs.keys(), "When destroy ini object, can not find obj reference!"
        assert self in self._objs.values(), "When destroy ini object, can not find obj reference!"
        del self._objs[self.GetFilename()]

        # dereference self
        self.Clear()

    def GetDefine(self, name):
        """Look up *name* in the [Defines] section; return its value or None."""
        sects = self.GetSectionByName('Defines')
        for sect in sects:
            for obj in sect.GetObjects():
                line = obj.GetLineByOffset(obj._start).split('#')[0].strip()
                arr = line.split('=')
                if arr[0].strip().lower() == name.strip().lower():
                    return arr[1].strip()
        return None

    def Clear(self):
        """Release all parsed sections, globals and cached file lines."""
        for sects in self._sections.values():
            for sect in sects:
                del sect
        self._sections.clear()
        for gObj in self._globals:
            del gObj

        del self._globals[:]
        del self._lines[:]

    def Reload(self):
        """Re-parse the file from disk; returns the Parse() result."""
        self.Clear()
        ret = self.Parse()
        if ret:
            self._isModify = False
        return ret

    def AddNewSection(self, sectName):
        """Append a new empty section to the end of the file model.

        NOTE(review): stores the section object itself in self._sections,
        whereas Parse() stores *lists* of sections per key - consumers of
        GetSectionByName() would then iterate the object, not a list.
        The ErrorMsg format string below is also missing its % argument.
        Confirm before relying on this path.
        """
        if sectName.lower() in self._sections.keys():
            ErrorMsg('Section %s can not be created for conflict with existing section')
            return None

        sectionObj = self.GetSectionInstance(self, sectName)
        sectionObj._start = len(self._lines)
        sectionObj._end = len(self._lines) + 1
        self._lines.append('[%s]\n' % sectName)
        self._lines.append('\n\n')
        self._sections[sectName.lower()] = sectionObj
        return sectionObj

    def CopySectionsByName(self, oldDscObj, nameStr):
        """Copy all sections matching *nameStr* from another file object."""
        sects = oldDscObj.GetSectionByName(nameStr)
        for sect in sects:
            sectObj = self.AddNewSection(sect.GetName())
            sectObj.Copy(sect)

    def __str__(self):
        """Return the whole file content as a single string."""
        return ''.join(self._lines)

    ## Get file header's comment from basic INI file.
    #  The file comments has two style:
    #  1) #/** @file
    #  2) ## @file
    #
    def GetFileHeader(self):
        desc = []
        lineArr = self._lines
        inHeader = False
        for num in range(len(self._lines)):
            line = lineArr[num].strip()
            if not inHeader and (line.startswith("#/**") or line.startswith("##")) and \
                line.find("@file") != -1:
                inHeader = True
                continue
            if inHeader and (line.startswith("#**/") or line.startswith('##')):
                inHeader = False
                break
            if inHeader:
                prefixIndex = line.find('#')
                if prefixIndex == -1:
                    desc.append(line)
                else:
                    desc.append(line[prefixIndex + 1:])
        return '<br>\n'.join(desc)
+
class BaseINISection(object):
    """One [section] of an INI file: a line range in the parent file plus
    the objects parsed out of that range."""

    def __init__(self, parent, name, isCombined=False):
        self._parent = parent            # owning BaseINIFile
        self._name = name                # name as written in the header
        self._isCombined = isCombined    # True when declared as "[A, B]"
        self._start = 0                  # line index of the section header
        self._end = 0                    # line index of the last section line
        self._objs = []                  # parsed section objects

    def __del__(self):
        for obj in self._objs:
            del obj
        del self._objs[:]

    def GetName(self):
        """Return the section name as written in the header."""
        return self._name

    def GetObjects(self):
        """Return the parsed section objects."""
        return self._objs

    def GetParent(self):
        """Return the owning BaseINIFile."""
        return self._parent

    def GetStartLinenumber(self):
        """Return the line index of the '[...]' header line."""
        return self._start

    def GetEndLinenumber(self):
        """Return the line index of the section's last line."""
        return self._end

    def GetLine(self, linenumber):
        """Return the raw text of *linenumber* from the parent file."""
        return self._parent._lines[linenumber]

    def GetFilename(self):
        """Return the parent file's normalized path."""
        return self._parent.GetFilename()

    def GetSectionINIObject(self, parent):
        """Factory hook: subclasses return their concrete object class."""
        return BaseINISectionObject(parent)

    def Parse(self):
        """Parse the section body into objects.

        A line ending with '{' opens a multi-line object closed by a line
        ending with '}'. Object _start/_end are stored relative to the
        section's header line.
        """
        # skip first line in section, it is used by section name
        visit = self._start + 1
        iniObj = None
        while (visit <= self._end):
            line = self.GetLine(visit).strip()
            if re.match("^\[=*\]", line) or re.match("^#", line) or len(line) == 0:
                visit += 1
                continue
            line = line.split('#')[0].strip()
            if iniObj is not None:
                # Inside a multi-line '{...}' object: wait for the brace.
                if line.endswith('}'):
                    iniObj._end = visit - self._start
                    if not iniObj.Parse():
                        ErrorMsg("Fail to parse ini object",
                                 self.GetFilename(),
                                 iniObj.GetStartLinenumber())
                    else:
                        self._objs.append(iniObj)
                    iniObj = None
            else:
                iniObj = self.GetSectionINIObject(self)
                iniObj._start = visit - self._start
                # Single-line object unless the line opens a '{' group.
                if not line.endswith('{'):
                    iniObj._end = visit - self._start
                    if not iniObj.Parse():
                        ErrorMsg("Fail to parse ini object",
                                 self.GetFilename(),
                                 iniObj.GetStartLinenumber())
                    else:
                        self._objs.append(iniObj)
                    iniObj = None
            visit += 1
        return True

    def Destroy(self):
        """Destroy all parsed objects of this section."""
        for obj in self._objs:
            obj.Destroy()

    def GetBaseName(self):
        """Return the base name used for section lookups."""
        return self._name

    def AddLine(self, line):
        """Insert *line* at the end of this section (advances _end)."""
        end = self.GetEndLinenumber()
        self._parent._lines.insert(end, line)
        self._end += 1

    def Copy(self, sectObj):
        """Append all non-comment body lines of *sectObj* to this section."""
        index = sectObj.GetStartLinenumber() + 1
        while index < sectObj.GetEndLinenumber():
            line = sectObj.GetLine(index)
            if not line.strip().startswith('#'):
                self.AddLine(line)
            index += 1

    def AddObject(self, obj):
        """Append the lines generated by *obj* to this section."""
        lines = obj.GenerateLines()
        for line in lines:
            self.AddLine(line)

    def GetComment(self):
        """Collect the '##'-style comment block directly above the header.

        Scans upward past blank and plain-'#' lines for the '##' opener,
        then collects comment texts from there down to the header.
        """
        comments = []
        start = self._start - 1
        bFound = False

        while (start > 0):
            line = self.GetLine(start).strip()
            if len(line) == 0:
                start -= 1
                continue
            if line.startswith('##'):
                bFound = True
                index = line.rfind('#')
                if (index + 1) < len(line):
                    comments.append(line[index + 1:])
                break
            if line.startswith('#'):
                start -= 1
                continue
            break
        if bFound:
            end = start + 1
            while (end < self._start):
                line = self.GetLine(end).strip()
                if len(line) == 0: break
                if not line.startswith('#'): break
                index = line.rfind('#')
                if (index + 1) < len(line):
                    comments.append(line[index + 1:])
                end += 1
        return comments
+
class BaseINIGlobalObject(object):
    """An object appearing before any [section] of an INI file."""

    def __init__(self, parent):
        # Keep the owning BaseINIFile so __str__ can read its lines
        # (the original discarded `parent`).
        self._parent = parent
        self._start = 0    # line index of this object in the parent file
        self._end = 0

    def Parse(self):
        """Global objects need no further parsing."""
        return True

    def __str__(self):
        # Was `parent._lines[...]`: `parent` was an undefined global name,
        # so this always raised NameError.
        return self._parent._lines[self._start]

    def __del__(self):
        pass
+
class BaseINISectionObject(object):
    """One parsed entry of a section (typically a single line)."""

    def __init__(self, parent):
        self._start = 0        # first-line offset, relative to section start
        self._end = 0          # last-line offset, relative to section start
        self._parent = parent  # owning BaseINISection

    def __del__(self):
        self._parent = None

    def GetParent(self):
        """Return the owning section."""
        return self._parent

    def GetFilename(self):
        """Return the path of the file this object lives in."""
        return self.GetParent().GetFilename()

    def GetPackageName(self):
        """Default package name: the owning file path (subclasses override)."""
        return self.GetFilename()

    def GetFileObj(self):
        """Return the BaseINIFile that ultimately owns this object."""
        return self.GetParent().GetParent()

    def GetStartLinenumber(self):
        """Absolute line number of this object within the file."""
        return self.GetParent()._start + self._start

    def GetLineByOffset(self, offset):
        """Return the file line at *offset* relative to the section start."""
        sect_start = self._parent.GetStartLinenumber()
        linenumber = sect_start + offset
        return self._parent.GetLine(linenumber)

    def GetLinenumberByOffset(self, offset):
        """Convert a section-relative offset to an absolute line number."""
        return offset + self._parent.GetStartLinenumber()

    def Parse(self):
        """Base objects need no further parsing; subclasses override."""
        return True

    def Destroy(self):
        pass

    def __str__(self):
        return self.GetLineByOffset(self._start).strip()

    def GenerateLines(self):
        """Return the text lines representing this object (override)."""
        return ['default setion object string\n']

    def GetComment(self):
        """Collect the '##' comment block directly above this object.

        Same scan as the section-level GetComment, anchored at this
        object's own start line.
        """
        comments = []
        start = self.GetStartLinenumber() - 1
        bFound = False

        while (start > 0):
            line = self.GetParent().GetLine(start).strip()
            if len(line) == 0:
                start -= 1
                continue
            if line.startswith('##'):
                bFound = True
                index = line.rfind('#')
                if (index + 1) < len(line):
                    comments.append(line[index + 1:])
                break
            if line.startswith('#'):
                start -= 1
                continue
            break
        if bFound:
            end = start + 1
            while (end <= self.GetStartLinenumber() - 1):
                line = self.GetParent().GetLine(end).strip()
                if len(line) == 0: break
                if not line.startswith('#'): break
                index = line.rfind('#')
                if (index + 1) < len(line):
                    comments.append(line[index + 1:])
                end += 1
        return comments
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/inidocview.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/inidocview.py
new file mode 100755
index 00000000..46015d54
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/inidocview.py
@@ -0,0 +1,17 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+import core.editor
+
class INIDoc(core.editor.EditorDocument):
    """Editor document model for INI-style EDK2 meta files."""
    def __init__(self):
        core.editor.EditorDocument.__init__(self)
        # Parsed INI file object backing this document; set by the loader.
        self._iniobj = None
+
+
class INIView(core.editor.EditorView):
    """Editor view for INI documents; inherits all behavior unchanged."""
    pass
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
new file mode 100755
index 00000000..e36930ef
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/basemodel/message.py
@@ -0,0 +1,46 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
def GetEdkLogger():
    """Return the shared 'edk' logger used by all message helpers."""
    import logging
    logger = logging.getLogger('edk')
    return logger
+
class EdkException(Exception):
    """Generic EDK toolchain failure; logs itself on construction."""

    def __init__(self, message, fName=None, fNo=None):
        # Initialize the Exception base with the message so str(e) is
        # informative (the original left the base uninitialized/empty).
        Exception.__init__(self, message)
        self._message = message
        # Mirror the failure into the 'edk' logger immediately.
        ErrorMsg(message, fName, fNo)

    def GetMessage(self):
        """Return the failure text with the standard EDK prefix."""
        return '[EDK Failure]: %s' % self._message
+
def ErrorMsg(mess, fName=None, fNo=None):
    """Log *mess* at error level, tagged '#ERR#' with optional location."""
    formatted = NormalMessage('#ERR#', mess, fName, fNo)
    GetEdkLogger().error(formatted)
+
def LogMsg(mess, fName=None, fNo=None):
    """Log *mess* at info level, tagged '@LOG@' with optional location."""
    formatted = NormalMessage('@LOG@', mess, fName, fNo)
    GetEdkLogger().info(formatted)
+
def WarnMsg(mess, fName=None, fNo=None):
    """Log *mess* at warning level, tagged '!WAR!' with optional location."""
    formatted = NormalMessage('!WAR!', mess, fName, fNo)
    GetEdkLogger().warning(formatted)
+
def NormalMessage(type, mess, fName=None, fNo=None):
    """Format a log line: '<tag> <file>(<line>):<message>'.

    With no location at all, tag and message are separated by one space;
    with a file but no line number, ' :' separates location and message.
    (When only fNo is given, tag and message are joined with no separator
    - preserved from the original.)
    """
    parts = [type]

    if fName is not None:
        location = ' %s' % fName.replace('/', '\\')
        if fNo is not None:
            location += '(%d):' % fNo
        else:
            location += ' :'
        parts.append(location)
    elif fNo is None:
        parts.append(' ')

    parts.append(mess)
    return ''.join(parts)
+
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/__init__.py
new file mode 100644
index 00000000..a7909346
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/__init__.py
@@ -0,0 +1,6 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/__init__.py
new file mode 100644
index 00000000..a7909346
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/__init__.py
@@ -0,0 +1,6 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
new file mode 100755
index 00000000..0bafe2dc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/baseobject.py
@@ -0,0 +1,928 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+from plugins.EdkPlugins.basemodel import ini
+from plugins.EdkPlugins.edk2.model import dsc
+from plugins.EdkPlugins.edk2.model import inf
+from plugins.EdkPlugins.edk2.model import dec
+import os
+from plugins.EdkPlugins.basemodel.message import *
+
class SurfaceObject(object):
    """Base class for file-backed workspace objects (platform/module/package).

    _objs is a per-class registry: freshly created instances are parked under
    the literal string key "None" until Load() re-files them under their
    workspace-relative path.
    """
    _objs = {}

    def __new__(cls, *args, **kwargs):
        """Maintain only a single instance of this object
        @return: instance of this class

        """
        # NOTE(review): despite the docstring, a fresh object is created on
        # every call; uniqueness is only tracked through the _objs registry.
        obj = object.__new__(cls)
        if "None" not in cls._objs:
            cls._objs["None"] = []
        cls._objs["None"].append(obj)

        return obj

    def __init__(self, parent, workspace):
        # parent: owning object in the surface hierarchy
        # workspace: absolute workspace root used to resolve relative paths
        self._parent = parent
        self._fileObj = None          # backing file model, set by Load()
        self._workspace = workspace
        self._isModify = False
        self._modifiedObjs = []

    def __del__(self):
        pass

    def Destroy(self):
        """Detach from the backing file object and remove self from the registry."""
        key = self.GetRelativeFilename()
        self.GetFileObj().Destroy(self)
        del self._fileObj
        # dereference self from the _objs registry
        assert key in self._objs, "when destory, object is not in obj list"
        assert self in self._objs[key], "when destory, object is not in obj list"
        self._objs[key].remove(self)
        if len(self._objs[key]) == 0:
            del self._objs[key]

    def GetParent(self):
        return self._parent

    def GetWorkspace(self):
        return self._workspace

    def GetFileObjectClass(self):
        # Subclasses override this to supply their concrete file model class.
        return ini.BaseINIFile

    def GetFilename(self):
        return self.GetFileObj().GetFilename()

    def GetFileObj(self):
        return self._fileObj

    def GetRelativeFilename(self):
        # Path relative to the workspace root (+1 skips the path separator).
        fullPath = self.GetFilename()
        return fullPath[len(self._workspace) + 1:]

    def Load(self, relativePath):
        """Parse the backing file and register self under its relative path.

        @return True on success, False when the file is missing or unparsable
        """
        # if has been loaded, directly return
        if self._fileObj is not None: return True

        relativePath = os.path.normpath(relativePath)
        fullPath = os.path.join(self._workspace, relativePath)
        fullPath = os.path.normpath(fullPath)

        if not os.path.exists(fullPath):
            ErrorMsg("file does not exist!", fullPath)
            return False

        self._fileObj = self.GetFileObjectClass()(fullPath, self)

        if not self._fileObj.Parse():
            ErrorMsg("Fail to parse file!", fullPath)
            return False

        # remove self from None list to list with filename as key
        cls = self.__class__
        if self not in cls._objs["None"]:
            ErrorMsg("Sufrace object does not be create into None list")
        cls._objs["None"].remove(self)
        if relativePath not in cls._objs:
            cls._objs[relativePath] = []
        cls._objs[relativePath].append(self)

        return True

    def Reload(self, force=False):
        """Re-parse the backing file when forced, or when both self and the
        file object are marked modified."""
        ret = True
        # whether require must be update
        if force:
            ret = self.GetFileObj().Reload(True)
        else:
            if self.IsModified():
                if self.GetFileObj().IsModified():
                    ret = self.GetFileObj().Reload()
        return ret

    def Modify(self, modify=True, modifiedObj=None):
        """Set or clear the modified flag; a set is propagated to the parent."""
        if modify:
            #LogMsg("%s is modified, modified object is %s" % (self.GetFilename(), modifiedObj))
            # Avoid re-propagating when the backing file already flagged us.
            if issubclass(modifiedObj.__class__, ini.BaseINIFile) and self._isModify:
                return
            self._isModify = modify
            self.GetParent().Modify(modify, self)
        else:
            self._isModify = modify

    def IsModified(self):
        return self._isModify

    def GetModifiedObjs(self):
        return self._modifiedObjs

    def FilterObjsByArch(self, objs, arch):
        # Keep objects whose arch is 'common' or equals arch (case-insensitive).
        arr = []
        for obj in objs:
            if obj.GetArch().lower() == 'common':
                arr.append(obj)
                continue
            if obj.GetArch().lower() == arch.lower():
                arr.append(obj)
                continue
        return arr
+
class Platform(SurfaceObject):
    """A platform surfaced from a DSC file; creates and owns its Module list."""

    def __init__(self, parent, workspace):
        SurfaceObject.__init__(self, parent, workspace)
        self._modules = []     # Module objects built by LoadModules()
        self._packages = []    # Package objects referenced via GetPackage()

    def Destroy(self):
        # Modules are owned here and must be destroyed; packages are shared
        # (created by the parent), so only the references are dropped.
        for module in self._modules:
            module.Destroy()
        del self._modules[:]

        del self._packages[:]
        SurfaceObject.Destroy(self)

    def GetName(self):
        return self.GetFileObj().GetDefine("PLATFORM_NAME")

    def GetFileObjectClass(self):
        # Platforms are backed by DSC files.
        return dsc.DSCFile

    def GetModuleCount(self):
        if self.GetFileObj() is None:
            # NOTE(review): execution continues after this error, so the next
            # line still raises AttributeError when no DSC has been loaded.
            ErrorMsg("Fail to get module count because DSC file has not been load!")

        return len(self.GetFileObj().GetComponents())

    def GetSupportArchs(self):
        # Strip any trailing '#' comment, then split the '|'-separated arch list.
        return self.GetFileObj().GetDefine("SUPPORTED_ARCHITECTURES").strip().split('#')[0].split('|')

    def LoadModules(self, precallback=None, postcallback=None):
        """Create a Module for every DSC component, one per architecture.

        @param precallback   called as precallback(self, inf-path) before a load
        @param postcallback  called as postcallback(self, module) after a load
        """
        for obj in self.GetFileObj().GetComponents():
            mFilename = obj.GetFilename()
            if precallback is not None:
                precallback(self, mFilename)
            arch = obj.GetArch()
            if arch.lower() == 'common':
                # 'common' components are instantiated for every supported arch
                archarr = self.GetSupportArchs()
            else:
                archarr = [arch]
            for arch in archarr:
                module = Module(self, self.GetWorkspace())
                if module.Load(mFilename, arch, obj.GetOveridePcds(), obj.GetOverideLibs()):
                    self._modules.append(module)
                    if postcallback is not None:
                        postcallback(self, module)
                else:
                    del module
                    ErrorMsg("Fail to load module %s" % mFilename)

    def GetModules(self):
        return self._modules

    def GetLibraryPath(self, classname, arch, type):
        """Map a library class name to its instance INF via [LibraryClasses].

        @return the instance path, or None (after logging) when unmapped
        """
        objs = self.GetFileObj().GetSectionObjectsByName("libraryclasses")

        for obj in objs:
            if classname.lower() != obj.GetClass().lower():
                continue
            # 'common' entries match any architecture / module type
            if obj.GetArch().lower() != 'common' and \
               obj.GetArch().lower() != arch.lower():
                continue

            if obj.GetModuleType().lower() != 'common' and \
               obj.GetModuleType().lower() != type.lower():
                continue

            return obj.GetInstance()

        ErrorMsg("Fail to get library class %s [%s][%s] from platform %s" % (classname, arch, type, self.GetFilename()))
        return None

    def GetPackage(self, path):
        # Packages are cached by the parent app; remember which ones we use.
        package = self.GetParent().GetPackage(path)
        if package not in self._packages:
            self._packages.append(package)
        return package

    def GetPcdBuildObjs(self, name, arch=None):
        """Return the DSC PCD objects matching name, optionally filtered by arch."""
        arr = []
        objs = self.GetFileObj().GetSectionObjectsByName('pcds')
        for obj in objs:
            if obj.GetPcdName().lower() == name.lower():
                arr.append(obj)
        if arch is not None:
            arr = self.FilterObjsByArch(arr, arch)
        return arr

    def Reload(self, callback=None):
        """Reload the DSC; rebuild all modules when the file changed, else
        refresh the existing ones in place."""
        # do not care force parameter for platform object
        isFileChanged = self.GetFileObj().IsModified()
        ret = SurfaceObject.Reload(self, False)
        if not ret: return False
        if isFileChanged:
            # destroy all modules and reload them again
            for obj in self._modules:
                obj.Destroy()
            del self._modules[:]
            del self._packages[:]
            # NOTE(review): callback is forwarded as LoadModules' precallback
            self.LoadModules(callback)
        else:
            for obj in self._modules:
                # NOTE(review): callback is invoked unguarded here, so passing
                # callback=None would raise TypeError in this branch.
                callback(self, obj.GetFilename())
                obj.Reload()

        self.Modify(False)
        return True

    def Modify(self, modify=True, modifiedObj=None):
        """Set or clear the modified flag, propagating to the parent.

        Clearing only succeeds once the DSC and every module are unmodified.
        """
        if modify:
            #LogMsg("%s is modified, modified object is %s" % (self.GetFilename(), modifiedObj))
            if issubclass(modifiedObj.__class__, ini.BaseINIFile) and self._isModify:
                return
            self._isModify = modify
            self.GetParent().Modify(modify, self)
        else:
            if self.GetFileObj().IsModified():
                return
            for obj in self._modules:
                if obj.IsModified():
                    return

            self._isModify = modify
            self.GetParent().Modify(modify, self)

    def GetModuleObject(self, relativePath, arch):
        # Find a loaded module by INF path; arch 'common' matches any instance.
        path = os.path.normpath(relativePath)
        for obj in self._modules:
            if obj.GetRelativeFilename() == path:
                if arch.lower() == 'common':
                    return obj
                if obj.GetArch() == arch:
                    return obj
        return None

    def GenerateFullReferenceDsc(self):
        """Build a new DSC whose components carry fully resolved library
        instance and PCD overrides (used for documentation generation)."""
        oldDsc = self.GetFileObj()
        newDsc = dsc.DSCFile()
        newDsc.CopySectionsByName(oldDsc, 'defines')
        newDsc.CopySectionsByName(oldDsc, 'SkuIds')

        #
        # Dynamic common section should also be copied
        #
        newDsc.CopySectionsByName(oldDsc, 'PcdsDynamicDefault')
        newDsc.CopySectionsByName(oldDsc, 'PcdsDynamicHii')
        newDsc.CopySectionsByName(oldDsc, 'PcdsDynamicVpd')
        newDsc.CopySectionsByName(oldDsc, 'PcdsDynamicEx')

        sects = oldDsc.GetSectionByName('Components')
        for oldSect in sects:
            newSect = newDsc.AddNewSection(oldSect.GetName())
            for oldComObj in oldSect.GetObjects():
                module = self.GetModuleObject(oldComObj.GetFilename(), oldSect.GetArch())
                if module is None: continue

                newComObj = dsc.DSCComponentObject(newSect)
                newComObj.SetFilename(oldComObj.GetFilename())

                # add all library instance for override section
                libdict = module.GetLibraries()
                for libclass in libdict.keys():
                    if libdict[libclass] is not None:
                        newComObj.AddOverideLib(libclass, libdict[libclass].GetRelativeFilename().replace('\\', '/'))

                # add all pcds for override section
                pcddict = module.GetPcds()
                for pcd in pcddict.values():
                    buildPcd = pcd.GetBuildObj()
                    buildType = buildPcd.GetPcdType()
                    buildValue = None
                    # every dynamic flavour collapses to 'PcdsDynamic', and
                    # dynamic PCDs carry no inline value
                    if buildType.lower() == 'pcdsdynamichii' or \
                       buildType.lower() == 'pcdsdynamicvpd' or \
                       buildType.lower() == 'pcdsdynamicdefault':
                        buildType = 'PcdsDynamic'
                    if buildType != 'PcdsDynamic':
                        buildValue = buildPcd.GetPcdValue()
                    newComObj.AddOveridePcd(buildPcd.GetPcdName(),
                                            buildType,
                                            buildValue)
                newSect.AddObject(newComObj)
        return newDsc
+
class Module(SurfaceObject):
    """A driver or application module surfaced from an INF file.

    For a single architecture, resolves and caches the module's library
    instances, PCDs, PPIs, protocols, GUIDs, depex statements and packages.
    """

    def __init__(self, parent, workspace):
        SurfaceObject.__init__(self, parent, workspace)
        self._arch = 'common'
        self._parent = parent
        self._overidePcds = {}   # PCD overrides from the DSC [Components] entry
        self._overideLibs = {}   # library overrides from the DSC [Components] entry
        self._libs = {}          # library class name -> Library instance (or None)
        self._pcds = {}          # pcd name -> ModulePcd
        self._ppis = []
        self._protocols = []
        self._depexs = []
        self._guids = []
        self._packages = []

    def Destroy(self):
        """Release every referenced item, then unregister this object."""
        for lib in self._libs.values():
            if lib is not None:
                lib.Destroy()
        self._libs.clear()

        for pcd in self._pcds.values():
            pcd.Destroy()
        self._pcds.clear()

        # None guards on every item list for consistency: SurfaceItem.__new__
        # returns None for items not declared in any package.
        for ppi in self._ppis:
            if ppi is not None:
                ppi.DeRef(self)
        del self._ppis[:]

        for protocol in self._protocols:
            if protocol is not None:
                protocol.DeRef(self)
        del self._protocols[:]

        for guid in self._guids:
            if guid is not None:
                guid.DeRef(self)
        del self._guids[:]

        del self._packages[:]
        del self._depexs[:]
        SurfaceObject.Destroy(self)

    def GetFileObjectClass(self):
        # Modules are backed by INF files.
        return inf.INFFile

    def GetLibraries(self):
        return self._libs

    def Load(self, filename, arch='common', overidePcds=None, overideLibs=None):
        """Parse the INF and resolve libraries, packages and surface items.

        @param overidePcds  PCD overrides from the platform's component entry
        @param overideLibs  library overrides from the platform's component entry
        @return True on success, False when the INF could not be loaded
        """
        if not SurfaceObject.Load(self, filename):
            return False

        self._arch = arch
        # Bug fix: the original cross-assigned these (the overidePcds check
        # stored the libs dictionary and vice versa).
        if overidePcds is not None:
            self._overidePcds = overidePcds
        if overideLibs is not None:
            self._overideLibs = overideLibs

        self._SearchLibraries()
        self._SearchPackage()
        self._SearchSurfaceItems()
        return True

    def GetArch(self):
        return self._arch

    def GetModuleName(self):
        return self.GetFileObj().GetDefine("BASE_NAME")

    def GetModuleType(self):
        return self.GetFileObj().GetDefine("MODULE_TYPE")

    def GetPlatform(self):
        return self.GetParent()

    def GetModuleObj(self):
        return self

    def GetPcds(self):
        """Return this module's PCDs merged with those of its libraries."""
        pcds = self._pcds.copy()
        for lib in self._libs.values():
            if lib is None: continue
            for name in lib._pcds.keys():
                pcds[name] = lib._pcds[name]
        return pcds

    def GetPpis(self):
        """Return this module's PPIs plus those of its libraries."""
        ppis = []
        ppis += self._ppis
        for lib in self._libs.values():
            if lib is None: continue
            ppis += lib._ppis
        return ppis

    def GetProtocols(self):
        """Return this module's protocols plus those of its libraries."""
        # Bug fix: copy the list before appending. The original aliased
        # self._protocols, so every call permanently grew the module's own
        # protocol list with the libraries' entries.
        pros = list(self._protocols)
        for lib in self._libs.values():
            if lib is None: continue
            pros += lib._protocols
        return pros

    def GetGuids(self):
        """Return this module's GUIDs plus those of its libraries."""
        guids = []
        guids += self._guids
        for lib in self._libs.values():
            if lib is None: continue
            guids += lib._guids
        return guids

    def GetDepexs(self):
        """Return this module's depex items plus those of its libraries."""
        deps = []
        deps += self._depexs
        for lib in self._libs.values():
            if lib is None: continue
            deps += lib._depexs
        return deps

    def IsLibrary(self):
        return self.GetFileObj().GetDefine("LIBRARY_CLASS") is not None

    def GetLibraryInstance(self, classname, arch, type):
        """Resolve a library class name to a Library object, caching in _libs.

        Resolution order: the component's override list first, then the
        platform [LibraryClasses] mapping; library modules delegate upward.
        """
        if classname not in self._libs.keys():
            # find in overide lib firstly
            if classname in self._overideLibs.keys():
                self._libs[classname] = Library(self, self.GetWorkspace())
                self._libs[classname].Load(self._overideLibs[classname])
                return self._libs[classname]

            parent = self.GetParent()
            if issubclass(parent.__class__, Platform):
                path = parent.GetLibraryPath(classname, arch, type)
                if path is None:
                    ErrorMsg('Fail to get library instance for %s' % classname, self.GetFilename())
                    return None
                self._libs[classname] = Library(self, self.GetWorkspace())
                if not self._libs[classname].Load(path, self.GetArch()):
                    self._libs[classname] = None
            else:
                # parent is a Module (we are a library): ask the owner.
                self._libs[classname] = parent.GetLibraryInstance(classname, arch, type)
        return self._libs[classname]

    def GetSourceObjs(self):
        return self.GetFileObj().GetSectionObjectsByName('source')

    def _SearchLibraries(self):
        """Populate _libs from the INF [LibraryClasses] section."""
        objs = self.GetFileObj().GetSectionObjectsByName('libraryclasses')
        arch = self.GetArch()
        type = self.GetModuleType()
        for obj in objs:
            if obj.GetArch().lower() != 'common' and \
               obj.GetArch().lower() not in self.GetPlatform().GetSupportArchs():
                continue
            classname = obj.GetClass()
            instance = self.GetLibraryInstance(classname, arch, type)
            if not self.IsLibrary() and instance is not None:
                # consumed directly by a module, not inherited via a library
                instance._isInherit = False

            if classname not in self._libs.keys():
                self._libs[classname] = instance

    def _SearchSurfaceItems(self):
        """Collect PCD/PPI/protocol/depex/GUID items from the INF sections."""
        pcds = []
        ppis = []
        pros = []
        deps = []
        guids = []
        if self.GetFileObj() is not None:
            pcds = self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('pcd'),
                                         self.GetArch())
            for pcd in pcds:
                if pcd.GetPcdName() not in self._pcds.keys():
                    pcdItem = PcdItem(pcd.GetPcdName(), self, pcd)
                    self._pcds[pcd.GetPcdName()] = ModulePcd(self,
                                                             pcd.GetPcdName(),
                                                             pcd,
                                                             pcdItem)

            ppis += self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('ppis'),
                                          self.GetArch())
            for ppi in ppis:
                item = PpiItem(ppi.GetName(), self, ppi)
                if item not in self._ppis:
                    self._ppis.append(item)

            pros += self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('protocols'),
                                          self.GetArch())
            for pro in pros:
                item = ProtocolItem(pro.GetName(), self, pro)
                if item not in self._protocols:
                    self._protocols.append(item)

            deps += self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('depex'),
                                          self.GetArch())
            for dep in deps:
                item = DepexItem(self, dep)
                self._depexs.append(item)

            guids += self.FilterObjsByArch(self.GetFileObj().GetSectionObjectsByName('guids'),
                                           self.GetArch())
            for guid in guids:
                item = GuidItem(guid.GetName(), self, guid)
                if item not in self._guids:
                    self._guids.append(item)

    def _SearchPackage(self):
        """Resolve the INF [Packages] entries through the platform's cache."""
        objs = self.GetFileObj().GetSectionObjectsByName('packages')
        for obj in objs:
            package = self.GetPlatform().GetPackage(obj.GetPath())
            if package is not None:
                self._packages.append(package)

    def GetPackages(self):
        return self._packages

    def GetPcdObjects(self):
        if self.GetFileObj() is None:
            return []
        return self.GetFileObj().GetSectionObjectsByName('pcd')

    def GetLibraryClassHeaderFilePath(self):
        """Return the absolute header path for the produced library class,
        or None when this module produces no library class."""
        lcname = self.GetFileObj().GetProduceLibraryClass()
        if lcname is None: return None

        pkgs = self.GetPackages()
        for package in pkgs:
            path = package.GetLibraryClassHeaderPathByName(lcname)
            if path is not None:
                return os.path.realpath(os.path.join(package.GetFileObj().GetPackageRootPath(), path))
        return None

    def Reload(self, force=False, callback=None):
        """Re-parse the INF (when needed) and rebuild all cached references.

        @param force     reload even when nothing is marked modified
        @param callback  optional progress callback(module, message)
        """
        if callback is not None:
            callback(self, "Starting reload...")

        ret = SurfaceObject.Reload(self, force)
        if not ret: return False

        if not force and not self.IsModified():
            return True

        for lib in self._libs.values():
            if lib is not None:
                lib.Destroy()
        self._libs.clear()

        for pcd in self._pcds.values():
            pcd.Destroy()
        self._pcds.clear()

        # None guards as in Destroy (undeclared items are stored as None)
        for ppi in self._ppis:
            if ppi is not None:
                ppi.DeRef(self)
        del self._ppis[:]

        for protocol in self._protocols:
            if protocol is not None:
                protocol.DeRef(self)
        del self._protocols[:]

        for guid in self._guids:
            if guid is not None:
                guid.DeRef(self)
        del self._guids[:]

        del self._packages[:]
        del self._depexs[:]

        if callback is not None:
            callback(self, "Searching libraries...")
        self._SearchLibraries()
        if callback is not None:
            callback(self, "Searching packages...")
        self._SearchPackage()
        if callback is not None:
            callback(self, "Searching surface items...")
        self._SearchSurfaceItems()

        self.Modify(False)
        return True

    def Modify(self, modify=True, modifiedObj=None):
        """Propagate a modified flag up to the platform.

        Clearing only succeeds once the underlying INF itself is unmodified.
        """
        if modify:
            #LogMsg("%s is modified, modified object is %s" % (self.GetFilename(), modifiedObj))
            if issubclass(modifiedObj.__class__, ini.BaseINIFile) and self._isModify:
                return
            self._isModify = modify
            self.GetParent().Modify(modify, self)
        else:
            if self.GetFileObj().IsModified():
                return

            self._isModify = modify
            self.GetParent().Modify(modify, self)
+
class Library(Module):
    """A library instance linked into a module; context is delegated upward."""

    def __init__(self, parent, workspace):
        Module.__init__(self, parent, workspace)
        # Cleared externally when the owning module consumes the class directly.
        self._isInherit = True

    def IsInherit(self):
        return self._isInherit

    def GetModuleType(self):
        # A library builds with the module type of the module linking it.
        return self.GetParent().GetModuleType()

    def GetPlatform(self):
        # parent is a Module whose own parent is the Platform.
        return self.GetParent().GetParent()

    def GetModuleObj(self):
        return self.GetParent()

    def GetArch(self):
        return self.GetParent().GetArch()

    def Destroy(self):
        # Libraries do not own their referenced libs/pcds: just drop the maps
        # and unregister via the SurfaceObject base (skipping Module.Destroy).
        self._libs.clear()
        self._pcds.clear()
        SurfaceObject.Destroy(self)
+
class Package(SurfaceObject):
    """A package surfaced from a DEC file; owns PCD/GUID/PPI/protocol items."""

    def __init__(self, parent, workspace):
        SurfaceObject.__init__(self, parent, workspace)
        self._pcds = {}        # pcd name -> PcdItem (or None on error)
        self._guids = {}       # guid name -> GuidItem
        self._protocols = {}   # protocol name -> ProtocolItem
        self._ppis = {}        # ppi name -> PpiItem

    def GetPcds(self):
        return self._pcds

    def GetPpis(self):
        return list(self._ppis.values())

    def GetProtocols(self):
        return list(self._protocols.values())

    def GetGuids(self):
        return list(self._guids.values())

    def Destroy(self):
        """Destroy every owned item, then unregister this package."""
        for pcd in self._pcds.values():
            if pcd is not None:
                pcd.Destroy()
        for guid in self._guids.values():
            if guid is not None:
                guid.Destroy()
        for protocol in self._protocols.values():
            if protocol is not None:
                protocol.Destroy()
        for ppi in self._ppis.values():
            if ppi is not None:
                ppi.Destroy()
        # Fix: the original cleared _pcds twice; each map is cleared once.
        self._pcds.clear()
        self._guids.clear()
        self._protocols.clear()
        self._ppis.clear()
        SurfaceObject.Destroy(self)

    def Load(self, relativePath):
        """Parse the DEC file and build the item maps.

        @return True on success, False when the DEC could not be loaded
        """
        ret = SurfaceObject.Load(self, relativePath)
        if not ret: return False
        pcds = self.GetFileObj().GetSectionObjectsByName('pcds')
        for pcd in pcds:
            if pcd.GetPcdName() in self._pcds.keys():
                # Same PCD declared under another type section: attach it.
                if self._pcds[pcd.GetPcdName()] is not None:
                    self._pcds[pcd.GetPcdName()].AddDecObj(pcd)
            else:
                self._pcds[pcd.GetPcdName()] = PcdItem(pcd.GetPcdName(), self, pcd)

        guids = self.GetFileObj().GetSectionObjectsByName('guids')
        for guid in guids:
            if guid.GetName() not in self._guids.keys():
                self._guids[guid.GetName()] = GuidItem(guid.GetName(), self, guid)
            else:
                WarnMsg("Duplicate definition for %s" % guid.GetName())

        ppis = self.GetFileObj().GetSectionObjectsByName('ppis')
        for ppi in ppis:
            if ppi.GetName() not in self._ppis.keys():
                self._ppis[ppi.GetName()] = PpiItem(ppi.GetName(), self, ppi)
            else:
                WarnMsg("Duplicate definition for %s" % ppi.GetName())

        protocols = self.GetFileObj().GetSectionObjectsByName('protocols')
        for protocol in protocols:
            if protocol.GetName() not in self._protocols.keys():
                self._protocols[protocol.GetName()] = ProtocolItem(protocol.GetName(), self, protocol)
            else:
                WarnMsg("Duplicate definition for %s" % protocol.GetName())

        return True

    def GetFileObjectClass(self):
        # Packages are backed by DEC files.
        return dec.DECFile

    def GetName(self):
        return self.GetFileObj().GetDefine("PACKAGE_NAME")

    def GetPcdDefineObjs(self, name=None):
        """Return all DEC PCD objects, or only those matching name."""
        objs = self.GetFileObj().GetSectionObjectsByName('pcds')
        if name is None: return objs

        arr = []
        for obj in objs:
            if obj.GetPcdName().lower() == name.lower():
                arr.append(obj)
        return arr

    def GetLibraryClassObjs(self):
        return self.GetFileObj().GetSectionObjectsByName('libraryclasses')

    def Modify(self, modify=True, modifiedObj=None):
        """Propagate the modified flag; clearing requires an unmodified DEC."""
        if modify:
            self._isModify = modify
            self.GetParent().Modify(modify, self)
        else:
            if self.GetFileObj().IsModified():
                return

            self._isModify = modify
            self.GetParent().Modify(modify, self)

    def GetLibraryClassHeaderPathByName(self, clsname):
        """Return the declared header file for a library class, or None."""
        objs = self.GetLibraryClassObjs()
        for obj in objs:
            if obj.GetClassName() == clsname:
                return obj.GetHeaderFile()
        return None
+
class DepexItem(object):
    """Wraps one depex statement taken from a module's INF [Depex] section."""

    def __init__(self, parent, infObj):
        # parent: the owning Module; infObj: the INF section object.
        self._parent = parent
        self._infObj = infObj

    def GetDepexString(self):
        """Return the depex expression rendered as text."""
        return str(self._infObj)

    def GetInfObject(self):
        """Return the underlying INF section object."""
        return self._infObj
+
class ModulePcd(object):
    """A PCD as consumed by one module, bound to its package declaration."""

    # Maps INF-side PCD section types to the DSC/DEC build-side type names.
    _type_mapping = {'FeaturePcd': 'PcdsFeatureFlag',
                     'FixedPcd': 'PcdsFixedAtBuild',
                     'PatchPcd': 'PcdsPatchableInModule'}

    def __init__(self, parent, name, infObj, pcdItem):
        assert issubclass(parent.__class__, Module), "Module's PCD's parent must be module!"
        assert pcdItem is not None, 'Pcd %s does not in some package!' % name

        self._name = name          # full PCD name (TokenSpaceGuid.PcdName)
        self._parent = parent      # owning Module
        self._pcdItem = pcdItem    # shared PcdItem from the declaring package
        self._infObj = infObj      # INF section object for this PCD

    def GetName(self):
        return self._name

    def GetParent(self):
        # Bug fix: this previously returned self._name instead of the module.
        return self._parent

    def GetArch(self):
        return self._parent.GetArch()

    def Destroy(self):
        self._pcdItem.DeRef(self._parent)
        self._infObj = None

    def GetBuildObj(self):
        """Return the DSC/DEC object supplying this PCD's build value.

        Platform settings win; otherwise fall back to the package default.
        @return the matching build object, or None (after logging an error)
        """
        platformInfos = self._parent.GetPlatform().GetPcdBuildObjs(self._name, self.GetArch())
        modulePcdType = self._infObj.GetPcdType()

        # if platform does not give pcd's value, get default value from package
        if len(platformInfos) == 0:
            if modulePcdType.lower() == 'pcd':
                return self._pcdItem.GetDecObject()
            else:
                for obj in self._pcdItem.GetDecObjects():
                    if modulePcdType not in self._type_mapping.keys():
                        ErrorMsg("Invalid PCD type %s" % modulePcdType)
                        return None

                    if self._type_mapping[modulePcdType] == obj.GetPcdType():
                        return obj
                # Bug fix: the original format string carried two placeholders
                # but a single argument and raised TypeError instead of logging.
                ErrorMsg('Module PCD type %s does not in valid range in package!' %
                         modulePcdType)
        else:
            if modulePcdType.lower() == 'pcd':
                if len(platformInfos) > 1:
                    WarnMsg("Find more than one value for PCD %s in platform %s" % \
                            (self._name, self._parent.GetPlatform().GetFilename()))
                return platformInfos[0]
            else:
                for obj in platformInfos:
                    if modulePcdType not in self._type_mapping.keys():
                        ErrorMsg("Invalid PCD type %s" % modulePcdType)
                        return None

                    if self._type_mapping[modulePcdType] == obj.GetPcdType():
                        return obj

                ErrorMsg('Can not find value for pcd %s in pcd type %s' % \
                         (self._name, modulePcdType))
        return None
+
+
class SurfaceItem(object):
    """Shared representation of a package-declared item (PCD/GUID/PPI/protocol).

    _objs maps item name -> the unique instance owned by the declaring
    Package; Modules referencing the same name receive that shared instance.
    """
    _objs = {}

    def __new__(cls, *args, **kwargs):
        """Maintain only a single instance of this object
        @return: instance of this class

        """
        name = args[0]
        parent = args[1]
        fileObj = args[2]
        if issubclass(parent.__class__, Package):
            # A package declares the item: the name must not already exist.
            if name in cls._objs.keys():
                ErrorMsg("%s item is duplicated defined in packages: %s and %s" %
                         (name, parent.GetFilename(), cls._objs[name].GetParent().GetFilename()))
                return None
            obj = object.__new__(cls)
            cls._objs[name] = obj
            return obj
        elif issubclass(parent.__class__, Module):
            # A module references the item: the name must already be declared.
            if name not in cls._objs.keys():
                ErrorMsg("%s item does not defined in any package! It is used by module %s" % \
                         (name, parent.GetFilename()))
                return None
            return cls._objs[name]

        # NOTE: when None is returned Python never calls __init__.
        return None


    def __init__(self, name, parent, fileObj):
        # Runs on the shared instance: a Package parent initialises the item,
        # a Module parent merely records its reference.
        if issubclass(parent.__class__, Package):
            self._name = name
            self._parent = parent
            self._decObj = [fileObj]      # DEC declaration objects for this item
            self._refMods = {}            # referencing Module -> INF object
        else:
            self.RefModule(parent, fileObj)

    @classmethod
    def GetObjectDict(cls):
        return cls._objs

    def GetParent(self):
        return self._parent

    def GetReference(self):
        return self._refMods

    def RefModule(self, mObj, infObj):
        # Record that module mObj references this item (idempotent).
        if mObj in self._refMods.keys():
            return
        self._refMods[mObj] = infObj

    def DeRef(self, mObj):
        # Forget module mObj's reference, warning if it was never recorded.
        if mObj not in self._refMods.keys():
            WarnMsg("%s is not referenced by module %s" % (self._name, mObj.GetFilename()))
            return
        del self._refMods[mObj]

    def Destroy(self):
        # Drop all references and unregister this item name from the registry.
        self._refMods.clear()
        cls = self.__class__
        del cls._objs[self._name]

    def GetName(self):
        return self._name

    def GetDecObject(self):
        # First (primary) DEC declaration.
        return self._decObj[0]

    def GetDecObjects(self):
        return self._decObj
+
class PcdItem(SurfaceItem):
    """A PCD declared by a package, possibly under several PCD type sections."""

    def AddDecObj(self, fileObj):
        """Register another DEC declaration of this PCD, rejecting duplicates
        from other packages or duplicate (type, arch) pairs."""
        for decObj in self._decObj:
            if decObj.GetFilename() != fileObj.GetFilename():
                ErrorMsg("Pcd %s defined in more than one packages : %s and %s" % \
                         (self._name, decObj.GetFilename(), fileObj.GetFilename()))
                return
            # Bug fix: compare both arch values case-insensitively; the
            # original lowercased only one side, missing e.g. 'IA32' vs 'ia32'.
            if decObj.GetPcdType() == fileObj.GetPcdType() and \
               decObj.GetArch().lower() == fileObj.GetArch().lower():
                ErrorMsg("Pcd %s is duplicated defined in pcd type %s in package %s" % \
                         (self._name, decObj.GetPcdType(), decObj.GetFilename()))
                return
        self._decObj.append(fileObj)

    def GetValidPcdType(self):
        """Return the list of distinct PCD type names this PCD is declared under."""
        types = []
        for obj in self._decObj:
            if obj.GetPcdType() not in types:
                # Bug fix: append the whole type string; '+= str' extended the
                # list with the string's individual characters.
                types.append(obj.GetPcdType())
        return types
+
# NOTE: these subclasses define no _objs of their own, so all three share
# SurfaceItem._objs — item names are unique across every item kind.
class GuidItem(SurfaceItem):
    # GUID declared in a package [Guids] section; behavior fully inherited.
    pass

class PpiItem(SurfaceItem):
    # PPI declared in a package [Ppis] section; behavior fully inherited.
    pass

class ProtocolItem(SurfaceItem):
    # Protocol declared in a package [Protocols] section; behavior fully inherited.
    pass
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
new file mode 100755
index 00000000..cff2f127
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dec.py
@@ -0,0 +1,313 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from plugins.EdkPlugins.basemodel import ini
+import re, os
+from plugins.EdkPlugins.basemodel.message import *
+
class DECFile(ini.BaseINIFile):
    """INI-style model of an EDK2 package declaration (.dec) file."""

    def GetSectionInstance(self, parent, name, isCombined=False):
        # Factory hook used by the base parser to create DEC-aware sections.
        return DECSection(parent, name, isCombined)

    def GetComponents(self):
        return self.GetSectionByName('Components')

    def GetPackageRootPath(self):
        # The package root is the directory containing the .dec file itself.
        return os.path.dirname(self.GetFilename()).strip()

    def GetBaseName(self):
        return self.GetDefine("PACKAGE_NAME").strip()

    def GetVersion(self):
        return self.GetDefine("PACKAGE_VERSION").strip()

    def GetSectionObjectsByName(self, name, arch=None):
        """Collect the objects of every matching section, filtered by arch."""
        collected = []
        for sect in self.GetSectionByName(name):
            # skip sections declared for a different architecture
            if sect.IsArchMatch(arch):
                collected.extend(sect.GetObjects())
        return collected
+
class DECSection(ini.BaseINISection):
    """A single [section] of a .dec file; knows its type and architecture."""

    def GetSectionINIObject(self, parent):
        # Pick the entry class from the section type. Substring checks come
        # first so e.g. 'PcdsFixedAtBuild' still maps to DECPcdObject.
        sectType = self.GetType().lower()
        if 'defines' in sectType:
            return DECDefineSectionObject(self)
        if 'includes' in sectType:
            return DECIncludeObject(self)
        if 'pcd' in sectType:
            return DECPcdObject(self)
        if sectType == 'libraryclasses':
            return DECLibraryClassObject(self)
        if sectType == 'guids':
            return DECGuidObject(self)
        if sectType == 'ppis':
            return DECPpiObject(self)
        if sectType == 'protocols':
            return DECProtocolObject(self)
        return DECSectionObject(self)

    def GetType(self):
        # Section names look like 'Type.Arch'; the type precedes the first dot.
        return self._name.split('.')[0].strip()

    def GetArch(self):
        parts = self._name.split('.')
        return 'common' if len(parts) == 1 else parts[1]

    def IsArchMatch(self, arch):
        # None means "any architecture"; 'common' sections match everything.
        if arch is None or self.GetArch() == 'common':
            return True
        return self.GetArch().lower() == arch.lower()
+
class DECSectionObject(ini.BaseINISectionObject):
    # Base for all DEC section entries; the architecture comes from the
    # owning section header (e.g. '[Guids.IA32]').
    def GetArch(self):
        return self.GetParent().GetArch()
+
class DECDefineSectionObject(DECSectionObject):
    """A 'KEY = VALUE' entry in the [Defines] section of a .dec file."""

    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)
        self._key = None
        self._value = None

    def Parse(self):
        """Parse the single 'KEY = VALUE' line.

        @return True on success, False (after logging) on a malformed line
        """
        assert (self._start == self._end), 'The object in define section must be in single line'

        line = self.GetLineByOffset(self._start).strip()

        # Drop any trailing comment.
        line = line.split('#')[0]
        # Robustness fix: split on the FIRST '=' only, so values that
        # themselves contain '=' are no longer rejected.
        arr = line.split('=', 1)
        if len(arr) != 2:
            ErrorMsg('Invalid define section object',
                     self.GetFilename(),
                     self.GetParent().GetName()
                     )
            return False

        self._key = arr[0].strip()
        self._value = arr[1].strip()

        return True

    def GetKey(self):
        return self._key

    def GetValue(self):
        return self._value
+
class DECGuidObject(DECSectionObject):
    """A 'name = guid-value' declaration from the [Guids] section."""

    # Registry of every parsed GUID object, keyed by GUID name.
    _objs = {}

    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)
        self._name = None

    def Parse(self):
        # Strip the trailing comment, then split 'name = guid-value'.
        text = self.GetLineByOffset(self._start).strip().split('#')[0]
        self._name = text.split('=')[0].strip()
        self._guid = text.split('=')[1].strip()
        DECGuidObject._objs.setdefault(self._name, []).append(self)
        return True

    def GetName(self):
        return self._name

    def GetGuid(self):
        return self._guid

    def Destroy(self):
        # Unregister; drop the key once no objects remain for this name.
        registry = DECGuidObject._objs
        registry[self._name].remove(self)
        if not registry[self._name]:
            del registry[self._name]

    @staticmethod
    def GetObjectDict():
        return DECGuidObject._objs
+
class DECPpiObject(DECSectionObject):
    """A 'name = guid-value' declaration from the [Ppis] section."""
    # Registry of every parsed PPI object, keyed by PPI name.
    _objs = {}
    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)
        self._name = None

    def Parse(self):
        # Strip the trailing comment, then split 'name = guid-value'.
        line = self.GetLineByOffset(self._start).strip().split('#')[0]
        self._name = line.split('=')[0].strip()
        self._guid = line.split('=')[1].strip()
        objdict = DECPpiObject._objs
        if self._name not in objdict.keys():
            objdict[self._name] = [self]
        else:
            objdict[self._name].append(self)

        return True

    def GetName(self):
        return self._name

    def GetGuid(self):
        return self._guid

    def Destroy(self):
        # Unregister; drop the key once its list is empty.
        objdict = DECPpiObject._objs
        objdict[self._name].remove(self)
        if len(objdict[self._name]) == 0:
            del objdict[self._name]

    @staticmethod
    def GetObjectDict():
        return DECPpiObject._objs
+
class DECProtocolObject(DECSectionObject):
    """A 'name = guid-value' declaration from the [Protocols] section."""
    # Registry of every parsed protocol object, keyed by protocol name.
    _objs = {}

    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)
        self._name = None

    def Parse(self):
        # Strip the trailing comment, then split 'name = guid-value'.
        line = self.GetLineByOffset(self._start).strip().split('#')[0]
        self._name = line.split('=')[0].strip()
        self._guid = line.split('=')[1].strip()
        objdict = DECProtocolObject._objs
        if self._name not in objdict.keys():
            objdict[self._name] = [self]
        else:
            objdict[self._name].append(self)

        return True

    def GetName(self):
        return self._name

    def GetGuid(self):
        return self._guid

    def Destroy(self):
        # Unregister; drop the key once its list is empty.
        objdict = DECProtocolObject._objs
        objdict[self._name].remove(self)
        if len(objdict[self._name]) == 0:
            del objdict[self._name]


    @staticmethod
    def GetObjectDict():
        return DECProtocolObject._objs
+
class DECLibraryClassObject(DECSectionObject):
    """A 'ClassName|HeaderFile' entry from the [LibraryClasses] section."""

    # Registry of every parsed entry, keyed by library class name.
    _objs = {}

    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)
        self.mClassName = None
        self.mHeaderFile = None

    def Parse(self):
        # Strip the trailing comment, then unpack 'ClassName|HeaderFile'.
        text = self.GetLineByOffset(self._start).strip().split('#')[0]
        self.mClassName, self.mHeaderFile = text.split('|')
        DECLibraryClassObject._objs.setdefault(self.mClassName, []).append(self)
        return True

    def GetClassName(self):
        return self.mClassName

    def GetName(self):
        return self.mClassName

    def GetHeaderFile(self):
        return self.mHeaderFile

    def Destroy(self):
        # Unregister; drop the key once no objects remain for this class.
        registry = DECLibraryClassObject._objs
        registry[self.mClassName].remove(self)
        if not registry[self.mClassName]:
            del registry[self.mClassName]

    @staticmethod
    def GetObjectDict():
        return DECLibraryClassObject._objs
+
class DECIncludeObject(DECSectionObject):
    """An include-path entry from the [Includes] section."""

    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)

    def GetPath(self):
        # The path is the line with any trailing '#' comment removed.
        raw = self.GetLineByOffset(self._start)
        return raw.split('#')[0].strip()
+
class DECPcdObject(DECSectionObject):
    """A PCD declaration line: 'Name|DefaultValue|DataType|Token'."""

    # Registry of every parsed PCD object, keyed by full PCD name.
    _objs = {}

    def __init__(self, parent):
        DECSectionObject.__init__(self, parent)
        self.mPcdName = None
        self.mPcdDefaultValue = None
        self.mPcdDataType = None
        self.mPcdToken = None

    def Parse(self):
        # Strip the trailing comment, then unpack the four '|'-separated fields.
        text = self.GetLineByOffset(self._start).strip().split('#')[0]
        (self.mPcdName, self.mPcdDefaultValue, self.mPcdDataType, self.mPcdToken) = text.split('|')
        DECPcdObject._objs.setdefault(self.mPcdName, []).append(self)
        return True

    def Destroy(self):
        # Unregister; drop the key once no objects remain for this name.
        registry = DECPcdObject._objs
        registry[self.mPcdName].remove(self)
        if not registry[self.mPcdName]:
            del registry[self.mPcdName]

    def GetPcdType(self):
        # The PCD access type comes from the enclosing section header
        # (e.g. 'PcdsFixedAtBuild').
        return self.GetParent().GetType()

    def GetPcdName(self):
        return self.mPcdName

    def GetPcdValue(self):
        return self.mPcdDefaultValue

    def GetPcdDataType(self):
        return self.mPcdDataType

    def GetPcdToken(self):
        return self.mPcdToken

    def GetName(self):
        # Strip the token-space GUID prefix: 'Guid.Name' -> 'Name'.
        return self.GetPcdName().split('.')[1]

    @staticmethod
    def GetObjectDict():
        return DECPcdObject._objs
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
new file mode 100755
index 00000000..829ac803
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen.py
@@ -0,0 +1,1084 @@
+## @file
+#
+# This file produce action class to generate doxygen document for edk2 codebase.
+# The action classes are shared by GUI and command line tools.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+"""This file produce action class to generate doxygen document for edk2 codebase.
+ The action classes are shared by GUI and command line tools.
+"""
+from plugins.EdkPlugins.basemodel import doxygen
+import os
+try:
+ import wx
+ gInGui = True
+except:
+ gInGui = False
+import re
+from plugins.EdkPlugins.edk2.model import inf
+from plugins.EdkPlugins.edk2.model import dec
+from plugins.EdkPlugins.basemodel.message import *
+
+# Directory names skipped when walking the source tree (version-control metadata).
+_ignore_dir = ['.svn', '_svn', 'cvs']
+# Maps INF [Defines] keys to the human-readable labels used in generated tables.
+_inf_key_description_mapping_table = {
+    'INF_VERSION':'Version of INF file specification',
+    #'BASE_NAME':'Module Name',
+    'FILE_GUID':'Module Guid',
+    'MODULE_TYPE': 'Module Type',
+    'VERSION_STRING': 'Module Version',
+    'LIBRARY_CLASS': 'Produced Library Class',
+    'EFI_SPECIFICATION_VERSION': 'UEFI Specification Version',
+    'PI_SPECIFICATION_VERSION': 'PI Specification Version',
+    'ENTRY_POINT': 'Module Entry Point Function',
+    'CONSTRUCTOR': 'Library Constructor Function'
+}
+
+# Maps DEC [Defines] keys to the labels used in the package-information table.
+_dec_key_description_mapping_table = {
+    'DEC_SPECIFICATION': 'Version of DEC file specification',
+    'PACKAGE_GUID': 'Package Guid'
+}
+class DoxygenAction:
+    """This is base class for all doxygen action.
+
+    Subclasses implement InitializeConfigFile() and GenerateIndexPage();
+    Generate() drives the overall flow and invokes the registered callback
+    to launch the external doxygen process.
+    """
+
+    def __init__(self, doxPath, chmPath, outputPath, projname, mode='html', log=None, verbose=False):
+        """Constructor function.
+        @param doxPath    the absolute path of the doxygen executable.
+        @param chmPath    the absolute path of the CHM compiler executable.
+        @param outputPath the absolute output directory path.
+        @param projname   project name used in titles and generated file names.
+        @param mode       output mode, 'html' or 'chm'.
+        @param log        log function for output message
+        @param verbose    emit per-file progress messages when True.
+        """
+        self._doxPath = doxPath
+        self._chmPath = chmPath
+        self._outputPath = outputPath
+        self._projname = projname
+        self._configFile = None          # doxygen config file is used by doxygen exe file
+        self._indexPageFile = None       # doxygen page file for index page.
+        self._log = log
+        self._mode = mode
+        self._verbose = verbose
+        self._doxygenCallback = None     # callable(doxPath, configFilePath) -> bool
+        self._chmCallback = None         # callable invoked to build the CHM output
+
+    def Log(self, message, level='info'):
+        # Forward to the injected logger, if one was supplied.
+        if self._log is not None:
+            self._log(message, level)
+
+    def IsVerbose(self):
+        return self._verbose
+
+    def Generate(self):
+        """Generate interface called by outer directly.
+        @return True on success, False if index-page generation or the
+                doxygen process fails (or no process callback is registered).
+        """
+        self.Log(">>>>>> Start generate doxygen document for %s... Zzz....\n" % self._projname)
+
+        # create doxygen config file at first
+        self._configFile = doxygen.DoxygenConfigFile()
+        self._configFile.SetOutputDir(self._outputPath)
+
+        self._configFile.SetWarningFilePath(os.path.join(self._outputPath, 'warning.txt'))
+        if self._mode.lower() == 'html':
+            self._configFile.SetHtmlMode()
+        else:
+            self._configFile.SetChmMode()
+
+        self.Log("    >>>>>> Initialize doxygen config file...Zzz...\n")
+        self.InitializeConfigFile()
+
+        self.Log("    >>>>>> Generate doxygen index page file...Zzz...\n")
+        indexPagePath = self.GenerateIndexPage()
+        if indexPagePath is None:
+            self.Log("Fail to generate index page!\n", 'error')
+            return False
+        else:
+            self.Log("Success to create doxygen index page file %s \n" % indexPagePath)
+
+        # Add index page doxygen file to file list.
+        self._configFile.AddFile(indexPagePath)
+
+        # save config file to output path
+        configFilePath = os.path.join(self._outputPath, self._projname + '.doxygen_config')
+        self._configFile.Generate(configFilePath)
+        self.Log("    <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
+
+        # launch doxygen tool to generate document
+        if self._doxygenCallback is not None:
+            self.Log("    >>>>>> Start doxygen process...Zzz...\n")
+            if not self._doxygenCallback(self._doxPath, configFilePath):
+                return False
+        else:
+            self.Log("Fail to create doxygen process!", 'error')
+            return False
+
+        return True
+
+    def InitializeConfigFile(self):
+        """Initialize config setting for doxygen project. It will be invoked after config file
+        object is created. Inherited class should implement it.
+        """
+
+    def GenerateIndexPage(self):
+        """Generate doxygen index page. Inherited class should implement it."""
+        return None
+
+    def RegisterCallbackDoxygenProcess(self, callback):
+        self._doxygenCallback = callback
+
+    def RegisterCallbackCHMProcess(self, callback):
+        self._chmCallback = callback
+
+class PlatformDocumentAction(DoxygenAction):
+    """Generate platform doxygen document, will be implement at future."""
+
+class PackageDocumentAction(DoxygenAction):
+ """Generate package reference document"""
+
+    def __init__(self, doxPath, chmPath, outputPath, pObj, mode='html', log=None, arch=None, tooltag=None,
+                 onlyInclude=False, verbose=False):
+        """Constructor.
+        @param pObj        package object whose DEC file drives the document.
+        @param arch        target architecture filter (e.g. 'IA32'), or None for all.
+        @param tooltag     toolchain tag ('msft'/'gnu'/'intel'), or None for all.
+        @param onlyInclude when True, skip the per-module documentation pages.
+        """
+        DoxygenAction.__init__(self, doxPath, chmPath, outputPath, pObj.GetName(), mode, log, verbose)
+        self._pObj = pObj
+        self._arch = arch
+        self._tooltag = tooltag
+        self._onlyIncludeDocument = onlyInclude
+
+    def InitializeConfigFile(self):
+        """Set arch/toolchain preprocessor macros, project name and file
+        patterns on the doxygen config file. Unrecognized arch/tooltag
+        values fall back to defining the macros for all targets.
+        """
+        if self._arch == 'IA32':
+            self._configFile.AddPreDefined('MDE_CPU_IA32')
+        elif self._arch == 'X64':
+            self._configFile.AddPreDefined('MDE_CPU_X64')
+        elif self._arch == 'IPF':
+            self._configFile.AddPreDefined('MDE_CPU_IPF')
+        elif self._arch == 'EBC':
+            self._configFile.AddPreDefined('MDE_CPU_EBC')
+        else:
+            # Unknown arch: document all CPU targets at once.
+            self._arch = None
+            self._configFile.AddPreDefined('MDE_CPU_IA32')
+            self._configFile.AddPreDefined('MDE_CPU_X64')
+            self._configFile.AddPreDefined('MDE_CPU_IPF')
+            self._configFile.AddPreDefined('MDE_CPU_EBC')
+            self._configFile.AddPreDefined('MDE_CPU_ARM')
+
+        namestr = self._pObj.GetName()
+        if self._arch is not None:
+            namestr += '[%s]' % self._arch
+        if self._tooltag is not None:
+            namestr += '[%s]' % self._tooltag
+        self._configFile.SetProjectName(namestr)
+        self._configFile.SetStripPath(self._pObj.GetWorkspace())
+        self._configFile.SetProjectVersion(self._pObj.GetFileObj().GetVersion())
+        self._configFile.AddPattern('*.decdoxygen')
+
+        # FIXME(review): self._tooltag defaults to None in __init__, and
+        # None.lower() raises AttributeError here -- the 'else' fallback can
+        # only be reached for unrecognized *string* values. Guard with
+        # 'if self._tooltag is not None' before lowering.
+        if self._tooltag.lower() == 'msft':
+            self._configFile.AddPreDefined('_MSC_EXTENSIONS')
+        elif self._tooltag.lower() == 'gnu':
+            self._configFile.AddPreDefined('__GNUC__')
+        elif self._tooltag.lower() == 'intel':
+            self._configFile.AddPreDefined('__INTEL_COMPILER')
+        else:
+            self._tooltag = None
+            self._configFile.AddPreDefined('_MSC_EXTENSIONS')
+            self._configFile.AddPreDefined('__GNUC__')
+            self._configFile.AddPreDefined('__INTEL_COMPILER')
+
+        self._configFile.AddPreDefined('ASM_PFX= ')
+        self._configFile.AddPreDefined('OPTIONAL= ')
+
+    def GenerateIndexPage(self):
+        """Build the package's top-level doxygen page from the DEC file:
+        basic-information table, known-issue note, then sub-pages for
+        includes, library classes, PCDs, GUIDs, PPIs, protocols and
+        (unless onlyInclude was set) modules.
+        @return path of the saved .decdoxygen page file.
+        """
+        fObj = self._pObj.GetFileObj()
+        pdObj = doxygen.DoxygenFile('%s Package Document' % self._pObj.GetName(),
+                                    '%s.decdoxygen' % self._pObj.GetFilename())
+        self._configFile.AddFile(pdObj.GetFilename())
+        pdObj.AddDescription(fObj.GetFileHeader())
+
+        # Basic-information table from the DEC [Defines] section.
+        defSection = fObj.GetSectionByName('defines')[0]
+        baseSection = doxygen.Section('PackageBasicInformation', 'Package Basic Information')
+        descr = '<TABLE>'
+        for obj in defSection.GetObjects():
+            if obj.GetKey() in _dec_key_description_mapping_table.keys():
+                descr += '<TR>'
+                descr += '<TD><B>%s</B></TD>' % _dec_key_description_mapping_table[obj.GetKey()]
+                descr += '<TD>%s</TD>' % obj.GetValue()
+                descr += '</TR>'
+        descr += '</TABLE><br>'
+        baseSection.AddDescription(descr)
+        pdObj.AddSection(baseSection)
+
+        knownIssueSection = doxygen.Section('Known_Issue_section', 'Known Issue')
+        knownIssueSection.AddDescription('<ul>')
+        knownIssueSection.AddDescription('<li> OPTIONAL macro for function parameter can not be dealed with doxygen, so it disapear in this document! </li>')
+        knownIssueSection.AddDescription('</ul>')
+        pdObj.AddSection(knownIssueSection)
+
+        # Each generator returns a (possibly empty) list of root pages.
+        self.AddAllIncludeFiles(self._pObj, self._configFile)
+        pages = self.GenerateIncludesSubPage(self._pObj, self._configFile)
+        if len(pages) != 0:
+            pdObj.AddPages(pages)
+        pages = self.GenerateLibraryClassesSubPage(self._pObj, self._configFile)
+        if len(pages) != 0:
+            pdObj.AddPages(pages)
+        pages = self.GeneratePcdSubPages(self._pObj, self._configFile)
+        if len(pages) != 0:
+            pdObj.AddPages(pages)
+        pages = self.GenerateGuidSubPages(self._pObj, self._configFile)
+        if len(pages) != 0:
+            pdObj.AddPages(pages)
+        pages = self.GeneratePpiSubPages(self._pObj, self._configFile)
+        if len(pages) != 0:
+            pdObj.AddPages(pages)
+        pages = self.GenerateProtocolSubPages(self._pObj, self._configFile)
+        if len(pages) != 0:
+            pdObj.AddPages(pages)
+        if not self._onlyIncludeDocument:
+            pdObj.AddPages(self.GenerateModulePages(self._pObj, self._configFile))
+
+        pdObj.Save()
+        return pdObj.GetFilename()
+
+    def GenerateIncludesSubPage(self, pObj, configFile):
+        """Build the 'Public Includes' page tree from the DEC [Includes]
+        section, registering every include path and header file with the
+        doxygen config file along the way.
+        @return list with the root page, or [] when there is nothing to show.
+        """
+        # by default add following path as include path to config file
+        pkpath = pObj.GetFileObj().GetPackageRootPath()
+        configFile.AddIncludePath(os.path.join(pkpath, 'Include'))
+        configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Library'))
+        configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Protocol'))
+        configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Ppi'))
+        configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Guid'))
+        configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'IndustryStandard'))
+
+        rootArray = []   # NOTE(review): never used; dead local.
+        pageRoot = doxygen.Page("Public Includes", "%s_public_includes" % pObj.GetName())
+        objs = pObj.GetFileObj().GetSectionObjectsByName('includes')
+        if len(objs) == 0: return []
+
+        for obj in objs:
+            # Add path to include path
+            path = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetPath())
+            configFile.AddIncludePath(path)
+
+            # only list common folder's include file
+            if obj.GetArch().lower() != 'common':
+                continue
+
+            bNeedAddIncludePage = False
+            topPage = doxygen.Page(self._ConvertPathToDoxygen(path, pObj), 'public_include_top')
+
+            topPage.AddDescription('<ul>\n')
+            for file in os.listdir(path):
+                if file.lower() in _ignore_dir: continue
+                fullpath = os.path.join(path, file)
+                if os.path.isfile(fullpath):
+                    # Top-level header: process and link it directly.
+                    self.ProcessSourceFileForInclude(fullpath, pObj, configFile)
+                    topPage.AddDescription('<li> \link %s\endlink </li>\n' % self._ConvertPathToDoxygen(fullpath, pObj))
+                else:
+                    # Sub-directories with well-known names get their own pages elsewhere.
+                    if file.lower() in ['library', 'protocol', 'guid', 'ppi', 'ia32', 'x64', 'ipf', 'ebc', 'arm', 'pi', 'uefi', 'aarch64']:
+                        continue
+                    bNeedAddSubPage = False
+                    subpage = doxygen.Page(self._ConvertPathToDoxygen(fullpath, pObj), 'public_include_%s' % file)
+                    subpage.AddDescription('<ul>\n')
+                    for subfile in os.listdir(fullpath):
+                        if subfile.lower() in _ignore_dir: continue
+                        bNeedAddSubPage = True
+                        subfullpath = os.path.join(fullpath, subfile)
+                        self.ProcessSourceFileForInclude(subfullpath, pObj, configFile)
+                        subpage.AddDescription('<li> \link %s \endlink </li>\n' % self._ConvertPathToDoxygen(subfullpath, pObj))
+                    subpage.AddDescription('</ul>\n')
+                    if bNeedAddSubPage:
+                        bNeedAddIncludePage = True
+                        pageRoot.AddPage(subpage)
+            topPage.AddDescription('</ul>\n')
+            if bNeedAddIncludePage:
+                pageRoot.AddPage(topPage)
+
+        if pageRoot.GetSubpageCount() != 0:
+            return [pageRoot]
+        else:
+            return []
+
+    def GenerateLibraryClassesSubPage(self, pObj, configFile):
+        """
+        Generate sub page for library class for package.
+        One DEC file maybe contains many library class sections
+        for different architecture.
+
+        When self._arch is set, class pages hang directly off the root;
+        otherwise an intermediate per-architecture page is inserted.
+        NOTE(review): the two branches below duplicate the class-page
+        construction almost verbatim; 'path' is computed but never used.
+
+        @param pObj       package object owning the DEC file.
+        @param configFile doxygen config file object.
+        @return list with the root page, or [] when there is nothing to show.
+        """
+        rootArray = []
+        pageRoot = doxygen.Page("Library Class", "%s_libraryclass" % pObj.GetName())
+        objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
+        if len(objs) == 0: return []
+
+        if self._arch is not None:
+            for obj in objs:
+                classPage = doxygen.Page(obj.GetClassName(),
+                                         "lc_%s" % obj.GetClassName())
+                comments = obj.GetComment()
+                if len(comments) != 0:
+                    classPage.AddDescription('<br>\n'.join(comments) + '<br>\n')
+                pageRoot.AddPage(classPage)
+                path = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+                path = path[len(pObj.GetWorkspace()) + 1:]
+                if len(comments) == 0:
+                    classPage.AddDescription('\copydoc %s<p>' % obj.GetHeaderFile())
+                section = doxygen.Section('ref', 'Refer to Header File')
+                section.AddDescription('\link %s\n' % obj.GetHeaderFile())
+                section.AddDescription(' \endlink<p>\n')
+                classPage.AddSection(section)
+                fullPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+                self.ProcessSourceFileForInclude(fullPath, pObj, configFile)
+        else:
+            # Group class pages under one page per architecture.
+            archPageDict = {}
+            for obj in objs:
+                if obj.GetArch() not in archPageDict.keys():
+                    archPageDict[obj.GetArch()] = doxygen.Page(obj.GetArch(),
+                                                               'lc_%s' % obj.GetArch())
+                    pageRoot.AddPage(archPageDict[obj.GetArch()])
+                subArchRoot = archPageDict[obj.GetArch()]
+                classPage = doxygen.Page(obj.GetClassName(),
+                                         "lc_%s" % obj.GetClassName())
+                comments = obj.GetComment()
+                if len(comments) != 0:
+                    classPage.AddDescription('<br>\n'.join(comments) + '<br>\n')
+                subArchRoot.AddPage(classPage)
+                path = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+                path = path[len(pObj.GetWorkspace()) + 1:]
+                if len(comments) == 0:
+                    classPage.AddDescription('\copydoc %s<p>' % obj.GetHeaderFile())
+                section = doxygen.Section('ref', 'Refer to Header File')
+                section.AddDescription('\link %s\n' % obj.GetHeaderFile())
+                section.AddDescription(' \endlink<p>\n')
+                classPage.AddSection(section)
+                fullPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+
+                self.ProcessSourceFileForInclude(fullPath, pObj, configFile)
+        rootArray.append(pageRoot)
+        return rootArray
+
+    def ProcessSourceFileForInclude(self, path, pObj, configFile, infObj=None):
+        """Register a source file with the doxygen config file and recurse
+        into every header it #includes, resolving each one against the
+        file's own directory, the package include paths, and (optionally)
+        the include paths of packages referenced by infObj.
+        @param path       full path of the file being analyzed.
+        @param pObj       package object
+        @param configFile doxygen config file.
+        @param infObj     optional INF object providing extra package search paths.
+        """
+        if gInGui:
+            wx.Yield()   # keep the wx GUI responsive during long recursions
+        if not os.path.exists(path):
+            ErrorMsg('Source file path %s does not exist!' % path)
+            return
+
+        # Already registered -> already processed; also breaks include cycles.
+        if configFile.FileExists(path):
+            return
+
+        try:
+            with open(path, 'r') as f:
+                lines = f.readlines()
+        except UnicodeDecodeError:
+            # Binary or non-text file: silently skip.
+            return
+        except IOError:
+            ErrorMsg('Fail to open file %s' % path)
+            return
+
+        configFile.AddFile(path)
+
+        no = 0   # NOTE(review): dead assignment; rebound by the loop below.
+        for no in range(len(lines)):
+            if len(lines[no].strip()) == 0:
+                continue
+            # Skip comment lines.
+            if lines[no].strip()[:2] in ['##', '//', '/*', '*/']:
+                continue
+            index = lines[no].lower().find('include')   # NOTE(review): result unused.
+            #mo = IncludePattern.finditer(lines[no].lower())
+            mo = re.match(r"^#\s*include\s+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip().lower())
+            if not mo:
+                continue
+            # Re-match against the original (case-preserving) line to extract the path.
+            mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
+            filePath = mo.groups()[0]
+
+            if filePath is None or len(filePath) == 0:
+                continue
+
+            # find header file in module's path firstly.
+            fullPath = None
+
+            if os.path.exists(os.path.join(os.path.dirname(path), filePath)):
+                # Find the file in current directory
+                fullPath = os.path.join(os.path.dirname(path), filePath).replace('\\', '/')
+            else:
+                # find in depedent package's include path
+                incObjs = pObj.GetFileObj().GetSectionObjectsByName('includes')
+                for incObj in incObjs:
+                    incPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), incObj.GetPath()).strip()
+                    incPath = os.path.realpath(os.path.join(incPath, filePath))
+                    if os.path.exists(incPath):
+                        fullPath = incPath
+                        break
+                if infObj is not None:
+                    # Also search packages the module's INF declares in [Packages].
+                    pkgInfObjs = infObj.GetSectionObjectsByName('packages')
+                    for obj in pkgInfObjs:
+                        decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
+                        if not decObj:
+                            ErrorMsg ('Fail to create pacakge object for %s' % obj.GetPackageName())
+                            continue
+                        if not decObj.Parse():
+                            ErrorMsg ('Fail to load package object for %s' % obj.GetPackageName())
+                            continue
+                        incObjs = decObj.GetSectionObjectsByName('includes')
+                        for incObj in incObjs:
+                            incPath = os.path.join(decObj.GetPackageRootPath(), incObj.GetPath()).replace('\\', '/')
+                            if os.path.exists(os.path.join(incPath, filePath)):
+                                fullPath = os.path.join(os.path.join(incPath, filePath))
+                                break
+                        if fullPath is not None:
+                            break
+
+            # FIXME(review): this 'return' aborts scanning the REMAINING lines of
+            # the current file as soon as one header cannot be resolved (and only
+            # when verbose). Worse, when fullPath is None and verbose is False,
+            # control falls into the 'else' branch and fullPath.replace raises
+            # AttributeError on None. Likely intended: log and 'continue'.
+            if fullPath is None and self.IsVerbose():
+                self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
+                return
+            else:
+                fullPath = fullPath.replace('\\', '/')
+                if self.IsVerbose():
+                    self.Log('Preprocessing: Add include file %s for file %s\n' % (fullPath, path))
+                #LogMsg ('Preprocessing: Add include file %s for file %s' % (fullPath, path))
+                self.ProcessSourceFileForInclude(fullPath, pObj, configFile, infObj)
+
+    def AddAllIncludeFiles(self, pObj, configFile):
+        """Walk every DEC [Includes] path and register each file found with
+        the doxygen config file (paths normalized to backslash separators).
+        """
+        objs = pObj.GetFileObj().GetSectionObjectsByName('includes')
+        for obj in objs:
+            incPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetPath())
+            for root, dirs, files in os.walk(incPath):
+                # NOTE(review): removing from 'dirs' while iterating it can skip
+                # the element after each removal; prune with a slice copy
+                # (for dir in dirs[:]) or dirs[:] = [...] instead.
+                for dir in dirs:
+                    if dir.lower() in _ignore_dir:
+                        dirs.remove(dir)
+                for file in files:
+                    path = os.path.normpath(os.path.join(root, file))
+                    configFile.AddFile(path.replace('/', '\\'))
+
+    def GeneratePcdSubPages(self, pObj, configFile):
+        """
+        Generate sub pages for package's PCD definition.
+
+        Pages are grouped by PCD type; when self._arch is None an extra
+        per-architecture level is inserted under each type.
+        NOTE(review): the two branches build an identical information table;
+        the table construction could be factored into a helper.
+
+        @param pObj package object
+        @param configFile config file object
+        @return list with the PCD root page, or [] when the DEC has no PCDs.
+        """
+        rootArray = []
+        objs = pObj.GetFileObj().GetSectionObjectsByName('pcd')
+        if len(objs) == 0:
+            return []
+
+        pcdRootPage = doxygen.Page('PCD', 'pcd_root_page')
+        typeRootPageDict = {}       # PCD type -> type-level page
+        typeArchRootPageDict = {}   # (type + arch) -> arch-level page
+        for obj in objs:
+            if obj.GetPcdType() not in typeRootPageDict.keys():
+                typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
+                pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
+            typeRoot = typeRootPageDict[obj.GetPcdType()]
+            if self._arch is not None:
+                pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
+                                       'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
+                pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
+                section = doxygen.Section('PCDinformation', 'PCD Information')
+                desc = '<TABLE>'
+                desc += '<TR>'
+                desc += '<TD><CAPTION>Name</CAPTION></TD>'
+                desc += '<TD><CAPTION>Token Space</CAPTION></TD>'
+                desc += '<TD><CAPTION>Token number</CAPTION></TD>'
+                desc += '<TD><CAPTION>Data Type</CAPTION></TD>'
+                desc += '<TD><CAPTION>Default Value</CAPTION></TD>'
+                desc += '</TR>'
+                desc += '<TR>'
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[1]
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[0]
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdToken()
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdDataType()
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdValue()
+                desc += '</TR>'
+                desc += '</TABLE>'
+                section.AddDescription(desc)
+                pcdPage.AddSection(section)
+                typeRoot.AddPage(pcdPage)
+            else:
+                keystr = obj.GetPcdType() + obj.GetArch()
+                if keystr not in typeArchRootPageDict.keys():
+                    typeArchRootPage = doxygen.Page(obj.GetArch(), 'pcd_%s_%s_root_page' % (obj.GetPcdType(), obj.GetArch()))
+                    typeArchRootPageDict[keystr] = typeArchRootPage
+                    typeRoot.AddPage(typeArchRootPage)
+                typeArchRoot = typeArchRootPageDict[keystr]
+                pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
+                                       'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
+                pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
+                section = doxygen.Section('PCDinformation', 'PCD Information')
+                desc = '<TABLE>'
+                desc += '<TR>'
+                desc += '<TD><CAPTION>Name</CAPTION></TD>'
+                desc += '<TD><CAPTION>Token Space</CAPTION></TD>'
+                desc += '<TD><CAPTION>Token number</CAPTION></TD>'
+                desc += '<TD><CAPTION>Data Type</CAPTION></TD>'
+                desc += '<TD><CAPTION>Default Value</CAPTION></TD>'
+                desc += '</TR>'
+                desc += '<TR>'
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[1]
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[0]
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdToken()
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdDataType()
+                desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdValue()
+                desc += '</TR>'
+                desc += '</TABLE>'
+                section.AddDescription(desc)
+                pcdPage.AddSection(section)
+                typeArchRoot.AddPage(pcdPage)
+        return [pcdRootPage]
+
+    def _GenerateGuidSubPage(self, pObj, obj, configFile):
+        """Build the doxygen page for one GUID entry: an information table
+        plus, when the declaring header can be located, a reference section
+        linking to it.
+        @return the constructed doxygen.Page.
+        """
+        guidPage = doxygen.Page('%s' % obj.GetName(),
+                                'guid_%s_%s' % (obj.GetArch(), obj.GetName()))
+        comments = obj.GetComment()
+        if len(comments) != 0:
+            guidPage.AddDescription('<br>'.join(obj.GetComment()) + '<br>')
+        section = doxygen.Section('BasicGuidInfo', 'GUID Information')
+        desc = '<TABLE>'
+        desc += '<TR>'
+        desc += '<TD><CAPTION>GUID\'s Guid Name</CAPTION></TD><TD><CAPTION>GUID\'s Guid</CAPTION></TD>'
+        desc += '</TR>'
+        desc += '<TR>'
+        desc += '<TD>%s</TD>' % obj.GetName()
+        desc += '<TD>%s</TD>' % obj.GetGuid()
+        desc += '</TR>'
+        desc += '</TABLE>'
+        section.AddDescription(desc)
+        guidPage.AddSection(section)
+        # Link the header that declares this GUID, if one can be found.
+        refFile = self.FindHeaderFileForGuid(pObj, obj.GetName(), configFile)
+        if refFile:
+            relPath = refFile[len(pObj.GetWorkspace()) + 1:]
+            if len(comments) == 0:
+                guidPage.AddDescription(' \\copydoc %s <br>' % relPath)
+
+            section = doxygen.Section('ref', 'Refer to Header File')
+            section.AddDescription('\link %s\n' % relPath)
+            section.AddDescription('\endlink\n')
+            self.ProcessSourceFileForInclude(refFile, pObj, configFile)
+            guidPage.AddSection(section)
+        return guidPage
+
+    def GenerateGuidSubPages(self, pObj, configFile):
+        """
+        Generate sub pages for package's GUID definition.
+        @param pObj package object
+        @param configFile doxygen config file object
+        @return list with the GUID root page, or [] when the DEC has no GUIDs.
+        """
+        pageRoot = doxygen.Page('GUID', 'guid_root_page')
+        objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
+        if len(objs) == 0: return []
+        if self._arch is not None:
+            for obj in objs:
+                pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
+        else:
+            # Group GUID pages under one page per architecture.
+            guidArchRootPageDict = {}
+            for obj in objs:
+                if obj.GetArch() not in guidArchRootPageDict.keys():
+                    guidArchRoot = doxygen.Page(obj.GetArch(), 'guid_arch_root_%s' % obj.GetArch())
+                    pageRoot.AddPage(guidArchRoot)
+                    guidArchRootPageDict[obj.GetArch()] = guidArchRoot
+                guidArchRoot = guidArchRootPageDict[obj.GetArch()]
+                guidArchRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
+        return [pageRoot]
+
+    def _GeneratePpiSubPage(self, pObj, obj, configFile):
+        """Build the doxygen page for one PPI entry: an information table
+        plus a reference section linking the declaring header when found.
+        @return the constructed doxygen.Page.
+        """
+        guidPage = doxygen.Page(obj.GetName(), 'ppi_page_%s' % obj.GetName())
+        comments = obj.GetComment()
+        if len(comments) != 0:
+            guidPage.AddDescription('<br>'.join(obj.GetComment()) + '<br>')
+        section = doxygen.Section('BasicPpiInfo', 'PPI Information')
+        desc = '<TABLE>'
+        desc += '<TR>'
+        desc += '<TD><CAPTION>PPI\'s Guid Name</CAPTION></TD><TD><CAPTION>PPI\'s Guid</CAPTION></TD>'
+        desc += '</TR>'
+        desc += '<TR>'
+        desc += '<TD>%s</TD>' % obj.GetName()
+        desc += '<TD>%s</TD>' % obj.GetGuid()
+        desc += '</TR>'
+        desc += '</TABLE>'
+        section.AddDescription(desc)
+        guidPage.AddSection(section)
+        # Link the header that declares this PPI, if one can be found.
+        refFile = self.FindHeaderFileForGuid(pObj, obj.GetName(), configFile)
+        if refFile:
+            relPath = refFile[len(pObj.GetWorkspace()) + 1:]
+            if len(comments) == 0:
+                guidPage.AddDescription(' \\copydoc %s <br>' % relPath)
+            section = doxygen.Section('ref', 'Refer to Header File')
+            section.AddDescription('\link %s\n' % relPath)
+            section.AddDescription('\endlink\n')
+            self.ProcessSourceFileForInclude(refFile, pObj, configFile)
+            guidPage.AddSection(section)
+
+        return guidPage
+
+    def GeneratePpiSubPages(self, pObj, configFile):
+        """
+        Generate sub pages for package's PPI definition.
+        @param pObj package object
+        @param configFile doxygen config file object
+        @return list with the PPI root page, or [] when the DEC has no PPIs.
+        """
+        pageRoot = doxygen.Page('PPI', 'ppi_root_page')
+        objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
+        if len(objs) == 0: return []
+        if self._arch is not None:
+            for obj in objs:
+                pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
+        else:
+            # Group PPI pages under one page per architecture.
+            guidArchRootPageDict = {}
+            for obj in objs:
+                if obj.GetArch() not in guidArchRootPageDict.keys():
+                    guidArchRoot = doxygen.Page(obj.GetArch(), 'ppi_arch_root_%s' % obj.GetArch())
+                    pageRoot.AddPage(guidArchRoot)
+                    guidArchRootPageDict[obj.GetArch()] = guidArchRoot
+                guidArchRoot = guidArchRootPageDict[obj.GetArch()]
+                guidArchRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
+        return [pageRoot]
+
+    def _GenerateProtocolSubPage(self, pObj, obj, configFile):
+        """Build the doxygen page for one protocol entry: an information
+        table plus a reference section linking the declaring header when found.
+        @return the constructed doxygen.Page.
+        """
+        guidPage = doxygen.Page(obj.GetName(), 'protocol_page_%s' % obj.GetName())
+        comments = obj.GetComment()
+        if len(comments) != 0:
+            guidPage.AddDescription('<br>'.join(obj.GetComment()) + '<br>')
+        section = doxygen.Section('BasicProtocolInfo', 'PROTOCOL Information')
+        desc = '<TABLE>'
+        desc += '<TR>'
+        desc += '<TD><CAPTION>PROTOCOL\'s Guid Name</CAPTION></TD><TD><CAPTION>PROTOCOL\'s Guid</CAPTION></TD>'
+        desc += '</TR>'
+        desc += '<TR>'
+        desc += '<TD>%s</TD>' % obj.GetName()
+        desc += '<TD>%s</TD>' % obj.GetGuid()
+        desc += '</TR>'
+        desc += '</TABLE>'
+        section.AddDescription(desc)
+        guidPage.AddSection(section)
+
+        # Link the header that declares this protocol, if one can be found.
+        refFile = self.FindHeaderFileForGuid(pObj, obj.GetName(), configFile)
+        if refFile:
+            relPath = refFile[len(pObj.GetWorkspace()) + 1:]
+            if len(comments) == 0:
+                guidPage.AddDescription(' \\copydoc %s <br>' % relPath)
+            section = doxygen.Section('ref', 'Refer to Header File')
+            section.AddDescription('\link %s\n' % relPath)
+            section.AddDescription('\endlink\n')
+            self.ProcessSourceFileForInclude(refFile, pObj, configFile)
+            guidPage.AddSection(section)
+
+        return guidPage
+
+    def GenerateProtocolSubPages(self, pObj, configFile):
+        """
+        Generate sub pages for package's protocol definition.
+        @param pObj package object
+        @param configFile doxygen config file object
+        @return list with the protocol root page, or [] when none declared.
+        """
+        pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
+        objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
+        if len(objs) == 0: return []
+        if self._arch is not None:
+            for obj in objs:
+                pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
+        else:
+            # Group protocol pages under one page per architecture.
+            guidArchRootPageDict = {}
+            for obj in objs:
+                if obj.GetArch() not in guidArchRootPageDict.keys():
+                    guidArchRoot = doxygen.Page(obj.GetArch(), 'protocol_arch_root_%s' % obj.GetArch())
+                    pageRoot.AddPage(guidArchRoot)
+                    guidArchRootPageDict[obj.GetArch()] = guidArchRoot
+                guidArchRoot = guidArchRootPageDict[obj.GetArch()]
+                guidArchRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
+        return [pageRoot]
+
+    def FindHeaderFileForGuid(self, pObj, name, configFile):
+        """
+        For declaration header file for GUID/PPI/Protocol.
+
+        Scans C headers under <PackageRoot>/Include (or the package root if
+        that folder is absent) for a line containing both the symbol name
+        and 'extern'.
+
+        @param pObj package object
+        @param name guid/ppi/protocol's name
+        @param configFile config file object
+
+        @return full path of header file and None if not found.
+        """
+        startPath = pObj.GetFileObj().GetPackageRootPath()
+        incPath = os.path.join(startPath, 'Include').replace('\\', '/')
+        # if <PackagePath>/include exist, then search header under it.
+        if os.path.exists(incPath):
+            startPath = incPath
+
+        for root, dirs, files in os.walk(startPath):
+            # NOTE(review): removing from 'dirs' while iterating it can skip
+            # entries; prune with a slice copy instead.
+            for dir in dirs:
+                if dir.lower() in _ignore_dir:
+                    dirs.remove(dir)
+            for file in files:
+                fPath = os.path.join(root, file)
+                if not IsCHeaderFile(fPath):
+                    continue
+                # NOTE(review): a 'with open(...)' block would also close the
+                # handle if readlines() raised.
+                try:
+                    f = open(fPath, 'r')
+                    lines = f.readlines()
+                    f.close()
+                except IOError:
+                    self.Log('Fail to open file %s\n' % fPath)
+                    continue
+                for line in lines:
+                    if line.find(name) != -1 and \
+                       line.find('extern') != -1:
+                        return fPath.replace('\\', '/')
+        return None
+
+    def GetPackageModuleList(self, pObj):
+        """
+        Get all module's INF path under package's root path
+        @param pObj package object
+        @return arrary of INF full path
+        """
+        mArray = []
+        packPath = pObj.GetFileObj().GetPackageRootPath()
+        # FIXME(review): 'os.path.exists' here is the function object itself
+        # (always truthy), so this guard never fires; it was presumably meant
+        # to be 'if not os.path.exists(packPath): return None'.
+        if not os.path.exists:
+            return None
+        for root, dirs, files in os.walk(packPath):
+            # NOTE(review): removing from 'dirs' while iterating it can skip
+            # entries; prune with a slice copy instead.
+            for dir in dirs:
+                if dir.lower() in _ignore_dir:
+                    dirs.remove(dir)
+            for file in files:
+                if CheckPathPostfix(file, 'inf'):
+                    fPath = os.path.join(root, file).replace('\\', '/')
+                    mArray.append(fPath)
+        return mArray
+
+    def GenerateModulePages(self, pObj, configFile):
+        """
+        Generate sub pages for package's module which is under the package
+        root directory. Modules producing a library class are grouped under
+        a 'Libraries' root page, the rest under 'Modules'.
+
+        @param pObj package object
+        @param configFile doxygen config file object
+        @return list of root pages (possibly empty).
+        """
+        infList = self.GetPackageModuleList(pObj)
+        rootPages = []
+        libObjs = []
+        modObjs = []
+        for infpath in infList:
+            infObj = inf.INFFile(infpath)
+            #infObj = INFFileObject.INFFile (pObj.GetWorkspacePath(),
+            #                                inf)
+            # NOTE(review): the two error messages below interpolate 'inf' --
+            # the imported module, not the current 'infpath' -- so they print
+            # the module repr instead of the failing file path.
+            if not infObj:
+                self.Log('Fail create INF object for %s' % inf)
+                continue
+            if not infObj.Parse():
+                self.Log('Fail to load INF file %s' % inf)
+                continue
+            if infObj.GetProduceLibraryClass() is not None:
+                libObjs.append(infObj)
+            else:
+                modObjs.append(infObj)
+
+        if len(libObjs) != 0:
+            libRootPage = doxygen.Page('Libraries', 'lib_root_page')
+            rootPages.append(libRootPage)
+            for libInf in libObjs:
+                libRootPage.AddPage(self.GenerateModulePage(pObj, libInf, configFile, True))
+
+        if len(modObjs) != 0:
+            modRootPage = doxygen.Page('Modules', 'module_root_page')
+            rootPages.append(modRootPage)
+            for modInf in modObjs:
+                modRootPage.AddPage(self.GenerateModulePage(pObj, modInf, configFile, False))
+
+        return rootPages
+
+ def GenerateModulePage(self, pObj, infObj, configFile, isLib):
+ """
+ Generate page for a module/library.
+ @param infObj INF file object for module/library
+ @param configFile doxygen config file object
+ @param isLib Whether this module is library
+
+ @param module doxygen page object
+ """
+ workspace = pObj.GetWorkspace()
+ refDecObjs = []
+ for obj in infObj.GetSectionObjectsByName('packages'):
+ decObj = dec.DECFile(os.path.join(workspace, obj.GetPath()))
+ if not decObj:
+ ErrorMsg ('Fail to create pacakge object for %s' % obj.GetPackageName())
+ continue
+ if not decObj.Parse():
+ ErrorMsg ('Fail to load package object for %s' % obj.GetPackageName())
+ continue
+ refDecObjs.append(decObj)
+
+ modPage = doxygen.Page('%s' % infObj.GetBaseName(),
+ 'module_%s' % infObj.GetBaseName())
+ modPage.AddDescription(infObj.GetFileHeader())
+
+ basicInfSection = doxygen.Section('BasicModuleInformation', 'Basic Module Information')
+ desc = "<TABLE>"
+ for obj in infObj.GetSectionObjectsByName('defines'):
+ key = obj.GetKey()
+ value = obj.GetValue()
+ if key not in _inf_key_description_mapping_table.keys(): continue
+ if key == 'LIBRARY_CLASS' and value.find('|') != -1:
+ clsname, types = value.split('|')
+ desc += '<TR>'
+ desc += '<TD><B>%s</B></TD>' % _inf_key_description_mapping_table[key]
+ desc += '<TD>%s</TD>' % clsname
+ desc += '</TR>'
+
+ desc += '<TR>'
+ desc += '<TD><B>Supported Module Types</B></TD>'
+ desc += '<TD>%s</TD>' % types
+ desc += '</TR>'
+ else:
+ desc += '<TR>'
+ desc += '<TD><B>%s</B></TD>' % _inf_key_description_mapping_table[key]
+ if key == 'EFI_SPECIFICATION_VERSION' and value == '0x00020000':
+ value = '2.0'
+ desc += '<TD>%s</TD>' % value
+ desc += '</TR>'
+ desc += '</TABLE>'
+ basicInfSection.AddDescription(desc)
+ modPage.AddSection(basicInfSection)
+
+ # Add protocol section
+ data = []
+ for obj in infObj.GetSectionObjectsByName('pcd', self._arch):
+ data.append(obj.GetPcdName().strip())
+ if len(data) != 0:
+ s = doxygen.Section('Pcds', 'Pcds')
+ desc = "<TABLE>"
+ desc += '<TR><TD><B>PCD Name</B></TD><TD><B>TokenSpace</B></TD><TD><B>Package</B></TD></TR>'
+ for item in data:
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % item.split('.')[1]
+ desc += '<TD>%s</TD>' % item.split('.')[0]
+ pkgbasename = self.SearchPcdPackage(item, workspace, refDecObjs)
+ desc += '<TD>%s</TD>' % pkgbasename
+ desc += '</TR>'
+ desc += "</TABLE>"
+ s.AddDescription(desc)
+ modPage.AddSection(s)
+
+ # Add protocol section
+ #sects = infObj.GetSectionByString('protocol')
+ data = []
+ #for sect in sects:
+ for obj in infObj.GetSectionObjectsByName('protocol', self._arch):
+ data.append(obj.GetName().strip())
+ if len(data) != 0:
+ s = doxygen.Section('Protocols', 'Protocols')
+ desc = "<TABLE>"
+ desc += '<TR><TD><B>Name</B></TD><TD><B>Package</B></TD></TR>'
+ for item in data:
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % item
+ pkgbasename = self.SearchProtocolPackage(item, workspace, refDecObjs)
+ desc += '<TD>%s</TD>' % pkgbasename
+ desc += '</TR>'
+ desc += "</TABLE>"
+ s.AddDescription(desc)
+ modPage.AddSection(s)
+
+ # Add ppi section
+ #sects = infObj.GetSectionByString('ppi')
+ data = []
+ #for sect in sects:
+ for obj in infObj.GetSectionObjectsByName('ppi', self._arch):
+ data.append(obj.GetName().strip())
+ if len(data) != 0:
+ s = doxygen.Section('Ppis', 'Ppis')
+ desc = "<TABLE>"
+ desc += '<TR><TD><B>Name</B></TD><TD><B>Package</B></TD></TR>'
+ for item in data:
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % item
+ pkgbasename = self.SearchPpiPackage(item, workspace, refDecObjs)
+ desc += '<TD>%s</TD>' % pkgbasename
+ desc += '</TR>'
+ desc += "</TABLE>"
+ s.AddDescription(desc)
+ modPage.AddSection(s)
+
+ # Add guid section
+ #sects = infObj.GetSectionByString('guid')
+ data = []
+ #for sect in sects:
+ for obj in infObj.GetSectionObjectsByName('guid', self._arch):
+ data.append(obj.GetName().strip())
+ if len(data) != 0:
+ s = doxygen.Section('Guids', 'Guids')
+ desc = "<TABLE>"
+ desc += '<TR><TD><B>Name</B></TD><TD><B>Package</B></TD></TR>'
+ for item in data:
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % item
+ pkgbasename = self.SearchGuidPackage(item, workspace, refDecObjs)
+ desc += '<TD>%s</TD>' % pkgbasename
+ desc += '</TR>'
+ desc += "</TABLE>"
+ s.AddDescription(desc)
+ modPage.AddSection(s)
+
+ section = doxygen.Section('LibraryClasses', 'Library Classes')
+ desc = "<TABLE>"
+ desc += '<TR><TD><B>Name</B></TD><TD><B>Type</B></TD><TD><B>Package</B></TD><TD><B>Header File</B></TD></TR>'
+ if isLib:
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % infObj.GetProduceLibraryClass()
+ desc += '<TD>Produce</TD>'
+ try:
+ pkgname, hPath = self.SearchLibraryClassHeaderFile(infObj.GetProduceLibraryClass(),
+ workspace,
+ refDecObjs)
+ except:
+ self.Log ('fail to get package header file for lib class %s' % infObj.GetProduceLibraryClass())
+ pkgname = 'NULL'
+ hPath = 'NULL'
+ desc += '<TD>%s</TD>' % pkgname
+ if hPath != "NULL":
+ desc += '<TD>\link %s \endlink</TD>' % hPath
+ else:
+ desc += '<TD>%s</TD>' % hPath
+ desc += '</TR>'
+ for lcObj in infObj.GetSectionObjectsByName('libraryclasses', self._arch):
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % lcObj.GetClass()
+ retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
+ workspace,
+ refDecObjs)
+ if retarr is not None:
+ pkgname, hPath = retarr
+ else:
+ self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
+ pkgname = 'NULL'
+ hPath = 'NULL'
+ desc += '<TD>Consume</TD>'
+ desc += '<TD>%s</TD>' % pkgname
+ desc += '<TD>\link %s \endlink</TD>' % hPath
+ desc += '</TR>'
+ desc += "</TABLE>"
+ section.AddDescription(desc)
+ modPage.AddSection(section)
+
+ section = doxygen.Section('SourceFiles', 'Source Files')
+ section.AddDescription('<ul>\n')
+ for obj in infObj.GetSourceObjects(self._arch, self._tooltag):
+ sPath = infObj.GetModuleRootPath()
+ sPath = os.path.join(sPath, obj.GetSourcePath()).replace('\\', '/').strip()
+ if sPath.lower().endswith('.uni') or sPath.lower().endswith('.s') or sPath.lower().endswith('.asm') or sPath.lower().endswith('.nasm'):
+ newPath = self.TranslateUniFile(sPath)
+ configFile.AddFile(newPath)
+ newPath = newPath[len(pObj.GetWorkspace()) + 1:]
+ section.AddDescription('<li> \link %s \endlink </li>' % newPath)
+ else:
+ self.ProcessSourceFileForInclude(sPath, pObj, configFile, infObj)
+ sPath = sPath[len(pObj.GetWorkspace()) + 1:]
+ section.AddDescription('<li>\link %s \endlink </li>' % sPath)
+ section.AddDescription('</ul>\n')
+ modPage.AddSection(section)
+
+ #sects = infObj.GetSectionByString('depex')
+ data = []
+ #for sect in sects:
+ for obj in infObj.GetSectionObjectsByName('depex'):
+ data.append(str(obj))
+ if len(data) != 0:
+ s = doxygen.Section('DependentSection', 'Module Dependencies')
+ s.AddDescription('<br>'.join(data))
+ modPage.AddSection(s)
+
+ return modPage
+
+ def TranslateUniFile(self, path):
+ newpath = path + '.dox'
+ #import core.textfile as textfile
+ #file = textfile.TextFile(path)
+
+ try:
+ file = open(path, 'r')
+ except (IOError, OSError) as msg:
+ return None
+
+ t = file.read()
+ file.close()
+
+ output = '/** @file \n'
+ #output = '<html><body>'
+ arr = t.split('\r\n')
+ for line in arr:
+ if line.find('@file') != -1:
+ continue
+ if line.find('*/') != -1:
+ continue
+ line = line.strip()
+ if line.strip().startswith('/'):
+ arr = line.split(' ')
+ if len(arr) > 1:
+ line = ' '.join(arr[1:])
+ else:
+ continue
+ output += '%s<br>\n' % line
+ output += '**/'
+
+ if os.path.exists(newpath):
+ os.remove(newpath)
+
+ file = open(newpath, "w")
+ file.write(output)
+ file.close()
+ return newpath
+
+ def SearchPcdPackage(self, pcdname, workspace, decObjs):
+ for decObj in decObjs:
+ for pcd in decObj.GetSectionObjectsByName('pcd'):
+ if pcdname == pcd.GetPcdName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchProtocolPackage(self, protname, workspace, decObjs):
+ for decObj in decObjs:
+ for proto in decObj.GetSectionObjectsByName('protocol'):
+ if protname == proto.GetName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchPpiPackage(self, ppiname, workspace, decObjs):
+ for decObj in decObjs:
+ for ppi in decObj.GetSectionObjectsByName('ppi'):
+ if ppiname == ppi.GetName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchGuidPackage(self, guidname, workspace, decObjs):
+ for decObj in decObjs:
+ for guid in decObj.GetSectionObjectsByName('guid'):
+ if guidname == guid.GetName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchLibraryClassHeaderFile(self, className, workspace, decObjs):
+ for decObj in decObjs:
+ for cls in decObj.GetSectionObjectsByName('libraryclasses'):
+ if cls.GetClassName().strip() == className:
+ path = cls.GetHeaderFile().strip()
+ path = os.path.join(decObj.GetPackageRootPath(), path)
+ path = path[len(workspace) + 1:]
+ return decObj.GetBaseName(), path.replace('\\', '/')
+
+ return None
+
+ def _ConvertPathToDoxygen(self, path, pObj):
+ pRootPath = pObj.GetWorkspace()
+ path = path[len(pRootPath) + 1:]
+ return path.replace('\\', '/')
+
+def IsCHeaderFile(path):
+ return CheckPathPostfix(path, 'h')
+
+def CheckPathPostfix(path, str):
+ index = path.rfind('.')
+ if index == -1:
+ return False
+ if path[index + 1:].lower() == str.lower():
+ return True
+ return False
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
new file mode 100755
index 00000000..448e3682
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/doxygengen_spec.py
@@ -0,0 +1,1086 @@
+## @file
+#
+# This file produce action class to generate doxygen document for edk2 codebase.
+# The action classes are shared by GUI and command line tools.
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+from plugins.EdkPlugins.basemodel import doxygen
+import os
+try:
+ import wx
+ gInGui = True
+except:
+ gInGui = False
+import re
+from plugins.EdkPlugins.edk2.model import inf
+from plugins.EdkPlugins.edk2.model import dec
+from plugins.EdkPlugins.basemodel.message import *
+
+_ignore_dir = ['.svn', '_svn', 'cvs']
+_inf_key_description_mapping_table = {
+ 'INF_VERSION':'Version of INF file specification',
+ #'BASE_NAME':'Module Name',
+ 'FILE_GUID':'Module Guid',
+ 'MODULE_TYPE': 'Module Type',
+ 'VERSION_STRING': 'Module Version',
+ 'LIBRARY_CLASS': 'Produced Library Class',
+ 'EFI_SPECIFICATION_VERSION': 'UEFI Specification Version',
+ 'PI_SPECIFICATION_VERSION': 'PI Specification Version',
+ 'ENTRY_POINT': 'Module Entry Point Function',
+ 'CONSTRUCTOR': 'Library Constructor Function'
+}
+
+_dec_key_description_mapping_table = {
+ 'DEC_SPECIFICATION': 'Version of DEC file specification',
+ 'PACKAGE_GUID': 'Package Guid'
+}
+class DoxygenAction:
+ """This is base class for all doxygen action.
+ """
+
+ def __init__(self, doxPath, chmPath, outputPath, projname, mode='html', log=None, verbose=False):
+ """Constructor function.
+ @param doxPath the absolute path of the doxygen executable file.
+ @param outputPath the absolute output path.
+ @param log log function for output message
+ """
+ self._doxPath = doxPath
+ self._chmPath = chmPath
+ self._outputPath = outputPath
+ self._projname = projname
+ self._configFile = None # doxygen config file is used by doxygen exe file
+ self._indexPageFile = None # doxygen page file for index page.
+ self._log = log
+ self._mode = mode
+ self._verbose = verbose
+ self._doxygenCallback = None
+ self._chmCallback = None
+
+ def Log(self, message, level='info'):
+ if self._log is not None:
+ self._log(message, level)
+
+ def IsVerbose(self):
+ return self._verbose
+
+ def Generate(self):
+ """Generate interface called by outer directly"""
+ self.Log(">>>>>> Start generate doxygen document for %s... Zzz....\n" % self._projname)
+
+ # create doxygen config file at first
+ self._configFile = doxygen.DoxygenConfigFile()
+ self._configFile.SetOutputDir(self._outputPath)
+
+ self._configFile.SetWarningFilePath(os.path.join(self._outputPath, 'warning.txt'))
+ if self._mode.lower() == 'html':
+ self._configFile.SetHtmlMode()
+ else:
+ self._configFile.SetChmMode()
+
+ self.Log(" >>>>>> Initialize doxygen config file...Zzz...\n")
+ self.InitializeConfigFile()
+
+ self.Log(" >>>>>> Generate doxygen index page file...Zzz...\n")
+ indexPagePath = self.GenerateIndexPage()
+ if indexPagePath is None:
+ self.Log("Fail to generate index page!\n", 'error')
+ return False
+ else:
+ self.Log("Success to create doxygen index page file %s \n" % indexPagePath)
+
+ # Add index page doxygen file to file list.
+ self._configFile.AddFile(indexPagePath)
+
+ # save config file to output path
+ configFilePath = os.path.join(self._outputPath, self._projname + '.doxygen_config')
+ self._configFile.Generate(configFilePath)
+ self.Log(" <<<<<< Success Save doxygen config file to %s...\n" % configFilePath)
+
+ # launch doxygen tool to generate document
+ if self._doxygenCallback is not None:
+ self.Log(" >>>>>> Start doxygen process...Zzz...\n")
+ if not self._doxygenCallback(self._doxPath, configFilePath):
+ return False
+ else:
+ self.Log("Fail to create doxygen process!", 'error')
+ return False
+
+ return True
+
+ def InitializeConfigFile(self):
+ """Initialize config setting for doxygen project. It will be invoked after config file
+ object is created. Inherited class should implement it.
+ """
+
+ def GenerateIndexPage(self):
+ """Generate doxygen index page. Inherited class should implement it."""
+ return None
+
+ def RegisterCallbackDoxygenProcess(self, callback):
+ self._doxygenCallback = callback
+
+ def RegisterCallbackCHMProcess(self, callback):
+ self._chmCallback = callback
+
+class PlatformDocumentAction(DoxygenAction):
+ """Generate platform doxygen document, will be implement at future."""
+
+class PackageDocumentAction(DoxygenAction):
+ """Generate package reference document"""
+
+ def __init__(self, doxPath, chmPath, outputPath, pObj, mode='html', log=None, arch=None, tooltag=None,
+ macros=[], onlyInclude=False, verbose=False):
+ DoxygenAction.__init__(self, doxPath, chmPath, outputPath, pObj.GetName(), mode, log, verbose)
+ self._pObj = pObj
+ self._arch = arch
+ self._tooltag = tooltag
+ self._macros = macros
+ self._onlyIncludeDocument = onlyInclude
+
+ def InitializeConfigFile(self):
+ if self._arch == 'IA32':
+ self._configFile.AddPreDefined('MDE_CPU_IA32')
+ elif self._arch == 'X64':
+ self._configFile.AddPreDefined('MDE_CPU_X64')
+ elif self._arch == 'IPF':
+ self._configFile.AddPreDefined('MDE_CPU_IPF')
+ elif self._arch == 'EBC':
+ self._configFile.AddPreDefined('MDE_CPU_EBC')
+ else:
+ self._arch = None
+ self._configFile.AddPreDefined('MDE_CPU_IA32')
+ self._configFile.AddPreDefined('MDE_CPU_X64')
+ self._configFile.AddPreDefined('MDE_CPU_IPF')
+ self._configFile.AddPreDefined('MDE_CPU_EBC')
+ self._configFile.AddPreDefined('MDE_CPU_ARM')
+
+ for macro in self._macros:
+ self._configFile.AddPreDefined(macro)
+
+ namestr = self._pObj.GetName()
+ if self._arch is not None:
+ namestr += '[%s]' % self._arch
+ if self._tooltag is not None:
+ namestr += '[%s]' % self._tooltag
+ self._configFile.SetProjectName(namestr)
+ self._configFile.SetStripPath(self._pObj.GetWorkspace())
+ self._configFile.SetProjectVersion(self._pObj.GetFileObj().GetVersion())
+ self._configFile.AddPattern('*.decdoxygen')
+
+ if self._tooltag.lower() == 'msft':
+ self._configFile.AddPreDefined('_MSC_EXTENSIONS')
+ elif self._tooltag.lower() == 'gnu':
+ self._configFile.AddPreDefined('__GNUC__')
+ elif self._tooltag.lower() == 'intel':
+ self._configFile.AddPreDefined('__INTEL_COMPILER')
+ else:
+ self._tooltag = None
+ self._configFile.AddPreDefined('_MSC_EXTENSIONS')
+ self._configFile.AddPreDefined('__GNUC__')
+ self._configFile.AddPreDefined('__INTEL_COMPILER')
+
+ self._configFile.AddPreDefined('ASM_PFX= ')
+ self._configFile.AddPreDefined('OPTIONAL= ')
+
+ def GenerateIndexPage(self):
+ """Generate doxygen index page. Inherited class should implement it."""
+ fObj = self._pObj.GetFileObj()
+ pdObj = doxygen.DoxygenFile('%s Package Document' % self._pObj.GetName(),
+ '%s.decdoxygen' % self._pObj.GetFilename())
+ self._configFile.AddFile(pdObj.GetFilename())
+ pdObj.AddDescription(fObj.GetFileHeader())
+
+ defSection = fObj.GetSectionByName('defines')[0]
+ baseSection = doxygen.Section('PackageBasicInformation', 'Package Basic Information')
+ descr = '<TABLE>'
+ for obj in defSection.GetObjects():
+ if obj.GetKey() in _dec_key_description_mapping_table.keys():
+ descr += '<TR>'
+ descr += '<TD><B>%s</B></TD>' % _dec_key_description_mapping_table[obj.GetKey()]
+ descr += '<TD>%s</TD>' % obj.GetValue()
+ descr += '</TR>'
+ descr += '</TABLE><br>'
+ baseSection.AddDescription(descr)
+ pdObj.AddSection(baseSection)
+
+ knownIssueSection = doxygen.Section('Known_Issue_section', 'Known Issue')
+ knownIssueSection.AddDescription('<ul>')
+ knownIssueSection.AddDescription('<li> OPTIONAL macro for function parameter can not be dealed with doxygen, so it disapear in this document! </li>')
+ knownIssueSection.AddDescription('</ul>')
+ pdObj.AddSection(knownIssueSection)
+
+ self.AddAllIncludeFiles(self._pObj, self._configFile)
+ pages = self.GenerateIncludesSubPage(self._pObj, self._configFile)
+ if len(pages) != 0:
+ pdObj.AddPages(pages)
+ pages = self.GenerateLibraryClassesSubPage(self._pObj, self._configFile)
+ if len(pages) != 0:
+ pdObj.AddPages(pages)
+ pages = self.GeneratePcdSubPages(self._pObj, self._configFile)
+ if len(pages) != 0:
+ pdObj.AddPages(pages)
+ pages = self.GenerateGuidSubPages(self._pObj, self._configFile)
+ if len(pages) != 0:
+ pdObj.AddPages(pages)
+ pages = self.GeneratePpiSubPages(self._pObj, self._configFile)
+ if len(pages) != 0:
+ pdObj.AddPages(pages)
+ pages = self.GenerateProtocolSubPages(self._pObj, self._configFile)
+ if len(pages) != 0:
+ pdObj.AddPages(pages)
+ if not self._onlyIncludeDocument:
+ pdObj.AddPages(self.GenerateModulePages(self._pObj, self._configFile))
+
+ pdObj.Save()
+ return pdObj.GetFilename()
+
+ def GenerateIncludesSubPage(self, pObj, configFile):
+ # by default add following path as include path to config file
+ pkpath = pObj.GetFileObj().GetPackageRootPath()
+ configFile.AddIncludePath(os.path.join(pkpath, 'Include'))
+ configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Library'))
+ configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Protocol'))
+ configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Ppi'))
+ configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'Guid'))
+ configFile.AddIncludePath(os.path.join(pkpath, 'Include', 'IndustryStandard'))
+
+ rootArray = []
+ pageRoot = doxygen.Page("Public Includes", "%s_public_includes" % pObj.GetName())
+ objs = pObj.GetFileObj().GetSectionObjectsByName('includes')
+ if len(objs) == 0: return []
+
+ for obj in objs:
+ # Add path to include path
+ path = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetPath())
+ configFile.AddIncludePath(path)
+
+ # only list common folder's include file
+ if obj.GetArch().lower() != 'common':
+ continue
+
+ bNeedAddIncludePage = False
+ topPage = doxygen.Page(self._ConvertPathToDoxygen(path, pObj), 'public_include_top')
+
+ topPage.AddDescription('<ul>\n')
+ for file in os.listdir(path):
+ if file.lower() in _ignore_dir: continue
+ fullpath = os.path.join(path, file)
+ if os.path.isfile(fullpath):
+ self.ProcessSourceFileForInclude(fullpath, pObj, configFile)
+ topPage.AddDescription('<li> \link %s\endlink </li>\n' % self._ConvertPathToDoxygen(fullpath, pObj))
+ else:
+ if file.lower() in ['library', 'protocol', 'guid', 'ppi', 'ia32', 'x64', 'ipf', 'ebc', 'arm', 'pi', 'uefi', 'aarch64']:
+ continue
+ bNeedAddSubPage = False
+ subpage = doxygen.Page(self._ConvertPathToDoxygen(fullpath, pObj), 'public_include_%s' % file)
+ subpage.AddDescription('<ul>\n')
+ for subfile in os.listdir(fullpath):
+ if subfile.lower() in _ignore_dir: continue
+ bNeedAddSubPage = True
+ subfullpath = os.path.join(fullpath, subfile)
+ self.ProcessSourceFileForInclude(subfullpath, pObj, configFile)
+ subpage.AddDescription('<li> \link %s \endlink </li>\n' % self._ConvertPathToDoxygen(subfullpath, pObj))
+ subpage.AddDescription('</ul>\n')
+ if bNeedAddSubPage:
+ bNeedAddIncludePage = True
+ pageRoot.AddPage(subpage)
+ topPage.AddDescription('</ul>\n')
+ if bNeedAddIncludePage:
+ pageRoot.AddPage(topPage)
+
+ if pageRoot.GetSubpageCount() != 0:
+ return [pageRoot]
+ else:
+ return []
+
+ def GenerateLibraryClassesSubPage(self, pObj, configFile):
+ """
+ Generate sub page for library class for package.
+ One DEC file may contain many library class sections
+ for different architecture.
+
+ @param fObj DEC file object.
+ """
+ rootArray = []
+ pageRoot = doxygen.Page("Library Class", "%s_libraryclass" % pObj.GetName())
+ objs = pObj.GetFileObj().GetSectionObjectsByName('libraryclass', self._arch)
+ if len(objs) == 0: return []
+
+ if self._arch is not None:
+ for obj in objs:
+ classPage = doxygen.Page(obj.GetClassName(),
+ "lc_%s" % obj.GetClassName())
+ comments = obj.GetComment()
+ if len(comments) != 0:
+ classPage.AddDescription('<br>\n'.join(comments) + '<br>\n')
+ pageRoot.AddPage(classPage)
+ path = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+ path = path[len(pObj.GetWorkspace()) + 1:]
+ if len(comments) == 0:
+ classPage.AddDescription('\copydoc %s<p>' % obj.GetHeaderFile())
+ section = doxygen.Section('ref', 'Refer to Header File')
+ section.AddDescription('\link %s\n' % obj.GetHeaderFile())
+ section.AddDescription(' \endlink<p>\n')
+ classPage.AddSection(section)
+ fullPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+ self.ProcessSourceFileForInclude(fullPath, pObj, configFile)
+ else:
+ archPageDict = {}
+ for obj in objs:
+ if obj.GetArch() not in archPageDict.keys():
+ archPageDict[obj.GetArch()] = doxygen.Page(obj.GetArch(),
+ 'lc_%s' % obj.GetArch())
+ pageRoot.AddPage(archPageDict[obj.GetArch()])
+ subArchRoot = archPageDict[obj.GetArch()]
+ classPage = doxygen.Page(obj.GetClassName(),
+ "lc_%s" % obj.GetClassName())
+ comments = obj.GetComment()
+ if len(comments) != 0:
+ classPage.AddDescription('<br>\n'.join(comments) + '<br>\n')
+ subArchRoot.AddPage(classPage)
+ path = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+ path = path[len(pObj.GetWorkspace()) + 1:]
+ if len(comments) == 0:
+ classPage.AddDescription('\copydoc %s<p>' % obj.GetHeaderFile())
+ section = doxygen.Section('ref', 'Refer to Header File')
+ section.AddDescription('\link %s\n' % obj.GetHeaderFile())
+ section.AddDescription(' \endlink<p>\n')
+ classPage.AddSection(section)
+ fullPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetHeaderFile())
+
+ self.ProcessSourceFileForInclude(fullPath, pObj, configFile)
+ rootArray.append(pageRoot)
+ return rootArray
+
+ def ProcessSourceFileForInclude(self, path, pObj, configFile, infObj=None):
+ """
+ @param path the full path of the file being analyzed
+ @param pObj package object
+ @param configFile doxygen config file.
+ """
+
+ if gInGui:
+ wx.Yield()
+ if not os.path.exists(path):
+ ErrorMsg('Source file path %s does not exist!' % path)
+ return
+
+ if configFile.FileExists(path):
+ return
+
+ try:
+ f = open(path, 'r')
+ lines = f.readlines()
+ f.close()
+ except IOError:
+ ErrorMsg('Fail to open file %s' % path)
+ return
+
+ configFile.AddFile(path)
+ return
+ no = 0
+ for no in range(len(lines)):
+ if len(lines[no].strip()) == 0:
+ continue
+ if lines[no].strip()[:2] in ['##', '//', '/*', '*/']:
+ continue
+ index = lines[no].lower().find('include')
+ #mo = IncludePattern.finditer(lines[no].lower())
+ mo = re.match(r"^#\s*include\s+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip().lower())
+ if not mo:
+ continue
+ mo = re.match(r"^[#\w\s]+[<\"]([\\/\w.]+)[>\"]$", lines[no].strip())
+ filePath = mo.groups()[0]
+
+ if filePath is None or len(filePath) == 0:
+ continue
+
+ # find header file in module's path firstly.
+ fullPath = None
+
+ if os.path.exists(os.path.join(os.path.dirname(path), filePath)):
+ # Find the file in current directory
+ fullPath = os.path.join(os.path.dirname(path), filePath).replace('\\', '/')
+ else:
+ # find in dependent package's include path
+ incObjs = pObj.GetFileObj().GetSectionObjectsByName('includes')
+ for incObj in incObjs:
+ incPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), incObj.GetPath()).strip()
+ incPath = os.path.realpath(os.path.join(incPath, filePath))
+ if os.path.exists(incPath):
+ fullPath = incPath
+ break
+ if infObj is not None:
+ pkgInfObjs = infObj.GetSectionObjectsByName('packages')
+ for obj in pkgInfObjs:
+ decObj = dec.DECFile(os.path.join(pObj.GetWorkspace(), obj.GetPath()))
+ if not decObj:
+ ErrorMsg ('Fail to create pacakge object for %s' % obj.GetPackageName())
+ continue
+ if not decObj.Parse():
+ ErrorMsg ('Fail to load package object for %s' % obj.GetPackageName())
+ continue
+ incObjs = decObj.GetSectionObjectsByName('includes')
+ for incObj in incObjs:
+ incPath = os.path.join(decObj.GetPackageRootPath(), incObj.GetPath()).replace('\\', '/')
+ if os.path.exists(os.path.join(incPath, filePath)):
+ fullPath = os.path.join(os.path.join(incPath, filePath))
+ break
+ if fullPath is not None:
+ break
+
+ if fullPath is None and self.IsVerbose():
+ self.Log('Can not resolve header file %s for file %s in package %s\n' % (filePath, path, pObj.GetFileObj().GetFilename()), 'error')
+ return
+ else:
+ fullPath = fullPath.replace('\\', '/')
+ if self.IsVerbose():
+ self.Log('Preprocessing: Add include file %s for file %s\n' % (fullPath, path))
+ #LogMsg ('Preprocessing: Add include file %s for file %s' % (fullPath, path))
+ self.ProcessSourceFileForInclude(fullPath, pObj, configFile, infObj)
+
+ def AddAllIncludeFiles(self, pObj, configFile):
+ objs = pObj.GetFileObj().GetSectionObjectsByName('includes')
+ for obj in objs:
+ incPath = os.path.join(pObj.GetFileObj().GetPackageRootPath(), obj.GetPath())
+ for root, dirs, files in os.walk(incPath):
+ for dir in dirs:
+ if dir.lower() in _ignore_dir:
+ dirs.remove(dir)
+ for file in files:
+ path = os.path.normpath(os.path.join(root, file))
+ configFile.AddFile(path.replace('/', '\\'))
+
+ def GeneratePcdSubPages(self, pObj, configFile):
+ """
+ Generate sub pages for package's PCD definition.
+ @param pObj package object
+ @param configFile config file object
+ """
+ rootArray = []
+ objs = pObj.GetFileObj().GetSectionObjectsByName('pcd')
+ if len(objs) == 0:
+ return []
+
+ pcdRootPage = doxygen.Page('PCD', 'pcd_root_page')
+ typeRootPageDict = {}
+ typeArchRootPageDict = {}
+ for obj in objs:
+ if obj.GetPcdType() not in typeRootPageDict.keys():
+ typeRootPageDict[obj.GetPcdType()] = doxygen.Page(obj.GetPcdType(), 'pcd_%s_root_page' % obj.GetPcdType())
+ pcdRootPage.AddPage(typeRootPageDict[obj.GetPcdType()])
+ typeRoot = typeRootPageDict[obj.GetPcdType()]
+ if self._arch is not None:
+ pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
+ 'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
+ pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
+ section = doxygen.Section('PCDinformation', 'PCD Information')
+ desc = '<TABLE>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>Name</CAPTION></TD>'
+ desc += '<TD><CAPTION>Token Space</CAPTION></TD>'
+ desc += '<TD><CAPTION>Token number</CAPTION></TD>'
+ desc += '<TD><CAPTION>Data Type</CAPTION></TD>'
+ desc += '<TD><CAPTION>Default Value</CAPTION></TD>'
+ desc += '</TR>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[1]
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[0]
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdToken()
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdDataType()
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdValue()
+ desc += '</TR>'
+ desc += '</TABLE>'
+ section.AddDescription(desc)
+ pcdPage.AddSection(section)
+ typeRoot.AddPage(pcdPage)
+ else:
+ keystr = obj.GetPcdType() + obj.GetArch()
+ if keystr not in typeArchRootPageDict.keys():
+ typeArchRootPage = doxygen.Page(obj.GetArch(), 'pcd_%s_%s_root_page' % (obj.GetPcdType(), obj.GetArch()))
+ typeArchRootPageDict[keystr] = typeArchRootPage
+ typeRoot.AddPage(typeArchRootPage)
+ typeArchRoot = typeArchRootPageDict[keystr]
+ pcdPage = doxygen.Page('%s' % obj.GetPcdName(),
+ 'pcd_%s_%s_%s' % (obj.GetPcdType(), obj.GetArch(), obj.GetPcdName().split('.')[1]))
+ pcdPage.AddDescription('<br>\n'.join(obj.GetComment()) + '<br>\n')
+ section = doxygen.Section('PCDinformation', 'PCD Information')
+ desc = '<TABLE>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>Name</CAPTION></TD>'
+ desc += '<TD><CAPTION>Token Space</CAPTION></TD>'
+ desc += '<TD><CAPTION>Token number</CAPTION></TD>'
+ desc += '<TD><CAPTION>Data Type</CAPTION></TD>'
+ desc += '<TD><CAPTION>Default Value</CAPTION></TD>'
+ desc += '</TR>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[1]
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdName().split('.')[0]
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdToken()
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdDataType()
+ desc += '<TD><CAPTION>%s</CAPTION></TD>' % obj.GetPcdValue()
+ desc += '</TR>'
+ desc += '</TABLE>'
+ section.AddDescription(desc)
+ pcdPage.AddSection(section)
+ typeArchRoot.AddPage(pcdPage)
+ return [pcdRootPage]
+
+ def _GenerateGuidSubPage(self, pObj, obj, configFile):
+ guidPage = doxygen.Page('%s' % obj.GetName(),
+ 'guid_%s_%s' % (obj.GetArch(), obj.GetName()))
+ comments = obj.GetComment()
+ if len(comments) != 0:
+ guidPage.AddDescription('<br>'.join(obj.GetComment()) + '<br>')
+ section = doxygen.Section('BasicGuidInfo', 'GUID Information')
+ desc = '<TABLE>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>GUID\'s Guid Name</CAPTION></TD><TD><CAPTION>GUID\'s Guid</CAPTION></TD>'
+ desc += '</TR>'
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % obj.GetName()
+ desc += '<TD>%s</TD>' % obj.GetGuid()
+ desc += '</TR>'
+ desc += '</TABLE>'
+ section.AddDescription(desc)
+ guidPage.AddSection(section)
+ refFile = self.FindHeaderFileForGuid(pObj, obj.GetName(), configFile)
+ if refFile:
+ relPath = refFile[len(pObj.GetWorkspace()) + 1:]
+ if len(comments) == 0:
+ guidPage.AddDescription(' \\copydoc %s <br>' % relPath)
+
+ section = doxygen.Section('ref', 'Refer to Header File')
+ section.AddDescription('\link %s\n' % relPath)
+ section.AddDescription('\endlink\n')
+ self.ProcessSourceFileForInclude(refFile, pObj, configFile)
+ guidPage.AddSection(section)
+ return guidPage
+
+ def GenerateGuidSubPages(self, pObj, configFile):
+ """
+ Generate sub pages for package's GUID definition.
+ @param pObj package object
+ @param configFile doxygen config file object
+ """
+ pageRoot = doxygen.Page('GUID', 'guid_root_page')
+ objs = pObj.GetFileObj().GetSectionObjectsByName('guids', self._arch)
+ if len(objs) == 0: return []
+ if self._arch is not None:
+ for obj in objs:
+ pageRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
+ else:
+ guidArchRootPageDict = {}
+ for obj in objs:
+ if obj.GetArch() not in guidArchRootPageDict.keys():
+ guidArchRoot = doxygen.Page(obj.GetArch(), 'guid_arch_root_%s' % obj.GetArch())
+ pageRoot.AddPage(guidArchRoot)
+ guidArchRootPageDict[obj.GetArch()] = guidArchRoot
+ guidArchRoot = guidArchRootPageDict[obj.GetArch()]
+ guidArchRoot.AddPage(self._GenerateGuidSubPage(pObj, obj, configFile))
+ return [pageRoot]
+
+ def _GeneratePpiSubPage(self, pObj, obj, configFile):
+ guidPage = doxygen.Page(obj.GetName(), 'ppi_page_%s' % obj.GetName())
+ comments = obj.GetComment()
+ if len(comments) != 0:
+ guidPage.AddDescription('<br>'.join(obj.GetComment()) + '<br>')
+ section = doxygen.Section('BasicPpiInfo', 'PPI Information')
+ desc = '<TABLE>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>PPI\'s Guid Name</CAPTION></TD><TD><CAPTION>PPI\'s Guid</CAPTION></TD>'
+ desc += '</TR>'
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % obj.GetName()
+ desc += '<TD>%s</TD>' % obj.GetGuid()
+ desc += '</TR>'
+ desc += '</TABLE>'
+ section.AddDescription(desc)
+ guidPage.AddSection(section)
+ refFile = self.FindHeaderFileForGuid(pObj, obj.GetName(), configFile)
+ if refFile:
+ relPath = refFile[len(pObj.GetWorkspace()) + 1:]
+ if len(comments) == 0:
+ guidPage.AddDescription(' \\copydoc %s <br>' % relPath)
+ section = doxygen.Section('ref', 'Refer to Header File')
+ section.AddDescription('\link %s\n' % relPath)
+ section.AddDescription('\endlink\n')
+ self.ProcessSourceFileForInclude(refFile, pObj, configFile)
+ guidPage.AddSection(section)
+
+ return guidPage
+
+ def GeneratePpiSubPages(self, pObj, configFile):
+ """
+ Generate sub pages for package's PPI definition.
+ @param pObj package object
+ @param configFile doxygen config file object
+ """
+ pageRoot = doxygen.Page('PPI', 'ppi_root_page')
+ objs = pObj.GetFileObj().GetSectionObjectsByName('ppis', self._arch)
+ if len(objs) == 0: return []
+ if self._arch is not None:
+ for obj in objs:
+ pageRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
+ else:
+ guidArchRootPageDict = {}
+ for obj in objs:
+ if obj.GetArch() not in guidArchRootPageDict.keys():
+ guidArchRoot = doxygen.Page(obj.GetArch(), 'ppi_arch_root_%s' % obj.GetArch())
+ pageRoot.AddPage(guidArchRoot)
+ guidArchRootPageDict[obj.GetArch()] = guidArchRoot
+ guidArchRoot = guidArchRootPageDict[obj.GetArch()]
+ guidArchRoot.AddPage(self._GeneratePpiSubPage(pObj, obj, configFile))
+ return [pageRoot]
+
+ def _GenerateProtocolSubPage(self, pObj, obj, configFile):
+ guidPage = doxygen.Page(obj.GetName(), 'protocol_page_%s' % obj.GetName())
+ comments = obj.GetComment()
+ if len(comments) != 0:
+ guidPage.AddDescription('<br>'.join(obj.GetComment()) + '<br>')
+ section = doxygen.Section('BasicProtocolInfo', 'PROTOCOL Information')
+ desc = '<TABLE>'
+ desc += '<TR>'
+ desc += '<TD><CAPTION>PROTOCOL\'s Guid Name</CAPTION></TD><TD><CAPTION>PROTOCOL\'s Guid</CAPTION></TD>'
+ desc += '</TR>'
+ desc += '<TR>'
+ desc += '<TD>%s</TD>' % obj.GetName()
+ desc += '<TD>%s</TD>' % obj.GetGuid()
+ desc += '</TR>'
+ desc += '</TABLE>'
+ section.AddDescription(desc)
+ guidPage.AddSection(section)
+
+ refFile = self.FindHeaderFileForGuid(pObj, obj.GetName(), configFile)
+ if refFile:
+ relPath = refFile[len(pObj.GetWorkspace()) + 1:]
+ if len(comments) == 0:
+ guidPage.AddDescription(' \\copydoc %s <br>' % relPath)
+ section = doxygen.Section('ref', 'Refer to Header File')
+ section.AddDescription('\link %s\n' % relPath)
+ section.AddDescription('\endlink\n')
+ self.ProcessSourceFileForInclude(refFile, pObj, configFile)
+ guidPage.AddSection(section)
+
+ return guidPage
+
+ def GenerateProtocolSubPages(self, pObj, configFile):
+ """
+ Generate sub pages for package's protocol definition.
+ @param pObj package object
+ @param configFile doxygen config file object
+ """
+ pageRoot = doxygen.Page('PROTOCOL', 'protocol_root_page')
+ objs = pObj.GetFileObj().GetSectionObjectsByName('protocols', self._arch)
+ if len(objs) == 0: return []
+ if self._arch is not None:
+ for obj in objs:
+ pageRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
+ else:
+ guidArchRootPageDict = {}
+ for obj in objs:
+ if obj.GetArch() not in guidArchRootPageDict.keys():
+ guidArchRoot = doxygen.Page(obj.GetArch(), 'protocol_arch_root_%s' % obj.GetArch())
+ pageRoot.AddPage(guidArchRoot)
+ guidArchRootPageDict[obj.GetArch()] = guidArchRoot
+ guidArchRoot = guidArchRootPageDict[obj.GetArch()]
+ guidArchRoot.AddPage(self._GenerateProtocolSubPage(pObj, obj, configFile))
+ return [pageRoot]
+
+ def FindHeaderFileForGuid(self, pObj, name, configFile):
+ """
+ For declaration header file for GUID/PPI/Protocol.
+
+ @param pObj package object
+ @param name guid/ppi/protocol's name
+ @param configFile config file object
+
+ @return full path of header file and None if not found.
+ """
+ startPath = pObj.GetFileObj().GetPackageRootPath()
+ incPath = os.path.join(startPath, 'Include').replace('\\', '/')
+ # if <PackagePath>/include exist, then search header under it.
+ if os.path.exists(incPath):
+ startPath = incPath
+
+ for root, dirs, files in os.walk(startPath):
+ for dir in dirs:
+ if dir.lower() in _ignore_dir:
+ dirs.remove(dir)
+ for file in files:
+ fPath = os.path.join(root, file)
+ if not IsCHeaderFile(fPath):
+ continue
+ try:
+ f = open(fPath, 'r')
+ lines = f.readlines()
+ f.close()
+ except IOError:
+ self.Log('Fail to open file %s\n' % fPath)
+ continue
+ for line in lines:
+ if line.find(name) != -1 and \
+ line.find('extern') != -1:
+ return fPath.replace('\\', '/')
+ return None
+
+ def GetPackageModuleList(self, pObj):
+ """
+ Get all module's INF path under package's root path
+ @param pObj package object
+ @return arrary of INF full path
+ """
+ mArray = []
+ packPath = pObj.GetFileObj().GetPackageRootPath()
+ if not os.path.exists:
+ return None
+ for root, dirs, files in os.walk(packPath):
+ for dir in dirs:
+ if dir.lower() in _ignore_dir:
+ dirs.remove(dir)
+ for file in files:
+ if CheckPathPostfix(file, 'inf'):
+ fPath = os.path.join(root, file).replace('\\', '/')
+ mArray.append(fPath)
+ return mArray
+
+ def GenerateModulePages(self, pObj, configFile):
+ """
+ Generate sub pages for package's module which is under the package
+ root directory.
+
+ @param pObj package object
+ @param configFilf doxygen config file object
+ """
+ infList = self.GetPackageModuleList(pObj)
+ rootPages = []
+ libObjs = []
+ modObjs = []
+ for infpath in infList:
+ infObj = inf.INFFile(infpath)
+ #infObj = INFFileObject.INFFile (pObj.GetWorkspacePath(),
+ # inf)
+ if not infObj:
+ self.Log('Fail create INF object for %s' % inf)
+ continue
+ if not infObj.Parse():
+ self.Log('Fail to load INF file %s' % inf)
+ continue
+ if infObj.GetProduceLibraryClass() is not None:
+ libObjs.append(infObj)
+ else:
+ modObjs.append(infObj)
+
+ if len(libObjs) != 0:
+ libRootPage = doxygen.Page('Libraries', 'lib_root_page')
+ rootPages.append(libRootPage)
+ for libInf in libObjs:
+ libRootPage.AddPage(self.GenerateModulePage(pObj, libInf, configFile, True))
+
+ if len(modObjs) != 0:
+ modRootPage = doxygen.Page('Modules', 'module_root_page')
+ rootPages.append(modRootPage)
+ for modInf in modObjs:
+ modRootPage.AddPage(self.GenerateModulePage(pObj, modInf, configFile, False))
+
+ return rootPages
+
    def GenerateModulePage(self, pObj, infObj, configFile, isLib):
        """
        Generate doxygen page for a module/library.

        @param pObj package object owning this module
        @param infObj INF file object for module/library
        @param configFile doxygen config file object
        @param isLib Whether this module is library

        @return module doxygen page object
        """
        workspace = pObj.GetWorkspace()
        # Parse each dependent package's DEC file; these are used below to
        # resolve which package declares every referenced PCD, protocol,
        # PPI, GUID and library class.
        refDecObjs = []
        for obj in infObj.GetSectionObjectsByName('packages'):
            decObj = dec.DECFile(os.path.join(workspace, obj.GetPath()))
            if not decObj:
                ErrorMsg ('Fail to create pacakge object for %s' % obj.GetPackageName())
                continue
            if not decObj.Parse():
                ErrorMsg ('Fail to load package object for %s' % obj.GetPackageName())
                continue
            refDecObjs.append(decObj)

        modPage = doxygen.Page('%s' % infObj.GetBaseName(),
                               'module_%s' % infObj.GetBaseName())
        modPage.AddDescription(infObj.GetFileHeader())

        # Render the [Defines] section as a basic-information table.
        basicInfSection = doxygen.Section('BasicModuleInformation', 'Basic Module Information')
        desc = "<TABLE>"
        for obj in infObj.GetSectionObjectsByName('defines'):
            key = obj.GetKey()
            value = obj.GetValue()
            # Skip keys without a known human-readable label.
            if key not in _inf_key_description_mapping_table.keys(): continue
            if key == 'LIBRARY_CLASS' and value.find('|') != -1:
                # 'ClassName|ModuleTypes' splits into two table rows.
                clsname, types = value.split('|')
                desc += '<TR>'
                desc += '<TD><B>%s</B></TD>' % _inf_key_description_mapping_table[key]
                desc += '<TD>%s</TD>' % clsname
                desc += '</TR>'

                desc += '<TR>'
                desc += '<TD><B>Supported Module Types</B></TD>'
                desc += '<TD>%s</TD>' % types
                desc += '</TR>'
            else:
                desc += '<TR>'
                desc += '<TD><B>%s</B></TD>' % _inf_key_description_mapping_table[key]
                # Present the numeric spec version in its friendly form.
                if key == 'EFI_SPECIFICATION_VERSION' and value == '0x00020000':
                    value = '2.0'
                desc += '<TD>%s</TD>' % value
                desc += '</TR>'
        desc += '</TABLE>'
        basicInfSection.AddDescription(desc)
        modPage.AddSection(basicInfSection)

        # Add PCD section (name, token space, declaring package)
        data = []
        for obj in infObj.GetSectionObjectsByName('pcd', self._arch):
            data.append(obj.GetPcdName().strip())
        if len(data) != 0:
            s = doxygen.Section('Pcds', 'Pcds')
            desc = "<TABLE>"
            desc += '<TR><TD><B>PCD Name</B></TD><TD><B>TokenSpace</B></TD><TD><B>Package</B></TD></TR>'
            for item in data:
                desc += '<TR>'
                # Full PCD name is 'TokenSpace.PcdName'.
                desc += '<TD>%s</TD>' % item.split('.')[1]
                desc += '<TD>%s</TD>' % item.split('.')[0]
                pkgbasename = self.SearchPcdPackage(item, workspace, refDecObjs)
                desc += '<TD>%s</TD>' % pkgbasename
                desc += '</TR>'
            desc += "</TABLE>"
            s.AddDescription(desc)
            modPage.AddSection(s)

        # Add protocol section
        #sects = infObj.GetSectionByString('protocol')
        data = []
        #for sect in sects:
        for obj in infObj.GetSectionObjectsByName('protocol', self._arch):
            data.append(obj.GetName().strip())
        if len(data) != 0:
            s = doxygen.Section('Protocols', 'Protocols')
            desc = "<TABLE>"
            desc += '<TR><TD><B>Name</B></TD><TD><B>Package</B></TD></TR>'
            for item in data:
                desc += '<TR>'
                desc += '<TD>%s</TD>' % item
                pkgbasename = self.SearchProtocolPackage(item, workspace, refDecObjs)
                desc += '<TD>%s</TD>' % pkgbasename
                desc += '</TR>'
            desc += "</TABLE>"
            s.AddDescription(desc)
            modPage.AddSection(s)

        # Add ppi section
        #sects = infObj.GetSectionByString('ppi')
        data = []
        #for sect in sects:
        for obj in infObj.GetSectionObjectsByName('ppi', self._arch):
            data.append(obj.GetName().strip())
        if len(data) != 0:
            s = doxygen.Section('Ppis', 'Ppis')
            desc = "<TABLE>"
            desc += '<TR><TD><B>Name</B></TD><TD><B>Package</B></TD></TR>'
            for item in data:
                desc += '<TR>'
                desc += '<TD>%s</TD>' % item
                pkgbasename = self.SearchPpiPackage(item, workspace, refDecObjs)
                desc += '<TD>%s</TD>' % pkgbasename
                desc += '</TR>'
            desc += "</TABLE>"
            s.AddDescription(desc)
            modPage.AddSection(s)

        # Add guid section
        #sects = infObj.GetSectionByString('guid')
        data = []
        #for sect in sects:
        for obj in infObj.GetSectionObjectsByName('guid', self._arch):
            data.append(obj.GetName().strip())
        if len(data) != 0:
            s = doxygen.Section('Guids', 'Guids')
            desc = "<TABLE>"
            desc += '<TR><TD><B>Name</B></TD><TD><B>Package</B></TD></TR>'
            for item in data:
                desc += '<TR>'
                desc += '<TD>%s</TD>' % item
                pkgbasename = self.SearchGuidPackage(item, workspace, refDecObjs)
                desc += '<TD>%s</TD>' % pkgbasename
                desc += '</TR>'
            desc += "</TABLE>"
            s.AddDescription(desc)
            modPage.AddSection(s)

        # Library-class table: the produced class (if this is a library)
        # followed by every consumed class.
        section = doxygen.Section('LibraryClasses', 'Library Classes')
        desc = "<TABLE>"
        desc += '<TR><TD><B>Name</B></TD><TD><B>Type</B></TD><TD><B>Package</B></TD><TD><B>Header File</B></TD></TR>'
        if isLib:
            desc += '<TR>'
            desc += '<TD>%s</TD>' % infObj.GetProduceLibraryClass()
            desc += '<TD>Produce</TD>'
            try:
                pkgname, hPath = self.SearchLibraryClassHeaderFile(infObj.GetProduceLibraryClass(),
                                                                   workspace,
                                                                   refDecObjs)
            except:
                self.Log ('fail to get package header file for lib class %s' % infObj.GetProduceLibraryClass())
                pkgname = 'NULL'
                hPath = 'NULL'
            desc += '<TD>%s</TD>' % pkgname
            if hPath != "NULL":
                #desc += '<TD>\link %s \endlink</TD>' % hPath
                desc += '<TD>%s</TD>' % hPath
            else:
                desc += '<TD>%s</TD>' % hPath
            desc += '</TR>'
        for lcObj in infObj.GetSectionObjectsByName('libraryclasses', self._arch):
            desc += '<TR>'
            desc += '<TD>%s</TD>' % lcObj.GetClass()
            retarr = self.SearchLibraryClassHeaderFile(lcObj.GetClass(),
                                                       workspace,
                                                       refDecObjs)
            if retarr is not None:
                pkgname, hPath = retarr
            else:
                self.Log('Fail find the library class %s definition from module %s dependent package!' % (lcObj.GetClass(), infObj.GetFilename()), 'error')
                pkgname = 'NULL'
                hPath = 'NULL'
            desc += '<TD>Consume</TD>'
            desc += '<TD>%s</TD>' % pkgname
            desc += '<TD>%s</TD>' % hPath
            desc += '</TR>'
        desc += "</TABLE>"
        section.AddDescription(desc)
        modPage.AddSection(section)

        # Source-file list; .uni/.s/.asm/.nasm files are first translated
        # into doxygen-readable .dox files.
        section = doxygen.Section('SourceFiles', 'Source Files')
        section.AddDescription('<ul>\n')
        for obj in infObj.GetSourceObjects(self._arch, self._tooltag):
            sPath = infObj.GetModuleRootPath()
            sPath = os.path.join(sPath, obj.GetSourcePath()).replace('\\', '/').strip()
            if sPath.lower().endswith('.uni') or sPath.lower().endswith('.s') or sPath.lower().endswith('.asm') or sPath.lower().endswith('.nasm'):
                newPath = self.TranslateUniFile(sPath)
                configFile.AddFile(newPath)
                # Links are emitted workspace-relative.
                newPath = newPath[len(pObj.GetWorkspace()) + 1:]
                section.AddDescription('<li> \link %s \endlink </li>' % newPath)
            else:
                self.ProcessSourceFileForInclude(sPath, pObj, configFile, infObj)
                sPath = sPath[len(pObj.GetWorkspace()) + 1:]
                section.AddDescription('<li>\link %s \endlink </li>' % sPath)
        section.AddDescription('</ul>\n')
        modPage.AddSection(section)

        # Module dependency expression ([Depex]) section, if any.
        #sects = infObj.GetSectionByString('depex')
        data = []
        #for sect in sects:
        for obj in infObj.GetSectionObjectsByName('depex'):
            data.append(str(obj))
        if len(data) != 0:
            s = doxygen.Section('DependentSection', 'Module Dependencies')
            s.AddDescription('<br>'.join(data))
            modPage.AddSection(s)

        return modPage
+
+ def TranslateUniFile(self, path):
+ newpath = path + '.dox'
+ #import core.textfile as textfile
+ #file = textfile.TextFile(path)
+
+ try:
+ file = open(path, 'r')
+ except (IOError, OSError) as msg:
+ return None
+
+ t = file.read()
+ file.close()
+
+ output = '/** @file \n'
+ #output = '<html><body>'
+ arr = t.split('\r\n')
+ for line in arr:
+ if line.find('@file') != -1:
+ continue
+ if line.find('*/') != -1:
+ continue
+ line = line.strip()
+ if line.strip().startswith('/'):
+ arr = line.split(' ')
+ if len(arr) > 1:
+ line = ' '.join(arr[1:])
+ else:
+ continue
+ output += '%s<br>\n' % line
+ output += '**/'
+
+ if os.path.exists(newpath):
+ os.remove(newpath)
+
+ file = open(newpath, "w")
+ file.write(output)
+ file.close()
+ return newpath
+
+ def SearchPcdPackage(self, pcdname, workspace, decObjs):
+ for decObj in decObjs:
+ for pcd in decObj.GetSectionObjectsByName('pcd'):
+ if pcdname == pcd.GetPcdName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchProtocolPackage(self, protname, workspace, decObjs):
+ for decObj in decObjs:
+ for proto in decObj.GetSectionObjectsByName('protocol'):
+ if protname == proto.GetName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchPpiPackage(self, ppiname, workspace, decObjs):
+ for decObj in decObjs:
+ for ppi in decObj.GetSectionObjectsByName('ppi'):
+ if ppiname == ppi.GetName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchGuidPackage(self, guidname, workspace, decObjs):
+ for decObj in decObjs:
+ for guid in decObj.GetSectionObjectsByName('guid'):
+ if guidname == guid.GetName():
+ return decObj.GetBaseName()
+ return None
+
+ def SearchLibraryClassHeaderFile(self, className, workspace, decObjs):
+ for decObj in decObjs:
+ for cls in decObj.GetSectionObjectsByName('libraryclasses'):
+ if cls.GetClassName().strip() == className:
+ path = cls.GetHeaderFile().strip()
+ path = os.path.join(decObj.GetPackageRootPath(), path)
+ path = path[len(workspace) + 1:]
+ return decObj.GetBaseName(), path.replace('\\', '/')
+
+ return None
+
+ def _ConvertPathToDoxygen(self, path, pObj):
+ pRootPath = pObj.GetWorkspace()
+ path = path[len(pRootPath) + 1:]
+ return path.replace('\\', '/')
+
def IsCHeaderFile(path):
    # True when *path* has a '.h' extension (case-insensitive).
    return CheckPathPostfix(path, 'h')
+
def CheckPathPostfix(path, str):
    """Return True when *path*'s extension (text after the last '.')
    equals *str*, compared case-insensitively; False when *path* has
    no '.' at all."""
    dot = path.rfind('.')
    return dot != -1 and path[dot + 1:].lower() == str.lower()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
new file mode 100755
index 00000000..40535cea
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/dsc.py
@@ -0,0 +1,195 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from plugins.EdkPlugins.basemodel import ini
+import re, os
+from plugins.EdkPlugins.basemodel.message import *
+
class DSCFile(ini.BaseINIFile):
    """Model of an EDK2 platform description (.dsc) file."""

    def GetSectionInstance(self, parent, name, isCombined=False):
        # Factory hook for the base parser: create DSC-aware sections.
        return DSCSection(parent, name, isCombined)

    def GetComponents(self):
        # All objects parsed from [Components] sections.
        return self.GetSectionObjectsByName('Components')
+
class DSCSection(ini.BaseINISection):
    """A section of a DSC file; section names look like
    'Type[.Arch[.ModuleType]]'."""

    def GetSectionINIObject(self, parent):
        """Factory: choose the object class matching this section's type."""
        sectType = self.GetType().lower()
        if sectType == 'components':
            return DSCComponentObject(self)
        if sectType == 'libraryclasses':
            return DSCLibraryClassObject(self)
        if sectType == 'defines':
            return ini.BaseINISectionObject(self)
        if sectType in ('pcdsfeatureflag', 'pcdsfixedatbuild',
                        'pcdspatchableinmodule', 'pcdsdynamicdefault',
                        'pcdsdynamicex', 'pcdsdynamichii',
                        'pcdsdynamicvpd'):
            return DSCPcdObject(self)

        return DSCSectionObject(self)

    def GetType(self):
        """Section type is the first dot-separated field of the name."""
        return self._name.split('.')[0].strip()

    def GetArch(self):
        """Architecture field; defaults to 'common' when absent."""
        fields = self._name.split('.')
        return fields[1] if len(fields) > 1 else 'common'

    def GetModuleType(self):
        """Module-type field; defaults to 'common' when absent."""
        fields = self._name.split('.')
        return fields[2] if len(fields) >= 3 else 'common'
+
class DSCSectionObject(ini.BaseINISectionObject):
    """Base class for objects parsed from a DSC section."""
    def GetArch(self):
        # Architecture comes from the section header, e.g. [Pcds.IA32].
        return self.GetParent().GetArch()
+
class DSCPcdObject(DSCSectionObject):
    """A single PCD setting line of the form '<PcdName>|<Value>'."""

    def __init__(self, parent):
        ini.BaseINISectionObject.__init__(self, parent)
        self._name = None
        # Initialize here so GetPcdValue() cannot raise AttributeError when
        # called before Parse() (the original only set _value in Parse()).
        self._value = None

    def Parse(self):
        # Strip the trailing comment before splitting the line.
        line = self.GetLineByOffset(self._start).strip().split('#')[0]
        fields = line.split('|')
        self._name = fields[0]
        # Guard the value lookup: the original indexed [1] unconditionally
        # and raised IndexError on a line without a '|'.
        self._value = fields[1] if len(fields) > 1 else None
        return True

    def GetPcdName(self):
        """Full PCD name, 'TokenSpace.PcdName'."""
        return self._name

    def GetPcdType(self):
        """PCD type, taken from the section type (e.g. PcdsFixedAtBuild)."""
        return self.GetParent().GetType()

    def GetPcdValue(self):
        """Value between the first and second '|', or None when absent."""
        return self._value
+
class DSCLibraryClassObject(DSCSectionObject):
    """A '<LibraryClass>|<InstancePath>' line from [LibraryClasses]."""

    def __init__(self, parent):
        ini.BaseINISectionObject.__init__(self, parent)

    def _GetFields(self):
        # Comment-stripped, '|'-separated fields of the defining line.
        return self.GetLineByOffset(self._start).split('#')[0].split('|')

    def GetClass(self):
        """Library class name (field before the '|')."""
        return self._GetFields()[0].strip()

    def GetInstance(self):
        """Implementing instance INF path (field after the '|')."""
        return self._GetFields()[1].strip()

    def GetArch(self):
        return self.GetParent().GetArch()

    def GetModuleType(self):
        return self.GetParent().GetModuleType()
+
class DSCComponentObject(DSCSectionObject):
    """A component (module INF) entry in a [Components] section, carrying
    any <LibraryClasses>/<Pcds> overrides given in its '{ ... }' block."""

    def __init__(self, parent):
        ini.BaseINISectionObject.__init__(self, parent)
        self._OveridePcds = {}
        self._OverideLibraries = {}
        self._Filename = ''

    def __del__(self):
        # BUG FIX: the original cleared _OverideLibraries twice and never
        # cleared _OveridePcds.
        self._OverideLibraries.clear()
        self._OveridePcds.clear()
        ini.BaseINISectionObject.__del__(self)

    def AddOverideLib(self, libclass, libinstPath):
        # First override for a class wins; later duplicates are ignored.
        if libclass not in self._OverideLibraries:
            self._OverideLibraries[libclass] = libinstPath

    def AddOveridePcd(self, name, type, value=None):
        # Group overridden PCDs by their PCD type.
        self._OveridePcds.setdefault(type, []).append((name, value))

    def GetOverideLibs(self):
        return self._OverideLibraries

    def GetArch(self):
        return self.GetParent().GetArch()

    def GetOveridePcds(self):
        return self._OveridePcds

    def GetFilename(self):
        # INF path is the entry's first line, before any comment or '{'.
        return self.GetLineByOffset(self._start).split('#')[0].split('{')[0].strip()

    def SetFilename(self, fName):
        self._Filename = fName

    def Parse(self):
        """Parse the '{ ... }' override block following the INF path line."""
        if (self._start < self._end):
            #
            # The first line is inf path and could be ignored
            # The end line is '}' and could be ignored
            #
            curr = self._start + 1
            end = self._end - 1
            OverideName = ''
            while (curr <= end):
                line = self.GetLineByOffset(curr).strip()
                if len(line) > 0 and line[0] != '#':
                    line = line.split('#')[0].strip()
                    if line[0] == '<':
                        # '<LibraryClasses>' / '<Pcds...>' selects the kind of
                        # override the following lines belong to.
                        OverideName = line[1:len(line)-1]
                    elif OverideName.lower() == 'libraryclasses':
                        arr = line.split('|')
                        self._OverideLibraries[arr[0].strip()] = arr[1].strip()
                    elif OverideName.lower() == 'pcds':
                        ErrorMsg('EDES does not support PCD overide',
                                 self.GetFileName(),
                                 self.GetParent().GetLinenumberByOffset(curr))
                curr = curr + 1
        return True

    def GenerateLines(self):
        """Re-emit this component entry as DSC text lines."""
        lines = []
        hasLib = len(self._OverideLibraries) != 0
        hasPcd = len(self._OveridePcds) != 0

        if hasLib or hasPcd:
            lines.append((' %s {\n' % self._Filename))
        else:
            # No override block: single line, nothing more to emit.
            lines.append((' %s \n' % self._Filename))
            return lines

        if hasLib:
            lines.append(' <LibraryClasses>\n')
            for libKey in self._OverideLibraries.keys():
                lines.append(' %s|%s\n' % (libKey, self._OverideLibraries[libKey]))

        if hasPcd:
            for key in self._OveridePcds.keys():
                lines.append(' <%s>\n' % key)

                for name, value in self._OveridePcds[key]:
                    if value is not None:
                        lines.append(' %s|%s\n' % (name, value))
                    else:
                        lines.append(' %s\n' % name)

        lines.append(' }\n')

        return lines
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
new file mode 100755
index 00000000..fc8056b1
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/EdkPlugins/edk2/model/inf.py
@@ -0,0 +1,335 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from plugins.EdkPlugins.basemodel import ini
+import re, os
+from plugins.EdkPlugins.basemodel.message import *
+
class INFFile(ini.BaseINIFile):
    """Model of an EDK2 module information (.inf) file."""

    # Class-level registry mapping produced library-class name -> list of
    # INFFile instances producing it; shared by ALL INFFile objects.
    _libobjs = {}

    def GetSectionInstance(self, parent, name, isCombined=False):
        # Factory hook for the base parser: create INF-aware sections.
        return INFSection(parent, name, isCombined)

    def GetProduceLibraryClass(self):
        # LIBRARY_CLASS define is '<ClassName>[|<ModuleTypes>]'; return just
        # the class name, or None when this module is not a library.
        obj = self.GetDefine("LIBRARY_CLASS")
        if obj is None: return None

        return obj.split('|')[0].strip()

    def GetSectionObjectsByName(self, name, arch=None):
        # Collect the objects of every section called *name* whose
        # architecture matches *arch* (None matches everything).
        arr = []
        sects = self.GetSectionByName(name)
        for sect in sects:
            # skip unmatched archtecture content
            if not sect.IsArchMatch(arch):
                continue

            for obj in sect.GetObjects():
                arr.append(obj)

        return arr

    def GetSourceObjects(self, arch=None, tool=None):
        # Like GetSectionObjectsByName('sources', arch) but additionally
        # filtered by toolchain family *tool*.
        arr = []
        sects = self.GetSectionByName('sources')
        for sect in sects:
            # skip unmatched archtecture content
            if not sect.IsArchMatch(arch):
                continue

            for obj in sect.GetObjects():
                if not obj.IsMatchFamily(tool):
                    continue
                arr.append(obj)

        return arr

    def Parse(self):
        # Parse via the base class, then register this file in the shared
        # library-class registry when it produces a library class.
        if not ini.BaseINIFile.Parse(self):
            return False
        classname = self.GetProduceLibraryClass()
        if classname is not None:
            libobjdict = INFFile._libobjs
            if classname in libobjdict:
                if self not in libobjdict[classname]:
                    libobjdict[classname].append(self)
            else:
                libobjdict[classname] = [self]

        return True

    def GetBaseName(self):
        # NOTE(review): assumes a BASE_NAME define is always present;
        # raises AttributeError otherwise -- confirm against callers.
        return self.GetDefine("BASE_NAME").strip()

    def GetModuleRootPath(self):
        # Directory containing this INF file.
        return os.path.dirname(self.GetFilename())

    def Clear(self):
        # Unregister from the shared library-class registry before clearing
        # base-class state.
        classname = self.GetProduceLibraryClass()
        if classname is not None:
            libobjdict = INFFile._libobjs
            libobjdict[classname].remove(self)
            if len(libobjdict[classname]) == 0:
                del libobjdict[classname]
        ini.BaseINIFile.Clear(self)
+
+
class INFSection(ini.BaseINISection):
    """A section of an INF file; maps section types to the object classes
    that parse their lines."""

    def GetSectionINIObject(self, parent):
        """Factory: pick the object class matching this section's type."""
        sectType = self.GetType().lower()
        if sectType == 'libraryclasses':
            return INFLibraryClassObject(self)
        if sectType == 'sources':
            return INFSourceObject(self)
        # Any section whose type mentions 'pcd' (PcdsFixedAtBuild, ...).
        if 'pcd' in sectType:
            return INFPcdObject(self)
        if sectType == 'packages':
            return INFDependentPackageObject(self)
        if sectType in ('guids', 'protocols', 'ppis'):
            return INFGuidObject(self)
        if sectType == 'defines':
            return INFDefineSectionObject(self)
        return INFSectionObject(self)

    def GetType(self):
        """Section type is the first dot-separated field of the name."""
        return self._name.split('.')[0].strip()

    def GetArch(self):
        """Architecture field; defaults to 'common' when absent."""
        fields = self._name.split('.')
        return fields[1] if len(fields) > 1 else 'common'

    def IsArchMatch(self, arch):
        """True when *arch* is None, this section is 'common', or the two
        architectures compare equal case-insensitively."""
        if arch is None or self.GetArch() == 'common':
            return True
        return self.GetArch().lower() == arch.lower()
+
class INFSectionObject(ini.BaseINISectionObject):
    """Base class for objects parsed from an INF section."""
    def GetArch(self):
        # Architecture comes from the section header, e.g. [Sources.IA32].
        return self.GetParent().GetArch()
+
class INFDefineSectionObject(INFSectionObject):
    """A 'KEY = VALUE' line from the [Defines] section."""

    def __init__(self, parent):
        INFSectionObject.__init__(self, parent)
        self._key = None
        self._value = None

    def Parse(self):
        """Parse the single define line; return False on malformed input."""
        assert (self._start == self._end), 'The object in define section must be in single line'

        line = self.GetLineByOffset(self._start).strip()

        # Strip any trailing comment, then split on the FIRST '=' only so
        # that values containing '=' are preserved (the original split on
        # every '=' and rejected such lines as invalid).
        line = line.split('#')[0]
        arr = line.split('=', 1)
        if len(arr) != 2:
            ErrorMsg('Invalid define section object',
                     self.GetFilename(),
                     self._start
                     )
            return False

        self._key = arr[0].strip()
        self._value = arr[1].strip()

        return True

    def GetKey(self):
        """Define key, e.g. BASE_NAME."""
        return self._key

    def GetValue(self):
        """Define value text."""
        return self._value
+
class INFLibraryClassObject(INFSectionObject):
    """A consumed library-class line from a [LibraryClasses] section."""

    # Shared registry: class name -> list of INFLibraryClassObject
    # instances referencing it.
    _objs = {}

    def __init__(self, parent):
        INFSectionObject.__init__(self, parent)
        self._classname = None

    def GetClass(self):
        return self._classname

    def Parse(self):
        """Record the class name and register this object."""
        self._classname = self.GetLineByOffset(self._start).split('#')[0].strip()
        INFLibraryClassObject._objs.setdefault(self._classname, []).append(self)
        return True

    def Destroy(self):
        """Drop this object from the registry, pruning the empty bucket."""
        registry = INFLibraryClassObject._objs
        registry[self._classname].remove(self)
        if not registry[self._classname]:
            del registry[self._classname]

    def GetName(self):
        return self._classname

    @staticmethod
    def GetObjectDict():
        return INFLibraryClassObject._objs
+
class INFDependentPackageObject(INFSectionObject):
    """A dependent-package (.dec path) line from a [Packages] section."""
    def GetPath(self):
        # DEC path is the whole line up to any trailing comment.
        return self.GetLineByOffset(self._start).split('#')[0].strip()
+
class INFSourceObject(INFSectionObject):
    """A source-file line from a [Sources] section:
    'Path[|Family[|TagName[|ToolCode[|FeaturePcd]]]]'."""

    # Shared registry: source base filename -> list of INFSourceObject
    # instances with that filename.
    _objs = {}
    def __init__(self, parent):
        INFSectionObject.__init__(self, parent)

        self.mSourcename = None
        self.mToolCode = None
        self.mFamily = None
        self.mTagName = None
        self.mFeaturePcd = None
        self.mFilename = None

    def GetSourcePath(self):
        # Path as written in the INF, relative to the module directory.
        return self.mSourcename

    def GetSourceFullPath(self):
        # Absolute, normalized path: INF directory joined with the entry.
        path = os.path.dirname(self.GetFilename())
        path = os.path.join(path, self.GetSourcePath())
        return os.path.normpath(path)

    def GetToolCode(self):
        return self.mToolCode

    def GetFamily(self):
        return self.mFamily

    def GetTagName(self):
        return self.mTagName

    def GetFeaturePcd(self):
        return self.mFeaturePcd

    def Parse(self):
        # Strip trailing comment, then unpack the optional '|' fields in
        # positional order; missing fields stay None.
        line = self.GetLineByOffset(self._start).strip().split('#')[0]

        arr = line.split('|')

        self.mSourcename = arr[0].strip()
        if len(arr) >= 2:
            self.mFamily = arr[1].strip()
        if len(arr) >= 3:
            self.mTagName = arr[2].strip()
        if len(arr) >= 4:
            self.mToolCode = arr[3].strip()
        if len(arr) >= 5:
            self.mFeaturePcd = arr[4].strip()

        self.mFilename = os.path.basename(self.GetSourceFullPath())
        # Register under the base filename in the shared registry.
        objdict = INFSourceObject._objs
        if self.mFilename not in objdict:
            objdict[self.mFilename] = [self]
        else:
            objdict[self.mFilename].append(self)

        return True

    def GetName(self):
        return self.mFilename

    def Destroy(self):
        # Unregister; prune the bucket when it becomes empty.
        objdict = INFSourceObject._objs
        objdict[self.mFilename].remove(self)
        if len(objdict[self.mFilename]) == 0:
            del objdict[self.mFilename]

    def IsMatchFamily(self, family):
        # No family filter: everything matches.
        if family is None:
            return True
        if self.mFamily is not None:
            # Explicit family on the line: exact (case-insensitive) match.
            if family.strip().lower() == self.mFamily.lower():
                return True
            else:
                return False
        else:
            # No explicit family: infer from the file extension.
            # NOTE(review): '.S' is treated as GCC-only and lowercase '.s'
            # as ipf/common-only -- confirm this matches build rules.
            fname = self.GetSourcePath()
            if fname.endswith('.S') and family.lower() != 'gcc':
                return False
            if fname.endswith('.s') and (self.GetArch().lower() != 'ipf' and self.GetArch().lower() != 'common'):
                return False
            if fname.lower().endswith('.asm') and (family.lower() != 'msft' and family.lower() != 'intel'):
                return False
        return True

    @staticmethod
    def GetObjectDict():
        return INFSourceObject._objs
+
class INFPcdObject(INFSectionObject):
    """A PCD reference line ('TokenSpace.PcdName[|Default]') from one of
    the PCD sections."""

    # Shared registry: short PCD name -> list of INFPcdObject instances.
    _objs = {}

    def __init__(self, parent):
        INFSectionObject.__init__(self, parent)

        self.mPcdType = None
        self.mDefaultValue = None
        self.mPcdName = None

    @staticmethod
    def GetObjectDict():
        return INFPcdObject._objs

    def Parse(self):
        """Parse name plus optional default value; register this object."""
        line = self.GetLineByOffset(self._start).strip().split('#')[0]

        fields = line.split('|')
        self.mPcdName = fields[0].strip()
        if len(fields) > 1:
            self.mDefaultValue = fields[1].strip()

        bucket = INFPcdObject._objs.setdefault(self.GetName(), [])
        if self not in bucket:
            bucket.append(self)
        return True

    def GetPcdName(self):
        """Full name: TokenSpace.PcdName."""
        return self.mPcdName

    def GetPcdType(self):
        """PCD type taken from the enclosing section's type."""
        return self.GetParent().GetType()

    def GetName(self):
        """Short PCD name: the part after the '.'."""
        return self.mPcdName.split('.')[1]

    def Destroy(self):
        """Drop this object from the registry, pruning the empty bucket."""
        registry = INFPcdObject._objs
        registry[self.GetName()].remove(self)
        if not registry[self.GetName()]:
            del registry[self.GetName()]
+
class INFGuidObject(INFSectionObject):
    """A GUID/Protocol/PPI reference line from the corresponding section."""

    def __init__(self, parent):
        INFSectionObject.__init__(self, parent)
        self._name = None

    def Parse(self):
        # Keep only the symbol: drop the trailing comment and any
        # '|'-separated feature-flag expression.
        rawLine = self.GetLineByOffset(self._start)
        self._name = rawLine.strip().split('#')[0].split("|")[0].strip()
        return True

    def GetName(self):
        return self._name
+
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/__init__.py
new file mode 100644
index 00000000..a7909346
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PackageDocumentTools/plugins/__init__.py
@@ -0,0 +1,6 @@
+## @file
+#
+# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PatchCheck.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PatchCheck.py
new file mode 100755
index 00000000..c5879229
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/PatchCheck.py
@@ -0,0 +1,770 @@
+## @file
+# Check a patch for various format issues
+#
+# Copyright (c) 2015 - 2020, Intel Corporation. All rights reserved.<BR>
+# Copyright (C) 2020, Red Hat, Inc.<BR>
+# Copyright (c) 2020, ARM Ltd. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+
+VersionNumber = '0.1'
+__copyright__ = "Copyright (c) 2015 - 2016, Intel Corporation All rights reserved."
+
+import email
+import argparse
+import os
+import re
+import subprocess
+import sys
+
+import email.header
+
+class Verbose:
+ SILENT, ONELINE, NORMAL = range(3)
+ level = NORMAL
+
+class EmailAddressCheck:
+ """Checks an email address."""
+
+ def __init__(self, email, description):
+ self.ok = True
+
+ if email is None:
+ self.error('Email address is missing!')
+ return
+ if description is None:
+ self.error('Email description is missing!')
+ return
+
+ self.description = "'" + description + "'"
+ self.check_email_address(email)
+
+ def error(self, *err):
+ if self.ok and Verbose.level > Verbose.ONELINE:
+ print('The ' + self.description + ' email address is not valid:')
+ self.ok = False
+ if Verbose.level < Verbose.NORMAL:
+ return
+ count = 0
+ for line in err:
+ prefix = (' *', ' ')[count > 0]
+ print(prefix, line)
+ count += 1
+
+ email_re1 = re.compile(r'(?:\s*)(.*?)(\s*)<(.+)>\s*$',
+ re.MULTILINE|re.IGNORECASE)
+
+ def check_email_address(self, email):
+ email = email.strip()
+ mo = self.email_re1.match(email)
+ if mo is None:
+ self.error("Email format is invalid: " + email.strip())
+ return
+
+ name = mo.group(1).strip()
+ if name == '':
+ self.error("Name is not provided with email address: " +
+ email)
+ else:
+ quoted = len(name) > 2 and name[0] == '"' and name[-1] == '"'
+ if name.find(',') >= 0 and not quoted:
+ self.error('Add quotes (") around name with a comma: ' +
+ name)
+
+ if mo.group(2) == '':
+ self.error("There should be a space between the name and " +
+ "email address: " + email)
+
+ if mo.group(3).find(' ') >= 0:
+ self.error("The email address cannot contain a space: " +
+ mo.group(3))
+
+ if ' via Groups.Io' in name and mo.group(3).endswith('@groups.io'):
+ self.error("Email rewritten by lists DMARC / DKIM / SPF: " +
+ email)
+
+class CommitMessageCheck:
+ """Checks the contents of a git commit message."""
+
+ def __init__(self, subject, message):
+ self.ok = True
+
+ if subject is None and message is None:
+ self.error('Commit message is missing!')
+ return
+
+ self.subject = subject
+ self.msg = message
+
+ print (subject)
+
+ self.check_contributed_under()
+ self.check_signed_off_by()
+ self.check_misc_signatures()
+ self.check_overall_format()
+ self.report_message_result()
+
+ url = 'https://github.com/tianocore/tianocore.github.io/wiki/Commit-Message-Format'
+
+ def report_message_result(self):
+ if Verbose.level < Verbose.NORMAL:
+ return
+ if self.ok:
+ # All checks passed
+ return_code = 0
+ print('The commit message format passed all checks.')
+ else:
+ return_code = 1
+ if not self.ok:
+ print(self.url)
+
+ def error(self, *err):
+ if self.ok and Verbose.level > Verbose.ONELINE:
+ print('The commit message format is not valid:')
+ self.ok = False
+ if Verbose.level < Verbose.NORMAL:
+ return
+ count = 0
+ for line in err:
+ prefix = (' *', ' ')[count > 0]
+ print(prefix, line)
+ count += 1
+
+ # Find 'contributed-under:' at the start of a line ignoring case and
+ # requires ':' to be present. Matches if there is white space before
+ # the tag or between the tag and the ':'.
+ contributed_under_re = \
+ re.compile(r'^\s*contributed-under\s*:', re.MULTILINE|re.IGNORECASE)
+
+ def check_contributed_under(self):
+ match = self.contributed_under_re.search(self.msg)
+ if match is not None:
+ self.error('Contributed-under! (Note: this must be ' +
+ 'removed by the code contributor!)')
+
+ @staticmethod
+ def make_signature_re(sig, re_input=False):
+ if re_input:
+ sub_re = sig
+ else:
+ sub_re = sig.replace('-', r'[-\s]+')
+ re_str = (r'^(?P<tag>' + sub_re +
+ r')(\s*):(\s*)(?P<value>\S.*?)(?:\s*)$')
+ try:
+ return re.compile(re_str, re.MULTILINE|re.IGNORECASE)
+ except Exception:
+ print("Tried to compile re:", re_str)
+ raise
+
+ sig_block_re = \
+ re.compile(r'''^
+ (?: (?P<tag>[^:]+) \s* : \s*
+ (?P<value>\S.*?) )
+ |
+ (?: \[ (?P<updater>[^:]+) \s* : \s*
+ (?P<note>.+?) \s* \] )
+ \s* $''',
+ re.VERBOSE | re.MULTILINE)
+
+ def find_signatures(self, sig):
+ if not sig.endswith('-by') and sig != 'Cc':
+ sig += '-by'
+ regex = self.make_signature_re(sig)
+
+ sigs = regex.findall(self.msg)
+
+ bad_case_sigs = filter(lambda m: m[0] != sig, sigs)
+ for s in bad_case_sigs:
+ self.error("'" +s[0] + "' should be '" + sig + "'")
+
+ for s in sigs:
+ if s[1] != '':
+ self.error('There should be no spaces between ' + sig +
+ " and the ':'")
+ if s[2] != ' ':
+ self.error("There should be a space after '" + sig + ":'")
+
+ EmailAddressCheck(s[3], sig)
+
+ return sigs
+
+ def check_signed_off_by(self):
+ sob='Signed-off-by'
+ if self.msg.find(sob) < 0:
+ self.error('Missing Signed-off-by! (Note: this must be ' +
+ 'added by the code contributor!)')
+ return
+
+ sobs = self.find_signatures('Signed-off')
+
+ if len(sobs) == 0:
+ self.error('Invalid Signed-off-by format!')
+ return
+
+ sig_types = (
+ 'Reviewed',
+ 'Reported',
+ 'Tested',
+ 'Suggested',
+ 'Acked',
+ 'Cc'
+ )
+
+ def check_misc_signatures(self):
+ for sig in self.sig_types:
+ self.find_signatures(sig)
+
+ cve_re = re.compile('CVE-[0-9]{4}-[0-9]{5}[^0-9]')
+
+ def check_overall_format(self):
+ lines = self.msg.splitlines()
+
+ if len(lines) >= 1 and lines[0].endswith('\r\n'):
+ empty_line = '\r\n'
+ else:
+ empty_line = '\n'
+
+ lines.insert(0, empty_line)
+ lines.insert(0, self.subject + empty_line)
+
+ count = len(lines)
+
+ if count <= 0:
+ self.error('Empty commit message!')
+ return
+
+ if count >= 1 and re.search(self.cve_re, lines[0]):
+ #
+ # If CVE-xxxx-xxxxx is present in subject line, then limit length of
+ # subject line to 92 characters
+ #
+ if len(lines[0].rstrip()) >= 93:
+ self.error(
+ 'First line of commit message (subject line) is too long (%d >= 93).' %
+ (len(lines[0].rstrip()))
+ )
+ else:
+ #
+ # If CVE-xxxx-xxxxx is not present in subject line, then limit
+ # length of subject line to 75 characters
+ #
+ if len(lines[0].rstrip()) >= 76:
+ self.error(
+ 'First line of commit message (subject line) is too long (%d >= 76).' %
+ (len(lines[0].rstrip()))
+ )
+
+ if count >= 1 and len(lines[0].strip()) == 0:
+ self.error('First line of commit message (subject line) ' +
+ 'is empty.')
+
+ if count >= 2 and lines[1].strip() != '':
+ self.error('Second line of commit message should be ' +
+ 'empty.')
+
+ for i in range(2, count):
+ if (len(lines[i]) >= 76 and
+ len(lines[i].split()) > 1 and
+ not lines[i].startswith('git-svn-id:') and
+ not lines[i].startswith('Reviewed-by') and
+ not lines[i].startswith('Acked-by:') and
+ not lines[i].startswith('Tested-by:') and
+ not lines[i].startswith('Reported-by:') and
+ not lines[i].startswith('Suggested-by:') and
+ not lines[i].startswith('Signed-off-by:') and
+ not lines[i].startswith('Cc:')):
+ #
+ # Print a warning if body line is longer than 75 characters
+ #
+ print(
+ 'WARNING - Line %d of commit message is too long (%d >= 76).' %
+ (i + 1, len(lines[i]))
+ )
+ print(lines[i])
+
+ last_sig_line = None
+ for i in range(count - 1, 0, -1):
+ line = lines[i]
+ mo = self.sig_block_re.match(line)
+ if mo is None:
+ if line.strip() == '':
+ break
+ elif last_sig_line is not None:
+ err2 = 'Add empty line before "%s"?' % last_sig_line
+ self.error('The line before the signature block ' +
+ 'should be empty', err2)
+ else:
+ self.error('The signature block was not found')
+ break
+ last_sig_line = line.strip()
+
+(START, PRE_PATCH, PATCH) = range(3)
+
+class GitDiffCheck:
+ """Checks the contents of a git diff."""
+
+ def __init__(self, diff):
+ self.ok = True
+ self.format_ok = True
+ self.lines = diff.splitlines(True)
+ self.count = len(self.lines)
+ self.line_num = 0
+ self.state = START
+ self.new_bin = []
+ while self.line_num < self.count and self.format_ok:
+ line_num = self.line_num
+ self.run()
+ assert(self.line_num > line_num)
+ self.report_message_result()
+
+ def report_message_result(self):
+ if Verbose.level < Verbose.NORMAL:
+ return
+ if self.ok:
+ print('The code passed all checks.')
+ if self.new_bin:
+ print('\nWARNING - The following binary files will be added ' +
+ 'into the repository:')
+ for binary in self.new_bin:
+ print(' ' + binary)
+
+ def run(self):
+ line = self.lines[self.line_num]
+
+ if self.state in (PRE_PATCH, PATCH):
+ if line.startswith('diff --git'):
+ self.state = START
+ if self.state == PATCH:
+ if line.startswith('@@ '):
+ self.state = PRE_PATCH
+ elif len(line) >= 1 and line[0] not in ' -+' and \
+ not line.startswith('\r\n') and \
+ not line.startswith(r'\ No newline ') and not self.binary:
+ for line in self.lines[self.line_num + 1:]:
+ if line.startswith('diff --git'):
+ self.format_error('diff found after end of patch')
+ break
+ self.line_num = self.count
+ return
+
+ if self.state == START:
+ if line.startswith('diff --git'):
+ self.state = PRE_PATCH
+ self.filename = line[13:].split(' ', 1)[0]
+ self.is_newfile = False
+ self.force_crlf = True
+ self.force_notabs = True
+ if self.filename.endswith('.sh') or \
+ self.filename.startswith('BaseTools/BinWrappers/PosixLike/') or \
+ self.filename.startswith('BaseTools/BinPipWrappers/PosixLike/') or \
+ self.filename.startswith('BaseTools/Bin/CYGWIN_NT-5.1-i686/') or \
+ self.filename == 'BaseTools/BuildEnv':
+ #
+ # Do not enforce CR/LF line endings for linux shell scripts.
+ # Some linux shell scripts don't end with the ".sh" extension,
+ # they are identified by their path.
+ #
+ self.force_crlf = False
+ if self.filename == '.gitmodules' or \
+ self.filename == 'BaseTools/Conf/diff.order':
+ #
+ # .gitmodules and diff orderfiles are used internally by git
+ # use tabs and LF line endings. Do not enforce no tabs and
+ # do not enforce CR/LF line endings.
+ #
+ self.force_crlf = False
+ self.force_notabs = False
+ elif len(line.rstrip()) != 0:
+ self.format_error("didn't find diff command")
+ self.line_num += 1
+ elif self.state == PRE_PATCH:
+ if line.startswith('@@ '):
+ self.state = PATCH
+ self.binary = False
+ elif line.startswith('GIT binary patch') or \
+ line.startswith('Binary files'):
+ self.state = PATCH
+ self.binary = True
+ if self.is_newfile:
+ self.new_bin.append(self.filename)
+ elif line.startswith('new file mode 160000'):
+ #
+ # New submodule. Do not enforce CR/LF line endings
+ #
+ self.force_crlf = False
+ else:
+ ok = False
+ self.is_newfile = self.newfile_prefix_re.match(line)
+ for pfx in self.pre_patch_prefixes:
+ if line.startswith(pfx):
+ ok = True
+ if not ok:
+ self.format_error("didn't find diff hunk marker (@@)")
+ self.line_num += 1
+ elif self.state == PATCH:
+ if self.binary:
+ pass
+ elif line.startswith('-'):
+ pass
+ elif line.startswith('+'):
+ self.check_added_line(line[1:])
+ elif line.startswith('\r\n'):
+ pass
+ elif line.startswith(r'\ No newline '):
+ pass
+ elif not line.startswith(' '):
+ self.format_error("unexpected patch line")
+ self.line_num += 1
+
+ pre_patch_prefixes = (
+ '--- ',
+ '+++ ',
+ 'index ',
+ 'new file ',
+ 'deleted file ',
+ 'old mode ',
+ 'new mode ',
+ 'similarity index ',
+ 'copy from ',
+ 'copy to ',
+ 'rename ',
+ )
+
+ line_endings = ('\r\n', '\n\r', '\n', '\r')
+
+ newfile_prefix_re = \
+ re.compile(r'''^
+ index\ 0+\.\.
+ ''',
+ re.VERBOSE)
+
+ def added_line_error(self, msg, line):
+ lines = [ msg ]
+ if self.filename is not None:
+ lines.append('File: ' + self.filename)
+ lines.append('Line: ' + line)
+
+ self.error(*lines)
+
+ old_debug_re = \
+ re.compile(r'''
+ DEBUG \s* \( \s* \( \s*
+ (?: DEBUG_[A-Z_]+ \s* \| \s*)*
+ EFI_D_ ([A-Z_]+)
+ ''',
+ re.VERBOSE)
+
+ def check_added_line(self, line):
+ eol = ''
+ for an_eol in self.line_endings:
+ if line.endswith(an_eol):
+ eol = an_eol
+ line = line[:-len(eol)]
+
+ stripped = line.rstrip()
+
+ if self.force_crlf and eol != '\r\n' and (line.find('Subproject commit') == -1):
+ self.added_line_error('Line ending (%s) is not CRLF' % repr(eol),
+ line)
+ if self.force_notabs and '\t' in line:
+ self.added_line_error('Tab character used', line)
+ if len(stripped) < len(line):
+ self.added_line_error('Trailing whitespace found', line)
+
+ mo = self.old_debug_re.search(line)
+ if mo is not None:
+ self.added_line_error('EFI_D_' + mo.group(1) + ' was used, '
+ 'but DEBUG_' + mo.group(1) +
+ ' is now recommended', line)
+
+ split_diff_re = re.compile(r'''
+ (?P<cmd>
+ ^ diff \s+ --git \s+ a/.+ \s+ b/.+ $
+ )
+ (?P<index>
+ ^ index \s+ .+ $
+ )
+ ''',
+ re.IGNORECASE | re.VERBOSE | re.MULTILINE)
+
+ def format_error(self, err):
+ self.format_ok = False
+ err = 'Patch format error: ' + err
+ err2 = 'Line: ' + self.lines[self.line_num].rstrip()
+ self.error(err, err2)
+
+ def error(self, *err):
+ if self.ok and Verbose.level > Verbose.ONELINE:
+ print('Code format is not valid:')
+ self.ok = False
+ if Verbose.level < Verbose.NORMAL:
+ return
+ count = 0
+ for line in err:
+ prefix = (' *', ' ')[count > 0]
+ print(prefix, line)
+ count += 1
+
+class CheckOnePatch:
+ """Checks the contents of a git email formatted patch.
+
+ Various checks are performed on both the commit message and the
+ patch content.
+ """
+
+ def __init__(self, name, patch):
+ self.patch = patch
+ self.find_patch_pieces()
+
+ email_check = EmailAddressCheck(self.author_email, 'Author')
+ email_ok = email_check.ok
+
+ msg_check = CommitMessageCheck(self.commit_subject, self.commit_msg)
+ msg_ok = msg_check.ok
+
+ diff_ok = True
+ if self.diff is not None:
+ diff_check = GitDiffCheck(self.diff)
+ diff_ok = diff_check.ok
+
+ self.ok = email_ok and msg_ok and diff_ok
+
+ if Verbose.level == Verbose.ONELINE:
+ if self.ok:
+ result = 'ok'
+ else:
+ result = list()
+ if not msg_ok:
+ result.append('commit message')
+ if not diff_ok:
+ result.append('diff content')
+ result = 'bad ' + ' and '.join(result)
+ print(name, result)
+
+
+ git_diff_re = re.compile(r'''
+ ^ diff \s+ --git \s+ a/.+ \s+ b/.+ $
+ ''',
+ re.IGNORECASE | re.VERBOSE | re.MULTILINE)
+
+ stat_re = \
+ re.compile(r'''
+ (?P<commit_message> [\s\S\r\n]* )
+ (?P<stat>
+ ^ --- $ [\r\n]+
+ (?: ^ \s+ .+ \s+ \| \s+ \d+ \s+ \+* \-*
+ $ [\r\n]+ )+
+ [\s\S\r\n]+
+ )
+ ''',
+ re.IGNORECASE | re.VERBOSE | re.MULTILINE)
+
+ subject_prefix_re = \
+ re.compile(r'''^
+ \s* (\[
+ [^\[\]]* # Allow all non-brackets
+ \])* \s*
+ ''',
+ re.VERBOSE)
+
+ def find_patch_pieces(self):
+ if sys.version_info < (3, 0):
+ patch = self.patch.encode('ascii', 'ignore')
+ else:
+ patch = self.patch
+
+ self.commit_msg = None
+ self.stat = None
+ self.commit_subject = None
+ self.commit_prefix = None
+ self.diff = None
+
+ if patch.startswith('diff --git'):
+ self.diff = patch
+ return
+
+ pmail = email.message_from_string(patch)
+ parts = list(pmail.walk())
+ assert(len(parts) == 1)
+ assert(parts[0].get_content_type() == 'text/plain')
+ content = parts[0].get_payload(decode=True).decode('utf-8', 'ignore')
+
+ mo = self.git_diff_re.search(content)
+ if mo is not None:
+ self.diff = content[mo.start():]
+ content = content[:mo.start()]
+
+ mo = self.stat_re.search(content)
+ if mo is None:
+ self.commit_msg = content
+ else:
+ self.stat = mo.group('stat')
+ self.commit_msg = mo.group('commit_message')
+ #
+ # Parse subject line from email header. The subject line may be
+ # composed of multiple parts with different encodings. Decode and
+ # combine all the parts to produce a single string with the contents of
+ # the decoded subject line.
+ #
+ parts = email.header.decode_header(pmail.get('subject'))
+ subject = ''
+ for (part, encoding) in parts:
+ if encoding:
+ part = part.decode(encoding)
+ else:
+ try:
+ part = part.decode()
+ except:
+ pass
+ subject = subject + part
+
+ self.commit_subject = subject.replace('\r\n', '')
+ self.commit_subject = self.commit_subject.replace('\n', '')
+ self.commit_subject = self.subject_prefix_re.sub('', self.commit_subject, 1)
+
+ self.author_email = pmail['from']
+
+class CheckGitCommits:
+ """Reads patches from git based on the specified git revision range.
+
+ The patches are read from git, and then checked.
+ """
+
+ def __init__(self, rev_spec, max_count):
+ commits = self.read_commit_list_from_git(rev_spec, max_count)
+ if len(commits) == 1 and Verbose.level > Verbose.ONELINE:
+ commits = [ rev_spec ]
+ self.ok = True
+ blank_line = False
+ for commit in commits:
+ if Verbose.level > Verbose.ONELINE:
+ if blank_line:
+ print()
+ else:
+ blank_line = True
+ print('Checking git commit:', commit)
+ email = self.read_committer_email_address_from_git(commit)
+ self.ok &= EmailAddressCheck(email, 'Committer').ok
+ patch = self.read_patch_from_git(commit)
+ self.ok &= CheckOnePatch(commit, patch).ok
+ if not commits:
+ print("Couldn't find commit matching: '{}'".format(rev_spec))
+
+ def read_commit_list_from_git(self, rev_spec, max_count):
+ # Run git to get the commit patch
+ cmd = [ 'rev-list', '--abbrev-commit', '--no-walk' ]
+ if max_count is not None:
+ cmd.append('--max-count=' + str(max_count))
+ cmd.append(rev_spec)
+ out = self.run_git(*cmd)
+ return out.split() if out else []
+
+ def read_patch_from_git(self, commit):
+ # Run git to get the commit patch
+ return self.run_git('show', '--pretty=email', '--no-textconv',
+ '--no-use-mailmap', commit)
+
+ def read_committer_email_address_from_git(self, commit):
+ # Run git to get the committer email
+ return self.run_git('show', '--pretty=%cn <%ce>', '--no-patch',
+ '--no-use-mailmap', commit)
+
+ def run_git(self, *args):
+ cmd = [ 'git' ]
+ cmd += args
+ p = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ Result = p.communicate()
+ return Result[0].decode('utf-8', 'ignore') if Result[0] and Result[0].find(b"fatal")!=0 else None
+
+class CheckOnePatchFile:
+ """Performs a patch check for a single file.
+
+ stdin is used when the filename is '-'.
+ """
+
+ def __init__(self, patch_filename):
+ if patch_filename == '-':
+ patch = sys.stdin.read()
+ patch_filename = 'stdin'
+ else:
+ f = open(patch_filename, 'rb')
+ patch = f.read().decode('utf-8', 'ignore')
+ f.close()
+ if Verbose.level > Verbose.ONELINE:
+ print('Checking patch file:', patch_filename)
+ self.ok = CheckOnePatch(patch_filename, patch).ok
+
+class CheckOneArg:
+ """Performs a patch check for a single command line argument.
+
+ The argument will be handed off to a file or git-commit based
+ checker.
+ """
+
+ def __init__(self, param, max_count=None):
+ self.ok = True
+ if param == '-' or os.path.exists(param):
+ checker = CheckOnePatchFile(param)
+ else:
+ checker = CheckGitCommits(param, max_count)
+ self.ok = checker.ok
+
+class PatchCheckApp:
+ """Checks patches based on the command line arguments."""
+
+ def __init__(self):
+ self.parse_options()
+ patches = self.args.patches
+
+ if len(patches) == 0:
+ patches = [ 'HEAD' ]
+
+ self.ok = True
+ self.count = None
+ for patch in patches:
+ self.process_one_arg(patch)
+
+ if self.count is not None:
+ self.process_one_arg('HEAD')
+
+ if self.ok:
+ self.retval = 0
+ else:
+ self.retval = -1
+
+ def process_one_arg(self, arg):
+ if len(arg) >= 2 and arg[0] == '-':
+ try:
+ self.count = int(arg[1:])
+ return
+ except ValueError:
+ pass
+ self.ok &= CheckOneArg(arg, self.count).ok
+ self.count = None
+
+ def parse_options(self):
+ parser = argparse.ArgumentParser(description=__copyright__)
+ parser.add_argument('--version', action='version',
+ version='%(prog)s ' + VersionNumber)
+ parser.add_argument('patches', nargs='*',
+ help='[patch file | git rev list]')
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument("--oneline",
+ action="store_true",
+ help="Print one result per line")
+ group.add_argument("--silent",
+ action="store_true",
+ help="Print nothing")
+ self.args = parser.parse_args()
+ if self.args.oneline:
+ Verbose.level = Verbose.ONELINE
+ if self.args.silent:
+ Verbose.level = Verbose.SILENT
+
+if __name__ == "__main__":
+ sys.exit(PatchCheckApp().retval)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefile.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefile.py
new file mode 100755
index 00000000..e5f2c39b
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefile.py
@@ -0,0 +1,172 @@
+## @file
+# Run a makefile as part of a PREBUILD or POSTBUILD action.
+#
+# Copyright (c) 2017, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+'''
+RunMakefile.py
+'''
+
+import os
+import sys
+import argparse
+import subprocess
+
+#
+# Globals for help information
+#
+__prog__ = 'RunMakefile'
+__version__ = '%s Version %s' % (__prog__, '1.0')
+__copyright__ = 'Copyright (c) 2017, Intel Corporation. All rights reserved.'
+__description__ = 'Run a makefile as part of a PREBUILD or POSTBUILD action.\n'
+
+#
+# Globals
+#
+gArgs = None
+
+def Log(Message):
+ if not gArgs.Verbose:
+ return
+ sys.stdout.write (__prog__ + ': ' + Message + '\n')
+
+def Error(Message, ExitValue=1):
+ sys.stderr.write (__prog__ + ': ERROR: ' + Message + '\n')
+ sys.exit (ExitValue)
+
+def RelativePath(target):
+ return os.path.relpath (target, gWorkspace)
+
+def NormalizePath(target):
+ if isinstance(target, tuple):
+ return os.path.normpath (os.path.join (*target))
+ else:
+ return os.path.normpath (target)
+
+if __name__ == '__main__':
+ #
+ # Create command line argument parser object
+ #
+ parser = argparse.ArgumentParser (
+ prog = __prog__,
+ version = __version__,
+ description = __description__ + __copyright__,
+ conflict_handler = 'resolve'
+ )
+ parser.add_argument (
+ '-a', '--arch', dest = 'Arch', nargs = '+', action = 'append',
+ required = True,
+ help = '''ARCHS is one of list: IA32, X64, IPF, ARM, AARCH64 or EBC,
+ which overrides target.txt's TARGET_ARCH definition. To
+ specify more archs, please repeat this option.'''
+ )
+ parser.add_argument (
+ '-t', '--tagname', dest = 'ToolChain', required = True,
+ help = '''Using the Tool Chain Tagname to build the platform,
+ overriding target.txt's TOOL_CHAIN_TAG definition.'''
+ )
+ parser.add_argument (
+ '-p', '--platform', dest = 'PlatformFile', required = True,
+ help = '''Build the platform specified by the DSC file name argument,
+ overriding target.txt's ACTIVE_PLATFORM definition.'''
+ )
+ parser.add_argument (
+ '-b', '--buildtarget', dest = 'BuildTarget', required = True,
+ help = '''Using the TARGET to build the platform, overriding
+ target.txt's TARGET definition.'''
+ )
+ parser.add_argument (
+ '--conf=', dest = 'ConfDirectory', required = True,
+ help = '''Specify the customized Conf directory.'''
+ )
+ parser.add_argument (
+ '-D', '--define', dest = 'Define', nargs='*', action = 'append',
+ help = '''Macro: "Name [= Value]".'''
+ )
+ parser.add_argument (
+ '--makefile', dest = 'Makefile', required = True,
+ help = '''Makefile to run passing in arguments as makefile defines.'''
+ )
+ parser.add_argument (
+ '-v', '--verbose', dest = 'Verbose', action = 'store_true',
+ help = '''Turn on verbose output with informational messages printed'''
+ )
+
+ #
+ # Parse command line arguments
+ #
+ gArgs, remaining = parser.parse_known_args()
+ gArgs.BuildType = 'all'
+ for BuildType in ['all', 'fds', 'genc', 'genmake', 'clean', 'cleanall', 'modules', 'libraries', 'run']:
+ if BuildType in remaining:
+ gArgs.BuildType = BuildType
+ remaining.remove(BuildType)
+ break
+ gArgs.Remaining = ' '.join(remaining)
+
+ #
+ # Start
+ #
+ Log ('Start')
+
+ #
+ # Find makefile in WORKSPACE or PACKAGES_PATH
+ #
+ PathList = ['']
+ try:
+ PathList.append(os.environ['WORKSPACE'])
+ except:
+ Error ('WORKSPACE environment variable not set')
+ try:
+ PathList += os.environ['PACKAGES_PATH'].split(os.pathsep)
+ except:
+ pass
+ for Path in PathList:
+ Makefile = NormalizePath((Path, gArgs.Makefile))
+ if os.path.exists (Makefile):
+ break
+ if not os.path.exists(Makefile):
+ Error ('makefile %s not found' % (gArgs.Makefile))
+
+ #
+ # Build command line arguments converting build arguments to makefile defines
+ #
+ CommandLine = [Makefile]
+ CommandLine.append('TARGET_ARCH="%s"' % (' '.join([Item[0] for Item in gArgs.Arch])))
+ CommandLine.append('TOOL_CHAIN_TAG="%s"' % (gArgs.ToolChain))
+ CommandLine.append('TARGET="%s"' % (gArgs.BuildTarget))
+ CommandLine.append('ACTIVE_PLATFORM="%s"' % (gArgs.PlatformFile))
+ CommandLine.append('CONF_DIRECTORY="%s"' % (gArgs.ConfDirectory))
+ if gArgs.Define:
+ for Item in gArgs.Define:
+ if '=' not in Item[0]:
+ continue
+ Item = Item[0].split('=', 1)
+ CommandLine.append('%s="%s"' % (Item[0], Item[1]))
+ CommandLine.append('EXTRA_FLAGS="%s"' % (gArgs.Remaining))
+ CommandLine.append(gArgs.BuildType)
+ if sys.platform == "win32":
+ CommandLine = 'nmake /f %s' % (' '.join(CommandLine))
+ else:
+ CommandLine = 'make -f %s' % (' '.join(CommandLine))
+
+ #
+ # Run the makefile
+ #
+ try:
+ Process = subprocess.Popen(CommandLine, shell=True)
+ except:
+ Error ('make command not available. Please verify PATH')
+ Process.communicate()
+
+ #
+ # Done
+ #
+ Log ('Done')
+
+ #
+ # Return status from running the makefile
+ #
+ sys.exit(Process.returncode)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefileSample.mak b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefileSample.mak
new file mode 100644
index 00000000..9337558e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/RunMakefileSample.mak
@@ -0,0 +1,37 @@
+## @file
+# Sample makefile for PREBUILD or POSTBUILD action.
+#
+# Copyright (c) 2017, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+all: show
+ @echo $@
+genc: show
+ @echo $@
+genmake: show
+ @echo $@
+modules: show
+ @echo $@
+libraries: show
+ @echo $@
+fds: show
+ @echo $@
+clean: show
+ @echo $@
+cleanall: show
+ @echo $@
+cleanlib: show
+ @echo $@
+run: show
+ @echo $@
+
+show:
+ @echo WORKSPACE........ $(WORKSPACE)
+ @echo PACKAGES_PATH.... $(PACKAGES_PATH)
+ @echo ACTIVE_PLATFORM.. $(ACTIVE_PLATFORM)
+ @echo TARGET_ARCH...... $(TARGET_ARCH)
+ @echo TOOL_CHAIN_TAG... $(TOOL_CHAIN_TAG)
+ @echo CONF_DIRECTORY... $(CONF_DIRECTORY)
+ @echo TARGET........... $(TARGET)
+ @echo EXTRA_FLAGS...... $(EXTRA_FLAGS)
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align32.sct b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align32.sct
new file mode 100644
index 00000000..80f40452
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align32.sct
@@ -0,0 +1,19 @@
+/** @file
+
+ Copyright (c) 2015, Linaro Ltd. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+REGION 0x220 RELOC {
+ ER_RO +0 ALIGN 32 {
+ * (+RO)
+ }
+ ER_RW +0 ALIGN 32 {
+ * (+RW)
+ }
+ ER_ZI +0 {
+ * (+ZI)
+ }
+}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align4K.sct b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align4K.sct
new file mode 100644
index 00000000..33a3c970
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/Rvct-Align4K.sct
@@ -0,0 +1,19 @@
+/** @file
+
+ Copyright (c) 2015, Linaro Ltd. All rights reserved.<BR>
+
+ SPDX-License-Identifier: BSD-2-Clause-Patent
+
+**/
+
+REGION 0x1000 RELOC {
+ ER_RO +0 ALIGN 4096 {
+ * (+RO)
+ }
+ ER_RW +0 ALIGN 4096 {
+ * (+RW)
+ }
+ ER_ZI +0 {
+ * (+ZI)
+ }
+}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetVisualStudio.bat b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetVisualStudio.bat
new file mode 100644
index 00000000..60d493a4
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetVisualStudio.bat
@@ -0,0 +1,102 @@
+@REM @file
+@REM Windows batch file to set up the Microsoft Visual Studio environment
+@REM
+@REM This script is used to set up one of the Microsoft Visual Studio
+@REM environments, VS2008x86, VS2010x86, VS2012x86 or VS2013x86 for
+@REM building the Nt32Pkg/Nt32Pkg.dsc emulation environment to run on
+@REM an X64 version of Windows.
+@REM The system environment variables in this script are set by the
+@rem Edk2Setup.bat script (that will be renamed to edksetup.bat).
+@REM
+@REM This script can also be used to build the Win32 binaries
+@REM
+@REM Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
+@REM SPDX-License-Identifier: BSD-2-Clause-Patent
+@REM
+@echo off
+@if defined NT32_X64 @goto CheckLatest
+@if "%REBUILD_TOOLS%"=="TRUE" @goto RebuildTools
+
+:CheckLatest
+echo.
+@if defined VS140COMNTOOLS (
+ @set "COMMONTOOLSx64=C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\x86_amd64"
+ @goto SetVs
+)
+
+@if defined VS120COMNTOOLS (
+ @set "COMMONTOOLSx64=C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\bin\x86_amd64"
+ @goto SetVs
+)
+
+@if defined VS110COMNTOOLS (
+ @set "COMMONTOOLSx64=C:\Program Files (x86)\Microsoft Visual Studio 11.0\VC\bin\x86_amd64"
+ @goto SetVs
+)
+
+@if defined VS100COMNTOOLS (
+ @set "COMMONTOOLSx64=C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\bin\x86_amd64"
+ @goto SetVs
+)
+
+@if defined VS90COMNTOOLS (
+ @set "COMMONTOOLSx64=C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\x86_amd64"
+ @goto SetVs
+)
+@echo.
+@echo No version of Microsoft Visual Studio was found on this system
+@echo.
+@exit /B 1
+
+@REM Set up the X64 environment for building Nt32Pkg/Nt32Pkg.dsc to run on an X64 platform
+:SetVs
+if exist "%COMMONTOOLSx64%\vcvarsx86_amd64.bat" (
+ @call "%COMMONTOOLSx64%\vcvarsx86_amd64.bat"
+ @if errorlevel 1 (
+ @echo. ERROR setting Microsoft Visual Studio %1
+ @set COMMONTOOLSx64=
+ @exit /B 1
+ )
+)
+if not exist "%COMMONTOOLSx64%\vcvarsx86_amd64.bat" (
+ @echo ERROR : This script does not exist: "%COMMONTOOLSx64%\vcvarsx86_amd64.bat"
+ @set COMMONTOOLSx64=
+ @exit /B 1
+)
+@set COMMONTOOLSx64=
+@goto End
+
+:RebuildTools
+@call python "%BASE_TOOLS_PATH%\Scripts\UpdateBuildVersions.py"
+@set "BIN_DIR=%EDK_TOOLS_PATH%\Bin\Win32"
+if not exist "%BIN_DIR%" @mkdir "%BIN_DIR%"
+@echo Removing temporary and binary files
+@cd "%BASE_TOOLS_PATH%"
+@call nmake cleanall
+@echo Rebuilding the EDK II BaseTools
+@cd "%BASE_TOOLS_PATH%\Source\C"
+@call nmake -nologo -a -f Makefile
+@if errorlevel 1 (
+@echo Error building the C-based BaseTools
+@cd "%WORKSPACE%"
+@exit /B1
+)
+@cd %BASE_TOOLS_PATH%\Source\Python
+@call nmake -nologo -a -f Makefile
+@if errorlevel 1 (
+@echo Error building the Python-based BaseTools
+@cd %WORKSPACE%
+@exit /B1
+)
+@cd %WORKSPACE%
+
+@goto End
+
+:VersionNotFound
+@echo.
+@echo This Microsoft Visual Studio version is in not installed on this system: %1
+@echo.
+@exit /B 1
+
+:End
+@exit /B 0
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetupGit.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetupGit.py
new file mode 100755
index 00000000..b1af9aea
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SetupGit.py
@@ -0,0 +1,213 @@
+## @file
+# Set up the git configuration for contributing to TianoCore projects
+#
+# Copyright (c) 2019, Linaro Ltd. All rights reserved.<BR>
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+from __future__ import print_function
+import argparse
+import os.path
+import re
+import sys
+
+try:
+ import git
+except ImportError:
+ print('Unable to load gitpython module - please install and try again.')
+ sys.exit(1)
+
+try:
+ # Try Python 2 'ConfigParser' module first since helpful lib2to3 will
+ # otherwise automagically load it with the name 'configparser'
+ import ConfigParser
+except ImportError:
+ # Otherwise, try loading the Python 3 'configparser' under an alias
+ try:
+ import configparser as ConfigParser
+ except ImportError:
+ print("Unable to load configparser/ConfigParser module - please install and try again!")
+ sys.exit(1)
+
+
+# Assumptions: Script is in edk2/BaseTools/Scripts,
+# templates in edk2/BaseTools/Conf
+CONFDIR = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
+ 'Conf')
+
+UPSTREAMS = [
+ {'name': 'edk2',
+ 'repo': 'https://github.com/tianocore/edk2.git',
+ 'list': 'devel@edk2.groups.io'},
+ {'name': 'edk2-platforms',
+ 'repo': 'https://github.com/tianocore/edk2-platforms.git',
+ 'list': 'devel@edk2.groups.io', 'prefix': 'edk2-platforms'},
+ {'name': 'edk2-non-osi',
+ 'repo': 'https://github.com/tianocore/edk2-non-osi.git',
+ 'list': 'devel@edk2.groups.io', 'prefix': 'edk2-non-osi'}
+ ]
+
+# The minimum version required for all of the below options to work
+MIN_GIT_VERSION = (1, 9, 0)
+
+# Set of options to be set identically for all repositories
+OPTIONS = [
+ {'section': 'am', 'option': 'keepcr', 'value': True},
+ {'section': 'am', 'option': 'signoff', 'value': True},
+ {'section': 'cherry-pick', 'option': 'signoff', 'value': True},
+ {'section': 'color', 'option': 'diff', 'value': True},
+ {'section': 'color', 'option': 'grep', 'value': 'auto'},
+ {'section': 'commit', 'option': 'signoff', 'value': True},
+ {'section': 'core', 'option': 'abbrev', 'value': 12},
+ {'section': 'core', 'option': 'attributesFile',
+ 'value': os.path.join(CONFDIR, 'gitattributes').replace('\\', '/')},
+ {'section': 'core', 'option': 'whitespace', 'value': 'cr-at-eol'},
+ {'section': 'diff', 'option': 'algorithm', 'value': 'patience'},
+ {'section': 'diff', 'option': 'orderFile',
+ 'value': os.path.join(CONFDIR, 'diff.order').replace('\\', '/')},
+ {'section': 'diff', 'option': 'renames', 'value': 'copies'},
+ {'section': 'diff', 'option': 'statGraphWidth', 'value': '20'},
+ {'section': 'diff "ini"', 'option': 'xfuncname',
+ 'value': '^\\\\[[A-Za-z0-9_., ]+]'},
+ {'section': 'format', 'option': 'coverLetter', 'value': True},
+ {'section': 'format', 'option': 'numbered', 'value': True},
+ {'section': 'format', 'option': 'signoff', 'value': False},
+ {'section': 'log', 'option': 'mailmap', 'value': True},
+ {'section': 'notes', 'option': 'rewriteRef', 'value': 'refs/notes/commits'},
+ {'section': 'sendemail', 'option': 'chainreplyto', 'value': False},
+ {'section': 'sendemail', 'option': 'thread', 'value': True},
+ {'section': 'sendemail', 'option': 'transferEncoding', 'value': '8bit'},
+ ]
+
+
+def locate_repo():
+ """Opens a Repo object for the current tree, searching upwards in the directory hierarchy."""
+ try:
+ repo = git.Repo(path='.', search_parent_directories=True)
+ except (git.InvalidGitRepositoryError, git.NoSuchPathError):
+ print("It doesn't look like we're inside a git repository - aborting.")
+ sys.exit(2)
+ return repo
+
+
+def fuzzy_match_repo_url(one, other):
+ """Compares two repository URLs, ignoring protocol and optional trailing '.git'."""
+ oneresult = re.match(r'.*://(?P<oneresult>.*?)(\.git)*$', one)
+ otherresult = re.match(r'.*://(?P<otherresult>.*?)(\.git)*$', other)
+
+ if oneresult and otherresult:
+ onestring = oneresult.group('oneresult')
+ otherstring = otherresult.group('otherresult')
+ if onestring == otherstring:
+ return True
+
+ return False
+
+
+def get_upstream(url, name):
+ """Extracts the dict for the current repo origin."""
+ for upstream in UPSTREAMS:
+ if (fuzzy_match_repo_url(upstream['repo'], url) or
+ upstream['name'] == name):
+ return upstream
+ print("Unknown upstream '%s' - aborting!" % url)
+ sys.exit(3)
+
+
+def check_versions():
+ """Checks versions of dependencies."""
+ version = git.cmd.Git().version_info
+
+ if version < MIN_GIT_VERSION:
+ print('Need git version %d.%d or later!' % (version[0], version[1]))
+ sys.exit(4)
+
+
+def write_config_value(repo, section, option, data):
+    """Writes a single option value into the repository-level git config."""
+ with repo.config_writer(config_level='repository') as configwriter:
+ configwriter.set_value(section, option, data)
+
+
+if __name__ == '__main__':
+ check_versions()
+
+ PARSER = argparse.ArgumentParser(
+ description='Sets up a git repository according to TianoCore rules.')
+ PARSER.add_argument('-c', '--check',
+ help='check current config only, printing what would be changed',
+ action='store_true',
+ required=False)
+ PARSER.add_argument('-f', '--force',
+ help='overwrite existing settings conflicting with program defaults',
+ action='store_true',
+ required=False)
+ PARSER.add_argument('-n', '--name', type=str, metavar='repo',
+ choices=['edk2', 'edk2-platforms', 'edk2-non-osi'],
+ help='set the repo name to configure for, if not '
+ 'detected automatically',
+ required=False)
+ PARSER.add_argument('-v', '--verbose',
+ help='enable more detailed output',
+ action='store_true',
+ required=False)
+ ARGS = PARSER.parse_args()
+
+ REPO = locate_repo()
+ if REPO.bare:
+ print('Bare repo - please check out an upstream one!')
+ sys.exit(6)
+
+ URL = REPO.remotes.origin.url
+
+ UPSTREAM = get_upstream(URL, ARGS.name)
+ if not UPSTREAM:
+ print("Upstream '%s' unknown, aborting!" % URL)
+ sys.exit(7)
+
+ # Set a list email address if our upstream wants it
+ if 'list' in UPSTREAM:
+ OPTIONS.append({'section': 'sendemail', 'option': 'to',
+ 'value': UPSTREAM['list']})
+ # Append a subject prefix entry to OPTIONS if our upstream wants it
+ if 'prefix' in UPSTREAM:
+ OPTIONS.append({'section': 'format', 'option': 'subjectPrefix',
+ 'value': "PATCH " + UPSTREAM['prefix']})
+
+ CONFIG = REPO.config_reader(config_level='repository')
+
+ for entry in OPTIONS:
+ exists = False
+ try:
+ # Make sure to read boolean/int settings as real type rather than strings
+ if isinstance(entry['value'], bool):
+ value = CONFIG.getboolean(entry['section'], entry['option'])
+ elif isinstance(entry['value'], int):
+ value = CONFIG.getint(entry['section'], entry['option'])
+ else:
+ value = CONFIG.get(entry['section'], entry['option'])
+
+ exists = True
+ # Don't bail out from options not already being set
+ except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
+ pass
+
+ if exists:
+ if value == entry['value']:
+ if ARGS.verbose:
+ print("%s.%s already set (to '%s')" % (entry['section'],
+ entry['option'], value))
+ else:
+ if ARGS.force:
+ write_config_value(REPO, entry['section'], entry['option'], entry['value'])
+ else:
+ print("Not overwriting existing %s.%s value:" % (entry['section'],
+ entry['option']))
+ print(" '%s' != '%s'" % (value, entry['value']))
+ print(" add '-f' to command line to force overwriting existing settings")
+ else:
+ print("%s.%s => '%s'" % (entry['section'], entry['option'], entry['value']))
+ if not ARGS.check:
+ write_config_value(REPO, entry['section'], entry['option'], entry['value'])
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ShowEnvironment.bat b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ShowEnvironment.bat
new file mode 100644
index 00000000..b91886b6
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/ShowEnvironment.bat
@@ -0,0 +1,213 @@
+@REM @file
+@REM Windows batch file to display the Windows environment
+@REM
+@REM This script will be used to show the current EDK II build environment.
+@REM It may be called by the Edk2Setup.bat (that will be renamed to edksetup.bat) or
+@REM run as stand-alone application.
+@REM
+@REM Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
+@REM SPDX-License-Identifier: BSD-2-Clause-Patent
+@REM
+@echo off
+@set SE_SVN_REVISION=$Revision: 8 $
+@set SE_VERSION=0.7.0.
+@if "%SCRIPT%"=="EDKSETUP_BAT" goto SkipCmdlineArgumentCheck
+
+:parse_cmd_line
+@if /I "%1"=="-h" @goto Usage
+@if /I "%1"=="--help" @goto Usage
+@if /I "%1"=="/?" @goto Usage
+@if /I "%1"=="-v" @goto Version
+@if /I "%1"=="--version" @goto Version
+
+:Usage
+@echo Usage: ShowEnvironment.bat [Options]
+@echo Copyright(c) 2014, Intel Corporation. All rights reserved.
+@echo.
+@echo Options:
+@echo --help, -h Print this help screen and exit
+@echo --version, -v Print this tool's version and exit
+@echo.
+@goto End
+
+:Version
+@echo ShowEnvironment.bat Version: %SE_VERSION%%SE_SVN_REVISION:~11,-1%
+@echo Copyright(c) 2014, Intel Corporation. All rights reserved.
+
+:SkipCmdlineArgumentCheck
+if defined SRC_CONF @goto SetEnv
+
+@echo.
+@echo #############################################################################
+@if defined WORKSPACE @echo WORKSPACE = %WORKSPACE%
+@if not defined WORKSPACE @echo WORKSPACE = Not Set
+@if defined PACKAGES_PATH @echo PACKAGES_PATH = %PACKAGES_PATH%
+@if defined EDK_TOOLS_PATH @echo EDK_TOOLS_PATH = %EDK_TOOLS_PATH%
+@if not defined EDK_TOOLS_PATH @echo EDK_TOOLS_PATH = Not Set
+@if defined BASE_TOOLS_PATH @echo BASE_TOOLS_PATH = %BASE_TOOLS_PATH%
+@if defined EDK_TOOLS_BIN @echo EDK_TOOLS_BIN = %EDK_TOOLS_BIN%
+@if "%NT32PKG%"=="TRUE" (
+ @echo.
+ @echo NOTE: Please configure your build to use the following TOOL_CHAIN_TAG
+ @echo when building NT32Pkg/Nt32Pkg.dsc
+ @if defined VCINSTALLDIR @call :CheckVsVer
+ @set TEST_VS=
+)
+@if defined HIDE_PATH goto End
+
+
+@echo ############################## PATH #########################################
+@setlocal DisableDelayedExpansion
+@set "var=%PATH%"
+@set "var=%var:"=""%"
+@set "var=%var:^=^^%"
+@set "var=%var:&=^&%"
+@set "var=%var:|=^|%"
+@set "var=%var:<=^<%"
+@set "var=%var:>=^>%"
+@set "var=%var:;=^;^;%"
+@set var=%var:""="%
+@set "var=%var:"=""Q%"
+@set "var=%var:;;="S"S%"
+@set "var=%var:^;^;=;%"
+@set "var=%var:""="%"
+@setlocal EnableDelayedExpansion
+@set "var=!var:"Q=!"
+@for %%a in ("!var:"S"S=";"!") do (
+ @if "!!"=="" endlocal
+ @if %%a neq "" echo %%~a
+)
+@goto End
+
+:CheckVsVer
+@set "TEST_VS=C:\Program Files (x86)\Microsoft Visual Studio 9.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2008x86
+ @goto :EOF
+)
+@set "TEST_VS=C:\Program Files\Microsoft Visual Studio 9.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2008
+ @goto :EOF
+)
+
+@set "TEST_VS=C:\Program Files (x86)\Microsoft Visual Studio 10.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2010x86
+ @goto :EOF
+)
+@set "TEST_VS=C:\Program Files\Microsoft Visual Studio 10.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2010
+ @goto :EOF
+)
+
+@set "TEST_VS=C:\Program Files (x86)\Microsoft Visual Studio 11.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2012x86
+ @goto :EOF
+)
+@set "TEST_VS=C:\Program Files\Microsoft Visual Studio 11.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2012
+ @goto :EOF
+)
+
+@set "TEST_VS=C:\Program Files (x86)\Microsoft Visual Studio 12.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2013x86
+ @goto :EOF
+)
+@set "TEST_VS=C:\Program Files\Microsoft Visual Studio 12.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2013
+ @goto :EOF
+)
+
+@set "TEST_VS=C:\Program Files (x86)\Microsoft Visual Studio 14.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2015x86
+ @goto :EOF
+)
+@set "TEST_VS=C:\Program Files\Microsoft Visual Studio 14.0\"
+@if "%VSINSTALLDIR%"=="%TEST_VS%" (
+ @echo TOOL_CHAIN_TAG = VS2015
+ @goto :EOF
+)
+@goto :EOF
+
+:SetEnv
+@set FIRST_COPY=FALSE
+@set MISSING_TARGET_TEMPLATE=FALSE
+@set MISSING_TOOLS_DEF_TEMPLATE=FALSE
+@set MISSING_BUILD_RULE_TEMPLATE=FALSE
+@if not exist "%SRC_CONF%\target.template" @set MISSING_TARGET_TEMPLATE=TRUE
+@if not exist "%SRC_CONF%\tools_def.template" @set MISSING_TOOLS_DEF_TEMPLATE=TRUE
+@if not exist "%SRC_CONF%\build_rule.template" @set MISSING_BUILD_RULE_TEMPLATE=TRUE
+
+@if not exist "%WORKSPACE%\Conf\target.txt" (
+ @if "%MISSING_TARGET_TEMPLATE%"=="TRUE" @goto MissingTemplates
+ @echo copying ... target.template to %WORKSPACE%\Conf\target.txt
+ @copy /Y "%SRC_CONF%\target.template" "%WORKSPACE%\Conf\target.txt" > nul
+ @set FIRST_COPY=TRUE
+)
+@if not exist "%WORKSPACE%\Conf\tools_def.txt" (
+ @if "%MISSING_TOOLS_DEF_TEMPLATE%"=="TRUE" @goto MissingTemplates
+ @echo copying ... tools_def.template to %WORKSPACE%\Conf\tools_def.txt
+ @copy /Y "%SRC_CONF%\tools_def.template" "%WORKSPACE%\Conf\tools_def.txt" > nul
+ @set FIRST_COPY=TRUE
+)
+@if not exist "%WORKSPACE%\Conf\build_rule.txt" (
+ @if "%MISSING_BUILD_RULE_TEMPLATE%"=="TRUE" @goto MissingTemplates
+ @echo copying ... build_rule.template to %WORKSPACE%\Conf\build_rule.txt
+ @copy /Y "%SRC_CONF%\build_rule.template" "%WORKSPACE%\Conf\build_rule.txt" > nul
+ @set FIRST_COPY=TRUE
+)
+
+@if "%FIRST_COPY%"=="TRUE" @goto End
+@if not "%RECONFIG%"=="TRUE" @goto End
+
+@if "%RECONFIG%"=="TRUE" (
+ @echo.
+ @echo Over-writing the files in the WORKSPACE\Conf directory
+ @echo using the default template files
+ @echo.
+ @if "%MISSING_TARGET_TEMPLATE%"=="TRUE" @goto MissingTemplates
+ @echo over-write ... target.template to %WORKSPACE%\Conf\target.txt
+ @copy /Y "%SRC_CONF%\target.template" "%WORKSPACE%\Conf\target.txt" > nul
+
+ @if "%MISSING_TOOLS_DEF_TEMPLATE%"=="TRUE" @goto MissingTemplates
+ @echo over-write ... tools_def.template to %WORKSPACE%\Conf\tools_def.txt
+ @copy /Y "%SRC_CONF%\tools_def.template" "%WORKSPACE%\Conf\tools_def.txt" > nul
+
+ @if "%MISSING_BUILD_RULE_TEMPLATE%"=="TRUE" @goto MissingTemplates
+ @echo over-write ... build_rule.template to %WORKSPACE%\Conf\build_rule.txt
+ @copy /Y "%SRC_CONF%\build_rule.template" "%WORKSPACE%\Conf\build_rule.txt" > nul
+ @goto End
+)
+
+:MissingTemplates
+@echo.
+@if "%RECONFIG%"=="TRUE" @echo ERROR : Reconfig failed
+@if "%MISSING_TARGET_TEMPLATE%"=="TRUE" @echo ERROR : Unable to locate: "%SRC_CONF%\target.template"
+@if "%MISSING_TOOLS_DEF_TEMPLATE%"=="TRUE" @echo ERROR : Unable to locate: "%SRC_CONF%\tools_def.template"
+@if "%MISSING_BUILD_RULE_TEMPLATE%"=="TRUE" @echo ERROR : Unable to locate: "%SRC_CONF%\build_rule.template"
+@echo.
+@set MISSING_TARGET_TEMPLATE=
+@set MISSING_TOOLS_DEF_TEMPLATE=
+@set MISSING_BUILD_RULE_TEMPLATE=
+@set FIRST_COPY=
+@set SE_VERSION=
+@set SE_SVN_REVISION=
+@if not "%SCRIPT%"=="EDKSETUP_BAT" @echo on
+exit /B 1
+
+:End
+@set MISSING_TARGET_TEMPLATE=
+@set MISSING_TOOLS_DEF_TEMPLATE=
+@set MISSING_BUILD_RULE_TEMPLATE=
+@set FIRST_COPY=
+@set SE_VERSION=
+@set SE_SVN_REVISION=
+@if not "%SCRIPT%"=="EDKSETUP_BAT" @echo on
+exit /B 0
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SmiHandlerProfileSymbolGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SmiHandlerProfileSymbolGen.py
new file mode 100755
index 00000000..4f3283b3
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/SmiHandlerProfileSymbolGen.py
@@ -0,0 +1,307 @@
+##
+# Generate symbol for SMI handler profile info.
+#
+# This tool depends on DIA2Dump.exe (VS) or nm (gcc) to parse debug entry.
+#
+# Copyright (c) 2017, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+##
+
+from __future__ import print_function
+import os
+import re
+import sys
+from optparse import OptionParser
+
+from xml.dom.minidom import parse
+import xml.dom.minidom
+
+versionNumber = "1.1"
+__copyright__ = "Copyright (c) 2016, Intel Corporation. All rights reserved."
+
+class Symbols:
+ def __init__(self):
+ self.listLineAddress = []
+ self.pdbName = ""
+ # Cache for function
+ self.functionName = ""
+ # Cache for line
+ self.sourceName = ""
+
+
+ def getSymbol (self, rva):
+ index = 0
+ lineName = 0
+ sourceName = "??"
+ while index + 1 < self.lineCount :
+ if self.listLineAddress[index][0] <= rva and self.listLineAddress[index + 1][0] > rva :
+ offset = rva - self.listLineAddress[index][0]
+ functionName = self.listLineAddress[index][1]
+ lineName = self.listLineAddress[index][2]
+ sourceName = self.listLineAddress[index][3]
+ if lineName == 0 :
+ return [functionName]
+ else :
+ return [functionName, sourceName, lineName]
+ index += 1
+
+ return []
+
+ def parse_debug_file(self, driverName, pdbName):
+ if cmp (pdbName, "") == 0 :
+ return
+ self.pdbName = pdbName;
+
+ try:
+ nmCommand = "nm"
+ nmLineOption = "-l"
+ print("parsing (debug) - " + pdbName)
+ os.system ('%s %s %s > nmDump.line.log' % (nmCommand, nmLineOption, pdbName))
+ except :
+ print('ERROR: nm command not available. Please verify PATH')
+ return
+
+ #
+ # parse line
+ #
+ linefile = open("nmDump.line.log")
+ reportLines = linefile.readlines()
+ linefile.close()
+
+ # 000113ca T AllocatePool c:\home\edk-ii\MdePkg\Library\UefiMemoryAllocationLib\MemoryAllocationLib.c:399
+ patchLineFileMatchString = "([0-9a-fA-F]*)\s+[T|D|t|d]\s+(\w+)\s*((?:[a-zA-Z]:)?[\w+\-./_a-zA-Z0-9\\\\]*):?([0-9]*)"
+
+ for reportLine in reportLines:
+ match = re.match(patchLineFileMatchString, reportLine)
+ if match is not None:
+ rva = int (match.group(1), 16)
+ functionName = match.group(2)
+ sourceName = match.group(3)
+ if cmp (match.group(4), "") != 0 :
+ lineName = int (match.group(4))
+ else :
+ lineName = 0
+ self.listLineAddress.append ([rva, functionName, lineName, sourceName])
+
+ self.lineCount = len (self.listLineAddress)
+
+ self.listLineAddress = sorted(self.listLineAddress, key=lambda symbolAddress:symbolAddress[0])
+
+ def parse_pdb_file(self, driverName, pdbName):
+ if cmp (pdbName, "") == 0 :
+ return
+ self.pdbName = pdbName;
+
+ try:
+ #DIA2DumpCommand = "\"C:\\Program Files (x86)\Microsoft Visual Studio 14.0\\DIA SDK\\Samples\\DIA2Dump\\x64\\Debug\\Dia2Dump.exe\""
+ DIA2DumpCommand = "Dia2Dump.exe"
+ #DIA2SymbolOption = "-p"
+ DIA2LinesOption = "-l"
+ print("parsing (pdb) - " + pdbName)
+ #os.system ('%s %s %s > DIA2Dump.symbol.log' % (DIA2DumpCommand, DIA2SymbolOption, pdbName))
+ os.system ('%s %s %s > DIA2Dump.line.log' % (DIA2DumpCommand, DIA2LinesOption, pdbName))
+ except :
+ print('ERROR: DIA2Dump command not available. Please verify PATH')
+ return
+
+ #
+ # parse line
+ #
+ linefile = open("DIA2Dump.line.log")
+ reportLines = linefile.readlines()
+ linefile.close()
+
+ # ** GetDebugPrintErrorLevel
+ # line 32 at [0000C790][0001:0000B790], len = 0x3 c:\home\edk-ii\mdepkg\library\basedebugprinterrorlevellib\basedebugprinterrorlevellib.c (MD5: 687C0AE564079D35D56ED5D84A6164CC)
+ # line 36 at [0000C793][0001:0000B793], len = 0x5
+ # line 37 at [0000C798][0001:0000B798], len = 0x2
+
+ patchLineFileMatchString = "\s+line ([0-9]+) at \[([0-9a-fA-F]{8})\]\[[0-9a-fA-F]{4}\:[0-9a-fA-F]{8}\], len = 0x[0-9a-fA-F]+\s*([\w+\-\:./_a-zA-Z0-9\\\\]*)\s*"
+ patchLineFileMatchStringFunc = "\*\*\s+(\w+)\s*"
+
+ for reportLine in reportLines:
+ match = re.match(patchLineFileMatchString, reportLine)
+ if match is not None:
+ if cmp (match.group(3), "") != 0 :
+ self.sourceName = match.group(3)
+ sourceName = self.sourceName
+ functionName = self.functionName
+
+ rva = int (match.group(2), 16)
+ lineName = int (match.group(1))
+ self.listLineAddress.append ([rva, functionName, lineName, sourceName])
+ else :
+ match = re.match(patchLineFileMatchStringFunc, reportLine)
+ if match is not None:
+ self.functionName = match.group(1)
+
+ self.lineCount = len (self.listLineAddress)
+ self.listLineAddress = sorted(self.listLineAddress, key=lambda symbolAddress:symbolAddress[0])
+
+class SymbolsFile:
+ def __init__(self):
+ self.symbolsTable = {}
+
+symbolsFile = ""
+
+driverName = ""
+rvaName = ""
+symbolName = ""
+
+def getSymbolName(driverName, rva):
+ global symbolsFile
+
+ try :
+ symbolList = symbolsFile.symbolsTable[driverName]
+ if symbolList is not None:
+ return symbolList.getSymbol (rva)
+ else:
+ return []
+ except Exception:
+ return []
+
+def myOptionParser():
+ usage = "%prog [--version] [-h] [--help] [-i inputfile [-o outputfile] [-g guidreffile]]"
+ Parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(versionNumber))
+ Parser.add_option("-i", "--inputfile", dest="inputfilename", type="string", help="The input memory profile info file output from MemoryProfileInfo application in MdeModulePkg")
+ Parser.add_option("-o", "--outputfile", dest="outputfilename", type="string", help="The output memory profile info file with symbol, MemoryProfileInfoSymbol.txt will be used if it is not specified")
+ Parser.add_option("-g", "--guidref", dest="guidreffilename", type="string", help="The input guid ref file output from build")
+
+ (Options, args) = Parser.parse_args()
+ if Options.inputfilename is None:
+ Parser.error("no input file specified")
+ if Options.outputfilename is None:
+ Options.outputfilename = "SmiHandlerProfileInfoSymbol.xml"
+ return Options
+
+dictGuid = {
+ '00000000-0000-0000-0000-000000000000':'gZeroGuid',
+ '2A571201-4966-47F6-8B86-F31E41F32F10':'gEfiEventLegacyBootGuid',
+ '27ABF055-B1B8-4C26-8048-748F37BAA2DF':'gEfiEventExitBootServicesGuid',
+ '7CE88FB3-4BD7-4679-87A8-A8D8DEE50D2B':'gEfiEventReadyToBootGuid',
+ '02CE967A-DD7E-4FFC-9EE7-810CF0470880':'gEfiEndOfDxeEventGroupGuid',
+ '60FF8964-E906-41D0-AFED-F241E974E08E':'gEfiDxeSmmReadyToLockProtocolGuid',
+ '18A3C6DC-5EEA-48C8-A1C1-B53389F98999':'gEfiSmmSwDispatch2ProtocolGuid',
+ '456D2859-A84B-4E47-A2EE-3276D886997D':'gEfiSmmSxDispatch2ProtocolGuid',
+ '4CEC368E-8E8E-4D71-8BE1-958C45FC8A53':'gEfiSmmPeriodicTimerDispatch2ProtocolGuid',
+ 'EE9B8D90-C5A6-40A2-BDE2-52558D33CCA1':'gEfiSmmUsbDispatch2ProtocolGuid',
+ '25566B03-B577-4CBF-958C-ED663EA24380':'gEfiSmmGpiDispatch2ProtocolGuid',
+ '7300C4A1-43F2-4017-A51B-C81A7F40585B':'gEfiSmmStandbyButtonDispatch2ProtocolGuid',
+ '1B1183FA-1823-46A7-8872-9C578755409D':'gEfiSmmPowerButtonDispatch2ProtocolGuid',
+ '58DC368D-7BFA-4E77-ABBC-0E29418DF930':'gEfiSmmIoTrapDispatch2ProtocolGuid',
+ }
+
+def genGuidString(guidreffile):
+ guidLines = guidreffile.readlines()
+ for guidLine in guidLines:
+ guidLineList = guidLine.split(" ")
+ if len(guidLineList) == 2:
+ guid = guidLineList[0]
+ guidName = guidLineList[1]
+ if guid not in dictGuid :
+ dictGuid[guid] = guidName
+
+def createSym(symbolName):
+ SymbolNode = xml.dom.minidom.Document().createElement("Symbol")
+ SymbolFunction = xml.dom.minidom.Document().createElement("Function")
+ SymbolFunctionData = xml.dom.minidom.Document().createTextNode(symbolName[0])
+ SymbolFunction.appendChild(SymbolFunctionData)
+ SymbolNode.appendChild(SymbolFunction)
+ if (len(symbolName)) >= 2:
+ SymbolSourceFile = xml.dom.minidom.Document().createElement("SourceFile")
+ SymbolSourceFileData = xml.dom.minidom.Document().createTextNode(symbolName[1])
+ SymbolSourceFile.appendChild(SymbolSourceFileData)
+ SymbolNode.appendChild(SymbolSourceFile)
+ if (len(symbolName)) >= 3:
+ SymbolLineNumber = xml.dom.minidom.Document().createElement("LineNumber")
+ SymbolLineNumberData = xml.dom.minidom.Document().createTextNode(str(symbolName[2]))
+ SymbolLineNumber.appendChild(SymbolLineNumberData)
+ SymbolNode.appendChild(SymbolLineNumber)
+ return SymbolNode
+
+def main():
+ global symbolsFile
+ global Options
+ Options = myOptionParser()
+
+ symbolsFile = SymbolsFile()
+
+ try :
+ DOMTree = xml.dom.minidom.parse(Options.inputfilename)
+ except Exception:
+ print("fail to open input " + Options.inputfilename)
+ return 1
+
+ if Options.guidreffilename is not None:
+ try :
+ guidreffile = open(Options.guidreffilename)
+ except Exception:
+ print("fail to open guidref" + Options.guidreffilename)
+ return 1
+ genGuidString(guidreffile)
+ guidreffile.close()
+
+ SmiHandlerProfile = DOMTree.documentElement
+
+ SmiHandlerDatabase = SmiHandlerProfile.getElementsByTagName("SmiHandlerDatabase")
+ SmiHandlerCategory = SmiHandlerDatabase[0].getElementsByTagName("SmiHandlerCategory")
+ for smiHandlerCategory in SmiHandlerCategory:
+ SmiEntry = smiHandlerCategory.getElementsByTagName("SmiEntry")
+ for smiEntry in SmiEntry:
+ if smiEntry.hasAttribute("HandlerType"):
+ guidValue = smiEntry.getAttribute("HandlerType")
+ if guidValue in dictGuid:
+ smiEntry.setAttribute("HandlerType", dictGuid[guidValue])
+ SmiHandler = smiEntry.getElementsByTagName("SmiHandler")
+ for smiHandler in SmiHandler:
+ Module = smiHandler.getElementsByTagName("Module")
+ Pdb = Module[0].getElementsByTagName("Pdb")
+ if (len(Pdb)) >= 1:
+ driverName = Module[0].getAttribute("Name")
+ pdbName = Pdb[0].childNodes[0].data
+
+ Module[0].removeChild(Pdb[0])
+
+ symbolsFile.symbolsTable[driverName] = Symbols()
+
+ if cmp (pdbName[-3:], "pdb") == 0 :
+ symbolsFile.symbolsTable[driverName].parse_pdb_file (driverName, pdbName)
+ else :
+ symbolsFile.symbolsTable[driverName].parse_debug_file (driverName, pdbName)
+
+ Handler = smiHandler.getElementsByTagName("Handler")
+ RVA = Handler[0].getElementsByTagName("RVA")
+ print(" Handler RVA: %s" % RVA[0].childNodes[0].data)
+
+ if (len(RVA)) >= 1:
+ rvaName = RVA[0].childNodes[0].data
+ symbolName = getSymbolName (driverName, int(rvaName, 16))
+
+ if (len(symbolName)) >= 1:
+ SymbolNode = createSym(symbolName)
+ Handler[0].appendChild(SymbolNode)
+
+ Caller = smiHandler.getElementsByTagName("Caller")
+ RVA = Caller[0].getElementsByTagName("RVA")
+ print(" Caller RVA: %s" % RVA[0].childNodes[0].data)
+
+ if (len(RVA)) >= 1:
+ rvaName = RVA[0].childNodes[0].data
+ symbolName = getSymbolName (driverName, int(rvaName, 16))
+
+ if (len(symbolName)) >= 1:
+ SymbolNode = createSym(symbolName)
+ Caller[0].appendChild(SymbolNode)
+
+ try :
+ newfile = open(Options.outputfilename, "w")
+ except Exception:
+ print("fail to open output" + Options.outputfilename)
+ return 1
+
+ newfile.write(DOMTree.toprettyxml(indent = "\t", newl = "\n", encoding = "utf-8"))
+ newfile.close()
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/UpdateBuildVersions.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/UpdateBuildVersions.py
new file mode 100755
index 00000000..a02214a9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Scripts/UpdateBuildVersions.py
@@ -0,0 +1,398 @@
+## @file
+# Update build revisions of the tools when performing a developer build
+#
+# This script will modife the C/Include/Common/BuildVersion.h file and the two
+# Python scripts, Python/Common/BuildVersion.py and Python/UPT/BuildVersion.py.
+# If SVN is available, the tool will obtain the current checked out version of
+# the source tree for including the --version commands.
+
+# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+##
+""" This program will update the BuildVersion.py and BuildVersion.h files used to set a tool's version value """
+from __future__ import absolute_import
+
+import os
+import shlex
+import subprocess
+import sys
+
+from argparse import ArgumentParser, SUPPRESS
+from tempfile import NamedTemporaryFile
+from types import IntType, ListType
+
+
# Message template reported when a required environment variable is missing.
SYS_ENV_ERR = "ERROR : %s system environment variable must be set prior to running this tool.\n"

__execname__ = "UpdateBuildVersions.py"

# SVN keyword string; the "$LastChangedRevision:" / "$" markers are stripped
# off to leave just the bare revision number.
SVN_REVISION = "$LastChangedRevision: 3 $"
SVN_REVISION = SVN_REVISION.replace("$LastChangedRevision:", "").replace("$", "").strip()

__copyright__ = "Copyright (c) 2014, Intel Corporation. All rights reserved."
VERSION_NUMBER = "0.7.0"
__version__ = "Version {0}.{1}".format(VERSION_NUMBER, SVN_REVISION)
+
+
def ParseOptions():
    """
    Build and run the command-line parser for this tool.

    Returns the parsed argument namespace; the options are later handed to
    the build-version update routines (and along to the MkBinPkg tool).
    """
    arg_parser = ArgumentParser(
        usage="%s [options]" % __execname__,
        description=__copyright__,
        conflict_handler='resolve')

    # Standard tool options.
    arg_parser.add_argument("--version", action="version",
                            version=__execname__ + " " + __version__)
    arg_parser.add_argument("-s", "--silent", action="store_true",
                            dest="silent",
                            help="All output will be disabled, pass/fail determined by the exit code")
    arg_parser.add_argument("-v", "--verbose", action="store_true",
                            dest="verbose",
                            help="Enable verbose output")

    # Tool-specific options; --svnFlag is hidden from the help output.
    arg_parser.add_argument("--revert", action="store_true",
                            dest="REVERT", default=False,
                            help="Revert the BuildVersion files only")
    arg_parser.add_argument("--svn-test", action="store_true",
                            dest="TEST_SVN", default=False,
                            help="Test if the svn command is available")
    arg_parser.add_argument("--svnFlag", action="store_true",
                            dest="HAVE_SVN", default=False,
                            help=SUPPRESS)

    return arg_parser.parse_args()
+
+
def ShellCommandResults(CmdLine, Opt):
    """ Execute the command, returning the output content """
    # Output is captured in a NamedTemporaryFile (delete=False) instead of a
    # pipe so that, if the command fails, the partial results survive on disk
    # and the user can be pointed at them.
    # Returns a list of output lines on success, or a non-zero int
    # (exit status / errno) on failure -- callers test the return type.
    file_list = NamedTemporaryFile(delete=False)
    filename = file_list.name
    Results = []

    returnValue = 0
    try:
        subprocess.check_call(args=shlex.split(CmdLine), stderr=subprocess.STDOUT, stdout=file_list)
    except subprocess.CalledProcessError as err_val:
        # The command ran but exited with a non-zero status.
        file_list.close()
        if not Opt.silent:
            sys.stderr.write("ERROR : %d : %s\n" % (err_val.returncode, err_val.__str__()))
            if os.path.exists(filename):
                sys.stderr.write("      : Partial results may be in this file: %s\n" % filename)
            sys.stderr.flush()
        returnValue = err_val.returncode

    except IOError as err_val:
        (errno, strerror) = err_val.args
        file_list.close()
        if not Opt.silent:
            sys.stderr.write("I/O ERROR : %s : %s\n" % (str(errno), strerror))
            sys.stderr.write("ERROR : this command failed : %s\n" % CmdLine)
            if os.path.exists(filename):
                sys.stderr.write("      : Partial results may be in this file: %s\n" % filename)
            sys.stderr.flush()
        returnValue = errno

    except OSError as err_val:
        # NOTE(review): on Python 3, OSError is the same class as IOError, so
        # this branch is unreachable there -- the IOError branch fires first.
        (errno, strerror) = err_val.args
        file_list.close()
        if not Opt.silent:
            sys.stderr.write("OS ERROR : %s : %s\n" % (str(errno), strerror))
            sys.stderr.write("ERROR : this command failed : %s\n" % CmdLine)
            if os.path.exists(filename):
                sys.stderr.write("      : Partial results may be in this file: %s\n" % filename)
            sys.stderr.flush()
        returnValue = errno

    except KeyboardInterrupt:
        file_list.close()
        if not Opt.silent:
            sys.stderr.write("ERROR : Command terminated by user : %s\n" % CmdLine)
            if os.path.exists(filename):
                sys.stderr.write("      : Partial results may be in this file: %s\n" % filename)
            sys.stderr.flush()
        returnValue = 1

    finally:
        # Force captured output to disk before reading it back below.
        if not file_list.closed:
            file_list.flush()
            os.fsync(file_list.fileno())
            file_list.close()

    if os.path.exists(filename):
        fd_ = open(filename, 'r')
        Results = fd_.readlines()
        fd_.close()
        # The temp file was created with delete=False, so remove it manually.
        os.unlink(filename)

    if returnValue > 0:
        return returnValue

    return Results
+
+
def UpdateBuildVersionPython(Rev, UserModified, opts):
    """
    Update the BuildVersion.py files in the Python source tree.

    :param Rev: revision string to embed in the gBUILD_VERSION assignment
    :param UserModified: True if locally modified sources were detected
    :param opts: parsed options; when opts.HAVE_SVN is False a pristine copy
                 (orig_BuildVersion.py) is saved so --revert can restore it
    """
    for SubDir in ["Common", "UPT"]:
        PyPath = os.path.join(os.environ['BASE_TOOLS_PATH'], "Source", "Python", SubDir)
        BuildVersionPy = os.path.join(PyPath, "BuildVersion.py")
        with open(os.path.normpath(BuildVersionPy), 'r') as fd_:
            contents = fd_.readlines()
        if opts.HAVE_SVN is False:
            # No SVN available: keep a backup copy for later restoration.
            BuildVersionOrig = os.path.join(PyPath, "orig_BuildVersion.py")
            with open(BuildVersionOrig, 'w') as fd_:
                fd_.writelines(contents)
                fd_.flush()
        new_content = []
        for line in contents:
            if line.strip().startswith("gBUILD_VERSION"):
                # The replacement must carry the "\n" that readlines() kept
                # on the original line; without it the following line would
                # be merged onto the version line when written back.
                new_line = "gBUILD_VERSION = \"Developer Build based on Revision: %s\"\n" % Rev
                if UserModified:
                    new_line = "gBUILD_VERSION = \"Developer Build based on Revision: %s with Modified Sources\"\n" % Rev
                new_content.append(new_line)
                continue
            new_content.append(line)

        with open(os.path.normpath(BuildVersionPy), 'w') as fd_:
            fd_.writelines(new_content)
+
+
def UpdateBuildVersionH(Rev, UserModified, opts):
    """
    Update the BuildVersion.h file in the C source tree.

    :param Rev: revision string to embed in the __BUILD_VERSION macro
    :param UserModified: True if locally modified sources were detected
    :param opts: parsed options; when opts.HAVE_SVN is False a pristine copy
                 (orig_BuildVersion.h) is saved so --revert can restore it
    """
    CPath = os.path.join(os.environ['BASE_TOOLS_PATH'], "Source", "C", "Include", "Common")
    BuildVersionH = os.path.join(CPath, "BuildVersion.h")
    with open(os.path.normpath(BuildVersionH), 'r') as fd_:
        contents = fd_.readlines()
    if opts.HAVE_SVN is False:
        # No SVN available: keep a backup copy for later restoration.
        BuildVersionOrig = os.path.join(CPath, "orig_BuildVersion.h")
        with open(BuildVersionOrig, 'w') as fd_:
            fd_.writelines(contents)
            fd_.flush()

    new_content = []
    for line in contents:
        # NOTE(review): every "#define" line is replaced; this assumes
        # BuildVersion.h defines only __BUILD_VERSION -- confirm before any
        # other macro is ever added to that header.
        if line.strip().startswith("#define"):
            # The replacement must carry the "\n" that readlines() kept on
            # the original line; without it the following line would be
            # merged onto the #define when written back.
            new_line = "#define __BUILD_VERSION \"Developer Build based on Revision: %s\"\n" % Rev
            if UserModified:
                new_line = "#define __BUILD_VERSION \"Developer Build based on Revision: %s with Modified Sources\"\n" % \
                           Rev
            new_content.append(new_line)
            continue
        new_content.append(line)

    with open(os.path.normpath(BuildVersionH), 'w') as fd_:
        fd_.writelines(new_content)
+
+
def RevertCmd(Filename, Opt):
    """
    Run "svn revert" on a single file.

    :param Filename: path of the file to revert; backslashes are normalized
                     to forward slashes for the svn command line
    :param Opt: parsed options; .silent suppresses error output and .verbose
                reports the reverted file
    Errors are reported (unless silent) but never raised to the caller.
    """
    CmdLine = "svn revert %s" % Filename.replace("\\", "/").strip()
    try:
        subprocess.check_output(args=shlex.split(CmdLine))
    except subprocess.CalledProcessError as err_val:
        if not Opt.silent:
            sys.stderr.write("Subprocess ERROR : %s\n" % err_val)
            sys.stderr.flush()

    except IOError as err_val:
        (errno, strerror) = err_val.args
        if not Opt.silent:
            # %s, not %d: errno is formatted via str() here (the original
            # "%d" % str(errno) raised TypeError on this error path).
            sys.stderr.write("I/O ERROR : %s : %s\n" % (str(errno), strerror))
            sys.stderr.write("ERROR : this command failed : %s\n" % CmdLine)
            sys.stderr.flush()

    except OSError as err_val:
        (errno, strerror) = err_val.args
        if not Opt.silent:
            # %s, not %d: same TypeError fix as the IOError branch above.
            sys.stderr.write("OS ERROR : %s : %s\n" % (str(errno), strerror))
            sys.stderr.write("ERROR : this command failed : %s\n" % CmdLine)
            sys.stderr.flush()

    except KeyboardInterrupt:
        if not Opt.silent:
            sys.stderr.write("ERROR : Command terminated by user : %s\n" % CmdLine)
            sys.stderr.flush()

    if Opt.verbose:
        sys.stdout.write("Reverted this file: %s\n" % Filename)
        sys.stdout.flush()
+
+
def GetSvnRevision(opts):
    """ Get the current revision of the BaseTools/Source tree, and check if any of the files have been modified """
    # Returns (Revision, Modified): Revision is a string ("Unknown" when SVN
    # is unavailable, 0 when "svn info" fails) and Modified is a bool.
    Revision = "Unknown"
    Modified = False

    if opts.HAVE_SVN is False:
        sys.stderr.write("WARNING: the svn command-line tool is not available.\n")
        return (Revision, Modified)

    SrcPath = os.path.join(os.environ['BASE_TOOLS_PATH'], "Source")
    # Check if there are modified files.
    Cwd = os.getcwd()
    os.chdir(SrcPath)

    StatusCmd = "svn st -v --depth infinity --non-interactive"
    contents = ShellCommandResults(StatusCmd, opts)
    os.chdir(Cwd)
    # ShellCommandResults returns a list of output lines on success or an
    # int exit code on failure; test with the builtin types (the Python-2
    # only types.ListType/IntType names do not exist on Python 3).
    if isinstance(contents, list):
        for line in contents:
            if line.startswith("M "):
                Modified = True
                break

    # Get the repository revision of BaseTools/Source
    InfoCmd = "svn info %s" % SrcPath.replace("\\", "/").strip()
    Revision = 0
    contents = ShellCommandResults(InfoCmd, opts)
    if isinstance(contents, int):
        return 0, Modified
    for line in contents:
        line = line.strip()
        if line.startswith("Revision:"):
            Revision = line.replace("Revision:", "").strip()
            break

    return (Revision, Modified)
+
+
def CheckSvn(opts):
    """
    This routine will return True if an svn --version command succeeds, or False if it fails.
    If it failed, SVN is not available.
    """
    # Silence the probe regardless of the caller's setting, then restore it.
    OriginalSilent = opts.silent
    opts.silent = True
    VerCmd = "svn --version"
    contents = ShellCommandResults(VerCmd, opts)
    opts.silent = OriginalSilent
    # An int return means the command failed (exit code); a list holds the
    # output lines.  Use builtin int (types.IntType is Python 2 only).
    if isinstance(contents, int):
        if opts.verbose:
            sys.stdout.write("SVN does not appear to be available.\n")
            sys.stdout.flush()
        return False

    if opts.verbose:
        sys.stdout.write("Found %s" % contents[0])
        sys.stdout.flush()
    return True
+
+
def CopyOrig(Src, Dest, Opt):
    """
    Overwrite the Dest file with the Src file content, then delete Src.

    :param Src: backup file to copy from (removed on success)
    :param Dest: file to restore
    :param Opt: parsed options; .silent suppresses errors, .verbose reports
    Returns 0 on success, 1 if the copy failed.
    """
    try:
        # with-blocks guarantee the handles close even if a read/write fails.
        with open(Src, 'r') as fd_:
            contents = fd_.readlines()
        with open(Dest, 'w') as fd_:
            fd_.writelines(contents)
            fd_.flush()
    except IOError:
        if not Opt.silent:
            sys.stderr.write("Unable to restore this file: %s\n" % Dest)
            sys.stderr.flush()
        return 1

    os.remove(Src)
    if Opt.verbose:
        # Report the file that was restored (Dest); Src has just been
        # deleted (the original message printed the removed Src path).
        sys.stdout.write("Restored this file: %s\n" % Dest)
        sys.stdout.flush()

    return 0
+
+
def CheckOriginals(Opts):
    """
    If SVN was not available, then the tools may have made copies of the original BuildVersion.* files using
    orig_BuildVersion.* for the name. If they exist, replace the existing BuildVersion.* file with the corresponding
    orig_BuildVersion.* file.
    Returns 0 if this succeeds, or 1 if the copy function fails. It will also return 0 if the orig_BuildVersion.* file
    does not exist.
    """
    CPath = os.path.join(os.environ['BASE_TOOLS_PATH'], "Source", "C", "Include", "Common")
    BuildVersionH = os.path.join(CPath, "BuildVersion.h")
    OrigBuildVersionH = os.path.join(CPath, "orig_BuildVersion.h")
    if os.path.exists(OrigBuildVersionH):
        if CopyOrig(OrigBuildVersionH, BuildVersionH, Opts):
            return 1
    for SubDir in ["Common", "UPT"]:
        PyPath = os.path.join(os.environ['BASE_TOOLS_PATH'], "Source", "Python", SubDir)
        # The Python trees hold .py backups (UpdateBuildVersionPython writes
        # orig_BuildVersion.py); the original code looked for .h names here
        # and therefore never restored the Python files.
        BuildVersionPy = os.path.join(PyPath, "BuildVersion.py")
        OrigBuildVersionPy = os.path.join(PyPath, "orig_BuildVersion.py")
        if not os.path.exists(OrigBuildVersionPy):
            # No backup for this tree; keep checking the remaining trees
            # (the original returned early here, skipping UPT).
            continue
        if CopyOrig(OrigBuildVersionPy, BuildVersionPy, Opts):
            return 1

    return 0
+
+
def RevertBuildVersionFiles(opts):
    """
    Attempt to revert each of the BuildVersion.* files: via "svn revert"
    when SVN is available, otherwise by restoring the orig_BuildVersion.*
    backup copies.
    """
    if not opts.HAVE_SVN:
        # No SVN: fall back to the saved backup copies.
        return 1 if CheckOriginals(opts) else 0
    # SVN is available: revert the C header first, then the Python files.
    src_root = os.path.join(os.environ['BASE_TOOLS_PATH'], "Source")
    targets = [os.path.join(src_root, "C", "Include", "Common", "BuildVersion.h")]
    targets += [os.path.join(src_root, "Python", sub_dir, "BuildVersion.py")
                for sub_dir in ("Common", "UPT")]
    for target in targets:
        RevertCmd(target, opts)
+
def UpdateRevisionFiles():
    """ Main routine that will update the BuildVersion.py and BuildVersion.h files."""
    # Returns 0 on success, 1 on a bad environment; with --svn-test the exit
    # code is 0 when svn is available and 1 when it is not.
    options = ParseOptions()
    # Check the working environment
    if "WORKSPACE" not in os.environ.keys():
        sys.stderr.write(SYS_ENV_ERR % 'WORKSPACE')
        return 1
    if 'BASE_TOOLS_PATH' not in os.environ.keys():
        sys.stderr.write(SYS_ENV_ERR % 'BASE_TOOLS_PATH')
        return 1
    if not os.path.exists(os.environ['BASE_TOOLS_PATH']):
        sys.stderr.write("Unable to locate the %s directory." % os.environ['BASE_TOOLS_PATH'])
        return 1

    options.HAVE_SVN = CheckSvn(options)
    if options.TEST_SVN:
        return (not options.HAVE_SVN)
    # done processing the option, now use the option.HAVE_SVN as a flag. True = Have it, False = Don't have it.
    if options.REVERT:
        # Just revert the tools and exit
        RevertBuildVersionFiles(options)
    else:
        # Revert any changes in the BuildVersion.* files before setting them again.
        RevertBuildVersionFiles(options)
        Revision, Modified = GetSvnRevision(options)
        if options.verbose:
            sys.stdout.write("Revision: %s is Modified: %s\n" % (Revision, Modified))
            sys.stdout.flush()
        UpdateBuildVersionH(Revision, Modified, options)
        UpdateBuildVersionPython(Revision, Modified, options)

    return 0
+
+
# Script entry point: the exit code is UpdateRevisionFiles()'s return value.
if __name__ == "__main__":
    sys.exit(UpdateRevisionFiles())
+
+