author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:17:27 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-11 08:17:27 +0000
commit     f215e02bf85f68d3a6106c2a1f4f7f063f819064 (patch)
tree       6bb5b92c046312c4e95ac2620b10ddf482d3fa8b /src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen
parent     Initial commit. (diff)
Adding upstream version 7.0.14-dfsg. (tag: upstream/7.0.14-dfsg)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen')
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py  113
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py  329
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py  650
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py  169
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py  2111
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py  464
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py  1810
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py  1615
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py  366
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py  132
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py  304
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py  119
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py  2456
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py  674
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py  1603
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py  630
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py  683
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py  280
-rwxr-xr-x  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py  971
-rw-r--r--  src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py  11
20 files changed, 15490 insertions, 0 deletions
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py
new file mode 100755
index 00000000..8e4c54bb
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -0,0 +1,113 @@
+## @file
+# Generate AutoGen.h, AutoGen.c and *.depex files
+#
+# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>
+# Copyright (c) 2019, American Megatrends, Inc. All rights reserved.<BR>
+#
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+from Common.DataType import TAB_STAR
+## Base class for AutoGen
+#
+# This class just implements the cache mechanism of AutoGen objects.
+#
+class AutoGen(object):
+ # database to maintain the objects in each child class
+ __ObjectCache = {} # (BuildTarget, ToolChain, ARCH, platform file): AutoGen object
+
+ ## Factory method
+ #
+ # @param Class class object of real AutoGen class
+ # (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
+ # @param Workspace Workspace directory or WorkspaceAutoGen object
+ # @param MetaFile The path of meta file
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param Arch Target arch
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+
+ def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ # check if the object has been created
+ Key = (Target, Toolchain, Arch, MetaFile)
+ if Key in cls.__ObjectCache:
+ # if it exists, just return it directly
+ return cls.__ObjectCache[Key]
+ # it didn't exist; create it, cache it, then return it
+ RetVal = cls.__ObjectCache[Key] = super(AutoGen, cls).__new__(cls)
+ return RetVal
+
+
+ ## hash() operator
+ #
+ # The file path of the platform file is used as the hash value of this object
+ #
+ # @retval int Hash value of the file path of the platform file
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+ ## str() operator
+ #
+ # The file path of the platform file is used to represent this object
+ #
+ # @retval string String of the platform file path
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## "==" operator
+ def __eq__(self, Other):
+ return Other and self.MetaFile == Other
+
+ @classmethod
+ def Cache(cls):
+ return cls.__ObjectCache
+
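Note: because the __new__ factory above keys its cache on (Target, Toolchain, Arch, MetaFile), constructing the same subclass twice with the same key returns one shared instance. A minimal sketch, assuming AutoGen.AutoGen is importable; Demo is a hypothetical subclass used only for illustration:

    from AutoGen.AutoGen import AutoGen

    class Demo(AutoGen):
        # hypothetical subclass; the real subclasses are WorkspaceAutoGen,
        # PlatformAutoGen and ModuleAutoGen
        def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch):
            self.MetaFile = MetaFile

    a = Demo(None, "Pkg/Pkg.dsc", "DEBUG", "GCC5", "X64")
    b = Demo(None, "Pkg/Pkg.dsc", "DEBUG", "GCC5", "X64")
    assert a is b                                            # same cached object
    assert ("DEBUG", "GCC5", "X64", "Pkg/Pkg.dsc") in AutoGen.Cache()
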
+#
+# The priority list used when overriding build options
+#
+PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
+ "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE
+ "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE
+ "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
+ "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
+## Calculate the priority value of the build option
+#
+# @param Key Build option definition contain: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+#
+# @retval Value Priority value based on the priority list.
+#
+def CalculatePriorityValue(Key):
+ Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
+ PriorityValue = 0x11111
+ if Target == TAB_STAR:
+ PriorityValue &= 0x01111
+ if ToolChain == TAB_STAR:
+ PriorityValue &= 0x10111
+ if Arch == TAB_STAR:
+ PriorityValue &= 0x11011
+ if CommandType == TAB_STAR:
+ PriorityValue &= 0x11101
+ if Attr == TAB_STAR:
+ PriorityValue &= 0x11110
+
+ return PrioList["0x%0.5x" % PriorityValue]
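Note: each hex digit in the mask stands for one field of the TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE key; a digit is cleared when that field is TAB_STAR ("*" in Common.DataType), and PrioList maps the surviving pattern to a rank. A self-contained sketch of the same arithmetic, with two worked keys:

    TAB_STAR = "*"  # mirrors Common.DataType.TAB_STAR

    def priority_pattern(key):
        target, toolchain, arch, cmdtype, attr = key.split('_')
        value = 0x11111
        for field, mask in ((target, 0x01111), (toolchain, 0x10111),
                            (arch, 0x11011), (cmdtype, 0x11101), (attr, 0x11110)):
            if field == TAB_STAR:
                value &= mask          # clear the digit for a wildcard field
        return "0x%0.5x" % value

    assert priority_pattern("DEBUG_GCC5_IA32_CC_FLAGS") == "0x11111"  # rank 16: fully qualified
    assert priority_pattern("*_*_*_CC_FLAGS") == "0x00011"            # rank 9: only command type and attribute fixed
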
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
new file mode 100755
index 00000000..d392ffb2
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/AutoGenWorker.py
@@ -0,0 +1,329 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+import multiprocessing as mp
+import threading
+from Common.Misc import PathClass
+from AutoGen.ModuleAutoGen import ModuleAutoGen
+from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
+import Common.GlobalData as GlobalData
+import Common.EdkLogger as EdkLogger
+import os
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from AutoGen.AutoGen import AutoGen
+from Workspace.WorkspaceDatabase import BuildDB
+try:
+ from queue import Empty
+except:
+ from Queue import Empty
+import traceback
+import sys
+from AutoGen.DataPipe import MemoryDataPipe
+import logging
+import time
+
+def clearQ(q):
+ try:
+ while True:
+ q.get_nowait()
+ except Empty:
+ pass
+
+class LogAgent(threading.Thread):
+ def __init__(self,log_q,log_level,log_file=None):
+ super(LogAgent,self).__init__()
+ self.log_q = log_q
+ self.log_level = log_level
+ self.log_file = log_file
+ def InitLogger(self):
+ # For DEBUG level (All DEBUG_0~9 are applicable)
+ self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
+ _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
+ self._DebugLogger_agent.setLevel(self.log_level)
+ _DebugChannel = logging.StreamHandler(sys.stdout)
+ _DebugChannel.setFormatter(_DebugFormatter)
+ self._DebugLogger_agent.addHandler(_DebugChannel)
+
+ # For VERBOSE, INFO, WARN level
+ self._InfoLogger_agent = logging.getLogger("tool_info_agent")
+ _InfoFormatter = logging.Formatter("%(message)s")
+ self._InfoLogger_agent.setLevel(self.log_level)
+ _InfoChannel = logging.StreamHandler(sys.stdout)
+ _InfoChannel.setFormatter(_InfoFormatter)
+ self._InfoLogger_agent.addHandler(_InfoChannel)
+
+ # For ERROR level
+ self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
+ _ErrorFormatter = logging.Formatter("%(message)s")
+ self._ErrorLogger_agent.setLevel(self.log_level)
+ _ErrorCh = logging.StreamHandler(sys.stderr)
+ _ErrorCh.setFormatter(_ErrorFormatter)
+ self._ErrorLogger_agent.addHandler(_ErrorCh)
+
+ if self.log_file:
+ if os.path.exists(self.log_file):
+ os.remove(self.log_file)
+ _Ch = logging.FileHandler(self.log_file)
+ _Ch.setFormatter(_DebugFormatter)
+ self._DebugLogger_agent.addHandler(_Ch)
+
+ _Ch= logging.FileHandler(self.log_file)
+ _Ch.setFormatter(_InfoFormatter)
+ self._InfoLogger_agent.addHandler(_Ch)
+
+ _Ch = logging.FileHandler(self.log_file)
+ _Ch.setFormatter(_ErrorFormatter)
+ self._ErrorLogger_agent.addHandler(_Ch)
+
+ def run(self):
+ self.InitLogger()
+ while True:
+ log_message = self.log_q.get()
+ if log_message is None:
+ break
+ if log_message.name == "tool_error":
+ self._ErrorLogger_agent.log(log_message.levelno,log_message.getMessage())
+ elif log_message.name == "tool_info":
+ self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
+ elif log_message.name == "tool_debug":
+ self._DebugLogger_agent.log(log_message.levelno,log_message.getMessage())
+ else:
+ self._InfoLogger_agent.log(log_message.levelno,log_message.getMessage())
+
+ def kill(self):
+ self.log_q.put(None)
+class AutoGenManager(threading.Thread):
+ def __init__(self,autogen_workers, feedback_q,error_event):
+ super(AutoGenManager,self).__init__()
+ self.autogen_workers = autogen_workers
+ self.feedback_q = feedback_q
+ self.Status = True
+ self.error_event = error_event
+ def run(self):
+ try:
+ fin_num = 0
+ while True:
+ badnews = self.feedback_q.get()
+ if badnews is None:
+ break
+ if badnews == "Done":
+ fin_num += 1
+ elif badnews == "QueueEmpty":
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
+ self.TerminateWorkers()
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
+ self.Status = False
+ self.TerminateWorkers()
+ if fin_num == len(self.autogen_workers):
+ self.clearQueue()
+ for w in self.autogen_workers:
+ w.join()
+ break
+ except Exception:
+ return
+
+ def clearQueue(self):
+ taskq = self.autogen_workers[0].module_queue
+ logq = self.autogen_workers[0].log_q
+ clearQ(taskq)
+ clearQ(self.feedback_q)
+ clearQ(logq)
+ # Copy the cache queue items to the parent thread before clearing
+ cacheq = self.autogen_workers[0].cache_q
+ try:
+ cache_num = 0
+ while True:
+ item = cacheq.get()
+ if item == "CacheDone":
+ cache_num += 1
+ else:
+ GlobalData.gModuleAllCacheStatus.add(item)
+ if cache_num == len(self.autogen_workers):
+ break
+ except:
+ print ("cache_q error")
+
+ def TerminateWorkers(self):
+ self.error_event.set()
+ def kill(self):
+ self.feedback_q.put(None)
+class AutoGenWorkerInProcess(mp.Process):
+ def __init__(self,module_queue,data_pipe_file_path,feedback_q,file_lock,cache_q,log_q,error_event):
+ mp.Process.__init__(self)
+ self.module_queue = module_queue
+ self.data_pipe_file_path =data_pipe_file_path
+ self.data_pipe = None
+ self.feedback_q = feedback_q
+ self.PlatformMetaFileSet = {}
+ self.file_lock = file_lock
+ self.cache_q = cache_q
+ self.log_q = log_q
+ self.error_event = error_event
+ def GetPlatformMetaFile(self,filepath,root):
+ try:
+ return self.PlatformMetaFileSet[(filepath,root)]
+ except:
+ self.PlatformMetaFileSet[(filepath,root)] = filepath
+ return self.PlatformMetaFileSet[(filepath,root)]
+ def run(self):
+ try:
+ taskname = "Init"
+ with self.file_lock:
+ try:
+ self.data_pipe = MemoryDataPipe()
+ self.data_pipe.load(self.data_pipe_file_path)
+ except:
+ self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
+ EdkLogger.LogClientInitialize(self.log_q)
+ loglevel = self.data_pipe.Get("LogLevel")
+ if not loglevel:
+ loglevel = EdkLogger.INFO
+ EdkLogger.SetLevel(loglevel)
+ target = self.data_pipe.Get("P_Info").get("Target")
+ toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
+ archlist = self.data_pipe.Get("P_Info").get("ArchList")
+
+ active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
+ workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
+ PackagesPath = os.getenv("PACKAGES_PATH")
+ mws.setWs(workspacedir, PackagesPath)
+ self.Wa = WorkSpaceInfo(
+ workspacedir,active_p,target,toolchain,archlist
+ )
+ self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
+ GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
+ GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
+ os.environ._data = self.data_pipe.Get("Env_Var")
+ GlobalData.gWorkspace = workspacedir
+ GlobalData.gDisableIncludePathCheck = False
+ GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
+ GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
+
+ GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
+ GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
+ GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
+ GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
+ GlobalData.gModulePreMakeCacheStatus = dict()
+ GlobalData.gModuleMakeCacheStatus = dict()
+ GlobalData.gHashChainStatus = dict()
+ GlobalData.gCMakeHashFile = dict()
+ GlobalData.gModuleHashFile = dict()
+ GlobalData.gFileHashDict = dict()
+ GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
+ GlobalData.file_lock = self.file_lock
+ CommandTarget = self.data_pipe.Get("CommandTarget")
+ pcd_from_build_option = []
+ for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
+ pcd_id = ".".join((pcd_tuple[0],pcd_tuple[1]))
+ if pcd_tuple[2].strip():
+ pcd_id = ".".join((pcd_id,pcd_tuple[2]))
+ pcd_from_build_option.append("=".join((pcd_id,pcd_tuple[3])))
+ GlobalData.BuildOptionPcd = pcd_from_build_option
+ module_count = 0
+ FfsCmd = self.data_pipe.Get("FfsCommand")
+ if FfsCmd is None:
+ FfsCmd = {}
+ GlobalData.FfsCmd = FfsCmd
+ PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
+ self.data_pipe.Get("P_Info").get("WorkspaceDir"))
+ while True:
+ if self.error_event.is_set():
+ break
+ module_count += 1
+ try:
+ module_file,module_root,module_path,module_basename,module_originalpath,module_arch,IsLib = self.module_queue.get_nowait()
+ except Empty:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
+ time.sleep(0.01)
+ continue
+ if module_file is None:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
+ self.feedback_q.put("QueueEmpty")
+ time.sleep(0.01)
+ continue
+
+ modulefullpath = os.path.join(module_root,module_file)
+ taskname = " : ".join((modulefullpath,module_arch))
+ module_metafile = PathClass(module_file,module_root)
+ if module_path:
+ module_metafile.Path = module_path
+ if module_basename:
+ module_metafile.BaseName = module_basename
+ if module_originalpath:
+ module_metafile.OriginalPath = PathClass(module_originalpath,module_root)
+ arch = module_arch
+ target = self.data_pipe.Get("P_Info").get("Target")
+ toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
+ Ma = ModuleAutoGen(self.Wa,module_metafile,target,toolchain,arch,PlatformMetaFile,self.data_pipe)
+ Ma.IsLibrary = IsLib
+ # SourceFileList calling sequence impacts the makefile string sequence.
+ # Create cached SourceFileList here to unify its calling sequence for both
+ # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
+ RetVal = Ma.SourceFileList
+ if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
+ try:
+ CacheResult = Ma.CanSkipbyPreMakeCache()
+ except:
+ CacheResult = False
+ self.feedback_q.put(taskname)
+
+ if CacheResult:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
+ continue
+ else:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))
+
+ Ma.CreateCodeFile(False)
+ Ma.CreateMakeFile(False,GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch),[]))
+ Ma.CreateAsBuiltInf()
+ if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
+ try:
+ CacheResult = Ma.CanSkipbyMakeCache()
+ except:
+ CacheResult = False
+ self.feedback_q.put(taskname)
+
+ if CacheResult:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
+ continue
+ else:
+ self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))
+
+ except Exception as e:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
+ self.feedback_q.put(taskname)
+ finally:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
+ self.feedback_q.put("Done")
+ self.cache_q.put("CacheDone")
+
+ def printStatus(self):
+ print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(),len(AutoGen.Cache())))
+ print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(),len(AutoGenInfo.GetCache())))
+ groupobj = {}
+ for buildobj in BuildDB.BuildObject.GetCache().values():
+ if str(buildobj).lower().endswith("dec"):
+ try:
+ groupobj['dec'].append(str(buildobj))
+ except:
+ groupobj['dec'] = [str(buildobj)]
+ if str(buildobj).lower().endswith("dsc"):
+ try:
+ groupobj['dsc'].append(str(buildobj))
+ except:
+ groupobj['dsc'] = [str(buildobj)]
+
+ if str(buildobj).lower().endswith("inf"):
+ try:
+ groupobj['inf'].append(str(buildobj))
+ except:
+ groupobj['inf'] = [str(buildobj)]
+
+ print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(),len(groupobj.get("dec",[]))))
+ print("Processs ID: %d Run %d pla in WDB " % (os.getpid(),len(groupobj.get("dsc",[]))))
+ print("Processs ID: %d Run %d inf in WDB " % (os.getpid(),len(groupobj.get("inf",[]))))
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py
new file mode 100755
index 00000000..8a321bf8
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/BuildEngine.py
@@ -0,0 +1,650 @@
+## @file
+# The engine for building files
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os
+import re
+import copy
+import string
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+from Common.GlobalData import *
+from Common.BuildToolError import *
+from Common.Misc import tdict, PathClass
+from Common.StringUtils import NormPath
+from Common.DataType import *
+from Common.TargetTxtClassObject import TargetTxtDict
+gDefaultBuildRuleFile = 'build_rule.txt'
+AutoGenReqBuildRuleVerNum = '0.1'
+
+import Common.EdkLogger as EdkLogger
+
+## Convert file type to file list macro name
+#
+# @param FileType The name of file type
+#
+# @retval string The name of macro
+#
+def FileListMacro(FileType):
+ return "%sS" % FileType.replace("-", "_").upper()
+
+## Convert file type to list file macro name
+#
+# @param FileType The name of file type
+#
+# @retval string The name of macro
+#
+def ListFileMacro(FileType):
+ return "%s_LIST" % FileListMacro(FileType)
+
+class TargetDescBlock(object):
+ def __init__(self, Inputs, Outputs, Commands, Dependencies):
+ self.InitWorker(Inputs, Outputs, Commands, Dependencies)
+
+ def InitWorker(self, Inputs, Outputs, Commands, Dependencies):
+ self.Inputs = Inputs
+ self.Outputs = Outputs
+ self.Commands = Commands
+ self.Dependencies = Dependencies
+ if self.Outputs:
+ self.Target = self.Outputs[0]
+ else:
+ self.Target = None
+
+ def __str__(self):
+ return self.Target.Path
+
+ def __hash__(self):
+ return hash(self.Target.Path)
+
+ def __eq__(self, Other):
+ if isinstance(Other, type(self)):
+ return Other.Target.Path == self.Target.Path
+ else:
+ return str(Other) == self.Target.Path
+
+ def AddInput(self, Input):
+ if Input not in self.Inputs:
+ self.Inputs.append(Input)
+
+ def IsMultipleInput(self):
+ return len(self.Inputs) > 1
+
+## Class for one build rule
+#
+# This represents a build rule which can give out corresponding command list for
+# building the given source file(s). The result can be used for generating the
+# target for makefile.
+#
+class FileBuildRule:
+ INC_LIST_MACRO = "INC_LIST"
+ INC_MACRO = "INC"
+
+ ## constructor
+ #
+ # @param Input The dictionary representing input file(s) for a rule
+ # @param Output The list representing output file(s) for a rule
+ # @param Command The list containing commands to generate the output from input
+ #
+ def __init__(self, Type, Input, Output, Command, ExtraDependency=None):
+ # The Input should not be empty
+ if not Input:
+ Input = []
+ if not Output:
+ Output = []
+ if not Command:
+ Command = []
+
+ self.FileListMacro = FileListMacro(Type)
+ self.ListFileMacro = ListFileMacro(Type)
+ self.IncListFileMacro = self.INC_LIST_MACRO
+
+ self.SourceFileType = Type
+ # source files not listed in TAB_STAR or "?" pattern format
+ if not ExtraDependency:
+ self.ExtraSourceFileList = []
+ else:
+ self.ExtraSourceFileList = ExtraDependency
+
+ #
+ # Search macros used in command lines for <FILE_TYPE>_LIST and INC_LIST.
+ # If found, generate a file to keep the input files used to get over the
+ # limitation of command line length
+ #
+ self.MacroList = []
+ self.CommandList = []
+ for CmdLine in Command:
+ self.MacroList.extend(gMacroRefPattern.findall(CmdLine))
+ # replace path separator with native one
+ self.CommandList.append(CmdLine)
+
+ # Indicate what should be generated
+ if self.FileListMacro in self.MacroList:
+ self.GenFileListMacro = True
+ else:
+ self.GenFileListMacro = False
+
+ if self.ListFileMacro in self.MacroList:
+ self.GenListFile = True
+ self.GenFileListMacro = True
+ else:
+ self.GenListFile = False
+
+ if self.INC_LIST_MACRO in self.MacroList:
+ self.GenIncListFile = True
+ else:
+ self.GenIncListFile = False
+
+ # Check input files
+ self.IsMultipleInput = False
+ self.SourceFileExtList = set()
+ for File in Input:
+ Base, Ext = os.path.splitext(File)
+ if Base.find(TAB_STAR) >= 0:
+ # There's TAB_STAR in the file name
+ self.IsMultipleInput = True
+ self.GenFileListMacro = True
+ elif Base.find("?") < 0:
+ # There's no TAB_STAR and "?" in file name
+ self.ExtraSourceFileList.append(File)
+ continue
+ self.SourceFileExtList.add(Ext)
+
+ # Check output files
+ self.DestFileList = []
+ for File in Output:
+ self.DestFileList.append(File)
+
+ # All build targets generated by this rule for a module
+ self.BuildTargets = {}
+
+ ## str() function support
+ #
+ # @retval string
+ #
+ def __str__(self):
+ SourceString = ""
+ SourceString += " %s %s %s" % (self.SourceFileType, " ".join(self.SourceFileExtList), self.ExtraSourceFileList)
+ DestString = ", ".join([str(i) for i in self.DestFileList])
+ CommandString = "\n\t".join(self.CommandList)
+ return "%s : %s\n\t%s" % (DestString, SourceString, CommandString)
+
+ def Instantiate(self, Macros = None):
+ if Macros is None:
+ Macros = {}
+ NewRuleObject = copy.copy(self)
+ NewRuleObject.BuildTargets = {}
+ NewRuleObject.DestFileList = []
+ for File in self.DestFileList:
+ NewRuleObject.DestFileList.append(PathClass(NormPath(File, Macros)))
+ return NewRuleObject
+
+ ## Apply the rule to given source file(s)
+ #
+ # @param SourceFile One file or a list of files to be built
+ # @param RelativeToDir The relative path of the source file
+ # @param PathSeparator Path separator
+ #
+ # @retval tuple (Source file in full path, List of individual source files, Destination file, List of build commands)
+ #
+ def Apply(self, SourceFile, BuildRuleOrder=None):
+ if not self.CommandList or not self.DestFileList:
+ return None
+
+ # source file
+ if self.IsMultipleInput:
+ SrcFileName = ""
+ SrcFileBase = ""
+ SrcFileExt = ""
+ SrcFileDir = ""
+ SrcPath = ""
+ # SourceFile must be a list
+ SrcFile = "$(%s)" % self.FileListMacro
+ else:
+ SrcFileName, SrcFileBase, SrcFileExt = SourceFile.Name, SourceFile.BaseName, SourceFile.Ext
+ if SourceFile.Root:
+ SrcFileDir = SourceFile.SubDir
+ if SrcFileDir == "":
+ SrcFileDir = "."
+ else:
+ SrcFileDir = "."
+ SrcFile = SourceFile.Path
+ SrcPath = SourceFile.Dir
+
+ # destination file (the first one)
+ if self.DestFileList:
+ DestFile = self.DestFileList[0].Path
+ DestPath = self.DestFileList[0].Dir
+ DestFileName = self.DestFileList[0].Name
+ DestFileBase, DestFileExt = self.DestFileList[0].BaseName, self.DestFileList[0].Ext
+ else:
+ DestFile = ""
+ DestPath = ""
+ DestFileName = ""
+ DestFileBase = ""
+ DestFileExt = ""
+
+ BuildRulePlaceholderDict = {
+ # source file
+ "src" : SrcFile,
+ "s_path" : SrcPath,
+ "s_dir" : SrcFileDir,
+ "s_name" : SrcFileName,
+ "s_base" : SrcFileBase,
+ "s_ext" : SrcFileExt,
+ # destination file
+ "dst" : DestFile,
+ "d_path" : DestPath,
+ "d_name" : DestFileName,
+ "d_base" : DestFileBase,
+ "d_ext" : DestFileExt,
+ }
+
+ DstFile = []
+ for File in self.DestFileList:
+ File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
+ File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
+ DstFile.append(PathClass(File, IsBinary=True))
+
+ if DstFile[0] in self.BuildTargets:
+ TargetDesc = self.BuildTargets[DstFile[0]]
+ if BuildRuleOrder and SourceFile.Ext in BuildRuleOrder:
+ Index = BuildRuleOrder.index(SourceFile.Ext)
+ for Input in TargetDesc.Inputs:
+ if Input.Ext not in BuildRuleOrder or BuildRuleOrder.index(Input.Ext) > Index:
+ #
+ # Command line should be regenerated since some macros are different
+ #
+ CommandList = self._BuildCommand(BuildRulePlaceholderDict)
+ TargetDesc.InitWorker([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
+ break
+ else:
+ TargetDesc.AddInput(SourceFile)
+ else:
+ CommandList = self._BuildCommand(BuildRulePlaceholderDict)
+ TargetDesc = TargetDescBlock([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
+ TargetDesc.ListFileMacro = self.ListFileMacro
+ TargetDesc.FileListMacro = self.FileListMacro
+ TargetDesc.IncListFileMacro = self.IncListFileMacro
+ TargetDesc.GenFileListMacro = self.GenFileListMacro
+ TargetDesc.GenListFile = self.GenListFile
+ TargetDesc.GenIncListFile = self.GenIncListFile
+ self.BuildTargets[DstFile[0]] = TargetDesc
+ return TargetDesc
+
+ def _BuildCommand(self, Macros):
+ CommandList = []
+ for CommandString in self.CommandList:
+ CommandString = string.Template(CommandString).safe_substitute(Macros)
+ CommandString = string.Template(CommandString).safe_substitute(Macros)
+ CommandList.append(CommandString)
+ return CommandList
+
+## Class for build rules
+#
+# BuildRule class parses rules defined in a file or passed by caller, and converts
+# the rule into FileBuildRule object.
+#
+class BuildRule:
+ _SectionHeader = "SECTIONHEADER"
+ _Section = "SECTION"
+ _SubSectionHeader = "SUBSECTIONHEADER"
+ _SubSection = "SUBSECTION"
+ _InputFile = "INPUTFILE"
+ _OutputFile = "OUTPUTFILE"
+ _ExtraDependency = "EXTRADEPENDENCY"
+ _Command = "COMMAND"
+ _UnknownSection = "UNKNOWNSECTION"
+
+ _SubSectionList = [_InputFile, _OutputFile, _Command]
+
+ _PATH_SEP = "(+)"
+ _FileTypePattern = re.compile("^[_a-zA-Z][_\-0-9a-zA-Z]*$")
+ _BinaryFileRule = FileBuildRule(TAB_DEFAULT_BINARY_FILE, [], [os.path.join("$(OUTPUT_DIR)", "${s_name}")],
+ ["$(CP) ${src} ${dst}"], [])
+
+ ## Constructor
+ #
+ # @param File The file containing build rules in a well defined format
+ # @param Content The string list of build rules in a well defined format
+ # @param LineIndex The line number from which the parsing will begin
+ # @param SupportedFamily The list of supported tool chain families
+ #
+ def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=[TAB_COMPILER_MSFT, "INTEL", "GCC", "RVCT"]):
+ self.RuleFile = File
+ # Read build rules from file if it's not none
+ if File is not None:
+ try:
+ self.RuleContent = open(File, 'r').readlines()
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
+ elif Content is not None:
+ self.RuleContent = Content
+ else:
+ EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
+
+ self.SupportedToolChainFamilyList = SupportedFamily
+ self.RuleDatabase = tdict(True, 4) # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
+ self.Ext2FileType = {} # {ext : file-type}
+ self.FileTypeList = set()
+
+ self._LineIndex = LineIndex
+ self._State = ""
+ self._RuleInfo = tdict(True, 2) # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
+ self._FileType = ''
+ self._BuildTypeList = set()
+ self._ArchList = set()
+ self._FamilyList = []
+ self._TotalToolChainFamilySet = set()
+ self._RuleObjectList = [] # FileBuildRule object list
+ self._FileVersion = ""
+
+ self.Parse()
+
+ # some intrinsic rules
+ self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, TAB_COMMON, TAB_COMMON, TAB_COMMON] = self._BinaryFileRule
+ self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
+
+ ## Parse the build rule strings
+ def Parse(self):
+ self._State = self._Section
+ for Index in range(self._LineIndex, len(self.RuleContent)):
+ # Clean up the line and replace path separator with native one
+ Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep)
+ self.RuleContent[Index] = Line
+
+ # find the build_rule_version
+ if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) != -1:
+ if Line.find("=") != -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
+ self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]
+ # skip empty or comment line
+ if Line == "" or Line[0] == "#":
+ continue
+
+ # find out section header, enclosed by []
+ if Line[0] == '[' and Line[-1] == ']':
+ # merge last section information into rule database
+ self.EndOfSection()
+ self._State = self._SectionHeader
+ # find out sub-section header, enclosed by <>
+ elif Line[0] == '<' and Line[-1] == '>':
+ if self._State != self._UnknownSection:
+ self._State = self._SubSectionHeader
+
+ # call section handler to parse each (sub)section
+ self._StateHandler[self._State](self, Index)
+ # merge last section information into rule database
+ self.EndOfSection()
+
+ ## Parse definitions under a section
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSection(self, LineIndex):
+ pass
+
+ ## Parse definitions under a subsection
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSubSection(self, LineIndex):
+ # currently nothing here
+ pass
+
+ ## Placeholder for not supported sections
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def SkipSection(self, LineIndex):
+ pass
+
+ ## Merge section information just got into rule database
+ def EndOfSection(self):
+ Database = self.RuleDatabase
+ # if there's specific toolchain family, 'COMMON' doesn't make sense any more
+ if len(self._TotalToolChainFamilySet) > 1 and TAB_COMMON in self._TotalToolChainFamilySet:
+ self._TotalToolChainFamilySet.remove(TAB_COMMON)
+ for Family in self._TotalToolChainFamilySet:
+ Input = self._RuleInfo[Family, self._InputFile]
+ Output = self._RuleInfo[Family, self._OutputFile]
+ Command = self._RuleInfo[Family, self._Command]
+ ExtraDependency = self._RuleInfo[Family, self._ExtraDependency]
+
+ BuildRule = FileBuildRule(self._FileType, Input, Output, Command, ExtraDependency)
+ for BuildType in self._BuildTypeList:
+ for Arch in self._ArchList:
+ Database[self._FileType, BuildType, Arch, Family] = BuildRule
+ for FileExt in BuildRule.SourceFileExtList:
+ self.Ext2FileType[FileExt] = self._FileType
+
+ ## Parse section header
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSectionHeader(self, LineIndex):
+ self._RuleInfo = tdict(True, 2)
+ self._BuildTypeList = set()
+ self._ArchList = set()
+ self._FamilyList = []
+ self._TotalToolChainFamilySet = set()
+ FileType = ''
+ RuleNameList = self.RuleContent[LineIndex][1:-1].split(',')
+ for RuleName in RuleNameList:
+ Arch = TAB_COMMON
+ BuildType = TAB_COMMON
+ TokenList = [Token.strip().upper() for Token in RuleName.split('.')]
+ # old format: Build.File-Type
+ if TokenList[0] == "BUILD":
+ if len(TokenList) == 1:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+
+ FileType = TokenList[1]
+ if FileType == '':
+ EdkLogger.error("build", FORMAT_INVALID, "No file type given",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if self._FileTypePattern.match(FileType) is None:
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
+ # new format: File-Type.Build-Type.Arch
+ else:
+ if FileType == '':
+ FileType = TokenList[0]
+ elif FileType != TokenList[0]:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Different file types are not allowed in the same rule section",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if len(TokenList) > 1:
+ BuildType = TokenList[1]
+ if len(TokenList) > 2:
+ Arch = TokenList[2]
+ self._BuildTypeList.add(BuildType)
+ self._ArchList.add(Arch)
+
+ if TAB_COMMON in self._BuildTypeList and len(self._BuildTypeList) > 1:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Specific build types must not be mixed with common one",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if TAB_COMMON in self._ArchList and len(self._ArchList) > 1:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Specific ARCH must not be mixed with common one",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+
+ self._FileType = FileType
+ self._State = self._Section
+ self.FileTypeList.add(FileType)
+
+ ## Parse sub-section header
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseSubSectionHeader(self, LineIndex):
+ SectionType = ""
+ List = self.RuleContent[LineIndex][1:-1].split(',')
+ FamilyList = []
+ for Section in List:
+ TokenList = Section.split('.')
+ Type = TokenList[0].strip().upper()
+
+ if SectionType == "":
+ SectionType = Type
+ elif SectionType != Type:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Two different section types are not allowed in the same sub-section",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+
+ if len(TokenList) > 1:
+ Family = TokenList[1].strip().upper()
+ else:
+ Family = TAB_COMMON
+
+ if Family not in FamilyList:
+ FamilyList.append(Family)
+
+ self._FamilyList = FamilyList
+ self._TotalToolChainFamilySet.update(FamilyList)
+ self._State = SectionType.upper()
+ if TAB_COMMON in FamilyList and len(FamilyList) > 1:
+ EdkLogger.error("build", FORMAT_INVALID,
+ "Specific tool chain family should not be mixed with general one",
+ File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData=self.RuleContent[LineIndex])
+ if self._State not in self._StateHandler:
+ EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
+ ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])
+ ## Parse <InputFile> sub-section
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseInputFileSubSection(self, LineIndex):
+ FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
+ for ToolChainFamily in self._FamilyList:
+ if self._RuleInfo[ToolChainFamily, self._State] is None:
+ self._RuleInfo[ToolChainFamily, self._State] = []
+ self._RuleInfo[ToolChainFamily, self._State].extend(FileList)
+
+ ## Parse <ExtraDependency> sub-section
+ ## Parse <OutputFile> sub-section
+ ## Parse <Command> sub-section
+ #
+ # @param LineIndex The line index of build rule text
+ #
+ def ParseCommonSubSection(self, LineIndex):
+ for ToolChainFamily in self._FamilyList:
+ if self._RuleInfo[ToolChainFamily, self._State] is None:
+ self._RuleInfo[ToolChainFamily, self._State] = []
+ self._RuleInfo[ToolChainFamily, self._State].append(self.RuleContent[LineIndex])
+
+ ## Get a build rule via [] operator
+ #
+ # @param FileExt The extension of a file
+ # @param ToolChainFamily The tool chain family name
+ # @param BuildVersion The build version number. TAB_STAR means any rule
+ # is applicable.
+ #
+ # @retval FileType The file type string
+ # @retval FileBuildRule The object of FileBuildRule
+ #
+ # Key = (FileExt, ModuleType, Arch, ToolChainFamily)
+ def __getitem__(self, Key):
+ if not Key:
+ return None
+
+ if Key[0] in self.Ext2FileType:
+ Type = self.Ext2FileType[Key[0]]
+ elif Key[0].upper() in self.FileTypeList:
+ Type = Key[0].upper()
+ else:
+ return None
+
+ if len(Key) > 1:
+ Key = (Type,) + Key[1:]
+ else:
+ Key = (Type,)
+ return self.RuleDatabase[Key]
+
+ _StateHandler = {
+ _SectionHeader : ParseSectionHeader,
+ _Section : ParseSection,
+ _SubSectionHeader : ParseSubSectionHeader,
+ _SubSection : ParseSubSection,
+ _InputFile : ParseInputFileSubSection,
+ _OutputFile : ParseCommonSubSection,
+ _ExtraDependency : ParseCommonSubSection,
+ _Command : ParseCommonSubSection,
+ _UnknownSection : SkipSection,
+ }
+
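Note: for reference, the grammar this parser consumes is the Conf/build_rule.txt format: [FileType.BuildType.Arch] section headers, <SubSection.Family> subsection headers, and (+) as the portable path separator (_PATH_SEP above). An illustrative fragment, not a verbatim copy of the shipped file:

    [C-Code-File]
        <InputFile>
            ?.c
        <OutputFile>
            $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
        <Command.MSFT, Command.INTEL>
            "$(CC)" /Fo${dst} $(CC_FLAGS) $(INC) ${src}
        <Command.GCC>
            "$(CC)" -o ${dst} $(CC_FLAGS) $(INC) ${src}
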
+class ToolBuildRule():
+
+ def __new__(cls, *args, **kw):
+ if not hasattr(cls, '_instance'):
+ orig = super(ToolBuildRule, cls)
+ cls._instance = orig.__new__(cls, *args, **kw)
+ return cls._instance
+
+ def __init__(self):
+ if not hasattr(self, 'ToolBuildRule'):
+ self._ToolBuildRule = None
+
+ @property
+ def ToolBuildRule(self):
+ if not self._ToolBuildRule:
+ self._GetBuildRule()
+ return self._ToolBuildRule
+
+ def _GetBuildRule(self):
+ BuildRuleFile = None
+ TargetObj = TargetTxtDict()
+ TargetTxt = TargetObj.Target
+ if TAB_TAT_DEFINES_BUILD_RULE_CONF in TargetTxt.TargetTxtDictionary:
+ BuildRuleFile = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
+ if not BuildRuleFile:
+ BuildRuleFile = gDefaultBuildRuleFile
+ RetVal = BuildRule(BuildRuleFile)
+ if RetVal._FileVersion == "":
+ RetVal._FileVersion = AutoGenReqBuildRuleVerNum
+ else:
+ if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :
+ # If Build Rule's version is less than the version number required by the tools, halting the build.
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
+ % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
+ self._ToolBuildRule = RetVal
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ import sys
+ EdkLogger.Initialize()
+ if len(sys.argv) > 1:
+ Br = BuildRule(sys.argv[1])
+ print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", TAB_COMPILER_MSFT][1]))
+ print()
+ print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", "INTEL"][1]))
+ print()
+ print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", "GCC"][1]))
+ print()
+ print(str(Br[".ac", "ACPI_TABLE", "IA32", TAB_COMPILER_MSFT][1]))
+ print()
+ print(str(Br[".h", "ACPI_TABLE", "IA32", "INTEL"][1]))
+ print()
+ print(str(Br[".ac", "ACPI_TABLE", "IA32", TAB_COMPILER_MSFT][1]))
+ print()
+ print(str(Br[".s", SUP_MODULE_SEC, "IPF", "COMMON"][1]))
+ print()
+ print(str(Br[".s", SUP_MODULE_SEC][1]))
+
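Note: _BuildCommand and Apply above expand placeholders with string.Template.safe_substitute, applied twice so that placeholders introduced by the first pass (e.g. a ${dst} whose value itself contains ${...}) are expanded as well. safe_substitute also leaves make-style $(MACRO) references untouched, which is why build-rule command lines can mix both notations. A small sketch with illustrative macro values:

    import string

    macros = {"src": "Foo.c", "d_base": "Foo",
              "dst": "$(OUTPUT_DIR)/${d_base}.obj"}
    cmd = '"$(CC)" $(CC_FLAGS) /Fo${dst} ${src}'
    once = string.Template(cmd).safe_substitute(macros)
    twice = string.Template(once).safe_substitute(macros)
    assert once == '"$(CC)" $(CC_FLAGS) /Fo$(OUTPUT_DIR)/${d_base}.obj Foo.c'
    assert twice == '"$(CC)" $(CC_FLAGS) /Fo$(OUTPUT_DIR)/Foo.obj Foo.c'
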
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py
new file mode 100755
index 00000000..d5028f39
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/DataPipe.py
@@ -0,0 +1,169 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from Workspace.WorkspaceDatabase import BuildDB
+from Workspace.WorkspaceCommon import GetModuleLibInstances
+import Common.GlobalData as GlobalData
+import os
+import pickle
+from pickle import HIGHEST_PROTOCOL
+from Common import EdkLogger
+
+class PCD_DATA():
+ def __init__(self,TokenCName,TokenSpaceGuidCName,Type,DatumType,SkuInfoList,DefaultValue,
+ MaxDatumSize,UserDefinedDefaultStoresFlag,validateranges,
+ validlists,expressions,CustomAttribute,TokenValue):
+ self.TokenCName = TokenCName
+ self.TokenSpaceGuidCName = TokenSpaceGuidCName
+ self.Type = Type
+ self.DatumType = DatumType
+ self.SkuInfoList = SkuInfoList
+ self.DefaultValue = DefaultValue
+ self.MaxDatumSize = MaxDatumSize
+ self.UserDefinedDefaultStoresFlag = UserDefinedDefaultStoresFlag
+ self.validateranges = validateranges
+ self.validlists = validlists
+ self.expressions = expressions
+ self.CustomAttribute = CustomAttribute
+ self.TokenValue = TokenValue
+
+class DataPipe(object):
+ def __init__(self, BuildDir=None):
+ self.data_container = {}
+ self.BuildDir = BuildDir
+ self.dump_file = ""
+
+class MemoryDataPipe(DataPipe):
+
+ def Get(self,key):
+ return self.data_container.get(key)
+
+ def dump(self,file_path):
+ self.dump_file = file_path
+ with open(file_path,'wb') as fd:
+ pickle.dump(self.data_container,fd,pickle.HIGHEST_PROTOCOL)
+
+ def load(self,file_path):
+ with open(file_path,'rb') as fd:
+ self.data_container = pickle.load(fd)
+
+ @property
+ def DataContainer(self):
+ return self.data_container
+ @DataContainer.setter
+ def DataContainer(self,data):
+ self.data_container.update(data)
+
+ def FillData(self,PlatformInfo):
+ #Platform Pcds
+ self.DataContainer = {
+ "PLA_PCD" : [PCD_DATA(
+ pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
+ pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
+ pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
+ pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
+ for pcd in PlatformInfo.Platform.Pcds.values()]
+ }
+
+ #Platform Module Pcds
+ ModulePcds = {}
+ for m in PlatformInfo.Platform.Modules:
+ module = PlatformInfo.Platform.Modules[m]
+ m_pcds = module.Pcds
+ if m_pcds:
+ ModulePcds[module.Guid] = [PCD_DATA(
+ pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
+ pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
+ pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
+ pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
+ for pcd in PlatformInfo.Platform.Modules[m].Pcds.values()]
+
+
+ self.DataContainer = {"MOL_PCDS":ModulePcds}
+
+ #Module's Library Instance
+ ModuleLibs = {}
+ libModules = {}
+ for m in PlatformInfo.Platform.Modules:
+ module_obj = BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain]
+ Libs = GetModuleLibInstances(module_obj, PlatformInfo.Platform, BuildDB.BuildObject, PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain,PlatformInfo.MetaFile,EdkLogger)
+ for lib in Libs:
+ try:
+ libModules[(lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)].append((m.File,m.Root,module_obj.Arch,m.Path))
+ except:
+ libModules[(lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path)] = [(m.File,m.Root,module_obj.Arch,m.Path)]
+ ModuleLibs[(m.File,m.Root,module_obj.Arch,m.Path)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch,l.MetaFile.Path) for l in Libs]
+ self.DataContainer = {"DEPS":ModuleLibs}
+ self.DataContainer = {"REFS":libModules}
+
+ #Platform BuildOptions
+
+ platform_build_opt = PlatformInfo.EdkIIBuildOption
+
+ ToolDefinition = PlatformInfo.ToolDefinition
+ module_build_opt = {}
+ for m in PlatformInfo.Platform.Modules:
+ ModuleTypeOptions, PlatformModuleOptions = PlatformInfo.GetGlobalBuildOptions(BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain])
+ if ModuleTypeOptions or PlatformModuleOptions:
+ module_build_opt.update({(m.File,m.Root): {"ModuleTypeOptions":ModuleTypeOptions, "PlatformModuleOptions":PlatformModuleOptions}})
+
+ self.DataContainer = {"PLA_BO":platform_build_opt,
+ "TOOLDEF":ToolDefinition,
+ "MOL_BO":module_build_opt
+ }
+
+
+
+ #Platform Info
+ PInfo = {
+ "WorkspaceDir":PlatformInfo.Workspace.WorkspaceDir,
+ "Target":PlatformInfo.BuildTarget,
+ "ToolChain":PlatformInfo.Workspace.ToolChain,
+ "BuildRuleFile":PlatformInfo.BuildRule,
+ "Arch": PlatformInfo.Arch,
+ "ArchList":PlatformInfo.Workspace.ArchList,
+ "ActivePlatform":PlatformInfo.MetaFile
+ }
+ self.DataContainer = {'P_Info':PInfo}
+
+ self.DataContainer = {'M_Name':PlatformInfo.UniqueBaseName}
+
+ self.DataContainer = {"ToolChainFamily": PlatformInfo.ToolChainFamily}
+
+ self.DataContainer = {"BuildRuleFamily": PlatformInfo.BuildRuleFamily}
+
+ self.DataContainer = {"MixedPcd":GlobalData.MixedPcd}
+
+ self.DataContainer = {"BuildOptPcd":GlobalData.BuildOptionPcd}
+
+ self.DataContainer = {"BuildCommand": PlatformInfo.BuildCommand}
+
+ self.DataContainer = {"AsBuildModuleList": PlatformInfo._AsBuildModuleList}
+
+ self.DataContainer = {"G_defines": GlobalData.gGlobalDefines}
+
+ self.DataContainer = {"CL_defines": GlobalData.gCommandLineDefines}
+
+ self.DataContainer = {"Env_Var": {k:v for k, v in os.environ.items()}}
+
+ self.DataContainer = {"PackageList": [(dec.MetaFile,dec.Arch) for dec in PlatformInfo.PackageList]}
+
+ self.DataContainer = {"GuidDict": PlatformInfo.Platform._GuidDict}
+
+ self.DataContainer = {"DatabasePath":GlobalData.gDatabasePath}
+
+ self.DataContainer = {"FdfParser": True if GlobalData.gFdfParser else False}
+
+ self.DataContainer = {"LogLevel": EdkLogger.GetLevel()}
+
+ self.DataContainer = {"UseHashCache":GlobalData.gUseHashCache}
+
+ self.DataContainer = {"BinCacheSource":GlobalData.gBinCacheSource}
+
+ self.DataContainer = {"BinCacheDest":GlobalData.gBinCacheDest}
+
+ self.DataContainer = {"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py
new file mode 100755
index 00000000..7c7a7d53
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenC.py
@@ -0,0 +1,2111 @@
+## @file
+# Routines for generating AutoGen.h and AutoGen.c
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import absolute_import
+import string
+import collections
+import struct
+from Common import EdkLogger
+from Common import GlobalData
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+from Common.StringUtils import StringToArray
+from .StrGather import *
+from .GenPcdDb import CreatePcdDatabaseCode
+from .IdfClassObject import *
+
+## PCD type string
+gItemTypeStringDatabase = {
+ TAB_PCDS_FEATURE_FLAG : TAB_PCDS_FIXED_AT_BUILD,
+ TAB_PCDS_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE: 'BinaryPatch',
+ TAB_PCDS_DYNAMIC : '',
+ TAB_PCDS_DYNAMIC_DEFAULT : '',
+ TAB_PCDS_DYNAMIC_VPD : '',
+ TAB_PCDS_DYNAMIC_HII : '',
+ TAB_PCDS_DYNAMIC_EX : '',
+ TAB_PCDS_DYNAMIC_EX_DEFAULT : '',
+ TAB_PCDS_DYNAMIC_EX_VPD : '',
+ TAB_PCDS_DYNAMIC_EX_HII : '',
+}
+
+
+## Datum size
+gDatumSizeStringDatabase = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'BOOLEAN',TAB_VOID:'8'}
+gDatumSizeStringDatabaseH = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'BOOL',TAB_VOID:'PTR'}
+gDatumSizeStringDatabaseLib = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'Bool',TAB_VOID:'Ptr'}
+
+## AutoGen File Header Templates
+gAutoGenHeaderString = TemplateString("""\
+/**
+ DO NOT EDIT
+ FILE auto-generated
+ Module name:
+ ${FileName}
+ Abstract: Auto-generated ${FileName} for building module or library.
+**/
+""")
+
+gAutoGenHPrologueString = TemplateString("""
+#ifndef _${File}_${Guid}
+#define _${File}_${Guid}
+
+""")
+
+gAutoGenHCppPrologueString = """\
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+"""
+
+gAutoGenHEpilogueString = """
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+"""
+
+## PEI Core Entry Point Templates
+gPeiCoreEntryPointPrototype = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+${Function} (
+ IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
+ IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
+ IN VOID *Context
+ );
+${END}
+""")
+
+gPeiCoreEntryPointString = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+ProcessModuleEntryPointList (
+ IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
+ IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
+ IN VOID *Context
+ )
+
+{
+ ${Function} (SecCoreData, PpiList, Context);
+}
+${END}
+""")
+
+
+## DXE Core Entry Point Templates
+gDxeCoreEntryPointPrototype = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+${Function} (
+ IN VOID *HobStart
+ );
+${END}
+""")
+
+gDxeCoreEntryPointString = TemplateString("""
+${BEGIN}
+VOID
+EFIAPI
+ProcessModuleEntryPointList (
+ IN VOID *HobStart
+ )
+
+{
+ ${Function} (HobStart);
+}
+${END}
+""")
+
+## PEIM Entry Point Templates
+gPeimEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ );
+${END}
+""")
+
+gPeimEntryPointString = [
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+
+{
+ return ${Function} (FileHandle, PeiServices);
+}
+${END}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+
+{
+ EFI_STATUS Status;
+ EFI_STATUS CombinedStatus;
+
+ CombinedStatus = EFI_LOAD_ERROR;
+${BEGIN}
+ Status = ${Function} (FileHandle, PeiServices);
+ if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
+ CombinedStatus = Status;
+ }
+${END}
+ return CombinedStatus;
+}
+""")
+]
+
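Note: the three templates in this list correspond to the module's entry-point count: index 0 for no entry points, index 1 for exactly one, and index 2 for several, whose statuses are combined. The selection happens in the entry-point generation routines later in GenC.py (outside this hunk); roughly, and only as a hedged sketch:

    # hypothetical helper mirroring the index-by-count convention
    def pick_entry_point_template(templates, num_entry_points):
        return templates[min(num_entry_points, 2)]
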
+## SMM_CORE Entry Point Templates
+gSmmCoreEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gSmmCoreEntryPointString = TemplateString("""
+${BEGIN}
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ return ${Function} (ImageHandle, SystemTable);
+}
+${END}
+""")
+
+## MM_CORE_STANDALONE Entry Point Templates
+gMmCoreStandaloneEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN VOID *HobStart
+ );
+${END}
+""")
+
+gMmCoreStandaloneEntryPointString = TemplateString("""
+${BEGIN}
+const UINT32 _gMmRevision = ${PiSpecVersion};
+
+VOID
+EFIAPI
+ProcessModuleEntryPointList (
+ IN VOID *HobStart
+ )
+{
+ ${Function} (HobStart);
+}
+${END}
+""")
+
+## MM_STANDALONE Entry Point Templates
+gMmStandaloneEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ );
+${END}
+""")
+
+gMmStandaloneEntryPointString = [
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+
+{
+ return ${Function} (ImageHandle, MmSystemTable);
+}
+${END}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+
+{
+ EFI_STATUS Status;
+ EFI_STATUS CombinedStatus;
+
+ CombinedStatus = EFI_LOAD_ERROR;
+${BEGIN}
+ Status = ${Function} (ImageHandle, MmSystemTable);
+ if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
+ CombinedStatus = Status;
+ }
+${END}
+ return CombinedStatus;
+}
+""")
+]
+
+## DXE SMM Entry Point Templates
+gDxeSmmEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gDxeSmmEntryPointString = [
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
+static EFI_STATUS mDriverEntryPointStatus;
+
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
+ mDriverEntryPointStatus = Status;
+ }
+ LongJump (&mJumpContext, (UINTN)-1);
+ ASSERT (FALSE);
+}
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ mDriverEntryPointStatus = EFI_LOAD_ERROR;
+
+${BEGIN}
+ if (SetJump (&mJumpContext) == 0) {
+ ExitDriver (${Function} (ImageHandle, SystemTable));
+ ASSERT (FALSE);
+ }
+${END}
+
+ return mDriverEntryPointStatus;
+}
+""")
+]
+
+## UEFI Driver Entry Point Templates
+gUefiDriverEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gUefiDriverEntryPointString = [
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+
+{
+ return ${Function} (ImageHandle, SystemTable);
+}
+${END}
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (EFI_ERROR (Status)) {
+ ProcessLibraryDestructorList (gImageHandle, gST);
+ }
+ gBS->Exit (gImageHandle, Status, 0, NULL);
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+const UINT32 _gDxeRevision = ${PiSpecVersion};
+
+static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
+static EFI_STATUS mDriverEntryPointStatus;
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ mDriverEntryPointStatus = EFI_LOAD_ERROR;
+ ${BEGIN}
+ if (SetJump (&mJumpContext) == 0) {
+ ExitDriver (${Function} (ImageHandle, SystemTable));
+ ASSERT (FALSE);
+ }
+ ${END}
+ return mDriverEntryPointStatus;
+}
+
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
+ mDriverEntryPointStatus = Status;
+ }
+ LongJump (&mJumpContext, (UINTN)-1);
+ ASSERT (FALSE);
+}
+""")
+]
+
+
+## UEFI Application Entry Point Templates
+gUefiApplicationEntryPointPrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );
+${END}
+""")
+
+gUefiApplicationEntryPointString = [
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+  )
+{
+ return ${Function} (ImageHandle, SystemTable);
+}
+${END}
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (EFI_ERROR (Status)) {
+ ProcessLibraryDestructorList (gImageHandle, gST);
+ }
+ gBS->Exit (gImageHandle, Status, 0, NULL);
+}
+"""),
+TemplateString("""
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
+
+static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
+static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR;
+
+EFI_STATUS
+EFIAPI
+ProcessModuleEntryPointList (
+  IN EFI_HANDLE        ImageHandle,
+  IN EFI_SYSTEM_TABLE  *SystemTable
+  )
+{
+  ${BEGIN}
+  if (SetJump (&mJumpContext) == 0) {
+    ExitDriver (${Function} (ImageHandle, SystemTable));
+    ASSERT (FALSE);
+  }
+  ${END}
+  return mDriverEntryPointStatus;
+}
+
+VOID
+EFIAPI
+ExitDriver (
+ IN EFI_STATUS Status
+ )
+{
+ if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
+ mDriverEntryPointStatus = Status;
+ }
+ LongJump (&mJumpContext, (UINTN)-1);
+ ASSERT (FALSE);
+}
+""")
+]
+
+## UEFI Unload Image Templates
+gUefiUnloadImagePrototype = TemplateString("""
+${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle
+ );
+${END}
+""")
+
+gUefiUnloadImageString = [
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleUnloadList (
+ IN EFI_HANDLE ImageHandle
+ )
+{
+ return EFI_SUCCESS;
+}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
+
+${BEGIN}
+EFI_STATUS
+EFIAPI
+ProcessModuleUnloadList (
+ IN EFI_HANDLE ImageHandle
+ )
+{
+ return ${Function} (ImageHandle);
+}
+${END}
+"""),
+TemplateString("""
+GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
+
+EFI_STATUS
+EFIAPI
+ProcessModuleUnloadList (
+ IN EFI_HANDLE ImageHandle
+ )
+{
+ EFI_STATUS Status;
+
+ Status = EFI_SUCCESS;
+${BEGIN}
+ if (EFI_ERROR (Status)) {
+ ${Function} (ImageHandle);
+ } else {
+ Status = ${Function} (ImageHandle);
+ }
+${END}
+ return Status;
+}
+""")
+]
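+
+# The third template above keeps invoking unload handlers after a failure: the
+# first error is latched in Status while the remaining handlers still run (their
+# return values are discarded).  E.g. with hypothetical handlers UnloadA
+# (returning EFI_ACCESS_DENIED) and UnloadB, the generated
+# ProcessModuleUnloadList() calls both and returns EFI_ACCESS_DENIED.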
+
+gLibraryStructorPrototype = {
+SUP_MODULE_BASE : TemplateString("""${BEGIN}
+RETURN_STATUS
+EFIAPI
+${Function} (
+ VOID
+ );${END}
+"""),
+
+'PEI' : TemplateString("""${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ );${END}
+"""),
+
+'DXE' : TemplateString("""${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ );${END}
+"""),
+
+'MM' : TemplateString("""${BEGIN}
+EFI_STATUS
+EFIAPI
+${Function} (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ );${END}
+"""),
+}
+
+gLibraryStructorCall = {
+SUP_MODULE_BASE : TemplateString("""${BEGIN}
+ Status = ${Function} ();
+ ASSERT_RETURN_ERROR (Status);${END}
+"""),
+
+'PEI' : TemplateString("""${BEGIN}
+ Status = ${Function} (FileHandle, PeiServices);
+ ASSERT_EFI_ERROR (Status);${END}
+"""),
+
+'DXE' : TemplateString("""${BEGIN}
+ Status = ${Function} (ImageHandle, SystemTable);
+ ASSERT_EFI_ERROR (Status);${END}
+"""),
+
+'MM' : TemplateString("""${BEGIN}
+ Status = ${Function} (ImageHandle, MmSystemTable);
+ ASSERT_EFI_ERROR (Status);${END}
+"""),
+}
+
+## Library Constructor and Destructor Templates
+gLibraryString = {
+SUP_MODULE_BASE : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ VOID
+ )
+{
+${BEGIN} RETURN_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+
+'PEI' : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ IN EFI_PEI_FILE_HANDLE FileHandle,
+ IN CONST EFI_PEI_SERVICES **PeiServices
+ )
+{
+${BEGIN} EFI_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+
+'DXE' : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_SYSTEM_TABLE *SystemTable
+ )
+{
+${BEGIN} EFI_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+
+'MM' : TemplateString("""
+${BEGIN}${FunctionPrototype}${END}
+
+VOID
+EFIAPI
+ProcessLibrary${Type}List (
+ IN EFI_HANDLE ImageHandle,
+ IN EFI_MM_SYSTEM_TABLE *MmSystemTable
+ )
+{
+${BEGIN} EFI_STATUS Status;
+${FunctionCall}${END}
+}
+"""),
+}
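+
+# Illustrative 'DXE' rendering of the templates above, assuming one
+# hypothetical library constructor named FooLibConstructor:
+#
+#   EFI_STATUS
+#   EFIAPI
+#   FooLibConstructor (
+#     IN EFI_HANDLE        ImageHandle,
+#     IN EFI_SYSTEM_TABLE  *SystemTable
+#     );
+#
+#   VOID
+#   EFIAPI
+#   ProcessLibraryConstructorList (
+#     IN EFI_HANDLE        ImageHandle,
+#     IN EFI_SYSTEM_TABLE  *SystemTable
+#     )
+#   {
+#     EFI_STATUS  Status;
+#     Status = FooLibConstructor (ImageHandle, SystemTable);
+#     ASSERT_EFI_ERROR (Status);
+#   }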
+
+gBasicHeaderFile = "Base.h"
+
+gModuleTypeHeaderFile = {
+ SUP_MODULE_BASE : [gBasicHeaderFile, "Library/DebugLib.h"],
+ SUP_MODULE_SEC : ["PiPei.h", "Library/DebugLib.h"],
+ SUP_MODULE_PEI_CORE : ["PiPei.h", "Library/DebugLib.h", "Library/PeiCoreEntryPoint.h"],
+ SUP_MODULE_PEIM : ["PiPei.h", "Library/DebugLib.h", "Library/PeimEntryPoint.h"],
+ SUP_MODULE_DXE_CORE : ["PiDxe.h", "Library/DebugLib.h", "Library/DxeCoreEntryPoint.h"],
+ SUP_MODULE_DXE_DRIVER : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_DXE_SMM_DRIVER : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_DXE_RUNTIME_DRIVER: ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_DXE_SAL_DRIVER : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_UEFI_DRIVER : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_UEFI_APPLICATION : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiApplicationEntryPoint.h"],
+ SUP_MODULE_SMM_CORE : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiDriverEntryPoint.h"],
+ SUP_MODULE_MM_STANDALONE : ["PiMm.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/StandaloneMmDriverEntryPoint.h"],
+ SUP_MODULE_MM_CORE_STANDALONE : ["PiMm.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/StandaloneMmCoreEntryPoint.h"],
+ SUP_MODULE_USER_DEFINED : [gBasicHeaderFile, "Library/DebugLib.h"],
+ SUP_MODULE_HOST_APPLICATION : [gBasicHeaderFile, "Library/DebugLib.h"]
+}
+
+## Autogen internal worker function that defines DynamicEx PCD macros whose names include both
+#  the TokenSpaceGuidName and the TokenName, using a Guid comparison to avoid define name collisions.
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenH The TemplateString object for header file
+#
+#
+def DynExPcdTokenNumberMapping(Info, AutoGenH):
+ ExTokenCNameList = []
+ PcdExList = []
+    # Even if it is a Library, the PCDs are saved in the ModulePcdList
+ PcdList = Info.ModulePcdList
+ for Pcd in PcdList:
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ ExTokenCNameList.append(Pcd.TokenCName)
+ PcdExList.append(Pcd)
+ if len(ExTokenCNameList) == 0:
+ return
+ AutoGenH.Append('\n#define COMPAREGUID(Guid1, Guid2) (BOOLEAN)(*(CONST UINT64*)Guid1 == *(CONST UINT64*)Guid2 && *((CONST UINT64*)Guid1 + 1) == *((CONST UINT64*)Guid2 + 1))\n')
+ # AutoGen for each PCD listed in a [PcdEx] section of a Module/Lib INF file.
+ # Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
+ # Use the Guid pointer to see if it matches any of the token space GUIDs.
+ TokenCNameList = set()
+ for TokenCName in ExTokenCNameList:
+ if TokenCName in TokenCNameList:
+ continue
+ Index = 0
+ Count = ExTokenCNameList.count(TokenCName)
+ for Pcd in PcdExList:
+ RealTokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ RealTokenCName = PcdItem[0]
+ break
+ if Pcd.TokenCName == TokenCName:
+ Index = Index + 1
+ if Index == 1:
+ AutoGenH.Append('\n#define __PCD_%s_ADDR_CMP(GuidPtr) (' % (RealTokenCName))
+ AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ else:
+ AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ if Index == Count:
+ AutoGenH.Append('0 \\\n )\n')
+ TokenCNameList.add(TokenCName)
+
+ TokenCNameList = set()
+ for TokenCName in ExTokenCNameList:
+ if TokenCName in TokenCNameList:
+ continue
+ Index = 0
+ Count = ExTokenCNameList.count(TokenCName)
+ for Pcd in PcdExList:
+ RealTokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ RealTokenCName = PcdItem[0]
+ break
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == TokenCName:
+ Index = Index + 1
+ if Index == 1:
+ AutoGenH.Append('\n#define __PCD_%s_VAL_CMP(GuidPtr) (' % (RealTokenCName))
+ AutoGenH.Append('\\\n (GuidPtr == NULL) ? 0:')
+ AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ else:
+ AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
+ if Index == Count:
+ AutoGenH.Append('0 \\\n )\n')
+    # Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID;
+    # Guid2 is a C name for a GUID. Pointers are compared first because an optimizing
+    # compiler can resolve that comparison at build time for CONST GUID pointers and
+    # optimize away the call to COMPAREGUID(). COMPAREGUID() is only used if the Guid
+    # passed in is local to the module.
+ AutoGenH.Append('#define _PCD_TOKEN_EX_%s(GuidPtr) __PCD_%s_ADDR_CMP(GuidPtr) ? __PCD_%s_ADDR_CMP(GuidPtr) : __PCD_%s_VAL_CMP(GuidPtr) \n'
+ % (RealTokenCName, RealTokenCName, RealTokenCName, RealTokenCName))
+ TokenCNameList.add(TokenCName)
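+
+# Illustrative output of DynExPcdTokenNumberMapping() (a sketch; the PCD name
+# PcdFoo and token space gHypoTokenSpaceGuid are hypothetical). For one such
+# DynamicEx PCD, the macros appended to AutoGen.h look like:
+#
+#   #define __PCD_PcdFoo_ADDR_CMP(GuidPtr) (\
+#     (GuidPtr == &gHypoTokenSpaceGuid) ? _PCD_TOKEN_gHypoTokenSpaceGuid_PcdFoo:0 \
+#   )
+#   #define __PCD_PcdFoo_VAL_CMP(GuidPtr) (\
+#     (GuidPtr == NULL) ? 0:\
+#     COMPAREGUID (GuidPtr, &gHypoTokenSpaceGuid) ? _PCD_TOKEN_gHypoTokenSpaceGuid_PcdFoo:0 \
+#   )
+#   #define _PCD_TOKEN_EX_PcdFoo(GuidPtr) __PCD_PcdFoo_ADDR_CMP(GuidPtr) ? __PCD_PcdFoo_ADDR_CMP(GuidPtr) : __PCD_PcdFoo_VAL_CMP(GuidPtr)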
+
+## Create code for module PCDs
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param Pcd The PCD object
+#
+def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
+ TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[Pcd.TokenSpaceGuidCName]
+ PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
+ #
+ # Write PCDs
+ #
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ PcdTokenName = '_PCD_TOKEN_' + TokenCName
+ PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
+ PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
+ PatchPcdMaxSizeVariable = '_gPcd_BinaryPatch_MaxSize_' + TokenCName
+ FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
+ FixedPcdSizeVariableName = '_gPcd_FixedAtBuild_Size_' + TokenCName
+
+ if Pcd.PcdValueFromComm:
+ Pcd.DefaultValue = Pcd.PcdValueFromComm
+ elif Pcd.PcdValueFromFdf:
+ Pcd.DefaultValue = Pcd.PcdValueFromFdf
+
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ TokenNumber = int(Pcd.TokenValue, 0)
+        # Embed the TokenSpaceGuidCName in PcdTokenName to distinguish DynamicEx PCDs that have
+        # different Guids but the same TokenCName
+ PcdExTokenName = '_PCD_TOKEN_' + Pcd.TokenSpaceGuidCName + '_' + TokenCName
+ AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
+ else:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
+            # If one of the source-built modules listed in the DSC is not listed among the
+            # FDF modules, and the INF lists a PCD that can only use the PcdsDynamic access
+            # method (it is only listed in the DEC file that declares the PCD as PcdsDynamic),
+            # then the build tool will report a warning message to notify the platform
+            # integrator that they are attempting to build a module that must be included in
+            # a flash image in order to be functional. Such Dynamic PCDs will not be added
+            # into the database unless they are used by other modules that are included in
+            # the FDF file.
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ TokenNumber = 0
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ else:
+ TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
+ AutoGenH.Append('\n#define %s %dU\n' % (PcdTokenName, TokenNumber))
+
+ EdkLogger.debug(EdkLogger.DEBUG_3, "Creating code for " + TokenCName + "." + Pcd.TokenSpaceGuidCName)
+ if Pcd.Type not in gItemTypeStringDatabase:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+
+ DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabase else gDatumSizeStringDatabase[TAB_VOID]
+ DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabaseLib else gDatumSizeStringDatabaseLib[TAB_VOID]
+ GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
+ GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
+
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if Info.IsLibrary:
+ PcdList = Info.LibraryPcdList
+ else:
+ PcdList = Info.ModulePcdList + Info.LibraryPcdList
+ PcdExCNameTest = 0
+ for PcdModule in PcdList:
+ if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
+ PcdExCNameTest += 1
+            # bail out early once more than one is found...
+ if PcdExCNameTest > 1:
+ break
+        # Stay compatible with the current code, which uses PcdToken and PcdGet/Set for DynamicEx PCDs.
+        # If PcdToken and PcdGet/Set are only used on PCDs with distinct CNames, the build should succeed.
+        # If PcdToken and PcdGet/Set are used on PCDs that have different Guids but the same CName, the build should fail.
+        if PcdExCNameTest > 1:
+            AutoGenH.Append('// Macros disabled: PcdToken and PcdGet/Set are not allowed when more than one DynamicEx PCD has the same CName but different Guids.\n')
+ AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ PcdCNameTest = 0
+ for PcdModule in Info.LibraryPcdList + Info.ModulePcdList:
+ if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
+ PcdCNameTest += 1
+            # bail out early once more than one is found...
+ if PcdCNameTest > 1:
+ break
+ if PcdCNameTest > 1:
+            EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic PCD [%s] has the same CName but different Guids. They need to be changed to the DynamicEx type to avoid the conflict.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
+ else:
+ AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ else:
+ PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
+ Const = 'const'
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ Const = ''
+ Type = ''
+ Array = ''
+ Value = Pcd.DefaultValue
+ Unicode = False
+ ValueNumber = 0
+
+ if Pcd.DatumType == 'BOOLEAN':
+ BoolValue = Value.upper()
+ if BoolValue == 'TRUE' or BoolValue == '1':
+ Value = '1U'
+ elif BoolValue == 'FALSE' or BoolValue == '0':
+ Value = '0U'
+
+ if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
+ try:
+ if Value.upper().endswith('L'):
+ Value = Value[:-1]
+ if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
+ Value = Value.lstrip('0')
+ ValueNumber = int (Value, 0)
+ except:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if ValueNumber < 0:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ elif ValueNumber > MAX_VAL_TYPE[Pcd.DatumType]:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if Pcd.DatumType == TAB_UINT64 and not Value.endswith('ULL'):
+ Value += 'ULL'
+ elif Pcd.DatumType != TAB_UINT64 and not Value.endswith('U'):
+ Value += 'U'
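+        # Example of the normalization above (a sketch with hypothetical
+        # values): a UINT64 PCD declared as "0x1234" becomes "0x1234ULL", a
+        # UINT32 PCD declared as "10" becomes "10U", and a decimal value with
+        # redundant leading zeros such as "010" is first reduced to "10" so
+        # that int(Value, 0) accepts it.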
+
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if not Pcd.MaxDatumSize:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+
+ ArraySize = int(Pcd.MaxDatumSize, 0)
+ if Value[0] == '{':
+ Type = '(VOID *)'
+ ValueSize = len(Value.split(','))
+ else:
+ if Value[0] == 'L':
+ Unicode = True
+ Value = Value.lstrip('L') #.strip('"')
+                Value = eval(Value) # translate escape characters
+ ValueSize = len(Value) + 1
+ NewValue = '{'
+ for Index in range(0, len(Value)):
+ if Unicode:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', '
+ else:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
+ if Unicode:
+ ArraySize = ArraySize // 2
+ Value = NewValue + '0 }'
+ if ArraySize < ValueSize:
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ else:
+ ArraySize = Pcd.GetPcdSize()
+ if Unicode:
+ ArraySize = ArraySize // 2
+ Array = '[%d]' % ArraySize
+ #
+    # Skip the cast for FIXED_AT_BUILD PCDs since it breaks ARM assembly.
+    # Long term, we need PCD macros that work in assembly.
+ #
+ elif Pcd.Type != TAB_PCDS_FIXED_AT_BUILD and Pcd.DatumType in TAB_PCD_NUMERIC_TYPES_VOID:
+ Value = "((%s)%s)" % (Pcd.DatumType, Value)
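+    # Worked example for the VOID* branch above (hypothetical PCD): a default
+    # value of L"Hi" with MaxDatumSize 10 expands to the UINT16 initializer
+    # "{72, 105, 0 }" (ValueSize 3, including the terminating NUL), and
+    # ArraySize becomes 10 // 2 == 5, so Array is "[5]".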
+
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PcdValueName = '_PCD_PATCHABLE_VALUE_' + TokenCName
+ else:
+ PcdValueName = '_PCD_VALUE_' + TokenCName
+
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ #
+ # For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed.
+ #
+ AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName))
+ if Unicode:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT16 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
+ AutoGenH.Append('extern %s UINT16 %s%s;\n' %(Const, PcdVariableName, Array))
+ else:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
+ AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
+
+ PcdDataSize = Pcd.GetPcdSize()
+ if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (FixedPcdSizeVariableName, PcdDataSize))
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, Pcd.MaxDatumSize))
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
+ AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (PatchPcdMaxSizeVariable, Pcd.MaxDatumSize))
+ elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
+ AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
+ AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
+
+ PcdDataSize = Pcd.GetPcdSize()
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
+
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
+ AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
+ else:
+ PcdDataSize = Pcd.GetPcdSize()
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
+
+ AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
+ AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
+
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
+ else:
+ AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
+ AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS) \n' % (SetModeStatusName, PcdVariableName))
+ else:
+ AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
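+
+# Illustrative output of the FIXED_AT_BUILD branch above for a hypothetical
+# UINT32 PCD named PcdFoo with value 16 (a sketch of the generated lines):
+#   AutoGen.h:  #define _PCD_VALUE_PcdFoo  16U
+#               extern const UINT32 _gPcd_FixedAtBuild_PcdFoo;
+#               #define _PCD_GET_MODE_32_PcdFoo  _gPcd_FixedAtBuild_PcdFoo
+#   AutoGen.c:  GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPcd_FixedAtBuild_PcdFoo = _PCD_VALUE_PcdFoo;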
+
+## Create code for library module PCDs
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param Pcd The PCD object
+#
+def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
+ PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
+ TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (TokenCName, TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ PcdTokenName = '_PCD_TOKEN_' + TokenCName
+ FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
+ PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
+ PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
+ PatchPcdMaxSizeVariable = '_gPcd_BinaryPatch_MaxSize_' + TokenCName
+ FixedPcdSizeVariableName = '_gPcd_FixedAtBuild_Size_' + TokenCName
+
+ if Pcd.PcdValueFromComm:
+ Pcd.DefaultValue = Pcd.PcdValueFromComm
+ elif Pcd.PcdValueFromFdf:
+ Pcd.DefaultValue = Pcd.PcdValueFromFdf
+ #
+ # Write PCDs
+ #
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ TokenNumber = int(Pcd.TokenValue, 0)
+ else:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
+            # If one of the source-built modules listed in the DSC is not listed among the
+            # FDF modules, and the INF lists a PCD that can only use the PcdsDynamic access
+            # method (it is only listed in the DEC file that declares the PCD as PcdsDynamic),
+            # then the build tool will report a warning message to notify the platform
+            # integrator that they are attempting to build a module that must be included in
+            # a flash image in order to be functional. Such Dynamic PCDs will not be added
+            # into the database unless they are used by other modules that are included in
+            # the FDF file.
+ if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ TokenNumber = 0
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+ else:
+ TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
+
+ if Pcd.Type not in gItemTypeStringDatabase:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
+ ExtraData="[%s]" % str(Info))
+
+ DatumType = Pcd.DatumType
+ DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabase else gDatumSizeStringDatabase[TAB_VOID]
+ DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabaseLib else gDatumSizeStringDatabaseLib[TAB_VOID]
+ GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
+ SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
+ GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
+
+ Type = ''
+ Array = ''
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if Pcd.DefaultValue[0]== '{':
+ Type = '(VOID *)'
+ Array = '[]'
+ PcdItemType = Pcd.Type
+ if PcdItemType in PCD_DYNAMIC_EX_TYPE_SET:
+ PcdExTokenName = '_PCD_TOKEN_' + TokenSpaceGuidCName + '_' + TokenCName
+ AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
+
+ if Info.IsLibrary:
+ PcdList = Info.LibraryPcdList
+ else:
+ PcdList = Info.ModulePcdList
+ PcdExCNameTest = 0
+ for PcdModule in PcdList:
+ if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
+ PcdExCNameTest += 1
+            # bail out early once more than one is found...
+ if PcdExCNameTest > 1:
+ break
+        # Stay compatible with the current code, which uses PcdGet/Set for DynamicEx PCDs.
+        # If PcdGet/Set are only used on PCDs with distinct CNames, the build should succeed.
+        # If PcdGet/Set are used on PCDs that have different Guids but the same CName, the build should fail.
+        if PcdExCNameTest > 1:
+            AutoGenH.Append('// Macros disabled: PcdToken and PcdGet/Set are not allowed when more than one DynamicEx PCD has the same CName but different Guids.\n')
+ AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
+ AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
+ else:
+ AutoGenH.Append('#define _PCD_TOKEN_%s %dU\n' % (TokenCName, TokenNumber))
+ if PcdItemType in PCD_DYNAMIC_TYPE_SET:
+ PcdList = []
+ PcdCNameList = []
+ PcdList.extend(Info.LibraryPcdList)
+ PcdList.extend(Info.ModulePcdList)
+ for PcdModule in PcdList:
+ if PcdModule.Type in PCD_DYNAMIC_TYPE_SET:
+ PcdCNameList.append(PcdModule.TokenCName)
+ if PcdCNameList.count(Pcd.TokenCName) > 1:
+            EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic PCD [%s] has the same CName but different Guids. They need to be changed to the DynamicEx type to avoid the conflict.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
+ else:
+ AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
+ if DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ else:
+ AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
+ AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
+ if PcdItemType == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[TAB_PCDS_PATCHABLE_IN_MODULE] + '_' + TokenCName
+ if DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if DatumType == TAB_VOID and Array == '[]':
+ DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
+ else:
+ DatumType = TAB_UINT8
+ AutoGenH.Append('extern %s _gPcd_BinaryPatch_%s%s;\n' %(DatumType, TokenCName, Array))
+ else:
+ AutoGenH.Append('extern volatile %s %s%s;\n' % (DatumType, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName))
+ PcdDataSize = Pcd.GetPcdSize()
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, TokenCName, PatchPcdSizeVariableName, PatchPcdMaxSizeVariable))
+ AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, TokenCName, PatchPcdSizeVariableName, PatchPcdMaxSizeVariable))
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PatchPcdMaxSizeVariable))
+ AutoGenH.Append('extern const UINTN %s; \n' % PatchPcdMaxSizeVariable)
+ else:
+ AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
+ AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS)\n' % (SetModeStatusName, PcdVariableName))
+ AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
+
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, PatchPcdSizeVariableName))
+ AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
+
+ if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
+ key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
+ if DatumType == TAB_VOID and Array == '[]':
+ DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
+ if DatumType not in TAB_PCD_NUMERIC_TYPES_VOID:
+ DatumType = TAB_UINT8
+ AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
+ AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
+ AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
+
+ ConstFixedPcd = False
+ if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and (key in Info.ConstPcd or (Info.IsLibrary and not Info.ReferenceModules)):
+ ConstFixedPcd = True
+ if key in Info.ConstPcd:
+ Pcd.DefaultValue = Info.ConstPcd[key]
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ AutoGenH.Append('#define _PCD_VALUE_%s %s%s\n' %(TokenCName, Type, PcdVariableName))
+ else:
+ AutoGenH.Append('#define _PCD_VALUE_%s %s\n' %(TokenCName, Pcd.DefaultValue))
+ PcdDataSize = Pcd.GetPcdSize()
+ if PcdItemType == TAB_PCDS_FIXED_AT_BUILD:
+ if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
+ if ConstFixedPcd:
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
+ else:
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixedPcdSizeVariableName))
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, FixedPcdSizeVariableName))
+ AutoGenH.Append('extern const UINTN %s; \n' % FixedPcdSizeVariableName)
+ else:
+ AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
+ AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
+
+## Create code for library constructor
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
+ #
+ # Library Constructors
+ #
+ ConstructorPrototypeString = TemplateString()
+ ConstructorCallingString = TemplateString()
+ if Info.IsLibrary:
+ DependentLibraryList = [Info.Module]
+ else:
+ DependentLibraryList = Info.DependentLibraryList
+ for Lib in DependentLibraryList:
+ if len(Lib.ConstructorList) <= 0:
+ continue
+ Dict = {'Function':Lib.ConstructorList}
+ if Lib.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC]:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype[SUP_MODULE_BASE].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall[SUP_MODULE_BASE].Replace(Dict))
+ if Info.ModuleType not in [SUP_MODULE_BASE, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ if Lib.ModuleType in SUP_MODULE_SET_PEI:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ ConstructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
+ ConstructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))
+
+ if str(ConstructorPrototypeString) == '':
+ ConstructorPrototypeList = []
+ else:
+ ConstructorPrototypeList = [str(ConstructorPrototypeString)]
+ if str(ConstructorCallingString) == '':
+ ConstructorCallingList = []
+ else:
+ ConstructorCallingList = [str(ConstructorCallingString)]
+
+ Dict = {
+ 'Type' : 'Constructor',
+ 'FunctionPrototype' : ConstructorPrototypeList,
+ 'FunctionCall' : ConstructorCallingList
+ }
+ if Info.IsLibrary:
+ AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
+ else:
+ if Info.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
+ elif Info.ModuleType in SUP_MODULE_SET_PEI:
+ AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
+
+## Create code for library destructor
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
+ #
+ # Library Destructors
+ #
+ DestructorPrototypeString = TemplateString()
+ DestructorCallingString = TemplateString()
+ if Info.IsLibrary:
+ DependentLibraryList = [Info.Module]
+ else:
+ DependentLibraryList = Info.DependentLibraryList
+ for Index in range(len(DependentLibraryList)-1, -1, -1):
+ Lib = DependentLibraryList[Index]
+ if len(Lib.DestructorList) <= 0:
+ continue
+ Dict = {'Function':Lib.DestructorList}
+ if Lib.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC]:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype[SUP_MODULE_BASE].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall[SUP_MODULE_BASE].Replace(Dict))
+ if Info.ModuleType not in [SUP_MODULE_BASE, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ if Lib.ModuleType in SUP_MODULE_SET_PEI:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
+ elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ DestructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
+ DestructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))
+
+ if str(DestructorPrototypeString) == '':
+ DestructorPrototypeList = []
+ else:
+ DestructorPrototypeList = [str(DestructorPrototypeString)]
+ if str(DestructorCallingString) == '':
+ DestructorCallingList = []
+ else:
+ DestructorCallingList = [str(DestructorCallingString)]
+
+ Dict = {
+ 'Type' : 'Destructor',
+ 'FunctionPrototype' : DestructorPrototypeList,
+ 'FunctionCall' : DestructorCallingList
+ }
+ if Info.IsLibrary:
+ AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
+ else:
+ if Info.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
+ AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
+ elif Info.ModuleType in SUP_MODULE_SET_PEI:
+ AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
+ SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
+ AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
+ AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
+
+
+## Create code for ModuleEntryPoint
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
+ if Info.IsLibrary or Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_SEC]:
+ return
+ #
+ # Module Entry Points
+ #
+ NumEntryPoints = len(Info.Module.ModuleEntryPointList)
+ if 'PI_SPECIFICATION_VERSION' in Info.Module.Specification:
+ PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION']
+ else:
+ PiSpecVersion = '0x00000000'
+ if 'UEFI_SPECIFICATION_VERSION' in Info.Module.Specification:
+ UefiSpecVersion = Info.Module.Specification['UEFI_SPECIFICATION_VERSION']
+ else:
+ UefiSpecVersion = '0x00000000'
+ Dict = {
+ 'Function' : Info.Module.ModuleEntryPointList,
+ 'PiSpecVersion' : PiSpecVersion + 'U',
+ 'UefiSpecVersion': UefiSpecVersion + 'U'
+ }
+
+ if Info.ModuleType in [SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE]:
+ if Info.SourceFileList:
+ if NumEntryPoints != 1:
+ EdkLogger.error(
+ "build",
+ AUTOGEN_ERROR,
+ '%s must have exactly one entry point' % Info.ModuleType,
+ File=str(Info),
+ ExtraData= ", ".join(Info.Module.ModuleEntryPointList)
+ )
+ if Info.ModuleType == SUP_MODULE_PEI_CORE:
+ AutoGenC.Append(gPeiCoreEntryPointString.Replace(Dict))
+ AutoGenH.Append(gPeiCoreEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_DXE_CORE:
+ AutoGenC.Append(gDxeCoreEntryPointString.Replace(Dict))
+ AutoGenH.Append(gDxeCoreEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_SMM_CORE:
+ AutoGenC.Append(gSmmCoreEntryPointString.Replace(Dict))
+ AutoGenH.Append(gSmmCoreEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
+ AutoGenC.Append(gMmCoreStandaloneEntryPointString.Replace(Dict))
+ AutoGenH.Append(gMmCoreStandaloneEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_PEIM:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gPeimEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gUefiDriverEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gUefiDriverEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
+ if NumEntryPoints == 0:
+ AutoGenC.Append(gDxeSmmEntryPointString[0].Replace(Dict))
+ else:
+ AutoGenC.Append(gDxeSmmEntryPointString[1].Replace(Dict))
+ AutoGenH.Append(gDxeSmmEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_MM_STANDALONE:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gMmStandaloneEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gMmStandaloneEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gMmStandaloneEntryPointPrototype.Replace(Dict))
+ elif Info.ModuleType == SUP_MODULE_UEFI_APPLICATION:
+ if NumEntryPoints < 2:
+ AutoGenC.Append(gUefiApplicationEntryPointString[NumEntryPoints].Replace(Dict))
+ else:
+ AutoGenC.Append(gUefiApplicationEntryPointString[2].Replace(Dict))
+ AutoGenH.Append(gUefiApplicationEntryPointPrototype.Replace(Dict))
+
+## Create code for ModuleUnloadImage
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH):
+ if Info.IsLibrary or Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE, SUP_MODULE_SEC]:
+ return
+ #
+ # Unload Image Handlers
+ #
+ NumUnloadImage = len(Info.Module.ModuleUnloadImageList)
+ Dict = {'Count':str(NumUnloadImage) + 'U', 'Function':Info.Module.ModuleUnloadImageList}
+ if NumUnloadImage < 2:
+ AutoGenC.Append(gUefiUnloadImageString[NumUnloadImage].Replace(Dict))
+ else:
+ AutoGenC.Append(gUefiUnloadImageString[2].Replace(Dict))
+ AutoGenH.Append(gUefiUnloadImagePrototype.Replace(Dict))
+
+## Create code for GUID
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH):
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+
+ if Info.GuidList:
+ if not Info.IsLibrary:
+ AutoGenC.Append("\n// Guids\n")
+ AutoGenH.Append("\n// Guids\n")
+ #
+ # GUIDs
+ #
+ for Key in Info.GuidList:
+ if not Info.IsLibrary:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.GuidList[Key]))
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
+
+## Create code for protocol
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH):
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+
+ if Info.ProtocolList:
+ if not Info.IsLibrary:
+ AutoGenC.Append("\n// Protocols\n")
+ AutoGenH.Append("\n// Protocols\n")
+ #
+ # Protocol GUIDs
+ #
+ for Key in Info.ProtocolList:
+ if not Info.IsLibrary:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.ProtocolList[Key]))
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
+
+## Create code for PPI
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH):
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+
+ if Info.PpiList:
+ if not Info.IsLibrary:
+ AutoGenC.Append("\n// PPIs\n")
+ AutoGenH.Append("\n// PPIs\n")
+ #
+ # PPI GUIDs
+ #
+ for Key in Info.PpiList:
+ if not Info.IsLibrary:
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.PpiList[Key]))
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
+
+## Create code for PCD
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreatePcdCode(Info, AutoGenC, AutoGenH):
+
+    # Collect Token Space GUIDs used by DynamicEx PCDs
+ TokenSpaceList = []
+ for Pcd in Info.ModulePcdList:
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
+ TokenSpaceList.append(Pcd.TokenSpaceGuidCName)
+
+ SkuMgr = Info.PlatformInfo.Platform.SkuIdMgr
+ AutoGenH.Append("\n// Definition of SkuId Array\n")
+ AutoGenH.Append("extern UINT64 _gPcd_SkuId_Array[];\n")
+ # Add extern declarations to AutoGen.h if one or more Token Space GUIDs were found
+ if TokenSpaceList:
+ AutoGenH.Append("\n// Definition of PCD Token Space GUIDs used in this module\n\n")
+ if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
+ GuidType = TAB_GUID
+ else:
+ GuidType = "EFI_GUID"
+ for Item in TokenSpaceList:
+ AutoGenH.Append('extern %s %s;\n' % (GuidType, Item))
+
+ if Info.IsLibrary:
+ if Info.ModulePcdList:
+ AutoGenH.Append("\n// PCD definitions\n")
+ for Pcd in Info.ModulePcdList:
+ CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd)
+ DynExPcdTokenNumberMapping (Info, AutoGenH)
+ else:
+ AutoGenC.Append("\n// Definition of SkuId Array\n")
+ AutoGenC.Append("GLOBAL_REMOVE_IF_UNREFERENCED UINT64 _gPcd_SkuId_Array[] = %s;\n" % SkuMgr.DumpSkuIdArrary())
+ if Info.ModulePcdList:
+ AutoGenH.Append("\n// Definition of PCDs used in this module\n")
+ AutoGenC.Append("\n// Definition of PCDs used in this module\n")
+ for Pcd in Info.ModulePcdList:
+ CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd)
+ DynExPcdTokenNumberMapping (Info, AutoGenH)
+ if Info.LibraryPcdList:
+ AutoGenH.Append("\n// Definition of PCDs used in libraries is in AutoGen.c\n")
+ AutoGenC.Append("\n// Definition of PCDs used in libraries\n")
+ for Pcd in Info.LibraryPcdList:
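+                # Note: AutoGenC is deliberately passed as both the C-file and the
+                # header stream here; library PCD definitions are emitted into
+                # AutoGen.c, as the header note appended above states.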
+ CreateModulePcdCode(Info, AutoGenC, AutoGenC, Pcd)
+ CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH)
+
+## Create code for unicode string definition
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+# @param UniGenBinBuffer Buffer to store uni string package data
+#
+def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuffer):
+ WorkingDir = os.getcwd()
+ os.chdir(Info.WorkspaceDir)
+
+ IncList = [Info.MetaFile.Dir]
+ # Get all files under [Sources] section in inf file for EDK-II module
+ EDK2Module = True
+ SrcList = [F for F in Info.SourceFileList]
+
+ if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-c') > -1:
+ CompatibleMode = True
+ else:
+ CompatibleMode = False
+
+ #
+    # -s is a temporary option dedicated to building .UNI files with the ISO 639-2 language codes used by the EDK Shell in EDK2
+ #
+ if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1:
+ if CompatibleMode:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+                            "the -c and -s build options are mutually exclusive",
+ ExtraData="[%s]" % str(Info))
+ ShellMode = True
+ else:
+ ShellMode = False
+
+    # RFC 4646 language codes are used only for EDKII modules; ISO 639-2 codes are used for EDK modules
+ if EDK2Module:
+ FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.RFCLanguages]
+ else:
+ FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.ISOLanguages]
+ Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer, FilterInfo)
+ if CompatibleMode or UniGenCFlag:
+ AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n")
+ AutoGenC.Append(Code)
+ AutoGenC.Append("\n")
+ AutoGenH.Append("\n//\n//Unicode String ID\n//\n")
+ AutoGenH.Append(Header)
+ if CompatibleMode or UniGenCFlag:
+ AutoGenH.Append("\n#define STRING_ARRAY_NAME %sStrings\n" % Info.Name)
+ os.chdir(WorkingDir)
+
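+## Create code for IDF (image) files
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param StringH The TemplateString object for header file
+# @param IdfGenCFlag Image data is generated into the AutoGen C file when it is set to True
+# @param IdfGenBinBuffer Buffer to store image package data
+#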
+def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
+ if len(Info.IdfFileList) > 0:
+ ImageFiles = IdfFileClassObject(sorted (Info.IdfFileList))
+ if ImageFiles.ImageFilesDict:
+ Index = 1
+ PaletteIndex = 1
+ IncList = [Info.MetaFile.Dir]
+ SrcList = [F for F in Info.SourceFileList]
+ SkipList = ['.jpg', '.png', '.bmp', '.inf', '.idf']
+ FileList = GetFileList(SrcList, IncList, SkipList)
+ ValueStartPtr = 60
+ StringH.Append("\n//\n//Image ID\n//\n")
+ ImageInfoOffset = 0
+ PaletteInfoOffset = 0
+ ImageBuffer = pack('x')
+ PaletteBuffer = pack('x')
+ BufferStr = ''
+ PaletteStr = ''
+ FileDict = {}
+ for Idf in ImageFiles.ImageFilesDict:
+ if ImageFiles.ImageFilesDict[Idf]:
+ for FileObj in ImageFiles.ImageFilesDict[Idf]:
+ for sourcefile in Info.SourceFileList:
+ if FileObj.FileName == sourcefile.File:
+ if not sourcefile.Ext.upper() in ['.PNG', '.BMP', '.JPG']:
+                                EdkLogger.error("build", AUTOGEN_ERROR, "The extension of %s must be one of .bmp, .jpg, .png" % (FileObj.FileName), ExtraData="[%s]" % str(Info))
+ FileObj.File = sourcefile
+ break
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, "The %s in %s is not defined in the driver's [Sources] section" % (FileObj.FileName, Idf), ExtraData="[%s]" % str(Info))
+
+ for FileObj in ImageFiles.ImageFilesDict[Idf]:
+ ID = FileObj.ImageID
+ File = FileObj.File
+ try:
+ SearchImageID (FileObj, FileList)
+ if FileObj.Referenced:
+ if (ValueStartPtr - len(DEFINE_STR + ID)) <= 0:
+ Line = DEFINE_STR + ' ' + ID + ' ' + DecToHexStr(Index, 4) + '\n'
+ else:
+ Line = DEFINE_STR + ' ' + ID + ' ' * (ValueStartPtr - len(DEFINE_STR + ID)) + DecToHexStr(Index, 4) + '\n'
+
+ if File not in FileDict:
+ FileDict[File] = Index
+ else:
+ DuplicateBlock = pack('B', EFI_HII_IIBT_DUPLICATE)
+ DuplicateBlock += pack('H', FileDict[File])
+ ImageBuffer += DuplicateBlock
+ BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
+ TempBufferList = AscToHexList(DuplicateBlock)
+ BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
+ StringH.Append(Line)
+ Index += 1
+ continue
+
+ TmpFile = open(File.Path, 'rb')
+ Buffer = TmpFile.read()
+ TmpFile.close()
+ if File.Ext.upper() == '.PNG':
+ TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_PNG)
+ TempBuffer += pack('I', len(Buffer))
+ TempBuffer += Buffer
+ elif File.Ext.upper() == '.JPG':
+ ImageType, = struct.unpack('4s', Buffer[6:10])
+ if ImageType != b'JFIF':
+ EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
+ TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
+ TempBuffer += pack('I', len(Buffer))
+ TempBuffer += Buffer
+ elif File.Ext.upper() == '.BMP':
+ TempBuffer, TempPalette = BmpImageDecoder(File, Buffer, PaletteIndex, FileObj.TransParent)
+ if len(TempPalette) > 1:
+ PaletteIndex += 1
+ NewPalette = pack('H', len(TempPalette))
+ NewPalette += TempPalette
+ PaletteBuffer += NewPalette
+ PaletteStr = WriteLine(PaletteStr, '// %s: %s: %s' % (DecToHexStr(PaletteIndex - 1, 4), ID, DecToHexStr(PaletteIndex - 1, 4)))
+ TempPaletteList = AscToHexList(NewPalette)
+ PaletteStr = WriteLine(PaletteStr, CreateArrayItem(TempPaletteList, 16) + '\n')
+ ImageBuffer += TempBuffer
+ BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
+ TempBufferList = AscToHexList(TempBuffer)
+ BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
+
+ StringH.Append(Line)
+ Index += 1
+ except IOError:
+ EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=File.Path)
+
+ BufferStr = WriteLine(BufferStr, '// End of the Image Info')
+ BufferStr = WriteLine(BufferStr, CreateArrayItem(DecToHexList(EFI_HII_IIBT_END, 2)) + '\n')
+ ImageEnd = pack('B', EFI_HII_IIBT_END)
+ ImageBuffer += ImageEnd
+
+ if len(ImageBuffer) > 1:
+ ImageInfoOffset = 12
+ if len(PaletteBuffer) > 1:
+ PaletteInfoOffset = 12 + len(ImageBuffer) - 1 # -1 is for the first empty pad byte of ImageBuffer
+
+ IMAGE_PACKAGE_HDR = pack('=II', ImageInfoOffset, PaletteInfoOffset)
+ # PACKAGE_HEADER_Length = PACKAGE_HEADER + ImageInfoOffset + PaletteInfoOffset + ImageBuffer Length + PaletteCount + PaletteBuffer Length
+ if len(PaletteBuffer) > 1:
+ PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1 + 2 + len(PaletteBuffer) - 1
+ else:
+ PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1
+ if PaletteIndex > 1:
+ PALETTE_INFO_HEADER = pack('H', PaletteIndex - 1)
+ # EFI_HII_PACKAGE_HEADER length max value is 0xFFFFFF
+ Hex_Length = '%06X' % PACKAGE_HEADER_Length
+ if PACKAGE_HEADER_Length > 0xFFFFFF:
+        EdkLogger.error("build", AUTOGEN_ERROR, "The Length of EFI_HII_PACKAGE_HEADER exceeds its maximum value", ExtraData="[%s]" % str(Info))
+ PACKAGE_HEADER = pack('=HBB', int('0x' + Hex_Length[2:], 16), int('0x' + Hex_Length[0:2], 16), EFI_HII_PACKAGE_IMAGES)
+
+ IdfGenBinBuffer.write(PACKAGE_HEADER)
+ IdfGenBinBuffer.write(IMAGE_PACKAGE_HDR)
+    if len(ImageBuffer) > 1:
+ IdfGenBinBuffer.write(ImageBuffer[1:])
+ if PaletteIndex > 1:
+ IdfGenBinBuffer.write(PALETTE_INFO_HEADER)
+ if len(PaletteBuffer) > 1:
+ IdfGenBinBuffer.write(PaletteBuffer[1:])
+
+ if IdfGenCFlag:
+ TotalLength = EFI_HII_ARRAY_SIZE_LENGTH + PACKAGE_HEADER_Length
+ AutoGenC.Append("\n//\n//Image Pack Definition\n//\n")
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + Info.Module.BaseName + 'Images' + '[] = {\n')
+ AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
+ AllStr = WriteLine(AllStr, '// Image PACKAGE HEADER\n')
+ IMAGE_PACKAGE_HDR_List = AscToHexList(PACKAGE_HEADER)
+ IMAGE_PACKAGE_HDR_List += AscToHexList(IMAGE_PACKAGE_HDR)
+ AllStr = WriteLine(AllStr, CreateArrayItem(IMAGE_PACKAGE_HDR_List, 16) + '\n')
+ AllStr = WriteLine(AllStr, '// Image DATA\n')
+ if BufferStr:
+ AllStr = WriteLine(AllStr, BufferStr)
+ if PaletteStr:
+ AllStr = WriteLine(AllStr, '// Palette Header\n')
+ PALETTE_INFO_HEADER_List = AscToHexList(PALETTE_INFO_HEADER)
+ AllStr = WriteLine(AllStr, CreateArrayItem(PALETTE_INFO_HEADER_List, 16) + '\n')
+ AllStr = WriteLine(AllStr, '// Palette Data\n')
+ AllStr = WriteLine(AllStr, PaletteStr)
+ AllStr = WriteLine(AllStr, '};')
+ AutoGenC.Append(AllStr)
+ AutoGenC.Append("\n")
+ StringH.Append('\nextern unsigned char ' + Info.Module.BaseName + 'Images[];\n')
+ StringH.Append("\n#define IMAGE_ARRAY_NAME %sImages\n" % Info.Module.BaseName)
+
+# typedef struct _EFI_HII_IMAGE_PACKAGE_HDR {
+# EFI_HII_PACKAGE_HEADER Header; # Standard package header, where Header.Type = EFI_HII_PACKAGE_IMAGES
+# UINT32 ImageInfoOffset;
+# UINT32 PaletteInfoOffset;
+# } EFI_HII_IMAGE_PACKAGE_HDR;
+
+# typedef struct {
+# UINT32 Length:24;
+# UINT32 Type:8;
+# UINT8 Data[];
+# } EFI_HII_PACKAGE_HEADER;
+
+# typedef struct _EFI_HII_IMAGE_BLOCK {
+# UINT8 BlockType;
+# UINT8 BlockBody[];
+# } EFI_HII_IMAGE_BLOCK;
+
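+# Illustrative sketch (not used by the build): packing the two headers
+# documented above for an empty image package. pack() and
+# EFI_HII_PACKAGE_IMAGES are the names already used elsewhere in this file;
+# the helper itself is hypothetical.
+def _ExampleImagePackageHeaders():
+    Length = 12                          # 4-byte package header plus two UINT32 offsets
+    HexLength = '%06X' % Length          # 24-bit Length, 8-bit Type
+    PackageHeader = pack('=HBB', int(HexLength[2:], 16), int(HexLength[0:2], 16), EFI_HII_PACKAGE_IMAGES)
+    ImagePackageHdr = pack('=II', 0, 0)  # no image blocks, no palettes
+    return PackageHeader + ImagePackageHdr
+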
+def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
+ ImageType, = struct.unpack('2s', Buffer[0:2])
+    if ImageType != b'BM': # BMP file type is 'BM'
+ EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
+ BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
+ BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
+ BmpHeader = BMP_IMAGE_HEADER._make(BMP_IMAGE_HEADER_STRUCT.unpack_from(Buffer[2:]))
+    #
+    # Compressed BMP files are not supported.
+    #
+    if BmpHeader.biCompression != 0:
+        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The compressed BMP file %s is not supported." % File.Path)
+
+    # The Width and Height are of UINT16 type in the Image Package
+    if BmpHeader.biWidth > 0xFFFF:
+        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The Width of BMP file %s exceeds 0xFFFF." % File.Path)
+    if BmpHeader.biHeight > 0xFFFF:
+        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The Height of BMP file %s exceeds 0xFFFF." % File.Path)
+
+ PaletteBuffer = pack('x')
+ if BmpHeader.biBitCount == 1:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
+ ImageBuffer += pack('B', PaletteIndex)
+ Width = (BmpHeader.biWidth + 7)//8
+ if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
+ PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
+ elif BmpHeader.biBitCount == 4:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
+ ImageBuffer += pack('B', PaletteIndex)
+ Width = (BmpHeader.biWidth + 1)//2
+ if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
+ PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
+ elif BmpHeader.biBitCount == 8:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT)
+ ImageBuffer += pack('B', PaletteIndex)
+ Width = BmpHeader.biWidth
+ if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
+ PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
+ elif BmpHeader.biBitCount == 24:
+ if TransParent:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT_TRANS)
+ else:
+ ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT)
+ Width = BmpHeader.biWidth * 3
+ else:
+        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "Only 1-bit, 4-bit, 8-bit and 24-bit BMP files are supported.", ExtraData="[%s]" % str(File.Path))
+
+ ImageBuffer += pack('H', BmpHeader.biWidth)
+ ImageBuffer += pack('H', BmpHeader.biHeight)
+ Start = BmpHeader.bfOffBits
+ End = BmpHeader.bfSize - 1
+ for Height in range(0, BmpHeader.biHeight):
+ if Width % 4 != 0:
+ Start = End + (Width % 4) - 4 - Width
+ else:
+ Start = End - Width
+ ImageBuffer += Buffer[Start + 1 : Start + Width + 1]
+ End = Start
+
+    # Handle the palette info: BMP stores 4 bytes per palette entry (three color channels plus a Reserved byte), while EFI_HII_RGB_PIXEL carries only the three color channels
+ if PaletteBuffer and len(PaletteBuffer) > 1:
+ PaletteTemp = pack('x')
+ for Index in range(0, len(PaletteBuffer)):
+ if Index % 4 == 3:
+ continue
+ PaletteTemp += PaletteBuffer[Index:Index+1]
+ PaletteBuffer = PaletteTemp[1:]
+ return ImageBuffer, PaletteBuffer
+
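+# Illustrative sketch (not used by the build): the palette conversion done at
+# the end of BmpImageDecoder(). BMP stores 4 bytes per palette entry (three
+# color channels plus a Reserved byte); every fourth byte is dropped to obtain
+# the 3-byte EFI_HII_RGB_PIXEL entries.
+def _ExampleStripBmpPaletteReservedBytes():
+    BmpPalette = b'\x00\x00\x00\x00\xff\xff\xff\x00'    # two 4-byte entries
+    EfiPalette = b''.join(BmpPalette[i:i + 1] for i in range(len(BmpPalette)) if i % 4 != 3)
+    return EfiPalette                                   # two 3-byte entries
+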
+## Create common code
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateHeaderCode(Info, AutoGenC, AutoGenH):
+ # file header
+ AutoGenH.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.h'}))
+ # header file Prologue
+ AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-', '_')}))
+ AutoGenH.Append(gAutoGenHCppPrologueString)
+
+ # header files includes
+ if Info.ModuleType in gModuleTypeHeaderFile:
+ AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0])
+    #
+    # If PcdLib is listed in the [LibraryClasses] section or any Pcd section exists, add PcdLib.h.
+    # A module that uses only FixedPcd does not need PcdLib in its [LibraryClasses] section.
+    #
+ if 'PcdLib' in Info.Module.LibraryClasses or Info.Module.Pcds:
+ AutoGenH.Append("#include <Library/PcdLib.h>\n")
+
+ AutoGenH.Append('\nextern GUID gEfiCallerIdGuid;')
+ AutoGenH.Append('\nextern GUID gEdkiiDscPlatformGuid;')
+ AutoGenH.Append('\nextern CHAR8 *gEfiCallerBaseName;\n\n')
+
+ if Info.IsLibrary:
+ return
+
+ AutoGenH.Append("#define EFI_CALLER_ID_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.Guid))
+ AutoGenH.Append("#define EDKII_DSC_PLATFORM_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.PlatformInfo.Guid))
+
+ # C file header
+ AutoGenC.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.c'}))
+ # C file header files includes
+ if Info.ModuleType in gModuleTypeHeaderFile:
+ for Inc in gModuleTypeHeaderFile[Info.ModuleType]:
+ AutoGenC.Append("#include <%s>\n" % Inc)
+ else:
+ AutoGenC.Append("#include <%s>\n" % gBasicHeaderFile)
+
+ #
+ # Publish the CallerId Guid
+ #
+ AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEfiCallerIdGuid = %s;\n' % GuidStringToGuidStructureString(Info.Guid))
+ AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEdkiiDscPlatformGuid = %s;\n' % GuidStringToGuidStructureString(Info.PlatformInfo.Guid))
+ AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED CHAR8 *gEfiCallerBaseName = "%s";\n' % Info.Name)
+
+## Create common footer code for AutoGen files
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreateFooterCode(Info, AutoGenC, AutoGenH):
+ AutoGenH.Append(gAutoGenHEpilogueString)
+
+## Create code for a module
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+# @param StringH The TemplateString object for header file
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+# @param UniGenBinBuffer Buffer to store uni string package data
+# @param StringIdf The TemplateString object for header file
+# @param IdfGenCFlag IdfString is generated into AutoGen C file when it is set to True
+# @param IdfGenBinBuffer Buffer to store Idf string package data
+#
+def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer, StringIdf, IdfGenCFlag, IdfGenBinBuffer):
+ CreateHeaderCode(Info, AutoGenC, AutoGenH)
+
+ CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH)
+ CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH)
+ CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH)
+ CreatePcdCode(Info, AutoGenC, AutoGenH)
+ CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH)
+ CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH)
+ CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH)
+ CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH)
+
+ if Info.UnicodeFileList:
+ FileName = "%sStrDefs.h" % Info.Name
+ StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
+ StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-', '_')}))
+ CreateUnicodeStringCode(Info, AutoGenC, StringH, UniGenCFlag, UniGenBinBuffer)
+
+ GuidMacros = []
+ for Guid in Info.Module.Guids:
+ if Guid in Info.Module.GetGuidsUsedByPcd():
+ continue
+ GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
+ for Guid, Value in list(Info.Module.Protocols.items()) + list(Info.Module.Ppis.items()):
+ GuidMacros.append('#define %s %s' % (Guid, Value))
+ # supports FixedAtBuild and FeaturePcd usage in VFR file
+ if Info.VfrFileList and Info.ModulePcdList:
+ GuidMacros.append('#define %s %s' % ('FixedPcdGetBool(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet8(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet16(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet32(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FixedPcdGet64(TokenName)', '_PCD_VALUE_##TokenName'))
+ GuidMacros.append('#define %s %s' % ('FeaturePcdGet(TokenName)', '_PCD_VALUE_##TokenName'))
+ for Pcd in Info.ModulePcdList:
+ if Pcd.Type in [TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_FEATURE_FLAG]:
+ TokenCName = Pcd.TokenCName
+ Value = Pcd.DefaultValue
+ if Pcd.DatumType == 'BOOLEAN':
+ BoolValue = Value.upper()
+ if BoolValue == 'TRUE':
+ Value = '1'
+ elif BoolValue == 'FALSE':
+ Value = '0'
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ GuidMacros.append('#define %s %s' % ('_PCD_VALUE_'+TokenCName, Value))
+
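+    # Illustrative example: for a hypothetical FixedAtBuild PCD named
+    # PcdExample with value 1, the loop above emits
+    #   #define _PCD_VALUE_PcdExample 1
+    # inside the VFRCOMPILE-only block below, so FixedPcdGet32(PcdExample)
+    # expands to 1 when the VFR compiler preprocesses the file.
+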
+ if Info.IdfFileList:
+ GuidMacros.append('#include "%sImgDefs.h"' % Info.Name)
+
+ if GuidMacros:
+ StringH.Append('\n#ifdef VFRCOMPILE\n%s\n#endif\n' % '\n'.join(GuidMacros))
+
+ StringH.Append("\n#endif\n")
+ AutoGenH.Append('#include "%s"\n' % FileName)
+
+ if Info.IdfFileList:
+ FileName = "%sImgDefs.h" % Info.Name
+ StringIdf.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
+ StringIdf.Append(gAutoGenHPrologueString.Replace({'File':'IMAGEDEFS', 'Guid':Info.Guid.replace('-', '_')}))
+ CreateIdfFileCode(Info, AutoGenC, StringIdf, IdfGenCFlag, IdfGenBinBuffer)
+
+ StringIdf.Append("\n#endif\n")
+ AutoGenH.Append('#include "%s"\n' % FileName)
+
+ CreateFooterCode(Info, AutoGenC, AutoGenH)
+
+## Create the code file
+#
+# @param FilePath The path of code file
+# @param Content The content of code file
+# @param IsBinaryFile The flag indicating if the file is binary file or not
+#
+# @retval True If file content is changed or file doesn't exist
+# @retval False If the file exists and the content is not changed
+#
+def Generate(FilePath, Content, IsBinaryFile):
+ return SaveFileOnChange(FilePath, Content, IsBinaryFile)
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py
new file mode 100755
index 00000000..901250bb
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenDepex.py
@@ -0,0 +1,464 @@
+## @file
+# This file is used to generate the DEPEX file for a module's dependency expression
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+## Import Modules
+#
+import sys
+import Common.LongFilePathOs as os
+import re
+import traceback
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from io import BytesIO
+from struct import pack
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange
+from Common.Misc import GuidStructureStringToGuidString
+from Common.Misc import GuidStructureByteArrayToGuidString
+from Common.Misc import GuidStringToGuidStructureString
+from Common import EdkLogger as EdkLogger
+from Common.BuildVersion import gBUILD_VERSION
+from Common.DataType import *
+
+## Regular expression for matching "DEPENDENCY_START ... DEPENDENCY_END"
+gStartClosePattern = re.compile(".*DEPENDENCY_START(.+)DEPENDENCY_END.*", re.S)
+
+## Mapping between module type and EFI phase
+gType2Phase = {
+ SUP_MODULE_BASE : None,
+ SUP_MODULE_SEC : "PEI",
+ SUP_MODULE_PEI_CORE : "PEI",
+ SUP_MODULE_PEIM : "PEI",
+ SUP_MODULE_DXE_CORE : "DXE",
+ SUP_MODULE_DXE_DRIVER : "DXE",
+ SUP_MODULE_DXE_SMM_DRIVER : "DXE",
+ SUP_MODULE_DXE_RUNTIME_DRIVER: "DXE",
+ SUP_MODULE_DXE_SAL_DRIVER : "DXE",
+ SUP_MODULE_UEFI_DRIVER : "DXE",
+ SUP_MODULE_UEFI_APPLICATION : "DXE",
+ SUP_MODULE_SMM_CORE : "DXE",
+ SUP_MODULE_MM_STANDALONE : "MM",
+ SUP_MODULE_MM_CORE_STANDALONE : "MM",
+}
+
+## Convert dependency expression string into EFI internal representation
+#
+# DependencyExpression class is used to parse dependency expression string and
+# convert it into its binary form.
+#
+class DependencyExpression:
+
+ ArchProtocols = {
+ '665e3ff6-46cc-11d4-9a38-0090273fc14d', # 'gEfiBdsArchProtocolGuid'
+ '26baccb1-6f42-11d4-bce7-0080c73c8881', # 'gEfiCpuArchProtocolGuid'
+ '26baccb2-6f42-11d4-bce7-0080c73c8881', # 'gEfiMetronomeArchProtocolGuid'
+ '1da97072-bddc-4b30-99f1-72a0b56fff2a', # 'gEfiMonotonicCounterArchProtocolGuid'
+ '27cfac87-46cc-11d4-9a38-0090273fc14d', # 'gEfiRealTimeClockArchProtocolGuid'
+ '27cfac88-46cc-11d4-9a38-0090273fc14d', # 'gEfiResetArchProtocolGuid'
+ 'b7dfb4e1-052f-449f-87be-9818fc91b733', # 'gEfiRuntimeArchProtocolGuid'
+ 'a46423e3-4617-49f1-b9ff-d1bfa9115839', # 'gEfiSecurityArchProtocolGuid'
+ '26baccb3-6f42-11d4-bce7-0080c73c8881', # 'gEfiTimerArchProtocolGuid'
+ '6441f818-6362-4e44-b570-7dba31dd2453', # 'gEfiVariableWriteArchProtocolGuid'
+ '1e5668e2-8481-11d4-bcf1-0080c73c8881', # 'gEfiVariableArchProtocolGuid'
+ '665e3ff5-46cc-11d4-9a38-0090273fc14d' # 'gEfiWatchdogTimerArchProtocolGuid'
+ }
+
+ OpcodePriority = {
+ DEPEX_OPCODE_AND : 1,
+ DEPEX_OPCODE_OR : 1,
+ DEPEX_OPCODE_NOT : 2,
+ }
+
+ Opcode = {
+ "PEI" : {
+ DEPEX_OPCODE_PUSH : 0x02,
+ DEPEX_OPCODE_AND : 0x03,
+ DEPEX_OPCODE_OR : 0x04,
+ DEPEX_OPCODE_NOT : 0x05,
+ DEPEX_OPCODE_TRUE : 0x06,
+ DEPEX_OPCODE_FALSE : 0x07,
+ DEPEX_OPCODE_END : 0x08
+ },
+
+ "DXE" : {
+ DEPEX_OPCODE_BEFORE: 0x00,
+ DEPEX_OPCODE_AFTER : 0x01,
+ DEPEX_OPCODE_PUSH : 0x02,
+ DEPEX_OPCODE_AND : 0x03,
+ DEPEX_OPCODE_OR : 0x04,
+ DEPEX_OPCODE_NOT : 0x05,
+ DEPEX_OPCODE_TRUE : 0x06,
+ DEPEX_OPCODE_FALSE : 0x07,
+ DEPEX_OPCODE_END : 0x08,
+ DEPEX_OPCODE_SOR : 0x09
+ },
+
+ "MM" : {
+ DEPEX_OPCODE_BEFORE: 0x00,
+ DEPEX_OPCODE_AFTER : 0x01,
+ DEPEX_OPCODE_PUSH : 0x02,
+ DEPEX_OPCODE_AND : 0x03,
+ DEPEX_OPCODE_OR : 0x04,
+ DEPEX_OPCODE_NOT : 0x05,
+ DEPEX_OPCODE_TRUE : 0x06,
+ DEPEX_OPCODE_FALSE : 0x07,
+ DEPEX_OPCODE_END : 0x08,
+ DEPEX_OPCODE_SOR : 0x09
+ }
+ }
+
+ # all supported op codes and operands
+ SupportedOpcode = [DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER, DEPEX_OPCODE_PUSH, DEPEX_OPCODE_AND, DEPEX_OPCODE_OR, DEPEX_OPCODE_NOT, DEPEX_OPCODE_END, DEPEX_OPCODE_SOR]
+ SupportedOperand = [DEPEX_OPCODE_TRUE, DEPEX_OPCODE_FALSE]
+
+ OpcodeWithSingleOperand = [DEPEX_OPCODE_NOT, DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
+ OpcodeWithTwoOperand = [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR]
+
+    # op codes that must not be the last one
+    NonEndingOpcode = [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR, DEPEX_OPCODE_NOT, DEPEX_OPCODE_SOR]
+    # op codes that must not be present at the same time
+    ExclusiveOpcode = [DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
+    # op codes that must come first if present
+    AboveAllOpcode = [DEPEX_OPCODE_SOR, DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
+
+    #
+    # Opening and closing parentheses must be taken as individual tokens;
+    # a GUID written as a C structure ({...}) is kept as a single token.
+    #
+    TokenPattern = re.compile(r"(\(|\)|\{[^{}]+\{?[^{}]+\}?[ ]*\}|\w+)")
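+
+    # Illustrative example: TokenPattern tokenizes
+    #   "(gGuidA AND NOT gGuidB)"
+    # into ['(', 'gGuidA', 'AND', 'NOT', 'gGuidB', ')'], while a GUID written
+    # as a C structure, e.g. "{0x0, 0x6, 0x7, {0x8, 0x9}}", stays one token.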
+
+ ## Constructor
+ #
+ # @param Expression The list or string of dependency expression
+ # @param ModuleType The type of the module using the dependency expression
+ #
+ def __init__(self, Expression, ModuleType, Optimize=False):
+ self.ModuleType = ModuleType
+ self.Phase = gType2Phase[ModuleType]
+        if isinstance(Expression, list):
+ self.ExpressionString = " ".join(Expression)
+ self.TokenList = Expression
+ else:
+ self.ExpressionString = Expression
+ self.GetExpressionTokenList()
+
+ self.PostfixNotation = []
+ self.OpcodeList = []
+
+ self.GetPostfixNotation()
+ self.ValidateOpcode()
+
+ EdkLogger.debug(EdkLogger.DEBUG_8, repr(self))
+ if Optimize:
+ self.Optimize()
+ EdkLogger.debug(EdkLogger.DEBUG_8, "\n Optimized: " + repr(self))
+
+ def __str__(self):
+ return " ".join(self.TokenList)
+
+ def __repr__(self):
+ WellForm = ''
+ for Token in self.PostfixNotation:
+ if Token in self.SupportedOpcode:
+ WellForm += "\n " + Token
+ else:
+ WellForm += ' ' + Token
+ return WellForm
+
+ ## Split the expression string into token list
+ def GetExpressionTokenList(self):
+ self.TokenList = self.TokenPattern.findall(self.ExpressionString)
+
+ ## Convert token list into postfix notation
+ def GetPostfixNotation(self):
+ Stack = []
+ LastToken = ''
+ for Token in self.TokenList:
+ if Token == "(":
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before opening parenthesis",
+ ExtraData="Near %s" % LastToken)
+ Stack.append(Token)
+ elif Token == ")":
+ if '(' not in Stack:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
+ ExtraData=str(self))
+ elif LastToken in self.SupportedOpcode + ['', None]:
+                EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before closing parenthesis",
+ ExtraData="Near %s" % LastToken)
+ while len(Stack) > 0:
+ if Stack[-1] == '(':
+ Stack.pop()
+ break
+ self.PostfixNotation.append(Stack.pop())
+ elif Token in self.OpcodePriority:
+ if Token == DEPEX_OPCODE_NOT:
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before NOT",
+ ExtraData="Near %s" % LastToken)
+ elif LastToken in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before " + Token,
+ ExtraData="Near %s" % LastToken)
+
+ while len(Stack) > 0:
+ if Stack[-1] == "(" or self.OpcodePriority[Token] >= self.OpcodePriority[Stack[-1]]:
+ break
+ self.PostfixNotation.append(Stack.pop())
+ Stack.append(Token)
+ self.OpcodeList.append(Token)
+ else:
+ if Token not in self.SupportedOpcode:
+ # not OP, take it as GUID
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before %s" % Token,
+ ExtraData="Near %s" % LastToken)
+ if len(self.OpcodeList) == 0 or self.OpcodeList[-1] not in self.ExclusiveOpcode:
+ if Token not in self.SupportedOperand:
+ self.PostfixNotation.append(DEPEX_OPCODE_PUSH)
+ # check if OP is valid in this phase
+ elif Token in self.Opcode[self.Phase]:
+ if Token == DEPEX_OPCODE_END:
+ break
+ self.OpcodeList.append(Token)
+ else:
+ EdkLogger.error("GenDepex", PARSER_ERROR,
+                                    "Opcode=%s is not supported in %s stage" % (Token, self.Phase),
+ ExtraData=str(self))
+ self.PostfixNotation.append(Token)
+ LastToken = Token
+
+ # there should not be parentheses in Stack
+ if '(' in Stack or ')' in Stack:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
+ ExtraData=str(self))
+ while len(Stack) > 0:
+ self.PostfixNotation.append(Stack.pop())
+ if self.PostfixNotation[-1] != DEPEX_OPCODE_END:
+ self.PostfixNotation.append(DEPEX_OPCODE_END)
+
+ ## Validate the dependency expression
+ def ValidateOpcode(self):
+ for Op in self.AboveAllOpcode:
+ if Op in self.PostfixNotation:
+ if Op != self.PostfixNotation[0]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the first opcode in the expression" % Op,
+ ExtraData=str(self))
+ if len(self.PostfixNotation) < 3:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
+ ExtraData=str(self))
+ for Op in self.ExclusiveOpcode:
+ if Op in self.OpcodeList:
+ if len(self.OpcodeList) > 1:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the only opcode in the expression" % Op,
+ ExtraData=str(self))
+ if len(self.PostfixNotation) < 3:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
+ ExtraData=str(self))
+ if self.TokenList[-1] != DEPEX_OPCODE_END and self.TokenList[-1] in self.NonEndingOpcode:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-1],
+ ExtraData=str(self))
+ if self.TokenList[-1] == DEPEX_OPCODE_END and self.TokenList[-2] in self.NonEndingOpcode:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-2],
+ ExtraData=str(self))
+ if DEPEX_OPCODE_END in self.TokenList and DEPEX_OPCODE_END != self.TokenList[-1]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra expressions after END",
+ ExtraData=str(self))
+
+    ## Simple optimization: remove duplicated operands from the dependency expression
+ def Optimize(self):
+ OpcodeSet = set(self.OpcodeList)
+        # if there isn't exactly one kind of opcode in the set, return
+ if len(OpcodeSet) != 1:
+ return
+ Op = OpcodeSet.pop()
+        # if Op is neither AND nor OR, return
+ if Op not in [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR]:
+ return
+ NewOperand = []
+ AllOperand = set()
+ for Token in self.PostfixNotation:
+ if Token in self.SupportedOpcode or Token in NewOperand:
+ continue
+ AllOperand.add(Token)
+ if Token == DEPEX_OPCODE_TRUE:
+ if Op == DEPEX_OPCODE_AND:
+ continue
+ else:
+ NewOperand.append(Token)
+ break
+ elif Token == DEPEX_OPCODE_FALSE:
+ if Op == DEPEX_OPCODE_OR:
+ continue
+ else:
+ NewOperand.append(Token)
+ break
+ NewOperand.append(Token)
+
+ # don't generate depex if only TRUE operand left
+ if self.ModuleType == SUP_MODULE_PEIM and len(NewOperand) == 1 and NewOperand[0] == DEPEX_OPCODE_TRUE:
+ self.PostfixNotation = []
+ return
+
+ # don't generate depex if all operands are architecture protocols
+ if self.ModuleType in [SUP_MODULE_UEFI_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_MM_STANDALONE] and \
+ Op == DEPEX_OPCODE_AND and \
+ self.ArchProtocols == set(GuidStructureStringToGuidString(Guid) for Guid in AllOperand):
+ self.PostfixNotation = []
+ return
+
+ if len(NewOperand) == 0:
+ self.TokenList = list(AllOperand)
+ else:
+ self.TokenList = []
+ while True:
+ self.TokenList.append(NewOperand.pop(0))
+ if NewOperand == []:
+ break
+ self.TokenList.append(Op)
+ self.PostfixNotation = []
+ self.GetPostfixNotation()
+
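+    # Illustrative example: when AND is the only opcode present, Optimize()
+    # rewrites "gGuidA AND gGuidB AND gGuidA" (hypothetical GUID names) as
+    # "gGuidA AND gGuidB"; a duplicated operand cannot change the result of
+    # a pure AND (or pure OR) expression.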
+
+ ## Convert a GUID value in C structure format into its binary form
+ #
+ # @param Guid The GUID value in C structure format
+ #
+ # @retval array The byte array representing the GUID value
+ #
+ def GetGuidValue(self, Guid):
+ GuidValueString = Guid.replace("{", "").replace("}", "").replace(" ", "")
+ GuidValueList = GuidValueString.split(",")
+        if len(GuidValueList) == 16:
+            # a byte-array style GUID; convert it to the 11-field registry structure form first
+ GuidValueString = GuidStringToGuidStructureString(GuidStructureByteArrayToGuidString(Guid))
+ GuidValueString = GuidValueString.replace("{", "").replace("}", "").replace(" ", "")
+ GuidValueList = GuidValueString.split(",")
+ if len(GuidValueList) != 11:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid GUID value string or opcode: %s" % Guid)
+ return pack("1I2H8B", *(int(value, 16) for value in GuidValueList))
+
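+    # Illustrative example: a GUID in C structure form such as
+    #   {0x665e3ff6, 0x46cc, 0x11d4, {0x9a, 0x38, 0x0, 0x90, 0x27, 0x3f, 0xc1, 0x4d}}
+    # splits into the 11 comma-separated fields that "1I2H8B" packs into the
+    # 16-byte binary GUID stored in the depex section.
+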
+ ## Save the binary form of dependency expression in file
+ #
+ # @param File The path of file. If None is given, put the data on console
+ #
+ # @retval True If the file doesn't exist or file is changed
+ # @retval False If file exists and is not changed.
+ #
+ def Generate(self, File=None):
+ Buffer = BytesIO()
+ if len(self.PostfixNotation) == 0:
+ return False
+
+ for Item in self.PostfixNotation:
+ if Item in self.Opcode[self.Phase]:
+ Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
+ elif Item in self.SupportedOpcode:
+ EdkLogger.error("GenDepex", FORMAT_INVALID,
+ "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
+ ExtraData=self.ExpressionString)
+ else:
+ Buffer.write(self.GetGuidValue(Item))
+
+ FilePath = ""
+ FileChangeFlag = True
+ if File is None:
+            sys.stdout.buffer.write(Buffer.getvalue())   # depex content is raw bytes
+ FilePath = "STDOUT"
+ else:
+ FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)
+
+ Buffer.close()
+ return FileChangeFlag
+
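+# Illustrative example: for a DXE-phase module, the expression
+#   "gGuidA AND (gGuidB OR gGuidC)"
+# (hypothetical GUID names) becomes the postfix stream
+#   PUSH gGuidA PUSH gGuidB PUSH gGuidC OR AND END
+# which Generate() encodes with the DXE opcode table as
+#   0x02 <GuidA> 0x02 <GuidB> 0x02 <GuidC> 0x04 0x03 0x08
+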
+versionNumber = ("0.04" + " " + gBUILD_VERSION)
+__version__ = "%prog Version " + versionNumber
+__copyright__ = "Copyright (c) 2007-2018, Intel Corporation All rights reserved."
+__usage__ = "%prog [options] [dependency_expression_file]"
+
+## Parse command line options
+#
+# @retval OptionParser
+#
+def GetOptions():
+ from optparse import OptionParser
+
+ Parser = OptionParser(description=__copyright__, version=__version__, usage=__usage__)
+
+ Parser.add_option("-o", "--output", dest="OutputFile", default=None, metavar="FILE",
+ help="Specify the name of depex file to be generated")
+ Parser.add_option("-t", "--module-type", dest="ModuleType", default=None,
+ help="The type of module for which the dependency expression serves")
+ Parser.add_option("-e", "--dependency-expression", dest="Expression", default="",
+                      help="The string of dependency expression. If this option is present, the input file will be ignored.")
+ Parser.add_option("-m", "--optimize", dest="Optimize", default=False, action="store_true",
+ help="Do some simple optimization on the expression.")
+ Parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true",
+ help="build with verbose information")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-q", "--quiet", dest="quiet", default=False, action="store_true",
+ help="build with little information")
+
+ return Parser.parse_args()
+
+
+## Entrance method
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Option, Input = GetOptions()
+
+ # Set log level
+ if Option.quiet:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Option.verbose:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Option.debug is not None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ try:
+ if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
+            EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or not supported")
+
+ DxsFile = ''
+ if len(Input) > 0 and Option.Expression == "":
+ DxsFile = Input[0]
+            with open(DxsFile, 'r') as DxsFd:
+                DxsString = DxsFd.read().replace("\n", " ").replace("\r", " ")
+ DxsString = gStartClosePattern.sub("\\1", DxsString)
+ elif Option.Expression != "":
+ if Option.Expression[0] == '"':
+ DxsString = Option.Expression[1:-1]
+ else:
+ DxsString = Option.Expression
+ else:
+ EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
+
+ Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
+ if Option.OutputFile is not None:
+ FileChangeFlag = Dpx.Generate(Option.OutputFile)
+ if not FileChangeFlag and DxsFile:
+ #
+ # Touch the output file if its time stamp is older than the original
+ # DXS file to avoid re-invoke this tool for the dependency check in build rule.
+ #
+ if os.stat(DxsFile)[8] > os.stat(Option.OutputFile)[8]:
+ os.utime(Option.OutputFile, None)
+ else:
+ Dpx.Generate()
+ except BaseException as X:
+ EdkLogger.quiet("")
+ if Option is not None and Option.debug is not None:
+ EdkLogger.quiet(traceback.format_exc())
+ else:
+ EdkLogger.quiet(str(X))
+ return 1
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(Main())
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py
new file mode 100755
index 00000000..130557de
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -0,0 +1,1810 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import absolute_import
+import Common.LongFilePathOs as os
+import sys
+import string
+import re
+import os.path as path
+from Common.LongFilePathSupport import OpenLongFilePath as open
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from Common.BuildToolError import *
+from Common.Misc import *
+from Common.StringUtils import *
+from .BuildEngine import *
+import Common.GlobalData as GlobalData
+from collections import OrderedDict
+from Common.DataType import TAB_COMPILER_MSFT
+
+## Regular expression for finding header file inclusions
+gIncludePattern = re.compile(r"^[ \t]*[#%]?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
+
+## Regular expression for matching macro used in header file inclusion
+gMacroPattern = re.compile(r"([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
+
+gIsFileMap = {}
+
+## pattern for include style in Edk.x code
+gProtocolDefinition = "Protocol/%(HeaderKey)s/%(HeaderKey)s.h"
+gGuidDefinition = "Guid/%(HeaderKey)s/%(HeaderKey)s.h"
+gArchProtocolDefinition = "ArchProtocol/%(HeaderKey)s/%(HeaderKey)s.h"
+gPpiDefinition = "Ppi/%(HeaderKey)s/%(HeaderKey)s.h"
+gIncludeMacroConversion = {
+ "EFI_PROTOCOL_DEFINITION" : gProtocolDefinition,
+ "EFI_GUID_DEFINITION" : gGuidDefinition,
+ "EFI_ARCH_PROTOCOL_DEFINITION" : gArchProtocolDefinition,
+ "EFI_PROTOCOL_PRODUCER" : gProtocolDefinition,
+ "EFI_PROTOCOL_CONSUMER" : gProtocolDefinition,
+ "EFI_PROTOCOL_DEPENDENCY" : gProtocolDefinition,
+ "EFI_ARCH_PROTOCOL_PRODUCER" : gArchProtocolDefinition,
+ "EFI_ARCH_PROTOCOL_CONSUMER" : gArchProtocolDefinition,
+ "EFI_ARCH_PROTOCOL_DEPENDENCY" : gArchProtocolDefinition,
+ "EFI_PPI_DEFINITION" : gPpiDefinition,
+ "EFI_PPI_PRODUCER" : gPpiDefinition,
+ "EFI_PPI_CONSUMER" : gPpiDefinition,
+ "EFI_PPI_DEPENDENCY" : gPpiDefinition,
+}
+
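+# Illustrative example: with the mapping above, an Edk.x style inclusion such as
+#   #include EFI_PROTOCOL_DEFINITION (UsbIo)
+# is matched by gMacroPattern and expanded through gProtocolDefinition into
+#   Protocol/UsbIo/UsbIo.h
+# ("UsbIo" is just an illustrative header key).
+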
+NMAKE_FILETYPE = "nmake"
+GMAKE_FILETYPE = "gmake"
+WIN32_PLATFORM = "win32"
+POSIX_PLATFORM = "posix"
+
+## BuildFile class
+#
+# This base class encapsulates a build file and its generation. It uses a template to
+# generate the content of the build file, which comes from AutoGen objects.
+#
+class BuildFile(object):
+ ## template used to generate the build file (i.e. makefile if using make)
+ _TEMPLATE_ = TemplateString('')
+
+ _DEFAULT_FILE_NAME_ = "Makefile"
+
+ ## default file name for each type of build file
+ _FILE_NAME_ = {
+ NMAKE_FILETYPE : "Makefile",
+ GMAKE_FILETYPE : "GNUmakefile"
+ }
+
+ # Get Makefile name.
+ def getMakefileName(self):
+ if not self._FileType:
+ return self._DEFAULT_FILE_NAME_
+ else:
+ return self._FILE_NAME_[self._FileType]
+
+ ## Fixed header string for makefile
+ _MAKEFILE_HEADER = '''#
+# DO NOT EDIT
+# This file is auto-generated by build utility
+#
+# Module Name:
+#
+# %s
+#
+# Abstract:
+#
+# Auto-generated makefile for building modules, libraries or platform
+#
+ '''
+
+ ## Header string for each type of build file
+ _FILE_HEADER_ = {
+ NMAKE_FILETYPE : _MAKEFILE_HEADER % _FILE_NAME_[NMAKE_FILETYPE],
+ GMAKE_FILETYPE : _MAKEFILE_HEADER % _FILE_NAME_[GMAKE_FILETYPE]
+ }
+
+ ## shell commands which can be used in build file in the form of macro
+ # $(CP) copy file command
+ # $(MV) move file command
+ # $(RM) remove file command
+ # $(MD) create dir command
+ # $(RD) remove dir command
+ #
+ _SHELL_CMD_ = {
+ WIN32_PLATFORM : {
+ "CP" : "copy /y",
+ "MV" : "move /y",
+ "RM" : "del /f /q",
+ "MD" : "mkdir",
+ "RD" : "rmdir /s /q",
+ },
+
+ POSIX_PLATFORM : {
+ "CP" : "cp -f",
+ "MV" : "mv -f",
+ "RM" : "rm -f",
+ "MD" : "mkdir -p",
+ "RD" : "rm -r -f",
+ }
+ }
+
+ ## directory separator
+ _SEP_ = {
+ WIN32_PLATFORM : "\\",
+ POSIX_PLATFORM : "/"
+ }
+
+ ## directory creation template
+ _MD_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if not exist %(dir)s $(MD) %(dir)s',
+ POSIX_PLATFORM : "$(MD) %(dir)s"
+ }
+
+ ## directory removal template
+ _RD_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(dir)s $(RD) %(dir)s',
+ POSIX_PLATFORM : "$(RD) %(dir)s"
+ }
+ ## cp if exist
+ _CP_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(Src)s $(CP) %(Src)s %(Dst)s',
+ POSIX_PLATFORM : "test -f %(Src)s && $(CP) %(Src)s %(Dst)s"
+ }
+
+ _CD_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(dir)s cd %(dir)s',
+ POSIX_PLATFORM : "test -e %(dir)s && cd %(dir)s"
+ }
+
+ _MAKE_TEMPLATE_ = {
+ WIN32_PLATFORM : 'if exist %(file)s "$(MAKE)" $(MAKE_FLAGS) -f %(file)s',
+ POSIX_PLATFORM : 'test -e %(file)s && "$(MAKE)" $(MAKE_FLAGS) -f %(file)s'
+ }
+
+ _INCLUDE_CMD_ = {
+ NMAKE_FILETYPE : '!INCLUDE',
+ GMAKE_FILETYPE : "include"
+ }
+
+ _INC_FLAG_ = {TAB_COMPILER_MSFT : "/I", "GCC" : "-I", "INTEL" : "-I", "RVCT" : "-I", "NASM" : "-I"}
+
+ ## Constructor of BuildFile
+ #
+ # @param AutoGenObject Object of AutoGen class
+ #
+ def __init__(self, AutoGenObject):
+ self._AutoGenObject = AutoGenObject
+
+ MakePath = AutoGenObject.BuildOption.get('MAKE', {}).get('PATH')
+ if not MakePath:
+ self._FileType = ""
+ elif "nmake" in MakePath:
+ self._FileType = NMAKE_FILETYPE
+ else:
+            self._FileType = GMAKE_FILETYPE
+
+ if sys.platform == "win32":
+ self._Platform = WIN32_PLATFORM
+ else:
+ self._Platform = POSIX_PLATFORM
+
+ ## Create build file.
+ #
+ # Only nmake and gmake are supported.
+ #
+ # @retval TRUE The build file is created or re-created successfully.
+ # @retval FALSE The build file exists and is the same as the one to be generated.
+ #
+ def Generate(self):
+ FileContent = self._TEMPLATE_.Replace(self._TemplateDict)
+ FileName = self.getMakefileName()
+        for DepFile in ("deps.txt", "dependency", "deps_target"):
+            DepPath = os.path.join(self._AutoGenObject.MakeFileDir, DepFile)
+            if not os.path.exists(DepPath):
+                with open(DepPath, "w+") as fd:
+                    fd.write("")
+ return SaveFileOnChange(os.path.join(self._AutoGenObject.MakeFileDir, FileName), FileContent, False)
+
+ ## Return a list of directory creation command string
+ #
+ # @param DirList The list of directory to be created
+ #
+ # @retval list The directory creation command list
+ #
+ def GetCreateDirectoryCommand(self, DirList):
+ return [self._MD_TEMPLATE_[self._Platform] % {'dir':Dir} for Dir in DirList]
+
+ ## Return a list of directory removal command string
+ #
+ # @param DirList The list of directory to be removed
+ #
+ # @retval list The directory removal command list
+ #
+ def GetRemoveDirectoryCommand(self, DirList):
+ return [self._RD_TEMPLATE_[self._Platform] % {'dir':Dir} for Dir in DirList]
+
+ def PlaceMacro(self, Path, MacroDefinitions=None):
+ if Path.startswith("$("):
+ return Path
+ else:
+ if MacroDefinitions is None:
+ MacroDefinitions = {}
+ PathLength = len(Path)
+ for MacroName in MacroDefinitions:
+ MacroValue = MacroDefinitions[MacroName]
+ MacroValueLength = len(MacroValue)
+ if MacroValueLength == 0:
+ continue
+ if MacroValueLength <= PathLength and Path.startswith(MacroValue):
+ Path = "$(%s)%s" % (MacroName, Path[MacroValueLength:])
+ break
+ return Path
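+
+    # Illustrative example: with MacroDefinitions such as
+    #   {'WORKSPACE': '/build/ws'}          (a hypothetical mapping)
+    # PlaceMacro('/build/ws/MdePkg/Include', MacroDefinitions) returns
+    #   '$(WORKSPACE)/MdePkg/Include'
+    # while a path that already starts with "$(" is returned unchanged.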
+
+## ModuleMakefile class
+#
+# This class encapsulates the makefile and its generation for a module. It uses a template
+# to generate the content of the makefile, which comes from the ModuleAutoGen object.
+#
+class ModuleMakefile(BuildFile):
+ ## template used to generate the makefile for module
+ _TEMPLATE_ = TemplateString('''\
+${makefile_header}
+
+#
+# Platform Macro Definition
+#
+PLATFORM_NAME = ${platform_name}
+PLATFORM_GUID = ${platform_guid}
+PLATFORM_VERSION = ${platform_version}
+PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
+PLATFORM_DIR = ${platform_dir}
+PLATFORM_OUTPUT_DIR = ${platform_output_directory}
+
+#
+# Module Macro Definition
+#
+MODULE_NAME = ${module_name}
+MODULE_GUID = ${module_guid}
+MODULE_NAME_GUID = ${module_name_guid}
+MODULE_VERSION = ${module_version}
+MODULE_TYPE = ${module_type}
+MODULE_FILE = ${module_file}
+MODULE_FILE_BASE_NAME = ${module_file_base_name}
+BASE_NAME = $(MODULE_NAME)
+MODULE_RELATIVE_DIR = ${module_relative_directory}
+PACKAGE_RELATIVE_DIR = ${package_relative_directory}
+MODULE_DIR = ${module_dir}
+FFS_OUTPUT_DIR = ${ffs_output_directory}
+
+MODULE_ENTRY_POINT = ${module_entry_point}
+ARCH_ENTRY_POINT = ${arch_entry_point}
+IMAGE_ENTRY_POINT = ${image_entry_point}
+
+${BEGIN}${module_extra_defines}
+${END}
+#
+# Build Configuration Macro Definition
+#
+ARCH = ${architecture}
+TOOLCHAIN = ${toolchain_tag}
+TOOLCHAIN_TAG = ${toolchain_tag}
+TARGET = ${build_target}
+
+#
+# Build Directory Macro Definition
+#
+# PLATFORM_BUILD_DIR = ${platform_build_directory}
+BUILD_DIR = ${platform_build_directory}
+BIN_DIR = $(BUILD_DIR)${separator}${architecture}
+LIB_DIR = $(BIN_DIR)
+MODULE_BUILD_DIR = ${module_build_directory}
+OUTPUT_DIR = ${module_output_directory}
+DEBUG_DIR = ${module_debug_directory}
+DEST_DIR_OUTPUT = $(OUTPUT_DIR)
+DEST_DIR_DEBUG = $(DEBUG_DIR)
+
+#
+# Shell Command Macro
+#
+${BEGIN}${shell_command_code} = ${shell_command}
+${END}
+
+#
+# Tools definitions specific to this module
+#
+${BEGIN}${module_tool_definitions}
+${END}
+MAKE_FILE = ${makefile_path}
+
+#
+# Build Macro
+#
+${BEGIN}${file_macro}
+${END}
+
+#
+# Overridable Target Macro Definitions
+#
+FORCE_REBUILD = force_build
+INIT_TARGET = init
+PCH_TARGET =
+BC_TARGET = ${BEGIN}${backward_compatible_target} ${END}
+CODA_TARGET = ${BEGIN}${remaining_build_target} \\
+ ${END}
+
+#
+# Default target, which will build dependent libraries in addition to source files
+#
+
+all: mbuild
+
+
+#
+# Target used when called from platform makefile, which will bypass the build of dependent libraries
+#
+
+pbuild: $(INIT_TARGET) $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)
+
+#
+# ModuleTarget
+#
+
+mbuild: $(INIT_TARGET) $(BC_TARGET) gen_libs $(PCH_TARGET) $(CODA_TARGET)
+
+#
+# Build Target used in multi-thread build mode, which will bypass the init and gen_libs targets
+#
+
+tbuild: $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)
+
+#
+# Phony target which is used to force executing commands for a target
+#
+force_build:
+\t-@
+
+#
+# Target to update the FD
+#
+
+fds: mbuild gen_fds
+
+#
+# Initialization target: print build information and create necessary directories
+#
+init: info dirs
+
+info:
+\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
+
+dirs:
+${BEGIN}\t-@${create_directory_command}\n${END}
+
+strdefs:
+\t-@$(CP) $(DEBUG_DIR)${separator}AutoGen.h $(DEBUG_DIR)${separator}$(MODULE_NAME)StrDefs.h
+
+#
+# GenLibsTarget
+#
+gen_libs:
+\t${BEGIN}@"$(MAKE)" $(MAKE_FLAGS) -f ${dependent_library_build_directory}${separator}${makefile_name}
+\t${END}@cd $(MODULE_BUILD_DIR)
+
+#
+# Build Flash Device Image
+#
+gen_fds:
+\t@"$(MAKE)" $(MAKE_FLAGS) -f $(BUILD_DIR)${separator}${makefile_name} fds
+\t@cd $(MODULE_BUILD_DIR)
+
+${INCLUDETAG}
+
+#
+# Individual Object Build Targets
+#
+${BEGIN}${file_build_target}
+${END}
+
+#
+# clean all intermediate files
+#
+clean:
+\t${BEGIN}${clean_command}
+\t${END}\t$(RM) AutoGenTimeStamp
+
+#
+# clean all generated files
+#
+cleanall:
+${BEGIN}\t${cleanall_command}
+${END}\t$(RM) *.pdb *.idb > NUL 2>&1
+\t$(RM) $(BIN_DIR)${separator}$(MODULE_NAME).efi
+\t$(RM) AutoGenTimeStamp
+
+#
+# clean all dependent libraries built
+#
+cleanlib:
+\t${BEGIN}-@${library_build_command} cleanall
+\t${END}@cd $(MODULE_BUILD_DIR)\n\n''')
+
+ _FILE_MACRO_TEMPLATE = TemplateString("${macro_name} = ${BEGIN} \\\n ${source_file}${END}\n")
+ _BUILD_TARGET_TEMPLATE = TemplateString("${BEGIN}${target} : ${deps}\n${END}\t${cmd}\n")
+
+ ## Constructor of ModuleMakefile
+ #
+ # @param ModuleAutoGen Object of ModuleAutoGen class
+ #
+ def __init__(self, ModuleAutoGen):
+ BuildFile.__init__(self, ModuleAutoGen)
+ self.PlatformInfo = self._AutoGenObject.PlatformInfo
+
+ self.ResultFileList = []
+ self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
+
+ self.FileBuildTargetList = [] # [(src, target string)]
+ self.BuildTargetList = [] # [target string]
+ self.PendingBuildTargetList = [] # [FileBuildRule objects]
+ self.CommonFileDependency = []
+ self.FileListMacros = {}
+ self.ListFileMacros = {}
+ self.ObjTargetDict = OrderedDict()
+ self.FileCache = {}
+ self.LibraryBuildCommandList = []
+ self.LibraryFileList = []
+ self.LibraryMakefileList = []
+ self.LibraryBuildDirectoryList = []
+ self.SystemLibraryList = []
+ self.Macros = OrderedDict()
+ self.Macros["OUTPUT_DIR" ] = self._AutoGenObject.Macros["OUTPUT_DIR"]
+ self.Macros["DEBUG_DIR" ] = self._AutoGenObject.Macros["DEBUG_DIR"]
+ self.Macros["MODULE_BUILD_DIR"] = self._AutoGenObject.Macros["MODULE_BUILD_DIR"]
+ self.Macros["BIN_DIR" ] = self._AutoGenObject.Macros["BIN_DIR"]
+ self.Macros["BUILD_DIR" ] = self._AutoGenObject.Macros["BUILD_DIR"]
+ self.Macros["WORKSPACE" ] = self._AutoGenObject.Macros["WORKSPACE"]
+ self.Macros["FFS_OUTPUT_DIR" ] = self._AutoGenObject.Macros["FFS_OUTPUT_DIR"]
+ self.GenFfsList = ModuleAutoGen.GenFfsList
+ self.MacroList = ['FFS_OUTPUT_DIR', 'MODULE_GUID', 'OUTPUT_DIR']
+ self.FfsOutputFileList = []
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing information used to do replacement in template
+ @property
+ def _TemplateDict(self):
+ MyAgo = self._AutoGenObject
+ Separator = self._SEP_[self._Platform]
+
+        # break the build if neither source files nor binary files are found
+ if len(MyAgo.SourceFileList) == 0 and len(MyAgo.BinaryFileList) == 0:
+ EdkLogger.error("build", AUTOGEN_ERROR, "No files to be built in module [%s, %s, %s]"
+ % (MyAgo.BuildTarget, MyAgo.ToolChain, MyAgo.Arch),
+ ExtraData="[%s]" % str(MyAgo))
+
+ # convert dependent libraries to build command
+ self.ProcessDependentLibrary()
+ if len(MyAgo.Module.ModuleEntryPointList) > 0:
+ ModuleEntryPoint = MyAgo.Module.ModuleEntryPointList[0]
+ else:
+ ModuleEntryPoint = "_ModuleEntryPoint"
+
+ ArchEntryPoint = ModuleEntryPoint
+
+ if MyAgo.Arch == "EBC":
+            # The EBC compiler always uses "EfiStart" as the entry point. Only applies to EdkII modules
+ ImageEntryPoint = "EfiStart"
+ else:
+ # EdkII modules always use "_ModuleEntryPoint" as entry point
+ ImageEntryPoint = "_ModuleEntryPoint"
+
+ for k, v in MyAgo.Module.Defines.items():
+ if k not in MyAgo.Macros:
+ MyAgo.Macros[k] = v
+
+ if 'MODULE_ENTRY_POINT' not in MyAgo.Macros:
+ MyAgo.Macros['MODULE_ENTRY_POINT'] = ModuleEntryPoint
+ if 'ARCH_ENTRY_POINT' not in MyAgo.Macros:
+ MyAgo.Macros['ARCH_ENTRY_POINT'] = ArchEntryPoint
+ if 'IMAGE_ENTRY_POINT' not in MyAgo.Macros:
+ MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
+
+ PCI_COMPRESS_Flag = False
+ for k, v in MyAgo.Module.Defines.items():
+ if 'PCI_COMPRESS' == k and 'TRUE' == v:
+ PCI_COMPRESS_Flag = True
+
+ # tools definitions
+ ToolsDef = []
+ IncPrefix = self._INC_FLAG_[MyAgo.ToolChainFamily]
+ for Tool in sorted(list(MyAgo.BuildOption)):
+ Appended = False
+ for Attr in sorted(list(MyAgo.BuildOption[Tool])):
+ Value = MyAgo.BuildOption[Tool][Attr]
+ if Attr == "FAMILY":
+ continue
+ elif Attr == "PATH":
+ ToolsDef.append("%s = %s" % (Tool, Value))
+ Appended = True
+ else:
+                    # Don't generate MAKE_FLAGS in the makefile; it is passed via environment variable.
+ if Tool == "MAKE":
+ continue
+ # Remove duplicated include path, if any
+ if Attr == "FLAGS":
+ Value = RemoveDupOption(Value, IncPrefix, MyAgo.IncludePathList)
+ if Tool == "OPTROM" and PCI_COMPRESS_Flag:
+ ValueList = Value.split()
+ if ValueList:
+ for i, v in enumerate(ValueList):
+ if '-e' == v:
+ ValueList[i] = '-ec'
+ Value = ' '.join(ValueList)
+
+ ToolsDef.append("%s_%s = %s" % (Tool, Attr, Value))
+ Appended = True
+ if Appended:
+ ToolsDef.append("")
+
+ # generate the Response file and Response flag
+ RespDict = self.CommandExceedLimit()
+ RespFileList = os.path.join(MyAgo.OutputDir, 'respfilelist.txt')
+ if RespDict:
+ RespFileListContent = ''
+ for Resp in RespDict:
+ RespFile = os.path.join(MyAgo.OutputDir, str(Resp).lower() + '.txt')
+ StrList = RespDict[Resp].split(' ')
+ UnexpandMacro = []
+ NewStr = []
+ for Str in StrList:
+ if '$' in Str or '-MMD' in Str or '-MF' in Str:
+ UnexpandMacro.append(Str)
+ else:
+ NewStr.append(Str)
+ UnexpandMacroStr = ' '.join(UnexpandMacro)
+ NewRespStr = ' '.join(NewStr)
+ SaveFileOnChange(RespFile, NewRespStr, False)
+ ToolsDef.append("%s = %s" % (Resp, UnexpandMacroStr + ' @' + RespFile))
+ RespFileListContent += '@' + RespFile + TAB_LINE_BREAK
+ RespFileListContent += NewRespStr + TAB_LINE_BREAK
+ SaveFileOnChange(RespFileList, RespFileListContent, False)
+ else:
+ if os.path.exists(RespFileList):
+ os.remove(RespFileList)
+
+ # convert source files and binary files to build targets
+ self.ResultFileList = [str(T.Target) for T in MyAgo.CodaTargetList]
+ if len(self.ResultFileList) == 0 and len(MyAgo.SourceFileList) != 0:
+ EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
+ ExtraData="[%s]" % str(MyAgo))
+
+ self.ProcessBuildTargetList(MyAgo.OutputDir, ToolsDef)
+ self.ParserGenerateFfsCmd()
+
+ # Generate macros used to represent input files
+ FileMacroList = [] # macro name = file list
+ for FileListMacro in self.FileListMacros:
+ FileMacro = self._FILE_MACRO_TEMPLATE.Replace(
+ {
+ "macro_name" : FileListMacro,
+ "source_file" : self.FileListMacros[FileListMacro]
+ }
+ )
+ FileMacroList.append(FileMacro)
+
+ # INC_LIST is special
+ FileMacro = ""
+ IncludePathList = []
+ for P in MyAgo.IncludePathList:
+ IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
+ if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
+ self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
+ FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
+ {
+ "macro_name" : "INC",
+ "source_file" : IncludePathList
+ }
+ )
+ FileMacroList.append(FileMacro)
+ # Add support when compiling .nasm source files
+ IncludePathList = []
+        asmsource = [item for item in MyAgo.SourceFileList if item.File.upper().endswith((".NASM", ".ASM", ".NASMB", ".S"))]
+ if asmsource:
+ for P in MyAgo.IncludePathList:
+ IncludePath = self._INC_FLAG_['NASM'] + self.PlaceMacro(P, self.Macros)
+ if IncludePath.endswith(os.sep):
+ IncludePath = IncludePath.rstrip(os.sep)
+                # When compiling .nasm files, a trailing backslash must be added to each include path.
+                # In nmake makefiles, a trailing literal backslash must be escaped with a caret ('^');
+                # otherwise it is replaced with a space (' '). This is not necessary for GNU makefiles.
+ if P == MyAgo.IncludePathList[-1] and self._Platform == WIN32_PLATFORM and self._FileType == NMAKE_FILETYPE:
+ IncludePath = ''.join([IncludePath, '^', os.sep])
+ else:
+ IncludePath = os.path.join(IncludePath, '')
+ IncludePathList.append(IncludePath)
+ FileMacroList.append(self._FILE_MACRO_TEMPLATE.Replace({"macro_name": "NASM_INC", "source_file": IncludePathList}))
+
+ # Generate macros used to represent files containing list of input files
+ for ListFileMacro in self.ListFileMacros:
+ ListFileName = os.path.join(MyAgo.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
+ FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
+ SaveFileOnChange(
+ ListFileName,
+ "\n".join(self.ListFileMacros[ListFileMacro]),
+ False
+ )
+
+ # Generate objlist used to create .obj file
+ for Type in self.ObjTargetDict:
+ NewLine = ' '.join(list(self.ObjTargetDict[Type]))
+ FileMacroList.append("OBJLIST_%s = %s" % (list(self.ObjTargetDict.keys()).index(Type), NewLine))
+
+ BcTargetList = []
+
+ MakefileName = self.getMakefileName()
+ LibraryMakeCommandList = []
+ for D in self.LibraryBuildDirectoryList:
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":os.path.join(D, MakefileName)}
+ LibraryMakeCommandList.append(Command)
+
+ package_rel_dir = MyAgo.SourceDir
+ current_dir = self.Macros["WORKSPACE"]
+ found = False
+ while not found and os.sep in package_rel_dir:
+ index = package_rel_dir.index(os.sep)
+ current_dir = mws.join(current_dir, package_rel_dir[:index])
+ if os.path.exists(current_dir):
+ for fl in os.listdir(current_dir):
+ if fl.endswith('.dec'):
+ found = True
+ break
+ package_rel_dir = package_rel_dir[index + 1:]
+
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
+ "makefile_name" : MakefileName,
+ "platform_name" : self.PlatformInfo.Name,
+ "platform_guid" : self.PlatformInfo.Guid,
+ "platform_version" : self.PlatformInfo.Version,
+ "platform_relative_directory": self.PlatformInfo.SourceDir,
+ "platform_output_directory" : self.PlatformInfo.OutputDir,
+ "ffs_output_directory" : MyAgo.Macros["FFS_OUTPUT_DIR"],
+ "platform_dir" : MyAgo.Macros["PLATFORM_DIR"],
+
+ "module_name" : MyAgo.Name,
+ "module_guid" : MyAgo.Guid,
+ "module_name_guid" : MyAgo.UniqueBaseName,
+ "module_version" : MyAgo.Version,
+ "module_type" : MyAgo.ModuleType,
+ "module_file" : MyAgo.MetaFile.Name,
+ "module_file_base_name" : MyAgo.MetaFile.BaseName,
+ "module_relative_directory" : MyAgo.SourceDir,
+ "module_dir" : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
+ "package_relative_directory": package_rel_dir,
+ "module_extra_defines" : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.items()],
+
+ "architecture" : MyAgo.Arch,
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+
+ "platform_build_directory" : self.PlatformInfo.BuildDir,
+ "module_build_directory" : MyAgo.BuildDir,
+ "module_output_directory" : MyAgo.OutputDir,
+ "module_debug_directory" : MyAgo.DebugDir,
+
+ "separator" : Separator,
+ "module_tool_definitions" : ToolsDef,
+
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+
+ "module_entry_point" : ModuleEntryPoint,
+ "image_entry_point" : ImageEntryPoint,
+ "arch_entry_point" : ArchEntryPoint,
+ "remaining_build_target" : self.ResultFileList,
+ "common_dependency_file" : self.CommonFileDependency,
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "clean_command" : self.GetRemoveDirectoryCommand(["$(OUTPUT_DIR)"]),
+ "cleanall_command" : self.GetRemoveDirectoryCommand(["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]),
+ "dependent_library_build_directory" : self.LibraryBuildDirectoryList,
+ "library_build_command" : LibraryMakeCommandList,
+ "file_macro" : FileMacroList,
+ "file_build_target" : self.BuildTargetList,
+ "backward_compatible_target": BcTargetList,
+ "INCLUDETAG" : "\n".join([self._INCLUDE_CMD_[self._FileType] + " " + os.path.join("$(MODULE_BUILD_DIR)","dependency"),
+ self._INCLUDE_CMD_[self._FileType] + " " + os.path.join("$(MODULE_BUILD_DIR)","deps_target")
+ ])
+ }
+
+ return MakefileTemplateDict
+
+ def ParserGenerateFfsCmd(self):
+        # Add Ffs cmd to self.BuildTargetList
+ OutputFile = ''
+ DepsFileList = []
+
+ for Cmd in self.GenFfsList:
+ if Cmd[2]:
+ for CopyCmd in Cmd[2]:
+ Src, Dst = CopyCmd
+ Src = self.ReplaceMacro(Src)
+ Dst = self.ReplaceMacro(Dst)
+ if Dst not in self.ResultFileList:
+ self.ResultFileList.append(Dst)
+ if '%s :' %(Dst) not in self.BuildTargetList:
+ self.BuildTargetList.append("%s : %s" %(Dst,Src))
+ self.BuildTargetList.append('\t' + self._CP_TEMPLATE_[self._Platform] %{'Src': Src, 'Dst': Dst})
+
+ FfsCmdList = Cmd[0]
+ for index, Str in enumerate(FfsCmdList):
+ if '-o' == Str:
+ OutputFile = FfsCmdList[index + 1]
+ if '-i' == Str or "-oi" == Str:
+ if DepsFileList == []:
+ DepsFileList = [FfsCmdList[index + 1]]
+ else:
+ DepsFileList.append(FfsCmdList[index + 1])
+ DepsFileString = ' '.join(DepsFileList).strip()
+ if DepsFileString == '':
+ continue
+ OutputFile = self.ReplaceMacro(OutputFile)
+ self.ResultFileList.append(OutputFile)
+ DepsFileString = self.ReplaceMacro(DepsFileString)
+ self.BuildTargetList.append('%s : %s' % (OutputFile, DepsFileString))
+ CmdString = ' '.join(FfsCmdList).strip()
+ CmdString = self.ReplaceMacro(CmdString)
+ self.BuildTargetList.append('\t%s' % CmdString)
+
+ self.ParseSecCmd(DepsFileList, Cmd[1])
+ for SecOutputFile, SecDepsFile, SecCmd in self.FfsOutputFileList :
+ self.BuildTargetList.append('%s : %s' % (self.ReplaceMacro(SecOutputFile), self.ReplaceMacro(SecDepsFile)))
+ self.BuildTargetList.append('\t%s' % self.ReplaceMacro(SecCmd))
+ self.FfsOutputFileList = []
+
+ def ParseSecCmd(self, OutputFileList, CmdTuple):
+ for OutputFile in OutputFileList:
+ for SecCmdStr in CmdTuple:
+ SecDepsFileList = []
+ SecCmdList = SecCmdStr.split()
+ CmdName = SecCmdList[0]
+ for index, CmdItem in enumerate(SecCmdList):
+ if '-o' == CmdItem and OutputFile == SecCmdList[index + 1]:
+ index = index + 1
+ while index + 1 < len(SecCmdList):
+ if not SecCmdList[index+1].startswith('-'):
+ SecDepsFileList.append(SecCmdList[index + 1])
+ index = index + 1
+ if CmdName == 'Trim':
+ SecDepsFileList.append(os.path.join('$(DEBUG_DIR)', os.path.basename(OutputFile).replace('offset', 'efi')))
+ if OutputFile.endswith('.ui') or OutputFile.endswith('.ver'):
+ SecDepsFileList.append(os.path.join('$(MODULE_DIR)', '$(MODULE_FILE)'))
+ self.FfsOutputFileList.append((OutputFile, ' '.join(SecDepsFileList), SecCmdStr))
+ if len(SecDepsFileList) > 0:
+ self.ParseSecCmd(SecDepsFileList, CmdTuple)
+ break
+ else:
+ continue
+
+ def ReplaceMacro(self, Str):
+ for Macro in self.MacroList:
+ if self._AutoGenObject.Macros[Macro] and os.path.normcase(self._AutoGenObject.Macros[Macro]) in os.path.normcase(Str):
+ replace_dir = Str[os.path.normcase(Str).index(os.path.normcase(self._AutoGenObject.Macros[Macro])): os.path.normcase(Str).index(
+ os.path.normcase(self._AutoGenObject.Macros[Macro])) + len(self._AutoGenObject.Macros[Macro])]
+ Str = Str.replace(replace_dir, '$(' + Macro + ')')
+ return Str
+
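+ # Example (hypothetical macro values): assuming self.MacroList contains
+ # 'WORKSPACE' and self._AutoGenObject.Macros['WORKSPACE'] is 'C:\\edk2',
+ #   self.ReplaceMacro('C:\\edk2\\Build\\Foo\\Bar.obj')
+ # returns
+ #   '$(WORKSPACE)\\Build\\Foo\\Bar.obj'
+ # The match is case-insensitive (via os.path.normcase), but the replacement
+ # preserves the original casing of the rest of the string.
+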
+ def CommandExceedLimit(self):
+ FlagDict = {
+ 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : False},
+ 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : False},
+ 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : False},
+ 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : False},
+ 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : False},
+ 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : False},
+ 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : False},
+ }
+
+ RespDict = {}
+ FileTypeList = []
+ IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily]
+
+ # decide the file type based on the source files
+ for File in self._AutoGenObject.SourceFileList:
+ for type in self._AutoGenObject.FileTypes:
+ if File in self._AutoGenObject.FileTypes[type]:
+ if type not in FileTypeList:
+ FileTypeList.append(type)
+
+ # calculate the command-line length
+ if FileTypeList:
+ for type in FileTypeList:
+ BuildTargets = self._AutoGenObject.BuildRules[type].BuildTargets
+ for Target in BuildTargets:
+ CommandList = BuildTargets[Target].Commands
+ for SingleCommand in CommandList:
+ Tool = ''
+ SingleCommandLength = len(SingleCommand)
+ SingleCommandList = SingleCommand.split()
+ if len(SingleCommandList) > 0:
+ for Flag in FlagDict:
+ if '$('+ Flag +')' in SingleCommandList[0]:
+ Tool = Flag
+ break
+ if Tool:
+ if 'PATH' not in self._AutoGenObject.BuildOption[Tool]:
+ EdkLogger.error("build", AUTOGEN_ERROR, "%s_PATH doesn't exist in %s ToolChain and %s Arch." %(Tool, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), ExtraData="[%s]" % str(self._AutoGenObject))
+ SingleCommandLength += len(self._AutoGenObject.BuildOption[Tool]['PATH'])
+ for item in SingleCommandList[1:]:
+ if FlagDict[Tool]['Macro'] in item:
+ if 'FLAGS' not in self._AutoGenObject.BuildOption[Tool]:
+ EdkLogger.error("build", AUTOGEN_ERROR, "%s_FLAGS doesn't exist in %s ToolChain and %s Arch." %(Tool, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), ExtraData="[%s]" % str(self._AutoGenObject))
+ Str = self._AutoGenObject.BuildOption[Tool]['FLAGS']
+ for Option in self._AutoGenObject.BuildOption:
+ for Attr in self._AutoGenObject.BuildOption[Option]:
+ if Str.find(Option + '_' + Attr) != -1:
+ Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
+ while(Str.find('$(') != -1):
+ for macro in self._AutoGenObject.Macros:
+ MacroName = '$('+ macro + ')'
+ if (Str.find(MacroName) != -1):
+ Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
+ break
+ else:
+ break
+ SingleCommandLength += len(Str)
+ elif '$(INC)' in item:
+ SingleCommandLength += self._AutoGenObject.IncludePathLength + len(IncPrefix) * len(self._AutoGenObject.IncludePathList)
+ elif item.find('$(') != -1:
+ Str = item
+ for Option in self._AutoGenObject.BuildOption:
+ for Attr in self._AutoGenObject.BuildOption[Option]:
+ if Str.find(Option + '_' + Attr) != -1:
+ Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
+ while(Str.find('$(') != -1):
+ for macro in self._AutoGenObject.Macros:
+ MacroName = '$('+ macro + ')'
+ if (Str.find(MacroName) != -1):
+ Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
+ break
+ else:
+ break
+ SingleCommandLength += len(Str)
+
+ if SingleCommandLength > GlobalData.gCommandMaxLength:
+ FlagDict[Tool]['Value'] = True
+
+ # generate the response file content by combining the FLAGS and INC
+ for Flag in FlagDict:
+ if FlagDict[Flag]['Value']:
+ Key = Flag + '_RESP'
+ RespMacro = FlagDict[Flag]['Macro'].replace('FLAGS', 'RESP')
+ Value = self._AutoGenObject.BuildOption[Flag]['FLAGS']
+ for inc in self._AutoGenObject.IncludePathList:
+ Value += ' ' + IncPrefix + inc
+ for Option in self._AutoGenObject.BuildOption:
+ for Attr in self._AutoGenObject.BuildOption[Option]:
+ if Value.find(Option + '_' + Attr) != -1:
+ Value = Value.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
+ while (Value.find('$(') != -1):
+ for macro in self._AutoGenObject.Macros:
+ MacroName = '$('+ macro + ')'
+ if (Value.find(MacroName) != -1):
+ Value = Value.replace(MacroName, self._AutoGenObject.Macros[macro])
+ break
+ else:
+ break
+
+ if self._AutoGenObject.ToolChainFamily == 'GCC':
+ RespDict[Key] = Value.replace('\\', '/')
+ else:
+ RespDict[Key] = Value
+ for Target in BuildTargets:
+ for i, SingleCommand in enumerate(BuildTargets[Target].Commands):
+ if FlagDict[Flag]['Macro'] in SingleCommand:
+ BuildTargets[Target].Commands[i] = SingleCommand.replace('$(INC)', '').replace(FlagDict[Flag]['Macro'], RespMacro)
+ return RespDict
+
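+ # Example (hypothetical command): when the fully expanded compile command
+ # for a file exceeds GlobalData.gCommandMaxLength, a rule such as
+ #   "$(CC)" $(CC_FLAGS) $(INC) -o Foo.obj Foo.c
+ # is rewritten to
+ #   "$(CC)" $(CC_RESP) -o Foo.obj Foo.c
+ # and RespDict['CC_RESP'] holds the response-file content: the expanded
+ # $(CC_FLAGS) followed by one IncPrefix'd entry per include path. Writing
+ # that content out and defining $(CC_RESP) to reference it is left to the
+ # caller.
+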
+ def ProcessBuildTargetList(self, RespFile, ToolsDef):
+ #
+ # Search dependency file list for each source file
+ #
+ ForceIncludedFile = []
+ for File in self._AutoGenObject.AutoGenFileList:
+ if File.Ext == '.h':
+ ForceIncludedFile.append(File)
+ SourceFileList = []
+ OutPutFileList = []
+ for Target in self._AutoGenObject.IntroTargetList:
+ SourceFileList.extend(Target.Inputs)
+ OutPutFileList.extend(Target.Outputs)
+
+ if OutPutFileList:
+ for Item in OutPutFileList:
+ if Item in SourceFileList:
+ SourceFileList.remove(Item)
+
+ FileDependencyDict = {item:ForceIncludedFile for item in SourceFileList}
+
+ for Dependency in FileDependencyDict.values():
+ self.DependencyHeaderFileSet.update(set(Dependency))
+
+ # Get a set of unique package includes from MetaFile
+ parentMetaFileIncludes = set()
+ for aInclude in self._AutoGenObject.PackageIncludePathList:
+ aIncludeName = str(aInclude)
+ parentMetaFileIncludes.add(aIncludeName.lower())
+
+ # Check if header files are listed in metafile
+ # Get a set of unique module header source files from MetaFile
+ headerFilesInMetaFileSet = set()
+ for aFile in self._AutoGenObject.SourceFileList:
+ aFileName = str(aFile)
+ if not aFileName.endswith('.h'):
+ continue
+ headerFilesInMetaFileSet.add(aFileName.lower())
+
+ # Get a set of unique module autogen files
+ localAutoGenFileSet = set()
+ for aFile in self._AutoGenObject.AutoGenFileList:
+ localAutoGenFileSet.add(str(aFile).lower())
+
+ # Get a set of unique module dependency header files
+ # Exclude autogen files and files not in the source directory
+ # and files that are under the package include list
+ headerFileDependencySet = set()
+ localSourceDir = str(self._AutoGenObject.SourceDir).lower()
+ for Dependency in FileDependencyDict.values():
+ for aFile in Dependency:
+ aFileName = str(aFile).lower()
+ # Exclude non-header files
+ if not aFileName.endswith('.h'):
+ continue
+ # Exclude autogen files
+ if aFileName in localAutoGenFileSet:
+ continue
+ # Exclude includes outside the local scope
+ if localSourceDir not in aFileName:
+ continue
+ # Exclude files covered by package includes
+ pathNeeded = True
+ for aIncludePath in parentMetaFileIncludes:
+ if aIncludePath in aFileName:
+ pathNeeded = False
+ break
+ if not pathNeeded:
+ continue
+ # Keep the file to be checked
+ headerFileDependencySet.add(aFileName)
+
+ # Check if a module dependency header file is missing from the module's MetaFile
+ for aFile in headerFileDependencySet:
+ if aFile in headerFilesInMetaFileSet:
+ continue
+ if GlobalData.gUseHashCache:
+ GlobalData.gModuleBuildTracking[self._AutoGenObject] = 'FAIL_METAFILE'
+ EdkLogger.warn("build","Module MetaFile [Sources] is missing local header!",
+ ExtraData = "Local Header: " + aFile + " not found in " + self._AutoGenObject.MetaFile.Path
+ )
+
+ for File,Dependency in FileDependencyDict.items():
+ if not Dependency:
+ continue
+
+ self._AutoGenObject.AutoGenDepSet |= set(Dependency)
+
+ CmdSumDict = {}
+ CmdTargetDict = {}
+ CmdCppDict = {}
+ DependencyDict = FileDependencyDict.copy()
+
+ # Convert target description object to target string in makefile
+ if self._AutoGenObject.BuildRuleFamily == TAB_COMPILER_MSFT and TAB_C_CODE_FILE in self._AutoGenObject.Targets:
+ for T in self._AutoGenObject.Targets[TAB_C_CODE_FILE]:
+ NewFile = self.PlaceMacro(str(T), self.Macros)
+ if not self.ObjTargetDict.get(T.Target.SubDir):
+ self.ObjTargetDict[T.Target.SubDir] = set()
+ self.ObjTargetDict[T.Target.SubDir].add(NewFile)
+ for Type in self._AutoGenObject.Targets:
+ resp_file_number = 0
+ for T in self._AutoGenObject.Targets[Type]:
+ # Generate related macros if needed
+ if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
+ self.FileListMacros[T.FileListMacro] = []
+ if T.GenListFile and T.ListFileMacro not in self.ListFileMacros:
+ self.ListFileMacros[T.ListFileMacro] = []
+ if T.GenIncListFile and T.IncListFileMacro not in self.ListFileMacros:
+ self.ListFileMacros[T.IncListFileMacro] = []
+
+ Deps = []
+ CCodeDeps = []
+ # Add force-dependencies
+ for Dep in T.Dependencies:
+ Deps.append(self.PlaceMacro(str(Dep), self.Macros))
+ if Dep != '$(MAKE_FILE)':
+ CCodeDeps.append(self.PlaceMacro(str(Dep), self.Macros))
+ # Add inclusion-dependencies
+ if len(T.Inputs) == 1 and T.Inputs[0] in FileDependencyDict:
+ for F in FileDependencyDict[T.Inputs[0]]:
+ Deps.append(self.PlaceMacro(str(F), self.Macros))
+ # Add source-dependencies
+ for F in T.Inputs:
+ NewFile = self.PlaceMacro(str(F), self.Macros)
+ # In order to use file list macro as dependency
+ if T.GenListFile:
+ # gnu tools need forward slash path separator, even on Windows
+ self.ListFileMacros[T.ListFileMacro].append(str(F).replace ('\\', '/'))
+ self.FileListMacros[T.FileListMacro].append(NewFile)
+ elif T.GenFileListMacro:
+ self.FileListMacros[T.FileListMacro].append(NewFile)
+ else:
+ Deps.append(NewFile)
+ for key in self.FileListMacros:
+ self.FileListMacros[key].sort()
+ # Use file list macro as dependency
+ if T.GenFileListMacro:
+ Deps.append("$(%s)" % T.FileListMacro)
+ if Type in [TAB_OBJECT_FILE, TAB_STATIC_LIBRARY]:
+ Deps.append("$(%s)" % T.ListFileMacro)
+
+ # VBox - begin: Add $(QUIET)
+ sAllCmds = None;
+ for sCmd in T.Commands:
+ sCmd = sCmd.strip();
+ if len(sCmd) > 0:
+ if sCmd[0] == '-' and self._FileType == 'nmake':
+ sCmd = '-$(EFI_QUIET)' + sCmd[1:];
+ else:
+ sCmd = '$(EFI_QUIET)' + sCmd;
+ if sAllCmds is None:
+ sAllCmds = sCmd;
+ else:
+ sAllCmds += '\n\t' + sCmd;
+ # VBox - end.
+
+ if self._AutoGenObject.BuildRuleFamily == TAB_COMPILER_MSFT and Type == TAB_C_CODE_FILE:
+ T, CmdTarget, CmdTargetDict, CmdCppDict = self.ParserCCodeFile(T, Type, CmdSumDict, CmdTargetDict,
+ CmdCppDict, DependencyDict, RespFile,
+ ToolsDef, resp_file_number)
+ resp_file_number += 1
+ TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": "\n\t".join(T.Commands),"deps": CCodeDeps}
+ # VBox: Original: TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": sAllCmds,"deps": CCodeDeps}
+ CmdLine = self._BUILD_TARGET_TEMPLATE.Replace(TargetDict).rstrip().replace('\t$(OBJLIST', '$(OBJLIST')
+ if T.Commands:
+ CmdLine = '%s%s' %(CmdLine, TAB_LINE_BREAK)
+ if CCodeDeps or CmdLine:
+ self.BuildTargetList.append(CmdLine)
+ else:
+ TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": "\n\t".join(T.Commands),"deps": Deps}
+ # VBox: Original: TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": sAllCmds,"deps": Deps}
+ self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(TargetDict))
+
+ # Add a Makefile rule for targets generating multiple files.
+ # The main output is a prerequisite for the other output files.
+ for i in T.Outputs[1:]:
+ AnnexeTargetDict = {"target": self.PlaceMacro(i.Path, self.Macros), "cmd": "", "deps": self.PlaceMacro(T.Target.Path, self.Macros)}
+ self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(AnnexeTargetDict))
+
+ def ParserCCodeFile(self, T, Type, CmdSumDict, CmdTargetDict, CmdCppDict, DependencyDict, RespFile, ToolsDef,
+ resp_file_number):
+ SaveFilePath = os.path.join(RespFile, "cc_resp_%s.txt" % resp_file_number)
+ if not CmdSumDict:
+ for item in self._AutoGenObject.Targets[Type]:
+ CmdSumDict[item.Target.SubDir] = item.Target.BaseName
+ for CppPath in item.Inputs:
+ Path = self.PlaceMacro(CppPath.Path, self.Macros)
+ if CmdCppDict.get(item.Target.SubDir):
+ CmdCppDict[item.Target.SubDir].append(Path)
+ else:
+ CmdCppDict[item.Target.SubDir] = ['$(MAKE_FILE)', Path]
+ if CppPath.Path in DependencyDict:
+ for Temp in DependencyDict[CppPath.Path]:
+ try:
+ Path = self.PlaceMacro(Temp.Path, self.Macros)
+ except:
+ # skip dependencies whose path cannot be resolved or macro-expanded
+ continue
+ if Path not in (self.CommonFileDependency + CmdCppDict[item.Target.SubDir]):
+ CmdCppDict[item.Target.SubDir].append(Path)
+ if T.Commands:
+ CommandList = T.Commands[:]
+ for Item in CommandList[:]:
+ SingleCommandList = Item.split()
+ if len(SingleCommandList) > 0 and self.CheckCCCmd(SingleCommandList):
+ for Temp in SingleCommandList:
+ if Temp.startswith('/Fo'):
+ CmdSign = '%s%s' % (Temp.rsplit(TAB_SLASH, 1)[0], TAB_SLASH)
+ break
+ else:
+ continue
+ if CmdSign not in CmdTargetDict:
+ cmd = Item.replace(Temp, CmdSign)
+ if SingleCommandList[-1] in cmd:
+ CmdTargetDict[CmdSign] = [cmd.replace(SingleCommandList[-1], "").rstrip(), SingleCommandList[-1]]
+ else:
+ # CmdTargetDict[CmdSign] = "%s %s" % (CmdTargetDict[CmdSign], SingleCommandList[-1])
+ CmdTargetDict[CmdSign].append(SingleCommandList[-1])
+ Index = CommandList.index(Item)
+ CommandList.pop(Index)
+ if SingleCommandList[-1].endswith("%s%s.c" % (TAB_SLASH, CmdSumDict[CmdSign[3:].rsplit(TAB_SLASH, 1)[0]])):
+ Cpplist = CmdCppDict[T.Target.SubDir]
+ Cpplist.insert(0, '$(OBJLIST_%d): ' % list(self.ObjTargetDict.keys()).index(T.Target.SubDir))
+ source_files = CmdTargetDict[CmdSign][1:]
+ source_files.insert(0, " ")
+ if len(source_files)>2:
+ SaveFileOnChange(SaveFilePath, " ".join(source_files), False)
+ T.Commands[Index] = '%s\n\t%s $(cc_resp_%s)' % (
+ ' \\\n\t'.join(Cpplist), CmdTargetDict[CmdSign][0], resp_file_number)
+ ToolsDef.append("cc_resp_%s = @%s" % (resp_file_number, SaveFilePath))
+
+ elif len(source_files)<=2 and len(" ".join(CmdTargetDict[CmdSign][:2]))>GlobalData.gCommandMaxLength:
+ SaveFileOnChange(SaveFilePath, " ".join(source_files), False)
+ T.Commands[Index] = '%s\n\t%s $(cc_resp_%s)' % (
+ ' \\\n\t'.join(Cpplist), CmdTargetDict[CmdSign][0], resp_file_number)
+ ToolsDef.append("cc_resp_%s = @%s" % (resp_file_number, SaveFilePath))
+
+ else:
+ T.Commands[Index] = '%s\n\t%s' % (' \\\n\t'.join(Cpplist), " ".join(CmdTargetDict[CmdSign]))
+ else:
+ T.Commands.pop(Index)
+ return T, CmdSumDict, CmdTargetDict, CmdCppDict
+
+ def CheckCCCmd(self, CommandList):
+ for cmd in CommandList:
+ if '$(CC)' in cmd:
+ return True
+ return False
+ ## For creating makefile targets for dependent libraries
+ def ProcessDependentLibrary(self):
+ for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
+ if not LibraryAutoGen.IsBinaryModule:
+ self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros))
+
+ ## Return a list containing source file's dependencies
+ #
+ # @param FileList The list of source files
+ # @param ForceInculeList The list of files which will be included forcely
+ # @param SearchPathList The list of search path
+ #
+ # @retval dict The mapping between source file path and its dependencies
+ #
+ def GetFileDependency(self, FileList, ForceInculeList, SearchPathList):
+ Dependency = {}
+ for F in FileList:
+ Dependency[F] = GetDependencyList(self._AutoGenObject, self.FileCache, F, ForceInculeList, SearchPathList)
+ return Dependency
+
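+ # Example (hypothetical paths): for FileList = [PathClass('Foo.c')], the
+ # returned mapping has the shape
+ #   { PathClass('Foo.c'): [PathClass('Foo.h'), PathClass('AutoGen.h'), ...] }
+ # i.e. one entry per source file, each listing every header reachable
+ # through recursive #include scanning plus the force-included files.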
+
+## CustomMakefile class
+#
+# This class encapsulates the makefile and its generation for a module with a
+# custom makefile. It uses a template to generate the makefile content, which
+# is obtained from the ModuleAutoGen object.
+#
+class CustomMakefile(BuildFile):
+ ## template used to generate the makefile for a module with a custom makefile
+ _TEMPLATE_ = TemplateString('''\
+${makefile_header}
+
+#
+# Platform Macro Definition
+#
+PLATFORM_NAME = ${platform_name}
+PLATFORM_GUID = ${platform_guid}
+PLATFORM_VERSION = ${platform_version}
+PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
+PLATFORM_DIR = ${platform_dir}
+PLATFORM_OUTPUT_DIR = ${platform_output_directory}
+
+#
+# Module Macro Definition
+#
+MODULE_NAME = ${module_name}
+MODULE_GUID = ${module_guid}
+MODULE_NAME_GUID = ${module_name_guid}
+MODULE_VERSION = ${module_version}
+MODULE_TYPE = ${module_type}
+MODULE_FILE = ${module_file}
+MODULE_FILE_BASE_NAME = ${module_file_base_name}
+BASE_NAME = $(MODULE_NAME)
+MODULE_RELATIVE_DIR = ${module_relative_directory}
+MODULE_DIR = ${module_dir}
+
+#
+# Build Configuration Macro Definition
+#
+ARCH = ${architecture}
+TOOLCHAIN = ${toolchain_tag}
+TOOLCHAIN_TAG = ${toolchain_tag}
+TARGET = ${build_target}
+
+#
+# Build Directory Macro Definition
+#
+# PLATFORM_BUILD_DIR = ${platform_build_directory}
+BUILD_DIR = ${platform_build_directory}
+BIN_DIR = $(BUILD_DIR)${separator}${architecture}
+LIB_DIR = $(BIN_DIR)
+MODULE_BUILD_DIR = ${module_build_directory}
+OUTPUT_DIR = ${module_output_directory}
+DEBUG_DIR = ${module_debug_directory}
+DEST_DIR_OUTPUT = $(OUTPUT_DIR)
+DEST_DIR_DEBUG = $(DEBUG_DIR)
+
+#
+# Tools definitions specific to this module
+#
+${BEGIN}${module_tool_definitions}
+${END}
+MAKE_FILE = ${makefile_path}
+
+#
+# Shell Command Macro
+#
+${BEGIN}${shell_command_code} = ${shell_command}
+${END}
+
+${custom_makefile_content}
+
+#
+# Target used when called from platform makefile, which will bypass the build of dependent libraries
+#
+
+pbuild: init all
+
+
+#
+# ModuleTarget
+#
+
+mbuild: init all
+
+#
+# Build target used in multi-thread build mode, in which no init target is needed
+#
+
+tbuild: all
+
+#
+# Initialization target: print build information and create necessary directories
+#
+init:
+\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
+${BEGIN}\t-@${create_directory_command}\n${END}\
+
+''')
+
+ ## Constructor of CustomMakefile
+ #
+ # @param ModuleAutoGen Object of ModuleAutoGen class
+ #
+ def __init__(self, ModuleAutoGen):
+ BuildFile.__init__(self, ModuleAutoGen)
+ self.PlatformInfo = self._AutoGenObject.PlatformInfo
+ self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing the information used for replacement in the template
+ @property
+ def _TemplateDict(self):
+ Separator = self._SEP_[self._Platform]
+ MyAgo = self._AutoGenObject
+ if self._FileType not in MyAgo.CustomMakefile:
+ EdkLogger.error('build', OPTION_NOT_SUPPORTED, "No custom makefile for %s" % self._FileType,
+ ExtraData="[%s]" % str(MyAgo))
+ MakefilePath = mws.join(
+ MyAgo.WorkspaceDir,
+ MyAgo.CustomMakefile[self._FileType]
+ )
+ try:
+ CustomMakefile = open(MakefilePath, 'r').read()
+ except:
+ EdkLogger.error('build', FILE_OPEN_FAILURE, File=str(MyAgo),
+ ExtraData=MyAgo.CustomMakefile[self._FileType])
+
+ # tools definitions
+ ToolsDef = []
+ for Tool in MyAgo.BuildOption:
+ # Don't generate MAKE_FLAGS in makefile. It's put in environment variable.
+ if Tool == "MAKE":
+ continue
+ for Attr in MyAgo.BuildOption[Tool]:
+ if Attr == "FAMILY":
+ continue
+ elif Attr == "PATH":
+ ToolsDef.append("%s = %s" % (Tool, MyAgo.BuildOption[Tool][Attr]))
+ else:
+ ToolsDef.append("%s_%s = %s" % (Tool, Attr, MyAgo.BuildOption[Tool][Attr]))
+ ToolsDef.append("")
+
+ MakefileName = self.getMakefileName()
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
+ "platform_name" : self.PlatformInfo.Name,
+ "platform_guid" : self.PlatformInfo.Guid,
+ "platform_version" : self.PlatformInfo.Version,
+ "platform_relative_directory": self.PlatformInfo.SourceDir,
+ "platform_output_directory" : self.PlatformInfo.OutputDir,
+ "platform_dir" : MyAgo.Macros["PLATFORM_DIR"],
+
+ "module_name" : MyAgo.Name,
+ "module_guid" : MyAgo.Guid,
+ "module_name_guid" : MyAgo.UniqueBaseName,
+ "module_version" : MyAgo.Version,
+ "module_type" : MyAgo.ModuleType,
+ "module_file" : MyAgo.MetaFile,
+ "module_file_base_name" : MyAgo.MetaFile.BaseName,
+ "module_relative_directory" : MyAgo.SourceDir,
+ "module_dir" : mws.join (MyAgo.WorkspaceDir, MyAgo.SourceDir),
+
+ "architecture" : MyAgo.Arch,
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+
+ "platform_build_directory" : self.PlatformInfo.BuildDir,
+ "module_build_directory" : MyAgo.BuildDir,
+ "module_output_directory" : MyAgo.OutputDir,
+ "module_debug_directory" : MyAgo.DebugDir,
+
+ "separator" : Separator,
+ "module_tool_definitions" : ToolsDef,
+
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "custom_makefile_content" : CustomMakefile
+ }
+
+ return MakefileTemplateDict
+
+## PlatformMakefile class
+#
+# This class encapsulates the makefile and its generation for a platform. It
+# uses a template to generate the makefile content, which is obtained from the
+# PlatformAutoGen object.
+#
+class PlatformMakefile(BuildFile):
+ ## template used to generate the makefile for platform
+ _TEMPLATE_ = TemplateString('''\
+${makefile_header}
+
+#
+# Platform Macro Definition
+#
+PLATFORM_NAME = ${platform_name}
+PLATFORM_GUID = ${platform_guid}
+PLATFORM_VERSION = ${platform_version}
+PLATFORM_FILE = ${platform_file}
+PLATFORM_DIR = ${platform_dir}
+PLATFORM_OUTPUT_DIR = ${platform_output_directory}
+
+#
+# Build Configuration Macro Definition
+#
+TOOLCHAIN = ${toolchain_tag}
+TOOLCHAIN_TAG = ${toolchain_tag}
+TARGET = ${build_target}
+
+#
+# Build Directory Macro Definition
+#
+BUILD_DIR = ${platform_build_directory}
+FV_DIR = ${platform_build_directory}${separator}FV
+
+#
+# Shell Command Macro
+#
+${BEGIN}${shell_command_code} = ${shell_command}
+${END}
+
+MAKE = ${make_path}
+MAKE_FILE = ${makefile_path}
+
+#
+# Default target
+#
+all: init build_libraries build_modules
+
+#
+# Initialization target: print build information and create necessary directories
+#
+init:
+\t-@echo Building ... $(PLATFORM_FILE) [${build_architecture_list}]
+\t${BEGIN}-@${create_directory_command}
+\t${END}
+#
+# library build target
+#
+libraries: init build_libraries
+
+#
+# module build target
+#
+modules: init build_libraries build_modules
+
+#
+# Build all libraries:
+#
+build_libraries:
+${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${library_makefile_list} pbuild
+${END}\t@cd $(BUILD_DIR)
+
+#
+# Build all modules:
+#
+build_modules:
+${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${module_makefile_list} pbuild
+${END}\t@cd $(BUILD_DIR)
+
+#
+# Clean intermediate files
+#
+clean:
+\t${BEGIN}-@${library_build_command} clean
+\t${END}${BEGIN}-@${module_build_command} clean
+\t${END}@cd $(BUILD_DIR)
+
+#
+# Clean all generated files except the makefile
+#
+cleanall:
+${BEGIN}\t${cleanall_command}
+${END}
+
+#
+# Clean all library files
+#
+cleanlib:
+\t${BEGIN}-@${library_build_command} cleanall
+\t${END}@cd $(BUILD_DIR)\n
+''')
+
+ ## Constructor of PlatformMakefile
+ #
+ # @param ModuleAutoGen Object of PlatformAutoGen class
+ #
+ def __init__(self, PlatformAutoGen):
+ BuildFile.__init__(self, PlatformAutoGen)
+ self.ModuleBuildCommandList = []
+ self.ModuleMakefileList = []
+ self.IntermediateDirectoryList = []
+ self.ModuleBuildDirectoryList = []
+ self.LibraryBuildDirectoryList = []
+ self.LibraryMakeCommandList = []
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing the information used for replacement in the template
+ @property
+ def _TemplateDict(self):
+ Separator = self._SEP_[self._Platform]
+
+ MyAgo = self._AutoGenObject
+ if "MAKE" not in MyAgo.ToolDefinition or "PATH" not in MyAgo.ToolDefinition["MAKE"]:
+ EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
+ ExtraData="[%s]" % str(MyAgo))
+
+ self.IntermediateDirectoryList = ["$(BUILD_DIR)"]
+ self.ModuleBuildDirectoryList = self.GetModuleBuildDirectoryList()
+ self.LibraryBuildDirectoryList = self.GetLibraryBuildDirectoryList()
+
+ MakefileName = self.getMakefileName()
+ LibraryMakefileList = []
+ LibraryMakeCommandList = []
+ for D in self.LibraryBuildDirectoryList:
+ D = self.PlaceMacro(D, {"BUILD_DIR":MyAgo.BuildDir})
+ Makefile = os.path.join(D, MakefileName)
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":Makefile}
+ LibraryMakefileList.append(Makefile)
+ LibraryMakeCommandList.append(Command)
+ self.LibraryMakeCommandList = LibraryMakeCommandList
+
+ ModuleMakefileList = []
+ ModuleMakeCommandList = []
+ for D in self.ModuleBuildDirectoryList:
+ D = self.PlaceMacro(D, {"BUILD_DIR":MyAgo.BuildDir})
+ Makefile = os.path.join(D, MakefileName)
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":Makefile}
+ ModuleMakefileList.append(Makefile)
+ ModuleMakeCommandList.append(Command)
+
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName),
+ "make_path" : MyAgo.ToolDefinition["MAKE"]["PATH"],
+ "makefile_name" : MakefileName,
+ "platform_name" : MyAgo.Name,
+ "platform_guid" : MyAgo.Guid,
+ "platform_version" : MyAgo.Version,
+ "platform_file" : MyAgo.MetaFile,
+ "platform_relative_directory": MyAgo.SourceDir,
+ "platform_output_directory" : MyAgo.OutputDir,
+ "platform_build_directory" : MyAgo.BuildDir,
+ "platform_dir" : MyAgo.Macros["PLATFORM_DIR"],
+
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+ "build_architecture_list" : MyAgo.Arch,
+ "architecture" : MyAgo.Arch,
+ "separator" : Separator,
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
+ "library_makefile_list" : LibraryMakefileList,
+ "module_makefile_list" : ModuleMakefileList,
+ "library_build_command" : LibraryMakeCommandList,
+ "module_build_command" : ModuleMakeCommandList,
+ }
+
+ return MakefileTemplateDict
+
+ ## Get the root directory list for intermediate files of all module builds
+ #
+ # @retval list The list of directories
+ #
+ def GetModuleBuildDirectoryList(self):
+ DirList = []
+ for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
+ return DirList
+
+ ## Get the root directory list for intermediate files of all library builds
+ #
+ # @retval list The list of directories
+ #
+ def GetLibraryBuildDirectoryList(self):
+ DirList = []
+ for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
+ if not LibraryAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
+ return DirList
+
+## TopLevelMakefile class
+#
+# This class encapsulates the entrance makefile and its generation. It uses a
+# template to generate the makefile content, which is obtained from the
+# WorkspaceAutoGen object.
+#
+class TopLevelMakefile(BuildFile):
+ ## template used to generate toplevel makefile
+ _TEMPLATE_ = TemplateString('''${BEGIN}\tGenFds -f ${fdf_file} --conf=${conf_directory} -o ${platform_build_directory} -t ${toolchain_tag} -b ${build_target} -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -C ${cap} ${END}${BEGIN} -D ${macro} ${END}''')
+
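+ # Example (hypothetical values): after substitution the template expands to
+ # one GenFds invocation per FDF file, e.g.
+ #   GenFds -f Platform.fdf --conf=Conf -o Build/PlatformPkg -t VS2019
+ #          -b DEBUG -p PlatformPkg.dsc -a IA32,X64 -D "FOO=1"
+ # with one -r/-i/-C option appended per FD/FV/capsule target.
+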
+ ## Constructor of TopLevelMakefile
+ #
+ # @param Workspace Object of WorkspaceAutoGen class
+ #
+ def __init__(self, Workspace):
+ BuildFile.__init__(self, Workspace)
+ self.IntermediateDirectoryList = []
+ self.DependencyHeaderFileSet = set()
+
+ # Compose a dict object containing the information used for replacement in the template
+ @property
+ def _TemplateDict(self):
+ Separator = self._SEP_[self._Platform]
+
+ # any platform autogen object is ok because we just need common information
+ MyAgo = self._AutoGenObject
+
+ if "MAKE" not in MyAgo.ToolDefinition or "PATH" not in MyAgo.ToolDefinition["MAKE"]:
+ EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
+ ExtraData="[%s]" % str(MyAgo))
+
+ for Arch in MyAgo.ArchList:
+ self.IntermediateDirectoryList.append(Separator.join(["$(BUILD_DIR)", Arch]))
+ self.IntermediateDirectoryList.append("$(FV_DIR)")
+
+ # TRICK: do not generate the GenFds call in the makefile if there is no FDF file
+ MacroList = []
+ if MyAgo.FdfFile is not None and MyAgo.FdfFile != "":
+ FdfFileList = [MyAgo.FdfFile]
+ # macros passed to GenFds
+ MacroDict = {}
+ MacroDict.update(GlobalData.gGlobalDefines)
+ MacroDict.update(GlobalData.gCommandLineDefines)
+ for MacroName in MacroDict:
+ if MacroDict[MacroName] != "":
+ MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
+ else:
+ MacroList.append('"%s"' % MacroName)
+ else:
+ FdfFileList = []
+
+ # pass extra common options to the external program called in the makefile, currently GenFds.exe
+ ExtraOption = ''
+ LogLevel = EdkLogger.GetLevel()
+ if LogLevel == EdkLogger.VERBOSE:
+ ExtraOption += " -v"
+ elif LogLevel <= EdkLogger.DEBUG_9:
+ ExtraOption += " -d %d" % (LogLevel - 1)
+ elif LogLevel == EdkLogger.QUIET:
+ ExtraOption += " -q"
+
+ if GlobalData.gCaseInsensitive:
+ ExtraOption += " -c"
+ if not GlobalData.gEnableGenfdsMultiThread:
+ ExtraOption += " --no-genfds-multi-thread"
+ if GlobalData.gIgnoreSource:
+ ExtraOption += " --ignore-sources"
+
+ for pcd in GlobalData.BuildOptionPcd:
+ if pcd[2]:
+ pcdname = '.'.join(pcd[0:3])
+ else:
+ pcdname = '.'.join(pcd[0:2])
+ if pcd[3].startswith('{'):
+ ExtraOption += " --pcd " + pcdname + '=' + 'H' + '"' + pcd[3] + '"'
+ else:
+ ExtraOption += " --pcd " + pcdname + '=' + pcd[3]
+
+ MakefileName = self.getMakefileName()
+ SubBuildCommandList = []
+ for A in MyAgo.ArchList:
+ Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":os.path.join("$(BUILD_DIR)", A, MakefileName)}
+ SubBuildCommandList.append(Command)
+
+ MakefileTemplateDict = {
+ "makefile_header" : self._FILE_HEADER_[self._FileType],
+ "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName),
+ "make_path" : MyAgo.ToolDefinition["MAKE"]["PATH"],
+ "platform_name" : MyAgo.Name,
+ "platform_guid" : MyAgo.Guid,
+ "platform_version" : MyAgo.Version,
+ "platform_build_directory" : MyAgo.BuildDir,
+ "conf_directory" : GlobalData.gConfDirectory,
+
+ "toolchain_tag" : MyAgo.ToolChain,
+ "build_target" : MyAgo.BuildTarget,
+ "shell_command_code" : list(self._SHELL_CMD_[self._Platform].keys()),
+ "shell_command" : list(self._SHELL_CMD_[self._Platform].values()),
+ 'arch' : list(MyAgo.ArchList),
+ "build_architecture_list" : ','.join(MyAgo.ArchList),
+ "separator" : Separator,
+ "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
+ "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
+ "sub_build_command" : SubBuildCommandList,
+ "fdf_file" : FdfFileList,
+ "active_platform" : str(MyAgo),
+ "fd" : MyAgo.FdTargetList,
+ "fv" : MyAgo.FvTargetList,
+ "cap" : MyAgo.CapTargetList,
+ "extra_options" : ExtraOption,
+ "macro" : MacroList,
+ }
+
+ return MakefileTemplateDict
+
+ ## Get the root directory list for intermediate files of all module builds
+ #
+ # @retval list The list of directories
+ #
+ def GetModuleBuildDirectoryList(self):
+ DirList = []
+ for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
+ if not ModuleAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
+ return DirList
+
+ ## Get the root directory list for intermediate files of all library builds
+ #
+ # @retval list The list of directories
+ #
+ def GetLibraryBuildDirectoryList(self):
+ DirList = []
+ for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
+ if not LibraryAutoGen.IsBinaryModule:
+ DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
+ return DirList
+
+## Find dependencies for one source file
+#
+# By recursively searching for "#include" directives in a file, find all the
+# files needed by the given source file. Dependencies are searched only in
+# the given search path list.
+#
+# @param File The source file
+# @param ForceList The list of files that will be forcibly included
+# @param SearchPathList The list of search paths
+#
+# @retval list The list of files the given source file depends on
+#
+def GetDependencyList(AutoGenObject, FileCache, File, ForceList, SearchPathList):
+ EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File)
+ FileStack = [File] + ForceList
+ DependencySet = set()
+
+ if AutoGenObject.Arch not in gDependencyDatabase:
+ gDependencyDatabase[AutoGenObject.Arch] = {}
+ DepDb = gDependencyDatabase[AutoGenObject.Arch]
+
+ while len(FileStack) > 0:
+ F = FileStack.pop()
+
+ FullPathDependList = []
+ if F in FileCache:
+ for CacheFile in FileCache[F]:
+ FullPathDependList.append(CacheFile)
+ if CacheFile not in DependencySet:
+ FileStack.append(CacheFile)
+ DependencySet.update(FullPathDependList)
+ continue
+
+ CurrentFileDependencyList = []
+ if F in DepDb:
+ CurrentFileDependencyList = DepDb[F]
+ else:
+ try:
+ Fd = open(F.Path, 'rb')
+ FileContent = Fd.read()
+ Fd.close()
+ except BaseException as X:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
+ if len(FileContent) == 0:
+ continue
+ try:
+ if FileContent[0] == 0xff or FileContent[0] == 0xfe:
+ FileContent = FileContent.decode('utf-16')
+ else:
+ FileContent = FileContent.decode()
+ except:
+ # not a text file (for example, a .mcb file); skip it
+ continue
+ IncludedFileList = gIncludePattern.findall(FileContent)
+
+ for Inc in IncludedFileList:
+ Inc = Inc.strip()
+ # if there's macro used to reference header file, expand it
+ HeaderList = gMacroPattern.findall(Inc)
+ if len(HeaderList) == 1 and len(HeaderList[0]) == 2:
+ HeaderType = HeaderList[0][0]
+ HeaderKey = HeaderList[0][1]
+ if HeaderType in gIncludeMacroConversion:
+ Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey}
+ else:
+ # unknown macro used in #include; force the file to always be
+ # rebuilt by returning an empty dependency list
+ FileCache[File] = []
+ return []
+ Inc = os.path.normpath(Inc)
+ CurrentFileDependencyList.append(Inc)
+ DepDb[F] = CurrentFileDependencyList
+
+ CurrentFilePath = F.Dir
+ PathList = [CurrentFilePath] + SearchPathList
+ for Inc in CurrentFileDependencyList:
+ for SearchPath in PathList:
+ FilePath = os.path.join(SearchPath, Inc)
+ if FilePath in gIsFileMap:
+ if not gIsFileMap[FilePath]:
+ continue
+ # Calling isfile too many times slows the build down, so cache the results.
+ elif not os.path.isfile(FilePath):
+ gIsFileMap[FilePath] = False
+ continue
+ else:
+ gIsFileMap[FilePath] = True
+ FilePath = PathClass(FilePath)
+ FullPathDependList.append(FilePath)
+ if FilePath not in DependencySet:
+ FileStack.append(FilePath)
+ break
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\
+ "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList)))
+
+ FileCache[F] = FullPathDependList
+ DependencySet.update(FullPathDependList)
+
+ DependencySet.update(ForceList)
+ if File in DependencySet:
+ DependencySet.remove(File)
+ DependencyList = list(DependencySet) # remove duplicate ones
+
+ return DependencyList
+
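+# Example (hypothetical layout): for a line '#include "Bar.h"' found in Foo.c,
+# each path in [Foo.c's directory] + SearchPathList is probed in order and the
+# first candidate for which os.path.isfile() succeeds is recorded as the
+# dependency; probe results are cached in gIsFileMap so repeated lookups of
+# the same candidate path never hit the filesystem twice.
+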
+# This acts like the main() function for the script, unless it is 'import'ed into another script.
+if __name__ == '__main__':
+ pass
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py
new file mode 100755
index 00000000..8b2cbebc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenPcdDb.py
@@ -0,0 +1,1615 @@
+## @file
+# Routines for generating Pcd Database
+#
+# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from io import BytesIO
+from Common.Misc import *
+from Common.StringUtils import StringToArray
+from struct import pack
+from .ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER
+from .ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB
+from .ValidCheckingInfoObject import GetValidationObject
+from Common.VariableAttributes import VariableAttributes
+import copy
+from struct import unpack
+from Common.DataType import *
+from Common import GlobalData
+from Common import EdkLogger
+import Common.LongFilePathOs as os
+
+DATABASE_VERSION = 7
+
+gPcdDatabaseAutoGenC = TemplateString("""
+//
+// External PCD database debug information
+//
+#if 0
+${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
+ /* SkuIdTable */
+ { ${BEGIN}${SKUID_VALUE}, ${END} },
+${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */
+${END}
+${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */
+${END}
+ /* VPD */
+${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */
+${END}
+ /* ExMapTable */
+ {
+${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} },
+${END}
+ },
+ /* LocalTokenNumberTable */
+ {
+${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE},
+${END}
+ },
+ /* GuidTable */
+ {
+${BEGIN} ${GUID_STRUCTURE},
+${END}
+ },
+${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */
+${END}
+${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}] */
+ {
+ ${VARIABLE_HEAD_VALUE}
+ },
+${END}
+/* SkuHead */
+ {
+ ${BEGIN} offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE}, /* */
+ offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.SkuHead) /* */
+ ${END}
+ },
+ /* StringTable */
+${BEGIN} ${STRING_TABLE_VALUE}, /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
+${END}
+ /* SizeTable */
+ {
+${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, ${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */
+${END}
+ },
+${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */
+${END}
+${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */
+${END}
+${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */
+${END}
+${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */
+${END}
+ ${SYSTEM_SKU_ID_VALUE}
+};
+#endif
+""")
+
+## Mapping between PCD driver type and EFI phase
+gPcdPhaseMap = {
+ "PEI_PCD_DRIVER" : "PEI",
+ "DXE_PCD_DRIVER" : "DXE"
+}
+
+gPcdDatabaseAutoGenH = TemplateString("""
+#define PCD_${PHASE}_SERVICE_DRIVER_VERSION ${SERVICE_DRIVER_VERSION}
+
+//
+// External PCD database debug information
+//
+#if 0
+#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE}
+#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE}
+#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE}
+#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE}
+#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER}
+#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE}
+#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER}
+#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE}
+#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY}
+#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY}
+#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY}
+#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY}
+#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY}
+
+typedef struct {
+ UINT64 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE];
+${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}];
+${END}
+${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64};
+${END}
+${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}];
+${END}
+${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32};
+${END}
+${BEGIN} VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}];
+${END}
+ DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE];
+ UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE];
+ GUID GuidTable[${PHASE}_GUID_TABLE_SIZE];
+${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}];
+${END}
+${BEGIN} VARIABLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}];
+${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
+${END}
+ SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE];
+${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}];
+${END}
+${BEGIN} UINT16 ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16};
+${END}
+${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}];
+${END}
+${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8};
+${END}
+${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}];
+${END}
+${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN};
+${END}
+${SYSTEM_SKU_ID}
+} ${PHASE}_PCD_DATABASE_INIT;
+
+typedef struct {
+${PCD_DATABASE_UNINIT_EMPTY}
+${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}];
+${END}
+${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}];
+${END}
+${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}];
+${END}
+${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}];
+${END}
+${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}];
+${END}
+} ${PHASE}_PCD_DATABASE_UNINIT;
+
+typedef struct {
+ //GUID Signature; // PcdDataBaseGuid
+ //UINT32 BuildVersion;
+ //UINT32 Length;
+ //SKU_ID SystemSkuId; // Current SkuId value.
+ //UINT32 LengthForAllSkus; // Length of all SKU PCD DB
+ //UINT32 UninitDataBaseSize;// Total size of PCDs whose default value is 0.
+ //TABLE_OFFSET LocalTokenNumberTableOffset;
+ //TABLE_OFFSET ExMapTableOffset;
+ //TABLE_OFFSET GuidTableOffset;
+ //TABLE_OFFSET StringTableOffset;
+ //TABLE_OFFSET SizeTableOffset;
+ //TABLE_OFFSET SkuIdTableOffset;
+ //TABLE_OFFSET PcdNameTableOffset;
+ //UINT16 LocalTokenCount; // LOCAL_TOKEN_NUMBER for all
+ //UINT16 ExTokenCount; // EX_TOKEN_NUMBER for DynamicEx
+ //UINT16 GuidTableCount; // The Number of Guid in GuidTable
+ //UINT8 Pad[6];
+ ${PHASE}_PCD_DATABASE_INIT Init;
+ ${PHASE}_PCD_DATABASE_UNINIT Uninit;
+} ${PHASE}_PCD_DATABASE;
+
+#define ${PHASE}_NEX_TOKEN_NUMBER (${PHASE}_LOCAL_TOKEN_NUMBER - ${PHASE}_EX_TOKEN_NUMBER)
+#endif
+""")
+
+
+gEmptyPcdDatabaseAutoGenC = TemplateString("""
+//
+// External PCD database debug information
+//
+#if 0
+${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
+ /* SkuIdTable */
+ { 0 },
+ /* ExMapTable */
+ {
+ {0, 0, 0}
+ },
+ /* LocalTokenNumberTable */
+ {
+ 0
+ },
+ /* GuidTable */
+ {
+ {0x00000000, 0x0000, 0x0000, {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}}
+ },
+ /* StringTable */
+ { 0 },
+ /* SkuHead */
+ {
+ 0, 0
+ },
+ /* SizeTable */
+ {
+ 0, 0
+ },
+ ${SYSTEM_SKU_ID_VALUE}
+};
+#endif
+""")
+
+## DbItemList
+#
+# The class holds the PCD database items. ItemSize, if not zero, should match
+# the size of the item's datum type in the C structure; when the structure
+# changes, remember to check ItemSize and the related PackStr in PackData().
+# RawDataList holds raw data that may need some calculation or transformation;
+# DataList holds the data to be written to the database. If DataList is not
+# present, RawDataList is written to the database instead.
+#
+class DbItemList:
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ self.ItemSize = ItemSize
+ self.DataList = DataList if DataList else []
+ self.RawDataList = RawDataList if RawDataList else []
+ self.ListSize = 0
+
+ def GetInterOffset(self, Index):
+ Offset = 0
+ if self.ItemSize == 0:
+ #
+ # Variable length, need to calculate one by one
+ #
+ assert(Index < len(self.RawDataList))
+ for ItemIndex in range(Index):
+ Offset += len(self.RawDataList[ItemIndex])
+ else:
+ Offset = self.ItemSize * Index
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+ if len(self.RawDataList) == 0:
+ self.ListSize = 0
+ return self.ListSize
+ if self.ItemSize == 0:
+ self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1])
+ else:
+ self.ListSize = self.ItemSize * len(self.RawDataList)
+ return self.ListSize
+
+ def PackData(self):
+ ## PackGuid
+ #
+ # Pack the GUID value in C structure format into data array
+ #
+ # @param GuidStructureValue: The GUID value in C structure format
+ #
+ # @retval Buffer: a data array contains the Guid
+ #
+ def PackGuid(GuidStructureValue):
+ GuidString = GuidStructureStringToGuidString(GuidStructureValue)
+ return PackGUID(GuidString.split('-'))
+
+ PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
+
+ Buffer = bytearray()
+ for Datas in self.RawDataList:
+ if type(Datas) in (list, tuple):
+ for Data in Datas:
+ if PackStr:
+ Buffer += pack(PackStr, GetIntegerValue(Data))
+ else:
+ Buffer += PackGuid(Data)
+ else:
+ if PackStr:
+ Buffer += pack(PackStr, GetIntegerValue(Datas))
+ else:
+ Buffer += PackGuid(Datas)
+
+ return Buffer
+
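+# Example: for a fixed-size list such as DbItemList(4, RawDataList=[1, 2, 3]),
+# GetInterOffset(2) == 4 * 2 == 8 and GetListSize() == 4 * 3 == 12; with
+# ItemSize == 0 the offsets are instead accumulated from the length of each
+# raw item, which is how the variable-length string table is measured.
+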
+## DbExMapTblItemList
+#
+# The class holds the ExMap table
+#
+class DbExMapTblItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def PackData(self):
+ Buffer = bytearray()
+ PackStr = "=LHH"
+ for Datas in self.RawDataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Datas[0]),
+ GetIntegerValue(Datas[1]),
+ GetIntegerValue(Datas[2]))
+ return Buffer
+
+## DbComItemList
+#
+# DbComItemList is a special kind of DbItemList for the case where the size of
+# the list cannot be computed as ItemSize multiplied by the item count.
+#
+class DbComItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def GetInterOffset(self, Index):
+ Offset = 0
+ if self.ItemSize == 0:
+ #
+ # Variable length would need to be calculated one by one, but the only
+ # variable-length table is the string table, which is not a composite
+ # item, so this point should never be reached
+ #
+ assert(False)
+ else:
+ assert(Index < len(self.RawDataList))
+ for ItemIndex in range(Index):
+ Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+ if self.ItemSize == 0:
+ assert(False)
+ else:
+ if len(self.RawDataList) == 0:
+ self.ListSize = 0
+ else:
+ self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1]) * self.ItemSize
+
+ return self.ListSize
+
+ def PackData(self):
+ PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
+
+ Buffer = bytearray()
+ for DataList in self.RawDataList:
+ for Data in DataList:
+ if type(Data) in (list, tuple):
+ for SingleData in Data:
+ Buffer += pack(PackStr, GetIntegerValue(SingleData))
+ else:
+ Buffer += pack(PackStr, GetIntegerValue(Data))
+
+ return Buffer
+
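+# Example: for DbComItemList(2, RawDataList=[[10, 11], [12]]), each raw entry
+# is itself a list, so GetInterOffset(1) == len([10, 11]) * 2 == 4 and
+# GetListSize() == 4 + len([12]) * 2 == 6.
+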
+## DbVariableTableItemList
+#
+# The class holds the Variable header value table
+#
+class DbVariableTableItemList (DbComItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbComItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def PackData(self):
+ PackStr = "=LLHHLHH"
+ Buffer = bytearray()
+ for DataList in self.RawDataList:
+ for Data in DataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Data[0]),
+ GetIntegerValue(Data[1]),
+ GetIntegerValue(Data[2]),
+ GetIntegerValue(Data[3]),
+ GetIntegerValue(Data[4]),
+ GetIntegerValue(Data[5]),
+ GetIntegerValue(0))
+ return Buffer
+
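+# Note: "=LLHHLHH" packs 4+4+2+2+4+2+2 == 20 bytes per variable header, which
+# matches the ItemSize of 20 that BuildExDataBase() passes when constructing
+# this table.
+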
+class DbStringHeadTableItemList(DbItemList):
+ def __init__(self,ItemSize,DataList=None,RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def GetInterOffset(self, Index):
+ Offset = 0
+ if self.ItemSize == 0:
+ #
+ # Variable length, need to calculate one by one
+ #
+ assert(Index < len(self.RawDataList))
+ for ItemIndex in range(Index):
+ Offset += len(self.RawDataList[ItemIndex])
+ else:
+ for innerIndex in range(Index):
+ if type(self.RawDataList[innerIndex]) in (list, tuple):
+ Offset += len(self.RawDataList[innerIndex]) * self.ItemSize
+ else:
+ Offset += self.ItemSize
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+ if len(self.RawDataList) == 0:
+ self.ListSize = 0
+ return self.ListSize
+ if self.ItemSize == 0:
+ self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1])
+ else:
+ for Datas in self.RawDataList:
+ if type(Datas) in (list, tuple):
+ self.ListSize += len(Datas) * self.ItemSize
+ else:
+ self.ListSize += self.ItemSize
+ return self.ListSize
+
+## DbSkuHeadTableItemList
+#
+# The class holds the Sku header value table
+#
+class DbSkuHeadTableItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def PackData(self):
+ PackStr = "=LL"
+ Buffer = bytearray()
+ for Data in self.RawDataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Data[0]),
+ GetIntegerValue(Data[1]))
+ return Buffer
+
+## DbSizeTableItemList
+#
+# The class holds the size table
+#
+class DbSizeTableItemList (DbItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None):
+ DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+
+ def GetListSize(self):
+ length = 0
+ for Data in self.RawDataList:
+ length += (1 + len(Data[1]))
+ return length * self.ItemSize
+ def PackData(self):
+ PackStr = "=H"
+ Buffer = bytearray()
+ for Data in self.RawDataList:
+ Buffer += pack(PackStr,
+ GetIntegerValue(Data[0]))
+ for subData in Data[1]:
+ Buffer += pack(PackStr,
+ GetIntegerValue(subData))
+ return Buffer
+
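+# Example: with ItemSize == 2 and RawDataList == [(8, [3, 5])] (one PCD with
+# maximum length 8 and two SKU-specific current lengths), GetListSize()
+# returns (1 + 2) * 2 == 6 bytes: one UINT16 for the maximum length followed
+# by one UINT16 per current length.
+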
+## DbStringItemList
+#
+# The class holds the string table
+#
+class DbStringItemList (DbComItemList):
+ def __init__(self, ItemSize, DataList=None, RawDataList=None, LenList=None):
+ if DataList is None:
+ DataList = []
+ if RawDataList is None:
+ RawDataList = []
+ if LenList is None:
+ LenList = []
+
+ assert(len(RawDataList) == len(LenList))
+ DataList = []
+ # adjust DataList according to the LenList
+ for Index in range(len(RawDataList)):
+ Len = LenList[Index]
+ RawDatas = RawDataList[Index]
+ assert(Len >= len(RawDatas))
+ ActualDatas = []
+ for i in range(len(RawDatas)):
+ ActualDatas.append(RawDatas[i])
+ for i in range(len(RawDatas), Len):
+ ActualDatas.append(0)
+ DataList.append(ActualDatas)
+ self.LenList = LenList
+ DbComItemList.__init__(self, ItemSize, DataList, RawDataList)
+ def GetInterOffset(self, Index):
+ Offset = 0
+
+ assert(Index < len(self.LenList))
+ for ItemIndex in range(Index):
+ Offset += self.LenList[ItemIndex]
+
+ return Offset
+
+ def GetListSize(self):
+ if self.ListSize:
+ return self.ListSize
+
+ if len(self.LenList) == 0:
+ self.ListSize = 0
+ else:
+ self.ListSize = self.GetInterOffset(len(self.LenList) - 1) + self.LenList[len(self.LenList)-1]
+
+ return self.ListSize
+
+ def PackData(self):
+ self.RawDataList = self.DataList
+ return DbComItemList.PackData(self)
+
+
+
+## Find the index at which both lists match their respective keys
+#
+# @param Key1 The key used to search List1
+# @param List1 The list in which Key1 will be searched
+# @param Key2 The key used to search List2
+# @param List2 The list in which Key2 will be searched
+#
+# @retval Index The position inside the list where list1[Index] == Key1 and list2[Index] == Key2
+#
+def GetMatchedIndex(Key1, List1, Key2, List2):
+ StartPos = 0
+ while StartPos < len(List1):
+ Index = List1.index(Key1, StartPos)
+ if List2[Index] == Key2:
+ return Index
+ else:
+ StartPos = Index + 1
+
+ return -1
+
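+# Example: GetMatchedIndex('a', ['a', 'b', 'a'], 2, [1, 0, 2]) returns 2,
+# because index 0 matches Key1 only; the search resumes past it and index 2
+# matches both keys. If no position matches both, -1 is returned.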
+
+## convert StringArray like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
+# to List like [0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00]
+#
+# @param StringArray A string array like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
+#
+# @retval A list object of integer items
+#
+def StringArrayToList(StringArray):
+ StringArray = StringArray[1:-1]
+ StringArray = '[' + StringArray + ']'
+ return eval(StringArray)
+
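+# Example: StringArrayToList('{0x36, 0x00, 0x34, 0x00}') evaluates to the
+# list [0x36, 0x00, 0x34, 0x00], i.e. [54, 0, 52, 0].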
+
+## Convert TokenType String like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII" to TokenType value
+#
+# @param TokenType A TokenType string like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII"
+#
+# @retval An integer representation of the TokenType
+#
+def GetTokenTypeValue(TokenType):
+ TokenTypeDict = {
+ "PCD_TYPE_SHIFT": 28,
+ "PCD_TYPE_DATA": (0x0 << 28),
+ "PCD_TYPE_HII": (0x8 << 28),
+ "PCD_TYPE_VPD": (0x4 << 28),
+# "PCD_TYPE_SKU_ENABLED":(0x2 << 28),
+ "PCD_TYPE_STRING": (0x1 << 28),
+
+ "PCD_DATUM_TYPE_SHIFT": 24,
+ "PCD_DATUM_TYPE_POINTER": (0x0 << 24),
+ "PCD_DATUM_TYPE_UINT8": (0x1 << 24),
+ "PCD_DATUM_TYPE_UINT16": (0x2 << 24),
+ "PCD_DATUM_TYPE_UINT32": (0x4 << 24),
+ "PCD_DATUM_TYPE_UINT64": (0x8 << 24),
+
+ "PCD_DATUM_TYPE_SHIFT2": 20,
+ "PCD_DATUM_TYPE_UINT8_BOOLEAN": (0x1 << 20 | 0x1 << 24),
+ }
+ return eval(TokenType, TokenTypeDict)
+
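+# Example: GetTokenTypeValue("PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII") evaluates
+# to (0x4 << 24) | (0x8 << 28) == 0x84000000.
+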
+## Construct the external PCD database using data from Dict
+#
+# @param Dict A dictionary containing PCD-related tables
+#
+# @retval Buffer A byte stream of the PCD database
+#
+def BuildExDataBase(Dict):
+ # init Db items
+ InitValueUint64 = Dict['INIT_DB_VALUE_UINT64']
+ DbInitValueUint64 = DbComItemList(8, RawDataList = InitValueUint64)
+ VardefValueUint64 = Dict['VARDEF_DB_VALUE_UINT64']
+ DbVardefValueUint64 = DbItemList(8, RawDataList = VardefValueUint64)
+ InitValueUint32 = Dict['INIT_DB_VALUE_UINT32']
+ DbInitValueUint32 = DbComItemList(4, RawDataList = InitValueUint32)
+ VardefValueUint32 = Dict['VARDEF_DB_VALUE_UINT32']
+ DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
+ VpdHeadValue = Dict['VPD_DB_VALUE']
+ DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
+ ExMapTable = list(zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX']))
+ DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
+ LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
+ DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
+ GuidTable = Dict['GUID_STRUCTURE']
+ DbGuidTable = DbItemList(16, RawDataList = GuidTable)
+ StringHeadValue = Dict['STRING_DB_VALUE']
+ # DbItemList to DbStringHeadTableItemList
+ DbStringHeadValue = DbStringHeadTableItemList(4, RawDataList = StringHeadValue)
+ VariableTable = Dict['VARIABLE_DB_VALUE']
+ DbVariableTable = DbVariableTableItemList(20, RawDataList = VariableTable)
+ NumberOfSkuEnabledPcd = GetIntegerValue(Dict['SKU_HEAD_SIZE'])
+
+ Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
+
+ StringTableValue = Dict['STRING_TABLE_DB_VALUE']
+    # When calculating the offset, use StringTableLen instead of StringTableValue, as a string's maximum length may differ from its actual length
+ StringTableLen = Dict['STRING_TABLE_LENGTH']
+ DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
+
+
+ PcdTokenTable = Dict['PCD_TOKENSPACE']
+ PcdTokenLen = Dict['PCD_TOKENSPACE_LENGTH']
+ PcdTokenTableValue = [StringArrayToList(x) for x in Dict['PCD_TOKENSPACE']]
+ DbPcdTokenTable = DbStringItemList(0, RawDataList = PcdTokenTableValue, LenList = PcdTokenLen)
+
+ PcdCNameTable = Dict['PCD_CNAME']
+ PcdCNameLen = Dict['PCD_CNAME_LENGTH']
+ PcdCNameTableValue = [StringArrayToList(x) for x in Dict['PCD_CNAME']]
+ DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
+
+ PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
+ DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
+
+ SizeTableValue = list(zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH']))
+ DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
+ InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
+ DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
+ VardefValueUint16 = Dict['VARDEF_DB_VALUE_UINT16']
+ DbVardefValueUint16 = DbItemList(2, RawDataList = VardefValueUint16)
+ InitValueUint8 = Dict['INIT_DB_VALUE_UINT8']
+ DbInitValueUint8 = DbComItemList(1, RawDataList = InitValueUint8)
+ VardefValueUint8 = Dict['VARDEF_DB_VALUE_UINT8']
+ DbVardefValueUint8 = DbItemList(1, RawDataList = VardefValueUint8)
+ InitValueBoolean = Dict['INIT_DB_VALUE_BOOLEAN']
+ DbInitValueBoolean = DbComItemList(1, RawDataList = InitValueBoolean)
+ VardefValueBoolean = Dict['VARDEF_DB_VALUE_BOOLEAN']
+ DbVardefValueBoolean = DbItemList(1, RawDataList = VardefValueBoolean)
+ SkuidValue = Dict['SKUID_VALUE']
+ DbSkuidValue = DbItemList(8, RawDataList = SkuidValue)
+
+
+
+ # Unit Db Items
+ UnInitValueUint64 = Dict['UNINIT_GUID_DECL_UINT64']
+ DbUnInitValueUint64 = DbItemList(8, RawDataList = UnInitValueUint64)
+ UnInitValueUint32 = Dict['UNINIT_GUID_DECL_UINT32']
+ DbUnInitValueUint32 = DbItemList(4, RawDataList = UnInitValueUint32)
+ UnInitValueUint16 = Dict['UNINIT_GUID_DECL_UINT16']
+ DbUnInitValueUint16 = DbItemList(2, RawDataList = UnInitValueUint16)
+ UnInitValueUint8 = Dict['UNINIT_GUID_DECL_UINT8']
+ DbUnInitValueUint8 = DbItemList(1, RawDataList = UnInitValueUint8)
+ UnInitValueBoolean = Dict['UNINIT_GUID_DECL_BOOLEAN']
+ DbUnInitValueBoolean = DbItemList(1, RawDataList = UnInitValueBoolean)
+ PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
+
+    DbNameTotal = ["SkuidValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
+ "LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable", "VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
+ "SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
+ "VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
+
+ DbTotal = [SkuidValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
+ LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable, VariableTable, StringTableLen, PcdTokenTable, PcdCNameTable,
+ SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
+ VardefValueBoolean, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
+ DbItemTotal = [DbSkuidValue, DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
+ DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable, DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
+ DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
+ DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
+
+ # VardefValueBoolean is the last table in the init table items
+    InitTableNum = DbNameTotal.index("VardefValueBoolean") + 1
+ # The FixedHeader length of the PCD_DATABASE_INIT, from Signature to Pad
+ FixedHeaderLen = 80
+
+ # Get offset of SkuId table in the database
+ SkuIdTableOffset = FixedHeaderLen
+ for DbIndex in range(len(DbTotal)):
+ if DbTotal[DbIndex] is SkuidValue:
+ break
+ SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
+
+
+    # Fix up the LocalTokenNumberTable: each entry becomes (offset within the database | token type flags)
+ for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
+ DbIndex = 0
+ DbOffset = FixedHeaderLen
+ for DbIndex in range(len(DbTotal)):
+ if DbTotal[DbIndex] is Table:
+ DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
+ break
+ DbOffset += DbItemTotal[DbIndex].GetListSize()
+ if DbIndex + 1 == InitTableNum:
+ if DbOffset % 8:
+ DbOffset += (8 - DbOffset % 8)
+ else:
+ assert(False)
+
+ TokenTypeValue = Dict['TOKEN_TYPE'][LocalTokenNumberTableIndex]
+ TokenTypeValue = GetTokenTypeValue(TokenTypeValue)
+ LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
+ # if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
+
+
+
+
+ # resolve variable table offset
+ for VariableEntries in VariableTable:
+ skuindex = 0
+ for VariableEntryPerSku in VariableEntries:
+ (VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
+ DbIndex = 0
+ DbOffset = FixedHeaderLen
+ for DbIndex in range(len(DbTotal)):
+ if DbTotal[DbIndex] is VariableRefTable:
+ DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
+ break
+ DbOffset += DbItemTotal[DbIndex].GetListSize()
+ if DbIndex + 1 == InitTableNum:
+ if DbOffset % 8:
+ DbOffset += (8 - DbOffset % 8)
+ else:
+ assert(False)
+ if isinstance(VariableRefTable[0], list):
+ DbOffset += skuindex * 4
+ skuindex += 1
+ if DbIndex >= InitTableNum:
+ assert(False)
+ VarAttr, VarProp = VariableAttributes.GetVarAttributes(VariableAttribute)
+ VariableEntryPerSku[:] = (VariableHeadStringIndex, DbOffset, VariableHeadGuidIndex, SKUVariableOffset, VarAttr, VarProp)
+
+ # calculate various table offset now
+ DbTotalLength = FixedHeaderLen
+ for DbIndex in range(len(DbItemTotal)):
+ if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
+ LocalTokenNumberTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbExMapTable:
+ ExMapTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbGuidTable:
+ GuidTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbStringTableLen:
+ StringTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbSizeTableValue:
+ SizeTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbSkuidValue:
+ SkuIdTableOffset = DbTotalLength
+ elif DbItemTotal[DbIndex] is DbPcdNameOffsetTable:
+ DbPcdNameOffset = DbTotalLength
+
+
+ DbTotalLength += DbItemTotal[DbIndex].GetListSize()
+ if not Dict['PCD_INFO_FLAG']:
+ DbPcdNameOffset = 0
+ LocalTokenCount = GetIntegerValue(Dict['LOCAL_TOKEN_NUMBER'])
+ ExTokenCount = GetIntegerValue(Dict['EX_TOKEN_NUMBER'])
+ GuidTableCount = GetIntegerValue(Dict['GUID_TABLE_SIZE'])
+ SystemSkuId = GetIntegerValue(Dict['SYSTEM_SKU_ID_VALUE'])
+ Pad = 0xDA
+
+ UninitDataBaseSize = 0
+ for Item in (DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean):
+ UninitDataBaseSize += Item.GetListSize()
+
+ if (DbTotalLength - UninitDataBaseSize) % 8:
+ DbTotalLength += (8 - (DbTotalLength - UninitDataBaseSize) % 8)
+ # Construct the database buffer
+ Guid = "{0x3c7d193c, 0x682c, 0x4c14, 0xa6, 0x8f, 0x55, 0x2d, 0xea, 0x4f, 0x43, 0x7e}"
+ Guid = StringArrayToList(Guid)
+ Buffer = PackByteFormatGUID(Guid)
+
+ b = pack("=L", DATABASE_VERSION)
+ Buffer += b
+
+ b = pack('=L', DbTotalLength - UninitDataBaseSize)
+
+ Buffer += b
+ b = pack('=Q', SystemSkuId)
+
+ Buffer += b
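+    # Placeholder UINT32 at offset 32; CreatePcdDataBase later overwrites these
+    # four bytes with the total database length (default data plus SKU deltas)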
+ b = pack('=L', 0)
+
+ Buffer += b
+ b = pack('=L', UninitDataBaseSize)
+
+ Buffer += b
+ b = pack('=L', LocalTokenNumberTableOffset)
+
+ Buffer += b
+ b = pack('=L', ExMapTableOffset)
+
+ Buffer += b
+ b = pack('=L', GuidTableOffset)
+
+ Buffer += b
+ b = pack('=L', StringTableOffset)
+
+ Buffer += b
+ b = pack('=L', SizeTableOffset)
+
+ Buffer += b
+ b = pack('=L', SkuIdTableOffset)
+
+ Buffer += b
+ b = pack('=L', DbPcdNameOffset)
+
+ Buffer += b
+ b = pack('=H', LocalTokenCount)
+
+ Buffer += b
+ b = pack('=H', ExTokenCount)
+
+ Buffer += b
+ b = pack('=H', GuidTableCount)
+
+ Buffer += b
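+    # Pad the remaining bytes so the fixed header totals FixedHeaderLen (80)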
+ b = pack('=B', Pad)
+ Buffer += b
+ Buffer += b
+ Buffer += b
+ Buffer += b
+ Buffer += b
+ Buffer += b
+
+ Index = 0
+ for Item in DbItemTotal:
+ Index +=1
+ packdata = Item.PackData()
+ for i in range(len(packdata)):
+ Buffer += packdata[i:i + 1]
+ if Index == InitTableNum:
+ if len(Buffer) % 8:
+ for num in range(8 - len(Buffer) % 8):
+ b = pack('=B', Pad)
+ Buffer += b
+ break
+ return Buffer
+
+## Create code for PCD database
+#
+# @param Info The ModuleAutoGen object
+# @param AutoGenC The TemplateString object for C code
+# @param AutoGenH The TemplateString object for header file
+#
+def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH):
+ if Info.PcdIsDriver == "":
+ return
+ if Info.PcdIsDriver not in gPcdPhaseMap:
+        EdkLogger.error("build", AUTOGEN_ERROR, "Unsupported PcdIsDriver type: %s" % Info.PcdIsDriver,
+ ExtraData="[%s]" % str(Info))
+
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer = NewCreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, 'PEI')
+ AutoGenH.Append(AdditionalAutoGenH.String)
+
+ Phase = gPcdPhaseMap[Info.PcdIsDriver]
+ if Phase == 'PEI':
+ AutoGenC.Append(AdditionalAutoGenC.String)
+
+ if Phase == 'DXE':
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer = NewCreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, Phase)
+ AutoGenH.Append(AdditionalAutoGenH.String)
+ AutoGenC.Append(AdditionalAutoGenC.String)
+
+ if Info.IsBinaryModule:
+ DbFileName = os.path.join(Info.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, Phase + "PcdDataBase.raw")
+ else:
+ DbFileName = os.path.join(Info.OutputDir, Phase + "PcdDataBase.raw")
+ DbFile = BytesIO()
+ DbFile.write(PcdDbBuffer)
+ Changed = SaveFileOnChange(DbFileName, DbFile.getvalue(), True)
+def CreatePcdDataBase(PcdDBData):
+ delta = {}
+ for skuname, skuid in PcdDBData:
+ if len(PcdDBData[(skuname, skuid)][1]) != len(PcdDBData[(TAB_DEFAULT, "0")][1]):
+            EdkLogger.error("build", AUTOGEN_ERROR, "The PCD database size is not the same for every SKU")
+ for skuname, skuid in PcdDBData:
+ if skuname == TAB_DEFAULT:
+ continue
+ delta[(skuname, skuid)] = [(index, data, hex(data)) for index, data in enumerate(PcdDBData[(skuname, skuid)][1]) if PcdDBData[(skuname, skuid)][1][index] != PcdDBData[(TAB_DEFAULT, "0")][1][index]]
+ databasebuff = PcdDBData[(TAB_DEFAULT, "0")][0]
+
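+    # Each SKU delta record appended after the default database is, in order:
+    # UINT64 SkuId, UINT64 reserved, UINT32 record length, then one UINT32 per
+    # changed byte whose low 24 bits hold the offset into the default database
+    # and whose high byte holds the new value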
+ for skuname, skuid in delta:
+ # 8 byte align
+ if len(databasebuff) % 8 > 0:
+ for i in range(8 - (len(databasebuff) % 8)):
+ databasebuff += pack("=B", 0)
+ databasebuff += pack('=Q', int(skuid))
+ databasebuff += pack('=Q', 0)
+ databasebuff += pack('=L', 8+8+4+4*len(delta[(skuname, skuid)]))
+ for item in delta[(skuname, skuid)]:
+ databasebuff += pack("=L", item[0])
+ databasebuff = databasebuff[:-1] + pack("=B", item[1])
+ totallen = len(databasebuff)
+ totallenbuff = pack("=L", totallen)
+ newbuffer = databasebuff[:32]
+ for i in range(4):
+ newbuffer += totallenbuff[i:i+1]
+ for i in range(36, totallen):
+ newbuffer += databasebuff[i:i+1]
+
+ return newbuffer
+
+def CreateVarCheckBin(VarCheckTab):
+ return VarCheckTab[(TAB_DEFAULT, "0")]
+
+def CreateAutoGen(PcdDriverAutoGenData):
+ autogenC = TemplateString()
+ for skuname, skuid in PcdDriverAutoGenData:
+ autogenC.Append("//SKUID: %s" % skuname)
+ autogenC.Append(PcdDriverAutoGenData[(skuname, skuid)][1].String)
+ return (PcdDriverAutoGenData[(skuname, skuid)][0], autogenC)
+def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
+ def prune_sku(pcd, skuname):
+ new_pcd = copy.deepcopy(pcd)
+ new_pcd.SkuInfoList = {skuname:pcd.SkuInfoList[skuname]}
+ new_pcd.isinit = 'INIT'
+ if new_pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ for skuobj in pcd.SkuInfoList.values():
+ if skuobj.DefaultValue:
+ defaultvalue = int(skuobj.DefaultValue, 16) if skuobj.DefaultValue.upper().startswith("0X") else int(skuobj.DefaultValue, 10)
+ if defaultvalue != 0:
+ new_pcd.isinit = "INIT"
+ break
+ elif skuobj.VariableName:
+ new_pcd.isinit = "INIT"
+ break
+ else:
+ new_pcd.isinit = "UNINIT"
+ return new_pcd
+ DynamicPcds = Platform.DynamicPcdList
+ DynamicPcdSet_Sku = {(SkuName, skuobj.SkuId):[] for pcd in DynamicPcds for (SkuName, skuobj) in pcd.SkuInfoList.items() }
+ for skuname, skuid in DynamicPcdSet_Sku:
+ DynamicPcdSet_Sku[(skuname, skuid)] = [prune_sku(pcd, skuname) for pcd in DynamicPcds]
+ PcdDBData = {}
+ PcdDriverAutoGenData = {}
+ VarCheckTableData = {}
+ if DynamicPcdSet_Sku:
+ for skuname, skuid in DynamicPcdSet_Sku:
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
+ final_data = ()
+ for item in range(len(PcdDbBuffer)):
+ final_data += unpack("B", PcdDbBuffer[item:item+1])
+ PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
+ PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
+ VarCheckTableData[(skuname, skuid)] = VarCheckTab
+ if Platform.Platform.VarCheckFlag:
+ dest = os.path.join(Platform.BuildDir, TAB_FV_DIRECTORY)
+ VarCheckTable = CreateVarCheckBin(VarCheckTableData)
+ VarCheckTable.dump(dest, Phase)
+ AdditionalAutoGenH, AdditionalAutoGenC = CreateAutoGen(PcdDriverAutoGenData)
+ else:
+ AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
+ final_data = ()
+ for item in range(len(PcdDbBuffer)):
+ final_data += unpack("B", PcdDbBuffer[item:item + 1])
+ PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
+
+ return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
+## Create PCD database in DXE or PEI phase
+#
+# @param Platform        The platform object
+# @param DynamicPcdList  The list of dynamic PCDs to put into the database
+# @param Phase           'PEI' or 'DXE'
+#
+# @retval tuple The AutoGenH and AutoGenC TemplateString objects, the PCD
+#               database buffer, and the variable check table
+#
+def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
+ AutoGenC = TemplateString()
+ AutoGenH = TemplateString()
+
+ Dict = {
+ 'PHASE' : Phase,
+ 'SERVICE_DRIVER_VERSION' : DATABASE_VERSION,
+ 'GUID_TABLE_SIZE' : '1U',
+ 'STRING_TABLE_SIZE' : '1U',
+ 'SKUID_TABLE_SIZE' : '1U',
+ 'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '0U',
+ 'LOCAL_TOKEN_NUMBER' : '0U',
+ 'EXMAPPING_TABLE_SIZE' : '1U',
+ 'EX_TOKEN_NUMBER' : '0U',
+ 'SIZE_TABLE_SIZE' : '2U',
+ 'SKU_HEAD_SIZE' : '1U',
+ 'GUID_TABLE_EMPTY' : 'TRUE',
+ 'STRING_TABLE_EMPTY' : 'TRUE',
+ 'SKUID_TABLE_EMPTY' : 'TRUE',
+ 'DATABASE_EMPTY' : 'TRUE',
+ 'EXMAP_TABLE_EMPTY' : 'TRUE',
+ 'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is empty */',
+ 'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
+ 'SYSTEM_SKU_ID_VALUE' : '0U'
+ }
+
+ SkuObj = Platform.Platform.SkuIdMgr
+ Dict['SYSTEM_SKU_ID_VALUE'] = 0 if SkuObj.SkuUsageType == SkuObj.SINGLE else Platform.Platform.SkuIds[SkuObj.SystemSkuId][0]
+
+ Dict['PCD_INFO_FLAG'] = Platform.Platform.PcdInfoFlag
+
+ for DatumType in TAB_PCD_NUMERIC_TYPES_VOID:
+ Dict['VARDEF_CNAME_' + DatumType] = []
+ Dict['VARDEF_GUID_' + DatumType] = []
+ Dict['VARDEF_SKUID_' + DatumType] = []
+ Dict['VARDEF_VALUE_' + DatumType] = []
+ Dict['VARDEF_DB_VALUE_' + DatumType] = []
+ for Init in ['INIT', 'UNINIT']:
+ Dict[Init+'_CNAME_DECL_' + DatumType] = []
+ Dict[Init+'_GUID_DECL_' + DatumType] = []
+ Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
+ Dict[Init+'_VALUE_' + DatumType] = []
+ Dict[Init+'_DB_VALUE_'+DatumType] = []
+
+ for Type in ['STRING_HEAD', 'VPD_HEAD', 'VARIABLE_HEAD']:
+ Dict[Type + '_CNAME_DECL'] = []
+ Dict[Type + '_GUID_DECL'] = []
+ Dict[Type + '_NUMSKUS_DECL'] = []
+ Dict[Type + '_VALUE'] = []
+
+ Dict['STRING_DB_VALUE'] = []
+ Dict['VPD_DB_VALUE'] = []
+ Dict['VARIABLE_DB_VALUE'] = []
+
+ Dict['STRING_TABLE_INDEX'] = []
+ Dict['STRING_TABLE_LENGTH'] = []
+ Dict['STRING_TABLE_CNAME'] = []
+ Dict['STRING_TABLE_GUID'] = []
+ Dict['STRING_TABLE_VALUE'] = []
+ Dict['STRING_TABLE_DB_VALUE'] = []
+
+ Dict['SIZE_TABLE_CNAME'] = []
+ Dict['SIZE_TABLE_GUID'] = []
+ Dict['SIZE_TABLE_CURRENT_LENGTH'] = []
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'] = []
+
+ Dict['EXMAPPING_TABLE_EXTOKEN'] = []
+ Dict['EXMAPPING_TABLE_LOCAL_TOKEN'] = []
+ Dict['EXMAPPING_TABLE_GUID_INDEX'] = []
+
+ Dict['GUID_STRUCTURE'] = []
+ Dict['SKUID_VALUE'] = [0] # init Dict length
+ Dict['VARDEF_HEADER'] = []
+
+ Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = []
+ Dict['VARIABLE_DB_VALUE'] = []
+
+ Dict['PCD_TOKENSPACE'] = []
+ Dict['PCD_CNAME'] = []
+ Dict['PCD_TOKENSPACE_LENGTH'] = []
+ Dict['PCD_CNAME_LENGTH'] = []
+ Dict['PCD_TOKENSPACE_OFFSET'] = []
+ Dict['PCD_CNAME_OFFSET'] = []
+ Dict['PCD_TOKENSPACE_MAP'] = []
+ Dict['PCD_NAME_OFFSET'] = []
+
+ Dict['PCD_ORDER_TOKEN_NUMBER_MAP'] = {}
+ PCD_STRING_INDEX_MAP = {}
+
+ StringTableIndex = 0
+ StringTableSize = 0
+ NumberOfLocalTokens = 0
+ NumberOfPeiLocalTokens = 0
+ NumberOfDxeLocalTokens = 0
+ NumberOfExTokens = 0
+ NumberOfSizeItems = 0
+ NumberOfSkuEnabledPcd = 0
+ GuidList = []
+ VarCheckTab = VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER()
+ i = 0
+ ReorderedDynPcdList = GetOrderedDynamicPcdList(DynamicPcdList, Platform.PcdTokenNumber)
+ for item in ReorderedDynPcdList:
+ if item.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ item.DatumType = TAB_VOID
+ for Pcd in ReorderedDynPcdList:
+ VoidStarTypeCurrSize = []
+ i += 1
+ CName = Pcd.TokenCName
+ TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
+
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ CName = PcdItem[0]
+
+ EdkLogger.debug(EdkLogger.DEBUG_3, "PCD: %s %s (%s : %s)" % (CName, TokenSpaceGuidCName, Pcd.Phase, Phase))
+
+ if Pcd.Phase == 'PEI':
+ NumberOfPeiLocalTokens += 1
+ if Pcd.Phase == 'DXE':
+ NumberOfDxeLocalTokens += 1
+ if Pcd.Phase != Phase:
+ continue
+
+ #
+ # TODO: need GetGuidValue() definition
+ #
+ TokenSpaceGuidStructure = Pcd.TokenSpaceGuidValue
+ TokenSpaceGuid = GuidStructureStringToGuidValueName(TokenSpaceGuidStructure)
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if TokenSpaceGuid not in GuidList:
+ GuidList.append(TokenSpaceGuid)
+ Dict['GUID_STRUCTURE'].append(TokenSpaceGuidStructure)
+ NumberOfExTokens += 1
+
+ ValueList = []
+ DbValueList = []
+ StringHeadOffsetList = []
+ StringDbOffsetList = []
+ VpdHeadOffsetList = []
+ VpdDbOffsetList = []
+ VariableHeadValueList = []
+ VariableDbValueList = []
+ Pcd.InitString = 'UNINIT'
+
+ if Pcd.DatumType == TAB_VOID:
+ if Pcd.Type not in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ else:
+ Pcd.TokenTypeList = []
+ elif Pcd.DatumType == 'BOOLEAN':
+ Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8_BOOLEAN']
+ else:
+ Pcd.TokenTypeList = ['PCD_DATUM_TYPE_' + Pcd.DatumType]
+
+ if len(Pcd.SkuInfoList) > 1:
+ NumberOfSkuEnabledPcd += 1
+
+ SkuIdIndex = 1
+ VariableHeadList = []
+ for SkuName in Pcd.SkuInfoList:
+ Sku = Pcd.SkuInfoList[SkuName]
+ SkuId = Sku.SkuId
+ if SkuId is None or SkuId == '':
+ continue
+
+
+ SkuIdIndex += 1
+
+ if len(Sku.VariableName) > 0:
+ VariableGuidStructure = Sku.VariableGuidValue
+ VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
+ if Platform.Platform.VarCheckFlag:
+ var_check_obj = VAR_CHECK_PCD_VARIABLE_TAB(VariableGuidStructure, StringToArray(Sku.VariableName))
+ try:
+ var_check_obj.push_back(GetValidationObject(Pcd, Sku.VariableOffset))
+ VarAttr, _ = VariableAttributes.GetVarAttributes(Sku.VariableAttribute)
+ var_check_obj.SetAttributes(VarAttr)
+ var_check_obj.UpdateSize()
+ VarCheckTab.push_back(var_check_obj)
+ except Exception:
+ ValidInfo = ''
+ if Pcd.validateranges:
+ ValidInfo = Pcd.validateranges[0]
+ if Pcd.validlists:
+ ValidInfo = Pcd.validlists[0]
+ if ValidInfo:
+ EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
+                            "The PCD '%s.%s' Validation information defined in the DEC file has an incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData = "[%s]" % str(ValidInfo))
+ else:
+ EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
+                            "The PCD '%s.%s' Validation information defined in the DEC file has an incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ Pcd.TokenTypeList.append('PCD_TYPE_HII')
+ Pcd.InitString = 'INIT'
+                # Store all variable names of one HII PCD under different SKUs into the string table
+                # and calculate the VariableHeadStringIndex
+
+ VariableNameStructure = StringToArray(Sku.VariableName)
+
+                # Make the VariableName (HII PCD) pointer 2-byte aligned
+ VariableNameStructureBytes = VariableNameStructure.lstrip("{").rstrip("}").split(",")
+ if len(VariableNameStructureBytes) % 2:
+ VariableNameStructure = "{%s,0x00}" % ",".join(VariableNameStructureBytes)
+
+ if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
+ Dict['STRING_TABLE_CNAME'].append(CName)
+ Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
+ if StringTableIndex == 0:
+ Dict['STRING_TABLE_INDEX'].append('')
+ else:
+ Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
+ VarNameSize = len(VariableNameStructure.replace(',', ' ').split())
+                    Dict['STRING_TABLE_LENGTH'].append(VarNameSize)
+ Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
+ StringHeadOffsetList.append(str(StringTableSize) + 'U')
+ VarStringDbOffsetList = []
+ VarStringDbOffsetList.append(StringTableSize)
+ Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
+ StringTableIndex += 1
+ StringTableSize += len(VariableNameStructure.replace(',', ' ').split())
+ VariableHeadStringIndex = 0
+ for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
+ VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
+ VariableHeadList.append(VariableHeadStringIndex)
+
+ VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
+ # store VariableGuid to GuidTable and get the VariableHeadGuidIndex
+
+ if VariableGuid not in GuidList:
+ GuidList.append(VariableGuid)
+ Dict['GUID_STRUCTURE'].append(VariableGuidStructure)
+ VariableHeadGuidIndex = GuidList.index(VariableGuid)
+
+ if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
+ VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s), %dU, %sU' %
+ (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
+ VariableHeadGuidIndex, Sku.VariableOffset))
+ else:
+ VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s), %dU, %sU' %
+ (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
+ VariableHeadGuidIndex, Sku.VariableOffset))
+ Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
+ Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
+ Dict['VARDEF_SKUID_'+Pcd.DatumType].append(SkuIdIndex)
+ if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
+ Dict['VARDEF_VALUE_' + Pcd.DatumType].append("%s_%s[%d]" % (Pcd.TokenCName, TokenSpaceGuid, SkuIdIndex))
+ else:
+                    #
+                    # ULL (for UINT64) or U (for other integer types) should be appended to
+                    # avoid warnings in a Linux build environment.
+                    #
+ Dict['VARDEF_DB_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
+
+ if Pcd.DatumType == TAB_UINT64:
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
+ elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
+ elif Pcd.DatumType == "BOOLEAN":
+ if eval(Sku.HiiDefaultValue) in [1, 0]:
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(str(eval(Sku.HiiDefaultValue)) + "U")
+ else:
+ Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
+
+ # construct the VariableHeader value
+ if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
+ VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s)' %
+ (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
+ Phase, CName, TokenSpaceGuid))
+ # the Pcd default value will be filled later on
+ VariableOffset = len(Dict['STRING_DB_VALUE'])
+ VariableRefTable = Dict['STRING_DB_VALUE']
+ else:
+ VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
+ (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
+ Phase, CName, TokenSpaceGuid, SkuIdIndex))
+ # the Pcd default value was filled before
+ VariableOffset = len(Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]) - 1
+ VariableRefTable = Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]
+ VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable, Sku.VariableAttribute])
+
+ elif Sku.VpdOffset != '':
+ Pcd.TokenTypeList.append('PCD_TYPE_VPD')
+ Pcd.InitString = 'INIT'
+ VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
+ VpdDbOffsetList.append(Sku.VpdOffset)
+ # Also add the VOID* string of VPD PCD to SizeTable
+ if Pcd.DatumType == TAB_VOID:
+ NumberOfSizeItems += 1
+ # For VPD type of PCD, its current size is equal to its MAX size.
+ VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
+ continue
+
+ if Pcd.DatumType == TAB_VOID:
+ Pcd.TokenTypeList.append('PCD_TYPE_STRING')
+ Pcd.InitString = 'INIT'
+ if Sku.HiiDefaultValue != '' and Sku.DefaultValue == '':
+ Sku.DefaultValue = Sku.HiiDefaultValue
+ if Sku.DefaultValue != '':
+ NumberOfSizeItems += 1
+ Dict['STRING_TABLE_CNAME'].append(CName)
+ Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
+
+ if StringTableIndex == 0:
+ Dict['STRING_TABLE_INDEX'].append('')
+ else:
+ Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
+ if Sku.DefaultValue[0] == 'L':
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(DefaultValueBinStructure.replace(',', ' ').split())
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
+ elif Sku.DefaultValue[0] == '"':
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(Sku.DefaultValue) - 2 + 1
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
+ elif Sku.DefaultValue[0] == '{':
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(Sku.DefaultValue.split(","))
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
+
+ StringHeadOffsetList.append(str(StringTableSize) + 'U')
+ StringDbOffsetList.append(StringTableSize)
+ if Pcd.MaxDatumSize != '':
+ MaxDatumSize = int(Pcd.MaxDatumSize, 0)
+ if MaxDatumSize < Size:
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+                        "The maximum size of the VOID* type PCD '%s.%s' is less than the actual size it occupies." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Platform))
+ else:
+ MaxDatumSize = Size
+ else:
+ MaxDatumSize = Size
+ StringTabLen = MaxDatumSize
+ if StringTabLen % 2:
+ StringTabLen += 1
+ if Sku.VpdOffset == '':
+ VoidStarTypeCurrSize.append(str(Size) + 'U')
+ Dict['STRING_TABLE_LENGTH'].append(StringTabLen)
+ StringTableIndex += 1
+ StringTableSize += (StringTabLen)
+ else:
+ if "PCD_TYPE_HII" not in Pcd.TokenTypeList:
+ Pcd.TokenTypeList.append('PCD_TYPE_DATA')
+ if Sku.DefaultValue == 'TRUE':
+ Pcd.InitString = 'INIT'
+ else:
+ Pcd.InitString = Pcd.isinit
+                #
+                # For a UINT64 type PCD value, ULL should be appended to avoid
+                # warnings in a Linux build environment.
+                #
+ if Pcd.DatumType == TAB_UINT64:
+ ValueList.append(Sku.DefaultValue + "ULL")
+ elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
+ ValueList.append(Sku.DefaultValue + "U")
+ elif Pcd.DatumType == "BOOLEAN":
+ if Sku.DefaultValue in ["1", "0"]:
+ ValueList.append(Sku.DefaultValue + "U")
+ else:
+ ValueList.append(Sku.DefaultValue)
+
+ DbValueList.append(Sku.DefaultValue)
+
+ Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
+ if Pcd.DatumType == TAB_VOID:
+ Dict['SIZE_TABLE_CNAME'].append(CName)
+ Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
+
+
+
+ if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+ Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
+ Dict['VARIABLE_HEAD_GUID_DECL'].append(TokenSpaceGuid)
+ Dict['VARIABLE_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
+ Dict['VARIABLE_HEAD_VALUE'].append('{ %s }\n' % ' },\n { '.join(VariableHeadValueList))
+ Dict['VARDEF_HEADER'].append('_Variable_Header')
+ Dict['VARIABLE_DB_VALUE'].append(VariableDbValueList)
+ else:
+ Dict['VARDEF_HEADER'].append('')
+ if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
+ Dict['VPD_HEAD_CNAME_DECL'].append(CName)
+ Dict['VPD_HEAD_GUID_DECL'].append(TokenSpaceGuid)
+ Dict['VPD_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
+ Dict['VPD_HEAD_VALUE'].append('{ %s }' % ' }, { '.join(VpdHeadOffsetList))
+ Dict['VPD_DB_VALUE'].append(VpdDbOffsetList)
+ if 'PCD_TYPE_STRING' in Pcd.TokenTypeList:
+ Dict['STRING_HEAD_CNAME_DECL'].append(CName)
+ Dict['STRING_HEAD_GUID_DECL'].append(TokenSpaceGuid)
+ Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
+ Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
+ Dict['STRING_DB_VALUE'].append(StringDbOffsetList)
+ PCD_STRING_INDEX_MAP[len(Dict['STRING_HEAD_CNAME_DECL']) -1 ] = len(Dict['STRING_DB_VALUE']) -1
+ if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
+ Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
+ Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
+ Dict[Pcd.InitString+'_NUMSKUS_DECL_'+Pcd.DatumType].append(len(Pcd.SkuInfoList))
+ if Pcd.InitString == 'UNINIT':
+ Dict['PCD_DATABASE_UNINIT_EMPTY'] = ''
+ else:
+ Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))
+ Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType].append(DbValueList)
+
+ if Phase == 'PEI':
+ NumberOfLocalTokens = NumberOfPeiLocalTokens
+ if Phase == 'DXE':
+ NumberOfLocalTokens = NumberOfDxeLocalTokens
+
+ Dict['TOKEN_INIT'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['TOKEN_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['TOKEN_GUID'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
+ Dict['PCD_CNAME_LENGTH'] = [0 for x in range(NumberOfLocalTokens)]
+ SkuEnablePcdIndex = 0
+ for Pcd in ReorderedDynPcdList:
+ CName = Pcd.TokenCName
+ TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
+ if Pcd.Phase != Phase:
+ continue
+
+ TokenSpaceGuid = GuidStructureStringToGuidValueName(Pcd.TokenSpaceGuidValue) #(Platform.PackageList, TokenSpaceGuidCName))
+ GeneratedTokenNumber = Platform.PcdTokenNumber[CName, TokenSpaceGuidCName] - 1
+ if Phase == 'DXE':
+ GeneratedTokenNumber -= NumberOfPeiLocalTokens
+
+ if len(Pcd.SkuInfoList) > 1:
+ Dict['PCD_ORDER_TOKEN_NUMBER_MAP'][GeneratedTokenNumber] = SkuEnablePcdIndex
+ SkuEnablePcdIndex += 1
+
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ CName = PcdItem[0]
+
+ EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
+ EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
+ EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))
+
+ #
+ # following four Dict items hold the information for LocalTokenNumberTable
+ #
+ Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Init'
+ if Pcd.InitString == 'UNINIT':
+ Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Uninit'
+
+ Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
+ Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
+ Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
+
+ if Platform.Platform.PcdInfoFlag:
+ TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
+ if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
+ Dict['PCD_TOKENSPACE'].append(TokenSpaceGuidCNameArray)
+ Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCNameArray.split(",")) )
+ Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
+ CNameBinArray = StringToArray('"' + CName + '"' )
+ Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
+
+ Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
+
+
+ Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
+
+ # search the Offset and Table, used by LocalTokenNumberTableOffset
+ if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+ # Find index by CName, TokenSpaceGuid
+ Offset = GetMatchedIndex(CName, Dict['VARIABLE_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VARIABLE_HEAD_GUID_DECL'])
+ assert(Offset != -1)
+ Table = Dict['VARIABLE_DB_VALUE']
+ if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
+ Offset = GetMatchedIndex(CName, Dict['VPD_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VPD_HEAD_GUID_DECL'])
+ assert(Offset != -1)
+ Table = Dict['VPD_DB_VALUE']
+ if 'PCD_TYPE_STRING' in Pcd.TokenTypeList and 'PCD_TYPE_HII' not in Pcd.TokenTypeList:
+ # Find index by CName, TokenSpaceGuid
+ Offset = GetMatchedIndex(CName, Dict['STRING_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['STRING_HEAD_GUID_DECL'])
+ Offset = PCD_STRING_INDEX_MAP[Offset]
+ assert(Offset != -1)
+ Table = Dict['STRING_DB_VALUE']
+ if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
+ # need to store whether it is in init table or not
+ Offset = GetMatchedIndex(CName, Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType], TokenSpaceGuid, Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType])
+ assert(Offset != -1)
+ if Pcd.InitString == 'UNINIT':
+ Table = Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType]
+ else:
+ Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
+ Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'][GeneratedTokenNumber] = (Offset, Table)
+
+ #
+ # Update VARDEF_HEADER
+ #
+ if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
+ Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
+ else:
+ Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
+
+
+ if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+
+ if Phase == 'DXE':
+ GeneratedTokenNumber += NumberOfPeiLocalTokens
+ #
+            # Per the PCD architecture specification, PCD Token Numbers are 1-based and 0 is defined
+            # as an invalid token number.
+            # For each EX type PCD, a PCD Token Number is assigned. When the
+            # PCD Driver/PEIM maps EX_GUID and EX_TOKEN_NUMBER to the PCD Token Number,
+            # the non-EX Protocol/PPI interface can be called to get/set the value. This assumption is made by
+            # the PCD Driver/PEIM in MdeModulePkg.
+            # Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before it is
+            # inserted into the EXMAPPING_TABLE.
+ #
+
+
+ Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
+ Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
+ Dict['EXMAPPING_TABLE_GUID_INDEX'].append(str(GuidList.index(TokenSpaceGuid)) + 'U')
+
+ if Platform.Platform.PcdInfoFlag:
+ for index in range(len(Dict['PCD_TOKENSPACE_MAP'])):
+ TokenSpaceIndex = StringTableSize
+ for i in range(Dict['PCD_TOKENSPACE_MAP'][index]):
+ TokenSpaceIndex += Dict['PCD_TOKENSPACE_LENGTH'][i]
+ Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
+ for index in range(len(Dict['PCD_TOKENSPACE'])):
+ StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
+ StringTableIndex += 1
+ for index in range(len(Dict['PCD_CNAME'])):
+ Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
+ Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
+ Dict['PCD_NAME_OFFSET'].append(StringTableSize)
+ StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
+ StringTableIndex += 1
+ if GuidList != []:
+ Dict['GUID_TABLE_EMPTY'] = 'FALSE'
+ Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
+ else:
+ Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')]
+
+ if StringTableIndex == 0:
+ Dict['STRING_TABLE_INDEX'].append('')
+ Dict['STRING_TABLE_LENGTH'].append(1)
+ Dict['STRING_TABLE_CNAME'].append('')
+ Dict['STRING_TABLE_GUID'].append('')
+ Dict['STRING_TABLE_VALUE'].append('{ 0 }')
+ else:
+ Dict['STRING_TABLE_EMPTY'] = 'FALSE'
+ Dict['STRING_TABLE_SIZE'] = str(StringTableSize) + 'U'
+
+ if Dict['SIZE_TABLE_CNAME'] == []:
+ Dict['SIZE_TABLE_CNAME'].append('')
+ Dict['SIZE_TABLE_GUID'].append('')
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(['0U'])
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
+
+ if NumberOfLocalTokens != 0:
+ Dict['DATABASE_EMPTY'] = 'FALSE'
+ Dict['LOCAL_TOKEN_NUMBER_TABLE_SIZE'] = NumberOfLocalTokens
+ Dict['LOCAL_TOKEN_NUMBER'] = NumberOfLocalTokens
+
+ if NumberOfExTokens != 0:
+ Dict['EXMAP_TABLE_EMPTY'] = 'FALSE'
+ Dict['EXMAPPING_TABLE_SIZE'] = str(NumberOfExTokens) + 'U'
+ Dict['EX_TOKEN_NUMBER'] = str(NumberOfExTokens) + 'U'
+ else:
+ Dict['EXMAPPING_TABLE_EXTOKEN'].append('0U')
+ Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append('0U')
+ Dict['EXMAPPING_TABLE_GUID_INDEX'].append('0U')
+
+ if NumberOfSizeItems != 0:
+ Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
+
+ if NumberOfSkuEnabledPcd != 0:
+ Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
+
+ for AvailableSkuNumber in SkuObj.SkuIdNumberSet:
+ if AvailableSkuNumber not in Dict['SKUID_VALUE']:
+ Dict['SKUID_VALUE'].append(AvailableSkuNumber)
+ Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
+
+ AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
+ if NumberOfLocalTokens == 0:
+ AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
+ else:
+ #
+ # Update Size Table to the right order, it should be same with LocalTokenNumberTable
+ #
+ SizeCNameTempList = []
+ SizeGuidTempList = []
+ SizeCurLenTempList = []
+ SizeMaxLenTempList = []
+ ReOrderFlag = True
+
+ if len(Dict['SIZE_TABLE_CNAME']) == 1:
+ if not (Dict['SIZE_TABLE_CNAME'][0] and Dict['SIZE_TABLE_GUID'][0]):
+ ReOrderFlag = False
+
+ if ReOrderFlag:
+ for Count in range(len(Dict['TOKEN_CNAME'])):
+ for Count1 in range(len(Dict['SIZE_TABLE_CNAME'])):
+ if Dict['TOKEN_CNAME'][Count] == Dict['SIZE_TABLE_CNAME'][Count1] and \
+ Dict['TOKEN_GUID'][Count] == Dict['SIZE_TABLE_GUID'][Count1]:
+ SizeCNameTempList.append(Dict['SIZE_TABLE_CNAME'][Count1])
+ SizeGuidTempList.append(Dict['SIZE_TABLE_GUID'][Count1])
+ SizeCurLenTempList.append(Dict['SIZE_TABLE_CURRENT_LENGTH'][Count1])
+ SizeMaxLenTempList.append(Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count1])
+
+ for Count in range(len(Dict['SIZE_TABLE_CNAME'])):
+ Dict['SIZE_TABLE_CNAME'][Count] = SizeCNameTempList[Count]
+ Dict['SIZE_TABLE_GUID'][Count] = SizeGuidTempList[Count]
+ Dict['SIZE_TABLE_CURRENT_LENGTH'][Count] = SizeCurLenTempList[Count]
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count] = SizeMaxLenTempList[Count]
+
+ AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))
+
+
+ Buffer = BuildExDataBase(Dict)
+ return AutoGenH, AutoGenC, Buffer, VarCheckTab
+
+def GetOrderedDynamicPcdList(DynamicPcdList, PcdTokenNumberList):
+ ReorderedDyPcdList = [None for i in range(len(DynamicPcdList))]
+ for Pcd in DynamicPcdList:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in PcdTokenNumberList:
+ ReorderedDyPcdList[PcdTokenNumberList[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]-1] = Pcd
+ return ReorderedDyPcdList
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py
new file mode 100755
index 00000000..84cf0fc9
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/GenVar.py
@@ -0,0 +1,366 @@
+# Copyright (c) 2017 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+#
+# This file is used to collect the Variable checking information
+#
+
+# #
+# Import Modules
+#
+from struct import pack, unpack
+import collections
+import copy
+from Common.VariableAttributes import VariableAttributes
+from Common.Misc import *
+import Common.DataType as DataType
+
+var_info = collections.namedtuple("uefi_var", "pcdindex, pcdname, defaultstoragename, skuname, var_name, var_guid, var_offset, var_attribute, pcd_default_value, default_value, data_type, PcdDscLine, StructurePcd")
+NvStorageHeaderSize = 28
+VariableHeaderSize = 32
+
+class VariableMgr(object):
+ def __init__(self, DefaultStoreMap, SkuIdMap):
+ self.VarInfo = []
+ self.DefaultStoreMap = DefaultStoreMap
+ self.SkuIdMap = SkuIdMap
+ self.VpdRegionSize = 0
+ self.VpdRegionOffset = 0
+ self.NVHeaderBuff = None
+ self.VarDefaultBuff = None
+ self.VarDeltaBuff = None
+
+ def append_variable(self, uefi_var):
+ self.VarInfo.append(uefi_var)
+
+ def SetVpdRegionMaxSize(self, maxsize):
+ self.VpdRegionSize = maxsize
+
+ def SetVpdRegionOffset(self, vpdoffset):
+ self.VpdRegionOffset = vpdoffset
+
+ def PatchNVStoreDefaultMaxSize(self, maxsize):
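+        # The NSDB header is Signature(4) + Size(4) + MaxSize(8); patch the
+        # MaxSize field in place once the final region size is known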
+ if not self.NVHeaderBuff:
+ return ""
+ self.NVHeaderBuff = self.NVHeaderBuff[:8] + pack("=Q", maxsize)
+ default_var_bin = VariableMgr.format_data(self.NVHeaderBuff + self.VarDefaultBuff + self.VarDeltaBuff)
+ value_str = "{"
+ default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin]
+ value_str += ",".join(default_var_bin_strip)
+ value_str += "}"
+ return value_str
+
+ def combine_variable(self):
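+        # Merge every setting that targets the same NV variable (same sku,
+        # default store, name and guid) into one VOID* value: each entry's
+        # bytes land at its var_offset inside a single combined byte array,
+        # with any gaps zero-filled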
+ indexedvarinfo = collections.OrderedDict()
+ for item in self.VarInfo:
+ if (item.skuname, item.defaultstoragename, item.var_name, item.var_guid) not in indexedvarinfo:
+ indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid) ] = []
+ indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid)].append(item)
+ for key in indexedvarinfo:
+ sku_var_info_offset_list = indexedvarinfo[key]
+ sku_var_info_offset_list.sort(key=lambda x:x.PcdDscLine)
+            FirstOffset = int(sku_var_info_offset_list[0].var_offset, 16) if sku_var_info_offset_list[0].var_offset.upper().startswith("0X") else int(sku_var_info_offset_list[0].var_offset)
+            firstvalue_list = sku_var_info_offset_list[0].default_value.strip("{").strip("}").split(",")
+            firstdata_type = sku_var_info_offset_list[0].data_type
+            if firstdata_type in DataType.TAB_PCD_NUMERIC_TYPES:
+                firstdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]]
+                firstdata = firstvalue_list[0]
+                firstvalue_list = []
+                pack_data = pack(firstdata_flag, int(firstdata, 0))
+                for data_byte in range(len(pack_data)):
+                    firstvalue_list.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
+            newvalue_list = ["0x00"] * FirstOffset + firstvalue_list
+
+ for var_item in sku_var_info_offset_list[1:]:
+ CurOffset = int(var_item.var_offset, 16) if var_item.var_offset.upper().startswith("0X") else int(var_item.var_offset)
+ CurvalueList = var_item.default_value.strip("{").strip("}").split(",")
+ Curdata_type = var_item.data_type
+ if Curdata_type in DataType.TAB_PCD_NUMERIC_TYPES:
+ data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]]
+ data = CurvalueList[0]
+ CurvalueList = []
+ pack_data = pack(data_flag, int(data, 0))
+ for data_byte in range(len(pack_data)):
+ CurvalueList.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
+ if CurOffset > len(newvalue_list):
+ newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList
+ else:
+ newvalue_list[CurOffset : CurOffset + len(CurvalueList)] = CurvalueList
+
+ newvaluestr = "{" + ",".join(newvalue_list) +"}"
+ n = sku_var_info_offset_list[0]
+            indexedvarinfo[key] = [var_info(n.pcdindex, n.pcdname, n.defaultstoragename, n.skuname, n.var_name, n.var_guid, "0x00", n.var_attribute, newvaluestr, newvaluestr, DataType.TAB_VOID, n.PcdDscLine, n.StructurePcd)]
+ self.VarInfo = [item[0] for item in list(indexedvarinfo.values())]
+
+ def process_variable_data(self):
+
+ var_data = collections.defaultdict(collections.OrderedDict)
+
+ indexedvarinfo = collections.OrderedDict()
+ for item in self.VarInfo:
+ if item.pcdindex not in indexedvarinfo:
+ indexedvarinfo[item.pcdindex] = dict()
+ indexedvarinfo[item.pcdindex][(item.skuname, item.defaultstoragename)] = item
+
+ for index in indexedvarinfo:
+ sku_var_info = indexedvarinfo[index]
+
+ default_data_buffer = ""
+ others_data_buffer = ""
+ tail = None
+ default_sku_default = indexedvarinfo[index].get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT))
+
+ if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
+ var_max_len = max(len(var_item.default_value.split(",")) for var_item in sku_var_info.values())
+ if len(default_sku_default.default_value.split(",")) < var_max_len:
+ tail = ",".join("0x00" for i in range(var_max_len-len(default_sku_default.default_value.split(","))))
+
+ default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)
+
+ default_data_array = ()
+ for item in range(len(default_data_buffer)):
+ default_data_array += unpack("B", default_data_buffer[item:item + 1])
+
+ var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])
+
+ for (skuid, defaultstoragename) in indexedvarinfo[index]:
+ tail = None
+ if (skuid, defaultstoragename) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
+ continue
+ other_sku_other = indexedvarinfo[index][(skuid, defaultstoragename)]
+
+ if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
+ if len(other_sku_other.default_value.split(",")) < var_max_len:
+ tail = ",".join("0x00" for i in range(var_max_len-len(other_sku_other.default_value.split(","))))
+
+ others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)
+
+ others_data_array = ()
+ for item in range(len(others_data_buffer)):
+ others_data_array += unpack("B", others_data_buffer[item:item + 1])
+
+ data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)
+
+ var_data[(skuid, defaultstoragename)][index] = (data_delta, sku_var_info[(skuid, defaultstoragename)])
+ return var_data
+
+ def new_process_varinfo(self):
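+        # Build the NV default store image: a variable store header plus one
+        # variable record per default value, wrapped as the default data
+        # record, followed by one 8-byte-aligned delta record per non-default
+        # (sku, default store) pair, all behind an NSDB header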
+ self.combine_variable()
+
+ var_data = self.process_variable_data()
+
+ if not var_data:
+ return []
+
+ pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
+ NvStoreDataBuffer = bytearray()
+ var_data_offset = collections.OrderedDict()
+ offset = NvStorageHeaderSize
+ for default_data, default_info in pcds_default_data.values():
+ var_name_buffer = VariableMgr.PACK_VARIABLE_NAME(default_info.var_name)
+
+ vendorguid = default_info.var_guid.split('-')
+
+ if default_info.var_attribute:
+ var_attr_value, _ = VariableAttributes.GetVarAttributes(default_info.var_attribute)
+ else:
+ var_attr_value = 0x07
+
+ DataBuffer = VariableMgr.AlignData(var_name_buffer + default_data)
+
+ data_size = len(DataBuffer)
+ offset += VariableHeaderSize + len(default_info.var_name.split(","))
+ var_data_offset[default_info.pcdindex] = offset
+ offset += data_size - len(default_info.var_name.split(","))
+
+ var_header_buffer = VariableMgr.PACK_VARIABLE_HEADER(var_attr_value, len(default_info.var_name.split(",")), len (default_data), vendorguid)
+ NvStoreDataBuffer += (var_header_buffer + DataBuffer)
+
+ variable_storage_header_buffer = VariableMgr.PACK_VARIABLE_STORE_HEADER(len(NvStoreDataBuffer) + 28)
+
+ nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer+NvStoreDataBuffer)), 8)
+
+ data_delta_structure_buffer = bytearray()
+ for skuname, defaultstore in var_data:
+ if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
+ continue
+ pcds_sku_data = var_data[(skuname, defaultstore)]
+ delta_data_set = []
+ for pcdindex in pcds_sku_data:
+ offset = var_data_offset[pcdindex]
+ delta_data, _ = pcds_sku_data[pcdindex]
+ delta_data = [(item[0] + offset, item[1]) for item in delta_data]
+ delta_data_set.extend(delta_data)
+
+ data_delta_structure_buffer += VariableMgr.AlignData(self.PACK_DELTA_DATA(skuname, defaultstore, delta_data_set), 8)
+
+ size = len(nv_default_part + data_delta_structure_buffer) + 16
+ maxsize = self.VpdRegionSize if self.VpdRegionSize else size
+ NV_Store_Default_Header = VariableMgr.PACK_NV_STORE_DEFAULT_HEADER(size, maxsize)
+
+ self.NVHeaderBuff = NV_Store_Default_Header
+        self.VarDefaultBuff = nv_default_part
+ self.VarDeltaBuff = data_delta_structure_buffer
+ return VariableMgr.format_data(NV_Store_Default_Header + nv_default_part + data_delta_structure_buffer)
+
+
+ @staticmethod
+ def format_data(data):
+ return [hex(item) for item in VariableMgr.unpack_data(data)]
+
+ @staticmethod
+ def unpack_data(data):
+ final_data = ()
+ for item in range(len(data)):
+ final_data += unpack("B", data[item:item + 1])
+ return final_data
+
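+    # A minimal illustration: calculate_delta((0x01, 0x02), (0x01, 0xFF))
+    # returns [(1, 0xFF)], i.e. only the bytes that differ, with their offsets.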
+ @staticmethod
+ def calculate_delta(default, theother):
+ if len(default) - len(theother) != 0:
+ EdkLogger.error("build", FORMAT_INVALID, 'The variable data length is not the same for the same PCD.')
+ data_delta = []
+ for i in range(len(default)):
+ if default[i] != theother[i]:
+ data_delta.append((i, theother[i]))
+ return data_delta
+
+ def dump(self):
+
+ default_var_bin = self.new_process_varinfo()
+ if default_var_bin:
+ value_str = "{"
+ default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin]
+ value_str += ",".join(default_var_bin_strip)
+ value_str += "}"
+ return value_str
+ return ""
+
+ @staticmethod
+ def PACK_VARIABLE_STORE_HEADER(size):
+ #Signature: gEfiVariableGuid
+ Guid = "{ 0xddcf3616, 0x3275, 0x4164, { 0x98, 0xb6, 0xfe, 0x85, 0x70, 0x7f, 0xfe, 0x7d }}"
+ Guid = GuidStructureStringToGuidString(Guid)
+ GuidBuffer = PackGUID(Guid.split('-'))
+
+ SizeBuffer = pack('=L', size)
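+        # 0x5A and 0xFE correspond to the VARIABLE_STORE_FORMATTED and
+        # VARIABLE_STORE_HEALTHY values in the EDK2 variable store format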
+ FormatBuffer = pack('=B', 0x5A)
+ StateBuffer = pack('=B', 0xFE)
+ reservedBuffer = pack('=H', 0)
+ reservedBuffer += pack('=L', 0)
+
+ return GuidBuffer + SizeBuffer + FormatBuffer + StateBuffer + reservedBuffer
+
+ @staticmethod
+ def PACK_NV_STORE_DEFAULT_HEADER(size, maxsize):
+ Signature = pack('=B', ord('N'))
+ Signature += pack("=B", ord('S'))
+ Signature += pack("=B", ord('D'))
+ Signature += pack("=B", ord('B'))
+
+ SizeBuffer = pack("=L", size)
+ MaxSizeBuffer = pack("=Q", maxsize)
+
+ return Signature + SizeBuffer + MaxSizeBuffer
+
+ @staticmethod
+ def PACK_VARIABLE_HEADER(attribute, namesize, datasize, vendorguid):
+
+ Buffer = pack('=H', 0x55AA) # pack StartID
+ Buffer += pack('=B', 0x3F) # pack State
+ Buffer += pack('=B', 0) # pack reserved
+
+ Buffer += pack('=L', attribute)
+ Buffer += pack('=L', namesize)
+ Buffer += pack('=L', datasize)
+
+ Buffer += PackGUID(vendorguid)
+
+ return Buffer
+
+ @staticmethod
+ def PACK_VARIABLES_DATA(var_value,data_type, tail = None):
+ Buffer = bytearray()
+ data_len = 0
+ if data_type == DataType.TAB_VOID:
+ for value_char in var_value.strip("{").strip("}").split(","):
+ Buffer += pack("=B", int(value_char, 16))
+ data_len += len(var_value.split(","))
+ if tail:
+ for value_char in tail.split(","):
+ Buffer += pack("=B", int(value_char, 16))
+ data_len += len(tail.split(","))
+ elif data_type == "BOOLEAN":
+ Buffer += pack("=B", True) if var_value.upper() in ["TRUE","1"] else pack("=B", False)
+ data_len += 1
+ elif data_type == DataType.TAB_UINT8:
+ Buffer += pack("=B", GetIntegerValue(var_value))
+ data_len += 1
+ elif data_type == DataType.TAB_UINT16:
+ Buffer += pack("=H", GetIntegerValue(var_value))
+ data_len += 2
+ elif data_type == DataType.TAB_UINT32:
+ Buffer += pack("=L", GetIntegerValue(var_value))
+ data_len += 4
+ elif data_type == DataType.TAB_UINT64:
+ Buffer += pack("=Q", GetIntegerValue(var_value))
+ data_len += 8
+
+ return Buffer
+
+ @staticmethod
+ def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
+ Buffer = bytearray()
+ Buffer += pack("=L", 4+8+8)
+ Buffer += pack("=Q", int(skuid))
+ Buffer += pack("=Q", int(defaultstoragename))
+
+ for item in var_value:
+ Buffer += pack("=B", item)
+
+ Buffer = pack("=L", len(Buffer)+4) + Buffer
+
+ return Buffer
+
+ def GetSkuId(self, skuname):
+ if skuname not in self.SkuIdMap:
+ return None
+ return self.SkuIdMap.get(skuname)[0]
+
+ def GetDefaultStoreId(self, dname):
+ if dname not in self.DefaultStoreMap:
+ return None
+ return self.DefaultStoreMap.get(dname)[0]
+
+ def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
+ skuid = self.GetSkuId(skuname)
+ defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
+ Buffer = bytearray()
+ Buffer += pack("=L", 4+8+8)
+ Buffer += pack("=Q", int(skuid))
+ Buffer += pack("=Q", int(defaultstorageid))
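+        # Each delta entry is one UINT32: the low 24 bits carry the byte
+        # offset and the high byte carries the new value (patched in below)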
+ for (delta_offset, value) in delta_list:
+ Buffer += pack("=L", delta_offset)
+ Buffer = Buffer[:-1] + pack("=B", value)
+
+ Buffer = pack("=L", len(Buffer) + 4) + Buffer
+
+ return Buffer
+
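+    # e.g. AlignData(b"\x01\x02\x03") pads to b"\x01\x02\x03\x00" (default
+    # 4-byte alignment).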
+ @staticmethod
+ def AlignData(data, align = 4):
+ mybuffer = data
+ if (len(data) % align) > 0:
+ for i in range(align - (len(data) % align)):
+ mybuffer += pack("=B", 0)
+
+ return mybuffer
+
+ @staticmethod
+ def PACK_VARIABLE_NAME(var_name):
+ Buffer = bytearray()
+ for name_char in var_name.strip("{").strip("}").split(","):
+ Buffer += pack("=B", int(name_char, 16))
+
+ return Buffer
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py
new file mode 100755
index 00000000..350c76bd
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IdfClassObject.py
@@ -0,0 +1,132 @@
+## @file
+# This file is used to collect all defined strings in Image Definition files
+#
+# Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.StringUtils import GetLineNo
+from Common.Misc import PathClass
+from Common.LongFilePathSupport import LongFilePath
+import re
+import os
+from Common.GlobalData import gIdentifierPattern
+from .UniClassObject import StripComments
+
+IMAGE_TOKEN = re.compile(r'IMAGE_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
+
+#
+# Value of different image information block types
+#
+EFI_HII_IIBT_END = 0x00
+EFI_HII_IIBT_IMAGE_1BIT = 0x10
+EFI_HII_IIBT_IMAGE_1BIT_TRANS = 0x11
+EFI_HII_IIBT_IMAGE_4BIT = 0x12
+EFI_HII_IIBT_IMAGE_4BIT_TRANS = 0x13
+EFI_HII_IIBT_IMAGE_8BIT = 0x14
+EFI_HII_IIBT_IMAGE_8BIT_TRANS = 0x15
+EFI_HII_IIBT_IMAGE_24BIT = 0x16
+EFI_HII_IIBT_IMAGE_24BIT_TRANS = 0x17
+EFI_HII_IIBT_IMAGE_JPEG = 0x18
+EFI_HII_IIBT_IMAGE_PNG = 0x19
+EFI_HII_IIBT_DUPLICATE = 0x20
+EFI_HII_IIBT_SKIP2 = 0x21
+EFI_HII_IIBT_SKIP1 = 0x22
+EFI_HII_IIBT_EXT1 = 0x30
+EFI_HII_IIBT_EXT2 = 0x31
+EFI_HII_IIBT_EXT4 = 0x32
+
+#
+# Value of HII package type
+#
+EFI_HII_PACKAGE_TYPE_ALL = 0x00
+EFI_HII_PACKAGE_TYPE_GUID = 0x01
+EFI_HII_PACKAGE_FORMS = 0x02
+EFI_HII_PACKAGE_STRINGS = 0x04
+EFI_HII_PACKAGE_FONTS = 0x05
+EFI_HII_PACKAGE_IMAGES = 0x06
+EFI_HII_PACKAGE_SIMPLE_FONTS = 0x07
+EFI_HII_PACKAGE_DEVICE_PATH = 0x08
+EFI_HII_PACKAGE_KEYBOARD_LAYOUT = 0x09
+EFI_HII_PACKAGE_ANIMATIONS = 0x0A
+EFI_HII_PACKAGE_END = 0xDF
+EFI_HII_PACKAGE_TYPE_SYSTEM_BEGIN = 0xE0
+EFI_HII_PACKAGE_TYPE_SYSTEM_END = 0xFF
+
+class IdfFileClassObject(object):
+ def __init__(self, FileList = []):
+ self.ImageFilesDict = {}
+ self.ImageIDList = []
+ for File in FileList:
+ if File is None:
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
+
+ try:
+ IdfFile = open(LongFilePath(File.Path), mode='r')
+ FileIn = IdfFile.read()
+ IdfFile.close()
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
+
+ ImageFileList = []
+ for Line in FileIn.splitlines():
+ Line = Line.strip()
+ Line = StripComments(Line)
+ if len(Line) == 0:
+ continue
+
+ LineNo = GetLineNo(FileIn, Line, False)
+ if not Line.startswith('#image '):
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is invalid.' % (Line, LineNo, File.Path))
+
+ if Line.find('#image ') >= 0:
+ LineDetails = Line.split()
+ Len = len(LineDetails)
+ if Len != 3 and Len != 4:
+                    EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The format does not match "#image IMAGE_ID [TRANSPARENT] ImageFileName" in Line %s of File %s.' % (LineNo, File.Path))
+ if Len == 4 and LineDetails[2] != 'TRANSPARENT':
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
+ MatchString = gIdentifierPattern.match(LineDetails[1])
+ if MatchString is None:
+                        EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains invalid characters.' % (LineDetails[1], File.Path))
+ if LineDetails[1] not in self.ImageIDList:
+ self.ImageIDList.append(LineDetails[1])
+ else:
+ EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is already defined.' % (LineDetails[1], LineNo, File.Path))
+ if Len == 4:
+ ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], True)
+ else:
+ ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], False)
+ ImageFileList.append(ImageFile)
+ if ImageFileList:
+ self.ImageFilesDict[File] = ImageFileList
+
+def SearchImageID(ImageFileObject, FileList):
+    if not FileList:
+ return ImageFileObject
+
+ for File in FileList:
+ if os.path.isfile(File):
+            with open(File, 'r') as Lines:
+                for Line in Lines:
+                    ImageIdList = IMAGE_TOKEN.findall(Line)
+                    for ID in ImageIdList:
+                        EdkLogger.debug(EdkLogger.DEBUG_5, "Found ImageID identifier: " + ID)
+                        ImageFileObject.SetImageIDReferenced(ID)
+
+class ImageFileObject(object):
+ def __init__(self, FileName, ImageID, TransParent = False):
+ self.FileName = FileName
+ self.File = ''
+ self.ImageID = ImageID
+ self.TransParent = TransParent
+ self.Referenced = False
+
+ def SetImageIDReferenced(self, ImageID):
+ if ImageID == self.ImageID:
+ self.Referenced = True
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py
new file mode 100755
index 00000000..29169f67
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/IncludesAutoGen.py
@@ -0,0 +1,304 @@
+## @file
+# Build cache intermediate result and state
+#
+# Copyright (c) 2019 - 2020, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from Common.caching import cached_property
+import Common.EdkLogger as EdkLogger
+import Common.LongFilePathOs as os
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange, PathClass
+from Common.Misc import TemplateString
+import sys
+gIsFileMap = {}
+
+DEP_FILE_TAIL = "# Updated \n"
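+# DEP_FILE_TAIL is written as the last line of a .deps file once it has been
+# updated, so later passes can recognize and skip already-processed files.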
+
+class IncludesAutoGen():
+ """ This class is to manage the dependent files witch are used in Makefile to support incremental build.
+ 1. C files:
+ 1. MSVS.
+ cl.exe has a build option /showIncludes to display include files on stdout. Build tool captures
+ that messages and generate dependency files, .deps files.
+ 2. CLANG and GCC
+ -MMD -MF build option are used to generate dependency files by compiler. Build tool updates the
+ .deps files.
+ 2. ASL files:
+ 1. Trim find out all the included files with asl specific include format and generate .trim.deps file.
+ 2. ASL PP use c preprocessor to find out all included files with #include format and generate a .deps file
+ 3. build tool updates the .deps file
+ 3. ASM files (.asm, .s or .nasm):
+ 1. Trim find out all the included files with asl specific include format and generate .trim.deps file.
+ 2. ASM PP use c preprocessor to find out all included files with #include format and generate a deps file
+ 3. build tool updates the .deps file
+ """
+ def __init__(self, makefile_folder, ModuleAuto):
+ self.d_folder = makefile_folder
+ self.makefile_folder = makefile_folder
+ self.module_autogen = ModuleAuto
+ self.ToolChainFamily = ModuleAuto.ToolChainFamily
+ self.workspace = ModuleAuto.WorkspaceDir
+
+ def CreateModuleDeps(self):
+ SaveFileOnChange(os.path.join(self.makefile_folder,"deps.txt"),"\n".join(self.DepsCollection),False)
+
+ def CreateDepsInclude(self):
+ deps_file = {'deps_file':self.deps_files}
+
+ MakePath = self.module_autogen.BuildOption.get('MAKE', {}).get('PATH')
+ if not MakePath:
+ EdkLogger.error("build", PARAMETER_MISSING, Message="No Make path available.")
+ elif "nmake" in MakePath:
+ _INCLUDE_DEPS_TEMPLATE = TemplateString('''
+${BEGIN}
+!IF EXIST(${deps_file})
+!INCLUDE ${deps_file}
+!ENDIF
+${END}
+ ''')
+ else:
+ _INCLUDE_DEPS_TEMPLATE = TemplateString('''
+${BEGIN}
+-include ${deps_file}
+${END}
+ ''')
+
+        try:
+            deps_include_str = _INCLUDE_DEPS_TEMPLATE.Replace(deps_file)
+        except Exception as e:
+            EdkLogger.error("build", AUTOGEN_ERROR, "Failed to generate the dependency include section.", ExtraData=str(e))
+        SaveFileOnChange(os.path.join(self.makefile_folder, "dependency"), deps_include_str, False)
+
+ def CreateDepsTarget(self):
+ SaveFileOnChange(os.path.join(self.makefile_folder,"deps_target"),"\n".join([item +":" for item in self.DepsCollection]),False)
+
+ @cached_property
+ def deps_files(self):
+ """ Get all .deps file under module build folder. """
+ deps_files = []
+ for root, _, files in os.walk(self.d_folder, topdown=False):
+ for name in files:
+ if not name.endswith(".deps"):
+ continue
+ abspath = os.path.join(root, name)
+ deps_files.append(abspath)
+ return deps_files
+
+ @cached_property
+ def DepsCollection(self):
+ """ Collect all the dependency files list from all .deps files under a module's build folder """
+ includes = set()
+ targetname = [item[0].Name for item in self.TargetFileList.values()]
+ for abspath in self.deps_files:
+ try:
+ with open(abspath,"r") as fd:
+ lines = fd.readlines()
+
+ firstlineitems = lines[0].split(": ")
+ dependency_file = firstlineitems[1].strip(" \\\n")
+ dependency_file = dependency_file.strip('''"''')
+ if dependency_file:
+ if os.path.normpath(dependency_file +".deps") == abspath:
+ continue
+ filename = os.path.basename(dependency_file).strip()
+ if filename not in targetname:
+ includes.add(dependency_file.strip())
+
+ for item in lines[1:]:
+ if item == DEP_FILE_TAIL:
+ continue
+ dependency_file = item.strip(" \\\n")
+ dependency_file = dependency_file.strip('''"''')
+ if dependency_file == '':
+ continue
+ if os.path.normpath(dependency_file +".deps") == abspath:
+ continue
+ filename = os.path.basename(dependency_file).strip()
+ if filename in targetname:
+ continue
+ includes.add(dependency_file.strip())
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ continue
+ rt = sorted(list(set([item.strip(' " \\\n') for item in includes])))
+ return rt
+
+ @cached_property
+ def SourceFileList(self):
+ """ Get a map of module's source files name to module's source files path """
+ source = {os.path.basename(item.File):item.Path for item in self.module_autogen.SourceFileList}
+ middle_file = {}
+ for afile in source:
+ if afile.upper().endswith(".VFR"):
+ middle_file.update({afile.split(".")[0]+".c":os.path.join(self.module_autogen.DebugDir,afile.split(".")[0]+".c")})
+ if afile.upper().endswith((".S","ASM")):
+ middle_file.update({afile.split(".")[0]+".i":os.path.join(self.module_autogen.OutputDir,afile.split(".")[0]+".i")})
+ if afile.upper().endswith(".ASL"):
+ middle_file.update({afile.split(".")[0]+".i":os.path.join(self.module_autogen.OutputDir,afile.split(".")[0]+".i")})
+ source.update({"AutoGen.c":os.path.join(self.module_autogen.OutputDir,"AutoGen.c")})
+ source.update(middle_file)
+ return source
+
+ @cached_property
+ def HasNamesakeSourceFile(self):
+ source_base_name = set([os.path.basename(item.File) for item in self.module_autogen.SourceFileList])
+ rt = len(source_base_name) != len(self.module_autogen.SourceFileList)
+ return rt
+ @cached_property
+ def CcPPCommandPathSet(self):
+ rt = set()
+ rt.add(self.module_autogen.BuildOption.get('CC',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('ASLCC',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('ASLPP',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('VFRPP',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('PP',{}).get('PATH'))
+ rt.add(self.module_autogen.BuildOption.get('APP',{}).get('PATH'))
+ rt.discard(None)
+ return rt
+ @cached_property
+ def TargetFileList(self):
+ """ Get a map of module's target name to a tuple of module's targets path and whose input file path """
+ targets = {}
+ targets["AutoGen.obj"] = (PathClass(os.path.join(self.module_autogen.OutputDir,"AutoGen.obj")),PathClass(os.path.join(self.module_autogen.DebugDir,"AutoGen.c")))
+ for item in self.module_autogen.Targets.values():
+ for block in item:
+ targets[block.Target.Path] = (block.Target,block.Inputs[0])
+ return targets
+
+ def GetRealTarget(self,source_file_abs):
+ """ Get the final target file based on source file abspath """
+ source_target_map = {item[1].Path:item[0].Path for item in self.TargetFileList.values()}
+ source_name_map = {item[1].File:item[0].Path for item in self.TargetFileList.values()}
+ target_abs = source_target_map.get(source_file_abs)
+ if target_abs is None:
+ if source_file_abs.strip().endswith(".i"):
+ sourcefilename = os.path.basename(source_file_abs.strip())
+ for sourcefile in source_name_map:
+ if sourcefilename.split(".")[0] == sourcefile.split(".")[0]:
+ target_abs = source_name_map[sourcefile]
+ break
+ else:
+ target_abs = source_file_abs
+ else:
+ target_abs = source_file_abs
+ return target_abs
+
+ def CreateDepsFileForMsvc(self, DepList):
+ """ Generate dependency files, .deps file from /showIncludes output message """
+ if not DepList:
+ return
+ ModuleDepDict = {}
+ current_source = ""
+ SourceFileAbsPathMap = self.SourceFileList
+ for line in DepList:
+ line = line.strip()
+ if self.HasNamesakeSourceFile:
+ for cc_cmd in self.CcPPCommandPathSet:
+ if cc_cmd in line:
+ if '''"'''+cc_cmd+'''"''' in line:
+ cc_options = line[len(cc_cmd)+2:].split()
+ else:
+ cc_options = line[len(cc_cmd):].split()
+ for item in cc_options:
+ if not item.startswith("/"):
+ if item.endswith(".txt") and item.startswith("@"):
+ with open(item[1:], "r") as file:
+ source_files = file.readlines()[0].split()
+ SourceFileAbsPathMap = {os.path.basename(file): file for file in source_files if
+ os.path.exists(file)}
+ else:
+ if os.path.exists(item):
+ SourceFileAbsPathMap.update({os.path.basename(item): item.strip()})
+ if line in SourceFileAbsPathMap:
+ current_source = line
+ if current_source not in ModuleDepDict:
+ ModuleDepDict[SourceFileAbsPathMap[current_source]] = []
+ elif "Note: including file:" == line.lstrip()[:21]:
+ if not current_source:
+ EdkLogger.error("build",BUILD_ERROR, "Parse /showIncludes output failed. line: %s. \n" % line, RaiseError=False)
+ else:
+ ModuleDepDict[SourceFileAbsPathMap[current_source]].append(line.lstrip()[22:].strip())
+
+ for source_abs in ModuleDepDict:
+ if ModuleDepDict[source_abs]:
+ target_abs = self.GetRealTarget(source_abs)
+ dep_file_name = os.path.basename(source_abs) + ".deps"
+ SaveFileOnChange(os.path.join(os.path.dirname(target_abs),dep_file_name)," \\\n".join([target_abs+":"] + ['''"''' + item +'''"''' for item in ModuleDepDict[source_abs]]),False)
+
+ def UpdateDepsFileforNonMsvc(self):
+ """ Update .deps files.
+ 1. Update target path to absolute path.
+ 2. Update middle target to final target.
+ """
+
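+        # e.g. a compiler-written first line "Foo.obj: \" is rewritten so that
+        # the target is the absolute path of the real final target, and
+        # DEP_FILE_TAIL is appended so the file is not rewritten on a later pass.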
+ for abspath in self.deps_files:
+ if abspath.endswith(".trim.deps"):
+ continue
+ try:
+ newcontent = []
+ with open(abspath,"r") as fd:
+ lines = fd.readlines()
+ if lines[-1] == DEP_FILE_TAIL:
+ continue
+ firstlineitems = lines[0].strip().split(" ")
+
+ if len(firstlineitems) > 2:
+ sourceitem = firstlineitems[1]
+ else:
+ sourceitem = lines[1].strip().split(" ")[0]
+
+ source_abs = self.SourceFileList.get(sourceitem,sourceitem)
+ firstlineitems[0] = self.GetRealTarget(source_abs)
+ p_target = firstlineitems
+ if not p_target[0].strip().endswith(":"):
+ p_target[0] += ": "
+
+ if len(p_target) == 2:
+ p_target[0] += lines[1]
+ newcontent.append(p_target[0])
+ newcontent.extend(lines[2:])
+ else:
+ line1 = " ".join(p_target).strip()
+ line1 += "\n"
+ newcontent.append(line1)
+ newcontent.extend(lines[1:])
+
+ newcontent.append("\n")
+ newcontent.append(DEP_FILE_TAIL)
+ with open(abspath,"w") as fw:
+ fw.write("".join(newcontent))
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ continue
+
+ def UpdateDepsFileforTrim(self):
+ """ Update .deps file which generated by trim. """
+
+ for abspath in self.deps_files:
+ if not abspath.endswith(".trim.deps"):
+ continue
+ try:
+ newcontent = []
+ with open(abspath,"r") as fd:
+ lines = fd.readlines()
+ if lines[-1] == DEP_FILE_TAIL:
+ continue
+
+ source_abs = lines[0].strip().split(" ")[0]
+ targetitem = self.GetRealTarget(source_abs.strip(" :"))
+
+ targetitem += ": "
+ if len(lines)>=2:
+ targetitem += lines[1]
+ newcontent.append(targetitem)
+ newcontent.extend(lines[2:])
+ newcontent.append("\n")
+ newcontent.append(DEP_FILE_TAIL)
+ with open(abspath,"w") as fw:
+ fw.write("".join(newcontent))
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ continue
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py
new file mode 100755
index 00000000..38e3861c
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/InfSectionParser.py
@@ -0,0 +1,119 @@
+## @file
+# Parse an INF file and get the data of specified sections.
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.DataType import *
+
+
+class InfSectionParser():
+ def __init__(self, FilePath):
+ self._FilePath = FilePath
+ self._FileSectionDataList = []
+ self._ParserInf()
+
+ def _ParserInf(self):
+ FileLinesList = []
+ UserExtFind = False
+ FindEnd = True
+ FileLastLine = False
+ SectionLine = ''
+ SectionData = []
+
+ try:
+ with open(self._FilePath, "r") as File:
+ FileLinesList = File.readlines()
+ except BaseException:
+ EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
+
+ for Index in range(0, len(FileLinesList)):
+ line = str(FileLinesList[Index]).strip()
+ if Index + 1 == len(FileLinesList):
+ FileLastLine = True
+ NextLine = ''
+ else:
+ NextLine = str(FileLinesList[Index + 1]).strip()
+            if UserExtFind and not FindEnd:
+ if line:
+ SectionData.append(line)
+ if line.startswith(TAB_SECTION_START) and line.endswith(TAB_SECTION_END):
+ SectionLine = line
+ UserExtFind = True
+ FindEnd = False
+
+ if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
+ NextLine[-1] == TAB_SECTION_END) or FileLastLine:
+ UserExtFind = False
+ FindEnd = True
+ self._FileSectionDataList.append({SectionLine: SectionData[:]})
+ del SectionData[:]
+ SectionLine = ''
+
+ # Get user extension TianoCore data
+ #
+    # @return: a list of dictionaries, each mapping a section header to a list of that section's data lines.
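+    # e.g. a section such as [UserExtensions.TianoCore."ExtraFiles"] is returned
+    # as {'UserExtensions.TianoCore."ExtraFiles"': [<section lines>]} (illustrative).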
+ def GetUserExtensionTianoCore(self):
+ UserExtensionTianoCore = []
+ if not self._FileSectionDataList:
+ return UserExtensionTianoCore
+ for SectionDataDict in self._FileSectionDataList:
+ for key in SectionDataDict:
+ if key.lower().startswith("[userextensions") and key.lower().find('.tianocore.') > -1:
+ SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
+ SubSectionList = [SectionLine]
+ if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
+ SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
+ for SubSection in SubSectionList:
+ if SubSection.lower().find('.tianocore.') > -1:
+ UserExtensionTianoCore.append({SubSection: SectionDataDict[key]})
+ return UserExtensionTianoCore
+
+ # Get depex expression
+ #
+    # @return: a list of dictionaries, each mapping an (arch, module type) key to a list of that section's depex lines.
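+    # e.g. "[Depex.X64.DXE_DRIVER]" yields {('X64', 'DXE_DRIVER'): [<depex lines>]},
+    # while a bare "[Depex]" yields {('COMMON', 'COMMON'): [<depex lines>]}.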
+ def GetDepexExpresionList(self):
+ DepexExpressionList = []
+ if not self._FileSectionDataList:
+ return DepexExpressionList
+ for SectionDataDict in self._FileSectionDataList:
+ for key in SectionDataDict:
+ if key.lower() == "[depex]" or key.lower().startswith("[depex."):
+ SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
+ SubSectionList = [SectionLine]
+ if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
+ SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
+ for SubSection in SubSectionList:
+ SectionList = SubSection.split(TAB_SPLIT)
+ SubKey = ()
+ if len(SectionList) == 1:
+ SubKey = (TAB_ARCH_COMMON, TAB_ARCH_COMMON)
+ elif len(SectionList) == 2:
+ SubKey = (SectionList[1], TAB_ARCH_COMMON)
+ elif len(SectionList) == 3:
+ SubKey = (SectionList[1], SectionList[2])
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, 'Section %s is invalid.' % key)
+ DepexExpressionList.append({SubKey: SectionDataDict[key]})
+ return DepexExpressionList
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
new file mode 100755
index 00000000..2bf2cf1a
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py
@@ -0,0 +1,2456 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from AutoGen.AutoGen import AutoGen
+from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+from Common.StringUtils import NormPath,GetSplitList
+from collections import defaultdict
+from Workspace.WorkspaceCommon import OrderedListDict
+import os.path as path
+import copy
+import hashlib
+from . import InfSectionParser
+from . import GenC
+from . import GenMake
+from . import GenDepex
+from io import BytesIO
+from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
+from Workspace.MetaFileCommentParser import UsageList
+from .GenPcdDb import CreatePcdDatabaseCode
+from Common.caching import cached_class_function
+from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
+import json
+import tempfile
+
+## Mapping Makefile type
+gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}
+#
+# Regular expressions for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
+# is that the former uses /I while the latter use -I to specify include directories
+#
+gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
+gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
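+# Illustrative: "/I C:\ws\MdePkg\Include" captures "C:\ws\MdePkg\Include" with the
+# MSFT pattern, and "-I /ws/MdePkg/Include" captures "/ws/MdePkg/Include" with the
+# other pattern (hypothetical paths).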
+
+## default file name for AutoGen
+gAutoGenCodeFileName = "AutoGen.c"
+gAutoGenHeaderFileName = "AutoGen.h"
+gAutoGenStringFileName = "%(module_name)sStrDefs.h"
+gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
+gAutoGenDepexFileName = "%(module_name)s.depex"
+gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
+gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
+gInfSpecVersion = "0x00010017"
+
+#
+# Match name = variable
+#
+gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
+#
+# The format of a guid in an efivarstore statement looks like the following and must be correct:
+# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
+#
+gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
+
+#
+# Template string used to generate the As-Built INF
+#
+gAsBuiltInfHeaderString = TemplateString("""${header_comments}
+
+# DO NOT EDIT
+# FILE auto-generated
+
+[Defines]
+ INF_VERSION = ${module_inf_version}
+ BASE_NAME = ${module_name}
+ FILE_GUID = ${module_guid}
+ MODULE_TYPE = ${module_module_type}${BEGIN}
+ VERSION_STRING = ${module_version_string}${END}${BEGIN}
+ PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
+ UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
+ PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
+ ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
+ UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
+ CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
+ DESTRUCTOR = ${module_destructor}${END}${BEGIN}
+ SHADOW = ${module_shadow}${END}${BEGIN}
+ PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
+ PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
+ PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
+ PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
+ BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
+ SPEC = ${module_spec}${END}${BEGIN}
+ UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
+ MODULE_UNI_FILE = ${module_uni_file}${END}
+
+[Packages.${module_arch}]${BEGIN}
+ ${package_item}${END}
+
+[Binaries.${module_arch}]${BEGIN}
+ ${binary_item}${END}
+
+[PatchPcd.${module_arch}]${BEGIN}
+ ${patchablepcd_item}
+${END}
+
+[Protocols.${module_arch}]${BEGIN}
+ ${protocol_item}
+${END}
+
+[Ppis.${module_arch}]${BEGIN}
+ ${ppi_item}
+${END}
+
+[Guids.${module_arch}]${BEGIN}
+ ${guid_item}
+${END}
+
+[PcdEx.${module_arch}]${BEGIN}
+ ${pcd_item}
+${END}
+
+[LibraryClasses.${module_arch}]
+## @LIB_INSTANCES${BEGIN}
+# ${libraryclasses_item}${END}
+
+${depexsection_item}
+
+${userextension_tianocore_item}
+
+${tail_comments}
+
+[BuildOptions.${module_arch}]
+## @AsBuilt${BEGIN}
+## ${flags_item}${END}
+""")
+#
+# Extend the lists contained in a dictionary with the lists stored in another dictionary.
+# If CopyToDict is not derived from defaultdict(list), this may raise an exception.
+#
+def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
+ for Key in CopyFromDict:
+ CopyToDict[Key].extend(CopyFromDict[Key])
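+# e.g. with d = defaultdict(list), ExtendCopyDictionaryLists(d, {"GuidCName": ["## CONSUMES"]})
+# leaves d["GuidCName"] == ["## CONSUMES"] (illustrative key and value).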
+
+# Create a directory specified by a set of path elements and return the full path
+def _MakeDir(PathList):
+ RetVal = path.join(*PathList)
+ CreateDirectory(RetVal)
+ return RetVal
+
+#
+# Convert string to C format array
+#
+def _ConvertStringToByteArray(Value):
+ Value = Value.strip()
+ if not Value:
+ return None
+ if Value[0] == '{':
+ if not Value.endswith('}'):
+ return None
+ Value = Value.replace(' ', '').replace('{', '').replace('}', '')
+ ValFields = Value.split(',')
+ try:
+ for Index in range(len(ValFields)):
+ ValFields[Index] = str(int(ValFields[Index], 0))
+ except ValueError:
+ return None
+ Value = '{' + ','.join(ValFields) + '}'
+ return Value
+
+ Unicode = False
+ if Value.startswith('L"'):
+ if not Value.endswith('"'):
+ return None
+ Value = Value[1:]
+ Unicode = True
+ elif not Value.startswith('"') or not Value.endswith('"'):
+ return None
+
+    Value = eval(Value)  # translate escape characters in the quoted string
+ NewValue = '{'
+ for Index in range(0, len(Value)):
+ if Unicode:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
+ else:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
+ Value = NewValue + '0}'
+ return Value
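+# Illustrative results (not exercised by the build flow itself):
+#   _ConvertStringToByteArray('L"AB"')        -> '{65,66,0}'
+#   _ConvertStringToByteArray('"AB"')         -> '{65,66,0}'
+#   _ConvertStringToByteArray('{0x01, 0x02}') -> '{1,2}'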
+
+## ModuleAutoGen class
+#
+# This class encapsulates the AutoGen behaviors for the build tools. In addition to
+# generating AutoGen.h and AutoGen.c, it will generate the *.depex file according
+# to the [depex] section in the module's INF file.
+#
+class ModuleAutoGen(AutoGen):
+ # call super().__init__ then call the worker function with different parameter count
+ def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ if not hasattr(self, "_Init"):
+ self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
+ self._Init = True
+
+ ## Cache the timestamps of metafiles of every module in a class attribute
+ #
+ TimeDict = {}
+
+ def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+        # check if this module is employed by the active platform
+        if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch, args[-1]).ValidModule(MetaFile):
+ EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
+ % (MetaFile, Arch))
+ return None
+ return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+
+ ## Initialize ModuleAutoGen
+ #
+ # @param Workspace EdkIIWorkspaceBuild object
+ # @param ModuleFile The path of module file
+ # @param Target Build target (DEBUG, RELEASE)
+ # @param Toolchain Name of tool chain
+ # @param Arch The arch the module supports
+ # @param PlatformFile Platform meta-file
+ #
+ def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
+
+ self.Workspace = Workspace
+ self.WorkspaceDir = ""
+ self.PlatformInfo = None
+ self.DataPipe = DataPipe
+ self.__init_platform_info__()
+ self.MetaFile = ModuleFile
+ self.SourceDir = self.MetaFile.SubDir
+ self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
+
+ self.ToolChain = Toolchain
+ self.BuildTarget = Target
+ self.Arch = Arch
+ self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
+ self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
+
+ self.IsCodeFileCreated = False
+ self.IsAsBuiltInfCreated = False
+ self.DepexGenerated = False
+
+ self.BuildDatabase = self.Workspace.BuildDatabase
+ self.BuildRuleOrder = None
+ self.BuildTime = 0
+
+ self._GuidComments = OrderedListDict()
+ self._ProtocolComments = OrderedListDict()
+ self._PpiComments = OrderedListDict()
+ self._BuildTargets = None
+ self._IntroBuildTargetList = None
+ self._FinalBuildTargetList = None
+ self._FileTypes = None
+
+ self.AutoGenDepSet = set()
+ self.ReferenceModules = []
+ self.ConstPcd = {}
+ self.Makefile = None
+ self.FileDependCache = {}
+
+ def __init_platform_info__(self):
+ pinfo = self.DataPipe.Get("P_Info")
+ self.WorkspaceDir = pinfo.get("WorkspaceDir")
+ self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
+ ## hash() operator of ModuleAutoGen
+ #
+ # The module file path and arch string will be used to represent
+ # hash value of this object
+ #
+ # @retval int Hash value of the module file path and arch
+ #
+ @cached_class_function
+ def __hash__(self):
+ return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, self.Arch)
+
+ # Get FixedAtBuild Pcds of this Module
+ @cached_property
+ def FixedAtBuildPcds(self):
+ RetVal = []
+ for Pcd in self.ModulePcdList:
+ if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
+ continue
+ if Pcd not in RetVal:
+ RetVal.append(Pcd)
+ return RetVal
+
+ @cached_property
+ def FixedVoidTypePcds(self):
+ RetVal = {}
+ for Pcd in self.FixedAtBuildPcds:
+ if Pcd.DatumType == TAB_VOID:
+ if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
+ RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
+ return RetVal
+
+ @property
+ def UniqueBaseName(self):
+ ModuleNames = self.DataPipe.Get("M_Name")
+ if not ModuleNames:
+ return self.Name
+ return ModuleNames.get((self.Name,self.MetaFile),self.Name)
+
+ # Macros could be used in build_rule.txt (also Makefile)
+ @cached_property
+ def Macros(self):
+ return OrderedDict((
+ ("WORKSPACE" ,self.WorkspaceDir),
+ ("MODULE_NAME" ,self.Name),
+ ("MODULE_NAME_GUID" ,self.UniqueBaseName),
+ ("MODULE_GUID" ,self.Guid),
+ ("MODULE_VERSION" ,self.Version),
+ ("MODULE_TYPE" ,self.ModuleType),
+ ("MODULE_FILE" ,str(self.MetaFile)),
+ ("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
+ ("MODULE_RELATIVE_DIR" ,self.SourceDir),
+ ("MODULE_DIR" ,self.SourceDir),
+ ("BASE_NAME" ,self.Name),
+ ("ARCH" ,self.Arch),
+ ("TOOLCHAIN" ,self.ToolChain),
+ ("TOOLCHAIN_TAG" ,self.ToolChain),
+ ("TOOL_CHAIN_TAG" ,self.ToolChain),
+ ("TARGET" ,self.BuildTarget),
+ ("BUILD_DIR" ,self.PlatformInfo.BuildDir),
+ ("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
+ ("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
+ ("MODULE_BUILD_DIR" ,self.BuildDir),
+ ("OUTPUT_DIR" ,self.OutputDir),
+ ("DEBUG_DIR" ,self.DebugDir),
+ ("DEST_DIR_OUTPUT" ,self.OutputDir),
+ ("DEST_DIR_DEBUG" ,self.DebugDir),
+ ("PLATFORM_NAME" ,self.PlatformInfo.Name),
+ ("PLATFORM_GUID" ,self.PlatformInfo.Guid),
+ ("PLATFORM_VERSION" ,self.PlatformInfo.Version),
+ ("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
+ ("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
+ ("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
+ ("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
+ ))
+
+ ## Return the module build data object
+ @cached_property
+ def Module(self):
+ return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+
+ ## Return the module name
+ @cached_property
+ def Name(self):
+ return self.Module.BaseName
+
+ ## Return the module DxsFile if exist
+ @cached_property
+ def DxsFile(self):
+ return self.Module.DxsFile
+
+ ## Return the module meta-file GUID
+ @cached_property
+ def Guid(self):
+ #
+ # To build same module more than once, the module path with FILE_GUID overridden has
+ # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
+ # in DSC. The overridden GUID can be retrieved from file name
+ #
+ if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
+ #
+ # Length of GUID is 36
+ #
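+            # e.g. os.path.basename(".../8913c5e0-33f6-4d86-9bf1-43ef89fc0666module.inf")[:36]
+            # yields "8913c5e0-33f6-4d86-9bf1-43ef89fc0666" (illustrative GUID).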
+ return os.path.basename(self.MetaFile.Path)[:36]
+ return self.Module.Guid
+
+ ## Return the module version
+ @cached_property
+ def Version(self):
+ return self.Module.Version
+
+ ## Return the module type
+ @cached_property
+ def ModuleType(self):
+ return self.Module.ModuleType
+
+ ## Return the component type (for Edk.x style of module)
+ @cached_property
+ def ComponentType(self):
+ return self.Module.ComponentType
+
+ ## Return the build type
+ @cached_property
+ def BuildType(self):
+ return self.Module.BuildType
+
+ ## Return the PCD_IS_DRIVER setting
+ @cached_property
+ def PcdIsDriver(self):
+ return self.Module.PcdIsDriver
+
+ ## Return the autogen version, i.e. module meta-file version
+ @cached_property
+ def AutoGenVersion(self):
+ return self.Module.AutoGenVersion
+
+ ## Check if the module is library or not
+ @cached_property
+ def IsLibrary(self):
+ return bool(self.Module.LibraryClass)
+
+ ## Check if the module is binary module or not
+ @cached_property
+ def IsBinaryModule(self):
+ return self.Module.IsBinaryModule
+
+ ## Return the directory to store intermediate files of the module
+ @cached_property
+ def BuildDir(self):
+ return _MakeDir((
+ self.PlatformInfo.BuildDir,
+ self.Arch,
+ self.SourceDir,
+ self.MetaFile.BaseName
+ ))
+
+ ## Return the directory to store the intermediate object files of the module
+ @cached_property
+ def OutputDir(self):
+ return _MakeDir((self.BuildDir, "OUTPUT"))
+
+ ## Return the directory path to store ffs file
+ @cached_property
+ def FfsOutputDir(self):
+ if GlobalData.gFdfParser:
+ return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+ return ''
+
+ ## Return the directory to store auto-gened source files of the module
+ @cached_property
+ def DebugDir(self):
+ return _MakeDir((self.BuildDir, "DEBUG"))
+
+ ## Return the path of custom file
+ @cached_property
+ def CustomMakefile(self):
+ RetVal = {}
+ for Type in self.Module.CustomMakefile:
+ MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
+ File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
+ RetVal[MakeType] = File
+ return RetVal
+
+ ## Return the directory of the makefile
+ #
+ # @retval string The directory string of module's makefile
+ #
+ @cached_property
+ def MakeFileDir(self):
+ return self.BuildDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ return self.PlatformInfo.BuildCommand
+
+ ## Get Module package and Platform package
+ #
+ # @retval list The list of package object
+ #
+ @cached_property
+ def PackageList(self):
+        PkgList = []
+        if self.Module.Packages:
+            PkgList.extend(self.Module.Packages)
+        Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+        for Package in Platform.Packages:
+            if Package in PkgList:
+                continue
+            PkgList.append(Package)
+        return PkgList
+
+ ## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
+ #
+ # @retval list The list of package object
+ #
+ @cached_property
+ def DerivedPackageList(self):
+ PackageList = []
+ PackageList.extend(self.PackageList)
+ for M in self.DependentLibraryList:
+ for Package in M.Packages:
+ if Package in PackageList:
+ continue
+ PackageList.append(Package)
+ return PackageList
+
+ ## Get the depex string
+ #
+    # @return : a string containing all depex expressions.
+ def _GetDepexExpresionString(self):
+ DepexStr = ''
+ DepexList = []
+        ## DPX_SOURCE in the Defines section.
+ if self.Module.DxsFile:
+ return DepexStr
+ for M in [self.Module] + self.DependentLibraryList:
+ Filename = M.MetaFile.Path
+ InfObj = InfSectionParser.InfSectionParser(Filename)
+ DepexExpressionList = InfObj.GetDepexExpresionList()
+ for DepexExpression in DepexExpressionList:
+ for key in DepexExpression:
+ Arch, ModuleType = key
+ DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
+                    # If the build module type is USER_DEFINED, all the different
+                    # DEPEX section tags are copied into the As-Built INF file as
+                    # separate DEPEX section tags.
+ if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
+ if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
+ DepexList.append({(Arch, ModuleType): DepexExpr})
+ else:
+ if Arch.upper() == TAB_ARCH_COMMON or \
+ (Arch.upper() == self.Arch.upper() and \
+ ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
+ DepexList.append({(Arch, ModuleType): DepexExpr})
+
+        # The build module type is USER_DEFINED.
+ if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
+ for Depex in DepexList:
+ for key in Depex:
+ DepexStr += '[Depex.%s.%s]\n' % key
+ DepexStr += '\n'.join('# '+ val for val in Depex[key])
+ DepexStr += '\n\n'
+ if not DepexStr:
+ return '[Depex.%s]\n' % self.Arch
+ return DepexStr
+
+        # The build module type is not USER_DEFINED.
+ Count = 0
+ for Depex in DepexList:
+ Count += 1
+ if DepexStr != '':
+ DepexStr += ' AND '
+ DepexStr += '('
+ for D in Depex.values():
+ DepexStr += ' '.join(val for val in D)
+ Index = DepexStr.find('END')
+ if Index > -1 and Index == len(DepexStr) - 3:
+ DepexStr = DepexStr[:-3]
+ DepexStr = DepexStr.strip()
+ DepexStr += ')'
+ if Count == 1:
+ DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
+ if not DepexStr:
+ return '[Depex.%s]\n' % self.Arch
+ return '[Depex.%s]\n# ' % self.Arch + DepexStr
+
+ ## Merge dependency expression
+ #
+    #   @retval     dict    Mapping from module type to the parsed dependency expression token list
+ #
+ @cached_property
+ def DepexList(self):
+ if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
+ return {}
+
+ DepexList = []
+ #
+ # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
+ #
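+        # e.g. a module depex of [gEfiPcdProtocolGuid] merged with one library
+        # depex of [gEfiHiiDatabaseProtocolGuid] produces the token list
+        # ['(', 'gEfiPcdProtocolGuid', ')', 'AND', '(', 'gEfiHiiDatabaseProtocolGuid', ')']
+        # (illustrative protocol names).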
+ FixedVoidTypePcds = {}
+ for M in [self] + self.LibraryAutoGenList:
+ FixedVoidTypePcds.update(M.FixedVoidTypePcds)
+ for M in [self] + self.LibraryAutoGenList:
+ Inherited = False
+ for D in M.Module.Depex[self.Arch, self.ModuleType]:
+ if DepexList != []:
+ DepexList.append('AND')
+ DepexList.append('(')
+ #replace D with value if D is FixedAtBuild PCD
+ NewList = []
+ for item in D:
+ if '.' not in item:
+ NewList.append(item)
+ else:
+                        try:
+                            Value = FixedVoidTypePcds[item]
+                        except KeyError:
+                            EdkLogger.error("build", FORMAT_INVALID, "{} used in the [Depex] section must be a FixedAtBuild PCD with VOID* datum type in the module.".format(item))
+                        if len(Value.split(',')) != 16:
+                            EdkLogger.error("build", FORMAT_INVALID,
+                                            "{} used in the [Depex] section must be a FixedAtBuild PCD with VOID* datum type and a 16-byte value in the module.".format(item))
+                        NewList.append(Value)
+
+ DepexList.extend(NewList)
+ if DepexList[-1] == 'END': # no need of a END at this time
+ DepexList.pop()
+ DepexList.append(')')
+ Inherited = True
+ if Inherited:
+ EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
+ if 'BEFORE' in DepexList or 'AFTER' in DepexList:
+ break
+ if len(DepexList) > 0:
+ EdkLogger.verbose('')
+ return {self.ModuleType:DepexList}
+
+ ## Merge dependency expression
+ #
+    #   @retval     dict    Mapping from module type to the merged dependency expression string
+ #
+ @cached_property
+ def DepexExpressionDict(self):
+ if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
+ return {}
+
+ DepexExpressionString = ''
+ #
+        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
+ #
+ for M in [self.Module] + self.DependentLibraryList:
+ Inherited = False
+ for D in M.DepexExpression[self.Arch, self.ModuleType]:
+ if DepexExpressionString != '':
+ DepexExpressionString += ' AND '
+ DepexExpressionString += '('
+ DepexExpressionString += D
+ DepexExpressionString = DepexExpressionString.rstrip('END').strip()
+ DepexExpressionString += ')'
+ Inherited = True
+ if Inherited:
+ EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
+ if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
+ break
+ if len(DepexExpressionString) > 0:
+ EdkLogger.verbose('')
+
+ return {self.ModuleType:DepexExpressionString}
+
+    # Get the TianoCore user extensions, including those of the dependent libraries.
+    # @retval: a list containing the TianoCore user extensions.
+ #
+ def _GetTianoCoreUserExtensionList(self):
+ TianoCoreUserExtentionList = []
+ for M in [self.Module] + self.DependentLibraryList:
+ Filename = M.MetaFile.Path
+ InfObj = InfSectionParser.InfSectionParser(Filename)
+ TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
+ for TianoCoreUserExtent in TianoCoreUserExtenList:
+ for Section in TianoCoreUserExtent:
+ ItemList = Section.split(TAB_SPLIT)
+ Arch = self.Arch
+ if len(ItemList) == 4:
+ Arch = ItemList[3]
+ if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
+ TianoCoreList = []
+ TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
+ TianoCoreList.extend(TianoCoreUserExtent[Section][:])
+ TianoCoreList.append('\n')
+ TianoCoreUserExtentionList.append(TianoCoreList)
+
+ return TianoCoreUserExtentionList
+
+ ## Return the list of specification version required for the module
+ #
+ # @retval list The list of specification defined in module file
+ #
+ @cached_property
+ def Specification(self):
+ return self.Module.Specification
+
+ ## Tool option for the module build
+ #
+ # @param PlatformInfo The object of PlatformBuildInfo
+ # @retval dict The dict containing valid options
+ #
+ @cached_property
+ def BuildOption(self):
+ RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
+ if self.BuildRuleOrder:
+ self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
+ return RetVal
+
+ ## Get include path list from tool option for the module build
+ #
+ # @retval list The include path list
+ #
+ @cached_property
+ def BuildOptionIncPathList(self):
+ #
+        # Regular expressions for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
+        # is that the former uses /I while the latter use -I to specify include directories
+ #
+        if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT,):
+ BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
+ elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
+ BuildOptIncludeRegEx = gBuildOptIncludePatternOther
+ else:
+ #
+            # New ToolChainFamily; it is not known whether there is an option to specify include directories
+ #
+ return []
+
+ RetVal = []
+ for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
+ try:
+ FlagOption = self.BuildOption[Tool]['FLAGS']
+ except KeyError:
+ FlagOption = ''
+
+ if self.ToolChainFamily != 'RVCT':
+ IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
+ else:
+ #
+                # RVCT may specify a list of directories separated by commas
+ #
+ IncPathList = []
+ for Path in BuildOptIncludeRegEx.findall(FlagOption):
+ PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
+ IncPathList.extend(NormPath(PathEntry, self.Macros) for PathEntry in PathList)
+
+ #
+ # EDK II modules must not reference header files outside of the packages they depend on or
+ # within the module's directory tree. Report error if violation.
+ #
+            if not GlobalData.gDisableIncludePathCheck:
+ for Path in IncPathList:
+ if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
+ ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
+ EdkLogger.error("build",
+ PARAMETER_INVALID,
+ ExtraData=ErrMsg,
+ File=str(self.MetaFile))
+ RetVal += IncPathList
+ return RetVal
+
+ ## Return a list of files which can be built from source
+ #
+ # What kind of files can be built is determined by build rules in
+ # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
+ #
+ @cached_property
+ def SourceFileList(self):
+ RetVal = []
+ ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
+ ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
+ for F in self.Module.Sources:
+ # match tool chain
+ if F.TagName not in ToolChainTagSet:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
+ "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
+ continue
+ # match tool chain family or build rule family
+ if F.ToolChainFamily not in ToolChainFamilySet:
+ EdkLogger.debug(
+ EdkLogger.DEBUG_0,
+ "The file [%s] must be built by tools of [%s], " \
+ "but current toolchain family is [%s], buildrule family is [%s]" \
+ % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
+ continue
+
+ # add the file path into search path list for file including
+ if F.Dir not in self.IncludePathList:
+ self.IncludePathList.insert(0, F.Dir)
+ RetVal.append(F)
+
+ self._MatchBuildRuleOrder(RetVal)
+
+ for F in RetVal:
+ self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
+ return RetVal
+
+ def _MatchBuildRuleOrder(self, FileList):
+ Order_Dict = {}
+        self.BuildOption  # accessed for its side effect of populating self.BuildRuleOrder
+ for SingleFile in FileList:
+ if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
+ key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
+ if key in Order_Dict:
+ Order_Dict[key].append(SingleFile.Ext)
+ else:
+ Order_Dict[key] = [SingleFile.Ext]
+
+ RemoveList = []
+ for F in Order_Dict:
+ if len(Order_Dict[F]) > 1:
+ Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
+ for Ext in Order_Dict[F][1:]:
+ RemoveList.append(F + Ext)
+
+ for item in RemoveList:
+ FileList.remove(item)
+
+ return FileList
+
+ ## Return the list of unicode files
+ @cached_property
+ def UnicodeFileList(self):
+ return self.FileTypes.get(TAB_UNICODE_FILE,[])
+
+ ## Return the list of vfr files
+ @cached_property
+ def VfrFileList(self):
+ return self.FileTypes.get(TAB_VFR_FILE, [])
+
+ ## Return the list of Image Definition files
+ @cached_property
+ def IdfFileList(self):
+ return self.FileTypes.get(TAB_IMAGE_FILE,[])
+
+ ## Return a list of files which can be built from binary
+ #
+ # "Build" binary files are just to copy them to build directory.
+ #
+ # @retval list The list of files which can be built later
+ #
+ @cached_property
+ def BinaryFileList(self):
+ RetVal = []
+ for F in self.Module.Binaries:
+ if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
+ continue
+ RetVal.append(F)
+ self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
+ return RetVal
+
+ @cached_property
+ def BuildRules(self):
+ RetVal = {}
+ BuildRuleDatabase = self.PlatformInfo.BuildRule
+ for Type in BuildRuleDatabase.FileTypeList:
+ #first try getting build rule by BuildRuleFamily
+ RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
+ if not RuleObject:
+ # build type is always module type, but ...
+ if self.ModuleType != self.BuildType:
+ RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
+ #second try getting build rule by ToolChainFamily
+ if not RuleObject:
+ RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
+ if not RuleObject:
+ # build type is always module type, but ...
+ if self.ModuleType != self.BuildType:
+ RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
+ if not RuleObject:
+ continue
+ RuleObject = RuleObject.Instantiate(self.Macros)
+ RetVal[Type] = RuleObject
+ for Ext in RuleObject.SourceFileExtList:
+ RetVal[Ext] = RuleObject
+ return RetVal
+
+ def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
+ if self._BuildTargets is None:
+ self._IntroBuildTargetList = set()
+ self._FinalBuildTargetList = set()
+ self._BuildTargets = defaultdict(set)
+ self._FileTypes = defaultdict(set)
+
+ if not BinaryFileList:
+ BinaryFileList = self.BinaryFileList
+
+ SubDirectory = os.path.join(self.OutputDir, File.SubDir)
+ if not os.path.exists(SubDirectory):
+ CreateDirectory(SubDirectory)
+ TargetList = set()
+ FinalTargetName = set()
+ RuleChain = set()
+ SourceList = [File]
+ Index = 0
+ #
+ # Make sure to get build rule order value
+ #
+ self.BuildOption
+
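+        # Walk the rule chain, e.g. Foo.vfr -> Foo.c -> Foo.obj: every output of
+        # an applied rule is appended to SourceList and processed in a later
+        # iteration until no further rule matches (illustrative chain).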
+ while Index < len(SourceList):
+ # Reset the FileType if not the first iteration.
+ if Index > 0:
+ FileType = TAB_UNKNOWN_FILE
+ Source = SourceList[Index]
+ Index = Index + 1
+
+ if Source != File:
+ CreateDirectory(Source.Dir)
+
+ if File.IsBinary and File == Source and File in BinaryFileList:
+ # Skip all files that are not binary libraries
+ if not self.IsLibrary:
+ continue
+ RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
+ elif FileType in self.BuildRules:
+ RuleObject = self.BuildRules[FileType]
+ elif Source.Ext in self.BuildRules:
+ RuleObject = self.BuildRules[Source.Ext]
+ else:
+ # No more rule to apply: Source is a final target.
+ FinalTargetName.add(Source)
+ continue
+
+ FileType = RuleObject.SourceFileType
+ self._FileTypes[FileType].add(Source)
+
+ # stop at STATIC_LIBRARY for library
+ if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
+ FinalTargetName.add(Source)
+ continue
+
+ Target = RuleObject.Apply(Source, self.BuildRuleOrder)
+ if not Target:
+ # No Target: Source is a final target.
+ FinalTargetName.add(Source)
+ continue
+
+ TargetList.add(Target)
+ self._BuildTargets[FileType].add(Target)
+
+ if not Source.IsBinary and Source == File:
+ self._IntroBuildTargetList.add(Target)
+
+ # to avoid cyclic rule
+ if FileType in RuleChain:
+ EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))
+
+ RuleChain.add(FileType)
+ SourceList.extend(Target.Outputs)
+
+ # For each final target name, retrieve the corresponding TargetDescBlock instance.
+ for FTargetName in FinalTargetName:
+ for Target in TargetList:
+ if FTargetName == Target.Target:
+ self._FinalBuildTargetList.add(Target)
+
+ @cached_property
+ def Targets(self):
+ if self._BuildTargets is None:
+ self._IntroBuildTargetList = set()
+ self._FinalBuildTargetList = set()
+ self._BuildTargets = defaultdict(set)
+ self._FileTypes = defaultdict(set)
+
+ #TRICK: call SourceFileList property to apply build rule for source files
+ self.SourceFileList
+
+ #TRICK: call _GetBinaryFileList to apply build rule for binary files
+ self.BinaryFileList
+
+ return self._BuildTargets
+
+ @cached_property
+ def IntroTargetList(self):
+ self.Targets
+ return self._IntroBuildTargetList
+
+ @cached_property
+ def CodaTargetList(self):
+ self.Targets
+ return self._FinalBuildTargetList
+
+ @cached_property
+ def FileTypes(self):
+ self.Targets
+ return self._FileTypes
+
+ ## Get the list of package object the module depends on and the Platform depends on
+ #
+ # @retval list The package object list
+ #
+ @cached_property
+ def DependentPackageList(self):
+ return self.PackageList
+
+ ## Return the list of auto-generated code file
+ #
+ # @retval list The list of auto-generated file
+ #
+ @cached_property
+ def AutoGenFileList(self):
+ AutoGenUniIdf = self.BuildType != 'UEFI_HII'
+ UniStringBinBuffer = BytesIO()
+ IdfGenBinBuffer = BytesIO()
+ RetVal = {}
+ AutoGenC = TemplateString()
+ AutoGenH = TemplateString()
+ StringH = TemplateString()
+ StringIdf = TemplateString()
+ GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
+ #
+ # AutoGen.c is generated if there are library classes in inf, or there are object files
+ #
+ if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
+ or TAB_OBJECT_FILE in self.FileTypes):
+ AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
+ RetVal[AutoFile] = str(AutoGenC)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if str(AutoGenH) != "":
+ AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
+ RetVal[AutoFile] = str(AutoGenH)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if str(StringH) != "":
+ AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
+ RetVal[AutoFile] = str(StringH)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
+ AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
+ RetVal[AutoFile] = UniStringBinBuffer.getvalue()
+ AutoFile.IsBinary = True
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if UniStringBinBuffer is not None:
+ UniStringBinBuffer.close()
+ if str(StringIdf) != "":
+ AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
+ RetVal[AutoFile] = str(StringIdf)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
+ AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
+ RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
+ AutoFile.IsBinary = True
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if IdfGenBinBuffer is not None:
+ IdfGenBinBuffer.close()
+ return RetVal
+
+ ## Return the list of library modules explicitly or implicitly used by this module
+ @cached_property
+ def DependentLibraryList(self):
+ # only merge library classes and PCD for non-library module
+ if self.IsLibrary:
+ return []
+ return self.PlatformInfo.ApplyLibraryInstance(self.Module)
+
+ ## Get the list of PCDs from current module
+ #
+ # @retval list The list of PCD
+ #
+ @cached_property
+ def ModulePcdList(self):
+ # apply PCD settings from platform
+ RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)
+
+ return RetVal
+ @cached_property
+ def _PcdComments(self):
+        RetVal = OrderedListDict()
+        ExtendCopyDictionaryLists(RetVal, self.Module.PcdComments)
+        if not self.IsLibrary:
+            for Library in self.DependentLibraryList:
+                ExtendCopyDictionaryLists(RetVal, Library.PcdComments)
+        return RetVal
+
+ ## Get the list of PCDs from dependent libraries
+ #
+ # @retval list The list of PCD
+ #
+ @cached_property
+ def LibraryPcdList(self):
+ if self.IsLibrary:
+ return []
+ RetVal = []
+ Pcds = set()
+ # get PCDs from dependent libraries
+ for Library in self.DependentLibraryList:
+ PcdsInLibrary = OrderedDict()
+ for Key in Library.Pcds:
+ # skip duplicated PCDs
+ if Key in self.Module.Pcds or Key in Pcds:
+ continue
+ Pcds.add(Key)
+ PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
+ RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))
+ return RetVal
+
+ ## Get the GUID value mapping
+ #
+ # @retval dict The mapping between GUID cname and its value
+ #
+ @cached_property
+ def GuidList(self):
+        RetVal = OrderedDict(self.Module.Guids)
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.Guids)
+ ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
+ ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
+ return RetVal
+
+ @cached_property
+ def GetGuidsUsedByPcd(self):
+ RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.GetGuidsUsedByPcd())
+ return RetVal
+ ## Get the protocol value mapping
+ #
+ # @retval dict The mapping between protocol cname and its value
+ #
+ @cached_property
+ def ProtocolList(self):
+ RetVal = OrderedDict(self.Module.Protocols)
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.Protocols)
+ ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
+ ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
+ return RetVal
+
+ ## Get the PPI value mapping
+ #
+ # @retval dict The mapping between PPI cname and its value
+ #
+ @cached_property
+ def PpiList(self):
+ RetVal = OrderedDict(self.Module.Ppis)
+ for Library in self.DependentLibraryList:
+ RetVal.update(Library.Ppis)
+ ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
+ ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
+ return RetVal
+
+ ## Get the list of include search path
+ #
+ # @retval list The list path
+ #
+ @cached_property
+ def IncludePathList(self):
+ RetVal = []
+ RetVal.append(self.MetaFile.Dir)
+ RetVal.append(self.DebugDir)
+
+ for Package in self.PackageList:
+ PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
+ if PackageDir not in RetVal:
+ RetVal.append(PackageDir)
+ IncludesList = Package.Includes
+ if Package._PrivateIncludes:
+ if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
+ IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
+ for Inc in IncludesList:
+ if Inc not in RetVal:
+ RetVal.append(str(Inc))
+ RetVal.extend(self.IncPathFromBuildOptions)
+ return RetVal
+
+ @cached_property
+ def IncPathFromBuildOptions(self):
+ IncPathList = []
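+        # Handles both the joined form "/Ipath" (or "-Ipath") and the split form
+        # "/I path", where the path arrives as the next whitespace-separated token.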
+ for tool in self.BuildOption:
+ if 'FLAGS' in self.BuildOption[tool]:
+ flags = self.BuildOption[tool]['FLAGS']
+ whitespace = False
+ for flag in flags.split(" "):
+ flag = flag.strip()
+ if flag.startswith(("/I","-I")):
+ if len(flag)>2:
+ if os.path.exists(flag[2:]):
+ IncPathList.append(flag[2:])
+ else:
+ whitespace = True
+ continue
+ if whitespace and flag:
+ if os.path.exists(flag):
+ IncPathList.append(flag)
+ whitespace = False
+ return IncPathList
+
+ @cached_property
+ def IncludePathLength(self):
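+        # The +1 per entry accounts for the delimiter that joins the paths on
+        # the command line (an assumption about how this total is consumed).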
+ return sum(len(inc)+1 for inc in self.IncludePathList)
+
+ ## Get the list of include paths from the packages
+ #
+ # @IncludesList list The list path
+ #
+ @cached_property
+ def PackageIncludePathList(self):
+ IncludesList = []
+ for Package in self.PackageList:
+ PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
+ IncludesList = Package.Includes
+ if Package._PrivateIncludes:
+ if not self.MetaFile.Path.startswith(PackageDir):
+ IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
+ return IncludesList
+
+    ## Get HII EX PCDs which may be used by VFR
+    #
+    #  An efivarstore used by VFR may relate to HII EX PCDs.
+    #  Get the variable name and GUID from the efivarstore and the HII EX PCD;
+    #  list the HII EX PCDs in the As-Built INF if both name and GUID match.
+ #
+ # @retval list HII EX PCDs
+ #
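+    # e.g. a preprocessed VFR fragment such as
+    #   efivarstore MY_VARSTORE, name = MySetup,
+    #     guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}};
+    # contributes the byte-array form of L"MySetup" plus that GUID to the match
+    # set (illustrative names).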
+ def _GetPcdsMaybeUsedByVfr(self):
+ if not self.SourceFileList:
+ return []
+
+ NameGuids = set()
+ for SrcFile in self.SourceFileList:
+ if SrcFile.Ext.lower() != '.vfr':
+ continue
+ Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
+ if not os.path.exists(Vfri):
+ continue
+            with open(Vfri, 'r') as VfriFile:
+                Content = VfriFile.read()
+ Pos = Content.find('efivarstore')
+ while Pos != -1:
+ #
+ # Make sure 'efivarstore' is the start of efivarstore statement
+                # in case the value of 'name' (name = efivarstore) is itself 'efivarstore'
+ #
+ Index = Pos - 1
+ while Index >= 0 and Content[Index] in ' \t\r\n':
+ Index -= 1
+ if Index >= 0 and Content[Index] != ';':
+ Pos = Content.find('efivarstore', Pos + len('efivarstore'))
+ continue
+ #
+ # 'efivarstore' must be followed by name and guid
+ #
+ Name = gEfiVarStoreNamePattern.search(Content, Pos)
+ if not Name:
+ break
+ Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
+ if not Guid:
+ break
+ NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
+ NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
+ Pos = Content.find('efivarstore', Name.end())
+ if not NameGuids:
+ return []
+ HiiExPcds = []
+ for Pcd in self.PlatformInfo.Pcds.values():
+ if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
+ continue
+ for SkuInfo in Pcd.SkuInfoList.values():
+ Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
+ if not Value:
+ continue
+ Name = _ConvertStringToByteArray(SkuInfo.VariableName)
+ Guid = GuidStructureStringToGuidString(Value)
+ if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
+ HiiExPcds.append(Pcd)
+ break
+
+ return HiiExPcds
+
+ def _GenOffsetBin(self):
+ VfrUniBaseName = {}
+ for SourceFile in self.Module.Sources:
+ if SourceFile.Type.upper() == ".VFR" :
+ #
+ # search the .map file to find the offset of vfr binary in the PE32+/TE file.
+ #
+ VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
+ elif SourceFile.Type.upper() == ".UNI" :
+ #
+ # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
+ #
+ VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
+
+ if not VfrUniBaseName:
+ return None
+ MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
+ EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
+ VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
+ if not VfrUniOffsetList:
+ return None
+
+ OutputName = '%sOffset.bin' % self.Name
+ UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)
+
+ try:
+ fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
+
+        # Use an instance of BytesIO to cache data
+ fStringIO = BytesIO()
+
+ for Item in VfrUniOffsetList:
+ if (Item[0].find("Strings") != -1):
+ #
+ # UNI offset in image.
+ # GUID + Offset
+ # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
+ #
+ UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
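+                # The byte string above is the GUID serialized as a
+                # little-endian EFI_GUID; an equivalent sketch would be:
+                #     struct.pack('<IHH8B', 0x8913c5e0, 0x33f6, 0x4d86,
+                #                 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x06, 0x66)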
+ fStringIO.write(UniGuid)
+ UniValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (UniValue)
+ else:
+ #
+ # VFR binary offset in image.
+ # GUID + Offset
+ # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
+ #
+ VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
+ fStringIO.write(VfrGuid)
+ VfrValue = pack ('Q', int (Item[1], 16))
+ fStringIO.write (VfrValue)
+ #
+ # write data into file.
+ #
+ try :
+ fInputfile.write (fStringIO.getvalue())
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
+ "file been locked or using by other applications." %UniVfrOffsetFileName, None)
+
+ fStringIO.close ()
+ fInputfile.close ()
+ return OutputName
+
+ @cached_property
+ def OutputFile(self):
+ retVal = set()
+
+ for Root, Dirs, Files in os.walk(self.BuildDir):
+ for File in Files:
+                # lib files are already added through the CodaTargetList above; skip .obj and .debug intermediates here
+ if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
+ NewFile = path.join(Root, File)
+ retVal.add(NewFile)
+
+ for Root, Dirs, Files in os.walk(self.FfsOutputDir):
+ for File in Files:
+ NewFile = path.join(Root, File)
+ retVal.add(NewFile)
+
+ return retVal
+
+    ## Create the As-Built INF file for the module
+ #
+ def CreateAsBuiltInf(self):
+
+ if self.IsAsBuiltInfCreated:
+ return
+
+ # Skip INF file generation for libraries
+ if self.IsLibrary:
+ return
+
+ # Skip the following code for modules with no source files
+ if not self.SourceFileList:
+ return
+
+        # Skip the following code for modules that already provide binary files
+ if self.BinaryFileList:
+ return
+
+        ### TODO: How to handle mixed source and binary modules
+
+ # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
+ # Also find all packages that the DynamicEx PCDs depend on
+ Pcds = []
+ PatchablePcds = []
+ Packages = []
+ PcdCheckList = []
+ PcdTokenSpaceList = []
+ for Pcd in self.ModulePcdList + self.LibraryPcdList:
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PatchablePcds.append(Pcd)
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
+ elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if Pcd not in Pcds:
+ Pcds.append(Pcd)
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
+ PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
+ GuidList = OrderedDict(self.GuidList)
+ for TokenSpace in self.GetGuidsUsedByPcd:
+            # If the token space is not referred to by a patch PCD or Ex PCD, remove the GUID from the GUID list.
+            # The GUIDs in the GUIDs section should only be those in the source INF or referred to by Ex and patch PCDs.
+ if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
+ GuidList.pop(TokenSpace)
+ CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
+ for Package in self.DerivedPackageList:
+ if Package in Packages:
+ continue
+ BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
+ Found = False
+ for Index in range(len(BeChecked)):
+ for Item in CheckList[Index]:
+ if Item in BeChecked[Index]:
+ Packages.append(Package)
+ Found = True
+ break
+ if Found:
+ break
+
+ VfrPcds = self._GetPcdsMaybeUsedByVfr()
+ for Pkg in self.PlatformInfo.PackageList:
+ if Pkg in Packages:
+ continue
+ for VfrPcd in VfrPcds:
+ if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
+ (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
+ Packages.append(Pkg)
+ break
+
+ ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
+ DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
+ Guid = self.Guid
+ MDefs = self.Module.Defines
+
+ AsBuiltInfDict = {
+ 'module_name' : self.Name,
+ 'module_guid' : Guid,
+ 'module_module_type' : ModuleType,
+ 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
+ 'pcd_is_driver_string' : [],
+ 'module_uefi_specification_version' : [],
+ 'module_pi_specification_version' : [],
+ 'module_entry_point' : self.Module.ModuleEntryPointList,
+ 'module_unload_image' : self.Module.ModuleUnloadImageList,
+ 'module_constructor' : self.Module.ConstructorList,
+ 'module_destructor' : self.Module.DestructorList,
+ 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
+ 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
+ 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
+ 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
+ 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
+ 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
+ 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
+ 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
+ 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
+ 'module_arch' : self.Arch,
+ 'package_item' : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
+ 'binary_item' : [],
+ 'patchablepcd_item' : [],
+ 'pcd_item' : [],
+ 'protocol_item' : [],
+ 'ppi_item' : [],
+ 'guid_item' : [],
+ 'flags_item' : [],
+ 'libraryclasses_item' : []
+ }
+
+ if 'MODULE_UNI_FILE' in MDefs:
+ UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
+ if os.path.isfile(UNIFile):
+ shutil.copy2(UNIFile, self.OutputDir)
+
+ if self.AutoGenVersion > int(gInfSpecVersion, 0):
+ AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
+ else:
+ AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
+
+ if DriverType:
+ AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)
+
+ if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
+ AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
+ if 'PI_SPECIFICATION_VERSION' in self.Specification:
+ AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])
+
+ OutputDir = self.OutputDir.replace('\\', '/').strip('/')
+ DebugDir = self.DebugDir.replace('\\', '/').strip('/')
+ for Item in self.CodaTargetList:
+ File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
+ if os.path.isabs(File):
+ File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
+ if Item.Target.Ext.lower() == '.aml':
+ AsBuiltInfDict['binary_item'].append('ASL|' + File)
+ elif Item.Target.Ext.lower() == '.acpi':
+ AsBuiltInfDict['binary_item'].append('ACPI|' + File)
+ elif Item.Target.Ext.lower() == '.efi':
+ AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
+ else:
+ AsBuiltInfDict['binary_item'].append('BIN|' + File)
+ if not self.DepexGenerated:
+ DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
+ if os.path.exists(DepexFile):
+ self.DepexGenerated = True
+ if self.DepexGenerated:
+ if self.ModuleType in [SUP_MODULE_PEIM]:
+ AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
+ elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
+ AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
+ elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
+ AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')
+
+ Bin = self._GenOffsetBin()
+ if Bin:
+ AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)
+
+ for Root, Dirs, Files in os.walk(OutputDir):
+ for File in Files:
+ if File.lower().endswith('.pdb'):
+ AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
+ HeaderComments = self.Module.HeaderComments
+ StartPos = 0
+ for Index in range(len(HeaderComments)):
+ if HeaderComments[Index].find('@BinaryHeader') != -1:
+ HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
+ StartPos = Index
+ break
+ AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
+ AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
+
+ GenList = [
+ (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
+ (self.PpiList, self._PpiComments, 'ppi_item'),
+ (GuidList, self._GuidComments, 'guid_item')
+ ]
+ for Item in GenList:
+ for CName in Item[0]:
+ Comments = '\n '.join(Item[1][CName]) if CName in Item[1] else ''
+ Entry = Comments + '\n ' + CName if Comments else CName
+ AsBuiltInfDict[Item[2]].append(Entry)
+ PatchList = parsePcdInfoFromMapFile(
+ os.path.join(self.OutputDir, self.Name + '.map'),
+ os.path.join(self.OutputDir, self.Name + '.efi')
+ )
+ if PatchList:
+ for Pcd in PatchablePcds:
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ for PatchPcd in PatchList:
+ if TokenCName == PatchPcd[0]:
+ break
+ else:
+ continue
+ PcdValue = ''
+ if Pcd.DatumType == 'BOOLEAN':
+ BoolValue = Pcd.DefaultValue.upper()
+ if BoolValue == 'TRUE':
+ Pcd.DefaultValue = '1'
+ elif BoolValue == 'FALSE':
+ Pcd.DefaultValue = '0'
+
+ if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
+ HexFormat = '0x%02x'
+ if Pcd.DatumType == TAB_UINT16:
+ HexFormat = '0x%04x'
+ elif Pcd.DatumType == TAB_UINT32:
+ HexFormat = '0x%08x'
+ elif Pcd.DatumType == TAB_UINT64:
+ HexFormat = '0x%016x'
+ PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
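+                    # e.g. a UINT16 PCD with DefaultValue "0x1F" renders as "0x001f".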
+ else:
+ if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
+ )
+ ArraySize = int(Pcd.MaxDatumSize, 0)
+ PcdValue = Pcd.DefaultValue
+ if PcdValue[0] != '{':
+ Unicode = False
+ if PcdValue[0] == 'L':
+ Unicode = True
+ PcdValue = PcdValue.lstrip('L')
+ PcdValue = eval(PcdValue)
+ NewValue = '{'
+ for Index in range(0, len(PcdValue)):
+ if Unicode:
+ CharVal = ord(PcdValue[Index])
+ NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
+ + '0x%02x' % (CharVal >> 8) + ', '
+ else:
+ NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
+ Padding = '0x00, '
+ if Unicode:
+ Padding = Padding * 2
+ ArraySize = ArraySize // 2
+ if ArraySize < (len(PcdValue) + 1):
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
+ )
+ else:
+ ArraySize = len(PcdValue) + 1
+ if ArraySize > len(PcdValue) + 1:
+ NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
+ PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
+ elif len(PcdValue.split(',')) <= ArraySize:
+ PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
+ PcdValue += '}'
+ else:
+ if Pcd.MaxSizeUserSet:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
+ )
+ else:
+ ArraySize = len(PcdValue) + 1
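+                # Worked example of the VOID* expansion above (hypothetical PCD):
+                # DefaultValue L"AB" with MaxDatumSize 8 (four UCS-2 chars) becomes
+                #     {0x41, 0x00, 0x42, 0x00, 0x00, 0x00, 0x00, 0x00}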
+ PcdItem = '%s.%s|%s|0x%X' % \
+ (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
+ PcdComments = ''
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
+ PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
+ if PcdComments:
+ PcdItem = PcdComments + '\n ' + PcdItem
+ AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
+
+ for Pcd in Pcds + VfrPcds:
+ PcdCommentList = []
+ HiiInfo = ''
+ TokenCName = Pcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
+ for SkuName in Pcd.SkuInfoList:
+ SkuInfo = Pcd.SkuInfoList[SkuName]
+ HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
+ break
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
+ PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
+ if HiiInfo:
+ UsageIndex = -1
+ UsageStr = ''
+ for Index, Comment in enumerate(PcdCommentList):
+ for Usage in UsageList:
+ if Comment.find(Usage) != -1:
+ UsageStr = Usage
+ UsageIndex = Index
+ break
+ if UsageIndex != -1:
+ PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
+ else:
+ PcdCommentList.append('## UNDEFINED ' + HiiInfo)
+ PcdComments = '\n '.join(PcdCommentList)
+ PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
+ if PcdComments:
+ PcdEntry = PcdComments + '\n ' + PcdEntry
+ AsBuiltInfDict['pcd_item'].append(PcdEntry)
+ for Item in self.BuildOption:
+ if 'FLAGS' in self.BuildOption[Item]:
+ AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))
+
+ # Generated LibraryClasses section in comments.
+ for Library in self.LibraryAutoGenList:
+ AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
+
+ # Generated UserExtensions TianoCore section.
+ # All tianocore user extensions are copied.
+ UserExtStr = ''
+ for TianoCore in self._GetTianoCoreUserExtensionList():
+ UserExtStr += '\n'.join(TianoCore)
+ ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
+ if os.path.isfile(ExtensionFile):
+ shutil.copy2(ExtensionFile, self.OutputDir)
+ AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr
+
+ # Generated depex expression section in comments.
+ DepexExpression = self._GetDepexExpresionString()
+ AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''
+
+ AsBuiltInf = TemplateString()
+ AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
+
+ SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
+
+ self.IsAsBuiltInfCreated = True
+
+ def CacheCopyFile(self, DestDir, SourceDir, File):
+ if os.path.isdir(File):
+ return
+
+ sub_dir = os.path.relpath(File, SourceDir)
+ destination_file = os.path.join(DestDir, sub_dir)
+ destination_dir = os.path.dirname(destination_file)
+ CreateDirectory(destination_dir)
+ try:
+ CopyFileOnChange(File, destination_dir)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, destination_dir))
+ return
+
+ def CopyModuleToCache(self):
+ # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList
+ # and PreMakeHashFileList files
+ MakeHashStr = None
+ PreMakeHashStr = None
+ MakeTimeStamp = 0
+ PreMakeTimeStamp = 0
+ Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
+ for File in Files:
+ if ".MakeHashFileList." in File:
+                # find the latest file by time stamp
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
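+                # os.stat(...)[8] is st_mtime, the last-modification time.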
+ if FileTimeStamp > MakeTimeStamp:
+ MakeTimeStamp = FileTimeStamp
+ MakeHashStr = File.split('.')[-1]
+ if len(MakeHashStr) != 32:
+ EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
+ if ".PreMakeHashFileList." in File:
+ FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
+ if FileTimeStamp > PreMakeTimeStamp:
+ PreMakeTimeStamp = FileTimeStamp
+ PreMakeHashStr = File.split('.')[-1]
+ if len(PreMakeHashStr) != 32:
+ EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))
+
+ if not MakeHashStr:
+ EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
+ return
+ if not PreMakeHashStr:
+ EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
+ return
+
+ # Create Cache destination dirs
+ FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+ FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+ CacheFileDir = path.join(FileDir, MakeHashStr)
+ CacheFfsDir = path.join(FfsDir, MakeHashStr)
+ CreateDirectory (CacheFileDir)
+ CreateDirectory (CacheFfsDir)
+
+ # Create ModuleHashPair file to support multiple version cache together
+ ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+ if os.path.exists(ModuleHashPair):
+ with open(ModuleHashPair, 'r') as f:
+ ModuleHashPairList = json.load(f)
+        if (PreMakeHashStr, MakeHashStr) not in set(map(tuple, ModuleHashPairList)):
+ ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
+ with open(ModuleHashPair, 'w') as f:
+ json.dump(ModuleHashPairList, f, indent=2)
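+        # The resulting .ModuleHashPair file is a JSON list of pairs, newest
+        # first, e.g. with hypothetical hashes:
+        #     [["3fa2...", "9c1d..."], ["77b0...", "52ee..."]]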
+
+ # Copy files to Cache destination dirs
+ if not self.OutputFile:
+ Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+ self.OutputFile = Ma.Binaries
+ for File in self.OutputFile:
+ if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
+ self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
+ else:
+ if self.Name + ".autogen.hash." in File or \
+ self.Name + ".autogen.hashchain." in File or \
+ self.Name + ".hash." in File or \
+ self.Name + ".hashchain." in File or \
+ self.Name + ".PreMakeHashFileList." in File or \
+ self.Name + ".MakeHashFileList." in File:
+ self.CacheCopyFile(FileDir, self.BuildDir, File)
+ else:
+ self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
+ ## Create makefile for the module and its dependent libraries
+ #
+    # @param CreateLibraryMakeFile Flag indicating whether or not the makefiles of
+ # dependent libraries will be created
+ #
+ @cached_class_function
+    def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList=None):
+
+        # nest this function inside its only caller.
+ def CreateTimeStamp():
+ FileSet = {self.MetaFile.Path}
+
+ for SourceFile in self.Module.Sources:
+ FileSet.add (SourceFile.Path)
+
+ for Lib in self.DependentLibraryList:
+ FileSet.add (Lib.MetaFile.Path)
+
+ for f in self.AutoGenDepSet:
+ FileSet.add (f.Path)
+
+ if os.path.exists (self.TimeStampPath):
+ os.remove (self.TimeStampPath)
+
+ SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
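+            # AutoGenTimeStamp holds one dependency path per line; CanSkip()
+            # later re-stats each listed file against this file's own mtime.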
+
+ # Ignore generating makefile when it is a binary module
+ if self.IsBinaryModule:
+ return
+
+        self.GenFfsList = GenFfsList if GenFfsList is not None else []
+
+ if not self.IsLibrary and CreateLibraryMakeFile:
+ for LibraryAutoGen in self.LibraryAutoGenList:
+ LibraryAutoGen.CreateMakeFile()
+
+ # CanSkip uses timestamps to determine build skipping
+ if self.CanSkip():
+ return
+
+ if len(self.CustomMakefile) == 0:
+ Makefile = GenMake.ModuleMakefile(self)
+ else:
+ Makefile = GenMake.CustomMakefile(self)
+ if Makefile.Generate():
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
+ (self.Name, self.Arch))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
+ (self.Name, self.Arch))
+
+ CreateTimeStamp()
+
+ MakefileType = Makefile._FileType
+ MakefileName = Makefile._FILE_NAME_[MakefileType]
+ MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
+ FilePath = path.join(self.BuildDir, self.Name + ".makefile")
+ SaveFileOnChange(FilePath, MakefilePath, False)
+
+ def CopyBinaryFiles(self):
+ for File in self.Module.Binaries:
+ SrcPath = File.Path
+ DstPath = os.path.join(self.OutputDir, os.path.basename(SrcPath))
+ CopyLongFilePath(SrcPath, DstPath)
+ ## Create autogen code for the module and its dependent libraries
+ #
+    # @param CreateLibraryCodeFile Flag indicating whether or not the code of
+ # dependent libraries will be created
+ #
+ def CreateCodeFile(self, CreateLibraryCodeFile=True):
+
+ if self.IsCodeFileCreated:
+ return
+
+        # Need to generate the PCD database even if the PCD driver is a binary module
+ if self.IsBinaryModule and self.PcdIsDriver != '':
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+ return
+ if self.IsBinaryModule:
+ if self.IsLibrary:
+ self.CopyBinaryFiles()
+ return
+
+ if not self.IsLibrary and CreateLibraryCodeFile:
+ for LibraryAutoGen in self.LibraryAutoGenList:
+ LibraryAutoGen.CreateCodeFile()
+
+        # Touch LibraryAutoGenList for its side effect: evaluating the cached
+        # property applies the build rules to the library CODA targets.
+        self.LibraryAutoGenList
+ AutoGenList = []
+        IgnoredAutoGenList = []
+
+ for File in self.AutoGenFileList:
+ if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
+ AutoGenList.append(str(File))
+ else:
+                IgnoredAutoGenList.append(str(File))
+
+
+ for ModuleType in self.DepexList:
+ # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
+ if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
+ continue
+
+ Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
+ DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
+
+ if len(Dpx.PostfixNotation) != 0:
+ self.DepexGenerated = True
+
+ if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
+ AutoGenList.append(str(DpxFile))
+ else:
+                IgnoredAutoGenList.append(str(DpxFile))
+
+        if not IgnoredAutoGenList:
+            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
+                (" ".join(AutoGenList), self.Name, self.Arch))
+        elif not AutoGenList:
+            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
+                (" ".join(IgnoredAutoGenList), self.Name, self.Arch))
+        else:
+            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
+                (" ".join(AutoGenList), " ".join(IgnoredAutoGenList), self.Name, self.Arch))
+
+ self.IsCodeFileCreated = True
+
+ return AutoGenList
+
+ ## Summarize the ModuleAutoGen objects of all libraries used by this module
+ @cached_property
+ def LibraryAutoGenList(self):
+ RetVal = []
+ for Library in self.DependentLibraryList:
+ La = ModuleAutoGen(
+ self.Workspace,
+ Library.MetaFile,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Arch,
+ self.PlatformInfo.MetaFile,
+ self.DataPipe
+ )
+ La.IsLibrary = True
+ if La not in RetVal:
+ RetVal.append(La)
+ for Lib in La.CodaTargetList:
+ self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
+ return RetVal
+
+ def GenCMakeHash(self):
+        # GenCMakeHash can only be called with --binary-destination.
+        # It is never called in multiprocessing and always saves its result
+        # directly in the main process, so no remote dict is needed to share
+        # the gCMakeHashFile result with the main process.
+
+ DependencyFileSet = set()
+ # Add AutoGen files
+ if self.AutoGenFileList:
+ for File in set(self.AutoGenFileList):
+ DependencyFileSet.add(File)
+
+ # Add Makefile
+ abspath = path.join(self.BuildDir, self.Name + ".makefile")
+ try:
+ with open(LongFilePath(abspath),"r") as fd:
+ lines = fd.readlines()
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+ if lines:
+ DependencyFileSet.update(lines)
+
+        # Calculate the hash of all dependency files above
+        # Initialize the hash object
+ FileList = []
+ m = hashlib.md5()
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):
+ if not path.exists(LongFilePath(str(File))):
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
+ continue
+ with open(LongFilePath(str(File)), 'rb') as f:
+ Content = f.read()
+ m.update(Content)
+ FileList.append((str(File), hashlib.md5(Content).hexdigest()))
+
+ HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())
+ GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ try:
+ with open(LongFilePath(HashChainFile), 'w') as f:
+ json.dump(FileList, f, indent=2)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
+ return False
+
+ def GenModuleHash(self):
+        # GenModuleHash is only called after the autogen phase.
+        # It is never called in multiprocessing and always saves its result
+        # directly in the main process, so no remote dict is needed to share
+        # the gModuleHashFile result with the main process.
+        #
+        # GenModuleHash consumes no dict.
+        # GenModuleHash produces the local gModuleHashFile dict.
+
+ DependencyFileSet = set()
+ # Add Module Meta file
+ DependencyFileSet.add(self.MetaFile.Path)
+
+ # Add Module's source files
+ if self.SourceFileList:
+ for File in set(self.SourceFileList):
+ DependencyFileSet.add(File.Path)
+
+        # Add the module's include header files
+ # Directly use the deps.txt file in the module BuildDir
+ abspath = path.join(self.BuildDir, "deps.txt")
+ rt = None
+ try:
+ with open(LongFilePath(abspath),"r") as fd:
+ lines = fd.readlines()
+ if lines:
+ rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
+ except Exception as e:
+ EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
+
+ if rt:
+ DependencyFileSet.update(rt)
+
+
+        # Calculate the hash of all dependency files above
+        # Initialize the hash object
+ FileList = []
+ m = hashlib.md5()
+ BuildDirStr = path.abspath(self.BuildDir).lower()
+ for File in sorted(DependencyFileSet, key=lambda x: str(x)):
+            # Skip the AutoGen files in BuildDir which have already been
+            # included in the .autogen.hash. file
+ if BuildDirStr in path.abspath(File).lower():
+ continue
+ if not path.exists(LongFilePath(File)):
+ EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
+ continue
+ with open(LongFilePath(File), 'rb') as f:
+ Content = f.read()
+ m.update(Content)
+ FileList.append((File, hashlib.md5(Content).hexdigest()))
+
+ HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ try:
+ with open(LongFilePath(HashChainFile), 'w') as f:
+ json.dump(FileList, f, indent=2)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
+ return False
+
+ def GenPreMakefileHashList(self):
+        # GenPreMakefileHashList consumes the dicts below:
+        #     gPlatformHashFile
+        #     gPackageHashFile
+        #     gModuleHashFile
+        # GenPreMakefileHashList produces no dict.
+        # gModuleHashFile items might be produced in multiprocessing, so the
+        # remote gModuleHashFile dict needs to be checked.
+
+ # skip binary module
+ if self.IsBinaryModule:
+ return
+
+ FileList = []
+ m = hashlib.md5()
+ # Add Platform level hash
+ HashFile = GlobalData.gPlatformHashFile
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)
+
+ # Add Package level hash
+ if self.DependentPackageList:
+ for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
+                if (Pkg.PackageName, Pkg.Arch) not in GlobalData.gPackageHashFile:
+ EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
+ continue
+ HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)
+
+ # Add Module self
+        # GenPreMakefileHashList is needed for both --binary-destination
+        # and --hash, and --hash might save the ModuleHashFile in the remote
+        # dict during multiprocessing.
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
+        else:
+            HashFile = None   # avoid reusing a stale HashFile from the code above
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
+        if HashFile and path.exists(LongFilePath(HashFile)):
+            FileList.append(HashFile)
+            m.update(HashFile.encode('utf-8'))
+        else:
+            EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
+
+ # Add Library hash
+ if self.LibraryAutoGenList:
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
+
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
+                else:
+                    HashFile = None   # avoid reusing a stale HashFile from a previous iteration
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
+                if HashFile and path.exists(LongFilePath(HashFile)):
+                    FileList.append(HashFile)
+                    m.update(HashFile.encode('utf-8'))
+                else:
+                    EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
+
+ # Save PreMakeHashFileList
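+        # The <Name>.PreMakeHashFileList.<md5> file is a JSON list of the
+        # platform, package, module and library hash file paths that fed the
+        # digest above.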
+ FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
+ try:
+ with open(LongFilePath(FilePath), 'w') as f:
+ json.dump(FileList, f, indent=0)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
+
+ def GenMakefileHashList(self):
+        # GenMakefileHashList is only needed for --binary-destination, which
+        # keeps everything in local dicts, so there is no need to check the
+        # remote dicts.
+
+ # skip binary module
+ if self.IsBinaryModule:
+ return
+
+ FileList = []
+ m = hashlib.md5()
+ # Add AutoGen hash
+ HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
+ if path.exists(LongFilePath(HashFile)):
+ FileList.append(HashFile)
+ m.update(HashFile.encode('utf-8'))
+ else:
+ EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)
+
+ # Add Module self
+        if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
+            HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
+        else:
+            HashFile = None   # avoid reusing the AutoGen HashFile from above
+            EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
+        if HashFile and path.exists(LongFilePath(HashFile)):
+            FileList.append(HashFile)
+            m.update(HashFile.encode('utf-8'))
+        else:
+            EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
+
+ # Add Library hash
+ if self.LibraryAutoGenList:
+ for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
+                if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
+                    HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
+                else:
+                    HashFile = None   # avoid reusing a stale HashFile from a previous iteration
+                    EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
+                if HashFile and path.exists(LongFilePath(HashFile)):
+                    FileList.append(HashFile)
+                    m.update(HashFile.encode('utf-8'))
+                else:
+                    EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
+
+ # Save MakeHashFileList
+ FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
+ try:
+ with open(LongFilePath(FilePath), 'w') as f:
+ json.dump(FileList, f, indent=0)
+ except:
+ EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
+
+ def CheckHashChainFile(self, HashChainFile):
+        # Assume the HashChainFile basename format is 'x.hashchain.<32HexChars>',
+        # where x is the module name and the 32 hex characters are the md5
+        # hexdigest (16 bytes) of all hashchain file content
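+        # The file body is a JSON list of [path, md5hex] pairs, e.g. with a
+        # hypothetical entry: [["MdePkg/Include/Uefi.h", "d41d8cd9..."], ...]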
+ HashStr = HashChainFile.split('.')[-1]
+ if len(HashStr) != 32:
+ EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (File))
+ return False
+
+ try:
+ with open(LongFilePath(HashChainFile), 'r') as f:
+ HashChainList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
+ return False
+
+        # Compare every recorded file hash; report the first mismatching file
+ for idx, (SrcFile, SrcHash) in enumerate (HashChainList):
+ if SrcFile in GlobalData.gFileHashDict:
+ DestHash = GlobalData.gFileHashDict[SrcFile]
+ else:
+ try:
+ with open(LongFilePath(SrcFile), 'rb') as f:
+ Content = f.read()
+ DestHash = hashlib.md5(Content).hexdigest()
+ GlobalData.gFileHashDict[SrcFile] = DestHash
+ except IOError as X:
+ # cache miss if SrcFile is removed in new version code
+ GlobalData.gFileHashDict[SrcFile] = 0
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
+ return False
+ if SrcHash != DestHash:
+ EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
+ return False
+
+ return True
+
+ ## Decide whether we can skip the left autogen and make process
+ def CanSkipbyMakeCache(self):
+        # For --binary-source only.
+        # CanSkipbyMakeCache consumes the dicts below:
+        #     gModuleMakeCacheStatus
+        #     gHashChainStatus
+        # CanSkipbyMakeCache produces the gModuleMakeCacheStatus and
+        # gModuleHashFile dicts. All of these dicts might be produced in
+        # multiprocessing, so the remote dicts need to be checked.
+
+ if not GlobalData.gBinCacheSource:
+ return False
+
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
+ return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
+
+        # If the module is binary, which has a special build rule, do not skip it by cache.
+ if self.IsBinaryModule:
+ print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+        # Treat .inc files as binary; do not skip by hash
+ for f_ext in self.SourceFileList:
+ if '.inc' in str(f_ext):
+ print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
+ try:
+ with open(LongFilePath(ModuleHashPair), 'r') as f:
+ ModuleHashPairList = json.load(f)
+ except:
+ # ModuleHashPair might not exist for new added module
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
+ return False
+
+ # Check the PreMakeHash in ModuleHashPairList one by one
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)
+            PreMakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
+            MakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
+
+ try:
+                with open(LongFilePath(MakeHashFileList_FilePath), 'r') as f:
+ MakeHashFileList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
+ continue
+
+ HashMiss = False
+ for HashChainFile in MakeHashFileList:
+ HashChainStatus = None
+ if HashChainFile in GlobalData.gHashChainStatus:
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+ if HashChainStatus == False:
+ HashMiss = True
+ break
+ elif HashChainStatus == True:
+ continue
+                # Convert to a path starting with the cache source dir
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
+ if self.CheckHashChainFile(NewFilePath):
+ GlobalData.gHashChainStatus[HashChainFile] = True
+                    # Save the module's own HashFile for later use by GenPreMakefileHashList
+ if self.Name + ".hashchain." in HashChainFile:
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ else:
+ GlobalData.gHashChainStatus[HashChainFile] = False
+ HashMiss = True
+ break
+
+ if HashMiss:
+ continue
+
+ # PreMakefile cache hit, restore the module build result
+ for root, dir, files in os.walk(SourceHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
+ if os.path.exists(SourceFfsHashDir):
+ for root, dir, files in os.walk(SourceFfsHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
+
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+
+ print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+ return True
+
+ print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ ## Decide whether we can skip the left autogen and make process
+ def CanSkipbyPreMakeCache(self):
+        # CanSkipbyPreMakeCache consumes the dicts below:
+        #     gModulePreMakeCacheStatus
+        #     gHashChainStatus
+        #     gModuleHashFile
+        # CanSkipbyPreMakeCache produces the gModulePreMakeCacheStatus dict.
+        # All of these dicts might be produced in multiprocessing, so the
+        # remote dicts need to be checked.
+
+ if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
+ return False
+
+ if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
+ return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
+
+        # If the module is binary, which has a special build rule, do not skip it by cache.
+ if self.IsBinaryModule:
+ print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+        # Treat .inc files as binary; do not skip by hash
+ for f_ext in self.SourceFileList:
+ if '.inc' in str(f_ext):
+ print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ # For --hash only in the incremental build
+ if not GlobalData.gBinCacheSource:
+ Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
+            PreMakeHashFileList_FilePath = None
+ MakeTimeStamp = 0
+ # Find latest PreMakeHashFileList file in self.BuildDir folder
+ for File in Files:
+ if ".PreMakeHashFileList." in File:
+ FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]
+ if FileTimeStamp > MakeTimeStamp:
+ MakeTimeStamp = FileTimeStamp
+                        PreMakeHashFileList_FilePath = File
+            if not PreMakeHashFileList_FilePath:
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ try:
+                with open(LongFilePath(PreMakeHashFileList_FilePath), 'r') as f:
+ PreMakeHashFileList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ HashMiss = False
+ for HashChainFile in PreMakeHashFileList:
+ HashChainStatus = None
+ if HashChainFile in GlobalData.gHashChainStatus:
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+ if HashChainStatus == False:
+ HashMiss = True
+ break
+ elif HashChainStatus == True:
+ continue
+ if self.CheckHashChainFile(HashChainFile):
+ GlobalData.gHashChainStatus[HashChainFile] = True
+                    # Save the module's own HashFile for later use by GenPreMakefileHashList
+ if self.Name + ".hashchain." in HashChainFile:
+ GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
+ else:
+ GlobalData.gHashChainStatus[HashChainFile] = False
+ HashMiss = True
+ break
+
+ if HashMiss:
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+ else:
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+ return True
+
+ ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
+ FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
+
+ ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
+ ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
+ try:
+ with open(LongFilePath(ModuleHashPair), 'r') as f:
+ ModuleHashPairList = json.load(f)
+ except:
+ # ModuleHashPair might not exist for new added module
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ return False
+
+ # Check the PreMakeHash in ModuleHashPairList one by one
+ for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
+ SourceHashDir = path.join(ModuleCacheDir, MakeHash)
+ SourceFfsHashDir = path.join(FfsDir, MakeHash)
+            PreMakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
+            MakeHashFileList_FilePath = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
+
+ try:
+                with open(LongFilePath(PreMakeHashFileList_FilePath), 'r') as f:
+ PreMakeHashFileList = json.load(f)
+ except:
+ EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
+ continue
+
+ HashMiss = False
+ for HashChainFile in PreMakeHashFileList:
+ HashChainStatus = None
+ if HashChainFile in GlobalData.gHashChainStatus:
+ HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
+ if HashChainStatus == False:
+ HashMiss = True
+ break
+ elif HashChainStatus == True:
+ continue
+                # Convert to a path starting with the cache source dir
+ RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
+ NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
+ if self.CheckHashChainFile(NewFilePath):
+ GlobalData.gHashChainStatus[HashChainFile] = True
+ else:
+ GlobalData.gHashChainStatus[HashChainFile] = False
+ HashMiss = True
+ break
+
+ if HashMiss:
+ continue
+
+ # PreMakefile cache hit, restore the module build result
+ for root, dir, files in os.walk(SourceHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
+ if os.path.exists(SourceFfsHashDir):
+ for root, dir, files in os.walk(SourceFfsHashDir):
+ for f in files:
+ File = path.join(root, f)
+ self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
+
+ if self.Name == "PcdPeim" or self.Name == "PcdDxe":
+ CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
+
+ print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
+ return True
+
+ print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
+ GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
+ return False
+
+ ## Decide whether we can skip the Module build
+ def CanSkipbyCache(self, gHitSet):
+ # Hashing feature is off
+ if not GlobalData.gBinCacheSource:
+ return False
+
+ if self in gHitSet:
+ return True
+
+ return False
+
+ ## Decide whether we can skip the ModuleAutoGen process
+    # If any source file is newer than the module, then we cannot skip
+ #
+ def CanSkip(self):
+ # Don't skip if cache feature enabled
+ if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
+ return False
+ if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
+ return True
+ if not os.path.exists(self.TimeStampPath):
+ return False
+ #last creation time of the module
+ DstTimeStamp = os.stat(self.TimeStampPath)[8]
+
+ SrcTimeStamp = self.Workspace._SrcTimeStamp
+ if SrcTimeStamp > DstTimeStamp:
+ return False
+
+ with open(self.TimeStampPath,'r') as f:
+ for source in f:
+ source = source.rstrip('\n')
+ if not os.path.exists(source):
+ return False
+ if source not in ModuleAutoGen.TimeDict :
+ ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
+ if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
+ return False
+ GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
+ return True
+
+ @cached_property
+ def TimeStampPath(self):
+ return os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
new file mode 100755
index 00000000..09168047
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py
@@ -0,0 +1,674 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from __future__ import absolute_import
+from Workspace.WorkspaceDatabase import WorkspaceDatabase,BuildDB
+from Common.caching import cached_property
+from AutoGen.BuildEngine import BuildRule,AutoGenReqBuildRuleVerNum
+from AutoGen.AutoGen import CalculatePriorityValue
+from Common.Misc import CheckPcdDatum,GuidValue
+from Common.Expression import ValueExpressionEx
+from Common.DataType import *
+from CommonDataClass.Exceptions import *
+from CommonDataClass.CommonClass import SkuInfoClass
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import OPTION_CONFLICT,FORMAT_INVALID,RESOURCE_NOT_AVAILABLE
+from Common.MultipleWorkspace import MultipleWorkspace as mws
+from collections import defaultdict
+from Common.Misc import PathClass
+import os
+
+
+#
+# The priority list used when overriding build options
+#
+PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
+ "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE
+ "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE
+ "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
+ "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
+## Base class for AutoGen
+#
+# This class just implements the cache mechanism of AutoGen objects.
+#
+class AutoGenInfo(object):
+ # database to maintain the objects in each child class
+ __ObjectCache = {} # (BuildTarget, ToolChain, ARCH, platform file): AutoGen object
+
+ ## Factory method
+ #
+ # @param Class class object of real AutoGen class
+ # (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
+ # @param Workspace Workspace directory or WorkspaceAutoGen object
+ # @param MetaFile The path of meta file
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param Arch Target arch
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+ @classmethod
+ def GetCache(cls):
+ return cls.__ObjectCache
+ def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ # check if the object has been created
+ Key = (Target, Toolchain, Arch, MetaFile)
+ if Key in cls.__ObjectCache:
+ # if it exists, just return it directly
+ return cls.__ObjectCache[Key]
+        # it didn't exist; create it, cache it, then return it
+ RetVal = cls.__ObjectCache[Key] = super(AutoGenInfo, cls).__new__(cls)
+ return RetVal
+
+
+ ## hash() operator
+ #
+ # The file path of platform file will be used to represent hash value of this object
+ #
+ # @retval int Hash value of the file path of platform file
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+ ## str() operator
+ #
+ # The file path of platform file will be used to represent this object
+ #
+ # @retval string String of platform file path
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## "==" operator
+ def __eq__(self, Other):
+ return Other and self.MetaFile == Other
+
+ ## Expand * in build option key
+ #
+ # @param Options Options to be expanded
+    # @param ToolDef    Use specified ToolDef instead of full version.
+    #                   This is needed during initialization to prevent
+    #                   infinite recursion between BuildOptions,
+    #                   ToolDefinition, and this function.
+ #
+ # @retval options Options expanded
+ #
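+    # Example (sketch with hypothetical values): with BuildTarget "DEBUG",
+    # ToolChain "GCC5" and Arch "X64", an entry
+    #     Options[("GCC", "*_*_*_CC_FLAGS")] = "-DFOO"
+    # ends up in BuildOptions["CC"]["FLAGS"]; a value starting with "="
+    # replaces any accumulated flags instead of appending.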
+ def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
+ if not ToolDef:
+ ToolDef = self.ToolDefinition
+ BuildOptions = {}
+ FamilyMatch = False
+ FamilyIsNull = True
+
+ OverrideList = {}
+ #
+        # Construct a list containing the build options which need to be overridden.
+ #
+ for Key in Options:
+ #
+ # Key[0] -- tool family
+ # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ #
+ if (Key[0] == self.BuildRuleFamily and
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
+ Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
+ if (Target == self.BuildTarget or Target == TAB_STAR) and\
+ (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
+ (Arch == self.Arch or Arch == TAB_STAR) and\
+ Options[Key].startswith("="):
+
+ if OverrideList.get(Key[1]) is not None:
+ OverrideList.pop(Key[1])
+ OverrideList[Key[1]] = Options[Key]
+
+ #
+ # Use the highest priority value.
+ #
+ if (len(OverrideList) >= 2):
+ KeyList = list(OverrideList.keys())
+ for Index in range(len(KeyList)):
+ NowKey = KeyList[Index]
+ Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
+ for Index1 in range(len(KeyList) - Index - 1):
+ NextKey = KeyList[Index1 + Index + 1]
+ #
+ # Compare two Key, if one is included by another, choose the higher priority one
+ #
+ Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
+ if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
+ (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
+ (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
+ (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
+ (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):
+
+ if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NextKey))
+ else:
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NowKey))
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+ # Check Module style is EDK or EDKII.
+ # Only append build option for the matched style module.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family != "":
+ Found = False
+ if Tool in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if TAB_STAR in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if not Found:
+ continue
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+        # The build option family has been checked and doesn't need to be checked again.
+ if FamilyMatch or FamilyIsNull:
+ return BuildOptions
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+ # Check Module style is EDK or EDKII.
+ # Only append build option for the matched style module.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family == "":
+ continue
+ # option has been added before
+ Found = False
+ if Tool in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if TAB_STAR in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if not Found:
+ continue
+
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+ return BuildOptions
+#
+# This class is the pruned WorkSpaceAutoGen used by ModuleAutoGen in
+# multiprocess builds.
+#
+class WorkSpaceInfo(AutoGenInfo):
+ def __init__(self,Workspace, MetaFile, Target, ToolChain, Arch):
+ if not hasattr(self, "_Init"):
+ self.do_init(Workspace, MetaFile, Target, ToolChain, Arch)
+ self._Init = True
+ def do_init(self,Workspace, MetaFile, Target, ToolChain, Arch):
+ self._SrcTimeStamp = 0
+ self.Db = BuildDB
+ self.BuildDatabase = self.Db.BuildObject
+ self.Target = Target
+ self.ToolChain = ToolChain
+ self.WorkspaceDir = Workspace
+ self.ActivePlatform = MetaFile
+ self.ArchList = Arch
+ self.AutoGenObjectList = []
+ @property
+ def BuildDir(self):
+ return self.AutoGenObjectList[0].BuildDir
+
+ @property
+ def Name(self):
+ return self.AutoGenObjectList[0].Platform.PlatformName
+
+ @property
+ def FlashDefinition(self):
+ return self.AutoGenObjectList[0].Platform.FlashDefinition
+ @property
+ def GenFdsCommandDict(self):
+ FdsCommandDict = self.AutoGenObjectList[0].DataPipe.Get("FdsCommandDict")
+ if FdsCommandDict:
+ return FdsCommandDict
+ return {}
+
+ @cached_property
+ def FvDir(self):
+ return os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
+
+class PlatformInfo(AutoGenInfo):
+ def __init__(self, Workspace, MetaFile, Target, ToolChain, Arch,DataPipe):
+ if not hasattr(self, "_Init"):
+ self.do_init(Workspace, MetaFile, Target, ToolChain, Arch,DataPipe)
+ self._Init = True
+ def do_init(self,Workspace, MetaFile, Target, ToolChain, Arch,DataPipe):
+ self.Wa = Workspace
+ self.WorkspaceDir = self.Wa.WorkspaceDir
+ self.MetaFile = MetaFile
+ self.Arch = Arch
+ self.Target = Target
+ self.BuildTarget = Target
+ self.ToolChain = ToolChain
+ self.Platform = self.Wa.BuildDatabase[self.MetaFile, self.Arch, self.Target, self.ToolChain]
+
+ self.SourceDir = MetaFile.SubDir
+ self.DataPipe = DataPipe
+ @cached_property
+ def _AsBuildModuleList(self):
+ retVal = self.DataPipe.Get("AsBuildModuleList")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ ## Test if a module is supported by the platform
+ #
+ # An error will be raised directly if the module or its arch is not supported
+ # by the platform or current configuration
+ #
+ def ValidModule(self, Module):
+ return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
+ or Module in self._AsBuildModuleList
+
+ @cached_property
+ def ToolChainFamily(self):
+ retVal = self.DataPipe.Get("ToolChainFamily")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ @cached_property
+ def BuildRuleFamily(self):
+ retVal = self.DataPipe.Get("BuildRuleFamily")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ @cached_property
+ def _MbList(self):
+ return [self.Wa.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain] for m in self.Platform.Modules]
+
+ @cached_property
+ def PackageList(self):
+ RetVal = set()
+ for dec_file,Arch in self.DataPipe.Get("PackageList"):
+ RetVal.add(self.Wa.BuildDatabase[dec_file,Arch,self.BuildTarget, self.ToolChain])
+ return list(RetVal)
+
+ ## Return the directory to store all intermediate and final files built
+ @cached_property
+ def BuildDir(self):
+ if os.path.isabs(self.OutputDir):
+ RetVal = os.path.join(
+ os.path.abspath(self.OutputDir),
+ self.Target + "_" + self.ToolChain,
+ )
+ else:
+ RetVal = os.path.join(
+ self.WorkspaceDir,
+ self.OutputDir,
+ self.Target + "_" + self.ToolChain,
+ )
+ return RetVal
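+ # Illustrative result (hypothetical values): with WorkspaceDir "/edk2",
+ # OutputDir "Build/Ovmf", Target "DEBUG" and ToolChain "GCC5", BuildDir
+ # resolves to "/edk2/Build/Ovmf/DEBUG_GCC5".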
+
+ ## Return the build output directory platform specifies
+ @cached_property
+ def OutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return platform name
+ @cached_property
+ def Name(self):
+ return self.Platform.PlatformName
+
+ ## Return meta-file GUID
+ @cached_property
+ def Guid(self):
+ return self.Platform.Guid
+
+ ## Return platform version
+ @cached_property
+ def Version(self):
+ return self.Platform.Version
+
+ ## Return paths of tools
+ @cached_property
+ def ToolDefinition(self):
+ retVal = self.DataPipe.Get("TOOLDEF")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ retVal = self.DataPipe.Get("BuildCommand")
+ if retVal is None:
+ retVal = []
+ return retVal
+
+ @cached_property
+ def PcdTokenNumber(self):
+ retVal = self.DataPipe.Get("PCD_TNUM")
+ if retVal is None:
+ retVal = {}
+ return retVal
+
+ ## Override PCD setting (type, value, ...)
+ #
+ # @param ToPcd The PCD to be overridden
+ # @param FromPcd The PCD overriding from
+ #
+ def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
+ #
+ # In case there are PCDs coming from the FDF file with no type given,
+ # ToPcd.Type at this point holds the type found in the dependent
+ # package.
+ #
+ TokenCName = ToPcd.TokenCName
+ for PcdItem in self.MixedPcd:
+ if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in self.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ if FromPcd is not None:
+ if ToPcd.Pending and FromPcd.Type:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type\
+ and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
+ if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type \
+ and ToPcd.Type != FromPcd.Type:
+ if Library:
+ Module = str(Module) + "'s library file (" + str(Library) + ")"
+ EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
+ ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
+ % (ToPcd.TokenSpaceGuidCName, TokenCName,
+ ToPcd.Type, Module, FromPcd.Type, Msg),
+ File=self.MetaFile)
+
+ if FromPcd.MaxDatumSize:
+ ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
+ ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
+ if FromPcd.DefaultValue:
+ ToPcd.DefaultValue = FromPcd.DefaultValue
+ if FromPcd.TokenValue:
+ ToPcd.TokenValue = FromPcd.TokenValue
+ if FromPcd.DatumType:
+ ToPcd.DatumType = FromPcd.DatumType
+ if FromPcd.SkuInfoList:
+ ToPcd.SkuInfoList = FromPcd.SkuInfoList
+ if FromPcd.UserDefinedDefaultStoresFlag:
+ ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
+ # Add Flexible PCD format parse
+ if ToPcd.DefaultValue:
+ try:
+ ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self._GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
+ File=self.MetaFile)
+
+ # check the validation of datum
+ IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
+ if not IsValid:
+ EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
+ ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ ToPcd.validateranges = FromPcd.validateranges
+ ToPcd.validlists = FromPcd.validlists
+ ToPcd.expressions = FromPcd.expressions
+ ToPcd.CustomAttribute = FromPcd.CustomAttribute
+
+ if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
+ % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ Value = ToPcd.DefaultValue
+ if not Value:
+ ToPcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ ToPcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ ToPcd.MaxDatumSize = str(len(Value) - 1)
+
+ # apply default SKU for dynamic PCDS if specified one is not available
+ if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
+ and not ToPcd.SkuInfoList:
+ if self.Platform.SkuName in self.Platform.SkuIds:
+ SkuName = self.Platform.SkuName
+ else:
+ SkuName = TAB_DEFAULT
+ ToPcd.SkuInfoList = {
+ SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
+ }
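+ # Illustrative MaxDatumSize results for the rules above (hypothetical values):
+ # DefaultValue L"ab" -> (5 - 2) * 2 = 6 (two UCS-2 chars plus a NUL)
+ # DefaultValue {0x01, 0x02} -> 2 (the number of array elements)
+ # DefaultValue "abc" -> 5 - 1 = 4 (three ASCII chars plus a NUL)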
+
+ def ApplyPcdSetting(self, Ma, Pcds, Library=""):
+ # for each PCD in module
+ Module=Ma.Module
+ for Name, Guid in Pcds:
+ PcdInModule = Pcds[Name, Guid]
+ # find out the PCD setting in platform
+ if (Name, Guid) in self.Pcds:
+ PcdInPlatform = self.Pcds[Name, Guid]
+ else:
+ PcdInPlatform = None
+ # then override the settings if any
+ self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
+ # resolve the VariableGuid value
+ for SkuId in PcdInModule.SkuInfoList:
+ Sku = PcdInModule.SkuInfoList[SkuId]
+ if Sku.VariableGuid == '': continue
+ Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
+ if Sku.VariableGuidValue is None:
+ PackageList = "\n\t".join(str(P) for P in self.PackageList)
+ EdkLogger.error(
+ 'build',
+ RESOURCE_NOT_AVAILABLE,
+ "Value of GUID [%s] is not found in" % Sku.VariableGuid,
+ ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
+ % (Guid, Name, str(Module)),
+ File=self.MetaFile
+ )
+
+ # override PCD settings with module specific setting
+ ModuleScopePcds = self.DataPipe.Get("MOL_PCDS")
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PCD_DATA = ModuleScopePcds.get(Ma.Guid,{})
+ mPcds = {(pcd.TokenCName,pcd.TokenSpaceGuidCName): pcd for pcd in PCD_DATA}
+ for Key in mPcds:
+ if self.BuildOptionPcd:
+ for pcd in self.BuildOptionPcd:
+ (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
+ if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
+ PlatformModule.Pcds[Key].DefaultValue = pcdvalue
+ PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
+ break
+ Flag = False
+ if Key in Pcds:
+ ToPcd = Pcds[Key]
+ Flag = True
+ elif Key in self.MixedPcd:
+ for PcdItem in self.MixedPcd[Key]:
+ if PcdItem in Pcds:
+ ToPcd = Pcds[PcdItem]
+ Flag = True
+ break
+ if Flag:
+ self._OverridePcd(ToPcd, mPcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
+ # use PCD value to calculate the MaxDatumSize when it is not specified
+ for Name, Guid in Pcds:
+ Pcd = Pcds[Name, Guid]
+ if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
+ Pcd.MaxSizeUserSet = None
+ Value = Pcd.DefaultValue
+ if not Value:
+ Pcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ Pcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ Pcd.MaxDatumSize = str(len(Value) - 1)
+ return list(Pcds.values())
+
+ @cached_property
+ def Pcds(self):
+ PlatformPcdData = self.DataPipe.Get("PLA_PCD")
+# for pcd in PlatformPcdData:
+# for skuid in pcd.SkuInfoList:
+# pcd.SkuInfoList[skuid] = self.CreateSkuInfoFromDict(pcd.SkuInfoList[skuid])
+ return {(pcddata.TokenCName,pcddata.TokenSpaceGuidCName):pcddata for pcddata in PlatformPcdData}
+
+ def CreateSkuInfoFromDict(self,SkuInfoDict):
+ return SkuInfoClass(
+ SkuInfoDict.get("SkuIdName"),
+ SkuInfoDict.get("SkuId"),
+ SkuInfoDict.get("VariableName"),
+ SkuInfoDict.get("VariableGuid"),
+ SkuInfoDict.get("VariableOffset"),
+ SkuInfoDict.get("HiiDefaultValue"),
+ SkuInfoDict.get("VpdOffset"),
+ SkuInfoDict.get("DefaultValue"),
+ SkuInfoDict.get("VariableGuidValue"),
+ SkuInfoDict.get("VariableAttribute",""),
+ SkuInfoDict.get("DefaultStore",None)
+ )
+ @cached_property
+ def MixedPcd(self):
+ return self.DataPipe.Get("MixedPcd")
+ @cached_property
+ def _GuidDict(self):
+ RetVal = self.DataPipe.Get("GuidDict")
+ if RetVal is None:
+ RetVal = {}
+ return RetVal
+ @cached_property
+ def BuildOptionPcd(self):
+ return self.DataPipe.Get("BuildOptPcd")
+ def ApplyBuildOption(self,module):
+ PlatformOptions = self.DataPipe.Get("PLA_BO")
+ ModuleBuildOptions = self.DataPipe.Get("MOL_BO")
+ ModuleOptionFromDsc = ModuleBuildOptions.get((module.MetaFile.File,module.MetaFile.Root))
+ if ModuleOptionFromDsc:
+ ModuleTypeOptions, PlatformModuleOptions = ModuleOptionFromDsc["ModuleTypeOptions"],ModuleOptionFromDsc["PlatformModuleOptions"]
+ else:
+ ModuleTypeOptions, PlatformModuleOptions = {}, {}
+ ToolDefinition = self.DataPipe.Get("TOOLDEF")
+ ModuleOptions = self._ExpandBuildOption(module.BuildOptions)
+ BuildRuleOrder = None
+ for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ for Tool in Options:
+ for Attr in Options[Tool]:
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ BuildRuleOrder = Options[Tool][Attr]
+
+ AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
+ list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
+ list(ToolDefinition.keys()))
+ BuildOptions = defaultdict(lambda: defaultdict(str))
+ for Tool in AllTools:
+ for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ if Tool not in Options:
+ continue
+ for Attr in Options[Tool]:
+ #
+ # Do not generate it in Makefile
+ #
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ continue
+ Value = Options[Tool][Attr]
+ ToolList = [Tool]
+ if Tool == TAB_STAR:
+ ToolList = list(AllTools)
+ ToolList.remove(TAB_STAR)
+ for ExpandedTool in ToolList:
+ # check if override is indicated
+ if Value.startswith('='):
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value[1:])
+ else:
+ if Attr != 'PATH':
+ BuildOptions[ExpandedTool][Attr] += " " + mws.handleWsMacro(Value)
+ else:
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value)
+
+ return BuildOptions, BuildRuleOrder
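+ # Sketch of the merge order above (hypothetical values): the five option
+ # sources are applied as ToolDefinition first, then module INF
+ # [BuildOptions], platform DSC [BuildOptions], module-type options and
+ # finally DSC per-module overrides; e.g. CC FLAGS "/W4" from tools_def
+ # followed by "/GL" from the DSC yields "/W4 /GL", while "=/Od" from a
+ # later source discards everything gathered before it.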
+
+ def ApplyLibraryInstance(self,module):
+ alldeps = self.DataPipe.Get("DEPS")
+ if alldeps is None:
+ alldeps = {}
+ mod_libs = alldeps.get((module.MetaFile.File,module.MetaFile.Root,module.Arch,module.MetaFile.Path),[])
+ retVal = []
+ for (file_path,root,arch,abs_path) in mod_libs:
+ libMetaFile = PathClass(file_path,root)
+ libMetaFile.OriginalPath = PathClass(file_path,root)
+ libMetaFile.Path = abs_path
+ retVal.append(self.Wa.BuildDatabase[libMetaFile, arch, self.Target,self.ToolChain])
+ return retVal
+
+ ## Parse build_rule.txt in Conf Directory.
+ #
+ # @retval BuildRule object
+ #
+ @cached_property
+ def BuildRule(self):
+ WInfo = self.DataPipe.Get("P_Info")
+ RetVal = WInfo.get("BuildRuleFile")
+ if RetVal._FileVersion == "":
+ RetVal._FileVersion = AutoGenReqBuildRuleVerNum
+ return RetVal
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
new file mode 100755
index 00000000..29bc5439
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py
@@ -0,0 +1,1603 @@
+## @file
+# Create makefile for MS nmake and GNU make
+#
+# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import os.path as path
+import copy
+from collections import defaultdict
+
+from .BuildEngine import BuildRule,gDefaultBuildRuleFile,AutoGenReqBuildRuleVerNum
+from .GenVar import VariableMgr, var_info
+from . import GenMake
+from AutoGen.DataPipe import MemoryDataPipe
+from AutoGen.ModuleAutoGen import ModuleAutoGen
+from AutoGen.AutoGen import AutoGen
+from AutoGen.AutoGen import CalculatePriorityValue
+from Workspace.WorkspaceCommon import GetModuleLibInstances
+from CommonDataClass.CommonClass import SkuInfoClass
+from Common.caching import cached_class_function
+from Common.Expression import ValueExpressionEx
+from Common.StringUtils import StringToArray,NormPath
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+import Common.VpdInfoFile as VpdInfoFile
+
+## Split command line option string to list
+#
+# subprocess.Popen needs the args to be a sequence. Otherwise there's problem
+# in non-windows platform to launch command
+#
+def _SplitOption(OptionString):
+ OptionList = []
+ LastChar = " "
+ OptionStart = 0
+ QuotationMark = ""
+ for Index in range(0, len(OptionString)):
+ CurrentChar = OptionString[Index]
+ if CurrentChar in ['"', "'"]:
+ if QuotationMark == CurrentChar:
+ QuotationMark = ""
+ elif QuotationMark == "":
+ QuotationMark = CurrentChar
+ continue
+ elif QuotationMark:
+ continue
+
+ if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
+ if Index > OptionStart:
+ OptionList.append(OptionString[OptionStart:Index - 1])
+ OptionStart = Index
+ LastChar = CurrentChar
+ OptionList.append(OptionString[OptionStart:])
+ return OptionList
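+# Illustrative behavior (hypothetical input):
+#   _SplitOption('/nologo /f Makefile') -> ['/nologo', '/f Makefile']
+# Quoted sections are tracked, so an option such as -DNAME="a b" is not
+# split at the space embedded in the quotes.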
+
+## AutoGen class for platform
+#
+# PlatformAutoGen class will process the original information in platform
+# file in order to generate makefile for platform.
+#
+class PlatformAutoGen(AutoGen):
+ # Accept extra positional/keyword arguments; the _Init flag ensures the worker initializer runs only once.
+ def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ if not hasattr(self, "_Init"):
+ self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
+ self._Init = True
+ #
+ # Used to store all PCDs for both PEI and DXE phase, in order to generate
+ # correct PCD database
+ #
+ _DynaPcdList_ = []
+ _NonDynaPcdList_ = []
+ _PlatformPcds = {}
+
+
+
+ ## Initialize PlatformAutoGen
+ #
+ #
+ # @param Workspace WorkspaceAutoGen object
+ # @param PlatformFile Platform file (DSC file)
+ # @param Target Build target (DEBUG, RELEASE)
+ # @param Toolchain Name of tool chain
+ # @param Arch Arch that the platform supports
+ #
+ def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
+
+ self.MetaFile = PlatformFile
+ self.Workspace = Workspace
+ self.WorkspaceDir = Workspace.WorkspaceDir
+ self.ToolChain = Toolchain
+ self.BuildTarget = Target
+ self.Arch = Arch
+ self.SourceDir = PlatformFile.SubDir
+ self.FdTargetList = self.Workspace.FdTargetList
+ self.FvTargetList = self.Workspace.FvTargetList
+ # get the original module/package/platform objects
+ self.BuildDatabase = Workspace.BuildDatabase
+ self.DscBuildDataObj = Workspace.Platform
+
+ # MakeFileName is used to get the Makefile name and as a flag
+ # indicating whether the file has been created.
+ self.MakeFileName = ""
+
+ self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+ self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+
+ self._AsBuildInfList = []
+ self._AsBuildModuleList = []
+
+ self.VariableInfo = None
+
+ if GlobalData.gFdfParser is not None:
+ self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
+ for Inf in self._AsBuildInfList:
+ InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
+ M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
+ if not M.IsBinaryModule:
+ continue
+ self._AsBuildModuleList.append(InfClass)
+ # get library/modules for build
+ self.LibraryBuildDirectoryList = []
+ self.ModuleBuildDirectoryList = []
+
+ self.DataPipe = MemoryDataPipe(self.BuildDir)
+ self.DataPipe.FillData(self)
+
+ return True
+ def FillData_LibConstPcd(self):
+ libConstPcd = {}
+ for LibAuto in self.LibraryAutoGenList:
+ if LibAuto.ConstPcd:
+ libConstPcd[(LibAuto.MetaFile.File,LibAuto.MetaFile.Root,LibAuto.Arch,LibAuto.MetaFile.Path)] = LibAuto.ConstPcd
+ self.DataPipe.DataContainer = {"LibConstPcd":libConstPcd}
+ ## hash() operator of PlatformAutoGen
+ #
+ # The platform file path and arch string will be used to represent
+ # hash value of this object
+ #
+ # @retval int Hash value of the platform file path and arch
+ #
+ @cached_class_function
+ def __hash__(self):
+ return hash((self.MetaFile, self.Arch,self.ToolChain,self.BuildTarget))
+ @cached_class_function
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, self.Arch)
+
+ ## Create autogen code for platform and modules
+ #
+ # Since there's no autogen code for platform, this method will do nothing
+ # if CreateModuleCodeFile is set to False.
+ #
+ # @param CreateModuleCodeFile Flag indicating if creating module's
+ # autogen code file or not
+ #
+ @cached_class_function
+ def CreateCodeFile(self, CreateModuleCodeFile=False):
+ # only module has code to be created, so do nothing if CreateModuleCodeFile is False
+ if not CreateModuleCodeFile:
+ return
+
+ for Ma in self.ModuleAutoGenList:
+ Ma.CreateCodeFile(CreateModuleCodeFile)
+
+ ## Generate Fds Command
+ @cached_property
+ def GenFdsCommand(self):
+ return self.Workspace.GenFdsCommand
+
+ ## Create makefile for the platform and modules in it
+ #
+ # @param CreateModuleMakeFile Flag indicating if the makefile for
+ # modules will be created as well
+ #
+ def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):
+ if CreateModuleMakeFile:
+ for Ma in self._MaList:
+ key = (Ma.MetaFile.File, self.Arch)
+ if key in FfsCommand:
+ Ma.CreateMakeFile(CreateModuleMakeFile, FfsCommand[key])
+ else:
+ Ma.CreateMakeFile(CreateModuleMakeFile)
+ self.CreateLibModuelDirs()
+
+ def CreateLibModuelDirs(self):
+ # No need to create makefile for the platform more than once.
+ if self.MakeFileName:
+ return
+
+ # create library/module build dirs for platform
+ Makefile = GenMake.PlatformMakefile(self)
+ self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()
+ self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()
+ self.MakeFileName = Makefile.getMakefileName()
+
+ @property
+ def AllPcdList(self):
+ return self.DynamicPcdList + self.NonDynamicPcdList
+ ## Deal with Shared FixedAtBuild Pcds
+ #
+ def CollectFixedAtBuildPcds(self):
+ for LibAuto in self.LibraryAutoGenList:
+ FixedAtBuildPcds = {}
+ ShareFixedAtBuildPcdsSameValue = {}
+ for Module in LibAuto.ReferenceModules:
+ for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
+ DefaultValue = Pcd.DefaultValue
+ # Cover the case where a DSC component overrides the PCD value and the PCD is only used in one library
+ if Pcd in Module.LibraryPcdList:
+ Index = Module.LibraryPcdList.index(Pcd)
+ DefaultValue = Module.LibraryPcdList[Index].DefaultValue
+ key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if key not in FixedAtBuildPcds:
+ ShareFixedAtBuildPcdsSameValue[key] = True
+ FixedAtBuildPcds[key] = DefaultValue
+ else:
+ if FixedAtBuildPcds[key] != DefaultValue:
+ ShareFixedAtBuildPcdsSameValue[key] = False
+ for Pcd in LibAuto.FixedAtBuildPcds:
+ key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
+ continue
+ else:
+ DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
+ if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
+ continue
+ if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
+ LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
+
+ def CollectVariables(self, DynamicPcdSet):
+ VpdRegionSize = 0
+ VpdRegionBase = 0
+ if self.Workspace.FdfFile:
+ FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
+ for FdRegion in FdDict.RegionList:
+ for item in FdRegion.RegionDataList:
+ if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
+ VpdRegionSize = FdRegion.Size
+ VpdRegionBase = FdRegion.Offset
+ break
+
+ VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
+ VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
+ VariableInfo.SetVpdRegionOffset(VpdRegionBase)
+ Index = 0
+ for Pcd in sorted(DynamicPcdSet):
+ pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ for SkuName in Pcd.SkuInfoList:
+ Sku = Pcd.SkuInfoList[SkuName]
+ SkuId = Sku.SkuId
+ if SkuId is None or SkuId == '':
+ continue
+ if len(Sku.VariableName) > 0:
+ if Sku.VariableAttribute and 'NV' not in Sku.VariableAttribute:
+ continue
+ VariableGuidStructure = Sku.VariableGuidValue
+ VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
+ for StorageName in Sku.DefaultStoreDict:
+ VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName] if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES else StringToArray(Sku.DefaultStoreDict[StorageName]), Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))
+ Index += 1
+ return VariableInfo
+
+ def UpdateNVStoreMaxSize(self, OrgVpdFile):
+ if self.VariableInfo:
+ VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
+ PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
+
+ if PcdNvStoreDfBuffer:
+ try:
+ OrgVpdFile.Read(VpdMapFilePath)
+ PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
+ NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
+ except:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+ NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
+ default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
+ maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))
+ var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)
+
+ if var_data and default_skuobj:
+ default_skuobj.DefaultValue = var_data
+ PcdNvStoreDfBuffer[0].DefaultValue = var_data
+ PcdNvStoreDfBuffer[0].SkuInfoList.clear()
+ PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj
+ PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))
+
+ return OrgVpdFile
+
+ ## Collect dynamic PCDs
+ #
+ # Gather dynamic PCDs list from each module and their settings from platform
+ # This interface should be invoked explicitly when platform action is created.
+ #
+ def CollectPlatformDynamicPcds(self):
+ self.CategoryPcds()
+ self.SortDynamicPcd()
+
+ def CategoryPcds(self):
+ # Categorize PCDs into DynamicPcds and NonDynamicPcds,
+ # gathering error information along the way
+ NoDatumTypePcdList = set()
+ FdfModuleList = []
+ for InfName in self._AsBuildInfList:
+ InfName = mws.join(self.WorkspaceDir, InfName)
+ FdfModuleList.append(os.path.normpath(InfName))
+ for M in self._MbList:
+ # M is the module build-data object for each module in the platform
+ ModPcdList = self.ApplyPcdSetting(M, M.ModulePcdList)
+ LibPcdList = []
+ for lib in M.LibraryPcdList:
+ LibPcdList.extend(self.ApplyPcdSetting(M, M.LibraryPcdList[lib], lib))
+ for PcdFromModule in ModPcdList + LibPcdList:
+
+ # make sure that the "VOID*" kind of datum has MaxDatumSize set
+ if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
+ NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
+
+ # Check whether the PCD comes from a binary INF or a source INF
+ if M.IsBinaryModule == True:
+ PcdFromModule.IsFromBinaryInf = True
+
+ # Check whether the PCD comes from the DSC or not
+ PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds
+
+ if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ if M.MetaFile.Path not in FdfModuleList:
+ # If one of the source-built modules listed in the DSC is not listed
+ # in the FDF modules, and the INF lists a PCD that can only use the
+ # PcdsDynamic access method (it is only listed in the DEC file that
+ # declares the PCD as PcdsDynamic), then the build tool reports a
+ # warning to notify the platform integrator that they are attempting
+ # to build a module that must be included in a flash image in order
+ # to be functional. Such a Dynamic PCD is not added to the database
+ # unless it is used by other modules that are included in the FDF file.
+ if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \
+ PcdFromModule.IsFromBinaryInf == False:
+ # Print a warning message to let the developer make a determination.
+ continue
+ # If one of the source-built modules listed in the DSC is not listed in
+ # the FDF modules, and the INF lists a PCD that can only use the
+ # PcdsDynamicEx access method (it is only listed in the DEC file that
+ # declares the PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT
+ # add the PCD to the platform's PCD database.
+ if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ continue
+ #
+ # If a dynamic PCD is used by both a PEIM/PEI module and a DXE module,
+ # it should be stored in the PEI PCD database. If a dynamic PCD is only
+ # used by DXE modules, it should be stored in the DXE PCD database.
+ # The default Phase is DXE.
+ #
+ if M.ModuleType in SUP_MODULE_SET_PEI:
+ PcdFromModule.Phase = "PEI"
+ if PcdFromModule not in self._DynaPcdList_:
+ self._DynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule.Phase == 'PEI':
+ # overwrite any existing identical PCD if the Phase is PEI
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_[Index] = PcdFromModule
+ elif PcdFromModule not in self._NonDynaPcdList_:
+ self._NonDynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:
+ Index = self._NonDynaPcdList_.index(PcdFromModule)
+ if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:
+ # The PCD from the binary INF overrides the same one from the source INF
+ self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])
+ PcdFromModule.Pending = False
+ self._NonDynaPcdList_.append (PcdFromModule)
+ DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}
+ # add the PCD from modules that listed in FDF but not in DSC to Database
+ for InfName in FdfModuleList:
+ if InfName not in DscModuleSet:
+ InfClass = PathClass(InfName)
+ M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
+ # If a module INF is in the FDF but not in the current arch's DSC module list, it must be a module (binary or source)
+ # for a different Arch. PCDs in source modules for other Archs were already added above, so skip source modules here.
+ # For binary modules in the current arch, the PCDs need to be listed in the database.
+ if not M.IsBinaryModule:
+ continue
+ # Override the module PCD setting by platform setting
+ ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)
+ for PcdFromModule in ModulePcdList:
+ PcdFromModule.IsFromBinaryInf = True
+ PcdFromModule.IsFromDsc = False
+ # Only allow the DynamicEx and Patchable PCD in AsBuild INF
+ if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"
+ % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))
+ # make sure that the "VOID*" kind of datum has MaxDatumSize set
+ if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
+ NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))
+ if M.ModuleType in SUP_MODULE_SET_PEI:
+ PcdFromModule.Phase = "PEI"
+ if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ self._DynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:
+ self._NonDynaPcdList_.append(PcdFromModule)
+ if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ # Overwrite the phase of any existing identical PCD if the Phase is PEI.
+ # This solves the case where a dynamic PCD is used by a PEIM/PEI
+ # module and a DXE module at the same time.
+ # Overwrite the type of the PCDs in the source INF with the type from the
+ # as-built INF file, i.e. DynamicEx.
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
+ self._DynaPcdList_[Index].Type = PcdFromModule.Type
+ for PcdFromModule in self._NonDynaPcdList_:
+ # If a PCD is not listed in the DSC file, but all binary INF files used
+ # by this platform that use this PCD list it in a [PatchPcds] section,
+ # AND all source INF files used by this platform that use the PCD list
+ # it in either a [Pcds] or [PatchPcds] section, then the tools must NOT
+ # add the PCD to the platform's PCD database; the build must assign the
+ # access method for this PCD as PcdsPatchableInModule.
+ if PcdFromModule not in self._DynaPcdList_:
+ continue
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ if PcdFromModule.IsFromDsc == False and \
+ PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \
+ PcdFromModule.IsFromBinaryInf == True and \
+ self._DynaPcdList_[Index].IsFromBinaryInf == False:
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_.remove (self._DynaPcdList_[Index])
+
+ # print out error information and break the build, if error found
+ if len(NoDatumTypePcdList) > 0:
+ NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"
+ % NoDatumTypePcdListString)
+ self._NonDynamicPcdList = sorted(self._NonDynaPcdList_)
+ self._DynamicPcdList = self._DynaPcdList_
+
+ def SortDynamicPcd(self):
+ #
+ # Sort the dynamic PCD list so that:
+ # 1) if a PCD's datum type is VOID* and its value is a unicode string starting with L,
+ # the PCD item is placed at the head of the dynamic list;
+ # 2) if a PCD is of HII type, the PCD item is placed after the unicode-string PCDs.
+ #
+ # The reason for sorting is to make sure the unicode strings are double-byte aligned in the string table.
+ #
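+ # Illustrative final ordering (hypothetical PCDs):
+ #   PcdLang = L"eng" (VOID*, unicode string) -> head of the list
+ #   PcdSetup (HII-backed)                    -> middle
+ #   PcdDebugLevel (UINT32)                   -> tail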
+ UnicodePcdArray = set()
+ HiiPcdArray = set()
+ OtherPcdArray = set()
+ VpdPcdDict = {}
+ VpdFile = VpdInfoFile.VpdInfoFile()
+ NeedProcessVpdMapFile = False
+
+ for pcd in self.Platform.Pcds:
+ if pcd not in self._PlatformPcds:
+ self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]
+
+ for item in self._PlatformPcds:
+ if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ self._PlatformPcds[item].DatumType = TAB_VOID
+
+ if (self.Workspace.ArchList[-1] == self.Arch):
+ for Pcd in self._DynamicPcdList:
+ # just pick a value to determine whether it is a unicode string type
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ Pcd.DatumType = TAB_VOID
+
+ # If a PCD whose datum value is a unicode string is found, insert it to the left of UnicodeIndex;
+ # if an HII type PCD is found, insert it to the right of UnicodeIndex.
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
+
+ # Collect DynamicHii PCD values and assign them to the DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer
+ PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))
+ if PcdNvStoreDfBuffer:
+ self.VariableInfo = self.CollectVariables(self._DynamicPcdList)
+ vardump = self.VariableInfo.dump()
+ if vardump:
+ #
+ # According to PCD_DATABASE_INIT in edk2\MdeModulePkg\Include\Guid\PcdDataBaseSignatureGuid.h,
+ # the max size of a string PCD should not exceed USHRT_MAX 65535 (0xffff).
+ #typedef UINT16 SIZE_INFO;
+ #//SIZE_INFO SizeTable[];
+ if len(vardump.split(",")) > 0xffff:
+ EdkLogger.error("build", RESOURCE_OVERFLOW, 'The current length of PCD %s value is %d, it exceeds to the max size of String PCD.' %(".".join([PcdNvStoreDfBuffer.TokenSpaceGuidCName,PcdNvStoreDfBuffer.TokenCName]) ,len(vardump.split(","))))
+ PcdNvStoreDfBuffer.DefaultValue = vardump
+ for skuname in PcdNvStoreDfBuffer.SkuInfoList:
+ PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump
+ PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))
+ else:
+ # If the end user defines [DefaultStores] and [XXX.Manufacturing] in the DSC but forgets to configure PcdNvStoreDefaultValueBuffer as PcdsDynamicExVpd
+ if [Pcd for Pcd in self._DynamicPcdList if Pcd.UserDefinedDefaultStoresFlag]:
+ EdkLogger.warn("build", "PcdNvStoreDefaultValueBuffer should be defined as PcdsDynamicExVpd in dsc file since the DefaultStores is enabled for this platform.\n%s" %self.Platform.MetaFile.Path)
+ PlatformPcds = sorted(self._PlatformPcds.keys())
+ #
+ # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.
+ #
+ VpdSkuMap = {}
+ for PcdKey in PlatformPcds:
+ Pcd = self._PlatformPcds[PcdKey]
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \
+ PcdKey in VpdPcdDict:
+ Pcd = VpdPcdDict[PcdKey]
+ SkuValueMap = {}
+ DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)
+ if DefaultSku:
+ PcdValue = DefaultSku.DefaultValue
+ if PcdValue not in SkuValueMap:
+ SkuValueMap[PcdValue] = []
+ VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)
+ SkuValueMap[PcdValue].append(DefaultSku)
+
+ for (SkuName, Sku) in Pcd.SkuInfoList.items():
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ PcdValue = Sku.DefaultValue
+ if PcdValue == "":
+ PcdValue = Pcd.DefaultValue
+ if Sku.VpdOffset != TAB_STAR:
+ if PcdValue.startswith("{"):
+ Alignment = 8
+ elif PcdValue.startswith("L"):
+ Alignment = 2
+ else:
+ Alignment = 1
+ try:
+ VpdOffset = int(Sku.VpdOffset)
+ except:
+ try:
+ VpdOffset = int(Sku.VpdOffset, 16)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
+ if VpdOffset % Alignment != 0:
+ if PcdValue.startswith("{"):
+ EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)
+ else:
+ EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))
+ if PcdValue not in SkuValueMap:
+ SkuValueMap[PcdValue] = []
+ VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)
+ SkuValueMap[PcdValue].append(Sku)
+ # If the offset of a VPD PCD is *, it needs to be fixed up by the third-party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
+ NeedProcessVpdMapFile = True
+ if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
+ EdkLogger.error("Build", FILE_NOT_FOUND, \
+ "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ VpdSkuMap[PcdKey] = SkuValueMap
+ #
+ # Fix the PCDs defined in the VPD PCD section that are never referenced by any module.
+ # An example is a PCD used for signature purposes.
+ #
+ for DscPcd in PlatformPcds:
+ DscPcdEntry = self._PlatformPcds[DscPcd]
+ if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
+ FoundFlag = False
+ for VpdPcd in VpdFile._VpdArray:
+ # This PCD has been referenced by module
+ if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
+ FoundFlag = True
+
+ # Not found; it should be a signature PCD
+ if not FoundFlag:
+ # just pick a value to determine whether it is a unicode string type
+ SkuValueMap = {}
+ SkuObjList = list(DscPcdEntry.SkuInfoList.items())
+ DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
+ if DefaultSku:
+ defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
+ SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
+ for (SkuName, Sku) in SkuObjList:
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ # Need to iterate the DEC PCD information to get the value & datum type
+ for eachDec in self.PackageList:
+ for DecPcd in eachDec.Pcds:
+ DecPcdEntry = eachDec.Pcds[DecPcd]
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+ # Print a warning message to let the developer make a determination.
+ EdkLogger.warn("build", "Unreferenced vpd pcd used!",
+ File=self.MetaFile, \
+ ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
+ %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
+
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
+ DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
+ DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
+ # Only fix the value when no value is provided in the DSC file.
+ if not Sku.DefaultValue:
+ DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
+
+ if DscPcdEntry not in self._DynamicPcdList:
+ self._DynamicPcdList.append(DscPcdEntry)
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ PcdValue = Sku.DefaultValue
+ if PcdValue == "":
+ PcdValue = DscPcdEntry.DefaultValue
+ if Sku.VpdOffset != TAB_STAR:
+ if PcdValue.startswith("{"):
+ Alignment = 8
+ elif PcdValue.startswith("L"):
+ Alignment = 2
+ else:
+ Alignment = 1
+ try:
+ VpdOffset = int(Sku.VpdOffset)
+ except:
+ try:
+ VpdOffset = int(Sku.VpdOffset, 16)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))
+ if VpdOffset % Alignment != 0:
+ if PcdValue.startswith("{"):
+ EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)
+ else:
+ EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))
+ if PcdValue not in SkuValueMap:
+ SkuValueMap[PcdValue] = []
+ VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
+ SkuValueMap[PcdValue].append(Sku)
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
+ NeedProcessVpdMapFile = True
+ if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):
+ UnicodePcdArray.add(DscPcdEntry)
+ elif len(Sku.VariableName) > 0:
+ HiiPcdArray.add(DscPcdEntry)
+ else:
+ OtherPcdArray.add(DscPcdEntry)
+
+ # If the offset of a VPD PCD is *, it needs to be fixed up by the third-party tool.
+ VpdSkuMap[DscPcd] = SkuValueMap
+ if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
+ VpdFile.GetCount() != 0:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
+
+ if VpdFile.GetCount() != 0:
+
+ self.FixVpdOffset(VpdFile)
+
+ self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))
+ PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
+ if PcdNvStoreDfBuffer:
+ PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName
+ if (PcdName,PcdGuid) in VpdSkuMap:
+ DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
+ VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}
+
+ # Process VPD map file generated by third party BPDG tool
+ if NeedProcessVpdMapFile:
+ VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
+ try:
+ VpdFile.Read(VpdMapFilePath)
+
+ # Fixup TAB_STAR offset
+ for pcd in VpdSkuMap:
+ vpdinfo = VpdFile.GetVpdInfo(pcd)
+ if vpdinfo is None:
+ # no VPD info recorded for this PCD; skip it
+ continue
+ for pcdvalue in VpdSkuMap[pcd]:
+ for sku in VpdSkuMap[pcd][pcdvalue]:
+ for item in vpdinfo:
+ if item[2] == pcdvalue:
+ sku.VpdOffset = item[1]
+ except:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+ # Rebuild the DynamicPcdList the last time this function is entered
+ for Pcd in self._DynamicPcdList:
+ # just pick a value to determine whether it is a unicode string type
+ Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
+ Pcd.DatumType = TAB_VOID
+
+ PcdValue = Sku.DefaultValue
+ if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):
+ # a PCD whose datum value is a unicode string goes to the left (unicode) section
+ UnicodePcdArray.add(Pcd)
+ elif len(Sku.VariableName) > 0:
+ # an HII type PCD goes to the right of the unicode section
+ HiiPcdArray.add(Pcd)
+ else:
+ OtherPcdArray.add(Pcd)
+ del self._DynamicPcdList[:]
+ self._DynamicPcdList.extend(list(UnicodePcdArray))
+ self._DynamicPcdList.extend(list(HiiPcdArray))
+ self._DynamicPcdList.extend(list(OtherPcdArray))
+ self._DynamicPcdList.sort()
+ allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
+ for pcd in self._DynamicPcdList:
+ if len(pcd.SkuInfoList) == 1:
+ for (SkuName, SkuId) in allskuset:
+ if (isinstance(SkuId, str) and eval(SkuId) == 0) or SkuId == 0:
+ continue
+ pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
+ pcd.SkuInfoList[SkuName].SkuId = SkuId
+ pcd.SkuInfoList[SkuName].SkuIdName = SkuName
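+ # Sketch of the SKU fill-in above (hypothetical values): a PCD carrying
+ # only the DEFAULT SKU on a platform that also defines SKU "Sku1" with
+ # SkuId 1 ends up with a second entry that is a deep copy of DEFAULT,
+ # with SkuId/SkuIdName rewritten to 1/"Sku1".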
+
+ def FixVpdOffset(self, VpdFile ):
+ FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
+ if not os.path.exists(FvPath):
+ try:
+ os.makedirs(FvPath)
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
+
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+
+ if VpdFile.Write(VpdFilePath):
+ # retrieve the BPDG tool's path from tools_def.txt according to VPD_TOOL_GUID defined in the DSC file.
+ BPDGToolName = None
+ for ToolDef in self.ToolDefinition.values():
+ if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:
+ if "PATH" not in ToolDef:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
+ BPDGToolName = ToolDef["PATH"]
+ break
+ # Call third party GUID BPDG tool.
+ if BPDGToolName is not None:
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
+ else:
+ EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ ## Return the platform build data object
+ @cached_property
+ def Platform(self):
+ return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
+
+ ## Return platform name
+ @cached_property
+ def Name(self):
+ return self.Platform.PlatformName
+
+ ## Return the meta file GUID
+ @cached_property
+ def Guid(self):
+ return self.Platform.Guid
+
+ ## Return the platform version
+ @cached_property
+ def Version(self):
+ return self.Platform.Version
+
+ ## Return the FDF file name
+ @cached_property
+ def FdfFile(self):
+ if self.Workspace.FdfFile:
+ RetVal= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
+ else:
+ RetVal = ''
+ return RetVal
+
+ ## Return the build output directory platform specifies
+ @cached_property
+ def OutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return the directory to store all intermediate and final files built
+ @cached_property
+ def BuildDir(self):
+ if os.path.isabs(self.OutputDir):
+ GlobalData.gBuildDirectory = RetVal = path.join(
+ path.abspath(self.OutputDir),
+ self.BuildTarget + "_" + self.ToolChain,
+ )
+ else:
+ GlobalData.gBuildDirectory = RetVal = path.join(
+ self.WorkspaceDir,
+ self.OutputDir,
+ self.BuildTarget + "_" + self.ToolChain,
+ )
+ return RetVal
+
+ ## Return directory of platform makefile
+ #
+ # @retval string Makefile directory
+ #
+ @cached_property
+ def MakeFileDir(self):
+ return path.join(self.BuildDir, self.Arch)
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ if "MAKE" in self.EdkIIBuildOption and "PATH" in self.EdkIIBuildOption["MAKE"]:
+ # MAKE_PATH in DSC [BuildOptions] section is higher priority
+ Path = self.EdkIIBuildOption["MAKE"]["PATH"]
+ if Path.startswith('='):
+ Path = Path[1:].strip()
+ RetVal = _SplitOption(Path)
+ elif "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
+ RetVal = _SplitOption(self.ToolDefinition["MAKE"]["PATH"])
+ else:
+ return []
+ if "MAKE" in self.ToolDefinition and "FLAGS" in self.ToolDefinition["MAKE"]:
+ NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
+ if NewOption != '':
+ RetVal += _SplitOption(NewOption)
+ if "MAKE" in self.EdkIIBuildOption and "FLAGS" in self.EdkIIBuildOption["MAKE"]:
+ Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]
+ if Flags.startswith('='):
+ RetVal = [RetVal[0]] + _SplitOption(Flags[1:].strip())
+ else:
+ RetVal = RetVal + _SplitOption(Flags.strip())
+ return RetVal
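+ # Illustrative precedence (hypothetical values): a DSC [BuildOptions]
+ # entry *_*_*_MAKE_PATH = =/usr/bin/make wins over the tools_def MAKE
+ # PATH; the tools_def MAKE_FLAGS "-j 8" is then appended, and a DSC
+ # MAKE FLAGS value starting with '=' would replace all accumulated
+ # flags instead.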
+
+ ## Compute a tool definition key priority value in range 0..15
+ #
+ # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE 15
+ # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE 14
+ # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE 13
+ # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE 12
+ # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE 11
+ # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE 10
+ # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE 9
+ # ******_*********_****_COMMANDTYPE_ATTRIBUTE 8
+ # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE 7
+ # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE 6
+ # TARGET_*********_ARCH_***********_ATTRIBUTE 5
+ # ******_*********_ARCH_***********_ATTRIBUTE 4
+ # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE 3
+ # ******_TOOLCHAIN_****_***********_ATTRIBUTE 2
+ # TARGET_*********_****_***********_ATTRIBUTE 1
+ # ******_*********_****_***********_ATTRIBUTE 0
+ #
+ def ToolDefinitionPriority (self,Key):
+ KeyList = Key.split('_')
+ Priority = 0
+ for Index in range (0, min(4, len(KeyList))):
+ if KeyList[Index] != '*':
+ Priority += (1 << Index)
+ return Priority
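+ # Worked examples for the table above (hypothetical keys):
+ #   "DEBUG_VS2019_IA32_CC_FLAGS" -> 1 + 2 + 4 + 8 = 15 (no wildcards)
+ #   "*_*_*_CC_FLAGS"             -> 8 (only the command-type field is specific)
+ #   "DEBUG_*_*_*_FLAGS"          -> 1 (only the target field is specific)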
+
+ ## Get tool chain definition
+ #
+ # Get each tool definition for given tool chain from tools_def.txt and platform
+ #
+ @cached_property
+ def ToolDefinition(self):
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
+ if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
+ ExtraData="[%s]" % self.MetaFile)
+ RetVal = OrderedDict()
+ DllPathList = set()
+
+ PrioritizedDefList = sorted(ToolDefinition.keys(), key=self.ToolDefinitionPriority, reverse=True)
+ for Def in PrioritizedDefList:
+ Target, Tag, Arch, Tool, Attr = Def.split("_")
+ if Target == TAB_STAR:
+ Target = self.BuildTarget
+ if Tag == TAB_STAR:
+ Tag = self.ToolChain
+ if Arch == TAB_STAR:
+ Arch = self.Arch
+ if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
+ continue
+
+ Value = ToolDefinition[Def]
+ # don't record the DLL
+ if Attr == "DLL":
+ DllPathList.add(Value)
+ continue
+
+ #
+ # ToolDefinition is sorted from highest priority to lowest priority.
+ # Only add the first(highest priority) match to RetVal
+ #
+ if Tool not in RetVal:
+ RetVal[Tool] = OrderedDict()
+ if Attr not in RetVal[Tool]:
+ RetVal[Tool][Attr] = Value
+
+ ToolsDef = ''
+ if GlobalData.gOptions.SilentMode and "MAKE" in RetVal:
+ if "FLAGS" not in RetVal["MAKE"]:
+ RetVal["MAKE"]["FLAGS"] = ""
+ RetVal["MAKE"]["FLAGS"] += " -s"
+ MakeFlags = ''
+
+ ToolList = list(RetVal.keys())
+ ToolList.sort()
+ for Tool in ToolList:
+ if Tool == TAB_STAR:
+ continue
+ AttrList = list(RetVal[Tool].keys())
+ if TAB_STAR in ToolList:
+ AttrList += list(RetVal[TAB_STAR])
+ AttrList.sort()
+ for Attr in AttrList:
+ if Attr in RetVal[Tool]:
+ Value = RetVal[Tool][Attr]
+ else:
+ Value = RetVal[TAB_STAR][Attr]
+ if Tool in self._BuildOptionWithToolDef(RetVal) and Attr in self._BuildOptionWithToolDef(RetVal)[Tool]:
+ # check if override is indicated
+ if self._BuildOptionWithToolDef(RetVal)[Tool][Attr].startswith('='):
+ Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr][1:].strip()
+ else:
+ # Do not append PATH or GUID
+ if Attr != 'PATH' and Attr != 'GUID':
+ Value += " " + self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
+ else:
+ Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
+ if Attr == "PATH":
+ # Don't put MAKE definition in the file
+ if Tool != "MAKE":
+ ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
+ elif Attr != "DLL":
+ # Don't put MAKE definition in the file
+ if Tool == "MAKE":
+ if Attr == "FLAGS":
+ MakeFlags = Value
+ else:
+ ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
+ ToolsDef += "\n"
+
+ tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
+ SaveFileOnChange(tool_def_file, ToolsDef, False)
+ for DllPath in DllPathList:
+ os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
+ os.environ["MAKE_FLAGS"] = MakeFlags
+
+ return RetVal
+
+ ## Return the paths of tools
+ @cached_property
+ def ToolDefinitionFile(self):
+ tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
+ if not os.path.exists(tool_def_file):
+ self.ToolDefinition
+ return tool_def_file
+
+ ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
+ @cached_property
+ def ToolChainFamily(self):
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
+ or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:
+ EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
+ % self.ToolChain)
+ RetVal = TAB_COMPILER_MSFT
+ else:
+ RetVal = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]
+ return RetVal
+
+ @cached_property
+ def BuildRuleFamily(self):
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
+ or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:
+ EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
+ % self.ToolChain)
+ return TAB_COMPILER_MSFT
+
+ return ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
+
+ ## Return the build options specific for all modules in this platform
+ @cached_property
+ def BuildOption(self):
+ return self._ExpandBuildOption(self.Platform.BuildOptions)
+
+ def _BuildOptionWithToolDef(self, ToolDef):
+ return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)
+
+ ## Return the build options specific for EDK modules in this platform
+ @cached_property
+ def EdkBuildOption(self):
+ return self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
+
+ ## Return the build options specific for EDKII modules in this platform
+ @cached_property
+ def EdkIIBuildOption(self):
+ return self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
+
+ ## Parse build_rule.txt in Conf Directory.
+ #
+ # @retval BuildRule object
+ #
+ @cached_property
+ def BuildRule(self):
+ BuildRuleFile = None
+ if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
+ BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
+ if not BuildRuleFile:
+ BuildRuleFile = gDefaultBuildRuleFile
+ RetVal = BuildRule(BuildRuleFile)
+ if RetVal._FileVersion == "":
+ RetVal._FileVersion = AutoGenReqBuildRuleVerNum
+ else:
+ if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :
+ # If Build Rule's version is less than the version number required by the tools, halting the build.
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
+ % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
+ return RetVal
+
+ ## Summarize the packages used by modules in this platform
+ @cached_property
+ def PackageList(self):
+ RetVal = set()
+ for Mb in self._MbList:
+ RetVal.update(Mb.Packages)
+ for lb in Mb.LibInstances:
+ RetVal.update(lb.Packages)
+ #Collect package set information from INF of FDF
+ for ModuleFile in self._AsBuildModuleList:
+ if ModuleFile in self.Platform.Modules:
+ continue
+ ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]
+ RetVal.update(ModuleData.Packages)
+ RetVal.update(self.Platform.Packages)
+ return list(RetVal)
+
+ @cached_property
+ def NonDynamicPcdDict(self):
+ return {(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):Pcd for Pcd in self.NonDynamicPcdList}
+
+ ## Get list of non-dynamic PCDs
+ @property
+ def NonDynamicPcdList(self):
+ if not self._NonDynamicPcdList:
+ self.CollectPlatformDynamicPcds()
+ return self._NonDynamicPcdList
+
+ ## Get list of dynamic PCDs
+ @property
+ def DynamicPcdList(self):
+ if not self._DynamicPcdList:
+ self.CollectPlatformDynamicPcds()
+ return self._DynamicPcdList
+
+ ## Generate Token Number for all PCD
+ @cached_property
+ def PcdTokenNumber(self):
+ RetVal = OrderedDict()
+ TokenNumber = 1
+ #
+ # Make the Dynamic and DynamicEx PCDs use different TokenNumber ranges.
+ # For example:
+ #
+ # Dynamic PCD:
+ # TokenNumber 0 ~ 10
+ # DynamicEx PCD:
+ # TokenNumber 11 ~ 20
+ #
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.NonDynamicPcdList:
+ RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = 0
+ return RetVal
+
+ @cached_property
+ def _MbList(self):
+ ModuleList = []
+ for m in self.Platform.Modules:
+ component = self.Platform.Modules[m]
+ module = self.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain]
+ module.Guid = component.Guid
+ ModuleList.append(module)
+ return ModuleList
+
+ @cached_property
+ def _MaList(self):
+ for ModuleFile in self.Platform.Modules:
+ Ma = ModuleAutoGen(
+ self.Workspace,
+ ModuleFile,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Arch,
+ self.MetaFile,
+ self.DataPipe
+ )
+ self.Platform.Modules[ModuleFile].M = Ma
+ return [x.M for x in self.Platform.Modules.values()]
+
+ ## Summarize ModuleAutoGen objects of all modules to be built for this platform
+ @cached_property
+ def ModuleAutoGenList(self):
+ RetVal = []
+ for Ma in self._MaList:
+ if Ma not in RetVal:
+ RetVal.append(Ma)
+ return RetVal
+
+ ## Summarize ModuleAutoGen objects of all libraries to be built for this platform
+ @cached_property
+ def LibraryAutoGenList(self):
+ RetVal = []
+ for Ma in self._MaList:
+ for La in Ma.LibraryAutoGenList:
+ if La not in RetVal:
+ RetVal.append(La)
+ if Ma not in La.ReferenceModules:
+ La.ReferenceModules.append(Ma)
+ return RetVal
+
+ ## Test if a module is supported by the platform
+ #
+    # Returns True if the module is listed in the platform as a component,
+    # as a library instance, or as an as-build module from the FDF
+ #
+ def ValidModule(self, Module):
+ return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
+ or Module in self._AsBuildModuleList
+ @cached_property
+ def GetAllModuleInfo(self,WithoutPcd=True):
+ ModuleLibs = set()
+ for m in self.Platform.Modules:
+ module_obj = self.BuildDatabase[m,self.Arch,self.BuildTarget,self.ToolChain]
+ if not bool(module_obj.LibraryClass):
+ Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain,self.MetaFile,EdkLogger)
+ else:
+ Libs = []
+ ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.MetaFile.Path,l.MetaFile.BaseName,l.MetaFile.OriginalPath,l.Arch,True) for l in Libs]))
+ if WithoutPcd and module_obj.PcdIsDriver:
+ continue
+ ModuleLibs.add((m.File,m.Root,m.Path,m.BaseName,m.OriginalPath,module_obj.Arch,bool(module_obj.LibraryClass)))
+
+ return ModuleLibs
+
+ ## Resolve the library classes in a module to library instances
+ #
+    # This method not only resolves library classes but also sorts the library
+    # instances according to their dependency relationships.
+ #
+ # @param Module The module from which the library classes will be resolved
+ #
+ # @retval library_list List of library instances sorted
+ #
+ def ApplyLibraryInstance(self, Module):
+        # Cover the case where the binary INF file is listed in the FDF file but not in the DSC file; return an empty list directly
+ if str(Module) not in self.Platform.Modules:
+ return []
+
+ return GetModuleLibInstances(Module,
+ self.Platform,
+ self.BuildDatabase,
+ self.Arch,
+ self.BuildTarget,
+ self.ToolChain,
+ self.MetaFile,
+ EdkLogger)
+
+ ## Override PCD setting (type, value, ...)
+ #
+ # @param ToPcd The PCD to be overridden
+ # @param FromPcd The PCD overriding from
+ #
+ def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
+ #
+        # In case there are PCDs coming from the FDF file which have no type given,
+        # ToPcd.Type at this point holds the type found in the dependent
+        # package
+ #
+ TokenCName = ToPcd.TokenCName
+ for PcdItem in GlobalData.MixedPcd:
+ if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ TokenCName = PcdItem[0]
+ break
+ if FromPcd is not None:
+ if ToPcd.Pending and FromPcd.Type:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type\
+ and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
+ if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type and FromPcd.Type \
+ and ToPcd.Type != FromPcd.Type:
+ if Library:
+                    Module = str(Module) + "'s library file (" + str(Library) + ")"
+ EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
+ ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
+ % (ToPcd.TokenSpaceGuidCName, TokenCName,
+ ToPcd.Type, Module, FromPcd.Type, Msg),
+ File=self.MetaFile)
+
+ if FromPcd.MaxDatumSize:
+ ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
+ ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
+ if FromPcd.DefaultValue:
+ ToPcd.DefaultValue = FromPcd.DefaultValue
+ if FromPcd.TokenValue:
+ ToPcd.TokenValue = FromPcd.TokenValue
+ if FromPcd.DatumType:
+ ToPcd.DatumType = FromPcd.DatumType
+ if FromPcd.SkuInfoList:
+ ToPcd.SkuInfoList = FromPcd.SkuInfoList
+ if FromPcd.UserDefinedDefaultStoresFlag:
+ ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
+ # Add Flexible PCD format parse
+ if ToPcd.DefaultValue:
+ try:
+ ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self.Platform._GuidDict)(True)
+ except BadExpression as Value:
+ EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
+ File=self.MetaFile)
+
+            # Check the validity of the datum
+ IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
+ if not IsValid:
+ EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
+ ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
+ ToPcd.validateranges = FromPcd.validateranges
+ ToPcd.validlists = FromPcd.validlists
+ ToPcd.expressions = FromPcd.expressions
+ ToPcd.CustomAttribute = FromPcd.CustomAttribute
+
+ if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
+ % (ToPcd.TokenSpaceGuidCName, TokenCName))
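+            #
+            # Infer MaxDatumSize from the default value. Illustrative sizes derived
+            # from the rules below: L"ab" -> (5 - 2) * 2 = 6 bytes (two UCS-2 chars
+            # plus a NUL), {0x1,0x2} -> 2 bytes (one per item), and "ab" ->
+            # 4 - 1 = 3 bytes (two ASCII chars plus a NUL).
+            #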
+ Value = ToPcd.DefaultValue
+ if not Value:
+ ToPcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ ToPcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ ToPcd.MaxDatumSize = str(len(Value) - 1)
+
+        # Apply the default SKU for dynamic PCDs if the specified one is not available
+ if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
+ and not ToPcd.SkuInfoList:
+ if self.Platform.SkuName in self.Platform.SkuIds:
+ SkuName = self.Platform.SkuName
+ else:
+ SkuName = TAB_DEFAULT
+ ToPcd.SkuInfoList = {
+ SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
+ }
+
+    ## Apply PCD settings defined in the platform to a module
+    #
+    # @param Module The module to which the platform PCD settings will be applied
+    #
+    # @retval PCD_list The list of PCDs with settings from the platform
+ #
+ def ApplyPcdSetting(self, Module, Pcds, Library=""):
+ # for each PCD in module
+ for Name, Guid in Pcds:
+ PcdInModule = Pcds[Name, Guid]
+ # find out the PCD setting in platform
+ if (Name, Guid) in self.Platform.Pcds:
+ PcdInPlatform = self.Platform.Pcds[Name, Guid]
+ else:
+ PcdInPlatform = None
+ # then override the settings if any
+ self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
+ # resolve the VariableGuid value
+ for SkuId in PcdInModule.SkuInfoList:
+ Sku = PcdInModule.SkuInfoList[SkuId]
+ if Sku.VariableGuid == '': continue
+ Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
+ if Sku.VariableGuidValue is None:
+ PackageList = "\n\t".join(str(P) for P in self.PackageList)
+ EdkLogger.error(
+ 'build',
+ RESOURCE_NOT_AVAILABLE,
+ "Value of GUID [%s] is not found in" % Sku.VariableGuid,
+ ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
+ % (Guid, Name, str(Module)),
+ File=self.MetaFile
+ )
+
+ # override PCD settings with module specific setting
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ for Key in PlatformModule.Pcds:
+ if GlobalData.BuildOptionPcd:
+ for pcd in GlobalData.BuildOptionPcd:
+ (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
+ if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
+ PlatformModule.Pcds[Key].DefaultValue = pcdvalue
+ PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
+ break
+ Flag = False
+ if Key in Pcds:
+ ToPcd = Pcds[Key]
+ Flag = True
+ elif Key in GlobalData.MixedPcd:
+ for PcdItem in GlobalData.MixedPcd[Key]:
+ if PcdItem in Pcds:
+ ToPcd = Pcds[PcdItem]
+ Flag = True
+ break
+ if Flag:
+ self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
+ # use PCD value to calculate the MaxDatumSize when it is not specified
+ for Name, Guid in Pcds:
+ Pcd = Pcds[Name, Guid]
+ if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
+ Pcd.MaxSizeUserSet = None
+ Value = Pcd.DefaultValue
+ if not Value:
+ Pcd.MaxDatumSize = '1'
+ elif Value[0] == 'L':
+ Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
+ elif Value[0] == '{':
+ Pcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ Pcd.MaxDatumSize = str(len(Value) - 1)
+ return list(Pcds.values())
+
+ ## Append build options in platform to a module
+ #
+ # @param Module The module to which the build options will be appended
+ #
+ # @retval options The options appended with build options in platform
+ #
+ def ApplyBuildOption(self, Module):
+ # Get the different options for the different style module
+ PlatformOptions = self.EdkIIBuildOption
+ ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
+ ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
+ ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
+ else:
+ PlatformModuleOptions = {}
+
+ BuildRuleOrder = None
+ for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ for Tool in Options:
+ for Attr in Options[Tool]:
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ BuildRuleOrder = Options[Tool][Attr]
+
+ AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
+ list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
+ list(self.ToolDefinition.keys()))
+ BuildOptions = defaultdict(lambda: defaultdict(str))
+ for Tool in AllTools:
+ for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
+ if Tool not in Options:
+ continue
+ for Attr in Options[Tool]:
+ #
+ # Do not generate it in Makefile
+ #
+ if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
+ continue
+ Value = Options[Tool][Attr]
+ ToolList = [Tool]
+ if Tool == TAB_STAR:
+ ToolList = list(AllTools)
+ ToolList.remove(TAB_STAR)
+ for ExpandedTool in ToolList:
+ # check if override is indicated
+ if Value.startswith('='):
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value[1:])
+ else:
+ if Attr != 'PATH':
+ BuildOptions[ExpandedTool][Attr] += " " + mws.handleWsMacro(Value)
+ else:
+ BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value)
+
+ return BuildOptions, BuildRuleOrder
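+    # A note on the merge above (illustrative): options are applied in the order
+    # tool definition, module INF, platform DSC [BuildOptions], module-type
+    # options, then per-module DSC overrides. A value starting with '=' replaces
+    # whatever has been accumulated for that tool/attribute (e.g. "=-O2" discards
+    # earlier CC flags), while any other value is appended, except for PATH,
+    # which is always replaced.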
+
+
+ def GetGlobalBuildOptions(self,Module):
+ ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
+ ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
+
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
+ else:
+ PlatformModuleOptions = {}
+
+ return ModuleTypeOptions,PlatformModuleOptions
+ def ModuleGuid(self,Module):
+ if os.path.basename(Module.MetaFile.File) != os.path.basename(Module.MetaFile.Path):
+            #
+            # The first 36 characters of the file name hold the module GUID
+            # (a GUID string in 8-4-4-4-12 format is 36 characters long)
+            #
+ return os.path.basename(Module.MetaFile.Path)[:36]
+ return Module.Guid
+ @cached_property
+ def UniqueBaseName(self):
+ retVal ={}
+ ModuleNameDict = {}
+ UniqueName = {}
+ for Module in self._MbList:
+ unique_base_name = '%s_%s' % (Module.BaseName,self.ModuleGuid(Module))
+ if unique_base_name not in ModuleNameDict:
+ ModuleNameDict[unique_base_name] = []
+ ModuleNameDict[unique_base_name].append(Module.MetaFile)
+ if Module.BaseName not in UniqueName:
+ UniqueName[Module.BaseName] = set()
+ UniqueName[Module.BaseName].add((self.ModuleGuid(Module),Module.MetaFile))
+ for module_paths in ModuleNameDict.values():
+ if len(set(module_paths))>1:
+ samemodules = list(set(module_paths))
+ EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
+ ' %s\n %s' % (samemodules[0], samemodules[1]))
+ for name in UniqueName:
+ Guid_Path = UniqueName[name]
+ if len(Guid_Path) > 1:
+ for guid,mpath in Guid_Path:
+ retVal[(name,mpath)] = '%s_%s' % (name,guid)
+ return retVal
+ ## Expand * in build option key
+ #
+ # @param Options Options to be expanded
+ # @param ToolDef Use specified ToolDef instead of full version.
+ # This is needed during initialization to prevent
+    #                           infinite recursion between BuildOptions,
+ # ToolDefinition, and this function.
+ #
+ # @retval options Options expanded
+ #
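+    # Option keys look like (Family, 'TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE'
+    # [, ModuleStyle]). For example ('MSFT', 'DEBUG_*_IA32_CC_FLAGS') applies only
+    # when the build target is DEBUG and the arch is IA32, with TAB_STAR ('*')
+    # matching any value. (The key shown is illustrative, not from a real DSC.)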
+ def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
+ if not ToolDef:
+ ToolDef = self.ToolDefinition
+ BuildOptions = {}
+ FamilyMatch = False
+ FamilyIsNull = True
+
+ OverrideList = {}
+ #
+ # Construct a list contain the build options which need override.
+ #
+ for Key in Options:
+ #
+ # Key[0] -- tool family
+ # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ #
+ if (Key[0] == self.BuildRuleFamily and
+ (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
+ Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
+ if (Target == self.BuildTarget or Target == TAB_STAR) and\
+ (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
+ (Arch == self.Arch or Arch == TAB_STAR) and\
+ Options[Key].startswith("="):
+
+ if OverrideList.get(Key[1]) is not None:
+ OverrideList.pop(Key[1])
+ OverrideList[Key[1]] = Options[Key]
+
+ #
+ # Use the highest priority value.
+ #
+ if (len(OverrideList) >= 2):
+ KeyList = list(OverrideList.keys())
+ for Index in range(len(KeyList)):
+ NowKey = KeyList[Index]
+ Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
+ for Index1 in range(len(KeyList) - Index - 1):
+ NextKey = KeyList[Index1 + Index + 1]
+ #
+                    # Compare the two keys; if one is subsumed by the other, keep the higher-priority one
+ #
+ Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
+ if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
+ (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
+ (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
+ (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
+ (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):
+
+ if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
+ if Options.get((self.BuildRuleFamily, NextKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NextKey))
+ else:
+ if Options.get((self.BuildRuleFamily, NowKey)) is not None:
+ Options.pop((self.BuildRuleFamily, NowKey))
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+                # Check whether the module style is EDK or EDKII;
+                # only append build options for modules of the matching style.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family != "":
+ Found = False
+ if Tool in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if TAB_STAR in ToolDef:
+ FamilyIsNull = False
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ FamilyMatch = True
+ Found = True
+ if not Found:
+ continue
+
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+        # The build option family has already been matched above; it needn't be checked again.
+ if FamilyMatch or FamilyIsNull:
+ return BuildOptions
+
+ for Key in Options:
+ if ModuleStyle is not None and len (Key) > 2:
+                # Check whether the module style is EDK or EDKII;
+                # only append build options for modules of the matching style.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Family == "":
+ continue
+ # option has been added before
+ Found = False
+ if Tool in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[Tool]:
+ if Family == ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if TAB_STAR in ToolDef:
+ if TAB_TOD_DEFINES_FAMILY in ToolDef[TAB_STAR]:
+ if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_FAMILY]:
+ Found = True
+ if not Found:
+ continue
+
+ # expand any wildcard
+ if Target == TAB_STAR or Target == self.BuildTarget:
+ if Tag == TAB_STAR or Tag == self.ToolChain:
+ if Arch == TAB_STAR or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool except PATH
+ if Attr != 'PATH':
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ else:
+ BuildOptions[Tool][Attr] = Options[Key]
+ return BuildOptions
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py
new file mode 100755
index 00000000..895e2a75
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -0,0 +1,630 @@
+## @file
+# This file is used to parse a strings file and create or add to a string database
+# file.
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import absolute_import
+import re
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from .UniClassObject import *
+from io import BytesIO
+from struct import pack, unpack
+from Common.LongFilePathSupport import OpenLongFilePath as open
+
+##
+# Static definitions
+#
+EFI_HII_SIBT_END = '0x00'
+EFI_HII_SIBT_STRING_SCSU = '0x10'
+EFI_HII_SIBT_STRING_SCSU_FONT = '0x11'
+EFI_HII_SIBT_STRINGS_SCSU = '0x12'
+EFI_HII_SIBT_STRINGS_SCSU_FONT = '0x13'
+EFI_HII_SIBT_STRING_UCS2 = '0x14'
+EFI_HII_SIBT_STRING_UCS2_FONT = '0x15'
+EFI_HII_SIBT_STRINGS_UCS2 = '0x16'
+EFI_HII_SIBT_STRINGS_UCS2_FONT = '0x17'
+EFI_HII_SIBT_DUPLICATE = '0x20'
+EFI_HII_SIBT_SKIP2 = '0x21'
+EFI_HII_SIBT_SKIP1 = '0x22'
+EFI_HII_SIBT_EXT1 = '0x30'
+EFI_HII_SIBT_EXT2 = '0x31'
+EFI_HII_SIBT_EXT4 = '0x32'
+EFI_HII_SIBT_FONT = '0x40'
+
+EFI_HII_PACKAGE_STRINGS = '0x04'
+EFI_HII_PACKAGE_FORM = '0x02'
+
+StringPackageType = EFI_HII_PACKAGE_STRINGS
+StringPackageForm = EFI_HII_PACKAGE_FORM
+StringBlockType = EFI_HII_SIBT_STRING_UCS2
+StringSkipType = EFI_HII_SIBT_SKIP2
+
+HexHeader = '0x'
+
+COMMENT = '// '
+DEFINE_STR = '#define'
+COMMENT_DEFINE_STR = COMMENT + DEFINE_STR
+NOT_REFERENCED = 'not referenced'
+COMMENT_NOT_REFERENCED = ' ' + COMMENT + NOT_REFERENCED
+CHAR_ARRAY_DEFIN = 'unsigned char'
+COMMON_FILE_NAME = 'Strings'
+STRING_TOKEN = re.compile(r'STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
+
+EFI_HII_ARRAY_SIZE_LENGTH = 4
+EFI_HII_PACKAGE_HEADER_LENGTH = 4
+EFI_HII_HDR_SIZE_LENGTH = 4
+EFI_HII_STRING_OFFSET_LENGTH = 4
+EFI_STRING_ID = 1
+EFI_STRING_ID_LENGTH = 2
+EFI_HII_LANGUAGE_WINDOW = 0
+EFI_HII_LANGUAGE_WINDOW_LENGTH = 2
+EFI_HII_LANGUAGE_WINDOW_NUMBER = 16
+EFI_HII_STRING_PACKAGE_HDR_LENGTH = EFI_HII_PACKAGE_HEADER_LENGTH + EFI_HII_HDR_SIZE_LENGTH + EFI_HII_STRING_OFFSET_LENGTH + EFI_HII_LANGUAGE_WINDOW_LENGTH * EFI_HII_LANGUAGE_WINDOW_NUMBER + EFI_STRING_ID_LENGTH
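+# = 4 + 4 + 4 + (2 * 16) + 2 = 46 bytes: the fixed-size part of the string
+# package header; the variable-length language name (len(Language) + 1 bytes)
+# is added separately when the package offset is computed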
+
+H_C_FILE_HEADER = ['//', \
+ '// DO NOT EDIT -- auto-generated file', \
+ '//', \
+ '// This file is generated by the StrGather utility', \
+ '//']
+LANGUAGE_NAME_STRING_NAME = '$LANGUAGE_NAME'
+PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
+
+## Convert a decimal number to a hex string
+#
+# Convert a decimal number to a hex string padded to the requested
+# number of digits (default 8)
+# The hex string starts with "0x"
+# DecToHexStr(1000) is '0x000003E8'
+# DecToHexStr(1000, 6) is '0x0003E8'
+#
+# @param Dec: The number in decimal format
+# @param Digit: The required number of hex digits
+#
+# @retval: The formatted hex string
+#
+def DecToHexStr(Dec, Digit = 8):
+ return '0x{0:0{1}X}'.format(Dec, Digit)
+
+## Convert a decimal number to a hex list
+#
+# Convert a decimal number to a list of byte-sized hex strings, least
+# significant byte first, padded to the requested number of digits (default 8)
+# DecToHexList(1000) is ['0xE8', '0x03', '0x00', '0x00']
+# DecToHexList(1000, 6) is ['0xE8', '0x03', '0x00']
+#
+# @param Dec: The number in decimal format
+# @param Digit: The required number of hex digits
+#
+# @retval: A list for formatted hex string
+#
+def DecToHexList(Dec, Digit = 8):
+ Hex = '{0:0{1}X}'.format(Dec, Digit)
+ return ["0x" + Hex[Bit:Bit + 2] for Bit in range(Digit - 2, -1, -2)]
+
+## Convert an ASCII string to a hex list
+#
+# Convert an ASCII string to a formatted hex list
+# AscToHexList('en-US') is ['0x65', '0x6E', '0x2D', '0x55', '0x53']
+#
+# @param Ascii: The ASCII string
+#
+# @retval: A list for formatted hex string
+#
+def AscToHexList(Ascii):
+    try:
+        # bytes input: items are already integer byte values
+        return ['0x{0:02X}'.format(Item) for Item in Ascii]
+    except:
+        # str input: convert each character to its code point first
+        return ['0x{0:02X}'.format(ord(Item)) for Item in Ascii]
+
+## Create content of .h file
+#
+# Create content of .h file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+#
+# @retval Str: A string of .h file content
+#
+def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
+ Str = []
+ ValueStartPtr = 60
+ Line = COMMENT_DEFINE_STR + ' ' + LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(0, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+ Line = COMMENT_DEFINE_STR + ' ' + PRINTABLE_LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + PRINTABLE_LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(1, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+ UnusedStr = ''
+
+    # Group the referenced and unreferenced STRING tokens together.
+ for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
+ StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
+ Name = StringItem.StringName
+ Token = StringItem.Token
+ Referenced = StringItem.Referenced
+ if Name is not None:
+ Line = ''
+            if Referenced:
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4)
+ else:
+ Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
+ Str = WriteLine(Str, Line)
+ else:
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ else:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ UnusedStr = WriteLine(UnusedStr, Line)
+
+    Str.extend(UnusedStr)
+
+ Str = WriteLine(Str, '')
+ if IsCompatibleMode or UniGenCFlag:
+ Str = WriteLine(Str, 'extern unsigned char ' + BaseName + 'Strings[];')
+ return "".join(Str)
+
+## Create a complete .h file
+#
+# Create a complete .h file with file header and file content
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+#
+# @retval Str: A string of complete .h file
+#
+def CreateHFile(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
+ HFile = WriteLine('', CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag))
+
+ return "".join(HFile)
+
+## Create a buffer to store all items in an array
+#
+# @param BinBuffer Buffer to contain Binary data.
+# @param Array: The array of hex byte strings to be packed
+#
+def CreateBinBuffer(BinBuffer, Array):
+ for Item in Array:
+ BinBuffer.write(pack("B", int(Item, 16)))
+
+## Create a formatted string of all items in an array
+#
+# Join the items with ',' and break to a new line after reaching the width (default is 16 items per line)
+#
+# @param Array: The array to be formatted
+# @param Width: The maximum number of items per line, 16 by default
+#
+# @retval ArrayItem: A string for all formatted array items
+#
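+# For example (illustrative), CreateArrayItem(['0x01', '0x02']) joins the items
+# as ' 0x01, 0x02,' on one indented line, and a 20-item array wraps onto a
+# second line after the 16th item.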
+def CreateArrayItem(Array, Width = 16):
+ MaxLength = Width
+ Index = 0
+ Line = ' '
+ ArrayItem = []
+
+ for Item in Array:
+ if Index < MaxLength:
+ Line = Line + Item + ', '
+ Index = Index + 1
+ else:
+ ArrayItem = WriteLine(ArrayItem, Line)
+ Line = ' ' + Item + ', '
+ Index = 1
+ ArrayItem = Write(ArrayItem, Line.rstrip())
+
+ return "".join(ArrayItem)
+
+## CreateCFileStringValue
+#
+# Create a line with string value
+#
+# @param Value: Value of the string
+#
+# @retval Str: A formatted string with string value
+#
+
+def CreateCFileStringValue(Value):
+ Value = [StringBlockType] + Value
+ Str = WriteLine('', CreateArrayItem(Value))
+
+ return "".join(Str)
+
+## GetFilteredLanguage
+#
+# Apply the "get best language" rules to the UNI language code list
+#
+# @param UniLanguageList: language code definition list in *.UNI file
+# @param LanguageFilterList: language code filter list of RFC4646 format in DSC file
+#
+# @retval UniLanguageListFiltered: the filtered language code
+#
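+# For example, with UniLanguageList ['en-US', 'fr-FR'] and LanguageFilterList
+# ['en'], no exact match exists, but the shared primary tag 'en' selects
+# ['en-US']; an empty filter list returns the input list unchanged.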
+def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
+ UniLanguageListFiltered = []
+ # if filter list is empty, then consider there is no filter
+ if LanguageFilterList == []:
+ UniLanguageListFiltered = UniLanguageList
+ return UniLanguageListFiltered
+ for Language in LanguageFilterList:
+ # first check for exact match
+ if Language in UniLanguageList:
+ if Language not in UniLanguageListFiltered:
+ UniLanguageListFiltered.append(Language)
+ # find the first one with the same/equivalent primary tag
+ else:
+ if Language.find('-') != -1:
+ PrimaryTag = Language[0:Language.find('-')].lower()
+ else:
+ PrimaryTag = Language
+
+ if len(PrimaryTag) == 3:
+ PrimaryTag = LangConvTable.get(PrimaryTag)
+
+ for UniLanguage in UniLanguageList:
+ if UniLanguage.find('-') != -1:
+ UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
+ else:
+ UniLanguagePrimaryTag = UniLanguage
+
+ if len(UniLanguagePrimaryTag) == 3:
+ UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
+
+ if PrimaryTag == UniLanguagePrimaryTag:
+ if UniLanguage not in UniLanguageListFiltered:
+ UniLanguageListFiltered.append(UniLanguage)
+ break
+ else:
+        # Rule 3 of "get best language": if the tag is not listed in the
+        # Unicode file, the default ("en") tag should be used for that language;
+        # prefer an existing language whose primary tag is equivalent to "en".
+ DefaultTag = 'en'
+ if DefaultTag not in UniLanguageListFiltered:
+            # Check whether a language code whose primary tag is equivalent to DefaultTag is already in the list; if so, use it
+ for UniLanguage in UniLanguageList:
+ if UniLanguage.startswith('en-') or UniLanguage.startswith('eng-'):
+ if UniLanguage not in UniLanguageListFiltered:
+ UniLanguageListFiltered.append(UniLanguage)
+ break
+ else:
+ UniLanguageListFiltered.append(DefaultTag)
+ return UniLanguageListFiltered
+
+
+## Create content of .c file
+#
+# Create content of .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
+# @param FilterInfo Platform language filter information
+#
+# @retval Str: A string of .c file content
+#
+def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer, FilterInfo):
+ #
+ # Init array length
+ #
+ TotalLength = EFI_HII_ARRAY_SIZE_LENGTH
+ Str = ''
+ Offset = 0
+
+ EDK2Module = FilterInfo[0]
+ if EDK2Module:
+ LanguageFilterList = FilterInfo[1]
+ else:
+        # EDK modules use ISO 639-2 format filters; convert them to the RFC 4646 format
+ LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
+
+ UniLanguageList = []
+ for IndexI in range(len(UniObjectClass.LanguageDef)):
+ UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
+
+ UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
+
+
+ #
+ # Create lines for each language's strings
+ #
+ for IndexI in range(len(UniObjectClass.LanguageDef)):
+ Language = UniObjectClass.LanguageDef[IndexI][0]
+ if Language not in UniLanguageListFiltered:
+ continue
+
+ StringBuffer = BytesIO()
+ StrStringValue = ''
+ ArrayLength = 0
+ NumberOfUseOtherLangDef = 0
+ Index = 0
+ for IndexJ in range(1, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[IndexI][0]])):
+ Item = UniObjectClass.OrderedStringListByToken[Language][IndexJ]
+
+ Name = Item.StringName
+ Value = Item.StringValueByteList
+ Referenced = Item.Referenced
+ Token = Item.Token
+ UseOtherLangDef = Item.UseOtherLangDef
+
+ if UseOtherLangDef != '' and Referenced:
+ NumberOfUseOtherLangDef = NumberOfUseOtherLangDef + 1
+ Index = Index + 1
+ else:
+ if NumberOfUseOtherLangDef > 0:
+ StrStringValue = WriteLine(StrStringValue, CreateArrayItem([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
+ CreateBinBuffer (StringBuffer, ([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
+ NumberOfUseOtherLangDef = 0
+ ArrayLength = ArrayLength + 3
+ if Referenced and Item.Token > 0:
+ Index = Index + 1
+ StrStringValue = WriteLine(StrStringValue, "// %s: %s:%s" % (DecToHexStr(Index, 4), Name, DecToHexStr(Token, 4)))
+ StrStringValue = Write(StrStringValue, CreateCFileStringValue(Value))
+ CreateBinBuffer (StringBuffer, [StringBlockType] + Value)
+                    ArrayLength = ArrayLength + Item.Length + 1 # 1 is for the string block type byte
+
+ #
+ # EFI_HII_PACKAGE_HEADER
+ #
+ Offset = EFI_HII_STRING_PACKAGE_HDR_LENGTH + len(Language) + 1
+ ArrayLength = Offset + ArrayLength + 1
+
+ #
+ # Create PACKAGE HEADER
+ #
+ Str = WriteLine(Str, '// PACKAGE HEADER\n')
+ TotalLength = TotalLength + ArrayLength
+
+ List = DecToHexList(ArrayLength, 6) + \
+ [StringPackageType] + \
+ DecToHexList(Offset) + \
+ DecToHexList(Offset) + \
+ DecToHexList(EFI_HII_LANGUAGE_WINDOW, EFI_HII_LANGUAGE_WINDOW_LENGTH * 2) * EFI_HII_LANGUAGE_WINDOW_NUMBER + \
+ DecToHexList(EFI_STRING_ID, 4) + \
+ AscToHexList(Language) + \
+ DecToHexList(0, 2)
+ Str = WriteLine(Str, CreateArrayItem(List, 16) + '\n')
+
+ #
+ # Create PACKAGE DATA
+ #
+ Str = WriteLine(Str, '// PACKAGE DATA\n')
+ Str = Write(Str, StrStringValue)
+
+ #
+ # Add an EFI_HII_SIBT_END at last
+ #
+ Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
+
+ #
+ # Create binary UNI string
+ #
+ if UniBinBuffer:
+ CreateBinBuffer (UniBinBuffer, List)
+ UniBinBuffer.write (StringBuffer.getvalue())
+ UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
+ StringBuffer.close()
+
+ #
+ # Create line for string variable name
+ # "unsigned char $(BaseName)Strings[] = {"
+ #
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')
+
+ if IsCompatibleMode:
+ #
+ # Create FRAMEWORK_EFI_HII_PACK_HEADER in compatible mode
+ #
+ AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Length')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength + 2)) + '\n')
+ AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Type')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(2, 4)) + '\n')
+ else:
+ #
+ # Create whole array length in UEFI mode
+ #
+ AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
+
+ #
+ # Join package data
+ #
+ AllStr = Write(AllStr, Str)
+
+ return "".join(AllStr)
+
+## Create end of .c file
+#
+# Create end of .c file
+#
+# @retval Str: A string of .h file end
+#
+def CreateCFileEnd():
+ Str = Write('', '};')
+ return Str
+
+## Create a .c file
+#
+# Create a complete .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible Mode
+# @param FilterInfo Platform language filter information
+#
+# @retval CFile: A string of complete .c file
+#
+def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
+ CFile = ''
+ CFile = WriteLine(CFile, CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, None, FilterInfo))
+ CFile = WriteLine(CFile, CreateCFileEnd())
+ return "".join(CFile)
+
+## GetFileList
+#
+# Get a list for all files
+#
+# @param IncludeList: A list of all paths to be searched
+# @param SkipList: A list of file extensions to be skipped
+#
+# @retval FileList: A list of all files found
+#
+def GetFileList(SourceFileList, IncludeList, SkipList):
+ if IncludeList is None:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
+
+ FileList = []
+ if SkipList is None:
+ SkipList = []
+
+ for File in SourceFileList:
+ for Dir in IncludeList:
+ if not os.path.exists(Dir):
+ continue
+ File = os.path.join(Dir, File.Path)
+ #
+ # Ignore Dir
+ #
+            if not os.path.isfile(File):
+ continue
+ #
+ # Ignore file listed in skip list
+ #
+ IsSkip = False
+ for Skip in SkipList:
+ if os.path.splitext(File)[1].upper() == Skip.upper():
+ EdkLogger.verbose("Skipped %s for string token uses search" % File)
+ IsSkip = True
+ break
+
+ if not IsSkip:
+ FileList.append(File)
+
+ break
+
+ return FileList
+
+## SearchString
+#
+# Search whether all strings defined in UniObjectClass are referenced;
+# every string found in use is marked as Referenced
+#
+# @param UniObjectClass: Input UniObjectClass
+# @param FileList: Search path list
+# @param IsCompatibleMode Compatible Mode
+#
+# @retval UniObjectClass: UniObjectClass after searched
+#
+def SearchString(UniObjectClass, FileList, IsCompatibleMode):
+ if FileList == []:
+ return UniObjectClass
+
+ for File in FileList:
+ try:
+            if os.path.isfile(File):
+                # Use a context manager so the file handle is always closed
+                with open(File, 'r') as Lines:
+                    for Line in Lines:
+                        for StrName in STRING_TOKEN.findall(Line):
+                            EdkLogger.debug(EdkLogger.DEBUG_5, "Found string identifier: " + StrName)
+                            UniObjectClass.SetStringReferenced(StrName)
+ except:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "SearchString: Error while processing file", File=File, RaiseError=False)
+ raise
+
+ UniObjectClass.ReToken()
+
+ return UniObjectClass
+
+## GetStringFiles
+#
+# This function is used for the UEFI 2.1 spec
+#
+#
+def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
+ if len(UniFilList) > 0:
+ if ShellMode:
+ #
+ # support ISO 639-2 codes in .UNI files of EDK Shell
+ #
+ Uni = UniFileClassObject(sorted(UniFilList, key=lambda x: x.File), True, IncludePathList)
+ else:
+ Uni = UniFileClassObject(sorted(UniFilList, key=lambda x: x.File), IsCompatibleMode, IncludePathList)
+ else:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
+
+ FileList = GetFileList(SourceFileList, IncludeList, SkipList)
+
+ Uni = SearchString(Uni, sorted (FileList), IsCompatibleMode)
+
+ HFile = CreateHFile(BaseName, Uni, IsCompatibleMode, UniGenCFlag)
+ CFile = None
+ if IsCompatibleMode or UniGenCFlag:
+ CFile = CreateCFile(BaseName, Uni, IsCompatibleMode, FilterInfo)
+ if UniGenBinBuffer:
+ CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer, FilterInfo)
+
+ return HFile, CFile
+
+#
+# Write an item
+#
+def Write(Target, Item):
+ if isinstance(Target,str):
+ Target = [Target]
+ if not Target:
+ Target = []
+ if isinstance(Item,list):
+ Target.extend(Item)
+ else:
+ Target.append(Item)
+ return Target
+
+#
+# Write an item with a break line
+#
+def WriteLine(Target, Item):
+ if isinstance(Target,str):
+ Target = [Target]
+ if not Target:
+ Target = []
+ if isinstance(Item, list):
+ Target.extend(Item)
+ else:
+ Target.append(Item)
+ Target.append('\n')
+ return Target
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ EdkLogger.info('start')
+
+ UniFileList = [
+ r'C:\\Edk\\Strings2.uni',
+ r'C:\\Edk\\Strings.uni'
+ ]
+
+ SrcFileList = []
+ for Root, Dirs, Files in os.walk('C:\\Edk'):
+ for File in Files:
+ SrcFileList.append(File)
+
+ IncludeList = [
+ r'C:\\Edk'
+ ]
+
+ SkipList = ['.inf', '.uni']
+ BaseName = 'DriverSample'
+    (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, [], SkipList, BaseName, True)  # empty IncludePathList
+ hfile = open('unistring.h', 'w')
+ cfile = open('unistring.c', 'w')
+ hfile.write(h)
+ cfile.write(c)
+
+ EdkLogger.info('end')
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py
new file mode 100755
index 00000000..a13a27b5
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -0,0 +1,683 @@
+## @file
+# This file is used to collect all defined strings in multiple uni files
+#
+#
+# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.<BR>
+#
+# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+##
+# Import Modules
+#
+from __future__ import print_function
+import Common.LongFilePathOs as os, codecs, re
+import distutils.util
+import Common.EdkLogger as EdkLogger
+from io import BytesIO
+from Common.BuildToolError import *
+from Common.StringUtils import GetLineNo
+from Common.Misc import PathClass
+from Common.LongFilePathSupport import LongFilePath
+from Common.GlobalData import *
+##
+# Static definitions
+#
+UNICODE_WIDE_CHAR = u'\\wide'
+UNICODE_NARROW_CHAR = u'\\narrow'
+UNICODE_NON_BREAKING_CHAR = u'\\nbr'
+UNICODE_UNICODE_CR = '\r'
+UNICODE_UNICODE_LF = '\n'
+
+NARROW_CHAR = u'\uFFF0'
+WIDE_CHAR = u'\uFFF1'
+NON_BREAKING_CHAR = u'\uFFF2'
+CR = u'\u000D'
+LF = u'\u000A'
+NULL = u'\u0000'
+TAB = u'\t'
+BACK_SLASH_PLACEHOLDER = u'\u0006'
+
+gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
+
+## Convert a unicode string to a Hex list
+#
+# Convert a unicode string to a Hex list
+# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
+#
+# @param Uni: The python unicode string
+#
+# @retval List: The formatted hex list
+#
+def UniToHexList(Uni):
+ List = []
+ for Item in Uni:
+ Temp = '%04X' % ord(Item)
+ List.append('0x' + Temp[2:4])
+ List.append('0x' + Temp[0:2])
+ return List
+
+LangConvTable = {'eng':'en', 'fra':'fr', \
+ 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
+ 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
+ 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
+ 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
+ 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
+ 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
+ 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
+ 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
+ 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
+ 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
+ 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
+ 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
+ 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
+ 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
+ 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
+ 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
+ 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
+ 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
+ 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
+ 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
+ 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
+ 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
+ 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
+ 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
+ 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
+ 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
+ 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
+ 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
+ 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
+ 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
+ 'zho':'zh', 'zul':'zu'}
+
+## GetLanguageCode
+#
+# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
+# ISO 639-2 language codes supported in compatibility mode
+# RFC 4646 language codes supported in native mode
+#
+# @param LangName: Language codes read from .UNI file
+#
+# @retval LangName: Valid language code in RFC 4646 format or None
+#
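+# For example, GetLanguageCode('eng', True, File) returns 'en' (the ISO 639-2
+# code converted via LangConvTable), while GetLanguageCode('en-US', False, File)
+# returns 'en-US' unchanged.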
+def GetLanguageCode(LangName, IsCompatibleMode, File):
+ length = len(LangName)
+ if IsCompatibleMode:
+ if length == 3 and LangName.isalpha():
+ TempLangName = LangConvTable.get(LangName.lower())
+ if TempLangName is not None:
+ return TempLangName
+ return LangName
+ else:
+ EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid ISO 639-2 language code : %s" % LangName, File)
+
+ if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
+ return LangName
+ if length == 2:
+ if LangName.isalpha():
+ return LangName
+ elif length == 3:
+ if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
+ return LangName
+ elif length == 5:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ elif length >= 6:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
+ return LangName
+
+ EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
+
+## Ucs2Codec
+#
+# This is only a partial codec implementation. It only supports
+# encoding, and is primarily used to check that all the characters are
+# valid for UCS-2.
+#
+class Ucs2Codec(codecs.Codec):
+ def __init__(self):
+ self.__utf16 = codecs.lookup('utf-16')
+
+ def encode(self, input, errors='strict'):
+ for Char in input:
+ CodePoint = ord(Char)
+ if CodePoint >= 0xd800 and CodePoint <= 0xdfff:
+ raise ValueError("Code Point is in range reserved for " +
+ "UTF-16 surrogate pairs")
+ elif CodePoint > 0xffff:
+ raise ValueError("Code Point too large to encode in UCS-2")
+ return self.__utf16.encode(input)
+
+TheUcs2Codec = Ucs2Codec()
+def Ucs2Search(name):
+ if name in ['ucs-2', 'ucs_2']:
+ return codecs.CodecInfo(
+ name=name,
+ encode=TheUcs2Codec.encode,
+ decode=TheUcs2Codec.decode)
+ else:
+ return None
+codecs.register(Ucs2Search)
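+# A minimal usage sketch: once registered, codecs.encode(u'abc', 'ucs-2')
+# succeeds (delegating to the UTF-16 codec), while any code point outside the
+# Basic Multilingual Plane, e.g. codecs.encode(u'\U00010000', 'ucs-2'),
+# raises ValueError.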
+
+## StringDefClassObject
+#
+# A structure for language definition
+#
+class StringDefClassObject(object):
+ def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
+ self.StringName = ''
+ self.StringNameByteList = []
+ self.StringValue = ''
+ self.StringValueByteList = ''
+ self.Token = 0
+ self.Referenced = Referenced
+ self.UseOtherLangDef = UseOtherLangDef
+ self.Length = 0
+
+ if Name is not None:
+ self.StringName = Name
+ self.StringNameByteList = UniToHexList(Name)
+ if Value is not None:
+ self.StringValue = Value + u'\x00' # Add a NULL at string tail
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+ if Token is not None:
+ self.Token = Token
+
+ def __str__(self):
+ return repr(self.StringName) + ' ' + \
+ repr(self.Token) + ' ' + \
+ repr(self.Referenced) + ' ' + \
+ repr(self.StringValue) + ' ' + \
+ repr(self.UseOtherLangDef)
+
+ def UpdateValue(self, Value = None):
+ if Value is not None:
+ self.StringValue = Value + u'\x00' # Add a NULL at string tail
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+
+def StripComments(Line):
+ Comment = u'//'
+ CommentPos = Line.find(Comment)
+ while CommentPos >= 0:
+        # If there is an odd number of unescaped quotes before the comment
+        # marker, we are inside a string literal; ignore escaped quotes and
+        # backslashes and look for the next '//' instead.
+ if ((Line.count(u'"', 0, CommentPos) - Line.count(u'\\"', 0, CommentPos)) & 1) == 1:
+ CommentPos = Line.find (Comment, CommentPos + 1)
+ else:
+ return Line[:CommentPos].strip()
+ return Line.strip()
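+# For example, StripComments(u'#langdef en-US "English" // note') returns
+# u'#langdef en-US "English"', while in u'"see http://x" // note' the '//'
+# inside the quoted string is skipped and only the trailing comment is removed.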
+
+## UniFileClassObject
+#
+# A structure for .uni file definition
+#
+class UniFileClassObject(object):
+ def __init__(self, FileList = [], IsCompatibleMode = False, IncludePathList = []):
+ self.FileList = FileList
+ self.Token = 2
+ self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
+ self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
+ self.OrderedStringDict = {} #{ u'LanguageIdentifier' : {StringName:(IndexInList)} }
+ self.OrderedStringListByToken = {} #{ u'LanguageIdentifier' : {Token: StringDefClassObject} }
+ self.IsCompatibleMode = IsCompatibleMode
+ self.IncludePathList = IncludePathList
+ if len(self.FileList) > 0:
+ self.LoadUniFiles(FileList)
+
+ #
+ # Get Language definition
+ #
+ def GetLangDef(self, File, Line):
+ Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
+ if len(Lang) != 3:
+ try:
+ FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
+ except UnicodeError as X:
+ EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File);
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
+ LineNo = GetLineNo(FileIn, Line, False)
+ EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
+ ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
+ else:
+ LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
+ LangPrintName = Lang[2]
+
+ IsLangInDef = False
+ for Item in self.LanguageDef:
+ if Item[0] == LangName:
+ IsLangInDef = True
+                    break
+
+ if not IsLangInDef:
+ self.LanguageDef.append([LangName, LangPrintName])
+
+ #
+ # Add language string
+ #
+ self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
+ self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)
+
+ if not IsLangInDef:
+ #
+ # The found STRING tokens will be added into new language string list
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+ if LangName != FirstLangName:
+ for Index in range (2, len (self.OrderedStringList[FirstLangName])):
+ Item = self.OrderedStringList[FirstLangName][Index]
+ if Item.UseOtherLangDef != '':
+ OtherLang = Item.UseOtherLangDef
+ else:
+ OtherLang = FirstLangName
+ self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName, '', Item.Referenced, Item.Token, OtherLang))
+ self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
+ return True
+
+ @staticmethod
+ def OpenUniFile(FileName):
+ #
+ # Read file
+ #
+ try:
+ UniFile = open(FileName, mode='rb')
+ FileIn = UniFile.read()
+ UniFile.close()
+        except:
+            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
+
+ #
+ # Detect Byte Order Mark at beginning of file. Default to UTF-8
+ #
+ Encoding = 'utf-8'
+ if (FileIn.startswith(codecs.BOM_UTF16_BE) or
+ FileIn.startswith(codecs.BOM_UTF16_LE)):
+ Encoding = 'utf-16'
+
+ UniFileClassObject.VerifyUcs2Data(FileIn, FileName, Encoding)
+
+ UniFile = BytesIO(FileIn)
+ Info = codecs.lookup(Encoding)
+ (Reader, Writer) = (Info.streamreader, Info.streamwriter)
+ return codecs.StreamReaderWriter(UniFile, Reader, Writer)
+
+ @staticmethod
+ def VerifyUcs2Data(FileIn, FileName, Encoding):
+ Ucs2Info = codecs.lookup('ucs-2')
+ #
+ # Convert to unicode
+ #
+ try:
+ FileDecoded = codecs.decode(FileIn, Encoding)
+ Ucs2Info.encode(FileDecoded)
+ except:
+ UniFile = BytesIO(FileIn)
+ Info = codecs.lookup(Encoding)
+ (Reader, Writer) = (Info.streamreader, Info.streamwriter)
+ File = codecs.StreamReaderWriter(UniFile, Reader, Writer)
+ LineNumber = 0
+ ErrMsg = lambda Encoding, LineNumber: \
+ '%s contains invalid %s characters on line %d.' % \
+ (FileName, Encoding, LineNumber)
+ while True:
+ LineNumber = LineNumber + 1
+ try:
+ Line = File.readline()
+ if Line == '':
+ EdkLogger.error('Unicode File Parser', PARSER_ERROR,
+ ErrMsg(Encoding, LineNumber))
+ Ucs2Info.encode(Line)
+ except:
+ EdkLogger.error('Unicode File Parser', PARSER_ERROR,
+ ErrMsg('UCS-2', LineNumber))
+
+ #
+ # Get String name and value
+ #
+ def GetStringObject(self, Item):
+ Language = ''
+ Value = ''
+
+ Name = Item.split()[1]
+ # Check the string name
+ if Name != '':
+ MatchString = gIdentifierPattern.match(Name)
+ if MatchString is None:
+            EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains invalid characters.' % (Name, self.File))
+ LanguageList = Item.split(u'#language ')
+ for IndexI in range(len(LanguageList)):
+ if IndexI == 0:
+ continue
+ else:
+ Language = LanguageList[IndexI].split()[0]
+ Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ self.AddStringToList(Name, Language, Value)
+
+ #
+ # Get include file list and load them
+ #
+ def GetIncludeFile(self, Item, Dir):
+ FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
+ self.LoadUniFile(FileName)
+
+ #
+ # Pre-process before parse .uni file
+ #
+ def PreProcess(self, File):
+ try:
+ FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
+ except UnicodeError as X:
+ EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path);
+ except OSError:
+ EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File.Path);
+
+ Lines = []
+        #
+        # Pre-process each line: protect escaped backslashes with a placeholder,
+        # strip comments, and expand escape sequences
+        #
+ for Line in FileIn:
+ Line = Line.strip()
+ Line = Line.replace(u'\\\\', BACK_SLASH_PLACEHOLDER)
+ Line = StripComments(Line)
+
+ #
+ # Ignore empty line
+ #
+ if len(Line) == 0:
+ continue
+
+
+ Line = Line.replace(u'/langdef', u'#langdef')
+ Line = Line.replace(u'/string', u'#string')
+ Line = Line.replace(u'/language', u'#language')
+ Line = Line.replace(u'/include', u'#include')
+
+ Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
+ Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
+ Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
+
+ Line = Line.replace(u'\\r\\n', CR + LF)
+ Line = Line.replace(u'\\n', CR + LF)
+ Line = Line.replace(u'\\r', CR)
+ Line = Line.replace(u'\\t', u' ')
+ Line = Line.replace(u'\t', u' ')
+ Line = Line.replace(u'\\"', u'"')
+ Line = Line.replace(u"\\'", u"'")
+ Line = Line.replace(BACK_SLASH_PLACEHOLDER, u'\\')
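+            # For example, \" becomes ", a literal \\ survives via the
+            # placeholder, and \r\n, \n and \r all normalize to CR/LF control
+            # characters; the loop below then handles 4-hex-digit \x escapes.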
+
+ StartPos = Line.find(u'\\x')
+ while (StartPos != -1):
+ EndPos = Line.find(u'\\', StartPos + 1, StartPos + 7)
+ if EndPos != -1 and EndPos - StartPos == 6 :
+ if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
+ EndStr = Line[EndPos: ]
+ UniStr = Line[StartPos + 2: EndPos]
+ if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
+ if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
+ Line = Line[0 : StartPos] + UniStr + EndStr
+ else:
+ Line = Line[0 : StartPos] + UniStr + EndStr[1:]
+ StartPos = Line.find(u'\\x', StartPos + 1)
+
+ IncList = gIncludePattern.findall(Line)
+ if len(IncList) == 1:
+ for Dir in [File.Dir] + self.IncludePathList:
+ IncFile = PathClass(str(IncList[0]), Dir)
+ if os.path.isfile(IncFile.Path):
+ Lines.extend(self.PreProcess(IncFile))
+ break
+ else:
+ EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, Message="Cannot find include file", ExtraData=str(IncList[0]))
+ continue
+
+ Lines.append(Line)
+
+ return Lines
+
+ #
+ # Load a .uni file
+ #
+ def LoadUniFile(self, File = None):
+ if File is None:
+ EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
+ self.File = File
+ #
+ # Process special char in file
+ #
+ Lines = self.PreProcess(File)
+
+ #
+ # Get Unicode Information
+ #
+ for IndexI in range(len(Lines)):
+ Line = Lines[IndexI]
+ if (IndexI + 1) < len(Lines):
+ SecondLine = Lines[IndexI + 1]
+ if (IndexI + 2) < len(Lines):
+ ThirdLine = Lines[IndexI + 2]
+
+ #
+ # Get Language def information
+ #
+ if Line.find(u'#langdef ') >= 0:
+ self.GetLangDef(File, Line)
+ continue
+
+ Name = ''
+ Language = ''
+ Value = ''
+ #
+ # Get string def information format 1 as below
+ #
+ # #string MY_STRING_1
+ # #language eng
+ # My first English string line 1
+ # My first English string line 2
+ # #string MY_STRING_1
+ # #language spa
+ # Mi segunda secuencia 1
+ # Mi segunda secuencia 2
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
+ SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
+ ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
+ Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
+ Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
+ for IndexJ in range(IndexI + 2, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0:
+ Value = Value + Lines[IndexJ]
+ else:
+ IndexI = IndexJ
+ break
+ # Value = Value.replace(u'\r\n', u'')
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ # Check the string name
+ if not self.IsCompatibleMode and Name != '':
+ MatchString = gIdentifierPattern.match(Name)
+ if MatchString is None:
+                        EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains invalid characters.' % (Name, self.File))
+ self.AddStringToList(Name, Language, Value)
+ continue
+
+ #
+ # Get string def information format 2 as below
+ #
+ # #string MY_STRING_1 #language eng "My first English string line 1"
+ # "My first English string line 2"
+ # #language spa "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ # #string MY_STRING_2 #language eng "My first English string line 1"
+ # "My first English string line 2"
+ # #string MY_STRING_2 #language spa "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') >= 0:
+ StringItem = Line
+ for IndexJ in range(IndexI + 1, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') >= 0 and Lines[IndexJ].find(u'#language ') >= 0:
+ IndexI = IndexJ
+ break
+ elif Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') >= 0:
+ StringItem = StringItem + Lines[IndexJ]
+ elif Lines[IndexJ].count(u'\"') >= 2:
+ StringItem = StringItem[ : StringItem.rfind(u'\"')] + Lines[IndexJ][Lines[IndexJ].find(u'\"') + len(u'\"') : ]
+ self.GetStringObject(StringItem)
+ continue
+
+ #
+ # Load multiple .uni files
+ #
+ def LoadUniFiles(self, FileList):
+ if len(FileList) > 0:
+ for File in FileList:
+ self.LoadUniFile(File)
+
+ #
+ # Add a string to list
+ #
+ def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
+ for LangNameItem in self.LanguageDef:
+ if Language == LangNameItem[0]:
+ break
+ else:
+ EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
+ % (Language, Name, self.File))
+
+ if Language not in self.OrderedStringList:
+ self.OrderedStringList[Language] = []
+ self.OrderedStringDict[Language] = {}
+
+ IsAdded = True
+ if Name in self.OrderedStringDict[Language]:
+ IsAdded = False
+ if Value is not None:
+ ItemIndexInList = self.OrderedStringDict[Language][Name]
+ Item = self.OrderedStringList[Language][ItemIndexInList]
+ Item.UpdateValue(Value)
+ Item.UseOtherLangDef = ''
+
+ if IsAdded:
+ Token = len(self.OrderedStringList[Language])
+ if Index == -1:
+ self.OrderedStringList[Language].append(StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
+ self.OrderedStringDict[Language][Name] = Token
+ for LangName in self.LanguageDef:
+ #
+                    # A new STRING token will be added into all language string lists,
+                    # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ if LangName[0] != Language:
+ if UseOtherLangDef != '':
+ OtherLangDef = UseOtherLangDef
+ else:
+ OtherLangDef = Language
+ self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name, '', Referenced, Token, OtherLangDef))
+ self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
+ else:
+ self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
+ self.OrderedStringDict[Language][Name] = Index
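+
+    # Illustrative example: adding STR_A for 'en-US' when 'fr-FR' is also
+    # defined appends a full StringDefClassObject to the 'en-US' list and a
+    # placeholder (empty value, UseOtherLangDef pointing back to 'en-US') to
+    # the 'fr-FR' list, both sharing the same Token.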
+
+ #
+ # Set the string as referenced
+ #
+ def SetStringReferenced(self, Name):
+ #
+        # String tokens are added in the same order in all language string lists,
+        # so only the status of the string token in the first language string list needs to be updated.
+ #
+ Lang = self.LanguageDef[0][0]
+ if Name in self.OrderedStringDict[Lang]:
+ ItemIndexInList = self.OrderedStringDict[Lang][Name]
+ Item = self.OrderedStringList[Lang][ItemIndexInList]
+ Item.Referenced = True
+
+ #
+ # Search the string in language definition by Name
+ #
+ def FindStringValue(self, Name, Lang):
+ if Name in self.OrderedStringDict[Lang]:
+ ItemIndexInList = self.OrderedStringDict[Lang][Name]
+ return self.OrderedStringList[Lang][ItemIndexInList]
+
+ return None
+
+ #
+ # Search the string in language definition by Token
+ #
+ def FindByToken(self, Token, Lang):
+ for Item in self.OrderedStringList[Lang]:
+ if Item.Token == Token:
+ return Item
+
+ return None
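+
+    # Illustrative lookup sketch (Obj is a loaded UniFileClassObject; the
+    # string name and language below are hypothetical):
+    #   Item = Obj.FindStringValue(u'STR_EXAMPLE', 'en-US')   # search by name
+    #   Same = Obj.FindByToken(Item.Token, 'en-US')           # search by token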
+
+ #
+ # Re-order strings and re-generate tokens
+ #
+ def ReToken(self):
+ #
+        # Re-token all language strings according to the referenced status of the string tokens in the first language string list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+
+        # Convert OrderedStringList to OrderedStringListByToken to facilitate future searches by token
+ for LangNameItem in self.LanguageDef:
+ self.OrderedStringListByToken[LangNameItem[0]] = {}
+
+ #
+        # Use small token values for all referenced string tokens.
+ #
+ RefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+            if FirstLangItem.Referenced:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Referenced = True
+ OtherLangItem.Token = RefToken
+ self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
+ RefToken = RefToken + 1
+
+ #
+        # Use large token values for all unreferenced string tokens.
+ #
+ UnRefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+            if not FirstLangItem.Referenced:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Token = RefToken + UnRefToken
+ self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
+ UnRefToken = UnRefToken + 1
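+
+        # Net effect (illustrative): with three referenced and two unreferenced
+        # strings, the referenced ones receive tokens 0, 1 and 2, and the
+        # unreferenced ones receive tokens 3 and 4 in every language list.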
+
+ #
+ # Show the instance itself
+ #
+ def ShowMe(self):
+ print(self.LanguageDef)
+ #print self.OrderedStringList
+ for Item in self.OrderedStringList:
+ print(Item)
+ for Member in self.OrderedStringList[Item]:
+ print(str(Member))
+
+# This acts like the main() function for the script, unless it is imported into another script.
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+ a = UniFileClassObject([PathClass("C:\\Edk\\Strings.uni"), PathClass("C:\\Edk\\Strings2.uni")])
+ a.ReToken()
+ a.ShowMe()
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
new file mode 100755
index 00000000..7681d48e
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
@@ -0,0 +1,280 @@
+# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+
+#
+# This file is used to collect the Variable checking information
+#
+
+##
+# Import Modules
+#
+import os
+from Common.RangeExpression import RangeExpression
+from Common.Misc import *
+from io import BytesIO
+from struct import pack
+from Common.DataType import *
+
+class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
+ def __init__(self):
+ self.var_check_info = []
+
+ def push_back(self, var_check_tab):
+ for tab in self.var_check_info:
+ if tab.equal(var_check_tab):
+ tab.merge(var_check_tab)
+ break
+ else:
+ self.var_check_info.append(var_check_tab)
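+
+    # Usage sketch (hypothetical objects and paths): tables with an equal
+    # Guid/Name pair are merged rather than duplicated, e.g.
+    #   container = VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER()
+    #   container.push_back(tab)         # tab: a VAR_CHECK_PCD_VARIABLE_TAB
+    #   container.dump(dest_dir, 'DXE')  # writes PcdVarCheck.bin; dest_dir must be absolute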
+
+ def dump(self, dest, Phase):
+
+ if not os.path.isabs(dest):
+ return
+ if not os.path.exists(dest):
+ os.mkdir(dest)
+ BinFileName = "PcdVarCheck.bin"
+ BinFilePath = os.path.join(dest, BinFileName)
+ Buffer = bytearray()
+ index = 0
+ for var_check_tab in self.var_check_info:
+ index += 1
+ realLength = 0
+ realLength += 32
+ Name = var_check_tab.Name[1:-1]
+ NameChars = Name.split(",")
+ realLength += len(NameChars)
+ if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
+ realLength += (4 - (realLength % 4))
+ itemIndex = 0
+ for item in var_check_tab.validtab:
+ itemIndex += 1
+ realLength += 5
+ for v_data in item.data:
+                    if isinstance(v_data, int):
+                        realLength += item.StorageWidth
+                    else:
+                        # a (start, end) range pair occupies two storage units
+                        realLength += item.StorageWidth * 2
+ if (index == len(self.var_check_info)) :
+ if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
+ realLength += (4 - (realLength % 4))
+ else:
+ if realLength % 4:
+ realLength += (4 - (realLength % 4))
+ var_check_tab.Length = realLength
+ realLength = 0
+ index = 0
+ for var_check_tab in self.var_check_info:
+ index += 1
+
+ b = pack("=H", var_check_tab.Revision)
+ Buffer += b
+ realLength += 2
+
+ b = pack("=H", var_check_tab.HeaderLength)
+ Buffer += b
+ realLength += 2
+
+ b = pack("=L", var_check_tab.Length)
+ Buffer += b
+ realLength += 4
+
+ b = pack("=B", var_check_tab.Type)
+ Buffer += b
+ realLength += 1
+
+ for i in range(0, 3):
+ b = pack("=B", var_check_tab.Reserved)
+ Buffer += b
+ realLength += 1
+
+ b = pack("=L", var_check_tab.Attributes)
+ Buffer += b
+ realLength += 4
+
+ Guid = var_check_tab.Guid
+ b = PackByteFormatGUID(Guid)
+ Buffer += b
+ realLength += 16
+
+ Name = var_check_tab.Name[1:-1]
+ NameChars = Name.split(",")
+ for NameChar in NameChars:
+ NameCharNum = int(NameChar, 16)
+ b = pack("=B", NameCharNum)
+ Buffer += b
+ realLength += 1
+
+ if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
+ for i in range(4 - (realLength % 4)):
+ b = pack("=B", var_check_tab.pad)
+ Buffer += b
+ realLength += 1
+ itemIndex = 0
+ for item in var_check_tab.validtab:
+ itemIndex += 1
+
+ b = pack("=B", item.Type)
+ Buffer += b
+ realLength += 1
+
+ b = pack("=B", item.Length)
+ Buffer += b
+ realLength += 1
+
+ b = pack("=H", int(item.VarOffset, 16))
+ Buffer += b
+ realLength += 2
+
+ b = pack("=B", item.StorageWidth)
+ Buffer += b
+ realLength += 1
+ for v_data in item.data:
+ if isinstance(v_data, int):
+ b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
+ Buffer += b
+ realLength += item.StorageWidth
+ else:
+ b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data[0])
+ Buffer += b
+ realLength += item.StorageWidth
+ b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data[1])
+ Buffer += b
+ realLength += item.StorageWidth
+
+ if (index == len(self.var_check_info)) :
+ if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
+ for i in range(4 - (realLength % 4)):
+ b = pack("=B", var_check_tab.pad)
+ Buffer += b
+ realLength += 1
+ else:
+ if realLength % 4:
+ for i in range(4 - (realLength % 4)):
+ b = pack("=B", var_check_tab.pad)
+ Buffer += b
+ realLength += 1
+
+ DbFile = BytesIO()
+ if Phase == 'DXE' and os.path.exists(BinFilePath):
+ BinFile = open(BinFilePath, "rb")
+ BinBuffer = BinFile.read()
+ BinFile.close()
+ BinBufferSize = len(BinBuffer)
+ if (BinBufferSize % 4):
+ for i in range(4 - (BinBufferSize % 4)):
+ b = pack("=B", VAR_CHECK_PCD_VARIABLE_TAB.pad)
+ BinBuffer += b
+ Buffer = BinBuffer + Buffer
+ DbFile.write(Buffer)
+ SaveFileOnChange(BinFilePath, DbFile.getvalue(), True)
+
+
+class VAR_CHECK_PCD_VARIABLE_TAB(object):
+ pad = 0xDA
+ def __init__(self, TokenSpaceGuid, PcdCName):
+ self.Revision = 0x0001
+ self.HeaderLength = 0
+        self.Length = 0 # Length includes this header
+ self.Type = 0
+ self.Reserved = 0
+ self.Attributes = 0x00000000
+ self.Guid = eval("[" + TokenSpaceGuid.replace("{", "").replace("}", "") + "]")
+ self.Name = PcdCName
+ self.validtab = []
+
+ def UpdateSize(self):
+ self.HeaderLength = 32 + len(self.Name.split(","))
+ self.Length = 32 + len(self.Name.split(",")) + self.GetValidTabLen()
+
+ def GetValidTabLen(self):
+ validtablen = 0
+ for item in self.validtab:
+ validtablen += item.Length
+ return validtablen
+
+ def SetAttributes(self, attributes):
+ self.Attributes = attributes
+
+ def push_back(self, valid_obj):
+ if valid_obj is not None:
+ self.validtab.append(valid_obj)
+
+    def equal(self, varchecktab):
+        return self.Guid == varchecktab.Guid and self.Name == varchecktab.Name
+
+ def merge(self, varchecktab):
+ for validobj in varchecktab.validtab:
+ if validobj in self.validtab:
+ continue
+ self.validtab.append(validobj)
+ self.UpdateSize()
+
+
+class VAR_CHECK_PCD_VALID_OBJ(object):
+ def __init__(self, VarOffset, data, PcdDataType):
+ self.Type = 1
+        self.Length = 0 # Length includes this header
+ self.VarOffset = VarOffset
+ self.PcdDataType = PcdDataType.strip()
+ self.rawdata = data
+ self.data = set()
+ try:
+ self.StorageWidth = MAX_SIZE_TYPE[self.PcdDataType]
+ self.ValidData = True
+        except KeyError:
+ self.StorageWidth = 0
+ self.ValidData = False
+
+ def __eq__(self, validObj):
+ return validObj and self.VarOffset == validObj.VarOffset
+
+class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
+ def __init__(self, VarOffset, validlist, PcdDataType):
+ super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
+ self.Type = 1
+ valid_num_list = []
+ for item in self.rawdata:
+ valid_num_list.extend(item.split(','))
+
+ for valid_num in valid_num_list:
+ valid_num = valid_num.strip()
+
+ if valid_num.startswith('0x') or valid_num.startswith('0X'):
+ self.data.add(int(valid_num, 16))
+ else:
+ self.data.add(int(valid_num))
+
+
+ self.Length = 5 + len(self.data) * self.StorageWidth
+
+
+class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
+ def __init__(self, VarOffset, validrange, PcdDataType):
+ super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
+ self.Type = 2
+ RangeExpr = ""
+ i = 0
+ for item in self.rawdata:
+ if i == 0:
+ RangeExpr = "( " + item + " )"
+ else:
+ RangeExpr = RangeExpr + "OR ( " + item + " )"
+ range_result = RangeExpression(RangeExpr, self.PcdDataType)(True)
+ for rangelist in range_result:
+ for obj in rangelist.pop():
+ self.data.add((obj.start, obj.end))
+ self.Length = 5 + len(self.data) * 2 * self.StorageWidth
+
+
+def GetValidationObject(PcdClass, VarOffset):
+ if PcdClass.validateranges:
+ return VAR_CHECK_PCD_VALID_RANGE(VarOffset, PcdClass.validateranges, PcdClass.DatumType)
+ if PcdClass.validlists:
+ return VAR_CHECK_PCD_VALID_LIST(VarOffset, PcdClass.validlists, PcdClass.DatumType)
+ else:
+ return None
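+
+# Illustrative mapping (hypothetical PCD declarations): a PCD declared with a
+# valid list such as "1, 2, 3" yields a VAR_CHECK_PCD_VALID_LIST whose data is
+# {1, 2, 3}; one declared with a valid range expression yields a
+# VAR_CHECK_PCD_VALID_RANGE whose data holds (start, end) pairs such as {(0, 15)}.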
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
new file mode 100755
index 00000000..93624d0f
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py
@@ -0,0 +1,971 @@
+## @file
+# Create the WorkspaceAutoGen object which drives the whole platform build
+#
+# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+## Import Modules
+#
+from __future__ import print_function
+from __future__ import absolute_import
+import os.path as path
+import hashlib
+from collections import defaultdict
+from GenFds.FdfParser import FdfParser
+from Workspace.WorkspaceCommon import GetModuleLibInstances
+from AutoGen import GenMake
+from AutoGen.AutoGen import AutoGen
+from AutoGen.PlatformAutoGen import PlatformAutoGen
+from AutoGen.BuildEngine import gDefaultBuildRuleFile
+from Common.ToolDefClassObject import gDefaultToolsDefFile
+from Common.StringUtils import NormPath
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+import json
+
+## Regular expression for splitting Dependency Expression string into tokens
+gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")
+
+## Regular expression for match: PCD(xxxx.yyy)
+gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")
+
+## Workspace AutoGen class
+#
+# This class is used mainly to control the whole platform build for different
+# architecture. This class will generate top level makefile.
+#
+class WorkspaceAutoGen(AutoGen):
+ # call super().__init__ then call the worker function with different parameter count
+ def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ if not hasattr(self, "_Init"):
+ self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
+ self._Init = True
+
+ ## Initialize WorkspaceAutoGen
+ #
+ # @param WorkspaceDir Root directory of workspace
+ # @param ActivePlatform Meta-file of active platform
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param ArchList List of architecture of current build
+ # @param MetaFileDb Database containing meta-files
+ # @param BuildConfig Configuration of build
+ # @param ToolDefinition Tool chain definitions
+ # @param FlashDefinitionFile File of flash definition
+ # @param Fds FD list to be generated
+ # @param Fvs FV list to be generated
+ # @param Caps Capsule list to be generated
+ # @param SkuId SKU id from command line
+ #
+ def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
+ BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
+ Progress=None, BuildModule=None):
+ self.BuildDatabase = MetaFileDb
+ self.MetaFile = ActivePlatform
+ self.WorkspaceDir = WorkspaceDir
+ self.Platform = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]
+ GlobalData.gActivePlatform = self.Platform
+ self.BuildTarget = Target
+ self.ToolChain = Toolchain
+ self.ArchList = ArchList
+ self.SkuId = SkuId
+ self.UniFlag = UniFlag
+
+ self.TargetTxt = BuildConfig
+ self.ToolDef = ToolDefinition
+ self.FdfFile = FlashDefinitionFile
+ self.FdTargetList = Fds if Fds else []
+ self.FvTargetList = Fvs if Fvs else []
+ self.CapTargetList = Caps if Caps else []
+ self.AutoGenObjectList = []
+ self._GuidDict = {}
+
+        # there are many relative directory operations, so ...
+ os.chdir(self.WorkspaceDir)
+
+ self.MergeArch()
+ self.ValidateBuildTarget()
+
+ EdkLogger.info("")
+ if self.ArchList:
+ EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
+ EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
+ EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
+
+ EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
+ if BuildModule:
+ EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))
+
+ if self.FdfFile:
+ EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))
+
+ EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)
+
+ if Progress:
+ Progress.Start("\nProcessing meta-data")
+        #
+        # Collect platform GUIDs to support GUID names in FdfParser.
+        #
+        self.CollectPlatformGuids()
+        #
+        # Mark that the build is now in the AutoGen phase.
+        #
+        GlobalData.gAutoGenPhase = True
+        self.ProcessModuleFromFdf()
+ self.ProcessPcdType()
+ self.ProcessMixedPcd()
+ self.VerifyPcdsFromFDF()
+ self.CollectAllPcds()
+ for Pa in self.AutoGenObjectList:
+ Pa.FillData_LibConstPcd()
+ self.GeneratePkgLevelHash()
+ #
+ # Check PCDs token value conflict in each DEC file.
+ #
+ self._CheckAllPcdsTokenValueConflict()
+ #
+ # Check PCD type and definition between DSC and DEC
+ #
+ self._CheckPcdDefineAndType()
+
+ self.CreateBuildOptionsFile()
+ self.CreatePcdTokenNumberFile()
+ self.GeneratePlatformLevelHash()
+
+ #
+ # Merge Arch
+ #
+ def MergeArch(self):
+ if not self.ArchList:
+ ArchList = set(self.Platform.SupArchList)
+ else:
+ ArchList = set(self.ArchList) & set(self.Platform.SupArchList)
+ if not ArchList:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))
+ elif self.ArchList and len(ArchList) != len(self.ArchList):
+ SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
+ EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
+ % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
+ self.ArchList = tuple(ArchList)
+
+ # Validate build target
+ def ValidateBuildTarget(self):
+ if self.BuildTarget not in self.Platform.BuildTargets:
+ EdkLogger.error("build", PARAMETER_INVALID,
+ ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
+ % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
+
+ def CollectPlatformGuids(self):
+ oriInfList = []
+ oriPkgSet = set()
+ PlatformPkg = set()
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ oriInfList = Platform.Modules
+ for ModuleFile in oriInfList:
+ ModuleData = self.BuildDatabase[ModuleFile, Platform._Arch, Platform._Target, Platform._Toolchain]
+ oriPkgSet.update(ModuleData.Packages)
+ for Pkg in oriPkgSet:
+ Guids = Pkg.Guids
+ GlobalData.gGuidDict.update(Guids)
+ if Platform.Packages:
+ PlatformPkg.update(Platform.Packages)
+ for Pkg in PlatformPkg:
+ Guids = Pkg.Guids
+ GlobalData.gGuidDict.update(Guids)
+
+ @cached_property
+ def FdfProfile(self):
+ if not self.FdfFile:
+ self.FdfFile = self.Platform.FlashDefinition
+
+ FdfProfile = None
+ if self.FdfFile:
+ Fdf = FdfParser(self.FdfFile.Path)
+ Fdf.ParseFile()
+ GlobalData.gFdfParser = Fdf
+ if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
+ FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
+ for FdRegion in FdDict.RegionList:
+ if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
+ if int(FdRegion.Offset) % 8 != 0:
+ EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
+ FdfProfile = Fdf.Profile
+ else:
+ if self.FdTargetList:
+ EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))
+ self.FdTargetList = []
+ if self.FvTargetList:
+ EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))
+ self.FvTargetList = []
+ if self.CapTargetList:
+ EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))
+ self.CapTargetList = []
+
+ return FdfProfile
+
+    def ProcessModuleFromFdf(self):
+
+ if self.FdfProfile:
+ for fvname in self.FvTargetList:
+ if fvname.upper() not in self.FdfProfile.FvDict:
+ EdkLogger.error("build", OPTION_VALUE_INVALID,
+ "No such an FV in FDF file: %s" % fvname)
+
+            # A DSC file may use FILE_GUID to override a module; Platform.Modules then
+            # uses FILE_GUIDmodule.inf as the key, while the path (self.MetaFile.Path) is the real path
+ for key in self.FdfProfile.InfDict:
+ if key == 'ArchTBD':
+ MetaFile_cache = defaultdict(set)
+ for Arch in self.ArchList:
+ Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ for Pkey in Current_Platform_cache.Modules:
+ MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)
+ for Inf in self.FdfProfile.InfDict[key]:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
+ for Arch in self.ArchList:
+ if ModuleFile in MetaFile_cache[Arch]:
+ break
+ else:
+ ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
+ if not ModuleData.IsBinaryModule:
+                            EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; is it really a binary module?" % ModuleFile)
+
+ else:
+ for Arch in self.ArchList:
+ if Arch == key:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ MetaFileList = set()
+ for Pkey in Platform.Modules:
+ MetaFileList.add(Platform.Modules[Pkey].MetaFile)
+ for Inf in self.FdfProfile.InfDict[key]:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
+ if ModuleFile in MetaFileList:
+ continue
+ ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
+ if not ModuleData.IsBinaryModule:
+                            EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; is it really a binary module?" % ModuleFile)
+
+
+
+ # parse FDF file to get PCDs in it, if any
+ def VerifyPcdsFromFDF(self):
+
+ if self.FdfProfile:
+ PcdSet = self.FdfProfile.PcdDict
+            self.VerifyPcdDeclaration(PcdSet)
+
+ def ProcessPcdType(self):
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+            Platform.Pcds    # access the property to force PCD evaluation for this arch
+            # collect the library instances used by the platform's source modules
+ Libs = []
+ for BuildData in list(self.BuildDatabase._CACHE_.values()):
+ if BuildData.Arch != Arch:
+ continue
+ if BuildData.MetaFile.Ext == '.inf' and str(BuildData) in Platform.Modules :
+ Libs.extend(GetModuleLibInstances(BuildData, Platform,
+ self.BuildDatabase,
+ Arch,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Platform.MetaFile,
+ EdkLogger
+ ))
+ for BuildData in list(self.BuildDatabase._CACHE_.values()):
+ if BuildData.Arch != Arch:
+ continue
+ if BuildData.MetaFile.Ext == '.inf':
+ for key in BuildData.Pcds:
+ if BuildData.Pcds[key].Pending:
+ if key in Platform.Pcds:
+ PcdInPlatform = Platform.Pcds[key]
+ if PcdInPlatform.Type:
+ BuildData.Pcds[key].Type = PcdInPlatform.Type
+ BuildData.Pcds[key].Pending = False
+
+ if BuildData.MetaFile in Platform.Modules:
+ PlatformModule = Platform.Modules[str(BuildData.MetaFile)]
+ if key in PlatformModule.Pcds:
+ PcdInPlatform = PlatformModule.Pcds[key]
+ if PcdInPlatform.Type:
+ BuildData.Pcds[key].Type = PcdInPlatform.Type
+ BuildData.Pcds[key].Pending = False
+ else:
+                    # PCD used in a library: if the PCD type is still pending, take it from the referencing module
+ if BuildData.Pcds[key].Pending:
+ if bool(BuildData.LibraryClass):
+ if BuildData in set(Libs):
+ ReferenceModules = BuildData.ReferenceModules
+ for ReferenceModule in ReferenceModules:
+ if ReferenceModule.MetaFile in Platform.Modules:
+ RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]
+ if key in RefPlatformModule.Pcds:
+ PcdInReferenceModule = RefPlatformModule.Pcds[key]
+ if PcdInReferenceModule.Type:
+ BuildData.Pcds[key].Type = PcdInReferenceModule.Type
+ BuildData.Pcds[key].Pending = False
+ break
+
+ def ProcessMixedPcd(self):
+ for Arch in self.ArchList:
+            SourcePcdDict = {TAB_PCDS_DYNAMIC_EX: set(), TAB_PCDS_PATCHABLE_IN_MODULE: set(), TAB_PCDS_DYNAMIC: set(), TAB_PCDS_FIXED_AT_BUILD: set()}
+            BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX: set(), TAB_PCDS_PATCHABLE_IN_MODULE: set()}
+ SourcePcdDict_Keys = SourcePcdDict.keys()
+ BinaryPcdDict_Keys = BinaryPcdDict.keys()
+
+ # generate the SourcePcdDict and BinaryPcdDict
+
+ for BuildData in list(self.BuildDatabase._CACHE_.values()):
+ if BuildData.Arch != Arch:
+ continue
+ if BuildData.MetaFile.Ext == '.inf':
+ for key in BuildData.Pcds:
+ if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:
+ if BuildData.IsBinaryModule:
+ BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+ else:
+ SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+
+ elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:
+ if BuildData.MetaFile.Ext == '.inf':
+ if BuildData.IsBinaryModule:
+ BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+ else:
+ SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+
+ elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:
+ SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+ elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:
+ SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
+
+ #
+ # A PCD can only use one type for all source modules
+ #
+ for i in SourcePcdDict_Keys:
+ for j in SourcePcdDict_Keys:
+ if i != j:
+ Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])
+ if len(Intersections) > 0:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),
+ ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)
+ )
+
+ #
+            # Intersect the binary PCD sets to find mixed PCDs
+ #
+ for i in BinaryPcdDict_Keys:
+ for j in BinaryPcdDict_Keys:
+ if i != j:
+ Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])
+ for item in Intersections:
+ NewPcd1 = (item[0] + '_' + i, item[1])
+ NewPcd2 = (item[0] + '_' + j, item[1])
+ if item not in GlobalData.MixedPcd:
+ GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
+ else:
+ if NewPcd1 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd1)
+ if NewPcd2 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd2)
+
+ #
+            # Intersect the source and binary PCD sets to find mixed PCDs
+ #
+ for i in SourcePcdDict_Keys:
+ for j in BinaryPcdDict_Keys:
+ if i != j:
+ Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])
+ for item in Intersections:
+ NewPcd1 = (item[0] + '_' + i, item[1])
+ NewPcd2 = (item[0] + '_' + j, item[1])
+ if item not in GlobalData.MixedPcd:
+ GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
+ else:
+ if NewPcd1 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd1)
+ if NewPcd2 not in GlobalData.MixedPcd[item]:
+ GlobalData.MixedPcd[item].append(NewPcd2)
+
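+            # Net effect (illustrative): a PCD seen both as a DynamicEx source
+            # PCD and as a PatchableInModule binary PCD ends up recorded as
+            #   GlobalData.MixedPcd[(Name, Guid)] =
+            #       [(Name + '_' + <type1>, Guid), (Name + '_' + <type2>, Guid)]
+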
+ BuildData = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+            for key in list(BuildData.Pcds):  # iterate over a copy; entries are re-keyed below
+ for SinglePcd in GlobalData.MixedPcd:
+ if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
+ for item in GlobalData.MixedPcd[SinglePcd]:
+ Pcd_Type = item[0].split('_')[-1]
+ if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \
+ (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):
+ Value = BuildData.Pcds[key]
+ Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type
+ if len(key) == 2:
+ newkey = (Value.TokenCName, key[1])
+ elif len(key) == 3:
+ newkey = (Value.TokenCName, key[1], key[2])
+ del BuildData.Pcds[key]
+ BuildData.Pcds[newkey] = Value
+ break
+ break
+
+ if self.FdfProfile:
+ PcdSet = self.FdfProfile.PcdDict
+ # handle the mixed pcd in FDF file
+            for key in list(PcdSet):  # iterate over a copy; mixed PCD keys are replaced below
+ if key in GlobalData.MixedPcd:
+ Value = PcdSet[key]
+ del PcdSet[key]
+ for item in GlobalData.MixedPcd[key]:
+ PcdSet[item] = Value
+
+    # Collect package set information from the INFs listed in the FDF
+ @cached_property
+ def PkgSet(self):
+ if not self.FdfFile:
+ self.FdfFile = self.Platform.FlashDefinition
+
+ if self.FdfFile:
+ ModuleList = self.FdfProfile.InfList
+ else:
+ ModuleList = []
+ Pkgs = {}
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ PkgSet = set()
+ for mb in [self.BuildDatabase[m, Arch, self.BuildTarget, self.ToolChain] for m in Platform.Modules]:
+ PkgSet.update(mb.Packages)
+ for Inf in ModuleList:
+ ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
+ if ModuleFile in Platform.Modules:
+ continue
+ ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
+ PkgSet.update(ModuleData.Packages)
+ PkgSet.update(Platform.Packages)
+ Pkgs[Arch] = list(PkgSet)
+ return Pkgs
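+
+    # PkgSet resolves to one package list per architecture, e.g. (illustrative
+    # package names) {'X64': [MdePkg, MdeModulePkg, ...]}.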
+
+    def VerifyPcdDeclaration(self, PcdSet):
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
+ Pkgs = self.PkgSet[Arch]
+ DecPcds = set()
+ DecPcdsKey = set()
+ for Pkg in Pkgs:
+ for Pcd in Pkg.Pcds:
+ DecPcds.add((Pcd[0], Pcd[1]))
+ DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))
+
+ Platform.SkuName = self.SkuId
+            for Name, Guid, Fields in PcdSet:
+ if (Name, Guid) not in DecPcds:
+ EdkLogger.error(
+ 'build',
+ PARSER_ERROR,
+ "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),
+                    File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][0],
+                    Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][1]
+ )
+ else:
+                # Check whether a Dynamic or DynamicEx PCD is used in the FDF file. If so, break the build with an error message.
+ if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \
+ or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \
+ or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:
+ continue
+ elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:
+ EdkLogger.error(
+ 'build',
+ PARSER_ERROR,
+ "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),
+                        File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][0],
+                        Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fields][1]
+                    )
+
+    def CollectAllPcds(self):
+
+ for Arch in self.ArchList:
+ Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
+ #
+ # Explicitly collect platform's dynamic PCDs
+ #
+ Pa.CollectPlatformDynamicPcds()
+ Pa.CollectFixedAtBuildPcds()
+ self.AutoGenObjectList.append(Pa)
+ # We need to calculate the PcdTokenNumber after all Arch Pcds are collected.
+ for Arch in self.ArchList:
+ #Pcd TokenNumber
+ Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
+ self.UpdateModuleDataPipe(Arch, {"PCD_TNUM":Pa.PcdTokenNumber})
+
+    def UpdateModuleDataPipe(self, arch, attr_dict):
+ for (Target, Toolchain, Arch, MetaFile) in AutoGen.Cache():
+ if Arch != arch:
+ continue
+ try:
+ AutoGen.Cache()[(Target, Toolchain, Arch, MetaFile)].DataPipe.DataContainer = attr_dict
+ except Exception:
+ pass
+ #
+ # Generate Package level hash value
+ #
+ def GeneratePkgLevelHash(self):
+ for Arch in self.ArchList:
+ GlobalData.gPackageHash = {}
+ if GlobalData.gUseHashCache:
+ for Pkg in self.PkgSet[Arch]:
+ self._GenPkgLevelHash(Pkg)
+
+
+ def CreateBuildOptionsFile(self):
+ #
+ # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.
+ #
+ content = 'gCommandLineDefines: '
+ content += str(GlobalData.gCommandLineDefines)
+ content += TAB_LINE_BREAK
+ content += 'BuildOptionPcd: '
+ content += str(GlobalData.BuildOptionPcd)
+ content += TAB_LINE_BREAK
+ content += 'Active Platform: '
+ content += str(self.Platform)
+ content += TAB_LINE_BREAK
+ if self.FdfFile:
+ content += 'Flash Image Definition: '
+ content += str(self.FdfFile)
+ content += TAB_LINE_BREAK
+ SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
+
+ def CreatePcdTokenNumberFile(self):
+ #
+ # Create PcdToken Number file for Dynamic/DynamicEx Pcd.
+ #
+ PcdTokenNumber = 'PcdTokenNumber: '
+ Pa = self.AutoGenObjectList[0]
+ if Pa.PcdTokenNumber:
+ if Pa.DynamicPcdList:
+ for Pcd in Pa.DynamicPcdList:
+ PcdTokenNumber += TAB_LINE_BREAK
+ PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
+ PcdTokenNumber += ' : '
+ PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
+ SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False)
+
+ def GeneratePlatformLevelHash(self):
+ #
+ # Get set of workspace metafiles
+ #
+ AllWorkSpaceMetaFiles = self._GetMetaFiles(self.BuildTarget, self.ToolChain)
+ AllWorkSpaceMetaFileList = sorted(AllWorkSpaceMetaFiles, key=lambda x: str(x))
+ #
+ # Retrieve latest modified time of all metafiles
+ #
+ SrcTimeStamp = 0
+ for f in AllWorkSpaceMetaFiles:
+            if os.stat(f).st_mtime > SrcTimeStamp:
+                SrcTimeStamp = os.stat(f).st_mtime
+ self._SrcTimeStamp = SrcTimeStamp
+
+ if GlobalData.gUseHashCache:
+ FileList = []
+ m = hashlib.md5()
+ for file in AllWorkSpaceMetaFileList:
+ if file.endswith('.dec'):
+ continue
+                with open(file, 'rb') as f:
+                    Content = f.read()
+ m.update(Content)
+ FileList.append((str(file), hashlib.md5(Content).hexdigest()))
+
+ HashDir = path.join(self.BuildDir, "Hash_Platform")
+ HashFile = path.join(HashDir, 'Platform.hash.' + m.hexdigest())
+ SaveFileOnChange(HashFile, m.hexdigest(), False)
+ HashChainFile = path.join(HashDir, 'Platform.hashchain.' + m.hexdigest())
+ GlobalData.gPlatformHashFile = HashChainFile
+ try:
+ with open(HashChainFile, 'w') as f:
+ json.dump(FileList, f, indent=2)
+            except Exception:
+                EdkLogger.quiet("[cache warning]: failed to save hashchain file: %s" % HashChainFile)
+
+ if GlobalData.gBinCacheDest:
+ # Copy platform hash files to cache destination
+ FileDir = path.join(GlobalData.gBinCacheDest, self.OutputDir, self.BuildTarget + "_" + self.ToolChain, "Hash_Platform")
+ CacheFileDir = FileDir
+ CreateDirectory(CacheFileDir)
+ CopyFileOnChange(HashFile, CacheFileDir)
+ CopyFileOnChange(HashChainFile, CacheFileDir)
+
+ #
+ # Write metafile list to build directory
+ #
+ AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')
+        if os.path.exists(AutoGenFilePath):
+            os.remove(AutoGenFilePath)
+        if not os.path.exists(self.BuildDir):
+            os.makedirs(self.BuildDir)
+        with open(AutoGenFilePath, 'w+') as file:
+ for f in AllWorkSpaceMetaFileList:
+ print(f, file=file)
+ return True
+
+ def _GenPkgLevelHash(self, Pkg):
+ if Pkg.PackageName in GlobalData.gPackageHash:
+ return
+
+ PkgDir = os.path.join(self.BuildDir, Pkg.Arch, "Hash_Pkg", Pkg.PackageName)
+ CreateDirectory(PkgDir)
+ FileList = []
+ m = hashlib.md5()
+ # Get .dec file's hash value
+        with open(Pkg.MetaFile.Path, 'rb') as f:
+            Content = f.read()
+ m.update(Content)
+ FileList.append((str(Pkg.MetaFile.Path), hashlib.md5(Content).hexdigest()))
+ # Get include files hash value
+ if Pkg.Includes:
+ for inc in sorted(Pkg.Includes, key=lambda x: str(x)):
+ for Root, Dirs, Files in os.walk(str(inc)):
+ for File in sorted(Files):
+ File_Path = os.path.join(Root, File)
+                        with open(File_Path, 'rb') as f:
+                            Content = f.read()
+ m.update(Content)
+ FileList.append((str(File_Path), hashlib.md5(Content).hexdigest()))
+ GlobalData.gPackageHash[Pkg.PackageName] = m.hexdigest()
+
+ HashDir = PkgDir
+ HashFile = path.join(HashDir, Pkg.PackageName + '.hash.' + m.hexdigest())
+ SaveFileOnChange(HashFile, m.hexdigest(), False)
+ HashChainFile = path.join(HashDir, Pkg.PackageName + '.hashchain.' + m.hexdigest())
+ GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)] = HashChainFile
+ try:
+ with open(HashChainFile, 'w') as f:
+ json.dump(FileList, f, indent=2)
+        except Exception:
+            EdkLogger.quiet("[cache warning]: failed to save hashchain file: %s" % HashChainFile)
+
+ if GlobalData.gBinCacheDest:
+ # Copy Pkg hash files to cache destination dir
+ FileDir = path.join(GlobalData.gBinCacheDest, self.OutputDir, self.BuildTarget + "_" + self.ToolChain, Pkg.Arch, "Hash_Pkg", Pkg.PackageName)
+ CacheFileDir = FileDir
+ CreateDirectory(CacheFileDir)
+ CopyFileOnChange(HashFile, CacheFileDir)
+ CopyFileOnChange(HashChainFile, CacheFileDir)
+
+ def _GetMetaFiles(self, Target, Toolchain):
+ AllWorkSpaceMetaFiles = set()
+ #
+ # add fdf
+ #
+ if self.FdfFile:
+            AllWorkSpaceMetaFiles.add(self.FdfFile.Path)
+            for f in GlobalData.gFdfParser.GetAllIncludedFile():
+                AllWorkSpaceMetaFiles.add(f.FileName)
+ #
+ # add dsc
+ #
+ AllWorkSpaceMetaFiles.add(self.MetaFile.Path)
+
+ #
+ # add build_rule.txt & tools_def.txt
+ #
+ AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))
+ AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))
+
+        #
+        # add BuildOptions metafile
+        #
+        AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))
+
+        #
+        # add PcdTokenNumber file for Dynamic/DynamicEx PCDs
+        #
+ AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))
+
+ for Pa in self.AutoGenObjectList:
+ AllWorkSpaceMetaFiles.add(Pa.ToolDefinitionFile)
+
+ for Arch in self.ArchList:
+ #
+ # add dec
+ #
+ for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:
+ AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)
+
+ #
+ # add included dsc
+ #
+ for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:
+ AllWorkSpaceMetaFiles.add(filePath.Path)
+
+ return AllWorkSpaceMetaFiles
+
+ def _CheckPcdDefineAndType(self):
+ PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC,
+ TAB_PCDS_DYNAMIC_EX}
+
+        # This dict stores PCDs that are not used by any module for the specified arches
+ UnusedPcd = OrderedDict()
+ for Pa in self.AutoGenObjectList:
+ # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
+ for Pcd in Pa.Platform.Pcds:
+ PcdType = Pa.Platform.Pcds[Pcd].Type
+
+ # If no PCD type, this PCD comes from FDF
+ if not PcdType:
+ continue
+
+ # Try to remove Hii and Vpd suffix
+ if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
+ PcdType = TAB_PCDS_DYNAMIC_EX
+ elif PcdType.startswith(TAB_PCDS_DYNAMIC):
+ PcdType = TAB_PCDS_DYNAMIC
+
+ for Package in Pa.PackageList:
+ # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
+ if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
+ break
+ for Type in PcdTypeSet:
+ if (Pcd[0], Pcd[1], Type) in Package.Pcds:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
+ % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
+ ExtraData=None
+ )
+ return
+ else:
+ UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)
+
+ for Pcd in UnusedPcd:
+ EdkLogger.warn(
+ 'build',
+ "The PCD was not specified by any INF module in the platform for the given architecture.\n"
+ "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
+ % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
+ ExtraData=None
+ )
+
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))
+
+ ## Return the directory to store FV files
+ @cached_property
+ def FvDir(self):
+ return path.join(self.BuildDir, TAB_FV_DIRECTORY)
+
+ ## Return the directory to store all intermediate and final files built
+ @cached_property
+ def BuildDir(self):
+ return self.AutoGenObjectList[0].BuildDir
+
+ ## Return the build output directory platform specifies
+ @cached_property
+ def OutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return platform name
+ @cached_property
+ def Name(self):
+ return self.Platform.PlatformName
+
+ ## Return meta-file GUID
+ @cached_property
+ def Guid(self):
+ return self.Platform.Guid
+
+ ## Return platform version
+ @cached_property
+ def Version(self):
+ return self.Platform.Version
+
+ ## Return paths of tools
+ @cached_property
+ def ToolDefinition(self):
+ return self.AutoGenObjectList[0].ToolDefinition
+
+ ## Return directory of platform makefile
+ #
+ # @retval string Makefile directory
+ #
+ @cached_property
+ def MakeFileDir(self):
+ return self.BuildDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ @cached_property
+ def BuildCommand(self):
+ # BuildCommand should be all the same. So just get one from platform AutoGen
+ return self.AutoGenObjectList[0].BuildCommand
+
+ ## Check the PCDs token value conflict in each DEC file.
+ #
+ # Will cause build break and raise error message while two PCDs conflict.
+ #
+ # @return None
+ #
+ def _CheckAllPcdsTokenValueConflict(self):
+ for Pa in self.AutoGenObjectList:
+ for Package in Pa.PackageList:
+ PcdList = list(Package.Pcds.values())
+ PcdList.sort(key=lambda x: int(x.TokenValue, 0))
+ Count = 0
+ while (Count < len(PcdList) - 1) :
+ Item = PcdList[Count]
+ ItemNext = PcdList[Count + 1]
+ #
+ # Make sure in the same token space the TokenValue should be unique
+ #
+ if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):
+ SameTokenValuePcdList = []
+ SameTokenValuePcdList.append(Item)
+ SameTokenValuePcdList.append(ItemNext)
+ RemainPcdListLength = len(PcdList) - Count - 2
+ for ValueSameCount in range(RemainPcdListLength):
+ if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):
+ SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
+ else:
+                                break
+ #
+ # Sort same token value PCD list with TokenGuid and TokenCName
+ #
+ SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
+ SameTokenValuePcdListCount = 0
+ while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
+ Flag = False
+ TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
+ TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
+
+ if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
+ for PcdItem in GlobalData.MixedPcd:
+ if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \
+ (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
+ Flag = True
+ if not Flag:
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
+ % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
+ ExtraData=None
+ )
+ SameTokenValuePcdListCount += 1
+ Count += SameTokenValuePcdListCount
+ Count += 1
+
+ PcdList = list(Package.Pcds.values())
+ PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
+ Count = 0
+ while (Count < len(PcdList) - 1) :
+ Item = PcdList[Count]
+ ItemNext = PcdList[Count + 1]
+ #
+ # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
+ #
+ if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
+ % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
+ ExtraData=None
+ )
+ Count += 1
+
+    ## Generate the GenFds command
+ @property
+ def GenFdsCommand(self):
+        TopMakefile = GenMake.TopLevelMakefile(self)
+        return TopMakefile._TEMPLATE_.Replace(TopMakefile._TemplateDict).strip()
+
+ @property
+ def GenFdsCommandDict(self):
+ FdsCommandDict = {}
+ LogLevel = EdkLogger.GetLevel()
+ if LogLevel == EdkLogger.VERBOSE:
+ FdsCommandDict["verbose"] = True
+ elif LogLevel <= EdkLogger.DEBUG_9:
+ FdsCommandDict["debug"] = LogLevel - 1
+ elif LogLevel == EdkLogger.QUIET:
+ FdsCommandDict["quiet"] = True
+
+ FdsCommandDict["GenfdsMultiThread"] = GlobalData.gEnableGenfdsMultiThread
+ if GlobalData.gIgnoreSource:
+ FdsCommandDict["IgnoreSources"] = True
+
+ FdsCommandDict["OptionPcd"] = []
+ for pcd in GlobalData.BuildOptionPcd:
+ if pcd[2]:
+ pcdname = '.'.join(pcd[0:3])
+ else:
+ pcdname = '.'.join(pcd[0:2])
+ if pcd[3].startswith('{'):
+ FdsCommandDict["OptionPcd"].append(pcdname + '=' + 'H' + '"' + pcd[3] + '"')
+ else:
+ FdsCommandDict["OptionPcd"].append(pcdname + '=' + pcd[3])
+
+ MacroList = []
+ # macros passed to GenFds
+ MacroDict = {}
+ MacroDict.update(GlobalData.gGlobalDefines)
+ MacroDict.update(GlobalData.gCommandLineDefines)
+ for MacroName in MacroDict:
+ if MacroDict[MacroName] != "":
+ MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
+ else:
+ MacroList.append('"%s"' % MacroName)
+ FdsCommandDict["macro"] = MacroList
+
+ FdsCommandDict["fdf_file"] = [self.FdfFile]
+ FdsCommandDict["build_target"] = self.BuildTarget
+ FdsCommandDict["toolchain_tag"] = self.ToolChain
+ FdsCommandDict["active_platform"] = str(self)
+
+ FdsCommandDict["conf_directory"] = GlobalData.gConfDirectory
+ FdsCommandDict["build_architecture_list"] = ','.join(self.ArchList)
+ FdsCommandDict["platform_build_directory"] = self.BuildDir
+
+ FdsCommandDict["fd"] = self.FdTargetList
+ FdsCommandDict["fv"] = self.FvTargetList
+ FdsCommandDict["cap"] = self.CapTargetList
+ return FdsCommandDict
+
+ ## Create makefile for the platform and modules in it
+ #
+ # @param CreateDepsMakeFile Flag indicating if the makefile for
+ # modules will be created as well
+ #
+ def CreateMakeFile(self, CreateDepsMakeFile=False):
+ if not CreateDepsMakeFile:
+ return
+ for Pa in self.AutoGenObjectList:
+ Pa.CreateMakeFile(CreateDepsMakeFile)
+
+ ## Create autogen code for platform and modules
+ #
+    # Since there is no autogen code for the platform itself, this method does
+    # nothing if CreateDepsCodeFile is set to False.
+ #
+ # @param CreateDepsCodeFile Flag indicating if creating module's
+ # autogen code file or not
+ #
+ def CreateCodeFile(self, CreateDepsCodeFile=False):
+ if not CreateDepsCodeFile:
+ return
+ for Pa in self.AutoGenObjectList:
+ Pa.CreateCodeFile(CreateDepsCodeFile)
+
+    ## Create AsBuilt INF file for the platform
+ #
+ def CreateAsBuiltInf(self):
+ return
+
diff --git a/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py
new file mode 100644
index 00000000..069f49cc
--- /dev/null
+++ b/src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/AutoGen/__init__.py
@@ -0,0 +1,11 @@
+## @file
+# Python 'AutoGen' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+
+__all__ = ["AutoGen"]