python_code
stringlengths 0
679k
| repo_name
stringlengths 9
41
| file_path
stringlengths 6
149
|
---|---|---|
## @file
# This file is used to define class objects of INF file header.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfHeaderObject
'''
## INF file header object
#
# A sample file header
#
# ## @file xxx.inf FileName
# # Abstract
# #
# # Description
# #
# # Copyright
# #
# # License
# #
#
class InfHeaderObject():
    """INF file header fields: file name, abstract, description, copyright
    and license. Each setter accepts only a non-empty value and reports
    success or failure to the caller."""

    def __init__(self):
        self.FileName = ''
        self.Abstract = ''
        self.Description = ''
        self.Copyright = ''
        self.License = ''

    ## SetFileName
    #
    # @param FileName: File Name
    # @return True if the value is a non-empty string, else False
    #
    def SetFileName(self, FileName):
        if FileName is None or FileName == '':
            return False
        self.FileName = FileName
        return True

    ## GetFileName
    #
    def GetFileName(self):
        return self.FileName

    ## SetAbstract
    #
    # @param Abstract: Abstract
    # @return True if the value is a non-empty string, else False
    #
    def SetAbstract(self, Abstract):
        if Abstract is None or Abstract == '':
            return False
        self.Abstract = Abstract
        return True

    ## GetAbstract
    #
    def GetAbstract(self):
        return self.Abstract

    ## SetDescription
    #
    # @param Description: Description content
    # @return True if the value is a non-empty string, else False
    #
    def SetDescription(self, Description):
        if Description is None or Description == '':
            return False
        self.Description = Description
        return True

    ## GetDescription
    #
    def GetDescription(self):
        return self.Description

    ## SetCopyright
    #
    # @param Copyright: Copyright content
    # @return True if the value is a non-empty string, else False
    #
    def SetCopyright(self, Copyright):
        if Copyright is None or Copyright == '':
            return False
        self.Copyright = Copyright
        return True

    ## GetCopyright
    #
    def GetCopyright(self):
        return self.Copyright

    ## SetLicense
    #
    # @param License: License content
    # @return True if the value is a non-empty string, else False
    #
    def SetLicense(self, License):
        if License is None or License == '':
            return False
        self.License = License
        return True

    ## GetLicense
    #
    def GetLicense(self):
        return self.License
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfHeaderObject.py |
## @file
# This file is used to define class objects of INF file [LibraryClasses] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfLibraryClassesObject
'''
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import GlobalData
from Library.Misc import Sdict
from Object.Parser.InfCommonObject import CurrentLine
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.ParserValidate import IsValidLibName
## GetArchModuleType
#
# Get Arch List and ModuleType List
#
def GetArchModuleType(KeyList):
    """Collect the distinct Arch values and ModuleType values from KeyList.

    @param KeyList: iterable of (Arch, ModuleType) pairs; a ModuleType entry
                    may itself be a '|'-separated list of module types.
    @return tuple (ArchList, ModuleList) of unique values in first-seen order;
            empty/None fields are normalized to 'COMMON'.
    """
    ArchList = []
    ModuleList = []
    for ArchItem, ModuleItem in KeyList:
        # An empty or missing modifier means the section applies to all,
        # which the parser represents as 'COMMON'.
        if not ArchItem:
            ArchItem = 'COMMON'
        if not ModuleItem:
            ModuleItem = 'COMMON'
        if ArchItem not in ArchList:
            ArchList.append(ArchItem)
        for ModEntry in ModuleItem.split('|'):
            if ModEntry not in ModuleList:
                ModuleList.append(ModEntry)
    return (ArchList, ModuleList)
class InfLibraryClassItem():
    """One [LibraryClasses] entry: the library class name, optional
    feature-flag expression and help comment, the arch/module-type scope,
    and (for as-built INFs) the instance file GUID and version."""

    def __init__(self, LibName='', FeatureFlagExp='', HelpString=None):
        self.LibName = LibName
        self.FeatureFlagExp = FeatureFlagExp
        self.HelpString = HelpString
        self.CurrentLine = CurrentLine()  # source location for diagnostics
        self.SupArchList = []
        self.SupModuleList = []
        #
        # As-build related information
        #
        self.FileGuid = ''
        self.Version = ''

    def SetLibName(self, LibName):
        self.LibName = LibName

    def GetLibName(self):
        return self.LibName

    def SetHelpString(self, HelpString):
        self.HelpString = HelpString

    def GetHelpString(self):
        return self.HelpString

    def SetFeatureFlagExp(self, FeatureFlagExp):
        self.FeatureFlagExp = FeatureFlagExp

    def GetFeatureFlagExp(self):
        return self.FeatureFlagExp

    def SetSupArchList(self, SupArchList):
        self.SupArchList = SupArchList

    def GetSupArchList(self):
        return self.SupArchList

    def SetSupModuleList(self, SupModuleList):
        self.SupModuleList = SupModuleList

    def GetSupModuleList(self):
        return self.SupModuleList

    def SetFileGuid(self, FileGuid):
        self.FileGuid = FileGuid

    def GetFileGuid(self):
        return self.FileGuid

    def SetVersion(self, Version):
        self.Version = Version

    def GetVersion(self):
        return self.Version
## INF LibraryClass Section
#
#
#
class InfLibraryClassObject():
    """Container for the [LibraryClasses] section content of one INF file.

    Entries are stored in self.LibraryClasses (an Sdict) keyed by the
    InfLibraryClassItem object itself; each value is a list of item objects.
    """
    def __init__(self):
        self.LibraryClasses = Sdict()
        #
        # Macro defined in this section should be only used in this section.
        #
        self.Macros = {}

    ##SetLibraryClasses
    #
    # Validate and store the entries of one [LibraryClasses] section.
    # For a source INF each LibContent item is
    # (FieldList, HelpComment, (LineString, LineNo, FileName)); for a binary
    # (as-built) INF each item is (FileGuid, Version).
    #
    # @param HelpString: It can be a common comment or contain a recommend
    #                    instance.
    # @return True (format errors are reported through Logger.Error)
    #
    def SetLibraryClasses(self, LibContent, KeyList=None):
        #
        # Validate Arch
        #
        (__SupArchList, __SupModuleList) = GetArchModuleType(KeyList)

        for LibItem in LibContent:
            LibItemObj = InfLibraryClassItem()
            if not GlobalData.gIS_BINARY_INF:
                # Source INF: record the source location for diagnostics.
                HelpStringObj = LibItem[1]
                LibItemObj.CurrentLine.SetFileName(LibItem[2][2])
                LibItemObj.CurrentLine.SetLineNo(LibItem[2][1])
                LibItemObj.CurrentLine.SetLineString(LibItem[2][0])
                LibItem = LibItem[0]
                if HelpStringObj is not None:
                    LibItemObj.SetHelpString(HelpStringObj)
                if len(LibItem) >= 1:
                    if LibItem[0].strip() != '':
                        # 'NULL' passes the name syntax check but is reserved,
                        # so it is rejected explicitly.
                        if IsValidLibName(LibItem[0].strip()):
                            if LibItem[0].strip() != 'NULL':
                                LibItemObj.SetLibName(LibItem[0])
                            else:
                                Logger.Error("InfParser",
                                             ToolError.FORMAT_INVALID,
                                             ST.ERR_INF_PARSER_DEFINE_LIB_NAME_INVALID,
                                             File=GlobalData.gINF_MODULE_NAME,
                                             Line=LibItemObj.CurrentLine.GetLineNo(),
                                             ExtraData=LibItemObj.CurrentLine.GetLineString())
                        else:
                            Logger.Error("InfParser",
                                         ToolError.FORMAT_INVALID,
                                         ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID % (LibItem[0]),
                                         File=GlobalData.gINF_MODULE_NAME,
                                         Line=LibItemObj.CurrentLine.GetLineNo(),
                                         ExtraData=LibItemObj.CurrentLine.GetLineString())
                    else:
                        Logger.Error("InfParser",
                                     ToolError.FORMAT_INVALID,
                                     ST.ERR_INF_PARSER_LIBRARY_SECTION_LIBNAME_MISSING,
                                     File=GlobalData.gINF_MODULE_NAME,
                                     Line=LibItemObj.CurrentLine.GetLineNo(),
                                     ExtraData=LibItemObj.CurrentLine.GetLineString())
                # Optional second field: a feature-flag expression.
                if len(LibItem) == 2:
                    if LibItem[1].strip() == '':
                        Logger.Error("InfParser",
                                     ToolError.FORMAT_INVALID,
                                     ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
                                     File=GlobalData.gINF_MODULE_NAME,
                                     Line=LibItemObj.CurrentLine.GetLineNo(),
                                     ExtraData=LibItemObj.CurrentLine.GetLineString())
                    #
                    # Validate FFE
                    #
                    FeatureFlagRtv = IsValidFeatureFlagExp(LibItem[1].strip())
                    if not FeatureFlagRtv[0]:
                        Logger.Error("InfParser",
                                     ToolError.FORMAT_INVALID,
                                     ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID % (FeatureFlagRtv[1]),
                                     File=GlobalData.gINF_MODULE_NAME,
                                     Line=LibItemObj.CurrentLine.GetLineNo(),
                                     ExtraData=LibItemObj.CurrentLine.GetLineString())
                    LibItemObj.SetFeatureFlagExp(LibItem[1].strip())
                #
                # Invalid strings
                #
                if len(LibItem) < 1 or len(LibItem) > 2:
                    Logger.Error("InfParser",
                                 ToolError.FORMAT_INVALID,
                                 ST.ERR_INF_PARSER_LIBRARY_SECTION_CONTENT_ERROR,
                                 File=GlobalData.gINF_MODULE_NAME,
                                 Line=LibItemObj.CurrentLine.GetLineNo(),
                                 ExtraData=LibItemObj.CurrentLine.GetLineString())
                LibItemObj.SetSupArchList(__SupArchList)
                LibItemObj.SetSupModuleList(__SupModuleList)
                #
                # Determine Library class duplicate. Follow below rule:
                #
                # A library class keyword must not be duplicated within a
                # [LibraryClasses] section. Library class keywords may appear in
                # multiple architectural and module type [LibraryClasses] sections.
                # A library class keyword listed in an architectural or module type
                # [LibraryClasses] section must not be listed in the common
                # architectural or module type [LibraryClasses] section.
                #
                # NOTE: This check will not report error now. But keep code for future enhancement.
                #
#                for Item in self.LibraryClasses:
#                    if Item.GetLibName() == LibItemObj.GetLibName():
#                        ItemSupArchList = Item.GetSupArchList()
#                        ItemSupModuleList = Item.GetSupModuleList()
#                        for ItemArch in ItemSupArchList:
#                            for ItemModule in ItemSupModuleList:
#                                for LibItemObjArch in __SupArchList:
#                                    for LibItemObjModule in __SupModuleList:
#                                        if ItemArch == LibItemObjArch and LibItemObjModule == ItemModule:
#                                            #
#                                            # ERR_INF_PARSER_ITEM_DUPLICATE
#                                            #
#                                            pass
#                                        if (ItemArch.upper() == 'COMMON' or LibItemObjArch.upper() == 'COMMON') \
#                                           and LibItemObjModule == ItemModule:
#                                            #
#                                            # ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
#                                            #
#                                            pass
            else:
                #
                # Assume the file GUID is well formatted.
                #
                LibItemObj.SetFileGuid(LibItem[0])
                LibItemObj.SetVersion(LibItem[1])
                LibItemObj.SetSupArchList(__SupArchList)

            # Append to the per-key list (the key is the item object itself).
            if (LibItemObj) in self.LibraryClasses:
                LibraryList = self.LibraryClasses[LibItemObj]
                LibraryList.append(LibItemObj)
                self.LibraryClasses[LibItemObj] = LibraryList
            else:
                LibraryList = []
                LibraryList.append(LibItemObj)
                self.LibraryClasses[LibItemObj] = LibraryList

        return True

    def GetLibraryClasses(self):
        return self.LibraryClasses
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfLibraryClassesObject.py |
## @file
# This file is used to define class objects of INF file [BuildOptions] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfBuildOptionObject
'''
from Library import GlobalData
from Object.Parser.InfCommonObject import InfSectionCommonDef
class InfBuildOptionItem():
    """One [BuildOptions] entry: either the raw section text (source INF)
    or the pre-split option list (as-built INF), plus its arch scope."""

    def __init__(self):
        self.Content = ''
        self.SupArchList = []
        self.AsBuildList = []

    ## Store the raw section text.
    def SetContent(self, Content):
        self.Content = Content

    ## Return the raw section text.
    def GetContent(self):
        return self.Content

    ## Store the arch list the options apply to.
    def SetSupArchList(self, SupArchList):
        self.SupArchList = SupArchList

    ## Return the arch list the options apply to.
    def GetSupArchList(self):
        return self.SupArchList

    #
    # AsBuild Information
    #
    ## Store the as-built option list.
    def SetAsBuildList(self, AsBuildList):
        self.AsBuildList = AsBuildList

    ## Return the as-built option list.
    def GetAsBuildList(self):
        return self.AsBuildList
## INF BuildOption section
# Macro define is not permitted for this section.
#
#
class InfBuildOptionsObject(InfSectionCommonDef):
    """Holds the [BuildOptions] section of an INF file as a list of
    InfBuildOptionItem objects."""

    def __init__(self):
        self.BuildOptions = []
        InfSectionCommonDef.__init__(self)

    ## SetBuildOptions
    #
    # Store the section content. For a source INF the raw section text is
    # kept verbatim; for an as-built binary INF the already-split option
    # list is kept instead.
    #
    # @param BuildOptCont    A list contain BuildOption related information.
    #                        The element in the list contain 3 members:
    #                        BuildOptionName, BuildOptionValue and IsReplace
    #                        flag.
    # @param ArchList        Indicated which arch of build options belong to.
    # @param SectionContent  Raw section text (source INF only).
    #
    # @return True           Build options set successfully
    #
    def SetBuildOptions(self, BuildOptCont, ArchList = None, SectionContent = ''):
        if GlobalData.gIS_BINARY_INF:
            #
            # For AsBuild INF file
            #
            if len(BuildOptCont) >= 1:
                OptionItem = InfBuildOptionItem()
                OptionItem.SetAsBuildList(BuildOptCont)
                OptionItem.SetSupArchList(ArchList)
                self.BuildOptions.append(OptionItem)
        elif SectionContent.strip() != '':
            #
            # Source INF: keep the raw section text.
            #
            OptionItem = InfBuildOptionItem()
            OptionItem.SetContent(SectionContent)
            OptionItem.SetSupArchList(ArchList)
            self.BuildOptions.append(OptionItem)
        return True

    def GetBuildOptions(self):
        return self.BuildOptions
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfBuildOptionObject.py |
## @file
# This file is used to define common class objects of [Defines] section for INF file.
# It will be consumed by InfParser
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfDefineCommonObject
'''
from Object.Parser.InfCommonObject import InfLineCommentObject
## InfDefineImageExeParamItem
#
class InfDefineImageExeParamItem():
    """Base item for image-execution [Defines] values (ENTRY_POINT,
    UNLOAD_IMAGE, CONSTRUCTOR, DESTRUCTOR): holds a C symbol name, an
    optional feature-flag expression and the attached line comments."""

    def __init__(self):
        self.CName = ''
        self.FeatureFlagExp = ''
        self.Comments = InfLineCommentObject()

    ## Store the C symbol name.
    def SetCName(self, CName):
        self.CName = CName

    ## Return the C symbol name.
    def GetCName(self):
        return self.CName

    ## Store the feature-flag expression text.
    def SetFeatureFlagExp(self, FeatureFlagExp):
        self.FeatureFlagExp = FeatureFlagExp

    ## Return the feature-flag expression text.
    def GetFeatureFlagExp(self):
        return self.FeatureFlagExp
## InfDefineEntryPointItem
#
class InfDefineEntryPointItem(InfDefineImageExeParamItem):
    """ENTRY_POINT item in [Defines]; adds nothing beyond the base storage."""
    def __init__(self):
        InfDefineImageExeParamItem.__init__(self)
## InfDefineUnloadImageItem
#
class InfDefineUnloadImageItem(InfDefineImageExeParamItem):
    """UNLOAD_IMAGE item in [Defines]; adds nothing beyond the base storage."""
    def __init__(self):
        InfDefineImageExeParamItem.__init__(self)
## InfDefineConstructorItem
#
class InfDefineConstructorItem(InfDefineImageExeParamItem):
    """CONSTRUCTOR item in [Defines]; additionally records the module
    types the constructor supports."""

    def __init__(self):
        InfDefineImageExeParamItem.__init__(self)
        self.SupModList = []

    ## Store the supported module-type list.
    def SetSupModList(self, SupModList):
        self.SupModList = SupModList

    ## Return the supported module-type list.
    def GetSupModList(self):
        return self.SupModList
## InfDefineDestructorItem
#
class InfDefineDestructorItem(InfDefineImageExeParamItem):
    """DESTRUCTOR item in [Defines]; additionally records the module
    types the destructor supports."""

    def __init__(self):
        InfDefineImageExeParamItem.__init__(self)
        self.SupModList = []

    ## Store the supported module-type list.
    def SetSupModList(self, SupModList):
        self.SupModList = SupModList

    ## Return the supported module-type list.
    def GetSupModList(self):
        return self.SupModList
## InfDefineLibraryItem
#
class InfDefineLibraryItem():
    """LIBRARY_CLASS item in [Defines]: the library class name, the list
    of supported types, and the attached line comments."""

    def __init__(self):
        self.LibraryName = ''
        self.Types = []
        self.Comments = InfLineCommentObject()

    ## Store the library class name.
    def SetLibraryName(self, Name):
        self.LibraryName = Name

    ## Return the library class name.
    def GetLibraryName(self):
        return self.LibraryName

    ## Store the supported type list.
    def SetTypes(self, Type):
        self.Types = Type

    ## Return the supported type list.
    def GetTypes(self):
        return self.Types
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfDefineCommonObject.py |
## @file
# This file is used to define class objects of INF file [UserExtension] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfUserExtensionsObject
'''
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import GlobalData
from Library.Misc import Sdict
class InfUserExtensionItem():
    """One [UserExtensions] entry: the section body text, the UserId and
    IdString from the section header, and the arch scope."""

    def __init__(self,
                 Content = '',
                 UserId = '',
                 IdString = ''):
        self.Content = Content
        self.UserId = UserId
        self.IdString = IdString
        self.SupArchList = []

    ## Store the section body text.
    def SetContent(self, Content):
        self.Content = Content

    ## Return the section body text.
    def GetContent(self):
        return self.Content

    ## Store the user identifier.
    def SetUserId(self, UserId):
        self.UserId = UserId

    ## Return the user identifier.
    def GetUserId(self):
        return self.UserId

    ## Store the extension identifier string.
    def SetIdString(self, IdString):
        self.IdString = IdString

    ## Return the extension identifier string.
    def GetIdString(self):
        return self.IdString

    ## Store the arch list the extension applies to.
    def SetSupArchList(self, SupArchList):
        self.SupArchList = SupArchList

    ## Return the arch list the extension applies to.
    def GetSupArchList(self):
        return self.SupArchList
##
#
#
#
class InfUserExtensionObject():
    """Container for the [UserExtensions] sections of one INF file.

    self.UserExtension maps each (UserId, IdString, Arch) header tuple to a
    list of InfUserExtensionItem objects parsed for that header.
    """
    def __init__(self):
        self.UserExtension = Sdict()

    ## SetUserExtension
    #
    # @param UserExtensionCont: section body text; empty content is accepted
    #                           and silently ignored
    # @param IdContent: list of (UserId, IdString, Arch) tuples from the
    #                   section header
    # @param LineNo:    section header line number, used for error reporting
    # @return True (duplicate headers are reported through Logger.Error)
    #
    def SetUserExtension(self, UserExtensionCont, IdContent=None, LineNo=None):
        if not UserExtensionCont or UserExtensionCont == '':
            return True
        #
        # IdContent is a list contain UserId and IdString
        # For this call the general section header parser, if no definition of
        # IdString/UserId, it will return 'COMMON'
        #
        for IdContentItem in IdContent:
            InfUserExtensionItemObj = InfUserExtensionItem()
            # 'COMMON' is the parser's placeholder for "not specified";
            # store it as an empty string on the item.
            if IdContentItem[0] == 'COMMON':
                UserId = ''
            else:
                UserId = IdContentItem[0]
            if IdContentItem[1] == 'COMMON':
                IdString = ''
            else:
                IdString = IdContentItem[1]
            #
            # Fill UserExtensionObj members.
            #
            InfUserExtensionItemObj.SetUserId(UserId)
            InfUserExtensionItemObj.SetIdString(IdString)
            InfUserExtensionItemObj.SetContent(UserExtensionCont)
            InfUserExtensionItemObj.SetSupArchList(IdContentItem[2])

#            for CheckItem in self.UserExtension:
#                if IdContentItem[0] == CheckItem[0] and IdContentItem[1] == CheckItem[1]:
#                    if IdContentItem[2].upper() == 'COMMON' or CheckItem[2].upper() == 'COMMON':
#                        #
#                        # For COMMON ARCH type, do special check.
#                        #
#                        Logger.Error('InfParser',
#                            ToolError.FORMAT_INVALID,
#                            ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR%\
#                            (IdContentItem[0] + '.' + IdContentItem[1] + '.' + IdContentItem[2]),
#                            File=GlobalData.gINF_MODULE_NAME,
#                            Line=LineNo,
#                            ExtraData=None)

            if IdContentItem in self.UserExtension:
                #
                # Each UserExtensions section header must have a unique set
                # of UserId, IdString and Arch values.
                # This means that the same UserId can be used in more than one
                # section header, provided the IdString or Arch values are
                # different. The same IdString values can be used in more than
                # one section header if the UserId or Arch values are
                # different. The same UserId and the same IdString can be used
                # in a section header if the Arch values are different in each
                # of the section headers.
                #
                Logger.Error('InfParser',
                             ToolError.FORMAT_INVALID,
                             ST.ERR_INF_PARSER_UE_SECTION_DUPLICATE_ERROR%\
                             (IdContentItem[0] + '.' + IdContentItem[1] + '.' + IdContentItem[2]),
                             File=GlobalData.gINF_MODULE_NAME,
                             Line=LineNo,
                             ExtraData=None)
            else:
                UserExtensionList = []
                UserExtensionList.append(InfUserExtensionItemObj)
                self.UserExtension[IdContentItem] = UserExtensionList

        return True

    def GetUserExtension(self):
        return self.UserExtension
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfUserExtensionObject.py |
## @file
# This file is used to define class objects of INF file [Protocols] section.
# It will be consumed by InfParser.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
'''
InfProtocolObject
'''
from Library.ParserValidate import IsValidCVariableName
from Library.CommentParsing import ParseComment
from Library.ExpressionValidate import IsValidFeatureFlagExp
from Library.Misc import Sdict
from Object.Parser.InfMisc import ErrorInInf
from Library import DataType as DT
from Logger import StringTable as ST
def ParseProtocolComment(CommentsList, InfProtocolItemObj):
    """Parse the comment lines attached to one [Protocols] entry and attach
    the resulting usage/notify/help items to InfProtocolItemObj.

    BlockFlag tracks how the current run of comment lines should be flushed:
      -1 : no comment block in progress
       1 : one generic (no usage/notify token) comment line collected
       2 : two or more generic comment lines collected
       3 : a generic block is immediately followed by a usage/notify line ->
           flush the generic block as one item, then the usage/notify line
           as a second item
       4 : flush the collected line(s) as a single comment item

    @param CommentsList:       list of raw comment items for this entry
    @param InfProtocolItemObj: item to receive the parsed comment list
    @return InfProtocolItemObj (same object, updated in place)
    """
    CommentInsList = []
    PreUsage = None
    PreNotify = None
    PreHelpText = ''
    BlockFlag = -1
    Count = 0
    for CommentItem in CommentsList:
        Count = Count + 1
        CommentItemUsage, \
        CommentItemNotify, \
        CommentItemString, \
        CommentItemHelpText = \
                ParseComment(CommentItem,
                             DT.PROTOCOL_USAGE_TOKENS,
                             DT.PROTOCOL_NOTIFY_TOKENS,
                             ['PROTOCOL'],
                             False)
        if CommentItemString:
            pass
        if CommentItemHelpText is None:
            CommentItemHelpText = ''
            # A trailing generic comment with no text becomes a bare newline.
            if Count == len(CommentsList) and CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
                CommentItemHelpText = DT.END_OF_LINE
        # Last line: decide how the pending block must be flushed.
        if Count == len(CommentsList):
            if BlockFlag == 1 or BlockFlag == 2:
                if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
                    BlockFlag = 4
                else:
                    BlockFlag = 3
            elif BlockFlag == -1:
                BlockFlag = 4
        # Not flushed yet: update the block state from the current line.
        if BlockFlag == -1 or BlockFlag == 1 or BlockFlag == 2:
            if CommentItemUsage == CommentItemNotify == DT.ITEM_UNDEFINED:
                if BlockFlag == -1:
                    BlockFlag = 1
                elif BlockFlag == 1:
                    BlockFlag = 2
            else:
                if BlockFlag == 1 or BlockFlag == 2:
                    BlockFlag = 3
                elif BlockFlag == -1:
                    BlockFlag = 4
        #
        # Combine two comment line if they are generic comment
        #
        if CommentItemUsage == CommentItemNotify == PreUsage == PreNotify == DT.ITEM_UNDEFINED:
            CommentItemHelpText = PreHelpText + DT.END_OF_LINE + CommentItemHelpText
            PreHelpText = CommentItemHelpText
        if BlockFlag == 4:
            # Flush the collected line(s) as one comment item.
            CommentItemIns = InfProtocolItemCommentContent()
            CommentItemIns.SetUsageItem(CommentItemUsage)
            CommentItemIns.SetNotify(CommentItemNotify)
            CommentItemIns.SetHelpStringItem(CommentItemHelpText)
            CommentInsList.append(CommentItemIns)
            BlockFlag = -1
            PreUsage = None
            PreNotify = None
            PreHelpText = ''
        elif BlockFlag == 3:
            #
            # Add previous help string
            #
            CommentItemIns = InfProtocolItemCommentContent()
            CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
            CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
            if PreHelpText == '' or PreHelpText.endswith(DT.END_OF_LINE):
                PreHelpText += DT.END_OF_LINE
            CommentItemIns.SetHelpStringItem(PreHelpText)
            CommentInsList.append(CommentItemIns)
            #
            # Add Current help string
            #
            CommentItemIns = InfProtocolItemCommentContent()
            CommentItemIns.SetUsageItem(CommentItemUsage)
            CommentItemIns.SetNotify(CommentItemNotify)
            CommentItemIns.SetHelpStringItem(CommentItemHelpText)
            CommentInsList.append(CommentItemIns)
            BlockFlag = -1
            PreUsage = None
            PreNotify = None
            PreHelpText = ''
        else:
            # Keep accumulating: remember this line for the next iteration.
            PreUsage = CommentItemUsage
            PreNotify = CommentItemNotify
            PreHelpText = CommentItemHelpText
    InfProtocolItemObj.SetCommentList(CommentInsList)
    return InfProtocolItemObj
class InfProtocolItemCommentContent():
    """Parsed comment content for one protocol entry: usage token, notify
    flag and help text (e.g. '## SOMETIMES_CONSUMES ## HelpString')."""

    def __init__(self):
        self.UsageItem = ''
        self.HelpStringItem = ''
        self.Notify = ''
        self.CommentList = []

    ## Store the usage token.
    def SetUsageItem(self, UsageItem):
        self.UsageItem = UsageItem

    ## Return the usage token.
    def GetUsageItem(self):
        return self.UsageItem

    ## Record the notify flag; only a defined notify token sets it to 'true'.
    def SetNotify(self, Notify):
        if Notify != DT.ITEM_UNDEFINED:
            self.Notify = 'true'

    ## Return the notify flag ('' or 'true').
    def GetNotify(self):
        return self.Notify

    ## Store the help text.
    def SetHelpStringItem(self, HelpStringItem):
        self.HelpStringItem = HelpStringItem

    ## Return the help text.
    def GetHelpStringItem(self):
        return self.HelpStringItem
class InfProtocolItem():
    """One [Protocols] entry: protocol C name, optional feature-flag
    expression, arch scope and parsed usage comments."""

    def __init__(self):
        self.Name = ''
        self.FeatureFlagExp = ''
        self.SupArchList = []
        self.CommentList = []

    ## Store the protocol C name.
    def SetName(self, Name):
        self.Name = Name

    ## Return the protocol C name.
    def GetName(self):
        return self.Name

    ## Store the feature-flag expression text.
    def SetFeatureFlagExp(self, FeatureFlagExp):
        self.FeatureFlagExp = FeatureFlagExp

    ## Return the feature-flag expression text.
    def GetFeatureFlagExp(self):
        return self.FeatureFlagExp

    ## Store the arch list the entry applies to.
    def SetSupArchList(self, SupArchList):
        self.SupArchList = SupArchList

    ## Return the arch list the entry applies to.
    def GetSupArchList(self):
        return self.SupArchList

    ## Store the parsed comment item list.
    def SetCommentList(self, CommentList):
        self.CommentList = CommentList

    ## Return the parsed comment item list.
    def GetCommentList(self):
        return self.CommentList
##
#
#
#
class InfProtocolObject():
    """Container for the [Protocols] section content of one INF file.

    self.Protocols maps each InfProtocolItem to a list of item objects.
    """
    def __init__(self):
        self.Protocols = Sdict()
        #
        # Macro defined in this section should be only used in this section.
        #
        self.Macros = {}

    ## SetProtocol
    #
    # Validate and store the entries of one [Protocols] section.
    #
    # @param ProtocolContent: list of (FieldList, CommentsList, CurrentLine)
    #                         items produced by the parser
    # @param Arch: arch modifiers from the section header
    # @return True (format errors are reported through ErrorInInf)
    #
    def SetProtocol(self, ProtocolContent, Arch = None,):
        __SupArchList = []
        for ArchItem in Arch:
            #
            # Validate Arch
            #
            if (ArchItem == '' or ArchItem is None):
                ArchItem = 'COMMON'
            __SupArchList.append(ArchItem)

        for Item in ProtocolContent:
            #
            # Get Comment content of this protocol
            #
            CommentsList = None
            if len(Item) == 3:
                CommentsList = Item[1]
            CurrentLineOfItem = Item[2]
            # LineInfo is (file name, line number, line string).
            LineInfo = (CurrentLineOfItem[2], CurrentLineOfItem[1], CurrentLineOfItem[0])
            Item = Item[0]
            InfProtocolItemObj = InfProtocolItem()
            if len(Item) >= 1 and len(Item) <= 2:
                #
                # Only CName contained
                #
                if not IsValidCVariableName(Item[0]):
                    ErrorInInf(ST.ERR_INF_PARSER_INVALID_CNAME%(Item[0]),
                               LineInfo=LineInfo)
                if (Item[0] != ''):
                    InfProtocolItemObj.SetName(Item[0])
                else:
                    ErrorInInf(ST.ERR_INF_PARSER_CNAME_MISSING,
                               LineInfo=LineInfo)
            if len(Item) == 2:
                #
                # Contained CName and Feature Flag Express
                # <statements> ::= <CName> ["|"
                # <FeatureFlagExpress>]
                # For Protocol Object
                #
                if Item[1].strip() == '':
                    ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_MISSING,
                               LineInfo=LineInfo)
                #
                # Validate Feature Flag Express for Item[1]
                #
                FeatureFlagRtv = IsValidFeatureFlagExp(Item[1].strip())
                if not FeatureFlagRtv[0]:
                    ErrorInInf(ST.ERR_INF_PARSER_FEATURE_FLAG_EXP_SYNTAX_INVLID%(FeatureFlagRtv[1]),
                               LineInfo=LineInfo)
                InfProtocolItemObj.SetFeatureFlagExp(Item[1])
            if len(Item) < 1 or len(Item) > 2:
                #
                # Invalid format of Protocols statement
                #
                ErrorInInf(ST.ERR_INF_PARSER_GUID_PPI_PROTOCOL_SECTION_CONTENT_ERROR,
                           LineInfo=LineInfo)
            #
            # Get/Set Usage and HelpString for Protocol entry
            #
            if CommentsList is not None and len(CommentsList) != 0:
                InfProtocolItemObj = ParseProtocolComment(CommentsList, InfProtocolItemObj)
            else:
                # No comments: attach a single undefined usage/notify item.
                CommentItemIns = InfProtocolItemCommentContent()
                CommentItemIns.SetUsageItem(DT.ITEM_UNDEFINED)
                CommentItemIns.SetNotify(DT.ITEM_UNDEFINED)
                InfProtocolItemObj.SetCommentList([CommentItemIns])

            InfProtocolItemObj.SetSupArchList(__SupArchList)
            #
            # Determine protocol name duplicate. Follow below rule:
            #
            # A protocol must not be duplicated within a [Protocols] section.
            # A protocol may appear in multiple architectural [Protocols]
            # sections. A protocol listed in an architectural [Protocols]
            # section must not be listed in the common architectural
            # [Protocols] section.
            #
            # NOTE: This check will not report error now.
            #
            for Item in self.Protocols:
                if Item.GetName() == InfProtocolItemObj.GetName():
                    ItemSupArchList = Item.GetSupArchList()
                    for ItemArch in ItemSupArchList:
                        for ProtocolItemObjArch in __SupArchList:
                            if ItemArch == ProtocolItemObjArch:
                                #
                                # ST.ERR_INF_PARSER_ITEM_DUPLICATE
                                #
                                pass
                            if ItemArch.upper() == 'COMMON' or ProtocolItemObjArch.upper() == 'COMMON':
                                #
                                # ST.ERR_INF_PARSER_ITEM_DUPLICATE_COMMON
                                #
                                pass
            # Append to the per-key list (the key is the item object itself).
            if (InfProtocolItemObj) in self.Protocols:
                ProcotolList = self.Protocols[InfProtocolItemObj]
                ProcotolList.append(InfProtocolItemObj)
                self.Protocols[InfProtocolItemObj] = ProcotolList
            else:
                ProcotolList = []
                ProcotolList.append(InfProtocolItemObj)
                self.Protocols[InfProtocolItemObj] = ProcotolList

        return True

    def GetProtocol(self):
        return self.Protocols
| edk2-master | BaseTools/Source/Python/UPT/Object/Parser/InfProtocolObject.py |
## @file InfPomAlignmentMisc.py
# This file contains the routines for InfPomAlignment
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfPomAlignmentMisc
'''
##
# Import modules
#
import Logger.Log as Logger
from Library import DataType as DT
from Library.Misc import ConvertArchList
from Object.POM.ModuleObject import BinaryFileObject
from Object.POM import CommonObject
from Library.StringUtils import FORMAT_INVALID
from Library.Misc import CheckGuidRegFormat
from Logger import StringTable as ST
## GenModuleHeaderUserExt
#
#
def GenModuleHeaderUserExt(DefineObj, ArchString):
    """Regenerate [Defines] statements for the module header user extension.

    Collects the define items held by DefineObj (EDK_RELEASE_VERSION, SHADOW,
    DPX_SOURCE, PCI_*, CUSTOM_MAKEFILE, UEFI_HII_RESOURCE_SECTION) and turns
    each present item into a 'Name = Value' statement.

    @param DefineObj:  parsed [Defines] section object providing Get*() accessors
    @param ArchString: architecture string the statements apply to

    @return dict mapping each regenerated statement to ArchString. The
            UEFI_HII_RESOURCE_SECTION statement maps to '' (preserved from the
            original implementation). Insertion order matches the original
            stanza order.
    """
    DefinesDictNew = {}

    # Append one statement for a simple single-value define item, if present.
    # Factored out of seven identical copy-paste stanzas.
    def _AddValueItem(Name, Item, ArchValue):
        if Item is not None:
            Statement = _GenInfDefineStateMent(Item.Comments.GetHeaderComments(),
                                               Name,
                                               Item.GetValue(),
                                               Item.Comments.GetTailComments())
            DefinesDictNew[Statement] = ArchValue

    _AddValueItem(DT.TAB_INF_DEFINES_EDK_RELEASE_VERSION,
                  DefineObj.GetEdkReleaseVersion(), ArchString)
    _AddValueItem(DT.TAB_INF_DEFINES_SHADOW,
                  DefineObj.GetShadow(), ArchString)

    #
    # DPX_SOURCE may occur multiple times; each entry carries its own comments.
    #
    DpxSource = DefineObj.GetDpxSource()
    if DpxSource is not None:
        Name = DT.TAB_INF_DEFINES_DPX_SOURCE
        for DpxSourceItem in DpxSource:
            Statement = _GenInfDefineStateMent(DpxSourceItem[1].GetHeaderComments(),
                                               Name,
                                               DpxSourceItem[0],
                                               DpxSourceItem[1].GetTailComments())
            DefinesDictNew[Statement] = ArchString

    _AddValueItem(DT.TAB_INF_DEFINES_PCI_VENDOR_ID,
                  DefineObj.GetPciVendorId(), ArchString)
    _AddValueItem(DT.TAB_INF_DEFINES_PCI_DEVICE_ID,
                  DefineObj.GetPciDeviceId(), ArchString)
    _AddValueItem(DT.TAB_INF_DEFINES_PCI_CLASS_CODE,
                  DefineObj.GetPciClassCode(), ArchString)
    _AddValueItem(DT.TAB_INF_DEFINES_PCI_REVISION,
                  DefineObj.GetPciRevision(), ArchString)
    _AddValueItem(DT.TAB_INF_DEFINES_PCI_COMPRESS,
                  DefineObj.GetPciCompress(), ArchString)

    #
    # CUSTOM_MAKEFILE: value is 'Family | Path' when a family is given,
    # otherwise just 'Path'.
    #
    CustomMakefile = DefineObj.GetCustomMakefile()
    if len(CustomMakefile) >= 1:
        for CustomMakefileItem in CustomMakefile:
            Name = DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE
            #
            # Not with Feature Flag Expression
            #
            if len(CustomMakefileItem) == 3:
                if CustomMakefileItem[0] != '':
                    Value = CustomMakefileItem[0] + ' | ' + CustomMakefileItem[1]
                else:
                    Value = CustomMakefileItem[1]
                Comments = CustomMakefileItem[2]
                Statement = _GenInfDefineStateMent(Comments.GetHeaderComments(),
                                                   Name,
                                                   Value,
                                                   Comments.GetTailComments())
                DefinesDictNew[Statement] = ArchString

    # UEFI_HII_RESOURCE_SECTION intentionally maps to '' rather than
    # ArchString (behavior kept from the original implementation).
    _AddValueItem(DT.TAB_INF_DEFINES_UEFI_HII_RESOURCE_SECTION,
                  DefineObj.GetUefiHiiResourceSection(), "")

    return DefinesDictNew
## Generate the define statement that will be put into userextension
# Not support comments.
#
# @param HeaderComment: the original header comment (# not removed)
# @param Name: the definition keyword, should not be empty or none
# @param Value: the definition keyword value
# @param TailComment: the original Tail comment (# not removed)
#
# @return: the regenerated define statement
#
def _GenInfDefineStateMent(HeaderComment, Name, Value, TailComment):
    """Build a 'Name = Value' define statement.

    The header/tail comments are not emitted into the statement; they are
    only passed to the debug log.
    """
    Logger.Debug(5, HeaderComment + TailComment)
    return '%s = %s' % (Name, Value)
## GenBinaryData
#
#
def GenBinaryData(BinaryData, BinaryObj, BinariesDict, AsBuildIns, BinaryFileObjectList, \
                  SupArchList, BinaryModule, DecObjList=None):
    """Convert parsed [Binaries] items into POM BinaryFileObject instances.

    @param BinaryData:   iterable of keys into BinaryObj
    @param BinaryObj:    mapping; BinaryObj[Item][0][0] is the parsed item object
    @param BinariesDict: dict keyed by (FileName, Type, FFE, SupArchStr), each
                         value a list of (Target, Family, TagName, '') tuples
    @param AsBuildIns:   as-built info attached to every created BinaryFileObject
    @param BinaryFileObjectList: output list, appended to in place
    @param SupArchList:  arch list of the enclosing section
    @param BinaryModule: not used by this routine (kept for interface compatibility)
    @param DecObjList:   DEC package objects used to resolve GUID CNames
    @return (BinariesDict, AsBuildIns, BinaryFileObjectList)
    """
    if BinaryModule:
        pass
    OriSupArchList = SupArchList
    for Item in BinaryData:
        ItemObj = BinaryObj[Item][0][0]
        # UI/VER binary types carry no tag name or family.
        if ItemObj.GetType() not in DT.BINARY_FILE_TYPE_UI_LIST + DT.BINARY_FILE_TYPE_VER_LIST:
            TagName = ItemObj.GetTagName()
            Family = ItemObj.GetFamily()
        else:
            TagName = ''
            Family = ''
        FFE = ItemObj.GetFeatureFlagExp()
        #
        # If have architecture specified, then use the specified architecture;
        # If the section tag does not have an architecture modifier or the modifier is "common" (case in-sensitive),
        # and the VALID_ARCHITECTURES comment exists, the list from the VALID_ARCHITECTURES comment
        # can be used for the attribute.
        # If both not have VALID_ARCHITECTURE comment and no architecture specified, then keep it empty.
        #
        SupArchList = sorted(ConvertArchList(ItemObj.GetSupArchList()))
        if len(SupArchList) == 1 and SupArchList[0] == 'COMMON':
            if not (len(OriSupArchList) == 1 or OriSupArchList[0] == 'COMMON'):
                SupArchList = OriSupArchList
            else:
                SupArchList = ['COMMON']
        FileNameObj = CommonObject.FileNameObject()
        FileNameObj.SetFileType(ItemObj.GetType())
        FileNameObj.SetFilename(ItemObj.GetFileName())
        FileNameObj.SetFeatureFlag(FFE)
        #
        # Get GUID value of the GUID CName in the DEC file
        #
        if ItemObj.GetType() == DT.SUBTYPE_GUID_BINARY_FILE_TYPE:
            if not CheckGuidRegFormat(ItemObj.GetGuidValue()):
                # Not in registry format: the value must be a CName that can
                # be resolved against the DEC packages.
                if not DecObjList:
                    if DT.TAB_HORIZON_LINE_SPLIT in ItemObj.GetGuidValue() or \
                        DT.TAB_COMMA_SPLIT in ItemObj.GetGuidValue():
                        Logger.Error("\nMkPkg",
                                     FORMAT_INVALID,
                                     ST.ERR_DECPARSE_DEFINE_PKGGUID,
                                     ExtraData=ItemObj.GetGuidValue(),
                                     RaiseError=True)
                    else:
                        Logger.Error("\nMkPkg",
                                     FORMAT_INVALID,
                                     ST.ERR_UNI_SUBGUID_VALUE_DEFINE_DEC_NOT_FOUND % \
                                     (ItemObj.GetGuidValue()),
                                     RaiseError=True)
                else:
                    # Search the DEC [Guids] declarations for the CName.
                    for DecObj in DecObjList:
                        for GuidObj in DecObj.GetGuidList():
                            if GuidObj.GetCName() == ItemObj.GetGuidValue():
                                FileNameObj.SetGuidValue(GuidObj.GetGuid())
                                break
                    if not FileNameObj.GetGuidValue():
                        Logger.Error("\nMkPkg",
                                     FORMAT_INVALID,
                                     ST.ERR_DECPARSE_CGUID_NOT_FOUND % \
                                     (ItemObj.GetGuidValue()),
                                     RaiseError=True)
            else:
                # Already a registry-format GUID: use it directly.
                FileNameObj.SetGuidValue(ItemObj.GetGuidValue().strip())

        FileNameObj.SetSupArchList(SupArchList)
        FileNameList = [FileNameObj]
        BinaryFile = BinaryFileObject()
        BinaryFile.SetFileNameList(FileNameList)
        BinaryFile.SetAsBuiltList(AsBuildIns)
        BinaryFileObjectList.append(BinaryFile)
        SupArchStr = ' '.join(SupArchList)
        Key = (ItemObj.GetFileName(), ItemObj.GetType(), FFE, SupArchStr)
        ValueItem = (ItemObj.GetTarget(), Family, TagName, '')
        if Key in BinariesDict:
            ValueList = BinariesDict[Key]
            ValueList.append(ValueItem)
            BinariesDict[Key] = ValueList
        else:
            BinariesDict[Key] = [ValueItem]
    return BinariesDict, AsBuildIns, BinaryFileObjectList
| edk2-master | BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignmentMisc.py |
## @file DecPomAlignment.py
# This file contains the adapter for converting INF parser objects to POM objects
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
DecPomAlignment
'''
from __future__ import print_function
##
# Import Modules
#
import os.path
from os import sep
import platform
import re
import Logger.Log as Logger
from Logger import StringTable as ST
from Logger.ToolError import UPT_MUL_DEC_ERROR
from Logger.ToolError import FORMAT_INVALID
from Library.Parsing import NormPath
from Library.DataType import ARCH_LIST
from Library.DataType import TAB_GUIDS
from Library.DataType import TAB_PROTOCOLS
from Library.DataType import TAB_PPIS
from Library.DataType import TAB_DEC_DEFINES_PACKAGE_NAME
from Library.DataType import TAB_DEC_DEFINES_PACKAGE_GUID
from Library.DataType import TAB_DEC_DEFINES_PACKAGE_VERSION
from Library.DataType import TAB_DEC_DEFINES_DEC_SPECIFICATION
from Library.DataType import TAB_DEC_DEFINES_PKG_UNI_FILE
from Library.DataType import TAB_ARCH_COMMON
from Library.DataType import TAB_INCLUDES
from Library.DataType import TAB_LIBRARY_CLASSES
from Library.DataType import TAB_PCDS
from Library.DataType import TAB_PCDS_FIXED_AT_BUILD_NULL
from Library.DataType import TAB_PCDS_PATCHABLE_IN_MODULE_NULL
from Library.DataType import TAB_PCDS_FEATURE_FLAG_NULL
from Library.DataType import TAB_PCDS_DYNAMIC_EX_NULL
from Library.DataType import TAB_PCDS_DYNAMIC_NULL
from Library.DataType import TAB_PTR_TYPE_PCD
from Library.DataType import ITEM_UNDEFINED
from Library.DataType import TAB_DEC_BINARY_ABSTRACT
from Library.DataType import TAB_DEC_BINARY_DESCRIPTION
from Library.DataType import TAB_LANGUAGE_EN_US
from Library.DataType import TAB_BINARY_HEADER_IDENTIFIER
from Library.DataType import TAB_BINARY_HEADER_USERID
from Library.DataType import TAB_LANGUAGE_EN_X
from Library.DataType import TAB_LANGUAGE_EN
from Library.DataType import TAB_STR_TOKENCNAME
from Library.DataType import TAB_STR_TOKENPROMPT
from Library.DataType import TAB_STR_TOKENHELP
from Library.DataType import TAB_STR_TOKENERR
from Library.DataType import TAB_HEX_START
from Library.DataType import TAB_SPLIT
import Library.DataType as DT
from Library.CommentParsing import ParseHeaderCommentSection
from Library.CommentParsing import ParseGenericComment
from Library.CommentParsing import ParseDecPcdGenericComment
from Library.CommentParsing import ParseDecPcdTailComment
from Library.Misc import GetFiles
from Library.Misc import Sdict
from Library.Misc import GetRelativePath
from Library.Misc import PathClass
from Library.Misc import ValidateUNIFilePath
from Library.UniClassObject import UniFileClassObject
from Library.UniClassObject import ConvertSpecialUnicodes
from Library.UniClassObject import GetLanguageCode1766
from Library.ParserValidate import IsValidPath
from Parser.DecParser import Dec
from Object.POM.PackageObject import PackageObject
from Object.POM.CommonObject import UserExtensionObject
from Object.POM.CommonObject import IncludeObject
from Object.POM.CommonObject import GuidObject
from Object.POM.CommonObject import ProtocolObject
from Object.POM.CommonObject import PpiObject
from Object.POM.CommonObject import LibraryClassObject
from Object.POM.CommonObject import PcdObject
from Object.POM.CommonObject import TextObject
from Object.POM.CommonObject import MiscFileObject
from Object.POM.CommonObject import FileObject
## DecPomAlignment
#
# Inherited from PackageObject
#
class DecPomAlignment(PackageObject):
def __init__(self, Filename, WorkspaceDir = None, CheckMulDec = False):
PackageObject.__init__(self)
self.UserExtensions = ''
self.WorkspaceDir = WorkspaceDir
self.SupArchList = ARCH_LIST
self.CheckMulDec = CheckMulDec
self.DecParser = None
self.UniFileClassObject = None
self.PcdDefaultValueDict = {}
#
# Load Dec file
#
self.LoadDecFile(Filename)
#
# Transfer to Package Object if IsToPackage is True
#
self.DecToPackage()
## Load Dec file
#
# Load the file if it exists
#
# @param Filename: Input value for filename of Dec file
#
def LoadDecFile(self, Filename):
#
# Insert a record for file
#
Filename = NormPath(Filename)
(Path, Name) = os.path.split(Filename)
self.SetFullPath(Filename)
self.SetRelaPath(Path)
self.SetFileName(Name)
self.SetPackagePath(GetRelativePath(Path, self.WorkspaceDir))
self.SetCombinePath(GetRelativePath(Filename, self.WorkspaceDir))
self.DecParser = Dec(Filename)
## Transfer to Package Object
#
# Transfer all contents of a Dec file to a standard Package Object
#
def DecToPackage(self):
#
# Init global information for the file
#
ContainerFile = self.GetFullPath()
#
# Generate Package Header
#
self.GenPackageHeader(ContainerFile)
#
# Generate Includes
#
self.GenIncludes(ContainerFile)
#
# Generate Guids
#
self.GenGuidProtocolPpis(TAB_GUIDS, ContainerFile)
#
# Generate Protocols
#
self.GenGuidProtocolPpis(TAB_PROTOCOLS, ContainerFile)
#
# Generate Ppis
#
self.GenGuidProtocolPpis(TAB_PPIS, ContainerFile)
#
# Generate LibraryClasses
#
self.GenLibraryClasses(ContainerFile)
#
# Generate Pcds
#
self.GenPcds(ContainerFile)
#
# Generate Module File list, will be used later on to generate
# distribution
#
self.GenModuleFileList(ContainerFile)
#
# Generate user extensions
#
self.GenUserExtensions()
## Generate user extension
#
#
def GenUserExtensions(self):
UEObj = self.DecParser.GetUserExtensionSectionObject()
UEList = UEObj.GetAllUserExtensions()
for Item in UEList:
if not Item.UserString:
continue
UserExtension = UserExtensionObject()
UserId = Item.UserId
if UserId.startswith('"') and UserId.endswith('"'):
UserId = UserId[1:-1]
UserExtension.SetUserID(UserId)
Identifier = Item.IdString
if Identifier.startswith('"') and Identifier.endswith('"'):
Identifier = Identifier[1:-1]
#
# Generate miscellaneous files of DEC file
#
if UserId == 'TianoCore' and Identifier == 'ExtraFiles':
self.GenMiscFiles(Item.UserString)
UserExtension.SetIdentifier(Identifier)
UserExtension.SetStatement(Item.UserString)
UserExtension.SetSupArchList(
Item.ArchAndModuleType
)
self.SetUserExtensionList(
self.GetUserExtensionList() + [UserExtension]
)
# Add Private sections to UserExtension
if self.DecParser.GetPrivateSections():
PrivateUserExtension = UserExtensionObject()
PrivateUserExtension.SetStatement(self.DecParser.GetPrivateSections())
PrivateUserExtension.SetIdentifier(DT.TAB_PRIVATE)
PrivateUserExtension.SetUserID(DT.TAB_INTEL)
self.SetUserExtensionList(self.GetUserExtensionList() + [PrivateUserExtension])
## Generate miscellaneous files on DEC file
#
#
def GenMiscFiles(self, Content):
MiscFileObj = MiscFileObject()
for Line in Content.splitlines():
FileName = ''
if '#' in Line:
FileName = Line[:Line.find('#')]
else:
FileName = Line
if FileName:
if IsValidPath(FileName, self.GetRelaPath()):
FileObj = FileObject()
FileObj.SetURI(FileName)
MiscFileObj.SetFileList(MiscFileObj.GetFileList()+[FileObj])
else:
Logger.Error("InfParser",
FORMAT_INVALID,
ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID%(Line),
File=self.GetFileName(),
ExtraData=Line)
self.SetMiscFileList(self.GetMiscFileList()+[MiscFileObj])
## Generate Package Header
#
# Gen Package Header of Dec as <Key> = <Value>
#
# @param ContainerFile: The Dec file full path
#
    def GenPackageHeader(self, ContainerFile):
        """Populate package identity and header comment information.

        Reads the [Defines] section for name/GUID/version, loads the package
        UNI file when one is declared, and converts the file header and
        binary header comment blocks into abstract/description/copyright/
        license fields plus the TianoCore."BinaryHeader" user extension.

        @param ContainerFile: The Dec file full path
        """
        Logger.Debug(2, "Generate PackageHeader ...")
        DefinesDict = {}
        #
        # Update all defines item in database
        #
        DefObj = self.DecParser.GetDefineSectionObject()
        for Item in DefObj.GetDefines():
            #
            # put items into Dict except for PackageName, Guid, Version, DEC_SPECIFICATION
            #
            SkipItemList = [TAB_DEC_DEFINES_PACKAGE_NAME, \
                TAB_DEC_DEFINES_PACKAGE_GUID, TAB_DEC_DEFINES_PACKAGE_VERSION, \
                TAB_DEC_DEFINES_DEC_SPECIFICATION, TAB_DEC_DEFINES_PKG_UNI_FILE]
            if Item.Key in SkipItemList:
                continue
            DefinesDict['%s = %s' % (Item.Key, Item.Value)] = TAB_ARCH_COMMON
        self.SetBaseName(DefObj.GetPackageName())
        self.SetVersion(DefObj.GetPackageVersion())
        # The package display name is derived from the file name, not from
        # the PACKAGE_NAME define (older behavior kept commented out below).
#        self.SetName(DefObj.GetPackageName() + ' Version ' + \
#                     DefObj.GetPackageVersion())
        self.SetName(os.path.splitext(self.GetFileName())[0])
        self.SetGuid(DefObj.GetPackageGuid())
        if DefObj.GetPackageUniFile():
            ValidateUNIFilePath(DefObj.GetPackageUniFile())
            self.UniFileClassObject = \
            UniFileClassObject([PathClass(os.path.join(DefObj.GetPackagePath(), DefObj.GetPackageUniFile()))])
        else:
            self.UniFileClassObject = None
        if DefinesDict:
            # Remaining defines are preserved as an EDK2."DefineModifiers"
            # user extension.
            UserExtension = UserExtensionObject()
            UserExtension.SetDefinesDict(DefinesDict)
            UserExtension.SetIdentifier('DefineModifiers')
            UserExtension.SetUserID('EDK2')
            self.SetUserExtensionList(
                self.GetUserExtensionList() + [UserExtension]
            )
        #
        # Get File header information
        #
        # When a package UNI file exists the header text is tagged 'en-x-...'
        # so localized strings can override it; otherwise plain 'en-US'.
        if self.UniFileClassObject:
            Lang = TAB_LANGUAGE_EN_X
        else:
            Lang = TAB_LANGUAGE_EN_US
        Abstract, Description, Copyright, License = \
            ParseHeaderCommentSection(self.DecParser.GetHeadComment(),
                                      ContainerFile)
        if Abstract:
            self.SetAbstract((Lang, Abstract))
        if Description:
            self.SetDescription((Lang, Description))
        if Copyright:
            self.SetCopyright(('', Copyright))
        if License:
            self.SetLicense(('', License))
        #
        # Get Binary header information
        #
        if self.DecParser.BinaryHeadComment:
            Abstract, Description, Copyright, License = \
                ParseHeaderCommentSection(self.DecParser.BinaryHeadComment,
                                      ContainerFile, True)
            # A binary header must provide all four fields or it is invalid.
            if not Abstract or not Description or not Copyright or not License:
                Logger.Error('MkPkg',
                             FORMAT_INVALID,
                             ST.ERR_INVALID_BINARYHEADER_FORMAT,
                             ContainerFile)
            else:
                self.SetBinaryHeaderAbstract((Lang, Abstract))
                self.SetBinaryHeaderDescription((Lang, Description))
                self.SetBinaryHeaderCopyright(('', Copyright))
                self.SetBinaryHeaderLicense(('', License))
        BinaryAbstractList = []
        BinaryDescriptionList = []
        #Get Binary header from UNI file
        # Initialize the UniStrDict dictionary, top keys are language codes
        UniStrDict = {}
        if self.UniFileClassObject:
            UniStrDict = self.UniFileClassObject.OrderedStringList
            for Lang in UniStrDict:
                for StringDefClassObject in UniStrDict[Lang]:
                    # Convert the language code to RFC 1766 form; note this
                    # rebinds the loop variable 'Lang' for the inner body.
                    Lang = GetLanguageCode1766(Lang)
                    if StringDefClassObject.StringName == TAB_DEC_BINARY_ABSTRACT:
                        if (Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)) \
                        not in self.GetBinaryHeaderAbstract():
                            BinaryAbstractList.append((Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
                    if StringDefClassObject.StringName == TAB_DEC_BINARY_DESCRIPTION:
                        if (Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)) \
                        not in self.GetBinaryHeaderDescription():
                            BinaryDescriptionList.append((Lang,
                                                          ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
        #Combine Binary header from DEC file and UNI file
        BinaryAbstractList = self.GetBinaryHeaderAbstract() + BinaryAbstractList
        BinaryDescriptionList = self.GetBinaryHeaderDescription() + BinaryDescriptionList
        BinaryCopyrightList = self.GetBinaryHeaderCopyright()
        BinaryLicenseList = self.GetBinaryHeaderLicense()
        #Generate the UserExtensionObject for TianoCore."BinaryHeader"
        if BinaryAbstractList or BinaryDescriptionList or BinaryCopyrightList or BinaryLicenseList:
            BinaryUserExtension = UserExtensionObject()
            BinaryUserExtension.SetBinaryAbstract(BinaryAbstractList)
            BinaryUserExtension.SetBinaryDescription(BinaryDescriptionList)
            BinaryUserExtension.SetBinaryCopyright(BinaryCopyrightList)
            BinaryUserExtension.SetBinaryLicense(BinaryLicenseList)
            BinaryUserExtension.SetIdentifier(TAB_BINARY_HEADER_IDENTIFIER)
            BinaryUserExtension.SetUserID(TAB_BINARY_HEADER_USERID)
            self.SetUserExtensionList(self.GetUserExtensionList() + [BinaryUserExtension])
## GenIncludes
#
# Gen Includes of Dec
#
# @param ContainerFile: The Dec file full path
#
    def GenIncludes(self, ContainerFile):
        """Generate the standard and package include file lists.

        Builds an arch-keyed dictionary of [Includes] directories, computes a
        non-overlapping directory set, enumerates the files under it, and
        records both the file objects and the raw include paths.

        @param ContainerFile: The Dec file full path
        """
        if ContainerFile:
            pass
        Logger.Debug(2, "Generate %s ..." % TAB_INCLUDES)
        IncludesDict = Sdict()
        IncObj = self.DecParser.GetIncludeSectionObject()
        for Item in IncObj.GetAllIncludes():
            IncludePath = os.path.normpath(Item.File)
            # Outside of Windows a DEC may still use '\' separators; normalize.
            if platform.system() != 'Windows' and platform.system() != 'Microsoft':
                IncludePath = IncludePath.replace('\\', '/')
            if IncludePath in IncludesDict:
                # COMMON absorbs any arch-specific entries for the same path.
                if Item.GetArchList() == [TAB_ARCH_COMMON] or IncludesDict[IncludePath] == [TAB_ARCH_COMMON]:
                    IncludesDict[IncludePath] = [TAB_ARCH_COMMON]
                else:
                    IncludesDict[IncludePath] = IncludesDict[IncludePath] + Item.GetArchList()
            else:
                IncludesDict[IncludePath] = Item.GetArchList()
        #
        # get the standardIncludeFileList(industry), packageIncludeFileList
        # (others) for PackageObject
        #
        PackagePath = os.path.split(self.GetFullPath())[0]
        IncludePathList = \
            sorted([os.path.normpath(Path) + sep for Path in IncludesDict.keys()])
        #
        # get a non-overlap set of include path, IncludePathList should be
        # sorted, and path should be end with path separator '\'
        #
        NonOverLapList = []
        for Path1 in IncludePathList:
            for Path2 in NonOverLapList:
                if Path1.startswith(Path2):
                    break
            else:
                NonOverLapList.append(Path1)
        #
        # revert the list so the longest path shown first in list, also need
        # to remove the extra path separator '\'
        # as this list is used to search the supported Arch info
        #
        for IndexN in range (0, len(IncludePathList)):
            IncludePathList[IndexN] = os.path.normpath(IncludePathList[IndexN])
        IncludePathList.sort()
        IncludePathList.reverse()
        #
        # save the include path list for later usage
        #
        self.SetIncludePathList(IncludePathList)
        StandardIncludeFileList = []
        PackageIncludeFileList = []
        IncludeFileList = []
        for Path in NonOverLapList:
            FileList = GetFiles(os.path.join(PackagePath, Path), ['CVS', '.svn'], False)
            IncludeFileList += [os.path.normpath(os.path.join(Path, File)) for File in FileList]
        for Includefile in IncludeFileList:
            ExtName = os.path.splitext(Includefile)[1]
            # A second DEC inside an include tree is an error in multi-DEC
            # checking mode.
            if ExtName.upper() == '.DEC' and self.CheckMulDec:
                Logger.Error('MkPkg',
                             UPT_MUL_DEC_ERROR,
                             ST.ERR_MUL_DEC_ERROR%(os.path.dirname(ContainerFile),
                                                   os.path.basename(ContainerFile),
                                                   Includefile))
            FileCombinePath = os.path.dirname(Includefile)
            Include = IncludeObject()
            # NOTE(review): if no IncludePathList entry matched here,
            # SupArchList from a previous iteration would be reused (or be
            # unbound on the first file) -- presumably every enumerated file
            # lives under one of the declared include paths; confirm.
            for Path in IncludePathList:
                if FileCombinePath.startswith(Path):
                    SupArchList = IncludesDict[Path]
                    break
            Include.SetFilePath(Includefile)
            Include.SetSupArchList(SupArchList)
            # Files under 'IndustryStandard' are industry-standard headers.
            if Includefile.find('IndustryStandard') != -1:
                StandardIncludeFileList.append(Include)
            else:
                PackageIncludeFileList.append(Include)
        self.SetStandardIncludeFileList(StandardIncludeFileList)
        #
        # put include path into the PackageIncludeFileList
        #
        PackagePathList = []
        IncObj = self.DecParser.GetIncludeSectionObject()
        for Item in IncObj.GetAllIncludes():
            IncludePath = Item.File
            Include = IncludeObject()
            Include.SetFilePath(IncludePath)
            Include.SetSupArchList(Item.GetArchList())
            PackagePathList.append(Include)
        self.SetPackageIncludeFileList(PackagePathList + PackageIncludeFileList)
## GenPpis
#
# Gen Ppis of Dec
# <CName>=<GuidValue>
#
# @param ContainerFile: The Dec file full path
#
def GenGuidProtocolPpis(self, Type, ContainerFile):
if ContainerFile:
pass
Logger.Debug(2, "Generate %s ..." % Type)
Obj = None
Factory = None
if Type == TAB_GUIDS:
Obj = self.DecParser.GetGuidSectionObject()
def CreateGuidObject():
Object = GuidObject()
Object.SetGuidTypeList([])
Object.SetUsage(None)
Object.SetName(None)
return Object
Factory = CreateGuidObject
elif Type == TAB_PROTOCOLS:
Obj = self.DecParser.GetProtocolSectionObject()
def CreateProtocolObject():
return ProtocolObject()
Factory = CreateProtocolObject
elif Type == TAB_PPIS:
Obj = self.DecParser.GetPpiSectionObject()
def CreatePpiObject():
return PpiObject()
Factory = CreatePpiObject
else:
#
# Should not be here
#
return
DeclarationsList = []
#
# Go through each arch
#
for Item in Obj.GetGuidStyleAllItems():
Name = Item.GuidCName
Value = Item.GuidString
HelpTxt = ParseGenericComment(Item.GetHeadComment() + \
Item.GetTailComment())
ListObject = Factory()
ListObject.SetCName(Name)
ListObject.SetGuid(Value)
ListObject.SetSupArchList(Item.GetArchList())
if HelpTxt:
if self.UniFileClassObject:
HelpTxt.SetLang(TAB_LANGUAGE_EN_X)
ListObject.SetHelpTextList([HelpTxt])
DeclarationsList.append(ListObject)
#
#GuidTypeList is abstracted from help
#
if Type == TAB_GUIDS:
self.SetGuidList(self.GetGuidList() + DeclarationsList)
elif Type == TAB_PROTOCOLS:
self.SetProtocolList(self.GetProtocolList() + DeclarationsList)
elif Type == TAB_PPIS:
self.SetPpiList(self.GetPpiList() + DeclarationsList)
## GenLibraryClasses
#
# Gen LibraryClasses of Dec
# <CName>=<GuidValue>
#
# @param ContainerFile: The Dec file full path
#
def GenLibraryClasses(self, ContainerFile):
if ContainerFile:
pass
Logger.Debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
LibraryClassDeclarations = []
LibObj = self.DecParser.GetLibraryClassSectionObject()
for Item in LibObj.GetAllLibraryclasses():
LibraryClass = LibraryClassObject()
LibraryClass.SetLibraryClass(Item.Libraryclass)
LibraryClass.SetSupArchList(Item.GetArchList())
LibraryClass.SetIncludeHeader(Item.File)
HelpTxt = ParseGenericComment(Item.GetHeadComment() + \
Item.GetTailComment(), None, '@libraryclass')
if HelpTxt:
if self.UniFileClassObject:
HelpTxt.SetLang(TAB_LANGUAGE_EN_X)
LibraryClass.SetHelpTextList([HelpTxt])
LibraryClassDeclarations.append(LibraryClass)
self.SetLibraryClassList(self.GetLibraryClassList() + \
LibraryClassDeclarations)
## GenPcds
#
# Gen Pcds of Dec
# <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
#
# @param ContainerFile: The Dec file full path
#
    def GenPcds(self, ContainerFile):
        """Generate PcdObject declarations from every [Pcds*] section.

        Collects prompt/help/error strings from the package UNI file (when
        present), builds one PcdObject per DEC entry via GenPcdDeclaration,
        attaches the localized strings, and finally validates default values.

        @param ContainerFile: The Dec file full path
        """
        Logger.Debug(2, "Generate %s ..." % TAB_PCDS)
        PcdObj = self.DecParser.GetPcdSectionObject()
        #
        # Get all Pcds
        #
        PcdDeclarations = []
        IterList = [
            (TAB_PCDS_FIXED_AT_BUILD_NULL, 'FixedPcd'),
            (TAB_PCDS_PATCHABLE_IN_MODULE_NULL, 'PatchPcd'),
            (TAB_PCDS_FEATURE_FLAG_NULL, 'FeaturePcd'),
            (TAB_PCDS_DYNAMIC_EX_NULL, 'PcdEx'),
            (TAB_PCDS_DYNAMIC_NULL, 'Pcd')]
        PromptStrList = []
        HelpStrList = []
        PcdErrStrList = []
        # Initialize UniStrDict dictionary, top keys are language codes
        UniStrDict = {}
        StrList = []
        Language = ''
        if self.UniFileClassObject:
            Language = TAB_LANGUAGE_EN_X
        else:
            Language = TAB_LANGUAGE_EN_US
        if self.UniFileClassObject:
            UniStrDict = self.UniFileClassObject.OrderedStringList
            for Lang in UniStrDict:
                for StringDefClassObject in UniStrDict[Lang]:
                    StrList = StringDefClassObject.StringName.split('_')
                    # StringName format is STR_<TOKENSPACECNAME>_<PCDCNAME>_PROMPT
                    if len(StrList) == 4 and StrList[0] == TAB_STR_TOKENCNAME and StrList[3] == TAB_STR_TOKENPROMPT:
                        PromptStrList.append((GetLanguageCode1766(Lang), StringDefClassObject.StringName, \
                                              StringDefClassObject.StringValue))
                    # StringName format is STR_<TOKENSPACECNAME>_<PCDCNAME>_HELP
                    if len(StrList) == 4 and StrList[0] == TAB_STR_TOKENCNAME and StrList[3] == TAB_STR_TOKENHELP:
                        HelpStrList.append((GetLanguageCode1766(Lang), StringDefClassObject.StringName, \
                                            StringDefClassObject.StringValue))
                    # StringName format is STR_<TOKENSPACECNAME>_ERR_##
                    if len(StrList) == 4 and StrList[0] == TAB_STR_TOKENCNAME and StrList[2] == TAB_STR_TOKENERR:
                        PcdErrStrList.append((GetLanguageCode1766(Lang), StringDefClassObject.StringName, \
                                              StringDefClassObject.StringValue))
        #
        # For each PCD type
        #
        for PcdType, Type in IterList:
            #
            # Go through all archs
            #
            # for Arch in self.SupArchList + [TAB_ARCH_COMMON]:
            #
            for Item in PcdObj.GetPcdsByType(PcdType.upper()):
                PcdDeclaration = GenPcdDeclaration(
                    ContainerFile,
                    (Item.TokenSpaceGuidCName, Item.TokenCName,
                     Item.DefaultValue, Item.DatumType, Item.TokenValue,
                     Type, Item.GetHeadComment(), Item.GetTailComment(), ''),
                    Language,
                    self.DecParser.GetDefineSectionMacro()
                    )
                PcdDeclaration.SetSupArchList(Item.GetArchListOfType(PcdType))
                #
                # Get PCD error message from PCD error comment section in DEC file
                #
                for PcdErr in PcdDeclaration.GetPcdErrorsList():
                    if (PcdDeclaration.GetTokenSpaceGuidCName(), PcdErr.GetErrorNumber()) \
                        in self.DecParser.PcdErrorCommentDict:
                        Key = (PcdDeclaration.GetTokenSpaceGuidCName(), PcdErr.GetErrorNumber())
                        PcdErr.SetErrorMessageList(PcdErr.GetErrorMessageList() + \
                                                   [(Language, self.DecParser.PcdErrorCommentDict[Key])])
                for Index in range(0, len(PromptStrList)):
                    StrNameList = PromptStrList[Index][1].split('_')
                    if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
                        StrNameList[2].lower() == Item.TokenCName.lower():
                        TxtObj = TextObject()
                        TxtObj.SetLang(PromptStrList[Index][0])
                        TxtObj.SetString(PromptStrList[Index][2])
                        # for/else: append only when no identical prompt
                        # (same language and text) already exists.
                        for Prompt in PcdDeclaration.GetPromptList():
                            if Prompt.GetLang() == TxtObj.GetLang() and \
                                Prompt.GetString() == TxtObj.GetString():
                                break
                        else:
                            PcdDeclaration.SetPromptList(PcdDeclaration.GetPromptList() + [TxtObj])
                for Index in range(0, len(HelpStrList)):
                    StrNameList = HelpStrList[Index][1].split('_')
                    if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
                        StrNameList[2].lower() == Item.TokenCName.lower():
                        TxtObj = TextObject()
                        TxtObj.SetLang(HelpStrList[Index][0])
                        TxtObj.SetString(HelpStrList[Index][2])
                        # for/else: append only when no identical help text
                        # already exists.
                        for HelpStrObj in PcdDeclaration.GetHelpTextList():
                            if HelpStrObj.GetLang() == TxtObj.GetLang() and \
                                HelpStrObj.GetString() == TxtObj.GetString():
                                break
                        else:
                            PcdDeclaration.SetHelpTextList(PcdDeclaration.GetHelpTextList() + [TxtObj])
                #
                # Get PCD error message from UNI file
                #
                for Index in range(0, len(PcdErrStrList)):
                    StrNameList = PcdErrStrList[Index][1].split('_')
                    if StrNameList[1].lower() == Item.TokenSpaceGuidCName.lower() and \
                        StrNameList[2].lower() == TAB_STR_TOKENERR.lower():
                        for PcdErr in PcdDeclaration.GetPcdErrorsList():
                            if PcdErr.GetErrorNumber().lower() == (TAB_HEX_START + StrNameList[3]).lower() and \
                                (PcdErrStrList[Index][0], PcdErrStrList[Index][2]) not in PcdErr.GetErrorMessageList():
                                PcdErr.SetErrorMessageList(PcdErr.GetErrorMessageList() + \
                                                           [(PcdErrStrList[Index][0], PcdErrStrList[Index][2])])
                #
                # Check to prevent missing error message if a Pcd has the error code.
                #
                for PcdErr in PcdDeclaration.GetPcdErrorsList():
                    if PcdErr.GetErrorNumber().strip():
                        if not PcdErr.GetErrorMessageList():
                            Logger.Error('UPT',
                                         FORMAT_INVALID,
                                         ST.ERR_DECPARSE_PCD_UNMATCHED_ERRORCODE % PcdErr.GetErrorNumber(),
                                         ContainerFile,
                                         PcdErr.GetLineNum(),
                                         PcdErr.GetFileLine())
                PcdDeclarations.append(PcdDeclaration)
        self.SetPcdList(self.GetPcdList() + PcdDeclarations)
        self.CheckPcdValue()
##
# Get error message via language
# @param ErrorMessageList: Error message tuple list the language and its message
# @param Lang: the language of setting
# @return: the error message described in the related UNI file
def GetEnErrorMessage(self, ErrorMessageList):
if self.FullPath:
pass
Lang = TAB_LANGUAGE_EN_US
for (Language, Message) in ErrorMessageList:
if Language == Lang:
return Message
for (Language, Message) in ErrorMessageList:
if Language.find(TAB_LANGUAGE_EN) >= 0:
return Message
else:
try:
return ErrorMessageList[0][1]
except IndexError:
return ''
return ''
##
# Replace the strings for Python eval function.
# @param ReplaceValue: The string that needs to be replaced.
# @return: The string was replaced, then eval function is always making out it.
def ReplaceForEval(self, ReplaceValue, IsRange=False, IsExpr=False):
if self.FullPath:
pass
#
# deal with "NOT EQ", "NOT LT", "NOT GT", "NOT LE", "NOT GE", "NOT NOT"
#
NOTNOT_Pattern = '[\t\s]*NOT[\t\s]+NOT[\t\s]*'
NOTGE_Pattern = '[\t\s]*NOT[\t\s]+GE[\t\s]*'
NOTLE_Pattern = '[\t\s]*NOT[\t\s]+LE[\t\s]*'
NOTGT_Pattern = '[\t\s]*NOT[\t\s]+GT[\t\s]*'
NOTLT_Pattern = '[\t\s]*NOT[\t\s]+LT[\t\s]*'
NOTEQ_Pattern = '[\t\s]*NOT[\t\s]+EQ[\t\s]*'
ReplaceValue = re.compile(NOTNOT_Pattern).sub('', ReplaceValue)
ReplaceValue = re.compile(NOTLT_Pattern).sub('x >= ', ReplaceValue)
ReplaceValue = re.compile(NOTGT_Pattern).sub('x <= ', ReplaceValue)
ReplaceValue = re.compile(NOTLE_Pattern).sub('x > ', ReplaceValue)
ReplaceValue = re.compile(NOTGE_Pattern).sub('x < ', ReplaceValue)
ReplaceValue = re.compile(NOTEQ_Pattern).sub('x != ', ReplaceValue)
if IsRange:
ReplaceValue = ReplaceValue.replace('EQ', 'x ==')
ReplaceValue = ReplaceValue.replace('LT', 'x <')
ReplaceValue = ReplaceValue.replace('LE', 'x <=')
ReplaceValue = ReplaceValue.replace('GT', 'x >')
ReplaceValue = ReplaceValue.replace('GE', 'x >=')
ReplaceValue = ReplaceValue.replace('XOR', 'x ^')
elif IsExpr:
ReplaceValue = ReplaceValue.replace('EQ', '==')
ReplaceValue = ReplaceValue.replace('NE', '!=')
ReplaceValue = ReplaceValue.replace('LT', '<')
ReplaceValue = ReplaceValue.replace('LE', '<=')
ReplaceValue = ReplaceValue.replace('GT', '>')
ReplaceValue = ReplaceValue.replace('GE', '>=')
ReplaceValue = ReplaceValue.replace('XOR', '^')
ReplaceValue = ReplaceValue.replace('AND', 'and')
ReplaceValue = ReplaceValue.replace('&&', ' and ')
ReplaceValue = ReplaceValue.replace('xor', '^')
ReplaceValue = ReplaceValue.replace('OR', 'or')
ReplaceValue = ReplaceValue.replace('||', ' or ')
ReplaceValue = ReplaceValue.replace('NOT', 'not')
if ReplaceValue.find('!') >= 0 and ReplaceValue[ReplaceValue.index('!') + 1] != '=':
ReplaceValue = ReplaceValue.replace('!', ' not ')
if '.' in ReplaceValue:
Pattern = '[a-zA-Z0-9]{1,}\.[a-zA-Z0-9]{1,}'
MatchedList = re.findall(Pattern, ReplaceValue)
for MatchedItem in MatchedList:
if MatchedItem not in self.PcdDefaultValueDict:
Logger.Error("Dec File Parser", FORMAT_INVALID, Message=ST.ERR_DECPARSE_PCD_NODEFINED % MatchedItem,
File=self.FullPath)
ReplaceValue = ReplaceValue.replace(MatchedItem, self.PcdDefaultValueDict[MatchedItem])
return ReplaceValue
##
# Check pcd's default value according to the pcd's description
#
def CheckPcdValue(self):
for Pcd in self.GetPcdList():
self.PcdDefaultValueDict[TAB_SPLIT.join((Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName())).strip()] = \
Pcd.GetDefaultValue()
for Pcd in self.GetPcdList():
ValidationExpressions = []
PcdGuidName = TAB_SPLIT.join((Pcd.GetTokenSpaceGuidCName(), Pcd.GetCName()))
Valids = Pcd.GetPcdErrorsList()
for Valid in Valids:
Expression = Valid.GetExpression()
if Expression:
#
# Delete the 'L' prefix of a quoted string, this operation is for eval()
#
QUOTED_PATTERN = '[\t\s]*L?"[^"]*"'
QuotedMatchedObj = re.search(QUOTED_PATTERN, Expression)
if QuotedMatchedObj:
MatchedStr = QuotedMatchedObj.group().strip()
if MatchedStr.startswith('L'):
Expression = Expression.replace(MatchedStr, MatchedStr[1:].strip())
Expression = self.ReplaceForEval(Expression, IsExpr=True)
Expression = Expression.replace(PcdGuidName, 'x')
Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
ValidationExpressions.append((Expression, Message))
ValidList = Valid.GetValidValue()
if ValidList:
ValidValue = 'x in %s' % [eval(v) for v in ValidList.split(' ') if v]
Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
ValidationExpressions.append((ValidValue, Message))
ValidValueRange = Valid.GetValidValueRange()
if ValidValueRange:
ValidValueRange = self.ReplaceForEval(ValidValueRange, IsRange=True)
if ValidValueRange.find('-') >= 0:
ValidValueRange = ValidValueRange.replace('-', '<= x <=')
elif not ValidValueRange.startswith('x ') and not ValidValueRange.startswith('not ') \
and not ValidValueRange.startswith('not(') and not ValidValueRange.startswith('('):
ValidValueRange = 'x %s' % ValidValueRange
Message = self.GetEnErrorMessage(Valid.GetErrorMessageList())
ValidationExpressions.append((ValidValueRange, Message))
DefaultValue = self.PcdDefaultValueDict[PcdGuidName.strip()]
#
# Delete the 'L' prefix of a quoted string, this operation is for eval()
#
QUOTED_PATTERN = '[\t\s]*L?"[^"]*"'
QuotedMatchedObj = re.search(QUOTED_PATTERN, DefaultValue)
if QuotedMatchedObj:
MatchedStr = QuotedMatchedObj.group().strip()
if MatchedStr.startswith('L'):
DefaultValue = DefaultValue.replace(MatchedStr, MatchedStr[1:].strip())
try:
DefaultValue = eval(DefaultValue.replace('TRUE', 'True').replace('true', 'True')
.replace('FALSE', 'False').replace('false', 'False'))
except BaseException:
pass
for (Expression, Msg) in ValidationExpressions:
try:
if not eval(Expression, {'x':DefaultValue}):
Logger.Error("Dec File Parser", FORMAT_INVALID, ExtraData='%s, value = %s' %\
(PcdGuidName, DefaultValue), Message=Msg, File=self.FullPath)
except TypeError:
Logger.Error("Dec File Parser", FORMAT_INVALID, ExtraData=PcdGuidName, \
Message=Msg, File=self.FullPath)
## GenModuleFileList
#
def GenModuleFileList(self, ContainerFile):
ModuleFileList = []
ContainerFileName = os.path.basename(ContainerFile)
ContainerFilePath = os.path.dirname(ContainerFile)
for Item in GetFiles(ContainerFilePath,
['CVS', '.svn'] + self.GetIncludePathList(), False):
ExtName = os.path.splitext(Item)[1]
if ExtName.lower() == '.inf':
ModuleFileList.append(Item)
elif ExtName.upper() == '.DEC' and self.CheckMulDec:
if Item == ContainerFileName:
continue
Logger.Error('MkPkg',
UPT_MUL_DEC_ERROR,
ST.ERR_MUL_DEC_ERROR%(ContainerFilePath,
ContainerFileName,
Item))
self.SetModuleFileList(ModuleFileList)
## Show detailed information of Package
#
# Print all members and their values of Package class
#
    def ShowPackage(self):
        """Print all members and their values of this package (debug aid).

        Output goes to stdout; the formatting is significant only for human
        inspection.
        """
        print('\nName =', self.GetName())
        print('\nBaseName =', self.GetBaseName())
        print('\nVersion =', self.GetVersion())
        print('\nGuid =', self.GetGuid())
        print('\nStandardIncludes = %d ' \
            % len(self.GetStandardIncludeFileList()), end=' ')
        for Item in self.GetStandardIncludeFileList():
            print(Item.GetFilePath(), ' ', Item.GetSupArchList())
        print('\nPackageIncludes = %d \n' \
            % len(self.GetPackageIncludeFileList()), end=' ')
        for Item in self.GetPackageIncludeFileList():
            print(Item.GetFilePath(), ' ', Item.GetSupArchList())
        print('\nGuids =', self.GetGuidList())
        for Item in self.GetGuidList():
            print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
        print('\nProtocols =', self.GetProtocolList())
        for Item in self.GetProtocolList():
            print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
        print('\nPpis =', self.GetPpiList())
        for Item in self.GetPpiList():
            print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
        print('\nLibraryClasses =', self.GetLibraryClassList())
        for Item in self.GetLibraryClassList():
            print(Item.GetLibraryClass(), Item.GetRecommendedInstance(), \
                  Item.GetSupArchList())
        print('\nPcds =', self.GetPcdList())
        for Item in self.GetPcdList():
            print('CName=', Item.GetCName(), 'TokenSpaceGuidCName=', \
                  Item.GetTokenSpaceGuidCName(), \
                  'DefaultValue=', Item.GetDefaultValue(), \
                  'ValidUsage=', Item.GetValidUsage(), \
                  'SupArchList', Item.GetSupArchList(), \
                  'Token=', Item.GetToken(), 'DatumType=', Item.GetDatumType())
        # Miscellaneous files recorded from TianoCore."ExtraFiles".
        for Item in self.GetMiscFileList():
            print(Item.GetName())
            for FileObjectItem in Item.GetFileList():
                print(FileObjectItem.GetURI())
        print('****************\n')
## GenPcdDeclaration
#
# @param ContainerFile: File name of the DEC file
# @param PcdInfo: Pcd information, of format (TokenGuidCName,
# TokenName, Value, DatumType, Token, Type,
# GenericComment, TailComment, Arch)
# @param Language: The language of HelpText, Prompt
#
def GenPcdDeclaration(ContainerFile, PcdInfo, Language, MacroReplaceDict):
    """Build a PcdObject out of one parsed DEC PCD entry.

    @param ContainerFile: full path of the DEC file (for error reporting)
    @param PcdInfo: tuple of (TokenGuidCName, TokenName, Value, DatumType,
                    Token, Type, GenericComment, TailComment, Arch)
    @param Language: language code for the generated help/prompt text objects
    @param MacroReplaceDict: DEFINE-section macros substituted in comments
    @return: the populated PcdObject
    """
    (TokenGuidCName, TokenName, Value, DatumType, Token, Type,
     GenericComment, TailComment, Arch) = PcdInfo
    Declaration = PcdObject()
    Declaration.SetCName(TokenName)
    Declaration.SetToken(Token)
    Declaration.SetTokenSpaceGuidCName(TokenGuidCName)
    Declaration.SetDatumType(DatumType)
    Declaration.SetDefaultValue(Value)
    Declaration.SetValidUsage(Type)
    #
    # MaxDatumSize is required field for 'VOID*' PCD
    #
    if DatumType == TAB_PTR_TYPE_PCD:
        Declaration.SetMaxDatumSize(ITEM_UNDEFINED)
    Declaration.SetSupArchList([Arch])
    HelpText = ''
    PromptText = ''
    TailHelpText = ''
    if GenericComment:
        HelpText, PcdErrList, PromptText = ParseDecPcdGenericComment(GenericComment,
                                                                     ContainerFile,
                                                                     TokenGuidCName,
                                                                     TokenName,
                                                                     MacroReplaceDict)
        if PcdErrList:
            Declaration.SetPcdErrorsList(PcdErrList)
    if TailComment:
        SupModuleList, TailHelpText = ParseDecPcdTailComment(TailComment,
                                                             ContainerFile)
        if SupModuleList:
            Declaration.SetSupModuleList(SupModuleList)
    # Join head and tail help text with exactly one newline between them.
    if HelpText and (not HelpText.endswith('\n')) and TailHelpText:
        HelpText += '\n'
    HelpText += TailHelpText
    if HelpText:
        HelpObj = TextObject()
        HelpObj.SetLang(Language)
        HelpObj.SetString(HelpText)
        Declaration.SetHelpTextList([HelpObj])
    if PromptText:
        PromptObj = TextObject()
        PromptObj.SetLang(Language)
        PromptObj.SetString(PromptText)
        Declaration.SetPromptList([PromptObj])
    return Declaration
| edk2-master | BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py |
## @file
# Python 'Parser' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
PomAdapter
'''
| edk2-master | BaseTools/Source/Python/UPT/PomAdapter/__init__.py |
## @file InfPomAlignment.py
# This file contained the adapter for convert INF parser object to POM Object
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
InfPomAlignment
'''
##
# Import modules
#
import os.path
from Logger import StringTable as ST
import Logger.Log as Logger
from Library.StringUtils import FORMAT_INVALID
from Library.StringUtils import PARSER_ERROR
from Library.StringUtils import NormPath
from Library.StringUtils import GetSplitValueList
from Library.Misc import ConvertVersionToDecimal
from Library.Misc import GetHelpStringByRemoveHashKey
from Library.Misc import ConvertArchList
from Library.Misc import GetRelativePath
from Library.Misc import PathClass
from Library.Parsing import GetPkgInfoFromDec
from Library.UniClassObject import UniFileClassObject
from Library.UniClassObject import ConvertSpecialUnicodes
from Library.UniClassObject import GetLanguageCode1766
from Library import DataType as DT
from Library import GlobalData
from Library.ParserValidate import IsValidPath
from Object.POM import CommonObject
from Object.POM.ModuleObject import ModuleObject
from Object.POM.ModuleObject import ExternObject
from Object.POM.ModuleObject import HobObject
from Object.POM.ModuleObject import EventObject
from Object.POM.ModuleObject import BootModeObject
from Object.POM.ModuleObject import PackageDependencyObject
from Object.POM.ModuleObject import SourceFileObject
from Object.POM.ModuleObject import DepexObject
from Object.POM.ModuleObject import AsBuildLibraryClassObject
from Object.POM.ModuleObject import AsBuiltObject
from PomAdapter.InfPomAlignmentMisc import GenModuleHeaderUserExt
from PomAdapter.InfPomAlignmentMisc import GenBinaryData
from Parser import InfParser
from PomAdapter.DecPomAlignment import DecPomAlignment
from Common.MultipleWorkspace import MultipleWorkspace as mws
## InfPomAlignment
#
# Inherit from ModuleObject
#
class InfPomAlignment(ModuleObject):
## Construct of InfPomAlignment
# Skip means that UPT don't care the syntax of INF, this may be the not
# distributed INF files during creation or the INF files checked for
# dependency rule during remove.
#
def __init__(self, FileName, WorkSpace=None, PackagePath='', Skip=False):
ModuleObject.__init__(self)
self.Parser = None
self.FileName = FileName
self.WorkSpace = WorkSpace
self.CombinePath = ''
self.LibModuleTypeList = []
self.FullPath = ''
self.ModulePath = ''
self.WorkspaceDir = " "
self.CustomMakefile = []
self.UniFileClassObject = None
self.SetPackagePath(PackagePath)
#
# Call GenInfPomObjects function to fill POM object.
#
if Skip:
OrigConfig = Logger.SUPRESS_ERROR
Logger.SUPRESS_ERROR = True
try:
self._GenInfPomObjects(Skip)
finally:
Logger.SUPRESS_ERROR = OrigConfig
else:
self._GenInfPomObjects(Skip)
##
# Generate all POM objects, the original input comes
# from INF parser's output
#
def _GenInfPomObjects(self, Skip):
#
# Call INF Parser to get information from INF file
#
self.Parser = InfParser.InfParser(self.FileName, self.WorkSpace)
self.FullPath = self.Parser.FullPath
self.GetFullPath()
self._GenModuleHeader()
#
# Call GenBinaries after Module Header for Binary INF consideration.
#
self._GenBinaries()
self._GenBuildOptions()
self._GenLibraryClasses()
self._GenPackages(Skip)
self._GenPcds()
self._GenSources()
self._GenUserExtensions()
self._GenGuidProtocolPpis(DT.TAB_GUIDS)
self._GenGuidProtocolPpis(DT.TAB_PROTOCOLS)
self._GenGuidProtocolPpis(DT.TAB_PPIS)
self._GenDepexes()
## Convert [Defines] section content to InfDefObject
#
# Convert [Defines] section content to InfDefObject
#
# @param Defines The content under [Defines] section
# @param ModuleHeader An object of ModuleHeaderClass
# @param Arch The supported ARCH
#
    def _GenModuleHeader(self):
        """Populate the module header from the parsed [Defines] section.

        Validates the mandatory define items (MODULE_TYPE, BASE_NAME,
        INF_VERSION, FILE_GUID, VERSION_STRING), converts spec versions to
        decimal form, generates extern/library-class/special-comment data,
        and copies the file and binary header comment blocks into the POM.
        Raises a parser error (via Logger.Error) on any violation.
        """
        Logger.Debug(2, "Generate ModuleHeader ...")
        #
        # Get all defines information from InfParser Object
        #
        RecordSet = self.Parser.InfDefSection.Defines
        #
        # Should only have one ArchString Item.
        #
        ArchString = list(RecordSet.keys())[0]
        ArchList = GetSplitValueList(ArchString, ' ')
        ArchList = ConvertArchList(ArchList)
        # NOTE(review): vestigial guard, presumably left over from an earlier
        # per-arch loop; with the single define section it is always False
        # when tested below, so the externs are generated exactly once.
        HasCalledFlag = False
        #
        # Get data from Sdict()
        #
        ValueList = RecordSet[ArchString]
        self.SetFileName(self.FileName)
        self.SetFullPath(self.FullPath)
        #
        # The INF's filename (without the directory path or the extension)
        # must be used for the value of the
        # ModuleSurfaceArea.Header.Name element
        #
        self.SetName(os.path.splitext(os.path.basename(self.FileName))[0])
        # Placeholder value; the real workspace directory is not stored here.
        self.WorkspaceDir = " "
        #
        # CombinePath and ModulePath
        #
        CombinePath = GetRelativePath(self.FullPath, self.WorkSpace)
        self.SetCombinePath(CombinePath)
        ModulePath = os.path.split(CombinePath)[0]
        ModuleRelativePath = ModulePath
        if self.GetPackagePath() != '':
            ModuleRelativePath = GetRelativePath(ModulePath, self.GetPackagePath())
        self.SetModulePath(ModuleRelativePath)
        #
        # For Define Seciton Items.
        #
        DefineObj = ValueList
        #
        # Convert UEFI/PI version to decimal number
        #
        if DefineObj.GetUefiSpecificationVersion() is not None:
            __UefiVersion = DefineObj.GetUefiSpecificationVersion().GetValue()
            __UefiVersion = ConvertVersionToDecimal(__UefiVersion)
            self.SetUefiSpecificationVersion(str(__UefiVersion))
        if DefineObj.GetPiSpecificationVersion() is not None:
            __PiVersion = DefineObj.GetPiSpecificationVersion().GetValue()
            __PiVersion = ConvertVersionToDecimal(__PiVersion)
            self.SetPiSpecificationVersion(str(__PiVersion))
        SpecList = DefineObj.GetSpecification()
        NewSpecList = []
        for SpecItem in SpecList:
            NewSpecList.append((SpecItem[0], ConvertVersionToDecimal(SpecItem[1])))
        self.SetSpecList(NewSpecList)
        #
        # must exist items in INF define section
        # MODULE_TYPE/BASE_NAME/INF_VERSION/FILE_GUID/VERSION_STRING
        #
        if DefineObj.GetModuleType() is None:
            Logger.Error("InfParser", FORMAT_INVALID,
                         ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("MODULE_TYPE"), File=self.FullPath)
        else:
            self.SetModuleType(DefineObj.GetModuleType().GetValue())
            ModuleType = DefineObj.GetModuleType().GetValue()
            if ModuleType:
                #
                # Drivers and applications are not allowed to have a MODULE_TYPE of "BASE". Only
                # libraries are permitted to a have a MODULE_TYPE of "BASE".
                #
                if len(DefineObj.LibraryClass) == 0 and ModuleType == 'BASE':
                    Logger.Error("InfParser",
                                 FORMAT_INVALID,
                                 ST.ERR_INF_PARSER_MODULETYPE_INVALID,
                                 File=self.FullPath,
                                 Line=DefineObj.ModuleType.CurrentLine.LineNo,
                                 ExtraData=DefineObj.ModuleType.CurrentLine.LineString)
                self.LibModuleTypeList.append(ModuleType)
        if DefineObj.GetBaseName() is None:
            Logger.Error("InfParser", FORMAT_INVALID,
                         ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("BASE_NAME"), File=self.FullPath)
        else:
            self.SetBaseName(DefineObj.GetBaseName().GetValue())
        # A MODULE_UNI_FILE define switches the module's text language from
        # en-US to en-x (uni-file based localization).
        if DefineObj.GetModuleUniFileName():
            self.UniFileClassObject = UniFileClassObject([PathClass(DefineObj.GetModuleUniFileName())])
        else:
            self.UniFileClassObject = None
        if DefineObj.GetInfVersion() is None:
            Logger.Error("InfParser", FORMAT_INVALID,
                         ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("INF_VERSION"), File=self.FullPath)
        else:
            self.SetVersion(DefineObj.GetInfVersion().GetValue())
        if DefineObj.GetFileGuid() is None:
            Logger.Error("InfParser", FORMAT_INVALID,
                         ST.ERR_INF_PARSER_DEFINE_SECTION_MUST_ITEM_NOT_EXIST % ("FILE_GUID"), File=self.FullPath)
        else:
            self.SetGuid(DefineObj.GetFileGuid().GetValue())
        if DefineObj.GetVersionString() is None:
            #
            # VERSION_STRING is missing from the [Defines] section, tools must assume that the module's version is 0.
            #
            self.SetVersion('0')
        else:
            #
            # Get version of INF
            #
            if DefineObj.GetVersionString().GetValue() != "":
                #
                # EDK2 inf
                #
                VersionString = DefineObj.GetVersionString().GetValue()
                if len(VersionString) > 0:
                    VersionString = ConvertVersionToDecimal(VersionString)
                    self.SetVersion(VersionString)
            else:
                #
                # EDK1 inf
                #
                Logger.Error("Parser", PARSER_ERROR, ST.ERR_INF_PARSER_NOT_SUPPORT_EDKI_INF, ExtraData=self.FullPath,
                             RaiseError=Logger.IS_RAISE_ERROR)
        #
        # if there is Shadow, Should judge the MODULE_TYPE in
        # SEC, PEI_CORE and PEIM
        #
        if DefineObj.GetShadow():
            ModuleTypeValue = DefineObj.GetModuleType().GetValue()
            if not (ModuleTypeValue == 'SEC' or ModuleTypeValue == 'PEI_CORE' or ModuleTypeValue == 'PEIM'):
                Logger.Error("InfParser", FORMAT_INVALID, ST.ERR_INF_PARSER_DEFINE_SHADOW_INVALID, File=self.FullPath)
        if DefineObj.GetPcdIsDriver() is not None:
            self.SetPcdIsDriver(DefineObj.GetPcdIsDriver().GetValue())
        #
        # LIBRARY_CLASS
        #
        self._GenModuleHeaderLibClass(DefineObj, ArchList)
        #
        # CUSTOM_MAKEFILE
        #
        self.CustomMakefile = DefineObj.GetCustomMakefile()
        #
        # Externs in Defines section
        # Only one define section, so just call once.
        #
        if not HasCalledFlag:
            self._GenModuleHeaderExterns(DefineObj)
            HasCalledFlag = True
        #
        # each module has only one module header
        #
        self.SetSupArchList(ArchList)
        #
        # Get Hob/BootMode/EventList information
        #
        self._GenSpecialComments()
        #
        # put all define statement into user-extension sections
        #
        DefinesDictNew = GenModuleHeaderUserExt(DefineObj, ArchString)
        if DefinesDictNew:
            UserExtension = CommonObject.UserExtensionObject()
            UserExtension.SetDefinesDict(DefinesDictNew)
            UserExtension.SetIdentifier('DefineModifiers')
            UserExtension.SetUserID('EDK2')
            self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
        #
        # Get all meta-file header information
        # the record is list of items formatted:
        # [LineValue, Arch, StartLine, ID, Third]
        #
        InfHeaderObj = self.Parser.InfHeader
        #
        # Put header information into POM object
        #
        if self.UniFileClassObject:
            Lang = DT.TAB_LANGUAGE_EN_X
        else:
            Lang = DT.TAB_LANGUAGE_EN_US
        if InfHeaderObj.GetAbstract():
            self.SetAbstract((Lang, InfHeaderObj.GetAbstract()))
        if InfHeaderObj.GetDescription():
            self.SetDescription((Lang, InfHeaderObj.GetDescription()))
        if InfHeaderObj.GetCopyright():
            self.SetCopyright(('', InfHeaderObj.GetCopyright()))
        if InfHeaderObj.GetLicense():
            self.SetLicense(('', InfHeaderObj.GetLicense()))
        #
        # Put Binary header information into POM object
        #
        InfBinaryHeaderObj = self.Parser.InfBinaryHeader
        if InfBinaryHeaderObj.GetAbstract():
            self.SetBinaryHeaderAbstract((Lang, InfBinaryHeaderObj.GetAbstract()))
        if InfBinaryHeaderObj.GetDescription():
            self.SetBinaryHeaderDescription((Lang, InfBinaryHeaderObj.GetDescription()))
        if InfBinaryHeaderObj.GetCopyright():
            self.SetBinaryHeaderCopyright(('', InfBinaryHeaderObj.GetCopyright()))
        if InfBinaryHeaderObj.GetLicense():
            self.SetBinaryHeaderLicense(('', InfBinaryHeaderObj.GetLicense()))
## GenModuleHeaderLibClass
#
#
def _GenModuleHeaderLibClass(self, DefineObj, ArchList):
LibraryList = DefineObj.GetLibraryClass()
for LibraryItem in LibraryList:
Lib = CommonObject.LibraryClassObject()
Lib.SetLibraryClass(LibraryItem.GetLibraryName())
Lib.SetUsage(DT.USAGE_ITEM_PRODUCES)
SupModuleList = LibraryItem.GetTypes()
self.LibModuleTypeList += SupModuleList
Lib.SetSupModuleList(SupModuleList)
Lib.SetSupArchList(ArchList)
self.SetLibraryClassList(self.GetLibraryClassList() + [Lib])
self.SetIsLibrary(True)
self.SetIsLibraryModList(self.GetIsLibraryModList() + SupModuleList)
## GenModuleHeaderExterns
#
#
def _GenModuleHeaderExterns(self, DefineObj):
EntryPointList = DefineObj.GetEntryPoint()
for EntryPoint in EntryPointList:
Image = ExternObject()
Image.SetEntryPoint(EntryPoint.GetCName())
#
# Future enhancement
#
self.SetExternList(self.GetExternList() + [Image])
#
# UNLOAD_IMAGE
#
UnloadImageList = DefineObj.GetUnloadImages()
for UnloadImage in UnloadImageList:
Image = ExternObject()
#
# Future enhancement
#
Image.SetUnloadImage(UnloadImage.GetCName())
self.SetExternList(self.GetExternList() + [Image])
#
# CONSTRUCTOR
#
ConstructorList = DefineObj.GetConstructor()
for ConstructorItem in ConstructorList:
Image = ExternObject()
#
# Future enhancement
#
Image.SetConstructor(ConstructorItem.GetCName())
self.SetExternList(self.GetExternList() + [Image])
#
# DESTRUCTOR
#
DestructorList = DefineObj.GetDestructor()
for DestructorItem in DestructorList:
Image = ExternObject()
#
# Future enhancement
#
Image.SetDestructor(DestructorItem.GetCName())
self.SetExternList(self.GetExternList() + [Image])
## GenModuleHeaderExterns
# BootMode/HOB/Event
#
def _GenSpecialComments(self):
SpecialCommentsList = self.Parser.InfSpecialCommentSection.GetSpecialComments()
for Key in SpecialCommentsList:
if Key == DT.TYPE_HOB_SECTION:
HobList = []
for Item in SpecialCommentsList[Key]:
Hob = HobObject()
Hob.SetHobType(Item.GetHobType())
Hob.SetUsage(Item.GetUsage())
Hob.SetSupArchList(Item.GetSupArchList())
if Item.GetHelpString():
HelpTextObj = CommonObject.TextObject()
if self.UniFileClassObject:
HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
HelpTextObj.SetString(Item.GetHelpString())
Hob.SetHelpTextList([HelpTextObj])
HobList.append(Hob)
self.SetHobList(HobList)
elif Key == DT.TYPE_EVENT_SECTION:
EventList = []
for Item in SpecialCommentsList[Key]:
Event = EventObject()
Event.SetEventType(Item.GetEventType())
Event.SetUsage(Item.GetUsage())
if Item.GetHelpString():
HelpTextObj = CommonObject.TextObject()
if self.UniFileClassObject:
HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
HelpTextObj.SetString(Item.GetHelpString())
Event.SetHelpTextList([HelpTextObj])
EventList.append(Event)
self.SetEventList(EventList)
elif Key == DT.TYPE_BOOTMODE_SECTION:
BootModeList = []
for Item in SpecialCommentsList[Key]:
BootMode = BootModeObject()
BootMode.SetSupportedBootModes(Item.GetSupportedBootModes())
BootMode.SetUsage(Item.GetUsage())
if Item.GetHelpString():
HelpTextObj = CommonObject.TextObject()
if self.UniFileClassObject:
HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
HelpTextObj.SetString(Item.GetHelpString())
BootMode.SetHelpTextList([HelpTextObj])
BootModeList.append(BootMode)
self.SetBootModeList(BootModeList)
## GenBuildOptions
#
# Gen BuildOptions of Inf
# [<Family>:]<ToolFlag>=Flag
#
#
def _GenBuildOptions(self):
Logger.Debug(2, "Generate %s ..." % DT.TAB_BUILD_OPTIONS)
#
# Get all BuildOptions
#
BuildOptionsList = self.Parser.InfBuildOptionSection.GetBuildOptions()
if not GlobalData.gIS_BINARY_INF:
BuildOptionDict = {}
for BuildOptionObj in BuildOptionsList:
ArchList = BuildOptionObj.GetSupArchList()
ArchList = ConvertArchList(ArchList)
BuildOptionsContent = BuildOptionObj.GetContent()
ArchString = ' '.join(ArchList)
if not BuildOptionsContent:
continue
BuildOptionDict[ArchString] = BuildOptionsContent
if not BuildOptionDict:
return
UserExtension = CommonObject.UserExtensionObject()
UserExtension.SetBuildOptionDict(BuildOptionDict)
UserExtension.SetIdentifier('BuildOptionModifiers')
UserExtension.SetUserID('EDK2')
self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
else:
#
# Not process this information, will be processed in GenBinaries()
#
pass
## GenLibraryClasses
#
# Get LibraryClass of Inf
# <LibraryClassKeyWord>|<LibraryInstance>
#
# @param ContainerFile: The Inf file full path
#
def _GenLibraryClasses(self):
Logger.Debug(2, "Generate %s ..." % DT.TAB_LIBRARY_CLASSES)
if not GlobalData.gIS_BINARY_INF:
#
# Get all LibraryClasses
#
for LibraryClassData in self.Parser.InfLibraryClassSection.LibraryClasses.values():
for Item in LibraryClassData:
LibraryClass = CommonObject.LibraryClassObject()
LibraryClass.SetUsage(DT.USAGE_ITEM_CONSUMES)
LibraryClass.SetLibraryClass(Item.GetLibName())
LibraryClass.SetRecommendedInstance(None)
LibraryClass.SetFeatureFlag(Item.GetFeatureFlagExp())
LibraryClass.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
LibraryClass.SetSupModuleList(Item.GetSupModuleList())
HelpStringObj = Item.GetHelpString()
if HelpStringObj is not None:
CommentString = GetHelpStringByRemoveHashKey(HelpStringObj.HeaderComments +
HelpStringObj.TailComments)
HelpTextHeaderObj = CommonObject.TextObject()
if self.UniFileClassObject:
HelpTextHeaderObj.SetLang(DT.TAB_LANGUAGE_EN_X)
HelpTextHeaderObj.SetString(CommentString)
LibraryClass.SetHelpTextList([HelpTextHeaderObj])
self.SetLibraryClassList(self.GetLibraryClassList() + [LibraryClass])
## GenPackages
#
# Gen Packages of Inf
#
#
# @param ContainerFile: The Inf file full path
#
def _GenPackages(self, Skip):
Logger.Debug(2, "Generate %s ..." % DT.TAB_PACKAGES)
#
# Get all Packages
#
PackageObj = self.Parser.InfPackageSection.Packages
#
# Go through each arch
#
for PackageItemObj in PackageObj:
#
# Need package information for dependency check usage
#
PackageDependency = PackageDependencyObject()
PackageDependency.SetPackageFilePath(NormPath(PackageItemObj.GetPackageName()))
PackageDependency.SetSupArchList(ConvertArchList(PackageItemObj.GetSupArchList()))
PackageDependency.SetFeatureFlag(PackageItemObj.GetFeatureFlagExp())
PkgInfo = GetPkgInfoFromDec(mws.join(self.WorkSpace, NormPath(PackageItemObj.GetPackageName())))
if PkgInfo[1] and PkgInfo[2]:
PackageDependency.SetGuid(PkgInfo[1])
PackageDependency.SetVersion(PkgInfo[2])
elif Skip:
continue
else:
Logger.Error("\nUPT", PARSER_ERROR,
ST.ERR_INF_GET_PKG_DEPENDENCY_FAIL % PackageItemObj.GetPackageName(), File=self.FullPath)
PackageDependencyList = self.GetPackageDependencyList()
PackageDependencyList.append(PackageDependency)
self.SetPackageDependencyList(PackageDependencyList)
## GenPcds
#
# Gen Pcds of Inf
# <TokenSpaceGuidCName>.<PcdCName>[|<Value> [|<FFE>]]
#
# @param ContainerFile: The Inf file full path
#
def _GenPcds(self):
if not GlobalData.gIS_BINARY_INF:
Logger.Debug(2, "Generate %s ..." % DT.TAB_PCDS)
#
# Get all Pcds
#
PcdObj = self.Parser.InfPcdSection.Pcds
KeysList = PcdObj.keys()
#
# Go through each arch
#
for (PcdType, PcdKey) in KeysList:
PcdData = PcdObj[PcdType, PcdKey]
for PcdItemObj in PcdData:
CommentList = PcdItemObj.GetHelpStringList()
if CommentList:
for CommentItem in CommentList:
Pcd = CommonObject.PcdObject()
Pcd.SetCName(PcdItemObj.GetCName())
Pcd.SetTokenSpaceGuidCName(PcdItemObj.GetTokenSpaceGuidCName())
Pcd.SetDefaultValue(PcdItemObj.GetDefaultValue())
Pcd.SetItemType(PcdType)
Pcd.SetValidUsage(CommentItem.GetUsageItem())
Pcd.SetFeatureFlag(PcdItemObj.GetFeatureFlagExp())
Pcd.SetSupArchList(ConvertArchList(PcdItemObj.GetSupportArchList()))
HelpTextObj = CommonObject.TextObject()
if self.UniFileClassObject:
HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
HelpTextObj.SetString(CommentItem.GetHelpStringItem())
Pcd.SetHelpTextList([HelpTextObj])
PcdList = self.GetPcdList()
PcdList.append(Pcd)
self.SetPcdList(PcdList)
## GenSources
#
# Gen Sources of Inf
# <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
#
# @param ContainerFile: The Inf file full path
#
def _GenSources(self):
Logger.Debug(2, "Generate %s ..." % DT.TAB_SOURCES)
#
# Get all SourceFiles
#
SourceObj = self.Parser.InfSourcesSection.Sources
DataList = SourceObj.keys()
#
# Go through each arch
#
SourceList = []
for Key in DataList:
SourceData = SourceObj[Key]
for Item in SourceData:
SourceFile = Item.GetSourceFileName()
Family = Item.GetFamily()
FeatureFlag = Item.GetFeatureFlagExp()
SupArchList = sorted(ConvertArchList(Item.GetSupArchList()))
Source = SourceFileObject()
Source.SetSourceFile(SourceFile)
Source.SetFamily(Family)
Source.SetFeatureFlag(FeatureFlag)
Source.SetSupArchList(SupArchList)
SourceList.append(Source)
self.SetSourceFileList(self.GetSourceFileList() + SourceList)
## GenUserExtensions
#
# Gen UserExtensions of Inf
#
    def _GenUserExtensions(self):
        """Convert [UserExtensions] sections into UserExtensionObject
        instances and aggregate the TianoCore "BinaryHeader" extension
        from the INF and UNI binary header data."""
        #
        # UserExtensions
        #
        UserExtensionObj = self.Parser.InfUserExtensionSection.UserExtension
        Keys = UserExtensionObj.keys()
        for Key in Keys:
            UserExtensionData = UserExtensionObj[Key]
            for UserExtensionDataObj in UserExtensionData:
                UserExtension = CommonObject.UserExtensionObject()
                # UserId and Identifier may be quoted in the INF; strip a
                # matching pair of double quotes before use.
                UserId = UserExtensionDataObj.GetUserId()
                if UserId.startswith('"') and UserId.endswith('"'):
                    UserId = UserId[1:-1]
                UserExtension.SetUserID(UserId)
                Identifier = UserExtensionDataObj.GetIdString()
                if Identifier.startswith('"') and Identifier.endswith('"'):
                    Identifier = Identifier[1:-1]
                #
                # Generate miscellaneous files on INF file
                #
                if UserId == 'TianoCore' and Identifier == 'ExtraFiles':
                    self._GenMiscFiles(UserExtensionDataObj.GetContent())
                UserExtension.SetIdentifier(Identifier)
                UserExtension.SetStatement(UserExtensionDataObj.GetContent())
                UserExtension.SetSupArchList(ConvertArchList(UserExtensionDataObj.GetSupArchList()))
                self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
        #
        # Gen UserExtensions of TianoCore."BinaryHeader"
        #
        #Get Binary header from INF file
        # NOTE: these are aliases of the module's own lists, so the appends
        # below also extend the module state in place.
        BinaryAbstractList = self.BinaryHeaderAbstractList
        BinaryDescriptionList = self.BinaryHeaderDescriptionList
        BinaryCopyrightList = self.BinaryHeaderCopyrightList
        BinaryLicenseList = self.BinaryHeaderLicenseList
        #Get Binary header from UNI file
        # Initialize UniStrDict, the top keys are language codes
        UniStrDict = {}
        if self.UniFileClassObject:
            UniStrDict = self.UniFileClassObject.OrderedStringList
            for Lang in UniStrDict:
                for StringDefClassObject in UniStrDict[Lang]:
                    # NOTE(review): rebinds the loop variable on every inner
                    # iteration, re-applying GetLanguageCode1766 to an
                    # already-converted code — presumably idempotent; confirm.
                    Lang = GetLanguageCode1766(Lang)
                    if StringDefClassObject.StringName == DT.TAB_INF_BINARY_ABSTRACT:
                        BinaryAbstractList.append((Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
                    if StringDefClassObject.StringName == DT.TAB_INF_BINARY_DESCRIPTION:
                        BinaryDescriptionList.append((Lang, ConvertSpecialUnicodes(StringDefClassObject.StringValue)))
        # Only emit the BinaryHeader user extension when at least one of the
        # four header pieces is present.
        if BinaryAbstractList or BinaryDescriptionList or BinaryCopyrightList or BinaryLicenseList:
            BinaryUserExtension = CommonObject.UserExtensionObject()
            BinaryUserExtension.SetBinaryAbstract(BinaryAbstractList)
            BinaryUserExtension.SetBinaryDescription(BinaryDescriptionList)
            BinaryUserExtension.SetBinaryCopyright(BinaryCopyrightList)
            BinaryUserExtension.SetBinaryLicense(BinaryLicenseList)
            BinaryUserExtension.SetIdentifier(DT.TAB_BINARY_HEADER_IDENTIFIER)
            BinaryUserExtension.SetUserID(DT.TAB_BINARY_HEADER_USERID)
            self.SetUserExtensionList(self.GetUserExtensionList() + [BinaryUserExtension])
def _GenDepexesList(self, SmmDepexList, DxeDepexList, PeiDepexList):
if SmmDepexList:
self.SetSmmDepex(SmmDepexList)
if DxeDepexList:
self.SetDxeDepex(DxeDepexList)
if PeiDepexList:
self.SetPeiDepex(PeiDepexList)
## GenDepexes
#
# Gen Depex of Inf
#
# @param ContainerFile: The Inf file full path
#
    def _GenDepexes(self):
        """Convert [Depex] sections into DepexObject instances bucketed by
        phase (SMM/DXE/PEI).

        Library modules may carry depexes for each module type they
        support; drivers must match their own MODULE_TYPE.  Invalid
        combinations raise a parser error via Logger.Error.
        """
        Logger.Debug(2, "Generate %s ..." % DT.TAB_DEPEX)
        PEI_LIST = [DT.SUP_MODULE_PEIM]
        SMM_LIST = [DT.SUP_MODULE_DXE_SMM_DRIVER]
        DXE_LIST = [DT.SUP_MODULE_DXE_DRIVER, DT.SUP_MODULE_DXE_SAL_DRIVER,
                    DT.SUP_MODULE_DXE_RUNTIME_DRIVER]
        IsLibraryClass = self.GetIsLibrary()
        #
        # Get all Depexes
        #
        DepexData = self.Parser.InfDepexSection.GetDepex()
        SmmDepexList = []
        DxeDepexList = []
        PeiDepexList = []
        for Depex in DepexData:
            ModuleType = Depex.GetModuleType()
            ModuleTypeList = []
            if IsLibraryClass:
                # Validate the depex against the library's supported module
                # types; a BASE library may not carry an untyped depex.
                if self.GetModuleType() == 'BASE' and not ModuleType:
                    Logger.Error("\nMkPkg", PARSER_ERROR,
                                 ST.ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_BASE_LIBRARY_CLASS,
                                 self.GetFullPath(), RaiseError=True)
                if self.GetModuleType() != 'BASE' and not self.GetIsLibraryModList():
                    Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_LIBRARY_CLASS,
                                 self.GetFullPath(), RaiseError=True)
                if self.GetModuleType() != 'BASE' and ModuleType and ModuleType not in self.GetIsLibraryModList():
                    Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED,
                                 self.GetFullPath(), RaiseError=True)
                if ModuleType:
                    ModuleTypeList = [ModuleType]
                else:
                    # Untyped depex of a library: expand to every valid
                    # depex-capable module type the library supports.
                    for ModuleTypeInList in self.GetIsLibraryModList():
                        if ModuleTypeInList in DT.VALID_DEPEX_MODULE_TYPE_LIST:
                            ModuleTypeList.append(ModuleTypeInList)
                    if not ModuleTypeList:
                        Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED,
                                     self.GetFullPath(), RaiseError=True)
            else:
                # A driver's depex type defaults to, and must match, its
                # own MODULE_TYPE.
                if not ModuleType:
                    ModuleType = self.ModuleType
                if ModuleType not in DT.VALID_DEPEX_MODULE_TYPE_LIST:
                    Logger.Error("\nMkPkg", PARSER_ERROR,
                                 ST.ERR_INF_PARSER_DEPEX_SECTION_MODULE_TYPE_ERROR % (ModuleType),
                                 self.GetFullPath(), RaiseError=True)
                if ModuleType != self.ModuleType:
                    Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_NOT_DETERMINED,
                                 self.GetFullPath(), RaiseError=True)
                ModuleTypeList = [ModuleType]
            for ModuleType in ModuleTypeList:
                DepexIns = DepexObject()
                DepexIns.SetDepex(Depex.GetDepexContent())
                if IsLibraryClass:
                    DepexIns.SetModuleType(ModuleType)
                else:
                    if Depex.GetModuleType():
                        DepexIns.SetModuleType(Depex.GetModuleType())
                DepexIns.SetSupArchList(ConvertArchList([Depex.GetSupArch()]))
                DepexIns.SetFeatureFlag(Depex.GetFeatureFlagExp())
                if Depex.HelpString:
                    HelpIns = CommonObject.TextObject()
                    if self.UniFileClassObject:
                        HelpIns.SetLang(DT.TAB_LANGUAGE_EN_X)
                    HelpIns.SetString(GetHelpStringByRemoveHashKey(Depex.HelpString))
                    DepexIns.SetHelpText(HelpIns)
                # Bucket the depex by phase; UEFI_DRIVER is only legal for
                # libraries (stored with the DXE group).
                if ModuleType in SMM_LIST:
                    SmmDepexList.append(DepexIns)
                if ModuleType in DXE_LIST:
                    DxeDepexList.append(DepexIns)
                if ModuleType in PEI_LIST:
                    PeiDepexList.append(DepexIns)
                if ModuleType == DT.SUP_MODULE_UEFI_DRIVER:
                    if IsLibraryClass:
                        DxeDepexList.append(DepexIns)
                    else:
                        Logger.Error("\nMkPkg", PARSER_ERROR, ST.ERR_INF_PARSER_DEPEX_SECTION_INVALID_FOR_DRIVER,
                                     self.GetFullPath(), RaiseError=True)
            #End of for ModuleType in ModuleTypeList
            self._GenDepexesList(SmmDepexList, DxeDepexList, PeiDepexList)
        #End of for Depex in DepexData
## GenBinaries
#
# Gen Binary of Inf, must be called after Pcd/Library is generated
# <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
#
# @param ContainerFile: The Inf file full path
#
    def _GenBinaries(self):
        """Convert [Binaries] content plus the as-built data (library
        instances, build flags, patch/ex PCDs) into POM objects.

        Must run after the module header is generated and before the
        Pcd/Library sections are consumed (see _GenInfPomObjects ordering).
        """
        Logger.Debug(2, "Generate %s ..." % DT.TAB_BINARIES)
        BinariesDict = {}
        #
        # Get all Binary data
        #
        BinaryObj = self.Parser.InfBinariesSection.GetBinary()
        BinaryData = BinaryObj.keys()
        #
        # If the INF file does not contain a [Sources] section, and the INF file does contain a [Binaries] section,
        # then the ModuleSurfaceArea.BinaryModule attribute must be set to true. Otherwise, do not use the attribute
        #
        if BinaryObj and not self.Parser.InfSourcesSection.GetSources():
            self.BinaryModule = True
        else:
            self.BinaryModule = False
        BinaryFileObjectList = []
        AsBuildLibraryClassList = []
        AsBuildBuildOptionList = []
        AsBuildIns = AsBuiltObject()
        #
        # Library AsBuild Info
        #
        for LibItem in self.Parser.InfLibraryClassSection.GetLibraryClasses():
            AsBuildLibIns = AsBuildLibraryClassObject()
            AsBuildLibIns.SetLibGuid(LibItem.GetFileGuid())
            AsBuildLibIns.SetLibVersion(LibItem.GetVersion())
            AsBuildLibIns.SetSupArchList(LibItem.GetSupArchList())
            AsBuildLibraryClassList.append(AsBuildLibIns)
        AsBuildIns.SetLibraryInstancesList(AsBuildLibraryClassList)
        #
        # BuildOption AsBuild Info
        #
        for BuildOptionItem in self.Parser.InfBuildOptionSection.GetBuildOptions():
            AsBuildBuildOptionList.append(BuildOptionItem)
        AsBuildIns.SetBuildFlagsList(AsBuildBuildOptionList)
        #
        # PatchPcd and PcdEx
        #
        AsBuildIns = self._GenAsBuiltPcds(self.Parser.InfPcdSection.GetPcds(), AsBuildIns)
        #
        # Parse the DEC file that contains the GUID value of the GUID CName which is used by
        # SUBTYPE_GUID type binary file in the Binaries section in the INF file
        #
        DecObjList = []
        if not self.PackagePath:
            WorkSpace = os.path.normpath(self.WorkSpace)
            # Walk up the module path to its top-level directory, which is
            # taken as the package directory inside the workspace.
            TempPath = ModulePath = os.path.normpath(self.ModulePath)
            while ModulePath:
                TempPath = ModulePath
                ModulePath = os.path.dirname(ModulePath)
            PackageName = TempPath
            DecFilePath = os.path.normpath(os.path.join(WorkSpace, PackageName))
            if DecFilePath:
                for File in os.listdir(DecFilePath):
                    if File.upper().endswith('.DEC'):
                        DecFileFullPath = os.path.normpath(os.path.join(DecFilePath, File))
                        DecObjList.append(DecPomAlignment(DecFileFullPath, self.WorkSpace))
        BinariesDict, AsBuildIns, BinaryFileObjectList = GenBinaryData(BinaryData, BinaryObj,
                                                                       BinariesDict,
                                                                       AsBuildIns,
                                                                       BinaryFileObjectList,
                                                                       self.GetSupArchList(),
                                                                       self.BinaryModule,
                                                                       DecObjList)
        # Keep only the binary entries that carry extra modifier data; the
        # survivors are preserved in a 'BinaryFileModifiers' user extension.
        BinariesDict2 = {}
        for Key in BinariesDict:
            ValueList = BinariesDict[Key]
            if len(ValueList) > 1:
                BinariesDict2[Key] = ValueList
            else:
                #
                # if there is no TagName, ToolCode, HelpStr,
                # then do not need to put them into userextension
                #
                (Target, Family, TagName, HelpStr) = ValueList[0]
                if not (Target or Family or TagName or HelpStr):
                    continue
                else:
                    BinariesDict2[Key] = ValueList
        self.SetBinaryFileList(self.GetBinaryFileList() + BinaryFileObjectList)
        if BinariesDict2:
            UserExtension = CommonObject.UserExtensionObject()
            UserExtension.SetBinariesDict(BinariesDict2)
            UserExtension.SetIdentifier('BinaryFileModifiers')
            UserExtension.SetUserID('EDK2')
            self.SetUserExtensionList(self.GetUserExtensionList() + [UserExtension])
## GenAsBuiltPcds
#
#
    def _GenAsBuiltPcds(self, PcdList, AsBuildIns):
        """Split the parsed PCD entries into PatchPcd and PcdEx as-built
        lists and store them on the AsBuiltObject.

        @param PcdList:    (PcdType, PcdItemObj) pairs from the INF parser
        @param AsBuildIns: AsBuiltObject to receive the resulting lists
        @return: the same AsBuildIns with the lists attached
        """
        AsBuildPatchPcdList = []
        AsBuildPcdExList = []
        #
        # Pcd AsBuild Info
        #
        for PcdItem in PcdList:
            if PcdItem[0].upper() == DT.TAB_INF_PATCH_PCD.upper():
                PcdItemObj = PcdItem[1]
                Pcd = CommonObject.PcdObject()
                Pcd.SetCName(PcdItemObj.GetCName())
                Pcd.SetTokenSpaceGuidCName(PcdItemObj.GetTokenSpaceGuidCName())
                # A binary module must fully declare its patchable PCDs:
                # token-space GUID value, token and datum type are required.
                if PcdItemObj.GetTokenSpaceGuidValue() == '' and self.BinaryModule:
                    Logger.Error("\nMkPkg",
                                 PARSER_ERROR,
                                 ST.ERR_ASBUILD_PCD_TOKENSPACE_GUID_VALUE_MISS % \
                                 (PcdItemObj.GetTokenSpaceGuidCName()),
                                 self.GetFullPath(), RaiseError=True)
                else:
                    Pcd.SetTokenSpaceGuidValue(PcdItemObj.GetTokenSpaceGuidValue())
                if (PcdItemObj.GetToken() == '' or PcdItemObj.GetDatumType() == '') and self.BinaryModule:
                    Logger.Error("\nMkPkg",
                                 PARSER_ERROR,
                                 ST.ERR_ASBUILD_PCD_DECLARITION_MISS % \
                                 (PcdItemObj.GetTokenSpaceGuidCName() + '.' + PcdItemObj.GetCName()),
                                 self.GetFullPath(), RaiseError=True)
                Pcd.SetToken(PcdItemObj.GetToken())
                Pcd.SetDatumType(PcdItemObj.GetDatumType())
                Pcd.SetMaxDatumSize(PcdItemObj.GetMaxDatumSize())
                Pcd.SetDefaultValue(PcdItemObj.GetDefaultValue())
                Pcd.SetOffset(PcdItemObj.GetOffset())
                Pcd.SetItemType(PcdItem[0])
                Pcd.SetFeatureFlag(PcdItemObj.GetFeatureFlagExp())
                Pcd.SetSupArchList(ConvertArchList(PcdItemObj.GetSupportArchList()))
                Pcd.SetValidUsage(PcdItemObj.GetValidUsage())
                for CommentItem in PcdItemObj.GetHelpStringList():
                    HelpTextObj = CommonObject.TextObject()
                    if self.UniFileClassObject:
                        HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
                    HelpTextObj.SetString(CommentItem.GetHelpStringItem())
                    Pcd.SetHelpTextList(Pcd.GetHelpTextList() + [HelpTextObj])
                AsBuildPatchPcdList.append(Pcd)
            elif PcdItem[0].upper() == DT.TAB_INF_PCD_EX.upper():
                PcdItemObj = PcdItem[1]
                Pcd = CommonObject.PcdObject()
                Pcd.SetTokenSpaceGuidValue(PcdItemObj.GetTokenSpaceGuidValue())
                Pcd.SetToken(PcdItemObj.GetToken())
                Pcd.SetDatumType(PcdItemObj.GetDatumType())
                Pcd.SetMaxDatumSize(PcdItemObj.GetMaxDatumSize())
                Pcd.SetDefaultValue(PcdItemObj.GetDefaultValue())
                Pcd.SetItemType(PcdItem[0])
                Pcd.SetFeatureFlag(PcdItemObj.GetFeatureFlagExp())
                Pcd.SetSupArchList(ConvertArchList(PcdItemObj.GetSupportArchList()))
                Pcd.SetValidUsage(PcdItemObj.GetValidUsage())
                for CommentItem in PcdItemObj.GetHelpStringList():
                    HelpTextObj = CommonObject.TextObject()
                    if self.UniFileClassObject:
                        HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
                    HelpTextObj.SetString(CommentItem.GetHelpStringItem())
                    Pcd.SetHelpTextList(Pcd.GetHelpTextList() + [HelpTextObj])
                AsBuildPcdExList.append(Pcd)
        AsBuildIns.SetPatchPcdList(AsBuildPatchPcdList)
        AsBuildIns.SetPcdExList(AsBuildPcdExList)
        return AsBuildIns
## GenGuidProtocolPpis
#
# Gen Guids/Protocol/Ppis of INF
# <CName>=<GuidValue>
#
    def _GenGuidProtocolPpis(self, Type):
        """Convert one of the [Guids]/[Protocols]/[Ppis] sections into POM
        objects and append them to the matching module list.

        @param Type: DT.TAB_GUIDS, DT.TAB_PROTOCOLS or DT.TAB_PPIS
        """
        Logger.Debug(2, "Generate %s ..." % Type)
        #
        # Get all Guid/Protocol/Ppis data
        #
        GuidObj = self.Parser.InfGuidSection.GetGuid()
        ProtocolObj = self.Parser.InfProtocolSection.GetProtocol()
        PpisObj = self.Parser.InfPpiSection.GetPpi()
        GuidProtocolPpiList = []
        if Type == DT.TAB_GUIDS:
            GuidData = GuidObj.keys()
            for Item in GuidData:
                CommentList = Item.GetCommentList()
                #
                # Depend on CommentList content
                # generate muti-guid-obj
                #
                if CommentList:
                    for GuidComentItem in CommentList:
                        ListObject = CommonObject.GuidObject()
                        ListObject.SetGuidTypeList([GuidComentItem.GetGuidTypeItem()])
                        ListObject.SetVariableName(GuidComentItem.GetVariableNameItem())
                        ListObject.SetUsage(GuidComentItem.GetUsageItem())
                        ListObject.SetName(Item.GetName())
                        ListObject.SetCName(Item.GetName())
                        ListObject.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
                        ListObject.SetFeatureFlag(Item.GetFeatureFlagExp())
                        # Only attach a help text object when the comment
                        # carries non-whitespace content.
                        HelpString = GuidComentItem.GetHelpStringItem()
                        if HelpString.strip():
                            HelpTxtTailObj = CommonObject.TextObject()
                            if self.UniFileClassObject:
                                HelpTxtTailObj.SetLang(DT.TAB_LANGUAGE_EN_X)
                            HelpTxtTailObj.SetString(HelpString)
                            ListObject.SetHelpTextList([HelpTxtTailObj])
                        GuidProtocolPpiList.append(ListObject)
        elif Type == DT.TAB_PROTOCOLS:
            ProtocolData = ProtocolObj.keys()
            for Item in ProtocolData:
                CommentList = Item.GetCommentList()
                for CommentItem in CommentList:
                    ListObject = CommonObject.ProtocolObject()
                    ListObject.SetCName(Item.GetName())
                    ListObject.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
                    ListObject.SetFeatureFlag(Item.GetFeatureFlagExp())
                    ListObject.SetNotify(CommentItem.GetNotify())
                    ListObject.SetUsage(CommentItem.GetUsageItem())
                    HelpString = CommentItem.GetHelpStringItem()
                    if HelpString.strip():
                        HelpTxtObj = CommonObject.TextObject()
                        if self.UniFileClassObject:
                            HelpTxtObj.SetLang(DT.TAB_LANGUAGE_EN_X)
                        HelpTxtObj.SetString(HelpString)
                        ListObject.SetHelpTextList([HelpTxtObj])
                    GuidProtocolPpiList.append(ListObject)
        elif Type == DT.TAB_PPIS:
            PpiData = PpisObj.keys()
            for Item in PpiData:
                CommentList = Item.GetCommentList()
                for CommentItem in CommentList:
                    ListObject = CommonObject.PpiObject()
                    ListObject.SetCName(Item.GetName())
                    ListObject.SetSupArchList(ConvertArchList(Item.GetSupArchList()))
                    ListObject.SetFeatureFlag(Item.GetFeatureFlagExp())
                    ListObject.SetNotify(CommentItem.GetNotify())
                    ListObject.SetUsage(CommentItem.GetUsage())
                    HelpString = CommentItem.GetHelpStringItem()
                    if HelpString.strip():
                        HelpTextObj = CommonObject.TextObject()
                        if self.UniFileClassObject:
                            HelpTextObj.SetLang(DT.TAB_LANGUAGE_EN_X)
                        HelpTextObj.SetString(HelpString)
                        ListObject.SetHelpTextList([HelpTextObj])
                    GuidProtocolPpiList.append(ListObject)
        if Type == DT.TAB_GUIDS:
            self.SetGuidList(self.GetGuidList() + GuidProtocolPpiList)
        elif Type == DT.TAB_PROTOCOLS:
            self.SetProtocolList(self.GetProtocolList() + GuidProtocolPpiList)
        elif Type == DT.TAB_PPIS:
            self.SetPpiList(self.GetPpiList() + GuidProtocolPpiList)
## GenMiscFiles
#
# Build the MiscellaneousFiles object for this module from the raw
# section text and append it to the module's misc-file list.
#
# @param Content: Raw text of the [MiscellaneousFiles] section
#
def _GenMiscFiles(self, Content):
    MiscFileObj = CommonObject.MiscFileObject()
    for Line in Content.splitlines():
        # Drop any trailing '#' comment; the remainder is the file name.
        CommentPos = Line.find('#')
        FileName = Line[:CommentPos] if CommentPos != -1 else Line
        if not FileName:
            continue
        if IsValidPath(FileName, GlobalData.gINF_MODULE_DIR):
            FileObj = CommonObject.FileObject()
            FileObj.SetURI(FileName)
            MiscFileObj.SetFileList(MiscFileObj.GetFileList() + [FileObj])
        else:
            Logger.Error("InfParser",
                         FORMAT_INVALID,
                         ST.ERR_INF_PARSER_FILE_NOT_EXIST_OR_NAME_INVALID % (Line),
                         File=GlobalData.gINF_MODULE_NAME,
                         ExtraData=Line)
    self.SetMiscFileList(self.GetMiscFileList() + [MiscFileObj])
| edk2-master | BaseTools/Source/Python/UPT/PomAdapter/InfPomAlignment.py |
## @file
# Python 'Library' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenMetaFile
'''
| edk2-master | BaseTools/Source/Python/UPT/GenMetaFile/__init__.py |
## @file GenMetaFileMisc.py
#
# This file contained the miscellaneous routines for GenMetaFile usage.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenMetaFileMisc
'''
from Library import DataType as DT
from Library import GlobalData
from Parser.DecParser import Dec
# AddExternToDefineSec
#
# Append the extern statements (ENTRY_POINT, UNLOAD_IMAGE, CONSTRUCTOR,
# DESTRUCTOR) of each extern item to the [Defines] section dictionary.
#
# @param SectionDict: Dict of Arch -> list of [Defines] statements; the
#                     Arch key is expected to already exist.
# @param Arch:        Arch key under which statements are appended
# @param ExternList:  List of [ArchList, EntryPoint, UnloadImage,
#                     Constructor, Destructor, FFE, HelpStringList]
#
def AddExternToDefineSec(SectionDict, Arch, ExternList):
    LeftOffset = 31

    def _AppendStatement(Keyword, Value, FFE, HelpStringList):
        # Build one "KEYWORD = Value [| FFE]" statement, wrapped by up to
        # two help-string comments, and append it under Arch.
        Statement = (u'%s ' % Keyword).ljust(LeftOffset) + u'= %s' % Value
        if FFE:
            Statement += ' | %s' % FFE
        if len(HelpStringList) > 0:
            Statement = HelpStringList[0].GetString() + '\n' + Statement
        if len(HelpStringList) > 1:
            Statement = Statement + HelpStringList[1].GetString()
        SectionDict[Arch] = SectionDict[Arch] + [Statement]

    for ArchList, EntryPoint, UnloadImage, Constructor, Destructor, FFE, HelpStringList in ExternList:
        if not (Arch or ArchList):
            continue
        if EntryPoint:
            _AppendStatement(DT.TAB_INF_DEFINES_ENTRY_POINT, EntryPoint, FFE, HelpStringList)
        if UnloadImage:
            _AppendStatement(DT.TAB_INF_DEFINES_UNLOAD_IMAGE, UnloadImage, FFE, HelpStringList)
        if Constructor:
            _AppendStatement(DT.TAB_INF_DEFINES_CONSTRUCTOR, Constructor, FFE, HelpStringList)
        if Destructor:
            _AppendStatement(DT.TAB_INF_DEFINES_DESTRUCTOR, Destructor, FFE, HelpStringList)
## ObtainPcdName
#
# Use TokenSpaceGuidValue and Token to obtain the TokenSpaceGuid C name
# and PCD C name from the DEC files of the dependency packages.
#
# @param Packages:            List of package dependency objects
# @param TokenSpaceGuidValue: GUID value string of the token space
# @param Token:               Token value of the PCD
#
def ObtainPcdName(Packages, TokenSpaceGuidValue, Token):
    TokenSpaceGuidName = ''
    PcdCName = ''
    TokenSpaceGuidNameFound = False
    for PackageDependency in Packages:
        Guid = PackageDependency.GetGuid()
        Version = PackageDependency.GetVersion()
        #
        # Locate the package path/name in the workspace package list.
        #
        Path = None
        for PkgInfo in GlobalData.gWSPKG_LIST:
            if Guid == PkgInfo[1] and ((not Version) or (Version == PkgInfo[2])):
                Path = PkgInfo[3]
                break
        if Path:
            # The dependency package is in the workspace: parse (and cache)
            # its DEC file, then search its Guids/Pcds section objects.
            if Path not in GlobalData.gPackageDict:
                GlobalData.gPackageDict[Path] = Dec(Path)
            DecFile = GlobalData.gPackageDict[Path]
            DecGuidsDict = DecFile.GetGuidSectionObject().ValueDict
            DecPcdsDict = DecFile.GetPcdSectionObject().ValueDict
            TokenSpaceGuidName = ''
            PcdCName = ''
            TokenSpaceGuidNameFound = False
            #
            # Resolve TokenSpaceGuidCName from the Guids section.
            #
            for GuidList in DecGuidsDict.values():
                for GuidItem in GuidList:
                    if TokenSpaceGuidValue.upper() == GuidItem.GuidString.upper():
                        TokenSpaceGuidName = GuidItem.GuidCName
                        TokenSpaceGuidNameFound = True
                        break
                if TokenSpaceGuidNameFound:
                    break
            #
            # Resolve PcdCName from the Pcds sections.
            #
            for PcdList in DecPcdsDict.values():
                for PcdItem in PcdList:
                    if TokenSpaceGuidName == PcdItem.TokenSpaceGuidCName and Token == PcdItem.TokenValue:
                        PcdCName = PcdItem.TokenCName
                        return TokenSpaceGuidName, PcdCName
        else:
            # The dependency package is in a not-yet-installed distribution:
            # search the distribution's package surface areas instead.
            for Dist in GlobalData.gTO_BE_INSTALLED_DIST_LIST:
                for Package in Dist.PackageSurfaceArea.values():
                    if Guid != Package.Guid:
                        continue
                    for GuidItem in Package.GuidList:
                        if TokenSpaceGuidValue.upper() == GuidItem.Guid.upper():
                            TokenSpaceGuidName = GuidItem.CName
                            TokenSpaceGuidNameFound = True
                            break
                    for PcdItem in Package.PcdList:
                        if TokenSpaceGuidName == PcdItem.TokenSpaceGuidCName and Token == PcdItem.Token:
                            PcdCName = PcdItem.CName
                            return TokenSpaceGuidName, PcdCName
    return TokenSpaceGuidName, PcdCName
## TransferDict
#
# Transfer a dict keyed by (Statement, SortedArch) with comment values
# into a dict keyed by SortedArch whose values are lists of statements
# merged with their comments.
#
# @param OrigDict: dict of (Statement, SortedArch) -> Comment
# @param Type:     section type used to choose the alignment column
#
def TransferDict(OrigDict, Type=None):
    NewDict = {}
    # Minimum alignment column depends on the section type.
    LeftOffset = 0
    if Type in ('INF_GUID', 'INF_PPI_PROTOCOL'):
        LeftOffset = 45
    elif Type == 'INF_PCD':
        LeftOffset = 75
    if LeftOffset > 0:
        # Widen the column so the longest statement still fits.
        for Statement, SortedArch in OrigDict:
            LeftOffset = max(LeftOffset, len(Statement))
    for Statement, SortedArch in OrigDict:
        Comment = OrigDict[Statement, SortedArch]
        #
        # NComment/1Comment rule: a comment that is not exactly one line
        # ending in '\n' is placed on its own line(s) before the statement;
        # otherwise it is appended on the same line, column-aligned.
        #
        if Comment.find('\n') != len(Comment) - 1:
            NewStatement = Comment + Statement
        elif LeftOffset:
            NewStatement = Statement.ljust(LeftOffset) + ' ' + Comment.rstrip('\n')
        else:
            NewStatement = Statement + ' ' + Comment.rstrip('\n')
        NewDict.setdefault(SortedArch, []).append(NewStatement)
    return NewDict
| edk2-master | BaseTools/Source/Python/UPT/GenMetaFile/GenMetaFileMisc.py |
## @file GenInfFile.py
#
# This file contained the logical of transfer package object to INF files.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenInf
'''
import os
import stat
import codecs
from hashlib import md5
from Core.FileHook import __FileHookOpen__
from Library.StringUtils import GetSplitValueList
from Library.Parsing import GenSection
from Library.Parsing import GetWorkspacePackage
from Library.Parsing import ConvertArchForInstall
from Library.Misc import SaveFileOnChange
from Library.Misc import IsAllModuleList
from Library.Misc import Sdict
from Library.Misc import ConvertPath
from Library.Misc import ConvertSpec
from Library.Misc import GetRelativePath
from Library.Misc import GetLocalValue
from Library.CommentGenerating import GenHeaderCommentSection
from Library.CommentGenerating import GenGenericCommentF
from Library.CommentGenerating import _GetHelpStr
from Library import GlobalData
from Logger import StringTable as ST
from Logger import ToolError
import Logger.Log as Logger
from Library import DataType as DT
from GenMetaFile import GenMetaFileMisc
from Library.UniClassObject import FormatUniEntry
from Library.StringUtils import GetUniFileName
## Transfer Module Object to Inf files
#
# Transfer all contents of a standard Module Object to an INF file.
#
# @param ModuleObject:  A Module Object
# @param PackageObject: Package the module belongs to, or None
# @param DistHeader:    Distribution header object, or None
#
# @retval ContainerFile: Full path of the INF file that was written
#
def ModuleToInf(ModuleObject, PackageObject=None, DistHeader=None):
    if not GlobalData.gWSPKG_LIST:
        GlobalData.gWSPKG_LIST = GetWorkspacePackage()
    #
    # Init global information for the file
    #
    ContainerFile = ModuleObject.GetFullPath()
    Content = ''
    #
    # Generate file header.  If any Abstract, Description, Copyright or
    # License XML elements are missing, 1) use the corresponding element of
    # the PackageSurfaceArea.Header the module belongs to, or 2) for a
    # stand-alone module not included in a PackageSurfaceArea, use the
    # element from the DistributionPackage.Header.
    #
    ModuleAbstract = GetLocalValue(ModuleObject.GetAbstract())
    if not ModuleAbstract and PackageObject:
        ModuleAbstract = GetLocalValue(PackageObject.GetAbstract())
    if not ModuleAbstract and DistHeader:
        ModuleAbstract = GetLocalValue(DistHeader.GetAbstract())
    ModuleDescription = GetLocalValue(ModuleObject.GetDescription())
    if not ModuleDescription and PackageObject:
        ModuleDescription = GetLocalValue(PackageObject.GetDescription())
    if not ModuleDescription and DistHeader:
        ModuleDescription = GetLocalValue(DistHeader.GetDescription())
    # These loops deliberately keep only the LAST (Lang, Value) pair.
    ModuleCopyright = ''
    for (Lang, Copyright) in ModuleObject.GetCopyright():
        ModuleCopyright = Copyright
    if not ModuleCopyright and PackageObject:
        for (Lang, Copyright) in PackageObject.GetCopyright():
            ModuleCopyright = Copyright
    if not ModuleCopyright and DistHeader:
        for (Lang, Copyright) in DistHeader.GetCopyright():
            ModuleCopyright = Copyright
    ModuleLicense = ''
    for (Lang, License) in ModuleObject.GetLicense():
        ModuleLicense = License
    if not ModuleLicense and PackageObject:
        for (Lang, License) in PackageObject.GetLicense():
            ModuleLicense = License
    if not ModuleLicense and DistHeader:
        for (Lang, License) in DistHeader.GetLicense():
            ModuleLicense = License
    #
    # Generate header comment section of INF file
    #
    Content += GenHeaderCommentSection(ModuleAbstract,
                                       ModuleDescription,
                                       ModuleCopyright,
                                       ModuleLicense).replace('\r\n', '\n')
    #
    # Generate Binary Header (only when all four elements are present)
    #
    for UserExtension in ModuleObject.GetUserExtensionList():
        if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \
        and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
            ModuleBinaryAbstract = GetLocalValue(UserExtension.GetBinaryAbstract())
            ModuleBinaryDescription = GetLocalValue(UserExtension.GetBinaryDescription())
            ModuleBinaryCopyright = ''
            ModuleBinaryLicense = ''
            for (Lang, Copyright) in UserExtension.GetBinaryCopyright():
                ModuleBinaryCopyright = Copyright
            for (Lang, License) in UserExtension.GetBinaryLicense():
                ModuleBinaryLicense = License
            if ModuleBinaryAbstract and ModuleBinaryDescription and \
               ModuleBinaryCopyright and ModuleBinaryLicense:
                Content += GenHeaderCommentSection(ModuleBinaryAbstract,
                                                   ModuleBinaryDescription,
                                                   ModuleBinaryCopyright,
                                                   ModuleBinaryLicense,
                                                   True)
    #
    # Generate MODULE_UNI_FILE for module
    #
    FileHeader = GenHeaderCommentSection(ModuleAbstract, ModuleDescription, ModuleCopyright, ModuleLicense, False, \
                                         DT.TAB_COMMENT_EDK1_SPLIT)
    ModuleUniFile = GenModuleUNIEncodeFile(ModuleObject, FileHeader)
    if ModuleUniFile:
        ModuleObject.SetModuleUniFile(os.path.basename(ModuleUniFile))
    #
    # Judge whether the INF file is an AsBuild INF.
    #
    GlobalData.gIS_BINARY_INF = bool(ModuleObject.BinaryModule)
    #
    # Gen section contents.  Each Gen* helper maintains a dict keyed by the
    # sorted-arch string with a statement list as its value, e.g.
    # { 'Arch1 Arch2 Arch3': [statement1, statement2], 'Arch1': [...] }
    #
    Content += GenDefines(ModuleObject)
    Content += GenBuildOptions(ModuleObject)
    Content += GenLibraryClasses(ModuleObject)
    Content += GenPackages(ModuleObject)
    Content += GenPcdSections(ModuleObject)
    Content += GenSources(ModuleObject)
    Content += GenProtocolPPiSections(ModuleObject.GetProtocolList(), True)
    Content += GenProtocolPPiSections(ModuleObject.GetPpiList(), False)
    Content += GenGuidSections(ModuleObject.GetGuidList())
    Content += GenBinaries(ModuleObject)
    Content += GenDepex(ModuleObject)
    __UserExtensionsContent = GenUserExtensions(ModuleObject)
    Content += __UserExtensionsContent
    if ModuleObject.GetEventList() or ModuleObject.GetBootModeList() or ModuleObject.GetHobList():
        Content += '\n'
    #
    # generate [Event], [BootMode], [Hob] section
    #
    Content += GenSpecialSections(ModuleObject.GetEventList(), 'Event', __UserExtensionsContent)
    Content += GenSpecialSections(ModuleObject.GetBootModeList(), 'BootMode', __UserExtensionsContent)
    Content += GenSpecialSections(ModuleObject.GetHobList(), 'Hob', __UserExtensionsContent)
    SaveFileOnChange(ContainerFile, Content, False)
    #
    # A read-only distribution yields a read-only INF.  DistHeader defaults
    # to None, so guard the attribute access instead of crashing when this
    # function is called without a distribution header.
    #
    if DistHeader and DistHeader.ReadOnly:
        os.chmod(ContainerFile, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
    else:
        os.chmod(ContainerFile,
                 stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
    return ContainerFile
## GenModuleUNIEncodeFile
#
# Generate the MODULE_UNI_FILE for a module; the default encoding is
# UCS-2LE.  Returns the UNI file path, or None when no UNI file is needed
# (no localized strings, or only en-x strings are present).
#
def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCODING_UTF16LE):
    GenUNIFlag = False
    OnlyLANGUAGE_EN_X = True
    BinaryAbstract = []
    BinaryDescription = []
    #
    # If more than one language code is used for any element that would be
    # present in the MODULE_UNI_FILE, then the file must be created.
    #
    for (Key, Value) in ModuleObject.GetAbstract() + ModuleObject.GetDescription():
        if Key == DT.TAB_LANGUAGE_EN_X:
            GenUNIFlag = True
        else:
            OnlyLANGUAGE_EN_X = False
    for UserExtension in ModuleObject.GetUserExtensionList():
        if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \
        and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
            for (Key, Value) in UserExtension.GetBinaryAbstract():
                if Key == DT.TAB_LANGUAGE_EN_X:
                    GenUNIFlag = True
                else:
                    OnlyLANGUAGE_EN_X = False
                BinaryAbstract.append((Key, Value))
            for (Key, Value) in UserExtension.GetBinaryDescription():
                if Key == DT.TAB_LANGUAGE_EN_X:
                    GenUNIFlag = True
                else:
                    OnlyLANGUAGE_EN_X = False
                BinaryDescription.append((Key, Value))
    # Nothing to localize, or en-x only: no UNI file is produced.
    if not GenUNIFlag or OnlyLANGUAGE_EN_X:
        return
    ModuleObject.UNIFlag = True
    ModuleDir = os.path.dirname(ModuleObject.GetFullPath())
    ContainerFile = GetUniFileName(ModuleDir, ModuleObject.GetBaseName())
    if not os.path.exists(ModuleDir):
        os.makedirs(ModuleDir)
    Content = UniFileHeader + '\r\n'
    Content += '\r\n'
    Content += FormatUniEntry('#string ' + DT.TAB_INF_ABSTRACT, ModuleObject.GetAbstract(), ContainerFile) + '\r\n'
    Content += FormatUniEntry('#string ' + DT.TAB_INF_DESCRIPTION, ModuleObject.GetDescription(), ContainerFile) \
               + '\r\n'
    BinaryAbstractString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_ABSTRACT, BinaryAbstract, ContainerFile)
    if BinaryAbstractString:
        Content += BinaryAbstractString + '\r\n'
    BinaryDescriptionString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_DESCRIPTION, BinaryDescription, \
                                             ContainerFile)
    if BinaryDescriptionString:
        Content += BinaryDescriptionString + '\r\n'
    # Write the file (with a BOM) only when it does not already exist.
    if not os.path.exists(ContainerFile):
        UniFile = codecs.open(ContainerFile, 'wb', Encoding)
        UniFile.write(u'\uFEFF' + Content)
        UniFile.stream.close()
    # Record the file with its md5 digest in the module's file list.
    Md5Signature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
    Md5Sum = Md5Signature.hexdigest()
    if (ContainerFile, Md5Sum) not in ModuleObject.FileList:
        ModuleObject.FileList.append((ContainerFile, Md5Sum))
    return ContainerFile
## GenDefines
#
# Generate the [Defines] section content for the INF file.
#
# @param ModuleObject: A Module Object
#
def GenDefines(ModuleObject):
    LeftOffset = 31
    Content = ''
    NewSectionDict = {}

    def _DefineStatement(Key, Value):
        # Format one "Key = Value" line with a left-aligned key column.
        return (u'%s ' % Key).ljust(LeftOffset) + u'= %s' % Value

    #
    # User-extension defines, normalized to the aligned "Key = Value" form.
    #
    for UserExtension in ModuleObject.GetUserExtensionList():
        DefinesDict = UserExtension.GetDefinesDict()
        if not DefinesDict:
            continue
        for Statement in DefinesDict:
            if len(Statement.split(DT.TAB_EQUAL_SPLIT)) > 1:
                Statement = _DefineStatement(Statement.split(DT.TAB_EQUAL_SPLIT, 1)[0],
                                             Statement.split(DT.TAB_EQUAL_SPLIT, 1)[1].lstrip())
            SortedArch = DT.TAB_ARCH_COMMON
            if Statement.strip().startswith(DT.TAB_INF_DEFINES_CUSTOM_MAKEFILE):
                # Normalize the path carried by a CUSTOM_MAKEFILE define.
                Pos = Statement.find(DT.TAB_VALUE_SPLIT)
                if Pos == -1:
                    Pos = Statement.find(DT.TAB_EQUAL_SPLIT)
                Makefile = ConvertPath(Statement[Pos + 1:].strip())
                Statement = Statement[:Pos + 1] + ' ' + Makefile
            NewSectionDict.setdefault(SortedArch, []).append(Statement)
    #
    # Mandatory and optional special defines.
    #
    SpecialStatementList = [_DefineStatement(DT.TAB_INF_DEFINES_INF_VERSION, '0x00010017')]
    BaseName = ModuleObject.GetBaseName()
    if BaseName.startswith('.') or BaseName.startswith('-'):
        # BASE_NAME must not start with '.' or '-': prefix an underscore.
        BaseName = '_' + BaseName
    SpecialStatementList.append(_DefineStatement(DT.TAB_INF_DEFINES_BASE_NAME, BaseName))
    SpecialStatementList.append(_DefineStatement(DT.TAB_INF_DEFINES_FILE_GUID, ModuleObject.GetGuid()))
    SpecialStatementList.append(_DefineStatement(DT.TAB_INF_DEFINES_VERSION_STRING, ModuleObject.GetVersion()))
    if ModuleObject.UNIFlag:
        SpecialStatementList.append(
            _DefineStatement(DT.TAB_INF_DEFINES_MODULE_UNI_FILE, ModuleObject.GetModuleUniFile()))
    if ModuleObject.GetModuleType():
        SpecialStatementList.append(
            _DefineStatement(DT.TAB_INF_DEFINES_MODULE_TYPE, ModuleObject.GetModuleType()))
    if ModuleObject.GetPcdIsDriver():
        SpecialStatementList.append(
            _DefineStatement(DT.TAB_INF_DEFINES_PCD_IS_DRIVER, ModuleObject.GetPcdIsDriver()))
    if ModuleObject.GetUefiSpecificationVersion():
        SpecialStatementList.append(
            _DefineStatement(DT.TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION,
                             ModuleObject.GetUefiSpecificationVersion()))
    if ModuleObject.GetPiSpecificationVersion():
        SpecialStatementList.append(
            _DefineStatement(DT.TAB_INF_DEFINES_PI_SPECIFICATION_VERSION,
                             ModuleObject.GetPiSpecificationVersion()))
    #
    # Produced library classes are exposed as LIBRARY_CLASS defines.
    #
    for LibraryClass in ModuleObject.GetLibraryClassList():
        if LibraryClass.GetUsage() in (DT.USAGE_ITEM_PRODUCES, DT.USAGE_ITEM_SOMETIMES_PRODUCES):
            Statement = _DefineStatement(DT.TAB_INF_DEFINES_LIBRARY_CLASS, LibraryClass.GetLibraryClass())
            if LibraryClass.GetSupModuleList():
                Statement += '|' + DT.TAB_SPACE_SPLIT.join(LibraryClass.GetSupModuleList())
            SpecialStatementList.append(Statement)
    #
    # Spec items.
    #
    for Spec, Version in ModuleObject.GetSpecList():
        SpecialStatementList.append('%s %s = %s' % (DT.TAB_INF_DEFINES_SPEC, ConvertSpec(Spec), Version))
    #
    # Collect extern information for AddExternToDefineSec.
    #
    ExternList = []
    for Extern in ModuleObject.GetExternList():
        ExternList.append([Extern.GetSupArchList(),
                           Extern.GetEntryPoint(),
                           Extern.GetUnloadImage(),
                           Extern.GetConstructor(),
                           Extern.GetDestructor(),
                           Extern.GetFeatureFlag(),
                           Extern.GetHelpTextList()])
    #
    # Add VALID_ARCHITECTURES information (reference only, not required by
    # the build tools).
    #
    ValidArchStatement = None
    if ModuleObject.SupArchList:
        ValidArchStatement = '\n' + '# ' + '\n'
        ValidArchStatement += '# The following information is for reference only and not required by the build tools.\n'
        ValidArchStatement += '# ' + '\n'
        ValidArchStatement += '# VALID_ARCHITECTURES = %s' % (' '.join(ModuleObject.SupArchList)) + '\n'
        ValidArchStatement += '# '
    NewSectionDict.setdefault(DT.TAB_ARCH_COMMON, [])
    NewSectionDict[DT.TAB_ARCH_COMMON] = NewSectionDict[DT.TAB_ARCH_COMMON] + SpecialStatementList
    GenMetaFileMisc.AddExternToDefineSec(NewSectionDict, DT.TAB_ARCH_COMMON, ExternList)
    if ValidArchStatement is not None:
        NewSectionDict[DT.TAB_ARCH_COMMON] = NewSectionDict[DT.TAB_ARCH_COMMON] + [ValidArchStatement]
    Content += GenSection('Defines', NewSectionDict)
    return Content
## GenLibraryClasses
#
# Generate the [LibraryClasses] section content.  For a source INF the
# consumed library classes are listed; for an as-built (binary) INF the
# library instances recorded in the binary files are listed as comments.
#
# @param ModuleObject: A Module Object
#
def GenLibraryClasses(ModuleObject):
    Content = ''
    NewSectionDict = {}
    if not GlobalData.gIS_BINARY_INF:
        for LibraryClass in ModuleObject.GetLibraryClassList():
            # Produced classes belong in [Defines], not here.
            if LibraryClass.GetUsage() == DT.USAGE_ITEM_PRODUCES:
                continue
            #
            # Generate generic comment
            #
            HelpTextList = LibraryClass.GetHelpTextList()
            HelpStr = _GetHelpStr(HelpTextList)
            CommentStr = GenGenericCommentF(HelpStr)
            Statement = CommentStr
            Name = LibraryClass.GetLibraryClass()
            FFE = LibraryClass.GetFeatureFlag()
            Statement += Name
            if FFE:
                Statement += '|' + FFE
            ModuleList = LibraryClass.GetSupModuleList()
            ArchList = LibraryClass.GetSupArchList()
            for Index in range(0, len(ArchList)):
                ArchList[Index] = ConvertArchForInstall(ArchList[Index])
            ArchList.sort()
            SortedArch = ' '.join(ArchList)
            KeyList = []
            if not ModuleList or IsAllModuleList(ModuleList):
                KeyList = [SortedArch]
            else:
                # Restrict the section to the listed module types.
                ModuleString = DT.TAB_VALUE_SPLIT.join(l for l in ModuleList)
                if not ArchList:
                    SortedArch = DT.TAB_ARCH_COMMON
                    KeyList = [SortedArch + '.' + ModuleString]
                else:
                    KeyList = [Arch + '.' + ModuleString for Arch in ArchList]
            for Key in KeyList:
                if Key in NewSectionDict:
                    NewSectionDict[Key] = NewSectionDict[Key] + [Statement]
                else:
                    NewSectionDict[Key] = [Statement]
        Content += GenSection('LibraryClasses', NewSectionDict)
    else:
        LibraryClassDict = {}
        for BinaryFile in ModuleObject.GetBinaryFileList():
            if not BinaryFile.AsBuiltList:
                continue
            for LibraryItem in BinaryFile.AsBuiltList[0].LibraryInstancesList:
                Statement = '# Guid: ' + LibraryItem.Guid + ' Version: ' + LibraryItem.Version
                if len(BinaryFile.SupArchList) == 0:
                    # Fix: a duplicate Statement used to RESET the COMMON
                    # list, discarding previously collected instances.
                    # Initialize once, then de-duplicate like the per-Arch
                    # branch below.
                    if 'COMMON' not in LibraryClassDict:
                        LibraryClassDict['COMMON'] = ['## @LIB_INSTANCES']
                    if Statement not in LibraryClassDict['COMMON']:
                        LibraryClassDict['COMMON'].append(Statement)
                else:
                    for Arch in BinaryFile.SupArchList:
                        if Arch in LibraryClassDict:
                            if Statement not in LibraryClassDict[Arch]:
                                LibraryClassDict[Arch].append(Statement)
                        else:
                            LibraryClassDict[Arch] = ['## @LIB_INSTANCES']
                            LibraryClassDict[Arch].append(Statement)
        Content += GenSection('LibraryClasses', LibraryClassDict)
    return Content
## GenPackages
#
# Generate the [Packages] section content.
#
# @param ModuleObject: A Module Object
#
def GenPackages(ModuleObject):
    Content = ''
    NewSectionDict = Sdict()
    WorkspaceDir = GlobalData.gWORKSPACE
    for PackageDependency in ModuleObject.GetPackageDependencyList():
        # Optional generic help comment above the statement.
        CommentStr = ''
        HelpText = PackageDependency.GetHelpText()
        if HelpText:
            CommentStr = GenGenericCommentF(HelpText.GetString())
        Guid = PackageDependency.GetGuid()
        Version = PackageDependency.GetVersion()
        FFE = PackageDependency.GetFeatureFlag()
        #
        # Find the package path/name in the workspace package list.
        #
        Path = ''
        for PkgInfo in GlobalData.gWSPKG_LIST:
            if Guid == PkgInfo[1] and ((not Version) or (Version == PkgInfo[2])):
                Path = PkgInfo[3]
                break
        #
        # The statement uses the workspace-relative path with '/' separators.
        #
        Statement = CommentStr + GetRelativePath(Path, WorkspaceDir).replace('\\', '/')
        if FFE:
            Statement += '|' + FFE
        SortedArch = ' '.join(sorted(PackageDependency.GetSupArchList()))
        if SortedArch in NewSectionDict:
            NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
        else:
            NewSectionDict[SortedArch] = [Statement]
    Content += GenSection('Packages', NewSectionDict)
    return Content
## GenSources
#
# Generate the [Sources] section content.
#
# @param ModuleObject: A Module Object
#
def GenSources(ModuleObject):
    Content = ''
    NewSectionDict = {}
    for Source in ModuleObject.GetSourceFileList():
        SortedArch = ' '.join(sorted(Source.GetSupArchList()))
        Statement = GenSourceStatement(ConvertPath(Source.GetSourceFile()),
                                       Source.GetFamily(),
                                       Source.GetFeatureFlag())
        NewSectionDict.setdefault(SortedArch, []).append(Statement)
    Content += GenSection('Sources', NewSectionDict)
    return Content
## GenDepex
#
# Generate the [Depex] section content from the module's PEI, DXE and SMM
# depex expressions.
#
# @param ModuleObject: A Module Object
#
def GenDepex(ModuleObject):
    NewSectionDict = Sdict()
    Content = ''
    for Depex in ModuleObject.GetPeiDepex() + ModuleObject.GetDxeDepex() + ModuleObject.GetSmmDepex():
        HelpStr = _GetHelpStr(Depex.GetHelpTextList())
        CommentStr = GenGenericCommentF(HelpStr)
        SupArchList = Depex.GetSupArchList()
        SupModList = Depex.GetModuleType()
        Statement = CommentStr + Depex.GetDepex()
        # Note: the arch list is sorted/extended in place on purpose.
        SupArchList.sort()
        if not SupArchList:
            SupArchList.append(DT.TAB_ARCH_COMMON.lower())
        if not SupModList:
            KeyList = SupArchList
        else:
            # One section key per (arch, module-type) pair.
            KeyList = [ConvertArchForInstall(Arch) + '.' + ModuleType
                       for ModuleType in SupModList
                       for Arch in SupArchList]
        for Key in KeyList:
            if Key in NewSectionDict:
                NewSectionDict[Key] = NewSectionDict[Key] + [Statement]
            else:
                NewSectionDict[Key] = [Statement]
    Content += GenSection('Depex', NewSectionDict, False)
    return Content
## GenUserExtensions
#
# Generate the [UserExtensions] sections content.  Binary-header and Depex
# user extensions are emitted elsewhere and are skipped here.
#
# @param ModuleObject: A Module Object
#
def GenUserExtensions(ModuleObject):
    NewSectionDict = {}
    for UserExtension in ModuleObject.GetUserExtensionList():
        if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID and \
           UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER:
            continue
        if UserExtension.GetIdentifier() == 'Depex':
            continue
        Statement = UserExtension.GetStatement()
        # An empty Statement is NOT skipped: a user extension with only a
        # section header in [] is still supported.
        ArchList = UserExtension.GetSupArchList()
        for Index in range(0, len(ArchList)):
            ArchList[Index] = ConvertArchForInstall(ArchList[Index])
        ArchList.sort()
        #
        # Build "UserId.Identifier[.Arch]" keys; a UserId containing '.'
        # must be quoted, and the Identifier is always quoted.
        #
        CommonPreFix = ''
        if UserExtension.GetUserID():
            CommonPreFix = UserExtension.GetUserID()
            if CommonPreFix.find('.') > -1:
                CommonPreFix = '"' + CommonPreFix + '"'
            if UserExtension.GetIdentifier():
                CommonPreFix += '.' + '"' + UserExtension.GetIdentifier() + '"'
        if ArchList:
            KeyList = [CommonPreFix + '.' + Arch for Arch in ArchList]
        else:
            KeyList = [CommonPreFix]
        for Key in KeyList:
            NewSectionDict.setdefault(Key, []).append(Statement)
    return GenSection('UserExtensions', NewSectionDict, False)
# GenSourceStatement
#
# Build one [Sources] statement in the form
# SourceFile|Family|TagName|ToolCode|FeatureFlag, trimmed to the last
# non-empty trailing field, optionally preceded by a help comment.
#
# @param SourceFile:  string of source file path/name
# @param Family:      string of source file family field
# @param FeatureFlag: string of source file FeatureFlag field
# @param TagName:     string of source file TagName field
# @param ToolCode:    string of source file ToolCode field
# @param HelpStr:     string of source file help text
#
# @retval Statement:  The generated statement for source
#
def GenSourceStatement(SourceFile, Family, FeatureFlag, TagName=None,
                       ToolCode=None, HelpStr=None):
    Statement = ''
    if HelpStr:
        Statement += GenGenericCommentF(HelpStr)
    if TagName is None:
        TagName = ''
    if ToolCode is None:
        ToolCode = ''
    # Keep every field up to (and including) the last non-empty one.
    if FeatureFlag:
        Fields = [Family, TagName, ToolCode, FeatureFlag]
    elif ToolCode:
        Fields = [Family, TagName, ToolCode]
    elif TagName:
        Fields = [Family, TagName]
    elif Family:
        Fields = [Family]
    else:
        Fields = []
    Statement += SourceFile
    if Fields:
        Statement += '|' + '|'.join(Fields)
    return Statement
# GenBinaryStatement
#
# Build one [Binaries] statement.
#
# @param Key:   (FileName, FileType, FFE, SortedArch)
# @param Value: (Target, Family, TagName, Comment) or empty
# @param SubTypeGuidValue: GUID value inserted for SUBTYPE_GUID files
#
def GenBinaryStatement(Key, Value, SubTypeGuidValue=None):
    FileName, FileType, FFE, SortedArch = Key
    if Value:
        Target, Family, TagName, Comment = Value
    else:
        Target = Family = TagName = Comment = ''
    # Optional help comment above the statement.
    Statement = GenGenericCommentF(Comment) if Comment else ''
    if FileType == 'SUBTYPE_GUID' and SubTypeGuidValue:
        Statement += FileType + '|' + SubTypeGuidValue + '|' + FileName
    else:
        Statement += FileType + '|' + FileName
    if FileType in DT.BINARY_FILE_TYPE_UI_LIST + DT.BINARY_FILE_TYPE_VER_LIST:
        # UI/VER binaries carry only Target and FeatureFlag.
        if FFE:
            Statement += '|' + Target + '|' + FFE
        elif Target:
            Statement += '|' + Target
    else:
        # Other binaries carry Target|Family|TagName|FeatureFlag, trimmed
        # to the last non-empty field.
        if FFE:
            Statement += '|' + Target + '|' + Family + '|' + TagName + '|' + FFE
        elif TagName:
            Statement += '|' + Target + '|' + Family + '|' + TagName
        elif Family:
            Statement += '|' + Target + '|' + Family
        elif Target:
            Statement += '|' + Target
    return Statement
## GenGuidSections
#
# Generate the [Guids] section content.
#
# @param GuidObjList: List of GuidObject
# @retVal Content:    The generated section contents
#
def GenGuidSections(GuidObjList):
    Content = ''
    GuidDict = Sdict()
    for Guid in GuidObjList:
        HelpStr = _GetHelpStr(Guid.GetHelpTextList())
        Statement = Guid.GetCName()
        FFE = Guid.GetFeatureFlag()
        if FFE:
            Statement += '|' + FFE
        Usage = Guid.GetUsage()
        GuidType = Guid.GetGuidTypeList()[0]
        VariableName = Guid.GetVariableName()
        #
        # Multiple generic comments go on their own lines before the
        # statement; a usage/type comment is folded onto one line.
        #
        if Usage == DT.ITEM_UNDEFINED and GuidType == DT.ITEM_UNDEFINED:
            Comment = GenGenericCommentF(HelpStr)
        else:
            Comment = HelpStr.replace('\n', ' ').strip()
            Comment = ' # ' + Comment if Comment else ''
            if Usage != DT.ITEM_UNDEFINED and GuidType == DT.ITEM_UNDEFINED:
                Comment = '## ' + Usage + Comment
            elif GuidType == 'Variable':
                Comment = '## ' + Usage + ' ## ' + GuidType + ':' + VariableName + Comment
            else:
                Comment = '## ' + Usage + ' ## ' + GuidType + Comment
            if Comment:
                Comment += '\n'
        #
        # Merge duplicate items: comments accumulate per (Statement, Arch).
        #
        SortedArch = ' '.join(sorted(Guid.GetSupArchList()))
        if (Statement, SortedArch) in GuidDict:
            Comment = GuidDict[Statement, SortedArch] + Comment
        GuidDict[Statement, SortedArch] = Comment
    NewSectionDict = GenMetaFileMisc.TransferDict(GuidDict, 'INF_GUID')
    #
    # generate the section contents
    #
    if NewSectionDict:
        Content = GenSection('Guids', NewSectionDict)
    return Content
## GenProtocolPPiSections
#
# Generate the [Protocols] or [Ppis] section content.
#
# @param ObjList:    List of ProtocolObject or PpiObject
# @param IsProtocol: True emits [Protocols], False emits [Ppis]
# @retVal Content:   The generated section contents
#
def GenProtocolPPiSections(ObjList, IsProtocol):
    Content = ''
    Dict = Sdict()
    for Object in ObjList:
        HelpStr = _GetHelpStr(Object.GetHelpTextList())
        Statement = Object.GetCName()
        FFE = Object.GetFeatureFlag()
        if FFE:
            Statement += '|' + FFE
        Usage = Object.GetUsage()
        Notify = Object.GetNotify()
        #
        # Consecutive generic comments stay together above the statement;
        # a usage/notify comment is folded onto one line.
        #
        if Usage == DT.ITEM_UNDEFINED and Notify == '':
            Comment = GenGenericCommentF(HelpStr)
        else:
            Comment = HelpStr.replace('\n', ' ').strip()
            Comment = ' # ' + Comment if Comment else ''
            if Usage == DT.ITEM_UNDEFINED and not Comment and Notify == '':
                Comment = ''
            elif Notify:
                Comment = '## ' + Usage + ' ## ' + 'NOTIFY' + Comment
            else:
                Comment = '## ' + Usage + Comment
            if Comment:
                Comment += '\n'
        #
        # Merge duplicate items: comments accumulate per (Statement, Arch).
        #
        SortedArch = ' '.join(sorted(Object.GetSupArchList()))
        if (Statement, SortedArch) in Dict:
            Comment = Dict[Statement, SortedArch] + Comment
        Dict[Statement, SortedArch] = Comment
    NewSectionDict = GenMetaFileMisc.TransferDict(Dict, 'INF_PPI_PROTOCOL')
    #
    # generate the section contents
    #
    if NewSectionDict:
        Content = GenSection('Protocols' if IsProtocol else 'Ppis', NewSectionDict)
    return Content
## GenPcdSections
#
#
def GenPcdSections(ModuleObject):
    """Generate the PCD sections of an INF file.

    For a source INF the statements are built from the module's Pcd objects,
    grouped per item type ([FeaturePcd], [FixedPcd], ...).  For an as-built
    (binary) INF the content comes from the binary payload instead.
    """
    Content = ''
    if not GlobalData.gIS_BINARY_INF:
        #
        # for each Pcd Itemtype, maintain a dict so the same type will be grouped
        # together
        #
        ItemTypeDict = {}
        for Pcd in ModuleObject.GetPcdList():
            HelpTextList = Pcd.GetHelpTextList()
            HelpStr = _GetHelpStr(HelpTextList)
            Statement = ''
            CName = Pcd.GetCName()
            TokenSpaceGuidCName = Pcd.GetTokenSpaceGuidCName()
            DefaultValue = Pcd.GetDefaultValue()
            ItemType = Pcd.GetItemType()
            if ItemType in ItemTypeDict:
                Dict = ItemTypeDict[ItemType]
            else:
                Dict = Sdict()
                ItemTypeDict[ItemType] = Dict
            FFE = Pcd.GetFeatureFlag()
            # Statement: <TokenSpaceGuidCName>.<CName>[|<DefaultValue>][|<FFE>]
            # ('||' marks a feature flag present without a default value)
            Statement += TokenSpaceGuidCName + '.' + CName
            if DefaultValue:
                Statement += '|' + DefaultValue
                if FFE:
                    Statement += '|' + FFE
            elif FFE:
                Statement += '||' + FFE
            #
            # Generate comment
            #
            Usage = Pcd.GetValidUsage()
            # if FeatureFlag Pcd, then assume all Usage is CONSUMES
            if ItemType == DT.TAB_INF_FEATURE_PCD:
                Usage = DT.USAGE_ITEM_CONSUMES
            if Usage == DT.ITEM_UNDEFINED:
                # generate list of generic comment
                Comment = GenGenericCommentF(HelpStr)
            else:
                # generate list of other comment
                Comment = HelpStr.replace('\n', ' ')
                Comment = Comment.strip()
                if Comment:
                    Comment = ' # ' + Comment
                else:
                    Comment = ''
                Comment = '## ' + Usage + Comment
                if Comment:
                    Comment += '\n'
            #
            # Merge duplicate entries
            #
            ArchList = sorted(Pcd.GetSupArchList())
            SortedArch = ' '.join(ArchList)
            if (Statement, SortedArch) in Dict:
                PreviousComment = Dict[Statement, SortedArch]
                Comment = PreviousComment + Comment
            Dict[Statement, SortedArch] = Comment
        for ItemType in ItemTypeDict:
            # First we need to transfer the Dict to use SortedArch as key
            Dict = ItemTypeDict[ItemType]
            NewSectionDict = GenMetaFileMisc.TransferDict(Dict, 'INF_PCD')
            if NewSectionDict:
                Content += GenSection(ItemType, NewSectionDict)
    #
    # For AsBuild INF files
    #
    else:
        Content += GenAsBuiltPacthPcdSections(ModuleObject)
        Content += GenAsBuiltPcdExSections(ModuleObject)
    return Content
## GenPcdSections
#
#
def GenAsBuiltPacthPcdSections(ModuleObject):
    """Generate the as-built [PatchPcd] section from the module's binary files.

    NOTE(review): 'Pacth' in the name is a historical typo kept because the
    name is part of this module's public interface.
    """
    PatchPcdDict = {}
    for BinaryFile in ModuleObject.GetBinaryFileList():
        if not BinaryFile.AsBuiltList:
            continue
        for PatchPcd in BinaryFile.AsBuiltList[0].PatchPcdList:
            TokenSpaceName = ''
            PcdCName = PatchPcd.CName
            PcdValue = PatchPcd.DefaultValue
            PcdOffset = PatchPcd.Offset
            TokenSpaceGuidValue = PatchPcd.TokenSpaceGuidValue
            Token = PatchPcd.Token
            HelpTextList = PatchPcd.HelpTextList
            HelpString = ''
            for HelpStringItem in HelpTextList:
                for HelpLine in GetSplitValueList(HelpStringItem.String, '\n'):
                    HelpString += '## ' + HelpLine + '\n'
            # Resolve the PCD's C names from its token space GUID value and
            # token number via the module's package dependencies.
            TokenSpaceName, PcdCName = GenMetaFileMisc.ObtainPcdName(ModuleObject.PackageDependencyList,
                                                                     TokenSpaceGuidValue,
                                                                     Token)
            if TokenSpaceName == '' or PcdCName == '':
                Logger.Error("Upt",
                             ToolError.RESOURCE_NOT_AVAILABLE,
                             ST.ERR_INSTALL_FILE_DEC_FILE_ERROR % (TokenSpaceGuidValue, Token),
                             File=ModuleObject.GetFullPath())
            Statement = HelpString + TokenSpaceName + '.' + PcdCName + ' | ' + PcdValue + ' | ' + \
                        PcdOffset + DT.TAB_SPACE_SPLIT
            #
            # Use binary file's Arch to be Pcd's Arch
            #
            ArchList = []
            FileNameObjList = BinaryFile.GetFileNameList()
            if FileNameObjList:
                ArchList = FileNameObjList[0].GetSupArchList()
            if len(ArchList) == 0:
                if DT.TAB_ARCH_COMMON in PatchPcdDict:
                    if Statement not in PatchPcdDict[DT.TAB_ARCH_COMMON]:
                        PatchPcdDict[DT.TAB_ARCH_COMMON].append(Statement)
                else:
                    PatchPcdDict[DT.TAB_ARCH_COMMON] = [Statement]
            else:
                for Arch in ArchList:
                    if Arch in PatchPcdDict:
                        if Statement not in PatchPcdDict[Arch]:
                            PatchPcdDict[Arch].append(Statement)
                    else:
                        PatchPcdDict[Arch] = [Statement]
    return GenSection(DT.TAB_INF_PATCH_PCD, PatchPcdDict)
## GenPcdSections
#
#
def GenAsBuiltPcdExSections(ModuleObject):
    """Generate the as-built [PcdEx] section from the module's binary files.

    Mirrors GenAsBuiltPacthPcdSections: each PcdEx item of the first as-built
    descriptor of every binary file is resolved to its C names and grouped by
    the binary file's architecture(s).
    """
    PcdExDict = {}
    for BinaryFile in ModuleObject.GetBinaryFileList():
        if not BinaryFile.AsBuiltList:
            continue
        for PcdExItem in BinaryFile.AsBuiltList[0].PcdExValueList:
            TokenSpaceName = ''
            PcdCName = PcdExItem.CName
            TokenSpaceGuidValue = PcdExItem.TokenSpaceGuidValue
            Token = PcdExItem.Token
            HelpTextList = PcdExItem.HelpTextList
            HelpString = ''
            for HelpStringItem in HelpTextList:
                for HelpLine in GetSplitValueList(HelpStringItem.String, '\n'):
                    HelpString += '## ' + HelpLine + '\n'
            # Resolve the PCD's C names from its token space GUID value and
            # token number via the module's package dependencies.
            TokenSpaceName, PcdCName = GenMetaFileMisc.ObtainPcdName(ModuleObject.PackageDependencyList,
                                                                     TokenSpaceGuidValue, Token)
            if TokenSpaceName == '' or PcdCName == '':
                Logger.Error("Upt",
                             ToolError.RESOURCE_NOT_AVAILABLE,
                             ST.ERR_INSTALL_FILE_DEC_FILE_ERROR % (TokenSpaceGuidValue, Token),
                             File=ModuleObject.GetFullPath())
            Statement = HelpString + TokenSpaceName + DT.TAB_SPLIT + PcdCName + DT.TAB_SPACE_SPLIT
            #
            # Use binary file's Arch to be Pcd's Arch
            #
            ArchList = []
            FileNameObjList = BinaryFile.GetFileNameList()
            if FileNameObjList:
                ArchList = FileNameObjList[0].GetSupArchList()
            if len(ArchList) == 0:
                if 'COMMON' in PcdExDict:
                    # Fix: skip duplicate statements, consistent with the
                    # per-arch branch below and with GenAsBuiltPacthPcdSections.
                    if Statement not in PcdExDict['COMMON']:
                        PcdExDict['COMMON'].append(Statement)
                else:
                    PcdExDict['COMMON'] = [Statement]
            else:
                for Arch in ArchList:
                    if Arch in PcdExDict:
                        if Statement not in PcdExDict[Arch]:
                            PcdExDict[Arch].append(Statement)
                    else:
                        PcdExDict[Arch] = [Statement]
    return GenSection('PcdEx', PcdExDict)
## GenSpecialSections
# generate special sections for Event/BootMode/Hob
#
def GenSpecialSections(ObjectList, SectionName, UserExtensionsContent=''):
    """Generate a commented-out special section (Event, BootMode or Hob).

    The whole section is emitted as '# '-prefixed lines.  When the same
    section/type already appears in UserExtensionsContent, '' is returned to
    avoid duplication.
    """
    #
    # generate section
    #
    Content = ''
    NewSectionDict = {}
    for Obj in ObjectList:
        #
        # Generate comment
        #
        CommentStr = ''
        HelpTextList = Obj.GetHelpTextList()
        HelpStr = _GetHelpStr(HelpTextList)
        CommentStr = GenGenericCommentF(HelpStr)
        if SectionName == 'Hob':
            Type = Obj.GetHobType()
        elif SectionName == 'Event':
            Type = Obj.GetEventType()
        elif SectionName == 'BootMode':
            Type = Obj.GetSupportedBootModes()
        else:
            assert(SectionName)
        Usage = Obj.GetUsage()
        # If the content already in UserExtensionsContent then ignore
        if '[%s]' % SectionName in UserExtensionsContent and Type in UserExtensionsContent:
            return ''
        Statement = ' ' + Type + ' ## ' + Usage
        if CommentStr in ['#\n', '#\n#\n']:
            CommentStr = '#\n#\n#\n'
        #
        # the first head comment line should start with '##\n', if it starts with '#\n', then add one '#'
        # else add '##\n' to meet the format defined in INF spec
        #
        if CommentStr.startswith('#\n'):
            CommentStr = '#' + CommentStr
        elif CommentStr:
            CommentStr = '##\n' + CommentStr
        if CommentStr and not CommentStr.endswith('\n#\n'):
            CommentStr = CommentStr + '#\n'
        NewStateMent = CommentStr + Statement
        SupArch = sorted(Obj.GetSupArchList())
        SortedArch = ' '.join(SupArch)
        if SortedArch in NewSectionDict:
            NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [NewStateMent]
        else:
            NewSectionDict[SortedArch] = [NewStateMent]
    SectionContent = GenSection(SectionName, NewSectionDict)
    SectionContent = SectionContent.strip()
    if SectionContent:
        # Prefix every generated line with '# ' so the section is a comment.
        Content = '# ' + ('\n' + '# ').join(GetSplitValueList(SectionContent, '\n'))
        Content = Content.lstrip()
    #
    # add a return to differentiate it between other possible sections
    #
    if Content:
        Content += '\n'
    return Content
## GenBuildOptions
#
#
def GenBuildOptions(ModuleObject):
    """Generate the [BuildOptions] section.

    For a source module the options come from the user-extension build-option
    dictionaries.  For a binary module, each binary file's as-built flags are
    emitted as '#'-prefixed lines under an '## @AsBuilt' marker, grouped per
    architecture.
    """
    Content = ''
    if not ModuleObject.BinaryModule:
        #
        # generate [BuildOptions] section
        #
        NewSectionDict = {}
        for UserExtension in ModuleObject.GetUserExtensionList():
            BuildOptionDict = UserExtension.GetBuildOptionDict()
            if not BuildOptionDict:
                continue
            for Arch in BuildOptionDict:
                if Arch in NewSectionDict:
                    NewSectionDict[Arch] = NewSectionDict[Arch] + [BuildOptionDict[Arch]]
                else:
                    NewSectionDict[Arch] = [BuildOptionDict[Arch]]
        Content = GenSection('BuildOptions', NewSectionDict)
    else:
        BuildOptionDict = {}
        for BinaryFile in ModuleObject.GetBinaryFileList():
            if not BinaryFile.AsBuiltList:
                continue
            for BuilOptionItem in BinaryFile.AsBuiltList[0].BinaryBuildFlagList:
                Statement = '#' + BuilOptionItem.AsBuiltOptionFlags
                if len(BinaryFile.SupArchList) == 0:
                    if 'COMMON' in BuildOptionDict:
                        if Statement not in BuildOptionDict['COMMON']:
                            BuildOptionDict['COMMON'].append(Statement)
                    else:
                        # First statement for this arch: lead with the marker.
                        BuildOptionDict['COMMON'] = ['## @AsBuilt']
                        BuildOptionDict['COMMON'].append(Statement)
                else:
                    for Arch in BinaryFile.SupArchList:
                        if Arch in BuildOptionDict:
                            if Statement not in BuildOptionDict[Arch]:
                                BuildOptionDict[Arch].append(Statement)
                        else:
                            BuildOptionDict[Arch] = ['## @AsBuilt']
                            BuildOptionDict[Arch].append(Statement)
        Content = GenSection('BuildOptions', BuildOptionDict)
    return Content
## GenBinaries
#
#
def GenBinaries(ModuleObject):
    """Generate the [Binaries] section.

    Statements recorded in the first non-empty user-extension binaries dict
    take precedence (and are consumed once); all other binary file names
    produce a plain statement.  Entries are grouped by sorted arch list.
    """
    NewSectionDict = {}
    BinariesDict = []
    for UserExtension in ModuleObject.GetUserExtensionList():
        BinariesDict = UserExtension.GetBinariesDict()
        if BinariesDict:
            break
    for BinaryFile in ModuleObject.GetBinaryFileList():
        FileNameObjList = BinaryFile.GetFileNameList()
        for FileNameObj in FileNameObjList:
            FileName = ConvertPath(FileNameObj.GetFilename())
            FileType = FileNameObj.GetFileType()
            FFE = FileNameObj.GetFeatureFlag()
            ArchList = sorted(FileNameObj.GetSupArchList())
            SortedArch = ' '.join(ArchList)
            Key = (FileName, FileType, FFE, SortedArch)
            if Key in BinariesDict:
                ValueList = BinariesDict[Key]
                for ValueItem in ValueList:
                    Statement = GenBinaryStatement(Key, ValueItem)
                    if SortedArch in NewSectionDict:
                        NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
                    else:
                        NewSectionDict[SortedArch] = [Statement]
                #
                # as we already generated statement for this DictKey here set the Valuelist to be empty
                # to avoid generate duplicate entries as the DictKey may have multiple entries
                #
                BinariesDict[Key] = []
            else:
                if FileType == 'SUBTYPE_GUID' and FileNameObj.GetGuidValue():
                    Statement = GenBinaryStatement(Key, None, FileNameObj.GetGuidValue())
                else:
                    Statement = GenBinaryStatement(Key, None)
                if SortedArch in NewSectionDict:
                    NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
                else:
                    NewSectionDict[SortedArch] = [Statement]
    Content = GenSection('Binaries', NewSectionDict)
    return Content
| edk2-master | BaseTools/Source/Python/UPT/GenMetaFile/GenInfFile.py |
## @file GenDecFile.py
#
# This file contains the logic to transfer a package object to DEC files.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenDEC
'''
import os
import stat
import codecs
from hashlib import md5
from Core.FileHook import __FileHookOpen__
from Library.Parsing import GenSection
from Library.CommentGenerating import GenHeaderCommentSection
from Library.CommentGenerating import GenGenericCommentF
from Library.CommentGenerating import GenDecTailComment
from Library.CommentGenerating import _GetHelpStr
from Library.Misc import GuidStringToGuidStructureString
from Library.Misc import SaveFileOnChange
from Library.Misc import ConvertPath
from Library.Misc import GetLocalValue
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_COMMA_SPLIT
from Library.DataType import END_OF_LINE
from Library.DataType import TAB_ARCH_COMMON
from Library.DataType import TAB_VALUE_SPLIT
from Library.DataType import TAB_COMMENT_SPLIT
from Library.DataType import TAB_PCD_VALIDRANGE
from Library.DataType import TAB_PCD_VALIDLIST
from Library.DataType import TAB_PCD_EXPRESSION
from Library.DataType import TAB_DEC_DEFINES_DEC_SPECIFICATION
from Library.DataType import TAB_DEC_DEFINES_PACKAGE_NAME
from Library.DataType import TAB_DEC_DEFINES_PACKAGE_GUID
from Library.DataType import TAB_DEC_DEFINES_PACKAGE_VERSION
from Library.DataType import TAB_DEC_DEFINES_PKG_UNI_FILE
from Library.DataType import TAB_DEC_PACKAGE_ABSTRACT
from Library.DataType import TAB_DEC_PACKAGE_DESCRIPTION
from Library.DataType import TAB_DEC_BINARY_ABSTRACT
from Library.DataType import TAB_DEC_BINARY_DESCRIPTION
from Library.DataType import TAB_LANGUAGE_EN_X
from Library.DataType import TAB_BINARY_HEADER_USERID
from Library.DataType import TAB_BINARY_HEADER_IDENTIFIER
from Library.DataType import TAB_COMMENT_EDK1_SPLIT
from Library.DataType import TAB_ENCODING_UTF16LE
from Library.DataType import TAB_CAPHEX_START
from Library.DataType import TAB_HEX_START
from Library.DataType import TAB_UNDERLINE_SPLIT
from Library.DataType import TAB_STR_TOKENERR
from Library.DataType import TAB_STR_TOKENCNAME
from Library.DataType import TAB_PCD_ERROR_SECTION_COMMENT
from Library.DataType import TAB_PCD_ERROR
from Library.DataType import TAB_SECTION_START
from Library.DataType import TAB_SECTION_END
from Library.DataType import TAB_SPLIT
import Library.DataType as DT
from Library.UniClassObject import FormatUniEntry
from Library.StringUtils import GetUniFileName
def GenPcd(Package, Content):
    """Append the DEC PCD sections to Content.

    Each statement has the form:
        <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
    preceded by help/prompt/error comments, grouped first by PCD item type
    (section name) and then by sorted supported-arch list.
    """
    # Map the usage reported by the PCD object onto its DEC section name;
    # unknown values pass through unchanged.
    UsageSectionMap = {
        'FeaturePcd': 'PcdsFeatureFlag',
        'PatchPcd': 'PcdsPatchableInModule',
        'FixedPcd': 'PcdsFixedAtBuild',
        'Pcd': 'PcdsDynamic',
        'PcdEx': 'PcdsDynamicEx',
    }
    ValidUsageDict = {}
    for PcdObj in Package.GetPcdList():
        #
        # Header comment: help text, then prompt, then any PcdError comments.
        #
        CommentBlock = GenGenericCommentF(_GetHelpStr(PcdObj.GetHelpTextList()), 2)
        CommentBlock += GenGenericCommentF(_GetHelpStr(PcdObj.GetPromptList()).strip(), 1, True)
        for ErrObj in PcdObj.GetPcdErrorsList():
            CommentBlock += GenPcdErrComment(ErrObj)
        SectionName = UsageSectionMap.get(PcdObj.GetValidUsage(), PcdObj.GetValidUsage())
        NewSectionDict = ValidUsageDict.setdefault(SectionName, {})
        Statement = CommentBlock + PcdObj.GetTokenSpaceGuidCName() + '.' + PcdObj.GetCName()
        Statement += '|' + PcdObj.GetDefaultValue()
        Statement += '|' + PcdObj.GetDatumType()
        Statement += '|' + PcdObj.GetToken()
        #
        # generate tail comment
        #
        if PcdObj.GetSupModuleList():
            Statement += GenDecTailComment(PcdObj.GetSupModuleList())
        SortedArch = ' '.join(sorted(PcdObj.GetSupArchList()))
        if SortedArch in NewSectionDict:
            NewSectionDict[SortedArch] = NewSectionDict[SortedArch] + [Statement]
        else:
            NewSectionDict[SortedArch] = [Statement]
    for SectionName in ValidUsageDict:
        Content += GenSection(SectionName, ValidUsageDict[SectionName], True, True)
    return Content
def GenPcdErrorMsgSection(Package, Content):
    """Append the commented '# [Error.<TokenSpcCName>]' sections to Content.

    Each (TokenSpcCName, ErrorNumber) entry of Package.PcdErrorCommentDict is
    emitted as '#   <ErrorNumber> | <localized message>' under the section of
    its token space.  Content is returned unchanged when the dict is empty.
    """
    if not Package.PcdErrorCommentDict:
        return Content
    #
    # Generate '# [Error.<TokenSpcCName>]' section
    #
    Content += END_OF_LINE + END_OF_LINE
    SectionComment = TAB_COMMENT_SPLIT + END_OF_LINE
    SectionComment += TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_PCD_ERROR_SECTION_COMMENT + END_OF_LINE
    SectionComment += TAB_COMMENT_SPLIT + END_OF_LINE
    TokenSpcCNameList = []
    #
    # Get TokenSpcCName list in PcdErrorCommentDict in Package object
    #
    for (TokenSpcCName, ErrorNumber) in Package.PcdErrorCommentDict:
        if TokenSpcCName not in TokenSpcCNameList:
            TokenSpcCNameList.append(TokenSpcCName)
    for TokenSpcCNameItem in TokenSpcCNameList:
        SectionName = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_SECTION_START + TAB_PCD_ERROR + \
                      TAB_SPLIT + TokenSpcCNameItem + TAB_SECTION_END + END_OF_LINE
        Content += SectionComment
        Content += SectionName
        for (TokenSpcCName, ErrorNumber) in Package.PcdErrorCommentDict:
            if TokenSpcCNameItem == TokenSpcCName:
                PcdErrorMsg = GetLocalValue(Package.PcdErrorCommentDict[(TokenSpcCName, ErrorNumber)])
                SectionItem = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + TAB_SPACE_SPLIT + \
                              ErrorNumber + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT + \
                              PcdErrorMsg + END_OF_LINE
                Content += SectionItem
    # Close the comment block with a trailing '#'.
    Content += TAB_COMMENT_SPLIT
    return Content
def _GenCNameGuidSection(ItemList, SectionName):
    # Shared worker for the [Guids], [Protocols] and [Ppis] DEC sections.
    # Each item becomes '<CName> = <GUID C structure>' preceded by its help
    # comment and followed by an optional supported-module tail comment.
    NewSectionDict = {}
    #
    # Get the line offset needed:
    # if the longest CName is shorter than the minimum (46), use the minimum,
    # else use the real length.
    #
    LeftOffset = 46
    for Item in ItemList:
        if len(Item.GetCName()) > LeftOffset:
            LeftOffset = len(Item.GetCName())
    for Item in ItemList:
        #
        # Generate generic comment
        #
        HelpStr = _GetHelpStr(Item.GetHelpTextList())
        Statement = GenGenericCommentF(HelpStr, 2)
        Value = GuidStringToGuidStructureString(Item.GetGuid())
        Statement += Item.GetCName().ljust(LeftOffset) + ' = ' + Value
        #
        # generate tail comment
        #
        if Item.GetSupModuleList():
            Statement += GenDecTailComment(Item.GetSupModuleList())
        SortedArch = ' '.join(sorted(Item.GetSupArchList()))
        if SortedArch in NewSectionDict:
            NewSectionDict[SortedArch] = \
                NewSectionDict[SortedArch] + [Statement]
        else:
            NewSectionDict[SortedArch] = [Statement]
    return GenSection(SectionName, NewSectionDict, True, True)

def GenGuidProtocolPpi(Package, Content):
    """Append the [Guids], [Protocols] and [Ppis] sections to Content.

    The three sections share identical formatting, so the loop body is
    factored into _GenCNameGuidSection instead of being repeated three times.
    """
    #
    # generate [Guids] section
    #
    Content += _GenCNameGuidSection(Package.GetGuidList(), 'Guids')
    #
    # generate [Protocols] section
    #
    Content += _GenCNameGuidSection(Package.GetProtocolList(), 'Protocols')
    #
    # generate [Ppis] section
    #
    Content += _GenCNameGuidSection(Package.GetPpiList(), 'Ppis')
    return Content
## Transfer Package Object to Dec files
#
# Transfer all contents of a standard Package Object to a Dec file
#
# @param Package: A Package
#
def PackageToDec(Package, DistHeader = None):
    """Write a Package object out as a DEC file.

    @param Package:    the package object to serialize; its GetFullPath() is
                       the output DEC file path
    @param DistHeader: optional distribution header used to fill in abstract,
                       description, copyright and license when the package
                       itself lacks them, and to decide whether the written
                       file is made read-only
    @retval ContainerFile: path of the DEC file that was written
    """
    #
    # Init global information for the file
    #
    ContainerFile = Package.GetFullPath()
    Content = ''
    #
    # Generate file header
    #
    PackageAbstract = GetLocalValue(Package.GetAbstract())
    PackageDescription = GetLocalValue(Package.GetDescription())
    PackageCopyright = ''
    PackageLicense = ''
    for (Lang, Copyright) in Package.GetCopyright():
        if Lang:
            pass
        PackageCopyright = Copyright
    for (Lang, License) in Package.GetLicense():
        if Lang:
            pass
        PackageLicense = License
    # Fall back to the distribution header for any missing header piece.
    if not PackageAbstract and DistHeader:
        PackageAbstract = GetLocalValue(DistHeader.GetAbstract())
    if not PackageDescription and DistHeader:
        PackageDescription = GetLocalValue(DistHeader.GetDescription())
    if not PackageCopyright and DistHeader:
        for (Lang, Copyright) in DistHeader.GetCopyright():
            PackageCopyright = Copyright
    if not PackageLicense and DistHeader:
        for (Lang, License) in DistHeader.GetLicense():
            PackageLicense = License
    #
    # Generate header comment section of DEC file
    #
    Content += GenHeaderCommentSection(PackageAbstract, \
                                       PackageDescription, \
                                       PackageCopyright, \
                                       PackageLicense).replace('\r\n', '\n')
    #
    # Generate Binary header
    #
    for UserExtension in Package.GetUserExtensionList():
        if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
        and UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
            PackageBinaryAbstract = GetLocalValue(UserExtension.GetBinaryAbstract())
            PackageBinaryDescription = GetLocalValue(UserExtension.GetBinaryDescription())
            PackageBinaryCopyright = ''
            PackageBinaryLicense = ''
            for (Lang, Copyright) in UserExtension.GetBinaryCopyright():
                PackageBinaryCopyright = Copyright
            for (Lang, License) in UserExtension.GetBinaryLicense():
                PackageBinaryLicense = License
            # Emit the binary header only when all four pieces are present.
            if PackageBinaryAbstract and PackageBinaryDescription and \
            PackageBinaryCopyright and PackageBinaryLicense:
                Content += GenHeaderCommentSection(PackageBinaryAbstract,
                                                   PackageBinaryDescription,
                                                   PackageBinaryCopyright,
                                                   PackageBinaryLicense,
                                                   True)
    #
    # Generate PACKAGE_UNI_FILE for the Package
    #
    FileHeader = GenHeaderCommentSection(PackageAbstract, PackageDescription, PackageCopyright, PackageLicense, False, \
                                         TAB_COMMENT_EDK1_SPLIT)
    GenPackageUNIEncodeFile(Package, FileHeader)
    #
    # for each section, maintain a dict, sorted arch will be its key,
    # statement list will be its data
    # { 'Arch1 Arch2 Arch3': [statement1, statement2],
    #   'Arch1' : [statement1, statement3]
    # }
    #
    #
    # generate [Defines] section
    #
    LeftOffset = 31
    NewSectionDict = {TAB_ARCH_COMMON : []}
    SpecialItemList = []
    Statement = (u'%s ' % TAB_DEC_DEFINES_DEC_SPECIFICATION).ljust(LeftOffset) + u'= %s' % '0x00010017'
    SpecialItemList.append(Statement)
    BaseName = Package.GetBaseName()
    # A package name must not begin with '.' or '-'; prefix with '_' if so.
    if BaseName.startswith('.') or BaseName.startswith('-'):
        BaseName = '_' + BaseName
    Statement = (u'%s ' % TAB_DEC_DEFINES_PACKAGE_NAME).ljust(LeftOffset) + u'= %s' % BaseName
    SpecialItemList.append(Statement)
    Statement = (u'%s ' % TAB_DEC_DEFINES_PACKAGE_VERSION).ljust(LeftOffset) + u'= %s' % Package.GetVersion()
    SpecialItemList.append(Statement)
    Statement = (u'%s ' % TAB_DEC_DEFINES_PACKAGE_GUID).ljust(LeftOffset) + u'= %s' % Package.GetGuid()
    SpecialItemList.append(Statement)
    if Package.UNIFlag:
        Statement = (u'%s ' % TAB_DEC_DEFINES_PKG_UNI_FILE).ljust(LeftOffset) + u'= %s' % Package.GetBaseName() + '.uni'
        SpecialItemList.append(Statement)
    for SortedArch in NewSectionDict:
        NewSectionDict[SortedArch] = \
            NewSectionDict[SortedArch] + SpecialItemList
    Content += GenSection('Defines', NewSectionDict)
    #
    # generate [Includes] section
    #
    NewSectionDict = {}
    IncludeArchList = Package.GetIncludeArchList()
    if IncludeArchList:
        for Path, ArchList in IncludeArchList:
            Statement = Path
            ArchList.sort()
            SortedArch = ' '.join(ArchList)
            if SortedArch in NewSectionDict:
                NewSectionDict[SortedArch] = \
                    NewSectionDict[SortedArch] + [ConvertPath(Statement)]
            else:
                NewSectionDict[SortedArch] = [ConvertPath(Statement)]
    Content += GenSection('Includes', NewSectionDict)
    #
    # generate [guids][protocols][ppis] sections
    #
    Content = GenGuidProtocolPpi(Package, Content)
    #
    # generate [LibraryClasses] section
    #
    NewSectionDict = {}
    for LibraryClass in Package.GetLibraryClassList():
        #
        # Generate generic comment
        #
        HelpTextList = LibraryClass.GetHelpTextList()
        HelpStr = _GetHelpStr(HelpTextList)
        if HelpStr:
            HelpStr = '@libraryclass' + HelpStr
        CommentStr = GenGenericCommentF(HelpStr, 2, False, True)
        Statement = CommentStr
        Name = LibraryClass.GetLibraryClass()
        IncludeHeader = LibraryClass.GetIncludeHeader()
        Statement += Name + '|' + ConvertPath(IncludeHeader)
        #
        # generate tail comment
        #
        if LibraryClass.GetSupModuleList():
            Statement += \
                GenDecTailComment(LibraryClass.GetSupModuleList())
        ArchList = sorted(LibraryClass.GetSupArchList())
        SortedArch = ' '.join(ArchList)
        if SortedArch in NewSectionDict:
            NewSectionDict[SortedArch] = \
                NewSectionDict[SortedArch] + [Statement]
        else:
            NewSectionDict[SortedArch] = [Statement]
    Content += GenSection('LibraryClasses', NewSectionDict, True, True)
    #
    # Generate '# [Error.<TokenSpcCName>]' section
    #
    Content = GenPcdErrorMsgSection(Package, Content)
    Content = GenPcd(Package, Content)
    #
    # generate [UserExtensions] section
    #
    NewSectionDict = {}
    for UserExtension in Package.GetUserExtensionList():
        if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID and \
            UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
            continue
        # Generate Private Section first
        if UserExtension.GetUserID() == DT.TAB_INTEL and UserExtension.GetIdentifier() == DT.TAB_PRIVATE:
            Content += '\n' + UserExtension.GetStatement()
            continue
        Statement = UserExtension.GetStatement()
        if not Statement:
            continue
        else:
            LineList = Statement.split('\n')
            NewStatement = ""
            for Line in LineList:
                NewStatement += " %s\n" % Line
        SectionList = []
        SectionName = 'UserExtensions'
        UserId = UserExtension.GetUserID()
        if UserId:
            # Quote user IDs that contain a dot so the section parses.
            if '.' in UserId:
                UserId = '"' + UserId + '"'
            SectionName += '.' + UserId
            if UserExtension.GetIdentifier():
                SectionName += '.' + '"' + UserExtension.GetIdentifier() + '"'
        if not UserExtension.GetSupArchList():
            SectionList.append(SectionName)
        else:
            for Arch in UserExtension.GetSupArchList():
                SectionList.append(SectionName + '.' + Arch)
        SectionName = ', '.join(SectionList)
        SectionName = ''.join(['[', SectionName, ']\n'])
        Content += '\n' + SectionName + NewStatement
    SaveFileOnChange(ContainerFile, Content, False)
    #
    # Fix: DistHeader defaults to None but was dereferenced unconditionally,
    # raising AttributeError whenever it was omitted.  Treat a missing
    # DistHeader as "not read-only".
    #
    if DistHeader and DistHeader.ReadOnly:
        os.chmod(ContainerFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
    else:
        os.chmod(ContainerFile, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH|stat.S_IWUSR|stat.S_IWGRP|stat.S_IWOTH)
    return ContainerFile
## GenPackageUNIEncodeFile
# GenPackageUNIEncodeFile, default is a UCS-2LE encode file
#
def GenPackageUNIEncodeFile(PackageObject, UniFileHeader = '', Encoding=TAB_ENCODING_UTF16LE):
    """Generate the PACKAGE_UNI_FILE for a package (UCS-2LE by default).

    The UNI file is generated only when 'en-x-tianocore' strings exist
    alongside at least one other language code; in that case
    PackageObject.UNIFlag is set and the file plus its md5 is recorded in
    PackageObject.FileList.

    @param PackageObject: package whose localizable strings are collected
    @param UniFileHeader: header comment block for the top of the file
    @param Encoding:      output file encoding
    @retval ContainerFile: path of the generated UNI file, or None when no
                           file is needed
    """
    GenUNIFlag = False
    OnlyLANGUAGE_EN_X = True
    BinaryAbstract = []
    BinaryDescription = []
    #
    # If more than one language code is used for any element that would be present in the PACKAGE_UNI_FILE,
    # then the PACKAGE_UNI_FILE must be created.
    #
    for (Key, Value) in PackageObject.GetAbstract() + PackageObject.GetDescription():
        if Key == TAB_LANGUAGE_EN_X:
            GenUNIFlag = True
        else:
            OnlyLANGUAGE_EN_X = False
    for UserExtension in PackageObject.GetUserExtensionList():
        if UserExtension.GetUserID() == TAB_BINARY_HEADER_USERID \
        and UserExtension.GetIdentifier() == TAB_BINARY_HEADER_IDENTIFIER:
            for (Key, Value) in UserExtension.GetBinaryAbstract():
                if Key == TAB_LANGUAGE_EN_X:
                    GenUNIFlag = True
                else:
                    OnlyLANGUAGE_EN_X = False
                BinaryAbstract.append((Key, Value))
            for (Key, Value) in UserExtension.GetBinaryDescription():
                if Key == TAB_LANGUAGE_EN_X:
                    GenUNIFlag = True
                else:
                    OnlyLANGUAGE_EN_X = False
                BinaryDescription.append((Key, Value))
    for Pcd in PackageObject.GetPcdList():
        for TxtObj in Pcd.GetPromptList() + Pcd.GetHelpTextList():
            if TxtObj.GetLang() == TAB_LANGUAGE_EN_X:
                GenUNIFlag = True
            else:
                OnlyLANGUAGE_EN_X = False
        for PcdError in Pcd.GetPcdErrorsList():
            # Only hex-numbered errors carry localizable messages.
            if PcdError.GetErrorNumber().startswith('0x') or PcdError.GetErrorNumber().startswith('0X'):
                for (Key, Value) in PcdError.GetErrorMessageList():
                    if Key == TAB_LANGUAGE_EN_X:
                        GenUNIFlag = True
                    else:
                        OnlyLANGUAGE_EN_X = False
    if not GenUNIFlag:
        return
    elif OnlyLANGUAGE_EN_X:
        return
    else:
        PackageObject.UNIFlag = True
    if not os.path.exists(os.path.dirname(PackageObject.GetFullPath())):
        os.makedirs(os.path.dirname(PackageObject.GetFullPath()))
    ContainerFile = GetUniFileName(os.path.dirname(PackageObject.GetFullPath()), PackageObject.GetBaseName())
    Content = UniFileHeader + '\r\n'
    Content += '\r\n'
    Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_ABSTRACT, PackageObject.GetAbstract(), ContainerFile) + '\r\n'
    Content += FormatUniEntry('#string ' + TAB_DEC_PACKAGE_DESCRIPTION, PackageObject.GetDescription(), ContainerFile) \
    + '\r\n'
    Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) + '\r\n'
    Content += FormatUniEntry('#string ' + TAB_DEC_BINARY_DESCRIPTION, BinaryDescription, ContainerFile) + '\r\n'
    PromptGenList = []
    HelpTextGenList = []
    for Pcd in PackageObject.GetPcdList():
        # Generate Prompt for each Pcd
        PcdPromptStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_PROMPT '
        TokenValueList = []
        for TxtObj in Pcd.GetPromptList():
            Lang = TxtObj.GetLang()
            PromptStr = TxtObj.GetString()
            #
            # Avoid generating the same PROMPT entry more than one time.
            #
            if (PcdPromptStrName, Lang) not in PromptGenList:
                TokenValueList.append((Lang, PromptStr))
                PromptGenList.append((PcdPromptStrName, Lang))
        PromptString = FormatUniEntry(PcdPromptStrName, TokenValueList, ContainerFile) + '\r\n'
        if PromptString not in Content:
            Content += PromptString
        # Generate Help String for each Pcd
        PcdHelpStrName = '#string ' + 'STR_' + Pcd.GetTokenSpaceGuidCName() + '_' + Pcd.GetCName() + '_HELP '
        TokenValueList = []
        for TxtObj in Pcd.GetHelpTextList():
            Lang = TxtObj.GetLang()
            HelpStr = TxtObj.GetString()
            #
            # Avoid generating the same HELP entry more than one time.
            #
            if (PcdHelpStrName, Lang) not in HelpTextGenList:
                TokenValueList.append((Lang, HelpStr))
                HelpTextGenList.append((PcdHelpStrName, Lang))
        HelpTextString = FormatUniEntry(PcdHelpStrName, TokenValueList, ContainerFile) + '\r\n'
        if HelpTextString not in Content:
            Content += HelpTextString
        # Generate PcdError for each Pcd if ErrorNo exist.
        for PcdError in Pcd.GetPcdErrorsList():
            ErrorNo = PcdError.GetErrorNumber()
            if ErrorNo.startswith(TAB_HEX_START) or ErrorNo.startswith(TAB_CAPHEX_START):
                PcdErrStrName = '#string ' + TAB_STR_TOKENCNAME + TAB_UNDERLINE_SPLIT + Pcd.GetTokenSpaceGuidCName() \
                + TAB_UNDERLINE_SPLIT + TAB_STR_TOKENERR \
                + TAB_UNDERLINE_SPLIT + ErrorNo[2:]
                PcdErrString = FormatUniEntry(PcdErrStrName, PcdError.GetErrorMessageList(), ContainerFile) + '\r\n'
                if PcdErrString not in Content:
                    Content += PcdErrString
    File = codecs.open(ContainerFile, 'w', Encoding)
    # BOM first, then the assembled content.
    File.write(u'\uFEFF' + Content)
    File.stream.close()
    Md5Signature = md5(__FileHookOpen__(str(ContainerFile), 'rb').read())
    Md5Sum = Md5Signature.hexdigest()
    if (ContainerFile, Md5Sum) not in PackageObject.FileList:
        PackageObject.FileList.append((ContainerFile, Md5Sum))
    return ContainerFile
## GenPcdErrComment
#
# @param PcdErrObject: PcdErrorObject
#
# @retval CommentStr: Generated comment lines, with prefix "#"
#
def GenPcdErrComment (PcdErrObject):
    """Generate '#'-prefixed PcdError comment lines for a DEC PCD entry.

    NOTE: when the error object carries more than one of valid-range,
    valid-list and expression, the later assignment replaces the earlier one
    (last-wins), matching the existing behavior.

    @param PcdErrObject: PcdErrorObject
    @retval CommentStr:  generated comment line(s), with prefix '#'
    """
    ErrorCode = PcdErrObject.GetErrorNumber()

    def _ErrLinePrefix(Keyword):
        # '# @<Keyword> ' optionally followed by '<ErrorCode> | '
        Prefix = TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT + Keyword + TAB_SPACE_SPLIT
        if ErrorCode:
            Prefix += ErrorCode + TAB_SPACE_SPLIT + TAB_VALUE_SPLIT + TAB_SPACE_SPLIT
        return Prefix

    CommentStr = ''
    ValidValueRange = PcdErrObject.GetValidValueRange()
    if ValidValueRange:
        CommentStr = _ErrLinePrefix(TAB_PCD_VALIDRANGE) + ValidValueRange + END_OF_LINE
    ValidValue = PcdErrObject.GetValidValue()
    if ValidValue:
        ValidValueList = \
            [Value for Value in ValidValue.split(TAB_SPACE_SPLIT) if Value]
        CommentStr = _ErrLinePrefix(TAB_PCD_VALIDLIST) + TAB_COMMA_SPLIT.join(ValidValueList) + END_OF_LINE
    Expression = PcdErrObject.GetExpression()
    if Expression:
        CommentStr = _ErrLinePrefix(TAB_PCD_EXPRESSION) + Expression + END_OF_LINE
    return CommentStr
| edk2-master | BaseTools/Source/Python/UPT/GenMetaFile/GenDecFile.py |
## @file GenXmlFile.py
#
# This file contains the logic to generate XML files.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenXmlFile
'''
| edk2-master | BaseTools/Source/Python/UPT/GenMetaFile/GenXmlFile.py |
## @file
# Generate a capsule windows driver.
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenerateWindowsDriver
'''
import sys
import argparse
import uuid
import struct
import subprocess
import os
import tempfile
import shutil
import platform
import re
import logging
from WindowsCapsuleSupportHelper import WindowsCapsuleSupportHelper
from Common.Uefi.Capsule.FmpCapsuleHeader import FmpCapsuleHeaderClass
from Common.Uefi.Capsule.UefiCapsuleHeader import UefiCapsuleHeaderClass
#
# Globals for help information
#
__prog__ = 'GenerateWindowsDriver'
__version__ = '0.0'
__copyright__ = 'Copyright (c) 2019, Intel Corporation. All rights reserved.'
__description__ = 'Generate Capsule Windows Driver.\n'
def GetCapGuid (InputFile):
    """Return the UpdateImageTypeId GUID of the first FMP payload in a capsule.

    Reads the capsule file at InputFile, strips the UEFI capsule header with
    the module-level UefiCapsuleHeader helper, decodes the FMP capsule header,
    and returns the UpdateImageTypeId GUID of the first payload item.
    Returns None when the capsule contains no payload items.  Prints an error
    and exits the tool if the capsule can not be decoded.
    """
    with open(InputFile, 'rb') as File:
        Buffer = File.read()
    try:
        # Decode() returns the capsule content that follows the UEFI header.
        Result = UefiCapsuleHeader.Decode (Buffer)
        if len (Result) > 0:
            FmpCapsuleHeader.Decode (Result)
            # The driver package describes a single firmware image, so the
            # GUID of the first payload item identifies it.
            for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
                return FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageTypeId
    except Exception:
        # Was a bare 'except:' which would also swallow SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        print ('GenerateCapsule: error: can not decode capsule')
        sys.exit (1)
def ArgCheck(args):
    """Validate args.CapsuleVersion_DotString as a 4-part dotted hex version.

    The string must have exactly four '.'-separated components, each a hex
    value that fits in 16 bits, and may contain only hex digits and dots.
    Logs and raises ValueError on any violation.  Non-hex components also
    surface as ValueError (from int()).
    """
    Version = args.CapsuleVersion_DotString.split('.')
    if len(Version) != 4:
        logging.critical("Name invalid: '%s'", args.CapsuleVersion_DotString)
        raise ValueError("Name invalid.")
    for sub in Version:
        # Each component is a 16-bit field: valid range is 0x0..0xFFFF.
        # Was '> 65536', which wrongly accepted 0x10000.
        if int(sub, 16) >= 65536:
            logging.critical("Name invalid: '%s'", args.CapsuleVersion_DotString)
            raise ValueError("Name exceed limit 65536.")
    # Was r'[\a-fA-F0-9]*$': '\a' is the BEL escape inside a character
    # class, not a dot, so every valid dotted version string was rejected.
    if not (re.compile(r'[.a-fA-F0-9]*$')).match(args.CapsuleVersion_DotString):
        logging.critical("Name invalid: '%s'", args.CapsuleVersion_DotString)
        raise ValueError("Name has invalid chars.")
def CapsuleGuidCheck(InputFile, Guid):
    """Exit with an error if Guid differs from the GUID inside the capsule."""
    DecodedGuid = GetCapGuid(InputFile)
    Mismatch = str(Guid).lower() != str(DecodedGuid)
    if Mismatch:
        print('GenerateWindowsDriver error: Different Guid from Capsule')
        sys.exit(1)
if __name__ == '__main__':
    # Response-file support: argparse calls this for every line of an
    # @file argument to split it into individual tokens.
    def convert_arg_line_to_args(arg_line):
        for arg in arg_line.split():
            if not arg.strip():
                continue
            yield arg
    parser = argparse.ArgumentParser (
        prog = __prog__,
        description = __description__ + __copyright__,
        conflict_handler = 'resolve',
        fromfile_prefix_chars = '@'
    )
    parser.convert_arg_line_to_args = convert_arg_line_to_args
    parser.add_argument("--output-folder", dest = 'OutputFolder', help = "firmware resource update driver package output folder.")
    parser.add_argument("--product-fmp-guid", dest = 'ProductFmpGuid', help = "firmware GUID of resource update driver package")
    parser.add_argument("--capsuleversion-dotstring", dest = 'CapsuleVersion_DotString', help = "firmware version with date on which update driver package is authored")
    parser.add_argument("--capsuleversion-hexstring", dest = 'CapsuleVersion_HexString', help = "firmware version in Hex of update driver package")
    parser.add_argument("--product-fw-provider", dest = 'ProductFwProvider', help = "vendor/provider of entire firmware resource update driver package")
    parser.add_argument("--product-fw-mfg-name", dest = 'ProductFwMfgName', help = "manufacturer/vendor of firmware resource update driver package")
    parser.add_argument("--product-fw-desc", dest = "ProductFwDesc", help = "description about resource update driver")
    parser.add_argument("--capsule-file-name", dest = 'CapsuleFileName', help ="firmware resource image file")
    parser.add_argument("--pfx-file", dest = 'PfxFile', help = "pfx file path used to sign resource update driver")
    parser.add_argument("--arch", dest = 'Arch', help = "supported architecture:arm/x64/amd64/arm64/aarch64", default = 'amd64')
    parser.add_argument("--operating-system-string", dest = 'OperatingSystemString', help = "supported operating system:win10/10/10_au/10_rs2/10_rs3/10_rs4/server10/server2016/serverrs2/serverrs3/serverrs4", default = "win10")
    args = parser.parse_args()
    # os.path.join(folder, '') guarantees a trailing path separator before
    # the file name is appended.
    InputFile = os.path.join(args.OutputFolder, '') + args.CapsuleFileName
    # Module-level helper objects used by GetCapGuid() above.
    UefiCapsuleHeader = UefiCapsuleHeaderClass ()
    FmpCapsuleHeader = FmpCapsuleHeaderClass ()
    # Fail fast before packaging: GUID must match the capsule contents and
    # the dotted version string must be well formed.
    CapsuleGuidCheck(InputFile, args.ProductFmpGuid)
    ArgCheck(args)
    # Driver package name is the capsule file name without its extension.
    ProductName = os.path.splitext(args.CapsuleFileName)[0]
    WindowsDriver = WindowsCapsuleSupportHelper ()
    WindowsDriver.PackageWindowsCapsuleFiles (
        args.OutputFolder,
        ProductName,
        args.ProductFmpGuid,
        args.CapsuleVersion_DotString,
        args.CapsuleVersion_HexString,
        args.ProductFwProvider,
        args.ProductFwMfgName,
        args.ProductFwDesc,
        args.CapsuleFileName,
        args.PfxFile,
        None,
        None,
        args.Arch,
        args.OperatingSystemString
        )
| edk2-master | BaseTools/Source/Python/Capsule/GenerateWindowsDriver.py |
## @file
# Generate a capsule.
#
# This tool generates a UEFI Capsule around an FMP Capsule. The capsule payload
# be signed using signtool or OpenSSL and if it is signed the signed content
# includes an FMP Payload Header.
#
# This tool is intended to be used to generate UEFI Capsules to update the
# system firmware or device firmware for integrated devices. In order to
# keep the tool as simple as possible, it has the following limitations:
# * Do not support vendor code bytes in a capsule.
#
# Copyright (c) 2018 - 2022, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
GenerateCapsule
'''
import sys
import argparse
import uuid
import struct
import subprocess
import os
import tempfile
import shutil
import platform
import json
from Common.Uefi.Capsule.UefiCapsuleHeader import UefiCapsuleHeaderClass
from Common.Uefi.Capsule.FmpCapsuleHeader import FmpCapsuleHeaderClass
from Common.Uefi.Capsule.FmpAuthHeader import FmpAuthHeaderClass
from Common.Uefi.Capsule.CapsuleDependency import CapsuleDependencyClass
from Common.Edk2.Capsule.FmpPayloadHeader import FmpPayloadHeaderClass
#
# Globals for help information; consumed by the argparse.ArgumentParser
# created in the __main__ block below (prog/description text).
#
__prog__ = 'GenerateCapsule'
__version__ = '0.10'
__copyright__ = 'Copyright (c) 2022, Intel Corporation. All rights reserved.'
__description__ = 'Generate a capsule.\n'
def SignPayloadSignTool (Payload, ToolPath, PfxFile, SubjectName, Verbose = False):
    """Sign Payload with Windows signtool.exe; return the detached signature.

    The payload bytes are written to a temporary file and signed with a
    detached PKCS7/SHA-256 signature using the certificate in PfxFile and/or
    the certificate selected by SubjectName.  The generated .p7 signature
    file is read back and its bytes returned.  Raises ValueError on any
    failure; the temporary directory is removed on every exit path.
    """
    #
    # Create a temporary directory
    #
    TempDirectoryName = tempfile.mkdtemp()
    #
    # Generate temp file name for the payload contents
    #
    TempFileName = os.path.join (TempDirectoryName, 'Payload.bin')
    #
    # Create temporary payload file for signing
    #
    try:
        with open (TempFileName, 'wb') as File:
            File.write (Payload)
    except:
        shutil.rmtree (TempDirectoryName)
        raise ValueError ('GenerateCapsule: error: can not write temporary payload file.')
    #
    # Build signtool command
    #
    if ToolPath is None:
        ToolPath = ''
    Command = ''
    Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'signtool.exe'))
    # /p7ce DetachedSignedData + the PKCS7 signedData OID produce a detached
    # signature written to the /p7 directory as <input>.p7.
    Command = Command + 'sign /fd sha256 /p7ce DetachedSignedData /p7co 1.2.840.113549.1.7.2 '
    Command = Command + '/p7 {TempDir} '.format (TempDir = TempDirectoryName)
    if PfxFile is not None:
        Command = Command + '/f {PfxFile} '.format (PfxFile = PfxFile)
    if SubjectName is not None:
        Command = Command + '/n {SubjectName} '.format (SubjectName = SubjectName)
    Command = Command + TempFileName
    if Verbose:
        print (Command)
    #
    # Sign the input file using the specified private key
    #
    try:
        Process = subprocess.Popen (Command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
        # NOTE(review): communicate('') passes a str to a bytes-mode pipe --
        # verify this is accepted on the supported Python 3 versions.
        Result = Process.communicate('')
    except:
        shutil.rmtree (TempDirectoryName)
        raise ValueError ('GenerateCapsule: error: can not run signtool.')
    if Process.returncode != 0:
        shutil.rmtree (TempDirectoryName)
        print (Result[1].decode())
        raise ValueError ('GenerateCapsule: error: signtool failed.')
    #
    # Read the signature from the generated output file
    #
    try:
        with open (TempFileName + '.p7', 'rb') as File:
            Signature = File.read ()
    except:
        shutil.rmtree (TempDirectoryName)
        raise ValueError ('GenerateCapsule: error: can not read signature file.')
    shutil.rmtree (TempDirectoryName)
    return Signature
def VerifyPayloadSignTool (Payload, CertData, ToolPath, PfxFile, SubjectName, Verbose = False):
    """Placeholder: signature verification via signtool always raises."""
    Message = 'signtool verify is not supported.'
    print (Message)
    raise ValueError ('GenerateCapsule: error: ' + Message)
def SignPayloadOpenSsl (Payload, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, Verbose = False):
    """Sign Payload with 'openssl smime -sign'; return the DER signature.

    Payload is fed to openssl on stdin and the detached DER-encoded
    SHA-256 signature is captured from stdout.  SignerPrivateCertFile is
    the signer certificate, OtherPublicCertFile is included via -certfile;
    TrustedPublicCertFile is unused here (it is only needed for verify).
    Raises ValueError if openssl can not be run or exits non-zero.
    """
    #
    # Build openssl command
    #
    if ToolPath is None:
        ToolPath = ''
    Command = ''
    Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'openssl'))
    Command = Command + 'smime -sign -binary -outform DER -md sha256 '
    Command = Command + '-signer "{Private}" -certfile "{Public}"'.format (Private = SignerPrivateCertFile, Public = OtherPublicCertFile)
    if Verbose:
        print (Command)
    #
    # Sign the input file using the specified private key and capture signature from STDOUT
    #
    try:
        Process = subprocess.Popen (Command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
        Result = Process.communicate(input = Payload)
        Signature = Result[0]
    except:
        raise ValueError ('GenerateCapsule: error: can not run openssl.')
    if Process.returncode != 0:
        print (Result[1].decode())
        raise ValueError ('GenerateCapsule: error: openssl failed.')
    return Signature
def VerifyPayloadOpenSsl (Payload, CertData, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, Verbose = False):
    """Verify a detached DER signature (CertData) over Payload with openssl.

    Payload is written to a temporary file referenced via -content, the
    signature bytes are fed on stdin, and the chain is checked against
    TrustedPublicCertFile.  The signer/other certificate parameters are
    unused here.  Returns Payload unchanged on success; raises ValueError
    on failure.  The temporary directory is removed on every exit path.
    """
    #
    # Create a temporary directory
    #
    TempDirectoryName = tempfile.mkdtemp()
    #
    # Generate temp file name for the payload contents
    #
    TempFileName = os.path.join (TempDirectoryName, 'Payload.bin')
    #
    # Create temporary payload file for verification
    #
    try:
        with open (TempFileName, 'wb') as File:
            File.write (Payload)
    except:
        shutil.rmtree (TempDirectoryName)
        raise ValueError ('GenerateCapsule: error: can not write temporary payload file.')
    #
    # Build openssl command
    #
    if ToolPath is None:
        ToolPath = ''
    Command = ''
    Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'openssl'))
    Command = Command + 'smime -verify -inform DER '
    Command = Command + '-content {Content} -CAfile "{Public}"'.format (Content = TempFileName, Public = TrustedPublicCertFile)
    if Verbose:
        print (Command)
    #
    # Verify signature
    #
    try:
        Process = subprocess.Popen (Command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.PIPE, shell = True)
        Result = Process.communicate(input = CertData)
    except:
        shutil.rmtree (TempDirectoryName)
        raise ValueError ('GenerateCapsule: error: can not run openssl.')
    if Process.returncode != 0:
        shutil.rmtree (TempDirectoryName)
        print (Result[1].decode())
        raise ValueError ('GenerateCapsule: error: openssl failed.')
    shutil.rmtree (TempDirectoryName)
    return Payload
if __name__ == '__main__':
def convert_arg_line_to_args(arg_line):
for arg in arg_line.split():
if not arg.strip():
continue
yield arg
def ValidateUnsignedInteger (Argument):
try:
Value = int (Argument, 0)
except:
Message = '{Argument} is not a valid integer value.'.format (Argument = Argument)
raise argparse.ArgumentTypeError (Message)
if Value < 0:
Message = '{Argument} is a negative value.'.format (Argument = Argument)
raise argparse.ArgumentTypeError (Message)
return Value
def ValidateRegistryFormatGuid (Argument):
try:
Value = uuid.UUID (Argument)
except:
Message = '{Argument} is not a valid registry format GUID value.'.format (Argument = Argument)
raise argparse.ArgumentTypeError (Message)
return Value
def ConvertJsonValue (Config, FieldName, Convert, Required = True, Default = None, Open = False):
if FieldName not in Config:
if Required:
print ('GenerateCapsule: error: Payload descriptor invalid syntax. Could not find {Key} in payload descriptor.'.format(Key = FieldName))
sys.exit (1)
return Default
try:
Value = Convert (Config[FieldName])
except:
print ('GenerateCapsule: error: {Key} in payload descriptor has invalid syntax.'.format (Key = FieldName))
sys.exit (1)
if Open:
try:
Value = open (Value, "rb")
except:
print ('GenerateCapsule: error: can not open file {File}'.format (File = FieldName))
sys.exit (1)
return Value
    def DecodeJsonFileParse (Json):
        """Populate PayloadDescriptorList from a decode-mode JSON document.

        All per-payload fields are optional in decode mode; the parsed
        values are used later to verify against the decoded capsule.
        Exits the tool if the 'Payloads' section is missing or a field has
        invalid syntax.  Appends one PayloadDescriptor per entry to the
        enclosing scope's PayloadDescriptorList.
        """
        if 'Payloads' not in Json:
            print ('GenerateCapsule: error "Payloads" section not found in JSON file {File}'.format (File = args.JsonFile.name))
            sys.exit (1)
        for Config in Json['Payloads']:
            #
            # Parse fields from JSON
            #
            PayloadFile = ConvertJsonValue (Config, 'Payload', os.path.expandvars, Required = False)
            Guid = ConvertJsonValue (Config, 'Guid', ValidateRegistryFormatGuid, Required = False)
            FwVersion = ConvertJsonValue (Config, 'FwVersion', ValidateUnsignedInteger, Required = False)
            LowestSupportedVersion = ConvertJsonValue (Config, 'LowestSupportedVersion', ValidateUnsignedInteger, Required = False)
            HardwareInstance = ConvertJsonValue (Config, 'HardwareInstance', ValidateUnsignedInteger, Required = False, Default = 0)
            MonotonicCount = ConvertJsonValue (Config, 'MonotonicCount', ValidateUnsignedInteger, Required = False, Default = 0)
            SignToolPfxFile = ConvertJsonValue (Config, 'SignToolPfxFile', os.path.expandvars, Required = False, Default = None, Open = True)
            # NOTE(review): Open = True opens the subject name as a file; a
            # subject name is normally a plain string -- confirm intended.
            SignToolSubjectName = ConvertJsonValue (Config, 'SignToolSubjectName', os.path.expandvars, Required = False, Default = None, Open = True)
            OpenSslSignerPrivateCertFile = ConvertJsonValue (Config, 'OpenSslSignerPrivateCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
            OpenSslOtherPublicCertFile = ConvertJsonValue (Config, 'OpenSslOtherPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
            OpenSslTrustedPublicCertFile = ConvertJsonValue (Config, 'OpenSslTrustedPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
            SigningToolPath = ConvertJsonValue (Config, 'SigningToolPath', os.path.expandvars, Required = False, Default = None)
            UpdateImageIndex = ConvertJsonValue (Config, 'UpdateImageIndex', ValidateUnsignedInteger, Required = False, Default = 1)
            PayloadDescriptorList.append (PayloadDescriptor (
                PayloadFile,
                Guid,
                FwVersion,
                LowestSupportedVersion,
                MonotonicCount,
                HardwareInstance,
                UpdateImageIndex,
                SignToolPfxFile,
                SignToolSubjectName,
                OpenSslSignerPrivateCertFile,
                OpenSslOtherPublicCertFile,
                OpenSslTrustedPublicCertFile,
                SigningToolPath
                ))
    def EncodeJsonFileParse (Json):
        """Populate descriptor lists from an encode-mode JSON document.

        Reads the optional 'EmbeddedDrivers' section into
        EmbeddedDriverDescriptorList (raw driver bytes) and the required
        'Payloads' section into PayloadDescriptorList (raw payload bytes
        plus per-payload signing/dependency settings).  Exits the tool on a
        missing 'Payloads' section or unreadable files.
        """
        if 'EmbeddedDrivers' not in Json:
            print ('GenerateCapsule: warning "EmbeddedDrivers" section not found in JSON file {File}'.format (File = args.JsonFile.name))
        else:
            for Config in Json['EmbeddedDrivers']:
                EmbeddedDriverFile = ConvertJsonValue(Config, 'Driver', os.path.expandvars, Open = True)
                #
                #Read EmbeddedDriver file
                #
                try:
                    if args.Verbose:
                        print ('Read EmbeddedDriver file {File}'.format (File = EmbeddedDriverFile.name))
                    Driver = EmbeddedDriverFile.read()
                except:
                    print ('GenerateCapsule: error: can not read EmbeddedDriver file {File}'.format (File = EmbeddedDriverFile.name))
                    sys.exit (1)
                EmbeddedDriverDescriptorList.append (Driver)
        if 'Payloads' not in Json:
            print ('GenerateCapsule: error: "Payloads" section not found in JSON file {File}'.format (File = args.JsonFile.name))
            sys.exit (1)
        for Config in Json['Payloads']:
            #
            # Parse fields from JSON
            #
            PayloadFile = ConvertJsonValue (Config, 'Payload', os.path.expandvars, Open = True)
            Guid = ConvertJsonValue (Config, 'Guid', ValidateRegistryFormatGuid)
            FwVersion = ConvertJsonValue (Config, 'FwVersion', ValidateUnsignedInteger)
            LowestSupportedVersion = ConvertJsonValue (Config, 'LowestSupportedVersion', ValidateUnsignedInteger)
            HardwareInstance = ConvertJsonValue (Config, 'HardwareInstance', ValidateUnsignedInteger, Required = False, Default = 0)
            UpdateImageIndex = ConvertJsonValue (Config, 'UpdateImageIndex', ValidateUnsignedInteger, Required = False, Default = 1)
            MonotonicCount = ConvertJsonValue (Config, 'MonotonicCount', ValidateUnsignedInteger, Required = False, Default = 0)
            SignToolPfxFile = ConvertJsonValue (Config, 'SignToolPfxFile', os.path.expandvars, Required = False, Default = None, Open = True)
            # NOTE(review): Open = True opens the subject name as a file; a
            # subject name is normally a plain string -- confirm intended.
            SignToolSubjectName = ConvertJsonValue (Config, 'SignToolSubjectName', os.path.expandvars, Required = False, Default = None, Open = True)
            OpenSslSignerPrivateCertFile = ConvertJsonValue (Config, 'OpenSslSignerPrivateCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
            OpenSslOtherPublicCertFile = ConvertJsonValue (Config, 'OpenSslOtherPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
            OpenSslTrustedPublicCertFile = ConvertJsonValue (Config, 'OpenSslTrustedPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
            SigningToolPath = ConvertJsonValue (Config, 'SigningToolPath', os.path.expandvars, Required = False, Default = None)
            DepexExp = ConvertJsonValue (Config, 'Dependencies', str, Required = False, Default = None)
            #
            # Read binary input file
            #
            try:
                if args.Verbose:
                    print ('Read binary input file {File}'.format (File = PayloadFile.name))
                Payload = PayloadFile.read()
                PayloadFile.close ()
            except:
                print ('GenerateCapsule: error: can not read binary input file {File}'.format (File = PayloadFile.name))
                sys.exit (1)
            PayloadDescriptorList.append (PayloadDescriptor (
                Payload,
                Guid,
                FwVersion,
                LowestSupportedVersion,
                MonotonicCount,
                HardwareInstance,
                UpdateImageIndex,
                SignToolPfxFile,
                SignToolSubjectName,
                OpenSslSignerPrivateCertFile,
                OpenSslOtherPublicCertFile,
                OpenSslTrustedPublicCertFile,
                SigningToolPath,
                DepexExp
                ))
    def GenerateOutputJson (PayloadJsonDescriptorList):
        """Write <OutputFile>.json describing the decoded capsule payloads.

        Builds a 'Payloads' JSON document from the descriptor list, removes
        the optional signing fields that were not supplied, and writes the
        pretty-printed result next to args.OutputFile.
        """
        # Note: the comprehension variable 'PayloadDescriptor' shadows the
        # PayloadDescriptor class within this expression.
        PayloadJson = {
            "Payloads" : [
                {
                    "Guid": str(PayloadDescriptor.Guid).upper(),
                    "FwVersion": str(PayloadDescriptor.FwVersion),
                    "LowestSupportedVersion": str(PayloadDescriptor.LowestSupportedVersion),
                    "MonotonicCount": str(PayloadDescriptor.MonotonicCount),
                    "Payload": PayloadDescriptor.Payload,
                    "HardwareInstance": str(PayloadDescriptor.HardwareInstance),
                    "UpdateImageIndex": str(PayloadDescriptor.UpdateImageIndex),
                    "SignToolPfxFile": str(PayloadDescriptor.SignToolPfxFile),
                    "SignToolSubjectName": str(PayloadDescriptor.SignToolSubjectName),
                    "OpenSslSignerPrivateCertFile": str(PayloadDescriptor.OpenSslSignerPrivateCertFile),
                    "OpenSslOtherPublicCertFile": str(PayloadDescriptor.OpenSslOtherPublicCertFile),
                    "OpenSslTrustedPublicCertFile": str(PayloadDescriptor.OpenSslTrustedPublicCertFile),
                    "SigningToolPath": str(PayloadDescriptor.SigningToolPath),
                    "Dependencies" : str(PayloadDescriptor.DepexExp)
                }for PayloadDescriptor in PayloadJsonDescriptorList
            ]
        }
        OutputJsonFile = args.OutputFile.name + '.json'
        # Drop optional signing keys that were not present in the capsule.
        if 'Payloads' in PayloadJson:
            PayloadSection = PayloadJson ['Payloads']
        Index = 0
        for PayloadField in PayloadSection:
            if PayloadJsonDescriptorList[Index].SignToolPfxFile is None:
                del PayloadField ['SignToolPfxFile']
            if PayloadJsonDescriptorList[Index].SignToolSubjectName is None:
                del PayloadField ['SignToolSubjectName']
            if PayloadJsonDescriptorList[Index].OpenSslSignerPrivateCertFile is None:
                del PayloadField ['OpenSslSignerPrivateCertFile']
            if PayloadJsonDescriptorList[Index].OpenSslOtherPublicCertFile is None:
                del PayloadField ['OpenSslOtherPublicCertFile']
            if PayloadJsonDescriptorList[Index].OpenSslTrustedPublicCertFile is None:
                del PayloadField ['OpenSslTrustedPublicCertFile']
            if PayloadJsonDescriptorList[Index].SigningToolPath is None:
                del PayloadField ['SigningToolPath']
            Index = Index + 1
        Result = json.dumps (PayloadJson, indent=4, sort_keys=True, separators=(',', ': '))
        with open (OutputJsonFile, 'w') as OutputFile:
            OutputFile.write (Result)
def CheckArgumentConflict (args):
if args.Encode:
if args.InputFile:
print ('GenerateCapsule: error: Argument InputFile conflicts with Argument -j')
sys.exit (1)
if args.EmbeddedDriver:
print ('GenerateCapsule: error: Argument --embedded-driver conflicts with Argument -j')
sys.exit (1)
if args.Guid:
print ('GenerateCapsule: error: Argument --guid conflicts with Argument -j')
sys.exit (1)
if args.FwVersion:
print ('GenerateCapsule: error: Argument --fw-version conflicts with Argument -j')
sys.exit (1)
if args.LowestSupportedVersion:
print ('GenerateCapsule: error: Argument --lsv conflicts with Argument -j')
sys.exit (1)
if args.MonotonicCount:
print ('GenerateCapsule: error: Argument --monotonic-count conflicts with Argument -j')
sys.exit (1)
if args.HardwareInstance:
print ('GenerateCapsule: error: Argument --hardware-instance conflicts with Argument -j')
sys.exit (1)
if args.SignToolPfxFile:
print ('GenerateCapsule: error: Argument --pfx-file conflicts with Argument -j')
sys.exit (1)
if args.SignToolSubjectName:
print ('GenerateCapsule: error: Argument --SubjectName conflicts with Argument -j')
sys.exit (1)
if args.OpenSslSignerPrivateCertFile:
print ('GenerateCapsule: error: Argument --signer-private-cert conflicts with Argument -j')
sys.exit (1)
if args.OpenSslOtherPublicCertFile:
print ('GenerateCapsule: error: Argument --other-public-cert conflicts with Argument -j')
sys.exit (1)
if args.OpenSslTrustedPublicCertFile:
print ('GenerateCapsule: error: Argument --trusted-public-cert conflicts with Argument -j')
sys.exit (1)
if args.SigningToolPath:
print ('GenerateCapsule: error: Argument --signing-tool-path conflicts with Argument -j')
sys.exit (1)
    class PayloadDescriptor (object):
        """One capsule payload plus its signing/dependency settings.

        Collects the per-payload values parsed from the command line or a
        JSON payload descriptor and derives which signing method applies
        (signtool vs. OpenSSL).  Validate() enforces field presence and
        range constraints for the selected mode.
        """
        def __init__(self,
                     Payload,
                     Guid,
                     FwVersion,
                     LowestSupportedVersion,
                     MonotonicCount = 0,
                     HardwareInstance = 0,
                     UpdateImageIndex = 1,
                     SignToolPfxFile = None,
                     SignToolSubjectName = None,
                     OpenSslSignerPrivateCertFile = None,
                     OpenSslOtherPublicCertFile = None,
                     OpenSslTrustedPublicCertFile = None,
                     SigningToolPath = None,
                     DepexExp = None
                     ):
            self.Payload = Payload
            self.Guid = Guid
            self.FwVersion = FwVersion
            self.LowestSupportedVersion = LowestSupportedVersion
            self.MonotonicCount = MonotonicCount
            self.HardwareInstance = HardwareInstance
            self.UpdateImageIndex = UpdateImageIndex
            self.SignToolPfxFile = SignToolPfxFile
            self.SignToolSubjectName = SignToolSubjectName
            self.OpenSslSignerPrivateCertFile = OpenSslSignerPrivateCertFile
            self.OpenSslOtherPublicCertFile = OpenSslOtherPublicCertFile
            self.OpenSslTrustedPublicCertFile = OpenSslTrustedPublicCertFile
            self.SigningToolPath = SigningToolPath
            self.DepexExp = DepexExp
            # signtool is selected by either a PFX file or a subject name.
            self.UseSignTool = (self.SignToolPfxFile is not None or
                                self.SignToolSubjectName is not None)
            # OpenSSL requires all three certificate files ...
            self.UseOpenSsl = (self.OpenSslSignerPrivateCertFile is not None and
                               self.OpenSslOtherPublicCertFile is not None and
                               self.OpenSslTrustedPublicCertFile is not None)
            # ... while AnyOpenSsl detects a partial (invalid) OpenSSL setup.
            self.AnyOpenSsl = (self.OpenSslSignerPrivateCertFile is not None or
                               self.OpenSslOtherPublicCertFile is not None or
                               self.OpenSslTrustedPublicCertFile is not None)
            self.UseDependency = self.DepexExp is not None
        def Validate(self, args):
            """Check field presence/ranges for the selected mode.

            Raises argparse.ArgumentTypeError on any violation; error text
            names JSON fields or command-line options depending on whether
            -j was used.  On success, replaces the opened certificate file
            objects with their path names and returns True.
            """
            if self.UseSignTool and self.AnyOpenSsl:
                raise argparse.ArgumentTypeError ('Providing both signtool and OpenSSL options is not supported')
            # Partial OpenSSL configuration: some but not all cert files given.
            if not self.UseSignTool and not self.UseOpenSsl and self.AnyOpenSsl:
                if args.JsonFile:
                    raise argparse.ArgumentTypeError ('the following JSON fields are required for OpenSSL: OpenSslSignerPrivateCertFile, OpenSslOtherPublicCertFile, OpenSslTrustedPublicCertFile')
                else:
                    raise argparse.ArgumentTypeError ('the following options are required for OpenSSL: --signer-private-cert, --other-public-cert, --trusted-public-cert')
            if self.UseSignTool and platform.system() != 'Windows':
                raise argparse.ArgumentTypeError ('Use of signtool is not supported on this operating system.')
            if args.Encode:
                # FwVersion/LowestSupportedVersion are 32-bit fields.
                if self.FwVersion is None or self.LowestSupportedVersion is None:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('the following JSON fields are required: FwVersion, LowestSupportedVersion')
                    else:
                        raise argparse.ArgumentTypeError ('the following options are required: --fw-version, --lsv')
                if self.FwVersion > 0xFFFFFFFF:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('JSON field FwVersion must be an integer in range 0x0..0xffffffff')
                    else:
                        raise argparse.ArgumentTypeError ('--fw-version must be an integer in range 0x0..0xffffffff')
                if self.LowestSupportedVersion > 0xFFFFFFFF:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('JSON field LowestSupportedVersion must be an integer in range 0x0..0xffffffff')
                    else:
                        raise argparse.ArgumentTypeError ('--lsv must be an integer in range 0x0..0xffffffff')
            if args.Encode:
                if self.Guid is None:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('the following JSON field is required: Guid')
                    else:
                        raise argparse.ArgumentTypeError ('the following option is required: --guid')
                # HardwareInstance/MonotonicCount are 64-bit fields,
                # UpdateImageIndex is an 8-bit field.
                if self.HardwareInstance > 0xFFFFFFFFFFFFFFFF:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('JSON field HardwareInstance must be an integer in range 0x0..0xffffffffffffffff')
                    else:
                        raise argparse.ArgumentTypeError ('--hardware-instance must be an integer in range 0x0..0xffffffffffffffff')
                if self.MonotonicCount > 0xFFFFFFFFFFFFFFFF:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('JSON field MonotonicCount must be an integer in range 0x0..0xffffffffffffffff')
                    else:
                        raise argparse.ArgumentTypeError ('--monotonic-count must be an integer in range 0x0..0xffffffffffffffff')
                if self.UpdateImageIndex >0xFF:
                    if args.JsonFile:
                        raise argparse.ArgumentTypeError ('JSON field UpdateImageIndex must be an integer in range 0x0..0xff')
                    else:
                        raise argparse.ArgumentTypeError ('--update-image-index must be an integer in range 0x0..0xff')
            # Replace the opened file objects by their names; later signing
            # code passes these as paths on the tool command line.
            # NOTE(review): SignToolSubjectName is opened by the JSON parse
            # helpers but never closed/converted here -- verify.
            if self.UseSignTool:
                if self.SignToolPfxFile is not None:
                    self.SignToolPfxFile.close()
                    self.SignToolPfxFile = self.SignToolPfxFile.name
            if self.UseOpenSsl:
                self.OpenSslSignerPrivateCertFile.close()
                self.OpenSslOtherPublicCertFile.close()
                self.OpenSslTrustedPublicCertFile.close()
                self.OpenSslSignerPrivateCertFile = self.OpenSslSignerPrivateCertFile.name
                self.OpenSslOtherPublicCertFile = self.OpenSslOtherPublicCertFile.name
                self.OpenSslTrustedPublicCertFile = self.OpenSslTrustedPublicCertFile.name
            #
            # Perform additional argument verification
            #
            if args.Encode:
                if 'PersistAcrossReset' not in args.CapsuleFlag:
                    if 'InitiateReset' in args.CapsuleFlag:
                        raise argparse.ArgumentTypeError ('--capflag InitiateReset also requires --capflag PersistAcrossReset')
                if args.CapsuleOemFlag > 0xFFFF:
                    raise argparse.ArgumentTypeError ('--capoemflag must be an integer between 0x0000 and 0xffff')
            return True
    def Encode (PayloadDescriptorList, EmbeddedDriverDescriptorList, Buffer):
        """Build a UEFI FMP capsule and write it to args.OutputFile.

        Payload descriptors come from the -j JSON file or, otherwise, from
        the command line (Buffer holds the single payload read from
        InputFile).  For each payload: wrap in an FMP payload header,
        optionally prepend a dependency expression, optionally sign
        (signtool or OpenSSL) under an FMP auth header, then add it to the
        FMP capsule together with any embedded drivers; finally wrap the
        result in a UEFI capsule header.  Uses the module-level helper
        objects (FmpPayloadHeader, FmpAuthHeader, FmpCapsuleHeader,
        UefiCapsuleHeader, CapsuleDependency) created in __main__.
        """
        if args.JsonFile:
            CheckArgumentConflict(args)
            try:
                Json = json.loads (args.JsonFile.read ())
            except:
                print ('GenerateCapsule: error: {JSONFile} loads failure. '.format (JSONFile = args.JsonFile))
                sys.exit (1)
            EncodeJsonFileParse(Json)
        else:
            for Driver in args.EmbeddedDriver:
                EmbeddedDriverDescriptorList.append (Driver.read())
            PayloadDescriptorList.append (PayloadDescriptor (
                Buffer,
                args.Guid,
                args.FwVersion,
                args.LowestSupportedVersion,
                args.MonotonicCount,
                args.HardwareInstance,
                args.UpdateImageIndex,
                args.SignToolPfxFile,
                args.SignToolSubjectName,
                args.OpenSslSignerPrivateCertFile,
                args.OpenSslOtherPublicCertFile,
                args.OpenSslTrustedPublicCertFile,
                args.SigningToolPath,
                None
                ))
        # Validate every descriptor before any encoding work starts.
        for SinglePayloadDescriptor in PayloadDescriptorList:
            try:
                SinglePayloadDescriptor.Validate (args)
            except Exception as Msg:
                print ('GenerateCapsule: error:' + str(Msg))
                sys.exit (1)
        for SinglePayloadDescriptor in PayloadDescriptorList:
            # Per-image capsule support flags (dependency/authentication).
            ImageCapsuleSupport = 0x0000000000000000
            Result = SinglePayloadDescriptor.Payload
            try:
                FmpPayloadHeader.FwVersion = SinglePayloadDescriptor.FwVersion
                FmpPayloadHeader.LowestSupportedVersion = SinglePayloadDescriptor.LowestSupportedVersion
                FmpPayloadHeader.Payload = SinglePayloadDescriptor.Payload
                Result = FmpPayloadHeader.Encode ()
                if args.Verbose:
                    FmpPayloadHeader.DumpInfo ()
            except:
                print ('GenerateCapsule: error: can not encode FMP Payload Header')
                sys.exit (1)
            if SinglePayloadDescriptor.UseDependency:
                CapsuleDependency.Payload = Result
                CapsuleDependency.DepexExp = SinglePayloadDescriptor.DepexExp
                ImageCapsuleSupport |= FmpCapsuleHeader.CAPSULE_SUPPORT_DEPENDENCY
                Result = CapsuleDependency.Encode ()
                if args.Verbose:
                    CapsuleDependency.DumpInfo ()
            if SinglePayloadDescriptor.UseOpenSsl or SinglePayloadDescriptor.UseSignTool:
                #
                # Sign image with 64-bit MonotonicCount appended to end of image
                #
                try:
                    if SinglePayloadDescriptor.UseSignTool:
                        CertData = SignPayloadSignTool (
                            Result + struct.pack ('<Q', SinglePayloadDescriptor.MonotonicCount),
                            SinglePayloadDescriptor.SigningToolPath,
                            SinglePayloadDescriptor.SignToolPfxFile,
                            SinglePayloadDescriptor.SignToolSubjectName,
                            Verbose = args.Verbose
                            )
                    else:
                        CertData = SignPayloadOpenSsl (
                            Result + struct.pack ('<Q', SinglePayloadDescriptor.MonotonicCount),
                            SinglePayloadDescriptor.SigningToolPath,
                            SinglePayloadDescriptor.OpenSslSignerPrivateCertFile,
                            SinglePayloadDescriptor.OpenSslOtherPublicCertFile,
                            SinglePayloadDescriptor.OpenSslTrustedPublicCertFile,
                            Verbose = args.Verbose
                            )
                except Exception as Msg:
                    print ('GenerateCapsule: error: can not sign payload \n' + str(Msg))
                    sys.exit (1)
                try:
                    FmpAuthHeader.MonotonicCount = SinglePayloadDescriptor.MonotonicCount
                    FmpAuthHeader.CertData = CertData
                    FmpAuthHeader.Payload = Result
                    ImageCapsuleSupport |= FmpCapsuleHeader.CAPSULE_SUPPORT_AUTHENTICATION
                    Result = FmpAuthHeader.Encode ()
                    if args.Verbose:
                        FmpAuthHeader.DumpInfo ()
                except:
                    print ('GenerateCapsule: error: can not encode FMP Auth Header')
                    sys.exit (1)
            FmpCapsuleHeader.AddPayload (SinglePayloadDescriptor.Guid, Result, HardwareInstance = SinglePayloadDescriptor.HardwareInstance, UpdateImageIndex = SinglePayloadDescriptor.UpdateImageIndex, CapsuleSupport = ImageCapsuleSupport)
        try:
            for EmbeddedDriver in EmbeddedDriverDescriptorList:
                FmpCapsuleHeader.AddEmbeddedDriver(EmbeddedDriver)
            Result = FmpCapsuleHeader.Encode ()
            if args.Verbose:
                FmpCapsuleHeader.DumpInfo ()
        except:
            print ('GenerateCapsule: error: can not encode FMP Capsule Header')
            sys.exit (1)
        try:
            UefiCapsuleHeader.OemFlags = args.CapsuleOemFlag
            UefiCapsuleHeader.PersistAcrossReset = 'PersistAcrossReset' in args.CapsuleFlag
            UefiCapsuleHeader.PopulateSystemTable = False
            UefiCapsuleHeader.InitiateReset = 'InitiateReset' in args.CapsuleFlag
            UefiCapsuleHeader.Payload = Result
            Result = UefiCapsuleHeader.Encode ()
            if args.Verbose:
                UefiCapsuleHeader.DumpInfo ()
        except:
            print ('GenerateCapsule: error: can not encode UEFI Capsule Header')
            sys.exit (1)
        try:
            if args.Verbose:
                print ('Write binary output file {File}'.format (File = args.OutputFile.name))
            args.OutputFile.write (Result)
            args.OutputFile.close ()
        except:
            print ('GenerateCapsule: error: can not write binary output file {File}'.format (File = args.OutputFile.name))
            sys.exit (1)
def Decode (PayloadDescriptorList, PayloadJsonDescriptorList, Buffer):
    """Decode the capsule image in Buffer into its payload(s).

    Walks the capsule structure (UEFI Capsule Header -> FMP Capsule Header ->
    optional EFI_FIRMWARE_IMAGE_AUTHENTICATION -> optional dependency
    expression -> FMP Payload Header), verifies signatures when signing
    material is supplied, writes one <OutputFile>.Payload.<n>.bin per payload
    plus any embedded drivers, and generates a JSON description of the
    decoded capsule.

    PayloadDescriptorList     -- in/out list of PayloadDescriptor objects.
    PayloadJsonDescriptorList -- out list consumed by GenerateOutputJson().
    Buffer                    -- raw capsule file contents (bytes).

    Exits the process with status 1 on any decode error.
    """
    if args.JsonFile:
        CheckArgumentConflict(args)
        #
        # Parse payload descriptors from JSON
        #
        try:
            Json = json.loads (args.JsonFile.read())
        except:
            print ('GenerateCapsule: error: {JSONFile} loads failure. '.format (JSONFile = args.JsonFile))
            sys.exit (1)
        DecodeJsonFileParse (Json)
    else:
        # Single payload described entirely by command line arguments.
        # FIX: the parser stores the subject name as args.SignToolSubjectName
        # (dest='SignToolSubjectName'); args.SignSubjectName does not exist.
        PayloadDescriptorList.append (PayloadDescriptor (
                                      Buffer,
                                      args.Guid,
                                      args.FwVersion,
                                      args.LowestSupportedVersion,
                                      args.MonotonicCount,
                                      args.HardwareInstance,
                                      args.UpdateImageIndex,
                                      args.SignToolPfxFile,
                                      args.SignToolSubjectName,
                                      args.OpenSslSignerPrivateCertFile,
                                      args.OpenSslOtherPublicCertFile,
                                      args.OpenSslTrustedPublicCertFile,
                                      args.SigningToolPath,
                                      None
                                      ))
    #
    # Perform additional verification on payload descriptors
    #
    for SinglePayloadDescriptor in PayloadDescriptorList:
        try:
            SinglePayloadDescriptor.Validate (args)
        except Exception as Msg:
            print ('GenerateCapsule: error:' + str(Msg))
            sys.exit (1)
    try:
        Result = UefiCapsuleHeader.Decode (Buffer)
        if len (Result) > 0:
            Result = FmpCapsuleHeader.Decode (Result)
            if args.JsonFile:
                # The JSON file must describe exactly the payloads present in
                # the capsule, in the same order and with matching identity.
                if FmpCapsuleHeader.PayloadItemCount != len (PayloadDescriptorList):
                    CapsulePayloadNum = FmpCapsuleHeader.PayloadItemCount
                    JsonPayloadNum = len (PayloadDescriptorList)
                    print ('GenerateCapsule: Decode error: {JsonPayloadNumber} payloads in JSON file {File} and {CapsulePayloadNumber} payloads in Capsule {CapsuleName}'.format (JsonPayloadNumber = JsonPayloadNum, File = args.JsonFile.name, CapsulePayloadNumber = CapsulePayloadNum, CapsuleName = args.InputFile.name))
                    sys.exit (1)
                for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
                    if Index < len (PayloadDescriptorList):
                        GUID = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageTypeId
                        HardwareInstance = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateHardwareInstance
                        UpdateImageIndex = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageIndex
                        if PayloadDescriptorList[Index].Guid != GUID or PayloadDescriptorList[Index].HardwareInstance != HardwareInstance:
                            print ('GenerateCapsule: Decode error: Guid or HardwareInstance pair in input JSON file {File} does not match the payload {PayloadIndex} in Capsule {InputCapsule}'.format (File = args.JsonFile.name, PayloadIndex = Index + 1, InputCapsule = args.InputFile.name))
                            sys.exit (1)
                        PayloadDescriptorList[Index].Payload = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
                        DecodeJsonOutput = args.OutputFile.name + '.Payload.{Index:d}.bin'.format (Index = Index + 1)
                        PayloadJsonDescriptorList.append (PayloadDescriptor (
                                                          DecodeJsonOutput,
                                                          GUID,
                                                          None,
                                                          None,
                                                          None,
                                                          HardwareInstance,
                                                          UpdateImageIndex,
                                                          PayloadDescriptorList[Index].SignToolPfxFile,
                                                          PayloadDescriptorList[Index].SignToolSubjectName,
                                                          PayloadDescriptorList[Index].OpenSslSignerPrivateCertFile,
                                                          PayloadDescriptorList[Index].OpenSslOtherPublicCertFile,
                                                          PayloadDescriptorList[Index].OpenSslTrustedPublicCertFile,
                                                          PayloadDescriptorList[Index].SigningToolPath,
                                                          None
                                                          ))
            else:
                # No JSON file: descriptor 0 came from the command line; build
                # blank descriptors for any additional payloads in the capsule.
                PayloadDescriptorList[0].Payload = FmpCapsuleHeader.GetFmpCapsuleImageHeader (0).Payload
                for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
                    if Index > 0:
                        PayloadDecodeFile = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
                        PayloadDescriptorList.append (PayloadDescriptor (PayloadDecodeFile,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None,
                                                                         None
                                                                         ))
                    GUID = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageTypeId
                    HardwareInstance = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateHardwareInstance
                    UpdateImageIndex = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).UpdateImageIndex
                    DecodeJsonOutput = args.OutputFile.name + '.Payload.{Index:d}.bin'.format (Index = Index + 1)
                    PayloadJsonDescriptorList.append (PayloadDescriptor (
                                                      DecodeJsonOutput,
                                                      GUID,
                                                      None,
                                                      None,
                                                      None,
                                                      HardwareInstance,
                                                      UpdateImageIndex,
                                                      PayloadDescriptorList[Index].SignToolPfxFile,
                                                      PayloadDescriptorList[Index].SignToolSubjectName,
                                                      PayloadDescriptorList[Index].OpenSslSignerPrivateCertFile,
                                                      PayloadDescriptorList[Index].OpenSslOtherPublicCertFile,
                                                      PayloadDescriptorList[Index].OpenSslTrustedPublicCertFile,
                                                      PayloadDescriptorList[Index].SigningToolPath,
                                                      None
                                                      ))
            JsonIndex = 0
            for SinglePayloadDescriptor in PayloadDescriptorList:
                if args.Verbose:
                    print ('========')
                    UefiCapsuleHeader.DumpInfo ()
                    print ('--------')
                    FmpCapsuleHeader.DumpInfo ()
                if FmpAuthHeader.IsSigned(SinglePayloadDescriptor.Payload):
                    if not SinglePayloadDescriptor.UseOpenSsl and not SinglePayloadDescriptor.UseSignTool:
                        print ('GenerateCapsule: decode warning: can not verify signed payload without cert or pfx file. Index = {Index}'.format (Index = JsonIndex + 1))
                    SinglePayloadDescriptor.Payload = FmpAuthHeader.Decode (SinglePayloadDescriptor.Payload)
                    PayloadJsonDescriptorList[JsonIndex].MonotonicCount = FmpAuthHeader.MonotonicCount
                    if args.Verbose:
                        print ('--------')
                        FmpAuthHeader.DumpInfo ()
                    #
                    # Verify Image with 64-bit MonotonicCount appended to end of image
                    #
                    try:
                        if SinglePayloadDescriptor.UseSignTool:
                            CertData = VerifyPayloadSignTool (
                                         FmpAuthHeader.Payload + struct.pack ('<Q', FmpAuthHeader.MonotonicCount),
                                         FmpAuthHeader.CertData,
                                         SinglePayloadDescriptor.SigningToolPath,
                                         SinglePayloadDescriptor.SignToolPfxFile,
                                         SinglePayloadDescriptor.SignToolSubjectName,
                                         Verbose = args.Verbose
                                         )
                        else:
                            CertData = VerifyPayloadOpenSsl (
                                         FmpAuthHeader.Payload + struct.pack ('<Q', FmpAuthHeader.MonotonicCount),
                                         FmpAuthHeader.CertData,
                                         SinglePayloadDescriptor.SigningToolPath,
                                         SinglePayloadDescriptor.OpenSslSignerPrivateCertFile,
                                         SinglePayloadDescriptor.OpenSslOtherPublicCertFile,
                                         SinglePayloadDescriptor.OpenSslTrustedPublicCertFile,
                                         Verbose = args.Verbose
                                         )
                    except Exception as Msg:
                        print ('GenerateCapsule: warning: payload verification failed Index = {Index} \n'.format (Index = JsonIndex + 1) + str(Msg))
                else:
                    if args.Verbose:
                        print ('--------')
                        print ('No EFI_FIRMWARE_IMAGE_AUTHENTICATION')
                # FIX: struct.unpack() returns a tuple; take element [0] so the
                # comparison against the integer FmpPayloadHeader.Signature can
                # ever succeed (previously tuple != int was always True).
                PayloadSignature = struct.unpack ('<I', SinglePayloadDescriptor.Payload[0:4])[0]
                if PayloadSignature != FmpPayloadHeader.Signature:
                    SinglePayloadDescriptor.UseDependency = True
                    try:
                        SinglePayloadDescriptor.Payload = CapsuleDependency.Decode (SinglePayloadDescriptor.Payload)
                        PayloadJsonDescriptorList[JsonIndex].DepexExp = CapsuleDependency.DepexExp
                        if args.Verbose:
                            print ('--------')
                            CapsuleDependency.DumpInfo ()
                    except Exception as Msg:
                        print ('GenerateCapsule: error: invalid dependency expression')
                else:
                    if args.Verbose:
                        print ('--------')
                        print ('No EFI_FIRMWARE_IMAGE_DEP')
                try:
                    SinglePayloadDescriptor.Payload = FmpPayloadHeader.Decode (SinglePayloadDescriptor.Payload)
                    PayloadJsonDescriptorList[JsonIndex].FwVersion = FmpPayloadHeader.FwVersion
                    PayloadJsonDescriptorList[JsonIndex].LowestSupportedVersion = FmpPayloadHeader.LowestSupportedVersion
                    JsonIndex = JsonIndex + 1
                    if args.Verbose:
                        print ('--------')
                        FmpPayloadHeader.DumpInfo ()
                        print ('========')
                except:
                    if args.Verbose:
                        print ('--------')
                        print ('No FMP_PAYLOAD_HEADER')
                        print ('========')
                    sys.exit (1)
            #
            # Write embedded driver file(s)
            #
            for Index in range (0, FmpCapsuleHeader.EmbeddedDriverCount):
                EmbeddedDriverBuffer = FmpCapsuleHeader.GetEmbeddedDriver (Index)
                EmbeddedDriverPath = args.OutputFile.name + '.EmbeddedDriver.{Index:d}.efi'.format (Index = Index + 1)
                try:
                    if args.Verbose:
                        print ('Write embedded driver file {File}'.format (File = EmbeddedDriverPath))
                    with open (EmbeddedDriverPath, 'wb') as EmbeddedDriverFile:
                        EmbeddedDriverFile.write (EmbeddedDriverBuffer)
                except:
                    print ('GenerateCapsule: error: can not write embedded driver file {File}'.format (File = EmbeddedDriverPath))
                    sys.exit (1)
    except:
        print ('GenerateCapsule: error: can not decode capsule')
        sys.exit (1)
    GenerateOutputJson(PayloadJsonDescriptorList)
    PayloadIndex = 0
    for SinglePayloadDescriptor in PayloadDescriptorList:
        if args.OutputFile is None:
            print ('GenerateCapsule: Decode error: OutputFile is needed for decode output')
            sys.exit (1)
        try:
            if args.Verbose:
                print ('Write binary output file {File}'.format (File = args.OutputFile.name))
            PayloadDecodePath = args.OutputFile.name + '.Payload.{Index:d}.bin'.format (Index = PayloadIndex + 1)
            with open (PayloadDecodePath, 'wb') as PayloadDecodeFile:
                PayloadDecodeFile.write (SinglePayloadDescriptor.Payload)
            PayloadIndex = PayloadIndex + 1
        except:
            # FIX: PayloadDescriptor has no OutputFile attribute; referencing it
            # here raised AttributeError inside the error handler itself.
            print ('GenerateCapsule: error: can not write binary output file {File}'.format (File = args.OutputFile.name))
            sys.exit (1)
def DumpInfo (Buffer, args):
    """Print the structure of the capsule image in Buffer to stdout.

    Dumps each header layer (UEFI capsule, FMP capsule, per-payload auth
    header, dependency expression, FMP payload header) as it is found.
    Exits the process with status 1 if the capsule cannot be decoded.
    Raises argparse.ArgumentTypeError if --output was supplied (not valid
    for dump operations).
    """
    if args.OutputFile is not None:
        raise argparse.ArgumentTypeError ('the following option is not supported for dumpinfo operations: --output')
    try:
        Result = UefiCapsuleHeader.Decode (Buffer)
        print ('========')
        UefiCapsuleHeader.DumpInfo ()
        if len (Result) > 0:
            FmpCapsuleHeader.Decode (Result)
            print ('--------')
            FmpCapsuleHeader.DumpInfo ()
            for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
                Result = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
                try:
                    Result = FmpAuthHeader.Decode (Result)
                    print ('--------')
                    FmpAuthHeader.DumpInfo ()
                except:
                    print ('--------')
                    print ('No EFI_FIRMWARE_IMAGE_AUTHENTICATION')
                # FIX: struct.unpack() returns a tuple; take element [0] so the
                # comparison against the integer FmpPayloadHeader.Signature can
                # ever succeed (previously tuple != int was always True, so a
                # dependency decode was attempted for every payload).
                PayloadSignature = struct.unpack ('<I', Result[0:4])[0]
                if PayloadSignature != FmpPayloadHeader.Signature:
                    try:
                        Result = CapsuleDependency.Decode (Result)
                        print ('--------')
                        CapsuleDependency.DumpInfo ()
                    except:
                        print ('GenerateCapsule: error: invalid dependency expression')
                else:
                    print ('--------')
                    print ('No EFI_FIRMWARE_IMAGE_DEP')
                try:
                    Result = FmpPayloadHeader.Decode (Result)
                    print ('--------')
                    FmpPayloadHeader.DumpInfo ()
                except:
                    print ('--------')
                    print ('No FMP_PAYLOAD_HEADER')
                print ('========')
    except:
        print ('GenerateCapsule: error: can not decode capsule')
        sys.exit (1)
#
# Create command line argument parser object
#
parser = argparse.ArgumentParser (
    prog = __prog__,
    description = __description__ + __copyright__,
    conflict_handler = 'resolve',
    fromfile_prefix_chars = '@'
)
# Allow @file argument files with one flag per line (helper defined above).
parser.convert_arg_line_to_args = convert_arg_line_to_args
#
# Add input and output file arguments
#
parser.add_argument("InputFile", type = argparse.FileType('rb'), nargs='?',
                    help = "Input binary payload filename.")
parser.add_argument("-o", "--output", dest = 'OutputFile', type = argparse.FileType('wb'),
                    help = "Output filename.")
#
# Add group for -e and -d flags that are mutually exclusive and required
#
group = parser.add_mutually_exclusive_group (required = True)
group.add_argument ("-e", "--encode", dest = 'Encode', action = "store_true",
                    help = "Encode file")
group.add_argument ("-d", "--decode", dest = 'Decode', action = "store_true",
                    help = "Decode file")
group.add_argument ("--dump-info", dest = 'DumpInfo', action = "store_true",
                    help = "Display FMP Payload Header information")
#
# Add optional arguments for this command
#
parser.add_argument ("-j", "--json-file", dest = 'JsonFile', type=argparse.FileType('r'),
                     help = "JSON configuration file for multiple payloads and embedded drivers.")
parser.add_argument ("--capflag", dest = 'CapsuleFlag', action='append', default = [],
                     choices=['PersistAcrossReset', 'InitiateReset'],
                     help = "Capsule flag can be PersistAcrossReset or InitiateReset or not set")
parser.add_argument ("--capoemflag", dest = 'CapsuleOemFlag', type = ValidateUnsignedInteger, default = 0x0000,
                     help = "Capsule OEM Flag is an integer between 0x0000 and 0xffff.")
parser.add_argument ("--guid", dest = 'Guid', type = ValidateRegistryFormatGuid,
                     help = "The FMP/ESRT GUID in registry format. Required for single payload encode operations.")
parser.add_argument ("--hardware-instance", dest = 'HardwareInstance', type = ValidateUnsignedInteger, default = 0x0000000000000000,
                     help = "The 64-bit hardware instance. The default is 0x0000000000000000")
parser.add_argument ("--monotonic-count", dest = 'MonotonicCount', type = ValidateUnsignedInteger, default = 0x0000000000000000,
                     help = "64-bit monotonic count value in header. Default is 0x0000000000000000.")
parser.add_argument ("--fw-version", dest = 'FwVersion', type = ValidateUnsignedInteger,
                     help = "The 32-bit version of the binary payload (e.g. 0x11223344 or 5678). Required for encode operations.")
parser.add_argument ("--lsv", dest = 'LowestSupportedVersion', type = ValidateUnsignedInteger,
                     help = "The 32-bit lowest supported version of the binary payload (e.g. 0x11223344 or 5678). Required for encode operations.")
parser.add_argument ("--pfx-file", dest='SignToolPfxFile', type=argparse.FileType('rb'),
                     help="signtool PFX certificate filename.")
parser.add_argument ("--subject-name", dest='SignToolSubjectName',
                     help="signtool certificate subject name.")
parser.add_argument ("--signer-private-cert", dest='OpenSslSignerPrivateCertFile', type=argparse.FileType('rb'),
                     help="OpenSSL signer private certificate filename.")
parser.add_argument ("--other-public-cert", dest='OpenSslOtherPublicCertFile', type=argparse.FileType('rb'),
                     help="OpenSSL other public certificate filename.")
parser.add_argument ("--trusted-public-cert", dest='OpenSslTrustedPublicCertFile', type=argparse.FileType('rb'),
                     help="OpenSSL trusted public certificate filename.")
parser.add_argument ("--signing-tool-path", dest = 'SigningToolPath',
                     help = "Path to signtool or OpenSSL tool. Optional if path to tools are already in PATH.")
parser.add_argument ("--embedded-driver", dest = 'EmbeddedDriver', type = argparse.FileType('rb'), action='append', default = [],
                     help = "Path to embedded UEFI driver to add to capsule.")
#
# Add optional arguments common to all operations
#
parser.add_argument ('--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument ("-v", "--verbose", dest = 'Verbose', action = "store_true",
                     help = "Turn on verbose output with informational messages printed, including capsule headers and warning messages.")
parser.add_argument ("-q", "--quiet", dest = 'Quiet', action = "store_true",
                     help = "Disable all messages except fatal errors.")
parser.add_argument ("--debug", dest = 'Debug', type = int, metavar = '[0-9]', choices = range (0, 10), default = 0,
                     help = "Set debug level")
parser.add_argument ("--update-image-index", dest = 'UpdateImageIndex', type = ValidateUnsignedInteger, default = 0x01, help = "unique number identifying the firmware image within the device ")
#
# Parse command line arguments
#
args = parser.parse_args()
#
# Read binary input file
#
# NOTE(review): Buffer defaults to a str but InputFile.read() yields bytes
# ('rb' mode) — confirm the no-InputFile paths tolerate the '' default.
Buffer = ''
if args.InputFile:
    if os.path.getsize (args.InputFile.name) == 0:
        print ('GenerateCapsule: error: InputFile {File} is empty'.format (File = args.InputFile.name))
        sys.exit (1)
    try:
        if args.Verbose:
            print ('Read binary input file {File}'.format (File = args.InputFile.name))
        Buffer = args.InputFile.read ()
        args.InputFile.close ()
    except:
        print ('GenerateCapsule: error: can not read binary input file {File}'.format (File = args.InputFile.name))
        sys.exit (1)
#
# Create objects
#
# Shared header parser/builder objects used by Encode/Decode/DumpInfo above.
UefiCapsuleHeader = UefiCapsuleHeaderClass ()
FmpCapsuleHeader  = FmpCapsuleHeaderClass ()
FmpAuthHeader     = FmpAuthHeaderClass ()
FmpPayloadHeader  = FmpPayloadHeaderClass ()
CapsuleDependency = CapsuleDependencyClass ()
EmbeddedDriverDescriptorList = []
PayloadDescriptorList = []
PayloadJsonDescriptorList = []
#
#Encode Operation
#
if args.Encode:
    Encode (PayloadDescriptorList, EmbeddedDriverDescriptorList, Buffer)
#
#Decode Operation
#
if args.Decode:
    Decode (PayloadDescriptorList, PayloadJsonDescriptorList, Buffer)
#
#Dump Info Operation
#
if args.DumpInfo:
    DumpInfo (Buffer, args)
if args.Verbose:
    print('Success')
| edk2-master | BaseTools/Source/Python/Capsule/GenerateCapsule.py |
##
# UefiBuild Plugin that supports Window Capsule files based on the
# Windows Firmware Update Platform spec.
# Creates INF, Cat, and then signs it
#
# To install run pip install --upgrade edk2-pytool-library
# edk2-pytool-library-0.9.1 is required.
#
# Copyright (c) Microsoft Corporation. All rights reserved.
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import sys
import re
import datetime
import os
import logging
from edk2toollib.windows.capsule.cat_generator import CatGenerator
from edk2toollib.windows.capsule.inf_generator import InfGenerator
from edk2toollib.utility_functions import CatalogSignWithSignTool
from edk2toollib.windows.locate_tools import FindToolInWinSdk
class WindowsCapsuleSupportHelper(object):
    """UefiBuild helper that produces Windows Firmware Update Platform
    packaging files for a UEFI capsule: a driver INF plus a catalog file,
    with optional signtool signing of the catalog."""

    def RegisterHelpers(self, obj):
        """Register the packaging routine with the UefiBuild helper registry."""
        source_path = os.path.abspath(__file__)
        obj.Register("PackageWindowsCapsuleFiles", WindowsCapsuleSupportHelper.PackageWindowsCapsuleFiles, source_path)

    @staticmethod
    def PackageWindowsCapsuleFiles(OutputFolder, ProductName, ProductFmpGuid, CapsuleVersion_DotString,
            CapsuleVersion_HexString, ProductFwProvider, ProductFwMfgName, ProductFwDesc, CapsuleFileName, PfxFile=None, PfxPass=None,
            Rollback=False, Arch='amd64', OperatingSystem_String='Win10'):
        """Create <ProductName>.inf and <ProductName>.cat in OutputFolder.

        When PfxFile is supplied, the catalog is signed via signtool.exe
        located in the installed Windows SDK. Raises Exception on any
        non-zero tool result; returns the last tool's return code.
        """
        logging.debug("CapsulePackage: Create Windows Capsule Files")

        # Build the driver INF that wraps the capsule payload.
        inf_path = os.path.join(OutputFolder, ProductName + ".inf")
        inf_generator = InfGenerator(ProductName, ProductFwProvider, ProductFmpGuid, Arch, ProductFwDesc, CapsuleVersion_DotString, CapsuleVersion_HexString)
        inf_generator.Manufacturer = ProductFwMfgName  # optional field
        ret = inf_generator.MakeInf(inf_path, CapsuleFileName, Rollback)
        if ret != 0:
            raise Exception("CreateWindowsInf Failed with errorcode %d" % ret)

        # Build the catalog for the target OS / architecture.
        cat_path = os.path.realpath(os.path.join(OutputFolder, ProductName + ".cat"))
        cat_generator = CatGenerator(Arch, OperatingSystem_String)
        ret = cat_generator.MakeCat(cat_path)
        if ret != 0:
            raise Exception("Creating Cat file Failed with errorcode %d" % ret)

        if PfxFile is not None:
            # Locate signtool in the Windows SDK and (dev-)sign the catalog.
            sign_tool = FindToolInWinSdk("signtool.exe")
            if not os.path.exists(sign_tool):
                raise Exception("Can't find signtool on this machine.")
            ret = CatalogSignWithSignTool(sign_tool, cat_path, PfxFile, PfxPass)
            if ret != 0:
                raise Exception("Signing Cat file Failed with errorcode %d" % ret)
        return ret
| edk2-master | BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py |
## @file
# This file is used to define the FMMT dependent external tool.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
| edk2-master | BaseTools/Source/Python/FMMT/__init__.py |
# @file
# Firmware Module Management Tool.
#
# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import argparse
from core.FMMTOperation import *
parser = argparse.ArgumentParser(description='''
View the Binary Structure of FD/FV/Ffs/Section, and Delete/Extract/Add/Replace a Ffs from/into a FV.
''')
parser.add_argument("--version", action="version", version='%(prog)s Version 1.0',
help="Print debug information.")
parser.add_argument("-v", "--View", dest="View", nargs='+',
help="View each FV and the named files within each FV: '-v inputfile outputfile, inputfiletype(.Fd/.Fv/.ffs/.sec)'")
parser.add_argument("-d", "--Delete", dest="Delete", nargs='+',
help="Delete a Ffs from FV: '-d inputfile TargetFvName(Optional) TargetFfsName outputfile\
If not given TargetFvName, all the existed target Ffs will be deleted'")
parser.add_argument("-e", "--Extract", dest="Extract", nargs='+',
help="Extract a Ffs Info: '-e inputfile TargetFvName(Optional) TargetFfsName outputfile\
If not given TargetFvName, the first found target Ffs will be extracted.\
If only given TargetFvName, not given TargetFfsName, the TargetFv will be extracted to output file'")
parser.add_argument("-a", "--Add", dest="Add", nargs='+',
help="Add a Ffs into a FV:'-a inputfile TargetFvName newffsfile outputfile'")
parser.add_argument("-r", "--Replace", dest="Replace", nargs='+',
help="Replace a Ffs in a FV: '-r inputfile TargetFvName(Optional) TargetFfsName newffsfile outputfile\
If not given TargetFvName, all the existed target Ffs will be replaced with new Ffs file)'")
parser.add_argument("-l", "--LayoutFileName", dest="LayoutFileName", nargs='+',
help="The output file which saves Binary layout: '-l xxx.txt'/'-l xxx.json'\
If only provide file format as 'txt', \
the file will be generated with default name (Layout_'InputFileName'.txt). \
Currently supports two formats: json, txt. More formats will be added in the future")
parser.add_argument("-c", "--ConfigFilePath", dest="ConfigFilePath", nargs='+',
help="Provide the target FmmtConf.ini file path: '-c C:\Code\FmmtConf.ini' \
FmmtConf file saves the target guidtool used in compress/uncompress process.\
If do not provide, FMMT tool will search the inputfile folder for FmmtConf.ini firstly, if not found,\
the FmmtConf.ini saved in FMMT tool's folder will be used as default.")
parser.add_argument("-s", "--ShrinkFv", dest="ShrinkFv", nargs='+',
help="Shrink the Fv file: '-s InputFvfile OutputFvfile")
def print_banner():
    """Emit a single blank line before tool output."""
    print()
class FMMT():
    """Driver object for the FMMT firmware-module operations
    (view / delete / extract / add / replace / shrink)."""

    def __init__(self) -> None:
        # Currently unused cache; kept for interface compatibility.
        self.firmware_packet = {}

    def SetConfigFilePath(self, configfilepath: str) -> None:
        """Point guid-tool lookup at an explicit FmmtConf.ini file."""
        os.environ['FmmtConfPath'] = os.path.abspath(configfilepath)

    def SetDestPath(self, inputfile: str) -> None:
        """Record the input file's folder and prepend it to PATH so tools
        (and a local FmmtConf.ini) next to the input file are found first."""
        os.environ['FmmtConfPath'] = ''
        self.dest_path = os.path.dirname(os.path.abspath(inputfile))
        old_env = os.environ['PATH']
        os.environ['PATH'] = self.dest_path + os.pathsep + old_env

    def CheckFfsName(self, FfsName: str):
        """Return FfsName as a uuid.UUID when it parses as a GUID, else the
        original value.

        FIX: narrowed from a bare ``except`` (which also swallowed
        SystemExit/KeyboardInterrupt) to the exceptions uuid.UUID raises
        for bad input.
        """
        try:
            return uuid.UUID(FfsName)
        except (ValueError, TypeError, AttributeError):
            return FfsName

    def GetFvName(self, FvName: str):
        """Return FvName as a uuid.UUID when it parses as a GUID, else the
        original value (same narrowing as CheckFfsName)."""
        try:
            return uuid.UUID(FvName)
        except (ValueError, TypeError, AttributeError):
            return FvName

    def View(self, inputfile: str, layoutfilename: str=None, outputfile: str=None) -> None:
        """Parse inputfile and print/save its binary layout.

        The root tree type is selected from the file extension; unknown
        extensions are treated as a full FD image (same as before).
        """
        self.SetDestPath(inputfile)
        filetype = os.path.splitext(inputfile)[1].lower()
        root_type_map = {
            '.fd':  ROOT_TREE,
            '.fv':  ROOT_FV_TREE,
            '.ffs': ROOT_FFS_TREE,
            '.sec': ROOT_SECTION_TREE,
        }
        ROOT_TYPE = root_type_map.get(filetype, ROOT_TREE)
        ViewFile(inputfile, ROOT_TYPE, layoutfilename, outputfile)

    def Delete(self, inputfile: str, TargetFfs_name: str, outputfile: str, Fv_name: str=None) -> None:
        """Delete a Ffs (optionally limited to one Fv) and save the result."""
        self.SetDestPath(inputfile)
        if Fv_name:
            DeleteFfs(inputfile, self.CheckFfsName(TargetFfs_name), outputfile, self.GetFvName(Fv_name))
        else:
            DeleteFfs(inputfile, self.CheckFfsName(TargetFfs_name), outputfile)

    def Extract(self, inputfile: str, Ffs_name: str, outputfile: str, Fv_name: str=None) -> None:
        """Extract a Ffs (optionally limited to one Fv) into outputfile."""
        self.SetDestPath(inputfile)
        if Fv_name:
            ExtractFfs(inputfile, self.CheckFfsName(Ffs_name), outputfile, self.GetFvName(Fv_name))
        else:
            ExtractFfs(inputfile, self.CheckFfsName(Ffs_name), outputfile)

    def Add(self, inputfile: str, Fv_name: str, newffsfile: str, outputfile: str) -> None:
        """Add newffsfile into the named Fv and save the result."""
        self.SetDestPath(inputfile)
        AddNewFfs(inputfile, self.CheckFfsName(Fv_name), newffsfile, outputfile)

    def Replace(self, inputfile: str, Ffs_name: str, newffsfile: str, outputfile: str, Fv_name: str=None) -> None:
        """Replace a Ffs (optionally limited to one Fv) with newffsfile."""
        self.SetDestPath(inputfile)
        if Fv_name:
            ReplaceFfs(inputfile, self.CheckFfsName(Ffs_name), newffsfile, outputfile, self.GetFvName(Fv_name))
        else:
            ReplaceFfs(inputfile, self.CheckFfsName(Ffs_name), newffsfile, outputfile)

    def Shrink(self, inputfile: str, outputfile: str) -> None:
        """Shrink the Fv file, writing the result to outputfile."""
        self.SetDestPath(inputfile)
        ShrinkFv(inputfile, outputfile)
def main():
    """Parse command line arguments and dispatch the selected FMMT operation.

    Returns 0 on success and 1 when an operation raised an exception.
    (FIX: previously ``status`` was never set on failure, so the process
    always exited with 0 even after printing an error.)
    """
    args = parser.parse_args()
    status = 0
    try:
        fmmt = FMMT()
        if args.ConfigFilePath:
            fmmt.SetConfigFilePath(args.ConfigFilePath[0])
        if args.View:
            if args.LayoutFileName:
                fmmt.View(args.View[0], args.LayoutFileName[0])
            else:
                fmmt.View(args.View[0])
        elif args.Delete:
            # With 4 operands the second operand is the target Fv name.
            if len(args.Delete) == 4:
                fmmt.Delete(args.Delete[0], args.Delete[2], args.Delete[3], args.Delete[1])
            else:
                fmmt.Delete(args.Delete[0], args.Delete[1], args.Delete[2])
        elif args.Extract:
            if len(args.Extract) == 4:
                fmmt.Extract(args.Extract[0], args.Extract[2], args.Extract[3], args.Extract[1])
            else:
                fmmt.Extract(args.Extract[0], args.Extract[1], args.Extract[2])
        elif args.Add:
            fmmt.Add(args.Add[0], args.Add[1], args.Add[2], args.Add[3])
        elif args.Replace:
            # With 5 operands the second operand is the target Fv name.
            if len(args.Replace) == 5:
                fmmt.Replace(args.Replace[0], args.Replace[2], args.Replace[3], args.Replace[4], args.Replace[1])
            else:
                fmmt.Replace(args.Replace[0], args.Replace[1], args.Replace[2], args.Replace[3])
        elif args.ShrinkFv:
            fmmt.Shrink(args.ShrinkFv[0], args.ShrinkFv[1])
        else:
            parser.print_help()
    except Exception as e:
        print(e)
        status = 1
    return status
if __name__ == "__main__":
exit(main())
| edk2-master | BaseTools/Source/Python/FMMT/FMMT.py |
## @file
# This file is used to define the BIOS Tree Node.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from FirmwareStorageFormat.FvHeader import *
from FirmwareStorageFormat.FfsFileHeader import *
from FirmwareStorageFormat.SectionHeader import *
from FirmwareStorageFormat.Common import *
from utils.FmmtLogger import FmmtLogger as logger
import uuid
# Map of EFI section Type byte -> section name string used when labeling
# tree nodes (values follow the PI specification section type encodings).
SectionHeaderType = {
    0x01:'EFI_COMPRESSION_SECTION',
    0x02:'EFI_GUID_DEFINED_SECTION',
    0x03:'EFI_SECTION_DISPOSABLE',
    0x10:'EFI_SECTION_PE32',
    0x11:'EFI_SECTION_PIC',
    0x12:'EFI_SECTION_TE',
    0x13:'EFI_SECTION_DXE_DEPEX',
    0x14:'EFI_SECTION_VERSION',
    0x15:'EFI_SECTION_USER_INTERFACE',
    0x16:'EFI_SECTION_COMPATIBILITY16',
    0x17:'EFI_SECTION_FIRMWARE_VOLUME_IMAGE',
    0x18:'EFI_FREEFORM_SUBTYPE_GUID_SECTION',
    0x19:'EFI_SECTION_RAW',
    0x1B:'EFI_SECTION_PEI_DEPEX',
    0x1C:'EFI_SECTION_MM_DEPEX'
}
# Section types that carry extra type-specific fields after the common
# section header (handled by SectionNode.GetExtHeader below).
HeaderType = [0x01, 0x02, 0x14, 0x15, 0x18]
class BinaryNode:
    """Leaf tree node wrapping an unparsed binary region."""

    def __init__(self, name: str) -> None:
        self.Size = 0            # length of the raw data; filled in later
        self.Name = f"BINARY{name}"
        self.HOffset = 0         # offset of this region within the parent image
        self.Data = b''          # raw bytes; filled in later
class FvNode:
    """Tree node for one firmware volume (FV).

    Parses the FV header (and, when present, the extended header and its
    first extension entry) out of ``buffer`` and keeps bookkeeping fields
    (offsets, data, free space) that are filled in during tree construction.
    """
    def __init__(self, name, buffer: bytes) -> None:
        # First pass with the fixed-size header just to learn HeaderLength,
        # then re-parse with a header type refined to hold the block map
        # entries that follow the 56 fixed header bytes (8 bytes each).
        self.Header = EFI_FIRMWARE_VOLUME_HEADER.from_buffer_copy(buffer)
        Map_num = (self.Header.HeaderLength - 56)//8
        self.Header = Refine_FV_Header(Map_num).from_buffer_copy(buffer)
        self.FvId = "FV" + str(name)
        self.Name = "FV" + str(name)
        if self.Header.ExtHeaderOffset:
            # Extended header present: the FV name GUID lives there.
            self.ExtHeader = EFI_FIRMWARE_VOLUME_EXT_HEADER.from_buffer_copy(buffer[self.Header.ExtHeaderOffset:])
            self.Name = uuid.UUID(bytes_le=struct2stream(self.ExtHeader.FvName))
            # Extension entries start right after the 20-byte ext header.
            self.ExtEntryOffset = self.Header.ExtHeaderOffset + 20
            if self.ExtHeader.ExtHeaderSize != 20:
                self.ExtEntryExist = 1
                self.ExtEntry = EFI_FIRMWARE_VOLUME_EXT_ENTRY.from_buffer_copy(buffer[self.ExtEntryOffset:])
                self.ExtTypeExist = 1
                # Re-parse the entry with the concrete layout for its type:
                # 0x01 OEM type, 0x02 GUID type, 0x03 used-size type.
                if self.ExtEntry.ExtEntryType == 0x01:
                    nums = (self.ExtEntry.ExtEntrySize - 8) // 16
                    self.ExtEntry = Refine_FV_EXT_ENTRY_OEM_TYPE_Header(nums).from_buffer_copy(buffer[self.ExtEntryOffset:])
                elif self.ExtEntry.ExtEntryType == 0x02:
                    nums = self.ExtEntry.ExtEntrySize - 20
                    self.ExtEntry = Refine_FV_EXT_ENTRY_GUID_TYPE_Header(nums).from_buffer_copy(buffer[self.ExtEntryOffset:])
                elif self.ExtEntry.ExtEntryType == 0x03:
                    self.ExtEntry = EFI_FIRMWARE_VOLUME_EXT_ENTRY_USED_SIZE_TYPE.from_buffer_copy(buffer[self.ExtEntryOffset:])
                else:
                    self.ExtTypeExist = 0
            else:
                self.ExtEntryExist = 0
        self.Size = self.Header.FvLength
        self.HeaderLength = self.Header.HeaderLength
        self.HOffset = 0      # header offset within the parent image
        self.DOffset = 0      # data offset within the parent image
        self.ROffset = 0      # raw offset within the parent image
        self.Data = b''
        # 1213613663 == 0x4856465F, the little-endian '_FVH' signature.
        if self.Header.Signature != 1213613663:
            logger.error('Invalid Fv Header! Fv {} signature {} is not "_FVH".'.format(struct2stream(self.Header), self.Header.Signature))
            raise Exception("Process Failed: Fv Header Signature!")
        self.PadData = b''
        self.Free_Space = 0
        self.ModCheckSum()

    def ModCheckSum(self) -> None:
        # Fv Header Sums to 0: recompute the 16-bit checksum over the header
        # so that the sum of all 16-bit words is 0 mod 0x10000.
        Header = struct2stream(self.Header)[::-1]
        Size = self.HeaderLength // 2
        Sum = 0
        for i in range(Size):
            Sum += int(Header[i*2: i*2 + 2].hex(), 16)
        if Sum & 0xffff:
            # Subtract the old checksum's contribution before solving for
            # the new one.
            self.Header.Checksum = 0x10000 - (Sum - self.Header.Checksum) % 0x10000

    def ModFvExt(self) -> None:
        # If used space changes and self.ExtEntry.UsedSize exists, self.ExtEntry.UsedSize need to be changed.
        if self.Header.ExtHeaderOffset and self.ExtEntryExist and self.ExtTypeExist and self.ExtEntry.Hdr.ExtEntryType == 0x03:
            self.ExtEntry.UsedSize = self.Header.FvLength - self.Free_Space

    def ModFvSize(self) -> None:
        # If Fv Size changed, self.Header.FvLength and self.Header.BlockMap[i].NumBlocks need to be changed.
        BlockMapNum = len(self.Header.BlockMap)
        for i in range(BlockMapNum):
            if self.Header.BlockMap[i].Length:
                self.Header.BlockMap[i].NumBlocks = self.Header.FvLength // self.Header.BlockMap[i].Length

    def ModExtHeaderData(self) -> None:
        # Re-serialize the ext header (20 bytes) and the first ext entry back
        # into self.Data; offsets are relative to the end of the FV header.
        if self.Header.ExtHeaderOffset:
            ExtHeaderData = struct2stream(self.ExtHeader)
            ExtHeaderDataOffset = self.Header.ExtHeaderOffset - self.HeaderLength
            self.Data = self.Data[:ExtHeaderDataOffset] + ExtHeaderData + self.Data[ExtHeaderDataOffset+20:]
        if self.Header.ExtHeaderOffset and self.ExtEntryExist:
            ExtHeaderEntryData = struct2stream(self.ExtEntry)
            ExtHeaderEntryDataOffset = self.Header.ExtHeaderOffset + 20 - self.HeaderLength
            self.Data = self.Data[:ExtHeaderEntryDataOffset] + ExtHeaderEntryData + self.Data[ExtHeaderEntryDataOffset+len(ExtHeaderEntryData):]
class FfsNode:
    """Tree node for one FFS file.

    Parses the FFS file header out of ``buffer``, switching to the large
    (header2) layout when the FFS_ATTRIB_LARGE_FILE attribute (bit 0) is
    set, and keeps bookkeeping fields filled in during tree construction.
    """
    def __init__(self, buffer: bytes) -> None:
        self.Header = EFI_FFS_FILE_HEADER.from_buffer_copy(buffer)
        # A non-zero 24-bit size together with the large-file attribute is
        # contradictory (large files must carry size 0 in the small header).
        if self.Header.FFS_FILE_SIZE != 0 and self.Header.Attributes != 0xff and self.Header.Attributes & 0x01 == 1:
            logger.error('Error Ffs Header! Ffs {} Header Size and Attributes is not matched!'.format(uuid.UUID(bytes_le=struct2stream(self.Header.Name))))
            raise Exception("Process Failed: Error Ffs Header!")
        # Size 0 + large-file attribute set: re-parse with the extended header.
        if self.Header.FFS_FILE_SIZE == 0 and self.Header.Attributes & 0x01 == 1:
            self.Header = EFI_FFS_FILE_HEADER2.from_buffer_copy(buffer)
        self.Name = uuid.UUID(bytes_le=struct2stream(self.Header.Name))
        self.UiName = b''     # UI section name, filled in while parsing children
        self.Version = b''    # version section contents, filled in later
        self.Size = self.Header.FFS_FILE_SIZE
        self.HeaderLength = self.Header.HeaderLength
        self.HOffset = 0      # header offset within the parent image
        self.DOffset = 0      # data offset within the parent image
        self.ROffset = 0      # raw offset within the parent image
        self.Data = b''
        self.PadData = b''
        self.SectionMaxAlignment = SECTION_COMMON_ALIGNMENT # 4-align

    def ModCheckSum(self) -> None:
        # Recompute the 8-bit header checksum so the header bytes sum to 0,
        # excluding the State and file-data checksum fields per the PI spec.
        HeaderData = struct2stream(self.Header)
        HeaderSum = 0
        for item in HeaderData:
            HeaderSum += item
        HeaderSum -= self.Header.State
        HeaderSum -= self.Header.IntegrityCheck.Checksum.File
        if HeaderSum & 0xff:
            Header = self.Header.IntegrityCheck.Checksum.Header + 0x100 - HeaderSum % 0x100
            self.Header.IntegrityCheck.Checksum.Header = Header % 0x100
class SectionNode:
    """Tree node for one FFS section.

    Chooses the common or extended (section2) header by probing for the
    0xFFFFFF size marker, names the node from SectionHeaderType, and parses
    any type-specific extension header for the types listed in HeaderType.
    """
    def __init__(self, buffer: bytes) -> None:
        # A 24-bit size of 0xFFFFFF marks an EFI_COMMON_SECTION_HEADER2
        # (extended size follows the common header).
        if buffer[0:3] != b'\xff\xff\xff':
            self.Header = EFI_COMMON_SECTION_HEADER.from_buffer_copy(buffer)
        else:
            self.Header = EFI_COMMON_SECTION_HEADER2.from_buffer_copy(buffer)
        if self.Header.Type in SectionHeaderType:
            self.Name = SectionHeaderType[self.Header.Type]
        elif self.Header.Type == 0:
            self.Name = "EFI_SECTION_ALL"
        else:
            self.Name = "SECTION"
        if self.Header.Type in HeaderType:
            # Section types with extra fields after the common header.
            self.ExtHeader = self.GetExtHeader(self.Header.Type, buffer[self.Header.Common_Header_Size():], (self.Header.SECTION_SIZE-self.Header.Common_Header_Size()))
            self.HeaderLength = self.Header.Common_Header_Size() + self.ExtHeader.ExtHeaderSize()
        else:
            self.ExtHeader = None
            self.HeaderLength = self.Header.Common_Header_Size()
        self.Size = self.Header.SECTION_SIZE
        self.Type = self.Header.Type
        self.HOffset = 0      # header offset within the parent image
        self.DOffset = 0      # data offset within the parent image
        self.ROffset = 0      # raw offset within the parent image
        self.Data = b''
        self.OriData = b''    # original (e.g. still-compressed) data, if any
        self.OriHeader = b''  # original header bytes, if any
        self.PadData = b''
        self.IsPadSection = False
        self.SectionMaxAlignment = SECTION_COMMON_ALIGNMENT # 4-align

    def GetExtHeader(self, Type: int, buffer: bytes, nums: int=0) -> None:
        """Parse the type-specific extension header from ``buffer``.

        ``nums`` is the number of bytes remaining in the section after the
        common header; it sizes the variable-length version / UI strings
        (UCS-2, hence the //2). Returns None for types without an extension
        header (callers only pass types listed in HeaderType).
        """
        if Type == 0x01:
            return EFI_COMPRESSION_SECTION.from_buffer_copy(buffer)
        elif Type == 0x02:
            return EFI_GUID_DEFINED_SECTION.from_buffer_copy(buffer)
        elif Type == 0x14:
            return Get_VERSION_Header((nums - 2)//2).from_buffer_copy(buffer)
        elif Type == 0x15:
            return Get_USER_INTERFACE_Header(nums//2).from_buffer_copy(buffer)
        elif Type == 0x18:
            return EFI_FREEFORM_SUBTYPE_GUID_SECTION.from_buffer_copy(buffer)
class FreeSpaceNode:
    """Tree node representing an unused (free-space) region inside a firmware volume."""

    def __init__(self, buffer: bytes) -> None:
        self.Name = 'Free_Space'
        self.Data = buffer
        self.Size = len(buffer)
        # Offsets are filled in when the node is linked into the tree.
        self.HOffset = self.DOffset = self.ROffset = 0
        self.PadData = b''
| edk2-master | BaseTools/Source/Python/FMMT/core/BiosTreeNode.py |
## @file
# This file is used to the implementation of Bios layout handler.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
from core.BiosTree import *
from core.GuidTools import GUIDTools
from core.BiosTreeNode import *
from FirmwareStorageFormat.Common import *
from utils.FmmtLogger import FmmtLogger as logger
EFI_FVB2_ERASE_POLARITY = 0x00000800
def ChangeSize(TargetTree, size_delta: int=0) -> None:
    """Shrink a node's recorded size by size_delta (pass a negative delta to grow).

    Three layouts are handled: 2-style headers with a 64-bit ExtendedSize,
    guided sections (only the on-disk header size moves; Data.Size keeps
    tracking the decoded payload), and plain 24-bit-size headers.
    """
    def _store24(Header, NewSize: int) -> None:
        # Encode NewSize into the 3-byte little-endian Size field.
        Header.Size[0] = NewSize % 0x100
        Header.Size[1] = NewSize % 0x10000 // 0x100
        Header.Size[2] = NewSize // 0x10000
    HeaderKind = type(TargetTree.Data.Header)
    if HeaderKind == type(EFI_FFS_FILE_HEADER2()) or HeaderKind == type(EFI_COMMON_SECTION_HEADER2()):
        # Large-file/large-section headers carry the real size in ExtendedSize.
        TargetTree.Data.Size -= size_delta
        TargetTree.Data.Header.ExtendedSize -= size_delta
    elif TargetTree.type == SECTION_TREE and TargetTree.Data.OriData:
        # Guided section with compressed original data: adjust only the
        # header's stored size, not the decoded Data.Size.
        _store24(TargetTree.Data.Header, TargetTree.Data.Header.SECTION_SIZE - size_delta)
    else:
        TargetTree.Data.Size -= size_delta
        _store24(TargetTree.Data.Header, TargetTree.Data.Size)
def ModifyFfsType(TargetFfs) -> None:
    """Switch an Ffs node between EFI_FFS_FILE_HEADER and EFI_FFS_FILE_HEADER2
    when its size crosses the 24-bit (0xFFFFFF) boundary, and keep the parent
    volume's filesystem GUID (FFS2 vs FFS3) consistent with the header style.
    """
    # Promote to the 8-byte-larger HEADER2 when the file outgrows the
    # 24-bit size field; FFS_ATTRIB_LARGE_FILE must be set alongside.
    if type(TargetFfs.Data.Header) == type(EFI_FFS_FILE_HEADER()) and TargetFfs.Data.Size > 0xFFFFFF:
        ExtendSize = TargetFfs.Data.Header.FFS_FILE_SIZE + 8
        New_Header = EFI_FFS_FILE_HEADER2()
        New_Header.Name = TargetFfs.Data.Header.Name
        New_Header.IntegrityCheck = TargetFfs.Data.Header.IntegrityCheck
        New_Header.Type = TargetFfs.Data.Header.Type
        New_Header.Attributes = TargetFfs.Data.Header.Attributes | 0x01 # set the Attribute with FFS_ATTRIB_LARGE_FILE (0x01)
        # HEADER2 files keep the real size in ExtendedSize; the 24-bit Size
        # field is written as 0.
        NewSize = 0
        New_Header.Size[0] = NewSize % (16**2) # minus the delta size of Header
        New_Header.Size[1] = NewSize % (16**4) //(16**2)
        New_Header.Size[2] = NewSize // (16**4)
        New_Header.State = TargetFfs.Data.Header.State
        New_Header.ExtendedSize = ExtendSize
        TargetFfs.Data.Header = New_Header
        TargetFfs.Data.Size = TargetFfs.Data.Header.FFS_FILE_SIZE
        TargetFfs.Data.HeaderLength = TargetFfs.Data.Header.HeaderLength
        TargetFfs.Data.ModCheckSum()
    # Demote back to the compact header when the file fits in 24 bits again.
    elif type(TargetFfs.Data.Header) == type(EFI_FFS_FILE_HEADER2()) and TargetFfs.Data.Size <= 0xFFFFFF:
        New_Header = EFI_FFS_FILE_HEADER()
        New_Header.Name = TargetFfs.Data.Header.Name
        New_Header.IntegrityCheck = TargetFfs.Data.Header.IntegrityCheck
        New_Header.Type = TargetFfs.Data.Header.Type
        New_Header.Attributes = TargetFfs.Data.Header.Attributes - 1 # remove the FFS_ATTRIB_LARGE_FILE (0x01) from Attribute
        New_Header.Size[0] = (TargetFfs.Data.Size - 8) % (16**2) # minus the delta size of Header
        New_Header.Size[1] = (TargetFfs.Data.Size - 8) % (16**4) //(16**2)
        New_Header.Size[2] = (TargetFfs.Data.Size - 8) // (16**4)
        New_Header.State = TargetFfs.Data.Header.State
        TargetFfs.Data.Header = New_Header
        TargetFfs.Data.Size = TargetFfs.Data.Header.FFS_FILE_SIZE
        TargetFfs.Data.HeaderLength = TargetFfs.Data.Header.HeaderLength
        TargetFfs.Data.ModCheckSum()
        # If no HEADER2 file remains in the parent FFS3 volume, downgrade the
        # volume's filesystem GUID back to FFS2.
        if struct2stream(TargetFfs.Parent.Data.Header.FileSystemGuid) == EFI_FIRMWARE_FILE_SYSTEM3_GUID_BYTE:
            NeedChange = True
            for item in TargetFfs.Parent.Child:
                if type(item.Data.Header) == type(EFI_FFS_FILE_HEADER2()):
                    NeedChange = False
            if NeedChange:
                TargetFfs.Parent.Data.Header.FileSystemGuid = ModifyGuidFormat("8c8ce578-8a3d-4f1c-9935-896185c32dd3")
    # A HEADER2 file requires every enclosing FFS2 volume to advertise the
    # FFS3 filesystem GUID; walk all ancestors and upgrade them.
    if type(TargetFfs.Data.Header) == type(EFI_FFS_FILE_HEADER2()):
        TarParent = TargetFfs.Parent
        while TarParent:
            if TarParent.type == FV_TREE and struct2stream(TarParent.Data.Header.FileSystemGuid) == EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE:
                TarParent.Data.Header.FileSystemGuid = ModifyGuidFormat("5473C07A-3DCB-4dca-BD6F-1E9689E7349A")
            TarParent = TarParent.Parent
def PadSectionModify(PadSection, Offset) -> None:
    """Resize a pad section (positive Offset shrinks it, negative grows it)
    and refill its payload with 0xFF to match the new size."""
    ChangeSize(PadSection, Offset)
    NewPayloadLen = PadSection.Data.Size - PadSection.Data.HeaderLength
    PadSection.Data.Data = b'\xff' * NewPayloadLen
def ModifySectionType(TargetSection) -> None:
    """Switch a section node between EFI_COMMON_SECTION_HEADER and
    EFI_COMMON_SECTION_HEADER2 when its size crosses the 24-bit boundary,
    compensating the 4-byte header-size change via an adjacent pad section.
    """
    # If Section Size is increased larger than 0xFFFFFF, need modify Section Header from EFI_COMMON_SECTION_HEADER to EFI_COMMON_SECTION_HEADER2.
    if type(TargetSection.Data.Header) == type(EFI_COMMON_SECTION_HEADER()) and TargetSection.Data.Size >= 0xFFFFFF:
        New_Header = EFI_COMMON_SECTION_HEADER2()
        New_Header.Type = TargetSection.Data.Header.Type
        # HEADER2 sections store 0xFFFFFF in the 24-bit field and keep the
        # real size in ExtendedSize.
        NewSize = 0xFFFFFF
        New_Header.Size[0] = NewSize % (16**2) # minus the delta size of Header
        New_Header.Size[1] = NewSize % (16**4) //(16**2)
        New_Header.Size[2] = NewSize // (16**4)
        New_Header.ExtendedSize = TargetSection.Data.Size + 4
        TargetSection.Data.Header = New_Header
        TargetSection.Data.Size = TargetSection.Data.Header.SECTION_SIZE
        # Absorb the header's extra 4 bytes into the neighbouring pad section
        # (or create one) so the following sections stay correctly aligned.
        # NOTE(review): assumes TargetSection has a previous sibling
        # (LastRel) — appears to fail for a first child; confirm with caller.
        if TargetSection.LastRel.Data.IsPadSection:
            PadSectionModify(TargetSection.LastRel, -4)
        else:
            SecParent = TargetSection.Parent
            Target_index = SecParent.Child.index(TargetSection)
            # b'\x00\x00\x00\x19': 4-byte raw (type 0x19) pad section.
            NewPadSection = SectionNode(b'\x00\x00\x00\x19')
            SecParent.insertChild(NewPadSection, Target_index)
    # If Section Size is decreased smaller than 0xFFFFFF, need modify Section Header from EFI_COMMON_SECTION_HEADER2 to EFI_COMMON_SECTION_HEADER.
    elif type(TargetSection.Data.Header) == type(EFI_COMMON_SECTION_HEADER2()) and TargetSection.Data.Size < 0xFFFFFF:
        New_Header = EFI_COMMON_SECTION_HEADER()
        New_Header.Type = TargetSection.Data.Header.Type
        New_Header.Size[0] = (TargetSection.Data.Size - 4) % (16**2) # minus the delta size of Header
        New_Header.Size[1] = (TargetSection.Data.Size - 4) % (16**4) //(16**2)
        New_Header.Size[2] = (TargetSection.Data.Size - 4) // (16**4)
        TargetSection.Data.Header = New_Header
        TargetSection.Data.Size = TargetSection.Data.Header.SECTION_SIZE
        # Compensate the 4-byte header shrink via the neighbouring pad section.
        # NOTE(review): this branch grows the pad by 4 (PadSectionModify with
        # -4), the same direction as the grow branch above — looks suspicious
        # for a shrink; left unchanged, verify against the on-disk layout.
        if TargetSection.LastRel.Data.IsPadSection:
            PadSectionModify(TargetSection.LastRel, -4)
        else:
            SecParent = TargetSection.Parent
            Target_index = SecParent.Child.index(TargetSection)
            NewPadSection = SectionNode(b'\x00\x00\x00\x19')
            SecParent.insertChild(NewPadSection, Target_index)
def ModifyFvExtData(TreeNode) -> None:
    """Re-serialize the Fv extended header (plus its entries, when present)
    into the leading bytes of the Fv's first child info node."""
    Serialized = b''
    if TreeNode.Data.Header.ExtHeaderOffset:
        Serialized += struct2stream(TreeNode.Data.ExtHeader)
        if TreeNode.Data.ExtEntryExist:
            Serialized += struct2stream(TreeNode.Data.ExtEntry)
    if Serialized:
        # The ext header region lives at the start of the first child's data;
        # overwrite it and refresh that node's checksum.
        InfoNode = TreeNode.Child[0]
        InfoNode.Data.Data = Serialized + InfoNode.Data.Data[TreeNode.Data.ExtHeader.ExtHeaderSize:]
        InfoNode.Data.ModCheckSum()
def ModifyFvSystemGuid(TargetFv) -> None:
    """Upgrade an FFS2 volume's filesystem GUID to FFS3, refresh its header
    checksum, and rebuild its raw Data from the current child nodes."""
    if struct2stream(TargetFv.Data.Header.FileSystemGuid) == EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE:
        TargetFv.Data.Header.FileSystemGuid = ModifyGuidFormat("5473C07A-3DCB-4dca-BD6F-1E9689E7349A")
    TargetFv.Data.ModCheckSum()
    # Re-encapsulate: free-space children contribute raw bytes only, every
    # other child contributes header + payload (+ pad).
    Payload = b''
    for Child in TargetFv.Child:
        if Child.type == FFS_FREE_SPACE:
            Payload += Child.Data.Data + Child.Data.PadData
        else:
            Payload += struct2stream(Child.Data.Header) + Child.Data.Data + Child.Data.PadData
    TargetFv.Data.Data = Payload
class FvHandler:
    """Editing engine for a parsed firmware-volume BIOSTREE.

    Implements the FMMT tree operations (ReplaceFfs, AddFfs, DeleteFfs,
    ShrinkFv). Each operation rebalances free space, pad bytes, 24-bit size
    fields and checksums, and re-packs guided sections bottom-up so the
    re-encapsulated image stays self-consistent.

    Fixes applied in this revision:
    - ``Header.Fvlength`` typo in ModifyTest/ShrinkFv: on a ctypes structure
      the misspelled assignment silently created a new Python attribute and
      left the real ``FvLength`` field (used everywhere else) stale.
    - ReplaceFfs inserted the integer byte count instead of the newly built
      free-space tree node, corrupting the child list.
    """
    def __init__(self, NewFfs, TargetFfs=None) -> None:
        # NewFfs: node to insert (ShrinkFv passes the Fv node to shrink here).
        # TargetFfs: node to replace/delete, or the free-space node to fill.
        self.NewFfs = NewFfs
        self.TargetFfs = TargetFfs
        # Overall success flag returned by the public operations.
        self.Status = False
        # Free space (bytes) produced by inner re-packing that still has to be
        # migrated up into the 1st-level Fv.
        self.Remain_New_Free_Space = 0

    ## Use for Compress the Section Data
    def CompressData(self, TargetTree) -> None:
        """Re-serialize every node on the path from TargetTree up to the root,
        re-packing guided sections with their GUID tool on the way."""
        TreePath = TargetTree.GetTreePath()
        pos = len(TreePath)
        while pos:
            if not self.Status:
                if TreePath[pos-1].type == SECTION_TREE and TreePath[pos-1].Data.Type == 0x02:
                    # Guided section: re-pack with its definition GUID's tool.
                    self.CompressSectionData(TreePath[pos-1], None, TreePath[pos-1].Data.ExtHeader.SectionDefinitionGuid)
                else:
                    if pos == len(TreePath):
                        self.CompressSectionData(TreePath[pos-1], pos)
                    else:
                        self.CompressSectionData(TreePath[pos-1], None)
            pos -= 1

    def CompressSectionData(self, TargetTree, pos: int, GuidTool=None) -> None:
        """Rebuild TargetTree.Data from its children, fix pads/sizes/checksums
        for its node kind, and (when GuidTool is given) re-compress the data.

        pos is truthy only for the deepest node of the path being processed;
        GuidTool is the SectionDefinitionGuid of a guided section, or None.
        """
        NewData = b''
        temp_save_child = TargetTree.Child
        if TargetTree.Data:
            # Update current node data as adding all the header and data of its child node.
            for item in temp_save_child:
                if item.type == SECTION_TREE and not item.Data.OriData and item.Data.ExtHeader:
                    NewData += struct2stream(item.Data.Header) + struct2stream(item.Data.ExtHeader) + item.Data.Data + item.Data.PadData
                elif item.type == SECTION_TREE and item.Data.OriData and not item.Data.ExtHeader:
                    NewData += struct2stream(item.Data.Header) + item.Data.OriData + item.Data.PadData
                elif item.type == SECTION_TREE and item.Data.OriData and item.Data.ExtHeader:
                    NewData += struct2stream(item.Data.Header) + struct2stream(item.Data.ExtHeader) + item.Data.OriData + item.Data.PadData
                elif item.type == FFS_FREE_SPACE:
                    NewData += item.Data.Data + item.Data.PadData
                else:
                    NewData += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
            # If node is FFS_TREE, update Pad data and Header info.
            # Remain_New_Free_Space is used for move more free space into 1st level Fv.
            if TargetTree.type == FFS_TREE:
                New_Pad_Size = GetPadSize(len(NewData), 8)
                Size_delta = len(NewData) - len(TargetTree.Data.Data)
                ChangeSize(TargetTree, -Size_delta)
                Delta_Pad_Size = len(TargetTree.Data.PadData) - New_Pad_Size
                self.Remain_New_Free_Space += Delta_Pad_Size
                TargetTree.Data.PadData = b'\xff' * New_Pad_Size
                TargetTree.Data.ModCheckSum()
            # If node is FV_TREE, update Pad data and Header info.
            # Consume Remain_New_Free_Space to move more free space into 1st level Fv.
            # NOTE(review): `A or B and not pos` parses as `A or (B and not pos)`
            # by Python precedence — kept as-is; confirm the intent upstream.
            elif TargetTree.type == FV_TREE or TargetTree.type == SEC_FV_TREE and not pos:
                if self.Remain_New_Free_Space:
                    if TargetTree.Data.Free_Space:
                        TargetTree.Data.Free_Space += self.Remain_New_Free_Space
                        NewData += self.Remain_New_Free_Space * b'\xff'
                        TargetTree.Child[-1].Data.Data += self.Remain_New_Free_Space * b'\xff'
                    else:
                        TargetTree.Data.Data += self.Remain_New_Free_Space * b'\xff'
                        New_Free_Space = BIOSTREE('FREE_SPACE')
                        New_Free_Space.type = FFS_FREE_SPACE
                        New_Free_Space.Data = FreeSpaceNode(b'\xff' * self.Remain_New_Free_Space)
                        TargetTree.insertChild(New_Free_Space)
                    self.Remain_New_Free_Space = 0
                if TargetTree.type == SEC_FV_TREE:
                    Size_delta = len(NewData) + self.Remain_New_Free_Space - len(TargetTree.Data.Data)
                    TargetTree.Data.Header.FvLength += Size_delta
                TargetTree.Data.ModFvExt()
                TargetTree.Data.ModFvSize()
                TargetTree.Data.ModExtHeaderData()
                ModifyFvExtData(TargetTree)
                TargetTree.Data.ModCheckSum()
            # If node is SECTION_TREE and not guided section, update Pad data and Header info.
            elif TargetTree.type == SECTION_TREE and TargetTree.Data.Type != 0x02:
                New_Pad_Size = GetPadSize(len(NewData), 4)
                Size_delta = len(NewData) - len(TargetTree.Data.Data)
                ChangeSize(TargetTree, -Size_delta)
                if TargetTree.NextRel:
                    Delta_Pad_Size = len(TargetTree.Data.PadData) - New_Pad_Size
                    self.Remain_New_Free_Space += Delta_Pad_Size
                    TargetTree.Data.PadData = b'\x00' * New_Pad_Size
            TargetTree.Data.Data = NewData
        if GuidTool:
            guidtool = GUIDTools().__getitem__(struct2stream(GuidTool))
            if not guidtool.ifexist:
                logger.error("GuidTool {} is not found when decompressing {} file.\n".format(guidtool.command, TargetTree.Parent.Data.Name))
                raise Exception("Process Failed: GuidTool not found!")
            CompressedData = guidtool.pack(TargetTree.Data.Data)
            # Compare the re-packed size against the previous compressed data
            # and fold any saved bytes into Remain_New_Free_Space.
            if len(CompressedData) < len(TargetTree.Data.OriData):
                New_Pad_Size = GetPadSize(len(CompressedData), SECTION_COMMON_ALIGNMENT)
                Size_delta = len(CompressedData) - len(TargetTree.Data.OriData)
                ChangeSize(TargetTree, -Size_delta)
                if TargetTree.NextRel:
                    TargetTree.Data.PadData = b'\x00' * New_Pad_Size
                    self.Remain_New_Free_Space = len(TargetTree.Data.OriData) + len(TargetTree.Data.PadData) - len(CompressedData) - New_Pad_Size
                else:
                    TargetTree.Data.PadData = b''
                    self.Remain_New_Free_Space = len(TargetTree.Data.OriData) - len(CompressedData)
                TargetTree.Data.OriData = CompressedData
            elif len(CompressedData) == len(TargetTree.Data.OriData):
                TargetTree.Data.OriData = CompressedData
            elif len(CompressedData) > len(TargetTree.Data.OriData):
                # Re-packed data grew: claim more space from ancestors.
                New_Pad_Size = GetPadSize(len(CompressedData), SECTION_COMMON_ALIGNMENT)
                self.Remain_New_Free_Space = len(CompressedData) + New_Pad_Size - len(TargetTree.Data.OriData) - len(TargetTree.Data.PadData)
                self.ModifyTest(TargetTree, self.Remain_New_Free_Space)
                self.Status = True

    def ModifyTest(self, ParTree, Needed_Space: int) -> None:
        """Walk up from ParTree claiming Needed_Space bytes of free space,
        updating each ancestor's data/pads/sizes/checksums on the way."""
        # If have needed space, will find if there have free space in parent tree, meanwhile update the node data.
        if Needed_Space > 0:
            # If current node is a Fv node
            if ParTree.type == FV_TREE or ParTree.type == SEC_FV_TREE:
                ParTree.Data.Data = b''
                # First check if Fv free space is enough for needed space.
                # If so, use the current Fv free space;
                # Else, use all the Free space, and recalculate needed space, continue finding in its parent node.
                Needed_Space = Needed_Space - ParTree.Data.Free_Space
                if Needed_Space < 0:
                    ParTree.Child[-1].Data.Data = b'\xff' * (-Needed_Space)
                    ParTree.Data.Free_Space = (-Needed_Space)
                    self.Status = True
                else:
                    if ParTree.type == FV_TREE:
                        # Top-level Fv cannot grow: fail the operation.
                        self.Status = False
                    else:
                        # Grow a section Fv in whole BlockMap blocks.
                        BlockSize = ParTree.Data.Header.BlockMap[0].Length
                        New_Add_Len = BlockSize - Needed_Space%BlockSize
                        if New_Add_Len % BlockSize:
                            ParTree.Child[-1].Data.Data = b'\xff' * New_Add_Len
                            ParTree.Data.Free_Space = New_Add_Len
                            Needed_Space += New_Add_Len
                        else:
                            ParTree.Child.remove(ParTree.Child[-1])
                            ParTree.Data.Free_Space = 0
                        ParTree.Data.Size += Needed_Space
                        # BUGFIX: was `Fvlength` (typo) which silently set a
                        # new Python attribute and left the real ctypes
                        # FvLength field stale.
                        ParTree.Data.Header.FvLength = ParTree.Data.Size
                ModifyFvSystemGuid(ParTree)
                for item in ParTree.Child:
                    if item.type == FFS_FREE_SPACE:
                        ParTree.Data.Data += item.Data.Data + item.Data.PadData
                    else:
                        ParTree.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
                ParTree.Data.ModFvExt()
                ParTree.Data.ModFvSize()
                ParTree.Data.ModExtHeaderData()
                ModifyFvExtData(ParTree)
                ParTree.Data.ModCheckSum()
            # If current node is a Ffs node
            elif ParTree.type == FFS_TREE:
                ParTree.Data.Data = b''
                OriHeaderLen = ParTree.Data.HeaderLength
                # Update its data as adding all the header and data of its child node.
                for item in ParTree.Child:
                    if item.Data.OriData:
                        if item.Data.ExtHeader:
                            ParTree.Data.Data += struct2stream(item.Data.Header) + struct2stream(item.Data.ExtHeader) + item.Data.OriData + item.Data.PadData
                        else:
                            ParTree.Data.Data += struct2stream(item.Data.Header) + item.Data.OriData + item.Data.PadData
                    else:
                        if item.Data.ExtHeader:
                            ParTree.Data.Data += struct2stream(item.Data.Header) + struct2stream(item.Data.ExtHeader) + item.Data.Data + item.Data.PadData
                        else:
                            ParTree.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
                ChangeSize(ParTree, -Needed_Space)
                ModifyFfsType(ParTree)
                # Recalculate pad data, update needed space with Delta_Pad_Size.
                Needed_Space += ParTree.Data.HeaderLength - OriHeaderLen
                New_Pad_Size = GetPadSize(ParTree.Data.Size, FFS_COMMON_ALIGNMENT)
                Delta_Pad_Size = New_Pad_Size - len(ParTree.Data.PadData)
                Needed_Space += Delta_Pad_Size
                ParTree.Data.PadData = b'\xff' * GetPadSize(ParTree.Data.Size, FFS_COMMON_ALIGNMENT)
                ParTree.Data.ModCheckSum()
            # If current node is a Section node
            elif ParTree.type == SECTION_TREE:
                OriData = ParTree.Data.Data
                OriHeaderLen = ParTree.Data.HeaderLength
                ParTree.Data.Data = b''
                # Update its data as adding all the header and data of its child node.
                for item in ParTree.Child:
                    if item.type == SECTION_TREE and item.Data.ExtHeader and item.Data.Type != 0x02:
                        ParTree.Data.Data += struct2stream(item.Data.Header) + struct2stream(item.Data.ExtHeader) + item.Data.Data + item.Data.PadData
                    elif item.type == SECTION_TREE and item.Data.ExtHeader and item.Data.Type == 0x02:
                        ParTree.Data.Data += struct2stream(item.Data.Header) + struct2stream(item.Data.ExtHeader) + item.Data.OriData + item.Data.PadData
                    else:
                        ParTree.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
                # If the current section is guided section
                if ParTree.Data.Type == 0x02:
                    guidtool = GUIDTools().__getitem__(struct2stream(ParTree.Data.ExtHeader.SectionDefinitionGuid))
                    if not guidtool.ifexist:
                        logger.error("GuidTool {} is not found when decompressing {} file.\n".format(guidtool.command, ParTree.Parent.Data.Name))
                        raise Exception("Process Failed: GuidTool not found!")
                    # Recompress current data, and recalculate the needed space
                    CompressedData = guidtool.pack(ParTree.Data.Data)
                    Needed_Space = len(CompressedData) - len(ParTree.Data.OriData)
                    ParTree.Data.OriData = CompressedData
                    New_Size = ParTree.Data.HeaderLength + len(CompressedData)
                    ParTree.Data.Header.Size[0] = New_Size % (16**2)
                    ParTree.Data.Header.Size[1] = New_Size % (16**4) //(16**2)
                    ParTree.Data.Header.Size[2] = New_Size // (16**4)
                    ParTree.Data.Size = ParTree.Data.Header.SECTION_SIZE
                    ModifySectionType(ParTree)
                    Needed_Space += ParTree.Data.HeaderLength - OriHeaderLen
                    # Update needed space with Delta_Pad_Size
                    if ParTree.NextRel:
                        New_Pad_Size = GetPadSize(ParTree.Data.Size, SECTION_COMMON_ALIGNMENT)
                        Delta_Pad_Size = New_Pad_Size - len(ParTree.Data.PadData)
                        ParTree.Data.PadData = b'\x00' * New_Pad_Size
                        Needed_Space += Delta_Pad_Size
                    else:
                        ParTree.Data.PadData = b''
                    if Needed_Space < 0:
                        self.Remain_New_Free_Space = len(ParTree.Data.OriData) - len(CompressedData)
                # If current section is not guided section
                elif Needed_Space:
                    ChangeSize(ParTree, -Needed_Space)
                    ModifySectionType(ParTree)
                    # Update needed space with Delta_Pad_Size
                    Needed_Space += ParTree.Data.HeaderLength - OriHeaderLen
                    New_Pad_Size = GetPadSize(ParTree.Data.Size, SECTION_COMMON_ALIGNMENT)
                    Delta_Pad_Size = New_Pad_Size - len(ParTree.Data.PadData)
                    Needed_Space += Delta_Pad_Size
                    ParTree.Data.PadData = b'\x00' * New_Pad_Size
            NewParTree = ParTree.Parent
            ROOT_TYPE = [ROOT_FV_TREE, ROOT_FFS_TREE, ROOT_SECTION_TREE, ROOT_TREE]
            if NewParTree and NewParTree.type not in ROOT_TYPE:
                self.ModifyTest(NewParTree, Needed_Space)
        # If current node have enough space, will recompress all the related node data, return true.
        else:
            self.CompressData(ParTree)
            self.Status = True

    def ReplaceFfs(self) -> bool:
        """Replace self.TargetFfs with self.NewFfs inside the parent Fv,
        rebalancing free space either way. Returns the success flag."""
        logger.debug('Start Replacing Process......')
        TargetFv = self.TargetFfs.Parent
        # If the Fv Header Attributes is EFI_FVB2_ERASE_POLARITY, Child Ffs Header State need be reversed.
        if TargetFv.Data.Header.Attributes & EFI_FVB2_ERASE_POLARITY:
            self.NewFfs.Data.Header.State = c_uint8(
                ~self.NewFfs.Data.Header.State)
        # NewFfs parsing will not calculate the PadSize, thus recalculate.
        self.NewFfs.Data.PadData = b'\xff' * GetPadSize(self.NewFfs.Data.Size, FFS_COMMON_ALIGNMENT)
        if self.NewFfs.Data.Size >= self.TargetFfs.Data.Size:
            Needed_Space = self.NewFfs.Data.Size + len(self.NewFfs.Data.PadData) - self.TargetFfs.Data.Size - len(self.TargetFfs.Data.PadData)
            # If TargetFv have enough free space, just move part of the free space to NewFfs.
            if TargetFv.Data.Free_Space >= Needed_Space:
                # Modify TargetFv Child info and BiosTree.
                TargetFv.Child[-1].Data.Data = b'\xff' * (TargetFv.Data.Free_Space - Needed_Space)
                TargetFv.Data.Free_Space -= Needed_Space
                Target_index = TargetFv.Child.index(self.TargetFfs)
                TargetFv.Child.remove(self.TargetFfs)
                TargetFv.insertChild(self.NewFfs, Target_index)
                # Modify TargetFv Header and ExtHeader info.
                TargetFv.Data.ModFvExt()
                TargetFv.Data.ModFvSize()
                TargetFv.Data.ModExtHeaderData()
                ModifyFvExtData(TargetFv)
                TargetFv.Data.ModCheckSum()
                # Recompress from the Fv node to update all the related node data.
                self.CompressData(TargetFv)
                # return the Status
                self.Status = True
            # If TargetFv do not have enough free space, need move part of the free space of TargetFv's parent Fv to TargetFv/NewFfs.
            else:
                if TargetFv.type == FV_TREE:
                    self.Status = False
                else:
                    # Recalculate TargetFv needed space to keep it match the BlockSize setting.
                    Needed_Space -= TargetFv.Data.Free_Space
                    BlockSize = TargetFv.Data.Header.BlockMap[0].Length
                    New_Add_Len = BlockSize - Needed_Space%BlockSize
                    Target_index = TargetFv.Child.index(self.TargetFfs)
                    if New_Add_Len % BlockSize:
                        TargetFv.Child[-1].Data.Data = b'\xff' * New_Add_Len
                        TargetFv.Data.Free_Space = New_Add_Len
                        Needed_Space += New_Add_Len
                        TargetFv.insertChild(self.NewFfs, Target_index)
                        TargetFv.Child.remove(self.TargetFfs)
                    else:
                        TargetFv.Child.remove(self.TargetFfs)
                        TargetFv.Data.Free_Space = 0
                        TargetFv.insertChild(self.NewFfs)
                    # Encapsulate the Fv Data for update.
                    TargetFv.Data.Data = b''
                    for item in TargetFv.Child:
                        if item.type == FFS_FREE_SPACE:
                            TargetFv.Data.Data += item.Data.Data + item.Data.PadData
                        else:
                            TargetFv.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
                    TargetFv.Data.Size += Needed_Space
                    # Modify TargetFv Data Header and ExtHeader info.
                    TargetFv.Data.Header.FvLength = TargetFv.Data.Size
                    TargetFv.Data.ModFvExt()
                    TargetFv.Data.ModFvSize()
                    TargetFv.Data.ModExtHeaderData()
                    ModifyFvExtData(TargetFv)
                    TargetFv.Data.ModCheckSum()
                    # Start free space calculating and moving process.
                    self.ModifyTest(TargetFv.Parent, Needed_Space)
        else:
            New_Free_Space = self.TargetFfs.Data.Size - self.NewFfs.Data.Size
            # If TargetFv already have free space, move the new free space into it.
            if TargetFv.Data.Free_Space:
                TargetFv.Child[-1].Data.Data += b'\xff' * New_Free_Space
                TargetFv.Data.Free_Space += New_Free_Space
                Target_index = TargetFv.Child.index(self.TargetFfs)
                TargetFv.Child.remove(self.TargetFfs)
                TargetFv.insertChild(self.NewFfs, Target_index)
                self.Status = True
            # If TargetFv do not have free space, create free space for Fv.
            else:
                New_Free_Space_Tree = BIOSTREE('FREE_SPACE')
                New_Free_Space_Tree.type = FFS_FREE_SPACE
                # NOTE(review): sibling code paths use FreeSpaceNode for
                # free-space payloads; FfsNode kept here to preserve behavior.
                New_Free_Space_Tree.Data = FfsNode(b'\xff' * New_Free_Space)
                TargetFv.Data.Free_Space = New_Free_Space
                # BUGFIX: was insertChild(New_Free_Space), which appended the
                # integer byte count instead of the new tree node.
                TargetFv.insertChild(New_Free_Space_Tree)
                Target_index = TargetFv.Child.index(self.TargetFfs)
                TargetFv.Child.remove(self.TargetFfs)
                TargetFv.insertChild(self.NewFfs, Target_index)
                self.Status = True
            # Modify TargetFv Header and ExtHeader info.
            TargetFv.Data.ModFvExt()
            TargetFv.Data.ModFvSize()
            TargetFv.Data.ModExtHeaderData()
            ModifyFvExtData(TargetFv)
            TargetFv.Data.ModCheckSum()
            # Recompress from the Fv node to update all the related node data.
            self.CompressData(TargetFv)
        logger.debug('Done!')
        return self.Status

    def AddFfs(self) -> bool:
        """Insert self.NewFfs into the Fv that owns self.TargetFfs (which is
        either the Fv's free-space node or an existing child). Returns the
        success flag."""
        logger.debug('Start Adding Process......')
        # NewFfs parsing will not calculate the PadSize, thus recalculate.
        self.NewFfs.Data.PadData = b'\xff' * GetPadSize(self.NewFfs.Data.Size, FFS_COMMON_ALIGNMENT)
        if self.TargetFfs.type == FFS_FREE_SPACE:
            TargetLen = self.NewFfs.Data.Size + len(self.NewFfs.Data.PadData) - self.TargetFfs.Data.Size - len(self.TargetFfs.Data.PadData)
            TargetFv = self.TargetFfs.Parent
            # If the Fv Header Attributes is EFI_FVB2_ERASE_POLARITY, Child Ffs Header State need be reversed.
            if TargetFv.Data.Header.Attributes & EFI_FVB2_ERASE_POLARITY:
                self.NewFfs.Data.Header.State = c_uint8(
                    ~self.NewFfs.Data.Header.State)
            # If TargetFv have enough free space, just move part of the free space to NewFfs, split free space to NewFfs and new free space.
            if TargetLen < 0:
                self.TargetFfs.Data.Data = b'\xff' * (-TargetLen)
                TargetFv.Data.Free_Space = (-TargetLen)
                TargetFv.Data.ModFvExt()
                TargetFv.Data.ModExtHeaderData()
                ModifyFvExtData(TargetFv)
                TargetFv.Data.ModCheckSum()
                TargetFv.insertChild(self.NewFfs, -1)
                ModifyFfsType(self.NewFfs)
                # Recompress from the Fv node to update all the related node data.
                self.CompressData(TargetFv)
                self.Status = True
            elif TargetLen == 0:
                TargetFv.Child.remove(self.TargetFfs)
                TargetFv.insertChild(self.NewFfs)
                ModifyFfsType(self.NewFfs)
                # Recompress from the Fv node to update all the related node data.
                self.CompressData(TargetFv)
                self.Status = True
            # If TargetFv do not have enough free space, need move part of the free space of TargetFv's parent Fv to TargetFv/NewFfs.
            else:
                if TargetFv.type == FV_TREE:
                    self.Status = False
                elif TargetFv.type == SEC_FV_TREE:
                    # Recalculate TargetFv needed space to keep it match the BlockSize setting.
                    BlockSize = TargetFv.Data.Header.BlockMap[0].Length
                    New_Add_Len = BlockSize - TargetLen%BlockSize
                    if New_Add_Len % BlockSize:
                        self.TargetFfs.Data.Data = b'\xff' * New_Add_Len
                        self.TargetFfs.Data.Size = New_Add_Len
                        TargetLen += New_Add_Len
                        TargetFv.insertChild(self.NewFfs, -1)
                        TargetFv.Data.Free_Space = New_Add_Len
                    else:
                        TargetFv.Child.remove(self.TargetFfs)
                        TargetFv.insertChild(self.NewFfs)
                        TargetFv.Data.Free_Space = 0
                    ModifyFfsType(self.NewFfs)
                    ModifyFvSystemGuid(TargetFv)
                    TargetFv.Data.Data = b''
                    for item in TargetFv.Child:
                        if item.type == FFS_FREE_SPACE:
                            TargetFv.Data.Data += item.Data.Data + item.Data.PadData
                        else:
                            TargetFv.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
                    # Encapsulate the Fv Data for update.
                    TargetFv.Data.Size += TargetLen
                    TargetFv.Data.Header.FvLength = TargetFv.Data.Size
                    TargetFv.Data.ModFvExt()
                    TargetFv.Data.ModFvSize()
                    TargetFv.Data.ModExtHeaderData()
                    ModifyFvExtData(TargetFv)
                    TargetFv.Data.ModCheckSum()
                    # Start free space calculating and moving process.
                    self.ModifyTest(TargetFv.Parent, TargetLen)
        else:
            # If TargetFv do not have free space, need directly move part of the free space of TargetFv's parent Fv to TargetFv/NewFfs.
            TargetLen = self.NewFfs.Data.Size + len(self.NewFfs.Data.PadData)
            TargetFv = self.TargetFfs.Parent
            if TargetFv.Data.Header.Attributes & EFI_FVB2_ERASE_POLARITY:
                self.NewFfs.Data.Header.State = c_uint8(
                    ~self.NewFfs.Data.Header.State)
            if TargetFv.type == FV_TREE:
                self.Status = False
            elif TargetFv.type == SEC_FV_TREE:
                BlockSize = TargetFv.Data.Header.BlockMap[0].Length
                New_Add_Len = BlockSize - TargetLen%BlockSize
                if New_Add_Len % BlockSize:
                    New_Free_Space = BIOSTREE('FREE_SPACE')
                    New_Free_Space.type = FFS_FREE_SPACE
                    New_Free_Space.Data = FreeSpaceNode(b'\xff' * New_Add_Len)
                    TargetLen += New_Add_Len
                    TargetFv.Data.Free_Space = New_Add_Len
                    TargetFv.insertChild(self.NewFfs)
                    TargetFv.insertChild(New_Free_Space)
                else:
                    TargetFv.insertChild(self.NewFfs)
                ModifyFfsType(self.NewFfs)
                ModifyFvSystemGuid(TargetFv)
                TargetFv.Data.Data = b''
                for item in TargetFv.Child:
                    if item.type == FFS_FREE_SPACE:
                        TargetFv.Data.Data += item.Data.Data + item.Data.PadData
                    else:
                        TargetFv.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
                TargetFv.Data.Size += TargetLen
                TargetFv.Data.Header.FvLength = TargetFv.Data.Size
                TargetFv.Data.ModFvExt()
                TargetFv.Data.ModFvSize()
                TargetFv.Data.ModExtHeaderData()
                ModifyFvExtData(TargetFv)
                TargetFv.Data.ModCheckSum()
                self.ModifyTest(TargetFv.Parent, TargetLen)
        logger.debug('Done!')
        return self.Status

    def DeleteFfs(self) -> bool:
        """Remove self.TargetFfs from its parent Fv, converting the vacated
        bytes into (block-aligned) free space. Returns the success flag."""
        logger.debug('Start Deleting Process......')
        Delete_Ffs = self.TargetFfs
        Delete_Fv = Delete_Ffs.Parent
        # Calculate free space
        Add_Free_Space = Delete_Ffs.Data.Size + len(Delete_Ffs.Data.PadData)
        # If Ffs parent Fv have free space, follow the rules to merge the new free space.
        if Delete_Fv.Data.Free_Space:
            # If Fv is a Section fv, free space need to be recalculated to keep align with BlockSize.
            # Other free space saved in self.Remain_New_Free_Space, will be moved to the 1st level Fv.
            if Delete_Fv.type == SEC_FV_TREE:
                Used_Size = Delete_Fv.Data.Size - Delete_Fv.Data.Free_Space - Add_Free_Space
                BlockSize = Delete_Fv.Data.Header.BlockMap[0].Length
                New_Free_Space = BlockSize - Used_Size % BlockSize
                self.Remain_New_Free_Space += Delete_Fv.Data.Free_Space + Add_Free_Space - New_Free_Space
                Delete_Fv.Child[-1].Data.Data = New_Free_Space * b'\xff'
                Delete_Fv.Data.Free_Space = New_Free_Space
            # If Fv is 1st level Fv, new free space will be merged with origin free space.
            else:
                Used_Size = Delete_Fv.Data.Size - Delete_Fv.Data.Free_Space - Add_Free_Space
                Delete_Fv.Child[-1].Data.Data += Add_Free_Space * b'\xff'
                Delete_Fv.Data.Free_Space += Add_Free_Space
                New_Free_Space = Delete_Fv.Data.Free_Space
        # If Ffs parent Fv not have free space, will create new free space node to save the free space.
        else:
            # If Fv is a Section fv, new free space need to be recalculated to keep align with BlockSize.
            # Then create a Free space node to save the 0xff data, and insert into the Fv.
            # If have more space left, move to 1st level fv.
            if Delete_Fv.type == SEC_FV_TREE:
                Used_Size = Delete_Fv.Data.Size - Add_Free_Space
                BlockSize = Delete_Fv.Data.Header.BlockMap[0].Length
                New_Free_Space = BlockSize - Used_Size % BlockSize
                self.Remain_New_Free_Space += Add_Free_Space - New_Free_Space
                Add_Free_Space = New_Free_Space
            # If Fv is 1st level Fv, new free space node will be created to save the free space.
            else:
                Used_Size = Delete_Fv.Data.Size - Add_Free_Space
                New_Free_Space = Add_Free_Space
            New_Free_Space_Info = FfsNode(Add_Free_Space * b'\xff')
            New_Free_Space_Info.Data = Add_Free_Space * b'\xff'
            New_Ffs_Tree = BIOSTREE(New_Free_Space_Info.Name)
            New_Ffs_Tree.type = FFS_FREE_SPACE
            New_Ffs_Tree.Data = New_Free_Space_Info
            Delete_Fv.insertChild(New_Ffs_Tree)
            Delete_Fv.Data.Free_Space = Add_Free_Space
        Delete_Fv.Child.remove(Delete_Ffs)
        Delete_Fv.Data.Header.FvLength = Used_Size + New_Free_Space
        Delete_Fv.Data.ModFvExt()
        Delete_Fv.Data.ModFvSize()
        Delete_Fv.Data.ModExtHeaderData()
        ModifyFvExtData(Delete_Fv)
        Delete_Fv.Data.ModCheckSum()
        # Recompress from the Fv node to update all the related node data.
        self.CompressData(Delete_Fv)
        self.Status = True
        logger.debug('Done!')
        return self.Status

    def ShrinkFv(self) -> bool:
        """Trim whole unused BlockMap blocks from the tail free space of the
        Fv passed in self.NewFfs. Returns the success flag."""
        TargetFv = self.NewFfs
        TargetFv.Data.Data = b''
        if not TargetFv.Data.Free_Space:
            self.Status = True
        else:
            BlockSize = TargetFv.Data.Header.BlockMap[0].Length
            # Keep only the sub-block remainder of free space; drop whole blocks.
            New_Free_Space = TargetFv.Data.Free_Space%BlockSize
            Removed_Space = TargetFv.Data.Free_Space - New_Free_Space
            TargetFv.Child[-1].Data.Data = b'\xff' * New_Free_Space
            TargetFv.Data.Size -= Removed_Space
            # BUGFIX: was `Fvlength` (typo) which silently set a new Python
            # attribute and left the real ctypes FvLength field stale.
            TargetFv.Data.Header.FvLength = TargetFv.Data.Size
            ModifyFvSystemGuid(TargetFv)
            for item in TargetFv.Child:
                if item.type == FFS_FREE_SPACE:
                    TargetFv.Data.Data += item.Data.Data + item.Data.PadData
                else:
                    TargetFv.Data.Data += struct2stream(item.Data.Header) + item.Data.Data + item.Data.PadData
            TargetFv.Data.ModFvExt()
            TargetFv.Data.ModFvSize()
            TargetFv.Data.ModExtHeaderData()
            ModifyFvExtData(TargetFv)
            TargetFv.Data.ModCheckSum()
            self.Status = True
        return self.Status
| edk2-master | BaseTools/Source/Python/FMMT/core/FvHandler.py |
## @file
# This file is used to define the functions to operate bios binary file.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from core.FMMTParser import *
from core.FvHandler import *
from utils.FvLayoutPrint import *
from utils.FmmtLogger import FmmtLogger as logger
# Running counter of firmware volumes, shared across operations.
# A `global` statement at module scope is a no-op, so it was dropped;
# functions that rebind this name still need their own `global Fv_count`.
Fv_count = 0
# The ROOT_TYPE can be 'ROOT_TREE', 'ROOT_FV_TREE', 'ROOT_FFS_TREE', 'ROOT_SECTION_TREE'
def ViewFile(inputfile: str, ROOT_TYPE: str, layoutfile: str=None, outputfile: str=None) -> None:
    """Parse a BIOS image, print its layout, and optionally dump the layout
    to a file and/or re-encapsulate the binary into outputfile.

    ROOT_TYPE selects the parser root: ROOT_TREE, ROOT_FV_TREE,
    ROOT_FFS_TREE or ROOT_SECTION_TREE.
    """
    if not os.path.exists(inputfile):
        logger.error("Invalid inputfile, can not open {}.".format(inputfile))
        raise Exception("Process Failed: Invalid inputfile!")
    # 1. Data Prepare: read the whole image into memory.
    with open(inputfile, "rb") as image_fd:
        raw_image = image_fd.read()
    parser = FMMTParser(inputfile, ROOT_TYPE)
    # 2. DataTree Create
    logger.debug('Parsing inputfile data......')
    parser.ParserFromRoot(parser.WholeFvTree, raw_image)
    logger.debug('Done!')
    # 3. Log Output
    layout_info = parser.WholeFvTree.ExportTree()
    logger.debug('BinaryTree created, start parsing BinaryTree data......')
    parser.WholeFvTree.parserTree(layout_info, parser.BinaryInfo)
    logger.debug('Done!')
    GetFormatter("").LogPrint(parser.BinaryInfo)
    if layoutfile:
        # `layoutfile` is either a full filename (take its extension as the
        # format) or just a format name (derive a default filename).
        suffix = os.path.splitext(layoutfile)[1]
        if suffix:
            layout_name = layoutfile
            layout_format = suffix[1:].lower()
        else:
            layout_format = layoutfile.lower()
            layout_name = "Layout_{}{}".format(os.path.basename(inputfile), ".{}".format(layout_format))
        GetFormatter(layout_format).dump(layout_info, parser.BinaryInfo, layout_name)
    # 4. Data Encapsulation
    if outputfile:
        logger.debug('Start encapsulating data......')
        parser.Encapsulation(parser.WholeFvTree, False)
        with open(outputfile, "wb") as out_fd:
            out_fd.write(parser.FinalData)
        logger.debug('Encapsulated data is saved in {}.'.format(outputfile))
def DeleteFfs(inputfile: str, TargetFfs_name: str, outputfile: str, Fv_name: str=None) -> None:
    """Delete every FFS matching *TargetFfs_name* (optionally restricted to
    the Fv named *Fv_name*) and write the re-encapsulated image."""
    if not os.path.exists(inputfile):
        logger.error("Invalid inputfile, can not open {}.".format(inputfile))
        raise Exception("Process Failed: Invalid inputfile!")
    # 1. Data Prepare
    with open(inputfile, "rb") as fd:
        image = fd.read()
    parser = FMMTParser(inputfile, ROOT_TREE)
    # 2. DataTree Create
    logger.debug('Parsing inputfile data......')
    parser.ParserFromRoot(parser.WholeFvTree, image)
    logger.debug('Done!')
    # 3. Data Modify
    parser.WholeFvTree.FindNode(TargetFfs_name, parser.WholeFvTree.Findlist)
    # When a Fv is specified, keep only matches whose parent is that Fv.
    if Fv_name:
        parser.WholeFvTree.Findlist = [
            node for node in parser.WholeFvTree.Findlist
            if node.Parent.key == Fv_name or node.Parent.Data.Name == Fv_name]
    Status = False
    if parser.WholeFvTree.Findlist != []:
        for target in parser.WholeFvTree.Findlist:
            Status = FvHandler(None, target).DeleteFfs()
    else:
        logger.error('Target Ffs not found!!!')
    # 4. Data Encapsulation
    if Status:
        logger.debug('Start encapsulating data......')
        parser.Encapsulation(parser.WholeFvTree, False)
        with open(outputfile, "wb") as fd:
            fd.write(parser.FinalData)
        logger.debug('Encapsulated data is saved in {}.'.format(outputfile))
def AddNewFfs(inputfile: str, Fv_name: str, newffsfile: str, outputfile: str) -> None:
    """Insert the FFS stored in *newffsfile* into every Fv matching *Fv_name*
    and write the re-encapsulated image to *outputfile*."""
    if not os.path.exists(inputfile):
        logger.error("Invalid inputfile, can not open {}.".format(inputfile))
        raise Exception("Process Failed: Invalid inputfile!")
    if not os.path.exists(newffsfile):
        logger.error("Invalid ffsfile, can not open {}.".format(newffsfile))
        raise Exception("Process Failed: Invalid ffs file!")
    # 1. Data Prepare
    with open(inputfile, "rb") as fd:
        image = fd.read()
    parser = FMMTParser(inputfile, ROOT_TREE)
    # 2. DataTree Create
    logger.debug('Parsing inputfile data......')
    parser.ParserFromRoot(parser.WholeFvTree, image)
    logger.debug('Done!')
    # Get Target Fv and Target Ffs_Pad
    parser.WholeFvTree.FindNode(Fv_name, parser.WholeFvTree.Findlist)
    # Create new ffs Tree
    with open(newffsfile, "rb") as fd:
        new_ffs_data = fd.read()
    new_parser = FMMTParser(newffsfile, ROOT_FFS_TREE)
    Status = False
    # 3. Data Modify
    if parser.WholeFvTree.Findlist:
        for TargetFv in parser.WholeFvTree.Findlist:
            pad = TargetFv.Child[-1]
            logger.debug('Parsing newffsfile data......')
            # Free space is reused in place; otherwise the new ffs goes
            # right behind the final child of the Fv.
            if pad.type == FFS_FREE_SPACE:
                base = pad.Data.HOffset
            else:
                base = pad.Data.HOffset + pad.Data.Size
            new_parser.ParserFromRoot(new_parser.WholeFvTree, new_ffs_data, base)
            logger.debug('Done!')
            Status = FvHandler(new_parser.WholeFvTree.Child[0], pad).AddFfs()
    else:
        logger.error('Target Fv not found!!!')
    # 4. Data Encapsulation
    if Status:
        logger.debug('Start encapsulating data......')
        parser.Encapsulation(parser.WholeFvTree, False)
        with open(outputfile, "wb") as fd:
            fd.write(parser.FinalData)
        logger.debug('Encapsulated data is saved in {}.'.format(outputfile))
def ReplaceFfs(inputfile: str, Ffs_name: str, newffsfile: str, outputfile: str, Fv_name: str=None) -> None:
    """Replace every FFS matching *Ffs_name* (optionally restricted to
    *Fv_name*) with the FFS stored in *newffsfile*."""
    if not os.path.exists(inputfile):
        logger.error("Invalid inputfile, can not open {}.".format(inputfile))
        raise Exception("Process Failed: Invalid inputfile!")
    # 1. Data Prepare
    with open(inputfile, "rb") as fd:
        image = fd.read()
    parser = FMMTParser(inputfile, ROOT_TREE)
    # 2. DataTree Create
    logger.debug('Parsing inputfile data......')
    parser.ParserFromRoot(parser.WholeFvTree, image)
    logger.debug('Done!')
    with open(newffsfile, "rb") as fd:
        new_ffs_data = fd.read()
    new_parser = FMMTParser(newffsfile, FV_TREE)
    logger.debug('Parsing newffsfile data......')
    new_parser.ParserFromRoot(new_parser.WholeFvTree, new_ffs_data)
    logger.debug('Done!')
    Status = False
    # 3. Data Modify
    new_ffs = new_parser.WholeFvTree.Child[0]
    new_ffs.Data.PadData = GetPadSize(new_ffs.Data.Size, FFS_COMMON_ALIGNMENT) * b'\xff'
    parser.WholeFvTree.FindNode(Ffs_name, parser.WholeFvTree.Findlist)
    # When a Fv is specified, keep only matches whose parent is that Fv.
    if Fv_name:
        parser.WholeFvTree.Findlist = [
            node for node in parser.WholeFvTree.Findlist
            if node.Parent.key == Fv_name or node.Parent.Data.Name == Fv_name]
    if parser.WholeFvTree.Findlist != []:
        for target in parser.WholeFvTree.Findlist:
            Status = FvHandler(new_parser.WholeFvTree.Child[0], target).ReplaceFfs()
    else:
        logger.error('Target Ffs not found!!!')
    # 4. Data Encapsulation
    if Status:
        logger.debug('Start encapsulating data......')
        parser.Encapsulation(parser.WholeFvTree, False)
        with open(outputfile, "wb") as fd:
            fd.write(parser.FinalData)
        logger.debug('Encapsulated data is saved in {}.'.format(outputfile))
def ExtractFfs(inputfile: str, Ffs_name: str, outputfile: str, Fv_name: str=None) -> None:
    """Extract the first FFS (or whole Fv) matching *Ffs_name* into *outputfile*.

    If *Fv_name* is given, matches located outside that Fv are discarded
    before the first remaining match is extracted.
    """
    if not os.path.exists(inputfile):
        logger.error("Invalid inputfile, can not open {}.".format(inputfile))
        raise Exception("Process Failed: Invalid inputfile!")
    # 1. Data Prepare
    with open(inputfile, "rb") as f:
        whole_data = f.read()
    FmmtParser = FMMTParser(inputfile, ROOT_TREE)
    # 2. DataTree Create
    logger.debug('Parsing inputfile data......')
    FmmtParser.ParserFromRoot(FmmtParser.WholeFvTree, whole_data)
    logger.debug('Done!')
    FmmtParser.WholeFvTree.FindNode(Ffs_name, FmmtParser.WholeFvTree.Findlist)
    if Fv_name:
        FindNum = len(FmmtParser.WholeFvTree.Findlist)
        for index in range(FindNum-1, -1, -1):
            if FmmtParser.WholeFvTree.Findlist[index].Parent.key != Fv_name and FmmtParser.WholeFvTree.Findlist[index].Parent.Data.Name != Fv_name:
                FmmtParser.WholeFvTree.Findlist.remove(FmmtParser.WholeFvTree.Findlist[index])
    if FmmtParser.WholeFvTree.Findlist != []:
        TargetNode = FmmtParser.WholeFvTree.Findlist[0]
        # BUGFIX: the original condition `TargetNode.type == FV_TREE or
        # SEC_FV_TREE or DATA_FV_TREE` was always True (non-empty string
        # operands are truthy), so the ffs-extraction branch below was
        # unreachable.  Compare the node type against each Fv type explicitly.
        if TargetNode.type in (FV_TREE, SEC_FV_TREE, DATA_FV_TREE):
            FinalData = struct2stream(TargetNode.Data.Header) + TargetNode.Data.Data
            with open(outputfile, "wb") as f:
                f.write(FinalData)
            logger.debug('Extract fv data is saved in {}.'.format(outputfile))
        else:
            TargetFv = TargetNode.Parent
            # Undo the erase-polarity inversion of the file state byte so the
            # extracted ffs header is written in its canonical form.
            if TargetFv.Data.Header.Attributes & EFI_FVB2_ERASE_POLARITY:
                TargetNode.Data.Header.State = c_uint8(
                    ~TargetNode.Data.Header.State)
            FinalData = struct2stream(TargetNode.Data.Header) + TargetNode.Data.Data
            with open(outputfile, "wb") as f:
                f.write(FinalData)
            logger.debug('Extract ffs data is saved in {}.'.format(outputfile))
    else:
        logger.error('Target Ffs/Fv not found!!!')
def ShrinkFv(inputfile: str, outputfile: str) -> None:
    """Shrink the first Fv of *inputfile* and write the result to *outputfile*."""
    if not os.path.exists(inputfile):
        logger.error("Invalid inputfile, can not open {}.".format(inputfile))
        raise Exception("Process Failed: Invalid inputfile!")
    # 1. Data Prepare
    with open(inputfile, "rb") as f:
        whole_data = f.read()
    FmmtParser = FMMTParser(inputfile, ROOT_TREE)
    # 2. DataTree Create
    logger.debug('Parsing inputfile data......')
    FmmtParser.ParserFromRoot(FmmtParser.WholeFvTree, whole_data)
    logger.debug('Done!')
    # BUGFIX: Status was referenced below without ever being assigned when no
    # target Fv existed, raising NameError instead of reporting the error.
    Status = False
    TargetFv = FmmtParser.WholeFvTree.Child[0]
    if TargetFv:
        FvMod = FvHandler(TargetFv)
        Status = FvMod.ShrinkFv()
    else:
        logger.error('Target Fv not found!!!')
    # 4. Data Encapsulation
    if Status:
        logger.debug('Start encapsulating data......')
        FmmtParser.Encapsulation(FmmtParser.WholeFvTree, False)
        with open(outputfile, "wb") as f:
            f.write(FmmtParser.FinalData)
        logger.debug('Encapsulated data is saved in {}.'.format(outputfile))
| edk2-master | BaseTools/Source/Python/FMMT/core/FMMTOperation.py |
## @file
# This file is used to define the FMMT dependent external tool management class.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import glob
import logging
import os
import shutil
import sys
import tempfile
import uuid
from FirmwareStorageFormat.Common import *
from utils.FmmtLogger import FmmtLogger as logger
import subprocess
def ExecuteCommand(cmd: list) -> None:
    """Run an external GUID tool, discarding its stdout.

    A non-zero exit status is logged (not raised, to preserve the original
    best-effort behavior) so a failing compress/uncompress tool no longer
    goes completely unnoticed.
    """
    result = subprocess.run(cmd, stdout=subprocess.DEVNULL)
    if result.returncode != 0:
        logger.error("Command {} failed with exit code {}.".format(cmd, result.returncode))
class GUIDTool:
    """One external compress/uncompress tool identified by a section GUID."""
    def __init__(self, guid: str, short_name: str, command: str) -> None:
        self.guid: str = guid
        self.short_name: str = short_name
        self.command: str = command
        # Set to True by GUIDTools.VerifyTools() once the executable is found.
        self.ifexist: bool = False

    def pack(self, buffer: bytes) -> bytes:
        """
        Compress *buffer* with this tool ('-e') and return the compressed
        bytes, or b'' on failure.

        BUGFIX: the failure paths used to return the str '' despite the
        declared bytes return type; file handles and the temp directory were
        also leaked when the tool failed.
        """
        tool = self.command
        if not tool:
            logger.error(
                "Error parsing section: EFI_SECTION_GUID_DEFINED cannot be parsed at this time.")
            logger.info("Its GUID is: %s" % self.guid)
            return b""
        tmp = tempfile.mkdtemp(dir=os.environ.get('tmp'))
        ToolInputFile = os.path.join(tmp, "pack_uncompress_sec_file")
        ToolOuputFile = os.path.join(tmp, "pack_sec_file")
        try:
            # Context managers close both files even if the tool fails.
            with open(ToolInputFile, "wb") as file:
                file.write(buffer)
            ExecuteCommand([tool, '-e', '-o', ToolOuputFile, ToolInputFile])
            with open(ToolOuputFile, "rb") as buf:
                return buf.read()
        except Exception as msg:
            logger.error(msg)
            return b""
        finally:
            # Remove the scratch directory on both success and failure.
            if os.path.exists(tmp):
                shutil.rmtree(tmp)

    def unpack(self, buffer: bytes) -> bytes:
        """
        Uncompress *buffer* (common header already removed) with this tool
        ('-d') and return the decompressed bytes, or b'' on failure.
        """
        tool = self.command
        if not tool:
            logger.error("Error parsing section: EFI_SECTION_GUID_DEFINED cannot be parsed at this time.")
            logger.info("Its GUID is: %s" % self.guid)
            return b""
        tmp = tempfile.mkdtemp(dir=os.environ.get('tmp'))
        ToolInputFile = os.path.join(tmp, "unpack_sec_file")
        ToolOuputFile = os.path.join(tmp, "unpack_uncompress_sec_file")
        try:
            with open(ToolInputFile, "wb") as file:
                file.write(buffer)
            ExecuteCommand([tool, '-d', '-o', ToolOuputFile, ToolInputFile])
            with open(ToolOuputFile, "rb") as buf:
                return buf.read()
        except Exception as msg:
            logger.error(msg)
            return b""
        finally:
            if os.path.exists(tmp):
                shutil.rmtree(tmp)
class GUIDTools:
    '''
    GUIDTools is responsible for reading FMMTConfig.ini, verify the tools and provide interfaces to access those tools.
    '''
    # Built-in tool table used when no FmmtConf.ini can be located.
    default_tools = {
        struct2stream(ModifyGuidFormat("a31280ad-481e-41b6-95e8-127f4c984779")): GUIDTool("a31280ad-481e-41b6-95e8-127f4c984779", "TIANO", "TianoCompress"),
        struct2stream(ModifyGuidFormat("ee4e5898-3914-4259-9d6e-dc7bd79403cf")): GUIDTool("ee4e5898-3914-4259-9d6e-dc7bd79403cf", "LZMA", "LzmaCompress"),
        struct2stream(ModifyGuidFormat("fc1bcdb0-7d31-49aa-936a-a4600d9dd083")): GUIDTool("fc1bcdb0-7d31-49aa-936a-a4600d9dd083", "CRC32", "GenCrc32"),
        struct2stream(ModifyGuidFormat("d42ae6bd-1352-4bfb-909a-ca72a6eae889")): GUIDTool("d42ae6bd-1352-4bfb-909a-ca72a6eae889", "LZMAF86", "LzmaF86Compress"),
        struct2stream(ModifyGuidFormat("3d532050-5cda-4fd0-879e-0f7f630d5afb")): GUIDTool("3d532050-5cda-4fd0-879e-0f7f630d5afb", "BROTLI", "BrotliCompress"),
    }

    def __init__(self, tooldef_file: str=None) -> None:
        self.dir = os.path.join(os.path.dirname(__file__), "..")
        self.tooldef_file = tooldef_file if tooldef_file else os.path.join(self.dir, "FmmtConf.ini")
        # Lazily populated GUID(bytes) -> GUIDTool map; see LoadingTools().
        self.tooldef = dict()

    def SetConfigFile(self) -> None:
        """Locate FmmtConf.ini via the FmmtConfPath env var or the PATH."""
        # BUGFIX: os.environ['FmmtConfPath'] raised KeyError when the variable
        # was unset, so the PATH fallback below could never run.
        if os.environ.get('FmmtConfPath'):
            self.tooldef_file = os.path.join(os.environ['FmmtConfPath'], 'FmmtConf.ini')
        else:
            # BUGFIX: PATH is one os.pathsep-separated string; the original
            # iterated over its *characters* instead of its directory entries.
            for CurrentPath in os.environ.get('PATH', '').split(os.pathsep):
                if os.path.exists(os.path.join(CurrentPath, 'FmmtConf.ini')):
                    self.tooldef_file = os.path.join(CurrentPath, 'FmmtConf.ini')
                    break

    def VerifyTools(self, guidtool) -> None:
        """
        Verify Tools and Update Tools path.
        """
        path_env = os.environ.get("PATH")
        path_env_list = path_env.split(os.pathsep)
        path_env_list.append(os.path.dirname(__file__))
        path_env_list = list(set(path_env_list))
        cmd = guidtool.command
        if os.path.isabs(cmd):
            if not os.path.exists(cmd):
                self._LogMissingTool(guidtool, cmd)
            else:
                guidtool.ifexist = True
        else:
            for syspath in path_env_list:
                if glob.glob(os.path.join(syspath, cmd+"*")):
                    guidtool.ifexist = True
                    break
            else:
                self._LogMissingTool(guidtool, cmd)

    def _LogMissingTool(self, guidtool, cmd) -> None:
        # Shared error reporting for both the absolute-path and PATH-search cases.
        logger.error("Tool Not found %s, which causes compress/uncompress process error." % cmd)
        # BUGFIX: the original wrote `guidtool not in self.default_tools`,
        # comparing a GUIDTool instance against the dict's *keys* (GUID byte
        # strings) - always True.  Compare against the values instead.
        if guidtool not in self.default_tools.values():
            logger.error("Please goto edk2 repo in current console, run 'edksetup.bat rebuild' command, and try again.\n")

    def LoadingTools(self) -> None:
        """Fill self.tooldef from FmmtConf.ini, or from default_tools."""
        self.SetConfigFile()
        if os.path.exists(self.tooldef_file):
            with open(self.tooldef_file, "r") as fd:
                config_data = fd.readlines()
            for line in config_data:
                try:
                    if not line.startswith("#"):
                        guid, short_name, command = line.split()
                        new_format_guid = struct2stream(ModifyGuidFormat(guid.strip()))
                        self.tooldef[new_format_guid] = GUIDTool(
                            guid.strip(), short_name.strip(), command.strip())
                except Exception:
                    # Malformed lines are skipped; narrowed from a bare except.
                    logger.error("GuidTool load error!")
                    continue
        else:
            self.tooldef.update(self.default_tools)

    def __getitem__(self, guid):
        """Return the verified GUIDTool for *guid* (bytes key) or raise."""
        if not self.tooldef:
            self.LoadingTools()
        guid_tool = self.tooldef.get(guid)
        if guid_tool:
            self.VerifyTools(guid_tool)
            return guid_tool
        else:
            logger.error("{} GuidTool is not defined!".format(guid))
            raise Exception("Process Failed: is not defined!")
# Module-level singleton; tool definitions are loaded lazily on first lookup.
guidtools = GUIDTools()
| edk2-master | BaseTools/Source/Python/FMMT/core/GuidTools.py |
## @file
# This file is used to implement of the various bianry parser.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from re import T
import copy
import os
import sys
from FirmwareStorageFormat.Common import *
from core.BiosTreeNode import *
from core.BiosTree import *
from core.GuidTools import GUIDTools
from utils.FmmtLogger import FmmtLogger as logger
# Node-type tags for the layout tree (same values are defined in core.BiosTree).
ROOT_TREE = 'ROOT'
ROOT_FV_TREE = 'ROOT_FV_TREE'
ROOT_FFS_TREE = 'ROOT_FFS_TREE'
ROOT_SECTION_TREE = 'ROOT_SECTION_TREE'
FV_TREE = 'FV'
DATA_FV_TREE = 'DATA_FV'
FFS_TREE = 'FFS'
FFS_PAD = 'FFS_PAD'
FFS_FREE_SPACE = 'FFS_FREE_SPACE'
SECTION_TREE = 'SECTION'
SEC_FV_TREE = 'SEC_FV_IMAGE'
BINARY_DATA = 'BINARY'
# Global counter used to give every discovered Fv a unique name ('FV0', 'FV1', ...).
Fv_count = 0
## Abstract factory
class BinaryFactory():
    """Abstract factory: each concrete subclass builds the parser product for
    one family of tree node types."""
    # Tree-node type tags this factory is responsible for.
    type: list = []

    @staticmethod
    def Create_Product():
        """Return a new parser product; overridden by concrete factories.

        BUGFIX(latent): the original defined this without self yet it is
        invoked on the class; @staticmethod makes that explicit and keeps
        instance-level calls from raising TypeError.
        """
        pass
class BinaryProduct():
    """Base class of all parser products; subclasses implement ParserData."""

    ## Use GuidTool to decompress data.
    def DeCompressData(self, GuidTool, Section_Data: bytes, FileName) -> bytes:
        """Decompress *Section_Data* with the external tool selected by *GuidTool*."""
        tool_key = struct2stream(GuidTool)
        guidtool = GUIDTools()[tool_key]
        if not guidtool.ifexist:
            logger.error("GuidTool {} is not found when decompressing {} file.\n".format(guidtool.command, FileName))
            raise Exception("Process Failed: GuidTool not found!")
        return guidtool.unpack(Section_Data)

    def ParserData():
        pass
class SectionFactory(BinaryFactory):
    # Handles SECTION tree nodes.
    type = [SECTION_TREE]

    @staticmethod
    def Create_Product():
        """Return the product that parses a section node's payload."""
        return SectionProduct()
class FfsFactory(BinaryFactory):
    # Handles FFS tree nodes (and the section-root pseudo node).
    type = [ROOT_SECTION_TREE, FFS_TREE]

    @staticmethod
    def Create_Product():
        """Return the product that splits a ffs payload into sections."""
        return FfsProduct()
class FvFactory(BinaryFactory):
    # Handles Fv tree nodes (top-level and section-embedded).
    type = [ROOT_FFS_TREE, FV_TREE, SEC_FV_TREE]

    @staticmethod
    def Create_Product():
        """Return the product that splits a Fv payload into ffs files."""
        return FvProduct()
class FdFactory(BinaryFactory):
    # Handles whole-image (Fd) root nodes.
    type = [ROOT_FV_TREE, ROOT_TREE]

    @staticmethod
    def Create_Product():
        """Return the product that locates the Fv images inside a Fd."""
        return FdProduct()
class SectionProduct(BinaryProduct):
    """Product that expands one SECTION node's payload."""
    ## Decompress the compressed section.
    def ParserData(self, Section_Tree, whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
        """Expand *Section_Tree* according to its section type.

        Types 0x01/0x03: stash the raw payload in OriData and parse Data as
        sub-sections.  Type 0x02 (GUID defined): decompress via the external
        GuidTool first, then parse.  Type 0x17 (Fv image): attach the embedded
        firmware volume as a child SEC_FV node.
        """
        if Section_Tree.Data.Type == 0x01:
            Section_Tree.Data.OriData = Section_Tree.Data.Data
            self.ParserSection(Section_Tree, b'')
        # Guided Define Section
        elif Section_Tree.Data.Type == 0x02:
            Section_Tree.Data.OriData = Section_Tree.Data.Data
            DeCompressGuidTool = Section_Tree.Data.ExtHeader.SectionDefinitionGuid
            Section_Tree.Data.Data = self.DeCompressData(DeCompressGuidTool, Section_Tree.Data.Data, Section_Tree.Parent.Data.Name)
            # Size is recomputed from the decompressed payload.
            Section_Tree.Data.Size = len(Section_Tree.Data.Data) + Section_Tree.Data.HeaderLength
            self.ParserSection(Section_Tree, b'')
        elif Section_Tree.Data.Type == 0x03:
            Section_Tree.Data.OriData = Section_Tree.Data.Data
            self.ParserSection(Section_Tree, b'')
        # SEC_FV Section
        elif Section_Tree.Data.Type == 0x17:
            # Fv_count is the module-global Fv naming counter.
            global Fv_count
            Sec_Fv_Info = FvNode(Fv_count, Section_Tree.Data.Data)
            Sec_Fv_Tree = BIOSTREE('FV'+ str(Fv_count))
            Sec_Fv_Tree.type = SEC_FV_TREE
            Sec_Fv_Tree.Data = Sec_Fv_Info
            Sec_Fv_Tree.Data.HOffset = Section_Tree.Data.DOffset
            Sec_Fv_Tree.Data.DOffset = Sec_Fv_Tree.Data.HOffset + Sec_Fv_Tree.Data.Header.HeaderLength
            Sec_Fv_Tree.Data.Data = Section_Tree.Data.Data[Sec_Fv_Tree.Data.Header.HeaderLength:]
            Section_Tree.insertChild(Sec_Fv_Tree)
            Fv_count += 1
    def ParserSection(self, ParTree, Whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
        """Split the payload of *ParTree* into child SECTION nodes.

        Sections are walked sequentially; every section except the last is
        padded to SECTION_COMMON_ALIGNMENT relative to the parent's data start.
        """
        Rel_Offset = 0
        Section_Offset = 0
        # Get the Data from parent tree, if do not have the tree then get it from the whole_data.
        if ParTree.Data != None:
            Data_Size = len(ParTree.Data.Data)
            Section_Offset = ParTree.Data.DOffset
            Whole_Data = ParTree.Data.Data
        else:
            Data_Size = len(Whole_Data)
        # Parser all the data to collect all the Section recorded in its Parent Section.
        while Rel_Offset < Data_Size:
            # Create a SectionNode and set it as the SectionTree's Data
            Section_Info = SectionNode(Whole_Data[Rel_Offset:])
            Section_Tree = BIOSTREE(Section_Info.Name)
            Section_Tree.type = SECTION_TREE
            Section_Info.Data = Whole_Data[Rel_Offset+Section_Info.HeaderLength: Rel_Offset+Section_Info.Size]
            Section_Info.DOffset = Section_Offset + Section_Info.HeaderLength + Rel_Whole_Offset
            Section_Info.HOffset = Section_Offset + Rel_Whole_Offset
            Section_Info.ROffset = Rel_Offset
            # A zero section type terminates the walk.
            if Section_Info.Header.Type == 0:
                break
            # The final Section in parent Section does not need to add padding, else must be 4-bytes align with parent Section start offset
            Pad_Size = 0
            if (Rel_Offset+Section_Info.HeaderLength+len(Section_Info.Data) != Data_Size):
                Pad_Size = GetPadSize(Section_Info.Size, SECTION_COMMON_ALIGNMENT)
                Section_Info.PadData = Pad_Size * b'\x00'
            # GUID-defined (0x02) sections carry their payload at ExtHeader.DataOffset.
            if Section_Info.Header.Type == 0x02:
                Section_Info.DOffset = Section_Offset + Section_Info.ExtHeader.DataOffset + Rel_Whole_Offset
                Section_Info.Data = Whole_Data[Rel_Offset+Section_Info.ExtHeader.DataOffset: Rel_Offset+Section_Info.Size]
            # Version (0x14) and UI (0x15) sections annotate the parent node.
            if Section_Info.Header.Type == 0x14:
                ParTree.Data.Version = Section_Info.ExtHeader.GetVersionString()
            if Section_Info.Header.Type == 0x15:
                ParTree.Data.UiName = Section_Info.ExtHeader.GetUiString()
            if Section_Info.Header.Type == 0x19:
                # A type-0x19 section consisting only of zero bytes is a pad section.
                if Section_Info.Data.replace(b'\x00', b'') == b'':
                    Section_Info.IsPadSection = True
            Section_Offset += Section_Info.Size + Pad_Size
            Rel_Offset += Section_Info.Size + Pad_Size
            Section_Tree.Data = Section_Info
            ParTree.insertChild(Section_Tree)
class FfsProduct(BinaryProduct):
    """Product that splits a FFS file's payload into SECTION child nodes."""
    # ParserFFs / GetSection
    def ParserData(self, ParTree, Whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
        """Walk the ffs payload and attach one SECTION node per section found.

        Every section except the last is padded to SECTION_COMMON_ALIGNMENT;
        Version/UI sections also annotate the parent ffs node.
        """
        Rel_Offset = 0
        Section_Offset = 0
        # Get the Data from parent tree, if do not have the tree then get it from the whole_data.
        if ParTree.Data != None:
            Data_Size = len(ParTree.Data.Data)
            Section_Offset = ParTree.Data.DOffset
            Whole_Data = ParTree.Data.Data
        else:
            Data_Size = len(Whole_Data)
        # Parser all the data to collect all the Section recorded in Ffs.
        while Rel_Offset < Data_Size:
            # Create a SectionNode and set it as the SectionTree's Data
            Section_Info = SectionNode(Whole_Data[Rel_Offset:])
            Section_Tree = BIOSTREE(Section_Info.Name)
            Section_Tree.type = SECTION_TREE
            Section_Info.Data = Whole_Data[Rel_Offset+Section_Info.HeaderLength: Rel_Offset+Section_Info.Size]
            Section_Info.DOffset = Section_Offset + Section_Info.HeaderLength + Rel_Whole_Offset
            Section_Info.HOffset = Section_Offset + Rel_Whole_Offset
            Section_Info.ROffset = Rel_Offset
            # A zero section type terminates the walk.
            if Section_Info.Header.Type == 0:
                break
            # The final Section in Ffs does not need to add padding, else must be 4-bytes align with Ffs start offset
            Pad_Size = 0
            if (Rel_Offset+Section_Info.HeaderLength+len(Section_Info.Data) != Data_Size):
                Pad_Size = GetPadSize(Section_Info.Size, SECTION_COMMON_ALIGNMENT)
                Section_Info.PadData = Pad_Size * b'\x00'
            # GUID-defined (0x02) sections carry their payload at ExtHeader.DataOffset.
            if Section_Info.Header.Type == 0x02:
                Section_Info.DOffset = Section_Offset + Section_Info.ExtHeader.DataOffset + Rel_Whole_Offset
                Section_Info.Data = Whole_Data[Rel_Offset+Section_Info.ExtHeader.DataOffset: Rel_Offset+Section_Info.Size]
            # If Section is Version or UI type, it saves the version and UI info of its parent Ffs.
            if Section_Info.Header.Type == 0x14:
                ParTree.Data.Version = Section_Info.ExtHeader.GetVersionString()
            if Section_Info.Header.Type == 0x15:
                ParTree.Data.UiName = Section_Info.ExtHeader.GetUiString()
            if Section_Info.Header.Type == 0x19:
                # A type-0x19 section consisting only of zero bytes is a pad section.
                if Section_Info.Data.replace(b'\x00', b'') == b'':
                    Section_Info.IsPadSection = True
            Section_Offset += Section_Info.Size + Pad_Size
            Rel_Offset += Section_Info.Size + Pad_Size
            Section_Tree.Data = Section_Info
            ParTree.insertChild(Section_Tree)
class FvProduct(BinaryProduct):
    """Product that splits a Fv's payload into FFS / pad / free-space children."""
    ## ParserFv / GetFfs
    def ParserData(self, ParTree, Whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
        """Walk the Fv payload and attach one child node per ffs found.

        A tail shorter than a ffs header (24 bytes), or a trailing ffs whose
        header is all 0xFF, is recorded as free space on the parent Fv.
        """
        Ffs_Offset = 0
        Rel_Offset = 0
        # Get the Data from parent tree, if do not have the tree then get it from the whole_data.
        if ParTree.Data != None:
            Data_Size = len(ParTree.Data.Data)
            Ffs_Offset = ParTree.Data.DOffset
            Whole_Data = ParTree.Data.Data
        else:
            Data_Size = len(Whole_Data)
        # Parser all the data to collect all the Ffs recorded in Fv.
        while Rel_Offset < Data_Size:
            # Create a FfsNode and set it as the FFsTree's Data
            if Data_Size - Rel_Offset < 24:
                # Remainder is too small to hold a ffs header: free space.
                Ffs_Tree = BIOSTREE('Free_Space')
                Ffs_Tree.type = FFS_FREE_SPACE
                Ffs_Tree.Data = FreeSpaceNode(Whole_Data[Rel_Offset:])
                Ffs_Tree.Data.HOffset = Ffs_Offset + Rel_Whole_Offset
                Ffs_Tree.Data.DOffset = Ffs_Tree.Data.HOffset
                ParTree.Data.Free_Space = Data_Size - Rel_Offset
                ParTree.insertChild(Ffs_Tree)
                Rel_Offset = Data_Size
            else:
                Ffs_Info = FfsNode(Whole_Data[Rel_Offset:])
                Ffs_Tree = BIOSTREE(Ffs_Info.Name)
                Ffs_Info.HOffset = Ffs_Offset + Rel_Whole_Offset
                Ffs_Info.DOffset = Ffs_Offset + Ffs_Info.Header.HeaderLength + Rel_Whole_Offset
                Ffs_Info.ROffset = Rel_Offset
                if Ffs_Info.Name == PADVECTOR:
                    Ffs_Tree.type = FFS_PAD
                    Ffs_Info.Data = Whole_Data[Rel_Offset+Ffs_Info.Header.HeaderLength: Rel_Offset+Ffs_Info.Size]
                    Ffs_Info.Size = len(Ffs_Info.Data) + Ffs_Info.Header.HeaderLength
                    # if current Ffs is the final ffs of Fv and full of b'\xff', define it with Free_Space
                    if struct2stream(Ffs_Info.Header).replace(b'\xff', b'') == b'':
                        Ffs_Tree.type = FFS_FREE_SPACE
                        Ffs_Info.Data = Whole_Data[Rel_Offset:]
                        Ffs_Info.Size = len(Ffs_Info.Data)
                        ParTree.Data.Free_Space = Ffs_Info.Size
                else:
                    Ffs_Tree.type = FFS_TREE
                    Ffs_Info.Data = Whole_Data[Rel_Offset+Ffs_Info.Header.HeaderLength: Rel_Offset+Ffs_Info.Size]
                # The final Ffs in Fv does not need to add padding, else must be 8-bytes align with Fv start offset
                Pad_Size = 0
                if Ffs_Tree.type != FFS_FREE_SPACE and (Rel_Offset+Ffs_Info.Header.HeaderLength+len(Ffs_Info.Data) != Data_Size):
                    Pad_Size = GetPadSize(Ffs_Info.Size, FFS_COMMON_ALIGNMENT)
                    Ffs_Info.PadData = Pad_Size * b'\xff'
                Ffs_Offset += Ffs_Info.Size + Pad_Size
                Rel_Offset += Ffs_Info.Size + Pad_Size
                Ffs_Tree.Data = Ffs_Info
                ParTree.insertChild(Ffs_Tree)
class FdProduct(BinaryProduct):
    """Top-level product: locates every Fv inside a whole flash image (Fd)."""
    type = [ROOT_FV_TREE, ROOT_TREE]
    ## Create DataTree with first level /fv Info, then parser each Fv.
    def ParserData(self, WholeFvTree, whole_data: bytes=b'', offset: int=0) -> None:
        """Attach one child per detected Fv, wrapping any bytes before,
        between or after the Fvs in BINARY nodes so the whole image is covered.
        """
        # Get all Fv image in Fd with offset and length
        Fd_Struct = self.GetFvFromFd(whole_data)
        data_size = len(whole_data)
        Binary_count = 0
        global Fv_count
        # If the first Fv image is the Binary Fv, add it into the tree.
        if Fd_Struct[0][1] != 0:
            Binary_node = BIOSTREE('BINARY'+ str(Binary_count))
            Binary_node.type = BINARY_DATA
            Binary_node.Data = BinaryNode(str(Binary_count))
            Binary_node.Data.Data = whole_data[:Fd_Struct[0][1]]
            Binary_node.Data.Size = len(Binary_node.Data.Data)
            Binary_node.Data.HOffset = 0 + offset
            WholeFvTree.insertChild(Binary_node)
            Binary_count += 1
        # Add the first collected Fv image into the tree.
        Cur_node = BIOSTREE(Fd_Struct[0][0]+ str(Fv_count))
        Cur_node.type = Fd_Struct[0][0]
        Cur_node.Data = FvNode(Fv_count, whole_data[Fd_Struct[0][1]:Fd_Struct[0][1]+Fd_Struct[0][2][0]])
        Cur_node.Data.HOffset = Fd_Struct[0][1] + offset
        Cur_node.Data.DOffset = Cur_node.Data.HOffset+Cur_node.Data.Header.HeaderLength
        Cur_node.Data.Data = whole_data[Fd_Struct[0][1]+Cur_node.Data.Header.HeaderLength:Fd_Struct[0][1]+Cur_node.Data.Size]
        WholeFvTree.insertChild(Cur_node)
        Fv_count += 1
        Fv_num = len(Fd_Struct)
        # Add all the collected Fv image and the Binary Fv image between them into the tree.
        for i in range(Fv_num-1):
            if Fd_Struct[i][1]+Fd_Struct[i][2][0] != Fd_Struct[i+1][1]:
                # Gap between two Fvs becomes a BINARY node.
                Binary_node = BIOSTREE('BINARY'+ str(Binary_count))
                Binary_node.type = BINARY_DATA
                Binary_node.Data = BinaryNode(str(Binary_count))
                Binary_node.Data.Data = whole_data[Fd_Struct[i][1]+Fd_Struct[i][2][0]:Fd_Struct[i+1][1]]
                Binary_node.Data.Size = len(Binary_node.Data.Data)
                Binary_node.Data.HOffset = Fd_Struct[i][1]+Fd_Struct[i][2][0] + offset
                WholeFvTree.insertChild(Binary_node)
                Binary_count += 1
            Cur_node = BIOSTREE(Fd_Struct[i+1][0]+ str(Fv_count))
            Cur_node.type = Fd_Struct[i+1][0]
            Cur_node.Data = FvNode(Fv_count, whole_data[Fd_Struct[i+1][1]:Fd_Struct[i+1][1]+Fd_Struct[i+1][2][0]])
            Cur_node.Data.HOffset = Fd_Struct[i+1][1] + offset
            Cur_node.Data.DOffset = Cur_node.Data.HOffset+Cur_node.Data.Header.HeaderLength
            Cur_node.Data.Data = whole_data[Fd_Struct[i+1][1]+Cur_node.Data.Header.HeaderLength:Fd_Struct[i+1][1]+Cur_node.Data.Size]
            WholeFvTree.insertChild(Cur_node)
            Fv_count += 1
        # If the final Fv image is the Binary Fv, add it into the tree
        if Fd_Struct[-1][1] + Fd_Struct[-1][2][0] != data_size:
            Binary_node = BIOSTREE('BINARY'+ str(Binary_count))
            Binary_node.type = BINARY_DATA
            Binary_node.Data = BinaryNode(str(Binary_count))
            Binary_node.Data.Data = whole_data[Fd_Struct[-1][1]+Fd_Struct[-1][2][0]:]
            Binary_node.Data.Size = len(Binary_node.Data.Data)
            Binary_node.Data.HOffset = Fd_Struct[-1][1]+Fd_Struct[-1][2][0] + offset
            WholeFvTree.insertChild(Binary_node)
            Binary_count += 1
    ## Get the first level Fv from Fd file.
    def GetFvFromFd(self, whole_data: bytes=b'') -> list:
        """Scan *whole_data* for FFS2/FFS3/NVDATA filesystem GUIDs followed by
        the FVH signature; return [type, header_offset, (length,)] entries
        sorted by offset, with Fvs nested inside another Fv removed.
        """
        Fd_Struct = []
        data_size = len(whole_data)
        cur_index = 0
        # Get all the EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE FV image offset and length.
        while cur_index < data_size:
            if EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE in whole_data[cur_index:]:
                target_index = whole_data[cur_index:].index(EFI_FIRMWARE_FILE_SYSTEM2_GUID_BYTE) + cur_index
                # The signature sits 24 bytes behind the GUID; the Fv header
                # itself starts 16 bytes before the GUID.
                if whole_data[target_index+24:target_index+28] == FVH_SIGNATURE:
                    Fd_Struct.append([FV_TREE, target_index - 16, unpack("Q", whole_data[target_index+16:target_index+24])])
                    cur_index = Fd_Struct[-1][1] + Fd_Struct[-1][2][0]
                else:
                    cur_index = target_index + 16
            else:
                cur_index = data_size
        cur_index = 0
        # Get all the EFI_FIRMWARE_FILE_SYSTEM3_GUID_BYTE FV image offset and length.
        while cur_index < data_size:
            if EFI_FIRMWARE_FILE_SYSTEM3_GUID_BYTE in whole_data[cur_index:]:
                target_index = whole_data[cur_index:].index(EFI_FIRMWARE_FILE_SYSTEM3_GUID_BYTE) + cur_index
                if whole_data[target_index+24:target_index+28] == FVH_SIGNATURE:
                    Fd_Struct.append([FV_TREE, target_index - 16, unpack("Q", whole_data[target_index+16:target_index+24])])
                    cur_index = Fd_Struct[-1][1] + Fd_Struct[-1][2][0]
                else:
                    cur_index = target_index + 16
            else:
                cur_index = data_size
        cur_index = 0
        # Get all the EFI_SYSTEM_NVDATA_FV_GUID_BYTE FV image offset and length.
        while cur_index < data_size:
            if EFI_SYSTEM_NVDATA_FV_GUID_BYTE in whole_data[cur_index:]:
                target_index = whole_data[cur_index:].index(EFI_SYSTEM_NVDATA_FV_GUID_BYTE) + cur_index
                if whole_data[target_index+24:target_index+28] == FVH_SIGNATURE:
                    Fd_Struct.append([DATA_FV_TREE, target_index - 16, unpack("Q", whole_data[target_index+16:target_index+24])])
                    cur_index = Fd_Struct[-1][1] + Fd_Struct[-1][2][0]
                else:
                    cur_index = target_index + 16
            else:
                cur_index = data_size
        # Sort all the collect Fv image with offset.
        Fd_Struct.sort(key=lambda x:x[1])
        tmp_struct = copy.deepcopy(Fd_Struct)
        tmp_index = 0
        Fv_num = len(Fd_Struct)
        # Remove the Fv image included in another Fv image.
        # NOTE(review): removal is done by index adjusted with tmp_index while
        # comparing against the untouched deep copy - verify against a Fd with
        # several nested Fvs before changing this loop.
        for i in range(1,Fv_num):
            if tmp_struct[i][1]+tmp_struct[i][2][0] < tmp_struct[i-1][1]+tmp_struct[i-1][2][0]:
                Fd_Struct.remove(Fd_Struct[i-tmp_index])
                tmp_index += 1
        return Fd_Struct
class ParserEntry():
    """Dispatches a tree node to the factory that can parse its node type."""
    # Maps a tree-node type tag to the factory class responsible for it.
    FactoryTable: dict = {
        SECTION_TREE: SectionFactory,
        ROOT_SECTION_TREE: FfsFactory,
        FFS_TREE: FfsFactory,
        ROOT_FFS_TREE: FvFactory,
        FV_TREE: FvFactory,
        SEC_FV_TREE: FvFactory,
        ROOT_FV_TREE: FdFactory,
        ROOT_TREE: FdFactory,
    }

    def GetTargetFactory(self, Tree_type: str) -> BinaryFactory:
        """Return the factory for *Tree_type*, or None for unknown types."""
        return self.FactoryTable.get(Tree_type)

    def Generate_Product(self, TargetFactory: BinaryFactory, Tree, Data: bytes, Offset: int) -> None:
        """Create the factory's product and let it parse *Tree*."""
        TargetFactory.Create_Product().ParserData(Tree, Data, Offset)

    def DataParser(self, Tree, Data: bytes, Offset: int) -> None:
        """Parse *Tree* if a factory is registered for its node type."""
        factory = self.GetTargetFactory(Tree.type)
        if factory:
            self.Generate_Product(factory, Tree, Data, Offset)
| edk2-master | BaseTools/Source/Python/FMMT/core/BinaryFactoryProduct.py |
## @file
# This file is used to define the interface of Bios Parser.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from FirmwareStorageFormat.Common import *
from core.BinaryFactoryProduct import ParserEntry
from core.BiosTreeNode import *
from core.BiosTree import *
from core.GuidTools import *
from utils.FmmtLogger import FmmtLogger as logger
class FMMTParser:
    """Owns a whole-image BIOSTREE plus the re-serialized bytes (FinalData)."""
    def __init__(self, name: str, TYPE: str) -> None:
        self.WholeFvTree = BIOSTREE(name)
        self.WholeFvTree.type = TYPE
        # Flat byte string rebuilt by Encapsulation().
        self.FinalData = b''
        # Per-node layout strings collected for logging/layout output.
        self.BinaryInfo = []
    ## Parser the nodes in WholeTree.
    def ParserFromRoot(self, WholeFvTree=None, whole_data: bytes=b'', Reloffset: int=0) -> None:
        """Recursively parse *whole_data* into tree nodes.

        NOTE(review): for ROOT/ROOT_FV nodes this always parses into
        self.WholeFvTree rather than the WholeFvTree argument - callers pass
        the same object, but confirm before relying on the parameter.
        """
        if WholeFvTree.type == ROOT_TREE or WholeFvTree.type == ROOT_FV_TREE:
            ParserEntry().DataParser(self.WholeFvTree, whole_data, Reloffset)
        else:
            ParserEntry().DataParser(WholeFvTree, whole_data, Reloffset)
        # Children read their payload from their own Data, so no bytes are passed.
        for Child in WholeFvTree.Child:
            self.ParserFromRoot(Child, "")
    ## Encapuslation all the data in tree into self.FinalData
    def Encapsulation(self, rootTree, CompressStatus: bool) -> None:
        """Serialize the tree depth-first, appending bytes to self.FinalData.

        CompressStatus=True re-emits compressed sections from their parsed
        children; False emits the saved original (compressed) OriData bytes.
        """
        # If current node is Root node, skip it.
        if rootTree.type == ROOT_TREE or rootTree.type == ROOT_FV_TREE or rootTree.type == ROOT_FFS_TREE or rootTree.type == ROOT_SECTION_TREE:
            logger.debug('Encapsulated successfully!')
        # If current node do not have Header, just add Data.
        elif rootTree.type == BINARY_DATA or rootTree.type == FFS_FREE_SPACE:
            self.FinalData += rootTree.Data.Data
            rootTree.Child = []
        # If current node do not have Child and ExtHeader, just add its Header and Data.
        elif rootTree.type == DATA_FV_TREE or rootTree.type == FFS_PAD:
            self.FinalData += struct2stream(rootTree.Data.Header) + rootTree.Data.Data + rootTree.Data.PadData
            if rootTree.isFinalChild():
                ParTree = rootTree.Parent
                if ParTree.type != 'ROOT':
                    self.FinalData += ParTree.Data.PadData
            rootTree.Child = []
        # If current node is not Section node and may have Child and ExtHeader, add its Header,ExtHeader. If do not have Child, add its Data.
        elif rootTree.type == FV_TREE or rootTree.type == FFS_TREE or rootTree.type == SEC_FV_TREE:
            if rootTree.HasChild():
                self.FinalData += struct2stream(rootTree.Data.Header)
            else:
                self.FinalData += struct2stream(rootTree.Data.Header) + rootTree.Data.Data + rootTree.Data.PadData
                if rootTree.isFinalChild():
                    ParTree = rootTree.Parent
                    if ParTree.type != 'ROOT':
                        self.FinalData += ParTree.Data.PadData
        # If current node is Section, need to consider its ExtHeader, Child and Compressed Status.
        elif rootTree.type == SECTION_TREE:
            # Not compressed section
            if rootTree.Data.OriData == b'' or (rootTree.Data.OriData != b'' and CompressStatus):
                if rootTree.HasChild():
                    if rootTree.Data.ExtHeader:
                        self.FinalData += struct2stream(rootTree.Data.Header) + struct2stream(rootTree.Data.ExtHeader)
                    else:
                        self.FinalData += struct2stream(rootTree.Data.Header)
                else:
                    Data = rootTree.Data.Data
                    if rootTree.Data.ExtHeader:
                        self.FinalData += struct2stream(rootTree.Data.Header) + struct2stream(rootTree.Data.ExtHeader) + Data + rootTree.Data.PadData
                    else:
                        self.FinalData += struct2stream(rootTree.Data.Header) + Data + rootTree.Data.PadData
                    if rootTree.isFinalChild():
                        ParTree = rootTree.Parent
                        self.FinalData += ParTree.Data.PadData
            # If compressed section
            else:
                # Children are dropped: the stored OriData already contains them.
                Data = rootTree.Data.OriData
                rootTree.Child = []
                if rootTree.Data.ExtHeader:
                    self.FinalData += struct2stream(rootTree.Data.Header) + struct2stream(rootTree.Data.ExtHeader) + Data + rootTree.Data.PadData
                else:
                    self.FinalData += struct2stream(rootTree.Data.Header) + Data + rootTree.Data.PadData
                if rootTree.isFinalChild():
                    ParTree = rootTree.Parent
                    self.FinalData += ParTree.Data.PadData
        for Child in rootTree.Child:
            self.Encapsulation(Child, CompressStatus)
| edk2-master | BaseTools/Source/Python/FMMT/core/FMMTParser.py |
## @file
# This file is used to define the Bios layout tree structure and related operations.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import collections
from FirmwareStorageFormat.Common import *
from utils.FmmtLogger import FmmtLogger as logger
# Node-type tags used throughout the FMMT bios layout tree.
# Root-level containers (whole image / per-kind roots):
ROOT_TREE = 'ROOT'
ROOT_FV_TREE = 'ROOT_FV_TREE'
ROOT_FFS_TREE = 'ROOT_FFS_TREE'
ROOT_SECTION_TREE = 'ROOT_SECTION_TREE'
# Firmware volume / file / section node kinds:
FV_TREE = 'FV'
DATA_FV_TREE = 'DATA_FV'
FFS_TREE = 'FFS'
FFS_PAD = 'FFS_PAD'
FFS_FREE_SPACE = 'FFS_FREE_SPACE'
SECTION_TREE = 'SECTION'
SEC_FV_TREE = 'SEC_FV_IMAGE'
BINARY_DATA = 'BINARY'
# Groupings used for membership tests (note FfsType/SecType are plain
# strings, not lists, so "x in FfsType" is a substring test).
RootType = [ROOT_TREE, ROOT_FV_TREE, ROOT_FFS_TREE, ROOT_SECTION_TREE]
FvType = [FV_TREE, SEC_FV_TREE]
FfsType = FFS_TREE
SecType = SECTION_TREE
## BIOSTREE
#
# One node of the FMMT bios layout tree.  Each node keeps a parent pointer,
# an ordered Child list, and doubly-linked sibling pointers (NextRel /
# LastRel) that must be patched consistently on insert/delete.
#
class BIOSTREE:
    def __init__(self, NodeName: str) -> None:
        self.key = NodeName
        # One of the *_TREE tags defined above; set by the parser.
        self.type = None
        # Payload object (FV/FFS/Section data); its shape depends on type.
        self.Data = None
        self.Child = []
        # NOTE(review): Findlist is initialized here but FindNode() takes the
        # result list as a parameter instead -- confirm this field is unused.
        self.Findlist = []
        self.Parent = None
        # Sibling links: next/previous child of the same parent.
        self.NextRel = None
        self.LastRel = None
    ## Return True when this node has at least one child.
    def HasChild(self) -> bool:
        if self.Child == []:
            return False
        else:
            return True
    ## Return True when this node is the last child of its parent.
    #  A root node (no parent) returns False.
    def isFinalChild(self) -> bool:
        ParTree = self.Parent
        if ParTree:
            if ParTree.Child[-1] == self:
                return True
        return False
    # FvTree.insertChild()
    ## Insert newNode as a child of this node.
    #  With no pos (or pos falsy) the node is appended and linked after the
    #  current last child; otherwise it is spliced in before index pos and
    #  both neighbours' sibling links are repaired.
    #  NOTE(review): "if not pos" treats pos=0 like None and appends --
    #  confirm callers never request insertion at index 0.
    def insertChild(self, newNode, pos: int=None) -> None:
        if len(self.Child) == 0:
            self.Child.append(newNode)
        else:
            if not pos:
                LastTree = self.Child[-1]
                self.Child.append(newNode)
                LastTree.NextRel = newNode
                newNode.LastRel = LastTree
            else:
                # Splice before current index pos: newNode's links come from
                # the nodes at pos-1 / pos, which then point at newNode.
                newNode.NextRel = self.Child[pos-1].NextRel
                newNode.LastRel = self.Child[pos].LastRel
                self.Child[pos-1].NextRel = newNode
                self.Child[pos].LastRel = newNode
                self.Child.insert(pos, newNode)
        newNode.Parent = self
    # lastNode.insertRel(newNode)
    ## Insert newNode as the sibling immediately after this node.
    #  NOTE(review): the old successor's LastRel and newNode.NextRel are not
    #  updated here -- confirm callers only use this at the tail.
    def insertRel(self, newNode) -> None:
        if self.Parent:
            parentTree = self.Parent
            new_index = parentTree.Child.index(self) + 1
            parentTree.Child.insert(new_index, newNode)
        self.NextRel = newNode
        newNode.LastRel = self
    ## Remove the node matching deletekey and repair the sibling links,
    #  returning the removed node (None when not found).
    #  NOTE(review): FindNode() now takes (key, Findlist) and returns None,
    #  so this 2-value unpack of self.FindNode(deletekey) would raise at
    #  runtime -- this method looks stale/unused; TODO confirm.
    def deleteNode(self, deletekey: str) -> None:
        FindStatus, DeleteTree = self.FindNode(deletekey)
        if FindStatus:
            parentTree = DeleteTree.Parent
            lastTree = DeleteTree.LastRel
            nextTree = DeleteTree.NextRel
            if parentTree:
                index = parentTree.Child.index(DeleteTree)
                del parentTree.Child[index]
            if lastTree and nextTree:
                lastTree.NextRel = nextTree
                nextTree.LastRel = lastTree
            elif lastTree:
                lastTree.NextRel = None
            elif nextTree:
                nextTree.LastRel = None
            return DeleteTree
        else:
            logger.error('Could not find the target tree')
            return None
    ## Depth-first search: every node whose key, Data.Name or (for FFS
    #  nodes) Data.UiName equals key is appended to the caller-supplied
    #  Findlist; nothing is returned.
    def FindNode(self, key: str, Findlist: list) -> None:
        if self.key == key or (self.Data and self.Data.Name == key) or (self.type == FFS_TREE and self.Data.UiName == key):
            Findlist.append(self)
        for item in self.Child:
            item.FindNode(key, Findlist)
    ## Return the list of nodes from the root down to this node (inclusive).
    #  Note: rebinds the local name "self" while walking up; the object
    #  itself is not modified.
    def GetTreePath(self):
        BiosTreePath = [self]
        while self.Parent:
            BiosTreePath.insert(0, self.Parent)
            self = self.Parent
        return BiosTreePath
    ## Render the dict produced by ExportTree() into human-readable lines,
    #  appended to Info; "space" is the current indent width and ParFvId the
    #  id of the enclosing FV (for child-FV captions).
    def parserTree(self, TargetDict: dict=None, Info: list=None, space: int=0, ParFvId="") -> None:
        # ExportTree() emits one top-level key per dict.
        Key = list(TargetDict.keys())[0]
        if TargetDict[Key]["Type"] in RootType:
            Info.append("Image File: {}".format(Key))
            Info.append("FilesNum: {}".format(TargetDict.get(Key).get('FilesNum')))
            Info.append("\n")
        elif TargetDict[Key]["Type"] in FvType:
            space += 2
            if TargetDict[Key]["Type"] == SEC_FV_TREE:
                Info.append("{}Child FV named {} of {}".format(space*" ", Key, ParFvId))
                space += 2
            else:
                Info.append("FvId: {}".format(Key))
                ParFvId = Key
            Info.append("{}FvNameGuid: {}".format(space*" ", TargetDict.get(Key).get('FvNameGuid')))
            Info.append("{}Attributes: {}".format(space*" ", TargetDict.get(Key).get('Attributes')))
            Info.append("{}Total Volume Size: {}".format(space*" ", TargetDict.get(Key).get('Size')))
            Info.append("{}Free Volume Size: {}".format(space*" ", TargetDict.get(Key).get('FreeSize')))
            Info.append("{}Volume Offset: {}".format(space*" ", TargetDict.get(Key).get('Offset')))
            Info.append("{}FilesNum: {}".format(space*" ", TargetDict.get(Key).get('FilesNum')))
        # NOTE(review): FfsType is the string 'FFS', so this "in" is a
        # substring test; it only matches the exact 'FFS' type value.
        elif TargetDict[Key]["Type"] in FfsType:
            space += 2
            # UiName is stored as the repr of a bytes value; "b''" means no name.
            if TargetDict.get(Key).get('UiName') != "b''":
                Info.append("{}File: {} / {}".format(space*" ", Key, TargetDict.get(Key).get('UiName')))
            else:
                Info.append("{}File: {}".format(space*" ", Key))
        if "Files" in list(TargetDict[Key].keys()):
            for item in TargetDict[Key]["Files"]:
                self.parserTree(item, Info, space, ParFvId)
    ## Serialize this subtree into nested OrderedDicts (one key per node),
    #  suitable for parserTree() / JSON dumping.
    def ExportTree(self,TreeInfo: dict=None) -> dict:
        if TreeInfo is None:
            TreeInfo =collections.OrderedDict()
        if self.type == ROOT_TREE or self.type == ROOT_FV_TREE or self.type == ROOT_FFS_TREE or self.type == ROOT_SECTION_TREE:
            key = str(self.key)
            TreeInfo[self.key] = collections.OrderedDict()
            TreeInfo[self.key]["Name"] = key
            TreeInfo[self.key]["Type"] = self.type
            TreeInfo[self.key]["FilesNum"] = len(self.Child)
        elif self.type == FV_TREE or self.type == SEC_FV_TREE:
            key = str(self.Data.FvId)
            TreeInfo[key] = collections.OrderedDict()
            TreeInfo[key]["Name"] = key
            # Only emit FvNameGuid when it differs from the FvId.
            if self.Data.FvId != self.Data.Name:
                TreeInfo[key]["FvNameGuid"] = str(self.Data.Name)
            TreeInfo[key]["Type"] = self.type
            TreeInfo[key]["Attributes"] = hex(self.Data.Header.Attributes)
            TreeInfo[key]["Size"] = hex(self.Data.Header.FvLength)
            TreeInfo[key]["FreeSize"] = hex(self.Data.Free_Space)
            TreeInfo[key]["Offset"] = hex(self.Data.HOffset)
            TreeInfo[key]["FilesNum"] = len(self.Child)
        elif self.type == FFS_TREE:
            key = str(self.Data.Name)
            TreeInfo[key] = collections.OrderedDict()
            TreeInfo[key]["Name"] = key
            TreeInfo[key]["UiName"] = '{}'.format(self.Data.UiName)
            TreeInfo[key]["Version"] = '{}'.format(self.Data.Version)
            TreeInfo[key]["Type"] = self.type
            TreeInfo[key]["Size"] = hex(self.Data.Size)
            TreeInfo[key]["Offset"] = hex(self.Data.HOffset)
            TreeInfo[key]["FilesNum"] = len(self.Child)
        # Section type 0x02 carries compressed data: report both stored and
        # decompressed sizes.
        elif self.type == SECTION_TREE and self.Data.Type == 0x02:
            key = str(self.Data.Name)
            TreeInfo[key] = collections.OrderedDict()
            TreeInfo[key]["Name"] = key
            TreeInfo[key]["Type"] = self.type
            TreeInfo[key]["Size"] = hex(len(self.Data.OriData) + self.Data.HeaderLength)
            TreeInfo[key]["DecompressedSize"] = hex(self.Data.Size)
            TreeInfo[key]["Offset"] = hex(self.Data.HOffset)
            TreeInfo[key]["FilesNum"] = len(self.Child)
        elif self is not None:
            key = str(self.Data.Name)
            TreeInfo[key] = collections.OrderedDict()
            TreeInfo[key]["Name"] = key
            TreeInfo[key]["Type"] = self.type
            TreeInfo[key]["Size"] = hex(self.Data.Size)
            TreeInfo[key]["Offset"] = hex(self.Data.HOffset)
            TreeInfo[key]["FilesNum"] = len(self.Child)
        for item in self.Child:
            TreeInfo[key].setdefault('Files',[]).append( item.ExportTree())
        return TreeInfo
| edk2-master | BaseTools/Source/Python/FMMT/core/BiosTree.py |
## @file
# This file is used to define the Fmmt Logger.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import logging
import sys
import os
# Name of the build log; delete any stale copy so every run starts with a
# fresh log that reflects only the current invocation.
logfile = 'FMMT_Build.log'
if os.path.exists(logfile):
    os.remove(logfile)
# The 'FMMT' logger accepts everything (DEBUG); the two handlers below
# decide what reaches each destination.
FmmtLogger = logging.getLogger('FMMT')
FmmtLogger.setLevel(logging.DEBUG)
# Console handler (stdout): INFO and above only.
log_stream_handler=logging.StreamHandler(sys.stdout)
# File handler: no level of its own, so it inherits the logger's DEBUG.
log_file_handler=logging.FileHandler(logfile)
log_stream_handler.setLevel(logging.INFO)
# Both destinations use the same "LEVEL   : message" layout.
stream_format=logging.Formatter("%(levelname)-8s: %(message)s")
file_format=logging.Formatter("%(levelname)-8s: %(message)s")
log_stream_handler.setFormatter(stream_format)
log_file_handler.setFormatter(file_format)
FmmtLogger.addHandler(log_stream_handler)
FmmtLogger.addHandler(log_file_handler)
| edk2-master | BaseTools/Source/Python/FMMT/utils/FmmtLogger.py |
## @file
# This file is used to define the printer for Bios layout.
#
# Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
from utils.FmmtLogger import FmmtLogger as logger
def GetFormatter(layout_format: str):
    """Return the Formatter instance for *layout_format*.

    Known formats are 'json', 'yaml' and 'html'; anything else falls back
    to the plain-text formatter.
    """
    dispatch = {
        'json': JsonFormatter,
        'yaml': YamlFormatter,
        'html': HtmlFormatter,
    }
    return dispatch.get(layout_format, TxtFormatter)()
class Formatter(object):
    ## Abstract base class for layout formatters; subclasses must override
    #  dump() to write the parsed layout to outputfile (or stdout).
    def dump(self, layoutdict, layoutlist, outputfile: str=None) -> None:
        # BUGFIX: the original raised NotImplemented, which is a sentinel
        # constant, not an exception -- raising it is itself a TypeError.
        # NotImplementedError is the correct "abstract method" exception.
        raise NotImplementedError
class JsonFormatter(Formatter):
    ## Dump the layout dictionary as pretty-printed (indent=2) JSON.
    #
    # Writes to outputfile when one is given, otherwise prints the JSON to
    # stdout.  Falls back to the plain-text formatter when the json module
    # cannot be imported.
    #
    # Fixes vs. the original:
    #  - bare "except:" narrowed to "except ImportError:" so unrelated
    #    exceptions (e.g. KeyboardInterrupt) are no longer swallowed;
    #  - removed a stray debug "print(outputfile)" that leaked the path
    #    (or "None") to stdout on every call.
    def dump(self, layoutdict: dict, layoutlist: list, outputfile: str=None) -> None:
        try:
            import json
        except ImportError:
            # No json support available: degrade to plain-text output.
            TxtFormatter().dump(layoutdict, layoutlist, outputfile)
            return
        if outputfile:
            with open(outputfile, "w") as fw:
                json.dump(layoutdict, fw, indent=2)
        else:
            print(json.dumps(layoutdict, indent=2))
class TxtFormatter(Formatter):
    ## Plain-text formatter: one layout line per output line.
    def LogPrint(self, layoutlist: list) -> None:
        # Echo every layout line to stdout, then a blank separator line.
        for line in layoutlist:
            print(line)
        print('\n')
    def dump(self, layoutdict: dict, layoutlist: list, outputfile: str=None) -> None:
        ## Write the layout lines to outputfile, one per line.
        logger.info('Binary Layout Info is saved in {} file.'.format(outputfile))
        with open(outputfile, "w") as out:
            out.writelines(line + '\n' for line in layoutlist)
class YamlFormatter(Formatter):
    ## YAML output is not implemented yet; delegate to the text formatter.
    def dump(self, layoutdict, layoutlist, outputfile = None):
        fallback = TxtFormatter()
        fallback.dump(layoutdict, layoutlist, outputfile)
class HtmlFormatter(Formatter):
    ## HTML output is not implemented yet; delegate to the text formatter.
    def dump(self, layoutdict, layoutlist, outputfile = None):
        fallback = TxtFormatter()
        fallback.dump(layoutdict, layoutlist, outputfile)
| edk2-master | BaseTools/Source/Python/FMMT/utils/FvLayoutPrint.py |
## @file
# Common routines used by workspace
#
# Copyright (c) 2012 - 2020, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
from collections import OrderedDict, defaultdict
from Common.DataType import SUP_MODULE_USER_DEFINED
from Common.DataType import SUP_MODULE_HOST_APPLICATION
from .BuildClassObject import LibraryClassObject
import Common.GlobalData as GlobalData
from Workspace.BuildClassObject import StructurePcd
from Common.BuildToolError import RESOURCE_NOT_AVAILABLE
from Common.BuildToolError import OPTION_MISSING
from Common.BuildToolError import BUILD_ERROR
import Common.EdkLogger as EdkLogger
class OrderedListDict(OrderedDict):
    ## An OrderedDict that behaves like defaultdict(list): accessing a
    #  missing key creates (and stores) a fresh list for it, while the
    #  insertion order of keys is preserved.
    def __init__(self, *args, **kwargs):
        super(OrderedListDict, self).__init__(*args, **kwargs)
        self.default_factory = list
    def __missing__(self, key):
        # Materialize a fresh default value, remember it, and hand it back.
        fresh = self.default_factory()
        self[key] = fresh
        return fresh
## Get all packages from platform for specified arch, target and toolchain
#
# @param Platform: DscBuildData instance
# @param BuildDatabase: The database saves all data for all metafiles
# @param Arch: Current arch
# @param Target: Current target
# @param Toolchain: Current toolchain
# @retval: List of packages which are DecBuildData instances
#
def GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain):
    # Start from the platform's own packages, then add the packages of
    # every module and of every library instance each module links.
    Packages = set(Platform.Packages or [])
    for ModuleFile in Platform.Modules:
        ModuleData = BuildDatabase[ModuleFile, Arch, Target, Toolchain]
        Packages.update(ModuleData.Packages)
        for LibData in GetLiabraryInstances(ModuleData, Platform, BuildDatabase, Arch, Target, Toolchain):
            Packages.update(LibData.Packages)
    return list(Packages)
## Get all declared PCD from platform for specified arch, target and toolchain
#
# @param Platform: DscBuildData instance
# @param BuildDatabase: The database saves all data for all metafiles
# @param Arch: Current arch
# @param Target: Current target
# @param Toolchain: Current toolchain
# @param additionalPkgs: Extra set of packages to scan on top of the
#                        platform's own package list
# @retval: A dictionary contains instances of PcdClassObject with key (PcdCName, TokenSpaceGuid)
# @retval: A dictionary contains real GUIDs of TokenSpaceGuid
#
def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain, additionalPkgs):
    AllPackages = set(GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain)) | additionalPkgs
    DecPcds = {}
    GuidDict = {}
    for Package in AllPackages:
        GuidDict.update(Package.Guids)
        for PcdKey in Package.Pcds:
            CName, TokenSpace = PcdKey[0], PcdKey[1]
            # A "mixed" PCD is declared under several names; normalize to
            # the canonical one recorded in GlobalData.MixedPcd.
            if GlobalData.MixedPcd:
                for MixedItem in GlobalData.MixedPcd:
                    if (CName, TokenSpace) in GlobalData.MixedPcd[MixedItem]:
                        CName = MixedItem[0]
                        break
            # First declaration wins; later packages do not override it.
            DecPcds.setdefault((CName, TokenSpace), Package.Pcds[PcdKey])
    return DecPcds, GuidDict
## Get all dependent libraries for a module
#
# Thin convenience wrapper around GetModuleLibInstances that supplies the
# platform's metafile and the default EdkLogger.  (The historical
# misspelling "Liabrary" is part of the public API and must be kept.)
#
# @param Module: InfBuildData instance
# @param Platform: DscBuildData instance
# @param BuildDatabase: The database saves all data for all metafiles
# @param Arch: Current arch
# @param Target: Current target
# @param Toolchain: Current toolchain
# @retval: List of dependent libraries which are InfBuildData instances
#
def GetLiabraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
    return GetModuleLibInstances(
        Module, Platform, BuildDatabase, Arch, Target, Toolchain,
        Platform.MetaFile, EdkLogger)
## Resolve and topologically sort all library instances consumed by a module
#
# Walks the module's LibraryClasses (honoring platform/module overrides and
# forced NULL libraries), builds the consumer graph, and topologically sorts
# it so that constructors run in dependency order.  The result is cached on
# Module.LibInstances.
#
# Error behavior depends on what is being resolved: when Module is a real
# module (Module.LibraryClass is empty) resolution failures are fatal via
# EdkLogger.error; when Module is itself a library instance, failures
# silently return [].
#
# @param Module: InfBuildData instance being resolved
# @param Platform: DscBuildData instance
# @param BuildDatabase: The database saves all data for all metafiles
# @param Arch: Current arch
# @param Target: Current target
# @param Toolchain: Current toolchain
# @param FileName: File reported in error messages
# @param EdkLogger: Logger used for verbose/error output
# @retval: Sorted list of library-instance build objects
#
def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain, FileName = '', EdkLogger = None):
    # Cached result from a previous resolution of this module.
    if Module.LibInstances:
        return Module.LibInstances
    ModuleType = Module.ModuleType
    # add forced library instances (specified under LibraryClasses sections)
    #
    # If a module has a MODULE_TYPE of USER_DEFINED,
    # do not link in NULL library class instances from the global [LibraryClasses.*] sections.
    #
    if Module.ModuleType != SUP_MODULE_USER_DEFINED:
        for LibraryClass in Platform.LibraryClasses.GetKeys():
            if LibraryClass.startswith("NULL") and Platform.LibraryClasses[LibraryClass, Module.ModuleType]:
                Module.LibraryClasses[LibraryClass] = Platform.LibraryClasses[LibraryClass, Module.ModuleType]
    # add forced library instances (specified in module overrides)
    for LibraryClass in Platform.Modules[str(Module)].LibraryClasses:
        if LibraryClass.startswith("NULL"):
            Module.LibraryClasses[LibraryClass] = Platform.Modules[str(Module)].LibraryClasses[LibraryClass]
    # EdkII module
    # Worklist traversal: resolve each library class to an instance, then
    # queue that instance so its own library classes get resolved too.
    LibraryConsumerList = [Module]
    Constructor = []
    ConsumedByList = OrderedListDict()
    LibraryInstance = OrderedDict()
    if not Module.LibraryClass:
        EdkLogger.verbose("")
        EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), Arch))
    while len(LibraryConsumerList) > 0:
        M = LibraryConsumerList.pop()
        for LibraryClassName in M.LibraryClasses:
            if LibraryClassName not in LibraryInstance:
                # override library instance for this module
                LibraryPath = Platform.Modules[str(Module)].LibraryClasses.get(LibraryClassName,Platform.LibraryClasses[LibraryClassName, ModuleType])
                if LibraryPath is None:
                    LibraryPath = M.LibraryClasses.get(LibraryClassName)
                    if LibraryPath is None:
                        if not Module.LibraryClass:
                            EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
                                            "Instance of library class [%s] is not found" % LibraryClassName,
                                            File=FileName,
                                            ExtraData="in [%s] [%s]\n\tconsumed by module [%s]" % (str(M), Arch, str(Module)))
                        else:
                            return []
                LibraryModule = BuildDatabase[LibraryPath, Arch, Target, Toolchain]
                # for those forced library instance (NULL library), add a fake library class
                if LibraryClassName.startswith("NULL"):
                    LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
                elif LibraryModule.LibraryClass is None \
                     or len(LibraryModule.LibraryClass) == 0 \
                     or (ModuleType != SUP_MODULE_USER_DEFINED and ModuleType != SUP_MODULE_HOST_APPLICATION
                         and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
                    # only USER_DEFINED can link against any library instance despite of its SupModList
                    if not Module.LibraryClass:
                        EdkLogger.error("build", OPTION_MISSING,
                                        "Module type [%s] is not supported by library instance [%s]" \
                                        % (ModuleType, LibraryPath), File=FileName,
                                        ExtraData="consumed by library instance [%s] which is consumed by module [%s]" \
                                        % (str(M), str(Module))
                                        )
                    else:
                        return []
                LibraryInstance[LibraryClassName] = LibraryModule
                LibraryConsumerList.append(LibraryModule)
                if not Module.LibraryClass:
                    EdkLogger.verbose("\t" + str(LibraryClassName) + " : " + str(LibraryModule))
            else:
                LibraryModule = LibraryInstance[LibraryClassName]
            if LibraryModule is None:
                continue
            if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
                Constructor.append(LibraryModule)
            # don't add current module itself to consumer list
            if M != Module:
                if M in ConsumedByList[LibraryModule]:
                    continue
                ConsumedByList[LibraryModule].append(M)
    #
    # Initialize the sorted output list to the empty set
    #
    SortedLibraryList = []
    #
    # Q <- Set of all nodes with no incoming edges
    #
    LibraryList = [] #LibraryInstance.values()
    Q = []
    for LibraryClassName in LibraryInstance:
        M = LibraryInstance[LibraryClassName]
        LibraryList.append(M)
        if not ConsumedByList[M]:
            Q.append(M)
    #
    # start the DAG algorithm
    #
    # Kahn-style topological sort; when Q runs dry, constructor-less edges
    # are broken first so cycles without constructors still sort.
    while True:
        EdgeRemoved = True
        while Q == [] and EdgeRemoved:
            EdgeRemoved = False
            # for each node Item with a Constructor
            for Item in LibraryList:
                if Item not in Constructor:
                    continue
                # for each Node without a constructor with an edge e from Item to Node
                for Node in ConsumedByList[Item]:
                    if Node in Constructor:
                        continue
                    # remove edge e from the graph if Node has no constructor
                    ConsumedByList[Item].remove(Node)
                    EdgeRemoved = True
                    if not ConsumedByList[Item]:
                        # insert Item into Q
                        Q.insert(0, Item)
                        break
                if Q != []:
                    break
        # DAG is done if there's no more incoming edge for all nodes
        if Q == []:
            break
        # remove node from Q
        Node = Q.pop()
        # output Node
        SortedLibraryList.append(Node)
        # for each node Item with an edge e from Node to Item do
        for Item in LibraryList:
            if Node not in ConsumedByList[Item]:
                continue
            # remove edge e from the graph
            ConsumedByList[Item].remove(Node)
            if ConsumedByList[Item]:
                continue
            # insert Item into Q, if Item has no other incoming edges
            Q.insert(0, Item)
    #
    # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
    #
    for Item in LibraryList:
        if ConsumedByList[Item] and Item in Constructor and len(Constructor) > 1:
            if not Module.LibraryClass:
                ErrorMessage = "\tconsumed by " + "\n\tconsumed by ".join(str(L) for L in ConsumedByList[Item])
                EdkLogger.error("build", BUILD_ERROR, 'Library [%s] with constructors has a cycle' % str(Item),
                                ExtraData=ErrorMessage, File=FileName)
            else:
                return []
        if Item not in SortedLibraryList:
            SortedLibraryList.append(Item)
    #
    # Build the list of constructor and destructor names
    # The DAG Topo sort produces the destructor order, so the list of constructors must generated in the reverse order
    #
    SortedLibraryList.reverse()
    Module.LibInstances = SortedLibraryList
    SortedLibraryList = [lib.SetReferenceModule(Module) for lib in SortedLibraryList]
    return SortedLibraryList
| edk2-master | BaseTools/Source/Python/Workspace/WorkspaceCommon.py |
## @file
# This file is used to define each component of the build database
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from collections import OrderedDict, namedtuple
from Common.DataType import *
import collections
import re
from collections import OrderedDict
from Common.Misc import CopyDict,ArrayIndex
import copy
from CommonDataClass.DataClass import *
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
from Common.BuildToolError import OPTION_VALUE_INVALID
from Common.caching import cached_property
StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_\[\]]*$')
## PcdClassObject
#
# This Class is used for PcdObject
#
# @param object: Inherited from object class
# @param Name: Input value for Name of Pcd, default is None
# @param Guid: Input value for Guid of Pcd, default is None
# @param Type: Input value for Type of Pcd, default is None
# @param DatumType: Input value for DatumType of Pcd, default is None
# @param Value: Input value for Value of Pcd, default is None
# @param Token: Input value for Token of Pcd, default is None
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
#
# @var TokenCName: To store value for TokenCName
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
# @var Type: To store value for Type
# @var DatumType: To store value for DatumType
# @var TokenValue: To store value for TokenValue
# @var MaxDatumSize: To store value for MaxDatumSize
# @var SkuInfoList: To store value for SkuInfoList
# @var IsOverrided: To store value for IsOverrided
# @var Phase: To store value for Phase, default is "DXE"
#
class PcdClassObject(object):
    def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = None, IsOverrided = False, GuidValue = None, validateranges = None, validlists = None, expressions = None, IsDsc = False, UserDefinedDefaultStoresFlag = False):
        # Identity: C name plus token-space GUID (name and value).
        self.TokenCName = Name
        self.TokenSpaceGuidCName = Guid
        self.TokenSpaceGuidValue = GuidValue
        self.Type = Type
        # Backing field for the DatumType property below.
        self._DatumType = DatumType
        self.DefaultValue = Value
        self.TokenValue = Token
        self.MaxDatumSize = MaxDatumSize
        self.MaxSizeUserSet = None
        self.SkuInfoList = SkuInfoList if SkuInfoList is not None else OrderedDict()
        self.Phase = "DXE"
        self.Pending = False
        self.IsOverrided = IsOverrided
        self.IsFromBinaryInf = False
        self.IsFromDsc = False
        self.validateranges = validateranges if validateranges is not None else []
        self.validlists = validlists if validlists is not None else []
        self.expressions = expressions if expressions is not None else []
        # Values as recorded in the platform DSC, if any.
        self.DscDefaultValue = None
        self.DscRawValue = {}
        self.DscRawValueInfo = {}
        if IsDsc:
            self.DscDefaultValue = Value
        # Overrides from the command line / FDF / per-component sections.
        self.PcdValueFromComm = ""
        self.PcdValueFromFdf = ""
        self.PcdValueFromComponents = {} #{ModuleGuid:value, file_path,lineNo}
        self.CustomAttribute = {}
        self.UserDefinedDefaultStoresFlag = UserDefinedDefaultStoresFlag
        # Lazily computed by the Capacity property; reset when DatumType changes.
        self._Capacity = None
    ## Capacity
    #
    # Lazily parsed list of array dimension sizes (as decimal strings) taken
    # from DatumType, e.g. "UINT8[2][3]" -> ['2', '3']; an unsized "[]"
    # dimension is recorded as "-1".  As a side effect, the first access
    # validates any field indices recorded in SkuOverrideValues /
    # DefaultValues (present on StructurePcd) against the declared
    # dimensions and raises a build error on overflow.
    @property
    def Capacity(self):
        if self._Capacity is None:
            self._Capacity = []
            dimension = ArrayIndex.findall(self._DatumType)
            for item in dimension:
                maxsize = item.lstrip("[").rstrip("]").strip()
                if not maxsize:
                    maxsize = "-1"
                # Normalize hex dimension literals to decimal strings.
                maxsize = str(int(maxsize,16)) if maxsize.startswith(("0x","0X")) else maxsize
                self._Capacity.append(maxsize)
            if hasattr(self, "SkuOverrideValues"):
                for sku in self.SkuOverrideValues:
                    for defaultstore in self.SkuOverrideValues[sku]:
                        fields = self.SkuOverrideValues[sku][defaultstore]
                        for demesionattr in fields:
                            fieldinfo = fields[demesionattr]
                            deme = ArrayIndex.findall(demesionattr)
                            for i in range(len(deme)):
                                if int(deme[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
                                    if self._Capacity[i] != "-1":
                                        firstfieldinfo = list(fieldinfo.values())[0]
                                        EdkLogger.error('Build', OPTION_VALUE_INVALID, "For Pcd %s, Array Index exceed the Array size. From %s Line %s \n " %
                                                        (".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1],firstfieldinfo[2] ))
            if hasattr(self,"DefaultValues"):
                for demesionattr in self.DefaultValues:
                    fieldinfo = self.DefaultValues[demesionattr]
                    deme = ArrayIndex.findall(demesionattr)
                    for i in range(len(deme)):
                        if int(deme[i].lstrip("[").rstrip("]").strip()) >= int(self._Capacity[i]):
                            if self._Capacity[i] != "-1":
                                firstfieldinfo = list(fieldinfo.values())[0]
                                EdkLogger.error('Build', OPTION_VALUE_INVALID, "For Pcd %s, Array Index exceed the Array size. From %s Line %s \n " %
                                                (".".join((self.TokenSpaceGuidCName, self.TokenCName)), firstfieldinfo[1],firstfieldinfo[2] ))
        return self._Capacity
    ## Total element count of the array (product of all dimensions),
    #  or -1 when the innermost dimension is unsized.
    def PcdArraySize(self):
        if self.Capacity[-1] == "-1":
            return -1
        size = 1
        for de in self.Capacity:
            size = size * int(de)
        return size
    ## Declared datum type string, e.g. "UINT32" or "MY_STRUCT[4]".
    @property
    def DatumType(self):
        return self._DatumType
    ## Setting the datum type invalidates the cached Capacity.
    @DatumType.setter
    def DatumType(self,DataType):
        self._DatumType = DataType
        self._Capacity = None
    ## Datum type with any array suffix stripped ("UINT8[2]" -> "UINT8").
    @property
    def BaseDatumType(self):
        if self.IsArray():
            return self._DatumType[:self._DatumType.index("[")]
        else:
            return self._DatumType
    ## True when the datum type carries at least one array dimension.
    def IsArray(self):
        return True if len(self.Capacity) else False
    ## True for arrays and C-struct typed PCDs (anything that is not a
    #  plain scalar/VOID* type).
    def IsAggregateDatumType(self):
        if self.DatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
            return False
        if self.IsArray() or StructPattern.match(self.DatumType):
            return True
        return False
    ## True when this is an array of a plain scalar type.
    def IsSimpleTypeArray(self):
        if self.IsArray() and self.BaseDatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, "BOOLEAN"]:
            return True
        return False
    ## Return the larger of MaxSize and the byte count implied by the given
    #  PCD value string ({...} byte array, "..."/'...' ASCII string, or
    #  L"..." UCS-2 string; anything else is measured by raw length).
    @staticmethod
    def GetPcdMaxSizeWorker(PcdString, MaxSize):
        if PcdString.startswith("{") and PcdString.endswith("}"):
            return max([len(PcdString.split(",")),MaxSize])
        if PcdString.startswith("\"") or PcdString.startswith("\'"):
            return max([len(PcdString)-2+1,MaxSize])
        if PcdString.startswith("L\""):
            return max([2*(len(PcdString)-3+1),MaxSize])
        return max([len(PcdString),MaxSize])
    ## Get the maximum number of bytes
    #  Largest size implied by MaxDatumSize and every recorded value source
    #  (command line, FDF, DEC default); fixed size for numeric types.
    def GetPcdMaxSize(self):
        if self.DatumType in TAB_PCD_NUMERIC_TYPES:
            return MAX_SIZE_TYPE[self.DatumType]
        MaxSize = int(self.MaxDatumSize, 10) if self.MaxDatumSize else 0
        if self.PcdValueFromFdf:
            MaxSize = self.GetPcdMaxSizeWorker(self.PcdValueFromFdf,MaxSize)
        if self.PcdValueFromComm:
            MaxSize = self.GetPcdMaxSizeWorker(self.PcdValueFromComm,MaxSize)
        if hasattr(self, "DefaultValueFromDec"):
            MaxSize = self.GetPcdMaxSizeWorker(self.DefaultValueFromDec,MaxSize)
        return MaxSize
    ## Get the number of bytes
    #  Byte size of DefaultValue alone (L"..." counts 2 bytes per char,
    #  {...} counts comma-separated bytes).
    def GetPcdSize(self):
        if self.DatumType in TAB_PCD_NUMERIC_TYPES:
            return MAX_SIZE_TYPE[self.DatumType]
        if not self.DefaultValue:
            return 1
        elif self.DefaultValue[0] == 'L':
            return (len(self.DefaultValue) - 2) * 2
        elif self.DefaultValue[0] == '{':
            return len(self.DefaultValue.split(','))
        else:
            return len(self.DefaultValue) - 1
    ## Convert the class to a string
    #
    # Convert each member of the class to string
    # Organize to a single line format string
    #
    # @retval Rtn Formatted String
    #
    def __str__(self):
        Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
              'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
              'Type=' + str(self.Type) + ', ' + \
              'DatumType=' + str(self.DatumType) + ', ' + \
              'DefaultValue=' + str(self.DefaultValue) + ', ' + \
              'TokenValue=' + str(self.TokenValue) + ', ' + \
              'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
        for Item in self.SkuInfoList.values():
            Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
        Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
        return Rtn
    ## Override __eq__ function
    #
    # Check whether pcds are the same
    #
    # @retval False The two pcds are different
    # @retval True The two pcds are the same
    #
    def __eq__(self, Other):
        return Other and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
    ## Override __hash__ function
    #
    # Use (TokenCName, TokenSpaceGuidCName) as key in hash table
    #
    # @retval truple() Key for hash table
    #
    def __hash__(self):
        return hash((self.TokenCName, self.TokenSpaceGuidCName))
    ## Cached "<TokenSpaceGuidCName>.<TokenCName>" name, used for ordering.
    @cached_property
    def _fullname(self):
        return ".".join((self.TokenSpaceGuidCName,self.TokenCName))
    ## Order PCDs lexicographically by their full dotted name.
    def __lt__(self,pcd):
        return self._fullname < pcd._fullname
    def __gt__(self,pcd):
        return self._fullname > pcd._fullname
    ## Copy this PCD's state onto new_pcd and return it.  List/dict fields
    #  are shallow-copied per element; SkuInfoList values are deep-copied.
    #  Shared between __deepcopy__ here and subclass copy helpers.
    def sharedcopy(self,new_pcd):
        new_pcd.TokenCName = self.TokenCName
        new_pcd.TokenSpaceGuidCName = self.TokenSpaceGuidCName
        new_pcd.TokenSpaceGuidValue = self.TokenSpaceGuidValue
        new_pcd.Type = self.Type
        new_pcd.DatumType = self.DatumType
        new_pcd.DefaultValue = self.DefaultValue
        new_pcd.TokenValue = self.TokenValue
        new_pcd.MaxDatumSize = self.MaxDatumSize
        new_pcd.MaxSizeUserSet = self.MaxSizeUserSet
        new_pcd.Phase = self.Phase
        new_pcd.Pending = self.Pending
        new_pcd.IsOverrided = self.IsOverrided
        new_pcd.IsFromBinaryInf = self.IsFromBinaryInf
        new_pcd.IsFromDsc = self.IsFromDsc
        new_pcd.PcdValueFromComm = self.PcdValueFromComm
        new_pcd.PcdValueFromFdf = self.PcdValueFromFdf
        new_pcd.UserDefinedDefaultStoresFlag = self.UserDefinedDefaultStoresFlag
        # NOTE(review): DscRawValue/DscRawValueInfo/CustomAttribute are
        # shared by reference, not copied -- confirm that is intentional.
        new_pcd.DscRawValue = self.DscRawValue
        new_pcd.DscRawValueInfo = self.DscRawValueInfo
        new_pcd.CustomAttribute = self.CustomAttribute
        new_pcd.validateranges = [item for item in self.validateranges]
        new_pcd.validlists = [item for item in self.validlists]
        new_pcd.expressions = [item for item in self.expressions]
        new_pcd.SkuInfoList = {key: copy.deepcopy(skuobj) for key,skuobj in self.SkuInfoList.items()}
        return new_pcd
    ## deepcopy support: build a fresh PcdClassObject via sharedcopy().
    def __deepcopy__(self,memo):
        new_pcd = PcdClassObject()
        self.sharedcopy(new_pcd)
        return new_pcd
class StructurePcd(PcdClassObject):
def __init__(self, StructuredPcdIncludeFile=None, Packages=None, Name=None, Guid=None, Type=None, DatumType=None, Value=None, Token=None, MaxDatumSize=None, SkuInfoList=None, IsOverrided=False, GuidValue=None, validateranges=None, validlists=None, expressions=None,default_store = TAB_DEFAULT_STORES_DEFAULT):
if SkuInfoList is None:
SkuInfoList = {}
if validateranges is None:
validateranges = []
if validlists is None:
validlists = []
if expressions is None:
expressions = []
if Packages is None:
Packages = []
super(StructurePcd, self).__init__(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, IsOverrided, GuidValue, validateranges, validlists, expressions)
self.StructuredPcdIncludeFile = [] if StructuredPcdIncludeFile is None else StructuredPcdIncludeFile
self.PackageDecs = Packages
self.DefaultStoreName = [default_store]
self.DefaultValues = OrderedDict()
self.PcdMode = None
self.SkuOverrideValues = OrderedDict()
self.StructName = None
self.PcdDefineLineNo = 0
self.PkgPath = ""
self.DefaultValueFromDec = ""
self.DefaultValueFromDecInfo = None
self.ValueChain = set()
self.PcdFieldValueFromComm = OrderedDict()
self.PcdFieldValueFromFdf = OrderedDict()
self.DefaultFromDSC=None
self.PcdFiledValueFromDscComponent = OrderedDict()
def __repr__(self):
return self.TypeName
def AddDefaultValue (self, FieldName, Value, FileName="", LineNo=0,DimensionAttr ="-1"):
if DimensionAttr not in self.DefaultValues:
self.DefaultValues[DimensionAttr] = collections.OrderedDict()
if FieldName in self.DefaultValues[DimensionAttr]:
del self.DefaultValues[DimensionAttr][FieldName]
self.DefaultValues[DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
return self.DefaultValues[DimensionAttr][FieldName]
def SetDecDefaultValue(self, DefaultValue,decpath=None,lineno=None):
self.DefaultValueFromDec = DefaultValue
self.DefaultValueFromDecInfo = (decpath,lineno)
def AddOverrideValue (self, FieldName, Value, SkuName, DefaultStoreName, FileName="", LineNo=0, DimensionAttr = '-1'):
if SkuName not in self.SkuOverrideValues:
self.SkuOverrideValues[SkuName] = OrderedDict()
if DefaultStoreName not in self.SkuOverrideValues[SkuName]:
self.SkuOverrideValues[SkuName][DefaultStoreName] = OrderedDict()
if DimensionAttr not in self.SkuOverrideValues[SkuName][DefaultStoreName]:
self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr] = collections.OrderedDict()
if FieldName in self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr]:
del self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName]
self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
return self.SkuOverrideValues[SkuName][DefaultStoreName][DimensionAttr][FieldName]
def AddComponentOverrideValue(self,FieldName, Value, ModuleGuid, FileName="", LineNo=0, DimensionAttr = '-1'):
self.PcdFiledValueFromDscComponent.setdefault(ModuleGuid, OrderedDict())
self.PcdFiledValueFromDscComponent[ModuleGuid].setdefault(DimensionAttr,OrderedDict())
self.PcdFiledValueFromDscComponent[ModuleGuid][DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
return self.PcdFiledValueFromDscComponent[ModuleGuid][DimensionAttr][FieldName]
def SetPcdMode (self, PcdMode):
self.PcdMode = PcdMode
def copy(self, PcdObject):
    """Merge another PCD object into this one.

    Every field that is set (truthy) on PcdObject overrides the local
    value; otherwise the existing local value is kept.  StructurePcd-only
    fields are merged only when PcdObject is itself a StructurePcd.
    """
    def _Pick(NewValue, OldValue):
        # Incoming value wins when set; otherwise keep the current one.
        return NewValue if NewValue else OldValue
    self.TokenCName = _Pick(PcdObject.TokenCName, self.TokenCName)
    # Bug fix: the fallback branch used to read PcdObject.TokenSpaceGuidCName
    # (i.e. the same empty incoming value) instead of preserving the local one.
    self.TokenSpaceGuidCName = _Pick(PcdObject.TokenSpaceGuidCName, self.TokenSpaceGuidCName)
    self.TokenSpaceGuidValue = _Pick(PcdObject.TokenSpaceGuidValue, self.TokenSpaceGuidValue)
    self.Type = _Pick(PcdObject.Type, self.Type)
    self._DatumType = _Pick(PcdObject.DatumType, self.DatumType)
    self.DefaultValue = _Pick(PcdObject.DefaultValue, self.DefaultValue)
    self.TokenValue = _Pick(PcdObject.TokenValue, self.TokenValue)
    self.MaxDatumSize = _Pick(PcdObject.MaxDatumSize, self.MaxDatumSize)
    self.SkuInfoList = _Pick(PcdObject.SkuInfoList, self.SkuInfoList)
    self.Phase = _Pick(PcdObject.Phase, self.Phase)
    self.Pending = _Pick(PcdObject.Pending, self.Pending)
    self.IsOverrided = _Pick(PcdObject.IsOverrided, self.IsOverrided)
    self.IsFromBinaryInf = _Pick(PcdObject.IsFromBinaryInf, self.IsFromBinaryInf)
    self.IsFromDsc = _Pick(PcdObject.IsFromDsc, self.IsFromDsc)
    self.validateranges = _Pick(PcdObject.validateranges, self.validateranges)
    self.validlists = _Pick(PcdObject.validlists, self.validlists)
    self.expressions = _Pick(PcdObject.expressions, self.expressions)
    self.DscRawValue = _Pick(PcdObject.DscRawValue, self.DscRawValue)
    self.DscRawValueInfo = _Pick(PcdObject.DscRawValueInfo, self.DscRawValueInfo)
    self.PcdValueFromComm = _Pick(PcdObject.PcdValueFromComm, self.PcdValueFromComm)
    self.PcdValueFromFdf = _Pick(PcdObject.PcdValueFromFdf, self.PcdValueFromFdf)
    self.CustomAttribute = _Pick(PcdObject.CustomAttribute, self.CustomAttribute)
    self.UserDefinedDefaultStoresFlag = _Pick(PcdObject.UserDefinedDefaultStoresFlag, self.UserDefinedDefaultStoresFlag)
    if isinstance(PcdObject, StructurePcd):
        self.StructuredPcdIncludeFile = _Pick(PcdObject.StructuredPcdIncludeFile, self.StructuredPcdIncludeFile)
        self.PackageDecs = _Pick(PcdObject.PackageDecs, self.PackageDecs)
        self.DefaultValues = _Pick(PcdObject.DefaultValues, self.DefaultValues)
        self.PcdMode = _Pick(PcdObject.PcdMode, self.PcdMode)
        self.DefaultValueFromDec = _Pick(PcdObject.DefaultValueFromDec, self.DefaultValueFromDec)
        self.DefaultValueFromDecInfo = _Pick(PcdObject.DefaultValueFromDecInfo, self.DefaultValueFromDecInfo)
        self.SkuOverrideValues = _Pick(PcdObject.SkuOverrideValues, self.SkuOverrideValues)
        # NOTE: StructName deliberately takes its candidate from DatumType.
        self.StructName = _Pick(PcdObject.DatumType, self.StructName)
        self.PcdDefineLineNo = _Pick(PcdObject.PcdDefineLineNo, self.PcdDefineLineNo)
        self.PkgPath = _Pick(PcdObject.PkgPath, self.PkgPath)
        self.ValueChain = _Pick(PcdObject.ValueChain, self.ValueChain)
        self.PcdFieldValueFromComm = _Pick(PcdObject.PcdFieldValueFromComm, self.PcdFieldValueFromComm)
        self.PcdFieldValueFromFdf = _Pick(PcdObject.PcdFieldValueFromFdf, self.PcdFieldValueFromFdf)
        self.PcdFiledValueFromDscComponent = _Pick(PcdObject.PcdFiledValueFromDscComponent, self.PcdFiledValueFromDscComponent)
def __deepcopy__(self, memo):
    """Produce an independent StructurePcd clone of this object."""
    clone = StructurePcd()
    self.sharedcopy(clone)
    # Scalar fields are shared by reference.
    clone.DefaultValueFromDec = self.DefaultValueFromDec
    clone.DefaultValueFromDecInfo = self.DefaultValueFromDecInfo
    clone.PcdMode = self.PcdMode
    clone.StructName = self.DatumType
    clone.PcdDefineLineNo = self.PcdDefineLineNo
    clone.PkgPath = self.PkgPath
    # Container fields get fresh copies so the clone can diverge safely.
    clone.StructuredPcdIncludeFile = list(self.StructuredPcdIncludeFile)
    clone.PackageDecs = list(self.PackageDecs)
    clone.DefaultValues = CopyDict(self.DefaultValues)
    clone.DefaultFromDSC = CopyDict(self.DefaultFromDSC)
    clone.SkuOverrideValues = CopyDict(self.SkuOverrideValues)
    clone.PcdFieldValueFromComm = CopyDict(self.PcdFieldValueFromComm)
    clone.PcdFieldValueFromFdf = CopyDict(self.PcdFieldValueFromFdf)
    clone.PcdFiledValueFromDscComponent = CopyDict(self.PcdFiledValueFromDscComponent)
    clone.ValueChain = set(self.ValueChain)
    return clone
LibraryClassObject = namedtuple('LibraryClassObject', ['LibraryClass','SupModList'])
class BuildData(object):
    """Common base for module/package/platform build data, keyed by MetaFile."""
    # dict used to convert PCD type in database to string used by build tool
    _PCD_TYPE_STRING_ = {
        MODEL_PCD_FIXED_AT_BUILD        : TAB_PCDS_FIXED_AT_BUILD,
        MODEL_PCD_PATCHABLE_IN_MODULE   : TAB_PCDS_PATCHABLE_IN_MODULE,
        MODEL_PCD_FEATURE_FLAG          : TAB_PCDS_FEATURE_FLAG,
        MODEL_PCD_DYNAMIC               : TAB_PCDS_DYNAMIC,
        MODEL_PCD_DYNAMIC_DEFAULT       : TAB_PCDS_DYNAMIC,
        MODEL_PCD_DYNAMIC_HII           : TAB_PCDS_DYNAMIC_HII,
        MODEL_PCD_DYNAMIC_VPD           : TAB_PCDS_DYNAMIC_VPD,
        MODEL_PCD_DYNAMIC_EX            : TAB_PCDS_DYNAMIC_EX,
        MODEL_PCD_DYNAMIC_EX_DEFAULT    : TAB_PCDS_DYNAMIC_EX,
        MODEL_PCD_DYNAMIC_EX_HII        : TAB_PCDS_DYNAMIC_EX_HII,
        MODEL_PCD_DYNAMIC_EX_VPD        : TAB_PCDS_DYNAMIC_EX_VPD,
    }

    def UpdatePcdTypeDict(self):
        # When PCD_DYNAMIC_AS_DYNAMICEX is forced on the command line, remap
        # every Dynamic* model to its DynamicEx* string equivalent.
        if GlobalData.gCommandLineDefines.get(TAB_DSC_DEFINES_PCD_DYNAMIC_AS_DYNAMICEX, "FALSE").upper() != "TRUE":
            return
        Remapped = dict(self._PCD_TYPE_STRING_)
        Remapped.update({
            MODEL_PCD_DYNAMIC          : TAB_PCDS_DYNAMIC_EX,
            MODEL_PCD_DYNAMIC_DEFAULT  : TAB_PCDS_DYNAMIC_EX,
            MODEL_PCD_DYNAMIC_HII      : TAB_PCDS_DYNAMIC_EX_HII,
            MODEL_PCD_DYNAMIC_VPD      : TAB_PCDS_DYNAMIC_EX_VPD,
        })
        # Shadow the class-level table with an instance-level copy.
        self._PCD_TYPE_STRING_ = Remapped

    ## Convert the class to a string
    #
    #  Convert member MetaFile of the class to a string
    #
    #  @retval string Formatted String
    #
    def __str__(self):
        return str(self.MetaFile)

    ## Override __eq__ function
    #
    # Check whether ModuleBuildClassObjects are the same
    #
    # @retval False The two ModuleBuildClassObjects are different
    # @retval True  The two ModuleBuildClassObjects are the same
    #
    def __eq__(self, Other):
        # Equality delegates to MetaFile so an object can be compared
        # directly against a file-path value.
        return self.MetaFile == Other

    ## Override __hash__ function
    #
    # Use MetaFile as key in hash table
    #
    # @retval string Key for hash table
    #
    def __hash__(self):
        return hash(self.MetaFile)
## ModuleBuildClassObject
#
# This Class defines ModuleBuildClass
#
# @param object: Inherited from object class
#
# @var MetaFile: To store value for module meta file path
# @var BaseName: To store value for BaseName
# @var ModuleType: To store value for ModuleType
# @var Guid: To store value for Guid
# @var Version: To store value for Version
# @var PcdIsDriver: To store value for PcdIsDriver
# @var BinaryModule: To store value for BinaryModule
# @var CustomMakefile: To store value for CustomMakefile
# @var Specification: To store value for Specification
# @var Shadow:                To store value for Shadow
# @var LibraryClass: To store value for LibraryClass, it is a list structure as
# [ LibraryClassObject, ...]
# @var ModuleEntryPointList: To store value for ModuleEntryPointList
# @var ModuleUnloadImageList: To store value for ModuleUnloadImageList
# @var ConstructorList: To store value for ConstructorList
# @var DestructorList: To store value for DestructorList
# @var Binaries: To store value for Binaries, it is a list structure as
# [ ModuleBinaryClassObject, ...]
# @var Sources: To store value for Sources, it is a list structure as
# [ ModuleSourceFilesClassObject, ... ]
# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
# { [LibraryClassName, ModuleType] : LibraryClassInfFile }
# @var Protocols: To store value for Protocols, it is a list structure as
# [ ProtocolName, ... ]
# @var Ppis: To store value for Ppis, it is a list structure as
# [ PpiName, ... ]
# @var Guids: To store value for Guids, it is a list structure as
# [ GuidName, ... ]
# @var Includes: To store value for Includes, it is a list structure as
# [ IncludePath, ... ]
# @var Packages: To store value for Packages, it is a list structure as
# [ DecFileName, ... ]
# @var Pcds: To store value for Pcds, it is a set structure as
# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
# @var BuildOptions: To store value for BuildOptions, it is a set structure as
# { [BuildOptionKey] : BuildOptionValue}
# @var Depex: To store value for Depex
#
class ModuleBuildClassObject(BuildData):
    """Build information of one module (INF), see the comment block above for
    the meaning of each member."""
    def __init__(self):
        # [Defines] style scalars
        self.AutoGenVersion          = 0
        self.MetaFile                = ''
        self.BaseName                = ''
        self.ModuleType              = ''
        self.Guid                    = ''
        self.Version                 = ''
        self.PcdIsDriver             = ''
        self.BinaryModule            = ''
        self.Shadow                  = ''
        self.CustomMakefile          = {}
        self.Specification           = {}
        # library production/consumption info
        self.LibraryClass            = []
        self.ModuleEntryPointList    = []
        self.ModuleUnloadImageList   = []
        self.ConstructorList         = []
        self.DestructorList          = []
        # files and interfaces the module uses
        self.Binaries                = []
        self.Sources                 = []
        self.LibraryClasses          = OrderedDict()
        self.Libraries               = []
        self.Protocols               = []
        self.Ppis                    = []
        self.Guids                   = []
        self.Includes                = []
        self.Packages                = []
        # PCD/build-option/dependency settings
        self.Pcds                    = {}
        self.BuildOptions            = {}
        self.Depex                   = {}
        self.StrPcdSet               = []
        self.StrPcdOverallValue      = {}
## PackageBuildClassObject
#
# This Class defines PackageBuildClass
#
# @param object: Inherited from object class
#
# @var MetaFile: To store value for package meta file path
# @var PackageName: To store value for PackageName
# @var Guid: To store value for Guid
# @var Version: To store value for Version
# @var Protocols: To store value for Protocols, it is a set structure as
# { [ProtocolName] : Protocol Guid, ... }
# @var Ppis: To store value for Ppis, it is a set structure as
# { [PpiName] : Ppi Guid, ... }
# @var Guids: To store value for Guids, it is a set structure as
# { [GuidName] : Guid, ... }
# @var Includes: To store value for Includes, it is a list structure as
# [ IncludePath, ... ]
# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
# { [LibraryClassName] : LibraryClassInfFile }
# @var Pcds: To store value for Pcds, it is a set structure as
# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
#
class PackageBuildClassObject(BuildData):
    """Build information of one package (DEC), see the comment block above for
    the meaning of each member."""
    def __init__(self):
        self.MetaFile                = ''
        self.PackageName             = ''
        self.Guid                    = ''
        self.Version                 = ''
        # declarations exported by the package
        self.Protocols               = {}
        self.Ppis                    = {}
        self.Guids                   = {}
        self.Includes                = []
        self.LibraryClasses          = {}
        self.Pcds                    = {}
## PlatformBuildClassObject
#
# This Class defines PlatformBuildClass
#
# @param object: Inherited from object class
#
# @var MetaFile: To store value for platform meta-file path
# @var PlatformName: To store value for PlatformName
# @var Guid: To store value for Guid
# @var Version: To store value for Version
# @var DscSpecification: To store value for DscSpecification
# @var OutputDirectory: To store value for OutputDirectory
# @var FlashDefinition: To store value for FlashDefinition
# @var BuildNumber: To store value for BuildNumber
# @var SkuIds: To store value for SkuIds, it is a set structure as
# { 'SkuName' : SkuId, '!include' : includefilename, ...}
# @var Modules: To store value for Modules, it is a list structure as
# [ InfFileName, ... ]
# @var Libraries: To store value for Libraries, it is a list structure as
# [ InfFileName, ... ]
# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
# { (LibraryClassName, ModuleType) : LibraryClassInfFile }
# @var Pcds: To store value for Pcds, it is a set structure as
# { [(PcdCName, PcdGuidCName)] : PcdClassObject }
# @var BuildOptions: To store value for BuildOptions, it is a set structure as
# { [BuildOptionKey] : BuildOptionValue }
#
class PlatformBuildClassObject(BuildData):
    """Build information of one platform (DSC), see the comment block above for
    the meaning of each member."""
    def __init__(self):
        self.MetaFile                = ''
        self.PlatformName            = ''
        self.Guid                    = ''
        self.Version                 = ''
        self.DscSpecification        = ''
        self.OutputDirectory         = ''
        self.FlashDefinition         = ''
        self.BuildNumber             = ''
        # build content of the platform
        self.SkuIds                  = {}
        self.Modules                 = []
        self.LibraryInstances        = []
        self.LibraryClasses          = {}
        self.Libraries               = {}
        self.Pcds                    = {}
        self.BuildOptions            = {}
| edk2-master | BaseTools/Source/Python/Workspace/BuildClassObject.py |
## @file
# Python 'Workspace' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Workspace/__init__.py |
## @file
# This file is used to create a database used by build tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from Common.StringUtils import *
from Common.DataType import *
from Common.Misc import *
from types import *
from .MetaDataTable import *
from .MetaFileTable import *
from .MetaFileParser import *
from Workspace.DecBuildData import DecBuildData
from Workspace.DscBuildData import DscBuildData
from Workspace.InfBuildData import InfBuildData
## Database
#
# This class defined the build database for all modules, packages and platform.
# It will call corresponding parser for the given file if it cannot find it in
# the database.
#
# @param DbPath Path of database file
# @param GlobalMacros Global macros used for replacement during file parsing
# @param RenewDb=False Create new database file if it's already there
#
class WorkspaceDatabase(object):
    """In-memory build database for all modules, packages and platforms.

    Parsing results are cached per (file, arch, target, toolchain) so each
    meta file is parsed and converted at most once.
    """

    #
    # internal class used for call corresponding file parser and caching the result
    # to avoid unnecessary re-parsing
    #
    class BuildObjectFactory(object):
        # meta-file extension -> file-type model constant
        _FILE_TYPE_ = {
            ".inf"  : MODEL_FILE_INF,
            ".dec"  : MODEL_FILE_DEC,
            ".dsc"  : MODEL_FILE_DSC,
        }

        # file parser
        _FILE_PARSER_ = {
            MODEL_FILE_INF  :   InfParser,
            MODEL_FILE_DEC  :   DecParser,
            MODEL_FILE_DSC  :   DscParser,
        }

        # convert to xxxBuildData object
        _GENERATOR_ = {
            MODEL_FILE_INF  :   InfBuildData,
            MODEL_FILE_DEC  :   DecBuildData,
            MODEL_FILE_DSC  :   DscBuildData,
        }

        # NOTE: class-level dict, so the cache is shared by every factory
        # instance.  Keys are (FilePath, Arch, Target, Toolchain) tuples,
        # see __getitem__ below.
        _CACHE_ = {}

        def GetCache(self):
            # Expose the shared cache dictionary.
            return self._CACHE_

        # constructor
        def __init__(self, WorkspaceDb):
            self.WorkspaceDb = WorkspaceDb

        # key = (FilePath, Arch=None)
        def __contains__(self, Key):
            # NOTE(review): entries are stored under 4-tuple keys in
            # __getitem__, so this 2-tuple membership test can never match a
            # cached object — confirm whether any caller relies on it.
            FilePath = Key[0]
            if len(Key) > 1:
                Arch = Key[1]
            else:
                Arch = None
            return (FilePath, Arch) in self._CACHE_

        # key = (FilePath, Arch=None, Target=None, Toolchain=None)
        def __getitem__(self, Key):
            FilePath = Key[0]
            KeyLength = len(Key)
            if KeyLength > 1:
                Arch = Key[1]
            else:
                Arch = None
            if KeyLength > 2:
                Target = Key[2]
            else:
                Target = None
            if KeyLength > 3:
                Toolchain = Key[3]
            else:
                Toolchain = None

            # if it's generated before, just return the cached one
            Key = (FilePath, Arch, Target, Toolchain)
            if Key in self._CACHE_:
                return self._CACHE_[Key]

            # check file type
            BuildObject = self.CreateBuildObject(FilePath, Arch, Target, Toolchain)
            self._CACHE_[Key] = BuildObject
            return BuildObject

        def CreateBuildObject(self,FilePath, Arch, Target, Toolchain):
            # Parse the meta file and wrap the result in the matching
            # xxxBuildData object; returns None for unsupported extensions.
            Ext = FilePath.Type
            if Ext not in self._FILE_TYPE_:
                return None
            FileType = self._FILE_TYPE_[Ext]
            if FileType not in self._GENERATOR_:
                return None

            # get the parser ready for this file
            MetaFile = self._FILE_PARSER_[FileType](
                            FilePath,
                            FileType,
                            Arch,
                            MetaFileStorage(self.WorkspaceDb, FilePath, FileType)
                            )
            # always do post-process, in case of macros change
            MetaFile.DoPostProcess()
            # object the build is based on
            BuildObject = self._GENERATOR_[FileType](
                                FilePath,
                                MetaFile,
                                self,
                                Arch,
                                Target,
                                Toolchain
                                )
            return BuildObject

    # placeholder for file format conversion
    class TransformObjectFactory:
        def __init__(self, WorkspaceDb):
            self.WorkspaceDb = WorkspaceDb

        # key = FilePath, Arch
        def __getitem__(self, Key):
            # Not implemented yet; always yields None.
            pass

    ## Constructor of WorkspaceDatabase
    #
    # @param DbPath             Path of database file
    # @param GlobalMacros       Global macros used for replacement during file parsing
    # @param RenewDb=False      Create new database file if it's already there
    #
    def __init__(self):
        self.DB = dict()
        # create table for internal uses
        self.TblDataModel = DataClass.MODEL_LIST
        self.TblFile = []
        self.Platform = None

        # conversion object for build or file format conversion purpose
        self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self)
        self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self)

    ## Summarize all packages in the database
    def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag):
        # Collect every package referenced by the platform's modules, by the
        # library instances they pull in, and by the platform itself —
        # de-duplicated, in first-seen order.
        self.Platform = Platform
        PackageList = []
        Pa = self.BuildObject[self.Platform, Arch, TargetName, ToolChainTag]
        #
        # Get Package related to Modules
        #
        for Module in Pa.Modules:
            ModuleObj = self.BuildObject[Module, Arch, TargetName, ToolChainTag]
            for Package in ModuleObj.Packages:
                if Package not in PackageList:
                    PackageList.append(Package)
        #
        # Get Packages related to Libraries
        #
        for Lib in Pa.LibraryInstances:
            LibObj = self.BuildObject[Lib, Arch, TargetName, ToolChainTag]
            for Package in LibObj.Packages:
                if Package not in PackageList:
                    PackageList.append(Package)
        for Package in Pa.Packages:
            if Package in PackageList:
                continue
            PackageList.append(Package)

        return PackageList

    def MapPlatform(self, Dscfile):
        # Build (or fetch the cached) platform data for the given DSC path;
        # a parse failure is fatal.
        Platform = self.BuildObject[PathClass(Dscfile), TAB_COMMON]
        if Platform is None:
            EdkLogger.error('build', PARSER_ERROR, "Failed to parser DSC file: %s" % Dscfile)
        return Platform
# Module-level singleton database shared by the build tools.
BuildDB = WorkspaceDatabase()

##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    pass
| edk2-master | BaseTools/Source/Python/Workspace/WorkspaceDatabase.py |
## @file
# This file is used to create a database used by build tool
#
# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Platform build information from DSC file
#
# This class is used to retrieve information stored in database and convert them
# into PlatformBuildClassObject form for easier use for AutoGen.
#
from __future__ import print_function
from __future__ import absolute_import
from Common.StringUtils import *
from Common.DataType import *
from Common.Misc import *
from types import *
from Common.Expression import *
from CommonDataClass.CommonClass import SkuInfoClass
from Common.TargetTxtClassObject import TargetTxtDict,gDefaultTargetTxtFile
from Common.ToolDefClassObject import ToolDefDict,gDefaultToolsDefFile
from .MetaDataTable import *
from .MetaFileTable import *
from .MetaFileParser import *
from .WorkspaceCommon import GetDeclaredPcd
from Common.Misc import AnalyzeDscPcd
from Common.Misc import ProcessDuplicatedInf,RemoveCComments,ArrayIndex
import re
from Common.Parsing import IsValidWord
from Common.VariableAttributes import VariableAttributes
import Common.GlobalData as GlobalData
import subprocess
from functools import reduce
from Common.Misc import SaveFileOnChange
from Workspace.BuildClassObject import PlatformBuildClassObject, StructurePcd, PcdClassObject, ModuleBuildClassObject
from collections import OrderedDict, defaultdict
def _IsFieldValueAnArray (Value):
    """Return True when a field value is written in an array-like DSC form.

    Recognized forms: GUID(...), multi-character L"..."/"..."/L'...'/'...'
    literals, and {...} byte lists.  An empty value would raise IndexError
    on the bare-index checks, matching the original behavior.
    """
    Value = Value.strip()
    if Value.startswith(TAB_GUID) and Value.endswith(')'):
        return True
    # len() is applied to the slice directly; the original wrapped each slice
    # in list() first, which built a throwaway list for the same length.
    if Value.startswith('L"') and Value.endswith('"') and len(Value[2:-1]) > 1:
        return True
    if Value[0] == '"' and Value[-1] == '"' and len(Value[1:-1]) > 1:
        return True
    if Value[0] == '{' and Value[-1] == '}':
        return True
    if Value.startswith("L'") and Value.endswith("'") and len(Value[2:-1]) > 1:
        return True
    if Value[0] == "'" and Value[-1] == "'" and len(Value[1:-1]) > 1:
        return True
    return False
# Names of the generated PCD-value helper application and its common source.
PcdValueInitName = 'PcdValueInit'
PcdValueCommonName = 'PcdValueCommon'

# C-source scaffolding for the generated PcdValueInit helper program.
PcdMainCHeader = '''
/**
DO NOT EDIT
FILE auto-generated
**/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <PcdValueCommon.h>
'''

PcdMainCEntry = '''
int
main (
int argc,
char *argv[]
)
{
return PcdValueMain (argc, argv);
}
'''

PcdMakefileHeader = '''
#
# DO NOT EDIT
# This file is auto-generated by build utility
#
'''

WindowsCFLAGS = 'CFLAGS = $(CFLAGS) /wd4200 /wd4034 /wd4101 '
LinuxCFLAGS = 'CFLAGS += -Wno-pointer-to-int-cast -Wno-unused-variable '
# Raw string: the backslash paths (\S, \C, \M) are invalid escape sequences in
# a plain literal and trigger DeprecationWarning on modern Python; the raw
# form preserves the exact same bytes.
PcdMakefileEnd = r'''
!INCLUDE $(BASE_TOOLS_PATH)\Source\C\Makefiles\ms.common
!INCLUDE $(BASE_TOOLS_PATH)\Source\C\Makefiles\ms.app
'''

AppTarget = '''
all: $(APPFILE)
$(APPLICATION): $(OBJECTS)
$(APPFILE): $(APPLICATION)
%s
'''

PcdGccMakefile = '''
MAKEROOT ?= $(EDK_TOOLS_PATH)/Source/C
LIBS = -lCommon
'''

variablePattern = re.compile(r'[\t\s]*0[xX][a-fA-F0-9]+$')
SkuIdPattern = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
## regular expressions for finding decimal and hex numbers
# Raw string added: '\d' is an invalid escape sequence in a plain literal.
Pattern = re.compile(r'^[1-9]\d*|0$')
HexPattern = re.compile(r'0[xX][0-9a-fA-F]+$')
## Regular expression for finding header file inclusions
from AutoGen.GenMake import gIncludePattern
## Find dependencies for one source file
#
#  By searching recursively "#include" directive in file, find out all the
#  files needed by given source file. The dependencies will be only searched
#  in given search path list.
#
#   @param      FileStack       Seed files to scan (consumed as a work list)
#   @param      SearchPathList  The list of search path
#
#   @retval     list            The list of files the given source file depends on
#
def GetDependencyList(FileStack, SearchPathList):
    DepDb = dict()                  # file -> normalized include names found in it
    DependencySet = set(FileStack)
    while len(FileStack) > 0:
        F = FileStack.pop()
        FullPathDependList = []
        CurrentFileDependencyList = []
        if F in DepDb:
            CurrentFileDependencyList = DepDb[F]
        else:
            try:
                # Bug fix: read in binary mode and close via context manager.
                # The original opened in text mode, so the BOM check below
                # never matched and str.decode() raised, silently skipping
                # every file; its 'if "Fd" in dir(locals())' guard also never
                # closed the handle.
                with open(F, 'rb') as Fd:
                    FileContent = Fd.read()
            except BaseException as X:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F + "\n\t" + str(X))

            if len(FileContent) == 0:
                continue
            try:
                if FileContent[0] == 0xff or FileContent[0] == 0xfe:
                    # UTF-16 byte order mark detected
                    FileContent = FileContent.decode('utf-16')
                else:
                    FileContent = FileContent.decode()
            except:
                # The file is not a text file (for example a .mcb file); skip it.
                continue
            IncludedFileList = gIncludePattern.findall(FileContent)

            for Inc in IncludedFileList:
                Inc = Inc.strip()
                Inc = os.path.normpath(Inc)
                CurrentFileDependencyList.append(Inc)
            DepDb[F] = CurrentFileDependencyList

        # Resolve each include against the including file's own directory
        # first, then the caller-provided search paths.
        CurrentFilePath = os.path.dirname(F)
        PathList = [CurrentFilePath] + SearchPathList
        for Inc in CurrentFileDependencyList:
            for SearchPath in PathList:
                FilePath = os.path.join(SearchPath, Inc)
                if not os.path.exists(FilePath):
                    continue
                if FilePath not in DependencySet:
                    FileStack.append(FilePath)
                    FullPathDependList.append(FilePath)
                break
        DependencySet.update(FullPathDependList)
    DependencyList = list(DependencySet)  # remove duplicate ones
    return DependencyList
class DscBuildData(PlatformBuildClassObject):
    """Platform build information retrieved from a DSC file, converted into
    PlatformBuildClassObject form for AutoGen."""
    # dict used to convert part of [Defines] to members of DscBuildData directly
    # (keys not listed here get special handling in _GetHeaderInfo)
    _PROPERTY_ = {
        #
        # Required Fields
        #
        TAB_DSC_DEFINES_PLATFORM_NAME           :   "_PlatformName",
        TAB_DSC_DEFINES_PLATFORM_GUID           :   "_Guid",
        TAB_DSC_DEFINES_PLATFORM_VERSION        :   "_Version",
        TAB_DSC_DEFINES_DSC_SPECIFICATION       :   "_DscSpecification",
        # TAB_DSC_DEFINES_OUTPUT_DIRECTORY        :   "_OutputDirectory",
        # TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES :   "_SupArchList",
        # TAB_DSC_DEFINES_BUILD_TARGETS           :   "_BuildTargets",
        TAB_DSC_DEFINES_SKUID_IDENTIFIER        :   "_SkuName",
        # TAB_DSC_DEFINES_FLASH_DEFINITION        :   "_FlashDefinition",
        TAB_DSC_DEFINES_BUILD_NUMBER            :   "_BuildNumber",
        TAB_DSC_DEFINES_MAKEFILE_NAME           :   "_MakefileName",
        TAB_DSC_DEFINES_BS_BASE_ADDRESS         :   "_BsBaseAddress",
        TAB_DSC_DEFINES_RT_BASE_ADDRESS         :   "_RtBaseAddress",
        # TAB_DSC_DEFINES_RFC_LANGUAGES           :   "_RFCLanguages",
        # TAB_DSC_DEFINES_ISO_LANGUAGES           :   "_ISOLanguages",
    }

    # used to compose dummy library class name for those forced library instances
    _NullLibraryNumber = 0
## Constructor of DscBuildData
#
#  Initialize object of DscBuildData
#
#   @param      FilePath        The path of platform description file
#   @param      RawData         The raw data of DSC file
#   @param      BuildDataBase   Database used to retrieve module/package information
#   @param      Arch            The target architecture
#   @param      Platform        (not used for DscBuildData)
#   @param      Macros          Macros used for replacement in DSC file
#
def __init__(self, FilePath, RawData, BuildDataBase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
    self.MetaFile = FilePath        # path of the DSC file
    self._RawData = RawData         # parsed meta-file records
    self._Bdb = BuildDataBase       # workspace database for lookups
    self._Arch = Arch
    self._Target = Target
    self._Toolchain = Toolchain
    self._ToolChainFamily = None
    self._Clear()
    # WORKSPACE may be unset when running outside a build environment
    self.WorkspaceDir = os.getenv("WORKSPACE") if os.getenv("WORKSPACE") else ""
    self.DefaultStores = None
    # NOTE: the SkuName/SkuIds properties trigger lazy DSC parsing here
    self.SkuIdMgr = SkuClass(self.SkuName, self.SkuIds)
    self.UpdatePcdTypeDict()
@property
def OutputPath(self):
    """Directory for the generated PcdValueInit artifacts."""
    Workspace = os.getenv("WORKSPACE")
    if Workspace:
        return os.path.join(Workspace, self.OutputDirectory, self._Target + "_" + self._Toolchain, PcdValueInitName)
    # No workspace environment: fall back to the DSC file's own directory.
    return os.path.dirname(self.DscFile)
## XXX[key] = value
def __setitem__(self, key, value):
    # Write straight into __dict__ (named via _PROPERTY_), bypassing any
    # property descriptors defined on the class.
    self.__dict__[self._PROPERTY_[key]] = value
## value = XXX[key]
def __getitem__(self, key):
    # Read the attribute named by the _PROPERTY_ mapping; raises KeyError
    # for keys not listed there.
    return self.__dict__[self._PROPERTY_[key]]
## "in" test support
def __contains__(self, key):
return key in self._PROPERTY_
## Set all internal used members of DscBuildData to None
def _Clear(self):
    """Reset every lazily-computed member so it is re-derived on demand."""
    for MemberName in ('_Header', '_PlatformName', '_Guid', '_Version',
                       '_DscSpecification', '_OutputDirectory', '_SupArchList',
                       '_BuildTargets', '_SkuName', '_PcdInfoFlag',
                       '_VarCheckFlag', '_FlashDefinition', '_Prebuild',
                       '_Postbuild', '_BuildNumber', '_MakefileName',
                       '_BsBaseAddress', '_RtBaseAddress', '_SkuIds',
                       '_Modules', '_LibraryInstances', '_LibraryClasses',
                       '_Pcds', '_DecPcds', '_BuildOptions',
                       '_ModuleTypeOptions', '_LoadFixAddress', '_RFCLanguages',
                       '_ISOLanguages', '_VpdToolGuid', '_MacroDict',
                       'DefaultStores'):
        setattr(self, MemberName, None)
## Get current effective macros
@property
def _Macros(self):
    """Lazily build the macro table used for value replacement."""
    if self._MacroDict is None:
        Macros = {}
        # Later updates win: command-line defines take the highest precedence.
        Macros.update(GlobalData.gPlatformDefines)
        Macros.update(GlobalData.gGlobalDefines)
        Macros.update(GlobalData.gCommandLineDefines)
        self._MacroDict = Macros
    return self._MacroDict
## Get architecture
@property
def Arch(self):
    # Target architecture string given to the constructor.
    return self._Arch
@property
def Dir(self):
    # Delegate to the meta file's Dir attribute.
    return self.MetaFile.Dir
## Retrieve all information in [Defines] section
#
#   (Retrieving all [Defines] information in one-shot is just to save time.)
#
def _GetHeaderInfo(self):
    """Parse every record of the [Defines] section and cache the results.

    Keys listed in _PROPERTY_ are stored directly on self; the remaining
    keys get the special handling below.  Sets self._Header as a
    parsed-once marker.
    """
    RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
    for Record in RecordList:
        Name = Record[1]
        # items defined _PROPERTY_ don't need additional processing

        # some special items in [Defines] section need special treatment
        if Name == TAB_DSC_DEFINES_OUTPUT_DIRECTORY:
            self._OutputDirectory = NormPath(Record[2], self._Macros)
            if ' ' in self._OutputDirectory:
                EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in OUTPUT_DIRECTORY",
                                File=self.MetaFile, Line=Record[-1],
                                ExtraData=self._OutputDirectory)
        elif Name == TAB_DSC_DEFINES_FLASH_DEFINITION:
            self._FlashDefinition = PathClass(NormPath(Record[2], self._Macros), GlobalData.gWorkspace)
            ErrorCode, ErrorInfo = self._FlashDefinition.Validate('.fdf')
            if ErrorCode != 0:
                EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=Record[-1],
                                ExtraData=ErrorInfo)
        elif Name == TAB_DSC_PREBUILD:
            PrebuildValue = Record[2]
            # A quoted value must be terminated; quotes are stripped off.
            if Record[2][0] == '"':
                if Record[2][-1] != '"':
                    EdkLogger.error('build', FORMAT_INVALID, 'Missing double quotes in the end of %s statement.' % TAB_DSC_PREBUILD,
                                    File=self.MetaFile, Line=Record[-1])
                PrebuildValue = Record[2][1:-1]
            self._Prebuild = PrebuildValue
        elif Name == TAB_DSC_POSTBUILD:
            PostbuildValue = Record[2]
            if Record[2][0] == '"':
                if Record[2][-1] != '"':
                    EdkLogger.error('build', FORMAT_INVALID, 'Missing double quotes in the end of %s statement.' % TAB_DSC_POSTBUILD,
                                    File=self.MetaFile, Line=Record[-1])
                PostbuildValue = Record[2][1:-1]
            self._Postbuild = PostbuildValue
        elif Name == TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES:
            self._SupArchList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
        elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
            self._BuildTargets = GetSplitValueList(Record[2])
        elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
            if self._SkuName is None:
                self._SkuName = Record[2]
            # Command-line SKU id (gSKUID_CMD) overrides the DSC value.
            if GlobalData.gSKUID_CMD:
                self._SkuName = GlobalData.gSKUID_CMD
        elif Name == TAB_DSC_DEFINES_PCD_INFO_GENERATION:
            self._PcdInfoFlag = Record[2]
        elif Name == TAB_DSC_DEFINES_PCD_VAR_CHECK_GENERATION:
            self._VarCheckFlag = Record[2]
        elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
            try:
                # Base 0 accepts both decimal and 0x-prefixed hex strings.
                self._LoadFixAddress = int (Record[2], 0)
            except:
                EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (Record[2]))
        elif Name == TAB_DSC_DEFINES_RFC_LANGUAGES:
            if not Record[2] or Record[2][0] != '"' or Record[2][-1] != '"' or len(Record[2]) == 1:
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for RFC_LANGUAGES must have double quotes around it, for example: RFC_LANGUAGES = "en-us;zh-hans"',
                                File=self.MetaFile, Line=Record[-1])
            LanguageCodes = Record[2][1:-1]
            if not LanguageCodes:
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more RFC4646 format language code must be provided for RFC_LANGUAGES statement',
                                File=self.MetaFile, Line=Record[-1])
            LanguageList = GetSplitValueList(LanguageCodes, TAB_SEMI_COLON_SPLIT)
            # check whether there is empty entries in the list
            if None in LanguageList:
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more empty language code is in RFC_LANGUAGES statement',
                                File=self.MetaFile, Line=Record[-1])
            self._RFCLanguages = LanguageList
        elif Name == TAB_DSC_DEFINES_ISO_LANGUAGES:
            if not Record[2] or Record[2][0] != '"' or Record[2][-1] != '"' or len(Record[2]) == 1:
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for ISO_LANGUAGES must have double quotes around it, for example: ISO_LANGUAGES = "engchn"',
                                File=self.MetaFile, Line=Record[-1])
            LanguageCodes = Record[2][1:-1]
            if not LanguageCodes:
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more ISO639-2 format language code must be provided for ISO_LANGUAGES statement',
                                File=self.MetaFile, Line=Record[-1])
            if len(LanguageCodes) % 3:
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'bad ISO639-2 format for ISO_LANGUAGES',
                                File=self.MetaFile, Line=Record[-1])
            LanguageList = []
            # ISO639-2 codes are fixed-width (3 chars); split without separators.
            for i in range(0, len(LanguageCodes), 3):
                LanguageList.append(LanguageCodes[i:i + 3])
            self._ISOLanguages = LanguageList
        elif Name == TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE:
            # First definition wins; do not overwrite a command-line value.
            if TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE not in gCommandLineDefines:
                gCommandLineDefines[TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE] = Record[2].strip()
        elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
            #
            # try to convert GUID to a real UUID value to see whether the GUID is format
            # for VPD_TOOL_GUID is correct.
            #
            try:
                uuid.UUID(Record[2])
            except:
                EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
            self._VpdToolGuid = Record[2]
        elif Name == TAB_DSC_DEFINES_PCD_DYNAMIC_AS_DYNAMICEX:
            if TAB_DSC_DEFINES_PCD_DYNAMIC_AS_DYNAMICEX not in gCommandLineDefines:
                gCommandLineDefines[TAB_DSC_DEFINES_PCD_DYNAMIC_AS_DYNAMICEX] = Record[2].strip()
        elif Name in self:
            # Plain [Defines] keys map straight onto attributes via _PROPERTY_.
            self[Name] = Record[2]
    # set _Header to non-None in order to avoid database re-querying
    self._Header = 'DUMMY'
## Retrieve platform name
@property
def PlatformName(self):
    """PLATFORM_NAME from [Defines]; parsing it is mandatory."""
    if self._PlatformName is not None:
        return self._PlatformName
    if self._Header is None:
        self._GetHeaderInfo()
    if self._PlatformName is None:
        EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
    return self._PlatformName
@property
def Platform(self):
    # Alias of PlatformName.
    return self.PlatformName
## Retrieve file guid
@property
def Guid(self):
    """PLATFORM_GUID from [Defines]; parsing it is mandatory."""
    if self._Guid is not None:
        return self._Guid
    if self._Header is None:
        self._GetHeaderInfo()
    if self._Guid is None:
        EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_GUID", File=self.MetaFile)
    return self._Guid
## Retrieve platform version
@property
def Version(self):
    """PLATFORM_VERSION from [Defines]; parsing it is mandatory."""
    if self._Version is not None:
        return self._Version
    if self._Header is None:
        self._GetHeaderInfo()
    if self._Version is None:
        EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_VERSION", File=self.MetaFile)
    return self._Version
## Retrieve platform description file version
@property
def DscSpecification(self):
if self._DscSpecification is None:
if self._Header is None:
self._GetHeaderInfo()
if self._DscSpecification is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No DSC_SPECIFICATION", File=self.MetaFile)
return self._DscSpecification
## Retrieve OUTPUT_DIRECTORY
@property
def OutputDirectory(self):
if self._OutputDirectory is None:
if self._Header is None:
self._GetHeaderInfo()
if self._OutputDirectory is None:
self._OutputDirectory = os.path.join("Build", self._PlatformName)
return self._OutputDirectory
## Retrieve SUPPORTED_ARCHITECTURES
@property
def SupArchList(self):
if self._SupArchList is None:
if self._Header is None:
self._GetHeaderInfo()
if self._SupArchList is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No SUPPORTED_ARCHITECTURES", File=self.MetaFile)
return self._SupArchList
## Retrieve BUILD_TARGETS
@property
def BuildTargets(self):
if self._BuildTargets is None:
if self._Header is None:
self._GetHeaderInfo()
if self._BuildTargets is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BUILD_TARGETS", File=self.MetaFile)
return self._BuildTargets
@property
def PcdInfoFlag(self):
if self._PcdInfoFlag is None or self._PcdInfoFlag.upper() == 'FALSE':
return False
elif self._PcdInfoFlag.upper() == 'TRUE':
return True
else:
return False
@property
def VarCheckFlag(self):
if self._VarCheckFlag is None or self._VarCheckFlag.upper() == 'FALSE':
return False
elif self._VarCheckFlag.upper() == 'TRUE':
return True
else:
return False
# # Retrieve SKUID_IDENTIFIER
@property
def SkuName(self):
if self._SkuName is None:
if self._Header is None:
self._GetHeaderInfo()
if self._SkuName is None:
self._SkuName = TAB_DEFAULT
return self._SkuName
    ## Override SKUID_IDENTIFIER
    @SkuName.setter
    def SkuName(self, Value):
        # Value: SKU identifier string that replaces the cached SKUID_IDENTIFIER.
        self._SkuName = Value
@property
def FlashDefinition(self):
if self._FlashDefinition is None:
if self._Header is None:
self._GetHeaderInfo()
if self._FlashDefinition is None:
self._FlashDefinition = ''
return self._FlashDefinition
@property
def Prebuild(self):
if self._Prebuild is None:
if self._Header is None:
self._GetHeaderInfo()
if self._Prebuild is None:
self._Prebuild = ''
return self._Prebuild
@property
def Postbuild(self):
if self._Postbuild is None:
if self._Header is None:
self._GetHeaderInfo()
if self._Postbuild is None:
self._Postbuild = ''
return self._Postbuild
## Retrieve FLASH_DEFINITION
@property
def BuildNumber(self):
if self._BuildNumber is None:
if self._Header is None:
self._GetHeaderInfo()
if self._BuildNumber is None:
self._BuildNumber = ''
return self._BuildNumber
## Retrieve MAKEFILE_NAME
@property
def MakefileName(self):
if self._MakefileName is None:
if self._Header is None:
self._GetHeaderInfo()
if self._MakefileName is None:
self._MakefileName = ''
return self._MakefileName
## Retrieve BsBaseAddress
@property
def BsBaseAddress(self):
if self._BsBaseAddress is None:
if self._Header is None:
self._GetHeaderInfo()
if self._BsBaseAddress is None:
self._BsBaseAddress = ''
return self._BsBaseAddress
## Retrieve RtBaseAddress
@property
def RtBaseAddress(self):
if self._RtBaseAddress is None:
if self._Header is None:
self._GetHeaderInfo()
if self._RtBaseAddress is None:
self._RtBaseAddress = ''
return self._RtBaseAddress
    ## Retrieve the top address for the load fix address
    @property
    def LoadFixAddress(self):
        """FIX_LOAD_TOP_MEMORY_ADDRESS as an int.

        Resolution order: cached value -> DSC [Defines] macro (default '0') ->
        command-line -D override (which always wins).  Validates that the final
        value is a non-negative, 4K-aligned integer; the all-ones 64-bit value
        is accepted as-is (treated as a special case, not alignment-checked).
        Any failure is a fatal build error via EdkLogger.
        """
        if self._LoadFixAddress is None:
            if self._Header is None:
                self._GetHeaderInfo()
            if self._LoadFixAddress is None:
                self._LoadFixAddress = self._Macros.get(TAB_FIX_LOAD_TOP_MEMORY_ADDRESS, '0')
            # Accept either decimal or 0x-prefixed hex (int base 0).
            try:
                self._LoadFixAddress = int (self._LoadFixAddress, 0)
            except:
                EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (self._LoadFixAddress))
        #
        # If command line defined, should override the value in DSC file.
        #
        if 'FIX_LOAD_TOP_MEMORY_ADDRESS' in GlobalData.gCommandLineDefines:
            try:
                self._LoadFixAddress = int(GlobalData.gCommandLineDefines['FIX_LOAD_TOP_MEMORY_ADDRESS'], 0)
            except:
                EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (GlobalData.gCommandLineDefines['FIX_LOAD_TOP_MEMORY_ADDRESS']))
        if self._LoadFixAddress < 0:
            EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is set to the invalid negative value 0x%x" % (self._LoadFixAddress))
        # 0xFFFFFFFFFFFFFFFF is exempt from the 4K-alignment requirement.
        if self._LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self._LoadFixAddress % 0x1000 != 0:
            EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is set to the invalid unaligned 4K value 0x%x" % (self._LoadFixAddress))
        return self._LoadFixAddress
## Retrieve RFCLanguage filter
@property
def RFCLanguages(self):
if self._RFCLanguages is None:
if self._Header is None:
self._GetHeaderInfo()
if self._RFCLanguages is None:
self._RFCLanguages = []
return self._RFCLanguages
## Retrieve ISOLanguage filter
@property
def ISOLanguages(self):
if self._ISOLanguages is None:
if self._Header is None:
self._GetHeaderInfo()
if self._ISOLanguages is None:
self._ISOLanguages = []
return self._ISOLanguages
## Retrieve the GUID string for VPD tool
@property
def VpdToolGuid(self):
if self._VpdToolGuid is None:
if self._Header is None:
self._GetHeaderInfo()
if self._VpdToolGuid is None:
self._VpdToolGuid = ''
return self._VpdToolGuid
    ## Retrieve [SkuIds] section information
    @property
    def SkuIds(self):
        """OrderedDict: SKU NAME (upper-cased) -> (id-string, NAME, third field upper-cased).

        Each [SkuIds] record is "<id>|<name>[|<extra>]"; the id may be decimal
        or hex, the name must match SkuIdPattern.  The third field is validated
        with the same pattern (presumably a parent SKU name -- confirm against
        the DSC spec).  DEFAULT and COMMON entries are always guaranteed.
        """
        if self._SkuIds is None:
            self._SkuIds = OrderedDict()
            RecordList = self._RawData[MODEL_EFI_SKU_ID, self._Arch]
            for Record in RecordList:
                if not Record[0]:
                    EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID number',
                                    File=self.MetaFile, Line=Record[-1])
                if not Record[1]:
                    EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
                                    File=self.MetaFile, Line=Record[-1])
                # The id must be either a decimal (Pattern) or hex (HexPattern) literal.
                if not Pattern.match(Record[0]) and not HexPattern.match(Record[0]):
                    EdkLogger.error('build', FORMAT_INVALID, "The format of the Sku ID number is invalid. It only support Integer and HexNumber",
                                    File=self.MetaFile, Line=Record[-1])
                if not SkuIdPattern.match(Record[1]) or (Record[2] and not SkuIdPattern.match(Record[2])):
                    EdkLogger.error('build', FORMAT_INVALID, "The format of the Sku ID name is invalid. The correct format is '(a-zA-Z_)(a-zA-Z0-9_)*'",
                                    File=self.MetaFile, Line=Record[-1])
                # Normalize the id to a decimal string and the names to upper case.
                self._SkuIds[Record[1].upper()] = (str(DscBuildData.ToInt(Record[0])), Record[1].upper(), Record[2].upper())
            if TAB_DEFAULT not in self._SkuIds:
                self._SkuIds[TAB_DEFAULT] = ("0", TAB_DEFAULT, TAB_DEFAULT)
            if TAB_COMMON not in self._SkuIds:
                self._SkuIds[TAB_COMMON] = ("0", TAB_DEFAULT, TAB_DEFAULT)
        return self._SkuIds
@staticmethod
def ToInt(intstr):
return int(intstr, 16) if intstr.upper().startswith("0X") else int(intstr)
    def _GetDefaultStores(self):
        """Build and cache the [DefaultStores] table.

        Populates self.DefaultStores as an OrderedDict mapping upper-cased store
        NAME -> (numeric id, NAME).  Guarantees the TAB_DEFAULT_STORES_DEFAULT
        entry with id 0 and publishes the sorted name list through
        GlobalData.gDefaultStores.  Any malformed record is a fatal build error.
        """
        if self.DefaultStores is None:
            self.DefaultStores = OrderedDict()
            RecordList = self._RawData[MODEL_EFI_DEFAULT_STORES, self._Arch]
            for Record in RecordList:
                if not Record[0]:
                    EdkLogger.error('build', FORMAT_INVALID, 'No DefaultStores ID number',
                                    File=self.MetaFile, Line=Record[-1])
                if not Record[1]:
                    EdkLogger.error('build', FORMAT_INVALID, 'No DefaultStores ID name',
                                    File=self.MetaFile, Line=Record[-1])
                # Id must be a decimal or hex literal.
                if not Pattern.match(Record[0]) and not HexPattern.match(Record[0]):
                    EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID number is invalid. It only support Integer and HexNumber",
                                    File=self.MetaFile, Line=Record[-1])
                if not IsValidWord(Record[1]):
                    EdkLogger.error('build', FORMAT_INVALID, "The format of the DefaultStores ID name is invalid. The correct format is '(a-zA-Z0-9_)(a-zA-Z0-9_-.)*'",
                                    File=self.MetaFile, Line=Record[-1])
                self.DefaultStores[Record[1].upper()] = (DscBuildData.ToInt(Record[0]), Record[1].upper())
            if TAB_DEFAULT_STORES_DEFAULT not in self.DefaultStores:
                self.DefaultStores[TAB_DEFAULT_STORES_DEFAULT] = (0, TAB_DEFAULT_STORES_DEFAULT)
            GlobalData.gDefaultStores = sorted(self.DefaultStores.keys())
        return self.DefaultStores
    def OverrideDuplicateModule(self):
        """Disable earlier [Components] records that a later record duplicates.

        Two components are considered duplicates when they share both the module
        INF path and the FILE_GUID override ("NULL" when no override exists);
        the later record wins and the earlier one is disabled in the raw table.
        Deduplication is skipped for the COMMON arch.  Clears the raw parser's
        post-processed flag so the table is re-evaluated.
        """
        RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
        Macros = self._Macros
        Components = {}
        for Record in RecordList:
            ModuleId = Record[6]
            # The component's <Defines> record, if any, carries its FILE_GUID override.
            file_guid = self._RawData[MODEL_META_DATA_HEADER, self._Arch, None, ModuleId]
            file_guid_str = file_guid[0][2] if file_guid else "NULL"
            ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
            if self._Arch != TAB_ARCH_COMMON and (file_guid_str,str(ModuleFile)) in Components:
                self._RawData.DisableOverrideComponent(Components[(file_guid_str,str(ModuleFile))])
            Components[(file_guid_str,str(ModuleFile))] = ModuleId
        self._RawData._PostProcessed = False
    ## Retrieve packages this Platform depends on
    @cached_property
    def Packages(self):
        """Set of parsed package build objects for every [Packages] record.

        Each DEC path is macro-expanded, validated, and parsed through the
        workspace database (self._Bdb) so protocol/PPI/GUID values become
        available.  Invalid paths are fatal build errors.
        """
        RetVal = set()
        RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch]
        Macros = self._Macros
        for Record in RecordList:
            File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
            # check the file validation
            ErrorCode, ErrorInfo = File.Validate('.dec')
            if ErrorCode != 0:
                LineNo = Record[-1]
                EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
            # parse this package now. we need it to get protocol/ppi/guid value
            RetVal.add(self._Bdb[File, self._Arch, self._Target, self._Toolchain])
        return RetVal
    ## Retrieve [Components] section information
    @property
    def Modules(self):
        """OrderedDict of module PathClass -> ModuleBuildClassObject from [Components].

        For every component this collects its scoped <LibraryClasses> overrides,
        <Pcds*> overrides (including structure-PCD field assignments, gathered in
        Module.StrPcdSet), <BuildOptions> overrides, and an optional FILE_GUID
        override from its <Defines> sub-section.  Duplicated components are
        disabled first via OverrideDuplicateModule().
        """
        if self._Modules is not None:
            return self._Modules
        self.OverrideDuplicateModule()
        self._Modules = OrderedDict()
        RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
        Macros = self._Macros
        for Record in RecordList:
            ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
            ModuleId = Record[6]
            LineNo = Record[7]
            # check the file validation
            ErrorCode, ErrorInfo = ModuleFile.Validate('.inf')
            if ErrorCode != 0:
                EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
                                ExtraData=ErrorInfo)
            ModuleBuildData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
            Module = ModuleBuildClassObject()
            Module.MetaFile = ModuleFile
            Module.Guid = ModuleBuildData.Guid
            # get module private library instance
            RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId]
            for Record in RecordList:
                LibraryClass = Record[0]
                LibraryPath = PathClass(NormPath(Record[1], Macros), GlobalData.gWorkspace, Arch=self._Arch)
                LineNo = Record[-1]
                # check the file validation
                ErrorCode, ErrorInfo = LibraryPath.Validate('.inf')
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
                                    ExtraData=ErrorInfo)
                # A NULL/empty class is a "forced" library; give it a unique NULL<n> name.
                if LibraryClass == '' or LibraryClass == 'NULL':
                    self._NullLibraryNumber += 1
                    LibraryClass = 'NULL%d' % self._NullLibraryNumber
                    EdkLogger.verbose("Found forced library for %s\n\t%s [%s]" % (ModuleFile, LibraryPath, LibraryClass))
                Module.LibraryClasses[LibraryClass] = LibraryPath
                if LibraryPath not in self.LibraryInstances:
                    self.LibraryInstances.append(LibraryPath)
            S_PcdSet = []
            # get module private PCD setting
            for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \
                         MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
                RecordList = self._RawData[Type, self._Arch, None, ModuleId]
                for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
                    TokenList = GetSplitValueList(Setting)
                    DefaultValue = TokenList[0]
                    # the format is PcdName| Value | VOID* | MaxDatumSize
                    if len(TokenList) > 2:
                        MaxDatumSize = TokenList[2]
                    else:
                        MaxDatumSize = ''
                    TypeString = self._PCD_TYPE_STRING_[Type]
                    TCName,PCName,DimensionAttr,Field = self.ParsePcdNameStruct(TokenSpaceGuid, PcdCName)
                    # A '.' in the token space or '[' in the name marks a structure-PCD
                    # field/array assignment; record it separately and blank the value.
                    if ("." in TokenSpaceGuid or "[" in PcdCName):
                        S_PcdSet.append([ TCName,PCName,DimensionAttr,Field, ModuleBuildData.Guid, "", Dummy5, AnalyzePcdExpression(Setting)[0]])
                        DefaultValue = ''
                    if ( PCName,TCName) not in Module.Pcds:
                        Pcd = PcdClassObject(
                                PCName,
                                TCName,
                                TypeString,
                                '',
                                DefaultValue,
                                '',
                                MaxDatumSize,
                                {},
                                False,
                                None,
                                IsDsc=True)
                        Module.Pcds[PCName, TCName] = Pcd
            Module.StrPcdSet = S_PcdSet
            for TCName,PCName, _,_,_,_,_,_ in S_PcdSet:
                if (PCName,TCName) in Module.Pcds:
                    # NOTE(review): Dummy5 here leaks from the last iteration of the
                    # PCD record loop above, not from this S_PcdSet entry -- looks
                    # suspicious; confirm the intended line number is recorded.
                    Module.StrPcdOverallValue[(PCName,TCName)] = Module.Pcds[(PCName,TCName)].DefaultValue, self.MetaFile,Dummy5
            # get module private build options
            RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId]
            for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
                if (ToolChainFamily, ToolChain) not in Module.BuildOptions:
                    Module.BuildOptions[ToolChainFamily, ToolChain] = Option
                else:
                    # Repeated options for the same tool key are appended.
                    OptionString = Module.BuildOptions[ToolChainFamily, ToolChain]
                    Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
            RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, None, ModuleId]
            if RecordList:
                if len(RecordList) != 1:
                    EdkLogger.error('build', OPTION_UNKNOWN, 'Only FILE_GUID can be listed in <Defines> section.',
                                    File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
                # FILE_GUID override: clone the INF under the new GUID and retarget.
                ModuleFile = ProcessDuplicatedInf(ModuleFile, RecordList[0][2], GlobalData.gWorkspace)
                ModuleFile.Arch = self._Arch
                Module.Guid = RecordList[0][2]
                for item in Module.StrPcdSet:
                    item[4] = RecordList[0][2]
            self._Modules[ModuleFile] = Module
        return self._Modules
## Retrieve all possible library instances used in this platform
@property
def LibraryInstances(self):
if self._LibraryInstances is None:
self.LibraryClasses
return self._LibraryInstances
    ## Retrieve [LibraryClasses] information
    @property
    def LibraryClasses(self):
        """tdict mapping (LibraryClass, ModuleType) -> library instance PathClass.

        Built from [LibraryClasses] records (resolved per arch/module-type via a
        3-level tdict) plus EDK-style [Libraries] instances, which are keyed by
        their BaseName under the ':dummy:' module type.  Also accumulates every
        instance into self._LibraryInstances.
        """
        if self._LibraryClasses is None:
            self._LibraryInstances = []
            #
            # tdict is a special dict kind of type, used for selecting correct
            # library instance for given library class and module type
            #
            LibraryClassDict = tdict(True, 3)
            # track all library class names
            LibraryClassSet = set()
            RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, -1]
            Macros = self._Macros
            for Record in RecordList:
                LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy, Dummy, LineNo = Record
                # NULL/empty class means a forced library; synthesize a unique name.
                if LibraryClass == '' or LibraryClass == 'NULL':
                    self._NullLibraryNumber += 1
                    LibraryClass = 'NULL%d' % self._NullLibraryNumber
                    EdkLogger.verbose("Found forced library for arch=%s\n\t%s [%s]" % (Arch, LibraryInstance, LibraryClass))
                LibraryClassSet.add(LibraryClass)
                LibraryInstance = PathClass(NormPath(LibraryInstance, Macros), GlobalData.gWorkspace, Arch=self._Arch)
                # check the file validation
                ErrorCode, ErrorInfo = LibraryInstance.Validate('.inf')
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
                                    ExtraData=ErrorInfo)
                if ModuleType != TAB_COMMON and ModuleType not in SUP_MODULE_LIST:
                    EdkLogger.error('build', OPTION_UNKNOWN, "Unknown module type [%s]" % ModuleType,
                                    File=self.MetaFile, ExtraData=LibraryInstance, Line=LineNo)
                LibraryClassDict[Arch, ModuleType, LibraryClass] = LibraryInstance
                if LibraryInstance not in self._LibraryInstances:
                    self._LibraryInstances.append(LibraryInstance)
            # resolve the specific library instance for each class and each module type
            self._LibraryClasses = tdict(True)
            for LibraryClass in LibraryClassSet:
                # try all possible module types
                for ModuleType in SUP_MODULE_LIST:
                    LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass]
                    if LibraryInstance is None:
                        continue
                    self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance
            RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch]
            for Record in RecordList:
                File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
                LineNo = Record[-1]
                # check the file validation
                ErrorCode, ErrorInfo = File.Validate('.inf')
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
                                    ExtraData=ErrorInfo)
                if File not in self._LibraryInstances:
                    self._LibraryInstances.append(File)
                #
                # we need the module name as the library class name, so we have
                # to parse it here. (self._Bdb[] will trigger a file parse if it
                # hasn't been parsed)
                #
                Library = self._Bdb[File, self._Arch, self._Target, self._Toolchain]
                self._LibraryClasses[Library.BaseName, ':dummy:'] = Library
        return self._LibraryClasses
    def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
        """Validate one DSC PCD setting against its DEC declaration.

        Lazily builds self._DecPcds / self._GuidDict from the platform packages
        plus all packages referenced by FDF INFs on first use.  Analyzes the
        setting string, evaluates the value expression, checks datum/value
        consistency with the DEC declaration, and returns the analyzed
        ValueList.  Any failure is a fatal build error via EdkLogger.
        """
        if not self._DecPcds:
            FdfInfList = []
            if GlobalData.gFdfParser:
                FdfInfList = GlobalData.gFdfParser.Profile.InfList
            PkgSet = set()
            # Include packages of FDF-only modules so their PCD declarations count.
            for Inf in FdfInfList:
                ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch)
                if ModuleFile in self._Modules:
                    continue
                ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
                PkgSet.update(ModuleData.Packages)
            if self.Packages:
                PkgSet.update(self.Packages)
            self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain, PkgSet)
            self._GuidDict.update(GlobalData.gPlatformPcds)
        if (PcdCName, TokenSpaceGuid) not in self._DecPcds:
            EdkLogger.error('build', PARSER_ERROR,
                            "Pcd (%s.%s) defined in DSC is not declared in DEC files referenced in INF files in FDF. Arch: ['%s']" % (TokenSpaceGuid, PcdCName, self._Arch),
                            File=self.MetaFile, Line=LineNo)
        ValueList, IsValid, Index = AnalyzeDscPcd(Setting, PcdType, self._DecPcds[PcdCName, TokenSpaceGuid].DatumType)
        if not IsValid:
            if PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
                EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self.MetaFile, Line=LineNo,
                                ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
            else:
                if ValueList[2] == '-1':
                    EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self.MetaFile, Line=LineNo,
                                    ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
        if ValueList[Index]:
            DatumType = self._DecPcds[PcdCName, TokenSpaceGuid].DatumType
            # {CODE(...)} bodies are kept verbatim; everything else is evaluated.
            if "{CODE(" not in ValueList[Index]:
                try:
                    ValueList[Index] = ValueExpressionEx(ValueList[Index], DatumType, self._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, Value, File=self.MetaFile, Line=LineNo,
                                    ExtraData="PCD [%s.%s] Value \"%s\" " % (
                                    TokenSpaceGuid, PcdCName, ValueList[Index]))
                except EvaluationException as Excpt:
                    if hasattr(Excpt, 'Pcd'):
                        if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
                            EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
                                            " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
                                            " of the DSC file" % Excpt.Pcd,
                                            File=self.MetaFile, Line=LineNo)
                        else:
                            EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
                                            File=self.MetaFile, Line=LineNo)
                    else:
                        EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
                                        File=self.MetaFile, Line=LineNo)
        if ValueList[Index]:
            Valid, ErrStr = CheckPcdDatum(self._DecPcds[PcdCName, TokenSpaceGuid].DatumType, ValueList[Index])
            if not Valid:
                EdkLogger.error('build', FORMAT_INVALID, ErrStr, File=self.MetaFile, Line=LineNo,
                                ExtraData="%s.%s" % (TokenSpaceGuid, PcdCName))
            if PcdType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE):
                if self._DecPcds[PcdCName, TokenSpaceGuid].DatumType.strip() != ValueList[1].strip():
                    DecPcd = self._DecPcds[PcdCName, TokenSpaceGuid]
                    EdkLogger.error('build', FORMAT_INVALID,
                                    "Pcd datumtype used in DSC file is not the same as its declaration. DatumType:%s"%DecPcd.DatumType,
                                    File=self.MetaFile, Line=LineNo,
                                    ExtraData="Dsc:%s.%s|%s\n    Dec:%s.%s|%s|%s|%s" % (TokenSpaceGuid, PcdCName, Setting, TokenSpaceGuid, \
                                    PcdCName, DecPcd.DefaultValue, DecPcd.DatumType, DecPcd.TokenValue))
        if (TokenSpaceGuid + '.' + PcdCName) in GlobalData.gPlatformPcds:
            # Keep the global PCD value caches in sync with the evaluated value.
            if GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] != ValueList[Index]:
                GlobalData.gPlatformPcds[TokenSpaceGuid + '.' + PcdCName] = ValueList[Index]
        GlobalData.gPlatformFinalPcds[TokenSpaceGuid + '.' + PcdCName] = ValueList[Index]
        return ValueList
def _FilterPcdBySkuUsage(self, Pcds):
available_sku = self.SkuIdMgr.AvailableSkuIdSet
sku_usage = self.SkuIdMgr.SkuUsageType
if sku_usage == SkuClass.SINGLE:
for pcdname in Pcds:
pcd = Pcds[pcdname]
Pcds[pcdname].SkuInfoList = {TAB_DEFAULT:pcd.SkuInfoList[skuid] for skuid in pcd.SkuInfoList if skuid in available_sku}
if isinstance(pcd, StructurePcd) and pcd.SkuOverrideValues:
Pcds[pcdname].SkuOverrideValues = {TAB_DEFAULT:pcd.SkuOverrideValues[skuid] for skuid in pcd.SkuOverrideValues if skuid in available_sku}
else:
for pcdname in Pcds:
pcd = Pcds[pcdname]
Pcds[pcdname].SkuInfoList = {skuid:pcd.SkuInfoList[skuid] for skuid in pcd.SkuInfoList if skuid in available_sku}
if isinstance(pcd, StructurePcd) and pcd.SkuOverrideValues:
Pcds[pcdname].SkuOverrideValues = {skuid:pcd.SkuOverrideValues[skuid] for skuid in pcd.SkuOverrideValues if skuid in available_sku}
return Pcds
    def CompleteHiiPcdsDefaultStores(self, Pcds):
        """Guarantee a TAB_DEFAULT_STORES_DEFAULT entry for every HII PCD sku.

        For each HII (and HII-EX) PCD sku that lacks the default store, copies
        the value from the 'minimal' existing store as chosen by
        DefaultStore.GetMin.  Mutates and returns the same Pcds mapping.
        """
        HiiPcd = [Pcds[pcd] for pcd in Pcds if Pcds[pcd].Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]]
        DefaultStoreMgr = DefaultStore(self.DefaultStores)
        for pcd in HiiPcd:
            for skuid in pcd.SkuInfoList:
                skuobj = pcd.SkuInfoList.get(skuid)
                if TAB_DEFAULT_STORES_DEFAULT not in skuobj.DefaultStoreDict:
                    PcdDefaultStoreSet = set(defaultstorename for defaultstorename in skuobj.DefaultStoreDict)
                    mindefaultstorename = DefaultStoreMgr.GetMin(PcdDefaultStoreSet)
                    skuobj.DefaultStoreDict[TAB_DEFAULT_STORES_DEFAULT] = skuobj.DefaultStoreDict[mindefaultstorename]
        return Pcds
def RecoverCommandLinePcd(self):
def UpdateCommandLineValue(pcd):
if pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
pcd.PcdValueFromComm = pcd.DefaultValue
elif pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
pcd.PcdValueFromComm = pcd.SkuInfoList.get(TAB_DEFAULT).HiiDefaultValue
else:
pcd.PcdValueFromComm = pcd.SkuInfoList.get(TAB_DEFAULT).DefaultValue
for pcd in self._Pcds:
if isinstance(self._Pcds[pcd], StructurePcd) and (self._Pcds[pcd].PcdValueFromComm or self._Pcds[pcd].PcdFieldValueFromComm):
UpdateCommandLineValue(self._Pcds[pcd])
    def __ParsePcdFromCommandLine(self):
        """Normalize --pcd command-line overrides in GlobalData.BuildOptionPcd.

        Each raw "name=value" string is resolved against the DEC declarations
        (token space may be omitted; a trailing segment may be a structure
        field), its value is normalized through HandleFlexiblePcd and
        validated, and the entry is replaced in place by the tuple
        (TokenSpaceGuidCName, TokenCName, FieldName, value, source).  Matching
        INF-level PCD objects get their DefaultValue/PcdValueFromComm updated,
        and field assignments that are followed by a whole-PCD assignment for
        the same PCD are removed.
        """
        if GlobalData.BuildOptionPcd:
            for i, pcd in enumerate(GlobalData.BuildOptionPcd):
                # Already-normalized entries are tuples; skip them.
                if isinstance(pcd, tuple):
                    continue
                (pcdname, pcdvalue) = pcd.split('=')
                if not pcdvalue:
                    EdkLogger.error('build', AUTOGEN_ERROR, "No Value specified for the PCD %s." % (pcdname))
                # Disambiguate Guid.Pcd / Guid.Pcd.Field / Pcd / Pcd.Field forms
                # by checking which (pcd, guid) pair is actually declared.
                if '.' in pcdname:
                    (Name1, Name2) = pcdname.split('.', 1)
                    if "." in Name2:
                        (Name3, FieldName) = Name2.split(".", 1)
                        if ((Name3, Name1)) in self.DecPcds:
                            HasTokenSpace = True
                            TokenCName = Name3
                            TokenSpaceGuidCName = Name1
                        else:
                            FieldName = Name2
                            TokenCName = Name1
                            TokenSpaceGuidCName = ''
                            HasTokenSpace = False
                    else:
                        if ((Name2, Name1)) in self.DecPcds:
                            HasTokenSpace = True
                            TokenCName = Name2
                            TokenSpaceGuidCName = Name1
                            FieldName =""
                        else:
                            FieldName = Name2
                            TokenCName = Name1
                            TokenSpaceGuidCName = ''
                            HasTokenSpace = False
                else:
                    FieldName = ""
                    TokenCName = pcdname
                    TokenSpaceGuidCName = ''
                    HasTokenSpace = False
                TokenSpaceGuidCNameList = []
                FoundFlag = False
                PcdDatumType = ''
                DisplayName = TokenCName
                if FieldName:
                    DisplayName = TokenCName + '.' + FieldName
                if not HasTokenSpace:
                    # No token space given: search all DEC PCDs; ambiguity is fatal.
                    for key in self.DecPcds:
                        PcdItem = self.DecPcds[key]
                        if TokenCName == PcdItem.TokenCName:
                            if not PcdItem.TokenSpaceGuidCName in TokenSpaceGuidCNameList:
                                if len (TokenSpaceGuidCNameList) < 1:
                                    TokenSpaceGuidCNameList.append(PcdItem.TokenSpaceGuidCName)
                                    TokenSpaceGuidCName = PcdItem.TokenSpaceGuidCName
                                    PcdDatumType = PcdItem.DatumType
                                    FoundFlag = True
                                else:
                                    EdkLogger.error(
                                            'build',
                                             AUTOGEN_ERROR,
                                            "The Pcd %s is found under multiple different TokenSpaceGuid: %s and %s." % (DisplayName, PcdItem.TokenSpaceGuidCName, TokenSpaceGuidCNameList[0])
                                            )
                else:
                    if (TokenCName, TokenSpaceGuidCName) in self.DecPcds:
                        PcdDatumType = self.DecPcds[(TokenCName, TokenSpaceGuidCName)].DatumType
                        FoundFlag = True
                if not FoundFlag:
                    if HasTokenSpace:
                        EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s.%s is not found in the DEC file." % (TokenSpaceGuidCName, DisplayName))
                    else:
                        EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s is not found in the DEC file." % (DisplayName))
                # Undo shell-level escaping of quotes in the raw value string.
                pcdvalue = pcdvalue.replace("\\\\\\'", '\\\\\\"').replace('\\\'', '\'').replace('\\\\\\"', "\\'")
                if FieldName:
                    pcdvalue = DscBuildData.HandleFlexiblePcd(TokenSpaceGuidCName, TokenCName, pcdvalue, PcdDatumType, self._GuidDict, FieldName)
                else:
                    pcdvalue = DscBuildData.HandleFlexiblePcd(TokenSpaceGuidCName, TokenCName, pcdvalue, PcdDatumType, self._GuidDict)
                IsValid, Cause = CheckPcdDatum(PcdDatumType, pcdvalue)
                if not IsValid:
                    EdkLogger.error("build", FORMAT_INVALID, Cause, ExtraData="%s.%s" % (TokenSpaceGuidCName, TokenCName))
                GlobalData.BuildOptionPcd[i] = (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, ("build command options", 1))
        if GlobalData.BuildOptionPcd:
            inf_objs = [item for item in self._Bdb._CACHE_.values() if item.Arch == self.Arch and item.MetaFile.Ext.lower() == '.inf']
            for pcd in GlobalData.BuildOptionPcd:
                (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
                for BuildData in inf_objs:
                    for key in BuildData.Pcds:
                        PcdItem = BuildData.Pcds[key]
                        if (TokenSpaceGuidCName, TokenCName) == (PcdItem.TokenSpaceGuidCName, PcdItem.TokenCName) and FieldName =="":
                            PcdItem.DefaultValue = pcdvalue
                            PcdItem.PcdValueFromComm = pcdvalue
        #In command line, the latter full assign value in commandLine should override the former field assign value.
        #For example, --pcd Token.pcd.field="" --pcd Token.pcd=H"{}"
        delete_assign = []
        field_assign = {}
        if GlobalData.BuildOptionPcd:
            for pcdTuple in GlobalData.BuildOptionPcd:
                TokenSpaceGuid, Token, Field = pcdTuple[0], pcdTuple[1], pcdTuple[2]
                if Field:
                    if (TokenSpaceGuid, Token) not in field_assign:
                        field_assign[TokenSpaceGuid, Token] = []
                    field_assign[TokenSpaceGuid, Token].append(pcdTuple)
                else:
                    if (TokenSpaceGuid, Token) in field_assign:
                        delete_assign.extend(field_assign[TokenSpaceGuid, Token])
                        field_assign[TokenSpaceGuid, Token] = []
            for item in delete_assign:
                GlobalData.BuildOptionPcd.remove(item)
    @staticmethod
    def HandleFlexiblePcd(TokenSpaceGuidCName, TokenCName, PcdValue, PcdDatumType, GuidDict, FieldName=''):
        """Normalize a command-line PCD value string into its evaluated form.

        Handles the flexible value syntaxes: H-prefixed hex/byte-array values,
        L'...' / '...' character values, L-prefixed strings (rewrapped as
        L"..."), TRUE/FALSE keywords, bare numbers, quoted strings and
        {...} arrays.  Field assignments (FieldName given) are returned
        verbatim unless the value is an array.  Evaluation goes through
        ValueExpressionEx; a bad expression is a fatal error.
        """
        if FieldName:
            IsArray = False
            TokenCName += '.' + FieldName
        if PcdValue.startswith('H'):
            if FieldName and _IsFieldValueAnArray(PcdValue[1:]):
                PcdDatumType = TAB_VOID
                IsArray = True
            if FieldName and not IsArray:
                return PcdValue
            try:
                PcdValue = ValueExpressionEx(PcdValue[1:], PcdDatumType, GuidDict)(True)
            except BadExpression as Value:
                EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
                                (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
        elif PcdValue.startswith("L'") or PcdValue.startswith("'"):
            if FieldName and _IsFieldValueAnArray(PcdValue):
                PcdDatumType = TAB_VOID
                IsArray = True
            if FieldName and not IsArray:
                return PcdValue
            try:
                PcdValue = ValueExpressionEx(PcdValue, PcdDatumType, GuidDict)(True)
            except BadExpression as Value:
                EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
                                (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
        elif PcdValue.startswith('L'):
            # Bare L-prefixed text: rewrap as a quoted wide string first.
            PcdValue = 'L"' + PcdValue[1:] + '"'
            if FieldName and _IsFieldValueAnArray(PcdValue):
                PcdDatumType = TAB_VOID
                IsArray = True
            if FieldName and not IsArray:
                return PcdValue
            try:
                PcdValue = ValueExpressionEx(PcdValue, PcdDatumType, GuidDict)(True)
            except BadExpression as Value:
                EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
                                (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
        else:
            # TRUE/FALSE map to 1/0 regardless of datum type.
            if PcdValue.upper() == 'FALSE':
                PcdValue = str(0)
            if PcdValue.upper() == 'TRUE':
                PcdValue = str(1)
            if not FieldName:
                # Whole-PCD assignment: quote non-numeric values for evaluation.
                if PcdDatumType not in TAB_PCD_NUMERIC_TYPES:
                    PcdValue = '"' + PcdValue + '"'
                elif not PcdValue.isdigit() and not PcdValue.upper().startswith('0X'):
                    PcdValue = '"' + PcdValue + '"'
            else:
                IsArray = False
                Base = 10
                if PcdValue.upper().startswith('0X'):
                    Base = 16
                try:
                    Num = int(PcdValue, Base)
                except:
                    PcdValue = '"' + PcdValue + '"'
                if _IsFieldValueAnArray(PcdValue):
                    PcdDatumType = TAB_VOID
                    IsArray = True
                if not IsArray:
                    return PcdValue
            try:
                PcdValue = ValueExpressionEx(PcdValue, PcdDatumType, GuidDict)(True)
            except BadExpression as Value:
                EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %
                                (TokenSpaceGuidCName, TokenCName, PcdValue, Value))
        return PcdValue
## Retrieve all PCD settings in platform
@property
def Pcds(self):
if self._Pcds is None:
self._Pcds = OrderedDict()
self.__ParsePcdFromCommandLine()
self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_DEFAULT))
self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_HII))
self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_VPD))
self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_EX_DEFAULT))
self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_EX_HII))
self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_EX_VPD))
self._Pcds = self.CompletePcdValues(self._Pcds)
self._Pcds = self.OverrideByFdfOverAll(self._Pcds)
self._Pcds = self.OverrideByCommOverAll(self._Pcds)
self._Pcds = self.UpdateStructuredPcds(MODEL_PCD_TYPE_LIST, self._Pcds)
self._Pcds = self.CompleteHiiPcdsDefaultStores(self._Pcds)
self._Pcds = self._FilterPcdBySkuUsage(self._Pcds)
self.RecoverCommandLinePcd()
return self._Pcds
## Retrieve [BuildOptions]
@property
def BuildOptions(self):
if self._BuildOptions is None:
self._BuildOptions = OrderedDict()
#
# Retrieve build option for EDKII and EDK style module
#
for CodeBase in (EDKII_NAME, EDK_NAME):
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, CodeBase]
for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
if Dummy3.upper() != TAB_COMMON:
continue
CurKey = (ToolChainFamily, ToolChain, CodeBase)
#
# Only flags can be appended
#
if CurKey not in self._BuildOptions or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
self._BuildOptions[CurKey] = Option
else:
if ' ' + Option not in self._BuildOptions[CurKey]:
self._BuildOptions[CurKey] += ' ' + Option
return self._BuildOptions
    def GetBuildOptionsByPkg(self, Module, ModuleType):
        """[BuildOptions] filtered for one module type, scoped by package.

        Only records whose scope is COMMON, the module's own package (upper-
        cased directory of Module.LocalPkg()) or "EDKII" are considered, and
        only when their module-type field matches ModuleType.  Results are
        cached per ModuleType; *_FLAGS options accumulate unless prefixed '='.
        """
        local_pkg = os.path.split(Module.LocalPkg())[0]
        if self._ModuleTypeOptions is None:
            self._ModuleTypeOptions = OrderedDict()
        if ModuleType not in self._ModuleTypeOptions:
            options = OrderedDict()
            self._ModuleTypeOptions[ ModuleType] = options
            RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch]
            for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
                if Dummy2 not in (TAB_COMMON,local_pkg.upper(),"EDKII"):
                    continue
                Type = Dummy3
                if Type.upper() == ModuleType.upper():
                    Key = (ToolChainFamily, ToolChain)
                    # Only *_FLAGS options may be appended; '='-prefixed replaces.
                    if Key not in options or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
                        options[Key] = Option
                    else:
                        if ' ' + Option not in options[Key]:
                            options[Key] += ' ' + Option
        return self._ModuleTypeOptions[ModuleType]
    def GetBuildOptionsByModuleType(self, Edk, ModuleType):
        """[BuildOptions] filtered for one (code base, module type) pair.

        Matches records scoped "<Edk>.<ModuleType>" or "COMMON.<ModuleType>".
        Results are cached per (Edk, ModuleType); *_FLAGS options accumulate
        unless the option is '='-prefixed.
        """
        if self._ModuleTypeOptions is None:
            self._ModuleTypeOptions = OrderedDict()
        if (Edk, ModuleType) not in self._ModuleTypeOptions:
            options = OrderedDict()
            self._ModuleTypeOptions[Edk, ModuleType] = options
            DriverType = '%s.%s' % (Edk, ModuleType)
            CommonDriverType = '%s.%s' % (TAB_COMMON, ModuleType)
            RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch]
            for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
                Type = Dummy2 + '.' + Dummy3
                if Type.upper() == DriverType.upper() or Type.upper() == CommonDriverType.upper():
                    Key = (ToolChainFamily, ToolChain, Edk)
                    # Only *_FLAGS options may be appended; '='-prefixed replaces.
                    if Key not in options or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
                        options[Key] = Option
                    else:
                        if ' ' + Option not in options[Key]:
                            options[Key] += ' ' + Option
        return self._ModuleTypeOptions[Edk, ModuleType]
@staticmethod
def GetStructurePcdInfo(PcdSet):
structure_pcd_data = defaultdict(list)
for item in PcdSet:
structure_pcd_data[(item[0], item[1])].append(item)
return structure_pcd_data
@staticmethod
def OverrideByFdf(StruPcds,workspace):
    """Apply structure-PCD field overrides collected from the FDF file.

    @param StruPcds   dict of (TokenCName, TokenSpaceGuidCName) -> StructurePcd
    @param workspace  workspace root, used to relativize the override's source path
    @retval StruPcds  the same dict, with PcdFieldValueFromFdf[field] populated
                      as [value, relative source file, source line]
    """
    if GlobalData.gFdfParser is None:
        return StruPcds
    StructurePcdInFdf = OrderedDict()
    fdfpcd = GlobalData.gFdfParser.Profile.PcdDict
    fdfpcdlocation = GlobalData.gFdfParser.Profile.PcdLocalDict
    # Keep only FDF records with a non-empty field name that target a known
    # structure PCD; re-key them as (Guid, Name, Field).
    for item in fdfpcd :
        if len(item[2]) and (item[0],item[1]) in StruPcds:
            StructurePcdInFdf[(item[1],item[0],item[2] )] = fdfpcd[item]
    GlobalPcds = {(item[0],item[1]) for item in StructurePcdInFdf}
    for Pcd in StruPcds.values():
        if (Pcd.TokenSpaceGuidCName,Pcd.TokenCName) not in GlobalPcds:
            continue
        # Gather every per-field override for this PCD.
        FieldValues = OrderedDict()
        for item in StructurePcdInFdf:
            if (Pcd.TokenSpaceGuidCName,Pcd.TokenCName) == (item[0],item[1]) and item[2]:
                FieldValues[item[2]] = StructurePcdInFdf[item]
        for field in FieldValues:
            if field not in Pcd.PcdFieldValueFromFdf:
                Pcd.PcdFieldValueFromFdf[field] = ["","",""]
            Pcd.PcdFieldValueFromFdf[field][0] = FieldValues[field]
            # Location dict is keyed (Name, Guid, Field) and holds (file, line).
            Pcd.PcdFieldValueFromFdf[field][1] = os.path.relpath(fdfpcdlocation[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName,field)][0],workspace)
            Pcd.PcdFieldValueFromFdf[field][2] = fdfpcdlocation[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName,field)][1]
    return StruPcds
@staticmethod
def OverrideByComm(StruPcds):
    """Apply structure-PCD field overrides given on the build command line.

    GlobalData.BuildOptionPcd entries are 5-tuples; per the usage below they
    are (TokenSpaceGuidCName, TokenCName, Field, Value, source-info) where
    source-info itself yields (file, line).

    @param StruPcds  dict of (TokenCName, TokenSpaceGuidCName) -> StructurePcd
    @retval StruPcds the same dict, with PcdFieldValueFromComm[field] populated
                     as [value, source file, source line]
    """
    StructurePcdInCom = OrderedDict()
    # Keep only well-formed records that target a known structure PCD.
    for item in GlobalData.BuildOptionPcd:
        if len(item) == 5 and (item[1], item[0]) in StruPcds:
            StructurePcdInCom[(item[0], item[1], item[2] )] = (item[3], item[4])
    GlobalPcds = {(item[0], item[1]) for item in StructurePcdInCom}
    for Pcd in StruPcds.values():
        if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) not in GlobalPcds:
            continue
        # Gather every per-field override for this PCD.
        FieldValues = OrderedDict()
        for item in StructurePcdInCom:
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) == (item[0], item[1]) and item[2]:
                FieldValues[item[2]] = StructurePcdInCom[item]
        for field in FieldValues:
            if field not in Pcd.PcdFieldValueFromComm:
                Pcd.PcdFieldValueFromComm[field] = ["", "", ""]
            Pcd.PcdFieldValueFromComm[field][0] = FieldValues[field][0]
            Pcd.PcdFieldValueFromComm[field][1] = FieldValues[field][1][0]
            Pcd.PcdFieldValueFromComm[field][2] = FieldValues[field][1][1]
    return StruPcds
def OverrideByCommOverAll(self,AllPcds):
    """Apply whole-PCD (field-less) command-line overrides to every PCD.

    Handles both the 5-element (structure-PCD style) and the short form of
    GlobalData.BuildOptionPcd records.  Structure PCDs get their
    PcdValueFromComm recorded on the DEC object; plain PCDs get their
    DefaultValue / SKU values replaced; PCDs absent from AllPcds but present
    in the DEC are instantiated into self._Pcds.

    @param AllPcds  dict of (TokenCName, TokenSpaceGuidCName) -> PcdClassObject
    @retval the updated AllPcds dict

    NOTE(review): NoFiledValues is keyed (Guid, Name) while AllPcds is keyed
    (Name, Guid) — the swapped lookups below are intentional.
    """
    def CheckStructureInComm(commpcds):
        # 5-element records indicate the structure-PCD command-line format.
        if not commpcds:
            return False
        if len(commpcds[0]) == 5:
            return True
        return False
    NoFiledValues = OrderedDict()
    if CheckStructureInComm(GlobalData.BuildOptionPcd):
        StructurePcdInCom = OrderedDict()
        for item in GlobalData.BuildOptionPcd:
            StructurePcdInCom[(item[0], item[1], item[2] )] = (item[3], item[4])
        # Only records WITHOUT a field name override the PCD as a whole.
        for item in StructurePcdInCom:
            if not item[2]:
                NoFiledValues[(item[0], item[1])] = StructurePcdInCom[item]
    else:
        for item in GlobalData.BuildOptionPcd:
            NoFiledValues[(item[0], item[1])] = [item[2]]
    for Guid, Name in NoFiledValues:
        if (Name, Guid) in AllPcds:
            Pcd = AllPcds.get((Name, Guid))
            if isinstance(self._DecPcds.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName), None), StructurePcd):
                # Structure PCDs only record the raw command-line value here;
                # their final value is computed elsewhere.
                self._DecPcds.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName)).PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
            else:
                Pcd.PcdValueFromComm = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
                Pcd.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
                # Propagate the override into every SKU / default store.
                for sku in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[sku]
                    if SkuInfo.DefaultValue:
                        SkuInfo.DefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
                    else:
                        SkuInfo.HiiDefaultValue = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
                        for defaultstore in SkuInfo.DefaultStoreDict:
                            SkuInfo.DefaultStoreDict[defaultstore] = NoFiledValues[(Pcd.TokenSpaceGuidCName, Pcd.TokenCName)][0]
                # Grow VOID* HII PCDs' max size if the new value needs more room.
                if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII]]:
                    if Pcd.DatumType == TAB_VOID:
                        if not Pcd.MaxDatumSize:
                            Pcd.MaxDatumSize = '0'
                        CurrentSize = int(Pcd.MaxDatumSize, 16) if Pcd.MaxDatumSize.upper().startswith("0X") else int(Pcd.MaxDatumSize)
                        OptionSize = len((StringToArray(Pcd.PcdValueFromComm)).split(","))
                        MaxSize = max(CurrentSize, OptionSize)
                        Pcd.MaxDatumSize = str(MaxSize)
        else:
            # PCD not used by the platform yet: pull it in from the DEC.
            PcdInDec = self.DecPcds.get((Name, Guid))
            if PcdInDec:
                PcdInDec.PcdValueFromComm = NoFiledValues[(Guid, Name)][0]
                if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
                                     self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE],
                                     self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG],
                                     self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC],
                                     self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX]]:
                    self._Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
                    self._Pcds[Name, Guid].DefaultValue = NoFiledValues[( Guid, Name)][0]
                    if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC],
                                         self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX]]:
                        self._Pcds[Name, Guid].SkuInfoList = {TAB_DEFAULT:SkuInfoClass(TAB_DEFAULT, self.SkuIds[TAB_DEFAULT][0], '', '', '', '', '', NoFiledValues[( Guid, Name)][0])}
    return AllPcds
def OverrideByFdfOverAll(self,AllPcds):
    """Apply whole-PCD (field-less) overrides collected from the FDF file.

    Mirrors OverrideByCommOverAll, but sources the values from
    GlobalData.gFdfParser.Profile.PcdDict, which is keyed (Name, Guid, Field);
    only records with an empty Field override the whole PCD.

    @param AllPcds  dict of (TokenCName, TokenSpaceGuidCName) -> PcdClassObject
    @retval the updated AllPcds dict
    """
    if GlobalData.gFdfParser is None:
        return AllPcds
    NoFiledValues = GlobalData.gFdfParser.Profile.PcdDict
    for Name,Guid,Field in NoFiledValues:
        if len(Field):
            continue
        Value = NoFiledValues[(Name,Guid,Field)]
        if (Name,Guid) in AllPcds:
            Pcd = AllPcds.get((Name,Guid))
            if isinstance(self._DecPcds.get((Pcd.TokenCName,Pcd.TokenSpaceGuidCName), None),StructurePcd):
                # NOTE(review): this stores the FDF value into PcdValueFromComm,
                # not PcdValueFromFdf as the non-structure branch below records
                # for DEC-only PCDs — confirm this asymmetry is intended.
                self._DecPcds.get((Pcd.TokenCName,Pcd.TokenSpaceGuidCName)).PcdValueFromComm = Value
            else:
                Pcd.PcdValueFromComm = Value
                Pcd.DefaultValue = Value
                # Propagate the override into every SKU / default store.
                for sku in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[sku]
                    if SkuInfo.DefaultValue:
                        SkuInfo.DefaultValue = Value
                    else:
                        SkuInfo.HiiDefaultValue = Value
                        for defaultstore in SkuInfo.DefaultStoreDict:
                            SkuInfo.DefaultStoreDict[defaultstore] = Value
                # Grow VOID* HII PCDs' max size if the new value needs more room.
                if Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII]]:
                    if Pcd.DatumType == TAB_VOID:
                        if not Pcd.MaxDatumSize:
                            Pcd.MaxDatumSize = '0'
                        CurrentSize = int(Pcd.MaxDatumSize,16) if Pcd.MaxDatumSize.upper().startswith("0X") else int(Pcd.MaxDatumSize)
                        OptionSize = len((StringToArray(Pcd.PcdValueFromComm)).split(","))
                        MaxSize = max(CurrentSize, OptionSize)
                        Pcd.MaxDatumSize = str(MaxSize)
        else:
            # PCD not used by the platform yet: pull it in from the DEC.
            PcdInDec = self.DecPcds.get((Name,Guid))
            if PcdInDec:
                PcdInDec.PcdValueFromFdf = Value
                if PcdInDec.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
                                     self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE],
                                     self._PCD_TYPE_STRING_[MODEL_PCD_FEATURE_FLAG]]:
                    self._Pcds[Name, Guid] = copy.deepcopy(PcdInDec)
                    self._Pcds[Name, Guid].DefaultValue = Value
    return AllPcds
def ParsePcdNameStruct(self,NamePart1,NamePart2):
    """Split a structure-PCD reference into its four name components.

    Two input shapes are accepted:
      * NamePart1 == "TokenSpace.PcdName[dims]"  -> NamePart2 is the field name
      * NamePart1 == "TokenSpace"                -> NamePart2 is "PcdName[dims]"
        and there is no field.

    @param NamePart1  first name segment (may contain "." and "[...]")
    @param NamePart2  second name segment (PCD name or field name)
    @retval tuple (TokenSpaceCName, PcdCName, DimensionAttr, Field); the
            dimension and field entries are "" when absent
    """
    def _split_dimension(name):
        # Separate "Pcd[...]" into the bare name and its "[...]" suffix.
        bracket = name.find("[")
        if bracket >= 0:
            return name[:bracket], name[bracket:]
        return name, ""

    if "." in NamePart1:
        TokenSpaceCName, pcd_part = NamePart1.split(".")
        PcdCName, DimensionAttr = _split_dimension(pcd_part)
        Field = NamePart2
    else:
        TokenSpaceCName = NamePart1
        PcdCName, DimensionAttr = _split_dimension(NamePart2)
        Field = ""
    return TokenSpaceCName,PcdCName,DimensionAttr,Field
def UpdateStructuredPcds(self, TypeList, AllPcds):
    """Resolve structure-PCD values and merge them into AllPcds.

    Collects every structure-PCD assignment of the model types in TypeList,
    layers DSC per-SKU / per-default-store / per-component overrides on top
    of the DEC defaults, applies FDF and command-line overrides last, then
    runs the generated value tool (GenerateByteArrayValue) and writes the
    computed byte-array values back into AllPcds.

    Fix: guard the iteration over ModuleScopeOverallValue.get(...) — the
    lookup returns None for structure PCDs with no module-scoped value, and
    iterating None raised TypeError.

    @param TypeList  list of PCD model types to scan in the DSC raw data
    @param AllPcds   dict of (TokenCName, TokenSpaceGuidCName) -> PcdClassObject
    @retval the updated AllPcds dict
    """
    DynamicPcdType = [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_DEFAULT],
                      self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
                      self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_VPD],
                      self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_DEFAULT],
                      self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII],
                      self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_VPD]]
    Pcds = AllPcds
    DefaultStoreMgr = DefaultStore(self.DefaultStores)
    SkuIds = self.SkuIds
    self.SkuIdMgr.AvailableSkuIdSet.update({TAB_DEFAULT:0})
    DefaultStores = {storename for pcdobj in AllPcds.values() for skuobj in pcdobj.SkuInfoList.values() for storename in skuobj.DefaultStoreDict}
    DefaultStores.add(TAB_DEFAULT_STORES_DEFAULT)

    S_PcdSet = []
    # Find out all possible PCD candidates for self._Arch
    RecordList = []
    for Type in TypeList:
        RecordList.extend(self._RawData[Type, self._Arch])
    for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, default_store, Dummy4, Dummy5 in RecordList:
        SkuName = SkuName.upper()
        default_store = default_store.upper()
        SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
        if SkuName not in SkuIds:
            continue
        TCName,PCName,DimensionAttr,Field = self.ParsePcdNameStruct(TokenSpaceGuid, PcdCName)
        pcd_in_dec = self._DecPcds.get((PCName,TCName), None)
        if pcd_in_dec is None:
            EdkLogger.error('build', PARSER_ERROR,
                        "Pcd (%s.%s) defined in DSC is not declared in DEC files. Arch: ['%s']" % (TCName, PCName, self._Arch),
                        File=self.MetaFile, Line = Dummy5)
        # Only dotted / indexed references are structure-PCD assignments.
        if SkuName in SkuIds and ("." in TokenSpaceGuid or "[" in PcdCName):
            if not isinstance (pcd_in_dec, StructurePcd):
                EdkLogger.error('build', PARSER_ERROR,
                            "Pcd (%s.%s) is not declared as Structure PCD in DEC files. Arch: ['%s']" % (TCName, PCName, self._Arch),
                            File=self.MetaFile, Line = Dummy5)
            S_PcdSet.append([ TCName,PCName,DimensionAttr,Field, SkuName, default_store, Dummy5, AnalyzePcdExpression(Setting)[0]])

    # Merge module-scoped (per-component) structure-PCD assignments.
    ModuleScopeOverallValue = {}
    for m in self.Modules.values():
        mguid = m.Guid
        if m.StrPcdSet:
            S_PcdSet.extend(m.StrPcdSet)
            mguid = m.StrPcdSet[0][4]
        for (PCName,TCName) in m.StrPcdOverallValue:
            Value, dsc_file, lineNo = m.StrPcdOverallValue[(PCName,TCName)]
            ModuleScopeOverallValue.setdefault((PCName,TCName),{})[mguid] = Value, dsc_file, lineNo

    # handle pcd value override
    StrPcdSet = DscBuildData.GetStructurePcdInfo(S_PcdSet)
    S_pcd_set = OrderedDict()
    for str_pcd in StrPcdSet:
        str_pcd_obj = Pcds.get((str_pcd[1], str_pcd[0]), None)
        str_pcd_dec = self._DecPcds.get((str_pcd[1], str_pcd[0]), None)
        str_pcd_obj_str = StructurePcd()
        str_pcd_obj_str.copy(str_pcd_dec)
        if str_pcd_obj:
            str_pcd_obj_str.copy(str_pcd_obj)
            if str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
                str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].HiiDefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
            else:
                str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].DefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
        for str_pcd_data in StrPcdSet[str_pcd]:
            if str_pcd_data[4] in SkuIds:
                str_pcd_obj_str.AddOverrideValue(str_pcd_data[3], str(str_pcd_data[7]), TAB_DEFAULT if str_pcd_data[4] == TAB_COMMON else str_pcd_data[4], TAB_DEFAULT_STORES_DEFAULT if str_pcd_data[5] == TAB_COMMON else str_pcd_data[5], self.MetaFile.File if self.WorkspaceDir not in self.MetaFile.File else self.MetaFile.File[len(self.WorkspaceDir) if self.WorkspaceDir.endswith(os.path.sep) else len(self.WorkspaceDir)+1:], LineNo=str_pcd_data[6],DimensionAttr = str_pcd_data[2])
            elif GlobalData.gGuidPattern.match(str_pcd_data[4]):
                str_pcd_obj_str.AddComponentOverrideValue(str_pcd_data[3], str(str_pcd_data[7]), str_pcd_data[4].replace("-","S"), self.MetaFile.File if self.WorkspaceDir not in self.MetaFile.File else self.MetaFile.File[len(self.WorkspaceDir) if self.WorkspaceDir.endswith(os.path.sep) else len(self.WorkspaceDir)+1:], LineNo=str_pcd_data[6],DimensionAttr = str_pcd_data[2])
        PcdComponentValue = ModuleScopeOverallValue.get((str_pcd_obj_str.TokenCName,str_pcd_obj_str.TokenSpaceGuidCName))
        # FIX: .get() returns None when this PCD has no module-scoped value;
        # iterating None would raise TypeError.
        if PcdComponentValue:
            for module_guid in PcdComponentValue:
                str_pcd_obj_str.PcdValueFromComponents[module_guid.replace("-","S")] = PcdComponentValue[module_guid]
        S_pcd_set[str_pcd[1], str_pcd[0]] = str_pcd_obj_str

    # Add the Structure PCD that only defined in DEC, don't have override in DSC file
    for Pcd in self.DecPcds:
        if isinstance(self._DecPcds[Pcd], StructurePcd):
            if Pcd not in S_pcd_set:
                str_pcd_obj_str = StructurePcd()
                str_pcd_obj_str.copy(self._DecPcds[Pcd])
                str_pcd_obj = Pcds.get(Pcd, None)
                if str_pcd_obj:
                    str_pcd_obj_str.copy(str_pcd_obj)
                    if str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
                        str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].HiiDefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
                    else:
                        str_pcd_obj_str.DefaultFromDSC = {skuname:{defaultstore: str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.get(defaultstore, str_pcd_obj.SkuInfoList[skuname].DefaultValue) for defaultstore in DefaultStores} for skuname in str_pcd_obj.SkuInfoList}
                S_pcd_set[Pcd] = str_pcd_obj_str
    if S_pcd_set:
        GlobalData.gStructurePcd[self.Arch] = S_pcd_set.copy()
    self.FilterStrcturePcd(S_pcd_set)

    # Fill in missing SKU / default-store override slots by following the
    # SKU inheritance chain.
    for stru_pcd in S_pcd_set.values():
        for skuid in SkuIds:
            if skuid in stru_pcd.SkuOverrideValues:
                continue
            nextskuid = self.SkuIdMgr.GetNextSkuId(skuid)
            NoDefault = False
            if skuid not in stru_pcd.SkuOverrideValues:
                while nextskuid not in stru_pcd.SkuOverrideValues:
                    if nextskuid == TAB_DEFAULT:
                        NoDefault = True
                        break
                    nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
                stru_pcd.SkuOverrideValues[skuid] = copy.deepcopy(stru_pcd.SkuOverrideValues[nextskuid]) if not NoDefault else copy.deepcopy({defaultstorename: stru_pcd.DefaultValues for defaultstorename in DefaultStores} if DefaultStores else {}) #{TAB_DEFAULT_STORES_DEFAULT:stru_pcd.DefaultValues})
                if not NoDefault:
                    stru_pcd.ValueChain.add((skuid, ''))
        if 'DEFAULT' in stru_pcd.SkuOverrideValues and not GlobalData.gPcdSkuOverrides.get((stru_pcd.TokenCName, stru_pcd.TokenSpaceGuidCName)):
            GlobalData.gPcdSkuOverrides.update(
                {(stru_pcd.TokenCName, stru_pcd.TokenSpaceGuidCName): {'DEFAULT':stru_pcd.SkuOverrideValues['DEFAULT']}})
        if stru_pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
            for skuid in SkuIds:
                nextskuid = skuid
                NoDefault = False
                if skuid not in stru_pcd.SkuOverrideValues:
                    while nextskuid not in stru_pcd.SkuOverrideValues:
                        if nextskuid == TAB_DEFAULT:
                            NoDefault = True
                            break
                        nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
                if NoDefault:
                    continue
                PcdDefaultStoreSet = set(defaultstorename for defaultstorename in stru_pcd.SkuOverrideValues[nextskuid])
                mindefaultstorename = DefaultStoreMgr.GetMin(PcdDefaultStoreSet)
                for defaultstoreid in DefaultStores:
                    if defaultstoreid not in stru_pcd.SkuOverrideValues[skuid]:
                        stru_pcd.SkuOverrideValues[skuid][defaultstoreid] = CopyDict(stru_pcd.SkuOverrideValues[nextskuid][mindefaultstorename])
                        stru_pcd.ValueChain.add((skuid, defaultstoreid))
    # FDF and command-line overrides win over DSC values.
    S_pcd_set = DscBuildData.OverrideByFdf(S_pcd_set,self.WorkspaceDir)
    S_pcd_set = DscBuildData.OverrideByComm(S_pcd_set)

    # Create a tool to caculate structure pcd value
    Str_Pcd_Values = self.GenerateByteArrayValue(S_pcd_set)
    if Str_Pcd_Values:
        for (skuname, StoreName, PcdGuid, PcdName, PcdValue) in Str_Pcd_Values:
            str_pcd_obj = S_pcd_set.get((PcdName, PcdGuid))
            if str_pcd_obj is None:
                # Internal inconsistency: the value tool reported a PCD we
                # never collected; abort loudly.
                print(PcdName, PcdGuid)
                raise
            if str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
                                    self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
                if skuname not in str_pcd_obj.SkuInfoList:
                    str_pcd_obj.SkuInfoList[skuname] = SkuInfoClass(SkuIdName=skuname, SkuId=self.SkuIds[skuname][0], HiiDefaultValue=PcdValue, DefaultStore = {StoreName:PcdValue})
                else:
                    str_pcd_obj.SkuInfoList[skuname].HiiDefaultValue = PcdValue
                    str_pcd_obj.SkuInfoList[skuname].DefaultStoreDict.update({StoreName:PcdValue})
            elif str_pcd_obj.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
                                      self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
                if skuname in (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT, TAB_COMMON):
                    str_pcd_obj.DefaultValue = PcdValue
                else:
                    #Module Scope Structure Pcd
                    moduleguid = skuname.replace("S","-")
                    if GlobalData.gGuidPattern.match(moduleguid):
                        for component in self.Modules.values():
                            if component.Guid == moduleguid:
                                component.Pcds[(PcdName, PcdGuid)].DefaultValue = PcdValue
            else:
                if skuname not in str_pcd_obj.SkuInfoList:
                    nextskuid = self.SkuIdMgr.GetNextSkuId(skuname)
                    NoDefault = False
                    while nextskuid not in str_pcd_obj.SkuInfoList:
                        if nextskuid == TAB_DEFAULT:
                            NoDefault = True
                            break
                        nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
                    str_pcd_obj.SkuInfoList[skuname] = copy.deepcopy(str_pcd_obj.SkuInfoList[nextskuid]) if not NoDefault else SkuInfoClass(SkuIdName=skuname, SkuId=self.SkuIds[skuname][0], DefaultValue=PcdValue)
                    str_pcd_obj.SkuInfoList[skuname].SkuId = self.SkuIds[skuname][0]
                    str_pcd_obj.SkuInfoList[skuname].SkuIdName = skuname
                else:
                    str_pcd_obj.SkuInfoList[skuname].DefaultValue = PcdValue
        # Normalize the system SKU's HII default to the minimal default store.
        for str_pcd_obj in S_pcd_set.values():
            if str_pcd_obj.Type not in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
                                        self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
                continue
            PcdDefaultStoreSet = set(defaultstorename for skuobj in str_pcd_obj.SkuInfoList.values() for defaultstorename in skuobj.DefaultStoreDict)
            DefaultStoreObj = DefaultStore(self._GetDefaultStores())
            mindefaultstorename = DefaultStoreObj.GetMin(PcdDefaultStoreSet)
            str_pcd_obj.SkuInfoList[self.SkuIdMgr.SystemSkuId].HiiDefaultValue = str_pcd_obj.SkuInfoList[self.SkuIdMgr.SystemSkuId].DefaultStoreDict[mindefaultstorename]
        for str_pcd_obj in S_pcd_set.values():
            str_pcd_obj.MaxDatumSize = DscBuildData.GetStructurePcdMaxSize(str_pcd_obj)
            Pcds[str_pcd_obj.TokenCName, str_pcd_obj.TokenSpaceGuidCName] = str_pcd_obj
            Pcds[str_pcd_obj.TokenCName, str_pcd_obj.TokenSpaceGuidCName].CustomAttribute['IsStru']=True

        # COMMON SKU info is folded into DEFAULT.
        for pcdkey in Pcds:
            pcd = Pcds[pcdkey]
            if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
                pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
                del pcd.SkuInfoList[TAB_COMMON]
            elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
                del pcd.SkuInfoList[TAB_COMMON]

    list(map(self.FilterSkuSettings, [Pcds[pcdkey] for pcdkey in Pcds if Pcds[pcdkey].Type in DynamicPcdType]))
    return Pcds
@cached_property
def PlatformUsedPcds(self):
    """Union of the PcdsName sets of every platform module (DSC modules plus
    INFs listed in the FDF) and every library instance.  Computed once and
    cached on the instance."""
    FdfInfList = []
    if GlobalData.gFdfParser:
        FdfInfList = GlobalData.gFdfParser.Profile.InfList
    FdfModuleList = [PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch) for Inf in FdfInfList]
    AllModulePcds = set()
    # A set union also deduplicates modules referenced both by DSC and FDF.
    ModuleSet = set(list(self._Modules.keys()) + FdfModuleList)
    for ModuleFile in ModuleSet:
        ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
        AllModulePcds = AllModulePcds | ModuleData.PcdsName
    for ModuleFile in self.LibraryInstances:
        ModuleData = self._Bdb.CreateBuildObject(ModuleFile, self._Arch, self._Target, self._Toolchain)
        AllModulePcds = AllModulePcds | ModuleData.PcdsName
    return AllModulePcds
#Filter the StrucutrePcd that is not used by any module in dsc file and fdf file.
def FilterStrcturePcd(self, S_pcd_set):
UnusedStruPcds = set(S_pcd_set.keys()) - self.PlatformUsedPcds
for (Token, TokenSpaceGuid) in UnusedStruPcds:
del S_pcd_set[(Token, TokenSpaceGuid)]
## Retrieve non-dynamic PCD settings
#
#   @param Type PCD type
#
#   @retval a dict object contains settings of given PCD type
#
def _GetPcd(self, Type):
    """Build (TokenCName, TokenSpaceGuidCName) -> PcdClassObject for all
    non-dynamic PCDs of the given model Type, resolving the effective value
    by SKU precedence: system SKU, then DEFAULT, then COMMON."""
    Pcds = OrderedDict()
    #
    # tdict is a special dict kind of type, used for selecting correct
    # PCD settings for certain ARCH
    #
    AvailableSkuIdSet = copy.copy(self.SkuIds)
    PcdDict = tdict(True, 4)
    PcdList = []
    # Find out all possible PCD candidates for self._Arch
    RecordList = self._RawData[Type, self._Arch]
    PcdValueDict = OrderedDict()
    for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
        SkuName = SkuName.upper()
        SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
        if SkuName not in AvailableSkuIdSet:
            EdkLogger.error('build ', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
                                    File=self.MetaFile, Line=Dummy5)
        if SkuName in (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT, TAB_COMMON):
            # Structure-PCD references ("." in the guid part or "[" in the
            # name) are handled elsewhere; collect only plain PCDs here.
            if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, Dummy5) not in PcdList:
                PcdList.append((PcdCName, TokenSpaceGuid, SkuName, Dummy5))
            PcdDict[Arch, PcdCName, TokenSpaceGuid, SkuName] = Setting
    for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
        # tdict lookup picks the most specific setting for this arch.
        Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid, SkuName]
        if Setting is None:
            continue
        PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
        if MaxDatumSize:
            # Max size is stored in a UINT16 downstream.
            if int(MaxDatumSize, 0) > 0xFFFF:
                EdkLogger.error('build', FORMAT_INVALID, "The size value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)),
                                File=self.MetaFile, Line=Dummy4)
            if int(MaxDatumSize, 0) < 0:
                EdkLogger.error('build', FORMAT_INVALID, "The size value can't be set to negative value for %s." % ".".join((TokenSpaceGuid, PcdCName)),
                                File=self.MetaFile, Line=Dummy4)
        if (PcdCName, TokenSpaceGuid) in PcdValueDict:
            PcdValueDict[PcdCName, TokenSpaceGuid][SkuName] = (PcdValue, DatumType, MaxDatumSize,Dummy4)
        else:
            PcdValueDict[PcdCName, TokenSpaceGuid] = {SkuName:(PcdValue, DatumType, MaxDatumSize,Dummy4)}
    for ((PcdCName, TokenSpaceGuid), PcdSetting) in PcdValueDict.items():
        # SKU precedence: system SKU > DEFAULT > COMMON.
        if self.SkuIdMgr.SystemSkuId in PcdSetting:
            PcdValue, DatumType, MaxDatumSize,_ = PcdSetting[self.SkuIdMgr.SystemSkuId]
        elif TAB_DEFAULT in PcdSetting:
            PcdValue, DatumType, MaxDatumSize,_ = PcdSetting[TAB_DEFAULT]
        elif TAB_COMMON in PcdSetting:
            PcdValue, DatumType, MaxDatumSize,_ = PcdSetting[TAB_COMMON]
        else:
            PcdValue = None
            DatumType = None
            MaxDatumSize = None
        Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
                                            PcdCName,
                                            TokenSpaceGuid,
                                            self._PCD_TYPE_STRING_[Type],
                                            DatumType,
                                            PcdValue,
                                            '',
                                            MaxDatumSize,
                                            {},
                                            False,
                                            None,
                                            IsDsc=True)
        # Record every raw DSC value (and its source location) per SKU.
        for SkuName in PcdValueDict[PcdCName, TokenSpaceGuid]:
            Settings = PcdValueDict[PcdCName, TokenSpaceGuid][SkuName]
            if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
                Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
                Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
            Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][TAB_DEFAULT_STORES_DEFAULT] = Settings[0]
            Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][TAB_DEFAULT_STORES_DEFAULT] = (self.MetaFile.File,Settings[3])
    return Pcds
@staticmethod
def GetStructurePcdMaxSize(str_pcd):
    """Return, as a decimal string, the largest size implied by any of the
    PCD's values (default plus every SKU value).

    The per-value size heuristic (get_length) inspects the textual form:
    GUID() -> 16, L"..." / "..." -> character count, {...} / {CODE(...)} ->
    comma-separated element count, otherwise the raw string length.
    """
    pcd_default_value = str_pcd.DefaultValue
    # HII PCDs carry their value in HiiDefaultValue; others in DefaultValue.
    sku_values = [skuobj.HiiDefaultValue if str_pcd.Type in [DscBuildData._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], DscBuildData._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]] else skuobj.DefaultValue for skuobj in str_pcd.SkuInfoList.values()]
    sku_values.append(pcd_default_value)

    def get_length(value):
        # NOTE(review): the guard tests len(value) (unstripped) while all
        # later checks use the stripped Value — for whitespace-padded
        # single characters this still falls through to len(Value).
        Value = value.strip()
        if len(value) > 1:
            if Value.startswith(TAB_GUID) and Value.endswith(')'):
                return 16
            if Value.startswith('L"') and Value.endswith('"'):
                return len(Value[2:-1])
            if Value[0] == '"' and Value[-1] == '"':
                return len(Value) - 2
            if Value.strip().startswith("{CODE("):
                tmpValue = RemoveCComments(Value)
                return len(tmpValue.split(","))
            if (Value[0] == '{' and Value[-1] == '}'):
                return len(Value.split(","))
            if Value.startswith("L'") and Value.endswith("'") and len(list(Value[2:-1])) > 1:
                return len(list(Value[2:-1]))
            if Value[0] == "'" and Value[-1] == "'" and len(list(Value[1:-1])) > 1:
                return len(Value) - 2
        return len(Value)
    return str(max(get_length(item) for item in sku_values))
@staticmethod
def ExecuteCommand (Command):
    """Run Command through the shell and capture its output.

    @param Command  shell command string; executed with shell=True, so it
                    must come only from trusted build metadata, never from
                    untrusted input
    @retval tuple (returncode, stdout_text, stderr_text); output is decoded
            with errors='ignore' so undecodable bytes never raise
    """
    try:
        Process = subprocess.Popen(Command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    except Exception:
        # Was a bare "except:", which also swallowed KeyboardInterrupt and
        # SystemExit; only genuine launch failures should become build errors.
        EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s' % Command)
    Result = Process.communicate()
    return Process.returncode, Result[0].decode(errors='ignore'), Result[1].decode(errors='ignore')
@staticmethod
def IntToCString(Value, ValueSize):
    """Render an integer as a quoted C string of hex byte escapes.

    The integer is emitted little-endian, one "\\xNN" escape per byte,
    exactly ValueSize bytes long.  A str input (already a string value)
    yields an empty C string.

    @param Value      integer to encode (str inputs produce '""')
    @param ValueSize  number of bytes to emit
    @retval C string literal, e.g. IntToCString(0x1234, 2) -> '"\\x34\\x12"'
    """
    if isinstance(Value, str):
        return '""'
    escapes = []
    remaining = Value
    for _ in range(ValueSize):
        escapes.append('\\x%02x' % (remaining & 0xff))
        remaining = remaining >> 8
    return '"%s"' % ''.join(escapes)
def GenerateSizeFunction(self, Pcd):
    """Emit the C source of Cal_<Guid>_<Name>_Size(), which grows *Size to
    the largest footprint implied by the PCD's DEC default, DSC per-SKU
    values, FDF field overrides, and command-line field overrides.

    @param Pcd  StructurePcd to generate the size calculator for
    @retval the generated C function body as a string
    """
    CApp = "// Default Value in Dec \n"
    CApp = CApp + "void Cal_%s_%s_Size(UINT32 *Size){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
    if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
        # Fixed-capacity array: the declared type already bounds the size.
        CApp += "  *Size = (sizeof (%s) > *Size ? sizeof (%s) : *Size);\n" % (Pcd.DatumType,Pcd.DatumType)
    else:
        if "{CODE(" in Pcd.DefaultValueFromDec:
            CApp += "  *Size = (sizeof (%s_%s_INIT_Value) > *Size ? sizeof (%s_%s_INIT_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,Pcd.TokenSpaceGuidCName,Pcd.TokenCName)
        if Pcd.Type in PCD_DYNAMIC_TYPE_SET | PCD_DYNAMIC_EX_TYPE_SET:
            # Dynamic PCDs: account for every SKU / default-store raw value.
            for skuname in Pcd.SkuInfoList:
                skuobj = Pcd.SkuInfoList[skuname]
                if skuobj.VariableName:
                    for defaultstore in skuobj.DefaultStoreDict:
                        pcddef = self.GetPcdDscRawDefaultValue(Pcd,skuname,defaultstore)
                        if pcddef:
                            if "{CODE(" in pcddef:
                                CApp += "  *Size = (sizeof (%s_%s_%s_%s_Value) > *Size ? sizeof (%s_%s_%s_%s_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,defaultstore,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,defaultstore)
                            else:
                                CApp += "  *Size = %s > *Size ? %s : *Size;\n" % (self.GetStructurePcdMaxSize(Pcd),self.GetStructurePcdMaxSize(Pcd))
                else:
                    pcddef = self.GetPcdDscRawDefaultValue(Pcd,skuname,TAB_DEFAULT_STORES_DEFAULT)
                    if pcddef:
                        if "{CODE(" in pcddef:
                            CApp += "  *Size = (sizeof (%s_%s_%s_%s_Value) > *Size ? sizeof (%s_%s_%s_%s_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,TAB_DEFAULT_STORES_DEFAULT,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,TAB_DEFAULT_STORES_DEFAULT)
                        else:
                            CApp += "  *Size = %s > *Size ? %s : *Size;\n" % (self.GetStructurePcdMaxSize(Pcd),self.GetStructurePcdMaxSize(Pcd))
        else:
            pcddef = self.GetPcdDscRawDefaultValue(Pcd,TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT)
            if pcddef:
                if "{CODE(" in pcddef:
                    CApp += "  *Size = (sizeof (%s_%s_%s_%s_Value) > *Size ? sizeof (%s_%s_%s_%s_Value) : *Size);\n" % (Pcd.TokenSpaceGuidCName,Pcd.TokenCName,TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,TAB_DEFAULT,TAB_DEFAULT_STORES_DEFAULT)
                else:
                    CApp += "  *Size = %s > *Size ? %s : *Size;\n" % (self.GetStructurePcdMaxSize(Pcd),self.GetStructurePcdMaxSize(Pcd))
    # DEC default field assignments: emit one __FLEXIBLE_SIZE per field so
    # flexible-array members can grow the structure.
    ActualCap = []
    for index in Pcd.DefaultValues:
        if index:
            ActualCap.append(index)
        FieldList = Pcd.DefaultValues[index]
        if not FieldList:
            continue
        for FieldName in FieldList:
            FieldName = "." + FieldName
            IsArray = _IsFieldValueAnArray(FieldList[FieldName.strip(".")][0])
            if IsArray and not (FieldList[FieldName.strip(".")][0].startswith('{GUID') and FieldList[FieldName.strip(".")][0].endswith('}')):
                try:
                    Value = ValueExpressionEx(FieldList[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
                except BadExpression:
                    EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
                                    (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2]))
                Value, ValueSize = ParseFieldValue(Value)
                if not Pcd.IsArray():
                    CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2], FieldList[FieldName.strip(".")][0]);
            else:
                # Scalar assignment to an indexed field: normalize the index
                # expression and size the array from the highest index used.
                NewFieldName = ''
                FieldName_ori = FieldName.strip('.')
                while '[' in FieldName:
                    NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
                    Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
                    FieldName = FieldName.split(']', 1)[1]
                FieldName = NewFieldName + FieldName
                while '[' in FieldName and not Pcd.IsArray():
                    FieldName = FieldName.rsplit('[', 1)[0]
                    CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, FieldList[FieldName_ori][1], FieldList[FieldName_ori][2], FieldList[FieldName_ori][0])
    # DSC SKU / default-store field assignments; identical (fieldname, value,
    # file, line) tuples are emitted only once via the statement cache.
    flexisbale_size_statement_cache = set()
    for skuname in Pcd.SkuOverrideValues:
        if skuname == TAB_COMMON:
            continue
        for defaultstorenameitem in Pcd.SkuOverrideValues[skuname]:
            CApp = CApp + "// SkuName: %s,  DefaultStoreName: %s \n" % (skuname, defaultstorenameitem)
            for index in Pcd.SkuOverrideValues[skuname][defaultstorenameitem]:
                if index:
                    ActualCap.append(index)
                for FieldList in [Pcd.SkuOverrideValues[skuname][defaultstorenameitem][index]]:
                    if not FieldList:
                        continue
                    for FieldName in FieldList:
                        fieldinfo = tuple(FieldList[FieldName])
                        if fieldinfo in flexisbale_size_statement_cache:
                            continue
                        flexisbale_size_statement_cache.add(fieldinfo)
                        FieldName = "." + FieldName
                        IsArray = _IsFieldValueAnArray(FieldList[FieldName.strip(".")][0])
                        if IsArray and not (FieldList[FieldName.strip(".")][0].startswith('{GUID') and FieldList[FieldName.strip(".")][0].endswith('}')):
                            try:
                                Value = ValueExpressionEx(FieldList[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
                            except BadExpression:
                                EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
                                                (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2]))
                            Value, ValueSize = ParseFieldValue(Value)
                            if not Pcd.IsArray():
                                CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), FieldList[FieldName.strip(".")][1], FieldList[FieldName.strip(".")][2], FieldList[FieldName.strip(".")][0]);
                        else:
                            NewFieldName = ''
                            FieldName_ori = FieldName.strip('.')
                            while '[' in FieldName:
                                NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
                                Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
                                FieldName = FieldName.split(']', 1)[1]
                            FieldName = NewFieldName + FieldName
                            while '[' in FieldName and not Pcd.IsArray():
                                FieldName = FieldName.rsplit('[', 1)[0]
                                CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, FieldList[FieldName_ori][1], FieldList[FieldName_ori][2], FieldList[FieldName_ori][0])
    # FDF field overrides.
    if Pcd.PcdFieldValueFromFdf:
        CApp = CApp + "// From fdf \n"
        for FieldName in Pcd.PcdFieldValueFromFdf:
            FieldName = "." + FieldName
            IsArray = _IsFieldValueAnArray(Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0])
            if IsArray and not (Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0].startswith('{GUID') and Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0].endswith('}')):
                try:
                    Value = ValueExpressionEx(Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
                except BadExpression:
                    EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
                                    (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][1], Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][2]))
                Value, ValueSize = ParseFieldValue(Value)
                if not Pcd.IsArray():
                    CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][1], Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][2], Pcd.PcdFieldValueFromFdf[FieldName.strip(".")][0]);
            else:
                NewFieldName = ''
                FieldName_ori = FieldName.strip('.')
                while '[' in FieldName:
                    NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
                    Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
                    FieldName = FieldName.split(']', 1)[1]
                FieldName = NewFieldName + FieldName
                # NOTE(review): unlike the DEC/DSC/command-line branches this
                # loop is not guarded by "and not Pcd.IsArray()" — confirm
                # the asymmetry is intended.
                while '[' in FieldName:
                    FieldName = FieldName.rsplit('[', 1)[0]
                    CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %s Value %s \n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, Pcd.PcdFieldValueFromFdf[FieldName_ori][1], Pcd.PcdFieldValueFromFdf[FieldName_ori][2], Pcd.PcdFieldValueFromFdf[FieldName_ori][0])
    # Command-line field overrides.
    if Pcd.PcdFieldValueFromComm:
        CApp = CApp + "// From Command Line \n"
        for FieldName in Pcd.PcdFieldValueFromComm:
            FieldName = "." + FieldName
            IsArray = _IsFieldValueAnArray(Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0])
            if IsArray and not (Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0].startswith('{GUID') and Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0].endswith('}')):
                try:
                    Value = ValueExpressionEx(Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0], TAB_VOID, self._GuidDict)(True)
                except BadExpression:
                    EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
                                    (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName.strip('.'))), Pcd.PcdFieldValueFromComm[FieldName.strip(".")][1], Pcd.PcdFieldValueFromComm[FieldName.strip(".")][2]))
                Value, ValueSize = ParseFieldValue(Value)
                if not Pcd.IsArray():
                    CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d / __ARRAY_ELEMENT_SIZE(%s, %s) + ((%d %% __ARRAY_ELEMENT_SIZE(%s, %s)) ? 1 : 0)); // From %s Line %d Value %s\n' % (Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), ValueSize, Pcd.DatumType, FieldName.strip("."), Pcd.PcdFieldValueFromComm[FieldName.strip(".")][1], Pcd.PcdFieldValueFromComm[FieldName.strip(".")][2], Pcd.PcdFieldValueFromComm[FieldName.strip(".")][0]);
            else:
                NewFieldName = ''
                FieldName_ori = FieldName.strip('.')
                while '[' in FieldName:
                    NewFieldName = NewFieldName + FieldName.split('[', 1)[0] + '[0]'
                    Array_Index = int(FieldName.split('[', 1)[1].split(']', 1)[0])
                    FieldName = FieldName.split(']', 1)[1]
                FieldName = NewFieldName + FieldName
                while '[' in FieldName and not Pcd.IsArray():
                    FieldName = FieldName.rsplit('[', 1)[0]
                    CApp = CApp + '  __FLEXIBLE_SIZE(*Size, %s, %s, %d); // From %s Line %d Value %s \n' % (Pcd.DatumType, FieldName.strip("."), Array_Index + 1, Pcd.PcdFieldValueFromComm[FieldName_ori][1], Pcd.PcdFieldValueFromComm[FieldName_ori][2], Pcd.PcdFieldValueFromComm[FieldName_ori][0])
    # Explicit DEC-declared maximum and the largest explicitly-assigned
    # array index both act as lower bounds on the final size.
    if Pcd.GetPcdMaxSize():
        CApp = CApp + "  *Size = (%d > *Size ? %d : *Size); // The Pcd maxsize is %d \n" % (Pcd.GetPcdMaxSize(), Pcd.GetPcdMaxSize(), Pcd.GetPcdMaxSize())
    ArraySizeByAssign = self.CalculateActualCap(ActualCap)
    if ArraySizeByAssign > 1:
        CApp = CApp + "  *Size = (%d > *Size ? %d : *Size); \n" % (ArraySizeByAssign, ArraySizeByAssign)
    CApp = CApp + "}\n"
    return CApp
def CalculateActualCap(self,ActualCap):
if not ActualCap:
return 1
maxsize = 1
for item in ActualCap:
index_elements = ArrayIndex.findall(item)
rt = 1
for index_e in index_elements:
index_num = index_e.lstrip("[").rstrip("]").strip()
if not index_num:
# Not support flexiable pcd array assignment
return 1
index_num = int(index_num,16) if index_num.startswith(("0x","0X")) else int(index_num)
rt = rt * (index_num+1)
if rt >maxsize:
maxsize = rt
return maxsize
    @staticmethod
    def GenerateSizeStatments(Pcd,skuname,defaultstorename):
        """Emit the C statements that initialize 'Size' for a structure PCD.

        Returns a C source fragment that sets Size from sizeof() of the
        PCD's C type (expanded with array dimensions for array PCDs) and
        then calls the generated Cal_<Guid>_<Name>_Size() helper, which may
        enlarge Size for flexible-array usage.
        """
        if Pcd.IsArray():
            # Rebuild the full C array declarator, e.g. TYPE[2][3], from the
            # recorded per-dimension capacities.
            r_datatype = [Pcd.BaseDatumType]
            # Set once a flexible ('0' / '-1') dimension has been seen; only
            # the last dimension is allowed to be flexible.
            lastoneisEmpty = False
            for dem in Pcd.Capacity:
                if lastoneisEmpty:
                    EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. " %
                                    (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))))
                if dem == '0' or dem == "-1":
                    # Flexible dimension: declare as [1]; actual size is
                    # computed below / at build time.
                    r_datatype.append("[1]")
                    lastoneisEmpty = True
                else:
                    r_datatype.append("[" + dem + "]")
            # Pick the default value that matches the PCD type so the
            # {CODE(...)} form can be detected.
            if Pcd.Type in [MODEL_PCD_DYNAMIC_EX_HII, MODEL_PCD_DYNAMIC_HII]:
                PcdDefValue = Pcd.SkuInfoList.get(skuname).DefaultStoreDict.get(defaultstorename)
            elif Pcd.Type in [MODEL_PCD_DYNAMIC_EX_DEFAULT,MODEL_PCD_DYNAMIC_VPD,MODEL_PCD_DYNAMIC_DEFAULT,MODEL_PCD_DYNAMIC_EX_VPD]:
                PcdDefValue = Pcd.SkuInfoList.get(skuname).DefaultValue
            else:
                PcdDefValue = Pcd.DefaultValue
            if lastoneisEmpty:
                if "{CODE(" not in PcdDefValue:
                    # Flexible array sized from the byte length of the default
                    # value, rounded up to a whole number of elements.
                    sizebasevalue_plus = "(%s / sizeof(%s) + 1)" % ((DscBuildData.GetStructurePcdMaxSize(Pcd), Pcd.BaseDatumType))
                    sizebasevalue = "(%s / sizeof(%s))" % ((DscBuildData.GetStructurePcdMaxSize(Pcd), Pcd.BaseDatumType))
                    sizeof = "sizeof(%s)" % Pcd.BaseDatumType
                    CApp = ' int ArraySize = %s %% %s ? %s : %s ;\n' % ( (DscBuildData.GetStructurePcdMaxSize(Pcd), sizeof, sizebasevalue_plus, sizebasevalue))
                    CApp += ' Size = ArraySize * sizeof(%s); \n' % Pcd.BaseDatumType
                else:
                    # {CODE(...)} initializer: the Cal_..._Size() call below
                    # determines the size.
                    CApp = " Size = 0;\n"
            else:
                CApp = ' Size = sizeof(%s);\n' % ("".join(r_datatype) )
        else:
            CApp = ' Size = sizeof(%s);\n' % (Pcd.DatumType)
        CApp = CApp + ' Cal_%s_%s_Size(&Size);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        return CApp
    def GetIndicator(self,index,FieldName,Pcd):
        """Build the C lvalue expression ("indicator") that addresses the
        array element / field selected by *index* and *FieldName* inside the
        generated Pcd buffer.

        For multi-dimensional array PCDs the element offset is computed in
        row-major order from the per-dimension capacities; the flexible last
        dimension is derived at runtime from Size.
        NOTE(review): assumes *index* carries one "[n]" group per array
        dimension — confirm with callers.
        """
        def cleanupindex(indexstr):
            # "[ 3 ]" -> "3"
            return indexstr.strip("[").strip("]").strip()
        index_elements = ArrayIndex.findall(index)
        pcd_capacity = Pcd.Capacity
        if index:
            indicator = "(Pcd"
            if len(pcd_capacity)>2:
                # Three or more dimensions: accumulate the offset dimension by
                # dimension; the last two dimensions are folded in one step.
                for i in range(0,len(index_elements)):
                    index_ele = index_elements[i]
                    index_num = index_ele.strip("[").strip("]").strip()
                    if i == len(index_elements) -2:
                        indicator += "+ %d*Size/sizeof(%s)/%d + %s)" %(int(cleanupindex(index_elements[i+1])),Pcd.BaseDatumType,reduce(lambda x,y: int(x)*int(y),pcd_capacity[:-1]), cleanupindex(index_elements[i]))
                        break
                    else:
                        indicator += " + %d*%s*Size/sizeof(%s)/%d" %(int(cleanupindex(index_elements[i])),reduce(lambda x,y: int(x)*int(y),pcd_capacity[i+1:-1]),Pcd.BaseDatumType,reduce(lambda x,y: int(x)*int(y),pcd_capacity[:-1]))
            elif len(pcd_capacity) == 2:
                # Two dimensions: row size is Size / capacity of the first
                # dimension (handles a flexible second dimension).
                indicator += "+ %d*Size/sizeof(%s)/%d + %s)" %(int(cleanupindex(index_elements[0])),Pcd.BaseDatumType,int(pcd_capacity[0]), index_elements[1].strip("[").strip("]").strip())
            elif len(pcd_capacity) == 1:
                # One dimension: plain element offset.
                index_ele = index_elements[0]
                index_num = index_ele.strip("[").strip("]").strip()
                indicator += " + %s)" % (index_num)
        else:
            # No index: address the PCD buffer itself.
            indicator = "Pcd"
        if FieldName:
            indicator += "->" + FieldName
        return indicator
def GetStarNum(self,Pcd):
if not Pcd.IsArray():
return 1
elif Pcd.IsSimpleTypeArray():
return len(Pcd.Capacity)
else:
return len(Pcd.Capacity) + 1
    def GenerateDefaultValueAssignFunction(self, Pcd):
        """Generate the C function that applies the DEC default value of a
        structure PCD to the Pcd buffer.

        The whole-PCD default (Pcd.DefaultValueFromDec) is applied first,
        then each field-level default from Pcd.DefaultValues.  Returns the
        generated C source as a string.
        """
        CApp = "// Default value in Dec \n"
        CApp = CApp + "void Assign_%s_%s_Default_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.BaseDatumType)
        CApp = CApp + ' UINT32 FieldSize;\n'
        CApp = CApp + ' CHAR8 *Value;\n'
        CApp = CApp + ' UINT32 PcdArraySize;\n'
        DefaultValueFromDec = Pcd.DefaultValueFromDec
        IsArray = _IsFieldValueAnArray(Pcd.DefaultValueFromDec)
        if IsArray:
            # Array-form defaults must be normalized to a byte array before
            # they can be parsed.
            try:
                DefaultValueFromDec = ValueExpressionEx(Pcd.DefaultValueFromDec, TAB_VOID)(True)
            except BadExpression:
                EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from DEC: %s" %
                                (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, DefaultValueFromDec))
            DefaultValueFromDec = StringToArray(DefaultValueFromDec)
        Value, ValueSize = ParseFieldValue (DefaultValueFromDec)
        if IsArray:
            #
            # Use memcpy() to copy value into field
            #
            if Pcd.IsArray():
                pcdarraysize = Pcd.PcdArraySize()
                if "{CODE(" in Pcd.DefaultValueFromDec:
                    # {CODE(...)} default: a <Guid>_<Name>_INIT_Value constant
                    # is emitted elsewhere; copy from it.
                    if Pcd.Capacity[-1] != "-1":
                        # Fixed-capacity array: guard against oversized values
                        # at C compile time.
                        CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_INIT_Value) <= %d * sizeof(%s), "Pcd %s.%s Value in Dec exceed the array capability %s"); // From %s Line %s \n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,Pcd.DefaultValueFromDecInfo[0],Pcd.DefaultValueFromDecInfo[1])
                    CApp = CApp + ' PcdArraySize = sizeof(%s_%s_INIT_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
                    CApp = CApp + ' memcpy (Pcd, %s_%s_INIT_Value,PcdArraySize);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
                else:
                    if Pcd.Capacity[-1] != "-1":
                        CApp = CApp + '__STATIC_ASSERT(%d <= %d * sizeof(%s), "Pcd %s.%s Value in Dec exceed the array capability %s"); // From %s Line %s \n' % (ValueSize,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,Pcd.DefaultValueFromDecInfo[0],Pcd.DefaultValueFromDecInfo[1])
                    CApp = CApp + ' PcdArraySize = %d;\n' % ValueSize
                    CApp = CApp + ' Value = %s; // From DEC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultValueFromDec)
                    CApp = CApp + ' memcpy (Pcd, Value, PcdArraySize);\n'
            else:
                # Single structure (not an array of structures).
                if "{CODE(" in Pcd.DefaultValueFromDec:
                    CApp = CApp + ' PcdArraySize = sizeof(%s_%s_INIT_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
                    CApp = CApp + ' memcpy (Pcd, &%s_%s_INIT_Value,PcdArraySize);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
                else:
                    CApp = CApp + ' Value = %s; // From DEC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultValueFromDec)
                    CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
        elif isinstance(Value, str):
            CApp = CApp + ' Pcd = %s; // From DEC Default Value %s\n' % (Value, Pcd.DefaultValueFromDec)
        # Apply per-field DEC defaults; each entry is
        # (value, source-file, source-line).
        for index in Pcd.DefaultValues:
            FieldList = Pcd.DefaultValues[index]
            if not FieldList:
                continue
            for FieldName in FieldList:
                IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
                if IsArray:
                    try:
                        FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
                    except BadExpression:
                        EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
                                        (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
                try:
                    Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
                except Exception:
                    EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
                # C lvalue for this field, including any array-element offset.
                indicator = self.GetIndicator(index, FieldName,Pcd)
                if IsArray:
                    #
                    # Use memcpy() to copy value into field
                    #
                    CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
                    CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
                    CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
                    CApp = CApp + ' memcpy (&%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (indicator, ValueSize, ValueSize)
                elif isinstance(Value, str):
                    CApp = CApp + ' %s = %s; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
                else:
                    # Scalar assignment; guard explicit array indexes at C
                    # compile time.
                    if '[' in FieldName and ']' in FieldName:
                        Index = int(FieldName.split('[')[1].split(']')[0])
                        CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
                    if ValueSize > 4:
                        # Value wider than 32 bits needs the ULL suffix.
                        CApp = CApp + ' %s = %dULL; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
                    else:
                        CApp = CApp + ' %s = %d; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
        CApp = CApp + "}\n"
        return CApp
@staticmethod
def GenerateDefaultValueAssignStatement(Pcd):
CApp = ' Assign_%s_%s_Default_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
return CApp
def GetPcdDscRawDefaultValue(self,Pcd, SkuName,DefaultStoreName):
if Pcd.Type in PCD_DYNAMIC_TYPE_SET or Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT):
pcddefaultvalue = Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT) if Pcd.DefaultFromDSC else None
else:
pcddefaultvalue = Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName)
else:
pcddefaultvalue = Pcd.DscRawValue.get(SkuName, {}).get(TAB_DEFAULT_STORES_DEFAULT)
return pcddefaultvalue
def GetPcdDscRawValueInfo(self,Pcd, SkuName,DefaultStoreName):
DscValueInfo = Pcd.DscRawValueInfo.get(SkuName, {}).get(DefaultStoreName)
if DscValueInfo:
dscfilepath,lineno = DscValueInfo
else:
dscfilepath = self.MetaFile.File
lineno = ""
return dscfilepath,lineno
def GenerateInitValueFunction(self, Pcd, SkuName, DefaultStoreName):
CApp = "// Value in Dsc for Sku: %s, DefaultStore %s\n" % (SkuName, DefaultStoreName)
CApp = CApp + "void Assign_%s_%s_%s_%s_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, SkuName, DefaultStoreName, Pcd.BaseDatumType)
CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n'
CApp = CApp + ' UINT32 PcdArraySize;\n'
CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT)
inherit_OverrideValues = Pcd.SkuOverrideValues[SkuName]
dscfilepath,lineno = self.GetPcdDscRawValueInfo(Pcd, SkuName, DefaultStoreName)
if lineno:
valuefrom = "%s Line %s" % (dscfilepath,str(lineno))
else:
valuefrom = dscfilepath
pcddefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, SkuName, DefaultStoreName)
if pcddefaultvalue:
FieldList = pcddefaultvalue
IsArray = _IsFieldValueAnArray(FieldList)
if IsArray:
if "{CODE(" not in FieldList:
try:
FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
except BadExpression:
EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from DSC: %s" %
(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
Value, ValueSize = ParseFieldValue (FieldList)
if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT):
if isinstance(Value, str):
if "{CODE(" in Value:
if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
pcdarraysize = Pcd.PcdArraySize()
CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) <= %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType, valuefrom)
CApp = CApp+ ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value,PcdArraySize);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
else:
CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
elif IsArray:
#
# Use memcpy() to copy value into field
#
if Pcd.IsArray():
pcdarraysize = Pcd.PcdArraySize()
if "{CODE(" in pcddefaultvalue:
if Pcd.Capacity[-1] != "-1":
CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) <= %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
CApp = CApp + ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
CApp = CApp + ' memcpy (Pcd, %s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
else:
if Pcd.Capacity[-1] != "-1":
CApp = CApp + '__STATIC_ASSERT(%d <= %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (ValueSize,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
CApp = CApp + ' PcdArraySize = %d;\n' % ValueSize
CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
CApp = CApp + ' memcpy (Pcd, Value, PcdArraySize);\n'
else:
if "{CODE(" in pcddefaultvalue:
CApp = CApp + ' PcdArraySize = %d < sizeof(%s) * %d ? %d: sizeof(%s) * %d;\n ' % (ValueSize,Pcd.BaseDatumType,pcdarraysize,ValueSize,Pcd.BaseDatumType,pcdarraysize)
CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
else:
CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DefaultFromDSC.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
else:
if isinstance(Value, str):
if "{CODE(" in Value:
if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
pcdarraysize = Pcd.PcdArraySize()
CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) <= %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
CApp = CApp + ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n '% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
else:
CApp = CApp + ' Pcd = %s; // From DSC Default Value %s\n' % (Value, Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName))
elif IsArray:
#
# Use memcpy() to copy value into field
#
if Pcd.IsArray():
pcdarraysize = Pcd.PcdArraySize()
if "{CODE(" in pcddefaultvalue:
if Pcd.Capacity[-1] != "-1":
CApp = CApp + '__STATIC_ASSERT(sizeof(%s_%s_%s_%s_Value) <= %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
CApp + ' PcdArraySize = sizeof(%s_%s_%s_%s_Value);\n ' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
CApp = CApp + ' memcpy (Pcd, %s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
else:
if Pcd.Capacity[-1] != "-1":
CApp = CApp + '__STATIC_ASSERT(%d <= %d * sizeof(%s), "Pcd %s.%s Value in Dsc exceed the array capability %s"); // From %s \n' % (ValueSize,pcdarraysize,Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.DatumType,valuefrom)
CApp = CApp + ' PcdArraySize = %d;\n' % ValueSize
CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DscRawValue.get(TAB_DEFAULT, {}).get(TAB_DEFAULT_STORES_DEFAULT, Pcd.DefaultValue) if Pcd.DefaultFromDSC else Pcd.DefaultValue)
CApp = CApp + ' memcpy (Pcd, Value, PcdArraySize);\n'
else:
if "{CODE(" in pcddefaultvalue:
CApp = CApp + ' PcdArraySize = %d < sizeof(%s) * %d ? %d: sizeof(%s) * %d;\n ' % (ValueSize,Pcd.BaseDatumType,pcdarraysize,ValueSize,Pcd.BaseDatumType,pcdarraysize)
CApp = CApp + ' memcpy (Pcd, &%s_%s_%s_%s_Value, PcdArraySize);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,SkuName, DefaultStoreName)
else:
CApp = CApp + ' Value = %s; // From DSC Default Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), Pcd.DscRawValue.get(SkuName, {}).get(DefaultStoreName))
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
inheritvalue = inherit_OverrideValues.get(DefaultStoreName)
if not inheritvalue:
inheritvalue = []
for index in inheritvalue:
FieldList = inheritvalue[index]
if not FieldList:
continue
if (SkuName, DefaultStoreName) == (TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT) or (( (SkuName, '') not in Pcd.ValueChain) and ( (SkuName, DefaultStoreName) not in Pcd.ValueChain )):
for FieldName in FieldList:
indicator = self.GetIndicator(index, FieldName,Pcd)
IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
if IsArray:
try:
FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
except BadExpression:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
(".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' memcpy (&%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (indicator, ValueSize, ValueSize)
else:
if '[' in FieldName and ']' in FieldName:
Index = int(FieldName.split('[')[1].split(']')[0])
CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
if ValueSize > 4:
CApp = CApp + ' %s = %dULL; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
else:
CApp = CApp + ' %s = %d; // From %s Line %d Value %s\n' % (indicator, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + "}\n"
return CApp
@staticmethod
def GenerateInitValueStatement(Pcd, SkuName, DefaultStoreName):
CApp = ' Assign_%s_%s_%s_%s_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, SkuName, DefaultStoreName)
return CApp
def GenerateCommandLineValue(self, Pcd):
CApp = "// Value in CommandLine\n"
CApp = CApp + "void Assign_%s_%s_CommandLine_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.BaseDatumType)
CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n'
pcddefaultvalue = Pcd.PcdValueFromComm
for FieldList in [pcddefaultvalue, Pcd.PcdFieldValueFromComm]:
if not FieldList:
continue
if pcddefaultvalue and FieldList == pcddefaultvalue:
IsArray = _IsFieldValueAnArray(FieldList)
if IsArray:
try:
FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
except BadExpression:
EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from Command: %s" %
(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
Value, ValueSize = ParseFieldValue (FieldList)
if isinstance(Value, str):
CApp = CApp + ' Pcd = %s; // From Command Line \n' % (Value)
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' Value = %s; // From Command Line.\n' % (DscBuildData.IntToCString(Value, ValueSize))
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
continue
for FieldName in FieldList:
IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
if IsArray:
try:
FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
except BadExpression:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
(".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
except:
print("error")
try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' memcpy (&Pcd->%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (FieldName, ValueSize, ValueSize)
else:
if '[' in FieldName and ']' in FieldName:
Index = int(FieldName.split('[')[1].split(']')[0])
CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
if ValueSize > 4:
CApp = CApp + ' Pcd->%s = %dULL; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
else:
CApp = CApp + ' Pcd->%s = %d; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + "}\n"
return CApp
def GenerateModuleScopeValue(self, Pcd):
CApp = "// Value in Dsc Module scope \n"
for ModuleGuid in Pcd.PcdFiledValueFromDscComponent:
CApp = CApp + "void Assign_%s_%s_%s_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, ModuleGuid,Pcd.BaseDatumType)
CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n'
pcddefaultvalue, file_path,lineNo = Pcd.PcdValueFromComponents.get(ModuleGuid,(None,None,None))
if pcddefaultvalue:
IsArray = _IsFieldValueAnArray(pcddefaultvalue)
if IsArray:
try:
FieldList = ValueExpressionEx(pcddefaultvalue, TAB_VOID)(True)
except BadExpression:
EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from %s Line %s: %s" %
(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, file_path, lineNo, FieldList))
Value, ValueSize = ParseFieldValue (FieldList)
if isinstance(Value, str):
CApp = CApp + ' Pcd = %s; // From %s Line %s \n' % (Value, file_path, lineNo)
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' Value = %s; // From %s Line %s.\n' % (DscBuildData.IntToCString(Value, ValueSize), file_path, lineNo)
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
PcdFiledValue = Pcd.PcdFiledValueFromDscComponent.get(ModuleGuid)
for index in PcdFiledValue:
FieldList = PcdFiledValue[index]
if not FieldList:
continue
for FieldName in FieldList:
IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
if IsArray:
try:
FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
except BadExpression:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
(".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
except:
print("error")
try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' memcpy (&Pcd->%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (FieldName, ValueSize, ValueSize)
else:
if '[' in FieldName and ']' in FieldName:
Index = int(FieldName.split('[')[1].split(']')[0])
CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
if ValueSize > 4:
CApp = CApp + ' Pcd->%s = %dULL; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
else:
CApp = CApp + ' Pcd->%s = %d; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + "}\n"
return CApp
@staticmethod
def GenerateCommandLineValueStatement(Pcd):
CApp = ' Assign_%s_%s_CommandLine_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
return CApp
def GenerateFdfValue(self,Pcd):
CApp = "// Value in Fdf\n"
CApp = CApp + "void Assign_%s_%s_Fdf_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName,Pcd.BaseDatumType)
CApp = CApp + ' UINT32 FieldSize;\n'
CApp = CApp + ' CHAR8 *Value;\n'
pcddefaultvalue = Pcd.PcdValueFromFdf
for FieldList in [pcddefaultvalue,Pcd.PcdFieldValueFromFdf]:
if not FieldList:
continue
if pcddefaultvalue and FieldList == pcddefaultvalue:
IsArray = _IsFieldValueAnArray(FieldList)
if IsArray:
try:
FieldList = ValueExpressionEx(FieldList, TAB_VOID)(True)
except BadExpression:
EdkLogger.error("Build", FORMAT_INVALID, "Invalid value format for %s.%s, from Fdf: %s" %
(Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldList))
Value, ValueSize = ParseFieldValue (FieldList)
if isinstance(Value, str):
CApp = CApp + ' Pcd = %s; // From Fdf \n' % (Value)
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' Value = %s; // From Fdf .\n' % (DscBuildData.IntToCString(Value, ValueSize))
CApp = CApp + ' memcpy (Pcd, Value, %d);\n' % (ValueSize)
continue
for FieldName in FieldList:
IsArray = _IsFieldValueAnArray(FieldList[FieldName][0])
if IsArray:
try:
FieldList[FieldName][0] = ValueExpressionEx(FieldList[FieldName][0], TAB_VOID, self._GuidDict)(True)
except BadExpression:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " %
(".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName, FieldName)), FieldList[FieldName][1], FieldList[FieldName][2]))
except:
print("error")
try:
Value, ValueSize = ParseFieldValue (FieldList[FieldName][0])
except Exception:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid value format for %s. From %s Line %d " % (".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName,FieldName)),FieldList[FieldName][1], FieldList[FieldName][2]))
if isinstance(Value, str):
CApp = CApp + ' Pcd->%s = %s; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
elif IsArray:
#
# Use memcpy() to copy value into field
#
CApp = CApp + ' FieldSize = __FIELD_SIZE(%s, %s);\n' % (Pcd.BaseDatumType, FieldName)
CApp = CApp + ' Value = %s; // From %s Line %d Value %s\n' % (DscBuildData.IntToCString(Value, ValueSize), FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' __STATIC_ASSERT((__FIELD_SIZE(%s, %s) >= %d) || (__FIELD_SIZE(%s, %s) == 0), "Input buffer exceeds the buffer array"); // From %s Line %d Value %s\n' % (Pcd.BaseDatumType, FieldName, ValueSize, Pcd.BaseDatumType, FieldName, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + ' memcpy (&Pcd->%s, Value, (FieldSize > 0 && FieldSize < %d) ? FieldSize : %d);\n' % (FieldName, ValueSize, ValueSize)
else:
if '[' in FieldName and ']' in FieldName:
Index = int(FieldName.split('[')[1].split(']')[0])
CApp = CApp + ' __STATIC_ASSERT((%d < __ARRAY_SIZE(Pcd->%s)) || (__ARRAY_SIZE(Pcd->%s) == 0), "array index exceeds the array number"); // From %s Line %d Index of %s\n' % (Index, FieldName.split('[')[0], FieldName.split('[')[0], FieldList[FieldName][1], FieldList[FieldName][2], FieldName)
if ValueSize > 4:
CApp = CApp + ' Pcd->%s = %dULL; // From %s Line %d Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
else:
CApp = CApp + ' Pcd->%s = %d; // From %s Line %s Value %s\n' % (FieldName, Value, FieldList[FieldName][1], FieldList[FieldName][2], FieldList[FieldName][0])
CApp = CApp + "}\n"
return CApp
@staticmethod
def GenerateFdfValueStatement(Pcd):
CApp = ' Assign_%s_%s_Fdf_Value(Pcd);\n' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
return CApp
@staticmethod
def GenerateModuleValueStatement(module_guid, Pcd):
CApp = " Assign_%s_%s_%s_Value(Pcd);\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, module_guid)
return CApp
def GenerateModuleScopeInitializeFunc(self,SkuName, Pcd, InitByteValue, CApp):
    """Emit one C Initialize_<module-guid>_STANDARD_<guid>_<name>() function
    for every module-scoped (DSC <Components> section) override of a
    structure PCD.

    Each generated function fetches the current PCD buffer with PcdGetPtr,
    computes the needed size, applies the DEC default, DSC, module-scope,
    FDF and command-line value assignments in that order, then stores the
    result back with PcdSetPtr.

    @param SkuName        SKU the initialization code is generated for
    @param Pcd            structure PCD object being processed
    @param InitByteValue  accumulated '<scope>.<store>.<guid>.<name>|<type>|<value>'
                          input lines for the PcdValueInit tool
    @param CApp           accumulated generated C source text

    @retval (InitByteValue, CApp) with this PCD's functions appended
    """
    for module_guid in Pcd.PcdFiledValueFromDscComponent:
        CApp = CApp + 'void\n'
        CApp = CApp + 'Initialize_%s_%s_%s_%s(\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        CApp = CApp + ' void\n'
        CApp = CApp + ' )\n'
        CApp = CApp + '{\n'
        CApp = CApp + ' UINT32 Size;\n'
        CApp = CApp + ' UINT32 FieldSize;\n'
        CApp = CApp + ' CHAR8 *Value;\n'
        CApp = CApp + ' UINT32 OriginalSize;\n'
        CApp = CApp + ' VOID *OriginalPcd;\n'
        CApp = CApp + ' %s *Pcd; // From %s Line %d \n' % (Pcd.BaseDatumType,Pcd.PkgPath, Pcd.PcdDefineLineNo)
        CApp = CApp + '\n'
        # Record this PCD's DEC default in the tool input file, keyed by the
        # module GUID instead of a SKU name.
        PcdDefaultValue = StringToArray(Pcd.DefaultValueFromDec.strip())
        InitByteValue += '%s.%s.%s.%s|%s|%s\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DatumType, PcdDefaultValue)
        #
        # Get current PCD value and size
        #
        CApp = CApp + ' OriginalPcd = PcdGetPtr (%s, %s, %s, %s, &OriginalSize);\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        #
        # Determine the size of the PCD. For simple structures, sizeof(TYPE) provides
        # the correct value. For structures with a flexible array member, the flexible
        # array member is detected, and the size is based on the highest index used with
        # the flexible array member. The flexible array member must be the last field
        # in a structure. The size formula for this case is:
        # OFFSET_OF(FlexbleArrayField) + sizeof(FlexibleArray[0]) * (HighestIndex + 1)
        #
        CApp = CApp + DscBuildData.GenerateSizeStatments(Pcd,SkuName,TAB_DEFAULT_STORES_DEFAULT)
        if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
            # Fixed-capacity array PCD: clamp to the declared element count.
            CApp = CApp + ' OriginalSize = OriginalSize < sizeof(%s) * %d? OriginalSize:sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize(),Pcd.BaseDatumType,Pcd.PcdArraySize())
            CApp = CApp + ' Size = sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize())
        #
        # Allocate and zero buffer for the PCD
        # Must handle cases where current value is smaller, larger, or same size
        # Always keep that larger one as the current size
        #
        CApp = CApp + ' Size = (OriginalSize > Size ? OriginalSize : Size);\n'
        CApp = CApp + ' Pcd = (%s *)malloc (Size);\n' % (Pcd.BaseDatumType,)
        CApp = CApp + ' memset (Pcd, 0, Size);\n'
        #
        # Copy current PCD value into allocated buffer.
        #
        CApp = CApp + ' memcpy (Pcd, OriginalPcd, OriginalSize);\n'
        #
        # Assign field values in PCD
        #
        CApp = CApp + DscBuildData.GenerateDefaultValueAssignStatement(Pcd)
        CApp = CApp + "// SkuName: %s, DefaultStoreName: STANDARD \n" % self.SkuIdMgr.SystemSkuId
        CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
        CApp = CApp + DscBuildData.GenerateModuleValueStatement(module_guid,Pcd)
        CApp = CApp + DscBuildData.GenerateFdfValueStatement(Pcd)
        CApp = CApp + DscBuildData.GenerateCommandLineValueStatement(Pcd)
        #
        # Set new PCD value and size
        #
        CApp = CApp + ' PcdSetPtr (%s, %s, %s, %s, Size, (void *)Pcd);\n' % (module_guid, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        #
        # Free PCD
        #
        CApp = CApp + ' free (Pcd);\n'
        CApp = CApp + '}\n'
        CApp = CApp + '\n'
    return InitByteValue,CApp
def GenerateInitializeFunc(self, SkuName, DefaultStore, Pcd, InitByteValue, CApp):
    """Emit the C Initialize_<sku>_<store>_<guid>_<name>() function(s) for a
    structure PCD under one SKU/default-store scope.

    One function is generated per default store that overrides the PCD for
    *SkuName* (falling back to a single STANDARD-store function when no
    override exists).  Each function reads the current PCD buffer, applies
    DEC default, DSC (walking the SKU inheritance chain for dynamic PCDs),
    FDF and command-line assignments in order, then writes the buffer back.

    @param SkuName        SKU the initialization code is generated for
    @param DefaultStore   default store name used when no override exists
    @param Pcd            structure PCD object being processed
    @param InitByteValue  accumulated PcdValueInit tool input lines
    @param CApp           accumulated generated C source text

    @retval (InitByteValue, CApp) with this PCD's functions appended
    """
    OverrideValues = {DefaultStore:{}}
    if Pcd.SkuOverrideValues:
        OverrideValues = Pcd.SkuOverrideValues[SkuName]
        if not OverrideValues:
            # SKU present but with no per-store data: fall back to the
            # STANDARD store with the PCD's plain default values.
            OverrideValues = {TAB_DEFAULT_STORES_DEFAULT:Pcd.DefaultValues}
    for DefaultStoreName in OverrideValues:
        CApp = CApp + 'void\n'
        CApp = CApp + 'Initialize_%s_%s_%s_%s(\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        CApp = CApp + ' void\n'
        CApp = CApp + ' )\n'
        CApp = CApp + '{\n'
        CApp = CApp + ' UINT32 Size;\n'
        CApp = CApp + ' UINT32 FieldSize;\n'
        CApp = CApp + ' CHAR8 *Value;\n'
        CApp = CApp + ' UINT32 OriginalSize;\n'
        CApp = CApp + ' VOID *OriginalPcd;\n'
        CApp = CApp + ' %s *Pcd; // From %s Line %d \n' % (Pcd.BaseDatumType,Pcd.PkgPath, Pcd.PcdDefineLineNo)
        CApp = CApp + '\n'
        PcdDefaultValue = StringToArray(Pcd.DefaultValueFromDec.strip())
        InitByteValue += '%s.%s.%s.%s|%s|%s\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.DatumType, PcdDefaultValue)
        #
        # Get current PCD value and size
        #
        CApp = CApp + ' OriginalPcd = PcdGetPtr (%s, %s, %s, %s, &OriginalSize);\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        #
        # Determine the size of the PCD. For simple structures, sizeof(TYPE) provides
        # the correct value. For structures with a flexible array member, the flexible
        # array member is detected, and the size is based on the highest index used with
        # the flexible array member. The flexible array member must be the last field
        # in a structure. The size formula for this case is:
        # OFFSET_OF(FlexbleArrayField) + sizeof(FlexibleArray[0]) * (HighestIndex + 1)
        #
        CApp = CApp + DscBuildData.GenerateSizeStatments(Pcd,SkuName,DefaultStoreName)
        if Pcd.IsArray() and Pcd.Capacity[-1] != "-1":
            # Fixed-capacity array PCD: clamp to the declared element count.
            CApp = CApp + ' OriginalSize = OriginalSize < sizeof(%s) * %d? OriginalSize:sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize(),Pcd.BaseDatumType,Pcd.PcdArraySize())
            CApp = CApp + ' Size = sizeof(%s) * %d; \n' % (Pcd.BaseDatumType,Pcd.PcdArraySize())
        #
        # Allocate and zero buffer for the PCD
        # Must handle cases where current value is smaller, larger, or same size
        # Always keep that larger one as the current size
        #
        CApp = CApp + ' Size = (OriginalSize > Size ? OriginalSize : Size);\n'
        CApp = CApp + ' Pcd = (%s *)malloc (Size);\n' % (Pcd.BaseDatumType,)
        CApp = CApp + ' memset (Pcd, 0, Size);\n'
        #
        # Copy current PCD value into allocated buffer.
        #
        CApp = CApp + ' memcpy (Pcd, OriginalPcd, OriginalSize);\n'
        #
        # Assign field values in PCD
        #
        CApp = CApp + DscBuildData.GenerateDefaultValueAssignStatement(Pcd)
        if Pcd.Type not in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
                            self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
            # Dynamic PCD: apply overrides along the SKU inheritance chain,
            # ending at the requested SKU; STANDARD store values are applied
            # before a named default store's values.
            for skuname in self.SkuIdMgr.GetSkuChain(SkuName):
                storeset = [DefaultStoreName] if DefaultStoreName == TAB_DEFAULT_STORES_DEFAULT else [TAB_DEFAULT_STORES_DEFAULT, DefaultStoreName]
                for defaultstorenameitem in storeset:
                    CApp = CApp + "// SkuName: %s, DefaultStoreName: %s \n" % (skuname, defaultstorenameitem)
                    CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, skuname, defaultstorenameitem)
                if skuname == SkuName:
                    break
        else:
            CApp = CApp + "// SkuName: %s, DefaultStoreName: STANDARD \n" % self.SkuIdMgr.SystemSkuId
            CApp = CApp + DscBuildData.GenerateInitValueStatement(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
        CApp = CApp + DscBuildData.GenerateFdfValueStatement(Pcd)
        CApp = CApp + DscBuildData.GenerateCommandLineValueStatement(Pcd)
        #
        # Set new PCD value and size
        #
        CApp = CApp + ' PcdSetPtr (%s, %s, %s, %s, Size, (void *)Pcd);\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        #
        # Free PCD
        #
        CApp = CApp + ' free (Pcd);\n'
        CApp = CApp + '}\n'
        CApp = CApp + '\n'
    return InitByteValue, CApp
def GenerateArrayAssignment(self, Pcd):
    """Emit 'static <type> <name>..._Value[]... = <code>;' C initializers for
    a structure PCD whose value uses the {CODE(...)} syntax.

    An initializer is produced for the DEC default and, for dynamic[ex]
    PCDs, for each SKU/default-store override actually present in the DSC;
    other PCD types get a single DEFAULT/STANDARD initializer.

    @param Pcd  structure PCD object (None yields an empty string)

    @retval C source string containing the generated initializers
    """
    CApp = ""
    if not Pcd:
        return CApp
    # One "[]" per declared dimension; the C compiler deduces the actual
    # sizes from the {CODE()} initializer body.
    Demesion = ""
    for d in Pcd.Capacity:
        Demesion += "[]"
    Value = Pcd.DefaultValueFromDec
    if "{CODE(" in Pcd.DefaultValueFromDec:
        # Strip the leading "{CODE(" and trailing ")}" to recover the raw
        # C initializer text.
        realvalue = Pcd.DefaultValueFromDec.strip()[6:-2] # "{CODE(").rstrip(")}"
        CApp += "static %s %s_%s_INIT_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,Demesion,realvalue)
    if Pcd.Type in PCD_DYNAMIC_TYPE_SET | PCD_DYNAMIC_EX_TYPE_SET:
        for skuname in Pcd.SkuInfoList:
            skuinfo = Pcd.SkuInfoList[skuname]
            if skuinfo.VariableName:
                # HII-backed PCD: one initializer per (sku, default store)
                # pair that the DSC really overrides.
                for defaultstore in skuinfo.DefaultStoreDict:
                    pcddscrawdefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, skuname, defaultstore)
                    if pcddscrawdefaultvalue:
                        Value = skuinfo.DefaultStoreDict[defaultstore]
                        if "{CODE(" in Value:
                            realvalue = Value.strip()[6:-2] # "{CODE(").rstrip(")}"
                            CApp += "static %s %s_%s_%s_%s_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,defaultstore,Demesion,realvalue)
            else:
                # Non-HII dynamic PCD: initializer for the SKU's STANDARD
                # store only.
                pcddscrawdefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, skuname, TAB_DEFAULT_STORES_DEFAULT)
                if pcddscrawdefaultvalue:
                    Value = skuinfo.DefaultValue
                    if "{CODE(" in Value:
                        realvalue = Value.strip()[6:-2] # "{CODE(").rstrip(")}"
                        CApp += "static %s %s_%s_%s_%s_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,skuname,TAB_DEFAULT_STORES_DEFAULT,Demesion,realvalue)
    else:
        # Fixed-at-build / patchable PCD: one DEFAULT/STANDARD initializer.
        pcddscrawdefaultvalue = self.GetPcdDscRawDefaultValue(Pcd, TAB_DEFAULT, TAB_DEFAULT_STORES_DEFAULT)
        if pcddscrawdefaultvalue:
            if "{CODE(" in Pcd.DefaultValue:
                realvalue = Pcd.DefaultValue.strip()[6:-2] # "{CODE(").rstrip(")}"
                CApp += "static %s %s_%s_DEFAULT_STANDARD_Value%s = %s;\n" % (Pcd.BaseDatumType,Pcd.TokenSpaceGuidCName,Pcd.TokenCName,Demesion,realvalue)
    return CApp
def SkuOverrideValuesEmpty(self, OverrideValues):
    """Return True when *OverrideValues* carries no actual per-SKU data.

    A None/empty container is empty; a non-empty mapping still counts as
    empty when every entry's value is falsy.
    """
    if not OverrideValues:
        return True
    return not any(OverrideValues[name] for name in OverrideValues)
def ParseCCFlags(self, ccflag):
ccflags = set()
ccflaglist = ccflag.split(" ")
i = 0
while i < len(ccflaglist):
item = ccflaglist[i].strip()
if item in (r"/D", r"/U","-D","-U"):
ccflags.add(" ".join((ccflaglist[i],ccflaglist[i+1])))
i = i+1
elif item.startswith((r"/D", r"/U","-D","-U")):
ccflags.add(item)
i +=1
return ccflags
def GenerateByteArrayValue (self, StructuredPcds):
    """Generate, build and run the PcdValueInit C application that resolves
    the final byte-array value of every structure PCD.

    The generated program #includes the headers declared by the DEC files,
    contains the per-PCD Assign_/Initialize_ function families produced by
    the Generate* helpers, and is driven by an Input.txt of
    '<scope>.<store>.<guid>.<name>|<type>|<default>' lines.  A makefile is
    generated (nmake on win32, make elsewhere), the tool is built, executed,
    and its Output.txt is parsed back into value tuples.

    @param StructuredPcds  dict of structure PCD objects keyed by PCD name

    @retval list of 5-tuples split from Output.txt lines — the first four
            fields follow the Input.txt key order, the fifth is the
            resolved value string; None when StructuredPcds is empty
    """
    #
    # Generate/Compile/Run C application to determine if there are any flexible array members
    #
    if not StructuredPcds:
        return
    InitByteValue = ""
    CApp = PcdMainCHeader
    # Collect each required include file exactly once.
    IncludeFiles = set()
    for PcdName in StructuredPcds:
        Pcd = StructuredPcds[PcdName]
        for IncludeFile in Pcd.StructuredPcdIncludeFile:
            if IncludeFile not in IncludeFiles:
                IncludeFiles.add(IncludeFile)
                CApp = CApp + '#include <%s>\n' % (IncludeFile)
    CApp = CApp + '\n'
    for Pcd in StructuredPcds.values():
        CApp = CApp + self.GenerateArrayAssignment(Pcd)
    for PcdName in sorted(StructuredPcds.keys()):
        Pcd = StructuredPcds[PcdName]
        # create the void Cal_<token>_<cname>_Size functions
        CApp = CApp + self.GenerateSizeFunction(Pcd)
        # create the void Assign_ functions
        # From DEC
        CApp = CApp + self.GenerateDefaultValueAssignFunction(Pcd)
        # From Fdf
        CApp = CApp + self.GenerateFdfValue(Pcd)
        # From CommandLine
        CApp = CApp + self.GenerateCommandLineValue(Pcd)
        # From Dsc Global setting
        if self.SkuOverrideValuesEmpty(Pcd.SkuOverrideValues) or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
                                                                              self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
            CApp = CApp + self.GenerateInitValueFunction(Pcd, self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT)
        else:
            for SkuName in self.SkuIdMgr.SkuOverrideOrder():
                if SkuName not in Pcd.SkuOverrideValues:
                    continue
                for DefaultStoreName in Pcd.SkuOverrideValues[SkuName]:
                    CApp = CApp + self.GenerateInitValueFunction(Pcd, SkuName, DefaultStoreName)
        # From Dsc module scope setting
        CApp = CApp + self.GenerateModuleScopeValue(Pcd)
        # create the Initialize_ functions
        if self.SkuOverrideValuesEmpty(Pcd.SkuOverrideValues) or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD],
                                                                              self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
            InitByteValue, CApp = self.GenerateInitializeFunc(self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd, InitByteValue, CApp)
            InitByteValue, CApp = self.GenerateModuleScopeInitializeFunc(self.SkuIdMgr.SystemSkuId,Pcd,InitByteValue,CApp)
        else:
            for SkuName in self.SkuIdMgr.SkuOverrideOrder():
                if SkuName not in Pcd.SkuOverrideValues:
                    continue
                for DefaultStoreName in Pcd.DefaultStoreName:
                    Pcd = StructuredPcds[PcdName]
                    InitByteValue, CApp = self.GenerateInitializeFunc(SkuName, DefaultStoreName, Pcd, InitByteValue, CApp)
    # Emit PcdEntryPoint(), which calls every generated Initialize_ function.
    CApp = CApp + 'VOID\n'
    CApp = CApp + 'PcdEntryPoint(\n'
    CApp = CApp + ' VOID\n'
    CApp = CApp + ' )\n'
    CApp = CApp + '{\n'
    for Pcd in StructuredPcds.values():
        if self.SkuOverrideValuesEmpty(Pcd.SkuOverrideValues) or Pcd.Type in [self._PCD_TYPE_STRING_[MODEL_PCD_FIXED_AT_BUILD], self._PCD_TYPE_STRING_[MODEL_PCD_PATCHABLE_IN_MODULE]]:
            CApp = CApp + ' Initialize_%s_%s_%s_%s();\n' % (self.SkuIdMgr.SystemSkuId, TAB_DEFAULT_STORES_DEFAULT, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
            for ModuleGuid in Pcd.PcdFiledValueFromDscComponent:
                CApp += " Initialize_%s_%s_%s_%s();\n" % (ModuleGuid,TAB_DEFAULT_STORES_DEFAULT ,Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
        else:
            for SkuName in self.SkuIdMgr.SkuOverrideOrder():
                if SkuName not in self.SkuIdMgr.AvailableSkuIdSet:
                    continue
                for DefaultStoreName in Pcd.SkuOverrideValues[SkuName]:
                    CApp = CApp + ' Initialize_%s_%s_%s_%s();\n' % (SkuName, DefaultStoreName, Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
    CApp = CApp + '}\n'
    CApp = CApp + PcdMainCEntry + '\n'
    if not os.path.exists(self.OutputPath):
        os.makedirs(self.OutputPath)
    CAppBaseFileName = os.path.join(self.OutputPath, PcdValueInitName)
    SaveFileOnChange(CAppBaseFileName + '.c', CApp, False)
    # start generating makefile
    MakeApp = PcdMakefileHeader
    if sys.platform == "win32":
        MakeApp = MakeApp + 'APPFILE = %s\%s.exe\n' % (self.OutputPath, PcdValueInitName) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s\%s.obj %s.obj\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + 'INC = '
    else:
        MakeApp = MakeApp + PcdGccMakefile
        MakeApp = MakeApp + 'APPFILE = %s/%s\n' % (self.OutputPath, PcdValueInitName) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s/%s.o %s.o\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + \
                  'include $(MAKEROOT)/Makefiles/app.makefile\n' + 'TOOL_INCLUDE +='
    # Build the -I search list from the include paths of every cached DEC.
    IncSearchList = []
    PlatformInc = OrderedDict()
    for Cache in self._Bdb._CACHE_.values():
        if Cache.MetaFile.Ext.lower() != '.dec':
            continue
        if Cache.Includes:
            if str(Cache.MetaFile.Path) not in PlatformInc:
                PlatformInc[str(Cache.MetaFile.Path)] = []
                PlatformInc[str(Cache.MetaFile.Path)].append (os.path.dirname(Cache.MetaFile.Path))
                PlatformInc[str(Cache.MetaFile.Path)].extend (Cache.CommonIncludes)
    PcdDependDEC = []
    for Pcd in StructuredPcds.values():
        for PackageDec in Pcd.PackageDecs:
            Package = os.path.normpath(mws.join(GlobalData.gWorkspace, PackageDec))
            if not os.path.exists(Package):
                EdkLogger.error('Build', RESOURCE_NOT_AVAILABLE, "The dependent Package %s of PCD %s.%s is not exist." % (PackageDec, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
            if Package not in PcdDependDEC:
                PcdDependDEC.append(Package)
    if PlatformInc and PcdDependDEC:
        for pkg in PcdDependDEC:
            if pkg in PlatformInc:
                for inc in PlatformInc[pkg]:
                    #
                    # Get list of files in potential -I include path
                    #
                    FileList = os.listdir (str(inc))
                    #
                    # Skip -I include path if one of the include files required
                    # by PcdValueInit.c are present in the include paths from
                    # the DEC file. PcdValueInit.c must use the standard include
                    # files from the host compiler.
                    #
                    if 'stdio.h' in FileList:
                        continue
                    if 'stdlib.h' in FileList:
                        continue
                    if 'string.h' in FileList:
                        continue
                    MakeApp += '-I' + str(inc) + ' '
                    IncSearchList.append(inc)
    MakeApp = MakeApp + '\n'
    CC_FLAGS = LinuxCFLAGS
    if sys.platform == "win32":
        CC_FLAGS = WindowsCFLAGS
    # Collect the DSC [BuildOptions] CC FLAGS that apply to this
    # target/toolchain, keeping only the -D/-U (and /D, /U) options.
    BuildOptions = OrderedDict()
    for Options in self.BuildOptions:
        if Options[2] != EDKII_NAME:
            continue
        Family = Options[0]
        if Family and Family != self.ToolChainFamily:
            continue
        Target, Tag, Arch, Tool, Attr = Options[1].split("_")
        if Tool != 'CC':
            continue
        if Attr != "FLAGS":
            continue
        if Target == TAB_STAR or Target == self._Target:
            if Tag == TAB_STAR or Tag == self._Toolchain:
                if 'COMMON' not in BuildOptions:
                    BuildOptions['COMMON'] = set()
                if Arch == TAB_STAR:
                    BuildOptions['COMMON']|= self.ParseCCFlags(self.BuildOptions[Options])
                if Arch in self.SupArchList:
                    if Arch not in BuildOptions:
                        BuildOptions[Arch] = set()
                    BuildOptions[Arch] |= self.ParseCCFlags(self.BuildOptions[Options])
    if BuildOptions:
        # Fold per-arch options into COMMON: a single arch contributes all
        # of its flags, multiple arches contribute only their intersection.
        ArchBuildOptions = {arch:flags for arch,flags in BuildOptions.items() if arch != 'COMMON'}
        if len(ArchBuildOptions.keys()) == 1:
            BuildOptions['COMMON'] |= (list(ArchBuildOptions.values())[0])
        elif len(ArchBuildOptions.keys()) > 1:
            CommonBuildOptions = reduce(lambda x,y: x&y, ArchBuildOptions.values())
            BuildOptions['COMMON'] |= CommonBuildOptions
        ValueList = [item for item in BuildOptions['COMMON'] if item.startswith((r"/U","-U"))]
        ValueList.extend([item for item in BuildOptions['COMMON'] if item.startswith((r"/D", "-D"))])
        CC_FLAGS += " ".join(ValueList)
    MakeApp += CC_FLAGS
    if sys.platform == "win32":
        MakeApp = MakeApp + PcdMakefileEnd
        MakeApp = MakeApp + AppTarget % ("""\tcopy $(APPLICATION) $(APPFILE) /y """)
    else:
        MakeApp = MakeApp + AppTarget % ("""\tcp -p $(APPLICATION) $(APPFILE) """)
    MakeApp = MakeApp + '\n'
    # Record header dependencies so the tool is rebuilt when they change.
    IncludeFileFullPaths = []
    for includefile in IncludeFiles:
        for includepath in IncSearchList:
            includefullpath = os.path.join(str(includepath), includefile)
            if os.path.exists(includefullpath):
                IncludeFileFullPaths.append(os.path.normpath(includefullpath))
                break
    SearchPathList = []
    SearchPathList.append(os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "BaseTools/Source/C/Include")))
    SearchPathList.append(os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "BaseTools/Source/C/Common")))
    SearchPathList.extend(str(item) for item in IncSearchList)
    IncFileList = GetDependencyList(IncludeFileFullPaths, SearchPathList)
    for include_file in IncFileList:
        MakeApp += "$(OBJECTS) : %s\n" % include_file
    if sys.platform == "win32":
        PcdValueCommonPath = os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "Source\C\Common\PcdValueCommon.c"))
        MakeApp = MakeApp + '%s\PcdValueCommon.c : %s\n' % (self.OutputPath, PcdValueCommonPath)
        MakeApp = MakeApp + '\tcopy /y %s $@\n' % (PcdValueCommonPath)
    else:
        PcdValueCommonPath = os.path.normpath(mws.join(GlobalData.gGlobalDefines["EDK_TOOLS_PATH"], "Source/C/Common/PcdValueCommon.c"))
        MakeApp = MakeApp + '%s/PcdValueCommon.c : %s\n' % (self.OutputPath, PcdValueCommonPath)
        MakeApp = MakeApp + '\tcp -p -f %s %s/PcdValueCommon.c\n' % (PcdValueCommonPath, self.OutputPath)
    MakeFileName = os.path.join(self.OutputPath, 'Makefile')
    MakeApp += "$(OBJECTS) : %s\n" % MakeFileName
    SaveFileOnChange(MakeFileName, MakeApp, False)
    # start generating input file
    InputValueFile = os.path.join(self.OutputPath, 'Input.txt')
    OutputValueFile = os.path.join(self.OutputPath, 'Output.txt')
    SaveFileOnChange(InputValueFile, InitByteValue, False)
    Dest_PcdValueInitExe = PcdValueInitName
    if not sys.platform == "win32":
        Dest_PcdValueInitExe = os.path.join(self.OutputPath, PcdValueInitName)
    else:
        Dest_PcdValueInitExe = os.path.join(self.OutputPath, PcdValueInitName) +".exe"
    #start building the structure pcd value tool
    Messages = ''
    if sys.platform == "win32":
        MakeCommand = 'nmake -f %s' % (MakeFileName)
        returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (MakeCommand)
        Messages = StdOut
    else:
        MakeCommand = 'make -f %s' % (MakeFileName)
        returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (MakeCommand)
        Messages = StdErr
    EdkLogger.verbose ('%s\n%s\n%s' % (MakeCommand, StdOut, StdErr))
    Messages = Messages.split('\n')
    MessageGroup = []
    if returncode != 0:
        # Build failed: map compiler diagnostics in PcdValueInit.c back to
        # the originating DSC lines recorded in the generated source's
        # trailing "//" comments.
        CAppBaseFileName = os.path.join(self.OutputPath, PcdValueInitName)
        File = open (CAppBaseFileName + '.c', 'r')
        FileData = File.readlines()
        File.close()
        for Message in Messages:
            if " error" in Message or "warning" in Message:
                try:
                    # Try "file(line): ..." style first, then "file:line:".
                    FileInfo = Message.strip().split('(')
                    if len (FileInfo) > 1:
                        FileName = FileInfo [0]
                        FileLine = FileInfo [1].split (')')[0]
                    else:
                        FileInfo = Message.strip().split(':')
                        if len(FileInfo) < 2:
                            continue
                        FileName = FileInfo [0]
                        FileLine = FileInfo [1]
                except:
                    continue
                if "PcdValueInit.c" not in FileName:
                    continue
                if FileLine.isdigit():
                    error_line = FileData[int (FileLine) - 1]
                    if r"//" in error_line:
                        c_line, dsc_line = error_line.split(r"//")
                    else:
                        dsc_line = error_line
                    message_itmes = Message.split(":")
                    Index = 0
                    if "PcdValueInit.c" not in Message:
                        if not MessageGroup:
                            MessageGroup.append(Message)
                        break
                    else:
                        # Replace the file reference with the DSC source info.
                        for item in message_itmes:
                            if "PcdValueInit.c" in item:
                                Index = message_itmes.index(item)
                                message_itmes[Index] = dsc_line.strip()
                                break
                        MessageGroup.append(":".join(message_itmes[Index:]).strip())
                        continue
                else:
                    MessageGroup.append(Message)
        if MessageGroup:
            EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "\n".join(MessageGroup) )
        else:
            EdkLogger.error('Build', COMMAND_FAILURE, 'Can not execute command: %s\n%s\n%s' % (MakeCommand, StdOut, StdErr))
    #start executing the structure pcd value tool
    if DscBuildData.NeedUpdateOutput(OutputValueFile, Dest_PcdValueInitExe, InputValueFile):
        Command = Dest_PcdValueInitExe + ' -i %s -o %s' % (InputValueFile, OutputValueFile)
        returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (Command)
        EdkLogger.verbose ('%s\n%s\n%s' % (Command, StdOut, StdErr))
        if returncode != 0:
            EdkLogger.warn('Build', COMMAND_FAILURE, 'Can not collect output from command: %s\n%s\n%s\n' % (Command, StdOut, StdErr))
    #start update structure pcd final value
    File = open (OutputValueFile, 'r')
    FileBuffer = File.readlines()
    File.close()
    StructurePcdSet = []
    for Pcd in FileBuffer:
        PcdValue = Pcd.split ('|')
        PcdInfo = PcdValue[0].split ('.')
        StructurePcdSet.append((PcdInfo[0], PcdInfo[1], PcdInfo[2], PcdInfo[3], PcdValue[2].strip()))
    return StructurePcdSet
@staticmethod
def NeedUpdateOutput(OutputFile, ValueCFile, StructureInput):
if not os.path.exists(OutputFile):
return True
if os.stat(OutputFile).st_mtime <= os.stat(ValueCFile).st_mtime:
return True
if os.stat(OutputFile).st_mtime <= os.stat(StructureInput).st_mtime:
return True
return False
## Retrieve dynamic PCD settings
#
# @param Type PCD type
#
# @retval a dict object contains settings of given PCD type
#
def _GetDynamicPcd(self, Type):
    """Collect the dynamic PCD settings of *Type* for self._Arch.

    Builds one PcdClassObject per (PcdCName, TokenSpaceGuid), merging the
    per-SKU settings found in the DSC, validating MaxDatumSize, filling
    missing defaults from the DEC, and normalizing the DEFAULT/COMMON SKU
    entries before applying FilterSkuSettings.
    """
    Pcds = OrderedDict()
    #
    # tdict is a special dict kind of type, used for selecting correct
    # PCD settings for certain ARCH and SKU
    #
    PcdDict = tdict(True, 4)
    PcdList = []
    # Find out all possible PCD candidates for self._Arch
    RecordList = self._RawData[Type, self._Arch]
    AvailableSkuIdSet = copy.copy(self.SkuIds)
    for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
        SkuName = SkuName.upper()
        SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
        if SkuName not in AvailableSkuIdSet:
            EdkLogger.error('build', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
                            File=self.MetaFile, Line=Dummy5)
        # Skip structure-PCD field entries ("." in guid, "[" in name) and
        # exact duplicates; keep the DSC line number (Dummy5) for messages.
        if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, Dummy5) not in PcdList:
            PcdList.append((PcdCName, TokenSpaceGuid, SkuName, Dummy5))
        PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
    # Remove redundant PCD candidates, per the ARCH and SKU
    for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
        Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
        if Setting is None:
            continue
        PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
        if MaxDatumSize:
            if int(MaxDatumSize, 0) > 0xFFFF:
                EdkLogger.error('build', FORMAT_INVALID, "The size value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)),
                                File=self.MetaFile, Line=Dummy4)
            if int(MaxDatumSize, 0) < 0:
                EdkLogger.error('build', FORMAT_INVALID, "The size value can't be set to negative value for %s." % ".".join((TokenSpaceGuid, PcdCName)),
                                File=self.MetaFile, Line=Dummy4)
        SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], '', '', '', '', '', PcdValue)
        if (PcdCName, TokenSpaceGuid) in Pcds:
            # PCD already seen for another SKU: add the SkuInfo and keep
            # the largest MaxDatumSize across all SKUs.
            pcdObject = Pcds[PcdCName, TokenSpaceGuid]
            pcdObject.SkuInfoList[SkuName] = SkuInfo
            if MaxDatumSize.strip():
                CurrentMaxSize = int(MaxDatumSize.strip(), 0)
            else:
                CurrentMaxSize = 0
            if pcdObject.MaxDatumSize:
                PcdMaxSize = int(pcdObject.MaxDatumSize, 0)
            else:
                PcdMaxSize = 0
            if CurrentMaxSize > PcdMaxSize:
                pcdObject.MaxDatumSize = str(CurrentMaxSize)
        else:
            Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
                                                PcdCName,
                                                TokenSpaceGuid,
                                                self._PCD_TYPE_STRING_[Type],
                                                DatumType,
                                                PcdValue,
                                                '',
                                                MaxDatumSize,
                                                OrderedDict({SkuName : SkuInfo}),
                                                False,
                                                None,
                                                IsDsc=True)
        # Track the raw DSC value and its (file, line) origin per SKU.
        if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
            Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
            Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
        Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][TAB_DEFAULT_STORES_DEFAULT] = PcdValue
        Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][TAB_DEFAULT_STORES_DEFAULT] = (self.MetaFile.File,Dummy4)
    for pcd in Pcds.values():
        pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
        # Only fix the value while no value provided in DSC file.
        for sku in pcd.SkuInfoList.values():
            if not sku.DefaultValue:
                sku.DefaultValue = pcdDecObject.DefaultValue
        # Normalize DEFAULT/COMMON entries: ensure a DEFAULT SkuInfo exists
        # and drop the redundant COMMON one.
        if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
            valuefromDec = pcdDecObject.DefaultValue
            SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', '', '', '', '', '', valuefromDec)
            pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
        elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
            pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
            del pcd.SkuInfoList[TAB_COMMON]
        elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
            del pcd.SkuInfoList[TAB_COMMON]
    list(map(self.FilterSkuSettings, Pcds.values()))
    return Pcds
def FilterSkuSettings(self, PcdObj):
    """Collapse a PCD's SkuInfoList to a single DEFAULT entry according to
    the SKU usage mode of this platform.

    In SINGLE mode the system SKU's settings are kept (borrowing DEFAULT's
    when the system SKU has none) and relabeled as DEFAULT with SkuId '0';
    in DEFAULT mode only the DEFAULT entry is retained.  The object is
    modified in place and also returned.
    """
    mgr = self.SkuIdMgr
    if mgr.SkuUsageType == mgr.SINGLE:
        sku_infos = PcdObj.SkuInfoList
        # Reuse the DEFAULT settings when the system SKU has no entry.
        if TAB_DEFAULT in sku_infos and mgr.SystemSkuId not in sku_infos:
            sku_infos[mgr.SystemSkuId] = sku_infos[TAB_DEFAULT]
        chosen = sku_infos[mgr.SystemSkuId]
        PcdObj.SkuInfoList = {TAB_DEFAULT: chosen}
        chosen.SkuIdName = TAB_DEFAULT
        chosen.SkuId = '0'
    elif mgr.SkuUsageType == mgr.DEFAULT:
        PcdObj.SkuInfoList = {TAB_DEFAULT: PcdObj.SkuInfoList[TAB_DEFAULT]}
    return PcdObj
@staticmethod
def CompareVarAttr(Attr1, Attr2):
if not Attr1 or not Attr2: # for empty string
return True
Attr1s = [attr.strip() for attr in Attr1.split(",")]
Attr1Set = set(Attr1s)
Attr2s = [attr.strip() for attr in Attr2.split(",")]
Attr2Set = set(Attr2s)
if Attr2Set == Attr1Set:
return True
else:
return False
def CompletePcdValues(self, PcdSet):
    """Fill in the missing SKU and default-store entries of every dynamic
    PCD so each one carries a complete value matrix.

    Non-dynamic PCDs pass through unchanged.  For HII PCDs, each SKU's
    missing default stores inherit from that SKU's minimal-priority store
    (per the [DefaultStores] ordering) and HiiDefaultValue is set from it.
    SKUs absent from a PCD inherit a deep copy of the settings of the next
    SKU up the inheritance chain.

    @param PcdSet  dict of PCD objects keyed by (PcdCName, TokenSpaceGuid)

    @retval OrderedDict of completed PCD objects with the same keys
    """
    Pcds = OrderedDict()
    DefaultStoreObj = DefaultStore(self._GetDefaultStores())
    SkuIds = {skuname:skuid for skuname, skuid in self.SkuIdMgr.AvailableSkuIdSet.items() if skuname != TAB_COMMON}
    # Union of every default-store name used by any SKU of any PCD.
    DefaultStores = set(storename for pcdobj in PcdSet.values() for skuobj in pcdobj.SkuInfoList.values() for storename in skuobj.DefaultStoreDict)
    for PcdCName, TokenSpaceGuid in PcdSet:
        PcdObj = PcdSet[(PcdCName, TokenSpaceGuid)]
        if PcdObj.Type not in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_DEFAULT],
                               self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII],
                               self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_VPD],
                               self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_DEFAULT],
                               self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII],
                               self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_VPD]]:
            # Not a dynamic PCD: nothing to complete.
            Pcds[PcdCName, TokenSpaceGuid]= PcdObj
            continue
        PcdType = PcdObj.Type
        if PcdType in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
            for skuid in PcdObj.SkuInfoList:
                skuobj = PcdObj.SkuInfoList[skuid]
                mindefaultstorename = DefaultStoreObj.GetMin(set(defaultstorename for defaultstorename in skuobj.DefaultStoreDict))
                for defaultstorename in DefaultStores:
                    if defaultstorename not in skuobj.DefaultStoreDict:
                        skuobj.DefaultStoreDict[defaultstorename] = skuobj.DefaultStoreDict[mindefaultstorename]
                skuobj.HiiDefaultValue = skuobj.DefaultStoreDict[mindefaultstorename]
        for skuname, skuid in SkuIds.items():
            if skuname not in PcdObj.SkuInfoList:
                # Walk up the SKU chain until an SKU with settings is found,
                # then clone its settings for this SKU.
                nextskuid = self.SkuIdMgr.GetNextSkuId(skuname)
                while nextskuid not in PcdObj.SkuInfoList:
                    nextskuid = self.SkuIdMgr.GetNextSkuId(nextskuid)
                PcdObj.SkuInfoList[skuname] = copy.deepcopy(PcdObj.SkuInfoList[nextskuid])
                PcdObj.SkuInfoList[skuname].SkuId = skuid
                PcdObj.SkuInfoList[skuname].SkuIdName = skuname
        if PcdType in [self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_HII], self._PCD_TYPE_STRING_[MODEL_PCD_DYNAMIC_EX_HII]]:
            PcdObj.DefaultValue = list(PcdObj.SkuInfoList.values())[0].HiiDefaultValue if self.SkuIdMgr.SkuUsageType == self.SkuIdMgr.SINGLE else PcdObj.SkuInfoList[TAB_DEFAULT].HiiDefaultValue
        Pcds[PcdCName, TokenSpaceGuid]= PcdObj
    return Pcds
## Retrieve dynamic HII PCD settings
#
# Builds the PCD objects for HII-backed dynamic PCDs declared in this DSC:
# first pass validates every raw record and indexes its setting per
# (arch, SKU, PCD, default store); second pass backfills missing values from
# the DEC declarations and normalizes string defaults to byte arrays.
#
# @param Type PCD type (a MODEL_PCD_DYNAMIC(_EX)_HII model constant)
#
# @retval a dict object contains settings of given PCD type, keyed by
#         (PcdCName, TokenSpaceGuid)
#
def _GetDynamicHiiPcd(self, Type):
    # (VariableName, VariableGuid) -> attribute string, used to detect
    # conflicting attributes across SKUs of the same EFI variable.
    VariableAttrs = {}
    Pcds = OrderedDict()
    UserDefinedDefaultStores = []
    #
    # tdict is a special dict kind of type, used for selecting correct
    # PCD settings for certain ARCH and SKU
    #
    PcdDict = tdict(True, 5)
    PcdList = []
    RecordList = self._RawData[Type, self._Arch]
    # Find out all possible PCD candidates for self._Arch
    AvailableSkuIdSet = copy.copy(self.SkuIds)
    DefaultStoresDefine = self._GetDefaultStores()
    # First pass: validate each raw record and remember its setting.
    for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, DefaultStore, Dummy4, Dummy5 in RecordList:
        SkuName = SkuName.upper()
        SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
        DefaultStore = DefaultStore.upper()
        if DefaultStore == TAB_COMMON:
            DefaultStore = TAB_DEFAULT_STORES_DEFAULT
        else:
            # The end user defined [DefaultStores] and [SKUID_IDENTIFIER.Manufacturing] in the DSC
            UserDefinedDefaultStores.append((PcdCName, TokenSpaceGuid))
        if SkuName not in AvailableSkuIdSet:
            EdkLogger.error('build', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
                            File=self.MetaFile, Line=Dummy5)
        if DefaultStore not in DefaultStoresDefine:
            EdkLogger.error('build', PARAMETER_INVALID, 'DefaultStores %s is not defined in [DefaultStores] section' % DefaultStore,
                            File=self.MetaFile, Line=Dummy5)
        if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy5) not in PcdList:
            PcdList.append((PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy5))
        PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid, DefaultStore] = Setting
    # Remove redundant PCD candidates, per the ARCH and SKU
    for index, (PcdCName, TokenSpaceGuid, SkuName, DefaultStore, Dummy4) in enumerate(PcdList):
        Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid, DefaultStore]
        if Setting is None:
            continue
        VariableName, VariableGuid, VariableOffset, DefaultValue, VarAttribute = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
        rt, Msg = VariableAttributes.ValidateVarAttributes(VarAttribute)
        if not rt:
            EdkLogger.error("build", PCD_VARIABLE_ATTRIBUTES_ERROR, "Variable attributes settings for %s is incorrect.\n %s" % (".".join((TokenSpaceGuid, PcdCName)), Msg),
                            ExtraData="[%s]" % VarAttribute)
        # The variable offset must fit in a UINT16. It may be written in
        # decimal, in hex, or as an "A.B" structure-field reference.
        ExceedMax = False
        FormatCorrect = True
        if VariableOffset.isdigit():
            if int(VariableOffset, 10) > 0xFFFF:
                ExceedMax = True
        elif variablePattern.match(VariableOffset):
            if int(VariableOffset, 16) > 0xFFFF:
                ExceedMax = True
        # For Offset written in "A.B"
        elif VariableOffset.find('.') > -1:
            VariableOffsetList = VariableOffset.split(".")
            if not (len(VariableOffsetList) == 2
                    and IsValidWord(VariableOffsetList[0])
                    and IsValidWord(VariableOffsetList[1])):
                FormatCorrect = False
        else:
            FormatCorrect = False
        if not FormatCorrect:
            EdkLogger.error('Build', FORMAT_INVALID, "Invalid syntax or format of the variable offset value is incorrect for %s." % ".".join((TokenSpaceGuid, PcdCName)))
        if ExceedMax:
            EdkLogger.error('Build', OPTION_VALUE_INVALID, "The variable offset value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)))
        # Every SKU mapping to the same EFI variable must agree on attributes.
        if (VariableName, VariableGuid) not in VariableAttrs:
            VariableAttrs[(VariableName, VariableGuid)] = VarAttribute
        else:
            if not DscBuildData.CompareVarAttr(VariableAttrs[(VariableName, VariableGuid)], VarAttribute):
                EdkLogger.error('Build', PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR, "The variable %s.%s for DynamicHii PCDs has conflicting attributes [%s] and [%s] " % (VariableGuid, VariableName, VarAttribute, VariableAttrs[(VariableName, VariableGuid)]))
        pcdDecObject = self._DecPcds[PcdCName, TokenSpaceGuid]
        if (PcdCName, TokenSpaceGuid) in Pcds:
            # PCD already seen: merge this record's (SKU, store) value in.
            pcdObject = Pcds[PcdCName, TokenSpaceGuid]
            if SkuName in pcdObject.SkuInfoList:
                Skuitem = pcdObject.SkuInfoList[SkuName]
                Skuitem.DefaultStoreDict.update({DefaultStore: DefaultValue})
            else:
                SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute, DefaultStore={DefaultStore: DefaultValue})
                pcdObject.SkuInfoList[SkuName] = SkuInfo
        else:
            # First record for this PCD: create the object with validation
            # metadata carried over from the DEC declaration.
            SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute=VarAttribute, DefaultStore={DefaultStore: DefaultValue})
            PcdClassObj = PcdClassObject(
                PcdCName,
                TokenSpaceGuid,
                self._PCD_TYPE_STRING_[Type],
                '',
                DefaultValue,
                '',
                '',
                OrderedDict({SkuName: SkuInfo}),
                False,
                None,
                pcdDecObject.validateranges,
                pcdDecObject.validlists,
                pcdDecObject.expressions,
                IsDsc=True)
            if (PcdCName, TokenSpaceGuid) in UserDefinedDefaultStores:
                PcdClassObj.UserDefinedDefaultStoresFlag = True
            Pcds[PcdCName, TokenSpaceGuid] = PcdClassObj
            Pcds[PcdCName, TokenSpaceGuid].CustomAttribute['DscPosition'] = index
        # Record the raw DSC value and its source location per (SKU, store).
        if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
            Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
            Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
        Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][DefaultStore] = DefaultValue
        Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][DefaultStore] = (self.MetaFile.File, Dummy4)
    # Second pass: backfill from DEC declarations and normalize values.
    for pcd in Pcds.values():
        pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
        pcd.DatumType = pcdDecObject.DatumType
        # Only fix the value while no value provided in DSC file.
        for sku in pcd.SkuInfoList.values():
            if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue is None):
                sku.HiiDefaultValue = pcdDecObject.DefaultValue
                for default_store in sku.DefaultStoreDict:
                    sku.DefaultStoreDict[default_store] = pcdDecObject.DefaultValue
        pcd.DefaultValue = pcdDecObject.DefaultValue
        if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
            # No DEFAULT SKU given: synthesize one from any existing SKU's
            # variable info with the DEC default value.
            SkuInfoObj = list(pcd.SkuInfoList.values())[0]
            valuefromDec = pcdDecObject.DefaultValue
            # NOTE(review): DefaultStore here is the loop variable left over
            # from the first pass, not a store belonging to this PCD --
            # confirm whether TAB_DEFAULT_STORES_DEFAULT was intended.
            SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec, VariableAttribute=SkuInfoObj.VariableAttribute, DefaultStore={DefaultStore: valuefromDec})
            pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
        elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
            pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
            del pcd.SkuInfoList[TAB_COMMON]
        elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
            del pcd.SkuInfoList[TAB_COMMON]
        if pcd.MaxDatumSize.strip():
            MaxSize = int(pcd.MaxDatumSize, 0)
        else:
            MaxSize = 0
        if pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
            # Non-numeric PCDs: convert every default to a byte-array string
            # and grow MaxDatumSize to the largest encountered.
            for (_, skuobj) in pcd.SkuInfoList.items():
                datalen = 0
                skuobj.HiiDefaultValue = StringToArray(skuobj.HiiDefaultValue)
                datalen = len(skuobj.HiiDefaultValue.split(","))
                if datalen > MaxSize:
                    MaxSize = datalen
                for defaultst in skuobj.DefaultStoreDict:
                    skuobj.DefaultStoreDict[defaultst] = StringToArray(skuobj.DefaultStoreDict[defaultst])
            pcd.DefaultValue = StringToArray(pcd.DefaultValue)
        pcd.MaxDatumSize = str(MaxSize)
    rt, invalidhii = DscBuildData.CheckVariableNameAssignment(Pcds)
    if not rt:
        invalidpcd = ",".join(invalidhii)
        EdkLogger.error('build', PCD_VARIABLE_INFO_ERROR, Message='The same HII PCD must map to the same EFI variable for all SKUs', File=self.MetaFile, ExtraData=invalidpcd)
    list(map(self.FilterSkuSettings, Pcds.values()))
    return Pcds
@staticmethod
def CheckVariableNameAssignment(Pcds):
invalidhii = []
for pcdname in Pcds:
pcd = Pcds[pcdname]
varnameset = set(sku.VariableName for (skuid, sku) in pcd.SkuInfoList.items())
if len(varnameset) > 1:
invalidhii.append(".".join((pcdname[1], pcdname[0])))
if len(invalidhii):
return False, invalidhii
else:
return True, []
## Retrieve dynamic VPD PCD settings
#
# Collects VPD-backed dynamic PCD settings per SKU, validates the optional
# MaxDatumSize, backfills missing SKU values from the DEC declaration, and
# harmonizes mixed Unicode/ASCII VOID* defaults.
#
# @param Type PCD type
#
# @retval a dict object contains settings of given PCD type, keyed by
#         (PcdCName, TokenSpaceGuid)
#
def _GetDynamicVpdPcd(self, Type):
    Pcds = OrderedDict()
    #
    # tdict is a special dict kind of type, used for selecting correct
    # PCD settings for certain ARCH and SKU
    #
    PcdDict = tdict(True, 4)
    PcdList = []
    # Find out all possible PCD candidates for self._Arch
    RecordList = self._RawData[Type, self._Arch]
    AvailableSkuIdSet = copy.copy(self.SkuIds)
    # First pass: validate the SKU of each raw record and index its setting.
    for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4, Dummy5 in RecordList:
        SkuName = SkuName.upper()
        SkuName = TAB_DEFAULT if SkuName == TAB_COMMON else SkuName
        if SkuName not in AvailableSkuIdSet:
            EdkLogger.error('build', PARAMETER_INVALID, 'Sku %s is not defined in [SkuIds] section' % SkuName,
                            File=self.MetaFile, Line=Dummy5)
        if "." not in TokenSpaceGuid and "[" not in PcdCName and (PcdCName, TokenSpaceGuid, SkuName, Dummy5) not in PcdList:
            PcdList.append((PcdCName, TokenSpaceGuid, SkuName, Dummy5))
        PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
    # Remove redundant PCD candidates, per the ARCH and SKU
    for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
        Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
        if Setting is None:
            continue
        #
        # For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
        # For the Integer & Boolean type, the optional data can only be InitialValue.
        # At this point, we put all the data into the PcdClassObject for we don't know the PCD's datumtype
        # until the DEC parser has been called.
        #
        VpdOffset, MaxDatumSize, InitialValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
        if MaxDatumSize:
            # MaxDatumSize, when present, must be a non-negative UINT16.
            if int(MaxDatumSize, 0) > 0xFFFF:
                EdkLogger.error('build', FORMAT_INVALID, "The size value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid, PcdCName)),
                                File=self.MetaFile, Line=Dummy4)
            if int(MaxDatumSize, 0) < 0:
                EdkLogger.error('build', FORMAT_INVALID, "The size value can't be set to negative value for %s." % ".".join((TokenSpaceGuid, PcdCName)),
                                File=self.MetaFile, Line=Dummy4)
        SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName][0], '', '', '', '', VpdOffset, InitialValue)
        if (PcdCName, TokenSpaceGuid) in Pcds:
            # PCD already seen: add this SKU and keep the largest datum size.
            pcdObject = Pcds[PcdCName, TokenSpaceGuid]
            pcdObject.SkuInfoList[SkuName] = SkuInfo
            if MaxDatumSize.strip():
                CurrentMaxSize = int(MaxDatumSize.strip(), 0)
            else:
                CurrentMaxSize = 0
            if pcdObject.MaxDatumSize:
                PcdMaxSize = int(pcdObject.MaxDatumSize, 0)
            else:
                PcdMaxSize = 0
            if CurrentMaxSize > PcdMaxSize:
                pcdObject.MaxDatumSize = str(CurrentMaxSize)
        else:
            Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
                PcdCName,
                TokenSpaceGuid,
                self._PCD_TYPE_STRING_[Type],
                '',
                InitialValue,
                '',
                MaxDatumSize,
                OrderedDict({SkuName: SkuInfo}),
                False,
                None,
                IsDsc=True)
        # Record the raw DSC value and its source location per SKU.
        if SkuName not in Pcds[PcdCName, TokenSpaceGuid].DscRawValue:
            Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName] = {}
            Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName] = {}
        Pcds[PcdCName, TokenSpaceGuid].DscRawValue[SkuName][TAB_DEFAULT_STORES_DEFAULT] = InitialValue
        Pcds[PcdCName, TokenSpaceGuid].DscRawValueInfo[SkuName][TAB_DEFAULT_STORES_DEFAULT] = (self.MetaFile.File, Dummy4)
    # Second pass: backfill from the DEC declaration and ensure a DEFAULT SKU.
    for pcd in Pcds.values():
        pcdDecObject = self._DecPcds[pcd.TokenCName, pcd.TokenSpaceGuidCName]
        pcd.DatumType = pcdDecObject.DatumType
        # Only fix the value while no value provided in DSC file.
        for sku in pcd.SkuInfoList.values():
            if not sku.DefaultValue:
                sku.DefaultValue = pcdDecObject.DefaultValue
        if TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON not in pcd.SkuInfoList:
            # No DEFAULT SKU given: clone the VPD offset of any existing SKU
            # with the DEC default value.
            SkuInfoObj = list(pcd.SkuInfoList.values())[0]
            valuefromDec = pcdDecObject.DefaultValue
            SkuInfo = SkuInfoClass(TAB_DEFAULT, '0', '', '', '', '', SkuInfoObj.VpdOffset, valuefromDec)
            pcd.SkuInfoList[TAB_DEFAULT] = SkuInfo
        elif TAB_DEFAULT not in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
            pcd.SkuInfoList[TAB_DEFAULT] = pcd.SkuInfoList[TAB_COMMON]
            del pcd.SkuInfoList[TAB_COMMON]
        elif TAB_DEFAULT in pcd.SkuInfoList and TAB_COMMON in pcd.SkuInfoList:
            del pcd.SkuInfoList[TAB_COMMON]
    # For the same one VOID* pcd, if the default value type of one SKU is "Unicode string",
    # the other SKUs are "OtherVOID*" (ASCII string or byte array), then convert
    # "Unicode string" to "byte array" so all SKUs share one representation.
    for pcd in Pcds.values():
        PcdValueTypeSet = set()
        for sku in pcd.SkuInfoList.values():
            PcdValueTypeSet.add("UnicodeString" if sku.DefaultValue.startswith(('L"', "L'")) else "OtherVOID*")
        if len(PcdValueTypeSet) > 1:
            for sku in pcd.SkuInfoList.values():
                sku.DefaultValue = StringToArray(sku.DefaultValue) if sku.DefaultValue.startswith(('L"', "L'")) else sku.DefaultValue
    list(map(self.FilterSkuSettings, Pcds.values()))
    return Pcds
## Add external modules
#
# The external modules are mostly those listed in FDF file, which don't
# need "build".
#
# @param FilePath The path of module description file
#
def AddModule(self, FilePath):
    """Register an externally-built module exactly once (idempotent)."""
    NormalizedPath = NormPath(FilePath)
    if NormalizedPath in self.Modules:
        return
    NewModule = ModuleBuildClassObject()
    NewModule.MetaFile = NormalizedPath
    self.Modules.append(NewModule)
@property
def ToolChainFamily(self):
    ## Resolve the compiler family (e.g. MSFT/GCC) of self._Toolchain from
    #  the tools_def configuration; falls back to TAB_COMPILER_MSFT whenever
    #  the configuration files or the FAMILY entry cannot be found.
    self._ToolChainFamily = TAB_COMPILER_MSFT
    TargetObj = TargetTxtDict()
    TargetTxt = TargetObj.Target
    BuildConfigurationFile = os.path.normpath(os.path.join(GlobalData.gConfDirectory, gDefaultTargetTxtFile))
    if os.path.isfile(BuildConfigurationFile) == True:
        ToolDefinitionFile = TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
        if ToolDefinitionFile == '':
            # target.txt names no tools_def file: fall back to the default one.
            ToolDefinitionFile = os.path.normpath(mws.join(self.WorkspaceDir, 'Conf', gDefaultToolsDefFile))
        if os.path.isfile(ToolDefinitionFile) == True:
            # NOTE(review): reads WORKSPACE from the environment rather than
            # using self.WorkspaceDir -- confirm both always agree.
            ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
            ToolDefinition = ToolDefObj.ToolDef.ToolsDefTxtDatabase
            if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
               or self._Toolchain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
               or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self._Toolchain]:
                # No usable FAMILY entry for this toolchain: keep MSFT default.
                self._ToolChainFamily = TAB_COMPILER_MSFT
            else:
                self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self._Toolchain]
    return self._ToolChainFamily
## Add external PCDs
#
# The external PCDs are mostly those listed in FDF file to specify address
# or offset information.
#
# @param Name  Name of the PCD
# @param Guid  Token space guid of the PCD
# @param Value Value of the PCD
#
def AddPcd(self, Name, Guid, Value):
    """Create the PCD entry on first sight, then (re)assign its value."""
    Key = (Name, Guid)
    if Key not in self.Pcds:
        self.Pcds[Key] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
    self.Pcds[Key].DefaultValue = Value
@property
def DecPcds(self):
    ## All PCDs declared by the DEC packages visible to this platform build.
    #
    # Lazily computed once: unions the packages of every FDF-listed INF
    # (skipping modules already in the platform) plus the platform's own
    # Packages, then asks GetDeclaredPcd for their declared PCDs. The
    # collected GUID dictionary is merged into GlobalData's platform PCDs.
    if self._DecPcds is None:
        FdfInfList = []
        if GlobalData.gFdfParser:
            FdfInfList = GlobalData.gFdfParser.Profile.InfList
        PkgSet = set()
        for Inf in FdfInfList:
            ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch)
            if ModuleFile in self._Modules:
                # Module is already part of the platform build; skip it.
                continue
            ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
            PkgSet.update(ModuleData.Packages)
        if self.Packages:
            PkgSet.update(self.Packages)
        self._DecPcds, self._GuidDict = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain, PkgSet)
        self._GuidDict.update(GlobalData.gPlatformPcds)
    return self._DecPcds
| edk2-master | BaseTools/Source/Python/Workspace/DscBuildData.py |
## @file
# This file is used to create/update/query/erase a meta file table
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import uuid
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import FORMAT_INVALID
from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \
MODEL_FILE_OTHERS
from Common.DataType import *
class MetaFileTable():
    ## Base class for in-memory meta-file tables.
    #  Record IDs are built as FileId * 10**8 + offset, so the file ID is
    #  recoverable from the high digits of any record ID.
    _ID_STEP_ = 1
    _ID_MAX_ = 99999999

    def __init__(self, DB, MetaFile, FileType, Temporary, FromItem=None):
        """Register MetaFile in DB.TblFile and derive this table's identity."""
        self.MetaFile = MetaFile
        self.TableName = ""
        self.DB = DB
        self._NumpyTab = None          # reserved; not populated in this class
        self.CurrentContent = []       # in-memory rows, appended by subclasses
        FileRecord = [MetaFile.Name,
                      MetaFile.Ext,
                      MetaFile.Dir,
                      MetaFile.Path,
                      FileType,
                      MetaFile.TimeStamp,
                      FromItem]
        DB.TblFile.append(FileRecord)
        self.FileId = len(DB.TblFile)
        self.ID = self.FileId * 10**8
        if Temporary:
            # Temporary tables get a unique suffix so they never collide.
            self.TableName = "_%s_%s_%s" % (FileType, self.FileId, uuid.uuid4().hex)
        else:
            self.TableName = "_%s_%s" % (FileType, self.FileId)

    def IsIntegrity(self):
        """Return True when the last cached row is the end-flag marker."""
        try:
            # Touch the timestamp first: any failure here (e.g. file gone)
            # is reported as a non-integral table.
            self.MetaFile.TimeStamp
            Content = self.CurrentContent
            if not Content:
                return False
            return Content[-1][0] < 0
        except Exception as Exc:
            EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc))
            return False

    def SetEndFlag(self):
        """Append the subclass-defined end-of-table marker row."""
        self.CurrentContent.append(self._DUMMY_)

    def GetAll(self):
        """Return every enabled, non-marker row currently cached."""
        return [Row for Row in self.CurrentContent if Row[0] >= 0 and Row[-1] >= 0]
## Python class representation of table storing module (INF) data
class ModuleTable(MetaFileTable):
    _COLUMN_ = '''
    ID REAL PRIMARY KEY,
    Model INTEGER NOT NULL,
    Value1 TEXT NOT NULL,
    Value2 TEXT,
    Value3 TEXT,
    Scope1 TEXT,
    Scope2 TEXT,
    BelongsToItem REAL NOT NULL,
    StartLine INTEGER NOT NULL,
    StartColumn INTEGER NOT NULL,
    EndLine INTEGER NOT NULL,
    EndColumn INTEGER NOT NULL,
    Enabled INTEGER DEFAULT 0
    '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1]

    def __init__(self, Db, MetaFile, Temporary):
        """Bind this table to an INF meta file."""
        MetaFileTable.__init__(self, Db, MetaFile, MODEL_FILE_INF, Temporary)

    ## Insert a record parsed from an INF file
    #
    # @param Model         Model constant of the item
    # @param Value1..3     Item values (meaning depends on Model)
    # @param Scope1        Arch of the item
    # @param Scope2        Platform of the item
    # @param BelongsToItem ID of the parent item, -1 for top level
    # @param StartLine/StartColumn/EndLine/EndColumn  Source location
    # @param Enabled       Whether the item is enabled
    #
    # @retval ID assigned to the new record
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON,
               BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
        Value1 = Value1.strip()
        Value2 = Value2.strip()
        Value3 = Value3.strip()
        Scope1 = Scope1.strip()
        Scope2 = Scope2.strip()
        self.ID += self._ID_STEP_
        if self.ID >= (MODEL_FILE_INF + self._ID_MAX_):
            # Wrap around once this file's ID budget is exhausted.
            self.ID = MODEL_FILE_INF + self._ID_STEP_
        self.CurrentContent.append([
            self.ID,
            Model,
            Value1,
            Value2,
            Value3,
            Scope1,
            Scope2,
            BelongsToItem,
            StartLine,
            StartColumn,
            EndLine,
            EndColumn,
            Enabled,
        ])
        return self.ID

    ## Query cached records
    #
    # @param Model         Model constant to select
    # @param Arch          Optional arch filter (COMMON always matches)
    # @param Platform      Optional platform filter (COMMON/DEFAULT always match)
    # @param BelongsToItem Optional parent-item filter
    #
    # @retval list of [Value1, Value2, Value3, Scope1, Scope2, ID, StartLine]
    #
    def Query(self, Model, Arch=None, Platform=None, BelongsToItem=None):
        Rows = [Row for Row in self.CurrentContent if Row[1] == Model and Row[-1] >= 0]
        if Arch is not None and Arch != TAB_ARCH_COMMON:
            Rows = [Row for Row in Rows if Row[5] in ('COMMON', Arch)]
        if Platform is not None and Platform != TAB_COMMON:
            Rows = [Row for Row in Rows if Row[6] in ('COMMON', 'DEFAULT', Platform)]
        if BelongsToItem is not None:
            Rows = [Row for Row in Rows if Row[7] == BelongsToItem]
        return [[Row[2], Row[3], Row[4], Row[5], Row[6], Row[0], Row[8]] for Row in Rows]
## Python class representation of table storing package (DEC) data
class PackageTable(MetaFileTable):
    _COLUMN_ = '''
    ID REAL PRIMARY KEY,
    Model INTEGER NOT NULL,
    Value1 TEXT NOT NULL,
    Value2 TEXT,
    Value3 TEXT,
    Scope1 TEXT,
    Scope2 TEXT,
    BelongsToItem REAL NOT NULL,
    StartLine INTEGER NOT NULL,
    StartColumn INTEGER NOT NULL,
    EndLine INTEGER NOT NULL,
    EndColumn INTEGER NOT NULL,
    Enabled INTEGER DEFAULT 0
    '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1]

    ## Constructor
    def __init__(self, Cursor, MetaFile, Temporary):
        MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DEC, Temporary)

    ## Insert table
    #
    # Insert a record into table Dec
    #
    # @param Model: Model of a Dec item
    # @param Value1: Value1 of a Dec item
    # @param Value2: Value2 of a Dec item
    # @param Value3: Value3 of a Dec item
    # @param Scope1: Arch of a Dec item
    # @param Scope2: Module type of a Dec item
    # @param BelongsToItem: The item belongs to which another item
    # @param StartLine: StartLine of a Dec item
    # @param StartColumn: StartColumn of a Dec item
    # @param EndLine: EndLine of a Dec item
    # @param EndColumn: EndColumn of a Dec item
    # @param Enabled: If this item enabled
    #
    # NOTE(review): unlike ModuleTable.Insert, there is no wrap-around check
    # against _ID_MAX_ here -- confirm whether that is intentional.
    def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON,
               BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
        (Value1, Value2, Value3, Scope1, Scope2) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip())
        self.ID = self.ID + self._ID_STEP_
        row = [self.ID,
               Model,
               Value1,
               Value2,
               Value3,
               Scope1,
               Scope2,
               BelongsToItem,
               StartLine,
               StartColumn,
               EndLine,
               EndColumn,
               Enabled
               ]
        self.CurrentContent.append(row)
        return self.ID

    ## Query table
    #
    # @param Model: The Model of Record
    # @param Arch:  The Arch attribute of Record (COMMON always matches)
    #
    # @retval: A recordSet of all found records, each as
    #          [Value1, Value2, Value3, Scope1, Scope2, ID, StartLine]
    #
    def Query(self, Model, Arch=None):
        QueryTab = self.CurrentContent
        result = [item for item in QueryTab if item[1] == Model and item[-1] >= 0]
        if Arch is not None and Arch != TAB_ARCH_COMMON:
            ArchList = set(['COMMON'])
            ArchList.add(Arch)
            result = [item for item in result if item[5] in ArchList]
        return [[r[2], r[3], r[4], r[5], r[6], r[0], r[8]] for r in result]

    ## Collect @ValidRange/@ValidList/@Expression annotations for one PCD
    #
    # Scans cached comment records whose Value2/Value3 match the PCD (the
    # comment text is in Value1, the line number in StartLine) and extracts
    # the expression after the first '|'.
    #
    # @param TokenSpaceGuid: Token space GUID C name of the PCD
    # @param PcdCName:       PCD C name
    #
    # @retval (validateranges, validlists, expressions) as three sets
    #
    def GetValidExpression(self, TokenSpaceGuid, PcdCName):
        QueryTab = self.CurrentContent
        result = [[item[2], item[8]] for item in QueryTab if item[3] == TokenSpaceGuid and item[4] == PcdCName]
        validateranges = []
        validlists = []
        expressions = []
        try:
            for row in result:
                comment = row[0]
                LineNum = row[1]
                comment = comment.strip("#")
                comment = comment.strip()
                oricomment = comment
                if comment.startswith("@ValidRange"):
                    comment = comment.replace("@ValidRange", "", 1)
                    validateranges.append(comment.split("|")[1].strip())
                if comment.startswith("@ValidList"):
                    comment = comment.replace("@ValidList", "", 1)
                    validlists.append(comment.split("|")[1].strip())
                if comment.startswith("@Expression"):
                    comment = comment.replace("@Expression", "", 1)
                    expressions.append(comment.split("|")[1].strip())
        except Exception as Exc:
            # A malformed annotation (typically a missing '|') lands here;
            # report which annotation type was being parsed.
            # NOTE(review): if the failure could occur before the first row is
            # processed, 'oricomment'/'LineNum' would be unbound and this
            # handler itself would raise NameError -- confirm that path.
            ValidType = ""
            if oricomment.startswith("@ValidRange"):
                ValidType = "@ValidRange"
            if oricomment.startswith("@ValidList"):
                ValidType = "@ValidList"
            if oricomment.startswith("@Expression"):
                ValidType = "@Expression"
            EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType, TokenSpaceGuid, PcdCName),
                            ExtraData=oricomment, File=self.MetaFile, Line=LineNum)
            return set(), set(), set()
        return set(validateranges), set(validlists), set(expressions)
## Python class representation of table storing platform (DSC) data
#
# DSC records carry an extra scope (Scope3, the default store), a FromItem
# linking records pulled in via included files, and default Enabled=1.
class PlatformTable(MetaFileTable):
    _COLUMN_ = '''
    ID REAL PRIMARY KEY,
    Model INTEGER NOT NULL,
    Value1 TEXT NOT NULL,
    Value2 TEXT,
    Value3 TEXT,
    Scope1 TEXT,
    Scope2 TEXT,
    Scope3 TEXT,
    BelongsToItem REAL NOT NULL,
    FromItem REAL NOT NULL,
    StartLine INTEGER NOT NULL,
    StartColumn INTEGER NOT NULL,
    EndLine INTEGER NOT NULL,
    EndColumn INTEGER NOT NULL,
    Enabled INTEGER DEFAULT 0
    '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = [-1, -1, '====', '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1]

    ## Constructor
    def __init__(self, Cursor, MetaFile, Temporary, FromItem=0):
        MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DSC, Temporary, FromItem)

    ## Insert table
    #
    # Insert a record into table Dsc
    #
    # @param Model: Model of a Dsc item
    # @param Value1: Value1 of a Dsc item
    # @param Value2: Value2 of a Dsc item
    # @param Value3: Value3 of a Dsc item
    # @param Scope1: Arch of a Dsc item
    # @param Scope2: Module type of a Dsc item
    # @param Scope3: Default store of a Dsc item
    # @param BelongsToItem: The item belongs to which another item
    # @param FromItem: The item belongs to which dsc file
    # @param StartLine: StartLine of a Dsc item
    # @param StartColumn: StartColumn of a Dsc item
    # @param EndLine: EndLine of a Dsc item
    # @param EndColumn: EndColumn of a Dsc item
    # @param Enabled: If this item enabled (default 1, unlike INF/DEC tables)
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1=TAB_ARCH_COMMON, Scope2=TAB_COMMON, Scope3=TAB_DEFAULT_STORES_DEFAULT, BelongsToItem=-1,
               FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
        (Value1, Value2, Value3, Scope1, Scope2, Scope3) = (Value1.strip(), Value2.strip(), Value3.strip(), Scope1.strip(), Scope2.strip(), Scope3.strip())
        self.ID = self.ID + self._ID_STEP_
        row = [self.ID,
               Model,
               Value1,
               Value2,
               Value3,
               Scope1,
               Scope2,
               Scope3,
               BelongsToItem,
               FromItem,
               StartLine,
               StartColumn,
               EndLine,
               EndColumn,
               Enabled
               ]
        self.CurrentContent.append(row)
        return self.ID

    ## Query table
    #
    # @param Model: The Model of Record
    # @param Scope1: Arch of a Dsc item (COMMON always matches)
    # @param Scope2: Module type of a Dsc item; a "Sku.ModuleType" value also
    #                matches the equivalent "COMMON.ModuleType" scope
    # @param BelongsToItem: The item belongs to which another item; when None,
    #                only top-level records (negative parent) are returned
    # @param FromItem: The item belongs to which dsc file
    #
    # @retval: A recordSet of all found records, each as
    #          [Value1, Value2, Value3, Scope1, Scope2, Scope3, ID, StartLine]
    #
    def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
        QueryTab = self.CurrentContent
        # Only strictly-positive Enabled rows are visible; DisableComponent
        # marks rows with Enabled == -1 to hide them.
        result = [item for item in QueryTab if item[1] == Model and item[-1] > 0]
        if Scope1 is not None and Scope1 != TAB_ARCH_COMMON:
            Sc1 = set(['COMMON'])
            Sc1.add(Scope1)
            result = [item for item in result if item[5] in Sc1]
        Sc2 = set(['COMMON', 'DEFAULT'])
        if Scope2 and Scope2 != TAB_COMMON:
            if '.' in Scope2:
                # Module-type-qualified scope: also accept COMMON.<ModuleType>.
                Index = Scope2.index('.')
                NewScope = TAB_COMMON + Scope2[Index:]
                Sc2.add(NewScope)
            Sc2.add(Scope2)
            result = [item for item in result if item[6] in Sc2]
        if BelongsToItem is not None:
            result = [item for item in result if item[8] == BelongsToItem]
        else:
            result = [item for item in result if item[8] < 0]
        if FromItem is not None:
            result = [item for item in result if item[9] == FromItem]
        result = [[r[2], r[3], r[4], r[5], r[6], r[7], r[0], r[10]] for r in result]
        return result

    ## Disable a component record and every record belonging to it
    #
    # @param comp_id: ID of the component row; rows whose own ID or whose
    #                 BelongsToItem equals it get Enabled set to -1
    #
    def DisableComponent(self, comp_id):
        for item in self.CurrentContent:
            if item[0] == comp_id or item[8] == comp_id:
                item[-1] = -1
## Factory class to produce different storage for different type of meta-file
class MetaFileStorage(object):
    # Map of meta-file model constant -> table class implementing its storage.
    _FILE_TABLE_ = {
        MODEL_FILE_INF : ModuleTable,
        MODEL_FILE_DEC : PackageTable,
        MODEL_FILE_DSC : PlatformTable,
        MODEL_FILE_OTHERS : MetaFileTable,
    }
    # Map of file extension -> meta-file model, for type auto-detection.
    _FILE_TYPE_ = {
        ".inf" : MODEL_FILE_INF,
        ".dec" : MODEL_FILE_DEC,
        ".dsc" : MODEL_FILE_DSC,
    }
    # Cache of non-temporary table objects, keyed by (path, type, temp, from).
    _ObjectCache = {}

    ## Constructor
    #
    # Return a (possibly cached) table object suited to the meta file's type.
    #
    # @param Cursor    Database object the table is bound to
    # @param MetaFile  PathClass object of the meta file
    # @param FileType  Optional model constant; detected from the file
    #                  extension when omitted
    # @param Temporary Temporary tables are never cached
    # @param FromItem  Record that caused this file to be parsed (DSC include)
    #
    def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False, FromItem=None):
        key = (MetaFile.Path, FileType, Temporary, FromItem)
        if key in Class._ObjectCache:
            return Class._ObjectCache[key]
        # no type given, try to find one from the file extension
        if not FileType:
            # BUGFIX: this previously read "self._FILE_TYPE_", but __new__
            # receives the class (named "Class" here), not an instance, so it
            # raised NameError whenever FileType was omitted.
            if MetaFile.Type in Class._FILE_TYPE_:
                FileType = Class._FILE_TYPE_[MetaFile.Type]
            else:
                FileType = MODEL_FILE_OTHERS
        # don't pass the type around if it's well known
        if FileType == MODEL_FILE_OTHERS:
            Args = (Cursor, MetaFile, FileType, Temporary)
        else:
            Args = (Cursor, MetaFile, Temporary)
        if FromItem:
            Args = Args + (FromItem,)
        # create the storage object and return it to caller
        TableObj = Class._FILE_TABLE_[FileType](*Args)
        if not Temporary:
            Class._ObjectCache[key] = TableObj
        return TableObj
| edk2-master | BaseTools/Source/Python/Workspace/MetaFileTable.py |
## @file
# This file is used to check format of comments
#
# Copyright (c) 2012, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from CommonDataClass.DataClass import (
MODEL_PCD_PATCHABLE_IN_MODULE,
MODEL_PCD_DYNAMIC_EX,
MODEL_PCD_DYNAMIC,
MODEL_EFI_GUID,
MODEL_EFI_PPI,
MODEL_EFI_PROTOCOL
)
from Common.BuildToolError import FORMAT_INVALID
import Common.EdkLogger as EdkLogger
# Usage keywords accepted in GUID/PPI/Protocol/PCD comment blocks.
# NOTE(review): the list mixes tense variants and includes the singular
# "SOMETIME_CONSUMES" -- presumably kept for existing INF files; confirm
# before pruning.
UsageList = ("PRODUCES", "PRODUCED", "ALWAYS_PRODUCES", "ALWAYS_PRODUCED", "SOMETIMES_PRODUCES",
             "SOMETIMES_PRODUCED", "CONSUMES", "CONSUMED", "ALWAYS_CONSUMES", "ALWAYS_CONSUMED",
             "SOMETIMES_CONSUMES", "SOMETIMES_CONSUMED", "SOMETIME_CONSUMES")
# Per-model error templates, filled with (INF path, line number, item name).
# All PCD section models share the MODEL_PCD_DYNAMIC message.
ErrorMsgMap = {
    MODEL_EFI_GUID : "The usage for this GUID is not listed in this INF: %s[%d]:%s",
    MODEL_EFI_PPI : "The usage for this PPI is not listed in this INF: %s[%d]:%s.",
    MODEL_EFI_PROTOCOL : "The usage for this Protocol is not listed in this INF: %s[%d]:%s.",
    MODEL_PCD_DYNAMIC : "The usage for this PCD is not listed in this INF: %s[%d]:%s."
}
def CheckInfComment(SectionType, Comments, InfFile, LineNo, ValueList):
    """Validate that the comment block for an INF entry declares a usage
    keyword appropriate to the entry's section type."""
    PcdModels = (MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_EX, MODEL_PCD_DYNAMIC)
    if SectionType in PcdModels:
        # PCD entries are reported as "TokenSpaceGuid.PcdName".
        Target = ValueList[0] + '.' + ValueList[1]
        CheckUsage(Comments, UsageList, InfFile, LineNo, Target, ErrorMsgMap[MODEL_PCD_DYNAMIC])
    elif SectionType in (MODEL_EFI_GUID, MODEL_EFI_PPI):
        CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
    elif SectionType == MODEL_EFI_PROTOCOL:
        # Protocols additionally accept the driver-binding usages.
        ProtocolUsages = UsageList + ("TO_START", "BY_START")
        CheckUsage(Comments, ProtocolUsages, InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
def CheckUsage(Comments, Usages, InfFile, LineNo, Value, ErrorMsg):
    """Scan comment lines for a recognized usage keyword; raise a build
    error via EdkLogger when none is found."""
    for CommentItem in Comments:
        # CommentItem[0] is the raw comment text; '#' markers are treated as
        # whitespace so "## CONSUMES" still tokenizes cleanly.
        Tokens = CommentItem[0].replace('#', ' ').split()
        if any(Token in Usages for Token in Tokens):
            return
    EdkLogger.error(
        "Parser", FORMAT_INVALID,
        ErrorMsg % (InfFile, LineNo, Value)
    )
| edk2-master | BaseTools/Source/Python/Workspace/MetaFileCommentParser.py |
## @file
# This file is used to create/update/query/erase table for files
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import Common.LongFilePathOs as os
import Common.EdkLogger as EdkLogger
from CommonDataClass import DataClass
from CommonDataClass.DataClass import FileClass
## Convert to SQL required string format
def ConvertToSqlString(StringList):
    """Quote each string for SQL: double embedded single quotes, then wrap
    the whole value in single quotes."""
    return ["'" + Item.replace("'", "''") + "'" for Item in StringList]
## Table
#
# This class defines a common table backed by a database object.
# (The original header comment mislabeled this class "TableFile".)
#
# @param object: Inherited from object class
#
# @param Db:        Database object providing CreateEmptyTable/GetTable
# @param Name:      Name of the table
# @param IdBase:    Base value from which record IDs are allocated
# @param Temporary: Kept for API compatibility; not used by this class
#
class Table(object):
    _COLUMN_ = ''
    _ID_STEP_ = 1
    _ID_MAX_ = 0x80000000
    _DUMMY_ = 0

    def __init__(self, Db, Name='', IdBase=0, Temporary=False):
        self.Db = Db
        self.Table = Name
        self.IdBase = int(IdBase)
        self.ID = int(IdBase)
        self.Temporary = Temporary
        self.Contents = []    # rows staged locally as [ID, *values]

    def __str__(self):
        return self.Table

    ## Create table
    #
    # Create (reset) the backing table and re-base the running ID.
    # NewTable is kept for API compatibility and is not used.
    #
    def Create(self, NewTable=True):
        self.Db.CreateEmptyTable(self.Table)
        self.ID = self.GetId()

    ## Insert table
    #
    # Insert a record into a table; IDs wrap around once the per-table
    # budget (_ID_MAX_) above IdBase is exhausted.
    #
    # @retval ID assigned to the new record
    #
    def Insert(self, *Args):
        self.ID = self.ID + self._ID_STEP_
        if self.ID >= (self.IdBase + self._ID_MAX_):
            self.ID = self.IdBase + self._ID_STEP_
        row = [self.ID]
        row.extend(Args)
        self.Contents.append(row)
        return self.ID

    ## Get count
    #
    # Get a count of all records of the table
    #
    # @retval Count: Total count of all records
    #
    def GetCount(self):
        tab = self.Db.GetTable(self.Table)
        return len(tab)

    ## Get the highest record ID currently stored, or IdBase when empty
    def GetId(self):
        tab = self.Db.GetTable(self.Table)
        # BUGFIX: max() raised ValueError on an empty table, which made the
        # "is None" guard below dead code; supply a default so an empty
        # table yields IdBase as originally intended.
        Id = max((int(item[0]) for item in tab), default=None)
        if Id is None:
            Id = self.IdBase
        return Id

    ## Init the ID of the table
    #
    # Init the ID of the table
    #
    def InitID(self):
        self.ID = self.GetId()

    ## Exec
    #
    # Exec Sql Command, return result
    #
    # @param SqlCommand: The SqlCommand to be executed
    #
    # @retval RecordSet: The result after executed
    #
    def Exec(self, SqlCommand):
        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
        self.Db.execute(SqlCommand)
        RecordSet = self.Db.fetchall()
        return RecordSet

    def SetEndFlag(self):
        # NOTE(review): appends the bare _DUMMY_ scalar (0 for this base
        # class) rather than a row-shaped marker; subclasses relying on
        # row-shaped dummies must override _DUMMY_ -- confirm with callers.
        Tab = self.Db.GetTable(self.Table)
        Tab.append(self._DUMMY_)

    ## Report whether the table ends with the -1 end-flag record
    def IsIntegral(self):
        tab = self.Db.GetTable(self.Table)
        # BUGFIX: min() raised ValueError on an empty table; an empty table
        # cannot contain the -1 end-flag record, so report it as not integral.
        if not tab:
            return False
        Id = min(int(item[0]) for item in tab)
        if Id != -1:
            return False
        return True

    def GetAll(self):
        tab = self.Db.GetTable(self.Table)
        return tab
## TableFile
#
# Table with one row per source/meta file known to the build database.
#
# @param object: Inherited from object class
#
class TableFile(Table):
    _COLUMN_ = '''
    ID INTEGER PRIMARY KEY,
    Name VARCHAR NOT NULL,
    ExtName VARCHAR,
    Path VARCHAR,
    FullPath VARCHAR NOT NULL,
    Model INTEGER DEFAULT 0,
    TimeStamp SINGLE NOT NULL,
    FromItem REAL NOT NULL
    '''

    def __init__(self, Cursor):
        Table.__init__(self, Cursor, 'File')

    ## Insert one file record; string fields are SQL-quoted first
    #
    # @param Name:      Name of a File
    # @param ExtName:   ExtName of a File
    # @param Path:      Path of a File
    # @param FullPath:  FullPath of a File
    # @param Model:     Model of a File
    # @param TimeStamp: TimeStamp of a File
    # @param FromItem:  Record that referenced this file (0 when none)
    #
    # @retval ID assigned to the new record
    #
    def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp, FromItem=0):
        Name, ExtName, Path, FullPath = ConvertToSqlString((Name, ExtName, Path, FullPath))
        return Table.Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp, FromItem)

    ## Insert a file object into the table
    #
    # @param File:     File object carrying Name/Ext/Dir/Path/TimeStamp
    # @param Model:    The model of the file
    # @param FromItem: Forwarded only when non-empty
    #
    # @retval FileID: The ID after record is inserted
    #
    def InsertFile(self, File, Model, FromItem=''):
        if FromItem:
            return self.Insert(File.Name, File.Ext, File.Dir, File.Path, Model, File.TimeStamp, FromItem)
        return self.Insert(File.Name, File.Ext, File.Dir, File.Path, Model, File.TimeStamp)

    ## Get type of a given file
    #
    # @param FileId  ID of a file
    #
    # @retval Model value of the file, or None when not found
    #
    def GetFileType(self, FileId):
        Records = self.Exec("select Model from %s where ID = '%s'" % (self.Table, FileId))
        return Records[0][0] if Records else None

    ## Get file timestamp of a given file
    #
    # @param FileId  ID of file
    #
    # @retval TimeStamp value of the file, or None when not found
    #
    def GetFileTimeStamp(self, FileId):
        Records = self.Exec("select TimeStamp from %s where ID = '%s'" % (self.Table, FileId))
        return Records[0][0] if Records else None

    ## Update the timestamp of a given file
    #
    # @param FileId    ID of file
    # @param TimeStamp Time stamp of file
    #
    def SetFileTimeStamp(self, FileId, TimeStamp):
        self.Exec("update %s set TimeStamp=%s where ID='%s'" % (self.Table, TimeStamp, FileId))

    ## Get list of files with a given type
    #
    # @param FileType  Type value of file
    #
    # @retval List of full paths of files with the given type
    #
    def GetFileList(self, FileType):
        Records = self.Exec("select FullPath from %s where Model=%s" % (self.Table, FileType))
        return [Row[0] for Row in Records]
## TableDataModel
#
# Table holding the cross-index/name mapping for all meta-data model types.
#
# @param object: Inherited from object class
#
class TableDataModel(Table):
    _COLUMN_ = """
        ID INTEGER PRIMARY KEY,
        CrossIndex INTEGER NOT NULL,
        Name VARCHAR NOT NULL,
        Description VARCHAR
        """
    def __init__(self, Cursor):
        Table.__init__(self, Cursor, 'DataModel')
    ## Insert a record into table DataModel
    #
    # @param CrossIndex:  CrossIndex of a ModelType
    # @param Name:        Name of a ModelType
    # @param Description: Description of a ModelType
    #
    def Insert(self, CrossIndex, Name, Description):
        Name, Description = ConvertToSqlString((Name, Description))
        return Table.Insert(self, CrossIndex, Name, Description)
    ## Init table
    #
    # Populate the table with all predefined model types; a table that
    # already contains records is left untouched.
    #
    def InitTable(self):
        EdkLogger.verbose("\nInitialize table DataModel started ...")
        Count = self.GetCount()
        if Count is not None and Count != 0:
            return
        for Item in DataClass.MODEL_LIST:
            # MODEL_LIST items are (name, cross-index); the name doubles as
            # the description.
            self.Insert(Item[1], Item[0], Item[0])
        EdkLogger.verbose("Initialize table DataModel ... DONE!")
    ## Get CrossIndex
    #
    # Get a model's cross index from its name
    #
    # @param ModelName:   Name of the model
    # @retval CrossIndex: CrossIndex of the model (-1 when the name is unknown)
    #
    def GetCrossIndex(self, ModelName):
        self.Db.execute("""select CrossIndex from DataModel where name = '""" + ModelName + """'""")
        CrossIndex = -1
        for Row in self.Db:
            CrossIndex = Row[0]
        return CrossIndex
| edk2-master | BaseTools/Source/Python/Workspace/MetaDataTable.py |
## @file
# This file is used to parse meta files
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2015-2018 Hewlett Packard Enterprise Development LP<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
import time
import copy
from hashlib import md5
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
from CommonDataClass.DataClass import *
from Common.DataType import *
from Common.StringUtils import *
from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd, AnalyzePcdExpression, ParseFieldValue, StructPattern
from Common.Expression import *
from CommonDataClass.Exceptions import *
from Common.LongFilePathSupport import OpenLongFilePath as open
from collections import defaultdict
from .MetaFileTable import MetaFileStorage
from .MetaFileCommentParser import CheckInfComment
from Common.DataType import TAB_COMMENT_EDK_START, TAB_COMMENT_EDK_END
## RegEx for finding file versions
# Hex INF_VERSION: '0x'/'0X' followed by 5-8 hex digits (e.g. 0x00010005).
# Note: the character class is [\da-fA-F]; the historical form [\da-f-A-F]
# contained a stray literal '-', wrongly accepting values like '0x0-01005'.
hexVersionPattern = re.compile(r'0[xX][\da-fA-F]{5,8}')
# Decimal "major.minor" version, e.g. 1.27
decVersionPattern = re.compile(r'\d+\.\d+')
# A complete single-line {CODE(...)} PCD value
CODEPattern = re.compile(r"{CODE\([a-fA-F0-9Xx\{\},\s]*\)}")
## A decorator used to parse macro definition
#
# Wraps a section-content parser so that 'DEFINE'/'EDK_GLOBAL' statements are
# intercepted and recorded as macros; any other line falls through to the
# wrapped parser unchanged.
#
def ParseMacro(Parser):
    def MacroParser(self):
        Match = GlobalData.gMacroDefPattern.match(self._CurrentLine)
        if not Match:
            # Not 'DEFINE/EDK_GLOBAL' statement, call decorated method
            Parser(self)
            return
        # Split "NAME = VALUE" after the DEFINE/EDK_GLOBAL keyword
        TokenList = GetSplitValueList(self._CurrentLine[Match.end(1):], TAB_EQUAL_SPLIT, 1)
        # Syntax check
        if not TokenList[0]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        if len(TokenList) < 2:
            # A macro with no '=' part gets an empty value
            TokenList.append('')
        Type = Match.group(1)
        Name, Value = TokenList
        # Global macros can be only defined via environment variable
        if Name in GlobalData.gGlobalDefines:
            EdkLogger.error('Parser', FORMAT_INVALID, "%s can only be defined via environment variable" % Name,
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        # Only upper case letters, digit and '_' are allowed
        if not GlobalData.gMacroNamePattern.match(Name):
            EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        Value = ReplaceMacro(Value, self._Macros)
        if Type in self.DataType:
            self._ItemType = self.DataType[Type]
        else:
            self._ItemType = MODEL_META_DATA_DEFINE
        # DEFINE defined macros
        if Type == TAB_DSC_DEFINES_DEFINE:
            #
            # First judge whether this DEFINE is in conditional directive statements or not.
            #
            if isinstance(self, DscParser) and self._InDirective > -1:
                # A DEFINE inside a DSC conditional directive is not recorded
                # at parse time.
                pass
            else:
                if isinstance(self, DecParser):
                    if MODEL_META_DATA_HEADER in self._SectionType:
                        self._FileLocalMacros[Name] = Value
                    else:
                        self._ConstructSectionMacroDict(Name, Value)
                elif self._SectionType == MODEL_META_DATA_HEADER:
                    # Macros defined in [Defines] are visible file-wide
                    self._FileLocalMacros[Name] = Value
                else:
                    self._ConstructSectionMacroDict(Name, Value)
        # EDK_GLOBAL defined macros
        elif not isinstance(self, DscParser):
            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        elif self._SectionType != MODEL_META_DATA_HEADER:
            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used under [Defines] section",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        self._ValueList = [Type, Name, Value]
    return MacroParser
## Base class of parser
#
# This class is used for derivation purpose. The specific parser for one kind
# type file must derive this class and implement some public interfaces.
#
# @param FilePath        The path of platform description file
# @param FileType        The type of the meta file
# @param Table           Database used to retrieve module/package information
# @param Macros          Macros used for replacement in file
# @param Owner           Owner ID (for sub-section parsing)
# @param From            ID from which the data comes (for !INCLUDE directive)
#
class MetaFileParser(object):
    # data type (file content) for specific file type
    DataType = {}
    # Parser objects used to implement singleton
    MetaFiles = {}
    ## Factory method
    #
    # One file, one parser object. This factory method makes sure that there's
    # only one object constructed for one meta file.
    #
    # @param Class          class object of real AutoGen class
    #                       (InfParser, DecParser or DscParser)
    # @param FilePath       The path of meta file
    # @param *args          The specific class related parameters
    # @param **kwargs       The specific class related dict parameters
    #
    def __new__(Class, FilePath, *args, **kwargs):
        if FilePath in Class.MetaFiles:
            # Reuse the cached parser for this file
            return Class.MetaFiles[FilePath]
        else:
            ParserObject = super(MetaFileParser, Class).__new__(Class)
            Class.MetaFiles[FilePath] = ParserObject
            return ParserObject
    ## Constructor of MetaFileParser
    #
    # Initialize object of MetaFileParser
    #
    # @param FilePath       The path of platform description file
    # @param FileType       The type of the meta file
    # @param Arch           Default Arch value for filtering sections
    # @param Table          Database used to retrieve module/package information
    # @param Owner          Owner ID (for sub-section parsing)
    # @param From           ID from which the data comes (for !INCLUDE directive)
    #
    def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1):
        self._Table = Table
        self._RawTable = Table
        self._Arch = Arch
        self._FileType = FileType
        self.MetaFile = FilePath
        self._FileDir = self.MetaFile.Dir
        self._Defines = {}
        self._Packages = []
        self._FileLocalMacros = {}
        self._SectionsMacroDict = defaultdict(dict)
        # for recursive parsing
        self._Owner = [Owner]
        self._From = From
        # parser status for parsing
        self._ValueList = ['', '', '', '', '']
        self._Scope = []
        self._LineIndex = 0
        self._CurrentLine = ''
        self._SectionType = MODEL_UNKNOWN
        self._SectionName = ''
        self._InSubsection = False
        self._SubsectionType = MODEL_UNKNOWN
        self._SubsectionName = ''
        self._ItemType = MODEL_UNKNOWN
        self._LastItem = -1
        self._Enabled = 0
        self._Finished = False
        self._PostProcessed = False
        # Different version of meta-file has different way to parse.
        self._Version = 0
        self._GuidDict = {}  # for Parser PCD value {GUID(gTokeSpaceGuidName)}
        self._PcdCodeValue = ""
        self._PcdDataTypeCODE = False
        self._CurrentPcdName = ""
    ## Store the parsed data in table
    def _Store(self, *Args):
        return self._Table.Insert(*Args)
    ## Virtual method for starting parse; subclasses must override it
    def Start(self):
        raise NotImplementedError
    ## Notify a post-process is needed
    def DoPostProcess(self):
        # Clearing the flag forces _PostProcess() to run on the next query
        self._PostProcessed = False
    ## Set parsing complete flag in both class and table
    def _Done(self):
        self._Finished = True
        self._Table.SetEndFlag()
    def _PostProcess(self):
        self._PostProcessed = True
    ## Get the parse complete flag
    @property
    def Finished(self):
        return self._Finished
    ## Set the complete flag
    @Finished.setter
    def Finished(self, Value):
        self._Finished = Value
    ## Remove records that do not match given Filter Arch
    def _FilterRecordList(self, RecordList, FilterArch):
        NewRecordList = []
        for Record in RecordList:
            Arch = Record[3]  # the fourth column of a record is its Arch
            if Arch == TAB_ARCH_COMMON or Arch == FilterArch:
                NewRecordList.append(Record)
        return NewRecordList
    ## Use [] style to query data in table, just for readability
    #
    #   DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)]
    #
    def __getitem__(self, DataInfo):
        if not isinstance(DataInfo, type(())):
            DataInfo = (DataInfo,)
        # Parse the file first, if necessary
        self.StartParse()
        # No specific ARCH or Platform given, use raw data
        if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
            return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch)
        # Do post-process if necessary
        if not self._PostProcessed:
            self._PostProcess()
        return self._FilterRecordList(self._Table.Query(*DataInfo), DataInfo[1])
    def StartParse(self):
        # Parse only once; a table that already passes the integrity check was
        # fully populated by an earlier run.
        if not self._Finished:
            if self._RawTable.IsIntegrity():
                self._Finished = True
            else:
                self._Table = self._RawTable
                self._PostProcessed = False
                self.Start()
    ## Data parser for the common format in different type of file
    #
    #   The common format in the meta file is like
    #
    #       xxx1 | xxx2 | xxx3
    #
    @ParseMacro
    def _CommonParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        self._ValueList[0:len(TokenList)] = TokenList
    ## Data parser for the format in which there's path
    #
    #   Only path can have macro used. So we need to replace them before use.
    #
    @ParseMacro
    def _PathParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        self._ValueList[0:len(TokenList)] = TokenList
        # Don't do macro replacement for dsc file at this point
        if not isinstance(self, DscParser):
            Macros = self._Macros
            self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
    ## Skip unsupported data
    def _Skip(self):
        EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
                        Line=self._LineIndex + 1, ExtraData=self._CurrentLine);
        self._ValueList[0:1] = [self._CurrentLine]
    ## Skip unsupported data for UserExtension Section
    def _SkipUserExtension(self):
        self._ValueList[0:1] = [self._CurrentLine]
    ## Section header parser
    #
    #   The section header is always in following format:
    #
    #       [section_name.arch<.platform|module_type>]
    #
    def _SectionHeaderParser(self):
        self._Scope = []
        self._SectionName = ''
        ArchList = set()
        # A header may list several comma-separated section specifiers
        for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
            if Item == '':
                continue
            ItemList = GetSplitValueList(Item, TAB_SPLIT, 3)
            # different section should not mix in one section
            if self._SectionName != '' and self._SectionName != ItemList[0].upper():
                EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
                                File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
            self._SectionName = ItemList[0].upper()
            if self._SectionName in self.DataType:
                self._SectionType = self.DataType[self._SectionName]
                # Check if the section name is valid
                if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH_SET and len(ItemList) > 3:
                    EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
                                    self.MetaFile, self._LineIndex + 1, self._CurrentLine)
            elif self._Version >= 0x00010005:
                EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
                                self.MetaFile, self._LineIndex + 1, self._CurrentLine)
            else:
                self._SectionType = MODEL_UNKNOWN
            # S1 is always Arch
            if len(ItemList) > 1:
                S1 = ItemList[1].upper()
            else:
                S1 = TAB_ARCH_COMMON
            S1 = ReplaceMacro(S1, self._Macros)
            ArchList.add(S1)
            # S2 may be Platform or ModuleType
            if len(ItemList) > 2:
                if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD_SET:
                    S2 = ItemList[2]
                else:
                    S2 = ItemList[2].upper()
            else:
                S2 = TAB_COMMON
            if len(ItemList) > 3:
                S3 = ItemList[3]
            else:
                S3 = TAB_COMMON
            self._Scope.append([S1, S2, S3])
        # 'COMMON' must not be used with specific ARCHs at the same section
        if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1:
            EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
                            File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
        # If the section information is needed later, it should be stored in database
        self._ValueList[0] = self._SectionName
    ## [packages] section parser
    @ParseMacro
    def _PackageParser(self):
        self._CurrentLine = CleanString(self._CurrentLine)
        self._Packages.append(self._CurrentLine)
        self._ValueList[0] = self._CurrentLine
    ## [defines] section parser
    @ParseMacro
    def _DefineParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        self._ValueList[1:len(TokenList)] = TokenList
        if not self._ValueList[1]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        if not self._ValueList[2]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
        Name, Value = self._ValueList[1], self._ValueList[2]
        # Any macro reference surviving replacement is unresolved -> error
        MacroUsed = GlobalData.gMacroRefPattern.findall(Value)
        if len(MacroUsed) != 0:
            for Macro in MacroUsed:
                if Macro in GlobalData.gGlobalDefines:
                    EdkLogger.error("Parser", FORMAT_INVALID, "Global macro %s is not permitted." % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
                else:
                    EdkLogger.error("Parser", FORMAT_INVALID, "%s not defined" % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        # Sometimes, we need to make differences between EDK and EDK2 modules
        if Name == 'INF_VERSION':
            if hexVersionPattern.match(Value):
                self._Version = int(Value, 0)
            elif decVersionPattern.match(Value):
                ValueList = Value.split('.')
                Major = int(ValueList[0], 0)
                Minor = int(ValueList[1], 0)
                if Major > 0xffff or Minor > 0xffff:
                    EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
                                    ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
                # Pack "major.minor" into the 0xMMMMmmmm form
                self._Version = int('0x{0:04x}{1:04x}'.format(Major, Minor), 0)
            else:
                EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
                                ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        if isinstance(self, InfParser) and self._Version < 0x00010005:
            # EDK module allows using defines as macros
            self._FileLocalMacros[Name] = Value
        self._Defines[Name] = Value
    ## [BuildOptions] section parser
    @ParseMacro
    def _BuildOptionParser(self):
        self._CurrentLine = CleanString(self._CurrentLine, BuildOption=True)
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
        if len(TokenList2) == 2:
            self._ValueList[0] = TokenList2[0]  # toolchain family
            self._ValueList[1] = TokenList2[1]  # keys
        else:
            self._ValueList[1] = TokenList[0]
        if len(TokenList) == 2 and not isinstance(self, DscParser):  # value
            self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)
        if self._ValueList[1].count('_') != 4:
            EdkLogger.error(
                'Parser',
                FORMAT_INVALID,
                "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
                ExtraData=self._CurrentLine,
                File=self.MetaFile,
                Line=self._LineIndex + 1
                )
    def GetValidExpression(self, TokenSpaceGuid, PcdCName):
        return self._Table.GetValidExpression(TokenSpaceGuid, PcdCName)
    ## Effective macro view: file-scope macros overridden by macros from the
    #  currently applicable sections
    @property
    def _Macros(self):
        Macros = {}
        Macros.update(self._FileLocalMacros)
        Macros.update(self._GetApplicableSectionMacro())
        return Macros
    ## Construct section Macro dict
    def _ConstructSectionMacroDict(self, Name, Value):
        ScopeKey = [(Scope[0], Scope[1], Scope[2]) for Scope in self._Scope]
        ScopeKey = tuple(ScopeKey)
        #
        # DecParser SectionType is a list, will contain more than one item only in Pcd Section
        # As Pcd section macro usage is not allowed, so here it is safe
        #
        if isinstance(self, DecParser):
            SectionDictKey = self._SectionType[0], ScopeKey
        else:
            SectionDictKey = self._SectionType, ScopeKey
        self._SectionsMacroDict[SectionDictKey][Name] = Value
    ## Get section Macros that are applicable to current line, which may come from other sections
    ## that share the same name while scope is wider
    def _GetApplicableSectionMacro(self):
        Macros = {}
        ComComMacroDict = {}
        ComSpeMacroDict = {}
        SpeSpeMacroDict = {}
        ActiveSectionType = self._SectionType
        if isinstance(self, DecParser):
            ActiveSectionType = self._SectionType[0]
        for (SectionType, Scope) in self._SectionsMacroDict:
            if SectionType != ActiveSectionType:
                continue
            for ActiveScope in self._Scope:
                Scope0, Scope1, Scope2= ActiveScope[0], ActiveScope[1], ActiveScope[2]
                if(Scope0, Scope1, Scope2) not in Scope:
                    break
            else:
                SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
            for ActiveScope in self._Scope:
                Scope0, Scope1, Scope2 = ActiveScope[0], ActiveScope[1], ActiveScope[2]
                if(Scope0, Scope1, Scope2) not in Scope and (Scope0, TAB_COMMON, TAB_COMMON) not in Scope and (TAB_COMMON, Scope1, TAB_COMMON) not in Scope:
                    break
            else:
                ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
            if (TAB_COMMON, TAB_COMMON, TAB_COMMON) in Scope:
                ComComMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
        # Narrower scope wins: fully-specific overrides common-specific,
        # which overrides fully-common
        Macros.update(ComComMacroDict)
        Macros.update(ComSpeMacroDict)
        Macros.update(SpeSpeMacroDict)
        return Macros
    ## Collapse each multi-line {CODE(...)} PCD value onto a single line
    #
    #  The consumed continuation lines are replaced with empty strings so the
    #  line numbering of the rest of the content stays unchanged.
    #
    def ProcessMultipleLineCODEValue(self,Content):
        CODEBegin = False
        CODELine = ""
        continuelinecount = 0
        newContent = []
        for Index in range(0, len(Content)):
            Line = Content[Index]
            if CODEBegin:
                CODELine = CODELine + Line
                continuelinecount +=1
                if ")}" in Line:
                    newContent.append(CODELine)
                    for _ in range(continuelinecount):
                        newContent.append("")
                    CODEBegin = False
                    CODELine = ""
                    continuelinecount = 0
            else:
                if not Line:
                    newContent.append(Line)
                    continue
                if "{CODE(" not in Line:
                    newContent.append(Line)
                    continue
                elif CODEPattern.findall(Line):
                    # Complete single-line {CODE(...)} value; keep as-is
                    newContent.append(Line)
                    continue
                else:
                    CODEBegin = True
                    CODELine = Line
        return newContent
    _SectionParser = {}
## INF file parser class
#
# @param FilePath       The path of module description file
# @param FileType       The type of the meta file
# @param Table          Database used to retrieve module/package information
# @param Macros         Macros used for replacement in file
#
class InfParser(MetaFileParser):
    # INF file supported data types (one type per section)
    DataType = {
        TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
        TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
        TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
        TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
        TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
        TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
        TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
        TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
        TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
        TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
        TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
        TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
        TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
        TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
        TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
        TAB_GUIDS.upper() : MODEL_EFI_GUID,
        TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
        TAB_PPIS.upper() : MODEL_EFI_PPI,
        TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
        TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
        TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
    }
    ## Constructor of InfParser
    #
    # Initialize object of InfParser
    #
    # @param FilePath       The path of module description file
    # @param FileType       The type of the meta file
    # @param Arch           Default Arch value for filtering sections
    # @param Table          Database used to retrieve module/package information
    #
    def __init__(self, FilePath, FileType, Arch, Table):
        # prevent re-initialization (the factory __new__ returns a singleton)
        if hasattr(self, "_Table"):
            return
        MetaFileParser.__init__(self, FilePath, FileType, Arch, Table)
        self.PcdsDict = {}
    ## Parser starter: read the whole INF and store parsed records in the table
    def Start(self):
        NmakeLine = ''
        Content = ''
        try:
            with open(str(self.MetaFile), 'r') as File:
                Content = File.readlines()
        except:
            EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
        # parse the file line by line
        IsFindBlockComment = False
        GetHeaderComment = False
        TailComments = []
        SectionComments = []
        Comments = []
        for Index in range(0, len(Content)):
            # skip empty, commented, block commented lines
            Line, Comment = CleanString2(Content[Index], AllowCppStyleComment=True)
            NextLine = ''
            if Index + 1 < len(Content):
                NextLine, NextComment = CleanString2(Content[Index + 1])
            if Line == '':
                if Comment:
                    Comments.append((Comment, Index + 1))
                elif GetHeaderComment:
                    SectionComments.extend(Comments)
                    Comments = []
                continue
            if Line.find(TAB_COMMENT_EDK_START) > -1:
                IsFindBlockComment = True
                continue
            if Line.find(TAB_COMMENT_EDK_END) > -1:
                IsFindBlockComment = False
                continue
            if IsFindBlockComment:
                continue
            self._LineIndex = Index
            self._CurrentLine = Line
            # section header
            if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
                if not GetHeaderComment:
                    # Comments collected before the first section header form
                    # the file header comment
                    for Cmt, LNo in Comments:
                        self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', TAB_COMMON,
                                    TAB_COMMON, self._Owner[-1], LNo, -1, LNo, -1, 0)
                    GetHeaderComment = True
                else:
                    TailComments.extend(SectionComments + Comments)
                Comments = []
                self._SectionHeaderParser()
                # Check invalid sections
                if self._Version < 0x00010005:
                    if self._SectionType in [MODEL_META_DATA_BUILD_OPTION,
                                             MODEL_EFI_LIBRARY_CLASS,
                                             MODEL_META_DATA_PACKAGE,
                                             MODEL_PCD_FIXED_AT_BUILD,
                                             MODEL_PCD_PATCHABLE_IN_MODULE,
                                             MODEL_PCD_FEATURE_FLAG,
                                             MODEL_PCD_DYNAMIC_EX,
                                             MODEL_PCD_DYNAMIC,
                                             MODEL_EFI_GUID,
                                             MODEL_EFI_PROTOCOL,
                                             MODEL_EFI_PPI,
                                             MODEL_META_DATA_USER_EXTENSION]:
                        EdkLogger.error('Parser', FORMAT_INVALID,
                                        "Section [%s] is not allowed in inf file without version" % (self._SectionName),
                                        ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
                    elif self._SectionType in [MODEL_EFI_INCLUDE,
                                               MODEL_EFI_LIBRARY_INSTANCE,
                                               MODEL_META_DATA_NMAKE]:
                        EdkLogger.error('Parser', FORMAT_INVALID,
                                        "Section [%s] is not allowed in inf file with version 0x%08x" % (self._SectionName, self._Version),
                                        ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
                continue
            # merge two lines specified by '\' in section NMAKE
            elif self._SectionType == MODEL_META_DATA_NMAKE:
                if Line[-1] == '\\':
                    if NextLine == '':
                        self._CurrentLine = NmakeLine + Line[0:-1]
                        NmakeLine = ''
                    else:
                        if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
                            self._CurrentLine = NmakeLine + Line[0:-1]
                            NmakeLine = ''
                        else:
                            NmakeLine = NmakeLine + ' ' + Line[0:-1]
                            continue
                else:
                    self._CurrentLine = NmakeLine + Line
                    NmakeLine = ''
            # section content
            self._ValueList = ['', '', '']
            # parse current line, result will be put in self._ValueList
            self._SectionParser[self._SectionType](self)
            if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
                self._ItemType = -1
                Comments = []
                continue
            if Comment:
                Comments.append((Comment, Index + 1))
            if GlobalData.gOptions and GlobalData.gOptions.CheckUsage:
                CheckInfComment(self._SectionType, Comments, str(self.MetaFile), Index + 1, self._ValueList)
            #
            # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
            # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
            #
            for Arch, Platform, _ in self._Scope:
                LastItem = self._Store(self._SectionType,
                                       self._ValueList[0],
                                       self._ValueList[1],
                                       self._ValueList[2],
                                       Arch,
                                       Platform,
                                       self._Owner[-1],
                                       self._LineIndex + 1,
                                       - 1,
                                       self._LineIndex + 1,
                                       - 1,
                                       0
                                       )
                for Comment, LineNo in Comments:
                    self._Store(MODEL_META_DATA_COMMENT, Comment, '', '', Arch, Platform,
                                LastItem, LineNo, -1, LineNo, -1, 0)
            Comments = []
            SectionComments = []
        TailComments.extend(SectionComments + Comments)
        if IsFindBlockComment:
            EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
                            File=self.MetaFile)
        # If there are tail comments in INF file, save to database whatever the comments are
        for Comment in TailComments:
            self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', TAB_COMMON,
                        TAB_COMMON, self._Owner[-1], -1, -1, -1, -1, 0)
        self._Done()
    ## Data parser for the format in which there's path
    #
    #   Only path can have macro used. So we need to replace them before use.
    #
    def _IncludeParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        self._ValueList[0:len(TokenList)] = TokenList
        Macros = self._Macros
        if Macros:
            for Index in range(0, len(self._ValueList)):
                Value = self._ValueList[Index]
                if not Value:
                    continue
                self._ValueList[Index] = ReplaceMacro(Value, Macros)
    ## Parse [Sources] section
    #
    #   Only path can have macro used. So we need to replace them before use.
    #
    @ParseMacro
    def _SourceFileParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        # Let TokenList[2] be TagName|ToolCode|FeatureFlag
        if len(TokenList) > 3:
            for extraToken in range(3, len(TokenList)):
                TokenList[2] = TokenList[2] + '|' + TokenList[extraToken]
        self._ValueList[0:len(TokenList)] = TokenList
        Macros = self._Macros
        # For Acpi tables, remove macro like ' TABLE_NAME=Sata1'
        if 'COMPONENT_TYPE' in Macros:
            if self._Defines['COMPONENT_TYPE'].upper() == 'ACPITABLE':
                self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
        if self._Defines['BASE_NAME'] == 'Microcode':
            pass
        self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
    ## Parse [Binaries] section
    #
    #   Only path can have macro used. So we need to replace them before use.
    #
    @ParseMacro
    def _BinaryFileParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
        if len(TokenList) < 2:
            EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
                            ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        if not TokenList[0]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
                            ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        if not TokenList[1]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
                            ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        self._ValueList[0:len(TokenList)] = TokenList
        self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
    ## [nmake] section parser (Edk.x style only)
    def _NmakeParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        self._ValueList[0:len(TokenList)] = TokenList
        # remove macros
        self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
        # remove self-reference in macro setting
        #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
    ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
    @ParseMacro
    def _PcdParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
        ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
        if len(ValueList) != 2:
            EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        self._ValueList[0:1] = ValueList
        if len(TokenList) > 1:
            self._ValueList[2] = TokenList[1]
        if self._ValueList[0] == '' or self._ValueList[1] == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        # if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
        if self._ValueList[2] != '':
            InfPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
            if InfPcdValueList[0] in ['True', 'true', 'TRUE']:
                self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '1', 1)
            elif InfPcdValueList[0] in ['False', 'false', 'FALSE']:
                self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '0', 1)
            elif isinstance(InfPcdValueList[0], str) and InfPcdValueList[0].find('$(') >= 0:
                Value = ReplaceExprMacro(InfPcdValueList[0],self._Macros)
                if Value != '0':
                    self._ValueList[2] = Value
        # A PCD may only appear under a single PCD-type section in one module
        if (self._ValueList[0], self._ValueList[1]) not in self.PcdsDict:
            self.PcdsDict[self._ValueList[0], self._ValueList[1]] = self._SectionType
        elif self.PcdsDict[self._ValueList[0], self._ValueList[1]] != self._SectionType:
            EdkLogger.error('Parser', FORMAT_INVALID, "It is not permissible to list a specified PCD in different PCD type sections.",
                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
    ## [depex] section parser
    @ParseMacro
    def _DepexParser(self):
        self._ValueList[0:1] = [self._CurrentLine]
    _SectionParser = {
        MODEL_UNKNOWN : MetaFileParser._Skip,
        MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
        MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
        MODEL_EFI_INCLUDE : _IncludeParser, # for Edk.x modules
        MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for Edk.x modules
        MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
        MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser,
        MODEL_META_DATA_NMAKE : _NmakeParser, # for Edk.x modules
        MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
        MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
        MODEL_PCD_FEATURE_FLAG : _PcdParser,
        MODEL_PCD_DYNAMIC_EX : _PcdParser,
        MODEL_PCD_DYNAMIC : _PcdParser,
        MODEL_EFI_SOURCE_FILE : _SourceFileParser,
        MODEL_EFI_GUID : MetaFileParser._CommonParser,
        MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser,
        MODEL_EFI_PPI : MetaFileParser._CommonParser,
        MODEL_EFI_DEPEX : _DepexParser,
        MODEL_EFI_BINARY_FILE : _BinaryFileParser,
        MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
    }
## DSC file parser class
#
#   @param      FilePath        The path of platform description file
#   @param      FileType        The raw data of DSC file
#   @param      Table           Database used to retrieve module/package information
#   @param      Macros          Macros used for replacement in file
#   @param      Owner           Owner ID (for sub-section parsing)
#   @param      From            ID from which the data comes (for !INCLUDE directive)
#
class DscParser(MetaFileParser):
    # DSC file supported data types (one type per section).
    # Keys are upper-cased section names; values are model-type constants.
    DataType = {
        TAB_SKUIDS.upper()                          :   MODEL_EFI_SKU_ID,
        TAB_DEFAULT_STORES.upper()                  :   MODEL_EFI_DEFAULT_STORES,
        TAB_LIBRARIES.upper()                       :   MODEL_EFI_LIBRARY_INSTANCE,
        TAB_LIBRARY_CLASSES.upper()                 :   MODEL_EFI_LIBRARY_CLASS,
        TAB_BUILD_OPTIONS.upper()                   :   MODEL_META_DATA_BUILD_OPTION,
        TAB_PACKAGES.upper()                        :   MODEL_META_DATA_PACKAGE,
        TAB_PCDS_FIXED_AT_BUILD_NULL.upper()        :   MODEL_PCD_FIXED_AT_BUILD,
        TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper()   :   MODEL_PCD_PATCHABLE_IN_MODULE,
        TAB_PCDS_FEATURE_FLAG_NULL.upper()          :   MODEL_PCD_FEATURE_FLAG,
        TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper()       :   MODEL_PCD_DYNAMIC_DEFAULT,
        TAB_PCDS_DYNAMIC_HII_NULL.upper()           :   MODEL_PCD_DYNAMIC_HII,
        TAB_PCDS_DYNAMIC_VPD_NULL.upper()           :   MODEL_PCD_DYNAMIC_VPD,
        TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper()    :   MODEL_PCD_DYNAMIC_EX_DEFAULT,
        TAB_PCDS_DYNAMIC_EX_HII_NULL.upper()        :   MODEL_PCD_DYNAMIC_EX_HII,
        TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper()        :   MODEL_PCD_DYNAMIC_EX_VPD,
        TAB_COMPONENTS.upper()                      :   MODEL_META_DATA_COMPONENT,
        TAB_DSC_DEFINES.upper()                     :   MODEL_META_DATA_HEADER,
        TAB_DSC_DEFINES_DEFINE                      :   MODEL_META_DATA_DEFINE,
        TAB_DSC_DEFINES_EDKGLOBAL                   :   MODEL_META_DATA_GLOBAL_DEFINE,
        TAB_INCLUDE.upper()                         :   MODEL_META_DATA_INCLUDE,
        TAB_IF.upper()                              :   MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
        TAB_IF_DEF.upper()                          :   MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
        TAB_IF_N_DEF.upper()                        :   MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
        TAB_ELSE_IF.upper()                         :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
        TAB_ELSE.upper()                            :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
        TAB_END_IF.upper()                          :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
        TAB_USER_EXTENSIONS.upper()                 :   MODEL_META_DATA_USER_EXTENSION,
        TAB_ERROR.upper()                           :   MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR,
    }

    # Valid names in [Defines] section; anything else in a <Defines>
    # subsection is rejected by _DefineParser.
    DefineKeywords = [
        "DSC_SPECIFICATION",
        "PLATFORM_NAME",
        "PLATFORM_GUID",
        "PLATFORM_VERSION",
        "SKUID_IDENTIFIER",
        "PCD_INFO_GENERATION",
        "PCD_VAR_CHECK_GENERATION",
        "SUPPORTED_ARCHITECTURES",
        "BUILD_TARGETS",
        "OUTPUT_DIRECTORY",
        "FLASH_DEFINITION",
        "BUILD_NUMBER",
        "RFC_LANGUAGES",
        "ISO_LANGUAGES",
        "TIME_STAMP_FILE",
        "VPD_TOOL_GUID",
        "FIX_LOAD_TOP_MEMORY_ADDRESS",
        "PREBUILD",
        "POSTBUILD"
    ]

    # Additional keywords valid only inside a component's <Defines> subsection.
    SubSectionDefineKeywords = [
        "FILE_GUID"
    ]

    SymbolPattern = ValueExpression.SymbolPattern

    # Shared across instances: every file pulled in via !include.
    IncludedFiles = set()
## Constructor of DscParser
#
#  Initialize object of DscParser
#
#   @param      FilePath        The path of platform description file
#   @param      FileType        The raw data of DSC file
#   @param      Arch            Default Arch value for filtering sections
#   @param      Table           Database used to retrieve module/package information
#   @param      Owner           Owner ID (for sub-section parsing)
#   @param      From            ID from which the data comes (for !INCLUDE directive)
#
def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1):
    # prevent re-initialization (the object may be cached and constructed twice
    # with the same backing table)
    if hasattr(self, "_Table") and self._Table is Table:
        return
    MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, Owner, From)
    self._Version = 0x00010005  # Only EDK2 dsc file is supported
    # to store conditional directive evaluation result
    self._DirectiveStack = []
    self._DirectiveEvalStack = []
    self._Enabled = 1

    #
    # Specify whether current line is in uncertain condition
    #
    self._InDirective = -1

    # Final valid replacable symbols
    self._Symbols = {}
    #
    #  Map the ID between the original table and new table to track
    #  the owner item
    #
    self._IdMapping = {-1:-1}

    self._Content = None
## Parser starter
def Start(self):
    """First-pass parse of the DSC file.

    Reads the file line by line, classifies each line (section header,
    subsection '{'/'}' delimiters, '!' directives, or content) and stores a
    raw record per applicable (Arch, ModuleType, DefaultStore) scope via
    self._Store.  Conditional directives are only recorded here; they are
    evaluated later in _PostProcess.
    """
    Content = ''
    try:
        with open(str(self.MetaFile), 'r') as File:
            Content = File.readlines()
    except:
        EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)

    # Per-arch owner item of the current component subsection, so records in
    # a { ... } block can point back to their component line.
    OwnerId = {}
    Content = self.ProcessMultipleLineCODEValue(Content)

    for Index in range(0, len(Content)):
        Line = CleanString(Content[Index])
        # skip empty line
        if Line == '':
            continue

        self._CurrentLine = Line
        self._LineIndex = Index
        if self._InSubsection and self._Owner[-1] == -1:
            self._Owner.append(self._LastItem)

        # section header
        if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
            self._SectionType = MODEL_META_DATA_SECTION_HEADER
        # subsection ending
        elif Line[0] == '}' and self._InSubsection:
            self._InSubsection = False
            self._SubsectionType = MODEL_UNKNOWN
            self._SubsectionName = ''
            self._Owner[-1] = -1
            OwnerId.clear()
            continue
        # subsection header
        elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
            self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER
        # directive line
        elif Line[0] == '!':
            TokenList = GetSplitValueList(Line, ' ', 1)
            if TokenList[0] == TAB_INCLUDE:
                # Record the !include once per scope so the included data is
                # merged for each applicable arch.
                for Arch, ModuleType, DefaultStore in self._Scope:
                    if self._SubsectionType != MODEL_UNKNOWN and Arch in OwnerId:
                        self._Owner[-1] = OwnerId[Arch]
                    self._DirectiveParser()
            else:
                self._DirectiveParser()
            continue
        if Line[0] == TAB_OPTION_START and not self._InSubsection:
            EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1), ExtraData=self.MetaFile)

        if self._InSubsection:
            SectionType = self._SubsectionType
        else:
            SectionType = self._SectionType
        self._ItemType = SectionType

        self._ValueList = ['', '', '']
        # "SET pcd = pcd_expression" syntax is not supported in Dsc file.
        if self._CurrentLine.upper().strip().startswith("SET "):
            EdkLogger.error('Parser', FORMAT_INVALID, '''"SET pcd = pcd_expression" syntax is not support in Dsc file''',
                            ExtraData=self._CurrentLine,
                            File=self.MetaFile, Line=self._LineIndex + 1)
        self._SectionParser[SectionType](self)
        if self._ValueList is None:
            # Parser consumed the line without producing a record
            continue
        #
        # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
        # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
        #
        for Arch, ModuleType, DefaultStore in self._Scope:
            Owner = self._Owner[-1]
            if self._SubsectionType != MODEL_UNKNOWN and Arch in OwnerId:
                Owner = OwnerId[Arch]
            self._LastItem = self._Store(
                                    self._ItemType,
                                    self._ValueList[0],
                                    self._ValueList[1],
                                    self._ValueList[2],
                                    Arch,
                                    ModuleType,
                                    DefaultStore,
                                    Owner,
                                    self._From,
                                    self._LineIndex + 1,
                                    - 1,
                                    self._LineIndex + 1,
                                    - 1,
                                    self._Enabled
                                    )
            if self._SubsectionType == MODEL_UNKNOWN and self._InSubsection:
                OwnerId[Arch] = self._LastItem

    if self._DirectiveStack:
        # Unclosed conditional left on the stack at end of file
        Type, Line, Text = self._DirectiveStack[-1]
        EdkLogger.error('Parser', FORMAT_INVALID, "No matching '!endif' found",
                        ExtraData=Text, File=self.MetaFile, Line=Line)
    self._Done()
## <subsection_header> parser
def _SubsectionHeaderParser(self):
    """Parse a <SubSectionName> header line inside a component block.

    Sets the subsection name/type; warns (without aborting) on names that
    are not in the DataType map.
    """
    Name = self._CurrentLine[1:-1].upper()
    self._SubsectionName = Name
    SubType = self.DataType.get(Name)
    if SubType is None:
        self._SubsectionType = MODEL_UNKNOWN
        EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
                       Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
    else:
        self._SubsectionType = SubType
    self._ValueList[0] = Name
## Directive statement parser
def _DirectiveParser(self):
    """Parse a '!' directive line (!if/!ifdef/!ifndef/!elseif/!else/!endif,
    !include, !error) and record it.

    Maintains self._DirectiveStack for !endif matching and self._InDirective
    as the conditional-nesting depth.  Expressions are NOT evaluated here;
    that happens in _PostProcess.
    """
    self._ValueList = ['', '', '']
    TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
    self._ValueList[0:len(TokenList)] = TokenList

    # Syntax check
    DirectiveName = self._ValueList[0].upper()
    if DirectiveName not in self.DataType:
        EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
                        File=self.MetaFile, Line=self._LineIndex + 1)

    if DirectiveName in ['!IF', '!IFDEF', '!IFNDEF']:
        self._InDirective += 1

    if DirectiveName in ['!ENDIF']:
        self._InDirective -= 1

    if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
        EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
                        File=self.MetaFile, Line=self._LineIndex + 1,
                        ExtraData=self._CurrentLine)

    ItemType = self.DataType[DirectiveName]
    # Conditionals apply to every scope; !include/!error use the current scope.
    Scope = [[TAB_COMMON, TAB_COMMON, TAB_COMMON]]
    if ItemType == MODEL_META_DATA_INCLUDE:
        Scope = self._Scope
    elif ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR:
        Scope = self._Scope
    if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
        # Remove all directives between !if and !endif, including themselves
        while self._DirectiveStack:
            # Remove any !else or !elseif
            DirectiveInfo = self._DirectiveStack.pop()
            if DirectiveInfo[0] in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
                                    MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
                                    MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
                break
        else:
            # while/else: stack exhausted without finding an opening !if*
            EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'",
                            File=self.MetaFile, Line=self._LineIndex + 1,
                            ExtraData=self._CurrentLine)
    elif ItemType not in {MODEL_META_DATA_INCLUDE, MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR}:
        # Break if there's a !else is followed by a !elseif
        if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \
           self._DirectiveStack and \
           self._DirectiveStack[-1][0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
            EdkLogger.error("Parser", FORMAT_INVALID, "'!elseif' after '!else'",
                            File=self.MetaFile, Line=self._LineIndex + 1,
                            ExtraData=self._CurrentLine)
        self._DirectiveStack.append((ItemType, self._LineIndex + 1, self._CurrentLine))

    #
    # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
    # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
    #
    for Arch, ModuleType, DefaultStore in Scope:
        self._LastItem = self._Store(
                                ItemType,
                                self._ValueList[0],
                                self._ValueList[1],
                                self._ValueList[2],
                                Arch,
                                ModuleType,
                                DefaultStore,
                                self._Owner[-1],
                                self._From,
                                self._LineIndex + 1,
                                - 1,
                                self._LineIndex + 1,
                                - 1,
                                0
                                )
## [defines] section parser
@ParseMacro
def _DefineParser(self):
    """Parse a 'Name = Value' line of the [Defines] section (or a component's
    <Defines> subsection)."""
    TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
    self._ValueList[1:len(TokenList)] = TokenList

    # Syntax check
    if not self._ValueList[1]:
        EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
                        ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
    if not self._ValueList[2]:
        EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
                        ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)

    # NOTE(review): as written, the unknown-keyword error only fires inside a
    # <Defines> subsection (self._InSubsection True); top-level names not in
    # DefineKeywords fall through silently — confirm this is intended.
    if (not self._ValueList[1] in self.DefineKeywords and
        (self._InSubsection and self._ValueList[1] not in self.SubSectionDefineKeywords)):
        EdkLogger.error('Parser', FORMAT_INVALID,
                        "Unknown keyword found: %s. "
                        "If this is a macro you must "
                        "add it as a DEFINE in the DSC" % self._ValueList[1],
                        ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
    if not self._InSubsection:
        self._Defines[self._ValueList[1]] = self._ValueList[2]
    self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
@ParseMacro
def _SkuIdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
if len(TokenList) not in (2, 3):
EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Number>|<UiName>[|<UiName>]'",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
self._ValueList[0:len(TokenList)] = TokenList
@ParseMacro
def _DefaultStoresParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
if len(TokenList) != 2:
EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Number>|<UiName>'",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
self._ValueList[0:len(TokenList)] = TokenList
## Parse Edk style of library modules
@ParseMacro
def _LibraryInstanceParser(self):
self._ValueList[0] = self._CurrentLine
def _DecodeCODEData(self):
pass
## PCD sections parser
#
# [PcdsFixedAtBuild]
# [PcdsPatchableInModule]
# [PcdsFeatureFlag]
# [PcdsDynamicEx
# [PcdsDynamicExDefault]
# [PcdsDynamicExVpd]
# [PcdsDynamicExHii]
# [PcdsDynamic]
# [PcdsDynamicDefault]
# [PcdsDynamicVpd]
# [PcdsDynamicHii]
#
@ParseMacro
def _PcdParser(self):
if self._PcdDataTypeCODE:
self._PcdCodeValue = self._PcdCodeValue + "\n " + self._CurrentLine
if self._CurrentLine.endswith(")}"):
self._CurrentLine = "|".join((self._CurrentPcdName, self._PcdCodeValue))
self._PcdDataTypeCODE = False
self._PcdCodeValue = ""
else:
self._ValueList = None
return
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
self._CurrentPcdName = TokenList[0]
if len(TokenList) == 2 and TokenList[1].strip().startswith("{CODE"):
self._PcdDataTypeCODE = True
self._PcdCodeValue = TokenList[1].strip()
if self._PcdDataTypeCODE:
if self._CurrentLine.endswith(")}"):
self._PcdDataTypeCODE = False
self._PcdCodeValue = ""
else:
self._ValueList = None
return
self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
PcdNameTockens = GetSplitValueList(TokenList[0], TAB_SPLIT)
if len(PcdNameTockens) == 2:
self._ValueList[0], self._ValueList[1] = PcdNameTockens[0], PcdNameTockens[1]
elif len(PcdNameTockens) == 3:
self._ValueList[0], self._ValueList[1] = ".".join((PcdNameTockens[0], PcdNameTockens[1])), PcdNameTockens[2]
elif len(PcdNameTockens) > 3:
self._ValueList[0], self._ValueList[1] = ".".join((PcdNameTockens[0], PcdNameTockens[1])), ".".join(PcdNameTockens[2:])
if len(TokenList) == 2:
self._ValueList[2] = TokenList[1]
if self._ValueList[0] == '' or self._ValueList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
File=self.MetaFile, Line=self._LineIndex + 1)
if self._ValueList[2] == '':
#
# The PCD values are optional for FIXEDATBUILD, PATCHABLEINMODULE, Dynamic/DynamicEx default
#
if self._SectionType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT):
return
EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
File=self.MetaFile, Line=self._LineIndex + 1)
# Validate the datum type of Dynamic Defaul PCD and DynamicEx Default PCD
ValueList = GetSplitValueList(self._ValueList[2])
if len(ValueList) > 1 and ValueList[1] in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64] \
and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT]:
EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1],
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
# Validate the VariableName of DynamicHii and DynamicExHii for PCD Entry must not be an empty string
if self._ItemType in [MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII]:
DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
if len(DscPcdValueList[0].replace('L', '').replace('"', '').strip()) == 0:
EdkLogger.error('Parser', FORMAT_INVALID, "The VariableName field in the HII format PCD entry must not be an empty string",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
# if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '1', 1);
elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1);
## [components] section parser
@ParseMacro
def _ComponentParser(self):
if self._CurrentLine[-1] == '{':
self._ValueList[0] = self._CurrentLine[0:-1].strip()
self._InSubsection = True
self._SubsectionType = MODEL_UNKNOWN
else:
self._ValueList[0] = self._CurrentLine
## [LibraryClasses] section
@ParseMacro
def _LibraryClassParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
if len(TokenList) < 2:
EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
File=self.MetaFile, Line=self._LineIndex + 1)
if TokenList[0] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
File=self.MetaFile, Line=self._LineIndex + 1)
if TokenList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
File=self.MetaFile, Line=self._LineIndex + 1)
self._ValueList[0:len(TokenList)] = TokenList
## [BuildOptions] section parser
@ParseMacro
def _BuildOptionParser(self):
self._CurrentLine = CleanString(self._CurrentLine, BuildOption=True)
TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
if len(TokenList2) == 2:
self._ValueList[0] = TokenList2[0] # toolchain family
self._ValueList[1] = TokenList2[1] # keys
else:
self._ValueList[1] = TokenList[0]
if len(TokenList) == 2: # value
self._ValueList[2] = TokenList[1]
if self._ValueList[1].count('_') != 4:
EdkLogger.error(
'Parser',
FORMAT_INVALID,
"'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
ExtraData=self._CurrentLine,
File=self.MetaFile,
Line=self._LineIndex + 1
)
## Override parent's method since we'll do all macro replacements in parser
@property
def _Macros(self):
Macros = {}
Macros.update(self._FileLocalMacros)
Macros.update(self._GetApplicableSectionMacro())
Macros.update(GlobalData.gEdkGlobal)
Macros.update(GlobalData.gPlatformDefines)
Macros.update(GlobalData.gCommandLineDefines)
# PCD cannot be referenced in macro definition
if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]:
Macros.update(self._Symbols)
if GlobalData.BuildOptionPcd:
for Item in GlobalData.BuildOptionPcd:
if isinstance(Item, tuple):
continue
PcdName, TmpValue = Item.split("=")
TmpValue = BuildOptionValue(TmpValue, self._GuidDict)
Macros[PcdName.strip()] = TmpValue
return Macros
def _PostProcess(self):
Processer = {
MODEL_META_DATA_SECTION_HEADER : self.__ProcessSectionHeader,
MODEL_META_DATA_SUBSECTION_HEADER : self.__ProcessSubsectionHeader,
MODEL_META_DATA_HEADER : self.__ProcessDefine,
MODEL_META_DATA_DEFINE : self.__ProcessDefine,
MODEL_META_DATA_GLOBAL_DEFINE : self.__ProcessDefine,
MODEL_META_DATA_INCLUDE : self.__ProcessDirective,
MODEL_META_DATA_PACKAGE : self.__ProcessPackages,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IF : self.__ProcessDirective,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE : self.__ProcessDirective,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF : self.__ProcessDirective,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF : self.__ProcessDirective,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF : self.__ProcessDirective,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF : self.__ProcessDirective,
MODEL_EFI_SKU_ID : self.__ProcessSkuId,
MODEL_EFI_DEFAULT_STORES : self.__ProcessDefaultStores,
MODEL_EFI_LIBRARY_INSTANCE : self.__ProcessLibraryInstance,
MODEL_EFI_LIBRARY_CLASS : self.__ProcessLibraryClass,
MODEL_PCD_FIXED_AT_BUILD : self.__ProcessPcd,
MODEL_PCD_PATCHABLE_IN_MODULE : self.__ProcessPcd,
MODEL_PCD_FEATURE_FLAG : self.__ProcessPcd,
MODEL_PCD_DYNAMIC_DEFAULT : self.__ProcessPcd,
MODEL_PCD_DYNAMIC_HII : self.__ProcessPcd,
MODEL_PCD_DYNAMIC_VPD : self.__ProcessPcd,
MODEL_PCD_DYNAMIC_EX_DEFAULT : self.__ProcessPcd,
MODEL_PCD_DYNAMIC_EX_HII : self.__ProcessPcd,
MODEL_PCD_DYNAMIC_EX_VPD : self.__ProcessPcd,
MODEL_META_DATA_COMPONENT : self.__ProcessComponent,
MODEL_META_DATA_BUILD_OPTION : self.__ProcessBuildOption,
MODEL_UNKNOWN : self._Skip,
MODEL_META_DATA_USER_EXTENSION : self._SkipUserExtension,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._ProcessError,
}
self._Table = MetaFileStorage(self._RawTable.DB, self.MetaFile, MODEL_FILE_DSC, True)
self._DirectiveStack = []
self._DirectiveEvalStack = []
self._FileWithError = self.MetaFile
self._FileLocalMacros = {}
self._SectionsMacroDict.clear()
GlobalData.gPlatformDefines = {}
# Get all macro and PCD which has straitforward value
self.__RetrievePcdValue()
self._Content = self._RawTable.GetAll()
self._ContentIndex = 0
self._InSubsection = False
while self._ContentIndex < len(self._Content) :
Id, self._ItemType, V1, V2, V3, S1, S2, S3, Owner, self._From, \
LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
if self._From < 0:
self._FileWithError = self.MetaFile
self._ContentIndex += 1
self._Scope = [[S1, S2, S3]]
#
# For !include directive, handle it specially,
# merge arch and module type in case of duplicate items
#
while self._ItemType == MODEL_META_DATA_INCLUDE:
if self._ContentIndex >= len(self._Content):
break
Record = self._Content[self._ContentIndex]
if LineStart == Record[10] and LineEnd == Record[12]:
if [Record[5], Record[6], Record[7]] not in self._Scope:
self._Scope.append([Record[5], Record[6], Record[7]])
self._ContentIndex += 1
else:
break
self._LineIndex = LineStart - 1
self._ValueList = [V1, V2, V3]
if Owner > 0 and Owner in self._IdMapping:
self._InSubsection = True
else:
self._InSubsection = False
try:
Processer[self._ItemType]()
except EvaluationException as Excpt:
#
# Only catch expression evaluation error here. We need to report
# the precise number of line on which the error occurred
#
if hasattr(Excpt, 'Pcd'):
if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
Info = GlobalData.gPlatformOtherPcds[Excpt.Pcd]
EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
" it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
" of the DSC file, and it is currently defined in this section:"
" %s, line #: %d." % (Excpt.Pcd, Info[0], Info[1]),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
else:
EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
else:
EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
except MacroException as Excpt:
EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
if self._ValueList is None:
continue
NewOwner = self._IdMapping.get(Owner, -1)
self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
self._LastItem = self._Store(
self._ItemType,
self._ValueList[0],
self._ValueList[1],
self._ValueList[2],
S1,
S2,
S3,
NewOwner,
self._From,
self._LineIndex + 1,
- 1,
self._LineIndex + 1,
- 1,
self._Enabled
)
self._IdMapping[Id] = self._LastItem
GlobalData.gPlatformDefines.update(self._FileLocalMacros)
self._PostProcessed = True
self._Content = None
def _ProcessError(self):
if not self._Enabled:
return
EdkLogger.error('Parser', ERROR_STATEMENT, self._ValueList[1], File=self.MetaFile, Line=self._LineIndex + 1)
def __ProcessSectionHeader(self):
self._SectionName = self._ValueList[0]
if self._SectionName in self.DataType:
self._SectionType = self.DataType[self._SectionName]
else:
self._SectionType = MODEL_UNKNOWN
def __ProcessSubsectionHeader(self):
self._SubsectionName = self._ValueList[0]
if self._SubsectionName in self.DataType:
self._SubsectionType = self.DataType[self._SubsectionName]
else:
self._SubsectionType = MODEL_UNKNOWN
def __RetrievePcdValue(self):
try:
with open(str(self.MetaFile), 'r') as File:
Content = File.readlines()
except:
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
GlobalData.gPlatformOtherPcds['DSCFILE'] = str(self.MetaFile)
for PcdType in (MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_HII,
MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_DYNAMIC_EX_HII,
MODEL_PCD_DYNAMIC_EX_VPD):
Records = self._RawTable.Query(PcdType, BelongsToItem= -1.0)
for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, Dummy4, ID, Line in Records:
Name = TokenSpaceGuid + '.' + PcdName
if Name not in GlobalData.gPlatformOtherPcds:
PcdLine = Line
while not Content[Line - 1].lstrip().startswith(TAB_SECTION_START):
Line -= 1
GlobalData.gPlatformOtherPcds[Name] = (CleanString(Content[Line - 1]), PcdLine, PcdType)
def __ProcessDefine(self):
if not self._Enabled:
return
Type, Name, Value = self._ValueList
Value = ReplaceMacro(Value, self._Macros, False)
#
# If it is <Defines>, return
#
if self._InSubsection:
self._ValueList = [Type, Name, Value]
return
if self._ItemType == MODEL_META_DATA_DEFINE:
if self._SectionType == MODEL_META_DATA_HEADER:
self._FileLocalMacros[Name] = Value
else:
self._ConstructSectionMacroDict(Name, Value)
elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
GlobalData.gEdkGlobal[Name] = Value
#
# Keyword in [Defines] section can be used as Macros
#
if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
self._FileLocalMacros[Name] = Value
self._ValueList = [Type, Name, Value]
def __ProcessDirective(self):
Result = None
if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF]:
Macros = self._Macros
Macros.update(GlobalData.gGlobalDefines)
try:
Result = ValueExpression(self._ValueList[1], Macros)()
except SymbolNotFound as Exc:
EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
Result = False
except WrnExpression as Excpt:
#
# Catch expression evaluation warning here. We need to report
# the precise number of line and return the evaluation result
#
EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
File=self._FileWithError, ExtraData=' '.join(self._ValueList),
Line=self._LineIndex + 1)
Result = Excpt.result
if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
self._DirectiveStack.append(self._ItemType)
if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
Result = bool(Result)
else:
Macro = self._ValueList[1]
Macro = Macro[2:-1] if (Macro.startswith("$(") and Macro.endswith(")")) else Macro
Result = Macro in self._Macros
if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF:
Result = not Result
self._DirectiveEvalStack.append(Result)
elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF:
self._DirectiveStack.append(self._ItemType)
self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
self._DirectiveEvalStack.append(bool(Result))
elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
self._DirectiveStack.append(self._ItemType)
self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
self._DirectiveEvalStack.append(True)
elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
# Back to the nearest !if/!ifdef/!ifndef
while self._DirectiveStack:
self._DirectiveEvalStack.pop()
Directive = self._DirectiveStack.pop()
if Directive in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
break
elif self._ItemType == MODEL_META_DATA_INCLUDE:
# The included file must be relative to workspace or same directory as DSC file
__IncludeMacros = {}
#
# Allow using system environment variables in path after !include
#
__IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE']
#
# Allow using MACROs comes from [Defines] section to keep compatible.
#
__IncludeMacros.update(self._Macros)
IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
#
# First search the include file under the same directory as DSC file
#
IncludedFile1 = PathClass(IncludedFile, self.MetaFile.Dir)
if self._Enabled:
ErrorCode, ErrorInfo1 = IncludedFile1.Validate()
if ErrorCode != 0:
#
# Also search file under the WORKSPACE directory
#
IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
if ErrorCode != 0:
EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
Line=self._LineIndex + 1, ExtraData=ErrorInfo1 + "\n" + ErrorInfo2)
self._FileWithError = IncludedFile1
FromItem = self._Content[self._ContentIndex - 1][0]
if self._InSubsection:
Owner = self._Content[self._ContentIndex - 1][8]
else:
Owner = self._Content[self._ContentIndex - 1][0]
IncludedFileTable = MetaFileStorage(self._RawTable.DB, IncludedFile1, MODEL_FILE_DSC, False, FromItem=FromItem)
Parser = DscParser(IncludedFile1, self._FileType, self._Arch, IncludedFileTable,
Owner=Owner, From=FromItem)
self.IncludedFiles.add (IncludedFile1)
# set the parser status with current status
Parser._SectionName = self._SectionName
Parser._SubsectionType = self._SubsectionType
Parser._InSubsection = self._InSubsection
Parser._SectionType = self._SectionType
Parser._Scope = self._Scope
Parser._Enabled = self._Enabled
# Parse the included file
Parser.StartParse()
# Insert all records in the table for the included file into dsc file table
Records = IncludedFileTable.GetAll()
if Records:
self._Content[self._ContentIndex:self._ContentIndex] = Records
self._Content.pop(self._ContentIndex - 1)
self._ValueList = None
self._ContentIndex -= 1
def __ProcessPackages(self):
self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
def __ProcessSkuId(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
for Value in self._ValueList]
def __ProcessDefaultStores(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
for Value in self._ValueList]
def __ProcessLibraryInstance(self):
self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
def __ProcessLibraryClass(self):
self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
def __ProcessPcd(self):
if self._ItemType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
self._ValueList[2] = ReplaceMacro(self._ValueList[2], self._Macros, RaiseError=True)
return
ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)
if not Valid:
if self._ItemType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE):
if ValList[1] != TAB_VOID and StructPattern.match(ValList[1]) is None and ValList[2]:
EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect. The datum type info should be VOID* or a valid struct name.", File=self._FileWithError,
Line=self._LineIndex + 1, ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1,
ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
PcdValue = ValList[Index]
if PcdValue and "." not in self._ValueList[0]:
try:
ValList[Index] = ValueExpression(PcdValue, self._Macros)(True)
except WrnExpression as Value:
ValList[Index] = Value.result
except:
pass
if ValList[Index] == 'True':
ValList[Index] = '1'
if ValList[Index] == 'False':
ValList[Index] = '0'
if (not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack):
GlobalData.gPlatformPcds[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
self._Symbols[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
try:
self._ValueList[2] = '|'.join(ValList)
except Exception:
print(ValList)
def __ProcessComponent(self):
self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
def __ProcessBuildOption(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
for Value in self._ValueList]
def DisableOverrideComponent(self,module_id):
for ori_id in self._IdMapping:
if self._IdMapping[ori_id] == module_id:
self._RawTable.DisableComponent(ori_id)
_SectionParser = {
MODEL_META_DATA_HEADER : _DefineParser,
MODEL_EFI_SKU_ID : _SkuIdParser,
MODEL_EFI_DEFAULT_STORES : _DefaultStoresParser,
MODEL_EFI_LIBRARY_INSTANCE : _LibraryInstanceParser,
MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser,
MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
MODEL_PCD_FEATURE_FLAG : _PcdParser,
MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser,
MODEL_PCD_DYNAMIC_HII : _PcdParser,
MODEL_PCD_DYNAMIC_VPD : _PcdParser,
MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser,
MODEL_PCD_DYNAMIC_EX_HII : _PcdParser,
MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser,
MODEL_META_DATA_COMPONENT : _ComponentParser,
MODEL_META_DATA_BUILD_OPTION : _BuildOptionParser,
MODEL_UNKNOWN : MetaFileParser._Skip,
MODEL_META_DATA_PACKAGE : MetaFileParser._PackageParser,
MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
MODEL_META_DATA_SECTION_HEADER : MetaFileParser._SectionHeaderParser,
MODEL_META_DATA_SUBSECTION_HEADER : _SubsectionHeaderParser,
}
## DEC file parser class
#
# @param FilePath The path of platform description file
# @param FileType The raw data of DSC file
# @param Table Database used to retrieve module/package information
# @param Macros Macros used for replacement in file
#
class DecParser(MetaFileParser):
    # DEC file supported data types (one type per section)
    DataType = {
        TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
        TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
        TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
        TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
        TAB_GUIDS.upper() : MODEL_EFI_GUID,
        TAB_PPIS.upper() : MODEL_EFI_PPI,
        TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
        TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
        TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
        TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
        TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
        TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
        TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION,
    }
    ## Constructor of DecParser
    #
    # Initialize object of DecParser
    #
    # @param FilePath The path of platform description file
    # @param FileType The raw data of DSC file
    # @param Arch Default Arch value for filtering sections
    # @param Table Database used to retrieve module/package information
    #
    def __init__(self, FilePath, FileType, Arch, Table):
        # prevent re-initialization
        if hasattr(self, "_Table"):
            return
        MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, -1)
        # comments collected for the current record, stored alongside it
        self._Comments = []
        self._Version = 0x00010005 # Only EDK2 dec file is supported
        self._AllPCDs = [] # Only for check duplicate PCD
        self._AllPcdDict = {}
        # state used while parsing a multi-line structure-PCD declaration
        self._CurrentStructurePcdName = ""
        self._include_flag = False
        self._package_flag = False
        self._RestofValue = ""
    ## Parser starter
    def Start(self):
        """Parse the whole DEC file and store all records into the database."""
        Content = ''
        try:
            with open(str(self.MetaFile), 'r') as File:
                Content = File.readlines()
        except:
            EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
        # fold multi-line {CODE ...} PCD values back into single logical lines
        Content = self.ProcessMultipleLineCODEValue(Content)
        self._DefinesCount = 0
        for Index in range(0, len(Content)):
            Line, Comment = CleanString2(Content[Index])
            self._CurrentLine = Line
            self._LineIndex = Index
            # save comment for later use
            if Comment:
                self._Comments.append((Comment, self._LineIndex + 1))
            # skip empty line
            if Line == '':
                continue
            # section header
            if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
                self._SectionHeaderParser()
                if self._SectionName == TAB_DEC_DEFINES.upper():
                    self._DefinesCount += 1
                self._Comments = []
                continue
            if self._SectionType == MODEL_UNKNOWN:
                EdkLogger.error("Parser", FORMAT_INVALID,
                                ""
                                "Not able to determine \"%s\" in which section."%self._CurrentLine,
                                self.MetaFile, self._LineIndex + 1)
            elif len(self._SectionType) == 0:
                self._Comments = []
                continue
            # section content
            self._ValueList = ['', '', '']
            self._SectionParser[self._SectionType[0]](self)
            # a None value list means the line carried no storable record
            if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
                self._ItemType = -1
                self._Comments = []
                continue
            #
            # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
            # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
            #
            for Arch, ModuleType, Type in self._Scope:
                self._LastItem = self._Store(
                    Type,
                    self._ValueList[0],
                    self._ValueList[1],
                    self._ValueList[2],
                    Arch,
                    ModuleType,
                    self._Owner[-1],
                    self._LineIndex + 1,
                    - 1,
                    self._LineIndex + 1,
                    - 1,
                    0
                    )
                # attach the pending comments to the record just stored
                for Comment, LineNo in self._Comments:
                    self._Store(
                        MODEL_META_DATA_COMMENT,
                        Comment,
                        self._ValueList[0],
                        self._ValueList[1],
                        Arch,
                        ModuleType,
                        self._LastItem,
                        LineNo,
                        - 1,
                        LineNo,
                        - 1,
                        0
                        )
            self._Comments = []
        # a DEC file must contain exactly one [Defines] section
        if self._DefinesCount > 1:
            EdkLogger.error('Parser', FORMAT_INVALID, 'Multiple [Defines] section is exist.', self.MetaFile )
        if self._DefinesCount == 0:
            EdkLogger.error('Parser', FORMAT_INVALID, 'No [Defines] section exist.', self.MetaFile)
        self._Done()
    ## Section header parser
    #
    # The section header is always in following format:
    #
    # [section_name.arch<.platform|module_type>]
    #
    def _SectionHeaderParser(self):
        self._Scope = []
        self._SectionName = ''
        self._SectionType = []
        ArchList = set()
        PrivateList = set()
        # normalize comma separators before splitting the header into items
        Line = re.sub(',[\s]*', TAB_COMMA_SPLIT, self._CurrentLine)
        for Item in Line[1:-1].split(TAB_COMMA_SPLIT):
            if Item == '':
                EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR,
                                "section name can NOT be empty or incorrectly use separator comma",
                                self.MetaFile, self._LineIndex + 1, self._CurrentLine)
            ItemList = Item.split(TAB_SPLIT)
            # different types of PCD are permissible in one section
            self._SectionName = ItemList[0].upper()
            # [Defines] must not carry arch/tag qualifiers or comma lists
            if self._SectionName == TAB_DEC_DEFINES.upper() and (len(ItemList) > 1 or len(Line.split(TAB_COMMA_SPLIT)) > 1):
                EdkLogger.error("Parser", FORMAT_INVALID, "Defines section format is invalid",
                                self.MetaFile, self._LineIndex + 1, self._CurrentLine)
            if self._SectionName in self.DataType:
                if self.DataType[self._SectionName] not in self._SectionType:
                    self._SectionType.append(self.DataType[self._SectionName])
            else:
                EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
                                self.MetaFile, self._LineIndex + 1, self._CurrentLine)
            if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
                EdkLogger.error(
                            'Parser',
                            FORMAT_INVALID,
                            "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
                            File=self.MetaFile,
                            Line=self._LineIndex + 1,
                            ExtraData=self._CurrentLine
                            )
            # S1 is always Arch
            if len(ItemList) > 1:
                S1 = ItemList[1].upper()
            else:
                S1 = TAB_ARCH_COMMON
            ArchList.add(S1)
            # S2 may be Platform or ModuleType
            if len(ItemList) > 2:
                S2 = ItemList[2].upper()
                # only Includes, GUIDs, PPIs, Protocols section have Private tag
                if self._SectionName in [TAB_INCLUDES.upper(), TAB_GUIDS.upper(), TAB_PROTOCOLS.upper(), TAB_PPIS.upper()]:
                    if S2 != 'PRIVATE':
                        EdkLogger.error("Parser", FORMAT_INVALID, 'Please use keyword "Private" as section tag modifier.',
                                        File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
            else:
                S2 = TAB_COMMON
            PrivateList.add(S2)
            if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
                self._Scope.append([S1, S2, self.DataType[self._SectionName]])
        # 'COMMON' must not be used with specific ARCHs at the same section
        if TAB_ARCH_COMMON in ArchList and len(ArchList) > 1:
            EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
                            File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
        # It is not permissible to mix section tags without the Private attribute with section tags with the Private attribute
        if TAB_COMMON in PrivateList and len(PrivateList) > 1:
            EdkLogger.error('Parser', FORMAT_INVALID, "Can't mix section tags without the Private attribute with section tags with the Private attribute",
                            File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
    ## [guids], [ppis] and [protocols] section parser
    #
    # Each line has the form:  <CName> = <GuidValueInCFormat>
    #
    @ParseMacro
    def _GuidParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        if len(TokenList) < 2:
            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        if TokenList[0] == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        if TokenList[1] == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        # the value must be a C-style registry-format GUID structure
        if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
                            ExtraData=self._CurrentLine + \
                                      " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
                            File=self.MetaFile, Line=self._LineIndex + 1)
        self._ValueList[0] = TokenList[0]
        self._ValueList[1] = TokenList[1]
        if self._ValueList[0] not in self._GuidDict:
            self._GuidDict[self._ValueList[0]] = self._ValueList[1]
    def ParsePcdName(self,namelist):
        """Split an array index out of the PCD-name part of a split name list.

        If the second element contains an array subscript (e.g. "Name[0]"),
        the subscript is moved into its own element so the name list becomes
        [TokenSpaceGuid, PcdName, "[index]"(.Fields)].
        """
        if "[" in namelist[1]:
            pcdname = namelist[1][:namelist[1].index("[")]
            arrayindex = namelist[1][namelist[1].index("["):]
            namelist[1] = pcdname
            if len(namelist) == 2:
                namelist.append(arrayindex)
            else:
                namelist[2] = ".".join((arrayindex,namelist[2]))
        return namelist
    ## PCD sections parser
    #
    # [PcdsFixedAtBuild]
    # [PcdsPatchableInModule]
    # [PcdsFeatureFlag]
    # [PcdsDynamicEx
    # [PcdsDynamic]
    #
    @ParseMacro
    def _PcdParser(self):
        # While inside a structure-PCD declaration, subsequent lines carry
        # <HeaderFiles>/<Packages> blocks or field assignments for that PCD.
        if self._CurrentStructurePcdName:
            self._ValueList[0] = self._CurrentStructurePcdName
            if "|" not in self._CurrentLine:
                if "<HeaderFiles>" == self._CurrentLine:
                    self._include_flag = True
                    self._package_flag = False
                    self._ValueList = None
                    return
                if "<Packages>" == self._CurrentLine:
                    self._package_flag = True
                    self._ValueList = None
                    self._include_flag = False
                    return
                if self._include_flag:
                    # md5 digest makes a unique key for each header-file entry
                    self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
                    self._ValueList[2] = self._CurrentLine
                if self._package_flag and "}" != self._CurrentLine:
                    self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
                    self._ValueList[2] = self._CurrentLine
                if self._CurrentLine == "}":
                    # closing brace ends the <Packages> / <HeaderFiles> block
                    self._package_flag = False
                    self._include_flag = False
                    self._ValueList = None
                    return
            else:
                PcdTockens = self._CurrentLine.split(TAB_VALUE_SPLIT)
                PcdNames = self.ParsePcdName(PcdTockens[0].split(TAB_SPLIT))
                if len(PcdNames) == 2:
                    if PcdNames[1].strip().endswith("]"):
                        # array-element assignment of the current structure PCD
                        PcdName = PcdNames[1][:PcdNames[1].index('[')]
                        Index = PcdNames[1][PcdNames[1].index('['):]
                        self._ValueList[0] = TAB_SPLIT.join((PcdNames[0],PcdName))
                        self._ValueList[1] = Index
                        self._ValueList[2] = PcdTockens[1]
                    else:
                        # a plain <Guid>.<Name> line ends the structure PCD scope
                        self._CurrentStructurePcdName = ""
                else:
                    if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]):
                        EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (self._CurrentStructurePcdName, TAB_SPLIT.join(PcdNames[:2])),
                                        File=self.MetaFile, Line=self._LineIndex + 1)
                    self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:])
                    self._ValueList[2] = PcdTockens[1]
        if not self._CurrentStructurePcdName:
            # Accumulate multi-line {CODE ...} default values until ")}" closes them.
            if self._PcdDataTypeCODE:
                if ")}" in self._CurrentLine:
                    ValuePart,RestofValue = self._CurrentLine.split(")}")
                    self._PcdCodeValue = self._PcdCodeValue + "\n " + ValuePart
                    self._CurrentLine = "|".join((self._CurrentPcdName, self._PcdCodeValue,RestofValue))
                    self._PcdDataTypeCODE = False
                    self._PcdCodeValue = ""
                else:
                    self._PcdCodeValue = self._PcdCodeValue + "\n " + self._CurrentLine
                    self._ValueList = None
                    return
            TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
            self._CurrentPcdName = TokenList[0]
            if len(TokenList) == 2 and TokenList[1].strip().startswith("{CODE"):
                if ")}" in self._CurrentLine:
                    self._PcdDataTypeCODE = False
                    self._PcdCodeValue = ""
                else:
                    self._PcdDataTypeCODE = True
                    self._PcdCodeValue = TokenList[1].strip()
                    self._ValueList = None
                    return
            self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
            ValueRe = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*')
            # check PCD information
            if self._ValueList[0] == '' or self._ValueList[1] == '':
                EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            # check format of token space GUID CName
            if not ValueRe.match(self._ValueList[0]):
                EdkLogger.error('Parser', FORMAT_INVALID, "The format of the token space GUID CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            # check format of PCD CName
            if not ValueRe.match(self._ValueList[1]):
                EdkLogger.error('Parser', FORMAT_INVALID, "The format of the PCD CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            # check PCD datum information
            if len(TokenList) < 2 or TokenList[1] == '':
                EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
            PtrValue = ValueRe.findall(TokenList[1])
            # Has VOID* type string, may contain "|" character in the string.
            if len(PtrValue) != 0:
                ptrValueList = re.sub(ValueRe, '', TokenList[1])
                ValueList = AnalyzePcdExpression(ptrValueList)
                ValueList[0] = PtrValue[0]
            else:
                ValueList = AnalyzePcdExpression(TokenList[1])
            # check if there's enough datum information given
            if len(ValueList) != 3:
                EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            # check default value
            if ValueList[0] == '':
                EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            # check datum type
            if ValueList[1] == '':
                EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            # check token of the PCD
            if ValueList[2] == '':
                EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
                                ExtraData=self._CurrentLine + \
                                          " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
                                File=self.MetaFile, Line=self._LineIndex + 1)
            PcdValue = ValueList[0]
            if PcdValue:
                try:
                    # evaluate expressions in the default value (GUID/PCD refs)
                    self._GuidDict.update(self._AllPcdDict)
                    ValueList[0] = ValueExpressionEx(ValueList[0], ValueList[1], self._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, Value, ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
            # check format of default value against the datum type
            IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
            if not IsValid:
                EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
                                File=self.MetaFile, Line=self._LineIndex + 1)
            if Cause == "StructurePcd":
                # the following lines belong to this structure PCD declaration
                self._CurrentStructurePcdName = TAB_SPLIT.join(self._ValueList[0:2])
                self._ValueList[0] = self._CurrentStructurePcdName
                self._ValueList[1] = ValueList[1].strip()
            # normalize boolean literals to 1/0
            if ValueList[0] in ['True', 'true', 'TRUE']:
                ValueList[0] = '1'
            elif ValueList[0] in ['False', 'false', 'FALSE']:
                ValueList[0] = '0'
            # check for duplicate PCD definition
            if (self._Scope[0], self._ValueList[0], self._ValueList[1]) in self._AllPCDs:
                EdkLogger.error('Parser', FORMAT_INVALID,
                                "The same PCD name and GUID have been already defined",
                                ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
            else:
                self._AllPCDs.append((self._Scope[0], self._ValueList[0], self._ValueList[1]))
                self._AllPcdDict[TAB_SPLIT.join(self._ValueList[0:2])] = ValueList[0]
            self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip()
    # Dispatch table: maps each section model type to its parser method.
    _SectionParser = {
        MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
        MODEL_EFI_INCLUDE : MetaFileParser._PathParser,
        MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
        MODEL_EFI_GUID : _GuidParser,
        MODEL_EFI_PPI : _GuidParser,
        MODEL_EFI_PROTOCOL : _GuidParser,
        MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
        MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
        MODEL_PCD_FEATURE_FLAG : _PcdParser,
        MODEL_PCD_DYNAMIC : _PcdParser,
        MODEL_PCD_DYNAMIC_EX : _PcdParser,
        MODEL_UNKNOWN : MetaFileParser._Skip,
        MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
    }
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # This module only defines parser classes; running it directly is a no-op.
    pass
| edk2-master | BaseTools/Source/Python/Workspace/MetaFileParser.py |
## @file
# This file is used to create a database used by build tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from Common.StringUtils import *
from Common.DataType import *
from Common.Misc import *
from types import *
from collections import OrderedDict
from CommonDataClass.DataClass import *
from Workspace.BuildClassObject import PackageBuildClassObject, StructurePcd, PcdClassObject
from Common.GlobalData import gGlobalDefines
from re import compile
## Platform build information from DEC file
#
# This class is used to retrieve information stored in database and convert them
# into PackageBuildClassObject form for easier use for AutoGen.
#
class DecBuildData(PackageBuildClassObject):
    # dict used to convert part of [Defines] to members of DecBuildData directly
    _PROPERTY_ = {
        #
        # Required Fields
        #
        TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
        TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
        TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
        TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
    }
    ## Constructor of DecBuildData
    #
    # Initialize object of DecBuildData
    #
    # @param FilePath The path of package description file
    # @param RawData The raw data of DEC file
    # @param BuildDataBase Database used to retrieve module information
    # @param Arch The target architecture
    # @param Platform (not used for DecBuildData)
    # @param Macros Macros used for replacement in DSC file
    #
    def __init__(self, File, RawData, BuildDataBase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
        self.MetaFile = File
        self._PackageDir = File.Dir
        self._RawData = RawData
        self._Bdb = BuildDataBase
        self._Arch = Arch
        self._Target = Target
        self._Toolchain = Toolchain
        self._Clear()
        self.UpdatePcdTypeDict()
    ## XXX[key] = value
    def __setitem__(self, key, value):
        self.__dict__[self._PROPERTY_[key]] = value
    ## value = XXX[key]
    def __getitem__(self, key):
        return self.__dict__[self._PROPERTY_[key]]
    ## "in" test support
    def __contains__(self, key):
        return key in self._PROPERTY_
    ## Set all internal used members of DecBuildData to None
    def _Clear(self):
        self._Header = None
        self._PackageName = None
        self._Guid = None
        self._Version = None
        self._PkgUniFile = None
        self._Protocols = None
        self._Ppis = None
        self._Guids = None
        self._Includes = None
        self._CommonIncludes = None
        self._LibraryClasses = None
        self._Pcds = None
        self._MacroDict = None
        self._PrivateProtocols = None
        self._PrivatePpis = None
        self._PrivateGuids = None
        self._PrivateIncludes = None
    ## Get current effective macros
    @property
    def _Macros(self):
        if self._MacroDict is None:
            self._MacroDict = dict(gGlobalDefines)
        return self._MacroDict
    ## Get architecture
    @property
    def Arch(self):
        return self._Arch
    ## Retrieve all information in [Defines] section
    #
    # (Retrieving all [Defines] information in one-shot is just to save time.)
    #
    def _GetHeaderInfo(self):
        RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
        for Record in RecordList:
            Name = Record[1]
            if Name in self:
                self[Name] = Record[2]
        self._Header = 'DUMMY'
    ## Retrieve package name
    @property
    def PackageName(self):
        if self._PackageName is None:
            if self._Header is None:
                self._GetHeaderInfo()
            if self._PackageName is None:
                EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
        return self._PackageName
    ## Retrieve file guid
    #
    # Fix: this property was previously (mis)named PackageName, which shadowed
    # the real PackageName property above and left no Guid accessor at all.
    @property
    def Guid(self):
        if self._Guid is None:
            if self._Header is None:
                self._GetHeaderInfo()
            if self._Guid is None:
                EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
        return self._Guid
    ## Retrieve package version
    @property
    def Version(self):
        if self._Version is None:
            if self._Header is None:
                self._GetHeaderInfo()
            if self._Version is None:
                self._Version = ''
        return self._Version
    ## Retrieve protocol definitions (name/value pairs)
    @property
    def Protocols(self):
        if self._Protocols is None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # protocol definition for given ARCH
            #
            ProtocolDict = tdict(True)
            PrivateProtocolDict = tdict(True)
            NameList = []
            PrivateNameList = []
            PublicNameList = []
            # find out all protocol definitions for specific and 'common' arch
            RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch]
            for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
                if PrivateFlag == 'PRIVATE':
                    if Name not in PrivateNameList:
                        PrivateNameList.append(Name)
                        PrivateProtocolDict[Arch, Name] = Guid
                    if Name in PublicNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                else:
                    if Name not in PublicNameList:
                        PublicNameList.append(Name)
                    if Name in PrivateNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                if Name not in NameList:
                    NameList.append(Name)
                ProtocolDict[Arch, Name] = Guid
            # use OrderedDict to keep the order
            self._Protocols = OrderedDict()
            self._PrivateProtocols = OrderedDict()
            for Name in NameList:
                #
                # limit the ARCH to self._Arch, if no self._Arch found, tdict
                # will automatically turn to 'common' ARCH for trying
                #
                self._Protocols[Name] = ProtocolDict[self._Arch, Name]
            for Name in PrivateNameList:
                self._PrivateProtocols[Name] = PrivateProtocolDict[self._Arch, Name]
        return self._Protocols
    ## Retrieve PPI definitions (name/value pairs)
    @property
    def Ppis(self):
        if self._Ppis is None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # PPI definition for given ARCH
            #
            PpiDict = tdict(True)
            PrivatePpiDict = tdict(True)
            NameList = []
            PrivateNameList = []
            PublicNameList = []
            # find out all PPI definitions for specific arch and 'common' arch
            RecordList = self._RawData[MODEL_EFI_PPI, self._Arch]
            for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
                if PrivateFlag == 'PRIVATE':
                    if Name not in PrivateNameList:
                        PrivateNameList.append(Name)
                        PrivatePpiDict[Arch, Name] = Guid
                    if Name in PublicNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                else:
                    if Name not in PublicNameList:
                        PublicNameList.append(Name)
                    if Name in PrivateNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                if Name not in NameList:
                    NameList.append(Name)
                PpiDict[Arch, Name] = Guid
            # use OrderedDict to keep the order
            self._Ppis = OrderedDict()
            self._PrivatePpis = OrderedDict()
            for Name in NameList:
                #
                # limit the ARCH to self._Arch, if no self._Arch found, tdict
                # will automatically turn to 'common' ARCH for trying
                #
                self._Ppis[Name] = PpiDict[self._Arch, Name]
            for Name in PrivateNameList:
                self._PrivatePpis[Name] = PrivatePpiDict[self._Arch, Name]
        return self._Ppis
    ## Retrieve GUID definitions (name/value pairs)
    @property
    def Guids(self):
        if self._Guids is None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # GUID definition for given ARCH
            #
            GuidDict = tdict(True)
            PrivateGuidDict = tdict(True)
            NameList = []
            PrivateNameList = []
            PublicNameList = []
            # find out all protocol definitions for specific and 'common' arch
            RecordList = self._RawData[MODEL_EFI_GUID, self._Arch]
            for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
                if PrivateFlag == 'PRIVATE':
                    if Name not in PrivateNameList:
                        PrivateNameList.append(Name)
                        PrivateGuidDict[Arch, Name] = Guid
                    if Name in PublicNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                else:
                    if Name not in PublicNameList:
                        PublicNameList.append(Name)
                    if Name in PrivateNameList:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
                if Name not in NameList:
                    NameList.append(Name)
                GuidDict[Arch, Name] = Guid
            # use OrderedDict to keep the order
            self._Guids = OrderedDict()
            self._PrivateGuids = OrderedDict()
            for Name in NameList:
                #
                # limit the ARCH to self._Arch, if no self._Arch found, tdict
                # will automatically turn to 'common' ARCH for trying
                #
                self._Guids[Name] = GuidDict[self._Arch, Name]
            for Name in PrivateNameList:
                self._PrivateGuids[Name] = PrivateGuidDict[self._Arch, Name]
        return self._Guids
    ## Retrieve public include paths declared in this package
    @property
    def Includes(self):
        if self._Includes is None or self._CommonIncludes is None:
            self._CommonIncludes = []
            self._Includes = []
            self._PrivateIncludes = []
            PublicIncludes = []
            RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch]
            Macros = self._Macros
            for Record in RecordList:
                File = PathClass(NormPath(Record[0], Macros), self._PackageDir, Arch=self._Arch)
                LineNo = Record[-1]
                # validate the path
                ErrorCode, ErrorInfo = File.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
                # avoid duplicate include path
                if File not in self._Includes:
                    self._Includes.append(File)
                if Record[4] == 'PRIVATE':
                    if File not in self._PrivateIncludes:
                        self._PrivateIncludes.append(File)
                    if File in PublicIncludes:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
                else:
                    if File not in PublicIncludes:
                        PublicIncludes.append(File)
                    if File in self._PrivateIncludes:
                        EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
                if Record[3] == TAB_COMMON:
                    self._CommonIncludes.append(File)
        return self._Includes
    ## Retrieve library class declarations (not used in build at present)
    @property
    def LibraryClasses(self):
        if self._LibraryClasses is None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # library class declaration for given ARCH
            #
            LibraryClassDict = tdict(True)
            LibraryClassSet = set()
            RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
            Macros = self._Macros
            for LibraryClass, File, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
                File = PathClass(NormPath(File, Macros), self._PackageDir, Arch=self._Arch)
                # check the file validation
                ErrorCode, ErrorInfo = File.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
                LibraryClassSet.add(LibraryClass)
                LibraryClassDict[Arch, LibraryClass] = File
            self._LibraryClasses = OrderedDict()
            for LibraryClass in LibraryClassSet:
                self._LibraryClasses[LibraryClass] = LibraryClassDict[self._Arch, LibraryClass]
        return self._LibraryClasses
    ## Retrieve PCD declarations
    @property
    def Pcds(self):
        if self._Pcds is None:
            self._Pcds = OrderedDict()
            self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
            self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
            self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
            self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
            self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
        return self._Pcds
    ## Split a structure-PCD field name into its array-dimension part and
    #  its field part, e.g. "[1].Field" -> ("[1]", "Field").
    def ParsePcdName(self,TokenCName):
        TokenCName = TokenCName.strip()
        if TokenCName.startswith("["):
            if "." in TokenCName:
                Demesionattr = TokenCName[:TokenCName.index(".")]
                Fields = TokenCName[TokenCName.index(".")+1:]
            else:
                Demesionattr = TokenCName
                Fields = ""
        else:
            Demesionattr = ""
            Fields = TokenCName
        return Demesionattr,Fields
    ## Build StructurePcd objects from the raw structure-PCD records.
    #
    # Records are grouped by <TokenSpaceGuid>.<PcdName>; <HeaderFiles> and
    # <Packages> entries feed the include/package lists, the record whose
    # TokenCName equals its DatumType defines the PCD itself, and all other
    # records are field-level default values.
    def ProcessStructurePcd(self, StructurePcdRawDataSet):
        s_pcd_set = OrderedDict()
        for s_pcd, LineNo in StructurePcdRawDataSet:
            if s_pcd.TokenSpaceGuidCName not in s_pcd_set:
                s_pcd_set[s_pcd.TokenSpaceGuidCName] = []
            s_pcd_set[s_pcd.TokenSpaceGuidCName].append((s_pcd, LineNo))
        str_pcd_set = []
        for pcdname in s_pcd_set:
            dep_pkgs = []
            struct_pcd = StructurePcd()
            for item, LineNo in s_pcd_set[pcdname]:
                if not item.TokenCName:
                    continue
                if "<HeaderFiles>" in item.TokenCName:
                    struct_pcd.StructuredPcdIncludeFile.append(item.DefaultValue)
                elif "<Packages>" in item.TokenCName:
                    dep_pkgs.append(item.DefaultValue)
                elif item.DatumType == item.TokenCName:
                    # this record is the PCD declaration itself
                    struct_pcd.copy(item)
                    struct_pcd.TokenValue = struct_pcd.TokenValue.strip("{").strip()
                    struct_pcd.TokenSpaceGuidCName, struct_pcd.TokenCName = pcdname.split(".")
                    struct_pcd.PcdDefineLineNo = LineNo
                    struct_pcd.PkgPath = self.MetaFile.File
                    struct_pcd.SetDecDefaultValue(item.DefaultValue,self.MetaFile.File,LineNo)
                else:
                    # field-level default value assignment
                    DemesionAttr, Fields = self.ParsePcdName(item.TokenCName)
                    struct_pcd.AddDefaultValue(Fields, item.DefaultValue, self.MetaFile.File, LineNo,DemesionAttr)
            struct_pcd.PackageDecs = dep_pkgs
            str_pcd_set.append(struct_pcd)
        return str_pcd_set
    ## Retrieve PCD declarations for given type
    def _GetPcd(self, Type):
        Pcds = OrderedDict()
        #
        # tdict is a special kind of dict, used for selecting correct
        # PCD declaration for given ARCH
        #
        PcdDict = tdict(True, 3)
        # for summarizing PCD
        PcdSet = []
        # find out all PCDs of the 'type'
        StrPcdSet = []
        RecordList = self._RawData[Type, self._Arch]
        for TokenSpaceGuid, PcdCName, Setting, Arch, PrivateFlag, Dummy1, Dummy2 in RecordList:
            PcdDict[Arch, PcdCName, TokenSpaceGuid] = (Setting, Dummy2)
            if not (PcdCName, TokenSpaceGuid) in PcdSet:
                PcdSet.append((PcdCName, TokenSpaceGuid))
        DefinitionPosition = {}
        for PcdCName, TokenSpaceGuid in PcdSet:
            #
            # limit the ARCH to self._Arch, if no self._Arch found, tdict
            # will automatically turn to 'common' ARCH and try again
            #
            Setting, LineNo = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
            if Setting is None:
                continue
            DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)
            validateranges, validlists, expressions = self._RawData.GetValidExpression(TokenSpaceGuid, PcdCName)
            PcdObj = PcdClassObject(
                                    PcdCName,
                                    TokenSpaceGuid,
                                    self._PCD_TYPE_STRING_[Type],
                                    DatumType,
                                    DefaultValue,
                                    TokenNumber,
                                    '',
                                    {},
                                    False,
                                    None,
                                    list(validateranges),
                                    list(validlists),
                                    list(expressions)
                                    )
            DefinitionPosition[PcdObj] = (self.MetaFile.File, LineNo)
            if "." in TokenSpaceGuid:
                # <Guid>.<Pcd> token space means a structure-PCD field record
                StrPcdSet.append((PcdObj, LineNo))
            else:
                Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdObj
        StructurePcds = self.ProcessStructurePcd(StrPcdSet)
        for pcd in StructurePcds:
            Pcds[pcd.TokenCName, pcd.TokenSpaceGuidCName, self._PCD_TYPE_STRING_[Type]] = pcd
        for pcd in Pcds.values():
            if pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
                if not pcd.IsAggregateDatumType():
                    EdkLogger.error('build', FORMAT_INVALID, "DatumType only support BOOLEAN, UINT8, UINT16, UINT32, UINT64, VOID* or a valid struct name.", DefinitionPosition[pcd][0], DefinitionPosition[pcd][1])
                elif not pcd.IsArray() and not pcd.StructuredPcdIncludeFile:
                    EdkLogger.error("build", PCD_STRUCTURE_PCD_ERROR, "The structure Pcd %s.%s header file is not found in %s line %s \n" % (pcd.TokenSpaceGuidCName, pcd.TokenCName, pcd.DefinitionPosition[0], pcd.DefinitionPosition[1] ))
        return Pcds
    ## Retrieve include paths declared with the 'common' arch tag
    @property
    def CommonIncludes(self):
        if self._CommonIncludes is None:
            self.Includes
        return self._CommonIncludes
| edk2-master | BaseTools/Source/Python/Workspace/DecBuildData.py |
## @file
# This file is used to create a database used by build tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
from Common.DataType import *
from Common.Misc import *
from Common.caching import cached_property, cached_class_function
from types import *
from .MetaFileParser import *
from collections import OrderedDict
from Workspace.BuildClassObject import ModuleBuildClassObject, LibraryClassObject, PcdClassObject
from Common.Expression import ValueExpressionEx, PcdPattern
## Get Protocol value from given packages
#
# @param CName        The CName of the protocol GUID
# @param PackageList  Packages to search
# @param Inffile      The consuming driver file, used for private-protocol filtering
#
# @retval GuidValue   if the CName is found in any given package
# @retval None        if the CName is not found in all given packages
#
def _ProtocolValue(CName, PackageList, Inffile = None):
    for Package in PackageList:
        VisibleKeys = list(Package.Protocols.keys())
        # Private protocols are only visible to modules living inside the package tree.
        if Inffile and Package._PrivateProtocols and not Inffile.startswith(Package.MetaFile.Dir):
            VisibleKeys = [Name for Name in Package.Protocols if Name not in Package._PrivateProtocols]
        if CName in VisibleKeys:
            return Package.Protocols[CName]
    return None
## Get PPI value from given packages
#
# @param CName        The CName of the PPI GUID
# @param PackageList  Packages to search
# @param Inffile      The consuming driver file, used for private-PPI filtering
#
# @retval GuidValue   if the CName is found in any given package
# @retval None        if the CName is not found in all given packages
#
def _PpiValue(CName, PackageList, Inffile = None):
    for Package in PackageList:
        VisibleKeys = list(Package.Ppis.keys())
        # Private PPIs are only visible to modules living inside the package tree.
        if Inffile and Package._PrivatePpis and not Inffile.startswith(Package.MetaFile.Dir):
            VisibleKeys = [Name for Name in Package.Ppis if Name not in Package._PrivatePpis]
        if CName in VisibleKeys:
            return Package.Ppis[CName]
    return None
## Module build information from INF file
#
# This class is used to retrieve information stored in database and convert them
# into ModuleBuildClassObject form for easier use for AutoGen.
#
class InfBuildData(ModuleBuildClassObject):
    # dict used to convert part of [Defines] to members of InfBuildData directly
    # (keys are [Defines] entry names, values are the private attribute names
    # assigned through __setitem__ below)
    _PROPERTY_ = {
        #
        # Required Fields
        #
        TAB_INF_DEFINES_BASE_NAME : "_BaseName",
        TAB_INF_DEFINES_FILE_GUID : "_Guid",
        TAB_INF_DEFINES_MODULE_TYPE : "_ModuleType",
        #
        # Optional Fields
        #
        # TAB_INF_DEFINES_INF_VERSION : "_AutoGenVersion",
        TAB_INF_DEFINES_COMPONENT_TYPE : "_ComponentType",
        TAB_INF_DEFINES_MAKEFILE_NAME : "_MakefileName",
        # TAB_INF_DEFINES_CUSTOM_MAKEFILE : "_CustomMakefile",
        TAB_INF_DEFINES_DPX_SOURCE :"_DxsFile",
        TAB_INF_DEFINES_VERSION_NUMBER : "_Version",
        TAB_INF_DEFINES_VERSION_STRING : "_Version",
        TAB_INF_DEFINES_VERSION : "_Version",
        TAB_INF_DEFINES_PCD_IS_DRIVER : "_PcdIsDriver",
        TAB_INF_DEFINES_SHADOW : "_Shadow"
    }
    # regular expression for converting XXX_FLAGS in [nmake] section to new type
    _NMAKE_FLAG_PATTERN_ = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
    # dict used to convert old tool name used in [nmake] section to new ones
    _TOOL_CODE_ = {
        "C" : "CC",
        BINARY_FILE_TYPE_LIB : "SLINK",
        "LINK" : "DLINK",
    }
    ## Constructor of InfBuildData
    #
    # Initialize object of InfBuildData
    #
    # @param FilePath The path of platform description file
    # @param RawData The raw data of DSC file
    # @param BuildDataBase Database used to retrieve module/package information
    # @param Arch The target architecture
    # @param Platform The name of platform employing this module
    # @param Macros Macros used for replacement in DSC file
    #
    def __init__(self, FilePath, RawData, BuildDatabase, Arch=TAB_ARCH_COMMON, Target=None, Toolchain=None):
        self.MetaFile = FilePath
        self._ModuleDir = FilePath.Dir
        self._RawData = RawData
        self._Bdb = BuildDatabase  # workspace database used to look up dependent packages
        self._Arch = Arch
        self._Target = Target
        self._Toolchain = Toolchain
        self._Platform = TAB_COMMON
        # The attributes below start as None ("not parsed yet"); the property
        # getters invoke _GetHeaderInfo() on demand and then apply defaults.
        self._TailComments = None
        self._BaseName = None
        self._DxsFile = None
        self._ModuleType = None
        self._ComponentType = None
        self._BuildType = None
        self._Guid = None
        self._Version = None
        self._PcdIsDriver = None
        self._BinaryModule = None
        self._Shadow = None
        self._MakefileName = None
        self._CustomMakefile = None
        self._Specification = None
        self._LibraryClass = None
        self._ModuleEntryPointList = None
        self._ModuleUnloadImageList = None
        self._ConstructorList = None
        self._DestructorList = None
        self._Defs = OrderedDict()
        self._ProtocolComments = None
        self._PpiComments = None
        self._GuidsUsedByPcd = OrderedDict()
        self._GuidComments = None
        self._PcdComments = None
        self._BuildOptions = None
        self._DependencyFileList = None
        self.UpdatePcdTypeDict()
        self.LibInstances = []
        self.ReferenceModules = set()
    def SetReferenceModule(self,Module):
        # Record a module that links this one (library case); returns self for chaining.
        self.ReferenceModules.add(Module)
        return self
## XXX[key] = value
def __setitem__(self, key, value):
self.__dict__[self._PROPERTY_[key]] = value
## value = XXX[key]
def __getitem__(self, key):
return self.__dict__[self._PROPERTY_[key]]
## "in" test support
def __contains__(self, key):
return key in self._PROPERTY_
## Get current effective macros
@cached_property
def _Macros(self):
RetVal = {}
return RetVal
## Get architecture
@cached_property
def Arch(self):
return self._Arch
## Return the name of platform employing this module
@cached_property
def Platform(self):
return self._Platform
@cached_property
def HeaderComments(self):
return [a[0] for a in self._RawData[MODEL_META_DATA_HEADER_COMMENT]]
@cached_property
def TailComments(self):
return [a[0] for a in self._RawData[MODEL_META_DATA_TAIL_COMMENT]]
    ## Retrieve all information in [Defines] section
    #
    # (Retrieving all [Defines] information in one-shot is just to save time.)
    #
    @cached_class_function
    def _GetHeaderInfo(self):
        # Parse every [Defines] record once, filling the private attributes that
        # back the public properties (BaseName, Guid, BuildType, Specification...).
        RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
        for Record in RecordList:
            Name, Value = Record[1], ReplaceMacro(Record[2], self._Macros, False)
            # items defined _PROPERTY_ don't need additional processing
            if Name in self:
                self[Name] = Value
                self._Defs[Name] = Value
                self._Macros[Name] = Value
            # some special items in [Defines] section need special treatment
            elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
                # EFI_* is a legacy alias for UEFI_*; normalize the key
                if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
                    Name = 'UEFI_SPECIFICATION_VERSION'
                if self._Specification is None:
                    self._Specification = OrderedDict()
                self._Specification[Name] = GetHexVerValue(Value)
                if self._Specification[Name] is None:
                    EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
                                    "'%s' format is not supported for %s" % (Value, Name),
                                    File=self.MetaFile, Line=Record[-1])
            elif Name == 'LIBRARY_CLASS':
                if self._LibraryClass is None:
                    self._LibraryClass = []
                ValueList = GetSplitValueList(Value)
                LibraryClass = ValueList[0]
                # optional second field restricts the supported module types
                if len(ValueList) > 1:
                    SupModuleList = GetSplitValueList(ValueList[1], ' ')
                else:
                    SupModuleList = SUP_MODULE_LIST
                self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList))
            elif Name == 'ENTRY_POINT':
                if self._ModuleEntryPointList is None:
                    self._ModuleEntryPointList = []
                self._ModuleEntryPointList.append(Value)
            elif Name == 'UNLOAD_IMAGE':
                if self._ModuleUnloadImageList is None:
                    self._ModuleUnloadImageList = []
                if not Value:
                    continue
                self._ModuleUnloadImageList.append(Value)
            elif Name == 'CONSTRUCTOR':
                if self._ConstructorList is None:
                    self._ConstructorList = []
                if not Value:
                    continue
                self._ConstructorList.append(Value)
            elif Name == 'DESTRUCTOR':
                if self._DestructorList is None:
                    self._DestructorList = []
                if not Value:
                    continue
                self._DestructorList.append(Value)
            elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
                # value is either "makefile" (applies to both families) or
                # "FAMILY|makefile" where FAMILY is MSFT or GCC
                TokenList = GetSplitValueList(Value)
                if self._CustomMakefile is None:
                    self._CustomMakefile = {}
                if len(TokenList) < 2:
                    self._CustomMakefile[TAB_COMPILER_MSFT] = TokenList[0]
                    self._CustomMakefile['GCC'] = TokenList[0]
                else:
                    if TokenList[0] not in [TAB_COMPILER_MSFT, 'GCC']:
                        EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
                                        "No supported family [%s]" % TokenList[0],
                                        File=self.MetaFile, Line=Record[-1])
                    self._CustomMakefile[TokenList[0]] = TokenList[1]
            else:
                # everything else is kept verbatim in Defines and usable as a macro
                self._Defs[Name] = Value
                self._Macros[Name] = Value
        #
        # Retrieve information in sections specific to Edk.x modules
        #
        if not self._ModuleType:
            EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
                            "MODULE_TYPE is not given", File=self.MetaFile)
        if self._ModuleType not in SUP_MODULE_LIST:
            RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
            for Record in RecordList:
                Name = Record[1]
                if Name == "MODULE_TYPE":
                    LineNo = Record[6]
                    break
            EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
                            "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
                            File=self.MetaFile, Line=LineNo)
        # module types gated behind minimum PI specification versions
        if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
            if self._ModuleType == SUP_MODULE_SMM_CORE:
                EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)
        if (self._Specification is None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x00010032):
            if self._ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
                EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "MM_CORE_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.MetaFile)
            if self._ModuleType == SUP_MODULE_MM_STANDALONE:
                EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "MM_STANDALONE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x00010032", File=self.MetaFile)
        # derive the build type: PCI option ROM, HII resource module, or the module type itself
        if 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \
           and 'PCI_CLASS_CODE' in self._Defs and 'PCI_REVISION' in self._Defs:
            self._BuildType = 'UEFI_OPTIONROM'
            if 'PCI_COMPRESS' in self._Defs:
                if self._Defs['PCI_COMPRESS'] not in ('TRUE', 'FALSE'):
                    EdkLogger.error("build", FORMAT_INVALID, "Expected TRUE/FALSE for PCI_COMPRESS: %s" % self.MetaFile)
        elif 'UEFI_HII_RESOURCE_SECTION' in self._Defs \
           and self._Defs['UEFI_HII_RESOURCE_SECTION'] == 'TRUE':
            self._BuildType = 'UEFI_HII'
        else:
            self._BuildType = self._ModuleType.upper()
        if self._DxsFile:
            File = PathClass(NormPath(self._DxsFile), self._ModuleDir, Arch=self._Arch)
            # check the file validation
            ErrorCode, ErrorInfo = File.Validate(".dxs", CaseSensitive=False)
            if ErrorCode != 0:
                # NOTE(review): LineNo is only bound in the unsupported-MODULE_TYPE
                # branch above; reaching this error with a supported module type
                # would raise UnboundLocalError -- confirm and fix upstream.
                EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
                                File=self.MetaFile, Line=LineNo)
            if not self._DependencyFileList:
                self._DependencyFileList = []
            self._DependencyFileList.append(File)
    ## Retrieve file version
    @cached_property
    def AutoGenVersion(self):
        # INF_VERSION from [Defines], packed into a single integer.
        # A dotted value like '1.27' is encoded by rendering each component as
        # four octal digits and parsing the concatenation as hexadecimal
        # (legacy edk2 encoding); a plain number is parsed directly with
        # int(..., 0) (so hex/decimal both work). Defaults to 0x00010000 when
        # no INF_VERSION record exists.
        RetVal = 0x00010000
        RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
        for Record in RecordList:
            if Record[1] == TAB_INF_DEFINES_INF_VERSION:
                if '.' in Record[2]:
                    ValueList = Record[2].split('.')
                    Major = '%04o' % int(ValueList[0], 0)
                    Minor = '%04o' % int(ValueList[1], 0)
                    RetVal = int('0x' + Major + Minor, 0)
                else:
                    RetVal = int(Record[2], 0)
                break
        return RetVal
## Retrieve BASE_NAME
@cached_property
def BaseName(self):
if self._BaseName is None:
self._GetHeaderInfo()
if self._BaseName is None:
EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BASE_NAME name", File=self.MetaFile)
return self._BaseName
## Retrieve DxsFile
@cached_property
def DxsFile(self):
if self._DxsFile is None:
self._GetHeaderInfo()
if self._DxsFile is None:
self._DxsFile = ''
return self._DxsFile
## Retrieve MODULE_TYPE
@cached_property
def ModuleType(self):
if self._ModuleType is None:
self._GetHeaderInfo()
if self._ModuleType is None:
self._ModuleType = SUP_MODULE_BASE
if self._ModuleType not in SUP_MODULE_LIST:
self._ModuleType = SUP_MODULE_USER_DEFINED
return self._ModuleType
## Retrieve COMPONENT_TYPE
@cached_property
def ComponentType(self):
if self._ComponentType is None:
self._GetHeaderInfo()
if self._ComponentType is None:
self._ComponentType = SUP_MODULE_USER_DEFINED
return self._ComponentType
## Retrieve "BUILD_TYPE"
@cached_property
def BuildType(self):
if self._BuildType is None:
self._GetHeaderInfo()
if not self._BuildType:
self._BuildType = SUP_MODULE_BASE
return self._BuildType
## Retrieve file guid
@cached_property
def Guid(self):
if self._Guid is None:
self._GetHeaderInfo()
if self._Guid is None:
self._Guid = '00000000-0000-0000-0000-000000000000'
return self._Guid
## Retrieve module version
@cached_property
def Version(self):
if self._Version is None:
self._GetHeaderInfo()
if self._Version is None:
self._Version = '0.0'
return self._Version
## Retrieve PCD_IS_DRIVER
@cached_property
def PcdIsDriver(self):
if self._PcdIsDriver is None:
self._GetHeaderInfo()
if self._PcdIsDriver is None:
self._PcdIsDriver = ''
return self._PcdIsDriver
## Retrieve SHADOW
@cached_property
def Shadow(self):
if self._Shadow is None:
self._GetHeaderInfo()
if self._Shadow and self._Shadow.upper() == 'TRUE':
self._Shadow = True
else:
self._Shadow = False
return self._Shadow
## Retrieve CUSTOM_MAKEFILE
@cached_property
def CustomMakefile(self):
if self._CustomMakefile is None:
self._GetHeaderInfo()
if self._CustomMakefile is None:
self._CustomMakefile = {}
return self._CustomMakefile
## Retrieve EFI_SPECIFICATION_VERSION
@cached_property
def Specification(self):
if self._Specification is None:
self._GetHeaderInfo()
if self._Specification is None:
self._Specification = {}
return self._Specification
## Retrieve LIBRARY_CLASS
@cached_property
def LibraryClass(self):
if self._LibraryClass is None:
self._GetHeaderInfo()
if self._LibraryClass is None:
self._LibraryClass = []
return self._LibraryClass
## Retrieve ENTRY_POINT
@cached_property
def ModuleEntryPointList(self):
if self._ModuleEntryPointList is None:
self._GetHeaderInfo()
if self._ModuleEntryPointList is None:
self._ModuleEntryPointList = []
return self._ModuleEntryPointList
## Retrieve UNLOAD_IMAGE
@cached_property
def ModuleUnloadImageList(self):
if self._ModuleUnloadImageList is None:
self._GetHeaderInfo()
if self._ModuleUnloadImageList is None:
self._ModuleUnloadImageList = []
return self._ModuleUnloadImageList
## Retrieve CONSTRUCTOR
@cached_property
def ConstructorList(self):
if self._ConstructorList is None:
self._GetHeaderInfo()
if self._ConstructorList is None:
self._ConstructorList = []
return self._ConstructorList
## Retrieve DESTRUCTOR
@cached_property
def DestructorList(self):
if self._DestructorList is None:
self._GetHeaderInfo()
if self._DestructorList is None:
self._DestructorList = []
return self._DestructorList
## Retrieve definies other than above ones
@cached_property
def Defines(self):
self._GetHeaderInfo()
return self._Defs
    ## Retrieve binary files
    @cached_class_function
    def _GetBinaries(self):
        # Collect validated PathClass objects for every [Binaries] record.
        RetVal = []
        RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform]
        Macros = self._Macros
        Macros['PROCESSOR'] = self._Arch
        for Record in RecordList:
            FileType = Record[0]
            LineNo = Record[-1]
            Target = TAB_COMMON
            FeatureFlag = []
            if Record[2]:
                TokenList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
                if TokenList:
                    Target = TokenList[0]
                if len(TokenList) > 1:
                    # NOTE(review): FeatureFlag is assigned the raw record slice
                    # here but never consumed afterwards -- looks vestigial; confirm.
                    FeatureFlag = Record[1:]
            File = PathClass(NormPath(Record[1], Macros), self._ModuleDir, '', FileType, True, self._Arch, '', Target)
            # check the file validation
            ErrorCode, ErrorInfo = File.Validate()
            if ErrorCode != 0:
                EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
            RetVal.append(File)
        return RetVal
## Retrieve binary files with error check.
@cached_property
def Binaries(self):
RetVal = self._GetBinaries()
if GlobalData.gIgnoreSource and not RetVal:
ErrorInfo = "The INF file does not contain any RetVal to use in creating the image\n"
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, ExtraData=ErrorInfo, File=self.MetaFile)
return RetVal
    ## Retrieve source files
    @cached_property
    def Sources(self):
        # Validated PathClass list for [Sources], plus any dependency files
        # (e.g. the DXS file) discovered while parsing [Defines].
        self._GetHeaderInfo()
        # Ignore all source files in a binary build mode
        if GlobalData.gIgnoreSource:
            return []
        RetVal = []
        RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
        Macros = self._Macros
        for Record in RecordList:
            LineNo = Record[-1]
            ToolChainFamily = Record[1]
            # OptionsList := [TagName, ToolCode, FeatureFlag]
            OptionsList = ['', '', '']
            TokenList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
            for Index in range(len(TokenList)):
                OptionsList[Index] = TokenList[Index]
            if OptionsList[2]:
                # skip sources whose FeatureFlag expression evaluates to FALSE
                FeaturePcdExpression = self.CheckFeatureFlagPcd(OptionsList[2])
                if not FeaturePcdExpression:
                    continue
            File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, '',
                             '', False, self._Arch, ToolChainFamily, '', OptionsList[0], OptionsList[1])
            # check the file validation
            ErrorCode, ErrorInfo = File.Validate()
            if ErrorCode != 0:
                EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
            RetVal.append(File)
        # add any previously found dependency files to the source list
        if self._DependencyFileList:
            RetVal.extend(self._DependencyFileList)
        return RetVal
## Retrieve library classes employed by this module
@cached_property
def LibraryClasses(self):
RetVal = OrderedDict()
RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
for Record in RecordList:
Lib = Record[0]
Instance = Record[1]
if Instance:
Instance = NormPath(Instance, self._Macros)
RetVal[Lib] = Instance
else:
RetVal[Lib] = None
return RetVal
## Retrieve library names (for Edk.x style of modules)
@cached_property
def Libraries(self):
RetVal = []
RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
for Record in RecordList:
LibraryName = ReplaceMacro(Record[0], self._Macros, False)
# in case of name with '.lib' extension, which is unusual in Edk.x inf
LibraryName = os.path.splitext(LibraryName)[0]
if LibraryName not in RetVal:
RetVal.append(LibraryName)
return RetVal
    @cached_property
    def ProtocolComments(self):
        # Comments are collected as a side effect of evaluating Protocols.
        self.Protocols
        return self._ProtocolComments
    ## Retrieve protocols consumed/produced by this module
    @cached_property
    def Protocols(self):
        # Map of protocol CName -> GUID value resolved from dependent packages;
        # an unresolvable CName is a fatal build error.
        RetVal = OrderedDict()
        self._ProtocolComments = OrderedDict()
        RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
        for Record in RecordList:
            CName = Record[0]
            Value = _ProtocolValue(CName, self.Packages, self.MetaFile.Path)
            if Value is None:
                PackageList = "\n\t".join(str(P) for P in self.Packages)
                EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                "Value of Protocol [%s] is not found under [Protocols] section in" % CName,
                                ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
            RetVal[CName] = Value
            CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
            self._ProtocolComments[CName] = [a[0] for a in CommentRecords]
        return RetVal
    @cached_property
    def PpiComments(self):
        # Comments are collected as a side effect of evaluating Ppis.
        self.Ppis
        return self._PpiComments
    ## Retrieve PPIs consumed/produced by this module
    @cached_property
    def Ppis(self):
        # Map of PPI CName -> GUID value resolved from dependent packages;
        # an unresolvable CName is a fatal build error.
        RetVal = OrderedDict()
        self._PpiComments = OrderedDict()
        RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
        for Record in RecordList:
            CName = Record[0]
            Value = _PpiValue(CName, self.Packages, self.MetaFile.Path)
            if Value is None:
                PackageList = "\n\t".join(str(P) for P in self.Packages)
                EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                "Value of PPI [%s] is not found under [Ppis] section in " % CName,
                                ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
            RetVal[CName] = Value
            CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
            self._PpiComments[CName] = [a[0] for a in CommentRecords]
        return RetVal
    @cached_property
    def GuidComments(self):
        # Comments are collected as a side effect of evaluating Guids.
        self.Guids
        return self._GuidComments
    ## Retrieve GUIDs consumed/produced by this module
    @cached_property
    def Guids(self):
        # Map of GUID CName -> value, including token-space GUIDs referenced by
        # any PCD section (those are also recorded in self._GuidsUsedByPcd).
        RetVal = OrderedDict()
        self._GuidComments = OrderedDict()
        RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
        for Record in RecordList:
            CName = Record[0]
            Value = GuidValue(CName, self.Packages, self.MetaFile.Path)
            if Value is None:
                PackageList = "\n\t".join(str(P) for P in self.Packages)
                EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                "Value of Guid [%s] is not found under [Guids] section in" % CName,
                                ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
            RetVal[CName] = Value
            CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
            self._GuidComments[CName] = [a[0] for a in CommentRecords]
        # also resolve the token-space GUIDs used by every PCD declaration
        for Type in [MODEL_PCD_FIXED_AT_BUILD,MODEL_PCD_PATCHABLE_IN_MODULE,MODEL_PCD_FEATURE_FLAG,MODEL_PCD_DYNAMIC,MODEL_PCD_DYNAMIC_EX]:
            RecordList = self._RawData[Type, self._Arch, self._Platform]
            for TokenSpaceGuid, _, _, _, _, _, LineNo in RecordList:
                # get the guid value
                if TokenSpaceGuid not in RetVal:
                    Value = GuidValue(TokenSpaceGuid, self.Packages, self.MetaFile.Path)
                    if Value is None:
                        PackageList = "\n\t".join(str(P) for P in self.Packages)
                        EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                        "Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid,
                                        ExtraData=PackageList, File=self.MetaFile, Line=LineNo)
                    RetVal[TokenSpaceGuid] = Value
                    self._GuidsUsedByPcd[TokenSpaceGuid] = Value
        return RetVal
    ## Retrieve include paths necessary for this module (for Edk.x style of modules)
    @cached_property
    def Includes(self):
        # Absolute, existing include directories from [Includes]; relative
        # entries are resolved against the module dir, others against the workspace.
        RetVal = []
        Macros = self._Macros
        Macros['PROCESSOR'] = GlobalData.gEdkGlobal.get('PROCESSOR', self._Arch)
        RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch, self._Platform]
        for Record in RecordList:
            File = NormPath(Record[0], Macros)
            if File[0] == '.':
                File = os.path.join(self._ModuleDir, File)
            else:
                File = mws.join(GlobalData.gWorkspace, File)
            # RealPath returns a falsy value for non-existent paths, which are dropped
            File = RealPath(os.path.normpath(File))
            if File:
                RetVal.append(File)
        return RetVal
    ## Retrieve packages this module depends on
    @cached_property
    def Packages(self):
        # Package build objects for every [Packages] entry; each DEC is parsed
        # here because protocol/PPI/GUID values come from the packages.
        RetVal = []
        RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform]
        Macros = self._Macros
        for Record in RecordList:
            File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
            # check the file validation
            ErrorCode, ErrorInfo = File.Validate('.dec')
            if ErrorCode != 0:
                LineNo = Record[-1]
                EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
            # parse this package now. we need it to get protocol/ppi/guid value
            RetVal.append(self._Bdb[File, self._Arch, self._Target, self._Toolchain])
        return RetVal
    ## Retrieve PCD comments
    @cached_property
    def PcdComments(self):
        # Comments are collected as a side effect of evaluating Pcds.
        self.Pcds
        return self._PcdComments
    ## Retrieve PCDs used in this module
    @cached_property
    def Pcds(self):
        # All PCDs of every access type, keyed by (PcdCName, TokenSpaceGuid).
        self._PcdComments = OrderedDict()
        RetVal = OrderedDict()
        RetVal.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
        RetVal.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
        RetVal.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
        RetVal.update(self._GetPcd(MODEL_PCD_DYNAMIC))
        RetVal.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
        return RetVal
    @cached_property
    def ModulePcdList(self):
        # Alias for Pcds, kept for interface parity with other build data classes.
        RetVal = self.Pcds
        return RetVal
    @cached_property
    def LibraryPcdList(self):
        # PCDs contributed by linked library instances, excluding ones the
        # module itself declares; [] for library modules (they have no instances).
        if bool(self.LibraryClass):
            return []
        RetVal = {}
        Pcds = set()
        for Library in self.LibInstances:
            PcdsInLibrary = OrderedDict()
            for Key in Library.Pcds:
                # skip PCDs already owned by the module or by an earlier library
                if Key in self.Pcds or Key in Pcds:
                    continue
                Pcds.add(Key)
                PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
            RetVal[Library] = PcdsInLibrary
        return RetVal
    @cached_property
    def PcdsName(self):
        # Set of (PcdCName, TokenSpaceGuid) pairs referenced in any PCD section.
        PcdsName = set()
        for Type in (MODEL_PCD_FIXED_AT_BUILD,MODEL_PCD_PATCHABLE_IN_MODULE,MODEL_PCD_FEATURE_FLAG,MODEL_PCD_DYNAMIC,MODEL_PCD_DYNAMIC_EX):
            RecordList = self._RawData[Type, self._Arch, self._Platform]
            for TokenSpaceGuid, PcdCName, _, _, _, _, _ in RecordList:
                PcdsName.add((PcdCName, TokenSpaceGuid))
        return PcdsName
## Retrieve build options specific to this module
@cached_property
def BuildOptions(self):
if self._BuildOptions is None:
self._BuildOptions = OrderedDict()
RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform]
for Record in RecordList:
ToolChainFamily = Record[0]
ToolChain = Record[1]
Option = Record[2]
if (ToolChainFamily, ToolChain) not in self._BuildOptions or Option.startswith('='):
self._BuildOptions[ToolChainFamily, ToolChain] = Option
else:
# concatenate the option string if they're for the same tool
OptionString = self._BuildOptions[ToolChainFamily, ToolChain]
self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
return self._BuildOptions
    ## Retrieve dependency expression
    @cached_property
    def Depex(self):
        # Evaluated [Depex] token lists per (Arch, ModuleType); GUID-valued
        # tokens (protocol/PPI/GUID CNames, .inf names, VOID* PCDs) are resolved.
        RetVal = tdict(False, 2)
        # If the module has only Binaries and no Sources, then ignore [Depex]
        if not self.Sources and self.Binaries:
            return RetVal
        RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
        # PEIM and DXE drivers must have a valid [Depex] section
        if len(self.LibraryClass) == 0 and len(RecordList) == 0:
            if self.ModuleType == SUP_MODULE_DXE_DRIVER or self.ModuleType == SUP_MODULE_PEIM or self.ModuleType == SUP_MODULE_DXE_SMM_DRIVER or \
                self.ModuleType == SUP_MODULE_DXE_SAL_DRIVER or self.ModuleType == SUP_MODULE_DXE_RUNTIME_DRIVER:
                EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No [Depex] section or no valid expression in [Depex] section for [%s] module" \
                                % self.ModuleType, File=self.MetaFile)
        # USER_DEFINED/HOST_APPLICATION modules must qualify each [Depex] section
        if len(RecordList) != 0 and (self.ModuleType == SUP_MODULE_USER_DEFINED or self.ModuleType == SUP_MODULE_HOST_APPLICATION):
            for Record in RecordList:
                if Record[4] not in [SUP_MODULE_PEIM, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER]:
                    EdkLogger.error('build', FORMAT_INVALID,
                                    "'%s' module must specify the type of [Depex] section" % self.ModuleType,
                                    File=self.MetaFile)
        TemporaryDictionary = OrderedDict()
        for Record in RecordList:
            DepexStr = ReplaceMacro(Record[0], self._Macros, False)
            Arch = Record[3]
            ModuleType = Record[4]
            TokenList = DepexStr.split()
            if (Arch, ModuleType) not in TemporaryDictionary:
                TemporaryDictionary[Arch, ModuleType] = []
            DepexList = TemporaryDictionary[Arch, ModuleType]
            for Token in TokenList:
                if Token in DEPEX_SUPPORTED_OPCODE_SET:
                    # dependency-expression opcode (AND/OR/...), kept verbatim
                    DepexList.append(Token)
                elif Token.endswith(".inf"): # module file name
                    ModuleFile = os.path.normpath(Token)
                    # NOTE(review): self.BuildDatabase is never assigned by this
                    # class (the database is stored as self._Bdb in __init__), so
                    # this branch would raise AttributeError if reached -- confirm.
                    Module = self.BuildDatabase[ModuleFile]
                    if Module is None:
                        EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "Module is not found in active platform",
                                        ExtraData=Token, File=self.MetaFile, Line=Record[-1])
                    DepexList.append(Module.Guid)
                else:
                    # it use the Fixed PCD format
                    if '.' in Token:
                        if tuple(Token.split('.')[::-1]) not in self.Pcds:
                            EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "PCD [{}] used in [Depex] section should be listed in module PCD section".format(Token), File=self.MetaFile, Line=Record[-1])
                        else:
                            if self.Pcds[tuple(Token.split('.')[::-1])].DatumType != TAB_VOID:
                                EdkLogger.error('build', FORMAT_INVALID, "PCD [{}] used in [Depex] section should be VOID* datum type".format(Token), File=self.MetaFile, Line=Record[-1])
                        Value = Token
                    else:
                        # get the GUID value now
                        Value = _ProtocolValue(Token, self.Packages, self.MetaFile.Path)
                        if Value is None:
                            Value = _PpiValue(Token, self.Packages, self.MetaFile.Path)
                            if Value is None:
                                Value = GuidValue(Token, self.Packages, self.MetaFile.Path)
                    if Value is None:
                        PackageList = "\n\t".join(str(P) for P in self.Packages)
                        EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                        "Value of [%s] is not found in" % Token,
                                        ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
                    DepexList.append(Value)
        for Arch, ModuleType in TemporaryDictionary:
            RetVal[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType]
        return RetVal
## Retrieve dependency expression
@cached_property
def DepexExpression(self):
RetVal = tdict(False, 2)
RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
TemporaryDictionary = OrderedDict()
for Record in RecordList:
DepexStr = ReplaceMacro(Record[0], self._Macros, False)
Arch = Record[3]
ModuleType = Record[4]
TokenList = DepexStr.split()
if (Arch, ModuleType) not in TemporaryDictionary:
TemporaryDictionary[Arch, ModuleType] = ''
for Token in TokenList:
TemporaryDictionary[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType] + Token.strip() + ' '
for Arch, ModuleType in TemporaryDictionary:
RetVal[Arch, ModuleType] = TemporaryDictionary[Arch, ModuleType]
return RetVal
    def LocalPkg(self):
        """Return the workspace-relative path of the first file ending in 'dec'
        (case-insensitive) found in this module's top-level directory.

        NOTE(review): implicitly returns None when no such file exists or the
        module path has no parent directory -- callers must tolerate that; confirm.
        """
        module_path = self.MetaFile.File
        subdir = os.path.split(module_path)[0]
        TopDir = ""
        # walk up until TopDir is the first path component under the root
        while subdir:
            subdir,TopDir = os.path.split(subdir)
        for file_name in os.listdir(os.path.join(self.MetaFile.Root,TopDir)):
            if file_name.upper().endswith("DEC"):
                pkg = os.path.join(TopDir,file_name)
                return pkg
    @cached_class_function
    def GetGuidsUsedByPcd(self):
        # NOTE(review): this touches the Guid property, but _GuidsUsedByPcd is
        # populated by the Guids property -- confirm the intended trigger.
        self.Guid
        return self._GuidsUsedByPcd
    ## Retrieve PCD for given type
    def _GetPcd(self, Type):
        # Build PcdClassObject entries for one PCD model type, resolving each
        # PCD's type, token value, datum type and default from the packages.
        Pcds = OrderedDict()
        PcdDict = tdict(True, 4)
        PcdList = []
        RecordList = self._RawData[Type, self._Arch, self._Platform]
        for TokenSpaceGuid, PcdCName, Setting, Arch, Platform, Id, LineNo in RecordList:
            PcdDict[Arch, Platform, PcdCName, TokenSpaceGuid] = (Setting, LineNo)
            PcdList.append((PcdCName, TokenSpaceGuid))
            CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Id]
            Comments = []
            for CmtRec in CommentRecords:
                Comments.append(CmtRec[0])
            self._PcdComments[TokenSpaceGuid, PcdCName] = Comments
        # resolve PCD type, value, datum info, etc. by getting its definition from package
        _GuidDict = self.Guids.copy()
        for PcdCName, TokenSpaceGuid in PcdList:
            PcdRealName = PcdCName
            Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
            if Setting is None:
                continue
            ValueList = AnalyzePcdData(Setting)
            DefaultValue = ValueList[0]
            Pcd = PcdClassObject(
                    PcdCName,
                    TokenSpaceGuid,
                    '',
                    '',
                    DefaultValue,
                    '',
                    '',
                    {},
                    False,
                    self.Guids[TokenSpaceGuid]
                    )
            if Type == MODEL_PCD_PATCHABLE_IN_MODULE and ValueList[1]:
                # Patch PCD: TokenSpace.PcdCName|Value|Offset
                Pcd.Offset = ValueList[1]
                # For "mixed" PCDs (same name declared with several access types)
                # the package entry is renamed Name_Type so each type is distinct.
                if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
                    for Package in self.Packages:
                        for key in Package.Pcds:
                            if (Package.Pcds[key].TokenCName, Package.Pcds[key].TokenSpaceGuidCName) == (PcdRealName, TokenSpaceGuid):
                                for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
                                    Pcd_Type = item[0].split('_')[-1]
                                    if Pcd_Type == Package.Pcds[key].Type:
                                        Value = Package.Pcds[key]
                                        Value.TokenCName = Package.Pcds[key].TokenCName + '_' + Pcd_Type
                                        if len(key) == 2:
                                            newkey = (Value.TokenCName, key[1])
                                        elif len(key) == 3:
                                            newkey = (Value.TokenCName, key[1], key[2])
                                        del Package.Pcds[key]
                                        Package.Pcds[newkey] = Value
                                        break
                                    else:
                                        pass
                            else:
                                pass
            # get necessary info from package declaring this PCD
            for Package in self.Packages:
                #
                # 'dynamic' in INF means its type is determined by platform;
                # if platform doesn't give its type, use 'lowest' one in the
                # following order, if any
                #
                # TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_PATCHABLE_IN_MODULE, TAB_PCDS_FEATURE_FLAG, TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_EX
                #
                _GuidDict.update(Package.Guids)
                PcdType = self._PCD_TYPE_STRING_[Type]
                if Type == MODEL_PCD_DYNAMIC:
                    Pcd.Pending = True
                    for T in PCD_TYPE_LIST:
                        if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
                            for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
                                if str(item[0]).endswith(T) and (item[0], item[1], T) in Package.Pcds:
                                    PcdType = T
                                    PcdCName = item[0]
                                    break
                                else:
                                    pass
                            break
                        else:
                            if (PcdRealName, TokenSpaceGuid, T) in Package.Pcds:
                                PcdType = T
                                break
                else:
                    Pcd.Pending = False
                    if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
                        for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
                            Pcd_Type = item[0].split('_')[-1]
                            if Pcd_Type == PcdType:
                                PcdCName = item[0]
                                break
                            else:
                                pass
                    else:
                        pass
                if (PcdCName, TokenSpaceGuid, PcdType) in Package.Pcds:
                    PcdInPackage = Package.Pcds[PcdCName, TokenSpaceGuid, PcdType]
                    Pcd.Type = PcdType
                    Pcd.TokenValue = PcdInPackage.TokenValue
                    #
                    # Check whether the token value exist or not.
                    #
                    if Pcd.TokenValue is None or Pcd.TokenValue == "":
                        EdkLogger.error(
                                'build',
                                FORMAT_INVALID,
                                "No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdRealName, str(Package)),
                                File=self.MetaFile, Line=LineNo,
                                ExtraData=None
                                )
                    #
                    # Check hexadecimal token value length and format.
                    #
                    ReIsValidPcdTokenValue = re.compile(r"^[0][x|X][0]*[0-9a-fA-F]{1,8}$", re.DOTALL)
                    if Pcd.TokenValue.startswith("0x") or Pcd.TokenValue.startswith("0X"):
                        if ReIsValidPcdTokenValue.match(Pcd.TokenValue) is None:
                            EdkLogger.error(
                                    'build',
                                    FORMAT_INVALID,
                                    "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
                                    File=self.MetaFile, Line=LineNo,
                                    ExtraData=None
                                    )
                    #
                    # Check decimal token value length and format.
                    #
                    else:
                        try:
                            TokenValueInt = int (Pcd.TokenValue, 10)
                            if (TokenValueInt < 0 or TokenValueInt > 4294967295):
                                EdkLogger.error(
                                        'build',
                                        FORMAT_INVALID,
                                        "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
                                        File=self.MetaFile, Line=LineNo,
                                        ExtraData=None
                                        )
                        except:
                            EdkLogger.error(
                                    'build',
                                    FORMAT_INVALID,
                                    "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
                                    File=self.MetaFile, Line=LineNo,
                                    ExtraData=None
                                    )
                    # inherit datum info from the package; the INF default (if any)
                    # overrides the package default after expression evaluation
                    Pcd.DatumType = PcdInPackage.DatumType
                    Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize
                    Pcd.InfDefaultValue = Pcd.DefaultValue
                    if not Pcd.DefaultValue:
                        Pcd.DefaultValue = PcdInPackage.DefaultValue
                    else:
                        try:
                            Pcd.DefaultValue = ValueExpressionEx(Pcd.DefaultValue, Pcd.DatumType, _GuidDict)(True)
                        except BadExpression as Value:
                            EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(TokenSpaceGuid, PcdRealName, Pcd.DefaultValue, Value),
                                            File=self.MetaFile, Line=LineNo)
                    break
            else:
                # for-else: no package declared this PCD at all -- fatal
                EdkLogger.error(
                        'build',
                        FORMAT_INVALID,
                        "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdRealName, self.MetaFile),
                        File=self.MetaFile, Line=LineNo,
                        ExtraData="\t%s" % '\n\t'.join(str(P) for P in self.Packages)
                        )
            Pcds[PcdCName, TokenSpaceGuid] = Pcd
        return Pcds
## check whether current module is binary module
@property
def IsBinaryModule(self):
if (self.Binaries and not self.Sources) or GlobalData.gIgnoreSource:
return True
return False
    def CheckFeatureFlagPcd(self,Instance):
        """Evaluate a FeatureFlag expression or single PCD reference for this module.

        Instance is either a single '<TokenSpaceGuid>.<PcdName>' reference or a
        full feature-flag expression.  Returns True/False; evaluation failures
        are logged as warnings and treated as False.
        """
        # Start from the platform-wide final PCD values; local (INF-scope)
        # values are layered on top of this copy below.
        Pcds = GlobalData.gPlatformFinalPcds.copy()
        if PcdPattern.search(Instance):
            # Single PCD reference: it must be declared in this module's INF
            # as a FeaturePcd or FixedPcd of an allowed type.
            # Note: the tuple is reversed to (PcdName, TokenSpaceGuid), the key
            # order used by self.Pcds.
            PcdTuple = tuple(Instance.split('.')[::-1])
            if PcdTuple in self.Pcds:
                if not (self.Pcds[PcdTuple].Type == 'FeatureFlag' or self.Pcds[PcdTuple].Type == 'FixedAtBuild'):
                    EdkLogger.error('build', FORMAT_INVALID,
                                    "\nFeatureFlagPcd must be defined in a [PcdsFeatureFlag] or [PcdsFixedAtBuild] section of Dsc or Dec file",
                                    File=str(self), ExtraData=Instance)
                # Platform value takes precedence; only fall back to the INF
                # default when the platform did not set one.
                if not Instance in Pcds:
                    Pcds[Instance] = self.Pcds[PcdTuple].DefaultValue
            else: #if PcdTuple not in self.Pcds:
                EdkLogger.error('build', FORMAT_INVALID,
                                "\nFeatureFlagPcd must be defined in [FeaturePcd] or [FixedPcd] of Inf file",
                                File=str(self), ExtraData=Instance)
            # Fast path for literal '0'/'1' values; anything else goes through
            # the full expression evaluator below.
            if Instance in Pcds:
                if Pcds[Instance] == '0':
                    return False
                elif Pcds[Instance] == '1':
                    return True
            try:
                Value = ValueExpression(Instance, Pcds)()
                if Value == True:
                    return True
                return False
            except:
                EdkLogger.warn('build', FORMAT_INVALID,"The FeatureFlagExpression cannot be evaluated", File=str(self), ExtraData=Instance)
                return False
        else:
            # Full expression: expose every FeatureFlag/FixedAtBuild PCD of the
            # module as '<Guid>.<Name>' symbols, then evaluate the expression.
            for Name, Guid in self.Pcds:
                if self.Pcds[(Name, Guid)].Type == 'FeatureFlag' or self.Pcds[(Name, Guid)].Type == 'FixedAtBuild':
                    PcdFullName = '%s.%s' % (Guid, Name);
                    if not PcdFullName in Pcds:
                        Pcds[PcdFullName] = self.Pcds[(Name, Guid)].DefaultValue
            try:
                Value = ValueExpression(Instance, Pcds)()
                if Value == True:
                    return True
                return False
            except:
                EdkLogger.warn('build', FORMAT_INVALID, "The FeatureFlagExpression cannot be evaluated", File=str(self), ExtraData=Instance)
                return False
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    """Append every list value of CopyFromDict onto the matching list in CopyToDict.

    Every key of CopyFromDict must already exist in CopyToDict with a list
    value (a missing key raises KeyError, as before).
    """
    for DictKey, SourceItems in CopyFromDict.items():
        CopyToDict[DictKey].extend(SourceItems)
| edk2-master | BaseTools/Source/Python/Workspace/InfBuildData.py |
# @file
# Split a file into two pieces at the request offset.
#
# Copyright (c) 2021, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
import unittest
import tempfile
import os
import shutil
import Split.Split as sp
import struct as st
class TestSplit(unittest.TestCase):
    """Unit tests for Split.Split.splitFile: split position clamping and
    output file/folder path resolution."""
    def setUp(self):
        # Fresh temp dir with a 1024-byte input file for every test.
        self.tmpdir = tempfile.mkdtemp()
        self.binary_file = os.path.join(self.tmpdir, "Binary.bin")
        self.create_inputfile()
    def tearDown(self):
        if os.path.exists(self.tmpdir):
            shutil.rmtree(self.tmpdir)
    def test_splitFile_position(self):
        """Splitting at each position yields piece sizes clamped to [0, filesize]."""
        position = [-1, 0, 256, 512, 700, 1024, 2048]
        # Expected (size of piece 1, size of piece 2) for each position above;
        # out-of-range positions are clamped to the file boundaries.
        result = [(0, 1024), (0, 1024), (256, 768),
                  (512, 512), (700, 324), (1024, 0), (1024, 0)]
        outputfolder = self.tmpdir
        for index, po in enumerate(position):
            try:
                sp.splitFile(self.binary_file, po)
            except Exception as e:
                self.assertTrue(False, msg="splitFile function error")
            # Default output names are "<input>1" and "<input>2" next to the input.
            output1 = os.path.join(outputfolder, "Binary.bin1")
            output2 = os.path.join(outputfolder, "Binary.bin2")
            with open(output1, "rb") as f1:
                size1 = len(f1.read())
            with open(output2, "rb") as f2:
                size2 = len(f2.read())
            ex_result = result[index]
            self.assertEqual(size1, ex_result[0])
            self.assertEqual(size2, ex_result[1])
    def create_inputfile(self):
        # Write 512 little-endian uint16 values (1024 bytes total).
        with open(self.binary_file, "wb") as fout:
            for i in range(512):
                fout.write(st.pack("<H", i))
    def test_splitFile_outputfile(self):
        """outputfile1 may be None, a bare name, a relative or an absolute path."""
        output = [
            None,
            "Binary.bin",
            "Binary1.bin",
            r"output/Binary1.bin",
            os.path.abspath( r"output/Binary1.bin")
        ]
        # Relative names resolve against the current working directory;
        # None falls back to "<input>1" beside the input file.
        expected_output = [
            os.path.join(os.path.dirname(self.binary_file),"Binary.bin1" ),
            os.path.join(os.getcwd(),"Binary.bin"),
            os.path.join(os.getcwd(),"Binary1.bin"),
            os.path.join(os.getcwd(),r"output/Binary1.bin"),
            os.path.join(os.path.abspath( r"output/Binary1.bin"))
        ]
        for index, o in enumerate(output):
            try:
                sp.splitFile(self.binary_file, 123, outputfile1=o)
            except Exception as e:
                self.assertTrue(False, msg="splitFile function error")
            self.assertTrue(os.path.exists(expected_output[index]))
            # Restore the input because splitting may have consumed/renamed it.
            self.create_inputfile()
    def test_splitFile_outputfolder(self):
        """outputdir combines with outputfile1 unless the latter is absolute."""
        outputfolder = [
            None,
            "output",
            r"output1/output2",
            os.path.abspath("output"),
            "output"
        ]
        output = [
            None,
            None,
            "Binary1.bin",
            r"output/Binary1.bin",
            os.path.abspath( r"output_1/Binary1.bin")
        ]
        expected_output = [
            os.path.join(os.path.dirname(self.binary_file),"Binary.bin1" ),
            os.path.join(os.getcwd(),"output", "Binary.bin1"),
            os.path.join(os.getcwd(), r"output1/output2" , "Binary1.bin"),
            os.path.join(os.getcwd(),r"output", "output/Binary1.bin"),
            os.path.join(os.path.abspath( r"output/Binary1.bin"))
        ]
        for index, o in enumerate(outputfolder):
            try:
                sp.splitFile(self.binary_file, 123, outputdir=o,outputfile1=output[index])
            except Exception as e:
                self.assertTrue(False, msg="splitFile function error")
            self.assertTrue(os.path.exists(expected_output[index]))
            self.create_inputfile()
# Allow running this test module directly with "python test_split.py".
if __name__ == '__main__':
    unittest.main()
| edk2-master | BaseTools/Source/Python/tests/Split/test_split.py |
## @file
# Python 'PatchPcdValue' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/PatchPcdValue/__init__.py |
## @file
# Patch value into the binary file.
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import Common.LongFilePathOs as os
from Common.LongFilePathSupport import OpenLongFilePath as open
import sys
from optparse import OptionParser
from optparse import make_option
from Common.BuildToolError import *
import Common.EdkLogger as EdkLogger
from Common.BuildVersion import gBUILD_VERSION
import array
from Common.DataType import *
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
__copyright__ = "Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved."
## PatchBinaryFile method
#
# This method mainly patches the data into binary file.
#
# @param FileName    File path of the binary file
# @param ValueOffset Offset value
# @param TypeName    DataType Name (BOOLEAN, UINT8/16/32/64, VOID*)
# @param ValueString Value String
# @param MaxSize     MaxSize value, required for VOID* type
#
# @retval 0          File is updated successfully.
# @retval not 0      File is updated failed.
#
def PatchBinaryFile(FileName, ValueOffset, TypeName, ValueString, MaxSize=0):
    """Patch a PCD value of the given type into FileName at ValueOffset.

    Returns a (Status, Message) tuple; Status 0 means success, anything
    else is an error code with a human-readable message.
    """
    #
    # Length of Binary File
    #
    with open(FileName, 'rb') as FileHandle:
        FileHandle.seek(0, 2)
        FileLength = FileHandle.tell()
    #
    # Unify string to upper string
    #
    TypeName = TypeName.upper()
    #
    # Get PCD value data length
    #
    ValueLength = 0
    if TypeName == 'BOOLEAN':
        ValueLength = 1
    elif TypeName == TAB_UINT8:
        ValueLength = 1
    elif TypeName == TAB_UINT16:
        ValueLength = 2
    elif TypeName == TAB_UINT32:
        ValueLength = 4
    elif TypeName == TAB_UINT64:
        ValueLength = 8
    elif TypeName == TAB_VOID:
        if MaxSize == 0:
            return OPTION_MISSING, "PcdMaxSize is not specified for VOID* type PCD."
        ValueLength = int(MaxSize)
    else:
        # BUGFIX: the original formatted this message with the module-global
        # CommandOptions, which is unbound when this function is called as a
        # library routine; use the TypeName parameter instead.
        return PARAMETER_INVALID, "PCD type %s is not valid." % (TypeName)
    #
    # Check PcdValue is in the input binary file.
    #
    if ValueOffset + ValueLength > FileLength:
        return PARAMETER_INVALID, "PcdOffset + PcdMaxSize(DataType) is larger than the input file size."
    #
    # Read binary file into array
    #
    with open(FileName, 'rb') as FileHandle:
        ByteArray = array.array('B')
        ByteArray.fromfile(FileHandle, FileLength)
    OrigByteList = ByteArray.tolist()
    ByteList = ByteArray.tolist()
    #
    # Clear the patched region first so shorter values leave zero padding.
    #
    for Index in range(ValueLength):
        ByteList[ValueOffset + Index] = 0
    #
    # Patch value into offset
    #
    SavedStr = ValueString
    ValueString = ValueString.upper()
    ValueNumber = 0
    if TypeName == 'BOOLEAN':
        #
        # Get PCD value for BOOLEAN data type
        #
        try:
            if ValueString == 'TRUE':
                ValueNumber = 1
            elif ValueString == 'FALSE':
                ValueNumber = 0
            else:
                # BUGFIX: the original unconditionally re-parsed the string with
                # int() after the TRUE/FALSE checks, so 'TRUE'/'FALSE' always
                # raised and were rejected; only parse numeric strings here.
                ValueNumber = int(ValueString, 0)
                if ValueNumber != 0:
                    ValueNumber = 1
        except (ValueError, TypeError):
            return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
        #
        # Set PCD value into binary data
        #
        ByteList[ValueOffset] = ValueNumber
    elif TypeName in TAB_PCD_CLEAN_NUMERIC_TYPES:
        #
        # Get PCD value for UINT* data type
        #
        try:
            ValueNumber = int(ValueString, 0)
        except (ValueError, TypeError):
            return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string." % (ValueString)
        #
        # Set PCD value into binary data, little endian, byte by byte.
        #
        for Index in range(ValueLength):
            ByteList[ValueOffset + Index] = ValueNumber % 0x100
            ValueNumber = ValueNumber // 0x100
    elif TypeName == TAB_VOID:
        # VOID* value keeps the original (non-uppercased) string.
        ValueString = SavedStr
        if ValueString.startswith('L"'):
            #
            # Patch Unicode String: each character occupies two bytes, the
            # high byte stays zero (ASCII payload only).
            #
            Index = 0
            for ByteString in ValueString[2:-1]:
                #
                # Reserve zero as unicode tail
                #
                if Index + 2 >= ValueLength:
                    break
                ByteList[ValueOffset + Index] = ord(ByteString)
                Index = Index + 2
        elif ValueString.startswith("{") and ValueString.endswith("}"):
            #
            # Patch {0x1, 0x2, ...} byte by byte
            #
            ValueList = ValueString[1 : len(ValueString) - 1].split(',')
            Index = 0
            try:
                for ByteString in ValueList:
                    ByteString = ByteString.strip()
                    if ByteString.upper().startswith('0X'):
                        ByteValue = int(ByteString, 16)
                    else:
                        ByteValue = int(ByteString)
                    ByteList[ValueOffset + Index] = ByteValue % 0x100
                    Index = Index + 1
                    if Index >= ValueLength:
                        break
            except (ValueError, TypeError):
                return PARAMETER_INVALID, "PCD Value %s is not valid dec or hex string array." % (ValueString)
        else:
            #
            # Patch ascii string, one byte per character.
            #
            Index = 0
            for ByteString in ValueString[1:-1]:
                #
                # Reserve zero as string tail
                #
                if Index + 1 >= ValueLength:
                    break
                ByteList[ValueOffset + Index] = ord(ByteString)
                Index = Index + 1
    #
    # Update new data into input file, only when something actually changed.
    #
    if ByteList != OrigByteList:
        ByteArray = array.array('B')
        ByteArray.fromlist(ByteList)
        with open(FileName, 'wb') as FileHandle:
            ByteArray.tofile(FileHandle)
    return 0, "Patch Value into File %s successfully." % (FileName)
## Parse command line options
#
# Using standard Python module optparse to parse command line option of this tool.
#
# @retval Options   A optparse.Values object containing the parsed options
# @retval InputFile Path of the binary file to be patched
#
def Options():
    """Parse sys.argv for PatchPcdValue; exits via EdkLogger.error when no input file is given."""
    OptionList = [
        make_option("-f", "--offset", dest="PcdOffset", action="store", type="int",
                    help="Start offset to the image is used to store PCD value."),
        make_option("-u", "--value", dest="PcdValue", action="store",
                    help="PCD value will be updated into the image."),
        make_option("-t", "--type", dest="PcdTypeName", action="store",
                    help="The name of PCD data type may be one of VOID*,BOOLEAN, UINT8, UINT16, UINT32, UINT64."),
        make_option("-s", "--maxsize", dest="PcdMaxSize", action="store", type="int",
                    help="Max size of data buffer is taken by PCD value.It must be set when PCD type is VOID*."),
        make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
                    help="Run verbosely"),
        make_option("-d", "--debug", dest="LogLevel", type="int",
                    help="Run with debug information"),
        make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
                    help="Run quietly"),
        make_option("-?", action="help", help="show this help message and exit"),
    ]
    # use clearer usage to override default usage message
    UsageString = "%prog -f Offset -u Value -t Type [-s MaxSize] <input_file>"
    Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
    Parser.set_defaults(LogLevel=EdkLogger.INFO)
    Options, Args = Parser.parse_args()
    # error check: at least one positional argument (the input file) is required
    if len(Args) == 0:
        EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData=Parser.get_usage())
    # The input file is the last positional argument; earlier ones are ignored.
    InputFile = Args[len(Args) - 1]
    return Options, InputFile
## Entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @retval 0     Tool was successful
# @retval 1     Tool failed
#
def Main():
    """Validate command line options and patch the PCD value into the input file."""
    try:
        #
        # Check input parameter
        #
        EdkLogger.Initialize()
        CommandOptions, InputFile = Options()
        if CommandOptions.LogLevel < EdkLogger.DEBUG_9:
            EdkLogger.SetLevel(CommandOptions.LogLevel + 1)
        else:
            EdkLogger.SetLevel(CommandOptions.LogLevel)
        if not os.path.exists (InputFile):
            EdkLogger.error("PatchPcdValue", FILE_NOT_FOUND, ExtraData=InputFile)
            return 1
        if CommandOptions.PcdOffset is None or CommandOptions.PcdValue is None or CommandOptions.PcdTypeName is None:
            EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdOffset or PcdValue of PcdTypeName is not specified.")
            return 1
        if CommandOptions.PcdTypeName.upper() not in TAB_PCD_NUMERIC_TYPES_VOID:
            EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData="PCD type %s is not valid." % (CommandOptions.PcdTypeName))
            return 1
        if CommandOptions.PcdTypeName.upper() == TAB_VOID and CommandOptions.PcdMaxSize is None:
            EdkLogger.error("PatchPcdValue", OPTION_MISSING, ExtraData="PcdMaxSize is not specified for VOID* type PCD.")
            return 1
        #
        # Patch value into binary image.
        #
        ReturnValue, ErrorInfo = PatchBinaryFile (InputFile, CommandOptions.PcdOffset, CommandOptions.PcdTypeName, CommandOptions.PcdValue, CommandOptions.PcdMaxSize)
        if ReturnValue != 0:
            EdkLogger.error("PatchPcdValue", ReturnValue, ExtraData=ErrorInfo)
            return 1
        return 0
    except:
        # NOTE(review): this bare except deliberately converts any failure
        # (including the FatalError raised by EdkLogger.error above) into
        # exit status 1 without a traceback.
        return 1
# Script entry point: run the tool and exit with its status code.
if __name__ == '__main__':
    r = Main()
    sys.exit(r)
| edk2-master | BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py |
## @file
# Trim files preprocessed by compiler
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import Common.LongFilePathOs as os
import sys
import re
from io import BytesIO
import codecs
from optparse import OptionParser
from optparse import make_option
from Common.BuildToolError import *
from Common.Misc import *
from Common.DataType import *
from Common.BuildVersion import gBUILD_VERSION
import Common.EdkLogger as EdkLogger
from Common.LongFilePathSupport import OpenLongFilePath as open
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
__copyright__ = "Copyright (c) 2007-2018, Intel Corporation. All rights reserved."
## Regular expression for matching Line Control directive like "#line xxx"
gLineControlDirective = re.compile('^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"')
## Regular expression for matching "typedef struct"
gTypedefPattern = re.compile("^\s*typedef\s+struct(\s+\w+)?\s*[{]*$", re.MULTILINE)
## Regular expression for matching "#pragma pack"
gPragmaPattern = re.compile("^\s*#pragma\s+pack", re.MULTILINE)
## Regular expression for matching "typedef"
gTypedef_SinglePattern = re.compile("^\s*typedef", re.MULTILINE)
## Regular expression for matching "typedef struct, typedef union, struct, union"
gTypedef_MulPattern = re.compile("^\s*(typedef)?\s+(struct|union)(\s+\w+)?\s*[{]*$", re.MULTILINE)
#
# The following number pattern match will only match if following criteria is met:
# There is leading non-(alphanumeric or _) character, and no following alphanumeric or _
# as the pattern is greedily match, so it is ok for the gDecNumberPattern or gHexNumberPattern to grab the maximum match
#
## Regular expression for matching HEX number
gHexNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])(0[xX])([0-9a-fA-F]+)(U(?=$|[^a-zA-Z0-9_]))?")
## Regular expression for matching decimal number with 'U' postfix
gDecNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])([0-9]+)U(?=$|[^a-zA-Z0-9_])")
## Regular expression for matching constant with 'ULL' 'LL' postfix
gLongNumberPattern = re.compile("(?<=[^a-zA-Z0-9_])(0[xX][0-9a-fA-F]+|[0-9]+)U?LL(?=$|[^a-zA-Z0-9_])")
## Regular expression for matching "Include ()" in asl file
gAslIncludePattern = re.compile("^(\s*)[iI]nclude\s*\(\"?([^\"\(\)]+)\"\)", re.MULTILINE)
## Regular expression for matching C style #include "XXX.asl" in asl file
gAslCIncludePattern = re.compile(r'^(\s*)#include\s*[<"]\s*([-\\/\w.]+)\s*([>"])', re.MULTILINE)
## Patterns used to convert EDK conventions to EDK2 ECP conventions
## Regular expression for finding header file inclusions
gIncludePattern = re.compile(r"^[ \t]*[%]?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
## file cache to avoid circular include in ASL file
gIncludedAslFile = []
## Trim preprocessed source code
#
# Remove extra content made by preprocessor. The preprocessor must enable the
# line number generation option when preprocessing.
#
# @param Source     File to be trimmed
# @param Target     File to store the trimmed content
# @param ConvertHex If True, convert standard HEX format to MASM format
# @param TrimLong   If True, remove 'LL'/'ULL' postfixes from long constants
#
def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong):
    """Strip preprocessor-injected content from Source and write the result to Target.

    Lines belonging to headers injected by the preprocessor are dropped; the
    original file's lines are re-placed at the line numbers announced by the
    '#line'/linemarker directives.
    """
    CreateDirectory(os.path.dirname(Target))
    try:
        with open(Source, "r") as File:
            Lines = File.readlines()
    except IOError:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    except:
        EdkLogger.error("Trim", AUTOGEN_ERROR, "TrimPreprocessedFile: Error while processing file", File=Source)
    PreprocessedFile = ""
    InjectedFile = ""
    LineIndexOfOriginalFile = None
    NewLines = []
    LineControlDirectiveFound = False
    for Index in range(len(Lines)):
        Line = Lines[Index]
        #
        # Find out the name of files injected by preprocessor from the lines
        # with Line Control directive
        #
        MatchList = gLineControlDirective.findall(Line)
        if MatchList != []:
            MatchList = MatchList[0]
            if len(MatchList) == 2:
                LineNumber = int(MatchList[0], 0)
                InjectedFile = MatchList[1]
                InjectedFile = os.path.normpath(InjectedFile)
                InjectedFile = os.path.normcase(InjectedFile)
                # The first injected file must be the preprocessed file itself
                if PreprocessedFile == "":
                    PreprocessedFile = InjectedFile
            LineControlDirectiveFound = True
            continue
        elif PreprocessedFile == "" or InjectedFile != PreprocessedFile:
            # Skip content that belongs to an included header, not the
            # preprocessed file itself.
            continue
        if LineIndexOfOriginalFile is None:
            #
            # Any non-empty lines must be from original preprocessed file.
            # And this must be the first one.
            #
            LineIndexOfOriginalFile = Index
            EdkLogger.verbose("Found original file content starting from line %d"
                              % (LineIndexOfOriginalFile + 1))
        if TrimLong:
            Line = gLongNumberPattern.sub(r"\1", Line)
        # convert HEX number format if indicated
        if ConvertHex:
            Line = gHexNumberPattern.sub(r"0\2h", Line)
        else:
            Line = gHexNumberPattern.sub(r"\1\2", Line)
        # convert Decimal number format
        Line = gDecNumberPattern.sub(r"\1", Line)
        # LineNumber was set by the most recent line-control directive; place
        # this line at that position in the output, padding with blank lines
        # for anything the preprocessor removed.
        if LineNumber is not None:
            EdkLogger.verbose("Got line directive: line=%d" % LineNumber)
            # in case preprocessor removed some lines, like blank or comment lines
            if LineNumber <= len(NewLines):
                # possible?
                NewLines[LineNumber - 1] = Line
            else:
                if LineNumber > (len(NewLines) + 1):
                    for LineIndex in range(len(NewLines), LineNumber-1):
                        NewLines.append(TAB_LINE_BREAK)
                NewLines.append(Line)
            LineNumber = None
            EdkLogger.verbose("Now we have lines: %d" % len(NewLines))
        else:
            NewLines.append(Line)
    # in case there's no line directive or linemarker found, fall back to a
    # heuristic that drops typedefs and "#pragma pack" but keeps everything else
    if (not LineControlDirectiveFound) and NewLines == []:
        MulPatternFlag = False
        SinglePatternFlag = False
        Brace = 0
        for Index in range(len(Lines)):
            Line = Lines[Index]
            if MulPatternFlag == False and gTypedef_MulPattern.search(Line) is None:
                if SinglePatternFlag == False and gTypedef_SinglePattern.search(Line) is None:
                    # remove "#pragma pack" directive
                    if gPragmaPattern.search(Line) is None:
                        NewLines.append(Line)
                    continue
                elif SinglePatternFlag == False:
                    SinglePatternFlag = True
                if Line.find(";") >= 0:
                    SinglePatternFlag = False
            elif MulPatternFlag == False:
                # found "typedef struct, typedef union, union, struct", keep its position and set a flag
                MulPatternFlag = True
            # match { and } to find the end of typedef definition
            if Line.find("{") >= 0:
                Brace += 1
            elif Line.find("}") >= 0:
                Brace -= 1
            # "typedef struct, typedef union, union, struct" must end with a ";"
            if Brace == 0 and Line.find(";") >= 0:
                MulPatternFlag = False
    # save to file
    try:
        with open(Target, 'w') as File:
            File.writelines(NewLines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
## Trim preprocessed VFR file
#
# Remove extra content made by preprocessor. The preprocessor doesn't need to
# enable line number generation option when preprocessing.
#
# @param Source   File to be trimmed
# @param Target   File to store the trimmed content
#
def TrimPreprocessedVfr(Source, Target):
    """Blank out everything before 'formset' except typedef structs and #pragma pack.

    Trimmed lines are replaced with "\n" (not deleted) so that line numbers in
    later error messages still match the original file.
    """
    CreateDirectory(os.path.dirname(Target))
    try:
        with open(Source, "r") as File:
            Lines = File.readlines()
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
    # read whole file
    FoundTypedef = False
    Brace = 0
    TypedefStart = 0
    TypedefEnd = 0
    for Index in range(len(Lines)):
        Line = Lines[Index]
        # don't trim the lines from "formset" definition to the end of file
        if Line.strip() == 'formset':
            break
        if FoundTypedef == False and (Line.find('#line') == 0 or Line.find('# ') == 0):
            # empty the line number directive if it's not among "typedef struct"
            Lines[Index] = "\n"
            continue
        if FoundTypedef == False and gTypedefPattern.search(Line) is None:
            # keep "#pragma pack" directive
            if gPragmaPattern.search(Line) is None:
                Lines[Index] = "\n"
            continue
        elif FoundTypedef == False:
            # found "typedef struct", kept its position and set a flag
            FoundTypedef = True
            TypedefStart = Index
        # match { and } to find the end of typedef definition
        if Line.find("{") >= 0:
            Brace += 1
        elif Line.find("}") >= 0:
            Brace -= 1
        # "typedef struct" must end with a ";"
        if Brace == 0 and Line.find(";") >= 0:
            FoundTypedef = False
            TypedefEnd = Index
            # keep all "typedef struct" except to GUID, EFI_PLABEL and PAL_CALL_RETURN
            if Line.strip("} ;\r\n") in [TAB_GUID, "EFI_PLABEL", "PAL_CALL_RETURN"]:
                for i in range(TypedefStart, TypedefEnd+1):
                    Lines[i] = "\n"
    # save all lines trimmed
    try:
        with open(Target, 'w') as File:
            File.writelines(Lines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
## Read the content ASL file, including ASL included, recursively
#
# @param Source          File to be read
# @param Indent          Spaces before the Include() statement
# @param IncludePathList The list of external include file
# @param LocalSearchPath If LocalSearchPath is specified, this path will be searched
#                        first for the included file; otherwise, only the path specified
#                        in the IncludePathList will be searched.
# @param IncludeFileList Output accumulator of absolute paths of every file pulled in
# @param filetype        "ASL" or "ASM"; selects which include syntax is expanded
#
def DoInclude(Source, Indent='', IncludePathList=None, LocalSearchPath=None, IncludeFileList = None, filetype=None):
    """Return Source's lines with its include statements expanded recursively.

    Circular includes are detected via the module-level gIncludedAslFile stack
    and reported as warnings; unresolvable includes yield an empty list.
    """
    NewFileContent = []
    # BUGFIX: IncludePathList previously used a mutable default argument ([]);
    # use the None-sentinel idiom so each call gets a fresh list.
    if IncludePathList is None:
        IncludePathList = []
    if IncludeFileList is None:
        IncludeFileList = []
    try:
        #
        # Search LocalSearchPath first if it is specified.
        #
        if LocalSearchPath:
            SearchPathList = [LocalSearchPath] + IncludePathList
        else:
            SearchPathList = IncludePathList
        for IncludePath in SearchPathList:
            IncludeFile = os.path.join(IncludePath, Source)
            if os.path.isfile(IncludeFile):
                # Fall back to an explicit UTF-8 read when the default-codec
                # read fails.
                try:
                    with open(IncludeFile, "r") as File:
                        F = File.readlines()
                except:
                    with codecs.open(IncludeFile, "r", encoding='utf-8') as File:
                        F = File.readlines()
                break
        else:
            EdkLogger.warn("Trim", "Failed to find include file %s" % Source)
            return []
    except:
        EdkLogger.warn("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
        return []
    # avoid A "include" B and B "include" A
    IncludeFile = os.path.abspath(os.path.normpath(IncludeFile))
    if IncludeFile in gIncludedAslFile:
        EdkLogger.warn("Trim", "Circular include",
                       ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile))
        return []
    gIncludedAslFile.append(IncludeFile)
    IncludeFileList.append(IncludeFile.strip())
    for Line in F:
        LocalSearchPath = None
        if filetype == "ASL":
            Result = gAslIncludePattern.findall(Line)
            if len(Result) == 0:
                Result = gAslCIncludePattern.findall(Line)
                if len(Result) == 0 or os.path.splitext(Result[0][1])[1].lower() not in [".asl", ".asi"]:
                    NewFileContent.append("%s%s" % (Indent, Line))
                    continue
                #
                # We should first search the local directory if current file are using pattern #include "XXX"
                #
                if Result[0][2] == '"':
                    LocalSearchPath = os.path.dirname(IncludeFile)
            CurrentIndent = Indent + Result[0][0]
            IncludedFile = Result[0][1]
            NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent, IncludePathList, LocalSearchPath,IncludeFileList,filetype))
            NewFileContent.append("\n")
        elif filetype == "ASM":
            Result = gIncludePattern.findall(Line)
            if len(Result) == 0:
                NewFileContent.append("%s%s" % (Indent, Line))
                continue
            IncludedFile = Result[0]
            IncludedFile = IncludedFile.strip()
            IncludedFile = os.path.normpath(IncludedFile)
            NewFileContent.extend(DoInclude(IncludedFile, '', IncludePathList, LocalSearchPath,IncludeFileList,filetype))
            NewFileContent.append("\n")
    # Done with this file: pop it off the circular-include stack.
    gIncludedAslFile.pop()
    return NewFileContent
## Trim ASL file
#
# Replace ASL include statement with the content the included file
#
# @param Source          File to be trimmed
# @param Target          File to store the trimmed content
# @param IncludePathFile The file to log the external include path
# @param AslDeps         If truthy, also emit a make-style .trim.deps file
#
def TrimAslFile(Source, Target, IncludePathFile,AslDeps = False):
    """Expand all includes of an ASL source into Target and record its dependencies."""
    CreateDirectory(os.path.dirname(Target))
    SourceDir = os.path.dirname(Source)
    if SourceDir == '':
        SourceDir = '.'
    #
    # Add source directory as the first search directory
    #
    IncludePathList = [SourceDir]
    #
    # If additional include path file is specified, append them all
    # to the search directory list.
    #
    if IncludePathFile:
        try:
            LineNum = 0
            with open(IncludePathFile, 'r') as File:
                FileLines = File.readlines()
            for Line in FileLines:
                LineNum += 1
                # Each valid line is a compiler-style "/I<path>" or "-I<path>".
                if Line.startswith("/I") or Line.startswith ("-I"):
                    IncludePathList.append(Line[2:].strip())
                else:
                    EdkLogger.warn("Trim", "Invalid include line in include list file.", IncludePathFile, LineNum)
        except:
            EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=IncludePathFile)
    AslIncludes = []
    Lines = DoInclude(Source, '', IncludePathList,IncludeFileList=AslIncludes,filetype='ASL')
    # The source itself is not its own dependency.
    AslIncludes = [item for item in AslIncludes if item !=Source]
    SaveFileOnChange(os.path.join(os.path.dirname(Target),os.path.basename(Source))+".trim.deps", " \\\n".join([Source+":"] +AslIncludes),False)
    #
    # Undef MIN and MAX to avoid collision in ASL source code
    #
    Lines.insert(0, "#undef MIN\n#undef MAX\n")
    # save all lines trimmed
    try:
        with open(Target, 'w') as File:
            File.writelines(Lines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
## Trim ASM file
#
# Output ASM include statement with the content the included file
#
# @param Source          File to be trimmed
# @param Target          File to store the trimmed content
# @param IncludePathFile The file to log the external include path
#
def TrimAsmFile(Source, Target, IncludePathFile):
    """Expand all includes of an ASM source into Target and record its dependencies."""
    CreateDirectory(os.path.dirname(Target))
    SourceDir = os.path.dirname(Source)
    if SourceDir == '':
        SourceDir = '.'
    #
    # Add source directory as the first search directory
    #
    IncludePathList = [SourceDir]
    #
    # If additional include path file is specified, append them all
    # to the search directory list.
    #
    if IncludePathFile:
        try:
            LineNum = 0
            with open(IncludePathFile, 'r') as File:
                FileLines = File.readlines()
            for Line in FileLines:
                LineNum += 1
                # Each valid line is a compiler-style "/I<path>" or "-I<path>".
                if Line.startswith("/I") or Line.startswith ("-I"):
                    IncludePathList.append(Line[2:].strip())
                else:
                    EdkLogger.warn("Trim", "Invalid include line in include list file.", IncludePathFile, LineNum)
        except:
            EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=IncludePathFile)
    AsmIncludes = []
    Lines = DoInclude(Source, '', IncludePathList,IncludeFileList=AsmIncludes,filetype='ASM')
    # The source itself is not its own dependency; only write the .deps file
    # when there is at least one real include.
    AsmIncludes = [item for item in AsmIncludes if item != Source]
    if AsmIncludes:
        SaveFileOnChange(os.path.join(os.path.dirname(Target),os.path.basename(Source))+".trim.deps", " \\\n".join([Source+":"] +AsmIncludes),False)
    # save all lines trimmed
    try:
        with open(Target, 'w') as File:
            File.writelines(Lines)
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
## Generate the VFR binary offset section for a module
#
# @param ModuleName The module's BASE_NAME
# @param DebugDir   Debug output directory holding the .efi/.map and VFR .c files
# @param OutputFile File to receive the (GUID, 64-bit offset) records
#
def GenerateVfrBinSec(ModuleName, DebugDir, OutputFile):
    """Find VFR binary / UNI string symbols in the module image and write their
    offsets as GUID-tagged records into OutputFile.

    Returns None; I/O failures are reported through EdkLogger.error.
    """
    #
    # Collect the symbol names to look up in the map file: one '<Name>Bin'
    # symbol per generated VFR .c file plus the module's string pack symbol.
    #
    VfrNameList = []
    if os.path.isdir(DebugDir):
        for CurrentDir, Dirs, Files in os.walk(DebugDir):
            for FileName in Files:
                Name, Ext = os.path.splitext(FileName)
                if Ext == '.c' and Name != 'AutoGen':
                    VfrNameList.append (Name + 'Bin')
    VfrNameList.append (ModuleName + 'Strings')
    EfiFileName = os.path.join(DebugDir, ModuleName + '.efi')
    MapFileName = os.path.join(DebugDir, ModuleName + '.map')
    VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrNameList)
    if not VfrUniOffsetList:
        # Nothing to emit for this module.
        return
    try:
        fInputfile = open(OutputFile, "wb+")
    except:
        EdkLogger.error("Trim", FILE_OPEN_FAILURE, "File open failed for %s" %OutputFile, None)
    # Use a instance of BytesIO to cache data
    fStringIO = BytesIO()
    for Item in VfrUniOffsetList:
        if (Item[0].find("Strings") != -1):
            #
            # UNI offset in image.
            # GUID + Offset
            # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
            #
            UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
            fStringIO.write(UniGuid)
            UniValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (UniValue)
        else:
            #
            # VFR binary offset in image.
            # GUID + Offset
            # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
            #
            VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
            fStringIO.write(VfrGuid)
            # BUGFIX: removed a no-op "type (Item[1])" expression statement
            # left over from debugging.
            VfrValue = pack ('Q', int (Item[1], 16))
            fStringIO.write (VfrValue)
    #
    # write data into file.
    #
    try :
        fInputfile.write (fStringIO.getvalue())
    except:
        EdkLogger.error("Trim", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %OutputFile, None)
    fStringIO.close ()
    fInputfile.close ()
## Parse command line options
#
# Using standard Python module optparse to parse command line option of this tool.
#
# @retval Options   A optparse.Values object containing the parsed options
# @retval InputFile Path of file to be trimmed ('' for --Vfr-Uni-Offset mode)
#
def Options():
    """Parse sys.argv for Trim; exits via EdkLogger.error on bad argument counts."""
    OptionList = [
        make_option("-s", "--source-code", dest="FileType", const="SourceCode", action="store_const",
                    help="The input file is preprocessed source code, including C or assembly code"),
        make_option("-r", "--vfr-file", dest="FileType", const="Vfr", action="store_const",
                    help="The input file is preprocessed VFR file"),
        make_option("--Vfr-Uni-Offset", dest="FileType", const="VfrOffsetBin", action="store_const",
                    help="The input file is EFI image"),
        make_option("--asl-deps", dest="AslDeps", const="True", action="store_const",
                    help="Generate Asl dependent files."),
        make_option("-a", "--asl-file", dest="FileType", const="Asl", action="store_const",
                    help="The input file is ASL file"),
        make_option( "--asm-file", dest="FileType", const="Asm", action="store_const",
                    help="The input file is asm file"),
        make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true",
                    help="Convert standard hex format (0xabcd) to MASM format (abcdh)"),
        make_option("-l", "--trim-long", dest="TrimLong", action="store_true",
                    help="Remove postfix of long number"),
        make_option("-i", "--include-path-file", dest="IncludePathFile",
                    help="The input file is include path list to search for ASL include file"),
        make_option("-o", "--output", dest="OutputFile",
                    help="File to store the trimmed content"),
        make_option("--ModuleName", dest="ModuleName", help="The module's BASE_NAME"),
        make_option("--DebugDir", dest="DebugDir",
                    help="Debug Output directory to store the output files"),
        make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
                    help="Run verbosely"),
        make_option("-d", "--debug", dest="LogLevel", type="int",
                    help="Run with debug information"),
        make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
                    help="Run quietly"),
        make_option("-?", action="help", help="show this help message and exit"),
    ]
    # use clearer usage to override default usage message
    UsageString = "%prog [-s|-r|-a|--Vfr-Uni-Offset] [-c] [-v|-d <debug_level>|-q] [-i <include_path_file>] [-o <output_file>] [--ModuleName <ModuleName>] [--DebugDir <DebugDir>] [<input_file>]"
    Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
    Parser.set_defaults(FileType="Vfr")
    Parser.set_defaults(ConvertHex=False)
    Parser.set_defaults(LogLevel=EdkLogger.INFO)
    Options, Args = Parser.parse_args()
    # error check: VfrOffsetBin mode allows zero positional args (it works from
    # --ModuleName/--DebugDir instead); all other modes need exactly one.
    if Options.FileType == 'VfrOffsetBin':
        if len(Args) == 0:
            return Options, ''
        elif len(Args) > 1:
            EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
    if len(Args) == 0:
        EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage())
    if len(Args) > 1:
        EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
    InputFile = Args[0]
    return Options, InputFile
## Entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @retval 0 Tool was successful
# @retval 1 Tool failed
#
def Main():
    """Tool entry point: parse options, dispatch to the trimmer matching the
    selected file type, and translate failures into process status codes.

    @retval 0   Tool was successful
    @retval 1   Tool failed (FatalError or unexpected exception)
    """
    try:
        EdkLogger.Initialize()
        CommandOptions, InputFile = Options()
        # Bump the debug level by one so the requested level is inclusive.
        LogLevel = CommandOptions.LogLevel
        EdkLogger.SetLevel(LogLevel + 1 if LogLevel < EdkLogger.DEBUG_9 else LogLevel)
    except FatalError:
        return 1
    try:
        FileType = CommandOptions.FileType
        # Every mode except VfrOffsetBin/Asm defaults the output to <input>.iii
        if FileType not in ("VfrOffsetBin", "Asm") and CommandOptions.OutputFile is None:
            CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii'
        if FileType == "Vfr":
            TrimPreprocessedVfr(InputFile, CommandOptions.OutputFile)
        elif FileType == "Asl":
            TrimAslFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile, CommandOptions.AslDeps)
        elif FileType == "VfrOffsetBin":
            GenerateVfrBinSec(CommandOptions.ModuleName, CommandOptions.DebugDir, CommandOptions.OutputFile)
        elif FileType == "Asm":
            TrimAsmFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile)
        else:
            TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex, CommandOptions.TrimLong)
    except FatalError as X:
        import platform
        import traceback
        # Print the call stack only when running at a debug level.
        if CommandOptions is not None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9:
            EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
        return 1
    except:
        import traceback
        import platform
        EdkLogger.error(
                    "\nTrim",
                    CODE_ERROR,
                    "Unknown fatal error when trimming [%s]" % InputFile,
                    ExtraData="\n(Please send email to %s for help, attaching following call stack trace!)\n" % MSG_EDKII_MAIL_ADDR,
                    RaiseError=False
                    )
        EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
        return 1
    return 0
# Script entry point: run Main() and clamp its result to a portable exit code.
if __name__ == '__main__':
    r = Main()
    ## 0-127 is a safe return range, and 1 is a standard default error
    if r < 0 or r > 127: r = 1
    sys.exit(r)
| edk2-master | BaseTools/Source/Python/Trim/Trim.py |
## @file
# This file implements the log mechanism for Python tools.
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# Copyright 2001-2016 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# This copyright is for QueueHandler.
## Import modules
from __future__ import absolute_import
import Common.LongFilePathOs as os, sys, logging
import traceback
from .BuildToolError import *
# Python 3.2+ ships QueueHandler in logging.handlers; on older interpreters
# fall back to this local copy (taken from CPython -- see the Vinay Sajip
# copyright notice at the top of this file).
try:
    from logging.handlers import QueueHandler
except:
    class QueueHandler(logging.Handler):
        """
        This handler sends events to a queue. Typically, it would be used together
        with a multiprocessing Queue to centralise logging to file in one process
        (in a multi-process application), so as to avoid file write contention
        between processes.
        This code is new in Python 3.2, but this class can be copy pasted into
        user code for use with earlier Python versions.
        """
        def __init__(self, queue):
            """
            Initialise an instance, using the passed queue.
            """
            logging.Handler.__init__(self)
            self.queue = queue
        def enqueue(self, record):
            """
            Enqueue a record.
            The base implementation uses put_nowait. You may want to override
            this method if you want to use blocking, timeouts or custom queue
            implementations.
            """
            self.queue.put_nowait(record)
        def prepare(self, record):
            """
            Prepares a record for queuing. The object returned by this method is
            enqueued.
            The base implementation formats the record to merge the message
            and arguments, and removes unpickleable items from the record
            in-place.
            You might want to override this method if you want to convert
            the record to a dict or JSON string, or send a modified copy
            of the record while leaving the original intact.
            """
            # The format operation gets traceback text into record.exc_text
            # (if there's exception data), and also returns the formatted
            # message. We can then use this to replace the original
            # msg + args, as these might be unpickleable. We also zap the
            # exc_info and exc_text attributes, as they are no longer
            # needed and, if not None, will typically not be pickleable.
            msg = self.format(record)
            record.message = msg
            record.msg = msg
            record.args = None
            record.exc_info = None
            record.exc_text = None
            return record
        def emit(self, record):
            """
            Emit a record.
            Writes the LogRecord to the queue, preparing it for pickling first.
            """
            try:
                self.enqueue(self.prepare(record))
            except Exception:
                self.handleError(record)
class BlockQueueHandler(QueueHandler):
    """QueueHandler variant that blocks until queue space is available
    instead of raising queue.Full (put vs. the base class's put_nowait)."""
    def enqueue(self, record):
        # Blocking put: wait for room rather than dropping the record.
        self.queue.put(record, block=True)
## Log level constants
# Numerically below logging.DEBUG so that DEBUG_0..DEBUG_9 form a finer-grained
# debug scale; the other levels line up with the stdlib logging levels.
DEBUG_0 = 1
DEBUG_1 = 2
DEBUG_2 = 3
DEBUG_3 = 4
DEBUG_4 = 5
DEBUG_5 = 6
DEBUG_6 = 7
DEBUG_7 = 8
DEBUG_8 = 9
DEBUG_9 = 10
VERBOSE = 15
INFO = 20
WARN = 30
QUIET = 40
ERROR = 50
SILENT = 99
# Global switch consulted by error(); when False, error() never raises.
IsRaiseError = True
# Tool name
_ToolName = os.path.basename(sys.argv[0])
# For validation purpose
_LogLevels = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5,
              DEBUG_6, DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO,
              ERROR, QUIET, SILENT]
# For DEBUG level (All DEBUG_0~9 are applicable)
_DebugLogger = logging.getLogger("tool_debug")
_DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
# For VERBOSE, INFO, WARN level
_InfoLogger = logging.getLogger("tool_info")
_InfoFormatter = logging.Formatter("%(message)s")
# For ERROR level
_ErrorLogger = logging.getLogger("tool_error")
_ErrorFormatter = logging.Formatter("%(message)s")
# String templates for ERROR/WARN/DEBUG log message
_ErrorMessageTemplate = '\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s'
_ErrorMessageTemplateWithoutFile = '\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s'
_WarningMessageTemplate = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s'
_WarningMessageTemplateWithoutFile = '%(tool)s: : warning: %(msg)s'
_DebugMessageTemplate = '%(file)s(%(line)s): debug: \n %(msg)s'
#
# Flag used to take WARN as ERROR.
# By default, only ERROR message will break the tools execution.
#
_WarningAsError = False
## Log debug message
#
# @param Level DEBUG level (DEBUG0~9)
# @param Message Debug information
# @param ExtraData More information associated with "Message"
#
def debug(Level, Message, ExtraData=None):
    """Log a debug message at one of the DEBUG_0..DEBUG_9 levels.

    @param Level      DEBUG level (DEBUG_0~DEBUG_9)
    @param Message    Debug information
    @param ExtraData  More information associated with "Message"
    """
    # Drop the message when the logger is quieter than Level or the level
    # is outside the debug range.
    if _DebugLogger.level > Level or Level > DEBUG_9:
        return
    # Identify the caller's source file and line for the message header.
    # NOTE: depends on debug() being called directly (stack depth of one).
    Caller = traceback.extract_stack()[-2]
    Values = {
        "file": Caller[0],
        "line": Caller[1],
        "msg": Message,
    }
    Text = _DebugMessageTemplate % Values
    if ExtraData is not None:
        Text += "\n %s" % ExtraData
    _DebugLogger.log(Level, Text)
## Log verbose message
#
# @param Message Verbose information
#
def verbose(Message):
    # VERBOSE (15) sits between DEBUG_9 and INFO, so the message is emitted
    # only when the info logger level is VERBOSE or lower.
    return _InfoLogger.log(VERBOSE, Message)
## Log warning message
#
# Warning messages are those which might be wrong but won't fail the tool.
#
# @param ToolName The name of the tool. If not given, the name of caller
# method will be used.
# @param Message Warning information
# @param File The name of file which caused the warning.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
#
def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
    """Log a warning; warnings do not stop the tool unless SetWarningAsError()
    has been called, in which case FatalError(WARNING_AS_ERROR) is raised.

    @param ToolName   Name of the tool; defaults to the caller's file name
    @param Message    Warning information
    @param File       Name of file that caused the warning
    @param Line       Line number in "File" that caused the warning
    @param ExtraData  More information associated with "Message"
    """
    if _InfoLogger.level > WARN:
        return
    # if no tool name given, use caller's source file name as tool name
    if ToolName is None or ToolName == "":
        ToolName = os.path.basename(traceback.extract_stack()[-2][0])
    LineText = "..." if Line is None else "%d" % Line
    Values = {
        "tool": ToolName,
        "file": File,
        "line": LineText,
        "msg": Message,
    }
    Template = _WarningMessageTemplate if File is not None else _WarningMessageTemplateWithoutFile
    LogText = Template % Values
    if ExtraData is not None:
        LogText += "\n %s" % ExtraData
    _InfoLogger.log(WARN, LogText)
    # Raise an exception if warnings are promoted to errors.
    if _WarningAsError:
        raise FatalError(WARNING_AS_ERROR)
## Log INFO message
# Direct alias of the info logger's info() method (supports lazy %-style args).
info = _InfoLogger.info
## Log ERROR message
#
# Once an error messages is logged, the tool's execution will be broken by raising
# an exception. If you don't want to break the execution later, you can give
# "RaiseError" with "False" value.
#
# @param ToolName The name of the tool. If not given, the name of caller
# method will be used.
# @param ErrorCode The error code
# @param Message Warning information
# @param File The name of file which caused the error.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
# @param RaiseError Raise an exception to break the tool's execution if
# it's True. This is the default behavior.
#
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
    """Log an ERROR message and, by default, abort by raising FatalError.

    @param ToolName    Unused here; the module-level _ToolName is logged instead
    @param ErrorCode   Error code (looked up in gErrorMessage when Message is None)
    @param Message     Error information
    @param File        Name of file that caused the error
    @param Line        Line number in "File" that caused the error
    @param ExtraData   More information associated with "Message"
    @param RaiseError  Pass False to log without raising.  NOTE: the default is
                       bound to IsRaiseError at import time, and the body also
                       re-checks the current global IsRaiseError.
    """
    LineText = "..." if Line is None else "%d" % Line
    if Message is None:
        # Fall back to the canonical text for this error code.
        Message = gErrorMessage.get(ErrorCode, gErrorMessage[UNKNOWN_ERROR])
    if ExtraData is None:
        ExtraData = ""
    Values = {
        "tool": _ToolName,
        "file": File,
        "line": LineText,
        "errorcode": ErrorCode,
        "msg": Message,
        "extra": ExtraData,
    }
    Template = _ErrorMessageTemplate if File is not None else _ErrorMessageTemplateWithoutFile
    _ErrorLogger.log(ERROR, Template % Values)
    if RaiseError and IsRaiseError:
        raise FatalError(ErrorCode)
# Log information which should be always put out
# Alias of the error logger's error() method: emitted at ERROR (50) level, so
# it is suppressed only when the level is raised to SILENT.
quiet = _ErrorLogger.error
## Initialize log system
def LogClientInitialize(log_q):
    """Initialize logging for a worker process: every logger forwards its
    records into the shared multiprocessing queue via BlockQueueHandler.

    @param log_q  Queue consumed by the central logging process
    """
    #
    # Since we use different format to log different levels of message into different
    # place (stdout or stderr), we have to use different "Logger" objects to do this.
    #
    for Logger, Formatter in ((_DebugLogger, _DebugFormatter),   # DEBUG_0~9
                              (_InfoLogger, _InfoFormatter),     # VERBOSE/INFO/WARN
                              (_ErrorLogger, _ErrorFormatter)):  # ERROR
        Logger.setLevel(INFO)
        Channel = BlockQueueHandler(log_q)
        Channel.setFormatter(Formatter)
        Logger.addHandler(Channel)
## Set log level
#
# @param Level One of log level in _LogLevel
def SetLevel(Level):
    """Set the same level on all three loggers; unknown values fall back to INFO.

    @param Level  One of the log levels in _LogLevels
    """
    if Level not in _LogLevels:
        info("Not supported log level (%d). Use default level instead." % Level)
        Level = INFO
    for Logger in (_DebugLogger, _InfoLogger, _ErrorLogger):
        Logger.setLevel(Level)
## Initialize log system
def Initialize():
    """Initialize the log system for in-process use: debug and info records go
    to stdout, errors go to stderr, all starting at INFO level."""
    #
    # Since we use different format to log different levels of message into different
    # place (stdout or stderr), we have to use different "Logger" objects to do this.
    #
    for Logger, Formatter, Stream in ((_DebugLogger, _DebugFormatter, sys.stdout),
                                      (_InfoLogger, _InfoFormatter, sys.stdout),
                                      (_ErrorLogger, _ErrorFormatter, sys.stderr)):
        Logger.setLevel(INFO)
        Channel = logging.StreamHandler(Stream)
        Channel.setFormatter(Formatter)
        Logger.addHandler(Channel)
def InitializeForUnitTest():
    # Same handler setup as Initialize(), then silence all output for tests.
    Initialize()
    SetLevel(SILENT)
## Get current log level
def GetLevel():
    # SetLevel() keeps all three loggers at the same level, so reporting
    # the info logger's effective level is representative.
    return _InfoLogger.getEffectiveLevel()
## Raise up warning as error
def SetWarningAsError():
    # Once set, warn() raises FatalError(WARNING_AS_ERROR) after logging.
    global _WarningAsError
    _WarningAsError = True
## Specify a file to store the log message as well as put on console
#
# @param LogFile The file path used to store the log message
#
def SetLogFile(LogFile):
    """Duplicate all log output into LogFile in addition to the console.

    @param LogFile  File path used to store the log messages (recreated)
    """
    if os.path.exists(LogFile):
        os.remove(LogFile)
    # Each logger gets its own FileHandler so its usual formatter applies.
    for Logger, Formatter in ((_DebugLogger, _DebugFormatter),
                              (_InfoLogger, _InfoFormatter),
                              (_ErrorLogger, _ErrorFormatter)):
        Handler = logging.FileHandler(LogFile)
        Handler.setFormatter(Formatter)
        Logger.addHandler(Handler)
# Library module: nothing to execute when run directly.
if __name__ == '__main__':
    pass
| edk2-master | BaseTools/Source/Python/Common/EdkLogger.py |
## @file
# Common routines used by all tools
#
# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import sys
import string
import threading
import time
import re
import pickle
import array
import shutil
import filecmp
from random import sample
from struct import pack
import uuid
import subprocess
import tempfile
from collections import OrderedDict
import Common.LongFilePathOs as os
from Common import EdkLogger as EdkLogger
from Common import GlobalData as GlobalData
from Common.DataType import *
from Common.BuildToolError import *
from CommonDataClass.DataClass import *
from Common.Parsing import GetSplitValueList
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.LongFilePathSupport import CopyLongFilePath as CopyLong
from Common.LongFilePathSupport import LongFilePath as LongFilePath
from Common.MultipleWorkspace import MultipleWorkspace as mws
from CommonDataClass.Exceptions import BadExpression
from Common.caching import cached_property
import struct
# Matches a C-style array subscript such as "[0x10]" or "[ 2 ]".
ArrayIndex = re.compile("\[\s*[0-9a-fA-FxX]*\s*\]")
## Regular expression used to find out place holders in string template
gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE | re.UNICODE)
## regular expressions for map file processing
# Header of the section table in a general (Rva+Base style) map file.
startPatternGeneral = re.compile("^Start[' ']+Length[' ']+Name[' ']+Class")
# Header of the public-symbol table in a general map file.
addressPatternGeneral = re.compile("^Address[' ']+Publics by Value[' ']+Rva\+Base")
# "<name> <value> <value>" section line in a GCC map file.
valuePatternGcc = re.compile('^([\w_\.]+) +([\da-fA-Fx]+) +([\da-fA-Fx]+)$')
# "<address> <size>" pair for a PCD entry in a GCC map file.
pcdPatternGcc = re.compile('^([\da-fA-Fx]+) +([\da-fA-Fx]+)')
# "<sec>:<offset> <length>H <name> <class>" section line in a general map file.
secReGeneral = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\da-fA-F]+)[Hh]? +([.\w\$]+) +(\w+)', re.UNICODE)
# A valid C identifier (used for structure PCD names).
StructPattern = re.compile(r'[_a-zA-Z][0-9A-Za-z_]*$')
## Dictionary used to store dependencies of files
gDependencyDatabase = {}    # arch : {file path : [dependent files list]}
#
# If a module is built more than once with different PCDs or library classes
# a temporary INF file with same content is created, the temporary file is removed
# when build exits.
#
_TempInfs = []
def GetVariableOffset(mapfilepath, efifilepath, varnames):
    """ Parse map file to get variable offset in current EFI file
    @param mapfilepath    Map file absolution path
    @param efifilepath:   EFI binary file full path
    @param varnames       iteratable container whose elements are variable names to be searched
    @return               List whos elements are tuple with variable name and raw offset,
                          or None when the map file cannot be read or is empty
    """
    # Fix: use a context manager so the file handle is closed even when
    # readlines() raises (the previous open/readlines/close sequence leaked
    # the handle in that case).
    try:
        with open(mapfilepath, 'r') as f:
            lines = f.readlines()
    except:
        return None
    if len(lines) == 0: return None
    # Detect the linker that produced the map from its first line and
    # delegate to the matching parser.
    firstline = lines[0].strip()
    if re.match('^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', firstline):
        return _parseForXcodeAndClang9(lines, efifilepath, varnames)
    if (firstline.startswith("Archive member included ") and
        firstline.endswith(" file (symbol)")):
        return _parseForGCC(lines, efifilepath, varnames)
    if firstline.startswith("# Path:"):
        return _parseForXcodeAndClang9(lines, efifilepath, varnames)
    return _parseGeneral(lines, efifilepath, varnames)
def _parseForXcodeAndClang9(lines, efifilepath, varnames):
status = 0
ret = []
for line in lines:
line = line.strip()
if status == 0 and (re.match('^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', line) \
or line == "# Symbols:"):
status = 1
continue
if status == 1 and len(line) != 0:
for varname in varnames:
if varname in line:
# cannot pregenerate this RegEx since it uses varname from varnames.
m = re.match('^([\da-fA-FxX]+)([\s\S]*)([_]*%s)$' % varname, line)
if m is not None:
ret.append((varname, m.group(1)))
return ret
def _parseForGCC(lines, efifilepath, varnames):
    """ Parse map file generated by GCC linker """
    # State machine:
    #   0 - before "Memory Configuration"
    #   1 - before "Linker script and memory map"
    #   2 - before "START GROUP"
    #   3 - inside the section/symbol listing
    status = 0
    sections = []
    varoffset = []
    for index, line in enumerate(lines):
        line = line.strip()
        # status machine transection
        if status == 0 and line == "Memory Configuration":
            status = 1
            continue
        elif status == 1 and line == 'Linker script and memory map':
            status = 2
            continue
        elif status == 2 and line == 'START GROUP':
            status = 3
            continue
        # status handler
        if status == 3:
            m = valuePatternGcc.match(line)
            if m is not None:
                sections.append(m.groups(0))
            for varname in varnames:
                Str = ''
                m = re.match("^.data.(%s)" % varname, line)
                if m is not None:
                    m = re.match(".data.(%s)$" % varname, line)
                    if m is not None:
                        # ".data.<var>" alone on the line: the value pair is on the next line
                        Str = lines[index + 1]
                    else:
                        Str = line[len(".data.%s" % varname):]
                    if Str:
                        m = pcdPatternGcc.match(Str.strip())
                        if m is not None:
                            # (name, symbol offset, section start, section name)
                            varoffset.append((varname, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))
    if not varoffset:
        return []
    # get section information from efi file
    efisecs = PeImageClass(efifilepath).SectionHeaderList
    if efisecs is None or len(efisecs) == 0:
        return []
    # redirection: delta between the .text address in the map and in the EFI image
    redirection = 0
    for efisec in efisecs:
        for section in sections:
            if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text':
                redirection = int(section[1], 16) - efisec[1]
    ret = []
    for var in varoffset:
        for efisec in efisecs:
            # Map the symbol's link-time address into a raw file offset.
            if var[1] >= efisec[1] and var[1] < efisec[1]+efisec[3]:
                ret.append((var[0], hex(efisec[2] + var[1] - efisec[1] - redirection)))
    return ret
def _parseGeneral(lines, efifilepath, varnames):
    """Parse a map file in the general format (presumably MSVC-style link
    maps -- inferred from the "Publics by Value ... Rva+Base" header)."""
    status = 0    #0 - beginning of file; 1 - PE section definition; 2 - symbol table
    secs = []    # key = section name
    varoffset = []
    # "<sec>:<offset> <symbol> <virtual address>" symbol line.
    symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.:\\\\\w\?@\$-]+) +([\da-fA-F]+)', re.UNICODE)
    for line in lines:
        line = line.strip()
        if startPatternGeneral.match(line):
            status = 1
            continue
        if addressPatternGeneral.match(line):
            status = 2
            continue
        if line.startswith("entry point at"):
            status = 3
            continue
        if status == 1 and len(line) != 0:
            m = secReGeneral.match(line)
            assert m is not None, "Fail to parse the section in map file , line is %s" % line
            sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
            secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
        if status == 2 and len(line) != 0:
            for varname in varnames:
                m = symRe.match(line)
                assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
                sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
                sec_no = int(sec_no, 16)
                sym_offset = int(sym_offset, 16)
                vir_addr = int(vir_addr, 16)
                # cannot pregenerate this RegEx since it uses varname from varnames.
                m2 = re.match('^[_]*(%s)' % varname, sym_name)
                if m2 is not None:
                    # fond a binary pcd entry in map file
                    for sec in secs:
                        if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
                            varoffset.append([varname, sec[3], sym_offset, vir_addr, sec_no])
    if not varoffset: return []
    # get section information from efi file
    efisecs = PeImageClass(efifilepath).SectionHeaderList
    if efisecs is None or len(efisecs) == 0:
        return []
    ret = []
    for var in varoffset:
        index = 0
        for efisec in efisecs:
            index = index + 1
            # Match by section name first, then by section ordinal.
            if var[1].strip() == efisec[0].strip():
                ret.append((var[0], hex(efisec[2] + var[2])))
            elif var[4] == index:
                ret.append((var[0], hex(efisec[2] + var[2])))
    return ret
## Routine to process duplicated INF
#
# This function is called by following two cases:
# Case 1 in DSC:
# [components.arch]
# Pkg/module/module.inf
# Pkg/module/module.inf {
# <Defines>
# FILE_GUID = 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836
# }
# Case 2 in FDF:
# INF Pkg/module/module.inf
# INF FILE_GUID = 0D1B936F-68F3-4589-AFCC-FB8B7AEBC836 Pkg/module/module.inf
#
# This function copies Pkg/module/module.inf to
# Conf/.cache/0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf
#
# @param Path Original PathClass object
# @param BaseName New file base name
#
# @retval return the new PathClass object
#
def ProcessDuplicatedInf(Path, BaseName, Workspace):
    """Create (or reuse) a temporary copy of an INF for a module built more
    than once with a FILE_GUID override; see the comment block above for the
    DSC/FDF cases that trigger this.

    @param Path       Original PathClass object
    @param BaseName   New file base name (the overriding FILE_GUID)
    @param Workspace  Workspace the INF's relative path is resolved against
    @retval           PathClass object whose Path points at the temporary copy
    """
    Filename = os.path.split(Path.File)[1]
    # Keep the original extension: FILE_GUID + BaseName + ".inf" etc.
    if '.' in Filename:
        Filename = BaseName + Path.BaseName + Filename[Filename.rfind('.'):]
    else:
        Filename = BaseName + Path.BaseName
    DbDir = os.path.split(GlobalData.gDatabasePath)[0]
    #
    # A temporary INF is copied to database path which must have write permission
    # The temporary will be removed at the end of build
    # In case of name conflict, the file name is
    # FILE_GUIDBaseName (0D1B936F-68F3-4589-AFCC-FB8B7AEBC836module.inf)
    #
    TempFullPath = os.path.join(DbDir,
                                Filename)
    RtPath = PathClass(Path.File, Workspace)
    #
    # Modify the full path to temporary path, keep other unchanged
    #
    # To build same module more than once, the module path with FILE_GUID overridden has
    # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
    # in DSC which is used as relative path by C files and other files in INF.
    # A trick was used: all module paths are PathClass instances, after the initialization
    # of PathClass, the PathClass.Path is overridden by the temporary INF path.
    #
    # The reason for creating a temporary INF is:
    # Platform.Modules which is the base to create ModuleAutoGen objects is a dictionary,
    # the key is the full path of INF, the value is an object to save overridden library instances, PCDs.
    # A different key for the same module is needed to create different output directory,
    # retrieve overridden PCDs, library instances.
    #
    # The BaseName is the FILE_GUID which is also the output directory name.
    #
    #
    RtPath.Path = TempFullPath
    RtPath.BaseName = BaseName
    RtPath.OriginalPath = Path
    #
    # If file exists, compare contents
    #
    if os.path.exists(TempFullPath):
        with open(str(Path), 'rb') as f1, open(TempFullPath, 'rb') as f2:
            # Identical content: reuse the existing temporary copy.
            if f1.read() == f2.read():
                return RtPath
    # Record the copy so ClearDuplicatedInf() can delete it at build exit.
    _TempInfs.append(TempFullPath)
    shutil.copy2(str(Path), TempFullPath)
    return RtPath
## Remove temporary created INFs whose paths were saved in _TempInfs
#
def ClearDuplicatedInf():
    """Delete every temporary INF copy recorded in _TempInfs, draining the list."""
    while _TempInfs:
        TempPath = _TempInfs.pop()
        # The file may already have been removed externally; only delete what exists.
        if os.path.exists(TempPath):
            os.remove(TempPath)
## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C structure style
#
# @param Guid The GUID string
#
# @retval string The GUID string in C structure style
#
def GuidStringToGuidStructureString(Guid):
    """Convert a registry-format GUID string
    (xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx) into C structure style.

    @param Guid  The GUID string
    @retval      The GUID string in C structure style
    """
    parts = Guid.split('-')
    # First three fields map directly to the 32/16/16-bit members.
    head = ', '.join('0x' + p for p in parts[:3])
    # Remaining 8 bytes: two from the fourth group, six from the fifth.
    tail_bytes = [parts[3][0:2], parts[3][2:4]] + \
                 [parts[4][i:i + 2] for i in range(0, 12, 2)]
    tail = ', '.join('0x' + b for b in tail_bytes)
    return '{' + head + ', {' + tail + '}}'
## Convert GUID structure in byte array to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue The GUID value in byte array
#
# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
#
def GuidStructureByteArrayToGuidString(GuidValue):
    """Convert a 16-byte GUID byte array (C initializer text) into
    xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx registry format.

    @param GuidValue  The GUID value as a byte-array initializer string
    @retval           Registry-format GUID string, or '' on malformed input
    """
    cleaned = GuidValue.lower()
    for ch in ("{", "}", " ", ";"):
        cleaned = cleaned.replace(ch, "")
    byte_list = cleaned.split(",")
    if len(byte_list) != 16:
        return ''
    #EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue)
    try:
        values = [int(v, 16) for v in byte_list]
    except:
        return ''
    # The first three GUID fields are little-endian, so reorder their bytes.
    order = (3, 2, 1, 0, 5, 4, 7, 6, 8, 9, 10, 11, 12, 13, 14, 15)
    hexed = ["%02x" % values[i] for i in order]
    return "%s-%s-%s-%s-%s" % ("".join(hexed[0:4]), "".join(hexed[4:6]),
                               "".join(hexed[6:8]), "".join(hexed[8:10]),
                               "".join(hexed[10:16]))
## Convert GUID string in C structure style to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue The GUID value in C structure format
#
# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
#
def GuidStructureStringToGuidString(GuidValue):
    """Convert a C-structure GUID string into
    xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx registry format.

    @param GuidValue  The GUID value in C structure format
    @retval           Registry-format GUID string, or '' on malformed input
    """
    # Shape check first: must look like a C GUID initializer.
    if not GlobalData.gGuidCFormatPattern.match(GuidValue):
        return ''
    cleaned = GuidValue.lower()
    for ch in ("{", "}", " ", ";"):
        cleaned = cleaned.replace(ch, "")
    fields = cleaned.split(",")
    if len(fields) != 11:
        return ''
    #EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue)
    try:
        values = [int(f, 16) for f in fields]
    except:
        return ''
    return "%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x" % tuple(values)
## Convert GUID string in C structure style to xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx
#
# @param GuidValue The GUID value in C structure format
#
# @retval string The GUID value in xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx format
#
def GuidStructureStringToGuidValueName(GuidValue):
    """Convert a C-structure GUID string into the underscore-separated
    xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx form.

    @param GuidValue  The GUID value in C structure format
    @retval           The underscore-separated GUID value name
    """
    cleaned = GuidValue.lower()
    for ch in ("{", "}", " "):
        cleaned = cleaned.replace(ch, "")
    fields = cleaned.split(",")
    # A well-formed C GUID initializer yields exactly 11 comma-separated fields.
    if len(fields) != 11:
        EdkLogger.error(None, FORMAT_INVALID, "Invalid GUID value string [%s]" % GuidValue)
    return "%08x_%04x_%04x_%02x%02x_%02x%02x%02x%02x%02x%02x" % \
           tuple(int(f, 16) for f in fields)
## Create directories
#
# @param Directory The directory name
#
def CreateDirectory(Directory):
    """Create Directory (including intermediate parents).

    @param Directory  The directory name; None or blank is a no-op
    @retval True      Directory exists or was created (or nothing to do)
    @retval False     Creation failed
    """
    if Directory is None or not Directory.strip():
        return True
    if os.access(Directory, os.F_OK):
        # Already present: nothing to create.
        return True
    try:
        os.makedirs(Directory)
    except:
        return False
    return True
## Remove directories, including files and sub-directories in it
#
# @param Directory The directory name
#
def RemoveDirectory(Directory, Recursively=False):
    """Remove a directory; with Recursively=True its files and
    sub-directories are removed first.

    @param Directory    The directory name; None/blank/missing is a no-op
    @param Recursively  Delete the contents before removing the directory
    """
    if Directory is None or not Directory.strip() or not os.path.exists(Directory):
        return
    if Recursively:
        # Work from inside the directory (entries are relative names),
        # restoring the previous working directory afterwards.
        SavedCwd = os.getcwd()
        os.chdir(Directory)
        for Entry in os.listdir("."):
            if os.path.isdir(Entry):
                RemoveDirectory(Entry, Recursively)
            else:
                os.remove(Entry)
        os.chdir(SavedCwd)
    os.rmdir(Directory)
## Store content in file
#
# This method is used to save file only when its content is changed. This is
# quite useful for "make" system to decide what will be re-built and what won't.
#
# @param File The path of file
# @param Content The new content of the file
# @param IsBinaryFile The flag indicating if the file is binary file or not
#
# @retval True If the file content is changed and the file is renewed
# @retval False If the file content is the same
#
def SaveFileOnChange(File, Content, IsBinaryFile=True, FileLock=None):
    """Store Content in File only when the content differs, so "make"-style
    incremental builds are not invalidated by identical rewrites.

    @param File          The path of file
    @param Content       The new content of the file
    @param IsBinaryFile  The flag indicating if the file is binary file or not
    @param FileLock      Optional lock guarding the write; defaults to
                         GlobalData.file_lock when unset

    @retval True   The file content changed and the file was renewed
    @retval False  The file content is the same; nothing written

    Fix: the original duplicated the whole write path in two byte-identical
    branches (one guarded by GlobalData.gIsWindows and the file not existing,
    one for everything else); the duplication is collapsed into a single path
    with identical behavior.
    """
    # Convert to long file path format
    File = LongFilePath(File)
    if os.path.exists(File):
        # Compare with the current content; an unchanged file is left alone.
        ReadMode = "rb" if IsBinaryFile else "r"
        try:
            with open(File, ReadMode) as f:
                if Content == f.read():
                    return False
        except:
            EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)
    DirName = os.path.dirname(File)
    if not CreateDirectory(DirName):
        EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
    else:
        if DirName == '':
            DirName = os.getcwd()
        if not os.access(DirName, os.W_OK):
            EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
    OpenMode = "wb" if IsBinaryFile else "w"
    # use default file_lock if no input new lock
    if not FileLock:
        FileLock = GlobalData.file_lock
    if FileLock:
        FileLock.acquire()
    try:
        with open(File, OpenMode) as Fd:
            Fd.write(Content)
    except IOError as X:
        if GlobalData.gBinCacheSource:
            EdkLogger.quiet("[cache error]:fails to save file with error: %s" % (X))
        else:
            EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData='IOError %s' % X)
    finally:
        if FileLock:
            FileLock.release()
    return True
## Copy source file only if it is different from the destination file
#
# This method is used to copy file only if the source file and destination
# file content are different. This is quite useful to avoid duplicated
# file writing.
#
# @param SrcFile The path of source file
# @param Dst The path of destination file or folder
#
# @retval True The two files content are different and the file is copied
# @retval False No copy really happen
#
def CopyFileOnChange(SrcFile, Dst, FileLock=None):
    """Copy SrcFile to Dst only when the destination is missing or its
    content differs, to avoid needless file writes.

    @param SrcFile   The path of source file
    @param Dst       The path of destination file or folder
    @param FileLock  Optional lock guarding the copy; defaults to
                     GlobalData.file_lock when unset

    @retval True   Contents differed and the file was copied
    @retval False  No copy happened (identical content or bad source)
    """
    # Convert to long file path format
    SrcFile = LongFilePath(SrcFile)
    Dst = LongFilePath(Dst)
    if os.path.isdir(SrcFile):
        # NOTE: EdkLogger.error raises by default, so the return below is a
        # safeguard for the RaiseError=False configuration.
        EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='CopyFileOnChange SrcFile is a dir, not a file: %s' % SrcFile)
        return False
    # A directory destination means "copy into it" keeping the base name.
    if os.path.isdir(Dst):
        DstFile = os.path.join(Dst, os.path.basename(SrcFile))
    else:
        DstFile = Dst
    # shallow=False: compare actual file contents, not just stat signatures.
    if os.path.exists(DstFile) and filecmp.cmp(SrcFile, DstFile, shallow=False):
        return False
    DirName = os.path.dirname(DstFile)
    if not CreateDirectory(DirName):
        EdkLogger.error(None, FILE_CREATE_FAILURE, "Could not create directory %s" % DirName)
    else:
        if DirName == '':
            DirName = os.getcwd()
        if not os.access(DirName, os.W_OK):
            EdkLogger.error(None, PERMISSION_FAILURE, "Do not have write permission on directory %s" % DirName)
    # use default file_lock if no input new lock
    if not FileLock:
        FileLock = GlobalData.file_lock
    if FileLock:
        FileLock.acquire()
    try:
        CopyLong(SrcFile, DstFile)
    except IOError as X:
        if GlobalData.gBinCacheSource:
            EdkLogger.quiet("[cache error]:fails to copy file with error: %s" % (X))
        else:
            EdkLogger.error(None, FILE_COPY_FAILURE, ExtraData='IOError %s' % X)
    finally:
        if FileLock:
            FileLock.release()
    return True
## Retrieve and cache the real path name in file system
#
# @param Root The root directory of path relative to
#
# @retval str The path string if the path exists
# @retval None If path doesn't exist
#
class DirCache:
    """Retrieve and cache the real (case-correct) path name in the file
    system for paths under a given root directory.

    Indexing (``cache[path]``) returns the path with on-disk casing when it
    exists under the root, or None when it does not.

    Fix: _CACHE_ and _UPPER_CACHE_ used to be *class* attributes mutated in
    __init__, so all DirCache instances shared a single cache and entries
    from one root could (incorrectly) satisfy lookups under another root.
    They are now per-instance.
    """
    def __init__(self, Root):
        # @param Root  The root directory lookups are resolved against.
        self._Root = Root
        # Per-instance caches: exact names seen so far, and an upper-cased
        # index for case-insensitive matching.
        self._CACHE_ = set()
        self._UPPER_CACHE_ = {}
        for F in os.listdir(Root):
            self._CACHE_.add(F)
            self._UPPER_CACHE_[F.upper()] = F
    # =[] operator
    def __getitem__(self, Path):
        # Strip the root prefix; the remainder is looked up relative to it.
        Path = Path[len(os.path.commonprefix([Path, self._Root])):]
        if not Path:
            return self._Root
        if Path and Path[0] == os.path.sep:
            Path = Path[1:]
        # Fast paths: exact or case-insensitive hit on already-cached names.
        if Path in self._CACHE_:
            return os.path.join(self._Root, Path)
        UpperPath = Path.upper()
        if UpperPath in self._UPPER_CACHE_:
            return os.path.join(self._Root, self._UPPER_CACHE_[UpperPath])
        # Walk down the path components, extending the cache with each
        # directory listing until the deepest cached ancestor is found.
        IndexList = []
        LastSepIndex = -1
        SepIndex = Path.find(os.path.sep)
        while SepIndex > -1:
            Parent = UpperPath[:SepIndex]
            if Parent not in self._UPPER_CACHE_:
                break
            LastSepIndex = SepIndex
            SepIndex = Path.find(os.path.sep, LastSepIndex + 1)
        if LastSepIndex == -1:
            return None
        Cwd = os.getcwd()
        os.chdir(self._Root)
        SepIndex = LastSepIndex
        while SepIndex > -1:
            Parent = Path[:SepIndex]
            ParentKey = UpperPath[:SepIndex]
            if ParentKey not in self._UPPER_CACHE_:
                os.chdir(Cwd)
                return None
            if Parent in self._CACHE_:
                ParentDir = Parent
            else:
                ParentDir = self._UPPER_CACHE_[ParentKey]
            # Cache every entry of this directory level (relative paths).
            for F in os.listdir(ParentDir):
                Dir = os.path.join(ParentDir, F)
                self._CACHE_.add(Dir)
                self._UPPER_CACHE_[Dir.upper()] = Dir
            SepIndex = Path.find(os.path.sep, SepIndex + 1)
        os.chdir(Cwd)
        if Path in self._CACHE_:
            return os.path.join(self._Root, Path)
        elif UpperPath in self._UPPER_CACHE_:
            return os.path.join(self._Root, self._UPPER_CACHE_[UpperPath])
        return None
def RealPath(File, Dir='', OverrideDir=''):
    """Resolve File against Dir through the global file cache, falling back
    to OverrideDir when the first lookup yields nothing.

    @param File         file name or relative path to resolve
    @param Dir          primary directory to resolve against
    @param OverrideDir  fallback directory tried when Dir has no match

    @retval the cached real path, or whatever the cache returns on a miss
    """
    Resolved = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
    if not Resolved and OverrideDir:
        Resolved = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
    return Resolved
## Get GUID value from given packages
#
# @param CName The CName of the GUID
# @param PackageList List of packages looking-up in
# @param Inffile The driver file
#
# @retval GuidValue if the CName is found in any given package
# @retval None if the CName is not found in all given packages
#
def GuidValue(CName, PackageList, Inffile = None):
    """Look up the GUID value for CName in the given packages.

    @param CName        the C name of the GUID
    @param PackageList  packages to search, in order
    @param Inffile      path of the consuming INF; when given, private GUIDs
                        are visible only to modules inside the owning package

    @retval the GUID value string if found, otherwise None
    """
    for Package in PackageList:
        VisibleGuids = list(Package.Guids.keys())
        # Hide private GUIDs from modules outside the package directory.
        if Inffile and Package._PrivateGuids:
            if not Inffile.startswith(Package.MetaFile.Dir):
                VisibleGuids = [Key for Key in Package.Guids if Key not in Package._PrivateGuids]
        if CName in VisibleGuids:
            return Package.Guids[CName]
    return None
## A string template class
#
# This class implements a template for string replacement. A string template
# looks like following
#
# ${BEGIN} other_string ${placeholder_name} other_string ${END}
#
# The string between ${BEGIN} and ${END} will be repeated as many times as the
# length of "placeholder_name", which is a list passed through a dict. The
# "placeholder_name" is the key name of the dict. The ${BEGIN} and ${END} can
# be not used and, in this case, the "placeholder_name" must not a list and it
# will just be replaced once.
#
class TemplateString(object):
    """String template supporting ${name} placeholders and repeated
    ${BEGIN}...${END} regions (see the comment block above the class)."""
    _REPEAT_START_FLAG = "BEGIN"
    _REPEAT_END_FLAG = "END"

    ## One fragment of the template: its literal text plus the placeholders
    #  found inside it, pre-split for fast substitution.
    class Section(object):
        # Value types that trigger repetition of the section.
        _LIST_TYPES = [type([]), type(set()), type((0,))]

        def __init__(self, TemplateSection, PlaceHolderList):
            self._Template = TemplateSection
            self._PlaceHolderList = []

            # Split the section into sub-sections according to the position of placeholders
            if PlaceHolderList:
                self._SubSectionList = []
                SubSectionStart = 0
                #
                # The placeholders passed in must be in the format of
                #
                #   PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint
                #
                for PlaceHolder, Start, End in PlaceHolderList:
                    self._SubSectionList.append(TemplateSection[SubSectionStart:Start])
                    self._SubSectionList.append(TemplateSection[Start:End])
                    self._PlaceHolderList.append(PlaceHolder)
                    SubSectionStart = End
                if SubSectionStart < len(TemplateSection):
                    self._SubSectionList.append(TemplateSection[SubSectionStart:])
            else:
                self._SubSectionList = [TemplateSection]

        def __str__(self):
            return self._Template + " : " + str(self._PlaceHolderList)

        ## Substitute placeholder values into this section.
        #
        #  List-typed values trigger repetition: all lists must share the same
        #  length, and the section text is emitted once per element.
        #
        def Instantiate(self, PlaceHolderValues):
            RepeatTime = -1
            RepeatPlaceHolders = {}
            NonRepeatPlaceHolders = {}
            for PlaceHolder in self._PlaceHolderList:
                if PlaceHolder not in PlaceHolderValues:
                    continue
                Value = PlaceHolderValues[PlaceHolder]
                if type(Value) in self._LIST_TYPES:
                    if RepeatTime < 0:
                        RepeatTime = len(Value)
                    elif RepeatTime != len(Value):
                        # Mismatched list lengths make repetition ambiguous.
                        EdkLogger.error(
                            "TemplateString",
                            PARAMETER_INVALID,
                            "${%s} has different repeat time from others!" % PlaceHolder,
                            ExtraData=str(self._Template)
                        )
                    RepeatPlaceHolders["${%s}" % PlaceHolder] = Value
                else:
                    NonRepeatPlaceHolders["${%s}" % PlaceHolder] = Value

            # First pass: substitute scalar (non-repeating) placeholders.
            if NonRepeatPlaceHolders:
                StringList = []
                for S in self._SubSectionList:
                    if S not in NonRepeatPlaceHolders:
                        StringList.append(S)
                    else:
                        StringList.append(str(NonRepeatPlaceHolders[S]))
            else:
                StringList = self._SubSectionList

            # Second pass: expand the section once per list element.
            if RepeatPlaceHolders:
                TempStringList = []
                for Index in range(RepeatTime):
                    for S in StringList:
                        if S not in RepeatPlaceHolders:
                            TempStringList.append(S)
                        else:
                            TempStringList.append(str(RepeatPlaceHolders[S][Index]))
                StringList = TempStringList

            return "".join(StringList)

    ## Constructor
    def __init__(self, Template=None):
        self.String = []
        self.IsBinary = False
        self._Template = Template
        self._TemplateSectionList = self._Parse(Template)

    ## str() operator
    #
    #   @retval     string The string replaced
    #
    def __str__(self):
        return "".join(self.String)

    ## Split the template string into fragments per the ${BEGIN} and ${END} flags
    #
    #   @retval     list    A list of TemplateString.Section objects
    #
    def _Parse(self, Template):
        SectionStart = 0
        SearchFrom = 0
        MatchEnd = 0
        PlaceHolderList = []
        TemplateSectionList = []
        while Template:
            MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
            if not MatchObj:
                # No more placeholders: the rest is one literal section.
                if MatchEnd <= len(Template):
                    TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
                    TemplateSectionList.append(TemplateSection)
                break

            MatchString = MatchObj.group(1)
            MatchStart = MatchObj.start()
            MatchEnd = MatchObj.end()

            if MatchString == self._REPEAT_START_FLAG:
                # ${BEGIN}: close the current section and start a repeated one.
                if MatchStart > SectionStart:
                    TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList)
                    TemplateSectionList.append(TemplateSection)
                SectionStart = MatchEnd
                PlaceHolderList = []
            elif MatchString == self._REPEAT_END_FLAG:
                # ${END}: close the repeated section.
                TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList)
                TemplateSectionList.append(TemplateSection)
                SectionStart = MatchEnd
                PlaceHolderList = []
            else:
                # Ordinary placeholder: record its position relative to the
                # start of the current section.
                PlaceHolderList.append((MatchString, MatchStart - SectionStart, MatchEnd - SectionStart))
            SearchFrom = MatchEnd
        return TemplateSectionList

    ## Replace the string template with dictionary of placeholders and append it to previous one
    #
    #   @param      AppendString    The string template to append
    #   @param      Dictionary      The placeholder dictionaries
    #
    def Append(self, AppendString, Dictionary=None):
        if Dictionary:
            SectionList = self._Parse(AppendString)
            self.String.append( "".join(S.Instantiate(Dictionary) for S in SectionList))
        else:
            if isinstance(AppendString,list):
                self.String.extend(AppendString)
            else:
                self.String.append(AppendString)

    ## Replace the string template with dictionary of placeholders
    #
    #   @param      Dictionary      The placeholder dictionaries
    #
    #   @retval     str             The string replaced with placeholder values
    #
    def Replace(self, Dictionary=None):
        return "".join(S.Instantiate(Dictionary) for S in self._TemplateSectionList)
## Progress indicator class
#
# This class makes use of thread to print progress on console.
#
class Progressor:
    """Print progress characters on the console from a background thread."""
    # Class-level state shared by all instances; at most one progress thread
    # runs at a time ("for avoiding deadloop" in the original).
    _StopFlag = None
    _ProgressThread = None
    _CheckInterval = 0.25   # seconds between stop-flag polls

    ## Constructor
    #
    # @param OpenMessage The string printed before progress characters
    # @param CloseMessage The string printed after progress characters
    # @param ProgressChar The character used to indicate the progress
    # @param Interval The interval in seconds between two progress characters
    #
    def __init__(self, OpenMessage="", CloseMessage="", ProgressChar='.', Interval=1.0):
        self.PromptMessage = OpenMessage
        self.CodaMessage = CloseMessage
        self.ProgressChar = ProgressChar
        self.Interval = Interval
        if Progressor._StopFlag is None:
            Progressor._StopFlag = threading.Event()

    ## Start to print progress character
    #
    # @param OpenMessage The string printed before progress characters
    #
    def Start(self, OpenMessage=None):
        if OpenMessage is not None:
            self.PromptMessage = OpenMessage
        Progressor._StopFlag.clear()
        if Progressor._ProgressThread is None:
            Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)
            # Non-daemon so the interpreter waits for a clean Stop()/Abort().
            # Thread.setDaemon() is a deprecated camelCase alias; assign the
            # daemon attribute instead.
            Progressor._ProgressThread.daemon = False
            Progressor._ProgressThread.start()

    ## Stop printing progress character
    #
    # @param CloseMessage The string printed after progress characters
    #
    def Stop(self, CloseMessage=None):
        # Temporarily override the coda message for this stop only.
        OriginalCodaMessage = self.CodaMessage
        if CloseMessage is not None:
            self.CodaMessage = CloseMessage
        self.Abort()
        self.CodaMessage = OriginalCodaMessage

    ## Thread entry method: emit one ProgressChar every Interval seconds
    #  until the stop flag is set.
    def _ProgressThreadEntry(self):
        sys.stdout.write(self.PromptMessage + " ")
        sys.stdout.flush()
        TimeUp = 0.0
        # Poll the stop flag every _CheckInterval so Stop() stays responsive
        # even with a long Interval.  Event.isSet() is a deprecated alias of
        # is_set().
        while not Progressor._StopFlag.is_set():
            if TimeUp <= 0.0:
                sys.stdout.write(self.ProgressChar)
                sys.stdout.flush()
                TimeUp = self.Interval
            time.sleep(self._CheckInterval)
            TimeUp -= self._CheckInterval
        sys.stdout.write(" " + self.CodaMessage + "\n")
        sys.stdout.flush()

    ## Abort the progress display and wait for the thread to exit.
    @staticmethod
    def Abort():
        if Progressor._StopFlag is not None:
            Progressor._StopFlag.set()
        if Progressor._ProgressThread is not None:
            Progressor._ProgressThread.join()
            Progressor._ProgressThread = None
## Dictionary using prioritized list as key
#
class tdict:
    """Nested dictionary keyed by a prioritized key tuple/list.

    Each level accepts a wildcard key: 'COMMON', 'DEFAULT', 'ALL', '*' and
    'PLATFORM' all normalize to COMMON.  Lookup falls back to the wildcard
    entry when the exact key is absent.  In "single" mode __getitem__ returns
    the first matching value; otherwise it returns a list of all matches.
    """
    _ListType = type([])
    _TupleType = type(())
    _Wildcard = 'COMMON'
    _ValidWildcardList = ['COMMON', 'DEFAULT', 'ALL', TAB_STAR, 'PLATFORM']

    def __init__(self, _Single_=False, _Level_=2):
        self._Level_ = _Level_      # number of key levels rooted at this node
        self.data = {}              # key -> nested tdict (or value at leaf level)
        self._Single_ = _Single_    # True: first match wins; False: collect all

    # =[] operator
    def __getitem__(self, key):
        # Normalize into (FirstKey, RestKeys), padding missing levels with
        # the wildcard.
        KeyType = type(key)
        RestKeys = None
        if KeyType == self._ListType or KeyType == self._TupleType:
            FirstKey = key[0]
            if len(key) > 1:
                RestKeys = key[1:]
            elif self._Level_ > 1:
                RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
        else:
            FirstKey = key
            if self._Level_ > 1:
                RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]

        if FirstKey is None or str(FirstKey).upper() in self._ValidWildcardList:
            FirstKey = self._Wildcard

        if self._Single_:
            return self._GetSingleValue(FirstKey, RestKeys)
        else:
            return self._GetAllValues(FirstKey, RestKeys)

    ## Return the first value matching the key, preferring the exact key
    #  over the wildcard entry.
    def _GetSingleValue(self, FirstKey, RestKeys):
        Value = None
        #print "%s-%s" % (FirstKey, self._Level_) ,
        if self._Level_ > 1:
            if FirstKey == self._Wildcard:
                # Wildcard query: try the wildcard entry first, then any key.
                if FirstKey in self.data:
                    Value = self.data[FirstKey][RestKeys]
                if Value is None:
                    for Key in self.data:
                        Value = self.data[Key][RestKeys]
                        if Value is not None: break
            else:
                if FirstKey in self.data:
                    Value = self.data[FirstKey][RestKeys]
                if Value is None and self._Wildcard in self.data:
                    #print "Value=None"
                    Value = self.data[self._Wildcard][RestKeys]
        else:
            # Leaf level: data holds values, not nested tdicts.
            if FirstKey == self._Wildcard:
                if FirstKey in self.data:
                    Value = self.data[FirstKey]
                if Value is None:
                    for Key in self.data:
                        Value = self.data[Key]
                        if Value is not None: break
            else:
                if FirstKey in self.data:
                    Value = self.data[FirstKey]
                elif self._Wildcard in self.data:
                    Value = self.data[self._Wildcard]
        return Value

    ## Return a list of every value matching the key (exact plus wildcard).
    def _GetAllValues(self, FirstKey, RestKeys):
        Value = []
        if self._Level_ > 1:
            if FirstKey == self._Wildcard:
                for Key in self.data:
                    Value += self.data[Key][RestKeys]
            else:
                if FirstKey in self.data:
                    Value += self.data[FirstKey][RestKeys]
                if self._Wildcard in self.data:
                    Value += self.data[self._Wildcard][RestKeys]
        else:
            if FirstKey == self._Wildcard:
                for Key in self.data:
                    Value.append(self.data[Key])
            else:
                if FirstKey in self.data:
                    Value.append(self.data[FirstKey])
                if self._Wildcard in self.data:
                    Value.append(self.data[self._Wildcard])
        return Value

    ## []= operator
    def __setitem__(self, key, value):
        KeyType = type(key)
        RestKeys = None
        if KeyType == self._ListType or KeyType == self._TupleType:
            FirstKey = key[0]
            if len(key) > 1:
                RestKeys = key[1:]
            else:
                RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]
        else:
            FirstKey = key
            if self._Level_ > 1:
                RestKeys = [self._Wildcard for i in range(0, self._Level_ - 1)]

        if FirstKey in self._ValidWildcardList:
            FirstKey = self._Wildcard

        # Create the nested tdict for this key on demand.
        if FirstKey not in self.data and self._Level_ > 0:
            self.data[FirstKey] = tdict(self._Single_, self._Level_ - 1)

        if self._Level_ > 1:
            self.data[FirstKey][RestKeys] = value
        else:
            self.data[FirstKey] = value

    ## Switch this node and all children to collect-all mode.
    def SetGreedyMode(self):
        self._Single_ = False
        if self._Level_ > 1:
            for Key in self.data:
                self.data[Key].SetGreedyMode()

    ## Switch this node and all children to first-match mode.
    def SetSingleMode(self):
        self._Single_ = True
        if self._Level_ > 1:
            for Key in self.data:
                self.data[Key].SetSingleMode()

    ## Return the set of keys used at nesting depth KeyIndex.
    def GetKeys(self, KeyIndex=0):
        assert KeyIndex >= 0
        if KeyIndex == 0:
            return set(self.data.keys())
        else:
            keys = set()
            for Key in self.data:
                keys |= self.data[Key].GetKeys(KeyIndex - 1)
            return keys
def AnalyzePcdExpression(Setting):
    """Split a PCD setting string on '|' while ignoring '|' characters that
    appear inside quoted strings or parenthesized (expression) groups.

    @param Setting  raw PCD setting string from the meta file
    @retval list of the '|'-separated fields, each stripped
    """
    # Temporarily replace escaped backslashes with a random token so the
    # quote-escape detection below is not confused by sequences like \\".
    RanStr = ''.join(sample(string.ascii_letters + string.digits, 8))
    Setting = Setting.replace('\\\\', RanStr).strip()
    # There might be escaped quote in a string: \", \\\" , \', \\\'
    Data = Setting
    # There might be '|' in string and in ( ... | ... ), replace it with '-'
    NewStr = ''
    InSingleQuoteStr = False
    InDoubleQuoteStr = False
    Pair = 0
    for Index, ch in enumerate(Data):
        if ch == '"' and not InSingleQuoteStr:
            # NOTE(review): at Index 0 this reads Data[-1] (the last char);
            # presumably harmless for real-world settings -- confirm.
            if Data[Index - 1] != '\\':
                InDoubleQuoteStr = not InDoubleQuoteStr
        elif ch == "'" and not InDoubleQuoteStr:
            if Data[Index - 1] != '\\':
                InSingleQuoteStr = not InSingleQuoteStr
        elif ch == '(' and not (InSingleQuoteStr or InDoubleQuoteStr):
            Pair += 1
        elif ch == ')' and not (InSingleQuoteStr or InDoubleQuoteStr):
            Pair -= 1

        # Mask '|' occurring inside quotes/parens so the split below skips it.
        if (Pair > 0 or InSingleQuoteStr or InDoubleQuoteStr) and ch == TAB_VALUE_SPLIT:
            NewStr += '-'
        else:
            NewStr += ch

    # Find the split positions in the masked copy, but take the field text
    # from the original Setting string.
    FieldList = []
    StartPos = 0
    while True:
        Pos = NewStr.find(TAB_VALUE_SPLIT, StartPos)
        if Pos < 0:
            FieldList.append(Setting[StartPos:].strip())
            break
        FieldList.append(Setting[StartPos:Pos].strip())
        StartPos = Pos + 1
    # Restore the escaped backslashes masked above.
    for i, ch in enumerate(FieldList):
        if RanStr in ch:
            FieldList[i] = ch.replace(RanStr,'\\\\')
    return FieldList
def ParseFieldValue (Value):
    """Parse a field value expression into (Value, Size-in-bytes).

    Value may be an integer, a typed literal (UINT8(...)..UINT64(...),
    GUID(...)), a quoted string or character constant (ASCII or L-prefixed
    unicode), a {...} byte array, a DEVICE_PATH(...) expression, a plain
    decimal/hex number, or TRUE/FALSE.  CODE blocks and unrecognized text
    are returned unchanged.

    @raise BadExpression when a literal is malformed or out of range.
    """
    def ParseDevPathValue (Value):
        # Normalize separators before handing the path to the DevicePath
        # tool.  (The original discarded the result of replace(), making
        # this a no-op.)
        if '\\' in Value:
            Value = Value.replace('\\', '/').replace(' ', '')

        Cmd = 'DevicePath ' + '"' + Value + '"'
        p = None
        try:
            p = subprocess.Popen(Cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            out, err = p.communicate()
        except Exception as X:
            raise BadExpression("DevicePath: %s" % (str(X)))
        finally:
            subprocess._cleanup()
            # Only close the pipes if Popen() actually succeeded; otherwise
            # the unbound name would mask the original error.
            if p is not None:
                p.stdout.close()
                p.stderr.close()
        if err:
            raise BadExpression("DevicePath: %s" % str(err))
        out = out.decode()
        Size = len(out.split())
        out = ','.join(out.split())
        return '{' + out + '}', Size

    # Handle non-string values first: the substring tests below raise
    # TypeError on anything that is not a str, which made the original
    # integer fast-path unreachable.
    if isinstance(Value, int):
        return Value, (Value.bit_length() + 7) // 8
    if not isinstance(Value, str):
        raise BadExpression('Type %s is %s' %(Value, type(Value)))

    # CODE blocks are passed through; size is the number of comma-separated
    # items.
    if "{CODE(" in Value:
        return Value, len(Value.split(","))

    Value = Value.strip()
    # Fixed-width integer literals: UINT8(x) .. UINT64(x).
    if Value.startswith(TAB_UINT8) and Value.endswith(')'):
        Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
        if Size > 1:
            raise BadExpression('Value (%s) Size larger than %d' %(Value, Size))
        return Value, 1
    if Value.startswith(TAB_UINT16) and Value.endswith(')'):
        Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
        if Size > 2:
            raise BadExpression('Value (%s) Size larger than %d' %(Value, Size))
        return Value, 2
    if Value.startswith(TAB_UINT32) and Value.endswith(')'):
        Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
        if Size > 4:
            raise BadExpression('Value (%s) Size larger than %d' %(Value, Size))
        return Value, 4
    if Value.startswith(TAB_UINT64) and Value.endswith(')'):
        Value, Size = ParseFieldValue(Value.split('(', 1)[1][:-1])
        if Size > 8:
            raise BadExpression('Value (%s) Size larger than %d' % (Value, Size))
        return Value, 8
    # GUID(...): produce a 128-bit integer in little-endian storage order.
    if Value.startswith(TAB_GUID) and Value.endswith(')'):
        Value = Value.split('(', 1)[1][:-1].strip()
        if Value[0] == '{' and Value[-1] == '}':
            TmpValue = GuidStructureStringToGuidString(Value)
            if not TmpValue:
                raise BadExpression("Invalid GUID value string %s" % Value)
            Value = TmpValue
        if Value[0] == '"' and Value[-1] == '"':
            Value = Value[1:-1]
        try:
            Value = uuid.UUID(Value).bytes_le
            ValueL, ValueH = struct.unpack('2Q', Value)
            Value = (ValueH << 64 ) | ValueL
        except ValueError as Message:
            raise BadExpression(Message)
        return Value, 16
    if Value.startswith('L"') and Value.endswith('"'):
        # Unicode String
        # translate escape character
        Value = Value[1:]
        try:
            Value = eval(Value)
        except:
            Value = Value[1:-1]
        List = list(Value)
        List.reverse()
        Value = 0
        # Pack UCS-2 code units, last character most significant; the +1
        # accounts for the terminating NUL.
        for Char in List:
            Value = (Value << 16) | ord(Char)
        return Value, (len(List) + 1) * 2
    if Value.startswith('"') and Value.endswith('"'):
        # ASCII String
        # translate escape character
        try:
            Value = eval(Value)
        except:
            Value = Value[1:-1]
        List = list(Value)
        List.reverse()
        Value = 0
        for Char in List:
            Value = (Value << 8) | ord(Char)
        return Value, len(List) + 1
    if Value.startswith("L'") and Value.endswith("'"):
        # Unicode Character Constant
        # translate escape character
        Value = Value[1:]
        try:
            Value = eval(Value)
        except:
            Value = Value[1:-1]
        List = list(Value)
        if len(List) == 0:
            raise BadExpression('Length %s is %s' % (Value, len(List)))
        List.reverse()
        Value = 0
        # Character constants carry no terminating NUL.
        for Char in List:
            Value = (Value << 16) | ord(Char)
        return Value, len(List) * 2
    if Value.startswith("'") and Value.endswith("'"):
        # Character constant
        # translate escape character
        try:
            Value = eval(Value)
        except:
            Value = Value[1:-1]
        List = list(Value)
        if len(List) == 0:
            raise BadExpression('Length %s is %s' % (Value, len(List)))
        List.reverse()
        Value = 0
        for Char in List:
            Value = (Value << 8) | ord(Char)
        return Value, len(List)
    if Value.startswith('{') and Value.endswith('}'):
        # Byte array: concatenate the little-endian bytes of each item.
        Value = Value[1:-1]
        List = [Item.strip() for Item in Value.split(',')]
        List.reverse()
        Value = 0
        RetSize = 0
        for Item in List:
            ItemValue, Size = ParseFieldValue(Item)
            RetSize += Size
            for I in range(Size):
                Value = (Value << 8) | ((ItemValue >> 8 * I) & 0xff)
        return Value, RetSize
    if Value.startswith('DEVICE_PATH(') and Value.endswith(')'):
        Value = Value.replace("DEVICE_PATH(", '').rstrip(')')
        Value = Value.strip().strip('"')
        return ParseDevPathValue(Value)
    if Value.lower().startswith('0x'):
        try:
            Value = int(Value, 16)
        except:
            raise BadExpression("invalid hex value: %s" % Value)
        if Value == 0:
            return 0, 1
        return Value, (Value.bit_length() + 7) // 8
    if Value[0].isdigit():
        Value = int(Value, 10)
        if Value == 0:
            return 0, 1
        return Value, (Value.bit_length() + 7) // 8
    if Value.lower() == 'true':
        return 1, 1
    if Value.lower() == 'false':
        return 0, 1
    return Value, 1
## AnalyzeDscPcd
#
# Analyze DSC PCD value, since there is no data type info in DSC
# This function is used to match functions (AnalyzePcdData) used for retrieving PCD value from database
# 1. Feature flag: TokenSpace.PcdCName|PcdValue
# 2. Fix and Patch:TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]
# 3. Dynamic default:
# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]
# TokenSpace.PcdCName|PcdValue
# 4. Dynamic VPD:
# TokenSpace.PcdCName|VpdOffset[|VpdValue]
# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]]
# 5. Dynamic HII:
# TokenSpace.PcdCName|HiiString|VariableGuid|VariableOffset[|HiiValue]
# PCD value needs to be located in such kind of string, and the PCD value might be an expression in which
# there might have "|" operator, also in string value.
#
# @param Setting: String contain information described above with "TokenSpace.PcdCName|" stripped
# @param PcdType: PCD type: feature, fixed, dynamic default VPD HII
# @param DataType: The datum type of PCD: VOID*, UNIT, BOOL
# @retval:
# ValueList: A List contain fields described above
# IsValid: True if conforming EBNF, otherwise False
# Index: The index where PcdValue is in ValueList
#
def AnalyzeDscPcd(Setting, PcdType, DataType=''):
    """Split a DSC PCD setting into its fields (see the comment block above
    for the per-PCD-type formats) and validate the field count.

    @retval (ValueList, IsValid, ValueIndex) where ValueIndex is the
            position of the PCD value within ValueList.
    """
    FieldList = AnalyzePcdExpression(Setting)

    IsValid = True
    if PcdType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT):
        # Format: Value[|DatumType[|MaxSize]]
        Value = FieldList[0]
        Size = ''
        if len(FieldList) > 1 and FieldList[1]:
            DataType = FieldList[1]
            # The datum type must be VOID* or a structure type name.
            if FieldList[1] != TAB_VOID and StructPattern.match(FieldList[1]) is None:
                IsValid = False
        if len(FieldList) > 2:
            Size = FieldList[2]
        if IsValid:
            if DataType == "":
                IsValid = (len(FieldList) <= 1)
            else:
                IsValid = (len(FieldList) <= 3)

        if Size:
            # MaxSize must be a decimal or 0x-prefixed hex number.
            try:
                int(Size, 16) if Size.upper().startswith("0X") else int(Size)
            except:
                IsValid = False
                Size = -1
        return [str(Value), DataType, str(Size)], IsValid, 0
    elif PcdType == MODEL_PCD_FEATURE_FLAG:
        # Format: Value only.
        Value = FieldList[0]
        Size = ''
        IsValid = (len(FieldList) <= 1)
        return [Value, DataType, str(Size)], IsValid, 0
    elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):
        # Format: VpdOffset[|Value] for fixed-size types,
        #         VpdOffset[|MaxSize[|Value]] for VOID*.
        VpdOffset = FieldList[0]
        Value = Size = ''
        if not DataType == TAB_VOID:
            if len(FieldList) > 1:
                Value = FieldList[1]
        else:
            if len(FieldList) > 1:
                Size = FieldList[1]
            if len(FieldList) > 2:
                Value = FieldList[2]
        if DataType == "":
            IsValid = (len(FieldList) <= 1)
        else:
            IsValid = (len(FieldList) <= 3)
        if Size:
            try:
                int(Size, 16) if Size.upper().startswith("0X") else int(Size)
            except:
                IsValid = False
                Size = -1
        return [VpdOffset, str(Size), Value], IsValid, 2
    elif PcdType in (MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII):
        # Format: HiiString|VariableGuid|VariableOffset[|HiiValue[|Attribute]]
        IsValid = (3 <= len(FieldList) <= 5)
        HiiString = FieldList[0]
        Guid = Offset = Value = Attribute = ''
        if len(FieldList) > 1:
            Guid = FieldList[1]
        if len(FieldList) > 2:
            Offset = FieldList[2]
        if len(FieldList) > 3:
            Value = FieldList[3]
        if len(FieldList) > 4:
            Attribute = FieldList[4]
        return [HiiString, Guid, Offset, Value, Attribute], IsValid, 3
    return [], False, 0
## AnalyzePcdData
#
# Analyze the pcd Value, Datum type and TokenNumber.
# Used to avoid split issue while the value string contain "|" character
#
# @param[in] Setting: A String contain value/datum type/token number information;
#
# @retval ValueList: A List contain value, datum type and toke number.
#
def AnalyzePcdData(Setting):
    """Split a PCD setting into [Value, DatumType, TokenNumber].

    A quoted value may itself contain the '|' separator; such a value is
    pulled out before splitting so the split does not cut through it.

    @param Setting  string with value / datum type / token number fields
    @retval list of [value, datum type, token number]
    """
    ValueList = ['', '', '']

    QuotedValuePattern = re.compile(r'^\s*L?\".*\|.*\"')
    QuotedMatches = QuotedValuePattern.findall(Setting)
    if QuotedMatches:
        # Remove the quoted value before splitting; restore it afterwards.
        Setting = QuotedValuePattern.sub('', Setting)

    TokenList = Setting.split(TAB_VALUE_SPLIT)
    ValueList[0:len(TokenList)] = TokenList

    if QuotedMatches:
        ValueList[0] = QuotedMatches[0]
    return ValueList
## check format of PCD value against its the datum type
#
# For PCD value setting
#
def CheckPcdDatum(Type, Value):
    """Validate a PCD value string against its datum type.

    @param Type   PCD datum type: VOID*, BOOLEAN, UINT8/16/32/64, or a
                  structure type name
    @param Value  value string as written in the meta file

    @retval (True, '')             value is valid
    @retval (True, 'StructurePcd') Type is none of the built-in types
    @retval (False, Reason)        value is invalid; Reason says why
    """
    if Type == TAB_VOID:
        ValueRe = re.compile(r'\s*L?\".*\"\s*$')
        # A VOID* value must be a {...} byte array, a "..."/'...' string, or
        # an L"..."/L'...' unicode string.  NOTE: the original condition was
        # (a or b and c), so a value starting with L' passed without its
        # closing quote; the parentheses below require the closing quote for
        # both single-quote forms.
        if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"'))
                or (Value.startswith('{') and Value.endswith('}'))
                or ((Value.startswith("L'") or Value.startswith("'")) and Value.endswith("'"))):
            return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\
                          ", \"...\" or \'...\' for string, L\"...\" or L\'...\' for unicode string" % (Value, Type)
        elif ValueRe.match(Value):
            # Check the chars in UnicodeString or CString is printable
            if Value.startswith("L"):
                Value = Value[2:-1]
            else:
                Value = Value[1:-1]
            Printset = set(string.printable)
            Printset.remove(TAB_PRINTCHAR_VT)
            Printset.add(TAB_PRINTCHAR_BS)
            Printset.add(TAB_PRINTCHAR_NUL)
            if not set(Value).issubset(Printset):
                PrintList = sorted(Printset)
                return False, "Invalid PCD string value of type [%s]; must be printable chars %s." % (Type, PrintList)
    elif Type == 'BOOLEAN':
        if Value not in ['TRUE', 'True', 'true', '0x1', '0x01', '1', 'FALSE', 'False', 'false', '0x0', '0x00', '0']:
            return False, "Invalid value [%s] of type [%s]; must be one of TRUE, True, true, 0x1, 0x01, 1"\
                          ", FALSE, False, false, 0x0, 0x00, 0" % (Value, Type)
    elif Type in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64]:
        # Strip leading zeros so int(Value, 0) does not reject the value as
        # a malformed octal literal.
        if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
            Value = Value.lstrip('0')
        try:
            if Value and int(Value, 0) < 0:
                return False, "PCD can't be set to negative value[%s] for datum type [%s]" % (Value, Type)
            Value = int(Value, 0)
            if Value > MAX_VAL_TYPE[Type]:
                return False, "Too large PCD value[%s] for datum type [%s]" % (Value, Type)
        except:
            return False, "Invalid value [%s] of type [%s];"\
                          " must be a hexadecimal, decimal or octal in C language format." % (Value, Type)
    else:
        return True, "StructurePcd"

    return True, ""
def CommonPath(PathList):
    """Return the longest common directory prefix of the paths in PathList.

    Comparing only the lexicographic minimum and maximum of the list is
    sufficient: any component shared by those two is shared by every path
    in between.
    """
    Lowest = min(PathList).split(os.path.sep)
    Highest = max(PathList).split(os.path.sep)
    CommonParts = []
    for Low, High in zip(Lowest, Highest):
        if Low != High:
            break
        CommonParts.append(Low)
    return os.path.sep.join(CommonParts)
class PathClass(object):
    """A file path split into Root + File, with build metadata attached.

    Path is always Root joined with File; Dir/SubDir/Name/BaseName/Ext are
    derived parts kept in sync by __init__ and Validate().
    """
    def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False,
                 Arch='COMMON', ToolChainFamily='', Target='', TagName='', ToolCode=''):
        self.Arch = Arch
        self.File = str(File)
        # An absolute File overrides any supplied roots.
        if os.path.isabs(self.File):
            self.Root = ''
            self.AlterRoot = ''
        else:
            self.Root = str(Root)
            self.AlterRoot = str(AlterRoot)

        # Remove any '.' and '..' in path
        if self.Root:
            self.Root = mws.getWs(self.Root, self.File)
            self.Path = os.path.normpath(os.path.join(self.Root, self.File))
            self.Root = os.path.normpath(CommonPath([self.Root, self.Path]))
            # eliminate the side-effect of 'C:'
            if self.Root[-1] == ':':
                self.Root += os.path.sep
            # file path should not start with path separator
            if self.Root[-1] == os.path.sep:
                self.File = self.Path[len(self.Root):]
            else:
                self.File = self.Path[len(self.Root) + 1:]
        else:
            self.Path = os.path.normpath(self.File)

        self.SubDir, self.Name = os.path.split(self.File)
        self.BaseName, self.Ext = os.path.splitext(self.Name)

        if self.Root:
            if self.SubDir:
                self.Dir = os.path.join(self.Root, self.SubDir)
            else:
                self.Dir = self.Root
        else:
            self.Dir = self.SubDir

        # For binaries the caller supplies the type; for sources it is the
        # lower-cased file extension.
        if IsBinary:
            self.Type = Type
        else:
            self.Type = self.Ext.lower()

        self.IsBinary = IsBinary
        self.Target = Target
        self.TagName = TagName
        self.ToolCode = ToolCode
        self.ToolChainFamily = ToolChainFamily
        self.OriginalPath = self

    ## Convert the object of this class to a string
    #
    #  Convert member Path of the class to a string
    #
    #  @retval string Formatted String
    #
    def __str__(self):
        return self.Path

    ## Override __eq__ function
    #
    # Check whether PathClass are the same
    #
    # @retval False The two PathClass are different
    # @retval True  The two PathClass are the same
    #
    def __eq__(self, Other):
        return self.Path == str(Other)

    ## Override __cmp__ function
    #
    # Customize the comparison operation of two PathClass
    #
    # @retval 0     The two PathClass are different
    # @retval -1    The first PathClass is less than the second PathClass
    # @retval 1     The first PathClass is Bigger than the second PathClass
    def __cmp__(self, Other):
        OtherKey = str(Other)
        SelfKey = self.Path
        if SelfKey == OtherKey:
            return 0
        elif SelfKey > OtherKey:
            return 1
        else:
            return -1

    ## Override __hash__ function
    #
    # Use Path as key in hash table
    #
    # @retval string Key for hash table
    #
    def __hash__(self):
        return hash(self.Path)

    # Case-normalized key for dictionaries keyed by path.
    @cached_property
    def Key(self):
        return self.Path.upper()

    # Modification time of the file (st_mtime).
    @property
    def TimeStamp(self):
        return os.stat(self.Path)[8]

    def Validate(self, Type='', CaseSensitive=True):
        """Check that the file exists (optionally with the expected Type) and
        normalize the stored path to its real on-disk spelling.

        @retval (0, '') on success, or (FILE_TYPE_MISMATCH | FILE_NOT_FOUND |
                FILE_CASE_MISMATCH, info string) on failure.
        """
        # Resolve File against Dir (or OverrideDir) via the global file
        # cache; returns (relative file, root) or (None, None).
        def RealPath2(File, Dir='', OverrideDir=''):
            NewFile = None
            if OverrideDir:
                NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
                if NewFile:
                    if OverrideDir[-1] == os.path.sep:
                        return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
                    else:
                        return NewFile[len(OverrideDir) + 1:], NewFile[0:len(OverrideDir)]
            if GlobalData.gAllFiles:
                NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
            if not NewFile:
                NewFile = os.path.normpath(os.path.join(Dir, File))
                if not os.path.exists(NewFile):
                    return None, None
            if NewFile:
                if Dir:
                    if Dir[-1] == os.path.sep:
                        return NewFile[len(Dir):], NewFile[0:len(Dir)]
                    else:
                        return NewFile[len(Dir) + 1:], NewFile[0:len(Dir)]
                else:
                    return NewFile, ''
            return None, None

        if GlobalData.gCaseInsensitive:
            CaseSensitive = False
        if Type and Type.lower() != self.Type:
            return FILE_TYPE_MISMATCH, '%s (expect %s but got %s)' % (self.File, Type, self.Type)

        RealFile, RealRoot = RealPath2(self.File, self.Root, self.AlterRoot)
        if not RealRoot and not RealFile:
            # Not found: report the best-guess absolute path for the message.
            RealFile = self.File
            if self.AlterRoot:
                RealFile = os.path.join(self.AlterRoot, self.File)
            elif self.Root:
                RealFile = os.path.join(self.Root, self.File)
            if len (mws.getPkgPath()) == 0:
                return FILE_NOT_FOUND, os.path.join(self.AlterRoot, RealFile)
            else:
                return FILE_NOT_FOUND, "%s is not found in packages path:\n\t%s" % (self.File, '\n\t'.join(mws.getPkgPath()))

        ErrorCode = 0
        ErrorInfo = ''
        if RealRoot != self.Root or RealFile != self.File:
            if CaseSensitive and (RealFile != self.File or (RealRoot != self.Root and RealRoot != self.AlterRoot)):
                ErrorCode = FILE_CASE_MISMATCH
                ErrorInfo = self.File + '\n\t' + RealFile + " [in file system]"

            # Re-derive all path members from the real spelling.
            self.SubDir, self.Name = os.path.split(RealFile)
            self.BaseName, self.Ext = os.path.splitext(self.Name)
            if self.SubDir:
                self.Dir = os.path.join(RealRoot, self.SubDir)
            else:
                self.Dir = RealRoot
            self.File = RealFile
            self.Root = RealRoot
            self.Path = os.path.join(RealRoot, RealFile)
        return ErrorCode, ErrorInfo
## Parse PE image to get the required PE information.
#
class PeImageClass():
    ## Constructor
    #
    #  Parse the DOS header, PE signature, COFF file header, optional header
    #  and section headers of the file.  On any validation failure IsValid
    #  stays False and ErrorInfo describes the problem.
    #
    #   @param  PeFile  Path of the PE image file
    #
    def __init__(self, PeFile):
        self.FileName = PeFile
        self.IsValid = False
        self.Size = 0
        self.EntryPoint = 0
        self.SectionAlignment = 0
        self.SectionHeaderList = []   # (name, virtual addr, raw addr, virtual size) tuples
        self.ErrorInfo = ''
        try:
            PeObject = open(PeFile, 'rb')
        except:
            self.ErrorInfo = self.FileName + ' can not be found\n'
            return
        # Read DOS header
        ByteArray = array.array('B')
        ByteArray.fromfile(PeObject, 0x3E)
        ByteList = ByteArray.tolist()
        # DOS signature should be 'MZ'
        if self._ByteListToStr (ByteList[0x0:0x2]) != 'MZ':
            self.ErrorInfo = self.FileName + ' has no valid DOS signature MZ'
            return

        # Read 4 byte PE Signature, located at the offset stored in e_lfanew.
        PeOffset = self._ByteListToInt(ByteList[0x3C:0x3E])
        PeObject.seek(PeOffset)
        ByteArray = array.array('B')
        ByteArray.fromfile(PeObject, 4)
        # PE signature should be 'PE\0\0'
        if ByteArray.tolist() != [ord('P'), ord('E'), 0, 0]:
            self.ErrorInfo = self.FileName + ' has no valid PE signature PE00'
            return

        # Read PE file header (COFF header, 0x14 bytes).
        ByteArray = array.array('B')
        ByteArray.fromfile(PeObject, 0x14)
        ByteList = ByteArray.tolist()
        SecNumber = self._ByteListToInt(ByteList[0x2:0x4])
        if SecNumber == 0:
            self.ErrorInfo = self.FileName + ' has no section header'
            return

        # Read PE optional header; its size comes from the COFF header.
        OptionalHeaderSize = self._ByteListToInt(ByteArray[0x10:0x12])
        ByteArray = array.array('B')
        ByteArray.fromfile(PeObject, OptionalHeaderSize)
        ByteList = ByteArray.tolist()
        # Field offsets follow the PE optional header layout:
        # AddressOfEntryPoint @0x10, SectionAlignment @0x20, SizeOfImage @0x38.
        self.EntryPoint = self._ByteListToInt(ByteList[0x10:0x14])
        self.SectionAlignment = self._ByteListToInt(ByteList[0x20:0x24])
        self.Size = self._ByteListToInt(ByteList[0x38:0x3C])

        # Read each Section Header (0x28 bytes apiece).
        for Index in range(SecNumber):
            ByteArray = array.array('B')
            ByteArray.fromfile(PeObject, 0x28)
            ByteList = ByteArray.tolist()
            SecName = self._ByteListToStr(ByteList[0:8])
            SecVirtualSize = self._ByteListToInt(ByteList[8:12])
            SecRawAddress = self._ByteListToInt(ByteList[20:24])
            SecVirtualAddress = self._ByteListToInt(ByteList[12:16])
            self.SectionHeaderList.append((SecName, SecVirtualAddress, SecRawAddress, SecVirtualSize))
        self.IsValid = True
        PeObject.close()

    ## Convert a zero-terminated byte list to a str.
    def _ByteListToStr(self, ByteList):
        String = ''
        for index in range(len(ByteList)):
            if ByteList[index] == 0:
                break
            String += chr(ByteList[index])
        return String

    ## Convert a little-endian byte list to an integer.
    def _ByteListToInt(self, ByteList):
        Value = 0
        for index in range(len(ByteList) - 1, -1, -1):
            Value = (Value << 8) | int(ByteList[index])
        return Value
class DefaultStore():
    """Helper over the [DefaultStores] section data of a DSC file."""
    def __init__(self, DefaultStores ):
        # NOTE(review): DefaultStoreID/GetDefaultDefault treat this mapping's
        # values as store *names*, while GetMin unpacks each value as a
        # (storeid, storename) pair -- verify the actual value shape against
        # the callers before relying on either.
        self.DefaultStores = DefaultStores

    ## Return the store id for a store name, or None if unknown.
    def DefaultStoreID(self, DefaultStoreName):
        for key, value in self.DefaultStores.items():
            if value == DefaultStoreName:
                return key
        return None

    ## Return (id, name) of the default store: id "0" when present (or when
    #  there are no stores), otherwise the store with the smallest numeric id.
    def GetDefaultDefault(self):
        if not self.DefaultStores or "0" in self.DefaultStores:
            return "0", TAB_DEFAULT_STORES_DEFAULT
        else:
            minvalue = min(int(value_str) for value_str in self.DefaultStores)
            return (str(minvalue), self.DefaultStores[str(minvalue)])

    ## Return the name of the store with the smallest id among DefaultSIdList.
    #  NOTE(review): falls through (returns None) when minid matches no entry.
    def GetMin(self, DefaultSIdList):
        if not DefaultSIdList:
            return TAB_DEFAULT_STORES_DEFAULT
        storeidset = {storeid for storeid, storename in self.DefaultStores.values() if storename in DefaultSIdList}
        if not storeidset:
            return ""
        minid = min(storeidset )
        for sid, name in self.DefaultStores.values():
            if sid == minid:
                return name
class SkuClass():
    """Parsed SKU selection for a build: the requested SKU identifier(s)
    validated against the [SkuIds] section of the platform DSC."""
    # SkuUsageType classification values.
    DEFAULT = 0
    SINGLE = 1
    MULTIPLE =2

    def __init__(self,SkuIdentifier='', SkuIds=None):
        if SkuIds is None:
            SkuIds = {}

        # Every SKU id must fit in a UINT64.
        for SkuName in SkuIds:
            SkuId = SkuIds[SkuName][0]
            skuid_num = int(SkuId, 16) if SkuId.upper().startswith("0X") else int(SkuId)
            if skuid_num > 0xFFFFFFFFFFFFFFFF:
                EdkLogger.error("build", PARAMETER_INVALID,
                            ExtraData = "SKU-ID [%s] value %s exceeds the max value of UINT64"
                                      % (SkuName, SkuId))

        self.AvailableSkuIds = OrderedDict()
        self.SkuIdSet = []
        self.SkuIdNumberSet = []
        self.SkuData = SkuIds
        self._SkuInherit = {}
        self._SkuIdentifier = SkuIdentifier
        # SkuIdentifier: '' -> DEFAULT only, 'ALL' -> every SKU in the DSC,
        # otherwise a '|'-separated list of SKU names.
        if SkuIdentifier == '' or SkuIdentifier is None:
            self.SkuIdSet = ['DEFAULT']
            self.SkuIdNumberSet = ['0U']
        elif SkuIdentifier == 'ALL':
            self.SkuIdSet = list(SkuIds.keys())
            self.SkuIdNumberSet = [num[0].strip() + 'U' for num in SkuIds.values()]
        else:
            r = SkuIdentifier.split('|')
            self.SkuIdSet=[(r[k].strip()).upper() for k in range(len(r))]
            k = None
            try:
                self.SkuIdNumberSet = [SkuIds[k][0].strip() + 'U' for k in self.SkuIdSet]
            except Exception:
                EdkLogger.error("build", PARAMETER_INVALID,
                            ExtraData = "SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"
                                      % (k, " | ".join(SkuIds.keys())))
        for each in self.SkuIdSet:
            if each in SkuIds:
                self.AvailableSkuIds[each] = SkuIds[each][0]
            else:
                EdkLogger.error("build", PARAMETER_INVALID,
                            ExtraData="SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"
                                      % (each, " | ".join(SkuIds.keys())))
        if self.SkuUsageType != SkuClass.SINGLE:
            self.AvailableSkuIds.update({'DEFAULT':0, 'COMMON':0})
        # Publish the selected SKUs globally (sans the COMMON pseudo-SKU,
        # and sans DEFAULT in single-SKU builds).
        if self.SkuIdSet:
            GlobalData.gSkuids = (self.SkuIdSet)
            if 'COMMON' in GlobalData.gSkuids:
                GlobalData.gSkuids.remove('COMMON')
            if self.SkuUsageType == self.SINGLE:
                if len(GlobalData.gSkuids) != 1:
                    if 'DEFAULT' in GlobalData.gSkuids:
                        GlobalData.gSkuids.remove('DEFAULT')
            if GlobalData.gSkuids:
                GlobalData.gSkuids.sort()

    ## Return the SKU that skuname inherits from ('DEFAULT' when none).
    #  The inheritance map is built lazily from SkuData on first use.
    def GetNextSkuId(self, skuname):
        if not self._SkuInherit:
            self._SkuInherit = {}
            for item in self.SkuData.values():
                self._SkuInherit[item[1]]=item[2] if item[2] else "DEFAULT"
        return self._SkuInherit.get(skuname, "DEFAULT")

    ## Return the inheritance chain from DEFAULT down to sku.
    def GetSkuChain(self, sku):
        if sku == "DEFAULT":
            return ["DEFAULT"]
        skulist = [sku]
        nextsku = sku
        while True:
            nextsku = self.GetNextSkuId(nextsku)
            skulist.append(nextsku)
            if nextsku == "DEFAULT":
                break
        skulist.reverse()
        return skulist

    ## Return all selected SKUs (plus ancestors) ordered so a SKU always
    #  appears after the SKUs it overrides, without duplicates.
    def SkuOverrideOrder(self):
        skuorderset = []
        for skuname in self.SkuIdSet:
            skuorderset.append(self.GetSkuChain(skuname))

        skuorder = []
        # Interleave the chains level by level.
        for index in range(max(len(item) for item in skuorderset)):
            for subset in skuorderset:
                if index > len(subset)-1:
                    continue
                if subset[index] in skuorder:
                    continue
                skuorder.append(subset[index])

        return skuorder

    ## Classify the SKU selection as DEFAULT, SINGLE or MULTIPLE.
    @property
    def SkuUsageType(self):
        if self._SkuIdentifier.upper() == "ALL":
            return SkuClass.MULTIPLE

        if len(self.SkuIdSet) == 1:
            if self.SkuIdSet[0] == 'DEFAULT':
                return SkuClass.DEFAULT
            return SkuClass.SINGLE
        # DEFAULT plus one named SKU still counts as a single-SKU build.
        if len(self.SkuIdSet) == 2 and 'DEFAULT' in self.SkuIdSet:
            return SkuClass.SINGLE
        return SkuClass.MULTIPLE

    ## Render the selected SKU ids as a C array initializer string: each
    #  SKU's inheritance chain up to (excluding) DEFAULT, 0x0-terminated.
    def DumpSkuIdArrary(self):
        if self.SkuUsageType == SkuClass.SINGLE:
            return "{0x0}"
        ArrayStrList = []
        for skuname in self.AvailableSkuIds:
            if skuname == "COMMON":
                continue
            while skuname != "DEFAULT":
                ArrayStrList.append(hex(int(self.AvailableSkuIds[skuname])))
                skuname = self.GetNextSkuId(skuname)
            ArrayStrList.append("0x0")
        return "{{{myList}}}".format(myList=",".join(ArrayStrList))

    @property
    def AvailableSkuIdSet(self):
        return self.AvailableSkuIds

    ## The single SKU being built, or 'DEFAULT' for multi-SKU builds.
    @property
    def SystemSkuId(self):
        if self.SkuUsageType == SkuClass.SINGLE:
            if len(self.SkuIdSet) == 1:
                return self.SkuIdSet[0]
            else:
                return self.SkuIdSet[0] if self.SkuIdSet[0] != 'DEFAULT' else self.SkuIdSet[1]
        else:
            return 'DEFAULT'
## Get the integer value from string like "14U" or integer like 2
#
# @param Input The object that may be either a integer value or a string
#
# @retval Value The integer value that the input represents
#
def GetIntegerValue(Input):
    """Convert a C-style integer literal such as "14U", "0x2ULL" or "2LL"
    (or an already-numeric value) into a Python int.

    Non-string inputs are returned unchanged; an empty string yields 0.
    """
    if not isinstance(Input, str):
        return Input
    Text = Input
    # Strip C integer suffixes in the same order the original parser did
    for Suffix in ("U", "ULL", "LL"):
        if Text.endswith(Suffix):
            Text = Text[:-len(Suffix)]
    if Text.startswith(("0x", "0X")):
        return int(Text, 16)
    if Text == '':
        return 0
    return int(Text)
#
# Pack a GUID (registry format) list into a buffer and return it
#
def PackGUID(Guid):
    """Pack a registry-format GUID, supplied as a list of five hex-string
    fields, into its 16-byte binary representation."""
    # First three fields are packed whole; fields 4 and 5 byte-by-byte.
    Values = [int(Guid[0], 16),
              int(Guid[1], 16),
              int(Guid[2], 16),
              int(Guid[3][-4:-2], 16),
              int(Guid[3][-2:], 16)]
    # The node field is six bytes, most-significant byte first
    Node = Guid[4][-12:]
    for Pos in range(0, 12, 2):
        Values.append(int(Node[Pos:Pos + 2], 16))
    return pack(PACK_PATTERN_GUID, *Values)
#
# Pack a GUID (byte) list into a buffer and return it
#
def PackByteFormatGUID(Guid):
    """Pack a GUID supplied as a sequence of eleven integer components
    (3 words + 8 bytes) into its 16-byte binary representation."""
    # Forward exactly the first eleven components, matching the pack pattern
    return pack(PACK_PATTERN_GUID, *(Guid[Index] for Index in range(11)))
## DeepCopy dict/OrderedDict recusively
#
# @param ori_dict a nested dict or ordereddict
#
# @retval new dict or orderdict
#
def CopyDict(ori_dict):
    """Recursively deep-copy a dict or OrderedDict.

    Inputs that are neither a plain dict nor an OrderedDict are returned
    unchanged.  Nested dict/OrderedDict values are copied; all other
    values are shared with the original.
    """
    kind = ori_dict.__class__
    if kind not in (dict, OrderedDict):
        return ori_dict
    return kind(
        (key, CopyDict(val) if isinstance(val, (dict, OrderedDict)) else val)
        for key, val in ori_dict.items()
    )
#
# Remove the c/c++ comments: // and /* */
#
def RemoveCComments(ctext):
    """Strip C/C++ comments ("//..." and "/*...*/") from ctext.

    Each comment is replaced with a newline so the remaining text keeps
    roughly its original line structure.  Comment markers occurring inside
    string literals are not recognized.
    """
    # Raw string avoids the invalid "\*" escape sequence the old pattern
    # relied on (a SyntaxWarning on modern Python); re.S lets "/* */"
    # comments span multiple lines.
    return re.sub(r'//.*?\n|/\*.*?\*/', '\n', ctext, flags=re.S)
| edk2-master | BaseTools/Source/Python/Common/Misc.py |
## @file
# This file is used to define each component of Target.txt file
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.GlobalData as GlobalData
import Common.LongFilePathOs as os
from . import EdkLogger
from . import DataType
from .BuildToolError import *
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
gDefaultTargetTxtFile = "target.txt"
## TargetTxtClassObject
#
# This class defined content used in file target.txt
#
# @param object: Inherited from object class
# @param Filename: Input value for full path of target.txt
#
# @var TargetTxtDictionary: To store keys and values defined in target.txt
#
class TargetTxtClassObject(object):
    ## Constructor
    #
    # @param Filename: Full path of a target.txt to load immediately, or None
    #
    def __init__(self, Filename = None):
        # Mapping of every recognized target.txt key to its parsed value.
        # Path-valued keys hold strings; TARGET, TOOL_CHAIN_TAG and
        # TARGET_ARCH hold lists because they may carry several values.
        self.TargetTxtDictionary = {
            DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM                            : '',
            DataType.TAB_TAT_DEFINES_ACTIVE_MODULE                              : '',
            DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF                            : '',
            DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER               : '',
            DataType.TAB_TAT_DEFINES_TARGET                                     : [],
            DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG                             : [],
            DataType.TAB_TAT_DEFINES_TARGET_ARCH                                : [],
            DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF                            : '',
        }
        # Directory containing the loaded target.txt; used to resolve the
        # relative TOOL_CHAIN_CONF / BUILD_RULE_CONF paths below.
        self.ConfDirectoryPath = ""
        if Filename is not None:
            self.LoadTargetTxtFile(Filename)

    ## LoadTargetTxtFile
    #
    # Load target.txt file and parse it, return a set structure to store keys and values
    #
    # @param Filename: Input value for full path of target.txt
    #
    # @retval set() A set structure to store keys and values
    # @retval 1     Error happenes in parsing
    #
    def LoadTargetTxtFile(self, Filename):
        if os.path.exists(Filename) and os.path.isfile(Filename):
            return self.ConvertTextFileToDict(Filename, '#', '=')
        else:
            # NOTE(review): EdkLogger.error appears to abort the build, which
            # would make the "return 1" below unreachable -- confirm.
            EdkLogger.error("Target.txt Parser", FILE_NOT_FOUND, ExtraData=Filename)
            return 1

    ## ConvertTextFileToDict
    #
    # Convert a text file to a dictionary of (name:value) pairs.
    # The data is saved to self.TargetTxtDictionary
    #
    # @param FileName:           Text filename
    # @param CommentCharacter:   Comment char, be used to ignore comment content
    # @param KeySplitCharacter:  Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
    #
    # @retval 0 Convert successfully
    # @retval 1 Open file failed
    #
    def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
        F = None
        try:
            F = open(FileName, 'r')
            # Remember where target.txt lives so relative conf paths resolve
            self.ConfDirectoryPath = os.path.dirname(FileName)
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
            # NOTE(review): EdkLogger.error appears to raise, so this cleanup
            # looks unreachable in practice -- confirm.
            if F is not None:
                F.close()

        for Line in F:
            Line = Line.strip()
            # Skip blank lines and whole-line comments
            if Line.startswith(CommentCharacter) or Line == '':
                continue

            # Split "Key = Value" on the first separator only
            LineList = Line.split(KeySplitCharacter, 1)
            Key = LineList[0].strip()
            if len(LineList) == 2:
                Value = LineList[1].strip()
            else:
                Value = ""

            if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
                      DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
                # Path-valued keys: normalize to forward slashes first
                self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
                if Key == DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.TargetTxtDictionary[Key]:
                    if self.TargetTxtDictionary[Key].startswith("Conf/"):
                        Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                        if not os.path.exists(Tools_Def) or not os.path.isfile(Tools_Def):
                            # If Conf/Conf does not exist, try just the Conf/ directory
                            Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                    else:
                        # The File pointed to by TOOL_CHAIN_CONF is not in a Conf/ directory
                        Tools_Def = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    self.TargetTxtDictionary[Key] = Tools_Def
                if Key == DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF and self.TargetTxtDictionary[Key]:
                    if self.TargetTxtDictionary[Key].startswith("Conf/"):
                        Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                        if not os.path.exists(Build_Rule) or not os.path.isfile(Build_Rule):
                            # If Conf/Conf does not exist, try just the Conf/ directory
                            Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].replace("Conf/", "", 1).strip())
                    else:
                        # The File pointed to by BUILD_RULE_CONF is not in a Conf/ directory
                        Build_Rule = os.path.join(self.ConfDirectoryPath, self.TargetTxtDictionary[Key].strip())
                    self.TargetTxtDictionary[Key] = Build_Rule
            elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
                         DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
                # List-valued keys: split on whitespace
                self.TargetTxtDictionary[Key] = Value.split()
            elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
                try:
                    V = int(Value, 0)
                except:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value),
                                    File=FileName)
                # Validated as a number above, but stored as the raw string
                self.TargetTxtDictionary[Key] = Value
            #elif Key not in GlobalData.gGlobalDefines:
            #    GlobalData.gGlobalDefines[Key] = Value

        F.close()
        return 0
## TargetTxtDict
#
# Load target.txt in input Conf dir
#
# @param ConfDir: Conf dir
#
# @retval Target An instance of TargetTxtClassObject() with loaded target.txt
#
class TargetTxtDict():
    """Process-wide singleton giving lazy access to the parsed target.txt."""

    def __new__(cls, *args, **kw):
        # Classic singleton: every instantiation returns the same object
        if not hasattr(cls, '_instance'):
            orig = super(TargetTxtDict, cls)
            cls._instance = orig.__new__(cls, *args, **kw)
        return cls._instance

    def __init__(self):
        # BUGFIX: guard on the real attribute name.  The previous check,
        # hasattr(self, 'Target'), invoked the Target property, which could
        # eagerly re-parse target.txt as a side effect of re-instantiation.
        if not hasattr(self, 'TxtTarget'):
            self.TxtTarget = None

    @property
    def Target(self):
        """The loaded TargetTxtClassObject, parsed on first access."""
        if not self.TxtTarget:
            self._GetTarget()
        return self.TxtTarget

    def _GetTarget(self):
        """Locate the Conf directory and load Conf/target.txt into TxtTarget."""
        Target = TargetTxtClassObject()
        ConfDirectory = GlobalData.gCmdConfDir
        if ConfDirectory:
            # Get alternate Conf location, if it is absolute, then just use the absolute directory name
            ConfDirectoryPath = os.path.normpath(ConfDirectory)

            if not os.path.isabs(ConfDirectoryPath):
                # Since alternate directory name is not absolute, the alternate directory is located within the WORKSPACE
                # This also handles someone specifying the Conf directory in the workspace. Using --conf=Conf
                ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], ConfDirectoryPath)
        else:
            if "CONF_PATH" in os.environ:
                ConfDirectoryPath = os.path.normcase(os.path.normpath(os.environ["CONF_PATH"]))
            else:
                # Get standard WORKSPACE/Conf use the absolute path to the WORKSPACE/Conf
                ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], 'Conf')
        GlobalData.gConfDirectory = ConfDirectoryPath
        targettxt = os.path.normpath(os.path.join(ConfDirectoryPath, gDefaultTargetTxtFile))
        if os.path.exists(targettxt):
            Target.LoadTargetTxtFile(targettxt)
        self.TxtTarget = Target
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Simple smoke test: load target.txt through the singleton and dump the
    # parsed values.  (The previous code passed an argument to
    # TargetTxtDict() -- which its constructor does not accept -- and read
    # TargetTxtDictionary off the wrapper instead of the loaded object.)
    Target = TargetTxtDict().Target
    print(Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER])
    print(Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET])
    print(Target.TargetTxtDictionary)
| edk2-master | BaseTools/Source/Python/Common/TargetTxtClassObject.py |
## @file
# This file is used to define common static strings used by INF/DEC/DSC files
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
import re

gIsWindows = None
gWorkspace = "."
gOptions = None
gCaseInsensitive = False
gAllFiles = None
gCommand = None
gSKUID_CMD = None

gGlobalDefines = {}
gPlatformDefines = {}
# PCD name and value pair for fixed at build and feature flag
gPlatformPcds = {}
gPlatformFinalPcds = {}
# PCDs with type that are not fixed at build and feature flag
gPlatformOtherPcds = {}
gActivePlatform = None
gCommandLineDefines = {}
gEdkGlobal = {}
gCommandMaxLength = 4096
# for debug trace purpose when problem occurs
gProcessingFile = ''
gBuildingModule = ''
gSkuids = []
gDefaultStores = []
gGuidDict = {}

# definition for a MACRO name.  used to create regular expressions below.
_MacroNamePattern = "[A-Z][A-Z0-9_]*"

## Regular expression for matching macro used in DSC/DEC/INF file inclusion
# (raw strings here keep "\$" / "\(" from being treated as invalid string
# escapes; the compiled patterns are unchanged)
gMacroRefPattern = re.compile(r"\$\(({})\)".format(_MacroNamePattern), re.UNICODE)
gMacroDefPattern = re.compile(r"^(DEFINE|EDK_GLOBAL)[ \t]+")
gMacroNamePattern = re.compile(r"^{}$".format(_MacroNamePattern))

# definition for a GUID.  used to create regular expressions below.
_HexChar = r"[0-9a-fA-F]"
_GuidPattern = r"{Hex}{{8}}-{Hex}{{4}}-{Hex}{{4}}-{Hex}{{4}}-{Hex}{{12}}".format(Hex=_HexChar)

## Regular expressions for GUID matching
gGuidPattern = re.compile(r'{}'.format(_GuidPattern))
gGuidPatternEnd = re.compile(r'{}$'.format(_GuidPattern))

## Regular expressions for HEX matching
g4HexChar = re.compile(r'{}{{4}}'.format(_HexChar))
gHexPattern = re.compile(r'0[xX]{}+'.format(_HexChar))
gHexPatternAll = re.compile(r'0[xX]{}+$'.format(_HexChar))

## Regular expressions for string identifier checking
gIdentifierPattern = re.compile(r'^[a-zA-Z][a-zA-Z0-9_]*$', re.UNICODE)

## Regular expression for GUID c structure format
_GuidCFormatPattern = r"{{\s*0[xX]{Hex}{{1,8}}\s*,\s*0[xX]{Hex}{{1,4}}\s*,\s*0[xX]{Hex}{{1,4}}" \
                      r"\s*,\s*{{\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}" \
                      r"\s*,\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}" \
                      r"\s*,\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}" \
                      r"\s*,\s*0[xX]{Hex}{{1,2}}\s*,\s*0[xX]{Hex}{{1,2}}\s*}}\s*}}".format(Hex=_HexChar)
gGuidCFormatPattern = re.compile(r"{}".format(_GuidCFormatPattern))

#
# A global variable for whether current build in AutoGen phase or not.
#
gAutoGenPhase = False

#
# The Conf dir outside the workspace dir
#
gConfDirectory = ''
gCmdConfDir = ''
gBuildDirectory = ''
#
# The relative default database file path
#
gDatabasePath = ".cache/build.db"

#
# Build flag for binary build
#
gIgnoreSource = False

#
# FDF parser
#
gFdfParser = None

BuildOptionPcd = []

#
# Mixed PCD name dict
#
MixedPcd = {}

# Structure Pcd dict
gStructurePcd = {}
gPcdSkuOverrides = {}
# Pcd name for the Pcd which used in the Conditional directives
gConditionalPcds = []

gUseHashCache = None
gBinCacheDest = None
gBinCacheSource = None
gPlatformHash = None
gPlatformHashFile = None
gPackageHash = None
gPackageHashFile = None
gModuleHashFile = None
gCMakeHashFile = None
gHashChainStatus = None
gModulePreMakeCacheStatus = None
gModuleMakeCacheStatus = None
gFileHashDict = None
gModuleAllCacheStatus = None
gModuleCacheHit = None

gEnableGenfdsMultiThread = True
gSikpAutoGenCache = set()
# Common lock for the file access in multiple process AutoGens
file_lock = None
| edk2-master | BaseTools/Source/Python/Common/GlobalData.py |
## @file
# This file is used to define common parsing related functions used in parsing INF/DEC/DSC process
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
from .StringUtils import *
from CommonDataClass.DataClass import *
from .DataType import *
## ParseDefineMacro
#
# Search whole table to find all defined Macro and replaced them with the real values
#
def ParseDefineMacro2(Table, RecordSets, GlobalMacro):
    """Expand every macro reference in the in-memory record sets.

    DEFINE macros are collected from Table, overridden by GlobalMacro,
    then substituted into the first field of every record in RecordSets.
    """
    #
    # Collect every DEFINE macro recorded in the table
    #
    SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
                    where Model = %s
                    and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    Macros = dict((Record[0], Record[1]) for Record in Table.Exec(SqlCommand))
    #
    # Global macros take precedence over table-local DEFINEs
    #
    Macros.update(GlobalMacro)
    #
    # Substitute macro references in every stored record
    #
    for RecordList in RecordSets.values():
        if not RecordList:
            continue
        for Record in RecordList:
            Record[0] = ReplaceMacro(Record[0], Macros)
## ParseDefineMacro
#
# Search whole table to find all defined Macro and replaced them with the real values
#
def ParseDefineMacro(Table, GlobalMacro):
    """Resolve all DEFINE macros stored in Table and persist expanded values.

    Every record whose Value1 still contains a "$(...)" reference is
    rewritten in the database with the macro-expanded text.  GlobalMacro
    entries override DEFINEs found in the table itself.
    """
    #
    # Gather every DEFINE macro recorded in the table
    #
    SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
                    where Model = %s
                    and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    Macros = {}
    for Record in Table.Exec(SqlCommand):
        Macros[Record[0]] = Record[1]
    #
    # Global macros take precedence over table-local DEFINEs
    #
    Macros.update(GlobalMacro)
    #
    # Locate every record that still carries a "$(...)" reference and
    # write back its expanded form
    #
    SqlCommand = """select ID, Value1 from %s
                    where Model != %s
                    and Value1 like '%%$(%%' and Value1 like '%%)%%'
                    and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    for RecordId, RawValue in Table.Exec(SqlCommand):
        Expanded = ReplaceMacro(RawValue, Macros)
        Table.Exec("""update %s set Value1 = '%s'
                        where ID = %s""" % (Table.Table, ConvertToSqlString2(Expanded), RecordId))
##QueryDefinesItem
#
# Search item of section [Defines] by name, return its values
#
# @param Table: The Table to be executed
# @param Name: The Name of item of section [Defines]
# @param Arch: The Arch of item of section [Defines]
#
# @retval RecordSet: A list of all matched records
#
def QueryDefinesItem(Table, Name, Arch, BelongsToFile):
    """Return the value list of a [Defines] item, falling back to the
    COMMON arch when the requested arch has no match.

    LIBRARY_CLASS values are returned whole; everything else is split.
    """
    Query = """select Value2 from %s
                    where Model = %s
                    and Value1 = '%s'
                    and Arch = '%s'
                    and BelongsToFile = %s
                    and Enabled > -1"""
    RecordSet = Table.Exec(Query % (Table.Table, MODEL_META_DATA_HEADER,
                                    ConvertToSqlString2(Name), ConvertToSqlString2(Arch), BelongsToFile))
    if len(RecordSet) < 1:
        # No arch-specific entry: retry against the COMMON section
        RecordSet = Table.Exec(Query % (Table.Table, MODEL_META_DATA_HEADER,
                                        ConvertToSqlString2(Name),
                                        ConvertToSqlString2(TAB_ARCH_COMMON.upper()), BelongsToFile))
    if not RecordSet:
        return ['']
    if len(RecordSet) == 1:
        if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
            return [RecordSet[0][0]]
        return GetSplitValueList(RecordSet[0][0])
    # Multiple records: flatten their values into one list
    RetVal = []
    for Record in RecordSet:
        if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
            RetVal.append(Record[0])
        else:
            RetVal.extend(GetSplitValueList(Record[0]))
    return RetVal
##QueryDefinesItem
#
# Search item of section [Defines] by name, return its values
#
# @param Table: The Table to be executed
# @param Name: The Name of item of section [Defines]
# @param Arch: The Arch of item of section [Defines]
#
# @retval RecordSet: A list of all matched records
#
def QueryDefinesItem2(Table, Arch, BelongsToFile):
    """Return all [Defines] records of a file for Arch, falling back to
    the COMMON arch when the requested arch has none.

    Rows are (Value1, Value2, StartLine).
    """
    Query = """select Value1, Value2, StartLine from %s
                    where Model = %s
                    and Arch = '%s'
                    and BelongsToFile = %s
                    and Enabled > -1"""
    RecordSet = Table.Exec(Query % (Table.Table, MODEL_META_DATA_HEADER,
                                    ConvertToSqlString2(Arch), BelongsToFile))
    if not RecordSet:
        # No arch-specific entries: retry against the COMMON section
        RecordSet = Table.Exec(Query % (Table.Table, MODEL_META_DATA_HEADER,
                                        ConvertToSqlString2(TAB_ARCH_COMMON), BelongsToFile))
    return RecordSet
##QueryDscItem
#
# Search all dsc item for a specific section
#
# @param Table: The Table to be executed
# @param Model: The type of section
#
# @retval RecordSet: A list of all matched records
#
def QueryDscItem(Table, Model, BelongsToItem, BelongsToFile):
    """Fetch all enabled DSC records of one section type within the given
    item and file scope.  Rows are (Value1, Arch, StartLine, ID, Value2)."""
    Query = """select Value1, Arch, StartLine, ID, Value2 from %s
                    where Model = %s
                    and BelongsToItem = %s
                    and BelongsToFile = %s
                    and Enabled > -1"""
    return Table.Exec(Query % (Table.Table, Model, BelongsToItem, BelongsToFile))
##QueryDecItem
#
# Search all dec item for a specific section
#
# @param Table: The Table to be executed
# @param Model: The type of section
#
# @retval RecordSet: A list of all matched records
#
def QueryDecItem(Table, Model, BelongsToItem):
    """Fetch all enabled DEC records of one section type under
    BelongsToItem.  Rows are (Value1, Arch, StartLine, ID, Value2)."""
    Query = """select Value1, Arch, StartLine, ID, Value2 from %s
                    where Model = %s
                    and BelongsToItem = %s
                    and Enabled > -1"""
    return Table.Exec(Query % (Table.Table, Model, BelongsToItem))
##QueryInfItem
#
# Search all dec item for a specific section
#
# @param Table: The Table to be executed
# @param Model: The type of section
#
# @retval RecordSet: A list of all matched records
#
def QueryInfItem(Table, Model, BelongsToItem):
    """Fetch all enabled INF records of one section type under
    BelongsToItem.  Rows are (Value1, Arch, StartLine, ID, Value2)."""
    Query = """select Value1, Arch, StartLine, ID, Value2 from %s
                    where Model = %s
                    and BelongsToItem = %s
                    and Enabled > -1"""
    return Table.Exec(Query % (Table.Table, Model, BelongsToItem))
## GetBuildOption
#
# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
# Return (Family, ToolFlag, Flag)
#
# @param String: String with BuildOption statement
# @param File: The file which defines build option, used in error report
#
# @retval truple() A truple structure as (Family, ToolChain, Flag)
#
def GetBuildOption(String, File, LineNo = -1):
    """Parse a "[<Family>:]<ToolFlag>=Flag" build-option line.

    Returns (Family, ToolChain, Flag); Family is '' when not given.
    Raises a parser error (via RaiseParserError) when no '=' is present.
    """
    (Family, ToolChain, Flag) = ('', '', '')
    if String.find(TAB_EQUAL_SPLIT) < 0:
        # Not a valid build-option statement without an '='
        RaiseParserError(String, 'BuildOptions', File, '[<Family>:]<ToolFlag>=Flag', LineNo)
    else:
        Key, Flag = GetSplitValueList(String, TAB_EQUAL_SPLIT, MaxSplit = 1)
        Flag = Flag.strip()
        ColonPos = Key.find(':')
        if ColonPos > -1:
            # Optional "<Family>:" prefix ahead of the tool flag
            Family = Key[ : ColonPos].strip()
            ToolChain = Key[ColonPos + 1 : ].strip()
        else:
            ToolChain = Key.strip()
    return (Family, ToolChain, Flag)
## Get Library Class
#
# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
# @param ContainerFile: The file which describes the library class, used for error report
#
# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
#
def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo = -1):
    """Parse a DSC "<LibraryClassKeyWord>|<LibraryInstance>" entry.

    Returns (keyword, instance, supported-module-list).
    """
    Parts = GetSplitValueList(Item[0])
    SupMod = SUP_MODULE_LIST_STRING
    if len(Parts) != 2:
        RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>|<LibraryInstance>')
    else:
        # The instance must be an existing .inf file under the workspace
        CheckFileType(Parts[1], '.Inf', ContainerFile, 'library class instance', Item[0], LineNo)
        CheckFileExist(WorkspaceDir, Parts[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
        if Item[1] != '':
            SupMod = Item[1]
    return (Parts[0], Parts[1], SupMod)
## Get Library Class
#
# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
# @param ContainerFile: The file which describes the library class, used for error report
#
# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
#
def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
    """Parse an INF library-class entry of the form
    "<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]".

    Returns (keyword, instance, pcd-token-info, supported-module-list).
    """
    # Pad with two separators so the optional fields always index safely
    Parts = GetSplitValueList(Item[0] + DataType.TAB_VALUE_SPLIT * 2)
    SupMod = SUP_MODULE_LIST_STRING
    if len(Parts) > 5:
        RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]')
    else:
        CheckFileType(Parts[1], '.Inf', ContainerFile, 'LibraryClasses', Item[0], LineNo)
        CheckFileExist(WorkspaceDir, Parts[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
        if Parts[2] != '':
            # Optional FeaturePcd selector must be <Guid>.<PcdCName>
            CheckPcdTokenInfo(Parts[2], 'LibraryClasses', ContainerFile, LineNo)
        if Item[1] != '':
            SupMod = Item[1]
    return (Parts[0], Parts[1], Parts[2], SupMod)
## CheckPcdTokenInfo
#
# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
#
# @param TokenInfoString: String to be checked
# @param Section: Used for error report
# @param File: Used for error report
#
# @retval True PcdTokenInfo is in correct format
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
    """Validate that TokenInfoString has the form
    "<TokenSpaceGuidCName>.<PcdCName>"; raise a parser error otherwise."""
    Format = '<TokenSpaceGuidCName>.<PcdCName>'
    if TokenInfoString != '' and TokenInfoString is not None:
        Pieces = GetSplitValueList(TokenInfoString, TAB_SPLIT)
        if len(Pieces) == 2:
            return True
    # Empty, None, or a wrong number of '.'-separated pieces
    RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
## Get Pcd
#
# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
#
def GetPcd(Item, Type, ContainerFile, LineNo = -1):
    """Parse a DSC PCD entry
    "<Guid>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]".

    Returns (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type).
    """
    TokenGuid = TokenName = Value = MaximumDatumSize = Token = ''
    # Pad with two separators so the optional fields always index safely
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if not 4 <= len(Fields) <= 6:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]', LineNo)
    else:
        Value, MaximumDatumSize, Token = Fields[1], Fields[2], Fields[3]
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        TokenGuid, TokenName = GetSplitValueList(Fields[0], TAB_SPLIT)
    return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
## Get FeatureFlagPcd
#
# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], Type)
#
def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo = -1):
    """Parse a feature-flag PCD entry "<Guid>.<TokenCName>|TRUE/FALSE".

    Returns (TokenName, TokenGuid, Value, Type).
    """
    TokenGuid = TokenName = Value = ''
    Fields = GetSplitValueList(Item)
    if len(Fields) != 2:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', LineNo)
    else:
        Value = Fields[1]
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        TokenGuid, TokenName = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
    return (TokenName, TokenGuid, Value, Type)
## Get DynamicDefaultPcd
#
# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
#
def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo = -1):
    """Parse a dynamic-default PCD entry
    "<Guid>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]".

    Returns (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type).
    """
    TokenGuid = TokenName = Value = DatumTyp = MaxDatumSize = ''
    # Pad with two separators so the optional fields always index safely
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if not 4 <= len(Fields) <= 8:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]', LineNo)
    else:
        Value, DatumTyp, MaxDatumSize = Fields[1], Fields[2], Fields[3]
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        TokenGuid, TokenName = GetSplitValueList(Fields[0], TAB_SPLIT)
    return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
## Get DynamicHiiPcd
#
# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], List[4], List[5], Type)
#
def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo = -1):
    """Parse a dynamic-HII PCD entry
    "<Guid>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]".

    Returns (TokenName, TokenGuid, L1..L5, Type) where L1..L5 are the
    fields following the token info.
    """
    TokenGuid = TokenName = L1 = L2 = L3 = L4 = L5 = ''
    # Pad with two separators so the optional fields always index safely
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if not 6 <= len(Fields) <= 8:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]', LineNo)
    else:
        L1, L2, L3, L4, L5 = Fields[1:6]
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        TokenGuid, TokenName = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
    return (TokenName, TokenGuid, L1, L2, L3, L4, L5, Type)
## Get DynamicVpdPcd
#
# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], Type)
#
def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo = -1):
    """Parse a dynamic-VPD PCD entry
    "<Guid>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]".

    Returns (TokenName, TokenGuid, VpdOffset, MaximumDatumSize, Type).
    """
    TokenGuid = TokenName = L1 = L2 = ''
    # Pad with one separator so the optional size field always indexes safely
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    if not 3 <= len(Fields) <= 4:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]', LineNo)
    else:
        L1, L2 = Fields[1], Fields[2]
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        TokenGuid, TokenName = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
    return (TokenName, TokenGuid, L1, L2, Type)
## GetComponent
#
# Parse block of the components defined in dsc file
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
# @param KeyValues: To store data after parsing
#
# @retval True Get component successfully
#
def GetComponent(Lines, KeyValues):
    """Parse the [Components] block lines of a DSC file.

    Each component may be followed by a '{ ... }' block holding
    <LibraryClasses>, <BuildOptions> and <Pcds*> sub-sections.  For every
    component one entry [name, [libraryclasses], [buildoptions], [pcds]]
    is appended to KeyValues.

    @param Lines:      Sequence whose items carry the line text at index 0
    @param KeyValues:  Output list receiving one entry per component

    @retval True       Always
    """
    # One flag per sub-section; at most one is active while inside a block
    (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
    ListItem = None
    LibraryClassItem = []
    BuildOption = []
    Pcd = []

    for Line in Lines:
        Line = Line[0]
        #
        # Ignore !include statement
        #
        if Line.upper().find(TAB_INCLUDE.upper() + ' ') > -1 or Line.upper().find(TAB_DEFINE + ' ') > -1:
            continue

        if findBlock == False:
            ListItem = Line
            #
            # find '{' at line tail
            #
            if Line.endswith('{'):
                findBlock = True
                ListItem = CleanString(Line.rsplit('{', 1)[0], DataType.TAB_COMMENT_SPLIT)

        #
        # Parse a block content
        #
        if findBlock:
            # A section marker switches which sub-list subsequent lines go to
            if Line.find('<LibraryClasses>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
                continue
            if Line.find('<BuildOptions>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
                continue
            if Line.find('<PcdsFeatureFlag>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
                continue
            if Line.find('<PcdsPatchableInModule>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
                continue
            if Line.find('<PcdsFixedAtBuild>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
                continue
            if Line.find('<PcdsDynamic>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
                continue
            if Line.find('<PcdsDynamicEx>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
                continue
            if Line.endswith('}'):
                #
                # find '}' at line tail
                #
                # End of block: emit the component and reset all parse state
                KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
                (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
                LibraryClassItem, BuildOption, Pcd = [], [], []
                continue

        if findBlock:
            # Route the line into whichever sub-section is currently active
            if findLibraryClass:
                LibraryClassItem.append(Line)
            elif findBuildOption:
                BuildOption.append(Line)
            elif findPcdsFeatureFlag:
                Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
            elif findPcdsPatchableInModule:
                Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
            elif findPcdsFixedAtBuild:
                Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
            elif findPcdsDynamic:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
            elif findPcdsDynamicEx:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
        else:
            # Component without a '{ }' block: record it with empty sub-lists
            KeyValues.append([ListItem, [], [], []])

    return True
## GetExec
#
# Parse a string with format "InfFilename [EXEC = ExecFilename]"
# Return (InfFilename, ExecFilename)
#
# @param String: String with EXEC statement
#
# @retval tuple A pair as (InfFilename, ExecFilename)
#
def GetExec(String):
    Keyword = 'EXEC'
    Pos = String.find(Keyword)
    if Pos < 0:
        # No EXEC statement: the whole string is the INF file name.
        return (String.strip(), '')
    # Text before the keyword is the INF file name; everything after the
    # keyword itself (including any '=' sign) is kept as the EXEC file name.
    return (String[:Pos].strip(), String[Pos + len(Keyword):].strip())
## GetComponents
#
# Parse block of the components defined in dsc file
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
#
# @param Lines:             The content to be parsed
# @param Key:               Reserved
# @param KeyValues:         To store data after parsing
# @param CommentCharacter:  Comment char, used to ignore comment content
#
# @retval True Get component successfully
#
def GetComponents(Lines, Key, KeyValues, CommentCharacter):
    # Drop the section header: keep only what follows the ']' terminator.
    if Lines.find(DataType.TAB_SECTION_END) > -1:
        Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    # Parser state: findBlock is True while inside a '{ ... }' component
    # block; at most one of the other flags is True while inside the
    # corresponding <...> sub-section of that block.
    (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
    ListItem = None
    LibraryClassItem = []
    BuildOption = []
    Pcd = []
    LineList = Lines.split('\n')
    for Line in LineList:
        Line = CleanString(Line, CommentCharacter)
        if Line is None or Line == '':
            continue
        if findBlock == False:
            ListItem = Line
            #
            # find '{' at line tail: the component name is the text before it
            #
            if Line.endswith('{'):
                findBlock = True
                ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)
        #
        # Parse a block content
        #
        if findBlock:
            # A '<...>' tag switches the active sub-section; the tag line
            # itself carries no item data.
            if Line.find('<LibraryClasses>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
                continue
            if Line.find('<BuildOptions>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
                continue
            if Line.find('<PcdsFeatureFlag>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
                continue
            if Line.find('<PcdsPatchableInModule>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
                continue
            if Line.find('<PcdsFixedAtBuild>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
                continue
            if Line.find('<PcdsDynamic>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
                continue
            if Line.find('<PcdsDynamicEx>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
                continue
            if Line.endswith('}'):
                #
                # find '}' at line tail: the component block is complete --
                # record it and reset the whole parser state.
                #
                KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
                (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
                LibraryClassItem, BuildOption, Pcd = [], [], []
                continue
        if findBlock:
            # Ordinary data line inside a block: file it under the active
            # sub-section; PCD lines are tagged with their PCD item type.
            if findLibraryClass:
                LibraryClassItem.append(Line)
            elif findBuildOption:
                BuildOption.append(Line)
            elif findPcdsFeatureFlag:
                Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
            elif findPcdsPatchableInModule:
                Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
            elif findPcdsFixedAtBuild:
                Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
            elif findPcdsDynamic:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
            elif findPcdsDynamicEx:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
        else:
            # Component written on a single line, without a '{ }' block.
            KeyValues.append([ListItem, [], [], []])
    return True
## Get Source
#
# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
#
# @param Item:           String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
# @param ContainerFile:  The file which describes the source item, used for error report
#
# @retval (Filename, Family, TagName, ToolCode, PcdFeatureFlag)
#
def GetSource(Item, ContainerFile, FileRelativePath, LineNo = -1):
    # Pad with four separators so every optional field exists after the split.
    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 4)
    if not 5 <= len(Fields) <= 9:
        RaiseParserError(Item, 'Sources', ContainerFile, '<Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]', LineNo)
    Fields[0] = NormPath(Fields[0])
    CheckFileExist(FileRelativePath, Fields[0], ContainerFile, 'Sources', Item, LineNo)
    if Fields[4] != '':
        CheckPcdTokenInfo(Fields[4], 'Sources', ContainerFile, LineNo)
    return (Fields[0], Fields[1], Fields[2], Fields[3], Fields[4])
## Get Binary
#
# Get Binary of Inf as <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
#
# @param Item:           String as <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
# @param ContainerFile:  The file which describes the binary item, used for error report
#
# @retval (FileType, Filename, Target, PcdTokenInfo)
#
def GetBinary(Item, ContainerFile, FileRelativePath, LineNo = -1):
    # Pad with one value separator so an item without the optional PCD field
    # still splits into at least four fields.
    ItemNew = Item + DataType.TAB_VALUE_SPLIT
    List = GetSplitValueList(ItemNew)
    if len(List) != 4 and len(List) != 5:
        RaiseParserError(Item, 'Binaries', ContainerFile, "<FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]", LineNo)
    else:
        if List[3] != '':
            CheckPcdTokenInfo(List[3], 'Binaries', ContainerFile, LineNo)
        # Only the first four fields are meaningful; a fifth field is the
        # artifact of the separator padding above.  The original code only
        # returned a tuple for len == 4 (with unreachable len == 3/2/1
        # branches) and implicitly returned None for len == 5, which would
        # crash any caller unpacking four values -- always return the tuple.
        return (List[0], List[1], List[2], List[3])
## Get Guids/Protocols/Ppis
#
# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
#
# @param Item:           String as <GuidCName>[|<PcdFeatureFlag>]
# @param Type:           Type of parsing string
# @param ContainerFile:  The file which describes the item, used for error report
#
# @retval (CName, PcdFeatureFlag)
#
def GetGuidsProtocolsPpisOfInf(Item, Type, ContainerFile, LineNo = -1):
    # Pad with one separator so the optional feature-flag field always exists.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    CName, FeatureFlag = Fields[0], Fields[1]
    if FeatureFlag != '':
        CheckPcdTokenInfo(FeatureFlag, Type, ContainerFile, LineNo)
    return (CName, FeatureFlag)
## Get Guids/Protocols/Ppis
#
# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
#
# @param Item:           String as <GuidCName>=<GuidValue>
# @param Type:           Type of parsing string
# @param ContainerFile:  The file which describes the item, used for error report
#
# @retval (CName, GuidValue)
#
def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo = -1):
    TokenList = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
    # Exactly one '=' is required; anything else is a malformed declaration.
    if len(TokenList) != 2:
        RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', LineNo)
    return (TokenList[0], TokenList[1])
## GetPackage
#
# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
#
# @param Item:           String as <PackagePath>[|<PcdFeatureFlag>]
# @param ContainerFile:  The file which describes the package, used for error report
#
# @retval (PackagePath, PcdFeatureFlag)
#
def GetPackage(Item, ContainerFile, FileRelativePath, LineNo = -1):
    # Pad with one separator so the optional feature-flag field always exists.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    PackagePath, FeatureFlag = Fields[0], Fields[1]
    # The package must be an existing .dec file.
    CheckFileType(PackagePath, '.Dec', ContainerFile, 'package', PackagePath, LineNo)
    CheckFileExist(FileRelativePath, PackagePath, ContainerFile, 'Packages', PackagePath, LineNo)
    if FeatureFlag != '':
        CheckPcdTokenInfo(FeatureFlag, 'Packages', ContainerFile, LineNo)
    return (PackagePath, FeatureFlag)
## Get Pcd Values of Inf
#
# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
#
# @param Item:  The string describes pcd
# @param Type:  The type of Pcd
# @param File:  The file which describes the pcd, used for error report
#
# @retval (TokenSpcCName, TokenCName, Value, ItemType) Formatted Pcd Item
#
def GetPcdOfInf(Item, Type, File, LineNo):
    Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
    TokenGuid, TokenName, Value = '', '', ''
    # Map the PCD section type to the INF section name used in error reports.
    InfType = {
        TAB_PCDS_FIXED_AT_BUILD: TAB_INF_FIXED_PCD,
        TAB_PCDS_PATCHABLE_IN_MODULE: TAB_INF_PATCH_PCD,
        TAB_PCDS_FEATURE_FLAG: TAB_INF_FEATURE_PCD,
        TAB_PCDS_DYNAMIC_EX: TAB_INF_PCD_EX,
        TAB_PCDS_DYNAMIC: TAB_INF_PCD,
    }.get(Type, '')
    # Pad with one separator so the optional value field always exists.
    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
    if len(Fields) < 2 or len(Fields) > 3:
        RaiseParserError(Item, InfType, File, Format, LineNo)
    else:
        Value = Fields[1]
        TokenInfo = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
        if len(TokenInfo) != 2:
            RaiseParserError(Item, InfType, File, Format, LineNo)
        else:
            TokenGuid, TokenName = TokenInfo[0], TokenInfo[1]
    return (TokenGuid, TokenName, Value, Type)
## Get Pcd Values of Dec
#
# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
#
# @param Item:  The string describes pcd
# @param Type:  The type of Pcd
# @param File:  The file which describes the pcd, used for error report
#
# @retval (TokenSpcCName, TokenCName, Value, DatumType, Token, ItemType) Formatted Pcd Item
#
def GetPcdOfDec(Item, Type, File, LineNo = -1):
    Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
    TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
    Fields = GetSplitValueList(Item)
    # All four fields are mandatory for a DEC PCD declaration.
    if len(Fields) != 4:
        RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
    else:
        Value, DatumType, Token = Fields[1], Fields[2], Fields[3]
        TokenInfo = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
        if len(TokenInfo) != 2:
            RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
        else:
            TokenGuid, TokenName = TokenInfo[0], TokenInfo[1]
    return (TokenGuid, TokenName, Value, DatumType, Token, Type)
## Parse DEFINE statement
#
# Get DEFINE macros
#
# 1. Insert a record into the given table:
#    Value1: Macro Name
#    Value2: Macro Value
#
def ParseDefine(LineValue, StartLine, Table, FileID, Filename, SectionName, SectionModel, Arch):
    EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
    # Locate the text following the (case-insensitive) 'DEFINE ' keyword,
    # strip comments, then split it once on '=' into macro name and value.
    Begin = LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') + len(DataType.TAB_DEFINE + ' ')
    Define = GetSplitValueList(CleanString(LineValue[Begin:]), TAB_EQUAL_SPLIT, 1)
    Table.Insert(MODEL_META_DATA_DEFINE, Define[0], Define[1], '', '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
## InsertSectionItems
#
# Append the items of one section to RecordSet[Model], once per arch.
#
# @param Model:            Model key into RecordSet
# @param CurrentSection:   The name of current section (unused here)
# @param SectionItemList:  Items as [LineValue, StartLine, Comment]
# @param ArchList:         A list of arches
# @param ThirdList:        Per-arch third parameter (e.g. ModuleType or SkuId)
# @param RecordSet:        A dict of all parsed records
#
def InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, RecordSet):
    # Insert each item data of a section
    for Index, Arch in enumerate(ArchList):
        Third = ThirdList[Index]
        if Arch == '':
            # An unspecified arch applies to all arches.
            Arch = TAB_ARCH_COMMON
        Records = RecordSet[Model]
        for SectionItem in SectionItemList:
            LineValue, StartLine, Comment = SectionItem[0], SectionItem[1], SectionItem[2]
            EdkLogger.debug(4, "Parsing %s ..." % LineValue)
            # DEFINE statements are not section items; skip them here.
            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                continue
            # No database ID is assigned for these in-memory records.
            Records.append([LineValue, Arch, StartLine, -1, Third, Comment])
    if RecordSet != {}:
        RecordSet[Model] = Records
## Insert records to database
#
# Insert item data of a section to database
# @param Table:            The Table to be inserted
# @param FileID:           The ID of belonging file
# @param Filename:         The name of belonging file
# @param CurrentSection:   The name of current section
# @param SectionItemList:  A list of items of the section
# @param ArchList:         A list of arches
# @param ThirdList:        A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
# @param IfDefList:        A list of all conditional statements
# @param RecordSet:        A dict of all parsed records
#
def InsertSectionItemsIntoDatabase(Table, FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, RecordSet):
    #
    # Insert each item data of a section
    #
    for Index in range(0, len(ArchList)):
        Arch = ArchList[Index]
        Third = ThirdList[Index]
        if Arch == '':
            # An unspecified arch applies to all arches.
            Arch = TAB_ARCH_COMMON
        Records = RecordSet[Model]
        for SectionItem in SectionItemList:
            BelongsToItem, EndLine, EndColumn = -1, -1, -1
            LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]
            EdkLogger.debug(4, "Parsing %s ..." %LineValue)
            #
            # And then parse DEFINE statement
            #
            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                ParseDefine(LineValue, StartLine, Table, FileID, Filename, CurrentSection, Model, Arch)
                continue
            #
            # At last parse other sections: Insert returns the new row ID,
            # which is also kept in the in-memory record list.
            #
            ID = Table.Insert(Model, LineValue, Third, Third, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
            Records.append([LineValue, Arch, StartLine, ID, Third])
    # NOTE(review): assumes ArchList is non-empty whenever RecordSet has
    # entries; otherwise 'Records' would be unbound here -- confirm callers.
    if RecordSet != {}:
        RecordSet[Model] = Records
## GenMetaDatSectionItem
#
# Append Value to the list stored under Key in List, creating the list on
# first use.
#
# @param Key:    Section key
# @param Value:  Item value to record
# @param List:   Dictionary mapping keys to lists of values
#
def GenMetaDatSectionItem(Key, Value, List):
    List.setdefault(Key, []).append(Value)
## IsValidWord
#
# Check whether the word is valid.
# <Word>   ::=  (a-zA-Z0-9_)(a-zA-Z0-9_-){0,}     Alphanumeric characters with
#               optional dash "-" and/or underscore "_" characters.
#               No whitespace characters are permitted.
#
# NOTE(review): the implementation additionally accepts '.' in non-leading
# positions (rejecting two consecutive dots), which the grammar above does
# not mention -- kept as-is since existing metadata may rely on dotted words.
#
# @param Word:  The word string need to be checked.
#
def IsValidWord(Word):
    if not Word:
        return False
    #
    # The first char must be alphanumeric or '_'.  (The original code also
    # tested isdigit(), but isalnum() already covers digits, so that clause
    # was dead and has been removed.)
    #
    if not Word[0].isalnum() and Word[0] != '_':
        return False
    #
    # Remaining chars: letters, digits, '-', '_' or '.', and no two
    # consecutive dots.
    #
    LastChar = ''
    for Char in Word[1:]:
        if (not Char.isalpha()) and (not Char.isdigit()) and Char not in '-_.':
            return False
        if Char == '.' and LastChar == '.':
            return False
        LastChar = Char
    return True
| edk2-master | BaseTools/Source/Python/Common/Parsing.py |
## @file
#
# This file is for build version number auto generation
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# Default build version string used when no specific revision information has
# been generated for this tree (the file header says this file is the target
# of build-version auto generation).
gBUILD_VERSION = "Developer Build based on Revision: Unknown"
| edk2-master | BaseTools/Source/Python/Common/BuildVersion.py |
# # @file
# This file is used to parse and evaluate range expression in Pcd declaration.
#
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
# # Import Modules
#
from __future__ import print_function
from Common.GlobalData import *
from CommonDataClass.Exceptions import BadExpression
from CommonDataClass.Exceptions import WrnExpression
import uuid
from Common.Expression import PcdPattern, BaseExpression
from Common.DataType import *
from re import compile
#
# Error / warning message templates used by RangeExpression; the '%s' slots
# are filled with the offending token or operator when raised.
#
ERR_STRING_EXPR = 'This operator cannot be used in string expression: [%s].'
ERR_SNYTAX = 'Syntax error, the rest of expression cannot be evaluated: [%s].'
ERR_MATCH = 'No matching right parenthesis.'
ERR_STRING_TOKEN = 'Bad string token: [%s].'
ERR_MACRO_TOKEN = 'Bad macro token: [%s].'
ERR_EMPTY_TOKEN = 'Empty token is not allowed.'
ERR_PCD_RESOLVE = 'The PCD should be FeatureFlag type or FixedAtBuild type: [%s].'
ERR_VALID_TOKEN = 'No more valid token found from rest of string: [%s].'
ERR_EXPR_TYPE = 'Different types found in expression.'
ERR_OPERATOR_UNSUPPORT = 'Unsupported operator: [%s]'
ERR_REL_NOT_IN = 'Expect "IN" after "not" operator.'
WRN_BOOL_EXPR = 'Operand of boolean type cannot be used in arithmetic expression.'
WRN_EQCMP_STR_OTHERS = '== Comparison between Operand of string type and Boolean/Number Type always return False.'
WRN_NECMP_STR_OTHERS = '!= Comparison between Operand of string type and Boolean/Number Type always return True.'
ERR_RELCMP_STR_OTHERS = 'Operator taking Operand of string type and Boolean/Number Type is not allowed: [%s].'
ERR_STRING_CMP = 'Unicode string and general string cannot be compared: [%s %s %s]'
ERR_ARRAY_TOKEN = 'Bad C array or C format GUID token: [%s].'
ERR_ARRAY_ELE = 'This must be HEX value for NList or Array: [%s].'
ERR_EMPTY_EXPR = 'Empty expression is not allowed.'
ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOOL_CHAIN_TAG) and $(TARGET).'
class RangeObject(object):
    ## A closed integer interval [start, end].  Endpoints are normalized so
    #  that start <= end; 'empty' marks the object as deleted for merging.
    def __init__(self, start, end, empty = False):
        Lower, Upper = int(start), int(end)
        if Lower > Upper:
            Lower, Upper = Upper, Lower
        self.start = Lower
        self.end = Upper
        self.empty = empty
class RangeContainer(object):
    ## Ordered collection of range objects; overlapping or touching
    #  intervals are coalesced after every insertion.
    def __init__(self):
        self.rangelist = []
    ## Insert one interval, keep the list sorted by start, and re-merge.
    def push(self, RangeObject):
        self.rangelist.append(RangeObject)
        self.rangelist.sort(key = lambda Obj: Obj.start)
        self.merge()
    ## Generator over the stored intervals in ascending order.
    def pop(self):
        for Item in self.rangelist:
            yield Item
    ## Drop intervals flagged as empty.
    def __clean__(self):
        self.rangelist = [Obj for Obj in self.rangelist if not Obj.empty]
    ## Coalesce adjacent entries whose spans overlap or touch: each merged
    #  pair is folded into the right-hand entry and the left-hand entry is
    #  flagged empty, then swept away.
    def merge(self):
        self.__clean__()
        for Index in range(len(self.rangelist) - 1):
            Current = self.rangelist[Index]
            Next = self.rangelist[Index + 1]
            if Next.start > Current.end:
                continue
            Next.start = Current.start
            if Current.end > Next.end:
                Next.end = Current.end
            Current.empty = True
        self.__clean__()
    ## Print the contained intervals (debug aid).
    def dump(self):
        print("----------------------")
        print("".join("[%d , %d]" % (Obj.start, Obj.end) for Obj in self.rangelist))
class XOROperatorObject(object):
    ## Evaluator for 'XOR <value>': every value of the PCD data type except
    #  <value> itself, i.e. [0, value-1] plus [value+1, type max].
    def __init__(self):
        pass
    def Calculate(self, Operand, DataType, SymbolTable):
        if isinstance(Operand, type('')) and not Operand.isalnum():
            raise BadExpression(ERR_SNYTAX % "XOR ...")
        Point = int(Operand)
        Container = RangeContainer()
        Container.push(RangeObject(0, Point - 1))
        Container.push(RangeObject(Point + 1, MAX_VAL_TYPE[DataType]))
        RangeKey = str(uuid.uuid1())
        SymbolTable[RangeKey] = Container
        return RangeKey
class LEOperatorObject(object):
    ## Evaluator for 'LE <value>': the interval [0, value].
    def __init__(self):
        pass
    def Calculate(self, Operand, DataType, SymbolTable):
        if isinstance(Operand, type('')) and not Operand.isalnum():
            raise BadExpression(ERR_SNYTAX % "LE ...")
        Container = RangeContainer()
        Container.push(RangeObject(0, int(Operand)))
        RangeKey = str(uuid.uuid1())
        SymbolTable[RangeKey] = Container
        return RangeKey
class LTOperatorObject(object):
    ## Evaluator for 'LT <value>': the interval [0, value-1].
    def __init__(self):
        pass
    def Calculate(self, Operand, DataType, SymbolTable):
        if isinstance(Operand, type('')) and not Operand.isalnum():
            raise BadExpression(ERR_SNYTAX % "LT ...")
        Container = RangeContainer()
        Container.push(RangeObject(0, int(Operand) - 1))
        RangeKey = str(uuid.uuid1())
        SymbolTable[RangeKey] = Container
        return RangeKey
class GEOperatorObject(object):
    ## Evaluator for 'GE <value>': the interval [value, type max].
    def __init__(self):
        pass
    def Calculate(self, Operand, DataType, SymbolTable):
        if isinstance(Operand, type('')) and not Operand.isalnum():
            raise BadExpression(ERR_SNYTAX % "GE ...")
        Container = RangeContainer()
        Container.push(RangeObject(int(Operand), MAX_VAL_TYPE[DataType]))
        RangeKey = str(uuid.uuid1())
        SymbolTable[RangeKey] = Container
        return RangeKey
class GTOperatorObject(object):
    ## Evaluator for 'GT <value>': the interval [value+1, type max].
    def __init__(self):
        pass
    def Calculate(self, Operand, DataType, SymbolTable):
        if isinstance(Operand, type('')) and not Operand.isalnum():
            raise BadExpression(ERR_SNYTAX % "GT ...")
        Container = RangeContainer()
        Container.push(RangeObject(int(Operand) + 1, MAX_VAL_TYPE[DataType]))
        RangeKey = str(uuid.uuid1())
        SymbolTable[RangeKey] = Container
        return RangeKey
class EQOperatorObject(object):
    ## Evaluator for 'EQ <value>': the degenerate interval [value, value].
    def __init__(self):
        pass
    def Calculate(self, Operand, DataType, SymbolTable):
        if isinstance(Operand, type('')) and not Operand.isalnum():
            raise BadExpression(ERR_SNYTAX % "EQ ...")
        Point = int(Operand)
        Container = RangeContainer()
        Container.push(RangeObject(Point, Point))
        RangeKey = str(uuid.uuid1())
        SymbolTable[RangeKey] = Container
        return RangeKey
## GetOperatorObject
#
# Map a comparison operator token to a fresh evaluator object; raise
# BadExpression for anything not in the supported set.
#
def GetOperatorObject(Operator):
    OperatorFactory = {
        '>': GTOperatorObject,
        '>=': GEOperatorObject,
        '<': LTOperatorObject,
        '<=': LEOperatorObject,
        '==': EQOperatorObject,
        '^': XOROperatorObject,
    }
    if Operator not in OperatorFactory:
        raise BadExpression("Bad Operator")
    return OperatorFactory[Operator]()
class RangeExpression(BaseExpression):
# Logical operator mapping
LogicalOperators = {
'&&' : 'and', '||' : 'or',
'!' : 'not', 'AND': 'and',
'OR' : 'or' , 'NOT': 'not',
'XOR': '^' , 'xor': '^',
'EQ' : '==' , 'NE' : '!=',
'GT' : '>' , 'LT' : '<',
'GE' : '>=' , 'LE' : '<=',
'IN' : 'in'
}
NonLetterOpLst = ['+', '-', '&', '|', '^', '!', '=', '>', '<']
RangePattern = compile(r'[0-9]+ - [0-9]+')
def preProcessRangeExpr(self, expr):
# convert hex to int
# convert interval to object index. ex. 1 - 10 to a GUID
expr = expr.strip()
NumberDict = {}
for HexNumber in gHexPattern.findall(expr):
Number = str(int(HexNumber, 16))
NumberDict[HexNumber] = Number
for HexNum in NumberDict:
expr = expr.replace(HexNum, NumberDict[HexNum])
rangedict = {}
for validrange in self.RangePattern.findall(expr):
start, end = validrange.split(" - ")
start = start.strip()
end = end.strip()
rangeid = str(uuid.uuid1())
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(start, end))
self.operanddict[str(rangeid)] = rangeContainer
rangedict[validrange] = str(rangeid)
for validrange in rangedict:
expr = expr.replace(validrange, rangedict[validrange])
self._Expr = expr
return expr
def EvalRange(self, Operator, Oprand):
operatorobj = GetOperatorObject(Operator)
return operatorobj.Calculate(Oprand, self.PcdDataType, self.operanddict)
def Rangeintersection(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
rangeContainer = RangeContainer()
for range1 in rangeContainer1.pop():
for range2 in rangeContainer2.pop():
start1 = range1.start
end1 = range1.end
start2 = range2.start
end2 = range2.end
if start1 >= start2:
start1, start2 = start2, start1
end1, end2 = end2, end1
if range1.empty:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(0, 0, True))
if end1 < start2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(0, 0, True))
elif end1 == start2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(end1, end1))
elif end1 <= end2 and end1 > start2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(start2, end1))
elif end1 >= end2:
rangeid = str(uuid.uuid1())
rangeContainer.push(RangeObject(start2, end2))
self.operanddict[rangeid] = rangeContainer
# rangeContainer.dump()
return rangeid
def Rangecollections(self, Oprand1, Oprand2):
rangeContainer1 = self.operanddict[Oprand1]
rangeContainer2 = self.operanddict[Oprand2]
rangeContainer = RangeContainer()
for rangeobj in rangeContainer2.pop():
rangeContainer.push(rangeobj)
for rangeobj in rangeContainer1.pop():
rangeContainer.push(rangeobj)
rangeid = str(uuid.uuid1())
self.operanddict[rangeid] = rangeContainer
# rangeContainer.dump()
return rangeid
def NegativeRange(self, Oprand1):
rangeContainer1 = self.operanddict[Oprand1]
rangeids = []
for rangeobj in rangeContainer1.pop():
rangeContainer = RangeContainer()
rangeid = str(uuid.uuid1())
if rangeobj.empty:
rangeContainer.push(RangeObject(0, MAX_VAL_TYPE[self.PcdDataType]))
else:
if rangeobj.start > 0:
rangeContainer.push(RangeObject(0, rangeobj.start - 1))
if rangeobj.end < MAX_VAL_TYPE[self.PcdDataType]:
rangeContainer.push(RangeObject(rangeobj.end + 1, MAX_VAL_TYPE[self.PcdDataType]))
self.operanddict[rangeid] = rangeContainer
rangeids.append(rangeid)
if len(rangeids) == 0:
rangeContainer = RangeContainer()
rangeContainer.push(RangeObject(0, MAX_VAL_TYPE[self.PcdDataType]))
rangeid = str(uuid.uuid1())
self.operanddict[rangeid] = rangeContainer
return rangeid
if len(rangeids) == 1:
return rangeids[0]
re = self.Rangeintersection(rangeids[0], rangeids[1])
for i in range(2, len(rangeids)):
re = self.Rangeintersection(re, rangeids[i])
rangeid2 = str(uuid.uuid1())
self.operanddict[rangeid2] = self.operanddict[re]
return rangeid2
def Eval(self, Operator, Oprand1, Oprand2 = None):
if Operator in ["!", "NOT", "not"]:
if not gGuidPattern.match(Oprand1.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
return self.NegativeRange(Oprand1)
else:
if Operator in ["==", ">=", "<=", ">", "<", '^']:
return self.EvalRange(Operator, Oprand1)
elif Operator == 'and' :
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
return self.Rangeintersection(Oprand1, Oprand2)
elif Operator == 'or':
if not gGuidPatternEnd.match(Oprand1.strip()) or not gGuidPatternEnd.match(Oprand2.strip()):
raise BadExpression(ERR_STRING_EXPR % Operator)
return self.Rangecollections(Oprand1, Oprand2)
else:
raise BadExpression(ERR_STRING_EXPR % Operator)
def __init__(self, Expression, PcdDataType, SymbolTable = None):
if SymbolTable is None:
SymbolTable = {}
super(RangeExpression, self).__init__(self, Expression, PcdDataType, SymbolTable)
self._NoProcess = False
if not isinstance(Expression, type('')):
self._Expr = Expression
self._NoProcess = True
return
self._Expr = Expression.strip()
if not self._Expr.strip():
raise BadExpression(ERR_EMPTY_EXPR)
#
# The symbol table including PCD and macro mapping
#
self._Symb = SymbolTable
self._Symb.update(self.LogicalOperators)
self._Idx = 0
self._Len = len(self._Expr)
self._Token = ''
self._WarnExcept = None
# Literal token without any conversion
self._LiteralToken = ''
# store the operand object
self.operanddict = {}
# The Pcd max value depends on PcdDataType
self.PcdDataType = PcdDataType
# Public entry for this class
# @param RealValue: False: only evaluate if the expression is true or false, used for conditional expression
# True : return the evaluated str(value), used for PCD value
#
# @return: True or False if RealValue is False
# Evaluated value of string format if RealValue is True
#
def __call__(self, RealValue = False, Depth = 0):
if self._NoProcess:
return self._Expr
self._Depth = Depth
self._Expr = self._Expr.strip()
self.preProcessRangeExpr(self._Expr)
# check if the expression does not need to evaluate
if RealValue and Depth == 0:
self._Token = self._Expr
if gGuidPatternEnd.match(self._Expr):
return [self.operanddict[self._Expr] ]
self._Idx = 0
self._Token = ''
Val = self._OrExpr()
RealVal = Val
RangeIdList = RealVal.split("or")
RangeList = []
for rangeid in RangeIdList:
RangeList.append(self.operanddict[rangeid.strip()])
return RangeList
# Template function to parse binary operators which have same precedence
# Expr [Operator Expr]*
def _ExprFuncTemplate(self, EvalFunc, OpSet):
Val = EvalFunc()
while self._IsOperator(OpSet):
Op = self._Token
try:
Val = self.Eval(Op, Val, EvalFunc())
except WrnExpression as Warn:
self._WarnExcept = Warn
Val = Warn.result
return Val
# A [|| B]*
def _OrExpr(self):
return self._ExprFuncTemplate(self._AndExpr, {"OR", "or"})
# A [&& B]*
def _AndExpr(self):
return self._ExprFuncTemplate(self._NeExpr, {"AND", "and"})
def _NeExpr(self):
Val = self._RelExpr()
while self._IsOperator({"!=", "NOT", "not"}):
Op = self._Token
if Op in ["!", "NOT", "not"]:
if not self._IsOperator({"IN", "in"}):
raise BadExpression(ERR_REL_NOT_IN)
Op += ' ' + self._Token
try:
Val = self.Eval(Op, Val, self._RelExpr())
except WrnExpression as Warn:
self._WarnExcept = Warn
Val = Warn.result
return Val
# [!]*A
def _RelExpr(self):
if self._IsOperator({"NOT", "LE", "GE", "LT", "GT", "EQ", "XOR"}):
Token = self._Token
Val = self._NeExpr()
try:
return self.Eval(Token, Val)
except WrnExpression as Warn:
self._WarnExcept = Warn
return Warn.result
return self._IdenExpr()
# Parse identifier or encapsulated expression
def _IdenExpr(self):
Tk = self._GetToken()
if Tk == '(':
Val = self._OrExpr()
try:
# _GetToken may also raise BadExpression
if self._GetToken() != ')':
raise BadExpression(ERR_MATCH)
except BadExpression:
raise BadExpression(ERR_MATCH)
return Val
return Tk
# Skip whitespace or tab
def __SkipWS(self):
for Char in self._Expr[self._Idx:]:
if Char not in ' \t':
break
self._Idx += 1
# Try to convert string to number
def __IsNumberToken(self):
Radix = 10
if self._Token.lower()[0:2] == '0x' and len(self._Token) > 2:
Radix = 16
try:
self._Token = int(self._Token, Radix)
return True
except ValueError:
return False
except TypeError:
return False
# Parse array: {...}
def __GetArray(self):
Token = '{'
self._Idx += 1
self.__GetNList(True)
Token += self._LiteralToken
if self._Idx >= self._Len or self._Expr[self._Idx] != '}':
raise BadExpression(ERR_ARRAY_TOKEN % Token)
Token += '}'
# All whitespace and tabs in array are already stripped.
IsArray = IsGuid = False
if len(Token.split(',')) == 11 and len(Token.split(',{')) == 2 \
and len(Token.split('},')) == 1:
HexLen = [11, 6, 6, 5, 4, 4, 4, 4, 4, 4, 6]
HexList = Token.split(',')
if HexList[3].startswith('{') and \
not [Index for Index, Hex in enumerate(HexList) if len(Hex) > HexLen[Index]]:
IsGuid = True
if Token.lstrip('{').rstrip('}').find('{') == -1:
if not [Hex for Hex in Token.lstrip('{').rstrip('}').split(',') if len(Hex) > 4]:
IsArray = True
if not IsArray and not IsGuid:
raise BadExpression(ERR_ARRAY_TOKEN % Token)
self._Idx += 1
self._Token = self._LiteralToken = Token
return self._Token
# Parse string, the format must be: "..."
def __GetString(self):
Idx = self._Idx
# Skip left quote
self._Idx += 1
# Replace escape \\\", \"
Expr = self._Expr[self._Idx:].replace('\\\\', '//').replace('\\\"', '\\\'')
for Ch in Expr:
self._Idx += 1
if Ch == '"':
break
self._Token = self._LiteralToken = self._Expr[Idx:self._Idx]
if not self._Token.endswith('"'):
raise BadExpression(ERR_STRING_TOKEN % self._Token)
self._Token = self._Token[1:-1]
return self._Token
# Get token that is comprised by alphanumeric, underscore or dot(used by PCD)
# @param IsAlphaOp: Indicate if parsing general token or script operator(EQ, NE...)
def __GetIdToken(self, IsAlphaOp = False):
IdToken = ''
for Ch in self._Expr[self._Idx:]:
if not self.__IsIdChar(Ch):
break
self._Idx += 1
IdToken += Ch
self._Token = self._LiteralToken = IdToken
if not IsAlphaOp:
self.__ResolveToken()
return self._Token
# Try to resolve token
def __ResolveToken(self):
if not self._Token:
raise BadExpression(ERR_EMPTY_TOKEN)
# PCD token
if PcdPattern.match(self._Token):
if self._Token not in self._Symb:
Ex = BadExpression(ERR_PCD_RESOLVE % self._Token)
Ex.Pcd = self._Token
raise Ex
self._Token = RangeExpression(self._Symb[self._Token], self._Symb)(True, self._Depth + 1)
if not isinstance(self._Token, type('')):
self._LiteralToken = hex(self._Token)
return
if self._Token.startswith('"'):
self._Token = self._Token[1:-1]
elif self._Token in ["FALSE", "false", "False"]:
self._Token = False
elif self._Token in ["TRUE", "true", "True"]:
self._Token = True
else:
self.__IsNumberToken()
def __GetNList(self, InArray = False):
self._GetSingleToken()
if not self.__IsHexLiteral():
if InArray:
raise BadExpression(ERR_ARRAY_ELE % self._Token)
return self._Token
self.__SkipWS()
Expr = self._Expr[self._Idx:]
if not Expr.startswith(','):
return self._Token
NList = self._LiteralToken
while Expr.startswith(','):
NList += ','
self._Idx += 1
self.__SkipWS()
self._GetSingleToken()
if not self.__IsHexLiteral():
raise BadExpression(ERR_ARRAY_ELE % self._Token)
NList += self._LiteralToken
self.__SkipWS()
Expr = self._Expr[self._Idx:]
self._Token = self._LiteralToken = NList
return self._Token
def __IsHexLiteral(self):
if self._LiteralToken.startswith('{') and \
self._LiteralToken.endswith('}'):
return True
if gHexPattern.match(self._LiteralToken):
Token = self._LiteralToken[2:]
Token = Token.lstrip('0')
if not Token:
self._LiteralToken = '0x0'
else:
self._LiteralToken = '0x' + Token.lower()
return True
return False
def _GetToken(self):
return self.__GetNList()
@staticmethod
def __IsIdChar(Ch):
return Ch in '._/:' or Ch.isalnum()
# Parse operand
def _GetSingleToken(self):
self.__SkipWS()
Expr = self._Expr[self._Idx:]
if Expr.startswith('L"'):
# Skip L
self._Idx += 1
UStr = self.__GetString()
self._Token = 'L"' + UStr + '"'
return self._Token
self._Token = ''
if Expr:
Ch = Expr[0]
Match = gGuidPattern.match(Expr)
if Match and not Expr[Match.end():Match.end() + 1].isalnum() \
and Expr[Match.end():Match.end() + 1] != '_':
self._Idx += Match.end()
self._Token = Expr[0:Match.end()]
return self._Token
elif self.__IsIdChar(Ch):
return self.__GetIdToken()
elif Ch == '(' or Ch == ')':
self._Idx += 1
self._Token = Ch
return self._Token
raise BadExpression(ERR_VALID_TOKEN % Expr)
# Parse operator
    def _GetOperator(self):
        # Scan one operator: either an alphabetic script operator (LT, GT,
        # and, or, ...) or a punctuation operator built from NonLetterOpLst.
        # Returns '' at end of expression or when no operator starts here.
        self.__SkipWS()
        LegalOpLst = ['&&', '||', '!=', '==', '>=', '<='] + self.NonLetterOpLst
        self._Token = ''
        Expr = self._Expr[self._Idx:]
        # Reach end of expression
        if not Expr:
            return ''
        # Script operator: LT, GT, LE, GE, EQ, NE, and, or, xor, not
        if Expr[0].isalpha():
            return self.__GetIdToken(True)
        # Start to get regular operator: +, -, <, > ...
        if Expr[0] not in self.NonLetterOpLst:
            return ''
        OpToken = ''
        for Ch in Expr:
            if Ch in self.NonLetterOpLst:
                # '!' may only start an operator (as in "!="), never extend one
                if '!' == Ch and OpToken:
                    break
                self._Idx += 1
                OpToken += Ch
            else:
                break
        if OpToken not in LegalOpLst:
            raise BadExpression(ERR_OPERATOR_UNSUPPORT % OpToken)
        self._Token = OpToken
        return OpToken
| edk2-master | BaseTools/Source/Python/Common/RangeExpression.py |
## @file
# This file is used to define common string related functions used in parsing process
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import re
from . import DataType
import Common.LongFilePathOs as os
import string
from . import EdkLogger as EdkLogger
from . import GlobalData
from .BuildToolError import *
from CommonDataClass.Exceptions import *
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
# Matches an eight-hex-digit version such as "0x00010005" (case-insensitive)
gHexVerPatt = re.compile('0x[a-f0-9]{4}[a-f0-9]{4}$', re.IGNORECASE)
# Matches a human readable "major.minor" version, e.g. "1.05" (1-2 minor digits)
gHumanReadableVerPatt = re.compile(r'([1-9][0-9]*|0)\.[0-9]{1,2}$')
## GetSplitValueList
#
# Get a value list from a string with multiple values split with SplitTag
# The default SplitTag is DataType.TAB_VALUE_SPLIT
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
#
# @param String: The input string to be splitted
# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
# @param MaxSplit: The max number of split values, default is -1
#
# @retval list() A list for splitted string
#
def GetSplitValueList(String, SplitTag=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
    """Split String on SplitTag, honoring quotes, escapes and parentheses.

    'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC'].  Splitters inside single/double
    quoted strings or inside parentheses are not split points.  Each field
    is whitespace-stripped; a trailing splitter yields a final '' field.
    """
    ValueList = []
    Last = 0
    Escaped = False
    InSingleQuoteString = False
    InDoubleQuoteString = False
    InParenthesis = 0
    for Index in range(0, len(String)):
        Char = String[Index]
        if not Escaped:
            # Split only when the splitter is outside any quoted string and
            # outside parentheses.  BUGFIX: the previous condition used
            # "or", which is always true because the two quote flags are
            # mutually exclusive, so quoting never protected splitters.
            if not InSingleQuoteString and not InDoubleQuoteString and InParenthesis == 0 and Char == SplitTag:
                ValueList.append(String[Last:Index].strip())
                Last = Index + 1
                if MaxSplit > 0 and len(ValueList) >= MaxSplit:
                    break
            if Char == '\\' and (InSingleQuoteString or InDoubleQuoteString):
                # backslash inside a string escapes the next character
                Escaped = True
            elif Char == '"' and not InSingleQuoteString:
                InDoubleQuoteString = not InDoubleQuoteString
            elif Char == "'" and not InDoubleQuoteString:
                InSingleQuoteString = not InSingleQuoteString
            elif Char == '(':
                InParenthesis = InParenthesis + 1
            elif Char == ')':
                InParenthesis = InParenthesis - 1
        else:
            # this character was escaped: it cannot toggle quoting or split
            Escaped = False
    if Last < len(String):
        ValueList.append(String[Last:].strip())
    elif Last == len(String):
        # the input ended exactly on a splitter: record the empty last field
        ValueList.append('')
    return ValueList
## GetSplitList
#
# Get a value list from a string with multiple values split with SplitString
# The default SplitTag is DataType.TAB_VALUE_SPLIT
# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
#
# @param String: The input string to be splitted
# @param SplitStr: The split key, default is DataType.TAB_VALUE_SPLIT
# @param MaxSplit: The max number of split values, default is -1
#
# @retval list() A list for splitted string
#
def GetSplitList(String, SplitStr=DataType.TAB_VALUE_SPLIT, MaxSplit= -1):
    """Split String on SplitStr (at most MaxSplit times), stripping each part."""
    return [Part.strip() for Part in String.split(SplitStr, MaxSplit)]
## MergeArches
#
# Find a key's all arches in dict, add the new arch to the list
# If not exist any arch, set the arch directly
#
# @param Dict: The input value for Dict
# @param Key: The input value for Key
# @param Arch: The Arch to be added or merged
#
def MergeArches(Dict, Key, Arch):
    """Append Arch to Dict[Key]'s arch list, creating the list on first use."""
    if Key not in Dict:
        # first arch for this key: start a new list (split handles spaces)
        Dict[Key] = Arch.split()
    else:
        Dict[Key].append(Arch)
## GenDefines
#
# Parse a string with format "DEFINE <VarName> = <PATH>"
# Generate a map Defines[VarName] = PATH
# Return False if invalid format
#
# @param String: String with DEFINE statement
# @param Arch: Supported Arch
# @param Defines: DEFINE statement to be parsed
#
# @retval 0   DEFINE statement found and valid
# @retval -1  DEFINE statement found, but not of "name = value" shape
# @retval 1   DEFINE statement not found
#
def GenDefines(String, Arch, Defines):
    # Parse "DEFINE <VarName> = <Value>" and record it in Defines keyed by
    # (VarName, Arch).
    # Return values as implemented:
    #   0  : DEFINE statement found and recorded
    #   -1 : DEFINE statement found, but not of "name = value" shape
    #   1  : String contains no DEFINE statement
    if String.find(DataType.TAB_DEFINE + ' ') > -1:
        List = String.replace(DataType.TAB_DEFINE + ' ', '').split(DataType.TAB_EQUAL_SPLIT)
        if len(List) == 2:
            Defines[(CleanString(List[0]), Arch)] = CleanString(List[1])
            return 0
        else:
            return -1
    return 1
## GenInclude
#
# Parse a string with format "!include <Filename>"
# Return the file path
# Return False if invalid format or NOT FOUND
#
# @param String: String with INCLUDE statement
# @param IncludeFiles: INCLUDE statement to be parsed
# @param Arch: Supported Arch
#
# @retval True
# @retval False
#
def GenInclude(String, IncludeFiles, Arch):
    """Parse a "!include <Filename>" statement and record the file per Arch.

    Returns True when an include statement was found, False otherwise.
    """
    Marker = DataType.TAB_INCLUDE.upper() + ' '
    Pos = String.upper().find(Marker)
    if Pos < 0:
        return False
    IncludeFile = CleanString(String[Pos + len(Marker):])
    MergeArches(IncludeFiles, IncludeFile, Arch)
    return True
## GetLibraryClassesWithModuleType
#
# Get Library Class definition when no module type defined
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval True Get library classes successfully
#
def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
    """Collect [library-class-line, module-type-list] pairs from a section.

    The module type list is derived from the section key; Key's arch part
    is ignored here.  Always returns True.
    """
    ModuleTypes = SplitModuleType(Key)[1]
    Body = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Body.splitlines():
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned and Cleaned[0] != CommentCharacter:
            KeyValues.append([CleanString(Cleaned, CommentCharacter), ModuleTypes])
    return True
## GetDynamics
#
# Get Dynamic Pcds
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval True Get Dynamic Pcds successfully
#
def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
    """Collect [pcd-line, sku-id-name-list] pairs from a dynamic PCD section.

    The SkuId name list is derived from the section key.  Always returns True.
    """
    SkuIdNames = SplitModuleType(Key)[1]
    Body = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Body.splitlines():
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned and Cleaned[0] != CommentCharacter:
            KeyValues.append([CleanString(Cleaned, CommentCharacter), SkuIdNames])
    return True
## SplitModuleType
#
# Split ModuleType out of section definition to get key
# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [ 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
#
# @param Key: String to be parsed
#
# @retval ReturnValue A list for module types
#
def SplitModuleType(Key):
    """Split a '[Name.Arch.MT1|MT2]' style key.

    Returns ['Name.Arch', [MT1, MT2, ...]]; missing arch/module-type
    fields default to the empty string.
    """
    # Pad with two empty entries so arch and moduletype always exist
    KeyList = Key.split(DataType.TAB_SPLIT) + ['', '']
    KeyValue = KeyList[0]
    if KeyList[1] != '':
        KeyValue += DataType.TAB_SPLIT + KeyList[1]
    return [KeyValue, GetSplitValueList(KeyList[2])]
## Replace macro in strings list
#
# This method replace macros used in a given string list. The macros are
# given in a dictionary.
#
# @param StringList StringList to be processed
# @param MacroDefinitions The macro definitions in the form of dictionary
# @param SelfReplacement To decide whether replace un-defined macro to ''
#
# @retval NewList A new string list whose macros are replaced
#
def ReplaceMacros(StringList, MacroDefinitions=None, SelfReplacement=False):
    """Apply ReplaceMacro to every string item; non-strings pass through."""
    if MacroDefinitions is None:
        MacroDefinitions = {}
    return [ReplaceMacro(Item, MacroDefinitions, SelfReplacement)
            if isinstance(Item, type('')) else Item
            for Item in StringList]
## Replace macro in string
#
# This method replace macros used in given string. The macros are given in a
# dictionary.
#
# @param String String to be processed
# @param MacroDefinitions The macro definitions in the form of dictionary
# @param SelfReplacement To decide whether replace un-defined macro to ''
#
# @retval string The string whose macros are replaced
#
def ReplaceMacro(String, MacroDefinitions=None, SelfReplacement=False, RaiseError=False):
    # Repeatedly expand $(MACRO) references in String until no change occurs.
    #
    # @param String            String to be processed
    # @param MacroDefinitions  Macro name -> value mapping
    # @param SelfReplacement   Replace undefined macros with '' when True
    # @param RaiseError        Raise SymbolNotFound for undefined macros
    #
    # @retval string           The string with all resolvable macros expanded
    LastString = String
    if MacroDefinitions is None:
        MacroDefinitions = {}
    while String and MacroDefinitions:
        MacroUsed = GlobalData.gMacroRefPattern.findall(String)
        # no macro found in String, stop replacing
        if len(MacroUsed) == 0:
            break
        for Macro in MacroUsed:
            if Macro not in MacroDefinitions:
                if RaiseError:
                    raise SymbolNotFound("%s not defined" % Macro)
                if SelfReplacement:
                    String = String.replace("$(%s)" % Macro, '')
                continue
            # skip self-referential definitions to avoid an endless loop
            if "$(%s)" % Macro not in MacroDefinitions[Macro]:
                String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
        # in case there's macro not defined
        if String == LastString:
            break
        LastString = String
    return String
## NormPath
#
# Create a normal path
# And replace DEFINE in the path
#
# @param Path: The input value for Path to be converted
# @param Defines: A set for DEFINE statement
#
# @retval Path Formatted path
#
def NormPath(Path, Defines=None):
    # Normalize a path: expand DEFINE macros, convert to OS-local separators
    # and, for workspace paths that do not exist, retry through the
    # PACKAGES_PATH roots via MultipleWorkspace.
    #
    # @param Path:    The input path to be converted
    # @param Defines: Macro definitions used for expansion
    #
    # @retval Path    Formatted path
    IsRelativePath = False
    if Path:
        if Path[0] == '.':
            IsRelativePath = True
        #
        # Replace with Define
        #
        if Defines:
            Path = ReplaceMacro(Path, Defines)
        #
        # To local path format
        #
        Path = os.path.normpath(Path)
        # under WORKSPACE but missing there: look it up via PACKAGES_PATH
        if Path.startswith(GlobalData.gWorkspace) and not Path.startswith(GlobalData.gBuildDirectory) and not os.path.exists(Path):
            Path = Path[len (GlobalData.gWorkspace):]
            if Path[0] == os.path.sep:
                Path = Path[1:]
            Path = mws.join(GlobalData.gWorkspace, Path)
        if IsRelativePath and Path[0] != '.':
            # preserve the explicit "./" prefix of the original input
            Path = os.path.join('.', Path)
    return Path
## CleanString
#
# Remove comments in a string
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
#
# @retval Path Formatted path
#
def CleanString(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False, BuildOption=False):
    # Strip whitespace and remove any trailing comment from a single line,
    # honoring quoted strings (a comment character inside quotes does not
    # start a comment).  With BuildOption, double quotes are dropped and
    # remaining '#' characters are escaped for the platform's make tool.
    #
    # remove whitespace
    #
    Line = Line.strip();
    #
    # Replace Edk's comment character
    #
    if AllowCppStyleComment:
        Line = Line.replace(DataType.TAB_COMMENT_EDK_SPLIT, CommentCharacter)
    #
    # remove comments, but we should escape comment character in string
    #
    InDoubleQuoteString = False
    InSingleQuoteString = False
    CommentInString = False
    for Index in range(0, len(Line)):
        if Line[Index] == '"' and not InSingleQuoteString:
            InDoubleQuoteString = not InDoubleQuoteString
        elif Line[Index] == "'" and not InDoubleQuoteString:
            InSingleQuoteString = not InSingleQuoteString
        elif Line[Index] == CommentCharacter and (InSingleQuoteString or InDoubleQuoteString):
            # comment char inside a quoted string: keep it, remember we saw it
            CommentInString = True
        elif Line[Index] == CommentCharacter and not (InSingleQuoteString or InDoubleQuoteString):
            # a real comment starts here: cut the rest of the line
            Line = Line[0: Index]
            break
    if CommentInString and BuildOption:
        # drop quotes, then escape each unescaped '#' ('^' on Windows make,
        # '\' elsewhere) so make does not treat it as a comment
        Line = Line.replace('"', '')
        ChIndex = Line.find('#')
        while ChIndex >= 0:
            if GlobalData.gIsWindows:
                if ChIndex == 0 or Line[ChIndex - 1] != '^':
                    Line = Line[0:ChIndex] + '^' + Line[ChIndex:]
                    ChIndex = Line.find('#', ChIndex + 2)
                else:
                    ChIndex = Line.find('#', ChIndex + 1)
            else:
                if ChIndex == 0 or Line[ChIndex - 1] != '\\':
                    Line = Line[0:ChIndex] + '\\' + Line[ChIndex:]
                    ChIndex = Line.find('#', ChIndex + 2)
                else:
                    ChIndex = Line.find('#', ChIndex + 1)
    #
    # remove whitespace again
    #
    Line = Line.strip();
    return Line
## CleanString2
#
# Split statement with comments in a string
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
#
# @retval Path Formatted path
#
def CleanString2(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
    # Split a line into (statement, comment), honoring quoted strings so a
    # comment character inside quotes does not start the comment.
    #
    # remove whitespace
    #
    Line = Line.strip();
    #
    # Replace Edk's comment character
    #
    if AllowCppStyleComment:
        Line = Line.replace(DataType.TAB_COMMENT_EDK_SPLIT, CommentCharacter)
    #
    # separate comments and statements, but we should escape comment character in string
    #
    InDoubleQuoteString = False
    InSingleQuoteString = False
    CommentInString = False
    Comment = ''
    for Index in range(0, len(Line)):
        if Line[Index] == '"' and not InSingleQuoteString:
            InDoubleQuoteString = not InDoubleQuoteString
        elif Line[Index] == "'" and not InDoubleQuoteString:
            InSingleQuoteString = not InSingleQuoteString
        elif Line[Index] == CommentCharacter and (InDoubleQuoteString or InSingleQuoteString):
            CommentInString = True
        elif Line[Index] == CommentCharacter and not (InDoubleQuoteString or InSingleQuoteString):
            # first real comment char: everything from here on is the comment
            Comment = Line[Index:].strip()
            Line = Line[0:Index].strip()
            break
    return Line, Comment
## GetMultipleValuesOfKeyFromLines
#
# Parse multiple strings to clean comment and spaces
# The result is saved to KeyValues
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval True Successfully executed
#
def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
    """Append each cleaned, non-comment line of the section body to KeyValues."""
    Body = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Body.split('\n'):
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned and Cleaned[0] != CommentCharacter:
            KeyValues.append(Cleaned)
    return True
## GetDefineValue
#
# Parse a DEFINE statement to get defined value
# DEFINE Key Value
#
# @param String: The content to be parsed
# @param Key: The key of DEFINE statement
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval string The defined value
#
def GetDefineValue(String, Key, CommentCharacter):
    """Return the text following "Key " in the cleaned DEFINE statement.

    CommentCharacter is accepted for interface compatibility but unused.
    """
    Cleaned = CleanString(String)
    Marker = Key + ' '
    return Cleaned[Cleaned.find(Marker) + len(Marker):]
## GetHexVerValue
#
# Get a Hex Version Value
#
# @param VerString: The version string to be parsed
#
#
# @retval: If VerString is incorrectly formatted, return "None" which will break the build.
# If VerString is correctly formatted, return a Hex value of the Version Number (0xmmmmnnnn)
# where mmmm is the major number and nnnn is the adjusted minor number.
#
def GetHexVerValue(VerString):
    """Convert a version string to "0xMMMMNNNN" hex form.

    Accepts a human readable "major.minor" (a one-digit minor is padded on
    the right, so "1.5" means 1.50) or an already-hex "0xMMMMNNNN" string.
    Returns None for anything else, which deliberately breaks the build.
    """
    VerString = CleanString(VerString)
    if gHumanReadableVerPatt.match(VerString):
        Major, Minor = VerString.split('.')
        if len(Minor) == 1:
            Minor += '0'
        return "0x%08x" % ((int(Major) << 16) + int(Minor))
    if gHexVerPatt.match(VerString):
        return VerString
    return None
## GetSingleValueOfKeyFromLines
#
# Parse multiple strings as below to get value of each definition line
# Key1 = Value1
# Key2 = Value2
# The result is saved to Dictionary
#
# @param Lines: The content to be parsed
# @param Dictionary: To store data after parsing
# @param CommentCharacter: Comment char, be used to ignore comment content
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
# @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
# @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
#
# @retval True Successfully executed
#
def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    """Parse "Key = Value" definition lines into Dictionary.

    DEFINE/SPEC statements are collected separately into the
    TAB_INF_DEFINES_DEFINE / TAB_INF_DEFINES_SPEC entries.  When
    ValueSplitFlag is set, values are split on ValueSplitCharacter.
    Always returns True.
    """
    Lines = Lines.split('\n')
    Keys = []
    Value = ''
    DefineValues = ['']
    SpecValues = ['']
    for Line in Lines:
        #
        # Handle DEFINE and SPEC
        #
        if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
            if '' in DefineValues:
                DefineValues.remove('')
            DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
            continue
        if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
            if '' in SpecValues:
                SpecValues.remove('')
            SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
            continue
        #
        # Handle Others
        #
        LineList = Line.split(KeySplitCharacter, 1)
        if len(LineList) >= 2:
            Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter:
                #
                # Remove comments and white spaces
                #
                LineList[1] = CleanString(LineList[1], CommentCharacter)
                if ValueSplitFlag:
                    # BUGFIX: the module-level function string.strip was
                    # removed in Python 3; use the str method per item.
                    Value = [Item.strip() for Item in LineList[1].split(ValueSplitCharacter)]
                else:
                    Value = CleanString(LineList[1], CommentCharacter).splitlines()
                if Key[0] in Dictionary:
                    if Key[0] not in Keys:
                        Dictionary[Key[0]] = Value
                        Keys.append(Key[0])
                    else:
                        Dictionary[Key[0]].extend(Value)
                else:
                    # unknown key: record as a macro definition
                    Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]
    if DefineValues == []:
        DefineValues = ['']
    if SpecValues == []:
        SpecValues = ['']
    Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
    Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues
    return True
## PreCheck
#
# Do pre-check for a file before it is parsed
# Check $()
# Check []
#
# @param FileName: Used for error report
# @param FileContent: File content to be parsed
# @param SupSectionTag: Used for error report
#
def PreCheck(FileName, FileContent, SupSectionTag):
    # Sanity-check file content before full parsing: every '$' must be part
    # of a '$(...)' reference and '[' / ']' must appear together on a line.
    # Returns the cleaned content (comments stripped, lines re-joined CRLF).
    #
    # @param FileName:      Used for error report
    # @param FileContent:   File content to be parsed
    # @param SupSectionTag: Used for error report
    LineNo = 0
    # NOTE(review): IsFailed is never set to True below, so the final error
    # branch is currently unreachable -- confirm intent.
    IsFailed = False
    NewFileContent = ''
    for Line in FileContent.splitlines():
        LineNo = LineNo + 1
        #
        # Clean current line
        #
        Line = CleanString(Line)
        #
        # Remove commented line
        #
        # NOTE(review): this tests TAB_COMMA_SPLIT (','), which looks like it
        # was meant to be TAB_COMMENT_SPLIT ('#') -- confirm against callers.
        if Line.find(DataType.TAB_COMMA_SPLIT) == 0:
            Line = ''
        #
        # Check $()
        #
        if Line.find('$') > -1:
            if Line.find('$(') < 0 or Line.find(')') < 0:
                EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
        #
        # Check []
        #
        if Line.find('[') > -1 or Line.find(']') > -1:
            #
            # Only get one '[' or one ']'
            #
            if not (Line.find('[') > -1 and Line.find(']') > -1):
                EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
        #
        # Regenerate FileContent
        #
        NewFileContent = NewFileContent + Line + '\r\n'
    if IsFailed:
        EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=EdkLogger.IsRaiseError)
    return NewFileContent
## CheckFileType
#
# Check if the Filename is including ExtName
# Return True if it exists
# Raise a error message if it not exists
#
# @param CheckFilename: Name of the file to be checked
# @param ExtName: Ext name of the file to be checked
# @param ContainerFilename: The container file which describes the file to be checked, used for error report
# @param SectionName: Used for error report
# @param Line: The line in container file which defines the file to be checked
#
# @retval True The file type is correct
#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo= -1):
    """Check that CheckFilename has extension ExtName (case-insensitive).

    Raises a parser error (reported against ContainerFilename) when the
    extension does not match; returns True otherwise.  Empty/None file
    names are accepted without checking.
    """
    if CheckFilename != '' and CheckFilename is not None:
        (Root, Ext) = os.path.splitext(CheckFilename)
        if Ext.upper() != ExtName.upper():
            # read the container only to locate the offending line number;
            # use a context manager so the handle is not leaked
            with open(ContainerFilename, 'r') as F:
                ContainerFile = F.read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerFile, Line)
            ErrorMsg = "Invalid %s. '%s' is found, but '%s' file is needed" % (SectionName, CheckFilename, ExtName)
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo,
                            File=ContainerFilename, RaiseError=EdkLogger.IsRaiseError)
    return True
## CheckFileExist
#
# Check if the file exists
# Return True if it exists
# Raise a error message if it not exists
#
# @param CheckFilename: Name of the file to be checked
# @param WorkspaceDir: Current workspace dir
# @param ContainerFilename: The container file which describes the file to be checked, used for error report
# @param SectionName: Used for error report
# @param Line: The line in container file which defines the file to be checked
#
# @retval The file full path if the file exists
#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo= -1):
    """Check that CheckFilename exists under the workspace.

    Raises a parser error (reported against ContainerFilename) when it does
    not; returns the resolved full path (or '' for empty input).
    """
    CheckFile = ''
    if CheckFilename != '' and CheckFilename is not None:
        CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
        if not os.path.isfile(CheckFile):
            # read the container only to locate the offending line number;
            # use a context manager so the handle is not leaked
            with open(ContainerFilename, 'r') as F:
                ContainerFile = F.read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerFile, Line)
            ErrorMsg = "Can't find file '%s' defined in section '%s'" % (CheckFile, SectionName)
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg,
                            File=ContainerFilename, Line=LineNo, RaiseError=EdkLogger.IsRaiseError)
    return CheckFile
## GetLineNo
#
# Find the index of a line in a file
#
# @param FileContent: Search scope
# @param Line: Search key
#
# @retval int Index of the line
# @retval -1 The line is not found
#
def GetLineNo(FileContent, Line, IsIgnoreComment=True):
    """Return the 1-based number of the first line containing Line, or -1.

    When IsIgnoreComment is True, matching lines whose first non-blank
    character is the comment marker are skipped.
    """
    LineList = FileContent.splitlines()
    for Index in range(len(LineList)):
        if LineList[Index].find(Line) > -1:
            if IsIgnoreComment:
                Stripped = LineList[Index].strip()
                # robustness: the previous code indexed strip()[0]
                # unconditionally and raised IndexError on blank lines
                if Stripped and Stripped[0] == DataType.TAB_COMMENT_SPLIT:
                    continue
            return Index + 1
    return -1
## RaiseParserError
#
# Raise a parser error
#
# @param Line: String which has error
# @param Section: Used for error report
# @param File: File which has the string
# @param Format: Correct format
#
def RaiseParserError(Line, Section, File, Format='', LineNo= -1):
    """Report a parser error for the given statement and section.

    When LineNo is not supplied, the file is scanned to locate the line.
    """
    if LineNo == -1:
        # use a context manager so the file handle is not leaked
        with open(os.path.normpath(File), 'r') as F:
            LineNo = GetLineNo(F.read(), Line)
    ErrorMsg = "Invalid statement '%s' is found in section '%s'" % (Line, Section)
    if Format != '':
        Format = "Correct format is " + Format
    EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=File, Line=LineNo, ExtraData=Format, RaiseError=EdkLogger.IsRaiseError)
## WorkspaceFile
#
# Return a full path with workspace dir
#
# @param WorkspaceDir: Workspace dir
# @param Filename: Relative file name
#
# @retval string A full path
#
def WorkspaceFile(WorkspaceDir, Filename):
    """Join a (normalized) relative file name onto the normalized workspace."""
    Workspace = NormPath(WorkspaceDir)
    Relative = NormPath(Filename)
    return mws.join(Workspace, Relative)
## Split string
#
# Remove a double quote at the start and/or end of the string
#
# @param String: The string need to be split
#
# @retval String: The string after removed '""'
#
def SplitString(String):
    """Strip one leading and one trailing double quote, when present."""
    Result = String
    if Result[:1] == '"':
        Result = Result[1:]
    if Result[-1:] == '"':
        Result = Result[:-1]
    return Result
## Convert To Sql String
#
# 1. Replace "'" with "''" in each item of StringList
#
# @param StringList: A list for strings to be converted
#
def ConvertToSqlString(StringList):
    """Escape single quotes SQL-style ("'" -> "''") in every list item."""
    return [Item.replace("'", "''") for Item in StringList]
## Convert To Sql String
#
# 1. Replace "'" with "''" in the String
#
# @param String: A String to be converted
#
def ConvertToSqlString2(String):
    """Escape single quotes SQL-style ("'" -> "''") in the string."""
    return "''".join(String.split("'"))
#
# Remove comment block
#
def RemoveBlockComment(Lines):
    """Blank out EDK block comments in a list of lines.

    Lines fully inside a block comment are replaced by empty strings so
    that line numbering is preserved for callers.  Code preceding the
    comment opener is re-attached on the line where the comment closes.
    """
    IsFindBlockComment = False
    ReservedLine = ''
    NewLines = []
    for Line in Lines:
        Line = Line.strip()
        if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
            # remember the code before the opener for later re-attachment
            ReservedLine = GetSplitList(Line, DataType.TAB_COMMENT_EDK_START, 1)[0]
            IsFindBlockComment = True
        if Line.find(DataType.TAB_COMMENT_EDK_END) > -1:
            Line = ReservedLine + GetSplitList(Line, DataType.TAB_COMMENT_EDK_END, 1)[1]
            ReservedLine = ''
            IsFindBlockComment = False
        if IsFindBlockComment:
            NewLines.append('')
            continue
        NewLines.append(Line)
    # (the unused IsFindBlockCode flag from the previous version was removed)
    return NewLines
#
# Get String of a List
#
def GetStringOfList(List, Split=' '):
    """Join list items, each followed by Split, then strip outer whitespace.

    Non-list input is returned unchanged.  Note that only *whitespace* is
    stripped from the ends, so a non-space Split leaves a trailing separator.
    """
    if not isinstance(List, list):
        return List
    return ''.join(Item + Split for Item in List).strip()
#
# Get HelpTextList from HelpTextClassList
#
def GetHelpTextList(HelpTextClassList):
    """Flatten HelpText objects into a list of lines.

    A single trailing newline is removed from each HelpText.String (the
    object is modified in place, as before) prior to splitting on '\\n'.
    """
    Result = []
    if HelpTextClassList:
        for HelpText in HelpTextClassList:
            if HelpText.String.endswith('\n'):
                HelpText.String = HelpText.String[:-1]
            Result.extend(HelpText.String.split('\n'))
    return Result
def StringToArray(String):
    """Render a UNI/VFR-style value as a C byte-array initializer string.

    Supported forms:
      L"..."  unicode string: UCS-2 LE byte pairs plus a wide NUL
      "..."   ASCII string: bytes plus NUL, padded to an even byte count
      {...}   already an array: members re-joined without extra spaces
      other   whitespace separated values, NUL terminated, even-padded
    """
    if String.startswith('L"'):
        if String == 'L""':
            return "{0x00,0x00}"
        Pairs = ["0x%02x,0x00" % ord(Ch) for Ch in String[2:-1]]
        return "{%s,0x00,0x00}" % ",".join(Pairs)
    if String.startswith('"'):
        if String == '""':
            return "{0x00,0x00}"
        Bytes = ["0x%02x" % ord(Ch) for Ch in String[1:-1]]
        # terminate with one NUL; add a second when the count would be odd
        Terminator = "0x00" if len(Bytes) % 2 else "0x00,0x00"
        return "{%s,%s}" % (",".join(Bytes), Terminator)
    if String.startswith('{'):
        return "{%s}" % ",".join(Member.strip() for Member in String[1:-1].split(','))
    Values = String.split()
    if len(Values) % 2:
        return '{%s,0}' % ','.join(Values)
    return '{%s,0,0}' % ','.join(Values)
def StringArrayLength(String):
    """Return the byte length of the (unpadded) array for the given value."""
    if String.startswith('L"'):
        # characters between the quotes plus a NUL, two bytes per char:
        # (len - 3 + 1) * 2 simplifies to (len - 2) * 2
        return (len(String) - 2) * 2
    if String.startswith('"'):
        # characters between the quotes plus one NUL byte
        return len(String) - 1
    return len(String.split()) + 1
def RemoveDupOption(OptionString, Which="/I", Against=None):
    """Blank out repeated occurrences of an option (e.g. /Ipath).

    Options whose value already appeared earlier (or is listed in Against)
    are replaced by empty strings; the joined result may therefore contain
    extra spaces where duplicates were removed.
    """
    Options = OptionString.split()
    Seen = list(Against) if Against else []
    for Position, Option in enumerate(Options):
        if not Option.startswith(Which):
            continue
        Value = Option[len(Which):]
        if Value in Seen:
            Options[Position] = ""
        else:
            Seen.append(Value)
    return " ".join(Options)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
pass
| edk2-master | BaseTools/Source/Python/Common/StringUtils.py |
## @file
# Override built in module os to provide support for long file path
#
# Copyright (c) 2014 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import os
from . import LongFilePathOsPath
from Common.LongFilePathSupport import LongFilePath
import time
path = LongFilePathOsPath
def access(path, mode):
    # os.access through the long-path wrapper
    return os.access(LongFilePath(path), mode)
def remove(path):
    """Remove a file, retrying for up to ~5 seconds before the final attempt.

    On Windows a file can be transiently locked (e.g. by an indexer or
    antivirus scanner), so deletion is retried with a short back-off; the
    last attempt is allowed to raise.
    """
    Timeout = 0.0
    while Timeout < 5.0:
        try:
            return os.remove(LongFilePath(path))
        except OSError:
            # transient failure (commonly a sharing violation on Windows);
            # back off briefly and retry.  Narrowed from a bare "except",
            # which also swallowed KeyboardInterrupt and programming errors.
            time.sleep(0.1)
            Timeout = Timeout + 0.1
    return os.remove(LongFilePath(path))
def removedirs(name):
    # os.removedirs through the long-path wrapper
    return os.removedirs(LongFilePath(name))
def rmdir(path):
    # os.rmdir through the long-path wrapper
    return os.rmdir(LongFilePath(path))
def mkdir(path):
    # os.mkdir through the long-path wrapper
    return os.mkdir(LongFilePath(path))
def makedirs(name, mode=0o777):
    # os.makedirs through the long-path wrapper
    return os.makedirs(LongFilePath(name), mode)
def rename(old, new):
    # os.rename with both endpoints going through the long-path wrapper
    return os.rename(LongFilePath(old), LongFilePath(new))
def chdir(path):
    # os.chdir through the long-path wrapper.
    # NOTE(review): the module tail rebinds chdir = os.chdir, which shadows
    # this wrapper -- confirm which binding is intended.
    return os.chdir(LongFilePath(path))
def chmod(path, mode):
    # os.chmod through the long-path wrapper
    return os.chmod(LongFilePath(path), mode)
def stat(path):
    # os.stat through the long-path wrapper
    return os.stat(LongFilePath(path))
def utime(path, times):
    # os.utime through the long-path wrapper
    return os.utime(LongFilePath(path), times)
def listdir(path):
    """List directory entries, going through the long-path wrapper.

    The path is forced to unicode before the call, as before.
    """
    return [Entry for Entry in os.listdir(u"%s" % LongFilePath(path))]
# os.replace exists on Python 3 only; define the wrapper when available
if hasattr(os, 'replace'):
    def replace(src, dst):
        # os.replace with both endpoints going through the long-path wrapper
        return os.replace(LongFilePath(src), LongFilePath(dst))
# Direct pass-throughs that need no long-path handling.
environ = os.environ
getcwd = os.getcwd
# NOTE(review): this rebinds chdir to plain os.chdir, shadowing the
# LongFilePath-aware wrapper defined above -- confirm this is intentional.
chdir = os.chdir
walk = os.walk
W_OK = os.W_OK
F_OK = os.F_OK
sep = os.sep
linesep = os.linesep
getenv = os.getenv
pathsep = os.pathsep
name = os.name
SEEK_SET = os.SEEK_SET
SEEK_END = os.SEEK_END
| edk2-master | BaseTools/Source/Python/Common/LongFilePathOs.py |
## @file
# Python 'Common' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Common/__init__.py |
## @file
# manage multiple workspace file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
import Common.LongFilePathOs as os
from Common.DataType import TAB_WORKSPACE
## MultipleWorkspace
#
# This class manage multiple workspace behavior
#
# @param class:
#
# @var WORKSPACE: defined the current WORKSPACE
# @var PACKAGES_PATH: defined the other WORKSPACE, if current WORKSPACE is invalid, search valid WORKSPACE from PACKAGES_PATH
#
class MultipleWorkspace(object):
    """Manage path lookups across WORKSPACE and the PACKAGES_PATH roots.

    Class attributes:
      WORKSPACE      -- the current workspace root directory
      PACKAGES_PATH  -- list of additional roots searched whenever a path
                        does not exist under WORKSPACE
    """
    WORKSPACE = ''
    PACKAGES_PATH = None

    ## convertPackagePath()
    #
    #   Convert a package path to a form rooted at the given workspace.
    #
    #   @param  cls  The class pointer
    #   @param  Ws   The current WORKSPACE
    #   @param  Path Path to be converted to match workspace.
    #
    @classmethod
    def convertPackagePath(cls, Ws, Path):
        # NOTE(review): normcase is applied to Path but not to Ws, so this
        # prefix test is case-sensitive with respect to Ws -- confirm that
        # is the intended behavior on Windows.
        if str(os.path.normcase(Path)).startswith(Ws):
            return os.path.join(Ws, os.path.relpath(Path, Ws))
        return Path

    ## setWs()
    #
    #   Initialize the WORKSPACE and PACKAGES_PATH attributes.
    #
    #   @param  cls           The class pointer
    #   @param  Ws            Value for the WORKSPACE attribute
    #   @param  PackagesPath  os.pathsep separated list of package roots
    #
    @classmethod
    def setWs(cls, Ws, PackagesPath=None):
        cls.WORKSPACE = Ws
        if PackagesPath:
            cls.PACKAGES_PATH = [cls.convertPackagePath(Ws, os.path.normpath(Entry.strip()))
                                 for Entry in PackagesPath.split(os.pathsep)]
        else:
            cls.PACKAGES_PATH = []

    ## join()
    #
    #   os.path.join that falls back to the PACKAGES_PATH roots when the
    #   joined path does not exist under Ws.
    #
    #   @param  cls  The class pointer
    #   @param  Ws   The current WORKSPACE
    #   @param  *p   Path components of the inf/dec/dsc/fdf/conf file
    #   @retval Path The absolute path of the specified file
    #
    @classmethod
    def join(cls, Ws, *p):
        Path = os.path.join(Ws, *p)
        if os.path.exists(Path):
            return Path
        for Pkg in cls.PACKAGES_PATH:
            Candidate = os.path.join(Pkg, *p)
            if os.path.exists(Candidate):
                return Candidate
        # nothing exists anywhere: return the workspace-relative guess
        return Path

    ## relpath()
    #
    #   os.path.relpath against whichever root (package or workspace) the
    #   path lives under, compared case-insensitively.
    #
    #   @param  cls  The class pointer
    #   @param  Path Path of the inf/dec/dsc/fdf/conf file
    #   @param  Ws   The current WORKSPACE
    #   @retval Path The relative path of the specified file
    #
    @classmethod
    def relpath(cls, Path, Ws):
        for Pkg in cls.PACKAGES_PATH:
            if Path.lower().startswith(Pkg.lower()):
                return os.path.relpath(Path, Pkg)
        if Path.lower().startswith(Ws.lower()):
            return os.path.relpath(Path, Ws)
        return Path

    ## getWs()
    #
    #   Find the root (workspace or package) under which Path exists.
    #
    #   @param  cls  The class pointer
    #   @param  Ws   The current WORKSPACE
    #   @param  Path Path of the inf/dec/dsc/fdf/conf file
    #   @retval Ws   The root containing the file (Ws when none does)
    #
    @classmethod
    def getWs(cls, Ws, Path):
        if not os.path.exists(os.path.join(Ws, Path)):
            for Pkg in cls.PACKAGES_PATH:
                if os.path.exists(os.path.join(Pkg, Path)):
                    return Pkg
        return Ws

    ## handleWsMacro()
    #
    #   Expand the $(WORKSPACE) tag; when the expansion does not exist
    #   under the current workspace, try each package root instead.
    #
    #   @param  cls      The class pointer
    #   @param  PathStr  Path string that may include $(WORKSPACE)
    #   @retval PathStr  Path string with the tag expanded
    #
    @classmethod
    def handleWsMacro(cls, PathStr):
        if TAB_WORKSPACE in PathStr:
            # process each whitespace-separated component independently;
            # the loop variable no longer shadows the builtin "str"
            PathList = PathStr.split()
            for Index, Item in enumerate(PathList):
                MacroStartPos = Item.find(TAB_WORKSPACE)
                if MacroStartPos == -1:
                    continue
                Substr = Item[MacroStartPos:]
                Path = Substr.replace(TAB_WORKSPACE, cls.WORKSPACE).strip()
                if not os.path.exists(Path):
                    for Pkg in cls.PACKAGES_PATH:
                        Path = Substr.replace(TAB_WORKSPACE, Pkg).strip()
                        if os.path.exists(Path):
                            break
                PathList[Index] = Item[0:MacroStartPos] + Path
            PathStr = ' '.join(PathList)
        return PathStr

    ## getPkgPath()
    #
    #   Return the list of package roots.
    #
    #   @param  cls  The class pointer
    #
    @classmethod
    def getPkgPath(cls):
        return cls.PACKAGES_PATH
| edk2-master | BaseTools/Source/Python/Common/MultipleWorkspace.py |
## @file
# This file is used to define each component of tools_def.txt file
#
# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
from . import EdkLogger
from .BuildToolError import *
from Common.TargetTxtClassObject import TargetTxtDict
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.Misc import PathClass
from Common.StringUtils import NormPath
import Common.GlobalData as GlobalData
from Common import GlobalData
from Common.MultipleWorkspace import MultipleWorkspace as mws
from .DataType import TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG,\
TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE\
, TAB_TOD_DEFINES_FAMILY, TAB_TOD_DEFINES_BUILDRULEFAMILY,\
TAB_STAR, TAB_TAT_DEFINES_TOOL_CHAIN_CONF
##
# Static variables used for pattern
#
# Patterns for DEF(...) / ENV(...) references and DEFINE statements in
# tools_def.txt.  Raw string literals avoid the invalid-escape-sequence
# warnings (SyntaxWarning on modern Python 3) that '\(' and '\s' produce
# in plain strings; the compiled regexes are unchanged.
gMacroRefPattern = re.compile(r'(DEF\([^\(\)]+\))')
gEnvRefPattern = re.compile(r'(ENV\([^\(\)]+\))')
gMacroDefPattern = re.compile(r"DEFINE\s+([^\s]+)")
gDefaultToolsDefFile = "tools_def.txt"
## ToolDefClassObject
#
# This class defined content used in file tools_def.txt
#
# @param object: Inherited from object class
# @param Filename: Input value for full path of tools_def.txt
#
# @var ToolsDefTxtDictionary: To store keys and values defined in target.txt
# @var MacroDictionary: To store keys and values defined in DEFINE statement
#
class ToolDefClassObject(object):
    """In-memory model of tools_def.txt.

    Attributes:
        ToolsDefTxtDictionary: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            key -> macro-expanded value, for every assignment line parsed.
        MacroDictionary: "DEF(name)" / "ENV(name)" -> replacement text used
            by ExpandMacros(); seeded from os.environ in __init__.
        ToolsDefTxtDatabase: summary lists/maps (targets, tool-chain tags,
            arches, command types, FAMILY/BUILDRULEFAMILY per tag) built
            while parsing; created by LoadToolDefFile().
    """
    def __init__(self, FileName=None):
        self.ToolsDefTxtDictionary = {}
        self.MacroDictionary = {}
        # Seed ENV(...) macros from the current process environment so that
        # values in tools_def.txt may reference environment variables.
        for Env in os.environ:
            self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]
        if FileName is not None:
            self.LoadToolDefFile(FileName)
    ## LoadToolDefFile
    #
    # Load target.txt file and parse it
    #
    # @param Filename: Input value for full path of tools_def.txt
    #
    def LoadToolDefFile(self, FileName):
        """Parse FileName (plus any !include'd files) and normalize the database lists."""
        # set multiple workspace
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(GlobalData.gWorkspace, PackagesPath)
        self.ToolsDefTxtDatabase = {
            TAB_TOD_DEFINES_TARGET : [],
            TAB_TOD_DEFINES_TOOL_CHAIN_TAG : [],
            TAB_TOD_DEFINES_TARGET_ARCH : [],
            TAB_TOD_DEFINES_COMMAND_TYPE : []
        }
        self.IncludeToolDefFile(FileName)
        # Drop duplicates accumulated across (possibly included) files, then
        # sort for deterministic ordering.
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()
    ## IncludeToolDefFile
    #
    # Load target.txt file and parse it as if its contents were inside the main file
    #
    # @param Filename: Input value for full path of tools_def.txt
    #
    def IncludeToolDefFile(self, FileName):
        """Parse one tools_def-format file, recursing into !include directives."""
        FileContent = []
        if os.path.isfile(FileName):
            try:
                F = open(FileName, 'r')
                FileContent = F.readlines()
            except:
                EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
        else:
            EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)
        for Index in range(len(FileContent)):
            Line = FileContent[Index].strip()
            # Skip blank lines and full-line comments.
            if Line == "" or Line[0] == '#':
                continue
            if Line.startswith("!include"):
                IncFile = Line[8:].strip()
                Done, IncFile = self.ExpandMacros(IncFile)
                if not Done:
                    EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                    "Macro or Environment has not been defined",
                                    ExtraData=IncFile[4:-1], File=FileName, Line=Index+1)
                IncFile = NormPath(IncFile)
                if not os.path.isabs(IncFile):
                    #
                    # try WORKSPACE
                    #
                    IncFileTmp = PathClass(IncFile, GlobalData.gWorkspace)
                    ErrorCode = IncFileTmp.Validate()[0]
                    if ErrorCode != 0:
                        #
                        # try PACKAGES_PATH
                        #
                        IncFileTmp = mws.join(GlobalData.gWorkspace, IncFile)
                        if not os.path.exists(IncFileTmp):
                            #
                            # try directory of current file
                            #
                            IncFileTmp = PathClass(IncFile, os.path.dirname(FileName))
                            ErrorCode = IncFileTmp.Validate()[0]
                            if ErrorCode != 0:
                                EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=IncFile)
                    # IncFileTmp is a PathClass when resolved via WORKSPACE or
                    # the current file's directory, otherwise the string
                    # produced by mws.join().
                    if isinstance(IncFileTmp, PathClass):
                        IncFile = IncFileTmp.Path
                    else:
                        IncFile = IncFileTmp
                self.IncludeToolDefFile(IncFile)
                continue
            NameValuePair = Line.split("=", 1)
            if len(NameValuePair) != 2:
                EdkLogger.warn("tools_def.txt parser", "Line %d: not correct assignment statement, skipped" % (Index + 1))
                continue
            Name = NameValuePair[0].strip()
            Value = NameValuePair[1].strip()
            if Name == "IDENTIFIER":
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
                continue
            MacroDefinition = gMacroDefPattern.findall(Name)
            if MacroDefinition != []:
                # "DEFINE name = value" line: record the DEF(...) macro after
                # expanding any macro references inside its value.
                Done, Value = self.ExpandMacros(Value)
                if not Done:
                    EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                    "Macro or Environment has not been defined",
                                    ExtraData=Value[4:-1], File=FileName, Line=Index+1)
                MacroName = MacroDefinition[0].strip()
                self.MacroDictionary["DEF(%s)" % MacroName] = Value
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
                continue
            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index+1)
            # Key format: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (5 fields).
            List = Name.split('_')
            if len(List) != 5:
                EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
                continue
            elif List[4] == TAB_STAR:
                EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
                continue
            else:
                self.ToolsDefTxtDictionary[Name] = Value
                if List[0] != TAB_STAR:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
                if List[1] != TAB_STAR:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
                if List[2] != TAB_STAR:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
                if List[3] != TAB_STAR:
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]
                # *_<tag>_*_*_FAMILY: record the compiler family for the tag.
                # The first definition wins; a later conflicting value is
                # ignored with a verbose message.
                if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == TAB_STAR and List[3] == TAB_STAR:
                    if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                        EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))
                # *_<tag>_*_*_BUILDRULEFAMILY: overrides the build-rule family
                # (FAMILY was used as its default above); the assignment is
                # unconditional -- the inner check only emits a diagnostic.
                if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == TAB_STAR and List[3] == TAB_STAR:
                    if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                       or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
    ## ExpandMacros
    #
    # Replace defined macros with real value
    #
    # @param Value: The string with unreplaced macros
    #
    # @retval Value: The string which has been replaced with real value
    #
    def ExpandMacros(self, Value):
        """Expand ENV(...) and DEF(...) references in Value.

        Returns (True, expanded) on success, or (False, ref) carrying the
        first unresolved DEF(...) reference.  Unknown ENV(...) references
        are replaced with the empty string rather than treated as errors.
        """
        # os.environ contains all environment variables uppercase on Windows which cause the key in the self.MacroDictionary is uppercase, but Ref may not
        EnvReference = gEnvRefPattern.findall(Value)
        for Ref in EnvReference:
            if Ref not in self.MacroDictionary and Ref.upper() not in self.MacroDictionary:
                Value = Value.replace(Ref, "")
            else:
                if Ref in self.MacroDictionary:
                    Value = Value.replace(Ref, self.MacroDictionary[Ref])
                else:
                    # Fall back to the upper-cased key (Windows environment).
                    Value = Value.replace(Ref, self.MacroDictionary[Ref.upper()])
        MacroReference = gMacroRefPattern.findall(Value)
        for Ref in MacroReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])
        return True, Value
## ToolDefDict
#
# Load tools_def.txt in input Conf dir
#
# @param ConfDir: Conf dir
#
# @retval ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
#
class ToolDefDict():
    """Lazy, process-wide singleton wrapper around ToolDefClassObject.

    The tools_def.txt named by target.txt (TOOL_CHAIN_CONF) -- or the
    default tools_def.txt under ConfDir -- is parsed only on first access
    of the ToolDef property.
    """
    def __new__(cls, ConfDir, *args, **kw):
        # Classic singleton: every construction returns the same instance.
        if not hasattr(cls, '_instance'):
            orig = super(ToolDefDict, cls)
            cls._instance = orig.__new__(cls, *args, **kw)
        return cls._instance
    def __init__(self, ConfDir):
        self.ConfDir = ConfDir
        # Probe the backing attribute, NOT the 'ToolDef' property: calling
        # hasattr(self, 'ToolDef') invokes the property getter, which on any
        # re-construction of the singleton would eagerly parse tools_def.txt
        # and defeat the lazy initialization.
        if not hasattr(self, '_ToolDef'):
            self._ToolDef = None
    @property
    def ToolDef(self):
        # Parse tools_def.txt on first access and cache the result.
        if not self._ToolDef:
            self._GetToolDef()
        return self._ToolDef
    def _GetToolDef(self):
        """Locate tools_def.txt (from target.txt or ConfDir) and parse it."""
        TargetObj = TargetTxtDict()
        Target = TargetObj.Target
        ToolDef = ToolDefClassObject()
        if TAB_TAT_DEFINES_TOOL_CHAIN_CONF in Target.TargetTxtDictionary:
            ToolsDefFile = Target.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
            if ToolsDefFile:
                ToolDef.LoadToolDefFile(os.path.normpath(ToolsDefFile))
            else:
                # TOOL_CHAIN_CONF present but empty: fall back to the default.
                ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(self.ConfDir, gDefaultToolsDefFile)))
        else:
            ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(self.ConfDir, gDefaultToolsDefFile)))
        self._ToolDef = ToolDef
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Standalone invocation: construct the (lazy) ToolDefDict singleton with
    # the WORKSPACE environment variable as the Conf directory.  Parsing only
    # happens if/when the ToolDef property is accessed.
    ToolDef = ToolDefDict(os.getenv("WORKSPACE"))
    pass
| edk2-master | BaseTools/Source/Python/Common/ToolDefClassObject.py |
## @file
# help with caching in BaseTools
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
# for class function
class cached_class_function(object):
    """Method decorator that memoizes the first call per instance.

    The wrapped method runs once per instance; its result is then stored on
    the instance under the method's name, so every later call on that
    instance returns the same cached value (arguments are ignored).
    """
    def __init__(self, function):
        self._function = function
    def __get__(self, obj, cls):
        def RunOnceThenCache(*args, **kwargs):
            Result = self._function(obj, *args, **kwargs)
            # Shadow this (non-data) descriptor in the instance dict so
            # future lookups bypass __get__ and return the cached result.
            obj.__dict__[self._function.__name__] = lambda *a, **kw: Result
            return Result
        return RunOnceThenCache
# for class property
class cached_property(object):
    """Property-like descriptor whose value is computed once per instance.

    The first attribute access runs the wrapped function and stores the
    result in the instance __dict__ under the same name, so subsequent
    accesses hit the instance attribute directly and never re-compute.
    """
    def __init__(self, function):
        self._function = function
    def __get__(self, obj, cls):
        Result = self._function(obj)
        obj.__dict__[self._function.__name__] = Result
        return Result
# for non-class function
class cached_basic_function(object):
    """Decorator for plain functions: the first call's result is cached and
    returned for every subsequent call (later arguments are ignored)."""
    def __init__(self, function):
        self._function = function
    # wrapper to call _do since <class>.__dict__ doesn't support changing __call__
    def __call__(self, *args, **kwargs):
        return self._do(*args, **kwargs)
    def _do(self, *args, **kwargs):
        Value = self._function(*args, **kwargs)
        # Replace _do in the instance dict with a plain function that returns
        # the cached value.  Functions fetched from an instance __dict__ are
        # NOT bound, so the replacement must not take a 'self' parameter: the
        # previous version did, which consumed the first positional argument
        # and made zero-argument calls raise TypeError after the first call.
        self.__dict__['_do'] = lambda *args, **kwargs: Value
        return Value
| edk2-master | BaseTools/Source/Python/Common/caching.py |
## @file
# This file is used to define common static strings used by INF/DEC/DSC files
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# Portions copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
# Portions Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
# Portions Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
# Common Definitions
#
TAB_SPLIT = '.'
TAB_COMMENT_EDK_START = '/*'
TAB_COMMENT_EDK_END = '*/'
TAB_COMMENT_EDK_SPLIT = '//'
TAB_COMMENT_SPLIT = '#'
TAB_SPECIAL_COMMENT = '##'
TAB_EQUAL_SPLIT = '='
TAB_VALUE_SPLIT = '|'
TAB_COMMA_SPLIT = ','
TAB_SPACE_SPLIT = ' '
TAB_SEMI_COLON_SPLIT = ';'
TAB_SECTION_START = '['
TAB_SECTION_END = ']'
TAB_OPTION_START = '<'
TAB_OPTION_END = '>'
TAB_SLASH = '\\'     # NOTE(review): value is a BACKslash despite the name
TAB_BACK_SLASH = '/'  # NOTE(review): value is a FORWARD slash despite the name
TAB_STAR = '*'
TAB_LINE_BREAK = '\n'
TAB_PRINTCHAR_VT = '\x0b'
TAB_PRINTCHAR_BS = '\b'
TAB_PRINTCHAR_NUL = '\0'
TAB_UINT8 = 'UINT8'
TAB_UINT16 = 'UINT16'
TAB_UINT32 = 'UINT32'
TAB_UINT64 = 'UINT64'
TAB_VOID = 'VOID*'
TAB_GUID = 'GUID'
TAB_PCD_CLEAN_NUMERIC_TYPES = {TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64}
TAB_PCD_NUMERIC_TYPES = {TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, 'BOOLEAN'}
TAB_PCD_NUMERIC_TYPES_VOID = {TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, 'BOOLEAN', TAB_VOID}
TAB_WORKSPACE = '$(WORKSPACE)'
TAB_FV_DIRECTORY = 'FV'
TAB_ARCH_NULL = ''
TAB_ARCH_COMMON = 'COMMON'
TAB_ARCH_IA32 = 'IA32'
TAB_ARCH_X64 = 'X64'
TAB_ARCH_ARM = 'ARM'
TAB_ARCH_EBC = 'EBC'
TAB_ARCH_AARCH64 = 'AARCH64'
TAB_ARCH_RISCV64 = 'RISCV64'
TAB_ARCH_LOONGARCH64 = 'LOONGARCH64'
ARCH_SET_FULL = {TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_ARM, TAB_ARCH_EBC, TAB_ARCH_AARCH64, TAB_ARCH_RISCV64, TAB_ARCH_LOONGARCH64, TAB_ARCH_COMMON}
SUP_MODULE_BASE = 'BASE'
SUP_MODULE_SEC = 'SEC'
SUP_MODULE_PEI_CORE = 'PEI_CORE'
SUP_MODULE_PEIM = 'PEIM'
SUP_MODULE_DXE_CORE = 'DXE_CORE'
SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
SUP_MODULE_HOST_APPLICATION = 'HOST_APPLICATION'
SUP_MODULE_SMM_CORE = 'SMM_CORE'
SUP_MODULE_MM_STANDALONE = 'MM_STANDALONE'
SUP_MODULE_MM_CORE_STANDALONE = 'MM_CORE_STANDALONE'
SUP_MODULE_LIST = [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]
SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(SUP_MODULE_LIST)
SUP_MODULE_SET_PEI = {SUP_MODULE_PEIM, SUP_MODULE_PEI_CORE}
EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
EDK_COMPONENT_TYPE_SECURITY_CORE = 'SECURITY_CORE'
EDK_COMPONENT_TYPE_PEI_CORE = SUP_MODULE_PEI_CORE
EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'
EDK_NAME = 'EDK'
EDKII_NAME = 'EDKII'
MSG_EDKII_MAIL_ADDR = '[email protected]'
COMPONENT_TO_MODULE_MAP_DICT = {
EDK_COMPONENT_TYPE_LIBRARY : SUP_MODULE_BASE,
EDK_COMPONENT_TYPE_SECURITY_CORE : SUP_MODULE_SEC,
EDK_COMPONENT_TYPE_PEI_CORE : SUP_MODULE_PEI_CORE,
EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER : SUP_MODULE_PEIM,
EDK_COMPONENT_TYPE_PIC_PEIM : SUP_MODULE_PEIM,
EDK_COMPONENT_TYPE_RELOCATABLE_PEIM : SUP_MODULE_PEIM,
"PE32_PEIM" : SUP_MODULE_PEIM,
EDK_COMPONENT_TYPE_BS_DRIVER : SUP_MODULE_DXE_DRIVER,
EDK_COMPONENT_TYPE_RT_DRIVER : SUP_MODULE_DXE_RUNTIME_DRIVER,
EDK_COMPONENT_TYPE_SAL_RT_DRIVER : SUP_MODULE_DXE_SAL_DRIVER,
EDK_COMPONENT_TYPE_APPLICATION : SUP_MODULE_UEFI_APPLICATION,
"LOGO" : SUP_MODULE_BASE,
}
BINARY_FILE_TYPE_FW = 'FW'
BINARY_FILE_TYPE_GUID = 'GUID'
BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
BINARY_FILE_TYPE_LIB = 'LIB'
BINARY_FILE_TYPE_PE32 = 'PE32'
BINARY_FILE_TYPE_PIC = 'PIC'
BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
BINARY_FILE_TYPE_SMM_DEPEX = 'SMM_DEPEX'
BINARY_FILE_TYPE_TE = 'TE'
BINARY_FILE_TYPE_VER = 'VER'
BINARY_FILE_TYPE_UI = 'UI'
BINARY_FILE_TYPE_BIN = 'BIN'
BINARY_FILE_TYPE_FV = 'FV'
BINARY_FILE_TYPE_RAW = 'RAW_BINARY'
PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'
TAB_SOURCES = 'Sources'
TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC
TAB_SOURCES_AARCH64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_SOURCES_LOONGARCH64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_BINARIES = 'Binaries'
TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC
TAB_BINARIES_AARCH64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_BINARIES_LOONGARCH64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_INCLUDES = 'Includes'
TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC
TAB_INCLUDES_AARCH64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_INCLUDES_LOONGARCH64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_GUIDS = 'Guids'
TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC
TAB_GUIDS_AARCH64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_GUIDS_LOONGARCH64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PROTOCOLS = 'Protocols'
TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC
TAB_PROTOCOLS_AARCH64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PROTOCOLS_LOONGARCH64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PPIS = 'Ppis'
TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC
TAB_PPIS_AARCH64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PPIS_LOONGARCH64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_LIBRARY_CLASSES = 'LibraryClasses'
TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC
TAB_LIBRARY_CLASSES_AARCH64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_LIBRARY_CLASSES_LOONGARCH64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PACKAGES = 'Packages'
TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC
TAB_PACKAGES_AARCH64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PACKAGES_LOONGARCH64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS = 'Pcds'
TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
TAB_PCDS_DYNAMIC = 'Dynamic'
TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
TAB_PCDS_DYNAMIC_HII = 'DynamicHii'
PCD_DYNAMIC_TYPE_SET = {TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII}
PCD_DYNAMIC_EX_TYPE_SET = {TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII}
# leave as a list for order
PCD_TYPE_LIST = [TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_PATCHABLE_IN_MODULE, TAB_PCDS_FEATURE_FLAG, TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_EX]
TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_FIXED_AT_BUILD_AARCH64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PCDS_FIXED_AT_BUILD_LOONGARCH64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_PATCHABLE_IN_MODULE_AARCH64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PCDS_PATCHABLE_IN_MODULE_LOONGARCH64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_FEATURE_FLAG_AARCH64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PCDS_FEATURE_FLAG_LOONGARCH64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_DYNAMIC_EX_AARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PCDS_DYNAMIC_EX_LOONGARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC
TAB_PCDS_DYNAMIC_AARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_PCDS_DYNAMIC_LOONGARCH64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE = 'PcdLoadFixAddressPeiCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE = 'PcdLoadFixAddressBootTimeCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE = 'PcdLoadFixAddressRuntimeCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE = 'PcdLoadFixAddressSmmCodePageNumber'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE = 'UINT32'
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SET = {TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE, \
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE, \
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE, \
TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE}
## The mapping dictionary from datum type to its maximum number.
MAX_VAL_TYPE = {"BOOLEAN":0x01, TAB_UINT8:0xFF, TAB_UINT16:0xFFFF, TAB_UINT32:0xFFFFFFFF, TAB_UINT64:0xFFFFFFFFFFFFFFFF}
## The mapping dictionary from datum type to size string.
MAX_SIZE_TYPE = {"BOOLEAN":1, TAB_UINT8:1, TAB_UINT16:2, TAB_UINT32:4, TAB_UINT64:8}
TAB_DEPEX = 'Depex'
TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC
TAB_DEPEX_AARCH64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_DEPEX_LOONGARCH64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_SKUIDS = 'SkuIds'
TAB_DEFAULT_STORES = 'DefaultStores'
TAB_DEFAULT_STORES_DEFAULT = 'STANDARD'
TAB_LIBRARIES = 'Libraries'
TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC
TAB_LIBRARIES_AARCH64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_LIBRARIES_LOONGARCH64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_COMPONENTS = 'Components'
TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC
TAB_COMPONENTS_AARCH64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_AARCH64
TAB_COMPONENTS_LOONGARCH64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_LOONGARCH64
TAB_BUILD_OPTIONS = 'BuildOptions'
TAB_DEFINE = 'DEFINE'
TAB_NMAKE = 'Nmake'
TAB_USER_EXTENSIONS = 'UserExtensions'
TAB_INCLUDE = '!include'
TAB_DEFAULT = 'DEFAULT'
TAB_COMMON = 'COMMON'
#
# Common Define
#
TAB_COMMON_DEFINES = 'Defines'
#
# Inf Definitions
#
TAB_INF_DEFINES = TAB_COMMON_DEFINES
TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_INF_DEFINES_DPX_SOURCE = 'DPX_SOURCE'
TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
TAB_INF_DEFINES_VERSION = 'VERSION' # for Edk inf, the same as VERSION_NUMBER
TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
TAB_INF_DEFINES_TIANO_EDK_FLASHMAP_H = 'TIANO_EDK_FLASHMAP_H'
TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
TAB_INF_DEFINES_DEFINE = 'DEFINE'
TAB_INF_DEFINES_SPEC = 'SPEC'
TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
TAB_INF_DEFINES_MACRO = '__MACROS__'
TAB_INF_DEFINES_SHADOW = 'SHADOW'
TAB_INF_FIXED_PCD = 'FixedPcd'
TAB_INF_FEATURE_PCD = 'FeaturePcd'
TAB_INF_PATCH_PCD = 'PatchPcd'
TAB_INF_PCD = 'Pcd'
TAB_INF_PCD_EX = 'PcdEx'
TAB_INF_USAGE_PRO = 'PRODUCES'
TAB_INF_USAGE_SOME_PRO = 'SOMETIMES_PRODUCES'
TAB_INF_USAGE_CON = 'CONSUMES'
TAB_INF_USAGE_SOME_CON = 'SOMETIMES_CONSUMES'
TAB_INF_USAGE_NOTIFY = 'NOTIFY'
TAB_INF_USAGE_TO_START = 'TO_START'
TAB_INF_USAGE_BY_START = 'BY_START'
TAB_INF_GUIDTYPE_EVENT = 'Event'
TAB_INF_GUIDTYPE_FILE = 'File'
TAB_INF_GUIDTYPE_FV = 'FV'
TAB_INF_GUIDTYPE_GUID = 'GUID'
TAB_INF_GUIDTYPE_HII = 'HII'
TAB_INF_GUIDTYPE_HOB = 'HOB'
TAB_INF_GUIDTYPE_ST = 'SystemTable'
TAB_INF_GUIDTYPE_TSG = 'TokenSpaceGuid'
TAB_INF_GUIDTYPE_VAR = 'Variable'
TAB_INF_GUIDTYPE_PROTOCOL = 'PROTOCOL'
TAB_INF_GUIDTYPE_PPI = 'PPI'
TAB_INF_USAGE_UNDEFINED = 'UNDEFINED'
#
# Dec Definitions
#
TAB_DEC_DEFINES = TAB_COMMON_DEFINES
TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
#
# Dsc Definitions
#
TAB_DSC_DEFINES = TAB_COMMON_DEFINES
TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
TAB_DSC_DEFINES_PCD_INFO_GENERATION = 'PCD_INFO_GENERATION'
TAB_DSC_DEFINES_PCD_DYNAMIC_AS_DYNAMICEX = 'PCD_DYNAMIC_AS_DYNAMICEX'
TAB_DSC_DEFINES_PCD_VAR_CHECK_GENERATION = 'PCD_VAR_CHECK_GENERATION'
TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE = 'VPD_AUTHENTICATED_VARIABLE_STORE'
TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_RFC_LANGUAGES = 'RFC_LANGUAGES'
TAB_DSC_DEFINES_ISO_LANGUAGES = 'ISO_LANGUAGES'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
TAB_DSC_DEFINES_EDKGLOBAL = 'EDK_GLOBAL'
TAB_DSC_PREBUILD = 'PREBUILD'
TAB_DSC_POSTBUILD = 'POSTBUILD'
#
# TargetTxt Definitions
#
TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
TAB_TAT_DEFINES_TARGET = 'TARGET'
TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"
#
# ToolDef Definitions
#
TAB_TOD_DEFINES_TARGET = 'TARGET'
TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
TAB_TOD_DEFINES_FAMILY = 'FAMILY'
TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'
TAB_TOD_DEFINES_BUILDRULEORDER = 'BUILDRULEORDER'
#
# Conditional Statements
#
TAB_IF = '!if'
TAB_END_IF = '!endif'
TAB_ELSE_IF = '!elseif'
TAB_ELSE = '!else'
TAB_IF_DEF = '!ifdef'
TAB_IF_N_DEF = '!ifndef'
TAB_IF_EXIST = '!if exist'
TAB_ERROR = '!error'
#
# Unknown section
#
TAB_UNKNOWN = 'UNKNOWN'
#
# Build database path
#
DATABASE_PATH = ":memory:" #"BuildDatabase.db"
# used by ECC
MODIFIER_SET = {'IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', 'EFI_BOOTSERVICE', 'EFIAPI'}
# Dependency Opcodes
DEPEX_OPCODE_BEFORE = "BEFORE"
DEPEX_OPCODE_AFTER = "AFTER"
DEPEX_OPCODE_PUSH = "PUSH"
DEPEX_OPCODE_AND = "AND"
DEPEX_OPCODE_OR = "OR"
DEPEX_OPCODE_NOT = "NOT"
DEPEX_OPCODE_END = "END"
DEPEX_OPCODE_SOR = "SOR"
DEPEX_OPCODE_TRUE = "TRUE"
DEPEX_OPCODE_FALSE = "FALSE"
# Dependency Expression
DEPEX_SUPPORTED_OPCODE_SET = {"BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR", "TRUE", "FALSE", '(', ')'}
TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
TAB_C_CODE_FILE = "C-CODE-FILE"
TAB_C_HEADER_FILE = "C-HEADER-FILE"
TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
TAB_IMAGE_FILE = "IMAGE-DEFINITION-FILE"
TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
TAB_OBJECT_FILE = "OBJECT-FILE"
TAB_VFR_FILE = 'VISUAL-FORM-REPRESENTATION-FILE'
# used by BRG
TAB_BRG_PCD = 'PCD'
TAB_BRG_LIBRARY = 'Library'
#
# Build Rule File Version Definition
#
TAB_BUILD_RULE_VERSION = "build_rule_version"
# section name for PCDs
PCDS_DYNAMIC_DEFAULT = "PcdsDynamicDefault"
PCDS_DYNAMIC_VPD = "PcdsDynamicVpd"
PCDS_DYNAMIC_HII = "PcdsDynamicHii"
PCDS_DYNAMICEX_DEFAULT = "PcdsDynamicExDefault"
PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
SECTIONS_HAVE_ITEM_PCD_SET = {PCDS_DYNAMIC_DEFAULT.upper(), PCDS_DYNAMIC_VPD.upper(), PCDS_DYNAMIC_HII.upper(), \
PCDS_DYNAMICEX_DEFAULT.upper(), PCDS_DYNAMICEX_VPD.upper(), PCDS_DYNAMICEX_HII.upper()}
# Section allowed to have items after arch
SECTIONS_HAVE_ITEM_AFTER_ARCH_SET = {TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(),
PCDS_DYNAMIC_DEFAULT.upper(),
PCDS_DYNAMIC_VPD.upper(),
PCDS_DYNAMIC_HII.upper(),
PCDS_DYNAMICEX_DEFAULT.upper(),
PCDS_DYNAMICEX_VPD.upper(),
PCDS_DYNAMICEX_HII.upper(),
TAB_BUILD_OPTIONS.upper(),
TAB_PACKAGES.upper(),
TAB_INCLUDES.upper()}
#
# pack codes as used in PcdDb and elsewhere
#
# struct format for an EFI_GUID: '=' means native byte order with standard
# sizes and no padding; one UINT32, two UINT16s, eight UINT8s (16 bytes).
PACK_PATTERN_GUID = '=LHHBBBBBBBB'
# struct format code for an unsigned integer of the given byte size.
PACK_CODE_BY_SIZE = {8:'=Q',
                     4:'=L',
                     2:'=H',
                     1:'=B',
                     0:'=B',  # size 0 packed as one byte -- TODO confirm intent
                     16:""}   # 16-byte values presumably packed as GUIDs elsewhere -- confirm at call sites
TAB_COMPILER_MSFT = 'MSFT'
| edk2-master | BaseTools/Source/Python/Common/DataType.py |
## @file
#
# This package manage the VPD PCD information file which will be generated
# by build tool's autogen.
# The VPD PCD information file will be input for third-party BPDG tool which
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
#
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
import Common.LongFilePathOs as os
import re
import Common.EdkLogger as EdkLogger
import Common.BuildToolError as BuildToolError
import subprocess
import Common.GlobalData as GlobalData
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.Misc import SaveFileOnChange
from Common.DataType import *
FILE_COMMENT_TEMPLATE = \
"""
## @file
#
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform collected by build.exe.
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
"""
## The class manage VpdInfoFile.
#
# This file contains an ordered (based on position in the DSC file) list of the PCDs specified in the platform description file (DSC). The Value field that will be assigned to the PCD comes from the DSC file, INF file (if not defined in the DSC file) or the DEC file (if not defined in the INF file). This file is used as an input to the BPDG tool.
# Format for this file (using EBNF notation) is:
#   <File>            :: = [<CommentBlock>]
#                          [<PcdEntry>]*
#   <CommentBlock>    ::=  ["#" <String> <EOL>]*
#   <PcdEntry>        ::=  <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>
#   <PcdName>         ::=  <TokenSpaceCName> "." <PcdCName>
#   <TokenSpaceCName> ::=  C Variable Name of the Token Space GUID
#   <PcdCName>        ::=  C Variable Name of the PCD
#   <Offset>          ::=  {TAB_STAR} {<HexNumber>}
#   <HexNumber>       ::=  "0x" (a-fA-F0-9){1,8}
#   <Size>            ::=  <HexNumber>
#   <Value>           ::=  {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}
#   <NonNegativeInt>  ::=  (0-9)+
#   <QString>         ::=  ["L"] <DblQuote> <String> <DblQuote>
#   <DblQuote>        ::=  0x22
#   <Array>           ::=  {<CArray>} {<NList>}
#   <CArray>          ::=  "{" <HexNumber> ["," <HexNumber>]* "}"
#   <NList>           ::=  <HexNumber> ["," <HexNumber>]*
#
class VpdInfoFile:

    _rVpdPcdLine = None

    ## Constructor
    def __init__(self):
        ## Dictionary for VPD in following format
        #
        #  Key    : PcdClassObject instance.
        #  @see BuildClassObject.PcdClassObject
        #  Value  : offset in different SKU such as [sku1_offset, sku2_offset]
        self._VpdArray = {}
        # (TokenSpaceName, PcdTokenName) -> {(SkuId, Offset) : Value},
        # populated by Read() from an existing VPD info file.
        self._VpdInfo = {}

    ## Add a VPD PCD collected from platform's autogen when building.
    #
    #  @param Vpd      The VPD PCD collected for a platform.
    #  @see  BuildClassObject.PcdClassObject
    #  @param skuname  SKU name this offset belongs to.
    #  @param Offset   String offset in the specific SKU, or TAB_STAR when the
    #                  offset is to be fixed up later by the BPDG tool.
    #
    def Add(self, Vpd, skuname, Offset):
        if (Vpd is None):
            EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")

        # NOTE(review): Offset is a string, so this is a lexicographic
        # comparison that accepts almost any non-empty value; kept as-is to
        # avoid changing which inputs are rejected.
        if not (Offset >= "0" or Offset == TAB_STAR):
            EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)

        # VOID* PCDs must carry an explicit positive max datum size; numeric
        # PCDs get the size implied by their type when none was supplied.
        if Vpd.DatumType == TAB_VOID:
            if Vpd.MaxDatumSize <= "0":
                EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
                                "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
        elif Vpd.DatumType in TAB_PCD_NUMERIC_TYPES:
            if not Vpd.MaxDatumSize:
                Vpd.MaxDatumSize = MAX_SIZE_TYPE[Vpd.DatumType]
        else:
            if Vpd.MaxDatumSize <= "0":
                EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
                                "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))

        if Vpd not in self._VpdArray:
            #
            # If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
            #
            self._VpdArray[Vpd] = {}

        self._VpdArray[Vpd].update({skuname:Offset})

    ## Generate VPD PCD information into a text file
    #
    #  @param FilePath  The given file path which would hold VPD information
    #  @return          Result of SaveFileOnChange (True when the file content changed)
    def Write(self, FilePath):
        # BUGFIX: the original guard "not (FilePath is not None or len(FilePath) != 0)"
        # raised TypeError for None and silently accepted an empty path.
        if not FilePath:
            EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
                            "Invalid parameter FilePath: %s." % FilePath)

        Content = FILE_COMMENT_TEMPLATE
        Pcds = sorted(self._VpdArray.keys(), key=lambda x: x.TokenCName)
        for Pcd in Pcds:
            i = 0
            PcdTokenCName = Pcd.TokenCName
            # A "mixed" PCD is emitted under its canonical (first) token name.
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    PcdTokenCName = PcdItem[0]
            for skuname in self._VpdArray[Pcd]:
                PcdValue = str(Pcd.SkuInfoList[skuname].DefaultValue).strip()
                if PcdValue == "" :
                    PcdValue  = Pcd.DefaultValue
                Content += "%s.%s|%s|%s|%s|%s  \n" % (Pcd.TokenSpaceGuidCName, PcdTokenCName, skuname, str(self._VpdArray[Pcd][skuname]).strip(), str(Pcd.MaxDatumSize).strip(), PcdValue)
                i += 1
        return SaveFileOnChange(FilePath, Content, False)

    ## Read an existing VPD PCD info file.
    #
    #  This routine will read VPD PCD information from existing file and construct
    #  internal PcdClassObject array.
    #  This routine could be used by third-party tool to parse VPD info file content.
    #
    #  @param FilePath The full path string for existing VPD PCD info file.
    def Read(self, FilePath):
        try:
            fd = open(FilePath, "r")
        except:
            EdkLogger.error("VpdInfoFile",
                            BuildToolError.FILE_OPEN_FAILURE,
                            "Fail to open file %s for written." % FilePath)
        Lines = fd.readlines()
        # BUGFIX: close the handle instead of leaking it.
        fd.close()
        for Line in Lines:
            Line = Line.strip()
            if len(Line) == 0 or Line.startswith("#"):
                continue

            #
            # the line must follow output format defined in BPDG spec.
            #
            try:
                PcdName, SkuId, Offset, Size, Value = Line.split("#")[0].split("|")
                PcdName, SkuId, Offset, Size, Value = PcdName.strip(), SkuId.strip(), Offset.strip(), Size.strip(), Value.strip()
                TokenSpaceName, PcdTokenName = PcdName.split(".")
            except:
                EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)

            Found = False

            if (TokenSpaceName, PcdTokenName) not in self._VpdInfo:
                self._VpdInfo[(TokenSpaceName, PcdTokenName)] = {}
            self._VpdInfo[(TokenSpaceName, PcdTokenName)][(SkuId, Offset)] = Value
            for VpdObject in self._VpdArray:
                VpdObjectTokenCName = VpdObject.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (VpdObject.TokenCName, VpdObject.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        VpdObjectTokenCName = PcdItem[0]
                for sku in VpdObject.SkuInfoList:
                    if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObjectTokenCName == PcdTokenName.strip() and sku == SkuId:
                        # Only TAB_STAR offsets (left for BPDG to assign) may be
                        # replaced by the offset read back from the file.
                        if self._VpdArray[VpdObject][sku] == TAB_STAR:
                            if Offset == TAB_STAR:
                                EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
                            self._VpdArray[VpdObject][sku] = Offset
                        Found = True
            if not Found:
                EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")

    ## Get count of VPD PCD collected from platform's autogen when building.
    #
    #  @return The integer count value
    def GetCount(self):
        Count = 0
        for OffsetList in self._VpdArray.values():
            Count += len(OffsetList)
        return Count

    ## Get an offset value for a given VPD PCD
    #
    #  Because BPDG only support one Sku, so only return offset for SKU default.
    #
    #  @param vpd    A given VPD PCD
    #  @return       The per-SKU offset dict for the PCD, or None if unknown.
    def GetOffset(self, vpd):
        if vpd not in self._VpdArray:
            return None

        if len(self._VpdArray[vpd]) == 0:
            return None

        return self._VpdArray[vpd]

    ## Return the (sku, offset, value) triples recorded by Read() for one PCD.
    #
    #  @param arg  Tuple of (PcdTokenName, TokenSpaceName).
    def GetVpdInfo(self, arg):
        (PcdTokenName, TokenSpaceName) = arg
        return [(sku,offset,value) for (sku,offset),value in self._VpdInfo.get((TokenSpaceName, PcdTokenName)).items()]
## Call external BPDG tool to process VPD file
#
#  @param ToolPath      The string path name for BPDG tool
#  @param VpdFileName   The string path name for VPD information guid.txt
#
#  @return The BPDG tool's exit code (0 on success); a non-zero exit code is
#          reported as a build error via EdkLogger.error before returning.
#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
    assert ToolPath is not None, "Invalid parameter ToolPath"
    assert VpdFileName is not None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"

    OutputDir = os.path.dirname(VpdFileName)
    FileName = os.path.basename(VpdFileName)
    BaseName, ext = os.path.splitext(FileName)
    OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
    OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)

    try:
        PopenObject = subprocess.Popen(' '.join([ToolPath,
                                                 '-o', OutputBinFileName,
                                                 '-m', OutputMapFileName,
                                                 '-q',
                                                 '-f',
                                                 VpdFileName]),
                                       stdout=subprocess.PIPE,
                                       stderr= subprocess.PIPE,
                                       shell=True)
    except Exception as X:
        EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData=str(X))
    # communicate() waits for process termination, so returncode is already
    # valid here; the original extra wait() loop was redundant and removed.
    (out, error) = PopenObject.communicate()
    print(out.decode())
    if PopenObject.returncode != 0:
        EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error.decode()))
        EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
                            (PopenObject.returncode, str(error.decode())))

    return PopenObject.returncode
| edk2-master | BaseTools/Source/Python/Common/VpdInfoFile.py |
## @file
# Standardized Error Handling infrastructures.
#
# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# File-related status codes (1 .. 0x0FFF).
FILE_OPEN_FAILURE = 1
FILE_WRITE_FAILURE = 2
FILE_PARSE_FAILURE = 3
FILE_READ_FAILURE = 4
FILE_CREATE_FAILURE = 5
FILE_CHECKSUM_FAILURE = 6
FILE_COMPRESS_FAILURE = 7
FILE_DECOMPRESS_FAILURE = 8
FILE_MOVE_FAILURE = 9
FILE_DELETE_FAILURE = 10
FILE_COPY_FAILURE = 11
FILE_POSITIONING_FAILURE = 12
FILE_ALREADY_EXIST = 13
FILE_NOT_FOUND = 14
FILE_TYPE_MISMATCH = 15
FILE_CASE_MISMATCH = 16
FILE_DUPLICATED = 17
FILE_UNKNOWN_ERROR = 0x0FFF

# Command-line option errors (0x1000 range).
OPTION_UNKNOWN = 0x1000
OPTION_MISSING = 0x1001
OPTION_CONFLICT = 0x1002
OPTION_VALUE_INVALID = 0x1003
OPTION_DEPRECATED = 0x1004
OPTION_NOT_SUPPORTED = 0x1005
OPTION_UNKNOWN_ERROR = 0x1FFF

# Function-parameter errors (0x2000 range).
PARAMETER_INVALID = 0x2000
PARAMETER_MISSING = 0x2001
PARAMETER_UNKNOWN_ERROR =0x2FFF

# Syntax/format errors (0x3000 range).
FORMAT_INVALID = 0x3000
FORMAT_NOT_SUPPORTED = 0x3001
FORMAT_UNKNOWN = 0x3002
FORMAT_UNKNOWN_ERROR = 0x3FFF

# Resource errors (0x4000 range).
RESOURCE_NOT_AVAILABLE = 0x4000
RESOURCE_ALLOCATE_FAILURE = 0x4001
RESOURCE_FULL = 0x4002
RESOURCE_OVERFLOW = 0x4003
RESOURCE_UNDERRUN = 0x4004
RESOURCE_UNKNOWN_ERROR = 0x4FFF

# Attribute-access errors (0x5000 range).
ATTRIBUTE_NOT_AVAILABLE = 0x5000
ATTRIBUTE_GET_FAILURE = 0x5001
ATTRIBUTE_SET_FAILURE = 0x5002
ATTRIBUTE_UPDATE_FAILURE = 0x5003
ATTRIBUTE_ACCESS_DENIED = 0x5004
ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF

# I/O errors (0x6000 range).
IO_NOT_READY = 0x6000
IO_BUSY = 0x6001
IO_TIMEOUT = 0x6002
IO_UNKNOWN_ERROR = 0x6FFF

# Miscellaneous high-level failures.
COMMAND_FAILURE = 0x7000
PERMISSION_FAILURE = 0x8000

FV_FREESIZE_ERROR = 0x9000

CODE_ERROR = 0xC0DE

# Tool-phase errors (0xF000 range); note the codes are not strictly ordered.
AUTOGEN_ERROR = 0xF000
PARSER_ERROR = 0xF001
BUILD_ERROR = 0xF002
GENFDS_ERROR = 0xF003
ECC_ERROR = 0xF004
EOT_ERROR = 0xF005
PREBUILD_ERROR = 0xF007
POSTBUILD_ERROR = 0xF008
DDC_ERROR = 0xF009
WARNING_AS_ERROR = 0xF006
MIGRATION_ERROR = 0xF010
PCD_VALIDATION_INFO_ERROR = 0xF011
PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012
PCD_VARIABLE_INFO_ERROR = 0xF016
PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013
PCD_STRUCTURE_PCD_INVALID_FIELD_ERROR = 0xF014
PCD_STRUCTURE_PCD_ERROR = 0xF015

ERROR_STATEMENT = 0xFFFD
ABORT_ERROR = 0xFFFE
UNKNOWN_ERROR = 0xFFFF
## Error message of each error code
#  Maps a subset of the codes above to a human-readable description.  Not
#  every defined code has an entry, so callers should look up with
#  gErrorMessage.get(Code, <fallback>).
gErrorMessage = {
    FILE_NOT_FOUND          :   "File/directory not found in workspace",
    FILE_OPEN_FAILURE       :   "File open failure",
    FILE_WRITE_FAILURE      :   "File write failure",
    FILE_PARSE_FAILURE      :   "File parse failure",
    FILE_READ_FAILURE       :   "File read failure",
    FILE_CREATE_FAILURE     :   "File create failure",
    FILE_CHECKSUM_FAILURE   :   "Invalid checksum of file",
    FILE_COMPRESS_FAILURE   :   "File compress failure",
    FILE_DECOMPRESS_FAILURE :   "File decompress failure",
    FILE_MOVE_FAILURE       :   "File move failure",
    FILE_DELETE_FAILURE     :   "File delete failure",
    FILE_COPY_FAILURE       :   "File copy failure",
    FILE_POSITIONING_FAILURE:   "Failed to seeking position",
    FILE_ALREADY_EXIST      :   "File or directory already exists",
    FILE_TYPE_MISMATCH      :   "Incorrect file type",
    FILE_CASE_MISMATCH      :   "File name case mismatch",
    FILE_DUPLICATED         :   "Duplicated file found",
    FILE_UNKNOWN_ERROR      :   "Unknown error encountered on file",

    OPTION_UNKNOWN          :   "Unknown option",
    OPTION_MISSING          :   "Missing option",
    OPTION_CONFLICT         :   "Conflict options",
    OPTION_VALUE_INVALID    :   "Invalid value of option",
    OPTION_DEPRECATED       :   "Deprecated option",
    OPTION_NOT_SUPPORTED    :   "Unsupported option",
    OPTION_UNKNOWN_ERROR    :   "Unknown error when processing options",

    PARAMETER_INVALID       :   "Invalid parameter",
    PARAMETER_MISSING       :   "Missing parameter",
    PARAMETER_UNKNOWN_ERROR :   "Unknown error in parameters",

    FORMAT_INVALID          :   "Invalid syntax/format",
    FORMAT_NOT_SUPPORTED    :   "Not supported syntax/format",
    FORMAT_UNKNOWN          :   "Unknown format",
    FORMAT_UNKNOWN_ERROR    :   "Unknown error in syntax/format ",

    RESOURCE_NOT_AVAILABLE  :   "Not available",
    RESOURCE_ALLOCATE_FAILURE : "Allocate failure",
    RESOURCE_FULL           :   "Full",
    RESOURCE_OVERFLOW       :   "Overflow",
    RESOURCE_UNDERRUN       :   "Underrun",
    RESOURCE_UNKNOWN_ERROR  :   "Unknown error",

    ATTRIBUTE_NOT_AVAILABLE :   "Not available",
    ATTRIBUTE_GET_FAILURE   :   "Failed to retrieve",
    ATTRIBUTE_SET_FAILURE   :   "Failed to set",
    ATTRIBUTE_UPDATE_FAILURE:   "Failed to update",
    ATTRIBUTE_ACCESS_DENIED :   "Access denied",
    ATTRIBUTE_UNKNOWN_ERROR :   "Unknown error when accessing",

    COMMAND_FAILURE         :   "Failed to execute command",

    IO_NOT_READY            :   "Not ready",
    IO_BUSY                 :   "Busy",
    IO_TIMEOUT              :   "Timeout",
    IO_UNKNOWN_ERROR        :   "Unknown error in IO operation",

    ERROR_STATEMENT         :   "!error statement",
    UNKNOWN_ERROR           :   "Unknown error",
    }
## Exception indicating a fatal error
class FatalError(Exception):
    """Raised for unrecoverable build-tool errors; carries no extra state."""
    pass
# This module defines no standalone behavior when executed directly.
if __name__ == "__main__":
    pass
| edk2-master | BaseTools/Source/Python/Common/BuildToolError.py |
## @file
# This file is used to parse and evaluate expression in directive or PCD value.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
from Common.GlobalData import *
from CommonDataClass.Exceptions import BadExpression
from CommonDataClass.Exceptions import WrnExpression
from .Misc import GuidStringToGuidStructureString, ParseFieldValue,CopyDict
import Common.EdkLogger as EdkLogger
import copy
from Common.DataType import *
import sys
from random import sample
import string
# Error/warning message templates raised as BadExpression / WrnExpression by
# the expression parser below; '%s' slots receive the offending token(s).
ERR_STRING_EXPR = 'This operator cannot be used in string expression: [%s].'
ERR_SNYTAX = 'Syntax error, the rest of expression cannot be evaluated: [%s].'
ERR_MATCH = 'No matching right parenthesis.'
ERR_STRING_TOKEN = 'Bad string token: [%s].'
ERR_MACRO_TOKEN = 'Bad macro token: [%s].'
ERR_EMPTY_TOKEN = 'Empty token is not allowed.'
ERR_PCD_RESOLVE = 'The PCD should be FeatureFlag type or FixedAtBuild type: [%s].'
ERR_VALID_TOKEN = 'No more valid token found from rest of string: [%s].'
ERR_EXPR_TYPE = 'Different types found in expression.'
ERR_OPERATOR_UNSUPPORT = 'Unsupported operator: [%s]'
ERR_REL_NOT_IN = 'Expect "IN" after "not" operator.'
WRN_BOOL_EXPR = 'Operand of boolean type cannot be used in arithmetic expression.'
WRN_EQCMP_STR_OTHERS = '== Comparison between Operand of string type and Boolean/Number Type always return False.'
WRN_NECMP_STR_OTHERS = '!= Comparison between Operand of string type and Boolean/Number Type always return True.'
ERR_RELCMP_STR_OTHERS = 'Operator taking Operand of string type and Boolean/Number Type is not allowed: [%s].'
ERR_STRING_CMP = 'Unicode string and general string cannot be compared: [%s %s %s]'
ERR_ARRAY_TOKEN = 'Bad C array or C format GUID token: [%s].'
ERR_ARRAY_ELE = 'This must be HEX value for NList or Array: [%s].'
ERR_EMPTY_EXPR = 'Empty expression is not allowed.'
ERR_IN_OPERAND = 'Macro after IN operator can only be: $(FAMILY), $(ARCH), $(TOOL_CHAIN_TAG) and $(TARGET).'

# Matches a valid C identifier (full-string match via trailing '$').
__ValidString = re.compile(r'[_a-zA-Z][_0-9a-zA-Z]*$')
# Helpers for LABEL()/OFFSET_OF() references inside PCD values.
_ReLabel = re.compile('LABEL\((\w+)\)')
_ReOffset = re.compile('OFFSET_OF\((\w+)\)')
# Matches TokenSpaceGuidCName.PcdCName references.
PcdPattern = re.compile(r'^[_a-zA-Z][0-9A-Za-z_]*\.[_a-zA-Z][0-9A-Za-z_]*$')
## SplitString
#  Split string to list according double quote
#  For example: abc"de\"f"ghi"jkl"mn will be: ['abc', '"de\"f"', 'ghi', '"jkl"', 'mn']
#
def SplitString(String):
    # Hide escaped backslashes ("\\") behind a random marker so they cannot
    # be mistaken for an escape of the following quote character.
    Marker = ''.join(sample(string.ascii_letters + string.digits, 8))
    Text = String.replace('\\\\', Marker).strip()
    Pieces = []
    Piece = ''
    InSingle = False
    InDouble = False
    for Pos, Char in enumerate(Text):
        if Char == '"' and not InSingle:
            # NOTE: at Pos == 0 this inspects Text[-1] (the last character),
            # preserving the original scanner's behavior.
            if Text[Pos - 1] != '\\':
                InDouble = not InDouble
                if not InDouble:
                    # Closing quote: emit the quoted piece, quote included.
                    Pieces.append(Piece + Char)
                    Piece = ''
                    continue
                if Piece:
                    # Opening quote: flush the unquoted text gathered so far.
                    Pieces.append(Piece)
                    Piece = ''
        elif Char == "'" and not InDouble:
            if Text[Pos - 1] != '\\':
                InSingle = not InSingle
                if not InSingle:
                    Pieces.append(Piece + Char)
                    Piece = ''
                    continue
                if Piece:
                    Pieces.append(Piece)
                    Piece = ''
        Piece += Char
    if InSingle or InDouble:
        raise BadExpression(ERR_STRING_TOKEN % Piece)
    if Piece:
        Pieces.append(Piece)
    # Restore the escaped backslashes hidden at the start.
    return [P.replace(Marker, '\\\\') if Marker in P else P for P in Pieces]
def SplitPcdValueString(String):
    """Split a PCD value string on top-level commas.

    Commas nested inside parentheses (e.g. GUID(...), DEVICE_PATH(...)) or
    inside single/double quoted strings (including L'...' / L"...") are not
    split points.  Raises BadExpression on unbalanced quotes or parentheses.
    """
    # There might be escaped comma in GUID() or DEVICE_PATH() or " "
    # or ' ' or L' ' or L" "
    # Escaped backslashes are temporarily replaced by a random marker so they
    # are not mistaken for quote escapes; the marker is restored at the end.
    RanStr = ''.join(sample(string.ascii_letters + string.digits, 8))
    String = String.replace('\\\\', RanStr).strip()
    RetList = []
    InParenthesis = 0
    InSingleQuote = False
    InDoubleQuote = False
    Item = ''
    for i, ch in enumerate(String):
        if ch == '(':
            InParenthesis += 1
        elif ch == ')':
            if InParenthesis:
                InParenthesis -= 1
            else:
                raise BadExpression(ERR_STRING_TOKEN % Item)
        elif ch == '"' and not InSingleQuote:
            # NOTE: at i == 0 this inspects String[-1] (the last character).
            if String[i-1] != '\\':
                InDoubleQuote = not InDoubleQuote
        elif ch == "'" and not InDoubleQuote:
            if String[i-1] != '\\':
                InSingleQuote = not InSingleQuote
        elif ch == ',':
            # Only a comma at the top level terminates the current item.
            if InParenthesis or InSingleQuote or InDoubleQuote:
                Item += String[i]
                continue
            elif Item:
                RetList.append(Item)
                Item = ''
            continue
        Item += String[i]
    if InSingleQuote or InDoubleQuote or InParenthesis:
        raise BadExpression(ERR_STRING_TOKEN % Item)
    if Item:
        RetList.append(Item)
    for i, ch in enumerate(RetList):
        if RanStr in ch:
            RetList[i] = ch.replace(RanStr,'\\\\')
    return RetList
def IsValidCName(Str):
    """Return True when Str is a syntactically valid C identifier."""
    return bool(__ValidString.match(Str))
def BuildOptionValue(PcdValue, GuidDict):
    """Normalize a PCD value supplied via build options.

    An 'H' prefix is stripped; L'...' and '...' quoted forms pass through
    unchanged; any other 'L'-prefixed value is rewrapped as L"...".  The
    result is then evaluated with ValueExpressionEx; evaluation failures are
    deliberately ignored and the unevaluated value is returned unchanged.
    """
    if PcdValue.startswith('H'):
        InputValue = PcdValue[1:]
    elif PcdValue.startswith("L'") or PcdValue.startswith("'"):
        InputValue = PcdValue
    elif PcdValue.startswith('L'):
        InputValue = 'L"' + PcdValue[1:] + '"'
    else:
        InputValue = PcdValue
    try:
        PcdValue = ValueExpressionEx(InputValue, TAB_VOID, GuidDict)(True)
    except:
        # Best-effort evaluation: any failure leaves PcdValue as-is.
        pass
    return PcdValue
## ReplaceExprMacro
#
#  Replace every $(MACRO) reference in String with its value from Macros.
#  Undefined macros become '0' (C preprocessor convention).  Outside quoted
#  substrings, macros in ExceptionList are wrapped in double quotes and are
#  the only macros allowed directly after an IN operator; empty values become
#  '""'.  PCD names found in macro-free pieces are recorded in the global
#  gConditionalPcds list as a side effect.
#
def ReplaceExprMacro(String, Macros, ExceptionList = None):
    StrList = SplitString(String)
    for i, String in enumerate(StrList):
        InQuote = False
        if String.startswith('"'):
            InQuote = True
        MacroStartPos = String.find('$(')
        if MacroStartPos < 0:
            # No macro in this piece: just note any platform PCDs referenced
            # so conditional-PCD tracking stays complete.
            for Pcd in gPlatformPcds:
                if Pcd in String:
                    if Pcd not in gConditionalPcds:
                        gConditionalPcds.append(Pcd)
            continue
        RetStr = ''
        # Re-scan after each substitution since replacement text may itself
        # shift the position of the next '$('.
        while MacroStartPos >= 0:
            RetStr = String[0:MacroStartPos]
            MacroEndPos = String.find(')', MacroStartPos)
            if MacroEndPos < 0:
                raise BadExpression(ERR_MACRO_TOKEN % String[MacroStartPos:])
            Macro = String[MacroStartPos+2:MacroEndPos]
            if Macro not in Macros:
                # From C reference manual:
                # If an undefined macro name appears in the constant-expression of
                # !if or !elif, it is replaced by the integer constant 0.
                RetStr += '0'
            elif not InQuote:
                Tklst = RetStr.split()
                if Tklst and Tklst[-1] in {'IN', 'in'} and ExceptionList and Macro not in ExceptionList:
                    raise BadExpression(ERR_IN_OPERAND)
                # Make sure the macro in exception list is encapsulated by double quote
                # For example: DEFINE ARCH = IA32 X64
                # $(ARCH) is replaced with "IA32 X64"
                if ExceptionList and Macro in ExceptionList:
                    RetStr += '"' + Macros[Macro] + '"'
                elif Macros[Macro].strip():
                    RetStr += Macros[Macro]
                else:
                    RetStr += '""'
            else:
                RetStr += Macros[Macro]
            RetStr += String[MacroEndPos+1:]
            String = RetStr
            MacroStartPos = String.find('$(')
        StrList[i] = RetStr
    return ''.join(StrList)
# transfer int to string for in/not in expression
def IntToStr(Value):
    """Decode the little-endian bytes of Value into a double-quoted string."""
    Chars = []
    while Value > 0:
        # Low-order byte first, matching the original decoding order.
        Chars.append(chr(Value & 0xff))
        Value >>= 8
    return '"%s"' % ''.join(Chars)
# Macro names that may legally appear with the 'IN' operator (passed as the
# ExceptionList of ReplaceExprMacro).
SupportedInMacroList = ['TARGET', 'TOOL_CHAIN_TAG', 'ARCH', 'FAMILY']
class BaseExpression(object):
    """Shared plumbing for expression evaluators.

    Subclasses must provide _GetOperator(), the _Idx/_Token cursor state and
    a LogicalOperators mapping.
    """

    def __init__(self, *args, **kwargs):
        super(BaseExpression, self).__init__()

    # Peek at the next operator token; consume it only when it is in OpSet.
    def _IsOperator(self, OpSet):
        SavedIdx = self._Idx
        self._GetOperator()
        if self._Token in OpSet:
            # Canonicalize word operators (AND/OR/...) to their symbol form.
            if self._Token in self.LogicalOperators:
                self._Token = self.LogicalOperators[self._Token]
            return True
        # Not a match: rewind so the token can be re-read by another rule.
        self._Idx = SavedIdx
        return False
class ValueExpression(BaseExpression):
# Logical operator mapping
LogicalOperators = {
'&&' : 'and', '||' : 'or',
'!' : 'not', 'AND': 'and',
'OR' : 'or' , 'NOT': 'not',
'XOR': '^' , 'xor': '^',
'EQ' : '==' , 'NE' : '!=',
'GT' : '>' , 'LT' : '<',
'GE' : '>=' , 'LE' : '<=',
'IN' : 'in'
}
NonLetterOpLst = ['+', '-', TAB_STAR, '/', '%', '&', '|', '^', '~', '<<', '>>', '!', '=', '>', '<', '?', ':']
SymbolPattern = re.compile("("
"\$\([A-Z][A-Z0-9_]*\)|\$\(\w+\.\w+\)|\w+\.\w+|"
"&&|\|\||!(?!=)|"
"(?<=\W)AND(?=\W)|(?<=\W)OR(?=\W)|(?<=\W)NOT(?=\W)|(?<=\W)XOR(?=\W)|"
"(?<=\W)EQ(?=\W)|(?<=\W)NE(?=\W)|(?<=\W)GT(?=\W)|(?<=\W)LT(?=\W)|(?<=\W)GE(?=\W)|(?<=\W)LE(?=\W)"
")")
@staticmethod
def Eval(Operator, Oprand1, Oprand2 = None):
WrnExp = None
if Operator not in {"==", "!=", ">=", "<=", ">", "<", "in", "not in"} and \
(isinstance(Oprand1, type('')) or isinstance(Oprand2, type(''))):
raise BadExpression(ERR_STRING_EXPR % Operator)
if Operator in {'in', 'not in'}:
if not isinstance(Oprand1, type('')):
Oprand1 = IntToStr(Oprand1)
if not isinstance(Oprand2, type('')):
Oprand2 = IntToStr(Oprand2)
TypeDict = {
type(0) : 0,
# For python2 long type
type(sys.maxsize + 1) : 0,
type('') : 1,
type(True) : 2
}
EvalStr = ''
if Operator in {"!", "NOT", "not"}:
if isinstance(Oprand1, type('')):
raise BadExpression(ERR_STRING_EXPR % Operator)
EvalStr = 'not Oprand1'
elif Operator in {"~"}:
if isinstance(Oprand1, type('')):
raise BadExpression(ERR_STRING_EXPR % Operator)
EvalStr = '~ Oprand1'
else:
if Operator in {"+", "-"} and (type(True) in {type(Oprand1), type(Oprand2)}):
# Boolean in '+'/'-' will be evaluated but raise warning
WrnExp = WrnExpression(WRN_BOOL_EXPR)
elif type('') in {type(Oprand1), type(Oprand2)} and not isinstance(Oprand1, type(Oprand2)):
# == between string and number/boolean will always return False, != return True
if Operator == "==":
WrnExp = WrnExpression(WRN_EQCMP_STR_OTHERS)
WrnExp.result = False
raise WrnExp
elif Operator == "!=":
WrnExp = WrnExpression(WRN_NECMP_STR_OTHERS)
WrnExp.result = True
raise WrnExp
else:
raise BadExpression(ERR_RELCMP_STR_OTHERS % Operator)
elif TypeDict[type(Oprand1)] != TypeDict[type(Oprand2)]:
if Operator in {"==", "!=", ">=", "<=", ">", "<"} and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):
# comparison between number and boolean is allowed
pass
elif Operator in {'&', '|', '^', "and", "or"} and set((TypeDict[type(Oprand1)], TypeDict[type(Oprand2)])) == set((TypeDict[type(True)], TypeDict[type(0)])):
# bitwise and logical operation between number and boolean is allowed
pass
else:
raise BadExpression(ERR_EXPR_TYPE)
if isinstance(Oprand1, type('')) and isinstance(Oprand2, type('')):
if ((Oprand1.startswith('L"') or Oprand1.startswith("L'")) and (not Oprand2.startswith('L"')) and (not Oprand2.startswith("L'"))) or \
(((not Oprand1.startswith('L"')) and (not Oprand1.startswith("L'"))) and (Oprand2.startswith('L"') or Oprand2.startswith("L'"))):
raise BadExpression(ERR_STRING_CMP % (Oprand1, Operator, Oprand2))
if 'in' in Operator and isinstance(Oprand2, type('')):
Oprand2 = Oprand2.split()
EvalStr = 'Oprand1 ' + Operator + ' Oprand2'
# Local symbols used by built in eval function
Dict = {
'Oprand1' : Oprand1,
'Oprand2' : Oprand2
}
try:
Val = eval(EvalStr, {}, Dict)
except Exception as Excpt:
raise BadExpression(str(Excpt))
if Operator in {'and', 'or'}:
if Val:
Val = True
else:
Val = False
if WrnExp:
WrnExp.result = Val
raise WrnExp
return Val
def __init__(self, Expression, SymbolTable={}):
super(ValueExpression, self).__init__(self, Expression, SymbolTable)
self._NoProcess = False
if not isinstance(Expression, type('')):
self._Expr = Expression
self._NoProcess = True
return
self._Expr = ReplaceExprMacro(Expression.strip(),
SymbolTable,
SupportedInMacroList)
if not self._Expr.strip():
raise BadExpression(ERR_EMPTY_EXPR)
#
# The symbol table including PCD and macro mapping
#
self._Symb = CopyDict(SymbolTable)
self._Symb.update(self.LogicalOperators)
self._Idx = 0
self._Len = len(self._Expr)
self._Token = ''
self._WarnExcept = None
# Literal token without any conversion
self._LiteralToken = ''
# Public entry for this class
# @param RealValue: False: only evaluate if the expression is true or false, used for conditional expression
# True : return the evaluated str(value), used for PCD value
#
# @return: True or False if RealValue is False
# Evaluated value of string format if RealValue is True
#
def __call__(self, RealValue=False, Depth=0):
if self._NoProcess:
return self._Expr
self._Depth = Depth
self._Expr = self._Expr.strip()
if RealValue and Depth == 0:
self._Token = self._Expr
if self.__IsNumberToken():
return self._Expr
Token = ''
try:
Token = self._GetToken()
except BadExpression:
pass
if isinstance(Token, type('')) and Token.startswith('{') and Token.endswith('}') and self._Idx >= self._Len:
return self._Expr
self._Idx = 0
self._Token = ''
Val = self._ConExpr()
RealVal = Val
if isinstance(Val, type('')):
if Val == 'L""':
Val = False
elif not Val:
Val = False
RealVal = '""'
elif not Val.startswith('L"') and not Val.startswith('{') and not Val.startswith("L'") and not Val.startswith("'"):
Val = True
RealVal = '"' + RealVal + '"'
# The expression has been parsed, but the end of expression is not reached
# It means the rest does not comply EBNF of <Expression>
if self._Idx != self._Len:
raise BadExpression(ERR_SNYTAX % self._Expr[self._Idx:])
if RealValue:
RetVal = str(RealVal)
elif Val:
RetVal = True
else:
RetVal = False
if self._WarnExcept:
self._WarnExcept.result = RetVal
raise self._WarnExcept
else:
return RetVal
# Template function to parse binary operators which have same precedence
# Expr [Operator Expr]*
def _ExprFuncTemplate(self, EvalFunc, OpSet):
Val = EvalFunc()
while self._IsOperator(OpSet):
Op = self._Token
if Op == '?':
Val2 = EvalFunc()
if self._IsOperator({':'}):
Val3 = EvalFunc()
if Val:
Val = Val2
else:
Val = Val3
continue
#
# PEP 238 -- Changing the Division Operator
# x/y to return a reasonable approximation of the mathematical result of the division ("true division")
# x//y to return the floor ("floor division")
#
if Op == '/':
Op = '//'
try:
Val = self.Eval(Op, Val, EvalFunc())
except WrnExpression as Warn:
self._WarnExcept = Warn
Val = Warn.result
return Val
# A [? B]*
def _ConExpr(self):
return self._ExprFuncTemplate(self._OrExpr, {'?', ':'})
# A [|| B]*
def _OrExpr(self):
return self._ExprFuncTemplate(self._AndExpr, {"OR", "or", "||"})
# A [&& B]*
def _AndExpr(self):
return self._ExprFuncTemplate(self._BitOr, {"AND", "and", "&&"})
# A [ | B]*
def _BitOr(self):
return self._ExprFuncTemplate(self._BitXor, {"|"})
# A [ ^ B]*
def _BitXor(self):
return self._ExprFuncTemplate(self._BitAnd, {"XOR", "xor", "^"})
# A [ & B]*
def _BitAnd(self):
return self._ExprFuncTemplate(self._EqExpr, {"&"})
# A [ == B]*
def _EqExpr(self):
Val = self._RelExpr()
while self._IsOperator({"==", "!=", "EQ", "NE", "IN", "in", "!", "NOT", "not"}):
Op = self._Token
if Op in {"!", "NOT", "not"}:
if not self._IsOperator({"IN", "in"}):
raise BadExpression(ERR_REL_NOT_IN)
Op += ' ' + self._Token
try:
Val = self.Eval(Op, Val, self._RelExpr())
except WrnExpression as Warn:
self._WarnExcept = Warn
Val = Warn.result
return Val
# A [ > B]*
def _RelExpr(self):
return self._ExprFuncTemplate(self._ShiftExpr, {"<=", ">=", "<", ">", "LE", "GE", "LT", "GT"})
def _ShiftExpr(self):
return self._ExprFuncTemplate(self._AddExpr, {"<<", ">>"})
# A [ + B]*
def _AddExpr(self):
return self._ExprFuncTemplate(self._MulExpr, {"+", "-"})
# A [ * B]*
def _MulExpr(self):
return self._ExprFuncTemplate(self._UnaryExpr, {TAB_STAR, "/", "%"})
# [!]*A
def _UnaryExpr(self):
if self._IsOperator({"!", "NOT", "not"}):
Val = self._UnaryExpr()
try:
return self.Eval('not', Val)
except WrnExpression as Warn:
self._WarnExcept = Warn
return Warn.result
if self._IsOperator({"~"}):
Val = self._UnaryExpr()
try:
return self.Eval('~', Val)
except WrnExpression as Warn:
self._WarnExcept = Warn
return Warn.result
return self._IdenExpr()
# Parse identifier or encapsulated expression
def _IdenExpr(self):
Tk = self._GetToken()
if Tk == '(':
Val = self._ConExpr()
try:
# _GetToken may also raise BadExpression
if self._GetToken() != ')':
raise BadExpression(ERR_MATCH)
except BadExpression:
raise BadExpression(ERR_MATCH)
return Val
return Tk
# Skip whitespace or tab
def __SkipWS(self):
for Char in self._Expr[self._Idx:]:
if Char not in ' \t':
break
self._Idx += 1
# Try to convert string to number
def __IsNumberToken(self):
Radix = 10
if self._Token.lower()[0:2] == '0x' and len(self._Token) > 2:
Radix = 16
if self._Token.startswith('"') or self._Token.startswith('L"'):
Flag = 0
for Index in range(len(self._Token)):
if self._Token[Index] in {'"'}:
if self._Token[Index - 1] == '\\':
continue
Flag += 1
if Flag == 2 and self._Token.endswith('"'):
return True
if self._Token.startswith("'") or self._Token.startswith("L'"):
Flag = 0
for Index in range(len(self._Token)):
if self._Token[Index] in {"'"}:
if self._Token[Index - 1] == '\\':
continue
Flag += 1
if Flag == 2 and self._Token.endswith("'"):
return True
try:
self._Token = int(self._Token, Radix)
return True
except ValueError:
return False
except TypeError:
return False
# Parse array: {...}
def __GetArray(self):
Token = '{'
self._Idx += 1
self.__GetNList(True)
Token += self._LiteralToken
if self._Idx >= self._Len or self._Expr[self._Idx] != '}':
raise BadExpression(ERR_ARRAY_TOKEN % Token)
Token += '}'
# All whitespace and tabs in array are already stripped.
IsArray = IsGuid = False
if len(Token.split(',')) == 11 and len(Token.split(',{')) == 2 \
and len(Token.split('},')) == 1:
HexLen = [11, 6, 6, 5, 4, 4, 4, 4, 4, 4, 6]
HexList= Token.split(',')
if HexList[3].startswith('{') and \
not [Index for Index, Hex in enumerate(HexList) if len(Hex) > HexLen[Index]]:
IsGuid = True
if Token.lstrip('{').rstrip('}').find('{') == -1:
if not [Hex for Hex in Token.lstrip('{').rstrip('}').split(',') if len(Hex) > 4]:
IsArray = True
if not IsArray and not IsGuid:
raise BadExpression(ERR_ARRAY_TOKEN % Token)
self._Idx += 1
self._Token = self._LiteralToken = Token
return self._Token
# Parse string, the format must be: "..."
def __GetString(self):
Idx = self._Idx
# Skip left quote
self._Idx += 1
# Replace escape \\\", \"
if self._Expr[Idx] == '"':
Expr = self._Expr[self._Idx:].replace('\\\\', '//').replace('\\\"', '\\\'')
for Ch in Expr:
self._Idx += 1
if Ch == '"':
break
self._Token = self._LiteralToken = self._Expr[Idx:self._Idx]
if not self._Token.endswith('"'):
raise BadExpression(ERR_STRING_TOKEN % self._Token)
#Replace escape \\\', \'
elif self._Expr[Idx] == "'":
Expr = self._Expr[self._Idx:].replace('\\\\', '//').replace("\\\'", "\\\"")
for Ch in Expr:
self._Idx += 1
if Ch == "'":
break
self._Token = self._LiteralToken = self._Expr[Idx:self._Idx]
if not self._Token.endswith("'"):
raise BadExpression(ERR_STRING_TOKEN % self._Token)
self._Token = self._Token[1:-1]
return self._Token
# Get token that is comprised by alphanumeric, underscore or dot(used by PCD)
# @param IsAlphaOp: Indicate if parsing general token or script operator(EQ, NE...)
def __GetIdToken(self, IsAlphaOp = False):
IdToken = ''
for Ch in self._Expr[self._Idx:]:
if not self.__IsIdChar(Ch) or ('?' in self._Expr and Ch == ':'):
break
self._Idx += 1
IdToken += Ch
self._Token = self._LiteralToken = IdToken
if not IsAlphaOp:
self.__ResolveToken()
return self._Token
    # Try to resolve token
    def __ResolveToken(self):
        """Resolve the just-scanned identifier token in place.

        A PCD reference (matched by PcdPattern) is looked up in the symbol
        table and its value expression is evaluated recursively; the result is
        then normalized: quoted text is unquoted, TRUE/FALSE variants become
        Python booleans, anything else must parse as a number.
        Raises BadExpression for an empty or unresolvable token.
        """
        if not self._Token:
            raise BadExpression(ERR_EMPTY_TOKEN)
        # PCD token
        if PcdPattern.match(self._Token):
            if self._Token not in self._Symb:
                Ex = BadExpression(ERR_PCD_RESOLVE % self._Token)
                # Attach the PCD name so callers can report which PCD failed.
                Ex.Pcd = self._Token
                raise Ex
            # Evaluate the PCD's own value expression recursively.
            self._Token = ValueExpression(self._Symb[self._Token], self._Symb)(True, self._Depth+1)
            if not isinstance(self._Token, type('')):
                self._LiteralToken = hex(self._Token)
                return
            if self._Token.startswith('"'):
                self._Token = self._Token[1:-1]
            elif self._Token in {"FALSE", "false", "False"}:
                self._Token = False
            elif self._Token in {"TRUE", "true", "True"}:
                self._Token = True
            else:
                self.__IsNumberToken()
    def __GetNList(self, InArray=False):
        """Parse one token or a comma-separated list of hex numbers.

        When the first token is a hex literal, subsequent ','-separated hex
        literals are joined into a single list token (used for byte arrays).
        @param InArray: when True, a non-hex first token is an error.
        """
        self._GetSingleToken()
        if not self.__IsHexLiteral():
            if InArray:
                raise BadExpression(ERR_ARRAY_ELE % self._Token)
            return self._Token
        self.__SkipWS()
        Expr = self._Expr[self._Idx:]
        if not Expr.startswith(','):
            # Single hex value, no list to collect.
            return self._Token
        NList = self._LiteralToken
        while Expr.startswith(','):
            NList += ','
            self._Idx += 1
            self.__SkipWS()
            self._GetSingleToken()
            if not self.__IsHexLiteral():
                raise BadExpression(ERR_ARRAY_ELE % self._Token)
            NList += self._LiteralToken
            self.__SkipWS()
            Expr = self._Expr[self._Idx:]
        self._Token = self._LiteralToken = NList
        return self._Token
def __IsHexLiteral(self):
if self._LiteralToken.startswith('{') and \
self._LiteralToken.endswith('}'):
return True
if gHexPattern.match(self._LiteralToken):
Token = self._LiteralToken[2:]
if not Token:
self._LiteralToken = '0x0'
else:
self._LiteralToken = '0x' + Token
return True
return False
    def _GetToken(self):
        # A generic token is either a single value or a comma-separated
        # number list; __GetNList handles both.
        return self.__GetNList()
@staticmethod
def __IsIdChar(Ch):
return Ch in '._:' or Ch.isalnum()
    # Parse operand
    def _GetSingleToken(self):
        """Parse one operand token and return it.

        Recognized forms: L"..."/L'...' wide strings, '...'/"..." strings,
        UINT8/16/32/64(...) typed casts, GUID literals, identifiers, byte
        arrays ({...}) and parentheses.  Raises BadExpression otherwise.
        """
        self.__SkipWS()
        Expr = self._Expr[self._Idx:]
        if Expr.startswith('L"'):
            # Skip L
            self._Idx += 1
            UStr = self.__GetString()
            self._Token = 'L"' + UStr + '"'
            return self._Token
        elif Expr.startswith("L'"):
            # Skip L
            self._Idx += 1
            UStr = self.__GetString()
            self._Token = "L'" + UStr + "'"
            return self._Token
        elif Expr.startswith("'"):
            UStr = self.__GetString()
            self._Token = "'" + UStr + "'"
            return self._Token
        elif Expr.startswith('UINT'):
            # Typed cast such as UINT32(expr): the regex only validates the
            # syntax (RetValue is unused beyond that); the inner expression is
            # evaluated, then the whole cast is re-parsed by ParseFieldValue.
            Re = re.compile('(?:UINT8|UINT16|UINT32|UINT64)\((.+)\)')
            try:
                RetValue = Re.search(Expr).group(1)
            except:
                raise BadExpression('Invalid Expression %s' % Expr)
            Idx = self._Idx
            for Ch in Expr:
                self._Idx += 1
                if Ch == '(':
                    # Remember the UINTxx prefix and the start of the argument.
                    Prefix = self._Expr[Idx:self._Idx - 1]
                    Idx = self._Idx
                if Ch == ')':
                    TmpValue = self._Expr[Idx :self._Idx - 1]
                    TmpValue = ValueExpression(TmpValue)(True)
                    TmpValue = '0x%x' % int(TmpValue) if not isinstance(TmpValue, type('')) else TmpValue
                    break
            self._Token, Size = ParseFieldValue(Prefix + '(' + TmpValue + ')')
            return self._Token
        self._Token = ''
        if Expr:
            Ch = Expr[0]
            Match = gGuidPattern.match(Expr)
            # A GUID literal must not be immediately followed by an
            # identifier character, otherwise it is part of a longer token.
            if Match and not Expr[Match.end():Match.end()+1].isalnum() \
                and Expr[Match.end():Match.end()+1] != '_':
                self._Idx += Match.end()
                self._Token = ValueExpression(GuidStringToGuidStructureString(Expr[0:Match.end()]))(True, self._Depth+1)
                return self._Token
            elif self.__IsIdChar(Ch):
                return self.__GetIdToken()
            elif Ch == '"':
                return self.__GetString()
            elif Ch == '{':
                return self.__GetArray()
            elif Ch == '(' or Ch == ')':
                self._Idx += 1
                self._Token = Ch
                return self._Token
        raise BadExpression(ERR_VALID_TOKEN % Expr)
    # Parse operator
    def _GetOperator(self):
        """Scan the next operator token and return it ('' at end of input).

        Alphabetic script operators (EQ, NE, and, or, ...) are returned via
        __GetIdToken; symbolic operators are matched greedily against
        NonLetterOpLst and validated against the legal operator list.
        """
        self.__SkipWS()
        LegalOpLst = ['&&', '||', '!=', '==', '>=', '<='] + self.NonLetterOpLst + ['?', ':']
        self._Token = ''
        Expr = self._Expr[self._Idx:]
        # Reach end of expression
        if not Expr:
            return ''
        # Script operator: LT, GT, LE, GE, EQ, NE, and, or, xor, not
        if Expr[0].isalpha():
            return self.__GetIdToken(True)
        # Start to get regular operator: +, -, <, > ...
        if Expr[0] not in self.NonLetterOpLst:
            return ''
        OpToken = ''
        for Ch in Expr:
            if Ch in self.NonLetterOpLst:
                if Ch in ['!', '~'] and OpToken:
                    # Unary operators never extend a multi-char operator.
                    break
                self._Idx += 1
                OpToken += Ch
            else:
                break
        if OpToken not in LegalOpLst:
            raise BadExpression(ERR_OPERATOR_UNSUPPORT % OpToken)
        self._Token = OpToken
        return OpToken
class ValueExpressionEx(ValueExpression):
    """PCD-type-aware value evaluator built on ValueExpression.

    Understands numeric PCD types (UINT8/16/32/64), VOID* strings, byte
    arrays containing UINTxx()/GUID()/DEVICE_PATH() fields, and
    LABEL()/OFFSET_OF() markers inside array values.
    """
    def __init__(self, PcdValue, PcdType, SymbolTable={}):
        # NOTE(review): the shared mutable default SymbolTable mirrors the
        # base class signature; callers are expected not to mutate it.
        ValueExpression.__init__(self, PcdValue, SymbolTable)
        self.PcdValue = PcdValue
        self.PcdType = PcdType
    def __call__(self, RealValue=False, Depth=0):
        """Evaluate the PCD value.

        Returns the normalized value string when RealValue is True; when
        RealValue is False the method falls through and returns None (see the
        final 'if RealValue' with no else).
        """
        PcdValue = self.PcdValue
        if "{CODE(" not in PcdValue:
            try:
                # First try the generic expression evaluator.
                PcdValue = ValueExpression.__call__(self, RealValue, Depth)
                if self.PcdType == TAB_VOID and (PcdValue.startswith("'") or PcdValue.startswith("L'")):
                    # Character literal for a VOID* PCD: expand it into a
                    # '{0xAA,0xBB,...}' byte-array string, LSB first.
                    PcdValue, Size = ParseFieldValue(PcdValue)
                    PcdValueList = []
                    for I in range(Size):
                        PcdValueList.append('0x%02X'%(PcdValue & 0xff))
                        PcdValue = PcdValue >> 8
                    PcdValue = '{' + ','.join(PcdValueList) + '}'
                elif self.PcdType in TAB_PCD_NUMERIC_TYPES and (PcdValue.startswith("'") or \
                        PcdValue.startswith('"') or PcdValue.startswith("L'") or PcdValue.startswith('L"') or PcdValue.startswith('{')):
                    # String/array syntax is not a plain number; force the
                    # type-aware fallback below.
                    raise BadExpression
            except WrnExpression as Value:
                PcdValue = Value.result
            except BadExpression as Value:
                # Fall back to PCD-type-aware parsing.
                if self.PcdType in TAB_PCD_NUMERIC_TYPES:
                    PcdValue = PcdValue.strip()
                    if PcdValue.startswith('{') and PcdValue.endswith('}'):
                        PcdValue = SplitPcdValueString(PcdValue[1:-1])
                    if isinstance(PcdValue, type([])):
                        # Byte-array form of a numeric PCD: accumulate the
                        # items into one integer, LSB first.
                        TmpValue = 0
                        Size = 0
                        ValueType = ''
                        for Item in PcdValue:
                            Item = Item.strip()
                            if Item.startswith(TAB_UINT8):
                                ItemSize = 1
                                ValueType = TAB_UINT8
                            elif Item.startswith(TAB_UINT16):
                                ItemSize = 2
                                ValueType = TAB_UINT16
                            elif Item.startswith(TAB_UINT32):
                                ItemSize = 4
                                ValueType = TAB_UINT32
                            elif Item.startswith(TAB_UINT64):
                                ItemSize = 8
                                ValueType = TAB_UINT64
                            elif Item[0] in {'"', "'", 'L'}:
                                ItemSize = 0
                                ValueType = TAB_VOID
                            else:
                                ItemSize = 0
                                ValueType = TAB_UINT8
                            # Evaluate the item recursively with its own type.
                            Item = ValueExpressionEx(Item, ValueType, self._Symb)(True)
                            if ItemSize == 0:
                                try:
                                    tmpValue = int(Item, 0)
                                    if tmpValue > 255:
                                        raise BadExpression("Byte array number %s should less than 0xFF." % Item)
                                except BadExpression as Value:
                                    raise BadExpression(Value)
                                except ValueError:
                                    pass
                                ItemValue, ItemSize = ParseFieldValue(Item)
                            else:
                                ItemValue = ParseFieldValue(Item)[0]
                            if isinstance(ItemValue, type('')):
                                ItemValue = int(ItemValue, 0)
                            TmpValue = (ItemValue << (Size * 8)) | TmpValue
                            Size = Size + ItemSize
                    else:
                        try:
                            TmpValue, Size = ParseFieldValue(PcdValue)
                        except BadExpression as Value:
                            raise BadExpression("Type: %s, Value: %s, %s" % (self.PcdType, PcdValue, Value))
                    if isinstance(TmpValue, type('')):
                        try:
                            TmpValue = int(TmpValue)
                        except:
                            raise BadExpression(Value)
                    else:
                        PcdValue = '0x%0{}X'.format(Size) % (TmpValue)
                    # Range/size validation for the numeric PCD types.
                    if TmpValue < 0:
                        raise BadExpression('Type %s PCD Value is negative' % self.PcdType)
                    if self.PcdType == TAB_UINT8 and Size > 1:
                        raise BadExpression('Type %s PCD Value Size is Larger than 1 byte' % self.PcdType)
                    if self.PcdType == TAB_UINT16 and Size > 2:
                        raise BadExpression('Type %s PCD Value Size is Larger than 2 byte' % self.PcdType)
                    if self.PcdType == TAB_UINT32 and Size > 4:
                        raise BadExpression('Type %s PCD Value Size is Larger than 4 byte' % self.PcdType)
                    if self.PcdType == TAB_UINT64 and Size > 8:
                        raise BadExpression('Type %s PCD Value Size is Larger than 8 byte' % self.PcdType)
                else:
                    try:
                        # A plain integer for a non-numeric PCD becomes a
                        # minimal little-endian byte-array string.
                        TmpValue = int(PcdValue)
                        TmpList = []
                        if TmpValue.bit_length() == 0:
                            PcdValue = '{0x00}'
                        else:
                            for I in range((TmpValue.bit_length() + 7) // 8):
                                TmpList.append('0x%02x' % ((TmpValue >> I * 8) & 0xff))
                            PcdValue = '{' + ', '.join(TmpList) + '}'
                    except:
                        if PcdValue.strip().startswith('{'):
                            PcdValueList = SplitPcdValueString(PcdValue.strip()[1:-1])
                            LabelDict = {}
                            NewPcdValueList = []
                            LabelOffset = 0
                            for Item in PcdValueList:
                                # compute byte offset of every LABEL
                                LabelList = _ReLabel.findall(Item)
                                Item = _ReLabel.sub('', Item)
                                Item = Item.strip()
                                if LabelList:
                                    for Label in LabelList:
                                        if not IsValidCName(Label):
                                            raise BadExpression('%s is not a valid c variable name' % Label)
                                        if Label not in LabelDict:
                                            LabelDict[Label] = str(LabelOffset)
                                if Item.startswith(TAB_UINT8):
                                    LabelOffset = LabelOffset + 1
                                elif Item.startswith(TAB_UINT16):
                                    LabelOffset = LabelOffset + 2
                                elif Item.startswith(TAB_UINT32):
                                    LabelOffset = LabelOffset + 4
                                elif Item.startswith(TAB_UINT64):
                                    LabelOffset = LabelOffset + 8
                                else:
                                    try:
                                        ItemValue, ItemSize = ParseFieldValue(Item)
                                        LabelOffset = LabelOffset + ItemSize
                                    except:
                                        LabelOffset = LabelOffset + 1
                            for Item in PcdValueList:
                                # for LABEL parse
                                Item = Item.strip()
                                try:
                                    Item = _ReLabel.sub('', Item)
                                except:
                                    pass
                                try:
                                    OffsetList = _ReOffset.findall(Item)
                                except:
                                    pass
                                # replace each offset, except errors
                                for Offset in OffsetList:
                                    try:
                                        Item = Item.replace('OFFSET_OF({})'.format(Offset), LabelDict[Offset])
                                    except:
                                        raise BadExpression('%s not defined' % Offset)
                                NewPcdValueList.append(Item)
                            AllPcdValueList = []
                            for Item in NewPcdValueList:
                                Size = 0
                                ValueStr = ''
                                TokenSpaceGuidName = ''
                                if Item.startswith(TAB_GUID) and Item.endswith(')'):
                                    try:
                                        TokenSpaceGuidName = re.search('GUID\((\w+)\)', Item).group(1)
                                    except:
                                        pass
                                    if TokenSpaceGuidName and TokenSpaceGuidName in self._Symb:
                                        Item = 'GUID(' + self._Symb[TokenSpaceGuidName] + ')'
                                    elif TokenSpaceGuidName:
                                        raise BadExpression('%s not found in DEC file' % TokenSpaceGuidName)
                                    Item, Size = ParseFieldValue(Item)
                                    for Index in range(0, Size):
                                        ValueStr = '0x%02X' % (int(Item) & 255)
                                        Item >>= 8
                                        AllPcdValueList.append(ValueStr)
                                    continue
                                elif Item.startswith('DEVICE_PATH') and Item.endswith(')'):
                                    Item, Size = ParseFieldValue(Item)
                                    AllPcdValueList.append(Item[1:-1])
                                    continue
                                else:
                                    ValueType = ""
                                    if Item.startswith(TAB_UINT8):
                                        ItemSize = 1
                                        ValueType = TAB_UINT8
                                    elif Item.startswith(TAB_UINT16):
                                        ItemSize = 2
                                        ValueType = TAB_UINT16
                                    elif Item.startswith(TAB_UINT32):
                                        ItemSize = 4
                                        ValueType = TAB_UINT32
                                    elif Item.startswith(TAB_UINT64):
                                        ItemSize = 8
                                        ValueType = TAB_UINT64
                                    else:
                                        ItemSize = 0
                                    if ValueType:
                                        TmpValue = ValueExpressionEx(Item, ValueType, self._Symb)(True)
                                    else:
                                        TmpValue = ValueExpressionEx(Item, self.PcdType, self._Symb)(True)
                                    Item = '0x%x' % TmpValue if not isinstance(TmpValue, type('')) else TmpValue
                                    if ItemSize == 0:
                                        ItemValue, ItemSize = ParseFieldValue(Item)
                                        if Item[0] not in {'"', 'L', '{'} and ItemSize > 1:
                                            raise BadExpression("Byte array number %s should less than 0xFF." % Item)
                                    else:
                                        ItemValue = ParseFieldValue(Item)[0]
                                    for I in range(0, ItemSize):
                                        ValueStr = '0x%02X' % (int(ItemValue) & 255)
                                        ItemValue >>= 8
                                        AllPcdValueList.append(ValueStr)
                                    Size += ItemSize
                            if Size > 0:
                                PcdValue = '{' + ', '.join(AllPcdValueList) + '}'
                        else:
                            raise BadExpression("Type: %s, Value: %s, %s"%(self.PcdType, PcdValue, Value))
        # Normalize boolean results to '1'/'0' strings.
        if PcdValue == 'True':
            PcdValue = '1'
        if PcdValue == 'False':
            PcdValue = '0'
        if RealValue:
            return PcdValue
if __name__ == '__main__':
    # Interactive helper for manually exercising expression evaluation.
    # Bug fix: raw_input() does not exist on Python 3 (this file already uses
    # the print() function); use the input() builtin instead, and avoid
    # shadowing it with a local variable named 'input'.
    while True:
        ExprStr = input('Input expr: ')
        # An empty line, 'q' or 'Q' quits the loop ('' in 'qQ' is True).
        if ExprStr in 'qQ':
            break
        try:
            print(ValueExpression(ExprStr)(True))
            print(ValueExpression(ExprStr)(False))
        except WrnExpression as Ex:
            print(Ex.result)
            print(str(Ex))
        except Exception as Ex:
            print(str(Ex))
| edk2-master | BaseTools/Source/Python/Common/Expression.py |
## @file
# Override built in function file.open to provide support for long file path
#
# Copyright (c) 2014 - 2015, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
import os
import platform
import shutil
import codecs
##
# LongFilePath
# Convert a file path to a Windows long file path (\\?\ form)
#
def LongFilePath(FileName):
    """Normalize *FileName*; on Windows, prefix it so the Win32 API accepts
    paths longer than MAX_PATH.  On other platforms only normalization is
    performed, and relative paths are always returned unprefixed."""
    FileName = os.path.normpath(FileName)
    if platform.system() != 'Windows':
        return FileName
    if FileName.startswith('\\\\?\\'):
        # Already in long-path form.
        return FileName
    if FileName.startswith('\\\\'):
        # UNC path: \\server\share -> \\?\UNC\server\share
        return '\\\\?\\UNC\\' + FileName[2:]
    if os.path.isabs(FileName):
        return '\\\\?\\' + FileName
    # Relative paths cannot carry the long-path prefix.
    return FileName
##
# OpenLongFilePath
# open() wrapper that accepts long file paths
#
def OpenLongFilePath(FileName, Mode='r', Buffer=-1):
    """Open *FileName* (converted to a long path) and return the file object."""
    SafePath = LongFilePath(FileName)
    return open(SafePath, Mode, Buffer)
def CodecOpenLongFilePath(Filename, Mode='rb', Encoding=None, Errors='strict', Buffering=1):
    """codecs.open() wrapper that accepts long file paths."""
    SafePath = LongFilePath(Filename)
    return codecs.open(SafePath, Mode, Encoding, Errors, Buffering)
##
# CopyLongFilePath
# copyfile-style helper that accepts long file paths
#
def CopyLongFilePath(src, dst):
    """Copy the contents of *src* to *dst*, converting both to long paths."""
    with open(LongFilePath(src), 'rb') as SrcFd, open(LongFilePath(dst), 'wb') as DstFd:
        shutil.copyfileobj(SrcFd, DstFd)
| edk2-master | BaseTools/Source/Python/Common/LongFilePathSupport.py |
# # @file
#
# This file is used to handle the variable attributes and property information
#
#
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
class VariableAttributes(object):
    """Helpers to translate UEFI variable attribute strings (e.g. "NV,BS,RT")
    into the EFI_VARIABLE_* attribute bitmask and the VarCheck property
    bitmask, and to validate such strings."""
    EFI_VARIABLE_NON_VOLATILE = 0x00000001
    EFI_VARIABLE_BOOTSERVICE_ACCESS = 0x00000002
    EFI_VARIABLE_RUNTIME_ACCESS = 0x00000004
    VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY = 0x00000001
    VarAttributesMap = {
        "NV":EFI_VARIABLE_NON_VOLATILE,
        "BS":EFI_VARIABLE_BOOTSERVICE_ACCESS,
        "RT":EFI_VARIABLE_RUNTIME_ACCESS,
        "RO":VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
        }
    def __init__(self):
        pass
    @staticmethod
    def GetVarAttributes(var_attr_str):
        """Return (attribute_bitmask, property_bitmask) for *var_attr_str*.

        "RO" sets the read-only property mask; every other known name ORs
        its bit into the attribute mask; unknown names contribute nothing.
        """
        VarAttr = 0x00000000
        VarProp = 0x00000000
        attr_list = var_attr_str.split(",")
        for attr in attr_list:
            attr = attr.strip()
            if attr == 'RO':
                VarProp = VariableAttributes.VAR_CHECK_VARIABLE_PROPERTY_READ_ONLY
            else:
                VarAttr = VarAttr | VariableAttributes.VarAttributesMap.get(attr, 0x00000000)
        return VarAttr, VarProp
    @staticmethod
    def ValidateVarAttributes(var_attr_str):
        """Validate *var_attr_str*.

        Returns (True, "") on success or (False, error_message).  Rules:
        every attribute must be one of NV/BS/RT/RO, and RT requires BS.
        """
        if not var_attr_str:
            return True, ""
        attr_list = var_attr_str.split(",")
        attr_temp = []
        for attr in attr_list:
            attr = attr.strip()
            attr_temp.append(attr)
            if attr not in VariableAttributes.VarAttributesMap:
                # Bug fix: interpolate the offending attribute into the
                # message (the %s placeholder was previously never filled in).
                return False, "The variable attribute %s is not support to be specified in dsc file. Supported variable attribute are ['BS','NV','RT','RO'] " % attr
        if 'RT' in attr_temp and 'BS' not in attr_temp:
            return False, "the RT attribute need the BS attribute to be present"
        return True, ""
| edk2-master | BaseTools/Source/Python/Common/VariableAttributes.py |
## @file
# Override built in module os.path to provide support for long file path
#
# Copyright (c) 2014, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
import os
from Common.LongFilePathSupport import LongFilePath
def isfile(path):
    """Long-path-aware wrapper around os.path.isfile."""
    long_path = LongFilePath(path)
    return os.path.isfile(long_path)
def isdir(path):
    """Long-path-aware wrapper around os.path.isdir."""
    long_path = LongFilePath(path)
    return os.path.isdir(long_path)
def exists(path):
    """Long-path-aware wrapper around os.path.exists."""
    long_path = LongFilePath(path)
    return os.path.exists(long_path)
def getsize(filename):
    """Long-path-aware wrapper around os.path.getsize."""
    long_path = LongFilePath(filename)
    return os.path.getsize(long_path)
def getmtime(filename):
    """Long-path-aware wrapper around os.path.getmtime."""
    long_path = LongFilePath(filename)
    return os.path.getmtime(long_path)
def getatime(filename):
    """Long-path-aware wrapper around os.path.getatime."""
    long_path = LongFilePath(filename)
    return os.path.getatime(long_path)
def getctime(filename):
    """Long-path-aware wrapper around os.path.getctime."""
    long_path = LongFilePath(filename)
    return os.path.getctime(long_path)
# The remaining os.path members are re-exported unchanged (without long-path
# wrapping); note that islink/realpath therefore operate on the raw path.
join = os.path.join
splitext = os.path.splitext
splitdrive = os.path.splitdrive
split = os.path.split
abspath = os.path.abspath
basename = os.path.basename
commonprefix = os.path.commonprefix
sep = os.path.sep
normpath = os.path.normpath
normcase = os.path.normcase
dirname = os.path.dirname
islink = os.path.islink
isabs = os.path.isabs
realpath = os.path.realpath
relpath = os.path.relpath
pardir = os.path.pardir
| edk2-master | BaseTools/Source/Python/Common/LongFilePathOsPath.py |
## @file
# Python 'Common.Uefi' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Common/Uefi/__init__.py |
## @file
# Module that encodes and decodes a EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER with
# a payload.
#
# Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
FmpCapsuleHeader
'''
import struct
import uuid
class FmpCapsuleImageHeaderClass (object):
    """Encoder/decoder for one EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER
    together with its trailing payload and optional vendor-code bytes."""
    # typedef struct {
    #   UINT32   Version;
    #
    #   ///
    #   /// Used to identify device firmware targeted by this update. This guid is matched by
    #   /// system firmware against ImageTypeId field within a EFI_FIRMWARE_IMAGE_DESCRIPTOR
    #   ///
    #   EFI_GUID UpdateImageTypeId;
    #
    #   ///
    #   /// Passed as ImageIndex in call to EFI_FIRMWARE_MANAGEMENT_PROTOCOL.SetImage ()
    #   ///
    #   UINT8    UpdateImageIndex;
    #   UINT8    reserved_bytes[3];
    #
    #   ///
    #   /// Size of the binary update image which immediately follows this structure
    #   ///
    #   UINT32   UpdateImageSize;
    #
    #   ///
    #   /// Size of the VendorCode bytes which optionally immediately follow binary update image in the capsule
    #   ///
    #   UINT32   UpdateVendorCodeSize;
    #
    #   ///
    #   /// The HardwareInstance to target with this update. If value is zero it means match all
    #   /// HardwareInstances. This field allows update software to target only a single device in
    #   /// cases where there are more than one device with the same ImageTypeId GUID.
    #   /// This header is outside the signed data of the Authentication Info structure and
    #   /// therefore can be modified without changing the Auth data.
    #   ///
    #   UINT64   UpdateHardwareInstance;
    #
    #   ///
    #   /// Bits which indicate authentication and depex information for the image that follows this structure
    #   ///
    #   UINT64   ImageCapsuleSupport
    # } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;
    #
    # #define EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION 0x00000003
    _StructFormat = '<I16sB3BIIQQ'
    _StructSize = struct.calcsize (_StructFormat)
    EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION = 0x00000003
    def __init__ (self):
        # _Valid becomes True only after a successful Encode()/Decode().
        self._Valid = False
        self.Version = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION
        self.UpdateImageTypeId = uuid.UUID ('00000000-0000-0000-0000-000000000000')
        self.UpdateImageIndex = 0
        self.UpdateImageSize = 0
        self.UpdateVendorCodeSize = 0
        self.UpdateHardwareInstance = 0x0000000000000000
        self.ImageCapsuleSupport = 0x0000000000000000
        self.Payload = b''
        self.VendorCodeBytes = b''
    def Encode (self):
        """Serialize the image header followed by Payload and VendorCodeBytes;
        the size fields are derived from the current buffer lengths."""
        self.UpdateImageSize = len (self.Payload)
        self.UpdateVendorCodeSize = len (self.VendorCodeBytes)
        FmpCapsuleImageHeader = struct.pack (
                                     self._StructFormat,
                                     self.Version,
                                     self.UpdateImageTypeId.bytes_le,
                                     self.UpdateImageIndex,
                                     0,0,0,
                                     self.UpdateImageSize,
                                     self.UpdateVendorCodeSize,
                                     self.UpdateHardwareInstance,
                                     self.ImageCapsuleSupport
                                     )
        self._Valid = True
        return FmpCapsuleImageHeader + self.Payload + self.VendorCodeBytes
    def Decode (self, Buffer):
        """Parse *Buffer* into this object; returns everything after the fixed
        header.  Raises ValueError on a short buffer, an unsupported version,
        an image index < 1, or inconsistent size fields."""
        if len (Buffer) < self._StructSize:
            raise ValueError
        # r0..r2 are the three reserved bytes and are ignored.
        (Version, UpdateImageTypeId, UpdateImageIndex, r0, r1, r2, UpdateImageSize, UpdateVendorCodeSize, UpdateHardwareInstance, ImageCapsuleSupport) = \
            struct.unpack (
                     self._StructFormat,
                     Buffer[0:self._StructSize]
                     )
        if Version < self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER_INIT_VERSION:
            raise ValueError
        if UpdateImageIndex < 1:
            raise ValueError
        if UpdateImageSize + UpdateVendorCodeSize != len (Buffer[self._StructSize:]):
            raise ValueError
        self.Version = Version
        self.UpdateImageTypeId = uuid.UUID (bytes_le = UpdateImageTypeId)
        self.UpdateImageIndex = UpdateImageIndex
        self.UpdateImageSize = UpdateImageSize
        self.UpdateVendorCodeSize = UpdateVendorCodeSize
        self.UpdateHardwareInstance = UpdateHardwareInstance
        self.ImageCapsuleSupport = ImageCapsuleSupport
        self.Payload = Buffer[self._StructSize:self._StructSize + UpdateImageSize]
        self.VendorCodeBytes = Buffer[self._StructSize + UpdateImageSize:]
        self._Valid = True
        return Buffer[self._StructSize:]
    def DumpInfo (self):
        """Print the decoded/encoded header fields; requires a prior
        successful Encode() or Decode()."""
        if not self._Valid:
            raise ValueError
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.Version                = {Version:08X}'.format (Version = self.Version))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageTypeId      = {UpdateImageTypeId}'.format (UpdateImageTypeId = str(self.UpdateImageTypeId).upper()))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageIndex       = {UpdateImageIndex:08X}'.format (UpdateImageIndex = self.UpdateImageIndex))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateImageSize        = {UpdateImageSize:08X}'.format (UpdateImageSize = self.UpdateImageSize))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateVendorCodeSize   = {UpdateVendorCodeSize:08X}'.format (UpdateVendorCodeSize = self.UpdateVendorCodeSize))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.UpdateHardwareInstance = {UpdateHardwareInstance:016X}'.format (UpdateHardwareInstance = self.UpdateHardwareInstance))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER.ImageCapsuleSupport    = {ImageCapsuleSupport:016X}'.format (ImageCapsuleSupport = self.ImageCapsuleSupport))
        print ('sizeof (Payload)                                                    = {Size:08X}'.format (Size = len (self.Payload)))
        print ('sizeof (VendorCodeBytes)                                            = {Size:08X}'.format (Size = len (self.VendorCodeBytes)))
class FmpCapsuleHeaderClass (object):
    """Encoder/decoder for EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER with its
    item offset table, embedded driver images and FMP payload images."""
    # typedef struct {
    #   UINT32 Version;
    #
    #   ///
    #   /// The number of drivers included in the capsule and the number of corresponding
    #   /// offsets stored in ItemOffsetList array.
    #   ///
    #   UINT16 EmbeddedDriverCount;
    #
    #   ///
    #   /// The number of payload items included in the capsule and the number of
    #   /// corresponding offsets stored in the ItemOffsetList array.
    #   ///
    #   UINT16 PayloadItemCount;
    #
    #   ///
    #   /// Variable length array of dimension [EmbeddedDriverCount + PayloadItemCount]
    #   /// containing offsets of each of the drivers and payload items contained within the capsule
    #   ///
    #   // UINT64 ItemOffsetList[];
    # } EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER;
    #
    # #define EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION 0x00000001
    _StructFormat = '<IHH'
    _StructSize = struct.calcsize (_StructFormat)
    _ItemOffsetFormat = '<Q'
    _ItemOffsetSize = struct.calcsize (_ItemOffsetFormat)
    EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION = 0x00000001
    CAPSULE_SUPPORT_AUTHENTICATION = 0x0000000000000001
    CAPSULE_SUPPORT_DEPENDENCY = 0x0000000000000002
    def __init__ (self):
        self._Valid = False
        self.Version = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION
        self.EmbeddedDriverCount = 0
        self.PayloadItemCount = 0
        self._ItemOffsetList = []
        self._EmbeddedDriverList = []
        self._PayloadList = []
        self._FmpCapsuleImageHeaderList = []
    def AddEmbeddedDriver (self, EmbeddedDriver):
        """Append a raw embedded driver image (bytes) to the capsule."""
        self._EmbeddedDriverList.append (EmbeddedDriver)
    def GetEmbeddedDriver (self, Index):
        """Return the embedded driver bytes at *Index*; ValueError if out of range."""
        # Bug fix: bound check was 'Index > len(...)', which let Index == len
        # fall through to an IndexError instead of the intended ValueError
        # (GetFmpCapsuleImageHeader already used >=).
        if Index >= len (self._EmbeddedDriverList):
            raise ValueError
        return self._EmbeddedDriverList[Index]
    def AddPayload (self, UpdateImageTypeId, Payload = b'', VendorCodeBytes = b'', HardwareInstance = 0, UpdateImageIndex = 1, CapsuleSupport = 0):
        """Queue one FMP payload item to be emitted by Encode()."""
        self._PayloadList.append ((UpdateImageTypeId, Payload, VendorCodeBytes, HardwareInstance, UpdateImageIndex, CapsuleSupport))
    def GetFmpCapsuleImageHeader (self, Index):
        """Return the FmpCapsuleImageHeaderClass at *Index*; ValueError if out of range."""
        if Index >= len (self._FmpCapsuleImageHeaderList):
            raise ValueError
        return self._FmpCapsuleImageHeaderList[Index]
    def Encode (self):
        """Serialize the capsule header, offset table, embedded drivers and
        payload images; returns the complete byte string.

        Safe to call more than once: derived state is rebuilt on each call.
        """
        self.EmbeddedDriverCount = len (self._EmbeddedDriverList)
        self.PayloadItemCount = len (self._PayloadList)
        #
        # Bug fix: reset state derived by a previous Encode()/Decode() so a
        # second Encode() does not accumulate stale offsets and headers and
        # emit a corrupt offset table.
        #
        self._ItemOffsetList = []
        self._FmpCapsuleImageHeaderList = []
        FmpCapsuleHeader = struct.pack (
                                 self._StructFormat,
                                 self.Version,
                                 self.EmbeddedDriverCount,
                                 self.PayloadItemCount
                                 )
        FmpCapsuleData = b''
        # First item begins right after the fixed header and the offset table.
        Offset = self._StructSize + (self.EmbeddedDriverCount + self.PayloadItemCount) * self._ItemOffsetSize
        for EmbeddedDriver in self._EmbeddedDriverList:
            FmpCapsuleData = FmpCapsuleData + EmbeddedDriver
            self._ItemOffsetList.append (Offset)
            Offset = Offset + len (EmbeddedDriver)
        for (UpdateImageTypeId, Payload, VendorCodeBytes, HardwareInstance, UpdateImageIndex, CapsuleSupport) in self._PayloadList:
            FmpCapsuleImageHeader = FmpCapsuleImageHeaderClass ()
            FmpCapsuleImageHeader.UpdateImageTypeId = UpdateImageTypeId
            FmpCapsuleImageHeader.UpdateImageIndex = UpdateImageIndex
            FmpCapsuleImageHeader.Payload = Payload
            FmpCapsuleImageHeader.VendorCodeBytes = VendorCodeBytes
            FmpCapsuleImageHeader.UpdateHardwareInstance = HardwareInstance
            FmpCapsuleImageHeader.ImageCapsuleSupport = CapsuleSupport
            FmpCapsuleImage = FmpCapsuleImageHeader.Encode ()
            FmpCapsuleData = FmpCapsuleData + FmpCapsuleImage
            self._ItemOffsetList.append (Offset)
            self._FmpCapsuleImageHeaderList.append (FmpCapsuleImageHeader)
            Offset = Offset + len (FmpCapsuleImage)
        for Offset in self._ItemOffsetList:
            FmpCapsuleHeader = FmpCapsuleHeader + struct.pack (self._ItemOffsetFormat, Offset)
        self._Valid = True
        return FmpCapsuleHeader + FmpCapsuleData
    def Decode (self, Buffer):
        """Parse *Buffer* into this object; returns the bytes that follow the
        offset table.  Raises ValueError on a short buffer, an unsupported
        version, or an offset past the end of the buffer."""
        if len (Buffer) < self._StructSize:
            raise ValueError
        (Version, EmbeddedDriverCount, PayloadItemCount) = \
            struct.unpack (
                     self._StructFormat,
                     Buffer[0:self._StructSize]
                     )
        if Version < self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER_INIT_VERSION:
            raise ValueError
        self.Version = Version
        self.EmbeddedDriverCount = EmbeddedDriverCount
        self.PayloadItemCount = PayloadItemCount
        self._ItemOffsetList = []
        self._EmbeddedDriverList = []
        self._PayloadList = []
        self._FmpCapsuleImageHeaderList = []
        #
        # Parse the ItemOffsetList values
        #
        Offset = self._StructSize
        for Index in range (0, EmbeddedDriverCount + PayloadItemCount):
            ItemOffset = struct.unpack (self._ItemOffsetFormat, Buffer[Offset:Offset + self._ItemOffsetSize])[0]
            if ItemOffset >= len (Buffer):
                raise ValueError
            self._ItemOffsetList.append (ItemOffset)
            Offset = Offset + self._ItemOffsetSize
        Result = Buffer[Offset:]
        #
        # Parse the EmbeddedDrivers
        #
        for Index in range (0, EmbeddedDriverCount):
            Offset = self._ItemOffsetList[Index]
            if Index < (len (self._ItemOffsetList) - 1):
                Length = self._ItemOffsetList[Index + 1] - Offset
            else:
                Length = len (Buffer) - Offset
            self.AddEmbeddedDriver (Buffer[Offset:Offset + Length])
        #
        # Parse the Payloads that are FMP Capsule Images
        #
        for Index in range (EmbeddedDriverCount, EmbeddedDriverCount + PayloadItemCount):
            Offset = self._ItemOffsetList[Index]
            if Index < (len (self._ItemOffsetList) - 1):
                Length = self._ItemOffsetList[Index + 1] - Offset
            else:
                Length = len (Buffer) - Offset
            FmpCapsuleImageHeader = FmpCapsuleImageHeaderClass ()
            FmpCapsuleImageHeader.Decode (Buffer[Offset:Offset + Length])
            # Bug fix: carry every decoded field into the payload list so that
            # a Decode() followed by Encode() round-trips the capsule (the
            # hardware instance, image index and capsule-support flags were
            # previously dropped here and reverted to their defaults).
            self.AddPayload (
                FmpCapsuleImageHeader.UpdateImageTypeId,
                FmpCapsuleImageHeader.Payload,
                FmpCapsuleImageHeader.VendorCodeBytes,
                FmpCapsuleImageHeader.UpdateHardwareInstance,
                FmpCapsuleImageHeader.UpdateImageIndex,
                FmpCapsuleImageHeader.ImageCapsuleSupport
                )
            self._FmpCapsuleImageHeaderList.append (FmpCapsuleImageHeader)
        self._Valid = True
        return Result
    def DumpInfo (self):
        """Print the capsule header, offsets, drivers and payload headers;
        requires a prior successful Encode() or Decode()."""
        if not self._Valid:
            raise ValueError
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.Version             = {Version:08X}'.format (Version = self.Version))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.EmbeddedDriverCount = {EmbeddedDriverCount:08X}'.format (EmbeddedDriverCount = self.EmbeddedDriverCount))
        for EmbeddedDriver in self._EmbeddedDriverList:
            print ('  sizeof (EmbeddedDriver)                                  = {Size:08X}'.format (Size = len (EmbeddedDriver)))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.PayloadItemCount    = {PayloadItemCount:08X}'.format (PayloadItemCount = self.PayloadItemCount))
        print ('EFI_FIRMWARE_MANAGEMENT_CAPSULE_HEADER.ItemOffsetList      = ')
        for Offset in self._ItemOffsetList:
            print ('  {Offset:016X}'.format (Offset = Offset))
        for FmpCapsuleImageHeader in self._FmpCapsuleImageHeaderList:
            FmpCapsuleImageHeader.DumpInfo ()
| edk2-master | BaseTools/Source/Python/Common/Uefi/Capsule/FmpCapsuleHeader.py |
## @file
# Module that encodes and decodes a EFI_FIRMWARE_IMAGE_AUTHENTICATION with
# certificate data and payload data.
#
# Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
FmpAuthHeader
'''
import struct
import uuid
class FmpAuthHeaderClass (object):
# ///
# /// Image Attribute -Authentication Required
# ///
# typedef struct {
# ///
# /// It is included in the signature of AuthInfo. It is used to ensure freshness/no replay.
# /// It is incremented during each firmware image operation.
# ///
# UINT64 MonotonicCount;
# ///
# /// Provides the authorization for the firmware image operations. It is a signature across
# /// the image data and the Monotonic Count value. Caller uses the private key that is
# /// associated with a public key that has been provisioned via the key exchange.
# /// Because this is defined as a signature, WIN_CERTIFICATE_UEFI_GUID.CertType must
# /// be EFI_CERT_TYPE_PKCS7_GUID.
# ///
# WIN_CERTIFICATE_UEFI_GUID AuthInfo;
# } EFI_FIRMWARE_IMAGE_AUTHENTICATION;
#
# ///
# /// Certificate which encapsulates a GUID-specific digital signature
# ///
# typedef struct {
# ///
# /// This is the standard WIN_CERTIFICATE header, where
# /// wCertificateType is set to WIN_CERT_TYPE_EFI_GUID.
# ///
# WIN_CERTIFICATE Hdr;
# ///
# /// This is the unique id which determines the
# /// format of the CertData. .
# ///
# EFI_GUID CertType;
# ///
# /// The following is the certificate data. The format of
# /// the data is determined by the CertType.
# /// If CertType is EFI_CERT_TYPE_RSA2048_SHA256_GUID,
# /// the CertData will be EFI_CERT_BLOCK_RSA_2048_SHA256 structure.
# ///
# UINT8 CertData[1];
# } WIN_CERTIFICATE_UEFI_GUID;
#
# ///
# /// The WIN_CERTIFICATE structure is part of the PE/COFF specification.
# ///
# typedef struct {
# ///
# /// The length of the entire certificate,
# /// including the length of the header, in bytes.
# ///
# UINT32 dwLength;
# ///
# /// The revision level of the WIN_CERTIFICATE
# /// structure. The current revision level is 0x0200.
# ///
# UINT16 wRevision;
# ///
# /// The certificate type. See WIN_CERT_TYPE_xxx for the UEFI
# /// certificate types. The UEFI specification reserves the range of
# /// certificate type values from 0x0EF0 to 0x0EFF.
# ///
# UINT16 wCertificateType;
# ///
# /// The following is the actual certificate. The format of
# /// the certificate depends on wCertificateType.
# ///
# /// UINT8 bCertificate[ANYSIZE_ARRAY];
# ///
# } WIN_CERTIFICATE;
#
# #define WIN_CERT_TYPE_EFI_GUID 0x0EF1
#
# ///
# /// This identifies a signature containing a DER-encoded PKCS #7 version 1.5 [RFC2315]
# /// SignedData value.
# ///
# #define EFI_CERT_TYPE_PKCS7_GUID \
# { \
# 0x4aafd29d, 0x68df, 0x49ee, {0x8a, 0xa9, 0x34, 0x7d, 0x37, 0x56, 0x65, 0xa7} \
# }
_StructFormat = '<QIHH16s'
_StructSize = struct.calcsize (_StructFormat)
_MonotonicCountFormat = '<Q'
_MonotonicCountSize = struct.calcsize (_MonotonicCountFormat)
_StructAuthInfoFormat = '<IHH16s'
_StructAuthInfoSize = struct.calcsize (_StructAuthInfoFormat)
_WIN_CERT_REVISION = 0x0200
_WIN_CERT_TYPE_EFI_GUID = 0x0EF1
_EFI_CERT_TYPE_PKCS7_GUID = uuid.UUID ('4aafd29d-68df-49ee-8aa9-347d375665a7')
def __init__ (self):
self._Valid = False
self.MonotonicCount = 0
self.dwLength = self._StructAuthInfoSize
self.wRevision = self._WIN_CERT_REVISION
self.wCertificateType = self._WIN_CERT_TYPE_EFI_GUID
self.CertType = self._EFI_CERT_TYPE_PKCS7_GUID
self.CertData = b''
self.Payload = b''
def Encode (self):
if self.wRevision != self._WIN_CERT_REVISION:
raise ValueError
if self.wCertificateType != self._WIN_CERT_TYPE_EFI_GUID:
raise ValueError
if self.CertType != self._EFI_CERT_TYPE_PKCS7_GUID:
raise ValueError
self.dwLength = self._StructAuthInfoSize + len (self.CertData)
FmpAuthHeader = struct.pack (
self._StructFormat,
self.MonotonicCount,
self.dwLength,
self.wRevision,
self.wCertificateType,
self.CertType.bytes_le
)
self._Valid = True
return FmpAuthHeader + self.CertData + self.Payload
def Decode (self, Buffer):
if len (Buffer) < self._StructSize:
raise ValueError
(MonotonicCount, dwLength, wRevision, wCertificateType, CertType) = \
struct.unpack (
self._StructFormat,
Buffer[0:self._StructSize]
)
if dwLength < self._StructAuthInfoSize:
raise ValueError
if wRevision != self._WIN_CERT_REVISION:
raise ValueError
if wCertificateType != self._WIN_CERT_TYPE_EFI_GUID:
raise ValueError
if CertType != self._EFI_CERT_TYPE_PKCS7_GUID.bytes_le:
raise ValueError
self.MonotonicCount = MonotonicCount
self.dwLength = dwLength
self.wRevision = wRevision
self.wCertificateType = wCertificateType
self.CertType = uuid.UUID (bytes_le = CertType)
self.CertData = Buffer[self._StructSize:self._MonotonicCountSize + self.dwLength]
self.Payload = Buffer[self._MonotonicCountSize + self.dwLength:]
self._Valid = True
return self.Payload
def IsSigned (self, Buffer):
if len (Buffer) < self._StructSize:
return False
(MonotonicCount, dwLength, wRevision, wCertificateType, CertType) = \
struct.unpack (
self._StructFormat,
Buffer[0:self._StructSize]
)
if CertType != self._EFI_CERT_TYPE_PKCS7_GUID.bytes_le:
return False
return True
def DumpInfo (self):
    ## Print every header field to stdout in the fixed report format.
    #  Raises ValueError if called before a successful Encode () or Decode ().
    if not self._Valid:
        raise ValueError
    print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.MonotonicCount                = {MonotonicCount:016X}'.format (MonotonicCount = self.MonotonicCount))
    print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.Hdr.dwLength         = {dwLength:08X}'.format (dwLength = self.dwLength))
    print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.Hdr.wRevision        = {wRevision:04X}'.format (wRevision = self.wRevision))
    print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.Hdr.wCertificateType = {wCertificateType:04X}'.format (wCertificateType = self.wCertificateType))
    print ('EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.CertType             = {Guid}'.format (Guid = str(self.CertType).upper()))
    print ('sizeof (EFI_FIRMWARE_IMAGE_AUTHENTICATION.AuthInfo.CertData)    = {Size:08X}'.format (Size = len (self.CertData)))
    print ('sizeof (Payload)                                                = {Size:08X}'.format (Size = len (self.Payload)))
| edk2-master | BaseTools/Source/Python/Common/Uefi/Capsule/FmpAuthHeader.py |
## @file
# Python 'Common.Uefi.Capsule' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Common/Uefi/Capsule/__init__.py |
## @file
# Module that encodes and decodes a EFI_CAPSULE_HEADER with a payload
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
UefiCapsuleHeader
'''
import struct
import uuid
class UefiCapsuleHeaderClass (object):
    ## Encode and decode an EFI_CAPSULE_HEADER followed by an opaque payload.
    #
    # typedef struct {
    #   ///
    #   /// A GUID that defines the contents of a capsule.
    #   ///
    #   EFI_GUID          CapsuleGuid;
    #   ///
    #   /// The size of the capsule header. This may be larger than the size of
    #   /// the EFI_CAPSULE_HEADER since CapsuleGuid may imply
    #   /// extended header entries
    #   ///
    #   UINT32            HeaderSize;
    #   ///
    #   /// Bit-mapped list describing the capsule attributes. The Flag values
    #   /// of 0x0000 - 0xFFFF are defined by CapsuleGuid. Flag values
    #   /// of 0x10000 - 0xFFFFFFFF are defined by this specification
    #   ///
    #   UINT32            Flags;
    #   ///
    #   /// Size in bytes of the capsule.
    #   ///
    #   UINT32            CapsuleImageSize;
    # } EFI_CAPSULE_HEADER;
    #
    # #define CAPSULE_FLAGS_PERSIST_ACROSS_RESET          0x00010000
    # #define CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE         0x00020000
    # #define CAPSULE_FLAGS_INITIATE_RESET                0x00040000
    #
    # NOTE(review): _StructFormat carries one UINT32 beyond the typedef above;
    # Encode () writes it as zero and Decode () ignores it, so the serialized
    # header is 32 bytes.  Presumably reserved/padding -- confirm against the
    # capsule consumers before changing.
    _StructFormat = '<16sIIII'
    _StructSize   = struct.calcsize (_StructFormat)

    EFI_FIRMWARE_MANAGEMENT_CAPSULE_ID_GUID = uuid.UUID ('6DCBD5ED-E82D-4C44-BDA1-7194199AD92A')

    _CAPSULE_FLAGS_PERSIST_ACROSS_RESET  = 0x00010000
    _CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE = 0x00020000
    _CAPSULE_FLAGS_INITIATE_RESET        = 0x00040000

    def __init__ (self):
        ## Start with an FMP capsule GUID, no flags, and an empty payload.
        self._Valid              = False
        self.CapsuleGuid         = self.EFI_FIRMWARE_MANAGEMENT_CAPSULE_ID_GUID
        self.HeaderSize          = self._StructSize
        self.OemFlags            = 0x0000
        self.PersistAcrossReset  = False
        self.PopulateSystemTable = False
        self.InitiateReset       = False
        self.CapsuleImageSize    = self.HeaderSize
        self.Payload             = b''

    def _ComposeFlags (self):
        ## Combine the 16-bit OEM flags with the UEFI-defined capability bits.
        #  Shared by Encode () and DumpInfo () so the two cannot drift apart
        #  (they previously duplicated this logic).
        Flags = self.OemFlags
        if self.PersistAcrossReset:
            Flags = Flags | self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET
        if self.PopulateSystemTable:
            Flags = Flags | self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE
        if self.InitiateReset:
            Flags = Flags | self._CAPSULE_FLAGS_INITIATE_RESET
        return Flags

    def Encode (self):
        ## Serialize the header and return header + payload as bytes.
        #  CapsuleImageSize is recomputed from the current payload length.
        self.CapsuleImageSize = self.HeaderSize + len (self.Payload)
        UefiCapsuleHeader = struct.pack (
                                 self._StructFormat,
                                 self.CapsuleGuid.bytes_le,
                                 self.HeaderSize,
                                 self._ComposeFlags (),
                                 self.CapsuleImageSize,
                                 0
                                 )
        self._Valid = True
        return UefiCapsuleHeader + self.Payload

    def Decode (self, Buffer):
        ## Parse Buffer, populate the header fields, and return the payload.
        #  Raises ValueError when Buffer is truncated, HeaderSize is smaller
        #  than the fixed header, or CapsuleImageSize disagrees with
        #  len (Buffer).
        if len (Buffer) < self._StructSize:
            raise ValueError
        (CapsuleGuid, HeaderSize, Flags, CapsuleImageSize, Reserved) = \
            struct.unpack (
                     self._StructFormat,
                     Buffer[0:self._StructSize]
                     )
        if HeaderSize < self._StructSize:
            raise ValueError
        if CapsuleImageSize != len (Buffer):
            raise ValueError
        self.CapsuleGuid         = uuid.UUID (bytes_le = CapsuleGuid)
        self.HeaderSize          = HeaderSize
        # Low 16 bits are OEM-defined; the high bits carry the standard flags.
        self.OemFlags            = Flags & 0xffff
        self.PersistAcrossReset  = (Flags & self._CAPSULE_FLAGS_PERSIST_ACROSS_RESET) != 0
        self.PopulateSystemTable = (Flags & self._CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE) != 0
        self.InitiateReset       = (Flags & self._CAPSULE_FLAGS_INITIATE_RESET) != 0
        self.CapsuleImageSize    = CapsuleImageSize
        self.Payload             = Buffer[self.HeaderSize:]
        self._Valid              = True
        return self.Payload

    def DumpInfo (self):
        ## Print every header field to stdout.
        #  Raises ValueError if called before a successful Encode () or
        #  Decode ().
        if not self._Valid:
            raise ValueError
        Flags = self._ComposeFlags ()
        print ('EFI_CAPSULE_HEADER.CapsuleGuid      = {Guid}'.format (Guid = str(self.CapsuleGuid).upper()))
        print ('EFI_CAPSULE_HEADER.HeaderSize       = {Size:08X}'.format (Size = self.HeaderSize))
        print ('EFI_CAPSULE_HEADER.Flags            = {Flags:08X}'.format (Flags = Flags))
        print ('  OEM Flags                         = {Flags:04X}'.format (Flags = self.OemFlags))
        if self.PersistAcrossReset:
            print ('  CAPSULE_FLAGS_PERSIST_ACROSS_RESET')
        if self.PopulateSystemTable:
            print ('  CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE')
        if self.InitiateReset:
            print ('  CAPSULE_FLAGS_INITIATE_RESET')
        print ('EFI_CAPSULE_HEADER.CapsuleImageSize = {Size:08X}'.format (Size = self.CapsuleImageSize))
        print ('sizeof (Payload)                    = {Size:08X}'.format (Size = len (self.Payload)))
| edk2-master | BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py |
## @file
# Module that encodes and decodes a capsule dependency.
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
import struct
import json
import sys
import uuid
import re
'''
CapsuleDependency
'''
class OpConvert (object):
    ## Converters between the string form and the binary form of capsule
    #  dependency operands (EFI_FIRMWARE_IMAGE_DEP stream).
    def __init__ (self):
        # Opcode: (OperandSize, PackSize, PackFmt, EncodeConvert, DecodeConvert)
        #   0x00 PUSH_GUID    - 16 byte little-endian GUID
        #   0x01 PUSH_VERSION - UINT32 version value
        #   0x02 DECLARE      - NUL terminated UTF-8 version string
        self._DepexOperations = {0x00: (16, 16, 's', self.Str2Guid, self.Guid2Str),
                                 0x01: (4, 1, 'I', self.Str2Uint, self.Uint2Str),
                                 0x02: (1, 0, 's', self.Str2Utf8, self.Byte2Str),
                                 }

    def Str2Uint (self, Data):
        ## Convert a hex string to a UINT32; raises ValueError otherwise.
        try:
            Value = int (Data, 16)
        except Exception:
            # was a bare 'except:', which also swallowed SystemExit et al.
            Message = '{Data} is not a valid integer value.'.format (Data = Data)
            raise ValueError (Message)
        if Value < 0 or Value > 0xFFFFFFFF:
            Message = '{Data} is not an UINT32.'.format (Data = Data)
            raise ValueError (Message)
        return Value

    def Uint2Str (self, Data):
        ## Render a UINT32 as a 0x-prefixed, zero-padded hex string.
        if Data < 0 or Data > 0xFFFFFFFF:
            Message = '{Data} is not an UINT32.'.format (Data = Data)
            raise ValueError (Message)
        return "0x{Data:08x}".format (Data = Data)

    def Str2Guid (self, Data):
        ## Convert a registry format GUID string to its 16 byte LE form.
        try:
            Guid = uuid.UUID (Data)
        except Exception:
            Message = '{Data} is not a valid registry format GUID value.'.format (Data = Data)
            raise ValueError (Message)
        return Guid.bytes_le

    def Guid2Str (self, Data):
        ## Convert 16 bytes of LE GUID data to an upper-case registry string.
        try:
            Guid = uuid.UUID (bytes_le = Data)
        except Exception:
            Message = '{Data} is not a valid binary format GUID value.'.format (Data = Data)
            raise ValueError (Message)
        return str (Guid).upper ()

    def Str2Utf8 (self, Data):
        ## Encode a version string to UTF-8 bytes.
        if isinstance (Data, str):
            return Data.encode ('utf-8')
        else:
            Message = '{Data} is not a valid string.'.format (Data = Data)
            raise ValueError (Message)

    def Byte2Str (self, Data):
        ## Decode UTF-8 bytes (optionally NUL terminated) back to a string.
        if isinstance (Data, bytes):
            if Data[-1:] == b'\x00':
                return str (Data[:-1], 'utf-8')
            else:
                return str (Data, 'utf-8')
        else:
            Message = '{Data} is not a valid binary string.'.format (Data = Data)
            raise ValueError (Message)

    def OpEncode (self, Opcode, Operand = None):
        ## Encode one opcode plus its optional operand to bytes.
        BinTemp = struct.pack ('<b', Opcode)
        # Only opcodes 0x00-0x02 carry an operand.  (was 'Opcode <= 0x02',
        # which let a negative opcode byte reach the table and raise KeyError;
        # also fixed 'Operand != None' -> 'is not None')
        if 0x00 <= Opcode <= 0x02 and Operand is not None:
            OperandSize, PackSize, PackFmt, EncodeConvert, DecodeConvert = self._DepexOperations[Opcode]
            Value = EncodeConvert (Operand)
            if Opcode == 0x02:
                # NUL terminated string: struct.pack pads with the zero byte.
                PackSize = len (Value) + 1
            BinTemp += struct.pack ('<{PackSize}{PackFmt}'.format (PackSize = PackSize, PackFmt = PackFmt), Value)
        return BinTemp

    def OpDecode (self, Buffer):
        ## Decode one opcode (and operand, when present) from the head of
        #  Buffer.  Returns (Opcode, Operand-or-None, OperandSize).
        Opcode = struct.unpack ('<b', Buffer[0:1])[0]
        if 0x00 <= Opcode <= 0x02:
            OperandSize, PackSize, PackFmt, EncodeConvert, DecodeConvert = self._DepexOperations[Opcode]
            if Opcode == 0x02:
                try:
                    # Variable length: scan for the NUL terminator.
                    PackSize = Buffer[1:].index (b'\x00') + 1
                    OperandSize = PackSize
                except Exception:
                    Message = 'CapsuleDependency: OpConvert: error: decode failed with wrong opcode/string.'
                    raise ValueError (Message)
            try:
                Operand = DecodeConvert (struct.unpack ('<{PackSize}{PackFmt}'.format (PackSize = PackSize, PackFmt = PackFmt), Buffer[1:1+OperandSize])[0])
            except Exception:
                Message = 'CapsuleDependency: OpConvert: error: decode failed with unpack failure.'
                raise ValueError (Message)
        else:
            Operand = None
            OperandSize = 0
        return (Opcode, Operand, OperandSize)
class CapsuleDependencyClass (object):
    ## Encoder/decoder for the capsule dependency expression stream.
    #
    # //**************************************************************
    # // Image Attribute - Dependency
    # //**************************************************************
    # typedef struct {
    #   UINT8 Dependencies[];
    # } EFI_FIRMWARE_IMAGE_DEP

    # {expression operator : [precedence, opcode, type (1:unary/2:binocular)]}
    _opReference = {'&&': [2, 0x03, 2],
                    '||': [1, 0x04, 2],
                    '~': [5, 0x05, 1],
                    '==': [3, 0x08, 2],
                    '>': [4, 0x09, 2],
                    '>=': [4, 0x0A, 2],
                    '<': [4, 0x0B, 2],
                    '<=': [4, 0x0C, 2],
                    }

    def __init__ (self):
        self.Payload = b''
        self._DepexExp = None
        self._DepexList = []
        self._DepexDump = []
        self.Depex = b''
        self._Valid = False
        self._DepexSize = 0
        # Reverse map: opcode -> operator token, used while decoding.
        self._opReferenceReverse = {v[1] : k for k, v in self._opReference.items ()}
        self.OpConverter = OpConvert ()

    @property
    def DepexExp (self):
        return self._DepexExp

    @DepexExp.setter
    def DepexExp (self, DepexExp = ''):
        ## Tokenize the expression; quoted strings survive as single tokens.
        if isinstance (DepexExp, str):
            DepexExp = re.sub (r'\n',r' ',DepexExp)
            DepexExp = re.sub (r'\(',r' ( ',DepexExp)
            DepexExp = re.sub (r'\)',r' ) ',DepexExp)
            DepexExp = re.sub (r'~',r' ~ ',DepexExp)
            self._DepexList = re.findall(r"[^\s\"\']+|\"[^\"]*\"|\'[^\']*\'",DepexExp)
            self._DepexExp = " ".join(self._DepexList)
        else:
            Msg = 'Input Depex Expression is not valid string.'
            raise ValueError (Msg)

    # NOTE: the three membership helpers previously tested 'in ..._opReference.keys ()';
    # direct dict membership is equivalent and idiomatic.
    def IsValidOperator (self, op):
        return op in self._opReference

    def IsValidUnaryOperator (self, op):
        return op in self._opReference and self._opReference[op][2] == 1

    def IsValidBinocularOperator (self, op):
        return op in self._opReference and self._opReference[op][2] == 2

    def IsValidGuid (self, operand):
        try:
            uuid.UUID (operand)
        except Exception:
            return False
        return True

    def IsValidVersion (self, operand):
        try:
            Value = int (operand, 16)
            if Value < 0 or Value > 0xFFFFFFFF:
                return False
        except Exception:
            return False
        return True

    def IsValidBoolean (self, operand):
        try:
            return operand.upper () in ['TRUE', 'FALSE']
        except Exception:
            return False

    def IsValidOperand (self, operand):
        return self.IsValidVersion (operand) or self.IsValidGuid (operand) or self.IsValidBoolean (operand)

    def IsValidString (self, operand):
        return operand[0] == "\"" and operand[-1] == "\"" and len(operand) >= 2

    # Check if the priority of the current operator is not greater than the
    # previous operator's priority (used to decide when to pop the stack).
    def PriorityNotGreater (self, prevOp, currOp):
        return self._opReference[currOp][0] <= self._opReference[prevOp][0]

    def ValidateDepex (self):
        ## Validate the token list before encoding; raises ValueError (or
        #  IndexError for a trailing DECLARE) at the first malformed token.
        OpList = self._DepexList
        i = 0
        while i < len (OpList):
            Op = OpList[i]
            if Op == 'DECLARE':
                i += 1
                if i >= len (OpList):
                    Msg = 'No more Operand after {Op}.'.format (Op = OpList[i-1])
                    raise IndexError (Msg)
                # DECLARE must be followed by a quoted string token.
                if not self.IsValidString(OpList[i]):
                    Msg = '{Operand} after {Op} is not a valid expression input.'.format (Operand = OpList[i], Op = OpList[i-1])
                    raise ValueError (Msg)
            elif Op == '(':
                # Expression cannot end with (
                if i == len (OpList) - 1:
                    Msg = 'Expression cannot end with \'(\''
                    raise ValueError (Msg)
                # The next op after '(' cannot be a binocular operator
                if self.IsValidBinocularOperator (OpList[i+1]) :
                    Msg = '{Op} after \'(\' is not a valid expression input.'.format (Op = OpList[i+1])
                    raise ValueError (Msg)
            elif Op == ')':
                # Expression cannot start with )
                if i == 0:
                    Msg = 'Expression cannot start with \')\''
                    raise ValueError (Msg)
                # The previous op before ')' cannot be an operator
                if self.IsValidOperator (OpList[i-1]):
                    Msg = '{Op} before \')\' is not a valid expression input.'.format (Op = OpList[i-1])
                    raise ValueError (Msg)
                # The next op after ')' cannot be operand or unary operator
                if (i + 1) < len (OpList) and (self.IsValidOperand (OpList[i+1]) or self.IsValidUnaryOperator (OpList[i+1])):
                    Msg = '{Op} after \')\' is not a valid expression input.'.format (Op = OpList[i+1])
                    raise ValueError (Msg)
            elif self.IsValidOperand (Op):
                # The next token after an operand cannot be an operand or a
                # unary operator
                if (i + 1) < len (OpList) and (self.IsValidOperand (OpList[i+1]) or self.IsValidUnaryOperator (OpList[i+1])):
                    Msg = '{Op} after {PrevOp} is not a valid expression input.'.format (Op = OpList[i+1], PrevOp = Op)
                    raise ValueError (Msg)
            elif self.IsValidOperator (Op):
                # The next token after an operator cannot be a binocular
                # operator
                if (i + 1) < len (OpList) and self.IsValidBinocularOperator (OpList[i+1]):
                    Msg = '{Op} after {PrevOp} is not a valid expression input.'.format (Op = OpList[i+1], PrevOp = Op)
                    raise ValueError (Msg)
                # The first op can not be binocular operator
                if i == 0 and self.IsValidBinocularOperator (Op):
                    Msg = 'Expression cannot start with an operator {Op}.'.format (Op = Op)
                    raise ValueError (Msg)
                # The last op can not be operator
                if i == len (OpList) - 1:
                    Msg = 'Expression cannot ended with an operator {Op}.'.format (Op = Op)
                    raise ValueError (Msg)
                # The next op of unary operator cannot be guid / version
                if self.IsValidUnaryOperator (Op) and (self.IsValidGuid (OpList[i+1]) or self.IsValidVersion (OpList[i+1])):
                    Msg = '{Op} after {PrevOp} is not a valid expression input.'.format (Op = OpList[i+1], PrevOp = Op)
                    raise ValueError (Msg)
            else:
                Msg = '{Op} is not a valid expression input.'.format (Op = Op)
                raise ValueError (Msg)
            i += 1

    def Encode (self):
        ## Translate the validated token list into the binary depex stream
        #  (shunting-yard style operator stack) and append Payload.
        # initialize
        self.Depex = b''
        self._DepexDump = []
        OperandStack = []
        # NOTE: renamed from the misspelled 'OpeartorStack'.
        OperatorStack = []
        OpList = self._DepexList
        self.ValidateDepex ()
        # convert
        i = 0
        while i < len (OpList):
            Op = OpList[i]
            if Op == 'DECLARE':
                # This declares that the next token is a quoted VERSION_STRING.
                i += 1
                self.Depex += self.OpConverter.OpEncode (0x02, OpList[i][1:-1])
            elif Op == '(':
                OperatorStack.append (Op)
            elif Op == ')':
                while (OperatorStack and OperatorStack[-1] != '('):
                    Operator = OperatorStack.pop ()
                    self.Depex += self.OpConverter.OpEncode (self._opReference[Operator][1])
                try:
                    OperatorStack.pop () # pop out '('
                except Exception:
                    Msg = 'Pop out \'(\' failed, too many \')\''
                    raise ValueError (Msg)
            elif self.IsValidGuid (Op):
                if not OperandStack:
                    OperandStack.append (self.OpConverter.OpEncode (0x00, Op))
                else:
                    # According to UEFI spec 2.8, the GUID/version operands are
                    # emitted in reversed order for the firmware comparison.
                    self.Depex += self.OpConverter.OpEncode (0x00, Op)
                    self.Depex += OperandStack.pop ()
            elif self.IsValidVersion (Op):
                if not OperandStack:
                    OperandStack.append (self.OpConverter.OpEncode (0x01, Op))
                else:
                    # Same reversed-order rule as for GUID operands above.
                    self.Depex += self.OpConverter.OpEncode (0x01, Op)
                    self.Depex += OperandStack.pop ()
            elif self.IsValidBoolean (Op):
                if Op.upper () == 'FALSE':
                    self.Depex += self.OpConverter.OpEncode (0x07)
                elif Op.upper () == 'TRUE':
                    self.Depex += self.OpConverter.OpEncode (0x06)
            elif self.IsValidOperator (Op):
                while (OperatorStack and OperatorStack[-1] != '(' and self.PriorityNotGreater (OperatorStack[-1], Op)):
                    Operator = OperatorStack.pop ()
                    self.Depex += self.OpConverter.OpEncode (self._opReference[Operator][1])
                OperatorStack.append (Op)
            i += 1
        while OperatorStack:
            Operator = OperatorStack.pop ()
            if Operator == '(':
                Msg = 'Too many \'(\'.'
                raise ValueError (Msg)
            self.Depex += self.OpConverter.OpEncode (self._opReference[Operator][1])
        # 0x0D is the END opcode terminating the dependency expression.
        self.Depex += self.OpConverter.OpEncode (0x0D)
        self._Valid = True
        self._DepexSize = len (self.Depex)
        return self.Depex + self.Payload

    def Decode (self, Buffer):
        ## Decode a binary depex stream back into an expression string; the
        #  bytes after the END opcode become Payload, which is returned.
        # initialize
        self.Depex = Buffer
        OperandStack = []
        DepexLen = 0
        while True:
            Opcode, Operand, OperandSize = self.OpConverter.OpDecode (Buffer[DepexLen:])
            DepexLen += OperandSize + 1
            if Opcode == 0x0D:
                break
            elif Opcode == 0x02:
                if not OperandStack:
                    OperandStack.append ('DECLARE \"{String}\"'.format (String = Operand))
                else:
                    PrevOperand = OperandStack.pop ()
                    OperandStack.append ('{Operand} DECLARE \"{String}\"'.format (Operand = PrevOperand, String = Operand))
            elif Opcode in [0x00, 0x01]:
                OperandStack.append (Operand)
            elif Opcode == 0x06:
                OperandStack.append ('TRUE')
            elif Opcode == 0x07:
                OperandStack.append ('FALSE')
            elif self.IsValidOperator (self._opReferenceReverse[Opcode]):
                Operator = self._opReferenceReverse[Opcode]
                if self.IsValidUnaryOperator (self._opReferenceReverse[Opcode]) and len (OperandStack) >= 1:
                    Operand = OperandStack.pop ()
                    OperandStack.append (' ( {Operator} {Operand} )'.format (Operator = Operator, Operand = Operand))
                elif self.IsValidBinocularOperator (self._opReferenceReverse[Opcode]) and len (OperandStack) >= 2:
                    Operand1 = OperandStack.pop ()
                    Operand2 = OperandStack.pop ()
                    OperandStack.append (' ( {Operand1} {Operator} {Operand2} )'.format (Operator = Operator, Operand1 = Operand1, Operand2 = Operand2))
                else:
                    Msg = 'No enough Operands for {Opcode:02X}.'.format (Opcode = Opcode)
                    raise ValueError (Msg)
            else:
                Msg = '{Opcode:02X} is not a valid OpCode.'.format (Opcode = Opcode)
                raise ValueError (Msg)
        self.DepexExp = OperandStack[0].strip (' ')
        self.Payload = Buffer[DepexLen:]
        self._Valid = True
        self._DepexSize = DepexLen
        return self.Payload

    def DumpInfo (self):
        ## Print the raw opcode/operand stream and sizes to stdout; no-op
        #  style guard: only prints after a successful Encode ()/Decode ().
        DepexLen = 0
        Opcode = None
        Buffer = self.Depex
        if self._Valid == True:
            print ('EFI_FIRMWARE_IMAGE_DEP.Dependencies = {')
            while Opcode != 0x0D:
                Opcode, Operand, OperandSize = self.OpConverter.OpDecode (Buffer[DepexLen:])
                DepexLen += OperandSize + 1
                if Operand:
                    print ('  {Opcode:02X}, {Operand},'.format (Opcode = Opcode, Operand = Operand))
                else:
                    print ('  {Opcode:02X},'.format (Opcode = Opcode))
            print ('}')
            print ('sizeof (EFI_FIRMWARE_IMAGE_DEP.Dependencies)    = {Size:08X}'.format (Size = self._DepexSize))
            print ('sizeof (Payload)                                = {Size:08X}'.format (Size = len (self.Payload)))
| edk2-master | BaseTools/Source/Python/Common/Uefi/Capsule/CapsuleDependency.py |
## @file
# Python 'Common.Edk2' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Common/Edk2/__init__.py |
## @file
# Python 'Common.Edk2.Capsule' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Common/Edk2/Capsule/__init__.py |
## @file
# Module that encodes and decodes a FMP_PAYLOAD_HEADER with a payload.
# The FMP_PAYLOAD_HEADER is processed by the FmpPayloadHeaderLib in the
# FmpDevicePkg.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
FmpPayloadHeader
'''
import struct
def _SIGNATURE_32 (A, B, C, D):
    ## Build the SIGNATURE_32 value from four single ASCII characters,
    #  interpreted in native byte order (matches the C macro).
    return struct.unpack ('=I', (A + B + C + D).encode ('ascii'))[0]
def _SIGNATURE_32_TO_STRING (Signature):
    ## Convert a 32-bit signature value back into its 4-character ASCII form.
    return struct.pack ('<I', Signature).decode ('utf-8')
class FmpPayloadHeaderClass (object):
    ## Encode and decode a FMP_PAYLOAD_HEADER with a trailing payload.
    #
    # typedef struct {
    #   UINT32  Signature;
    #   UINT32  HeaderSize;
    #   UINT32  FwVersion;
    #   UINT32  LowestSupportedVersion;
    # } FMP_PAYLOAD_HEADER;
    #
    # #define FMP_PAYLOAD_HEADER_SIGNATURE SIGNATURE_32 ('M', 'S', 'S', '1')
    #
    _StructFormat = '<IIII'
    _StructSize   = struct.calcsize (_StructFormat)

    _FMP_PAYLOAD_HEADER_SIGNATURE = _SIGNATURE_32 ('M', 'S', 'S', '1')

    def __init__ (self):
        ## Start with a default, not-yet-validated header and empty payload.
        self._Valid                 = False
        self.Signature              = self._FMP_PAYLOAD_HEADER_SIGNATURE
        self.HeaderSize             = self._StructSize
        self.FwVersion              = 0x00000000
        self.LowestSupportedVersion = 0x00000000
        self.Payload                = b''

    def Encode (self):
        ## Serialize the four header fields and append the payload.
        Fields = (self.Signature, self.HeaderSize, self.FwVersion, self.LowestSupportedVersion)
        self._Valid = True
        return struct.pack (self._StructFormat, *Fields) + self.Payload

    def Decode (self, Buffer):
        ## Parse Buffer into the header fields and return the payload bytes.
        #  Raises ValueError on a short buffer, a bad signature, or a
        #  HeaderSize smaller than the fixed header.
        if len (Buffer) < self._StructSize:
            raise ValueError
        Signature, HeaderSize, FwVersion, LowestSupportedVersion = \
            struct.unpack (self._StructFormat, Buffer[0:self._StructSize])
        if Signature != self._FMP_PAYLOAD_HEADER_SIGNATURE:
            raise ValueError
        if HeaderSize < self._StructSize:
            raise ValueError
        self.Signature              = Signature
        self.HeaderSize             = HeaderSize
        self.FwVersion              = FwVersion
        self.LowestSupportedVersion = LowestSupportedVersion
        self.Payload                = Buffer[self.HeaderSize:]
        self._Valid = True
        return self.Payload

    def DumpInfo (self):
        ## Print every header field to stdout; requires a prior successful
        #  Encode () or Decode ().
        if not self._Valid:
            raise ValueError
        print ('FMP_PAYLOAD_HEADER.Signature              = {Signature:08X} ({SignatureString})'.format (Signature = self.Signature, SignatureString = _SIGNATURE_32_TO_STRING (self.Signature)))
        print ('FMP_PAYLOAD_HEADER.HeaderSize             = {HeaderSize:08X}'.format (HeaderSize = self.HeaderSize))
        print ('FMP_PAYLOAD_HEADER.FwVersion              = {FwVersion:08X}'.format (FwVersion = self.FwVersion))
        print ('FMP_PAYLOAD_HEADER.LowestSupportedVersion = {LowestSupportedVersion:08X}'.format (LowestSupportedVersion = self.LowestSupportedVersion))
        print ('sizeof (Payload)                          = {Size:08X}'.format (Size = len (self.Payload)))
| edk2-master | BaseTools/Source/Python/Common/Edk2/Capsule/FmpPayloadHeader.py |
## @file
# This file is used to create/update/query/erase table for Identifiers
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.StringUtils import ConvertToSqlString
from Table.Table import Table
## TableIdentifier
#
# This class defined a table used for Identifier
#
# @param object: Inherited from object class
#
#
class TableIdentifier(Table):
    ## TableIdentifier
    #
    # Wrapper for the 'Identifier' database table, which stores the
    # identifiers discovered while parsing source files.
    #
    def __init__(self, Cursor):
        # Cursor: an open database cursor, passed through to the Table base.
        Table.__init__(self, Cursor)
        self.Table = 'Identifier'

    ## Create table
    #
    # Create table Identifier.  Takes no arguments; the columns created are:
    #   ID, Modifier, Type, Name, Value, Model, BelongsToFile,
    #   BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s(ID INTEGER PRIMARY KEY,
                                                      Modifier VARCHAR,
                                                      Type VARCHAR,
                                                      Name VARCHAR NOT NULL,
                                                      Value VARCHAR NOT NULL,
                                                      Model INTEGER NOT NULL,
                                                      BelongsToFile SINGLE NOT NULL,
                                                      BelongsToFunction SINGLE DEFAULT -1,
                                                      StartLine INTEGER NOT NULL,
                                                      StartColumn INTEGER NOT NULL,
                                                      EndLine INTEGER NOT NULL,
                                                      EndColumn INTEGER NOT NULL
                                                     )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table Identifier and return the new record's ID.
    #
    # @param Modifier:          Modifier of the Identifier
    # @param Type:              Type of the Identifier
    # @param Name:              Name of the Identifier
    # @param Value:             Value of the Identifier
    # @param Model:             Model of the Identifier
    # @param BelongsToFile:     The file the Identifier belongs to
    # @param BelongsToFunction: The function the Identifier belongs to
    # @param StartLine:         StartLine of the Identifier
    # @param StartColumn:       StartColumn of the Identifier
    # @param EndLine:           EndLine of the Identifier
    # @param EndColumn:         EndColumn of the Identifier
    #
    def Insert(self, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn):
        self.ID = self.ID + 1
        # Escape quoting in the text columns so they are safe inside the
        # single-quoted SQL literals below.
        (Modifier, Type, Name, Value) = ConvertToSqlString((Modifier, Type, Name, Value))
        SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
                     % (self.Table, self.ID, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
        Table.Insert(self, SqlCommand)
        return self.ID
| edk2-master | BaseTools/Source/Python/Table/TableIdentifier.py |
## @file
# This file is used to create/update/query/erase table for ECC reports
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import Common.LongFilePathOs as os, time
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString2
import Ecc.EccToolError as EccToolError
import Ecc.EccGlobalData as EccGlobalData
from Common.LongFilePathSupport import OpenLongFilePath as open
## TableReport
#
# This class defined a table used for data model
#
# @param object: Inherited from object class
#
#
class TableReport(Table):
    ## TableReport
    #
    # Wrapper for the 'Report' database table that stores ECC error reports.
    #
    def __init__(self, Cursor):
        # Cursor: an open database cursor, passed through to the Table base.
        Table.__init__(self, Cursor)
        self.Table = 'Report'

    ## Create table
    #
    # Create table Report.  Takes no arguments; the columns created are:
    #   ID, ErrorID, OtherMsg, BelongsToTable, BelongsToItem, Enabled,
    #   Corrected
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       ErrorID INTEGER NOT NULL,
                                                       OtherMsg TEXT,
                                                       BelongsToTable TEXT NOT NULL,
                                                       BelongsToItem SINGLE NOT NULL,
                                                       Enabled INTEGER DEFAULT 0,
                                                       Corrected INTEGER DEFAULT -1
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table Report and return the new record's ID.
    #
    # @param ErrorID:        ID of the error type of the report item
    # @param OtherMsg:       Other error message besides the standard one
    # @param BelongsToTable: The table the offending item lives in
    # @param BelongsToItem:  The item the error belongs to
    # @param Enabled:        Whether this error is enabled
    # @param Corrected:      Whether this error has been corrected
    #
    def Insert(self, ErrorID, OtherMsg='', BelongsToTable='', BelongsToItem= -1, Enabled=0, Corrected= -1):
        self.ID = self.ID + 1
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \
                     % (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected)
        Table.Insert(self, SqlCommand)
        return self.ID

    ## Query table
    #
    # @retval: A recordSet of all enabled records, ordered by error type
    #
    def Query(self):
        SqlCommand = """select ID, ErrorID, OtherMsg, BelongsToTable, BelongsToItem, Corrected from %s
                        where Enabled > -1 order by ErrorID, BelongsToItem""" % (self.Table)
        return self.Exec(SqlCommand)

    ## Update table
    #
    # Attach file-level placeholder records (BelongsToItem == -2) whose
    # OtherMsg mentions File to the given item ID.
    #
    def UpdateBelongsToItemByFile(self, ItemID=-1, File=""):
        SqlCommand = """update Report set BelongsToItem=%s where BelongsToTable='File' and BelongsToItem=-2
                        and OtherMsg like '%%%s%%'""" % (ItemID, File)
        return self.Exec(SqlCommand)

    ## Convert to CSV
    #
    # Get all enabled records from table Report and save them to a .csv file.
    # If the target file is locked (IOError), retry once with a timestamped
    # file name.
    #
    # @param Filename: The file name to save the report content to
    #
    def ToCSV(self, Filename='Report.csv'):
        try:
            # 'open' is LongFilePathSupport.OpenLongFilePath; the context
            # manager guarantees the handle is closed even when a write or a
            # query below raises (the original only closed on full success).
            with open(Filename, 'w+') as File:
                File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")
                RecordSet = self.Query()
                Index = 0
                for Record in RecordSet:
                    Index = Index + 1
                    ErrorID = Record[1]
                    OtherMsg = Record[2]
                    BelongsToTable = Record[3]
                    BelongsToItem = Record[4]
                    IsCorrected = Record[5]
                    SqlCommand = ''
                    if BelongsToTable == 'File':
                        SqlCommand = """select 1, FullPath from %s where ID = %s
                                     """ % (BelongsToTable, BelongsToItem)
                    else:
                        SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B
                                        where A.ID = %s and B.ID = A.BelongsToFile
                                     """ % (BelongsToTable, BelongsToItem)
                    NewRecord = self.Exec(SqlCommand)
                    if NewRecord != []:
                        File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
                        EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))
        except IOError:
            NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())
            EdkLogger.warn("ECC", "The report file %s is locked by other progress, use %s instead!" % (Filename, NewFilename))
            self.ToCSV(NewFilename)
| edk2-master | BaseTools/Source/Python/Table/TableReport.py |
## @file
# This file is used to create/update/query/erase table for data models
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TableDataModel
#
# This class defined a table used for data model
#
# @param object: Inherited from object class
#
#
class TableDataModel(Table):
    ## TableDataModel
    #
    # Wrapper for the 'DataModel' database table that maps model cross
    # indexes to their names and descriptions.
    #
    def __init__(self, Cursor):
        # Cursor: an open database cursor, passed through to the Table base.
        Table.__init__(self, Cursor)
        self.Table = 'DataModel'

    ## Create table
    #
    # Create table DataModel.  Takes no arguments; the columns created are:
    #   ID, CrossIndex, Name, Description
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       CrossIndex INTEGER NOT NULL,
                                                       Name VARCHAR NOT NULL,
                                                       Description VARCHAR
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table DataModel and return the new record's ID.
    #
    # @param CrossIndex:  CrossIndex of the ModelType
    # @param Name:        Name of the ModelType
    # @param Description: Description of the ModelType
    #
    def Insert(self, CrossIndex, Name, Description):
        self.ID = self.ID + 1
        # Escape quoting so the values are safe inside the SQL literals.
        (Name, Description) = ConvertToSqlString((Name, Description))
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s')""" % (self.Table, self.ID, CrossIndex, Name, Description)
        Table.Insert(self, SqlCommand)
        return self.ID

    ## Init table
    #
    # Insert one record per entry of DataClass.MODEL_LIST.  Each entry is a
    # (Name, CrossIndex) pair; the name doubles as the description.
    #
    def InitTable(self):
        EdkLogger.verbose("\nInitialize table DataModel started ...")
        for Item in DataClass.MODEL_LIST:
            CrossIndex = Item[1]
            Name = Item[0]
            Description = Item[0]
            self.Insert(CrossIndex, Name, Description)
        EdkLogger.verbose("Initialize table DataModel ... DONE!")

    ## Get CrossIndex
    #
    # Look up a model's cross index from its name.
    #
    # @param ModelName:  Name of the model
    # @retval CrossIndex: CrossIndex of the model, or -1 when not found
    #
    def GetCrossIndex(self, ModelName):
        CrossIndex = -1
        SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
        self.Cur.execute(SqlCommand)
        for Item in self.Cur:
            CrossIndex = Item[0]
        return CrossIndex
| edk2-master | BaseTools/Source/Python/Table/TableDataModel.py |
## @file
# This file is used to create/update/query/erase table for functions
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TableFunction
#
# This class defined a table used for function
#
# @param Table: Inherited from Table class
#
class TableFunction(Table):
    ## TableFunction
    #
    # Wrapper for the 'Function' database table that stores the functions
    # discovered while parsing source files.
    #
    def __init__(self, Cursor):
        # Cursor: an open database cursor, passed through to the Table base.
        Table.__init__(self, Cursor)
        self.Table = 'Function'

    ## Create table
    #
    # Create table Function.  Takes no arguments; the columns created are:
    #   ID, Header, Modifier, Name, ReturnStatement, StartLine, StartColumn,
    #   EndLine, EndColumn, BodyStartLine, BodyStartColumn, BelongsToFile,
    #   FunNameStartLine, FunNameStartColumn
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       Header TEXT,
                                                       Modifier VARCHAR,
                                                       Name VARCHAR NOT NULL,
                                                       ReturnStatement VARCHAR,
                                                       StartLine INTEGER NOT NULL,
                                                       StartColumn INTEGER NOT NULL,
                                                       EndLine INTEGER NOT NULL,
                                                       EndColumn INTEGER NOT NULL,
                                                       BodyStartLine INTEGER NOT NULL,
                                                       BodyStartColumn INTEGER NOT NULL,
                                                       BelongsToFile SINGLE NOT NULL,
                                                       FunNameStartLine INTEGER NOT NULL,
                                                       FunNameStartColumn INTEGER NOT NULL
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table Function and return the new record's ID.
    #
    # @param Header:             Header of the Function
    # @param Modifier:           Modifier of the Function
    # @param Name:               Name of the Function
    # @param ReturnStatement:    ReturnStatement of the Function
    # @param StartLine:          StartLine of the Function
    # @param StartColumn:        StartColumn of the Function
    # @param EndLine:            EndLine of the Function
    # @param EndColumn:          EndColumn of the Function
    # @param BodyStartLine:      StartLine of the Function body
    # @param BodyStartColumn:    StartColumn of the Function body
    # @param BelongsToFile:      The file the Function belongs to
    # @param FunNameStartLine:   StartLine of the Function name
    # @param FunNameStartColumn: StartColumn of the Function name
    #
    def Insert(self, Header, Modifier, Name, ReturnStatement, StartLine, StartColumn, EndLine, EndColumn, BodyStartLine, BodyStartColumn, BelongsToFile, FunNameStartLine, FunNameStartColumn):
        self.ID = self.ID + 1
        # Escape quoting in the text columns so they are safe inside the
        # single-quoted SQL literals below.
        (Header, Modifier, Name, ReturnStatement) = ConvertToSqlString((Header, Modifier, Name, ReturnStatement))
        SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s, %s, %s)""" \
                     % (self.Table, self.ID, Header, Modifier, Name, ReturnStatement, StartLine, StartColumn, EndLine, EndColumn, BodyStartLine, BodyStartColumn, BelongsToFile, FunNameStartLine, FunNameStartColumn)
        Table.Insert(self, SqlCommand)
        return self.ID
| edk2-master | BaseTools/Source/Python/Table/TableFunction.py |
from __future__ import absolute_import
## @file
# This file is used to create/update/query/erase table for dsc datas
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TableDsc
#
# Storage for items parsed out of DSC files, one row per item: the item's
# model, up to three values, arch, ownership links (parent item and source
# file), the exact source span, and an Enabled flag.
#
# @param object: Inherited from object class
#
class TableDsc(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Dsc'

    ## Create
    #
    # Create the Dsc table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Model INTEGER NOT NULL,
                                                Value1 VARCHAR NOT NULL,
                                                Value2 VARCHAR,
                                                Value3 VARCHAR,
                                                Arch VarCHAR,
                                                BelongsToItem SINGLE NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one DSC item; string fields are SQL-escaped first.
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        Value1, Value2, Value3, Arch = ConvertToSqlString((Value1, Value2, Value3, Arch))
        Cmd = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
              % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
        Table.Insert(self, Cmd)
        return self.ID

    ## Query
    #
    # Fetch every still-enabled row of the given model.
    #
    # @param Model: the model value to filter on
    #
    # @retval: list of matching (ID, Value1, Value2, Value3, Arch,
    #          BelongsToItem, BelongsToFile, StartLine) tuples
    #
    def Query(self, Model):
        Cmd = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                 where Model = %s
                 and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % Cmd)
        self.Cur.execute(Cmd)
        return self.Cur.fetchall()
| edk2-master | BaseTools/Source/Python/Table/TableDsc.py |
## @file
# This file is used to create/update/query/erase table for files
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
import Common.LongFilePathOs as os
from CommonDataClass.DataClass import FileClass
## TableFile
#
# Table tracking every scanned source file: name, extension, directory,
# full path, model, and modification time stamp.
#
# @param object: Inherited from object class
#
class TableFile(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'File'

    ## Create
    #
    # Create the File table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Name VARCHAR NOT NULL,
                                                ExtName VARCHAR,
                                                Path VARCHAR,
                                                FullPath VARCHAR NOT NULL,
                                                Model INTEGER DEFAULT 0,
                                                TimeStamp VARCHAR NOT NULL
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one file record; the path-like string fields are SQL-escaped first.
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp):
        self.ID += 1
        Name, ExtName, Path, FullPath = ConvertToSqlString((Name, ExtName, Path, FullPath))
        Cmd = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, '%s')""" \
              % (self.Table, self.ID, Name, ExtName, Path, FullPath, Model, TimeStamp)
        Table.Insert(self, Cmd)
        return self.ID

    ## InsertFile
    #
    # Split a file path into its components and insert it as one record.
    #
    # @param FileFullPath: the full path of the file on disk
    # @param Model: the model classifying the file
    #
    # @retval FileID: the ID assigned to the new record
    #
    def InsertFile(self, FileFullPath, Model):
        Dirname, BaseName = os.path.split(FileFullPath)
        Stem, Extension = os.path.splitext(FileFullPath)
        # stat tuple index 8 is st_mtime: the last-modification time stamp.
        Mtime = os.stat(FileFullPath)[8]
        Record = FileClass(-1, BaseName, Extension, Dirname, FileFullPath, Model, '', [], [], [])
        return self.Insert(Record.Name, Record.ExtName, Record.Path, Record.FullPath, Record.Model, Mtime)

    ## GetFileId
    #
    # Look up the ID of a file by its full path.
    #
    # @param File: path of the file (anything convertible with str())
    #
    # @retval ID: the matching record's ID, or None when not found
    #
    def GetFileId(self, File):
        Rows = self.Exec("select ID from %s where FullPath = '%s'" % (self.Table, str(File)))
        return Rows[0][0] if Rows else None
| edk2-master | BaseTools/Source/Python/Table/TableFile.py |
## @file
# Python 'Table' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Table/__init__.py |
## @file
# This file is used to create/update/query/erase table for dec datas
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TableDec
#
# Storage for items parsed out of DEC files, one row per item: the item's
# model, up to three values, arch, ownership links (parent item and source
# file), the exact source span, and an Enabled flag.
#
# @param object: Inherited from object class
#
class TableDec(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Dec'

    ## Create
    #
    # Create the Dec table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Model INTEGER NOT NULL,
                                                Value1 VARCHAR NOT NULL,
                                                Value2 VARCHAR,
                                                Value3 VARCHAR,
                                                Arch VarCHAR,
                                                BelongsToItem SINGLE NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one DEC item; string fields are SQL-escaped first.
    #
    # NOTE(review): Value4 and Value5 are accepted for signature compatibility
    # with callers but are never stored -- the Dec table has no matching
    # columns.  Confirm this is intentional before relying on them.
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        Value1, Value2, Value3, Arch = ConvertToSqlString((Value1, Value2, Value3, Arch))
        Cmd = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
              % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
        Table.Insert(self, Cmd)
        return self.ID

    ## Query
    #
    # Fetch every still-enabled row of the given model.
    #
    # @param Model: the model value to filter on
    #
    # @retval: list of matching (ID, Value1, Value2, Value3, Arch,
    #          BelongsToItem, BelongsToFile, StartLine) tuples
    #
    def Query(self, Model):
        Cmd = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                 where Model = %s
                 and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % Cmd)
        self.Cur.execute(Cmd)
        return self.Cur.fetchall()
| edk2-master | BaseTools/Source/Python/Table/TableDec.py |
## @file
# This file is used to create/update/query/erase table for pcds
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TablePcd
#
# Table recording every PCD reference found while scanning: its C name,
# token space GUID C name, token, datum type, model, owning file/function,
# and the exact source span.
#
# @param object: Inherited from object class
#
class TablePcd(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Pcd'

    ## Create
    #
    # Create the Pcd table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                CName VARCHAR NOT NULL,
                                                TokenSpaceGuidCName VARCHAR NOT NULL,
                                                Token INTEGER,
                                                DatumType VARCHAR,
                                                Model INTEGER NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                BelongsToFunction SINGLE DEFAULT -1,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one PCD record; string fields are SQL-escaped first.
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn):
        self.ID += 1
        CName, TokenSpaceGuidCName, DatumType = ConvertToSqlString((CName, TokenSpaceGuidCName, DatumType))
        Cmd = """insert into %s values(%s, '%s', '%s', %s, '%s', %s, %s, %s, %s, %s, %s, %s)""" \
              % (self.Table, self.ID, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn)
        Table.Insert(self, Cmd)
        return self.ID
| edk2-master | BaseTools/Source/Python/Table/TablePcd.py |
## @file
# This file is used to create/update/query/erase table for fdf datas
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TableFdf
#
# Storage for items parsed out of FDF files, one row per item: the item's
# model, up to three values, two scopes, ownership links (parent item and
# source file), the exact source span, and an Enabled flag.
#
# @param object: Inherited from object class
#
class TableFdf(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Fdf'

    ## Create
    #
    # Create the Fdf table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Model INTEGER NOT NULL,
                                                Value1 VARCHAR NOT NULL,
                                                Value2 VARCHAR,
                                                Value3 VARCHAR,
                                                Scope1 VarCHAR,
                                                Scope2 VarCHAR,
                                                BelongsToItem SINGLE NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one FDF item; string fields are SQL-escaped first.
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        Value1, Value2, Value3, Scope1, Scope2 = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
        Cmd = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
              % (self.Table, self.ID, Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
        Table.Insert(self, Cmd)
        return self.ID

    ## Query
    #
    # Fetch every still-enabled row of the given model.
    #
    # @param Model: the model value to filter on
    #
    # @retval: list of matching (ID, Value1, Value2, Value3, Scope1, Scope2,
    #          BelongsToItem, BelongsToFile, StartLine) tuples
    #
    def Query(self, Model):
        Cmd = """select ID, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine from %s
                 where Model = %s
                 and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % Cmd)
        self.Cur.execute(Cmd)
        return self.Cur.fetchall()
| edk2-master | BaseTools/Source/Python/Table/TableFdf.py |
## @file
# This file is used to create/update/query/erase table for Queries
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.StringUtils import ConvertToSqlString
from Table.Table import Table
## TableQuery
#
# Table recording symbol queries made during checking: a name, modifier,
# value and model per row.
#
# @param object: Inherited from object class
#
class TableQuery(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Query'

    ## Create
    #
    # Create the Query table (no-op if it already exists).
    #
    # @param ID: ID of a Query
    # @param Name: Name of a Query
    # @param Modifier: Modifier of a Query
    # @param Value: Type of a Query
    # @param Model: Model of a Query
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s(ID INTEGER PRIMARY KEY,
                                                      Name TEXT DEFAULT '',
                                                      Modifier TEXT DEFAULT '',
                                                      Value TEXT DEFAULT '',
                                                      Model INTEGER DEFAULT 0
                                                     )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert
    #
    # Insert a record into table Query.
    #
    # @param Name: Name of a Query
    # @param Modifier: Modifier of a Query
    # @param Value: Value of a Query
    # @param Model: Model of a Query
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, Name, Modifier, Value, Model):
        self.ID = self.ID + 1
        # Escape the string fields before interpolating them into the SQL
        # text; previously they were inserted raw, so any value containing a
        # single quote produced a malformed statement.  This matches every
        # other Table*.Insert in this package (ConvertToSqlString is already
        # imported at the top of this file).
        (Name, Modifier, Value) = ConvertToSqlString((Name, Modifier, Value))
        SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', %s)""" \
                     % (self.Table, self.ID, Name, Modifier, Value, Model)
        Table.Insert(self, SqlCommand)
        return self.ID
| edk2-master | BaseTools/Source/Python/Table/TableQuery.py |
## @file
# This file is used to create/update/query/erase table for inf datas
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString
## TableInf
#
# Storage for items parsed out of INF files, one row per item: the item's
# model, up to five values, arch, ownership links (parent item and source
# file), the exact source span, and an Enabled flag.
#
# @param object: Inherited from object class
#
class TableInf(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Inf'

    ## Create
    #
    # Create the Inf table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Model INTEGER NOT NULL,
                                                Value1 VARCHAR NOT NULL,
                                                Value2 VARCHAR,
                                                Value3 VARCHAR,
                                                Value4 VARCHAR,
                                                Value5 VARCHAR,
                                                Arch VarCHAR,
                                                BelongsToItem SINGLE NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one INF item; string fields are SQL-escaped first.
    #
    # @retval: the ID assigned to the new record
    #
    def Insert(self, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        Value1, Value2, Value3, Value4, Value5, Arch = ConvertToSqlString((Value1, Value2, Value3, Value4, Value5, Arch))
        Cmd = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
              % (self.Table, self.ID, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
        Table.Insert(self, Cmd)
        return self.ID

    ## Query
    #
    # Fetch every still-enabled row of the given model.
    #
    # @param Model: the model value to filter on
    #
    # @retval: list of matching (ID, Value1, Value2, Value3, Arch,
    #          BelongsToItem, BelongsToFile, StartLine) tuples
    #
    def Query(self, Model):
        Cmd = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                 where Model = %s
                 and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % Cmd)
        self.Cur.execute(Cmd)
        return self.Cur.fetchall()
| edk2-master | BaseTools/Source/Python/Table/TableInf.py |
## @file
# This file is used to create/update/query/erase a common table
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import Common.EdkLogger as EdkLogger
## Table
#
# Base class for all database tables in this package: a thin wrapper around
# a SQLite cursor plus a monotonically increasing record ID counter.
# (The original header comment mistakenly said "TableFile".)
#
# @param Cursor: Cursor of the database
# @param TableName: Name of the table
#
class Table(object):
    def __init__(self, Cursor):
        self.Cur = Cursor    # live cursor used by every operation
        self.Table = ''      # table name; set by each subclass
        self.ID = 0          # last allocated record ID

    ## Create
    #
    # Execute the given CREATE TABLE statement and reset the ID counter.
    #
    # @param SqlCommand: complete "create table ..." statement from a subclass
    #
    def Create(self, SqlCommand):
        self.Cur.execute(SqlCommand)
        self.ID = 0
        EdkLogger.verbose(SqlCommand + " ... DONE!")

    ## Insert
    #
    # Execute an INSERT statement built by a subclass.
    #
    # @param SqlCommand: complete "insert into ..." statement
    #
    def Insert(self, SqlCommand):
        self.Exec(SqlCommand)

    ## Query
    #
    # Dump all records of the table to the verbose log.
    #
    def Query(self):
        EdkLogger.verbose("\nQuery table %s started ..." % self.Table)
        SqlCommand = """select * from %s""" % self.Table
        self.Cur.execute(SqlCommand)
        for Rs in self.Cur:
            EdkLogger.verbose(str(Rs))
        TotalCount = self.GetCount()
        EdkLogger.verbose("*** Total %s records in table %s ***" % (TotalCount, self.Table))
        # Typo fix: message previously read "Query tabel %s DONE!"
        EdkLogger.verbose("Query table %s DONE!" % self.Table)

    ## Drop
    #
    # Drop the table if it exists.
    #
    def Drop(self):
        SqlCommand = """drop table IF EXISTS %s""" % self.Table
        self.Cur.execute(SqlCommand)
        # Typo fix: message previously read "Drop tabel %s ... DONE!"
        EdkLogger.verbose("Drop table %s ... DONE!" % self.Table)

    ## GetCount
    #
    # Count all records of the table.
    #
    # @retval Count: total number of records
    #
    def GetCount(self):
        SqlCommand = """select count(ID) from %s""" % self.Table
        self.Cur.execute(SqlCommand)
        # count(ID) always yields exactly one row; return its single column.
        for Item in self.Cur:
            return Item[0]

    ## GenerateID
    #
    # Allocate a new ID when the caller passes -1; otherwise keep the
    # current counter.
    #
    # @param ID: Input ID (-1 requests a fresh one)
    #
    # @retval ID: the (possibly newly allocated) current ID
    #
    def GenerateID(self, ID):
        if ID == -1:
            self.ID = self.ID + 1
        return self.ID

    ## InitID
    #
    # Seed the ID counter from the number of records already in the table.
    #
    def InitID(self):
        self.ID = self.GetCount()

    ## Exec
    #
    # Execute an arbitrary SQL command and return all resulting rows.
    #
    # @param SqlCommand: The SqlCommand to be executed
    #
    # @retval RecordSet: list of result rows (empty for non-SELECT commands)
    #
    def Exec(self, SqlCommand):
        EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
        self.Cur.execute(SqlCommand)
        RecordSet = self.Cur.fetchall()
        EdkLogger.debug(4, "RecordSet: %s" % RecordSet)
        return RecordSet
| edk2-master | BaseTools/Source/Python/Table/Table.py |
## @file
# This file is used to create/update/query/erase table for ECC reports
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
import Common.LongFilePathOs as os, time
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString2
import Eot.EotToolError as EotToolError
import Eot.EotGlobalData as EotGlobalData
## TableEotReport
#
# Table collecting EOT report rows: module identity, source file, item
# name/type/mode, GUID details and the owning function.
#
# @param object: Inherited from object class
#
class TableEotReport(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Report'

    ## Create
    #
    # Create the Report table (no-op if it already exists).
    #
    def Create(self):
        Cmd = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                ModuleID INTEGER DEFAULT -1,
                                                ModuleName TEXT DEFAULT '',
                                                ModuleGuid TEXT DEFAULT '',
                                                SourceFileID INTEGER DEFAULT -1,
                                                SourceFileFullPath TEXT DEFAULT '',
                                                ItemName TEXT DEFAULT '',
                                                ItemType TEXT DEFAULT '',
                                                ItemMode TEXT DEFAULT '',
                                                GuidName TEXT DEFAULT '',
                                                GuidMacro TEXT DEFAULT '',
                                                GuidValue TEXT DEFAULT '',
                                                BelongsToFunction TEXT DEFAULT '',
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Cmd)

    ## Insert
    #
    # Add one report record.
    #
    # NOTE(review): the string arguments are interpolated into the SQL text
    # without escaping (ConvertToSqlString2 is imported above but unused), so
    # values containing a single quote would break the statement -- confirm
    # whether callers guarantee quote-free input.
    #
    def Insert(self, ModuleID = -1, ModuleName = '', ModuleGuid = '', SourceFileID = -1, SourceFileFullPath = '', \
               ItemName = '', ItemType = '', ItemMode = '', GuidName = '', GuidMacro = '', GuidValue = '', BelongsToFunction = '', Enabled = 0):
        self.ID += 1
        Cmd = """insert into %s values(%s, %s, '%s', '%s', %s, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s)""" \
              % (self.Table, self.ID, ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, \
                 ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled)
        Table.Insert(self, Cmd)

    ## GetMaxID
    #
    # @retval: the largest ID currently stored in the table (None when empty)
    #
    def GetMaxID(self):
        self.Cur.execute("""select max(ID) from %s""" % self.Table)
        for Row in self.Cur:
            return Row[0]
| edk2-master | BaseTools/Source/Python/Table/TableEotReport.py |
## @file
# This file is used to define strings used in the BPDG tool
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
#string table starts here...
#strings are classified as following types
# MSG_...: it is a message string
# ERR_...: it is a error string
# WRN_...: it is a warning string
# LBL_...: it is a UI label (window title, control label, etc.)
# MNU_...: it is a menu item label
# HLP_...: it is a help string
# CFG_...: it is a config string used in module. Do not need to translate it.
# XRC_...: it is a user visible string from xrc file
# Header comment written at the top of the generated VPD map file.
# NOTE(review): this embedded header still carries the pre-SPDX BSD license
# boilerplate while the surrounding file uses an SPDX identifier -- confirm
# whether the generated header should be updated to match.
MAP_FILE_COMMENT_TEMPLATE = \
"""
## @file
#
# THIS IS AUTO-GENERATED FILE BY BPDG TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD informations for a platform fixed/adjusted by BPDG tool.
#
# Copyright (c) 2010 -2018, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
"""

# Tool banner and version shown by the command-line interface.
LBL_BPDG_LONG_UNI = (u"Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)")
LBL_BPDG_VERSION = (u"1.0")

# Usage text printed when the tool is invoked without required arguments.
LBL_BPDG_USAGE = \
(
"""BPDG options -o Filename.bin -m Filename.map Filename.txt
Copyright (c) 2010 - 2018, Intel Corporation All Rights Reserved.
Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)
Required Flags:
-o BIN_FILENAME, --vpd-filename=BIN_FILENAME
Specify the file name for the VPD binary file
-m FILENAME, --map-filename=FILENAME
Generate file name for consumption during the build that contains
the mapping of Pcd name, offset, datum size and value derived
from the input file and any automatic calculations.
"""
)

# Help strings for the individual command-line options (fed to the option
# parser when the CLI is built).
MSG_OPTION_HELP = ("Show this help message and exit.")
MSG_OPTION_DEBUG_LEVEL = ("Print DEBUG statements, where DEBUG_LEVEL is 0-9.")
MSG_OPTION_VERBOSE = ("Print informational statements.")
MSG_OPTION_QUIET = ("Returns the exit code and will display only error messages.")
MSG_OPTION_VPD_FILENAME = ("Specify the file name for the VPD binary file.")
MSG_OPTION_MAP_FILENAME = ("Generate file name for consumption during the build that contains the mapping of Pcd name, offset, datum size and value derived from the input file and any automatic calculations.")
MSG_OPTION_FORCE = ("Will force overwriting existing output files rather than returning an error message.")

# Error message emitted for an unrecognized debug level argument.
ERR_INVALID_DEBUG_LEVEL = ("Invalid level for debug message. Only "
                           "'DEBUG', 'INFO', 'WARNING', 'ERROR', "
                           "'CRITICAL' are supported for debugging "
                           "messages.")
| edk2-master | BaseTools/Source/Python/BPDG/StringTable.py |
## @file
# This file include GenVpd class for fix the Vpd type PCD offset, and PcdEntry for describe
# and process each entry of vpd type PCD.
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
from io import BytesIO
from . import StringTable as st
import array
import re
from Common.LongFilePathSupport import OpenLongFilePath as open
from struct import *
from Common.DataType import MAX_SIZE_TYPE, MAX_VAL_TYPE, TAB_STAR
import Common.EdkLogger as EdkLogger
import Common.BuildToolError as BuildToolError
# Map a PCD datum size in bytes to the struct.pack format character used to
# serialize it as an unsigned integer (1/2/4/8 bytes -> B/H/I/Q).
_FORMAT_CHAR = {1: 'B',
                2: 'H',
                4: 'I',
                8: 'Q'
                }
## The VPD PCD data structure for store and process each VPD PCD entry.
#
# This class contain method to format and pack pcd's value.
#
class PcdEntry:
def __init__(self, PcdCName, SkuId, PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
             PcdBinOffset=None, PcdBinSize=None, Alignment=None):
    """Record one VPD PCD entry and validate its mandatory fields.

    The textual fields (name, SKU, offset, size, value) are stripped of
    surrounding whitespace.  Value, offset and size must be non-empty;
    otherwise an EdkLogger fatal error is raised.  Finally the textual
    offset is converted to an integer via _GenOffsetValue().
    """
    self.PcdCName = PcdCName.strip()
    self.SkuId = SkuId.strip()
    self.PcdOffset = PcdOffset.strip()
    self.PcdSize = PcdSize.strip()
    self.PcdValue = PcdValue.strip()
    # Lineno and FileName default to None; only strip them when actually
    # provided so the defaults no longer raise AttributeError.
    self.Lineno = Lineno.strip() if Lineno else Lineno
    self.FileName = FileName.strip() if FileName else FileName
    self.PcdUnpackValue = PcdUnpackValue
    self.PcdBinOffset = PcdBinOffset
    self.PcdBinSize = PcdBinSize
    self.Alignment = Alignment

    if self.PcdValue == '' :
        EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
                        "Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))

    if self.PcdOffset == '' :
        EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
                        "Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))

    if self.PcdSize == '' :
        EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
                        "Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))

    self._GenOffsetValue ()
## Analyze the string value to judge the PCD's datum type equal to Boolean or not.
#
# @param ValueString PCD's value
# @param Size PCD's size
#
# @retval True PCD's datum type is Boolean
# @retval False PCD's datum type is not Boolean.
#
def _IsBoolean(self, ValueString, Size):
if (Size == "1"):
if ValueString.upper() in ["TRUE", "FALSE"]:
return True
elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:
return True
return False
## Convert the PCD's value from string to integer.
#
# This function will try to convert the Offset value form string to integer
# for both hexadecimal and decimal.
#
def _GenOffsetValue(self):
if self.PcdOffset != TAB_STAR:
try:
self.PcdBinOffset = int (self.PcdOffset)
except:
try:
self.PcdBinOffset = int(self.PcdOffset, 16)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
## Pack Boolean type VPD PCD's value form string to binary type.
#
# @param ValueString The boolean type string for pack.
#
#
def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
try:
self.PcdValue = pack(_FORMAT_CHAR[1], 1)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
else:
try:
self.PcdValue = pack(_FORMAT_CHAR[1], 0)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack Integer type VPD PCD's value form string to binary type.
#
# @param ValueString The Integer type string for pack.
#
#
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
for Type, MaxSize in MAX_SIZE_TYPE.items():
if Type == 'BOOLEAN':
continue
if Size == MaxSize:
if IntValue < 0:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD %s in %s datum type(File: %s Line: %s)." % (
IntValue, self.PcdCName, Type, self.FileName, self.Lineno))
elif IntValue > MAX_VAL_TYPE[Type]:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type %s for PCD %s(File: %s Line: %s)." % (
IntValue, Type, self.PcdCName, self.FileName, self.Lineno))
try:
self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack VOID* type VPD PCD's value form string to binary type.
#
# The VOID* type of string divided into 3 sub-type:
# 1: L"String"/L'String', Unicode type string.
# 2: "String"/'String', Ascii type string.
# 3: {bytearray}, only support byte-array.
#
# @param ValueString The Integer type string for pack.
#
def _PackPtrValue(self, ValueString, Size):
if ValueString.startswith('L"') or ValueString.startswith("L'"):
self._PackUnicode(ValueString, Size)
elif ValueString.startswith('{') and ValueString.endswith('}'):
self._PackByteArray(ValueString, Size)
elif (ValueString.startswith('"') and ValueString.endswith('"')) or (ValueString.startswith("'") and ValueString.endswith("'")):
self._PackString(ValueString, Size)
else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
## Pack an Ascii PCD value.
#
# An Ascii string for a PCD should be in format as ""/''.
#
def _PackString(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
QuotedFlag = True
if ValueString.startswith("'"):
QuotedFlag = False
ValueString = ValueString[1:-1]
# No null-terminator in 'string'
if (QuotedFlag and len(ValueString) + 1 > Size) or (not QuotedFlag and len(ValueString) > Size):
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
try:
self.PcdValue = pack('%ds' % Size, ValueString.encode('utf-8'))
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack a byte-array PCD value.
#
# A byte-array for a PCD should be in format as {0x01, 0x02, ...}.
#
def _PackByteArray(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
ValueString = ValueString.strip()
ValueString = ValueString.lstrip('{').strip('}')
ValueList = ValueString.split(',')
ValueList = [item.strip() for item in ValueList]
if len(ValueList) > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B')
for Index in range(len(ValueList)):
Value = None
if ValueList[Index].lower().startswith('0x'):
# translate hex value
try:
Value = int(ValueList[Index], 16)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno))
else:
# translate decimal value
try:
Value = int(ValueList[Index], 10)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno))
if Value > 255:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno))
ReturnArray.append(Value)
for Index in range(len(ValueList), Size):
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
## Pack a unicode PCD value into byte array.
#
# A unicode string for a PCD should be in format as L""/L''.
#
def _PackUnicode(self, UnicodeString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % \
(self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
QuotedFlag = True
if UnicodeString.startswith("L'"):
QuotedFlag = False
UnicodeString = UnicodeString[2:-1]
# No null-terminator in L'string'
if (QuotedFlag and (len(UnicodeString) + 1) * 2 > Size) or (not QuotedFlag and len(UnicodeString) * 2 > Size):
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The size of unicode string %s is too larger for size %s(File: %s Line: %s)" % \
(UnicodeString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B')
for Value in UnicodeString:
try:
ReturnArray.append(ord(Value))
ReturnArray.append(0)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
(Value, UnicodeString, self.FileName, self.Lineno))
for Index in range(len(UnicodeString) * 2, Size):
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
## The class implementing the BPDG VPD PCD offset fix process
#
# The VPD PCD offset fix process includes:
# 1. Parse the input guided.txt file and store it in the data structure;
# 2. Format the input file data to remove unused lines;
# 3. Fixed offset if needed;
# 4. Generate output file, including guided.map and guided.bin file;
#
class GenVPD :
    ## Constructor of DscBuildData
    #
    #  Initialize object of GenVPD
    #   @Param      InputFileName   The filename include the vpd type pcd information
    #   @param      MapFileName     The filename of map file that stores vpd type pcd information.
    #                               This file will be generated by the BPDG tool after fix the offset
    #                               and adjust the offset to make the pcd data aligned.
    #   @param      VpdFileName     The filename of Vpd file that hold vpd pcd information.
    #
    def __init__(self, InputFileName, MapFileName, VpdFileName):
        self.InputFileName = InputFileName
        self.MapFileName = MapFileName
        self.VpdFileName = VpdFileName
        # Raw input lines; replaced with PcdEntry objects by ParserInputFile().
        self.FileLinesList = []
        # Entries with an explicit offset, and entries whose offset is TAB_STAR.
        self.PcdFixedOffsetSizeList = []
        self.PcdUnknownOffsetList = []
        try:
            fInputfile = open(InputFileName, "r")
            try:
                self.FileLinesList = fInputfile.readlines()
            except:
                EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" % InputFileName, None)
            finally:
                fInputfile.close()
        except:
            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)

    ##
    # Parser the input file which is generated by the build tool. Convert the value of each pcd's
    # from string to its real format. Also remove the useless line in the input file.
    #
    def ParserInputFile (self):
        count = 0
        for line in self.FileLinesList:
            # Strip "\r\n" generated by readlines ().
            line = line.strip()
            line = line.rstrip(os.linesep)

            # Skip the comment line
            if (not line.startswith("#")) and len(line) > 1 :
                #
                # Enhanced for support "|" character in the string.
                # A quoted value containing "|" is pulled out first so the
                # field split below cannot break it apart.
                #
                ValueList = ['', '', '', '', '']

                ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
                PtrValue = ValueRe.findall(line)

                ValueUpdateFlag = False

                if len(PtrValue) >= 1:
                    line = re.sub(ValueRe, '', line)
                    ValueUpdateFlag = True

                # Fields: CName | SkuId | Offset | Size | Value
                TokenList = line.split('|')
                ValueList[0:len(TokenList)] = TokenList

                if ValueUpdateFlag:
                    ValueList[4] = PtrValue[0]
                self.FileLinesList[count] = ValueList
                # Store the line number
                self.FileLinesList[count].append(str(count + 1))
            elif len(line) <= 1 :
                # Set the blank line to "None"
                self.FileLinesList[count] = None
            else :
                # Set the comment line to "None"
                self.FileLinesList[count] = None
            count += 1

        # The line count contain usage information
        count = 0
        # Delete useless lines (the IndexError at the end of the list
        # terminates the loop).
        while (True) :
            try :
                if (self.FileLinesList[count] is None) :
                    del(self.FileLinesList[count])
                else :
                    count += 1
            except :
                break
        #
        # After remove the useless line, if there are no data remain in the file line list,
        # Report warning messages to user's.
        #
        if len(self.FileLinesList) == 0 :
            EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
                           "There are no VPD type pcds defined in DSC file, Please check it.")

        # Process the pcds one by one base on the pcd's value and size
        count = 0
        for line in self.FileLinesList:
            if line is not None :
                PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4], line[5], self.InputFileName)
                # Strip the space char
                PCD.PcdCName = PCD.PcdCName.strip(' ')
                PCD.SkuId = PCD.SkuId.strip(' ')
                PCD.PcdOffset = PCD.PcdOffset.strip(' ')
                PCD.PcdSize = PCD.PcdSize.strip(' ')
                PCD.PcdValue = PCD.PcdValue.strip(' ')
                PCD.Lineno = PCD.Lineno.strip(' ')

                #
                # Store the original pcd value.
                # This information will be useful while generate the output map file.
                #
                PCD.PcdUnpackValue = str(PCD.PcdValue)

                #
                # Translate PCD size string to an integer value.
                # Decimal is tried first, then hexadecimal.
                PackSize = None
                try:
                    PackSize = int(PCD.PcdSize, 10)
                    PCD.PcdBinSize = PackSize
                except:
                    try:
                        PackSize = int(PCD.PcdSize, 16)
                        PCD.PcdBinSize = PackSize
                    except:
                        EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))

                #
                # If value is Unicode string (e.g. L""), then use 2-byte alignment
                # If value is byte array (e.g. {}), then use 8-byte alignment
                #
                PCD.PcdOccupySize = PCD.PcdBinSize
                if PCD.PcdUnpackValue.startswith("{"):
                    Alignment = 8
                elif PCD.PcdUnpackValue.startswith("L"):
                    Alignment = 2
                else:
                    Alignment = 1
                PCD.Alignment = Alignment
                if PCD.PcdOffset != TAB_STAR:
                    # Fixed-offset PCDs only get a diagnostic; auto-placed
                    # PCDs get their occupied size rounded up below.
                    if PCD.PcdOccupySize % Alignment != 0:
                        if PCD.PcdUnpackValue.startswith("{"):
                            EdkLogger.warn("BPDG", "The offset value of PCD %s is not 8-byte aligned!" %(PCD.PcdCName), File=self.InputFileName)
                        else:
                            EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, 'The offset value of PCD %s should be %s-byte aligned.' % (PCD.PcdCName, Alignment))
                else:
                    if PCD.PcdOccupySize % Alignment != 0:
                        PCD.PcdOccupySize = (PCD.PcdOccupySize // Alignment + 1) * Alignment

                PackSize = PCD.PcdOccupySize
                if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
                    PCD._PackBooleanValue(PCD.PcdValue)
                    self.FileLinesList[count] = PCD
                    count += 1
                    continue
                #
                # Try to translate value to an integer firstly.
                #
                IsInteger = True
                PackValue = None
                try:
                    PackValue = int(PCD.PcdValue)
                except:
                    try:
                        PackValue = int(PCD.PcdValue, 16)
                    except:
                        IsInteger = False

                if IsInteger:
                    PCD._PackIntValue(PackValue, PackSize)
                else:
                    PCD._PackPtrValue(PCD.PcdValue, PackSize)

                self.FileLinesList[count] = PCD
                count += 1
            else :
                continue

    ##
    # This function used to create a clean list only contain useful information and reorganized to make it
    # easy to be sorted
    #
    def FormatFileLine (self) :

        for eachPcd in self.FileLinesList :
            if eachPcd.PcdOffset != TAB_STAR :
                # Use pcd's Offset value as key, and pcd's Value as value
                self.PcdFixedOffsetSizeList.append(eachPcd)
            else :
                # Use pcd's CName as key, and pcd's Size as value
                self.PcdUnknownOffsetList.append(eachPcd)


    ##
    # This function is use to fix the offset value which the not specified in the map file.
    # Usually it use the star (meaning any offset) character in the offset field
    #
    def FixVpdOffset (self):
        # At first, the offset should start at 0
        # Sort fixed offset list in order to find out where has free spaces for the pcd's offset
        # value is TAB_STAR to insert into.

        self.PcdFixedOffsetSizeList.sort(key=lambda x: x.PcdBinOffset)

        #
        # Sort the un-fixed pcd's offset by its size.
        #
        self.PcdUnknownOffsetList.sort(key=lambda x: x.PcdBinSize)

        # Bubble PcdNvStoreDefaultValueBuffer to the tail of the
        # unknown-offset list so that it is placed last.
        index =0
        for pcd in self.PcdUnknownOffsetList:
            index += 1
            if pcd.PcdCName == ".".join(("gEfiMdeModulePkgTokenSpaceGuid", "PcdNvStoreDefaultValueBuffer")):
                if index != len(self.PcdUnknownOffsetList):
                    for i in range(len(self.PcdUnknownOffsetList) - index):
                        self.PcdUnknownOffsetList[index+i -1 ], self.PcdUnknownOffsetList[index+i] = self.PcdUnknownOffsetList[index+i], self.PcdUnknownOffsetList[index+i -1]

        #
        # Process all Offset value are TAB_STAR
        #
        if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) :
            # The offset start from 0
            NowOffset = 0
            for Pcd in self.PcdUnknownOffsetList :
                # Round up to the entry's alignment before placing it.
                if NowOffset % Pcd.Alignment != 0:
                    NowOffset = (NowOffset// Pcd.Alignment + 1) * Pcd.Alignment
                Pcd.PcdBinOffset = NowOffset
                Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
                NowOffset += Pcd.PcdOccupySize

            self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
            return

        # Check the offset of VPD type pcd's offset start from 0.
        if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
            EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",
                           None)

        # Judge whether the offset in fixed pcd offset list is overlapped or not.
        lenOfList = len(self.PcdFixedOffsetSizeList)
        count = 0
        while (count < lenOfList - 1) :
            PcdNow = self.PcdFixedOffsetSizeList[count]
            PcdNext = self.PcdFixedOffsetSizeList[count+1]
            # Two pcd's offset is same
            if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
                                "The offset of %s at line: %s is same with %s at line: %s in file %s" % \
                                (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
                                None)

            # Overlapped
            if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize > PcdNext.PcdBinOffset :
                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
                                "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
                                (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
                                None)

            # Has free space, raise a warning message
            if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize < PcdNext.PcdBinOffset :
                EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
                               "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
                               (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
                               None)
            count += 1

        LastOffset = self.PcdFixedOffsetSizeList[0].PcdBinOffset
        FixOffsetSizeListCount = 0
        lenOfList = len(self.PcdFixedOffsetSizeList)
        lenOfUnfixedList = len(self.PcdUnknownOffsetList)

        ##
        # Insert the un-fixed offset pcd's list into fixed offset pcd's list if has free space between those pcds.
        #
        while (FixOffsetSizeListCount < lenOfList) :

            eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
            NowOffset = eachFixedPcd.PcdBinOffset

            # Has free space
            if LastOffset < NowOffset :
                if lenOfUnfixedList != 0 :
                    countOfUnfixedList = 0
                    while(countOfUnfixedList < lenOfUnfixedList) :
                        eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]
                        needFixPcdSize = eachUnfixedPcd.PcdOccupySize
                        # Not been fixed
                        if eachUnfixedPcd.PcdOffset == TAB_STAR :
                            if LastOffset % eachUnfixedPcd.Alignment != 0:
                                LastOffset = (LastOffset // eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
                            # The offset un-fixed pcd can write into this free space
                            if needFixPcdSize <= (NowOffset - LastOffset) :
                                # Change the offset value of un-fixed pcd
                                eachUnfixedPcd.PcdOffset = str(hex(LastOffset))
                                eachUnfixedPcd.PcdBinOffset = LastOffset
                                # Insert this pcd into fixed offset pcd list.
                                self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount, eachUnfixedPcd)

                                # Delete the item's offset that has been fixed and added into fixed offset list
                                self.PcdUnknownOffsetList.pop(countOfUnfixedList)

                                # After item added, should enlarge the length of fixed pcd offset list
                                lenOfList += 1
                                FixOffsetSizeListCount += 1

                                # Decrease the un-fixed pcd offset list's length
                                lenOfUnfixedList -= 1

                                # Modify the last offset value
                                LastOffset += needFixPcdSize
                            else :
                                # It can not insert into those two pcds, need to check still has other space can store it.
                                LastOffset = NowOffset + self.PcdFixedOffsetSizeList[FixOffsetSizeListCount].PcdOccupySize
                                FixOffsetSizeListCount += 1
                                break

                # Set the FixOffsetSizeListCount = lenOfList for quit the loop
                else :
                    FixOffsetSizeListCount = lenOfList

            # No free space, smoothly connect with previous pcd.
            elif LastOffset == NowOffset :
                LastOffset = NowOffset + eachFixedPcd.PcdOccupySize
                FixOffsetSizeListCount += 1
            # Usually it will not enter into this thunk, if so, means it overlapped.
            else :
                EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
                                "The offset value definition has overlapped at pcd: %s, its offset is: %s, in file: %s line: %s" % \
                                (eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
                                None)
                FixOffsetSizeListCount += 1

        # Continue to process the un-fixed offset pcd's list, add this time, just append them behind the fixed pcd's offset list.
        lenOfUnfixedList = len(self.PcdUnknownOffsetList)
        lenOfList = len(self.PcdFixedOffsetSizeList)
        while (lenOfUnfixedList > 0) :
            # Still has items need to process
            # The last pcd instance
            LastPcd = self.PcdFixedOffsetSizeList[lenOfList-1]
            NeedFixPcd = self.PcdUnknownOffsetList[0]

            NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
            if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
                NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset // NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment

            NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))

            # Insert this pcd into fixed offset pcd list's tail.
            self.PcdFixedOffsetSizeList.insert(lenOfList, NeedFixPcd)
            # Delete the item's offset that has been fixed and added into fixed offset list
            self.PcdUnknownOffsetList.pop(0)

            lenOfList += 1
            lenOfUnfixedList -= 1

    ##
    # Write the final data into output files.
    #
    def GenerateVpdFile (self, MapFileName, BinFileName):
        #Open an VPD file to process

        try:
            fVpdFile = open(BinFileName, "wb")
        except:
            # Open failed
            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)

        try :
            fMapFile = open(MapFileName, "w")
        except:
            # Open failed
            EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)

        # Use a instance of BytesIO to cache data
        fStringIO = BytesIO()

        # Write the header of map file.
        try :
            fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
        except:
            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)

        for eachPcd in self.PcdFixedOffsetSizeList :
            # write map file
            try :
                fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset, eachPcd.PcdSize, eachPcd.PcdUnpackValue))
            except:
                EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)

            # Write Vpd binary file
            fStringIO.seek (eachPcd.PcdBinOffset)
            if isinstance(eachPcd.PcdValue, list):
                # A list value comes from _PackByteArray/_PackUnicode (ints).
                # NOTE(review): bytes(Value) is never a str on Python 3, so
                # only the else branch executes; the chr() branch looks like
                # Python 2 residue -- confirm before cleaning it up.
                for i in range(len(eachPcd.PcdValue)):
                    Value = eachPcd.PcdValue[i:i + 1]
                    if isinstance(bytes(Value), str):
                        fStringIO.write(chr(Value[0]))
                    else:
                        fStringIO.write(bytes(Value))
            else:
                fStringIO.write (eachPcd.PcdValue)

        try :
            fVpdFile.write (fStringIO.getvalue())
        except:
            EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)

        fStringIO.close ()
        fVpdFile.close ()
        fMapFile.close ()
| edk2-master | BaseTools/Source/Python/BPDG/GenVpd.py |
## @file
# Python 'BPDG' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/BPDG/__init__.py |
## @file
# Intel Binary Product Data Generation Tool (Intel BPDG).
# This tool provide a simple process for the creation of a binary file containing read-only
# configuration data for EDK II platforms that contain Dynamic and DynamicEx PCDs described
# in VPD sections. It also provide an option for specifying an alternate name for a mapping
# file of PCD layout for use during the build when the platform integrator selects to use
# automatic offset calculation.
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
import encodings.ascii
from optparse import OptionParser
from Common import EdkLogger
from Common.BuildToolError import *
from Common.BuildVersion import gBUILD_VERSION
from . import StringTable as st
from . import GenVpd
# Human-readable tool name and version string (includes the tools build id).
PROJECT_NAME = st.LBL_BPDG_LONG_UNI
VERSION = (st.LBL_BPDG_VERSION + " Build " + gBUILD_VERSION)
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @retval 0 Tool was successful
# @retval 1 Tool failed
#
def main():
    """Dispatch the BPDG command line.

    Parses options into the module globals, configures logging, validates
    the mandatory -o/-m options and hands off to StartBpdg().

    @retval 0   Tool was successful
    """
    global Options, Args

    # Set up the logging facility before anything can report.
    EdkLogger.Initialize()
    Options, Args = MyOptionParser()

    ReturnCode = 0

    # Map the mutually exclusive verbosity options onto one log level.
    if Options.opt_verbose:
        LogLevel = EdkLogger.VERBOSE
    elif Options.opt_quiet:
        LogLevel = EdkLogger.QUIET
    elif Options.debug_level is not None:
        LogLevel = Options.debug_level + 1
    else:
        LogLevel = EdkLogger.INFO
    EdkLogger.SetLevel(LogLevel)

    # Both output files are mandatory.
    if Options.bin_filename is None:
        EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
    if Options.filename is None:
        EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")

    # -f was given (store_true leaves the dest as None when absent).
    Force = Options.opt_force is not None

    if (Args[0] is not None) :
        StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
    else :
        EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
                        None)

    return ReturnCode
## Parse command line options
#
# Using standard Python module optparse to parse command line option of this tool.
#
# @retval options A optparse.Values object containing the parsed options
# @retval args Target of BPDG command
#
def MyOptionParser():
    """Parse the BPDG command line with optparse.

    @retval options   optparse.Values object with the parsed options
    @retval args      positional arguments (the VPD info file)
    """
    parser = OptionParser(version="%s - Version %s" % (PROJECT_NAME, VERSION),
                          description='',
                          prog='BPDG',
                          usage=st.LBL_BPDG_USAGE
                          )
    # (short, long, keyword-arguments) for every supported option.
    OptionSpecs = (
        ('-d', '--debug',        {'action': 'store', 'type': "int", 'dest': 'debug_level', 'help': st.MSG_OPTION_DEBUG_LEVEL}),
        ('-v', '--verbose',      {'action': 'store_true', 'dest': 'opt_verbose', 'help': st.MSG_OPTION_VERBOSE}),
        ('-q', '--quiet',        {'action': 'store_true', 'dest': 'opt_quiet', 'default': False, 'help': st.MSG_OPTION_QUIET}),
        ('-o', '--vpd-filename', {'action': 'store', 'dest': 'bin_filename', 'help': st.MSG_OPTION_VPD_FILENAME}),
        ('-m', '--map-filename', {'action': 'store', 'dest': 'filename', 'help': st.MSG_OPTION_MAP_FILENAME}),
        ('-f', '--force',        {'action': 'store_true', 'dest': 'opt_force', 'help': st.MSG_OPTION_FORCE}),
    )
    for ShortOpt, LongOpt, Keywords in OptionSpecs:
        parser.add_option(ShortOpt, LongOpt, **Keywords)

    (options, args) = parser.parse_args()

    # The positional input file argument is mandatory.
    if not args:
        EdkLogger.info("Please specify the filename.txt file which contain the VPD pcd info!")
        EdkLogger.info(parser.usage)
        sys.exit(1)
    return options, args
## Start BPDG and call the main functions
#
# This method mainly focus on call GenVPD class member functions to complete
# BPDG's target. It will process VpdFile override, and provide the interface file
# information.
#
# @Param InputFileName The filename include the vpd type pcd information
# @param MapFileName The filename of map file that stores vpd type pcd information.
# This file will be generated by the BPDG tool after fix the offset
# and adjust the offset to make the pcd data aligned.
# @param VpdFileName The filename of Vpd file that hold vpd pcd information.
# @param Force Override the exist Vpdfile or not.
#
def StartBpdg(InputFileName, MapFileName, VpdFileName, Force):
    """Run the VPD generation pipeline: parse, format, fix offsets, emit files.

    @Param  InputFileName   The filename include the vpd type pcd information
    @param  MapFileName     The map file to generate (offsets fixed/aligned)
    @param  VpdFileName     The binary Vpd file to generate
    @param  Force           Overwrite an existing Vpd file without asking
    """
    # Confirm interactively before clobbering an existing output file.
    if not Force and os.path.exists(VpdFileName):
        print("\nFile %s already exist, Overwrite(Yes/No)?[Y]: " % VpdFileName)
        Answer = sys.stdin.readline()
        if Answer.strip().lower() not in ['y', 'yes', '']:
            return

    Generator = GenVpd.GenVPD(InputFileName, MapFileName, VpdFileName)

    for Label, Value in (("VPD input data file: ", InputFileName),
                         ("VPD output map file: ", MapFileName),
                         ("VPD output binary file: ", VpdFileName)):
        EdkLogger.info('%-24s = %s' % (Label, Value))

    # The four pipeline stages must run in this order.
    Generator.ParserInputFile()
    Generator.FormatFileLine()
    Generator.FixVpdOffset()
    Generator.GenerateVpdFile(MapFileName, VpdFileName)

    EdkLogger.info("- Vpd pcd fixed done! -")
if __name__ == '__main__':
    try:
        r = main()
    except FatalError as e:
        # FatalError carries the tool error code as its first argument.
        # The previous code bound the exception object itself to r, so the
        # integer comparisons below raised TypeError on Python 3.
        r = e.args[0] if e.args else 1
    ## 0-127 is a safe return range, and 1 is a standard default error
    if r < 0 or r > 127: r = 1
    sys.exit(r)
| edk2-master | BaseTools/Source/Python/BPDG/BPDG.py |
## @file
# build a platform or a module
#
# Copyright (c) 2014, Hewlett-Packard Development Company, L.P.<BR>
# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>
# Copyright (c) 2020 - 2021, ARM Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import os.path as path
import sys
import os
import re
import glob
import time
import platform
import traceback
import multiprocessing
from threading import Thread,Event,BoundedSemaphore
import threading
from linecache import getlines
from subprocess import Popen,PIPE, STDOUT
from collections import OrderedDict, defaultdict
from AutoGen.PlatformAutoGen import PlatformAutoGen
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.WorkspaceAutoGen import WorkspaceAutoGen
from AutoGen.AutoGenWorker import AutoGenWorkerInProcess,AutoGenManager,\
LogAgent
from AutoGen import GenMake
from Common import Misc as Utils
from Common.TargetTxtClassObject import TargetTxtDict
from Common.ToolDefClassObject import ToolDefDict
from buildoptions import MyOptionParser
from Common.Misc import PathClass,SaveFileOnChange,RemoveDirectory
from Common.StringUtils import NormPath
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Common.BuildToolError import *
from Common.DataType import *
import Common.EdkLogger as EdkLogger
from Workspace.WorkspaceDatabase import BuildDB
from BuildReport import BuildReport
from GenPatchPcdTable.GenPatchPcdTable import PeImageClass,parsePcdInfoFromMapFile
from PatchPcdValue.PatchPcdValue import PatchBinaryFile
import Common.GlobalData as GlobalData
from GenFds.GenFds import GenFds, GenFdsApi
import multiprocessing as mp
from multiprocessing import Manager
from AutoGen.DataPipe import MemoryDataPipe
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo, PlatformInfo
from GenFds.FdfParser import FdfParser
from AutoGen.IncludesAutoGen import IncludesAutoGen
from GenFds.GenFds import resetFdsGlobalVariable
from AutoGen.AutoGen import CalculatePriorityValue
## standard targets of build command
gSupportedTarget = ['all', 'genc', 'genmake', 'modules', 'libraries', 'fds', 'clean', 'cleanall', 'cleanlib', 'run']

## Pattern matching names of the form "_<n>_<n>_<hex>".
#  NOTE(review): presumably identifies temporary workspace-database table
#  names; TmpTableDict appears to cache them -- both are populated/used
#  outside this excerpt, confirm against the rest of build.py.
TemporaryTablePattern = re.compile(r'^_\d+_\d+_[a-fA-F0-9]+$')
TmpTableDict = {}
## Check environment PATH variable to make sure the specified tool is found
#
# If the tool is found in the PATH, then True is returned
# Otherwise, False is returned
#
def IsToolInPath(tool):
    """Return True if *tool* is found in one of the PATH directories.

    On Windows each extension from PATHEXT is also tried, mirroring how the
    shell resolves executables; elsewhere only the bare name is checked.

    @param tool   Base name of the tool to look for (without extension).

    @retval True    The tool exists in some PATH directory.
    @retval False   The tool was not found.
    """
    if 'PATHEXT' in os.environ:
        extns = os.environ['PATHEXT'].split(os.path.pathsep)
    else:
        extns = ('',)
    # Use .get() so an environment without PATH returns False instead of
    # raising KeyError (the original indexed os.environ['PATH'] directly).
    for pathDir in os.environ.get('PATH', '').split(os.path.pathsep):
        for ext in extns:
            if os.path.exists(os.path.join(pathDir, tool + ext)):
                return True
    return False
## Check environment variables
#
# Check environment variables that must be set for build. Currently they are
#
# WORKSPACE The directory all packages/platforms start from
# EDK_TOOLS_PATH The directory contains all tools needed by the build
# PATH $(EDK_TOOLS_PATH)/Bin/<sys> must be set in PATH
#
# If any of above environment variable is not set or has error, the build
# will be broken.
#
def CheckEnvVariable():
    """Validate the environment variables required by the build.

    Checks WORKSPACE, EDK_TOOLS_PATH and PATH, expands PACKAGES_PATH for
    multiple-workspace support, normalizes the paths back into os.environ
    and publishes the results into GlobalData.  Any problem is reported as
    a fatal build error.
    """
    # check WORKSPACE
    if "WORKSPACE" not in os.environ:
        EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="WORKSPACE")

    WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
    if not os.path.exists(WorkspaceDir):
        EdkLogger.error("build", FILE_NOT_FOUND, "WORKSPACE doesn't exist", ExtraData=WorkspaceDir)
    elif ' ' in WorkspaceDir:
        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path",
                        ExtraData=WorkspaceDir)
    os.environ["WORKSPACE"] = WorkspaceDir

    # set multiple workspace
    PackagesPath = os.getenv("PACKAGES_PATH")
    mws.setWs(WorkspaceDir, PackagesPath)
    if mws.PACKAGES_PATH:
        for Path in mws.PACKAGES_PATH:
            if not os.path.exists(Path):
                EdkLogger.error("build", FILE_NOT_FOUND, "One Path in PACKAGES_PATH doesn't exist", ExtraData=Path)
            elif ' ' in Path:
                EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in PACKAGES_PATH", ExtraData=Path)

    # check EDK_TOOLS_PATH -- this must happen BEFORE the variable is
    # dereferenced.  The original code normalized os.environ["EDK_TOOLS_PATH"]
    # first, so a missing variable raised a bare KeyError instead of this
    # diagnostic, making the check below unreachable.
    if "EDK_TOOLS_PATH" not in os.environ:
        EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="EDK_TOOLS_PATH")
    os.environ["EDK_TOOLS_PATH"] = os.path.normcase(os.environ["EDK_TOOLS_PATH"])

    # check PATH
    if "PATH" not in os.environ:
        EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="PATH")

    GlobalData.gWorkspace = WorkspaceDir

    GlobalData.gGlobalDefines["WORKSPACE"] = WorkspaceDir
    GlobalData.gGlobalDefines["EDK_TOOLS_PATH"] = os.environ["EDK_TOOLS_PATH"]
## Get normalized file path
#
# Convert the path to be local format, and remove the WORKSPACE path at the
# beginning if the file path is given in full path.
#
# @param FilePath File path to be normalized
# @param Workspace Workspace path which the FilePath will be checked against
#
# @retval string The normalized file path
#
def NormFile(FilePath, Workspace):
    """Return FilePath normalized and made relative to its workspace root.

    An absolute FilePath is normalized as-is; a relative one is resolved
    against the (multi-)workspace.  The resulting file must exist, otherwise
    a build error is raised.

    @param FilePath   File path to be normalized
    @param Workspace  Workspace path the file path is checked against

    @retval string    The normalized, workspace-relative file path
    """
    # resolve to an absolute, normalized path first
    if os.path.isabs(FilePath):
        FullPath = os.path.normpath(FilePath)
    else:
        FullPath = os.path.normpath(mws.join(Workspace, FilePath))
        Workspace = mws.getWs(Workspace, FilePath)
    # the file must exist
    if not os.path.isfile(FullPath):
        EdkLogger.error("build", FILE_NOT_FOUND, ExtraData="\t%s (Please give file in absolute path or relative to WORKSPACE)" % FullPath)
    # strip the workspace prefix (plus the separator when the workspace
    # path does not already end with one)
    Prefix = len(Workspace) if Workspace[-1] in ["\\", "/"] else len(Workspace) + 1
    return FullPath[Prefix:]
## Get the output of an external program
#
# This is the entrance method of thread reading output of an external program and
# putting them in STDOUT/STDERR of current program.
#
# @param From The stream message read from
# @param To The stream message put on
# @param ExitFlag The flag used to indicate stopping reading
#
def ReadMessage(From, To, ExitFlag, MemTo=None):
    """Thread entry that pumps an external program's output stream.

    Reads *From* line by line until EOF (or until *ExitFlag* is set after a
    line has been handled), decodes each line as UTF-8 and forwards it to
    the *To* callable.  When *MemTo* is given, every line is also collected
    there, except that MSVC "/showIncludes" note lines are only collected,
    never printed.

    @param From      Stream the messages are read from (bytes)
    @param To        Callable the decoded lines are passed to
    @param ExitFlag  Event used to request that reading stop
    @param MemTo     Optional list accumulating the decoded lines
    """
    while True:
        RawLine = From.readline()
        # None or an empty bytes object means the stream is exhausted
        if RawLine is None or RawLine == b"":
            break
        Text = RawLine.rstrip().decode(encoding='utf-8', errors='ignore')
        if MemTo is None:
            To(Text)
        elif Text.lstrip()[:21] == "Note: including file:":
            # compiler include note: keep for dependency tracking, stay quiet
            MemTo.append(Text)
        else:
            To(Text)
            MemTo.append(Text)
        if ExitFlag.is_set():
            break
class MakeSubProc(Popen):
    """Popen subclass that carries a buffer for its captured stdout lines.

    ProcOut is filled by the ReadMessage reader thread and later consumed
    for dependency-file generation.
    """
    def __init__(self, *args, **kwargs):
        # create the output buffer before launching, so readers can rely on it
        self.ProcOut = []
        super().__init__(*args, **kwargs)
## Launch an external program
#
# This method will call subprocess.Popen to execute an external program with
# given options in specified directory. Because of the dead-lock issue during
# redirecting output of the external program, threads are used to do the
# redirection work.
#
# @param Command A list or string containing the call of the program
# @param WorkingDir The directory in which the program will be running
#
def LaunchCommand(Command, WorkingDir,ModuleAuto = None):
    """Run an external build command in WorkingDir and return its wall time.

    A reader thread (ReadMessage) drains the child's combined stdout/stderr
    to avoid pipe dead-lock.  On failure the make response file (if any) is
    dumped and a COMMAND_FAILURE build error is raised.  On success, when
    ModuleAuto is given, dependency (deps) files are generated from the
    captured output.

    @param Command     A list or string containing the call of the program
    @param WorkingDir  The directory in which the program will be running
    @param ModuleAuto  Optional ModuleAutoGen used for deps-file generation

    @retval string     Elapsed time formatted as "<n>ms"
    """
    BeginTime = time.time()
    # if working directory doesn't exist, Popen() will raise an exception
    if not os.path.isdir(WorkingDir):
        EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=WorkingDir)
    # Command is used as the first Argument in following Popen().
    # It could be a string or sequence. We find that if command is a string in following Popen(),
    # ubuntu may fail with an error message that the command is not found.
    # So here we may need convert command from string to list instance.
    if platform.system() != 'Windows':
        if not isinstance(Command, list):
            Command = Command.split()
        Command = ' '.join(Command)
    Proc = None
    EndOfProcedure = None
    try:
        # launch the command; stderr is merged into stdout so a single
        # reader thread sees everything, and ProcOut collects the lines
        Proc = MakeSubProc(Command, stdout=PIPE, stderr=STDOUT, env=os.environ, cwd=WorkingDir, bufsize=-1, shell=True)
        # launch two threads to read the STDOUT and STDERR
        EndOfProcedure = Event()
        EndOfProcedure.clear()
        if Proc.stdout:
            StdOutThread = Thread(target=ReadMessage, args=(Proc.stdout, EdkLogger.info, EndOfProcedure,Proc.ProcOut))
            StdOutThread.name = "STDOUT-Redirector"
            StdOutThread.daemon = False
            StdOutThread.start()
        # waiting for program exit
        Proc.wait()
    except: # in case of aborting
        # terminate the threads redirecting the program output
        EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
        if EndOfProcedure is not None:
            EndOfProcedure.set()
        if Proc is None:
            # Popen itself failed - report it as "failed to start"
            if not isinstance(Command, type("")):
                Command = " ".join(Command)
            EdkLogger.error("build", COMMAND_FAILURE, "Failed to start command", ExtraData="%s [%s]" % (Command, WorkingDir))
    # join the reader; it ends on stream EOF after the child exits
    if Proc.stdout:
        StdOutThread.join()
    # check the return code of the program
    if Proc.returncode != 0:
        if not isinstance(Command, type("")):
            Command = " ".join(Command)
        # print out the Response file and its content when make failure
        RespFile = os.path.join(WorkingDir, 'OUTPUT', 'respfilelist.txt')
        if os.path.isfile(RespFile):
            f = open(RespFile)
            RespContent = f.read()
            f.close()
            EdkLogger.info(RespContent)
        EdkLogger.error("build", COMMAND_FAILURE, ExtraData="%s [%s]" % (Command, WorkingDir))
    if ModuleAuto:
        # generate dependency files from the captured compiler output
        iau = IncludesAutoGen(WorkingDir,ModuleAuto)
        if ModuleAuto.ToolChainFamily == TAB_COMPILER_MSFT:
            iau.CreateDepsFileForMsvc(Proc.ProcOut)
        else:
            iau.UpdateDepsFileforNonMsvc()
        iau.UpdateDepsFileforTrim()
        iau.CreateModuleDeps()
        iau.CreateDepsInclude()
        iau.CreateDepsTarget()
    return "%dms" % (int(round((time.time() - BeginTime) * 1000)))
## The smallest unit that can be built in multi-thread build mode
#
# This is the base class of build unit. The "Obj" parameter must provide
# __str__(), __eq__() and __hash__() methods. Otherwise there could be build units
# missing build.
#
# Currently the "Obj" should be only ModuleAutoGen or PlatformAutoGen objects.
#
class BuildUnit:
    """One schedulable piece of work for the multi-thread build.

    Wraps a build object (currently ModuleAutoGen or PlatformAutoGen)
    together with its make command, target and prerequisite units.  The
    wrapped object must implement __str__, __eq__ and __hash__ so that
    units can be deduplicated in the scheduler queues.
    """

    def __init__(self, Obj, BuildCommand, Target, Dependency, WorkingDir="."):
        """Record the build object, its command line and its prerequisites.

        @param Obj           The object the build is working on
        @param BuildCommand  Make tool command line (list of strings)
        @param Target        Build target name, one of gSupportedTarget
        @param Dependency    BuildUnit(s) which must be completed in advance
        @param WorkingDir    Directory the build command starts in
        """
        self.BuildObject = Obj
        self.Dependency = Dependency
        self.WorkingDir = WorkingDir
        self.Target = Target
        self.BuildCommand = BuildCommand
        # a missing make command means tools_def.txt is misconfigured
        if not BuildCommand:
            EdkLogger.error("build", OPTION_MISSING,
                            "No build command found for this module. "
                            "Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
                            (Obj.BuildTarget, Obj.ToolChain, Obj.Arch),
                            ExtraData=str(Obj))

    def __str__(self):
        """Delegate the string form to the wrapped build object."""
        return str(self.BuildObject)

    def __eq__(self, Other):
        """Equal when both wrap equal build objects of the same architecture."""
        return (Other
                and self.BuildObject == Other.BuildObject
                and Other.BuildObject
                and self.BuildObject.Arch == Other.BuildObject.Arch)

    def __hash__(self):
        """Combine the wrapped object's hash with its architecture's hash."""
        return hash(self.BuildObject) + hash(self.BuildObject.Arch)

    def __repr__(self):
        return repr(self.BuildObject)
## The smallest module unit that can be built by nmake/make command in multi-thread build mode
#
# This class is for module build by nmake/make build system. The "Obj" parameter
# must provide __str__(), __eq__() and __hash__() methods. Otherwise there could
# be make units missing build.
#
# Currently the "Obj" should be only ModuleAutoGen object.
#
class ModuleMakeUnit(BuildUnit):
    """Smallest module unit buildable by nmake/make in multi-thread mode.

    The wrapped object must be a ModuleAutoGen; one make unit is created
    recursively for every library the module links against.
    """
    def __init__(self, Obj, BuildCommand, Target):
        """Wrap a ModuleAutoGen and derive make units for its libraries.

        @param Obj           The ModuleAutoGen object the build works on
        @param BuildCommand  Make tool command line (list of strings)
        @param Target        Build target name, one of gSupportedTarget
        """
        LibraryUnits = [ModuleMakeUnit(La, BuildCommand, Target) for La in Obj.LibraryAutoGenList]
        BuildUnit.__init__(self, Obj, BuildCommand, Target, LibraryUnits, Obj.MakeFileDir)
        # the default/all target maps to the module-level "tbuild" make target
        if Target in [None, "", "all"]:
            self.Target = "tbuild"
## The smallest platform unit that can be built by nmake/make command in multi-thread build mode
#
# This class is for platform build by nmake/make build system. The "Obj" parameter
# must provide __str__(), __eq__() and __hash__() methods. Otherwise there could
# be make units missing build.
#
# Currently the "Obj" should be only PlatformAutoGen object.
#
class PlatformMakeUnit(BuildUnit):
    """Smallest platform unit buildable by nmake/make in multi-thread mode.

    The wrapped object must be a PlatformAutoGen; the unit depends on make
    units for every library and every module of the platform.
    """
    def __init__(self, Obj, BuildCommand, Target):
        """Wrap a PlatformAutoGen and derive make units for its contents.

        @param Obj           The PlatformAutoGen object the build works on
        @param BuildCommand  Make tool command line (list of strings)
        @param Target        Build target name, one of gSupportedTarget
        """
        # Must use the Obj parameter here: self.BuildObject is only assigned
        # by BuildUnit.__init__ below, so referencing it at this point (as the
        # previous code did) raised AttributeError.
        Dependency = [ModuleMakeUnit(Lib, BuildCommand, Target) for Lib in Obj.LibraryAutoGenList]
        Dependency.extend([ModuleMakeUnit(Mod, BuildCommand, Target) for Mod in Obj.ModuleAutoGenList])
        BuildUnit.__init__(self, Obj, BuildCommand, Target, Dependency, Obj.MakeFileDir)
## The class representing the task of a module build or platform build
#
# This class manages the build tasks in multi-thread build mode. Its jobs include
# scheduling thread running, catching thread error, monitor the thread status, etc.
#
class BuildTask:
    """Task manager for the multi-thread build.

    Class-level state implements a scheduler: tasks move from _PendingQueue
    (dependencies unresolved) to _ReadyQueue (ready to run) to _RunningQueue
    (thread launched).  A BoundedSemaphore caps concurrency and threading
    Events signal errors and scheduler shutdown.  All state is class-level,
    so only one scheduler may run per process.
    """
    # queue for tasks waiting for schedule
    _PendingQueue = OrderedDict()
    _PendingQueueLock = threading.Lock()
    # queue for tasks ready for running
    _ReadyQueue = OrderedDict()
    _ReadyQueueLock = threading.Lock()
    # queue for run tasks
    _RunningQueue = OrderedDict()
    _RunningQueueLock = threading.Lock()
    # queue containing all build tasks, in case duplicate build
    _TaskQueue = OrderedDict()
    # flag indicating error occurs in a running thread
    _ErrorFlag = threading.Event()
    _ErrorFlag.clear()
    _ErrorMessage = ""
    # BoundedSemaphore object used to control the number of running threads
    _Thread = None
    # flag indicating if the scheduler is started or not
    _SchedulerStopped = threading.Event()
    _SchedulerStopped.set()
    ## Start the task scheduler thread
    #
    # @param MaxThreadNumber The maximum thread number
    # @param ExitFlag Flag used to end the scheduler
    #
    @staticmethod
    def StartScheduler(MaxThreadNumber, ExitFlag):
        """Spawn the scheduler thread and block until it has started."""
        SchedulerThread = Thread(target=BuildTask.Scheduler, args=(MaxThreadNumber, ExitFlag))
        SchedulerThread.name = "Build-Task-Scheduler"
        SchedulerThread.daemon = False
        SchedulerThread.start()
        # wait for the scheduler to be started, especially useful in Linux
        while not BuildTask.IsOnGoing():
            time.sleep(0.01)
    ## Scheduler method
    #
    # @param MaxThreadNumber The maximum thread number
    # @param ExitFlag Flag used to end the scheduler
    #
    @staticmethod
    def Scheduler(MaxThreadNumber, ExitFlag):
        """Main scheduler loop: move ready tasks to worker threads.

        Runs until there is no pending/ready work and ExitFlag is set, or
        until a worker reports an error via _ErrorFlag.  Always clears the
        queues and sets _SchedulerStopped on the way out.
        """
        BuildTask._SchedulerStopped.clear()
        try:
            # use BoundedSemaphore to control the maximum running threads
            BuildTask._Thread = BoundedSemaphore(MaxThreadNumber)
            #
            # scheduling loop, which will exits when no pending/ready task and
            # indicated to do so, or there's error in running thread
            #
            while (len(BuildTask._PendingQueue) > 0 or len(BuildTask._ReadyQueue) > 0 \
                   or not ExitFlag.is_set()) and not BuildTask._ErrorFlag.is_set():
                EdkLogger.debug(EdkLogger.DEBUG_8, "Pending Queue (%d), Ready Queue (%d)"
                                % (len(BuildTask._PendingQueue), len(BuildTask._ReadyQueue)))
                # get all pending tasks
                BuildTask._PendingQueueLock.acquire()
                BuildObjectList = list(BuildTask._PendingQueue.keys())
                #
                # check if their dependency is resolved, and if true, move them
                # into ready queue
                #
                for BuildObject in BuildObjectList:
                    Bt = BuildTask._PendingQueue[BuildObject]
                    if Bt.IsReady():
                        BuildTask._ReadyQueue[BuildObject] = BuildTask._PendingQueue.pop(BuildObject)
                BuildTask._PendingQueueLock.release()
                # launch build thread until the maximum number of threads is reached
                while not BuildTask._ErrorFlag.is_set():
                    # empty ready queue, do nothing further
                    if len(BuildTask._ReadyQueue) == 0:
                        break
                    # wait for active thread(s) exit
                    BuildTask._Thread.acquire(True)
                    # start a new build thread
                    Bo, Bt = BuildTask._ReadyQueue.popitem()
                    # move into running queue
                    BuildTask._RunningQueueLock.acquire()
                    BuildTask._RunningQueue[Bo] = Bt
                    BuildTask._RunningQueueLock.release()
                    Bt.Start()
                    # avoid tense loop
                    time.sleep(0.01)
                # avoid tense loop
                time.sleep(0.01)
            # wait for all running threads exit
            if BuildTask._ErrorFlag.is_set():
                EdkLogger.quiet("\nWaiting for all build threads exit...")
            # while not BuildTask._ErrorFlag.is_set() and \
            while len(BuildTask._RunningQueue) > 0:
                EdkLogger.verbose("Waiting for thread ending...(%d)" % len(BuildTask._RunningQueue))
                EdkLogger.debug(EdkLogger.DEBUG_8, "Threads [%s]" % ", ".join(Th.name for Th in threading.enumerate()))
                # avoid tense loop
                time.sleep(0.1)
        except BaseException as X:
            #
            # TRICK: hide the output of threads left running, so that the user can
            # catch the error message easily
            #
            EdkLogger.SetLevel(EdkLogger.ERROR)
            BuildTask._ErrorFlag.set()
            BuildTask._ErrorMessage = "build thread scheduler error\n\t%s" % str(X)
        BuildTask._PendingQueue.clear()
        BuildTask._ReadyQueue.clear()
        BuildTask._RunningQueue.clear()
        BuildTask._TaskQueue.clear()
        BuildTask._SchedulerStopped.set()
    ## Wait for all running method exit
    #
    @staticmethod
    def WaitForComplete():
        """Block until the scheduler thread has fully stopped."""
        BuildTask._SchedulerStopped.wait()
    ## Check if the scheduler is running or not
    #
    @staticmethod
    def IsOnGoing():
        """Return True while the scheduler thread is running."""
        return not BuildTask._SchedulerStopped.is_set()
    ## Abort the build
    @staticmethod
    def Abort():
        """Signal the scheduler to stop and wait until it has done so."""
        if BuildTask.IsOnGoing():
            BuildTask._ErrorFlag.set()
            BuildTask.WaitForComplete()
    ## Check if there's error in running thread
    #
    # Since the main thread cannot catch exceptions in other thread, we have to
    # use threading.Event to communicate this formation to main thread.
    #
    @staticmethod
    def HasError():
        return BuildTask._ErrorFlag.is_set()
    ## Get error message in running thread
    #
    # Since the main thread cannot catch exceptions in other thread, we have to
    # use a static variable to communicate this message to main thread.
    #
    @staticmethod
    def GetErrorMessage():
        return BuildTask._ErrorMessage
    ## Factory method to create a BuildTask object
    #
    # This method will check if a module is building or has been built. And if
    # true, just return the associated BuildTask object in the _TaskQueue. If
    # not, create and return a new BuildTask object. The new BuildTask object
    # will be appended to the _PendingQueue for scheduling later.
    #
    # @param BuildItem A BuildUnit object representing a build object
    # @param Dependency The dependent build object of BuildItem
    #
    @staticmethod
    def New(BuildItem, Dependency=None):
        if BuildItem in BuildTask._TaskQueue:
            Bt = BuildTask._TaskQueue[BuildItem]
            return Bt
        Bt = BuildTask()
        Bt._Init(BuildItem, Dependency)
        BuildTask._TaskQueue[BuildItem] = Bt
        BuildTask._PendingQueueLock.acquire()
        BuildTask._PendingQueue[BuildItem] = Bt
        BuildTask._PendingQueueLock.release()
        return Bt
    ## The real constructor of BuildTask
    #
    # @param BuildItem A BuildUnit object representing a build object
    # @param Dependency The dependent build object of BuildItem
    #
    def _Init(self, BuildItem, Dependency=None):
        """Record the build item and resolve its dependency tasks."""
        self.BuildItem = BuildItem
        self.DependencyList = []
        if Dependency is None:
            Dependency = BuildItem.Dependency
        else:
            Dependency.extend(BuildItem.Dependency)
        self.AddDependency(Dependency)
        # flag indicating build completes, used to avoid unnecessary re-build
        self.CompleteFlag = False
    ## Check if all dependent build tasks are completed or not
    #
    def IsReady(self):
        ReadyFlag = True
        for Dep in self.DependencyList:
            if Dep.CompleteFlag == True:
                continue
            ReadyFlag = False
            break
        return ReadyFlag
    ## Add dependent build task
    #
    # @param Dependency The list of dependent build objects
    #
    def AddDependency(self, Dependency):
        # binary modules and cache hits need no build, so add no task for them
        for Dep in Dependency:
            if not Dep.BuildObject.IsBinaryModule and not Dep.BuildObject.CanSkipbyCache(GlobalData.gModuleCacheHit):
                self.DependencyList.append(BuildTask.New(Dep))    # BuildTask list
    ## The thread wrapper of LaunchCommand function
    #
    # @param Command A list or string contains the call of the command
    # @param WorkingDir The directory in which the program will be running
    #
    def _CommandThread(self, Command, WorkingDir):
        """Worker thread body: run the build command and record the outcome."""
        try:
            self.BuildItem.BuildObject.BuildTime = LaunchCommand(Command, WorkingDir,self.BuildItem.BuildObject)
            self.CompleteFlag = True
            # Run hash operation post dependency to account for libs
            # Run if --hash or --binary-destination
            if GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
                self.BuildItem.BuildObject.GenModuleHash()
            if GlobalData.gBinCacheDest:
                self.BuildItem.BuildObject.GenCMakeHash()
        except:
            #
            # TRICK: hide the output of threads left running, so that the user can
            # catch the error message easily
            #
            if not BuildTask._ErrorFlag.is_set():
                GlobalData.gBuildingModule = "%s [%s, %s, %s]" % (str(self.BuildItem.BuildObject),
                                                                  self.BuildItem.BuildObject.Arch,
                                                                  self.BuildItem.BuildObject.ToolChain,
                                                                  self.BuildItem.BuildObject.BuildTarget
                                                                 )
                EdkLogger.SetLevel(EdkLogger.ERROR)
                BuildTask._ErrorFlag.set()
                BuildTask._ErrorMessage = "%s broken\n %s [%s]" % \
                                          (threading.current_thread().name, Command, WorkingDir)
        # indicate there's a thread is available for another build task
        BuildTask._RunningQueueLock.acquire()
        BuildTask._RunningQueue.pop(self.BuildItem)
        BuildTask._RunningQueueLock.release()
        BuildTask._Thread.release()
    ## Start build task thread
    #
    def Start(self):
        """Launch the build command of this task in a new worker thread."""
        EdkLogger.quiet("Building ... %s" % repr(self.BuildItem))
        Command = self.BuildItem.BuildCommand + [self.BuildItem.Target]
        self.BuildTread = Thread(target=self._CommandThread, args=(Command, self.BuildItem.WorkingDir))
        self.BuildTread.name = "build thread"
        self.BuildTread.daemon = False
        self.BuildTread.start()
## The class contains the information related to EFI image
#
class PeImageInfo():
    """Container for the information related to one EFI image.

    Holds the image identity (name, GUID, arch), its output/debug locations
    and the parsed PeImage object.  On construction the image size is rounded
    up to the next 4 KiB boundary (a full extra page is always added, even
    when the size is already page-aligned).
    """
    def __init__(self, BaseName, Guid, Arch, OutputDir, DebugDir, ImageClass):
        """Load the required image information.

        @param BaseName    The full file path of image
        @param Guid        The GUID for image
        @param Arch        Arch of this image
        @param OutputDir   The output directory for image
        @param DebugDir    The debug directory for image
        @param ImageClass  PeImage information object (must expose .Size)
        """
        self.BaseName = BaseName
        self.Guid = Guid
        self.Arch = Arch
        self.OutputDir = OutputDir
        self.DebugDir = DebugDir
        self.Image = ImageClass
        PageSize = 0x1000
        self.Image.Size = (self.Image.Size // PageSize + 1) * PageSize
## The class implementing the EDK2 build process
#
# The build process includes:
# 1. Load configuration from target.txt and tools_def.txt in $(WORKSPACE)/Conf
# 2. Parse DSC file of active platform
# 3. Parse FDF file if any
# 4. Establish build database, including parse all other files (module, package)
# 5. Create AutoGen files (C code file, depex file, makefile) if necessary
# 6. Call build command
#
class Build():
## Constructor
#
# Constructor will load all necessary configurations, parse platform, modules
# and packages and the establish a database for AutoGen.
#
# @param Target The build command target, one of gSupportedTarget
# @param WorkspaceDir The directory of workspace
# @param BuildOptions Build options passed from command line
#
    def __init__(self, Target, WorkspaceDir, BuildOptions,log_q):
        """Load configuration, parse the platform, and prepare the database.

        Copies the parsed command-line options into instance state, validates
        the binary-cache option combinations, runs the PREBUILD/POSTBUILD
        hooks' setup, and initializes the build database and per-build
        caches in GlobalData.

        @param Target        The build command target, one of gSupportedTarget
        @param WorkspaceDir  The directory of workspace
        @param BuildOptions  Build options passed from command line
        @param log_q         Queue used by worker processes for logging
        """
        self.WorkspaceDir = WorkspaceDir
        self.Target = Target
        self.PlatformFile = BuildOptions.PlatformFile
        self.ModuleFile = BuildOptions.ModuleFile
        self.ArchList = BuildOptions.TargetArch
        self.ToolChainList = BuildOptions.ToolChain
        self.BuildTargetList= BuildOptions.BuildTarget
        self.Fdf = BuildOptions.FdfFile
        self.FdList = BuildOptions.RomImage
        self.FvList = BuildOptions.FvImage
        self.CapList = BuildOptions.CapName
        self.SilentMode = BuildOptions.SilentMode
        # real thread count is determined later by LoadConfiguration()
        self.ThreadNumber = 1
        self.SkipAutoGen = BuildOptions.SkipAutoGen
        self.Reparse = BuildOptions.Reparse
        self.SkuId = BuildOptions.SkuId
        if self.SkuId:
            GlobalData.gSKUID_CMD = self.SkuId
        self.ConfDirectory = BuildOptions.ConfDirectory
        self.SpawnMode = True
        self.BuildReport = BuildReport(BuildOptions.ReportFile, BuildOptions.ReportType)
        self.AutoGenTime = 0
        self.MakeTime = 0
        self.GenFdsTime = 0
        self.MakeFileName = ""
        TargetObj = TargetTxtDict()
        ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"),"Conf")))
        self.TargetTxt = TargetObj.Target
        self.ToolDef = ToolDefObj.ToolDef
        GlobalData.BuildOptionPcd = BuildOptions.OptionPcd if BuildOptions.OptionPcd else []
        #Set global flag for build mode
        GlobalData.gIgnoreSource = BuildOptions.IgnoreSources
        GlobalData.gUseHashCache = BuildOptions.UseHashCache
        GlobalData.gBinCacheDest = BuildOptions.BinCacheDest
        GlobalData.gBinCacheSource = BuildOptions.BinCacheSource
        GlobalData.gEnableGenfdsMultiThread = not BuildOptions.NoGenfdsMultiThread
        GlobalData.gDisableIncludePathCheck = BuildOptions.DisableIncludePathCheck
        # validate the binary-cache option combinations before using them
        if GlobalData.gBinCacheDest and not GlobalData.gUseHashCache:
            EdkLogger.error("build", OPTION_NOT_SUPPORTED, ExtraData="--binary-destination must be used together with --hash.")
        if GlobalData.gBinCacheSource and not GlobalData.gUseHashCache:
            EdkLogger.error("build", OPTION_NOT_SUPPORTED, ExtraData="--binary-source must be used together with --hash.")
        if GlobalData.gBinCacheDest and GlobalData.gBinCacheSource:
            EdkLogger.error("build", OPTION_NOT_SUPPORTED, ExtraData="--binary-destination can not be used together with --binary-source.")
        if GlobalData.gBinCacheSource:
            BinCacheSource = os.path.normpath(GlobalData.gBinCacheSource)
            if not os.path.isabs(BinCacheSource):
                BinCacheSource = mws.join(self.WorkspaceDir, BinCacheSource)
            GlobalData.gBinCacheSource = BinCacheSource
        else:
            # a non-None falsy value (e.g. empty string) is an invalid option
            if GlobalData.gBinCacheSource is not None:
                EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-source.")
        if GlobalData.gBinCacheDest:
            BinCacheDest = os.path.normpath(GlobalData.gBinCacheDest)
            if not os.path.isabs(BinCacheDest):
                BinCacheDest = mws.join(self.WorkspaceDir, BinCacheDest)
            GlobalData.gBinCacheDest = BinCacheDest
        else:
            if GlobalData.gBinCacheDest is not None:
                EdkLogger.error("build", OPTION_VALUE_INVALID, ExtraData="Invalid value of option --binary-destination.")
        GlobalData.gDatabasePath = os.path.normpath(os.path.join(GlobalData.gConfDirectory, GlobalData.gDatabasePath))
        if not os.path.exists(os.path.join(GlobalData.gConfDirectory, '.cache')):
            os.makedirs(os.path.join(GlobalData.gConfDirectory, '.cache'))
        self.Db = BuildDB
        self.BuildDatabase = self.Db.BuildObject
        self.Platform = None
        self.ToolChainFamily = None
        self.LoadFixAddress = 0
        self.UniFlag = BuildOptions.Flag
        self.BuildModules = []
        self.HashSkipModules = []
        self.Db_Flag = False
        self.LaunchPrebuildFlag = False
        self.PlatformBuildPath = os.path.join(GlobalData.gConfDirectory, '.cache', '.PlatformBuild')
        if BuildOptions.CommandLength:
            GlobalData.gCommandMaxLength = BuildOptions.CommandLength
        # print dot character during doing some time-consuming work
        self.Progress = Utils.Progressor()
        # print current build environment and configuration
        EdkLogger.quiet("%-16s = %s" % ("WORKSPACE", os.environ["WORKSPACE"]))
        if "PACKAGES_PATH" in os.environ:
            # WORKSPACE env has been converted before. Print the same path style with WORKSPACE env.
            EdkLogger.quiet("%-16s = %s" % ("PACKAGES_PATH", os.path.normcase(os.path.normpath(os.environ["PACKAGES_PATH"]))))
        EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_PATH", os.environ["EDK_TOOLS_PATH"]))
        if "EDK_TOOLS_BIN" in os.environ:
            # Print the same path style with WORKSPACE env.
            EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_BIN", os.path.normcase(os.path.normpath(os.environ["EDK_TOOLS_BIN"]))))
        EdkLogger.quiet("%-16s = %s" % ("CONF_PATH", GlobalData.gConfDirectory))
        if "PYTHON3_ENABLE" in os.environ:
            PYTHON3_ENABLE = os.environ["PYTHON3_ENABLE"]
            if PYTHON3_ENABLE != "TRUE":
                PYTHON3_ENABLE = "FALSE"
            EdkLogger.quiet("%-16s = %s" % ("PYTHON3_ENABLE", PYTHON3_ENABLE))
        if "PYTHON_COMMAND" in os.environ:
            EdkLogger.quiet("%-16s = %s" % ("PYTHON_COMMAND", os.environ["PYTHON_COMMAND"]))
        self.InitPreBuild()
        self.InitPostBuild()
        if self.Prebuild:
            EdkLogger.quiet("%-16s = %s" % ("PREBUILD", self.Prebuild))
        if self.Postbuild:
            EdkLogger.quiet("%-16s = %s" % ("POSTBUILD", self.Postbuild))
        if self.Prebuild:
            self.LaunchPrebuild()
            # the prebuild script may have rewritten target.txt/tools_def.txt,
            # so re-load them afterwards
            TargetObj = TargetTxtDict()
            ToolDefObj = ToolDefDict((os.path.join(os.getenv("WORKSPACE"), "Conf")))
            self.TargetTxt = TargetObj.Target
            self.ToolDef = ToolDefObj.ToolDef
        if not (self.LaunchPrebuildFlag and os.path.exists(self.PlatformBuildPath)):
            self.InitBuild()
        self.AutoGenMgr = None
        EdkLogger.info("")
        os.chdir(self.WorkspaceDir)
        self.log_q = log_q
        GlobalData.file_lock = mp.Lock()
        # Init cache data for local only
        GlobalData.gPackageHashFile = dict()
        GlobalData.gModulePreMakeCacheStatus = dict()
        GlobalData.gModuleMakeCacheStatus = dict()
        GlobalData.gHashChainStatus = dict()
        GlobalData.gCMakeHashFile = dict()
        GlobalData.gModuleHashFile = dict()
        GlobalData.gFileHashDict = dict()
        GlobalData.gModuleAllCacheStatus = set()
        GlobalData.gModuleCacheHit = set()
    def StartAutoGen(self,mqueue, DataPipe,SkipAutoGen,PcdMaList,cqueue):
        """Run the AutoGen phase in worker processes.

        Spawns self.ThreadNumber AutoGenWorkerInProcess workers fed from
        mqueue, handles the PCD driver modules in this process, then waits
        for the AutoGenManager to finish.

        @param mqueue       Queue of modules for the worker processes
        @param DataPipe     Serialized build data shared with the workers
        @param SkipAutoGen  When truthy, skip the whole phase
        @param PcdMaList    ModuleAutoGen list of PCD driver modules
        @param cqueue       Cache-status queue shared with the workers

        @retval (bool, int) Success flag and error code (0 on success)
        """
        try:
            if SkipAutoGen:
                return True,0
            feedback_q = mp.Queue()
            error_event = mp.Event()
            FfsCmd = DataPipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            auto_workers = [AutoGenWorkerInProcess(mqueue,DataPipe.dump_file,feedback_q,GlobalData.file_lock,cqueue,self.log_q,error_event) for _ in range(self.ThreadNumber)]
            self.AutoGenMgr = AutoGenManager(auto_workers,feedback_q,error_event)
            self.AutoGenMgr.start()
            for w in auto_workers:
                w.start()
            # PCD driver modules are generated here, not in the workers
            if PcdMaList is not None:
                for PcdMa in PcdMaList:
                    # SourceFileList calling sequence impact the makefile string sequence.
                    # Create cached SourceFileList here to unify its calling sequence for both
                    # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                    RetVal = PcdMa.SourceFileList
                    # Force cache miss for PCD driver
                    if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:
                        cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "PreMakeCache", False))
                    PcdMa.CreateCodeFile(False)
                    PcdMa.CreateMakeFile(False,GenFfsList = DataPipe.Get("FfsCommand").get((PcdMa.MetaFile.Path, PcdMa.Arch),[]))
                    PcdMa.CreateAsBuiltInf()
                    # Force cache miss for PCD driver
                    if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
                        cqueue.put((PcdMa.MetaFile.Path, PcdMa.Arch, "MakeCache", False))
            self.AutoGenMgr.join()
            rt = self.AutoGenMgr.Status
            err = 0
            if not rt:
                err = UNKNOWN_ERROR
            return rt, err
        except FatalError as e:
            return False, e.args[0]
        except:
            # NOTE(review): this bare except swallows the traceback of any
            # unexpected failure (including KeyboardInterrupt) and maps it
            # to UNKNOWN_ERROR - consider logging before returning
            return False, UNKNOWN_ERROR
## Add TOOLCHAIN and FAMILY declared in DSC [BuildOptions] to ToolsDefTxtDatabase.
#
# Loop through the set of build targets, tool chains, and archs provided on either
# the command line or in target.txt to discover FAMILY and TOOLCHAIN delclarations
# in [BuildOptions] sections that may be within !if expressions that may use
# $(TARGET), $(TOOLCHAIN), $(TOOLCHAIN_TAG), or $(ARCH) operands.
#
    def GetToolChainAndFamilyFromDsc (self, File):
        """Add TOOLCHAIN and FAMILY declared in DSC [BuildOptions] to ToolsDefTxtDatabase.

        Loops through every build target, tool chain and arch so that FAMILY
        declarations hidden behind !if expressions (which may use $(TARGET),
        $(TOOLCHAIN), $(TOOL_CHAIN_TAG) or $(ARCH)) are all discovered.
        GlobalData.gGlobalDefines is temporarily mutated for the DSC parse
        and restored at the end.

        @param File  The platform DSC file to scan
        """
        SavedGlobalDefines = GlobalData.gGlobalDefines.copy()
        for BuildTarget in self.BuildTargetList:
            GlobalData.gGlobalDefines['TARGET'] = BuildTarget
            for BuildToolChain in self.ToolChainList:
                GlobalData.gGlobalDefines['TOOLCHAIN'] = BuildToolChain
                GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = BuildToolChain
                for BuildArch in self.ArchList:
                    GlobalData.gGlobalDefines['ARCH'] = BuildArch
                    dscobj = self.BuildDatabase[File, BuildArch]
                    for KeyFamily, Key, KeyCodeBase in dscobj.BuildOptions:
                        # build option keys look like TARGET_TOOLCHAIN_ARCH_TOOL_ATTR
                        try:
                            Target, ToolChain, Arch, Tool, Attr = Key.split('_')
                        except:
                            continue
                        if ToolChain == TAB_STAR or Attr != TAB_TOD_DEFINES_FAMILY:
                            continue
                        try:
                            Family = dscobj.BuildOptions[(KeyFamily, Key, KeyCodeBase)]
                            Family = Family.strip().strip('=').strip()
                        except:
                            continue
                        # register the family for the tool chain (first hit wins)
                        if TAB_TOD_DEFINES_FAMILY not in self.ToolDef.ToolsDefTxtDatabase:
                            self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                        if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                            self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][ToolChain] = Family
                        if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolDef.ToolsDefTxtDatabase:
                            self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                        if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                            self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][ToolChain] = Family
                        if TAB_TOD_DEFINES_TOOL_CHAIN_TAG not in self.ToolDef.ToolsDefTxtDatabase:
                            self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = []
                        if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]:
                            self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].append(ToolChain)
        GlobalData.gGlobalDefines = SavedGlobalDefines
## Load configuration
#
# This method will parse target.txt and get the build configurations.
#
    def LoadConfiguration(self):
        """Resolve the build configuration from command line and target.txt.

        Fills in arch list, build targets, tool chains and the active
        platform (falling back to target.txt, then to a single DSC in the
        current directory), validates the tool chains against tools_def.txt,
        derives the tool chain family list and the thread number.
        """
        # if no ARCH given in command line, get it from target.txt
        if not self.ArchList:
            self.ArchList = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET_ARCH]
        self.ArchList = tuple(self.ArchList)
        # if no build target given in command line, get it from target.txt
        if not self.BuildTargetList:
            self.BuildTargetList = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TARGET]
        # if no tool chain given in command line, get it from target.txt
        if not self.ToolChainList:
            self.ToolChainList = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_TOOL_CHAIN_TAG]
            if self.ToolChainList is None or len(self.ToolChainList) == 0:
                EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. Don't know how to build.\n")
        if not self.PlatformFile:
            PlatformFile = self.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_ACTIVE_PLATFORM]
            if not PlatformFile:
                # Try to find one in current directory
                WorkingDirectory = os.getcwd()
                FileList = glob.glob(os.path.normpath(os.path.join(WorkingDirectory, '*.dsc')))
                FileNum = len(FileList)
                if FileNum >= 2:
                    EdkLogger.error("build", OPTION_MISSING,
                                    ExtraData="There are %d DSC files in %s. Use '-p' to specify one.\n" % (FileNum, WorkingDirectory))
                elif FileNum == 1:
                    PlatformFile = FileList[0]
                else:
                    EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
                                    ExtraData="No active platform specified in target.txt or command line! Nothing can be built.\n")
            self.PlatformFile = PathClass(NormFile(PlatformFile, self.WorkspaceDir), self.WorkspaceDir)
        # pick up FAMILY/TOOLCHAIN declarations made inside the DSC itself
        self.GetToolChainAndFamilyFromDsc (self.PlatformFile)
        # check if the tool chains are defined or not
        NewToolChainList = []
        for ToolChain in self.ToolChainList:
            if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]:
                EdkLogger.warn("build", "Tool chain [%s] is not defined" % ToolChain)
            else:
                NewToolChainList.append(ToolChain)
        # if no tool chain available, break the build
        if len(NewToolChainList) == 0:
            EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
                            ExtraData="[%s] not defined. No toolchain available for build!\n" % ", ".join(self.ToolChainList))
        else:
            self.ToolChainList = NewToolChainList
        # derive the family for each remaining tool chain (default: MSFT)
        ToolChainFamily = []
        ToolDefinition = self.ToolDef.ToolsDefTxtDatabase
        for Tool in self.ToolChainList:
            if TAB_TOD_DEFINES_FAMILY not in ToolDefinition or Tool not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
               or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool]:
                EdkLogger.warn("build", "No tool chain family found in configuration for %s. Default to MSFT." % Tool)
                ToolChainFamily.append(TAB_COMPILER_MSFT)
            else:
                ToolChainFamily.append(ToolDefinition[TAB_TOD_DEFINES_FAMILY][Tool])
        self.ToolChainFamily = ToolChainFamily
        self.ThreadNumber = ThreadNum()
## Initialize build configuration
#
# This method will parse DSC file and merge the configurations from
# command line and target.txt, then get the final build configurations.
#
def InitBuild(self):
# parse target.txt, tools_def.txt, and platform file
self.LoadConfiguration()
# Allow case-insensitive for those from command line or configuration file
ErrorCode, ErrorInfo = self.PlatformFile.Validate(".dsc", False)
if ErrorCode != 0:
EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
    def InitPreBuild(self):
        """Determine the PREBUILD command and publish the build context.

        Validates the platform file, seeds GlobalData.gGlobalDefines with
        the first target/arch/toolchain/family, takes the prebuild command
        from the command line or the DSC, resolves workspace-relative file
        arguments to absolute paths, and appends the standard build options.
        """
        self.LoadConfiguration()
        ErrorCode, ErrorInfo = self.PlatformFile.Validate(".dsc", False)
        if ErrorCode != 0:
            EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
        # expose the first of each list so DSC !if expressions can use them
        if self.BuildTargetList:
            GlobalData.gGlobalDefines['TARGET'] = self.BuildTargetList[0]
        if self.ArchList:
            GlobalData.gGlobalDefines['ARCH'] = self.ArchList[0]
        if self.ToolChainList:
            GlobalData.gGlobalDefines['TOOLCHAIN'] = self.ToolChainList[0]
            GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = self.ToolChainList[0]
        if self.ToolChainFamily:
            GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[0]
        # command line wins over the DSC's PREBUILD define
        if 'PREBUILD' in GlobalData.gCommandLineDefines:
            self.Prebuild = GlobalData.gCommandLineDefines.get('PREBUILD')
        else:
            self.Db_Flag = True
            Platform = self.Db.MapPlatform(str(self.PlatformFile))
            self.Prebuild = str(Platform.Prebuild)
        if self.Prebuild:
            PrebuildList = []
            #
            # Evaluate all arguments and convert arguments that are WORKSPACE
            # relative paths to absolute paths. Filter arguments that look like
            # flags or do not follow the file/dir naming rules to avoid false
            # positives on this conversion.
            #
            for Arg in self.Prebuild.split():
                #
                # Do not modify Arg if it looks like a flag or an absolute file path
                #
                if Arg.startswith('-') or os.path.isabs(Arg):
                    PrebuildList.append(Arg)
                    continue
                #
                # Do not modify Arg if it does not look like a Workspace relative
                # path that starts with a valid package directory name
                #
                if not Arg[0].isalpha() or os.path.dirname(Arg) == '':
                    PrebuildList.append(Arg)
                    continue
                #
                # If Arg looks like a WORKSPACE relative path, then convert to an
                # absolute path and check to see if the file exists.
                #
                Temp = mws.join(self.WorkspaceDir, Arg)
                if os.path.isfile(Temp):
                    Arg = Temp
                PrebuildList.append(Arg)
            self.Prebuild = ' '.join(PrebuildList)
            self.Prebuild += self.PassCommandOption(self.BuildTargetList, self.ArchList, self.ToolChainList, self.PlatformFile, self.Target)
    def InitPostBuild(self):
        """Resolve the POSTBUILD command line (mirrors InitPreBuild).

        Workspace-relative file arguments in self.Postbuild are expanded to
        absolute paths, and the effective build options are appended.
        """
        # A command-line -D POSTBUILD=... overrides the DSC-defined script.
        if 'POSTBUILD' in GlobalData.gCommandLineDefines:
            self.Postbuild = GlobalData.gCommandLineDefines.get('POSTBUILD')
        else:
            Platform = self.Db.MapPlatform(str(self.PlatformFile))
            self.Postbuild = str(Platform.Postbuild)
        if self.Postbuild:
            PostbuildList = []
            #
            # Evaluate all arguments and convert arguments that are WORKSPACE
            # relative paths to absolute paths. Filter arguments that look like
            # flags or do not follow the file/dir naming rules to avoid false
            # positives on this conversion.
            #
            for Arg in self.Postbuild.split():
                #
                # Do not modify Arg if it looks like a flag or an absolute file path
                #
                if Arg.startswith('-') or os.path.isabs(Arg):
                    PostbuildList.append(Arg)
                    continue
                #
                # Do not modify Arg if it does not look like a Workspace relative
                # path that starts with a valid package directory name
                #
                if not Arg[0].isalpha() or os.path.dirname(Arg) == '':
                    PostbuildList.append(Arg)
                    continue
                #
                # If Arg looks like a WORKSPACE relative path, then convert to an
                # absolute path and check to see if the file exists.
                #
                Temp = mws.join(self.WorkspaceDir, Arg)
                if os.path.isfile(Temp):
                    Arg = Temp
                PostbuildList.append(Arg)
            self.Postbuild = ' '.join(PostbuildList)
            # Append the effective -b/-a/-t/-p/--conf options for the script.
            self.Postbuild += self.PassCommandOption(self.BuildTargetList, self.ArchList, self.ToolChainList, self.PlatformFile, self.Target)
def PassCommandOption(self, BuildTarget, TargetArch, ToolChain, PlatformFile, Target):
BuildStr = ''
if GlobalData.gCommand and isinstance(GlobalData.gCommand, list):
BuildStr += ' ' + ' '.join(GlobalData.gCommand)
TargetFlag = False
ArchFlag = False
ToolChainFlag = False
PlatformFileFlag = False
if GlobalData.gOptions and not GlobalData.gOptions.BuildTarget:
TargetFlag = True
if GlobalData.gOptions and not GlobalData.gOptions.TargetArch:
ArchFlag = True
if GlobalData.gOptions and not GlobalData.gOptions.ToolChain:
ToolChainFlag = True
if GlobalData.gOptions and not GlobalData.gOptions.PlatformFile:
PlatformFileFlag = True
if TargetFlag and BuildTarget:
if isinstance(BuildTarget, list) or isinstance(BuildTarget, tuple):
BuildStr += ' -b ' + ' -b '.join(BuildTarget)
elif isinstance(BuildTarget, str):
BuildStr += ' -b ' + BuildTarget
if ArchFlag and TargetArch:
if isinstance(TargetArch, list) or isinstance(TargetArch, tuple):
BuildStr += ' -a ' + ' -a '.join(TargetArch)
elif isinstance(TargetArch, str):
BuildStr += ' -a ' + TargetArch
if ToolChainFlag and ToolChain:
if isinstance(ToolChain, list) or isinstance(ToolChain, tuple):
BuildStr += ' -t ' + ' -t '.join(ToolChain)
elif isinstance(ToolChain, str):
BuildStr += ' -t ' + ToolChain
if PlatformFileFlag and PlatformFile:
if isinstance(PlatformFile, list) or isinstance(PlatformFile, tuple):
BuildStr += ' -p ' + ' -p '.join(PlatformFile)
elif isinstance(PlatformFile, str):
BuildStr += ' -p' + PlatformFile
BuildStr += ' --conf=' + GlobalData.gConfDirectory
if Target:
BuildStr += ' ' + Target
return BuildStr
    def LaunchPrebuild(self):
        """Run the configured PREBUILD command in a shell and import the
        environment variables it sets back into this process.

        Raises a build error (PREBUILD_ERROR) if the script exits non-zero.
        """
        if self.Prebuild:
            EdkLogger.info("\n- Prebuild Start -\n")
            self.LaunchPrebuildFlag = True
            #
            # The purpose of .PrebuildEnv file is capture environment variable settings set by the prebuild script
            # and preserve them for the rest of the main build step, because the child process environment will
            # evaporate as soon as it exits, we cannot get it in build step.
            #
            PrebuildEnvFile = os.path.join(GlobalData.gConfDirectory, '.cache', '.PrebuildEnv')
            if os.path.isfile(PrebuildEnvFile):
                os.remove(PrebuildEnvFile)
            if os.path.isfile(self.PlatformBuildPath):
                os.remove(self.PlatformBuildPath)
            # Chain an environment dump ('set' on Windows, 'env' elsewhere)
            # after the script so its exported variables are captured.
            if sys.platform == "win32":
                args = ' && '.join((self.Prebuild, 'set > ' + PrebuildEnvFile))
                Process = Popen(args, stdout=PIPE, stderr=PIPE, shell=True)
            else:
                args = ' && '.join((self.Prebuild, 'env > ' + PrebuildEnvFile))
                Process = Popen(args, stdout=PIPE, stderr=PIPE, shell=True)

            # launch two threads to read the STDOUT and STDERR
            EndOfProcedure = Event()
            EndOfProcedure.clear()
            if Process.stdout:
                StdOutThread = Thread(target=ReadMessage, args=(Process.stdout, EdkLogger.info, EndOfProcedure))
                StdOutThread.name = "STDOUT-Redirector"
                StdOutThread.daemon = False
                StdOutThread.start()

            if Process.stderr:
                StdErrThread = Thread(target=ReadMessage, args=(Process.stderr, EdkLogger.quiet, EndOfProcedure))
                StdErrThread.name = "STDERR-Redirector"
                StdErrThread.daemon = False
                StdErrThread.start()
            # waiting for program exit
            Process.wait()

            if Process.stdout:
                StdOutThread.join()
            if Process.stderr:
                StdErrThread.join()
            if Process.returncode != 0 :
                EdkLogger.error("Prebuild", PREBUILD_ERROR, 'Prebuild process is not success!')

            if os.path.exists(PrebuildEnvFile):
                # Parse "NAME=VALUE" lines and merge them into our environment;
                # lines without '=' are silently dropped by the length filter.
                f = open(PrebuildEnvFile)
                envs = f.readlines()
                f.close()
                envs = [l.split("=", 1) for l in envs ]
                envs = [[I.strip() for I in item] for item in envs if len(item) == 2]
                os.environ.update(dict(envs))
            EdkLogger.info("\n- Prebuild Done -\n")
def LaunchPostbuild(self):
if self.Postbuild:
EdkLogger.info("\n- Postbuild Start -\n")
if sys.platform == "win32":
Process = Popen(self.Postbuild, stdout=PIPE, stderr=PIPE, shell=True)
else:
Process = Popen(self.Postbuild, stdout=PIPE, stderr=PIPE, shell=True)
# launch two threads to read the STDOUT and STDERR
EndOfProcedure = Event()
EndOfProcedure.clear()
if Process.stdout:
StdOutThread = Thread(target=ReadMessage, args=(Process.stdout, EdkLogger.info, EndOfProcedure))
StdOutThread.name = "STDOUT-Redirector"
StdOutThread.daemon = False
StdOutThread.start()
if Process.stderr:
StdErrThread = Thread(target=ReadMessage, args=(Process.stderr, EdkLogger.quiet, EndOfProcedure))
StdErrThread.name = "STDERR-Redirector"
StdErrThread.daemon = False
StdErrThread.start()
# waiting for program exit
Process.wait()
if Process.stdout:
StdOutThread.join()
if Process.stderr:
StdErrThread.join()
if Process.returncode != 0 :
EdkLogger.error("Postbuild", POSTBUILD_ERROR, 'Postbuild process is not success!')
EdkLogger.info("\n- Postbuild Done -\n")
## Build a module or platform
#
# Create AutoGen code and makefile for a platform, then launch the
# "make" command to build the requested target
#
# @param Target              The target of the build command
# @param AutoGenObject       The PlatformAutoGen object to build
# @param CreateDepsCodeFile  Flag used to indicate creating code
#                            for dependent modules/Libraries
# @param CreateDepsMakeFile  Flag used to indicate creating makefile
#                            for dependent modules/Libraries
# @param BuildModule         Flag to indicate a direct module build
# @param FfsCommand          Dictionary of FFS generation commands,
#                            keyed by (module path, arch)
# @param PcdMaList           List of ModuleAutoGen objects for PCD driver modules
#
    def _BuildPa(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False, FfsCommand=None, PcdMaList=None):
        """Generate AutoGen code/makefiles for a platform (via multiprocess
        workers) and dispatch the requested make target.

        Returns True on success; False when AutoGenObject is None.
        Raises FatalError when the AutoGen worker pool reports failure.
        """
        if AutoGenObject is None:
            return False
        if FfsCommand is None:
            FfsCommand = {}
        # skip file generation for cleanxxx targets, run and fds target
        if Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
            # for target which must generate AutoGen code and makefile
            # Queue every module's info for the worker processes; the all-None
            # tuple is the end-of-queue sentinel.
            mqueue = mp.Queue()
            for m in AutoGenObject.GetAllModuleInfo:
                mqueue.put(m)
            mqueue.put((None,None,None,None,None,None,None))
            AutoGenObject.DataPipe.DataContainer = {"CommandTarget": self.Target}
            AutoGenObject.DataPipe.DataContainer = {"Workspace_timestamp": AutoGenObject.Workspace._SrcTimeStamp}
            AutoGenObject.CreateLibModuelDirs()
            AutoGenObject.DataPipe.DataContainer = {"LibraryBuildDirectoryList":AutoGenObject.LibraryBuildDirectoryList}
            AutoGenObject.DataPipe.DataContainer = {"ModuleBuildDirectoryList":AutoGenObject.ModuleBuildDirectoryList}
            AutoGenObject.DataPipe.DataContainer = {"FdsCommandDict": AutoGenObject.Workspace.GenFdsCommandDict}
            self.Progress.Start("Generating makefile and code")
            # Serialize the data pipe so worker processes can reload global state.
            data_pipe_file = os.path.join(AutoGenObject.BuildDir, "GlobalVar_%s_%s.bin" % (str(AutoGenObject.Guid),AutoGenObject.Arch))
            AutoGenObject.DataPipe.dump(data_pipe_file)
            cqueue = mp.Queue()
            autogen_rt,errorcode = self.StartAutoGen(mqueue, AutoGenObject.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)
            # Record identifying info so a later run can verify AutoGen files.
            AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
            with open(AutoGenIdFile,"w") as fw:
                fw.write("Arch=%s\n" % "|".join((AutoGenObject.Workspace.ArchList)))
                fw.write("BuildDir=%s\n" % AutoGenObject.Workspace.BuildDir)
                fw.write("PlatformGuid=%s\n" % str(AutoGenObject.Guid))
            self.Progress.Stop("done!")
            if not autogen_rt:
                self.AutoGenMgr.TerminateWorkers()
                self.AutoGenMgr.join(1)
                raise FatalError(errorcode)
            AutoGenObject.CreateCodeFile(False)
            AutoGenObject.CreateMakeFile(False)
        else:
            # always recreate top/platform makefile when clean, just in case of inconsistency
            AutoGenObject.CreateCodeFile(True)
            AutoGenObject.CreateMakeFile(True)

        if EdkLogger.GetLevel() == EdkLogger.QUIET:
            EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))

        BuildCommand = AutoGenObject.BuildCommand
        if BuildCommand is None or len(BuildCommand) == 0:
            EdkLogger.error("build", OPTION_MISSING,
                            "No build command found for this module. "
                            "Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
                            (AutoGenObject.BuildTarget, AutoGenObject.ToolChain, AutoGenObject.Arch),
                            ExtraData=str(AutoGenObject))

        # run
        if Target == 'run':
            return True

        # Fetch the MakeFileName.
        self.MakeFileName = AutoGenObject.MakeFileName

        # build modules
        if BuildModule:
            BuildCommand = BuildCommand + [Target]
            LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)
            if GlobalData.gBinCacheDest:
                self.GenDestCache()
            elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
                # Only for --hash
                # Update PreMakeCacheChain files
                self.GenLocalPreMakeCache()
            self.BuildModules = []
            return True

        # build library
        if Target == 'libraries':
            DirList = []
            for Lib in AutoGenObject.LibraryAutoGenList:
                if not Lib.IsBinaryModule:
                    DirList.append((os.path.join(AutoGenObject.BuildDir, Lib.BuildDir),Lib))
            for Lib, LibAutoGen in DirList:
                NewBuildCommand = BuildCommand + ['-f', os.path.normpath(os.path.join(Lib, self.MakeFileName)), 'pbuild']
                LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,LibAutoGen)
            return True

        # build module
        if Target == 'modules':
            # Build all libraries first, then all modules, each via its own
            # per-directory makefile 'pbuild' target.
            DirList = []
            for Lib in AutoGenObject.LibraryAutoGenList:
                if not Lib.IsBinaryModule:
                    DirList.append((os.path.join(AutoGenObject.BuildDir, Lib.BuildDir),Lib))
            for Lib, LibAutoGen in DirList:
                NewBuildCommand = BuildCommand + ['-f', os.path.normpath(os.path.join(Lib, self.MakeFileName)), 'pbuild']
                LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,LibAutoGen)
            DirList = []
            for ModuleAutoGen in AutoGenObject.ModuleAutoGenList:
                if not ModuleAutoGen.IsBinaryModule:
                    DirList.append((os.path.join(AutoGenObject.BuildDir, ModuleAutoGen.BuildDir),ModuleAutoGen))
            for Mod,ModAutoGen in DirList:
                NewBuildCommand = BuildCommand + ['-f', os.path.normpath(os.path.join(Mod, self.MakeFileName)), 'pbuild']
                LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir,ModAutoGen)
            self.CreateAsBuiltInf()
            if GlobalData.gBinCacheDest:
                self.GenDestCache()
            elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
                # Only for --hash
                # Update PreMakeCacheChain files
                self.GenLocalPreMakeCache()
            self.BuildModules = []
            return True

        # cleanlib
        if Target == 'cleanlib':
            for Lib in AutoGenObject.LibraryBuildDirectoryList:
                LibMakefile = os.path.normpath(os.path.join(Lib, self.MakeFileName))
                if os.path.exists(LibMakefile):
                    NewBuildCommand = BuildCommand + ['-f', LibMakefile, 'cleanall']
                    LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir)
            return True

        # clean
        if Target == 'clean':
            for Mod in AutoGenObject.ModuleBuildDirectoryList:
                ModMakefile = os.path.normpath(os.path.join(Mod, self.MakeFileName))
                if os.path.exists(ModMakefile):
                    NewBuildCommand = BuildCommand + ['-f', ModMakefile, 'cleanall']
                    LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir)
            for Lib in AutoGenObject.LibraryBuildDirectoryList:
                LibMakefile = os.path.normpath(os.path.join(Lib, self.MakeFileName))
                if os.path.exists(LibMakefile):
                    NewBuildCommand = BuildCommand + ['-f', LibMakefile, 'cleanall']
                    LaunchCommand(NewBuildCommand, AutoGenObject.MakeFileDir)
            return True

        # cleanall
        if Target == 'cleanall':
            try:
                #os.rmdir(AutoGenObject.BuildDir)
                RemoveDirectory(AutoGenObject.BuildDir, True)
            except WindowsError as X:
                EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X))
        return True
## Build a module or platform
#
# Create AutoGen code and makefile for a module or platform, then launch
# the "make" command to build the requested target
#
# @param Target              The target of the build command
# @param AutoGenObject       The AutoGen object (platform or module) to build
# @param CreateDepsCodeFile  Flag used to indicate creating code
#                            for dependent modules/Libraries
# @param CreateDepsMakeFile  Flag used to indicate creating makefile
#                            for dependent modules/Libraries
# @param BuildModule         Flag to indicate a direct module build
#
    def _Build(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True, BuildModule=False):
        """Generate AutoGen code/makefiles (single-process path) and dispatch
        the requested make target for a module or platform.

        Returns True on success; False when AutoGenObject is None.
        """
        if AutoGenObject is None:
            return False
        # skip file generation for cleanxxx targets, run and fds target
        if Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
            # for target which must generate AutoGen code and makefile
            if not self.SkipAutoGen or Target == 'genc':
                self.Progress.Start("Generating code")
                AutoGenObject.CreateCodeFile(CreateDepsCodeFile)
                self.Progress.Stop("done!")
            if Target == "genc":
                return True

            if not self.SkipAutoGen or Target == 'genmake':
                self.Progress.Start("Generating makefile")
                AutoGenObject.CreateMakeFile(CreateDepsMakeFile)
                #AutoGenObject.CreateAsBuiltInf()
                self.Progress.Stop("done!")
            if Target == "genmake":
                return True
        else:
            # always recreate top/platform makefile when clean, just in case of inconsistency
            AutoGenObject.CreateCodeFile(True)
            AutoGenObject.CreateMakeFile(True)

        if EdkLogger.GetLevel() == EdkLogger.QUIET:
            EdkLogger.quiet("Building ... %s" % repr(AutoGenObject))

        BuildCommand = AutoGenObject.BuildCommand
        if BuildCommand is None or len(BuildCommand) == 0:
            EdkLogger.error("build", OPTION_MISSING,
                            "No build command found for this module. "
                            "Please check your setting of %s_%s_%s_MAKE_PATH in Conf/tools_def.txt file." %
                            (AutoGenObject.BuildTarget, AutoGenObject.ToolChain, AutoGenObject.Arch),
                            ExtraData=str(AutoGenObject))

        # build modules
        if BuildModule:
            if Target != 'fds':
                BuildCommand = BuildCommand + [Target]
            AutoGenObject.BuildTime = LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir)
            self.CreateAsBuiltInf()
            if GlobalData.gBinCacheDest:
                self.GenDestCache()
            elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
                # Only for --hash
                # Update PreMakeCacheChain files
                self.GenLocalPreMakeCache()
            self.BuildModules = []
            return True

        # genfds
        if Target == 'fds':
            # GenFdsApi returns a non-zero/truthy value on failure.
            if GenFdsApi(AutoGenObject.GenFdsCommandDict, self.Db):
                EdkLogger.error("build", COMMAND_FAILURE)
            Threshold = self.GetFreeSizeThreshold()
            if Threshold:
                self.CheckFreeSizeThreshold(Threshold, AutoGenObject.FvDir)
            return True

        # run
        if Target == 'run':
            return True

        # build library
        if Target == 'libraries':
            pass

        # not build modules

        # cleanall
        if Target == 'cleanall':
            try:
                #os.rmdir(AutoGenObject.BuildDir)
                RemoveDirectory(AutoGenObject.BuildDir, True)
            except WindowsError as X:
                EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X))
        return True
## Rebase module image and Get function address for the input module list.
#
def _RebaseModule (self, MapBuffer, BaseAddress, ModuleList, AddrIsOffset = True, ModeIsSmm = False):
if ModeIsSmm:
AddrIsOffset = False
for InfFile in ModuleList:
sys.stdout.write (".")
sys.stdout.flush()
ModuleInfo = ModuleList[InfFile]
ModuleName = ModuleInfo.BaseName
ModuleOutputImage = ModuleInfo.Image.FileName
ModuleDebugImage = os.path.join(ModuleInfo.DebugDir, ModuleInfo.BaseName + '.efi')
## for SMM module in SMRAM, the SMRAM will be allocated from base to top.
if not ModeIsSmm:
BaseAddress = BaseAddress - ModuleInfo.Image.Size
#
# Update Image to new BaseAddress by GenFw tool
#
LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
LaunchCommand(["GenFw", "--rebase", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
else:
#
# Set new address to the section header only for SMM driver.
#
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleOutputImage], ModuleInfo.OutputDir)
LaunchCommand(["GenFw", "--address", str(BaseAddress), "-r", ModuleDebugImage], ModuleInfo.DebugDir)
#
# Collect function address from Map file
#
ImageMapTable = ModuleOutputImage.replace('.efi', '.map')
FunctionList = []
if os.path.exists(ImageMapTable):
OrigImageBaseAddress = 0
ImageMap = open(ImageMapTable, 'r')
for LinStr in ImageMap:
if len (LinStr.strip()) == 0:
continue
#
# Get the preferred address set on link time.
#
if LinStr.find ('Preferred load address is') != -1:
StrList = LinStr.split()
OrigImageBaseAddress = int (StrList[len(StrList) - 1], 16)
StrList = LinStr.split()
if len (StrList) > 4:
if StrList[3] == 'f' or StrList[3] == 'F':
Name = StrList[1]
RelativeAddress = int (StrList[2], 16) - OrigImageBaseAddress
FunctionList.append ((Name, RelativeAddress))
ImageMap.close()
#
# Add general information.
#
if ModeIsSmm:
MapBuffer.append('\n\n%s (Fixed SMRAM Offset, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))
elif AddrIsOffset:
MapBuffer.append('\n\n%s (Fixed Memory Offset, BaseAddress=-0x%010X, EntryPoint=-0x%010X)\n' % (ModuleName, 0 - BaseAddress, 0 - (BaseAddress + ModuleInfo.Image.EntryPoint)))
else:
MapBuffer.append('\n\n%s (Fixed Memory Address, BaseAddress=0x%010X, EntryPoint=0x%010X)\n' % (ModuleName, BaseAddress, BaseAddress + ModuleInfo.Image.EntryPoint))
#
# Add guid and general seciton section.
#
TextSectionAddress = 0
DataSectionAddress = 0
for SectionHeader in ModuleInfo.Image.SectionHeaderList:
if SectionHeader[0] == '.text':
TextSectionAddress = SectionHeader[1]
elif SectionHeader[0] in ['.data', '.sdata']:
DataSectionAddress = SectionHeader[1]
if AddrIsOffset:
MapBuffer.append('(GUID=%s, .textbaseaddress=-0x%010X, .databaseaddress=-0x%010X)\n' % (ModuleInfo.Guid, 0 - (BaseAddress + TextSectionAddress), 0 - (BaseAddress + DataSectionAddress)))
else:
MapBuffer.append('(GUID=%s, .textbaseaddress=0x%010X, .databaseaddress=0x%010X)\n' % (ModuleInfo.Guid, BaseAddress + TextSectionAddress, BaseAddress + DataSectionAddress))
#
# Add debug image full path.
#
MapBuffer.append('(IMAGE=%s)\n\n' % (ModuleDebugImage))
#
# Add function address
#
for Function in FunctionList:
if AddrIsOffset:
MapBuffer.append(' -0x%010X %s\n' % (0 - (BaseAddress + Function[1]), Function[0]))
else:
MapBuffer.append(' 0x%010X %s\n' % (BaseAddress + Function[1], Function[0]))
ImageMap.close()
#
# for SMM module in SMRAM, the SMRAM will be allocated from base to top.
#
if ModeIsSmm:
BaseAddress = BaseAddress + ModuleInfo.Image.Size
## Collect MAP information of all FVs
#
    def _CollectFvMapBuffer (self, MapBuffer, Wa, ModuleList):
        """Append all FV map files to MapBuffer, replacing module GUIDs with
        module names and inserting each module's debug image path.

        ModuleList maps upper-case GUID strings to ModuleAutoGen objects.
        """
        if self.Fdf:
            # First get the XIP base address for FV map file.
            GuidPattern = re.compile("[-a-fA-F0-9]+")
            GuidName = re.compile(r"\(GUID=[-a-fA-F0-9]+")
            for FvName in Wa.FdfProfile.FvDict:
                FvMapBuffer = os.path.join(Wa.FvDir, FvName + '.Fv.map')
                if not os.path.exists(FvMapBuffer):
                    continue
                FvMap = open(FvMapBuffer, 'r')
                #skip FV size information
                FvMap.readline()
                FvMap.readline()
                FvMap.readline()
                FvMap.readline()
                for Line in FvMap:
                    MatchGuid = GuidPattern.match(Line)
                    if MatchGuid is not None:
                        #
                        # Replace GUID with module name
                        #
                        GuidString = MatchGuid.group()
                        if GuidString.upper() in ModuleList:
                            Line = Line.replace(GuidString, ModuleList[GuidString.upper()].Name)
                    MapBuffer.append(Line)
                    #
                    # Add the debug image full path.
                    #
                    MatchGuid = GuidName.match(Line)
                    if MatchGuid is not None:
                        GuidString = MatchGuid.group().split("=")[1]
                        if GuidString.upper() in ModuleList:
                            MapBuffer.append('(IMAGE=%s)\n' % (os.path.join(ModuleList[GuidString.upper()].DebugDir, ModuleList[GuidString.upper()].Name + '.efi')))

                FvMap.close()
## Collect MAP information of all modules
#
    def _CollectModuleMapBuffer (self, MapBuffer, ModuleList):
        """Collect the fixed-load-address map for all modules into MapBuffer.

        Buckets module images by phase (PEI/BT/RT/SMM), patches the
        fix-address PCDs into patchable images, then rebases each bucket
        downward from the configured top memory address (SMM upward from
        offset 0x1000).
        """
        sys.stdout.write ("Generate Load Module At Fix Address Map")
        sys.stdout.flush()
        PatchEfiImageList = []
        PeiModuleList = {}
        BtModuleList = {}
        RtModuleList = {}
        SmmModuleList = {}
        PeiSize = 0
        BtSize = 0
        RtSize = 0
        # reserve 4K size in SMRAM to make SMM module address not from 0.
        SmmSize = 0x1000
        for ModuleGuid in ModuleList:
            Module = ModuleList[ModuleGuid]
            GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (Module.MetaFile, Module.Arch, Module.ToolChain, Module.BuildTarget)

            OutputImageFile = ''
            for ResultFile in Module.CodaTargetList:
                if str(ResultFile.Target).endswith('.efi'):
                    #
                    # module list for PEI, DXE, RUNTIME and SMM
                    #
                    OutputImageFile = os.path.join(Module.OutputDir, Module.Name + '.efi')
                    ImageClass = PeImageClass (OutputImageFile)
                    if not ImageClass.IsValid:
                        EdkLogger.error("build", FILE_PARSE_FAILURE, ExtraData=ImageClass.ErrorInfo)
                    ImageInfo = PeImageInfo(Module.Name, Module.Guid, Module.Arch, Module.OutputDir, Module.DebugDir, ImageClass)
                    # Bucket the image by execution phase and accumulate sizes.
                    if Module.ModuleType in [SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER, EDK_COMPONENT_TYPE_PIC_PEIM, EDK_COMPONENT_TYPE_RELOCATABLE_PEIM, SUP_MODULE_DXE_CORE]:
                        PeiModuleList[Module.MetaFile] = ImageInfo
                        PeiSize += ImageInfo.Image.Size
                    elif Module.ModuleType in [EDK_COMPONENT_TYPE_BS_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                        BtModuleList[Module.MetaFile] = ImageInfo
                        BtSize += ImageInfo.Image.Size
                    elif Module.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER, EDK_COMPONENT_TYPE_RT_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, EDK_COMPONENT_TYPE_SAL_RT_DRIVER]:
                        RtModuleList[Module.MetaFile] = ImageInfo
                        RtSize += ImageInfo.Image.Size
                    elif Module.ModuleType in [SUP_MODULE_SMM_CORE, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
                        SmmModuleList[Module.MetaFile] = ImageInfo
                        SmmSize += ImageInfo.Image.Size
                        if Module.ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
                            PiSpecVersion = Module.Module.Specification.get('PI_SPECIFICATION_VERSION', '0x00000000')
                            # for PI specification < PI1.1, DXE_SMM_DRIVER also runs as BOOT time driver.
                            if int(PiSpecVersion, 16) < 0x0001000A:
                                BtModuleList[Module.MetaFile] = ImageInfo
                                BtSize += ImageInfo.Image.Size
                    break
            #
            # EFI image is final target.
            # Check EFI image contains patchable FixAddress related PCDs.
            #
            if OutputImageFile != '':
                ModuleIsPatch = False
                for Pcd in Module.ModulePcdList:
                    if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE and Pcd.TokenCName in TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SET:
                        ModuleIsPatch = True
                        break
                if not ModuleIsPatch:
                    for Pcd in Module.LibraryPcdList:
                        if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE and Pcd.TokenCName in TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SET:
                            ModuleIsPatch = True
                            break

                if not ModuleIsPatch:
                    continue
                #
                # Module includes the patchable load fix address PCDs.
                # It will be fixed up later.
                #
                PatchEfiImageList.append (OutputImageFile)

        #
        # Get Top Memory address
        #
        ReservedRuntimeMemorySize = 0
        TopMemoryAddress = 0
        if self.LoadFixAddress == 0xFFFFFFFFFFFFFFFF:
            TopMemoryAddress = 0
        else:
            TopMemoryAddress = self.LoadFixAddress
            if TopMemoryAddress < RtSize + BtSize + PeiSize:
                EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is too low to load driver")

        #
        # Patch FixAddress related PCDs into EFI image
        #
        for EfiImage in PatchEfiImageList:
            EfiImageMap = EfiImage.replace('.efi', '.map')
            if not os.path.exists(EfiImageMap):
                continue
            #
            # Get PCD offset in EFI image by GenPatchPcdTable function
            #
            PcdTable = parsePcdInfoFromMapFile(EfiImageMap, EfiImage)
            #
            # Patch real PCD value by PatchPcdValue tool
            #
            for PcdInfo in PcdTable:
                ReturnValue = 0
                if PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE:
                    ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE, str (PeiSize // 0x1000))
                elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE:
                    ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE, str (BtSize // 0x1000))
                elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE:
                    ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE, str (RtSize // 0x1000))
                elif PcdInfo[0] == TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE and len (SmmModuleList) > 0:
                    ReturnValue, ErrorInfo = PatchBinaryFile (EfiImage, PcdInfo[1], TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE, str (SmmSize // 0x1000))
                if ReturnValue != 0:
                    EdkLogger.error("build", PARAMETER_INVALID, "Patch PCD value failed", ExtraData=ErrorInfo)

        MapBuffer.append('PEI_CODE_PAGE_NUMBER = 0x%x\n' % (PeiSize // 0x1000))
        MapBuffer.append('BOOT_CODE_PAGE_NUMBER = 0x%x\n' % (BtSize // 0x1000))
        MapBuffer.append('RUNTIME_CODE_PAGE_NUMBER = 0x%x\n' % (RtSize // 0x1000))
        if len (SmmModuleList) > 0:
            MapBuffer.append('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize // 0x1000))

        # Lay out regions downward from the top address: PEI below BT below RT.
        PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
        BtBaseAddr = TopMemoryAddress - RtSize
        RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize

        self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
        self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
        self._RebaseModule (MapBuffer, RtBaseAddr, RtModuleList, TopMemoryAddress == 0)
        self._RebaseModule (MapBuffer, 0x1000, SmmModuleList, AddrIsOffset=False, ModeIsSmm=True)
        MapBuffer.append('\n\n')
        sys.stdout.write ("\n")
        sys.stdout.flush()
## Save platform Map file
#
def _SaveMapFile (self, MapBuffer, Wa):
#
# Map file path is got.
#
MapFilePath = os.path.join(Wa.BuildDir, Wa.Name + '.map')
#
# Save address map into MAP file.
#
SaveFileOnChange(MapFilePath, ''.join(MapBuffer), False)
if self.LoadFixAddress != 0:
sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" % (MapFilePath))
sys.stdout.flush()
## Build active platform for different build targets and different tool chains
#
    def _BuildPlatform(self):
        """Build the active platform for every requested build target and
        tool chain, then (for full/fds builds) generate FDS images and the
        fixed-address MAP file.
        """
        SaveFileOnChange(self.PlatformBuildPath, '# DO NOT EDIT \n# FILE auto-generated\n', False)
        for BuildTarget in self.BuildTargetList:
            GlobalData.gGlobalDefines['TARGET'] = BuildTarget
            index = 0
            for ToolChain in self.ToolChainList:
                GlobalData.gGlobalDefines['TOOLCHAIN'] = ToolChain
                GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = ToolChain
                # ToolChainFamily is kept parallel to ToolChainList by index.
                GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[index]
                index += 1
                Wa = WorkspaceAutoGen(
                        self.WorkspaceDir,
                        self.PlatformFile,
                        BuildTarget,
                        ToolChain,
                        self.ArchList,
                        self.BuildDatabase,
                        self.TargetTxt,
                        self.ToolDef,
                        self.Fdf,
                        self.FdList,
                        self.FvList,
                        self.CapList,
                        self.SkuId,
                        self.UniFlag,
                        self.Progress
                        )
                self.Fdf = Wa.FdfFile
                self.LoadFixAddress = Wa.Platform.LoadFixAddress
                self.BuildReport.AddPlatformReport(Wa)
                self.Progress.Stop("done!")

                # Add ffs build to makefile
                CmdListDict = {}
                if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
                    CmdListDict = self._GenFfsCmd(Wa.ArchList)

                for Arch in Wa.ArchList:
                    PcdMaList = []
                    GlobalData.gGlobalDefines['ARCH'] = Arch
                    Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
                    for Module in Pa.Platform.Modules:
                        # Get ModuleAutoGen object to generate C code file and makefile
                        Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
                        if Ma is None:
                            continue
                        if Ma.PcdIsDriver:
                            Ma.PlatformInfo = Pa
                            Ma.Workspace = Wa
                            PcdMaList.append(Ma)
                        self.BuildModules.append(Ma)
                    Pa.DataPipe.DataContainer = {"FfsCommand":CmdListDict}
                    Pa.DataPipe.DataContainer = {"Workspace_timestamp": Wa._SrcTimeStamp}
                    self._BuildPa(self.Target, Pa, FfsCommand=CmdListDict,PcdMaList=PcdMaList)

                # Create MAP file when Load Fix Address is enabled.
                if self.Target in ["", "all", "fds"]:
                    for Arch in Wa.ArchList:
                        GlobalData.gGlobalDefines['ARCH'] = Arch
                        #
                        # Check whether the set fix address is above 4G for 32bit image.
                        #
                        if (Arch == 'IA32' or Arch == 'ARM') and self.LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self.LoadFixAddress >= 0x100000000:
                            EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS can't be set to larger than or equal to 4G for the platform with IA32 or ARM arch modules")
                    #
                    # Get Module List
                    #
                    ModuleList = {}
                    for Pa in Wa.AutoGenObjectList:
                        for Ma in Pa.ModuleAutoGenList:
                            if Ma is None:
                                continue
                            if not Ma.IsLibrary:
                                ModuleList[Ma.Guid.upper()] = Ma

                    MapBuffer = []
                    if self.LoadFixAddress != 0:
                        #
                        # Rebase module to the preferred memory address before GenFds
                        #
                        self._CollectModuleMapBuffer(MapBuffer, ModuleList)
                    if self.Fdf:
                        #
                        # create FDS again for the updated EFI image
                        #
                        self._Build("fds", Wa)
                        #
                        # Create MAP file for all platform FVs after GenFds.
                        #
                        self._CollectFvMapBuffer(MapBuffer, Wa, ModuleList)
                    #
                    # Save MAP buffer into MAP file.
                    #
                    self._SaveMapFile (MapBuffer, Wa)
                    self.CreateGuidedSectionToolsFile(Wa)
## Build active module for different build targets, different tool chains and different archs
#
def _BuildModule(self):
for BuildTarget in self.BuildTargetList:
GlobalData.gGlobalDefines['TARGET'] = BuildTarget
index = 0
for ToolChain in self.ToolChainList:
WorkspaceAutoGenTime = time.time()
GlobalData.gGlobalDefines['TOOLCHAIN'] = ToolChain
GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = ToolChain
GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[index]
index += 1
#
# module build needs platform build information, so get platform
# AutoGen first
#
Wa = WorkspaceAutoGen(
self.WorkspaceDir,
self.PlatformFile,
BuildTarget,
ToolChain,
self.ArchList,
self.BuildDatabase,
self.TargetTxt,
self.ToolDef,
self.Fdf,
self.FdList,
self.FvList,
self.CapList,
self.SkuId,
self.UniFlag,
self.Progress,
self.ModuleFile
)
self.Fdf = Wa.FdfFile
self.LoadFixAddress = Wa.Platform.LoadFixAddress
Wa.CreateMakeFile(False)
# Add ffs build to makefile
CmdListDict = None
if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
CmdListDict = self._GenFfsCmd(Wa.ArchList)
GlobalData.file_lock = mp.Lock()
GlobalData.FfsCmd = CmdListDict
self.Progress.Stop("done!")
MaList = []
ExitFlag = threading.Event()
ExitFlag.clear()
self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))
for Arch in Wa.ArchList:
AutoGenStart = time.time()
GlobalData.gGlobalDefines['ARCH'] = Arch
Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
for Module in Pa.Platform.Modules:
if self.ModuleFile.Dir == Module.Dir and self.ModuleFile.Name == Module.Name:
Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
if Ma is None:
continue
if Ma.PcdIsDriver:
Ma.PlatformInfo = Pa
Ma.Workspace = Wa
MaList.append(Ma)
if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and self.Target in [None, "", "all"]:
if Ma.CanSkipbyPreMakeCache():
continue
else:
self.PreMakeCacheMiss.add(Ma)
# Not to auto-gen for targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds'
if self.Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']:
# for target which must generate AutoGen code and makefile
if not self.SkipAutoGen or self.Target == 'genc':
self.Progress.Start("Generating code")
Ma.CreateCodeFile(True)
self.Progress.Stop("done!")
if self.Target == "genc":
return True
if not self.SkipAutoGen or self.Target == 'genmake':
self.Progress.Start("Generating makefile")
if CmdListDict and self.Fdf and (Module.Path, Arch) in CmdListDict:
Ma.CreateMakeFile(True, CmdListDict[Module.Path, Arch])
del CmdListDict[Module.Path, Arch]
else:
Ma.CreateMakeFile(True)
self.Progress.Stop("done!")
if self.Target == "genmake":
return True
if GlobalData.gBinCacheSource and self.Target in [None, "", "all"]:
if Ma.CanSkipbyMakeCache():
continue
else:
self.MakeCacheMiss.add(Ma)
self.BuildModules.append(Ma)
self.AutoGenTime += int(round((time.time() - AutoGenStart)))
MakeStart = time.time()
for Ma in self.BuildModules:
if not Ma.IsBinaryModule:
Bt = BuildTask.New(ModuleMakeUnit(Ma, Pa.BuildCommand,self.Target))
# Break build if any build thread has error
if BuildTask.HasError():
# we need a full version of makefile for platform
ExitFlag.set()
BuildTask.WaitForComplete()
Pa.CreateMakeFile(False)
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
# Start task scheduler
if not BuildTask.IsOnGoing():
BuildTask.StartScheduler(self.ThreadNumber, ExitFlag)
# in case there's an interruption. we need a full version of makefile for platform
Pa.CreateMakeFile(False)
if BuildTask.HasError():
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
self.MakeTime += int(round((time.time() - MakeStart)))
MakeContiue = time.time()
ExitFlag.set()
BuildTask.WaitForComplete()
self.CreateAsBuiltInf()
if GlobalData.gBinCacheDest:
self.GenDestCache()
elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
# Only for --hash
# Update PreMakeCacheChain files
self.GenLocalPreMakeCache()
self.BuildModules = []
self.MakeTime += int(round((time.time() - MakeContiue)))
if BuildTask.HasError():
EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
self.BuildReport.AddPlatformReport(Wa, MaList)
if MaList == []:
EdkLogger.error(
'build',
BUILD_ERROR,
"Module for [%s] is not a component of active platform."\
" Please make sure that the ARCH and inf file path are"\
" given in the same as in [%s]" % \
(', '.join(Wa.ArchList), self.PlatformFile),
ExtraData=self.ModuleFile
)
# Create MAP file when Load Fix Address is enabled.
if self.Target == "fds" and self.Fdf:
for Arch in Wa.ArchList:
#
# Check whether the set fix address is above 4G for 32bit image.
#
if (Arch == 'IA32' or Arch == 'ARM') and self.LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self.LoadFixAddress >= 0x100000000:
EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS can't be set to larger than or equal to 4G for the platorm with IA32 or ARM arch modules")
#
# Get Module List
#
ModuleList = {}
for Pa in Wa.AutoGenObjectList:
for Ma in Pa.ModuleAutoGenList:
if Ma is None:
continue
if not Ma.IsLibrary:
ModuleList[Ma.Guid.upper()] = Ma
MapBuffer = []
if self.LoadFixAddress != 0:
#
# Rebase module to the preferred memory address before GenFds
#
self._CollectModuleMapBuffer(MapBuffer, ModuleList)
#
# create FDS again for the updated EFI image
#
GenFdsStart = time.time()
self._Build("fds", Wa)
self.GenFdsTime += int(round((time.time() - GenFdsStart)))
#
# Create MAP file for all platform FVs after GenFds.
#
self._CollectFvMapBuffer(MapBuffer, Wa, ModuleList)
#
# Save MAP buffer into MAP file.
#
self._SaveMapFile (MapBuffer, Wa)
def _GenFfsCmd(self,ArchList):
    # Invert GenFds' {command: (inf, arch)} mapping into
    # {(inf, arch): set(commands)} so each module/arch pair can look up
    # all of its FFS generation commands at once.
    FfsCmdSets = defaultdict(set)
    FfsCmdMap = GenFds.GenFfsMakefile('', GlobalData.gFdfParser, self, ArchList, GlobalData)
    for FfsCmd, (OwnerInf, OwnerArch) in FfsCmdMap.items():
        FfsCmdSets[OwnerInf, OwnerArch].add(FfsCmd)
    return FfsCmdSets
def VerifyAutoGenFiles(self):
    """Validate the artifacts left on disk by a previous AutoGen phase.

    Reads Conf/.AutoGenIdFile.txt (written by PerformAutoGen) for the arch
    list, build directory and platform GUID, then checks that every per-arch
    GlobalVar data-pipe file and every recorded library/module makefile still
    exists.

    @retval WorkSpaceInfo   reconstructed workspace info when everything checks out
    @retval None            when anything is missing or unreadable; the caller
                            then falls back to a full AutoGen run
    """
    AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
    try:
        with open(AutoGenIdFile) as fd:
            lines = fd.readlines()
    except OSError:
        # No id file (or unreadable) simply means there is nothing to verify.
        return None
    ArchList = None
    BuildDir = None
    PlatformGuid = None
    for line in lines:
        if "Arch" in line:
            ArchList = line.strip().split("=")[1].split("|")
        if "BuildDir" in line:
            BuildDir = line.split("=")[1].strip()
        if "PlatformGuid" in line:
            PlatformGuid = line.split("=")[1].strip()
    # A truncated or hand-edited id file used to raise NameError below;
    # treat it the same as a missing file.
    if ArchList is None or BuildDir is None or PlatformGuid is None:
        return None
    GlobalVarList = []
    for arch in ArchList:
        global_var = os.path.join(BuildDir, "GlobalVar_%s_%s.bin" % (str(PlatformGuid),arch))
        if not os.path.exists(global_var):
            return None
        GlobalVarList.append(global_var)
    for global_var in GlobalVarList:
        data_pipe = MemoryDataPipe()
        data_pipe.load(global_var)
        target = data_pipe.Get("P_Info").get("Target")
        toolchain = data_pipe.Get("P_Info").get("ToolChain")
        archlist = data_pipe.Get("P_Info").get("ArchList")
        Arch = data_pipe.Get("P_Info").get("Arch")
        active_p = data_pipe.Get("P_Info").get("ActivePlatform")
        workspacedir = data_pipe.Get("P_Info").get("WorkspaceDir")
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(workspacedir, PackagesPath)
        LibraryBuildDirectoryList = data_pipe.Get("LibraryBuildDirectoryList")
        ModuleBuildDirectoryList = data_pipe.Get("ModuleBuildDirectoryList")
        # Every previously generated makefile must still be present.
        for m_build_dir in LibraryBuildDirectoryList:
            if not os.path.exists(os.path.join(m_build_dir,self.MakeFileName)):
                return None
        for m_build_dir in ModuleBuildDirectoryList:
            if not os.path.exists(os.path.join(m_build_dir,self.MakeFileName)):
                return None
        Wa = WorkSpaceInfo(
            workspacedir,active_p,target,toolchain,archlist
            )
        Pa = PlatformInfo(Wa, active_p, target, toolchain, Arch,data_pipe)
        Wa.AutoGenObjectList.append(Pa)
    return Wa
def SetupMakeSetting(self,Wa):
    """Rebuild the module list and FDF state for a --skip-autogen build.

    Re-creates a ModuleAutoGen for every module recorded in the restored
    workspace, re-parses the FDF file (when one is configured) to repopulate
    GlobalData.gFdfParser / Wa.FdfProfile / self.Fdf, and validates the VPD
    region alignment.

    @param Wa:  WorkSpaceInfo returned by VerifyAutoGenFiles()
    @retval list of ModuleAutoGen objects to build
    """
    BuildModules = []
    for Pa in Wa.AutoGenObjectList:
        for m in Pa._MbList:
            ma = ModuleAutoGen(Wa,m.MetaFile, Pa.BuildTarget, Wa.ToolChain, Pa.Arch, Pa.MetaFile,Pa.DataPipe)
            BuildModules.append(ma)
    fdf_file = Wa.FlashDefinition
    if fdf_file:
        Fdf = FdfParser(fdf_file.Path)
        Fdf.ParseFile()
        GlobalData.gFdfParser = Fdf
        if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
            FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
            for FdRegion in FdDict.RegionList:
                # The VPD data FILE region must be 8-byte aligned for the VPD tool.
                if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
                    if int(FdRegion.Offset) % 8 != 0:
                        EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
        Wa.FdfProfile = Fdf.Profile
        self.Fdf = Fdf
    else:
        self.Fdf = None
    return BuildModules
## Run the AutoGen (code/makefile generation) phase for one target/toolchain.
# Used by the multi-threaded platform build path.
#
def PerformAutoGen(self,BuildTarget,ToolChain):
    """Create Workspace/Platform/Module AutoGen objects and generate sources.

    Spawns worker processes (via StartAutoGen) to generate autogen code and
    makefiles for every module, records build-cache hit/miss sets, and writes
    Conf/.AutoGenIdFile.txt so a later --skip-autogen build can verify the
    artifacts.

    @param BuildTarget: build target name (e.g. DEBUG/RELEASE)
    @param ToolChain:   tool chain tag to build with
    @retval (Wa, BuildModules): workspace autogen object and the module
            autogen objects that still need to be built
    """
    WorkspaceAutoGenTime = time.time()
    Wa = WorkspaceAutoGen(
            self.WorkspaceDir,
            self.PlatformFile,
            BuildTarget,
            ToolChain,
            self.ArchList,
            self.BuildDatabase,
            self.TargetTxt,
            self.ToolDef,
            self.Fdf,
            self.FdList,
            self.FvList,
            self.CapList,
            self.SkuId,
            self.UniFlag,
            self.Progress
            )
    self.Fdf = Wa.FdfFile
    self.LoadFixAddress = Wa.Platform.LoadFixAddress
    self.BuildReport.AddPlatformReport(Wa)
    Wa.CreateMakeFile(False)

    # Add ffs build to makefile
    CmdListDict = {}
    if GlobalData.gEnableGenfdsMultiThread and self.Fdf:
        CmdListDict = self._GenFfsCmd(Wa.ArchList)

    self.AutoGenTime += int(round((time.time() - WorkspaceAutoGenTime)))
    BuildModules = []
    for Arch in Wa.ArchList:
        PcdMaList = []
        AutoGenStart = time.time()
        GlobalData.gGlobalDefines['ARCH'] = Arch
        Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch)
        if Pa is None:
            continue
        ModuleList = []
        for Inf in Pa.Platform.Modules:
            ModuleList.append(Inf)
        # Add the INF only list in FDF
        if GlobalData.gFdfParser is not None:
            for InfName in GlobalData.gFdfParser.Profile.InfList:
                Inf = PathClass(NormPath(InfName), self.WorkspaceDir, Arch)
                if Inf in Pa.Platform.Modules:
                    continue
                ModuleList.append(Inf)
        # Each assignment to DataContainer merges one more shared-state entry
        # into the data pipe handed to the worker processes.
        Pa.DataPipe.DataContainer = {"FfsCommand":CmdListDict}
        Pa.DataPipe.DataContainer = {"Workspace_timestamp": Wa._SrcTimeStamp}
        Pa.DataPipe.DataContainer = {"CommandTarget": self.Target}
        Pa.CreateLibModuelDirs()
        # Fetch the MakeFileName.
        self.MakeFileName = Pa.MakeFileName
        Pa.DataPipe.DataContainer = {"LibraryBuildDirectoryList":Pa.LibraryBuildDirectoryList}
        Pa.DataPipe.DataContainer = {"ModuleBuildDirectoryList":Pa.ModuleBuildDirectoryList}
        Pa.DataPipe.DataContainer = {"FdsCommandDict": Wa.GenFdsCommandDict}
        # Prepare the cache share data for multiprocessing
        Pa.DataPipe.DataContainer = {"gPlatformHashFile":GlobalData.gPlatformHashFile}
        ModuleCodaFile = {}
        for ma in Pa.ModuleAutoGenList:
            ModuleCodaFile[(ma.MetaFile.File,ma.MetaFile.Root,ma.Arch,ma.MetaFile.Path)] = [item.Target for item in ma.CodaTargetList]
        Pa.DataPipe.DataContainer = {"ModuleCodaFile":ModuleCodaFile}
        # ModuleList contains all driver modules only
        for Module in ModuleList:
            # Get ModuleAutoGen object to generate C code file and makefile
            Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
            if Ma is None:
                continue
            if Ma.PcdIsDriver:
                Ma.PlatformInfo = Pa
                Ma.Workspace = Wa
                PcdMaList.append(Ma)
            self.AllDrivers.add(Ma)
            self.AllModules.add(Ma)

        mqueue = mp.Queue()
        cqueue = mp.Queue()
        for m in Pa.GetAllModuleInfo:
            mqueue.put(m)
            module_file,module_root,module_path,module_basename,\
                module_originalpath,module_arch,IsLib = m
            Ma = ModuleAutoGen(Wa, PathClass(module_path, Wa), BuildTarget,\
                               ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
            self.AllModules.add(Ma)
        data_pipe_file = os.path.join(Pa.BuildDir, "GlobalVar_%s_%s.bin" % (str(Pa.Guid),Pa.Arch))
        Pa.DataPipe.dump(data_pipe_file)

        # Sentinel tuple tells the worker processes the queue is exhausted.
        mqueue.put((None,None,None,None,None,None,None))
        autogen_rt, errorcode = self.StartAutoGen(mqueue, Pa.DataPipe, self.SkipAutoGen, PcdMaList, cqueue)

        if not autogen_rt:
            self.AutoGenMgr.TerminateWorkers()
            self.AutoGenMgr.join(1)
            raise FatalError(errorcode)

        if GlobalData.gUseHashCache:
            for item in GlobalData.gModuleAllCacheStatus:
                (MetaFilePath, Arch, CacheStr, Status) = item
                Ma = ModuleAutoGen(Wa, PathClass(MetaFilePath, Wa), BuildTarget,\
                                   ToolChain, Arch, self.PlatformFile,Pa.DataPipe)
                if CacheStr == "PreMakeCache" and Status == False:
                    self.PreMakeCacheMiss.add(Ma)
                if CacheStr == "PreMakeCache" and Status == True:
                    self.PreMakeCacheHit.add(Ma)
                    GlobalData.gModuleCacheHit.add(Ma)
                if CacheStr == "MakeCache" and Status == False:
                    self.MakeCacheMiss.add(Ma)
                if CacheStr == "MakeCache" and Status == True:
                    self.MakeCacheHit.add(Ma)
                    GlobalData.gModuleCacheHit.add(Ma)
        self.AutoGenTime += int(round((time.time() - AutoGenStart)))
    # Record what was generated so --skip-autogen can verify it later.
    AutoGenIdFile = os.path.join(GlobalData.gConfDirectory,".AutoGenIdFile.txt")
    with open(AutoGenIdFile,"w") as fw:
        fw.write("Arch=%s\n" % "|".join((Wa.ArchList)))
        fw.write("BuildDir=%s\n" % Wa.BuildDir)
        fw.write("PlatformGuid=%s\n" % str(Wa.AutoGenObjectList[0].Guid))

    if GlobalData.gBinCacheSource:
        BuildModules.extend(self.MakeCacheMiss)
    elif GlobalData.gUseHashCache and not GlobalData.gBinCacheDest:
        BuildModules.extend(self.PreMakeCacheMiss)
    else:
        BuildModules.extend(self.AllDrivers)

    self.Progress.Stop("done!")
    return Wa, BuildModules
def _MultiThreadBuildPlatform(self):
    """Build the active platform using the multi-threaded task scheduler.

    For every TARGET/TOOLCHAIN pair: run (or verify and skip) AutoGen, queue
    one make task per non-binary module, wait for the scheduler, update the
    binary cache, optionally run GenFds, and emit MAP/GuidedSectionTools
    files.
    """
    SaveFileOnChange(self.PlatformBuildPath, '# DO NOT EDIT \n# FILE auto-generated\n', False)
    for BuildTarget in self.BuildTargetList:
        GlobalData.gGlobalDefines['TARGET'] = BuildTarget
        index = 0
        for ToolChain in self.ToolChainList:
            resetFdsGlobalVariable()
            GlobalData.gGlobalDefines['TOOLCHAIN'] = ToolChain
            GlobalData.gGlobalDefines['TOOL_CHAIN_TAG'] = ToolChain
            GlobalData.gGlobalDefines['FAMILY'] = self.ToolChainFamily[index]
            index += 1
            # ExitFlag tells the scheduler thread when no more tasks will come.
            ExitFlag = threading.Event()
            ExitFlag.clear()
            if self.SkipAutoGen:
                Wa = self.VerifyAutoGenFiles()
                if Wa is None:
                    # Previous artifacts are stale/missing: do a full AutoGen.
                    self.SkipAutoGen = False
                    Wa, self.BuildModules = self.PerformAutoGen(BuildTarget,ToolChain)
                else:
                    GlobalData.gAutoGenPhase = True
                    self.BuildModules = self.SetupMakeSetting(Wa)
            else:
                Wa, self.BuildModules = self.PerformAutoGen(BuildTarget,ToolChain)
            Pa = Wa.AutoGenObjectList[0]
            GlobalData.gAutoGenPhase = False

            if GlobalData.gBinCacheSource:
                EdkLogger.quiet("[cache Summary]: Total module num: %s" % len(self.AllModules))
                EdkLogger.quiet("[cache Summary]: PreMakecache miss num: %s " % len(self.PreMakeCacheMiss))
                EdkLogger.quiet("[cache Summary]: Makecache miss num: %s " % len(self.MakeCacheMiss))

            for Arch in Wa.ArchList:
                MakeStart = time.time()
                for Ma in set(self.BuildModules):
                    # Generate build task for the module
                    if not Ma.IsBinaryModule:
                        Bt = BuildTask.New(ModuleMakeUnit(Ma, Pa.BuildCommand,self.Target))
                    # Break build if any build thread has error
                    if BuildTask.HasError():
                        # we need a full version of makefile for platform
                        ExitFlag.set()
                        BuildTask.WaitForComplete()
                        Pa.CreateMakeFile(False)
                        EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
                    # Start task scheduler
                    if not BuildTask.IsOnGoing():
                        BuildTask.StartScheduler(self.ThreadNumber, ExitFlag)

                # in case there's an interruption. we need a full version of makefile for platform
                if BuildTask.HasError():
                    EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)
                self.MakeTime += int(round((time.time() - MakeStart)))

            MakeContiue = time.time()
            #
            # All modules have been put in build tasks queue. Tell task scheduler
            # to exit if all tasks are completed
            #
            ExitFlag.set()
            BuildTask.WaitForComplete()
            if GlobalData.gBinCacheDest:
                self.GenDestCache()
            elif GlobalData.gUseHashCache and not GlobalData.gBinCacheSource:
                # Only for --hash
                # Update PreMakeCacheChain files
                self.GenLocalPreMakeCache()
            #
            # Get Module List
            #
            ModuleList = {ma.Guid.upper(): ma for ma in self.BuildModules}
            self.BuildModules = []
            self.MakeTime += int(round((time.time() - MakeContiue)))
            #
            # Check for build error, and raise exception if one
            # has been signaled.
            #
            if BuildTask.HasError():
                EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule)

            # Create MAP file when Load Fix Address is enabled.
            if self.Target in ["", "all", "fds"]:
                for Arch in Wa.ArchList:
                    #
                    # Check whether the set fix address is above 4G for 32bit image.
                    #
                    if (Arch == 'IA32' or Arch == 'ARM') and self.LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self.LoadFixAddress >= 0x100000000:
                        EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS can't be set to larger than or equal to 4G for the platorm with IA32 or ARM arch modules")
                #
                # Rebase module to the preferred memory address before GenFds
                #
                MapBuffer = []
                if self.LoadFixAddress != 0:
                    self._CollectModuleMapBuffer(MapBuffer, ModuleList)

                if self.Fdf:
                    #
                    # Generate FD image if there's a FDF file found
                    #
                    GenFdsStart = time.time()
                    if GenFdsApi(Wa.GenFdsCommandDict, self.Db):
                        EdkLogger.error("build", COMMAND_FAILURE)
                    Threshold = self.GetFreeSizeThreshold()
                    if Threshold:
                        self.CheckFreeSizeThreshold(Threshold, Wa.FvDir)
                    #
                    # Create MAP file for all platform FVs after GenFds.
                    #
                    self._CollectFvMapBuffer(MapBuffer, Wa, ModuleList)
                    self.GenFdsTime += int(round((time.time() - GenFdsStart)))
                #
                # Save MAP buffer into MAP file.
                #
                self._SaveMapFile(MapBuffer, Wa)
            self.CreateGuidedSectionToolsFile(Wa)
## GetFreeSizeThreshold()
#
# @retval int Threshold value
#
def GetFreeSizeThreshold(self):
    """Return the FV spare-space threshold set via -D FV_SPARE_SPACE_THRESHOLD.

    Accepts a decimal or 0x-prefixed hex string; returns None when the macro
    is unset or malformed (a warning is logged for malformed values).
    """
    Threshold = None
    Threshold_Str = GlobalData.gCommandLineDefines.get('FV_SPARE_SPACE_THRESHOLD')
    if Threshold_Str:
        try:
            if Threshold_Str.lower().startswith('0x'):
                Threshold = int(Threshold_Str, 16)
            else:
                Threshold = int(Threshold_Str)
        except ValueError:
            # Was a bare 'except:', which also swallowed SystemExit and
            # KeyboardInterrupt; only a bad number literal should warn.
            EdkLogger.warn("build", 'incorrect value for FV_SPARE_SPACE_THRESHOLD %s.Only decimal or hex format is allowed.' % Threshold_Str)
    return Threshold
def CheckFreeSizeThreshold(self, Threshold=None, FvDir=None):
    """Error out if any FV placed in an FD has less free space than Threshold.

    Parses the EFI_FV_SPACE_SIZE value out of each <FV>.Fv.map file under
    FvDir and compares it against the required spare space.

    @param Threshold: minimum free bytes required (int); silently ignored otherwise
    @param FvDir:     directory containing the generated .Fv.map files
    """
    # Defensive no-ops: callers may pass None when the option is unset.
    if not isinstance(Threshold, int):
        return
    if not isinstance(FvDir, str) or not FvDir:
        return
    FdfParserObject = GlobalData.gFdfParser
    # Only FVs that actually occupy an FD region are size-constrained.
    FvRegionNameList = [FvName for FvName in FdfParserObject.Profile.FvDict if FdfParserObject.Profile.FvDict[FvName].FvRegionInFD]
    for FvName in FdfParserObject.Profile.FvDict:
        if FvName in FvRegionNameList:
            FvSpaceInfoFileName = os.path.join(FvDir, FvName.upper() + '.Fv.map')
            if os.path.exists(FvSpaceInfoFileName):
                FileLinesList = getlines(FvSpaceInfoFileName)
                for Line in FileLinesList:
                    NameValue = Line.split('=')
                    if len(NameValue) == 2 and NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
                        FreeSizeValue = int(NameValue[1].strip(), 0)
                        if FreeSizeValue < Threshold:
                            EdkLogger.error("build", FV_FREESIZE_ERROR,
                                            '%s FV free space %d is not enough to meet with the required spare space %d set by -D FV_SPARE_SPACE_THRESHOLD option.' % (
                                                FvName, FreeSizeValue, Threshold))
                        break
## Generate GuidedSectionTools.txt in the FV directories.
#
def CreateGuidedSectionToolsFile(self,Wa):
    """Write GuidedSectionTools.txt into the FV output directory.

    Collects (GUID, tool-name, tool-path) triples from the platform's
    [BuildOptions] and from tools_def.txt (platform entries win over
    tools_def duplicates), sorts them by tool name, and writes one line per
    tool. Errors out on duplicate GUIDs within either source.

    @param Wa: workspace autogen object providing FvDir and the platform list
    """
    for BuildTarget in self.BuildTargetList:
        for ToolChain in self.ToolChainList:
            FvDir = Wa.FvDir
            if not os.path.exists(FvDir):
                continue
            for Arch in self.ArchList:
                guidList = []
                tooldefguidList = []
                guidAttribs = []
                for Platform in Wa.AutoGenObjectList:
                    if Platform.BuildTarget != BuildTarget:
                        continue
                    if Platform.ToolChain != ToolChain:
                        continue
                    if Platform.Arch != Arch:
                        continue
                    if hasattr (Platform, 'BuildOption'):
                        for Tool in Platform.BuildOption:
                            if 'GUID' in Platform.BuildOption[Tool]:
                                if 'PATH' in Platform.BuildOption[Tool]:
                                    value = Platform.BuildOption[Tool]['GUID']
                                    if value in guidList:
                                        EdkLogger.error("build", FORMAT_INVALID, "Duplicate GUID value %s used with Tool %s in DSC [BuildOptions]." % (value, Tool))
                                    path = Platform.BuildOption[Tool]['PATH']
                                    guidList.append(value)
                                    guidAttribs.append((value, Tool, path))
                    for Tool in Platform.ToolDefinition:
                        if 'GUID' in Platform.ToolDefinition[Tool]:
                            if 'PATH' in Platform.ToolDefinition[Tool]:
                                value = Platform.ToolDefinition[Tool]['GUID']
                                if value in tooldefguidList:
                                    EdkLogger.error("build", FORMAT_INVALID, "Duplicate GUID value %s used with Tool %s in tools_def.txt." % (value, Tool))
                                tooldefguidList.append(value)
                                if value in guidList:
                                    # Already added by platform
                                    continue
                                path = Platform.ToolDefinition[Tool]['PATH']
                                guidList.append(value)
                                guidAttribs.append((value, Tool, path))
                # Sort by GuidTool name
                guidAttribs = sorted (guidAttribs, key=lambda x: x[1])
                # Write out GuidedSecTools.txt
                toolsFile = os.path.join(FvDir, 'GuidedSectionTools.txt')
                # Context manager closes the handle even if a write fails;
                # the original open()/close() pattern leaked it on error.
                with open(toolsFile, 'wt') as toolsFileHandle:
                    for guidedSectionTool in guidAttribs:
                        print(' '.join(guidedSectionTool), file=toolsFileHandle)
## Returns the real path of the tool.
#
def GetRealPathOfTool (self, tool):
    # Resolve symlinks only for paths that actually exist on disk; a
    # non-existent name is handed back to the caller unchanged.
    return os.path.realpath(tool) if os.path.exists(tool) else tool
## Launch the module or platform build
#
def Launch(self):
    """Top-level build dispatch.

    Resets the per-invocation cache bookkeeping sets, then builds either a
    single module, the platform single-threaded, or the platform with the
    multi-threaded scheduler, depending on -m/-n and the requested target.
    """
    # Per-invocation bookkeeping used by the build-cache logic.
    self.AllDrivers = set()
    self.AllModules = set()
    self.PreMakeCacheMiss = set()
    self.PreMakeCacheHit = set()
    self.MakeCacheMiss = set()
    self.MakeCacheHit = set()
    if not self.ModuleFile:
        if not self.SpawnMode or self.Target not in ["", "all"]:
            # Special targets (genc/genmake/clean/...) run single-threaded.
            self.SpawnMode = False
            self._BuildPlatform()
        else:
            self._MultiThreadBuildPlatform()
    else:
        self.SpawnMode = False
        self._BuildModule()
    if self.Target == 'cleanall':
        RemoveDirectory(os.path.dirname(GlobalData.gDatabasePath), True)
def CreateAsBuiltInf(self):
    # Emit the "as-built" INF for every module this invocation built.
    for ModuleAutoGenObj in self.BuildModules:
        ModuleAutoGenObj.CreateAsBuiltInf()
def GenDestCache(self):
    # Populate the binary cache destination (--binary-destination): record
    # both hash chains for each module, then copy its outputs into the cache.
    for CachedModule in self.AllModules:
        CachedModule.GenPreMakefileHashList()
        CachedModule.GenMakefileHashList()
        CachedModule.CopyModuleToCache()
def GenLocalPreMakeCache(self):
    # --hash only: refresh the PreMakeCacheChain files for every module
    # that missed the pre-make cache.
    for MissedModule in self.PreMakeCacheMiss:
        MissedModule.GenPreMakefileHashList()
## Do some clean-up works when error occurred
def Relinquish(self):
    # Silence everything except errors while tearing down, then restore
    # the previous verbosity level.
    SavedLevel = EdkLogger.GetLevel()
    EdkLogger.SetLevel(EdkLogger.ERROR)
    Utils.Progressor.Abort()
    if self.SpawnMode == True:
        # Multi-threaded build: stop the task scheduler safely.
        BuildTask.Abort()
    EdkLogger.SetLevel(SavedLevel)
def ParseDefines(DefineList=None):
    """Convert -D command line macros ("Name" or "Name=Value") into a dict.

    @param DefineList: list of macro definition strings, or None
    @retval dict      mapping macro name to its value ("TRUE" when no value
                      was given)
    """
    # NOTE: the default used to be a mutable list literal ([]) — the classic
    # Python shared-default pitfall; None is the safe equivalent here.
    DefineDict = {}
    if DefineList is not None:
        for Define in DefineList:
            DefineTokenList = Define.split("=", 1)
            if not GlobalData.gMacroNamePattern.match(DefineTokenList[0]):
                EdkLogger.error('build', FORMAT_INVALID,
                                "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
                                ExtraData=DefineTokenList[0])
            if len(DefineTokenList) == 1:
                DefineDict[DefineTokenList[0]] = "TRUE"
            else:
                DefineDict[DefineTokenList[0]] = DefineTokenList[1].strip()
    return DefineDict
def LogBuildTime(Time):
    """Format a duration in seconds as HH:MM:SS, appending a day count past 24h.

    Returns None for a zero/None duration.
    """
    if not Time:
        return None
    Duration = time.gmtime(Time)
    Formatted = time.strftime("%H:%M:%S", Duration)
    if Duration.tm_yday > 1:
        Formatted += ", %d day(s)" % (Duration.tm_yday - 1)
    return Formatted
def ThreadNum():
    """Determine the number of build threads to use.

    Priority: the -n command line option, then MAX_CONCURRENT_THREAD_NUMBER
    from target.txt; 0 (or blank) means auto-detect from the CPU count.
    Side effect: records the --conf directory in GlobalData.gCmdConfDir.

    @retval int number of threads (>= 1)
    """
    OptionParser = MyOptionParser()
    if not OptionParser.BuildOption and not OptionParser.BuildTarget:
        OptionParser.GetOption()
    BuildOption, BuildTarget = OptionParser.BuildOption, OptionParser.BuildTarget
    ThreadNumber = BuildOption.ThreadNumber
    GlobalData.gCmdConfDir = BuildOption.ConfDirectory
    if ThreadNumber is None:
        # Fall back to target.txt; an empty string means "auto-detect".
        TargetObj = TargetTxtDict()
        ThreadNumber = TargetObj.Target.TargetTxtDictionary[TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
        if ThreadNumber == '':
            ThreadNumber = 0
        else:
            ThreadNumber = int(ThreadNumber, 0)
    if ThreadNumber == 0:
        try:
            ThreadNumber = multiprocessing.cpu_count()
        except (ImportError, NotImplementedError):
            # cpu_count() may be unavailable on exotic platforms.
            ThreadNumber = 1
    return ThreadNumber
## Tool entrance method
#
# This method mainly dispatch specific methods per the command line options.
# If no error found, return zero value so the caller of this tool can know
# if it's executed successfully or not.
#
# @retval 0     Tool was successful
# @retval 1     Tool failed
#
# Size the inter-process log queue to roughly ten entries per build thread.
LogQMaxSize = ThreadNum() * 10
def Main():
    """Tool entrance method.

    Parses the command line, initializes the multiprocess logging agent,
    normalizes module/platform/FDF paths against WORKSPACE, launches the
    build, and converts any failure into a non-zero return code.

    @retval 0     Tool was successful
    @retval >0    Tool failed (specific error code)
    """
    StartTime = time.time()

    #
    # Create a log Queue
    #
    LogQ = mp.Queue(LogQMaxSize)
    # Initialize log system
    EdkLogger.LogClientInitialize(LogQ)
    GlobalData.gCommand = sys.argv[1:]
    #
    # Parse the options and args
    #
    OptionParser = MyOptionParser()
    if not OptionParser.BuildOption and not OptionParser.BuildTarget:
        OptionParser.GetOption()
    Option, Target = OptionParser.BuildOption, OptionParser.BuildTarget
    GlobalData.gOptions = Option
    GlobalData.gCaseInsensitive = Option.CaseInsensitive

    # Set log level
    LogLevel = EdkLogger.INFO
    if Option.verbose is not None:
        EdkLogger.SetLevel(EdkLogger.VERBOSE)
        LogLevel = EdkLogger.VERBOSE
    elif Option.quiet is not None:
        EdkLogger.SetLevel(EdkLogger.QUIET)
        LogLevel = EdkLogger.QUIET
    elif Option.debug is not None:
        EdkLogger.SetLevel(Option.debug + 1)
        LogLevel = Option.debug + 1
    else:
        EdkLogger.SetLevel(EdkLogger.INFO)

    if Option.WarningAsError == True:
        EdkLogger.SetWarningAsError()
    # The log agent process drains LogQ so worker processes can log too.
    Log_Agent = LogAgent(LogQ,LogLevel,Option.LogFile)
    Log_Agent.start()

    if platform.platform().find("Windows") >= 0:
        GlobalData.gIsWindows = True
    else:
        GlobalData.gIsWindows = False

    EdkLogger.quiet("Build environment: %s" % platform.platform())
    EdkLogger.quiet(time.strftime("Build start time: %H:%M:%S, %b.%d %Y\n", time.localtime()));
    ReturnCode = 0
    MyBuild = None
    BuildError = True
    try:
        # Normalize and validate the requested build target.
        if len(Target) == 0:
            Target = "all"
        elif len(Target) >= 2:
            EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.",
                            ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))
        else:
            Target = Target[0].lower()

        if Target not in gSupportedTarget:
            EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target,
                            ExtraData="Please select one of: %s" % (' '.join(gSupportedTarget)))

        #
        # Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH
        #
        CheckEnvVariable()
        GlobalData.gCommandLineDefines.update(ParseDefines(Option.Macros))

        Workspace = os.getenv("WORKSPACE")
        #
        # Get files real name in workspace dir
        #
        GlobalData.gAllFiles = Utils.DirCache(Workspace)

        WorkingDirectory = os.getcwd()
        if not Option.ModuleFile:
            # With no -m, a single INF in the current directory is implied.
            FileList = glob.glob(os.path.normpath(os.path.join(WorkingDirectory, '*.inf')))
            FileNum = len(FileList)
            if FileNum >= 2:
                EdkLogger.error("build", OPTION_NOT_SUPPORTED, "There are %d INF files in %s." % (FileNum, WorkingDirectory),
                                ExtraData="Please use '-m <INF_FILE_PATH>' switch to choose one.")
            elif FileNum == 1:
                Option.ModuleFile = NormFile(FileList[0], Workspace)

        if Option.ModuleFile:
            if os.path.isabs (Option.ModuleFile):
                if os.path.normcase (os.path.normpath(Option.ModuleFile)).find (Workspace) == 0:
                    Option.ModuleFile = NormFile(os.path.normpath(Option.ModuleFile), Workspace)
            Option.ModuleFile = PathClass(Option.ModuleFile, Workspace)
            ErrorCode, ErrorInfo = Option.ModuleFile.Validate(".inf", False)
            if ErrorCode != 0:
                EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)

        if Option.PlatformFile is not None:
            if os.path.isabs (Option.PlatformFile):
                if os.path.normcase (os.path.normpath(Option.PlatformFile)).find (Workspace) == 0:
                    Option.PlatformFile = NormFile(os.path.normpath(Option.PlatformFile), Workspace)
            Option.PlatformFile = PathClass(Option.PlatformFile, Workspace)

        if Option.FdfFile is not None:
            if os.path.isabs (Option.FdfFile):
                if os.path.normcase (os.path.normpath(Option.FdfFile)).find (Workspace) == 0:
                    Option.FdfFile = NormFile(os.path.normpath(Option.FdfFile), Workspace)
            Option.FdfFile = PathClass(Option.FdfFile, Workspace)
            ErrorCode, ErrorInfo = Option.FdfFile.Validate(".fdf", False)
            if ErrorCode != 0:
                EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)

        if Option.Flag is not None and Option.Flag not in ['-c', '-s']:
            EdkLogger.error("build", OPTION_VALUE_INVALID, "UNI flag must be one of -c or -s")

        MyBuild = Build(Target, Workspace, Option,LogQ)
        GlobalData.gCommandLineDefines['ARCH'] = ' '.join(MyBuild.ArchList)
        if not (MyBuild.LaunchPrebuildFlag and os.path.exists(MyBuild.PlatformBuildPath)):
            MyBuild.Launch()

        #
        # All job done, no error found and no exception raised
        #
        BuildError = False
    except FatalError as X:
        if MyBuild is not None:
            # for multi-thread build exits safely
            MyBuild.Relinquish()
        if Option is not None and Option.debug is not None:
            EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
        ReturnCode = X.args[0]
    except Warning as X:
        # error from Fdf parser
        if MyBuild is not None:
            # for multi-thread build exits safely
            MyBuild.Relinquish()
        if Option is not None and Option.debug is not None:
            EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
        else:
            EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError=False)
        ReturnCode = FORMAT_INVALID
    except KeyboardInterrupt:
        if MyBuild is not None:
            # for multi-thread build exits safely
            MyBuild.Relinquish()
        ReturnCode = ABORT_ERROR
        if Option is not None and Option.debug is not None:
            EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
    except:
        if MyBuild is not None:
            # for multi-thread build exits safely
            MyBuild.Relinquish()

        # try to get the meta-file from the object causing exception
        Tb = sys.exc_info()[-1]
        MetaFile = GlobalData.gProcessingFile
        while Tb is not None:
            if 'self' in Tb.tb_frame.f_locals and hasattr(Tb.tb_frame.f_locals['self'], 'MetaFile'):
                MetaFile = Tb.tb_frame.f_locals['self'].MetaFile
            Tb = Tb.tb_next
        EdkLogger.error(
                    "\nbuild",
                    CODE_ERROR,
                    "Unknown fatal error when processing [%s]" % MetaFile,
                    ExtraData="\n(Please send email to %s for help, attaching following call stack trace!)\n" % MSG_EDKII_MAIL_ADDR,
                    RaiseError=False
                    )
        EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
        ReturnCode = CODE_ERROR
    finally:
        Utils.Progressor.Abort()
        Utils.ClearDuplicatedInf()

    if ReturnCode == 0:
        try:
            MyBuild.LaunchPostbuild()
            Conclusion = "Done"
        except:
            Conclusion = "Failed"
            ReturnCode = POSTBUILD_ERROR
    elif ReturnCode == ABORT_ERROR:
        Conclusion = "Aborted"
    else:
        Conclusion = "Failed"
    FinishTime = time.time()
    BuildDuration = time.gmtime(int(round(FinishTime - StartTime)))
    BuildDurationStr = ""
    if BuildDuration.tm_yday > 1:
        BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration) + ", %d day(s)" % (BuildDuration.tm_yday - 1)
    else:
        BuildDurationStr = time.strftime("%H:%M:%S", BuildDuration)
    if MyBuild is not None:
        if not BuildError:
            MyBuild.BuildReport.GenerateReport(BuildDurationStr, LogBuildTime(MyBuild.AutoGenTime), LogBuildTime(MyBuild.MakeTime), LogBuildTime(MyBuild.GenFdsTime))

    EdkLogger.SetLevel(EdkLogger.QUIET)
    EdkLogger.quiet("\n- %s -" % Conclusion)
    EdkLogger.quiet(time.strftime("Build end time: %H:%M:%S, %b.%d %Y", time.localtime()))
    EdkLogger.quiet("Build total time: %s\n" % BuildDurationStr)
    Log_Agent.kill()
    Log_Agent.join()
    return ReturnCode
if __name__ == '__main__':
    try:
        # 'spawn' gives consistent worker-process behavior on all platforms;
        # it can only be set once per process, so ignore a repeat attempt.
        mp.set_start_method('spawn')
    except:
        pass
    r = Main()
    ## 0-127 is a safe return range, and 1 is a standard default error
    if r < 0 or r > 127: r = 1
    sys.exit(r)
| edk2-master | BaseTools/Source/Python/build/build.py |
## @file
# Python 'build' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/build/__init__.py |
## @file
# build a platform or a module
#
# Copyright (c) 2014, Hewlett-Packard Development Company, L.P.<BR>
# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2018 - 2020, Hewlett Packard Enterprise Development, L.P.<BR>
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# Version and Copyright
from Common.BuildVersion import gBUILD_VERSION
from optparse import OptionParser
VersionNumber = "0.60" + ' ' + gBUILD_VERSION
__version__ = "%prog Version " + VersionNumber
__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
# Options already seen once on the command line; consulted by
# SingleCheckCallback to reject duplicates.
gParamCheck = []
def SingleCheckCallback(option, opt_str, value, parser):
    # optparse callback that stores the value but rejects a second
    # occurrence of the same option.
    if option in gParamCheck:
        parser.error("Option %s only allows one instance in command line!" % option)
    else:
        setattr(parser.values, option.dest, value)
        gParamCheck.append(option)
class MyOptionParser():
    """Singleton wrapper around optparse for the build tool's command line.

    The parsed results are cached on the single instance (BuildOption /
    BuildTarget) so every caller — including worker processes — sees the
    same options without re-parsing sys.argv.
    """
    def __new__(cls, *args, **kw):
        # Classic singleton: create the instance once, return it forever.
        if not hasattr(cls, '_instance'):
            orig = super(MyOptionParser, cls)
            cls._instance = orig.__new__(cls, *args, **kw)
        return cls._instance

    def __init__(self):
        # Initialize only on first construction; later calls must not wipe
        # an already-parsed command line.
        if not hasattr(self, 'BuildOption'):
            self.BuildOption = None
        if not hasattr(self, 'BuildTarget'):
            self.BuildTarget = None

    def GetOption(self):
        """Parse sys.argv into self.BuildOption / self.BuildTarget."""
        Parser = OptionParser(description=__copyright__, version=__version__, prog="build.exe", usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]")
        Parser.add_option("-a", "--arch", action="append", dest="TargetArch",
            help="ARCHS is one of list: IA32, X64, ARM, AARCH64, RISCV64, LOONGARCH64 or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.")
        Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback,
            help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.")
        Parser.add_option("-m", "--module", action="callback", type="string", dest="ModuleFile", callback=SingleCheckCallback,
            help="Build the module specified by the INF file name argument.")
        Parser.add_option("-b", "--buildtarget", type="string", dest="BuildTarget", help="Using the TARGET to build the platform, overriding target.txt's TARGET definition.",
            action="append")
        Parser.add_option("-t", "--tagname", action="append", type="string", dest="ToolChain",
            help="Using the Tool Chain Tagname to build the platform, overriding target.txt's TOOL_CHAIN_TAG definition.")
        Parser.add_option("-x", "--sku-id", action="callback", type="string", dest="SkuId", callback=SingleCheckCallback,
            help="Using this name of SKU ID to build the platform, overriding SKUID_IDENTIFIER in DSC file.")
        Parser.add_option("-n", action="callback", type="int", dest="ThreadNumber", callback=SingleCheckCallback,
            help="Build the platform using multi-threaded compiler. The value overrides target.txt's MAX_CONCURRENT_THREAD_NUMBER. When value is set to 0, tool automatically detect number of "\
                 "processor threads, set value to 1 means disable multi-thread build, and set value to more than 1 means user specify the threads number to build.")
        Parser.add_option("-f", "--fdf", action="callback", type="string", dest="FdfFile", callback=SingleCheckCallback,
            help="The name of the FDF file to use, which overrides the setting in the DSC file.")
        Parser.add_option("-r", "--rom-image", action="append", type="string", dest="RomImage", default=[],
            help="The name of FD to be generated. The name must be from [FD] section in FDF file.")
        Parser.add_option("-i", "--fv-image", action="append", type="string", dest="FvImage", default=[],
            help="The name of FV to be generated. The name must be from [FV] section in FDF file.")
        Parser.add_option("-C", "--capsule-image", action="append", type="string", dest="CapName", default=[],
            help="The name of Capsule to be generated. The name must be from [Capsule] section in FDF file.")
        Parser.add_option("-u", "--skip-autogen", action="store_true", dest="SkipAutoGen", help="Skip AutoGen step.")
        Parser.add_option("-e", "--re-parse", action="store_true", dest="Reparse", help="Re-parse all meta-data files.")
        Parser.add_option("-c", "--case-insensitive", action="store_true", dest="CaseInsensitive", default=False, help="Don't check case of file name.")
        Parser.add_option("-w", "--warning-as-error", action="store_true", dest="WarningAsError", help="Treat warning in tools as error.")
        Parser.add_option("-j", "--log", action="store", dest="LogFile", help="Put log in specified file as well as on console.")
        Parser.add_option("-s", "--silent", action="store_true", type=None, dest="SilentMode",
            help="Make use of silent mode of (n)make.")
        Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
        Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
                                                                                  "including library instances selected, final dependency expression, "\
                                                                                  "and warning messages, etc.")
        Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
        Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
        Parser.add_option("-y", "--report-file", action="store", dest="ReportFile", help="Create/overwrite the report to the specified filename.")
        Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD', 'LIBRARY', 'FLASH', 'DEPEX', 'BUILD_FLAGS', 'FIXED_ADDRESS', 'HASH', 'EXECUTION_ORDER', 'COMPILE_INFO'], dest="ReportType", default=[],
            help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, HASH, EXECUTION_ORDER, COMPILE_INFO]. "\
                 "To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, HASH, BUILD_FLAGS, FIXED_ADDRESS]")
        Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",
            help="Specify the specific option to parse EDK UNI file. Must be one of: [-c, -s]. -c is for EDK framework UNI file, and -s is for EDK UEFI UNI file. "\
                 "This option can also be specified by setting *_*_*_BUILD_FLAGS in [BuildOptions] section of platform DSC. If they are both specified, this value "\
                 "will override the setting in [BuildOptions] section of platform DSC.")
        Parser.add_option("-N", "--no-cache", action="store_true", dest="DisableCache", default=False, help="Disable build cache mechanism")
        Parser.add_option("--conf", action="store", type="string", dest="ConfDirectory", help="Specify the customized Conf directory.")
        Parser.add_option("--check-usage", action="store_true", dest="CheckUsage", default=False, help="Check usage content of entries listed in INF file.")
        Parser.add_option("--ignore-sources", action="store_true", dest="IgnoreSources", default=False, help="Focus to a binary build and ignore all source files")
        Parser.add_option("--pcd", action="append", dest="OptionPcd", help="Set PCD value by command line. Format: \"PcdName=Value\" ")
        Parser.add_option("-l", "--cmd-len", action="store", type="int", dest="CommandLength", help="Specify the maximum line length of build command. Default is 4096.")
        Parser.add_option("--hash", action="store_true", dest="UseHashCache", default=False, help="Enable hash-based caching during build process.")
        Parser.add_option("--binary-destination", action="store", type="string", dest="BinCacheDest", help="Generate a cache of binary files in the specified directory.")
        Parser.add_option("--binary-source", action="store", type="string", dest="BinCacheSource", help="Consume a cache of binary files from the specified directory.")
        Parser.add_option("--genfds-multi-thread", action="store_true", dest="GenfdsMultiThread", default=True, help="Enable GenFds multi thread to generate ffs file.")
        Parser.add_option("--no-genfds-multi-thread", action="store_true", dest="NoGenfdsMultiThread", default=False, help="Disable GenFds multi thread to generate ffs file.")
        Parser.add_option("--disable-include-path-check", action="store_true", dest="DisableIncludePathCheck", default=False, help="Disable the include path check for outside of package.")
        self.BuildOption, self.BuildTarget = Parser.parse_args()
| edk2-master | BaseTools/Source/Python/build/buildoptions.py |
## @file
# Routines for generating build report.
#
# This module contains the functionality to generate build report after
# build all target completes successfully.
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
import json
from pathlib import Path
import Common.LongFilePathOs as os
import re
import platform
import textwrap
import traceback
import sys
import time
import struct
import hashlib
import subprocess
import threading
from datetime import datetime
from io import BytesIO
from Common import EdkLogger
from Common.Misc import SaveFileOnChange
from Common.Misc import GuidStructureByteArrayToGuidString
from Common.Misc import GuidStructureStringToGuidString
from Common.BuildToolError import FILE_WRITE_FAILURE
from Common.BuildToolError import CODE_ERROR
from Common.BuildToolError import COMMAND_FAILURE
from Common.BuildToolError import FORMAT_INVALID
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
import Common.GlobalData as GlobalData
from AutoGen.ModuleAutoGen import ModuleAutoGen
from Common.Misc import PathClass
from Common.StringUtils import NormPath
from Common.DataType import *
import collections
from Common.Expression import *
from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID
from AutoGen.IncludesAutoGen import IncludesAutoGen
## Pattern to extract contents in EDK DXS files
gDxsDependencyPattern = re.compile(r"DEPENDENCY_START(.+)DEPENDENCY_END", re.DOTALL)
## Pattern to find total FV total size, occupied size in flash report intermediate file
gFvTotalSizePattern = re.compile(r"EFI_FV_TOTAL_SIZE = (0x[0-9a-fA-F]+)")
gFvTakenSizePattern = re.compile(r"EFI_FV_TAKEN_SIZE = (0x[0-9a-fA-F]+)")
## Pattern to find module size and time stamp in module summary report intermediate file
gModuleSizePattern = re.compile(r"MODULE_SIZE = (\d+)")
gTimeStampPattern = re.compile(r"TIME_STAMP = (\d+)")
## Pattern to find GUID value in flash description files
gPcdGuidPattern = re.compile(r"PCD\((\w+)[.](\w+)\)")
## Pattern to collect offset, GUID value pair in the flash report intermediate file
gOffsetGuidPattern = re.compile(r"(0x[0-9A-Fa-f]+) ([-A-Fa-f0-9]+)")
## Pattern to find module base address and entry point in fixed flash map file
# %(Address)s is a placeholder substituted below so the same skeleton matches
# both the BaseAddress and EntryPoint fields.
gModulePattern = r"\n[-\w]+\s*\(([^,]+),\s*BaseAddress=%(Address)s,\s*EntryPoint=%(Address)s,\s*Type=\w+\)\s*\(GUID=([-0-9A-Fa-f]+)[^)]*\)"
gMapFileItemPattern = re.compile(gModulePattern % {"Address" : "(-?0[xX][0-9A-Fa-f]+)"})
## Pattern to find all module referenced header files in source files
gIncludePattern = re.compile(r'#include\s*["<]([^">]+)[">]')
gIncludePattern2 = re.compile(r"#include\s+EFI_([A-Z_]+)\s*[(]\s*(\w+)\s*[)]")
## Pattern to find the entry point for EDK module using EDKII Glue library
gGlueLibEntryPoint = re.compile(r"__EDKII_GLUE_MODULE_ENTRY_POINT__\s*=\s*(\w+)")
## Tags for MaxLength of line in report
gLineMaxLength = 120
## Tags for end of line in report
gEndOfLine = "\r\n"
## Tags for section start, end and separator
gSectionStart = ">" + "=" * (gLineMaxLength - 2) + "<"
gSectionEnd = "<" + "=" * (gLineMaxLength - 2) + ">" + "\n"
gSectionSep = "=" * gLineMaxLength
## Tags for subsection start, end and separator
gSubSectionStart = ">" + "-" * (gLineMaxLength - 2) + "<"
gSubSectionEnd = "<" + "-" * (gLineMaxLength - 2) + ">"
gSubSectionSep = "-" * gLineMaxLength
## The look up table to map PCD type to pair of report display type and DEC type
gPcdTypeMap = {
    TAB_PCDS_FIXED_AT_BUILD : ('FIXED', TAB_PCDS_FIXED_AT_BUILD),
    TAB_PCDS_PATCHABLE_IN_MODULE: ('PATCH', TAB_PCDS_PATCHABLE_IN_MODULE),
    TAB_PCDS_FEATURE_FLAG : ('FLAG', TAB_PCDS_FEATURE_FLAG),
    TAB_PCDS_DYNAMIC : ('DYN', TAB_PCDS_DYNAMIC),
    TAB_PCDS_DYNAMIC_HII : ('DYNHII', TAB_PCDS_DYNAMIC),
    TAB_PCDS_DYNAMIC_VPD : ('DYNVPD', TAB_PCDS_DYNAMIC),
    TAB_PCDS_DYNAMIC_EX : ('DEX', TAB_PCDS_DYNAMIC_EX),
    TAB_PCDS_DYNAMIC_EX_HII : ('DEXHII', TAB_PCDS_DYNAMIC_EX),
    TAB_PCDS_DYNAMIC_EX_VPD : ('DEXVPD', TAB_PCDS_DYNAMIC_EX),
    }
## The look up table to map module type to driver type
gDriverTypeMap = {
    SUP_MODULE_SEC : '0x3 (SECURITY_CORE)',
    SUP_MODULE_PEI_CORE : '0x4 (PEI_CORE)',
    SUP_MODULE_PEIM : '0x6 (PEIM)',
    SUP_MODULE_DXE_CORE : '0x5 (DXE_CORE)',
    SUP_MODULE_DXE_DRIVER : '0x7 (DRIVER)',
    SUP_MODULE_DXE_SAL_DRIVER : '0x7 (DRIVER)',
    SUP_MODULE_DXE_SMM_DRIVER : '0x7 (DRIVER)',
    SUP_MODULE_DXE_RUNTIME_DRIVER: '0x7 (DRIVER)',
    SUP_MODULE_UEFI_DRIVER : '0x7 (DRIVER)',
    SUP_MODULE_UEFI_APPLICATION : '0x9 (APPLICATION)',
    SUP_MODULE_SMM_CORE : '0xD (SMM_CORE)',
    'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
    SUP_MODULE_MM_STANDALONE : '0xE (MM_STANDALONE)',
    SUP_MODULE_MM_CORE_STANDALONE : '0xF (MM_CORE_STANDALONE)'
    }
## The look up table of the supported opcode in the dependency expression binaries
# The list index is the opcode byte read from the .depex binary (see DepexParser).
gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]
## Save VPD Pcd
VPDPcdList = []
##
# Writes a string to the file object.
#
# This function writes a string to the file object, and a new line is appended
# afterwards. It may optionally wrap the string for better readability.
#
# @File The file object to write
# @String The string to be written to the file
# @Wrapper Indicates whether to wrap the string
#
def FileWrite(File, String, Wrapper=False):
    """Append one report line to File (a list-like buffer of output lines).

    When Wrapper is True the text is word-wrapped at 120 columns before the
    end-of-line tag is attached.
    """
    Text = textwrap.fill(String, 120) if Wrapper else String
    File.append(Text + gEndOfLine)
def ByteArrayForamt(Value):
    """Split a byte-array PCD value string into lines of at most 16 bytes.

    A value counts as a byte array when it is brace-wrapped (and is not a
    "{CODE(" expression) and contains at least 16 comma-separated elements.

    Returns (IsByteArray, ArrayList): ArrayList holds the formatted lines,
    with a trailing ',' on every line except the last, which carries the
    closing '}'.  When the value is not a byte array, ArrayList is a single
    entry of the (possibly brace-stripped) value plus '}' — preserved quirk
    for compatibility with existing callers, which only consume ArrayList
    when IsByteArray is True.
    """
    IsByteArray = False
    SplitNum = 16
    ArrayList = []
    if Value.startswith('{') and Value.endswith('}') and not Value.startswith("{CODE("):
        Value = Value[1:-1]
        ValueList = Value.split(',')
        if len(ValueList) >= SplitNum:
            IsByteArray = True
    if IsByteArray:
        # Normalize every element to a two-digit uppercase hex byte.
        ValueList = ['0x%02X' % int(Item.strip(), 16) for Item in ValueList]
        # Chunk into groups of SplitNum.  This replaces the former manual
        # index loop driven by float division (a Python 2 port leftover)
        # with the equivalent range-step iteration.
        for Start in range(0, len(ValueList), SplitNum):
            Chunk = ','.join(ValueList[Start:Start + SplitNum])
            if Start + SplitNum >= len(ValueList):
                ArrayList.append(Chunk + '}')
            else:
                ArrayList.append(Chunk + ',')
    else:
        ArrayList = [Value + '}']
    return IsByteArray, ArrayList
##
# Find all the header file that the module source directly includes.
#
# This function scans source code to find all header files the module may
# include. This is not accurate but very effective to find all the header
# file the module might include with #include statement.
#
# @Source The source file name
# @IncludePathList The list of include path to find the source file.
# @IncludeFiles The dictionary of current found include files.
#
def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
    """Scan one source file for #include directives and record the headers found.

    @param Source           The source file name
    @param IncludePathList  The list of include paths to search
    @param IncludeFiles     Dict updated in place: normalized lower-case
                            path -> full file name
    """
    # 'with' guarantees the source file handle is closed (was previously
    # left to the garbage collector).
    with open(Source) as SourceFile:
        FileContents = SourceFile.read()
    #
    # Find header files with pattern #include "XXX.h" or #include <XXX.h>
    #
    for Match in gIncludePattern.finditer(FileContents):
        FileName = Match.group(1).strip()
        # The source file's own directory is searched first, like a C compiler.
        for Dir in [os.path.dirname(Source)] + IncludePathList:
            FullFileName = os.path.normpath(os.path.join(Dir, FileName))
            if os.path.exists(FullFileName):
                IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
                break
    #
    # Find header files with pattern like #include EFI_PPI_CONSUMER(XXX)
    #
    for Match in gIncludePattern2.finditer(FileContents):
        Key = Match.group(2)
        Type = Match.group(1)
        if "ARCH_PROTOCOL" in Type:
            FileName = "ArchProtocol/%(Key)s/%(Key)s.h" % {"Key" : Key}
        elif "PROTOCOL" in Type:
            FileName = "Protocol/%(Key)s/%(Key)s.h" % {"Key" : Key}
        elif "PPI" in Type:
            FileName = "Ppi/%(Key)s/%(Key)s.h" % {"Key" : Key}
        elif TAB_GUID in Type:
            FileName = "Guid/%(Key)s/%(Key)s.h" % {"Key" : Key}
        else:
            continue
        for Dir in IncludePathList:
            FullFileName = os.path.normpath(os.path.join(Dir, FileName))
            if os.path.exists(FullFileName):
                IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
                break
## Split each lines in file
#
# This method is used to split the lines in file to make the length of each line
# less than MaxLength.
#
# @param Content The content of file
# @param MaxLength The Max Length of the line
#
def FileLinesSplit(Content=None, MaxLength=None):
    """Split each line in Content so that no line exceeds MaxLength characters.

    Break points are chosen at the right-most space, slash or backslash
    before MaxLength when one exists; otherwise the line is hard-wrapped
    at MaxLength.

    @param Content   The content of the file
    @param MaxLength The maximum length of a line
    """
    ContentList = Content.split(TAB_LINE_BREAK)
    NewContentList = []
    for Line in ContentList:
        while len(Line.rstrip()) > MaxLength:
            LineSpaceIndex = Line.rfind(TAB_SPACE_SPLIT, 0, MaxLength)
            LineSlashIndex = Line.rfind(TAB_SLASH, 0, MaxLength)
            LineBackSlashIndex = Line.rfind(TAB_BACK_SLASH, 0, MaxLength)
            LineBreakIndex = max(LineSpaceIndex, LineSlashIndex, LineBackSlashIndex)
            if LineBreakIndex <= 0:
                # No natural break point found: hard-wrap at MaxLength.
                LineBreakIndex = MaxLength
            NewContentList.append(Line[:LineBreakIndex])
            Line = Line[LineBreakIndex:]
        if Line:
            NewContentList.append(Line)
    # join() replaces the former quadratic '+=' string accumulation.
    NewContent = ''.join(NewLine + TAB_LINE_BREAK for NewLine in NewContentList)
    NewContent = NewContent.replace(gEndOfLine, TAB_LINE_BREAK).replace('\r\r\n', gEndOfLine)
    return NewContent
##
# Parse binary dependency expression section
#
# This utility class parses the dependency expression section and translate the readable
# GUID name and value.
#
class DepexParser(object):
    """Parses binary dependency expression sections and translates GUID
    values back to their readable C names."""
    ##
    # Constructor function for class DepexParser
    #
    # Collects GUID values from all packages (protocols, PPIs, GUIDs) and
    # from fixed VOID* PCDs that hold 16-byte GUID arrays, so that GUID
    # values found in a .depex binary can be mapped back to names.
    #
    # @param self            The object pointer
    # @param Wa              Workspace context information
    #
    def __init__(self, Wa):
        self._GuidDb = {}
        for Pa in Wa.AutoGenObjectList:
            for Package in Pa.PackageList:
                for Protocol in Package.Protocols:
                    GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])
                    self._GuidDb[GuidValue.upper()] = Protocol
                for Ppi in Package.Ppis:
                    GuidValue = GuidStructureStringToGuidString(Package.Ppis[Ppi])
                    self._GuidDb[GuidValue.upper()] = Ppi
                for Guid in Package.Guids:
                    GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])
                    self._GuidDb[GuidValue.upper()] = Guid
            for Ma in Pa.ModuleAutoGenList:
                for Pcd in Ma.FixedVoidTypePcds:
                    PcdValue = Ma.FixedVoidTypePcds[Pcd]
                    # A 16-byte array value is assumed to encode a GUID.
                    if len(PcdValue.split(',')) == 16:
                        GuidValue = GuidStructureByteArrayToGuidString(PcdValue)
                        self._GuidDb[GuidValue.upper()] = Pcd
    ##
    # Parse the binary dependency expression files.
    #
    # Translates the binary dependency expression file into a list of
    # readable instruction strings.
    #
    # @param self            The object pointer
    # @param DepexFileName   The file name of binary dependency expression file.
    #
    def ParseDepexFile(self, DepexFileName):
        DepexStatement = []
        # 'with' guarantees the .depex file handle is closed (was previously
        # never closed).
        with open(DepexFileName, "rb") as DepexFile:
            OpCode = DepexFile.read(1)
            while OpCode:
                Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
                if Statement in ["BEFORE", "AFTER", "PUSH"]:
                    # These opcodes are followed by a 16-byte GUID operand.
                    GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
                                struct.unpack(PACK_PATTERN_GUID, DepexFile.read(16))
                    GuidString = self._GuidDb.get(GuidValue, GuidValue)
                    Statement = "%s %s" % (Statement, GuidString)
                DepexStatement.append(Statement)
                OpCode = DepexFile.read(1)
        return DepexStatement
##
# Reports library information
#
# This class reports the module library subsection in the build report file.
#
class LibraryReport(object):
    """Reports the module library subsection in the build report file."""
    ##
    # Constructor function for class LibraryReport
    #
    # Collects, for each library the module depends on: INF path, library
    # class, constructor/destructor lists, depex expression and build time.
    #
    # @param self            The object pointer
    # @param M               Module context information
    #
    def __init__(self, M):
        self.LibraryList = []
        for Lib in M.DependentLibraryList:
            LibInfPath = str(Lib)
            LibClassList = Lib.LibraryClass[0].LibraryClass
            LibConstructorList = Lib.ConstructorList
            LibDesstructorList = Lib.DestructorList
            LibDepexList = Lib.DepexExpression[M.Arch, M.ModuleType]
            # Fix: reset LibTime for every library.  Previously it was
            # unbound (NameError) when the first library had no matching
            # AutoGen object, and otherwise leaked the previous library's
            # build time into this library's record.
            LibTime = ''
            for LibAutoGen in M.LibraryAutoGenList:
                if LibInfPath == LibAutoGen.MetaFile.Path:
                    LibTime = LibAutoGen.BuildTime
                    break
            self.LibraryList.append((LibInfPath, LibClassList, LibConstructorList, LibDesstructorList, LibDepexList, LibTime))
    ##
    # Generate report for module library information
    #
    # Writes the library subsection.  For EDKII style modules the library
    # class, constructor/destructor and dependency expression are also
    # reported.
    #
    # @param self            The object pointer
    # @param File            The file object for report
    #
    def GenerateReport(self, File):
        if len(self.LibraryList) > 0:
            FileWrite(File, gSubSectionStart)
            FileWrite(File, TAB_BRG_LIBRARY)
            FileWrite(File, gSubSectionSep)
            for LibraryItem in self.LibraryList:
                LibInfPath = LibraryItem[0]
                FileWrite(File, LibInfPath)
                LibClass = LibraryItem[1]
                EdkIILibInfo = ""
                LibConstructor = " ".join(LibraryItem[2])
                if LibConstructor:
                    EdkIILibInfo += " C = " + LibConstructor
                LibDestructor = " ".join(LibraryItem[3])
                if LibDestructor:
                    EdkIILibInfo += " D = " + LibDestructor
                LibDepex = " ".join(LibraryItem[4])
                if LibDepex:
                    EdkIILibInfo += " Depex = " + LibDepex
                if LibraryItem[5]:
                    EdkIILibInfo += " Time = " + LibraryItem[5]
                if EdkIILibInfo:
                    FileWrite(File, "{%s: %s}" % (LibClass, EdkIILibInfo))
                else:
                    FileWrite(File, "{%s}" % LibClass)
            FileWrite(File, gSubSectionEnd)
##
# Reports dependency expression information
#
# This class reports the module dependency expression subsection in the build report file.
#
class DepexReport(object):
    """Reports the module dependency expression subsection in the build
    report file."""
    ##
    # Constructor function for class DepexReport
    #
    # If the module source contains a DXS file (usually EDK style module),
    # its dependency is used; otherwise the dependency expression from the
    # module's own INF [Depex] section is merged with the ones from its
    # dependent library INFs.
    #
    # @param self            The object pointer
    # @param M               Module context information
    #
    def __init__(self, M):
        self.Depex = ""
        self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")
        ModuleType = M.ModuleType
        if not ModuleType:
            ModuleType = COMPONENT_TO_MODULE_MAP_DICT.get(M.ComponentType, "")
        # These module types carry no dependency expression.
        if ModuleType in [SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE, SUP_MODULE_UEFI_APPLICATION]:
            return
        for Source in M.SourceFileList:
            if os.path.splitext(Source.Path)[1].lower() == ".dxs":
                # 'with' guarantees the DXS file handle is closed (was
                # previously left to the garbage collector).
                with open(Source.Path) as DxsFile:
                    Match = gDxsDependencyPattern.search(DxsFile.read())
                if Match:
                    self.Depex = Match.group(1).strip()
                    self.Source = "DXS"
                    break
        else:
            self.Depex = M.DepexExpressionDict.get(M.ModuleType, "")
            self.ModuleDepex = " ".join(M.Module.DepexExpression[M.Arch, M.ModuleType])
            if not self.ModuleDepex:
                self.ModuleDepex = "(None)"
            LibDepexList = []
            for Lib in M.DependentLibraryList:
                LibDepex = " ".join(Lib.DepexExpression[M.Arch, M.ModuleType]).strip()
                if LibDepex != "":
                    LibDepexList.append("(" + LibDepex + ")")
            self.LibraryDepex = " AND ".join(LibDepexList)
            if not self.LibraryDepex:
                self.LibraryDepex = "(None)"
            self.Source = "INF"
    ##
    # Generate report for module dependency expression information
    #
    # @param self              The object pointer
    # @param File              The file object for report
    # @param GlobalDepexParser The platform global Dependency expression parser object
    #
    def GenerateReport(self, File, GlobalDepexParser):
        if not self.Depex:
            return
        FileWrite(File, gSubSectionStart)
        if os.path.isfile(self._DepexFileName):
            try:
                DepexStatements = GlobalDepexParser.ParseDepexFile(self._DepexFileName)
                FileWrite(File, "Final Dependency Expression (DEPEX) Instructions")
                for DepexStatement in DepexStatements:
                    FileWrite(File, " %s" % DepexStatement)
                FileWrite(File, gSubSectionSep)
            except:
                EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)
        FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
        if self.Source == "INF":
            FileWrite(File, self.Depex, True)
            FileWrite(File, gSubSectionSep)
            FileWrite(File, "From Module INF: %s" % self.ModuleDepex, True)
            FileWrite(File, "From Library INF: %s" % self.LibraryDepex, True)
        else:
            FileWrite(File, self.Depex)
        FileWrite(File, gSubSectionEnd)
##
# Reports dependency expression information
#
# This class reports the module build flags subsection in the build report file.
#
class BuildFlagsReport(object):
    """Reports the module build flags subsection in the build report file."""
    ##
    # Constructor function for class BuildFlagsReport
    #
    # Records the tool chain tag and the flags of every tool that is
    # relevant to the module's source file types.
    #
    # @param self            The object pointer
    # @param M               Module context information
    #
    def __init__(self, M):
        #
        # Map each source file extension to the tools whose flags matter
        # for it, so that irrelevant flags can be filtered out.
        #
        ExtToolMap = {
            ".c": ("CC",),
            ".cc": ("CC",),
            ".cpp": ("CC",),
            ".s": ("PP", "ASM"),
            ".asm": ("PP", "ASM"),
            ".vfr": ("VFRPP", "VFR"),
            ".dxs": ("APP", "CC"),
            ".asl": ("ASLPP", "ASL"),
            ".aslc": ("ASLCC", "ASLDLINK", "CC"),
            ".asm16": ("ASMLINK", "SLINK", "DLINK"),
        }
        BuildOptions = {}
        for Source in M.SourceFileList:
            Ext = os.path.splitext(Source.File)[1].lower()
            for Tool in ExtToolMap.get(Ext, ()):
                BuildOptions[Tool] = 1
        #
        # Save module build flags.
        #
        self.ToolChainTag = M.ToolChain
        self.BuildFlags = {}
        for Tool in BuildOptions:
            self.BuildFlags[Tool + "_FLAGS"] = M.BuildOption.get(Tool, {}).get("FLAGS", "")
    ##
    # Generate report for module build flags information
    #
    # @param self            The object pointer
    # @param File            The file object for report
    #
    def GenerateReport(self, File):
        FileWrite(File, gSubSectionStart)
        FileWrite(File, "Build Flags")
        FileWrite(File, "Tool Chain Tag: %s" % self.ToolChainTag)
        for FlagName, FlagValue in self.BuildFlags.items():
            FileWrite(File, gSubSectionSep)
            FileWrite(File, "%s = %s" % (FlagName, FlagValue), True)
        FileWrite(File, gSubSectionEnd)
##
# Reports individual module information
#
# This class reports the module section in the build report file.
# It comprises of module summary, module PCD, library, dependency expression,
# build flags sections.
#
class ModuleReport(object):
    """Reports the module section in the build report file: module summary,
    module PCD, library, dependency expression and build flags subsections."""
    ##
    # Constructor function for class ModuleReport
    #
    # @param self            The object pointer
    # @param M               Module context information
    # @param ReportType      The kind of report items in the final report file
    #
    def __init__(self, M, ReportType):
        self.ModuleName = M.Module.BaseName
        self.ModuleInfPath = M.MetaFile.File
        self.ModuleArch = M.Arch
        self.FileGuid = M.Guid
        self.Size = 0
        self.BuildTimeStamp = None
        self.Hash = 0
        self.DriverType = ""
        if not M.IsLibrary:
            ModuleType = M.ModuleType
            if not ModuleType:
                ModuleType = COMPONENT_TO_MODULE_MAP_DICT.get(M.ComponentType, "")
            #
            # If a module complies to PI 1.1, promote Module type to "SMM_DRIVER"
            #
            if ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
                PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000")
                if int(PiSpec, 0) >= 0x0001000A:
                    ModuleType = "SMM_DRIVER"
            self.DriverType = gDriverTypeMap.get(ModuleType, "0x2 (FREE_FORM)")
        self.UefiSpecVersion = M.Module.Specification.get("UEFI_SPECIFICATION_VERSION", "")
        self.PiSpecVersion = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "")
        self.PciDeviceId = M.Module.Defines.get("PCI_DEVICE_ID", "")
        self.PciVendorId = M.Module.Defines.get("PCI_VENDOR_ID", "")
        self.PciClassCode = M.Module.Defines.get("PCI_CLASS_CODE", "")
        self.BuildTime = M.BuildTime
        self._BuildDir = M.BuildDir
        self.ModulePcdSet = {}
        if "PCD" in ReportType:
            #
            # Collect all module used PCD set: module INF referenced directly or indirectly.
            # It also saves module INF default values of them in case they exist.
            #
            for Pcd in M.ModulePcdList + M.LibraryPcdList:
                self.ModulePcdSet.setdefault((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Type), (Pcd.InfDefaultValue, Pcd.DefaultValue))
        self.LibraryReport = None
        if "LIBRARY" in ReportType:
            self.LibraryReport = LibraryReport(M)
        self.DepexReport = None
        if "DEPEX" in ReportType:
            self.DepexReport = DepexReport(M)
        if "BUILD_FLAGS" in ReportType:
            self.BuildFlagsReport = BuildFlagsReport(M)
    ##
    # Generate report for module information
    #
    # @param self                   The object pointer
    # @param File                   The file object for report
    # @param GlobalPcdReport        The platform global PCD report object
    # @param GlobalPredictionReport The platform global Prediction report object
    # @param GlobalDepexParser      The platform global Dependency expression parser object
    # @param ReportType             The kind of report items in the final report file
    #
    def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, GlobalDepexParser, ReportType):
        FileWrite(File, gSectionStart)
        FwReportFileName = os.path.join(self._BuildDir, "OUTPUT", self.ModuleName + ".txt")
        if os.path.isfile(FwReportFileName):
            try:
                # 'with' guarantees the report file handle is closed.
                with open(FwReportFileName) as FwReportFile:
                    FileContents = FwReportFile.read()
                Match = gModuleSizePattern.search(FileContents)
                if Match:
                    self.Size = int(Match.group(1))
                Match = gTimeStampPattern.search(FileContents)
                if Match:
                    self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))
            except IOError:
                EdkLogger.warn(None, "Fail to read report file", FwReportFileName)
        if "HASH" in ReportType:
            OutputDir = os.path.join(self._BuildDir, "OUTPUT")
            DefaultEFIfile = os.path.join(OutputDir, self.ModuleName + ".efi")
            if os.path.isfile(DefaultEFIfile):
                Tempfile = os.path.join(OutputDir, self.ModuleName + "_hash.tmp")
                # rebase the efi image since its base address may not zero
                cmd = ["GenFw", "--rebase", str(0), "-o", Tempfile, DefaultEFIfile]
                try:
                    PopenObject = subprocess.Popen(' '.join(cmd), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
                except Exception as X:
                    EdkLogger.error("GenFw", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0]))
                EndOfProcedure = threading.Event()
                EndOfProcedure.clear()
                if PopenObject.stderr:
                    StdErrThread = threading.Thread(target=ReadMessage, args=(PopenObject.stderr, EdkLogger.quiet, EndOfProcedure))
                    # .name/.daemon replace the deprecated setName()/setDaemon().
                    StdErrThread.name = "STDERR-Redirector"
                    StdErrThread.daemon = False
                    StdErrThread.start()
                # waiting for program exit
                PopenObject.wait()
                if PopenObject.stderr:
                    StdErrThread.join()
                if PopenObject.returncode != 0:
                    EdkLogger.error("GenFw", COMMAND_FAILURE, "Failed to generate firmware hash image for %s" % (DefaultEFIfile))
                if os.path.isfile(Tempfile):
                    # Hash the rebased image.  This replaces the former dead
                    # code 'if self.Hash.update(buf):' - update() returns
                    # None, so that branch could never execute.
                    with open(Tempfile, 'rb') as RebasedFile:
                        RebasedImage = RebasedFile.read()
                    self.Hash = hashlib.sha1(RebasedImage).hexdigest()
                    os.remove(Tempfile)
        FileWrite(File, "Module Summary")
        FileWrite(File, "Module Name: %s" % self.ModuleName)
        FileWrite(File, "Module Arch: %s" % self.ModuleArch)
        FileWrite(File, "Module INF Path: %s" % self.ModuleInfPath)
        FileWrite(File, "File GUID: %s" % self.FileGuid)
        if self.Size:
            FileWrite(File, "Size: 0x%X (%.2fK)" % (self.Size, self.Size / 1024.0))
        if self.Hash:
            FileWrite(File, "SHA1 HASH: %s *%s" % (self.Hash, self.ModuleName + ".efi"))
        if self.BuildTimeStamp:
            FileWrite(File, "Build Time Stamp: %s" % self.BuildTimeStamp)
        if self.BuildTime:
            FileWrite(File, "Module Build Time: %s" % self.BuildTime)
        if self.DriverType:
            FileWrite(File, "Driver Type: %s" % self.DriverType)
        if self.UefiSpecVersion:
            FileWrite(File, "UEFI Spec Version: %s" % self.UefiSpecVersion)
        if self.PiSpecVersion:
            FileWrite(File, "PI Spec Version: %s" % self.PiSpecVersion)
        if self.PciDeviceId:
            FileWrite(File, "PCI Device ID: %s" % self.PciDeviceId)
        if self.PciVendorId:
            FileWrite(File, "PCI Vendor ID: %s" % self.PciVendorId)
        if self.PciClassCode:
            FileWrite(File, "PCI Class Code: %s" % self.PciClassCode)
        FileWrite(File, gSectionSep)
        if "PCD" in ReportType:
            GlobalPcdReport.GenerateReport(File, self.ModulePcdSet,self.FileGuid)
        if "LIBRARY" in ReportType:
            self.LibraryReport.GenerateReport(File)
        if "DEPEX" in ReportType:
            self.DepexReport.GenerateReport(File, GlobalDepexParser)
        if "BUILD_FLAGS" in ReportType:
            self.BuildFlagsReport.GenerateReport(File)
        if "FIXED_ADDRESS" in ReportType and self.FileGuid:
            GlobalPredictionReport.GenerateReport(File, self.FileGuid)
        FileWrite(File, gSectionEnd)
def ReadMessage(From, To, ExitFlag):
    """Pump lines from the byte stream *From* into the callback *To*.

    Stops at end of stream, or when ExitFlag is set after a line has been
    forwarded.

    @param From      Readable binary stream (e.g. a subprocess pipe)
    @param To        Callable receiving each decoded, rstripped line
    @param ExitFlag  threading.Event used to request early termination
    """
    while True:
        # read one line at a time; readline() returns b"" at EOF and never
        # None, so truthiness is the complete end-of-stream test (the old
        # 'is not None' check was impossible).
        Line = From.readline()
        if not Line:
            break
        To(Line.rstrip().decode(encoding='utf-8', errors='ignore'))
        # Event.is_set() replaces the deprecated isSet() alias.
        if ExitFlag.is_set():
            break
##
# Reports platform and module PCD information
#
# This class reports the platform PCD section and module PCD subsection
# in the build report file.
#
class PcdReport(object):
##
# Constructor function for class PcdReport
#
# This constructor function generates PcdReport object a platform build.
# It collects the whole PCD database from platform DSC files, platform
# flash description file and package DEC files.
#
# @param self The object pointer
# @param Wa Workspace context information
#
def __init__(self, Wa):
    """Collect the whole PCD database for the platform build.

    Gathers PCDs from platform DSC files, the platform flash description
    file and package DEC files, plus per-module overrides, grouped by
    token space GUID C name and PCD type.

    @param self            The object pointer
    @param Wa              Workspace context information
    """
    # {TokenSpaceGuidCName: {Type: [Pcd, ...]}} maps built below.
    self.AllPcds = {}
    self.UnusedPcds = {}
    self.ConditionalPcds = {}
    # Longest token C name seen; used for report column alignment.
    self.MaxLen = 0
    self.Arch = None
    if Wa.FdfProfile:
        self.FdfPcdSet = Wa.FdfProfile.PcdDict
    else:
        self.FdfPcdSet = {}
    self.DefaultStoreSingle = True
    self.SkuSingle = True
    if GlobalData.gDefaultStores and len(GlobalData.gDefaultStores) > 1:
        self.DefaultStoreSingle = False
    if GlobalData.gSkuids and len(GlobalData.gSkuids) > 1:
        self.SkuSingle = False
    self.ModulePcdOverride = {}
    for Pa in Wa.AutoGenObjectList:
        self.Arch = Pa.Arch
        #
        # Collect all platform referenced PCDs and grouped them by PCD token space
        # GUID C Names
        #
        for Pcd in Pa.AllPcdList:
            PcdList = self.AllPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
            if Pcd not in PcdList:
                PcdList.append(Pcd)
            if len(Pcd.TokenCName) > self.MaxLen:
                self.MaxLen = len(Pcd.TokenCName)
        #
        # Collect the PCD defined in DSC/FDF file, but not used in module
        #
        UnusedPcdFullList = []
        StructPcdDict = GlobalData.gStructurePcd.get(self.Arch, collections.OrderedDict())
        for Name, Guid in StructPcdDict:
            if (Name, Guid) not in Pa.Platform.Pcds:
                Pcd = StructPcdDict[(Name, Guid)]
                PcdList = self.AllPcds.setdefault(Guid, {}).setdefault(Pcd.Type, [])
                if Pcd not in PcdList and Pcd not in UnusedPcdFullList:
                    UnusedPcdFullList.append(Pcd)
        for item in Pa.Platform.Pcds:
            Pcd = Pa.Platform.Pcds[item]
            if not Pcd.Type:
                # check the Pcd in FDF file, whether it is used in module first
                for T in PCD_TYPE_LIST:
                    PcdList = self.AllPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(T, [])
                    if Pcd in PcdList:
                        Pcd.Type = T
                        break
            if not Pcd.Type:
                # Still untyped: fall back to the type declared in a package DEC.
                PcdTypeFlag = False
                for package in Pa.PackageList:
                    for T in PCD_TYPE_LIST:
                        if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, T) in package.Pcds:
                            Pcd.Type = T
                            PcdTypeFlag = True
                            if not Pcd.DatumType:
                                Pcd.DatumType = package.Pcds[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName, T)].DatumType
                            break
                    if PcdTypeFlag:
                        break
            if not Pcd.DatumType:
                PcdType = Pcd.Type
                # Try to remove Hii and Vpd suffix
                if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
                    PcdType = TAB_PCDS_DYNAMIC_EX
                elif PcdType.startswith(TAB_PCDS_DYNAMIC):
                    PcdType = TAB_PCDS_DYNAMIC
                for package in Pa.PackageList:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, PcdType) in package.Pcds:
                        Pcd.DatumType = package.Pcds[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName, PcdType)].DatumType
                        break
            PcdList = self.AllPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
            UnusedPcdList = self.UnusedPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
            if Pcd in UnusedPcdList:
                UnusedPcdList.remove(Pcd)
            if Pcd not in PcdList and Pcd not in UnusedPcdFullList:
                UnusedPcdFullList.append(Pcd)
            if len(Pcd.TokenCName) > self.MaxLen:
                self.MaxLen = len(Pcd.TokenCName)
        if GlobalData.gConditionalPcds:
            # PCDs referenced by conditional directives ("TokenSpace.TokenName").
            for PcdItem in GlobalData.gConditionalPcds:
                if '.' in PcdItem:
                    (TokenSpaceGuidCName, TokenCName) = PcdItem.split('.')
                    if (TokenCName, TokenSpaceGuidCName) in Pa.Platform.Pcds:
                        Pcd = Pa.Platform.Pcds[(TokenCName, TokenSpaceGuidCName)]
                        PcdList = self.ConditionalPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
                        if Pcd not in PcdList:
                            PcdList.append(Pcd)
        UnusedPcdList = []
        if UnusedPcdFullList:
            # Conditional-directive PCDs are reported separately, not as unused.
            for Pcd in UnusedPcdFullList:
                if Pcd.TokenSpaceGuidCName + '.' + Pcd.TokenCName in GlobalData.gConditionalPcds:
                    continue
                UnusedPcdList.append(Pcd)
        for Pcd in UnusedPcdList:
            PcdList = self.UnusedPcds.setdefault(Pcd.TokenSpaceGuidCName, {}).setdefault(Pcd.Type, [])
            if Pcd not in PcdList:
                PcdList.append(Pcd)
        for Module in Pa.Platform.Modules.values():
            #
            # Collect module override PCDs
            #
            for ModulePcd in Module.M.ModulePcdList + Module.M.LibraryPcdList:
                TokenCName = ModulePcd.TokenCName
                TokenSpaceGuid = ModulePcd.TokenSpaceGuidCName
                ModuleDefault = ModulePcd.DefaultValue
                ModulePath = os.path.basename(Module.M.MetaFile.File)
                self.ModulePcdOverride.setdefault((TokenCName, TokenSpaceGuid), {})[ModulePath] = ModuleDefault
    #
    # Collect PCD DEC default value.
    #
    self.DecPcdDefault = {}
    self._GuidDict = {}
    for Pa in Wa.AutoGenObjectList:
        for Package in Pa.PackageList:
            Guids = Package.Guids
            self._GuidDict.update(Guids)
            for (TokenCName, TokenSpaceGuidCName, DecType) in Package.Pcds:
                DecDefaultValue = Package.Pcds[TokenCName, TokenSpaceGuidCName, DecType].DefaultValue
                self.DecPcdDefault.setdefault((TokenCName, TokenSpaceGuidCName, DecType), DecDefaultValue)
    #
    # Collect PCDs defined in DSC common section
    #
    self.DscPcdDefault = {}
    for Pa in Wa.AutoGenObjectList:
        for (TokenCName, TokenSpaceGuidCName) in Pa.Platform.Pcds:
            DscDefaultValue = Pa.Platform.Pcds[(TokenCName, TokenSpaceGuidCName)].DscDefaultValue
            if DscDefaultValue:
                self.DscPcdDefault[(TokenCName, TokenSpaceGuidCName)] = DscDefaultValue
def GenerateReport(self, File, ModulePcdSet,ModuleGuid=None):
if not ModulePcdSet:
if self.ConditionalPcds:
self.GenerateReportDetail(File, ModulePcdSet, 1)
if self.UnusedPcds:
IsEmpty = True
for Token in self.UnusedPcds:
TokenDict = self.UnusedPcds[Token]
for Type in TokenDict:
if TokenDict[Type]:
IsEmpty = False
break
if not IsEmpty:
break
if not IsEmpty:
self.GenerateReportDetail(File, ModulePcdSet, 2)
self.GenerateReportDetail(File, ModulePcdSet,ModuleGuid = ModuleGuid)
##
# Generate report for PCD information
#
# This function generates report for separate module expression
# in a platform build.
#
# @param self The object pointer
# @param File The file object for report
# @param ModulePcdSet Set of all PCDs referenced by module or None for
# platform PCD report
# @param ReportSubType 0 means platform/module PCD report, 1 means Conditional
# directives section report, 2 means Unused Pcds section report
# @param DscOverridePcds Module DSC override PCDs set
#
def GenerateReportDetail(self, File, ModulePcdSet, ReportSubType = 0,ModuleGuid=None):
PcdDict = self.AllPcds
if ReportSubType == 1:
PcdDict = self.ConditionalPcds
elif ReportSubType == 2:
PcdDict = self.UnusedPcds
if not ModulePcdSet:
FileWrite(File, gSectionStart)
if ReportSubType == 1:
FileWrite(File, "Conditional Directives used by the build system")
elif ReportSubType == 2:
FileWrite(File, "PCDs not used by modules or in conditional directives")
else:
FileWrite(File, "Platform Configuration Database Report")
FileWrite(File, " *B - PCD override in the build option")
FileWrite(File, " *P - Platform scoped PCD override in DSC file")
FileWrite(File, " *F - Platform scoped PCD override in FDF file")
if not ReportSubType:
FileWrite(File, " *M - Module scoped PCD override")
FileWrite(File, gSectionSep)
else:
if not ReportSubType and ModulePcdSet:
#
# For module PCD sub-section
#
FileWrite(File, gSubSectionStart)
FileWrite(File, TAB_BRG_PCD)
FileWrite(File, gSubSectionSep)
AllPcdDict = {}
for Key in PcdDict:
AllPcdDict[Key] = {}
for Type in PcdDict[Key]:
for Pcd in PcdDict[Key][Type]:
AllPcdDict[Key][(Pcd.TokenCName, Type)] = Pcd
for Key in sorted(AllPcdDict):
#
# Group PCD by their token space GUID C Name
#
First = True
for PcdTokenCName, Type in sorted(AllPcdDict[Key]):
#
# Group PCD by their usage type
#
Pcd = AllPcdDict[Key][(PcdTokenCName, Type)]
TypeName, DecType = gPcdTypeMap.get(Type, ("", Type))
MixedPcdFlag = False
if GlobalData.MixedPcd:
for PcdKey in GlobalData.MixedPcd:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdKey]:
PcdTokenCName = PcdKey[0]
MixedPcdFlag = True
if MixedPcdFlag and not ModulePcdSet:
continue
#
# Get PCD default value and their override relationship
#
DecDefaultValue = self.DecPcdDefault.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, DecType))
DscDefaultValue = self.DscPcdDefault.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
DscDefaultValBak = DscDefaultValue
Field = ''
for (CName, Guid, Field) in self.FdfPcdSet:
if CName == PcdTokenCName and Guid == Key:
DscDefaultValue = self.FdfPcdSet[(CName, Guid, Field)]
break
if DscDefaultValue != DscDefaultValBak:
try:
DscDefaultValue = ValueExpressionEx(DscDefaultValue, Pcd.DatumType, self._GuidDict)(True)
except BadExpression as DscDefaultValue:
EdkLogger.error('BuildReport', FORMAT_INVALID, "PCD Value: %s, Type: %s" %(DscDefaultValue, Pcd.DatumType))
InfDefaultValue = None
PcdValue = DecDefaultValue
if DscDefaultValue:
PcdValue = DscDefaultValue
#The DefaultValue of StructurePcd already be the latest, no need to update.
if not self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
Pcd.DefaultValue = PcdValue
PcdComponentValue = None
if ModulePcdSet is not None:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type) not in ModulePcdSet:
continue
InfDefaultValue, PcdComponentValue = ModulePcdSet[Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type]
PcdValue = PcdComponentValue
#The DefaultValue of StructurePcd already be the latest, no need to update.
if not self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
Pcd.DefaultValue = PcdValue
if InfDefaultValue:
try:
InfDefaultValue = ValueExpressionEx(InfDefaultValue, Pcd.DatumType, self._GuidDict)(True)
except BadExpression as InfDefaultValue:
EdkLogger.error('BuildReport', FORMAT_INVALID, "PCD Value: %s, Type: %s" % (InfDefaultValue, Pcd.DatumType))
if InfDefaultValue == "":
InfDefaultValue = None
BuildOptionMatch = False
if GlobalData.BuildOptionPcd:
for pcd in GlobalData.BuildOptionPcd:
if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) == (pcd[0], pcd[1]):
if pcd[2]:
continue
PcdValue = pcd[3]
#The DefaultValue of StructurePcd already be the latest, no need to update.
if not self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
Pcd.DefaultValue = PcdValue
BuildOptionMatch = True
break
if First:
if ModulePcdSet is None:
FileWrite(File, "")
FileWrite(File, Key)
First = False
if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
if PcdValue.startswith('0') and not PcdValue.lower().startswith('0x') and \
len(PcdValue) > 1 and PcdValue.lstrip('0'):
PcdValue = PcdValue.lstrip('0')
PcdValueNumber = int(PcdValue.strip(), 0)
if DecDefaultValue is None:
DecMatch = True
else:
if DecDefaultValue.startswith('0') and not DecDefaultValue.lower().startswith('0x') and \
len(DecDefaultValue) > 1 and DecDefaultValue.lstrip('0'):
DecDefaultValue = DecDefaultValue.lstrip('0')
DecDefaultValueNumber = int(DecDefaultValue.strip(), 0)
DecMatch = (DecDefaultValueNumber == PcdValueNumber)
if InfDefaultValue is None:
InfMatch = True
else:
if InfDefaultValue.startswith('0') and not InfDefaultValue.lower().startswith('0x') and \
len(InfDefaultValue) > 1 and InfDefaultValue.lstrip('0'):
InfDefaultValue = InfDefaultValue.lstrip('0')
InfDefaultValueNumber = int(InfDefaultValue.strip(), 0)
InfMatch = (InfDefaultValueNumber == PcdValueNumber)
if DscDefaultValue is None:
DscMatch = True
else:
if DscDefaultValue.startswith('0') and not DscDefaultValue.lower().startswith('0x') and \
len(DscDefaultValue) > 1 and DscDefaultValue.lstrip('0'):
DscDefaultValue = DscDefaultValue.lstrip('0')
DscDefaultValueNumber = int(DscDefaultValue.strip(), 0)
DscMatch = (DscDefaultValueNumber == PcdValueNumber)
else:
if DecDefaultValue is None:
DecMatch = True
else:
DecMatch = (DecDefaultValue.strip() == PcdValue.strip())
if InfDefaultValue is None:
InfMatch = True
else:
InfMatch = (InfDefaultValue.strip() == PcdValue.strip())
if DscDefaultValue is None:
DscMatch = True
else:
DscMatch = (DscDefaultValue.strip() == PcdValue.strip())
IsStructure = False
if self.IsStructurePcd(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):
IsStructure = True
if TypeName in ('DYNVPD', 'DEXVPD'):
SkuInfoList = Pcd.SkuInfoList
Pcd = GlobalData.gStructurePcd[self.Arch][(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
if ModulePcdSet and ModulePcdSet.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type)):
InfDefaultValue, PcdComponentValue = ModulePcdSet[Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Type]
DscDefaultValBak = Pcd.DefaultValue
Pcd.DefaultValue = PcdComponentValue
Pcd.DatumType = Pcd.StructName
if TypeName in ('DYNVPD', 'DEXVPD'):
Pcd.SkuInfoList = SkuInfoList
if Pcd.PcdValueFromComm or Pcd.PcdFieldValueFromComm:
BuildOptionMatch = True
DecMatch = False
elif Pcd.PcdValueFromFdf or Pcd.PcdFieldValueFromFdf:
DscDefaultValue = True
DscMatch = True
DecMatch = False
else:
if Pcd.Type in PCD_DYNAMIC_TYPE_SET | PCD_DYNAMIC_EX_TYPE_SET:
DscOverride = False
if Pcd.DefaultFromDSC:
DscOverride = True
else:
DictLen = 0
for item in Pcd.SkuOverrideValues:
DictLen += len(Pcd.SkuOverrideValues[item])
if not DictLen:
DscOverride = False
else:
if not Pcd.SkuInfoList:
OverrideValues = Pcd.SkuOverrideValues
if OverrideValues:
for Data in OverrideValues.values():
Struct = list(Data.values())
if Struct:
DscOverride = self.ParseStruct(Struct[0])
break
else:
SkuList = sorted(Pcd.SkuInfoList.keys())
for Sku in SkuList:
SkuInfo = Pcd.SkuInfoList[Sku]
if SkuInfo.DefaultStoreDict:
DefaultStoreList = sorted(SkuInfo.DefaultStoreDict.keys())
for DefaultStore in DefaultStoreList:
OverrideValues = Pcd.SkuOverrideValues.get(Sku)
if OverrideValues:
DscOverride = self.ParseStruct(OverrideValues[DefaultStore])
if DscOverride:
break
if DscOverride:
break
if DscOverride:
DscDefaultValue = True
DscMatch = True
DecMatch = False
else:
DecMatch = True
else:
if Pcd.DscRawValue or (ModuleGuid and ModuleGuid.replace("-","S") in Pcd.PcdValueFromComponents):
DscDefaultValue = True
DscMatch = True
DecMatch = False
else:
DscDefaultValue = False
DecMatch = True
#
# Report PCD item according to their override relationship
#
if Pcd.DatumType == 'BOOLEAN':
if DscDefaultValue:
DscDefaultValue = str(int(DscDefaultValue, 0))
if DecDefaultValue:
DecDefaultValue = str(int(DecDefaultValue, 0))
if InfDefaultValue:
InfDefaultValue = str(int(InfDefaultValue, 0))
if Pcd.DefaultValue:
Pcd.DefaultValue = str(int(Pcd.DefaultValue, 0))
if DecMatch:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, ' ')
elif InfDefaultValue and InfMatch:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*M')
elif BuildOptionMatch:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*B')
else:
if PcdComponentValue:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, PcdComponentValue, DecMatch, DecDefaultValue, '*M', ModuleGuid)
elif DscDefaultValue and DscMatch:
if (Pcd.TokenCName, Key, Field) in self.FdfPcdSet:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*F')
else:
self.PrintPcdValue(File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValBak, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, '*P')
if ModulePcdSet is None:
if IsStructure:
continue
if not TypeName in ('PATCH', 'FLAG', 'FIXED'):
continue
if not BuildOptionMatch:
ModuleOverride = self.ModulePcdOverride.get((Pcd.TokenCName, Pcd.TokenSpaceGuidCName), {})
for ModulePath in ModuleOverride:
ModuleDefault = ModuleOverride[ModulePath]
if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
if ModuleDefault.startswith('0') and not ModuleDefault.lower().startswith('0x') and \
len(ModuleDefault) > 1 and ModuleDefault.lstrip('0'):
ModuleDefault = ModuleDefault.lstrip('0')
ModulePcdDefaultValueNumber = int(ModuleDefault.strip(), 0)
Match = (ModulePcdDefaultValueNumber == PcdValueNumber)
if Pcd.DatumType == 'BOOLEAN':
ModuleDefault = str(ModulePcdDefaultValueNumber)
else:
Match = (ModuleDefault.strip() == PcdValue.strip())
if Match:
continue
IsByteArray, ArrayList = ByteArrayForamt(ModuleDefault.strip())
if IsByteArray:
FileWrite(File, ' *M %-*s = %s' % (self.MaxLen + 15, ModulePath, '{'))
for Array in ArrayList:
FileWrite(File, Array)
else:
Value = ModuleDefault.strip()
if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
if Value.startswith(('0x', '0X')):
Value = '{} ({:d})'.format(Value, int(Value, 0))
else:
Value = "0x{:X} ({})".format(int(Value, 0), Value)
FileWrite(File, ' *M %-*s = %s' % (self.MaxLen + 15, ModulePath, Value))
if ModulePcdSet is None:
FileWrite(File, gSectionEnd)
else:
if not ReportSubType and ModulePcdSet:
FileWrite(File, gSubSectionEnd)
def ParseStruct(self, struct):
    """Return True if any field of a structure-PCD override dict comes from a DSC file.

    @param struct   Nested dict: {scope: {field_name: (value, source_file, ...)}};
                    a field counts as a DSC override when its source path
                    (element [1]) ends with '.dsc'.
    @return         True if at least one field override originates from a DSC file.
    """
    if not struct:
        return False
    # Element [1] of each field tuple is the file the override came from.
    return any(
        FieldInfo[1] and FieldInfo[1].endswith('.dsc')
        for FieldMap in struct.values()
        for FieldInfo in FieldMap.values()
    )
def PrintPcdDefault(self, File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue):
    """Append the DSC/INF/DEC default values that differ from the effective PCD value.

    A default is printed only when its *Match flag is False, so the reader sees
    exactly which layers the final value diverged from.  Numeric values are
    shown in both hex and decimal form; byte arrays are expanded one row per line.
    """
    # DSC default, shown only when it was overridden by a later layer.
    if not DscMatch and DscDefaultValue is not None:
        Value = DscDefaultValue.strip()
        IsByteArray, ArrayList = ByteArrayForamt(Value)
        if IsByteArray:
            FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DSC DEFAULT', "{"))
            for Array in ArrayList:
                FileWrite(File, Array)
        else:
            if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                # Show both hex and decimal forms of numeric defaults.
                if Value.startswith(('0x', '0X')):
                    Value = '{} ({:d})'.format(Value, int(Value, 0))
                else:
                    Value = "0x{:X} ({})".format(int(Value, 0), Value)
            FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DSC DEFAULT', Value))
    # INF default, shown only when overridden.
    if not InfMatch and InfDefaultValue is not None:
        Value = InfDefaultValue.strip()
        IsByteArray, ArrayList = ByteArrayForamt(Value)
        if IsByteArray:
            FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'INF DEFAULT', "{"))
            for Array in ArrayList:
                FileWrite(File, Array)
        else:
            if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                if Value.startswith(('0x', '0X')):
                    Value = '{} ({:d})'.format(Value, int(Value, 0))
                else:
                    Value = "0x{:X} ({})".format(int(Value, 0), Value)
            FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'INF DEFAULT', Value))
    # DEC default; for structure PCDs also dump the per-field DEC defaults.
    if not DecMatch and DecDefaultValue is not None:
        Value = DecDefaultValue.strip()
        IsByteArray, ArrayList = ByteArrayForamt(Value)
        if IsByteArray:
            FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DEC DEFAULT', "{"))
            for Array in ArrayList:
                FileWrite(File, Array)
        else:
            if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                if Value.startswith(('0x', '0X')):
                    Value = '{} ({:d})'.format(Value, int(Value, 0))
                else:
                    Value = "0x{:X} ({})".format(int(Value, 0), Value)
            FileWrite(File, ' %*s = %s' % (self.MaxLen + 19, 'DEC DEFAULT', Value))
        if IsStructure:
            for filedvalues in Pcd.DefaultValues.values():
                self.PrintStructureInfo(File, filedvalues)
    # Structure PCD whose DEC default matched: still show the field details.
    if DecMatch and IsStructure:
        for filedvalues in Pcd.DefaultValues.values():
            self.PrintStructureInfo(File, filedvalues)
def PrintPcdValue(self, File, Pcd, PcdTokenCName, TypeName, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue, Flag = ' ',ModuleGuid=None):
    """Write one PCD's effective value lines into the report file.

    Flag is the override-origin marker printed in the first column
    ('*P', '*F', '*M', '*B' or blank).  PCDs with no SKU information print a
    single value line; SKU-layered PCDs print one line per SKU, and for HII
    PCDs one line per (SKU, default store).  After the value lines the
    non-matching DSC/INF/DEC defaults are appended via PrintPcdDefault.
    NOTE(review): VPDPcdList below appears to be a module-level accumulator
    defined elsewhere in this file - confirm.
    """
    if not Pcd.SkuInfoList:
        # Simple (non-SKU) PCD: a single value line.
        Value = Pcd.DefaultValue
        IsByteArray, ArrayList = ByteArrayForamt(Value)
        if IsByteArray:
            FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '{'))
            for Array in ArrayList:
                FileWrite(File, Array)
        else:
            if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                # Drop redundant leading zeros, then show hex and decimal forms.
                if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
                    Value = Value.lstrip('0')
                if Value.startswith(('0x', '0X')):
                    Value = '{} ({:d})'.format(Value, int(Value, 0))
                else:
                    Value = "0x{:X} ({})".format(int(Value, 0), Value)
            FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', Value))
        if IsStructure:
            # Structure PCD: also report the per-field override details.
            FiledOverrideFlag = False
            if (Pcd.TokenCName,Pcd.TokenSpaceGuidCName) in GlobalData.gPcdSkuOverrides:
                OverrideValues = GlobalData.gPcdSkuOverrides[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)]
            else:
                OverrideValues = Pcd.SkuOverrideValues
            FieldOverrideValues = None
            if OverrideValues:
                # Pick the first non-empty per-store override set.
                for Data in OverrideValues.values():
                    Struct = list(Data.values())
                    if Struct:
                        FieldOverrideValues = Struct[0]
                        FiledOverrideFlag = True
                        break
            # Component-scoped field overrides (keyed by mangled module GUID) win.
            if Pcd.PcdFiledValueFromDscComponent and ModuleGuid and ModuleGuid.replace("-","S") in Pcd.PcdFiledValueFromDscComponent:
                FieldOverrideValues = Pcd.PcdFiledValueFromDscComponent[ModuleGuid.replace("-","S")]
            if FieldOverrideValues:
                OverrideFieldStruct = self.OverrideFieldValue(Pcd, FieldOverrideValues)
                self.PrintStructureInfo(File, OverrideFieldStruct)
            if not FiledOverrideFlag and (Pcd.PcdFieldValueFromComm or Pcd.PcdFieldValueFromFdf):
                # Only command-line / FDF field overrides exist.
                OverrideFieldStruct = self.OverrideFieldValue(Pcd, {})
                self.PrintStructureInfo(File, OverrideFieldStruct)
        self.PrintPcdDefault(File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue)
    else:
        # SKU-layered PCD.  Only the first printed line carries the PCD name.
        FirstPrint = True
        SkuList = sorted(Pcd.SkuInfoList.keys())
        for Sku in SkuList:
            SkuInfo = Pcd.SkuInfoList[Sku]
            SkuIdName = SkuInfo.SkuIdName
            if TypeName in ('DYNHII', 'DEXHII'):
                # HII PCD: one value per (SKU, default store).
                if SkuInfo.DefaultStoreDict:
                    DefaultStoreList = sorted(SkuInfo.DefaultStoreDict.keys())
                    for DefaultStore in DefaultStoreList:
                        Value = SkuInfo.DefaultStoreDict[DefaultStore]
                        IsByteArray, ArrayList = ByteArrayForamt(Value)
                        if Pcd.DatumType == 'BOOLEAN':
                            Value = str(int(Value, 0))
                        if FirstPrint:
                            FirstPrint = False
                            # SKU/store columns are omitted when there is only one of them.
                            if IsByteArray:
                                if self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '{'))
                                elif self.DefaultStoreSingle and not self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '{'))
                                elif not self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', '{'))
                                else:
                                    FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', '{'))
                                for Array in ArrayList:
                                    FileWrite(File, Array)
                            else:
                                if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                                    if Value.startswith(('0x', '0X')):
                                        Value = '{} ({:d})'.format(Value, int(Value, 0))
                                    else:
                                        Value = "0x{:X} ({})".format(int(Value, 0), Value)
                                if self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', Value))
                                elif self.DefaultStoreSingle and not self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
                                elif not self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', Value))
                                else:
                                    FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', Value))
                        else:
                            # Subsequent lines: blank name column.
                            if IsByteArray:
                                if self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '{'))
                                elif self.DefaultStoreSingle and not self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '{'))
                                elif not self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', '{'))
                                else:
                                    FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', '{'))
                                for Array in ArrayList:
                                    FileWrite(File, Array)
                            else:
                                if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                                    if Value.startswith(('0x', '0X')):
                                        Value = '{} ({:d})'.format(Value, int(Value, 0))
                                    else:
                                        Value = "0x{:X} ({})".format(int(Value, 0), Value)
                                if self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', Value))
                                elif self.DefaultStoreSingle and not self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
                                elif not self.DefaultStoreSingle and self.SkuSingle:
                                    FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + DefaultStore + ')', Value))
                                else:
                                    FileWrite(File, ' %-*s : %6s %10s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', '(' + DefaultStore + ')', Value))
                        # HII storage location: variable GUID, name and offset.
                        FileWrite(File, '%*s: %s: %s' % (self.MaxLen + 4, SkuInfo.VariableGuid, SkuInfo.VariableName, SkuInfo.VariableOffset))
                        if IsStructure:
                            OverrideValues = Pcd.SkuOverrideValues.get(Sku)
                            if OverrideValues:
                                OverrideFieldStruct = self.OverrideFieldValue(Pcd, OverrideValues[DefaultStore])
                                self.PrintStructureInfo(File, OverrideFieldStruct)
                        self.PrintPcdDefault(File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue)
            else:
                # Non-HII SKU PCD (default or VPD storage).
                Value = SkuInfo.DefaultValue
                IsByteArray, ArrayList = ByteArrayForamt(Value)
                if Pcd.DatumType == 'BOOLEAN':
                    Value = str(int(Value, 0))
                if FirstPrint:
                    FirstPrint = False
                    if IsByteArray:
                        if self.SkuSingle:
                            FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', "{"))
                        else:
                            FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', "{"))
                        for Array in ArrayList:
                            FileWrite(File, Array)
                    else:
                        if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                            if Value.startswith(('0x', '0X')):
                                Value = '{} ({:d})'.format(Value, int(Value, 0))
                            else:
                                Value = "0x{:X} ({})".format(int(Value, 0), Value)
                        if self.SkuSingle:
                            FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', Value))
                        else:
                            FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, Flag + ' ' + PcdTokenCName, TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
                else:
                    if IsByteArray:
                        if self.SkuSingle:
                            FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', "{"))
                        else:
                            FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', "{"))
                        for Array in ArrayList:
                            FileWrite(File, Array)
                    else:
                        if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
                            if Value.startswith(('0x', '0X')):
                                Value = '{} ({:d})'.format(Value, int(Value, 0))
                            else:
                                Value = "0x{:X} ({})".format(int(Value, 0), Value)
                        if self.SkuSingle:
                            FileWrite(File, ' %-*s : %6s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', Value))
                        else:
                            FileWrite(File, ' %-*s : %6s %10s %10s = %s' % (self.MaxLen, ' ', TypeName, '(' + Pcd.DatumType + ')', '(' + SkuIdName + ')', Value))
                if TypeName in ('DYNVPD', 'DEXVPD'):
                    # VPD PCD: also print the VPD offset and record the entry for
                    # the VPD summary table.
                    FileWrite(File, '%*s' % (self.MaxLen + 4, SkuInfo.VpdOffset))
                    VPDPcdItem = (Pcd.TokenSpaceGuidCName + '.' + PcdTokenCName, SkuIdName, SkuInfo.VpdOffset, Pcd.MaxDatumSize, SkuInfo.DefaultValue)
                    if VPDPcdItem not in VPDPcdList:
                        PcdGuidList = self.UnusedPcds.get(Pcd.TokenSpaceGuidCName)
                        if PcdGuidList:
                            PcdList = PcdGuidList.get(Pcd.Type)
                            if not PcdList:
                                VPDPcdList.append(VPDPcdItem)
                            # NOTE(review): if PcdList is empty the item was already
                            # appended above and the for-else appends it again; if
                            # PcdList is None the for raises TypeError - confirm
                            # whether UnusedPcds can contain empty/missing type lists.
                            for VpdPcd in PcdList:
                                if PcdTokenCName == VpdPcd.TokenCName:
                                    break
                            else:
                                VPDPcdList.append(VPDPcdItem)
                if IsStructure:
                    FiledOverrideFlag = False
                    OverrideValues = Pcd.SkuOverrideValues.get(Sku)
                    if OverrideValues:
                        Keys = list(OverrideValues.keys())
                        OverrideFieldStruct = self.OverrideFieldValue(Pcd, OverrideValues[Keys[0]])
                        self.PrintStructureInfo(File, OverrideFieldStruct)
                        FiledOverrideFlag = True
                    if not FiledOverrideFlag and (Pcd.PcdFieldValueFromComm or Pcd.PcdFieldValueFromFdf):
                        OverrideFieldStruct = self.OverrideFieldValue(Pcd, {})
                        self.PrintStructureInfo(File, OverrideFieldStruct)
                self.PrintPcdDefault(File, Pcd, IsStructure, DscMatch, DscDefaultValue, InfMatch, InfDefaultValue, DecMatch, DecDefaultValue)
def OverrideFieldValue(self, Pcd, OverrideStruct):
OverrideFieldStruct = collections.OrderedDict()
if OverrideStruct:
for _, Values in OverrideStruct.items():
for Key,value in Values.items():
if value[1] and value[1].endswith('.dsc'):
OverrideFieldStruct[Key] = value
if Pcd.PcdFieldValueFromFdf:
for Key, Values in Pcd.PcdFieldValueFromFdf.items():
if Key in OverrideFieldStruct and Values[0] == OverrideFieldStruct[Key][0]:
continue
OverrideFieldStruct[Key] = Values
if Pcd.PcdFieldValueFromComm:
for Key, Values in Pcd.PcdFieldValueFromComm.items():
if Key in OverrideFieldStruct and Values[0] == OverrideFieldStruct[Key][0]:
continue
OverrideFieldStruct[Key] = Values
return OverrideFieldStruct
def PrintStructureInfo(self, File, Struct):
    """Write one report line per structure-PCD field, flagging the override source.

    '*B' marks build-command-line overrides, '*F' marks FDF overrides;
    all other fields are printed without a marker.
    """
    for FieldName, FieldInfo in sorted(Struct.items(), key=lambda item: item[0]):
        FieldValue, Source = FieldInfo[0], FieldInfo[1]
        if Source and 'build command options' in Source:
            Template = ' *B %-*s = %s'
        elif Source and Source.endswith('.fdf'):
            Template = ' *F %-*s = %s'
        else:
            Template = ' %-*s = %s'
        FileWrite(File, Template % (self.MaxLen + 4, '.' + FieldName, FieldValue))
def StrtoHex(self, value):
    """Convert a PCD value string into hex form for reporting.

    @param value    PCD value string: a decimal integer, an L"..." unicode
                    string, a "..." ASCII string, or a {..} byte array.
    @return         Hex string for integers; a list of per-character hex
                    codes (NUL-terminated) for unicode strings; the hex char
                    code for a one-char ASCII string; a list of hex byte
                    strings for arrays; anything unrecognized unchanged.
    """
    # Plain decimal integer, e.g. '16' -> '0x10'.  The original used a bare
    # except here; narrowed to the exceptions int() can actually raise.
    try:
        return hex(int(value))
    except (ValueError, TypeError):
        pass
    if value.startswith("L\"") and value.endswith("\""):
        # Unicode string: one hex code per character plus a NUL terminator.
        valuelist = [hex(ord(ch)) for ch in value[2:-1]]
        valuelist.append('0x00')
        return valuelist
    elif value.startswith("\"") and value.endswith("\""):
        # Single-character ASCII string.
        return hex(ord(value[1:-1]))
    elif value.startswith("{") and value.endswith("}"):
        body = value[1:-1]
        if ',' not in value:
            # Single-element array: return the element text as-is.
            return body
        valuelist = []
        for ch in body.split(','):
            ch = ch.strip()
            if ch.startswith('0x') or ch.startswith('0X'):
                valuelist.append(ch)
                continue
            try:
                valuelist.append(hex(int(ch)))
            except ValueError:
                # Skip tokens that are neither hex nor decimal (best effort).
                pass
        return valuelist
    else:
        return value
def IsStructurePcd(self, PcdToken, PcdTokenSpaceGuid):
    """Return True if (PcdToken, PcdTokenSpaceGuid) is a structure PCD for this arch.

    Looks the pair up in GlobalData.gStructurePcd under the report's
    architecture; bool() normalizes the truthy chained expression.
    """
    return bool(GlobalData.gStructurePcd
                and self.Arch in GlobalData.gStructurePcd
                and (PcdToken, PcdTokenSpaceGuid) in GlobalData.gStructurePcd[self.Arch])
##
# Reports platform and module Prediction information
#
# This class reports the platform execution order prediction section and
# module load fixed address prediction subsection in the build report file.
#
class PredictionReport(object):
    ##
    # Constructor function for class PredictionReport
    #
    # This constructor function generates PredictionReport object for the platform.
    #
    # @param self:            The object pointer
    # @param Wa               Workspace context information
    #
    def __init__(self, Wa):
        self._MapFileName = os.path.join(Wa.BuildDir, Wa.Name + ".map")
        self._MapFileParsed = False
        self._EotToolInvoked = False
        self._FvDir = Wa.FvDir
        self._EotDir = Wa.BuildDir
        self._FfsEntryPoint = {}   # module GUID -> (INF entry point name, real entry symbol)
        self._GuidMap = {}         # GUID C name -> GUID registry-format string
        self._SourceList = []      # module + .c + include files fed to the EOT tool
        self.FixedMapDict = {}     # module GUID -> [(address type, value, marker), ...]
        self.ItemList = []         # (phase, symbol, INF path) rows for the report
        self.MaxLen = 0
        #
        # Collect all platform reference source files and GUID C Name
        #
        for Pa in Wa.AutoGenObjectList:
            for Module in Pa.LibraryAutoGenList + Pa.ModuleAutoGenList:
                #
                # BASE typed modules are EFI agnostic, so we need not scan
                # their source code to find PPI/Protocol produce or consume
                # information.
                #
                if Module.ModuleType == SUP_MODULE_BASE:
                    continue
                #
                # Add module referenced source files
                #
                self._SourceList.append(str(Module))
                IncludeList = {}
                for Source in Module.SourceFileList:
                    if os.path.splitext(str(Source))[1].lower() == ".c":
                        self._SourceList.append(" " + str(Source))
                        FindIncludeFiles(Source.Path, Module.IncludePathList, IncludeList)
                for IncludeFile in IncludeList.values():
                    self._SourceList.append(" " + IncludeFile)
                # Record every PPI/Protocol/GUID the module declares.
                for Guid in Module.PpiList:
                    self._GuidMap[Guid] = GuidStructureStringToGuidString(Module.PpiList[Guid])
                for Guid in Module.ProtocolList:
                    self._GuidMap[Guid] = GuidStructureStringToGuidString(Module.ProtocolList[Guid])
                for Guid in Module.GuidList:
                    self._GuidMap[Guid] = GuidStructureStringToGuidString(Module.GuidList[Guid])
                if Module.Guid and not Module.IsLibrary:
                    EntryPoint = " ".join(Module.Module.ModuleEntryPointList)
                    # The build renames every module entry point to this symbol.
                    RealEntryPoint = "_ModuleEntryPoint"
                    self._FfsEntryPoint[Module.Guid.upper()] = (EntryPoint, RealEntryPoint)
        #
        # Collect platform firmware volume list as the input of EOT.
        #
        self._FvList = []
        if Wa.FdfProfile:
            for Fd in Wa.FdfProfile.FdDict:
                for FdRegion in Wa.FdfProfile.FdDict[Fd].RegionList:
                    if FdRegion.RegionType != BINARY_FILE_TYPE_FV:
                        continue
                    for FvName in FdRegion.RegionDataList:
                        if FvName in self._FvList:
                            continue
                        self._FvList.append(FvName)
                        for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
                            for Section in Ffs.SectionList:
                                try:
                                    # Only FV image sections have SectionList/FvName;
                                    # others raise AttributeError and are skipped.
                                    for FvSection in Section.SectionList:
                                        if FvSection.FvName in self._FvList:
                                            continue
                                        self._FvList.append(FvSection.FvName)
                                except AttributeError:
                                    pass
    ##
    # Parse platform fixed address map files
    #
    # This function parses the platform final fixed address map file to get
    # the database of predicted fixed address for module image base, entry point
    # etc.
    #
    # @param self:            The object pointer
    #
    def _ParseMapFile(self):
        # Idempotent: parse at most once per report object.
        if self._MapFileParsed:
            return
        self._MapFileParsed = True
        if os.path.isfile(self._MapFileName):
            try:
                FileContents = open(self._MapFileName).read()
                for Match in gMapFileItemPattern.finditer(FileContents):
                    AddressType = Match.group(1)
                    BaseAddress = Match.group(2)
                    EntryPoint = Match.group(3)
                    Guid = Match.group(4).upper()
                    List = self.FixedMapDict.setdefault(Guid, [])
                    # "*I" = image base, "*E" = entry point (see report legend).
                    List.append((AddressType, BaseAddress, "*I"))
                    List.append((AddressType, EntryPoint, "*E"))
            except:
                EdkLogger.warn(None, "Cannot open file to read", self._MapFileName)
    ##
    # Invokes EOT tool to get the predicted the execution order.
    #
    # This function invokes EOT tool to calculate the predicted dispatch order
    #
    # @param self:            The object pointer
    #
    def _InvokeEotTool(self):
        # Idempotent: invoke the (expensive) EOT tool at most once.
        if self._EotToolInvoked:
            return
        self._EotToolInvoked = True
        FvFileList = []
        for FvName in self._FvList:
            FvFile = os.path.join(self._FvDir, FvName + ".Fv")
            if os.path.isfile(FvFile):
                FvFileList.append(FvFile)
        if len(FvFileList) == 0:
            return
        #
        # Write source file list and GUID file list to an intermediate file
        # as the input for EOT tool and dispatch List as the output file
        # from EOT tool.
        #
        SourceList = os.path.join(self._EotDir, "SourceFile.txt")
        GuidList = os.path.join(self._EotDir, "GuidList.txt")
        DispatchList = os.path.join(self._EotDir, "Dispatch.txt")
        TempFile = []
        for Item in self._SourceList:
            FileWrite(TempFile, Item)
        SaveFileOnChange(SourceList, "".join(TempFile), False)
        TempFile = []
        for Key in self._GuidMap:
            FileWrite(TempFile, "%s %s" % (Key, self._GuidMap[Key]))
        SaveFileOnChange(GuidList, "".join(TempFile), False)
        try:
            from Eot.EotMain import Eot
            #
            # Invoke EOT tool and echo its runtime performance
            #
            EotStartTime = time.time()
            Eot(CommandLineOption=False, SourceFileList=SourceList, GuidList=GuidList,
                FvFileList=' '.join(FvFileList), Dispatch=DispatchList, IsInit=True)
            EotEndTime = time.time()
            EotDuration = time.strftime("%H:%M:%S", time.gmtime(int(round(EotEndTime - EotStartTime))))
            EdkLogger.quiet("EOT run time: %s\n" % EotDuration)
            #
            # Parse the output of EOT tool
            #
            # NOTE(review): the file handle from open(DispatchList) is never
            # closed; a with-statement would be safer.
            for Line in open(DispatchList):
                if len(Line.split()) < 4:
                    continue
                (Guid, Phase, FfsName, FilePath) = Line.split()
                Symbol = self._FfsEntryPoint.get(Guid, [FfsName, ""])[0]
                if len(Symbol) > self.MaxLen:
                    self.MaxLen = len(Symbol)
                self.ItemList.append((Phase, Symbol, FilePath))
        except:
            EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))
            EdkLogger.warn(None, "Failed to generate execution order prediction report, for some error occurred in executing EOT.")
    ##
    # Generate platform execution order report
    #
    # This function generates the predicted module execution order.
    #
    # @param self            The object pointer
    # @param File            The file object for report
    #
    def _GenerateExecutionOrderReport(self, File):
        self._InvokeEotTool()
        if len(self.ItemList) == 0:
            return
        FileWrite(File, gSectionStart)
        FileWrite(File, "Execution Order Prediction")
        FileWrite(File, "*P PEI phase")
        FileWrite(File, "*D DXE phase")
        FileWrite(File, "*E Module INF entry point name")
        FileWrite(File, "*N Module notification function name")
        FileWrite(File, "Type %-*s %s" % (self.MaxLen, "Symbol", "Module INF Path"))
        FileWrite(File, gSectionSep)
        for Item in self.ItemList:
            FileWrite(File, "*%sE %-*s %s" % (Item[0], self.MaxLen, Item[1], Item[2]))
        # NOTE(review): writes gSectionStart here rather than gSectionEnd -
        # looks unintended; confirm against the report format spec.
        FileWrite(File, gSectionStart)
    ##
    # Generate Fixed Address report.
    #
    # This function generate the predicted fixed address report for a module
    # specified by Guid.
    #
    # @param self            The object pointer
    # @param File            The file object for report
    # @param Guid            The module Guid value.
    # @param NotifyList      The list of all notify function in a module
    #
    def _GenerateFixedAddressReport(self, File, Guid, NotifyList):
        self._ParseMapFile()
        FixedAddressList = self.FixedMapDict.get(Guid)
        if not FixedAddressList:
            return
        FileWrite(File, gSubSectionStart)
        FileWrite(File, "Fixed Address Prediction")
        FileWrite(File, "*I Image Loading Address")
        FileWrite(File, "*E Entry Point Address")
        FileWrite(File, "*N Notification Function Address")
        FileWrite(File, "*F Flash Address")
        FileWrite(File, "*M Memory Address")
        FileWrite(File, "*S SMM RAM Offset")
        FileWrite(File, "TOM Top of Memory")
        FileWrite(File, "Type Address Name")
        FileWrite(File, gSubSectionSep)
        for Item in FixedAddressList:
            Type = Item[0]
            Value = Item[1]
            Symbol = Item[2]
            if Symbol == "*I":
                Name = "(Image Base)"
            elif Symbol == "*E":
                Name = self._FfsEntryPoint.get(Guid, ["", "_ModuleEntryPoint"])[1]
            elif Symbol in NotifyList:
                Name = Symbol
                Symbol = "*N"
            else:
                continue
            # Second marker char encodes the address space (see legend above).
            if "Flash" in Type:
                Symbol += "F"
            elif "Memory" in Type:
                Symbol += "M"
            else:
                Symbol += "S"
            # Negative values are offsets below the top of memory.
            if Value[0] == "-":
                Value = "TOM" + Value
            FileWrite(File, "%s %-16s %s" % (Symbol, Value, Name))
    ##
    # Generate report for the prediction part
    #
    # This function generate the predicted fixed address report for a module or
    # predicted module execution order for a platform.
    # If the input Guid is None, then, it generates the predicted module execution order;
    # otherwise it generated the module fixed loading address for the module specified by
    # Guid.
    #
    # @param self            The object pointer
    # @param File            The file object for report
    # @param Guid            The module Guid value.
    #
    def GenerateReport(self, File, Guid):
        if Guid:
            self._GenerateFixedAddressReport(File, Guid.upper(), [])
        else:
            self._GenerateExecutionOrderReport(File)
##
# Reports FD region information
#
# This class reports the FD subsection in the build report file.
# It collects region information of platform flash device.
# If the region is a firmware volume, it lists the set of modules
# and its space information; otherwise, it only lists its region name,
# base address and size in its sub-section header.
# If there are nesting FVs, the nested FVs will list immediate after
# this FD region subsection
#
class FdRegionReport(object):
##
# Discover all the nested FV name list.
#
# This is an internal worker function to discover the all the nested FV information
# in the parent firmware volume. It uses deep first search algorithm recursively to
# find all the FV list name and append them to the list.
#
# @param self The object pointer
# @param FvName The name of current firmware file system
# @param Wa Workspace context information
#
def _DiscoverNestedFvList(self, FvName, Wa):
    """Depth-first discovery of every FV nested inside FvName.

    Each newly found nested FV is recorded in self._GuidsDb (keyed by the
    containing FFS file GUID), appended to self.FvList, and tagged in
    self.FvInfo before being searched recursively itself.
    """
    FvKey = FvName.upper()
    if FvKey not in Wa.FdfProfile.FvDict:
        return
    for Ffs in Wa.FdfProfile.FvDict[FvKey].FfsList:
        for Section in Ffs.SectionList:
            try:
                # Only FV image sections expose SectionList/FvName; any other
                # section type raises AttributeError and is skipped.
                for FvSection in Section.SectionList:
                    NestedName = FvSection.FvName
                    if NestedName in self.FvList:
                        continue
                    self._GuidsDb[Ffs.NameGuid.upper()] = NestedName
                    self.FvList.append(NestedName)
                    self.FvInfo[NestedName] = ("Nested FV", 0, 0)
                    self._DiscoverNestedFvList(NestedName, Wa)
            except AttributeError:
                pass
##
# Constructor function for class FdRegionReport
#
# This constructor function generates FdRegionReport object for a specified FdRegion.
# If the FdRegion is a firmware volume, it will recursively find all its nested Firmware
# volume list. This function also collects GUID map in order to dump module identification
# in the final report.
#
# @param self: The object pointer
# @param FdRegion The current FdRegion object
# @param Wa Workspace context information
#
def __init__(self, FdRegion, Wa):
self.Type = FdRegion.RegionType
self.BaseAddress = FdRegion.Offset
self.Size = FdRegion.Size
self.FvList = []
self.FvInfo = {}
self._GuidsDb = {}
self._FvDir = Wa.FvDir
self._WorkspaceDir = Wa.WorkspaceDir
#
# If the input FdRegion is not a firmware volume,
# we are done.
#
if self.Type != BINARY_FILE_TYPE_FV:
return
#
# Find all nested FVs in the FdRegion
#
for FvName in FdRegion.RegionDataList:
if FvName in self.FvList:
continue
self.FvList.append(FvName)
self.FvInfo[FvName] = ("Fd Region", self.BaseAddress, self.Size)
self._DiscoverNestedFvList(FvName, Wa)
PlatformPcds = {}
#
# Collect PCDs declared in DEC files.
#
for Pa in Wa.AutoGenObjectList:
for Package in Pa.PackageList:
for (TokenCName, TokenSpaceGuidCName, DecType) in Package.Pcds:
DecDefaultValue = Package.Pcds[TokenCName, TokenSpaceGuidCName, DecType].DefaultValue
PlatformPcds[(TokenCName, TokenSpaceGuidCName)] = DecDefaultValue
#
# Collect PCDs defined in DSC file
#
for Pa in Wa.AutoGenObjectList:
for (TokenCName, TokenSpaceGuidCName) in Pa.Platform.Pcds:
DscDefaultValue = Pa.Platform.Pcds[(TokenCName, TokenSpaceGuidCName)].DefaultValue
PlatformPcds[(TokenCName, TokenSpaceGuidCName)] = DscDefaultValue
#
# Add PEI and DXE a priori files GUIDs defined in PI specification.
#
self._GuidsDb[PEI_APRIORI_GUID] = "PEI Apriori"
self._GuidsDb[DXE_APRIORI_GUID] = "DXE Apriori"
#
# Add ACPI table storage file
#
self._GuidsDb["7E374E25-8E01-4FEE-87F2-390C23C606CD"] = "ACPI table storage"
for Pa in Wa.AutoGenObjectList:
for ModuleKey in Pa.Platform.Modules:
M = Pa.Platform.Modules[ModuleKey].M
InfPath = mws.join(Wa.WorkspaceDir, M.MetaFile.File)
self._GuidsDb[M.Guid.upper()] = "%s (%s)" % (M.Module.BaseName, InfPath)
#
# Collect the GUID map in the FV firmware volume
#
for FvName in self.FvList:
FvDictKey=FvName.upper()
if FvDictKey in Wa.FdfProfile.FvDict:
for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
try:
#
# collect GUID map for binary EFI file in FDF file.
#
Guid = Ffs.NameGuid.upper()
Match = gPcdGuidPattern.match(Ffs.NameGuid)
if Match:
PcdTokenspace = Match.group(1)
PcdToken = Match.group(2)
if (PcdToken, PcdTokenspace) in PlatformPcds:
GuidValue = PlatformPcds[(PcdToken, PcdTokenspace)]
Guid = GuidStructureByteArrayToGuidString(GuidValue).upper()
for Section in Ffs.SectionList:
try:
ModuleSectFile = mws.join(Wa.WorkspaceDir, Section.SectFileName)
self._GuidsDb[Guid] = ModuleSectFile
except AttributeError:
pass
except AttributeError:
pass
##
# Internal worker function to generate report for the FD region
#
# This internal worker function to generate report for the FD region.
# It the type is firmware volume, it lists offset and module identification.
#
# @param self The object pointer
# @param File The file object for report
# @param Title The title for the FD subsection
# @param BaseAddress The base address for the FD region
# @param Size The size of the FD region
# @param FvName The FV name if the FD region is a firmware volume
#
def _GenerateReport(self, File, Title, Type, BaseAddress, Size=0, FvName=None):
FileWrite(File, gSubSectionStart)
FileWrite(File, Title)
FileWrite(File, "Type: %s" % Type)
FileWrite(File, "Base Address: 0x%X" % BaseAddress)
if self.Type == BINARY_FILE_TYPE_FV:
FvTotalSize = 0
FvTakenSize = 0
FvFreeSize = 0
if FvName.upper().endswith('.FV'):
FileExt = FvName + ".txt"
else:
FileExt = FvName + ".Fv.txt"
if not os.path.isfile(FileExt):
FvReportFileName = mws.join(self._WorkspaceDir, FileExt)
if not os.path.isfile(FvReportFileName):
FvReportFileName = os.path.join(self._FvDir, FileExt)
try:
#
# Collect size info in the firmware volume.
#
FvReport = open(FvReportFileName).read()
Match = gFvTotalSizePattern.search(FvReport)
if Match:
FvTotalSize = int(Match.group(1), 16)
Match = gFvTakenSizePattern.search(FvReport)
if Match:
FvTakenSize = int(Match.group(1), 16)
FvFreeSize = FvTotalSize - FvTakenSize
#
# Write size information to the report file.
#
FileWrite(File, "Size: 0x%X (%.0fK)" % (FvTotalSize, FvTotalSize / 1024.0))
FileWrite(File, "Fv Name: %s (%.1f%% Full)" % (FvName, FvTakenSize * 100.0 / FvTotalSize))
FileWrite(File, "Occupied Size: 0x%X (%.0fK)" % (FvTakenSize, FvTakenSize / 1024.0))
FileWrite(File, "Free Size: 0x%X (%.0fK)" % (FvFreeSize, FvFreeSize / 1024.0))
FileWrite(File, "Offset Module")
FileWrite(File, gSubSectionSep)
#
# Write module offset and module identification to the report file.
#
OffsetInfo = {}
for Match in gOffsetGuidPattern.finditer(FvReport):
Guid = Match.group(2).upper()
OffsetInfo[Match.group(1)] = self._GuidsDb.get(Guid, Guid)
OffsetList = sorted(OffsetInfo.keys())
for Offset in OffsetList:
FileWrite (File, "%s %s" % (Offset, OffsetInfo[Offset]))
except IOError:
EdkLogger.warn(None, "Fail to read report file", FvReportFileName)
else:
FileWrite(File, "Size: 0x%X (%.0fK)" % (Size, Size / 1024.0))
FileWrite(File, gSubSectionEnd)
##
# Generate report for the FD region
#
# This function generates report for the FD region.
#
# @param self The object pointer
# @param File The file object for report
#
def GenerateReport(self, File):
if (len(self.FvList) > 0):
for FvItem in self.FvList:
Info = self.FvInfo[FvItem]
self._GenerateReport(File, Info[0], TAB_FV_DIRECTORY, Info[1], Info[2], FvItem)
else:
self._GenerateReport(File, "FD Region", self.Type, self.BaseAddress, self.Size)
##
# Reports FD information
#
# This class reports the FD section in the build report file.
# It collects flash device information for a platform.
#
class FdReport(object):
    ##
    # Constructor function for class FdReport
    #
    # This constructor function generates FdReport object for a specified
    # firmware device.
    #
    # @param self            The object pointer
    # @param Fd              The current Firmware device object
    # @param Wa              Workspace context information
    #
    def __init__(self, Fd, Wa):
        self.FdName = Fd.FdUiName
        self.BaseAddress = Fd.BaseAddress
        self.Size = Fd.Size
        # One FdRegionReport per region of this flash device.
        self.FdRegionList = [FdRegionReport(FdRegion, Wa) for FdRegion in Fd.RegionList]
        self.FvPath = os.path.join(Wa.BuildDir, TAB_FV_DIRECTORY)
        self.VPDBaseAddress = 0
        self.VPDSize = 0
        # Locate the VPD region: a FILE-type region whose data list references
        # the platform's VPD tool GUID. Only the first match is recorded.
        for index, FdRegion in enumerate(Fd.RegionList):
            if str(FdRegion.RegionType) == 'FILE' and Wa.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
                self.VPDBaseAddress = self.FdRegionList[index].BaseAddress
                self.VPDSize = self.FdRegionList[index].Size
                break

    ##
    # Generate report for the firmware device.
    #
    # This function generates report for the firmware device.
    #
    # @param self            The object pointer
    # @param File            The file object for report
    #
    def GenerateReport(self, File):
        FileWrite(File, gSectionStart)
        FileWrite(File, "Firmware Device (FD)")
        FileWrite(File, "FD Name: %s" % self.FdName)
        FileWrite(File, "Base Address: %s" % self.BaseAddress)
        FileWrite(File, "Size: 0x%X (%.0fK)" % (self.Size, self.Size / 1024.0))
        if len(self.FdRegionList) > 0:
            FileWrite(File, gSectionSep)
            for FdRegionItem in self.FdRegionList:
                FdRegionItem.GenerateReport(File)

        # VPDPcdList is a module-level list (populated by the PCD report
        # machinery elsewhere in this file) — assumed filled before FD
        # reports run; TODO confirm against caller ordering.
        if VPDPcdList:
            # Sort VPD entries by their offset (item[2], any integer base).
            VPDPcdList.sort(key=lambda x: int(x[2], 0))
            FileWrite(File, gSubSectionStart)
            FileWrite(File, "FD VPD Region")
            FileWrite(File, "Base Address: 0x%X" % self.VPDBaseAddress)
            FileWrite(File, "Size: 0x%X (%.0fK)" % (self.VPDSize, self.VPDSize / 1024.0))
            FileWrite(File, gSubSectionSep)
            for item in VPDPcdList:
                # Add BaseAddress for offset
                Offset = '0x%08X' % (int(item[2], 16) + self.VPDBaseAddress)
                IsByteArray, ArrayList = ByteArrayForamt(item[-1])
                Skuinfo = item[1]
                # With a single SKU, report the concrete SKU id instead of the
                # per-entry SKU field.
                if len(GlobalData.gSkuids) == 1 :
                    Skuinfo = GlobalData.gSkuids[0]
                if IsByteArray:
                    FileWrite(File, "%s | %s | %s | %s | %s" % (item[0], Skuinfo, Offset, item[3], '{'))
                    for Array in ArrayList:
                        FileWrite(File, Array)
                else:
                    FileWrite(File, "%s | %s | %s | %s | %s" % (item[0], Skuinfo, Offset, item[3], item[-1]))
            FileWrite(File, gSubSectionEnd)
        FileWrite(File, gSectionEnd)
##
# Reports platform information
#
# This class reports the whole platform information
#
class PlatformReport(object):
    ##
    # Constructor function for class PlatformReport
    #
    # This constructor function generates PlatformReport object a platform build.
    # It generates report for platform summary, flash, global PCDs and detailed
    # module information for modules involved in platform build.
    #
    # @param self            The object pointer
    # @param Wa              Workspace context information
    # @param MaList          The list of modules in the platform build
    #                        (None for a full platform build)
    # @param ReportType      The kind of report items in the final report file
    #
    def __init__(self, Wa, MaList, ReportType):
        self._WorkspaceDir = Wa.WorkspaceDir
        self.PlatformName = Wa.Name
        self.PlatformDscPath = Wa.Platform
        self.Architectures = " ".join(Wa.ArchList)
        self.ToolChain = Wa.ToolChain
        self.Target = Wa.BuildTarget
        self.OutputPath = os.path.join(Wa.WorkspaceDir, Wa.OutputDir)
        self.BuildEnvironment = platform.platform()

        # Sub-reports are created lazily according to the requested types.
        self.PcdReport = None
        if "PCD" in ReportType:
            self.PcdReport = PcdReport(Wa)

        self.FdReportList = []
        # Flash reports only apply to full platform builds with an FDF file.
        if "FLASH" in ReportType and Wa.FdfProfile and MaList is None:
            for Fd in Wa.FdfProfile.FdDict:
                self.FdReportList.append(FdReport(Wa.FdfProfile.FdDict[Fd], Wa))

        self.PredictionReport = None
        if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:
            self.PredictionReport = PredictionReport(Wa)

        self.DepexParser = None
        if "DEPEX" in ReportType:
            self.DepexParser = DepexParser(Wa)

        self.ModuleReportList = []
        if MaList is not None:
            # Module build: report only the requested modules.
            self._IsModuleBuild = True
            for Ma in MaList:
                self.ModuleReportList.append(ModuleReport(Ma, ReportType))
        else:
            # Platform build: gather every module and library autogen object,
            # plus any INFs referenced only from the FDF file.
            self._IsModuleBuild = False
            for Pa in Wa.AutoGenObjectList:
                ModuleAutoGenList = []
                for ModuleKey in Pa.Platform.Modules:
                    ModuleAutoGenList.append(Pa.Platform.Modules[ModuleKey].M)
                if GlobalData.gFdfParser is not None:
                    if Pa.Arch in GlobalData.gFdfParser.Profile.InfDict:
                        INFList = GlobalData.gFdfParser.Profile.InfDict[Pa.Arch]
                        for InfName in INFList:
                            InfClass = PathClass(NormPath(InfName), Wa.WorkspaceDir, Pa.Arch)
                            Ma = ModuleAutoGen(Wa, InfClass, Pa.BuildTarget, Pa.ToolChain, Pa.Arch, Wa.MetaFile, Pa.DataPipe)
                            if Ma is None:
                                continue
                            if Ma not in ModuleAutoGenList:
                                ModuleAutoGenList.append(Ma)
                for MGen in ModuleAutoGenList:
                    self.ModuleReportList.append(ModuleReport(MGen, ReportType))

    ##
    # Generate report for the whole platform.
    #
    # This function generates report for platform information.
    # It comprises of platform summary, global PCD, flash and
    # module list sections.
    #
    # @param self            The object pointer
    # @param File            The file object for report
    # @param BuildDuration   The total time to build the modules
    # @param AutoGenTime     The total time of AutoGen Phase
    # @param MakeTime        The total time of Make Phase
    # @param GenFdsTime      The total time of GenFds Phase
    # @param ReportType      The kind of report items in the final report file
    #
    def GenerateReport(self, File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, ReportType):
        FileWrite(File, "Platform Summary")
        FileWrite(File, "Platform Name: %s" % self.PlatformName)
        FileWrite(File, "Platform DSC Path: %s" % self.PlatformDscPath)
        FileWrite(File, "Architectures: %s" % self.Architectures)
        FileWrite(File, "Tool Chain: %s" % self.ToolChain)
        FileWrite(File, "Target: %s" % self.Target)
        if GlobalData.gSkuids:
            FileWrite(File, "SKUID: %s" % " ".join(GlobalData.gSkuids))
        if GlobalData.gDefaultStores:
            FileWrite(File, "DefaultStore: %s" % " ".join(GlobalData.gDefaultStores))
        FileWrite(File, "Output Path: %s" % self.OutputPath)
        FileWrite(File, "Build Environment: %s" % self.BuildEnvironment)
        FileWrite(File, "Build Duration: %s" % BuildDuration)
        if AutoGenTime:
            FileWrite(File, "AutoGen Duration: %s" % AutoGenTime)
        if MakeTime:
            FileWrite(File, "Make Duration: %s" % MakeTime)
        if GenFdsTime:
            FileWrite(File, "GenFds Duration: %s" % GenFdsTime)
        FileWrite(File, "Report Content: %s" % ", ".join(ReportType))

        # List PCDs accessed through more than one access method, if any.
        if GlobalData.MixedPcd:
            FileWrite(File, gSectionStart)
            FileWrite(File, "The following PCDs use different access methods:")
            FileWrite(File, gSectionSep)
            for PcdItem in GlobalData.MixedPcd:
                FileWrite(File, "%s.%s" % (str(PcdItem[1]), str(PcdItem[0])))
            FileWrite(File, gSectionEnd)

        # Platform-wide sections are skipped for module-only builds.
        if not self._IsModuleBuild:
            if "PCD" in ReportType:
                self.PcdReport.GenerateReport(File, None)
            if "FLASH" in ReportType:
                for FdReportListItem in self.FdReportList:
                    FdReportListItem.GenerateReport(File)
        for ModuleReportItem in self.ModuleReportList:
            ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, self.DepexParser, ReportType)
        if not self._IsModuleBuild:
            if "EXECUTION_ORDER" in ReportType:
                self.PredictionReport.GenerateReport(File, None)
## BuildReport class
#
# This base class contain the routines to collect data and then
# applies certain format to the output report
#
class BuildReport(object):
    ##
    # Constructor function for class BuildReport
    #
    # This constructor function generates BuildReport object a platform build.
    # It generates report for platform summary, flash, global PCDs and detailed
    # module information for modules involved in platform build.
    #
    # @param self            The object pointer
    # @param ReportFile      The file name to save report file
    # @param ReportType      The kind of report items in the final report file
    #
    def __init__(self, ReportFile, ReportType):
        self.ReportFile = ReportFile
        if ReportFile:
            self.ReportList = []
            self.ReportType = []
            if ReportType:
                # De-duplicate while preserving the caller's ordering.
                for ReportTypeItem in ReportType:
                    if ReportTypeItem not in self.ReportType:
                        self.ReportType.append(ReportTypeItem)
            else:
                self.ReportType = ["PCD", "LIBRARY", "BUILD_FLAGS", "DEPEX", "HASH", "FLASH", "FIXED_ADDRESS"]

    ##
    # Adds platform report to the list
    #
    # This function adds a platform report to the final report list.
    #
    # @param self            The object pointer
    # @param Wa              Workspace context information
    # @param MaList          The list of modules in the platform build
    #
    def AddPlatformReport(self, Wa, MaList=None):
        if self.ReportFile:
            self.ReportList.append((Wa, MaList))

    ##
    # Generates the final report.
    #
    # This function generates platform build report. It invokes GenerateReport()
    # method for every platform report in the list.
    #
    # @param self            The object pointer
    # @param BuildDuration   The total time to build the modules
    # @param AutoGenTime     The total time of AutoGen phase
    # @param MakeTime        The total time of Make phase
    # @param GenFdsTime      The total time of GenFds phase
    #
    def GenerateReport(self, BuildDuration, AutoGenTime, MakeTime, GenFdsTime):
        if self.ReportFile:
            try:
                if "COMPILE_INFO" in self.ReportType:
                    self.GenerateCompileInfo()

                # Each PlatformReport appends its lines to this shared list.
                File = []
                for (Wa, MaList) in self.ReportList:
                    PlatformReport(Wa, MaList, self.ReportType).GenerateReport(File, BuildDuration, AutoGenTime, MakeTime, GenFdsTime, self.ReportType)
                Content = FileLinesSplit(''.join(File), gLineMaxLength)
                SaveFileOnChange(self.ReportFile, Content, False)
                EdkLogger.quiet("Build report can be found at %s" % os.path.abspath(self.ReportFile))
            except IOError:
                EdkLogger.error(None, FILE_WRITE_FAILURE, ExtraData=self.ReportFile)
            except:
                # Deliberate catch-all: a report failure must not kill the
                # build; log the traceback instead.
                EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report", ExtraData=self.ReportFile, RaiseError=False)
                EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))

    ##
    # Generates compile data files to be used by external tools.
    # Compile information will be generated in <Build>/<BuildTarget>/<ToolChain>/CompileInfo
    # Files generated: compile_commands.json, cscope.files, modules_report.json
    #
    # @param self            The object pointer
    #
    def GenerateCompileInfo(self):
        try:
            # Lists for the output elements
            compile_commands = []
            used_files = set()
            module_report = []

            for (Wa, MaList) in self.ReportList:
                # Obtain list of all processed Workspace files
                for file_path in Wa._GetMetaFiles(Wa.BuildTarget, Wa.ToolChain):
                    used_files.add(file_path)

                for autoGen in Wa.AutoGenObjectList:
                    # Loop through all modules
                    for module in (autoGen.LibraryAutoGenList + autoGen.ModuleAutoGenList):
                        used_files.add(module.MetaFile.Path)

                        # Main elements of module report
                        module_report_data = {}
                        module_report_data["Name"] = module.Name
                        module_report_data["Arch"] = module.Arch
                        module_report_data["Path"] = module.MetaFile.Path
                        module_report_data["Guid"] = module.Guid
                        module_report_data["BuildType"] = module.BuildType
                        module_report_data["IsLibrary"] = module.IsLibrary
                        module_report_data["SourceDir"] = module.SourceDir
                        module_report_data["Files"] = []
                        module_report_data["LibraryClass"] = module.Module.LibraryClass
                        module_report_data["ModuleEntryPointList"] = module.Module.ModuleEntryPointList
                        module_report_data["ConstructorList"] = module.Module.ConstructorList
                        module_report_data["DestructorList"] = module.Module.DestructorList

                        # Files used by module
                        for data_file in module.SourceFileList:
                            module_report_data["Files"].append({"Name": data_file.Name, "Path": data_file.Path})

                        # Libraries used by module
                        module_report_data["Libraries"] = []
                        for data_library in module.LibraryAutoGenList:
                            module_report_data["Libraries"].append({"Path": data_library.MetaFile.Path})

                        # Packages used by module
                        module_report_data["Packages"] = []
                        for data_package in module.PackageList:
                            module_report_data["Packages"].append({"Path": data_package.MetaFile.Path, "Includes": []})
                            # Includes path used in package
                            for data_package_include in data_package.Includes:
                                module_report_data["Packages"][-1]["Includes"].append(data_package_include.Path)

                        # PPI's in module
                        module_report_data["PPI"] = []
                        for data_ppi in module.PpiList.keys():
                            module_report_data["PPI"].append({"Name": data_ppi, "Guid": module.PpiList[data_ppi]})

                        # Protocol's in module
                        module_report_data["Protocol"] = []
                        for data_protocol in module.ProtocolList.keys():
                            module_report_data["Protocol"].append({"Name": data_protocol, "Guid": module.ProtocolList[data_protocol]})

                        # PCD's in module
                        module_report_data["Pcd"] = []
                        for data_pcd in module.LibraryPcdList:
                            module_report_data["Pcd"].append({"Space": data_pcd.TokenSpaceGuidCName,
                                                             "Name": data_pcd.TokenCName,
                                                             "Value": data_pcd.TokenValue,
                                                             "Guid": data_pcd.TokenSpaceGuidValue,
                                                             "DatumType": data_pcd.DatumType,
                                                             "Type": data_pcd.Type,
                                                             "DefaultValue": data_pcd.DefaultValue})
                        # Add module to report
                        module_report.append(module_report_data)

                        # Include file dependencies to used files
                        includes_autogen = IncludesAutoGen(module.MakeFileDir, module)
                        for dep in includes_autogen.DepsCollection:
                            used_files.add(dep)

                        inc_flag = "-I" # Default include flag
                        if module.BuildRuleFamily == TAB_COMPILER_MSFT:
                            inc_flag = "/I"

                        for source in module.SourceFileList:
                            used_files.add(source.Path)
                            compile_command = {}
                            if source.Ext in [".c", ".cc", ".cpp"]:
                                #
                                # Generate compile command for each c file
                                #
                                compile_command["file"] = source.Path
                                compile_command["directory"] = source.Dir
                                build_command = module.BuildRules[source.Ext].CommandList[0]
                                # Expand $(VAR) / $(VAR_ATTR) tokens from the
                                # module's build options.
                                build_command_variables = re.findall(r"\$\((.*?)\)", build_command)
                                for var in build_command_variables:
                                    var_tokens = var.split("_")
                                    var_main = var_tokens[0]
                                    if len(var_tokens) == 1:
                                        var_value = module.BuildOption[var_main]["PATH"]
                                    else:
                                        var_value = module.BuildOption[var_main][var_tokens[1]]
                                    build_command = build_command.replace(f"$({var})", var_value)
                                # NOTE(review): the first include path gets no
                                # inc_flag prefix from this join — presumably the
                                # build-rule template supplies it; verify.
                                include_files = f" {inc_flag}".join(module.IncludePathList)
                                build_command = build_command.replace("${src}", include_files)
                                build_command = build_command.replace("${dst}", module.OutputDir)

                                # Remove un defined macros
                                compile_command["command"] = re.sub(r"\$\(.*?\)", "", build_command)
                                compile_commands.append(compile_command)

            # Create output folder if doesn't exist
            compile_info_folder = Path(Wa.BuildDir).joinpath("CompileInfo")
            compile_info_folder.mkdir(exist_ok=True)

            # Sort and save files.  (Removed pointless f-string prefixes on the
            # constant file names below.)
            compile_commands.sort(key=lambda x: x["file"])
            SaveFileOnChange(compile_info_folder.joinpath("compile_commands.json"), json.dumps(compile_commands, indent=2), False)

            SaveFileOnChange(compile_info_folder.joinpath("cscope.files"), "\n".join(sorted(used_files)), False)

            module_report.sort(key=lambda x: x["Path"])
            SaveFileOnChange(compile_info_folder.joinpath("module_report.json"), json.dumps(module_report, indent=2), False)
        except:
            # Deliberate catch-all, mirroring GenerateReport(): log and continue.
            EdkLogger.error("BuildReport", CODE_ERROR, "Unknown fatal error when generating build report compile information", ExtraData=self.ReportFile, RaiseError=False)
            EdkLogger.quiet("(Python %s on %s\n%s)" % (platform.python_version(), sys.platform, traceback.format_exc()))
# This acts like the main() function for the script, unless it is 'import'ed into another script.
if __name__ == '__main__':
pass
| edk2-master | BaseTools/Source/Python/build/BuildReport.py |
## @file
# Python 'TargetTool' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/TargetTool/__init__.py |
## @file
# Target Tool Parser
#
# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
import Common.LongFilePathOs as os
import sys
import traceback
from optparse import OptionParser
import Common.EdkLogger as EdkLogger
import Common.BuildToolError as BuildToolError
from Common.DataType import *
from Common.BuildVersion import gBUILD_VERSION
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.TargetTxtClassObject import gDefaultTargetTxtFile
# To Do 1.set clean, 2. add item, if the line is disabled.
class TargetTool():
    ## Constructor
    #
    # Locates Conf/target.txt under $(WORKSPACE), exits if it is missing,
    # and loads its key/value pairs into self.TargetTxtDictionary.
    #
    # @param opt   Parsed command line options
    # @param args  Positional arguments; args[0] is one of Print/Clean/Set
    #
    def __init__(self, opt, args):
        self.WorkSpace = os.path.normpath(os.getenv('WORKSPACE'))
        self.Opt = opt
        self.Arg = args[0]
        self.FileName = os.path.normpath(os.path.join(self.WorkSpace, 'Conf', gDefaultTargetTxtFile))
        if os.path.isfile(self.FileName) == False:
            print("%s does not exist." % self.FileName)
            sys.exit(1)
        # Keys of interest in target.txt; None means "not yet configured".
        self.TargetTxtDictionary = {
            TAB_TAT_DEFINES_ACTIVE_PLATFORM                     : None,
            TAB_TAT_DEFINES_TOOL_CHAIN_CONF                     : None,
            TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER        : None,
            TAB_TAT_DEFINES_TARGET                              : None,
            TAB_TAT_DEFINES_TOOL_CHAIN_TAG                      : None,
            TAB_TAT_DEFINES_TARGET_ARCH                         : None,
            TAB_TAT_DEFINES_BUILD_RULE_CONF                     : None,
        }
        self.LoadTargetTxtFile(self.FileName)

    ## Load target.txt into the configuration dictionary.
    #
    # @param filename  Full path of the target.txt file
    # @return 0 on success (via ConvertTextFileToDict)
    # @raise IOError when the file does not exist
    #
    def LoadTargetTxtFile(self, filename):
        if os.path.exists(filename) and os.path.isfile(filename):
            return self.ConvertTextFileToDict(filename, '#', '=')
        # BUGFIX: the original raised the undefined name 'ParseError', which
        # surfaced as a NameError; raise a defined exception with the same
        # message instead.  (The old unreachable 'return 1' is dropped.)
        raise IOError('LoadTargetTxtFile() : No Target.txt file exists.')

    #
    # Convert a text file to a dictionary
    #
    def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
        """Convert a text file to a dictionary of (name:value) pairs."""
        try:
            # 'with' guarantees the handle is closed even when parsing fails
            # (the original leaked the handle on exceptions).
            with open(FileName, 'r') as f:
                for Line in f:
                    if Line.startswith(CommentCharacter) or Line.strip() == '':
                        continue
                    LineList = Line.split(KeySplitCharacter, 1)
                    if len(LineList) >= 2:
                        Key = LineList[0].strip()
                        if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
                            # Path-like values are stored as single normalized strings;
                            # list-like values are split on whitespace.
                            if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM or Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF \
                              or Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER \
                              or Key == TAB_TAT_DEFINES_ACTIVE_MODULE:
                                self.TargetTxtDictionary[Key] = LineList[1].replace('\\', '/').strip()
                            elif Key == TAB_TAT_DEFINES_TARGET or Key == TAB_TAT_DEFINES_TARGET_ARCH \
                              or Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG or Key == TAB_TAT_DEFINES_BUILD_RULE_CONF:
                                self.TargetTxtDictionary[Key] = LineList[1].split()
            return 0
        except:
            last_type, last_value, last_tb = sys.exc_info()
            traceback.print_exception(last_type, last_value, last_tb)

    ## Print the current configuration, warning about any missing keys.
    def Print(self):
        errMsg = ''
        for Key in self.TargetTxtDictionary:
            if isinstance(self.TargetTxtDictionary[Key], list):
                print("%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key])))
            elif self.TargetTxtDictionary[Key] is None:
                errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
            else:
                print("%-30s = %s" % (Key, self.TargetTxtDictionary[Key]))

        if errMsg != '':
            print(os.linesep + 'Warning:' + os.linesep + errMsg)

    ## Rewrite target.txt.
    #
    # Copies target.txt to a temporary file, replacing each known key's line:
    # Num == 0 clears the value ('Clean'); otherwise ('Set') the value comes
    # from the command line via GetConfigureKeyValue().  Keys missing from the
    # original file are appended.  The temporary file then replaces target.txt.
    #
    def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
        # BUGFIX: build the temp path with os.path.join instead of the
        # Windows-only literal 'Conf\\targetnew.txt' so the tool also works
        # on POSIX hosts.
        NewFileName = os.path.normpath(os.path.join(self.WorkSpace, 'Conf', 'targetnew.txt'))
        try:
            with open(self.FileName, 'r') as fr, open(NewFileName, 'w') as fw:
                existKeys = []
                for Line in fr:
                    if Line.startswith(CommentCharacter) or Line.strip() == '':
                        fw.write(Line)
                    else:
                        LineList = Line.split(KeySplitCharacter, 1)
                        if len(LineList) >= 2:
                            Key = LineList[0].strip()
                            if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary:
                                if Key not in existKeys:
                                    existKeys.append(Key)
                                else:
                                    print("Warning: Found duplicate key item in original configuration files!")

                                if Num == 0:
                                    Line = "%-30s = \n" % Key
                                else:
                                    ret = GetConfigureKeyValue(self, Key)
                                    if ret is not None:
                                        Line = ret
                        fw.write(Line)
                # Append any configured key that was absent from the file.
                for key in self.TargetTxtDictionary:
                    if key not in existKeys:
                        print("Warning: %s does not exist in original configuration file" % key)
                        Line = GetConfigureKeyValue(self, key)
                        if Line is None:
                            Line = "%-30s = " % key
                        fw.write(Line)

            os.remove(self.FileName)
            os.rename(NewFileName, self.FileName)
        except:
            last_type, last_value, last_tb = sys.exc_info()
            traceback.print_exception(last_type, last_value, last_tb)
def GetConfigureKeyValue(self, Key):
    """Build the 'KEY = value' line for *Key* from the parsed command line.

    Returns the formatted line (newline terminated) or None when the matching
    option was not given.  Note: despite the 'self' parameter name this is a
    module-level helper that receives the TargetTool instance explicitly.
    """
    Line = None
    if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE is not None:
        dscFullPath = os.path.join(self.WorkSpace, self.Opt.DSCFILE)
        if os.path.exists(dscFullPath):
            Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
        else:
            EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
                            "DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
    elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE is not None:
        tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
        if os.path.exists(tooldefFullPath):
            Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
        else:
            EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
                            "Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
    # BUGFIX: two stray branches ('elif self.Opt.NUM >= 2' / '<= 1') used to
    # sit here without any Key qualification.  Whenever -m was given they
    # hijacked every later key (TARGET, TOOL_CHAIN_TAG, ...) and emitted an
    # 'Enable'/'Disable' line for it; when -m was absent they raised a
    # TypeError on Python 3 (None compared with int).  They belonged to the
    # retired MULTIPLE_THREAD key, which is not in TargetTxtDictionary, so
    # they are removed.
    elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM is not None:
        Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
    elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET is not None:
        Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
    elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH is not None:
        Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET_ARCH))
    elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG is not None:
        Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_CHAIN_TAG)
    elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE is not None:
        buildruleFullPath = os.path.join(self.WorkSpace, self.Opt.BUILD_RULE_FILE)
        if os.path.exists(buildruleFullPath):
            Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
        else:
            EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
                            "Build rule file %s does not exist!" % self.Opt.BUILD_RULE_FILE, RaiseError=False)
    return Line
# Tool version string; gBUILD_VERSION carries the BaseTools build identifier.
VersionNumber = ("0.01" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + VersionNumber
__copyright__ = "Copyright (c) 2007 - 2018, Intel Corporation All rights reserved."
# Usage text shown by optparse; the escaped newlines keep it one literal.
__usage__ = "%prog [options] {args} \
\nArgs: \
\n Clean clean the all default configuration of target.txt. \
\n Print print the all default configuration of target.txt. \
\n Set replace the default configuration with expected value specified by option."
# Options already seen, used by the callbacks below to reject a repeated
# single-instance option on the command line.
gParamCheck = []
def SingleCheckCallback(option, opt_str, value, parser):
    """optparse callback: accept an option at most once on the command line."""
    if option in gParamCheck:
        # A second occurrence is an error; parser.error() exits the program.
        parser.error("Option %s only allows one instance in command line!" % option)
    else:
        gParamCheck.append(option)
        setattr(parser.values, option.dest, value)
def RangeCheckCallback(option, opt_str, value, parser):
    """optparse callback for -m: single instance only, value within 1..8."""
    if option in gParamCheck:
        # A second occurrence is an error; parser.error() exits the program.
        parser.error("Option %s only allows one instance in command line!" % option)
    else:
        gParamCheck.append(option)
        if 1 <= value <= 8:
            setattr(parser.values, option.dest, value)
        else:
            parser.error("The count of multi-thread is not in valid range of 1 ~ 8.")
def MyOptionParser():
    """Build the command line parser and return (options, positional args)."""
    parser = OptionParser(version=__version__, prog="TargetTool.exe", usage=__usage__, description=__copyright__)
    parser.add_option("-a", "--arch", action="append", dest="TARGET_ARCH",
        help="ARCHS is one of list: IA32, X64, ARM, AARCH64 or EBC, which replaces target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-p", "--platform", action="callback", type="string", dest="DSCFILE", callback=SingleCheckCallback,
        help="Specify a DSC file, which replace target.txt's ACTIVE_PLATFORM definition. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-c", "--tooldef", action="callback", type="string", dest="TOOL_DEFINITION_FILE", callback=SingleCheckCallback,
        help="Specify the WORKSPACE relative path of tool_def.txt file, which replace target.txt's TOOL_CHAIN_CONF definition. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-t", "--target", action="append", type="choice", choices=['DEBUG', 'RELEASE', '0'], dest="TARGET",
        help="TARGET is one of list: DEBUG, RELEASE, which replaces target.txt's TARGET definition. To specify more TARGET, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-n", "--tagname", action="callback", type="string", dest="TOOL_CHAIN_TAG", callback=SingleCheckCallback,
        help="Specify the Tool Chain Tagname, which replaces target.txt's TOOL_CHAIN_TAG definition. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-r", "--buildrule", action="callback", type="string", dest="BUILD_RULE_FILE", callback=SingleCheckCallback,
        help="Specify the build rule configure file, which replaces target.txt's BUILD_RULE_CONF definition. If not specified, the default value Conf/build_rule.txt will be set.")
    parser.add_option("-m", "--multithreadnum", action="callback", type="int", dest="NUM", callback=RangeCheckCallback,
        help="Specify the multi-thread number which replace target.txt's MAX_CONCURRENT_THREAD_NUMBER. If the value is less than 2, MULTIPLE_THREAD will be disabled. If the value is larger than 1, MULTIPLE_THREAD will be enabled.")
    (opt, args)=parser.parse_args()
    return (opt, args)
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.QUIET)
    # WORKSPACE must be set (normally by edksetup) before the tool can run.
    if os.getenv('WORKSPACE') is None:
        print("ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool")
        sys.exit(1)

    (opt, args) = MyOptionParser()
    # Exactly one positional argument is accepted: Print, Clean or Set.
    if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
        print("The number of args isn't 1 or the value of args is invalid.")
        sys.exit(1)
    if opt.NUM is not None and opt.NUM < 1:
        print("The MAX_CONCURRENT_THREAD_NUMBER must be larger than 0.")
        sys.exit(1)
    # '0' means "clear the setting" and must be the only value given.
    if opt.TARGET is not None and len(opt.TARGET) > 1:
        for elem in opt.TARGET:
            if elem == '0':
                print("0 will clear the TARGET setting in target.txt and can't combine with other value.")
                sys.exit(1)
    if opt.TARGET_ARCH is not None and len(opt.TARGET_ARCH) > 1:
        for elem in opt.TARGET_ARCH:
            if elem == '0':
                print("0 will clear the TARGET_ARCH setting in target.txt and can't combine with other value.")
                sys.exit(1)

    try:
        FileHandle = TargetTool(opt, args)
        if FileHandle.Arg.lower() == 'print':
            FileHandle.Print()
            sys.exit(0)
        elif FileHandle.Arg.lower() == 'clean':
            # Num == 0 clears every known key's value.
            FileHandle.RWFile('#', '=', 0)
        else:
            # 'Set': Num == 1 writes the values given on the command line.
            FileHandle.RWFile('#', '=', 1)
    except Exception as e:
        last_type, last_value, last_tb = sys.exc_info()
        traceback.print_exception(last_type, last_value, last_tb)
| edk2-master | BaseTools/Source/Python/TargetTool/TargetTool.py |
## @file
# This file is used to define class Configuration
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os
import Common.EdkLogger as EdkLogger
from Common.DataType import *
from Common.StringUtils import *
from Common.LongFilePathSupport import OpenLongFilePath as open
_ConfigFileToInternalTranslation = {
# not same
"ModifierList":"ModifierSet",
# same
# please keep this in correct alphabetical order.
"AutoCorrect":"AutoCorrect",
"BinaryExtList":"BinaryExtList",
"CFunctionLayoutCheckAll":"CFunctionLayoutCheckAll",
"CFunctionLayoutCheckDataDeclaration":"CFunctionLayoutCheckDataDeclaration",
"CFunctionLayoutCheckFunctionBody":"CFunctionLayoutCheckFunctionBody",
"CFunctionLayoutCheckFunctionName":"CFunctionLayoutCheckFunctionName",
"CFunctionLayoutCheckFunctionPrototype":"CFunctionLayoutCheckFunctionPrototype",
"CFunctionLayoutCheckNoInitOfVariable":"CFunctionLayoutCheckNoInitOfVariable",
"CFunctionLayoutCheckNoStatic":"CFunctionLayoutCheckNoStatic",
"CFunctionLayoutCheckOptionalFunctionalModifier":"CFunctionLayoutCheckOptionalFunctionalModifier",
"CFunctionLayoutCheckReturnType":"CFunctionLayoutCheckReturnType",
"CheckAll":"CheckAll",
"Copyright":"Copyright",
"DeclarationDataTypeCheckAll":"DeclarationDataTypeCheckAll",
"DeclarationDataTypeCheckEFIAPIModifier":"DeclarationDataTypeCheckEFIAPIModifier",
"DeclarationDataTypeCheckEnumeratedType":"DeclarationDataTypeCheckEnumeratedType",
"DeclarationDataTypeCheckInOutModifier":"DeclarationDataTypeCheckInOutModifier",
"DeclarationDataTypeCheckNoUseCType":"DeclarationDataTypeCheckNoUseCType",
"DeclarationDataTypeCheckSameStructure":"DeclarationDataTypeCheckSameStructure",
"DeclarationDataTypeCheckStructureDeclaration":"DeclarationDataTypeCheckStructureDeclaration",
"DeclarationDataTypeCheckUnionType":"DeclarationDataTypeCheckUnionType",
"DoxygenCheckAll":"DoxygenCheckAll",
"DoxygenCheckCommand":"DoxygenCheckCommand",
"DoxygenCheckCommentDescription":"DoxygenCheckCommentDescription",
"DoxygenCheckCommentFormat":"DoxygenCheckCommentFormat",
"DoxygenCheckFileHeader":"DoxygenCheckFileHeader",
"DoxygenCheckFunctionHeader":"DoxygenCheckFunctionHeader",
"GeneralCheckAll":"GeneralCheckAll",
"GeneralCheckCarriageReturn":"GeneralCheckCarriageReturn",
"GeneralCheckFileExistence":"GeneralCheckFileExistence",
"GeneralCheckIndentation":"GeneralCheckIndentation",
"GeneralCheckIndentationWidth":"GeneralCheckIndentationWidth",
"GeneralCheckLine":"GeneralCheckLine",
"GeneralCheckLineEnding":"GeneralCheckLineEnding",
"GeneralCheckLineWidth":"GeneralCheckLineWidth",
"GeneralCheckNoProgma":"GeneralCheckNoProgma",
"GeneralCheckNoTab":"GeneralCheckNoTab",
"GeneralCheckNo_Asm":"GeneralCheckNo_Asm",
"GeneralCheckNonAcsii":"GeneralCheckNonAcsii",
"GeneralCheckTabWidth":"GeneralCheckTabWidth",
"GeneralCheckTrailingWhiteSpaceLine":"GeneralCheckTrailingWhiteSpaceLine",
"GeneralCheckUni":"GeneralCheckUni",
"HeaderCheckAll":"HeaderCheckAll",
"HeaderCheckCFileCommentLicenseFormat":"HeaderCheckCFileCommentLicenseFormat",
"HeaderCheckCFileCommentReferenceFormat":"HeaderCheckCFileCommentReferenceFormat",
"HeaderCheckCFileCommentStartSpacesNum":"HeaderCheckCFileCommentStartSpacesNum",
"HeaderCheckFile":"HeaderCheckFile",
"HeaderCheckFileCommentEnd":"HeaderCheckFileCommentEnd",
"HeaderCheckFunction":"HeaderCheckFunction",
"IncludeFileCheckAll":"IncludeFileCheckAll",
"IncludeFileCheckData":"IncludeFileCheckData",
"IncludeFileCheckIfndefStatement":"IncludeFileCheckIfndefStatement",
"IncludeFileCheckSameName":"IncludeFileCheckSameName",
"MetaDataFileCheckAll":"MetaDataFileCheckAll",
"MetaDataFileCheckBinaryInfInFdf":"MetaDataFileCheckBinaryInfInFdf",
"MetaDataFileCheckGenerateFileList":"MetaDataFileCheckGenerateFileList",
"MetaDataFileCheckGuidDuplicate":"MetaDataFileCheckGuidDuplicate",
"MetaDataFileCheckLibraryDefinedInDec":"MetaDataFileCheckLibraryDefinedInDec",
"MetaDataFileCheckLibraryInstance":"MetaDataFileCheckLibraryInstance",
"MetaDataFileCheckLibraryInstanceDependent":"MetaDataFileCheckLibraryInstanceDependent",
"MetaDataFileCheckLibraryInstanceOrder":"MetaDataFileCheckLibraryInstanceOrder",
"MetaDataFileCheckLibraryNoUse":"MetaDataFileCheckLibraryNoUse",
"MetaDataFileCheckModuleFileGuidDuplication":"MetaDataFileCheckModuleFileGuidDuplication",
"MetaDataFileCheckModuleFileGuidFormat":"MetaDataFileCheckModuleFileGuidFormat",
"MetaDataFileCheckModuleFileNoUse":"MetaDataFileCheckModuleFileNoUse",
"MetaDataFileCheckModuleFilePcdFormat":"MetaDataFileCheckModuleFilePcdFormat",
"MetaDataFileCheckModuleFilePpiFormat":"MetaDataFileCheckModuleFilePpiFormat",
"MetaDataFileCheckModuleFileProtocolFormat":"MetaDataFileCheckModuleFileProtocolFormat",
"MetaDataFileCheckPathName":"MetaDataFileCheckPathName",
"MetaDataFileCheckPathOfGenerateFileList":"MetaDataFileCheckPathOfGenerateFileList",
"MetaDataFileCheckPcdDuplicate":"MetaDataFileCheckPcdDuplicate",
"MetaDataFileCheckPcdFlash":"MetaDataFileCheckPcdFlash",
"MetaDataFileCheckPcdNoUse":"MetaDataFileCheckPcdNoUse",
"MetaDataFileCheckPcdType":"MetaDataFileCheckPcdType",
"NamingConventionCheckAll":"NamingConventionCheckAll",
"NamingConventionCheckDefineStatement":"NamingConventionCheckDefineStatement",
"NamingConventionCheckFunctionName":"NamingConventionCheckFunctionName",
"NamingConventionCheckIfndefStatement":"NamingConventionCheckIfndefStatement",
"NamingConventionCheckPathName":"NamingConventionCheckPathName",
"NamingConventionCheckSingleCharacterVariable":"NamingConventionCheckSingleCharacterVariable",
"NamingConventionCheckTypedefStatement":"NamingConventionCheckTypedefStatement",
"NamingConventionCheckVariableName":"NamingConventionCheckVariableName",
"PredicateExpressionCheckAll":"PredicateExpressionCheckAll",
"PredicateExpressionCheckBooleanValue":"PredicateExpressionCheckBooleanValue",
"PredicateExpressionCheckComparisonNullType":"PredicateExpressionCheckComparisonNullType",
"PredicateExpressionCheckNonBooleanOperator":"PredicateExpressionCheckNonBooleanOperator",
"ScanOnlyDirList":"ScanOnlyDirList",
"SkipDirList":"SkipDirList",
"SkipFileList":"SkipFileList",
"SmmCommParaCheckAll":"SmmCommParaCheckAll",
"SmmCommParaCheckBufferType":"SmmCommParaCheckBufferType",
"SpaceCheckAll":"SpaceCheckAll",
"SpellingCheckAll":"SpellingCheckAll",
"TokenReleaceList":"TokenReleaceList",
"UniCheckAll":"UniCheckAll",
"UniCheckHelpInfo":"UniCheckHelpInfo",
"UniCheckPCDInfo":"UniCheckPCDInfo",
"Version":"Version"
}
## Configuration
#
# This class is used to define all items in configuration file
#
# @param Filename: The name of configuration file, the default is config.ini
#
class Configuration(object):
    """In-memory view of an ECC config.ini file.

    Every attribute set in __init__ (except Filename) has a matching option
    name in _ConfigFileToInternalTranslation; the defaults assigned here are
    overridden by whatever ParseConfig reads from the configuration file.
    """
    def __init__(self, Filename):
        self.Filename = Filename

        self.Version = 0.1

        ## Identify to if check all items
        # 1 - Check all items and ignore all other detailed items
        # 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
        #
        self.CheckAll = 0

        ## Identify to if automatically correct mistakes
        # 1 - Automatically correct
        # 0 - Not automatically correct
        # Only the following check points can be automatically corrected, others not listed below are not supported even it is 1
        #
        # GeneralCheckTab
        # GeneralCheckIndentation
        # GeneralCheckLine
        # GeneralCheckCarriageReturn
        # SpaceCheckAll
        #
        self.AutoCorrect = 0

        # List customized Modifer here, split with ','
        # Defaultly use the definition in class DataType
        self.ModifierSet = MODIFIER_SET

        ## General Checking
        self.GeneralCheckAll = 0

        # Check whether NO Tab is used, replaced with spaces
        self.GeneralCheckNoTab = 1
        # The width of Tab
        self.GeneralCheckTabWidth = 2
        # Check whether the indentation is followed coding style
        self.GeneralCheckIndentation = 1
        # The width of indentation
        self.GeneralCheckIndentationWidth = 2
        # Check whether no line is exceeding defined widty
        self.GeneralCheckLine = 1
        # The width of a line
        self.GeneralCheckLineWidth = 120
        # Check whether no use of _asm in the source file
        self.GeneralCheckNo_Asm = 1
        # Check whether no use of "#progma" in source file except "#pragma pack(#)".
        self.GeneralCheckNoProgma = 1
        # Check whether there is a carriage return at the end of the file
        self.GeneralCheckCarriageReturn = 1
        # Check whether the file exists
        self.GeneralCheckFileExistence = 1
        # Check whether file has non ACSII char
        self.GeneralCheckNonAcsii = 1
        # Check whether UNI file is valid
        self.GeneralCheckUni = 1
        # Check Only use CRLF (Carriage Return Line Feed) line endings.
        self.GeneralCheckLineEnding = 1
        # Check if there is no trailing white space in one line.
        self.GeneralCheckTrailingWhiteSpaceLine = 1

        self.CFunctionLayoutCheckNoDeprecated = 1

        ## Space Checking
        self.SpaceCheckAll = 1

        ## Predicate Expression Checking
        self.PredicateExpressionCheckAll = 0

        # Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
        self.PredicateExpressionCheckBooleanValue = 1
        # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
        self.PredicateExpressionCheckNonBooleanOperator = 1
        # Check whether a comparison of any pointer to zero must be done via the NULL type
        self.PredicateExpressionCheckComparisonNullType = 1

        ## Headers Checking
        self.HeaderCheckAll = 0

        # Check whether File header exists
        self.HeaderCheckFile = 1
        # Check whether Function header exists
        self.HeaderCheckFunction = 1
        # Check whether Meta data File header Comment End with '##'
        self.HeaderCheckFileCommentEnd = 1
        # Check whether C File header Comment content start with two spaces
        self.HeaderCheckCFileCommentStartSpacesNum = 1
        # Check whether C File header Comment's each reference at list should begin with a bullet character '-'
        self.HeaderCheckCFileCommentReferenceFormat = 1
        # Check whether C File header Comment have the License immediately after the ""Copyright"" line
        self.HeaderCheckCFileCommentLicenseFormat = 1

        ## C Function Layout Checking
        self.CFunctionLayoutCheckAll = 0

        # Check whether return type exists and in the first line
        self.CFunctionLayoutCheckReturnType = 1
        # Check whether any optional functional modifiers exist and next to the return type
        self.CFunctionLayoutCheckOptionalFunctionalModifier = 1
        # Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
        # Check whether the closing parenthesis is on its own line and also indented two spaces
        self.CFunctionLayoutCheckFunctionName = 1
        # Check whether the function prototypes in include files have the same form as function definitions
        self.CFunctionLayoutCheckFunctionPrototype = 1
        # Check whether the body of a function is contained by open and close braces that must be in the first column
        self.CFunctionLayoutCheckFunctionBody = 1
        # Check whether the data declarations is the first code in a module.
        self.CFunctionLayoutCheckDataDeclaration = 1
        # Check whether no initialization of a variable as part of its declaration
        self.CFunctionLayoutCheckNoInitOfVariable = 1
        # Check whether no use of STATIC for functions
        self.CFunctionLayoutCheckNoStatic = 1

        ## Include Files Checking
        self.IncludeFileCheckAll = 0

        #Check whether having include files with same name
        self.IncludeFileCheckSameName = 1
        # Check whether all include file contents is guarded by a #ifndef statement.
        # the #ifndef must be the first line of code following the file header comment
        # the #endif must appear on the last line in the file
        self.IncludeFileCheckIfndefStatement = 1
        # Check whether include files contain only public or only private data
        # Check whether include files NOT contain code or define data variables
        self.IncludeFileCheckData = 1

        ## Declarations and Data Types Checking
        self.DeclarationDataTypeCheckAll = 0

        # Check whether no use of int, unsigned, char, void, long in any .c, .h or .asl files.
        self.DeclarationDataTypeCheckNoUseCType = 1
        # Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
        self.DeclarationDataTypeCheckInOutModifier = 1
        # Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
        self.DeclarationDataTypeCheckEFIAPIModifier = 1
        # Check whether Enumerated Type has a 'typedef' and the name is capital
        self.DeclarationDataTypeCheckEnumeratedType = 1
        # Check whether Structure Type has a 'typedef' and the name is capital
        self.DeclarationDataTypeCheckStructureDeclaration = 1
        # Check whether having same Structure
        self.DeclarationDataTypeCheckSameStructure = 1
        # Check whether Union Type has a 'typedef' and the name is capital
        self.DeclarationDataTypeCheckUnionType = 1

        ## Naming Conventions Checking
        self.NamingConventionCheckAll = 0

        # Check whether only capital letters are used for #define declarations
        self.NamingConventionCheckDefineStatement = 1
        # Check whether only capital letters are used for typedef declarations
        self.NamingConventionCheckTypedefStatement = 1
        # Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
        self.NamingConventionCheckIfndefStatement = 1
        # Rule for path name, variable name and function name
        # 1. First character should be upper case
        # 2. Existing lower case in a word
        # 3. No space existence
        # Check whether the path name followed the rule
        self.NamingConventionCheckPathName = 1
        # Check whether the variable name followed the rule
        self.NamingConventionCheckVariableName = 1
        # Check whether the function name followed the rule
        self.NamingConventionCheckFunctionName = 1
        # Check whether NO use short variable name with single character
        self.NamingConventionCheckSingleCharacterVariable = 1

        ## Doxygen Checking
        self.DoxygenCheckAll = 0

        # Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
        self.DoxygenCheckFileHeader = 1
        # Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
        self.DoxygenCheckFunctionHeader = 1
        # Check whether the first line of text in a comment block is a brief description of the element being documented.
        # The brief description must end with a period.
        self.DoxygenCheckCommentDescription = 1
        # Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
        self.DoxygenCheckCommentFormat = 1
        # Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
        self.DoxygenCheckCommand = 1

        ## Meta-Data File Processing Checking
        self.MetaDataFileCheckAll = 0

        # Check whether each file defined in meta-data exists
        self.MetaDataFileCheckPathName = 1
        # Generate a list for all files defined in meta-data files
        self.MetaDataFileCheckGenerateFileList = 1
        # The path of log file
        self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'
        # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
        # Each Library Instance must specify the Supported Module Types in its INF file,
        # and any module specifying the library instance must be one of the supported types.
        self.MetaDataFileCheckLibraryInstance = 1
        # Check whether a Library Instance has been defined for all dependent library classes
        self.MetaDataFileCheckLibraryInstanceDependent = 1
        # Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
        self.MetaDataFileCheckLibraryInstanceOrder = 1
        # Check whether the unnecessary inclusion of library classes in the INF file
        self.MetaDataFileCheckLibraryNoUse = 1
        # Check the header file in Include\Library directory whether be defined in the package DEC file.
        self.MetaDataFileCheckLibraryDefinedInDec = 1
        # Check whether an INF file is specified in the FDF file, but not in the DSC file, then the INF file must be for a Binary module only
        self.MetaDataFileCheckBinaryInfInFdf = 1
        # Not to report error and warning related OS include file such as "windows.h" and "stdio.h"
        # Check whether a PCD is set in a DSC file or the FDF file, but not in both.
        self.MetaDataFileCheckPcdDuplicate = 1
        # Check whether PCD settings in the FDF file can only be related to flash.
        self.MetaDataFileCheckPcdFlash = 1
        # Check whether PCDs used in INF files but not specified in DSC or FDF files
        self.MetaDataFileCheckPcdNoUse = 1
        # Check whether having duplicate guids defined for Guid/Protocol/Ppi
        self.MetaDataFileCheckGuidDuplicate = 1
        # Check whether all files under module directory are described in INF files
        self.MetaDataFileCheckModuleFileNoUse = 1
        # Check whether the PCD is correctly used in C function via its type
        self.MetaDataFileCheckPcdType = 1
        # Check whether there are FILE_GUID duplication among different INF files
        self.MetaDataFileCheckModuleFileGuidDuplication = 1

        # Check Guid Format in INF files
        self.MetaDataFileCheckModuleFileGuidFormat = 1
        # Check Protocol Format in INF files
        self.MetaDataFileCheckModuleFileProtocolFormat = 1
        # Check Ppi Format in INF files
        self.MetaDataFileCheckModuleFilePpiFormat = 1
        # Check Pcd Format in INF files
        self.MetaDataFileCheckModuleFilePcdFormat = 1

        # Check UNI file
        self.UniCheckAll = 0
        # Check INF or DEC file whether defined the localized information in the associated UNI file.
        self.UniCheckHelpInfo = 1
        # Check PCD whether defined the prompt, help in the DEC file and localized information in the associated UNI file.
        self.UniCheckPCDInfo = 1

        # Check SMM communication function parameter
        self.SmmCommParaCheckAll = 0
        # Check if the EFI_SMM_COMMUNICATION_PROTOCOL parameter buffer type is Reserved / ACPI NVS or UEFI RT code/data
        self.SmmCommParaCheckBufferType = -1

        #
        # The check points in this section are reserved
        #
        # GotoStatementCheckAll = 0
        #
        self.SpellingCheckAll = 0

        # The directory listed here will not be parsed, split with ','
        self.SkipDirList = []

        # The file listed here will not be parsed, split with ','
        self.SkipFileList = []

        # A list for binary file ext name
        self.BinaryExtList = []

        # A list for only scanned folders
        self.ScanOnlyDirList = []

        # A list for Copyright format
        self.Copyright = []

        self.TokenReleaceList = []

        self.ParseConfig()

    def ParseConfig(self):
        """Read self.Filename and override the defaults with its 'key = value' lines.

        Raises a fatal EdkLogger error when the file is missing or contains an
        unknown option name.
        """
        Filepath = os.path.normpath(self.Filename)
        if not os.path.isfile(Filepath):
            ErrorMsg = "Can't find configuration file '%s'" % Filepath
            EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)

        # Options whose value is a comma-separated list in the config file.
        # NOTE(review): ScanOnlyDirList defaults to a list but is not split
        # here, so a config-file value stays a plain string -- confirm whether
        # that is intended before adding it to this tuple.
        ListValuedOptions = ('ModifierList', 'SkipDirList', 'SkipFileList',
                             'BinaryExtList', 'Copyright', 'TokenReleaceList')

        # Use a context manager so the file handle is closed deterministically
        # (the previous code iterated an anonymous open() and leaked it).
        with open(Filepath, 'r') as ConfigFile:
            for LineNo, Line in enumerate(ConfigFile, start=1):
                Line = CleanString(Line)
                if Line == '':
                    continue
                List = GetSplitValueList(Line, TAB_EQUAL_SPLIT)
                if List[0] not in _ConfigFileToInternalTranslation:
                    ErrorMsg = "Invalid configuration option '%s' was found" % List[0]
                    EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath, Line = LineNo)
                assert _ConfigFileToInternalTranslation[List[0]] in self.__dict__
                # An empty log-file path keeps the default 'File.log'.
                if List[0] == 'MetaDataFileCheckPathOfGenerateFileList' and List[1] == "":
                    continue
                if List[0] in ListValuedOptions:
                    List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
                self.__dict__[_ConfigFileToInternalTranslation[List[0]]] = List[1]

    def ShowMe(self):
        """Print the file name and every configuration value (debug helper)."""
        print(self.Filename)
        for Key in self.__dict__.keys():
            print(Key, '=', self.__dict__[Key])
#
# test that our dict and out class still match in contents.
#
if __name__ == '__main__':
    # Self test: every Configuration attribute (except Filename) must have a
    # config-file option mapping, and every mapping target must exist.
    # Use a raw string so the backslashes in the Windows-style path are not
    # interpreted as escape sequences (\S, \P, \E are invalid escapes).
    myconfig = Configuration(r"BaseTools\Source\Python\Ecc\config.ini")
    for each in myconfig.__dict__:
        if each == "Filename":
            continue
        assert each in _ConfigFileToInternalTranslation.values()
    for each in _ConfigFileToInternalTranslation.values():
        assert each in myconfig.__dict__
| edk2-master | BaseTools/Source/Python/Ecc/Configuration.py |
## @file
# This file is used to parse exception items found by ECC tool
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
from Ecc.Xml.XmlRoutines import *
import Common.LongFilePathOs as os
# ExceptionXml holds one <Exception> node parsed from the exception XML file.
class ExceptionXml(object):
    """One exception record: error ID, keyword and (normalized) file path."""

    def __init__(self):
        # All fields start out empty until FromXml fills them in.
        self.KeyWord = ''
        self.ErrorID = ''
        self.FilePath = ''

    def FromXml(self, Item, Key):
        """Populate this record from XML node Item whose root tag is Key."""
        self.KeyWord = XmlElement(Item, '%s/KeyWord' % Key)
        self.ErrorID = XmlElement(Item, '%s/ErrorID' % Key)
        self.FilePath = os.path.normpath(XmlElement(Item, '%s/FilePath' % Key))

    def __str__(self):
        Layout = 'ErrorID = %s KeyWord = %s FilePath = %s'
        return Layout % (self.ErrorID, self.KeyWord, self.FilePath)
# ExceptionListXml parses and holds every <Exception> node of an XML file.
class ExceptionListXml(object):
    """Container for all ExceptionXml records loaded from one XML file."""

    def __init__(self):
        self.List = []

    def FromXmlFile(self, FilePath):
        """Parse FilePath and append one ExceptionXml per <Exception> node."""
        XmlContent = XmlParseFile(FilePath)
        for Node in XmlList(XmlContent, '/ExceptionList/Exception'):
            Record = ExceptionXml()
            Record.FromXml(Node, 'Exception')
            self.List.append(Record)

    def ToList(self):
        """Return the records as (ErrorID, KeyWord) tuples (file path omitted)."""
        return [(Record.ErrorID, Record.KeyWord) for Record in self.List]

    def __str__(self):
        # One record per line; an empty container renders as ''.
        return ''.join(str(Record) + '\n' for Record in self.List)
# A class that answers whether a reported error is an allowed exception.
class ExceptionCheck(object):
    """Loads an exception XML file and matches (ErrorID, KeyWord) pairs against it."""

    def __init__(self, FilePath = None):
        self.ExceptionList = []
        self.ExceptionListXml = ExceptionListXml()
        self.LoadExceptionListXml(FilePath)

    def LoadExceptionListXml(self, FilePath):
        """If FilePath names an existing file, load it and cache its tuples."""
        if FilePath and os.path.isfile(FilePath):
            self.ExceptionListXml.FromXmlFile(FilePath)
            self.ExceptionList = self.ExceptionListXml.ToList()

    def IsException(self, ErrorID, KeyWord, FileID=-1):
        """Return True when (ErrorID, KeyWord) matches a loaded exception.

        FileID is accepted for interface compatibility but is not used.
        """
        # Normalize CRLF so keywords compare consistently across platforms.
        return (str(ErrorID), KeyWord.replace('\r\n', '\n')) in self.ExceptionList
##
#
# This acts like the main() function for the script, unless it is 'import'ed
# into another script: loads a sample exception XML and prints the parsed list.
#
if __name__ == '__main__':
    # NOTE(review): hard-coded developer-local Windows path, kept for ad-hoc
    # testing only; on other machines LoadExceptionListXml's os.path.isfile
    # guard makes this silently print an empty list.
    El = ExceptionCheck('C:\\Hess\\Project\\BuildTool\\src\\Ecc\\exception.xml')
    print(El.ExceptionList)
| edk2-master | BaseTools/Source/Python/Ecc/Exception.py |
## @file
# Standardized Error Handling infrastructures.
#
# Copyright (c) 2021, Arm Limited. All rights reserved.<BR>
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# ECC error-ID constants, grouped by check category. Each category owns a
# block of IDs whose base is its *_CHECK_ALL value. These IDs are used as
# keys of gEccErrorMessage below and are matched (as strings) against the
# ErrorID field of exception-list XML entries, so existing values should
# not be renumbered.

# General source-file checks (1000-1012)
ERROR_GENERAL_CHECK_ALL = 1000
ERROR_GENERAL_CHECK_NO_TAB = 1001
ERROR_GENERAL_CHECK_INDENTATION = 1002
ERROR_GENERAL_CHECK_LINE = 1003
ERROR_GENERAL_CHECK_NO_ASM = 1004
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
ERROR_GENERAL_CHECK_NON_ACSII = 1008
ERROR_GENERAL_CHECK_UNI = 1009
ERROR_GENERAL_CHECK_UNI_HELP_INFO = 1010
ERROR_GENERAL_CHECK_INVALID_LINE_ENDING = 1011
ERROR_GENERAL_CHECK_TRAILING_WHITE_SPACE_LINE = 1012

# Space checks (2000)
ERROR_SPACE_CHECK_ALL = 2000

# Predicate expression checks (3000-3003)
ERROR_PREDICATE_EXPRESSION_CHECK_ALL = 3000
ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE = 3001
ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR = 3002
ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE = 3003

# File/function header checks (4000-4002)
ERROR_HEADER_CHECK_ALL = 4000
ERROR_HEADER_CHECK_FILE = 4001
ERROR_HEADER_CHECK_FUNCTION = 4002

# C function layout checks (5000-5010)
ERROR_C_FUNCTION_LAYOUT_CHECK_ALL = 5000
ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE = 5001
ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER = 5002
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME = 5003
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE = 5004
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY = 5005
ERROR_C_FUNCTION_LAYOUT_CHECK_DATA_DECLARATION = 5006
ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE = 5007
ERROR_C_FUNCTION_LAYOUT_CHECK_NO_STATIC = 5008
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2 = 5009
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3 = 5010

# Include file checks (6000-6005)
ERROR_INCLUDE_FILE_CHECK_ALL = 6000
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1 = 6001
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 = 6002
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 = 6003
ERROR_INCLUDE_FILE_CHECK_DATA = 6004
ERROR_INCLUDE_FILE_CHECK_NAME = 6005

# Declaration / data-type checks (7000-7008).
# NOTE(review): 7007 is listed before 7006 in the original; order preserved.
ERROR_DECLARATION_DATA_TYPE_CHECK_ALL = 7000
ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE = 7001
ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER = 7002
ERROR_DECLARATION_DATA_TYPE_CHECK_EFI_API_MODIFIER = 7003
ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE = 7004
ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION = 7005
ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE = 7007
ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE = 7006
ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE = 7008

# Naming convention checks (8000-8007)
ERROR_NAMING_CONVENTION_CHECK_ALL = 8000
ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT = 8001
ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT = 8002
ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT = 8003
ERROR_NAMING_CONVENTION_CHECK_PATH_NAME = 8004
ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME = 8005
ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME = 8006
ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE = 8007

# Doxygen comment checks (9000-9005)
ERROR_DOXYGEN_CHECK_ALL = 9000
ERROR_DOXYGEN_CHECK_FILE_HEADER = 9001
ERROR_DOXYGEN_CHECK_FUNCTION_HEADER = 9002
ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION = 9003
ERROR_DOXYGEN_CHECK_COMMENT_FORMAT = 9004
ERROR_DOXYGEN_CHECK_COMMAND = 9005

# Meta-data (INF/DEC/DSC/FDF) file checks (10000-10022)
ERROR_META_DATA_FILE_CHECK_ALL = 10000
ERROR_META_DATA_FILE_CHECK_PATH_NAME = 10001
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1 = 10002
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2 = 10003
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT = 10004
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_ORDER = 10005
ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE = 10006
ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF = 10007
ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE = 10008
ERROR_META_DATA_FILE_CHECK_PCD_FLASH = 10009
ERROR_META_DATA_FILE_CHECK_PCD_NO_USE = 10010
ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID = 10011
ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL = 10012
ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI = 10013
ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE = 10014
ERROR_META_DATA_FILE_CHECK_PCD_TYPE = 10015
ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION = 10016
ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE = 10017
ERROR_META_DATA_FILE_CHECK_FORMAT_GUID = 10018
ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL = 10019
ERROR_META_DATA_FILE_CHECK_FORMAT_PPI = 10020
ERROR_META_DATA_FILE_CHECK_FORMAT_PCD = 10021
ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED = 10022

# Spelling check (11000)
ERROR_SPELLING_CHECK_ALL = 11000

# SMM communicate parameter check (12001)
ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE = 12001
# Maps each ECC error ID above to its human-readable report message.
# Category "*_CHECK_ALL" IDs map to the empty string. Message text is part
# of the tool's visible output, so it is reproduced unchanged here.
gEccErrorMessage = {
    # --- General checks ---
    ERROR_GENERAL_CHECK_ALL : "",
    ERROR_GENERAL_CHECK_NO_TAB : "'TAB' character is not allowed in source code, please replace each 'TAB' with two spaces.",
    ERROR_GENERAL_CHECK_INDENTATION : "Indentation does not follow coding style",
    ERROR_GENERAL_CHECK_LINE : "The width of each line does not follow coding style",
    ERROR_GENERAL_CHECK_NO_ASM : "There should be no use of _asm in the source file",
    ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
    ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
    ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
    # NOTE(review): "Non-ACSII" mirrors the constant name's spelling;
    # presumably "Non-ASCII" was intended — message left unchanged.
    ERROR_GENERAL_CHECK_NON_ACSII : "File has invalid Non-ACSII char",
    ERROR_GENERAL_CHECK_UNI : "File is not a valid UTF-16 UNI file",
    ERROR_GENERAL_CHECK_UNI_HELP_INFO : "UNI file that is associated by INF or DEC file need define the prompt and help information.",
    ERROR_GENERAL_CHECK_INVALID_LINE_ENDING : "Only CRLF (Carriage Return Line Feed) is allowed to line ending.",
    ERROR_GENERAL_CHECK_TRAILING_WHITE_SPACE_LINE : "There should be no trailing white space in one line.",
    # --- Space checks ---
    ERROR_SPACE_CHECK_ALL : "",
    # --- Predicate expression checks ---
    ERROR_PREDICATE_EXPRESSION_CHECK_ALL : "",
    ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE : "Boolean values and variable type BOOLEAN should not use explicit comparisons to TRUE or FALSE",
    ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR : "Non-Boolean comparisons should use a compare operator (==, !=, >, < >=, <=)",
    ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE : "A comparison of any pointer to zero must be done via the NULL type",
    # --- Header checks ---
    ERROR_HEADER_CHECK_ALL : "",
    ERROR_HEADER_CHECK_FILE : "File header doesn't exist",
    ERROR_HEADER_CHECK_FUNCTION : "Function header doesn't exist",
    # --- C function layout checks ---
    ERROR_C_FUNCTION_LAYOUT_CHECK_ALL : "",
    ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE : "Return type of a function should exist and in the first line",
    ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER : "Any optional functional modifiers should exist and next to the return type",
    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME : """Function name should be left justified, followed by the beginning of the parameter list, with the closing parenthesis on its own line, indented two spaces""",
    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE : "Function prototypes in include files have the same form as function definitions",
    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2 : "Function prototypes in include files have different parameter number with function definitions",
    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3 : "Function prototypes in include files have different parameter modifier with function definitions",
    ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY : "The body of a function should be contained by open and close braces that must be in the first column",
    ERROR_C_FUNCTION_LAYOUT_CHECK_DATA_DECLARATION : "The data declarations should be the first code in a module",
    ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE : "There should be no initialization of a variable as part of its declaration",
    ERROR_C_FUNCTION_LAYOUT_CHECK_NO_STATIC : "There should be no use of STATIC for functions",
    # --- Include file checks ---
    ERROR_INCLUDE_FILE_CHECK_ALL : "",
    ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1 : "All include file contents should be guarded by a #ifndef statement.",
    ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 : "The #ifndef must be the first line of code following the file header comment",
    ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 : "The #endif must appear on the last line in the file",
    ERROR_INCLUDE_FILE_CHECK_DATA : "Include files should contain only public or only private data and cannot contain code or define data variables",
    ERROR_INCLUDE_FILE_CHECK_NAME : "No permission for the include file with same names",
    # --- Declaration / data-type checks ---
    ERROR_DECLARATION_DATA_TYPE_CHECK_ALL : "",
    ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE : "There should be no use of int, unsigned, char, void, long in any .c, .h or .asl files",
    ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER : """The modifiers IN, OUT, OPTIONAL, and UNALIGNED should be used only to qualify arguments to a function and should not appear in a data type declaration""",
    ERROR_DECLARATION_DATA_TYPE_CHECK_EFI_API_MODIFIER : "The EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols",
    ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE : "Enumerated Type should have a 'typedef' and the name must be in capital letters",
    ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION : "Structure Type should have a 'typedef' and the name must be in capital letters",
    ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE : "No permission for the structure with same names",
    ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE : "Union Type should have a 'typedef' and the name must be in capital letters",
    ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE : "Complex types should be typedef-ed",
    # --- Naming convention checks ---
    ERROR_NAMING_CONVENTION_CHECK_ALL : "",
    ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT : "Only capital letters are allowed to be used for #define declarations",
    ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT : "Only capital letters are allowed to be used for typedef declarations",
    ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT : "The #ifndef at the start of an include file should have one postfix underscore, and no prefix underscore character '_'",
    ERROR_NAMING_CONVENTION_CHECK_PATH_NAME : """Path name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters""",
    ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME : """Variable name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters 4. Global variable name must start with a 'g'""",
    ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME : """Function name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters""",
    ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE : "There should be no use of short (single character) variable names",
    # --- Doxygen checks ---
    ERROR_DOXYGEN_CHECK_ALL : "",
    ERROR_DOXYGEN_CHECK_FILE_HEADER : "The file headers should follow Doxygen special documentation blocks in section 2.3.5",
    ERROR_DOXYGEN_CHECK_FUNCTION_HEADER : "The function headers should follow Doxygen special documentation blocks in section 2.3.5",
    ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION : """The first line of text in a comment block should be a brief description of the element being documented and the brief description must end with a period.""",
    ERROR_DOXYGEN_CHECK_COMMENT_FORMAT : "For comment line with '///< ... text ...' format, if it is used, it should be after the code section",
    ERROR_DOXYGEN_CHECK_COMMAND : "Only Doxygen commands '@bug', '@todo', '@example', '@file', '@attention', '@param', '@post', '@pre', '@retval', '@return', '@sa', '@since', '@test', '@note', '@par', '@endcode', '@code', '@{', '@}' are allowed to mark the code",
    # --- Meta-data file checks ---
    ERROR_META_DATA_FILE_CHECK_ALL : "",
    ERROR_META_DATA_FILE_CHECK_PATH_NAME : "The file defined in meta-data does not exist",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1 : "A library instances defined for a given module (or dependent library instance) doesn't match the module's type.",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2 : "A library instance must specify the Supported Module Types in its INF file",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT : "A library instance must be defined for all dependent library classes",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_ORDER : "The library Instances specified by the LibraryClasses sections should be listed in order of dependencies",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE : "There should be no unnecessary inclusion of library classes in the INF file",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE : "Duplicate Library Class Name found",
    ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF : "An INF file is specified in the FDF file, but not in the DSC file, therefore the INF file must be for a Binary module only",
    ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE : "Duplicate PCDs found",
    ERROR_META_DATA_FILE_CHECK_PCD_FLASH : "PCD settings in the FDF file should only be related to flash",
    ERROR_META_DATA_FILE_CHECK_PCD_NO_USE : "There should be no PCDs declared in INF files that are not specified in either a DSC or FDF file",
    ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID : "Duplicate GUID found",
    ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL : "Duplicate PROTOCOL found",
    ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI : "Duplicate PPI found",
    ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE : "No used module files found",
    ERROR_META_DATA_FILE_CHECK_PCD_TYPE : "Wrong C code function used for this kind of PCD",
    ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION : "Module file has FILE_GUID collision with other module file",
    ERROR_META_DATA_FILE_CHECK_FORMAT_GUID : "Wrong GUID Format used in Module file",
    ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL : "Wrong Protocol Format used in Module file",
    ERROR_META_DATA_FILE_CHECK_FORMAT_PPI : "Wrong Ppi Format used in Module file",
    ERROR_META_DATA_FILE_CHECK_FORMAT_PCD : "Wrong Pcd Format used in Module file",
    ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED : "Not defined LibraryClass used in the Module file.",
    # --- Spelling / SMM communicate checks ---
    ERROR_SPELLING_CHECK_ALL : "",
    ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE : "SMM communication function may use wrong parameter type",
    }
| edk2-master | BaseTools/Source/Python/Ecc/EccToolError.py |
Subsets and Splits