python_code
stringlengths 0
679k
| repo_name
stringlengths 9
41
| file_path
stringlengths 6
149
|
---|---|---|
## @file
# This file is used to define checkpoints used by ECC tool
#
# Copyright (c) 2021, Arm Limited. All rights reserved.<BR>
# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
from CommonDataClass.DataClass import *
import Common.DataType as DT
from Ecc.EccToolError import *
from Ecc.MetaDataParser import ParseHeaderCommentSection
from Ecc import EccGlobalData
from Ecc import c
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
## Check
#
# This class is to define checkpoints used by ECC tool
#
# @param object: Inherited from object class
#
class Check(object):
    def __init__(self):
        # No per-instance state: every Check* method reads its configuration
        # and data from the EccGlobalData module directly.
        pass
    # Check all required checkpoints
    def Check(self):
        """Run every ECC checkpoint group in a fixed sequence.

        Each group method consults its own enable switches in
        EccGlobalData.gConfig, so this dispatcher unconditionally calls all
        of them in order.
        """
        self.GeneralCheck()
        self.MetaDataFileCheck()
        self.DoxygenCheck()
        self.IncludeFileCheck()
        self.PredicateExpressionCheck()
        self.DeclAndDataTypeCheck()
        self.FunctionLayoutCheck()
        self.NamingConventionCheck()
        self.SmmCommParaCheck()
    def SmmCommParaCheck(self):
        """Entry point of the SMM-communication parameter checkpoint group."""
        self.SmmCommParaCheckBufferType()
        # Check if SMM communication function has correct parameter type
        # 1. Get function calling with instance./->Communicate() interface
        # and make sure the protocol instance is of type EFI_SMM_COMMUNICATION_PROTOCOL.
        # 2. Find the origin of the 2nd parameter of Communicate() interface, if -
        #    a. it is a local buffer on stack
        #       report error.
        #    b. it is a global buffer, check the driver that holds the global buffer is of type DXE_RUNTIME_DRIVER
        #       report success.
        #    c. it is a buffer by AllocatePage/AllocatePool (may be wrapped by nested function calls),
        #       check the EFI_MEMORY_TYPE to be EfiRuntimeServicesCode,EfiRuntimeServicesData,
        #       EfiACPIMemoryNVS or EfiReservedMemoryType
        #       report success.
        #    d. it is a buffer located via EFI_SYSTEM_TABLE.ConfigurationTable (may be wrapped by nested function calls)
        #       report warning to indicate human code review.
        #    e. it is a buffer from other kind of pointers (may need to trace into nested function calls to locate),
        #       repeat checks in a.b.c and d.
def SmmCommParaCheckBufferType(self):
if EccGlobalData.gConfig.SmmCommParaCheckBufferType == '1' or EccGlobalData.gConfig.SmmCommParaCheckAll == '1':
EdkLogger.quiet("Checking SMM communication parameter type ...")
# Get all EFI_SMM_COMMUNICATION_PROTOCOL interface
CommApiList = []
for IdentifierTable in EccGlobalData.gIdentifierTableList:
SqlCommand = """select ID, Name, BelongsToFile from %s
where Modifier = 'EFI_SMM_COMMUNICATION_PROTOCOL*' """ % (IdentifierTable)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
if RecordSet:
for Record in RecordSet:
if Record[1] not in CommApiList:
CommApiList.append(Record[1])
# For each interface, check the second parameter
for CommApi in CommApiList:
for IdentifierTable in EccGlobalData.gIdentifierTableList:
SqlCommand = """select ID, Name, Value, BelongsToFile, StartLine from %s
where Name = '%s->Communicate' and Model = %s""" \
% (IdentifierTable, CommApi, MODEL_IDENTIFIER_FUNCTION_CALLING)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
if RecordSet:
# print IdentifierTable
for Record in RecordSet:
# Get the second parameter for Communicate function
SecondPara = Record[2].split(',')[1].strip()
SecondParaIndex = None
if SecondPara.startswith('&'):
SecondPara = SecondPara[1:]
if SecondPara.endswith(']'):
SecondParaIndex = SecondPara[SecondPara.find('[') + 1:-1]
SecondPara = SecondPara[:SecondPara.find('[')]
# Get the ID
Id = Record[0]
# Get the BelongsToFile
BelongsToFile = Record[3]
# Get the source file path
SqlCommand = """select FullPath from File where ID = %s""" % BelongsToFile
NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
FullPath = NewRecordSet[0][0]
# Get the line no of function calling
StartLine = Record[4]
# Get the module type
SqlCommand = """select Value3 from INF where BelongsToFile = (select ID from File
where Path = (select Path from File where ID = %s) and Model = 1011)
and Value2 = 'MODULE_TYPE'""" % BelongsToFile
NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
ModuleType = NewRecordSet[0][0] if NewRecordSet else None
# print BelongsToFile, FullPath, StartLine, ModuleType, SecondPara
Value = FindPara(FullPath, SecondPara, StartLine)
# Find the value of the parameter
if Value:
if 'AllocatePage' in Value \
or 'AllocatePool' in Value \
or 'AllocateRuntimePool' in Value \
or 'AllocateZeroPool' in Value:
pass
else:
if '->' in Value:
if not EccGlobalData.gException.IsException(
ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE, Value):
EccGlobalData.gDb.TblReport.Insert(ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE,
OtherMsg="Please review the buffer type"
+ "is correct or not. If it is correct" +
" please add [%s] to exception list"
% Value,
BelongsToTable=IdentifierTable,
BelongsToItem=Id)
else:
if not EccGlobalData.gException.IsException(
ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE, Value):
EccGlobalData.gDb.TblReport.Insert(ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE,
OtherMsg="Please review the buffer type"
+ "is correct or not. If it is correct" +
" please add [%s] to exception list"
% Value,
BelongsToTable=IdentifierTable,
BelongsToItem=Id)
# Not find the value of the parameter
else:
SqlCommand = """select ID, Modifier, Name, Value, Model, BelongsToFunction from %s
where Name = '%s' and StartLine < %s order by StartLine DESC""" \
% (IdentifierTable, SecondPara, StartLine)
NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
if NewRecordSet:
Value = NewRecordSet[0][1]
if 'AllocatePage' in Value \
or 'AllocatePool' in Value \
or 'AllocateRuntimePool' in Value \
or 'AllocateZeroPool' in Value:
pass
else:
if not EccGlobalData.gException.IsException(
ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE, Value):
EccGlobalData.gDb.TblReport.Insert(ERROR_SMM_COMM_PARA_CHECK_BUFFER_TYPE,
OtherMsg="Please review the buffer type"
+ "is correct or not. If it is correct" +
" please add [%s] to exception list"
% Value,
BelongsToTable=IdentifierTable,
BelongsToItem=Id)
else:
pass
# Check UNI files
def UniCheck(self):
if EccGlobalData.gConfig.GeneralCheckUni == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking whether UNI file is UTF-16 ...")
SqlCommand = """select ID, FullPath, ExtName from File where ExtName like 'uni'"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
File = Record[1]
FileIn = open(File, 'rb').read(2)
if FileIn != '\xff\xfe':
OtherMsg = "File %s is not a valid UTF-16 UNI file" % Record[1]
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_UNI, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
    # General Checking
    def GeneralCheck(self):
        """Run the file-level checkpoints: encoding, UNI BOM, tabs, line
        endings and trailing whitespace."""
        self.GeneralCheckNonAcsii()
        self.UniCheck()
        self.GeneralCheckNoTab()
        self.GeneralCheckLineEnding()
        self.GeneralCheckTrailingWhiteSpaceLine()
# Check whether NO Tab is used, replaced with spaces
def GeneralCheckNoTab(self):
if EccGlobalData.gConfig.GeneralCheckNoTab == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking No TAB used in file ...")
SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
op = open(Record[1]).readlines()
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
IndexOfChar = 0
for Char in Line:
IndexOfChar += 1
if Char == '\t':
OtherMsg = "File %s has TAB char at line %s column %s" % (Record[1], IndexOfLine, IndexOfChar)
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NO_TAB, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
# Check Only use CRLF (Carriage Return Line Feed) line endings.
def GeneralCheckLineEnding(self):
if EccGlobalData.gConfig.GeneralCheckLineEnding == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking line ending in file ...")
SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
op = open(Record[1], 'rb').readlines()
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
if not bytes.decode(Line).endswith('\r\n'):
OtherMsg = "File %s has invalid line ending at line %s" % (Record[1], IndexOfLine)
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_INVALID_LINE_ENDING, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
# Check if there is no trailing white space in one line.
def GeneralCheckTrailingWhiteSpaceLine(self):
if EccGlobalData.gConfig.GeneralCheckTrailingWhiteSpaceLine == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking trailing white space line in file ...")
SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
op = open(Record[1], 'r').readlines()
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
if Line.replace('\r', '').replace('\n', '').endswith(' '):
OtherMsg = "File %s has trailing white spaces at line %s" % (Record[1], IndexOfLine)
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_TRAILING_WHITE_SPACE_LINE, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
# Check whether file has non ACSII char
def GeneralCheckNonAcsii(self):
if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Non-ACSII char in file ...")
SqlCommand = """select ID, FullPath, ExtName from File where ExtName in ('.dec', '.inf', '.dsc', 'c', 'h')"""
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
op = open(Record[1]).readlines()
IndexOfLine = 0
for Line in op:
IndexOfLine += 1
IndexOfChar = 0
for Char in Line:
IndexOfChar += 1
if ord(Char) > 126:
OtherMsg = "File %s has Non-ASCII char at line %s column %s" % (Record[1], IndexOfLine, IndexOfChar)
EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg=OtherMsg, BelongsToTable='File', BelongsToItem=Record[0])
    # C Function Layout Checking
    def FunctionLayoutCheck(self):
        """Run all C function-layout checkpoints."""
        self.FunctionLayoutCheckReturnType()
        self.FunctionLayoutCheckModifier()
        self.FunctionLayoutCheckName()
        self.FunctionLayoutCheckPrototype()
        self.FunctionLayoutCheckBody()
        self.FunctionLayoutCheckLocalVariable()
        self.FunctionLayoutCheckDeprecated()
# To check if the deprecated functions are used
def FunctionLayoutCheckDeprecated(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckNoDeprecated == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function no deprecated one being used ...")
DeprecatedFunctionSet = ('UnicodeValueToString',
'AsciiValueToString',
'StrCpy',
'StrnCpy',
'StrCat',
'StrnCat',
'UnicodeStrToAsciiStr',
'AsciiStrCpy',
'AsciiStrnCpy',
'AsciiStrCat',
'AsciiStrnCat',
'AsciiStrToUnicodeStr',
'PcdSet8',
'PcdSet16',
'PcdSet32',
'PcdSet64',
'PcdSetPtr',
'PcdSetBool',
'PcdSetEx8',
'PcdSetEx16',
'PcdSetEx32',
'PcdSetEx64',
'PcdSetExPtr',
'PcdSetExBool',
'LibPcdSet8',
'LibPcdSet16',
'LibPcdSet32',
'LibPcdSet64',
'LibPcdSetPtr',
'LibPcdSetBool',
'LibPcdSetEx8',
'LibPcdSetEx16',
'LibPcdSetEx32',
'LibPcdSetEx64',
'LibPcdSetExPtr',
'LibPcdSetExBool',
'GetVariable',
'GetEfiGlobalVariable',
)
for IdentifierTable in EccGlobalData.gIdentifierTableList:
SqlCommand = """select ID, Name, BelongsToFile from %s
where Model = %s """ % (IdentifierTable, MODEL_IDENTIFIER_FUNCTION_CALLING)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
for Key in DeprecatedFunctionSet:
if Key == Record[1]:
if not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_DEPRECATE, Key):
OtherMsg = 'The function [%s] is deprecated which should NOT be used' % Key
EccGlobalData.gDb.TblReport.Insert(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_DEPRECATE,
OtherMsg=OtherMsg,
BelongsToTable=IdentifierTable,
BelongsToItem=Record[0])
def WalkTree(self):
IgnoredPattern = c.GetIgnoredDirListPattern()
for Dirpath, Dirnames, Filenames in os.walk(EccGlobalData.gTarget):
for Dir in Dirnames:
Dirname = os.path.join(Dirpath, Dir)
if os.path.islink(Dirname):
Dirname = os.path.realpath(Dirname)
if os.path.isdir(Dirname):
# symlinks to directories are treated as directories
Dirnames.remove(Dir)
Dirnames.append(Dirname)
if IgnoredPattern.match(Dirpath.upper()):
continue
for f in Filenames[:]:
if f.lower() in EccGlobalData.gConfig.SkipFileList:
Filenames.remove(f)
yield (Dirpath, Dirnames, Filenames)
# Check whether return type exists and in the first line
def FunctionLayoutCheckReturnType(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout return type ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c', '.h'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutReturnType(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckFuncLayoutReturnType(FullName)
# Check whether any optional functional modifiers exist and next to the return type
def FunctionLayoutCheckModifier(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout modifier ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c', '.h'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutModifier(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckFuncLayoutModifier(FullName)
# Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
# Check whether the closing parenthesis is on its own line and also indented two spaces
def FunctionLayoutCheckName(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function name ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c', '.h'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutName(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckFuncLayoutName(FullName)
# Check whether the function prototypes in include files have the same form as function definitions
def FunctionLayoutCheckPrototype(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function prototype ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[PROTOTYPE]" + FullName)
# c.CheckFuncLayoutPrototype(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[PROTOTYPE]" + FullName)
c.CheckFuncLayoutPrototype(FullName)
# Check whether the body of a function is contained by open and close braces that must be in the first column
def FunctionLayoutCheckBody(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function body ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutBody(FullName)
for FullName in EccGlobalData.gCFileList:
c.CheckFuncLayoutBody(FullName)
# Check whether the data declarations is the first code in a module.
# self.CFunctionLayoutCheckDataDeclaration = 1
# Check whether no initialization of a variable as part of its declaration
def FunctionLayoutCheckLocalVariable(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout local variables ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckFuncLayoutLocalVariable(FullName)
for FullName in EccGlobalData.gCFileList:
c.CheckFuncLayoutLocalVariable(FullName)
    # Check whether no use of STATIC for functions
    # self.CFunctionLayoutCheckNoStatic = 1
    # Declarations and Data Types Checking
    def DeclAndDataTypeCheck(self):
        """Run all declaration and data-type checkpoints."""
        self.DeclCheckNoUseCType()
        self.DeclCheckInOutModifier()
        self.DeclCheckEFIAPIModifier()
        self.DeclCheckEnumeratedType()
        self.DeclCheckStructureDeclaration()
        self.DeclCheckSameStructure()
        self.DeclCheckUnionType()
# Check whether no use of int, unsigned, char, void, long in any .c, .h or .asl files.
def DeclCheckNoUseCType(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration No use C type ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckDeclNoUseCType(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckDeclNoUseCType(FullName)
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
def DeclCheckInOutModifier(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration argument modifier ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# c.CheckDeclArgModifier(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
c.CheckDeclArgModifier(FullName)
    # Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
    def DeclCheckEFIAPIModifier(self):
        """Placeholder: the EFIAPI-modifier checkpoint is not implemented."""
        if EccGlobalData.gConfig.DeclarationDataTypeCheckEFIAPIModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            # Intentionally a no-op even when enabled.
            pass
# Check whether Enumerated Type has a 'typedef' and the name is capital
def DeclCheckEnumeratedType(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration enum typedef ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[ENUM]" + FullName)
# c.CheckDeclEnumTypedef(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
EdkLogger.quiet("[ENUM]" + FullName)
c.CheckDeclEnumTypedef(FullName)
# Check whether Structure Type has a 'typedef' and the name is capital
def DeclCheckStructureDeclaration(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration struct typedef ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[STRUCT]" + FullName)
# c.CheckDeclStructTypedef(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
EdkLogger.quiet("[STRUCT]" + FullName)
c.CheckDeclStructTypedef(FullName)
# Check whether having same Structure
def DeclCheckSameStructure(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckSameStructure == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking same struct ...")
AllStructure = {}
for IdentifierTable in EccGlobalData.gIdentifierTableList:
SqlCommand = """select ID, Name, BelongsToFile from %s where Model = %s""" % (IdentifierTable, MODEL_IDENTIFIER_STRUCTURE)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[1] != '':
if Record[1] not in AllStructure.keys():
AllStructure[Record[1]] = Record[2]
else:
ID = AllStructure[Record[1]]
SqlCommand = """select FullPath from File where ID = %s """ % ID
NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
OtherMsg = "The structure name '%s' is duplicate" % Record[1]
if NewRecordSet != []:
OtherMsg = "The structure name [%s] is duplicate with the one defined in %s, maybe struct NOT typedefed or the typedef new type NOT used to qualify variables" % (Record[1], NewRecordSet[0][0])
if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE, Record[1]):
EccGlobalData.gDb.TblReport.Insert(ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE, OtherMsg=OtherMsg, BelongsToTable=IdentifierTable, BelongsToItem=Record[0])
# Check whether Union Type has a 'typedef' and the name is capital
def DeclCheckUnionType(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration union typedef ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[UNION]" + FullName)
# c.CheckDeclUnionTypedef(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
EdkLogger.quiet("[UNION]" + FullName)
c.CheckDeclUnionTypedef(FullName)
    # Predicate Expression Checking
    def PredicateExpressionCheck(self):
        """Run all predicate-expression checkpoints."""
        self.PredicateExpressionCheckBooleanValue()
        self.PredicateExpressionCheckNonBooleanOperator()
        self.PredicateExpressionCheckComparisonNullType()
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
def PredicateExpressionCheckBooleanValue(self):
if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression Boolean value ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[BOOLEAN]" + FullName)
# c.CheckBooleanValueComparison(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[BOOLEAN]" + FullName)
c.CheckBooleanValueComparison(FullName)
# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
def PredicateExpressionCheckNonBooleanOperator(self):
if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
# c.CheckNonBooleanValueComparison(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
c.CheckNonBooleanValueComparison(FullName)
# Check whether a comparison of any pointer to zero must be done via the NULL type
def PredicateExpressionCheckComparisonNullType(self):
if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression NULL pointer ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.c'):
# FullName = os.path.join(Dirpath, F)
# EdkLogger.quiet("[POINTER]" + FullName)
# c.CheckPointerNullComparison(FullName)
for FullName in EccGlobalData.gCFileList:
EdkLogger.quiet("[POINTER]" + FullName)
c.CheckPointerNullComparison(FullName)
    # Include file checking
    def IncludeFileCheck(self):
        """Run all include-file checkpoints."""
        self.IncludeFileCheckIfndef()
        self.IncludeFileCheckData()
        self.IncludeFileCheckSameName()
# Check whether having include files with same name
def IncludeFileCheckSameName(self):
if EccGlobalData.gConfig.IncludeFileCheckSameName == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking same header file name ...")
SqlCommand = """select ID, FullPath from File
where Model = 1002 order by Name """
RecordDict = {}
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
List = Record[1].replace('/', '\\').split('\\')
if len(List) >= 2:
Key = List[-2] + '\\' + List[-1]
else:
Key = List[0]
if Key not in RecordDict:
RecordDict[Key] = [Record]
else:
RecordDict[Key].append(Record)
for Key in RecordDict:
if len(RecordDict[Key]) > 1:
for Item in RecordDict[Key]:
Path = mws.relpath(Item[1], EccGlobalData.gWorkspace)
if not EccGlobalData.gException.IsException(ERROR_INCLUDE_FILE_CHECK_NAME, Path):
EccGlobalData.gDb.TblReport.Insert(ERROR_INCLUDE_FILE_CHECK_NAME, OtherMsg="The file name for [%s] is duplicate" % Path, BelongsToTable='File', BelongsToItem=Item[0])
# Check whether all include file contents is guarded by a #ifndef statement.
def IncludeFileCheckIfndef(self):
if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking header file ifndef ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckHeaderFileIfndef(FullName)
for FullName in EccGlobalData.gHFileList:
MsgList = c.CheckHeaderFileIfndef(FullName)
# Check whether include files NOT contain code or define data variables
def IncludeFileCheckData(self):
if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking header file data ...")
# Get all typedef functions
gAllTypedefFun = []
for IdentifierTable in EccGlobalData.gIdentifierTableList:
SqlCommand = """select Name from %s
where Model = %s """ % (IdentifierTable, MODEL_IDENTIFIER_TYPEDEF)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
if Record[0].startswith('('):
gAllTypedefFun.append(Record[0])
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckHeaderFileData(FullName)
for FullName in EccGlobalData.gHFileList:
MsgList = c.CheckHeaderFileData(FullName, gAllTypedefFun)
    # Doxygen document checking
    def DoxygenCheck(self):
        """Run all Doxygen documentation checkpoints."""
        self.DoxygenCheckFileHeader()
        self.DoxygenCheckFunctionHeader()
        self.DoxygenCheckCommentDescription()
        self.DoxygenCheckCommentFormat()
        self.DoxygenCheckCommand()
    # Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
    def DoxygenCheckFileHeader(self):
        """Check file-header comment blocks.

        .c/.h files are delegated to c.CheckFileHeaderDoxygenComments();
        INF/DEC/DSC/FDF files are scanned line by line for a '# @file'
        style header comment and its terminating '##' line.
        """
        if EccGlobalData.gConfig.DoxygenCheckFileHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking Doxygen file header ...")
            for Dirpath, Dirnames, Filenames in self.WalkTree():
                for F in Filenames:
                    Ext = os.path.splitext(F)[1]
                    if Ext in ('.h', '.c'):
                        FullName = os.path.join(Dirpath, F)
                        MsgList = c.CheckFileHeaderDoxygenComments(FullName)
                    elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):
                        FullName = os.path.join(Dirpath, F)
                        op = open(FullName).readlines()
                        FileLinesList = op
                        LineNo = 0
                        CurrentSection = MODEL_UNKNOWN
                        HeaderSectionLines = []
                        HeaderCommentStart = False
                        HeaderCommentEnd = False
                        for Line in FileLinesList:
                            LineNo = LineNo + 1
                            Line = Line.strip()
                            if (LineNo < len(FileLinesList) - 1):
                                # NOTE(review): NextLine keeps its previous value
                                # for the last two lines of the file, and is
                                # unbound if the first iteration fails this test
                                # — confirm whether that is intended.
                                NextLine = FileLinesList[LineNo].strip()
                            #
                            # blank line
                            #
                            if (Line == '' or not Line) and LineNo == len(FileLinesList):
                                # NOTE(review): LastSectionFalg (sic) is assigned
                                # but never read anywhere in this method.
                                LastSectionFalg = True
                            #
                            # check whether file header comment section started
                            #
                            if Line.startswith('#') and \
                                (Line.find('@file') > -1) and \
                                not HeaderCommentStart:
                                if CurrentSection != MODEL_UNKNOWN:
                                    # '@file' appeared after another section had
                                    # started, i.e. the header is not at the top.
                                    SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
                                    ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
                                    for Result in ResultSet:
                                        Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file"" or ""# @file""at the very top file'
                                        EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
                                else:
                                    CurrentSection = MODEL_IDENTIFIER_FILE_HEADER
                                    #
                                    # Append the first line to section lines.
                                    #
                                    HeaderSectionLines.append((Line, LineNo))
                                    HeaderCommentStart = True
                                    continue
                            #
                            # Collect Header content.
                            #
                            if (Line.startswith('#') and CurrentSection == MODEL_IDENTIFIER_FILE_HEADER) and\
                                HeaderCommentStart and not Line.startswith('##') and not\
                                HeaderCommentEnd and NextLine != '':
                                HeaderSectionLines.append((Line, LineNo))
                                continue
                            #
                            # Header content end
                            #
                            if (Line.startswith('##') or not Line.strip().startswith("#")) and HeaderCommentStart \
                                and not HeaderCommentEnd:
                                if Line.startswith('##'):
                                    HeaderCommentEnd = True
                                HeaderSectionLines.append((Line, LineNo))
                                ParseHeaderCommentSection(HeaderSectionLines, FullName)
                                break
                        if HeaderCommentStart == False:
                            SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
                            ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
                            for Result in ResultSet:
                                Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file"" or ""# @file"" at the very top file'
                                EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
                        if HeaderCommentEnd == False:
                            SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
                            ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
                            for Result in ResultSet:
                                Msg = 'INF/DEC/DSC/FDF file header comment should end with ""##"" at the end of file header comment block'
                                # Check whether File header Comment End with '##'
                                if EccGlobalData.gConfig.HeaderCheckFileCommentEnd == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
                                    EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
def DoxygenCheckFunctionHeader(self):
if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen function header ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
# Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
def DoxygenCheckCommentDescription(self):
if EccGlobalData.gConfig.DoxygenCheckCommentDescription == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
pass
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
def DoxygenCheckCommentFormat(self):
if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen comment ///< ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
def DoxygenCheckCommand(self):
if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen command ...")
# for Dirpath, Dirnames, Filenames in self.WalkTree():
# for F in Filenames:
# if os.path.splitext(F)[1] in ('.h', '.c'):
# FullName = os.path.join(Dirpath, F)
# MsgList = c.CheckDoxygenCommand(FullName)
for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
MsgList = c.CheckDoxygenCommand(FullName)
# Meta-Data File Processing Checking
def MetaDataFileCheck(self):
self.MetaDataFileCheckPathName()
self.MetaDataFileCheckGenerateFileList()
self.MetaDataFileCheckLibraryInstance()
self.MetaDataFileCheckLibraryInstanceDependent()
self.MetaDataFileCheckLibraryInstanceOrder()
self.MetaDataFileCheckLibraryNoUse()
self.MetaDataFileCheckLibraryDefinedInDec()
self.MetaDataFileCheckBinaryInfInFdf()
self.MetaDataFileCheckPcdDuplicate()
self.MetaDataFileCheckPcdFlash()
self.MetaDataFileCheckPcdNoUse()
self.MetaDataFileCheckGuidDuplicate()
self.MetaDataFileCheckModuleFileNoUse()
self.MetaDataFileCheckPcdType()
self.MetaDataFileCheckModuleFileGuidDuplication()
self.MetaDataFileCheckModuleFileGuidFormat()
self.MetaDataFileCheckModuleFileProtocolFormat()
self.MetaDataFileCheckModuleFilePpiFormat()
self.MetaDataFileCheckModuleFilePcdFormat()
# Check whether each file defined in meta-data exists
def MetaDataFileCheckPathName(self):
if EccGlobalData.gConfig.MetaDataFileCheckPathName == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# This item is covered when parsing Inf/Dec/Dsc files
pass
# Generate a list for all files defined in meta-data files
def MetaDataFileCheckGenerateFileList(self):
if EccGlobalData.gConfig.MetaDataFileCheckGenerateFileList == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# This item is covered when parsing Inf/Dec/Dsc files
pass
# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
# Each Library Instance must specify the Supported Module Types in its Inf file,
# and any module specifying the library instance must be one of the supported types.
    def MetaDataFileCheckLibraryInstance(self):
        """Check that each library instance is only used by modules whose
        MODULE_TYPE is among the instance's declared supported module types.

        Inserts ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1/_2 rows into the
        report table; returns nothing.
        """
        if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstance == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for library instance type issue ...")
            # Pass 1: pair each INF's LIBRARY_CLASS define with the same INF's MODULE_TYPE define.
            SqlCommand = """select A.ID, A.Value3, B.Value3 from Inf as A left join Inf as B
                            where A.Value2 = 'LIBRARY_CLASS' and A.Model = %s
                            and B.Value2 = 'MODULE_TYPE' and B.Model = %s and A.BelongsToFile = B.BelongsToFile
                            group by A.BelongsToFile""" % (MODEL_META_DATA_HEADER, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            # Map library class name -> list of supported module types.
            LibraryClasses = {}
            for Record in RecordSet:
                # LIBRARY_CLASS value has the shape 'Name[|SupportedModuleTypes]'.
                List = Record[1].split('|', 1)
                SupModType = []
                if len(List) == 1:
                    # No explicit list means the library claims to support every module type.
                    SupModType = DT.SUP_MODULE_LIST_STRING.split(DT.TAB_VALUE_SPLIT)
                elif len(List) == 2:
                    SupModType = List[1].split()
                if List[0] not in LibraryClasses:
                    LibraryClasses[List[0]] = SupModType
                else:
                    # Merge supported types when several instances declare the same class.
                    for Item in SupModType:
                        if Item not in LibraryClasses[List[0]]:
                            LibraryClasses[List[0]].append(Item)
                # The instance's own module type must be supported (BASE is always acceptable).
                if Record[2] != DT.SUP_MODULE_BASE and Record[2] not in SupModType:
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2, OtherMsg="The Library Class '%s' does not specify its supported module types" % (List[0]), BelongsToTable='Inf', BelongsToItem=Record[0])
            # Pass 2: pair each consumed library class with the consuming module's MODULE_TYPE.
            SqlCommand = """select A.ID, A.Value1, B.Value3 from Inf as A left join Inf as B
                            where A.Model = %s and B.Value2 = '%s' and B.Model = %s
                            and B.BelongsToFile = A.BelongsToFile""" \
                            % (MODEL_EFI_LIBRARY_CLASS, 'MODULE_TYPE', MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            # Merge all LibraryClasses' supmodlist
            RecordDict = {}
            for Record in RecordSet:
                if Record[1] not in RecordDict:
                    RecordDict[Record[1]] = [str(Record[2])]
                else:
                    if Record[2] not in RecordDict[Record[1]]:
                        RecordDict[Record[1]].append(Record[2])
            for Record in RecordSet:
                if Record[1] in LibraryClasses:
                    # The consuming module's type must be supported, unless some
                    # consumer of this class is a BASE module (see RecordDict).
                    if Record[2] not in LibraryClasses[Record[1]] and DT.SUP_MODULE_BASE not in RecordDict[Record[1]]:
                        if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, Record[1]):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, OtherMsg="The type of Library Class [%s] defined in Inf file does not match the type of the module" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])
                else:
                    # Class was never seen among declared instances: report (unless excepted).
                    if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, Record[1]):
                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, OtherMsg="The type of Library Class [%s] defined in Inf file does not match the type of the module" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])
# Check whether a Library Instance has been defined for all dependent library classes
    def MetaDataFileCheckLibraryInstanceDependent(self):
        """Check that every library class consumed in a DSC resolves to an INF
        file that actually declares that LIBRARY_CLASS."""
        if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstanceDependent == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for library instance dependent issue ...")
            SqlCommand = """select ID, Value1, Value2 from Dsc where Model = %s""" % MODEL_EFI_LIBRARY_CLASS
            LibraryClasses = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for LibraryClass in LibraryClasses:
                # Skip NULL library instances and conditional-directive artifacts.
                if LibraryClass[1].upper() == 'NULL' or LibraryClass[1].startswith('!ifdef') or LibraryClass[1].startswith('!ifndef') or LibraryClass[1].endswith('!endif'):
                    continue
                else:
                    LibraryIns = os.path.normpath(mws.join(EccGlobalData.gWorkspace, LibraryClass[2]))
                    SkipDirString = '|'.join(EccGlobalData.gConfig.SkipDirList)
                    # NOTE(review): '(?:%s^\S)' looks suspicious -- '^' inside the group is a
                    # literal start-anchor, not an alternation separator; possibly '%s|\S' or a
                    # plain '(?:%s)' was intended. Confirm against the SkipDirList behavior.
                    p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
                    if p.match(os.path.split(LibraryIns)[0].upper()):
                        continue
                    # Does the referenced INF declare this library class name?
                    SqlCommand = """select Value3 from Inf where BelongsToFile =
                                    (select ID from File where lower(FullPath) = lower('%s'))
                                    and Value2 = '%s'""" % (LibraryIns, DT.PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS)
                    RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
                    IsFound = False
                    for Record in RecordSet:
                        # Declared value is 'Name[|SupportedModuleTypes]'; compare the name only.
                        LibName = Record[0].split('|', 1)[0]
                        if LibraryClass[1] == LibName:
                            IsFound = True
                    if not IsFound:
                        if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT, LibraryClass[1]):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT, OtherMsg="The Library Class [%s] is not specified in '%s'" % (LibraryClass[1], LibraryClass[2]), BelongsToTable='Dsc', BelongsToItem=LibraryClass[0])
# Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
def MetaDataFileCheckLibraryInstanceOrder(self):
if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstanceOrder == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# This checkpoint is not necessary for Ecc check
pass
# Check whether the unnecessary inclusion of library classes in the Inf file
# Check whether the unnecessary duplication of library classe names in the DSC file
    def MetaDataFileCheckLibraryNoUse(self):
        """Flag library classes that appear in INFs but in no platform DSC, and
        library class names mapped to different instances within one DSC scope."""
        if EccGlobalData.gConfig.MetaDataFileCheckLibraryNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for library instance not used ...")
            # Library classes declared in INFs that no DSC references at all.
            SqlCommand = """select ID, Value1 from Inf as A where A.Model = %s and A.Value1 not in (select B.Value1 from Dsc as B where Model = %s)""" % (MODEL_EFI_LIBRARY_CLASS, MODEL_EFI_LIBRARY_CLASS)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            for Record in RecordSet:
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, Record[1]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, OtherMsg="The Library Class [%s] is not used in any platform" % (Record[1]), BelongsToTable='Inf', BelongsToItem=Record[0])
            # Same class name (Value1) mapped to different instances (Value2) in the
            # same DSC file and scope, at different lines.
            SqlCommand = """
                         select A.ID, A.Value1, A.BelongsToFile, A.StartLine, B.StartLine from Dsc as A left join Dsc as B
                         where A.Model = %s and B.Model = %s and A.Scope1 = B.Scope1 and A.Scope2 = B.Scope2 and A.ID != B.ID
                         and A.Value1 = B.Value1 and A.Value2 != B.Value2 and A.BelongsToItem = -1 and B.BelongsToItem = -1 and A.StartLine != B.StartLine and B.BelongsToFile = A.BelongsToFile""" \
                         % (MODEL_EFI_LIBRARY_CLASS, MODEL_EFI_LIBRARY_CLASS)
            RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for Record in RecordSet:
                if Record[3] and Record[4] and Record[3] != Record[4] and Record[1] != 'NULL':
                    # Resolve the owning DSC file path for the report message.
                    SqlCommand = """select FullPath from File where ID = %s""" % (Record[2])
                    FilePathList = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
                    for FilePath in FilePathList:
                        if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, Record[1]):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NAME_DUPLICATE, OtherMsg="The Library Class [%s] is duplicated in '%s' line %s and line %s." % (Record[1], FilePath, Record[3], Record[4]), BelongsToTable='Dsc', BelongsToItem=Record[0])
# Check the header file in Include\Library directory whether be defined in the package DEC file.
    def MetaDataFileCheckLibraryDefinedInDec(self):
        """Check that every library class referenced by an INF is declared in a package DEC file."""
        if EccGlobalData.gConfig.MetaDataFileCheckLibraryDefinedInDec == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for library instance whether be defined in the package dec file ...")
            # Left join: B.Value1 is NULL when no DEC declares the class.
            # NOTE(review): the query joins Inf/Dec but is executed via TblDsc.Exec;
            # presumably all tables share one DB cursor -- confirm in the Table class.
            SqlCommand = """
                    select A.Value1, A.StartLine, A.ID, B.Value1 from Inf as A left join Dec as B
                    on A.Model = B.Model and A.Value1 = B.Value1 where A.Model=%s
                    """ % MODEL_EFI_LIBRARY_CLASS
            RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for Record in RecordSet:
                LibraryInInf, Line, ID, LibraryDec = Record
                if not LibraryDec:
                    if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, LibraryInInf):
                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NOT_DEFINED, \
                            OtherMsg="The Library Class [%s] in %s line is not defined in the associated package file." % (LibraryInInf, Line),
                            BelongsToTable='Inf', BelongsToItem=ID)
# Check whether an Inf file is specified in the FDF file, but not in the Dsc file, then the Inf file must be for a Binary module only
def MetaDataFileCheckBinaryInfInFdf(self):
if EccGlobalData.gConfig.MetaDataFileCheckBinaryInfInFdf == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking for non-binary modules defined in FDF files ...")
SqlCommand = """select A.ID, A.Value1 from Fdf as A
where A.Model = %s
and A.Enabled > -1
and A.Value1 not in
(select B.Value1 from Dsc as B
where B.Model = %s
and B.Enabled > -1)""" % (MODEL_META_DATA_COMPONENT, MODEL_META_DATA_COMPONENT)
RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)
for Record in RecordSet:
FdfID = Record[0]
FilePath = Record[1]
FilePath = os.path.normpath(mws.join(EccGlobalData.gWorkspace, FilePath))
SqlCommand = """select ID from Inf where Model = %s and BelongsToFile = (select ID from File where FullPath like '%s')
""" % (MODEL_EFI_SOURCE_FILE, FilePath)
NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
if NewRecordSet != []:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF, FilePath):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF, OtherMsg="File [%s] defined in FDF file and not in DSC file must be a binary module" % (FilePath), BelongsToTable='Fdf', BelongsToItem=FdfID)
# Check whether a PCD is set in a Dsc file or the FDF file, but not in both.
    def MetaDataFileCheckPcdDuplicate(self):
        """Check that a PCD is set in a DSC file or an FDF file but not both, and
        that a DEC file does not declare the same PCD twice in one section."""
        if EccGlobalData.gConfig.MetaDataFileCheckPcdDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for duplicate PCDs defined in both DSC and FDF files ...")
            # PCD rows with matching TokenSpace.PcdName present in a DSC and an FDF.
            SqlCommand = """
                         select A.ID, A.Value1, A.Value2, A.BelongsToFile, B.ID, B.Value1, B.Value2, B.BelongsToFile from Dsc as A, Fdf as B
                         where A.Model >= %s and A.Model < %s
                         and B.Model >= %s and B.Model < %s
                         and A.Value1 = B.Value1
                         and A.Value2 = B.Value2
                         and A.Enabled > -1
                         and B.Enabled > -1
                         group by A.ID
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for Record in RecordSet:
                SqlCommand1 = """select Name from File where ID = %s""" % Record[3]
                SqlCommand2 = """select Name from File where ID = %s""" % Record[7]
                DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]
                FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]
                # Only flag when the DSC and FDF share the same base name,
                # i.e. they belong to the same platform description.
                if DscFileName != FdfFileName:
                    continue
                # Report the DSC side and the FDF side independently.
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1] + '.' + Record[2]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[1] + '.' + Record[2]), BelongsToTable='Dsc', BelongsToItem=Record[0])
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[5] + '.' + Record[6]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined in both FDF file and DSC file" % (Record[5] + '.' + Record[6]), BelongsToTable='Fdf', BelongsToItem=Record[4])
            EdkLogger.quiet("Checking for duplicate PCDs defined in DEC files ...")
            # Identical PCD declared twice in the same DEC file, section scope and model.
            SqlCommand = """
                         select A.ID, A.Value1, A.Value2, A.Model, B.Model from Dec as A left join Dec as B
                         where A.Model >= %s and A.Model < %s
                         and B.Model >= %s and B.Model < %s
                         and A.Value1 = B.Value1
                         and A.Value2 = B.Value2
                         and A.Scope1 = B.Scope1
                         and A.ID != B.ID
                         and A.Model = B.Model
                         and A.Enabled > -1
                         and B.Enabled > -1
                         and A.BelongsToFile = B.BelongsToFile
                         group by A.ID
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblDec.Exec(SqlCommand)
            for Record in RecordSet:
                RecordCat = Record[1] + '.' + Record[2]
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, RecordCat):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg="The PCD [%s] is defined duplicated in DEC file" % RecordCat, BelongsToTable='Dec', BelongsToItem=Record[0])
# Check whether PCD settings in the FDF file can only be related to flash.
def MetaDataFileCheckPcdFlash(self):
if EccGlobalData.gConfig.MetaDataFileCheckPcdFlash == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking only Flash related PCDs are used in FDF ...")
SqlCommand = """
select ID, Value1, Value2, BelongsToFile from Fdf as A
where A.Model >= %s and Model < %s
and A.Enabled > -1
and A.Value2 not like '%%Flash%%'
""" % (MODEL_PCD, MODEL_META_DATA_HEADER)
RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)
for Record in RecordSet:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, Record[1] + '.' + Record[2]):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, OtherMsg="The PCD [%s] defined in FDF file is not related to Flash" % (Record[1] + '.' + Record[2]), BelongsToTable='Fdf', BelongsToItem=Record[0])
# Check whether PCDs used in Inf files but not specified in Dsc or FDF files
def MetaDataFileCheckPcdNoUse(self):
if EccGlobalData.gConfig.MetaDataFileCheckPcdNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking for non-specified PCDs ...")
SqlCommand = """
select ID, Value1, Value2, BelongsToFile from Inf as A
where A.Model >= %s and Model < %s
and A.Enabled > -1
and (A.Value1, A.Value2) not in
(select Value1, Value2 from Dsc as B
where B.Model >= %s and B.Model < %s
and B.Enabled > -1)
and (A.Value1, A.Value2) not in
(select Value1, Value2 from Fdf as C
where C.Model >= %s and C.Model < %s
and C.Enabled > -1)
""" % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
for Record in RecordSet:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, Record[1] + '.' + Record[2]):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, OtherMsg="The PCD [%s] defined in INF file is not specified in either DSC or FDF files" % (Record[1] + '.' + Record[2]), BelongsToTable='Inf', BelongsToItem=Record[0])
# Check whether having duplicate guids defined for Guid/Protocol/Ppi
def MetaDataFileCheckGuidDuplicate(self):
if EccGlobalData.gConfig.MetaDataFileCheckGuidDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking for duplicate GUID/PPI/PROTOCOL ...")
# Check Guid
self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID, EccGlobalData.gDb.TblDec)
self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID, EccGlobalData.gDb.TblDsc)
self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID)
# Check protocol
self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL, EccGlobalData.gDb.TblDec)
self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL, EccGlobalData.gDb.TblDsc)
self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL)
# Check ppi
self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI, EccGlobalData.gDb.TblDec)
self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI, EccGlobalData.gDb.TblDsc)
self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI)
# Check whether all files under module directory are described in INF files
def MetaDataFileCheckModuleFileNoUse(self):
if EccGlobalData.gConfig.MetaDataFileCheckModuleFileNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking for no used module files ...")
SqlCommand = """
select upper(Path) from File where ID in (select BelongsToFile from Inf where BelongsToFile != -1)
"""
InfPathSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
InfPathList = []
for Item in InfPathSet:
if Item[0] not in InfPathList:
InfPathList.append(Item[0])
SqlCommand = """
select ID, Path, FullPath from File where upper(FullPath) not in
(select upper(A.Path) || '%s' || upper(B.Value1) from File as A, INF as B
where A.ID in (select BelongsToFile from INF where Model = %s group by BelongsToFile) and
B.BelongsToFile = A.ID and B.Model = %s)
and (Model = %s or Model = %s)
""" % (os.sep, MODEL_EFI_SOURCE_FILE, MODEL_EFI_SOURCE_FILE, MODEL_FILE_C, MODEL_FILE_H)
RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
for Record in RecordSet:
Path = Record[1]
Path = Path.upper().replace('\X64', '').replace('\IA32', '').replace('\EBC', '').replace('\IPF', '').replace('\ARM', '')
if Path in InfPathList:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, Record[2]):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, OtherMsg="The source file [%s] is existing in module directory but it is not described in INF file." % (Record[2]), BelongsToTable='File', BelongsToItem=Record[0])
# Check whether the PCD is correctly used in C function via its type
    def MetaDataFileCheckPcdType(self):
        """Check that C code accesses each PCD through the getter/setter family
        matching its declared type: FixedPcdGet* for FixedAtBuild,
        FeaturePcdGet/Set* for FeatureFlag, PatchablePcdGet/Set* for
        PatchableInModule."""
        if EccGlobalData.gConfig.MetaDataFileCheckPcdType == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for pcd type in c code function usage ...")
            SqlCommand = """
                         select ID, Model, Value1, Value2, BelongsToFile from INF where Model > %s and Model < %s
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER)
            PcdSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            for Pcd in PcdSet:
                Model = Pcd[1]
                PcdName = Pcd[2]
                # Prefer Value2 (PcdCName) when present; Value1 alone otherwise.
                if Pcd[3]:
                    PcdName = Pcd[3]
                BelongsToFile = Pcd[4]
                # Find the C/H source files of the module that references this PCD.
                SqlCommand = """
                             select ID from File where FullPath in
                            (select B.Path || '%s' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s
                             and B.ID = %s and (B.Model = %s or B.Model = %s))
                             """ % (os.sep, MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile, MODEL_FILE_C, MODEL_FILE_H)
                TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
                for Tbl in TableSet:
                    # Each parsed source file has its own per-file 'Identifier<ID>' table.
                    TblName = 'Identifier' + str(Tbl[0])
                    SqlCommand = """
                                 select Name, ID from %s where value like '%s' and Model = %s
                                 """ % (TblName, PcdName, MODEL_IDENTIFIER_FUNCTION_CALLING)
                    RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
                    TblNumber = TblName.replace('Identifier', '')
                    for Record in RecordSet:
                        FunName = Record[0]
                        if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, FunName):
                            # One independent check per PCD access type.
                            if Model in [MODEL_PCD_FIXED_AT_BUILD] and not FunName.startswith('FixedPcdGet'):
                                EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg="The pcd '%s' is defined as a FixPcd but now it is called by c function [%s]" % (PcdName, FunName), BelongsToTable=TblName, BelongsToItem=Record[1])
                            if Model in [MODEL_PCD_FEATURE_FLAG] and (not FunName.startswith('FeaturePcdGet') and not FunName.startswith('FeaturePcdSet')):
                                EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg="The pcd '%s' is defined as a FeaturePcd but now it is called by c function [%s]" % (PcdName, FunName), BelongsToTable=TblName, BelongsToItem=Record[1])
                            if Model in [MODEL_PCD_PATCHABLE_IN_MODULE] and (not FunName.startswith('PatchablePcdGet') and not FunName.startswith('PatchablePcdSet')):
                                EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg="The pcd '%s' is defined as a PatchablePcd but now it is called by c function [%s]" % (PcdName, FunName), BelongsToTable=TblName, BelongsToItem=Record[1])
            #ERROR_META_DATA_FILE_CHECK_PCD_TYPE
            pass
# Internal worker function to get the INF workspace relative path from FileID
def GetInfFilePathFromID(self, FileID):
Table = EccGlobalData.gDb.TblFile
SqlCommand = """select A.FullPath from %s as A where A.ID = %s""" % (Table.Table, FileID)
RecordSet = Table.Exec(SqlCommand)
Path = ""
for Record in RecordSet:
Path = mws.relpath(Record[0], EccGlobalData.gWorkspace)
return Path
# Check whether two module INFs under one workspace has the same FILE_GUID value
def MetaDataFileCheckModuleFileGuidDuplication(self):
if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidDuplication == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking for pcd type in c code function usage ...")
Table = EccGlobalData.gDb.TblInf
SqlCommand = """
select A.ID, A.Value3, A.BelongsToFile, B.BelongsToFile from %s as A, %s as B
where A.Value2 = 'FILE_GUID' and B.Value2 = 'FILE_GUID' and
A.Value3 = B.Value3 and A.ID != B.ID group by A.ID
""" % (Table.Table, Table.Table)
RecordSet = Table.Exec(SqlCommand)
for Record in RecordSet:
InfPath1 = self.GetInfFilePathFromID(Record[2])
InfPath2 = self.GetInfFilePathFromID(Record[3])
if InfPath1 and InfPath2:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, InfPath1):
Msg = "The FILE_GUID of INF file [%s] is duplicated with that of %s" % (InfPath1, InfPath2)
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
# Check Guid Format in module INF
    def MetaDataFileCheckModuleFileGuidFormat(self):
        """Check that each [Guids] entry in a module INF carries a well-formed
        '## USAGE ## GUIDTYPE' usage comment."""
        if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Check Guid Format in module INF ...")
            Table = EccGlobalData.gDb.TblInf
            SqlCommand = """
                         select ID, Value1, Usage, BelongsToFile from %s where Model = %s group by ID
                         """ % (Table.Table, MODEL_EFI_GUID)
            RecordSet = Table.Exec(SqlCommand)
            for Record in RecordSet:
                Value1 = Record[1]  # GUID C name
                Value2 = Record[2]  # usage comment text
                GuidCommentList = []
                InfPath = self.GetInfFilePathFromID(Record[3])
                Msg = "The GUID format of %s in INF file [%s] does not follow rules" % (Value1, InfPath)
                if Value2.startswith(DT.TAB_SPECIAL_COMMENT):
                    # Split '## usage ## type ...' on the '##' markers.
                    GuidCommentList = Value2[2:].split(DT.TAB_SPECIAL_COMMENT)
                    if GuidCommentList[0].strip().startswith(DT.TAB_INF_USAGE_UNDEFINED):
                        # UNDEFINED usage is accepted as-is; no type part required.
                        continue
                    elif len(GuidCommentList) > 1:
                        # First part must be a recognized usage keyword...
                        if not GuidCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
                                                                      DT.TAB_INF_USAGE_SOME_PRO,
                                                                      DT.TAB_INF_USAGE_CON,
                                                                      DT.TAB_INF_USAGE_SOME_CON)):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
                        # ...and the second part must be a recognized GUID type.
                        if not (GuidCommentList[1].strip()).startswith(DT.TAB_INF_GUIDTYPE_VAR) and \
                            not GuidCommentList[1].strip().startswith((DT.TAB_INF_GUIDTYPE_EVENT,
                                                                       DT.TAB_INF_GUIDTYPE_HII,
                                                                       DT.TAB_INF_GUIDTYPE_FILE,
                                                                       DT.TAB_INF_GUIDTYPE_HOB,
                                                                       DT.TAB_INF_GUIDTYPE_FV,
                                                                       DT.TAB_INF_GUIDTYPE_ST,
                                                                       DT.TAB_INF_GUIDTYPE_TSG,
                                                                       DT.TAB_INF_GUIDTYPE_GUID,
                                                                       DT.TAB_INF_GUIDTYPE_PROTOCOL,
                                                                       DT.TAB_INF_GUIDTYPE_PPI,
                                                                       DT.TAB_INF_USAGE_UNDEFINED)):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
                    else:
                        # A usage keyword without the GUID-type part is malformed.
                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
                else:
                    # No '##' usage comment at all.
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_GUID, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
# Check Protocol Format in module INF
def MetaDataFileCheckModuleFileProtocolFormat(self):
if EccGlobalData.gConfig.MetaDataFileCheckModuleFileProtocolFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Check Protocol Format in module INF ...")
Table = EccGlobalData.gDb.TblInf
SqlCommand = """
select ID, Value1, Usage, BelongsToFile from %s where Model = %s group by ID
""" % (Table.Table, MODEL_EFI_PROTOCOL)
RecordSet = Table.Exec(SqlCommand)
for Record in RecordSet:
Value1 = Record[1]
Value2 = Record[2]
GuidCommentList = []
InfPath = self.GetInfFilePathFromID(Record[3])
Msg = "The Protocol format of %s in INF file [%s] does not follow rules" % (Value1, InfPath)
if Value2.startswith(DT.TAB_SPECIAL_COMMENT):
GuidCommentList = Value2[2:].split(DT.TAB_SPECIAL_COMMENT)
if len(GuidCommentList) >= 1:
if not GuidCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
DT.TAB_INF_USAGE_SOME_PRO,
DT.TAB_INF_USAGE_CON,
DT.TAB_INF_USAGE_SOME_CON,
DT.TAB_INF_USAGE_NOTIFY,
DT.TAB_INF_USAGE_TO_START,
DT.TAB_INF_USAGE_BY_START,
DT.TAB_INF_USAGE_UNDEFINED)):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
else:
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PROTOCOL, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
# Check Ppi Format in module INF
def MetaDataFileCheckModuleFilePpiFormat(self):
if EccGlobalData.gConfig.MetaDataFileCheckModuleFilePpiFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Check Ppi Format in module INF ...")
Table = EccGlobalData.gDb.TblInf
SqlCommand = """
select ID, Value1, Usage, BelongsToFile from %s where Model = %s group by ID
""" % (Table.Table, MODEL_EFI_PPI)
RecordSet = Table.Exec(SqlCommand)
for Record in RecordSet:
Value1 = Record[1]
Value2 = Record[2]
GuidCommentList = []
InfPath = self.GetInfFilePathFromID(Record[3])
Msg = "The Ppi format of %s in INF file [%s] does not follow rules" % (Value1, InfPath)
if Value2.startswith(DT.TAB_SPECIAL_COMMENT):
GuidCommentList = Value2[2:].split(DT.TAB_SPECIAL_COMMENT)
if len(GuidCommentList) >= 1:
if not GuidCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
DT.TAB_INF_USAGE_SOME_PRO,
DT.TAB_INF_USAGE_CON,
DT.TAB_INF_USAGE_SOME_CON,
DT.TAB_INF_USAGE_NOTIFY,
DT.TAB_INF_USAGE_UNDEFINED)):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PPI, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
else:
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PPI, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
# Check Pcd Format in module INF
    def MetaDataFileCheckModuleFilePcdFormat(self):
        """Check that each [Pcds*] entry in a module INF carries a usage comment
        consistent with the PCD's access type."""
        if EccGlobalData.gConfig.MetaDataFileCheckModuleFilePcdFormat == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Check Pcd Format in module INF ...")
            Table = EccGlobalData.gDb.TblInf
            SqlCommand = """
                         select ID, Model, Value1, Value2, Usage, BelongsToFile from %s where Model >= %s and Model < %s group by ID
                         """ % (Table.Table, MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = Table.Exec(SqlCommand)
            for Record in RecordSet:
                Model = Record[1]
                PcdName = Record[2] + '.' + Record[3]
                Usage = Record[4]
                PcdCommentList = []
                InfPath = self.GetInfFilePathFromID(Record[5])
                Msg = "The Pcd format of %s in INF file [%s] does not follow rules" % (PcdName, InfPath)
                if Usage.startswith(DT.TAB_SPECIAL_COMMENT):
                    # Split '## usage ...' on the '##' markers; the first part is the usage keyword.
                    PcdCommentList = Usage[2:].split(DT.TAB_SPECIAL_COMMENT)
                    if len(PcdCommentList) >= 1:
                        # FixedAtBuild/FeatureFlag PCDs must use SOMETIMES_PRODUCES,
                        # CONSUMES-style or UNDEFINED usage keywords.
                        if Model in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_FEATURE_FLAG] \
                            and not PcdCommentList[0].strip().startswith((DT.TAB_INF_USAGE_SOME_PRO,
                                                                          DT.TAB_INF_USAGE_CON,
                                                                          DT.TAB_INF_USAGE_UNDEFINED)):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PCD, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
                        # Patchable/Dynamic/DynamicEx PCDs accept the full usage keyword set.
                        if Model in [MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX] \
                            and not PcdCommentList[0].strip().startswith((DT.TAB_INF_USAGE_PRO,
                                                                          DT.TAB_INF_USAGE_SOME_PRO,
                                                                          DT.TAB_INF_USAGE_CON,
                                                                          DT.TAB_INF_USAGE_SOME_CON,
                                                                          DT.TAB_INF_USAGE_UNDEFINED)):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PCD, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
                else:
                    # No '##' usage comment at all.
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_FORMAT_PCD, OtherMsg=Msg, BelongsToTable=Table.Table, BelongsToItem=Record[0])
# Check whether there is a duplicate Guid/Ppi/Protocol name
def CheckGuidProtocolPpi(self, ErrorID, Model, Table):
    """Report GUID/PROTOCOL/PPI names declared more than once within the
    same scope of the given meta-data table."""
    Kind = {
        MODEL_EFI_GUID: 'guid',
        MODEL_EFI_PROTOCOL: 'protocol',
        MODEL_EFI_PPI: 'ppi',
    }.get(Model, '')
    SqlCommand = """
                 select A.ID, A.Value1 from %s as A, %s as B
                 where A.Model = %s and B.Model = %s
                 and A.Value1 like B.Value1 and A.ID != B.ID
                 and A.Scope1 = B.Scope1
                 and A.Enabled > -1
                 and B.Enabled > -1
                 group by A.ID
                 """ % (Table.Table, Table.Table, Model, Model)
    for Record in Table.Exec(SqlCommand):
        if EccGlobalData.gException.IsException(ErrorID, Record[1]):
            continue
        EccGlobalData.gDb.TblReport.Insert(ErrorID,
                                           OtherMsg="The %s name [%s] is defined more than one time" % (Kind.upper(), Record[1]),
                                           BelongsToTable=Table.Table,
                                           BelongsToItem=Record[0])
# Check whether there is a duplicate Guid/Ppi/Protocol value
def CheckGuidProtocolPpiValue(self, ErrorID, Model):
    """Report GUID/PROTOCOL/PPI values shared by differently named
    declarations within the same scope of the DEC table."""
    Kind = {
        MODEL_EFI_GUID: 'guid',
        MODEL_EFI_PROTOCOL: 'protocol',
        MODEL_EFI_PPI: 'ppi',
    }.get(Model, '')
    Table = EccGlobalData.gDb.TblDec
    SqlCommand = """
                 select A.ID, A.Value1, A.Value2 from %s as A, %s as B
                 where A.Model = %s and B.Model = %s
                 and A.Value2 like B.Value2 and A.ID != B.ID
                 and A.Scope1 = B.Scope1 and A.Value1 != B.Value1
                 group by A.ID
                 """ % (Table.Table, Table.Table, Model, Model)
    for Record in Table.Exec(SqlCommand):
        if EccGlobalData.gException.IsException(ErrorID, Record[2]):
            continue
        EccGlobalData.gDb.TblReport.Insert(ErrorID,
                                           OtherMsg="The %s value [%s] is used more than one time" % (Kind.upper(), Record[2]),
                                           BelongsToTable=Table.Table,
                                           BelongsToItem=Record[0])
# Naming Convention Check
def NamingConventionCheck(self):
    """Run all naming-convention checks.

    Walks the workspace tree and, for every .c/.h source file that has an
    identifier table in the database, runs the #define/typedef/variable
    checks; the #ifndef include-guard check applies only to headers.
    Path-name and function-name checks are database-wide and run once.
    """
    if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' \
    or EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' \
    or EccGlobalData.gConfig.NamingConventionCheckIfndefStatement == '1' \
    or EccGlobalData.gConfig.NamingConventionCheckVariableName == '1' \
    or EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable == '1' \
    or EccGlobalData.gConfig.NamingConventionCheckAll == '1'\
    or EccGlobalData.gConfig.CheckAll == '1':
        for Dirpath, Dirnames, Filenames in self.WalkTree():
            for F in Filenames:
                Ext = os.path.splitext(F)[1]    # hoisted: compute once per file
                if Ext in ('.h', '.c'):
                    FullName = os.path.join(Dirpath, F)
                    Id = c.GetTableID(FullName)
                    if Id < 0:
                        continue
                    FileTable = 'Identifier' + str(Id)
                    self.NamingConventionCheckDefineStatement(FileTable)
                    self.NamingConventionCheckTypedefStatement(FileTable)
                    self.NamingConventionCheckVariableName(FileTable)
                    self.NamingConventionCheckSingleCharacterVariable(FileTable)
                    # BUG FIX: the original "in ('.h')" tested substring
                    # membership in the string '.h' (the parentheses do not
                    # make a tuple); use equality so only real headers get
                    # the include-guard check.
                    if Ext == '.h':
                        self.NamingConventionCheckIfndefStatement(FileTable)
    self.NamingConventionCheckPathName()
    self.NamingConventionCheckFunctionName()
# Check whether only capital letters are used for #define declarations
def NamingConventionCheckDefineStatement(self, FileTable):
    """Report #define macro names that are not entirely upper case.

    The macro name is the second whitespace token of the directive; for
    function-like macros the parameter list is stripped first.
    """
    if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
        EdkLogger.quiet("Checking naming convention of #define statement ...")
        SqlCommand = """select ID, Value from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_MACRO_DEFINE)
        RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
        for Record in RecordSet:
            Tokens = Record[1].strip().split()
            # ROBUSTNESS FIX: a bare '#define' with no macro name made the
            # original 'split()[1]' raise IndexError; skip such records.
            if len(Tokens) < 2:
                continue
            Name = Tokens[1]
            if Name.find('(') != -1:
                # Function-like macro: keep only the name before '('.
                Name = Name[0:Name.find('(')]
            if Name.upper() != Name:
                if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT, Name):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT, OtherMsg="The #define name [%s] does not follow the rules" % (Name), BelongsToTable=FileTable, BelongsToItem=Record[0])
# Check whether only capital letters are used for typedef declarations
def NamingConventionCheckTypedefStatement(self, FileTable):
    """Report typedef names that are not entirely upper case."""
    if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement != '1' and EccGlobalData.gConfig.NamingConventionCheckAll != '1' and EccGlobalData.gConfig.CheckAll != '1':
        return
    EdkLogger.quiet("Checking naming convention of #typedef statement ...")
    SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_TYPEDEF)
    for Record in EccGlobalData.gDb.TblFile.Exec(SqlCommand):
        Name = Record[1].strip()
        if not Name:
            continue
        # Function-pointer typedefs store the name inside parentheses;
        # peel them off before checking the case.
        if Name[0] == '(':
            Name = Name[1:Name.find(')')]
        if Name.find('(') > -1:
            Name = Name[Name.find('(') + 1 : Name.find(')')]
        Name = Name.replace('WINAPI', '').replace('*', '').strip()
        if Name.upper() == Name:
            continue
        if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT, Name):
            EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT, OtherMsg="The #typedef name [%s] does not follow the rules" % (Name), BelongsToTable=FileTable, BelongsToItem=Record[0])
# Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
def NamingConventionCheckIfndefStatement(self, FileTable):
    """Check a header's include-guard macro: it must not begin with '_'
    and must end with exactly one trailing '_'."""
    if EccGlobalData.gConfig.NamingConventionCheckIfndefStatement != '1' and EccGlobalData.gConfig.NamingConventionCheckAll != '1' and EccGlobalData.gConfig.CheckAll != '1':
        return
    EdkLogger.quiet("Checking naming convention of #ifndef statement ...")
    SqlCommand = """select ID, Value from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_MACRO_IFNDEF)
    RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
    if not RecordSet:
        return
    # Only the first #ifndef of the file is treated as the include guard.
    FirstDefine = min(RecordSet, key=lambda Record: Record[0])
    Name = FirstDefine[1].replace('#ifndef', '').strip()
    GuardIsBad = Name[0] == '_' or Name[-1] != '_' or Name[-2] == '_'
    if GuardIsBad and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT, Name):
        EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT, OtherMsg="The #ifndef name [%s] does not follow the rules" % (Name), BelongsToTable=FileTable, BelongsToItem=FirstDefine[0])
# Rule for path name, variable name and function name
# 1. First character should be upper case
# 2. Existing lower case in a word
# 3. No space existence
# Check whether the path name followed the rule
def NamingConventionCheckPathName(self):
    """Check every file path in the File table against the CamelCase rule
    (starts with an upper-case letter, contains a lower-case letter,
    no whitespace)."""
    if EccGlobalData.gConfig.NamingConventionCheckPathName != '1' and EccGlobalData.gConfig.NamingConventionCheckAll != '1' and EccGlobalData.gConfig.CheckAll != '1':
        return
    EdkLogger.quiet("Checking naming convention of file path name ...")
    CamelCase = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
    for Record in EccGlobalData.gDb.TblFile.Exec("""select ID, Name from File"""):
        if CamelCase.match(Record[1]):
            continue
        if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_PATH_NAME, Record[1]):
            EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_PATH_NAME, OtherMsg="The file path [%s] does not follow the rules" % (Record[1]), BelongsToTable='File', BelongsToItem=Record[0])
# Rule for path name, variable name and function name
# 1. First character should be upper case
# 2. Existing lower case in a word
# 3. No space existence
# 4. Global variable name must start with a 'g'
# Check whether the variable name followed the rule
def NamingConventionCheckVariableName(self, FileTable):
    """Check variable names in a file's identifier table against the
    CamelCase rule ('g'/'m' prefixes allowed); a pointer variable whose
    name starts with 'p' is tolerated."""
    if EccGlobalData.gConfig.NamingConventionCheckVariableName != '1' and EccGlobalData.gConfig.NamingConventionCheckAll != '1' and EccGlobalData.gConfig.CheckAll != '1':
        return
    EdkLogger.quiet("Checking naming convention of variable name ...")
    NamePattern = re.compile(r'^[A-Zgm]+\S*[a-z]\S*$')
    SqlCommand = """select ID, Name, Modifier from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_VARIABLE)
    for Record in EccGlobalData.gDb.TblFile.Exec(SqlCommand):
        Var = Record[1]
        Modifier = Record[2]
        if Var.startswith('CONST'):
            Var = Var[5:].lstrip()
        PointerStyle = Modifier.endswith('*') and Var.startswith('p')
        if NamePattern.match(Var) or PointerStyle:
            continue
        if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Record[1]):
            EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, OtherMsg="The variable name [%s] does not follow the rules" % (Record[1]), BelongsToTable=FileTable, BelongsToItem=Record[0])
# Rule for path name, variable name and function name
# 1. First character should be upper case
# 2. Existing lower case in a word
# 3. No space existence
# Check whether the function name followed the rule
def NamingConventionCheckFunctionName(self):
    """Check every function name in the Function table against the
    CamelCase rule (starts upper case, contains a lower-case letter,
    no whitespace)."""
    if EccGlobalData.gConfig.NamingConventionCheckFunctionName != '1' and EccGlobalData.gConfig.NamingConventionCheckAll != '1' and EccGlobalData.gConfig.CheckAll != '1':
        return
    EdkLogger.quiet("Checking naming convention of function name ...")
    CamelCase = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
    for Record in EccGlobalData.gDb.TblFile.Exec("""select ID, Name from Function"""):
        if CamelCase.match(Record[1]):
            continue
        if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME, Record[1]):
            EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME, OtherMsg="The function name [%s] does not follow the rules" % (Record[1]), BelongsToTable='Function', BelongsToItem=Record[0])
# Check whether NO use short variable name with single character
def NamingConventionCheckSingleCharacterVariable(self, FileTable):
    """Report variables whose name, after stripping pointer stars, is a
    single character."""
    if EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable != '1' and EccGlobalData.gConfig.NamingConventionCheckAll != '1' and EccGlobalData.gConfig.CheckAll != '1':
        return
    EdkLogger.quiet("Checking naming convention of single character variable name ...")
    SqlCommand = """select ID, Name from %s where Model = %s""" % (FileTable, MODEL_IDENTIFIER_VARIABLE)
    for Record in EccGlobalData.gDb.TblFile.Exec(SqlCommand):
        BareName = Record[1].replace('*', '')
        if len(BareName) != 1:
            continue
        if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE, Record[1]):
            EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE, OtherMsg="The variable name [%s] does not follow the rules" % (Record[1]), BelongsToTable=FileTable, BelongsToItem=Record[0])
def FindPara(FilePath, Para, CallingLine):
    """Search upward from CallingLine for the nearest assignment to Para.

    Scans the file backwards starting at the line just above CallingLine
    and returns the first line of the form 'Para = ...'; returns '' when
    no such assignment exists.
    """
    # BUG FIX: use a context manager so the file handle is always closed
    # (the original leaked the handle returned by open()).
    with open(FilePath) as SourceFile:
        Lines = SourceFile.readlines()
    # BUG FIX: the original 'range(CallingLine - 1, 0, -1)' stopped before
    # index 0, so an assignment on the very first line of the file was never
    # found; also removed the unreachable 'break' after 'return'.
    for Index in range(CallingLine - 1, -1, -1):
        # Find the nearest statement assigning to Para.
        Line = Lines[Index].strip()
        if Line.startswith('%s = ' % Para):
            return Line
    return ''
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # BUG FIX: the original rebound the name 'Check' to an instance
    # ('Check = Check()'), clobbering the class object; use a distinct
    # variable name for the instance instead.
    EccChecker = Check()
    EccChecker.Check()
| edk2-master | BaseTools/Source/Python/Ecc/Check.py |
## @file
# This file is used to create a database used by ECC tool
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import sqlite3
import Common.LongFilePathOs as os, time
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.TableDataModel import TableDataModel
from Table.TableFile import TableFile
from Table.TableFunction import TableFunction
from Table.TablePcd import TablePcd
from Table.TableIdentifier import TableIdentifier
from Table.TableReport import TableReport
from Ecc.MetaFileWorkspace.MetaFileTable import ModuleTable
from Ecc.MetaFileWorkspace.MetaFileTable import PackageTable
from Ecc.MetaFileWorkspace.MetaFileTable import PlatformTable
from Table.TableFdf import TableFdf
##
# Static definitions
#
DATABASE_PATH = "Ecc.db"    # default file name of the ECC report database (created in CWD)
## Database
#
# This class defined the ECC database
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
# @var Conn: Connection of the ECC database
# @var Cur: Cursor of the connection
# @var TblDataModel: Local instance for TableDataModel
#
class Database(object):
    ## The constructor
    #
    # Only records the path; the connection and table wrappers are created
    # lazily by InitDatabase().
    #
    # @param DbPath: A string for the path of the ECC database
    #
    def __init__(self, DbPath):
        self.DbPath = DbPath
        self.Conn = None            # sqlite3 connection, set by InitDatabase()
        self.Cur = None             # cursor shared by every table wrapper
        self.TblDataModel = None
        self.TblFile = None
        self.TblFunction = None
        self.TblIdentifier = None
        self.TblPcd = None
        self.TblReport = None
        self.TblInf = None
        self.TblDec = None
        self.TblDsc = None
        self.TblFdf = None

    ## Initialize ECC database
    #
    # 1. Delete all old existing tables
    # 2. Create new tables
    # 3. Initialize table DataModel
    #
    # @param NewDatabase: when True the old database file is removed and all
    #                     tables are re-created and re-initialized; when
    #                     False an existing database is reopened as-is.
    #
    def InitDatabase(self, NewDatabase = True):
        EdkLogger.verbose("\nInitialize ECC database started ...")
        #
        # Drop all old existing tables
        #
        if NewDatabase:
            if os.path.exists(self.DbPath):
                os.remove(self.DbPath)
        self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
        self.Conn.execute("PRAGMA page_size=4096")
        self.Conn.execute("PRAGMA synchronous=OFF")
        # to avoid non-ascii character conversion error
        self.Conn.text_factory = str
        self.Cur = self.Conn.cursor()

        self.TblDataModel = TableDataModel(self.Cur)
        self.TblFile = TableFile(self.Cur)
        self.TblFunction = TableFunction(self.Cur)
        self.TblIdentifier = TableIdentifier(self.Cur)
        self.TblPcd = TablePcd(self.Cur)
        self.TblReport = TableReport(self.Cur)
        self.TblInf = ModuleTable(self.Cur)
        self.TblDec = PackageTable(self.Cur)
        self.TblDsc = PlatformTable(self.Cur)
        self.TblFdf = TableFdf(self.Cur)

        #
        # Create new tables
        #
        if NewDatabase:
            self.TblDataModel.Create()
            self.TblFile.Create()
            self.TblFunction.Create()
            self.TblPcd.Create()
            self.TblReport.Create()
            self.TblInf.Create()
            self.TblDec.Create()
            self.TblDsc.Create()
            self.TblFdf.Create()

        #
        # Init each table's ID
        #
        self.TblDataModel.InitID()
        self.TblFile.InitID()
        self.TblFunction.InitID()
        self.TblPcd.InitID()
        self.TblReport.InitID()
        self.TblInf.InitID()
        self.TblDec.InitID()
        self.TblDsc.InitID()
        self.TblFdf.InitID()

        #
        # Initialize table DataModel
        #
        if NewDatabase:
            self.TblDataModel.InitTable()

        EdkLogger.verbose("Initialize ECC database ... DONE!")

    ## Query a table
    #
    # @param Table: The instance of the table to be queried
    #
    def QueryTable(self, Table):
        Table.Query()

    ## Close entire database
    #
    # Commit all first
    # Close the connection and cursor
    #
    def Close(self):
        #
        # Commit to file
        #
        self.Conn.commit()
        #
        # Close connection and cursor
        #
        self.Cur.close()
        self.Conn.close()

    ## Insert one file information
    #
    # Insert one file's information to the database
    # 1. Create a record in TableFile
    # 2. Create functions one by one
    #    2.1 Create variables of function one by one
    #    2.2 Create pcds of function one by one
    # 3. Create variables one by one
    # 4. Create pcds one by one
    #
    # @param File: a DataClass.FileClass object describing the parsed file
    #
    def InsertOneFile(self, File):
        #
        # Insert a record for file
        #
        FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)

        if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:
            # Each C/H file gets its own per-file identifier table named
            # 'Identifier<FileID>'.
            IdTable = TableIdentifier(self.Cur)
            IdTable.Table = "Identifier%s" % FileID
            IdTable.Create()
            #
            # Insert function of file
            #
            for Function in File.FunctionList:
                FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
                                    Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
                                    Function.BodyStartLine, Function.BodyStartColumn, FileID, \
                                    Function.FunNameStartLine, Function.FunNameStartColumn)
                #
                # Insert Identifier of function
                #
                for Identifier in Function.IdentifierList:
                    IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                                    FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
                #
                # Insert Pcd of function
                #
                for Pcd in Function.PcdList:
                    PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
                                    FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
            #
            # Insert Identifier of file
            #
            for Identifier in File.IdentifierList:
                IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                                FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
            #
            # Insert Pcd of file
            #
            for Pcd in File.PcdList:
                PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
                                FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)

        EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)

    ## UpdateIdentifierBelongsToFunction
    #
    # Update the field "BelongsToFunction" for each Identifier
    #
    # NOTE: disabled row-by-row variant kept for reference; the active
    # implementation below issues one UPDATE per function instead.
    #
    def UpdateIdentifierBelongsToFunction_disabled(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")

        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
        EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
        self.Cur.execute(SqlCommand)
        Records = self.Cur.fetchall()
        for Record in Records:
            IdentifierID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            Model = Record[4]

            #
            # Check whether an identifier belongs to a function
            #
            EdkLogger.debug(4, "For common identifiers ... ")
            SqlCommand = """select ID from Function
                        where StartLine < %s and EndLine > %s
                        and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
            EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
            self.Cur.execute(SqlCommand)
            IDs = self.Cur.fetchall()
            for ID in IDs:
                SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
                EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                self.Cur.execute(SqlCommand)

            #
            # Check whether the identifier is a function header
            #
            EdkLogger.debug(4, "For function headers ... ")
            if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
                SqlCommand = """select ID from Function
                        where StartLine = %s + 1
                        and BelongsToFile = %s""" % (EndLine, BelongsToFile)
                EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                self.Cur.execute(SqlCommand)
                IDs = self.Cur.fetchall()
                for ID in IDs:
                    SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
                    EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                    self.Cur.execute(SqlCommand)

        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")

    ## UpdateIdentifierBelongsToFunction
    #
    # Update the field "BelongsToFunction" for each Identifier
    #
    # For every function, identifiers whose line range lies inside the
    # function body are tagged with the function's ID; the comment ending
    # on the line just above the function start is re-modeled as the
    # function header.
    #
    def UpdateIdentifierBelongsToFunction(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")

        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
        Records = self.TblFunction.Exec(SqlCommand)
        Data1 = []
        Data2 = []
        for Record in Records:
            FunctionID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            #Data1.append(("'file%s'" % BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine))
            #Data2.append(("'file%s'" % BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1))

            SqlCommand = """Update Identifier%s set BelongsToFunction = %s where BelongsToFile = %s and StartLine > %s and EndLine < %s""" % \
                        (BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine)
            self.TblIdentifier.Exec(SqlCommand)

            SqlCommand = """Update Identifier%s set BelongsToFunction = %s, Model = %s where BelongsToFile = %s and Model = %s and EndLine = %s""" % \
                         (BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1)
            self.TblIdentifier.Exec(SqlCommand)
#       #
#       # Check whether an identifier belongs to a function
#       #
#       print Data1
#       SqlCommand = """Update ? set BelongsToFunction = ? where BelongsToFile = ? and StartLine > ? and EndLine < ?"""
#       print SqlCommand
#       EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
#       self.Cur.executemany(SqlCommand, Data1)
#
#       #
#       # Check whether the identifier is a function header
#       #
#       EdkLogger.debug(4, "For function headers ... ")
#       SqlCommand = """Update ? set BelongsToFunction = ?, Model = ? where BelongsToFile = ? and Model = ? and EndLine = ?"""
#       EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
#       self.Cur.executemany(SqlCommand, Data2)
#
#       EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Self-test: build a fresh database, insert one synthetic C file with a
    # function and several comment identifiers (including names containing
    # quotes, to exercise SQL escaping), then dump every table.
    EdkLogger.Initialize()
    #EdkLogger.SetLevel(EdkLogger.VERBOSE)
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))

    Db = Database(DATABASE_PATH)
    Db.InitDatabase()
    Db.QueryTable(Db.TblDataModel)

    identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
    identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
    identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
    identifier4 = DataClass.IdentifierClass(-1, '', '', "i1'", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 77, 43, 88, 43)
    fun1 = DataClass.FunctionClass(-1, '', '', 'fun1', '', 21, 2, 60, 45, 1, 23, 0, [], [])
    file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
    Db.InsertOneFile(file)
    Db.UpdateIdentifierBelongsToFunction()

    Db.QueryTable(Db.TblFile)
    Db.QueryTable(Db.TblFunction)
    Db.QueryTable(Db.TblPcd)
    Db.QueryTable(Db.TblIdentifier)

    Db.Close()
    EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
| edk2-master | BaseTools/Source/Python/Ecc/Database.py |
## @file
# fragments of source file
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import re
import Common.LongFilePathOs as os
from Ecc.ParserWarning import Warning
from Common.LongFilePathSupport import OpenLongFilePath as open
# Module-level collection points populated by CodeFragmentCollector while a
# source file is parsed; each list gathers code fragments of one syntactic
# kind, to be consumed later by the identifier-extraction code.
CommentList = []
PPDirectiveList = []
PredicateExpressionList = []
FunctionDefinitionList = []
VariableDeclarationList = []
EnumerationDefinitionList = []
StructUnionDefinitionList = []
TypedefDefinitionList = []
FunctionCallingList = []
## record file data when parsing source
#
# May raise Exception when opening file.
#
class FileProfile :
    ## The constructor
    #
    # Reads the entire file into FileLinesListFromFile; FileLinesList is
    # left empty for later processing stages to fill in.
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.FileLinesList = []
        self.FileLinesListFromFile = []
        try:
            with open(FileName, "r") as fsock:
                self.FileLinesListFromFile = fsock.readlines()
        except IOError:
            raise Warning("Error when opening file %s" % FileName)
| edk2-master | BaseTools/Source/Python/Ecc/FileProfile.py |
## @file
# Python 'Ecc' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Ecc/__init__.py |
## @file
# This file is used to be the c coding style checking of ECC tool
#
# Copyright (c) 2009 - 2019, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2020, Arm Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
from __future__ import absolute_import
import sys
import Common.LongFilePathOs as os
import re
import string
from Ecc import CodeFragmentCollector
from Ecc import FileProfile
from CommonDataClass import DataClass
from Ecc import Database
from Common import EdkLogger
from Ecc.EccToolError import *
from Ecc import EccGlobalData
from Ecc import MetaDataParser
# Module-level caches shared by the C-coding-style checks: include-file
# relationships, include search paths, and composite (struct/union) type
# lookup tables.
IncludeFileListDict = {}
AllIncludeFileListDict = {}
IncludePathListDict = {}
ComplexTypeDict = {}
SUDict = {}
# Macro names that look like function calls but must not be treated as such.
IgnoredKeywordList = ['EFI_ERROR']
def GetIgnoredDirListPattern():
    """Build a regex matching any path containing one of the configured
    skip directories (plus '.svn') as a path component."""
    SkipDirs = list(EccGlobalData.gConfig.SkipDirList) + ['.svn']
    return re.compile(r'.*[\\/](?:%s)[\\/]?.*' % '|'.join(SkipDirs))
def GetFuncDeclPattern():
    """Return a compiled regex matching a function declarator such as
    'EFIAPI FooBar (ARGS)'; the calling-convention prefix is optional and
    the argument list may span lines (DOTALL)."""
    return re.compile(r'(?:EFIAPI|EFI_BOOT_SERVICE|EFI_RUNTIME_SERVICE)?\s*[_\w]+\s*\(.*\)$', re.DOTALL)
def GetArrayPattern():
    r"""Return a compiled regex matching an array declarator like 'Name[10]'.

    BUG FIX: the original pattern '[_\w]*\s*[\[.*\]]+' used a CHARACTER
    CLASS '[\[.*\]]' that matches any single '[', '.', '*' or ']'
    character, so plain names containing a dot (e.g. 'Foo.Bar') were
    misclassified as arrays. The bracketed subscript is now a real
    group '(\[.*\])+'.
    """
    return re.compile(r'[_\w]*\s*(\[.*\])+')
def GetTypedefFuncPointerPattern():
    r"""Return a compiled regex matching a function-pointer typedef
    declarator, e.g. 'EFI_STATUS (EFIAPI *FOO) (ARGS)'.

    FIX: the pattern is now a raw string; the original plain string relied
    on Python keeping invalid escapes like '\w' and '\(' literal, which
    emits SyntaxWarning/DeprecationWarning on modern interpreters.
    """
    return re.compile(r'[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
def GetDB():
    """Return the global ECC database object."""
    return EccGlobalData.gDb
def GetConfig():
    """Return the global ECC configuration object."""
    return EccGlobalData.gConfig
def PrintErrorMsg(ErrorType, Msg, TableName, ItemId):
    """Normalize whitespace in Msg and record it in the report table.

    Line breaks are removed and every whitespace run collapses to one
    space; each word keeps a single trailing space, matching the format
    produced by the original word-by-word concatenation.
    """
    Words = Msg.replace('\n', '').replace('\r', '').split()
    Normalized = ''.join(Word + ' ' for Word in Words)
    GetDB().TblReport.Insert(ErrorType, OtherMsg=Normalized, BelongsToTable=TableName, BelongsToItem=ItemId)
def GetIdType(Str):
    """Map a preprocessor directive line to its identifier model constant;
    unrecognized or malformed directives yield MODEL_UNKNOWN."""
    # Insert a space after '#' so '#include' style (no space) still splits
    # into two tokens.
    Tokens = Str.replace('#', '# ').split()
    if len(Tokens) < 2:
        return DataClass.MODEL_UNKNOWN
    DirectiveModels = {
        'include': DataClass.MODEL_IDENTIFIER_INCLUDE,
        'define': DataClass.MODEL_IDENTIFIER_MACRO_DEFINE,
        'ifdef': DataClass.MODEL_IDENTIFIER_MACRO_IFDEF,
        'ifndef': DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF,
        'endif': DataClass.MODEL_IDENTIFIER_MACRO_ENDIF,
        'pragma': DataClass.MODEL_IDENTIFIER_MACRO_PROGMA,
    }
    return DirectiveModels.get(Tokens[1], DataClass.MODEL_UNKNOWN)
def SuOccurInTypedef (Su, TdList):
    """Return True when the struct/union fragment Su shares its start and
    end lines with any typedef in TdList (i.e. it is part of a typedef)."""
    return any(Su.StartPos[0] == Td.StartPos[0] and Su.EndPos[0] == Td.EndPos[0]
               for Td in TdList)
def GetIdentifierList():
    """Collect every identifier found by the fragment collector and return
    them as DataClass.IdentifierClass objects.

    Sources (all module-level lists on FileProfile): comments, PP
    directives, predicate expressions, variable/function declarations,
    enums, structs/unions, typedefs and function calls. Variable
    declarations get the most processing: modifiers ('*', storage words)
    are peeled off while the exact line/column of the name is tracked.
    """
    IdList = []

    # Comments are recorded verbatim.
    for comment in FileProfile.CommentList:
        IdComment = DataClass.IdentifierClass(-1, '', '', '', comment.Content, DataClass.MODEL_IDENTIFIER_COMMENT, -1, -1, comment.StartPos[0], comment.StartPos[1], comment.EndPos[0], comment.EndPos[1])
        IdList.append(IdComment)

    # Preprocessor directives: the concrete model depends on the keyword.
    for pp in FileProfile.PPDirectiveList:
        Type = GetIdType(pp.Content)
        IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0], pp.StartPos[1], pp.EndPos[0], pp.EndPos[1])
        IdList.append(IdPP)

    # Predicate expressions (conditions of if/while/for...).
    for pe in FileProfile.PredicateExpressionList:
        IdPE = DataClass.IdentifierClass(-1, '', '', '', pe.Content, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION, -1, -1, pe.StartPos[0], pe.StartPos[1], pe.EndPos[0], pe.EndPos[1])
        IdList.append(IdPE)

    FuncDeclPattern = GetFuncDeclPattern()
    ArrayPattern = GetArrayPattern()
    for var in FileProfile.VariableDeclarationList:
        DeclText = var.Declarator.lstrip()
        # Function-pointer declarators are handled by the typedef pass.
        FuncPointerPattern = GetTypedefFuncPointerPattern()
        if FuncPointerPattern.match(DeclText):
            continue
        VarNameStartLine = var.NameStartPos[0]
        VarNameStartColumn = var.NameStartPos[1]
        FirstChar = DeclText[0]
        # Strip leading '*'/whitespace/line breaks while keeping the exact
        # line and column of the variable name in sync.
        while not FirstChar.isalpha() and FirstChar != '_':
            if FirstChar == '*':
                var.Modifier += '*'
                VarNameStartColumn += 1
                DeclText = DeclText.lstrip('*')
            elif FirstChar == '\r':
                DeclText = DeclText.lstrip('\r\n').lstrip('\r')
                VarNameStartLine += 1
                VarNameStartColumn = 0
            elif FirstChar == '\n':
                DeclText = DeclText.lstrip('\n')
                VarNameStartLine += 1
                VarNameStartColumn = 0
            elif FirstChar == ' ':
                DeclText = DeclText.lstrip(' ')
                VarNameStartColumn += 1
            elif FirstChar == '\t':
                # Tabs are counted as 8 columns.
                DeclText = DeclText.lstrip('\t')
                VarNameStartColumn += 8
            else:
                DeclText = DeclText[1:]
                VarNameStartColumn += 1
            FirstChar = DeclText[0]

        var.Declarator = DeclText
        if FuncDeclPattern.match(var.Declarator):
            # The "variable" is really a function declaration: split off
            # the name, fold everything before it into the modifier, and
            # advance the tracked name position across the skipped prefix.
            DeclSplitList = var.Declarator.split('(')
            FuncName = DeclSplitList[0].strip()
            FuncNamePartList = FuncName.split()
            if len(FuncNamePartList) > 1:
                FuncName = FuncNamePartList[-1].strip()
                NameStart = DeclSplitList[0].rfind(FuncName)
                var.Declarator = var.Declarator[NameStart:]
                if NameStart > 0:
                    var.Modifier += ' ' + DeclSplitList[0][0:NameStart]
                    Index = 0
                    PreChar = ''
                    while Index < NameStart:
                        FirstChar = DeclSplitList[0][Index]
                        if DeclSplitList[0][Index:].startswith('EFIAPI'):
                            Index += 6
                            VarNameStartColumn += 6
                            PreChar = ''
                            continue
                        elif FirstChar == '\r':
                            Index += 1
                            VarNameStartLine += 1
                            VarNameStartColumn = 0
                        elif FirstChar == '\n':
                            Index += 1
                            # Don't double-count a '\r\n' pair.
                            if PreChar != '\r':
                                VarNameStartLine += 1
                                VarNameStartColumn = 0
                        elif FirstChar == ' ':
                            Index += 1
                            VarNameStartColumn += 1
                        elif FirstChar == '\t':
                            Index += 1
                            VarNameStartColumn += 8
                        else:
                            Index += 1
                            VarNameStartColumn += 1
                        PreChar = FirstChar
            IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, FuncName, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
            IdList.append(IdVar)
            continue

        if var.Declarator.find('{') == -1:
            # Plain declaration, possibly comma-separated; each part may
            # carry an '=' initializer and/or an array subscript.
            for decl in var.Declarator.split(','):
                DeclList = decl.split('=')
                Name = DeclList[0].strip()
                if ArrayPattern.match(Name):
                    LSBPos = var.Declarator.find('[')
                    var.Modifier += ' ' + Name[LSBPos:]
                    Name = Name[0:LSBPos]
                IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
                IdList.append(IdVar)
        else:
            # Brace initializer: a single declaration, split only on '='.
            DeclList = var.Declarator.split('=')
            Name = DeclList[0].strip()
            if ArrayPattern.match(Name):
                LSBPos = var.Declarator.find('[')
                var.Modifier += ' ' + Name[LSBPos:]
                Name = Name[0:LSBPos]
            IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
            IdList.append(IdVar)

    # Enums: name sits between 'enum' and '{', value is the member list.
    for enum in FileProfile.EnumerationDefinitionList:
        LBPos = enum.Content.find('{')
        RBPos = enum.Content.find('}')
        Name = enum.Content[4:LBPos].strip()
        Value = enum.Content[LBPos + 1:RBPos]
        IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0], enum.StartPos[1], enum.EndPos[0], enum.EndPos[1])
        IdList.append(IdEnum)

    # Structs/unions, skipping those that are part of a typedef (the
    # typedef pass below covers them).
    for su in FileProfile.StructUnionDefinitionList:
        if SuOccurInTypedef(su, FileProfile.TypedefDefinitionList):
            continue
        Type = DataClass.MODEL_IDENTIFIER_STRUCTURE
        SkipLen = 6
        if su.Content.startswith('union'):
            Type = DataClass.MODEL_IDENTIFIER_UNION
            SkipLen = 5
        LBPos = su.Content.find('{')
        RBPos = su.Content.find('}')
        if LBPos == -1 or RBPos == -1:
            # Forward declaration: name only, no body.
            Name = su.Content[SkipLen:].strip()
            Value = ''
        else:
            Name = su.Content[SkipLen:LBPos].strip()
            Value = su.Content[LBPos:RBPos + 1]
        IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0], su.StartPos[1], su.EndPos[0], su.EndPos[1])
        IdList.append(IdPE)

    TdFuncPointerPattern = GetTypedefFuncPointerPattern()
    for td in FileProfile.TypedefDefinitionList:
        Modifier = ''
        Name = td.ToType
        Value = td.FromType
        if TdFuncPointerPattern.match(td.ToType):
            # Function-pointer typedef: name is inside '(* Name)', value is
            # marked 'FP' plus the parameter list.
            Modifier = td.FromType
            LBPos = td.ToType.find('(')
            TmpStr = td.ToType[LBPos + 1:].strip()
            StarPos = TmpStr.find('*')
            if StarPos != -1:
                Modifier += ' ' + TmpStr[0:StarPos]
            while TmpStr[StarPos] == '*':
#                Modifier += ' ' + '*'
                StarPos += 1
            TmpStr = TmpStr[StarPos:].strip()
            RBPos = TmpStr.find(')')
            Name = TmpStr[0:RBPos]
            Value = 'FP' + TmpStr[RBPos + 1:]
        else:
            # Pointer typedef: move the stars from the name to the value.
            while Name.startswith('*'):
                Value += ' ' + '*'
                Name = Name.lstrip('*').strip()

        if Name.find('[') != -1:
            # Array typedef: move the subscript from the name to the value.
            LBPos = Name.find('[')
            RBPos = Name.rfind(']')
            Value += Name[LBPos : RBPos + 1]
            Name = Name[0 : LBPos]

        IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0], td.StartPos[1], td.EndPos[0], td.EndPos[1])
        IdList.append(IdTd)

    # Function calls are recorded verbatim with their argument list.
    for funcCall in FileProfile.FunctionCallingList:
        IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0], funcCall.StartPos[1], funcCall.EndPos[0], funcCall.EndPos[1])
        IdList.append(IdFC)
    return IdList
def StripNonAlnumChars(Str):
    """Return Str with every character that is not alphanumeric or '_'
    removed.

    IDIOM FIX: uses str.join over a generator; the original character-by-
    character '+=' concatenation is quadratic in the worst case.
    """
    return ''.join(Char for Char in Str if Char.isalnum() or Char == '_')
def GetParamList(FuncDeclarator, FuncNameLine=0, FuncNameOffset=0):
    """Parse a function declarator and build identifier records for its parameters.

    The declarator text is scanned character by character so that each
    parameter's begin/end line and column can be computed relative to the
    position of the function name (FuncNameLine / FuncNameOffset).

    @param FuncDeclarator: full declarator text, e.g. 'Foo (\\n  IN UINTN A\\n  )'
    @param FuncNameLine: line number at which the function name starts
    @param FuncNameOffset: column offset at which the function name starts
    @return list of DataClass.IdentifierClass parameter records (empty when
            no '(' is present in the declarator)
    """
    FuncDeclarator = StripComments(FuncDeclarator)
    ParamIdList = []
    #DeclSplitList = FuncDeclarator.split('(')
    LBPos = FuncDeclarator.find('(')
    #if len(DeclSplitList) < 2:
    if LBPos == -1:
        return ParamIdList
    #FuncName = DeclSplitList[0]
    FuncName = FuncDeclarator[0:LBPos]
    #ParamStr = DeclSplitList[1].rstrip(')')
    ParamStr = FuncDeclarator[LBPos + 1:].rstrip(')')
    LineSkipped = 0
    OffsetSkipped = 0
    TailChar = FuncName[-1]
    # Strip the whitespace between the function name and '(', counting the
    # skipped lines/columns so the parameter positions stay accurate.
    while not TailChar.isalpha() and TailChar != '_':
        if TailChar == '\n':
            FuncName = FuncName.rstrip('\r\n').rstrip('\n')
            LineSkipped += 1
            OffsetSkipped = 0
        elif TailChar == '\r':
            FuncName = FuncName.rstrip('\r')
            LineSkipped += 1
            OffsetSkipped = 0
        elif TailChar == ' ':
            FuncName = FuncName.rstrip(' ')
            OffsetSkipped += 1
        elif TailChar == '\t':
            FuncName = FuncName.rstrip('\t')
            # a tab is counted as 8 columns throughout this module
            OffsetSkipped += 8
        else:
            FuncName = FuncName[:-1]
        TailChar = FuncName[-1]
    OffsetSkipped += 1 #skip '('
    for p in ParamStr.split(','):
        ListP = p.split()
        if len(ListP) == 0:
            continue
        # The last whitespace-separated token is normally the parameter name;
        # everything before it is the modifier/type text.
        ParamName = ListP[-1]
        DeclText = ParamName.strip()
        RightSpacePos = p.rfind(ParamName)
        ParamModifier = p[0:RightSpacePos]
        if ParamName == 'OPTIONAL':
            # 'OPTIONAL' is a decoration, not the name; the real name is the
            # token before it (when one exists).
            if ParamModifier == '':
                ParamModifier += ' ' + 'OPTIONAL'
                DeclText = ''
            else:
                ParamName = ListP[-2]
                DeclText = ParamName.strip()
                RightSpacePos = p.rfind(ParamName)
                ParamModifier = p[0:RightSpacePos]
                ParamModifier += 'OPTIONAL'
        # Move leading '*' pointer markers from the name into the modifier.
        while DeclText.startswith('*'):
            ParamModifier += ' ' + '*'
            DeclText = DeclText.lstrip('*').strip()
        ParamName = DeclText
        # ignore array length if exists.
        LBIndex = ParamName.find('[')
        if LBIndex != -1:
            ParamName = ParamName[0:LBIndex]
        # Count lines/columns up to the parameter name to compute its begin
        # position.
        Start = RightSpacePos
        Index = 0
        PreChar = ''
        while Index < Start:
            FirstChar = p[Index]
            if FirstChar == '\r':
                Index += 1
                LineSkipped += 1
                OffsetSkipped = 0
            elif FirstChar == '\n':
                Index += 1
                # a '\r\n' pair counts as a single line break
                if PreChar != '\r':
                    LineSkipped += 1
                    OffsetSkipped = 0
            elif FirstChar == ' ':
                Index += 1
                OffsetSkipped += 1
            elif FirstChar == '\t':
                Index += 1
                OffsetSkipped += 8
            else:
                Index += 1
                OffsetSkipped += 1
            PreChar = FirstChar
        ParamBeginLine = FuncNameLine + LineSkipped
        ParamBeginOffset = FuncNameOffset + OffsetSkipped
        # Continue counting through the rest of this parameter's text to
        # compute its end position.
        Index = Start + len(ParamName)
        PreChar = ''
        while Index < len(p):
            FirstChar = p[Index]
            if FirstChar == '\r':
                Index += 1
                LineSkipped += 1
                OffsetSkipped = 0
            elif FirstChar == '\n':
                Index += 1
                if PreChar != '\r':
                    LineSkipped += 1
                    OffsetSkipped = 0
            elif FirstChar == ' ':
                Index += 1
                OffsetSkipped += 1
            elif FirstChar == '\t':
                Index += 1
                OffsetSkipped += 8
            else:
                Index += 1
                OffsetSkipped += 1
            PreChar = FirstChar
        ParamEndLine = FuncNameLine + LineSkipped
        ParamEndOffset = FuncNameOffset + OffsetSkipped
        # variadic '...' is kept verbatim; other names are reduced to
        # [A-Za-z0-9_] characters only
        if ParamName != '...':
            ParamName = StripNonAlnumChars(ParamName)
        IdParam = DataClass.IdentifierClass(-1, ParamModifier, '', ParamName, '', DataClass.MODEL_IDENTIFIER_PARAMETER, -1, -1, ParamBeginLine, ParamBeginOffset, ParamEndLine, ParamEndOffset)
        ParamIdList.append(IdParam)
        OffsetSkipped += 1 #skip ','
    return ParamIdList
def GetFunctionList():
    """Build FunctionClass objects from the parsed function definitions.

    Reads FileProfile.FunctionDefinitionList (module-level parse state —
    assumed populated by the most recent ParseFile run; TODO confirm) and,
    for each definition, trims leading pointer/whitespace characters from
    the declarator while tracking the function-name start line/column.

    @return list of DataClass.FunctionClass records
    """
    FuncObjList = []
    for FuncDef in FileProfile.FunctionDefinitionList:
        ParamIdList = []
        DeclText = FuncDef.Declarator.lstrip()
        FuncNameStartLine = FuncDef.NamePos[0]
        FuncNameStartColumn = FuncDef.NamePos[1]
        FirstChar = DeclText[0]
        # Skip leading non-identifier characters, keeping the name position
        # in sync; '*' markers are folded into the modifier.
        while not FirstChar.isalpha() and FirstChar != '_':
            if FirstChar == '*':
                FuncDef.Modifier += '*'
                FuncNameStartColumn += 1
                DeclText = DeclText.lstrip('*')
            elif FirstChar == '\r':
                DeclText = DeclText.lstrip('\r\n').lstrip('\r')
                FuncNameStartLine += 1
                FuncNameStartColumn = 0
            elif FirstChar == '\n':
                DeclText = DeclText.lstrip('\n')
                FuncNameStartLine += 1
                FuncNameStartColumn = 0
            elif FirstChar == ' ':
                DeclText = DeclText.lstrip(' ')
                FuncNameStartColumn += 1
            elif FirstChar == '\t':
                DeclText = DeclText.lstrip('\t')
                # a tab is counted as 8 columns
                FuncNameStartColumn += 8
            else:
                DeclText = DeclText[1:]
                FuncNameStartColumn += 1
            FirstChar = DeclText[0]
        FuncDef.Declarator = DeclText
        DeclSplitList = FuncDef.Declarator.split('(')
        if len(DeclSplitList) < 2:
            continue
        FuncName = DeclSplitList[0]
        FuncNamePartList = FuncName.split()
        if len(FuncNamePartList) > 1:
            # everything before the last token is modifier text
            FuncName = FuncNamePartList[-1]
        NameStart = DeclSplitList[0].rfind(FuncName)
        if NameStart > 0:
            FuncDef.Modifier += ' ' + DeclSplitList[0][0:NameStart]
            Index = 0
            PreChar = ''
            # Walk the modifier text to advance the name's line/column.
            while Index < NameStart:
                FirstChar = DeclSplitList[0][Index]
                if DeclSplitList[0][Index:].startswith('EFIAPI'):
                    # 'EFIAPI' is consumed as a 6-column token
                    Index += 6
                    FuncNameStartColumn += 6
                    PreChar = ''
                    continue
                elif FirstChar == '\r':
                    Index += 1
                    FuncNameStartLine += 1
                    FuncNameStartColumn = 0
                elif FirstChar == '\n':
                    Index += 1
                    # a '\r\n' pair counts as a single line break
                    if PreChar != '\r':
                        FuncNameStartLine += 1
                        FuncNameStartColumn = 0
                elif FirstChar == ' ':
                    Index += 1
                    FuncNameStartColumn += 1
                elif FirstChar == '\t':
                    Index += 1
                    FuncNameStartColumn += 8
                else:
                    Index += 1
                    FuncNameStartColumn += 1
                PreChar = FirstChar
        FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0], FuncDef.StartPos[1], FuncDef.EndPos[0], FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, [], FuncNameStartLine, FuncNameStartColumn)
        FuncObjList.append(FuncObj)
    return FuncObjList
def GetFileModificationTimeFromDB(FullFileName):
    """Look up the recorded modification timestamp of a file.

    Queries the File table for the TimeStamp of the row whose FullPath
    equals FullFileName.

    @param FullFileName: full path to look up
    @return the stored timestamp, or 0.0 when no row matches
    """
    SqlStatement = """ select TimeStamp
                       from File
                       where FullPath = \'%s\'
                   """ % (FullFileName)
    Rows = GetDB().TblFile.Exec(SqlStatement)
    TimeValue = 0.0
    for Row in Rows:
        TimeValue = Row[0]
    return TimeValue
def CollectSourceCodeDataIntoDB(RootDir):
    """Walk RootDir, parse every .c/.h file found and store results in the ECC DB.

    Directories matching the configured ignore pattern and files in the
    configured skip list are excluded.  Symbolic links to directories are
    resolved and re-queued so their contents are scanned as well.  Files
    hitting an unrecoverable Unicode parse error are re-parsed with
    preprocessor directives cleared and reported at the end.

    @param RootDir: root directory of the source tree to scan
    """
    FileObjList = []
    # FIX: renamed from 'tuple' — the original shadowed the builtin type.
    WalkIterator = os.walk(RootDir)
    IgnoredPattern = GetIgnoredDirListPattern()
    ParseErrorFileList = []
    TokenReleaceList = EccGlobalData.gConfig.TokenReleaceList
    TokenReleaceList.extend(['L",\\\""'])

    for dirpath, dirnames, filenames in WalkIterator:
        if IgnoredPattern.match(dirpath.upper()):
            continue
        for Dir in dirnames:
            Dirname = os.path.join(dirpath, Dir)
            if os.path.islink(Dirname):
                Dirname = os.path.realpath(Dirname)
                if os.path.isdir(Dirname):
                    # symlinks to directories are treated as directories
                    dirnames.remove(Dir)
                    dirnames.append(Dirname)

        for f in filenames:
            if f.lower() in EccGlobalData.gConfig.SkipFileList:
                continue
            collector = None
            FullName = os.path.normpath(os.path.join(dirpath, f))
            model = DataClass.MODEL_FILE_OTHERS
            if os.path.splitext(f)[1] in ('.h', '.c'):
                EdkLogger.info("Parsing " + FullName)
                # conditional expression replaces the dated 'and/or' idiom
                model = DataClass.MODEL_FILE_C if f.endswith('c') else DataClass.MODEL_FILE_H
                collector = CodeFragmentCollector.CodeFragmentCollector(FullName)
                collector.TokenReleaceList = TokenReleaceList
                try:
                    collector.ParseFile()
                except UnicodeError:
                    # retry with preprocessor directives stripped; remember
                    # the file so the failure is reported below
                    ParseErrorFileList.append(FullName)
                    collector.CleanFileProfileBuffer()
                    collector.ParseFileWithClearedPPDirective()
#                collector.PrintFragments()
            BaseName = os.path.basename(f)
            DirName = os.path.dirname(FullName)
            Ext = os.path.splitext(f)[1].lstrip('.')
            ModifiedTime = os.path.getmtime(FullName)
            FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
            FileObjList.append(FileObj)
            if collector:
                collector.CleanFileProfileBuffer()

    if len(ParseErrorFileList) > 0:
        EdkLogger.info("Found unrecoverable error during parsing:\n\t%s\n" % "\n\t".join(ParseErrorFileList))

    Db = GetDB()
    # FIX: renamed loop variable 'file' — it shadowed a builtin-style name.
    for FileItem in FileObjList:
        if FileItem.ExtName.upper() not in ['INF', 'DEC', 'DSC', 'FDF']:
            Db.InsertOneFile(FileItem)
    Db.UpdateIdentifierBelongsToFunction()
def GetTableID(FullFileName, ErrorMsgList=None):
    """Map a full file path to its row ID in the File table.

    @param FullFileName: path to look up (matched with SQL LIKE)
    @param ErrorMsgList: optional list that collects diagnostic messages
    @return the file's ID; -2 when more than one row matches; -1 when none
    """
    if ErrorMsgList is None:
        ErrorMsgList = []
    SqlStatement = """ select ID
                       from File
                       where FullPath like '%s'
                   """ % FullFileName
    ResultSet = GetDB().TblFile.Exec(SqlStatement)
    FileID = -1
    for Result in ResultSet:
        if FileID != -1:
            # a second matching row means the DB is inconsistent
            ErrorMsgList.append('Duplicate file ID found in DB for file %s' % FullFileName)
            return -2
        FileID = Result[0]
    if FileID == -1:
        ErrorMsgList.append('NO file ID found in DB for file %s' % FullFileName)
        return -1
    return FileID
def GetIncludeFileList(FullFileName):
    """Return the cached list of '#include' statement values for a header file.

    Only header (.h) files are searched; any other extension yields an
    empty list.  Results are memoized in IncludeFileListDict keyed by the
    full path.

    @param FullFileName: full path of the header file to query
    @return list of single-element rows holding the raw include directives
    """
    # BUGFIX: the original used `not in ('.H')`, which is a substring test
    # against the string '.H' (e.g. an empty extension passed the check);
    # a one-element tuple makes it a real membership test.
    if os.path.splitext(FullFileName)[1].upper() not in ('.H',):
        return []
    IFList = IncludeFileListDict.get(FullFileName)
    if IFList is not None:
        return IFList

    FileID = GetTableID(FullFileName)
    if FileID < 0:
        return []

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_INCLUDE)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    IncludeFileListDict[FullFileName] = ResultSet
    return ResultSet
def GetFullPathOfIncludeFile(Str, IncludePathList):
    """Resolve an include file name against a list of search directories.

    @param Str: file name as written in the include directive
    @param IncludePathList: directories to probe, in priority order
    @return the first existing normalized path, or None when not found
    """
    for Directory in IncludePathList:
        Candidate = os.path.normpath(os.path.join(Directory, Str))
        if os.path.exists(Candidate):
            return Candidate
    return None
def GetAllIncludeFiles(FullFileName):
    """Transitively resolve every header reachable from FullFileName.

    Builds (and caches in AllIncludeFileListDict) the breadth-first list of
    include files: direct includes first, then includes found inside those
    headers, inserted just after the header that pulled them in.  The
    include search path for a directory is computed once and cached in
    IncludePathListDict.

    @param FullFileName: full path of the file whose includes are wanted
    @return ordered list of full paths of reachable include files
    """
    if AllIncludeFileListDict.get(FullFileName) is not None:
        return AllIncludeFileListDict.get(FullFileName)

    FileDirName = os.path.dirname(FullFileName)
    IncludePathList = IncludePathListDict.get(FileDirName)
    if IncludePathList is None:
        IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())
        # the file's own directory is searched first
        if FileDirName not in IncludePathList:
            IncludePathList.insert(0, FileDirName)
        IncludePathListDict[FileDirName] = IncludePathList
    IncludeFileQueue = []
    for IncludeFile in GetIncludeFileList(FullFileName):
        # strip '#', 'include', quotes and angle brackets from the raw
        # directive text to recover the bare file name
        FileName = IncludeFile[0].lstrip('#').strip()
        FileName = FileName.lstrip('include').strip()
        FileName = FileName.strip('\"')
        FileName = FileName.lstrip('<').rstrip('>').strip()
        FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
        if FullPath is not None:
            IncludeFileQueue.append(FullPath)

    i = 0
    # The queue grows while being walked: newly discovered headers are
    # inserted right after the current one, giving a breadth-first order
    # without revisiting duplicates.
    while i < len(IncludeFileQueue):
        for IncludeFile in GetIncludeFileList(IncludeFileQueue[i]):
            FileName = IncludeFile[0].lstrip('#').strip()
            FileName = FileName.lstrip('include').strip()
            FileName = FileName.strip('\"')
            FileName = FileName.lstrip('<').rstrip('>').strip()
            FullPath = GetFullPathOfIncludeFile(FileName, IncludePathList)
            if FullPath is not None and FullPath not in IncludeFileQueue:
                IncludeFileQueue.insert(i + 1, FullPath)
        i += 1
    AllIncludeFileListDict[FullFileName] = IncludeFileQueue
    return IncludeFileQueue
def GetPredicateListFromPredicateExpStr(PES):
    """Split a predicate expression string on '&&'/'||' into sub-predicates.

    Each segment between logical operators is appended to the result; a
    segment matching the function-declaration pattern is kept verbatim,
    otherwise trailing ';' and ')' characters are stripped.

    @param PES: the full predicate expression text
    @return list of predicate sub-expression strings
    """
    PredicateList = []
    i = 0
    PredicateBegin = 0
    #PredicateEnd = 0
    LogicOpPos = -1
    p = GetFuncDeclPattern()
    while i < len(PES) - 1:
        # first identifier-ish character after a logical operator marks the
        # start of the next predicate
        if (PES[i].isalnum() or PES[i] == '_' or PES[i] == '*') and LogicOpPos > PredicateBegin:
            PredicateBegin = i
        if (PES[i] == '&' and PES[i + 1] == '&') or (PES[i] == '|' and PES[i + 1] == '|'):
            LogicOpPos = i
            Exp = PES[PredicateBegin:i].strip()
            # Exp may contain '.' or '->'
            TmpExp = Exp.replace('.', '').replace('->', '')
            if p.match(TmpExp):
                PredicateList.append(Exp)
            else:
                PredicateList.append(Exp.rstrip(';').rstrip(')').strip())
        i += 1

    # handle the trailing predicate after the last logical operator
    if PredicateBegin > LogicOpPos:
        while PredicateBegin < len(PES):
            if PES[PredicateBegin].isalnum() or PES[PredicateBegin] == '_' or PES[PredicateBegin] == '*':
                break
            PredicateBegin += 1
        Exp = PES[PredicateBegin:len(PES)].strip()
        # Exp may contain '.' or '->'
        TmpExp = Exp.replace('.', '').replace('->', '')
        if p.match(TmpExp):
            PredicateList.append(Exp)
        else:
            PredicateList.append(Exp.rstrip(';').rstrip(')').strip())
    return PredicateList
def GetCNameList(Lvalue, StarList=None):
    """Split a C lvalue expression into its component identifiers.

    'Struct->Field.Member' yields ['Struct', 'Field', 'Member'].  Any '*'
    dereference characters found before the first identifier are appended
    to StarList so the caller can count pointer dereferences.

    @param Lvalue: the lvalue expression text
    @param StarList: optional output list collecting leading '*' characters
    @return list of identifier name strings, outermost first
    """
    # BUGFIX: the original declared a mutable default argument
    # (StarList=[]), so '*' entries accumulated across calls that omitted
    # the parameter.
    if StarList is None:
        StarList = []
    Lvalue += ' '
    i = 0
    SearchBegin = 0
    VarStart = -1
    VarEnd = -1
    VarList = []
    while SearchBegin < len(Lvalue):
        while i < len(Lvalue):
            if Lvalue[i].isalnum() or Lvalue[i] == '_':
                if VarStart == -1:
                    VarStart = i
                VarEnd = i
                i += 1
            elif VarEnd != -1:
                VarList.append(Lvalue[VarStart:VarEnd + 1])
                i += 1
                break
            else:
                if VarStart == -1 and Lvalue[i] == '*':
                    StarList.append('*')
                i += 1
        if VarEnd == -1:
            break

        DotIndex = Lvalue[VarEnd:].find('.')
        ArrowIndex = Lvalue[VarEnd:].find('->')
        if DotIndex == -1 and ArrowIndex == -1:
            break
        elif DotIndex == -1 and ArrowIndex != -1:
            SearchBegin = VarEnd + ArrowIndex
        elif ArrowIndex == -1 and DotIndex != -1:
            SearchBegin = VarEnd + DotIndex
        else:
            SearchBegin = VarEnd + ((DotIndex < ArrowIndex) and DotIndex or ArrowIndex)

        i = SearchBegin
        VarStart = -1
        VarEnd = -1

    return VarList
def SplitPredicateByOp(Str, Op, IsFuncCalling=False):
    """Split a predicate string into [Name, Value] around the comparison Op.

    For function-call predicates the split point is searched only after the
    call's balanced parentheses, so operators inside argument lists are not
    mistaken for the comparison.  For plain predicates the rightmost Op
    preceded by an identifier/space/')'/']' character is used.

    @param Str: the predicate text
    @param Op: the comparison operator to split on (e.g. '==', '>=')
    @param IsFuncCalling: True when Str begins with a function call
    @return [Name] when Op is absent, otherwise [Name, Value]
    """
    Name = Str.strip()
    Value = None
    if IsFuncCalling:
        Index = 0
        LBFound = False
        UnmatchedLBCount = 0
        # Scan forward to the end of the call's parenthesized argument list.
        while Index < len(Str):
            while not LBFound and Str[Index] != '_' and not Str[Index].isalnum():
                Index += 1

            while not LBFound and (Str[Index].isalnum() or Str[Index] == '_'):
                Index += 1
            # maybe type-cast at the beginning, skip it.
            RemainingStr = Str[Index:].lstrip()
            if RemainingStr.startswith(')') and not LBFound:
                Index += 1
                continue

            if RemainingStr.startswith('(') and not LBFound:
                LBFound = True

            if Str[Index] == '(':
                UnmatchedLBCount += 1
                Index += 1
                continue

            if Str[Index] == ')':
                UnmatchedLBCount -= 1
                Index += 1
                if UnmatchedLBCount == 0:
                    break
                continue
            Index += 1

        if UnmatchedLBCount > 0:
            # unbalanced parentheses: cannot split safely
            return [Name]

        IndexInRemainingStr = Str[Index:].find(Op)
        if IndexInRemainingStr == -1:
            return [Name]

        Name = Str[0:Index + IndexInRemainingStr].strip()
        Value = Str[Index + IndexInRemainingStr + len(Op):].strip().strip(')')
        return [Name, Value]

    TmpStr = Str.rstrip(';').rstrip(')')
    while True:
        Index = TmpStr.rfind(Op)
        if Index == -1:
            return [Name]

        # Op must be preceded by the end of an operand; otherwise retry on a
        # shorter prefix (note: the slice deliberately drops the character
        # before Op as well, preserved from the original implementation).
        if Str[Index - 1].isalnum() or Str[Index - 1].isspace() or Str[Index - 1] == ')' or Str[Index - 1] == ']':
            Name = Str[0:Index].strip()
            Value = Str[Index + len(Op):].strip()
            return [Name, Value]

        TmpStr = Str[0:Index - 1]
def SplitPredicateStr(Str):
    """Break a predicate expression into [[Name, Value], Operator].

    Tries each comparison operator in turn; the first one that splits the
    expression wins.  When no operator is found the whole string is the
    name, with Value and Operator both None.

    @param Str: the predicate text (a leading '(' is stripped)
    @return [[Name, Value], Op] or [[Str, None], None]
    """
    Str = Str.lstrip('(')
    TmpStr = Str.replace('.', '').replace('->', '')
    IsFuncCalling = bool(GetFuncDeclPattern().match(TmpStr))
    # Two-character operators must be tried before their one-character
    # prefixes ('>=' before '>', '<=' before '<').
    for Op in ('==', '!=', '>=', '<=', '>', '<'):
        PredPartList = SplitPredicateByOp(Str, Op, IsFuncCalling)
        if len(PredPartList) > 1:
            return [PredPartList, Op]
    return [[Str, None], None]
def GetFuncContainsPE(ExpLine, ResultSet):
    """Find the function record whose line span strictly contains ExpLine.

    @param ExpLine: line number of the predicate expression
    @param ResultSet: iterable of records whose first two fields are the
                      function's start and end lines
    @return the first enclosing record, or None when none encloses ExpLine
    """
    return next((Record for Record in ResultSet
                 if Record[0] < ExpLine < Record[1]), None)
def PatternInModifier(Modifier, SubStr):
    """Check whether SubStr appears as a whole whitespace-delimited word in Modifier.

    @param Modifier: modifier text, e.g. 'IN OUT UINTN'
    @param SubStr: the exact word to look for
    @return True when SubStr is one of the words in Modifier
    """
    # Membership on the split word list replaces the hand-rolled search loop.
    return SubStr in Modifier.split()
def GetDataTypeFromModifier(ModifierStr):
    """Strip modifier keywords and array suffixes from a declaration string.

    Every whitespace-separated token that is a configured modifier keyword
    or an array suffix ('[...]') is discarded; the remaining tokens form
    the data type.  'VOID' is returned when nothing is left.

    @param ModifierStr: full modifier/type text of a declaration
    @return the bare data-type string, or 'VOID'
    """
    KeptTokens = [Token for Token in ModifierStr.split()
                  if Token not in EccGlobalData.gConfig.ModifierSet
                  # remove array suffix tokens
                  and not Token.startswith('[')
                  and not Token.endswith(']')]
    ReturnType = ' '.join(KeptTokens).strip()
    return ReturnType if ReturnType else 'VOID'
def DiffModifier(Str1, Str2):
    """Return True when two modifier strings differ after whitespace normalization.

    Tokenizing with split() makes the comparison insensitive to the amount
    and kind of whitespace between modifier keywords.

    @param Str1: first modifier string
    @param Str2: second modifier string
    @return True when the token sequences differ
    """
    # Direct comparison replaces the redundant if/else returning literals.
    return Str1.split() != Str2.split()
def GetTypedefDict(FullFileName):
    """Build (and cache) a typedef name -> underlying type mapping for a file.

    Typedefs are collected from the file itself (simple typedefs only) and
    from every reachable include file.  Function-pointer typedefs (value
    beginning 'FP (') map to their return type, or 'VOID' when no modifier
    text was recorded.  Results are cached in ComplexTypeDict.

    @param FullFileName: full path of the file to build the mapping for
    @return dict mapping typedef names to underlying type strings
    """
    Cached = ComplexTypeDict.get(FullFileName)
    if Cached is not None:
        return Cached

    Db = GetDB()
    Mapping = {}
    FileTable = 'Identifier' + str(GetTableID(FullFileName))
    SqlStatement = """ select Modifier, Name, Value, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
    for Row in Db.TblFile.Exec(SqlStatement):
        # only simple typedefs (no modifier text) are taken from the file itself
        if len(Row[0]) == 0:
            Mapping[Row[1]] = Row[2]

    for IncFile in GetAllIncludeFiles(FullFileName):
        IncFileID = GetTableID(IncFile)
        if IncFileID < 0:
            continue
        FileTable = 'Identifier' + str(IncFileID)
        SqlStatement = """ select Modifier, Name, Value, ID
                           from %s
                           where Model = %d
                       """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
        for Row in Db.TblFile.Exec(SqlStatement):
            if not Row[2].startswith('FP ('):
                Mapping[Row[1]] = Row[2]
            elif len(Row[0]) == 0:
                # function-pointer typedef with no recorded modifier
                Mapping[Row[1]] = 'VOID'
            else:
                # function-pointer typedef: keep the return type only
                Mapping[Row[1]] = GetDataTypeFromModifier(Row[0])

    ComplexTypeDict[FullFileName] = Mapping
    return Mapping
def GetSUDict(FullFileName):
    """Build (and cache) a struct/union name -> body mapping for a file.

    Struct and union definitions with non-empty bodies are collected from
    the file itself and from every reachable include file; results are
    cached in the module-level SUDict keyed by the full path.

    @param FullFileName: full path of the file to build the mapping for
    @return dict mapping struct/union names to their definition text
    """
    Cached = SUDict.get(FullFileName)
    if Cached is not None:
        return Cached

    Db = GetDB()
    Mapping = {}
    FileTable = 'Identifier' + str(GetTableID(FullFileName))
    SqlStatement = """ select Name, Value, ID
                       from %s
                       where Model = %d or Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_STRUCTURE, DataClass.MODEL_IDENTIFIER_UNION)
    for Row in Db.TblFile.Exec(SqlStatement):
        if len(Row[1]) > 0:
            Mapping[Row[0]] = Row[1]

    for IncFile in GetAllIncludeFiles(FullFileName):
        IncFileID = GetTableID(IncFile)
        if IncFileID < 0:
            continue
        FileTable = 'Identifier' + str(IncFileID)
        SqlStatement = """ select Name, Value, ID
                           from %s
                           where Model = %d or Model = %d
                       """ % (FileTable, DataClass.MODEL_IDENTIFIER_STRUCTURE, DataClass.MODEL_IDENTIFIER_UNION)
        for Row in Db.TblFile.Exec(SqlStatement):
            if len(Row[1]) > 0:
                Mapping[Row[0]] = Row[1]

    SUDict[FullFileName] = Mapping
    return Mapping
def StripComments(Str):
    """Blank out C comments in Str, preserving character positions.

    Both block comments (/* ... */) and line comments (// ...) are replaced
    character by character with spaces so that line/column positions of the
    remaining code are unchanged.  A space is appended before scanning so
    the one-character lookahead is always safe; trailing spaces are
    stripped from the result.

    @param Str: text possibly containing C comments
    @return the text with comment characters replaced by spaces
    """
    Chars = list(Str + ' ')
    InComment = False
    LineComment = False        # True while inside a '//' comment
    Pos = 0
    while Pos < len(Chars):
        if Chars[Pos] == '\n':
            # a newline terminates a '//' comment (the newline itself stays)
            if InComment and LineComment:
                InComment = False
                LineComment = False
            Pos += 1
        elif InComment and not LineComment and Chars[Pos] == '*' and Chars[Pos + 1] == '/':
            # blank the closing '*/' and leave the block comment
            Chars[Pos] = ' '
            Chars[Pos + 1] = ' '
            Pos += 2
            InComment = False
        elif InComment:
            Chars[Pos] = ' '
            Pos += 1
        elif Chars[Pos] == '/' and Chars[Pos + 1] == '/':
            # the slashes themselves are blanked on the following iterations
            InComment = True
            LineComment = True
        elif Chars[Pos] == '/' and Chars[Pos + 1] == '*':
            Chars[Pos] = ' '
            Chars[Pos + 1] = ' '
            Pos += 2
            InComment = True
        else:
            Pos += 1
    return ''.join(Chars).rstrip(' ')
def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
    """Resolve the declared type of FieldName inside struct/union Type.

    Type is first resolved through TypedefDict/SUDict until a definition
    body containing '{' is found; the body's fields are then scanned for
    FieldName and the matching field's type text is returned.

    @param Type: the struct/union (or typedef) name to look into
    @param FieldName: member name to search for
    @param TypedefDict: typedef name -> underlying type mapping
    @param SUDict: struct/union name -> body mapping
    @return the member's data type string, or None when unresolvable
    """
    Value = TypedefDict.get(Type)
    if Value is None:
        Value = SUDict.get(Type)
    if Value is None:
        return None

    LBPos = Value.find('{')
    # Keep chasing typedef chains until a definition body is reached.
    while LBPos == -1:
        FTList = Value.split()
        for FT in FTList:
            if FT not in ('struct', 'union'):
                Value = TypedefDict.get(FT)
                if Value is None:
                    Value = SUDict.get(FT)
                break

        if Value is None:
            return None
        LBPos = Value.find('{')

#    RBPos = Value.find('}')
    Fields = Value[LBPos + 1:]
    Fields = StripComments(Fields)
    FieldsList = Fields.split(';')
    for Field in FieldsList:
        Field = Field.strip()
        Index = Field.rfind(FieldName)
        # Index < 1 also rejects a match at position 0 (no type text before it)
        if Index < 1:
            continue
        # the character before the match must not extend an identifier
        if not Field[Index - 1].isalnum():
            if Index + len(FieldName) == len(Field):
                Type = GetDataTypeFromModifier(Field[0:Index])
                return Type.strip()
            else:
                # For the condition that the field in struct is an array with [] suffixes...
                if not Field[Index + len(FieldName)].isalnum():
                    Type = GetDataTypeFromModifier(Field[0:Index])
                    return Type.strip()
    return None
def GetRealType(Type, TypedefDict, TargetType=None):
    """Follow typedef links until a non-typedef type (or TargetType) is reached.

    @param Type: the starting type name
    @param TypedefDict: mapping of typedef name -> underlying type
    @param TargetType: optional type name at which to stop early
    @return the resolved type name
    """
    while True:
        if TargetType is not None and Type == TargetType:
            return Type
        Resolved = TypedefDict.get(Type)
        # stop on a missing or empty mapping, exactly like the original
        # truthiness-based loop condition
        if not Resolved:
            return Type
        Type = Resolved
def GetTypeInfo(RefList, Modifier, FullFileName, TargetType=None):
    """Resolve the type of a chained member reference (a.b->c ...).

    Starting from the base type in Modifier, each name in RefList is looked
    up as a member of the current struct/union type.  When TargetType is
    given the exact type is chased; otherwise only the first word of each
    member type is followed, except that a pointer type found at the last
    reference is returned as-is.

    @param RefList: member names, outermost first
    @param Modifier: modifier/type text of the base variable
    @param FullFileName: file whose typedef/struct tables are consulted
    @param TargetType: optional type name to resolve toward
    @return the resolved type name, or None when a member cannot be found
    """
    TypedefDict = GetTypedefDict(FullFileName)
    SUDict = GetSUDict(FullFileName)
    # pointer markers are irrelevant for member lookup
    Type = GetDataTypeFromModifier(Modifier).replace('*', '').strip()

    Type = Type.split()[-1]
    Index = 0
    while Index < len(RefList):
        FieldName = RefList[Index]
        FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)
        if FromType is None:
            return None
        # we want to determine the exact type.
        if TargetType is not None:
            Type = FromType.split()[0]
        # we only want to check if it is a pointer
        else:
            Type = FromType
            if Type.find('*') != -1 and Index == len(RefList) - 1:
                return Type
            Type = FromType.split()[0]
        Index += 1

    Type = GetRealType(Type, TypedefDict, TargetType)
    return Type
def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall=False, TargetType=None, StarList=None):
    """Resolve the type of a predicate variable (or function call result).

    For a function call the declarations/definitions of PredVar are searched
    in the file and its includes and the (typedef-resolved) return type is
    returned.  For a plain variable the search order is: local variables of
    the enclosing function, then function parameters, then global variables
    of the file, then globals of every include file.  When PredVarList has
    more than one element the remaining names are resolved as member
    references via GetTypeInfo.

    @param PredVarList: variable name followed by any member-reference names
    @param FuncRecord: (StartLine, EndLine, Header, ...) of the enclosing
                       function, used for local/parameter lookup
    @param FullFileName: file whose identifier tables are consulted
    @param IsFuncCall: True when PredVar is a function being called
    @param TargetType: optional type name to resolve toward
    @param StarList: '*' dereference markers collected by GetCNameList;
                     each one strips a pointer level from the found type
    @return the resolved type name, or None when nothing matches
    """
    PredVar = PredVarList[0]
    FileID = GetTableID(FullFileName)

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # search variable in include files

    # it is a function call, search function declarations and definitions
    if IsFuncCall:
        SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Value = \'%s\'
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, PredVar)
        ResultSet = Db.TblFile.Exec(SqlStatement)

        # first matching declaration in the file itself wins
        for Result in ResultSet:
            Type = GetDataTypeFromModifier(Result[0]).split()[-1]
            TypedefDict = GetTypedefDict(FullFileName)
            Type = GetRealType(Type, TypedefDict, TargetType)
            return Type

        # then declarations in every reachable include file
        IncludeFileList = GetAllIncludeFiles(FullFileName)
        for F in IncludeFileList:
            FileID = GetTableID(F)
            if FileID < 0:
                continue

            FileTable = 'Identifier' + str(FileID)
            SqlStatement = """ select Modifier, ID
                           from %s
                           where Model = %d and Value = \'%s\'
                       """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, PredVar)
            ResultSet = Db.TblFile.Exec(SqlStatement)

            for Result in ResultSet:
                Type = GetDataTypeFromModifier(Result[0]).split()[-1]
                TypedefDict = GetTypedefDict(FullFileName)
                Type = GetRealType(Type, TypedefDict, TargetType)
                return Type

        # then function definitions in the file itself
        FileID = GetTableID(FullFileName)
        SqlStatement = """ select Modifier, ID
                       from Function
                       where BelongsToFile = %d and Name = \'%s\'
                   """ % (FileID, PredVar)
        ResultSet = Db.TblFile.Exec(SqlStatement)

        for Result in ResultSet:
            Type = GetDataTypeFromModifier(Result[0]).split()[-1]
            TypedefDict = GetTypedefDict(FullFileName)
            Type = GetRealType(Type, TypedefDict, TargetType)
            return Type

        # finally, function definitions in the include files
        for F in IncludeFileList:
            FileID = GetTableID(F)
            if FileID < 0:
                continue

            FileTable = 'Identifier' + str(FileID)
            SqlStatement = """ select Modifier, ID
                           from Function
                           where BelongsToFile = %d and Name = \'%s\'
                       """ % (FileID, PredVar)
            ResultSet = Db.TblFile.Exec(SqlStatement)

            for Result in ResultSet:
                Type = GetDataTypeFromModifier(Result[0]).split()[-1]
                TypedefDict = GetTypedefDict(FullFileName)
                Type = GetRealType(Type, TypedefDict, TargetType)
                return Type
        return None

    # really variable, search local variable first
    SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Name = \'%s\' and StartLine >= %d and StartLine <= %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar, FuncRecord[0], FuncRecord[1])
    ResultSet = Db.TblFile.Exec(SqlStatement)
    # NOTE(review): VarFound is assigned but never read — left as-is.
    VarFound = False
    for Result in ResultSet:
        if len(PredVarList) > 1:
            Type = GetTypeInfo(PredVarList[1:], Result[0], FullFileName, TargetType)
            return Type
        else:
#            Type = GetDataTypeFromModifier(Result[0]).split()[-1]
            TypeList = GetDataTypeFromModifier(Result[0]).split()
            Type = TypeList[-1]
            # each '*' in StarList removes one pointer level from the type
            if len(TypeList) > 1 and StarList is not None:
                for Star in StarList:
                    Type = Type.strip()
                    Type = Type.rstrip(Star)
                # Get real type after de-reference pointers.
                if len(Type.strip()) == 0:
                    Type = TypeList[-2]
            TypedefDict = GetTypedefDict(FullFileName)
            Type = GetRealType(Type, TypedefDict, TargetType)
            return Type

    # search function parameters second
    ParamList = GetParamList(FuncRecord[2])
    for Param in ParamList:
        if Param.Name.strip() == PredVar:
            if len(PredVarList) > 1:
                Type = GetTypeInfo(PredVarList[1:], Param.Modifier, FullFileName, TargetType)
                return Type
            else:
                TypeList = GetDataTypeFromModifier(Param.Modifier).split()
                Type = TypeList[-1]
                if Type == '*' and len(TypeList) >= 2:
                    Type = TypeList[-2]
                if len(TypeList) > 1 and StarList is not None:
                    for Star in StarList:
                        Type = Type.strip()
                        Type = Type.rstrip(Star)
                    # Get real type after de-reference pointers.
                    if len(Type.strip()) == 0:
                        Type = TypeList[-2]
                TypedefDict = GetTypedefDict(FullFileName)
                Type = GetRealType(Type, TypedefDict, TargetType)
                return Type

    # search global variable next
    SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Name = \'%s\' and BelongsToFunction = -1
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        if len(PredVarList) > 1:
            Type = GetTypeInfo(PredVarList[1:], Result[0], FullFileName, TargetType)
            return Type
        else:
            TypeList = GetDataTypeFromModifier(Result[0]).split()
            Type = TypeList[-1]
            if len(TypeList) > 1 and StarList is not None:
                for Star in StarList:
                    Type = Type.strip()
                    Type = Type.rstrip(Star)
                # Get real type after de-reference pointers.
                if len(Type.strip()) == 0:
                    Type = TypeList[-2]
            TypedefDict = GetTypedefDict(FullFileName)
            Type = GetRealType(Type, TypedefDict, TargetType)
            return Type

    # finally, globals declared in any reachable include file
    IncludeFileList = GetAllIncludeFiles(FullFileName)
    for F in IncludeFileList:
        FileID = GetTableID(F)
        if FileID < 0:
            continue

        FileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Modifier, ID
                           from %s
                           where Model = %d and BelongsToFunction = -1 and Name = \'%s\'
                       """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar)
        ResultSet = Db.TblFile.Exec(SqlStatement)
        for Result in ResultSet:
            if len(PredVarList) > 1:
                Type = GetTypeInfo(PredVarList[1:], Result[0], FullFileName, TargetType)
                return Type
            else:
                TypeList = GetDataTypeFromModifier(Result[0]).split()
                Type = TypeList[-1]
                if len(TypeList) > 1 and StarList is not None:
                    for Star in StarList:
                        Type = Type.strip()
                        Type = Type.rstrip(Star)
                    # Get real type after de-reference pointers.
                    if len(Type.strip()) == 0:
                        Type = TypeList[-2]
                TypedefDict = GetTypedefDict(FullFileName)
                Type = GetRealType(Type, TypedefDict, TargetType)
                return Type
def GetTypeFromArray(Type, Var):
    """Strip trailing '*' markers from Type, once per array dimension in Var.

    Each '[' in Var counts as one dimension; for every dimension the type
    string is stripped of surrounding whitespace and trailing '*'
    characters.

    NOTE(review): rstrip('*') removes ALL trailing stars in a single pass,
    so dimensions beyond the first only strip whitespace/star runs exposed
    by earlier passes — behavior preserved from the original.

    @param Type: pointer/array element type text, e.g. 'UINT8 *'
    @param Var: variable expression possibly containing array subscripts
    @return the adjusted type string
    """
    for _ in range(Var.count('[')):
        Type = Type.strip().rstrip('*')
    return Type
def CheckFuncLayoutReturnType(FullFileName):
    """Check that return types start their own line at column 0.

    For every function declaration in the file the return type must be the
    first token of the (non-STATIC) modifier and start at column 0; for
    declarations it must also not share a line with the function name.
    The same column check is applied to function definitions.  Errors are
    reported via PrintErrorMsg.

    @param FullFileName: full path of the file being checked
    @return the error-message list when the file ID lookup fails
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Modifier, ID, StartLine, StartColumn, EndLine, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        ReturnType = GetDataTypeFromModifier(Result[0])
        TypeStart = ReturnType.split()[0]
        FuncName = Result[5]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, FuncName):
            continue
        Result0 = Result[0]
        # a leading STATIC keyword is allowed before the return type
        if Result0.upper().startswith('STATIC'):
            Result0 = Result0[6:].strip()
        Index = Result0.find(TypeStart)
        # the type must be the first token AND start at column 0
        if Index != 0 or Result[3] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, FileTable, Result[1])

        # declaration start line == end line means the type shares a line
        # with the rest of the declaration
        if Result[2] == Result[4]:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear on its own line' % FuncName, FileTable, Result[1])

    SqlStatement = """ select Modifier, ID, StartLine, StartColumn, FunNameStartLine, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        ReturnType = GetDataTypeFromModifier(Result[0])
        TypeStart = ReturnType.split()[0]
        FuncName = Result[5]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, FuncName):
            continue
        Result0 = Result[0]
        if Result0.upper().startswith('STATIC'):
            Result0 = Result0[6:].strip()
        Index = Result0.find(TypeStart)
        if Index != 0 or Result[3] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, 'Function', Result[1])
def CheckFuncLayoutModifier(FullFileName):
    """Check that no extra modifier text precedes the return type.

    For every function declaration and definition in the file, an
    ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER error is
    reported when the return type is not the first token of the
    (non-STATIC) modifier text.

    @param FullFileName: full path of the file being checked
    @return the error-message list when the file ID lookup fails
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)

    def _CheckRows(Rows, Table):
        # Flag rows whose modifier has tokens before the return type.
        for Row in Rows:
            TypeStart = GetDataTypeFromModifier(Row[0]).split()[0]
            Modifier = Row[0]
            # a leading STATIC keyword is allowed before the return type
            if Modifier.upper().startswith('STATIC'):
                Modifier = Modifier[6:].strip()
            if Modifier.find(TypeStart) != 0:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER, '', Table, Row[1])

    SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    _CheckRows(Db.TblFile.Exec(SqlStatement), FileTable)

    SqlStatement = """ select Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    _CheckRows(Db.TblFile.Exec(SqlStatement), 'Function')
def CheckFuncLayoutName(FullFileName):
    """Check function-name layout and parameter naming for a source file.

    For every function declaration and definition: the function name must
    start at column 0, each parameter must sit on its own line with no
    blank line before it, parameter names must follow the naming
    convention, and the closing ')' must be on a new line.  Errors are
    reported via PrintErrorMsg.

    @param FullFileName: full path of the file being checked
    @return the error-message list when the file ID lookup fails
    """
    ErrorMsgList = []
    # Parameter variable format pattern.
    Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
    ParamIgnoreList = ('VOID', '...')
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Name, ID, EndColumn, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[3]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, FuncName):
            continue
        if Result[2] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Function name [%s] should appear at the start of a line' % FuncName, FileTable, Result[1])
        ParamList = GetParamList(Result[0])
        if len(ParamList) == 0:
            continue
        StartLine = 0
        for Param in ParamList:
            if Param.StartLine <= StartLine:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Parameter %s should be in its own line.' % Param.Name, FileTable, Result[1])
            if Param.StartLine - StartLine > 1:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Empty line appears before Parameter %s.' % Param.Name, FileTable, Result[1])
            if not Pattern.match(Param.Name) and not Param.Name in ParamIgnoreList and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Param.Name):
                PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Parameter [%s] NOT follow naming convention.' % Param.Name, FileTable, Result[1])
            StartLine = Param.StartLine

        if not Result[0].endswith('\n  )') and not Result[0].endswith('\r  )'):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, '\')\' should be on a new line and indented two spaces', FileTable, Result[1])

    SqlStatement = """ select Modifier, ID, FunNameStartColumn, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[3]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, FuncName):
            continue
        if Result[2] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Function name [%s] should appear at the start of a line' % FuncName, 'Function', Result[1])
        ParamList = GetParamList(Result[0])
        if len(ParamList) == 0:
            continue
        StartLine = 0
        for Param in ParamList:
            if Param.StartLine <= StartLine:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Parameter %s should be in its own line.' % Param.Name, 'Function', Result[1])
            if Param.StartLine - StartLine > 1:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Empty line appears before Parameter %s.' % Param.Name, 'Function', Result[1])
            if not Pattern.match(Param.Name) and not Param.Name in ParamIgnoreList and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Param.Name):
                # BUGFIX: report against the 'Function' table like every other
                # message in this loop; the original mistakenly used the
                # declaration Identifier table (FileTable) here.
                PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Parameter [%s] NOT follow naming convention.' % Param.Name, 'Function', Result[1])
            StartLine = Param.StartLine
        if not Result[0].endswith('\n  )') and not Result[0].endswith('\r  )'):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, '\')\' should be on a new line and indented two spaces', 'Function', Result[1])
def _CheckFuncDefAgainstDecls(FuncDef, FuncDeclList):
    """Compare one function definition row against a list of declaration rows.

    FuncDef is a (Modifier, Header, Name, ID) row from the Function table;
    FuncDeclList holds (Modifier, Name, ID) rows of function declarations.
    For the first declaration whose name matches, report (via PrintErrorMsg)
    any return-modifier difference, parameter-count difference, or
    per-parameter modifier difference, honoring the configured exceptions.

    Returns True if a declaration with a matching name was found,
    False otherwise.
    """
    FuncName = FuncDef[2].strip()
    FuncModifier = FuncDef[0]
    FuncDefHeader = FuncDef[1]
    for FuncDecl in FuncDeclList:
        # The declaration's Name field holds the full prototype text; the
        # identifier is everything before the first '('.
        LBPos = FuncDecl[1].find('(')
        DeclName = FuncDecl[1][0:LBPos].strip()
        DeclModifier = FuncDecl[0]
        if DeclName != FuncName:
            continue
        if DiffModifier(FuncModifier, DeclModifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, FuncName):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, 'Function [%s] modifier different with prototype.' % FuncName, 'Function', FuncDef[3])
        ParamListOfDef = GetParamList(FuncDefHeader)
        ParamListOfDecl = GetParamList(FuncDecl[1])
        if len(ParamListOfDef) != len(ParamListOfDecl) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2, FuncName):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_2, 'Parameter number different in function [%s].' % FuncName, 'Function', FuncDef[3])
            # Parameter lists cannot be compared pairwise; stop at the first
            # matching declaration, as the original two-pass code did.
            return True
        Index = 0
        while Index < len(ParamListOfDef):
            if DiffModifier(ParamListOfDef[Index].Modifier, ParamListOfDecl[Index].Modifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3, FuncName):
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE_3, 'Parameter %s has different modifier with prototype in function [%s].' % (ParamListOfDef[Index].Name, FuncName), 'Function', FuncDef[3])
            Index += 1
        return True
    return False

def CheckFuncLayoutPrototype(FullFileName):
    """Check that each function definition agrees with its prototype.

    Each definition in FullFileName is first matched against declarations
    found in the same file; definitions with no local declaration are then
    matched against declarations gathered from all included header files.
    Modifier and parameter mismatches are reported via PrintErrorMsg.
    Returns the (possibly empty) list of collected error messages.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    FileTable = 'Identifier' + str(FileID)
    Db = GetDB()
    SqlStatement = """ select Modifier, Header, Name, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return ErrorMsgList
    FuncDefList = list(ResultSet)
    SqlStatement = """ select Modifier, Name, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    FuncDeclList = list(Db.TblFile.Exec(SqlStatement))
    # Pass 1: match each definition against declarations from this file;
    # collect the ones with no matching declaration for the second pass.
    UndeclFuncList = []
    for FuncDef in FuncDefList:
        if not _CheckFuncDefAgainstDecls(FuncDef, FuncDeclList):
            UndeclFuncList.append(FuncDef)
    # Pass 2: look up the still-undeclared definitions in included headers.
    IncludeFileList = GetAllIncludeFiles(FullFileName)
    FuncDeclList = []
    for F in IncludeFileList:
        FileID = GetTableID(F, ErrorMsgList)
        if FileID < 0:
            continue
        FileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Modifier, Name, ID
                           from %s
                           where Model = %d
                       """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
        FuncDeclList.extend(Db.TblFile.Exec(SqlStatement))
    for FuncDef in UndeclFuncList:
        _CheckFuncDefAgainstDecls(FuncDef, FuncDeclList)
def CheckFuncLayoutBody(FullFileName):
    """Check that each function's open and close braces start a line.

    Both BodyStartColumn and EndColumn recorded for a function must be 0;
    any other column means the brace is not at the very beginning of its
    line, which is reported unless the function name is excepted.
    Returns the (possibly empty) list of collected error messages.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    FileTable = 'Identifier' + str(FileID)
    Db = GetDB()
    SqlStatement = """ select BodyStartColumn, EndColumn, ID, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return ErrorMsgList
    for Record in ResultSet:
        OpenColumn, CloseColumn, FuncID, FuncName = Record[0], Record[1], Record[2], Record[3]
        if OpenColumn != 0 and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY, FuncName):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY,
                          'The open brace should be at the very beginning of a line for the function [%s].' % FuncName,
                          'Function', FuncID)
        if CloseColumn != 0 and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY, FuncName):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY,
                          'The close brace should be at the very beginning of a line for the function [%s].' % FuncName,
                          'Function', FuncID)
def CheckFuncLayoutLocalVariable(FullFileName):
    """Check that local variables are not initialized in their declaration.

    For every function in FullFileName, each local variable record with a
    non-empty Value (i.e. an initializer) is reported, except CONST and
    STATIC declarations, which are allowed to carry one.
    Returns the (possibly empty) list of collected error messages.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    FuncIDSet = Db.TblFile.Exec(SqlStatement)
    if len(FuncIDSet) == 0:
        return ErrorMsgList
    # Snapshot the function IDs first because the query result name is
    # re-used for the per-function variable queries below.
    for FuncID in list(FuncIDSet):
        SqlStatement = """ select Name, Value, ID, Modifier
                           from %s
                           where Model = %d and BelongsToFunction = %d
                       """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, FuncID[0])
        for Var in Db.TblFile.Exec(SqlStatement):
            if len(Var[1]) > 0 and 'CONST' not in Var[3] and 'STATIC' not in Var[3]:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE, 'Variable Name: %s' % Var[0], FileTable, Var[2])
def CheckMemberVariableFormat(Name, Value, FileTable, TdId, ModelId):
    """Check naming convention of members inside a struct/union/enum body.

    Name      -- name of the declared type (used in error messages)
    Value     -- full declaration text including the '{ ... }' body
    FileTable -- identifier table name errors are reported against
    TdId      -- record ID errors are reported against
    ModelId   -- DataClass model ID distinguishing struct/union/enum

    Nested struct/union/enum declarations are reported directly and abort
    the member check. Returns the list of member names that violate the
    UpperCamelCase pattern (the caller reports them).
    """
    ErrMsgList = []
    # Member variable format pattern.
    Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
    # Only examine the text between the outermost braces.
    LBPos = Value.find('{')
    RBPos = Value.rfind('}')
    if LBPos == -1 or RBPos == -1:
        return ErrMsgList
    Fields = Value[LBPos + 1 : RBPos]
    Fields = StripComments(Fields).strip()
    # Reject nested struct/union/enum: the keyword must be followed by a
    # non-alphanumeric character (otherwise it is just a type reference).
    NestPos = Fields.find ('struct')
    if NestPos != -1 and (NestPos + len('struct') < len(Fields)) and ModelId != DataClass.MODEL_IDENTIFIER_UNION:
        if not Fields[NestPos + len('struct') + 1].isalnum():
            if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested struct in [%s].' % (Name), FileTable, TdId)
            return ErrMsgList
    NestPos = Fields.find ('union')
    if NestPos != -1 and (NestPos + len('union') < len(Fields)):
        if not Fields[NestPos + len('union') + 1].isalnum():
            if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested union in [%s].' % (Name), FileTable, TdId)
            return ErrMsgList
    NestPos = Fields.find ('enum')
    if NestPos != -1 and (NestPos + len('enum') < len(Fields)):
        if not Fields[NestPos + len('enum') + 1].isalnum():
            if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested enum in [%s].' % (Name), FileTable, TdId)
            return ErrMsgList
    if ModelId == DataClass.MODEL_IDENTIFIER_ENUMERATE:
        # Enum members are comma-separated.
        FieldsList = Fields.split(',')
        # deal with enum is pre-assigned a value by function call ( , , , ...)
        # Drop elements that fall inside a '(...)' call split across commas;
        # QuoteCount tracks the paren nesting depth across list elements.
        QuoteCount = 0
        Index = 0
        RemoveCurrentElement = False
        while Index < len(FieldsList):
            Field = FieldsList[Index]
            if Field.find('(') != -1:
                QuoteCount += 1
                RemoveCurrentElement = True
                Index += 1
                continue
            if Field.find(')') != -1 and QuoteCount > 0:
                QuoteCount -= 1
                if RemoveCurrentElement:
                    # Removing shifts the list left, so Index is not advanced.
                    FieldsList.remove(Field)
                    if QuoteCount == 0:
                        RemoveCurrentElement = False
                    continue
            if QuoteCount == 0:
                RemoveCurrentElement = False
            Index += 1
    else:
        # Struct/union members are semicolon-separated.
        FieldsList = Fields.split(';')
    for Field in FieldsList:
        Field = Field.strip()
        if Field == '':
            continue
        # For the condition that the field in struct is an array with [] suffixes...
        if Field[-1] == ']':
            LBPos = Field.find('[')
            Field = Field[0:LBPos]
        # For the condition that bit field ": Number"
        if Field.find(':') != -1:
            ColonPos = Field.find(':')
            Field = Field[0:ColonPos]
        Field = Field.strip()
        if Field == '':
            continue
        if Field.startswith("#"):
            continue
        # Enum could directly assign value to variable
        Field = Field.split('=')[0].strip()
        TokenList = Field.split()
        # Remove pointers before variable
        Token = TokenList[-1]
        if Token in ['OPTIONAL']:
            Token = TokenList[-2]
        if not Pattern.match(Token.lstrip('*')):
            ErrMsgList.append(Token.lstrip('*'))
    return ErrMsgList
def CheckDeclTypedefFormat(FullFileName, ModelId):
    """Check typedef usage and member naming for struct/union/enum declarations.

    ModelId selects which declaration kind (struct, enum, or union) is being
    checked and which error type is reported. Member names of matching
    typedefs and declarations are validated via CheckMemberVariableFormat;
    each declaration must also be typedef-ed (first looked up in this file,
    then in all included headers) with an UPPER-case typedef name.
    Returns None after reporting, or the error list on early exit.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Name, StartLine, EndLine, ID, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, ModelId)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    ResultList = []
    for Result in ResultSet:
        ResultList.append(Result)
    # Map the declaration kind to its specific error type.
    ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_ALL
    if ModelId == DataClass.MODEL_IDENTIFIER_STRUCTURE:
        ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION
    elif ModelId == DataClass.MODEL_IDENTIFIER_ENUMERATE:
        ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE
    elif ModelId == DataClass.MODEL_IDENTIFIER_UNION:
        ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE
    SqlStatement = """ select Modifier, Name, Value, StartLine, EndLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
    TdSet = Db.TblFile.Exec(SqlStatement)
    TdList = []
    for Td in TdSet:
        TdList.append(Td)
    # Check member variable name format that from typedefs of ONLY this file.
    for Td in TdList:
        Name = Td[1].strip()
        Value = Td[2].strip()
        # Only typedefs whose body kind matches ModelId are checked here.
        if Value.startswith('enum'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_ENUMERATE
        elif Value.startswith('struct'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_STRUCTURE
        elif Value.startswith('union'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_UNION
        else:
            continue
        if ValueModelId != ModelId:
            continue
        # Check member variable format.
        ErrMsgList = CheckMemberVariableFormat(Name, Value, FileTable, Td[5], ModelId)
        for ErrMsg in ErrMsgList:
            if EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Name + '.' + ErrMsg):
                continue
            PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Member variable [%s] NOT follow naming convention.' % (Name + '.' + ErrMsg), FileTable, Td[5])
    # First check in current file to see whether struct/union/enum is typedef-ed.
    UntypedefedList = []
    for Result in ResultList:
        # Check member variable format.
        Name = Result[0].strip()
        Value = Result[4].strip()
        if Value.startswith('enum'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_ENUMERATE
        elif Value.startswith('struct'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_STRUCTURE
        elif Value.startswith('union'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_UNION
        else:
            continue
        if ValueModelId != ModelId:
            continue
        ErrMsgList = CheckMemberVariableFormat(Name, Value, FileTable, Result[3], ModelId)
        for ErrMsg in ErrMsgList:
            if EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Result[0] + '.' + ErrMsg):
                continue
            PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Member variable [%s] NOT follow naming convention.' % (Result[0] + '.' + ErrMsg), FileTable, Result[3])
        # Check whether it is typedefed.
        # A typedef matches if its line range encloses the declaration, or
        # if the declaration name appears as a token in the typedef value.
        Found = False
        for Td in TdList:
            # skip function pointer
            if len(Td[0]) > 0:
                continue
            if Result[1] >= Td[3] and Td[4] >= Result[2]:
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Result[0] in Td[2].split():
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Found:
                break
        if not Found:
            UntypedefedList.append(Result)
        continue
    if len(UntypedefedList) == 0:
        return
    # Retry the unmatched declarations against typedefs from included headers.
    IncludeFileList = GetAllIncludeFiles(FullFileName)
    TdList = []
    for F in IncludeFileList:
        FileID = GetTableID(F, ErrorMsgList)
        if FileID < 0:
            continue
        IncludeFileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Modifier, Name, Value, StartLine, EndLine, ID
                           from %s
                           where Model = %d
                       """ % (IncludeFileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
        ResultSet = Db.TblFile.Exec(SqlStatement)
        TdList.extend(ResultSet)
    for Result in UntypedefedList:
        # Check whether it is typedefed.
        Found = False
        for Td in TdList:
            # skip function pointer typedefs (non-empty Modifier).
            if len(Td[0]) > 0:
                continue
            if Result[1] >= Td[3] and Td[4] >= Result[2]:
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Result[0] in Td[2].split():
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Found:
                break
        if not Found:
            PrintErrorMsg(ErrorType, 'No Typedef for %s' % Result[0], FileTable, Result[3])
        continue
def CheckDeclStructTypedef(FullFileName):
    """Check that struct declarations in FullFileName are typedef-ed and follow naming rules."""
    CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_STRUCTURE)
def CheckDeclEnumTypedef(FullFileName):
    """Check that enum declarations in FullFileName are typedef-ed and follow naming rules."""
    CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_ENUMERATE)
def CheckDeclUnionTypedef(FullFileName):
    """Check that union declarations in FullFileName are typedef-ed and follow naming rules."""
    CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_UNION)
def CheckDeclArgModifier(FullFileName):
    """Check for IN/OUT/OPTIONAL/UNALIGNED modifiers in the wrong position.

    Reports variable declarations, function-declaration return types and
    function-definition return types whose Modifier text contains one of
    the parameter-only modifiers. Returns the error message list on early
    exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    ModifierTuple = ('IN', 'OUT', 'OPTIONAL', 'UNALIGNED')
    MAX_MODIFIER_LENGTH = 100
    # Variable declarations: report at most once per record, and only when
    # the modifier text is of a plausible length.
    SqlStatement = """ select Modifier, Name, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
    for Record in Db.TblFile.Exec(SqlStatement):
        if len(Record[0]) < MAX_MODIFIER_LENGTH and any(PatternInModifier(Record[0], Modifier) for Modifier in ModifierTuple):
            PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Variable Modifier %s' % Record[0], FileTable, Record[2])
    # Function declaration return types.
    SqlStatement = """ select Modifier, Name, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    for Record in Db.TblFile.Exec(SqlStatement):
        if any(PatternInModifier(Record[0], Modifier) for Modifier in ModifierTuple):
            PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Return Type Modifier %s' % Record[0], FileTable, Record[2])
    # Function definition return types.
    SqlStatement = """ select Modifier, Header, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    for Record in Db.TblFile.Exec(SqlStatement):
        if any(PatternInModifier(Record[0], Modifier) for Modifier in ModifierTuple):
            # NOTE(review): the error is logged against FileTable even though
            # the ID comes from the Function table — kept as in the original.
            PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Return Type Modifier %s' % Record[0], FileTable, Record[2])
def CheckDeclNoUseCType(FullFileName):
    """Check that native C types are not used in declarations.

    Scans variable declarations, function declarations and function
    definitions for the C base types (int, unsigned, char, void, long),
    which EDK II code must replace with the UEFI typedefs. Returns the
    error message list on early exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    CTypeTuple = ('int', 'unsigned', 'char', 'void', 'long')
    # Pass 1: variable declarations.
    SqlStatement = """ select Modifier, Name, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
    for Record in Db.TblFile.Exec(SqlStatement):
        Definition = Record[0] + ' ' + Record[1]
        for Type in CTypeTuple:
            if not PatternInModifier(Record[0], Type):
                continue
            if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE,
                                                    Definition):
                continue
            PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE,
                          'Invalid variable type (%s) in definition [%s]' % (Type, Definition),
                          FileTable,
                          Record[2])
            break
    # Pass 2: function declarations — return type and each parameter.
    SqlStatement = """ select Modifier, Name, ID, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    for Record in Db.TblFile.Exec(SqlStatement):
        Params = GetParamList(Record[1])
        DeclName = Record[3]
        if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, DeclName):
            continue
        for Type in CTypeTuple:
            if PatternInModifier(Record[0], Type):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, '%s Return type %s' % (DeclName, Record[0]), FileTable, Record[2])
            for Param in Params:
                if PatternInModifier(Param.Modifier, Type):
                    PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Parameter %s' % Param.Name, FileTable, Record[2])
    # Pass 3: function definitions — return type and each parameter.
    SqlStatement = """ select Modifier, Header, ID, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    for Record in Db.TblFile.Exec(SqlStatement):
        Params = GetParamList(Record[1])
        DefName = Record[3]
        if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, DefName):
            continue
        for Type in CTypeTuple:
            if PatternInModifier(Record[0], Type):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, '[%s] Return type %s' % (DefName, Record[0]), FileTable, Record[2])
            for Param in Params:
                if PatternInModifier(Param.Modifier, Type):
                    PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Parameter %s' % Param.Name, FileTable, Record[2])
def CheckPointerNullComparison(FullFileName):
    """Check predicate expressions that use a pointer without comparing to NULL.

    For every predicate expression in FullFileName whose operand has no
    comparison operator (SplitPredicateStr returns None), resolve the
    operand's type; if it is a pointer type (contains '*', except BOOLEAN*)
    the expression is reported. Function-call return types are cached in
    FuncReturnTypeDict to avoid repeated lookups.
    Returns the error message list on early exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    # cache the found function return type to accelerate later checking in this file.
    FuncReturnTypeDict = {}
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value, StartLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return
    PSL = []
    for Result in ResultSet:
        PSL.append([Result[0], Result[1], Result[2]])
    # Function records are needed to locate the function containing each
    # predicate expression (for local-variable type resolution).
    SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FL = []
    for Result in ResultSet:
        FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])
    p = GetFuncDeclPattern()
    for Str in PSL:
        FuncRecord = GetFuncContainsPE(Str[1], FL)
        if FuncRecord is None:
            continue
        for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
            PredInfo = SplitPredicateStr(Exp)
            # Only expressions with NO comparison operator are of interest.
            if PredInfo[1] is None:
                PredVarStr = PredInfo[0][0].strip()
                IsFuncCall = False
                SearchInCache = False
                # PredVarStr may contain '.' or '->'
                TmpStr = PredVarStr.replace('.', '').replace('->', '')
                if p.match(TmpStr):
                    PredVarStr = PredVarStr[0:PredVarStr.find('(')]
                    SearchInCache = True
                    # Only direct function call using IsFuncCall branch. Multi-level ref. function call is considered a variable.
                    if TmpStr.startswith(PredVarStr):
                        IsFuncCall = True
                if PredVarStr.strip() in IgnoredKeywordList:
                    continue
                StarList = []
                PredVarList = GetCNameList(PredVarStr, StarList)
                # No variable found, maybe value first? like (0 == VarName)
                if len(PredVarList) == 0:
                    continue
                if SearchInCache:
                    Type = FuncReturnTypeDict.get(PredVarStr)
                    if Type is not None:
                        # Cache hit: report pointer types and skip re-resolution.
                        if Type.find('*') != -1 and Type != 'BOOLEAN*':
                            PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
                        continue
                    # A cached None means resolution already failed once.
                    if PredVarStr in FuncReturnTypeDict:
                        continue
                Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)
                if SearchInCache:
                    FuncReturnTypeDict[PredVarStr] = Type
                if Type is None:
                    continue
                Type = GetTypeFromArray(Type, PredVarStr)
                if Type.find('*') != -1 and Type != 'BOOLEAN*':
                    PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
def CheckNonBooleanValueComparison(FullFileName):
    """Check predicate expressions that use a non-BOOLEAN value as a condition.

    For every predicate expression whose operand has no comparison operator
    (SplitPredicateStr returns None), resolve the operand's type; if the
    type does not contain 'BOOLEAN' the expression is reported — such
    values should be compared explicitly. Function-call return types are
    cached in FuncReturnTypeDict to avoid repeated lookups.
    Returns the error message list on early exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    # cache the found function return type to accelerate later checking in this file.
    FuncReturnTypeDict = {}
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value, StartLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return
    PSL = []
    for Result in ResultSet:
        PSL.append([Result[0], Result[1], Result[2]])
    # Function records are needed to locate the function containing each
    # predicate expression (for local-variable type resolution).
    SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FL = []
    for Result in ResultSet:
        FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])
    p = GetFuncDeclPattern()
    for Str in PSL:
        FuncRecord = GetFuncContainsPE(Str[1], FL)
        if FuncRecord is None:
            continue
        for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
            PredInfo = SplitPredicateStr(Exp)
            # Only expressions with NO comparison operator are of interest.
            if PredInfo[1] is None:
                PredVarStr = PredInfo[0][0].strip()
                IsFuncCall = False
                SearchInCache = False
                # PredVarStr may contain '.' or '->'
                TmpStr = PredVarStr.replace('.', '').replace('->', '')
                if p.match(TmpStr):
                    PredVarStr = PredVarStr[0:PredVarStr.find('(')]
                    SearchInCache = True
                    # Only direct function call using IsFuncCall branch. Multi-level ref. function call is considered a variable.
                    if TmpStr.startswith(PredVarStr):
                        IsFuncCall = True
                if PredVarStr.strip() in IgnoredKeywordList:
                    continue
                StarList = []
                PredVarList = GetCNameList(PredVarStr, StarList)
                # No variable found, maybe value first? like (0 == VarName)
                if len(PredVarList) == 0:
                    continue
                if SearchInCache:
                    Type = FuncReturnTypeDict.get(PredVarStr)
                    if Type is not None:
                        # Cache hit: report non-BOOLEAN types and skip re-resolution.
                        if Type.find('BOOLEAN') == -1:
                            PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
                        continue
                    # A cached None means resolution already failed once.
                    if PredVarStr in FuncReturnTypeDict:
                        continue
                Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
                if SearchInCache:
                    FuncReturnTypeDict[PredVarStr] = Type
                if Type is None:
                    continue
                if Type.find('BOOLEAN') == -1:
                    PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
def CheckBooleanValueComparison(FullFileName):
    """Check predicate expressions that compare a BOOLEAN against TRUE/FALSE.

    For every predicate expression of the form 'X == TRUE', 'X != TRUE',
    'X == FALSE' or 'X != FALSE', resolve the type of X; if it contains
    'BOOLEAN' the comparison is reported — BOOLEANs should be used
    directly as conditions. Function-call return types are cached in
    FuncReturnTypeDict to avoid repeated lookups.
    Returns the error message list on early exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    # cache the found function return type to accelerate later checking in this file.
    FuncReturnTypeDict = {}
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value, StartLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return
    PSL = []
    for Result in ResultSet:
        PSL.append([Result[0], Result[1], Result[2]])
    # Function records are needed to locate the function containing each
    # predicate expression (for local-variable type resolution).
    SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FL = []
    for Result in ResultSet:
        FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])
    p = GetFuncDeclPattern()
    for Str in PSL:
        FuncRecord = GetFuncContainsPE(Str[1], FL)
        if FuncRecord is None:
            continue
        for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
            PredInfo = SplitPredicateStr(Exp)
            # Only ==/!= comparisons against TRUE/FALSE are of interest.
            if PredInfo[1] in ('==', '!=') and PredInfo[0][1] in ('TRUE', 'FALSE'):
                PredVarStr = PredInfo[0][0].strip()
                IsFuncCall = False
                SearchInCache = False
                # PredVarStr may contain '.' or '->'
                TmpStr = PredVarStr.replace('.', '').replace('->', '')
                if p.match(TmpStr):
                    PredVarStr = PredVarStr[0:PredVarStr.find('(')]
                    SearchInCache = True
                    # Only direct function call using IsFuncCall branch. Multi-level ref. function call is considered a variable.
                    if TmpStr.startswith(PredVarStr):
                        IsFuncCall = True
                if PredVarStr.strip() in IgnoredKeywordList:
                    continue
                StarList = []
                PredVarList = GetCNameList(PredVarStr, StarList)
                # No variable found, maybe value first? like (0 == VarName)
                if len(PredVarList) == 0:
                    continue
                if SearchInCache:
                    Type = FuncReturnTypeDict.get(PredVarStr)
                    if Type is not None:
                        # Cache hit: report BOOLEAN types and skip re-resolution.
                        if Type.find('BOOLEAN') != -1:
                            PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
                        continue
                    # A cached None means resolution already failed once.
                    if PredVarStr in FuncReturnTypeDict:
                        continue
                Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
                if SearchInCache:
                    FuncReturnTypeDict[PredVarStr] = Type
                if Type is None:
                    continue
                if Type.find('BOOLEAN') != -1:
                    PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
def CheckHeaderFileData(FullFileName, AllTypedefFun=None):
    """Check that a header file contains no variable or function definitions.

    FullFileName  -- path of the header file to check
    AllTypedefFun -- optional list of typedef-ed function pointer type texts;
                     a non-extern variable whose modifier matches one of them
                     is not reported (it is a function pointer declaration).

    Returns the (possibly empty) list of collected error messages.
    """
    # Fixed mutable default argument (was AllTypedefFun=[]): use the None
    # sentinel idiom so the default cannot be shared across calls.
    if AllTypedefFun is None:
        AllTypedefFun = []
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select ID, Modifier
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        if not Result[1].startswith('extern'):
            # for-else: the error fires only when the modifier matches none
            # of the known typedef-ed function pointer types.
            for Item in AllTypedefFun:
                if '(%s)' % Result[1] in Item:
                    break
            else:
                PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_DATA, 'Variable definition appears in header file', FileTable, Result[0])
    # Any function definition in a header is an error.
    SqlStatement = """ select ID
                       from Function
                       where BelongsToFile = %d
                   """ % FileID
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_DATA, 'Function definition appears in header file', 'Function', Result[0])
    return ErrorMsgList
def CheckHeaderFileIfndef(FullFileName):
    """Check the #ifndef include-guard layout of a header file.

    Reports when the guard is missing (IFNDEF_STATEMENT_1), when any
    non-comment item precedes the first #ifndef (IFNDEF_STATEMENT_2), or
    when any non-comment item follows the last #endif (IFNDEF_STATEMENT_3).
    Returns the (possibly empty) list of collected error messages.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value, StartLine
                       from %s
                       where Model = %d order by StartLine
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1, '', 'File', FileID)
        return ErrorMsgList
    # NOTE: the loop body re-binds ResultSet/Result and ends with 'break',
    # so only the first (earliest) #ifndef record is examined.
    for Result in ResultSet:
        SqlStatement = """ select Value, EndLine
                           from %s
                           where EndLine < %d
                       """ % (FileTable, Result[1])
        ResultSet = Db.TblFile.Exec(SqlStatement)
        for Result in ResultSet:
            # Only comments may appear before the guard.
            if not Result[0].startswith('/*') and not Result[0].startswith('//'):
                PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2, '', 'File', FileID)
        break
    # Only comments may appear after the last #endif.
    SqlStatement = """ select Value
                       from %s
                       where StartLine > (select max(EndLine) from %s where Model = %d)
                   """ % (FileTable, FileTable, DataClass.MODEL_IDENTIFIER_MACRO_ENDIF)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        if not Result[0].startswith('/*') and not Result[0].startswith('//'):
            PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3, '', 'File', FileID)
    return ErrorMsgList
def CheckDoxygenCommand(FullFileName):
    """Check doxygen commands used in comments against the supported set.

    Scans comment and function-header records for '@'-prefixed tags and
    reports unknown ones; bare 'BUGBUG'/'TODO' markers are reported as
    needing the @bug/@todo tags. Returns the error message list on early
    exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value, ID
                       from %s
                       where Model = %d or Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)
    DoxygenCommandList = ['bug', 'todo', 'example', 'file', 'attention', 'param', 'post', 'pre', 'retval',
                          'return', 'sa', 'since', 'test', 'note', 'par', 'endcode', 'code']
    for Record in Db.TblFile.Exec(SqlStatement):
        CommentID = Record[1]
        for Part in Record[0].split():
            if Part.upper() == 'BUGBUG':
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Bug should be marked with doxygen tag @bug', FileTable, CommentID)
            if Part.upper() == 'TODO':
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'ToDo should be marked with doxygen tag @todo', FileTable, CommentID)
            if not Part.startswith('@'):
                continue
            if EccGlobalData.gException.IsException(ERROR_DOXYGEN_CHECK_COMMAND, Part):
                continue
            # Skip runs of '@' with no command, and doxygen grouping braces.
            if not Part.replace('@', '').strip():
                continue
            Tag = Part.lstrip('@')
            if Tag in ['{', '}']:
                continue
            if Tag.isalpha():
                if Tag not in DoxygenCommandList:
                    PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, CommentID)
            else:
                # Commands like '@param[in]' carry an attribute in brackets.
                Index = Part.find('[')
                if Index == -1:
                    PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, CommentID)
                RealCmd = Part[1:Index]
                if RealCmd not in DoxygenCommandList:
                    PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, CommentID)
def CheckDoxygenTripleForwardSlash(FullFileName):
    """Check that '///<' trailing doxygen comments are not placed inside
    function bodies.

    A comment starting with '///<' that both starts and ends strictly
    inside a function's body range (line/column) is reported.
    Returns the error message list on early exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    SqlStatement = """ select ID, BodyStartLine, BodyStartColumn, EndLine, EndColumn
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    FuncDefSet = Db.TblFile.Exec(SqlStatement)
    if len(FuncDefSet) == 0:
        return
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value, ID, StartLine, StartColumn, EndLine, EndColumn
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
    CommentSet = []
    try:
        CommentSet = list(Db.TblFile.Exec(SqlStatement))
    except:
        print('Unrecognized chars in comment of file %s', FullFileName)
    for Comment in CommentSet:
        if not Comment[0].startswith('///<'):
            continue
        StartLine, StartColumn, EndLine, EndColumn = Comment[2], Comment[3], Comment[4], Comment[5]
        for FuncDef in FuncDefSet:
            BodyStartLine, BodyStartColumn, BodyEndLine, BodyEndColumn = FuncDef[1], FuncDef[2], FuncDef[3], FuncDef[4]
            # The comment is inside the body when it starts after the body's
            # opening position and ends before its closing position.
            StartsInside = StartLine > BodyStartLine or (StartLine == BodyStartLine and StartColumn > BodyStartColumn)
            EndsInside = EndLine < BodyEndLine or (EndLine == BodyEndLine and EndColumn < BodyEndColumn)
            if StartsInside and EndsInside:
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_FORMAT, '', FileTable, Comment[1])
                break
def CheckFileHeaderDoxygenComments(FullFileName):
    """Check the doxygen file header comment of a source file.

    Verifies that the file starts with a '/** @file' header ending with
    '**/', that it contains a Copyright line (with 'All rights reserved'
    on the same line and '<BR>' at its end), that a License paragraph
    follows the Copyright, and that '@par Revision Reference:' list
    entries begin with a bullet. Returns the error message list on early
    exit; otherwise None.
    """
    ErrorMsgList = []
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList
    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # The header comment may start at line 1, or at line 7/8 (after a
    # license block); it must start at column 0.
    SqlStatement = """ select Value, ID
                       from %s
                       where Model = %d and (StartLine = 1 or StartLine = 7 or StartLine = 8) and StartColumn = 0
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No File License header appear at the very beginning of file.', 'File', FileID)
        return ErrorMsgList
    NoHeaderCommentStartFlag = True
    NoHeaderCommentEndFlag = True
    NoHeaderCommentPeriodFlag = True
    NoCopyrightFlag = True
    NoLicenseFlag = True
    NoRevReferFlag = True
    NextLineIndex = 0
    for Result in ResultSet:
        FileStartFlag = False
        CommentStrList = []
        CommentStr = Result[0].strip()
        CommentStrListTemp = CommentStr.split('\n')
        if (len(CommentStrListTemp) <= 1):
            # For Mac
            CommentStrListTemp = CommentStr.split('\r')
        # Skip the content before the file header
        for CommentLine in CommentStrListTemp:
            if CommentLine.strip().startswith('/** @file'):
                FileStartFlag = True
            if FileStartFlag == True:
                CommentStrList.append(CommentLine)
        ID = Result[1]
        Index = 0
        if CommentStrList and CommentStrList[0].strip().startswith('/** @file'):
            NoHeaderCommentStartFlag = False
        else:
            continue
        if CommentStrList and CommentStrList[-1].strip().endswith('**/'):
            NoHeaderCommentEndFlag = False
        else:
            continue
        for CommentLine in CommentStrList:
            Index = Index + 1
            NextLineIndex = Index
            if CommentLine.startswith('/** @file'):
                continue
            if CommentLine.startswith('**/'):
                break
            # Check whether C File header Comment content start with two spaces.
            if EccGlobalData.gConfig.HeaderCheckCFileCommentStartSpacesNum == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
                if CommentLine.startswith('/** @file') == False and CommentLine.startswith('**/') == False and CommentLine.strip() and CommentLine.startswith('  ') == False:
                    PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment content should start with two spaces at each line', FileTable, ID)
            CommentLine = CommentLine.strip()
            if CommentLine.startswith('Copyright') or ('Copyright' in CommentLine and CommentLine.lower().startswith('(c)')):
                NoCopyrightFlag = False
                if CommentLine.find('All rights reserved') == -1:
                    for Copyright in EccGlobalData.gConfig.Copyright:
                        if CommentLine.find(Copyright) > -1:
                            PrintErrorMsg(ERROR_HEADER_CHECK_FILE, '""All rights reserved"" announcement should be following the ""Copyright"" at the same line', FileTable, ID)
                            break
                # BUG FIX: was "CommentLine.endswith('<BR>') == -1" — a bool is
                # never equal to -1, so the missing-<BR> error could never fire.
                if not CommentLine.endswith('<BR>'):
                    PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'The ""<BR>"" at the end of the Copyright line is required', FileTable, ID)
                if NextLineIndex < len(CommentStrList) and CommentStrList[NextLineIndex].strip().startswith('Copyright') == False and CommentStrList[NextLineIndex].strip():
                    NoLicenseFlag = False
            if CommentLine.startswith('@par Revision Reference:'):
                NoRevReferFlag = False
                RefListFlag = False
                for RefLine in CommentStrList[NextLineIndex:]:
                    if RefLine.strip() and (NextLineIndex + 1) < len(CommentStrList) and CommentStrList[NextLineIndex+1].strip() and CommentStrList[NextLineIndex+1].strip().startswith('**/') == False:
                        RefListFlag = True
                    # BUG FIX: was "RefLine.strip() == False" — a str never equals
                    # False, so a blank line did not terminate the reference list.
                    if not RefLine.strip() or RefLine.strip().startswith('**/'):
                        RefListFlag = False
                        break
                    # Check whether C File header Comment's each reference at list should begin with a bullet character.
                    if EccGlobalData.gConfig.HeaderCheckCFileCommentReferenceFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
                        if RefListFlag == True:
                            if RefLine.strip() and RefLine.strip().startswith('**/') == False and RefLine.startswith('  -') == False:
                                PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'Each reference on a separate line should begin with a bullet character ""-"" ', FileTable, ID)
    if NoHeaderCommentStartFlag:
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)
        return
    if NoHeaderCommentEndFlag:
        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with ""**/""', FileTable, ID)
        return
    if NoCopyrightFlag:
        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment missing the ""Copyright""', FileTable, ID)
    #Check whether C File header Comment have the License immediately after the ""Copyright"" line.
    if EccGlobalData.gConfig.HeaderCheckCFileCommentLicenseFormat == '1' or EccGlobalData.gConfig.HeaderCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
        if NoLicenseFlag:
            PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should have the License immediately after the ""Copyright"" line', FileTable, ID)
def CheckFuncHeaderDoxygenComments(FullFileName):
    """Check that every function declaration and definition in a file has a
    doxygen comment immediately preceding it.

    For each function found in the identifier/function tables of the file's
    database, looks up a comment that ends on the line directly above the
    function and, when present, verifies it is consistent with the function
    header.  Errors are appended to the returned list and also reported via
    PrintErrorMsg.

    @param FullFileName  Path of the source file to check
    @return  List of error message strings collected during the check
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # Gather all plain comments of the file.
    SqlStatement = """ select Value, StartLine, EndLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    CommentSet = []
    try:
        for Result in ResultSet:
            CommentSet.append(Result)
    except Exception:
        # Fix: the original passed FullFileName as a second argument to
        # print() instead of %-formatting it into the message.
        print('Unrecognized chars in comment of file %s' % FullFileName)

    # Func Decl check
    SqlStatement = """ select Modifier, Name, StartLine, ID, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[4]
        FunctionHeaderComment = CheckCommentImmediatelyPrecedeFunctionHeader(Result[1], Result[2], CommentSet)
        if FunctionHeaderComment:
            CheckFunctionHeaderConsistentWithDoxygenComment(Result[0], Result[1], Result[2], FunctionHeaderComment[0], FunctionHeaderComment[1], ErrorMsgList, FunctionHeaderComment[3], FileTable)
        else:
            if EccGlobalData.gException.IsException(ERROR_HEADER_CHECK_FUNCTION, FuncName):
                continue
            # Consistency fix: use the same 'Function [%s]' wording as the
            # function-definition check below.
            ErrorMsgList.append('Line %d :Function [%s] has NO comment immediately preceding it.' % (Result[2], Result[1]))
            PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'Function [%s] has NO comment immediately preceding it.' % (FuncName), FileTable, Result[3])

    # Func Def check
    SqlStatement = """ select Value, StartLine, EndLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    CommentSet = []
    try:
        for Result in ResultSet:
            CommentSet.append(Result)
    except Exception:
        print('Unrecognized chars in comment of file %s' % FullFileName)

    SqlStatement = """ select Modifier, Header, StartLine, ID, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[4]
        FunctionHeaderComment = CheckCommentImmediatelyPrecedeFunctionHeader(Result[1], Result[2], CommentSet)
        if FunctionHeaderComment:
            CheckFunctionHeaderConsistentWithDoxygenComment(Result[0], Result[1], Result[2], FunctionHeaderComment[0], FunctionHeaderComment[1], ErrorMsgList, FunctionHeaderComment[3], FileTable)
        else:
            if EccGlobalData.gException.IsException(ERROR_HEADER_CHECK_FUNCTION, FuncName):
                continue
            ErrorMsgList.append('Line %d :Function [%s] has NO comment immediately preceding it.' % (Result[2], Result[1]))
            PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'Function [%s] has NO comment immediately preceding it.' % (FuncName), 'Function', Result[3])
    return ErrorMsgList
def CheckCommentImmediatelyPrecedeFunctionHeader(FuncName, FuncStartLine, CommentSet):
    """Return the comment record whose end line is directly above the function
    header at FuncStartLine, or None when no such comment exists.

    Each record in CommentSet is indexed as (Value, StartLine, EndLine, ID);
    only the EndLine field (index 2) is examined.  FuncName is accepted for
    interface compatibility but not used.
    """
    TargetEndLine = FuncStartLine - 1
    return next((Record for Record in CommentSet if Record[2] == TargetEndLine), None)
def GetDoxygenStrFromComment(Str):
    """Split a comment string into its doxygen tag fragments.

    Fragments are produced in order for '@param', then '@retval', then
    '@return' tags; each fragment keeps its tag prefix and the text up to the
    next tag.  Trailing '-', '*' and '/' characters (the comment terminator)
    are stripped from the last fragment.  Returns a (possibly empty) list.
    """
    TagList = []

    ParamParts = Str.split('@param')
    for Part in ParamParts[1:]:
        TagList.append('@param' + Part)
    Str = ParamParts[0]

    RetvalParts = ParamParts[-1].split('@retval')
    if len(RetvalParts) > 1:
        # Trim the last @param fragment so it no longer swallows retval text.
        if len(ParamParts) > 1:
            TagList[-1] = '@param' + RetvalParts[0]
        for Part in RetvalParts[1:]:
            TagList.append('@retval' + Part)

    ReturnParts = RetvalParts[-1].split('@return')
    if len(ReturnParts) > 1:
        # Likewise re-trim whichever tag preceded the first @return.
        if len(RetvalParts) > 1:
            TagList[-1] = '@retval' + ReturnParts[0]
        elif len(ParamParts) > 1:
            TagList[-1] = '@param' + ReturnParts[0]
        for Part in ReturnParts[1:]:
            TagList.append('@return' + Part)

    if TagList:
        # Drop the '**/' (and any stray '-') comment terminator characters.
        TagList[-1] = TagList[-1].rstrip('--*/')
    return TagList
def CheckGeneralDoxygenCommentLayout(Str, StartLine, ErrorMsgList, CommentId= -1, TableName=''):
    """Check the overall layout of a doxygen function-header comment.

    The comment must start with '/**', end with '**/', and every '@param'
    tag must come before the first '@retval' tag.  Violations are appended
    to ErrorMsgList and reported via PrintErrorMsg.
    """
    HasPrefix = Str.startswith('/**')
    HasTail = Str.endswith('**/')
    if not HasPrefix:
        ErrorMsgList.append('Line %d : Comment does NOT have prefix /** ' % StartLine)
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Comment does NOT have prefix /** ', TableName, CommentId)
    if not HasTail:
        ErrorMsgList.append('Line %d : Comment does NOT have tail **/ ' % StartLine)
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Comment does NOT have tail **/ ', TableName, CommentId)
    # Position of the first @retval versus the last @param decides ordering.
    RetvalPos = Str.find('@retval')
    ParamPos = Str.rfind('@param')
    if 0 < RetvalPos < ParamPos:
        ErrorMsgList.append('Line %d : @retval appear before @param ' % StartLine)
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, @retval appear before @param ', TableName, CommentId)
def CheckFunctionHeaderConsistentWithDoxygenComment(FuncModifier, FuncHeader, FuncStartLine, CommentStr, CommentStartLine, ErrorMsgList, CommentId= -1, TableName=''):
    """Check that a function's doxygen comment matches its header.

    Validates the general comment layout, then walks the function's
    parameters in parallel with the comment's doxygen tags: each @param tag
    must end with a newline, carry an [in]/[out]/[in, out] marker matching
    the parameter's IN/OUT modifiers, and mention the parameter name.
    Finally the tag count is compared against the parameter count and the
    (VOID or non-VOID) return type.  Errors are appended to ErrorMsgList
    and reported via PrintErrorMsg.

    GetParamList is defined elsewhere in this file; presumably it parses the
    header into objects with .Name and .Modifier attributes — the loop below
    relies on exactly those two fields.
    """
    ParamList = GetParamList(FuncHeader)
    CheckGeneralDoxygenCommentLayout(CommentStr, CommentStartLine, ErrorMsgList, CommentId, TableName)
    DescriptionStr = CommentStr
    DoxygenStrList = GetDoxygenStrFromComment(DescriptionStr)
    # The description must contain at least one sentence-ending period.
    if DescriptionStr.find('.') == -1:
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', TableName, CommentId)
    DoxygenTagNumber = len(DoxygenStrList)
    ParamNumber = len(ParamList)
    # A single VOID parameter means the function takes no arguments.
    for Param in ParamList:
        if Param.Name.upper() == 'VOID' and ParamNumber == 1:
            ParamNumber -= 1
    Index = 0
    if ParamNumber > 0 and DoxygenTagNumber > 0:
        # Walk parameters and doxygen tags pairwise.
        while Index < ParamNumber and Index < DoxygenTagNumber:
            ParamModifier = ParamList[Index].Modifier
            ParamName = ParamList[Index].Name.strip()
            Tag = DoxygenStrList[Index].strip(' ')

            # Each tag fragment must terminate with a newline character.
            if (not Tag[-1] == ('\n')) and (not Tag[-1] == ('\r')):
                ErrorMsgList.append('Line %d : in Comment, <%s> does NOT end with new line ' % (CommentStartLine, Tag.replace('\n', '').replace('\r', '')))
                PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'in Comment, <%s> does NOT end with new line ' % (Tag.replace('\n', '').replace('\r', '')), TableName, CommentId)

            # A tag must have content beyond the tag keyword itself.
            TagPartList = Tag.split()
            if len(TagPartList) < 2:
                ErrorMsgList.append('Line %d : in Comment, <%s> does NOT contain doxygen contents ' % (CommentStartLine, Tag.replace('\n', '').replace('\r', '')))
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> does NOT contain doxygen contents ' % (Tag.replace('\n', '').replace('\r', '')), TableName, CommentId)
                Index += 1
                continue

            # If the tag carries a [...] marker right after '@param', derive the
            # expected marker text from the parameter's IN/OUT modifiers.
            LBPos = Tag.find('[')
            RBPos = Tag.find(']')
            ParamToLBContent = Tag[len('@param'):LBPos].strip()
            if LBPos > 0 and len(ParamToLBContent) == 0 and RBPos > LBPos:
                InOutStr = ''
                ModifierPartList = ParamModifier.split()
                for Part in ModifierPartList:
                    if Part.strip() == 'IN':
                        InOutStr += 'in'
                    if Part.strip() == 'OUT':
                        if InOutStr != '':
                            InOutStr += ', out'
                        else:
                            InOutStr = 'out'
                if InOutStr != '':
                    if Tag.find('[' + InOutStr + ']') == -1:
                        if InOutStr != 'in, out':
                            ErrorMsgList.append('Line %d : in Comment, <%s> does NOT have %s ' % (CommentStartLine, (TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'))
                            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> does NOT have %s ' % ((TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'), TableName, CommentId)
                        else:
                            # '[in,out]' (no space) is also accepted for IN OUT params.
                            if Tag.find('[in,out]') == -1:
                                ErrorMsgList.append('Line %d : in Comment, <%s> does NOT have %s ' % (CommentStartLine, (TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'))
                                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> does NOT have %s ' % ((TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), '[' + InOutStr + ']'), TableName, CommentId)

            # The tag text must mention the parameter's name (VOID excepted).
            if Tag.find(ParamName) == -1 and ParamName != 'VOID' and ParamName != 'void':
                ErrorMsgList.append('Line %d : in Comment, <%s> is NOT consistent with parameter name %s ' % (CommentStartLine, (TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), ParamName))
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, <%s> is NOT consistent with parameter name %s ' % ((TagPartList[0] + ' ' + TagPartList[1]).replace('\n', '').replace('\r', ''), ParamName), TableName, CommentId)
            Index += 1

        if Index < ParamNumber:
            ErrorMsgList.append('Line %d : Number of doxygen tags in comment less than number of function parameters' % CommentStartLine)
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Number of doxygen tags in comment less than number of function parameters ', TableName, CommentId)
        # VOID return type, NOT VOID*. VOID* should be matched with a doxygen tag.
        if (FuncModifier.find('VOID') != -1 or FuncModifier.find('void') != -1) and FuncModifier.find('*') == -1:

            # assume we allow a return description tag for void func. return. that's why 'DoxygenTagNumber - 1' is used instead of 'DoxygenTagNumber'
            if Index < DoxygenTagNumber - 1 or (Index < DoxygenTagNumber and DoxygenStrList[Index].startswith('@retval')):
                ErrorMsgList.append('Line %d : VOID return type need NO doxygen tags in comment' % CommentStartLine)
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'VOID return type need no doxygen tags in comment ', TableName, CommentId)
        else:
            if Index < DoxygenTagNumber and not DoxygenStrList[Index].startswith('@retval') and not DoxygenStrList[Index].startswith('@return'):
                ErrorMsgList.append('Line %d : Number of @param doxygen tags in comment does NOT match number of function parameters' % CommentStartLine)
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Number of @param doxygen tags in comment does NOT match number of function parameters ', TableName, CommentId)
    else:
        # No pairwise walk possible: flag mismatches between "no params" and
        # "has tags" (for VOID returns) and vice versa.
        if ParamNumber == 0 and DoxygenTagNumber != 0 and ((FuncModifier.find('VOID') != -1 or FuncModifier.find('void') != -1) and FuncModifier.find('*') == -1):
            ErrorMsgList.append('Line %d : VOID return type need NO doxygen tags in comment' % CommentStartLine)
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'VOID return type need NO doxygen tags in comment ', TableName, CommentId)
        if ParamNumber != 0 and DoxygenTagNumber == 0:
            ErrorMsgList.append('Line %d : No doxygen tags in comment' % CommentStartLine)
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'No doxygen tags in comment ', TableName, CommentId)
if __name__ == '__main__':
    # Bug fix: this script block uses 'sys' but the module never imports it
    # (see the import block at the top of the file), so running the file
    # directly raised NameError.  Import it locally for the CLI path only.
    import sys

    # Manual test entry point: check one file's function-header comments.
    try:
        test_file = sys.argv[1]
    except IndexError:
        print("Usage: %s filename" % sys.argv[0])
        sys.exit(1)
    MsgList = CheckFuncHeaderDoxygenComments(test_file)
    for Msg in MsgList:
        print(Msg)
    print('Done!')
| edk2-master | BaseTools/Source/Python/Ecc/c.py |
## @file
# This file is used to save global datas used by ECC tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import Common.LongFilePathOs as os

# Shared module-level state for the ECC tool.  These globals start out empty
# and are presumably populated by the tool's entry point before the checkers
# read them — TODO confirm against Ecc main.
gWorkspace = ''            # workspace directory path
gTarget = ''               # target path/module being scanned
gConfig = None             # parsed configuration object
gDb = None                 # shared database wrapper used by the checkers
gIdentifierTableList = []  # per-file identifier table names
gCFileList = []            # discovered C source files
gHFileList = []            # discovered header files
gUFileList = []            # discovered unicode (.uni) files
gException = None          # exception-list checker (suppresses known issues)
| edk2-master | BaseTools/Source/Python/Ecc/EccGlobalData.py |
## @file
# This file is used to be the warning class of ECC tool
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## The exception class that used to report error messages when preprocessing
#
# Currently the "ToolName" is set to be "ECC PP".
#
class Warning (Exception):
    ## Constructor.
    #
    # Records the message text together with the file and line where the
    # preprocessing problem was found; the reporting tool name is fixed
    # to 'ECC PP'.
    #
    # @param self The object pointer
    # @param Str  The message to record
    # @param File The FDF name
    # @param Line The line number at which the error occurred
    #
    def __init__(self, Str, File = None, Line = None):
        self.ToolName = 'ECC PP'
        self.LineNumber = Line
        self.FileName = File
        self.message = Str
| edk2-master | BaseTools/Source/Python/Ecc/ParserWarning.py |
## @file
# preprocess source file
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import re
import Common.LongFilePathOs as os
import sys
if sys.version_info.major == 3:
import antlr4 as antlr
from Ecc.CParser4.CLexer import CLexer
from Ecc.CParser4.CParser import CParser
else:
import antlr3 as antlr
antlr.InputStream = antlr.StringStream
from Ecc.CParser3.CLexer import CLexer
from Ecc.CParser3.CParser import CParser
from Ecc import FileProfile
from Ecc.CodeFragment import Comment
from Ecc.CodeFragment import PP_Directive
from Ecc.ParserWarning import Warning
##define T_CHAR_SPACE ' '
##define T_CHAR_NULL '\0'
##define T_CHAR_CR '\r'
##define T_CHAR_TAB '\t'
##define T_CHAR_LF '\n'
##define T_CHAR_SLASH '/'
##define T_CHAR_BACKSLASH '\\'
##define T_CHAR_DOUBLE_QUOTE '\"'
##define T_CHAR_SINGLE_QUOTE '\''
##define T_CHAR_STAR '*'
##define T_CHAR_HASH '#'
# Single-character constants used by the scanner; values correspond
# one-to-one with the ##define notes above.
(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')
# Characters treated as item separators (name keeps the historical
# 'SEPERATOR' spelling used elsewhere in the tool).
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
# Comment kinds: '//' line comments vs. '/* */' block comments.
(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)
# Preprocessor directive kinds.
(T_PP_INCLUDE, T_PP_DEFINE, T_PP_OTHERS) = (0, 1, 2)
## The collector for source code fragments.
#
# PreprocessFile method should be called prior to ParseFile
#
# GetNext*** procedures mean these procedures will get next token first, then make judgement.
# Get*** procedures mean these procedures will make judgement on current token only.
#
class CodeFragmentCollector:
    ## The constructor
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.Profile = FileProfile.FileProfile(FileName)
        self.Profile.FileLinesList.append(T_CHAR_LF)
        self.FileName = FileName
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0
        self.TokenReleaceList = []

        self.__Token = ""
        self.__SkippedChars = ""

    ## __EndOfFile() method
    #
    # Judge current buffer pos is at file end
    #
    # @param self The object pointer
    # @retval True Current File buffer position is at file end
    # @retval False Current File buffer position is NOT at file end
    #
    def __EndOfFile(self):
        NumberOfLines = len(self.Profile.FileLinesList)
        SizeOfLastLine = NumberOfLines
        if NumberOfLines > 0:
            SizeOfLastLine = len(self.Profile.FileLinesList[-1])

        if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
            return True
        elif self.CurrentLineNumber > NumberOfLines:
            return True
        else:
            return False

    ## __EndOfLine() method
    #
    # Judge current buffer pos is at line end
    #
    # @param self The object pointer
    # @retval True Current File buffer position is at line end
    # @retval False Current File buffer position is NOT at line end
    #
    def __EndOfLine(self):
        SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
        if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
            return True
        else:
            return False

    ## Rewind() method
    #
    # Reset file data buffer to the initial state
    #
    # @param self The object pointer
    #
    def Rewind(self):
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0

    ## __UndoOneChar() method
    #
    # Go back one char in the file buffer
    #
    # @param self The object pointer
    # @retval True Successfully go back one char
    # @retval False Not able to go back one char as file beginning reached
    #
    def __UndoOneChar(self):
        if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
            return False
        elif self.CurrentOffsetWithinLine == 0:
            self.CurrentLineNumber -= 1
            self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
        else:
            self.CurrentOffsetWithinLine -= 1
        return True

    ## __GetOneChar() method
    #
    # Move forward one char in the file buffer
    #
    # @param self The object pointer
    #
    def __GetOneChar(self):
        if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
            self.CurrentLineNumber += 1
            self.CurrentOffsetWithinLine = 0
        else:
            self.CurrentOffsetWithinLine += 1

    ## __CurrentChar() method
    #
    # Get the char pointed to by the file buffer pointer
    #
    # @param self The object pointer
    # @retval Char Current char
    #
    def __CurrentChar(self):
        CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]

        return CurrentChar

    ## __NextChar() method
    #
    # Get the one char pass the char pointed to by the file buffer pointer
    #
    # @param self The object pointer
    # @retval Char Next char
    #
    def __NextChar(self):
        if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
            return self.Profile.FileLinesList[self.CurrentLineNumber][0]
        else:
            return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]

    ## __SetCurrentCharValue() method
    #
    # Modify the value of current char
    #
    # @param self The object pointer
    # @param Value The new value of current char
    #
    def __SetCurrentCharValue(self, Value):
        self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value

    ## __SetCharValue() method
    #
    # Modify the value of the char at the given line/offset position
    # (doc fix: the original comment wrongly said "current char")
    #
    # @param self The object pointer
    # @param Line The 1-based line number of the char to modify
    # @param Offset The 0-based offset of the char within the line
    # @param Value The new value of the char
    #
    def __SetCharValue(self, Line, Offset, Value):
        self.Profile.FileLinesList[Line - 1][Offset] = Value

    ## __CurrentLine() method
    #
    # Get the list that contains current line contents
    #
    # @param self The object pointer
    # @retval List current line contents
    #
    def __CurrentLine(self):
        return self.Profile.FileLinesList[self.CurrentLineNumber - 1]

    ## __InsertComma() method
    #
    # Insert a ',' before a preprocess directive when the surrounding lines
    # do not already provide a list separator.
    #
    # @param self The object pointer
    # @param Line The 1-based line number to consider
    #
    def __InsertComma(self, Line):

        if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
            BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
            # Bug fix: the original referenced T_CHAR_COMMA, which is never
            # defined anywhere in this module and raised NameError at runtime.
            if BeforeHashPart.rstrip().endswith(',') or BeforeHashPart.rstrip().endswith(';'):
                return

        if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
            return

        if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
            return

        if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
            return

        self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')

    ## PreprocessFile() method
    #
    # Preprocess file contents, replace comments with spaces.
    # In the end, rewind the file buffer pointer to the beginning
    # BUGBUG: No !include statement processing contained in this procedure
    # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
    #
    # @param self The object pointer
    #
    def PreprocessFile(self):

        self.Rewind()
        InComment = False
        DoubleSlashComment = False
        HashComment = False
        PPExtend = False
        CommentObj = None
        PPDirectiveObj = None
        # HashComment in quoted string " " is ignored.
        InString = False
        InCharLiteral = False

        self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
        while not self.__EndOfFile():

            if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
                InString = not InString

            if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
                InCharLiteral = not InCharLiteral
            # meet new line, then no longer in a comment for // and '#'
            if self.__CurrentChar() == T_CHAR_LF:
                if HashComment and PPDirectiveObj is not None:
                    if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
                        PPDirectiveObj.Content += T_CHAR_LF
                        PPExtend = True
                    else:
                        PPExtend = False

                EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)

                if InComment and DoubleSlashComment:
                    InComment = False
                    DoubleSlashComment = False
                    CommentObj.Content += T_CHAR_LF
                    CommentObj.EndPos = EndLinePos
                    FileProfile.CommentList.append(CommentObj)
                    CommentObj = None
                if InComment and HashComment and not PPExtend:
                    InComment = False
                    HashComment = False
                    PPDirectiveObj.Content += T_CHAR_LF
                    PPDirectiveObj.EndPos = EndLinePos
                    FileProfile.PPDirectiveList.append(PPDirectiveObj)
                    PPDirectiveObj = None

                if InString or InCharLiteral:
                    CurrentLine = "".join(self.__CurrentLine())
                    if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
                        SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
                        self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)

                if InComment and not DoubleSlashComment and not HashComment:
                    CommentObj.Content += T_CHAR_LF
                self.CurrentLineNumber += 1
                self.CurrentOffsetWithinLine = 0
            # check for */ comment end
            elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
                CommentObj.Content += self.__CurrentChar()
                self.__GetOneChar()
                CommentObj.Content += self.__CurrentChar()
                CommentObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
                FileProfile.CommentList.append(CommentObj)
                CommentObj = None
                self.__GetOneChar()
                InComment = False
            # set comments to spaces
            elif InComment:
                if HashComment:
                    # // follows hash PP directive
                    if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
                        InComment = False
                        HashComment = False
                        PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
                        FileProfile.PPDirectiveList.append(PPDirectiveObj)
                        PPDirectiveObj = None
                        continue
                    else:
                        PPDirectiveObj.Content += self.__CurrentChar()
                        if PPExtend:
                            self.__SetCurrentCharValue(T_CHAR_SPACE)
                else:
                    CommentObj.Content += self.__CurrentChar()
                self.__GetOneChar()
            # check for // comment
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
                InComment = True
                DoubleSlashComment = True
                CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_TWO_SLASH)
            # check for '#' comment
            elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
                InComment = True
                HashComment = True
                PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
            # check for /* comment start
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
                CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_SLASH_STAR)
                CommentObj.Content += self.__CurrentChar()
                self.__GetOneChar()
                CommentObj.Content += self.__CurrentChar()
                self.__GetOneChar()
                InComment = True
            else:
                self.__GetOneChar()

        EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)

        if InComment and DoubleSlashComment:
            CommentObj.EndPos = EndLinePos
            FileProfile.CommentList.append(CommentObj)
        if InComment and HashComment and not PPExtend:
            PPDirectiveObj.EndPos = EndLinePos
            FileProfile.PPDirectiveList.append(PPDirectiveObj)
        self.Rewind()

    ## PreprocessFileWithClear() method
    #
    # Variant of PreprocessFile() that additionally blanks out the text of
    # /* */ and // comments in the in-memory buffer (replacing it with
    # spaces) while still recording Comment and PP_Directive objects.
    # Rewinds the file buffer pointer to the beginning when done.
    #
    # @param self The object pointer
    #
    def PreprocessFileWithClear(self):

        self.Rewind()
        InComment = False
        DoubleSlashComment = False
        HashComment = False
        PPExtend = False
        CommentObj = None
        PPDirectiveObj = None
        # HashComment in quoted string " " is ignored.
        InString = False
        InCharLiteral = False

        self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
        while not self.__EndOfFile():

            if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
                InString = not InString

            if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
                InCharLiteral = not InCharLiteral
            # meet new line, then no longer in a comment for // and '#'
            if self.__CurrentChar() == T_CHAR_LF:
                if HashComment and PPDirectiveObj is not None:
                    if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
                        PPDirectiveObj.Content += T_CHAR_LF
                        PPExtend = True
                    else:
                        PPExtend = False

                EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)

                if InComment and DoubleSlashComment:
                    InComment = False
                    DoubleSlashComment = False
                    CommentObj.Content += T_CHAR_LF
                    CommentObj.EndPos = EndLinePos
                    FileProfile.CommentList.append(CommentObj)
                    CommentObj = None
                if InComment and HashComment and not PPExtend:
                    InComment = False
                    HashComment = False
                    PPDirectiveObj.Content += T_CHAR_LF
                    PPDirectiveObj.EndPos = EndLinePos
                    FileProfile.PPDirectiveList.append(PPDirectiveObj)
                    PPDirectiveObj = None

                if InString or InCharLiteral:
                    CurrentLine = "".join(self.__CurrentLine())
                    if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
                        SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
                        self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)

                if InComment and not DoubleSlashComment and not HashComment:
                    CommentObj.Content += T_CHAR_LF
                self.CurrentLineNumber += 1
                self.CurrentOffsetWithinLine = 0
            # check for */ comment end
            elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
                CommentObj.Content += self.__CurrentChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                CommentObj.Content += self.__CurrentChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                CommentObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
                FileProfile.CommentList.append(CommentObj)
                CommentObj = None
                self.__GetOneChar()
                InComment = False
            # set comments to spaces
            elif InComment:
                if HashComment:
                    # // follows hash PP directive
                    if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
                        InComment = False
                        HashComment = False
                        PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
                        FileProfile.PPDirectiveList.append(PPDirectiveObj)
                        PPDirectiveObj = None
                        continue
                    else:
                        PPDirectiveObj.Content += self.__CurrentChar()
                else:
                    CommentObj.Content += self.__CurrentChar()
                    self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
            # check for // comment
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
                InComment = True
                DoubleSlashComment = True
                CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_TWO_SLASH)
            # check for '#' comment
            elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
                InComment = True
                HashComment = True
                PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
            # check for /* comment start
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
                CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_SLASH_STAR)
                CommentObj.Content += self.__CurrentChar()
                self.__SetCurrentCharValue( T_CHAR_SPACE)
                self.__GetOneChar()
                CommentObj.Content += self.__CurrentChar()
                self.__SetCurrentCharValue( T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = True
            else:
                self.__GetOneChar()

        EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)

        if InComment and DoubleSlashComment:
            CommentObj.EndPos = EndLinePos
            FileProfile.CommentList.append(CommentObj)
        if InComment and HashComment and not PPExtend:
            PPDirectiveObj.EndPos = EndLinePos
            FileProfile.PPDirectiveList.append(PPDirectiveObj)
        self.Rewind()

    ## ParseFile() method
    #
    # Parse the file profile buffer to extract fd, fv ... information
    # Exception will be raised if syntax error found
    #
    # @param self The object pointer
    #
    def ParseFile(self):
        self.PreprocessFile()
        # restore from ListOfList to ListOfString
        self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
        FileStringContents = ''
        for fileLine in self.Profile.FileLinesList:
            FileStringContents += fileLine
        # Replace each registered token so the ANTLR grammar sees a plain
        # placeholder string instead of it.
        for Token in self.TokenReleaceList:
            if Token in FileStringContents:
                FileStringContents = FileStringContents.replace(Token, 'TOKENSTRING')
        cStream = antlr.InputStream(FileStringContents)
        lexer = CLexer(cStream)
        tStream = antlr.CommonTokenStream(lexer)
        parser = CParser(tStream)
        parser.translation_unit()

    ## ParseFileWithClearedPPDirective() method
    #
    # Preprocess with comment clearing, then run the ANTLR C parser over
    # the resulting buffer.  Exception will be raised if syntax error found.
    #
    # @param self The object pointer
    #
    def ParseFileWithClearedPPDirective(self):
        self.PreprocessFileWithClear()
        # restore from ListOfList to ListOfString
        self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
        FileStringContents = ''
        for fileLine in self.Profile.FileLinesList:
            FileStringContents += fileLine
        cStream = antlr.InputStream(FileStringContents)
        lexer = CLexer(cStream)
        tStream = antlr.CommonTokenStream(lexer)
        parser = CParser(tStream)
        parser.translation_unit()

    ## CleanFileProfileBuffer() method
    #
    # Reset all module-level fragment lists collected in FileProfile.
    #
    # @param self The object pointer
    #
    def CleanFileProfileBuffer(self):
        FileProfile.CommentList = []
        FileProfile.PPDirectiveList = []
        FileProfile.PredicateExpressionList = []
        FileProfile.FunctionDefinitionList = []
        FileProfile.VariableDeclarationList = []
        FileProfile.EnumerationDefinitionList = []
        FileProfile.StructUnionDefinitionList = []
        FileProfile.TypedefDefinitionList = []
        FileProfile.FunctionCallingList = []

    ## PrintFragments() method
    #
    # Dump all collected code fragments to stdout for debugging.
    #
    # @param self The object pointer
    #
    def PrintFragments(self):

        print('################# ' + self.FileName + '#####################')

        print('/****************************************/')
        print('/*************** COMMENTS ***************/')
        print('/****************************************/')
        for comment in FileProfile.CommentList:
            print(str(comment.StartPos) + comment.Content)

        print('/****************************************/')
        print('/********* PREPROCESS DIRECTIVES ********/')
        print('/****************************************/')
        for pp in FileProfile.PPDirectiveList:
            print(str(pp.StartPos) + pp.Content)

        print('/****************************************/')
        print('/********* VARIABLE DECLARATIONS ********/')
        print('/****************************************/')
        for var in FileProfile.VariableDeclarationList:
            print(str(var.StartPos) + var.Modifier + ' '+ var.Declarator)

        print('/****************************************/')
        print('/********* FUNCTION DEFINITIONS *********/')
        print('/****************************************/')
        for func in FileProfile.FunctionDefinitionList:
            print(str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos))

        print('/****************************************/')
        print('/************ ENUMERATIONS **************/')
        print('/****************************************/')
        for enum in FileProfile.EnumerationDefinitionList:
            print(str(enum.StartPos) + enum.Content)

        print('/****************************************/')
        print('/*********** STRUCTS/UNIONS *************/')
        print('/****************************************/')
        for su in FileProfile.StructUnionDefinitionList:
            print(str(su.StartPos) + su.Content)

        print('/****************************************/')
        print('/********* PREDICATE EXPRESSIONS ********/')
        print('/****************************************/')
        for predexp in FileProfile.PredicateExpressionList:
            print(str(predexp.StartPos) + predexp.Content)

        print('/****************************************/')
        print('/************** TYPEDEFS ****************/')
        print('/****************************************/')
        for typedef in FileProfile.TypedefDefinitionList:
            print(str(typedef.StartPos) + typedef.ToType)
if __name__ == "__main__":
    # Ad-hoc manual test: preprocess the file named on the command line.
    Collector = CodeFragmentCollector(sys.argv[1])
    Collector.PreprocessFile()
    print("For Test.")
| edk2-master | BaseTools/Source/Python/Ecc/CodeFragmentCollector.py |
## @file
# fragments of source file
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Holds the text of one comment together with its source span and kind.
#
#
class Comment :
    ## Constructor.
    #
    # @param self        The object pointer
    # @param Str         The comment text
    # @param Begin       The start position tuple.
    # @param End         The end position tuple.
    # @param CommentType The kind of comment (T_COMMENT_TWO_SLASH or T_COMMENT_SLASH_STAR).
    #
    def __init__(self, Str, Begin, End, CommentType):
        self.Type = CommentType
        self.EndPos = End
        self.StartPos = Begin
        self.Content = Str
## The description of preprocess directives and start & end position
#
#
class PP_Directive:
    ## Constructor
    #
    # @param self   The object pointer
    # @param Str    The preprocessor directive text
    # @param Begin  The (line, column) tuple where the directive starts
    # @param End    The (line, column) tuple where the directive ends
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
## The description of predicate expression and start & end position
#
#
class PredicateExpression:
    ## Constructor
    #
    # @param self   The object pointer
    # @param Str    The predicate expression text
    # @param Begin  The (line, column) tuple where the expression starts
    # @param End    The (line, column) tuple where the expression ends
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
## The description of function definition and start & end position
#
#
class FunctionDefinition:
    ## Constructor
    #
    # @param self         The object pointer
    # @param ModifierStr  Return type / modifier text of the function
    # @param DeclStr      Declarator text (name plus parameter list)
    # @param Begin        The (line, column) tuple where the definition starts
    # @param End          The (line, column) tuple where the definition ends
    # @param LBPos        The (line, column) tuple of the opening brace
    # @param NamePos      The (line, column) tuple of the function name
    #
    def __init__(self, ModifierStr, DeclStr, Begin, End, LBPos, NamePos):
        (self.Modifier, self.Declarator, self.StartPos, self.EndPos,
         self.LeftBracePos, self.NamePos) = \
            (ModifierStr, DeclStr, Begin, End, LBPos, NamePos)
## The description of variable declaration and start & end position
#
#
class VariableDeclaration:
    ## Constructor
    #
    # @param self         The object pointer
    # @param ModifierStr  Type / modifier text of the variable
    # @param DeclStr      Declarator text (the variable name part)
    # @param Begin        The (line, column) tuple where the declaration starts
    # @param NamePos      The (line, column) tuple where the name starts
    #
    def __init__(self, ModifierStr, DeclStr, Begin, NamePos):
        (self.Modifier, self.Declarator,
         self.StartPos, self.NameStartPos) = (ModifierStr, DeclStr, Begin, NamePos)
## The description of enum definition and start & end position
#
#
class EnumerationDefinition:
    ## Constructor
    #
    # @param self   The object pointer
    # @param Str    The enumeration definition text
    # @param Begin  The (line, column) tuple where the definition starts
    # @param End    The (line, column) tuple where the definition ends
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
## The description of struct/union definition and start & end position
#
#
class StructUnionDefinition:
    ## Constructor
    #
    # @param self   The object pointer
    # @param Str    The struct/union definition text
    # @param Begin  The (line, column) tuple where the definition starts
    # @param End    The (line, column) tuple where the definition ends
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
## The description of 'Typedef' definition and start & end position
#
#
class TypedefDefinition:
    ## Constructor
    #
    # @param self     The object pointer
    # @param FromStr  The original (source) type text
    # @param ToStr    The new (alias) type name
    # @param Begin    The (line, column) tuple where the typedef starts
    # @param End      The (line, column) tuple where the typedef ends
    #
    def __init__(self, FromStr, ToStr, Begin, End):
        (self.FromType, self.ToType,
         self.StartPos, self.EndPos) = (FromStr, ToStr, Begin, End)
class FunctionCalling:
    ## Constructor
    #
    # @param self   The object pointer
    # @param Name   The called function's name
    # @param Param  The parameter list text/items of the call
    # @param Begin  The (line, column) tuple where the call starts
    # @param End    The (line, column) tuple where the call ends
    #
    def __init__(self, Name, Param, Begin, End):
        (self.FuncName, self.ParamList,
         self.StartPos, self.EndPos) = (Name, Param, Begin, End)
| edk2-master | BaseTools/Source/Python/Ecc/CodeFragment.py |
## @file
# This file is used to define common parser functions for meta-data
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
from CommonDataClass.DataClass import *
from Ecc.EccToolError import *
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Ecc import EccGlobalData
import re
## Get the include path list for a source file
#
# 1. Find the source file belongs to which inf file
# 2. Find the inf's package
# 3. Return the include path list of the package
#
def GetIncludeListOfFile(WorkSpace, Filepath, Db):
    """Return the include directories visible to a source file.

    Finds every INF that lists *Filepath* as a source file, adds the INF's own
    directory, then adds each include path declared by the DEC package that
    the INF belongs to.
    """
    IncludeList = []
    Filepath = os.path.normpath(Filepath)
    SqlCommand = """
                select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
                    select distinct B.BelongsToFile from File as A left join Inf as B
                        where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
                    and Inf.BelongsToFile = File.ID""" \
                % (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
    for DecRelPath, InfRelPath in Db.TblFile.Exec(SqlCommand):
        # Resolve both paths against the (multiple) workspace roots.
        DecFullPath = os.path.normpath(mws.join(WorkSpace, DecRelPath))
        InfFullPath = os.path.normpath(mws.join(WorkSpace, InfRelPath))
        DecPath = os.path.split(DecFullPath)[0]
        InfPath = os.path.split(InfFullPath)[0]
        SqlCommand = """select Value1 from Dec where BelongsToFile =
                       (select ID from File where FullPath = '%s') and Model = %s""" \
                    % (DecFullPath, MODEL_EFI_INCLUDE)
        DecIncludeRows = Db.TblDec.Exec(SqlCommand)
        # The directory of the INF itself is always an include directory.
        if InfPath not in IncludeList:
            IncludeList.append(InfPath)
        for IncludeRow in DecIncludeRows:
            IncludePath = os.path.normpath(os.path.join(DecPath, IncludeRow[0]))
            if IncludePath not in IncludeList:
                IncludeList.append(IncludePath)
    return IncludeList
## Get the file list
#
# Search table file and find all specific type files
#
def GetFileList(FileModel, Db):
    """Return the full paths of all files of type *FileModel* recorded in the
    File table of *Db*."""
    SqlCommand = """select FullPath from File where Model = %s""" % str(FileModel)
    return [Record[0] for Record in Db.TblFile.Exec(SqlCommand)]
## Get the table list
#
# Search table file and find all small tables
#
def GetTableList(FileModelList, Table, Db):
    """Return the per-file table names (prefix *Table* plus file ID) for every
    file whose model is in *FileModelList*."""
    SqlCommand = """select ID from File where Model in %s""" % str(FileModelList)
    return [Table + str(Record[0]) for Record in Db.TblFile.Exec(SqlCommand)]
## ParseHeaderCommentSection
#
# Parse Header comment section lines, extract Abstract, Description, Copyright
# , License lines
#
# @param CommentList: List of (Comment, LineNumber)
# @param FileName: FileName of the comment
#
def ParseHeaderCommentSection(CommentList, FileName = None):
    """Parse the header comment section of a meta-data file.

    Walks the (Comment, LineNumber) pairs of the file header and splits them
    into Abstract, Description, Copyright and License sections.  Missing or
    malformed sections are reported through EccGlobalData.gDb.TblReport as
    Doxygen file-header check errors.

    @param CommentList: List of (Comment, LineNumber) tuples
    @param FileName:    Path of the file the comments belong to

    @return (Abstract, Description, Copyright, License), each stripped.
    """
    Abstract = ''
    Description = ''
    Copyright = ''
    License = ''
    EndOfLine = "\n"
    STR_HEADER_COMMENT_START = "@file"
    #
    # used to indicate the state of processing header comment section of dec,
    # inf files
    #
    HEADER_COMMENT_NOT_STARTED = -1
    HEADER_COMMENT_STARTED = 0
    HEADER_COMMENT_FILE = 1
    HEADER_COMMENT_ABSTRACT = 2
    HEADER_COMMENT_DESCRIPTION = 3
    HEADER_COMMENT_COPYRIGHT = 4
    HEADER_COMMENT_LICENSE = 5
    HEADER_COMMENT_END = 6
    #
    # first find the last copyright line
    #
    Last = 0
    HeaderCommentStage = HEADER_COMMENT_NOT_STARTED
    # NOTE(review): the reverse scan stops before index 0, so a copyright line
    # at index 0 never updates Last — confirm this is intended.
    for Index in range(len(CommentList) - 1, 0, -1):
        Line = CommentList[Index][0]
        if _IsCopyrightLine(Line):
            Last = Index
            break
    for Item in CommentList:
        Line = Item[0]
        LineNo = Item[1]
        # A non-empty header line that does not start with '#' is not a valid
        # comment line; report it against the file record in the database.
        if not Line.startswith('#') and Line:
            SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
            ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
            for Result in ResultSet:
                Msg = 'Comment must start with #'
                EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
        # Keep only the comment part of the line, without surrounding spaces.
        Comment = CleanString2(Line)[1]
        Comment = Comment.strip()
        #
        # if there are blank lines between License or Description, keep them as they would be
        # indication of different block; or in the position that Abstract should be, also keep it
        # as it indicates that no abstract
        #
        if not Comment and HeaderCommentStage not in [HEADER_COMMENT_LICENSE, \
                                                      HEADER_COMMENT_DESCRIPTION, HEADER_COMMENT_ABSTRACT]:
            continue
        if HeaderCommentStage == HEADER_COMMENT_NOT_STARTED:
            if Comment.startswith(STR_HEADER_COMMENT_START):
                HeaderCommentStage = HEADER_COMMENT_ABSTRACT
            else:
                # Text before the "@file" marker is treated as license text.
                License += Comment + EndOfLine
        else:
            if HeaderCommentStage == HEADER_COMMENT_ABSTRACT:
                #
                # in case there is no abstract and description
                #
                if not Comment:
                    Abstract = ''
                    HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
                elif _IsCopyrightLine(Comment):
                    Copyright += Comment + EndOfLine
                    HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
                else:
                    Abstract += Comment + EndOfLine
                    HeaderCommentStage = HEADER_COMMENT_DESCRIPTION
            elif HeaderCommentStage == HEADER_COMMENT_DESCRIPTION:
                #
                # in case there is no description
                #
                if _IsCopyrightLine(Comment):
                    Copyright += Comment + EndOfLine
                    HeaderCommentStage = HEADER_COMMENT_COPYRIGHT
                else:
                    Description += Comment + EndOfLine
            elif HeaderCommentStage == HEADER_COMMENT_COPYRIGHT:
                if _IsCopyrightLine(Comment):
                    Copyright += Comment + EndOfLine
                else:
                    #
                    # Contents after copyright line are license, those non-copyright lines in between
                    # copyright line will be discarded
                    #
                    if LineNo > Last:
                        if License:
                            License += EndOfLine
                        License += Comment + EndOfLine
                        HeaderCommentStage = HEADER_COMMENT_LICENSE
            else:
                if not Comment and not License:
                    continue
                License += Comment + EndOfLine
    # Report each mandatory section that came out empty.
    if not Copyright.strip():
        SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
        ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
        for Result in ResultSet:
            Msg = 'Header comment section must have copyright information'
            EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
    if not License.strip():
        SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
        ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
        for Result in ResultSet:
            Msg = 'Header comment section must have license information'
            EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
    if not Abstract.strip() or Abstract.find('Component description file') > -1:
        SqlStatement = """ select ID from File where FullPath like '%s'""" % FileName
        ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
        for Result in ResultSet:
            Msg = 'Header comment section must have Abstract information.'
            EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
    return Abstract.strip(), Description.strip(), Copyright.strip(), License.strip()
## _IsCopyrightLine
# check whether current line is copyright line, the criteria is whether there is case insensitive keyword "Copyright"
# followed by zero or more white space characters followed by a "(" character
#
# @param LineContent: the line need to be checked
# @return: True if current line is copyright line, False else
#
def _IsCopyrightLine (LineContent):
LineContent = LineContent.upper()
Result = False
#Support below Copyright format
# Copyright (C) 2020 Hewlett Packard Enterprise Development LP<BR>
# (C) Copyright 2020 Hewlett Packard Enterprise Development LP<BR>
ReIsCopyrightRe = re.compile(r"""(^|\s)COPYRIGHT *\(""", re.DOTALL)
ReIsCopyrightTypeB = re.compile(r"""(^|\s)\(C\)\s*COPYRIGHT""", re.DOTALL)
if ReIsCopyrightRe.search(LineContent) or ReIsCopyrightTypeB.search(LineContent):
Result = True
return Result
## CleanString2
#
# Split comments in a string
# Remove spaces
#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content,
# default is DataType.TAB_COMMENT_SPLIT
#
def CleanString2(Line, CommentCharacter='#', AllowCppStyleComment=False):
    """Split *Line* into a (statement, comment) pair.

    The comment part has surrounding whitespace and any leading/trailing
    comment characters removed.  When *AllowCppStyleComment* is set, '//' is
    first rewritten to *CommentCharacter* so EDK1-style comments are handled.
    """
    Line = Line.strip()
    if AllowCppStyleComment:
        Line = Line.replace('//', CommentCharacter)
    # Split at the first comment character only.
    Code, Sep, Comment = Line.partition(CommentCharacter)
    Code = Code.strip()
    if not Sep:
        # No comment present on this line.
        return Code, ''
    Comment = Comment.strip()
    # Trim runs of comment characters from both ends of the comment,
    # leaving any interior occurrences untouched.
    Head = 0
    Tail = len(Comment)
    while Head < Tail and Comment.startswith(CommentCharacter, Head, Tail):
        Head += 1
    while Tail >= 0 and Comment.endswith(CommentCharacter, Head, Tail):
        Tail -= 1
    Comment = Comment[Head:Tail].strip()
    return Code, Comment
| edk2-master | BaseTools/Source/Python/Ecc/MetaDataParser.py |
## @file
# This file is used to be the main entrance of ECC tool
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# Copyright (c) Microsoft Corporation.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os, time, glob, sys
import Common.EdkLogger as EdkLogger
from Ecc import Database
from Ecc import EccGlobalData
from Ecc.MetaDataParser import *
from optparse import OptionParser
from Ecc.Configuration import Configuration
from Ecc.Check import Check
import Common.GlobalData as GlobalData
from Common.StringUtils import NormPath
from Common.BuildVersion import gBUILD_VERSION
from Common import BuildToolError
from Common.Misc import PathClass
from Common.Misc import DirCache
from Ecc.MetaFileWorkspace.MetaFileParser import DscParser
from Ecc.MetaFileWorkspace.MetaFileParser import DecParser
from Ecc.MetaFileWorkspace.MetaFileParser import InfParser
from Ecc.MetaFileWorkspace.MetaFileParser import Fdf
from Ecc.MetaFileWorkspace.MetaFileTable import MetaFileStorage
from Ecc import c
import re, string
from Ecc.Exception import *
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
## Ecc
#
# This class is used to define Ecc main entrance
#
# @param object: Inherited from object class
#
class Ecc(object):
    """Main entry object of the ECC tool.

    Constructing an Ecc instance runs the complete tool: option parsing,
    workspace setup, database build, checkpoint execution and report
    generation are all driven from __init__.
    """
    def __init__(self):
        # Version and Copyright
        self.VersionNumber = ("1.0" + " Build " + gBUILD_VERSION)
        self.Version = "%prog Version " + self.VersionNumber
        self.Copyright = "Copyright (c) 2009 - 2018, Intel Corporation All rights reserved."
        self.InitDefaultConfigIni()
        # Default output/report/exception file names; may be overridden by
        # command-line options in ParseOption().
        self.OutputFile = 'output.txt'
        self.ReportFile = 'Report.csv'
        self.ExceptionFile = 'exception.xml'
        self.IsInit = True
        self.ScanSourceCode = True
        self.ScanMetaData = True
        self.MetaFile = ''
        self.OnlyScan = None
        # Parse the options and args
        self.ParseOption()
        EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
        # Normalize WORKSPACE and publish the normalized value back to the
        # environment so child code sees a consistent path.
        WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
        os.environ["WORKSPACE"] = WorkspaceDir
        # set multiple workspace
        PackagesPath = os.getenv("PACKAGES_PATH")
        mws.setWs(WorkspaceDir, PackagesPath)
        GlobalData.gWorkspace = WorkspaceDir
        GlobalData.gGlobalDefines["WORKSPACE"] = WorkspaceDir
        EdkLogger.info("Loading ECC configuration ... done")
        # Generate checkpoints list
        EccGlobalData.gConfig = Configuration(self.ConfigFile)
        # Generate exception list
        EccGlobalData.gException = ExceptionCheck(self.ExceptionFile)
        # Init Ecc database
        EccGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
        EccGlobalData.gDb.InitDatabase(self.IsInit)
        #
        # Get files real name in workspace dir
        #
        GlobalData.gAllFiles = DirCache(GlobalData.gWorkspace)
        # Build ECC database
        # self.BuildDatabase()
        self.DetectOnlyScanDirs()
        # Start to check
        self.Check()
        # Show report
        self.GenReport()
        # Close Database
        EccGlobalData.gDb.Close()
    def InitDefaultConfigIni(self):
        # Probe for 'Ecc/config.ini' along sys.path, but prefer a config.ini
        # in the current working directory; fall back to the bare file name
        # when nothing exists on disk.
        paths = map(lambda p: os.path.join(p, 'Ecc', 'config.ini'), sys.path)
        paths = (os.path.abspath('config.ini'),) + tuple(paths)
        for path in paths:
            if os.path.exists(path):
                self.ConfigFile = path
                return
        self.ConfigFile = 'config.ini'
    ## DetectOnlyScan
    #
    # Detect whether only scanned folders have been enabled
    #
    def DetectOnlyScanDirs(self):
        if self.OnlyScan == True:
            OnlyScanDirs = []
            # Use regex here if multiple spaces or TAB exists in ScanOnlyDirList in config.ini file
            for folder in re.finditer(r'\S+', EccGlobalData.gConfig.ScanOnlyDirList):
                OnlyScanDirs.append(folder.group())
            if len(OnlyScanDirs) != 0:
                self.BuildDatabase(OnlyScanDirs)
            else:
                EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Use -f option need to fill specific folders in config.ini file")
        else:
            self.BuildDatabase()
    ## BuildDatabase
    #
    # Build the database for target
    #
    def BuildDatabase(self, SpeciDirs = None):
        # Clean report table
        EccGlobalData.gDb.TblReport.Drop()
        EccGlobalData.gDb.TblReport.Create()
        # Build database
        if self.IsInit:
            if self.ScanMetaData:
                EdkLogger.quiet("Building database for Meta Data File ...")
                self.BuildMetaDataFileDatabase(SpeciDirs)
            if self.ScanSourceCode:
                EdkLogger.quiet("Building database for Meta Data File Done!")
                if SpeciDirs is None:
                    c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)
                else:
                    for specificDir in SpeciDirs:
                        c.CollectSourceCodeDataIntoDB(os.path.join(EccGlobalData.gTarget, specificDir))
        # Cache commonly used file/table lists for the checkers.
        EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
        EccGlobalData.gCFileList = GetFileList(MODEL_FILE_C, EccGlobalData.gDb)
        EccGlobalData.gHFileList = GetFileList(MODEL_FILE_H, EccGlobalData.gDb)
        EccGlobalData.gUFileList = GetFileList(MODEL_FILE_UNI, EccGlobalData.gDb)
    ## BuildMetaDataFileDatabase
    #
    # Build the database for meta data files
    #
    def BuildMetaDataFileDatabase(self, SpecificDirs = None):
        ScanFolders = []
        if SpecificDirs is None:
            ScanFolders.append(EccGlobalData.gTarget)
        else:
            for specificDir in SpecificDirs:
                ScanFolders.append(os.path.join(EccGlobalData.gTarget, specificDir))
        EdkLogger.quiet("Building database for meta data files ...")
        Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
        #SkipDirs = Read from config file
        SkipDirs = EccGlobalData.gConfig.SkipDirList
        SkipDirString = '|'.join(SkipDirs)
        # p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
        # NOTE(review): the '^\S' inside the non-capturing group below looks
        # accidental (compare the commented-out pattern above) and likely
        # prevents the last skip-dir alternative from matching — confirm.
        p = re.compile(r'.*[\\/](?:%s^\S)[\\/]?.*' % SkipDirString)
        for scanFolder in ScanFolders:
            for Root, Dirs, Files in os.walk(scanFolder):
                if p.match(Root.upper()):
                    continue
                for Dir in Dirs:
                    Dirname = os.path.join(Root, Dir)
                    if os.path.islink(Dirname):
                        Dirname = os.path.realpath(Dirname)
                        if os.path.isdir(Dirname):
                            # symlinks to directories are treated as directories
                            Dirs.remove(Dir)
                            Dirs.append(Dirname)
                for File in Files:
                    if len(File) > 4 and File[-4:].upper() == ".DEC":
                        Filename = os.path.normpath(os.path.join(Root, File))
                        EdkLogger.quiet("Parsing %s" % Filename)
                        Op.write("%s\r" % Filename)
                        #Dec(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                        self.MetaFile = DecParser(Filename, MODEL_FILE_DEC, EccGlobalData.gDb.TblDec)
                        self.MetaFile.Start()
                        continue
                    if len(File) > 4 and File[-4:].upper() == ".DSC":
                        Filename = os.path.normpath(os.path.join(Root, File))
                        EdkLogger.quiet("Parsing %s" % Filename)
                        Op.write("%s\r" % Filename)
                        #Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                        self.MetaFile = DscParser(PathClass(Filename, Root), MODEL_FILE_DSC, MetaFileStorage(EccGlobalData.gDb.TblDsc.Cur, Filename, MODEL_FILE_DSC, True))
                        # always do post-process, in case of macros change
                        self.MetaFile.DoPostProcess()
                        self.MetaFile.Start()
                        self.MetaFile._PostProcess()
                        continue
                    if len(File) > 4 and File[-4:].upper() == ".INF":
                        Filename = os.path.normpath(os.path.join(Root, File))
                        EdkLogger.quiet("Parsing %s" % Filename)
                        Op.write("%s\r" % Filename)
                        #Inf(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                        self.MetaFile = InfParser(Filename, MODEL_FILE_INF, EccGlobalData.gDb.TblInf)
                        self.MetaFile.Start()
                        continue
                    if len(File) > 4 and File[-4:].upper() == ".FDF":
                        Filename = os.path.normpath(os.path.join(Root, File))
                        EdkLogger.quiet("Parsing %s" % Filename)
                        Op.write("%s\r" % Filename)
                        Fdf(Filename, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                        continue
                    if len(File) > 4 and File[-4:].upper() == ".UNI":
                        Filename = os.path.normpath(os.path.join(Root, File))
                        EdkLogger.quiet("Parsing %s" % Filename)
                        Op.write("%s\r" % Filename)
                        # UNI files are only registered, not parsed here.
                        FileID = EccGlobalData.gDb.TblFile.InsertFile(Filename, MODEL_FILE_UNI)
                        EccGlobalData.gDb.TblReport.UpdateBelongsToItemByFile(FileID, File)
                        continue
        Op.close()
        # Commit to database
        EccGlobalData.gDb.Conn.commit()
        EdkLogger.quiet("Building database for meta data files done!")
    ##
    #
    # Check each checkpoint
    #
    def Check(self):
        EdkLogger.quiet("Checking ...")
        EccCheck = Check()
        EccCheck.Check()
        EdkLogger.quiet("Checking  done!")
    ##
    #
    # Generate the scan report
    #
    def GenReport(self):
        EdkLogger.quiet("Generating report ...")
        EccGlobalData.gDb.TblReport.ToCSV(self.ReportFile)
        EdkLogger.quiet("Generating report done!")
    def GetRealPathCase(self, path):
        # Return *path* with each component spelled in its on-disk case,
        # resolved by case-insensitive comparison against directory listings.
        TmpPath = path.rstrip(os.sep)
        PathParts = TmpPath.split(os.sep)
        if len(PathParts) == 0:
            return path
        if len(PathParts) == 1:
            if PathParts[0].strip().endswith(':'):
                return PathParts[0].upper()
            # Relative dir, list . current dir
            Dirs = os.listdir('.')
            for Dir in Dirs:
                if Dir.upper() == PathParts[0].upper():
                    return Dir
        # NOTE(review): when a single relative component has no case match the
        # code falls through to the multi-component handling — confirm intended.
        if PathParts[0].strip().endswith(':'):
            PathParts[0] = PathParts[0].upper()
        ParentDir = PathParts[0]
        RealPath = ParentDir
        if PathParts[0] == '':
            RealPath = os.sep
            ParentDir = os.sep
        PathParts.remove(PathParts[0]) # need to remove the parent
        for Part in PathParts:
            Dirs = os.listdir(ParentDir + os.sep)
            for Dir in Dirs:
                if Dir.upper() == Part.upper():
                    RealPath += os.sep
                    RealPath += Dir
                    break
            ParentDir += os.sep
            ParentDir += Dir
        return RealPath
    ## ParseOption
    #
    # Parse options
    #
    def ParseOption(self):
        (Options, Target) = self.EccOptionParser()
        if Options.Workspace:
            os.environ["WORKSPACE"] = Options.Workspace
        # Check workspace environment
        if "WORKSPACE" not in os.environ:
            EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                            ExtraData="WORKSPACE")
        else:
            EccGlobalData.gWorkspace = os.path.normpath(os.getenv("WORKSPACE"))
            if not os.path.exists(EccGlobalData.gWorkspace):
                EdkLogger.error("ECC", BuildToolError.FILE_NOT_FOUND, ExtraData="WORKSPACE = %s" % EccGlobalData.gWorkspace)
            os.environ["WORKSPACE"] = EccGlobalData.gWorkspace
        # Set log level
        self.SetLogLevel(Options)
        # Set other options
        if Options.ConfigFile is not None:
            self.ConfigFile = Options.ConfigFile
        if Options.OutputFile is not None:
            self.OutputFile = Options.OutputFile
        if Options.ReportFile is not None:
            self.ReportFile = Options.ReportFile
        if Options.ExceptionFile is not None:
            self.ExceptionFile = Options.ExceptionFile
        if Options.Target is not None:
            if not os.path.isdir(Options.Target):
                EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
            else:
                EccGlobalData.gTarget = self.GetRealPathCase(os.path.normpath(Options.Target))
        else:
            EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")
            EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))
        if Options.keepdatabase is not None:
            self.IsInit = False
        if Options.metadata is not None and Options.sourcecode is not None:
            EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")
        if Options.metadata is not None:
            self.ScanSourceCode = False
        if Options.sourcecode is not None:
            self.ScanMetaData = False
        if Options.folders is not None:
            self.OnlyScan = True
    ## SetLogLevel
    #
    # Set current log level of the tool based on args
    #
    # @param Option: The option list including log level setting
    #
    def SetLogLevel(self, Option):
        if Option.verbose is not None:
            EdkLogger.SetLevel(EdkLogger.VERBOSE)
        elif Option.quiet is not None:
            EdkLogger.SetLevel(EdkLogger.QUIET)
        elif Option.debug is not None:
            EdkLogger.SetLevel(Option.debug + 1)
        else:
            EdkLogger.SetLevel(EdkLogger.INFO)
    ## Parse command line options
    #
    # Using standard Python module optparse to parse command line option of this tool.
    #
    # @retval Opt   A optparse.Values object containing the parsed options
    # @retval Args  Target of build command
    #
    def EccOptionParser(self):
        Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Ecc.exe", usage = "%prog [options]")
        Parser.add_option("-t", "--target sourcepath", action="store", type="string", dest='Target',
            help="Check all files under the target workspace.")
        Parser.add_option("-c", "--config filename", action="store", type="string", dest="ConfigFile",
            help="Specify a configuration file. Defaultly use config.ini under ECC tool directory.")
        Parser.add_option("-o", "--outfile filename", action="store", type="string", dest="OutputFile",
            help="Specify the name of an output file, if and only if one filename was specified.")
        Parser.add_option("-r", "--reportfile filename", action="store", type="string", dest="ReportFile",
            help="Specify the name of an report file, if and only if one filename was specified.")
        Parser.add_option("-e", "--exceptionfile filename", action="store", type="string", dest="ExceptionFile",
            help="Specify the name of an exception file, if and only if one filename was specified.")
        Parser.add_option("-m", "--metadata", action="store_true", type=None, help="Only scan meta-data files information if this option is specified.")
        Parser.add_option("-s", "--sourcecode", action="store_true", type=None, help="Only scan source code files information if this option is specified.")
        Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Ecc database will not be cleaned except report information if this option is specified.")
        Parser.add_option("-l", "--log filename", action="store", dest="LogFile", help="""If specified, the tool should emit the changes that
                                                                                          were made by the tool after printing the result message.
                                                                                          If filename, the emit to the file, otherwise emit to
                                                                                          standard output. If no modifications were made, then do not
                                                                                          create a log file, or output a log message.""")
        Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
        Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
                                                                                  "including library instances selected, final dependency expression, "\
                                                                                  "and warning messages, etc.")
        Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
        Parser.add_option("-w", "--workspace", action="store", type="string", dest='Workspace', help="Specify workspace.")
        Parser.add_option("-f", "--folders", action="store_true", type=None, help="Only scanning specified folders which are recorded in config.ini file.")
        (Opt, Args)=Parser.parse_args()
        return (Opt, Args)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Initialize log system
    EdkLogger.Initialize()
    EdkLogger.IsRaiseError = False
    StartTime = time.perf_counter()
    # Constructing Ecc runs the whole tool; note this rebinds the name 'Ecc'
    # from the class to the instance.
    Ecc = Ecc()
    FinishTime = time.perf_counter()
    # Report elapsed wall-clock time as MM:SS.
    BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
    EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
| edk2-master | BaseTools/Source/Python/Ecc/EccMain.py |
## @file
# Python 'Workspace' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Ecc/MetaFileWorkspace/__init__.py |
## @file
# This file is used to create/update/query/erase a meta file table
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import uuid
import Common.EdkLogger as EdkLogger
import Ecc.EccGlobalData as EccGlobalData
from Ecc.MetaFileWorkspace.MetaDataTable import Table
from Ecc.MetaFileWorkspace.MetaDataTable import ConvertToSqlString
from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \
MODEL_FILE_OTHERS
class MetaFileTable(Table):
    ## Constructor
    #
    # Derives the concrete table name from the meta-file type: INF -> "Inf",
    # DEC -> "Dec", DSC -> "Dsc" (or a uniquely suffixed temporary name).
    # Any other file type keeps the caller-supplied TableName.
    #
    def __init__(self, Cursor, MetaFile, FileType, TableName, Temporary = False):
        self.MetaFile = MetaFile
        self.TblFile = EccGlobalData.gDb.TblFile
        if FileType == MODEL_FILE_INF:
            TableName = "Inf"
        elif FileType == MODEL_FILE_DEC:
            TableName = "Dec"
        elif FileType == MODEL_FILE_DSC:
            TableName = "_%s_%s" % ("Dsc", uuid.uuid4().hex) if Temporary else "Dsc"
        Table.__init__(self, Cursor, TableName, 0, Temporary)
        self.Create(False)
## Python class representation of table storing module data
## Python class representation of table storing module data
class ModuleTable(MetaFileTable):
    # Column layout of the "Inf" table.
    _COLUMN_ = '''
        ID REAL PRIMARY KEY,
        Model INTEGER NOT NULL,
        Value1 TEXT NOT NULL,
        Value2 TEXT,
        Value3 TEXT,
        Usage TEXT,
        Scope1 TEXT,
        Scope2 TEXT,
        BelongsToItem REAL NOT NULL,
        BelongsToFile SINGLE NOT NULL,
        StartLine INTEGER NOT NULL,
        StartColumn INTEGER NOT NULL,
        EndLine INTEGER NOT NULL,
        EndColumn INTEGER NOT NULL,
        Enabled INTEGER DEFAULT 0
        '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"

    ## Constructor: the INF table is always the shared, non-temporary "Inf" table.
    def __init__(self, Cursor):
        MetaFileTable.__init__(self, Cursor, '', MODEL_FILE_INF, "Inf", False)

    ## Insert a record into table Inf
    #
    # @param Model:         Model of the Inf item
    # @param Value1/2/3:    Value fields of the Inf item
    # @param Scope1:        Arch of the Inf item
    # @param Scope2:        Platform of the Inf item
    # @param BelongsToItem: The item this record belongs to
    # @param BelongsToFile: The file this record belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source position
    # @param Enabled:       Whether this item is enabled
    # @param Usage:         Usage of the Inf item
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
               BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1,
               EndLine=-1, EndColumn=-1, Enabled=0, Usage=''):
        # Escape the textual fields before they are embedded into SQL.
        Value1, Value2, Value3, Usage, Scope1, Scope2 = ConvertToSqlString(
            (Value1, Value2, Value3, Usage, Scope1, Scope2))
        return Table.Insert(self, Model, Value1, Value2, Value3, Usage, Scope1,
                            Scope2, BelongsToItem, BelongsToFile, StartLine,
                            StartColumn, EndLine, EndColumn, Enabled)

    ## Query table
    #
    # @param Model:    The Model of Record
    # @param Arch:     The Arch attribute of Record
    # @param Platform: The Platform attribute of Record
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model, Arch=None, Platform=None):
        # Assemble the WHERE clause from the always-on model/enabled filters
        # plus the optional arch/platform scopes.
        Conditions = ["Model=%s" % Model, "Enabled>=0"]
        if Arch is not None and Arch != 'COMMON':
            Conditions.append("(Scope1='%s' OR Scope1='COMMON')" % Arch)
        if Platform is not None and Platform != 'COMMON':
            Conditions.append("(Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform)
        SqlCommand = "SELECT %s FROM %s WHERE %s" % (
            "Value1,Value2,Value3,Usage,Scope1,Scope2,ID,StartLine",
            self.Table,
            " AND ".join(Conditions))
        return self.Exec(SqlCommand)
## Python class representation of table storing package data
class PackageTable(MetaFileTable):
    # Column layout of the "Dec" table.
    _COLUMN_ = '''
        ID REAL PRIMARY KEY,
        Model INTEGER NOT NULL,
        Value1 TEXT NOT NULL,
        Value2 TEXT,
        Value3 TEXT,
        Scope1 TEXT,
        Scope2 TEXT,
        BelongsToItem REAL NOT NULL,
        BelongsToFile SINGLE NOT NULL,
        StartLine INTEGER NOT NULL,
        StartColumn INTEGER NOT NULL,
        EndLine INTEGER NOT NULL,
        EndColumn INTEGER NOT NULL,
        Enabled INTEGER DEFAULT 0
        '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"

    ## Constructor: the DEC table is always the shared, non-temporary "Dec" table.
    def __init__(self, Cursor):
        MetaFileTable.__init__(self, Cursor, '', MODEL_FILE_DEC, "Dec", False)

    ## Insert a record into table Dec
    #
    # @param Model:         Model of the Dec item
    # @param Value1/2/3:    Value fields of the Dec item
    # @param Scope1:        Arch of the Dec item
    # @param Scope2:        Module type of the Dec item
    # @param BelongsToItem: The item this record belongs to
    # @param BelongsToFile: The file this record belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source position
    # @param Enabled:       Whether this item is enabled
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
               BelongsToItem=-1, BelongsToFile = -1, StartLine=-1, StartColumn=-1,
               EndLine=-1, EndColumn=-1, Enabled=0):
        # Escape the textual fields before they are embedded into SQL.
        Value1, Value2, Value3, Scope1, Scope2 = ConvertToSqlString(
            (Value1, Value2, Value3, Scope1, Scope2))
        return Table.Insert(self, Model, Value1, Value2, Value3, Scope1, Scope2,
                            BelongsToItem, BelongsToFile, StartLine, StartColumn,
                            EndLine, EndColumn, Enabled)

    ## Query table
    #
    # @param Model: The Model of Record
    # @param Arch:  The Arch attribute of Record
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model, Arch=None):
        # Assemble the WHERE clause from the always-on model/enabled filters
        # plus the optional arch scope.
        Conditions = ["Model=%s" % Model, "Enabled>=0"]
        if Arch is not None and Arch != 'COMMON':
            Conditions.append("(Scope1='%s' OR Scope1='COMMON')" % Arch)
        SqlCommand = "SELECT %s FROM %s WHERE %s" % (
            "Value1,Value2,Value3,Scope1,ID,StartLine",
            self.Table,
            " AND ".join(Conditions))
        return self.Exec(SqlCommand)
## Python class representation of table storing platform data
class PlatformTable(MetaFileTable):
    """Meta-file table backing records parsed from a DSC (platform) file."""
    _COLUMN_ = '''
        ID REAL PRIMARY KEY,
        Model INTEGER NOT NULL,
        Value1 TEXT NOT NULL,
        Value2 TEXT,
        Value3 TEXT,
        Scope1 TEXT,
        Scope2 TEXT,
        BelongsToItem REAL NOT NULL,
        BelongsToFile SINGLE NOT NULL,
        FromItem REAL NOT NULL,
        StartLine INTEGER NOT NULL,
        StartColumn INTEGER NOT NULL,
        EndLine INTEGER NOT NULL,
        EndColumn INTEGER NOT NULL,
        Enabled INTEGER DEFAULT 0
        '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1, -1"

    ## Constructor
    def __init__(self, Cursor, MetaFile = '', FileType = MODEL_FILE_DSC, Temporary = False):
        MetaFileTable.__init__(self, Cursor, MetaFile, FileType, "Dsc", Temporary)

    ## Insert a record into table Dsc
    #
    # @param Model:         Model of a Dsc item
    # @param Value1:        Value1 of a Dsc item
    # @param Value2:        Value2 of a Dsc item
    # @param Value3:        Value3 of a Dsc item
    # @param Scope1:        Arch of a Dsc item
    # @param Scope2:        Module type of a Dsc item
    # @param BelongsToItem: The item belongs to which another item
    # @param FromItem:      The item belongs to which dsc file
    # @param StartLine:     StartLine of a Dsc item
    # @param StartColumn:   StartColumn of a Dsc item
    # @param EndLine:       EndLine of a Dsc item
    # @param EndColumn:     EndColumn of a Dsc item
    # @param Enabled:       If this item enabled
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON', BelongsToItem=-1, BelongsToFile = -1,
               FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
        # Quote/escape the string columns before embedding them in SQL text.
        Value1, Value2, Value3, Scope1, Scope2 = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
        return Table.Insert(self, Model, Value1, Value2, Value3, Scope1, Scope2,
                            BelongsToItem, BelongsToFile, FromItem, StartLine,
                            StartColumn, EndLine, EndColumn, Enabled)

    ## Query table
    #
    # @param Model:         The Model of Record
    # @param Scope1:        Arch of a Dsc item
    # @param Scope2:        Module type of a Dsc item
    # @param BelongsToItem: The item belongs to which another item
    # @param FromItem:      The item belongs to which dsc file
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
        Conditions = ["Model=%s AND Enabled>0" % Model]
        if Scope1 is not None and Scope1 != 'COMMON':
            Conditions.append("(Scope1='%s' OR Scope1='COMMON')" % Scope1)
        if Scope2 is not None and Scope2 != 'COMMON':
            # 'DEFAULT' additionally matches in the module-type scope.
            Conditions.append("(Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2)
        if BelongsToItem is not None:
            Conditions.append("BelongsToItem=%s" % BelongsToItem)
        else:
            # Top-level items only when no owner is requested.
            Conditions.append("BelongsToItem<0")
        if FromItem is not None:
            Conditions.append("FromItem=%s" % FromItem)
        Columns = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
        SqlCommand = "SELECT %s FROM %s WHERE %s" % (Columns, self.Table, " AND ".join(Conditions))
        return self.Exec(SqlCommand)
## Factory class to produce different storage for different type of meta-file
class MetaFileStorage(object):
    """Factory: return the proper table object for a given meta-file type."""
    _FILE_TABLE_ = {
        MODEL_FILE_INF      :   ModuleTable,
        MODEL_FILE_DEC      :   PackageTable,
        MODEL_FILE_DSC      :   PlatformTable,
        MODEL_FILE_OTHERS   :   MetaFileTable,
    }

    _FILE_TYPE_ = {
        ".inf"  : MODEL_FILE_INF,
        ".dec"  : MODEL_FILE_DEC,
        ".dsc"  : MODEL_FILE_DSC,
    }

    ## Constructor
    #
    # @param Cursor     Database cursor the table object will use
    # @param MetaFile   Meta-file path object (expected to expose .Type)
    # @param FileType   One of the MODEL_FILE_* constants, or None to derive
    #                   it from the file extension
    # @param Temporary  Whether to create the table as a temp table
    #
    def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False):
        # no type given, try to find one from the file extension.
        # BUGFIX: this lookup previously read ``self._FILE_TYPE_``, but no
        # ``self`` exists inside __new__ (the first argument is the class),
        # so any call omitting FileType raised NameError.
        if not FileType:
            if MetaFile.Type in Class._FILE_TYPE_:
                FileType = Class._FILE_TYPE_[MetaFile.Type]
            else:
                FileType = MODEL_FILE_OTHERS

        # Every table class accepts the same constructor arguments, so the
        # argument tuple is independent of the resolved FileType.
        Args = (Cursor, MetaFile, FileType, Temporary)

        # create the storage object and return it to caller
        return Class._FILE_TABLE_[FileType](*Args)
| edk2-master | BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileTable.py |
## @file
# This file is used to create/update/query/erase table for files
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os
import Common.EdkLogger as EdkLogger
from CommonDataClass import DataClass
from CommonDataClass.DataClass import FileClass
## Convert to SQL required string format
#
# Quote each string and double every embedded single quote so the results
# can be embedded directly into a SQL statement.
#
# @param StringList: Iterable of strings to convert
#
# @retval list: List of quoted/escaped SQL string literals
#
def ConvertToSqlString(StringList):
    # Return a list rather than a lazy ``map`` object: a map iterator can be
    # consumed only once (a Python 3 behavior change from the original
    # Python 2 code, where map() returned a list), and a list stays safe for
    # repeated iteration or indexing by callers.
    return ["'" + S.replace("'", "''") + "'" for S in StringList]
## TableFile
#
# This class defined a common table
#
# @param object: Inherited from object class
#
# @param Cursor: Cursor of the database
# @param TableName: Name of the table
#
class Table(object):
    """Base wrapper around one SQLite table.

    Provides the create/insert/query/drop helpers shared by all concrete
    table classes; subclasses supply the schema via ``_COLUMN_``.
    """
    # Column definition (schema) supplied by subclasses.
    _COLUMN_ = ''
    # ID allocation step and the size of each table's private ID range.
    _ID_STEP_ = 1
    _ID_MAX_ = 0x80000000
    # Dummy record used by some subclasses as an end-of-table flag.
    _DUMMY_ = 0

    def __init__(self, Cursor, Name='', IdBase=0, Temporary=False):
        self.Cur = Cursor
        self.Table = Name
        self.IdBase = int(IdBase)
        self.ID = int(IdBase)
        self.Temporary = Temporary

    def __str__(self):
        return self.Table

    ## Create the table
    #
    # @param NewTable: Drop any existing table of the same name first
    #
    def Create(self, NewTable=True):
        if NewTable:
            self.Drop()

        if self.Temporary:
            SqlCommand = """create temp table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
        else:
            SqlCommand = """create table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
        EdkLogger.debug(EdkLogger.DEBUG_8, SqlCommand)
        self.Cur.execute(SqlCommand)
        self.ID = self.GetId()

    ## Insert a record into the table
    #
    # @retval The ID assigned to the inserted record
    #
    def Insert(self, *Args):
        self.ID = self.ID + self._ID_STEP_
        # Wrap around within this table's private ID range.
        if self.ID >= (self.IdBase + self._ID_MAX_):
            self.ID = self.IdBase + self._ID_STEP_
        Values = ", ".join(str(Arg) for Arg in Args)
        SqlCommand = "insert into %s values(%s, %s)" % (self.Table, self.ID, Values)
        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
        self.Cur.execute(SqlCommand)
        return self.ID

    ## Query table
    #
    # Dump all records of the table to the verbose log.
    #
    def Query(self):
        SqlCommand = """select * from %s""" % self.Table
        self.Cur.execute(SqlCommand)
        for Rs in self.Cur:
            EdkLogger.verbose(str(Rs))
        # BUGFIX: a dead assignment of self.GetId() to an unused local
        # (TotalCount) was removed here.

    ## Drop the table, reporting (but swallowing) any failure
    def Drop(self):
        SqlCommand = """drop table IF EXISTS %s""" % self.Table
        try:
            self.Cur.execute(SqlCommand)
        except Exception as e:
            print("An error occurred when Drop a table:", e.args[0])

    ## Get count
    #
    # @retval Count: Total count of all records
    #
    def GetCount(self):
        SqlCommand = """select count(ID) from %s""" % self.Table
        Record = self.Cur.execute(SqlCommand).fetchall()
        return Record[0][0]

    ## Get the max ID currently stored (IdBase when the table is empty)
    def GetId(self):
        SqlCommand = """select max(ID) from %s""" % self.Table
        Record = self.Cur.execute(SqlCommand).fetchall()
        Id = Record[0][0]
        if Id is None:
            Id = self.IdBase
        return Id

    ## Init the ID of the table from the current table content
    def InitID(self):
        self.ID = self.GetId()

    ## Exec
    #
    # Execute a SQL command and return the fetched result set.
    #
    # @param SqlCommand: The SqlCommand to be executed
    #
    # @retval RecordSet: The result after executed
    #
    def Exec(self, SqlCommand):
        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
        self.Cur.execute(SqlCommand)
        RecordSet = self.Cur.fetchall()
        return RecordSet

    ## Hook for subclasses that append an end-of-table marker record
    def SetEndFlag(self):
        pass

    ## A table is integral when its end-flag record (ID == -1) is present
    def IsIntegral(self):
        Result = self.Exec("select min(ID) from %s" % (self.Table))
        if Result[0][0] != -1:
            return False
        return True

    ## Fetch all real records (positive IDs) in ID order
    def GetAll(self):
        return self.Exec("select * from %s where ID > 0 order by ID" % (self.Table))
## TableDataModel
#
# Table holding the data-model records (CrossIndex, Name, Description).
#
class TableDataModel(Table):
    _COLUMN_ = """
        ID INTEGER PRIMARY KEY,
        CrossIndex INTEGER NOT NULL,
        Name VARCHAR NOT NULL,
        Description VARCHAR
        """
    def __init__(self, Cursor):
        Table.__init__(self, Cursor, 'DataModel')

    ## Insert one data-model record
    #
    # @param CrossIndex:  CrossIndex of a ModelType
    # @param Name:        Name of a ModelType
    # @param Description: Description of a ModelType
    #
    def Insert(self, CrossIndex, Name, Description):
        Name, Description = ConvertToSqlString((Name, Description))
        return Table.Insert(self, CrossIndex, Name, Description)

    ## Populate the table with the default model list
    #
    # Does nothing when the table already contains records.
    #
    def InitTable(self):
        EdkLogger.verbose("\nInitialize table DataModel started ...")
        if self.GetCount():
            return
        for Item in DataClass.MODEL_LIST:
            # Item[0] is the model name (also used as the description),
            # Item[1] is its cross index.
            self.Insert(Item[1], Item[0], Item[0])
        EdkLogger.verbose("Initialize table DataModel ... DONE!")

    ## Get CrossIndex
    #
    # Look up a model's cross index from its name.
    #
    # @param ModelName:   Name of the model
    # @retval CrossIndex: CrossIndex of the model, -1 if not found
    #
    def GetCrossIndex(self, ModelName):
        CrossIndex = -1
        SqlCommand = "select CrossIndex from DataModel where name = '%s'" % ModelName
        self.Cur.execute(SqlCommand)
        for Row in self.Cur:
            CrossIndex = Row[0]
        return CrossIndex
| edk2-master | BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py |
## @file
# This file is used to parse meta files
#
# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import re
import time
import copy
from hashlib import md5
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
import Ecc.EccGlobalData as EccGlobalData
import Ecc.EccToolError as EccToolError
from CommonDataClass.DataClass import *
from Common.DataType import *
from Common.StringUtils import *
from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData
from Common.Expression import *
from CommonDataClass.Exceptions import *
from Ecc.MetaFileWorkspace.MetaFileTable import MetaFileStorage
from GenFds.FdfParser import FdfParser
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.LongFilePathSupport import CodecOpenLongFilePath
## RegEx for finding file versions
# BUGFIX: the hex pattern's character class was written [\da-f-A-F], which
# also matched a literal '-' (the stray range dash); a "version" containing
# '-' would then make the downstream int(Value, 0) raise ValueError.
hexVersionPattern = re.compile(r'0[xX][\da-fA-F]{5,8}')
decVersionPattern = re.compile(r'\d+\.\d+')
## A decorator used to parse macro definition
def ParseMacro(Parser):
    """Decorator for section parsers: intercept DEFINE/EDK_GLOBAL lines.

    If the current line is a macro definition, record it in the proper
    macro scope and skip the wrapped parser; otherwise run *Parser*
    unchanged.
    """
    def MacroParser(self):
        Match = GlobalData.gMacroDefPattern.match(self._CurrentLine)
        if not Match:
            # Not 'DEFINE/EDK_GLOBAL' statement, call decorated method
            Parser(self)
            return

        TokenList = GetSplitValueList(self._CurrentLine[Match.end(1):], TAB_EQUAL_SPLIT, 1)
        # Syntax check
        if not TokenList[0]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        if len(TokenList) < 2:
            # A bare "DEFINE NAME" defines the macro with an empty value.
            TokenList.append('')

        Type = Match.group(1)
        Name, Value = TokenList
        # Global macros can be only defined via environment variable
        if Name in GlobalData.gGlobalDefines:
            EdkLogger.error('Parser', FORMAT_INVALID, "%s can only be defined via environment variable" % Name,
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        # Only upper case letters, digit and '_' are allowed
        if not GlobalData.gMacroNamePattern.match(Name):
            EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)

        # Expand any previously defined macros used in the value.
        Value = ReplaceMacro(Value, self._Macros)
        self._ItemType = MODEL_META_DATA_DEFINE
        # DEFINE defined macros
        if Type == TAB_DSC_DEFINES_DEFINE:
            # DEC parsers track a *list* of section types and key section
            # macros by (Platform, SectionType, Arch); other parsers use a
            # single section type and key by (SectionType, Arch, Platform).
            if isinstance(self, DecParser):
                if MODEL_META_DATA_HEADER in self._SectionType:
                    self._FileLocalMacros[Name] = Value
                else:
                    for Scope in self._Scope:
                        self._SectionsMacroDict.setdefault((Scope[2], Scope[0], Scope[1]), {})[Name] = Value
            elif self._SectionType == MODEL_META_DATA_HEADER:
                self._FileLocalMacros[Name] = Value
            else:
                SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
                if SectionDictKey not in self._SectionsMacroDict:
                    self._SectionsMacroDict[SectionDictKey] = {}
                SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
                SectionLocalMacros[Name] = Value
        # EDK_GLOBAL defined macros
        elif not isinstance(self, DscParser):
            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        elif self._SectionType != MODEL_META_DATA_HEADER:
            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used under [Defines] section",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
            EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)

        self._ValueList = [Type, Name, Value]

    return MacroParser
## Base class of parser
#
# This class is used for derivation purpose. The specific parser for one kind
# type file must derive this class and implement some public interfaces.
#
# @param FilePath The path of platform description file
# @param FileType The raw data of DSC file
# @param Table Database used to retrieve module/package information
# @param Macros Macros used for replacement in file
# @param Owner Owner ID (for sub-section parsing)
# @param From ID from which the data comes (for !INCLUDE directive)
#
class MetaFileParser(object):
    """Common base for INF/DEC/DSC meta-file parsers.

    Implements the one-parser-per-file singleton, macro bookkeeping, the
    generic section-header/[Defines]/[BuildOptions] parsers, and the
    table-backed ``parser[...]`` query interface.  Concrete parsers
    (InfParser, DecParser, DscParser) supply ``Start()`` and the
    ``_SectionParser`` dispatch table.
    """
    # data type (file content) for specific file type
    DataType = {}

    # Parser objects used to implement singleton
    MetaFiles = {}

    ## Factory method
    #
    # One file, one parser object. This factory method makes sure that there's
    # only one object constructed for one meta file.
    #
    #   @param  Class           class object of real AutoGen class
    #                           (InfParser, DecParser or DscParser)
    #   @param  FilePath        The path of meta file
    #   @param  *args           The specific class related parameters
    #   @param  **kwargs        The specific class related dict parameters
    #
    def __new__(Class, FilePath, *args, **kwargs):
        if FilePath in Class.MetaFiles:
            return Class.MetaFiles[FilePath]
        else:
            ParserObject = super(MetaFileParser, Class).__new__(Class)
            Class.MetaFiles[FilePath] = ParserObject
            return ParserObject

    ## Constructor of MetaFileParser
    #
    #  Initialize object of MetaFileParser
    #
    #   @param      FilePath        The path of platform description file
    #   @param      FileType        The raw data of DSC file
    #   @param      Table           Database used to retrieve module/package information
    #   @param      Owner           Owner ID (for sub-section parsing)
    #   @param      From            ID from which the data comes (for !INCLUDE directive)
    #
    def __init__(self, FilePath, FileType, Table, Owner=-1, From=-1):
        self._Table = Table
        self._RawTable = Table
        self._FileType = FileType
        self.MetaFile = FilePath
        self._Defines = {}
        self._FileLocalMacros = {}
        self._SectionsMacroDict = {}

        # for recursive parsing
        self._Owner = [Owner]
        self._From = From

        # parser status for parsing
        self._ValueList = ['', '', '', '', '']
        self._Scope = []
        self._LineIndex = 0
        self._CurrentLine = ''
        self._SectionType = MODEL_UNKNOWN
        self._SectionName = ''
        self._InSubsection = False
        self._SubsectionType = MODEL_UNKNOWN
        self._SubsectionName = ''
        self._ItemType = MODEL_UNKNOWN
        self._LastItem = -1
        self._Enabled = 0
        self._Finished = False
        self._PostProcessed = False
        # Different version of meta-file has different way to parse.
        self._Version = 0
        # UNI object and extra UNI object
        self._UniObj = None
        self._UniExtraObj = None

    ## Store the parsed data in table
    def _Store(self, *Args):
        return self._Table.Insert(*Args)

    ## Virtual method for starting parse; must be overridden by subclasses
    def Start(self):
        raise NotImplementedError

    ## Notify a post-process is needed
    def DoPostProcess(self):
        self._PostProcessed = False

    ## Set parsing complete flag in both class and table
    def _Done(self):
        self._Finished = True
        ## Do not set end flag when processing included files
        if self._From == -1:
            self._Table.SetEndFlag()

    def _PostProcess(self):
        self._PostProcessed = True

    ## Get the parse complete flag
    def _GetFinished(self):
        return self._Finished

    ## Set the complete flag
    def _SetFinished(self, Value):
        self._Finished = Value

    ## Use [] style to query data in table, just for readability
    #
    #   DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)]
    #
    def __getitem__(self, DataInfo):
        if not isinstance(DataInfo, type(())):
            DataInfo = (DataInfo,)

        # Parse the file first, if necessary
        if not self._Finished:
            # NOTE(review): this relies on the raw table exposing
            # IsIntegrity(); the Table base class in MetaDataTable defines
            # IsIntegral() -- confirm MetaFileTable provides IsIntegrity().
            if self._RawTable.IsIntegrity():
                self._Finished = True
            else:
                self._Table = self._RawTable
                self._PostProcessed = False
                self.Start()

        # No specific ARCH or Platform given, use raw data
        if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] is None):
            return self._RawTable.Query(*DataInfo)

        # Do post-process if necessary
        if not self._PostProcessed:
            self._PostProcess()

        return self._Table.Query(*DataInfo)

    ## Data parser for the common format in different type of file
    #
    #   The common format in the meatfile is like
    #
    #       xxx1 | xxx2 | xxx3
    #
    @ParseMacro
    def _CommonParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        self._ValueList[0:len(TokenList)] = TokenList

    ## Data parser for the format in which there's path
    #
    #   Only path can have macro used. So we need to replace them before use.
    #
    @ParseMacro
    def _PathParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        self._ValueList[0:len(TokenList)] = TokenList
        # Don't do macro replacement for dsc file at this point
        if not isinstance(self, DscParser):
            Macros = self._Macros
            self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]

    ## Skip unsupported data
    def _Skip(self):
        # A .UNI file listed under [UserExtensions] is parsed for UNI
        # help-info checks when the relevant ECC options are enabled;
        # everything else is reported as unrecognized and kept verbatim.
        if self._SectionName == TAB_USER_EXTENSIONS.upper() and self._CurrentLine.upper().endswith('.UNI'):
            if EccGlobalData.gConfig.UniCheckHelpInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
                ExtraUni = self._CurrentLine.strip()
                ExtraUniFile = os.path.join(os.path.dirname(self.MetaFile), ExtraUni)
                IsModuleUni = self.MetaFile.upper().endswith('.INF')
                self._UniExtraObj = UniParser(ExtraUniFile, IsExtraUni=True, IsModuleUni=IsModuleUni)
                self._UniExtraObj.Start()
        else:
            EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
                           Line=self._LineIndex + 1, ExtraData=self._CurrentLine);
        self._ValueList[0:1] = [self._CurrentLine]

    ## Section header parser
    #
    #   The section header is always in following format:
    #
    #       [section_name.arch<.platform|module_type>]
    #
    def _SectionHeaderParser(self):
        self._Scope = []
        self._SectionName = ''
        ArchList = set()
        for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
            if Item == '':
                continue
            ItemList = GetSplitValueList(Item, TAB_SPLIT)
            # different section should not mix in one section
            if self._SectionName != '' and self._SectionName != ItemList[0].upper():
                EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
                                File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
            self._SectionName = ItemList[0].upper()
            if self._SectionName in self.DataType:
                self._SectionType = self.DataType[self._SectionName]
            else:
                self._SectionType = MODEL_UNKNOWN
                EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
                               Line=self._LineIndex+1, ExtraData=self._CurrentLine)
            # S1 is always Arch
            if len(ItemList) > 1:
                S1 = ItemList[1].upper()
            else:
                S1 = 'COMMON'
            ArchList.add(S1)
            # S2 may be Platform or ModuleType
            if len(ItemList) > 2:
                S2 = ItemList[2].upper()
            else:
                S2 = 'COMMON'
            self._Scope.append([S1, S2])

        # 'COMMON' must not be used with specific ARCHs at the same section
        if 'COMMON' in ArchList and len(ArchList) > 1:
            EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
                            File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
        # If the section information is needed later, it should be stored in database
        self._ValueList[0] = self._SectionName

    ## [defines] section parser
    @ParseMacro
    def _DefineParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        self._ValueList[1:len(TokenList)] = TokenList
        if not self._ValueList[1]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        if not self._ValueList[2]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)

        self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
        Name, Value = self._ValueList[1], self._ValueList[2]
        # Sometimes, we need to make differences between EDK and EDK2 modules
        if Name == 'INF_VERSION':
            # Accept either a hex literal (0x...) or a dotted Major.Minor,
            # normalizing the latter to 0xMMMMmmmm.
            if hexVersionPattern.match(Value):
                self._Version = int(Value, 0)
            elif decVersionPattern.match(Value):
                ValueList = Value.split('.')
                Major = int(ValueList[0], 0)
                Minor = int(ValueList[1], 0)
                if Major > 0xffff or Minor > 0xffff:
                    EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
                                    ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
                self._Version = int('0x{0:04x}{1:04x}'.format(Major, Minor), 0)
            else:
                EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
                                ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
        elif Name == 'MODULE_UNI_FILE':
            UniFile = os.path.join(os.path.dirname(self.MetaFile), Value)
            if os.path.exists(UniFile):
                self._UniObj = UniParser(UniFile, IsExtraUni=False, IsModuleUni=True)
                self._UniObj.Start()
            else:
                EdkLogger.error('Parser', FILE_NOT_FOUND, "Module UNI file %s is missing." % Value,
                                ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1,
                                RaiseError=False)
        elif Name == 'PACKAGE_UNI_FILE':
            UniFile = os.path.join(os.path.dirname(self.MetaFile), Value)
            if os.path.exists(UniFile):
                self._UniObj = UniParser(UniFile, IsExtraUni=False, IsModuleUni=False)

        if isinstance(self, InfParser) and self._Version < 0x00010005:
            # EDK module allows using defines as macros
            self._FileLocalMacros[Name] = Value
        self._Defines[Name] = Value

    ## [BuildOptions] section parser
    @ParseMacro
    def _BuildOptionParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
        if len(TokenList2) == 2:
            self._ValueList[0] = TokenList2[0]              # toolchain family
            self._ValueList[1] = TokenList2[1]              # keys
        else:
            self._ValueList[1] = TokenList[0]
        if len(TokenList) == 2 and not isinstance(self, DscParser): # value
            self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)

        if self._ValueList[1].count('_') != 4:
            EdkLogger.error(
                'Parser',
                FORMAT_INVALID,
                "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
                ExtraData=self._CurrentLine,
                File=self.MetaFile,
                Line=self._LineIndex+1
                )

    def _GetMacros(self):
        # File-local macros first, overridden by section-scoped ones.
        Macros = {}
        Macros.update(self._FileLocalMacros)
        Macros.update(self._GetApplicableSectionMacro())
        return Macros

    ## Get section Macros that are applicable to current line, which may come from other sections
    ## that share the same name while scope is wider
    def _GetApplicableSectionMacro(self):
        Macros = {}
        # Widest scope first, so narrower definitions override wider ones.
        for Scope1, Scope2 in [("COMMON", "COMMON"), ("COMMON", self._Scope[0][1]),
                               (self._Scope[0][0], "COMMON"), (self._Scope[0][0], self._Scope[0][1])]:
            if (self._SectionType, Scope1, Scope2) in self._SectionsMacroDict:
                Macros.update(self._SectionsMacroDict[(self._SectionType, Scope1, Scope2)])
        return Macros

    _SectionParser = {}
    Finished = property(_GetFinished, _SetFinished)
    _Macros = property(_GetMacros)
## INF file parser class
#
# @param FilePath The path of platform description file
# @param FileType The raw data of DSC file
# @param Table Database used to retrieve module/package information
# @param Macros Macros used for replacement in file
#
class InfParser(MetaFileParser):
# INF file supported data types (one type per section)
DataType = {
TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
TAB_GUIDS.upper() : MODEL_EFI_GUID,
TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
TAB_PPIS.upper() : MODEL_EFI_PPI,
TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
}
## Constructor of InfParser
#
# Initialize object of InfParser
#
# @param FilePath The path of module description file
# @param FileType The raw data of DSC file
# @param Table Database used to retrieve module/package information
# @param Macros Macros used for replacement in file
#
    def __init__(self, FilePath, FileType, Table):
        # MetaFileParser.__new__ caches one parser per file path, so
        # __init__ may run again on an already-constructed object; the
        # presence of _Table marks a completed initialization.
        if hasattr(self, "_Table"):
            return
        MetaFileParser.__init__(self, FilePath, FileType, Table)
        # File table of the ECC database, and this INF file's record ID in
        # it (-1 until Start() inserts/looks up the record).
        self.TblFile = EccGlobalData.gDb.TblFile
        self.FileID = -1
## Parser starter
def Start(self):
NmakeLine = ''
Content = ''
Usage = ''
try:
Content = open(str(self.MetaFile), 'r').readlines()
except:
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
#
# Insert a record for file
#
Filename = NormPath(self.MetaFile)
FileID = self.TblFile.GetFileId(Filename)
if FileID:
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
# parse the file line by line
IsFindBlockComment = False
for Index in range(0, len(Content)):
if self._SectionType in [MODEL_EFI_GUID,
MODEL_EFI_PROTOCOL,
MODEL_EFI_PPI,
MODEL_PCD_FIXED_AT_BUILD,
MODEL_PCD_PATCHABLE_IN_MODULE,
MODEL_PCD_FEATURE_FLAG,
MODEL_PCD_DYNAMIC_EX,
MODEL_PCD_DYNAMIC]:
Line = Content[Index].strip()
if Line.startswith(TAB_SPECIAL_COMMENT):
Usage += ' ' + Line[Line.find(TAB_SPECIAL_COMMENT):]
continue
elif Line.startswith(TAB_COMMENT_SPLIT):
continue
elif Line.find(TAB_COMMENT_SPLIT) > 0:
Usage += ' ' + Line[Line.find(TAB_COMMENT_SPLIT):]
Line = Line[:Line.find(TAB_COMMENT_SPLIT)]
else:
# skip empty, commented, block commented lines
Line = CleanString(Content[Index], AllowCppStyleComment=True)
Usage = ''
NextLine = ''
if Index + 1 < len(Content):
NextLine = CleanString(Content[Index + 1])
if Line == '':
continue
if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
IsFindBlockComment = True
continue
if Line.find(DataType.TAB_COMMENT_EDK_END) > -1:
IsFindBlockComment = False
continue
if IsFindBlockComment:
continue
self._LineIndex = Index
self._CurrentLine = Line
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionHeaderParser()
# Check invalid sections
if self._Version < 0x00010005:
if self._SectionType in [MODEL_META_DATA_BUILD_OPTION,
MODEL_EFI_LIBRARY_CLASS,
MODEL_META_DATA_PACKAGE,
MODEL_PCD_FIXED_AT_BUILD,
MODEL_PCD_PATCHABLE_IN_MODULE,
MODEL_PCD_FEATURE_FLAG,
MODEL_PCD_DYNAMIC_EX,
MODEL_PCD_DYNAMIC,
MODEL_EFI_GUID,
MODEL_EFI_PROTOCOL,
MODEL_EFI_PPI,
MODEL_META_DATA_USER_EXTENSION]:
EdkLogger.error('Parser', FORMAT_INVALID,
"Section [%s] is not allowed in inf file without version" % (self._SectionName),
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
elif self._SectionType in [MODEL_EFI_INCLUDE,
MODEL_EFI_LIBRARY_INSTANCE,
MODEL_META_DATA_NMAKE]:
EdkLogger.error('Parser', FORMAT_INVALID,
"Section [%s] is not allowed in inf file with version 0x%08x" % (self._SectionName, self._Version),
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
continue
# merge two lines specified by '\' in section NMAKE
elif self._SectionType == MODEL_META_DATA_NMAKE:
if Line[-1] == '\\':
if NextLine == '':
self._CurrentLine = NmakeLine + Line[0:-1]
NmakeLine = ''
else:
if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
self._CurrentLine = NmakeLine + Line[0:-1]
NmakeLine = ''
else:
NmakeLine = NmakeLine + ' ' + Line[0:-1]
continue
else:
self._CurrentLine = NmakeLine + Line
NmakeLine = ''
# section content
self._ValueList = ['', '', '']
# parse current line, result will be put in self._ValueList
self._SectionParser[self._SectionType](self)
if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
continue
#
# Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
# LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
#
self._ValueList[0] = self._ValueList[0].replace('/', '\\')
Usage = Usage.strip()
for Arch, Platform in self._Scope:
self._Store(self._SectionType,
self._ValueList[0],
self._ValueList[1],
self._ValueList[2],
Arch,
Platform,
self._Owner[-1],
self.FileID,
self._LineIndex+1,
-1,
self._LineIndex+1,
-1,
0,
Usage
)
Usage = ''
if IsFindBlockComment:
EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
File=self.MetaFile)
self._Done()
## Data parser for the format in which there's path
#
# Only path can have macro used. So we need to replace them before use.
#
def _IncludeParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
self._ValueList[0:len(TokenList)] = TokenList
Macros = self._Macros
if Macros:
for Index in range(0, len(self._ValueList)):
Value = self._ValueList[Index]
if not Value:
continue
self._ValueList[Index] = ReplaceMacro(Value, Macros)
    ## Parse [Sources] section
    #
    #   Only path can have macro used. So we need to replace them before use.
    #
    @ParseMacro
    def _SourceFileParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
        self._ValueList[0:len(TokenList)] = TokenList
        Macros = self._Macros
        # For Acpi tables, remove macro like ' TABLE_NAME=Sata1'
        # NOTE(review): 'COMPONENT_TYPE' is tested in Macros but read from
        # self._Defines; for pre-0x00010005 INFs defines are mirrored into
        # the file-local macros, which presumably keeps the two in sync --
        # confirm before relying on it.
        if 'COMPONENT_TYPE' in Macros:
            if self._Defines['COMPONENT_TYPE'].upper() == 'ACPITABLE':
                self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
        # Vestigial check kept from the original code; the branch is a no-op
        # (but the lookup raises KeyError if BASE_NAME is undefined).
        if self._Defines['BASE_NAME'] == 'Microcode':
            pass
        self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
## Parse [Binaries] section
#
# Only path can have macro used. So we need to replace them before use.
#
@ParseMacro
def _BinaryFileParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
if len(TokenList) < 2:
EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
File=self.MetaFile, Line=self._LineIndex+1)
if not TokenList[0]:
EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
File=self.MetaFile, Line=self._LineIndex+1)
if not TokenList[1]:
EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
File=self.MetaFile, Line=self._LineIndex+1)
self._ValueList[0:len(TokenList)] = TokenList
self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
## [nmake] section parser (Edk.x style only)
def _NmakeParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
self._ValueList[0:len(TokenList)] = TokenList
# remove macros
self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
# remove self-reference in macro setting
#self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
    ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
    @ParseMacro
    def _PcdParser(self):
        """Parse one PCD line: <TokenSpaceGuidCName>.<PcdCName>[|<Value>]."""
        TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
        ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
        if len(ValueList) != 2:
            EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
                            File=self.MetaFile, Line=self._LineIndex+1)
        # Slice assignment on [0:1] INSERTS both parts, shifting the rest of
        # _ValueList right: ['','',''] becomes [guid, pcd, '', ''] so index 2
        # below is the value slot.
        self._ValueList[0:1] = ValueList
        if len(TokenList) > 1:
            self._ValueList[2] = TokenList[1]
        if self._ValueList[0] == '' or self._ValueList[1] == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
                            ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
                            File=self.MetaFile, Line=self._LineIndex+1)

        # if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
        if self._ValueList[2] != '':
            InfPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
            if InfPcdValueList[0] in ['True', 'true', 'TRUE']:
                self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '1', 1);
            elif InfPcdValueList[0] in ['False', 'false', 'FALSE']:
                self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '0', 1);
## [depex] section parser
@ParseMacro
def _DepexParser(self):
self._ValueList[0:1] = [self._CurrentLine]
_SectionParser = {
MODEL_UNKNOWN : MetaFileParser._Skip,
MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
MODEL_EFI_INCLUDE : _IncludeParser, # for Edk.x modules
MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for Edk.x modules
MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser,
MODEL_META_DATA_NMAKE : _NmakeParser, # for Edk.x modules
MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
MODEL_PCD_FEATURE_FLAG : _PcdParser,
MODEL_PCD_DYNAMIC_EX : _PcdParser,
MODEL_PCD_DYNAMIC : _PcdParser,
MODEL_EFI_SOURCE_FILE : _SourceFileParser,
MODEL_EFI_GUID : MetaFileParser._CommonParser,
MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser,
MODEL_EFI_PPI : MetaFileParser._CommonParser,
MODEL_EFI_DEPEX : _DepexParser,
MODEL_EFI_BINARY_FILE : _BinaryFileParser,
MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
}
## DSC file parser class
#
# Parses an EDK2 platform description (.dsc) file and stores the parsed
# items into the ECC database for later checking.
#
#   @param      FilePath        The path of platform description file
#   @param      FileType        File type constant (MODEL_FILE_DSC)
#   @param      Table           Database table used to store parsed items
#   @param      Owner           Owner ID (for sub-section parsing)
#   @param      From            ID from which the data comes (for !INCLUDE directive)
#
class DscParser(MetaFileParser):
    # DSC file supported data types (one type per section)
    DataType = {
        TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
        TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
        TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
        TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
        TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
        TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
        TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
        TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
        TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
        TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
        TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
        TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
        TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
        TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
        TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
        TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
        TAB_DSC_DEFINES_EDKGLOBAL : MODEL_META_DATA_GLOBAL_DEFINE,
        # Conditional directives (!if/!ifdef/... and !include/!error)
        TAB_INCLUDE.upper() : MODEL_META_DATA_INCLUDE,
        TAB_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
        TAB_IF_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
        TAB_IF_N_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
        TAB_ELSE_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
        TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
        TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
        TAB_ERROR.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR,
    }

    # Keywords accepted in the top-level [Defines] section
    DefineKeywords = [
        "DSC_SPECIFICATION",
        "PLATFORM_NAME",
        "PLATFORM_GUID",
        "PLATFORM_VERSION",
        "SKUID_IDENTIFIER",
        "PCD_INFO_GENERATION",
        "SUPPORTED_ARCHITECTURES",
        "BUILD_TARGETS",
        "OUTPUT_DIRECTORY",
        "FLASH_DEFINITION",
        "BUILD_NUMBER",
        "RFC_LANGUAGES",
        "ISO_LANGUAGES",
        "TIME_STAMP_FILE",
        "VPD_TOOL_GUID",
        "FIX_LOAD_TOP_MEMORY_ADDRESS"
    ]

    # Keywords accepted in a component sub-section <Defines>
    SubSectionDefineKeywords = [
        "FILE_GUID"
    ]

    # Regex used to detect macro/PCD references inside a value
    SymbolPattern = ValueExpression.SymbolPattern
    ## Constructor of DscParser
    #
    #  Initialize object of DscParser
    #
    #   @param      FilePath        The path of platform description file
    #   @param      FileType        File type constant (MODEL_FILE_DSC)
    #   @param      Table           Database table used to store parsed items
    #   @param      Owner           Owner ID (for sub-section parsing)
    #   @param      From            ID from which the data comes (for !INCLUDE directive)
    #
    def __init__(self, FilePath, FileType, Table, Owner=-1, From=-1):
        # prevent re-initialization
        if hasattr(self, "_Table"):
            return
        MetaFileParser.__init__(self, FilePath, FileType, Table, Owner, From)
        self._Version = 0x00010005  # Only EDK2 dsc file is supported
        # Stacks tracking the active conditional directives and their
        # evaluation results (used to compute the Enabled flag per item)
        self._DirectiveStack = []
        self._DirectiveEvalStack = []
        self._Enabled = 1

        # Final valid replacable symbols (straightforward PCD values)
        self._Symbols = {}
        #
        # Map the ID between the original table and new table to track
        # the owner item
        #
        self._IdMapping = {-1:-1}

        self.TblFile = EccGlobalData.gDb.TblFile
        self.FileID = -1
## Parser starter
def Start(self):
Content = ''
try:
Content = open(str(self.MetaFile.Path), 'r').readlines()
except:
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
#
# Insert a record for file
#
Filename = NormPath(self.MetaFile.Path)
FileID = self.TblFile.GetFileId(Filename)
if FileID:
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)
for Index in range(0, len(Content)):
Line = CleanString(Content[Index])
# skip empty line
if Line == '':
continue
self._CurrentLine = Line
self._LineIndex = Index
if self._InSubsection and self._Owner[-1] == -1:
self._Owner.append(self._LastItem)
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionType = MODEL_META_DATA_SECTION_HEADER
# subsection ending
elif Line[0] == '}' and self._InSubsection:
self._InSubsection = False
self._SubsectionType = MODEL_UNKNOWN
self._SubsectionName = ''
self._Owner[-1] = -1
continue
# subsection header
elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER
# directive line
elif Line[0] == '!':
self._DirectiveParser()
continue
if self._InSubsection:
SectionType = self._SubsectionType
else:
SectionType = self._SectionType
self._ItemType = SectionType
self._ValueList = ['', '', '']
self._SectionParser[SectionType](self)
if self._ValueList is None:
continue
#
# Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
# LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
#
for Arch, ModuleType in self._Scope:
self._LastItem = self._Store(
self._ItemType,
self._ValueList[0],
self._ValueList[1],
self._ValueList[2],
Arch,
ModuleType,
self._Owner[-1],
self.FileID,
self._From,
self._LineIndex+1,
-1,
self._LineIndex+1,
-1,
self._Enabled
)
if self._DirectiveStack:
Type, Line, Text = self._DirectiveStack[-1]
EdkLogger.error('Parser', FORMAT_INVALID, "No matching '!endif' found",
ExtraData=Text, File=self.MetaFile, Line=Line)
self._Done()
## <subsection_header> parser
def _SubsectionHeaderParser(self):
self._SubsectionName = self._CurrentLine[1:-1].upper()
if self._SubsectionName in self.DataType:
self._SubsectionType = self.DataType[self._SubsectionName]
else:
self._SubsectionType = MODEL_UNKNOWN
EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
Line=self._LineIndex+1, ExtraData=self._CurrentLine)
self._ValueList[0] = self._SubsectionName
    ## Directive statement parser
    #
    # Handles '!'-prefixed lines (!if/!ifdef/!ifndef/!elseif/!else/!endif,
    # !include, !error): validates syntax, maintains the directive stack used
    # to detect unbalanced conditionals, and stores the directive item.
    #
    def _DirectiveParser(self):
        self._ValueList = ['', '', '']
        TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
        self._ValueList[0:len(TokenList)] = TokenList

        # Syntax check
        DirectiveName = self._ValueList[0].upper()
        if DirectiveName not in self.DataType:
            EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
                            File=self.MetaFile, Line=self._LineIndex+1)
        # These directives require an expression/macro/path operand.
        if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
            EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
                            File=self.MetaFile, Line=self._LineIndex+1,
                            ExtraData=self._CurrentLine)

        ItemType = self.DataType[DirectiveName]
        if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
            # Remove all directives between !if and !endif, including themselves
            while self._DirectiveStack:
                # Remove any !else or !elseif
                DirectiveInfo = self._DirectiveStack.pop()
                if DirectiveInfo[0] in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
                                        MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
                                        MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
                    break
            else:
                # Stack exhausted without finding an opening !if*: stray !endif.
                EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'",
                                File=self.MetaFile, Line=self._LineIndex+1,
                                ExtraData=self._CurrentLine)
        elif ItemType != MODEL_META_DATA_INCLUDE:
            # Break if there's a !else is followed by a !elseif
            if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \
               self._DirectiveStack and \
               self._DirectiveStack[-1][0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
                EdkLogger.error("Parser", FORMAT_INVALID, "'!elseif' after '!else'",
                                File=self.MetaFile, Line=self._LineIndex+1,
                                ExtraData=self._CurrentLine)
            self._DirectiveStack.append((ItemType, self._LineIndex+1, self._CurrentLine))
        elif self._From > 0:
            # !include inside an already-included file is not supported here.
            EdkLogger.error('Parser', FORMAT_INVALID,
                            "No '!include' allowed in included file",
                            ExtraData=self._CurrentLine, File=self.MetaFile,
                            Line=self._LineIndex+1)

        #
        # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
        # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
        #
        self._LastItem = self._Store(
                                ItemType,
                                self._ValueList[0],
                                self._ValueList[1],
                                self._ValueList[2],
                                'COMMON',
                                'COMMON',
                                self._Owner[-1],
                                self.FileID,
                                self._From,
                                self._LineIndex+1,
                                -1,
                                self._LineIndex+1,
                                -1,
                                0
                                )
    ## [defines] section parser
    #
    # Splits "NAME = VALUE", validates both parts, checks NAME against the
    # allowed keyword lists and records the pair in self._Defines.
    #
    @ParseMacro
    def _DefineParser(self):
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        self._ValueList[1:len(TokenList)] = TokenList

        # Syntax check
        if not self._ValueList[1]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        if not self._ValueList[2]:
            EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)

        # NOTE(review): because of the 'and' joining these clauses, an unknown
        # keyword is only rejected when inside a sub-section; in the top-level
        # [Defines] section the check never fires. Looks suspicious — confirm
        # intent before changing, as tightening it would start raising errors
        # on files that currently parse.
        if (not self._ValueList[1] in self.DefineKeywords and
            (self._InSubsection and self._ValueList[1] not in self.SubSectionDefineKeywords)):
            EdkLogger.error('Parser', FORMAT_INVALID,
                            "Unknown keyword found: %s. "
                            "If this is a macro you must "
                            "add it as a DEFINE in the DSC" % self._ValueList[1],
                            ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
        self._Defines[self._ValueList[1]] = self._ValueList[2]
        self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
@ParseMacro
def _SkuIdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
if len(TokenList) != 2:
EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Integer>|<UiName>'",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
self._ValueList[0:len(TokenList)] = TokenList
## Parse Edk style of library modules
def _LibraryInstanceParser(self):
self._ValueList[0] = self._CurrentLine
## PCD sections parser
#
# [PcdsFixedAtBuild]
# [PcdsPatchableInModule]
# [PcdsFeatureFlag]
# [PcdsDynamicEx
# [PcdsDynamicExDefault]
# [PcdsDynamicExVpd]
# [PcdsDynamicExHii]
# [PcdsDynamic]
# [PcdsDynamicDefault]
# [PcdsDynamicVpd]
# [PcdsDynamicHii]
#
@ParseMacro
def _PcdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
if len(TokenList) == 2:
self._ValueList[2] = TokenList[1]
if self._ValueList[0] == '' or self._ValueList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
File=self.MetaFile, Line=self._LineIndex+1)
if self._ValueList[2] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
File=self.MetaFile, Line=self._LineIndex+1)
# if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '1', 1);
elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1);
## [components] section parser
@ParseMacro
def _ComponentParser(self):
if self._CurrentLine[-1] == '{':
self._ValueList[0] = self._CurrentLine[0:-1].strip()
self._InSubsection = True
else:
self._ValueList[0] = self._CurrentLine
## [LibraryClasses] section
@ParseMacro
def _LibraryClassParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
if len(TokenList) < 2:
EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
File=self.MetaFile, Line=self._LineIndex+1)
if TokenList[0] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
File=self.MetaFile, Line=self._LineIndex+1)
if TokenList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
File=self.MetaFile, Line=self._LineIndex+1)
self._ValueList[0:len(TokenList)] = TokenList
## [BuildOptions] section parser
@ParseMacro
def _BuildOptionParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
if len(TokenList2) == 2:
self._ValueList[0] = TokenList2[0] # toolchain family
self._ValueList[1] = TokenList2[1] # keys
else:
self._ValueList[1] = TokenList[0]
if len(TokenList) == 2: # value
self._ValueList[2] = TokenList[1]
if self._ValueList[1].count('_') != 4:
EdkLogger.error(
'Parser',
FORMAT_INVALID,
"'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
ExtraData=self._CurrentLine,
File=self.MetaFile,
Line=self._LineIndex+1
)
## Override parent's method since we'll do all macro replacements in parser
def _GetMacros(self):
Macros = dict( [('ARCH', 'IA32'), ('FAMILY', TAB_COMPILER_MSFT), ('TOOL_CHAIN_TAG', 'VS2008x86'), ('TARGET', 'DEBUG')])
Macros.update(self._FileLocalMacros)
Macros.update(self._GetApplicableSectionMacro())
Macros.update(GlobalData.gEdkGlobal)
Macros.update(GlobalData.gPlatformDefines)
Macros.update(GlobalData.gCommandLineDefines)
# PCD cannot be referenced in macro definition
if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]:
Macros.update(self._Symbols)
return Macros
    ## Second parsing pass
    #
    # Replays every raw record through a per-type processor (macro expansion,
    # directive evaluation, !include expansion), re-stores the processed
    # records into a fresh table, and finally copies them into the ECC
    # database. Also publishes file-local macros as platform defines.
    #
    def _PostProcess(self):
        # Per-item-type processor dispatch table.
        Processer = {
            MODEL_META_DATA_SECTION_HEADER : self.__ProcessSectionHeader,
            MODEL_META_DATA_SUBSECTION_HEADER : self.__ProcessSubsectionHeader,
            MODEL_META_DATA_HEADER : self.__ProcessDefine,
            MODEL_META_DATA_DEFINE : self.__ProcessDefine,
            MODEL_META_DATA_GLOBAL_DEFINE : self.__ProcessDefine,
            MODEL_META_DATA_INCLUDE : self.__ProcessDirective,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_IF : self.__ProcessDirective,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE : self.__ProcessDirective,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF : self.__ProcessDirective,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF : self.__ProcessDirective,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF : self.__ProcessDirective,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF : self.__ProcessDirective,
            MODEL_EFI_SKU_ID : self.__ProcessSkuId,
            MODEL_EFI_LIBRARY_INSTANCE : self.__ProcessLibraryInstance,
            MODEL_EFI_LIBRARY_CLASS : self.__ProcessLibraryClass,
            MODEL_PCD_FIXED_AT_BUILD : self.__ProcessPcd,
            MODEL_PCD_PATCHABLE_IN_MODULE : self.__ProcessPcd,
            MODEL_PCD_FEATURE_FLAG : self.__ProcessPcd,
            MODEL_PCD_DYNAMIC_DEFAULT : self.__ProcessPcd,
            MODEL_PCD_DYNAMIC_HII : self.__ProcessPcd,
            MODEL_PCD_DYNAMIC_VPD : self.__ProcessPcd,
            MODEL_PCD_DYNAMIC_EX_DEFAULT : self.__ProcessPcd,
            MODEL_PCD_DYNAMIC_EX_HII : self.__ProcessPcd,
            MODEL_PCD_DYNAMIC_EX_VPD : self.__ProcessPcd,
            MODEL_META_DATA_COMPONENT : self.__ProcessComponent,
            MODEL_META_DATA_BUILD_OPTION : self.__ProcessBuildOption,
            MODEL_UNKNOWN : self._Skip,
            MODEL_META_DATA_USER_EXTENSION : self._Skip,
            MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR : self._Skip,
        }

        # Swap the raw table out and create a fresh storage for processed items.
        self._RawTable = self._Table
        self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True)
        self._DirectiveStack = []
        self._DirectiveEvalStack = []
        self._FileWithError = self.MetaFile
        self._FileLocalMacros = {}
        self._SectionsMacroDict = {}
        GlobalData.gPlatformDefines = {}

        # Get all macro and PCD which has straightforward value
        self.__RetrievePcdValue()
        self._Content = self._RawTable.GetAll()
        self._ContentIndex = 0
        # Index-based loop: __ProcessDirective may splice included records
        # into self._Content and rewind the index.
        while self._ContentIndex < len(self._Content) :
            Id, self._ItemType, V1, V2, V3, S1, S2, Owner, BelongsToFile, self._From, \
                LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]

            if self._From < 0:
                self._FileWithError = self.MetaFile

            self._ContentIndex += 1

            self._Scope = [[S1, S2]]
            self._LineIndex = LineStart - 1
            self._ValueList = [V1, V2, V3]

            try:
                Processer[self._ItemType]()
            except EvaluationException as Excpt:
                #
                # Only catch expression evaluation error here. We need to report
                # the precise number of line on which the error occurred
                #
                pass
#                 EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
#                                 File=self._FileWithError, ExtraData=' '.join(self._ValueList),
#                                 Line=self._LineIndex+1)
            except MacroException as Excpt:
                EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
                                File=self._FileWithError, ExtraData=' '.join(self._ValueList),
                                Line=self._LineIndex+1)

            # Processors set _ValueList to None to drop an item (e.g. !include).
            if self._ValueList is None:
                continue

            # Re-map the owner ID from the raw table to the new table.
            NewOwner = self._IdMapping.get(Owner, -1)
            # Enabled iff no surrounding conditional evaluated to False.
            self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
            self._LastItem = self._Store(
                                self._ItemType,
                                self._ValueList[0],
                                self._ValueList[1],
                                self._ValueList[2],
                                S1,
                                S2,
                                NewOwner,
                                BelongsToFile,
                                self._From,
                                self._LineIndex+1,
                                -1,
                                self._LineIndex+1,
                                -1,
                                self._Enabled
                                )
            self._IdMapping[Id] = self._LastItem

        RecordList = self._Table.GetAll()

        # Both working tables are temporary; persist the results in the ECC DB.
        self._RawTable.Drop()
        self._Table.Drop()
        for Record in RecordList:
            EccGlobalData.gDb.TblDsc.Insert(Record[1], Record[2], Record[3], Record[4], Record[5], Record[6], Record[7], Record[8], Record[9], Record[10], Record[11], Record[12], Record[13], Record[14])
        GlobalData.gPlatformDefines.update(self._FileLocalMacros)
        self._PostProcessed = True
        self._Content = None
def __ProcessSectionHeader(self):
self._SectionName = self._ValueList[0]
if self._SectionName in self.DataType:
self._SectionType = self.DataType[self._SectionName]
else:
self._SectionType = MODEL_UNKNOWN
def __ProcessSubsectionHeader(self):
self._SubsectionName = self._ValueList[0]
if self._SubsectionName in self.DataType:
self._SubsectionType = self.DataType[self._SubsectionName]
else:
self._SubsectionType = MODEL_UNKNOWN
def __RetrievePcdValue(self):
Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem=-1.0)
for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
# Only use PCD whose value is straitforward (no macro and PCD)
if self.SymbolPattern.findall(Value):
continue
Name = TokenSpaceGuid + '.' + PcdName
# Don't use PCD with different values.
if Name in self._Symbols and self._Symbols[Name] != Value:
self._Symbols.pop(Name)
continue
self._Symbols[Name] = Value
Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem=-1.0)
for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
# Only use PCD whose value is straitforward (no macro and PCD)
if self.SymbolPattern.findall(Value):
continue
Name = TokenSpaceGuid+'.'+PcdName
# Don't use PCD with different values.
if Name in self._Symbols and self._Symbols[Name] != Value:
self._Symbols.pop(Name)
continue
self._Symbols[Name] = Value
    ## Process a DEFINE / EDK_GLOBAL / [Defines]-keyword item
    #
    # Expands macros in the value and records the name/value pair in the
    # appropriate macro scope (file-local, section-local, or EDK-global).
    # Skipped entirely when the item is inside a disabled conditional region.
    #
    def __ProcessDefine(self):
        if not self._Enabled:
            return

        Type, Name, Value = self._ValueList
        # Best-effort expansion: unresolved macros stay in place (no raise).
        Value = ReplaceMacro(Value, self._Macros, False)
        if self._ItemType == MODEL_META_DATA_DEFINE:
            if self._SectionType == MODEL_META_DATA_HEADER:
                # DEFINE in [Defines] is visible to the whole file.
                self._FileLocalMacros[Name] = Value
            else:
                # DEFINE elsewhere is scoped to (section type, arch, module type).
                SectionDictKey = self._SectionType, self._Scope[0][0], self._Scope[0][1]
                if SectionDictKey not in self._SectionsMacroDict:
                    self._SectionsMacroDict[SectionDictKey] = {}
                SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
                SectionLocalMacros[Name] = Value
        elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
            GlobalData.gEdkGlobal[Name] = Value

        #
        # Keyword in [Defines] section can be used as Macros
        #
        if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
            self._FileLocalMacros[Name] = Value

        self._ValueList = [Type, Name, Value]
    ## Process a conditional directive or !include item
    #
    # Evaluates !if/!elseif expressions, maintains the directive/evaluation
    # stacks that drive the Enabled flag, and expands !include files by
    # recursively parsing them and splicing their records into the replay
    # stream at the current position.
    #
    def __ProcessDirective(self):
        Result = None
        if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
                              MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF]:
            Macros = self._Macros
            Macros.update(GlobalData.gGlobalDefines)
            try:
                Result = ValueExpression(self._ValueList[1], Macros)()
            except SymbolNotFound as Exc:
                # An undefined symbol makes the condition False, not an error.
                EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
                Result = False
            except WrnExpression as Excpt:
                #
                # Catch expression evaluation warning here. We need to report
                # the precise number of line and return the evaluation result
                #
                EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
                               File=self._FileWithError, ExtraData=' '.join(self._ValueList),
                               Line=self._LineIndex+1)
                Result = Excpt.result
            except BadExpression as Exc:
                EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
                Result = False

        if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
                              MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
                              MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
            self._DirectiveStack.append(self._ItemType)
            if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
                Result = bool(Result)
            else:
                # !ifdef/!ifndef test macro existence, not its value.
                Macro = self._ValueList[1]
                Macro = Macro[2:-1] if (Macro.startswith("$(") and Macro.endswith(")")) else Macro
                Result = Macro in self._Macros
                if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF:
                    Result = not Result
            self._DirectiveEvalStack.append(Result)
        elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF:
            # !elseif: invert the previous branch, then push this branch's result.
            self._DirectiveStack.append(self._ItemType)
            self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
            self._DirectiveEvalStack.append(bool(Result))
        elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
            self._DirectiveStack[-1] = self._ItemType
            self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
        elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
            # Back to the nearest !if/!ifdef/!ifndef
            while self._DirectiveStack:
                self._DirectiveEvalStack.pop()
                Directive = self._DirectiveStack.pop()
                if Directive in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
                                 MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
                                 MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
                                 MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
                    break
        elif self._ItemType == MODEL_META_DATA_INCLUDE:
            # The included file must be relative to workspace or same directory as DSC file
            __IncludeMacros = {}
            #
            # Allow using system environment variables  in path after !include
            #
            __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE']
            #
            # Allow using MACROs comes from [Defines] section to keep compatible.
            #
            __IncludeMacros.update(self._Macros)

            IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
            #
            # First search the include file under the same directory as DSC file
            #
            IncludedFile1 = PathClass(IncludedFile, self.MetaFile.Dir)
            ErrorCode, ErrorInfo1 = IncludedFile1.Validate()
            if ErrorCode != 0:
                #
                # Also search file under the WORKSPACE directory
                #
                IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
                ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
                                    Line=self._LineIndex+1, ExtraData=ErrorInfo1 + "\n"+ ErrorInfo2)

            self._FileWithError = IncludedFile1

            IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, True)
            Owner = self._Content[self._ContentIndex-1][0]
            Parser = DscParser(IncludedFile1, self._FileType, IncludedFileTable,
                               Owner=Owner, From=Owner)

            # set the parser status with current status
            Parser._SectionName = self._SectionName
            Parser._SectionType = self._SectionType
            Parser._Scope = self._Scope
            Parser._Enabled = self._Enabled
            # Parse the included file
            Parser.Start()

            # update current status with sub-parser's status
            self._SectionName = Parser._SectionName
            self._SectionType = Parser._SectionType
            self._Scope = Parser._Scope
            self._Enabled = Parser._Enabled

            # Insert all records in the table for the included file into dsc file table
            Records = IncludedFileTable.GetAll()
            if Records:
                self._Content[self._ContentIndex:self._ContentIndex] = Records
            # Drop the !include item itself and rewind so the spliced records
            # are processed next.
            self._Content.pop(self._ContentIndex-1)
            self._ValueList = None
            self._ContentIndex -= 1
def __ProcessSkuId(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
for Value in self._ValueList]
def __ProcessLibraryInstance(self):
self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
def __ProcessLibraryClass(self):
self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
    ## Evaluate a PCD item's value expression
    #
    # The stored value may be "<expr>" or "<expr>|VOID*|<size>" etc.; the
    # expression field is evaluated with the current macro/symbol set, boolean
    # results are normalized to '1'/'0', and the fields are re-joined.
    #
    def __ProcessPcd(self):
        ValueList = GetSplitValueList(self._ValueList[2])
        #
        # PCD value can be an expression
        #
        if len(ValueList) > 1 and ValueList[1] == TAB_VOID:
            # VOID* form: the expression is the FIRST field.
            PcdValue = ValueList[0]
            try:
                ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
            except WrnExpression as Value:
                # A warning still yields a usable result.
                ValueList[0] = Value.result
        else:
            # Otherwise the expression is the LAST field.
            PcdValue = ValueList[-1]
            try:
                ValueList[-1] = ValueExpression(PcdValue, self._Macros)(True)
            except WrnExpression as Value:
                ValueList[-1] = Value.result

            if ValueList[-1] == 'True':
                ValueList[-1] = '1'
            if ValueList[-1] == 'False':
                ValueList[-1] = '0'

        self._ValueList[2] = '|'.join(ValueList)
def __ProcessComponent(self):
self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
def __ProcessBuildOption(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
for Value in self._ValueList]
_SectionParser = {
MODEL_META_DATA_HEADER : _DefineParser,
MODEL_EFI_SKU_ID : _SkuIdParser,
MODEL_EFI_LIBRARY_INSTANCE : _LibraryInstanceParser,
MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser,
MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
MODEL_PCD_FEATURE_FLAG : _PcdParser,
MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser,
MODEL_PCD_DYNAMIC_HII : _PcdParser,
MODEL_PCD_DYNAMIC_VPD : _PcdParser,
MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser,
MODEL_PCD_DYNAMIC_EX_HII : _PcdParser,
MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser,
MODEL_META_DATA_COMPONENT : _ComponentParser,
MODEL_META_DATA_BUILD_OPTION : _BuildOptionParser,
MODEL_UNKNOWN : MetaFileParser._Skip,
MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
MODEL_META_DATA_SECTION_HEADER : MetaFileParser._SectionHeaderParser,
MODEL_META_DATA_SUBSECTION_HEADER : _SubsectionHeaderParser,
}
_Macros = property(_GetMacros)
## DEC file parser class
#
# Parses an EDK2 package declaration (.dec) file and stores the parsed
# items into the ECC database.
#
#   @param      FilePath        The path of package declaration file
#   @param      FileType        File type constant (MODEL_FILE_DEC)
#   @param      Table           Database table used to store parsed items
#   @param      Macros          Macros used for replacement in file
#
class DecParser(MetaFileParser):
    # DEC file supported data types (one type per section)
    DataType = {
        TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
        TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
        TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
        TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
        TAB_GUIDS.upper() : MODEL_EFI_GUID,
        TAB_PPIS.upper() : MODEL_EFI_PPI,
        TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
        TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
        TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
        TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
        TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
        TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
    }
    ## Constructor of DecParser
    #
    #  Initialize object of DecParser
    #
    #   @param      FilePath        The path of package declaration file
    #   @param      FileType        File type constant (MODEL_FILE_DEC)
    #   @param      Table           Database table used to store parsed items
    #
    def __init__(self, FilePath, FileType, Table):
        # prevent re-initialization
        if hasattr(self, "_Table"):
            return
        MetaFileParser.__init__(self, FilePath, FileType, Table)
        # Comments collected ahead of an item; attached to it once stored
        self._Comments = []
        self._Version = 0x00010005  # Only EDK2 dec file is supported
        self.TblFile = EccGlobalData.gDb.TblFile
        self.FileID = -1

        # State for structured-PCD parsing
        self._CurrentStructurePcdName = ""
        self._include_flag = False
        self._package_flag = False
## Parser starter
def Start(self):
Content = ''
try:
Content = open(str(self.MetaFile), 'r').readlines()
except:
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
#
# Insert a record for file
#
Filename = NormPath(self.MetaFile)
FileID = self.TblFile.GetFileId(Filename)
if FileID:
self.FileID = FileID
else:
self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
for Index in range(0, len(Content)):
Line, Comment = CleanString2(Content[Index])
self._CurrentLine = Line
self._LineIndex = Index
# save comment for later use
if Comment:
self._Comments.append((Comment, self._LineIndex+1))
# skip empty line
if Line == '':
continue
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionHeaderParser()
self._Comments = []
continue
elif len(self._SectionType) == 0:
self._Comments = []
continue
# section content
self._ValueList = ['', '', '']
self._SectionParser[self._SectionType[0]](self)
if self._ValueList is None or self._ItemType == MODEL_META_DATA_DEFINE:
self._ItemType = -1
self._Comments = []
continue
#
# Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
# ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
#
for Arch, ModuleType, Type in self._Scope:
self._LastItem = self._Store(
Type,
self._ValueList[0],
self._ValueList[1],
self._ValueList[2],
Arch,
ModuleType,
self._Owner[-1],
self.FileID,
self._LineIndex+1,
-1,
self._LineIndex+1,
-1,
0
)
for Comment, LineNo in self._Comments:
self._Store(
MODEL_META_DATA_COMMENT,
Comment,
self._ValueList[0],
self._ValueList[1],
Arch,
ModuleType,
self._LastItem,
self.FileID,
LineNo,
-1,
LineNo,
-1,
0
)
self._Comments = []
self._Done()
def _GetApplicableSectionMacro(self):
Macros = {}
for S1, S2, SectionType in self._Scope:
for Scope1, Scope2 in [("COMMON", "COMMON"), ("COMMON", S2), (S1, "COMMON"), (S1, S2)]:
if (SectionType, Scope1, Scope2) in self._SectionsMacroDict:
Macros.update(self._SectionsMacroDict[(SectionType, Scope1, Scope2)])
return Macros
    ## Section header parser
    #
    # The section header is always in following format:
    #
    #   [section_name.arch<.platform|module_type>]
    #
    # A DEC header may contain several comma-separated entries; each entry
    # contributes an (arch, platform/module-type, model-type) scope triple.
    #
    def _SectionHeaderParser(self):
        self._Scope = []
        self._SectionName = ''
        self._SectionType = []
        ArchList = set()
        for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
            if Item == '':
                continue
            ItemList = GetSplitValueList(Item, TAB_SPLIT)

            # different types of PCD are permissible in one section
            self._SectionName = ItemList[0].upper()
            if self._SectionName in self.DataType:
                if self.DataType[self._SectionName] not in self._SectionType:
                    self._SectionType.append(self.DataType[self._SectionName])
            else:
                EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
                               Line=self._LineIndex+1, ExtraData=self._CurrentLine)
                continue

            # FeatureFlag PCDs must not share a section with other PCD types.
            if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
                EdkLogger.error(
                            'Parser',
                            FORMAT_INVALID,
                            "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
                            File=self.MetaFile,
                            Line=self._LineIndex+1,
                            ExtraData=self._CurrentLine
                            )
            # S1 is always Arch
            if len(ItemList) > 1:
                S1 = ItemList[1].upper()
            else:
                S1 = 'COMMON'
            ArchList.add(S1)
            # S2 may be Platform or ModuleType
            if len(ItemList) > 2:
                S2 = ItemList[2].upper()
            else:
                S2 = 'COMMON'
            if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
                self._Scope.append([S1, S2, self.DataType[self._SectionName]])

        # 'COMMON' must not be used with specific ARCHs at the same section
        if 'COMMON' in ArchList and len(ArchList) > 1:
            EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
                            File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
    ## [guids], [ppis] and [protocols] section parser
    @ParseMacro
    def _GuidParser(self):
        """Parse one '<CName> = <GuidValueInCFormat>' line.

        Stores the C name in self._ValueList[0] and the GUID, re-formatted
        as a normalized C initializer string, in self._ValueList[1].
        Reports an error (via EdkLogger.error) for any malformed line.
        """
        TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
        if len(TokenList) < 2:
            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
                            File=self.MetaFile, Line=self._LineIndex+1)
        if TokenList[0] == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
                            File=self.MetaFile, Line=self._LineIndex+1)
        if TokenList[1] == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
                            ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
                            File=self.MetaFile, Line=self._LineIndex+1)
        if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
            EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
                            ExtraData=self._CurrentLine + \
                                      " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
                            File=self.MetaFile, Line=self._LineIndex+1)
        self._ValueList[0] = TokenList[0]
        #Parse the Guid value format
        GuidValueList = TokenList[1].strip(' {}').split(',')
        Index = 0
        HexList = []
        # 11 comma-separated fields: 3 leading integers plus the 8 bytes of
        # the inner brace group (the inner '{' still prefixes field 4).
        if len(GuidValueList) == 11:
            for GuidValue in GuidValueList:
                GuidValue = GuidValue.strip()
                if GuidValue.startswith('0x') or GuidValue.startswith('0X'):
                    HexList.append('0x' + str(GuidValue[2:]))
                    Index += 1
                    continue
                else:
                    if GuidValue.startswith('{'):
                        GuidValue = GuidValue.lstrip(' {')
                        # NOTE(review): this branch also assumes an '0x'
                        # prefix on the field — a bare decimal field would
                        # lose its first two characters; TODO confirm
                        # against the DEC specification.
                        HexList.append('0x' + str(GuidValue[2:]))
                        Index += 1
            self._ValueList[1] = "{ %s, %s, %s, { %s, %s, %s, %s, %s, %s, %s, %s }}" % (HexList[0], HexList[1], HexList[2], HexList[3], HexList[4], HexList[5], HexList[6], HexList[7], HexList[8], HexList[9], HexList[10])
        else:
            EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
                            ExtraData=self._CurrentLine + \
                                      " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
                            File=self.MetaFile, Line=self._LineIndex+1)
            self._ValueList[0] = ''
def ParsePcdName(self,namelist):
if "[" in namelist[1]:
pcdname = namelist[1][:namelist[1].index("[")]
arrayindex = namelist[1][namelist[1].index("["):]
namelist[1] = pcdname
if len(namelist) == 2:
namelist.append(arrayindex)
else:
namelist[2] = ".".join((arrayindex,namelist[2]))
return namelist
    def StructPcdParser(self):
        """Parse one line that belongs to a structure-PCD body.

        Lines without '|' are structural: the markers '<HeaderFiles>' and
        '<Packages>' toggle parsing flags, '}' closes a <Packages> group,
        and other lines are recorded under a marker-prefixed key.  Lines
        with '|' are field/array assignments for the current structure PCD.
        Setting self._ValueList to None tells the caller to store nothing
        for this line.
        """
        self._ValueList[0] = self._CurrentStructurePcdName
        if "|" not in self._CurrentLine:
            if "<HeaderFiles>" == self._CurrentLine:
                self._include_flag = True
                self._package_flag = False
                self._ValueList = None
                return
            if "<Packages>" == self._CurrentLine:
                self._package_flag = True
                self._ValueList = None
                self._include_flag = False
                return
            if self._include_flag:
                # md5 of the line text makes the record key unique per header file
                self._ValueList[1] = "<HeaderFiles>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
                self._ValueList[2] = self._CurrentLine
            if self._package_flag and "}" != self._CurrentLine:
                self._ValueList[1] = "<Packages>_" + md5(self._CurrentLine.encode('utf-8')).hexdigest()
                self._ValueList[2] = self._CurrentLine
            if self._CurrentLine == "}":
                # End of the <Packages> group: reset both flags
                self._package_flag = False
                self._include_flag = False
                self._ValueList = None
        else:
            PcdTockens = self._CurrentLine.split(TAB_VALUE_SPLIT)
            PcdNames = self.ParsePcdName(PcdTockens[0].split(TAB_SPLIT))
            if len(PcdNames) == 2:
                if PcdNames[1].strip().endswith("]"):
                    # array element assignment, e.g.  Guid.PcdName[2]|Value
                    PcdName = PcdNames[1][:PcdNames[1].index('[')]
                    Index = PcdNames[1][PcdNames[1].index('['):]
                    self._ValueList[0] = TAB_SPLIT.join((PcdNames[0], PcdName))
                    self._ValueList[1] = Index
                    self._ValueList[2] = PcdTockens[1]
                else:
                    # a plain Guid.PcdName line ends the structure-PCD block
                    self._CurrentStructurePcdName = ""
            else:
                if self._CurrentStructurePcdName != TAB_SPLIT.join(PcdNames[:2]):
                    EdkLogger.error('Parser', FORMAT_INVALID, "Pcd Name does not match: %s and %s " % (
                                    self._CurrentStructurePcdName, TAB_SPLIT.join(PcdNames[:2])),
                                    File=self.MetaFile, Line=self._LineIndex + 1)
                self._ValueList[1] = TAB_SPLIT.join(PcdNames[2:])
                self._ValueList[2] = PcdTockens[1]
## PCD sections parser
#
# [PcdsFixedAtBuild]
# [PcdsPatchableInModule]
# [PcdsFeatureFlag]
# [PcdsDynamicEx
# [PcdsDynamic]
#
@ParseMacro
def _PcdParser(self):
if self._CurrentStructurePcdName:
self.StructPcdParser()
return
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
# check PCD information
if self._ValueList[0] == '' or self._ValueList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
ExtraData=self._CurrentLine + \
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
# check PCD datum information
if len(TokenList) < 2 or TokenList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
ExtraData=self._CurrentLine + \
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(TokenList[1])
# Has VOID* type string, may contain "|" character in the string.
if len(PtrValue) != 0:
ptrValueList = re.sub(ValueRe, '', TokenList[1])
ValueList = GetSplitValueList(ptrValueList)
ValueList[0] = PtrValue[0]
else:
ValueList = GetSplitValueList(TokenList[1])
# check if there's enough datum information given
if len(ValueList) != 3:
EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
ExtraData=self._CurrentLine + \
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
# check default value
if ValueList[0] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
ExtraData=self._CurrentLine + \
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
# check datum type
if ValueList[1] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
ExtraData=self._CurrentLine + \
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
# check token of the PCD
if ValueList[2] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
ExtraData=self._CurrentLine + \
" (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
File=self.MetaFile, Line=self._LineIndex+1)
# check format of default value against the datum type
IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
if not IsValid:
EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
File=self.MetaFile, Line=self._LineIndex+1)
if Cause == "StructurePcd":
self._CurrentStructurePcdName = TAB_SPLIT.join(self._ValueList[0:2])
self._ValueList[0] = self._CurrentStructurePcdName
self._ValueList[1] = ValueList[1].strip()
if EccGlobalData.gConfig.UniCheckPCDInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# check Description, Prompt information
PatternDesc = re.compile('##\s*([\x21-\x7E\s]*)', re.S)
PatternPrompt = re.compile('#\s+@Prompt\s+([\x21-\x7E\s]*)', re.S)
Description = None
Prompt = None
# check @ValidRange, @ValidList and @Expression format valid
ErrorCodeValid = '0x0 <= %s <= 0xFFFFFFFF'
PatternValidRangeIn = '(NOT)?\s*(\d+\s*-\s*\d+|0[xX][a-fA-F0-9]+\s*-\s*0[xX][a-fA-F0-9]+|LT\s*\d+|LT\s*0[xX][a-fA-F0-9]+|GT\s*\d+|GT\s*0[xX][a-fA-F0-9]+|LE\s*\d+|LE\s*0[xX][a-fA-F0-9]+|GE\s*\d+|GE\s*0[xX][a-fA-F0-9]+|XOR\s*\d+|XOR\s*0[xX][a-fA-F0-9]+|EQ\s*\d+|EQ\s*0[xX][a-fA-F0-9]+)'
PatternValidRng = re.compile('^' + '(NOT)?\s*' + PatternValidRangeIn + '$')
for Comment in self._Comments:
Comm = Comment[0].strip()
if not Comm:
continue
if not Description:
Description = PatternDesc.findall(Comm)
if not Prompt:
Prompt = PatternPrompt.findall(Comm)
if Comm[0] == '#':
ValidFormt = Comm.lstrip('#')
ValidFormt = ValidFormt.lstrip()
if ValidFormt[0:11] == '@ValidRange':
ValidFormt = ValidFormt[11:]
ValidFormt = ValidFormt.lstrip()
try:
ErrorCode, Expression = ValidFormt.split('|', 1)
except ValueError:
ErrorCode = '0x0'
Expression = ValidFormt
ErrorCode, Expression = ErrorCode.strip(), Expression.strip()
try:
if not eval(ErrorCodeValid % ErrorCode):
EdkLogger.warn('Parser', '@ValidRange ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
except:
EdkLogger.warn('Parser', '@ValidRange ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
if not PatternValidRng.search(Expression):
EdkLogger.warn('Parser', '@ValidRange Expression(%s) of PCD %s is incorrect format.' % (Expression, TokenList[0]))
if ValidFormt[0:10] == '@ValidList':
ValidFormt = ValidFormt[10:]
ValidFormt = ValidFormt.lstrip()
try:
ErrorCode, Expression = ValidFormt.split('|', 1)
except ValueError:
ErrorCode = '0x0'
Expression = ValidFormt
ErrorCode, Expression = ErrorCode.strip(), Expression.strip()
try:
if not eval(ErrorCodeValid % ErrorCode):
EdkLogger.warn('Parser', '@ValidList ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
except:
EdkLogger.warn('Parser', '@ValidList ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
Values = Expression.split(',')
for Value in Values:
Value = Value.strip()
try:
eval(Value)
except:
EdkLogger.warn('Parser', '@ValidList Expression of PCD %s include a invalid value(%s).' % (TokenList[0], Value))
break
if ValidFormt[0:11] == '@Expression':
ValidFormt = ValidFormt[11:]
ValidFormt = ValidFormt.lstrip()
try:
ErrorCode, Expression = ValidFormt.split('|', 1)
except ValueError:
ErrorCode = '0x0'
Expression = ValidFormt
ErrorCode, Expression = ErrorCode.strip(), Expression.strip()
try:
if not eval(ErrorCodeValid % ErrorCode):
EdkLogger.warn('Parser', '@Expression ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
except:
EdkLogger.warn('Parser', '@Expression ErrorCode(%s) of PCD %s is not valid UINT32 value.' % (ErrorCode, TokenList[0]))
if not Expression:
EdkLogger.warn('Parser', '@Expression Expression of PCD %s is incorrect format.' % TokenList[0])
if not Description:
EdkLogger.warn('Parser', 'PCD %s Description information is not provided.' % TokenList[0])
if not Prompt:
EdkLogger.warn('Parser', 'PCD %s Prompt information is not provided.' % TokenList[0])
# check Description, Prompt localization information
if self._UniObj:
self._UniObj.CheckPcdInfo(TokenList[0])
if ValueList[0] in ['True', 'true', 'TRUE']:
ValueList[0] = '1'
elif ValueList[0] in ['False', 'false', 'FALSE']:
ValueList[0] = '0'
self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip()
_SectionParser = {
MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
MODEL_EFI_INCLUDE : MetaFileParser._PathParser,
MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
MODEL_EFI_GUID : _GuidParser,
MODEL_EFI_PPI : _GuidParser,
MODEL_EFI_PROTOCOL : _GuidParser,
MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
MODEL_PCD_FEATURE_FLAG : _PcdParser,
MODEL_PCD_DYNAMIC : _PcdParser,
MODEL_PCD_DYNAMIC_EX : _PcdParser,
MODEL_UNKNOWN : MetaFileParser._Skip,
MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
}
## Fdf
#
# This class defines the structure used for an Fdf object: it loads an FDF
# file and, when IsToDatabase is set, stores the INF and PCD records found
# in it into the ECC database.
#
# @param Filename:     Filename of the Fdf file to load, default is None
# @param IsToDatabase: Whether parsed records are written to the database
# @param WorkspaceDir: Current workspace directory, default is None
# @param Database:     Database object supplying Cur/TblFile/TblFdf handles
#
class Fdf(object):
    def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
        self.WorkspaceDir = WorkspaceDir
        self.IsToDatabase = IsToDatabase
        # database handles used when persisting parsed records
        self.Cur = Database.Cur
        self.TblFile = Database.TblFile
        self.TblFdf = Database.TblFdf
        self.FileID = -1
        # cache of already-inserted file paths -> file table IDs
        self.FileList = {}
        #
        # Load Fdf file if filename is not None
        #
        if Filename is not None:
            try:
                self.LoadFdfFile(Filename)
            except Exception:
                # Best-effort: a malformed FDF file must not abort the whole
                # ECC run, so parse errors are deliberately swallowed here.
                pass
    #
    # Insert a FDF file record into database
    #
    # Each distinct file is inserted only once; later calls return the
    # cached file ID.
    #
    def InsertFile(self, Filename):
        Filename = NormPath(Filename)
        if Filename not in self.FileList:
            self.FileList[Filename] = self.TblFile.InsertFile(Filename, MODEL_FILE_FDF)
        return self.FileList[Filename]
    ## Load Fdf file
    #
    # Parse the file and, when IsToDatabase is set, insert every PCD and
    # INF reference found into the Fdf table.
    #
    # @param Filename: Input value for filename of Fdf file
    #
    def LoadFdfFile(self, Filename):
        #
        # Parse Fdf file
        #
        Filename = NormPath(Filename)
        Fdf = FdfParser(Filename)
        Fdf.ParseFile()
        #
        # Insert inf file and pcd information
        #
        if self.IsToDatabase:
            (Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled) = \
            (0, '', '', '', 'COMMON', 'COMMON', -1, -1, -1, -1, -1, -1, 0)
            # (a dead "for Index in range(...): pass" loop over PcdDict that
            # did nothing was removed here)
            for Key in Fdf.Profile.PcdDict.keys():
                Model = MODEL_PCD
                Value1 = Key[1]
                Value2 = Key[0]
                FileName = Fdf.Profile.PcdFileLineDict[Key][0]
                StartLine = Fdf.Profile.PcdFileLineDict[Key][1]
                BelongsToFile = self.InsertFile(FileName)
                self.TblFdf.Insert(Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
            for Index in range(0, len(Fdf.Profile.InfList)):
                Model = MODEL_META_DATA_COMPONENT
                Value1 = Fdf.Profile.InfList[Index]
                Value2 = ''
                FileName = Fdf.Profile.InfFileLineList[Index][0]
                StartLine = Fdf.Profile.InfFileLineList[Index][1]
                BelongsToFile = self.InsertFile(FileName)
                self.TblFdf.Insert(Model, Value1, Value2, Value3, Scope1, Scope2, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
class UniParser(object):
    """Checker for UNI localization files referenced by INF/DEC files.

    IsExtraUni:  whether this is an extra Module/Package UNI file.
    IsModuleUni: True for a Module UNI file, False for a Package UNI file.
    Missing keys are reported once each via PrintLog.
    """
    def __init__(self, FilePath, IsExtraUni=False, IsModuleUni=True):
        self.FilePath = FilePath
        self.FileName = os.path.basename(FilePath)
        self.IsExtraUni = IsExtraUni
        self.IsModuleUni = IsModuleUni
        self.FileIn = None
        # keys already reported as missing, to avoid duplicate reports
        self.Missing = []
        self.__read()
    def __read(self):
        """Read the UNI file, trying utf-8, utf-16 and utf-16-le in turn.

        Bug fix: the original chained three 'except UnicodeError' clauses on
        a single try statement; Python only ever runs the first matching
        clause, so the utf_16_le fallback was unreachable and a decode
        failure of the utf_16 fallback escaped uncaught.  The encodings are
        now tried sequentially; an unreadable file yields empty content.
        """
        for Encoding in ('utf_8', 'utf_16', 'utf_16_le'):
            try:
                self.FileIn = CodecOpenLongFilePath(self.FilePath, Mode='rb', Encoding=Encoding).read()
                return
            except UnicodeError:
                continue       # try the next candidate encoding
            except IOError:
                break          # unreadable file: treat as empty content
        self.FileIn = ""
    def Start(self):
        """Check presence of the expected top-level UNI string entries."""
        if self.IsModuleUni:
            if self.IsExtraUni:
                ModuleName = self.CheckKeyValid('STR_PROPERTIES_MODULE_NAME')
                self.PrintLog('STR_PROPERTIES_MODULE_NAME', ModuleName)
            else:
                ModuleAbstract = self.CheckKeyValid('STR_MODULE_ABSTRACT')
                self.PrintLog('STR_MODULE_ABSTRACT', ModuleAbstract)
                ModuleDescription = self.CheckKeyValid('STR_MODULE_DESCRIPTION')
                self.PrintLog('STR_MODULE_DESCRIPTION', ModuleDescription)
        else:
            if self.IsExtraUni:
                PackageName = self.CheckKeyValid('STR_PROPERTIES_PACKAGE_NAME')
                self.PrintLog('STR_PROPERTIES_PACKAGE_NAME', PackageName)
            else:
                PackageAbstract = self.CheckKeyValid('STR_PACKAGE_ABSTRACT')
                self.PrintLog('STR_PACKAGE_ABSTRACT', PackageAbstract)
                PackageDescription = self.CheckKeyValid('STR_PACKAGE_DESCRIPTION')
                self.PrintLog('STR_PACKAGE_DESCRIPTION', PackageDescription)
    def CheckKeyValid(self, Key, Contents=None):
        """Return True if a '#string <Key> ... #language ... "..."' entry
        exists in Contents (defaults to the file content read at init)."""
        if not Contents:
            Contents = self.FileIn
        # raw string avoids invalid-escape-sequence warnings on Python 3.6+
        KeyPattern = re.compile(r'#string\s+%s\s+.*?#language.*?".*?"' % Key, re.S)
        if KeyPattern.search(Contents):
            return True
        return False
    def CheckPcdInfo(self, PcdCName):
        """Check that prompt and help strings exist for the given PCD."""
        PromptKey = 'STR_%s_PROMPT' % PcdCName.replace('.', '_')
        PcdPrompt = self.CheckKeyValid(PromptKey)
        self.PrintLog(PromptKey, PcdPrompt)
        HelpKey = 'STR_%s_HELP' % PcdCName.replace('.', '_')
        PcdHelp = self.CheckKeyValid(HelpKey)
        self.PrintLog(HelpKey, PcdHelp)
    def PrintLog(self, Key, Value):
        """Report a missing key once: warn and add an ECC report record."""
        if not Value and Key not in self.Missing:
            Msg = '%s is missing in the %s file.' % (Key, self.FileName)
            EdkLogger.warn('Parser', Msg)
            EccGlobalData.gDb.TblReport.Insert(EccToolError.ERROR_GENERAL_CHECK_UNI_HELP_INFO, OtherMsg=Msg, BelongsToTable='File', BelongsToItem=-2)
            self.Missing.append(Key)
##
#
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.  No standalone behavior is defined for this module.
#
if __name__ == '__main__':
    pass
| edk2-master | BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py |
# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
from antlr3 import *
from antlr3.compat import set, frozenset
## @file
# The file defines the Lexer for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# for convenience in actions
HIDDEN = BaseRecognizer.HIDDEN
# token types
T114=114
T115=115
T116=116
T117=117
FloatTypeSuffix=16
LETTER=11
T29=29
T28=28
T27=27
T26=26
T25=25
EOF=-1
STRING_LITERAL=9
FLOATING_POINT_LITERAL=10
T38=38
T37=37
T39=39
T34=34
COMMENT=22
T33=33
T36=36
T35=35
T30=30
T32=32
T31=31
LINE_COMMENT=23
IntegerTypeSuffix=14
CHARACTER_LITERAL=8
T49=49
T48=48
T100=100
T43=43
T42=42
T102=102
T41=41
T101=101
T40=40
T47=47
T46=46
T45=45
T44=44
T109=109
T107=107
T108=108
T105=105
WS=19
T106=106
T103=103
T104=104
T50=50
LINE_COMMAND=24
T59=59
T113=113
T52=52
T112=112
T51=51
T111=111
T54=54
T110=110
EscapeSequence=12
DECIMAL_LITERAL=7
T53=53
T56=56
T55=55
T58=58
T57=57
T75=75
T76=76
T73=73
T74=74
T79=79
T77=77
T78=78
Exponent=15
HexDigit=13
T72=72
T71=71
T70=70
T62=62
T63=63
T64=64
T65=65
T66=66
T67=67
T68=68
T69=69
IDENTIFIER=4
UnicodeVocabulary=21
HEX_LITERAL=5
T61=61
T60=60
T99=99
T97=97
BS=20
T98=98
T95=95
T96=96
OCTAL_LITERAL=6
T94=94
Tokens=118
T93=93
T92=92
T91=91
T90=90
T88=88
T89=89
T84=84
T85=85
T86=86
T87=87
UnicodeEscape=18
T81=81
T80=80
T83=83
OctalEscape=17
T82=82
class CLexer(Lexer):
grammarFileName = "C.g"
def __init__(self, input=None):
Lexer.__init__(self, input)
self.dfa25 = self.DFA25(
self, 25,
eot = self.DFA25_eot,
eof = self.DFA25_eof,
min = self.DFA25_min,
max = self.DFA25_max,
accept = self.DFA25_accept,
special = self.DFA25_special,
transition = self.DFA25_transition
)
self.dfa35 = self.DFA35(
self, 35,
eot = self.DFA35_eot,
eof = self.DFA35_eof,
min = self.DFA35_min,
max = self.DFA35_max,
accept = self.DFA35_accept,
special = self.DFA35_special,
transition = self.DFA35_transition
)
# $ANTLR start T25
def mT25(self, ):
try:
self.type = T25
# C.g:27:5: ( ';' )
# C.g:27:7: ';'
self.match(u';')
finally:
pass
# $ANTLR end T25
# $ANTLR start T26
def mT26(self, ):
try:
self.type = T26
# C.g:28:5: ( 'typedef' )
# C.g:28:7: 'typedef'
self.match("typedef")
finally:
pass
# $ANTLR end T26
# $ANTLR start T27
def mT27(self, ):
try:
self.type = T27
# C.g:29:5: ( ',' )
# C.g:29:7: ','
self.match(u',')
finally:
pass
# $ANTLR end T27
# $ANTLR start T28
def mT28(self, ):
try:
self.type = T28
# C.g:30:5: ( '=' )
# C.g:30:7: '='
self.match(u'=')
finally:
pass
# $ANTLR end T28
# $ANTLR start T29
def mT29(self, ):
try:
self.type = T29
# C.g:31:5: ( 'extern' )
# C.g:31:7: 'extern'
self.match("extern")
finally:
pass
# $ANTLR end T29
# $ANTLR start T30
def mT30(self, ):
try:
self.type = T30
# C.g:32:5: ( 'static' )
# C.g:32:7: 'static'
self.match("static")
finally:
pass
# $ANTLR end T30
# $ANTLR start T31
def mT31(self, ):
try:
self.type = T31
# C.g:33:5: ( 'auto' )
# C.g:33:7: 'auto'
self.match("auto")
finally:
pass
# $ANTLR end T31
# $ANTLR start T32
def mT32(self, ):
try:
self.type = T32
# C.g:34:5: ( 'register' )
# C.g:34:7: 'register'
self.match("register")
finally:
pass
# $ANTLR end T32
# $ANTLR start T33
def mT33(self, ):
try:
self.type = T33
# C.g:35:5: ( 'STATIC' )
# C.g:35:7: 'STATIC'
self.match("STATIC")
finally:
pass
# $ANTLR end T33
# $ANTLR start T34
def mT34(self, ):
try:
self.type = T34
# C.g:36:5: ( 'void' )
# C.g:36:7: 'void'
self.match("void")
finally:
pass
# $ANTLR end T34
# $ANTLR start T35
def mT35(self, ):
try:
self.type = T35
# C.g:37:5: ( 'char' )
# C.g:37:7: 'char'
self.match("char")
finally:
pass
# $ANTLR end T35
# $ANTLR start T36
def mT36(self, ):
try:
self.type = T36
# C.g:38:5: ( 'short' )
# C.g:38:7: 'short'
self.match("short")
finally:
pass
# $ANTLR end T36
# $ANTLR start T37
def mT37(self, ):
try:
self.type = T37
# C.g:39:5: ( 'int' )
# C.g:39:7: 'int'
self.match("int")
finally:
pass
# $ANTLR end T37
# $ANTLR start T38
def mT38(self, ):
try:
self.type = T38
# C.g:40:5: ( 'long' )
# C.g:40:7: 'long'
self.match("long")
finally:
pass
# $ANTLR end T38
# $ANTLR start T39
def mT39(self, ):
try:
self.type = T39
# C.g:41:5: ( 'float' )
# C.g:41:7: 'float'
self.match("float")
finally:
pass
# $ANTLR end T39
# $ANTLR start T40
def mT40(self, ):
try:
self.type = T40
# C.g:42:5: ( 'double' )
# C.g:42:7: 'double'
self.match("double")
finally:
pass
# $ANTLR end T40
# $ANTLR start T41
def mT41(self, ):
try:
self.type = T41
# C.g:43:5: ( 'signed' )
# C.g:43:7: 'signed'
self.match("signed")
finally:
pass
# $ANTLR end T41
# $ANTLR start T42
def mT42(self, ):
try:
self.type = T42
# C.g:44:5: ( 'unsigned' )
# C.g:44:7: 'unsigned'
self.match("unsigned")
finally:
pass
# $ANTLR end T42
# $ANTLR start T43
def mT43(self, ):
try:
self.type = T43
# C.g:45:5: ( '{' )
# C.g:45:7: '{'
self.match(u'{')
finally:
pass
# $ANTLR end T43
# $ANTLR start T44
def mT44(self, ):
try:
self.type = T44
# C.g:46:5: ( '}' )
# C.g:46:7: '}'
self.match(u'}')
finally:
pass
# $ANTLR end T44
# $ANTLR start T45
def mT45(self, ):
try:
self.type = T45
# C.g:47:5: ( 'struct' )
# C.g:47:7: 'struct'
self.match("struct")
finally:
pass
# $ANTLR end T45
# $ANTLR start T46
def mT46(self, ):
try:
self.type = T46
# C.g:48:5: ( 'union' )
# C.g:48:7: 'union'
self.match("union")
finally:
pass
# $ANTLR end T46
# $ANTLR start T47
def mT47(self, ):
try:
self.type = T47
# C.g:49:5: ( ':' )
# C.g:49:7: ':'
self.match(u':')
finally:
pass
# $ANTLR end T47
# $ANTLR start T48
def mT48(self, ):
try:
self.type = T48
# C.g:50:5: ( 'enum' )
# C.g:50:7: 'enum'
self.match("enum")
finally:
pass
# $ANTLR end T48
# $ANTLR start T49
def mT49(self, ):
try:
self.type = T49
# C.g:51:5: ( 'const' )
# C.g:51:7: 'const'
self.match("const")
finally:
pass
# $ANTLR end T49
# $ANTLR start T50
def mT50(self, ):
try:
self.type = T50
# C.g:52:5: ( 'volatile' )
# C.g:52:7: 'volatile'
self.match("volatile")
finally:
pass
# $ANTLR end T50
# $ANTLR start T51
def mT51(self, ):
try:
self.type = T51
# C.g:53:5: ( 'IN' )
# C.g:53:7: 'IN'
self.match("IN")
finally:
pass
# $ANTLR end T51
# $ANTLR start T52
def mT52(self, ):
try:
self.type = T52
# C.g:54:5: ( 'OUT' )
# C.g:54:7: 'OUT'
self.match("OUT")
finally:
pass
# $ANTLR end T52
# $ANTLR start T53
def mT53(self, ):
try:
self.type = T53
# C.g:55:5: ( 'OPTIONAL' )
# C.g:55:7: 'OPTIONAL'
self.match("OPTIONAL")
finally:
pass
# $ANTLR end T53
# $ANTLR start T54
def mT54(self, ):
try:
self.type = T54
# C.g:56:5: ( 'CONST' )
# C.g:56:7: 'CONST'
self.match("CONST")
finally:
pass
# $ANTLR end T54
# $ANTLR start T55
def mT55(self, ):
try:
self.type = T55
# C.g:57:5: ( 'UNALIGNED' )
# C.g:57:7: 'UNALIGNED'
self.match("UNALIGNED")
finally:
pass
# $ANTLR end T55
# $ANTLR start T56
def mT56(self, ):
try:
self.type = T56
# C.g:58:5: ( 'VOLATILE' )
# C.g:58:7: 'VOLATILE'
self.match("VOLATILE")
finally:
pass
# $ANTLR end T56
# $ANTLR start T57
def mT57(self, ):
try:
self.type = T57
# C.g:59:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
# C.g:59:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
self.match("GLOBAL_REMOVE_IF_UNREFERENCED")
finally:
pass
# $ANTLR end T57
# $ANTLR start T58
def mT58(self, ):
try:
self.type = T58
# C.g:60:5: ( 'EFIAPI' )
# C.g:60:7: 'EFIAPI'
self.match("EFIAPI")
finally:
pass
# $ANTLR end T58
# $ANTLR start T59
def mT59(self, ):
try:
self.type = T59
# C.g:61:5: ( 'EFI_BOOTSERVICE' )
# C.g:61:7: 'EFI_BOOTSERVICE'
self.match("EFI_BOOTSERVICE")
finally:
pass
# $ANTLR end T59
# $ANTLR start T60
def mT60(self, ):
try:
self.type = T60
# C.g:62:5: ( 'EFI_RUNTIMESERVICE' )
# C.g:62:7: 'EFI_RUNTIMESERVICE'
self.match("EFI_RUNTIMESERVICE")
finally:
pass
# $ANTLR end T60
# $ANTLR start T61
def mT61(self, ):
try:
self.type = T61
# C.g:63:5: ( 'PACKED' )
# C.g:63:7: 'PACKED'
self.match("PACKED")
finally:
pass
# $ANTLR end T61
# $ANTLR start T62
def mT62(self, ):
try:
self.type = T62
# C.g:64:5: ( '(' )
# C.g:64:7: '('
self.match(u'(')
finally:
pass
# $ANTLR end T62
# $ANTLR start T63
def mT63(self, ):
try:
self.type = T63
# C.g:65:5: ( ')' )
# C.g:65:7: ')'
self.match(u')')
finally:
pass
# $ANTLR end T63
# $ANTLR start T64
def mT64(self, ):
try:
self.type = T64
# C.g:66:5: ( '[' )
# C.g:66:7: '['
self.match(u'[')
finally:
pass
# $ANTLR end T64
# $ANTLR start T65
def mT65(self, ):
try:
self.type = T65
# C.g:67:5: ( ']' )
# C.g:67:7: ']'
self.match(u']')
finally:
pass
# $ANTLR end T65
# $ANTLR start T66
def mT66(self, ):
try:
self.type = T66
# C.g:68:5: ( '*' )
# C.g:68:7: '*'
self.match(u'*')
finally:
pass
# $ANTLR end T66
# $ANTLR start T67
def mT67(self, ):
try:
self.type = T67
# C.g:69:5: ( '...' )
# C.g:69:7: '...'
self.match("...")
finally:
pass
# $ANTLR end T67
# $ANTLR start T68
def mT68(self, ):
try:
self.type = T68
# C.g:70:5: ( '+' )
# C.g:70:7: '+'
self.match(u'+')
finally:
pass
# $ANTLR end T68
# $ANTLR start T69
def mT69(self, ):
try:
self.type = T69
# C.g:71:5: ( '-' )
# C.g:71:7: '-'
self.match(u'-')
finally:
pass
# $ANTLR end T69
# $ANTLR start T70
def mT70(self, ):
try:
self.type = T70
# C.g:72:5: ( '/' )
# C.g:72:7: '/'
self.match(u'/')
finally:
pass
# $ANTLR end T70
# $ANTLR start T71
def mT71(self, ):
try:
self.type = T71
# C.g:73:5: ( '%' )
# C.g:73:7: '%'
self.match(u'%')
finally:
pass
# $ANTLR end T71
# $ANTLR start T72
def mT72(self, ):
try:
self.type = T72
# C.g:74:5: ( '++' )
# C.g:74:7: '++'
self.match("++")
finally:
pass
# $ANTLR end T72
# $ANTLR start T73
def mT73(self, ):
try:
self.type = T73
# C.g:75:5: ( '--' )
# C.g:75:7: '--'
self.match("--")
finally:
pass
# $ANTLR end T73
# $ANTLR start T74
def mT74(self, ):
try:
self.type = T74
# C.g:76:5: ( 'sizeof' )
# C.g:76:7: 'sizeof'
self.match("sizeof")
finally:
pass
# $ANTLR end T74
# $ANTLR start T75
def mT75(self, ):
try:
self.type = T75
# C.g:77:5: ( '.' )
# C.g:77:7: '.'
self.match(u'.')
finally:
pass
# $ANTLR end T75
# $ANTLR start T76
def mT76(self, ):
try:
self.type = T76
# C.g:78:5: ( '->' )
# C.g:78:7: '->'
self.match("->")
finally:
pass
# $ANTLR end T76
# $ANTLR start T77
def mT77(self, ):
try:
self.type = T77
# C.g:79:5: ( '&' )
# C.g:79:7: '&'
self.match(u'&')
finally:
pass
# $ANTLR end T77
# $ANTLR start T78
def mT78(self, ):
try:
self.type = T78
# C.g:80:5: ( '~' )
# C.g:80:7: '~'
self.match(u'~')
finally:
pass
# $ANTLR end T78
# $ANTLR start T79
def mT79(self, ):
try:
self.type = T79
# C.g:81:5: ( '!' )
# C.g:81:7: '!'
self.match(u'!')
finally:
pass
# $ANTLR end T79
# $ANTLR start T80
def mT80(self, ):
try:
self.type = T80
# C.g:82:5: ( '*=' )
# C.g:82:7: '*='
self.match("*=")
finally:
pass
# $ANTLR end T80
# $ANTLR start T81
def mT81(self, ):
try:
self.type = T81
# C.g:83:5: ( '/=' )
# C.g:83:7: '/='
self.match("/=")
finally:
pass
# $ANTLR end T81
# $ANTLR start T82
def mT82(self, ):
try:
self.type = T82
# C.g:84:5: ( '%=' )
# C.g:84:7: '%='
self.match("%=")
finally:
pass
# $ANTLR end T82
# $ANTLR start T83
def mT83(self, ):
try:
self.type = T83
# C.g:85:5: ( '+=' )
# C.g:85:7: '+='
self.match("+=")
finally:
pass
# $ANTLR end T83
# $ANTLR start T84
def mT84(self, ):
try:
self.type = T84
# C.g:86:5: ( '-=' )
# C.g:86:7: '-='
self.match("-=")
finally:
pass
# $ANTLR end T84
# $ANTLR start T85
def mT85(self, ):
try:
self.type = T85
# C.g:87:5: ( '<<=' )
# C.g:87:7: '<<='
self.match("<<=")
finally:
pass
# $ANTLR end T85
# $ANTLR start T86
def mT86(self, ):
try:
self.type = T86
# C.g:88:5: ( '>>=' )
# C.g:88:7: '>>='
self.match(">>=")
finally:
pass
# $ANTLR end T86
# $ANTLR start T87
def mT87(self, ):
try:
self.type = T87
# C.g:89:5: ( '&=' )
# C.g:89:7: '&='
self.match("&=")
finally:
pass
# $ANTLR end T87
# $ANTLR start T88
def mT88(self, ):
try:
self.type = T88
# C.g:90:5: ( '^=' )
# C.g:90:7: '^='
self.match("^=")
finally:
pass
# $ANTLR end T88
# $ANTLR start T89
def mT89(self, ):
try:
self.type = T89
# C.g:91:5: ( '|=' )
# C.g:91:7: '|='
self.match("|=")
finally:
pass
# $ANTLR end T89
# $ANTLR start T90
def mT90(self, ):
try:
self.type = T90
# C.g:92:5: ( '?' )
# C.g:92:7: '?'
self.match(u'?')
finally:
pass
# $ANTLR end T90
# $ANTLR start T91
def mT91(self, ):
try:
self.type = T91
# C.g:93:5: ( '||' )
# C.g:93:7: '||'
self.match("||")
finally:
pass
# $ANTLR end T91
# $ANTLR start T92
def mT92(self, ):
try:
self.type = T92
# C.g:94:5: ( '&&' )
# C.g:94:7: '&&'
self.match("&&")
finally:
pass
# $ANTLR end T92
# $ANTLR start T93 .. T117
# Fixed-text token rules: each rule sets the token type and matches its
# literal spelling.  The generated try/finally wrappers were no-ops
# (finally: pass) and are omitted.

def mT93(self, ):
    # '|'
    self.type = T93
    self.match(u'|')

def mT94(self, ):
    # '^'
    self.type = T94
    self.match(u'^')

def mT95(self, ):
    # '=='
    self.type = T95
    self.match("==")

def mT96(self, ):
    # '!='
    self.type = T96
    self.match("!=")

def mT97(self, ):
    # '<'
    self.type = T97
    self.match(u'<')

def mT98(self, ):
    # '>'
    self.type = T98
    self.match(u'>')

def mT99(self, ):
    # '<='
    self.type = T99
    self.match("<=")

def mT100(self, ):
    # '>='
    self.type = T100
    self.match(">=")

def mT101(self, ):
    # '<<'
    self.type = T101
    self.match("<<")

def mT102(self, ):
    # '>>'
    self.type = T102
    self.match(">>")

def mT103(self, ):
    # '__asm__'
    self.type = T103
    self.match("__asm__")

def mT104(self, ):
    # '_asm'
    self.type = T104
    self.match("_asm")

def mT105(self, ):
    # '__asm'
    self.type = T105
    self.match("__asm")

def mT106(self, ):
    # 'case'
    self.type = T106
    self.match("case")

def mT107(self, ):
    # 'default'
    self.type = T107
    self.match("default")

def mT108(self, ):
    # 'if'
    self.type = T108
    self.match("if")

def mT109(self, ):
    # 'else'
    self.type = T109
    self.match("else")

def mT110(self, ):
    # 'switch'
    self.type = T110
    self.match("switch")

def mT111(self, ):
    # 'while'
    self.type = T111
    self.match("while")

def mT112(self, ):
    # 'do'
    self.type = T112
    self.match("do")

def mT113(self, ):
    # 'for'
    self.type = T113
    self.match("for")

def mT114(self, ):
    # 'goto'
    self.type = T114
    self.match("goto")

def mT115(self, ):
    # 'continue'
    self.type = T115
    self.match("continue")

def mT116(self, ):
    # 'break'
    self.type = T116
    self.match("break")

def mT117(self, ):
    # 'return'
    self.type = T117
    self.match("return")
# $ANTLR end T93 .. T117
# $ANTLR start IDENTIFIER
def mIDENTIFIER(self, ):
    """Lex an identifier: LETTER ( LETTER | '0'..'9' )*."""
    self.type = IDENTIFIER
    self.mLETTER()
    # Greedily consume trailing identifier characters ('$' counts as a
    # letter in this grammar).
    while True:
        nxt = self.input.LA(1)
        if (nxt == u'$'
                or (u'0' <= nxt <= u'9')
                or (u'A' <= nxt <= u'Z')
                or nxt == u'_'
                or (u'a' <= nxt <= u'z')):
            self.input.consume()
        else:
            break
# $ANTLR end IDENTIFIER
# $ANTLR start LETTER
def mLETTER(self, ):
    """Fragment rule: one of '$' | 'A'..'Z' | 'a'..'z' | '_'."""
    ch = self.input.LA(1)
    if ch == u'$' or (u'A' <= ch <= u'Z') or ch == u'_' or (u'a' <= ch <= u'z'):
        self.input.consume()
    else:
        mse = MismatchedSetException(None, self.input)
        self.recover(mse)
        raise mse
# $ANTLR end LETTER
# $ANTLR start CHARACTER_LITERAL
def mCHARACTER_LITERAL(self, ):
    """Lex a character literal: ( 'L' )? '\\'' ( EscapeSequence | ~('\\''|'\\\\') ) '\\''."""
    self.type = CHARACTER_LITERAL
    # Optional wide-character prefix.
    if self.input.LA(1) == u'L':
        self.match(u'L')
    self.match(u'\'')
    body = self.input.LA(1)
    if body == u'\\':
        self.mEscapeSequence()
    elif (u'\u0000' <= body <= u'&') or (u'(' <= body <= u'[') or (u']' <= body <= u'\uFFFE'):
        # Any single character other than quote or backslash.
        self.input.consume()
    else:
        nvae = NoViableAltException("598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )", 3, 0, self.input)
        raise nvae
    self.match(u'\'')
# $ANTLR end CHARACTER_LITERAL
# $ANTLR start STRING_LITERAL
def mSTRING_LITERAL(self, ):
    """Lex a string literal: ( 'L' )? '"' ( EscapeSequence | ~('\\\\'|'"') )* '"'."""
    self.type = STRING_LITERAL
    # Optional wide-string prefix.
    if self.input.LA(1) == u'L':
        self.match(u'L')
    self.match(u'"')
    # Consume body characters until the closing quote.
    while True:
        ch = self.input.LA(1)
        if ch == u'\\':
            self.mEscapeSequence()
        elif (u'\u0000' <= ch <= u'!') or (u'#' <= ch <= u'[') or (u']' <= ch <= u'\uFFFE'):
            self.input.consume()
        else:
            break
    self.match(u'"')
# $ANTLR end STRING_LITERAL
# $ANTLR start HEX_LITERAL
def mHEX_LITERAL(self, ):
    """Lex a hex literal: '0' ('x'|'X') HexDigit+ ( IntegerTypeSuffix )?."""
    self.type = HEX_LITERAL
    self.match(u'0')
    if self.input.LA(1) == u'X' or self.input.LA(1) == u'x':
        self.input.consume()
    else:
        mse = MismatchedSetException(None, self.input)
        self.recover(mse)
        raise mse
    # At least one hex digit is required (ANTLR decision #6).
    ndigits = 0
    while True:
        ch = self.input.LA(1)
        if (u'0' <= ch <= u'9') or (u'A' <= ch <= u'F') or (u'a' <= ch <= u'f'):
            self.mHexDigit()
            ndigits += 1
        else:
            break
    if ndigits == 0:
        raise EarlyExitException(6, self.input)
    # Optional integer type suffix.
    ch = self.input.LA(1)
    if ch == u'L' or ch == u'U' or ch == u'l' or ch == u'u':
        self.mIntegerTypeSuffix()
# $ANTLR end HEX_LITERAL
# $ANTLR start DECIMAL_LITERAL
def mDECIMAL_LITERAL(self, ):
    """Lex a decimal literal: ( '0' | '1'..'9' '0'..'9'* ) ( IntegerTypeSuffix )?."""
    self.type = DECIMAL_LITERAL
    first = self.input.LA(1)
    if first == u'0':
        # A lone '0' (a longer octal form is a different token rule).
        self.match(u'0')
    elif u'1' <= first <= u'9':
        self.matchRange(u'1', u'9')
        while u'0' <= self.input.LA(1) <= u'9':
            self.matchRange(u'0', u'9')
    else:
        nvae = NoViableAltException("607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )", 9, 0, self.input)
        raise nvae
    # Optional integer type suffix.
    ch = self.input.LA(1)
    if ch == u'L' or ch == u'U' or ch == u'l' or ch == u'u':
        self.mIntegerTypeSuffix()
# $ANTLR end DECIMAL_LITERAL
# $ANTLR start OCTAL_LITERAL
def mOCTAL_LITERAL(self, ):
    """Lex an octal literal: '0' ('0'..'7')+ ( IntegerTypeSuffix )?."""
    self.type = OCTAL_LITERAL
    self.match(u'0')
    # At least one octal digit is required (ANTLR decision #11).
    ndigits = 0
    while u'0' <= self.input.LA(1) <= u'7':
        self.matchRange(u'0', u'7')
        ndigits += 1
    if ndigits == 0:
        raise EarlyExitException(11, self.input)
    # Optional integer type suffix.
    ch = self.input.LA(1)
    if ch == u'L' or ch == u'U' or ch == u'l' or ch == u'u':
        self.mIntegerTypeSuffix()
# $ANTLR end OCTAL_LITERAL
# $ANTLR start HexDigit
def mHexDigit(self, ):
    """Fragment rule: one of '0'..'9' | 'a'..'f' | 'A'..'F'."""
    ch = self.input.LA(1)
    if (u'0' <= ch <= u'9') or (u'A' <= ch <= u'F') or (u'a' <= ch <= u'f'):
        self.input.consume()
    else:
        mse = MismatchedSetException(None, self.input)
        self.recover(mse)
        raise mse
# $ANTLR end HexDigit
# $ANTLR start IntegerTypeSuffix
def mIntegerTypeSuffix(self, ):
    """Fragment rule: u|U, l|L, uL, or uLL (ANTLR decision #13)."""
    def eat_one(allowed):
        # Consume one character from *allowed*, recovering on mismatch
        # exactly like the generated set-match code.
        if self.input.LA(1) in allowed:
            self.input.consume()
        else:
            mse = MismatchedSetException(None, self.input)
            self.recover(mse)
            raise mse

    la1 = self.input.LA(1)
    if la1 == u'U' or la1 == u'u':
        # Decide how many 'l'/'L' follow the unsigned marker (0, 1, or 2).
        la2 = self.input.LA(2)
        if la2 == u'L' or la2 == u'l':
            la3 = self.input.LA(3)
            l_count = 2 if (la3 == u'L' or la3 == u'l') else 1
        else:
            l_count = 0
        eat_one((u'U', u'u'))
        for _ in range(l_count):
            eat_one((u'L', u'l'))
    elif la1 == u'L' or la1 == u'l':
        eat_one((u'L', u'l'))
    else:
        nvae = NoViableAltException("614:1: fragment IntegerTypeSuffix : ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) );", 13, 0, self.input)
        raise nvae
# $ANTLR end IntegerTypeSuffix
# $ANTLR start FLOATING_POINT_LITERAL
def mFLOATING_POINT_LITERAL(self, ):
    """Lex a floating point literal.

    Four alternatives, selected by DFA #25:
      1: digits+ '.' digits* Exponent? FloatTypeSuffix?
      2: '.' digits+ Exponent? FloatTypeSuffix?
      3: digits+ Exponent FloatTypeSuffix?
      4: digits+ Exponent? FloatTypeSuffix
    """
    self.type = FLOATING_POINT_LITERAL

    def digits_star():
        # ( '0' .. '9' )*
        while u'0' <= self.input.LA(1) <= u'9':
            self.matchRange(u'0', u'9')

    def digits_plus(decision):
        # ( '0' .. '9' )+ ; *decision* is the ANTLR decision number used
        # in the early-exit error, preserved from the generated code.
        if not (u'0' <= self.input.LA(1) <= u'9'):
            raise EarlyExitException(decision, self.input)
        digits_star()

    def opt_exponent():
        ch = self.input.LA(1)
        if ch == u'E' or ch == u'e':
            self.mExponent()

    def opt_float_suffix():
        ch = self.input.LA(1)
        if ch == u'D' or ch == u'F' or ch == u'd' or ch == u'f':
            self.mFloatTypeSuffix()

    alt25 = self.dfa25.predict(self.input)
    if alt25 == 1:
        digits_plus(14)
        self.match(u'.')
        digits_star()
        opt_exponent()
        opt_float_suffix()
    elif alt25 == 2:
        self.match(u'.')
        digits_plus(18)
        opt_exponent()
        opt_float_suffix()
    elif alt25 == 3:
        digits_plus(21)
        self.mExponent()
        opt_float_suffix()
    elif alt25 == 4:
        digits_plus(23)
        opt_exponent()
        self.mFloatTypeSuffix()
# $ANTLR end FLOATING_POINT_LITERAL
# $ANTLR start Exponent
def mExponent(self, ):
    """Fragment rule: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+."""
    if self.input.LA(1) == u'E' or self.input.LA(1) == u'e':
        self.input.consume()
    else:
        mse = MismatchedSetException(None, self.input)
        self.recover(mse)
        raise mse
    # Optional sign.
    if self.input.LA(1) == u'+' or self.input.LA(1) == u'-':
        self.input.consume()
    # At least one digit is required (ANTLR decision #27).
    if not (u'0' <= self.input.LA(1) <= u'9'):
        raise EarlyExitException(27, self.input)
    while u'0' <= self.input.LA(1) <= u'9':
        self.matchRange(u'0', u'9')
# $ANTLR end Exponent
# $ANTLR start FloatTypeSuffix
def mFloatTypeSuffix(self, ):
    """Fragment rule: one of 'f' | 'F' | 'd' | 'D'."""
    ch = self.input.LA(1)
    if ch == u'D' or ch == u'F' or ch == u'd' or ch == u'f':
        self.input.consume()
    else:
        mse = MismatchedSetException(None, self.input)
        self.recover(mse)
        raise mse
# $ANTLR end FloatTypeSuffix
# $ANTLR start EscapeSequence
def mEscapeSequence(self, ):
    """Fragment rule: '\\\\' simple-escape-char, or an OctalEscape."""
    if self.input.LA(1) != u'\\':
        nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 0, self.input)
        raise nvae
    la2 = self.input.LA(2)
    if la2 in (u'"', u'\'', u'\\', u'b', u'f', u'n', u'r', u't'):
        # Simple escape: backslash plus one of the fixed escape characters.
        self.match(u'\\')
        self.input.consume()
    elif u'0' <= la2 <= u'7':
        self.mOctalEscape()
    else:
        nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 1, self.input)
        raise nvae
# $ANTLR end EscapeSequence
# $ANTLR start OctalEscape
def mOctalEscape(self, ):
    """Fragment rule: backslash followed by 1-3 octal digits.

    Three-digit escapes must start with '0'..'3' (ANTLR decision #29).
    """
    if self.input.LA(1) != u'\\':
        nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 0, self.input)
        raise nvae
    # Decide how many octal digits to take by pure lookahead.
    la2 = self.input.LA(2)
    if u'0' <= la2 <= u'3':
        if u'0' <= self.input.LA(3) <= u'7':
            ndigits = 3 if (u'0' <= self.input.LA(4) <= u'7') else 2
        else:
            ndigits = 1
    elif u'4' <= la2 <= u'7':
        ndigits = 2 if (u'0' <= self.input.LA(3) <= u'7') else 1
    else:
        nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 1, self.input)
        raise nvae
    self.match(u'\\')
    if ndigits == 3:
        self.matchRange(u'0', u'3')
        self.matchRange(u'0', u'7')
        self.matchRange(u'0', u'7')
    elif ndigits == 2:
        self.matchRange(u'0', u'7')
        self.matchRange(u'0', u'7')
    else:
        self.matchRange(u'0', u'7')
# $ANTLR end OctalEscape
# $ANTLR start UnicodeEscape
def mUnicodeEscape(self, ):
    """Fragment rule: '\\\\' 'u' followed by exactly four hex digits."""
    self.match(u'\\')
    self.match(u'u')
    for _ in range(4):
        self.mHexDigit()
# $ANTLR end UnicodeEscape
# $ANTLR start WS
def mWS(self, ):
    """Lex one whitespace character and route it to the hidden channel."""
    self.type = WS
    ch = self.input.LA(1)
    if (u'\t' <= ch <= u'\n') or (u'\f' <= ch <= u'\r') or ch == u' ':
        self.input.consume()
    else:
        mse = MismatchedSetException(None, self.input)
        self.recover(mse)
        raise mse
    # Whitespace is hidden from the parser.
    self.channel = HIDDEN
# $ANTLR end WS
# $ANTLR start BS
def mBS(self, ):
    """Lex a lone backslash and route it to the hidden channel."""
    self.type = BS
    self.match(u'\\')
    self.channel = HIDDEN
# $ANTLR end BS
# $ANTLR start UnicodeVocabulary
def mUnicodeVocabulary(self, ):
    """Catch-all rule: any single character in '\\u0003'..'\\uFFFE'."""
    self.type = UnicodeVocabulary
    self.matchRange(u'\u0003', u'\uFFFE')
# $ANTLR end UnicodeVocabulary
# $ANTLR start COMMENT
def mCOMMENT(self, ):
    """Lex a block comment '/*' ... '*/' onto the hidden channel."""
    self.type = COMMENT
    self.match("/*")
    # Non-greedy scan: consume everything up to (but excluding) the
    # first '*/' terminator.
    while True:
        la1 = self.input.LA(1)
        if la1 == u'*':
            la2 = self.input.LA(2)
            if la2 == u'/':
                break
            elif (u'\u0000' <= la2 <= u'.') or (u'0' <= la2 <= u'\uFFFE'):
                self.matchAny()
            else:
                break
        elif (u'\u0000' <= la1 <= u')') or (u'+' <= la1 <= u'\uFFFE'):
            self.matchAny()
        else:
            break
    self.match("*/")
    # Comments are hidden from the parser.
    self.channel = HIDDEN
# $ANTLR end COMMENT
# $ANTLR start LINE_COMMENT
def mLINE_COMMENT(self, ):
    """Lex a '//' comment through end-of-line onto the hidden channel."""
    self.type = LINE_COMMENT
    self.match("//")
    # Consume everything that is not a CR or LF.
    while True:
        ch = self.input.LA(1)
        if (u'\u0000' <= ch <= u'\t') or (u'\u000B' <= ch <= u'\f') or (u'\u000E' <= ch <= u'\uFFFE'):
            self.input.consume()
        else:
            break
    # Optional CR before the mandatory LF (CRLF or LF line endings).
    if self.input.LA(1) == u'\r':
        self.match(u'\r')
    self.match(u'\n')
    self.channel = HIDDEN
# $ANTLR end LINE_COMMENT
# $ANTLR start LINE_COMMAND
def mLINE_COMMAND(self, ):
    """Lex a preprocessor line ('#' ... end-of-line) onto the hidden channel."""
    self.type = LINE_COMMAND
    self.match(u'#')
    # Consume everything that is not a CR or LF.
    while True:
        ch = self.input.LA(1)
        if (u'\u0000' <= ch <= u'\t') or (u'\u000B' <= ch <= u'\f') or (u'\u000E' <= ch <= u'\uFFFE'):
            self.input.consume()
        else:
            break
    # Optional CR before the mandatory LF (CRLF or LF line endings).
    if self.input.LA(1) == u'\r':
        self.match(u'\r')
    self.match(u'\n')
    self.channel = HIDDEN
# $ANTLR end LINE_COMMAND
def mTokens(self):
    """Top-level token dispatcher.

    DFA #35 predicts which of the 106 lexer rules applies at the current
    input position:
      alts 1..93   -> anonymous fixed-text rules T25 .. T117
      alts 94..106 -> the named rules (IDENTIFIER, literals, WS, ...)
    An out-of-range prediction is a no-op, matching the generated
    if/elif chain this replaces.
    """
    alt35 = self.dfa35.predict(self.input)
    if 1 <= alt35 <= 93:
        # Alt N corresponds to rule method mT(N+24).
        getattr(self, "mT%d" % (alt35 + 24))()
    else:
        rule = {
            94: self.mIDENTIFIER,
            95: self.mCHARACTER_LITERAL,
            96: self.mSTRING_LITERAL,
            97: self.mHEX_LITERAL,
            98: self.mDECIMAL_LITERAL,
            99: self.mOCTAL_LITERAL,
            100: self.mFLOATING_POINT_LITERAL,
            101: self.mWS,
            102: self.mBS,
            103: self.mUnicodeVocabulary,
            104: self.mCOMMENT,
            105: self.mLINE_COMMENT,
            106: self.mLINE_COMMAND,
        }.get(alt35)
        if rule is not None:
            rule()
# lookup tables for DFA #25
# Serialized ANTLR DFA tables for the FLOATING_POINT_LITERAL alternative
# prediction (decision 25).  Machine-generated octal-escaped short arrays;
# do not edit by hand.
DFA25_eot = DFA.unpack(
    u"\7\uffff\1\10\2\uffff"
    )

DFA25_eof = DFA.unpack(
    u"\12\uffff"
    )

DFA25_min = DFA.unpack(
    u"\2\56\2\uffff\1\53\1\uffff\2\60\2\uffff"
    )

DFA25_max = DFA.unpack(
    u"\1\71\1\146\2\uffff\1\71\1\uffff\1\71\1\146\2\uffff"
    )

DFA25_accept = DFA.unpack(
    u"\2\uffff\1\2\1\1\1\uffff\1\4\2\uffff\2\3"
    )

DFA25_special = DFA.unpack(
    u"\12\uffff"
    )

# Per-state character-class transition rows for DFA #25.
DFA25_transition = [
    DFA.unpack(u"\1\2\1\uffff\12\1"),
    DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
    u"\1\4\1\5"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\6\1\uffff\1\6\2\uffff\12\7"),
    DFA.unpack(u""),
    DFA.unpack(u"\12\7"),
    DFA.unpack(u"\12\7\12\uffff\1\11\1\uffff\1\11\35\uffff\1\11\1\uffff"
    u"\1\11"),
    DFA.unpack(u""),
    DFA.unpack(u"")
]

# class definition for DFA #25
# The runtime DFA class used to instantiate decision 25 from the tables above.
DFA25 = DFA
# lookup tables for DFA #35
DFA35_eot = DFA.unpack(
u"\2\uffff\1\76\1\uffff\1\101\14\76\3\uffff\10\76\4\uffff\1\151\1"
u"\153\1\157\1\163\1\167\1\171\1\174\1\uffff\1\177\1\u0082\1\u0085"
u"\1\u0087\1\u008a\1\uffff\5\76\1\uffff\2\73\2\u0095\2\uffff\1\73"
u"\2\uffff\1\76\4\uffff\16\76\1\u00ad\5\76\1\u00b4\1\76\3\uffff\1"
u"\u00b7\10\76\34\uffff\1\u00c1\2\uffff\1\u00c3\10\uffff\5\76\3\uffff"
u"\1\u00c9\1\uffff\1\u0095\3\uffff\23\76\1\uffff\1\u00de\1\76\1\u00e0"
u"\3\76\1\uffff\2\76\1\uffff\1\76\1\u00e7\6\76\4\uffff\5\76\1\uffff"
u"\1\76\1\u00f5\1\76\1\u00f7\6\76\1\u00fe\4\76\1\u0103\1\u0104\2"
u"\76\1\u0107\1\uffff\1\u0108\1\uffff\6\76\1\uffff\10\76\1\u0118"
u"\1\76\1\u011a\2\76\1\uffff\1\76\1\uffff\5\76\1\u0123\1\uffff\4"
u"\76\2\uffff\1\76\1\u0129\2\uffff\1\u012a\3\76\1\u012e\1\76\1\u0130"
u"\7\76\1\u0139\1\uffff\1\u013a\1\uffff\1\u013b\1\76\1\u013d\1\u013e"
u"\1\u013f\1\u0140\1\u0141\1\u0142\1\uffff\1\76\1\u0144\1\u0145\2"
u"\76\2\uffff\1\76\1\u0149\1\76\1\uffff\1\76\1\uffff\5\76\1\u0151"
u"\1\u0152\1\76\3\uffff\1\u0154\6\uffff\1\76\2\uffff\2\76\1\u0158"
u"\1\uffff\7\76\2\uffff\1\u0160\1\uffff\1\u0161\1\u0162\1\u0163\1"
u"\uffff\1\u0164\1\u0165\1\76\1\u0167\3\76\6\uffff\1\u016b\1\uffff"
u"\3\76\1\uffff\21\76\1\u0180\2\76\1\uffff\3\76\1\u0186\1\76\1\uffff"
u"\11\76\1\u0191\1\uffff"
)
DFA35_eof = DFA.unpack(
u"\u0192\uffff"
)
# Packed per-state minimum input character table for lexer DFA #35
# (ANTLR serialized form, decoded by DFA.unpack).  Auto-generated -- do not edit.
DFA35_min = DFA.unpack(
    u"\1\3\1\uffff\1\171\1\uffff\1\75\1\154\1\150\1\165\1\145\1\124\1"
    u"\157\1\141\1\146\1\157\1\154\1\145\1\156\3\uffff\1\116\1\120\1"
    u"\117\1\116\1\117\1\114\1\106\1\101\4\uffff\1\75\1\56\1\53\1\55"
    u"\1\52\1\75\1\46\1\uffff\1\75\1\74\3\75\1\uffff\1\137\1\150\1\157"
    u"\1\162\1\42\1\uffff\2\0\2\56\2\uffff\1\0\2\uffff\1\160\4\uffff"
    u"\1\163\1\164\1\165\1\151\1\141\1\147\1\157\1\164\1\147\1\101\1"
    u"\151\1\163\1\156\1\141\1\44\1\164\1\156\1\162\1\157\1\146\1\44"
    u"\1\151\3\uffff\1\44\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34"
    u"\uffff\1\75\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145"
    u"\3\uffff\1\56\1\uffff\1\56\3\uffff\3\145\1\155\2\164\1\165\1\145"
    u"\1\156\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\163\1"
    u"\162\1\uffff\1\44\1\147\1\44\2\141\1\142\1\uffff\1\151\1\157\1"
    u"\uffff\1\111\1\44\1\123\1\114\1\101\1\102\1\101\1\113\4\uffff\1"
    u"\163\1\155\1\154\1\157\1\141\1\uffff\1\144\1\44\1\162\1\44\1\143"
    u"\1\151\1\143\1\157\1\145\1\164\1\44\1\163\1\162\1\111\1\164\2\44"
    u"\1\151\1\164\1\44\1\uffff\1\44\1\uffff\1\164\1\165\1\154\1\147"
    u"\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\102\1\120\1\105"
    u"\1\155\1\44\1\145\1\44\1\153\1\145\1\uffff\1\156\1\uffff\1\150"
    u"\1\143\1\164\1\146\1\144\1\44\1\uffff\1\164\1\156\1\103\1\151\2"
    u"\uffff\1\156\1\44\2\uffff\1\44\1\154\1\145\1\156\1\44\1\116\1\44"
    u"\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\44\1\uffff\1\44\1"
    u"\uffff\1\44\1\146\6\44\1\uffff\1\145\2\44\1\154\1\165\2\uffff\1"
    u"\164\1\44\1\145\1\uffff\1\101\1\uffff\1\116\1\114\1\137\1\116\1"
    u"\117\2\44\1\137\3\uffff\1\44\6\uffff\1\162\2\uffff\2\145\1\44\1"
    u"\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff\1\44\1\uffff\3\44"
    u"\1\uffff\2\44\1\104\1\44\1\105\1\111\1\123\6\uffff\1\44\1\uffff"
    u"\2\115\1\105\1\uffff\1\117\1\105\1\122\1\126\1\123\1\126\2\105"
    u"\1\111\1\137\1\122\1\103\1\111\1\126\1\105\1\106\1\111\1\44\1\137"
    u"\1\103\1\uffff\1\125\1\105\1\116\1\44\1\122\1\uffff\1\105\1\106"
    u"\1\105\1\122\1\105\1\116\1\103\1\105\1\104\1\44\1\uffff"
    )
# Packed per-state maximum input character table for lexer DFA #35
# (ANTLR serialized form, decoded by DFA.unpack).  Auto-generated -- do not edit.
DFA35_max = DFA.unpack(
    u"\1\ufffe\1\uffff\1\171\1\uffff\1\75\1\170\1\167\1\165\1\145\1\124"
    u"\2\157\1\156\3\157\1\156\3\uffff\1\116\1\125\1\117\1\116\1\117"
    u"\1\114\1\106\1\101\4\uffff\1\75\1\71\1\75\1\76\3\75\1\uffff\2\75"
    u"\1\76\1\75\1\174\1\uffff\1\141\1\150\1\157\1\162\1\47\1\uffff\2"
    u"\ufffe\1\170\1\146\2\uffff\1\ufffe\2\uffff\1\160\4\uffff\1\163"
    u"\1\164\1\165\1\151\1\162\1\172\1\157\2\164\1\101\1\154\1\163\1"
    u"\156\1\141\1\172\1\164\1\156\1\162\1\157\1\146\1\172\1\163\3\uffff"
    u"\1\172\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34\uffff\1\75"
    u"\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145\3\uffff\1"
    u"\146\1\uffff\1\146\3\uffff\3\145\1\155\2\164\1\165\1\145\1\156"
    u"\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\164\1\162\1"
    u"\uffff\1\172\1\147\1\172\2\141\1\142\1\uffff\1\151\1\157\1\uffff"
    u"\1\111\1\172\1\123\1\114\1\101\1\102\1\137\1\113\4\uffff\1\163"
    u"\1\155\1\154\1\157\1\141\1\uffff\1\144\1\172\1\162\1\172\1\143"
    u"\1\151\1\143\1\157\1\145\1\164\1\172\1\163\1\162\1\111\1\164\2"
    u"\172\1\151\1\164\1\172\1\uffff\1\172\1\uffff\1\164\1\165\1\154"
    u"\1\147\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\122\1\120"
    u"\1\105\1\155\1\172\1\145\1\172\1\153\1\145\1\uffff\1\156\1\uffff"
    u"\1\150\1\143\1\164\1\146\1\144\1\172\1\uffff\1\164\1\156\1\103"
    u"\1\151\2\uffff\1\156\1\172\2\uffff\1\172\1\154\1\145\1\156\1\172"
    u"\1\116\1\172\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\172\1"
    u"\uffff\1\172\1\uffff\1\172\1\146\6\172\1\uffff\1\145\2\172\1\154"
    u"\1\165\2\uffff\1\164\1\172\1\145\1\uffff\1\101\1\uffff\1\116\1"
    u"\114\1\137\1\116\1\117\2\172\1\137\3\uffff\1\172\6\uffff\1\162"
    u"\2\uffff\2\145\1\172\1\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff"
    u"\1\172\1\uffff\3\172\1\uffff\2\172\1\104\1\172\1\105\1\111\1\123"
    u"\6\uffff\1\172\1\uffff\2\115\1\105\1\uffff\1\117\1\105\1\122\1"
    u"\126\1\123\1\126\2\105\1\111\1\137\1\122\1\103\1\111\1\126\1\105"
    u"\1\106\1\111\1\172\1\137\1\103\1\uffff\1\125\1\105\1\116\1\172"
    u"\1\122\1\uffff\1\105\1\106\1\105\1\122\1\105\1\116\1\103\1\105"
    u"\1\104\1\172\1\uffff"
    )
# Packed accept-state table for lexer DFA #35: which token alternative each
# DFA state accepts (\uffff = not an accept state).  Auto-generated -- do not edit.
DFA35_accept = DFA.unpack(
    u"\1\uffff\1\1\1\uffff\1\3\15\uffff\1\23\1\24\1\27\10\uffff\1\46"
    u"\1\47\1\50\1\51\7\uffff\1\66\5\uffff\1\102\5\uffff\1\136\4\uffff"
    u"\1\145\1\146\1\uffff\1\147\1\1\1\uffff\1\136\1\3\1\107\1\4\26\uffff"
    u"\1\23\1\24\1\27\11\uffff\1\46\1\47\1\50\1\51\1\70\1\52\1\53\1\63"
    u"\1\144\1\73\1\60\1\54\1\74\1\64\1\61\1\55\1\150\1\151\1\71\1\56"
    u"\1\72\1\57\1\77\1\104\1\65\1\66\1\110\1\67\1\uffff\1\113\1\111"
    u"\1\uffff\1\114\1\112\1\100\1\106\1\103\1\101\1\105\1\102\5\uffff"
    u"\1\140\1\137\1\141\1\uffff\1\142\1\uffff\1\145\1\146\1\152\23\uffff"
    u"\1\124\6\uffff\1\130\2\uffff\1\33\10\uffff\1\75\1\115\1\76\1\116"
    u"\5\uffff\1\143\24\uffff\1\15\1\uffff\1\131\6\uffff\1\34\15\uffff"
    u"\1\125\1\uffff\1\30\6\uffff\1\7\4\uffff\1\12\1\122\2\uffff\1\13"
    u"\1\16\17\uffff\1\120\1\uffff\1\132\10\uffff\1\14\5\uffff\1\31\1"
    u"\17\3\uffff\1\26\1\uffff\1\36\10\uffff\1\121\1\127\1\134\1\uffff"
    u"\1\5\1\126\1\6\1\25\1\62\1\21\1\uffff\1\135\1\11\3\uffff\1\20\7"
    u"\uffff\1\42\1\45\1\uffff\1\2\3\uffff\1\123\7\uffff\1\117\1\10\1"
    u"\32\1\133\1\22\1\35\1\uffff\1\40\3\uffff\1\37\24\uffff\1\43\5\uffff"
    u"\1\44\12\uffff\1\41"
    )
# Packed special-state table for lexer DFA #35 (all \uffff: no states need
# special predicate handling).  Auto-generated -- do not edit.
DFA35_special = DFA.unpack(
    u"\u0192\uffff"
    )
# Per-state transition tables for lexer DFA #35: one packed row per DFA
# state, mapping input characters to next states (decoded by DFA.unpack).
# Auto-generated by ANTLR -- do not edit.
DFA35_transition = [
DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
u"\67\1\23\1\1\1\51\1\4\1\52\1\55\1\73\2\63\1\26\1\63\1\32\1\63\1"
u"\31\1\63\1\24\2\63\1\62\2\63\1\25\1\33\2\63\1\11\1\63\1\27\1\30"
u"\4\63\1\36\1\71\1\37\1\53\1\56\1\73\1\7\1\61\1\13\1\17\1\5\1\16"
u"\1\60\1\63\1\14\2\63\1\15\5\63\1\10\1\6\1\2\1\20\1\12\1\57\3\63"
u"\1\21\1\54\1\22\1\47\uff80\73"),
DFA.unpack(u""),
DFA.unpack(u"\1\75"),
DFA.unpack(u""),
DFA.unpack(u"\1\100"),
DFA.unpack(u"\1\102\1\uffff\1\104\11\uffff\1\103"),
DFA.unpack(u"\1\110\1\107\12\uffff\1\106\2\uffff\1\105"),
DFA.unpack(u"\1\111"),
DFA.unpack(u"\1\112"),
DFA.unpack(u"\1\113"),
DFA.unpack(u"\1\114"),
DFA.unpack(u"\1\115\6\uffff\1\117\6\uffff\1\116"),
DFA.unpack(u"\1\120\7\uffff\1\121"),
DFA.unpack(u"\1\122"),
DFA.unpack(u"\1\124\2\uffff\1\123"),
DFA.unpack(u"\1\125\11\uffff\1\126"),
DFA.unpack(u"\1\127"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\133"),
DFA.unpack(u"\1\134\4\uffff\1\135"),
DFA.unpack(u"\1\136"),
DFA.unpack(u"\1\137"),
DFA.unpack(u"\1\140"),
DFA.unpack(u"\1\141"),
DFA.unpack(u"\1\142"),
DFA.unpack(u"\1\143"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\150"),
DFA.unpack(u"\1\152\1\uffff\12\154"),
DFA.unpack(u"\1\156\21\uffff\1\155"),
DFA.unpack(u"\1\162\17\uffff\1\160\1\161"),
DFA.unpack(u"\1\164\4\uffff\1\165\15\uffff\1\166"),
DFA.unpack(u"\1\170"),
DFA.unpack(u"\1\173\26\uffff\1\172"),
DFA.unpack(u""),
DFA.unpack(u"\1\176"),
DFA.unpack(u"\1\u0080\1\u0081"),
DFA.unpack(u"\1\u0084\1\u0083"),
DFA.unpack(u"\1\u0086"),
DFA.unpack(u"\1\u0089\76\uffff\1\u0088"),
DFA.unpack(u""),
DFA.unpack(u"\1\u008c\1\uffff\1\u008d"),
DFA.unpack(u"\1\u008e"),
DFA.unpack(u"\1\u008f"),
DFA.unpack(u"\1\u0090"),
DFA.unpack(u"\1\u0091\4\uffff\1\u0092"),
DFA.unpack(u""),
DFA.unpack(u"\47\u0092\1\uffff\uffd7\u0092"),
DFA.unpack(u"\uffff\u0091"),
DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\21\uffff"
u"\1\u0093\13\uffff\3\154\21\uffff\1\u0093"),
DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\uffff\u0099"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u009a"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u009b"),
DFA.unpack(u"\1\u009c"),
DFA.unpack(u"\1\u009d"),
DFA.unpack(u"\1\u009e"),
DFA.unpack(u"\1\u009f\20\uffff\1\u00a0"),
DFA.unpack(u"\1\u00a2\22\uffff\1\u00a1"),
DFA.unpack(u"\1\u00a3"),
DFA.unpack(u"\1\u00a4"),
DFA.unpack(u"\1\u00a5\14\uffff\1\u00a6"),
DFA.unpack(u"\1\u00a7"),
DFA.unpack(u"\1\u00a9\2\uffff\1\u00a8"),
DFA.unpack(u"\1\u00aa"),
DFA.unpack(u"\1\u00ab"),
DFA.unpack(u"\1\u00ac"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00ae"),
DFA.unpack(u"\1\u00af"),
DFA.unpack(u"\1\u00b0"),
DFA.unpack(u"\1\u00b1"),
DFA.unpack(u"\1\u00b2"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\24\76\1\u00b3\5\76"),
DFA.unpack(u"\1\u00b6\11\uffff\1\u00b5"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00b8"),
DFA.unpack(u"\1\u00b9"),
DFA.unpack(u"\1\u00ba"),
DFA.unpack(u"\1\u00bb"),
DFA.unpack(u"\1\u00bc"),
DFA.unpack(u"\1\u00bd"),
DFA.unpack(u"\1\u00be"),
DFA.unpack(u"\1\u00bf"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00c0"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00c2"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00c4"),
DFA.unpack(u"\1\u00c5"),
DFA.unpack(u"\1\u00c6"),
DFA.unpack(u"\1\u00c7"),
DFA.unpack(u"\1\u00c8"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\35\uffff"
u"\3\154"),
DFA.unpack(u""),
DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00ca"),
DFA.unpack(u"\1\u00cb"),
DFA.unpack(u"\1\u00cc"),
DFA.unpack(u"\1\u00cd"),
DFA.unpack(u"\1\u00ce"),
DFA.unpack(u"\1\u00cf"),
DFA.unpack(u"\1\u00d0"),
DFA.unpack(u"\1\u00d1"),
DFA.unpack(u"\1\u00d2"),
DFA.unpack(u"\1\u00d3"),
DFA.unpack(u"\1\u00d4"),
DFA.unpack(u"\1\u00d5"),
DFA.unpack(u"\1\u00d6"),
DFA.unpack(u"\1\u00d7"),
DFA.unpack(u"\1\u00d8"),
DFA.unpack(u"\1\u00d9"),
DFA.unpack(u"\1\u00da"),
DFA.unpack(u"\1\u00dc\1\u00db"),
DFA.unpack(u"\1\u00dd"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00df"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00e1"),
DFA.unpack(u"\1\u00e2"),
DFA.unpack(u"\1\u00e3"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00e4"),
DFA.unpack(u"\1\u00e5"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00e6"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00e8"),
DFA.unpack(u"\1\u00e9"),
DFA.unpack(u"\1\u00ea"),
DFA.unpack(u"\1\u00eb"),
DFA.unpack(u"\1\u00ed\35\uffff\1\u00ec"),
DFA.unpack(u"\1\u00ee"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00ef"),
DFA.unpack(u"\1\u00f0"),
DFA.unpack(u"\1\u00f1"),
DFA.unpack(u"\1\u00f2"),
DFA.unpack(u"\1\u00f3"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00f4"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00f6"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00f8"),
DFA.unpack(u"\1\u00f9"),
DFA.unpack(u"\1\u00fa"),
DFA.unpack(u"\1\u00fb"),
DFA.unpack(u"\1\u00fc"),
DFA.unpack(u"\1\u00fd"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00ff"),
DFA.unpack(u"\1\u0100"),
DFA.unpack(u"\1\u0101"),
DFA.unpack(u"\1\u0102"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0105"),
DFA.unpack(u"\1\u0106"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0109"),
DFA.unpack(u"\1\u010a"),
DFA.unpack(u"\1\u010b"),
DFA.unpack(u"\1\u010c"),
DFA.unpack(u"\1\u010d"),
DFA.unpack(u"\1\u010e"),
DFA.unpack(u""),
DFA.unpack(u"\1\u010f"),
DFA.unpack(u"\1\u0110"),
DFA.unpack(u"\1\u0111"),
DFA.unpack(u"\1\u0112"),
DFA.unpack(u"\1\u0114\17\uffff\1\u0113"),
DFA.unpack(u"\1\u0115"),
DFA.unpack(u"\1\u0116"),
DFA.unpack(u"\1\u0117"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0119"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u011b"),
DFA.unpack(u"\1\u011c"),
DFA.unpack(u""),
DFA.unpack(u"\1\u011d"),
DFA.unpack(u""),
DFA.unpack(u"\1\u011e"),
DFA.unpack(u"\1\u011f"),
DFA.unpack(u"\1\u0120"),
DFA.unpack(u"\1\u0121"),
DFA.unpack(u"\1\u0122"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0124"),
DFA.unpack(u"\1\u0125"),
DFA.unpack(u"\1\u0126"),
DFA.unpack(u"\1\u0127"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0128"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u012b"),
DFA.unpack(u"\1\u012c"),
DFA.unpack(u"\1\u012d"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u012f"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0131"),
DFA.unpack(u"\1\u0132"),
DFA.unpack(u"\1\u0133"),
DFA.unpack(u"\1\u0134"),
DFA.unpack(u"\1\u0135"),
DFA.unpack(u"\1\u0136"),
DFA.unpack(u"\1\u0137"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\u0138\1"
u"\uffff\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u013c"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0143"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0146"),
DFA.unpack(u"\1\u0147"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0148"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u014a"),
DFA.unpack(u""),
DFA.unpack(u"\1\u014b"),
DFA.unpack(u""),
DFA.unpack(u"\1\u014c"),
DFA.unpack(u"\1\u014d"),
DFA.unpack(u"\1\u014e"),
DFA.unpack(u"\1\u014f"),
DFA.unpack(u"\1\u0150"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0153"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0155"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0156"),
DFA.unpack(u"\1\u0157"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0159"),
DFA.unpack(u"\1\u015a"),
DFA.unpack(u"\1\u015b"),
DFA.unpack(u"\1\u015c"),
DFA.unpack(u"\1\u015d"),
DFA.unpack(u"\1\u015e"),
DFA.unpack(u"\1\u015f"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0166"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0168"),
DFA.unpack(u"\1\u0169"),
DFA.unpack(u"\1\u016a"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u016c"),
DFA.unpack(u"\1\u016d"),
DFA.unpack(u"\1\u016e"),
DFA.unpack(u""),
DFA.unpack(u"\1\u016f"),
DFA.unpack(u"\1\u0170"),
DFA.unpack(u"\1\u0171"),
DFA.unpack(u"\1\u0172"),
DFA.unpack(u"\1\u0173"),
DFA.unpack(u"\1\u0174"),
DFA.unpack(u"\1\u0175"),
DFA.unpack(u"\1\u0176"),
DFA.unpack(u"\1\u0177"),
DFA.unpack(u"\1\u0178"),
DFA.unpack(u"\1\u0179"),
DFA.unpack(u"\1\u017a"),
DFA.unpack(u"\1\u017b"),
DFA.unpack(u"\1\u017c"),
DFA.unpack(u"\1\u017d"),
DFA.unpack(u"\1\u017e"),
DFA.unpack(u"\1\u017f"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0181"),
DFA.unpack(u"\1\u0182"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0183"),
DFA.unpack(u"\1\u0184"),
DFA.unpack(u"\1\u0185"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0187"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0188"),
DFA.unpack(u"\1\u0189"),
DFA.unpack(u"\1\u018a"),
DFA.unpack(u"\1\u018b"),
DFA.unpack(u"\1\u018c"),
DFA.unpack(u"\1\u018d"),
DFA.unpack(u"\1\u018e"),
DFA.unpack(u"\1\u018f"),
DFA.unpack(u"\1\u0190"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"")
]
# class definition for DFA #35
# Alias binding the generic antlr3 DFA runner class; the lexer instantiates
# it with the DFA35_* tables above to drive token recognition.
DFA35 = DFA
| edk2-master | BaseTools/Source/Python/Ecc/CParser3/CLexer.py |
edk2-master | BaseTools/Source/Python/Ecc/CParser3/__init__.py |
|
# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
from __future__ import print_function
from __future__ import absolute_import
from antlr3 import *
from antlr3.compat import set, frozenset
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
from Ecc import CodeFragment
from Ecc import FileProfile
# for convenience in actions
# Convenience alias for the hidden-channel constant used in lexer actions.
HIDDEN = BaseRecognizer.HIDDEN
# token types
# Integer token-type codes generated by ANTLR from C.g; the values must
# match the indices used in tokenNames and the DFA tables.  Do not edit.
BS=20
LINE_COMMENT=23
FloatTypeSuffix=16
IntegerTypeSuffix=14
LETTER=11
OCTAL_LITERAL=6
CHARACTER_LITERAL=8
Exponent=15
EOF=-1
HexDigit=13
STRING_LITERAL=9
WS=19
FLOATING_POINT_LITERAL=10
IDENTIFIER=4
UnicodeEscape=18
LINE_COMMAND=24
UnicodeVocabulary=21
HEX_LITERAL=5
COMMENT=22
DECIMAL_LITERAL=7
EscapeSequence=12
OctalEscape=17
# token names
# Display names for each token type, indexed by the integer codes above
# (index 0-3 are the ANTLR built-ins).  Generated from C.g -- do not edit.
tokenNames = [
    "<invalid>", "<EOR>", "<DOWN>", "<UP>",
    "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
    "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
    "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
    "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
    "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
    "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
    "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
    "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
    "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
    "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
    "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
    "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
    "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
    "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
    "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
    "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
    "'goto'", "'continue'", "'break'", "'return'"
]
class function_definition_scope(object):
    """Dynamic scope pushed for each invocation of the function_definition rule.

    Holds the modifier/declarator text and the source positions (line,
    offset) of the opening brace and the declarator, filled in while the
    rule body runs.
    """
    def __init__(self):
        # All six bookkeeping fields start out unset.
        for field in ('ModifierText', 'DeclText', 'LBLine',
                      'LBOffset', 'DeclLine', 'DeclOffset'):
            setattr(self, field, None)
class postfix_expression_scope(object):
    """Dynamic scope pushed for each invocation of the postfix_expression rule."""
    def __init__(self):
        # Textual name of the callee, accumulated while the postfix
        # suffixes of a call expression are parsed; None until seen.
        self.FuncCallText = None
class CParser(Parser):
    """ANTLR 3.0.1-generated recognizer for the C grammar in C.g.

    Auto-generated -- see the file header; do not modify by hand.
    """
    grammarFileName = "C.g"
    tokenNames = tokenNames
    def __init__(self, input):
        # input: the token stream to parse.
        Parser.__init__(self, input)
        # Memoization table used by the backtracking machinery.
        self.ruleMemo = {}
        # Dynamic-scope stacks for rules that declare a scope block.
        self.function_definition_stack = []
        self.postfix_expression_stack = []
def printTokenInfo(self, line, offset, tokenText):
    """Debug helper: print a token as "<line>,<offset>:<text>" to stdout."""
    # %s applies str() to each field, matching the original
    # str()-concatenation output byte for byte.
    print('%s,%s:%s' % (line, offset, tokenText))
def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
    """Record a predicate-expression fragment spanning the given region."""
    # Build the fragment and hand it straight to the global profile list.
    FileProfile.PredicateExpressionList.append(
        CodeFragment.PredicateExpression(
            Text, (StartLine, StartOffset), (EndLine, EndOffset)
        )
    )
def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
    """Record an enum definition fragment spanning the given region."""
    # Build the fragment and hand it straight to the global profile list.
    FileProfile.EnumerationDefinitionList.append(
        CodeFragment.EnumerationDefinition(
            Text, (StartLine, StartOffset), (EndLine, EndOffset)
        )
    )
def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
    """Record a struct/union definition fragment spanning the given region."""
    # Build the fragment and hand it straight to the global profile list.
    FileProfile.StructUnionDefinitionList.append(
        CodeFragment.StructUnionDefinition(
            Text, (StartLine, StartOffset), (EndLine, EndOffset)
        )
    )
def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
    """Record a typedef (FromText -> ToText) spanning the given region."""
    # Build the fragment and hand it straight to the global profile list.
    FileProfile.TypedefDefinitionList.append(
        CodeFragment.TypedefDefinition(
            FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset)
        )
    )
def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
    """Record a function definition fragment in the global file profile.

    Captures the modifier/declarator text plus three (line, offset) spans:
    the whole definition, the opening '{', and the declarator position.
    """
    FileProfile.FunctionDefinitionList.append(
        CodeFragment.FunctionDefinition(
            ModifierText,
            DeclText,
            (StartLine, StartOffset),
            (EndLine, EndOffset),
            (LeftBraceLine, LeftBraceOffset),
            (DeclLine, DeclOffset)
        )
    )
def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
    """Record a variable declaration fragment spanning the given region."""
    # Build the fragment and hand it straight to the global profile list.
    FileProfile.VariableDeclarationList.append(
        CodeFragment.VariableDeclaration(
            ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset)
        )
    )
def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
    """Record a function call (name plus parameter text) spanning the given region."""
    # Build the fragment and hand it straight to the global profile list.
    FileProfile.FunctionCallingList.append(
        CodeFragment.FunctionCalling(
            FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset)
        )
    )
# $ANTLR start translation_unit
# C.g:102:1: translation_unit : ( external_declaration )* ;
def translation_unit(self, ):
    """Generated rule: consume zero or more external_declarations until EOF.

    Errors are reported and recovered from via the ANTLR runtime; when
    backtracking, success/failure is memoized under rule index 1.
    """
    translation_unit_StartIndex = self.input.index()
    try:
        try:
            # Fast path while backtracking: reuse a memoized parse result.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
                return
            # C.g:103:2: ( ( external_declaration )* )
            # C.g:103:4: ( external_declaration )*
            # C.g:103:4: ( external_declaration )*
            while True: #loop1
                alt1 = 2
                # One token of lookahead decides whether another
                # external_declaration can start here.
                LA1_0 = self.input.LA(1)
                if (LA1_0 == IDENTIFIER or LA1_0 == 26 or (29 <= LA1_0 <= 42) or (45 <= LA1_0 <= 46) or (48 <= LA1_0 <= 62) or LA1_0 == 66) :
                    alt1 = 1
                if alt1 == 1:
                    # C.g:0:0: external_declaration
                    self.following.append(self.FOLLOW_external_declaration_in_translation_unit74)
                    self.external_declaration()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop1
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Memoize the outcome for this start index when backtracking.
        if self.backtracking > 0:
            self.memoize(self.input, 1, translation_unit_StartIndex)
        pass
    return
# $ANTLR end translation_unit
# $ANTLR start external_declaration
# C.g:114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );
def external_declaration(self, ):
    """Generated rule: parse one external declaration.

    Chooses among three alternatives (function_definition, declaration,
    macro_statement followed by an optional ';') using one token of
    lookahead plus the syntactic predicates synpred4/synpred5.  When
    backtracking, results are memoized under rule index 2.
    """
    external_declaration_StartIndex = self.input.index()
    try:
        try:
            # Fast path while backtracking: reuse a memoized parse result.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2):
                return
            # C.g:119:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? )
            alt3 = 3
            # Generated prediction: dispatch on the first token, then let
            # the synpred4/synpred5 backtracking predicates disambiguate.
            LA3_0 = self.input.LA(1)
            if ((29 <= LA3_0 <= 33)) :
                LA3_1 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input)
                    raise nvae
            elif (LA3_0 == 34) :
                LA3_2 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input)
                    raise nvae
            elif (LA3_0 == 35) :
                LA3_3 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input)
                    raise nvae
            elif (LA3_0 == 36) :
                LA3_4 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 4, self.input)
                    raise nvae
            elif (LA3_0 == 37) :
                LA3_5 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input)
                    raise nvae
            elif (LA3_0 == 38) :
                LA3_6 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input)
                    raise nvae
            elif (LA3_0 == 39) :
                LA3_7 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input)
                    raise nvae
            elif (LA3_0 == 40) :
                LA3_8 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 8, self.input)
                    raise nvae
            elif (LA3_0 == 41) :
                LA3_9 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input)
                    raise nvae
            elif (LA3_0 == 42) :
                LA3_10 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input)
                    raise nvae
            elif ((45 <= LA3_0 <= 46)) :
                LA3_11 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input)
                    raise nvae
            elif (LA3_0 == 48) :
                LA3_12 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 12, self.input)
                    raise nvae
            elif (LA3_0 == IDENTIFIER) :
                LA3_13 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                elif (True) :
                    # An identifier that is neither alternative falls back
                    # to the macro_statement alternative.
                    alt3 = 3
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input)
                    raise nvae
            elif (LA3_0 == 58) :
                LA3_14 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input)
                    raise nvae
            elif (LA3_0 == 66) and (self.synpred4()):
                alt3 = 1
            elif (LA3_0 == 59) :
                LA3_16 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input)
                    raise nvae
            elif (LA3_0 == 60) :
                LA3_17 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 17, self.input)
                    raise nvae
            elif ((49 <= LA3_0 <= 57) or LA3_0 == 61) :
                LA3_18 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input)
                    raise nvae
            elif (LA3_0 == 62) and (self.synpred4()):
                alt3 = 1
            elif (LA3_0 == 26) :
                alt3 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input)
                raise nvae
            if alt3 == 1:
                # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition
                self.following.append(self.FOLLOW_function_definition_in_external_declaration113)
                self.function_definition()
                self.following.pop()
                if self.failed:
                    return
            elif alt3 == 2:
                # C.g:120:4: declaration
                self.following.append(self.FOLLOW_declaration_in_external_declaration118)
                self.declaration()
                self.following.pop()
                if self.failed:
                    return
            elif alt3 == 3:
                # C.g:121:4: macro_statement ( ';' )?
                self.following.append(self.FOLLOW_macro_statement_in_external_declaration123)
                self.macro_statement()
                self.following.pop()
                if self.failed:
                    return
                # C.g:121:20: ( ';' )?
                alt2 = 2
                LA2_0 = self.input.LA(1)
                if (LA2_0 == 25) :
                    alt2 = 1
                if alt2 == 1:
                    # C.g:121:21: ';'
                    self.match(self.input, 25, self.FOLLOW_25_in_external_declaration126)
                    if self.failed:
                        return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Memoize the outcome for this start index when backtracking.
        if self.backtracking > 0:
            self.memoize(self.input, 2, external_declaration_StartIndex)
        pass
    return
# $ANTLR end external_declaration
class function_definition_return(object):
    """Return scaffold for the function_definition rule.

    Holds the first and last tokens matched by the rule so callers can
    recover the rule's source span.
    """
    def __init__(self):
        # Boundary tokens; the parser fills these in during the rule.
        self.start = self.stop = None
# $ANTLR start function_definition
# C.g:126:1: function_definition : (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) ;
def function_definition(self, ):
    """Parse one C function definition (generated rule index 3).

    Matches an optional declaration-specifier list, a declarator, and then
    either K&R-style parameter declarations followed by a compound
    statement, or a plain compound statement.  When not backtracking, the
    matched text and source coordinates are forwarded to
    self.StoreFunctionDefinition().
    """
    self.function_definition_stack.append(function_definition_scope())
    retval = self.function_definition_return()
    retval.start = self.input.LT(1)
    function_definition_StartIndex = self.input.index()
    d = None  # declaration_specifiers return value (modifiers), if any
    a = None  # compound_statement return for the K&R-style alternative
    b = None  # compound_statement return for the ANSI-style alternative
    declarator1 = None
    # Initialize the rule-scope attributes read by the final action.
    self.function_definition_stack[-1].ModifierText = ''
    self.function_definition_stack[-1].DeclText = ''
    self.function_definition_stack[-1].LBLine = 0
    self.function_definition_stack[-1].LBOffset = 0
    self.function_definition_stack[-1].DeclLine = 0
    self.function_definition_stack[-1].DeclOffset = 0
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 3):
                return retval
            # C.g:146:2: ( (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) )
            # C.g:146:4: (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement )
            # C.g:146:5: (d= declaration_specifiers )?
            # Generated lookahead: decide whether a declaration_specifiers
            # list precedes the declarator (syntactic predicate synpred7).
            alt4 = 2
            LA4 = self.input.LA(1)
            if LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33 or LA4 == 34 or LA4 == 35 or LA4 == 36 or LA4 == 37 or LA4 == 38 or LA4 == 39 or LA4 == 40 or LA4 == 41 or LA4 == 42 or LA4 == 45 or LA4 == 46 or LA4 == 48 or LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
                alt4 = 1
            elif LA4 == IDENTIFIER:
                # First token is an identifier: inspect further lookahead.
                LA4 = self.input.LA(2)
                if LA4 == 66:
                    alt4 = 1
                elif LA4 == 58:
                    LA4_21 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 59:
                    LA4_22 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 60:
                    LA4_23 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == IDENTIFIER:
                    LA4_24 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 62:
                    LA4_25 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33:
                    LA4_26 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 34:
                    LA4_27 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 35:
                    LA4_28 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 36:
                    LA4_29 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 37:
                    LA4_30 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 38:
                    LA4_31 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 39:
                    LA4_32 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 40:
                    LA4_33 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 41:
                    LA4_34 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 42:
                    LA4_35 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 45 or LA4 == 46:
                    LA4_36 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 48:
                    LA4_37 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
                    LA4_38 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
            elif LA4 == 58:
                LA4_14 = self.input.LA(2)
                if (self.synpred7()) :
                    alt4 = 1
            elif LA4 == 59:
                LA4_16 = self.input.LA(2)
                if (self.synpred7()) :
                    alt4 = 1
            elif LA4 == 60:
                LA4_17 = self.input.LA(2)
                if (self.synpred7()) :
                    alt4 = 1
            if alt4 == 1:
                # C.g:0:0: d= declaration_specifiers
                self.following.append(self.FOLLOW_declaration_specifiers_in_function_definition157)
                d = self.declaration_specifiers()
                self.following.pop()
                if self.failed:
                    return retval
            self.following.append(self.FOLLOW_declarator_in_function_definition160)
            declarator1 = self.declarator()
            self.following.pop()
            if self.failed:
                return retval
            # C.g:147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )
            alt6 = 2
            LA6_0 = self.input.LA(1)
            if (LA6_0 == IDENTIFIER or LA6_0 == 26 or (29 <= LA6_0 <= 42) or (45 <= LA6_0 <= 46) or (48 <= LA6_0 <= 61)) :
                alt6 = 1
            elif (LA6_0 == 43) :
                alt6 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )", 6, 0, self.input)
                raise nvae
            if alt6 == 1:
                # C.g:147:5: ( declaration )+ a= compound_statement
                # C.g:147:5: ( declaration )+
                # K&R style: one or more parameter declarations before the body.
                cnt5 = 0
                while True: #loop5
                    alt5 = 2
                    LA5_0 = self.input.LA(1)
                    if (LA5_0 == IDENTIFIER or LA5_0 == 26 or (29 <= LA5_0 <= 42) or (45 <= LA5_0 <= 46) or (48 <= LA5_0 <= 61)) :
                        alt5 = 1
                    if alt5 == 1:
                        # C.g:0:0: declaration
                        self.following.append(self.FOLLOW_declaration_in_function_definition166)
                        self.declaration()
                        self.following.pop()
                        if self.failed:
                            return retval
                    else:
                        if cnt5 >= 1:
                            break #loop5
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        eee = EarlyExitException(5, self.input)
                        raise eee
                    cnt5 += 1
                self.following.append(self.FOLLOW_compound_statement_in_function_definition171)
                a = self.compound_statement()
                self.following.pop()
                if self.failed:
                    return retval
            elif alt6 == 2:
                # C.g:148:5: b= compound_statement
                self.following.append(self.FOLLOW_compound_statement_in_function_definition180)
                b = self.compound_statement()
                self.following.pop()
                if self.failed:
                    return retval
            if self.backtracking == 0:
                # Rule action: capture matched text and positions into the
                # rule scope for the StoreFunctionDefinition() call below.
                if d is not None:
                    self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
                else:
                    self.function_definition_stack[-1].ModifierText = ''
                self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start, declarator1.stop)
                self.function_definition_stack[-1].DeclLine = declarator1.start.line
                self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
                if a is not None:
                    self.function_definition_stack[-1].LBLine = a.start.line
                    self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
                else:
                    self.function_definition_stack[-1].LBLine = b.start.line
                    self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine
            retval.stop = self.input.LT(-1)
            if self.backtracking == 0:
                self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)
        except RecognitionException as re:
            # NOTE(review): 're' shadows the module-level 're' import; kept
            # as-is because this file is ANTLR-generated.
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 3, function_definition_StartIndex)
        self.function_definition_stack.pop()
        pass
    return retval
# $ANTLR end function_definition
# $ANTLR start declaration
# C.g:166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );
def declaration(self, ):
    """Parse one C declaration (generated rule index 4).

    Alternative 1 is a typedef; alternative 2 is an ordinary variable
    declaration.  When not backtracking, the matched span is reported via
    StoreTypedefDefinition() or StoreVariableDeclaration() respectively.
    """
    declaration_StartIndex = self.input.index()
    a = None  # 'typedef' token
    d = None  # ';' token closing the typedef form
    e = None  # ';' token closing the variable form
    b = None  # declaration_specifiers in the typedef form, if any
    c = None  # init_declarator_list in the typedef form
    s = None  # declaration_specifiers in the variable form
    t = None  # init_declarator_list in the variable form, if any
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4):
                return
            # C.g:167:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' )
            alt9 = 2
            LA9_0 = self.input.LA(1)
            if (LA9_0 == 26) :
                alt9 = 1
            elif (LA9_0 == IDENTIFIER or (29 <= LA9_0 <= 42) or (45 <= LA9_0 <= 46) or (48 <= LA9_0 <= 61)) :
                alt9 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input)
                raise nvae
            if alt9 == 1:
                # C.g:167:4: a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';'
                a = self.input.LT(1)
                self.match(self.input, 26, self.FOLLOW_26_in_declaration203)
                if self.failed:
                    return
                # C.g:167:17: (b= declaration_specifiers )?
                # Lookahead with syntactic predicate synpred10 decides
                # whether specifiers follow the 'typedef' keyword.
                alt7 = 2
                LA7 = self.input.LA(1)
                if LA7 == 29 or LA7 == 30 or LA7 == 31 or LA7 == 32 or LA7 == 33 or LA7 == 34 or LA7 == 35 or LA7 == 36 or LA7 == 37 or LA7 == 38 or LA7 == 39 or LA7 == 40 or LA7 == 41 or LA7 == 42 or LA7 == 45 or LA7 == 46 or LA7 == 48 or LA7 == 49 or LA7 == 50 or LA7 == 51 or LA7 == 52 or LA7 == 53 or LA7 == 54 or LA7 == 55 or LA7 == 56 or LA7 == 57 or LA7 == 61:
                    alt7 = 1
                elif LA7 == IDENTIFIER:
                    LA7_13 = self.input.LA(2)
                    if (LA7_13 == 62) :
                        LA7_21 = self.input.LA(3)
                        if (self.synpred10()) :
                            alt7 = 1
                    elif (LA7_13 == IDENTIFIER or (29 <= LA7_13 <= 42) or (45 <= LA7_13 <= 46) or (48 <= LA7_13 <= 61) or LA7_13 == 66) :
                        alt7 = 1
                elif LA7 == 58:
                    LA7_14 = self.input.LA(2)
                    if (self.synpred10()) :
                        alt7 = 1
                elif LA7 == 59:
                    LA7_16 = self.input.LA(2)
                    if (self.synpred10()) :
                        alt7 = 1
                elif LA7 == 60:
                    LA7_17 = self.input.LA(2)
                    if (self.synpred10()) :
                        alt7 = 1
                if alt7 == 1:
                    # C.g:0:0: b= declaration_specifiers
                    self.following.append(self.FOLLOW_declaration_specifiers_in_declaration207)
                    b = self.declaration_specifiers()
                    self.following.pop()
                    if self.failed:
                        return
                self.following.append(self.FOLLOW_init_declarator_list_in_declaration216)
                c = self.init_declarator_list()
                self.following.pop()
                if self.failed:
                    return
                d = self.input.LT(1)
                self.match(self.input, 25, self.FOLLOW_25_in_declaration220)
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Report the typedef; modifier text is empty when no
                    # declaration_specifiers were present.
                    if b is not None:
                        self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
                    else:
                        self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))
            elif alt9 == 2:
                # C.g:175:4: s= declaration_specifiers (t= init_declarator_list )? e= ';'
                self.following.append(self.FOLLOW_declaration_specifiers_in_declaration234)
                s = self.declaration_specifiers()
                self.following.pop()
                if self.failed:
                    return
                # C.g:175:30: (t= init_declarator_list )?
                alt8 = 2
                LA8_0 = self.input.LA(1)
                if (LA8_0 == IDENTIFIER or (58 <= LA8_0 <= 60) or LA8_0 == 62 or LA8_0 == 66) :
                    alt8 = 1
                if alt8 == 1:
                    # C.g:0:0: t= init_declarator_list
                    self.following.append(self.FOLLOW_init_declarator_list_in_declaration238)
                    t = self.init_declarator_list()
                    self.following.pop()
                    if self.failed:
                        return
                e = self.input.LT(1)
                self.match(self.input, 25, self.FOLLOW_25_in_declaration243)
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Only declarations with declarators are recorded.
                    if t is not None:
                        self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 4, declaration_StartIndex)
        pass
    return
# $ANTLR end declaration
class declaration_specifiers_return(object):
    """Return scaffold for the declaration_specifiers rule.

    Records the first and last matched tokens so callers can extract the
    rule's source text.
    """
    def __init__(self):
        # Boundary tokens; the parser fills these in during the rule.
        self.start = self.stop = None
# $ANTLR start declaration_specifiers
# C.g:182:1: declaration_specifiers : ( storage_class_specifier | type_specifier | type_qualifier )+ ;
def declaration_specifiers(self, ):
    """Parse one or more specifiers/qualifiers (generated rule index 5).

    Loops over storage-class specifiers, type specifiers, and type
    qualifiers, requiring at least one match; returns the matched span in
    a declaration_specifiers_return.
    """
    retval = self.declaration_specifiers_return()
    retval.start = self.input.LT(1)
    declaration_specifiers_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 5):
                return retval
            # C.g:183:2: ( ( storage_class_specifier | type_specifier | type_qualifier )+ )
            # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
            # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
            cnt10 = 0
            while True: #loop10
                # alt10 stays 4 (no viable alternative) unless lookahead
                # selects one of the three branches below.
                alt10 = 4
                LA10 = self.input.LA(1)
                if LA10 == 58:
                    LA10_2 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == 59:
                    LA10_3 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == 60:
                    LA10_4 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == IDENTIFIER:
                    LA10_5 = self.input.LA(2)
                    if (self.synpred14()) :
                        alt10 = 2
                elif LA10 == 53:
                    LA10_9 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == 29 or LA10 == 30 or LA10 == 31 or LA10 == 32 or LA10 == 33:
                    alt10 = 1
                elif LA10 == 34 or LA10 == 35 or LA10 == 36 or LA10 == 37 or LA10 == 38 or LA10 == 39 or LA10 == 40 or LA10 == 41 or LA10 == 42 or LA10 == 45 or LA10 == 46 or LA10 == 48:
                    alt10 = 2
                elif LA10 == 49 or LA10 == 50 or LA10 == 51 or LA10 == 52 or LA10 == 54 or LA10 == 55 or LA10 == 56 or LA10 == 57 or LA10 == 61:
                    alt10 = 3
                if alt10 == 1:
                    # C.g:183:10: storage_class_specifier
                    self.following.append(self.FOLLOW_storage_class_specifier_in_declaration_specifiers264)
                    self.storage_class_specifier()
                    self.following.pop()
                    if self.failed:
                        return retval
                elif alt10 == 2:
                    # C.g:184:7: type_specifier
                    self.following.append(self.FOLLOW_type_specifier_in_declaration_specifiers272)
                    self.type_specifier()
                    self.following.pop()
                    if self.failed:
                        return retval
                elif alt10 == 3:
                    # C.g:185:13: type_qualifier
                    self.following.append(self.FOLLOW_type_qualifier_in_declaration_specifiers286)
                    self.type_qualifier()
                    self.following.pop()
                    if self.failed:
                        return retval
                else:
                    # No branch matched: legal only after at least one
                    # iteration of the (+) loop.
                    if cnt10 >= 1:
                        break #loop10
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    eee = EarlyExitException(10, self.input)
                    raise eee
                cnt10 += 1
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 5, declaration_specifiers_StartIndex)
        pass
    return retval
# $ANTLR end declaration_specifiers
class init_declarator_list_return(object):
    """Return scaffold for the init_declarator_list rule.

    Exposes the rule's start/stop boundary tokens.
    """
    def __init__(self):
        # Boundary tokens; the parser fills these in during the rule.
        self.start = self.stop = None
# $ANTLR start init_declarator_list
# C.g:189:1: init_declarator_list : init_declarator ( ',' init_declarator )* ;
def init_declarator_list(self, ):
    """Parse a comma-separated declarator list (generated rule index 6).

    Matches one init_declarator followed by any number of
    ',' init_declarator pairs (token 27 is ',').
    """
    retval = self.init_declarator_list_return()
    retval.start = self.input.LT(1)
    init_declarator_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 6):
                return retval
            # C.g:190:2: ( init_declarator ( ',' init_declarator )* )
            # C.g:190:4: init_declarator ( ',' init_declarator )*
            self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list308)
            self.init_declarator()
            self.following.pop()
            if self.failed:
                return retval
            # C.g:190:20: ( ',' init_declarator )*
            while True: #loop11
                alt11 = 2
                LA11_0 = self.input.LA(1)
                if (LA11_0 == 27) :
                    alt11 = 1
                if alt11 == 1:
                    # C.g:190:21: ',' init_declarator
                    self.match(self.input, 27, self.FOLLOW_27_in_init_declarator_list311)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list313)
                    self.init_declarator()
                    self.following.pop()
                    if self.failed:
                        return retval
                else:
                    break #loop11
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 6, init_declarator_list_StartIndex)
        pass
    return retval
# $ANTLR end init_declarator_list
# $ANTLR start init_declarator
# C.g:193:1: init_declarator : declarator ( '=' initializer )? ;
def init_declarator(self, ):
    """Parse a declarator with an optional initializer (rule index 7).

    Token 28 is '='; when present, an initializer follows.
    """
    init_declarator_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7):
                return
            # C.g:194:2: ( declarator ( '=' initializer )? )
            # C.g:194:4: declarator ( '=' initializer )?
            self.following.append(self.FOLLOW_declarator_in_init_declarator326)
            self.declarator()
            self.following.pop()
            if self.failed:
                return
            # C.g:194:15: ( '=' initializer )?
            alt12 = 2
            LA12_0 = self.input.LA(1)
            if (LA12_0 == 28) :
                alt12 = 1
            if alt12 == 1:
                # C.g:194:16: '=' initializer
                self.match(self.input, 28, self.FOLLOW_28_in_init_declarator329)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_initializer_in_init_declarator331)
                self.initializer()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 7, init_declarator_StartIndex)
        pass
    return
# $ANTLR end init_declarator
# $ANTLR start storage_class_specifier
# C.g:197:1: storage_class_specifier : ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' );
def storage_class_specifier(self, ):
    """Match one storage-class keyword (rule index 8).

    Accepts any token in the type range [29, 33], i.e. 'extern',
    'static', 'auto', 'register', or 'STATIC'.
    """
    storage_class_specifier_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8):
                return
            # C.g:198:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' )
            # C.g:
            # Set match: consume the token if it is in the accepted range.
            if (29 <= self.input.LA(1) <= 33):
                self.input.consume();
                self.errorRecovery = False
                self.failed = False
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_storage_class_specifier0
                    )
                raise mse
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 8, storage_class_specifier_StartIndex)
        pass
    return
# $ANTLR end storage_class_specifier
# $ANTLR start type_specifier
# C.g:205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );
def type_specifier(self, ):
    """Parse one type specifier (generated rule index 9).

    Handles the built-in type keywords (tokens 34-42), struct/union and
    enum specifiers, and -- via syntactic predicate synpred34 -- an
    identifier used as a typedef'd type name.  Matched struct/union and
    enum definitions are reported to StoreStructUnionDefinition() /
    StoreEnumerationDefinition() when not backtracking.
    """
    type_specifier_StartIndex = self.input.index()
    s = None  # struct_or_union_specifier return value
    e = None  # enum_specifier return value
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9):
                return
            # C.g:206:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id )
            alt13 = 12
            LA13_0 = self.input.LA(1)
            if (LA13_0 == 34) :
                alt13 = 1
            elif (LA13_0 == 35) :
                alt13 = 2
            elif (LA13_0 == 36) :
                alt13 = 3
            elif (LA13_0 == 37) :
                alt13 = 4
            elif (LA13_0 == 38) :
                alt13 = 5
            elif (LA13_0 == 39) :
                alt13 = 6
            elif (LA13_0 == 40) :
                alt13 = 7
            elif (LA13_0 == 41) :
                alt13 = 8
            elif (LA13_0 == 42) :
                alt13 = 9
            elif ((45 <= LA13_0 <= 46)) :
                alt13 = 10
            elif (LA13_0 == 48) :
                alt13 = 11
            elif (LA13_0 == IDENTIFIER) and (self.synpred34()):
                alt13 = 12
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input)
                raise nvae
            if alt13 == 1:
                # C.g:206:4: 'void'
                self.match(self.input, 34, self.FOLLOW_34_in_type_specifier376)
                if self.failed:
                    return
            elif alt13 == 2:
                # C.g:207:4: 'char'
                self.match(self.input, 35, self.FOLLOW_35_in_type_specifier381)
                if self.failed:
                    return
            elif alt13 == 3:
                # C.g:208:4: 'short'
                self.match(self.input, 36, self.FOLLOW_36_in_type_specifier386)
                if self.failed:
                    return
            elif alt13 == 4:
                # C.g:209:4: 'int'
                self.match(self.input, 37, self.FOLLOW_37_in_type_specifier391)
                if self.failed:
                    return
            elif alt13 == 5:
                # C.g:210:4: 'long'
                self.match(self.input, 38, self.FOLLOW_38_in_type_specifier396)
                if self.failed:
                    return
            elif alt13 == 6:
                # C.g:211:4: 'float'
                self.match(self.input, 39, self.FOLLOW_39_in_type_specifier401)
                if self.failed:
                    return
            elif alt13 == 7:
                # C.g:212:4: 'double'
                self.match(self.input, 40, self.FOLLOW_40_in_type_specifier406)
                if self.failed:
                    return
            elif alt13 == 8:
                # C.g:213:4: 'signed'
                self.match(self.input, 41, self.FOLLOW_41_in_type_specifier411)
                if self.failed:
                    return
            elif alt13 == 9:
                # C.g:214:4: 'unsigned'
                self.match(self.input, 42, self.FOLLOW_42_in_type_specifier416)
                if self.failed:
                    return
            elif alt13 == 10:
                # C.g:215:4: s= struct_or_union_specifier
                self.following.append(self.FOLLOW_struct_or_union_specifier_in_type_specifier423)
                s = self.struct_or_union_specifier()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Only complete definitions (stop token set) are stored.
                    if s.stop is not None:
                        self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
            elif alt13 == 11:
                # C.g:220:4: e= enum_specifier
                self.following.append(self.FOLLOW_enum_specifier_in_type_specifier433)
                e = self.enum_specifier()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Only complete definitions (stop token set) are stored.
                    if e.stop is not None:
                        self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
            elif alt13 == 12:
                # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )=> type_id
                self.following.append(self.FOLLOW_type_id_in_type_specifier451)
                self.type_id()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 9, type_specifier_StartIndex)
        pass
    return
# $ANTLR end type_specifier
# $ANTLR start type_id
# C.g:228:1: type_id : IDENTIFIER ;
def type_id(self, ):
    """Match a single IDENTIFIER used as a type name (rule index 10)."""
    type_id_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10):
                return
            # C.g:229:5: ( IDENTIFIER )
            # C.g:229:9: IDENTIFIER
            self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id467)
            if self.failed:
                return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 10, type_id_StartIndex)
        pass
    return
# $ANTLR end type_id
class struct_or_union_specifier_return(object):
    """Return scaffold for the struct_or_union_specifier rule.

    Exposes the rule's start/stop boundary tokens.
    """
    def __init__(self):
        # Boundary tokens; the parser fills these in during the rule.
        self.start = self.stop = None
# $ANTLR start struct_or_union_specifier
# C.g:233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );
def struct_or_union_specifier(self, ):
    """Parse a struct/union specifier (generated rule index 11).

    Alternative 1 is a full definition with a '{...}' body (token 43 is
    '{', 44 is '}'); alternative 2 is a bare reference by tag name.
    Uses k=3 lookahead to distinguish the two.
    """
    retval = self.struct_or_union_specifier_return()
    retval.start = self.input.LT(1)
    struct_or_union_specifier_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 11):
                return retval
            # C.g:235:2: ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER )
            alt15 = 2
            LA15_0 = self.input.LA(1)
            if ((45 <= LA15_0 <= 46)) :
                LA15_1 = self.input.LA(2)
                if (LA15_1 == IDENTIFIER) :
                    # Tag name present: a following '{' means a definition.
                    LA15_2 = self.input.LA(3)
                    if (LA15_2 == 43) :
                        alt15 = 1
                    elif (LA15_2 == EOF or LA15_2 == IDENTIFIER or LA15_2 == 25 or LA15_2 == 27 or (29 <= LA15_2 <= 42) or (45 <= LA15_2 <= 64) or LA15_2 == 66) :
                        alt15 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 2, self.input)
                        raise nvae
                elif (LA15_1 == 43) :
                    alt15 = 1
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 1, self.input)
                    raise nvae
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 0, self.input)
                raise nvae
            if alt15 == 1:
                # C.g:235:4: struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}'
                self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier494)
                self.struct_or_union()
                self.following.pop()
                if self.failed:
                    return retval
                # C.g:235:20: ( IDENTIFIER )?
                alt14 = 2
                LA14_0 = self.input.LA(1)
                if (LA14_0 == IDENTIFIER) :
                    alt14 = 1
                if alt14 == 1:
                    # C.g:0:0: IDENTIFIER
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier496)
                    if self.failed:
                        return retval
                self.match(self.input, 43, self.FOLLOW_43_in_struct_or_union_specifier499)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_struct_declaration_list_in_struct_or_union_specifier501)
                self.struct_declaration_list()
                self.following.pop()
                if self.failed:
                    return retval
                self.match(self.input, 44, self.FOLLOW_44_in_struct_or_union_specifier503)
                if self.failed:
                    return retval
            elif alt15 == 2:
                # C.g:236:4: struct_or_union IDENTIFIER
                self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier508)
                self.struct_or_union()
                self.following.pop()
                if self.failed:
                    return retval
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier510)
                if self.failed:
                    return retval
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 11, struct_or_union_specifier_StartIndex)
        pass
    return retval
# $ANTLR end struct_or_union_specifier
# $ANTLR start struct_or_union
# C.g:239:1: struct_or_union : ( 'struct' | 'union' );
def struct_or_union(self, ):
    """Match the 'struct' or 'union' keyword (rule index 12).

    Accepts any token in the type range [45, 46].
    """
    struct_or_union_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12):
                return
            # C.g:240:2: ( 'struct' | 'union' )
            # C.g:
            # Set match: consume the token if it is in the accepted range.
            if (45 <= self.input.LA(1) <= 46):
                self.input.consume();
                self.errorRecovery = False
                self.failed = False
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_struct_or_union0
                    )
                raise mse
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 12, struct_or_union_StartIndex)
        pass
    return
# $ANTLR end struct_or_union
# $ANTLR start struct_declaration_list
# C.g:244:1: struct_declaration_list : ( struct_declaration )+ ;
def struct_declaration_list(self, ):
    """Parse one or more struct member declarations (rule index 13)."""
    struct_declaration_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13):
                return
            # C.g:245:2: ( ( struct_declaration )+ )
            # C.g:245:4: ( struct_declaration )+
            # C.g:245:4: ( struct_declaration )+
            cnt16 = 0
            while True: #loop16
                alt16 = 2
                LA16_0 = self.input.LA(1)
                if (LA16_0 == IDENTIFIER or (34 <= LA16_0 <= 42) or (45 <= LA16_0 <= 46) or (48 <= LA16_0 <= 61)) :
                    alt16 = 1
                if alt16 == 1:
                    # C.g:0:0: struct_declaration
                    self.following.append(self.FOLLOW_struct_declaration_in_struct_declaration_list537)
                    self.struct_declaration()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    # Loop exit: legal only after at least one member.
                    if cnt16 >= 1:
                        break #loop16
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    eee = EarlyExitException(16, self.input)
                    raise eee
                cnt16 += 1
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 13, struct_declaration_list_StartIndex)
        pass
    return
# $ANTLR end struct_declaration_list
# $ANTLR start struct_declaration
# C.g:248:1: struct_declaration : specifier_qualifier_list struct_declarator_list ';' ;
def struct_declaration(self, ):
    """Parse one struct member declaration (rule index 14).

    Matches a specifier/qualifier list, a declarator list, and the
    terminating ';' (token 25).
    """
    struct_declaration_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14):
                return
            # C.g:249:2: ( specifier_qualifier_list struct_declarator_list ';' )
            # C.g:249:4: specifier_qualifier_list struct_declarator_list ';'
            self.following.append(self.FOLLOW_specifier_qualifier_list_in_struct_declaration549)
            self.specifier_qualifier_list()
            self.following.pop()
            if self.failed:
                return
            self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration551)
            self.struct_declarator_list()
            self.following.pop()
            if self.failed:
                return
            self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration553)
            if self.failed:
                return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 14, struct_declaration_StartIndex)
        pass
    return
# $ANTLR end struct_declaration
# $ANTLR start specifier_qualifier_list
# C.g:252:1: specifier_qualifier_list : ( type_qualifier | type_specifier )+ ;
def specifier_qualifier_list(self, ):
    """Parse one or more type qualifiers/specifiers (rule index 15).

    Lookahead (with syntactic predicates synpred39/synpred40) selects
    between type_qualifier (alt 1) and type_specifier (alt 2) on each
    iteration; at least one match is required.
    """
    specifier_qualifier_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15):
                return
            # C.g:253:2: ( ( type_qualifier | type_specifier )+ )
            # C.g:253:4: ( type_qualifier | type_specifier )+
            # C.g:253:4: ( type_qualifier | type_specifier )+
            cnt17 = 0
            while True: #loop17
                # alt17 stays 3 (no viable alternative) unless lookahead
                # selects one of the two branches below.
                alt17 = 3
                LA17 = self.input.LA(1)
                if LA17 == 58:
                    LA17_2 = self.input.LA(2)
                    if (self.synpred39()) :
                        alt17 = 1
                elif LA17 == 59:
                    LA17_3 = self.input.LA(2)
                    if (self.synpred39()) :
                        alt17 = 1
                elif LA17 == 60:
                    LA17_4 = self.input.LA(2)
                    if (self.synpred39()) :
                        alt17 = 1
                elif LA17 == IDENTIFIER:
                    # Identifier could be a typedef'd type name: look ahead.
                    LA17 = self.input.LA(2)
                    if LA17 == EOF or LA17 == IDENTIFIER or LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48 or LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 58 or LA17 == 59 or LA17 == 60 or LA17 == 61 or LA17 == 63 or LA17 == 66:
                        alt17 = 2
                    elif LA17 == 62:
                        LA17_94 = self.input.LA(3)
                        if (self.synpred40()) :
                            alt17 = 2
                    elif LA17 == 47:
                        LA17_95 = self.input.LA(3)
                        if (self.synpred40()) :
                            alt17 = 2
                    elif LA17 == 64:
                        LA17_96 = self.input.LA(3)
                        if (self.synpred40()) :
                            alt17 = 2
                elif LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 61:
                    alt17 = 1
                elif LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48:
                    alt17 = 2
                if alt17 == 1:
                    # C.g:253:6: type_qualifier
                    self.following.append(self.FOLLOW_type_qualifier_in_specifier_qualifier_list566)
                    self.type_qualifier()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt17 == 2:
                    # C.g:253:23: type_specifier
                    self.following.append(self.FOLLOW_type_specifier_in_specifier_qualifier_list570)
                    self.type_specifier()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    # Loop exit: legal only after at least one iteration.
                    if cnt17 >= 1:
                        break #loop17
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    eee = EarlyExitException(17, self.input)
                    raise eee
                cnt17 += 1
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 15, specifier_qualifier_list_StartIndex)
        pass
    return
# $ANTLR end specifier_qualifier_list
# $ANTLR start struct_declarator_list
# C.g:256:1: struct_declarator_list : struct_declarator ( ',' struct_declarator )* ;
def struct_declarator_list(self, ):
    """Parse a comma-separated struct declarator list (rule index 16).

    Token 27 is ','.
    """
    struct_declarator_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16):
                return
            # C.g:257:2: ( struct_declarator ( ',' struct_declarator )* )
            # C.g:257:4: struct_declarator ( ',' struct_declarator )*
            self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list584)
            self.struct_declarator()
            self.following.pop()
            if self.failed:
                return
            # C.g:257:22: ( ',' struct_declarator )*
            while True: #loop18
                alt18 = 2
                LA18_0 = self.input.LA(1)
                if (LA18_0 == 27) :
                    alt18 = 1
                if alt18 == 1:
                    # C.g:257:23: ',' struct_declarator
                    self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list587)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list589)
                    self.struct_declarator()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop18
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 16, struct_declarator_list_StartIndex)
        pass
    return
# $ANTLR end struct_declarator_list
# $ANTLR start struct_declarator
# C.g:260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );
def struct_declarator(self, ):
    """Parse one struct declarator, possibly a bit-field (rule index 17).

    Alternative 1 is a declarator with an optional ':' width; alternative
    2 is an anonymous bit-field (':' width only).  Token 47 is ':'.
    """
    struct_declarator_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path used while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17):
                return
            # C.g:261:2: ( declarator ( ':' constant_expression )? | ':' constant_expression )
            alt20 = 2
            LA20_0 = self.input.LA(1)
            if (LA20_0 == IDENTIFIER or (58 <= LA20_0 <= 60) or LA20_0 == 62 or LA20_0 == 66) :
                alt20 = 1
            elif (LA20_0 == 47) :
                alt20 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input)
                raise nvae
            if alt20 == 1:
                # C.g:261:4: declarator ( ':' constant_expression )?
                self.following.append(self.FOLLOW_declarator_in_struct_declarator602)
                self.declarator()
                self.following.pop()
                if self.failed:
                    return
                # C.g:261:15: ( ':' constant_expression )?
                alt19 = 2
                LA19_0 = self.input.LA(1)
                if (LA19_0 == 47) :
                    alt19 = 1
                if alt19 == 1:
                    # C.g:261:16: ':' constant_expression
                    self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator605)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_constant_expression_in_struct_declarator607)
                    self.constant_expression()
                    self.following.pop()
                    if self.failed:
                        return
            elif alt20 == 2:
                # C.g:262:4: ':' constant_expression
                self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator614)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_constant_expression_in_struct_declarator616)
                self.constant_expression()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 17, struct_declarator_StartIndex)
        pass
    return
# $ANTLR end struct_declarator
class enum_specifier_return(object):
def __init__(self):
self.start = None
self.stop = None
    # $ANTLR start enum_specifier
    # C.g:265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );
    def enum_specifier(self, ):
        """Parse an enum specifier: an anonymous enum body, a named enum
        body, or a bare enum tag reference. Uses 3-token lookahead to pick
        the alternative. Tokens: 48 'enum', 43 '{', 44 '}', 27 ','.
        Returns a holder object carrying the rule's start/stop tokens.
        """
        retval = self.enum_specifier_return()
        retval.start = self.input.LT(1)
        enum_specifier_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: rule 18 already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 18):
                    return retval
                # C.g:267:2: ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER )
                alt23 = 3
                LA23_0 = self.input.LA(1)
                if (LA23_0 == 48) :
                    LA23_1 = self.input.LA(2)
                    if (LA23_1 == IDENTIFIER) :
                        # Named enum: '{' after the tag means a full body (alt 2),
                        # any other follow token means a bare reference (alt 3).
                        LA23_2 = self.input.LA(3)
                        if (LA23_2 == 43) :
                            alt23 = 2
                        elif (LA23_2 == EOF or LA23_2 == IDENTIFIER or LA23_2 == 25 or LA23_2 == 27 or (29 <= LA23_2 <= 42) or (45 <= LA23_2 <= 64) or LA23_2 == 66) :
                            alt23 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return retval
                            nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 2, self.input)
                            raise nvae
                    elif (LA23_1 == 43) :
                        alt23 = 1
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 1, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 0, self.input)
                    raise nvae
                if alt23 == 1:
                    # C.g:267:4: 'enum' '{' enumerator_list ( ',' )? '}'
                    self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier634)
                    if self.failed:
                        return retval
                    self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier636)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier638)
                    self.enumerator_list()
                    self.following.pop()
                    if self.failed:
                        return retval
                    # C.g:267:31: ( ',' )?
                    # Optional trailing comma before '}'.
                    alt21 = 2
                    LA21_0 = self.input.LA(1)
                    if (LA21_0 == 27) :
                        alt21 = 1
                    if alt21 == 1:
                        # C.g:0:0: ','
                        self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier640)
                        if self.failed:
                            return retval
                    self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier643)
                    if self.failed:
                        return retval
                elif alt23 == 2:
                    # C.g:268:4: 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}'
                    self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier648)
                    if self.failed:
                        return retval
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier650)
                    if self.failed:
                        return retval
                    self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier652)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier654)
                    self.enumerator_list()
                    self.following.pop()
                    if self.failed:
                        return retval
                    # C.g:268:42: ( ',' )?
                    # Optional trailing comma before '}'.
                    alt22 = 2
                    LA22_0 = self.input.LA(1)
                    if (LA22_0 == 27) :
                        alt22 = 1
                    if alt22 == 1:
                        # C.g:0:0: ','
                        self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier656)
                        if self.failed:
                            return retval
                    self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier659)
                    if self.failed:
                        return retval
                elif alt23 == 3:
                    # C.g:269:4: 'enum' IDENTIFIER
                    self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier664)
                    if self.failed:
                        return retval
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier666)
                    if self.failed:
                        return retval
                retval.stop = self.input.LT(-1)
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 18, enum_specifier_StartIndex)
            pass
        return retval
    # $ANTLR end enum_specifier
# $ANTLR start enumerator_list
# C.g:272:1: enumerator_list : enumerator ( ',' enumerator )* ;
def enumerator_list(self, ):
enumerator_list_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 19):
return
# C.g:273:2: ( enumerator ( ',' enumerator )* )
# C.g:273:4: enumerator ( ',' enumerator )*
self.following.append(self.FOLLOW_enumerator_in_enumerator_list677)
self.enumerator()
self.following.pop()
if self.failed:
return
# C.g:273:15: ( ',' enumerator )*
while True: #loop24
alt24 = 2
LA24_0 = self.input.LA(1)
if (LA24_0 == 27) :
LA24_1 = self.input.LA(2)
if (LA24_1 == IDENTIFIER) :
alt24 = 1
if alt24 == 1:
# C.g:273:16: ',' enumerator
self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list680)
if self.failed:
return
self.following.append(self.FOLLOW_enumerator_in_enumerator_list682)
self.enumerator()
self.following.pop()
if self.failed:
return
else:
break #loop24
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 19, enumerator_list_StartIndex)
pass
return
# $ANTLR end enumerator_list
# $ANTLR start enumerator
# C.g:276:1: enumerator : IDENTIFIER ( '=' constant_expression )? ;
def enumerator(self, ):
enumerator_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20):
return
# C.g:277:2: ( IDENTIFIER ( '=' constant_expression )? )
# C.g:277:4: IDENTIFIER ( '=' constant_expression )?
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator695)
if self.failed:
return
# C.g:277:15: ( '=' constant_expression )?
alt25 = 2
LA25_0 = self.input.LA(1)
if (LA25_0 == 28) :
alt25 = 1
if alt25 == 1:
# C.g:277:16: '=' constant_expression
self.match(self.input, 28, self.FOLLOW_28_in_enumerator698)
if self.failed:
return
self.following.append(self.FOLLOW_constant_expression_in_enumerator700)
self.constant_expression()
self.following.pop()
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 20, enumerator_StartIndex)
pass
return
# $ANTLR end enumerator
# $ANTLR start type_qualifier
# C.g:280:1: type_qualifier : ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' );
def type_qualifier(self, ):
type_qualifier_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21):
return
# C.g:281:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' )
# C.g:
if (49 <= self.input.LA(1) <= 61):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_type_qualifier0
)
raise mse
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 21, type_qualifier_StartIndex)
pass
return
# $ANTLR end type_qualifier
class declarator_return(object):
def __init__(self):
self.start = None
self.stop = None
    # $ANTLR start declarator
    # C.g:296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );
    def declarator(self, ):
        """Parse a declarator: either an optional pointer plus optional EFI
        calling-convention keywords (58 'EFIAPI', 59 'EFI_BOOTSERVICE',
        60 'EFI_RUNTIMESERVICE') followed by a direct_declarator, or a bare
        pointer. Token 66 is '*'. Returns a holder with start/stop tokens.
        """
        retval = self.declarator_return()
        retval.start = self.input.LT(1)
        declarator_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: rule 22 already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 22):
                    return retval
                # C.g:297:2: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer )
                alt30 = 2
                LA30_0 = self.input.LA(1)
                if (LA30_0 == 66) :
                    # '*' is ambiguous: a syntactic predicate decides whether a
                    # direct_declarator follows (alt 1) or it is a bare pointer (alt 2).
                    LA30_1 = self.input.LA(2)
                    if (self.synpred66()) :
                        alt30 = 1
                    elif (True) :
                        alt30 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 1, self.input)
                        raise nvae
                elif (LA30_0 == IDENTIFIER or (58 <= LA30_0 <= 60) or LA30_0 == 62) :
                    alt30 = 1
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 0, self.input)
                    raise nvae
                if alt30 == 1:
                    # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
                    # C.g:297:4: ( pointer )?
                    alt26 = 2
                    LA26_0 = self.input.LA(1)
                    if (LA26_0 == 66) :
                        alt26 = 1
                    if alt26 == 1:
                        # C.g:0:0: pointer
                        self.following.append(self.FOLLOW_pointer_in_declarator784)
                        self.pointer()
                        self.following.pop()
                        if self.failed:
                            return retval
                    # C.g:297:13: ( 'EFIAPI' )?
                    alt27 = 2
                    LA27_0 = self.input.LA(1)
                    if (LA27_0 == 58) :
                        alt27 = 1
                    if alt27 == 1:
                        # C.g:297:14: 'EFIAPI'
                        self.match(self.input, 58, self.FOLLOW_58_in_declarator788)
                        if self.failed:
                            return retval
                    # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
                    alt28 = 2
                    LA28_0 = self.input.LA(1)
                    if (LA28_0 == 59) :
                        alt28 = 1
                    if alt28 == 1:
                        # C.g:297:26: 'EFI_BOOTSERVICE'
                        self.match(self.input, 59, self.FOLLOW_59_in_declarator793)
                        if self.failed:
                            return retval
                    # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
                    alt29 = 2
                    LA29_0 = self.input.LA(1)
                    if (LA29_0 == 60) :
                        alt29 = 1
                    if alt29 == 1:
                        # C.g:297:47: 'EFI_RUNTIMESERVICE'
                        self.match(self.input, 60, self.FOLLOW_60_in_declarator798)
                        if self.failed:
                            return retval
                    self.following.append(self.FOLLOW_direct_declarator_in_declarator802)
                    self.direct_declarator()
                    self.following.pop()
                    if self.failed:
                        return retval
                elif alt30 == 2:
                    # C.g:299:4: pointer
                    self.following.append(self.FOLLOW_pointer_in_declarator808)
                    self.pointer()
                    self.following.pop()
                    if self.failed:
                        return retval
                retval.stop = self.input.LT(-1)
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 22, declarator_StartIndex)
            pass
        return retval
    # $ANTLR end declarator
    # $ANTLR start direct_declarator
    # C.g:302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );
    def direct_declarator(self, ):
        """Parse a direct declarator: either an IDENTIFIER followed by zero
        or more declarator suffixes (array brackets / parameter lists), or a
        parenthesized declarator followed by one or more suffixes.
        Tokens: 62 '(', 63 ')', 64 '[', 58 'EFIAPI'. The long lookahead
        chains below gate each suffix repetition on syntactic predicates
        (synpred67/synpred70) evaluated at 3 tokens of lookahead.
        """
        direct_declarator_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: rule 23 already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23):
                    return
                # C.g:303:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ )
                alt34 = 2
                LA34_0 = self.input.LA(1)
                if (LA34_0 == IDENTIFIER) :
                    alt34 = 1
                elif (LA34_0 == 62) :
                    alt34 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );", 34, 0, self.input)
                    raise nvae
                if alt34 == 1:
                    # C.g:303:4: IDENTIFIER ( declarator_suffix )*
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator819)
                    if self.failed:
                        return
                    # C.g:303:15: ( declarator_suffix )*
                    # Each iteration is entered only if the generated predicate
                    # synpred67 confirms a declarator_suffix actually follows.
                    while True: #loop31
                        alt31 = 2
                        LA31_0 = self.input.LA(1)
                        if (LA31_0 == 62) :
                            LA31 = self.input.LA(2)
                            if LA31 == 63:
                                LA31_30 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 58:
                                LA31_31 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 66:
                                LA31_32 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 59:
                                LA31_33 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 60:
                                LA31_34 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == IDENTIFIER:
                                LA31_35 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 29 or LA31 == 30 or LA31 == 31 or LA31 == 32 or LA31 == 33:
                                LA31_37 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 34:
                                LA31_38 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 35:
                                LA31_39 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 36:
                                LA31_40 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 37:
                                LA31_41 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 38:
                                LA31_42 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 39:
                                LA31_43 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 40:
                                LA31_44 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 41:
                                LA31_45 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 42:
                                LA31_46 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 45 or LA31 == 46:
                                LA31_47 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 48:
                                LA31_48 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 49 or LA31 == 50 or LA31 == 51 or LA31 == 52 or LA31 == 53 or LA31 == 54 or LA31 == 55 or LA31 == 56 or LA31 == 57 or LA31 == 61:
                                LA31_49 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                        elif (LA31_0 == 64) :
                            # '[' suffix: lookahead covers ']' (65), expression
                            # starters, and literal tokens.
                            LA31 = self.input.LA(2)
                            if LA31 == 65:
                                LA31_51 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 62:
                                LA31_52 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == IDENTIFIER:
                                LA31_53 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == HEX_LITERAL:
                                LA31_54 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == OCTAL_LITERAL:
                                LA31_55 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == DECIMAL_LITERAL:
                                LA31_56 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == CHARACTER_LITERAL:
                                LA31_57 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == STRING_LITERAL:
                                LA31_58 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == FLOATING_POINT_LITERAL:
                                LA31_59 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 72:
                                LA31_60 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 73:
                                LA31_61 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 66 or LA31 == 68 or LA31 == 69 or LA31 == 77 or LA31 == 78 or LA31 == 79:
                                LA31_62 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                            elif LA31 == 74:
                                LA31_63 = self.input.LA(3)
                                if (self.synpred67()) :
                                    alt31 = 1
                        if alt31 == 1:
                            # C.g:0:0: declarator_suffix
                            self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator821)
                            self.declarator_suffix()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            break #loop31
                elif alt34 == 2:
                    # C.g:304:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+
                    self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator827)
                    if self.failed:
                        return
                    # C.g:304:8: ( 'EFIAPI' )?
                    alt32 = 2
                    LA32_0 = self.input.LA(1)
                    if (LA32_0 == 58) :
                        LA32_1 = self.input.LA(2)
                        if (self.synpred69()) :
                            alt32 = 1
                    if alt32 == 1:
                        # C.g:304:9: 'EFIAPI'
                        self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator830)
                        if self.failed:
                            return
                    self.following.append(self.FOLLOW_declarator_in_direct_declarator834)
                    self.declarator()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_direct_declarator836)
                    if self.failed:
                        return
                    # C.g:304:35: ( declarator_suffix )+
                    # At least one suffix is required after a parenthesized
                    # declarator; same predicate-gated lookahead as loop31.
                    cnt33 = 0
                    while True: #loop33
                        alt33 = 2
                        LA33_0 = self.input.LA(1)
                        if (LA33_0 == 62) :
                            LA33 = self.input.LA(2)
                            if LA33 == 63:
                                LA33_30 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 58:
                                LA33_31 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 66:
                                LA33_32 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 59:
                                LA33_33 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 60:
                                LA33_34 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == IDENTIFIER:
                                LA33_35 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 29 or LA33 == 30 or LA33 == 31 or LA33 == 32 or LA33 == 33:
                                LA33_37 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 34:
                                LA33_38 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 35:
                                LA33_39 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 36:
                                LA33_40 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 37:
                                LA33_41 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 38:
                                LA33_42 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 39:
                                LA33_43 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 40:
                                LA33_44 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 41:
                                LA33_45 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 42:
                                LA33_46 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 45 or LA33 == 46:
                                LA33_47 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 48:
                                LA33_48 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 49 or LA33 == 50 or LA33 == 51 or LA33 == 52 or LA33 == 53 or LA33 == 54 or LA33 == 55 or LA33 == 56 or LA33 == 57 or LA33 == 61:
                                LA33_49 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                        elif (LA33_0 == 64) :
                            LA33 = self.input.LA(2)
                            if LA33 == 65:
                                LA33_51 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 62:
                                LA33_52 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == IDENTIFIER:
                                LA33_53 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == HEX_LITERAL:
                                LA33_54 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == OCTAL_LITERAL:
                                LA33_55 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == DECIMAL_LITERAL:
                                LA33_56 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == CHARACTER_LITERAL:
                                LA33_57 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == STRING_LITERAL:
                                LA33_58 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == FLOATING_POINT_LITERAL:
                                LA33_59 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 72:
                                LA33_60 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 73:
                                LA33_61 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 66 or LA33 == 68 or LA33 == 69 or LA33 == 77 or LA33 == 78 or LA33 == 79:
                                LA33_62 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                            elif LA33 == 74:
                                LA33_63 = self.input.LA(3)
                                if (self.synpred70()) :
                                    alt33 = 1
                        if alt33 == 1:
                            # C.g:0:0: declarator_suffix
                            self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator838)
                            self.declarator_suffix()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            # '+' loop: at least one iteration is mandatory.
                            if cnt33 >= 1:
                                break #loop33
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            eee = EarlyExitException(33, self.input)
                            raise eee
                        cnt33 += 1
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 23, direct_declarator_StartIndex)
            pass
        return
    # $ANTLR end direct_declarator
    # $ANTLR start declarator_suffix
    # C.g:307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );
    def declarator_suffix(self, ):
        """Parse one declarator suffix: a sized or empty array dimension
        ('[' ... ']'), or a function parameter list / identifier list /
        empty parentheses. Tokens: 64 '[', 65 ']', 62 '(', 63 ')'.
        For '(' IDENTIFIER the synpred73/synpred74 predicates disambiguate
        a typed parameter list from a K&R-style identifier list.
        """
        declarator_suffix_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: rule 24 already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24):
                    return
                # C.g:308:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' )
                alt35 = 5
                LA35_0 = self.input.LA(1)
                if (LA35_0 == 64) :
                    LA35_1 = self.input.LA(2)
                    if (LA35_1 == 65) :
                        alt35 = 2
                    elif ((IDENTIFIER <= LA35_1 <= FLOATING_POINT_LITERAL) or LA35_1 == 62 or LA35_1 == 66 or (68 <= LA35_1 <= 69) or (72 <= LA35_1 <= 74) or (77 <= LA35_1 <= 79)) :
                        alt35 = 1
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input)
                        raise nvae
                elif (LA35_0 == 62) :
                    LA35 = self.input.LA(2)
                    if LA35 == 63:
                        alt35 = 5
                    elif LA35 == 29 or LA35 == 30 or LA35 == 31 or LA35 == 32 or LA35 == 33 or LA35 == 34 or LA35 == 35 or LA35 == 36 or LA35 == 37 or LA35 == 38 or LA35 == 39 or LA35 == 40 or LA35 == 41 or LA35 == 42 or LA35 == 45 or LA35 == 46 or LA35 == 48 or LA35 == 49 or LA35 == 50 or LA35 == 51 or LA35 == 52 or LA35 == 53 or LA35 == 54 or LA35 == 55 or LA35 == 56 or LA35 == 57 or LA35 == 58 or LA35 == 59 or LA35 == 60 or LA35 == 61 or LA35 == 66:
                        alt35 = 3
                    elif LA35 == IDENTIFIER:
                        # Ambiguous: could start a parameter_type_list or an
                        # identifier_list; resolve with syntactic predicates.
                        LA35_29 = self.input.LA(3)
                        if (self.synpred73()) :
                            alt35 = 3
                        elif (self.synpred74()) :
                            alt35 = 4
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 29, self.input)
                            raise nvae
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input)
                    raise nvae
                if alt35 == 1:
                    # C.g:308:6: '[' constant_expression ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix852)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix854)
                    self.constant_expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix856)
                    if self.failed:
                        return
                elif alt35 == 2:
                    # C.g:309:9: '[' ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix866)
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix868)
                    if self.failed:
                        return
                elif alt35 == 3:
                    # C.g:310:9: '(' parameter_type_list ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix878)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix880)
                    self.parameter_type_list()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix882)
                    if self.failed:
                        return
                elif alt35 == 4:
                    # C.g:311:9: '(' identifier_list ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix892)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix894)
                    self.identifier_list()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix896)
                    if self.failed:
                        return
                elif alt35 == 5:
                    # C.g:312:9: '(' ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix906)
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix908)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 24, declarator_suffix_StartIndex)
            pass
        return
    # $ANTLR end declarator_suffix
    # $ANTLR start pointer
    # C.g:315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
    def pointer(self, ):
        """Parse a pointer: '*' followed by one or more type qualifiers and
        an optional nested pointer, or '*' pointer, or a bare '*'.
        Token 66 is '*'; qualifier keywords are tokens 49..61. Alternatives
        are chosen via syntactic predicates (synpred75..synpred78).
        """
        pointer_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: rule 25 already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25):
                    return
                # C.g:316:2: ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
                alt38 = 3
                LA38_0 = self.input.LA(1)
                if (LA38_0 == 66) :
                    LA38 = self.input.LA(2)
                    if LA38 == 66:
                        # '**': nested-pointer alternative if the predicate holds.
                        LA38_2 = self.input.LA(3)
                        if (self.synpred78()) :
                            alt38 = 2
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input)
                            raise nvae
                    elif LA38 == 58:
                        LA38_3 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input)
                            raise nvae
                    elif LA38 == 59:
                        LA38_4 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input)
                            raise nvae
                    elif LA38 == 60:
                        LA38_5 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 5, self.input)
                            raise nvae
                    elif LA38 == EOF or LA38 == IDENTIFIER or LA38 == 25 or LA38 == 26 or LA38 == 27 or LA38 == 28 or LA38 == 29 or LA38 == 30 or LA38 == 31 or LA38 == 32 or LA38 == 33 or LA38 == 34 or LA38 == 35 or LA38 == 36 or LA38 == 37 or LA38 == 38 or LA38 == 39 or LA38 == 40 or LA38 == 41 or LA38 == 42 or LA38 == 43 or LA38 == 45 or LA38 == 46 or LA38 == 47 or LA38 == 48 or LA38 == 62 or LA38 == 63 or LA38 == 64:
                        # Any non-qualifier follower means a bare '*'.
                        alt38 = 3
                    elif LA38 == 53:
                        LA38_21 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 21, self.input)
                            raise nvae
                    elif LA38 == 49 or LA38 == 50 or LA38 == 51 or LA38 == 52 or LA38 == 54 or LA38 == 55 or LA38 == 56 or LA38 == 57 or LA38 == 61:
                        LA38_29 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input)
                            raise nvae
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input)
                    raise nvae
                if alt38 == 1:
                    # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
                    self.match(self.input, 66, self.FOLLOW_66_in_pointer919)
                    if self.failed:
                        return
                    # C.g:316:8: ( type_qualifier )+
                    # One or more qualifiers, each gated by synpred75.
                    cnt36 = 0
                    while True: #loop36
                        alt36 = 2
                        LA36 = self.input.LA(1)
                        if LA36 == 58:
                            LA36_2 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 59:
                            LA36_3 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 60:
                            LA36_4 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 53:
                            LA36_20 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 49 or LA36 == 50 or LA36 == 51 or LA36 == 52 or LA36 == 54 or LA36 == 55 or LA36 == 56 or LA36 == 57 or LA36 == 61:
                            LA36_28 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        if alt36 == 1:
                            # C.g:0:0: type_qualifier
                            self.following.append(self.FOLLOW_type_qualifier_in_pointer921)
                            self.type_qualifier()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            # '+' loop: at least one iteration is mandatory.
                            if cnt36 >= 1:
                                break #loop36
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            eee = EarlyExitException(36, self.input)
                            raise eee
                        cnt36 += 1
                    # C.g:316:24: ( pointer )?
                    alt37 = 2
                    LA37_0 = self.input.LA(1)
                    if (LA37_0 == 66) :
                        LA37_1 = self.input.LA(2)
                        if (self.synpred76()) :
                            alt37 = 1
                    if alt37 == 1:
                        # C.g:0:0: pointer
                        self.following.append(self.FOLLOW_pointer_in_pointer924)
                        self.pointer()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt38 == 2:
                    # C.g:317:4: '*' pointer
                    self.match(self.input, 66, self.FOLLOW_66_in_pointer930)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_pointer_in_pointer932)
                    self.pointer()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt38 == 3:
                    # C.g:318:4: '*'
                    self.match(self.input, 66, self.FOLLOW_66_in_pointer937)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 25, pointer_StartIndex)
            pass
        return
    # $ANTLR end pointer
# $ANTLR start parameter_type_list
# C.g:321:1: parameter_type_list : parameter_list ( ',' ( 'OPTIONAL' )? '...' )? ;
def parameter_type_list(self, ):
parameter_type_list_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26):
return
# C.g:322:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? )
# C.g:322:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )?
self.following.append(self.FOLLOW_parameter_list_in_parameter_type_list948)
self.parameter_list()
self.following.pop()
if self.failed:
return
# C.g:322:19: ( ',' ( 'OPTIONAL' )? '...' )?
alt40 = 2
LA40_0 = self.input.LA(1)
if (LA40_0 == 27) :
alt40 = 1
if alt40 == 1:
# C.g:322:20: ',' ( 'OPTIONAL' )? '...'
self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list951)
if self.failed:
return
# C.g:322:24: ( 'OPTIONAL' )?
alt39 = 2
LA39_0 = self.input.LA(1)
if (LA39_0 == 53) :
alt39 = 1
if alt39 == 1:
# C.g:322:25: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list954)
if self.failed:
return
self.match(self.input, 67, self.FOLLOW_67_in_parameter_type_list958)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 26, parameter_type_list_StartIndex)
pass
return
# $ANTLR end parameter_type_list
    # $ANTLR start parameter_list
    # C.g:325:1: parameter_list : parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* ;
    def parameter_list(self, ):
        """Parse a comma-separated list of parameter declarations, each
        optionally preceded by the 'OPTIONAL' keyword (token 53).
        Tokens: 27 ','. Loop entry for ',' 'OPTIONAL' is gated by the
        synpred82 predicate; the 'OPTIONAL' consumption itself by synpred81.
        """
        parameter_list_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: rule 27 already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27):
                    return
                # C.g:326:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* )
                # C.g:326:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )*
                self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list971)
                self.parameter_declaration()
                self.following.pop()
                if self.failed:
                    return
                # C.g:326:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )*
                while True: #loop42
                    alt42 = 2
                    LA42_0 = self.input.LA(1)
                    if (LA42_0 == 27) :
                        LA42_1 = self.input.LA(2)
                        if (LA42_1 == 53) :
                            LA42_3 = self.input.LA(3)
                            if (self.synpred82()) :
                                alt42 = 1
                        elif (LA42_1 == IDENTIFIER or (29 <= LA42_1 <= 42) or (45 <= LA42_1 <= 46) or (48 <= LA42_1 <= 52) or (54 <= LA42_1 <= 61) or LA42_1 == 66) :
                            alt42 = 1
                    if alt42 == 1:
                        # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
                        self.match(self.input, 27, self.FOLLOW_27_in_parameter_list974)
                        if self.failed:
                            return
                        # C.g:326:31: ( 'OPTIONAL' )?
                        alt41 = 2
                        LA41_0 = self.input.LA(1)
                        if (LA41_0 == 53) :
                            LA41_1 = self.input.LA(2)
                            if (self.synpred81()) :
                                alt41 = 1
                        if alt41 == 1:
                            # C.g:326:32: 'OPTIONAL'
                            self.match(self.input, 53, self.FOLLOW_53_in_parameter_list977)
                            if self.failed:
                                return
                        self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list981)
                        self.parameter_declaration()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop42
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 27, parameter_list_StartIndex)
            pass
        return
    # $ANTLR end parameter_list
    # $ANTLR start parameter_declaration
    # C.g:329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );
    def parameter_declaration(self, ):
        """Parse the ``parameter_declaration`` rule (ANTLR-generated from C.g).

        Two alternatives: (1) declaration_specifiers followed by any mix of
        declarator/abstract_declarator and an optional 'OPTIONAL' keyword, or
        (2) zero or more pointers followed by a bare IDENTIFIER.  The choice
        is driven by a generated lookahead DFA with syntactic predicates
        (synpred83/84/86); a NoViableAltException is raised (or self.failed
        is set when backtracking) if no alternative matches.
        """
        parameter_declaration_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28):
                    return
                # C.g:330:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER )
                alt46 = 2
                LA46 = self.input.LA(1)
                if LA46 == 29 or LA46 == 30 or LA46 == 31 or LA46 == 32 or LA46 == 33 or LA46 == 34 or LA46 == 35 or LA46 == 36 or LA46 == 37 or LA46 == 38 or LA46 == 39 or LA46 == 40 or LA46 == 41 or LA46 == 42 or LA46 == 45 or LA46 == 46 or LA46 == 48 or LA46 == 49 or LA46 == 50 or LA46 == 51 or LA46 == 52 or LA46 == 53 or LA46 == 54 or LA46 == 55 or LA46 == 56 or LA46 == 57 or LA46 == 58 or LA46 == 59 or LA46 == 60 or LA46 == 61:
                    alt46 = 1
                elif LA46 == IDENTIFIER:
                    LA46_13 = self.input.LA(2)
                    if (self.synpred86()) :
                        alt46 = 1
                    elif (True) :
                        alt46 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input)
                        raise nvae
                elif LA46 == 66:
                    alt46 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input)
                    raise nvae
                if alt46 == 1:
                    # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
                    self.following.append(self.FOLLOW_declaration_specifiers_in_parameter_declaration994)
                    self.declaration_specifiers()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:330:27: ( declarator | abstract_declarator )*
                    while True: #loop43
                        alt43 = 3
                        LA43 = self.input.LA(1)
                        if LA43 == 66:
                            LA43_5 = self.input.LA(2)
                            if (self.synpred83()) :
                                alt43 = 1
                            elif (self.synpred84()) :
                                alt43 = 2
                        elif LA43 == IDENTIFIER or LA43 == 58 or LA43 == 59 or LA43 == 60:
                            alt43 = 1
                        elif LA43 == 62:
                            LA43 = self.input.LA(2)
                            if LA43 == 29 or LA43 == 30 or LA43 == 31 or LA43 == 32 or LA43 == 33 or LA43 == 34 or LA43 == 35 or LA43 == 36 or LA43 == 37 or LA43 == 38 or LA43 == 39 or LA43 == 40 or LA43 == 41 or LA43 == 42 or LA43 == 45 or LA43 == 46 or LA43 == 48 or LA43 == 49 or LA43 == 50 or LA43 == 51 or LA43 == 52 or LA43 == 53 or LA43 == 54 or LA43 == 55 or LA43 == 56 or LA43 == 57 or LA43 == 61 or LA43 == 63 or LA43 == 64:
                                alt43 = 2
                            elif LA43 == IDENTIFIER:
                                LA43_37 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 58:
                                LA43_38 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 66:
                                LA43_39 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 59:
                                LA43_40 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 60:
                                LA43_41 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 62:
                                LA43_43 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 64:
                                alt43 = 2
                        if alt43 == 1:
                            # C.g:330:28: declarator
                            self.following.append(self.FOLLOW_declarator_in_parameter_declaration997)
                            self.declarator()
                            self.following.pop()
                            if self.failed:
                                return
                        elif alt43 == 2:
                            # C.g:330:39: abstract_declarator
                            self.following.append(self.FOLLOW_abstract_declarator_in_parameter_declaration999)
                            self.abstract_declarator()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            break #loop43
                    # C.g:330:61: ( 'OPTIONAL' )?
                    alt44 = 2
                    LA44_0 = self.input.LA(1)
                    if (LA44_0 == 53) :
                        alt44 = 1
                    if alt44 == 1:
                        # C.g:330:62: 'OPTIONAL'
                        self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration1004)
                        if self.failed:
                            return
                elif alt46 == 2:
                    # C.g:332:4: ( pointer )* IDENTIFIER
                    # C.g:332:4: ( pointer )*
                    while True: #loop45
                        alt45 = 2
                        LA45_0 = self.input.LA(1)
                        if (LA45_0 == 66) :
                            alt45 = 1
                        if alt45 == 1:
                            # C.g:0:0: pointer
                            self.following.append(self.FOLLOW_pointer_in_parameter_declaration1013)
                            self.pointer()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            break #loop45
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1016)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 28, parameter_declaration_StartIndex)
            pass
        return
    # $ANTLR end parameter_declaration
    # $ANTLR start identifier_list
    # C.g:335:1: identifier_list : IDENTIFIER ( ',' IDENTIFIER )* ;
    def identifier_list(self, ):
        """Parse the ``identifier_list`` rule (ANTLR-generated from C.g).

        Grammar: IDENTIFIER ( ',' IDENTIFIER )* — one identifier followed by
        any number of comma-separated identifiers.  Match failures during
        backtracking set self.failed; syntax errors are reported/recovered.
        """
        identifier_list_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29):
                    return
                # C.g:336:2: ( IDENTIFIER ( ',' IDENTIFIER )* )
                # C.g:336:4: IDENTIFIER ( ',' IDENTIFIER )*
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1027)
                if self.failed:
                    return
                # C.g:337:2: ( ',' IDENTIFIER )*
                while True: #loop47
                    alt47 = 2
                    LA47_0 = self.input.LA(1)
                    if (LA47_0 == 27) :
                        alt47 = 1
                    if alt47 == 1:
                        # C.g:337:3: ',' IDENTIFIER
                        self.match(self.input, 27, self.FOLLOW_27_in_identifier_list1031)
                        if self.failed:
                            return
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1033)
                        if self.failed:
                            return
                    else:
                        break #loop47
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 29, identifier_list_StartIndex)
            pass
        return
    # $ANTLR end identifier_list
    # $ANTLR start type_name
    # C.g:340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );
    def type_name(self, ):
        """Parse the ``type_name`` rule (ANTLR-generated from C.g).

        Two alternatives: specifier_qualifier_list with an optional
        abstract_declarator, or a type_id.  An IDENTIFIER first token is
        disambiguated by the syntactic predicate synpred90; with no viable
        alternative a NoViableAltException is raised (or self.failed is set
        when backtracking).
        """
        type_name_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30):
                    return
                # C.g:341:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id )
                alt49 = 2
                LA49_0 = self.input.LA(1)
                if ((34 <= LA49_0 <= 42) or (45 <= LA49_0 <= 46) or (48 <= LA49_0 <= 61)) :
                    alt49 = 1
                elif (LA49_0 == IDENTIFIER) :
                    LA49_13 = self.input.LA(2)
                    if (self.synpred90()) :
                        alt49 = 1
                    elif (True) :
                        alt49 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input)
                    raise nvae
                if alt49 == 1:
                    # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
                    self.following.append(self.FOLLOW_specifier_qualifier_list_in_type_name1046)
                    self.specifier_qualifier_list()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:341:29: ( abstract_declarator )?
                    alt48 = 2
                    LA48_0 = self.input.LA(1)
                    if (LA48_0 == 62 or LA48_0 == 64 or LA48_0 == 66) :
                        alt48 = 1
                    if alt48 == 1:
                        # C.g:0:0: abstract_declarator
                        self.following.append(self.FOLLOW_abstract_declarator_in_type_name1048)
                        self.abstract_declarator()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt49 == 2:
                    # C.g:342:4: type_id
                    self.following.append(self.FOLLOW_type_id_in_type_name1054)
                    self.type_id()
                    self.following.pop()
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 30, type_name_StartIndex)
            pass
        return
    # $ANTLR end type_name
    # $ANTLR start abstract_declarator
    # C.g:345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );
    def abstract_declarator(self, ):
        """Parse the ``abstract_declarator`` rule (ANTLR-generated from C.g).

        Two alternatives: a pointer optionally followed by a
        direct_abstract_declarator, or a direct_abstract_declarator alone.
        The large lookahead cascade below is a generated DFA that consults
        up to three tokens plus the syntactic predicate synpred91 to decide
        whether the optional direct_abstract_declarator is present.
        """
        abstract_declarator_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31):
                    return
                # C.g:346:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator )
                alt51 = 2
                LA51_0 = self.input.LA(1)
                if (LA51_0 == 66) :
                    alt51 = 1
                elif (LA51_0 == 62 or LA51_0 == 64) :
                    alt51 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input)
                    raise nvae
                if alt51 == 1:
                    # C.g:346:4: pointer ( direct_abstract_declarator )?
                    self.following.append(self.FOLLOW_pointer_in_abstract_declarator1065)
                    self.pointer()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:346:12: ( direct_abstract_declarator )?
                    alt50 = 2
                    LA50_0 = self.input.LA(1)
                    if (LA50_0 == 62) :
                        LA50 = self.input.LA(2)
                        if LA50 == 63:
                            LA50_12 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 58:
                            LA50_13 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 66:
                            LA50_14 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 59:
                            LA50_15 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 60:
                            LA50_16 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == IDENTIFIER:
                            LA50_17 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 62:
                            LA50_18 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 64:
                            LA50_19 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 29 or LA50 == 30 or LA50 == 31 or LA50 == 32 or LA50 == 33:
                            LA50_20 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 34:
                            LA50_21 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 35:
                            LA50_22 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 36:
                            LA50_23 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 37:
                            LA50_24 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 38:
                            LA50_25 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 39:
                            LA50_26 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 40:
                            LA50_27 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 41:
                            LA50_28 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 42:
                            LA50_29 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 45 or LA50 == 46:
                            LA50_30 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 48:
                            LA50_31 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 49 or LA50 == 50 or LA50 == 51 or LA50 == 52 or LA50 == 53 or LA50 == 54 or LA50 == 55 or LA50 == 56 or LA50 == 57 or LA50 == 61:
                            LA50_32 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                    elif (LA50_0 == 64) :
                        LA50 = self.input.LA(2)
                        if LA50 == 65:
                            LA50_33 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 62:
                            LA50_34 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == IDENTIFIER:
                            LA50_35 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == HEX_LITERAL:
                            LA50_36 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == OCTAL_LITERAL:
                            LA50_37 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == DECIMAL_LITERAL:
                            LA50_38 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == CHARACTER_LITERAL:
                            LA50_39 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == STRING_LITERAL:
                            LA50_40 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == FLOATING_POINT_LITERAL:
                            LA50_41 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 72:
                            LA50_42 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 73:
                            LA50_43 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 66 or LA50 == 68 or LA50 == 69 or LA50 == 77 or LA50 == 78 or LA50 == 79:
                            LA50_44 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 74:
                            LA50_45 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                    if alt50 == 1:
                        # C.g:0:0: direct_abstract_declarator
                        self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1067)
                        self.direct_abstract_declarator()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt51 == 2:
                    # C.g:347:4: direct_abstract_declarator
                    self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1073)
                    self.direct_abstract_declarator()
                    self.following.pop()
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 31, abstract_declarator_StartIndex)
            pass
        return
    # $ANTLR end abstract_declarator
    # $ANTLR start direct_abstract_declarator
    # C.g:350:1: direct_abstract_declarator : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* ;
    def direct_abstract_declarator(self, ):
        """Parse the ``direct_abstract_declarator`` rule (ANTLR-generated from C.g).

        First matches either a parenthesized abstract_declarator or an
        abstract_declarator_suffix, then consumes any further suffixes.
        The long lookahead cascades are generated DFAs that use up to three
        tokens plus syntactic predicates (synpred93/synpred94) to decide
        each step; with no viable first alternative a NoViableAltException
        is raised (or self.failed is set when backtracking).
        """
        direct_abstract_declarator_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32):
                    return
                # C.g:351:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* )
                # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )*
                # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )
                alt52 = 2
                LA52_0 = self.input.LA(1)
                if (LA52_0 == 62) :
                    LA52 = self.input.LA(2)
                    if LA52 == IDENTIFIER or LA52 == 29 or LA52 == 30 or LA52 == 31 or LA52 == 32 or LA52 == 33 or LA52 == 34 or LA52 == 35 or LA52 == 36 or LA52 == 37 or LA52 == 38 or LA52 == 39 or LA52 == 40 or LA52 == 41 or LA52 == 42 or LA52 == 45 or LA52 == 46 or LA52 == 48 or LA52 == 49 or LA52 == 50 or LA52 == 51 or LA52 == 52 or LA52 == 53 or LA52 == 54 or LA52 == 55 or LA52 == 56 or LA52 == 57 or LA52 == 58 or LA52 == 59 or LA52 == 60 or LA52 == 61 or LA52 == 63:
                        alt52 = 2
                    elif LA52 == 66:
                        LA52_18 = self.input.LA(3)
                        if (self.synpred93()) :
                            alt52 = 1
                        elif (True) :
                            alt52 = 2
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input)
                            raise nvae
                    elif LA52 == 62 or LA52 == 64:
                        alt52 = 1
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input)
                        raise nvae
                elif (LA52_0 == 64) :
                    alt52 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input)
                    raise nvae
                if alt52 == 1:
                    # C.g:351:6: '(' abstract_declarator ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1086)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1088)
                    self.abstract_declarator()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_direct_abstract_declarator1090)
                    if self.failed:
                        return
                elif alt52 == 2:
                    # C.g:351:36: abstract_declarator_suffix
                    self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094)
                    self.abstract_declarator_suffix()
                    self.following.pop()
                    if self.failed:
                        return
                # C.g:351:65: ( abstract_declarator_suffix )*
                while True: #loop53
                    alt53 = 2
                    LA53_0 = self.input.LA(1)
                    if (LA53_0 == 62) :
                        LA53 = self.input.LA(2)
                        if LA53 == 63:
                            LA53_12 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 58:
                            LA53_13 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 66:
                            LA53_14 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 59:
                            LA53_15 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 60:
                            LA53_16 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == IDENTIFIER:
                            LA53_17 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 29 or LA53 == 30 or LA53 == 31 or LA53 == 32 or LA53 == 33:
                            LA53_19 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 34:
                            LA53_20 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 35:
                            LA53_21 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 36:
                            LA53_22 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 37:
                            LA53_23 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 38:
                            LA53_24 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 39:
                            LA53_25 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 40:
                            LA53_26 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 41:
                            LA53_27 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 42:
                            LA53_28 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 45 or LA53 == 46:
                            LA53_29 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 48:
                            LA53_30 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 49 or LA53 == 50 or LA53 == 51 or LA53 == 52 or LA53 == 53 or LA53 == 54 or LA53 == 55 or LA53 == 56 or LA53 == 57 or LA53 == 61:
                            LA53_31 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                    elif (LA53_0 == 64) :
                        LA53 = self.input.LA(2)
                        if LA53 == 65:
                            LA53_33 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 62:
                            LA53_34 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == IDENTIFIER:
                            LA53_35 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == HEX_LITERAL:
                            LA53_36 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == OCTAL_LITERAL:
                            LA53_37 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == DECIMAL_LITERAL:
                            LA53_38 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == CHARACTER_LITERAL:
                            LA53_39 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == STRING_LITERAL:
                            LA53_40 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == FLOATING_POINT_LITERAL:
                            LA53_41 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 72:
                            LA53_42 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 73:
                            LA53_43 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 66 or LA53 == 68 or LA53 == 69 or LA53 == 77 or LA53 == 78 or LA53 == 79:
                            LA53_44 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 74:
                            LA53_45 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                    if alt53 == 1:
                        # C.g:0:0: abstract_declarator_suffix
                        self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098)
                        self.abstract_declarator_suffix()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop53
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 32, direct_abstract_declarator_StartIndex)
            pass
        return
    # $ANTLR end direct_abstract_declarator
    # $ANTLR start abstract_declarator_suffix
    # C.g:354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );
    def abstract_declarator_suffix(self, ):
        """Parse the ``abstract_declarator_suffix`` rule (ANTLR-generated from C.g).

        Four alternatives: empty brackets, brackets around a
        constant_expression, empty parentheses, or parentheses around a
        parameter_type_list.  Alternative selection uses two tokens of
        lookahead; with no viable alternative a NoViableAltException is
        raised (or self.failed is set when backtracking).
        """
        abstract_declarator_suffix_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33):
                    return
                # C.g:355:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' )
                alt54 = 4
                LA54_0 = self.input.LA(1)
                if (LA54_0 == 64) :
                    LA54_1 = self.input.LA(2)
                    if (LA54_1 == 65) :
                        alt54 = 1
                    elif ((IDENTIFIER <= LA54_1 <= FLOATING_POINT_LITERAL) or LA54_1 == 62 or LA54_1 == 66 or (68 <= LA54_1 <= 69) or (72 <= LA54_1 <= 74) or (77 <= LA54_1 <= 79)) :
                        alt54 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 1, self.input)
                        raise nvae
                elif (LA54_0 == 62) :
                    LA54_2 = self.input.LA(2)
                    if (LA54_2 == 63) :
                        alt54 = 3
                    elif (LA54_2 == IDENTIFIER or (29 <= LA54_2 <= 42) or (45 <= LA54_2 <= 46) or (48 <= LA54_2 <= 61) or LA54_2 == 66) :
                        alt54 = 4
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input)
                    raise nvae
                if alt54 == 1:
                    # C.g:355:4: '[' ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1110)
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1112)
                    if self.failed:
                        return
                elif alt54 == 2:
                    # C.g:356:4: '[' constant_expression ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1117)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1119)
                    self.constant_expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1121)
                    if self.failed:
                        return
                elif alt54 == 3:
                    # C.g:357:4: '(' ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1126)
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1128)
                    if self.failed:
                        return
                elif alt54 == 4:
                    # C.g:358:4: '(' parameter_type_list ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1133)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135)
                    self.parameter_type_list()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1137)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 33, abstract_declarator_suffix_StartIndex)
            pass
        return
    # $ANTLR end abstract_declarator_suffix
    # $ANTLR start initializer
    # C.g:361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );
    def initializer(self, ):
        """Parse the ``initializer`` rule (ANTLR-generated from C.g).

        Two alternatives: a plain assignment_expression, or a braced
        initializer_list with an optional trailing comma.  One token of
        lookahead chooses the alternative; with no viable alternative a
        NoViableAltException is raised (or self.failed is set when
        backtracking).
        """
        initializer_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34):
                    return
                # C.g:363:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' )
                alt56 = 2
                LA56_0 = self.input.LA(1)
                if ((IDENTIFIER <= LA56_0 <= FLOATING_POINT_LITERAL) or LA56_0 == 62 or LA56_0 == 66 or (68 <= LA56_0 <= 69) or (72 <= LA56_0 <= 74) or (77 <= LA56_0 <= 79)) :
                    alt56 = 1
                elif (LA56_0 == 43) :
                    alt56 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input)
                    raise nvae
                if alt56 == 1:
                    # C.g:363:4: assignment_expression
                    self.following.append(self.FOLLOW_assignment_expression_in_initializer1150)
                    self.assignment_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt56 == 2:
                    # C.g:364:4: '{' initializer_list ( ',' )? '}'
                    self.match(self.input, 43, self.FOLLOW_43_in_initializer1155)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_initializer_list_in_initializer1157)
                    self.initializer_list()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:364:25: ( ',' )?
                    alt55 = 2
                    LA55_0 = self.input.LA(1)
                    if (LA55_0 == 27) :
                        alt55 = 1
                    if alt55 == 1:
                        # C.g:0:0: ','
                        self.match(self.input, 27, self.FOLLOW_27_in_initializer1159)
                        if self.failed:
                            return
                    self.match(self.input, 44, self.FOLLOW_44_in_initializer1162)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 34, initializer_StartIndex)
            pass
        return
    # $ANTLR end initializer
    # $ANTLR start initializer_list
    # C.g:367:1: initializer_list : initializer ( ',' initializer )* ;
    def initializer_list(self, ):
        """Parse the ``initializer_list`` rule (ANTLR-generated from C.g).

        Grammar: initializer ( ',' initializer )*.  Two tokens of lookahead
        decide whether a ',' begins another initializer (so a trailing comma
        before '}' is left for the caller).  Failures during backtracking
        set self.failed; syntax errors are reported and recovered.
        """
        initializer_list_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35):
                    return
                # C.g:368:2: ( initializer ( ',' initializer )* )
                # C.g:368:4: initializer ( ',' initializer )*
                self.following.append(self.FOLLOW_initializer_in_initializer_list1173)
                self.initializer()
                self.following.pop()
                if self.failed:
                    return
                # C.g:368:16: ( ',' initializer )*
                while True: #loop57
                    alt57 = 2
                    LA57_0 = self.input.LA(1)
                    if (LA57_0 == 27) :
                        LA57_1 = self.input.LA(2)
                        if ((IDENTIFIER <= LA57_1 <= FLOATING_POINT_LITERAL) or LA57_1 == 43 or LA57_1 == 62 or LA57_1 == 66 or (68 <= LA57_1 <= 69) or (72 <= LA57_1 <= 74) or (77 <= LA57_1 <= 79)) :
                            alt57 = 1
                    if alt57 == 1:
                        # C.g:368:17: ',' initializer
                        self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1176)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_initializer_in_initializer_list1178)
                        self.initializer()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop57
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 35, initializer_list_StartIndex)
            pass
        return
    # $ANTLR end initializer_list
class argument_expression_list_return(object):
def __init__(self):
self.start = None
self.stop = None
    # $ANTLR start argument_expression_list
    # C.g:373:1: argument_expression_list : assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* ;
    def argument_expression_list(self, ):
        """Parse the ``argument_expression_list`` rule (ANTLR-generated from C.g).

        Grammar: assignment_expression ( 'OPTIONAL' )?
                 ( ',' assignment_expression ( 'OPTIONAL' )? )*
        Returns an argument_expression_list_return whose start/stop record
        the first and last tokens matched.  Failures during backtracking
        set self.failed and return early with the (partial) retval.
        """
        retval = self.argument_expression_list_return()
        retval.start = self.input.LT(1)
        argument_expression_list_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 36):
                    return retval
                # C.g:374:2: ( assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* )
                # C.g:374:6: assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )*
                self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1196)
                self.assignment_expression()
                self.following.pop()
                if self.failed:
                    return retval
                # C.g:374:28: ( 'OPTIONAL' )?
                alt58 = 2
                LA58_0 = self.input.LA(1)
                if (LA58_0 == 53) :
                    alt58 = 1
                if alt58 == 1:
                    # C.g:374:29: 'OPTIONAL'
                    self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1199)
                    if self.failed:
                        return retval
                # C.g:374:42: ( ',' assignment_expression ( 'OPTIONAL' )? )*
                while True: #loop60
                    alt60 = 2
                    LA60_0 = self.input.LA(1)
                    if (LA60_0 == 27) :
                        alt60 = 1
                    if alt60 == 1:
                        # C.g:374:43: ',' assignment_expression ( 'OPTIONAL' )?
                        self.match(self.input, 27, self.FOLLOW_27_in_argument_expression_list1204)
                        if self.failed:
                            return retval
                        self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1206)
                        self.assignment_expression()
                        self.following.pop()
                        if self.failed:
                            return retval
                        # C.g:374:69: ( 'OPTIONAL' )?
                        alt59 = 2
                        LA59_0 = self.input.LA(1)
                        if (LA59_0 == 53) :
                            alt59 = 1
                        if alt59 == 1:
                            # C.g:374:70: 'OPTIONAL'
                            self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1209)
                            if self.failed:
                                return retval
                    else:
                        break #loop60
                retval.stop = self.input.LT(-1)
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 36, argument_expression_list_StartIndex)
            pass
        return retval
    # $ANTLR end argument_expression_list
    # $ANTLR start additive_expression
    # C.g:377:1: additive_expression : ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* ;
    def additive_expression(self, ):
        """Parse the ``additive_expression`` rule (ANTLR-generated from C.g).

        Grammar: multiplicative_expression ( '+' multiplicative_expression
        | '-' multiplicative_expression )* — token 68 is '+' and 69 is '-'.
        Failures during backtracking set self.failed; syntax errors are
        reported and recovered.
        """
        additive_expression_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37):
                    return
                # C.g:378:2: ( ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* )
                # C.g:378:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*
                # C.g:378:4: ( multiplicative_expression )
                # C.g:378:5: multiplicative_expression
                self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1225)
                self.multiplicative_expression()
                self.following.pop()
                if self.failed:
                    return
                # C.g:378:32: ( '+' multiplicative_expression | '-' multiplicative_expression )*
                while True: #loop61
                    alt61 = 3
                    LA61_0 = self.input.LA(1)
                    if (LA61_0 == 68) :
                        alt61 = 1
                    elif (LA61_0 == 69) :
                        alt61 = 2
                    if alt61 == 1:
                        # C.g:378:33: '+' multiplicative_expression
                        self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1229)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1231)
                        self.multiplicative_expression()
                        self.following.pop()
                        if self.failed:
                            return
                    elif alt61 == 2:
                        # C.g:378:65: '-' multiplicative_expression
                        self.match(self.input, 69, self.FOLLOW_69_in_additive_expression1235)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1237)
                        self.multiplicative_expression()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop61
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 37, additive_expression_StartIndex)
            pass
        return
    # $ANTLR end additive_expression
    # $ANTLR start multiplicative_expression
    # C.g:381:1: multiplicative_expression : ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* ;
    def multiplicative_expression(self, ):
        """Parse the ``multiplicative_expression`` rule (ANTLR-generated from C.g).

        Grammar: cast_expression ( '*' cast_expression | '/' cast_expression
        | '%' cast_expression )* — tokens 66/70/71 are '*', '/', '%'.
        Failures during backtracking set self.failed; syntax errors are
        reported and recovered.
        """
        multiplicative_expression_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
                    return
                # C.g:382:2: ( ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* )
                # C.g:382:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
                # C.g:382:4: ( cast_expression )
                # C.g:382:5: cast_expression
                self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1251)
                self.cast_expression()
                self.following.pop()
                if self.failed:
                    return
                # C.g:382:22: ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
                while True: #loop62
                    alt62 = 4
                    LA62 = self.input.LA(1)
                    if LA62 == 66:
                        alt62 = 1
                    elif LA62 == 70:
                        alt62 = 2
                    elif LA62 == 71:
                        alt62 = 3
                    if alt62 == 1:
                        # C.g:382:23: '*' cast_expression
                        self.match(self.input, 66, self.FOLLOW_66_in_multiplicative_expression1255)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1257)
                        self.cast_expression()
                        self.following.pop()
                        if self.failed:
                            return
                    elif alt62 == 2:
                        # C.g:382:45: '/' cast_expression
                        self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1261)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1263)
                        self.cast_expression()
                        self.following.pop()
                        if self.failed:
                            return
                    elif alt62 == 3:
                        # C.g:382:67: '%' cast_expression
                        self.match(self.input, 71, self.FOLLOW_71_in_multiplicative_expression1267)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1269)
                        self.cast_expression()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop62
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 38, multiplicative_expression_StartIndex)
            pass
        return
    # $ANTLR end multiplicative_expression
    # $ANTLR start cast_expression
    # C.g:385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );
    def cast_expression(self, ):
        """Parse the ANTLR rule 'cast_expression' (C.g line 385).

        Grammar: cast_expression : ( '(' type_name ')' cast_expression
                                   | unary_expression );
        Auto-generated recognizer code.  Disambiguates a parenthesized cast
        from a parenthesized unary expression via lookahead tokens (token 62
        is '(') and the syntactic predicate synpred109.  Participates in
        memoized backtracking under rule index 39.
        """
        cast_expression_StartIndex = self.input.index()
        try:
            try:
                # Memoization fast-path: skip if this rule already parsed here.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
                    return
                # C.g:386:2: ( '(' type_name ')' cast_expression | unary_expression )
                alt63 = 2
                LA63_0 = self.input.LA(1)
                if (LA63_0 == 62) :
                    LA63 = self.input.LA(2)
                    # Tokens 34..61 cover type-specifier keywords here; a '(' followed
                    # by one of them must be a cast.
                    if LA63 == 34 or LA63 == 35 or LA63 == 36 or LA63 == 37 or LA63 == 38 or LA63 == 39 or LA63 == 40 or LA63 == 41 or LA63 == 42 or LA63 == 45 or LA63 == 46 or LA63 == 48 or LA63 == 49 or LA63 == 50 or LA63 == 51 or LA63 == 52 or LA63 == 53 or LA63 == 54 or LA63 == 55 or LA63 == 56 or LA63 == 57 or LA63 == 58 or LA63 == 59 or LA63 == 60 or LA63 == 61:
                        alt63 = 1
                    elif LA63 == IDENTIFIER:
                        # '(' IDENTIFIER ... is ambiguous (typedef name vs expression);
                        # resolve with the syntactic predicate.
                        LA63_25 = self.input.LA(3)
                        if (self.synpred109()) :
                            alt63 = 1
                        elif (True) :
                            alt63 = 2
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 25, self.input)
                            raise nvae
                    elif LA63 == HEX_LITERAL or LA63 == OCTAL_LITERAL or LA63 == DECIMAL_LITERAL or LA63 == CHARACTER_LITERAL or LA63 == STRING_LITERAL or LA63 == FLOATING_POINT_LITERAL or LA63 == 62 or LA63 == 66 or LA63 == 68 or LA63 == 69 or LA63 == 72 or LA63 == 73 or LA63 == 74 or LA63 == 77 or LA63 == 78 or LA63 == 79:
                        alt63 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)
                        raise nvae
                elif ((IDENTIFIER <= LA63_0 <= FLOATING_POINT_LITERAL) or LA63_0 == 66 or (68 <= LA63_0 <= 69) or (72 <= LA63_0 <= 74) or (77 <= LA63_0 <= 79)) :
                    alt63 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)
                    raise nvae
                if alt63 == 1:
                    # C.g:386:4: '(' type_name ')' cast_expression
                    self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1282)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_type_name_in_cast_expression1284)
                    self.type_name()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_cast_expression1286)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_cast_expression1288)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt63 == 2:
                    # C.g:387:4: unary_expression
                    self.following.append(self.FOLLOW_unary_expression_in_cast_expression1293)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return
            # NOTE: the local name 're' shadows the module-level 'import re'
            # inside this handler (generated code).
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 39, cast_expression_StartIndex)
            pass
        return
    # $ANTLR end cast_expression
    # $ANTLR start unary_expression
    # C.g:390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );
    def unary_expression(self, ):
        """Parse the ANTLR rule 'unary_expression' (C.g line 390).

        Grammar: unary_expression : ( postfix_expression
                                    | '++' unary_expression
                                    | '--' unary_expression
                                    | unary_operator cast_expression
                                    | 'sizeof' unary_expression
                                    | 'sizeof' '(' type_name ')' );
        Auto-generated recognizer code.  Token 72 is '++', 73 is '--',
        74 is 'sizeof', 62/63 are '(' / ')'.  The 'sizeof (' ambiguity is
        resolved with the syntactic predicate synpred114.  Memoized under
        rule index 40.
        """
        unary_expression_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
                    return
                # C.g:391:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
                alt64 = 6
                LA64 = self.input.LA(1)
                if LA64 == IDENTIFIER or LA64 == HEX_LITERAL or LA64 == OCTAL_LITERAL or LA64 == DECIMAL_LITERAL or LA64 == CHARACTER_LITERAL or LA64 == STRING_LITERAL or LA64 == FLOATING_POINT_LITERAL or LA64 == 62:
                    alt64 = 1
                elif LA64 == 72:
                    alt64 = 2
                elif LA64 == 73:
                    alt64 = 3
                elif LA64 == 66 or LA64 == 68 or LA64 == 69 or LA64 == 77 or LA64 == 78 or LA64 == 79:
                    alt64 = 4
                elif LA64 == 74:
                    # 'sizeof': decide between 'sizeof unary_expression' and
                    # 'sizeof ( type_name )' via lookahead + predicate.
                    LA64_12 = self.input.LA(2)
                    if (LA64_12 == 62) :
                        LA64_13 = self.input.LA(3)
                        if (self.synpred114()) :
                            alt64 = 5
                        elif (True) :
                            alt64 = 6
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)
                            raise nvae
                    elif ((IDENTIFIER <= LA64_12 <= FLOATING_POINT_LITERAL) or LA64_12 == 66 or (68 <= LA64_12 <= 69) or (72 <= LA64_12 <= 74) or (77 <= LA64_12 <= 79)) :
                        alt64 = 5
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)
                    raise nvae
                if alt64 == 1:
                    # C.g:391:4: postfix_expression
                    self.following.append(self.FOLLOW_postfix_expression_in_unary_expression1304)
                    self.postfix_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt64 == 2:
                    # C.g:392:4: '++' unary_expression
                    self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1309)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_unary_expression_in_unary_expression1311)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt64 == 3:
                    # C.g:393:4: '--' unary_expression
                    self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1316)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_unary_expression_in_unary_expression1318)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt64 == 4:
                    # C.g:394:4: unary_operator cast_expression
                    self.following.append(self.FOLLOW_unary_operator_in_unary_expression1323)
                    self.unary_operator()
                    self.following.pop()
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_unary_expression1325)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt64 == 5:
                    # C.g:395:4: 'sizeof' unary_expression
                    self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1330)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_unary_expression_in_unary_expression1332)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt64 == 6:
                    # C.g:396:4: 'sizeof' '(' type_name ')'
                    self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1337)
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1339)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_type_name_in_unary_expression1341)
                    self.type_name()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_unary_expression1343)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 40, unary_expression_StartIndex)
            pass
        return
    # $ANTLR end unary_expression
    # $ANTLR start postfix_expression
    # C.g:399:1: postfix_expression : p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* ;
    def postfix_expression(self, ):
        """Parse the ANTLR rule 'postfix_expression' (C.g line 399).

        Grammar: primary_expression followed by any number of postfix
        operators: '[' expression ']', call forms '(...)', member access
        '.' / '->' / '*' IDENTIFIER, and '++' / '--'.

        Semantic action: accumulates the textual spelling of the callee in
        postfix_expression_stack[-1].FuncCallText and, when a call's closing
        ')' is matched (alt 2 / alt 3), records the call through
        self.StoreFunctionCalling() with its source position and argument
        text.  This is how the ECC tool collects function-call data.
        Memoized under rule index 41; the scope object is pushed on entry
        and popped in the finally block.
        """
        self.postfix_expression_stack.append(postfix_expression_scope())
        postfix_expression_StartIndex = self.input.index()
        # Labeled tokens/subrules from the grammar (a/b: ')' tokens,
        # x/y/z: member IDENTIFIERs, p: primary, c: argument list).
        a = None
        b = None
        x = None
        y = None
        z = None
        p = None
        c = None
        self.postfix_expression_stack[-1].FuncCallText = ''
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
                    return
                # C.g:406:2: (p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* )
                # C.g:406:6: p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
                self.following.append(self.FOLLOW_primary_expression_in_postfix_expression1367)
                p = self.primary_expression()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
                # C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
                while True: #loop65
                    alt65 = 10
                    LA65 = self.input.LA(1)
                    # Token ids: 66='*', 64='[', 62='(', 75='.', 76='->',
                    # 72='++', 73='--'; alt65 stays 10 (exit) if none match.
                    if LA65 == 66:
                        LA65_1 = self.input.LA(2)
                        if (LA65_1 == IDENTIFIER) :
                            LA65_30 = self.input.LA(3)
                            if (self.synpred120()) :
                                alt65 = 6
                    elif LA65 == 64:
                        alt65 = 1
                    elif LA65 == 62:
                        # NOTE: LA65 is reused here for the second lookahead token
                        # (generated code) — safe because the outer chain has matched.
                        LA65 = self.input.LA(2)
                        if LA65 == 63:
                            alt65 = 2
                        elif LA65 == 29 or LA65 == 30 or LA65 == 31 or LA65 == 32 or LA65 == 33 or LA65 == 34 or LA65 == 35 or LA65 == 36 or LA65 == 37 or LA65 == 38 or LA65 == 39 or LA65 == 40 or LA65 == 41 or LA65 == 42 or LA65 == 45 or LA65 == 46 or LA65 == 48 or LA65 == 49 or LA65 == 50 or LA65 == 51 or LA65 == 52 or LA65 == 53 or LA65 == 54 or LA65 == 55 or LA65 == 56 or LA65 == 57 or LA65 == 58 or LA65 == 59 or LA65 == 60 or LA65 == 61:
                            alt65 = 4
                        elif LA65 == IDENTIFIER:
                            LA65_55 = self.input.LA(3)
                            if (self.synpred117()) :
                                alt65 = 3
                            elif (self.synpred118()) :
                                alt65 = 4
                        elif LA65 == 66:
                            LA65_57 = self.input.LA(3)
                            if (self.synpred117()) :
                                alt65 = 3
                            elif (self.synpred118()) :
                                alt65 = 4
                        elif LA65 == HEX_LITERAL or LA65 == OCTAL_LITERAL or LA65 == DECIMAL_LITERAL or LA65 == CHARACTER_LITERAL or LA65 == STRING_LITERAL or LA65 == FLOATING_POINT_LITERAL or LA65 == 62 or LA65 == 68 or LA65 == 69 or LA65 == 72 or LA65 == 73 or LA65 == 74 or LA65 == 77 or LA65 == 78 or LA65 == 79:
                            alt65 = 3
                    elif LA65 == 75:
                        alt65 = 5
                    elif LA65 == 76:
                        alt65 = 7
                    elif LA65 == 72:
                        alt65 = 8
                    elif LA65 == 73:
                        alt65 = 9
                    if alt65 == 1:
                        # C.g:407:13: '[' expression ']'
                        self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1383)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_expression_in_postfix_expression1385)
                        self.expression()
                        self.following.pop()
                        if self.failed:
                            return
                        self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1387)
                        if self.failed:
                            return
                    elif alt65 == 2:
                        # C.g:408:13: '(' a= ')'  — call with no arguments
                        self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1401)
                        if self.failed:
                            return
                        a = self.input.LT(1)
                        self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1405)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')
                    elif alt65 == 3:
                        # C.g:409:13: '(' c= argument_expression_list b= ')'  — call with arguments
                        self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1420)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1424)
                        c = self.argument_expression_list()
                        self.following.pop()
                        if self.failed:
                            return
                        b = self.input.LT(1)
                        self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1428)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
                    elif alt65 == 4:
                        # C.g:410:13: '(' macro_parameter_list ')'
                        self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1444)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1446)
                        self.macro_parameter_list()
                        self.following.pop()
                        if self.failed:
                            return
                        self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1448)
                        if self.failed:
                            return
                    elif alt65 == 5:
                        # C.g:411:13: '.' x= IDENTIFIER
                        self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1462)
                        if self.failed:
                            return
                        x = self.input.LT(1)
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1466)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.postfix_expression_stack[-1].FuncCallText += '.' + x.text
                    elif alt65 == 6:
                        # C.g:412:13: '*' y= IDENTIFIER — note FuncCallText is RESET here,
                        # not appended to (generated from the grammar action).
                        self.match(self.input, 66, self.FOLLOW_66_in_postfix_expression1482)
                        if self.failed:
                            return
                        y = self.input.LT(1)
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1486)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.postfix_expression_stack[-1].FuncCallText = y.text
                    elif alt65 == 7:
                        # C.g:413:13: '->' z= IDENTIFIER
                        self.match(self.input, 76, self.FOLLOW_76_in_postfix_expression1502)
                        if self.failed:
                            return
                        z = self.input.LT(1)
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1506)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.postfix_expression_stack[-1].FuncCallText += '->' + z.text
                    elif alt65 == 8:
                        # C.g:414:13: '++'
                        self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1522)
                        if self.failed:
                            return
                    elif alt65 == 9:
                        # C.g:415:13: '--'
                        self.match(self.input, 73, self.FOLLOW_73_in_postfix_expression1536)
                        if self.failed:
                            return
                    else:
                        break #loop65
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 41, postfix_expression_StartIndex)
            # Always pop the scope pushed on entry, even on error paths.
            self.postfix_expression_stack.pop()
            pass
        return
    # $ANTLR end postfix_expression
# $ANTLR start macro_parameter_list
# C.g:419:1: macro_parameter_list : parameter_declaration ( ',' parameter_declaration )* ;
def macro_parameter_list(self, ):
macro_parameter_list_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
return
# C.g:420:2: ( parameter_declaration ( ',' parameter_declaration )* )
# C.g:420:4: parameter_declaration ( ',' parameter_declaration )*
self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1559)
self.parameter_declaration()
self.following.pop()
if self.failed:
return
# C.g:420:26: ( ',' parameter_declaration )*
while True: #loop66
alt66 = 2
LA66_0 = self.input.LA(1)
if (LA66_0 == 27) :
alt66 = 1
if alt66 == 1:
# C.g:420:27: ',' parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1562)
if self.failed:
return
self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1564)
self.parameter_declaration()
self.following.pop()
if self.failed:
return
else:
break #loop66
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 42, macro_parameter_list_StartIndex)
pass
return
# $ANTLR end macro_parameter_list
# $ANTLR start unary_operator
# C.g:423:1: unary_operator : ( '&' | '*' | '+' | '-' | '~' | '!' );
def unary_operator(self, ):
unary_operator_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
return
# C.g:424:2: ( '&' | '*' | '+' | '-' | '~' | '!' )
# C.g:
if self.input.LA(1) == 66 or (68 <= self.input.LA(1) <= 69) or (77 <= self.input.LA(1) <= 79):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_unary_operator0
)
raise mse
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 43, unary_operator_StartIndex)
pass
return
# $ANTLR end unary_operator
class primary_expression_return(object):
def __init__(self):
self.start = None
self.stop = None
    # $ANTLR start primary_expression
    # C.g:432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );
    def primary_expression(self, ):
        """Parse the ANTLR rule 'primary_expression' (C.g line 432).

        Grammar: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );
        Returns a primary_expression_return object whose start/stop tokens
        delimit the matched span (used by postfix_expression to build the
        callee text).  An IDENTIFIER followed by another IDENTIFIER or a
        STRING_LITERAL is routed to the 'constant' alternative, which
        handles macro-style string concatenations.  Memoized under rule
        index 44.
        """
        retval = self.primary_expression_return()
        retval.start = self.input.LT(1)
        primary_expression_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 44):
                    return retval
                # C.g:433:2: ( IDENTIFIER | constant | '(' expression ')' )
                alt67 = 3
                LA67 = self.input.LA(1)
                if LA67 == IDENTIFIER:
                    LA67_1 = self.input.LA(2)
                    if (LA67_1 == EOF or LA67_1 == 25 or (27 <= LA67_1 <= 28) or LA67_1 == 44 or LA67_1 == 47 or LA67_1 == 53 or (62 <= LA67_1 <= 66) or (68 <= LA67_1 <= 73) or (75 <= LA67_1 <= 77) or (80 <= LA67_1 <= 102)) :
                        alt67 = 1
                    elif (LA67_1 == IDENTIFIER or LA67_1 == STRING_LITERAL) :
                        alt67 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 1, self.input)
                        raise nvae
                elif LA67 == HEX_LITERAL or LA67 == OCTAL_LITERAL or LA67 == DECIMAL_LITERAL or LA67 == CHARACTER_LITERAL or LA67 == STRING_LITERAL or LA67 == FLOATING_POINT_LITERAL:
                    alt67 = 2
                elif LA67 == 62:
                    alt67 = 3
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 0, self.input)
                    raise nvae
                if alt67 == 1:
                    # C.g:433:4: IDENTIFIER
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_primary_expression1613)
                    if self.failed:
                        return retval
                elif alt67 == 2:
                    # C.g:434:4: constant
                    self.following.append(self.FOLLOW_constant_in_primary_expression1618)
                    self.constant()
                    self.following.pop()
                    if self.failed:
                        return retval
                elif alt67 == 3:
                    # C.g:435:4: '(' expression ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_primary_expression1623)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_expression_in_primary_expression1625)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return retval
                    self.match(self.input, 63, self.FOLLOW_63_in_primary_expression1627)
                    if self.failed:
                        return retval
                # Record the last consumed token as the end of the matched span.
                retval.stop = self.input.LT(-1)
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 44, primary_expression_StartIndex)
            pass
        return retval
    # $ANTLR end primary_expression
    # $ANTLR start constant
    # C.g:438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );
    def constant(self, ):
        """Parse the ANTLR rule 'constant' (C.g line 438).

        Grammar: HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL
               | CHARACTER_LITERAL
               | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )*
               | FLOATING_POINT_LITERAL ;
        Alternative 5 accepts mixed runs of IDENTIFIERs and string literals
        (adjacent string concatenation, possibly interleaved with macro-like
        identifiers), driven by the synpred137/synpred138 predicates.
        Memoized under rule index 45.
        """
        constant_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45):
                    return
                # C.g:439:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL )
                alt72 = 6
                LA72 = self.input.LA(1)
                if LA72 == HEX_LITERAL:
                    alt72 = 1
                elif LA72 == OCTAL_LITERAL:
                    alt72 = 2
                elif LA72 == DECIMAL_LITERAL:
                    alt72 = 3
                elif LA72 == CHARACTER_LITERAL:
                    alt72 = 4
                elif LA72 == IDENTIFIER or LA72 == STRING_LITERAL:
                    alt72 = 5
                elif LA72 == FLOATING_POINT_LITERAL:
                    alt72 = 6
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input)
                    raise nvae
                if alt72 == 1:
                    # C.g:439:9: HEX_LITERAL
                    self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1643)
                    if self.failed:
                        return
                elif alt72 == 2:
                    # C.g:440:9: OCTAL_LITERAL
                    self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1653)
                    if self.failed:
                        return
                elif alt72 == 3:
                    # C.g:441:9: DECIMAL_LITERAL
                    self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1663)
                    if self.failed:
                        return
                elif alt72 == 4:
                    # C.g:442:7: CHARACTER_LITERAL
                    self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1671)
                    if self.failed:
                        return
                elif alt72 == 5:
                    # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )*
                    # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+
                    # loop70: one-or-more groups of (IDENTIFIER* STRING_LITERAL+).
                    cnt70 = 0
                    while True: #loop70
                        alt70 = 2
                        LA70_0 = self.input.LA(1)
                        if (LA70_0 == IDENTIFIER) :
                            LA70_1 = self.input.LA(2)
                            if (LA70_1 == STRING_LITERAL) :
                                alt70 = 1
                            elif (LA70_1 == IDENTIFIER) :
                                LA70_33 = self.input.LA(3)
                                if (self.synpred138()) :
                                    alt70 = 1
                        elif (LA70_0 == STRING_LITERAL) :
                            alt70 = 1
                        if alt70 == 1:
                            # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
                            # C.g:443:8: ( IDENTIFIER )*
                            while True: #loop68
                                alt68 = 2
                                LA68_0 = self.input.LA(1)
                                if (LA68_0 == IDENTIFIER) :
                                    alt68 = 1
                                if alt68 == 1:
                                    # C.g:0:0: IDENTIFIER
                                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1680)
                                    if self.failed:
                                        return
                                else:
                                    break #loop68
                            # C.g:443:20: ( STRING_LITERAL )+
                            # loop69: at least one STRING_LITERAL must follow.
                            cnt69 = 0
                            while True: #loop69
                                alt69 = 2
                                LA69_0 = self.input.LA(1)
                                if (LA69_0 == STRING_LITERAL) :
                                    LA69_31 = self.input.LA(2)
                                    if (self.synpred137()) :
                                        alt69 = 1
                                if alt69 == 1:
                                    # C.g:0:0: STRING_LITERAL
                                    self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1683)
                                    if self.failed:
                                        return
                                else:
                                    if cnt69 >= 1:
                                        break #loop69
                                    if self.backtracking > 0:
                                        self.failed = True
                                        return
                                    eee = EarlyExitException(69, self.input)
                                    raise eee
                                cnt69 += 1
                        else:
                            if cnt70 >= 1:
                                break #loop70
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            eee = EarlyExitException(70, self.input)
                            raise eee
                        cnt70 += 1
                    # C.g:443:38: ( IDENTIFIER )*
                    # Trailing identifiers after the last string group.
                    while True: #loop71
                        alt71 = 2
                        LA71_0 = self.input.LA(1)
                        if (LA71_0 == IDENTIFIER) :
                            alt71 = 1
                        if alt71 == 1:
                            # C.g:0:0: IDENTIFIER
                            self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1688)
                            if self.failed:
                                return
                        else:
                            break #loop71
                elif alt72 == 6:
                    # C.g:444:9: FLOATING_POINT_LITERAL
                    self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1699)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 45, constant_StartIndex)
            pass
        return
    # $ANTLR end constant
class expression_return(object):
def __init__(self):
self.start = None
self.stop = None
# $ANTLR start expression
# C.g:449:1: expression : assignment_expression ( ',' assignment_expression )* ;
def expression(self, ):
retval = self.expression_return()
retval.start = self.input.LT(1)
expression_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 46):
return retval
# C.g:450:2: ( assignment_expression ( ',' assignment_expression )* )
# C.g:450:4: assignment_expression ( ',' assignment_expression )*
self.following.append(self.FOLLOW_assignment_expression_in_expression1715)
self.assignment_expression()
self.following.pop()
if self.failed:
return retval
# C.g:450:26: ( ',' assignment_expression )*
while True: #loop73
alt73 = 2
LA73_0 = self.input.LA(1)
if (LA73_0 == 27) :
alt73 = 1
if alt73 == 1:
# C.g:450:27: ',' assignment_expression
self.match(self.input, 27, self.FOLLOW_27_in_expression1718)
if self.failed:
return retval
self.following.append(self.FOLLOW_assignment_expression_in_expression1720)
self.assignment_expression()
self.following.pop()
if self.failed:
return retval
else:
break #loop73
retval.stop = self.input.LT(-1)
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 46, expression_StartIndex)
pass
return retval
# $ANTLR end expression
# $ANTLR start constant_expression
# C.g:453:1: constant_expression : conditional_expression ;
def constant_expression(self, ):
constant_expression_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47):
return
# C.g:454:2: ( conditional_expression )
# C.g:454:4: conditional_expression
self.following.append(self.FOLLOW_conditional_expression_in_constant_expression1733)
self.conditional_expression()
self.following.pop()
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 47, constant_expression_StartIndex)
pass
return
# $ANTLR end constant_expression
# $ANTLR start assignment_expression
# C.g:457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );
def assignment_expression(self, ):
assignment_expression_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48):
return
# C.g:458:2: ( lvalue assignment_operator assignment_expression | conditional_expression )
alt74 = 2
LA74 = self.input.LA(1)
if LA74 == IDENTIFIER:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_13 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input)
raise nvae
elif LA74 == 62:
LA74_14 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input)
raise nvae
elif LA74 == 75:
LA74_15 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input)
raise nvae
elif LA74 == 66:
LA74_16 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input)
raise nvae
elif LA74 == 76:
LA74_17 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input)
raise nvae
elif LA74 == 72:
LA74_18 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input)
raise nvae
elif LA74 == 73:
LA74_19 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input)
raise nvae
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
elif LA74 == STRING_LITERAL:
LA74_21 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input)
raise nvae
elif LA74 == IDENTIFIER:
LA74_22 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 22, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_44 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input)
raise nvae
elif LA74 == 62:
LA74_45 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input)
raise nvae
elif LA74 == 75:
LA74_46 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input)
raise nvae
elif LA74 == 66:
LA74_47 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 47, self.input)
raise nvae
elif LA74 == 76:
LA74_48 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input)
raise nvae
elif LA74 == 72:
LA74_49 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input)
raise nvae
elif LA74 == 73:
LA74_50 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_73 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input)
raise nvae
elif LA74 == 62:
LA74_74 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input)
raise nvae
elif LA74 == 75:
LA74_75 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input)
raise nvae
elif LA74 == 66:
LA74_76 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input)
raise nvae
elif LA74 == 76:
LA74_77 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input)
raise nvae
elif LA74 == 72:
LA74_78 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input)
raise nvae
elif LA74 == 73:
LA74_79 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input)
raise nvae
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_102 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input)
raise nvae
elif LA74 == 62:
LA74_103 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input)
raise nvae
elif LA74 == 75:
LA74_104 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input)
raise nvae
elif LA74 == 66:
LA74_105 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input)
raise nvae
elif LA74 == 76:
LA74_106 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input)
raise nvae
elif LA74 == 72:
LA74_107 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input)
raise nvae
elif LA74 == 73:
LA74_108 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_131 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input)
raise nvae
elif LA74 == 62:
LA74_132 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input)
raise nvae
elif LA74 == 75:
LA74_133 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input)
raise nvae
elif LA74 == 66:
LA74_134 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input)
raise nvae
elif LA74 == 76:
LA74_135 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input)
raise nvae
elif LA74 == 72:
LA74_136 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input)
raise nvae
elif LA74 == 73:
LA74_137 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_160 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input)
raise nvae
elif LA74 == 64:
LA74_161 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input)
raise nvae
elif LA74 == 62:
LA74_162 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input)
raise nvae
elif LA74 == 75:
LA74_163 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input)
raise nvae
elif LA74 == 66:
LA74_164 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input)
raise nvae
elif LA74 == 76:
LA74_165 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input)
raise nvae
elif LA74 == 72:
LA74_166 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input)
raise nvae
elif LA74 == 73:
LA74_167 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == STRING_LITERAL:
LA74_189 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 189, self.input)
raise nvae
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_191 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input)
raise nvae
elif LA74 == 62:
LA74_192 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input)
raise nvae
elif LA74 == 75:
LA74_193 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input)
raise nvae
elif LA74 == 66:
LA74_194 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input)
raise nvae
elif LA74 == 76:
LA74_195 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input)
raise nvae
elif LA74 == 72:
LA74_196 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input)
raise nvae
elif LA74 == 73:
LA74_197 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input)
raise nvae
elif LA74 == 62:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_220 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_221 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_222 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_223 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_224 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_225 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_226 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input)
raise nvae
elif LA74 == 62:
LA74_227 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input)
raise nvae
elif LA74 == 72:
LA74_228 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input)
raise nvae
elif LA74 == 73:
LA74_229 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_230 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input)
raise nvae
elif LA74 == 74:
LA74_231 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input)
raise nvae
elif LA74 == 34 or LA74 == 35 or LA74 == 36 or LA74 == 37 or LA74 == 38 or LA74 == 39 or LA74 == 40 or LA74 == 41 or LA74 == 42 or LA74 == 45 or LA74 == 46 or LA74 == 48 or LA74 == 49 or LA74 == 50 or LA74 == 51 or LA74 == 52 or LA74 == 53 or LA74 == 54 or LA74 == 55 or LA74 == 56 or LA74 == 57 or LA74 == 58 or LA74 == 59 or LA74 == 60 or LA74 == 61:
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input)
raise nvae
elif LA74 == 72:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_244 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_245 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_246 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_247 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_248 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_249 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_250 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input)
raise nvae
elif LA74 == 62:
LA74_251 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input)
raise nvae
elif LA74 == 72:
LA74_252 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input)
raise nvae
elif LA74 == 73:
LA74_253 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_254 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input)
raise nvae
elif LA74 == 74:
LA74_255 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input)
raise nvae
elif LA74 == 73:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_256 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_257 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_258 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_259 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_260 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_261 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_262 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input)
raise nvae
elif LA74 == 62:
LA74_263 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input)
raise nvae
elif LA74 == 72:
LA74_264 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input)
raise nvae
elif LA74 == 73:
LA74_265 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_266 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input)
raise nvae
elif LA74 == 74:
LA74_267 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74 = self.input.LA(2)
if LA74 == 62:
LA74_268 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input)
raise nvae
elif LA74 == IDENTIFIER:
LA74_269 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_270 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 270, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_271 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_272 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_273 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_274 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_275 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input)
raise nvae
elif LA74 == 72:
LA74_276 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input)
raise nvae
elif LA74 == 73:
LA74_277 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_278 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input)
raise nvae
elif LA74 == 74:
LA74_279 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input)
raise nvae
elif LA74 == 74:
LA74 = self.input.LA(2)
if LA74 == 62:
LA74_280 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input)
raise nvae
elif LA74 == IDENTIFIER:
LA74_281 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_282 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_283 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_284 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_285 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 285, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_286 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_287 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input)
raise nvae
elif LA74 == 72:
LA74_288 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input)
raise nvae
elif LA74 == 73:
LA74_289 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_290 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, self.input)
raise nvae
elif LA74 == 74:
LA74_291 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input)
raise nvae
if alt74 == 1:
# C.g:458:4: lvalue assignment_operator assignment_expression
self.following.append(self.FOLLOW_lvalue_in_assignment_expression1744)
self.lvalue()
self.following.pop()
if self.failed:
return
self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1746)
self.assignment_operator()
self.following.pop()
if self.failed:
return
self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1748)
self.assignment_expression()
self.following.pop()
if self.failed:
return
elif alt74 == 2:
# C.g:459:4: conditional_expression
self.following.append(self.FOLLOW_conditional_expression_in_assignment_expression1753)
self.conditional_expression()
self.following.pop()
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 48, assignment_expression_StartIndex)
pass
return
# $ANTLR end assignment_expression
# $ANTLR start lvalue
# C.g:462:1: lvalue : unary_expression ;
def lvalue(self, ):
    """Parse rule 'lvalue : unary_expression ;' (C.g:462)."""
    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49):
                return

            # C.g:463:4: unary_expression
            self.following.append(self.FOLLOW_unary_expression_in_lvalue1765)
            self.unary_expression()
            self.following.pop()
            if self.failed:
                return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 49, start_index)

    return
# $ANTLR end lvalue
# $ANTLR start assignment_operator
# C.g:466:1: assignment_operator : ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' );
def assignment_operator(self, ):
    """Parse rule 'assignment_operator' (C.g:466): one of '=', '*=', '/=',
    '%=', '+=', '-=', '<<=', '>>=', '&=', '^=', '|='.
    """
    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50):
                return

            # Token 28 is '='; tokens 80..89 are the compound assignment operators.
            lookahead = self.input.LA(1)
            if lookahead == 28 or 80 <= lookahead <= 89:
                self.input.consume()
                self.errorRecovery = False
                self.failed = False
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_assignment_operator0
                )
                raise mse

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 50, start_index)

    return
# $ANTLR end assignment_operator
# $ANTLR start conditional_expression
# C.g:480:1: conditional_expression : e= logical_or_expression ( '?' expression ':' conditional_expression )? ;
def conditional_expression(self, ):
    """Parse rule 'conditional_expression : e= logical_or_expression
    ( '?' expression ':' conditional_expression )? ;' (C.g:480).

    When a ternary '?:' tail is matched (and not backtracking), the full
    condition expression is recorded via StorePredicateExpression so the
    ECC predicate-expression checks can inspect it later.
    """
    conditional_expression_StartIndex = self.input.index()
    # Return scope of the logical_or_expression operand (start/stop tokens).
    e = None
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51):
                return
            # C.g:481:2: (e= logical_or_expression ( '?' expression ':' conditional_expression )? )
            # C.g:481:4: e= logical_or_expression ( '?' expression ':' conditional_expression )?
            self.following.append(self.FOLLOW_logical_or_expression_in_conditional_expression1839)
            e = self.logical_or_expression()
            self.following.pop()
            if self.failed:
                return
            # C.g:481:28: ( '?' expression ':' conditional_expression )?
            # Optional ternary tail; token 90 is '?'.
            alt75 = 2
            LA75_0 = self.input.LA(1)
            if (LA75_0 == 90) :
                alt75 = 1
            if alt75 == 1:
                # C.g:481:29: '?' expression ':' conditional_expression
                self.match(self.input, 90, self.FOLLOW_90_in_conditional_expression1842)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_conditional_expression1844)
                self.expression()
                self.following.pop()
                if self.failed:
                    return
                # Token 47 is ':'.
                self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1846)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1848)
                self.conditional_expression()
                self.following.pop()
                if self.failed:
                    return
                # Record the predicate's source span only on the real parse pass.
                if self.backtracking == 0:
                    self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 51, conditional_expression_StartIndex)
        pass
    return
# $ANTLR end conditional_expression
class logical_or_expression_return(object):
    """Rule-return scope for logical_or_expression.

    Holds the first and last tokens matched by the rule so the caller can
    recover the expression's source text and position.
    """

    def __init__(self):
        # Both boundaries are filled in by logical_or_expression().
        self.start = self.stop = None
# $ANTLR start logical_or_expression
# C.g:484:1: logical_or_expression : logical_and_expression ( '||' logical_and_expression )* ;
def logical_or_expression(self, ):
    """Parse rule 'logical_or_expression : logical_and_expression
    ( '||' logical_and_expression )* ;' (C.g:484).

    Returns a logical_or_expression_return scope carrying the start/stop
    tokens of the matched expression (stop stays None on early exit).
    """
    retval = self.logical_or_expression_return()
    retval.start = self.input.LT(1)

    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 52):
                return retval

            self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1863)
            self.logical_and_expression()
            self.following.pop()
            if self.failed:
                return retval

            # Zero or more "'||' logical_and_expression" (token 91 is '||').
            while self.input.LA(1) == 91:
                self.match(self.input, 91, self.FOLLOW_91_in_logical_or_expression1866)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1868)
                self.logical_and_expression()
                self.following.pop()
                if self.failed:
                    return retval

            retval.stop = self.input.LT(-1)

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 52, start_index)

    return retval
# $ANTLR end logical_or_expression
# $ANTLR start logical_and_expression
# C.g:488:1: logical_and_expression : inclusive_or_expression ( '&&' inclusive_or_expression )* ;
def logical_and_expression(self, ):
    """Parse rule 'logical_and_expression : inclusive_or_expression
    ( '&&' inclusive_or_expression )* ;' (C.g:488).
    """
    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53):
                return

            self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1881)
            self.inclusive_or_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'&&' inclusive_or_expression" (token 92 is '&&').
            while self.input.LA(1) == 92:
                self.match(self.input, 92, self.FOLLOW_92_in_logical_and_expression1884)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1886)
                self.inclusive_or_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 53, start_index)

    return
# $ANTLR end logical_and_expression
# $ANTLR start inclusive_or_expression
# C.g:492:1: inclusive_or_expression : exclusive_or_expression ( '|' exclusive_or_expression )* ;
def inclusive_or_expression(self, ):
    """Parse rule 'inclusive_or_expression : exclusive_or_expression
    ( '|' exclusive_or_expression )* ;' (C.g:492).
    """
    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54):
                return

            self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899)
            self.exclusive_or_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'|' exclusive_or_expression" (token 93 is '|').
            while self.input.LA(1) == 93:
                self.match(self.input, 93, self.FOLLOW_93_in_inclusive_or_expression1902)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904)
                self.exclusive_or_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 54, start_index)

    return
# $ANTLR end inclusive_or_expression
# $ANTLR start exclusive_or_expression
# C.g:496:1: exclusive_or_expression : and_expression ( '^' and_expression )* ;
def exclusive_or_expression(self, ):
    """Parse rule 'exclusive_or_expression : and_expression
    ( '^' and_expression )* ;' (C.g:496).
    """
    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55):
                return

            self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1917)
            self.and_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'^' and_expression" (token 94 is '^').
            while self.input.LA(1) == 94:
                self.match(self.input, 94, self.FOLLOW_94_in_exclusive_or_expression1920)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1922)
                self.and_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 55, start_index)

    return
# $ANTLR end exclusive_or_expression
# $ANTLR start and_expression
# C.g:500:1: and_expression : equality_expression ( '&' equality_expression )* ;
def and_expression(self, ):
    """Parse rule 'and_expression : equality_expression
    ( '&' equality_expression )* ;' (C.g:500).
    """
    start_index = self.input.index()

    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56):
                return

            self.following.append(self.FOLLOW_equality_expression_in_and_expression1935)
            self.equality_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'&' equality_expression" (token 77 is '&').
            while self.input.LA(1) == 77:
                self.match(self.input, 77, self.FOLLOW_77_in_and_expression1938)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_equality_expression_in_and_expression1940)
                self.equality_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 56, start_index)

    return
# $ANTLR end and_expression
# $ANTLR start equality_expression
# C.g:503:1: equality_expression : relational_expression ( ( '==' | '!=' ) relational_expression )* ;
def equality_expression(self, ):
    """Parse rule 'equality_expression : relational_expression
    ( ( '==' | '!=' ) relational_expression )* ;' (C.g:503).
    """
    equality_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57):
                return
            # C.g:504:2: ( relational_expression ( ( '==' | '!=' ) relational_expression )* )
            # C.g:504:4: relational_expression ( ( '==' | '!=' ) relational_expression )*
            self.following.append(self.FOLLOW_relational_expression_in_equality_expression1952)
            self.relational_expression()
            self.following.pop()
            if self.failed:
                return
            # C.g:504:26: ( ( '==' | '!=' ) relational_expression )*
            # Tokens 95..96 are '==' and '!='.
            while True: #loop81
                alt81 = 2
                LA81_0 = self.input.LA(1)
                if ((95 <= LA81_0 <= 96)) :
                    alt81 = 1
                if alt81 == 1:
                    # C.g:504:27: ( '==' | '!=' ) relational_expression
                    # Consume either operator; on mismatch, fail silently while
                    # backtracking, otherwise attempt set-based error recovery.
                    if (95 <= self.input.LA(1) <= 96):
                        self.input.consume();
                        self.errorRecovery = False
                        self.failed = False
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        mse = MismatchedSetException(None, self.input)
                        self.recoverFromMismatchedSet(
                            self.input, mse, self.FOLLOW_set_in_equality_expression1955
                        )
                        raise mse
                    self.following.append(self.FOLLOW_relational_expression_in_equality_expression1961)
                    self.relational_expression()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop81

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 57, equality_expression_StartIndex)
        pass
    return
# $ANTLR end equality_expression
# $ANTLR start relational_expression
# C.g:507:1: relational_expression : shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ;
def relational_expression(self, ):
    """Parse rule 'relational_expression : shift_expression
    ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ;' (C.g:507).
    """
    relational_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58):
                return
            # C.g:508:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* )
            # C.g:508:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
            self.following.append(self.FOLLOW_shift_expression_in_relational_expression1975)
            self.shift_expression()
            self.following.pop()
            if self.failed:
                return
            # C.g:508:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
            # Tokens 97..100 are '<', '>', '<=' and '>='.
            while True: #loop82
                alt82 = 2
                LA82_0 = self.input.LA(1)
                if ((97 <= LA82_0 <= 100)) :
                    alt82 = 1
                if alt82 == 1:
                    # C.g:508:22: ( '<' | '>' | '<=' | '>=' ) shift_expression
                    # Consume any relational operator; on mismatch, fail silently
                    # while backtracking, otherwise attempt set-based recovery.
                    if (97 <= self.input.LA(1) <= 100):
                        self.input.consume();
                        self.errorRecovery = False
                        self.failed = False
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        mse = MismatchedSetException(None, self.input)
                        self.recoverFromMismatchedSet(
                            self.input, mse, self.FOLLOW_set_in_relational_expression1978
                        )
                        raise mse
                    self.following.append(self.FOLLOW_shift_expression_in_relational_expression1988)
                    self.shift_expression()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop82

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 58, relational_expression_StartIndex)
        pass
    return
# $ANTLR end relational_expression
# $ANTLR start shift_expression
# C.g:511:1: shift_expression : additive_expression ( ( '<<' | '>>' ) additive_expression )* ;
def shift_expression(self, ):
    """Parse rule 'shift_expression : additive_expression
    ( ( '<<' | '>>' ) additive_expression )* ;' (C.g:511).
    """
    shift_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59):
                return
            # C.g:512:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* )
            # C.g:512:4: additive_expression ( ( '<<' | '>>' ) additive_expression )*
            self.following.append(self.FOLLOW_additive_expression_in_shift_expression2001)
            self.additive_expression()
            self.following.pop()
            if self.failed:
                return
            # C.g:512:24: ( ( '<<' | '>>' ) additive_expression )*
            # Tokens 101..102 are '<<' and '>>'.
            while True: #loop83
                alt83 = 2
                LA83_0 = self.input.LA(1)
                if ((101 <= LA83_0 <= 102)) :
                    alt83 = 1
                if alt83 == 1:
                    # C.g:512:25: ( '<<' | '>>' ) additive_expression
                    # Consume either shift operator; on mismatch, fail silently
                    # while backtracking, otherwise attempt set-based recovery.
                    if (101 <= self.input.LA(1) <= 102):
                        self.input.consume();
                        self.errorRecovery = False
                        self.failed = False
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        mse = MismatchedSetException(None, self.input)
                        self.recoverFromMismatchedSet(
                            self.input, mse, self.FOLLOW_set_in_shift_expression2004
                        )
                        raise mse
                    self.following.append(self.FOLLOW_additive_expression_in_shift_expression2010)
                    self.additive_expression()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop83

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 59, shift_expression_StartIndex)
        pass
    return
# $ANTLR end shift_expression
# $ANTLR start statement
# C.g:517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );
def statement(self, ):
    """Parse rule 'statement' (C.g:517), dispatching to one of eleven
    alternatives: labeled_statement (1), compound_statement (2),
    expression_statement (3), selection_statement (4),
    iteration_statement (5), jump_statement (6), macro_statement (7),
    asm2_statement (8), asm1_statement (9), asm_statement (10),
    declaration (11).

    The alternative is chosen by an LL(*) lookahead decision; inputs that
    begin with IDENTIFIER are disambiguated with syntactic predicates
    (synpred169/173/174), which parse speculatively while backtracking.
    """
    statement_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60):
                return
            # C.g:518:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration )
            # Decision: pick alt84 in 1..11 from up to three tokens of lookahead.
            alt84 = 11
            LA84 = self.input.LA(1)
            if LA84 == IDENTIFIER:
                # IDENTIFIER can start several alternatives; look further ahead.
                LA84 = self.input.LA(2)
                if LA84 == 62:
                    # IDENTIFIER '(' — could be an expression, macro, or asm2
                    # statement; resolve with syntactic predicates.
                    LA84_43 = self.input.LA(3)
                    if (self.synpred169()) :
                        alt84 = 3
                    elif (self.synpred173()) :
                        alt84 = 7
                    elif (self.synpred174()) :
                        alt84 = 8
                    elif (True) :
                        alt84 = 11
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 43, self.input)
                        raise nvae
                elif LA84 == 47:
                    # IDENTIFIER ':' — a label.
                    alt84 = 1
                elif LA84 == STRING_LITERAL or LA84 == 27 or LA84 == 28 or LA84 == 64 or LA84 == 68 or LA84 == 69 or LA84 == 70 or LA84 == 71 or LA84 == 72 or LA84 == 73 or LA84 == 75 or LA84 == 76 or LA84 == 77 or LA84 == 80 or LA84 == 81 or LA84 == 82 or LA84 == 83 or LA84 == 84 or LA84 == 85 or LA84 == 86 or LA84 == 87 or LA84 == 88 or LA84 == 89 or LA84 == 90 or LA84 == 91 or LA84 == 92 or LA84 == 93 or LA84 == 94 or LA84 == 95 or LA84 == 96 or LA84 == 97 or LA84 == 98 or LA84 == 99 or LA84 == 100 or LA84 == 101 or LA84 == 102:
                    # IDENTIFIER followed by an operator — expression statement.
                    alt84 = 3
                elif LA84 == 66:
                    LA84_47 = self.input.LA(3)
                    if (self.synpred169()) :
                        alt84 = 3
                    elif (True) :
                        alt84 = 11
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input)
                        raise nvae
                elif LA84 == IDENTIFIER:
                    LA84_53 = self.input.LA(3)
                    if (self.synpred169()) :
                        alt84 = 3
                    elif (True) :
                        alt84 = 11
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 53, self.input)
                        raise nvae
                elif LA84 == 25:
                    LA84_68 = self.input.LA(3)
                    if (self.synpred169()) :
                        alt84 = 3
                    elif (True) :
                        alt84 = 11
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 68, self.input)
                        raise nvae
                elif LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60 or LA84 == 61:
                    alt84 = 11
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input)
                    raise nvae
            elif LA84 == 106 or LA84 == 107:
                alt84 = 1
            elif LA84 == 43:
                # '{' opens a compound statement.
                alt84 = 2
            elif LA84 == HEX_LITERAL or LA84 == OCTAL_LITERAL or LA84 == DECIMAL_LITERAL or LA84 == CHARACTER_LITERAL or LA84 == STRING_LITERAL or LA84 == FLOATING_POINT_LITERAL or LA84 == 25 or LA84 == 62 or LA84 == 66 or LA84 == 68 or LA84 == 69 or LA84 == 72 or LA84 == 73 or LA84 == 74 or LA84 == 77 or LA84 == 78 or LA84 == 79:
                alt84 = 3
            elif LA84 == 108 or LA84 == 110:
                alt84 = 4
            elif LA84 == 111 or LA84 == 112 or LA84 == 113:
                alt84 = 5
            elif LA84 == 114 or LA84 == 115 or LA84 == 116 or LA84 == 117:
                alt84 = 6
            elif LA84 == 103:
                alt84 = 8
            elif LA84 == 104:
                alt84 = 9
            elif LA84 == 105:
                alt84 = 10
            elif LA84 == 26 or LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60 or LA84 == 61:
                alt84 = 11
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input)
                raise nvae
            # Dispatch to the chosen alternative.
            if alt84 == 1:
                # C.g:518:4: labeled_statement
                self.following.append(self.FOLLOW_labeled_statement_in_statement2025)
                self.labeled_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 2:
                # C.g:519:4: compound_statement
                self.following.append(self.FOLLOW_compound_statement_in_statement2030)
                self.compound_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 3:
                # C.g:520:4: expression_statement
                self.following.append(self.FOLLOW_expression_statement_in_statement2035)
                self.expression_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 4:
                # C.g:521:4: selection_statement
                self.following.append(self.FOLLOW_selection_statement_in_statement2040)
                self.selection_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 5:
                # C.g:522:4: iteration_statement
                self.following.append(self.FOLLOW_iteration_statement_in_statement2045)
                self.iteration_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 6:
                # C.g:523:4: jump_statement
                self.following.append(self.FOLLOW_jump_statement_in_statement2050)
                self.jump_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 7:
                # C.g:524:4: macro_statement
                self.following.append(self.FOLLOW_macro_statement_in_statement2055)
                self.macro_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 8:
                # C.g:525:4: asm2_statement
                self.following.append(self.FOLLOW_asm2_statement_in_statement2060)
                self.asm2_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 9:
                # C.g:526:4: asm1_statement
                self.following.append(self.FOLLOW_asm1_statement_in_statement2065)
                self.asm1_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 10:
                # C.g:527:4: asm_statement
                self.following.append(self.FOLLOW_asm_statement_in_statement2070)
                self.asm_statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt84 == 11:
                # C.g:528:4: declaration
                self.following.append(self.FOLLOW_declaration_in_statement2075)
                self.declaration()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 60, statement_StartIndex)
        pass
    return
# $ANTLR end statement
# $ANTLR start asm2_statement
# C.g:531:1: asm2_statement : ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' ;
def asm2_statement(self, ):
    """Parse rule 'asm2_statement : ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' ;' (C.g:531).

    Matches GCC-style inline assembly written as an identifier call whose
    argument text is consumed as raw tokens up to the closing ')' ';'.
    """
    asm2_statement_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
                return
            # C.g:532:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
            # Optional '__asm__' keyword (token 103).
            alt85 = 2
            LA85_0 = self.input.LA(1)
            if (LA85_0 == 103) :
                alt85 = 1
            if alt85 == 1:
                # C.g:0:0: '__asm__'
                self.match(self.input, 103, self.FOLLOW_103_in_asm2_statement2086)
                if self.failed:
                    return
            self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2089)
            if self.failed:
                return
            # Token 62 is '('.
            self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2091)
            if self.failed:
                return
            # C.g:532:30: (~ ( ';' ) )*
            # Consume any token except ';' (token 25); a ')' (token 63) is
            # consumed here only when it is not immediately followed by ';',
            # so the final ')' ';' pair terminates the loop.
            while True: #loop86
                alt86 = 2
                LA86_0 = self.input.LA(1)
                if (LA86_0 == 63) :
                    LA86_1 = self.input.LA(2)
                    if ((IDENTIFIER <= LA86_1 <= LINE_COMMAND) or (26 <= LA86_1 <= 117)) :
                        alt86 = 1
                elif ((IDENTIFIER <= LA86_0 <= LINE_COMMAND) or (26 <= LA86_0 <= 62) or (64 <= LA86_0 <= 117)) :
                    alt86 = 1
                if alt86 == 1:
                    # C.g:532:31: ~ ( ';' )
                    if (IDENTIFIER <= self.input.LA(1) <= LINE_COMMAND) or (26 <= self.input.LA(1) <= 117):
                        self.input.consume();
                        self.errorRecovery = False
                        self.failed = False
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        mse = MismatchedSetException(None, self.input)
                        self.recoverFromMismatchedSet(
                            self.input, mse, self.FOLLOW_set_in_asm2_statement2094
                        )
                        raise mse
                else:
                    break #loop86
            # Token 63 is ')'.
            self.match(self.input, 63, self.FOLLOW_63_in_asm2_statement2101)
            if self.failed:
                return
            # Token 25 is ';'.
            self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2103)
            if self.failed:
                return

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 61, asm2_statement_StartIndex)
        pass
    return
# $ANTLR end asm2_statement
# $ANTLR start asm1_statement
# C.g:535:1: asm1_statement : '_asm' '{' (~ ( '}' ) )* '}' ;
def asm1_statement(self, ):
asm1_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
return
# C.g:536:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
# C.g:536:4: '_asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 104, self.FOLLOW_104_in_asm1_statement2115)
if self.failed:
return
self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2117)
if self.failed:
return
# C.g:536:15: (~ ( '}' ) )*
while True: #loop87
alt87 = 2
LA87_0 = self.input.LA(1)
if ((IDENTIFIER <= LA87_0 <= 43) or (45 <= LA87_0 <= 117)) :
alt87 = 1
if alt87 == 1:
# C.g:536:16: ~ ( '}' )
if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_asm1_statement2120
)
raise mse
else:
break #loop87
self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2127)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 62, asm1_statement_StartIndex)
pass
return
# $ANTLR end asm1_statement
# $ANTLR start asm_statement
# C.g:539:1: asm_statement : '__asm' '{' (~ ( '}' ) )* '}' ;
def asm_statement(self, ):
asm_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
return
# C.g:540:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
# C.g:540:4: '__asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 105, self.FOLLOW_105_in_asm_statement2138)
if self.failed:
return
self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2140)
if self.failed:
return
# C.g:540:16: (~ ( '}' ) )*
while True: #loop88
alt88 = 2
LA88_0 = self.input.LA(1)
if ((IDENTIFIER <= LA88_0 <= 43) or (45 <= LA88_0 <= 117)) :
alt88 = 1
if alt88 == 1:
# C.g:540:17: ~ ( '}' )
if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_asm_statement2143
)
raise mse
else:
break #loop88
self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2150)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 63, asm_statement_StartIndex)
pass
return
# $ANTLR end asm_statement
# $ANTLR start macro_statement
# C.g:543:1: macro_statement : IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' ;
def macro_statement(self, ):
macro_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
return
# C.g:544:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' )
# C.g:544:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')'
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2162)
if self.failed:
return
self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2164)
if self.failed:
return
# C.g:544:19: ( declaration )*
while True: #loop89
alt89 = 2
LA89 = self.input.LA(1)
if LA89 == IDENTIFIER:
LA89 = self.input.LA(2)
if LA89 == 62:
LA89_45 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_47 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 66:
LA89_50 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_68 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_71 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_72 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_73 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_74 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_75 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_76 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_77 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_78 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_79 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_80 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_81 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_82 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_83 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_84 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_85 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_86 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 26:
LA89 = self.input.LA(2)
if LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_87 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_88 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_89 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_90 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_91 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_92 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_93 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_94 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_95 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_96 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_97 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_98 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_99 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_100 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 66:
LA89_101 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_102 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_103 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_104 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_105 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_106 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_107 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_108 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_109 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_110 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_111 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_112 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_113 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_114 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_115 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_116 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_117 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_118 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_119 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_120 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_121 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_122 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_123 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_124 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_125 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_126 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_127 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_128 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_129 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_130 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_131 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_132 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_133 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_134 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_135 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_136 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_137 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_138 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_139 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_140 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_141 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_142 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_143 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_144 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_145 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_146 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_147 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_148 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_149 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_150 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_151 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_152 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_153 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_154 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_155 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_156 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_157 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_158 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_159 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_160 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_161 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_162 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_163 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_164 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_165 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_166 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_167 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_168 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_169 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_170 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_171 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_172 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_173 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_174 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_175 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_176 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_177 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_178 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_179 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_180 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_181 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_182 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_183 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_184 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_185 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_186 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_187 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_188 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_189 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_190 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_191 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_192 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_193 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_194 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_195 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_196 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_197 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_198 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_199 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_200 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_201 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_202 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_203 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_204 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_205 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_206 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_207 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_208 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_209 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_210 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_211 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_212 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_213 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_214 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_215 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_216 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_217 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_218 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_219 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_220 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_221 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_222 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_223 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_224 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_225 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_226 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_227 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_228 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_229 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_230 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_231 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_232 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_233 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_234 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_235 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_236 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_237 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_238 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_239 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_240 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_241 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_242 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_243 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_244 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_245 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_246 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_247 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_248 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_249 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_250 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_251 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_252 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_253 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_254 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_255 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_256 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_257 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_258 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_259 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_260 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_261 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_262 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_263 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_264 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_265 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_266 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_267 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_268 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_269 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_270 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_271 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_272 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_273 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_274 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_275 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_276 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_277 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_278 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_279 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_280 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_281 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_282 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_283 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_284 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_285 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_286 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_287 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_288 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_289 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_290 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_291 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_292 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_293 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_294 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_295 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_296 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_297 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_298 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_299 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_300 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_301 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_302 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_303 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_304 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_305 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_40 = self.input.LA(2)
if (LA89_40 == IDENTIFIER) :
LA89_306 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif (LA89_40 == 43) :
LA89_307 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_41 = self.input.LA(2)
if (LA89_41 == 43) :
LA89_308 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif (LA89_41 == IDENTIFIER) :
LA89_309 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 58 or LA89 == 59 or LA89 == 60 or LA89 == 61:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_310 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_311 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_312 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_313 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_314 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_315 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_316 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_317 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_318 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_319 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_320 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_321 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_322 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_323 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_324 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_325 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_326 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_327 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_328 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_329 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
if alt89 == 1:
# C.g:0:0: declaration
self.following.append(self.FOLLOW_declaration_in_macro_statement2166)
self.declaration()
self.following.pop()
if self.failed:
return
else:
break #loop89
# C.g:544:33: ( statement_list )?
alt90 = 2
LA90 = self.input.LA(1)
if LA90 == IDENTIFIER:
LA90 = self.input.LA(2)
if LA90 == 25 or LA90 == 29 or LA90 == 30 or LA90 == 31 or LA90 == 32 or LA90 == 33 or LA90 == 34 or LA90 == 35 or LA90 == 36 or LA90 == 37 or LA90 == 38 or LA90 == 39 or LA90 == 40 or LA90 == 41 or LA90 == 42 or LA90 == 45 or LA90 == 46 or LA90 == 47 or LA90 == 48 or LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61:
alt90 = 1
elif LA90 == 62:
LA90_45 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_46 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == IDENTIFIER:
LA90_47 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 64:
LA90_48 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_49 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_50 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_51 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_52 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_53 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_54 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_55 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_56 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_57 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_58 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_59 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_60 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_61 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_62 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_63 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_64 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_65 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_66 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_67 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_70 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25 or LA90 == 26 or LA90 == 29 or LA90 == 30 or LA90 == 31 or LA90 == 32 or LA90 == 33 or LA90 == 34 or LA90 == 35 or LA90 == 36 or LA90 == 37 or LA90 == 38 or LA90 == 39 or LA90 == 40 or LA90 == 41 or LA90 == 42 or LA90 == 43 or LA90 == 45 or LA90 == 46 or LA90 == 48 or LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61 or LA90 == 103 or LA90 == 104 or LA90 == 105 or LA90 == 106 or LA90 == 107 or LA90 == 108 or LA90 == 110 or LA90 == 111 or LA90 == 112 or LA90 == 113 or LA90 == 114 or LA90 == 115 or LA90 == 116 or LA90 == 117:
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_87 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_88 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_89 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_90 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_91 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_92 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_93 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_94 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_95 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_96 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_97 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_98 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_99 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_100 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_101 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_102 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_103 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_104 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_105 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_106 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_107 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_108 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_111 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_112 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_113 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_114 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_115 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_116 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_117 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_118 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_119 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_120 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_121 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_122 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_123 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_124 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_125 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_126 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_127 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_128 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_129 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_130 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_131 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_134 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_135 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_136 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_137 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_138 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_139 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_140 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_141 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_142 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_143 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_144 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_145 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_146 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_147 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_148 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_149 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_150 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_151 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_152 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_153 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_154 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_155 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_156 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_159 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_160 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_161 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_162 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_163 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_164 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_165 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_166 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_167 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_168 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_169 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_170 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_171 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_172 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_173 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_174 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_175 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_176 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_177 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_178 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_179 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_181 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_183 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 64:
LA90_184 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_185 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_186 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_187 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_188 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_189 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_190 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_191 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_192 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_193 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_194 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_195 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_196 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_197 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_198 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_199 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_200 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_201 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_202 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_203 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_204 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_205 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_206 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_209 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_210 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_211 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_212 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_213 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_214 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_215 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_216 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_217 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_218 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_219 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_220 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_221 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_222 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_223 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_224 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_225 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_226 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_227 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_228 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_229 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_230 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == 62:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_233 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_234 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_235 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_236 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_237 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_238 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_239 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_240 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_241 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_242 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_243 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_244 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61:
LA90_245 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 34:
LA90_246 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 35:
LA90_247 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 36:
LA90_248 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 37:
LA90_249 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 38:
LA90_250 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 39:
LA90_251 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 40:
LA90_252 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 41:
LA90_253 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 42:
LA90_254 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 45 or LA90 == 46:
LA90_255 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 48:
LA90_256 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_257 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_258 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_259 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_260 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_261 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_262 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_263 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_264 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_265 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_266 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_267 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_268 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_269 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_270 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_271 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_272 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_273 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_274 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_275 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_276 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_277 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_278 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_279 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_280 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90 = self.input.LA(2)
if LA90 == 62:
LA90_281 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == IDENTIFIER:
LA90_282 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_283 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_284 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_285 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_286 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_287 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_288 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_289 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_290 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_291 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_292 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90 = self.input.LA(2)
if LA90 == 62:
LA90_293 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == IDENTIFIER:
LA90_294 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_295 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_296 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_297 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_298 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_299 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_300 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_301 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_302 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_303 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_304 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
if alt90 == 1:
# C.g:0:0: statement_list
self.following.append(self.FOLLOW_statement_list_in_macro_statement2170)
self.statement_list()
self.following.pop()
if self.failed:
return
# C.g:544:49: ( expression )?
alt91 = 2
LA91_0 = self.input.LA(1)
if ((IDENTIFIER <= LA91_0 <= FLOATING_POINT_LITERAL) or LA91_0 == 62 or LA91_0 == 66 or (68 <= LA91_0 <= 69) or (72 <= LA91_0 <= 74) or (77 <= LA91_0 <= 79)) :
alt91 = 1
if alt91 == 1:
# C.g:0:0: expression
self.following.append(self.FOLLOW_expression_in_macro_statement2173)
self.expression()
self.following.pop()
if self.failed:
return
self.match(self.input, 63, self.FOLLOW_63_in_macro_statement2176)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 64, macro_statement_StartIndex)
pass
return
# $ANTLR end macro_statement
# $ANTLR start labeled_statement
# C.g:547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );
def labeled_statement(self, ):
    # ANTLR-generated rule method (do not hand-edit): parses one C labeled
    # statement.  Per the grammar line above there are three alternatives:
    #   alt 1: IDENTIFIER ':' statement               (goto-style label)
    #   alt 2: 'case' constant_expression ':' statement
    #   alt 3: 'default' ':' statement
    # While backtracking, failure is signalled via self.failed and an early
    # return instead of raising; real syntax errors are reported/recovered in
    # the except clause below.
    labeled_statement_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: if this rule (rule index 65) was already
            # attempted at the current input position during backtracking,
            # skip re-parsing it.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65):
                return
            # C.g:548:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement )
            # Predict the alternative from a single token of lookahead:
            # IDENTIFIER -> alt 1, token 106 ('case') -> alt 2,
            # token 107 ('default') -> alt 3.
            alt92 = 3
            LA92 = self.input.LA(1)
            if LA92 == IDENTIFIER:
                alt92 = 1
            elif LA92 == 106:
                alt92 = 2
            elif LA92 == 107:
                alt92 = 3
            else:
                # No viable alternative: under backtracking just flag failure;
                # otherwise raise so the error is reported and recovered.
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input)
                raise nvae
            if alt92 == 1:
                # C.g:548:4: IDENTIFIER ':' statement
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_labeled_statement2188)
                if self.failed:
                    return
                # Token 47 is ':' (see the grammar comment above).
                self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2190)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_labeled_statement2192)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt92 == 2:
                # C.g:549:4: 'case' constant_expression ':' statement
                self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2197)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2199)
                self.constant_expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2201)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_labeled_statement2203)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
            elif alt92 == 3:
                # C.g:550:4: 'default' ':' statement
                self.match(self.input, 107, self.FOLLOW_107_in_labeled_statement2208)
                if self.failed:
                    return
                self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2210)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_labeled_statement2212)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            # Standard ANTLR error handling: report and resynchronize.
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the parse attempt for this rule/position so backtracking
        # can reuse the result (memoization).
        if self.backtracking > 0:
            self.memoize(self.input, 65, labeled_statement_StartIndex)
        pass
    return
# $ANTLR end labeled_statement
class compound_statement_return(object):
    """Return scope for the compound_statement rule.

    Holds the start and stop tokens bounding the text matched by
    compound_statement(); both are unset until the rule runs.
    """

    def __init__(self):
        # Boundaries of the matched rule; filled in by compound_statement().
        self.start, self.stop = None, None
# $ANTLR start compound_statement
# C.g:553:1: compound_statement : '{' ( declaration )* ( statement_list )? '}' ;
def compound_statement(self, ):
retval = self.compound_statement_return()
retval.start = self.input.LT(1)
compound_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 66):
return retval
# C.g:554:2: ( '{' ( declaration )* ( statement_list )? '}' )
# C.g:554:4: '{' ( declaration )* ( statement_list )? '}'
self.match(self.input, 43, self.FOLLOW_43_in_compound_statement2223)
if self.failed:
return retval
# C.g:554:8: ( declaration )*
while True: #loop93
alt93 = 2
LA93 = self.input.LA(1)
if LA93 == IDENTIFIER:
LA93 = self.input.LA(2)
if LA93 == 62:
LA93_44 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_47 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 66:
LA93_48 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_49 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_50 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_51 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_52 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_53 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_54 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_55 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_56 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_57 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_58 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_59 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_60 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_61 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_62 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_63 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_64 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_65 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 26:
LA93 = self.input.LA(2)
if LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_86 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_87 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_88 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_89 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_90 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_91 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_92 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_93 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_94 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_95 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_96 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_97 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_98 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_99 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 66:
LA93_100 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_101 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_102 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_103 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_104 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_105 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_106 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_107 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_108 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_109 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_110 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_111 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_112 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_113 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_114 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_115 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_116 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_117 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_118 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_119 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_120 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_121 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_122 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_123 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_124 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_125 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_126 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_127 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_128 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_129 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_130 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_131 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_132 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_133 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_134 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_135 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_136 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_137 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_138 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_139 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_140 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_141 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_142 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_143 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_144 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_145 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_146 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_147 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_148 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_149 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_150 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_151 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_152 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_153 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_154 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_155 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_156 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_157 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_158 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_159 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_160 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_161 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_162 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_163 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_164 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_165 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_166 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_167 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_168 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_169 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_170 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_171 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_172 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_173 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_174 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_175 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_176 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_177 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_178 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_179 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_180 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_181 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_182 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_183 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_184 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_185 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_186 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_187 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_188 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_189 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_190 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_191 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_192 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_193 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_194 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_195 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_196 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_197 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_198 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_199 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_200 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_201 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_202 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_203 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_204 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_205 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_206 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_207 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_208 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_209 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_210 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_211 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_212 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_213 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_214 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_215 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_216 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_217 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_218 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_219 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_220 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_221 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_222 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_223 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_224 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_225 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_226 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_227 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_228 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_229 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_230 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_231 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_232 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_233 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_234 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_235 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_236 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_237 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_238 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_239 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_240 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_241 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_242 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_243 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_244 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_245 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_246 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_247 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_248 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_249 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_250 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_251 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_252 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_253 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_254 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_255 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_256 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_257 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_258 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_259 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_260 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_261 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_262 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_263 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_264 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_265 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_266 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_267 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_268 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_269 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_270 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_271 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_272 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_273 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_274 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_275 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_276 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_277 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_278 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_279 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_280 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_281 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_282 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_283 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_284 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_285 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_286 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_287 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_288 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_289 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_290 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_291 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_292 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_293 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_294 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_295 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_296 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_297 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_298 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_299 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_300 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_301 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_302 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_303 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_304 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_40 = self.input.LA(2)
if (LA93_40 == IDENTIFIER) :
LA93_305 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif (LA93_40 == 43) :
LA93_306 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_41 = self.input.LA(2)
if (LA93_41 == 43) :
LA93_307 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif (LA93_41 == IDENTIFIER) :
LA93_308 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 58 or LA93 == 59 or LA93 == 60 or LA93 == 61:
LA93 = self.input.LA(2)
if LA93 == 66:
LA93_309 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 58:
LA93_310 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 59:
LA93_311 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 60:
LA93_312 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == IDENTIFIER:
LA93_313 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 62:
LA93_314 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 25:
LA93_315 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
LA93_316 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 34:
LA93_317 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 35:
LA93_318 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 36:
LA93_319 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 37:
LA93_320 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 38:
LA93_321 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 39:
LA93_322 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 40:
LA93_323 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 41:
LA93_324 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 42:
LA93_325 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 45 or LA93 == 46:
LA93_326 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 48:
LA93_327 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
LA93_328 = self.input.LA(3)
if (self.synpred186()) :
alt93 = 1
if alt93 == 1:
# C.g:0:0: declaration
self.following.append(self.FOLLOW_declaration_in_compound_statement2225)
self.declaration()
self.following.pop()
if self.failed:
return retval
else:
break #loop93
# C.g:554:21: ( statement_list )?
alt94 = 2
LA94_0 = self.input.LA(1)
if ((IDENTIFIER <= LA94_0 <= FLOATING_POINT_LITERAL) or (25 <= LA94_0 <= 26) or (29 <= LA94_0 <= 43) or (45 <= LA94_0 <= 46) or (48 <= LA94_0 <= 62) or LA94_0 == 66 or (68 <= LA94_0 <= 69) or (72 <= LA94_0 <= 74) or (77 <= LA94_0 <= 79) or (103 <= LA94_0 <= 108) or (110 <= LA94_0 <= 117)) :
alt94 = 1
if alt94 == 1:
# C.g:0:0: statement_list
self.following.append(self.FOLLOW_statement_list_in_compound_statement2228)
self.statement_list()
self.following.pop()
if self.failed:
return retval
self.match(self.input, 44, self.FOLLOW_44_in_compound_statement2231)
if self.failed:
return retval
retval.stop = self.input.LT(-1)
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 66, compound_statement_StartIndex)
pass
return retval
# $ANTLR end compound_statement
# $ANTLR start statement_list
# C.g:557:1: statement_list : ( statement )+ ;
def statement_list(self, ):
statement_list_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67):
return
# C.g:558:2: ( ( statement )+ )
# C.g:558:4: ( statement )+
# C.g:558:4: ( statement )+
cnt95 = 0
while True: #loop95
alt95 = 2
LA95 = self.input.LA(1)
if LA95 == IDENTIFIER:
LA95 = self.input.LA(2)
if LA95 == 62:
LA95_46 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 45 or LA95 == 46 or LA95 == 47 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_48 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == IDENTIFIER:
LA95_49 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 64:
LA95_50 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_51 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_52 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_53 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_54 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_55 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_56 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_57 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_58 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_59 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_60 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_61 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_62 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_63 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_64 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_65 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_66 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_67 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_68 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_69 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_88 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == HEX_LITERAL:
LA95 = self.input.LA(2)
if LA95 == 64:
LA95_89 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_90 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_91 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_92 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_93 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_94 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_95 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_96 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_97 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_98 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_99 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_100 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_101 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_102 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_103 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_104 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_105 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_106 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_107 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_108 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_109 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_110 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25:
alt95 = 1
elif LA95 == OCTAL_LITERAL:
LA95 = self.input.LA(2)
if LA95 == 64:
LA95_113 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_114 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_115 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_116 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_117 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_118 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_119 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_120 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_121 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_122 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_123 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_124 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_125 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_126 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_127 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_128 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_129 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_130 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_131 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_132 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_133 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_135 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25:
alt95 = 1
elif LA95 == DECIMAL_LITERAL:
LA95 = self.input.LA(2)
if LA95 == 64:
LA95_137 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_138 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_139 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_140 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_141 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_142 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_143 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_144 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_145 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_146 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_147 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_148 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_149 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_150 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_151 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_152 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_153 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_154 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_155 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_156 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_157 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_158 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25:
alt95 = 1
elif LA95 == CHARACTER_LITERAL:
LA95 = self.input.LA(2)
if LA95 == 64:
LA95_161 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_162 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_163 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_164 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_165 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_166 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_167 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_168 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_169 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_170 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_171 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_172 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_173 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_174 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_175 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_176 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_177 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_178 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_179 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_180 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_181 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_182 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25:
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95 = self.input.LA(2)
if LA95 == IDENTIFIER:
LA95_185 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 64:
LA95_186 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_187 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_188 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_189 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_190 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_191 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_192 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_193 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_194 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_195 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_196 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_197 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_198 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_199 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_200 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_201 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_202 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_203 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_204 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_205 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_206 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25:
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_208 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_209 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == FLOATING_POINT_LITERAL:
LA95 = self.input.LA(2)
if LA95 == 64:
LA95_211 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_212 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 75:
LA95_213 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66:
LA95_214 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 76:
LA95_215 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_216 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_217 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 70:
LA95_218 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 71:
LA95_219 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 68:
LA95_220 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 69:
LA95_221 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 101 or LA95 == 102:
LA95_222 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
LA95_223 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 95 or LA95 == 96:
LA95_224 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 77:
LA95_225 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 94:
LA95_226 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 93:
LA95_227 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 92:
LA95_228 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 91:
LA95_229 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 90:
LA95_230 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 27:
LA95_231 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25:
alt95 = 1
elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
LA95_234 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95 = self.input.LA(2)
if LA95 == IDENTIFIER:
LA95_235 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == HEX_LITERAL:
LA95_236 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == OCTAL_LITERAL:
LA95_237 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == DECIMAL_LITERAL:
LA95_238 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == CHARACTER_LITERAL:
LA95_239 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_240 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == FLOATING_POINT_LITERAL:
LA95_241 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_242 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_243 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_244 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
LA95_245 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 74:
LA95_246 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
LA95_247 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 34:
LA95_248 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 35:
LA95_249 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 36:
LA95_250 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 37:
LA95_251 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 38:
LA95_252 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 39:
LA95_253 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 40:
LA95_254 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 41:
LA95_255 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 42:
LA95_256 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 45 or LA95 == 46:
LA95_257 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 48:
LA95_258 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95 = self.input.LA(2)
if LA95 == IDENTIFIER:
LA95_259 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == HEX_LITERAL:
LA95_260 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == OCTAL_LITERAL:
LA95_261 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == DECIMAL_LITERAL:
LA95_262 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == CHARACTER_LITERAL:
LA95_263 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_264 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == FLOATING_POINT_LITERAL:
LA95_265 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_266 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_267 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_268 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
LA95_269 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 74:
LA95_270 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95 = self.input.LA(2)
if LA95 == IDENTIFIER:
LA95_271 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == HEX_LITERAL:
LA95_272 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == OCTAL_LITERAL:
LA95_273 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == DECIMAL_LITERAL:
LA95_274 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == CHARACTER_LITERAL:
LA95_275 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_276 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == FLOATING_POINT_LITERAL:
LA95_277 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 62:
LA95_278 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_279 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_280 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
LA95_281 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 74:
LA95_282 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
LA95 = self.input.LA(2)
if LA95 == 62:
LA95_283 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == IDENTIFIER:
LA95_284 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == HEX_LITERAL:
LA95_285 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == OCTAL_LITERAL:
LA95_286 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == DECIMAL_LITERAL:
LA95_287 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == CHARACTER_LITERAL:
LA95_288 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_289 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == FLOATING_POINT_LITERAL:
LA95_290 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_291 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_292 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
LA95_293 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 74:
LA95_294 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 74:
LA95 = self.input.LA(2)
if LA95 == 62:
LA95_295 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == IDENTIFIER:
LA95_296 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == HEX_LITERAL:
LA95_297 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == OCTAL_LITERAL:
LA95_298 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == DECIMAL_LITERAL:
LA95_299 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == CHARACTER_LITERAL:
LA95_300 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == STRING_LITERAL:
LA95_301 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == FLOATING_POINT_LITERAL:
LA95_302 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 72:
LA95_303 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 73:
LA95_304 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
LA95_305 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 74:
LA95_306 = self.input.LA(3)
if (self.synpred188()) :
alt95 = 1
elif LA95 == 25 or LA95 == 26 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 43 or LA95 == 45 or LA95 == 46 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61 or LA95 == 103 or LA95 == 104 or LA95 == 105 or LA95 == 106 or LA95 == 107 or LA95 == 108 or LA95 == 110 or LA95 == 111 or LA95 == 112 or LA95 == 113 or LA95 == 114 or LA95 == 115 or LA95 == 116 or LA95 == 117:
alt95 = 1
if alt95 == 1:
# C.g:0:0: statement
self.following.append(self.FOLLOW_statement_in_statement_list2242)
self.statement()
self.following.pop()
if self.failed:
return
else:
if cnt95 >= 1:
break #loop95
if self.backtracking > 0:
self.failed = True
return
eee = EarlyExitException(95, self.input)
raise eee
cnt95 += 1
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 67, statement_list_StartIndex)
pass
return
# $ANTLR end statement_list
class expression_statement_return(object):
    """Return-value holder for the expression_statement rule (ANTLR-generated).

    Carries the first and last tokens matched by the rule so callers can
    recover the exact source span of the parsed expression statement.
    """
    def __init__(self):
        # First token of the matched span (set from input.LT(1) on entry).
        self.start = None
        # Last token of the matched span (set from input.LT(-1) on success).
        self.stop = None
# $ANTLR start expression_statement
# C.g:561:1: expression_statement : ( ';' | expression ';' );
def expression_statement(self, ):
    """Parse an expression_statement: either a bare ';' or 'expression ;'.

    ANTLR-generated rule method (rule index 68). Returns an
    expression_statement_return whose start/stop tokens bound the matched
    input. When backtracking, failures set self.failed instead of raising,
    and the parse position is memoized in the finally block.
    """
    retval = self.expression_statement_return()
    retval.start = self.input.LT(1)
    expression_statement_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast-path: skip if this rule already parsed here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 68):
                return retval
            # C.g:562:2: ( ';' | expression ';' )
            # Decide the alternative from one token of lookahead:
            # token 25 is ';' (alt 1); any expression-start token picks alt 2.
            alt96 = 2
            LA96_0 = self.input.LA(1)
            if (LA96_0 == 25) :
                alt96 = 1
            elif ((IDENTIFIER <= LA96_0 <= FLOATING_POINT_LITERAL) or LA96_0 == 62 or LA96_0 == 66 or (68 <= LA96_0 <= 69) or (72 <= LA96_0 <= 74) or (77 <= LA96_0 <= 79)) :
                alt96 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("561:1: expression_statement : ( ';' | expression ';' );", 96, 0, self.input)
                raise nvae
            if alt96 == 1:
                # C.g:562:4: ';'
                self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2254)
                if self.failed:
                    return retval
            elif alt96 == 2:
                # C.g:563:4: expression ';'
                self.following.append(self.FOLLOW_expression_in_expression_statement2259)
                self.expression()
                self.following.pop()
                if self.failed:
                    return retval
                self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2261)
                if self.failed:
                    return retval
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the result position for rule 68 so backtracking reparses
        # of the same input index can be skipped.
        if self.backtracking > 0:
            self.memoize(self.input, 68, expression_statement_StartIndex)
        pass
    return retval
# $ANTLR end expression_statement
# $ANTLR start selection_statement
# C.g:566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );
def selection_statement(self, ):
    """Parse a selection_statement: if/else or switch (ANTLR rule index 69).

    For the 'if' alternative, the condition expression's source span is
    recorded via self.StorePredicateExpression (used later by the ECC
    predicate-expression checks). Token 108 is 'if', 109 'else', 110
    'switch', 62/63 are '(' / ')'.
    """
    selection_statement_StartIndex = self.input.index()
    e = None
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
                return
            # C.g:567:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
            alt98 = 2
            LA98_0 = self.input.LA(1)
            if (LA98_0 == 108) :
                alt98 = 1
            elif (LA98_0 == 110) :
                alt98 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)
                raise nvae
            if alt98 == 1:
                # C.g:567:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
                self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2272)
                if self.failed:
                    return
                self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2274)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_selection_statement2278)
                e = self.expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2280)
                if self.failed:
                    return
                # Only record the predicate text on a real (non-backtracking) parse.
                if self.backtracking == 0:
                    self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
                self.following.append(self.FOLLOW_statement_in_selection_statement2284)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
                # C.g:567:167: ( options {k=1; backtrack=false; } : 'else' statement )?
                # Optional 'else' branch: greedily bound to the nearest 'if'.
                alt97 = 2
                LA97_0 = self.input.LA(1)
                if (LA97_0 == 109) :
                    alt97 = 1
                if alt97 == 1:
                    # C.g:567:200: 'else' statement
                    self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2299)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_selection_statement2301)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
            elif alt98 == 2:
                # C.g:568:4: 'switch' '(' expression ')' statement
                self.match(self.input, 110, self.FOLLOW_110_in_selection_statement2308)
                if self.failed:
                    return
                self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2310)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_selection_statement2312)
                self.expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2314)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_selection_statement2316)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 69, selection_statement_StartIndex)
        pass
    return
# $ANTLR end selection_statement
# $ANTLR start iteration_statement
# C.g:571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );
def iteration_statement(self, ):
    """Parse an iteration_statement: while, do-while, or for loop
    (ANTLR rule index 70).

    Each alternative records its controlling expression's source span via
    self.StorePredicateExpression on a non-backtracking parse. Tokens:
    111 'while', 112 'do', 113 'for', 62/63 '(' / ')', 25 ';'.
    """
    iteration_statement_StartIndex = self.input.index()
    e = None
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
                return
            # C.g:572:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
            alt100 = 3
            LA100 = self.input.LA(1)
            if LA100 == 111:
                alt100 = 1
            elif LA100 == 112:
                alt100 = 2
            elif LA100 == 113:
                alt100 = 3
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)
                raise nvae
            if alt100 == 1:
                # C.g:572:4: 'while' '(' e= expression ')' statement
                self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2327)
                if self.failed:
                    return
                self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2329)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_iteration_statement2333)
                e = self.expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2335)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_iteration_statement2337)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
            elif alt100 == 2:
                # C.g:573:4: 'do' statement 'while' '(' e= expression ')' ';'
                self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2344)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_iteration_statement2346)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2348)
                if self.failed:
                    return
                self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2350)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_iteration_statement2354)
                e = self.expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2356)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2358)
                if self.failed:
                    return
                if self.backtracking == 0:
                    self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
            elif alt100 == 3:
                # C.g:574:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
                self.match(self.input, 113, self.FOLLOW_113_in_iteration_statement2365)
                if self.failed:
                    return
                self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2367)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2369)
                self.expression_statement()
                self.following.pop()
                if self.failed:
                    return
                # The second expression_statement is the loop condition;
                # its span is what gets recorded below.
                self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2373)
                e = self.expression_statement()
                self.following.pop()
                if self.failed:
                    return
                # C.g:574:58: ( expression )?
                # Optional increment expression before ')'.
                alt99 = 2
                LA99_0 = self.input.LA(1)
                if ((IDENTIFIER <= LA99_0 <= FLOATING_POINT_LITERAL) or LA99_0 == 62 or LA99_0 == 66 or (68 <= LA99_0 <= 69) or (72 <= LA99_0 <= 74) or (77 <= LA99_0 <= 79)) :
                    alt99 = 1
                if alt99 == 1:
                    # C.g:0:0: expression
                    self.following.append(self.FOLLOW_expression_in_iteration_statement2375)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2378)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_statement_in_iteration_statement2380)
                self.statement()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 70, iteration_statement_StartIndex)
        pass
    return
# $ANTLR end iteration_statement
# $ANTLR start jump_statement
# C.g:577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );
def jump_statement(self, ):
    """Parse a jump_statement: goto, continue, break, or return
    (ANTLR rule index 71).

    'return' needs one extra token of lookahead to distinguish a bare
    'return ;' (alt 4) from 'return expression ;' (alt 5). Tokens:
    114 'goto', 115 'continue', 116 'break', 117 'return', 25 ';'.
    """
    jump_statement_StartIndex = self.input.index()
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
                return
            # C.g:578:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
            alt101 = 5
            LA101 = self.input.LA(1)
            if LA101 == 114:
                alt101 = 1
            elif LA101 == 115:
                alt101 = 2
            elif LA101 == 116:
                alt101 = 3
            elif LA101 == 117:
                LA101_4 = self.input.LA(2)
                if (LA101_4 == 25) :
                    alt101 = 4
                elif ((IDENTIFIER <= LA101_4 <= FLOATING_POINT_LITERAL) or LA101_4 == 62 or LA101_4 == 66 or (68 <= LA101_4 <= 69) or (72 <= LA101_4 <= 74) or (77 <= LA101_4 <= 79)) :
                    alt101 = 5
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input)
                    raise nvae
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input)
                raise nvae
            if alt101 == 1:
                # C.g:578:4: 'goto' IDENTIFIER ';'
                self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2393)
                if self.failed:
                    return
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2395)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2397)
                if self.failed:
                    return
            elif alt101 == 2:
                # C.g:579:4: 'continue' ';'
                self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2402)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2404)
                if self.failed:
                    return
            elif alt101 == 3:
                # C.g:580:4: 'break' ';'
                self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2409)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2411)
                if self.failed:
                    return
            elif alt101 == 4:
                # C.g:581:4: 'return' ';'
                self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2416)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2418)
                if self.failed:
                    return
            elif alt101 == 5:
                # C.g:582:4: 'return' expression ';'
                self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2423)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_jump_statement2425)
                self.expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2427)
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 71, jump_statement_StartIndex)
        pass
    return
# $ANTLR end jump_statement
# $ANTLR start synpred2
def synpred2_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a declaration_specifiers
    can be parsed at the current position (C.g:119:6). Run only while
    backtracking; failure is signaled through self.failed, not raised.
    """
    # C.g:119:6: ( declaration_specifiers )
    # C.g:119:6: declaration_specifiers
    self.following.append(self.FOLLOW_declaration_specifiers_in_synpred2100)
    self.declaration_specifiers()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred2
# $ANTLR start synpred4
def synpred4_fragment(self, ):
    """Syntactic-predicate fragment for a function-definition lookahead:
    ( declaration_specifiers )? declarator ( declaration )* '{' (C.g:119).

    The large DFA below decides whether the optional declaration_specifiers
    is present, delegating ambiguous IDENTIFIER-led cases to synpred2.
    Token 43 is '{'.
    """
    # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )
    # C.g:119:6: ( declaration_specifiers )? declarator ( declaration )* '{'
    # C.g:119:6: ( declaration_specifiers )?
    alt102 = 2
    LA102 = self.input.LA(1)
    if LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33 or LA102 == 34 or LA102 == 35 or LA102 == 36 or LA102 == 37 or LA102 == 38 or LA102 == 39 or LA102 == 40 or LA102 == 41 or LA102 == 42 or LA102 == 45 or LA102 == 46 or LA102 == 48 or LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
        alt102 = 1
    elif LA102 == IDENTIFIER:
        # IDENTIFIER could be a typedef name: inspect the next token and,
        # where still ambiguous, try the synpred2 predicate.
        LA102 = self.input.LA(2)
        if LA102 == 62:
            LA102_21 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33:
            LA102_23 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 34:
            LA102_24 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 35:
            LA102_25 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 36:
            LA102_26 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 37:
            LA102_27 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 38:
            LA102_28 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 39:
            LA102_29 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 40:
            LA102_30 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 41:
            LA102_31 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 42:
            LA102_32 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 45 or LA102 == 46:
            LA102_33 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 48:
            LA102_34 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == IDENTIFIER:
            LA102_35 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 58:
            LA102_36 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 66:
            alt102 = 1
        elif LA102 == 59:
            LA102_39 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 60:
            LA102_40 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
            LA102_41 = self.input.LA(3)
            if (self.synpred2()) :
                alt102 = 1
    elif LA102 == 58:
        LA102_14 = self.input.LA(2)
        if (self.synpred2()) :
            alt102 = 1
    elif LA102 == 59:
        LA102_16 = self.input.LA(2)
        if (self.synpred2()) :
            alt102 = 1
    elif LA102 == 60:
        LA102_17 = self.input.LA(2)
        if (self.synpred2()) :
            alt102 = 1
    if alt102 == 1:
        # C.g:0:0: declaration_specifiers
        self.following.append(self.FOLLOW_declaration_specifiers_in_synpred4100)
        self.declaration_specifiers()
        self.following.pop()
        if self.failed:
            return
    self.following.append(self.FOLLOW_declarator_in_synpred4103)
    self.declarator()
    self.following.pop()
    if self.failed:
        return
    # C.g:119:41: ( declaration )*
    # Zero-or-more K&R-style parameter declarations before the body.
    while True: #loop103
        alt103 = 2
        LA103_0 = self.input.LA(1)
        if (LA103_0 == IDENTIFIER or LA103_0 == 26 or (29 <= LA103_0 <= 42) or (45 <= LA103_0 <= 46) or (48 <= LA103_0 <= 61)) :
            alt103 = 1
        if alt103 == 1:
            # C.g:0:0: declaration
            self.following.append(self.FOLLOW_declaration_in_synpred4105)
            self.declaration()
            self.following.pop()
            if self.failed:
                return
        else:
            break #loop103
    self.match(self.input, 43, self.FOLLOW_43_in_synpred4108)
    if self.failed:
        return
# $ANTLR end synpred4
# $ANTLR start synpred5
def synpred5_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a declaration can be
    parsed at the current position (C.g:120:4).
    """
    # C.g:120:4: ( declaration )
    # C.g:120:4: declaration
    self.following.append(self.FOLLOW_declaration_in_synpred5118)
    self.declaration()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred5
# $ANTLR start synpred7
def synpred7_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if declaration_specifiers
    can be parsed at the current position (C.g:146:6).
    """
    # C.g:146:6: ( declaration_specifiers )
    # C.g:146:6: declaration_specifiers
    self.following.append(self.FOLLOW_declaration_specifiers_in_synpred7157)
    self.declaration_specifiers()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred7
# $ANTLR start synpred10
def synpred10_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if declaration_specifiers
    can be parsed at the current position (C.g:167:18).
    """
    # C.g:167:18: ( declaration_specifiers )
    # C.g:167:18: declaration_specifiers
    self.following.append(self.FOLLOW_declaration_specifiers_in_synpred10207)
    self.declaration_specifiers()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred10
# $ANTLR start synpred14
def synpred14_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a type_specifier can be
    parsed at the current position (C.g:184:7).
    """
    # C.g:184:7: ( type_specifier )
    # C.g:184:7: type_specifier
    self.following.append(self.FOLLOW_type_specifier_in_synpred14272)
    self.type_specifier()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred14
# $ANTLR start synpred15
def synpred15_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a type_qualifier can be
    parsed at the current position (C.g:185:13).
    """
    # C.g:185:13: ( type_qualifier )
    # C.g:185:13: type_qualifier
    self.following.append(self.FOLLOW_type_qualifier_in_synpred15286)
    self.type_qualifier()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred15
# $ANTLR start synpred33
def synpred33_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a type_qualifier can be
    parsed at the current position (C.g:225:16). Used by synpred34's
    qualifier loop.
    """
    # C.g:225:16: ( type_qualifier )
    # C.g:225:16: type_qualifier
    self.following.append(self.FOLLOW_type_qualifier_in_synpred33444)
    self.type_qualifier()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred33
# $ANTLR start synpred34
def synpred34_fragment(self, ):
    """Syntactic-predicate fragment:
    IDENTIFIER ( type_qualifier )* declarator (C.g:225:4).

    The qualifier loop uses synpred33 to decide whether tokens 58/59/60
    (modifier keywords) start another type_qualifier; tokens 49-57 and 61
    are unambiguous qualifiers.
    """
    # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )
    # C.g:225:5: IDENTIFIER ( type_qualifier )* declarator
    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34442)
    if self.failed:
        return
    # C.g:225:16: ( type_qualifier )*
    while True: #loop106
        alt106 = 2
        LA106 = self.input.LA(1)
        if LA106 == 58:
            LA106_2 = self.input.LA(2)
            if (self.synpred33()) :
                alt106 = 1
        elif LA106 == 59:
            LA106_3 = self.input.LA(2)
            if (self.synpred33()) :
                alt106 = 1
        elif LA106 == 60:
            LA106_4 = self.input.LA(2)
            if (self.synpred33()) :
                alt106 = 1
        elif LA106 == 49 or LA106 == 50 or LA106 == 51 or LA106 == 52 or LA106 == 53 or LA106 == 54 or LA106 == 55 or LA106 == 56 or LA106 == 57 or LA106 == 61:
            alt106 = 1
        if alt106 == 1:
            # C.g:0:0: type_qualifier
            self.following.append(self.FOLLOW_type_qualifier_in_synpred34444)
            self.type_qualifier()
            self.following.pop()
            if self.failed:
                return
        else:
            break #loop106
    self.following.append(self.FOLLOW_declarator_in_synpred34447)
    self.declarator()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred34
# $ANTLR start synpred39
def synpred39_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a type_qualifier can be
    parsed at the current position (C.g:253:6).
    """
    # C.g:253:6: ( type_qualifier )
    # C.g:253:6: type_qualifier
    self.following.append(self.FOLLOW_type_qualifier_in_synpred39566)
    self.type_qualifier()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred39
# $ANTLR start synpred40
def synpred40_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a type_specifier can be
    parsed at the current position (C.g:253:23).
    """
    # C.g:253:23: ( type_specifier )
    # C.g:253:23: type_specifier
    self.following.append(self.FOLLOW_type_specifier_in_synpred40570)
    self.type_specifier()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred40
# $ANTLR start synpred66
def synpred66_fragment(self, ):
    """Syntactic-predicate fragment for an EDK2-extended declarator:
    ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
    (C.g:297:4). Tokens: 66 '*' (pointer start), 58 'EFIAPI',
    59 'EFI_BOOTSERVICE', 60 'EFI_RUNTIMESERVICE'.
    """
    # C.g:297:4: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator )
    # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
    # C.g:297:4: ( pointer )?
    alt111 = 2
    LA111_0 = self.input.LA(1)
    if (LA111_0 == 66) :
        alt111 = 1
    if alt111 == 1:
        # C.g:0:0: pointer
        self.following.append(self.FOLLOW_pointer_in_synpred66784)
        self.pointer()
        self.following.pop()
        if self.failed:
            return
    # C.g:297:13: ( 'EFIAPI' )?
    alt112 = 2
    LA112_0 = self.input.LA(1)
    if (LA112_0 == 58) :
        alt112 = 1
    if alt112 == 1:
        # C.g:297:14: 'EFIAPI'
        self.match(self.input, 58, self.FOLLOW_58_in_synpred66788)
        if self.failed:
            return
    # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
    alt113 = 2
    LA113_0 = self.input.LA(1)
    if (LA113_0 == 59) :
        alt113 = 1
    if alt113 == 1:
        # C.g:297:26: 'EFI_BOOTSERVICE'
        self.match(self.input, 59, self.FOLLOW_59_in_synpred66793)
        if self.failed:
            return
    # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
    alt114 = 2
    LA114_0 = self.input.LA(1)
    if (LA114_0 == 60) :
        alt114 = 1
    if alt114 == 1:
        # C.g:297:47: 'EFI_RUNTIMESERVICE'
        self.match(self.input, 60, self.FOLLOW_60_in_synpred66798)
        if self.failed:
            return
    self.following.append(self.FOLLOW_direct_declarator_in_synpred66802)
    self.direct_declarator()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred66
# $ANTLR start synpred67
def synpred67_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a declarator_suffix can
    be parsed at the current position (C.g:303:15).
    """
    # C.g:303:15: ( declarator_suffix )
    # C.g:303:15: declarator_suffix
    self.following.append(self.FOLLOW_declarator_suffix_in_synpred67821)
    self.declarator_suffix()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred67
# $ANTLR start synpred69
def synpred69_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if the next token is
    'EFIAPI' (token 58) at C.g:304:9.
    """
    # C.g:304:9: ( 'EFIAPI' )
    # C.g:304:9: 'EFIAPI'
    self.match(self.input, 58, self.FOLLOW_58_in_synpred69830)
    if self.failed:
        return
# $ANTLR end synpred69
# $ANTLR start synpred70
def synpred70_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a declarator_suffix can
    be parsed at the current position (C.g:304:35).
    """
    # C.g:304:35: ( declarator_suffix )
    # C.g:304:35: declarator_suffix
    self.following.append(self.FOLLOW_declarator_suffix_in_synpred70838)
    self.declarator_suffix()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred70
# $ANTLR start synpred73
def synpred73_fragment(self, ):
    """Syntactic-predicate fragment: '(' parameter_type_list ')'
    (C.g:310:9). Tokens 62/63 are '(' / ')'.
    """
    # C.g:310:9: ( '(' parameter_type_list ')' )
    # C.g:310:9: '(' parameter_type_list ')'
    self.match(self.input, 62, self.FOLLOW_62_in_synpred73878)
    if self.failed:
        return
    self.following.append(self.FOLLOW_parameter_type_list_in_synpred73880)
    self.parameter_type_list()
    self.following.pop()
    if self.failed:
        return
    self.match(self.input, 63, self.FOLLOW_63_in_synpred73882)
    if self.failed:
        return
# $ANTLR end synpred73
# $ANTLR start synpred74
def synpred74_fragment(self, ):
    """Syntactic-predicate fragment: '(' identifier_list ')'
    (C.g:311:9). Tokens 62/63 are '(' / ')'.
    """
    # C.g:311:9: ( '(' identifier_list ')' )
    # C.g:311:9: '(' identifier_list ')'
    self.match(self.input, 62, self.FOLLOW_62_in_synpred74892)
    if self.failed:
        return
    self.following.append(self.FOLLOW_identifier_list_in_synpred74894)
    self.identifier_list()
    self.following.pop()
    if self.failed:
        return
    self.match(self.input, 63, self.FOLLOW_63_in_synpred74896)
    if self.failed:
        return
# $ANTLR end synpred74
# $ANTLR start synpred75
def synpred75_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a type_qualifier can be
    parsed at the current position (C.g:316:8). Used by synpred77's
    qualifier loop.
    """
    # C.g:316:8: ( type_qualifier )
    # C.g:316:8: type_qualifier
    self.following.append(self.FOLLOW_type_qualifier_in_synpred75921)
    self.type_qualifier()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred75
# $ANTLR start synpred76
def synpred76_fragment(self, ):
    """Syntactic-predicate fragment: succeeds if a pointer can be parsed
    at the current position (C.g:316:24).
    """
    # C.g:316:24: ( pointer )
    # C.g:316:24: pointer
    self.following.append(self.FOLLOW_pointer_in_synpred76924)
    self.pointer()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred76
# $ANTLR start synpred77
def synpred77_fragment(self, ):
    """Syntactic-predicate fragment: '*' ( type_qualifier )+ ( pointer )?
    (C.g:316:4). Token 66 is '*'; tokens 49-61 start a type_qualifier.
    At least one qualifier is required, hence the EarlyExitException when
    the loop body never ran.
    """
    # C.g:316:4: ( '*' ( type_qualifier )+ ( pointer )? )
    # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
    self.match(self.input, 66, self.FOLLOW_66_in_synpred77919)
    if self.failed:
        return
    # C.g:316:8: ( type_qualifier )+
    cnt116 = 0
    while True: #loop116
        alt116 = 2
        LA116_0 = self.input.LA(1)
        if ((49 <= LA116_0 <= 61)) :
            alt116 = 1
        if alt116 == 1:
            # C.g:0:0: type_qualifier
            self.following.append(self.FOLLOW_type_qualifier_in_synpred77921)
            self.type_qualifier()
            self.following.pop()
            if self.failed:
                return
        else:
            if cnt116 >= 1:
                break #loop116
            if self.backtracking > 0:
                self.failed = True
                return
            eee = EarlyExitException(116, self.input)
            raise eee
        cnt116 += 1
    # C.g:316:24: ( pointer )?
    alt117 = 2
    LA117_0 = self.input.LA(1)
    if (LA117_0 == 66) :
        alt117 = 1
    if alt117 == 1:
        # C.g:0:0: pointer
        self.following.append(self.FOLLOW_pointer_in_synpred77924)
        self.pointer()
        self.following.pop()
        if self.failed:
            return
# $ANTLR end synpred77
# $ANTLR start synpred78
def synpred78_fragment(self, ):
    """Syntactic-predicate fragment: '*' pointer (C.g:317:4) — a '*'
    (token 66) followed by a nested pointer.
    """
    # C.g:317:4: ( '*' pointer )
    # C.g:317:4: '*' pointer
    self.match(self.input, 66, self.FOLLOW_66_in_synpred78930)
    if self.failed:
        return
    self.following.append(self.FOLLOW_pointer_in_synpred78932)
    self.pointer()
    self.following.pop()
    if self.failed:
        return
# $ANTLR end synpred78
# $ANTLR start synpred81
def synpred81_fragment(self, ):
# C.g:326:32: ( 'OPTIONAL' )
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred81977)
if self.failed:
return
# $ANTLR end synpred81
# $ANTLR start synpred82
def synpred82_fragment(self, ):
# C.g:326:27: ( ',' ( 'OPTIONAL' )? parameter_declaration )
# C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
self.match(self.input, 27, self.FOLLOW_27_in_synpred82974)
if self.failed:
return
# C.g:326:31: ( 'OPTIONAL' )?
alt119 = 2
LA119_0 = self.input.LA(1)
if (LA119_0 == 53) :
LA119_1 = self.input.LA(2)
if (self.synpred81()) :
alt119 = 1
if alt119 == 1:
# C.g:326:32: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred82977)
if self.failed:
return
self.following.append(self.FOLLOW_parameter_declaration_in_synpred82981)
self.parameter_declaration()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred82
# $ANTLR start synpred83
def synpred83_fragment(self, ):
# C.g:330:28: ( declarator )
# C.g:330:28: declarator
self.following.append(self.FOLLOW_declarator_in_synpred83997)
self.declarator()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred83
# $ANTLR start synpred84
def synpred84_fragment(self, ):
# C.g:330:39: ( abstract_declarator )
# C.g:330:39: abstract_declarator
self.following.append(self.FOLLOW_abstract_declarator_in_synpred84999)
self.abstract_declarator()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred84
# $ANTLR start synpred86
def synpred86_fragment(self, ):
# C.g:330:4: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? )
# C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
self.following.append(self.FOLLOW_declaration_specifiers_in_synpred86994)
self.declaration_specifiers()
self.following.pop()
if self.failed:
return
# C.g:330:27: ( declarator | abstract_declarator )*
while True: #loop120
alt120 = 3
LA120 = self.input.LA(1)
if LA120 == 66:
LA120_3 = self.input.LA(2)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == IDENTIFIER or LA120 == 58 or LA120 == 59 or LA120 == 60:
alt120 = 1
elif LA120 == 62:
LA120 = self.input.LA(2)
if LA120 == 29 or LA120 == 30 or LA120 == 31 or LA120 == 32 or LA120 == 33 or LA120 == 34 or LA120 == 35 or LA120 == 36 or LA120 == 37 or LA120 == 38 or LA120 == 39 or LA120 == 40 or LA120 == 41 or LA120 == 42 or LA120 == 45 or LA120 == 46 or LA120 == 48 or LA120 == 49 or LA120 == 50 or LA120 == 51 or LA120 == 52 or LA120 == 53 or LA120 == 54 or LA120 == 55 or LA120 == 56 or LA120 == 57 or LA120 == 61 or LA120 == 63 or LA120 == 64:
alt120 = 2
elif LA120 == 58:
LA120_21 = self.input.LA(3)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == 66:
LA120_22 = self.input.LA(3)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == 59:
LA120_23 = self.input.LA(3)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == 60:
LA120_24 = self.input.LA(3)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == IDENTIFIER:
LA120_25 = self.input.LA(3)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == 62:
LA120_26 = self.input.LA(3)
if (self.synpred83()) :
alt120 = 1
elif (self.synpred84()) :
alt120 = 2
elif LA120 == 64:
alt120 = 2
if alt120 == 1:
# C.g:330:28: declarator
self.following.append(self.FOLLOW_declarator_in_synpred86997)
self.declarator()
self.following.pop()
if self.failed:
return
elif alt120 == 2:
# C.g:330:39: abstract_declarator
self.following.append(self.FOLLOW_abstract_declarator_in_synpred86999)
self.abstract_declarator()
self.following.pop()
if self.failed:
return
else:
break #loop120
# C.g:330:61: ( 'OPTIONAL' )?
alt121 = 2
LA121_0 = self.input.LA(1)
if (LA121_0 == 53) :
alt121 = 1
if alt121 == 1:
# C.g:330:62: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_synpred861004)
if self.failed:
return
# $ANTLR end synpred86
# $ANTLR start synpred90
def synpred90_fragment(self, ):
# C.g:341:4: ( specifier_qualifier_list ( abstract_declarator )? )
# C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
self.following.append(self.FOLLOW_specifier_qualifier_list_in_synpred901046)
self.specifier_qualifier_list()
self.following.pop()
if self.failed:
return
# C.g:341:29: ( abstract_declarator )?
alt122 = 2
LA122_0 = self.input.LA(1)
if (LA122_0 == 62 or LA122_0 == 64 or LA122_0 == 66) :
alt122 = 1
if alt122 == 1:
# C.g:0:0: abstract_declarator
self.following.append(self.FOLLOW_abstract_declarator_in_synpred901048)
self.abstract_declarator()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred90
# $ANTLR start synpred91
def synpred91_fragment(self, ):
# C.g:346:12: ( direct_abstract_declarator )
# C.g:346:12: direct_abstract_declarator
self.following.append(self.FOLLOW_direct_abstract_declarator_in_synpred911067)
self.direct_abstract_declarator()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred91
# $ANTLR start synpred93
def synpred93_fragment(self, ):
# C.g:351:6: ( '(' abstract_declarator ')' )
# C.g:351:6: '(' abstract_declarator ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred931086)
if self.failed:
return
self.following.append(self.FOLLOW_abstract_declarator_in_synpred931088)
self.abstract_declarator()
self.following.pop()
if self.failed:
return
self.match(self.input, 63, self.FOLLOW_63_in_synpred931090)
if self.failed:
return
# $ANTLR end synpred93
# $ANTLR start synpred94
def synpred94_fragment(self, ):
# C.g:351:65: ( abstract_declarator_suffix )
# C.g:351:65: abstract_declarator_suffix
self.following.append(self.FOLLOW_abstract_declarator_suffix_in_synpred941098)
self.abstract_declarator_suffix()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred94
# $ANTLR start synpred109
def synpred109_fragment(self, ):
# C.g:386:4: ( '(' type_name ')' cast_expression )
# C.g:386:4: '(' type_name ')' cast_expression
self.match(self.input, 62, self.FOLLOW_62_in_synpred1091282)
if self.failed:
return
self.following.append(self.FOLLOW_type_name_in_synpred1091284)
self.type_name()
self.following.pop()
if self.failed:
return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1091286)
if self.failed:
return
self.following.append(self.FOLLOW_cast_expression_in_synpred1091288)
self.cast_expression()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred109
# $ANTLR start synpred114
def synpred114_fragment(self, ):
# C.g:395:4: ( 'sizeof' unary_expression )
# C.g:395:4: 'sizeof' unary_expression
self.match(self.input, 74, self.FOLLOW_74_in_synpred1141330)
if self.failed:
return
self.following.append(self.FOLLOW_unary_expression_in_synpred1141332)
self.unary_expression()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred114
# $ANTLR start synpred117
def synpred117_fragment(self, ):
# C.g:409:13: ( '(' argument_expression_list ')' )
# C.g:409:13: '(' argument_expression_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred1171420)
if self.failed:
return
self.following.append(self.FOLLOW_argument_expression_list_in_synpred1171424)
self.argument_expression_list()
self.following.pop()
if self.failed:
return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1171428)
if self.failed:
return
# $ANTLR end synpred117
# $ANTLR start synpred118
def synpred118_fragment(self, ):
# C.g:410:13: ( '(' macro_parameter_list ')' )
# C.g:410:13: '(' macro_parameter_list ')'
self.match(self.input, 62, self.FOLLOW_62_in_synpred1181444)
if self.failed:
return
self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1181446)
self.macro_parameter_list()
self.following.pop()
if self.failed:
return
self.match(self.input, 63, self.FOLLOW_63_in_synpred1181448)
if self.failed:
return
# $ANTLR end synpred118
# $ANTLR start synpred120
def synpred120_fragment(self, ):
# C.g:412:13: ( '*' IDENTIFIER )
# C.g:412:13: '*' IDENTIFIER
self.match(self.input, 66, self.FOLLOW_66_in_synpred1201482)
if self.failed:
return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1201486)
if self.failed:
return
# $ANTLR end synpred120
# $ANTLR start synpred137
def synpred137_fragment(self, ):
# C.g:443:20: ( STRING_LITERAL )
# C.g:443:20: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371683)
if self.failed:
return
# $ANTLR end synpred137
# $ANTLR start synpred138
def synpred138_fragment(self, ):
# C.g:443:8: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )
# C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
# C.g:443:8: ( IDENTIFIER )*
while True: #loop125
alt125 = 2
LA125_0 = self.input.LA(1)
if (LA125_0 == IDENTIFIER) :
alt125 = 1
if alt125 == 1:
# C.g:0:0: IDENTIFIER
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1381680)
if self.failed:
return
else:
break #loop125
# C.g:443:20: ( STRING_LITERAL )+
cnt126 = 0
while True: #loop126
alt126 = 2
LA126_0 = self.input.LA(1)
if (LA126_0 == STRING_LITERAL) :
alt126 = 1
if alt126 == 1:
# C.g:0:0: STRING_LITERAL
self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1381683)
if self.failed:
return
else:
if cnt126 >= 1:
break #loop126
if self.backtracking > 0:
self.failed = True
return
eee = EarlyExitException(126, self.input)
raise eee
cnt126 += 1
# $ANTLR end synpred138
# $ANTLR start synpred142
def synpred142_fragment(self, ):
# C.g:458:4: ( lvalue assignment_operator assignment_expression )
# C.g:458:4: lvalue assignment_operator assignment_expression
self.following.append(self.FOLLOW_lvalue_in_synpred1421744)
self.lvalue()
self.following.pop()
if self.failed:
return
self.following.append(self.FOLLOW_assignment_operator_in_synpred1421746)
self.assignment_operator()
self.following.pop()
if self.failed:
return
self.following.append(self.FOLLOW_assignment_expression_in_synpred1421748)
self.assignment_expression()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred142
# $ANTLR start synpred169
def synpred169_fragment(self, ):
# C.g:520:4: ( expression_statement )
# C.g:520:4: expression_statement
self.following.append(self.FOLLOW_expression_statement_in_synpred1692035)
self.expression_statement()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred169
# $ANTLR start synpred173
def synpred173_fragment(self, ):
# C.g:524:4: ( macro_statement )
# C.g:524:4: macro_statement
self.following.append(self.FOLLOW_macro_statement_in_synpred1732055)
self.macro_statement()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred173
# $ANTLR start synpred174
def synpred174_fragment(self, ):
# C.g:525:4: ( asm2_statement )
# C.g:525:4: asm2_statement
self.following.append(self.FOLLOW_asm2_statement_in_synpred1742060)
self.asm2_statement()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred174
# $ANTLR start synpred181
def synpred181_fragment(self, ):
# C.g:544:19: ( declaration )
# C.g:544:19: declaration
self.following.append(self.FOLLOW_declaration_in_synpred1812166)
self.declaration()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred181
# $ANTLR start synpred182
def synpred182_fragment(self, ):
# C.g:544:33: ( statement_list )
# C.g:544:33: statement_list
self.following.append(self.FOLLOW_statement_list_in_synpred1822170)
self.statement_list()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred182
# $ANTLR start synpred186
def synpred186_fragment(self, ):
# C.g:554:8: ( declaration )
# C.g:554:8: declaration
self.following.append(self.FOLLOW_declaration_in_synpred1862225)
self.declaration()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred186
# $ANTLR start synpred188
def synpred188_fragment(self, ):
# C.g:558:4: ( statement )
# C.g:558:4: statement
self.following.append(self.FOLLOW_statement_in_synpred1882242)
self.statement()
self.following.pop()
if self.failed:
return
# $ANTLR end synpred188
def synpred69(self):
self.backtracking += 1
start = self.input.mark()
self.synpred69_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred81(self):
self.backtracking += 1
start = self.input.mark()
self.synpred81_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred82(self):
self.backtracking += 1
start = self.input.mark()
self.synpred82_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred66(self):
self.backtracking += 1
start = self.input.mark()
self.synpred66_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred83(self):
self.backtracking += 1
start = self.input.mark()
self.synpred83_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred84(self):
self.backtracking += 1
start = self.input.mark()
self.synpred84_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred67(self):
self.backtracking += 1
start = self.input.mark()
self.synpred67_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred86(self):
self.backtracking += 1
start = self.input.mark()
self.synpred86_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred120(self):
self.backtracking += 1
start = self.input.mark()
self.synpred120_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred40(self):
self.backtracking += 1
start = self.input.mark()
self.synpred40_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred142(self):
self.backtracking += 1
start = self.input.mark()
self.synpred142_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred182(self):
self.backtracking += 1
start = self.input.mark()
self.synpred182_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred109(self):
self.backtracking += 1
start = self.input.mark()
self.synpred109_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred181(self):
self.backtracking += 1
start = self.input.mark()
self.synpred181_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred186(self):
self.backtracking += 1
start = self.input.mark()
self.synpred186_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred188(self):
self.backtracking += 1
start = self.input.mark()
self.synpred188_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred169(self):
self.backtracking += 1
start = self.input.mark()
self.synpred169_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred117(self):
self.backtracking += 1
start = self.input.mark()
self.synpred117_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred70(self):
self.backtracking += 1
start = self.input.mark()
self.synpred70_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred118(self):
self.backtracking += 1
start = self.input.mark()
self.synpred118_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred34(self):
self.backtracking += 1
start = self.input.mark()
self.synpred34_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred33(self):
self.backtracking += 1
start = self.input.mark()
self.synpred33_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred94(self):
self.backtracking += 1
start = self.input.mark()
self.synpred94_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred39(self):
self.backtracking += 1
start = self.input.mark()
self.synpred39_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred74(self):
self.backtracking += 1
start = self.input.mark()
self.synpred74_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred114(self):
self.backtracking += 1
start = self.input.mark()
self.synpred114_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred93(self):
self.backtracking += 1
start = self.input.mark()
self.synpred93_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred75(self):
self.backtracking += 1
start = self.input.mark()
self.synpred75_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred137(self):
self.backtracking += 1
start = self.input.mark()
self.synpred137_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred90(self):
self.backtracking += 1
start = self.input.mark()
self.synpred90_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred138(self):
self.backtracking += 1
start = self.input.mark()
self.synpred138_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred91(self):
self.backtracking += 1
start = self.input.mark()
self.synpred91_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred73(self):
self.backtracking += 1
start = self.input.mark()
self.synpred73_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred5(self):
self.backtracking += 1
start = self.input.mark()
self.synpred5_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred78(self):
self.backtracking += 1
start = self.input.mark()
self.synpred78_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred7(self):
self.backtracking += 1
start = self.input.mark()
self.synpred7_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred76(self):
self.backtracking += 1
start = self.input.mark()
self.synpred76_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred77(self):
self.backtracking += 1
start = self.input.mark()
self.synpred77_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred2(self):
self.backtracking += 1
start = self.input.mark()
self.synpred2_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred4(self):
self.backtracking += 1
start = self.input.mark()
self.synpred4_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred174(self):
self.backtracking += 1
start = self.input.mark()
self.synpred174_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred173(self):
self.backtracking += 1
start = self.input.mark()
self.synpred173_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred14(self):
self.backtracking += 1
start = self.input.mark()
self.synpred14_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred15(self):
self.backtracking += 1
start = self.input.mark()
self.synpred15_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred10(self):
    """Speculatively run the synpred10 grammar fragment.

    Marks the input stream, executes the fragment with backtracking
    enabled, then rewinds so no tokens are consumed.  Returns True
    when the fragment matched (no parse failure was recorded).
    """
    marker = self.input.mark()
    self.backtracking += 1
    self.synpred10_fragment()
    matched = not self.failed
    self.input.rewind(marker)
    self.backtracking -= 1
    self.failed = False
    return matched
FOLLOW_external_declaration_in_translation_unit74 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_function_definition_in_external_declaration113 = frozenset([1])
FOLLOW_declaration_in_external_declaration118 = frozenset([1])
FOLLOW_macro_statement_in_external_declaration123 = frozenset([1, 25])
FOLLOW_25_in_external_declaration126 = frozenset([1])
FOLLOW_declaration_specifiers_in_function_definition157 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_declarator_in_function_definition160 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_declaration_in_function_definition166 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_compound_statement_in_function_definition171 = frozenset([1])
FOLLOW_compound_statement_in_function_definition180 = frozenset([1])
FOLLOW_26_in_declaration203 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_declaration_specifiers_in_declaration207 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_init_declarator_list_in_declaration216 = frozenset([25])
FOLLOW_25_in_declaration220 = frozenset([1])
FOLLOW_declaration_specifiers_in_declaration234 = frozenset([4, 25, 58, 59, 60, 62, 66])
FOLLOW_init_declarator_list_in_declaration238 = frozenset([25])
FOLLOW_25_in_declaration243 = frozenset([1])
FOLLOW_storage_class_specifier_in_declaration_specifiers264 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_specifier_in_declaration_specifiers272 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_qualifier_in_declaration_specifiers286 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_init_declarator_in_init_declarator_list308 = frozenset([1, 27])
FOLLOW_27_in_init_declarator_list311 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_init_declarator_in_init_declarator_list313 = frozenset([1, 27])
FOLLOW_declarator_in_init_declarator326 = frozenset([1, 28])
FOLLOW_28_in_init_declarator329 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_initializer_in_init_declarator331 = frozenset([1])
FOLLOW_set_in_storage_class_specifier0 = frozenset([1])
FOLLOW_34_in_type_specifier376 = frozenset([1])
FOLLOW_35_in_type_specifier381 = frozenset([1])
FOLLOW_36_in_type_specifier386 = frozenset([1])
FOLLOW_37_in_type_specifier391 = frozenset([1])
FOLLOW_38_in_type_specifier396 = frozenset([1])
FOLLOW_39_in_type_specifier401 = frozenset([1])
FOLLOW_40_in_type_specifier406 = frozenset([1])
FOLLOW_41_in_type_specifier411 = frozenset([1])
FOLLOW_42_in_type_specifier416 = frozenset([1])
FOLLOW_struct_or_union_specifier_in_type_specifier423 = frozenset([1])
FOLLOW_enum_specifier_in_type_specifier433 = frozenset([1])
FOLLOW_type_id_in_type_specifier451 = frozenset([1])
FOLLOW_IDENTIFIER_in_type_id467 = frozenset([1])
FOLLOW_struct_or_union_in_struct_or_union_specifier494 = frozenset([4, 43])
FOLLOW_IDENTIFIER_in_struct_or_union_specifier496 = frozenset([43])
FOLLOW_43_in_struct_or_union_specifier499 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_struct_declaration_list_in_struct_or_union_specifier501 = frozenset([44])
FOLLOW_44_in_struct_or_union_specifier503 = frozenset([1])
FOLLOW_struct_or_union_in_struct_or_union_specifier508 = frozenset([4])
FOLLOW_IDENTIFIER_in_struct_or_union_specifier510 = frozenset([1])
FOLLOW_set_in_struct_or_union0 = frozenset([1])
FOLLOW_struct_declaration_in_struct_declaration_list537 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_specifier_qualifier_list_in_struct_declaration549 = frozenset([4, 47, 58, 59, 60, 62, 66])
FOLLOW_struct_declarator_list_in_struct_declaration551 = frozenset([25])
FOLLOW_25_in_struct_declaration553 = frozenset([1])
FOLLOW_type_qualifier_in_specifier_qualifier_list566 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_specifier_in_specifier_qualifier_list570 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_struct_declarator_in_struct_declarator_list584 = frozenset([1, 27])
FOLLOW_27_in_struct_declarator_list587 = frozenset([4, 47, 58, 59, 60, 62, 66])
FOLLOW_struct_declarator_in_struct_declarator_list589 = frozenset([1, 27])
FOLLOW_declarator_in_struct_declarator602 = frozenset([1, 47])
FOLLOW_47_in_struct_declarator605 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_struct_declarator607 = frozenset([1])
FOLLOW_47_in_struct_declarator614 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_struct_declarator616 = frozenset([1])
FOLLOW_48_in_enum_specifier634 = frozenset([43])
FOLLOW_43_in_enum_specifier636 = frozenset([4])
FOLLOW_enumerator_list_in_enum_specifier638 = frozenset([27, 44])
FOLLOW_27_in_enum_specifier640 = frozenset([44])
FOLLOW_44_in_enum_specifier643 = frozenset([1])
FOLLOW_48_in_enum_specifier648 = frozenset([4])
FOLLOW_IDENTIFIER_in_enum_specifier650 = frozenset([43])
FOLLOW_43_in_enum_specifier652 = frozenset([4])
FOLLOW_enumerator_list_in_enum_specifier654 = frozenset([27, 44])
FOLLOW_27_in_enum_specifier656 = frozenset([44])
FOLLOW_44_in_enum_specifier659 = frozenset([1])
FOLLOW_48_in_enum_specifier664 = frozenset([4])
FOLLOW_IDENTIFIER_in_enum_specifier666 = frozenset([1])
FOLLOW_enumerator_in_enumerator_list677 = frozenset([1, 27])
FOLLOW_27_in_enumerator_list680 = frozenset([4])
FOLLOW_enumerator_in_enumerator_list682 = frozenset([1, 27])
FOLLOW_IDENTIFIER_in_enumerator695 = frozenset([1, 28])
FOLLOW_28_in_enumerator698 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_enumerator700 = frozenset([1])
FOLLOW_set_in_type_qualifier0 = frozenset([1])
FOLLOW_pointer_in_declarator784 = frozenset([4, 58, 59, 60, 62])
FOLLOW_58_in_declarator788 = frozenset([4, 59, 60, 62])
FOLLOW_59_in_declarator793 = frozenset([4, 60, 62])
FOLLOW_60_in_declarator798 = frozenset([4, 62])
FOLLOW_direct_declarator_in_declarator802 = frozenset([1])
FOLLOW_pointer_in_declarator808 = frozenset([1])
FOLLOW_IDENTIFIER_in_direct_declarator819 = frozenset([1, 62, 64])
FOLLOW_declarator_suffix_in_direct_declarator821 = frozenset([1, 62, 64])
FOLLOW_62_in_direct_declarator827 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_58_in_direct_declarator830 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_declarator_in_direct_declarator834 = frozenset([63])
FOLLOW_63_in_direct_declarator836 = frozenset([62, 64])
FOLLOW_declarator_suffix_in_direct_declarator838 = frozenset([1, 62, 64])
FOLLOW_64_in_declarator_suffix852 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_declarator_suffix854 = frozenset([65])
FOLLOW_65_in_declarator_suffix856 = frozenset([1])
FOLLOW_64_in_declarator_suffix866 = frozenset([65])
FOLLOW_65_in_declarator_suffix868 = frozenset([1])
FOLLOW_62_in_declarator_suffix878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_type_list_in_declarator_suffix880 = frozenset([63])
FOLLOW_63_in_declarator_suffix882 = frozenset([1])
FOLLOW_62_in_declarator_suffix892 = frozenset([4])
FOLLOW_identifier_list_in_declarator_suffix894 = frozenset([63])
FOLLOW_63_in_declarator_suffix896 = frozenset([1])
FOLLOW_62_in_declarator_suffix906 = frozenset([63])
FOLLOW_63_in_declarator_suffix908 = frozenset([1])
FOLLOW_66_in_pointer919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_qualifier_in_pointer921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_pointer_in_pointer924 = frozenset([1])
FOLLOW_66_in_pointer930 = frozenset([66])
FOLLOW_pointer_in_pointer932 = frozenset([1])
FOLLOW_66_in_pointer937 = frozenset([1])
FOLLOW_parameter_list_in_parameter_type_list948 = frozenset([1, 27])
FOLLOW_27_in_parameter_type_list951 = frozenset([53, 67])
FOLLOW_53_in_parameter_type_list954 = frozenset([67])
FOLLOW_67_in_parameter_type_list958 = frozenset([1])
FOLLOW_parameter_declaration_in_parameter_list971 = frozenset([1, 27])
FOLLOW_27_in_parameter_list974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_53_in_parameter_list977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_declaration_in_parameter_list981 = frozenset([1, 27])
FOLLOW_declaration_specifiers_in_parameter_declaration994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_declarator_in_parameter_declaration997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_abstract_declarator_in_parameter_declaration999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_53_in_parameter_declaration1004 = frozenset([1])
FOLLOW_pointer_in_parameter_declaration1013 = frozenset([4, 66])
FOLLOW_IDENTIFIER_in_parameter_declaration1016 = frozenset([1])
FOLLOW_IDENTIFIER_in_identifier_list1027 = frozenset([1, 27])
FOLLOW_27_in_identifier_list1031 = frozenset([4])
FOLLOW_IDENTIFIER_in_identifier_list1033 = frozenset([1, 27])
FOLLOW_specifier_qualifier_list_in_type_name1046 = frozenset([1, 62, 64, 66])
FOLLOW_abstract_declarator_in_type_name1048 = frozenset([1])
FOLLOW_type_id_in_type_name1054 = frozenset([1])
FOLLOW_pointer_in_abstract_declarator1065 = frozenset([1, 62, 64])
FOLLOW_direct_abstract_declarator_in_abstract_declarator1067 = frozenset([1])
FOLLOW_direct_abstract_declarator_in_abstract_declarator1073 = frozenset([1])
FOLLOW_62_in_direct_abstract_declarator1086 = frozenset([62, 64, 66])
FOLLOW_abstract_declarator_in_direct_abstract_declarator1088 = frozenset([63])
FOLLOW_63_in_direct_abstract_declarator1090 = frozenset([1, 62, 64])
FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094 = frozenset([1, 62, 64])
FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098 = frozenset([1, 62, 64])
FOLLOW_64_in_abstract_declarator_suffix1110 = frozenset([65])
FOLLOW_65_in_abstract_declarator_suffix1112 = frozenset([1])
FOLLOW_64_in_abstract_declarator_suffix1117 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_abstract_declarator_suffix1119 = frozenset([65])
FOLLOW_65_in_abstract_declarator_suffix1121 = frozenset([1])
FOLLOW_62_in_abstract_declarator_suffix1126 = frozenset([63])
FOLLOW_63_in_abstract_declarator_suffix1128 = frozenset([1])
FOLLOW_62_in_abstract_declarator_suffix1133 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135 = frozenset([63])
FOLLOW_63_in_abstract_declarator_suffix1137 = frozenset([1])
FOLLOW_assignment_expression_in_initializer1150 = frozenset([1])
FOLLOW_43_in_initializer1155 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_initializer_list_in_initializer1157 = frozenset([27, 44])
FOLLOW_27_in_initializer1159 = frozenset([44])
FOLLOW_44_in_initializer1162 = frozenset([1])
FOLLOW_initializer_in_initializer_list1173 = frozenset([1, 27])
FOLLOW_27_in_initializer_list1176 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_initializer_in_initializer_list1178 = frozenset([1, 27])
FOLLOW_assignment_expression_in_argument_expression_list1196 = frozenset([1, 27, 53])
FOLLOW_53_in_argument_expression_list1199 = frozenset([1, 27])
FOLLOW_27_in_argument_expression_list1204 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_argument_expression_list1206 = frozenset([1, 27, 53])
FOLLOW_53_in_argument_expression_list1209 = frozenset([1, 27])
FOLLOW_multiplicative_expression_in_additive_expression1225 = frozenset([1, 68, 69])
FOLLOW_68_in_additive_expression1229 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_multiplicative_expression_in_additive_expression1231 = frozenset([1, 68, 69])
FOLLOW_69_in_additive_expression1235 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_multiplicative_expression_in_additive_expression1237 = frozenset([1, 68, 69])
FOLLOW_cast_expression_in_multiplicative_expression1251 = frozenset([1, 66, 70, 71])
FOLLOW_66_in_multiplicative_expression1255 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_multiplicative_expression1257 = frozenset([1, 66, 70, 71])
FOLLOW_70_in_multiplicative_expression1261 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_multiplicative_expression1263 = frozenset([1, 66, 70, 71])
FOLLOW_71_in_multiplicative_expression1267 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_multiplicative_expression1269 = frozenset([1, 66, 70, 71])
FOLLOW_62_in_cast_expression1282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_name_in_cast_expression1284 = frozenset([63])
FOLLOW_63_in_cast_expression1286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_cast_expression1288 = frozenset([1])
FOLLOW_unary_expression_in_cast_expression1293 = frozenset([1])
FOLLOW_postfix_expression_in_unary_expression1304 = frozenset([1])
FOLLOW_72_in_unary_expression1309 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_unary_expression1311 = frozenset([1])
FOLLOW_73_in_unary_expression1316 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_unary_expression1318 = frozenset([1])
FOLLOW_unary_operator_in_unary_expression1323 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_unary_expression1325 = frozenset([1])
FOLLOW_74_in_unary_expression1330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_unary_expression1332 = frozenset([1])
FOLLOW_74_in_unary_expression1337 = frozenset([62])
FOLLOW_62_in_unary_expression1339 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_name_in_unary_expression1341 = frozenset([63])
FOLLOW_63_in_unary_expression1343 = frozenset([1])
FOLLOW_primary_expression_in_postfix_expression1367 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_64_in_postfix_expression1383 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_postfix_expression1385 = frozenset([65])
FOLLOW_65_in_postfix_expression1387 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_62_in_postfix_expression1401 = frozenset([63])
FOLLOW_63_in_postfix_expression1405 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_62_in_postfix_expression1420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_argument_expression_list_in_postfix_expression1424 = frozenset([63])
FOLLOW_63_in_postfix_expression1428 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_62_in_postfix_expression1444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_macro_parameter_list_in_postfix_expression1446 = frozenset([63])
FOLLOW_63_in_postfix_expression1448 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_75_in_postfix_expression1462 = frozenset([4])
FOLLOW_IDENTIFIER_in_postfix_expression1466 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_66_in_postfix_expression1482 = frozenset([4])
FOLLOW_IDENTIFIER_in_postfix_expression1486 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_76_in_postfix_expression1502 = frozenset([4])
FOLLOW_IDENTIFIER_in_postfix_expression1506 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_72_in_postfix_expression1522 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_73_in_postfix_expression1536 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_parameter_declaration_in_macro_parameter_list1559 = frozenset([1, 27])
FOLLOW_27_in_macro_parameter_list1562 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_declaration_in_macro_parameter_list1564 = frozenset([1, 27])
FOLLOW_set_in_unary_operator0 = frozenset([1])
FOLLOW_IDENTIFIER_in_primary_expression1613 = frozenset([1])
FOLLOW_constant_in_primary_expression1618 = frozenset([1])
FOLLOW_62_in_primary_expression1623 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_primary_expression1625 = frozenset([63])
FOLLOW_63_in_primary_expression1627 = frozenset([1])
FOLLOW_HEX_LITERAL_in_constant1643 = frozenset([1])
FOLLOW_OCTAL_LITERAL_in_constant1653 = frozenset([1])
FOLLOW_DECIMAL_LITERAL_in_constant1663 = frozenset([1])
FOLLOW_CHARACTER_LITERAL_in_constant1671 = frozenset([1])
FOLLOW_IDENTIFIER_in_constant1680 = frozenset([4, 9])
FOLLOW_STRING_LITERAL_in_constant1683 = frozenset([1, 4, 9])
FOLLOW_IDENTIFIER_in_constant1688 = frozenset([1, 4])
FOLLOW_FLOATING_POINT_LITERAL_in_constant1699 = frozenset([1])
FOLLOW_assignment_expression_in_expression1715 = frozenset([1, 27])
FOLLOW_27_in_expression1718 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_expression1720 = frozenset([1, 27])
FOLLOW_conditional_expression_in_constant_expression1733 = frozenset([1])
FOLLOW_lvalue_in_assignment_expression1744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
FOLLOW_assignment_operator_in_assignment_expression1746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_assignment_expression1748 = frozenset([1])
FOLLOW_conditional_expression_in_assignment_expression1753 = frozenset([1])
FOLLOW_unary_expression_in_lvalue1765 = frozenset([1])
FOLLOW_set_in_assignment_operator0 = frozenset([1])
FOLLOW_logical_or_expression_in_conditional_expression1839 = frozenset([1, 90])
FOLLOW_90_in_conditional_expression1842 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_conditional_expression1844 = frozenset([47])
FOLLOW_47_in_conditional_expression1846 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_conditional_expression_in_conditional_expression1848 = frozenset([1])
FOLLOW_logical_and_expression_in_logical_or_expression1863 = frozenset([1, 91])
FOLLOW_91_in_logical_or_expression1866 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_logical_and_expression_in_logical_or_expression1868 = frozenset([1, 91])
FOLLOW_inclusive_or_expression_in_logical_and_expression1881 = frozenset([1, 92])
FOLLOW_92_in_logical_and_expression1884 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_inclusive_or_expression_in_logical_and_expression1886 = frozenset([1, 92])
FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899 = frozenset([1, 93])
FOLLOW_93_in_inclusive_or_expression1902 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904 = frozenset([1, 93])
FOLLOW_and_expression_in_exclusive_or_expression1917 = frozenset([1, 94])
FOLLOW_94_in_exclusive_or_expression1920 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_and_expression_in_exclusive_or_expression1922 = frozenset([1, 94])
FOLLOW_equality_expression_in_and_expression1935 = frozenset([1, 77])
FOLLOW_77_in_and_expression1938 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_equality_expression_in_and_expression1940 = frozenset([1, 77])
FOLLOW_relational_expression_in_equality_expression1952 = frozenset([1, 95, 96])
FOLLOW_set_in_equality_expression1955 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_relational_expression_in_equality_expression1961 = frozenset([1, 95, 96])
FOLLOW_shift_expression_in_relational_expression1975 = frozenset([1, 97, 98, 99, 100])
FOLLOW_set_in_relational_expression1978 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_shift_expression_in_relational_expression1988 = frozenset([1, 97, 98, 99, 100])
FOLLOW_additive_expression_in_shift_expression2001 = frozenset([1, 101, 102])
FOLLOW_set_in_shift_expression2004 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_additive_expression_in_shift_expression2010 = frozenset([1, 101, 102])
FOLLOW_labeled_statement_in_statement2025 = frozenset([1])
FOLLOW_compound_statement_in_statement2030 = frozenset([1])
FOLLOW_expression_statement_in_statement2035 = frozenset([1])
FOLLOW_selection_statement_in_statement2040 = frozenset([1])
FOLLOW_iteration_statement_in_statement2045 = frozenset([1])
FOLLOW_jump_statement_in_statement2050 = frozenset([1])
FOLLOW_macro_statement_in_statement2055 = frozenset([1])
FOLLOW_asm2_statement_in_statement2060 = frozenset([1])
FOLLOW_asm1_statement_in_statement2065 = frozenset([1])
FOLLOW_asm_statement_in_statement2070 = frozenset([1])
FOLLOW_declaration_in_statement2075 = frozenset([1])
FOLLOW_103_in_asm2_statement2086 = frozenset([4])
FOLLOW_IDENTIFIER_in_asm2_statement2089 = frozenset([62])
FOLLOW_62_in_asm2_statement2091 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_set_in_asm2_statement2094 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_63_in_asm2_statement2101 = frozenset([25])
FOLLOW_25_in_asm2_statement2103 = frozenset([1])
FOLLOW_104_in_asm1_statement2115 = frozenset([43])
FOLLOW_43_in_asm1_statement2117 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_set_in_asm1_statement2120 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_44_in_asm1_statement2127 = frozenset([1])
FOLLOW_105_in_asm_statement2138 = frozenset([43])
FOLLOW_43_in_asm_statement2140 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_set_in_asm_statement2143 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_44_in_asm_statement2150 = frozenset([1])
FOLLOW_IDENTIFIER_in_macro_statement2162 = frozenset([62])
FOLLOW_62_in_macro_statement2164 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_declaration_in_macro_statement2166 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_list_in_macro_statement2170 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_macro_statement2173 = frozenset([63])
FOLLOW_63_in_macro_statement2176 = frozenset([1])
FOLLOW_IDENTIFIER_in_labeled_statement2188 = frozenset([47])
FOLLOW_47_in_labeled_statement2190 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_labeled_statement2192 = frozenset([1])
FOLLOW_106_in_labeled_statement2197 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_labeled_statement2199 = frozenset([47])
FOLLOW_47_in_labeled_statement2201 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_labeled_statement2203 = frozenset([1])
FOLLOW_107_in_labeled_statement2208 = frozenset([47])
FOLLOW_47_in_labeled_statement2210 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_labeled_statement2212 = frozenset([1])
FOLLOW_43_in_compound_statement2223 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_declaration_in_compound_statement2225 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_list_in_compound_statement2228 = frozenset([44])
FOLLOW_44_in_compound_statement2231 = frozenset([1])
FOLLOW_statement_in_statement_list2242 = frozenset([1, 4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_25_in_expression_statement2254 = frozenset([1])
FOLLOW_expression_in_expression_statement2259 = frozenset([25])
FOLLOW_25_in_expression_statement2261 = frozenset([1])
FOLLOW_108_in_selection_statement2272 = frozenset([62])
FOLLOW_62_in_selection_statement2274 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_selection_statement2278 = frozenset([63])
FOLLOW_63_in_selection_statement2280 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_selection_statement2284 = frozenset([1, 109])
FOLLOW_109_in_selection_statement2299 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_selection_statement2301 = frozenset([1])
FOLLOW_110_in_selection_statement2308 = frozenset([62])
FOLLOW_62_in_selection_statement2310 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_selection_statement2312 = frozenset([63])
FOLLOW_63_in_selection_statement2314 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_selection_statement2316 = frozenset([1])
FOLLOW_111_in_iteration_statement2327 = frozenset([62])
FOLLOW_62_in_iteration_statement2329 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_iteration_statement2333 = frozenset([63])
FOLLOW_63_in_iteration_statement2335 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_iteration_statement2337 = frozenset([1])
FOLLOW_112_in_iteration_statement2344 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_iteration_statement2346 = frozenset([111])
FOLLOW_111_in_iteration_statement2348 = frozenset([62])
FOLLOW_62_in_iteration_statement2350 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_iteration_statement2354 = frozenset([63])
FOLLOW_63_in_iteration_statement2356 = frozenset([25])
FOLLOW_25_in_iteration_statement2358 = frozenset([1])
FOLLOW_113_in_iteration_statement2365 = frozenset([62])
FOLLOW_62_in_iteration_statement2367 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_statement_in_iteration_statement2369 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_statement_in_iteration_statement2373 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_iteration_statement2375 = frozenset([63])
FOLLOW_63_in_iteration_statement2378 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_iteration_statement2380 = frozenset([1])
FOLLOW_114_in_jump_statement2393 = frozenset([4])
FOLLOW_IDENTIFIER_in_jump_statement2395 = frozenset([25])
FOLLOW_25_in_jump_statement2397 = frozenset([1])
FOLLOW_115_in_jump_statement2402 = frozenset([25])
FOLLOW_25_in_jump_statement2404 = frozenset([1])
FOLLOW_116_in_jump_statement2409 = frozenset([25])
FOLLOW_25_in_jump_statement2411 = frozenset([1])
FOLLOW_117_in_jump_statement2416 = frozenset([25])
FOLLOW_25_in_jump_statement2418 = frozenset([1])
FOLLOW_117_in_jump_statement2423 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_jump_statement2425 = frozenset([25])
FOLLOW_25_in_jump_statement2427 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred2100 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred4100 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_declarator_in_synpred4103 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_declaration_in_synpred4105 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_43_in_synpred4108 = frozenset([1])
FOLLOW_declaration_in_synpred5118 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred7157 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred10207 = frozenset([1])
FOLLOW_type_specifier_in_synpred14272 = frozenset([1])
FOLLOW_type_qualifier_in_synpred15286 = frozenset([1])
FOLLOW_type_qualifier_in_synpred33444 = frozenset([1])
FOLLOW_IDENTIFIER_in_synpred34442 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_type_qualifier_in_synpred34444 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_declarator_in_synpred34447 = frozenset([1])
FOLLOW_type_qualifier_in_synpred39566 = frozenset([1])
FOLLOW_type_specifier_in_synpred40570 = frozenset([1])
FOLLOW_pointer_in_synpred66784 = frozenset([4, 58, 59, 60, 62])
FOLLOW_58_in_synpred66788 = frozenset([4, 59, 60, 62])
FOLLOW_59_in_synpred66793 = frozenset([4, 60, 62])
FOLLOW_60_in_synpred66798 = frozenset([4, 62])
FOLLOW_direct_declarator_in_synpred66802 = frozenset([1])
FOLLOW_declarator_suffix_in_synpred67821 = frozenset([1])
FOLLOW_58_in_synpred69830 = frozenset([1])
FOLLOW_declarator_suffix_in_synpred70838 = frozenset([1])
FOLLOW_62_in_synpred73878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_type_list_in_synpred73880 = frozenset([63])
FOLLOW_63_in_synpred73882 = frozenset([1])
FOLLOW_62_in_synpred74892 = frozenset([4])
FOLLOW_identifier_list_in_synpred74894 = frozenset([63])
FOLLOW_63_in_synpred74896 = frozenset([1])
FOLLOW_type_qualifier_in_synpred75921 = frozenset([1])
FOLLOW_pointer_in_synpred76924 = frozenset([1])
FOLLOW_66_in_synpred77919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_qualifier_in_synpred77921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_pointer_in_synpred77924 = frozenset([1])
FOLLOW_66_in_synpred78930 = frozenset([66])
FOLLOW_pointer_in_synpred78932 = frozenset([1])
FOLLOW_53_in_synpred81977 = frozenset([1])
FOLLOW_27_in_synpred82974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_53_in_synpred82977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_declaration_in_synpred82981 = frozenset([1])
FOLLOW_declarator_in_synpred83997 = frozenset([1])
FOLLOW_abstract_declarator_in_synpred84999 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred86994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_declarator_in_synpred86997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_abstract_declarator_in_synpred86999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_53_in_synpred861004 = frozenset([1])
FOLLOW_specifier_qualifier_list_in_synpred901046 = frozenset([1, 62, 64, 66])
FOLLOW_abstract_declarator_in_synpred901048 = frozenset([1])
FOLLOW_direct_abstract_declarator_in_synpred911067 = frozenset([1])
FOLLOW_62_in_synpred931086 = frozenset([62, 64, 66])
FOLLOW_abstract_declarator_in_synpred931088 = frozenset([63])
FOLLOW_63_in_synpred931090 = frozenset([1])
FOLLOW_abstract_declarator_suffix_in_synpred941098 = frozenset([1])
FOLLOW_62_in_synpred1091282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_name_in_synpred1091284 = frozenset([63])
FOLLOW_63_in_synpred1091286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_synpred1091288 = frozenset([1])
FOLLOW_74_in_synpred1141330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_synpred1141332 = frozenset([1])
FOLLOW_62_in_synpred1171420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_argument_expression_list_in_synpred1171424 = frozenset([63])
FOLLOW_63_in_synpred1171428 = frozenset([1])
FOLLOW_62_in_synpred1181444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_macro_parameter_list_in_synpred1181446 = frozenset([63])
FOLLOW_63_in_synpred1181448 = frozenset([1])
FOLLOW_66_in_synpred1201482 = frozenset([4])
FOLLOW_IDENTIFIER_in_synpred1201486 = frozenset([1])
FOLLOW_STRING_LITERAL_in_synpred1371683 = frozenset([1])
FOLLOW_IDENTIFIER_in_synpred1381680 = frozenset([4, 9])
FOLLOW_STRING_LITERAL_in_synpred1381683 = frozenset([1, 9])
FOLLOW_lvalue_in_synpred1421744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
FOLLOW_assignment_operator_in_synpred1421746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_synpred1421748 = frozenset([1])
FOLLOW_expression_statement_in_synpred1692035 = frozenset([1])
FOLLOW_macro_statement_in_synpred1732055 = frozenset([1])
FOLLOW_asm2_statement_in_synpred1742060 = frozenset([1])
FOLLOW_declaration_in_synpred1812166 = frozenset([1])
FOLLOW_statement_list_in_synpred1822170 = frozenset([1])
FOLLOW_declaration_in_synpred1862225 = frozenset([1])
FOLLOW_statement_in_synpred1882242 = frozenset([1])
| edk2-master | BaseTools/Source/Python/Ecc/CParser3/CParser.py |
## @file
# This is an XML API that uses a syntax similar to XPath, but it is written in
# standard python so that no extra python packages are required to use it.
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
import xml.dom.minidom
import codecs
from Common.LongFilePathSupport import OpenLongFilePath as open
## Create a element of XML
#
# @param Name
# @param String
# @param NodeList
# @param AttributeList
#
# @retval Element
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
    """Construct an XML DOM element named Name.

    String, when non-empty, becomes a text child.  NodeList items are
    either [Key, Value] pairs (turned into <Key>Value</Key> children) or
    ready-made DOM nodes that are appended as-is.  AttributeList items
    are [Key, Value] pairs set as attributes on the element.
    """
    Document = xml.dom.minidom.Document()
    NewElement = Document.createElement(Name)
    if String is not None and String != '':
        NewElement.appendChild(Document.createTextNode(String))

    for Entry in NodeList:
        if not isinstance(Entry, list):
            # Already a DOM node: attach it directly.
            NewElement.appendChild(Entry)
            continue
        TagName = Entry[0]
        Text = Entry[1]
        # Skip degenerate pairs with an empty or None tag/value.
        if TagName != '' and TagName is not None and Text != '' and Text is not None:
            Child = Document.createElement(TagName)
            Child.appendChild(Document.createTextNode(Text))
            NewElement.appendChild(Child)

    for Entry in AttributeList:
        AttrName = Entry[0]
        AttrValue = Entry[1]
        if AttrName != '' and AttrName is not None and AttrValue != '' and AttrValue is not None:
            NewElement.setAttribute(AttrName, AttrValue)

    return NewElement
## Get a list of XML nodes using XPath style syntax.
#
# Return a list of XML DOM nodes from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom The root XML DOM node.
# @param String A XPath style path.
#
# @retval Nodes A list of XML nodes matching the XPath style String.
#
def XmlList(Dom, String):
    """Return all DOM nodes under Dom matching the XPath-style path String.

    An empty list is returned for a missing/empty Dom or path.  Each path
    component must match an element tag name exactly; the nodes matching
    the final component are returned.
    """
    if Dom is None or Dom == "" or String is None or String == "":
        return []
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    # Strip a single optional leading slash before splitting the path.
    Path = String[1:] if String.startswith("/") else String
    Tags = Path.split('/')
    Last = len(Tags) - 1
    Current = [Dom]
    for Depth, Tag in enumerate(Tags):
        Matched = []
        for Candidate in Current:
            if Candidate.nodeType != Candidate.ELEMENT_NODE or Candidate.tagName != Tag:
                continue
            if Depth < Last:
                # Intermediate component: descend into the children.
                Matched.extend(Candidate.childNodes)
            else:
                # Final component: this node is a result.
                Matched.append(Candidate)
        Current = Matched
    return Current
## Get a single XML node using XPath style syntax.
#
# Return a single XML DOM node from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM node.
# @param String A XPath style path.
#
# @retval Node A single XML node matching the XPath style String.
#
def XmlNode(Dom, String):
    """Return the first DOM node under Dom matching the XPath-style path String.

    Returns "" for a missing/empty Dom or path, or when no node matches.
    Only the first matching element at each path level is followed.
    """
    if String is None or String == "" or Dom is None or Dom == "":
        return ""
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    # Strip a single optional leading slash before splitting the path.
    Path = String[1:] if String.startswith("/") else String
    Tags = Path.split('/')
    Last = len(Tags) - 1
    Candidates = [Dom]
    for Depth, Tag in enumerate(Tags):
        for Candidate in Candidates:
            if Candidate.nodeType == Candidate.ELEMENT_NODE and Candidate.tagName == Tag:
                if Depth == Last:
                    return Candidate
                # Descend through the first match only.
                Candidates = Candidate.childNodes
                break
    return ""
## Get a single XML element using XPath style syntax.
#
# Return a single XML element from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
# @retval Element An XML element matching the XPath style String.
#
def XmlElement(Dom, String):
    """Return the stripped text of the node at the XPath-style path String.

    Returns "" when the path does not resolve to a node that carries
    character data in its first child.
    """
    try:
        return XmlNode(Dom, String).firstChild.data.strip()
    except Exception:
        # A bare 'except:' would also swallow SystemExit/KeyboardInterrupt;
        # Exception still covers the expected AttributeError cases
        # (XmlNode returned "", or the node has no text child).
        return ""
## Get a single XML element of the current node.
#
# Return a single XML element specified by the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
# @retval Element An XML element in current root Dom.
#
def XmlElementData(Dom):
    """Return the stripped character data of Dom's first child, or "".

    Returns "" when Dom is None/"" or its first child has no .data
    attribute (e.g. it is an element rather than a text node).
    """
    try:
        return Dom.firstChild.data.strip()
    except Exception:
        # A bare 'except:' would also swallow SystemExit/KeyboardInterrupt;
        # Exception still covers the expected AttributeError cases.
        return ""
## Get a list of XML elements using XPath style syntax.
#
# Return a list of XML elements from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
# @retval Elements A list of XML elements matching the XPath style String.
#
def XmlElementList(Dom, String):
    """Return a list of element text values matching the XPath-style path.

    Uses a list comprehension rather than map() so that, under Python 3,
    callers actually get the documented list (re-iterable, indexable)
    instead of a one-shot map iterator.
    """
    return [XmlElementData(Node) for Node in XmlList(Dom, String)]
## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom.
# If the input Dom or Attribute is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
# @param Attribute The name of Attribute.
#
# @retval Attribute The value of the named Attribute of the current node.
#
def XmlAttribute(Dom, Attribute):
    """Return the stripped value of the named Attribute of Dom, or ''.

    Returns '' when Dom is not a DOM element (no getAttribute) or the
    attribute is absent (minidom returns '' for missing attributes).
    """
    try:
        return Dom.getAttribute(Attribute).strip()
    except Exception:
        # A bare 'except:' would also swallow SystemExit/KeyboardInterrupt;
        # Exception still covers the expected AttributeError case.
        return ''
## Get the XML node name of the current node.
#
# Return a single XML node name from the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
# @retval Name The node name of the current root Dom.
#
def XmlNodeName(Dom):
    """Return the stripped node name of Dom, or '' when unavailable."""
    try:
        return Dom.nodeName.strip()
    except Exception:
        # A bare 'except:' would also swallow SystemExit/KeyboardInterrupt;
        # Exception still covers the expected AttributeError case
        # (Dom is None or has no nodeName).
        return ''
## Parse an XML file.
#
# Parse the input XML file named FileName and return a XML DOM it stands for.
# If the input File is not a valid XML file, then an empty string is returned.
#
# @param FileName The XML file name.
#
# @retval Dom The Dom object parsed from the XML file.
#
def XmlParseFile(FileName):
    """Parse the XML file FileName and return its DOM Document.

    Returns "" (and prints the error) when the file cannot be opened or
    is not valid XML.  The 'with' statement guarantees the file handle is
    closed even when xml.dom.minidom.parse raises -- the original
    open/parse/close sequence leaked the handle on a parse error.
    """
    try:
        # utf_8_sig transparently skips a UTF-8 BOM if one is present.
        with codecs.open(FileName, encoding='utf_8_sig') as XmlFile:
            return xml.dom.minidom.parse(XmlFile)
    except Exception as X:
        print(X)
        return ""
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Simple smoke test of CreateXmlElement.  The original called the
    # undefined name 'CreateXmlList' (NameError when run as a script) and
    # passed bare strings where [Key, Value] attribute pairs are expected.
    A = CreateXmlElement('AAA', 'CCC', [['AAA', '111'], ['BBB', '222']], [['A', '1'], ['B', '2']])
    B = CreateXmlElement('ZZZ', 'CCC', [['XXX', '111'], ['YYY', '222']], [['A', '1'], ['B', '2']])
    C = CreateXmlElement('DDD', 'EEE', [A, B], [['FFF', '1'], ['GGG', '2']])
    print(C.toprettyxml(indent = " "))
| edk2-master | BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py |
## @file
# Python 'Xml' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2011 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Xml
'''
| edk2-master | BaseTools/Source/Python/Ecc/Xml/__init__.py |
# Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2k")
buf.write("\u0383\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
buf.write("p\tp\4q\tq\4r\tr\3\2\3\2\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3")
buf.write("\4\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7")
buf.write("\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\n\3")
buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13")
buf.write("\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\16")
buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\20\3\20")
buf.write("\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22")
buf.write("\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23")
buf.write("\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25")
buf.write("\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27")
buf.write("\3\27\3\27\3\27\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32")
buf.write("\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33")
buf.write("\3\33\3\33\3\33\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\36")
buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37")
buf.write("\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3!\3")
buf.write("!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3")
buf.write("\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"")
buf.write("\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#")
buf.write("\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3")
buf.write("%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3")
buf.write("&\3&\3&\3&\3&\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3")
buf.write(",\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\61")
buf.write("\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64")
buf.write("\3\64\3\65\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39\3")
buf.write("9\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3>\3?\3")
buf.write("?\3?\3?\3@\3@\3@\3A\3A\3A\3B\3B\3B\3C\3C\3D\3D\3D\3E\3")
buf.write("E\3E\3F\3F\3G\3G\3H\3H\3H\3I\3I\3I\3J\3J\3K\3K\3L\3L\3")
buf.write("L\3M\3M\3M\3N\3N\3N\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3")
buf.write("Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3T\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3")
buf.write("W\3W\3W\3X\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3")
buf.write("[\3[\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3")
buf.write("]\3]\3]\3]\3]\3^\3^\3^\7^\u02b2\n^\f^\16^\u02b5\13^\3")
buf.write("_\3_\3`\5`\u02ba\n`\3`\3`\3`\5`\u02bf\n`\3`\3`\3a\5a\u02c4")
buf.write("\na\3a\3a\3a\7a\u02c9\na\fa\16a\u02cc\13a\3a\3a\3b\3b")
buf.write("\3b\6b\u02d3\nb\rb\16b\u02d4\3b\5b\u02d8\nb\3c\3c\3c\7")
buf.write("c\u02dd\nc\fc\16c\u02e0\13c\5c\u02e2\nc\3c\5c\u02e5\n")
buf.write("c\3d\3d\6d\u02e9\nd\rd\16d\u02ea\3d\5d\u02ee\nd\3e\3e")
buf.write("\3f\3f\3f\3f\3f\3f\5f\u02f8\nf\3g\6g\u02fb\ng\rg\16g\u02fc")
buf.write("\3g\3g\7g\u0301\ng\fg\16g\u0304\13g\3g\5g\u0307\ng\3g")
buf.write("\5g\u030a\ng\3g\3g\6g\u030e\ng\rg\16g\u030f\3g\5g\u0313")
buf.write("\ng\3g\5g\u0316\ng\3g\6g\u0319\ng\rg\16g\u031a\3g\3g\5")
buf.write("g\u031f\ng\3g\6g\u0322\ng\rg\16g\u0323\3g\5g\u0327\ng")
buf.write("\3g\5g\u032a\ng\3h\3h\5h\u032e\nh\3h\6h\u0331\nh\rh\16")
buf.write("h\u0332\3i\3i\3j\3j\3j\5j\u033a\nj\3k\3k\3k\3k\3k\3k\3")
buf.write("k\3k\3k\5k\u0345\nk\3l\3l\3l\3l\3l\3l\3l\3m\3m\3m\3m\3")
buf.write("n\3n\3n\3n\3o\3o\3p\3p\3p\3p\7p\u035c\np\fp\16p\u035f")
buf.write("\13p\3p\3p\3p\3p\3p\3q\3q\3q\3q\7q\u036a\nq\fq\16q\u036d")
buf.write("\13q\3q\5q\u0370\nq\3q\3q\3q\3q\3r\3r\7r\u0378\nr\fr\16")
buf.write("r\u037b\13r\3r\5r\u037e\nr\3r\3r\3r\3r\3\u035d\2s\3\3")
buf.write("\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16")
buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61")
buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*")
buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w")
buf.write("=y>{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008b")
buf.write("G\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009b")
buf.write("O\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab")
buf.write("W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb")
buf.write("_\u00bd\2\u00bf`\u00c1a\u00c3b\u00c5c\u00c7d\u00c9\2\u00cb")
buf.write("\2\u00cde\u00cf\2\u00d1\2\u00d3\2\u00d5\2\u00d7\2\u00d9")
buf.write("f\u00dbg\u00ddh\u00dfi\u00e1j\u00e3k\3\2\20\6\2&&C\\a")
buf.write("ac|\4\2))^^\4\2$$^^\4\2ZZzz\5\2\62;CHch\6\2NNWWnnww\4")
buf.write("\2WWww\4\2NNnn\4\2GGgg\4\2--//\6\2FFHHffhh\t\2))^^ddh")
buf.write("hppttvv\5\2\13\f\16\17\"\"\4\2\f\f\17\17\2\u03a2\2\3\3")
buf.write("\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2")
buf.write("\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2")
buf.write("\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2")
buf.write("\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2")
buf.write("\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3")
buf.write("\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2")
buf.write("\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3")
buf.write("\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K")
buf.write("\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2")
buf.write("U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2")
buf.write("\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2")
buf.write("\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2")
buf.write("\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3")
buf.write("\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083")
buf.write("\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2")
buf.write("\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091")
buf.write("\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2")
buf.write("\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f")
buf.write("\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2")
buf.write("\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad")
buf.write("\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2")
buf.write("\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb")
buf.write("\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2")
buf.write("\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00cd\3\2\2\2\2\u00d9")
buf.write("\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2")
buf.write("\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\3\u00e5\3\2\2\2\5\u00e7")
buf.write("\3\2\2\2\7\u00e9\3\2\2\2\t\u00f1\3\2\2\2\13\u00f3\3\2")
buf.write("\2\2\r\u00f5\3\2\2\2\17\u00fc\3\2\2\2\21\u0103\3\2\2\2")
buf.write("\23\u0108\3\2\2\2\25\u0111\3\2\2\2\27\u0118\3\2\2\2\31")
buf.write("\u011d\3\2\2\2\33\u0122\3\2\2\2\35\u0128\3\2\2\2\37\u012c")
buf.write("\3\2\2\2!\u0131\3\2\2\2#\u0137\3\2\2\2%\u013e\3\2\2\2")
buf.write("\'\u0145\3\2\2\2)\u014e\3\2\2\2+\u0150\3\2\2\2-\u0157")
buf.write("\3\2\2\2/\u015d\3\2\2\2\61\u015f\3\2\2\2\63\u0164\3\2")
buf.write("\2\2\65\u016a\3\2\2\2\67\u0173\3\2\2\29\u0176\3\2\2\2")
buf.write(";\u017a\3\2\2\2=\u0183\3\2\2\2?\u0189\3\2\2\2A\u0193\3")
buf.write("\2\2\2C\u019c\3\2\2\2E\u01ba\3\2\2\2G\u01c1\3\2\2\2I\u01d1")
buf.write("\3\2\2\2K\u01e4\3\2\2\2M\u01eb\3\2\2\2O\u01ed\3\2\2\2")
buf.write("Q\u01ef\3\2\2\2S\u01f1\3\2\2\2U\u01f3\3\2\2\2W\u01f5\3")
buf.write("\2\2\2Y\u01f9\3\2\2\2[\u01fb\3\2\2\2]\u01fd\3\2\2\2_\u01ff")
buf.write("\3\2\2\2a\u0201\3\2\2\2c\u0204\3\2\2\2e\u0207\3\2\2\2")
buf.write("g\u020e\3\2\2\2i\u0210\3\2\2\2k\u0213\3\2\2\2m\u0215\3")
buf.write("\2\2\2o\u0217\3\2\2\2q\u0219\3\2\2\2s\u021c\3\2\2\2u\u021f")
buf.write("\3\2\2\2w\u0222\3\2\2\2y\u0225\3\2\2\2{\u0228\3\2\2\2")
buf.write("}\u022c\3\2\2\2\177\u0230\3\2\2\2\u0081\u0233\3\2\2\2")
buf.write("\u0083\u0236\3\2\2\2\u0085\u0239\3\2\2\2\u0087\u023b\3")
buf.write("\2\2\2\u0089\u023e\3\2\2\2\u008b\u0241\3\2\2\2\u008d\u0243")
buf.write("\3\2\2\2\u008f\u0245\3\2\2\2\u0091\u0248\3\2\2\2\u0093")
buf.write("\u024b\3\2\2\2\u0095\u024d\3\2\2\2\u0097\u024f\3\2\2\2")
buf.write("\u0099\u0252\3\2\2\2\u009b\u0255\3\2\2\2\u009d\u0258\3")
buf.write("\2\2\2\u009f\u025b\3\2\2\2\u00a1\u0263\3\2\2\2\u00a3\u0268")
buf.write("\3\2\2\2\u00a5\u026e\3\2\2\2\u00a7\u0273\3\2\2\2\u00a9")
buf.write("\u027b\3\2\2\2\u00ab\u027e\3\2\2\2\u00ad\u0283\3\2\2\2")
buf.write("\u00af\u028a\3\2\2\2\u00b1\u0290\3\2\2\2\u00b3\u0293\3")
buf.write("\2\2\2\u00b5\u0298\3\2\2\2\u00b7\u02a1\3\2\2\2\u00b9\u02a7")
buf.write("\3\2\2\2\u00bb\u02ae\3\2\2\2\u00bd\u02b6\3\2\2\2\u00bf")
buf.write("\u02b9\3\2\2\2\u00c1\u02c3\3\2\2\2\u00c3\u02cf\3\2\2\2")
buf.write("\u00c5\u02e1\3\2\2\2\u00c7\u02e6\3\2\2\2\u00c9\u02ef\3")
buf.write("\2\2\2\u00cb\u02f7\3\2\2\2\u00cd\u0329\3\2\2\2\u00cf\u032b")
buf.write("\3\2\2\2\u00d1\u0334\3\2\2\2\u00d3\u0339\3\2\2\2\u00d5")
buf.write("\u0344\3\2\2\2\u00d7\u0346\3\2\2\2\u00d9\u034d\3\2\2\2")
buf.write("\u00db\u0351\3\2\2\2\u00dd\u0355\3\2\2\2\u00df\u0357\3")
buf.write("\2\2\2\u00e1\u0365\3\2\2\2\u00e3\u0375\3\2\2\2\u00e5\u00e6")
buf.write("\7}\2\2\u00e6\4\3\2\2\2\u00e7\u00e8\7=\2\2\u00e8\6\3\2")
buf.write("\2\2\u00e9\u00ea\7v\2\2\u00ea\u00eb\7{\2\2\u00eb\u00ec")
buf.write("\7r\2\2\u00ec\u00ed\7g\2\2\u00ed\u00ee\7f\2\2\u00ee\u00ef")
buf.write("\7g\2\2\u00ef\u00f0\7h\2\2\u00f0\b\3\2\2\2\u00f1\u00f2")
buf.write("\7.\2\2\u00f2\n\3\2\2\2\u00f3\u00f4\7?\2\2\u00f4\f\3\2")
buf.write("\2\2\u00f5\u00f6\7g\2\2\u00f6\u00f7\7z\2\2\u00f7\u00f8")
buf.write("\7v\2\2\u00f8\u00f9\7g\2\2\u00f9\u00fa\7t\2\2\u00fa\u00fb")
buf.write("\7p\2\2\u00fb\16\3\2\2\2\u00fc\u00fd\7u\2\2\u00fd\u00fe")
buf.write("\7v\2\2\u00fe\u00ff\7c\2\2\u00ff\u0100\7v\2\2\u0100\u0101")
buf.write("\7k\2\2\u0101\u0102\7e\2\2\u0102\20\3\2\2\2\u0103\u0104")
buf.write("\7c\2\2\u0104\u0105\7w\2\2\u0105\u0106\7v\2\2\u0106\u0107")
buf.write("\7q\2\2\u0107\22\3\2\2\2\u0108\u0109\7t\2\2\u0109\u010a")
buf.write("\7g\2\2\u010a\u010b\7i\2\2\u010b\u010c\7k\2\2\u010c\u010d")
buf.write("\7u\2\2\u010d\u010e\7v\2\2\u010e\u010f\7g\2\2\u010f\u0110")
buf.write("\7t\2\2\u0110\24\3\2\2\2\u0111\u0112\7U\2\2\u0112\u0113")
buf.write("\7V\2\2\u0113\u0114\7C\2\2\u0114\u0115\7V\2\2\u0115\u0116")
buf.write("\7K\2\2\u0116\u0117\7E\2\2\u0117\26\3\2\2\2\u0118\u0119")
buf.write("\7x\2\2\u0119\u011a\7q\2\2\u011a\u011b\7k\2\2\u011b\u011c")
buf.write("\7f\2\2\u011c\30\3\2\2\2\u011d\u011e\7e\2\2\u011e\u011f")
buf.write("\7j\2\2\u011f\u0120\7c\2\2\u0120\u0121\7t\2\2\u0121\32")
buf.write("\3\2\2\2\u0122\u0123\7u\2\2\u0123\u0124\7j\2\2\u0124\u0125")
buf.write("\7q\2\2\u0125\u0126\7t\2\2\u0126\u0127\7v\2\2\u0127\34")
buf.write("\3\2\2\2\u0128\u0129\7k\2\2\u0129\u012a\7p\2\2\u012a\u012b")
buf.write("\7v\2\2\u012b\36\3\2\2\2\u012c\u012d\7n\2\2\u012d\u012e")
buf.write("\7q\2\2\u012e\u012f\7p\2\2\u012f\u0130\7i\2\2\u0130 \3")
buf.write("\2\2\2\u0131\u0132\7h\2\2\u0132\u0133\7n\2\2\u0133\u0134")
buf.write("\7q\2\2\u0134\u0135\7c\2\2\u0135\u0136\7v\2\2\u0136\"")
buf.write("\3\2\2\2\u0137\u0138\7f\2\2\u0138\u0139\7q\2\2\u0139\u013a")
buf.write("\7w\2\2\u013a\u013b\7d\2\2\u013b\u013c\7n\2\2\u013c\u013d")
buf.write("\7g\2\2\u013d$\3\2\2\2\u013e\u013f\7u\2\2\u013f\u0140")
buf.write("\7k\2\2\u0140\u0141\7i\2\2\u0141\u0142\7p\2\2\u0142\u0143")
buf.write("\7g\2\2\u0143\u0144\7f\2\2\u0144&\3\2\2\2\u0145\u0146")
buf.write("\7w\2\2\u0146\u0147\7p\2\2\u0147\u0148\7u\2\2\u0148\u0149")
buf.write("\7k\2\2\u0149\u014a\7i\2\2\u014a\u014b\7p\2\2\u014b\u014c")
buf.write("\7g\2\2\u014c\u014d\7f\2\2\u014d(\3\2\2\2\u014e\u014f")
buf.write("\7\177\2\2\u014f*\3\2\2\2\u0150\u0151\7u\2\2\u0151\u0152")
buf.write("\7v\2\2\u0152\u0153\7t\2\2\u0153\u0154\7w\2\2\u0154\u0155")
buf.write("\7e\2\2\u0155\u0156\7v\2\2\u0156,\3\2\2\2\u0157\u0158")
buf.write("\7w\2\2\u0158\u0159\7p\2\2\u0159\u015a\7k\2\2\u015a\u015b")
buf.write("\7q\2\2\u015b\u015c\7p\2\2\u015c.\3\2\2\2\u015d\u015e")
buf.write("\7<\2\2\u015e\60\3\2\2\2\u015f\u0160\7g\2\2\u0160\u0161")
buf.write("\7p\2\2\u0161\u0162\7w\2\2\u0162\u0163\7o\2\2\u0163\62")
buf.write("\3\2\2\2\u0164\u0165\7e\2\2\u0165\u0166\7q\2\2\u0166\u0167")
buf.write("\7p\2\2\u0167\u0168\7u\2\2\u0168\u0169\7v\2\2\u0169\64")
buf.write("\3\2\2\2\u016a\u016b\7x\2\2\u016b\u016c\7q\2\2\u016c\u016d")
buf.write("\7n\2\2\u016d\u016e\7c\2\2\u016e\u016f\7v\2\2\u016f\u0170")
buf.write("\7k\2\2\u0170\u0171\7n\2\2\u0171\u0172\7g\2\2\u0172\66")
buf.write("\3\2\2\2\u0173\u0174\7K\2\2\u0174\u0175\7P\2\2\u01758")
buf.write("\3\2\2\2\u0176\u0177\7Q\2\2\u0177\u0178\7W\2\2\u0178\u0179")
buf.write("\7V\2\2\u0179:\3\2\2\2\u017a\u017b\7Q\2\2\u017b\u017c")
buf.write("\7R\2\2\u017c\u017d\7V\2\2\u017d\u017e\7K\2\2\u017e\u017f")
buf.write("\7Q\2\2\u017f\u0180\7P\2\2\u0180\u0181\7C\2\2\u0181\u0182")
buf.write("\7N\2\2\u0182<\3\2\2\2\u0183\u0184\7E\2\2\u0184\u0185")
buf.write("\7Q\2\2\u0185\u0186\7P\2\2\u0186\u0187\7U\2\2\u0187\u0188")
buf.write("\7V\2\2\u0188>\3\2\2\2\u0189\u018a\7W\2\2\u018a\u018b")
buf.write("\7P\2\2\u018b\u018c\7C\2\2\u018c\u018d\7N\2\2\u018d\u018e")
buf.write("\7K\2\2\u018e\u018f\7I\2\2\u018f\u0190\7P\2\2\u0190\u0191")
buf.write("\7G\2\2\u0191\u0192\7F\2\2\u0192@\3\2\2\2\u0193\u0194")
buf.write("\7X\2\2\u0194\u0195\7Q\2\2\u0195\u0196\7N\2\2\u0196\u0197")
buf.write("\7C\2\2\u0197\u0198\7V\2\2\u0198\u0199\7K\2\2\u0199\u019a")
buf.write("\7N\2\2\u019a\u019b\7G\2\2\u019bB\3\2\2\2\u019c\u019d")
buf.write("\7I\2\2\u019d\u019e\7N\2\2\u019e\u019f\7Q\2\2\u019f\u01a0")
buf.write("\7D\2\2\u01a0\u01a1\7C\2\2\u01a1\u01a2\7N\2\2\u01a2\u01a3")
buf.write("\7a\2\2\u01a3\u01a4\7T\2\2\u01a4\u01a5\7G\2\2\u01a5\u01a6")
buf.write("\7O\2\2\u01a6\u01a7\7Q\2\2\u01a7\u01a8\7X\2\2\u01a8\u01a9")
buf.write("\7G\2\2\u01a9\u01aa\7a\2\2\u01aa\u01ab\7K\2\2\u01ab\u01ac")
buf.write("\7H\2\2\u01ac\u01ad\7a\2\2\u01ad\u01ae\7W\2\2\u01ae\u01af")
buf.write("\7P\2\2\u01af\u01b0\7T\2\2\u01b0\u01b1\7G\2\2\u01b1\u01b2")
buf.write("\7H\2\2\u01b2\u01b3\7G\2\2\u01b3\u01b4\7T\2\2\u01b4\u01b5")
buf.write("\7G\2\2\u01b5\u01b6\7P\2\2\u01b6\u01b7\7E\2\2\u01b7\u01b8")
buf.write("\7G\2\2\u01b8\u01b9\7F\2\2\u01b9D\3\2\2\2\u01ba\u01bb")
buf.write("\7G\2\2\u01bb\u01bc\7H\2\2\u01bc\u01bd\7K\2\2\u01bd\u01be")
buf.write("\7C\2\2\u01be\u01bf\7R\2\2\u01bf\u01c0\7K\2\2\u01c0F\3")
buf.write("\2\2\2\u01c1\u01c2\7G\2\2\u01c2\u01c3\7H\2\2\u01c3\u01c4")
buf.write("\7K\2\2\u01c4\u01c5\7a\2\2\u01c5\u01c6\7D\2\2\u01c6\u01c7")
buf.write("\7Q\2\2\u01c7\u01c8\7Q\2\2\u01c8\u01c9\7V\2\2\u01c9\u01ca")
buf.write("\7U\2\2\u01ca\u01cb\7G\2\2\u01cb\u01cc\7T\2\2\u01cc\u01cd")
buf.write("\7X\2\2\u01cd\u01ce\7K\2\2\u01ce\u01cf\7E\2\2\u01cf\u01d0")
buf.write("\7G\2\2\u01d0H\3\2\2\2\u01d1\u01d2\7G\2\2\u01d2\u01d3")
buf.write("\7H\2\2\u01d3\u01d4\7K\2\2\u01d4\u01d5\7a\2\2\u01d5\u01d6")
buf.write("\7T\2\2\u01d6\u01d7\7W\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9")
buf.write("\7V\2\2\u01d9\u01da\7K\2\2\u01da\u01db\7O\2\2\u01db\u01dc")
buf.write("\7G\2\2\u01dc\u01dd\7U\2\2\u01dd\u01de\7G\2\2\u01de\u01df")
buf.write("\7T\2\2\u01df\u01e0\7X\2\2\u01e0\u01e1\7K\2\2\u01e1\u01e2")
buf.write("\7E\2\2\u01e2\u01e3\7G\2\2\u01e3J\3\2\2\2\u01e4\u01e5")
buf.write("\7R\2\2\u01e5\u01e6\7C\2\2\u01e6\u01e7\7E\2\2\u01e7\u01e8")
buf.write("\7M\2\2\u01e8\u01e9\7G\2\2\u01e9\u01ea\7F\2\2\u01eaL\3")
buf.write("\2\2\2\u01eb\u01ec\7*\2\2\u01ecN\3\2\2\2\u01ed\u01ee\7")
buf.write("+\2\2\u01eeP\3\2\2\2\u01ef\u01f0\7]\2\2\u01f0R\3\2\2\2")
buf.write("\u01f1\u01f2\7_\2\2\u01f2T\3\2\2\2\u01f3\u01f4\7,\2\2")
buf.write("\u01f4V\3\2\2\2\u01f5\u01f6\7\60\2\2\u01f6\u01f7\7\60")
buf.write("\2\2\u01f7\u01f8\7\60\2\2\u01f8X\3\2\2\2\u01f9\u01fa\7")
buf.write("-\2\2\u01faZ\3\2\2\2\u01fb\u01fc\7/\2\2\u01fc\\\3\2\2")
buf.write("\2\u01fd\u01fe\7\61\2\2\u01fe^\3\2\2\2\u01ff\u0200\7\'")
buf.write("\2\2\u0200`\3\2\2\2\u0201\u0202\7-\2\2\u0202\u0203\7-")
buf.write("\2\2\u0203b\3\2\2\2\u0204\u0205\7/\2\2\u0205\u0206\7/")
buf.write("\2\2\u0206d\3\2\2\2\u0207\u0208\7u\2\2\u0208\u0209\7k")
buf.write("\2\2\u0209\u020a\7|\2\2\u020a\u020b\7g\2\2\u020b\u020c")
buf.write("\7q\2\2\u020c\u020d\7h\2\2\u020df\3\2\2\2\u020e\u020f")
buf.write("\7\60\2\2\u020fh\3\2\2\2\u0210\u0211\7/\2\2\u0211\u0212")
buf.write("\7@\2\2\u0212j\3\2\2\2\u0213\u0214\7(\2\2\u0214l\3\2\2")
buf.write("\2\u0215\u0216\7\u0080\2\2\u0216n\3\2\2\2\u0217\u0218")
buf.write("\7#\2\2\u0218p\3\2\2\2\u0219\u021a\7,\2\2\u021a\u021b")
buf.write("\7?\2\2\u021br\3\2\2\2\u021c\u021d\7\61\2\2\u021d\u021e")
buf.write("\7?\2\2\u021et\3\2\2\2\u021f\u0220\7\'\2\2\u0220\u0221")
buf.write("\7?\2\2\u0221v\3\2\2\2\u0222\u0223\7-\2\2\u0223\u0224")
buf.write("\7?\2\2\u0224x\3\2\2\2\u0225\u0226\7/\2\2\u0226\u0227")
buf.write("\7?\2\2\u0227z\3\2\2\2\u0228\u0229\7>\2\2\u0229\u022a")
buf.write("\7>\2\2\u022a\u022b\7?\2\2\u022b|\3\2\2\2\u022c\u022d")
buf.write("\7@\2\2\u022d\u022e\7@\2\2\u022e\u022f\7?\2\2\u022f~\3")
buf.write("\2\2\2\u0230\u0231\7(\2\2\u0231\u0232\7?\2\2\u0232\u0080")
buf.write("\3\2\2\2\u0233\u0234\7`\2\2\u0234\u0235\7?\2\2\u0235\u0082")
buf.write("\3\2\2\2\u0236\u0237\7~\2\2\u0237\u0238\7?\2\2\u0238\u0084")
buf.write("\3\2\2\2\u0239\u023a\7A\2\2\u023a\u0086\3\2\2\2\u023b")
buf.write("\u023c\7~\2\2\u023c\u023d\7~\2\2\u023d\u0088\3\2\2\2\u023e")
buf.write("\u023f\7(\2\2\u023f\u0240\7(\2\2\u0240\u008a\3\2\2\2\u0241")
buf.write("\u0242\7~\2\2\u0242\u008c\3\2\2\2\u0243\u0244\7`\2\2\u0244")
buf.write("\u008e\3\2\2\2\u0245\u0246\7?\2\2\u0246\u0247\7?\2\2\u0247")
buf.write("\u0090\3\2\2\2\u0248\u0249\7#\2\2\u0249\u024a\7?\2\2\u024a")
buf.write("\u0092\3\2\2\2\u024b\u024c\7>\2\2\u024c\u0094\3\2\2\2")
buf.write("\u024d\u024e\7@\2\2\u024e\u0096\3\2\2\2\u024f\u0250\7")
buf.write(">\2\2\u0250\u0251\7?\2\2\u0251\u0098\3\2\2\2\u0252\u0253")
buf.write("\7@\2\2\u0253\u0254\7?\2\2\u0254\u009a\3\2\2\2\u0255\u0256")
buf.write("\7>\2\2\u0256\u0257\7>\2\2\u0257\u009c\3\2\2\2\u0258\u0259")
buf.write("\7@\2\2\u0259\u025a\7@\2\2\u025a\u009e\3\2\2\2\u025b\u025c")
buf.write("\7a\2\2\u025c\u025d\7a\2\2\u025d\u025e\7c\2\2\u025e\u025f")
buf.write("\7u\2\2\u025f\u0260\7o\2\2\u0260\u0261\7a\2\2\u0261\u0262")
buf.write("\7a\2\2\u0262\u00a0\3\2\2\2\u0263\u0264\7a\2\2\u0264\u0265")
buf.write("\7c\2\2\u0265\u0266\7u\2\2\u0266\u0267\7o\2\2\u0267\u00a2")
buf.write("\3\2\2\2\u0268\u0269\7a\2\2\u0269\u026a\7a\2\2\u026a\u026b")
buf.write("\7c\2\2\u026b\u026c\7u\2\2\u026c\u026d\7o\2\2\u026d\u00a4")
buf.write("\3\2\2\2\u026e\u026f\7e\2\2\u026f\u0270\7c\2\2\u0270\u0271")
buf.write("\7u\2\2\u0271\u0272\7g\2\2\u0272\u00a6\3\2\2\2\u0273\u0274")
buf.write("\7f\2\2\u0274\u0275\7g\2\2\u0275\u0276\7h\2\2\u0276\u0277")
buf.write("\7c\2\2\u0277\u0278\7w\2\2\u0278\u0279\7n\2\2\u0279\u027a")
buf.write("\7v\2\2\u027a\u00a8\3\2\2\2\u027b\u027c\7k\2\2\u027c\u027d")
buf.write("\7h\2\2\u027d\u00aa\3\2\2\2\u027e\u027f\7g\2\2\u027f\u0280")
buf.write("\7n\2\2\u0280\u0281\7u\2\2\u0281\u0282\7g\2\2\u0282\u00ac")
buf.write("\3\2\2\2\u0283\u0284\7u\2\2\u0284\u0285\7y\2\2\u0285\u0286")
buf.write("\7k\2\2\u0286\u0287\7v\2\2\u0287\u0288\7e\2\2\u0288\u0289")
buf.write("\7j\2\2\u0289\u00ae\3\2\2\2\u028a\u028b\7y\2\2\u028b\u028c")
buf.write("\7j\2\2\u028c\u028d\7k\2\2\u028d\u028e\7n\2\2\u028e\u028f")
buf.write("\7g\2\2\u028f\u00b0\3\2\2\2\u0290\u0291\7f\2\2\u0291\u0292")
buf.write("\7q\2\2\u0292\u00b2\3\2\2\2\u0293\u0294\7i\2\2\u0294\u0295")
buf.write("\7q\2\2\u0295\u0296\7v\2\2\u0296\u0297\7q\2\2\u0297\u00b4")
buf.write("\3\2\2\2\u0298\u0299\7e\2\2\u0299\u029a\7q\2\2\u029a\u029b")
buf.write("\7p\2\2\u029b\u029c\7v\2\2\u029c\u029d\7k\2\2\u029d\u029e")
buf.write("\7p\2\2\u029e\u029f\7w\2\2\u029f\u02a0\7g\2\2\u02a0\u00b6")
buf.write("\3\2\2\2\u02a1\u02a2\7d\2\2\u02a2\u02a3\7t\2\2\u02a3\u02a4")
buf.write("\7g\2\2\u02a4\u02a5\7c\2\2\u02a5\u02a6\7m\2\2\u02a6\u00b8")
buf.write("\3\2\2\2\u02a7\u02a8\7t\2\2\u02a8\u02a9\7g\2\2\u02a9\u02aa")
buf.write("\7v\2\2\u02aa\u02ab\7w\2\2\u02ab\u02ac\7t\2\2\u02ac\u02ad")
buf.write("\7p\2\2\u02ad\u00ba\3\2\2\2\u02ae\u02b3\5\u00bd_\2\u02af")
buf.write("\u02b2\5\u00bd_\2\u02b0\u02b2\4\62;\2\u02b1\u02af\3\2")
buf.write("\2\2\u02b1\u02b0\3\2\2\2\u02b2\u02b5\3\2\2\2\u02b3\u02b1")
buf.write("\3\2\2\2\u02b3\u02b4\3\2\2\2\u02b4\u00bc\3\2\2\2\u02b5")
buf.write("\u02b3\3\2\2\2\u02b6\u02b7\t\2\2\2\u02b7\u00be\3\2\2\2")
buf.write("\u02b8\u02ba\7N\2\2\u02b9\u02b8\3\2\2\2\u02b9\u02ba\3")
buf.write("\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02be\7)\2\2\u02bc\u02bf")
buf.write("\5\u00d3j\2\u02bd\u02bf\n\3\2\2\u02be\u02bc\3\2\2\2\u02be")
buf.write("\u02bd\3\2\2\2\u02bf\u02c0\3\2\2\2\u02c0\u02c1\7)\2\2")
buf.write("\u02c1\u00c0\3\2\2\2\u02c2\u02c4\7N\2\2\u02c3\u02c2\3")
buf.write("\2\2\2\u02c3\u02c4\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5\u02ca")
buf.write("\7$\2\2\u02c6\u02c9\5\u00d3j\2\u02c7\u02c9\n\4\2\2\u02c8")
buf.write("\u02c6\3\2\2\2\u02c8\u02c7\3\2\2\2\u02c9\u02cc\3\2\2\2")
buf.write("\u02ca\u02c8\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02cd\3")
buf.write("\2\2\2\u02cc\u02ca\3\2\2\2\u02cd\u02ce\7$\2\2\u02ce\u00c2")
buf.write("\3\2\2\2\u02cf\u02d0\7\62\2\2\u02d0\u02d2\t\5\2\2\u02d1")
buf.write("\u02d3\5\u00c9e\2\u02d2\u02d1\3\2\2\2\u02d3\u02d4\3\2")
buf.write("\2\2\u02d4\u02d2\3\2\2\2\u02d4\u02d5\3\2\2\2\u02d5\u02d7")
buf.write("\3\2\2\2\u02d6\u02d8\5\u00cbf\2\u02d7\u02d6\3\2\2\2\u02d7")
buf.write("\u02d8\3\2\2\2\u02d8\u00c4\3\2\2\2\u02d9\u02e2\7\62\2")
buf.write("\2\u02da\u02de\4\63;\2\u02db\u02dd\4\62;\2\u02dc\u02db")
buf.write("\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de")
buf.write("\u02df\3\2\2\2\u02df\u02e2\3\2\2\2\u02e0\u02de\3\2\2\2")
buf.write("\u02e1\u02d9\3\2\2\2\u02e1\u02da\3\2\2\2\u02e2\u02e4\3")
buf.write("\2\2\2\u02e3\u02e5\5\u00cbf\2\u02e4\u02e3\3\2\2\2\u02e4")
buf.write("\u02e5\3\2\2\2\u02e5\u00c6\3\2\2\2\u02e6\u02e8\7\62\2")
buf.write("\2\u02e7\u02e9\4\629\2\u02e8\u02e7\3\2\2\2\u02e9\u02ea")
buf.write("\3\2\2\2\u02ea\u02e8\3\2\2\2\u02ea\u02eb\3\2\2\2\u02eb")
buf.write("\u02ed\3\2\2\2\u02ec\u02ee\5\u00cbf\2\u02ed\u02ec\3\2")
buf.write("\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00c8\3\2\2\2\u02ef\u02f0")
buf.write("\t\6\2\2\u02f0\u00ca\3\2\2\2\u02f1\u02f8\t\7\2\2\u02f2")
buf.write("\u02f3\t\b\2\2\u02f3\u02f8\t\t\2\2\u02f4\u02f5\t\b\2\2")
buf.write("\u02f5\u02f6\t\t\2\2\u02f6\u02f8\t\t\2\2\u02f7\u02f1\3")
buf.write("\2\2\2\u02f7\u02f2\3\2\2\2\u02f7\u02f4\3\2\2\2\u02f8\u00cc")
buf.write("\3\2\2\2\u02f9\u02fb\4\62;\2\u02fa\u02f9\3\2\2\2\u02fb")
buf.write("\u02fc\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc\u02fd\3\2\2\2")
buf.write("\u02fd\u02fe\3\2\2\2\u02fe\u0302\7\60\2\2\u02ff\u0301")
buf.write("\4\62;\2\u0300\u02ff\3\2\2\2\u0301\u0304\3\2\2\2\u0302")
buf.write("\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0306\3\2\2\2")
buf.write("\u0304\u0302\3\2\2\2\u0305\u0307\5\u00cfh\2\u0306\u0305")
buf.write("\3\2\2\2\u0306\u0307\3\2\2\2\u0307\u0309\3\2\2\2\u0308")
buf.write("\u030a\5\u00d1i\2\u0309\u0308\3\2\2\2\u0309\u030a\3\2")
buf.write("\2\2\u030a\u032a\3\2\2\2\u030b\u030d\7\60\2\2\u030c\u030e")
buf.write("\4\62;\2\u030d\u030c\3\2\2\2\u030e\u030f\3\2\2\2\u030f")
buf.write("\u030d\3\2\2\2\u030f\u0310\3\2\2\2\u0310\u0312\3\2\2\2")
buf.write("\u0311\u0313\5\u00cfh\2\u0312\u0311\3\2\2\2\u0312\u0313")
buf.write("\3\2\2\2\u0313\u0315\3\2\2\2\u0314\u0316\5\u00d1i\2\u0315")
buf.write("\u0314\3\2\2\2\u0315\u0316\3\2\2\2\u0316\u032a\3\2\2\2")
buf.write("\u0317\u0319\4\62;\2\u0318\u0317\3\2\2\2\u0319\u031a\3")
buf.write("\2\2\2\u031a\u0318\3\2\2\2\u031a\u031b\3\2\2\2\u031b\u031c")
buf.write("\3\2\2\2\u031c\u031e\5\u00cfh\2\u031d\u031f\5\u00d1i\2")
buf.write("\u031e\u031d\3\2\2\2\u031e\u031f\3\2\2\2\u031f\u032a\3")
buf.write("\2\2\2\u0320\u0322\4\62;\2\u0321\u0320\3\2\2\2\u0322\u0323")
buf.write("\3\2\2\2\u0323\u0321\3\2\2\2\u0323\u0324\3\2\2\2\u0324")
buf.write("\u0326\3\2\2\2\u0325\u0327\5\u00cfh\2\u0326\u0325\3\2")
buf.write("\2\2\u0326\u0327\3\2\2\2\u0327\u0328\3\2\2\2\u0328\u032a")
buf.write("\5\u00d1i\2\u0329\u02fa\3\2\2\2\u0329\u030b\3\2\2\2\u0329")
buf.write("\u0318\3\2\2\2\u0329\u0321\3\2\2\2\u032a\u00ce\3\2\2\2")
buf.write("\u032b\u032d\t\n\2\2\u032c\u032e\t\13\2\2\u032d\u032c")
buf.write("\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0330\3\2\2\2\u032f")
buf.write("\u0331\4\62;\2\u0330\u032f\3\2\2\2\u0331\u0332\3\2\2\2")
buf.write("\u0332\u0330\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u00d0\3")
buf.write("\2\2\2\u0334\u0335\t\f\2\2\u0335\u00d2\3\2\2\2\u0336\u0337")
buf.write("\7^\2\2\u0337\u033a\t\r\2\2\u0338\u033a\5\u00d5k\2\u0339")
buf.write("\u0336\3\2\2\2\u0339\u0338\3\2\2\2\u033a\u00d4\3\2\2\2")
buf.write("\u033b\u033c\7^\2\2\u033c\u033d\4\62\65\2\u033d\u033e")
buf.write("\4\629\2\u033e\u0345\4\629\2\u033f\u0340\7^\2\2\u0340")
buf.write("\u0341\4\629\2\u0341\u0345\4\629\2\u0342\u0343\7^\2\2")
buf.write("\u0343\u0345\4\629\2\u0344\u033b\3\2\2\2\u0344\u033f\3")
buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0345\u00d6\3\2\2\2\u0346\u0347")
buf.write("\7^\2\2\u0347\u0348\7w\2\2\u0348\u0349\5\u00c9e\2\u0349")
buf.write("\u034a\5\u00c9e\2\u034a\u034b\5\u00c9e\2\u034b\u034c\5")
buf.write("\u00c9e\2\u034c\u00d8\3\2\2\2\u034d\u034e\t\16\2\2\u034e")
buf.write("\u034f\3\2\2\2\u034f\u0350\bm\2\2\u0350\u00da\3\2\2\2")
buf.write("\u0351\u0352\7^\2\2\u0352\u0353\3\2\2\2\u0353\u0354\b")
buf.write("n\2\2\u0354\u00dc\3\2\2\2\u0355\u0356\4\5\0\2\u0356\u00de")
buf.write("\3\2\2\2\u0357\u0358\7\61\2\2\u0358\u0359\7,\2\2\u0359")
buf.write("\u035d\3\2\2\2\u035a\u035c\13\2\2\2\u035b\u035a\3\2\2")
buf.write("\2\u035c\u035f\3\2\2\2\u035d\u035e\3\2\2\2\u035d\u035b")
buf.write("\3\2\2\2\u035e\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360")
buf.write("\u0361\7,\2\2\u0361\u0362\7\61\2\2\u0362\u0363\3\2\2\2")
buf.write("\u0363\u0364\bp\2\2\u0364\u00e0\3\2\2\2\u0365\u0366\7")
buf.write("\61\2\2\u0366\u0367\7\61\2\2\u0367\u036b\3\2\2\2\u0368")
buf.write("\u036a\n\17\2\2\u0369\u0368\3\2\2\2\u036a\u036d\3\2\2")
buf.write("\2\u036b\u0369\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036f")
buf.write("\3\2\2\2\u036d\u036b\3\2\2\2\u036e\u0370\7\17\2\2\u036f")
buf.write("\u036e\3\2\2\2\u036f\u0370\3\2\2\2\u0370\u0371\3\2\2\2")
buf.write("\u0371\u0372\7\f\2\2\u0372\u0373\3\2\2\2\u0373\u0374\b")
buf.write("q\2\2\u0374\u00e2\3\2\2\2\u0375\u0379\7%\2\2\u0376\u0378")
buf.write("\n\17\2\2\u0377\u0376\3\2\2\2\u0378\u037b\3\2\2\2\u0379")
buf.write("\u0377\3\2\2\2\u0379\u037a\3\2\2\2\u037a\u037d\3\2\2\2")
buf.write("\u037b\u0379\3\2\2\2\u037c\u037e\7\17\2\2\u037d\u037c")
buf.write("\3\2\2\2\u037d\u037e\3\2\2\2\u037e\u037f\3\2\2\2\u037f")
buf.write("\u0380\7\f\2\2\u0380\u0381\3\2\2\2\u0381\u0382\br\2\2")
buf.write("\u0382\u00e4\3\2\2\2\'\2\u02b1\u02b3\u02b9\u02be\u02c3")
buf.write("\u02c8\u02ca\u02d4\u02d7\u02de\u02e1\u02e4\u02ea\u02ed")
buf.write("\u02f7\u02fc\u0302\u0306\u0309\u030f\u0312\u0315\u031a")
buf.write("\u031e\u0323\u0326\u0329\u032d\u0332\u0339\u0344\u035d")
buf.write("\u036b\u036f\u0379\u037d\3\2\3\2")
return buf.getvalue()
class CLexer(Lexer):
    """ANTLR-generated lexer for the C grammar (C.g4) used by the ECC tool.

    This class is machine-generated by ANTLR 4.7.1 from C.g4 — do not edit
    the token tables by hand.  In addition to the standard generated lexer
    machinery it carries the ECC-specific ``Store*`` helpers, which record
    recognized C code fragments (predicate expressions, typedefs, function
    definitions, ...) into the module-level ``FileProfile`` lists for later
    checkpoint analysis.
    """

    # ATN deserialized once at class-definition time and shared by all
    # lexer instances; serializedATN() is the generated string above.
    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # Generated token type constants.  T__0..T__91 are anonymous literal
    # tokens (keywords and punctuation — see literalNames below for the
    # mapping); the named constants that follow are the symbolic tokens.
    T__0 = 1
    T__1 = 2
    T__2 = 3
    T__3 = 4
    T__4 = 5
    T__5 = 6
    T__6 = 7
    T__7 = 8
    T__8 = 9
    T__9 = 10
    T__10 = 11
    T__11 = 12
    T__12 = 13
    T__13 = 14
    T__14 = 15
    T__15 = 16
    T__16 = 17
    T__17 = 18
    T__18 = 19
    T__19 = 20
    T__20 = 21
    T__21 = 22
    T__22 = 23
    T__23 = 24
    T__24 = 25
    T__25 = 26
    T__26 = 27
    T__27 = 28
    T__28 = 29
    T__29 = 30
    T__30 = 31
    T__31 = 32
    T__32 = 33
    T__33 = 34
    T__34 = 35
    T__35 = 36
    T__36 = 37
    T__37 = 38
    T__38 = 39
    T__39 = 40
    T__40 = 41
    T__41 = 42
    T__42 = 43
    T__43 = 44
    T__44 = 45
    T__45 = 46
    T__46 = 47
    T__47 = 48
    T__48 = 49
    T__49 = 50
    T__50 = 51
    T__51 = 52
    T__52 = 53
    T__53 = 54
    T__54 = 55
    T__55 = 56
    T__56 = 57
    T__57 = 58
    T__58 = 59
    T__59 = 60
    T__60 = 61
    T__61 = 62
    T__62 = 63
    T__63 = 64
    T__64 = 65
    T__65 = 66
    T__66 = 67
    T__67 = 68
    T__68 = 69
    T__69 = 70
    T__70 = 71
    T__71 = 72
    T__72 = 73
    T__73 = 74
    T__74 = 75
    T__75 = 76
    T__76 = 77
    T__77 = 78
    T__78 = 79
    T__79 = 80
    T__80 = 81
    T__81 = 82
    T__82 = 83
    T__83 = 84
    T__84 = 85
    T__85 = 86
    T__86 = 87
    T__87 = 88
    T__88 = 89
    T__89 = 90
    T__90 = 91
    T__91 = 92
    # Named (symbolic) token types.
    IDENTIFIER = 93
    CHARACTER_LITERAL = 94
    STRING_LITERAL = 95
    HEX_LITERAL = 96
    DECIMAL_LITERAL = 97
    OCTAL_LITERAL = 98
    FLOATING_POINT_LITERAL = 99
    WS = 100
    BS = 101
    UnicodeVocabulary = 102
    COMMENT = 103
    LINE_COMMENT = 104
    LINE_COMMAND = 105

    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]

    modeNames = [ "DEFAULT_MODE" ]

    # Display names of the anonymous literal tokens, indexed by token type
    # (index 0 is the <INVALID> placeholder).
    literalNames = [ "<INVALID>",
            "'{'", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
            "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'",
            "'int'", "'long'", "'float'", "'double'", "'signed'", "'unsigned'",
            "'}'", "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'",
            "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'",
            "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'", "'EFI_BOOTSERVICE'",
            "'EFI_RUNTIMESERVICE'", "'PACKED'", "'('", "')'", "'['", "']'",
            "'*'", "'...'", "'+'", "'-'", "'/'", "'%'", "'++'", "'--'",
            "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='", "'/='",
            "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
            "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'",
            "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'",
            "'__asm'", "'case'", "'default'", "'if'", "'else'", "'switch'",
            "'while'", "'do'", "'goto'", "'continue'", "'break'", "'return'" ]

    # Names of the symbolic tokens (continues the numbering after the
    # literal tokens above).
    symbolicNames = [ "<INVALID>",
            "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL", "HEX_LITERAL",
            "DECIMAL_LITERAL", "OCTAL_LITERAL", "FLOATING_POINT_LITERAL",
            "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
            "LINE_COMMAND" ]

    # Lexer rule names; includes fragment rules (LETTER, HexDigit, ...)
    # that never produce tokens on their own.
    ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
                  "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
                  "T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
                  "T__20", "T__21", "T__22", "T__23", "T__24", "T__25",
                  "T__26", "T__27", "T__28", "T__29", "T__30", "T__31",
                  "T__32", "T__33", "T__34", "T__35", "T__36", "T__37",
                  "T__38", "T__39", "T__40", "T__41", "T__42", "T__43",
                  "T__44", "T__45", "T__46", "T__47", "T__48", "T__49",
                  "T__50", "T__51", "T__52", "T__53", "T__54", "T__55",
                  "T__56", "T__57", "T__58", "T__59", "T__60", "T__61",
                  "T__62", "T__63", "T__64", "T__65", "T__66", "T__67",
                  "T__68", "T__69", "T__70", "T__71", "T__72", "T__73",
                  "T__74", "T__75", "T__76", "T__77", "T__78", "T__79",
                  "T__80", "T__81", "T__82", "T__83", "T__84", "T__85",
                  "T__86", "T__87", "T__88", "T__89", "T__90", "T__91",
                  "IDENTIFIER", "LETTER", "CHARACTER_LITERAL", "STRING_LITERAL",
                  "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL", "HexDigit",
                  "IntegerTypeSuffix", "FLOATING_POINT_LITERAL", "Exponent",
                  "FloatTypeSuffix", "EscapeSequence", "OctalEscape", "UnicodeEscape",
                  "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
                  "LINE_COMMAND" ]

    grammarFileName = "C.g4"

    # @param output= sys.stdout Type: TextIO
    def __init__(self,input=None,output= sys.stdout):
        """Create the lexer over *input*, emitting diagnostics to *output*."""
        super().__init__(input, output)
        self.checkVersion("4.7.1")
        # Per-instance ATN simulator sharing the class-level DFA cache.
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None

    def printTokenInfo(self,line,offset,tokenText):
        """Debug helper: print a token's position and text as 'line,offset:text'."""
        print(str(line)+ ',' + str(offset) + ':' + str(tokenText))

    def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
        """Record a predicate expression fragment into FileProfile.PredicateExpressionList."""
        PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.PredicateExpressionList.append(PredExp)

    def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
        """Record an enum definition fragment into FileProfile.EnumerationDefinitionList."""
        EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.EnumerationDefinitionList.append(EnumDef)

    def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
        """Record a struct/union definition fragment into FileProfile.StructUnionDefinitionList."""
        SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.StructUnionDefinitionList.append(SUDef)

    def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
        """Record a typedef (FromText -> ToText) into FileProfile.TypedefDefinitionList."""
        Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.TypedefDefinitionList.append(Tdef)

    def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
        """Record a function definition (modifiers, declarator, span, brace and
        declarator positions) into FileProfile.FunctionDefinitionList."""
        FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
        FileProfile.FunctionDefinitionList.append(FuncDef)

    def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
        """Record a variable declaration into FileProfile.VariableDeclarationList."""
        VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.VariableDeclarationList.append(VarDecl)

    def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
        """Record a function call (name + parameter text) into FileProfile.FunctionCallingList."""
        FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.FunctionCallingList.append(FuncCall)
| edk2-master | BaseTools/Source/Python/Ecc/CParser4/CLexer.py |
edk2-master | BaseTools/Source/Python/Ecc/CParser4/__init__.py |
|
# Generated from C.g4 by ANTLR 4.7.1
# encoding: utf-8
from antlr4 import *
from io import StringIO
# 'typing.io' is a deprecated pseudo-module removed from modern CPython;
# typing.TextIO is the identical object and works on all supported versions.
from typing import TextIO
import sys
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3k")
buf.write("\u0380\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\3\2\7\2\u0092\n\2\f\2\16\2\u0095")
buf.write("\13\2\3\3\5\3\u0098\n\3\3\3\3\3\7\3\u009c\n\3\f\3\16\3")
buf.write("\u009f\13\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u00a7\n\3\5\3")
buf.write("\u00a9\n\3\3\4\5\4\u00ac\n\4\3\4\3\4\6\4\u00b0\n\4\r\4")
buf.write("\16\4\u00b1\3\4\3\4\3\4\5\4\u00b7\n\4\3\4\3\4\3\5\3\5")
buf.write("\3\5\6\5\u00be\n\5\r\5\16\5\u00bf\3\6\3\6\5\6\u00c4\n")
buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u00cc\n\6\3\6\3\6\3\6\5")
buf.write("\6\u00d1\n\6\3\7\3\7\3\7\7\7\u00d6\n\7\f\7\16\7\u00d9")
buf.write("\13\7\3\b\3\b\3\b\5\b\u00de\n\b\3\t\3\t\3\n\3\n\3\n\3")
buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n")
buf.write("\7\n\u00f3\n\n\f\n\16\n\u00f6\13\n\3\n\3\n\5\n\u00fa\n")
buf.write("\n\3\13\3\13\3\f\3\f\5\f\u0100\n\f\3\f\3\f\3\f\3\f\3\f")
buf.write("\3\f\3\f\5\f\u0109\n\f\3\r\3\r\3\16\6\16\u010e\n\16\r")
buf.write("\16\16\16\u010f\3\17\3\17\3\17\3\17\3\20\3\20\6\20\u0118")
buf.write("\n\20\r\20\16\20\u0119\3\21\3\21\3\21\7\21\u011f\n\21")
buf.write("\f\21\16\21\u0122\13\21\3\22\3\22\3\22\5\22\u0127\n\22")
buf.write("\3\22\3\22\5\22\u012b\n\22\3\23\3\23\3\23\3\23\5\23\u0131")
buf.write("\n\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u013a\n")
buf.write("\23\3\23\3\23\3\23\3\23\5\23\u0140\n\23\3\24\3\24\3\24")
buf.write("\7\24\u0145\n\24\f\24\16\24\u0148\13\24\3\25\3\25\3\25")
buf.write("\5\25\u014d\n\25\3\26\3\26\3\27\5\27\u0152\n\27\3\27\5")
buf.write("\27\u0155\n\27\3\27\5\27\u0158\n\27\3\27\5\27\u015b\n")
buf.write("\27\3\27\3\27\5\27\u015f\n\27\3\30\3\30\7\30\u0163\n\30")
buf.write("\f\30\16\30\u0166\13\30\3\30\3\30\5\30\u016a\n\30\3\30")
buf.write("\3\30\3\30\6\30\u016f\n\30\r\30\16\30\u0170\5\30\u0173")
buf.write("\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31")
buf.write("\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u0185\n\31\3\32\3")
buf.write("\32\6\32\u0189\n\32\r\32\16\32\u018a\3\32\5\32\u018e\n")
buf.write("\32\3\32\3\32\3\32\5\32\u0193\n\32\3\33\3\33\3\33\5\33")
buf.write("\u0198\n\33\3\33\5\33\u019b\n\33\3\34\3\34\3\34\5\34\u01a0")
buf.write("\n\34\3\34\7\34\u01a3\n\34\f\34\16\34\u01a6\13\34\3\35")
buf.write("\3\35\3\35\7\35\u01ab\n\35\f\35\16\35\u01ae\13\35\3\35")
buf.write("\5\35\u01b1\n\35\3\35\7\35\u01b4\n\35\f\35\16\35\u01b7")
buf.write("\13\35\3\35\5\35\u01ba\n\35\3\36\3\36\3\36\7\36\u01bf")
buf.write("\n\36\f\36\16\36\u01c2\13\36\3\37\3\37\5\37\u01c6\n\37")
buf.write("\3\37\5\37\u01c9\n\37\3 \3 \5 \u01cd\n \3 \5 \u01d0\n")
buf.write(" \3!\3!\3!\3!\3!\5!\u01d7\n!\3!\7!\u01da\n!\f!\16!\u01dd")
buf.write("\13!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5")
buf.write("\"\u01eb\n\"\3#\3#\3#\3#\5#\u01f1\n#\3#\3#\5#\u01f5\n")
buf.write("#\3$\3$\3$\7$\u01fa\n$\f$\16$\u01fd\13$\3%\3%\5%\u0201")
buf.write("\n%\3%\3%\3%\5%\u0206\n%\7%\u0208\n%\f%\16%\u020b\13%")
buf.write("\3&\3&\3&\3&\3&\7&\u0212\n&\f&\16&\u0215\13&\3\'\3\'\3")
buf.write("\'\3\'\3\'\3\'\3\'\7\'\u021e\n\'\f\'\16\'\u0221\13\'\3")
buf.write("(\3(\3(\3(\3(\3(\5(\u0229\n(\3)\3)\3)\3)\3)\3)\3)\3)\3")
buf.write(")\3)\3)\3)\3)\3)\3)\5)\u023a\n)\3*\3*\3*\3*\3*\3*\3*\3")
buf.write("*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3")
buf.write("*\3*\3*\3*\7*\u0259\n*\f*\16*\u025c\13*\3+\3+\3+\7+\u0261")
buf.write("\n+\f+\16+\u0264\13+\3,\3,\3-\3-\3-\3-\3-\3-\5-\u026e")
buf.write("\n-\3.\3.\3.\3.\3.\7.\u0275\n.\f.\16.\u0278\13.\3.\6.")
buf.write("\u027b\n.\r.\16.\u027c\6.\u027f\n.\r.\16.\u0280\3.\7.")
buf.write("\u0284\n.\f.\16.\u0287\13.\3.\5.\u028a\n.\3/\3/\3/\7/")
buf.write("\u028f\n/\f/\16/\u0292\13/\3\60\3\60\3\61\3\61\3\61\3")
buf.write("\61\3\61\5\61\u029b\n\61\3\62\3\62\3\63\3\63\3\64\3\64")
buf.write("\3\64\3\64\3\64\3\64\3\64\5\64\u02a8\n\64\3\65\3\65\3")
buf.write("\65\7\65\u02ad\n\65\f\65\16\65\u02b0\13\65\3\66\3\66\3")
buf.write("\66\7\66\u02b5\n\66\f\66\16\66\u02b8\13\66\3\67\3\67\3")
buf.write("\67\7\67\u02bd\n\67\f\67\16\67\u02c0\13\67\38\38\38\7")
buf.write("8\u02c5\n8\f8\168\u02c8\138\39\39\39\79\u02cd\n9\f9\16")
buf.write("9\u02d0\139\3:\3:\3:\7:\u02d5\n:\f:\16:\u02d8\13:\3;\3")
buf.write(";\3;\7;\u02dd\n;\f;\16;\u02e0\13;\3<\3<\3<\7<\u02e5\n")
buf.write("<\f<\16<\u02e8\13<\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\5")
buf.write("=\u02f5\n=\3>\5>\u02f8\n>\3>\3>\3>\7>\u02fd\n>\f>\16>")
buf.write("\u0300\13>\3>\3>\3>\3?\3?\3?\7?\u0308\n?\f?\16?\u030b")
buf.write("\13?\3?\3?\3@\3@\3@\7@\u0312\n@\f@\16@\u0315\13@\3@\3")
buf.write("@\3A\3A\3A\7A\u031c\nA\fA\16A\u031f\13A\3A\5A\u0322\n")
buf.write("A\3A\5A\u0325\nA\3A\3A\3B\3B\3B\3B\3B\3B\3B\3B\3B\3B\3")
buf.write("B\5B\u0334\nB\3C\3C\7C\u0338\nC\fC\16C\u033b\13C\3C\5")
buf.write("C\u033e\nC\3C\3C\3D\6D\u0343\nD\rD\16D\u0344\3E\3E\3E")
buf.write("\3E\5E\u034b\nE\3F\3F\3F\3F\3F\3F\3F\3F\5F\u0355\nF\3")
buf.write("F\3F\3F\3F\3F\3F\5F\u035d\nF\3G\3G\3G\3G\3G\3G\3G\3G\3")
buf.write("G\3G\3G\3G\3G\3G\3G\3G\5G\u036f\nG\3H\3H\3H\3H\3H\3H\3")
buf.write("H\3H\3H\3H\3H\3H\3H\5H\u037e\nH\3H\2\2I\2\4\6\b\n\f\16")
buf.write("\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDF")
buf.write("HJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
buf.write("\u0088\u008a\u008c\u008e\2\f\3\2\b\f\3\2\27\30\3\2\33")
buf.write("\'\5\2,,./\679\4\2\7\7:C\3\2IJ\3\2KN\3\2OP\3\2\4\4\3\2")
buf.write("\26\26\2\u03d8\2\u0093\3\2\2\2\4\u00a8\3\2\2\2\6\u00ab")
buf.write("\3\2\2\2\b\u00bd\3\2\2\2\n\u00d0\3\2\2\2\f\u00d2\3\2\2")
buf.write("\2\16\u00da\3\2\2\2\20\u00df\3\2\2\2\22\u00f9\3\2\2\2")
buf.write("\24\u00fb\3\2\2\2\26\u0108\3\2\2\2\30\u010a\3\2\2\2\32")
buf.write("\u010d\3\2\2\2\34\u0111\3\2\2\2\36\u0117\3\2\2\2 \u011b")
buf.write("\3\2\2\2\"\u012a\3\2\2\2$\u013f\3\2\2\2&\u0141\3\2\2\2")
buf.write("(\u0149\3\2\2\2*\u014e\3\2\2\2,\u015e\3\2\2\2.\u0172\3")
buf.write("\2\2\2\60\u0184\3\2\2\2\62\u0192\3\2\2\2\64\u0194\3\2")
buf.write("\2\2\66\u019c\3\2\2\28\u01b9\3\2\2\2:\u01bb\3\2\2\2<\u01c8")
buf.write("\3\2\2\2>\u01cf\3\2\2\2@\u01d6\3\2\2\2B\u01ea\3\2\2\2")
buf.write("D\u01f4\3\2\2\2F\u01f6\3\2\2\2H\u01fe\3\2\2\2J\u020c\3")
buf.write("\2\2\2L\u0216\3\2\2\2N\u0228\3\2\2\2P\u0239\3\2\2\2R\u023b")
buf.write("\3\2\2\2T\u025d\3\2\2\2V\u0265\3\2\2\2X\u026d\3\2\2\2")
buf.write("Z\u0289\3\2\2\2\\\u028b\3\2\2\2^\u0293\3\2\2\2`\u029a")
buf.write("\3\2\2\2b\u029c\3\2\2\2d\u029e\3\2\2\2f\u02a0\3\2\2\2")
buf.write("h\u02a9\3\2\2\2j\u02b1\3\2\2\2l\u02b9\3\2\2\2n\u02c1\3")
buf.write("\2\2\2p\u02c9\3\2\2\2r\u02d1\3\2\2\2t\u02d9\3\2\2\2v\u02e1")
buf.write("\3\2\2\2x\u02f4\3\2\2\2z\u02f7\3\2\2\2|\u0304\3\2\2\2")
buf.write("~\u030e\3\2\2\2\u0080\u0318\3\2\2\2\u0082\u0333\3\2\2")
buf.write("\2\u0084\u0335\3\2\2\2\u0086\u0342\3\2\2\2\u0088\u034a")
buf.write("\3\2\2\2\u008a\u035c\3\2\2\2\u008c\u036e\3\2\2\2\u008e")
buf.write("\u037d\3\2\2\2\u0090\u0092\5\4\3\2\u0091\u0090\3\2\2\2")
buf.write("\u0092\u0095\3\2\2\2\u0093\u0091\3\2\2\2\u0093\u0094\3")
buf.write("\2\2\2\u0094\3\3\2\2\2\u0095\u0093\3\2\2\2\u0096\u0098")
buf.write("\5\b\5\2\u0097\u0096\3\2\2\2\u0097\u0098\3\2\2\2\u0098")
buf.write("\u0099\3\2\2\2\u0099\u009d\5,\27\2\u009a\u009c\5\n\6\2")
buf.write("\u009b\u009a\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u009b\3")
buf.write("\2\2\2\u009d\u009e\3\2\2\2\u009e\u00a0\3\2\2\2\u009f\u009d")
buf.write("\3\2\2\2\u00a0\u00a1\7\3\2\2\u00a1\u00a9\3\2\2\2\u00a2")
buf.write("\u00a9\5\6\4\2\u00a3\u00a9\5\n\6\2\u00a4\u00a6\5\u0080")
buf.write("A\2\u00a5\u00a7\7\4\2\2\u00a6\u00a5\3\2\2\2\u00a6\u00a7")
buf.write("\3\2\2\2\u00a7\u00a9\3\2\2\2\u00a8\u0097\3\2\2\2\u00a8")
buf.write("\u00a2\3\2\2\2\u00a8\u00a3\3\2\2\2\u00a8\u00a4\3\2\2\2")
buf.write("\u00a9\5\3\2\2\2\u00aa\u00ac\5\b\5\2\u00ab\u00aa\3\2\2")
buf.write("\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b6")
buf.write("\5,\27\2\u00ae\u00b0\5\n\6\2\u00af\u00ae\3\2\2\2\u00b0")
buf.write("\u00b1\3\2\2\2\u00b1\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2")
buf.write("\u00b2\u00b3\3\2\2\2\u00b3\u00b4\5\u0084C\2\u00b4\u00b7")
buf.write("\3\2\2\2\u00b5\u00b7\5\u0084C\2\u00b6\u00af\3\2\2\2\u00b6")
buf.write("\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00b9\b\4\1\2")
buf.write("\u00b9\7\3\2\2\2\u00ba\u00be\5\20\t\2\u00bb\u00be\5\22")
buf.write("\n\2\u00bc\u00be\5*\26\2\u00bd\u00ba\3\2\2\2\u00bd\u00bb")
buf.write("\3\2\2\2\u00bd\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf")
buf.write("\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\t\3\2\2\2\u00c1")
buf.write("\u00c3\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2\2")
buf.write("\u00c3\u00c4\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c6\5")
buf.write("\f\7\2\u00c6\u00c7\7\4\2\2\u00c7\u00c8\b\6\1\2\u00c8\u00d1")
buf.write("\3\2\2\2\u00c9\u00cb\5\b\5\2\u00ca\u00cc\5\f\7\2\u00cb")
buf.write("\u00ca\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cd\3\2\2\2")
buf.write("\u00cd\u00ce\7\4\2\2\u00ce\u00cf\b\6\1\2\u00cf\u00d1\3")
buf.write("\2\2\2\u00d0\u00c1\3\2\2\2\u00d0\u00c9\3\2\2\2\u00d1\13")
buf.write("\3\2\2\2\u00d2\u00d7\5\16\b\2\u00d3\u00d4\7\6\2\2\u00d4")
buf.write("\u00d6\5\16\b\2\u00d5\u00d3\3\2\2\2\u00d6\u00d9\3\2\2")
buf.write("\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\r\3\2")
buf.write("\2\2\u00d9\u00d7\3\2\2\2\u00da\u00dd\5,\27\2\u00db\u00dc")
buf.write("\7\7\2\2\u00dc\u00de\5D#\2\u00dd\u00db\3\2\2\2\u00dd\u00de")
buf.write("\3\2\2\2\u00de\17\3\2\2\2\u00df\u00e0\t\2\2\2\u00e0\21")
buf.write("\3\2\2\2\u00e1\u00fa\7\r\2\2\u00e2\u00fa\7\16\2\2\u00e3")
buf.write("\u00fa\7\17\2\2\u00e4\u00fa\7\20\2\2\u00e5\u00fa\7\21")
buf.write("\2\2\u00e6\u00fa\7\22\2\2\u00e7\u00fa\7\23\2\2\u00e8\u00fa")
buf.write("\7\24\2\2\u00e9\u00fa\7\25\2\2\u00ea\u00eb\5\26\f\2\u00eb")
buf.write("\u00ec\b\n\1\2\u00ec\u00fa\3\2\2\2\u00ed\u00ee\5$\23\2")
buf.write("\u00ee\u00ef\b\n\1\2\u00ef\u00fa\3\2\2\2\u00f0\u00f4\7")
buf.write("_\2\2\u00f1\u00f3\5*\26\2\u00f2\u00f1\3\2\2\2\u00f3\u00f6")
buf.write("\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4\u00f5\3\2\2\2\u00f5")
buf.write("\u00f7\3\2\2\2\u00f6\u00f4\3\2\2\2\u00f7\u00fa\5,\27\2")
buf.write("\u00f8\u00fa\5\24\13\2\u00f9\u00e1\3\2\2\2\u00f9\u00e2")
buf.write("\3\2\2\2\u00f9\u00e3\3\2\2\2\u00f9\u00e4\3\2\2\2\u00f9")
buf.write("\u00e5\3\2\2\2\u00f9\u00e6\3\2\2\2\u00f9\u00e7\3\2\2\2")
buf.write("\u00f9\u00e8\3\2\2\2\u00f9\u00e9\3\2\2\2\u00f9\u00ea\3")
buf.write("\2\2\2\u00f9\u00ed\3\2\2\2\u00f9\u00f0\3\2\2\2\u00f9\u00f8")
buf.write("\3\2\2\2\u00fa\23\3\2\2\2\u00fb\u00fc\7_\2\2\u00fc\25")
buf.write("\3\2\2\2\u00fd\u00ff\5\30\r\2\u00fe\u0100\7_\2\2\u00ff")
buf.write("\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\u0101\3\2\2\2")
buf.write("\u0101\u0102\7\3\2\2\u0102\u0103\5\32\16\2\u0103\u0104")
buf.write("\7\26\2\2\u0104\u0109\3\2\2\2\u0105\u0106\5\30\r\2\u0106")
buf.write("\u0107\7_\2\2\u0107\u0109\3\2\2\2\u0108\u00fd\3\2\2\2")
buf.write("\u0108\u0105\3\2\2\2\u0109\27\3\2\2\2\u010a\u010b\t\3")
buf.write("\2\2\u010b\31\3\2\2\2\u010c\u010e\5\34\17\2\u010d\u010c")
buf.write("\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u010d\3\2\2\2\u010f")
buf.write("\u0110\3\2\2\2\u0110\33\3\2\2\2\u0111\u0112\5\36\20\2")
buf.write("\u0112\u0113\5 \21\2\u0113\u0114\7\4\2\2\u0114\35\3\2")
buf.write("\2\2\u0115\u0118\5*\26\2\u0116\u0118\5\22\n\2\u0117\u0115")
buf.write("\3\2\2\2\u0117\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119")
buf.write("\u0117\3\2\2\2\u0119\u011a\3\2\2\2\u011a\37\3\2\2\2\u011b")
buf.write("\u0120\5\"\22\2\u011c\u011d\7\6\2\2\u011d\u011f\5\"\22")
buf.write("\2\u011e\u011c\3\2\2\2\u011f\u0122\3\2\2\2\u0120\u011e")
buf.write("\3\2\2\2\u0120\u0121\3\2\2\2\u0121!\3\2\2\2\u0122\u0120")
buf.write("\3\2\2\2\u0123\u0126\5,\27\2\u0124\u0125\7\31\2\2\u0125")
buf.write("\u0127\5^\60\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2")
buf.write("\u0127\u012b\3\2\2\2\u0128\u0129\7\31\2\2\u0129\u012b")
buf.write("\5^\60\2\u012a\u0123\3\2\2\2\u012a\u0128\3\2\2\2\u012b")
buf.write("#\3\2\2\2\u012c\u012d\7\32\2\2\u012d\u012e\7\3\2\2\u012e")
buf.write("\u0130\5&\24\2\u012f\u0131\7\6\2\2\u0130\u012f\3\2\2\2")
buf.write("\u0130\u0131\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u0133\7")
buf.write("\26\2\2\u0133\u0140\3\2\2\2\u0134\u0135\7\32\2\2\u0135")
buf.write("\u0136\7_\2\2\u0136\u0137\7\3\2\2\u0137\u0139\5&\24\2")
buf.write("\u0138\u013a\7\6\2\2\u0139\u0138\3\2\2\2\u0139\u013a\3")
buf.write("\2\2\2\u013a\u013b\3\2\2\2\u013b\u013c\7\26\2\2\u013c")
buf.write("\u0140\3\2\2\2\u013d\u013e\7\32\2\2\u013e\u0140\7_\2\2")
buf.write("\u013f\u012c\3\2\2\2\u013f\u0134\3\2\2\2\u013f\u013d\3")
buf.write("\2\2\2\u0140%\3\2\2\2\u0141\u0146\5(\25\2\u0142\u0143")
buf.write("\7\6\2\2\u0143\u0145\5(\25\2\u0144\u0142\3\2\2\2\u0145")
buf.write("\u0148\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0147\3\2\2\2")
buf.write("\u0147\'\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u014c\7_\2")
buf.write("\2\u014a\u014b\7\7\2\2\u014b\u014d\5^\60\2\u014c\u014a")
buf.write("\3\2\2\2\u014c\u014d\3\2\2\2\u014d)\3\2\2\2\u014e\u014f")
buf.write("\t\4\2\2\u014f+\3\2\2\2\u0150\u0152\5\62\32\2\u0151\u0150")
buf.write("\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0154\3\2\2\2\u0153")
buf.write("\u0155\7$\2\2\u0154\u0153\3\2\2\2\u0154\u0155\3\2\2\2")
buf.write("\u0155\u0157\3\2\2\2\u0156\u0158\7%\2\2\u0157\u0156\3")
buf.write("\2\2\2\u0157\u0158\3\2\2\2\u0158\u015a\3\2\2\2\u0159\u015b")
buf.write("\7&\2\2\u015a\u0159\3\2\2\2\u015a\u015b\3\2\2\2\u015b")
buf.write("\u015c\3\2\2\2\u015c\u015f\5.\30\2\u015d\u015f\5\62\32")
buf.write("\2\u015e\u0151\3\2\2\2\u015e\u015d\3\2\2\2\u015f-\3\2")
buf.write("\2\2\u0160\u0164\7_\2\2\u0161\u0163\5\60\31\2\u0162\u0161")
buf.write("\3\2\2\2\u0163\u0166\3\2\2\2\u0164\u0162\3\2\2\2\u0164")
buf.write("\u0165\3\2\2\2\u0165\u0173\3\2\2\2\u0166\u0164\3\2\2\2")
buf.write("\u0167\u0169\7(\2\2\u0168\u016a\7$\2\2\u0169\u0168\3\2")
buf.write("\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u016c")
buf.write("\5,\27\2\u016c\u016e\7)\2\2\u016d\u016f\5\60\31\2\u016e")
buf.write("\u016d\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u016e\3\2\2\2")
buf.write("\u0170\u0171\3\2\2\2\u0171\u0173\3\2\2\2\u0172\u0160\3")
buf.write("\2\2\2\u0172\u0167\3\2\2\2\u0173/\3\2\2\2\u0174\u0175")
buf.write("\7*\2\2\u0175\u0176\5^\60\2\u0176\u0177\7+\2\2\u0177\u0185")
buf.write("\3\2\2\2\u0178\u0179\7*\2\2\u0179\u0185\7+\2\2\u017a\u017b")
buf.write("\7(\2\2\u017b\u017c\5\64\33\2\u017c\u017d\7)\2\2\u017d")
buf.write("\u0185\3\2\2\2\u017e\u017f\7(\2\2\u017f\u0180\5:\36\2")
buf.write("\u0180\u0181\7)\2\2\u0181\u0185\3\2\2\2\u0182\u0183\7")
buf.write("(\2\2\u0183\u0185\7)\2\2\u0184\u0174\3\2\2\2\u0184\u0178")
buf.write("\3\2\2\2\u0184\u017a\3\2\2\2\u0184\u017e\3\2\2\2\u0184")
buf.write("\u0182\3\2\2\2\u0185\61\3\2\2\2\u0186\u0188\7,\2\2\u0187")
buf.write("\u0189\5*\26\2\u0188\u0187\3\2\2\2\u0189\u018a\3\2\2\2")
buf.write("\u018a\u0188\3\2\2\2\u018a\u018b\3\2\2\2\u018b\u018d\3")
buf.write("\2\2\2\u018c\u018e\5\62\32\2\u018d\u018c\3\2\2\2\u018d")
buf.write("\u018e\3\2\2\2\u018e\u0193\3\2\2\2\u018f\u0190\7,\2\2")
buf.write("\u0190\u0193\5\62\32\2\u0191\u0193\7,\2\2\u0192\u0186")
buf.write("\3\2\2\2\u0192\u018f\3\2\2\2\u0192\u0191\3\2\2\2\u0193")
buf.write("\63\3\2\2\2\u0194\u019a\5\66\34\2\u0195\u0197\7\6\2\2")
buf.write("\u0196\u0198\7\37\2\2\u0197\u0196\3\2\2\2\u0197\u0198")
buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\7-\2\2\u019a")
buf.write("\u0195\3\2\2\2\u019a\u019b\3\2\2\2\u019b\65\3\2\2\2\u019c")
buf.write("\u01a4\58\35\2\u019d\u019f\7\6\2\2\u019e\u01a0\7\37\2")
buf.write("\2\u019f\u019e\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a1")
buf.write("\3\2\2\2\u01a1\u01a3\58\35\2\u01a2\u019d\3\2\2\2\u01a3")
buf.write("\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2")
buf.write("\u01a5\67\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u01ac\5\b")
buf.write("\5\2\u01a8\u01ab\5,\27\2\u01a9\u01ab\5> \2\u01aa\u01a8")
buf.write("\3\2\2\2\u01aa\u01a9\3\2\2\2\u01ab\u01ae\3\2\2\2\u01ac")
buf.write("\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01b0\3\2\2\2")
buf.write("\u01ae\u01ac\3\2\2\2\u01af\u01b1\7\37\2\2\u01b0\u01af")
buf.write("\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01ba\3\2\2\2\u01b2")
buf.write("\u01b4\5\62\32\2\u01b3\u01b2\3\2\2\2\u01b4\u01b7\3\2\2")
buf.write("\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8")
buf.write("\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01ba\7_\2\2\u01b9")
buf.write("\u01a7\3\2\2\2\u01b9\u01b5\3\2\2\2\u01ba9\3\2\2\2\u01bb")
buf.write("\u01c0\7_\2\2\u01bc\u01bd\7\6\2\2\u01bd\u01bf\7_\2\2\u01be")
buf.write("\u01bc\3\2\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01be\3\2\2\2")
buf.write("\u01c0\u01c1\3\2\2\2\u01c1;\3\2\2\2\u01c2\u01c0\3\2\2")
buf.write("\2\u01c3\u01c5\5\36\20\2\u01c4\u01c6\5> \2\u01c5\u01c4")
buf.write("\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c9\3\2\2\2\u01c7")
buf.write("\u01c9\5\24\13\2\u01c8\u01c3\3\2\2\2\u01c8\u01c7\3\2\2")
buf.write("\2\u01c9=\3\2\2\2\u01ca\u01cc\5\62\32\2\u01cb\u01cd\5")
buf.write("@!\2\u01cc\u01cb\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01d0")
buf.write("\3\2\2\2\u01ce\u01d0\5@!\2\u01cf\u01ca\3\2\2\2\u01cf\u01ce")
buf.write("\3\2\2\2\u01d0?\3\2\2\2\u01d1\u01d2\7(\2\2\u01d2\u01d3")
buf.write("\5> \2\u01d3\u01d4\7)\2\2\u01d4\u01d7\3\2\2\2\u01d5\u01d7")
buf.write("\5B\"\2\u01d6\u01d1\3\2\2\2\u01d6\u01d5\3\2\2\2\u01d7")
buf.write("\u01db\3\2\2\2\u01d8\u01da\5B\"\2\u01d9\u01d8\3\2\2\2")
buf.write("\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01db\u01dc\3")
buf.write("\2\2\2\u01dcA\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01df")
buf.write("\7*\2\2\u01df\u01eb\7+\2\2\u01e0\u01e1\7*\2\2\u01e1\u01e2")
buf.write("\5^\60\2\u01e2\u01e3\7+\2\2\u01e3\u01eb\3\2\2\2\u01e4")
buf.write("\u01e5\7(\2\2\u01e5\u01eb\7)\2\2\u01e6\u01e7\7(\2\2\u01e7")
buf.write("\u01e8\5\64\33\2\u01e8\u01e9\7)\2\2\u01e9\u01eb\3\2\2")
buf.write("\2\u01ea\u01de\3\2\2\2\u01ea\u01e0\3\2\2\2\u01ea\u01e4")
buf.write("\3\2\2\2\u01ea\u01e6\3\2\2\2\u01ebC\3\2\2\2\u01ec\u01f5")
buf.write("\5`\61\2\u01ed\u01ee\7\3\2\2\u01ee\u01f0\5F$\2\u01ef\u01f1")
buf.write("\7\6\2\2\u01f0\u01ef\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1")
buf.write("\u01f2\3\2\2\2\u01f2\u01f3\7\26\2\2\u01f3\u01f5\3\2\2")
buf.write("\2\u01f4\u01ec\3\2\2\2\u01f4\u01ed\3\2\2\2\u01f5E\3\2")
buf.write("\2\2\u01f6\u01fb\5D#\2\u01f7\u01f8\7\6\2\2\u01f8\u01fa")
buf.write("\5D#\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd\3\2\2\2\u01fb\u01f9")
buf.write("\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fcG\3\2\2\2\u01fd\u01fb")
buf.write("\3\2\2\2\u01fe\u0200\5`\61\2\u01ff\u0201\7\37\2\2\u0200")
buf.write("\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0209\3\2\2\2")
buf.write("\u0202\u0203\7\6\2\2\u0203\u0205\5`\61\2\u0204\u0206\7")
buf.write("\37\2\2\u0205\u0204\3\2\2\2\u0205\u0206\3\2\2\2\u0206")
buf.write("\u0208\3\2\2\2\u0207\u0202\3\2\2\2\u0208\u020b\3\2\2\2")
buf.write("\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020aI\3\2\2")
buf.write("\2\u020b\u0209\3\2\2\2\u020c\u0213\5L\'\2\u020d\u020e")
buf.write("\7.\2\2\u020e\u0212\5L\'\2\u020f\u0210\7/\2\2\u0210\u0212")
buf.write("\5L\'\2\u0211\u020d\3\2\2\2\u0211\u020f\3\2\2\2\u0212")
buf.write("\u0215\3\2\2\2\u0213\u0211\3\2\2\2\u0213\u0214\3\2\2\2")
buf.write("\u0214K\3\2\2\2\u0215\u0213\3\2\2\2\u0216\u021f\5N(\2")
buf.write("\u0217\u0218\7,\2\2\u0218\u021e\5N(\2\u0219\u021a\7\60")
buf.write("\2\2\u021a\u021e\5N(\2\u021b\u021c\7\61\2\2\u021c\u021e")
buf.write("\5N(\2\u021d\u0217\3\2\2\2\u021d\u0219\3\2\2\2\u021d\u021b")
buf.write("\3\2\2\2\u021e\u0221\3\2\2\2\u021f\u021d\3\2\2\2\u021f")
buf.write("\u0220\3\2\2\2\u0220M\3\2\2\2\u0221\u021f\3\2\2\2\u0222")
buf.write("\u0223\7(\2\2\u0223\u0224\5<\37\2\u0224\u0225\7)\2\2\u0225")
buf.write("\u0226\5N(\2\u0226\u0229\3\2\2\2\u0227\u0229\5P)\2\u0228")
buf.write("\u0222\3\2\2\2\u0228\u0227\3\2\2\2\u0229O\3\2\2\2\u022a")
buf.write("\u023a\5R*\2\u022b\u022c\7\62\2\2\u022c\u023a\5P)\2\u022d")
buf.write("\u022e\7\63\2\2\u022e\u023a\5P)\2\u022f\u0230\5V,\2\u0230")
buf.write("\u0231\5N(\2\u0231\u023a\3\2\2\2\u0232\u0233\7\64\2\2")
buf.write("\u0233\u023a\5P)\2\u0234\u0235\7\64\2\2\u0235\u0236\7")
buf.write("(\2\2\u0236\u0237\5<\37\2\u0237\u0238\7)\2\2\u0238\u023a")
buf.write("\3\2\2\2\u0239\u022a\3\2\2\2\u0239\u022b\3\2\2\2\u0239")
buf.write("\u022d\3\2\2\2\u0239\u022f\3\2\2\2\u0239\u0232\3\2\2\2")
buf.write("\u0239\u0234\3\2\2\2\u023aQ\3\2\2\2\u023b\u023c\5X-\2")
buf.write("\u023c\u025a\b*\1\2\u023d\u023e\7*\2\2\u023e\u023f\5\\")
buf.write("/\2\u023f\u0240\7+\2\2\u0240\u0259\3\2\2\2\u0241\u0242")
buf.write("\7(\2\2\u0242\u0243\7)\2\2\u0243\u0259\b*\1\2\u0244\u0245")
buf.write("\7(\2\2\u0245\u0246\5H%\2\u0246\u0247\7)\2\2\u0247\u0248")
buf.write("\b*\1\2\u0248\u0259\3\2\2\2\u0249\u024a\7(\2\2\u024a\u024b")
buf.write("\5T+\2\u024b\u024c\7)\2\2\u024c\u0259\3\2\2\2\u024d\u024e")
buf.write("\7\65\2\2\u024e\u024f\7_\2\2\u024f\u0259\b*\1\2\u0250")
buf.write("\u0251\7,\2\2\u0251\u0252\7_\2\2\u0252\u0259\b*\1\2\u0253")
buf.write("\u0254\7\66\2\2\u0254\u0255\7_\2\2\u0255\u0259\b*\1\2")
buf.write("\u0256\u0259\7\62\2\2\u0257\u0259\7\63\2\2\u0258\u023d")
buf.write("\3\2\2\2\u0258\u0241\3\2\2\2\u0258\u0244\3\2\2\2\u0258")
buf.write("\u0249\3\2\2\2\u0258\u024d\3\2\2\2\u0258\u0250\3\2\2\2")
buf.write("\u0258\u0253\3\2\2\2\u0258\u0256\3\2\2\2\u0258\u0257\3")
buf.write("\2\2\2\u0259\u025c\3\2\2\2\u025a\u0258\3\2\2\2\u025a\u025b")
buf.write("\3\2\2\2\u025bS\3\2\2\2\u025c\u025a\3\2\2\2\u025d\u0262")
buf.write("\58\35\2\u025e\u025f\7\6\2\2\u025f\u0261\58\35\2\u0260")
buf.write("\u025e\3\2\2\2\u0261\u0264\3\2\2\2\u0262\u0260\3\2\2\2")
buf.write("\u0262\u0263\3\2\2\2\u0263U\3\2\2\2\u0264\u0262\3\2\2")
buf.write("\2\u0265\u0266\t\5\2\2\u0266W\3\2\2\2\u0267\u026e\7_\2")
buf.write("\2\u0268\u026e\5Z.\2\u0269\u026a\7(\2\2\u026a\u026b\5")
buf.write("\\/\2\u026b\u026c\7)\2\2\u026c\u026e\3\2\2\2\u026d\u0267")
buf.write("\3\2\2\2\u026d\u0268\3\2\2\2\u026d\u0269\3\2\2\2\u026e")
buf.write("Y\3\2\2\2\u026f\u028a\7b\2\2\u0270\u028a\7d\2\2\u0271")
buf.write("\u028a\7c\2\2\u0272\u028a\7`\2\2\u0273\u0275\7_\2\2\u0274")
buf.write("\u0273\3\2\2\2\u0275\u0278\3\2\2\2\u0276\u0274\3\2\2\2")
buf.write("\u0276\u0277\3\2\2\2\u0277\u027a\3\2\2\2\u0278\u0276\3")
buf.write("\2\2\2\u0279\u027b\7a\2\2\u027a\u0279\3\2\2\2\u027b\u027c")
buf.write("\3\2\2\2\u027c\u027a\3\2\2\2\u027c\u027d\3\2\2\2\u027d")
buf.write("\u027f\3\2\2\2\u027e\u0276\3\2\2\2\u027f\u0280\3\2\2\2")
buf.write("\u0280\u027e\3\2\2\2\u0280\u0281\3\2\2\2\u0281\u0285\3")
buf.write("\2\2\2\u0282\u0284\7_\2\2\u0283\u0282\3\2\2\2\u0284\u0287")
buf.write("\3\2\2\2\u0285\u0283\3\2\2\2\u0285\u0286\3\2\2\2\u0286")
buf.write("\u028a\3\2\2\2\u0287\u0285\3\2\2\2\u0288\u028a\7e\2\2")
buf.write("\u0289\u026f\3\2\2\2\u0289\u0270\3\2\2\2\u0289\u0271\3")
buf.write("\2\2\2\u0289\u0272\3\2\2\2\u0289\u027e\3\2\2\2\u0289\u0288")
buf.write("\3\2\2\2\u028a[\3\2\2\2\u028b\u0290\5`\61\2\u028c\u028d")
buf.write("\7\6\2\2\u028d\u028f\5`\61\2\u028e\u028c\3\2\2\2\u028f")
buf.write("\u0292\3\2\2\2\u0290\u028e\3\2\2\2\u0290\u0291\3\2\2\2")
buf.write("\u0291]\3\2\2\2\u0292\u0290\3\2\2\2\u0293\u0294\5f\64")
buf.write("\2\u0294_\3\2\2\2\u0295\u0296\5b\62\2\u0296\u0297\5d\63")
buf.write("\2\u0297\u0298\5`\61\2\u0298\u029b\3\2\2\2\u0299\u029b")
buf.write("\5f\64\2\u029a\u0295\3\2\2\2\u029a\u0299\3\2\2\2\u029b")
buf.write("a\3\2\2\2\u029c\u029d\5P)\2\u029dc\3\2\2\2\u029e\u029f")
buf.write("\t\6\2\2\u029fe\3\2\2\2\u02a0\u02a7\5h\65\2\u02a1\u02a2")
buf.write("\7D\2\2\u02a2\u02a3\5\\/\2\u02a3\u02a4\7\31\2\2\u02a4")
buf.write("\u02a5\5f\64\2\u02a5\u02a6\b\64\1\2\u02a6\u02a8\3\2\2")
buf.write("\2\u02a7\u02a1\3\2\2\2\u02a7\u02a8\3\2\2\2\u02a8g\3\2")
buf.write("\2\2\u02a9\u02ae\5j\66\2\u02aa\u02ab\7E\2\2\u02ab\u02ad")
buf.write("\5j\66\2\u02ac\u02aa\3\2\2\2\u02ad\u02b0\3\2\2\2\u02ae")
buf.write("\u02ac\3\2\2\2\u02ae\u02af\3\2\2\2\u02afi\3\2\2\2\u02b0")
buf.write("\u02ae\3\2\2\2\u02b1\u02b6\5l\67\2\u02b2\u02b3\7F\2\2")
buf.write("\u02b3\u02b5\5l\67\2\u02b4\u02b2\3\2\2\2\u02b5\u02b8\3")
buf.write("\2\2\2\u02b6\u02b4\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7k")
buf.write("\3\2\2\2\u02b8\u02b6\3\2\2\2\u02b9\u02be\5n8\2\u02ba\u02bb")
buf.write("\7G\2\2\u02bb\u02bd\5n8\2\u02bc\u02ba\3\2\2\2\u02bd\u02c0")
buf.write("\3\2\2\2\u02be\u02bc\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf")
buf.write("m\3\2\2\2\u02c0\u02be\3\2\2\2\u02c1\u02c6\5p9\2\u02c2")
buf.write("\u02c3\7H\2\2\u02c3\u02c5\5p9\2\u02c4\u02c2\3\2\2\2\u02c5")
buf.write("\u02c8\3\2\2\2\u02c6\u02c4\3\2\2\2\u02c6\u02c7\3\2\2\2")
buf.write("\u02c7o\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c9\u02ce\5r:\2")
buf.write("\u02ca\u02cb\7\67\2\2\u02cb\u02cd\5r:\2\u02cc\u02ca\3")
buf.write("\2\2\2\u02cd\u02d0\3\2\2\2\u02ce\u02cc\3\2\2\2\u02ce\u02cf")
buf.write("\3\2\2\2\u02cfq\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d6")
buf.write("\5t;\2\u02d2\u02d3\t\7\2\2\u02d3\u02d5\5t;\2\u02d4\u02d2")
buf.write("\3\2\2\2\u02d5\u02d8\3\2\2\2\u02d6\u02d4\3\2\2\2\u02d6")
buf.write("\u02d7\3\2\2\2\u02d7s\3\2\2\2\u02d8\u02d6\3\2\2\2\u02d9")
buf.write("\u02de\5v<\2\u02da\u02db\t\b\2\2\u02db\u02dd\5v<\2\u02dc")
buf.write("\u02da\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2")
buf.write("\u02de\u02df\3\2\2\2\u02dfu\3\2\2\2\u02e0\u02de\3\2\2")
buf.write("\2\u02e1\u02e6\5J&\2\u02e2\u02e3\t\t\2\2\u02e3\u02e5\5")
buf.write("J&\2\u02e4\u02e2\3\2\2\2\u02e5\u02e8\3\2\2\2\u02e6\u02e4")
buf.write("\3\2\2\2\u02e6\u02e7\3\2\2\2\u02e7w\3\2\2\2\u02e8\u02e6")
buf.write("\3\2\2\2\u02e9\u02f5\5\u0082B\2\u02ea\u02f5\5\u0084C\2")
buf.write("\u02eb\u02f5\5\u0088E\2\u02ec\u02f5\5\u008aF\2\u02ed\u02f5")
buf.write("\5\u008cG\2\u02ee\u02f5\5\u008eH\2\u02ef\u02f5\5\u0080")
buf.write("A\2\u02f0\u02f5\5z>\2\u02f1\u02f5\5|?\2\u02f2\u02f5\5")
buf.write("~@\2\u02f3\u02f5\5\n\6\2\u02f4\u02e9\3\2\2\2\u02f4\u02ea")
buf.write("\3\2\2\2\u02f4\u02eb\3\2\2\2\u02f4\u02ec\3\2\2\2\u02f4")
buf.write("\u02ed\3\2\2\2\u02f4\u02ee\3\2\2\2\u02f4\u02ef\3\2\2\2")
buf.write("\u02f4\u02f0\3\2\2\2\u02f4\u02f1\3\2\2\2\u02f4\u02f2\3")
buf.write("\2\2\2\u02f4\u02f3\3\2\2\2\u02f5y\3\2\2\2\u02f6\u02f8")
buf.write("\7Q\2\2\u02f7\u02f6\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8")
buf.write("\u02f9\3\2\2\2\u02f9\u02fa\7_\2\2\u02fa\u02fe\7(\2\2\u02fb")
buf.write("\u02fd\n\n\2\2\u02fc\u02fb\3\2\2\2\u02fd\u0300\3\2\2\2")
buf.write("\u02fe\u02fc\3\2\2\2\u02fe\u02ff\3\2\2\2\u02ff\u0301\3")
buf.write("\2\2\2\u0300\u02fe\3\2\2\2\u0301\u0302\7)\2\2\u0302\u0303")
buf.write("\7\4\2\2\u0303{\3\2\2\2\u0304\u0305\7R\2\2\u0305\u0309")
buf.write("\7\3\2\2\u0306\u0308\n\13\2\2\u0307\u0306\3\2\2\2\u0308")
buf.write("\u030b\3\2\2\2\u0309\u0307\3\2\2\2\u0309\u030a\3\2\2\2")
buf.write("\u030a\u030c\3\2\2\2\u030b\u0309\3\2\2\2\u030c\u030d\7")
buf.write("\26\2\2\u030d}\3\2\2\2\u030e\u030f\7S\2\2\u030f\u0313")
buf.write("\7\3\2\2\u0310\u0312\n\13\2\2\u0311\u0310\3\2\2\2\u0312")
buf.write("\u0315\3\2\2\2\u0313\u0311\3\2\2\2\u0313\u0314\3\2\2\2")
buf.write("\u0314\u0316\3\2\2\2\u0315\u0313\3\2\2\2\u0316\u0317\7")
buf.write("\26\2\2\u0317\177\3\2\2\2\u0318\u0319\7_\2\2\u0319\u031d")
buf.write("\7(\2\2\u031a\u031c\5\n\6\2\u031b\u031a\3\2\2\2\u031c")
buf.write("\u031f\3\2\2\2\u031d\u031b\3\2\2\2\u031d\u031e\3\2\2\2")
buf.write("\u031e\u0321\3\2\2\2\u031f\u031d\3\2\2\2\u0320\u0322\5")
buf.write("\u0086D\2\u0321\u0320\3\2\2\2\u0321\u0322\3\2\2\2\u0322")
buf.write("\u0324\3\2\2\2\u0323\u0325\5\\/\2\u0324\u0323\3\2\2\2")
buf.write("\u0324\u0325\3\2\2\2\u0325\u0326\3\2\2\2\u0326\u0327\7")
buf.write(")\2\2\u0327\u0081\3\2\2\2\u0328\u0329\7_\2\2\u0329\u032a")
buf.write("\7\31\2\2\u032a\u0334\5x=\2\u032b\u032c\7T\2\2\u032c\u032d")
buf.write("\5^\60\2\u032d\u032e\7\31\2\2\u032e\u032f\5x=\2\u032f")
buf.write("\u0334\3\2\2\2\u0330\u0331\7U\2\2\u0331\u0332\7\31\2\2")
buf.write("\u0332\u0334\5x=\2\u0333\u0328\3\2\2\2\u0333\u032b\3\2")
buf.write("\2\2\u0333\u0330\3\2\2\2\u0334\u0083\3\2\2\2\u0335\u0339")
buf.write("\7\3\2\2\u0336\u0338\5\n\6\2\u0337\u0336\3\2\2\2\u0338")
buf.write("\u033b\3\2\2\2\u0339\u0337\3\2\2\2\u0339\u033a\3\2\2\2")
buf.write("\u033a\u033d\3\2\2\2\u033b\u0339\3\2\2\2\u033c\u033e\5")
buf.write("\u0086D\2\u033d\u033c\3\2\2\2\u033d\u033e\3\2\2\2\u033e")
buf.write("\u033f\3\2\2\2\u033f\u0340\7\26\2\2\u0340\u0085\3\2\2")
buf.write("\2\u0341\u0343\5x=\2\u0342\u0341\3\2\2\2\u0343\u0344\3")
buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0344\u0345\3\2\2\2\u0345\u0087")
buf.write("\3\2\2\2\u0346\u034b\7\4\2\2\u0347\u0348\5\\/\2\u0348")
buf.write("\u0349\7\4\2\2\u0349\u034b\3\2\2\2\u034a\u0346\3\2\2\2")
buf.write("\u034a\u0347\3\2\2\2\u034b\u0089\3\2\2\2\u034c\u034d\7")
buf.write("V\2\2\u034d\u034e\7(\2\2\u034e\u034f\5\\/\2\u034f\u0350")
buf.write("\7)\2\2\u0350\u0351\bF\1\2\u0351\u0354\5x=\2\u0352\u0353")
buf.write("\7W\2\2\u0353\u0355\5x=\2\u0354\u0352\3\2\2\2\u0354\u0355")
buf.write("\3\2\2\2\u0355\u035d\3\2\2\2\u0356\u0357\7X\2\2\u0357")
buf.write("\u0358\7(\2\2\u0358\u0359\5\\/\2\u0359\u035a\7)\2\2\u035a")
buf.write("\u035b\5x=\2\u035b\u035d\3\2\2\2\u035c\u034c\3\2\2\2\u035c")
buf.write("\u0356\3\2\2\2\u035d\u008b\3\2\2\2\u035e\u035f\7Y\2\2")
buf.write("\u035f\u0360\7(\2\2\u0360\u0361\5\\/\2\u0361\u0362\7)")
buf.write("\2\2\u0362\u0363\5x=\2\u0363\u0364\bG\1\2\u0364\u036f")
buf.write("\3\2\2\2\u0365\u0366\7Z\2\2\u0366\u0367\5x=\2\u0367\u0368")
buf.write("\7Y\2\2\u0368\u0369\7(\2\2\u0369\u036a\5\\/\2\u036a\u036b")
buf.write("\7)\2\2\u036b\u036c\7\4\2\2\u036c\u036d\bG\1\2\u036d\u036f")
buf.write("\3\2\2\2\u036e\u035e\3\2\2\2\u036e\u0365\3\2\2\2\u036f")
buf.write("\u008d\3\2\2\2\u0370\u0371\7[\2\2\u0371\u0372\7_\2\2\u0372")
buf.write("\u037e\7\4\2\2\u0373\u0374\7\\\2\2\u0374\u037e\7\4\2\2")
buf.write("\u0375\u0376\7]\2\2\u0376\u037e\7\4\2\2\u0377\u0378\7")
buf.write("^\2\2\u0378\u037e\7\4\2\2\u0379\u037a\7^\2\2\u037a\u037b")
buf.write("\5\\/\2\u037b\u037c\7\4\2\2\u037c\u037e\3\2\2\2\u037d")
buf.write("\u0370\3\2\2\2\u037d\u0373\3\2\2\2\u037d\u0375\3\2\2\2")
buf.write("\u037d\u0377\3\2\2\2\u037d\u0379\3\2\2\2\u037e\u008f\3")
buf.write("\2\2\2o\u0093\u0097\u009d\u00a6\u00a8\u00ab\u00b1\u00b6")
buf.write("\u00bd\u00bf\u00c3\u00cb\u00d0\u00d7\u00dd\u00f4\u00f9")
buf.write("\u00ff\u0108\u010f\u0117\u0119\u0120\u0126\u012a\u0130")
buf.write("\u0139\u013f\u0146\u014c\u0151\u0154\u0157\u015a\u015e")
buf.write("\u0164\u0169\u0170\u0172\u0184\u018a\u018d\u0192\u0197")
buf.write("\u019a\u019f\u01a4\u01aa\u01ac\u01b0\u01b5\u01b9\u01c0")
buf.write("\u01c5\u01c8\u01cc\u01cf\u01d6\u01db\u01ea\u01f0\u01f4")
buf.write("\u01fb\u0200\u0205\u0209\u0211\u0213\u021d\u021f\u0228")
buf.write("\u0239\u0258\u025a\u0262\u026d\u0276\u027c\u0280\u0285")
buf.write("\u0289\u0290\u029a\u02a7\u02ae\u02b6\u02be\u02c6\u02ce")
buf.write("\u02d6\u02de\u02e6\u02f4\u02f7\u02fe\u0309\u0313\u031d")
buf.write("\u0321\u0324\u0333\u0339\u033d\u0344\u034a\u0354\u035c")
buf.write("\u036e\u037d")
return buf.getvalue()
class CParser ( Parser ):
grammarFileName = "C.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'{'", "';'", "'typedef'", "','", "'='",
"'extern'", "'static'", "'auto'", "'register'", "'STATIC'",
"'void'", "'char'", "'short'", "'int'", "'long'", "'float'",
"'double'", "'signed'", "'unsigned'", "'}'", "'struct'",
"'union'", "':'", "'enum'", "'const'", "'volatile'",
"'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'",
"'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'",
"'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
"'('", "')'", "'['", "']'", "'*'", "'...'", "'+'",
"'-'", "'/'", "'%'", "'++'", "'--'", "'sizeof'", "'.'",
"'->'", "'&'", "'~'", "'!'", "'*='", "'/='", "'%='",
"'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
"'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='",
"'<'", "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'",
"'_asm'", "'__asm'", "'case'", "'default'", "'if'",
"'else'", "'switch'", "'while'", "'do'", "'goto'",
"'continue'", "'break'", "'return'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL",
"HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL",
"FLOATING_POINT_LITERAL", "WS", "BS", "UnicodeVocabulary",
"COMMENT", "LINE_COMMENT", "LINE_COMMAND" ]
RULE_translation_unit = 0
RULE_external_declaration = 1
RULE_function_definition = 2
RULE_declaration_specifiers = 3
RULE_declaration = 4
RULE_init_declarator_list = 5
RULE_init_declarator = 6
RULE_storage_class_specifier = 7
RULE_type_specifier = 8
RULE_type_id = 9
RULE_struct_or_union_specifier = 10
RULE_struct_or_union = 11
RULE_struct_declaration_list = 12
RULE_struct_declaration = 13
RULE_specifier_qualifier_list = 14
RULE_struct_declarator_list = 15
RULE_struct_declarator = 16
RULE_enum_specifier = 17
RULE_enumerator_list = 18
RULE_enumerator = 19
RULE_type_qualifier = 20
RULE_declarator = 21
RULE_direct_declarator = 22
RULE_declarator_suffix = 23
RULE_pointer = 24
RULE_parameter_type_list = 25
RULE_parameter_list = 26
RULE_parameter_declaration = 27
RULE_identifier_list = 28
RULE_type_name = 29
RULE_abstract_declarator = 30
RULE_direct_abstract_declarator = 31
RULE_abstract_declarator_suffix = 32
RULE_initializer = 33
RULE_initializer_list = 34
RULE_argument_expression_list = 35
RULE_additive_expression = 36
RULE_multiplicative_expression = 37
RULE_cast_expression = 38
RULE_unary_expression = 39
RULE_postfix_expression = 40
RULE_macro_parameter_list = 41
RULE_unary_operator = 42
RULE_primary_expression = 43
RULE_constant = 44
RULE_expression = 45
RULE_constant_expression = 46
RULE_assignment_expression = 47
RULE_lvalue = 48
RULE_assignment_operator = 49
RULE_conditional_expression = 50
RULE_logical_or_expression = 51
RULE_logical_and_expression = 52
RULE_inclusive_or_expression = 53
RULE_exclusive_or_expression = 54
RULE_and_expression = 55
RULE_equality_expression = 56
RULE_relational_expression = 57
RULE_shift_expression = 58
RULE_statement = 59
RULE_asm2_statement = 60
RULE_asm1_statement = 61
RULE_asm_statement = 62
RULE_macro_statement = 63
RULE_labeled_statement = 64
RULE_compound_statement = 65
RULE_statement_list = 66
RULE_expression_statement = 67
RULE_selection_statement = 68
RULE_iteration_statement = 69
RULE_jump_statement = 70
ruleNames = [ "translation_unit", "external_declaration", "function_definition",
"declaration_specifiers", "declaration", "init_declarator_list",
"init_declarator", "storage_class_specifier", "type_specifier",
"type_id", "struct_or_union_specifier", "struct_or_union",
"struct_declaration_list", "struct_declaration", "specifier_qualifier_list",
"struct_declarator_list", "struct_declarator", "enum_specifier",
"enumerator_list", "enumerator", "type_qualifier", "declarator",
"direct_declarator", "declarator_suffix", "pointer",
"parameter_type_list", "parameter_list", "parameter_declaration",
"identifier_list", "type_name", "abstract_declarator",
"direct_abstract_declarator", "abstract_declarator_suffix",
"initializer", "initializer_list", "argument_expression_list",
"additive_expression", "multiplicative_expression", "cast_expression",
"unary_expression", "postfix_expression", "macro_parameter_list",
"unary_operator", "primary_expression", "constant", "expression",
"constant_expression", "assignment_expression", "lvalue",
"assignment_operator", "conditional_expression", "logical_or_expression",
"logical_and_expression", "inclusive_or_expression",
"exclusive_or_expression", "and_expression", "equality_expression",
"relational_expression", "shift_expression", "statement",
"asm2_statement", "asm1_statement", "asm_statement",
"macro_statement", "labeled_statement", "compound_statement",
"statement_list", "expression_statement", "selection_statement",
"iteration_statement", "jump_statement" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
T__12=13
T__13=14
T__14=15
T__15=16
T__16=17
T__17=18
T__18=19
T__19=20
T__20=21
T__21=22
T__22=23
T__23=24
T__24=25
T__25=26
T__26=27
T__27=28
T__28=29
T__29=30
T__30=31
T__31=32
T__32=33
T__33=34
T__34=35
T__35=36
T__36=37
T__37=38
T__38=39
T__39=40
T__40=41
T__41=42
T__42=43
T__43=44
T__44=45
T__45=46
T__46=47
T__47=48
T__48=49
T__49=50
T__50=51
T__51=52
T__52=53
T__53=54
T__54=55
T__55=56
T__56=57
T__57=58
T__58=59
T__59=60
T__60=61
T__61=62
T__62=63
T__63=64
T__64=65
T__65=66
T__66=67
T__67=68
T__68=69
T__69=70
T__70=71
T__71=72
T__72=73
T__73=74
T__74=75
T__75=76
T__76=77
T__77=78
T__78=79
T__79=80
T__80=81
T__81=82
T__82=83
T__83=84
T__84=85
T__85=86
T__86=87
T__87=88
T__88=89
T__89=90
T__90=91
T__91=92
IDENTIFIER=93
CHARACTER_LITERAL=94
STRING_LITERAL=95
HEX_LITERAL=96
DECIMAL_LITERAL=97
OCTAL_LITERAL=98
FLOATING_POINT_LITERAL=99
WS=100
BS=101
UnicodeVocabulary=102
COMMENT=103
LINE_COMMENT=104
LINE_COMMAND=105
    # @param input Type: TokenStream
    # @param output= sys.stdout Type: TextIO
    def __init__(self,input,output= sys.stdout):
        """Create a CParser over the given token stream.

        Generated by ANTLR 4.7.1; wires this parser instance to the
        class-level ATN and the shared DFA/prediction caches.
        """
        super().__init__(input, output)
        # Verify the installed antlr4 runtime matches the version of the
        # tool that generated this file.
        self.checkVersion("4.7.1")
        # The ATN simulator drives adaptive prediction; atn, decisionsToDFA
        # and sharedContextCache are class attributes shared by all instances.
        self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
        self._predicates = None
def printTokenInfo(self,line,offset,tokenText):
print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.PredicateExpressionList.append(PredExp)
def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.EnumerationDefinitionList.append(EnumDef)
def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.StructUnionDefinitionList.append(SUDef)
def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.TypedefDefinitionList.append(Tdef)
def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
FileProfile.FunctionDefinitionList.append(FuncDef)
def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.VariableDeclarationList.append(VarDecl)
def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.FunctionCallingList.append(FuncCall)
class Translation_unitContext(ParserRuleContext):
# @param parent=None Type: ParserRuleContext
# @param invokingState=-1 Type: int
def __init__(self,parser,parent=None,invokingState=-1):
super().__init__(parent, invokingState)
self.parser = parser
# @param i=None Type: int
def external_declaration(self,i=None):
if i is None:
return self.getTypedRuleContexts(CParser.External_declarationContext)
else:
return self.getTypedRuleContext(CParser.External_declarationContext,i)
def getRuleIndex(self):
return CParser.RULE_translation_unit
# @param listener Type: ParseTreeListener
def enterRule(self,listener):
if hasattr( listener, "enterTranslation_unit" ):
listener.enterTranslation_unit(self)
# @param listener Type: ParseTreeListener
def exitRule(self,listener):
if hasattr( listener, "exitTranslation_unit" ):
listener.exitTranslation_unit(self)
    def translation_unit(self):
        """Parse rule 0: translation_unit : external_declaration* EOF-less.

        Loops while the lookahead token can begin an external declaration
        (storage/type/qualifier keywords or an IDENTIFIER).  On a syntax
        error the exception is stored on the context and standard ANTLR
        recovery runs.  Returns the Translation_unitContext node.
        NOTE: the numeric self.state values are generated ATN state ids;
        do not edit them by hand.
        """
        localctx = CParser.Translation_unitContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_translation_unit)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 145
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated bitmask: token types 0..63 that can start an
            # external_declaration; IDENTIFIER (93) is tested separately.
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41))) != 0) or _la==CParser.IDENTIFIER:
                self.state = 142
                self.external_declaration()
                self.state = 147
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class External_declarationContext(ParserRuleContext):
# @param parent=None Type: ParserRuleContext
# @param invokingState=-1 Type: int
def __init__(self,parser,parent=None,invokingState=-1):
super().__init__(parent, invokingState)
self.parser = parser
def declarator(self):
return self.getTypedRuleContext(CParser.DeclaratorContext,0)
def declaration_specifiers(self):
return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
# @param i=None Type: int
def declaration(self,i=None):
if i is None:
return self.getTypedRuleContexts(CParser.DeclarationContext)
else:
return self.getTypedRuleContext(CParser.DeclarationContext,i)
def function_definition(self):
return self.getTypedRuleContext(CParser.Function_definitionContext,0)
def macro_statement(self):
return self.getTypedRuleContext(CParser.Macro_statementContext,0)
def getRuleIndex(self):
return CParser.RULE_external_declaration
# @param listener Type: ParseTreeListener
def enterRule(self,listener):
if hasattr( listener, "enterExternal_declaration" ):
listener.enterExternal_declaration(self)
# @param listener Type: ParseTreeListener
def exitRule(self,listener):
if hasattr( listener, "exitExternal_declaration" ):
listener.exitExternal_declaration(self)
    def external_declaration(self):
        """Parse rule 1: external_declaration.

        Uses adaptive prediction (decision 4) to pick one of four
        alternatives: (1) declaration_specifiers? declarator declaration* '{',
        (2) function_definition, (3) declaration, (4) macro_statement ';'?.
        Returns the External_declarationContext node.
        NOTE: the numeric self.state values and adaptivePredict decision
        numbers are generated ATN ids; do not edit them by hand.
        """
        localctx = CParser.External_declarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_external_declaration)
        self._la = 0 # Token type
        try:
            self.state = 166
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
            if la_ == 1:
                # Alt 1: declaration_specifiers? declarator declaration* '{'
                self.enterOuterAlt(localctx, 1)
                self.state = 149
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
                if la_ == 1:
                    self.state = 148
                    self.declaration_specifiers()
                self.state = 151
                self.declarator()
                self.state = 155
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Generated bitmask: token types 0..63 that can start a
                # declaration; IDENTIFIER (93) is tested separately.
                while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER:
                    self.state = 152
                    self.declaration()
                    self.state = 157
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                self.state = 158
                self.match(CParser.T__0)
                pass
            elif la_ == 2:
                # Alt 2: a full function definition.
                self.enterOuterAlt(localctx, 2)
                self.state = 160
                self.function_definition()
                pass
            elif la_ == 3:
                # Alt 3: a plain declaration.
                self.enterOuterAlt(localctx, 3)
                self.state = 161
                self.declaration()
                pass
            elif la_ == 4:
                # Alt 4: macro_statement followed by an optional ';' (T__1).
                self.enterOuterAlt(localctx, 4)
                self.state = 162
                self.macro_statement()
                self.state = 164
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.T__1:
                    self.state = 163
                    self.match(CParser.T__1)
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Function_definitionContext(ParserRuleContext):
# @param parent=None Type: ParserRuleContext
# @param invokingState=-1 Type: int
def __init__(self,parser,parent=None,invokingState=-1):
super().__init__(parent, invokingState)
self.parser = parser
self.ModifierText = ''
self.DeclText = ''
self.LBLine = 0
self.LBOffset = 0
self.DeclLine = 0
self.DeclOffset = 0
self.d = None # Declaration_specifiersContext
self._declaration_specifiers = None # Declaration_specifiersContext
self._declarator = None # DeclaratorContext
self.a = None # Compound_statementContext
self.b = None # Compound_statementContext
def declarator(self):
return self.getTypedRuleContext(CParser.DeclaratorContext,0)
def compound_statement(self):
return self.getTypedRuleContext(CParser.Compound_statementContext,0)
def declaration_specifiers(self):
return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
# @param i=None Type: int
def declaration(self,i=None):
if i is None:
return self.getTypedRuleContexts(CParser.DeclarationContext)
else:
return self.getTypedRuleContext(CParser.DeclarationContext,i)
def getRuleIndex(self):
return CParser.RULE_function_definition
# @param listener Type: ParseTreeListener
def enterRule(self,listener):
if hasattr( listener, "enterFunction_definition" ):
listener.enterFunction_definition(self)
# @param listener Type: ParseTreeListener
def exitRule(self,listener):
if hasattr( listener, "exitFunction_definition" ):
listener.exitFunction_definition(self)
    def function_definition(self):
        """Parse the 'function_definition' grammar rule (ANTLR-generated).

        Embedded actions collect the modifier/declarator text and source
        positions and hand them to StoreFunctionDefinition for ECC's
        metadata database. Generated code: state numbers and statement
        order are significant — do not hand-edit the logic.
        """
        localctx = CParser.Function_definitionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_function_definition)
        ModifierText = '';
        DeclText = '';
        LBLine = 0;
        LBOffset = 0;
        DeclLine = 0;
        DeclOffset = 0;
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 169
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,5,self._ctx)
            if la_ == 1:
                # Optional declaration specifiers preceding the declarator.
                self.state = 168
                localctx.d = localctx._declaration_specifiers = self.declaration_specifiers()
            self.state = 171
            localctx._declarator = self.declarator()
            self.state = 180
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.T__2, CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
                # Alternative with one or more declarations before the body
                # (presumably K&R-style parameter declarations — TODO confirm
                # against the grammar file).
                self.state = 173
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while True:
                    self.state = 172
                    self.declaration()
                    self.state = 175
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
                        break
                self.state = 177
                localctx.a = self.compound_statement()
                pass
            elif token in [CParser.T__0]:
                # Body follows the declarator immediately.
                self.state = 179
                localctx.b = self.compound_statement()
                pass
            else:
                raise NoViableAltException(self)
            # Embedded action: extract source text and positions for ECC.
            if localctx.d != None:
                ModifierText = (None if localctx._declaration_specifiers is None else self._input.getText((localctx._declaration_specifiers.start,localctx._declaration_specifiers.stop)))
            else:
                ModifierText = ''
            DeclText = (None if localctx._declarator is None else self._input.getText((localctx._declarator.start,localctx._declarator.stop)))
            # NOTE(review): the expressions below dereference .line/.column on a
            # value that could be None; the generated action relies on the
            # declarator/body having parsed successfully at this point.
            DeclLine = (None if localctx._declarator is None else localctx._declarator.start).line
            DeclOffset = (None if localctx._declarator is None else localctx._declarator.start).column
            if localctx.a != None:
                LBLine = (None if localctx.a is None else localctx.a.start).line
                LBOffset = (None if localctx.a is None else localctx.a.start).column
            else:
                LBLine = (None if localctx.b is None else localctx.b.start).line
                LBOffset = (None if localctx.b is None else localctx.b.start).column
            self._ctx.stop = self._input.LT(-1)
            self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Declaration_specifiersContext(ParserRuleContext):
        """Parse-tree context for the 'declaration_specifiers' rule
        (ANTLR-generated): a sequence of storage-class specifiers, type
        specifiers and type qualifiers.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def storage_class_specifier(self,i=None):
            # With i=None return all matching subtrees, otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Storage_class_specifierContext)
            else:
                return self.getTypedRuleContext(CParser.Storage_class_specifierContext,i)
        # @param i=None Type: int
        def type_specifier(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Type_specifierContext)
            else:
                return self.getTypedRuleContext(CParser.Type_specifierContext,i)
        # @param i=None Type: int
        def type_qualifier(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Type_qualifierContext)
            else:
                return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
        def getRuleIndex(self):
            return CParser.RULE_declaration_specifiers
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterDeclaration_specifiers" ):
                listener.enterDeclaration_specifiers(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitDeclaration_specifiers" ):
                listener.exitDeclaration_specifiers(self)
    def declaration_specifiers(self):
        """Parse the 'declaration_specifiers' rule (ANTLR-generated).

        Matches one or more of: storage_class_specifier, type_specifier,
        type_qualifier, using adaptive prediction to decide when to stop.
        Generated code — statement order and state numbers are significant.
        """
        localctx = CParser.Declaration_specifiersContext(self, self._ctx, self.state)
        self.enterRule(localctx, 6, self.RULE_declaration_specifiers)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 187
            self._errHandler.sync(self)
            _alt = 1
            # (...)+ loop: keep consuming specifiers while prediction says so.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 187
                    self._errHandler.sync(self)
                    token = self._input.LA(1)
                    if token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9]:
                        self.state = 184
                        self.storage_class_specifier()
                        pass
                    elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
                        self.state = 185
                        self.type_specifier()
                        pass
                    elif token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
                        self.state = 186
                        self.type_qualifier()
                        pass
                    else:
                        raise NoViableAltException(self)
                else:
                    raise NoViableAltException(self)
                self.state = 189
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class DeclarationContext(ParserRuleContext):
        """Parse-tree context for the 'declaration' rule (ANTLR-generated).

        Labeled elements: a/d = the typedef-alternative's leading keyword and
        terminating token, b/c = its specifiers and declarator list; s/t/e =
        the plain-declaration alternative's specifiers, declarator list and
        terminator.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            self.a = None # Token
            self.b = None # Declaration_specifiersContext
            self.c = None # Init_declarator_listContext
            self.d = None # Token
            self.s = None # Declaration_specifiersContext
            self.t = None # Init_declarator_listContext
            self.e = None # Token
        def init_declarator_list(self):
            return self.getTypedRuleContext(CParser.Init_declarator_listContext,0)
        def declaration_specifiers(self):
            return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
        def getRuleIndex(self):
            return CParser.RULE_declaration
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterDeclaration" ):
                listener.enterDeclaration(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitDeclaration" ):
                listener.exitDeclaration(self)
    def declaration(self):
        """Parse the 'declaration' rule (ANTLR-generated).

        Alternative 1 (introduced by token T__2) records a typedef via
        StoreTypedefDefinition; alternative 2 parses an ordinary declaration
        and records it via StoreVariableDeclaration. Generated code — do not
        hand-edit the logic.
        """
        localctx = CParser.DeclarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 8, self.RULE_declaration)
        self._la = 0 # Token type
        try:
            self.state = 206
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.T__2]:
                # Typedef branch (T__2 is presumably the 'typedef' keyword,
                # given the Store call below — confirm against the grammar).
                self.enterOuterAlt(localctx, 1)
                self.state = 191
                localctx.a = self.match(CParser.T__2)
                self.state = 193
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
                if la_ == 1:
                    self.state = 192
                    localctx.b = self.declaration_specifiers()
                self.state = 195
                localctx.c = self.init_declarator_list()
                self.state = 196
                localctx.d = self.match(CParser.T__1)
                # Embedded action: pass '' for the specifier text when it
                # was not present.
                if localctx.b is not None:
                    self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, (None if localctx.b is None else self._input.getText((localctx.b.start,localctx.b.stop))), (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
                else:
                    self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, '', (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
                pass
            elif token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
                # Ordinary declaration branch: specifiers, optional
                # declarator list, terminator.
                self.enterOuterAlt(localctx, 2)
                self.state = 199
                localctx.s = self.declaration_specifiers()
                self.state = 201
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
                    self.state = 200
                    localctx.t = self.init_declarator_list()
                self.state = 203
                localctx.e = self.match(CParser.T__1)
                # Only record a variable when a declarator list was present.
                if localctx.t is not None:
                    self.StoreVariableDeclaration((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.t is None else localctx.t.start).line, (None if localctx.t is None else localctx.t.start).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))), (None if localctx.t is None else self._input.getText((localctx.t.start,localctx.t.stop))))
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Init_declarator_listContext(ParserRuleContext):
        """Parse-tree context for the 'init_declarator_list' rule
        (ANTLR-generated): a comma-separated list of init_declarators.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def init_declarator(self,i=None):
            # With i=None return all init_declarator subtrees, otherwise the i-th.
            if i is None:
                return self.getTypedRuleContexts(CParser.Init_declaratorContext)
            else:
                return self.getTypedRuleContext(CParser.Init_declaratorContext,i)
        def getRuleIndex(self):
            return CParser.RULE_init_declarator_list
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterInit_declarator_list" ):
                listener.enterInit_declarator_list(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitInit_declarator_list" ):
                listener.exitInit_declarator_list(self)
    def init_declarator_list(self):
        """Parse the 'init_declarator_list' rule (ANTLR-generated).

        Matches init_declarator (T__3 init_declarator)* — i.e. one or more
        declarators separated by the T__3 token (presumably ',').
        """
        localctx = CParser.Init_declarator_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 10, self.RULE_init_declarator_list)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 208
            self.init_declarator()
            self.state = 213
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__3:
                self.state = 209
                self.match(CParser.T__3)
                self.state = 210
                self.init_declarator()
                self.state = 215
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Init_declaratorContext(ParserRuleContext):
        """Parse-tree context for the 'init_declarator' rule
        (ANTLR-generated): a declarator with an optional initializer.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def declarator(self):
            return self.getTypedRuleContext(CParser.DeclaratorContext,0)
        def initializer(self):
            return self.getTypedRuleContext(CParser.InitializerContext,0)
        def getRuleIndex(self):
            return CParser.RULE_init_declarator
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterInit_declarator" ):
                listener.enterInit_declarator(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitInit_declarator" ):
                listener.exitInit_declarator(self)
    def init_declarator(self):
        """Parse the 'init_declarator' rule (ANTLR-generated).

        Matches declarator (T__4 initializer)? — T__4 is presumably the '='
        token that introduces an initializer.
        """
        localctx = CParser.Init_declaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 12, self.RULE_init_declarator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 216
            self.declarator()
            self.state = 219
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__4:
                self.state = 217
                self.match(CParser.T__4)
                self.state = 218
                self.initializer()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Storage_class_specifierContext(ParserRuleContext):
        """Parse-tree context for the 'storage_class_specifier' rule
        (ANTLR-generated). The rule has no sub-rules; it matches a single
        keyword token.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def getRuleIndex(self):
            return CParser.RULE_storage_class_specifier
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStorage_class_specifier" ):
                listener.enterStorage_class_specifier(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStorage_class_specifier" ):
                listener.exitStorage_class_specifier(self)
    def storage_class_specifier(self):
        """Parse the 'storage_class_specifier' rule (ANTLR-generated).

        Consumes exactly one token from the set {T__5..T__9}; any other
        lookahead triggers inline error recovery.
        """
        localctx = CParser.Storage_class_specifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 14, self.RULE_storage_class_specifier)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 221
            _la = self._input.LA(1)
            # Bit-set membership test over the allowed keyword tokens.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Type_specifierContext(ParserRuleContext):
        """Parse-tree context for the 'type_specifier' rule (ANTLR-generated).

        Labels: s = struct_or_union_specifier subtree, e = enum_specifier
        subtree; used by the embedded actions to record struct/union and
        enum definitions.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            self.s = None # Struct_or_union_specifierContext
            self.e = None # Enum_specifierContext
        def struct_or_union_specifier(self):
            return self.getTypedRuleContext(CParser.Struct_or_union_specifierContext,0)
        def enum_specifier(self):
            return self.getTypedRuleContext(CParser.Enum_specifierContext,0)
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        def declarator(self):
            return self.getTypedRuleContext(CParser.DeclaratorContext,0)
        # @param i=None Type: int
        def type_qualifier(self,i=None):
            # With i=None return all type_qualifier subtrees, otherwise the i-th.
            if i is None:
                return self.getTypedRuleContexts(CParser.Type_qualifierContext)
            else:
                return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
        def type_id(self):
            return self.getTypedRuleContext(CParser.Type_idContext,0)
        def getRuleIndex(self):
            return CParser.RULE_type_specifier
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterType_specifier" ):
                listener.enterType_specifier(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitType_specifier" ):
                listener.exitType_specifier(self)
    def type_specifier(self):
        """Parse the 'type_specifier' rule (ANTLR-generated).

        Thirteen alternatives: nine single-keyword built-in types
        (T__10..T__18), struct/union specifiers (recorded via
        StoreStructUnionDefinition), enum specifiers (recorded via
        StoreEnumerationDefinition), an IDENTIFIER followed by qualifiers
        and a declarator, or a plain type_id. Generated code — state numbers
        and order are significant.
        """
        localctx = CParser.Type_specifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 16, self.RULE_type_specifier)
        try:
            self.state = 247
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 223
                self.match(CParser.T__10)
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 224
                self.match(CParser.T__11)
                pass
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 225
                self.match(CParser.T__12)
                pass
            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 226
                self.match(CParser.T__13)
                pass
            elif la_ == 5:
                self.enterOuterAlt(localctx, 5)
                self.state = 227
                self.match(CParser.T__14)
                pass
            elif la_ == 6:
                self.enterOuterAlt(localctx, 6)
                self.state = 228
                self.match(CParser.T__15)
                pass
            elif la_ == 7:
                self.enterOuterAlt(localctx, 7)
                self.state = 229
                self.match(CParser.T__16)
                pass
            elif la_ == 8:
                self.enterOuterAlt(localctx, 8)
                self.state = 230
                self.match(CParser.T__17)
                pass
            elif la_ == 9:
                self.enterOuterAlt(localctx, 9)
                self.state = 231
                self.match(CParser.T__18)
                pass
            elif la_ == 10:
                # struct/union specifier; record it only when it parsed to
                # completion (stop token present).
                self.enterOuterAlt(localctx, 10)
                self.state = 232
                localctx.s = self.struct_or_union_specifier()
                if localctx.s.stop is not None:
                    self.StoreStructUnionDefinition((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.s is None else localctx.s.stop).line, (None if localctx.s is None else localctx.s.stop).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))))
                pass
            elif la_ == 11:
                # enum specifier; same completion guard as above.
                self.enterOuterAlt(localctx, 11)
                self.state = 235
                localctx.e = self.enum_specifier()
                if localctx.e.stop is not None:
                    self.StoreEnumerationDefinition((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
                pass
            elif la_ == 12:
                # IDENTIFIER type_qualifier* declarator
                self.enterOuterAlt(localctx, 12)
                self.state = 238
                self.match(CParser.IDENTIFIER)
                self.state = 242
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 239
                        self.type_qualifier()
                    self.state = 244
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
                self.state = 245
                self.declarator()
                pass
            elif la_ == 13:
                self.enterOuterAlt(localctx, 13)
                self.state = 246
                self.type_id()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Type_idContext(ParserRuleContext):
        """Parse-tree context for the 'type_id' rule (ANTLR-generated): a
        bare IDENTIFIER used as a type name.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        def getRuleIndex(self):
            return CParser.RULE_type_id
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterType_id" ):
                listener.enterType_id(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitType_id" ):
                listener.exitType_id(self)
    def type_id(self):
        """Parse the 'type_id' rule (ANTLR-generated): match a single
        IDENTIFIER token.
        """
        localctx = CParser.Type_idContext(self, self._ctx, self.state)
        self.enterRule(localctx, 18, self.RULE_type_id)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 249
            self.match(CParser.IDENTIFIER)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Struct_or_union_specifierContext(ParserRuleContext):
        """Parse-tree context for the 'struct_or_union_specifier' rule
        (ANTLR-generated): a struct/union definition with a declaration
        list, or a reference to a named struct/union.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def struct_or_union(self):
            return self.getTypedRuleContext(CParser.Struct_or_unionContext,0)
        def struct_declaration_list(self):
            return self.getTypedRuleContext(CParser.Struct_declaration_listContext,0)
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        def getRuleIndex(self):
            return CParser.RULE_struct_or_union_specifier
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStruct_or_union_specifier" ):
                listener.enterStruct_or_union_specifier(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStruct_or_union_specifier" ):
                listener.exitStruct_or_union_specifier(self)
    def struct_or_union_specifier(self):
        """Parse the 'struct_or_union_specifier' rule (ANTLR-generated).

        Alternative 1: struct_or_union IDENTIFIER? T__0 struct_declaration_list
        T__19 (a full definition with body; T__0/T__19 are presumably the
        '{'/'}' tokens). Alternative 2: struct_or_union IDENTIFIER (a
        reference to a named type).
        """
        localctx = CParser.Struct_or_union_specifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 20, self.RULE_struct_or_union_specifier)
        self._la = 0 # Token type
        try:
            self.state = 262
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 251
                self.struct_or_union()
                self.state = 253
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.IDENTIFIER:
                    # Optional tag name before the body.
                    self.state = 252
                    self.match(CParser.IDENTIFIER)
                self.state = 255
                self.match(CParser.T__0)
                self.state = 256
                self.struct_declaration_list()
                self.state = 257
                self.match(CParser.T__19)
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 259
                self.struct_or_union()
                self.state = 260
                self.match(CParser.IDENTIFIER)
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Struct_or_unionContext(ParserRuleContext):
        """Parse-tree context for the 'struct_or_union' rule
        (ANTLR-generated). The rule matches a single keyword token.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def getRuleIndex(self):
            return CParser.RULE_struct_or_union
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStruct_or_union" ):
                listener.enterStruct_or_union(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStruct_or_union" ):
                listener.exitStruct_or_union(self)
    def struct_or_union(self):
        """Parse the 'struct_or_union' rule (ANTLR-generated).

        Consumes one token from {T__20, T__21} — presumably the 'struct'
        and 'union' keywords; anything else triggers inline recovery.
        """
        localctx = CParser.Struct_or_unionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 22, self.RULE_struct_or_union)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 264
            _la = self._input.LA(1)
            if not(_la==CParser.T__20 or _la==CParser.T__21):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Struct_declaration_listContext(ParserRuleContext):
        """Parse-tree context for the 'struct_declaration_list' rule
        (ANTLR-generated): one or more struct_declaration subtrees.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def struct_declaration(self,i=None):
            # With i=None return all struct_declaration subtrees, otherwise the i-th.
            if i is None:
                return self.getTypedRuleContexts(CParser.Struct_declarationContext)
            else:
                return self.getTypedRuleContext(CParser.Struct_declarationContext,i)
        def getRuleIndex(self):
            return CParser.RULE_struct_declaration_list
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStruct_declaration_list" ):
                listener.enterStruct_declaration_list(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStruct_declaration_list" ):
                listener.exitStruct_declaration_list(self)
    def struct_declaration_list(self):
        """Parse the 'struct_declaration_list' rule (ANTLR-generated).

        A (...)+ loop: match struct_declaration as long as the lookahead
        token is in the rule's FIRST set (tested via the bit mask below).
        """
        localctx = CParser.Struct_declaration_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 24, self.RULE_struct_declaration_list)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 267
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while True:
                self.state = 266
                self.struct_declaration()
                self.state = 269
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
                    break
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Struct_declarationContext(ParserRuleContext):
        """Parse-tree context for the 'struct_declaration' rule
        (ANTLR-generated): a specifier/qualifier list followed by a
        declarator list and a terminator.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def specifier_qualifier_list(self):
            return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)
        def struct_declarator_list(self):
            return self.getTypedRuleContext(CParser.Struct_declarator_listContext,0)
        def getRuleIndex(self):
            return CParser.RULE_struct_declaration
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStruct_declaration" ):
                listener.enterStruct_declaration(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStruct_declaration" ):
                listener.exitStruct_declaration(self)
    def struct_declaration(self):
        """Parse the 'struct_declaration' rule (ANTLR-generated).

        Matches specifier_qualifier_list struct_declarator_list T__1 —
        T__1 is presumably the ';' terminator.
        """
        localctx = CParser.Struct_declarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 26, self.RULE_struct_declaration)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 271
            self.specifier_qualifier_list()
            self.state = 272
            self.struct_declarator_list()
            self.state = 273
            self.match(CParser.T__1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Specifier_qualifier_listContext(ParserRuleContext):
        """Parse-tree context for the 'specifier_qualifier_list' rule
        (ANTLR-generated): one or more type qualifiers and type specifiers.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def type_qualifier(self,i=None):
            # With i=None return all type_qualifier subtrees, otherwise the i-th.
            if i is None:
                return self.getTypedRuleContexts(CParser.Type_qualifierContext)
            else:
                return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
        # @param i=None Type: int
        def type_specifier(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Type_specifierContext)
            else:
                return self.getTypedRuleContext(CParser.Type_specifierContext,i)
        def getRuleIndex(self):
            return CParser.RULE_specifier_qualifier_list
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterSpecifier_qualifier_list" ):
                listener.enterSpecifier_qualifier_list(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitSpecifier_qualifier_list" ):
                listener.exitSpecifier_qualifier_list(self)
    def specifier_qualifier_list(self):
        """Parse the 'specifier_qualifier_list' rule (ANTLR-generated).

        A (...)+ loop of type_qualifier | type_specifier, driven by
        adaptive prediction (decision 21). Generated code — statement order
        and state numbers are significant.
        """
        localctx = CParser.Specifier_qualifier_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 28, self.RULE_specifier_qualifier_list)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 277
            self._errHandler.sync(self)
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 277
                    self._errHandler.sync(self)
                    token = self._input.LA(1)
                    if token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
                        self.state = 275
                        self.type_qualifier()
                        pass
                    elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
                        self.state = 276
                        self.type_specifier()
                        pass
                    else:
                        raise NoViableAltException(self)
                else:
                    raise NoViableAltException(self)
                self.state = 279
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,21,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Struct_declarator_listContext(ParserRuleContext):
        """Parse-tree context for the 'struct_declarator_list' rule
        (ANTLR-generated): a comma-separated list of struct_declarators.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def struct_declarator(self,i=None):
            # With i=None return all struct_declarator subtrees, otherwise the i-th.
            if i is None:
                return self.getTypedRuleContexts(CParser.Struct_declaratorContext)
            else:
                return self.getTypedRuleContext(CParser.Struct_declaratorContext,i)
        def getRuleIndex(self):
            return CParser.RULE_struct_declarator_list
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStruct_declarator_list" ):
                listener.enterStruct_declarator_list(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStruct_declarator_list" ):
                listener.exitStruct_declarator_list(self)
    def struct_declarator_list(self):
        """Parse the 'struct_declarator_list' rule (ANTLR-generated).

        Matches struct_declarator (T__3 struct_declarator)* — T__3 is
        presumably the ',' separator.
        """
        localctx = CParser.Struct_declarator_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 30, self.RULE_struct_declarator_list)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 281
            self.struct_declarator()
            self.state = 286
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__3:
                self.state = 282
                self.match(CParser.T__3)
                self.state = 283
                self.struct_declarator()
                self.state = 288
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Struct_declaratorContext(ParserRuleContext):
        """Parse-tree context for the 'struct_declarator' rule
        (ANTLR-generated): a declarator with an optional width expression,
        or a width expression alone.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def declarator(self):
            return self.getTypedRuleContext(CParser.DeclaratorContext,0)
        def constant_expression(self):
            return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_struct_declarator
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStruct_declarator" ):
                listener.enterStruct_declarator(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStruct_declarator" ):
                listener.exitStruct_declarator(self)
    def struct_declarator(self):
        """Parse the 'struct_declarator' rule (ANTLR-generated).

        Alternative 1: declarator (T__22 constant_expression)?; alternative
        2: T__22 constant_expression alone. T__22 is presumably the ':' of
        a bit-field width — TODO confirm against the grammar file.
        """
        localctx = CParser.Struct_declaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 32, self.RULE_struct_declarator)
        self._la = 0 # Token type
        try:
            self.state = 296
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__37, CParser.T__41, CParser.IDENTIFIER]:
                self.enterOuterAlt(localctx, 1)
                self.state = 289
                self.declarator()
                self.state = 292
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.T__22:
                    # Optional width expression after the declarator.
                    self.state = 290
                    self.match(CParser.T__22)
                    self.state = 291
                    self.constant_expression()
                pass
            elif token in [CParser.T__22]:
                # Anonymous declarator: width expression only.
                self.enterOuterAlt(localctx, 2)
                self.state = 294
                self.match(CParser.T__22)
                self.state = 295
                self.constant_expression()
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'enum_specifier' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Enum_specifierContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def enumerator_list(self):
            # Child 'enumerator_list' sub-context, if one was parsed.
            return self.getTypedRuleContext(CParser.Enumerator_listContext,0)

        def IDENTIFIER(self):
            # The enum tag token, if present.
            return self.getToken(CParser.IDENTIFIER, 0)

        def getRuleIndex(self):
            return CParser.RULE_enum_specifier

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterEnum_specifier" ):
                listener.enterEnum_specifier(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitEnum_specifier" ):
                listener.exitEnum_specifier(self)
    # Parse an 'enum_specifier'. Three alternatives, selected by adaptive
    # prediction (decision 27): anonymous enum with body, named enum with
    # body, or a bare enum tag reference.
    # (ANTLR-generated; state numbers index into the ATN — do not edit by hand.)
    def enum_specifier(self):
        localctx = CParser.Enum_specifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 34, self.RULE_enum_specifier)
        self._la = 0 # Token type
        try:
            self.state = 317
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,27,self._ctx)
            # Alt 1: T__23 T__0 enumerator_list T__3? T__19  (anonymous, with body)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 298
                self.match(CParser.T__23)
                self.state = 299
                self.match(CParser.T__0)
                self.state = 300
                self.enumerator_list()
                self.state = 302
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional trailing T__3 before the closing token.
                if _la==CParser.T__3:
                    self.state = 301
                    self.match(CParser.T__3)

                self.state = 304
                self.match(CParser.T__19)
                pass

            # Alt 2: T__23 IDENTIFIER T__0 enumerator_list T__3? T__19  (named, with body)
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 306
                self.match(CParser.T__23)
                self.state = 307
                self.match(CParser.IDENTIFIER)
                self.state = 308
                self.match(CParser.T__0)
                self.state = 309
                self.enumerator_list()
                self.state = 311
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.T__3:
                    self.state = 310
                    self.match(CParser.T__3)

                self.state = 313
                self.match(CParser.T__19)
                pass

            # Alt 3: T__23 IDENTIFIER  (tag reference only)
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 315
                self.match(CParser.T__23)
                self.state = 316
                self.match(CParser.IDENTIFIER)
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'enumerator_list' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Enumerator_listContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # With i=None return all enumerator children; otherwise the i-th one.
        # @param i=None Type: int
        def enumerator(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.EnumeratorContext)
            else:
                return self.getTypedRuleContext(CParser.EnumeratorContext,i)

        def getRuleIndex(self):
            return CParser.RULE_enumerator_list

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterEnumerator_list" ):
                listener.enterEnumerator_list(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitEnumerator_list" ):
                listener.exitEnumerator_list(self)
    # Parse an 'enumerator_list': enumerator (T__3 enumerator)*.
    # The loop is driven by adaptive prediction (decision 28) so a trailing
    # T__3 that belongs to the enclosing rule is not consumed here.
    # (ANTLR-generated; do not edit by hand.)
    def enumerator_list(self):
        localctx = CParser.Enumerator_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 36, self.RULE_enumerator_list)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 319
            self.enumerator()
            self.state = 324
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 320
                    self.match(CParser.T__3)
                    self.state = 321
                    self.enumerator()
                self.state = 326
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,28,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'enumerator' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class EnumeratorContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def IDENTIFIER(self):
            # The enumerator's name token.
            return self.getToken(CParser.IDENTIFIER, 0)

        def constant_expression(self):
            # The explicit value expression, if one was parsed.
            return self.getTypedRuleContext(CParser.Constant_expressionContext,0)

        def getRuleIndex(self):
            return CParser.RULE_enumerator

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterEnumerator" ):
                listener.enterEnumerator(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitEnumerator" ):
                listener.exitEnumerator(self)
    # Parse an 'enumerator': IDENTIFIER (T__4 constant_expression)?.
    # (ANTLR-generated; do not edit by hand.)
    def enumerator(self):
        localctx = CParser.EnumeratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 38, self.RULE_enumerator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 327
            self.match(CParser.IDENTIFIER)
            self.state = 330
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional explicit value: T__4 constant_expression.
            if _la==CParser.T__4:
                self.state = 328
                self.match(CParser.T__4)
                self.state = 329
                self.constant_expression()

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'type_qualifier' grammar rule.
    # Holds a single qualifier token; no typed children.
    # (ANTLR-generated; do not edit logic by hand.)
    class Type_qualifierContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def getRuleIndex(self):
            return CParser.RULE_type_qualifier

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterType_qualifier" ):
                listener.enterType_qualifier(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitType_qualifier" ):
                listener.exitType_qualifier(self)
    # Parse a 'type_qualifier': exactly one token from the qualifier set
    # T__24..T__36, validated with a bitmask over the lookahead token type.
    # (ANTLR-generated; do not edit by hand.)
    def type_qualifier(self):
        localctx = CParser.Type_qualifierContext(self, self._ctx, self.state)
        self.enterRule(localctx, 40, self.RULE_type_qualifier)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 332
            _la = self._input.LA(1)
            # Bit-set membership test generated by ANTLR: accepts T__24..T__36 only.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'declarator' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class DeclaratorContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def direct_declarator(self):
            # Child 'direct_declarator' sub-context, if one was parsed.
            return self.getTypedRuleContext(CParser.Direct_declaratorContext,0)

        def pointer(self):
            # Child 'pointer' sub-context, if one was parsed.
            return self.getTypedRuleContext(CParser.PointerContext,0)

        def getRuleIndex(self):
            return CParser.RULE_declarator

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterDeclarator" ):
                listener.enterDeclarator(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitDeclarator" ):
                listener.exitDeclarator(self)
    # Parse a 'declarator'. Decision 34 picks between:
    #   alt 1: pointer? T__33? T__34? T__35? direct_declarator
    #   alt 2: pointer
    # (ANTLR-generated; state numbers index into the ATN — do not edit by hand.)
    def declarator(self):
        localctx = CParser.DeclaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 42, self.RULE_declarator)
        self._la = 0 # Token type
        try:
            self.state = 348
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 335
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional leading pointer.
                if _la==CParser.T__41:
                    self.state = 334
                    self.pointer()

                self.state = 338
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional modifier tokens T__33, T__34, T__35 in sequence.
                if _la==CParser.T__33:
                    self.state = 337
                    self.match(CParser.T__33)

                self.state = 341
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.T__34:
                    self.state = 340
                    self.match(CParser.T__34)

                self.state = 344
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.T__35:
                    self.state = 343
                    self.match(CParser.T__35)

                self.state = 346
                self.direct_declarator()
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 347
                self.pointer()
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'direct_declarator' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Direct_declaratorContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def IDENTIFIER(self):
            # The declared identifier token, if this alternative used one.
            return self.getToken(CParser.IDENTIFIER, 0)

        # With i=None return all declarator_suffix children; otherwise the i-th one.
        # @param i=None Type: int
        def declarator_suffix(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Declarator_suffixContext)
            else:
                return self.getTypedRuleContext(CParser.Declarator_suffixContext,i)

        def declarator(self):
            # Nested parenthesized declarator, if this alternative used one.
            return self.getTypedRuleContext(CParser.DeclaratorContext,0)

        def getRuleIndex(self):
            return CParser.RULE_direct_declarator

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterDirect_declarator" ):
                listener.enterDirect_declarator(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitDirect_declarator" ):
                listener.exitDirect_declarator(self)
    # Parse a 'direct_declarator':
    #   alt 1: IDENTIFIER declarator_suffix*          (zero-or-more loop, decision 35)
    #   alt 2: T__37 T__33? declarator T__38 declarator_suffix+  (one-or-more loop, decision 37)
    # (ANTLR-generated; state numbers index into the ATN — do not edit by hand.)
    def direct_declarator(self):
        localctx = CParser.Direct_declaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 44, self.RULE_direct_declarator)
        try:
            self.state = 368
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.IDENTIFIER]:
                self.enterOuterAlt(localctx, 1)
                self.state = 350
                self.match(CParser.IDENTIFIER)
                self.state = 354
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
                # Zero-or-more declarator suffixes.
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt==1:
                        self.state = 351
                        self.declarator_suffix()
                    self.state = 356
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,35,self._ctx)

                pass
            elif token in [CParser.T__37]:
                self.enterOuterAlt(localctx, 2)
                self.state = 357
                self.match(CParser.T__37)
                self.state = 359
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,36,self._ctx)
                # Optional T__33 before the nested declarator.
                if la_ == 1:
                    self.state = 358
                    self.match(CParser.T__33)

                self.state = 361
                self.declarator()
                self.state = 362
                self.match(CParser.T__38)
                self.state = 364
                self._errHandler.sync(self)
                _alt = 1
                # One-or-more declarator suffixes (first iteration is mandatory).
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt == 1:
                        self.state = 363
                        self.declarator_suffix()

                    else:
                        raise NoViableAltException(self)
                    self.state = 366
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,37,self._ctx)

                pass
            else:
                raise NoViableAltException(self)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'declarator_suffix' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Declarator_suffixContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def constant_expression(self):
            # Array-size expression, if this suffix had one.
            return self.getTypedRuleContext(CParser.Constant_expressionContext,0)

        def parameter_type_list(self):
            # Function parameter list, if this suffix had one.
            return self.getTypedRuleContext(CParser.Parameter_type_listContext,0)

        def identifier_list(self):
            # K&R-style identifier list, if this suffix had one.
            return self.getTypedRuleContext(CParser.Identifier_listContext,0)

        def getRuleIndex(self):
            return CParser.RULE_declarator_suffix

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterDeclarator_suffix" ):
                listener.enterDeclarator_suffix(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitDeclarator_suffix" ):
                listener.exitDeclarator_suffix(self)
    # Parse a 'declarator_suffix'. Decision 39 picks among five alternatives:
    #   1: T__39 constant_expression T__40   2: T__39 T__40
    #   3: T__37 parameter_type_list T__38   4: T__37 identifier_list T__38
    #   5: T__37 T__38
    # (ANTLR-generated; do not edit by hand.)
    def declarator_suffix(self):
        localctx = CParser.Declarator_suffixContext(self, self._ctx, self.state)
        self.enterRule(localctx, 46, self.RULE_declarator_suffix)
        try:
            self.state = 386
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,39,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 370
                self.match(CParser.T__39)
                self.state = 371
                self.constant_expression()
                self.state = 372
                self.match(CParser.T__40)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 374
                self.match(CParser.T__39)
                self.state = 375
                self.match(CParser.T__40)
                pass

            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 376
                self.match(CParser.T__37)
                self.state = 377
                self.parameter_type_list()
                self.state = 378
                self.match(CParser.T__38)
                pass

            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 380
                self.match(CParser.T__37)
                self.state = 381
                self.identifier_list()
                self.state = 382
                self.match(CParser.T__38)
                pass

            elif la_ == 5:
                self.enterOuterAlt(localctx, 5)
                self.state = 384
                self.match(CParser.T__37)
                self.state = 385
                self.match(CParser.T__38)
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'pointer' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class PointerContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # With i=None return all type_qualifier children; otherwise the i-th one.
        # @param i=None Type: int
        def type_qualifier(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Type_qualifierContext)
            else:
                return self.getTypedRuleContext(CParser.Type_qualifierContext,i)

        def pointer(self):
            # Nested 'pointer' sub-context, if one was parsed.
            return self.getTypedRuleContext(CParser.PointerContext,0)

        def getRuleIndex(self):
            return CParser.RULE_pointer

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterPointer" ):
                listener.enterPointer(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitPointer" ):
                listener.exitPointer(self)
    # Parse a 'pointer'. Decision 42 picks among:
    #   alt 1: T__41 type_qualifier+ pointer?   (qualifier loop is decision 40,
    #          optional nested pointer is decision 41)
    #   alt 2: T__41 pointer
    #   alt 3: T__41
    # (ANTLR-generated; state numbers index into the ATN — do not edit by hand.)
    def pointer(self):
        localctx = CParser.PointerContext(self, self._ctx, self.state)
        self.enterRule(localctx, 48, self.RULE_pointer)
        try:
            self.state = 400
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 388
                self.match(CParser.T__41)
                self.state = 390
                self._errHandler.sync(self)
                _alt = 1
                # One-or-more type qualifiers (first iteration is mandatory).
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt == 1:
                        self.state = 389
                        self.type_qualifier()

                    else:
                        raise NoViableAltException(self)
                    self.state = 392
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,40,self._ctx)

                self.state = 395
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
                # Optional trailing nested pointer.
                if la_ == 1:
                    self.state = 394
                    self.pointer()

                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 397
                self.match(CParser.T__41)
                self.state = 398
                self.pointer()
                pass

            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 399
                self.match(CParser.T__41)
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'parameter_type_list' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Parameter_type_listContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def parameter_list(self):
            # Child 'parameter_list' sub-context.
            return self.getTypedRuleContext(CParser.Parameter_listContext,0)

        def getRuleIndex(self):
            return CParser.RULE_parameter_type_list

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterParameter_type_list" ):
                listener.enterParameter_type_list(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitParameter_type_list" ):
                listener.exitParameter_type_list(self)
    # Parse a 'parameter_type_list': parameter_list (T__3 T__28? T__42)?.
    # (ANTLR-generated; do not edit by hand.)
    def parameter_type_list(self):
        localctx = CParser.Parameter_type_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 50, self.RULE_parameter_type_list)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 402
            self.parameter_list()
            self.state = 408
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional variadic tail: T__3 then optional T__28 then T__42.
            if _la==CParser.T__3:
                self.state = 403
                self.match(CParser.T__3)
                self.state = 405
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                if _la==CParser.T__28:
                    self.state = 404
                    self.match(CParser.T__28)

                self.state = 407
                self.match(CParser.T__42)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'parameter_list' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Parameter_listContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # With i=None return all parameter_declaration children; otherwise the i-th one.
        # @param i=None Type: int
        def parameter_declaration(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
            else:
                return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)

        def getRuleIndex(self):
            return CParser.RULE_parameter_list

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterParameter_list" ):
                listener.enterParameter_list(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitParameter_list" ):
                listener.exitParameter_list(self)
    # Parse a 'parameter_list':
    #   parameter_declaration (T__3 T__28? parameter_declaration)*
    # The separator loop is adaptive (decision 46) so a trailing T__3 that
    # belongs to the enclosing parameter_type_list is not consumed here.
    # (ANTLR-generated; do not edit by hand.)
    def parameter_list(self):
        localctx = CParser.Parameter_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 52, self.RULE_parameter_list)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 410
            self.parameter_declaration()
            self.state = 418
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 411
                    self.match(CParser.T__3)
                    self.state = 413
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,45,self._ctx)
                    # Optional T__28 before the next parameter declaration.
                    if la_ == 1:
                        self.state = 412
                        self.match(CParser.T__28)

                    self.state = 415
                    self.parameter_declaration()
                self.state = 420
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,46,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'parameter_declaration' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Parameter_declarationContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def declaration_specifiers(self):
            # Child 'declaration_specifiers' sub-context, if present.
            return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)

        # With i=None return all declarator children; otherwise the i-th one.
        # @param i=None Type: int
        def declarator(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.DeclaratorContext)
            else:
                return self.getTypedRuleContext(CParser.DeclaratorContext,i)

        # With i=None return all abstract_declarator children; otherwise the i-th one.
        # @param i=None Type: int
        def abstract_declarator(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Abstract_declaratorContext)
            else:
                return self.getTypedRuleContext(CParser.Abstract_declaratorContext,i)

        def IDENTIFIER(self):
            # Bare identifier, for the pointer* IDENTIFIER alternative.
            return self.getToken(CParser.IDENTIFIER, 0)

        # With i=None return all pointer children; otherwise the i-th one.
        # @param i=None Type: int
        def pointer(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.PointerContext)
            else:
                return self.getTypedRuleContext(CParser.PointerContext,i)

        def getRuleIndex(self):
            return CParser.RULE_parameter_declaration

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterParameter_declaration" ):
                listener.enterParameter_declaration(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitParameter_declaration" ):
                listener.exitParameter_declaration(self)
    # Parse a 'parameter_declaration'. Decision 51 picks between:
    #   alt 1: declaration_specifiers (declarator | abstract_declarator)* T__28?
    #   alt 2: pointer* IDENTIFIER
    # (ANTLR-generated; state numbers index into the ATN — do not edit by hand.)
    def parameter_declaration(self):
        localctx = CParser.Parameter_declarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 54, self.RULE_parameter_declaration)
        self._la = 0 # Token type
        try:
            self.state = 439
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,51,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 421
                self.declaration_specifiers()
                self.state = 426
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Loop while the lookahead can start a (abstract) declarator;
                # membership is tested with an offset bit-set over token types.
                while ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__39 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
                    self.state = 424
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,47,self._ctx)
                    if la_ == 1:
                        self.state = 422
                        self.declarator()
                        pass
                    elif la_ == 2:
                        self.state = 423
                        self.abstract_declarator()
                        pass

                    self.state = 428
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)

                self.state = 430
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional trailing T__28.
                if _la==CParser.T__28:
                    self.state = 429
                    self.match(CParser.T__28)

                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 435
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Zero-or-more leading pointers, then a bare identifier.
                while _la==CParser.T__41:
                    self.state = 432
                    self.pointer()
                    self.state = 437
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)

                self.state = 438
                self.match(CParser.IDENTIFIER)
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'identifier_list' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Identifier_listContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        # With i=None return all IDENTIFIER tokens; otherwise the i-th one.
        # @param i=None Type: int
        def IDENTIFIER(self,i=None):
            if i is None:
                return self.getTokens(CParser.IDENTIFIER)
            else:
                return self.getToken(CParser.IDENTIFIER, i)

        def getRuleIndex(self):
            return CParser.RULE_identifier_list

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterIdentifier_list" ):
                listener.enterIdentifier_list(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitIdentifier_list" ):
                listener.exitIdentifier_list(self)
    # Parse an 'identifier_list': IDENTIFIER (T__3 IDENTIFIER)*.
    # (ANTLR-generated; do not edit by hand.)
    def identifier_list(self):
        localctx = CParser.Identifier_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 56, self.RULE_identifier_list)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 441
            self.match(CParser.IDENTIFIER)
            self.state = 446
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Consume T__3-separated identifiers while the separator is next.
            while _la==CParser.T__3:
                self.state = 442
                self.match(CParser.T__3)
                self.state = 443
                self.match(CParser.IDENTIFIER)
                self.state = 448
                self._errHandler.sync(self)
                _la = self._input.LA(1)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'type_name' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Type_nameContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def specifier_qualifier_list(self):
            # Child 'specifier_qualifier_list' sub-context, if present.
            return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)

        def abstract_declarator(self):
            # Child 'abstract_declarator' sub-context, if present.
            return self.getTypedRuleContext(CParser.Abstract_declaratorContext,0)

        def type_id(self):
            # Child 'type_id' sub-context, if the type_id alternative was taken.
            return self.getTypedRuleContext(CParser.Type_idContext,0)

        def getRuleIndex(self):
            return CParser.RULE_type_name

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterType_name" ):
                listener.enterType_name(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitType_name" ):
                listener.exitType_name(self)
    # Parse a 'type_name'. Decision 54 picks between:
    #   alt 1: specifier_qualifier_list abstract_declarator?
    #   alt 2: type_id
    # (ANTLR-generated; do not edit by hand.)
    def type_name(self):
        localctx = CParser.Type_nameContext(self, self._ctx, self.state)
        self.enterRule(localctx, 58, self.RULE_type_name)
        self._la = 0 # Token type
        try:
            self.state = 454
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,54,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 449
                self.specifier_qualifier_list()
                self.state = 451
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional abstract declarator (starts with T__37, T__39 or T__41).
                if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__37) | (1 << CParser.T__39) | (1 << CParser.T__41))) != 0):
                    self.state = 450
                    self.abstract_declarator()

                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 453
                self.type_id()
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'abstract_declarator' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Abstract_declaratorContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def pointer(self):
            # Child 'pointer' sub-context, if one was parsed.
            return self.getTypedRuleContext(CParser.PointerContext,0)

        def direct_abstract_declarator(self):
            # Child 'direct_abstract_declarator' sub-context, if one was parsed.
            return self.getTypedRuleContext(CParser.Direct_abstract_declaratorContext,0)

        def getRuleIndex(self):
            return CParser.RULE_abstract_declarator

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAbstract_declarator" ):
                listener.enterAbstract_declarator(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAbstract_declarator" ):
                listener.exitAbstract_declarator(self)
    # Parse an 'abstract_declarator':
    #   alt 1 (starts with T__41): pointer direct_abstract_declarator?
    #                              (optional part decided by decision 55)
    #   alt 2 (starts with T__37/T__39): direct_abstract_declarator
    # (ANTLR-generated; do not edit by hand.)
    def abstract_declarator(self):
        localctx = CParser.Abstract_declaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 60, self.RULE_abstract_declarator)
        try:
            self.state = 461
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.T__41]:
                self.enterOuterAlt(localctx, 1)
                self.state = 456
                self.pointer()
                self.state = 458
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,55,self._ctx)
                # Optional direct abstract declarator after the pointer.
                if la_ == 1:
                    self.state = 457
                    self.direct_abstract_declarator()

                pass
            elif token in [CParser.T__37, CParser.T__39]:
                self.enterOuterAlt(localctx, 2)
                self.state = 460
                self.direct_abstract_declarator()
                pass
            else:
                raise NoViableAltException(self)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'direct_abstract_declarator' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Direct_abstract_declaratorContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def abstract_declarator(self):
            # Parenthesized nested abstract declarator, if present.
            return self.getTypedRuleContext(CParser.Abstract_declaratorContext,0)

        # With i=None return all abstract_declarator_suffix children; otherwise the i-th one.
        # @param i=None Type: int
        def abstract_declarator_suffix(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Abstract_declarator_suffixContext)
            else:
                return self.getTypedRuleContext(CParser.Abstract_declarator_suffixContext,i)

        def getRuleIndex(self):
            return CParser.RULE_direct_abstract_declarator

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterDirect_abstract_declarator" ):
                listener.enterDirect_abstract_declarator(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitDirect_abstract_declarator" ):
                listener.exitDirect_abstract_declarator(self)
    # Parse a 'direct_abstract_declarator':
    #   ( T__37 abstract_declarator T__38 | abstract_declarator_suffix )
    #   abstract_declarator_suffix*
    # Head chosen by decision 57; trailing suffix loop by decision 58.
    # (ANTLR-generated; do not edit by hand.)
    def direct_abstract_declarator(self):
        localctx = CParser.Direct_abstract_declaratorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 62, self.RULE_direct_abstract_declarator)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 468
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
            if la_ == 1:
                self.state = 463
                self.match(CParser.T__37)
                self.state = 464
                self.abstract_declarator()
                self.state = 465
                self.match(CParser.T__38)
                pass

            elif la_ == 2:
                self.state = 467
                self.abstract_declarator_suffix()
                pass

            self.state = 473
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
            # Zero-or-more additional suffixes.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 470
                    self.abstract_declarator_suffix()
                self.state = 475
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,58,self._ctx)

        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'abstract_declarator_suffix' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class Abstract_declarator_suffixContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def constant_expression(self):
            # Array-size expression, if this suffix had one.
            return self.getTypedRuleContext(CParser.Constant_expressionContext,0)

        def parameter_type_list(self):
            # Function parameter list, if this suffix had one.
            return self.getTypedRuleContext(CParser.Parameter_type_listContext,0)

        def getRuleIndex(self):
            return CParser.RULE_abstract_declarator_suffix

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAbstract_declarator_suffix" ):
                listener.enterAbstract_declarator_suffix(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAbstract_declarator_suffix" ):
                listener.exitAbstract_declarator_suffix(self)
    # Parse an 'abstract_declarator_suffix'. Decision 59 picks among:
    #   1: T__39 T__40                       2: T__39 constant_expression T__40
    #   3: T__37 T__38                       4: T__37 parameter_type_list T__38
    # (ANTLR-generated; do not edit by hand.)
    def abstract_declarator_suffix(self):
        localctx = CParser.Abstract_declarator_suffixContext(self, self._ctx, self.state)
        self.enterRule(localctx, 64, self.RULE_abstract_declarator_suffix)
        try:
            self.state = 488
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,59,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 476
                self.match(CParser.T__39)
                self.state = 477
                self.match(CParser.T__40)
                pass

            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 478
                self.match(CParser.T__39)
                self.state = 479
                self.constant_expression()
                self.state = 480
                self.match(CParser.T__40)
                pass

            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 482
                self.match(CParser.T__37)
                self.state = 483
                self.match(CParser.T__38)
                pass

            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 484
                self.match(CParser.T__37)
                self.state = 485
                self.parameter_type_list()
                self.state = 486
                self.match(CParser.T__38)
                pass


        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    # Parse-tree context node for the 'initializer' grammar rule
    # (ANTLR-generated; do not edit logic by hand).
    class InitializerContext(ParserRuleContext):
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser

        def assignment_expression(self):
            # Scalar initializer expression, if that alternative was taken.
            return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)

        def initializer_list(self):
            # Braced initializer list, if that alternative was taken.
            return self.getTypedRuleContext(CParser.Initializer_listContext,0)

        def getRuleIndex(self):
            return CParser.RULE_initializer

        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterInitializer" ):
                listener.enterInitializer(self)

        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitInitializer" ):
                listener.exitInitializer(self)
def initializer(self):
    """Parse the `initializer` rule (rule index 66): either an assignment_expression,
    or a braced initializer_list with an optional trailing comma.

    ANTLR-generated; dispatch is by one-token lookahead (LA(1)).
    """
    localctx = CParser.InitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_initializer)
    self._la = 0 # Token type
    try:
        self.state = 498
        self._errHandler.sync(self)
        token = self._input.LA(1)
        # First set of assignment_expression: operators, literals and IDENTIFIER.
        if token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
            self.enterOuterAlt(localctx, 1)
            self.state = 490
            self.assignment_expression()
            pass
        elif token in [CParser.T__0]:
            # Braced initializer: T__0 ... T__19 delimiters, optional trailing T__3 (comma).
            self.enterOuterAlt(localctx, 2)
            self.state = 491
            self.match(CParser.T__0)
            self.state = 492
            self.initializer_list()
            self.state = 494
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__3:
                self.state = 493
                self.match(CParser.T__3)
            self.state = 496
            self.match(CParser.T__19)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Initializer_listContext(ParserRuleContext):
    """Parse-tree node for the `initializer_list` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # @param i=None Type: int
    def initializer(self,i=None):
        # i=None returns all initializer children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.InitializerContext)
        else:
            return self.getTypedRuleContext(CParser.InitializerContext,i)
    def getRuleIndex(self):
        return CParser.RULE_initializer_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterInitializer_list" ):
            listener.enterInitializer_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitInitializer_list" ):
            listener.exitInitializer_list(self)
def initializer_list(self):
    """Parse the `initializer_list` rule (rule index 68): initializer (',' initializer)*.

    ANTLR-generated; the comma repetition is resolved by ATN decision 62.
    """
    localctx = CParser.Initializer_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 68, self.RULE_initializer_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 500
        self.initializer()
        self.state = 505
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
        # _alt==1 means another ',' initializer pair follows; 2 / INVALID ends the loop.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 501
                self.match(CParser.T__3)
                self.state = 502
                self.initializer()
            self.state = 507
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Argument_expression_listContext(ParserRuleContext):
    """Parse-tree node for the `argument_expression_list` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # @param i=None Type: int
    def assignment_expression(self,i=None):
        # i=None returns all argument expressions; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
        else:
            return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
    def getRuleIndex(self):
        return CParser.RULE_argument_expression_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterArgument_expression_list" ):
            listener.enterArgument_expression_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitArgument_expression_list" ):
            listener.exitArgument_expression_list(self)
def argument_expression_list(self):
    """Parse the `argument_expression_list` rule (rule index 70).

    Each argument is an assignment_expression optionally followed by a T__28
    token; arguments are separated by T__3 (comma). ANTLR-generated.
    """
    localctx = CParser.Argument_expression_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 70, self.RULE_argument_expression_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 508
        self.assignment_expression()
        self.state = 510
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional T__28 after the first argument.
        if _la==CParser.T__28:
            self.state = 509
            self.match(CParser.T__28)
        self.state = 519
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while _la==CParser.T__3:
            self.state = 512
            self.match(CParser.T__3)
            self.state = 513
            self.assignment_expression()
            self.state = 515
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__28:
                self.state = 514
                self.match(CParser.T__28)
            self.state = 521
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Additive_expressionContext(ParserRuleContext):
    """Parse-tree node for the `additive_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # @param i=None Type: int
    def multiplicative_expression(self,i=None):
        # i=None returns all operand children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.Multiplicative_expressionContext)
        else:
            return self.getTypedRuleContext(CParser.Multiplicative_expressionContext,i)
    def getRuleIndex(self):
        return CParser.RULE_additive_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterAdditive_expression" ):
            listener.enterAdditive_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitAdditive_expression" ):
            listener.exitAdditive_expression(self)
def additive_expression(self):
    """Parse the `additive_expression` rule (rule index 72):
    multiplicative_expression ((T__43 | T__44) multiplicative_expression)*.

    T__43/T__44 are the additive operator tokens (presumably '+' / '-').
    ANTLR-generated.
    """
    localctx = CParser.Additive_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 72, self.RULE_additive_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 522
        self.multiplicative_expression()
        self.state = 529
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while _la==CParser.T__43 or _la==CParser.T__44:
            self.state = 527
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.T__43]:
                self.state = 523
                self.match(CParser.T__43)
                self.state = 524
                self.multiplicative_expression()
                pass
            elif token in [CParser.T__44]:
                self.state = 525
                self.match(CParser.T__44)
                self.state = 526
                self.multiplicative_expression()
                pass
            else:
                raise NoViableAltException(self)
            self.state = 531
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Multiplicative_expressionContext(ParserRuleContext):
    """Parse-tree node for the `multiplicative_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # @param i=None Type: int
    def cast_expression(self,i=None):
        # i=None returns all operand children; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.Cast_expressionContext)
        else:
            return self.getTypedRuleContext(CParser.Cast_expressionContext,i)
    def getRuleIndex(self):
        return CParser.RULE_multiplicative_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterMultiplicative_expression" ):
            listener.enterMultiplicative_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitMultiplicative_expression" ):
            listener.exitMultiplicative_expression(self)
def multiplicative_expression(self):
    """Parse the `multiplicative_expression` rule (rule index 74):
    cast_expression ((T__41 | T__45 | T__46) cast_expression)*.

    The while-condition bitmask test is the generated form of
    `LA(1) in {T__41, T__45, T__46}` (the multiplicative operator tokens).
    ANTLR-generated.
    """
    localctx = CParser.Multiplicative_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 74, self.RULE_multiplicative_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 532
        self.cast_expression()
        self.state = 541
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__45) | (1 << CParser.T__46))) != 0):
            self.state = 539
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.T__41]:
                self.state = 533
                self.match(CParser.T__41)
                self.state = 534
                self.cast_expression()
                pass
            elif token in [CParser.T__45]:
                self.state = 535
                self.match(CParser.T__45)
                self.state = 536
                self.cast_expression()
                pass
            elif token in [CParser.T__46]:
                self.state = 537
                self.match(CParser.T__46)
                self.state = 538
                self.cast_expression()
                pass
            else:
                raise NoViableAltException(self)
            self.state = 543
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Cast_expressionContext(ParserRuleContext):
    """Parse-tree node for the `cast_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def type_name(self):
        # Present only for the '(' type_name ')' cast alternative.
        return self.getTypedRuleContext(CParser.Type_nameContext,0)
    def cast_expression(self):
        return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
    def unary_expression(self):
        return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
    def getRuleIndex(self):
        return CParser.RULE_cast_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterCast_expression" ):
            listener.enterCast_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitCast_expression" ):
            listener.exitCast_expression(self)
def cast_expression(self):
    """Parse the `cast_expression` rule (rule index 76):
    '(' type_name ')' cast_expression | unary_expression.

    ATN decision 70 disambiguates a cast from a parenthesized expression.
    ANTLR-generated.
    """
    localctx = CParser.Cast_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 76, self.RULE_cast_expression)
    try:
        self.state = 550
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,70,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 544
            self.match(CParser.T__37)
            self.state = 545
            self.type_name()
            self.state = 546
            self.match(CParser.T__38)
            self.state = 547
            self.cast_expression()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 549
            self.unary_expression()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Unary_expressionContext(ParserRuleContext):
    """Parse-tree node for the `unary_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def postfix_expression(self):
        return self.getTypedRuleContext(CParser.Postfix_expressionContext,0)
    def unary_expression(self):
        return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
    def unary_operator(self):
        return self.getTypedRuleContext(CParser.Unary_operatorContext,0)
    def cast_expression(self):
        return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
    def type_name(self):
        # Present only for the sizeof '(' type_name ')' style alternative.
        return self.getTypedRuleContext(CParser.Type_nameContext,0)
    def getRuleIndex(self):
        return CParser.RULE_unary_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterUnary_expression" ):
            listener.enterUnary_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitUnary_expression" ):
            listener.exitUnary_expression(self)
def unary_expression(self):
    """Parse the `unary_expression` rule (rule index 78).

    Six alternatives selected by ATN decision 71: postfix expression,
    T__47/T__48 prefix forms (presumably '++'/'--'), unary_operator
    cast_expression, and two T__49 forms (presumably `sizeof expr` and
    `sizeof '(' type_name ')'` — confirm against the generated token list).
    ANTLR-generated.
    """
    localctx = CParser.Unary_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 78, self.RULE_unary_expression)
    try:
        self.state = 567
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,71,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 552
            self.postfix_expression()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 553
            self.match(CParser.T__47)
            self.state = 554
            self.unary_expression()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 555
            self.match(CParser.T__48)
            self.state = 556
            self.unary_expression()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 557
            self.unary_operator()
            self.state = 558
            self.cast_expression()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 560
            self.match(CParser.T__49)
            self.state = 561
            self.unary_expression()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 562
            self.match(CParser.T__49)
            self.state = 563
            self.match(CParser.T__37)
            self.state = 564
            self.type_name()
            self.state = 565
            self.match(CParser.T__38)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Postfix_expressionContext(ParserRuleContext):
    """Parse-tree node for the `postfix_expression` grammar rule (ANTLR-generated).

    Carries grammar-action state used by CParser.postfix_expression to build
    the textual name of a called function (FuncCallText) and the labeled
    tokens/contexts (p, a, c, b, x, y, z) referenced by those actions.
    """
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        # Accumulated source text of the call target, built by rule actions.
        self.FuncCallText = ''
        self.p = None # Primary_expressionContext
        self.a = None # Token
        self.c = None # Argument_expression_listContext
        self.b = None # Token
        self.x = None # Token
        self.y = None # Token
        self.z = None # Token
    def primary_expression(self):
        return self.getTypedRuleContext(CParser.Primary_expressionContext,0)
    # @param i=None Type: int
    def expression(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.ExpressionContext)
        else:
            return self.getTypedRuleContext(CParser.ExpressionContext,i)
    # @param i=None Type: int
    def macro_parameter_list(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Macro_parameter_listContext)
        else:
            return self.getTypedRuleContext(CParser.Macro_parameter_listContext,i)
    # @param i=None Type: int
    def argument_expression_list(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Argument_expression_listContext)
        else:
            return self.getTypedRuleContext(CParser.Argument_expression_listContext,i)
    # @param i=None Type: int
    def IDENTIFIER(self,i=None):
        if i is None:
            return self.getTokens(CParser.IDENTIFIER)
        else:
            return self.getToken(CParser.IDENTIFIER, i)
    def getRuleIndex(self):
        return CParser.RULE_postfix_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterPostfix_expression" ):
            listener.enterPostfix_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitPostfix_expression" ):
            listener.exitPostfix_expression(self)
def postfix_expression(self):
    """Parse the `postfix_expression` rule (rule index 80) and record function calls.

    Embedded grammar actions accumulate the call-target text in
    `self.FuncCallText` ('.' / '->' member access appends, T__41 access
    resets it to the member name) and invoke `self.StoreFunctionCalling`
    (defined elsewhere on this parser) with the call's start/end
    line/column, target text, and argument text whenever a '(' ... ')'
    suffix is matched. ANTLR-generated; do not edit state numbers by hand.
    """
    localctx = CParser.Postfix_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 80, self.RULE_postfix_expression)
    # Reset the accumulator before parsing this expression.
    self.FuncCallText=''
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 569
        localctx.p = self.primary_expression()
        # Seed FuncCallText with the primary expression's source text.
        self.FuncCallText += (None if localctx.p is None else self._input.getText((localctx.p.start,localctx.p.stop)))
        self.state = 600
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 598
                self._errHandler.sync(self)
                # Decision 72 picks which postfix suffix follows.
                la_ = self._interp.adaptivePredict(self._input,72,self._ctx)
                if la_ == 1:
                    # '[' expression ']' array subscript.
                    self.state = 571
                    self.match(CParser.T__39)
                    self.state = 572
                    self.expression()
                    self.state = 573
                    self.match(CParser.T__40)
                    pass
                elif la_ == 2:
                    # '(' ')' — call with no arguments; record it.
                    self.state = 575
                    self.match(CParser.T__37)
                    self.state = 576
                    localctx.a = self.match(CParser.T__38)
                    self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.a is None else localctx.a.line), localctx.a.column, self.FuncCallText, '')
                    pass
                elif la_ == 3:
                    # '(' argument_expression_list ')' — call with arguments; record it.
                    self.state = 578
                    self.match(CParser.T__37)
                    self.state = 579
                    localctx.c = self.argument_expression_list()
                    self.state = 580
                    localctx.b = self.match(CParser.T__38)
                    self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.b is None else localctx.b.line), localctx.b.column, self.FuncCallText, (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
                    pass
                elif la_ == 4:
                    # '(' macro_parameter_list ')' — not recorded as a call.
                    self.state = 583
                    self.match(CParser.T__37)
                    self.state = 584
                    self.macro_parameter_list()
                    self.state = 585
                    self.match(CParser.T__38)
                    pass
                elif la_ == 5:
                    # T__50 IDENTIFIER member access: append '.' + member name.
                    self.state = 587
                    self.match(CParser.T__50)
                    self.state = 588
                    localctx.x = self.match(CParser.IDENTIFIER)
                    self.FuncCallText += '.' + (None if localctx.x is None else localctx.x.text)
                    pass
                elif la_ == 6:
                    # T__41 IDENTIFIER: action replaces (not appends) the call text.
                    self.state = 590
                    self.match(CParser.T__41)
                    self.state = 591
                    localctx.y = self.match(CParser.IDENTIFIER)
                    self.FuncCallText = (None if localctx.y is None else localctx.y.text)
                    pass
                elif la_ == 7:
                    # T__51 IDENTIFIER pointer access: append '->' + member name.
                    self.state = 593
                    self.match(CParser.T__51)
                    self.state = 594
                    localctx.z = self.match(CParser.IDENTIFIER)
                    self.FuncCallText += '->' + (None if localctx.z is None else localctx.z.text)
                    pass
                elif la_ == 8:
                    self.state = 596
                    self.match(CParser.T__47)
                    pass
                elif la_ == 9:
                    self.state = 597
                    self.match(CParser.T__48)
                    pass
            self.state = 602
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Macro_parameter_listContext(ParserRuleContext):
    """Parse-tree node for the `macro_parameter_list` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # @param i=None Type: int
    def parameter_declaration(self,i=None):
        # i=None returns all parameter declarations; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
        else:
            return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)
    def getRuleIndex(self):
        return CParser.RULE_macro_parameter_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterMacro_parameter_list" ):
            listener.enterMacro_parameter_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitMacro_parameter_list" ):
            listener.exitMacro_parameter_list(self)
def macro_parameter_list(self):
    """Parse the `macro_parameter_list` rule (rule index 82):
    parameter_declaration (',' parameter_declaration)*. ANTLR-generated.
    """
    localctx = CParser.Macro_parameter_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 82, self.RULE_macro_parameter_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 603
        self.parameter_declaration()
        self.state = 608
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # T__3 is the comma separator.
        while _la==CParser.T__3:
            self.state = 604
            self.match(CParser.T__3)
            self.state = 605
            self.parameter_declaration()
            self.state = 610
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Unary_operatorContext(ParserRuleContext):
    """Parse-tree node for the `unary_operator` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def getRuleIndex(self):
        return CParser.RULE_unary_operator
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterUnary_operator" ):
            listener.enterUnary_operator(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitUnary_operator" ):
            listener.exitUnary_operator(self)
def unary_operator(self):
    """Parse the `unary_operator` rule (rule index 84).

    Matches a single token from the set {T__41, T__43, T__44, T__52, T__53,
    T__54} via the generated bitmask test; recovers inline on any other
    token. ANTLR-generated.
    """
    localctx = CParser.Unary_operatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 84, self.RULE_unary_operator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 611
        _la = self._input.LA(1)
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Primary_expressionContext(ParserRuleContext):
    """Parse-tree node for the `primary_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)
    def constant(self):
        return self.getTypedRuleContext(CParser.ConstantContext,0)
    def expression(self):
        # Present only for the parenthesized-expression alternative.
        return self.getTypedRuleContext(CParser.ExpressionContext,0)
    def getRuleIndex(self):
        return CParser.RULE_primary_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterPrimary_expression" ):
            listener.enterPrimary_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitPrimary_expression" ):
            listener.exitPrimary_expression(self)
def primary_expression(self):
    """Parse the `primary_expression` rule (rule index 86):
    IDENTIFIER | constant | '(' expression ')'. Dispatch via ATN decision 75.
    ANTLR-generated.
    """
    localctx = CParser.Primary_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 86, self.RULE_primary_expression)
    try:
        self.state = 619
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,75,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 613
            self.match(CParser.IDENTIFIER)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 614
            self.constant()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 615
            self.match(CParser.T__37)
            self.state = 616
            self.expression()
            self.state = 617
            self.match(CParser.T__38)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ConstantContext(ParserRuleContext):
    """Parse-tree node for the `constant` grammar rule (ANTLR-generated).

    IDENTIFIER and STRING_LITERAL accessors take an index because the
    string-literal alternative can match several of each.
    """
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def HEX_LITERAL(self):
        return self.getToken(CParser.HEX_LITERAL, 0)
    def OCTAL_LITERAL(self):
        return self.getToken(CParser.OCTAL_LITERAL, 0)
    def DECIMAL_LITERAL(self):
        return self.getToken(CParser.DECIMAL_LITERAL, 0)
    def CHARACTER_LITERAL(self):
        return self.getToken(CParser.CHARACTER_LITERAL, 0)
    # @param i=None Type: int
    def IDENTIFIER(self,i=None):
        if i is None:
            return self.getTokens(CParser.IDENTIFIER)
        else:
            return self.getToken(CParser.IDENTIFIER, i)
    # @param i=None Type: int
    def STRING_LITERAL(self,i=None):
        if i is None:
            return self.getTokens(CParser.STRING_LITERAL)
        else:
            return self.getToken(CParser.STRING_LITERAL, i)
    def FLOATING_POINT_LITERAL(self):
        return self.getToken(CParser.FLOATING_POINT_LITERAL, 0)
    def getRuleIndex(self):
        return CParser.RULE_constant
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterConstant" ):
            listener.enterConstant(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitConstant" ):
            listener.exitConstant(self)
def constant(self):
    """Parse the `constant` rule (rule index 88).

    Alternatives by LA(1): hex / octal / decimal / character / floating-point
    literal, or the compound string form — one-or-more groups of
    (IDENTIFIER* STRING_LITERAL+) followed by trailing IDENTIFIERs, which
    covers macro-prefixed string literals (e.g. wide/quoted macros) in C
    source. ANTLR-generated; nested loops are driven by ATN decisions 77/78.
    """
    localctx = CParser.ConstantContext(self, self._ctx, self.state)
    self.enterRule(localctx, 88, self.RULE_constant)
    self._la = 0 # Token type
    try:
        self.state = 647
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.HEX_LITERAL]:
            self.enterOuterAlt(localctx, 1)
            self.state = 621
            self.match(CParser.HEX_LITERAL)
            pass
        elif token in [CParser.OCTAL_LITERAL]:
            self.enterOuterAlt(localctx, 2)
            self.state = 622
            self.match(CParser.OCTAL_LITERAL)
            pass
        elif token in [CParser.DECIMAL_LITERAL]:
            self.enterOuterAlt(localctx, 3)
            self.state = 623
            self.match(CParser.DECIMAL_LITERAL)
            pass
        elif token in [CParser.CHARACTER_LITERAL]:
            self.enterOuterAlt(localctx, 4)
            self.state = 624
            self.match(CParser.CHARACTER_LITERAL)
            pass
        elif token in [CParser.IDENTIFIER, CParser.STRING_LITERAL]:
            self.enterOuterAlt(localctx, 5)
            self.state = 636
            self._errHandler.sync(self)
            _alt = 1
            # Outer (...)+ loop: each pass matches IDENTIFIER* STRING_LITERAL+.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 628
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    while _la==CParser.IDENTIFIER:
                        self.state = 625
                        self.match(CParser.IDENTIFIER)
                        self.state = 630
                        self._errHandler.sync(self)
                        _la = self._input.LA(1)
                    self.state = 632
                    self._errHandler.sync(self)
                    _alt = 1
                    # Inner STRING_LITERAL+ loop (decision 77).
                    while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                        if _alt == 1:
                            self.state = 631
                            self.match(CParser.STRING_LITERAL)
                        else:
                            raise NoViableAltException(self)
                        self.state = 634
                        self._errHandler.sync(self)
                        _alt = self._interp.adaptivePredict(self._input,77,self._ctx)
                else:
                    raise NoViableAltException(self)
                self.state = 638
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,78,self._ctx)
            self.state = 643
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Trailing IDENTIFIER* after the last string group.
            while _la==CParser.IDENTIFIER:
                self.state = 640
                self.match(CParser.IDENTIFIER)
                self.state = 645
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            pass
        elif token in [CParser.FLOATING_POINT_LITERAL]:
            self.enterOuterAlt(localctx, 6)
            self.state = 646
            self.match(CParser.FLOATING_POINT_LITERAL)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class ExpressionContext(ParserRuleContext):
    """Parse-tree node for the `expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # @param i=None Type: int
    def assignment_expression(self,i=None):
        # i=None returns all comma-separated sub-expressions; otherwise the i-th.
        if i is None:
            return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
        else:
            return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
    def getRuleIndex(self):
        return CParser.RULE_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterExpression" ):
            listener.enterExpression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitExpression" ):
            listener.exitExpression(self)
def expression(self):
    """Parse the `expression` rule (rule index 90):
    assignment_expression (',' assignment_expression)*. ANTLR-generated.
    """
    localctx = CParser.ExpressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 90, self.RULE_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 649
        self.assignment_expression()
        self.state = 654
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # T__3 is the comma operator/separator.
        while _la==CParser.T__3:
            self.state = 650
            self.match(CParser.T__3)
            self.state = 651
            self.assignment_expression()
            self.state = 656
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Constant_expressionContext(ParserRuleContext):
    """Parse-tree node for the `constant_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def conditional_expression(self):
        return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
    def getRuleIndex(self):
        return CParser.RULE_constant_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterConstant_expression" ):
            listener.enterConstant_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitConstant_expression" ):
            listener.exitConstant_expression(self)
def constant_expression(self):
    """Parse the `constant_expression` rule (rule index 92): a single
    conditional_expression. ANTLR-generated pass-through rule.
    """
    localctx = CParser.Constant_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 92, self.RULE_constant_expression)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 657
        self.conditional_expression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Assignment_expressionContext(ParserRuleContext):
    """Parse-tree node for the `assignment_expression` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def lvalue(self):
        # Present only for the assignment alternative.
        return self.getTypedRuleContext(CParser.LvalueContext,0)
    def assignment_operator(self):
        return self.getTypedRuleContext(CParser.Assignment_operatorContext,0)
    def assignment_expression(self):
        return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)
    def conditional_expression(self):
        # Present only for the non-assignment alternative.
        return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
    def getRuleIndex(self):
        return CParser.RULE_assignment_expression
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterAssignment_expression" ):
            listener.enterAssignment_expression(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitAssignment_expression" ):
            listener.exitAssignment_expression(self)
def assignment_expression(self):
    """Parse the `assignment_expression` rule (rule index 94):
    lvalue assignment_operator assignment_expression | conditional_expression.
    ATN decision 82 disambiguates the two alternatives. ANTLR-generated.
    """
    localctx = CParser.Assignment_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 94, self.RULE_assignment_expression)
    try:
        self.state = 664
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,82,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 659
            self.lvalue()
            self.state = 660
            self.assignment_operator()
            self.state = 661
            self.assignment_expression()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 663
            self.conditional_expression()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class LvalueContext(ParserRuleContext):
    """Parse-tree node for the `lvalue` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def unary_expression(self):
        return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
    def getRuleIndex(self):
        return CParser.RULE_lvalue
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterLvalue" ):
            listener.enterLvalue(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitLvalue" ):
            listener.exitLvalue(self)
def lvalue(self):
    """Parse the `lvalue` rule (rule index 96): a single unary_expression.
    ANTLR-generated pass-through rule.
    """
    localctx = CParser.LvalueContext(self, self._ctx, self.state)
    self.enterRule(localctx, 96, self.RULE_lvalue)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 666
        self.unary_expression()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Assignment_operatorContext(ParserRuleContext):
    """Parse-tree node for the `assignment_operator` grammar rule (ANTLR-generated)."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def getRuleIndex(self):
        return CParser.RULE_assignment_operator
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterAssignment_operator" ):
            listener.enterAssignment_operator(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitAssignment_operator" ):
            listener.exitAssignment_operator(self)
def assignment_operator(self):
    """Parse 'assignment_operator': one token from the assignment-operator set
    ('=', '*=', '/=', etc. — encoded below as a generated token bit mask)."""
    localctx = CParser.Assignment_operatorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 98, self.RULE_assignment_operator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 668  # generated parser-state bookkeeping
        _la = self._input.LA(1)
        # Membership test against the generated operator-token bit set.
        if not(((((_la - 5)) & ~0x3f) == 0 and ((1 << (_la - 5)) & ((1 << (CParser.T__4 - 5)) | (1 << (CParser.T__55 - 5)) | (1 << (CParser.T__56 - 5)) | (1 << (CParser.T__57 - 5)) | (1 << (CParser.T__58 - 5)) | (1 << (CParser.T__59 - 5)) | (1 << (CParser.T__60 - 5)) | (1 << (CParser.T__61 - 5)) | (1 << (CParser.T__62 - 5)) | (1 << (CParser.T__63 - 5)) | (1 << (CParser.T__64 - 5)))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Conditional_expressionContext(ParserRuleContext):
    """Parse-tree node for 'conditional_expression' (C ternary operator rule)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser
        # Labeled sub-rule 'e': the condition (a Logical_or_expressionContext).
        self.e = None

    def logical_or_expression(self):
        return self.getTypedRuleContext(CParser.Logical_or_expressionContext, 0)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def conditional_expression(self):
        return self.getTypedRuleContext(CParser.Conditional_expressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_conditional_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterConditional_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitConditional_expression", None)
        if handler is not None:
            handler(self)
def conditional_expression(self):
    """Parse 'conditional_expression':
    logical_or_expression ('?' expression ':' conditional_expression)?

    When the ternary form is taken, the condition's source span and text are
    recorded via StorePredicateExpression (used by ECC's predicate checks).
    """
    localctx = CParser.Conditional_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 100, self.RULE_conditional_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 670  # generated parser-state bookkeeping
        localctx.e = self.logical_or_expression()
        self.state = 677
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional ternary tail: '?' expression ':' conditional_expression
        if _la==CParser.T__65:
            self.state = 671
            self.match(CParser.T__65)
            self.state = 672
            self.expression()
            self.state = 673
            self.match(CParser.T__22)
            self.state = 674
            self.conditional_expression()
            # Record the condition's start/stop position and source text.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Logical_or_expressionContext(ParserRuleContext):
    """Parse-tree node for 'logical_or_expression' ('||'-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def logical_and_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Logical_and_expressionContext)
        return self.getTypedRuleContext(CParser.Logical_and_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_logical_or_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterLogical_or_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitLogical_or_expression", None)
        if handler is not None:
            handler(self)
def logical_or_expression(self):
    """Parse 'logical_or_expression':
    logical_and_expression ('||' logical_and_expression)*"""
    localctx = CParser.Logical_or_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 102, self.RULE_logical_or_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 679  # generated parser-state bookkeeping
        self.logical_and_expression()
        self.state = 684
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more '||' operand pairs.
        while _la==CParser.T__66:
            self.state = 680
            self.match(CParser.T__66)
            self.state = 681
            self.logical_and_expression()
            self.state = 686
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Logical_and_expressionContext(ParserRuleContext):
    """Parse-tree node for 'logical_and_expression' ('&&'-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def inclusive_or_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Inclusive_or_expressionContext)
        return self.getTypedRuleContext(CParser.Inclusive_or_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_logical_and_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterLogical_and_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitLogical_and_expression", None)
        if handler is not None:
            handler(self)
def logical_and_expression(self):
    """Parse 'logical_and_expression':
    inclusive_or_expression ('&&' inclusive_or_expression)*"""
    localctx = CParser.Logical_and_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 104, self.RULE_logical_and_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 687  # generated parser-state bookkeeping
        self.inclusive_or_expression()
        self.state = 692
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more '&&' operand pairs.
        while _la==CParser.T__67:
            self.state = 688
            self.match(CParser.T__67)
            self.state = 689
            self.inclusive_or_expression()
            self.state = 694
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Inclusive_or_expressionContext(ParserRuleContext):
    """Parse-tree node for 'inclusive_or_expression' ('|'-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def exclusive_or_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Exclusive_or_expressionContext)
        return self.getTypedRuleContext(CParser.Exclusive_or_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_inclusive_or_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterInclusive_or_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitInclusive_or_expression", None)
        if handler is not None:
            handler(self)
def inclusive_or_expression(self):
    """Parse 'inclusive_or_expression':
    exclusive_or_expression ('|' exclusive_or_expression)*"""
    localctx = CParser.Inclusive_or_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 106, self.RULE_inclusive_or_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 695  # generated parser-state bookkeeping
        self.exclusive_or_expression()
        self.state = 700
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more '|' operand pairs.
        while _la==CParser.T__68:
            self.state = 696
            self.match(CParser.T__68)
            self.state = 697
            self.exclusive_or_expression()
            self.state = 702
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Exclusive_or_expressionContext(ParserRuleContext):
    """Parse-tree node for 'exclusive_or_expression' ('^'-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def and_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.And_expressionContext)
        return self.getTypedRuleContext(CParser.And_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_exclusive_or_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterExclusive_or_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitExclusive_or_expression", None)
        if handler is not None:
            handler(self)
def exclusive_or_expression(self):
    """Parse 'exclusive_or_expression':
    and_expression ('^' and_expression)*"""
    localctx = CParser.Exclusive_or_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 108, self.RULE_exclusive_or_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 703  # generated parser-state bookkeeping
        self.and_expression()
        self.state = 708
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more '^' operand pairs.
        while _la==CParser.T__69:
            self.state = 704
            self.match(CParser.T__69)
            self.state = 705
            self.and_expression()
            self.state = 710
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class And_expressionContext(ParserRuleContext):
    """Parse-tree node for 'and_expression' ('&'-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def equality_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Equality_expressionContext)
        return self.getTypedRuleContext(CParser.Equality_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_and_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterAnd_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitAnd_expression", None)
        if handler is not None:
            handler(self)
def and_expression(self):
    """Parse 'and_expression':
    equality_expression ('&' equality_expression)*"""
    localctx = CParser.And_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 110, self.RULE_and_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 711  # generated parser-state bookkeeping
        self.equality_expression()
        self.state = 716
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more '&' operand pairs.
        while _la==CParser.T__52:
            self.state = 712
            self.match(CParser.T__52)
            self.state = 713
            self.equality_expression()
            self.state = 718
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Equality_expressionContext(ParserRuleContext):
    """Parse-tree node for 'equality_expression' ('=='/'!='-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def relational_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Relational_expressionContext)
        return self.getTypedRuleContext(CParser.Relational_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_equality_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterEquality_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitEquality_expression", None)
        if handler is not None:
            handler(self)
def equality_expression(self):
    """Parse 'equality_expression':
    relational_expression (('==' | '!=') relational_expression)*"""
    localctx = CParser.Equality_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 112, self.RULE_equality_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 719  # generated parser-state bookkeeping
        self.relational_expression()
        self.state = 724
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more (operator, operand) pairs.
        while _la==CParser.T__70 or _la==CParser.T__71:
            self.state = 720
            _la = self._input.LA(1)
            # Re-check that the lookahead is a valid equality operator.
            if not(_la==CParser.T__70 or _la==CParser.T__71):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 721
            self.relational_expression()
            self.state = 726
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Relational_expressionContext(ParserRuleContext):
    """Parse-tree node for 'relational_expression' (comparison-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def shift_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Shift_expressionContext)
        return self.getTypedRuleContext(CParser.Shift_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_relational_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterRelational_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitRelational_expression", None)
        if handler is not None:
            handler(self)
def relational_expression(self):
    """Parse 'relational_expression':
    shift_expression (('<' | '>' | '<=' | '>=') shift_expression)*
    The operator set is encoded as a generated token bit mask below."""
    localctx = CParser.Relational_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 114, self.RULE_relational_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 727  # generated parser-state bookkeeping
        self.shift_expression()
        self.state = 732
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more (relational-operator, operand) pairs.
        while ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0):
            self.state = 728
            _la = self._input.LA(1)
            # Re-check that the lookahead is a valid relational operator.
            if not(((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 729
            self.shift_expression()
            self.state = 734
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Shift_expressionContext(ParserRuleContext):
    """Parse-tree node for 'shift_expression' ('<<'/'>>'-separated operands)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def additive_expression(self, i=None):
        # i: child index; None returns the list of all operand contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.Additive_expressionContext)
        return self.getTypedRuleContext(CParser.Additive_expressionContext, i)

    def getRuleIndex(self):
        return CParser.RULE_shift_expression

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterShift_expression", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitShift_expression", None)
        if handler is not None:
            handler(self)
def shift_expression(self):
    """Parse 'shift_expression':
    additive_expression (('<<' | '>>') additive_expression)*"""
    localctx = CParser.Shift_expressionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 116, self.RULE_shift_expression)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 735  # generated parser-state bookkeeping
        self.additive_expression()
        self.state = 740
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Zero or more (shift-operator, operand) pairs.
        while _la==CParser.T__76 or _la==CParser.T__77:
            self.state = 736
            _la = self._input.LA(1)
            # Re-check that the lookahead is a valid shift operator.
            if not(_la==CParser.T__76 or _la==CParser.T__77):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 737
            self.additive_expression()
            self.state = 742
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class StatementContext(ParserRuleContext):
    """Parse-tree node for 'statement'.

    Exactly one of the child accessors below is non-None, depending on which
    statement alternative was matched.
    """

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    # --- one accessor per grammar alternative ---

    def labeled_statement(self):
        return self.getTypedRuleContext(CParser.Labeled_statementContext, 0)

    def compound_statement(self):
        return self.getTypedRuleContext(CParser.Compound_statementContext, 0)

    def expression_statement(self):
        return self.getTypedRuleContext(CParser.Expression_statementContext, 0)

    def selection_statement(self):
        return self.getTypedRuleContext(CParser.Selection_statementContext, 0)

    def iteration_statement(self):
        return self.getTypedRuleContext(CParser.Iteration_statementContext, 0)

    def jump_statement(self):
        return self.getTypedRuleContext(CParser.Jump_statementContext, 0)

    def macro_statement(self):
        return self.getTypedRuleContext(CParser.Macro_statementContext, 0)

    def asm2_statement(self):
        return self.getTypedRuleContext(CParser.Asm2_statementContext, 0)

    def asm1_statement(self):
        return self.getTypedRuleContext(CParser.Asm1_statementContext, 0)

    def asm_statement(self):
        return self.getTypedRuleContext(CParser.Asm_statementContext, 0)

    def declaration(self):
        return self.getTypedRuleContext(CParser.DeclarationContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_statement

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterStatement", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitStatement", None)
        if handler is not None:
            handler(self)
def statement(self):
    """Parse 'statement': dispatch to one of 11 alternatives (labeled,
    compound, expression, selection, iteration, jump, macro, asm2, asm1,
    asm, or declaration) chosen by ATN adaptive prediction."""
    localctx = CParser.StatementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 118, self.RULE_statement)
    try:
        self.state = 754  # generated parser-state bookkeeping
        self._errHandler.sync(self)
        # Let the ATN interpreter pick the alternative (decision #92).
        la_ = self._interp.adaptivePredict(self._input,92,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 743
            self.labeled_statement()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 744
            self.compound_statement()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 745
            self.expression_statement()
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 746
            self.selection_statement()
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 747
            self.iteration_statement()
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 748
            self.jump_statement()
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 749
            self.macro_statement()
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 750
            self.asm2_statement()
            pass
        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 751
            self.asm1_statement()
            pass
        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 752
            self.asm_statement()
            pass
        elif la_ == 11:
            self.enterOuterAlt(localctx, 11)
            self.state = 753
            self.declaration()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Asm2_statementContext(ParserRuleContext):
    """Parse-tree node for 'asm2_statement' (identifier-style asm form)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        # The asm macro/function identifier token.
        return self.getToken(CParser.IDENTIFIER, 0)

    def getRuleIndex(self):
        return CParser.RULE_asm2_statement

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterAsm2_statement", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitAsm2_statement", None)
        if handler is not None:
            handler(self)
def asm2_statement(self):
    """Parse 'asm2_statement': an optional leading token, an identifier,
    then '(' ... ')' where the body is any tokens except ';', ending ';'."""
    localctx = CParser.Asm2_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 120, self.RULE_asm2_statement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 757  # generated parser-state bookkeeping
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional leading token.
        if _la==CParser.T__78:
            self.state = 756
            self.match(CParser.T__78)

        self.state = 759
        self.match(CParser.IDENTIFIER)
        self.state = 760
        self.match(CParser.T__37)
        self.state = 764
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
        # Consume arbitrary body tokens until prediction says the loop ends.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 761
                _la = self._input.LA(1)
                # Accept any token except ';' inside the parentheses.
                if _la <= 0 or _la==CParser.T__1:
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()

            self.state = 766
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,94,self._ctx)

        self.state = 767
        self.match(CParser.T__38)
        self.state = 768
        self.match(CParser.T__1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Asm1_statementContext(ParserRuleContext):
    """Parse-tree node for 'asm1_statement' (brace-delimited asm block form)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return CParser.RULE_asm1_statement

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterAsm1_statement", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitAsm1_statement", None)
        if handler is not None:
            handler(self)
def asm1_statement(self):
    """Parse 'asm1_statement': a keyword token, '{', any tokens except the
    closing brace (the generated bit mask below covers every other token
    type in the vocabulary), then the closing brace token."""
    localctx = CParser.Asm1_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 122, self.RULE_asm1_statement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 770  # generated parser-state bookkeeping
        self.match(CParser.T__79)
        self.state = 771
        self.match(CParser.T__0)
        self.state = 775
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop while the lookahead is any token other than the block terminator.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
            self.state = 772
            _la = self._input.LA(1)
            # Accept any token except the block terminator.
            if _la <= 0 or _la==CParser.T__19:
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 777
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 778
        self.match(CParser.T__19)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Asm_statementContext(ParserRuleContext):
    """Parse-tree node for 'asm_statement' (alternate brace-delimited asm form)."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return CParser.RULE_asm_statement

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterAsm_statement", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitAsm_statement", None)
        if handler is not None:
            handler(self)
def asm_statement(self):
    """Parse 'asm_statement': a keyword token, '{', any tokens except the
    closing brace (same generated any-token bit mask as asm1_statement),
    then the closing brace token."""
    localctx = CParser.Asm_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 124, self.RULE_asm_statement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 780  # generated parser-state bookkeeping
        self.match(CParser.T__80)
        self.state = 781
        self.match(CParser.T__0)
        self.state = 785
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop while the lookahead is any token other than the block terminator.
        while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
            self.state = 782
            _la = self._input.LA(1)
            # Accept any token except the block terminator.
            if _la <= 0 or _la==CParser.T__19:
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
            self.state = 787
            self._errHandler.sync(self)
            _la = self._input.LA(1)
        self.state = 788
        self.match(CParser.T__19)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Macro_statementContext(ParserRuleContext):
    """Parse-tree node for 'macro_statement':
    IDENTIFIER '(' declaration* statement_list? expression? ')'."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        # The macro name token.
        return self.getToken(CParser.IDENTIFIER, 0)

    def declaration(self, i=None):
        # i: child index; None returns the list of all declaration contexts.
        if i is None:
            return self.getTypedRuleContexts(CParser.DeclarationContext)
        return self.getTypedRuleContext(CParser.DeclarationContext, i)

    def statement_list(self):
        return self.getTypedRuleContext(CParser.Statement_listContext, 0)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_macro_statement

    def enterRule(self, listener):
        # listener: ParseTreeListener; dispatch only when the hook exists.
        handler = getattr(listener, "enterMacro_statement", None)
        if handler is not None:
            handler(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        handler = getattr(listener, "exitMacro_statement", None)
        if handler is not None:
            handler(self)
def macro_statement(self):
    """Parse 'macro_statement':
    IDENTIFIER '(' declaration* statement_list? expression? ')'"""
    localctx = CParser.Macro_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 126, self.RULE_macro_statement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 790  # generated parser-state bookkeeping
        self.match(CParser.IDENTIFIER)
        self.state = 791
        self.match(CParser.T__37)
        self.state = 795
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
        # Zero or more declarations, driven by adaptive prediction.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 792
                self.declaration()
            self.state = 797
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,97,self._ctx)

        self.state = 799
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,98,self._ctx)
        # Optional statement list.
        if la_ == 1:
            self.state = 798
            self.statement_list()

        self.state = 802
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional trailing expression (lookahead in the expression FIRST set).
        if ((((_la - 38)) & ~0x3f) == 0 and ((1 << (_la - 38)) & ((1 << (CParser.T__37 - 38)) | (1 << (CParser.T__41 - 38)) | (1 << (CParser.T__43 - 38)) | (1 << (CParser.T__44 - 38)) | (1 << (CParser.T__47 - 38)) | (1 << (CParser.T__48 - 38)) | (1 << (CParser.T__49 - 38)) | (1 << (CParser.T__52 - 38)) | (1 << (CParser.T__53 - 38)) | (1 << (CParser.T__54 - 38)) | (1 << (CParser.IDENTIFIER - 38)) | (1 << (CParser.CHARACTER_LITERAL - 38)) | (1 << (CParser.STRING_LITERAL - 38)) | (1 << (CParser.HEX_LITERAL - 38)) | (1 << (CParser.DECIMAL_LITERAL - 38)) | (1 << (CParser.OCTAL_LITERAL - 38)) | (1 << (CParser.FLOATING_POINT_LITERAL - 38)))) != 0):
            self.state = 801
            self.expression()

        self.state = 804
        self.match(CParser.T__38)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Labeled_statementContext(ParserRuleContext):
    """Parse-tree node for the 'labeled_statement' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)

    def statement(self):
        return self.getTypedRuleContext(CParser.StatementContext, 0)

    def constant_expression(self):
        return self.getTypedRuleContext(CParser.Constant_expressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_labeled_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterLabeled_statement
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitLabeled_statement
        except AttributeError:
            return
        handler(self)
def labeled_statement(self):
    """Parse the 'labeled_statement' rule (rule index 128).

    Three alternatives, selected on the lookahead token (token literals
    T__22/T__81/T__82 are inferred to be ':'/'case'/'default' from the rule
    shape — confirm against C.g4):
        IDENTIFIER ':' statement
      | 'case' constant_expression ':' statement
      | 'default' ':' statement
    """
    localctx = CParser.Labeled_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 128, self.RULE_labeled_statement)
    try:
        self.state = 817
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.IDENTIFIER]:
            # Alt 1: plain label.
            self.enterOuterAlt(localctx, 1)
            self.state = 806
            self.match(CParser.IDENTIFIER)
            self.state = 807
            self.match(CParser.T__22)
            self.state = 808
            self.statement()
            pass
        elif token in [CParser.T__81]:
            # Alt 2: case label with a constant expression.
            self.enterOuterAlt(localctx, 2)
            self.state = 809
            self.match(CParser.T__81)
            self.state = 810
            self.constant_expression()
            self.state = 811
            self.match(CParser.T__22)
            self.state = 812
            self.statement()
            pass
        elif token in [CParser.T__82]:
            # Alt 3: default label.
            self.enterOuterAlt(localctx, 3)
            self.state = 814
            self.match(CParser.T__82)
            self.state = 815
            self.match(CParser.T__22)
            self.state = 816
            self.statement()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Compound_statementContext(ParserRuleContext):
    """Parse-tree node for the 'compound_statement' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # @param i=None Type: int
    def declaration(self, i=None):
        # No index: return every child declaration context; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.DeclarationContext)
        return self.getTypedRuleContext(CParser.DeclarationContext, i)

    def statement_list(self):
        return self.getTypedRuleContext(CParser.Statement_listContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_compound_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterCompound_statement
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitCompound_statement
        except AttributeError:
            return
        handler(self)
def compound_statement(self):
    """Parse the 'compound_statement' rule (rule index 130).

    Grammar shape (T__0/T__19 are inferred to be '{'/'}' — confirm against C.g4):
        '{' declaration* statement_list? '}'
    """
    localctx = CParser.Compound_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 130, self.RULE_compound_statement)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 819
        self.match(CParser.T__0)
        self.state = 823
        self._errHandler.sync(self)
        # Zero-or-more declarations, driven by adaptive prediction (ATN decision 101).
        _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 820
                self.declaration()
            self.state = 825
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
        self.state = 827
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional statement_list: taken only when the lookahead token is in the
        # statement FIRST set (two-word bitset test generated by ANTLR).
        if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0) or ((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (CParser.T__78 - 79)) | (1 << (CParser.T__79 - 79)) | (1 << (CParser.T__80 - 79)) | (1 << (CParser.T__81 - 79)) | (1 << (CParser.T__82 - 79)) | (1 << (CParser.T__83 - 79)) | (1 << (CParser.T__85 - 79)) | (1 << (CParser.T__86 - 79)) | (1 << (CParser.T__87 - 79)) | (1 << (CParser.T__88 - 79)) | (1 << (CParser.T__89 - 79)) | (1 << (CParser.T__90 - 79)) | (1 << (CParser.T__91 - 79)) | (1 << (CParser.IDENTIFIER - 79)) | (1 << (CParser.CHARACTER_LITERAL - 79)) | (1 << (CParser.STRING_LITERAL - 79)) | (1 << (CParser.HEX_LITERAL - 79)) | (1 << (CParser.DECIMAL_LITERAL - 79)) | (1 << (CParser.OCTAL_LITERAL - 79)) | (1 << (CParser.FLOATING_POINT_LITERAL - 79)))) != 0):
            self.state = 826
            self.statement_list()
        self.state = 829
        self.match(CParser.T__19)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Statement_listContext(ParserRuleContext):
    """Parse-tree node for the 'statement_list' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # @param i=None Type: int
    def statement(self, i=None):
        # No index: return every child statement context; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.StatementContext)
        return self.getTypedRuleContext(CParser.StatementContext, i)

    def getRuleIndex(self):
        return CParser.RULE_statement_list

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterStatement_list
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitStatement_list
        except AttributeError:
            return
        handler(self)
def statement_list(self):
    """Parse the 'statement_list' rule (rule index 132): one or more statements."""
    localctx = CParser.Statement_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 132, self.RULE_statement_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 832
        self._errHandler.sync(self)
        # '+' loop: the first iteration is mandatory, continuation is decided by
        # adaptive prediction (ATN decision 103).
        _alt = 1
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 831
                self.statement()
            else:
                raise NoViableAltException(self)
            self.state = 834
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,103,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Expression_statementContext(ParserRuleContext):
    """Parse-tree node for the 'expression_statement' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_expression_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterExpression_statement
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitExpression_statement
        except AttributeError:
            return
        handler(self)
def expression_statement(self):
    """Parse the 'expression_statement' rule (rule index 134).

    Two alternatives (T__1 is inferred to be ';' — confirm against C.g4):
        ';'                 (empty statement)
      | expression ';'
    """
    localctx = CParser.Expression_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 134, self.RULE_expression_statement)
    try:
        self.state = 840
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__1]:
            # Alt 1: bare semicolon.
            self.enterOuterAlt(localctx, 1)
            self.state = 836
            self.match(CParser.T__1)
            pass
        elif token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
            # Alt 2: expression followed by semicolon (lookahead is in the
            # expression FIRST set).
            self.enterOuterAlt(localctx, 2)
            self.state = 837
            self.expression()
            self.state = 838
            self.match(CParser.T__1)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Selection_statementContext(ParserRuleContext):
    """Parse-tree node for the 'selection_statement' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        self.e = None # ExpressionContext

    # @param i=None Type: int
    def statement(self, i=None):
        # No index: return every child statement context; otherwise the i-th one.
        if i is None:
            return self.getTypedRuleContexts(CParser.StatementContext)
        return self.getTypedRuleContext(CParser.StatementContext, i)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_selection_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterSelection_statement
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitSelection_statement
        except AttributeError:
            return
        handler(self)
def selection_statement(self):
    """Parse the 'selection_statement' rule (rule index 136).

    Two alternatives (T__83/T__84/T__85 are inferred to be 'if'/'else'/'switch'
    — confirm against C.g4):
        'if' '(' e=expression ')' statement ('else' statement)?
      | 'switch' '(' expression ')' statement
    For the 'if' form the condition's source span and text are passed to
    self.StorePredicateExpression — presumably feeding ECC's
    PredicateExpressionCheck; verify against the Check class.
    """
    localctx = CParser.Selection_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 136, self.RULE_selection_statement)
    try:
        self.state = 858
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__83]:
            # Alt 1: if (...) statement [else statement]
            self.enterOuterAlt(localctx, 1)
            self.state = 842
            self.match(CParser.T__83)
            self.state = 843
            self.match(CParser.T__37)
            self.state = 844
            localctx.e = self.expression()
            self.state = 845
            self.match(CParser.T__38)
            # Record the condition's start/stop line+column and its source text.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            self.state = 847
            self.statement()
            self.state = 850
            self._errHandler.sync(self)
            # Optional else-branch resolved by adaptive prediction (decision 105),
            # which also settles the dangling-else ambiguity.
            la_ = self._interp.adaptivePredict(self._input,105,self._ctx)
            if la_ == 1:
                self.state = 848
                self.match(CParser.T__84)
                self.state = 849
                self.statement()
            pass
        elif token in [CParser.T__85]:
            # Alt 2: switch (...) statement
            self.enterOuterAlt(localctx, 2)
            self.state = 852
            self.match(CParser.T__85)
            self.state = 853
            self.match(CParser.T__37)
            self.state = 854
            self.expression()
            self.state = 855
            self.match(CParser.T__38)
            self.state = 856
            self.statement()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Iteration_statementContext(ParserRuleContext):
    """Parse-tree node for the 'iteration_statement' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        self.e = None # ExpressionContext

    def statement(self):
        return self.getTypedRuleContext(CParser.StatementContext, 0)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_iteration_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterIteration_statement
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitIteration_statement
        except AttributeError:
            return
        handler(self)
def iteration_statement(self):
    """Parse the 'iteration_statement' rule (rule index 138).

    Two alternatives (T__86/T__87 are inferred to be 'while'/'do' — confirm
    against C.g4):
        'while' '(' e=expression ')' statement
      | 'do' statement 'while' '(' e=expression ')' ';'
    In both forms the loop condition's source span and text are passed to
    self.StorePredicateExpression — presumably feeding ECC's
    PredicateExpressionCheck; verify against the Check class.
    """
    localctx = CParser.Iteration_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 138, self.RULE_iteration_statement)
    try:
        self.state = 876
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__86]:
            # Alt 1: while (...) statement
            self.enterOuterAlt(localctx, 1)
            self.state = 860
            self.match(CParser.T__86)
            self.state = 861
            self.match(CParser.T__37)
            self.state = 862
            localctx.e = self.expression()
            self.state = 863
            self.match(CParser.T__38)
            self.state = 864
            self.statement()
            # Record the condition's start/stop line+column and its source text.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            pass
        elif token in [CParser.T__87]:
            # Alt 2: do statement while (...) ;
            self.enterOuterAlt(localctx, 2)
            self.state = 867
            self.match(CParser.T__87)
            self.state = 868
            self.statement()
            self.state = 869
            self.match(CParser.T__86)
            self.state = 870
            self.match(CParser.T__37)
            self.state = 871
            localctx.e = self.expression()
            self.state = 872
            self.match(CParser.T__38)
            self.state = 873
            self.match(CParser.T__1)
            # Record the condition's start/stop line+column and its source text.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Jump_statementContext(ParserRuleContext):
    """Parse-tree node for the 'jump_statement' rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_jump_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        try:
            handler = listener.enterJump_statement
        except AttributeError:
            return
        handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        try:
            handler = listener.exitJump_statement
        except AttributeError:
            return
        handler(self)
def jump_statement(self):
    """Parse the 'jump_statement' rule (rule index 140).

    Five alternatives, selected by adaptive prediction (decision 108); token
    literals T__88..T__91 are inferred to be 'goto'/'continue'/'break'/'return'
    from the rule shape — confirm against C.g4:
        'goto' IDENTIFIER ';'
      | 'continue' ';'
      | 'break' ';'
      | 'return' ';'
      | 'return' expression ';'
    """
    localctx = CParser.Jump_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 140, self.RULE_jump_statement)
    try:
        self.state = 891
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,108,self._ctx)
        if la_ == 1:
            # goto IDENTIFIER ;
            self.enterOuterAlt(localctx, 1)
            self.state = 878
            self.match(CParser.T__88)
            self.state = 879
            self.match(CParser.IDENTIFIER)
            self.state = 880
            self.match(CParser.T__1)
            pass
        elif la_ == 2:
            # continue ;
            self.enterOuterAlt(localctx, 2)
            self.state = 881
            self.match(CParser.T__89)
            self.state = 882
            self.match(CParser.T__1)
            pass
        elif la_ == 3:
            # break ;
            self.enterOuterAlt(localctx, 3)
            self.state = 883
            self.match(CParser.T__90)
            self.state = 884
            self.match(CParser.T__1)
            pass
        elif la_ == 4:
            # return ;
            self.enterOuterAlt(localctx, 4)
            self.state = 885
            self.match(CParser.T__91)
            self.state = 886
            self.match(CParser.T__1)
            pass
        elif la_ == 5:
            # return expression ;
            self.enterOuterAlt(localctx, 5)
            self.state = 887
            self.match(CParser.T__91)
            self.state = 888
            self.expression()
            self.state = 889
            self.match(CParser.T__1)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
| edk2-master | BaseTools/Source/Python/Ecc/CParser4/CParser.py |
# Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
if __name__ is not None and "." in __name__:
from .CParser import CParser
else:
from CParser import CParser
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
# This class defines a complete listener for a parse tree produced by CParser.
class CListener(ParseTreeListener):
    """No-op base listener for CParser parse trees.

    Every visible enter*/exit* callback defaults to doing nothing; concrete
    listeners subclass this and override only the callbacks they need.
    """
    # Enter a parse tree produced by CParser#translation_unit.
    # @param ctx Type: CParser.Translation_unitContext
    def enterTranslation_unit(self,ctx):
        pass
    # Exit a parse tree produced by CParser#translation_unit.
    # @param ctx Type: CParser.Translation_unitContext
    def exitTranslation_unit(self,ctx):
        pass
# Enter a parse tree produced by CParser#external_declaration.
# @param ctx Type: CParser.External_declarationContext
def enterExternal_declaration(self,ctx):
pass
# Exit a parse tree produced by CParser#external_declaration.
# @param ctx Type: CParser.External_declarationContext
def exitExternal_declaration(self,ctx):
pass
# Enter a parse tree produced by CParser#function_definition.
# @param ctx Type: CParser.Function_definitionContext
def enterFunction_definition(self,ctx):
pass
# Exit a parse tree produced by CParser#function_definition.
# @param ctx Type: CParser.Function_definitionContext
def exitFunction_definition(self,ctx):
pass
# Enter a parse tree produced by CParser#declaration_specifiers.
# @param ctx Type: CParser.Declaration_specifiersContext
def enterDeclaration_specifiers(self,ctx):
pass
# Exit a parse tree produced by CParser#declaration_specifiers.
# @param ctx Type: CParser.Declaration_specifiersContext
def exitDeclaration_specifiers(self,ctx):
pass
# Enter a parse tree produced by CParser#declaration.
# @param ctx Type: CParser.DeclarationContext
def enterDeclaration(self,ctx):
pass
# Exit a parse tree produced by CParser#declaration.
# @param ctx Type: CParser.DeclarationContext
def exitDeclaration(self,ctx):
pass
# Enter a parse tree produced by CParser#init_declarator_list.
# @param ctx Type: CParser.Init_declarator_listContext
def enterInit_declarator_list(self,ctx):
pass
# Exit a parse tree produced by CParser#init_declarator_list.
# @param ctx Type: CParser.Init_declarator_listContext
def exitInit_declarator_list(self,ctx):
pass
# Enter a parse tree produced by CParser#init_declarator.
# @param ctx Type: CParser.Init_declaratorContext
def enterInit_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#init_declarator.
# @param ctx Type: CParser.Init_declaratorContext
def exitInit_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#storage_class_specifier.
# @param ctx Type: CParser.Storage_class_specifierContext
def enterStorage_class_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#storage_class_specifier.
# @param ctx Type: CParser.Storage_class_specifierContext
def exitStorage_class_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#type_specifier.
# @param ctx Type: CParser.Type_specifierContext
def enterType_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#type_specifier.
# @param ctx Type: CParser.Type_specifierContext
def exitType_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#type_id.
# @param ctx Type: CParser.Type_idContext
def enterType_id(self,ctx):
pass
# Exit a parse tree produced by CParser#type_id.
# @param ctx Type: CParser.Type_idContext
def exitType_id(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_or_union_specifier.
# @param ctx Type: CParser.Struct_or_union_specifierContext
def enterStruct_or_union_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_or_union_specifier.
# @param ctx Type: CParser.Struct_or_union_specifierContext
def exitStruct_or_union_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_or_union.
# @param ctx Type: CParser.Struct_or_unionContext
def enterStruct_or_union(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_or_union.
# @param ctx Type: CParser.Struct_or_unionContext
def exitStruct_or_union(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declaration_list.
# @param ctx Type: CParser.Struct_declaration_listContext
def enterStruct_declaration_list(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declaration_list.
# @param ctx Type: CParser.Struct_declaration_listContext
def exitStruct_declaration_list(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declaration.
# @param ctx Type: CParser.Struct_declarationContext
def enterStruct_declaration(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declaration.
# @param ctx Type: CParser.Struct_declarationContext
def exitStruct_declaration(self,ctx):
pass
# Enter a parse tree produced by CParser#specifier_qualifier_list.
# @param ctx Type: CParser.Specifier_qualifier_listContext
def enterSpecifier_qualifier_list(self,ctx):
pass
# Exit a parse tree produced by CParser#specifier_qualifier_list.
# @param ctx Type: CParser.Specifier_qualifier_listContext
def exitSpecifier_qualifier_list(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declarator_list.
# @param ctx Type: CParser.Struct_declarator_listContext
def enterStruct_declarator_list(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declarator_list.
# @param ctx Type: CParser.Struct_declarator_listContext
def exitStruct_declarator_list(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declarator.
# @param ctx Type: CParser.Struct_declaratorContext
def enterStruct_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declarator.
# @param ctx Type: CParser.Struct_declaratorContext
def exitStruct_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#enum_specifier.
# @param ctx Type: CParser.Enum_specifierContext
def enterEnum_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#enum_specifier.
# @param ctx Type: CParser.Enum_specifierContext
def exitEnum_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#enumerator_list.
# @param ctx Type: CParser.Enumerator_listContext
def enterEnumerator_list(self,ctx):
pass
# Exit a parse tree produced by CParser#enumerator_list.
# @param ctx Type: CParser.Enumerator_listContext
def exitEnumerator_list(self,ctx):
pass
# Enter a parse tree produced by CParser#enumerator.
# @param ctx Type: CParser.EnumeratorContext
def enterEnumerator(self,ctx):
pass
# Exit a parse tree produced by CParser#enumerator.
# @param ctx Type: CParser.EnumeratorContext
def exitEnumerator(self,ctx):
pass
# Enter a parse tree produced by CParser#type_qualifier.
# @param ctx Type: CParser.Type_qualifierContext
def enterType_qualifier(self,ctx):
pass
# Exit a parse tree produced by CParser#type_qualifier.
# @param ctx Type: CParser.Type_qualifierContext
def exitType_qualifier(self,ctx):
pass
# Enter a parse tree produced by CParser#declarator.
# @param ctx Type: CParser.DeclaratorContext
def enterDeclarator(self,ctx):
pass
# Exit a parse tree produced by CParser#declarator.
# @param ctx Type: CParser.DeclaratorContext
def exitDeclarator(self,ctx):
pass
# Enter a parse tree produced by CParser#direct_declarator.
# @param ctx Type: CParser.Direct_declaratorContext
def enterDirect_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#direct_declarator.
# @param ctx Type: CParser.Direct_declaratorContext
def exitDirect_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#declarator_suffix.
# @param ctx Type: CParser.Declarator_suffixContext
def enterDeclarator_suffix(self,ctx):
pass
# Exit a parse tree produced by CParser#declarator_suffix.
# @param ctx Type: CParser.Declarator_suffixContext
def exitDeclarator_suffix(self,ctx):
pass
# Enter a parse tree produced by CParser#pointer.
# @param ctx Type: CParser.PointerContext
def enterPointer(self,ctx):
pass
# Exit a parse tree produced by CParser#pointer.
# @param ctx Type: CParser.PointerContext
def exitPointer(self,ctx):
pass
# Enter a parse tree produced by CParser#parameter_type_list.
# @param ctx Type: CParser.Parameter_type_listContext
def enterParameter_type_list(self,ctx):
pass
# Exit a parse tree produced by CParser#parameter_type_list.
# @param ctx Type: CParser.Parameter_type_listContext
def exitParameter_type_list(self,ctx):
pass
# Enter a parse tree produced by CParser#parameter_list.
# @param ctx Type: CParser.Parameter_listContext
def enterParameter_list(self,ctx):
pass
# Exit a parse tree produced by CParser#parameter_list.
# @param ctx Type: CParser.Parameter_listContext
def exitParameter_list(self,ctx):
pass
# Enter a parse tree produced by CParser#parameter_declaration.
# @param ctx Type: CParser.Parameter_declarationContext
def enterParameter_declaration(self,ctx):
pass
# Exit a parse tree produced by CParser#parameter_declaration.
# @param ctx Type: CParser.Parameter_declarationContext
def exitParameter_declaration(self,ctx):
pass
# Enter a parse tree produced by CParser#identifier_list.
# @param ctx Type: CParser.Identifier_listContext
def enterIdentifier_list(self,ctx):
pass
# Exit a parse tree produced by CParser#identifier_list.
# @param ctx Type: CParser.Identifier_listContext
def exitIdentifier_list(self,ctx):
pass
# Enter a parse tree produced by CParser#type_name.
# @param ctx Type: CParser.Type_nameContext
def enterType_name(self,ctx):
pass
# Exit a parse tree produced by CParser#type_name.
# @param ctx Type: CParser.Type_nameContext
def exitType_name(self,ctx):
pass
# Enter a parse tree produced by CParser#abstract_declarator.
# @param ctx Type: CParser.Abstract_declaratorContext
def enterAbstract_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#abstract_declarator.
# @param ctx Type: CParser.Abstract_declaratorContext
def exitAbstract_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#direct_abstract_declarator.
# @param ctx Type: CParser.Direct_abstract_declaratorContext
def enterDirect_abstract_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#direct_abstract_declarator.
# @param ctx Type: CParser.Direct_abstract_declaratorContext
def exitDirect_abstract_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#abstract_declarator_suffix.
# @param ctx Type: CParser.Abstract_declarator_suffixContext
def enterAbstract_declarator_suffix(self,ctx):
pass
# Exit a parse tree produced by CParser#abstract_declarator_suffix.
# @param ctx Type: CParser.Abstract_declarator_suffixContext
def exitAbstract_declarator_suffix(self,ctx):
pass
# Enter a parse tree produced by CParser#initializer.
# @param ctx Type: CParser.InitializerContext
def enterInitializer(self,ctx):
pass
# Exit a parse tree produced by CParser#initializer.
# @param ctx Type: CParser.InitializerContext
def exitInitializer(self,ctx):
pass
# Enter a parse tree produced by CParser#initializer_list.
# @param ctx Type: CParser.Initializer_listContext
def enterInitializer_list(self,ctx):
pass
# Exit a parse tree produced by CParser#initializer_list.
# @param ctx Type: CParser.Initializer_listContext
def exitInitializer_list(self,ctx):
pass
# Enter a parse tree produced by CParser#argument_expression_list.
# @param ctx Type: CParser.Argument_expression_listContext
def enterArgument_expression_list(self,ctx):
pass
# Exit a parse tree produced by CParser#argument_expression_list.
# @param ctx Type: CParser.Argument_expression_listContext
def exitArgument_expression_list(self,ctx):
pass
# Enter a parse tree produced by CParser#additive_expression.
# @param ctx Type: CParser.Additive_expressionContext
def enterAdditive_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#additive_expression.
# @param ctx Type: CParser.Additive_expressionContext
def exitAdditive_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#multiplicative_expression.
# @param ctx Type: CParser.Multiplicative_expressionContext
def enterMultiplicative_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#multiplicative_expression.
# @param ctx Type: CParser.Multiplicative_expressionContext
def exitMultiplicative_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#cast_expression.
# @param ctx Type: CParser.Cast_expressionContext
def enterCast_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#cast_expression.
# @param ctx Type: CParser.Cast_expressionContext
def exitCast_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#unary_expression.
# @param ctx Type: CParser.Unary_expressionContext
def enterUnary_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#unary_expression.
# @param ctx Type: CParser.Unary_expressionContext
def exitUnary_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#postfix_expression.
# @param ctx Type: CParser.Postfix_expressionContext
def enterPostfix_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#postfix_expression.
# @param ctx Type: CParser.Postfix_expressionContext
def exitPostfix_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#macro_parameter_list.
# @param ctx Type: CParser.Macro_parameter_listContext
def enterMacro_parameter_list(self,ctx):
pass
# Exit a parse tree produced by CParser#macro_parameter_list.
# @param ctx Type: CParser.Macro_parameter_listContext
def exitMacro_parameter_list(self,ctx):
pass
# Enter a parse tree produced by CParser#unary_operator.
# @param ctx Type: CParser.Unary_operatorContext
def enterUnary_operator(self,ctx):
pass
# Exit a parse tree produced by CParser#unary_operator.
# @param ctx Type: CParser.Unary_operatorContext
def exitUnary_operator(self,ctx):
pass
# Enter a parse tree produced by CParser#primary_expression.
# @param ctx Type: CParser.Primary_expressionContext
def enterPrimary_expression(self,ctx):
pass
# ---------------------------------------------------------------------------
# Auto-generated ANTLR listener callbacks for the expression rules of the
# C grammar (CParser).  Every callback is an intentional no-op; listener
# subclasses override only the rules they need to observe.
# ---------------------------------------------------------------------------

def exitPrimary_expression(self, ctx):
    """Exit CParser#primary_expression.  ctx: CParser.Primary_expressionContext."""

def enterConstant(self, ctx):
    """Enter CParser#constant.  ctx: CParser.ConstantContext."""

def exitConstant(self, ctx):
    """Exit CParser#constant.  ctx: CParser.ConstantContext."""

def enterExpression(self, ctx):
    """Enter CParser#expression.  ctx: CParser.ExpressionContext."""

def exitExpression(self, ctx):
    """Exit CParser#expression.  ctx: CParser.ExpressionContext."""

def enterConstant_expression(self, ctx):
    """Enter CParser#constant_expression.  ctx: CParser.Constant_expressionContext."""

def exitConstant_expression(self, ctx):
    """Exit CParser#constant_expression.  ctx: CParser.Constant_expressionContext."""

def enterAssignment_expression(self, ctx):
    """Enter CParser#assignment_expression.  ctx: CParser.Assignment_expressionContext."""

def exitAssignment_expression(self, ctx):
    """Exit CParser#assignment_expression.  ctx: CParser.Assignment_expressionContext."""

def enterLvalue(self, ctx):
    """Enter CParser#lvalue.  ctx: CParser.LvalueContext."""

def exitLvalue(self, ctx):
    """Exit CParser#lvalue.  ctx: CParser.LvalueContext."""

def enterAssignment_operator(self, ctx):
    """Enter CParser#assignment_operator.  ctx: CParser.Assignment_operatorContext."""

def exitAssignment_operator(self, ctx):
    """Exit CParser#assignment_operator.  ctx: CParser.Assignment_operatorContext."""

def enterConditional_expression(self, ctx):
    """Enter CParser#conditional_expression.  ctx: CParser.Conditional_expressionContext."""

def exitConditional_expression(self, ctx):
    """Exit CParser#conditional_expression.  ctx: CParser.Conditional_expressionContext."""

def enterLogical_or_expression(self, ctx):
    """Enter CParser#logical_or_expression.  ctx: CParser.Logical_or_expressionContext."""

def exitLogical_or_expression(self, ctx):
    """Exit CParser#logical_or_expression.  ctx: CParser.Logical_or_expressionContext."""

def enterLogical_and_expression(self, ctx):
    """Enter CParser#logical_and_expression.  ctx: CParser.Logical_and_expressionContext."""

def exitLogical_and_expression(self, ctx):
    """Exit CParser#logical_and_expression.  ctx: CParser.Logical_and_expressionContext."""

def enterInclusive_or_expression(self, ctx):
    """Enter CParser#inclusive_or_expression.  ctx: CParser.Inclusive_or_expressionContext."""

def exitInclusive_or_expression(self, ctx):
    """Exit CParser#inclusive_or_expression.  ctx: CParser.Inclusive_or_expressionContext."""

def enterExclusive_or_expression(self, ctx):
    """Enter CParser#exclusive_or_expression.  ctx: CParser.Exclusive_or_expressionContext."""

def exitExclusive_or_expression(self, ctx):
    """Exit CParser#exclusive_or_expression.  ctx: CParser.Exclusive_or_expressionContext."""

def enterAnd_expression(self, ctx):
    """Enter CParser#and_expression.  ctx: CParser.And_expressionContext."""

def exitAnd_expression(self, ctx):
    """Exit CParser#and_expression.  ctx: CParser.And_expressionContext."""

def enterEquality_expression(self, ctx):
    """Enter CParser#equality_expression.  ctx: CParser.Equality_expressionContext."""

def exitEquality_expression(self, ctx):
    """Exit CParser#equality_expression.  ctx: CParser.Equality_expressionContext."""

def enterRelational_expression(self, ctx):
    """Enter CParser#relational_expression.  ctx: CParser.Relational_expressionContext."""

def exitRelational_expression(self, ctx):
    """Exit CParser#relational_expression.  ctx: CParser.Relational_expressionContext."""

def enterShift_expression(self, ctx):
    """Enter CParser#shift_expression.  ctx: CParser.Shift_expressionContext."""

def exitShift_expression(self, ctx):
    """Exit CParser#shift_expression.  ctx: CParser.Shift_expressionContext."""
# ---------------------------------------------------------------------------
# Auto-generated ANTLR listener callbacks for the statement rules of the
# C grammar (CParser).  Every callback is an intentional no-op; listener
# subclasses override only the rules they need to observe.
# ---------------------------------------------------------------------------

def enterStatement(self, ctx):
    """Enter CParser#statement.  ctx: CParser.StatementContext."""

def exitStatement(self, ctx):
    """Exit CParser#statement.  ctx: CParser.StatementContext."""

def enterAsm2_statement(self, ctx):
    """Enter CParser#asm2_statement.  ctx: CParser.Asm2_statementContext."""

def exitAsm2_statement(self, ctx):
    """Exit CParser#asm2_statement.  ctx: CParser.Asm2_statementContext."""

def enterAsm1_statement(self, ctx):
    """Enter CParser#asm1_statement.  ctx: CParser.Asm1_statementContext."""

def exitAsm1_statement(self, ctx):
    """Exit CParser#asm1_statement.  ctx: CParser.Asm1_statementContext."""

def enterAsm_statement(self, ctx):
    """Enter CParser#asm_statement.  ctx: CParser.Asm_statementContext."""

def exitAsm_statement(self, ctx):
    """Exit CParser#asm_statement.  ctx: CParser.Asm_statementContext."""

def enterMacro_statement(self, ctx):
    """Enter CParser#macro_statement.  ctx: CParser.Macro_statementContext."""

def exitMacro_statement(self, ctx):
    """Exit CParser#macro_statement.  ctx: CParser.Macro_statementContext."""

def enterLabeled_statement(self, ctx):
    """Enter CParser#labeled_statement.  ctx: CParser.Labeled_statementContext."""

def exitLabeled_statement(self, ctx):
    """Exit CParser#labeled_statement.  ctx: CParser.Labeled_statementContext."""

def enterCompound_statement(self, ctx):
    """Enter CParser#compound_statement.  ctx: CParser.Compound_statementContext."""

def exitCompound_statement(self, ctx):
    """Exit CParser#compound_statement.  ctx: CParser.Compound_statementContext."""

def enterStatement_list(self, ctx):
    """Enter CParser#statement_list.  ctx: CParser.Statement_listContext."""

def exitStatement_list(self, ctx):
    """Exit CParser#statement_list.  ctx: CParser.Statement_listContext."""

def enterExpression_statement(self, ctx):
    """Enter CParser#expression_statement.  ctx: CParser.Expression_statementContext."""

def exitExpression_statement(self, ctx):
    """Exit CParser#expression_statement.  ctx: CParser.Expression_statementContext."""

def enterSelection_statement(self, ctx):
    """Enter CParser#selection_statement.  ctx: CParser.Selection_statementContext."""

def exitSelection_statement(self, ctx):
    """Exit CParser#selection_statement.  ctx: CParser.Selection_statementContext."""

def enterIteration_statement(self, ctx):
    """Enter CParser#iteration_statement.  ctx: CParser.Iteration_statementContext."""

def exitIteration_statement(self, ctx):
    """Exit CParser#iteration_statement.  ctx: CParser.Iteration_statementContext."""

def enterJump_statement(self, ctx):
    """Enter CParser#jump_statement.  ctx: CParser.Jump_statementContext."""

def exitJump_statement(self, ctx):
    """Exit CParser#jump_statement.  ctx: CParser.Jump_statementContext."""
| edk2-master | BaseTools/Source/Python/Ecc/CParser4/CListener.py |
## @file
# classes represent data in FDF
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## FD data in FDF
class FDClassObject:
    """Attributes parsed from one [FD] section of an FDF file."""

    def __init__(self):
        """Initialize every FD attribute to its empty/default value."""
        self.FdUiName = ''
        self.CreateFileName = None
        self.BaseAddress = None
        self.BaseAddressPcd = None
        self.Size = None
        self.SizePcd = None
        self.ErasePolarity = None
        # Each entry is a 3-tuple: (block size, number of blocks, PCD name).
        self.BlockSizeList = []
        # DEFINE variable name -> value
        self.DefineVarDict = {}
        # SET variable name -> value
        self.SetVarDict = {}
        self.RegionList = []
## FFS data in FDF
class FfsClassObject:
    """Base description of an FFS file parsed from an FDF file."""

    def __init__(self):
        """Start with no GUID, default flags, and an empty section list."""
        self.NameGuid = None
        self.Fixed = False
        self.CheckSum = False
        self.Alignment = None
        self.SectionList = []
## FILE statement data in FDF
class FileStatementClassObject(FfsClassObject):
    """FILE statement data; extends FfsClassObject with file/FV naming."""

    def __init__(self):
        """Initialize the FFS base fields, then the FILE-statement extras."""
        FfsClassObject.__init__(self)
        self.FvFileType = None
        self.FileName = None
        self.KeyStringList = []
        self.FvName = None
        self.FdName = None
        self.DefineVarDict = {}
        self.KeepReloc = None
## INF statement data in FDF
class FfsInfStatementClassObject(FfsClassObject):
    """INF statement data; extends FfsClassObject with rule/version info."""

    def __init__(self):
        """Initialize the FFS base fields, then the INF-statement extras."""
        FfsClassObject.__init__(self)
        self.Rule = None
        self.Version = None
        self.Ui = None
        self.InfFileName = None
        self.BuildNum = ''
        self.KeyStringList = []
        self.KeepReloc = None
        self.UseArch = None
## section data in FDF
class SectionClassObject:
    """Base class for all FDF section descriptions; carries only Alignment."""

    def __init__(self):
        self.Alignment = None
## Depex expression section in FDF
class DepexSectionClassObject(SectionClassObject):
    """DEPEX section data: the dependency-expression type, text and state."""

    def __init__(self):
        # Fix: initialize the base class so the inherited Alignment attribute
        # exists.  Every other SectionClassObject subclass in this file calls
        # SectionClassObject.__init__; this one originally did not, leaving
        # its instances without an Alignment attribute.
        SectionClassObject.__init__(self)
        self.DepexType = None
        self.Expression = None
        # True once the expression text has been processed by the consumer.
        self.ExpressionProcessed = False
## Compress section data in FDF
class CompressSectionClassObject(SectionClassObject):
    """Compressed section: a compression type plus nested sections."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.CompType = None
        self.SectionList = []
## Data section data in FDF
class DataSectionClassObject(SectionClassObject):
    """Raw-data section: a section type plus the file that supplies the data."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.SecType = None
        self.SectFileName = None
        self.SectionList = []
        # Note: defaults to True here, unlike the other section classes.
        self.KeepReloc = True
## EFI section data in FDF (leaf section emitted by a rule)
class EfiSectionClassObject(SectionClassObject):
    """Leaf EFI section description used when expanding rules."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.SectionType = None
        self.Optional = False
        self.FileType = None
        self.StringData = None
        self.FileName = None
        self.FileExtension = None
        self.BuildNum = None
        self.KeepReloc = None
## FV image section data in FDF
class FvImageSectionClassObject(SectionClassObject):
    """FV image section: the embedded FV object plus its naming/address info."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.Fv = None
        self.FvName = None
        self.FvFileType = None
        self.FvFileName = None
        self.FvFileExtension = None
        self.FvAddr = None
## GUIDed section data in FDF
class GuidSectionClassObject(SectionClassObject):
    """GUID-defined section: tool GUID, nested sections and processing flags."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.NameGuid = None
        self.SectionList = []
        self.SectionType = None
        self.ProcessRequired = False
        self.AuthStatusValid = False
        self.ExtraHeaderSize = -1
        self.FvAddr = []
        self.FvParentAddr = None
        self.IncludeFvSection = False
## SubType GUID section data in FDF
class SubTypeGuidSectionClassObject(SectionClassObject):
    """Subtype-GUID section: records only the subtype GUID itself."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.SubTypeGuid = None
## UI section data in FDF
class UiSectionClassObject(SectionClassObject):
    """UI section: a display string, either inline or from a file."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.StringData = None
        self.FileName = None
## Version section data in FDF
class VerSectionClassObject(SectionClassObject):
    """Version section: build number plus a string, inline or from a file."""

    def __init__(self):
        SectionClassObject.__init__(self)
        self.BuildNum = None
        self.StringData = None
        self.FileName = None
## Rule data in FDF
class RuleClassObject:
    """Base description of a [Rule] section in an FDF file."""

    def __init__(self):
        self.Arch = None
        # Module type the rule applies to.
        self.ModuleType = None
        self.TemplateName = None
        self.NameGuid = None
        self.Fixed = False
        self.Alignment = None
        self.SectAlignment = None
        self.CheckSum = False
        # FFS file type produced by the rule.
        self.FvFileType = None
        self.KeyStringList = []
        self.KeepReloc = None
## Complex rule data in FDF
class RuleComplexFileClassObject(RuleClassObject):
    """Rule that builds a file from an explicit list of sections."""

    def __init__(self):
        RuleClassObject.__init__(self)
        self.SectionList = []
## Simple rule data in FDF
class RuleSimpleFileClassObject(RuleClassObject):
    """Rule that wraps one named file as a single section."""

    def __init__(self):
        RuleClassObject.__init__(self)
        self.FileName = None
        self.SectionType = ''
        self.FileExtension = None
## File extension rule data in FDF
class RuleFileExtensionClassObject(RuleClassObject):
    """Rule that matches binaries by file extension."""

    def __init__(self):
        RuleClassObject.__init__(self)
        self.FileExtension = None
## Capsule data in FDF
class CapsuleClassObject:
    """Attributes parsed from one [Capsule] section of an FDF file."""

    def __init__(self):
        self.SpecName = None
        self.UiCapsuleName = None
        self.CreateFile = None
        self.GroupIdNumber = None
        # DEFINE variable name -> value
        self.DefineVarDict = {}
        # SET variable name -> value
        self.SetVarDict = {}
        # Token name -> value
        self.TokensDict = {}
        self.CapsuleDataList = []
        self.FmpPayloadList = []
## OptionROM data in FDF
class OptionRomClassObject:
    """Option ROM description: driver name plus the FFS files it bundles."""

    def __init__(self):
        self.DriverName = None
        self.FfsList = []
| edk2-master | BaseTools/Source/Python/CommonDataClass/FdfClass.py |
## @file
# This file is used to define class for data structure used in ECC
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
# Import Modules
#
import Common.EdkLogger as EdkLogger
##
# Static values for data models
#
MODEL_UNKNOWN = 0

# File type models (1xxx)
MODEL_FILE_C = 1001
MODEL_FILE_H = 1002
MODEL_FILE_ASM = 1003
MODEL_FILE_INF = 1011
MODEL_FILE_DEC = 1012
MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
MODEL_FILE_UNI = 1017
MODEL_FILE_OTHERS = 1099

# C identifier models (2xxx).  Note 2018/2017 are deliberately listed in the
# original (non-numeric) order; the values themselves are unchanged.
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
MODEL_IDENTIFIER_COMMENT = 2003
MODEL_IDENTIFIER_PARAMETER = 2004
MODEL_IDENTIFIER_STRUCTURE = 2005
MODEL_IDENTIFIER_VARIABLE = 2006
MODEL_IDENTIFIER_INCLUDE = 2007
MODEL_IDENTIFIER_PREDICATE_EXPRESSION = 2008
MODEL_IDENTIFIER_ENUMERATE = 2009
MODEL_IDENTIFIER_PCD = 2010
MODEL_IDENTIFIER_UNION = 2011
MODEL_IDENTIFIER_MACRO_IFDEF = 2012
MODEL_IDENTIFIER_MACRO_IFNDEF = 2013
MODEL_IDENTIFIER_MACRO_DEFINE = 2014
MODEL_IDENTIFIER_MACRO_ENDIF = 2015
MODEL_IDENTIFIER_MACRO_PROGMA = 2016
MODEL_IDENTIFIER_FUNCTION_CALLING = 2018
MODEL_IDENTIFIER_TYPEDEF = 2017
MODEL_IDENTIFIER_FUNCTION_DECLARATION = 2019
MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION = 2020

# EFI meta-data item models (3xxx)
MODEL_EFI_PROTOCOL = 3001
MODEL_EFI_PPI = 3002
MODEL_EFI_GUID = 3003
MODEL_EFI_LIBRARY_CLASS = 3004
MODEL_EFI_LIBRARY_INSTANCE = 3005
MODEL_EFI_PCD = 3006
MODEL_EFI_SOURCE_FILE = 3007
MODEL_EFI_BINARY_FILE = 3008
MODEL_EFI_SKU_ID = 3009
MODEL_EFI_INCLUDE = 3010
MODEL_EFI_DEPEX = 3011
MODEL_EFI_DEFAULT_STORES = 3012

# PCD type models (4xxx)
MODEL_PCD = 4000
MODEL_PCD_FIXED_AT_BUILD = 4001
MODEL_PCD_PATCHABLE_IN_MODULE = 4002
MODEL_PCD_FEATURE_FLAG = 4003
MODEL_PCD_DYNAMIC_EX = 4004
MODEL_PCD_DYNAMIC_EX_DEFAULT = 4005
MODEL_PCD_DYNAMIC_EX_VPD = 4006
MODEL_PCD_DYNAMIC_EX_HII = 4007
MODEL_PCD_DYNAMIC = 4008
MODEL_PCD_DYNAMIC_DEFAULT = 4009
MODEL_PCD_DYNAMIC_VPD = 4010
MODEL_PCD_DYNAMIC_HII = 4011

# Concrete (non-category) PCD types.
MODEL_PCD_TYPE_LIST = [MODEL_PCD_FIXED_AT_BUILD,
                       MODEL_PCD_PATCHABLE_IN_MODULE,
                       MODEL_PCD_FEATURE_FLAG,
                       MODEL_PCD_DYNAMIC_DEFAULT,
                       MODEL_PCD_DYNAMIC_HII,
                       MODEL_PCD_DYNAMIC_VPD,
                       MODEL_PCD_DYNAMIC_EX_DEFAULT,
                       MODEL_PCD_DYNAMIC_EX_HII,
                       MODEL_PCD_DYNAMIC_EX_VPD
                       ]

# Meta-data file content models (5xxx)
MODEL_META_DATA_HEADER_COMMENT = 5000
MODEL_META_DATA_HEADER = 5001
MODEL_META_DATA_INCLUDE = 5002
MODEL_META_DATA_DEFINE = 5003
MODEL_META_DATA_CONDITIONAL_STATEMENT_IF = 5004
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE = 5005
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF = 5006
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF = 5007
MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR = 5400
MODEL_META_DATA_BUILD_OPTION = 5008
MODEL_META_DATA_COMPONENT = 5009
MODEL_META_DATA_USER_EXTENSION = 5010
MODEL_META_DATA_PACKAGE = 5011
MODEL_META_DATA_NMAKE = 5012
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 5013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMMENT = 5016
MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_META_DATA_SECTION_HEADER = 5100
MODEL_META_DATA_SUBSECTION_HEADER = 5200
MODEL_META_DATA_TAIL_COMMENT = 5300

MODEL_EXTERNAL_DEPENDENCY = 10000

# (name, value) pairs for the models above.  Fix: the original list carried
# 'MODEL_IDENTIFIER_UNION' twice (an exact duplicate pair); the second copy
# has been removed.  NOTE(review): a few models (e.g. MODEL_FILE_UNI) are not
# listed here, matching the original -- confirm before adding them.
MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
              ('MODEL_FILE_C', MODEL_FILE_C),
              ('MODEL_FILE_H', MODEL_FILE_H),
              ('MODEL_FILE_ASM', MODEL_FILE_ASM),
              ('MODEL_FILE_INF', MODEL_FILE_INF),
              ('MODEL_FILE_DEC', MODEL_FILE_DEC),
              ('MODEL_FILE_DSC', MODEL_FILE_DSC),
              ('MODEL_FILE_FDF', MODEL_FILE_FDF),
              ('MODEL_FILE_INC', MODEL_FILE_INC),
              ('MODEL_FILE_CIF', MODEL_FILE_CIF),
              ('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
              ('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
              ('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
              ('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
              ('MODEL_IDENTIFIER_PARAMETER', MODEL_IDENTIFIER_PARAMETER),
              ('MODEL_IDENTIFIER_STRUCTURE', MODEL_IDENTIFIER_STRUCTURE),
              ('MODEL_IDENTIFIER_VARIABLE', MODEL_IDENTIFIER_VARIABLE),
              ('MODEL_IDENTIFIER_INCLUDE', MODEL_IDENTIFIER_INCLUDE),
              ('MODEL_IDENTIFIER_PREDICATE_EXPRESSION', MODEL_IDENTIFIER_PREDICATE_EXPRESSION),
              ('MODEL_IDENTIFIER_ENUMERATE', MODEL_IDENTIFIER_ENUMERATE),
              ('MODEL_IDENTIFIER_PCD', MODEL_IDENTIFIER_PCD),
              ('MODEL_IDENTIFIER_UNION', MODEL_IDENTIFIER_UNION),
              ('MODEL_IDENTIFIER_MACRO_IFDEF', MODEL_IDENTIFIER_MACRO_IFDEF),
              ('MODEL_IDENTIFIER_MACRO_IFNDEF', MODEL_IDENTIFIER_MACRO_IFNDEF),
              ('MODEL_IDENTIFIER_MACRO_DEFINE', MODEL_IDENTIFIER_MACRO_DEFINE),
              ('MODEL_IDENTIFIER_MACRO_ENDIF', MODEL_IDENTIFIER_MACRO_ENDIF),
              ('MODEL_IDENTIFIER_MACRO_PROGMA', MODEL_IDENTIFIER_MACRO_PROGMA),
              ('MODEL_IDENTIFIER_FUNCTION_CALLING', MODEL_IDENTIFIER_FUNCTION_CALLING),
              ('MODEL_IDENTIFIER_TYPEDEF', MODEL_IDENTIFIER_TYPEDEF),
              ('MODEL_IDENTIFIER_FUNCTION_DECLARATION', MODEL_IDENTIFIER_FUNCTION_DECLARATION),
              ('MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION', MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION),
              ('MODEL_EFI_PROTOCOL', MODEL_EFI_PROTOCOL),
              ('MODEL_EFI_PPI', MODEL_EFI_PPI),
              ('MODEL_EFI_GUID', MODEL_EFI_GUID),
              ('MODEL_EFI_LIBRARY_CLASS', MODEL_EFI_LIBRARY_CLASS),
              ('MODEL_EFI_LIBRARY_INSTANCE', MODEL_EFI_LIBRARY_INSTANCE),
              ('MODEL_EFI_PCD', MODEL_EFI_PCD),
              ('MODEL_EFI_SKU_ID', MODEL_EFI_SKU_ID),
              ('MODEL_EFI_INCLUDE', MODEL_EFI_INCLUDE),
              ('MODEL_EFI_DEPEX', MODEL_EFI_DEPEX),
              ('MODEL_EFI_SOURCE_FILE', MODEL_EFI_SOURCE_FILE),
              ('MODEL_EFI_BINARY_FILE', MODEL_EFI_BINARY_FILE),
              ('MODEL_PCD', MODEL_PCD),
              ('MODEL_PCD_FIXED_AT_BUILD', MODEL_PCD_FIXED_AT_BUILD),
              ('MODEL_PCD_PATCHABLE_IN_MODULE', MODEL_PCD_PATCHABLE_IN_MODULE),
              ('MODEL_PCD_FEATURE_FLAG', MODEL_PCD_FEATURE_FLAG),
              ('MODEL_PCD_DYNAMIC_EX', MODEL_PCD_DYNAMIC_EX),
              ('MODEL_PCD_DYNAMIC_EX_DEFAULT', MODEL_PCD_DYNAMIC_EX_DEFAULT),
              ('MODEL_PCD_DYNAMIC_EX_VPD', MODEL_PCD_DYNAMIC_EX_VPD),
              ('MODEL_PCD_DYNAMIC_EX_HII', MODEL_PCD_DYNAMIC_EX_HII),
              ('MODEL_PCD_DYNAMIC', MODEL_PCD_DYNAMIC),
              ('MODEL_PCD_DYNAMIC_DEFAULT', MODEL_PCD_DYNAMIC_DEFAULT),
              ('MODEL_PCD_DYNAMIC_VPD', MODEL_PCD_DYNAMIC_VPD),
              ('MODEL_PCD_DYNAMIC_HII', MODEL_PCD_DYNAMIC_HII),
              ("MODEL_META_DATA_HEADER", MODEL_META_DATA_HEADER),
              ("MODEL_META_DATA_INCLUDE", MODEL_META_DATA_INCLUDE),
              ("MODEL_META_DATA_DEFINE", MODEL_META_DATA_DEFINE),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE", MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR", MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR),
              ("MODEL_META_DATA_BUILD_OPTION", MODEL_META_DATA_BUILD_OPTION),
              ("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
              ('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
              ('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
              ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
              ('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
              ]
## FunctionClass
#
# This class defines a structure of a function found in a source file.
#
class FunctionClass(object):
    """A function parsed from source: header, name, source span and contents.

    Attributes mirror the constructor parameters: ID, Header, Modifier, Name,
    ReturnStatement, Start/End Line/Column, BodyStartLine/Column, the file it
    belongs to (BelongsToFile), the function-name position (FunNameStartLine/
    Column), and the identifiers (IdentifierList) and PCDs (PcdList) it holds.
    """

    def __init__(self, ID = -1, Header = '', Modifier = '', Name = '', ReturnStatement = '', \
                 StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1, \
                 BodyStartLine = -1, BodyStartColumn = -1, BelongsToFile = -1, \
                 IdentifierList = None, PcdList = None, \
                 FunNameStartLine = -1, FunNameStartColumn = -1):
        self.ID = ID
        self.Header = Header
        self.Modifier = Modifier
        self.Name = Name
        self.ReturnStatement = ReturnStatement
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
        self.BodyStartLine = BodyStartLine
        self.BodyStartColumn = BodyStartColumn
        self.BelongsToFile = BelongsToFile
        self.FunNameStartLine = FunNameStartLine
        self.FunNameStartColumn = FunNameStartColumn
        # Fix: the original used mutable default arguments ([]), so every
        # instance built with the defaults shared the SAME list objects.
        # A None sentinel gives each instance its own fresh list.
        self.IdentifierList = [] if IdentifierList is None else IdentifierList
        self.PcdList = [] if PcdList is None else PcdList
## IdentifierClass
#
# This class defines a structure of a variable/identifier found in a source file.
#
class IdentifierClass(object):
    """A single identifier parsed from source.

    Records the identifier's Modifier, Type, Name and Value, its data model
    (one of the MODEL_* constants), the file and function it belongs to, and
    its source span (Start/End Line and Column).
    """

    def __init__(self, ID = -1, Modifier = '', Type = '', Name = '', Value = '', Model = MODEL_UNKNOWN, \
                 BelongsToFile = -1, BelongsToFunction = -1, StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1):
        self.ID = ID
        self.Modifier = Modifier
        self.Type = Type
        self.Name = Name
        self.Value = Value
        self.Model = Model
        self.BelongsToFile = BelongsToFile
        self.BelongsToFunction = BelongsToFunction
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
## PcdClass
#
# This class defines a structure of a PCD found in a source file.
#
class PcdDataClass(object):
    """A PCD reference parsed from source.

    Records the PCD's CName, TokenSpaceGuidCName, Token, DatumType and Model,
    the file/function it belongs to, and its source span.
    """

    def __init__(self, ID = -1, CName = '', TokenSpaceGuidCName = '', Token = '', DatumType = '', Model = MODEL_UNKNOWN, \
                 BelongsToFile = -1, BelongsToFunction = -1, StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1):
        self.ID = ID
        self.CName = CName
        self.TokenSpaceGuidCName = TokenSpaceGuidCName
        self.Token = Token
        self.DatumType = DatumType
        # Fix: the original accepted (and documented) the Model parameter but
        # never stored it, so instances silently lacked a Model attribute.
        self.Model = Model
        self.BelongsToFile = BelongsToFile
        self.BelongsToFunction = BelongsToFunction
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
## FileClass
#
# This class defines a structure of a source file.
#
class FileClass(object):
    """A source file and everything parsed out of it.

    Records the file's Name, ExtName, Path, FullPath, Model and TimeStamp,
    plus the functions, identifiers and PCDs it contains.
    """

    def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
                 FunctionList = None, IdentifierList = None, PcdList = None):
        self.ID = ID
        self.Name = Name
        self.ExtName = ExtName
        self.Path = Path
        self.FullPath = FullPath
        self.Model = Model
        self.TimeStamp = TimeStamp
        # Fix: the original used mutable default arguments ([]), so every
        # instance built with the defaults shared the SAME list objects.
        # A None sentinel gives each instance its own fresh list.
        self.FunctionList = [] if FunctionList is None else FunctionList
        self.IdentifierList = [] if IdentifierList is None else IdentifierList
        self.PcdList = [] if PcdList is None else PcdList
| edk2-master | BaseTools/Source/Python/CommonDataClass/DataClass.py |
## @file
# Python 'CommonDataClass' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/CommonDataClass/__init__.py |
## @file
# This file is used to define common items of class object
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
## SkuInfoClass
#
# SkuInfo item used in Module/Platform/Package files.
#
class SkuInfoClass(object):
    """One SKU's PCD configuration.

    Groups the three storage mechanisms a PCD value can use:
    HII (VariableName/Guid/GuidValue/Offset/Attribute, HiiDefaultValue,
    DefaultStoreDict), VPD (VpdOffset) and plain default (DefaultValue).
    All parameters default to '' except DefaultStore, which uses a None
    sentinel so each instance gets its own dict.
    """

    def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
                 HiiDefaultValue = '', VpdOffset = '', DefaultValue = '', VariableGuidValue = '', VariableAttribute = '', DefaultStore = None):
        self.SkuIdName = SkuIdName
        self.SkuId = SkuId
        #
        # Used by Hii
        #
        if DefaultStore is None:
            DefaultStore = {}
        self.VariableName = VariableName
        self.VariableGuid = VariableGuid
        self.VariableGuidValue = VariableGuidValue
        self.VariableOffset = VariableOffset
        self.HiiDefaultValue = HiiDefaultValue
        self.VariableAttribute = VariableAttribute
        self.DefaultStoreDict = DefaultStore
        #
        # Used by Vpd
        #
        self.VpdOffset = VpdOffset
        #
        # Used by Default
        #
        self.DefaultValue = DefaultValue

    ## Convert the class to a single-line string (trailing commas kept
    #  for compatibility with existing consumers of this format).
    #
    # @retval Rtn Formatted String
    #
    def __str__(self):
        Rtn = 'SkuId = ' + str(self.SkuId) + "," + \
              'SkuIdName = ' + str(self.SkuIdName) + "," + \
              'VariableName = ' + str(self.VariableName) + "," + \
              'VariableGuid = ' + str(self.VariableGuid) + "," + \
              'VariableOffset = ' + str(self.VariableOffset) + "," + \
              'HiiDefaultValue = ' + str(self.HiiDefaultValue) + "," + \
              'VpdOffset = ' + str(self.VpdOffset) + "," + \
              'DefaultValue = ' + str(self.DefaultValue) + ","
        return Rtn

    ## Deep-copy support.
    #
    # Copies every scalar field and gives the copy its own DefaultStoreDict.
    # Fix: the new object is registered in *memo* first, per the copy-module
    # protocol, so shared or cyclic references to this instance resolve to a
    # single copy (the original implementation ignored *memo* entirely).
    #
    def __deepcopy__(self, memo):
        new_sku = SkuInfoClass()
        memo[id(self)] = new_sku
        new_sku.SkuIdName = self.SkuIdName
        new_sku.SkuId = self.SkuId
        new_sku.VariableName = self.VariableName
        new_sku.VariableGuid = self.VariableGuid
        new_sku.VariableGuidValue = self.VariableGuidValue
        new_sku.VariableOffset = self.VariableOffset
        new_sku.HiiDefaultValue = self.HiiDefaultValue
        new_sku.VariableAttribute = self.VariableAttribute
        new_sku.DefaultStoreDict = dict(self.DefaultStoreDict)
        new_sku.VpdOffset = self.VpdOffset
        new_sku.DefaultValue = self.DefaultValue
        return new_sku
| edk2-master | BaseTools/Source/Python/CommonDataClass/CommonClass.py |
## @file
# This file is used to define common Exceptions class used in python tools
#
# Copyright (c) 2011, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
## Exceptions used in Expression
class EvaluationException(Exception):
    """Base error raised while evaluating an expression."""


class BadExpression(EvaluationException):
    """Raised when an expression is malformed and cannot be evaluated."""


class WrnExpression(Exception):
    """Warning-level expression condition (subclasses Exception directly,
    not EvaluationException, matching the original hierarchy)."""


## Exceptions used in macro replacements
class MacroException(Exception):
    """Base error raised during macro replacement."""


class SymbolNotFound(MacroException):
    """Raised when a macro refers to a symbol that is not defined."""
| edk2-master | BaseTools/Source/Python/CommonDataClass/Exceptions.py |
## @file
# Routines for generating Pcd Database
#
# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
from io import BytesIO
from Common.Misc import *
from Common.StringUtils import StringToArray
from struct import pack
from .ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER
from .ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB
from .ValidCheckingInfoObject import GetValidationObject
from Common.VariableAttributes import VariableAttributes
import copy
from struct import unpack
from Common.DataType import *
from Common import GlobalData
from Common import EdkLogger
import Common.LongFilePathOs as os
# Layout version of the generated PCD database.  NOTE(review): presumably
# must stay in sync with the PCD database consumer (PcdDxe/PcdPei driver
# headers) -- confirm before bumping.
DATABASE_VERSION = 7
gPcdDatabaseAutoGenC = TemplateString("""
//
// External PCD database debug information
//
#if 0
${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
/* SkuIdTable */
{ ${BEGIN}${SKUID_VALUE}, ${END} },
${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */
${END}
${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */
${END}
/* VPD */
${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */
${END}
/* ExMapTable */
{
${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} },
${END}
},
/* LocalTokenNumberTable */
{
${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE},
${END}
},
/* GuidTable */
{
${BEGIN} ${GUID_STRUCTURE},
${END}
},
${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */
${END}
${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}] */
{
${VARIABLE_HEAD_VALUE}
},
${END}
/* SkuHead */
{
${BEGIN} offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE}, /* */
offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.SkuHead) /* */
${END}
},
/* StringTable */
${BEGIN} ${STRING_TABLE_VALUE}, /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
${END}
/* SizeTable */
{
${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, ${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */
${END}
},
${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */
${END}
${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */
${END}
${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */
${END}
${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */
${END}
${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */
${END}
${SYSTEM_SKU_ID_VALUE}
};
#endif
""")
## Mapping between PCD driver type and EFI phase
#  Key: the module's PcdIsDriver INF value; value: the phase tag ("PEI"/"DXE")
#  substituted into the autogen templates and database file names.
gPcdPhaseMap = {
    "PEI_PCD_DRIVER" : "PEI",
    "DXE_PCD_DRIVER" : "DXE"
}
# Template for the generated autogen header: the service-driver version macro
# plus a debug-only ("#if 0") C declaration of the phase-specific PCD
# database structures (${PHASE}_PCD_DATABASE_INIT / _UNINIT / _PCD_DATABASE).
gPcdDatabaseAutoGenH = TemplateString("""
#define PCD_${PHASE}_SERVICE_DRIVER_VERSION ${SERVICE_DRIVER_VERSION}
//
// External PCD database debug information
//
#if 0
#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE}
#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE}
#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE}
#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE}
#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER}
#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE}
#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER}
#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE}
#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY}
#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY}
#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY}
#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY}
#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY}
typedef struct {
UINT64 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE];
${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}];
${END}
${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64};
${END}
${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}];
${END}
${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32};
${END}
${BEGIN} VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}];
${END}
DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE];
UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE];
GUID GuidTable[${PHASE}_GUID_TABLE_SIZE];
${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}];
${END}
${BEGIN} VARIABLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}];
${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
${END}
SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE];
${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}];
${END}
${BEGIN} UINT16 ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16};
${END}
${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}];
${END}
${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8};
${END}
${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}];
${END}
${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN};
${END}
${SYSTEM_SKU_ID}
} ${PHASE}_PCD_DATABASE_INIT;
typedef struct {
${PCD_DATABASE_UNINIT_EMPTY}
${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}];
${END}
${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}];
${END}
${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}];
${END}
${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}];
${END}
${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}];
${END}
} ${PHASE}_PCD_DATABASE_UNINIT;
typedef struct {
//GUID Signature; // PcdDataBaseGuid
//UINT32 BuildVersion;
//UINT32 Length;
//SKU_ID SystemSkuId; // Current SkuId value.
//UINT32 LengthForAllSkus; // Length of all SKU PCD DB
//UINT32 UninitDataBaseSize;// Total size for PCD those default value with 0.
//TABLE_OFFSET LocalTokenNumberTableOffset;
//TABLE_OFFSET ExMapTableOffset;
//TABLE_OFFSET GuidTableOffset;
//TABLE_OFFSET StringTableOffset;
//TABLE_OFFSET SizeTableOffset;
//TABLE_OFFSET SkuIdTableOffset;
//TABLE_OFFSET PcdNameTableOffset;
//UINT16 LocalTokenCount; // LOCAL_TOKEN_NUMBER for all
//UINT16 ExTokenCount; // EX_TOKEN_NUMBER for DynamicEx
//UINT16 GuidTableCount; // The Number of Guid in GuidTable
//UINT8 Pad[6];
${PHASE}_PCD_DATABASE_INIT Init;
${PHASE}_PCD_DATABASE_UNINIT Uninit;
} ${PHASE}_PCD_DATABASE;
#define ${PHASE}_NEX_TOKEN_NUMBER (${PHASE}_LOCAL_TOKEN_NUMBER - ${PHASE}_EX_TOKEN_NUMBER)
#endif
""")
# Template used when the platform declares no dynamic PCDs for the phase:
# the same debug-only ("#if 0") structure, with all tables reduced to a
# single zero entry.
gEmptyPcdDatabaseAutoGenC = TemplateString("""
//
// External PCD database debug information
//
#if 0
${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
/* SkuIdTable */
{ 0 },
/* ExMapTable */
{
{0, 0, 0}
},
/* LocalTokenNumberTable */
{
0
},
/* GuidTable */
{
{0x00000000, 0x0000, 0x0000, {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}}
},
/* StringTable */
{ 0 },
/* SkuHead */
{
0, 0
},
/* SizeTable */
{
0, 0
},
${SYSTEM_SKU_ID_VALUE}
};
#endif
""")
## DbItemList
#
# The class holds the Pcd database items. ItemSize if not zero should match the item datum type in the C structure.
# When the structure is changed, remember to check the ItemSize and the related PackStr in PackData()
# RawDataList is the RawData that may need some kind of calculation or transformation,
# the DataList corresponds to the data that need to be written to database. If DataList is not present, then RawDataList
# will be written to the database.
#
## DbItemList
#
#  Holds one table of PCD database items.  A non-zero ItemSize means every
#  item occupies exactly ItemSize bytes; ItemSize == 0 means items are
#  variable-length (each raw item's len() is its size) and, when packed,
#  each item is a GUID structure string.  RawDataList holds data that may
#  still need transformation; DataList, when used by subclasses, holds the
#  bytes that actually go into the database.
class DbItemList:
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        # ListSize caches the computed total size; 0 means "not yet computed".
        self.ItemSize = ItemSize
        self.DataList = DataList or []
        self.RawDataList = RawDataList or []
        self.ListSize = 0

    def GetInterOffset(self, Index):
        """Return the byte offset of item Index from the start of this table."""
        if self.ItemSize != 0:
            # Fixed-size items: offset is a simple multiple.
            return self.ItemSize * Index
        # Variable-length items: accumulate the sizes of all preceding items.
        assert Index < len(self.RawDataList)
        return sum(len(self.RawDataList[i]) for i in range(Index))

    def GetListSize(self):
        """Return (and cache) the total packed size of the table in bytes."""
        if not self.ListSize:
            if not self.RawDataList:
                self.ListSize = 0
            elif self.ItemSize == 0:
                LastIndex = len(self.RawDataList) - 1
                self.ListSize = self.GetInterOffset(LastIndex) + len(self.RawDataList[LastIndex])
            else:
                self.ListSize = self.ItemSize * len(self.RawDataList)
        return self.ListSize

    def PackData(self):
        """Pack every raw item into a byte buffer.

        Items (or the elements of list/tuple items) are packed with the
        struct code matching ItemSize; when no code exists for ItemSize
        (the GUID table), each item is a C GUID structure string.
        """
        def _PackGuid(GuidStructureValue):
            # Convert "{0x...,...}" C structure text to a registry-format
            # string, then pack its dash-separated fields.
            GuidString = GuidStructureStringToGuidString(GuidStructureValue)
            return PackGUID(GuidString.split('-'))

        PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
        Buffer = bytearray()
        for Entry in self.RawDataList:
            Elements = Entry if type(Entry) in (list, tuple) else (Entry,)
            for Element in Elements:
                if PackStr:
                    Buffer += pack(PackStr, GetIntegerValue(Element))
                else:
                    Buffer += _PackGuid(Element)
        return Buffer
## DbExMapTblItemList
#
# The class holds the ExMap table
#
## DbExMapTblItemList
#
#  Holds the DynamicEx mapping table.  Each row is a triple
#  (ExTokenNumber, LocalTokenNumber, GuidIndex) packed as "=LHH":
#  standard-size UINT32 + UINT16 + UINT16 with no alignment padding.
class DbExMapTblItemList (DbItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        DbItemList.__init__(self, ItemSize, DataList, RawDataList)

    def PackData(self):
        """Pack every (ExToken, LocalToken, GuidIndex) row into bytes."""
        Buffer = bytearray()
        for Row in self.RawDataList:
            Buffer += pack("=LHH",
                           GetIntegerValue(Row[0]),
                           GetIntegerValue(Row[1]),
                           GetIntegerValue(Row[2]))
        return Buffer
## DbComItemList
#
# The DbComItemList is a special kind of DbItemList in case that the size of the List can not be computed by the
# ItemSize multiply the ItemCount.
#
## DbComItemList
#
#  DbItemList variant whose rows are themselves lists of fixed-size items,
#  so the table size cannot be computed as ItemSize * row count.
class DbComItemList (DbItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        DbItemList.__init__(self, ItemSize, DataList, RawDataList)

    def GetInterOffset(self, Index):
        """Return the byte offset of row Index from the start of the table."""
        # Composite tables always have fixed-size elements; the only
        # variable-length table (the string table) never uses this class.
        assert self.ItemSize != 0
        assert Index < len(self.RawDataList)
        return sum(len(Row) for Row in self.RawDataList[:Index]) * self.ItemSize

    def GetListSize(self):
        """Return (and cache) the total packed size of all rows in bytes."""
        if self.ListSize:
            return self.ListSize
        assert self.ItemSize != 0
        if self.RawDataList:
            LastIndex = len(self.RawDataList) - 1
            self.ListSize = (self.GetInterOffset(LastIndex)
                             + len(self.RawDataList[LastIndex]) * self.ItemSize)
        else:
            self.ListSize = 0
        return self.ListSize

    def PackData(self):
        """Pack every element of every row with the struct code for ItemSize."""
        PackStr = PACK_CODE_BY_SIZE[self.ItemSize]
        Buffer = bytearray()
        for Row in self.RawDataList:
            for Value in Row:
                # A value may itself be a list/tuple of scalars.
                Elements = Value if type(Value) in (list, tuple) else (Value,)
                for Element in Elements:
                    Buffer += pack(PackStr, GetIntegerValue(Element))
        return Buffer
## DbVariableTableItemList
#
# The class holds the Variable header value table
#
## DbVariableTableItemList
#
#  Holds the table of variable header records; each record packs as
#  "=LLHHLHH" (two UINT32, two UINT16, one UINT32, two UINT16), with the
#  final UINT16 always written as GetIntegerValue(0).
class DbVariableTableItemList (DbComItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        DbComItemList.__init__(self, ItemSize, DataList, RawDataList)

    def PackData(self):
        """Pack every per-SKU variable header entry into bytes."""
        Buffer = bytearray()
        for Row in self.RawDataList:
            for Entry in Row:
                Fields = [GetIntegerValue(Entry[i]) for i in range(6)]
                # The trailing UINT16 is a zero pad field.
                Buffer += pack("=LLHHLHH", *Fields, GetIntegerValue(0))
        return Buffer
## DbStringHeadTableItemList
#
#  Holds the STRING_HEAD offset table.  A row may be a single offset or a
#  list/tuple of per-SKU offsets, so both the per-row size and the total
#  size are computed row by row.
class DbStringHeadTableItemList(DbItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        DbItemList.__init__(self, ItemSize, DataList, RawDataList)

    def _RowSize(self, Row):
        # A list/tuple row contributes one fixed-size slot per element;
        # any other row contributes exactly one slot.
        if type(Row) in (list, tuple):
            return len(Row) * self.ItemSize
        return self.ItemSize

    def GetInterOffset(self, Index):
        """Return the byte offset of row Index from the start of the table."""
        if self.ItemSize == 0:
            # Variable-length rows: accumulate actual lengths.
            assert Index < len(self.RawDataList)
            return sum(len(self.RawDataList[i]) for i in range(Index))
        return sum(self._RowSize(Row) for Row in self.RawDataList[:Index])

    def GetListSize(self):
        """Return (and cache) the total packed size of the table in bytes."""
        if self.ListSize:
            return self.ListSize
        if not self.RawDataList:
            self.ListSize = 0
        elif self.ItemSize == 0:
            LastIndex = len(self.RawDataList) - 1
            self.ListSize = self.GetInterOffset(LastIndex) + len(self.RawDataList[LastIndex])
        else:
            self.ListSize = sum(self._RowSize(Row) for Row in self.RawDataList)
        return self.ListSize
## DbSkuHeadTableItemList
#
# The class holds the Sku header value table
#
## DbSkuHeadTableItemList
#
#  Holds the SKU header table; each row is a pair of offsets packed as
#  two standard-size UINT32 values ("=LL").
class DbSkuHeadTableItemList (DbItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        DbItemList.__init__(self, ItemSize, DataList, RawDataList)

    def PackData(self):
        """Pack every (offset, offset) row into bytes."""
        Buffer = bytearray()
        for Row in self.RawDataList:
            Buffer += pack("=LL",
                           GetIntegerValue(Row[0]),
                           GetIntegerValue(Row[1]))
        return Buffer
## DbSizeTableItemList
#
# The class holds the size table
#
## DbSizeTableItemList
#
#  Holds the SIZE_INFO table.  Each row is (MaxSize, [CurrentSizes...]);
#  every value is stored as one ItemSize-wide slot (UINT16, "=H").
class DbSizeTableItemList (DbItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None):
        DbItemList.__init__(self, ItemSize, DataList, RawDataList)

    def GetListSize(self):
        """Return the total size: one slot for the maximum length plus one
        slot per current-length entry, for every row."""
        EntryCount = sum(1 + len(Row[1]) for Row in self.RawDataList)
        return EntryCount * self.ItemSize

    def PackData(self):
        """Pack each row as MaxSize followed by its current-length entries."""
        Buffer = bytearray()
        for Row in self.RawDataList:
            Buffer += pack("=H", GetIntegerValue(Row[0]))
            for Current in Row[1]:
                Buffer += pack("=H", GetIntegerValue(Current))
        return Buffer
## DbStringItemList
#
# The class holds the string table
#
## DbStringItemList
#
#  Holds the string table.  Each raw entry is a list of byte values; it is
#  zero-padded out to the declared maximum length from LenList, so offsets
#  and sizes are always computed from LenList, never from the raw data.
class DbStringItemList (DbComItemList):
    def __init__(self, ItemSize, DataList=None, RawDataList=None, LenList=None):
        # The DataList argument is ignored: the effective DataList is
        # rebuilt here from RawDataList padded per LenList.
        RawDataList = RawDataList if RawDataList is not None else []
        LenList = LenList if LenList is not None else []
        assert len(RawDataList) == len(LenList)
        Padded = []
        for Raw, Length in zip(RawDataList, LenList):
            # Declared maximum length must cover the actual data.
            assert Length >= len(Raw)
            Padded.append(list(Raw) + [0] * (Length - len(Raw)))
        self.LenList = LenList
        DbComItemList.__init__(self, ItemSize, Padded, RawDataList)

    def GetInterOffset(self, Index):
        """Return the offset of string Index, using declared lengths."""
        assert Index < len(self.LenList)
        return sum(self.LenList[:Index])

    def GetListSize(self):
        """Return (and cache) the total declared size of all strings."""
        if not self.ListSize:
            self.ListSize = sum(self.LenList)
        return self.ListSize

    def PackData(self):
        """Pack the zero-padded data (not the raw, unpadded data)."""
        self.RawDataList = self.DataList
        return DbComItemList.PackData(self)
## Find the index in two lists where the items match the keys separately
#
#  @param Key1   The key searched for in List1
#  @param List1  The list in which Key1 is searched
#  @param Key2   The key searched for in List2
#  @param List2  The list in which Key2 is searched
#
#  @retval Index The position where List1[Index] == Key1 and
#                List2[Index] == Key2, or -1 if no such position exists
#
def GetMatchedIndex(Key1, List1, Key2, List2):
    StartPos = 0
    while StartPos < len(List1):
        try:
            Index = List1.index(Key1, StartPos)
        except ValueError:
            # Key1 does not occur at or after StartPos; previously this
            # propagated as an unhandled ValueError instead of returning -1.
            return -1
        if List2[Index] == Key2:
            return Index
        # Keep scanning past this occurrence of Key1.
        StartPos = Index + 1
    return -1
## convert StringArray like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
#  to List like [0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00]
#
#  @param StringArray A string array like {0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
#
#  @retval A list object of integer items
#
def StringArrayToList(StringArray):
    # Parse the comma-separated integer tokens directly rather than eval()-ing
    # the string, so malformed input cannot execute arbitrary code.
    Body = StringArray[1:-1]
    # int(token, 0) accepts 0x/0o/0b prefixed as well as plain decimal values.
    return [int(Token, 0) for Token in (T.strip() for T in Body.split(',')) if Token]
## Convert TokenType String like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII" to TokenType value
#
#  @param TokenType A TokenType string like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII"
#
#  @retval An integer representation of the TokenType
#
def GetTokenTypeValue(TokenType):
    TokenTypeDict = {
        "PCD_TYPE_SHIFT": 28,
        "PCD_TYPE_DATA": (0x0 << 28),
        "PCD_TYPE_HII": (0x8 << 28),
        "PCD_TYPE_VPD": (0x4 << 28),
        # "PCD_TYPE_SKU_ENABLED":(0x2 << 28),
        "PCD_TYPE_STRING": (0x1 << 28),
        "PCD_DATUM_TYPE_SHIFT": 24,
        "PCD_DATUM_TYPE_POINTER": (0x0 << 24),
        "PCD_DATUM_TYPE_UINT8": (0x1 << 24),
        "PCD_DATUM_TYPE_UINT16": (0x2 << 24),
        "PCD_DATUM_TYPE_UINT32": (0x4 << 24),
        "PCD_DATUM_TYPE_UINT64": (0x8 << 24),
        "PCD_DATUM_TYPE_SHIFT2": 20,
        "PCD_DATUM_TYPE_UINT8_BOOLEAN": (0x1 << 20 | 0x1 << 24),
    }
    # TOKEN_TYPE expressions are '|'-joined names from the table above, so
    # OR the looked-up values directly instead of eval()-ing the string;
    # an unknown name now raises KeyError instead of being executed.
    Value = 0
    for Name in TokenType.split('|'):
        Value |= TokenTypeDict[Name.strip()]
    return Value
## construct the external Pcd database using data from Dict
#
#  @param Dict  A dictionary containing the Pcd related tables
#
#  @retval Buffer  A byte stream of the packed Pcd database
#
def BuildExDataBase(Dict):
    # init Db items: wrap each raw table from Dict in the Db*ItemList class
    # that knows its element size and packing rules.
    InitValueUint64 = Dict['INIT_DB_VALUE_UINT64']
    DbInitValueUint64 = DbComItemList(8, RawDataList = InitValueUint64)
    VardefValueUint64 = Dict['VARDEF_DB_VALUE_UINT64']
    DbVardefValueUint64 = DbItemList(8, RawDataList = VardefValueUint64)
    InitValueUint32 = Dict['INIT_DB_VALUE_UINT32']
    DbInitValueUint32 = DbComItemList(4, RawDataList = InitValueUint32)
    VardefValueUint32 = Dict['VARDEF_DB_VALUE_UINT32']
    DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
    VpdHeadValue = Dict['VPD_DB_VALUE']
    DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
    ExMapTable = list(zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX']))
    DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
    LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
    DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
    GuidTable = Dict['GUID_STRUCTURE']
    DbGuidTable = DbItemList(16, RawDataList = GuidTable)
    StringHeadValue = Dict['STRING_DB_VALUE']
    # DbItemList to DbStringHeadTableItemList
    DbStringHeadValue = DbStringHeadTableItemList(4, RawDataList = StringHeadValue)
    VariableTable = Dict['VARIABLE_DB_VALUE']
    DbVariableTable = DbVariableTableItemList(20, RawDataList = VariableTable)
    NumberOfSkuEnabledPcd = GetIntegerValue(Dict['SKU_HEAD_SIZE'])
    Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
    StringTableValue = Dict['STRING_TABLE_DB_VALUE']
    # when calcute the offset, should use StringTableLen instead of StringTableValue, as string maximum len may be different with actual len
    StringTableLen = Dict['STRING_TABLE_LENGTH']
    DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
    PcdTokenTable = Dict['PCD_TOKENSPACE']
    PcdTokenLen = Dict['PCD_TOKENSPACE_LENGTH']
    PcdTokenTableValue = [StringArrayToList(x) for x in Dict['PCD_TOKENSPACE']]
    DbPcdTokenTable = DbStringItemList(0, RawDataList = PcdTokenTableValue, LenList = PcdTokenLen)
    PcdCNameTable = Dict['PCD_CNAME']
    PcdCNameLen = Dict['PCD_CNAME_LENGTH']
    PcdCNameTableValue = [StringArrayToList(x) for x in Dict['PCD_CNAME']]
    DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
    PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
    DbPcdNameOffsetTable = DbItemList(4, RawDataList = PcdNameOffsetTable)
    SizeTableValue = list(zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH']))
    DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
    InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
    DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
    VardefValueUint16 = Dict['VARDEF_DB_VALUE_UINT16']
    DbVardefValueUint16 = DbItemList(2, RawDataList = VardefValueUint16)
    InitValueUint8 = Dict['INIT_DB_VALUE_UINT8']
    DbInitValueUint8 = DbComItemList(1, RawDataList = InitValueUint8)
    VardefValueUint8 = Dict['VARDEF_DB_VALUE_UINT8']
    DbVardefValueUint8 = DbItemList(1, RawDataList = VardefValueUint8)
    InitValueBoolean = Dict['INIT_DB_VALUE_BOOLEAN']
    DbInitValueBoolean = DbComItemList(1, RawDataList = InitValueBoolean)
    VardefValueBoolean = Dict['VARDEF_DB_VALUE_BOOLEAN']
    DbVardefValueBoolean = DbItemList(1, RawDataList = VardefValueBoolean)
    SkuidValue = Dict['SKUID_VALUE']
    DbSkuidValue = DbItemList(8, RawDataList = SkuidValue)
    # Unit Db Items
    UnInitValueUint64 = Dict['UNINIT_GUID_DECL_UINT64']
    DbUnInitValueUint64 = DbItemList(8, RawDataList = UnInitValueUint64)
    UnInitValueUint32 = Dict['UNINIT_GUID_DECL_UINT32']
    DbUnInitValueUint32 = DbItemList(4, RawDataList = UnInitValueUint32)
    UnInitValueUint16 = Dict['UNINIT_GUID_DECL_UINT16']
    DbUnInitValueUint16 = DbItemList(2, RawDataList = UnInitValueUint16)
    UnInitValueUint8 = Dict['UNINIT_GUID_DECL_UINT8']
    DbUnInitValueUint8 = DbItemList(1, RawDataList = UnInitValueUint8)
    UnInitValueBoolean = Dict['UNINIT_GUID_DECL_BOOLEAN']
    DbUnInitValueBoolean = DbItemList(1, RawDataList = UnInitValueBoolean)
    PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
    # The three lists below are parallel and define the layout order of the
    # database; all offset computations walk them in lock step.
    DbNameTotle = ["SkuidValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
               "LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable", "VariableTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
               "SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
               "VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
    DbTotal = [SkuidValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
               LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable, VariableTable, StringTableLen, PcdTokenTable, PcdCNameTable,
               SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
               VardefValueBoolean, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
    DbItemTotal = [DbSkuidValue, DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
               DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable, DbVariableTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
               DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
               DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
    # VardefValueBoolean is the last table in the init table items
    InitTableNum = DbNameTotle.index("VardefValueBoolean") + 1
    # The FixedHeader length of the PCD_DATABASE_INIT, from Signature to Pad
    FixedHeaderLen = 80
    # Get offset of SkuId table in the database
    SkuIdTableOffset = FixedHeaderLen
    for DbIndex in range(len(DbTotal)):
        if DbTotal[DbIndex] is SkuidValue:
            break
        SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
    # Get offset of SkuValue table in the database
    # Fix up the LocalTokenNumberTable, SkuHeader table
    for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
        DbIndex = 0
        DbOffset = FixedHeaderLen
        for DbIndex in range(len(DbTotal)):
            if DbTotal[DbIndex] is Table:
                DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
                break
            DbOffset += DbItemTotal[DbIndex].GetListSize()
            if DbIndex + 1 == InitTableNum:
                # The uninitialized section is 8-byte aligned after the
                # initialized tables.
                if DbOffset % 8:
                    DbOffset += (8 - DbOffset % 8)
        else:
            # for-else: executed only if the referenced table was not found.
            assert(False)
        TokenTypeValue = Dict['TOKEN_TYPE'][LocalTokenNumberTableIndex]
        TokenTypeValue = GetTokenTypeValue(TokenTypeValue)
        # Merge the token-type flag bits into the high bits of the offset.
        LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
        # if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
    # resolve variable table offset
    for VariableEntries in VariableTable:
        skuindex = 0
        for VariableEntryPerSku in VariableEntries:
            (VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
            DbIndex = 0
            DbOffset = FixedHeaderLen
            for DbIndex in range(len(DbTotal)):
                if DbTotal[DbIndex] is VariableRefTable:
                    DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
                    break
                DbOffset += DbItemTotal[DbIndex].GetListSize()
                if DbIndex + 1 == InitTableNum:
                    if DbOffset % 8:
                        DbOffset += (8 - DbOffset % 8)
            else:
                # for-else: the referenced default-value table must exist.
                assert(False)
            if isinstance(VariableRefTable[0], list):
                # Multi-SKU defaults: step to this SKU's 4-byte slot.
                DbOffset += skuindex * 4
                skuindex += 1
            if DbIndex >= InitTableNum:
                # Variable defaults must live in the initialized section.
                assert(False)
            VarAttr, VarProp = VariableAttributes.GetVarAttributes(VariableAttribute)
            # Rewrite the entry in the packed VARIABLE_HEAD field order.
            VariableEntryPerSku[:] = (VariableHeadStringIndex, DbOffset, VariableHeadGuidIndex, SKUVariableOffset, VarAttr, VarProp)
    # calculate various table offset now
    DbTotalLength = FixedHeaderLen
    for DbIndex in range(len(DbItemTotal)):
        if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
            LocalTokenNumberTableOffset = DbTotalLength
        elif DbItemTotal[DbIndex] is DbExMapTable:
            ExMapTableOffset = DbTotalLength
        elif DbItemTotal[DbIndex] is DbGuidTable:
            GuidTableOffset = DbTotalLength
        elif DbItemTotal[DbIndex] is DbStringTableLen:
            StringTableOffset = DbTotalLength
        elif DbItemTotal[DbIndex] is DbSizeTableValue:
            SizeTableOffset = DbTotalLength
        elif DbItemTotal[DbIndex] is DbSkuidValue:
            SkuIdTableOffset = DbTotalLength
        elif DbItemTotal[DbIndex] is DbPcdNameOffsetTable:
            DbPcdNameOffset = DbTotalLength
        DbTotalLength += DbItemTotal[DbIndex].GetListSize()
    if not Dict['PCD_INFO_FLAG']:
        # PCD name info disabled: the header advertises no name table.
        DbPcdNameOffset = 0
    LocalTokenCount = GetIntegerValue(Dict['LOCAL_TOKEN_NUMBER'])
    ExTokenCount = GetIntegerValue(Dict['EX_TOKEN_NUMBER'])
    GuidTableCount = GetIntegerValue(Dict['GUID_TABLE_SIZE'])
    SystemSkuId = GetIntegerValue(Dict['SYSTEM_SKU_ID_VALUE'])
    Pad = 0xDA
    UninitDataBaseSize = 0
    for Item in (DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean):
        UninitDataBaseSize += Item.GetListSize()
    if (DbTotalLength - UninitDataBaseSize) % 8:
        DbTotalLength += (8 - (DbTotalLength - UninitDataBaseSize) % 8)
    # Construct the database buffer: fixed header first (Signature GUID,
    # version, length, SKU id, table offsets, counts, pad bytes).
    Guid = "{0x3c7d193c, 0x682c, 0x4c14, 0xa6, 0x8f, 0x55, 0x2d, 0xea, 0x4f, 0x43, 0x7e}"
    Guid = StringArrayToList(Guid)
    Buffer = PackByteFormatGUID(Guid)
    b = pack("=L", DATABASE_VERSION)
    Buffer += b
    # Length covers only the initialized portion of the database.
    b = pack('=L', DbTotalLength - UninitDataBaseSize)
    Buffer += b
    b = pack('=Q', SystemSkuId)
    Buffer += b
    # LengthForAllSkus placeholder; patched later (see CreatePcdDataBase).
    b = pack('=L', 0)
    Buffer += b
    b = pack('=L', UninitDataBaseSize)
    Buffer += b
    b = pack('=L', LocalTokenNumberTableOffset)
    Buffer += b
    b = pack('=L', ExMapTableOffset)
    Buffer += b
    b = pack('=L', GuidTableOffset)
    Buffer += b
    b = pack('=L', StringTableOffset)
    Buffer += b
    b = pack('=L', SizeTableOffset)
    Buffer += b
    b = pack('=L', SkuIdTableOffset)
    Buffer += b
    b = pack('=L', DbPcdNameOffset)
    Buffer += b
    b = pack('=H', LocalTokenCount)
    Buffer += b
    b = pack('=H', ExTokenCount)
    Buffer += b
    b = pack('=H', GuidTableCount)
    Buffer += b
    # Six pad bytes (0xDA) complete the 80-byte fixed header.
    b = pack('=B', Pad)
    Buffer += b
    Buffer += b
    Buffer += b
    Buffer += b
    Buffer += b
    Buffer += b
    Index = 0
    for Item in DbItemTotal:
        Index +=1
        packdata = Item.PackData()
        for i in range(len(packdata)):
            Buffer += packdata[i:i + 1]
        if Index == InitTableNum:
            # Initialized tables are done: align to 8 bytes with pad bytes
            # and stop — the uninitialized tables occupy no file space.
            if len(Buffer) % 8:
                for num in range(8 - len(Buffer) % 8):
                    b = pack('=B', Pad)
                    Buffer += b
            break
    return Buffer
## Create code for PCD database
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH):
    if Info.PcdIsDriver == "":
        return
    if Info.PcdIsDriver not in gPcdPhaseMap:
        EdkLogger.error("build", AUTOGEN_ERROR,
                        "Not supported PcdIsDriver type:%s" % Info.PcdIsDriver,
                        ExtraData="[%s]" % str(Info))
    # The PEI header content is generated and appended unconditionally.
    HeaderGen, CodeGen, DbBuffer = NewCreatePcdDatabasePhaseSpecificAutoGen(Info.PlatformInfo, 'PEI')
    AutoGenH.Append(HeaderGen.String)
    Phase = gPcdPhaseMap[Info.PcdIsDriver]
    if Phase == 'PEI':
        AutoGenC.Append(CodeGen.String)
    elif Phase == 'DXE':
        # Regenerate for DXE and emit both the header and the C code.
        HeaderGen, CodeGen, DbBuffer = NewCreatePcdDatabasePhaseSpecificAutoGen(Info.PlatformInfo, Phase)
        AutoGenH.Append(HeaderGen.String)
        AutoGenC.Append(CodeGen.String)
    # For binary modules the raw database goes under the platform FV
    # directory; otherwise it goes into the module's output directory.
    if Info.IsBinaryModule:
        DbFileName = os.path.join(Info.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, Phase + "PcdDataBase.raw")
    else:
        DbFileName = os.path.join(Info.OutputDir, Phase + "PcdDataBase.raw")
    DbStream = BytesIO()
    DbStream.write(DbBuffer)
    SaveFileOnChange(DbFileName, DbStream.getvalue(), True)
def CreatePcdDataBase(PcdDBData):
    """Merge the per-SKU databases into one buffer.

    PcdDBData maps (SkuName, SkuId) -> (raw buffer, tuple of byte values).
    The DEFAULT SKU buffer is emitted in full; every other SKU is appended
    as a delta record listing only the bytes that differ from DEFAULT.
    """
    delta = {}
    # Every SKU's database must be byte-for-byte the same length as DEFAULT,
    # otherwise per-byte deltas are meaningless.
    for skuname, skuid in PcdDBData:
        if len(PcdDBData[(skuname, skuid)][1]) != len(PcdDBData[(TAB_DEFAULT, "0")][1]):
            EdkLogger.error("build", AUTOGEN_ERROR, "The size of each sku in one pcd are not same")
    # Collect (index, new byte, hex string) for each byte differing from DEFAULT.
    for skuname, skuid in PcdDBData:
        if skuname == TAB_DEFAULT:
            continue
        delta[(skuname, skuid)] = [(index, data, hex(data)) for index, data in enumerate(PcdDBData[(skuname, skuid)][1]) if PcdDBData[(skuname, skuid)][1][index] != PcdDBData[(TAB_DEFAULT, "0")][1][index]]
    databasebuff = PcdDBData[(TAB_DEFAULT, "0")][0]
    for skuname, skuid in delta:
        # 8 byte align
        if len(databasebuff) % 8 > 0:
            for i in range(8 - (len(databasebuff) % 8)):
                databasebuff += pack("=B", 0)
        # Delta record header: SKU id (UINT64), reserved 0 (UINT64), record
        # length (UINT32) = 8 + 8 + 4 header bytes plus 4 bytes per delta.
        databasebuff += pack('=Q', int(skuid))
        databasebuff += pack('=Q', 0)
        databasebuff += pack('=L', 8+8+4+4*len(delta[(skuname, skuid)]))
        for item in delta[(skuname, skuid)]:
            # Each delta is 4 bytes: the index packed little-endian with its
            # most significant byte overwritten by the new byte value, i.e.
            # a 3-byte offset followed by a 1-byte value.
            databasebuff += pack("=L", item[0])
            databasebuff = databasebuff[:-1] + pack("=B", item[1])
    # Patch the total length (all SKUs) into bytes 32..35 of the fixed
    # header (the LengthForAllSkus field left as 0 by BuildExDataBase).
    totallen = len(databasebuff)
    totallenbuff = pack("=L", totallen)
    newbuffer = databasebuff[:32]
    for i in range(4):
        newbuffer += totallenbuff[i:i+1]
    for i in range(36, totallen):
        newbuffer += databasebuff[i:i+1]
    return newbuffer
def CreateVarCheckBin(VarCheckTab):
    """Return the variable-check table of the default SKU only."""
    DefaultKey = (TAB_DEFAULT, "0")
    return VarCheckTab[DefaultKey]
def CreateAutoGen(PcdDriverAutoGenData):
    """Merge per-SKU autogen fragments into one C TemplateString.

    Returns (AutoGenH, AutoGenC): AutoGenC concatenates every SKU's C
    fragment tagged with a "//SKUID:" comment; AutoGenH is taken from the
    last SKU visited by the loop.
    """
    MergedC = TemplateString()
    for SkuName, SkuId in PcdDriverAutoGenData:
        MergedC.Append("//SKUID: %s" % SkuName)
        MergedC.Append(PcdDriverAutoGenData[(SkuName, SkuId)][1].String)
    # NOTE(review): relies on SkuName/SkuId still being bound to the last
    # dictionary key after the loop; raises if the input is empty.
    return (PcdDriverAutoGenData[(SkuName, SkuId)][0], MergedC)
def NewCreatePcdDatabasePhaseSpecificAutoGen(Platform, Phase):
    """Generate the phase-specific PCD database and autogen code per SKU,
    then merge the per-SKU results.

    @param Platform  The PlatformAutoGen object
    @param Phase     'PEI' or 'DXE'
    @retval (AutoGenH, AutoGenC, database buffer with SKU deltas)
    """
    def prune_sku(pcd, skuname):
        # Return a copy of pcd reduced to a single SKU, with isinit set to
        # "INIT" unless every numeric SKU default is 0 with no HII variable
        # (then "UNINIT", so the value needs no storage in the database).
        new_pcd = copy.deepcopy(pcd)
        new_pcd.SkuInfoList = {skuname:pcd.SkuInfoList[skuname]}
        new_pcd.isinit = 'INIT'
        if new_pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
            for skuobj in pcd.SkuInfoList.values():
                if skuobj.DefaultValue:
                    defaultvalue = int(skuobj.DefaultValue, 16) if skuobj.DefaultValue.upper().startswith("0X") else int(skuobj.DefaultValue, 10)
                    if defaultvalue != 0:
                        new_pcd.isinit = "INIT"
                        break
                elif skuobj.VariableName:
                    new_pcd.isinit = "INIT"
                    break
            else:
                # for-else: no SKU had a nonzero default or a variable name.
                new_pcd.isinit = "UNINIT"
        return new_pcd
    DynamicPcds = Platform.DynamicPcdList
    # One entry per (SkuName, SkuId) pair seen across all dynamic PCDs.
    DynamicPcdSet_Sku = {(SkuName, skuobj.SkuId):[] for pcd in DynamicPcds for (SkuName, skuobj) in pcd.SkuInfoList.items() }
    for skuname, skuid in DynamicPcdSet_Sku:
        DynamicPcdSet_Sku[(skuname, skuid)] = [prune_sku(pcd, skuname) for pcd in DynamicPcds]
    PcdDBData = {}
    PcdDriverAutoGenData = {}
    VarCheckTableData = {}
    if DynamicPcdSet_Sku:
        for skuname, skuid in DynamicPcdSet_Sku:
            AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdSet_Sku[(skuname, skuid)], Phase)
            # Keep both the raw buffer and a per-byte tuple; the latter is
            # used by CreatePcdDataBase for the SKU delta comparison.
            final_data = ()
            for item in range(len(PcdDbBuffer)):
                final_data += unpack("B", PcdDbBuffer[item:item+1])
            PcdDBData[(skuname, skuid)] = (PcdDbBuffer, final_data)
            PcdDriverAutoGenData[(skuname, skuid)] = (AdditionalAutoGenH, AdditionalAutoGenC)
            VarCheckTableData[(skuname, skuid)] = VarCheckTab
        if Platform.Platform.VarCheckFlag:
            dest = os.path.join(Platform.BuildDir, TAB_FV_DIRECTORY)
            VarCheckTable = CreateVarCheckBin(VarCheckTableData)
            VarCheckTable.dump(dest, Phase)
        AdditionalAutoGenH, AdditionalAutoGenC = CreateAutoGen(PcdDriverAutoGenData)
    else:
        # No dynamic PCDs: generate the empty database for the default SKU.
        AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer, VarCheckTab = CreatePcdDatabasePhaseSpecificAutoGen (Platform, {}, Phase)
        final_data = ()
        for item in range(len(PcdDbBuffer)):
            final_data += unpack("B", PcdDbBuffer[item:item + 1])
        PcdDBData[(TAB_DEFAULT, "0")] = (PcdDbBuffer, final_data)
    return AdditionalAutoGenH, AdditionalAutoGenC, CreatePcdDataBase(PcdDBData)
## Create PCD database in DXE or PEI phase
#
# Walks the dynamic PCD list and fills a template substitution dictionary
# (GUID table, string table, size table, SKU data, local/EX token maps),
# then renders the phase-specific PCD database AutoGen code.
#
# @param Platform The platform object
# @param DynamicPcdList The dynamic PCDs to place into this database
# @param Phase 'PEI' or 'DXE'; only PCDs of this phase are emitted
# (the other phase only affects the token counters)
# @retval tuple (AutoGenH, AutoGenC, Buffer, VarCheckTab): the two
# TemplateString objects for header/C code, the packed
# database buffer and the variable check table container
#
def CreatePcdDatabasePhaseSpecificAutoGen (Platform, DynamicPcdList, Phase):
AutoGenC = TemplateString()
AutoGenH = TemplateString()
# Template defaults describing an empty database; the entries are
# overwritten below as real PCD content is collected.
Dict = {
'PHASE' : Phase,
'SERVICE_DRIVER_VERSION' : DATABASE_VERSION,
'GUID_TABLE_SIZE' : '1U',
'STRING_TABLE_SIZE' : '1U',
'SKUID_TABLE_SIZE' : '1U',
'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '0U',
'LOCAL_TOKEN_NUMBER' : '0U',
'EXMAPPING_TABLE_SIZE' : '1U',
'EX_TOKEN_NUMBER' : '0U',
'SIZE_TABLE_SIZE' : '2U',
'SKU_HEAD_SIZE' : '1U',
'GUID_TABLE_EMPTY' : 'TRUE',
'STRING_TABLE_EMPTY' : 'TRUE',
'SKUID_TABLE_EMPTY' : 'TRUE',
'DATABASE_EMPTY' : 'TRUE',
'EXMAP_TABLE_EMPTY' : 'TRUE',
'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is empty */',
'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
'SYSTEM_SKU_ID_VALUE' : '0U'
}
SkuObj = Platform.Platform.SkuIdMgr
Dict['SYSTEM_SKU_ID_VALUE'] = 0 if SkuObj.SkuUsageType == SkuObj.SINGLE else Platform.Platform.SkuIds[SkuObj.SystemSkuId][0]
Dict['PCD_INFO_FLAG'] = Platform.Platform.PcdInfoFlag
# Pre-create the per-datum-type and per-init-state collection lists that
# the main loop appends to.
for DatumType in TAB_PCD_NUMERIC_TYPES_VOID:
Dict['VARDEF_CNAME_' + DatumType] = []
Dict['VARDEF_GUID_' + DatumType] = []
Dict['VARDEF_SKUID_' + DatumType] = []
Dict['VARDEF_VALUE_' + DatumType] = []
Dict['VARDEF_DB_VALUE_' + DatumType] = []
for Init in ['INIT', 'UNINIT']:
Dict[Init+'_CNAME_DECL_' + DatumType] = []
Dict[Init+'_GUID_DECL_' + DatumType] = []
Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
Dict[Init+'_VALUE_' + DatumType] = []
Dict[Init+'_DB_VALUE_'+DatumType] = []
for Type in ['STRING_HEAD', 'VPD_HEAD', 'VARIABLE_HEAD']:
Dict[Type + '_CNAME_DECL'] = []
Dict[Type + '_GUID_DECL'] = []
Dict[Type + '_NUMSKUS_DECL'] = []
Dict[Type + '_VALUE'] = []
Dict['STRING_DB_VALUE'] = []
Dict['VPD_DB_VALUE'] = []
Dict['VARIABLE_DB_VALUE'] = []
Dict['STRING_TABLE_INDEX'] = []
Dict['STRING_TABLE_LENGTH'] = []
Dict['STRING_TABLE_CNAME'] = []
Dict['STRING_TABLE_GUID'] = []
Dict['STRING_TABLE_VALUE'] = []
Dict['STRING_TABLE_DB_VALUE'] = []
Dict['SIZE_TABLE_CNAME'] = []
Dict['SIZE_TABLE_GUID'] = []
Dict['SIZE_TABLE_CURRENT_LENGTH'] = []
Dict['SIZE_TABLE_MAXIMUM_LENGTH'] = []
Dict['EXMAPPING_TABLE_EXTOKEN'] = []
Dict['EXMAPPING_TABLE_LOCAL_TOKEN'] = []
Dict['EXMAPPING_TABLE_GUID_INDEX'] = []
Dict['GUID_STRUCTURE'] = []
Dict['SKUID_VALUE'] = [0] # init Dict length
Dict['VARDEF_HEADER'] = []
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = []
Dict['VARIABLE_DB_VALUE'] = []
Dict['PCD_TOKENSPACE'] = []
Dict['PCD_CNAME'] = []
Dict['PCD_TOKENSPACE_LENGTH'] = []
Dict['PCD_CNAME_LENGTH'] = []
Dict['PCD_TOKENSPACE_OFFSET'] = []
Dict['PCD_CNAME_OFFSET'] = []
Dict['PCD_TOKENSPACE_MAP'] = []
Dict['PCD_NAME_OFFSET'] = []
Dict['PCD_ORDER_TOKEN_NUMBER_MAP'] = {}
# Maps STRING_HEAD index -> STRING_DB_VALUE index for PCD_TYPE_STRING PCDs.
PCD_STRING_INDEX_MAP = {}
# Running counters and tables filled by the two passes below.
StringTableIndex = 0
StringTableSize = 0
NumberOfLocalTokens = 0
NumberOfPeiLocalTokens = 0
NumberOfDxeLocalTokens = 0
NumberOfExTokens = 0
NumberOfSizeItems = 0
NumberOfSkuEnabledPcd = 0
GuidList = []
VarCheckTab = VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER()
i = 0
# Process PCDs in token-number order; datum types outside the known set
# are degraded to VOID*.
ReorderedDynPcdList = GetOrderedDynamicPcdList(DynamicPcdList, Platform.PcdTokenNumber)
for item in ReorderedDynPcdList:
if item.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
item.DatumType = TAB_VOID
# Pass 1: collect per-PCD / per-SKU data (string, size and GUID tables,
# HII variable heads, VPD heads, default value lists).
for Pcd in ReorderedDynPcdList:
VoidStarTypeCurrSize = []
i += 1
CName = Pcd.TokenCName
TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
for PcdItem in GlobalData.MixedPcd:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
CName = PcdItem[0]
EdkLogger.debug(EdkLogger.DEBUG_3, "PCD: %s %s (%s : %s)" % (CName, TokenSpaceGuidCName, Pcd.Phase, Phase))
# Count tokens of both phases (needed for DXE token-number rebasing),
# but only PCDs of the requested phase contribute database content.
if Pcd.Phase == 'PEI':
NumberOfPeiLocalTokens += 1
if Pcd.Phase == 'DXE':
NumberOfDxeLocalTokens += 1
if Pcd.Phase != Phase:
continue
#
# TODO: need GetGuidValue() definition
#
TokenSpaceGuidStructure = Pcd.TokenSpaceGuidValue
TokenSpaceGuid = GuidStructureStringToGuidValueName(TokenSpaceGuidStructure)
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
if TokenSpaceGuid not in GuidList:
GuidList.append(TokenSpaceGuid)
Dict['GUID_STRUCTURE'].append(TokenSpaceGuidStructure)
NumberOfExTokens += 1
ValueList = []
DbValueList = []
StringHeadOffsetList = []
StringDbOffsetList = []
VpdHeadOffsetList = []
VpdDbOffsetList = []
VariableHeadValueList = []
VariableDbValueList = []
Pcd.InitString = 'UNINIT'
if Pcd.DatumType == TAB_VOID:
if Pcd.Type not in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
Pcd.TokenTypeList = ['PCD_TYPE_STRING']
else:
Pcd.TokenTypeList = []
elif Pcd.DatumType == 'BOOLEAN':
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8_BOOLEAN']
else:
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_' + Pcd.DatumType]
if len(Pcd.SkuInfoList) > 1:
NumberOfSkuEnabledPcd += 1
SkuIdIndex = 1
VariableHeadList = []
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
if SkuId is None or SkuId == '':
continue
SkuIdIndex += 1
# HII PCD: record variable name/GUID and build the variable head entry.
if len(Sku.VariableName) > 0:
VariableGuidStructure = Sku.VariableGuidValue
VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
if Platform.Platform.VarCheckFlag:
var_check_obj = VAR_CHECK_PCD_VARIABLE_TAB(VariableGuidStructure, StringToArray(Sku.VariableName))
try:
var_check_obj.push_back(GetValidationObject(Pcd, Sku.VariableOffset))
VarAttr, _ = VariableAttributes.GetVarAttributes(Sku.VariableAttribute)
var_check_obj.SetAttributes(VarAttr)
var_check_obj.UpdateSize()
VarCheckTab.push_back(var_check_obj)
except Exception:
ValidInfo = ''
if Pcd.validateranges:
ValidInfo = Pcd.validateranges[0]
if Pcd.validlists:
ValidInfo = Pcd.validlists[0]
if ValidInfo:
EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
"The PCD '%s.%s' Validation information defined in DEC file has incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
ExtraData = "[%s]" % str(ValidInfo))
else:
EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
"The PCD '%s.%s' Validation information defined in DEC file has incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
Pcd.TokenTypeList.append('PCD_TYPE_HII')
Pcd.InitString = 'INIT'
# Store all variable names of one HII PCD under different SKU to stringTable
# and calculate the VariableHeadStringIndex
VariableNameStructure = StringToArray(Sku.VariableName)
# Make pointer of VaraibleName(HII PCD) 2 bytes aligned
VariableNameStructureBytes = VariableNameStructure.lstrip("{").rstrip("}").split(",")
if len(VariableNameStructureBytes) % 2:
VariableNameStructure = "{%s,0x00}" % ",".join(VariableNameStructureBytes)
if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
Dict['STRING_TABLE_CNAME'].append(CName)
Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
if StringTableIndex == 0:
Dict['STRING_TABLE_INDEX'].append('')
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
VarNameSize = len(VariableNameStructure.replace(',', ' ').split())
Dict['STRING_TABLE_LENGTH'].append(VarNameSize )
Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
VarStringDbOffsetList = []
VarStringDbOffsetList.append(StringTableSize)
Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
StringTableIndex += 1
StringTableSize += len(VariableNameStructure.replace(',', ' ').split())
VariableHeadStringIndex = 0
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
VariableHeadList.append(VariableHeadStringIndex)
VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
# store VariableGuid to GuidTable and get the VariableHeadGuidIndex
if VariableGuid not in GuidList:
GuidList.append(VariableGuid)
Dict['GUID_STRUCTURE'].append(VariableGuidStructure)
VariableHeadGuidIndex = GuidList.index(VariableGuid)
if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s), %dU, %sU' %
(VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
VariableHeadGuidIndex, Sku.VariableOffset))
else:
VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s), %dU, %sU' %
(VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
VariableHeadGuidIndex, Sku.VariableOffset))
Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
Dict['VARDEF_SKUID_'+Pcd.DatumType].append(SkuIdIndex)
if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
Dict['VARDEF_VALUE_' + Pcd.DatumType].append("%s_%s[%d]" % (Pcd.TokenCName, TokenSpaceGuid, SkuIdIndex))
else:
#
# ULL (for UINT64) or U(other integer type) should be append to avoid
# warning under linux building environment.
#
Dict['VARDEF_DB_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
if Pcd.DatumType == TAB_UINT64:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
elif Pcd.DatumType == "BOOLEAN":
if eval(Sku.HiiDefaultValue) in [1, 0]:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(str(eval(Sku.HiiDefaultValue)) + "U")
else:
Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
# construct the VariableHeader value
if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s)' %
(VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
Phase, CName, TokenSpaceGuid))
# the Pcd default value will be filled later on
VariableOffset = len(Dict['STRING_DB_VALUE'])
VariableRefTable = Dict['STRING_DB_VALUE']
else:
VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
(VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
Phase, CName, TokenSpaceGuid, SkuIdIndex))
# the Pcd default value was filled before
VariableOffset = len(Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]) - 1
VariableRefTable = Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]
VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable, Sku.VariableAttribute])
# VPD PCD: record the VPD offset; a VOID* VPD value also occupies the
# size table (its current size equals its MAX size).
elif Sku.VpdOffset != '':
Pcd.TokenTypeList.append('PCD_TYPE_VPD')
Pcd.InitString = 'INIT'
VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
VpdDbOffsetList.append(Sku.VpdOffset)
# Also add the VOID* string of VPD PCD to SizeTable
if Pcd.DatumType == TAB_VOID:
NumberOfSizeItems += 1
# For VPD type of PCD, its current size is equal to its MAX size.
VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
continue
# Default-value handling (non-HII, non-VPD): VOID* goes to the string
# table, scalar values to the per-datum-type value lists.
if Pcd.DatumType == TAB_VOID:
Pcd.TokenTypeList.append('PCD_TYPE_STRING')
Pcd.InitString = 'INIT'
if Sku.HiiDefaultValue != '' and Sku.DefaultValue == '':
Sku.DefaultValue = Sku.HiiDefaultValue
if Sku.DefaultValue != '':
NumberOfSizeItems += 1
Dict['STRING_TABLE_CNAME'].append(CName)
Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
if StringTableIndex == 0:
Dict['STRING_TABLE_INDEX'].append('')
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
# Size computation depends on literal form: L"..." (UCS-2),
# "..." (ASCII + NUL) or {...} (byte array).
if Sku.DefaultValue[0] == 'L':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(DefaultValueBinStructure.replace(',', ' ').split())
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '"':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(Sku.DefaultValue) - 2 + 1
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '{':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
Size = len(Sku.DefaultValue.split(","))
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
StringDbOffsetList.append(StringTableSize)
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
if MaxDatumSize < Size:
if Pcd.MaxSizeUserSet:
EdkLogger.error("build", AUTOGEN_ERROR,
"The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
ExtraData="[%s]" % str(Platform))
else:
MaxDatumSize = Size
else:
MaxDatumSize = Size
# String table entries are kept 2-byte aligned.
StringTabLen = MaxDatumSize
if StringTabLen % 2:
StringTabLen += 1
if Sku.VpdOffset == '':
VoidStarTypeCurrSize.append(str(Size) + 'U')
Dict['STRING_TABLE_LENGTH'].append(StringTabLen)
StringTableIndex += 1
StringTableSize += (StringTabLen)
else:
if "PCD_TYPE_HII" not in Pcd.TokenTypeList:
Pcd.TokenTypeList.append('PCD_TYPE_DATA')
if Sku.DefaultValue == 'TRUE':
Pcd.InitString = 'INIT'
else:
Pcd.InitString = Pcd.isinit
#
# For UNIT64 type PCD's value, ULL should be append to avoid
# warning under linux building environment.
#
if Pcd.DatumType == TAB_UINT64:
ValueList.append(Sku.DefaultValue + "ULL")
elif Pcd.DatumType in (TAB_UINT32, TAB_UINT16, TAB_UINT8):
ValueList.append(Sku.DefaultValue + "U")
elif Pcd.DatumType == "BOOLEAN":
if Sku.DefaultValue in ["1", "0"]:
ValueList.append(Sku.DefaultValue + "U")
else:
ValueList.append(Sku.DefaultValue)
DbValueList.append(Sku.DefaultValue)
# Emit the per-PCD head/value declarations from the lists collected in
# the SKU loop above.
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
if Pcd.DatumType == TAB_VOID:
Dict['SIZE_TABLE_CNAME'].append(CName)
Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
Dict['VARIABLE_HEAD_GUID_DECL'].append(TokenSpaceGuid)
Dict['VARIABLE_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
Dict['VARIABLE_HEAD_VALUE'].append('{ %s }\n' % ' },\n { '.join(VariableHeadValueList))
Dict['VARDEF_HEADER'].append('_Variable_Header')
Dict['VARIABLE_DB_VALUE'].append(VariableDbValueList)
else:
Dict['VARDEF_HEADER'].append('')
if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
Dict['VPD_HEAD_CNAME_DECL'].append(CName)
Dict['VPD_HEAD_GUID_DECL'].append(TokenSpaceGuid)
Dict['VPD_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
Dict['VPD_HEAD_VALUE'].append('{ %s }' % ' }, { '.join(VpdHeadOffsetList))
Dict['VPD_DB_VALUE'].append(VpdDbOffsetList)
if 'PCD_TYPE_STRING' in Pcd.TokenTypeList:
Dict['STRING_HEAD_CNAME_DECL'].append(CName)
Dict['STRING_HEAD_GUID_DECL'].append(TokenSpaceGuid)
Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
Dict['STRING_DB_VALUE'].append(StringDbOffsetList)
PCD_STRING_INDEX_MAP[len(Dict['STRING_HEAD_CNAME_DECL']) -1 ] = len(Dict['STRING_DB_VALUE']) -1
if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
Dict[Pcd.InitString+'_NUMSKUS_DECL_'+Pcd.DatumType].append(len(Pcd.SkuInfoList))
if Pcd.InitString == 'UNINIT':
Dict['PCD_DATABASE_UNINIT_EMPTY'] = ''
else:
Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))
Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType].append(DbValueList)
# Pass 2 setup: size the token tables for the requested phase.
if Phase == 'PEI':
NumberOfLocalTokens = NumberOfPeiLocalTokens
if Phase == 'DXE':
NumberOfLocalTokens = NumberOfDxeLocalTokens
Dict['TOKEN_INIT'] = ['' for x in range(NumberOfLocalTokens)]
Dict['TOKEN_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
Dict['TOKEN_GUID'] = ['' for x in range(NumberOfLocalTokens)]
Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
Dict['PCD_CNAME_LENGTH'] = [0 for x in range(NumberOfLocalTokens)]
SkuEnablePcdIndex = 0
# Pass 2: fill the token tables, indexed by the generated (0-based,
# phase-rebased) token number.
for Pcd in ReorderedDynPcdList:
CName = Pcd.TokenCName
TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
if Pcd.Phase != Phase:
continue
TokenSpaceGuid = GuidStructureStringToGuidValueName(Pcd.TokenSpaceGuidValue) #(Platform.PackageList, TokenSpaceGuidCName))
GeneratedTokenNumber = Platform.PcdTokenNumber[CName, TokenSpaceGuidCName] - 1
if Phase == 'DXE':
GeneratedTokenNumber -= NumberOfPeiLocalTokens
if len(Pcd.SkuInfoList) > 1:
Dict['PCD_ORDER_TOKEN_NUMBER_MAP'][GeneratedTokenNumber] = SkuEnablePcdIndex
SkuEnablePcdIndex += 1
for PcdItem in GlobalData.MixedPcd:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
CName = PcdItem[0]
EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))
#
# following four Dict items hold the information for LocalTokenNumberTable
#
Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Init'
if Pcd.InitString == 'UNINIT':
Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Uninit'
Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
if Platform.Platform.PcdInfoFlag:
TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
Dict['PCD_TOKENSPACE'].append(TokenSpaceGuidCNameArray)
Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCNameArray.split(",")) )
Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
CNameBinArray = StringToArray('"' + CName + '"' )
Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
# search the Offset and Table, used by LocalTokenNumberTableOffset
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
# Find index by CName, TokenSpaceGuid
Offset = GetMatchedIndex(CName, Dict['VARIABLE_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VARIABLE_HEAD_GUID_DECL'])
assert(Offset != -1)
Table = Dict['VARIABLE_DB_VALUE']
if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
Offset = GetMatchedIndex(CName, Dict['VPD_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VPD_HEAD_GUID_DECL'])
assert(Offset != -1)
Table = Dict['VPD_DB_VALUE']
if 'PCD_TYPE_STRING' in Pcd.TokenTypeList and 'PCD_TYPE_HII' not in Pcd.TokenTypeList:
# Find index by CName, TokenSpaceGuid
Offset = GetMatchedIndex(CName, Dict['STRING_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['STRING_HEAD_GUID_DECL'])
Offset = PCD_STRING_INDEX_MAP[Offset]
assert(Offset != -1)
Table = Dict['STRING_DB_VALUE']
if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
# need to store whether it is in init table or not
Offset = GetMatchedIndex(CName, Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType], TokenSpaceGuid, Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType])
assert(Offset != -1)
if Pcd.InitString == 'UNINIT':
Table = Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType]
else:
Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'][GeneratedTokenNumber] = (Offset, Table)
#
# Update VARDEF_HEADER
#
if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
else:
Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
if Phase == 'DXE':
GeneratedTokenNumber += NumberOfPeiLocalTokens
#
# Per, PCD architecture specification, PCD Token Number is 1 based and 0 is defined as invalid token number.
# For each EX type PCD, a PCD Token Number is assigned. When the
# PCD Driver/PEIM map EX_GUID and EX_TOKEN_NUMBER to the PCD Token Number,
# the non-EX Protocol/PPI interface can be called to get/set the value. This assumption is made by
# Pcd Driver/PEIM in MdeModulePkg.
# Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before being inserted
# to the EXMAPPING_TABLE.
#
Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
Dict['EXMAPPING_TABLE_GUID_INDEX'].append(str(GuidList.index(TokenSpaceGuid)) + 'U')
# PcdInfo mode: compute name/tokenspace offsets inside the string table.
if Platform.Platform.PcdInfoFlag:
for index in range(len(Dict['PCD_TOKENSPACE_MAP'])):
TokenSpaceIndex = StringTableSize
for i in range(Dict['PCD_TOKENSPACE_MAP'][index]):
TokenSpaceIndex += Dict['PCD_TOKENSPACE_LENGTH'][i]
Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
for index in range(len(Dict['PCD_TOKENSPACE'])):
StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
StringTableIndex += 1
for index in range(len(Dict['PCD_CNAME'])):
Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
Dict['PCD_NAME_OFFSET'].append(StringTableSize)
StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
StringTableIndex += 1
# Finalize table sizes; empty tables get a single placeholder entry so
# the generated C structures stay valid.
if GuidList != []:
Dict['GUID_TABLE_EMPTY'] = 'FALSE'
Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
else:
Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')]
if StringTableIndex == 0:
Dict['STRING_TABLE_INDEX'].append('')
Dict['STRING_TABLE_LENGTH'].append(1)
Dict['STRING_TABLE_CNAME'].append('')
Dict['STRING_TABLE_GUID'].append('')
Dict['STRING_TABLE_VALUE'].append('{ 0 }')
else:
Dict['STRING_TABLE_EMPTY'] = 'FALSE'
Dict['STRING_TABLE_SIZE'] = str(StringTableSize) + 'U'
if Dict['SIZE_TABLE_CNAME'] == []:
Dict['SIZE_TABLE_CNAME'].append('')
Dict['SIZE_TABLE_GUID'].append('')
Dict['SIZE_TABLE_CURRENT_LENGTH'].append(['0U'])
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
if NumberOfLocalTokens != 0:
Dict['DATABASE_EMPTY'] = 'FALSE'
Dict['LOCAL_TOKEN_NUMBER_TABLE_SIZE'] = NumberOfLocalTokens
Dict['LOCAL_TOKEN_NUMBER'] = NumberOfLocalTokens
if NumberOfExTokens != 0:
Dict['EXMAP_TABLE_EMPTY'] = 'FALSE'
Dict['EXMAPPING_TABLE_SIZE'] = str(NumberOfExTokens) + 'U'
Dict['EX_TOKEN_NUMBER'] = str(NumberOfExTokens) + 'U'
else:
Dict['EXMAPPING_TABLE_EXTOKEN'].append('0U')
Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append('0U')
Dict['EXMAPPING_TABLE_GUID_INDEX'].append('0U')
if NumberOfSizeItems != 0:
Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
if NumberOfSkuEnabledPcd != 0:
Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
# SKUID_VALUE[0] carries the count of SKU ids that follow it.
for AvailableSkuNumber in SkuObj.SkuIdNumberSet:
if AvailableSkuNumber not in Dict['SKUID_VALUE']:
Dict['SKUID_VALUE'].append(AvailableSkuNumber)
Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
if NumberOfLocalTokens == 0:
AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
else:
#
# Update Size Table to the right order, it should be same with LocalTokenNumberTable
#
SizeCNameTempList = []
SizeGuidTempList = []
SizeCurLenTempList = []
SizeMaxLenTempList = []
ReOrderFlag = True
if len(Dict['SIZE_TABLE_CNAME']) == 1:
if not (Dict['SIZE_TABLE_CNAME'][0] and Dict['SIZE_TABLE_GUID'][0]):
ReOrderFlag = False
if ReOrderFlag:
for Count in range(len(Dict['TOKEN_CNAME'])):
for Count1 in range(len(Dict['SIZE_TABLE_CNAME'])):
if Dict['TOKEN_CNAME'][Count] == Dict['SIZE_TABLE_CNAME'][Count1] and \
Dict['TOKEN_GUID'][Count] == Dict['SIZE_TABLE_GUID'][Count1]:
SizeCNameTempList.append(Dict['SIZE_TABLE_CNAME'][Count1])
SizeGuidTempList.append(Dict['SIZE_TABLE_GUID'][Count1])
SizeCurLenTempList.append(Dict['SIZE_TABLE_CURRENT_LENGTH'][Count1])
SizeMaxLenTempList.append(Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count1])
for Count in range(len(Dict['SIZE_TABLE_CNAME'])):
Dict['SIZE_TABLE_CNAME'][Count] = SizeCNameTempList[Count]
Dict['SIZE_TABLE_GUID'][Count] = SizeGuidTempList[Count]
Dict['SIZE_TABLE_CURRENT_LENGTH'][Count] = SizeCurLenTempList[Count]
Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count] = SizeMaxLenTempList[Count]
AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))
# print Phase
Buffer = BuildExDataBase(Dict)
return AutoGenH, AutoGenC, Buffer, VarCheckTab
## Arrange dynamic PCDs by their assigned token number.
#
# @param DynamicPcdList List of PCD objects to order
# @param PcdTokenNumberList Map of (TokenCName, TokenSpaceGuidCName) to the
# 1-based token number assigned to that PCD
# @retval list PCDs placed at index (token number - 1); slots whose
# PCD has no assigned token number remain None
#
def GetOrderedDynamicPcdList(DynamicPcdList, PcdTokenNumberList):
    OrderedList = [None] * len(DynamicPcdList)
    for Pcd in DynamicPcdList:
        PcdKey = (Pcd.TokenCName, Pcd.TokenSpaceGuidCName)
        if PcdKey in PcdTokenNumberList:
            # Token numbers are 1-based; slot 0 holds token number 1.
            OrderedList[PcdTokenNumberList[PcdKey] - 1] = Pcd
    return OrderedList
| edk2-master | BaseTools/Source/Python/AutoGen/GenPcdDb.py |
## @file
# Routines for generating AutoGen.h and AutoGen.c
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
from __future__ import absolute_import
import string
import collections
import struct
from Common import EdkLogger
from Common import GlobalData
from Common.BuildToolError import *
from Common.DataType import *
from Common.Misc import *
from Common.StringUtils import StringToArray
from .StrGather import *
from .GenPcdDb import CreatePcdDatabaseCode
from .IdfClassObject import *
## PCD type string
#
# Maps a PCD item type to the infix used when composing generated PcdLib
# macro/function names: FeatureFlag and FixedAtBuild share "FixedAtBuild",
# patchable PCDs use "BinaryPatch", and all dynamic variants use an empty
# string (they go through the bare Pcd* runtime interface).
gItemTypeStringDatabase = {
TAB_PCDS_FEATURE_FLAG : TAB_PCDS_FIXED_AT_BUILD,
TAB_PCDS_FIXED_AT_BUILD : TAB_PCDS_FIXED_AT_BUILD,
TAB_PCDS_PATCHABLE_IN_MODULE: 'BinaryPatch',
TAB_PCDS_DYNAMIC : '',
TAB_PCDS_DYNAMIC_DEFAULT : '',
TAB_PCDS_DYNAMIC_VPD : '',
TAB_PCDS_DYNAMIC_HII : '',
TAB_PCDS_DYNAMIC_EX : '',
TAB_PCDS_DYNAMIC_EX_DEFAULT : '',
TAB_PCDS_DYNAMIC_EX_VPD : '',
TAB_PCDS_DYNAMIC_EX_HII : '',
}
## Datum size
# Three parallel maps from a PCD datum type to the size token used in
# generated names; they differ only in how BOOLEAN and VOID* are spelled
# (plain C names, AutoGen.h macro suffixes, and PcdLib function suffixes).
gDatumSizeStringDatabase = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'BOOLEAN',TAB_VOID:'8'}
gDatumSizeStringDatabaseH = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'BOOL',TAB_VOID:'PTR'}
gDatumSizeStringDatabaseLib = {TAB_UINT8:'8',TAB_UINT16:'16',TAB_UINT32:'32',TAB_UINT64:'64','BOOLEAN':'Bool',TAB_VOID:'Ptr'}
## AutoGen File Header Templates
# Banner placed at the top of every generated AutoGen.h/AutoGen.c.
gAutoGenHeaderString = TemplateString("""\
/**
DO NOT EDIT
FILE auto-generated
Module name:
${FileName}
Abstract: Auto-generated ${FileName} for building module or library.
**/
""")
# Opens the AutoGen.h include guard (file name + module GUID keep it unique).
gAutoGenHPrologueString = TemplateString("""
#ifndef _${File}_${Guid}
#define _${File}_${Guid}
""")
# Opens the extern "C" wrapper for C++ consumers of AutoGen.h.
gAutoGenHCppPrologueString = """\
#ifdef __cplusplus
extern "C" {
#endif
"""
# Closes the extern "C" wrapper and the include guard opened above.
gAutoGenHEpilogueString = """
#ifdef __cplusplus
}
#endif
#endif
"""
## PEI Core Entry Point Templates
gPeiCoreEntryPointPrototype = TemplateString("""
${BEGIN}
VOID
EFIAPI
${Function} (
IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
IN VOID *Context
);
${END}
""")
gPeiCoreEntryPointString = TemplateString("""
${BEGIN}
VOID
EFIAPI
ProcessModuleEntryPointList (
IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
IN VOID *Context
)
{
${Function} (SecCoreData, PpiList, Context);
}
${END}
""")
## DXE Core Entry Point Templates
gDxeCoreEntryPointPrototype = TemplateString("""
${BEGIN}
VOID
EFIAPI
${Function} (
IN VOID *HobStart
);
${END}
""")
gDxeCoreEntryPointString = TemplateString("""
${BEGIN}
VOID
EFIAPI
ProcessModuleEntryPointList (
IN VOID *HobStart
)
{
${Function} (HobStart);
}
${END}
""")
## PEIM Entry Point Templates
gPeimEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_PEI_FILE_HANDLE FileHandle,
IN CONST EFI_PEI_SERVICES **PeiServices
);
${END}
""")
# List indexed by entry-point count: [0] no entry point (return EFI_SUCCESS),
# [1] single entry point called directly, [2] multiple entry points called
# in turn with their statuses combined.
gPeimEntryPointString = [
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_PEI_FILE_HANDLE FileHandle,
IN CONST EFI_PEI_SERVICES **PeiServices
)
{
return EFI_SUCCESS;
}
"""),
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
${BEGIN}
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_PEI_FILE_HANDLE FileHandle,
IN CONST EFI_PEI_SERVICES **PeiServices
)
{
return ${Function} (FileHandle, PeiServices);
}
${END}
"""),
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_PEI_FILE_HANDLE FileHandle,
IN CONST EFI_PEI_SERVICES **PeiServices
)
{
EFI_STATUS Status;
EFI_STATUS CombinedStatus;
CombinedStatus = EFI_LOAD_ERROR;
${BEGIN}
Status = ${Function} (FileHandle, PeiServices);
if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
CombinedStatus = Status;
}
${END}
return CombinedStatus;
}
""")
]
## SMM_CORE Entry Point Templates
gSmmCoreEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
);
${END}
""")
gSmmCoreEntryPointString = TemplateString("""
${BEGIN}
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
return ${Function} (ImageHandle, SystemTable);
}
${END}
""")
## MM_CORE_STANDALONE Entry Point Templates
gMmCoreStandaloneEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN VOID *HobStart
);
${END}
""")
gMmCoreStandaloneEntryPointString = TemplateString("""
${BEGIN}
const UINT32 _gMmRevision = ${PiSpecVersion};
VOID
EFIAPI
ProcessModuleEntryPointList (
IN VOID *HobStart
)
{
${Function} (HobStart);
}
${END}
""")
## MM_STANDALONE Entry Point Templates
gMmStandaloneEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_MM_SYSTEM_TABLE *MmSystemTable
);
${END}
""")
# Same indexing convention as gPeimEntryPointString: 0 / 1 / many entry points.
gMmStandaloneEntryPointString = [
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_MM_SYSTEM_TABLE *MmSystemTable
)
{
return EFI_SUCCESS;
}
"""),
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
${BEGIN}
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_MM_SYSTEM_TABLE *MmSystemTable
)
{
return ${Function} (ImageHandle, MmSystemTable);
}
${END}
"""),
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gMmRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_MM_SYSTEM_TABLE *MmSystemTable
)
{
EFI_STATUS Status;
EFI_STATUS CombinedStatus;
CombinedStatus = EFI_LOAD_ERROR;
${BEGIN}
Status = ${Function} (ImageHandle, MmSystemTable);
if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
CombinedStatus = Status;
}
${END}
return CombinedStatus;
}
""")
]
## DXE SMM Entry Point Templates
gDxeSmmEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
);
${END}
""")
# Index 0: no entry point; index 1: one or more entry points dispatched via
# SetJump/LongJump so ExitDriver can unwind back to the dispatcher.
gDxeSmmEntryPointString = [
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
return EFI_SUCCESS;
}
"""),
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
static EFI_STATUS mDriverEntryPointStatus;
VOID
EFIAPI
ExitDriver (
IN EFI_STATUS Status
)
{
if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
mDriverEntryPointStatus = Status;
}
LongJump (&mJumpContext, (UINTN)-1);
ASSERT (FALSE);
}
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
mDriverEntryPointStatus = EFI_LOAD_ERROR;
${BEGIN}
if (SetJump (&mJumpContext) == 0) {
ExitDriver (${Function} (ImageHandle, SystemTable));
ASSERT (FALSE);
}
${END}
return mDriverEntryPointStatus;
}
""")
]
## UEFI Driver Entry Point Templates
gUefiDriverEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
);
${END}
""")
# Indexed by entry-point count: [0] none, [1] single (ExitDriver exits via
# gBS->Exit), [2] multiple (SetJump/LongJump dispatch, statuses combined).
gUefiDriverEntryPointString = [
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
return EFI_SUCCESS;
}
"""),
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
${BEGIN}
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
return ${Function} (ImageHandle, SystemTable);
}
${END}
VOID
EFIAPI
ExitDriver (
IN EFI_STATUS Status
)
{
if (EFI_ERROR (Status)) {
ProcessLibraryDestructorList (gImageHandle, gST);
}
gBS->Exit (gImageHandle, Status, 0, NULL);
}
"""),
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
static EFI_STATUS mDriverEntryPointStatus;
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
mDriverEntryPointStatus = EFI_LOAD_ERROR;
${BEGIN}
if (SetJump (&mJumpContext) == 0) {
ExitDriver (${Function} (ImageHandle, SystemTable));
ASSERT (FALSE);
}
${END}
return mDriverEntryPointStatus;
}
VOID
EFIAPI
ExitDriver (
IN EFI_STATUS Status
)
{
if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
mDriverEntryPointStatus = Status;
}
LongJump (&mJumpContext, (UINTN)-1);
ASSERT (FALSE);
}
""")
]
## UEFI Application Entry Point Templates
gUefiApplicationEntryPointPrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
);
${END}
""")
gUefiApplicationEntryPointString = [
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
return EFI_SUCCESS;
}
"""),
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
${BEGIN}
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
return ${Function} (ImageHandle, SystemTable);
}
${END}
VOID
EFIAPI
ExitDriver (
IN EFI_STATUS Status
)
{
if (EFI_ERROR (Status)) {
ProcessLibraryDestructorList (gImageHandle, gST);
}
gBS->Exit (gImageHandle, Status, 0, NULL);
}
"""),
TemplateString("""
const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
EFI_STATUS
EFIAPI
ProcessModuleEntryPointList (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
${BEGIN}
if (SetJump (&mJumpContext) == 0) {
ExitDriver (${Function} (ImageHandle, SystemTable));
ASSERT (FALSE);
}
${END}
return mDriverEntryPointStatus;
}
static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR;
VOID
EFIAPI
ExitDriver (
IN EFI_STATUS Status
)
{
if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
mDriverEntryPointStatus = Status;
}
LongJump (&mJumpContext, (UINTN)-1);
ASSERT (FALSE);
}
""")
]
## UEFI Unload Image Templates
gUefiUnloadImagePrototype = TemplateString("""
${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle
);
${END}
""")
gUefiUnloadImageString = [
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
EFI_STATUS
EFIAPI
ProcessModuleUnloadList (
IN EFI_HANDLE ImageHandle
)
{
return EFI_SUCCESS;
}
"""),
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
${BEGIN}
EFI_STATUS
EFIAPI
ProcessModuleUnloadList (
IN EFI_HANDLE ImageHandle
)
{
return ${Function} (ImageHandle);
}
${END}
"""),
TemplateString("""
GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
EFI_STATUS
EFIAPI
ProcessModuleUnloadList (
IN EFI_HANDLE ImageHandle
)
{
EFI_STATUS Status;
Status = EFI_SUCCESS;
${BEGIN}
if (EFI_ERROR (Status)) {
${Function} (ImageHandle);
} else {
Status = ${Function} (ImageHandle);
}
${END}
return Status;
}
""")
]
gLibraryStructorPrototype = {
SUP_MODULE_BASE : TemplateString("""${BEGIN}
RETURN_STATUS
EFIAPI
${Function} (
VOID
);${END}
"""),
'PEI' : TemplateString("""${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_PEI_FILE_HANDLE FileHandle,
IN CONST EFI_PEI_SERVICES **PeiServices
);${END}
"""),
'DXE' : TemplateString("""${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
);${END}
"""),
'MM' : TemplateString("""${BEGIN}
EFI_STATUS
EFIAPI
${Function} (
IN EFI_HANDLE ImageHandle,
IN EFI_MM_SYSTEM_TABLE *MmSystemTable
);${END}
"""),
}
gLibraryStructorCall = {
SUP_MODULE_BASE : TemplateString("""${BEGIN}
Status = ${Function} ();
ASSERT_RETURN_ERROR (Status);${END}
"""),
'PEI' : TemplateString("""${BEGIN}
Status = ${Function} (FileHandle, PeiServices);
ASSERT_EFI_ERROR (Status);${END}
"""),
'DXE' : TemplateString("""${BEGIN}
Status = ${Function} (ImageHandle, SystemTable);
ASSERT_EFI_ERROR (Status);${END}
"""),
'MM' : TemplateString("""${BEGIN}
Status = ${Function} (ImageHandle, MmSystemTable);
ASSERT_EFI_ERROR (Status);${END}
"""),
}
## Library Constructor and Destructor Templates
gLibraryString = {
SUP_MODULE_BASE : TemplateString("""
${BEGIN}${FunctionPrototype}${END}
VOID
EFIAPI
ProcessLibrary${Type}List (
VOID
)
{
${BEGIN} RETURN_STATUS Status;
${FunctionCall}${END}
}
"""),
'PEI' : TemplateString("""
${BEGIN}${FunctionPrototype}${END}
VOID
EFIAPI
ProcessLibrary${Type}List (
IN EFI_PEI_FILE_HANDLE FileHandle,
IN CONST EFI_PEI_SERVICES **PeiServices
)
{
${BEGIN} EFI_STATUS Status;
${FunctionCall}${END}
}
"""),
'DXE' : TemplateString("""
${BEGIN}${FunctionPrototype}${END}
VOID
EFIAPI
ProcessLibrary${Type}List (
IN EFI_HANDLE ImageHandle,
IN EFI_SYSTEM_TABLE *SystemTable
)
{
${BEGIN} EFI_STATUS Status;
${FunctionCall}${END}
}
"""),
'MM' : TemplateString("""
${BEGIN}${FunctionPrototype}${END}
VOID
EFIAPI
ProcessLibrary${Type}List (
IN EFI_HANDLE ImageHandle,
IN EFI_MM_SYSTEM_TABLE *MmSystemTable
)
{
${BEGIN} EFI_STATUS Status;
${FunctionCall}${END}
}
"""),
}
# Default header included by every generated AutoGen.h.
gBasicHeaderFile = "Base.h"
## Headers #include'd into the generated AutoGen files, keyed by module type.
gModuleTypeHeaderFile = {
    SUP_MODULE_BASE               :   [gBasicHeaderFile, "Library/DebugLib.h"],
    SUP_MODULE_SEC                :   ["PiPei.h", "Library/DebugLib.h"],
    SUP_MODULE_PEI_CORE           :   ["PiPei.h", "Library/DebugLib.h", "Library/PeiCoreEntryPoint.h"],
    SUP_MODULE_PEIM               :   ["PiPei.h", "Library/DebugLib.h", "Library/PeimEntryPoint.h"],
    SUP_MODULE_DXE_CORE           :   ["PiDxe.h", "Library/DebugLib.h", "Library/DxeCoreEntryPoint.h"],
    SUP_MODULE_DXE_DRIVER         :   ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
    SUP_MODULE_DXE_SMM_DRIVER     :   ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
    SUP_MODULE_DXE_RUNTIME_DRIVER :   ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
    SUP_MODULE_DXE_SAL_DRIVER     :   ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
    SUP_MODULE_UEFI_DRIVER        :   ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
    SUP_MODULE_UEFI_APPLICATION   :   ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiApplicationEntryPoint.h"],
    SUP_MODULE_SMM_CORE           :   ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiDriverEntryPoint.h"],
    SUP_MODULE_MM_STANDALONE      :   ["PiMm.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/StandaloneMmDriverEntryPoint.h"],
    SUP_MODULE_MM_CORE_STANDALONE :   ["PiMm.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/StandaloneMmCoreEntryPoint.h"],
    SUP_MODULE_USER_DEFINED       :   [gBasicHeaderFile, "Library/DebugLib.h"],
    SUP_MODULE_HOST_APPLICATION   :   [gBasicHeaderFile, "Library/DebugLib.h"]
}
## Autogen internal worker macro to define DynamicEx PCD name includes both the TokenSpaceGuidName
# the TokenName and Guid comparison to avoid define name collisions.
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenH    The TemplateString object for header file
#
#
def DynExPcdTokenNumberMapping(Info, AutoGenH):
    """Emit COMPAREGUID/__PCD_*_ADDR_CMP/__PCD_*_VAL_CMP/_PCD_TOKEN_EX_* macros
    into AutoGenH for every DynamicEx PCD of the module, so a token space GUID
    pointer can be mapped to the right per-GUID token number at compile time."""
    ExTokenCNameList = []
    PcdExList        = []
    # Even it is the Library, the PCD is saved in the ModulePcdList
    PcdList = Info.ModulePcdList
    for Pcd in PcdList:
        if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
            ExTokenCNameList.append(Pcd.TokenCName)
            PcdExList.append(Pcd)
    # Nothing to emit when the module has no DynamicEx PCDs.
    if len(ExTokenCNameList) == 0:
        return
    AutoGenH.Append('\n#define COMPAREGUID(Guid1, Guid2) (BOOLEAN)(*(CONST UINT64*)Guid1 == *(CONST UINT64*)Guid2 && *((CONST UINT64*)Guid1 + 1) == *((CONST UINT64*)Guid2 + 1))\n')
    # AutoGen for each PCD listed in a [PcdEx] section of a Module/Lib INF file.
    # Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
    # Use the Guid pointer to see if it matches any of the token space GUIDs.
    TokenCNameList = set()
    for TokenCName in ExTokenCNameList:
        if TokenCName in TokenCNameList:
            continue
        Index = 0
        # Count tells us when the last matching PCD is reached (closes the macro).
        Count = ExTokenCNameList.count(TokenCName)
        for Pcd in PcdExList:
            # MixedPcd maps a (TokenCName, GuidCName) pair to its renamed C identifier.
            RealTokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    RealTokenCName = PcdItem[0]
                    break
            if Pcd.TokenCName == TokenCName:
                Index = Index + 1
                if Index == 1:
                    # First match opens the macro definition.
                    AutoGenH.Append('\n#define __PCD_%s_ADDR_CMP(GuidPtr)  (' % (RealTokenCName))
                    AutoGenH.Append('\\\n  (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
                                    % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
                else:
                    AutoGenH.Append('\\\n  (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
                                    % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
                if Index == Count:
                    # Last match closes the conditional chain with a 0 fallback.
                    AutoGenH.Append('0 \\\n  )\n')
                TokenCNameList.add(TokenCName)
    # Second pass: value-compare macros, using COMPAREGUID on the pointed-to GUID.
    TokenCNameList = set()
    for TokenCName in ExTokenCNameList:
        if TokenCName in TokenCNameList:
            continue
        Index = 0
        Count = ExTokenCNameList.count(TokenCName)
        for Pcd in PcdExList:
            RealTokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    RealTokenCName = PcdItem[0]
                    break
            if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == TokenCName:
                Index = Index + 1
                if Index == 1:
                    AutoGenH.Append('\n#define __PCD_%s_VAL_CMP(GuidPtr)  (' % (RealTokenCName))
                    AutoGenH.Append('\\\n  (GuidPtr == NULL) ? 0:')
                    AutoGenH.Append('\\\n  COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
                                    % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
                else:
                    AutoGenH.Append('\\\n  COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
                                    % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
                if Index == Count:
                    AutoGenH.Append('0 \\\n  )\n')
        # Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID.
        # Guid2 is a C name for a GUID. Compare pointers first because optimizing compiler
        # can do this at build time on CONST GUID pointers and optimize away call to COMPAREGUID().
        # COMPAREGUID() will only be used if the Guid passed in is local to the module.
        # NOTE(review): RealTokenCName here is whatever the LAST element of PcdExList
        # set it to in the inner loop above, which may not correspond to TokenCName
        # when multiple CNames are present — confirm against upstream behavior.
        AutoGenH.Append('#define _PCD_TOKEN_EX_%s(GuidPtr)   __PCD_%s_ADDR_CMP(GuidPtr) ? __PCD_%s_ADDR_CMP(GuidPtr) : __PCD_%s_VAL_CMP(GuidPtr) \n'
                        % (RealTokenCName, RealTokenCName, RealTokenCName, RealTokenCName))
        TokenCNameList.add(TokenCName)
## Create code for module PCDs
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#   @param      Pcd         The PCD object
#
def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
    """Emit the AutoGen.h macros (and AutoGen.c variables, for FixedAtBuild /
    PatchableInModule PCDs) that give the module access to one PCD.

    The generated names encode the PCD access method:
      - DynamicEx:  per-GUID token define plus LibPcdGetEx/SetEx macro wrappers
      - Dynamic:    LibPcdGet/Set macro wrappers keyed by token number
      - FixedAtBuild / PatchableInModule / FeatureFlag: a C variable holding
        the (possibly array) value, plus size defines and get/set macros.

    Raises a build error (via EdkLogger.error) for unknown PCD types, missing
    token numbers, bad values or undersized VOID* buffers.
    """
    PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
    #
    # Write PCDs
    #
    # MixedPcd maps a (TokenCName, TokenSpaceGuidCName) pair to its renamed
    # C identifier when the same CName exists under several GUIDs.
    TokenCName = Pcd.TokenCName
    for PcdItem in GlobalData.MixedPcd:
        if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
            TokenCName = PcdItem[0]
            break
    PcdTokenName = '_PCD_TOKEN_' + TokenCName
    PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
    PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
    PatchPcdMaxSizeVariable = '_gPcd_BinaryPatch_MaxSize_' + TokenCName
    FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
    FixedPcdSizeVariableName = '_gPcd_FixedAtBuild_Size_' + TokenCName
    # Command-line (--pcd) overrides win over FDF overrides, which win over the
    # declared default.
    if Pcd.PcdValueFromComm:
        Pcd.DefaultValue = Pcd.PcdValueFromComm
    elif Pcd.PcdValueFromFdf:
        Pcd.DefaultValue = Pcd.PcdValueFromFdf
    if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
        TokenNumber = int(Pcd.TokenValue, 0)
        # Add TokenSpaceGuidValue value to PcdTokenName to discriminate the DynamicEx PCDs with
        # different Guids but same TokenCName
        PcdExTokenName = '_PCD_TOKEN_' + Pcd.TokenSpaceGuidCName + '_' + TokenCName
        AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
    else:
        if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
            # If one of the Source built modules listed in the DSC is not listed in FDF modules,
            # and the INF lists a PCD can only use the PcdsDynamic access method (it is only
            # listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
            # report warning message notify the PI that they are attempting to build a module
            # that must be included in a flash image in order to be functional. These Dynamic PCD
            # will not be added into the Database unless it is used by other modules that are
            # included in the FDF file.
            # In this case, just assign an invalid token number to make it pass build.
            if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
                TokenNumber = 0
            else:
                EdkLogger.error("build", AUTOGEN_ERROR,
                                "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
                                ExtraData="[%s]" % str(Info))
        else:
            TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
        AutoGenH.Append('\n#define %s %dU\n' % (PcdTokenName, TokenNumber))
    EdkLogger.debug(EdkLogger.DEBUG_3, "Creating code for " + TokenCName + "." + Pcd.TokenSpaceGuidCName)
    if Pcd.Type not in gItemTypeStringDatabase:
        EdkLogger.error("build", AUTOGEN_ERROR,
                        "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
                        ExtraData="[%s]" % str(Info))
    DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabase else gDatumSizeStringDatabase[TAB_VOID]
    DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabaseLib else gDatumSizeStringDatabaseLib[TAB_VOID]
    GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
    SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
    SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
    GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
    if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
        if Info.IsLibrary:
            PcdList = Info.LibraryPcdList
        else:
            PcdList = Info.ModulePcdList + Info.LibraryPcdList
        PcdExCNameTest = 0
        for PcdModule in PcdList:
            if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
                PcdExCNameTest += 1
            # get out early once we found > 1...
            if PcdExCNameTest > 1:
                break
        # Be compatible with the current code which using PcdToken and PcdGet/Set for DynamicEx Pcd.
        # If only PcdToken and PcdGet/Set used in all Pcds with different CName, it should succeed to build.
        # If PcdToken and PcdGet/Set used in the Pcds with different Guids but same CName, it should failed to build.
        if PcdExCNameTest > 1:
            AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
            AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
            AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
            AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
            if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
                AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
                AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
            else:
                AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
                AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
        else:
            AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
            AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
            AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
            if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
                AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
                AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
            else:
                AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
                AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
    elif Pcd.Type in PCD_DYNAMIC_TYPE_SET:
        PcdCNameTest = 0
        for PcdModule in Info.LibraryPcdList + Info.ModulePcdList:
            if PcdModule.Type in PCD_DYNAMIC_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
                PcdCNameTest += 1
            # get out early once we found > 1...
            if PcdCNameTest > 1:
                break
        if PcdCNameTest > 1:
            EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic Pcds [%s] are different Guids but same CName. They need to be changed to DynamicEx type to avoid the confliction.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
        else:
            AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
            AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
            if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
                AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
                AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
            else:
                AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
                AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
    else:
        # FixedAtBuild / FeatureFlag / PatchableInModule: the value is a C
        # variable in AutoGen.c; validate and normalize the default value first.
        PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
        Const = 'const'
        if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
            Const = ''
        Type = ''
        Array = ''
        Value = Pcd.DefaultValue
        Unicode = False
        ValueNumber = 0
        if Pcd.DatumType == 'BOOLEAN':
            BoolValue = Value.upper()
            if BoolValue == 'TRUE' or BoolValue == '1':
                Value = '1U'
            elif BoolValue == 'FALSE' or BoolValue == '0':
                Value = '0U'
        if Pcd.DatumType in TAB_PCD_CLEAN_NUMERIC_TYPES:
            try:
                if Value.upper().endswith('L'):
                    Value = Value[:-1]
                # Strip leading zeros so int(..., 0) does not treat the value
                # as a malformed octal literal.
                if Value.startswith('0') and not Value.lower().startswith('0x') and len(Value) > 1 and Value.lstrip('0'):
                    Value = Value.lstrip('0')
                ValueNumber = int (Value, 0)
            # Was a bare 'except:', which would also swallow SystemExit and
            # KeyboardInterrupt; any conversion failure is reported as a build
            # error below.
            except Exception:
                EdkLogger.error("build", AUTOGEN_ERROR,
                                "PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
                                ExtraData="[%s]" % str(Info))
            if ValueNumber < 0:
                EdkLogger.error("build", AUTOGEN_ERROR,
                                "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
                                ExtraData="[%s]" % str(Info))
            elif ValueNumber > MAX_VAL_TYPE[Pcd.DatumType]:
                EdkLogger.error("build", AUTOGEN_ERROR,
                                "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
                                ExtraData="[%s]" % str(Info))
            if Pcd.DatumType == TAB_UINT64 and not Value.endswith('ULL'):
                Value += 'ULL'
            elif Pcd.DatumType != TAB_UINT64 and not Value.endswith('U'):
                Value += 'U'
        if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
            # VOID* PCD: turn the value into a C byte (or UINT16 for L"...")
            # array and compute its declared size.
            if not Pcd.MaxDatumSize:
                EdkLogger.error("build", AUTOGEN_ERROR,
                                "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
                                ExtraData="[%s]" % str(Info))
            ArraySize = int(Pcd.MaxDatumSize, 0)
            if Value[0] == '{':
                Type = '(VOID *)'
                ValueSize = len(Value.split(','))
            else:
                if Value[0] == 'L':
                    Unicode = True
                Value = Value.lstrip('L')   #.strip('"')
                Value = eval(Value)         # translate escape character
                ValueSize = len(Value) + 1
                NewValue = '{'
                for Index in range(0, len(Value)):
                    if Unicode:
                        NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', '
                    else:
                        NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
                if Unicode:
                    # MaxDatumSize is in bytes; the UINT16 array length is half.
                    ArraySize = ArraySize // 2
                Value = NewValue + '0 }'
            if ArraySize < ValueSize:
                if Pcd.MaxSizeUserSet:
                    EdkLogger.error("build", AUTOGEN_ERROR,
                                    "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName),
                                    ExtraData="[%s]" % str(Info))
                else:
                    ArraySize = Pcd.GetPcdSize()
                    if Unicode:
                        ArraySize = ArraySize // 2
            Array = '[%d]' % ArraySize
        #
        # skip casting for fixed at build since it breaks ARM assembly.
        # Long term we need PCD macros that work in assembly
        #
        elif Pcd.Type != TAB_PCDS_FIXED_AT_BUILD and Pcd.DatumType in TAB_PCD_NUMERIC_TYPES_VOID:
            Value = "((%s)%s)" % (Pcd.DatumType, Value)
        if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
            PcdValueName = '_PCD_PATCHABLE_VALUE_' + TokenCName
        else:
            PcdValueName = '_PCD_VALUE_' + TokenCName
        if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
            #
            # For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed.
            #
            AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName))
            if Unicode:
                AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT16 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
                AutoGenH.Append('extern %s UINT16 %s%s;\n' %(Const, PcdVariableName, Array))
            else:
                AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
                AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array))
            AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
            PcdDataSize = Pcd.GetPcdSize()
            if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
                AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
                AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
                AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (FixedPcdSizeVariableName, PcdDataSize))
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, Pcd.MaxDatumSize))
                AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
                AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
                AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
                AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED const UINTN %s = %s;\n' % (PatchPcdMaxSizeVariable, Pcd.MaxDatumSize))
        elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
            AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
            AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
            AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
            AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
            PcdDataSize = Pcd.GetPcdSize()
            AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
            AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, PatchPcdSizeVariableName))
            AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
            AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName, PcdDataSize))
        else:
            PcdDataSize = Pcd.GetPcdSize()
            AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
            AutoGenH.Append('#define %s %s \n' % (GetModeSizeName, FixPcdSizeTokenName))
            AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
            AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
            AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
            AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
        if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
            if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
                AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
                AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
            else:
                AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
                AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS) \n' % (SetModeStatusName, PcdVariableName))
        else:
            AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
## Create code for library module PCDs
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
# @param Pcd The PCD object
#
def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
TokenCName = Pcd.TokenCName
for PcdItem in GlobalData.MixedPcd:
if (TokenCName, TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
TokenCName = PcdItem[0]
break
PcdTokenName = '_PCD_TOKEN_' + TokenCName
FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
PatchPcdMaxSizeVariable = '_gPcd_BinaryPatch_MaxSize_' + TokenCName
FixedPcdSizeVariableName = '_gPcd_FixedAtBuild_Size_' + TokenCName
if Pcd.PcdValueFromComm:
Pcd.DefaultValue = Pcd.PcdValueFromComm
elif Pcd.PcdValueFromFdf:
Pcd.DefaultValue = Pcd.PcdValueFromFdf
#
# Write PCDs
#
if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
TokenNumber = int(Pcd.TokenValue, 0)
else:
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
# If one of the Source built modules listed in the DSC is not listed in FDF modules,
# and the INF lists a PCD can only use the PcdsDynamic access method (it is only
# listed in the DEC file that declares the PCD as PcdsDynamic), then build tool will
# report warning message notify the PI that they are attempting to build a module
# that must be included in a flash image in order to be functional. These Dynamic PCD
# will not be added into the Database unless it is used by other modules that are
# included in the FDF file.
# In this case, just assign an invalid token number to make it pass build.
if Pcd.Type in PCD_DYNAMIC_TYPE_SET:
TokenNumber = 0
else:
EdkLogger.error("build", AUTOGEN_ERROR,
"No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
else:
TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
if Pcd.Type not in gItemTypeStringDatabase:
EdkLogger.error("build", AUTOGEN_ERROR,
"Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
ExtraData="[%s]" % str(Info))
DatumType = Pcd.DatumType
DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabase else gDatumSizeStringDatabase[TAB_VOID]
DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] if Pcd.DatumType in gDatumSizeStringDatabaseLib else gDatumSizeStringDatabaseLib[TAB_VOID]
GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_' + TokenCName
SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName if Pcd.DatumType in gDatumSizeStringDatabaseH else '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[TAB_VOID] + '_S_' + TokenCName
GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
Type = ''
Array = ''
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
if Pcd.DefaultValue[0]== '{':
Type = '(VOID *)'
Array = '[]'
PcdItemType = Pcd.Type
if PcdItemType in PCD_DYNAMIC_EX_TYPE_SET:
PcdExTokenName = '_PCD_TOKEN_' + TokenSpaceGuidCName + '_' + TokenCName
AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
if Info.IsLibrary:
PcdList = Info.LibraryPcdList
else:
PcdList = Info.ModulePcdList
PcdExCNameTest = 0
for PcdModule in PcdList:
if PcdModule.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenCName == PcdModule.TokenCName:
PcdExCNameTest += 1
# get out early once we found > 1...
if PcdExCNameTest > 1:
break
# Be compatible with the current code which using PcdGet/Set for DynamicEx Pcd.
# If only PcdGet/Set used in all Pcds with different CName, it should succeed to build.
# If PcdGet/Set used in the Pcds with different Guids but same CName, it should failed to build.
if PcdExCNameTest > 1:
AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
else:
AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
else:
AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName, Pcd.TokenSpaceGuidCName, PcdTokenName))
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
else:
AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
else:
AutoGenH.Append('#define _PCD_TOKEN_%s %dU\n' % (TokenCName, TokenNumber))
if PcdItemType in PCD_DYNAMIC_TYPE_SET:
PcdList = []
PcdCNameList = []
PcdList.extend(Info.LibraryPcdList)
PcdList.extend(Info.ModulePcdList)
for PcdModule in PcdList:
if PcdModule.Type in PCD_DYNAMIC_TYPE_SET:
PcdCNameList.append(PcdModule.TokenCName)
if PcdCNameList.count(Pcd.TokenCName) > 1:
EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic Pcds [%s] are different Guids but same CName.They need to be changed to DynamicEx type to avoid the confliction.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
else:
AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
if DatumType not in TAB_PCD_NUMERIC_TYPES:
AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
else:
AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
if PcdItemType == TAB_PCDS_PATCHABLE_IN_MODULE:
PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[TAB_PCDS_PATCHABLE_IN_MODULE] + '_' + TokenCName
if DatumType not in TAB_PCD_NUMERIC_TYPES:
if DatumType == TAB_VOID and Array == '[]':
DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
else:
DatumType = TAB_UINT8
AutoGenH.Append('extern %s _gPcd_BinaryPatch_%s%s;\n' %(DatumType, TokenCName, Array))
else:
AutoGenH.Append('extern volatile %s %s%s;\n' % (DatumType, PcdVariableName, Array))
AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName))
PcdDataSize = Pcd.GetPcdSize()
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, TokenCName, PatchPcdSizeVariableName, PatchPcdMaxSizeVariable))
AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, TokenCName, PatchPcdSizeVariableName, PatchPcdMaxSizeVariable))
AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PatchPcdMaxSizeVariable))
AutoGenH.Append('extern const UINTN %s; \n' % PatchPcdMaxSizeVariable)
else:
AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS)\n' % (SetModeStatusName, PcdVariableName))
AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, PatchPcdSizeVariableName))
AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
if DatumType == TAB_VOID and Array == '[]':
DatumType = [TAB_UINT8, TAB_UINT16][Pcd.DefaultValue[0] == 'L']
if DatumType not in TAB_PCD_NUMERIC_TYPES_VOID:
DatumType = TAB_UINT8
AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
ConstFixedPcd = False
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and (key in Info.ConstPcd or (Info.IsLibrary and not Info.ReferenceModules)):
ConstFixedPcd = True
if key in Info.ConstPcd:
Pcd.DefaultValue = Info.ConstPcd[key]
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
AutoGenH.Append('#define _PCD_VALUE_%s %s%s\n' %(TokenCName, Type, PcdVariableName))
else:
AutoGenH.Append('#define _PCD_VALUE_%s %s\n' %(TokenCName, Pcd.DefaultValue))
PcdDataSize = Pcd.GetPcdSize()
if PcdItemType == TAB_PCDS_FIXED_AT_BUILD:
if Pcd.DatumType not in TAB_PCD_NUMERIC_TYPES:
if ConstFixedPcd:
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
else:
AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixedPcdSizeVariableName))
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, FixedPcdSizeVariableName))
AutoGenH.Append('extern const UINTN %s; \n' % FixedPcdSizeVariableName)
else:
AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
AutoGenH.Append('#define %s %s\n' % (GetModeSizeName, FixPcdSizeTokenName))
## Create code for library constructor
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
    #
    # Library Constructors
    #
    # Collect constructor prototypes and the ordered constructor calls for every
    # dependent library (or only the library itself when generating for a library
    # module), then emit them through the template that matches the module class
    # (BASE / PEI / DXE / MM).
    #
    ConstructorPrototypeString = TemplateString()
    ConstructorCallingString = TemplateString()
    if Info.IsLibrary:
        # For a library build, only its own constructors are declared.
        DependentLibraryList = [Info.Module]
    else:
        DependentLibraryList = Info.DependentLibraryList
    for Lib in DependentLibraryList:
        if len(Lib.ConstructorList) <= 0:
            continue
        Dict = {'Function':Lib.ConstructorList}
        # BASE/SEC libraries always use the BASE-style constructor signature.
        if Lib.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC]:
            ConstructorPrototypeString.Append(gLibraryStructorPrototype[SUP_MODULE_BASE].Replace(Dict))
            ConstructorCallingString.Append(gLibraryStructorCall[SUP_MODULE_BASE].Replace(Dict))
        # For non-BASE modules, pick the phase-specific signature by the
        # *library's* module type (PEI vs DXE vs standalone MM).
        if Info.ModuleType not in [SUP_MODULE_BASE, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
            if Lib.ModuleType in SUP_MODULE_SET_PEI:
                ConstructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
                ConstructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
            elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
                                    SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
                ConstructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
                ConstructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
            elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
                ConstructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
                ConstructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))

    # The templates iterate a list; use an empty list when nothing was
    # collected so ${BEGIN}...${END} expands to nothing.
    if str(ConstructorPrototypeString) == '':
        ConstructorPrototypeList = []
    else:
        ConstructorPrototypeList = [str(ConstructorPrototypeString)]
    if str(ConstructorCallingString) == '':
        ConstructorCallingList = []
    else:
        ConstructorCallingList = [str(ConstructorCallingString)]

    Dict = {
        'Type'              :   'Constructor',
        'FunctionPrototype' :   ConstructorPrototypeList,
        'FunctionCall'      :   ConstructorCallingList
    }
    if Info.IsLibrary:
        # Libraries only declare the prototypes in AutoGen.h.
        AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
    else:
        # Modules get the ProcessLibraryConstructorList() body in AutoGen.c,
        # selected by the *module's* type.
        if Info.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
            AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
        elif Info.ModuleType in SUP_MODULE_SET_PEI:
            AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
        elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
                                 SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
            AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
        elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
            AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
## Create code for library destructor
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
    #
    # Library Destructors
    #
    # Mirror image of CreateLibraryConstructorCode: collect destructor
    # prototypes/calls and emit them via the phase-matching template.
    #
    DestructorPrototypeString = TemplateString()
    DestructorCallingString = TemplateString()
    if Info.IsLibrary:
        # For a library build, only its own destructors are declared.
        DependentLibraryList = [Info.Module]
    else:
        DependentLibraryList = Info.DependentLibraryList
    # Walk the dependency list backwards so destructors are called in the
    # reverse order of the constructors.
    for Index in range(len(DependentLibraryList)-1, -1, -1):
        Lib = DependentLibraryList[Index]
        if len(Lib.DestructorList) <= 0:
            continue
        Dict = {'Function':Lib.DestructorList}
        # BASE/SEC libraries always use the BASE-style destructor signature.
        if Lib.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC]:
            DestructorPrototypeString.Append(gLibraryStructorPrototype[SUP_MODULE_BASE].Replace(Dict))
            DestructorCallingString.Append(gLibraryStructorCall[SUP_MODULE_BASE].Replace(Dict))
        # For non-BASE modules, pick the phase-specific signature by the
        # *library's* module type.
        if Info.ModuleType not in [SUP_MODULE_BASE, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
            if Lib.ModuleType in SUP_MODULE_SET_PEI:
                DestructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
                DestructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
            elif Lib.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
                                    SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
                DestructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
                DestructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
            elif Lib.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
                DestructorPrototypeString.Append(gLibraryStructorPrototype['MM'].Replace(Dict))
                DestructorCallingString.Append(gLibraryStructorCall['MM'].Replace(Dict))

    # Empty lists make the ${BEGIN}...${END} template sections expand to nothing.
    if str(DestructorPrototypeString) == '':
        DestructorPrototypeList = []
    else:
        DestructorPrototypeList = [str(DestructorPrototypeString)]
    if str(DestructorCallingString) == '':
        DestructorCallingList = []
    else:
        DestructorCallingList = [str(DestructorCallingString)]

    Dict = {
        'Type'              :   'Destructor',
        'FunctionPrototype' :   DestructorPrototypeList,
        'FunctionCall'      :   DestructorCallingList
    }
    if Info.IsLibrary:
        # Libraries only declare the prototypes in AutoGen.h.
        AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
    else:
        # Modules get the ProcessLibraryDestructorList() body in AutoGen.c.
        if Info.ModuleType in [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION]:
            AutoGenC.Append(gLibraryString[SUP_MODULE_BASE].Replace(Dict))
        elif Info.ModuleType in SUP_MODULE_SET_PEI:
            AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
        elif Info.ModuleType in [SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER,
                                 SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER, SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_SMM_CORE]:
            AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
        elif Info.ModuleType in [SUP_MODULE_MM_STANDALONE, SUP_MODULE_MM_CORE_STANDALONE]:
            AutoGenC.Append(gLibraryString['MM'].Replace(Dict))
## Create code for ModuleEntryPoint
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
    # Libraries and module types without a generated entry-point wrapper get nothing.
    if Info.IsLibrary or Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_SEC]:
        return
    #
    # Module Entry Points
    #
    NumEntryPoints = len(Info.Module.ModuleEntryPointList)
    # Spec versions default to 0 when the INF does not declare them; the 'U'
    # suffix below turns them into unsigned C constants.
    if 'PI_SPECIFICATION_VERSION' in Info.Module.Specification:
        PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION']
    else:
        PiSpecVersion = '0x00000000'
    if 'UEFI_SPECIFICATION_VERSION' in Info.Module.Specification:
        UefiSpecVersion = Info.Module.Specification['UEFI_SPECIFICATION_VERSION']
    else:
        UefiSpecVersion = '0x00000000'
    Dict = {
        'Function'       :   Info.Module.ModuleEntryPointList,
        'PiSpecVersion'  :   PiSpecVersion + 'U',
        'UefiSpecVersion':   UefiSpecVersion + 'U'
    }

    # Core module types must have exactly one entry point; only enforced when
    # the module is built from source (binary modules have no source list).
    if Info.ModuleType in [SUP_MODULE_PEI_CORE, SUP_MODULE_DXE_CORE, SUP_MODULE_SMM_CORE, SUP_MODULE_MM_CORE_STANDALONE]:
        if Info.SourceFileList:
            if NumEntryPoints != 1:
                EdkLogger.error(
                    "build",
                    AUTOGEN_ERROR,
                    '%s must have exactly one entry point' % Info.ModuleType,
                    File=str(Info),
                    ExtraData= ", ".join(Info.Module.ModuleEntryPointList)
                    )
    # Select the entry-point template by module type. The indexed template
    # tables use the entry-point count as index, saturating at 2 for the
    # "two or more entry points" form.
    if Info.ModuleType == SUP_MODULE_PEI_CORE:
        AutoGenC.Append(gPeiCoreEntryPointString.Replace(Dict))
        AutoGenH.Append(gPeiCoreEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_DXE_CORE:
        AutoGenC.Append(gDxeCoreEntryPointString.Replace(Dict))
        AutoGenH.Append(gDxeCoreEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_SMM_CORE:
        AutoGenC.Append(gSmmCoreEntryPointString.Replace(Dict))
        AutoGenH.Append(gSmmCoreEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_MM_CORE_STANDALONE:
        AutoGenC.Append(gMmCoreStandaloneEntryPointString.Replace(Dict))
        AutoGenH.Append(gMmCoreStandaloneEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_PEIM:
        if NumEntryPoints < 2:
            AutoGenC.Append(gPeimEntryPointString[NumEntryPoints].Replace(Dict))
        else:
            AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict))
        AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType in [SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
        if NumEntryPoints < 2:
            AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict))
        else:
            AutoGenC.Append(gUefiDriverEntryPointString[2].Replace(Dict))
        AutoGenH.Append(gUefiDriverEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_DXE_SMM_DRIVER:
        # SMM drivers only distinguish "no entry point" from "one or more".
        if NumEntryPoints == 0:
            AutoGenC.Append(gDxeSmmEntryPointString[0].Replace(Dict))
        else:
            AutoGenC.Append(gDxeSmmEntryPointString[1].Replace(Dict))
        AutoGenH.Append(gDxeSmmEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_MM_STANDALONE:
        if NumEntryPoints < 2:
            AutoGenC.Append(gMmStandaloneEntryPointString[NumEntryPoints].Replace(Dict))
        else:
            AutoGenC.Append(gMmStandaloneEntryPointString[2].Replace(Dict))
        AutoGenH.Append(gMmStandaloneEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == SUP_MODULE_UEFI_APPLICATION:
        if NumEntryPoints < 2:
            AutoGenC.Append(gUefiApplicationEntryPointString[NumEntryPoints].Replace(Dict))
        else:
            AutoGenC.Append(gUefiApplicationEntryPointString[2].Replace(Dict))
        AutoGenH.Append(gUefiApplicationEntryPointPrototype.Replace(Dict))
## Create code for ModuleUnloadImage
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH):
    # Libraries and module types without an unload-image concept get no code.
    ExcludedTypes = [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE, SUP_MODULE_SEC]
    if Info.IsLibrary or Info.ModuleType in ExcludedTypes:
        return
    #
    # Unload Image Handlers
    #
    Handlers = Info.Module.ModuleUnloadImageList
    HandlerCount = len(Handlers)
    TemplateDict = {'Count':str(HandlerCount) + 'U', 'Function':Handlers}
    # The template table is indexed by handler count, saturating at 2
    # (the "two or more handlers" form).
    TemplateIndex = HandlerCount if HandlerCount < 2 else 2
    AutoGenC.Append(gUefiUnloadImageString[TemplateIndex].Replace(TemplateDict))
    AutoGenH.Append(gUefiUnloadImagePrototype.Replace(TemplateDict))
## Create code for GUID
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH):
    # Module types without UEFI type definitions fall back to the generic
    # TAB_GUID type name; everything else uses EFI_GUID.
    BareTypes = [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]
    GuidType = TAB_GUID if Info.ModuleType in BareTypes else "EFI_GUID"

    if Info.GuidList:
        if not Info.IsLibrary:
            AutoGenC.Append("\n// Guids\n")
        AutoGenH.Append("\n// Guids\n")
    #
    # GUIDs
    #
    # One extern declaration per GUID in AutoGen.h; the actual definition is
    # emitted into AutoGen.c for non-library modules only.
    for CName, Value in Info.GuidList.items():
        if not Info.IsLibrary:
            AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, CName, Value))
        AutoGenH.Append('extern %s %s;\n' % (GuidType, CName))
## Create code for protocol
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH):
    # Module types without UEFI type definitions fall back to the generic
    # TAB_GUID type name; everything else uses EFI_GUID.
    BareTypes = [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]
    GuidType = TAB_GUID if Info.ModuleType in BareTypes else "EFI_GUID"

    if Info.ProtocolList:
        if not Info.IsLibrary:
            AutoGenC.Append("\n// Protocols\n")
        AutoGenH.Append("\n// Protocols\n")
    #
    # Protocol GUIDs
    #
    # One extern declaration per protocol GUID; the definition goes to
    # AutoGen.c for non-library modules only.
    for CName, Value in Info.ProtocolList.items():
        if not Info.IsLibrary:
            AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, CName, Value))
        AutoGenH.Append('extern %s %s;\n' % (GuidType, CName))
## Create code for PPI
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH):
    # Module types without UEFI type definitions fall back to the generic
    # TAB_GUID type name; everything else uses EFI_GUID.
    BareTypes = [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]
    GuidType = TAB_GUID if Info.ModuleType in BareTypes else "EFI_GUID"

    if Info.PpiList:
        if not Info.IsLibrary:
            AutoGenC.Append("\n// PPIs\n")
        AutoGenH.Append("\n// PPIs\n")
    #
    # PPI GUIDs
    #
    # One extern declaration per PPI GUID; the definition goes to AutoGen.c
    # for non-library modules only.
    for CName, Value in Info.PpiList.items():
        if not Info.IsLibrary:
            AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, CName, Value))
        AutoGenH.Append('extern %s %s;\n' % (GuidType, CName))
## Create code for PCD
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreatePcdCode(Info, AutoGenC, AutoGenH):

    # Collect Token Space GUIDs used by DynamicEx PCDs
    TokenSpaceList = []
    for Pcd in Info.ModulePcdList:
        if Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
            TokenSpaceList.append(Pcd.TokenSpaceGuidCName)

    SkuMgr = Info.PlatformInfo.Platform.SkuIdMgr
    AutoGenH.Append("\n// Definition of SkuId Array\n")
    AutoGenH.Append("extern UINT64 _gPcd_SkuId_Array[];\n")

    # Add extern declarations to AutoGen.h if one or more Token Space GUIDs were found
    if TokenSpaceList:
        AutoGenH.Append("\n// Definition of PCD Token Space GUIDs used in this module\n\n")
        # Module types without UEFI type definitions use the generic TAB_GUID type.
        if Info.ModuleType in [SUP_MODULE_USER_DEFINED, SUP_MODULE_HOST_APPLICATION, SUP_MODULE_BASE]:
            GuidType = TAB_GUID
        else:
            GuidType = "EFI_GUID"
        for Item in TokenSpaceList:
            AutoGenH.Append('extern %s %s;\n' % (GuidType, Item))

    if Info.IsLibrary:
        # Library build: only header-side PCD access macros are generated.
        if Info.ModulePcdList:
            AutoGenH.Append("\n// PCD definitions\n")
        for Pcd in Info.ModulePcdList:
            CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd)
        DynExPcdTokenNumberMapping (Info, AutoGenH)
    else:
        AutoGenC.Append("\n// Definition of SkuId Array\n")
        AutoGenC.Append("GLOBAL_REMOVE_IF_UNREFERENCED UINT64 _gPcd_SkuId_Array[] = %s;\n" % SkuMgr.DumpSkuIdArrary())
        if Info.ModulePcdList:
            AutoGenH.Append("\n// Definition of PCDs used in this module\n")
            AutoGenC.Append("\n// Definition of PCDs used in this module\n")
        for Pcd in Info.ModulePcdList:
            CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd)
        DynExPcdTokenNumberMapping (Info, AutoGenH)
        if Info.LibraryPcdList:
            AutoGenH.Append("\n// Definition of PCDs used in libraries is in AutoGen.c\n")
            AutoGenC.Append("\n// Definition of PCDs used in libraries\n")
        for Pcd in Info.LibraryPcdList:
            # Library PCD definitions are deliberately emitted into AutoGen.c
            # only (the header text above points readers at AutoGen.c), hence
            # AutoGenC is passed for BOTH the C and H output streams.
            CreateModulePcdCode(Info, AutoGenC, AutoGenC, Pcd)
    CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH)
## Create code for unicode string definition
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
# @param UniGenBinBuffer Buffer to store uni string package data
#
def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuffer):
    # GetStringFiles resolves paths relative to the workspace, so switch the
    # working directory for the duration of this function and restore it at the end.
    WorkingDir = os.getcwd()
    os.chdir(Info.WorkspaceDir)

    IncList = [Info.MetaFile.Dir]
    # Get all files under [Sources] section in inf file for EDK-II module
    EDK2Module = True
    SrcList = [F for F in Info.SourceFileList]

    # '-c' build flag selects EDK compatible-mode string generation.
    if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-c') > -1:
        CompatibleMode = True
    else:
        CompatibleMode = False

    #
    # -s is a temporary option dedicated for building .UNI files with ISO 639-2 language codes of EDK Shell in EDK2
    #
    if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1:
        # -c and -s are mutually exclusive modes.
        if CompatibleMode:
            EdkLogger.error("build", AUTOGEN_ERROR,
                            "-c and -s build options should be used exclusively",
                            ExtraData="[%s]" % str(Info))
        ShellMode = True
    else:
        ShellMode = False

    #RFC4646 is only for EDKII modules and ISO639-2 for EDK modules
    if EDK2Module:
        FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.RFCLanguages]
    else:
        FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.ISOLanguages]
    Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer, FilterInfo)
    # The string pack body goes into AutoGen.c only when C generation is requested.
    if CompatibleMode or UniGenCFlag:
        AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n")
        AutoGenC.Append(Code)
        AutoGenC.Append("\n")
    # String ID defines always go into the header.
    AutoGenH.Append("\n//\n//Unicode String ID\n//\n")
    AutoGenH.Append(Header)
    if CompatibleMode or UniGenCFlag:
        AutoGenH.Append("\n#define STRING_ARRAY_NAME %sStrings\n" % Info.Name)
    os.chdir(WorkingDir)
def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
    # Build the HII image package from the module's .idf files: a header-side
    # list of IMAGE_TOKEN defines, the binary package written to
    # IdfGenBinBuffer, and (when IdfGenCFlag is set) a C array in AutoGen.c.
    if len(Info.IdfFileList) > 0:
        ImageFiles = IdfFileClassObject(sorted (Info.IdfFileList))
        if ImageFiles.ImageFilesDict:
            Index = 1
            PaletteIndex = 1
            IncList = [Info.MetaFile.Dir]
            SrcList = [F for F in Info.SourceFileList]
            SkipList = ['.jpg', '.png', '.bmp', '.inf', '.idf']
            FileList = GetFileList(SrcList, IncList, SkipList)
            # Column where the hex value of each #define starts (for alignment).
            ValueStartPtr = 60
            StringH.Append("\n//\n//Image ID\n//\n")
            ImageInfoOffset = 0
            PaletteInfoOffset = 0
            # Both buffers start with a single pad byte ('x'); it is stripped
            # (via [1:]) when the data is written out.
            ImageBuffer = pack('x')
            PaletteBuffer = pack('x')
            BufferStr = ''
            PaletteStr = ''
            # Maps an image file to the index of its first occurrence, so later
            # references become EFI_HII_IIBT_DUPLICATE blocks.
            FileDict = {}
            for Idf in ImageFiles.ImageFilesDict:
                if ImageFiles.ImageFilesDict[Idf]:
                    # First pass: resolve each referenced file name against the
                    # [Sources] section and validate its extension.
                    for FileObj in ImageFiles.ImageFilesDict[Idf]:
                        for sourcefile in Info.SourceFileList:
                            if FileObj.FileName == sourcefile.File:
                                if not sourcefile.Ext.upper() in ['.PNG', '.BMP', '.JPG']:
                                    EdkLogger.error("build", AUTOGEN_ERROR, "The %s's postfix must be one of .bmp, .jpg, .png" % (FileObj.FileName), ExtraData="[%s]" % str(Info))
                                FileObj.File = sourcefile
                                break
                        else:
                            # for/else: no matching source file was found.
                            EdkLogger.error("build", AUTOGEN_ERROR, "The %s in %s is not defined in the driver's [Sources] section" % (FileObj.FileName, Idf), ExtraData="[%s]" % str(Info))

                    # Second pass: encode each referenced image into the package.
                    for FileObj in ImageFiles.ImageFilesDict[Idf]:
                        ID = FileObj.ImageID
                        File = FileObj.File
                        try:
                            SearchImageID (FileObj, FileList)
                            if FileObj.Referenced:
                                # Emit the #define, padded to ValueStartPtr when it fits.
                                if (ValueStartPtr - len(DEFINE_STR + ID)) <= 0:
                                    Line = DEFINE_STR + ' ' + ID + ' ' + DecToHexStr(Index, 4) + '\n'
                                else:
                                    Line = DEFINE_STR + ' ' + ID + ' ' * (ValueStartPtr - len(DEFINE_STR + ID)) + DecToHexStr(Index, 4) + '\n'

                                if File not in FileDict:
                                    FileDict[File] = Index
                                else:
                                    # Same file seen before: emit a DUPLICATE block
                                    # pointing at the first occurrence instead of
                                    # re-encoding the image data.
                                    DuplicateBlock = pack('B', EFI_HII_IIBT_DUPLICATE)
                                    DuplicateBlock += pack('H', FileDict[File])
                                    ImageBuffer += DuplicateBlock
                                    BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
                                    TempBufferList = AscToHexList(DuplicateBlock)
                                    BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
                                    StringH.Append(Line)
                                    Index += 1
                                    continue

                                TmpFile = open(File.Path, 'rb')
                                Buffer = TmpFile.read()
                                TmpFile.close()
                                if File.Ext.upper() == '.PNG':
                                    # PNG is stored verbatim behind a block type + length.
                                    TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_PNG)
                                    TempBuffer += pack('I', len(Buffer))
                                    TempBuffer += Buffer
                                elif File.Ext.upper() == '.JPG':
                                    # Bytes 6..10 of a JFIF file hold the 'JFIF' tag.
                                    ImageType, = struct.unpack('4s', Buffer[6:10])
                                    if ImageType != b'JFIF':
                                        EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
                                    TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
                                    TempBuffer += pack('I', len(Buffer))
                                    TempBuffer += Buffer
                                elif File.Ext.upper() == '.BMP':
                                    # BMPs are re-encoded into EFI bitmap blocks and may
                                    # contribute a palette.
                                    TempBuffer, TempPalette = BmpImageDecoder(File, Buffer, PaletteIndex, FileObj.TransParent)
                                    if len(TempPalette) > 1:
                                        PaletteIndex += 1
                                        NewPalette = pack('H', len(TempPalette))
                                        NewPalette += TempPalette
                                        PaletteBuffer += NewPalette
                                        PaletteStr = WriteLine(PaletteStr, '// %s: %s: %s' % (DecToHexStr(PaletteIndex - 1, 4), ID, DecToHexStr(PaletteIndex - 1, 4)))
                                        TempPaletteList = AscToHexList(NewPalette)
                                        PaletteStr = WriteLine(PaletteStr, CreateArrayItem(TempPaletteList, 16) + '\n')
                                ImageBuffer += TempBuffer
                                BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
                                TempBufferList = AscToHexList(TempBuffer)
                                BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
                                StringH.Append(Line)
                                Index += 1
                        except IOError:
                            EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=File.Path)

            # Terminate the image block list.
            BufferStr = WriteLine(BufferStr, '// End of the Image Info')
            BufferStr = WriteLine(BufferStr, CreateArrayItem(DecToHexList(EFI_HII_IIBT_END, 2)) + '\n')
            ImageEnd = pack('B', EFI_HII_IIBT_END)
            ImageBuffer += ImageEnd

            # Offsets are relative to the package start: 4-byte package header
            # plus the two 4-byte offset fields = 12.
            if len(ImageBuffer) > 1:
                ImageInfoOffset = 12
            if len(PaletteBuffer) > 1:
                PaletteInfoOffset = 12 + len(ImageBuffer) - 1 # -1 is for the first empty pad byte of ImageBuffer

            IMAGE_PACKAGE_HDR = pack('=II', ImageInfoOffset, PaletteInfoOffset)
            # PACKAGE_HEADER_Length = PACKAGE_HEADER + ImageInfoOffset + PaletteInfoOffset + ImageBuffer Length + PaletteCount + PaletteBuffer Length
            if len(PaletteBuffer) > 1:
                PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1 + 2 + len(PaletteBuffer) - 1
            else:
                PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1
            if PaletteIndex > 1:
                PALETTE_INFO_HEADER = pack('H', PaletteIndex - 1)
            # EFI_HII_PACKAGE_HEADER length max value is 0xFFFFFF
            Hex_Length = '%06X' % PACKAGE_HEADER_Length
            if PACKAGE_HEADER_Length > 0xFFFFFF:
                EdkLogger.error("build", AUTOGEN_ERROR, "The Length of EFI_HII_PACKAGE_HEADER exceed its maximum value", ExtraData="[%s]" % str(Info))
            # 24-bit little-endian length followed by the 8-bit package type.
            PACKAGE_HEADER = pack('=HBB', int('0x' + Hex_Length[2:], 16), int('0x' + Hex_Length[0:2], 16), EFI_HII_PACKAGE_IMAGES)

            # Write the binary package; [1:] strips the leading pad byte.
            IdfGenBinBuffer.write(PACKAGE_HEADER)
            IdfGenBinBuffer.write(IMAGE_PACKAGE_HDR)
            if len(ImageBuffer) > 1 :
                IdfGenBinBuffer.write(ImageBuffer[1:])
            if PaletteIndex > 1:
                IdfGenBinBuffer.write(PALETTE_INFO_HEADER)
            if len(PaletteBuffer) > 1:
                IdfGenBinBuffer.write(PaletteBuffer[1:])

            if IdfGenCFlag:
                # Mirror the package as a C byte array in AutoGen.c.
                TotalLength = EFI_HII_ARRAY_SIZE_LENGTH + PACKAGE_HEADER_Length
                AutoGenC.Append("\n//\n//Image Pack Definition\n//\n")
                AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + Info.Module.BaseName + 'Images' + '[] = {\n')
                AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
                AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
                AllStr = WriteLine(AllStr, '// Image PACKAGE HEADER\n')
                IMAGE_PACKAGE_HDR_List = AscToHexList(PACKAGE_HEADER)
                IMAGE_PACKAGE_HDR_List += AscToHexList(IMAGE_PACKAGE_HDR)
                AllStr = WriteLine(AllStr, CreateArrayItem(IMAGE_PACKAGE_HDR_List, 16) + '\n')
                AllStr = WriteLine(AllStr, '// Image DATA\n')
                if BufferStr:
                    AllStr = WriteLine(AllStr, BufferStr)
                if PaletteStr:
                    AllStr = WriteLine(AllStr, '// Palette Header\n')
                    PALETTE_INFO_HEADER_List = AscToHexList(PALETTE_INFO_HEADER)
                    AllStr = WriteLine(AllStr, CreateArrayItem(PALETTE_INFO_HEADER_List, 16) + '\n')
                    AllStr = WriteLine(AllStr, '// Palette Data\n')
                    AllStr = WriteLine(AllStr, PaletteStr)
                AllStr = WriteLine(AllStr, '};')
                AutoGenC.Append(AllStr)
                AutoGenC.Append("\n")
                StringH.Append('\nextern unsigned char ' + Info.Module.BaseName + 'Images[];\n')
                StringH.Append("\n#define IMAGE_ARRAY_NAME %sImages\n" % Info.Module.BaseName)
# typedef struct _EFI_HII_IMAGE_PACKAGE_HDR {
# EFI_HII_PACKAGE_HEADER Header; # Standard package header, where Header.Type = EFI_HII_PACKAGE_IMAGES
# UINT32 ImageInfoOffset;
# UINT32 PaletteInfoOffset;
# } EFI_HII_IMAGE_PACKAGE_HDR;
# typedef struct {
# UINT32 Length:24;
# UINT32 Type:8;
# UINT8 Data[];
# } EFI_HII_PACKAGE_HEADER;
# typedef struct _EFI_HII_IMAGE_BLOCK {
# UINT8 BlockType;
# UINT8 BlockBody[];
# } EFI_HII_IMAGE_BLOCK;
def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
    # Convert a BMP file into an EFI_HII_IMAGE_BLOCK body plus (for paletted
    # depths) the raw palette data.  Returns (ImageBuffer, PaletteBuffer);
    # PaletteBuffer keeps a leading pad byte that the caller accounts for.
    ImageType, = struct.unpack('2s', Buffer[0:2])
    if ImageType!= b'BM': # BMP file type is 'BM'
        EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
    BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize', 'bfReserved1', 'bfReserved2', 'bfOffBits', 'biSize', 'biWidth', 'biHeight', 'biPlanes', 'biBitCount', 'biCompression', 'biSizeImage', 'biXPelsPerMeter', 'biYPelsPerMeter', 'biClrUsed', 'biClrImportant'])
    BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
    # Skip the 2-byte 'BM' magic, then unpack the remaining file+info header fields.
    BmpHeader = BMP_IMAGE_HEADER._make(BMP_IMAGE_HEADER_STRUCT.unpack_from(Buffer[2:]))
    #
    # Doesn't support compress.
    #
    if BmpHeader.biCompression != 0:
        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The compress BMP file %s is not support." % File.Path)

    # The Width and Height is UINT16 type in Image Package
    if BmpHeader.biWidth > 0xFFFF:
        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The BMP file %s Width is exceed 0xFFFF." % File.Path)
    if BmpHeader.biHeight > 0xFFFF:
        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The BMP file %s Height is exceed 0xFFFF." % File.Path)

    PaletteBuffer = pack('x')
    # Select the block type by bit depth (with or without transparency) and
    # compute the row width in bytes; paletted depths also record the palette
    # index and slice the palette out of the bytes between the header and the
    # pixel data (bfOffBits).
    if BmpHeader.biBitCount == 1:
        if TransParent:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT_TRANS)
        else:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
        ImageBuffer += pack('B', PaletteIndex)
        Width = (BmpHeader.biWidth + 7)//8
        if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
            PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
    elif BmpHeader.biBitCount == 4:
        if TransParent:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT_TRANS)
        else:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
        ImageBuffer += pack('B', PaletteIndex)
        Width = (BmpHeader.biWidth + 1)//2
        if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
            PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
    elif BmpHeader.biBitCount == 8:
        if TransParent:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT_TRANS)
        else:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT)
        ImageBuffer += pack('B', PaletteIndex)
        Width = BmpHeader.biWidth
        if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
            PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
    elif BmpHeader.biBitCount == 24:
        # 24-bit images carry RGB directly, so no palette byte is emitted.
        if TransParent:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT_TRANS)
        else:
            ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT)
        Width = BmpHeader.biWidth * 3
    else:
        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "Only support the 1 bit, 4 bit, 8bit, 24 bit BMP files.", ExtraData="[%s]" % str(File.Path))

    ImageBuffer += pack('H', BmpHeader.biWidth)
    ImageBuffer += pack('H', BmpHeader.biHeight)
    Start = BmpHeader.bfOffBits
    End = BmpHeader.bfSize - 1
    # BMP rows are stored bottom-up and padded to 4-byte boundaries; walk the
    # rows backwards (top-down for EFI), dropping the per-row padding.
    for Height in range(0, BmpHeader.biHeight):
        if Width % 4 != 0:
            Start = End + (Width % 4) - 4 - Width
        else:
            Start = End - Width
        ImageBuffer += Buffer[Start + 1 : Start + Width + 1]
        End = Start
    # handle the Palette info, BMP use 4 bytes for R, G, B and Reserved info while EFI_HII_RGB_PIXEL only have the R, G, B info
    if PaletteBuffer and len(PaletteBuffer) > 1:
        PaletteTemp = pack('x')
        for Index in range(0, len(PaletteBuffer)):
            if Index % 4 == 3:
                continue
            PaletteTemp += PaletteBuffer[Index:Index+1]
        PaletteBuffer = PaletteTemp[1:]
    return ImageBuffer, PaletteBuffer
## Create common code
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateHeaderCode(Info, AutoGenC, AutoGenH):
    # Emit the common prologue of AutoGen.h and, for non-library modules,
    # the prologue of AutoGen.c.
    #
    # NOTE(review): the original body checked `Info.IsLibrary` twice in a row;
    # the second guard was unreachable (the first already returned), so it has
    # been removed. Behavior is unchanged.

    # file header
    AutoGenH.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.h'}))
    # header file Prologue (include guard + extern "C" opening)
    AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-', '_')}))
    AutoGenH.Append(gAutoGenHCppPrologueString)

    # header files includes: the first registered header for this module type
    if Info.ModuleType in gModuleTypeHeaderFile:
        AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0])
    #
    # if either PcdLib in [LibraryClasses] sections or there exist Pcd section, add PcdLib.h
    # As if modules only uses FixedPcd, then PcdLib is not needed in [LibraryClasses] section.
    #
    if 'PcdLib' in Info.Module.LibraryClasses or Info.Module.Pcds:
        AutoGenH.Append("#include <Library/PcdLib.h>\n")

    AutoGenH.Append('\nextern GUID gEfiCallerIdGuid;')
    AutoGenH.Append('\nextern GUID gEdkiiDscPlatformGuid;')
    AutoGenH.Append('\nextern CHAR8 *gEfiCallerBaseName;\n\n')

    # Libraries only receive the shared declarations above; the caller-id
    # macros and the whole AutoGen.c prologue are module-only.
    if Info.IsLibrary:
        return

    AutoGenH.Append("#define EFI_CALLER_ID_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.Guid))
    AutoGenH.Append("#define EDKII_DSC_PLATFORM_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.PlatformInfo.Guid))

    # C file header
    AutoGenC.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.c'}))
    # C file header files includes
    if Info.ModuleType in gModuleTypeHeaderFile:
        for Inc in gModuleTypeHeaderFile[Info.ModuleType]:
            AutoGenC.Append("#include <%s>\n" % Inc)
    else:
        AutoGenC.Append("#include <%s>\n" % gBasicHeaderFile)

    #
    # Publish the CallerId Guid
    #
    AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEfiCallerIdGuid = %s;\n' % GuidStringToGuidStructureString(Info.Guid))
    AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEdkiiDscPlatformGuid = %s;\n' % GuidStringToGuidStructureString(Info.PlatformInfo.Guid))
    AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED CHAR8 *gEfiCallerBaseName = "%s";\n' % Info.Name)
## Create common code for header file
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
#
def CreateFooterCode(Info, AutoGenC, AutoGenH):
    """Append the closing epilogue (the include-guard #endif) to AutoGen.h.

    Info and AutoGenC are accepted only for signature symmetry with the
    other Create*Code helpers; nothing is written to the C file here.
    """
    AutoGenH.Append(gAutoGenHEpilogueString)
## Create code for a module
#
# @param Info The ModuleAutoGen object
# @param AutoGenC The TemplateString object for C code
# @param AutoGenH The TemplateString object for header file
# @param StringH The TemplateString object for header file
# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
# @param UniGenBinBuffer Buffer to store uni string package data
# @param StringIdf The TemplateString object for header file
# @param IdfGenCFlag IdfString is generated into AutoGen C file when it is set to True
# @param IdfGenBinBuffer Buffer to store Idf string package data
#
def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer, StringIdf, IdfGenCFlag, IdfGenBinBuffer):
    """Generate all AutoGen C and header content for one module.

    Emits, in order: the common header, GUID/Protocol/PPI definitions, PCD
    accessors, library constructor/destructor glue and entry/unload points;
    then, when the module has .uni or .idf files, the string and image
    definition headers; finally the footer (include-guard close).
    """
    CreateHeaderCode(Info, AutoGenC, AutoGenH)
    CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH)
    CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH)
    CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH)
    CreatePcdCode(Info, AutoGenC, AutoGenH)
    CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH)
    CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH)
    CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH)
    CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH)

    if Info.UnicodeFileList:
        FileName = "%sStrDefs.h" % Info.Name
        StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
        StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-', '_')}))
        CreateUnicodeStringCode(Info, AutoGenC, StringH, UniGenCFlag, UniGenBinBuffer)

        # GUID/Protocol/PPI and fixed-PCD macros below are exposed to the VFR
        # compiler only (wrapped in #ifdef VFRCOMPILE before being emitted).
        GuidMacros = []
        for Guid in Info.Module.Guids:
            if Guid in Info.Module.GetGuidsUsedByPcd():
                continue
            GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
        for Guid, Value in list(Info.Module.Protocols.items()) + list(Info.Module.Ppis.items()):
            GuidMacros.append('#define %s %s' % (Guid, Value))
        # supports FixedAtBuild and FeaturePcd usage in VFR file
        if Info.VfrFileList and Info.ModulePcdList:
            GuidMacros.append('#define %s %s' % ('FixedPcdGetBool(TokenName)', '_PCD_VALUE_##TokenName'))
            GuidMacros.append('#define %s %s' % ('FixedPcdGet8(TokenName)', '_PCD_VALUE_##TokenName'))
            GuidMacros.append('#define %s %s' % ('FixedPcdGet16(TokenName)', '_PCD_VALUE_##TokenName'))
            GuidMacros.append('#define %s %s' % ('FixedPcdGet32(TokenName)', '_PCD_VALUE_##TokenName'))
            GuidMacros.append('#define %s %s' % ('FixedPcdGet64(TokenName)', '_PCD_VALUE_##TokenName'))
            GuidMacros.append('#define %s %s' % ('FeaturePcdGet(TokenName)', '_PCD_VALUE_##TokenName'))
            for Pcd in Info.ModulePcdList:
                if Pcd.Type in [TAB_PCDS_FIXED_AT_BUILD, TAB_PCDS_FEATURE_FLAG]:
                    TokenCName = Pcd.TokenCName
                    Value = Pcd.DefaultValue
                    if Pcd.DatumType == 'BOOLEAN':
                        # VFR expects numeric booleans, not TRUE/FALSE keywords.
                        BoolValue = Value.upper()
                        if BoolValue == 'TRUE':
                            Value = '1'
                        elif BoolValue == 'FALSE':
                            Value = '0'
                    # A "mixed" PCD is emitted under its unified name.
                    for PcdItem in GlobalData.MixedPcd:
                        if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                            TokenCName = PcdItem[0]
                            break
                    GuidMacros.append('#define %s %s' % ('_PCD_VALUE_'+TokenCName, Value))

        if Info.IdfFileList:
            GuidMacros.append('#include "%sImgDefs.h"' % Info.Name)

        if GuidMacros:
            StringH.Append('\n#ifdef VFRCOMPILE\n%s\n#endif\n' % '\n'.join(GuidMacros))

        StringH.Append("\n#endif\n")
        AutoGenH.Append('#include "%s"\n' % FileName)

    if Info.IdfFileList:
        FileName = "%sImgDefs.h" % Info.Name
        StringIdf.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
        StringIdf.Append(gAutoGenHPrologueString.Replace({'File':'IMAGEDEFS', 'Guid':Info.Guid.replace('-', '_')}))
        CreateIdfFileCode(Info, AutoGenC, StringIdf, IdfGenCFlag, IdfGenBinBuffer)
        StringIdf.Append("\n#endif\n")
        AutoGenH.Append('#include "%s"\n' % FileName)

    CreateFooterCode(Info, AutoGenC, AutoGenH)
## Create the code file
#
# @param FilePath The path of code file
# @param Content The content of code file
# @param IsBinaryFile The flag indicating if the file is binary file or not
#
# @retval True If file content is changed or file doesn't exist
# @retval False If the file exists and the content is not changed
#
def Generate(FilePath, Content, IsBinaryFile):
    """Write *Content* to *FilePath* only when it differs from disk.

    Thin wrapper around SaveFileOnChange: returns True when the file was
    (re)written, False when it already had identical content.
    """
    return SaveFileOnChange(FilePath, Content, IsBinaryFile)
| edk2-master | BaseTools/Source/Python/AutoGen/GenC.py |
## @file
# This file is used to collect all defined strings in multiple uni files
#
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.<BR>
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os, codecs, re
import shlex
import Common.EdkLogger as EdkLogger
from io import BytesIO
from Common.BuildToolError import *
from Common.StringUtils import GetLineNo
from Common.Misc import PathClass
from Common.LongFilePathSupport import LongFilePath
from Common.GlobalData import *
##
# Static definitions
#
# Printable escape sequences accepted in .uni source text ...
UNICODE_WIDE_CHAR = u'\\wide'
UNICODE_NARROW_CHAR = u'\\narrow'
UNICODE_NON_BREAKING_CHAR = u'\\nbr'
UNICODE_UNICODE_CR = '\r'
UNICODE_UNICODE_LF = '\n'
# ... and the single characters they are translated to during pre-processing.
# NOTE(review): U+FFF0..U+FFF2 appear to be the HII font-control code points
# used by UEFI string packages - confirm against the UEFI HII spec.
NARROW_CHAR = u'\uFFF0'
WIDE_CHAR = u'\uFFF1'
NON_BREAKING_CHAR = u'\uFFF2'
CR = u'\u000D'
LF = u'\u000A'
NULL = u'\u0000'
TAB = u'\t'
# Temporary stand-in for an escaped backslash ('\\\\') while other escapes
# are rewritten; restored to a single backslash at the end of pre-processing.
BACK_SLASH_PLACEHOLDER = u'\u0006'

# Matches a whole-line '#include "file"' / '#include <file>' directive.
gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
## Convert a unicode string to a Hex list
#
# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
#
#   @param  Uni:    The python unicode string
#
#   @retval List:   The formatted hex list
#
def UniToHexList(Uni):
    """Return *Uni* as '0xNN' byte strings, low byte first (UTF-16LE order)."""
    HexList = []
    for Ch in Uni:
        Word = '%04X' % ord(Ch)
        # little-endian: low byte, then high byte
        HexList.extend(('0x' + Word[2:4], '0x' + Word[0:2]))
    return HexList
## Mapping from ISO 639-2 three-letter language codes to the equivalent
# ISO 639-1 two-letter codes; used to convert compatibility-mode language
# identifiers into RFC 4646 form in GetLanguageCode().
LangConvTable = {'eng':'en', 'fra':'fr', \
                 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
                 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
                 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
                 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
                 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
                 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
                 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
                 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
                 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
                 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
                 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
                 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
                 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
                 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
                 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
                 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
                 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
                 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
                 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
                 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
                 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
                 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
                 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
                 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
                 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
                 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
                 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
                 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
                 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
                 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
                 'zho':'zh', 'zul':'zu'}
## GetLanguageCode
#
# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
# ISO 639-2 language codes supported in compatibility mode
# RFC 4646 language codes supported in native mode
#
#   @param  LangName:   Language codes read from .UNI file
#
#   @retval LangName:   Valid language code in RFC 4646 format or None
#
def GetLanguageCode(LangName, IsCompatibleMode, File):
    """Validate *LangName* and return it in RFC 4646 form.

    In compatibility mode only 3-letter ISO 639-2 alphabetic codes are
    accepted; they are translated to their 2-letter ISO 639-1 equivalent
    when LangConvTable has one.  In native mode the RFC 4646 shapes
    (xx, xxx, xx-..., xxx-..., x-/X- private use) are accepted.
    Invalid codes abort via EdkLogger.error (which raises).
    """
    length = len(LangName)
    if IsCompatibleMode:
        if length == 3 and LangName.isalpha():
            TempLangName = LangConvTable.get(LangName.lower())
            if TempLangName is not None:
                return TempLangName
            return LangName
        else:
            EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid ISO 639-2 language code : %s" % LangName, File)

    # Private-use tags such as 'x-...' pass through unchanged.
    # BUGFIX: guard the subscripts - a 0/1-character code previously raised
    # IndexError here instead of reaching the format-error report below.
    if length >= 2 and (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
        return LangName
    if length == 2:
        if LangName.isalpha():
            return LangName
    elif length == 3:
        # A 3-letter code with a 2-letter equivalent must be written in the
        # 2-letter form, so reject those that LangConvTable knows about.
        if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
            return LangName
    elif length == 5:
        if LangName[0:2].isalpha() and LangName[2] == '-':
            return LangName
    elif length >= 6:
        if LangName[0:2].isalpha() and LangName[2] == '-':
            return LangName
        if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
            return LangName

    EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
## Ucs2Codec
#
# This is only a partial codec implementation. It only supports
# encoding, and is primarily used to check that all the characters are
# valid for UCS-2.
#
class Ucs2Codec(codecs.Codec):
    """Encode-only codec that rejects text not representable in UCS-2."""

    def __init__(self):
        self.__utf16 = codecs.lookup('utf-16')

    def encode(self, input, errors='strict'):
        """Reject surrogates and non-BMP code points, then delegate to utf-16."""
        for Character in input:
            CodePoint = ord(Character)
            if 0xd800 <= CodePoint <= 0xdfff:
                raise ValueError("Code Point is in range reserved for "
                                 "UTF-16 surrogate pairs")
            if CodePoint > 0xffff:
                raise ValueError("Code Point too large to encode in UCS-2")
        return self.__utf16.encode(input)
# Shared encoder instance used by the 'ucs-2' codec registered below.
TheUcs2Codec = Ucs2Codec()

def Ucs2Search(name):
    """codecs.register() hook: resolve 'ucs-2'/'ucs_2' to TheUcs2Codec.

    Only encoding is functional; 'decode' is inherited from the codecs.Codec
    base class and is not implemented (this codec exists purely to validate
    that .uni content fits in UCS-2).
    """
    if name in ['ucs-2', 'ucs_2']:
        return codecs.CodecInfo(
            name=name,
            encode=TheUcs2Codec.encode,
            decode=TheUcs2Codec.decode)
    else:
        return None

# Make codecs.lookup('ucs-2') work process-wide.
codecs.register(Ucs2Search)
## StringDefClassObject
#
# A structure for language definition
#
class StringDefClassObject(object):
    """One string definition: name, NUL-terminated value, token and
    reference bookkeeping, plus the hex byte lists used by code generation."""

    def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
        self.StringName = ''
        self.StringNameByteList = []
        self.StringValue = ''
        # NOTE: historically initialised to a str (not a list) when no value
        # is supplied - preserved for compatibility with existing callers.
        self.StringValueByteList = ''
        self.Token = 0
        self.Referenced = Referenced
        # Language code whose definition this entry borrows, or ''.
        self.UseOtherLangDef = UseOtherLangDef
        self.Length = 0

        if Name is not None:
            self.StringName = Name
            self.StringNameByteList = UniToHexList(Name)
        if Value is not None:
            # UpdateValue appends the trailing NUL and refreshes the
            # derived byte list and length.
            self.UpdateValue(Value)
        if Token is not None:
            self.Token = Token

    def __str__(self):
        Fields = (self.StringName, self.Token, self.Referenced,
                  self.StringValue, self.UseOtherLangDef)
        return ' '.join(repr(Field) for Field in Fields)

    def UpdateValue(self, Value = None):
        """Replace the value (a NUL is appended) and refresh derived fields."""
        if Value is not None:
            self.StringValue = Value + u'\x00' # Add a NULL at string tail
            self.StringValueByteList = UniToHexList(self.StringValue)
            self.Length = len(self.StringValueByteList)
def StripComments(Line):
    """Strip a trailing //-comment from *Line*, ignoring '//' that appears
    inside a double-quoted string, and return the whitespace-stripped rest."""
    Marker = u'//'
    Pos = Line.find(Marker)
    while Pos != -1:
        # Count unescaped quotes before the candidate marker: an odd count
        # means the marker sits inside a string literal, so keep scanning.
        UnescapedQuotes = Line.count(u'"', 0, Pos) - Line.count(u'\\"', 0, Pos)
        if UnescapedQuotes % 2 == 0:
            return Line[:Pos].strip()
        Pos = Line.find(Marker, Pos + 1)
    return Line.strip()
## UniFileClassObject
#
# A structure for .uni file definition
#
class UniFileClassObject(object):
def __init__(self, FileList = [], IsCompatibleMode = False, IncludePathList = []):
self.FileList = FileList
self.Token = 2
self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
self.OrderedStringDict = {} #{ u'LanguageIdentifier' : {StringName:(IndexInList)} }
self.OrderedStringListByToken = {} #{ u'LanguageIdentifier' : {Token: StringDefClassObject} }
self.IsCompatibleMode = IsCompatibleMode
self.IncludePathList = IncludePathList
if len(self.FileList) > 0:
self.LoadUniFiles(FileList)
    #
    # Get Language definition
    #
    def GetLangDef(self, File, Line):
        """Parse one '#langdef <code> "<printable name>"' line.

        Registers the language in self.LanguageDef, seeds the two mandatory
        strings ($LANGUAGE_NAME / $PRINTABLE_LANGUAGE_NAME), and for a newly
        added language copies every already-known string token so all
        language lists stay index-aligned.  Returns True on success; a
        malformed line aborts via EdkLogger.error.
        """
        Lang = shlex.split(Line.split(u"//")[0])
        if len(Lang) != 3:
            # Malformed line: re-open the file only to compute the line
            # number for the error message.
            try:
                FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
            except UnicodeError as X:
                EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File);
            except:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
            LineNo = GetLineNo(FileIn, Line, False)
            EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
                            ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
        else:
            LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
            LangPrintName = Lang[2]

        IsLangInDef = False
        for Item in self.LanguageDef:
            if Item[0] == LangName:
                IsLangInDef = True
                break;

        if not IsLangInDef:
            self.LanguageDef.append([LangName, LangPrintName])

        #
        # Add language string
        #
        # Tokens 0 and 1 are reserved for these two strings in every language.
        self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
        self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)

        if not IsLangInDef:
            #
            # The found STRING tokens will be added into new language string list
            # so that the unique STRING identifier is reserved for all languages in the package list.
            #
            FirstLangName = self.LanguageDef[0][0]
            if LangName != FirstLangName:
                for Index in range (2, len (self.OrderedStringList[FirstLangName])):
                    Item = self.OrderedStringList[FirstLangName][Index]
                    if Item.UseOtherLangDef != '':
                        OtherLang = Item.UseOtherLangDef
                    else:
                        OtherLang = FirstLangName
                    self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName, '', Item.Referenced, Item.Token, OtherLang))
                    self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
        return True
@staticmethod
def OpenUniFile(FileName):
#
# Read file
#
try:
UniFile = open(FileName, mode='rb')
FileIn = UniFile.read()
UniFile.close()
except:
EdkLogger.Error("build", FILE_OPEN_FAILURE, ExtraData=File)
#
# Detect Byte Order Mark at beginning of file. Default to UTF-8
#
Encoding = 'utf-8'
if (FileIn.startswith(codecs.BOM_UTF16_BE) or
FileIn.startswith(codecs.BOM_UTF16_LE)):
Encoding = 'utf-16'
UniFileClassObject.VerifyUcs2Data(FileIn, FileName, Encoding)
UniFile = BytesIO(FileIn)
Info = codecs.lookup(Encoding)
(Reader, Writer) = (Info.streamreader, Info.streamwriter)
return codecs.StreamReaderWriter(UniFile, Reader, Writer)
    @staticmethod
    def VerifyUcs2Data(FileIn, FileName, Encoding):
        """Verify that the raw bytes *FileIn* decode with *Encoding* and that
        every decoded character is encodable as UCS-2 (BMP only).

        On failure the buffer is re-read line by line purely to report the
        first offending line number, then a parser error is raised via
        EdkLogger.error (which does not return).
        """
        Ucs2Info = codecs.lookup('ucs-2')
        #
        # Convert to unicode
        #
        try:
            FileDecoded = codecs.decode(FileIn, Encoding)
            Ucs2Info.encode(FileDecoded)
        except:
            # Whole-buffer check failed; scan line by line to locate it.
            UniFile = BytesIO(FileIn)
            Info = codecs.lookup(Encoding)
            (Reader, Writer) = (Info.streamreader, Info.streamwriter)
            File = codecs.StreamReaderWriter(UniFile, Reader, Writer)
            LineNumber = 0
            ErrMsg = lambda Encoding, LineNumber: \
                     '%s contains invalid %s characters on line %d.' % \
                     (FileName, Encoding, LineNumber)
            while True:
                LineNumber = LineNumber + 1
                try:
                    Line = File.readline()
                    if Line == '':
                        # Reached EOF without reproducing the failure when
                        # decoding - blame the source encoding itself.
                        EdkLogger.error('Unicode File Parser', PARSER_ERROR,
                                        ErrMsg(Encoding, LineNumber))
                    Ucs2Info.encode(Line)
                except:
                    # This line either failed to decode or is not UCS-2.
                    EdkLogger.error('Unicode File Parser', PARSER_ERROR,
                                    ErrMsg('UCS-2', LineNumber))
    #
    # Get String name and value
    #
    def GetStringObject(self, Item):
        """Parse one inline item '#string NAME #language ll "value" ...' and
        record every per-language value via AddStringToList."""
        Language = ''
        Value = ''

        Name = Item.split()[1]
        # Check the string name
        if Name != '':
            MatchString = gIdentifierPattern.match(Name)
            if MatchString is None:
                EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
        LanguageList = Item.split(u'#language ')
        for IndexI in range(len(LanguageList)):
            if IndexI == 0:
                # LanguageList[0] is the '#string NAME ' prefix, not a language entry.
                continue
            else:
                Language = LanguageList[IndexI].split()[0]
                # Value is everything between the first and the last double quote.
                Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
                Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
                self.AddStringToList(Name, Language, Value)
    #
    # Get include file list and load them
    #
    def GetIncludeFile(self, Item, Dir):
        """Extract the quoted/bracketed file name from an '#include' item and
        parse that file.

        NOTE(review): *Dir* is accepted but never used here; include-path
        resolution actually happens in PreProcess - confirm before relying
        on this method for path handling.
        """
        FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
        self.LoadUniFile(FileName)
#
# Pre-process before parse .uni file
#
def PreProcess(self, File):
try:
FileIn = UniFileClassObject.OpenUniFile(LongFilePath(File.Path))
except UnicodeError as X:
EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path);
except OSError:
EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)
except:
EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File.Path);
Lines = []
#
# Use unique identifier
#
for Line in FileIn:
Line = Line.strip()
Line = Line.replace(u'\\\\', BACK_SLASH_PLACEHOLDER)
Line = StripComments(Line)
#
# Ignore empty line
#
if len(Line) == 0:
continue
Line = Line.replace(u'/langdef', u'#langdef')
Line = Line.replace(u'/string', u'#string')
Line = Line.replace(u'/language', u'#language')
Line = Line.replace(u'/include', u'#include')
Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
Line = Line.replace(u'\\r\\n', CR + LF)
Line = Line.replace(u'\\n', CR + LF)
Line = Line.replace(u'\\r', CR)
Line = Line.replace(u'\\t', u' ')
Line = Line.replace(u'\t', u' ')
Line = Line.replace(u'\\"', u'"')
Line = Line.replace(u"\\'", u"'")
Line = Line.replace(BACK_SLASH_PLACEHOLDER, u'\\')
StartPos = Line.find(u'\\x')
while (StartPos != -1):
EndPos = Line.find(u'\\', StartPos + 1, StartPos + 7)
if EndPos != -1 and EndPos - StartPos == 6 :
if g4HexChar.match(Line[StartPos + 2 : EndPos], re.UNICODE):
EndStr = Line[EndPos: ]
UniStr = Line[StartPos + 2: EndPos]
if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
if EndStr[6] == u'\\' and g4HexChar.match(EndStr[2 : 6], re.UNICODE):
Line = Line[0 : StartPos] + UniStr + EndStr
else:
Line = Line[0 : StartPos] + UniStr + EndStr[1:]
StartPos = Line.find(u'\\x', StartPos + 1)
IncList = gIncludePattern.findall(Line)
if len(IncList) == 1:
for Dir in [File.Dir] + self.IncludePathList:
IncFile = PathClass(str(IncList[0]), Dir)
if os.path.isfile(IncFile.Path):
Lines.extend(self.PreProcess(IncFile))
break
else:
EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, Message="Cannot find include file", ExtraData=str(IncList[0]))
continue
Lines.append(Line)
return Lines
    #
    # Load a .uni file
    #
    def LoadUniFile(self, File = None):
        """Parse a single .uni file (a PathClass) into the string database.

        Two definition layouts are recognised: the multi-line format
        (#string / #language on separate lines, value lines following) and
        the inline format (#string ... #language ... "value" with quoted
        continuation lines).
        """
        if File is None:
            EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
        self.File = File
        #
        # Process special char in file
        #
        Lines = self.PreProcess(File)
        #
        # Get Unicode Information
        #
        for IndexI in range(len(Lines)):
            Line = Lines[IndexI]
            # NOTE(review): SecondLine/ThirdLine keep their values from a
            # previous iteration near end-of-file, and are unbound when the
            # file has fewer than 2/3 lines - long-standing behaviour that
            # the format checks below rely on as-is.
            if (IndexI + 1) < len(Lines):
                SecondLine = Lines[IndexI + 1]
            if (IndexI + 2) < len(Lines):
                ThirdLine = Lines[IndexI + 2]

            #
            # Get Language def information
            #
            if Line.find(u'#langdef ') >= 0:
                self.GetLangDef(File, Line)
                continue

            Name = ''
            Language = ''
            Value = ''
            #
            # Get string def information format 1 as below
            #
            #     #string MY_STRING_1
            #     #language eng
            #     My first English string line 1
            #     My first English string line 2
            #     #string MY_STRING_1
            #     #language spa
            #     Mi segunda secuencia 1
            #     Mi segunda secuencia 2
            #
            if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
                SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
                ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
                Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
                Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
                for IndexJ in range(IndexI + 2, len(Lines)):
                    if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0:
                        Value = Value + Lines[IndexJ]
                    else:
                        # NOTE(review): rebinding IndexI does not advance the
                        # outer range() loop; the skipped lines are revisited
                        # and the duplicate definitions are absorbed by
                        # AddStringToList's update-in-place path.
                        IndexI = IndexJ
                        break
                # Value = Value.replace(u'\r\n', u'')
                Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
                # Check the string name
                if not self.IsCompatibleMode and Name != '':
                    MatchString = gIdentifierPattern.match(Name)
                    if MatchString is None:
                        EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid character.' % (Name, self.File))
                self.AddStringToList(Name, Language, Value)
                continue

            #
            # Get string def information format 2 as below
            #
            #    #string MY_STRING_1     #language eng     "My first English string line 1"
            #                                              "My first English string line 2"
            #                            #language spa     "Mi segunda secuencia 1"
            #                                              "Mi segunda secuencia 2"
            #    #string MY_STRING_2     #language eng     "My first English string line 1"
            #                                              "My first English string line 2"
            #    #string MY_STRING_2     #language spa     "Mi segunda secuencia 1"
            #                                              "Mi segunda secuencia 2"
            #
            if Line.find(u'#string ') >= 0 and Line.find(u'#language ') >= 0:
                StringItem = Line
                for IndexJ in range(IndexI + 1, len(Lines)):
                    if Lines[IndexJ].find(u'#string ') >= 0 and Lines[IndexJ].find(u'#language ') >= 0:
                        IndexI = IndexJ
                        break
                    elif Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') >= 0:
                        # Another '#language' clause for the same string.
                        StringItem = StringItem + Lines[IndexJ]
                    elif Lines[IndexJ].count(u'\"') >= 2:
                        # Quoted continuation line: merge by joining at the quotes.
                        StringItem = StringItem[ : StringItem.rfind(u'\"')] + Lines[IndexJ][Lines[IndexJ].find(u'\"') + len(u'\"') : ]
                self.GetStringObject(StringItem)
                continue
#
# Load multiple .uni files
#
def LoadUniFiles(self, FileList):
if len(FileList) > 0:
for File in FileList:
self.LoadUniFile(File)
    #
    # Add a string to list
    #
    def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
        """Insert or update string *Name* for *Language*.

        If the name already exists in that language, only its value is
        updated in place; otherwise a new entry is appended (or inserted at
        *Index* >= 0) and a placeholder entry borrowing this language's
        definition is added to every other declared language so all lists
        stay index-aligned.
        """
        # The language must have been declared by a #langdef first.
        for LangNameItem in self.LanguageDef:
            if Language == LangNameItem[0]:
                break
        else:
            EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
                            % (Language, Name, self.File))

        if Language not in self.OrderedStringList:
            self.OrderedStringList[Language] = []
            self.OrderedStringDict[Language] = {}

        IsAdded = True
        if Name in self.OrderedStringDict[Language]:
            # Known token: update its value in place instead of adding.
            IsAdded = False
            if Value is not None:
                ItemIndexInList = self.OrderedStringDict[Language][Name]
                Item = self.OrderedStringList[Language][ItemIndexInList]
                Item.UpdateValue(Value)
                # The string now has its own definition in this language.
                Item.UseOtherLangDef = ''

        if IsAdded:
            # The caller-supplied Token is deliberately overridden: tokens
            # are simply the position in the (aligned) per-language lists.
            Token = len(self.OrderedStringList[Language])
            if Index == -1:
                self.OrderedStringList[Language].append(StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
                self.OrderedStringDict[Language][Name] = Token
                for LangName in self.LanguageDef:
                    #
                    # New STRING token will be added into all language string lists.
                    # so that the unique STRING identifier is reserved for all languages in the package list.
                    #
                    if LangName[0] != Language:
                        if UseOtherLangDef != '':
                            OtherLangDef = UseOtherLangDef
                        else:
                            OtherLangDef = Language
                        self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name, '', Referenced, Token, OtherLangDef))
                        self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
            else:
                self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
                self.OrderedStringDict[Language][Name] = Index
#
# Set the string as referenced
#
def SetStringReferenced(self, Name):
#
# String stoken are added in the same order in all language string lists.
# So, only update the status of string stoken in first language string list.
#
Lang = self.LanguageDef[0][0]
if Name in self.OrderedStringDict[Lang]:
ItemIndexInList = self.OrderedStringDict[Lang][Name]
Item = self.OrderedStringList[Lang][ItemIndexInList]
Item.Referenced = True
#
# Search the string in language definition by Name
#
def FindStringValue(self, Name, Lang):
if Name in self.OrderedStringDict[Lang]:
ItemIndexInList = self.OrderedStringDict[Lang][Name]
return self.OrderedStringList[Lang][ItemIndexInList]
return None
#
# Search the string in language definition by Token
#
def FindByToken(self, Token, Lang):
for Item in self.OrderedStringList[Lang]:
if Item.Token == Token:
return Item
return None
    #
    # Re-order strings and re-generate tokens
    #
    def ReToken(self):
        """Re-assign tokens so referenced strings get the low, contiguous
        token numbers and unreferenced strings follow after them.

        Reference status is taken from the first declared language; the
        other language lists are index-aligned and receive the same token.
        OrderedStringListByToken is rebuilt for fast token lookup.
        """
        FirstLangName = self.LanguageDef[0][0]

        # Convert the OrderedStringList to be OrderedStringListByToken in order to faciliate future search by token
        for LangNameItem in self.LanguageDef:
            self.OrderedStringListByToken[LangNameItem[0]] = {}

        #
        # Use small token for all referred string stoken.
        #
        RefToken = 0
        for Index in range (0, len (self.OrderedStringList[FirstLangName])):
            FirstLangItem = self.OrderedStringList[FirstLangName][Index]
            if FirstLangItem.Referenced == True:
                for LangNameItem in self.LanguageDef:
                    LangName = LangNameItem[0]
                    OtherLangItem = self.OrderedStringList[LangName][Index]
                    # Propagate the flag so every language agrees.
                    OtherLangItem.Referenced = True
                    OtherLangItem.Token = RefToken
                    self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
                RefToken = RefToken + 1

        #
        # Use big token for all unreferred string stoken.
        #
        UnRefToken = 0
        for Index in range (0, len (self.OrderedStringList[FirstLangName])):
            FirstLangItem = self.OrderedStringList[FirstLangName][Index]
            if FirstLangItem.Referenced == False:
                for LangNameItem in self.LanguageDef:
                    LangName = LangNameItem[0]
                    OtherLangItem = self.OrderedStringList[LangName][Index]
                    # Unreferenced tokens start right after the last referenced one.
                    OtherLangItem.Token = RefToken + UnRefToken
                    self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
                UnRefToken = UnRefToken + 1
#
# Show the instance itself
#
def ShowMe(self):
print(self.LanguageDef)
#print self.OrderedStringList
for Item in self.OrderedStringList:
print(Item)
for Member in self.OrderedStringList[Item]:
print(str(Member))
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    # NOTE(review): hard-coded Windows-only sample paths; this entry point is
    # a developer smoke test, not part of the normal build flow.
    a = UniFileClassObject([PathClass("C:\\Edk\\Strings.uni"), PathClass("C:\\Edk\\Strings2.uni")])
    a.ReToken()
    a.ShowMe()
| edk2-master | BaseTools/Source/Python/AutoGen/UniClassObject.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2007 - 2021, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2020 - 2021, Arm Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
import string
import re
import os.path as path
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Common.BuildToolError import *
from Common.Misc import *
from Common.StringUtils import *
from .BuildEngine import *
import Common.GlobalData as GlobalData
from collections import OrderedDict
from Common.DataType import TAB_COMPILER_MSFT
## Regular expression for finding header file inclusions
gIncludePattern = re.compile(r"^[ \t]*[#%]?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)

## Regular expression for matching macro used in header file inclusion
# BUGFIX: use a raw string so '\(' / '\)' are genuine regex escapes instead
# of invalid Python string escapes (which emit a SyntaxWarning/
# DeprecationWarning on modern interpreters). The compiled pattern is
# unchanged.
gMacroPattern = re.compile(r"([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)

# Cache of path -> is-a-file results, filled while scanning dependencies.
gIsFileMap = {}

## pattern for include style in Edk.x code
gProtocolDefinition = "Protocol/%(HeaderKey)s/%(HeaderKey)s.h"
gGuidDefinition = "Guid/%(HeaderKey)s/%(HeaderKey)s.h"
gArchProtocolDefinition = "ArchProtocol/%(HeaderKey)s/%(HeaderKey)s.h"
gPpiDefinition = "Ppi/%(HeaderKey)s/%(HeaderKey)s.h"

## Mapping from the legacy Edk.x include macros to the header path template
# used to resolve them.
gIncludeMacroConversion = {
  "EFI_PROTOCOL_DEFINITION"         :   gProtocolDefinition,
  "EFI_GUID_DEFINITION"             :   gGuidDefinition,
  "EFI_ARCH_PROTOCOL_DEFINITION"    :   gArchProtocolDefinition,
  "EFI_PROTOCOL_PRODUCER"           :   gProtocolDefinition,
  "EFI_PROTOCOL_CONSUMER"           :   gProtocolDefinition,
  "EFI_PROTOCOL_DEPENDENCY"         :   gProtocolDefinition,
  "EFI_ARCH_PROTOCOL_PRODUCER"      :   gArchProtocolDefinition,
  "EFI_ARCH_PROTOCOL_CONSUMER"      :   gArchProtocolDefinition,
  "EFI_ARCH_PROTOCOL_DEPENDENCY"    :   gArchProtocolDefinition,
  "EFI_PPI_DEFINITION"              :   gPpiDefinition,
  "EFI_PPI_PRODUCER"                :   gPpiDefinition,
  "EFI_PPI_CONSUMER"                :   gPpiDefinition,
  "EFI_PPI_DEPENDENCY"              :   gPpiDefinition,
}

## Identifiers for the two supported make flavours and host platforms.
NMAKE_FILETYPE = "nmake"
GMAKE_FILETYPE = "gmake"
WIN32_PLATFORM = "win32"
POSIX_PLATFORM = "posix"
## BuildFile class
#
# This base class encapsules build file and its generation. It uses template to generate
# the content of build file. The content of build file will be got from AutoGen objects.
#
class BuildFile(object):
## template used to generate the build file (i.e. makefile if using make)
_TEMPLATE_ = TemplateString('')
_DEFAULT_FILE_NAME_ = "Makefile"
## default file name for each type of build file
_FILE_NAME_ = {
NMAKE_FILETYPE : "Makefile",
GMAKE_FILETYPE : "GNUmakefile"
}
# Get Makefile name.
def getMakefileName(self):
if not self._FileType:
return self._DEFAULT_FILE_NAME_
else:
return self._FILE_NAME_[self._FileType]
## Fixed header string for makefile
_MAKEFILE_HEADER = '''#
# DO NOT EDIT
# This file is auto-generated by build utility
#
# Module Name:
#
# %s
#
# Abstract:
#
# Auto-generated makefile for building modules, libraries or platform
#
'''
## Header string for each type of build file
_FILE_HEADER_ = {
NMAKE_FILETYPE : _MAKEFILE_HEADER % _FILE_NAME_[NMAKE_FILETYPE],
GMAKE_FILETYPE : _MAKEFILE_HEADER % _FILE_NAME_[GMAKE_FILETYPE]
}
## shell commands which can be used in build file in the form of macro
# $(CP) copy file command
# $(MV) move file command
# $(RM) remove file command
# $(MD) create dir command
# $(RD) remove dir command
#
_SHELL_CMD_ = {
WIN32_PLATFORM : {
"CP" : "copy /y",
"MV" : "move /y",
"RM" : "del /f /q",
"MD" : "mkdir",
"RD" : "rmdir /s /q",
},
POSIX_PLATFORM : {
"CP" : "cp -p -f",
"MV" : "mv -f",
"RM" : "rm -f",
"MD" : "mkdir -p",
"RD" : "rm -r -f",
}
}
## directory separator
_SEP_ = {
WIN32_PLATFORM : "\\",
POSIX_PLATFORM : "/"
}
## directory creation template
_MD_TEMPLATE_ = {
WIN32_PLATFORM : 'if not exist %(dir)s $(MD) %(dir)s',
POSIX_PLATFORM : "$(MD) %(dir)s"
}
## directory removal template
_RD_TEMPLATE_ = {
WIN32_PLATFORM : 'if exist %(dir)s $(RD) %(dir)s',
POSIX_PLATFORM : "$(RD) %(dir)s"
}
## cp if exist
_CP_TEMPLATE_ = {
WIN32_PLATFORM : 'if exist %(Src)s $(CP) %(Src)s %(Dst)s',
POSIX_PLATFORM : "test -f %(Src)s && $(CP) %(Src)s %(Dst)s"
}
_CD_TEMPLATE_ = {
WIN32_PLATFORM : 'if exist %(dir)s cd %(dir)s',
POSIX_PLATFORM : "test -e %(dir)s && cd %(dir)s"
}
_MAKE_TEMPLATE_ = {
WIN32_PLATFORM : 'if exist %(file)s "$(MAKE)" $(MAKE_FLAGS) -f %(file)s',
POSIX_PLATFORM : 'test -e %(file)s && "$(MAKE)" $(MAKE_FLAGS) -f %(file)s'
}
_INCLUDE_CMD_ = {
NMAKE_FILETYPE : '!INCLUDE',
GMAKE_FILETYPE : "include"
}
_INC_FLAG_ = {TAB_COMPILER_MSFT : "/I", "GCC" : "-I", "INTEL" : "-I", "NASM" : "-I"}
## Constructor of BuildFile
#
# @param AutoGenObject Object of AutoGen class
#
def __init__(self, AutoGenObject):
self._AutoGenObject = AutoGenObject
MakePath = AutoGenObject.BuildOption.get('MAKE', {}).get('PATH')
if not MakePath:
MakePath = AutoGenObject.ToolDefinition.get('MAKE', {}).get('PATH')
if "nmake" in MakePath:
self._FileType = NMAKE_FILETYPE
else:
self._FileType = GMAKE_FILETYPE
if sys.platform == "win32":
self._Platform = WIN32_PLATFORM
else:
self._Platform = POSIX_PLATFORM
## Create build file.
#
#  Only nmake and gmake are supported.
#
#  @retval TRUE        The build file is created or re-created successfully.
#  @retval FALSE       The build file exists and is the same as the one to be generated.
#
def Generate(self):
    # Instantiate the makefile template with this module/platform's values.
    FileContent = self._TEMPLATE_.Replace(self._TemplateDict)
    FileName = self.getMakefileName()
    # The dependency include files must exist (even if empty) before the
    # generated makefile's include directives reference them; create any
    # missing ones once here instead of repeating the same stanza per file.
    for DepFileName in ("deps.txt", "dependency", "deps_target"):
        DepFilePath = os.path.join(self._AutoGenObject.MakeFileDir, DepFileName)
        if not os.path.exists(DepFilePath):
            with open(DepFilePath, "w+") as fd:
                fd.write("")
    # SaveFileOnChange returns True only when the file content changed.
    return SaveFileOnChange(os.path.join(self._AutoGenObject.MakeFileDir, FileName), FileContent, False)
## Return a list of directory creation command strings
#
#   @param      DirList     The list of directories to be created
#
#   @retval     list        The directory creation command list
#
def GetCreateDirectoryCommand(self, DirList):
    Template = self._MD_TEMPLATE_[self._Platform]
    return [Template % {'dir': OneDir} for OneDir in DirList]
## Return a list of directory removal command strings
#
#   @param      DirList     The list of directories to be removed
#
#   @retval     list        The directory removal command list
#
def GetRemoveDirectoryCommand(self, DirList):
    Template = self._RD_TEMPLATE_[self._Platform]
    return [Template % {'dir': OneDir} for OneDir in DirList]
## Substitute a macro reference for a matching path prefix
#
#  Returns Path unchanged when it already starts with a macro reference or
#  when no macro value is a prefix of it; otherwise replaces the first
#  matching macro value (in definition order) with "$(NAME)".
#
#   @param      Path                The path string to macro-ize
#   @param      MacroDefinitions    Mapping of macro name -> macro value
#
#   @retval     str                 Path with at most one prefix replaced
#
def PlaceMacro(self, Path, MacroDefinitions=None):
    # Already macro-ized: nothing to do.
    if Path.startswith("$("):
        return Path
    if MacroDefinitions is None:
        MacroDefinitions = {}
    for MacroName, MacroValue in MacroDefinitions.items():
        # Empty macro values can never be a meaningful prefix.
        if not MacroValue:
            continue
        if Path.startswith(MacroValue):
            # First match wins, mirroring the original break-on-first-hit.
            return "$(%s)%s" % (MacroName, Path[len(MacroValue):])
    return Path
## ModuleMakefile class
#
#  This class encapsulates the makefile and its generation for a module. It
#  uses a template to generate the content of the makefile. The content of
#  the makefile is obtained from a ModuleAutoGen object.
#
class ModuleMakefile(BuildFile):
    ## template used to generate the makefile for module
    #  NOTE: the template body is emitted verbatim into the generated
    #  makefile; ${name} placeholders are expanded from _TemplateDict and
    #  ${BEGIN}/${END} delimit list-valued expansions.
    _TEMPLATE_ = TemplateString('''\
${makefile_header}

#
# Platform Macro Definition
#
PLATFORM_NAME = ${platform_name}
PLATFORM_GUID = ${platform_guid}
PLATFORM_VERSION = ${platform_version}
PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
PLATFORM_DIR = ${platform_dir}
PLATFORM_OUTPUT_DIR = ${platform_output_directory}

#
# Module Macro Definition
#
MODULE_NAME = ${module_name}
MODULE_GUID = ${module_guid}
MODULE_NAME_GUID = ${module_name_guid}
MODULE_VERSION = ${module_version}
MODULE_TYPE = ${module_type}
MODULE_FILE = ${module_file}
MODULE_FILE_BASE_NAME = ${module_file_base_name}
BASE_NAME = $(MODULE_NAME)
MODULE_RELATIVE_DIR = ${module_relative_directory}
PACKAGE_RELATIVE_DIR = ${package_relative_directory}
MODULE_DIR = ${module_dir}
FFS_OUTPUT_DIR = ${ffs_output_directory}

MODULE_ENTRY_POINT = ${module_entry_point}
ARCH_ENTRY_POINT = ${arch_entry_point}
IMAGE_ENTRY_POINT = ${image_entry_point}

${BEGIN}${module_extra_defines}
${END}

#
# Build Configuration Macro Definition
#
ARCH = ${architecture}
TOOLCHAIN = ${toolchain_tag}
TOOLCHAIN_TAG = ${toolchain_tag}
TARGET = ${build_target}

#
# Build Directory Macro Definition
#
# PLATFORM_BUILD_DIR = ${platform_build_directory}
BUILD_DIR = ${platform_build_directory}
BIN_DIR = $(BUILD_DIR)${separator}${architecture}
LIB_DIR = $(BIN_DIR)
MODULE_BUILD_DIR = ${module_build_directory}
OUTPUT_DIR = ${module_output_directory}
DEBUG_DIR = ${module_debug_directory}
DEST_DIR_OUTPUT = $(OUTPUT_DIR)
DEST_DIR_DEBUG = $(DEBUG_DIR)

#
# Shell Command Macro
#
${BEGIN}${shell_command_code} = ${shell_command}
${END}

#
# Tools definitions specific to this module
#
${BEGIN}${module_tool_definitions}
${END}
MAKE_FILE = ${makefile_path}

#
# Build Macro
#
${BEGIN}${file_macro}
${END}

#
# Overridable Target Macro Definitions
#
FORCE_REBUILD = force_build
INIT_TARGET = init
PCH_TARGET =
BC_TARGET = ${BEGIN}${backward_compatible_target} ${END}
CODA_TARGET = ${BEGIN}${remaining_build_target} \\
${END}

#
# Default target, which will build dependent libraries in addition to source files
#
all: mbuild


#
# Target used when called from platform makefile, which will bypass the build of dependent libraries
#
pbuild: $(INIT_TARGET) $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)

#
# ModuleTarget
#
mbuild: $(INIT_TARGET) $(BC_TARGET) gen_libs $(PCH_TARGET) $(CODA_TARGET)

#
# Build Target used in multi-thread build mode, which will bypass the init and gen_libs targets
#
tbuild: $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)

#
# Phony target which is used to force executing commands for a target
#
force_build:
\t-@

#
# Target to update the FD
#
fds: mbuild gen_fds

#
# Initialization target: print build information and create necessary directories
#
init: info dirs

info:
\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]

dirs:
${BEGIN}\t-@${create_directory_command}\n${END}

strdefs:
\t-@$(CP) $(DEBUG_DIR)${separator}AutoGen.h $(DEBUG_DIR)${separator}$(MODULE_NAME)StrDefs.h

#
# GenLibsTarget
#
gen_libs:
\t${BEGIN}@"$(MAKE)" $(MAKE_FLAGS) -f ${dependent_library_build_directory}${separator}${makefile_name}
\t${END}@cd $(MODULE_BUILD_DIR)

#
# Build Flash Device Image
#
gen_fds:
\t@"$(MAKE)" $(MAKE_FLAGS) -f $(BUILD_DIR)${separator}${makefile_name} fds
\t@cd $(MODULE_BUILD_DIR)

${INCLUDETAG}

#
# Individual Object Build Targets
#
${BEGIN}${file_build_target}
${END}

#
# clean all intermediate files
#
clean:
\t${BEGIN}${clean_command}
\t${END}\t$(RM) AutoGenTimeStamp

#
# clean all generated files
#
cleanall:
${BEGIN}\t${cleanall_command}
${END}\t$(RM) *.pdb *.idb > NUL 2>&1
\t$(RM) $(BIN_DIR)${separator}$(MODULE_NAME).efi
\t$(RM) AutoGenTimeStamp

#
# clean all dependent libraries built
#
cleanlib:
\t${BEGIN}-@${library_build_command} cleanall
\t${END}@cd $(MODULE_BUILD_DIR)\n\n''')

    ## template for one "MACRO = file1 \ file2 ..." macro definition
    _FILE_MACRO_TEMPLATE = TemplateString("${macro_name} = ${BEGIN} \\\n    ${source_file}${END}\n")
    ## template for one "target : deps" + command rule
    _BUILD_TARGET_TEMPLATE = TemplateString("${BEGIN}${target} : ${deps}\n${END}\t${cmd}\n")
## Constructor of ModuleMakefile
#
#   @param  ModuleAutoGen   Object of ModuleAutoGen class
#
def __init__(self, ModuleAutoGen):
    BuildFile.__init__(self, ModuleAutoGen)
    self.PlatformInfo = self._AutoGenObject.PlatformInfo

    # Files produced by the module build (targets of the makefile).
    self.ResultFileList = []
    self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]

    self.FileBuildTargetList = []       # [(src, target string)]
    self.BuildTargetList = []           # [target string]
    self.PendingBuildTargetList = []    # [FileBuildRule objects]
    self.CommonFileDependency = []
    # macro name -> list of files (emitted inline in the makefile)
    self.FileListMacros = {}
    # macro name -> list of files (written to a .lst file on disk)
    self.ListFileMacros = {}
    # target sub-directory -> set of object file targets (for OBJLIST_n macros)
    self.ObjTargetDict = OrderedDict()
    self.FileCache = {}
    self.LibraryBuildCommandList = []
    self.LibraryFileList = []
    self.LibraryMakefileList = []
    self.LibraryBuildDirectoryList = []
    self.SystemLibraryList = []
    # Subset of the AutoGen macros used for path macro-ization; ordered so
    # substitution order is deterministic.
    self.Macros = OrderedDict()
    self.Macros["OUTPUT_DIR"      ] = self._AutoGenObject.Macros["OUTPUT_DIR"]
    self.Macros["DEBUG_DIR"       ] = self._AutoGenObject.Macros["DEBUG_DIR"]
    self.Macros["MODULE_BUILD_DIR"] = self._AutoGenObject.Macros["MODULE_BUILD_DIR"]
    self.Macros["BIN_DIR"         ] = self._AutoGenObject.Macros["BIN_DIR"]
    self.Macros["BUILD_DIR"       ] = self._AutoGenObject.Macros["BUILD_DIR"]
    self.Macros["WORKSPACE"       ] = self._AutoGenObject.Macros["WORKSPACE"]
    self.Macros["FFS_OUTPUT_DIR"  ] = self._AutoGenObject.Macros["FFS_OUTPUT_DIR"]
    self.GenFfsList = ModuleAutoGen.GenFfsList
    # Macro names ReplaceMacro() substitutes into FFS command strings.
    self.MacroList = ['FFS_OUTPUT_DIR', 'MODULE_GUID', 'OUTPUT_DIR']
    self.FfsOutputFileList = []
    self.DependencyHeaderFileSet = set()
# Compose a dict object containing information used to do replacement in template
@property
def _TemplateDict(self):
    ## Build the placeholder -> value mapping consumed by Generate() when
    #  instantiating _TEMPLATE_.  Also has side effects: processes dependent
    #  libraries, populates self.BuildTargetList/ResultFileList, and writes
    #  response/list files into the module output directory.
    MyAgo = self._AutoGenObject
    Separator = self._SEP_[self._Platform]

    # break build if no source files and binary files are found
    if len(MyAgo.SourceFileList) == 0 and len(MyAgo.BinaryFileList) == 0:
        EdkLogger.error("build", AUTOGEN_ERROR, "No files to be built in module [%s, %s, %s]"
                        % (MyAgo.BuildTarget, MyAgo.ToolChain, MyAgo.Arch),
                        ExtraData="[%s]" % str(MyAgo))

    # convert dependent libraries to build command
    self.ProcessDependentLibrary()
    if len(MyAgo.Module.ModuleEntryPointList) > 0:
        ModuleEntryPoint = MyAgo.Module.ModuleEntryPointList[0]
    else:
        ModuleEntryPoint = "_ModuleEntryPoint"

    ArchEntryPoint = ModuleEntryPoint

    if MyAgo.Arch == "EBC":
        # EBC compiler always use "EfiStart" as entry point. Only applies to EdkII modules
        ImageEntryPoint = "EfiStart"
    else:
        # EdkII modules always use "_ModuleEntryPoint" as entry point
        ImageEntryPoint = "_ModuleEntryPoint"

    # Module [Defines] entries become macros unless already defined.
    for k, v in MyAgo.Module.Defines.items():
        if k not in MyAgo.Macros:
            MyAgo.Macros[k] = v

    if 'MODULE_ENTRY_POINT' not in MyAgo.Macros:
        MyAgo.Macros['MODULE_ENTRY_POINT'] = ModuleEntryPoint
    if 'ARCH_ENTRY_POINT' not in MyAgo.Macros:
        MyAgo.Macros['ARCH_ENTRY_POINT'] = ArchEntryPoint
    if 'IMAGE_ENTRY_POINT' not in MyAgo.Macros:
        MyAgo.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint

    PCI_COMPRESS_Flag = False
    for k, v in MyAgo.Module.Defines.items():
        if 'PCI_COMPRESS' == k and 'TRUE' == v:
            PCI_COMPRESS_Flag = True

    # tools definitions: emit TOOL = path and TOOL_ATTR = value lines
    ToolsDef = []
    IncPrefix = self._INC_FLAG_[MyAgo.ToolChainFamily]
    for Tool in sorted(list(MyAgo.BuildOption)):
        Appended = False
        for Attr in sorted(list(MyAgo.BuildOption[Tool])):
            Value = MyAgo.BuildOption[Tool][Attr]
            if Attr == "FAMILY":
                continue
            elif Attr == "PATH":
                ToolsDef.append("%s = %s" % (Tool, Value))
                Appended = True
            else:
                # Don't generate MAKE_FLAGS in makefile. It's put in environment variable.
                if Tool == "MAKE":
                    continue
                # Remove duplicated include path, if any
                if Attr == "FLAGS":
                    Value = RemoveDupOption(Value, IncPrefix, MyAgo.IncludePathList)
                    # With PCI_COMPRESS, OPTROM's -e option becomes -ec.
                    if Tool == "OPTROM" and PCI_COMPRESS_Flag:
                        ValueList = Value.split()
                        if ValueList:
                            for i, v in enumerate(ValueList):
                                if '-e' == v:
                                    ValueList[i] = '-ec'
                            Value = ' '.join(ValueList)
                ToolsDef.append("%s_%s = %s" % (Tool, Attr, Value))
                Appended = True
        if Appended:
            ToolsDef.append("")

    # generate the Response file and Response flag
    RespDict = self.CommandExceedLimit()
    RespFileList = os.path.join(MyAgo.OutputDir, 'respfilelist.txt')
    if RespDict:
        RespFileListContent = ''
        for Resp in RespDict:
            RespFile = os.path.join(MyAgo.OutputDir, str(Resp).lower() + '.txt')
            StrList = RespDict[Resp].split(' ')
            UnexpandMacro = []
            NewStr = []
            # Keep unexpandable items ($-macros, dependency flags) on the
            # command line; everything else moves into the response file.
            for Str in StrList:
                if '$' in Str or '-MMD' in Str or '-MF' in Str:
                    UnexpandMacro.append(Str)
                else:
                    NewStr.append(Str)
            UnexpandMacroStr = ' '.join(UnexpandMacro)
            NewRespStr = ' '.join(NewStr)
            SaveFileOnChange(RespFile, NewRespStr, False)
            ToolsDef.append("%s = %s" % (Resp, UnexpandMacroStr + ' @' + RespFile))
            RespFileListContent += '@' + RespFile + TAB_LINE_BREAK
            RespFileListContent += NewRespStr + TAB_LINE_BREAK
        SaveFileOnChange(RespFileList, RespFileListContent, False)
    else:
        if os.path.exists(RespFileList):
            os.remove(RespFileList)

    # convert source files and binary files to build targets
    self.ResultFileList = [str(T.Target) for T in MyAgo.CodaTargetList]
    if len(self.ResultFileList) == 0 and len(MyAgo.SourceFileList) != 0:
        EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
                        ExtraData="[%s]" % str(MyAgo))

    self.ProcessBuildTargetList(MyAgo.OutputDir, ToolsDef)
    self.ParserGenerateFfsCmd()

    # Generate macros used to represent input files
    FileMacroList = [] # macro name = file list
    for FileListMacro in self.FileListMacros:
        FileMacro = self._FILE_MACRO_TEMPLATE.Replace(
                                                {
                                                    "macro_name"  : FileListMacro,
                                                    "source_file" : self.FileListMacros[FileListMacro]
                                                }
                                                )
        FileMacroList.append(FileMacro)

    # INC_LIST is special
    FileMacro = ""
    IncludePathList = []
    for P in MyAgo.IncludePathList:
        IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
        if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
            self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
    FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
                                            {
                                                "macro_name"  : "INC",
                                                "source_file" : IncludePathList
                                            }
                                            )
    FileMacroList.append(FileMacro)
    # Add support when compiling .nasm source files
    IncludePathList = []
    asmsource = [item for item in MyAgo.SourceFileList if item.File.upper().endswith((".NASM",".ASM",".NASMB","S"))]
    if asmsource:
        for P in MyAgo.IncludePathList:
            IncludePath = self._INC_FLAG_['NASM'] + self.PlaceMacro(P, self.Macros)
            if IncludePath.endswith(os.sep):
                IncludePath = IncludePath.rstrip(os.sep)
            # When compiling .nasm files, need to add a literal backslash at each path.
            # In nmake makefiles, a trailing literal backslash must be escaped with a caret ('^').
            # It is otherwise replaced with a space (' '). This is not necessary for GNU makefiles.
            if P == MyAgo.IncludePathList[-1] and self._Platform == WIN32_PLATFORM and self._FileType == NMAKE_FILETYPE:
                IncludePath = ''.join([IncludePath, '^', os.sep])
            else:
                IncludePath = os.path.join(IncludePath, '')
            IncludePathList.append(IncludePath)
        FileMacroList.append(self._FILE_MACRO_TEMPLATE.Replace({"macro_name": "NASM_INC", "source_file": IncludePathList}))

    # Generate macros used to represent files containing list of input files
    for ListFileMacro in self.ListFileMacros:
        # Strip the trailing "_LIST" (5 chars) to derive the .lst file name.
        ListFileName = os.path.join(MyAgo.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
        FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
        SaveFileOnChange(
            ListFileName,
            "\n".join(self.ListFileMacros[ListFileMacro]),
            False
            )

    # Generate objlist used to create .obj file
    for Type in self.ObjTargetDict:
        NewLine = ' '.join(list(self.ObjTargetDict[Type]))
        FileMacroList.append("OBJLIST_%s = %s" % (list(self.ObjTargetDict.keys()).index(Type), NewLine))

    BcTargetList = []

    MakefileName = self.getMakefileName()
    LibraryMakeCommandList = []
    for D in self.LibraryBuildDirectoryList:
        Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":os.path.join(D, MakefileName)}
        LibraryMakeCommandList.append(Command)

    # Walk up the module's source path until a directory containing a .dec
    # file is found; the remainder is the package-relative directory.
    package_rel_dir = MyAgo.SourceDir
    current_dir = self.Macros["WORKSPACE"]
    found = False
    while not found and os.sep in package_rel_dir:
        index = package_rel_dir.index(os.sep)
        current_dir = mws.join(current_dir, package_rel_dir[:index])
        if os.path.exists(current_dir):
            for fl in os.listdir(current_dir):
                if fl.endswith('.dec'):
                    found = True
                    break
        package_rel_dir = package_rel_dir[index + 1:]

    MakefileTemplateDict = {
        "makefile_header"           : self._FILE_HEADER_[self._FileType],
        "makefile_path"             : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
        "makefile_name"             : MakefileName,
        "platform_name"             : self.PlatformInfo.Name,
        "platform_guid"             : self.PlatformInfo.Guid,
        "platform_version"          : self.PlatformInfo.Version,
        "platform_relative_directory": self.PlatformInfo.SourceDir,
        "platform_output_directory" : self.PlatformInfo.OutputDir,
        "ffs_output_directory"      : MyAgo.Macros["FFS_OUTPUT_DIR"],
        "platform_dir"              : MyAgo.Macros["PLATFORM_DIR"],

        "module_name"               : MyAgo.Name,
        "module_guid"               : MyAgo.Guid,
        "module_name_guid"          : MyAgo.UniqueBaseName,
        "module_version"            : MyAgo.Version,
        "module_type"               : MyAgo.ModuleType,
        "module_file"               : MyAgo.MetaFile.Name,
        "module_file_base_name"     : MyAgo.MetaFile.BaseName,
        "module_relative_directory" : MyAgo.SourceDir,
        "module_dir"                : mws.join (self.Macros["WORKSPACE"], MyAgo.SourceDir),
        "package_relative_directory": package_rel_dir,
        "module_extra_defines"      : ["%s = %s" % (k, v) for k, v in MyAgo.Module.Defines.items()],

        "architecture"              : MyAgo.Arch,
        "toolchain_tag"             : MyAgo.ToolChain,
        "build_target"              : MyAgo.BuildTarget,

        "platform_build_directory"  : self.PlatformInfo.BuildDir,
        "module_build_directory"    : MyAgo.BuildDir,
        "module_output_directory"   : MyAgo.OutputDir,
        "module_debug_directory"    : MyAgo.DebugDir,

        "separator"                 : Separator,
        "module_tool_definitions"   : ToolsDef,

        "shell_command_code"        : list(self._SHELL_CMD_[self._Platform].keys()),
        "shell_command"             : list(self._SHELL_CMD_[self._Platform].values()),

        "module_entry_point"        : ModuleEntryPoint,
        "image_entry_point"         : ImageEntryPoint,
        "arch_entry_point"          : ArchEntryPoint,
        "remaining_build_target"    : self.ResultFileList,
        "common_dependency_file"    : self.CommonFileDependency,
        "create_directory_command"  : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
        "clean_command"             : self.GetRemoveDirectoryCommand(["$(OUTPUT_DIR)"]),
        "cleanall_command"          : self.GetRemoveDirectoryCommand(["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]),
        "dependent_library_build_directory" : self.LibraryBuildDirectoryList,
        "library_build_command"     : LibraryMakeCommandList,
        "file_macro"                : FileMacroList,
        "file_build_target"         : self.BuildTargetList,
        "backward_compatible_target": BcTargetList,
        "INCLUDETAG"                : "\n".join([self._INCLUDE_CMD_[self._FileType] + " " + os.path.join("$(MODULE_BUILD_DIR)","dependency"),
                                                 self._INCLUDE_CMD_[self._FileType] + " " + os.path.join("$(MODULE_BUILD_DIR)","deps_target")
                                                 ])
    }

    return MakefileTemplateDict
def ParserGenerateFfsCmd(self):
    ## Convert the module's GenFfs command descriptions into makefile rules.
    #  Each entry of self.GenFfsList is a tuple; from the usage here:
    #  Cmd[0] is the GenFfs command word list, Cmd[1] a tuple of section
    #  command strings, Cmd[2] optional (Src, Dst) copy pairs.
    #Add Ffs cmd to self.BuildTargetList
    OutputFile = ''
    DepsFileList = []

    for Cmd in self.GenFfsList:
        if Cmd[2]:
            for CopyCmd in Cmd[2]:
                Src, Dst = CopyCmd
                Src = self.ReplaceMacro(Src)
                Dst = self.ReplaceMacro(Dst)
                if Dst not in self.ResultFileList:
                    self.ResultFileList.append(Dst)
                # Only emit a copy rule once per destination target.
                if '%s :' %(Dst) not in self.BuildTargetList:
                    self.BuildTargetList.append("%s : %s" %(Dst,Src))
                    self.BuildTargetList.append('\t' + self._CP_TEMPLATE_[self._Platform] %{'Src': Src, 'Dst': Dst})

        FfsCmdList = Cmd[0]
        # -o names the output; -i/-oi name input sections (dependencies).
        for index, Str in enumerate(FfsCmdList):
            if '-o' == Str:
                OutputFile = FfsCmdList[index + 1]
            if '-i' == Str or "-oi" == Str:
                if DepsFileList == []:
                    DepsFileList = [FfsCmdList[index + 1]]
                else:
                    DepsFileList.append(FfsCmdList[index + 1])
        DepsFileString = ' '.join(DepsFileList).strip()
        if DepsFileString == '':
            continue
        OutputFile = self.ReplaceMacro(OutputFile)
        self.ResultFileList.append(OutputFile)
        DepsFileString = self.ReplaceMacro(DepsFileString)
        self.BuildTargetList.append('%s : %s' % (OutputFile, DepsFileString))
        CmdString = ' '.join(FfsCmdList).strip()
        CmdString = self.ReplaceMacro(CmdString)
        self.BuildTargetList.append('\t%s' % CmdString)

        # Recursively add rules for the section files the inputs depend on.
        self.ParseSecCmd(DepsFileList, Cmd[1])
        for SecOutputFile, SecDepsFile, SecCmd in self.FfsOutputFileList :
            self.BuildTargetList.append('%s : %s' % (self.ReplaceMacro(SecOutputFile), self.ReplaceMacro(SecDepsFile)))
            self.BuildTargetList.append('\t%s' % self.ReplaceMacro(SecCmd))
        self.FfsOutputFileList = []
def ParseSecCmd(self, OutputFileList, CmdTuple):
    ## For every output file, find the section command that produces it
    #  (its '-o <file>' argument matches), collect that command's non-option
    #  arguments as dependencies, and record (output, deps, cmd) into
    #  self.FfsOutputFileList; recurses so nested section outputs are covered.
    for OutputFile in OutputFileList:
        for SecCmdStr in CmdTuple:
            SecDepsFileList = []
            SecCmdList = SecCmdStr.split()
            CmdName = SecCmdList[0]
            for index, CmdItem in enumerate(SecCmdList):
                if '-o' == CmdItem and OutputFile == SecCmdList[index + 1]:
                    # Everything after the output that is not an option flag
                    # is treated as an input dependency.
                    index = index + 1
                    while index + 1 < len(SecCmdList):
                        if not SecCmdList[index+1].startswith('-'):
                            SecDepsFileList.append(SecCmdList[index + 1])
                        index = index + 1
                    if CmdName == 'Trim':
                        # Trim outputs depend on the corresponding .efi file.
                        SecDepsFileList.append(os.path.join('$(DEBUG_DIR)', os.path.basename(OutputFile).replace('offset', 'efi')))
                    if OutputFile.endswith('.ui') or OutputFile.endswith('.ver'):
                        # UI/version sections depend on the module INF itself.
                        SecDepsFileList.append(os.path.join('$(MODULE_DIR)', '$(MODULE_FILE)'))
                    self.FfsOutputFileList.append((OutputFile, ' '.join(SecDepsFileList), SecCmdStr))
                    if len(SecDepsFileList) > 0:
                        self.ParseSecCmd(SecDepsFileList, CmdTuple)
                    break
            else:
                continue
## Substitute "$(<macro>)" for each known macro value found in a string
#
#  Matching is case-insensitive (os.path.normcase), but the replacement
#  removes the substring exactly as it appears in the input.
#
#   @param  str     The command/path string to macro-ize
#                   (NOTE: parameter name shadows the builtin ``str``;
#                   kept for interface compatibility)
#
#   @retval str     The string with macro values replaced by references
#
def ReplaceMacro(self, str):
    for Macro in self.MacroList:
        MacroValue = self._AutoGenObject.Macros[Macro]
        if not MacroValue:
            continue
        NormalizedInput = os.path.normcase(str)
        NormalizedValue = os.path.normcase(MacroValue)
        if NormalizedValue not in NormalizedInput:
            continue
        # Recover the exact-cased substring at the first match position,
        # then replace every occurrence of that exact text.
        Begin = NormalizedInput.index(NormalizedValue)
        MatchedText = str[Begin:Begin + len(MacroValue)]
        str = str.replace(MatchedText, '$(' + Macro + ')')
    return str
def CommandExceedLimit(self):
    ## Detect build commands whose fully-expanded length would exceed the
    #  shell command-line limit (GlobalData.gCommandMaxLength) and prepare
    #  response-file replacements for their FLAGS macros.
    #
    #   @retval dict    '<TOOL>_RESP' -> expanded flags+includes string to be
    #                   written into a response file by the caller
    FlagDict = {
                'CC'    :  { 'Macro' : '$(CC_FLAGS)', 'Value' : False},
                'PP'    :  { 'Macro' : '$(PP_FLAGS)', 'Value' : False},
                'APP'   :  { 'Macro' : '$(APP_FLAGS)', 'Value' : False},
                'ASLPP' :  { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : False},
                'VFRPP' :  { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : False},
                'ASM'   :  { 'Macro' : '$(ASM_FLAGS)', 'Value' : False},
                'ASLCC' :  { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : False},
               }

    RespDict = {}
    FileTypeList = []
    IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily]

    # base on the source files to decide the file type
    for File in self._AutoGenObject.SourceFileList:
        for type in self._AutoGenObject.FileTypes:
            if File in self._AutoGenObject.FileTypes[type]:
                if type not in FileTypeList:
                    FileTypeList.append(type)

    # calculate the command-line length
    if FileTypeList:
        for type in FileTypeList:
            BuildTargets = self._AutoGenObject.BuildRules[type].BuildTargets
            for Target in BuildTargets:
                CommandList = BuildTargets[Target].Commands
                for SingleCommand in CommandList:
                    Tool = ''
                    SingleCommandLength = len(SingleCommand)
                    SingleCommandList = SingleCommand.split()

                    # Identify which tool macro the command starts with.
                    if len(SingleCommandList) > 0:
                        for Flag in FlagDict:
                            if '$('+ Flag +')' in SingleCommandList[0]:
                                Tool = Flag
                                break
                    if Tool:
                        if 'PATH' not in self._AutoGenObject.BuildOption[Tool]:
                            EdkLogger.error("build", AUTOGEN_ERROR, "%s_PATH doesn't exist in %s ToolChain and %s Arch." %(Tool, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), ExtraData="[%s]" % str(self._AutoGenObject))
                        SingleCommandLength += len(self._AutoGenObject.BuildOption[Tool]['PATH'])
                        for item in SingleCommandList[1:]:
                            if FlagDict[Tool]['Macro'] in item:
                                if 'FLAGS' not in self._AutoGenObject.BuildOption[Tool]:
                                    EdkLogger.error("build", AUTOGEN_ERROR, "%s_FLAGS doesn't exist in %s ToolChain and %s Arch." %(Tool, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), ExtraData="[%s]" % str(self._AutoGenObject))
                                Str = self._AutoGenObject.BuildOption[Tool]['FLAGS']
                                # Expand $(TOOL_ATTR) references from build options.
                                for Option in self._AutoGenObject.BuildOption:
                                    for Attr in self._AutoGenObject.BuildOption[Option]:
                                        if Str.find(Option + '_' + Attr) != -1:
                                            Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
                                # Expand remaining $(MACRO) references; the
                                # for/else breaks out once nothing matches.
                                while(Str.find('$(') != -1):
                                    for macro in self._AutoGenObject.Macros:
                                        MacroName = '$('+ macro + ')'
                                        if (Str.find(MacroName) != -1):
                                            Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
                                            break
                                    else:
                                        break
                                SingleCommandLength += len(Str)
                            elif '$(INC)' in item:
                                SingleCommandLength += self._AutoGenObject.IncludePathLength + len(IncPrefix) * len(self._AutoGenObject.IncludePathList)
                            elif item.find('$(') != -1:
                                Str = item
                                for Option in self._AutoGenObject.BuildOption:
                                    for Attr in self._AutoGenObject.BuildOption[Option]:
                                        if Str.find(Option + '_' + Attr) != -1:
                                            Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
                                while(Str.find('$(') != -1):
                                    for macro in self._AutoGenObject.Macros:
                                        MacroName = '$('+ macro + ')'
                                        if (Str.find(MacroName) != -1):
                                            Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
                                            break
                                    else:
                                        break
                                SingleCommandLength += len(Str)

                        if SingleCommandLength > GlobalData.gCommandMaxLength:
                            FlagDict[Tool]['Value'] = True

            # generate the response file content by combine the FLAGS and INC
            for Flag in FlagDict:
                if FlagDict[Flag]['Value']:
                    Key = Flag + '_RESP'
                    RespMacro = FlagDict[Flag]['Macro'].replace('FLAGS', 'RESP')
                    Value = self._AutoGenObject.BuildOption[Flag]['FLAGS']
                    for inc in self._AutoGenObject.IncludePathList:
                        Value += ' ' + IncPrefix + inc

                    for Option in self._AutoGenObject.BuildOption:
                        for Attr in self._AutoGenObject.BuildOption[Option]:
                            if Value.find(Option + '_' + Attr) != -1:
                                Value = Value.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
                    while (Value.find('$(') != -1):
                        for macro in self._AutoGenObject.Macros:
                            MacroName = '$('+ macro + ')'
                            if (Value.find(MacroName) != -1):
                                Value = Value.replace(MacroName, self._AutoGenObject.Macros[macro])
                                break
                        else:
                            break

                    if self._AutoGenObject.ToolChainFamily == 'GCC':
                        RespDict[Key] = Value.replace('\\', '/')
                    else:
                        RespDict[Key] = Value
                    # Rewrite the affected commands to use the _RESP macro
                    # (and drop the now-redundant $(INC)).
                    for Target in BuildTargets:
                        for i, SingleCommand in enumerate(BuildTargets[Target].Commands):
                            if FlagDict[Flag]['Macro'] in SingleCommand:
                                BuildTargets[Target].Commands[i] = SingleCommand.replace('$(INC)', '').replace(FlagDict[Flag]['Macro'], RespMacro)
    return RespDict
def ProcessBuildTargetList(self, RespFile, ToolsDef):
    ## Convert the module's target description objects into makefile rule
    #  strings (populating self.BuildTargetList and the file-list macros),
    #  and warn when a locally-included header is missing from the INF.
    #
    #   @param  RespFile    Directory receiving generated response files
    #   @param  ToolsDef    Tool definition lines; response-file macros may
    #                       be appended by ParserCCodeFile
    #
    # Search dependency file list for each source file
    #
    ForceIncludedFile = []
    for File in self._AutoGenObject.AutoGenFileList:
        if File.Ext == '.h':
            ForceIncludedFile.append(File)
    SourceFileList = []
    OutPutFileList = []
    for Target in self._AutoGenObject.IntroTargetList:
        SourceFileList.extend(Target.Inputs)
        OutPutFileList.extend(Target.Outputs)

    # Remove intermediate outputs that also appear as inputs.
    if OutPutFileList:
        for Item in OutPutFileList:
            if Item in SourceFileList:
                SourceFileList.remove(Item)

    # Every source depends on the force-included AutoGen headers.
    FileDependencyDict = {item:ForceIncludedFile for item in SourceFileList}

    for Dependency in FileDependencyDict.values():
        self.DependencyHeaderFileSet.update(set(Dependency))

    # Get a set of unique package includes from MetaFile
    parentMetaFileIncludes = set()
    for aInclude in self._AutoGenObject.PackageIncludePathList:
        aIncludeName = str(aInclude)
        parentMetaFileIncludes.add(aIncludeName.lower())

    # Check if header files are listed in metafile
    # Get a set of unique module header source files from MetaFile
    headerFilesInMetaFileSet = set()
    for aFile in self._AutoGenObject.SourceFileList:
        aFileName = str(aFile)
        if not aFileName.endswith('.h'):
            continue
        headerFilesInMetaFileSet.add(aFileName.lower())

    # Get a set of unique module autogen files
    localAutoGenFileSet = set()
    for aFile in self._AutoGenObject.AutoGenFileList:
        localAutoGenFileSet.add(str(aFile).lower())

    # Get a set of unique module dependency header files
    # Exclude autogen files and files not in the source directory
    # and files that are under the package include list
    headerFileDependencySet = set()
    localSourceDir = str(self._AutoGenObject.SourceDir).lower()
    for Dependency in FileDependencyDict.values():
        for aFile in Dependency:
            aFileName = str(aFile).lower()
            # Exclude non-header files
            if not aFileName.endswith('.h'):
                continue
            # Exclude autogen files
            if aFileName in localAutoGenFileSet:
                continue
            # Exclude include out of local scope
            if localSourceDir not in aFileName:
                continue
            # Exclude files covered by package includes
            pathNeeded = True
            for aIncludePath in parentMetaFileIncludes:
                if aIncludePath in aFileName:
                    pathNeeded = False
                    break
            if not pathNeeded:
                continue
            # Keep the file to be checked
            headerFileDependencySet.add(aFileName)

    # Check if a module dependency header file is missing from the module's MetaFile
    for aFile in headerFileDependencySet:
        if aFile in headerFilesInMetaFileSet:
            continue
        if GlobalData.gUseHashCache:
            # Mark the module so hash-based incremental builds do not cache it.
            GlobalData.gModuleBuildTracking[self._AutoGenObject] = 'FAIL_METAFILE'
        EdkLogger.warn("build","Module MetaFile [Sources] is missing local header!",
                       ExtraData = "Local Header: " + aFile + " not found in " + self._AutoGenObject.MetaFile.Path
                       )

    for File,Dependency in FileDependencyDict.items():
        if not Dependency:
            continue

        self._AutoGenObject.AutoGenDepSet |= set(Dependency)

    CmdSumDict = {}
    CmdTargetDict = {}
    CmdCppDict = {}
    DependencyDict = FileDependencyDict.copy()

    # Convert target description object to target string in makefile
    if self._AutoGenObject.BuildRuleFamily == TAB_COMPILER_MSFT and TAB_C_CODE_FILE in self._AutoGenObject.Targets:
        for T in self._AutoGenObject.Targets[TAB_C_CODE_FILE]:
            NewFile = self.PlaceMacro(str(T), self.Macros)
            if not self.ObjTargetDict.get(T.Target.SubDir):
                self.ObjTargetDict[T.Target.SubDir] = set()
            self.ObjTargetDict[T.Target.SubDir].add(NewFile)
    for Type in self._AutoGenObject.Targets:
        resp_file_number = 0
        for T in self._AutoGenObject.Targets[Type]:
            # Generate related macros if needed
            if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
                self.FileListMacros[T.FileListMacro] = []
            if T.GenListFile and T.ListFileMacro not in self.ListFileMacros:
                self.ListFileMacros[T.ListFileMacro] = []
            if T.GenIncListFile and T.IncListFileMacro not in self.ListFileMacros:
                self.ListFileMacros[T.IncListFileMacro] = []

            Deps = []
            CCodeDeps = []
            # Add force-dependencies
            for Dep in T.Dependencies:
                Deps.append(self.PlaceMacro(str(Dep), self.Macros))
                if Dep != '$(MAKE_FILE)':
                    CCodeDeps.append(self.PlaceMacro(str(Dep), self.Macros))
            # Add inclusion-dependencies
            if len(T.Inputs) == 1 and T.Inputs[0] in FileDependencyDict:
                for F in FileDependencyDict[T.Inputs[0]]:
                    Deps.append(self.PlaceMacro(str(F), self.Macros))
            # Add source-dependencies
            for F in T.Inputs:
                NewFile = self.PlaceMacro(str(F), self.Macros)
                # In order to use file list macro as dependency
                if T.GenListFile:
                    # gnu tools need forward slash path separator, even on Windows
                    self.ListFileMacros[T.ListFileMacro].append(str(F).replace ('\\', '/'))
                    self.FileListMacros[T.FileListMacro].append(NewFile)
                elif T.GenFileListMacro:
                    self.FileListMacros[T.FileListMacro].append(NewFile)
                else:
                    Deps.append(NewFile)
            for key in self.FileListMacros:
                self.FileListMacros[key].sort()
            # Use file list macro as dependency
            if T.GenFileListMacro:
                Deps.append("$(%s)" % T.FileListMacro)
                if Type in [TAB_OBJECT_FILE, TAB_STATIC_LIBRARY]:
                    Deps.append("$(%s)" % T.ListFileMacro)

            if self._AutoGenObject.BuildRuleFamily == TAB_COMPILER_MSFT and Type == TAB_C_CODE_FILE:
                # MSVC C files get batched per-directory rules with optional
                # response files; ParserCCodeFile rewrites T.Commands.
                T, CmdTarget, CmdTargetDict, CmdCppDict = self.ParserCCodeFile(T, Type, CmdSumDict, CmdTargetDict,
                                                                               CmdCppDict, DependencyDict, RespFile,
                                                                               ToolsDef, resp_file_number)
                resp_file_number += 1
                TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": "\n\t".join(T.Commands),"deps": CCodeDeps}
                CmdLine = self._BUILD_TARGET_TEMPLATE.Replace(TargetDict).rstrip().replace('\t$(OBJLIST', '$(OBJLIST')
                if T.Commands:
                    CmdLine = '%s%s' %(CmdLine, TAB_LINE_BREAK)
                if CCodeDeps or CmdLine:
                    self.BuildTargetList.append(CmdLine)
            else:
                TargetDict = {"target": self.PlaceMacro(T.Target.Path, self.Macros), "cmd": "\n\t".join(T.Commands),"deps": Deps}
                self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(TargetDict))

                # Add a Makefile rule for targets generating multiple files.
                # The main output is a prerequisite for the other output files.
                for i in T.Outputs[1:]:
                    AnnexeTargetDict = {"target": self.PlaceMacro(i.Path, self.Macros), "cmd": "", "deps": self.PlaceMacro(T.Target.Path, self.Macros)}
                    self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(AnnexeTargetDict))
def ParserCCodeFile(self, T, Type, CmdSumDict, CmdTargetDict, CmdCppDict, DependencyDict, RespFile, ToolsDef,
                    resp_file_number):
    ## Batch MSVC C-compile commands per output sub-directory (keyed by the
    #  /Fo output-directory prefix), moving source lists into a response
    #  file when the combined command would otherwise be too long.
    #
    #   @retval tuple   (modified target T, CmdSumDict, CmdTargetDict, CmdCppDict)
    SaveFilePath = os.path.join(RespFile, "cc_resp_%s.txt" % resp_file_number)
    if not CmdSumDict:
        for item in self._AutoGenObject.Targets[Type]:
            CmdSumDict[item.Target.SubDir] = item.Target.BaseName
            for CppPath in item.Inputs:
                Path = self.PlaceMacro(CppPath.Path, self.Macros)
                if CmdCppDict.get(item.Target.SubDir):
                    CmdCppDict[item.Target.SubDir].append(Path)
                else:
                    CmdCppDict[item.Target.SubDir] = ['$(MAKE_FILE)', Path]
                if CppPath.Path in DependencyDict:
                    for Temp in DependencyDict[CppPath.Path]:
                        # NOTE(review): bare except silently skips entries
                        # whose Path macro-ization fails — deliberate
                        # best-effort behavior, kept as-is.
                        try:
                            Path = self.PlaceMacro(Temp.Path, self.Macros)
                        except:
                            continue
                        if Path not in (self.CommonFileDependency + CmdCppDict[item.Target.SubDir]):
                            CmdCppDict[item.Target.SubDir].append(Path)
    if T.Commands:
        CommandList = T.Commands[:]
        for Item in CommandList[:]:
            SingleCommandList = Item.split()
            if len(SingleCommandList) > 0 and self.CheckCCCmd(SingleCommandList):
                # The /Fo argument's directory identifies the batch key.
                for Temp in SingleCommandList:
                    if Temp.startswith('/Fo'):
                        CmdSign = '%s%s' % (Temp.rsplit(TAB_SLASH, 1)[0], TAB_SLASH)
                        break
                else:
                    continue
                if CmdSign not in list(CmdTargetDict.keys()):
                    cmd = Item.replace(Temp, CmdSign)
                    if SingleCommandList[-1] in cmd:
                        # [command prefix, first source file]
                        CmdTargetDict[CmdSign] = [cmd.replace(SingleCommandList[-1], "").rstrip(), SingleCommandList[-1]]
                else:
                    # CmdTargetDict[CmdSign] = "%s %s" % (CmdTargetDict[CmdSign], SingleCommandList[-1])
                    CmdTargetDict[CmdSign].append(SingleCommandList[-1])
                Index = CommandList.index(Item)
                CommandList.pop(Index)
                BaseName = SingleCommandList[-1].rsplit('.',1)[0]
                # Only the batch's representative target emits the combined
                # rule; all other per-file commands are dropped.
                if BaseName.endswith("%s%s" % (TAB_SLASH, CmdSumDict[CmdSign[3:].rsplit(TAB_SLASH, 1)[0]])):
                    Cpplist = CmdCppDict[T.Target.SubDir]
                    Cpplist.insert(0, '$(OBJLIST_%d): ' % list(self.ObjTargetDict.keys()).index(T.Target.SubDir))
                    source_files = CmdTargetDict[CmdSign][1:]
                    source_files.insert(0, " ")
                    if len(source_files)>2:
                        SaveFileOnChange(SaveFilePath, " ".join(source_files), False)
                        T.Commands[Index] = '%s\n\t%s $(cc_resp_%s)' % (
                            ' \\\n\t'.join(Cpplist), CmdTargetDict[CmdSign][0], resp_file_number)
                        ToolsDef.append("cc_resp_%s = @%s" % (resp_file_number, SaveFilePath))

                    elif len(source_files)<=2 and len(" ".join(CmdTargetDict[CmdSign][:2]))>GlobalData.gCommandMaxLength:
                        SaveFileOnChange(SaveFilePath, " ".join(source_files), False)
                        T.Commands[Index] = '%s\n\t%s $(cc_resp_%s)' % (
                            ' \\\n\t'.join(Cpplist), CmdTargetDict[CmdSign][0], resp_file_number)
                        ToolsDef.append("cc_resp_%s = @%s" % (resp_file_number, SaveFilePath))
                    else:
                        T.Commands[Index] = '%s\n\t%s' % (' \\\n\t'.join(Cpplist), " ".join(CmdTargetDict[CmdSign]))
                else:
                    T.Commands.pop(Index)
    return T, CmdSumDict, CmdTargetDict, CmdCppDict
def CheckCCCmd(self, CommandList):
for cmd in CommandList:
if '$(CC)' in cmd:
return True
return False
## For creating makefile targets for dependent libraries
def ProcessDependentLibrary(self):
for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
if not LibraryAutoGen.IsBinaryModule:
self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros))
## Return a list containing source file's dependencies
#
# @param FileList The list of source files
# @param ForceInculeList The list of files which will be included forcely
# @param SearchPathList The list of search path
#
# @retval dict The mapping between source file path and its dependencies
#
def GetFileDependency(self, FileList, ForceInculeList, SearchPathList):
Dependency = {}
for F in FileList:
Dependency[F] = GetDependencyList(self._AutoGenObject, self.FileCache, F, ForceInculeList, SearchPathList)
return Dependency
## CustomMakefile class
#
#  This class encapsulates the makefile and its generation for a module that
#  supplies its own custom makefile.  It uses a template to generate the
#  content; substitution values come from the ModuleAutoGen object.
#
class CustomMakefile(BuildFile):
    ## template used to generate the makefile for module with custom makefile
    _TEMPLATE_ = TemplateString('''\
${makefile_header}
#
# Platform Macro Definition
#
PLATFORM_NAME = ${platform_name}
PLATFORM_GUID = ${platform_guid}
PLATFORM_VERSION = ${platform_version}
PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
PLATFORM_DIR = ${platform_dir}
PLATFORM_OUTPUT_DIR = ${platform_output_directory}
#
# Module Macro Definition
#
MODULE_NAME = ${module_name}
MODULE_GUID = ${module_guid}
MODULE_NAME_GUID = ${module_name_guid}
MODULE_VERSION = ${module_version}
MODULE_TYPE = ${module_type}
MODULE_FILE = ${module_file}
MODULE_FILE_BASE_NAME = ${module_file_base_name}
BASE_NAME = $(MODULE_NAME)
MODULE_RELATIVE_DIR = ${module_relative_directory}
MODULE_DIR = ${module_dir}
#
# Build Configuration Macro Definition
#
ARCH = ${architecture}
TOOLCHAIN = ${toolchain_tag}
TOOLCHAIN_TAG = ${toolchain_tag}
TARGET = ${build_target}
#
# Build Directory Macro Definition
#
# PLATFORM_BUILD_DIR = ${platform_build_directory}
BUILD_DIR = ${platform_build_directory}
BIN_DIR = $(BUILD_DIR)${separator}${architecture}
LIB_DIR = $(BIN_DIR)
MODULE_BUILD_DIR = ${module_build_directory}
OUTPUT_DIR = ${module_output_directory}
DEBUG_DIR = ${module_debug_directory}
DEST_DIR_OUTPUT = $(OUTPUT_DIR)
DEST_DIR_DEBUG = $(DEBUG_DIR)
#
# Tools definitions specific to this module
#
${BEGIN}${module_tool_definitions}
${END}
MAKE_FILE = ${makefile_path}
#
# Shell Command Macro
#
${BEGIN}${shell_command_code} = ${shell_command}
${END}
${custom_makefile_content}
#
# Target used when called from platform makefile, which will bypass the build of dependent libraries
#
pbuild: init all
#
# ModuleTarget
#
mbuild: init all
#
# Build Target used in multi-thread build mode, which no init target is needed
#
tbuild: all
#
# Initialization target: print build information and create necessary directories
#
init:
\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
${BEGIN}\t-@${create_directory_command}\n${END}\
''')
    ## Constructor of CustomMakefile
    #
    #   @param  ModuleAutoGen  Object of ModuleAutoGen class
    #
    def __init__(self, ModuleAutoGen):
        BuildFile.__init__(self, ModuleAutoGen)
        self.PlatformInfo = self._AutoGenObject.PlatformInfo
        # Directories that must exist before the module is built
        self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
        self.DependencyHeaderFileSet = set()
    # Compose a dict object containing information used to do replacement in template
    @property
    def _TemplateDict(self):
        """Build the substitution dictionary for _TEMPLATE_.

        Raises a fatal build error (via EdkLogger.error) when no custom
        makefile is registered for this file type or it cannot be read.
        """
        Separator = self._SEP_[self._Platform]
        MyAgo = self._AutoGenObject
        if self._FileType not in MyAgo.CustomMakefile:
            EdkLogger.error('build', OPTION_NOT_SUPPORTED, "No custom makefile for %s" % self._FileType,
                            ExtraData="[%s]" % str(MyAgo))
        MakefilePath = mws.join(
                                MyAgo.WorkspaceDir,
                                MyAgo.CustomMakefile[self._FileType]
                                )
        try:
            # Use a context manager so the file handle is always closed; the
            # original left the handle open until garbage collection.  Catch
            # Exception (not a bare except) so SystemExit/KeyboardInterrupt
            # are not converted into a build error.
            with open(MakefilePath, 'r') as MakefileFd:
                CustomMakefile = MakefileFd.read()
        except Exception:
            EdkLogger.error('build', FILE_OPEN_FAILURE, File=str(MyAgo),
                            ExtraData=MyAgo.CustomMakefile[self._FileType])
        # tools definitions
        ToolsDef = []
        for Tool in MyAgo.BuildOption:
            # Don't generate MAKE_FLAGS in makefile. It's put in environment variable.
            if Tool == "MAKE":
                continue
            for Attr in MyAgo.BuildOption[Tool]:
                if Attr == "FAMILY":
                    continue
                elif Attr == "PATH":
                    ToolsDef.append("%s = %s" % (Tool, MyAgo.BuildOption[Tool][Attr]))
                else:
                    ToolsDef.append("%s_%s = %s" % (Tool, Attr, MyAgo.BuildOption[Tool][Attr]))
            ToolsDef.append("")
        MakefileName = self.getMakefileName()
        MakefileTemplateDict = {
            "makefile_header"           : self._FILE_HEADER_[self._FileType],
            "makefile_path"             : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
            "platform_name"             : self.PlatformInfo.Name,
            "platform_guid"             : self.PlatformInfo.Guid,
            "platform_version"          : self.PlatformInfo.Version,
            "platform_relative_directory": self.PlatformInfo.SourceDir,
            "platform_output_directory" : self.PlatformInfo.OutputDir,
            "platform_dir"              : MyAgo.Macros["PLATFORM_DIR"],
            "module_name"               : MyAgo.Name,
            "module_guid"               : MyAgo.Guid,
            "module_name_guid"          : MyAgo.UniqueBaseName,
            "module_version"            : MyAgo.Version,
            "module_type"               : MyAgo.ModuleType,
            "module_file"               : MyAgo.MetaFile,
            "module_file_base_name"     : MyAgo.MetaFile.BaseName,
            "module_relative_directory" : MyAgo.SourceDir,
            "module_dir"                : mws.join (MyAgo.WorkspaceDir, MyAgo.SourceDir),
            "architecture"              : MyAgo.Arch,
            "toolchain_tag"             : MyAgo.ToolChain,
            "build_target"              : MyAgo.BuildTarget,
            "platform_build_directory"  : self.PlatformInfo.BuildDir,
            "module_build_directory"    : MyAgo.BuildDir,
            "module_output_directory"   : MyAgo.OutputDir,
            "module_debug_directory"    : MyAgo.DebugDir,
            "separator"                 : Separator,
            "module_tool_definitions"   : ToolsDef,
            "shell_command_code"        : list(self._SHELL_CMD_[self._Platform].keys()),
            "shell_command"             : list(self._SHELL_CMD_[self._Platform].values()),
            "create_directory_command"  : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
            "custom_makefile_content"   : CustomMakefile
        }
        return MakefileTemplateDict
## PlatformMakefile class
#
# This class encapsules makefie and its generation for platform. It uses
# template to generate the content of makefile. The content of makefile will be
# got from PlatformAutoGen object.
#
class PlatformMakefile(BuildFile):
    ## template used to generate the makefile for platform
    _TEMPLATE_ = TemplateString('''\
${makefile_header}
#
# Platform Macro Definition
#
PLATFORM_NAME = ${platform_name}
PLATFORM_GUID = ${platform_guid}
PLATFORM_VERSION = ${platform_version}
PLATFORM_FILE = ${platform_file}
PLATFORM_DIR = ${platform_dir}
PLATFORM_OUTPUT_DIR = ${platform_output_directory}
#
# Build Configuration Macro Definition
#
TOOLCHAIN = ${toolchain_tag}
TOOLCHAIN_TAG = ${toolchain_tag}
TARGET = ${build_target}
#
# Build Directory Macro Definition
#
BUILD_DIR = ${platform_build_directory}
FV_DIR = ${platform_build_directory}${separator}FV
#
# Shell Command Macro
#
${BEGIN}${shell_command_code} = ${shell_command}
${END}
MAKE = ${make_path}
MAKE_FILE = ${makefile_path}
#
# Default target
#
all: init build_libraries build_modules
#
# Initialization target: print build information and create necessary directories
#
init:
\t-@echo Building ... $(PLATFORM_FILE) [${build_architecture_list}]
\t${BEGIN}-@${create_directory_command}
\t${END}
#
# library build target
#
libraries: init build_libraries
#
# module build target
#
modules: init build_libraries build_modules
#
# Build all libraries:
#
build_libraries:
${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${library_makefile_list} pbuild
${END}\t@cd $(BUILD_DIR)
#
# Build all modules:
#
build_modules:
${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${module_makefile_list} pbuild
${END}\t@cd $(BUILD_DIR)
#
# Clean intermediate files
#
clean:
\t${BEGIN}-@${library_build_command} clean
\t${END}${BEGIN}-@${module_build_command} clean
\t${END}@cd $(BUILD_DIR)
#
# Clean all generated files except to makefile
#
cleanall:
${BEGIN}\t${cleanall_command}
${END}
#
# Clean all library files
#
cleanlib:
\t${BEGIN}-@${library_build_command} cleanall
\t${END}@cd $(BUILD_DIR)\n
''')
    ## Constructor of PlatformMakefile
    #
    #   @param  ModuleAutoGen   Object of PlatformAutoGen class
    #
    def __init__(self, PlatformAutoGen):
        """Initialize bookkeeping lists for the platform-level makefile."""
        BuildFile.__init__(self, PlatformAutoGen)
        # Per-module / per-library makefile paths and the shell commands that
        # invoke them; filled in lazily by _TemplateDict.
        self.ModuleBuildCommandList = []
        self.ModuleMakefileList = []
        self.IntermediateDirectoryList = []
        self.ModuleBuildDirectoryList = []
        self.LibraryBuildDirectoryList = []
        self.LibraryMakeCommandList = []
        self.DependencyHeaderFileSet = set()
    # Compose a dict object containing information used to do replacement in template
    @property
    def _TemplateDict(self):
        """Build the substitution dictionary for _TEMPLATE_.

        Aborts the build (EdkLogger.error raises) when tools_def.txt does not
        define a MAKE command path.
        """
        Separator = self._SEP_[self._Platform]
        MyAgo = self._AutoGenObject
        if "MAKE" not in MyAgo.ToolDefinition or "PATH" not in MyAgo.ToolDefinition["MAKE"]:
            EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
                            ExtraData="[%s]" % str(MyAgo))
        self.IntermediateDirectoryList = ["$(BUILD_DIR)"]
        self.ModuleBuildDirectoryList = self.GetModuleBuildDirectoryList()
        self.LibraryBuildDirectoryList = self.GetLibraryBuildDirectoryList()
        MakefileName = self.getMakefileName()
        # Derive, for every library build directory, the sub-makefile path and
        # the platform-specific "make -f <path>" command line.
        LibraryMakefileList = []
        LibraryMakeCommandList = []
        for D in self.LibraryBuildDirectoryList:
            D = self.PlaceMacro(D, {"BUILD_DIR":MyAgo.BuildDir})
            Makefile = os.path.join(D, MakefileName)
            Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":Makefile}
            LibraryMakefileList.append(Makefile)
            LibraryMakeCommandList.append(Command)
        self.LibraryMakeCommandList = LibraryMakeCommandList
        # Same derivation for every module build directory.
        ModuleMakefileList = []
        ModuleMakeCommandList = []
        for D in self.ModuleBuildDirectoryList:
            D = self.PlaceMacro(D, {"BUILD_DIR":MyAgo.BuildDir})
            Makefile = os.path.join(D, MakefileName)
            Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":Makefile}
            ModuleMakefileList.append(Makefile)
            ModuleMakeCommandList.append(Command)
        MakefileTemplateDict = {
            "makefile_header"           : self._FILE_HEADER_[self._FileType],
            "makefile_path"             : os.path.join("$(BUILD_DIR)", MakefileName),
            "make_path"                 : MyAgo.ToolDefinition["MAKE"]["PATH"],
            "makefile_name"             : MakefileName,
            "platform_name"             : MyAgo.Name,
            "platform_guid"             : MyAgo.Guid,
            "platform_version"          : MyAgo.Version,
            "platform_file"             : MyAgo.MetaFile,
            "platform_relative_directory": MyAgo.SourceDir,
            "platform_output_directory" : MyAgo.OutputDir,
            "platform_build_directory"  : MyAgo.BuildDir,
            "platform_dir"              : MyAgo.Macros["PLATFORM_DIR"],
            "toolchain_tag"             : MyAgo.ToolChain,
            "build_target"              : MyAgo.BuildTarget,
            "shell_command_code"        : list(self._SHELL_CMD_[self._Platform].keys()),
            "shell_command"             : list(self._SHELL_CMD_[self._Platform].values()),
            "build_architecture_list"   : MyAgo.Arch,
            "architecture"              : MyAgo.Arch,
            "separator"                 : Separator,
            "create_directory_command"  : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
            "cleanall_command"          : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
            "library_makefile_list"     : LibraryMakefileList,
            "module_makefile_list"      : ModuleMakefileList,
            "library_build_command"     : LibraryMakeCommandList,
            "module_build_command"      : ModuleMakeCommandList,
        }
        return MakefileTemplateDict
    ## Get the root directory list for intermediate files of all modules build
    #
    #   @retval     list    The list of directory
    #
    def GetModuleBuildDirectoryList(self):
        """Return build directories of all non-binary modules of this platform."""
        DirList = []
        for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
            if not ModuleAutoGen.IsBinaryModule:
                DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
        return DirList
    ## Get the root directory list for intermediate files of all libraries build
    #
    #   @retval     list    The list of directory
    #
    def GetLibraryBuildDirectoryList(self):
        """Return build directories of all non-binary libraries of this platform."""
        DirList = []
        for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
            if not LibraryAutoGen.IsBinaryModule:
                DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
        return DirList
## TopLevelMakefile class
#
#  This class encapsules makefie and its generation for entrance makefile. It
#  uses template to generate the content of makefile. The content of makefile
#  will be got from WorkspaceAutoGen object.
#
class TopLevelMakefile(BuildFile):
    ## template used to generate toplevel makefile
    _TEMPLATE_ = TemplateString('''${BEGIN}\tGenFds -f ${fdf_file} --conf=${conf_directory} -o ${platform_build_directory} -t ${toolchain_tag} -b ${build_target} -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -C ${cap} ${END}${BEGIN} -D ${macro} ${END}''')
    ## Constructor of TopLevelMakefile
    #
    #   @param  Workspace  Object of WorkspaceAutoGen class
    #
    def __init__(self, Workspace):
        """Initialize bookkeeping for the workspace entrance makefile."""
        BuildFile.__init__(self, Workspace)
        self.IntermediateDirectoryList = []
        self.DependencyHeaderFileSet = set()
    # Compose a dict object containing information used to do replacement in template
    @property
    def _TemplateDict(self):
        """Build the substitution dictionary for the GenFds command template."""
        Separator = self._SEP_[self._Platform]
        # any platform autogen object is ok because we just need common information
        MyAgo = self._AutoGenObject
        if "MAKE" not in MyAgo.ToolDefinition or "PATH" not in MyAgo.ToolDefinition["MAKE"]:
            EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
                            ExtraData="[%s]" % str(MyAgo))
        # One intermediate directory per target architecture, plus the FV dir.
        for Arch in MyAgo.ArchList:
            self.IntermediateDirectoryList.append(Separator.join(["$(BUILD_DIR)", Arch]))
        self.IntermediateDirectoryList.append("$(FV_DIR)")
        # TRICK: for not generating GenFds call in makefile if no FDF file
        MacroList = []
        if MyAgo.FdfFile is not None and MyAgo.FdfFile != "":
            FdfFileList = [MyAgo.FdfFile]
            # macros passed to GenFds
            MacroDict = {}
            MacroDict.update(GlobalData.gGlobalDefines)
            MacroDict.update(GlobalData.gCommandLineDefines)
            for MacroName in MacroDict:
                if MacroDict[MacroName] != "":
                    MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
                else:
                    MacroList.append('"%s"' % MacroName)
        else:
            FdfFileList = []
        # pass extra common options to external program called in makefile, currently GenFds.exe
        ExtraOption = ''
        LogLevel = EdkLogger.GetLevel()
        if LogLevel == EdkLogger.VERBOSE:
            ExtraOption += " -v"
        elif LogLevel <= EdkLogger.DEBUG_9:
            ExtraOption += " -d %d" % (LogLevel - 1)
        elif LogLevel == EdkLogger.QUIET:
            ExtraOption += " -q"
        if GlobalData.gCaseInsensitive:
            ExtraOption += " -c"
        if not GlobalData.gEnableGenfdsMultiThread:
            ExtraOption += " --no-genfds-multi-thread"
        if GlobalData.gIgnoreSource:
            ExtraOption += " --ignore-sources"
        # Forward any --pcd overrides from the build command line to GenFds;
        # byte-array values (starting with '{') are passed with an 'H' prefix.
        for pcd in GlobalData.BuildOptionPcd:
            if pcd[2]:
                pcdname = '.'.join(pcd[0:3])
            else:
                pcdname = '.'.join(pcd[0:2])
            if pcd[3].startswith('{'):
                ExtraOption += " --pcd " + pcdname + '=' + 'H' + '"' + pcd[3] + '"'
            else:
                ExtraOption += " --pcd " + pcdname + '=' + pcd[3]
        MakefileName = self.getMakefileName()
        SubBuildCommandList = []
        for A in MyAgo.ArchList:
            Command = self._MAKE_TEMPLATE_[self._Platform] % {"file":os.path.join("$(BUILD_DIR)", A, MakefileName)}
            SubBuildCommandList.append(Command)
        MakefileTemplateDict = {
            "makefile_header"           : self._FILE_HEADER_[self._FileType],
            "makefile_path"             : os.path.join("$(BUILD_DIR)", MakefileName),
            "make_path"                 : MyAgo.ToolDefinition["MAKE"]["PATH"],
            "platform_name"             : MyAgo.Name,
            "platform_guid"             : MyAgo.Guid,
            "platform_version"          : MyAgo.Version,
            "platform_build_directory"  : MyAgo.BuildDir,
            "conf_directory"            : GlobalData.gConfDirectory,
            "toolchain_tag"             : MyAgo.ToolChain,
            "build_target"              : MyAgo.BuildTarget,
            "shell_command_code"        : list(self._SHELL_CMD_[self._Platform].keys()),
            "shell_command"             : list(self._SHELL_CMD_[self._Platform].values()),
            'arch'                      : list(MyAgo.ArchList),
            "build_architecture_list"   : ','.join(MyAgo.ArchList),
            "separator"                 : Separator,
            "create_directory_command"  : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
            "cleanall_command"          : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
            "sub_build_command"         : SubBuildCommandList,
            "fdf_file"                  : FdfFileList,
            "active_platform"           : str(MyAgo),
            "fd"                        : MyAgo.FdTargetList,
            "fv"                        : MyAgo.FvTargetList,
            "cap"                       : MyAgo.CapTargetList,
            "extra_options"             : ExtraOption,
            "macro"                     : MacroList,
        }
        return MakefileTemplateDict
    ## Get the root directory list for intermediate files of all modules build
    #
    #   @retval     list    The list of directory
    #
    def GetModuleBuildDirectoryList(self):
        """Return build directories of all non-binary modules (same contract as PlatformMakefile)."""
        DirList = []
        for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
            if not ModuleAutoGen.IsBinaryModule:
                DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
        return DirList
    ## Get the root directory list for intermediate files of all libraries build
    #
    #   @retval     list    The list of directory
    #
    def GetLibraryBuildDirectoryList(self):
        """Return build directories of all non-binary libraries (same contract as PlatformMakefile)."""
        DirList = []
        for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
            if not LibraryAutoGen.IsBinaryModule:
                DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
        return DirList
## Find dependencies for one source file
#
#  By searching recursively "#include" directive in file, find out all the
#  files needed by given source file. The dependencies will be only searched
#  in given search path list.
#
#   @param      File            The source file
#   @param      ForceInculeList The list of files which will be included forcely
#   @param      SearchPathList  The list of search path
#
#   @retval     list            The list of files the given source file depends on
#
def GetDependencyList(AutoGenObject, FileCache, File, ForceList, SearchPathList):
    """Transitively resolve include dependencies for File.

    Uses two caches: FileCache (per-build, maps a file to its resolved
    dependency paths) and the module-level gDependencyDatabase keyed by
    architecture (maps a file to the raw include names found in it).
    """
    EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File)
    # Depth-first worklist: the file itself plus any force-included files.
    FileStack = [File] + ForceList
    DependencySet = set()
    if AutoGenObject.Arch not in gDependencyDatabase:
        gDependencyDatabase[AutoGenObject.Arch] = {}
    DepDb = gDependencyDatabase[AutoGenObject.Arch]
    while len(FileStack) > 0:
        F = FileStack.pop()
        FullPathDependList = []
        if F in FileCache:
            # this file has been parsed already; reuse the cached resolved
            # paths and keep walking into any not yet visited
            for CacheFile in FileCache[F]:
                FullPathDependList.append(CacheFile)
                if CacheFile not in DependencySet:
                    FileStack.append(CacheFile)
            DependencySet.update(FullPathDependList)
            continue
        CurrentFileDependencyList = []
        if F in DepDb:
            CurrentFileDependencyList = DepDb[F]
        else:
            try:
                Fd = open(F.Path, 'rb')
                FileContent = Fd.read()
                Fd.close()
            except BaseException as X:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
            if len(FileContent) == 0:
                continue
            try:
                # 0xff/0xfe as first byte indicates a UTF-16 BOM; otherwise
                # fall back to the default (UTF-8) decoding.
                if FileContent[0] == 0xff or FileContent[0] == 0xfe:
                    FileContent = FileContent.decode('utf-16')
                else:
                    FileContent = FileContent.decode()
            except:
                # The file is not txt file. for example .mcb file
                continue
            IncludedFileList = gIncludePattern.findall(FileContent)
            for Inc in IncludedFileList:
                Inc = Inc.strip()
                # if there's macro used to reference header file, expand it
                HeaderList = gMacroPattern.findall(Inc)
                if len(HeaderList) == 1 and len(HeaderList[0]) == 2:
                    HeaderType = HeaderList[0][0]
                    HeaderKey = HeaderList[0][1]
                    if HeaderType in gIncludeMacroConversion:
                        Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey}
                    else:
                        # not known macro used in #include, always build the file by
                        # returning a empty dependency
                        FileCache[File] = []
                        return []
                Inc = os.path.normpath(Inc)
                CurrentFileDependencyList.append(Inc)
            DepDb[F] = CurrentFileDependencyList
        # Resolve each raw include name against the including file's own
        # directory first, then the supplied search paths (first match wins).
        CurrentFilePath = F.Dir
        PathList = [CurrentFilePath] + SearchPathList
        for Inc in CurrentFileDependencyList:
            for SearchPath in PathList:
                FilePath = os.path.join(SearchPath, Inc)
                if FilePath in gIsFileMap:
                    if not gIsFileMap[FilePath]:
                        continue
                # If isfile is called too many times, the performance is slow down.
                elif not os.path.isfile(FilePath):
                    gIsFileMap[FilePath] = False
                    continue
                else:
                    gIsFileMap[FilePath] = True
                FilePath = PathClass(FilePath)
                FullPathDependList.append(FilePath)
                if FilePath not in DependencySet:
                    FileStack.append(FilePath)
                break
            else:
                EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\
                                "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList)))
        FileCache[F] = FullPathDependList
        DependencySet.update(FullPathDependList)
    DependencySet.update(ForceList)
    # A file never depends on itself.
    if File in DependencySet:
        DependencySet.remove(File)
    DependencyList = list(DependencySet)  # remove duplicate ones
    return DependencyList
# Script entry point: this module only provides classes and functions for the
# build system, so direct execution is intentionally a no-op.
if __name__ == '__main__':
    pass
| edk2-master | BaseTools/Source/Python/AutoGen/GenMake.py |
# Copyright (c) 2015 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# This file is used to collect the Variable checking information
#
# #
# Import Modules
#
import os
from Common.RangeExpression import RangeExpression
from Common.Misc import *
from io import BytesIO
from struct import pack
from Common.DataType import *
class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
    """Collects per-PCD variable check tables and serializes them into the
    PcdVarCheck.bin binary consumed by firmware variable-check drivers."""
    def __init__(self):
        # List of VAR_CHECK_PCD_VARIABLE_TAB objects gathered so far
        self.var_check_info = []
    def push_back(self, var_check_tab):
        """Add a table; if a table for the same variable (equal GUID/Name)
        already exists, merge the new valid entries into it instead."""
        for tab in self.var_check_info:
            if tab.equal(var_check_tab):
                tab.merge(var_check_tab)
                break
        else:
            self.var_check_info.append(var_check_tab)
    def dump(self, dest, Phase):
        """Write all collected tables to <dest>/PcdVarCheck.bin.

        Two passes over the tables: the first only computes each table's total
        Length (header + name + padded valid entries, 4-byte aligned), the
        second emits the bytes with struct.pack.  In the DXE phase an existing
        (PEI) PcdVarCheck.bin is padded and prepended so both phases share one
        file.  No-op when dest is not an absolute path.
        """
        if not os.path.isabs(dest):
            return
        if not os.path.exists(dest):
            os.mkdir(dest)
        BinFileName = "PcdVarCheck.bin"
        BinFilePath = os.path.join(dest, BinFileName)
        Buffer = bytearray()
        # Pass 1: compute Length for every table without emitting any bytes.
        index = 0
        for var_check_tab in self.var_check_info:
            index += 1
            realLength = 0
            realLength += 32
            Name = var_check_tab.Name[1:-1]
            NameChars = Name.split(",")
            realLength += len(NameChars)
            # Pad name to a 4-byte boundary (not needed for the very last
            # table when it has no valid entries following the name).
            if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
                realLength += (4 - (realLength % 4))
            itemIndex = 0
            for item in var_check_tab.validtab:
                itemIndex += 1
                realLength += 5
                for v_data in item.data:
                    if isinstance(v_data, int):
                        # single valid value: one storage-width entry
                        realLength += item.StorageWidth
                    else:
                        # (start, end) range pair: two storage-width entries
                        realLength += item.StorageWidth
                        realLength += item.StorageWidth
                if (index == len(self.var_check_info)) :
                    if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
                        realLength += (4 - (realLength % 4))
                else:
                    if realLength % 4:
                        realLength += (4 - (realLength % 4))
            var_check_tab.Length = realLength
        # Pass 2: emit the bytes, mirroring the layout computed above.
        realLength = 0
        index = 0
        for var_check_tab in self.var_check_info:
            index += 1
            b = pack("=H", var_check_tab.Revision)
            Buffer += b
            realLength += 2
            b = pack("=H", var_check_tab.HeaderLength)
            Buffer += b
            realLength += 2
            b = pack("=L", var_check_tab.Length)
            Buffer += b
            realLength += 4
            b = pack("=B", var_check_tab.Type)
            Buffer += b
            realLength += 1
            # three reserved bytes after Type
            for i in range(0, 3):
                b = pack("=B", var_check_tab.Reserved)
                Buffer += b
                realLength += 1
            b = pack("=L", var_check_tab.Attributes)
            Buffer += b
            realLength += 4
            Guid = var_check_tab.Guid
            b = PackByteFormatGUID(Guid)
            Buffer += b
            realLength += 16
            # Name is stored as a comma-separated list of hex byte values
            # wrapped in braces; strip the braces and emit each byte.
            Name = var_check_tab.Name[1:-1]
            NameChars = Name.split(",")
            for NameChar in NameChars:
                NameCharNum = int(NameChar, 16)
                b = pack("=B", NameCharNum)
                Buffer += b
                realLength += 1
            if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
                for i in range(4 - (realLength % 4)):
                    b = pack("=B", var_check_tab.pad)
                    Buffer += b
                    realLength += 1
            itemIndex = 0
            for item in var_check_tab.validtab:
                itemIndex += 1
                b = pack("=B", item.Type)
                Buffer += b
                realLength += 1
                b = pack("=B", item.Length)
                Buffer += b
                realLength += 1
                b = pack("=H", int(item.VarOffset, 16))
                Buffer += b
                realLength += 2
                b = pack("=B", item.StorageWidth)
                Buffer += b
                realLength += 1
                for v_data in item.data:
                    if isinstance(v_data, int):
                        b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data)
                        Buffer += b
                        realLength += item.StorageWidth
                    else:
                        b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data[0])
                        Buffer += b
                        realLength += item.StorageWidth
                        b = pack(PACK_CODE_BY_SIZE[item.StorageWidth], v_data[1])
                        Buffer += b
                        realLength += item.StorageWidth
                if (index == len(self.var_check_info)) :
                    if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
                        for i in range(4 - (realLength % 4)):
                            b = pack("=B", var_check_tab.pad)
                            Buffer += b
                            realLength += 1
                else:
                    if realLength % 4:
                        for i in range(4 - (realLength % 4)):
                            b = pack("=B", var_check_tab.pad)
                            Buffer += b
                            realLength += 1
        DbFile = BytesIO()
        # In DXE, prepend the already-dumped (PEI) binary, padded to 4 bytes.
        if Phase == 'DXE' and os.path.exists(BinFilePath):
            BinFile = open(BinFilePath, "rb")
            BinBuffer = BinFile.read()
            BinFile.close()
            BinBufferSize = len(BinBuffer)
            if (BinBufferSize % 4):
                for i in range(4 - (BinBufferSize % 4)):
                    b = pack("=B", VAR_CHECK_PCD_VARIABLE_TAB.pad)
                    BinBuffer += b
            Buffer = BinBuffer + Buffer
        DbFile.write(Buffer)
        SaveFileOnChange(BinFilePath, DbFile.getvalue(), True)
class VAR_CHECK_PCD_VARIABLE_TAB(object):
    """One variable check table: header fields plus a list of valid-value
    check objects for a single (GUID, variable name) pair."""
    # Byte value used to pad structures to 4-byte alignment.
    pad = 0xDA
    def __init__(self, TokenSpaceGuid, PcdCName):
        """Initialize header fields.

        TokenSpaceGuid is a C-style GUID initializer string such as
        "{0x12, 0x34, {0x56, 0x78, ...}}"; PcdCName is the brace-wrapped,
        comma-separated byte list of the variable name.
        """
        self.Revision = 0x0001
        self.HeaderLength = 0
        self.Length = 0  # Length include this header
        self.Type = 0
        self.Reserved = 0
        self.Attributes = 0x00000000
        # Parse the GUID initializer into a flat list of integers without
        # eval(): strip all braces, split on commas, and let int(x, 0)
        # interpret 0x-prefixed and decimal literals (same result the
        # original eval() produced for well-formed GUID strings).
        self.Guid = [int(ByteStr, 0) for ByteStr in
                     TokenSpaceGuid.replace("{", "").replace("}", "").split(",")
                     if ByteStr.strip()]
        self.Name = PcdCName
        self.validtab = []
    def UpdateSize(self):
        """Recompute HeaderLength and total Length from the name and validtab."""
        self.HeaderLength = 32 + len(self.Name.split(","))
        self.Length = 32 + len(self.Name.split(",")) + self.GetValidTabLen()
    def GetValidTabLen(self):
        """Return the summed serialized length of all valid-check entries."""
        validtablen = 0
        for item in self.validtab:
            validtablen += item.Length
        return validtablen
    def SetAttributes(self, attributes):
        """Set the EFI variable attributes for this table."""
        self.Attributes = attributes
    def push_back(self, valid_obj):
        """Append a valid-check object; None is silently ignored."""
        if valid_obj is not None:
            self.validtab.append(valid_obj)
    def equal(self, varchecktab):
        """Return True when both tables describe the same variable (GUID and Name)."""
        if self.Guid == varchecktab.Guid and self.Name == varchecktab.Name:
            return True
        else:
            return False
    def merge(self, varchecktab):
        """Absorb the valid-check entries of another table, skipping duplicates."""
        for validobj in varchecktab.validtab:
            if validobj in self.validtab:
                continue
            self.validtab.append(validobj)
        self.UpdateSize()
class VAR_CHECK_PCD_VALID_OBJ(object):
    """Base class of a single valid-value check entry for a variable PCD."""
    def __init__(self, VarOffset, data, PcdDataType):
        """Capture the variable offset, raw check data, and datum type.

        StorageWidth is looked up from MAX_SIZE_TYPE by datum type; an
        unknown type yields width 0 and ValidData=False.
        """
        self.Type = 1
        self.Length = 0  # Length include this header
        self.VarOffset = VarOffset
        self.PcdDataType = PcdDataType.strip()
        self.rawdata = data
        self.data = set()
        # Dictionary lookup with a default replaces the original bare
        # `except:`, which also swallowed SystemExit/KeyboardInterrupt.
        self.StorageWidth = MAX_SIZE_TYPE.get(self.PcdDataType, 0)
        self.ValidData = self.StorageWidth != 0
    def __eq__(self, validObj):
        # Entries are considered duplicates when they target the same offset.
        return validObj and self.VarOffset == validObj.VarOffset
class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
    """Valid-value-list check entry (Type 1): a set of discrete allowed values."""
    def __init__(self, VarOffset, validlist, PcdDataType):
        super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
        self.Type = 1
        # Flatten all comma-separated tokens from the raw entries and parse
        # each one, honoring an optional 0x/0X hex prefix.
        tokens = (tok.strip() for entry in self.rawdata for tok in entry.split(','))
        for tok in tokens:
            base = 16 if tok[:2] in ('0x', '0X') else 10
            self.data.add(int(tok, base))
        # 5-byte entry header plus one storage-width slot per distinct value.
        self.Length = 5 + len(self.data) * self.StorageWidth
class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
    """Valid-range check entry (Type 2): a set of (start, end) value ranges."""
    def __init__(self, VarOffset, validrange, PcdDataType):
        super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
        self.Type = 2
        RangeExpr = ""
        # BUGFIX: the original initialized i = 0 but never incremented it, so
        # every raw entry took the i == 0 branch and OVERWROTE RangeExpr --
        # only the last range expression was honored and the OR branch was
        # dead code.  enumerate() restores the intended OR-combination.
        for i, item in enumerate(self.rawdata):
            if i == 0:
                RangeExpr = "( " + item + " )"
            else:
                RangeExpr = RangeExpr + "OR ( " + item + " )"
        range_result = RangeExpression(RangeExpr, self.PcdDataType)(True)
        for rangelist in range_result:
            for obj in rangelist.pop():
                self.data.add((obj.start, obj.end))
        # 5-byte entry header plus (start, end) pairs of storage-width slots.
        self.Length = 5 + len(self.data) * 2 * self.StorageWidth
def GetValidationObject(PcdClass, VarOffset):
    """Factory: build the appropriate valid-check object for a PCD.

    Range checks take precedence over value lists; returns None when the PCD
    declares neither.
    """
    if PcdClass.validateranges:
        return VAR_CHECK_PCD_VALID_RANGE(VarOffset, PcdClass.validateranges, PcdClass.DatumType)
    if PcdClass.validlists:
        return VAR_CHECK_PCD_VALID_LIST(VarOffset, PcdClass.validlists, PcdClass.DatumType)
    return None
| edk2-master | BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
from Workspace.WorkspaceDatabase import WorkspaceDatabase,BuildDB
from Common.caching import cached_property
from AutoGen.BuildEngine import BuildRule,AutoGenReqBuildRuleVerNum
from AutoGen.AutoGen import CalculatePriorityValue
from Common.Misc import CheckPcdDatum,GuidValue
from Common.Expression import ValueExpressionEx
from Common.DataType import *
from CommonDataClass.Exceptions import *
from CommonDataClass.CommonClass import SkuInfoClass
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import OPTION_CONFLICT,FORMAT_INVALID,RESOURCE_NOT_AVAILABLE
from Common.MultipleWorkspace import MultipleWorkspace as mws
from collections import defaultdict
from Common.Misc import PathClass
import os
#
# The priority list while override build option
#
# Each key is a 5-digit bitmap, one digit per field of the build-option key
# TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE: 1 means the field is given
# explicitly, 0 means it is the wildcard '*'.  More specific combinations map
# to higher priority values.
#
PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
            "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
            "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
            "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
            "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)
## Base class for AutoGen
#
# This class just implements the cache mechanism of AutoGen objects.
#
class AutoGenInfo(object):
    # database to maintain the objects in each child class
    __ObjectCache = {}    # (BuildTarget, ToolChain, ARCH, platform file): AutoGen object

    ## Factory method
    #
    # @param  Class           class object of real AutoGen class
    #                         (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
    # @param  Workspace       Workspace directory or WorkspaceAutoGen object
    # @param  MetaFile        The path of meta file
    # @param  Target          Build target
    # @param  Toolchain       Tool chain name
    # @param  Arch            Target arch
    # @param  *args           The specific class related parameters
    # @param  **kwargs        The specific class related dict parameters
    #
    @classmethod
    def GetCache(cls):
        return cls.__ObjectCache
    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        # check if the object has been created
        Key = (Target, Toolchain, Arch, MetaFile)
        if Key in cls.__ObjectCache:
            # if it exists, just return it directly
            return cls.__ObjectCache[Key]
            # it didn't exist. create it, cache it, then return it
        RetVal = cls.__ObjectCache[Key] = super(AutoGenInfo, cls).__new__(cls)
        return RetVal

    ## hash() operator
    #
    #  The file path of platform file will be used to represent hash value of this object
    #
    #   @retval int Hash value of the file path of platform file
    #
    def __hash__(self):
        return hash(self.MetaFile)

    ## str() operator
    #
    #  The file path of platform file will be used to represent this object
    #
    #   @retval string String of platform file path
    #
    def __str__(self):
        return str(self.MetaFile)

    ## "==" operator
    #
    # Note: always returns a real bool.  The previous form
    # "return Other and self.MetaFile == Other" leaked the falsy operand
    # itself (e.g. None) out of __eq__ when Other was falsy.
    def __eq__(self, Other):
        return bool(Other) and self.MetaFile == Other

    ## Expand * in build option key
    #
    #   @param  Options     Options to be expanded
    #   @param  ToolDef     Use specified ToolDef instead of full version.
    #                       This is needed during initialization to prevent
    #                       infinite recursion between BuildOptions,
    #                       ToolDefinition, and this function.
    #
    #   @retval options     Options expanded
    #
    def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
        if not ToolDef:
            ToolDef = self.ToolDefinition
        BuildOptions = {}
        FamilyMatch  = False
        FamilyIsNull = True

        OverrideList = {}
        #
        # Construct a list contain the build options which need override.
        #
        for Key in Options:
            #
            # Key[0] -- tool family
            # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            #
            if (Key[0] == self.BuildRuleFamily and
                (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
                Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
                if (Target == self.BuildTarget or Target == TAB_STAR) and\
                    (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
                    (Arch == self.Arch or Arch == TAB_STAR) and\
                    Options[Key].startswith("="):

                    if OverrideList.get(Key[1]) is not None:
                        OverrideList.pop(Key[1])
                    OverrideList[Key[1]] = Options[Key]

        #
        # Use the highest priority value.
        #
        if (len(OverrideList) >= 2):
            KeyList = list(OverrideList.keys())
            for Index in range(len(KeyList)):
                NowKey = KeyList[Index]
                Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
                for Index1 in range(len(KeyList) - Index - 1):
                    NextKey = KeyList[Index1 + Index + 1]
                    #
                    # Compare two Key, if one is included by another, choose the higher priority one
                    #
                    Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
                    if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
                        (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
                        (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
                        (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
                        (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):

                        if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
                            if Options.get((self.BuildRuleFamily, NextKey)) is not None:
                                Options.pop((self.BuildRuleFamily, NextKey))
                        else:
                            if Options.get((self.BuildRuleFamily, NowKey)) is not None:
                                Options.pop((self.BuildRuleFamily, NowKey))

        for Key in Options:
            if ModuleStyle is not None and len (Key) > 2:
                # Check Module style is EDK or EDKII.
                # Only append build option for the matched style module.
                if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                    continue
                elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                    continue
            Family = Key[0]
            Target, Tag, Arch, Tool, Attr = Key[1].split("_")
            # if tool chain family doesn't match, skip it
            if Family != "":
                Found = False
                if Tool in ToolDef:
                    FamilyIsNull = False
                    if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[Tool]:
                        if Family == ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                            FamilyMatch = True
                            Found = True
                if TAB_STAR in ToolDef:
                    FamilyIsNull = False
                    if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[TAB_STAR]:
                        if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                            FamilyMatch = True
                            Found = True
                if not Found:
                    continue
            # expand any wildcard
            if Target == TAB_STAR or Target == self.BuildTarget:
                if Tag == TAB_STAR or Tag == self.ToolChain:
                    if Arch == TAB_STAR or Arch == self.Arch:
                        if Tool not in BuildOptions:
                            BuildOptions[Tool] = {}
                        if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                            BuildOptions[Tool][Attr] = Options[Key]
                        else:
                            # append options for the same tool except PATH
                            if Attr != 'PATH':
                                BuildOptions[Tool][Attr] += " " + Options[Key]
                            else:
                                BuildOptions[Tool][Attr] = Options[Key]
        # Build Option Family has been checked, which needn't be checked again for family.
        if FamilyMatch or FamilyIsNull:
            return BuildOptions

        for Key in Options:
            if ModuleStyle is not None and len (Key) > 2:
                # Check Module style is EDK or EDKII.
                # Only append build option for the matched style module.
                if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                    continue
                elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                    continue
            Family = Key[0]
            Target, Tag, Arch, Tool, Attr = Key[1].split("_")
            # if tool chain family doesn't match, skip it
            if Family == "":
                continue
            # option has been added before
            Found = False
            if Tool in ToolDef:
                if TAB_TOD_DEFINES_FAMILY in ToolDef[Tool]:
                    if Family == ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
                        Found = True
            if TAB_STAR in ToolDef:
                if TAB_TOD_DEFINES_FAMILY in ToolDef[TAB_STAR]:
                    if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_FAMILY]:
                        Found = True
            if not Found:
                continue
            # expand any wildcard
            if Target == TAB_STAR or Target == self.BuildTarget:
                if Tag == TAB_STAR or Tag == self.ToolChain:
                    if Arch == TAB_STAR or Arch == self.Arch:
                        if Tool not in BuildOptions:
                            BuildOptions[Tool] = {}
                        if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                            BuildOptions[Tool][Attr] = Options[Key]
                        else:
                            # append options for the same tool except PATH
                            if Attr != 'PATH':
                                BuildOptions[Tool][Attr] += " " + Options[Key]
                            else:
                                BuildOptions[Tool][Attr] = Options[Key]

        return BuildOptions
#
# This class is the pruned WorkSpaceAutoGen for ModuleAutoGen in multiple thread
#
class WorkSpaceInfo(AutoGenInfo):
    def __init__(self, Workspace, MetaFile, Target, ToolChain, Arch):
        # AutoGenInfo caches instances, so __init__ may run again on a
        # cached object; perform the real setup only on first creation.
        if not hasattr(self, "_Init"):
            self.do_init(Workspace, MetaFile, Target, ToolChain, Arch)
            self._Init = True

    def do_init(self, Workspace, MetaFile, Target, ToolChain, Arch):
        ## One-time initialization of the pruned workspace view.
        self.WorkspaceDir = Workspace
        self.ActivePlatform = MetaFile
        self.Target = Target
        self.ToolChain = ToolChain
        self.ArchList = Arch
        self._SrcTimeStamp = 0
        # Workspace build database handle and its object accessor.
        self.Db = BuildDB
        self.BuildDatabase = self.Db.BuildObject
        # Filled in later with the per-arch platform AutoGen objects.
        self.AutoGenObjectList = []

    @property
    def BuildDir(self):
        ## Build directory, delegated to the primary AutoGen object.
        primary = self.AutoGenObjectList[0]
        return primary.BuildDir

    @property
    def Name(self):
        ## Platform name, delegated to the primary AutoGen object.
        primary = self.AutoGenObjectList[0]
        return primary.Platform.PlatformName

    @property
    def FlashDefinition(self):
        ## Flash (FDF) definition, delegated to the primary AutoGen object.
        primary = self.AutoGenObjectList[0]
        return primary.Platform.FlashDefinition

    @property
    def GenFdsCommandDict(self):
        ## GenFds command dictionary recorded in the data pipe; {} when absent.
        command_dict = self.AutoGenObjectList[0].DataPipe.Get("FdsCommandDict")
        return command_dict or {}

    @cached_property
    def FvDir(self):
        ## Firmware-volume output directory under BuildDir.
        return os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
## Pruned, data-pipe-backed stand-in for PlatformAutoGen.
#
# Worker processes use this class instead of the full PlatformAutoGen: most
# data is read back from the DataPipe snapshot rather than re-parsed from the
# workspace.
#
class PlatformInfo(AutoGenInfo):
    def __init__(self, Workspace, MetaFile, Target, ToolChain, Arch,DataPipe):
        # AutoGenInfo caches instances per (Target, ToolChain, Arch, MetaFile);
        # only initialize the first time this cached object is handed out.
        if not hasattr(self, "_Init"):
            self.do_init(Workspace, MetaFile, Target, ToolChain, Arch,DataPipe)
            self._Init = True
    def do_init(self,Workspace, MetaFile, Target, ToolChain, Arch,DataPipe):
        self.Wa = Workspace
        self.WorkspaceDir = self.Wa.WorkspaceDir
        self.MetaFile = MetaFile
        self.Arch = Arch
        self.Target = Target
        # Target and BuildTarget are kept as aliases of the same value.
        self.BuildTarget = Target
        self.ToolChain = ToolChain
        # Platform build object for this (dsc, arch, target, toolchain) tuple.
        self.Platform = self.Wa.BuildDatabase[self.MetaFile, self.Arch, self.Target, self.ToolChain]
        self.SourceDir = MetaFile.SubDir
        self.DataPipe = DataPipe

    ## As-built (binary-only) modules recorded in the data pipe; {} when absent.
    @cached_property
    def _AsBuildModuleList(self):
        retVal = self.DataPipe.Get("AsBuildModuleList")
        if retVal is None:
            retVal = {}
        return retVal

    ## Test if a module is supported by the platform
    #
    #  An error will be raised directly if the module or its arch is not supported
    #  by the platform or current configuration
    #
    def ValidModule(self, Module):
        return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
            or Module in self._AsBuildModuleList

    ## Tool chain family map recorded in the data pipe; {} when absent.
    @cached_property
    def ToolChainFamily(self):
        retVal = self.DataPipe.Get("ToolChainFamily")
        if retVal is None:
            retVal = {}
        return retVal

    ## Build rule family map recorded in the data pipe; {} when absent.
    @cached_property
    def BuildRuleFamily(self):
        retVal = self.DataPipe.Get("BuildRuleFamily")
        if retVal is None:
            retVal = {}
        return retVal

    ## Module build objects for every module listed in the platform DSC.
    @cached_property
    def _MbList(self):
        return [self.Wa.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain] for m in self.Platform.Modules]

    ## Unique package (DEC) build objects referenced by this platform.
    @cached_property
    def PackageList(self):
        RetVal = set()
        for dec_file,Arch in self.DataPipe.Get("PackageList"):
            RetVal.add(self.Wa.BuildDatabase[dec_file,Arch,self.BuildTarget, self.ToolChain])
        return list(RetVal)

    ## Return the directory to store all intermediate and final files built
    @cached_property
    def BuildDir(self):
        # OutputDir may be absolute or workspace-relative; the final directory
        # is always suffixed with "<Target>_<ToolChain>".
        if os.path.isabs(self.OutputDir):
            RetVal = os.path.join(
                                os.path.abspath(self.OutputDir),
                                self.Target + "_" + self.ToolChain,
                                )
        else:
            RetVal = os.path.join(
                                self.WorkspaceDir,
                                self.OutputDir,
                                self.Target + "_" + self.ToolChain,
                                )
        return RetVal

    ## Return the build output directory platform specifies
    @cached_property
    def OutputDir(self):
        return self.Platform.OutputDirectory

    ## Return platform name
    @cached_property
    def Name(self):
        return self.Platform.PlatformName

    ## Return meta-file GUID
    @cached_property
    def Guid(self):
        return self.Platform.Guid

    ## Return platform version
    @cached_property
    def Version(self):
        return self.Platform.Version

    ## Return paths of tools
    @cached_property
    def ToolDefinition(self):
        retVal = self.DataPipe.Get("TOOLDEF")
        if retVal is None:
            retVal = {}
        return retVal

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    @cached_property
    def BuildCommand(self):
        retVal = self.DataPipe.Get("BuildCommand")
        if retVal is None:
            retVal = []
        return retVal

    ## PCD token number map recorded in the data pipe; {} when absent.
    @cached_property
    def PcdTokenNumber(self):
        retVal = self.DataPipe.Get("PCD_TNUM")
        if retVal is None:
            retVal = {}
        return retVal

    ## Override PCD setting (type, value, ...)
    #
    #   @param  ToPcd       The PCD to be overridden
    #   @param  FromPcd     The PCD overriding from
    #
    def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
        #
        # in case there's PCDs coming from FDF file, which have no type given.
        # at this point, ToPcd.Type has the type found from dependent
        # package
        #
        # Mixed PCDs are reported under their original (first) token name.
        TokenCName = ToPcd.TokenCName
        for PcdItem in self.MixedPcd:
            if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in self.MixedPcd[PcdItem]:
                TokenCName = PcdItem[0]
                break
        if FromPcd is not None:
            # Resolve the PCD access type: pending types take the override's
            # type; compatible subset types narrow only for DynamicEx; any
            # other mismatch is a hard build error.
            if ToPcd.Pending and FromPcd.Type:
                ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type\
                and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
                if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
                    ToPcd.Type = FromPcd.Type
            elif ToPcd.Type and FromPcd.Type \
                and ToPcd.Type != FromPcd.Type:
                if Library:
                    Module = str(Module) + " 's library file (" + str(Library) + ")"
                EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
                                ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
                                          % (ToPcd.TokenSpaceGuidCName, TokenCName,
                                             ToPcd.Type, Module, FromPcd.Type, Msg),
                                File=self.MetaFile)

            # Copy over every non-empty field from the overriding PCD.
            if FromPcd.MaxDatumSize:
                ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
                ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
            if FromPcd.DefaultValue:
                ToPcd.DefaultValue = FromPcd.DefaultValue
            if FromPcd.TokenValue:
                ToPcd.TokenValue = FromPcd.TokenValue
            if FromPcd.DatumType:
                ToPcd.DatumType = FromPcd.DatumType
            if FromPcd.SkuInfoList:
                ToPcd.SkuInfoList = FromPcd.SkuInfoList
            if FromPcd.UserDefinedDefaultStoresFlag:
                ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
            # Add Flexible PCD format parse
            if ToPcd.DefaultValue:
                try:
                    ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self._GuidDict)(True)
                except BadExpression as Value:
                    EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
                                    File=self.MetaFile)

            # check the validation of datum
            IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
            if not IsValid:
                EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
                                ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
            ToPcd.validateranges = FromPcd.validateranges
            ToPcd.validlists = FromPcd.validlists
            ToPcd.expressions = FromPcd.expressions
            ToPcd.CustomAttribute = FromPcd.CustomAttribute

        # Derive MaxDatumSize from the default value when the platform did not
        # specify one for a VOID* PCD: L"..." counts UTF-16 chars, {...} counts
        # bytes, plain strings count characters plus terminator handling.
        if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
            EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
                            % (ToPcd.TokenSpaceGuidCName, TokenCName))
            Value = ToPcd.DefaultValue
            if not Value:
                ToPcd.MaxDatumSize = '1'
            elif Value[0] == 'L':
                ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
            elif Value[0] == '{':
                ToPcd.MaxDatumSize = str(len(Value.split(',')))
            else:
                ToPcd.MaxDatumSize = str(len(Value) - 1)

        # apply default SKU for dynamic PCDS if specified one is not available
        if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
            and not ToPcd.SkuInfoList:
            if self.Platform.SkuName in self.Platform.SkuIds:
                SkuName = self.Platform.SkuName
            else:
                SkuName = TAB_DEFAULT
            ToPcd.SkuInfoList = {
                SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
            }

    ## Apply the platform's and the DSC [Components] section's PCD overrides to
    #  a module's PCD list, then fill in derived MaxDatumSize values.
    #
    #   @param  Ma      ModuleAutoGen object of the module
    #   @param  Pcds    dict of the module's PCDs, keyed by (Name, Guid);
    #                   modified in place
    #   @retval list    the (modified) PCD objects
    #
    def ApplyPcdSetting(self, Ma, Pcds, Library=""):
        # for each PCD in module
        Module=Ma.Module
        for Name, Guid in Pcds:
            PcdInModule = Pcds[Name, Guid]
            # find out the PCD setting in platform
            if (Name, Guid) in self.Pcds:
                PcdInPlatform = self.Pcds[Name, Guid]
            else:
                PcdInPlatform = None
            # then override the settings if any
            self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
            # resolve the VariableGuid value
            for SkuId in PcdInModule.SkuInfoList:
                Sku = PcdInModule.SkuInfoList[SkuId]
                if Sku.VariableGuid == '': continue
                Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
                if Sku.VariableGuidValue is None:
                    PackageList = "\n\t".join(str(P) for P in self.PackageList)
                    EdkLogger.error(
                                'build',
                                RESOURCE_NOT_AVAILABLE,
                                "Value of GUID [%s] is not found in" % Sku.VariableGuid,
                                ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
                                                        % (Guid, Name, str(Module)),
                                File=self.MetaFile
                                )

        # override PCD settings with module specific setting
        ModuleScopePcds = self.DataPipe.Get("MOL_PCDS")
        if Module in self.Platform.Modules:
            PlatformModule = self.Platform.Modules[str(Module)]
            PCD_DATA = ModuleScopePcds.get(Ma.Guid,{})
            mPcds = {(pcd.TokenCName,pcd.TokenSpaceGuidCName): pcd for pcd in PCD_DATA}
            for Key in mPcds:
                # Command-line --pcd overrides beat the DSC component scoping.
                if self.BuildOptionPcd:
                    for pcd in self.BuildOptionPcd:
                        (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
                        if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
                            PlatformModule.Pcds[Key].DefaultValue = pcdvalue
                            PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
                            break
                # Locate the module PCD (directly or via its mixed-PCD alias).
                Flag = False
                if Key in Pcds:
                    ToPcd = Pcds[Key]
                    Flag = True
                elif Key in self.MixedPcd:
                    for PcdItem in self.MixedPcd[Key]:
                        if PcdItem in Pcds:
                            ToPcd = Pcds[PcdItem]
                            Flag = True
                            break
                if Flag:
                    self._OverridePcd(ToPcd, mPcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
        # use PCD value to calculate the MaxDatumSize when it is not specified
        for Name, Guid in Pcds:
            Pcd = Pcds[Name, Guid]
            if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
                Pcd.MaxSizeUserSet = None
                Value = Pcd.DefaultValue
                if not Value:
                    Pcd.MaxDatumSize = '1'
                elif Value[0] == 'L':
                    Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
                elif Value[0] == '{':
                    Pcd.MaxDatumSize = str(len(Value.split(',')))
                else:
                    Pcd.MaxDatumSize = str(len(Value) - 1)
        return list(Pcds.values())

    ## Platform PCDs rebuilt from the data-pipe snapshot, keyed by
    #  (TokenCName, TokenSpaceGuidCName).
    @cached_property
    def Pcds(self):
        PlatformPcdData = self.DataPipe.Get("PLA_PCD")
#         for pcd in PlatformPcdData:
#             for skuid in pcd.SkuInfoList:
#                 pcd.SkuInfoList[skuid] = self.CreateSkuInfoFromDict(pcd.SkuInfoList[skuid])
        return {(pcddata.TokenCName,pcddata.TokenSpaceGuidCName):pcddata for pcddata in PlatformPcdData}

    ## Rebuild a SkuInfoClass object from its dict form.
    def CreateSkuInfoFromDict(self,SkuInfoDict):
        return SkuInfoClass(
                    SkuInfoDict.get("SkuIdName"),
                    SkuInfoDict.get("SkuId"),
                    SkuInfoDict.get("VariableName"),
                    SkuInfoDict.get("VariableGuid"),
                    SkuInfoDict.get("VariableOffset"),
                    SkuInfoDict.get("HiiDefaultValue"),
                    SkuInfoDict.get("VpdOffset"),
                    SkuInfoDict.get("DefaultValue"),
                    SkuInfoDict.get("VariableGuidValue"),
                    SkuInfoDict.get("VariableAttribute",""),
                    SkuInfoDict.get("DefaultStore",None)
                )

    ## Mixed (multi-type) PCD map recorded in the data pipe.
    @cached_property
    def MixedPcd(self):
        return self.DataPipe.Get("MixedPcd")

    ## GUID name -> value dictionary recorded in the data pipe; {} when absent.
    @cached_property
    def _GuidDict(self):
        RetVal = self.DataPipe.Get("GuidDict")
        if RetVal is None:
            RetVal = {}
        return RetVal

    ## Command-line (--pcd) PCD overrides recorded in the data pipe.
    @cached_property
    def BuildOptionPcd(self):
        return self.DataPipe.Get("BuildOptPcd")

    ## Merge tool-definition, module, platform, module-type and
    #  platform-module build options for one module.
    #
    #   @retval (BuildOptions dict, BuildRuleOrder string or None)
    #
    def ApplyBuildOption(self,module):
        PlatformOptions = self.DataPipe.Get("PLA_BO")
        ModuleBuildOptions = self.DataPipe.Get("MOL_BO")
        ModuleOptionFromDsc = ModuleBuildOptions.get((module.MetaFile.File,module.MetaFile.Root))
        if ModuleOptionFromDsc:
            ModuleTypeOptions, PlatformModuleOptions = ModuleOptionFromDsc["ModuleTypeOptions"],ModuleOptionFromDsc["PlatformModuleOptions"]
        else:
            ModuleTypeOptions, PlatformModuleOptions = {}, {}
        ToolDefinition = self.DataPipe.Get("TOOLDEF")
        ModuleOptions = self._ExpandBuildOption(module.BuildOptions)
        # The last BUILDRULEORDER found wins (lowest- to highest-priority scan).
        BuildRuleOrder = None
        for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
            for Tool in Options:
                for Attr in Options[Tool]:
                    if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                        BuildRuleOrder = Options[Tool][Attr]

        AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
                       list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
                       list(ToolDefinition.keys()))
        BuildOptions = defaultdict(lambda: defaultdict(str))
        for Tool in AllTools:
            for Options in [ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
                if Tool not in Options:
                    continue
                for Attr in Options[Tool]:
                    #
                    # Do not generate it in Makefile
                    #
                    if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                        continue
                    Value = Options[Tool][Attr]
                    # A TAB_STAR tool applies to every concrete tool.
                    ToolList = [Tool]
                    if Tool == TAB_STAR:
                        ToolList = list(AllTools)
                        ToolList.remove(TAB_STAR)
                    for ExpandedTool in ToolList:
                        # check if override is indicated
                        if Value.startswith('='):
                            BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value[1:])
                        else:
                            if Attr != 'PATH':
                                BuildOptions[ExpandedTool][Attr] += " " + mws.handleWsMacro(Value)
                            else:
                                BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value)
        return BuildOptions, BuildRuleOrder

    ## Resolve a module's library instances from the "DEPS" snapshot into
    #  library build objects.
    def ApplyLibraryInstance(self,module):
        alldeps = self.DataPipe.Get("DEPS")
        if alldeps is None:
            alldeps = {}
        mod_libs = alldeps.get((module.MetaFile.File,module.MetaFile.Root,module.Arch,module.MetaFile.Path),[])
        retVal = []
        for (file_path,root,arch,abs_path) in mod_libs:
            libMetaFile = PathClass(file_path,root)
            libMetaFile.OriginalPath = PathClass(file_path,root)
            libMetaFile.Path = abs_path
            retVal.append(self.Wa.BuildDatabase[libMetaFile, arch, self.Target,self.ToolChain])
        return retVal

    ## Parse build_rule.txt in Conf Directory.
    #
    #   @retval     BuildRule object
    #
    @cached_property
    def BuildRule(self):
        WInfo = self.DataPipe.Get("P_Info")
        RetVal = WInfo.get("BuildRuleFile")
        if RetVal._FileVersion == "":
            RetVal._FileVersion = AutoGenReqBuildRuleVerNum
        return RetVal
| edk2-master | BaseTools/Source/Python/AutoGen/ModuleAutoGenHelper.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
from Workspace.WorkspaceDatabase import BuildDB
from Workspace.WorkspaceCommon import GetModuleLibInstances
import Common.GlobalData as GlobalData
import os
import pickle
from pickle import HIGHEST_PROTOCOL
from Common import EdkLogger
class PCD_DATA():
    ## Lightweight, pickle-friendly snapshot of a single PCD's settings, shipped
    #  through the DataPipe to worker processes.
    def __init__(self,TokenCName,TokenSpaceGuidCName,Type,DatumType,SkuInfoList,DefaultValue,
                 MaxDatumSize,UserDefinedDefaultStoresFlag,validateranges,
                 validlists,expressions,CustomAttribute,TokenValue):
        # Identity of the PCD.
        self.TokenCName, self.TokenSpaceGuidCName = TokenCName, TokenSpaceGuidCName
        # Access type and C datum type.
        self.Type, self.DatumType = Type, DatumType
        # Per-SKU settings and resolved default value.
        self.SkuInfoList, self.DefaultValue = SkuInfoList, DefaultValue
        self.MaxDatumSize = MaxDatumSize
        self.UserDefinedDefaultStoresFlag = UserDefinedDefaultStoresFlag
        # Validation metadata carried over from the DEC/DSC.
        self.validateranges, self.validlists = validateranges, validlists
        self.expressions = expressions
        self.CustomAttribute = CustomAttribute
        self.TokenValue = TokenValue
class DataPipe(object):
    ## Base container for the data passed from the main build process to its
    #  worker processes.
    def __init__(self, BuildDir=None):
        self.BuildDir = BuildDir
        # Key/value store that subclasses fill and serialize.
        self.data_container = dict()
        # Path of the last dump file; empty until dump() is called.
        self.dump_file = ""
class MemoryDataPipe(DataPipe):
    ## In-memory DataPipe that snapshots platform build data and pickles it to
    #  disk so worker processes can reload it.

    def Get(self,key):
        # Returns None when the key was never recorded.
        return self.data_container.get(key)

    def dump(self,file_path):
        ## Persist the container and remember where it was written.
        self.dump_file = file_path
        with open(file_path,'wb') as fd:
            pickle.dump(self.data_container,fd,pickle.HIGHEST_PROTOCOL)

    def load(self,file_path):
        ## Reload a previously dumped container, replacing the current one.
        with open(file_path,'rb') as fd:
            self.data_container = pickle.load(fd)

    @property
    def DataContainer(self):
        return self.data_container

    @DataContainer.setter
    def DataContainer(self,data):
        # NOTE: assignment MERGES the given dict into the container instead of
        # replacing it; FillData below relies on this.
        self.data_container.update(data)

    ## Snapshot everything a worker-process ModuleAutoGen needs from the fully
    #  parsed PlatformInfo into plain, picklable containers.
    def FillData(self,PlatformInfo):
        #Platform Pcds
        self.DataContainer = {
            "PLA_PCD" : [PCD_DATA(
            pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
            pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
            pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
            pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
            for pcd in PlatformInfo.Platform.Pcds.values()]
            }

        #Platform Module Pcds
        ModulePcds = {}
        for m in PlatformInfo.Platform.Modules:
            module = PlatformInfo.Platform.Modules[m]
            m_pcds =  module.Pcds
            if m_pcds:
                ModulePcds[module.Guid] = [PCD_DATA(
                pcd.TokenCName,pcd.TokenSpaceGuidCName,pcd.Type,
                pcd.DatumType,pcd.SkuInfoList,pcd.DefaultValue,
                pcd.MaxDatumSize,pcd.UserDefinedDefaultStoresFlag,pcd.validateranges,
                pcd.validlists,pcd.expressions,pcd.CustomAttribute,pcd.TokenValue)
                for pcd in PlatformInfo.Platform.Modules[m].Pcds.values()]

        self.DataContainer = {"MOL_PCDS":ModulePcds}

        #Module's Library Instance
        ModuleLibs = {}
        libModules = {}
        for m in PlatformInfo.Platform.Modules:
            module_obj = BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain]
            Libs = GetModuleLibInstances(module_obj, PlatformInfo.Platform, BuildDB.BuildObject, PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain,PlatformInfo.MetaFile,EdkLogger)
            for lib in Libs:
                # Record the reverse mapping library -> modules that use it.
                # (setdefault replaces the previous bare try/except append
                # pattern, which silently swallowed every exception.)
                libModules.setdefault(
                    (lib.MetaFile.File,lib.MetaFile.Root,lib.Arch,lib.MetaFile.Path),
                    []).append((m.File,m.Root,module_obj.Arch,m.Path))
            ModuleLibs[(m.File,m.Root,module_obj.Arch,m.Path)] = [(l.MetaFile.File,l.MetaFile.Root,l.Arch,l.MetaFile.Path) for l in Libs]
        self.DataContainer = {"DEPS":ModuleLibs}
        self.DataContainer = {"REFS":libModules}

        #Platform BuildOptions
        platform_build_opt = PlatformInfo.EdkIIBuildOption

        ToolDefinition = PlatformInfo.ToolDefinition
        module_build_opt = {}
        for m in PlatformInfo.Platform.Modules:
            ModuleTypeOptions, PlatformModuleOptions = PlatformInfo.GetGlobalBuildOptions(BuildDB.BuildObject[m,PlatformInfo.Arch,PlatformInfo.BuildTarget,PlatformInfo.ToolChain])
            if ModuleTypeOptions or PlatformModuleOptions:
                module_build_opt.update({(m.File,m.Root): {"ModuleTypeOptions":ModuleTypeOptions, "PlatformModuleOptions":PlatformModuleOptions}})

        self.DataContainer = {"PLA_BO":platform_build_opt,
                              "TOOLDEF":ToolDefinition,
                              "MOL_BO":module_build_opt
                              }

        #Platform Info
        PInfo = {
            "WorkspaceDir":PlatformInfo.Workspace.WorkspaceDir,
            "Target":PlatformInfo.BuildTarget,
            "ToolChain":PlatformInfo.Workspace.ToolChain,
            "BuildRuleFile":PlatformInfo.BuildRule,
            "Arch": PlatformInfo.Arch,
            "ArchList":PlatformInfo.Workspace.ArchList,
            "ActivePlatform":PlatformInfo.MetaFile
            }
        self.DataContainer = {'P_Info':PInfo}

        self.DataContainer = {'M_Name':PlatformInfo.UniqueBaseName}

        self.DataContainer = {"ToolChainFamily": PlatformInfo.ToolChainFamily}

        self.DataContainer = {"BuildRuleFamily": PlatformInfo.BuildRuleFamily}

        self.DataContainer = {"MixedPcd":GlobalData.MixedPcd}

        self.DataContainer = {"BuildOptPcd":GlobalData.BuildOptionPcd}

        self.DataContainer = {"BuildCommand": PlatformInfo.BuildCommand}

        self.DataContainer = {"AsBuildModuleList": PlatformInfo._AsBuildModuleList}

        self.DataContainer = {"G_defines": GlobalData.gGlobalDefines}

        self.DataContainer = {"CL_defines": GlobalData.gCommandLineDefines}

        self.DataContainer = {"gCommandMaxLength": GlobalData.gCommandMaxLength}

        self.DataContainer = {"Env_Var": {k:v for k, v in os.environ.items()}}

        self.DataContainer = {"PackageList": [(dec.MetaFile,dec.Arch) for dec in PlatformInfo.PackageList]}

        self.DataContainer = {"GuidDict": PlatformInfo.Platform._GuidDict}

        self.DataContainer = {"DatabasePath":GlobalData.gDatabasePath}

        self.DataContainer = {"FdfParser": True if GlobalData.gFdfParser else False}

        self.DataContainer = {"LogLevel": EdkLogger.GetLevel()}

        self.DataContainer = {"UseHashCache":GlobalData.gUseHashCache}

        self.DataContainer = {"BinCacheSource":GlobalData.gBinCacheSource}

        self.DataContainer = {"BinCacheDest":GlobalData.gBinCacheDest}

        self.DataContainer = {"EnableGenfdsMultiThread":GlobalData.gEnableGenfdsMultiThread}

        self.DataContainer = {"gPlatformFinalPcds":GlobalData.gPlatformFinalPcds}
| edk2-master | BaseTools/Source/Python/AutoGen/DataPipe.py |
## @file
# Python 'AutoGen' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# Public API of the package: only the AutoGen symbol is exported on
# `from AutoGen import *`.
__all__ = ["AutoGen"]
| edk2-master | BaseTools/Source/Python/AutoGen/__init__.py |
# Copyright (c) 2017 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# This file is used to collect the Variable checking information
#
# #
# Import Modules
#
from struct import pack, unpack
import collections
import copy
from Common.VariableAttributes import VariableAttributes
from Common.Misc import *
import collections
import Common.DataType as DataType
import Common.GlobalData as GlobalData
# Immutable record describing one UEFI variable / HII PCD binding collected
# from the platform DSC (identity, SKU/default-store, GUID/name/offset,
# attribute and default-value information).
var_info = collections.namedtuple("uefi_var", "pcdindex,pcdname,defaultstoragename,skuname,var_name, var_guid, var_offset,var_attribute,pcd_default_value, default_value, data_type,PcdDscLine,StructurePcd")
# Fixed sizes (in bytes) of the serialized on-flash structures used when
# packing the NV variable store.
# NOTE(review): values presumably mirror the EDK II variable store layout in
# MdeModulePkg VariableFormat.h — confirm before changing.
NvStorageHeaderSize = 28
VariableHeaderSize = 32
AuthenticatedVariableHeaderSize = 60
class VariableMgr(object):
def __init__(self, DefaultStoreMap, SkuIdMap):
self.VarInfo = []
self.DefaultStoreMap = DefaultStoreMap
self.SkuIdMap = SkuIdMap
self.VpdRegionSize = 0
self.VpdRegionOffset = 0
self.NVHeaderBuff = None
self.VarDefaultBuff = None
self.VarDeltaBuff = None
    ## Record one var_info namedtuple for later combination/serialization.
    def append_variable(self, uefi_var):
        self.VarInfo.append(uefi_var)
    ## Record the total size of the VPD region.
    def SetVpdRegionMaxSize(self, maxsize):
        self.VpdRegionSize = maxsize
    ## Record the base offset of the VPD region.
    def SetVpdRegionOffset(self, vpdoffset):
        self.VpdRegionOffset = vpdoffset
def PatchNVStoreDefaultMaxSize(self, maxsize):
if not self.NVHeaderBuff:
return ""
self.NVHeaderBuff = self.NVHeaderBuff[:8] + pack("=Q", maxsize)
default_var_bin = VariableMgr.format_data(self.NVHeaderBuff + self.VarDefaultBuff + self.VarDeltaBuff)
value_str = "{"
default_var_bin_strip = [ data.strip("""'""") for data in default_var_bin]
value_str += ",".join(default_var_bin_strip)
value_str += "}"
return value_str
def combine_variable(self):
indexedvarinfo = collections.OrderedDict()
for item in self.VarInfo:
if (item.skuname, item.defaultstoragename, item.var_name, item.var_guid) not in indexedvarinfo:
indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid) ] = []
indexedvarinfo[(item.skuname, item.defaultstoragename, item.var_name, item.var_guid)].append(item)
for key in indexedvarinfo:
sku_var_info_offset_list = indexedvarinfo[key]
sku_var_info_offset_list.sort(key=lambda x:x.PcdDscLine)
FirstOffset = int(sku_var_info_offset_list[0].var_offset, 16) if sku_var_info_offset_list[0].var_offset.upper().startswith("0X") else int(sku_var_info_offset_list[0].var_offset)
fisrtvalue_list = sku_var_info_offset_list[0].default_value.strip("{").strip("}").split(",")
firstdata_type = sku_var_info_offset_list[0].data_type
if firstdata_type in DataType.TAB_PCD_NUMERIC_TYPES:
fisrtdata_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[firstdata_type]]
fisrtdata = fisrtvalue_list[0]
fisrtvalue_list = []
pack_data = pack(fisrtdata_flag, int(fisrtdata, 0))
for data_byte in range(len(pack_data)):
fisrtvalue_list.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
newvalue_list = ["0x00"] * FirstOffset + fisrtvalue_list
for var_item in sku_var_info_offset_list[1:]:
CurOffset = int(var_item.var_offset, 16) if var_item.var_offset.upper().startswith("0X") else int(var_item.var_offset)
CurvalueList = var_item.default_value.strip("{").strip("}").split(",")
Curdata_type = var_item.data_type
if Curdata_type in DataType.TAB_PCD_NUMERIC_TYPES:
data_flag = DataType.PACK_CODE_BY_SIZE[MAX_SIZE_TYPE[Curdata_type]]
data = CurvalueList[0]
CurvalueList = []
pack_data = pack(data_flag, int(data, 0))
for data_byte in range(len(pack_data)):
CurvalueList.append(hex(unpack("B", pack_data[data_byte:data_byte + 1])[0]))
if CurOffset > len(newvalue_list):
newvalue_list = newvalue_list + ["0x00"] * (CurOffset - len(newvalue_list)) + CurvalueList
else:
newvalue_list[CurOffset : CurOffset + len(CurvalueList)] = CurvalueList
newvaluestr = "{" + ",".join(newvalue_list) +"}"
n = sku_var_info_offset_list[0]
indexedvarinfo[key] = [var_info(n.pcdindex, n.pcdname, n.defaultstoragename, n.skuname, n.var_name, n.var_guid, "0x00", n.var_attribute, newvaluestr, newvaluestr, DataType.TAB_VOID,n.PcdDscLine,n.StructurePcd)]
self.VarInfo = [item[0] for item in list(indexedvarinfo.values())]
    ## Pack every variable's default payload and, for each non-default
    #  (sku, default store) combination, the delta against that default.
    #
    #   @retval dict  (skuname, defaultstoragename) -> {pcdindex: (data, var_info)}
    #                 where data is the packed default buffer for the DEFAULT
    #                 entry and a calculated delta for every other entry.
    #
    def process_variable_data(self):
        var_data = collections.defaultdict(collections.OrderedDict)

        # Re-index the collected records: pcdindex -> {(sku, store): var_info}.
        indexedvarinfo = collections.OrderedDict()
        for item in self.VarInfo:
            if item.pcdindex not in indexedvarinfo:
                indexedvarinfo[item.pcdindex] = dict()
            indexedvarinfo[item.pcdindex][(item.skuname, item.defaultstoragename)] = item

        for index in indexedvarinfo:
            sku_var_info = indexedvarinfo[index]

            default_data_buffer = ""
            others_data_buffer = ""
            tail = None
            # NOTE(review): assumes every index has a (DEFAULT, STANDARD)
            # entry; default_sku_default would be None otherwise and the next
            # attribute access would raise — confirm upstream guarantees this.
            default_sku_default = indexedvarinfo[index].get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT))

            if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
                # Non-numeric values may differ in length per SKU; pad the
                # default up to the longest variant with zero bytes.
                var_max_len = max(len(var_item.default_value.split(",")) for var_item in sku_var_info.values())
                if len(default_sku_default.default_value.split(",")) < var_max_len:
                    tail = ",".join("0x00" for i in range(var_max_len-len(default_sku_default.default_value.split(","))))

            default_data_buffer = VariableMgr.PACK_VARIABLES_DATA(default_sku_default.default_value, default_sku_default.data_type, tail)

            # Expand the packed buffer into a tuple of byte values for the
            # later delta calculation.
            default_data_array = ()
            for item in range(len(default_data_buffer)):
                default_data_array += unpack("B", default_data_buffer[item:item + 1])

            var_data[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)][index] = (default_data_buffer, sku_var_info[(DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT)])

            for (skuid, defaultstoragename) in indexedvarinfo[index]:
                tail = None
                if (skuid, defaultstoragename) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
                    continue
                other_sku_other = indexedvarinfo[index][(skuid, defaultstoragename)]

                if default_sku_default.data_type not in DataType.TAB_PCD_NUMERIC_TYPES:
                    # var_max_len is bound above under the same type condition.
                    if len(other_sku_other.default_value.split(",")) < var_max_len:
                        tail = ",".join("0x00" for i in range(var_max_len-len(other_sku_other.default_value.split(","))))

                others_data_buffer = VariableMgr.PACK_VARIABLES_DATA(other_sku_other.default_value, other_sku_other.data_type, tail)

                others_data_array = ()
                for item in range(len(others_data_buffer)):
                    others_data_array += unpack("B", others_data_buffer[item:item + 1])

                # Store only the byte-wise delta relative to the default SKU.
                data_delta = VariableMgr.calculate_delta(default_data_array, others_data_array)

                var_data[(skuid, defaultstoragename)][index] = (data_delta, sku_var_info[(skuid, defaultstoragename)])
        return var_data
def new_process_varinfo(self):
    """Build the NV-store default-value image for all variable PCDs.

    Packs the (DEFAULT sku, STANDARD store) variables into a variable-store
    image, then appends one delta structure per non-default (sku, store)
    pair.  Side effects: fills self.NVHeaderBuff, self.VarDefaultBuff and
    self.VarDeltaBuff.  Returns the whole image as a list of hex strings
    (see VariableMgr.format_data), or [] when there is no variable data.
    """
    self.combine_variable()
    var_data = self.process_variable_data()
    if not var_data:
        return []
    pcds_default_data = var_data.get((DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT), {})
    NvStoreDataBuffer = bytearray()
    # Map pcdindex -> offset of that variable's data inside the store;
    # used below to rebase the per-sku delta offsets.
    var_data_offset = collections.OrderedDict()
    offset = NvStorageHeaderSize
    for default_data, default_info in pcds_default_data.values():
        var_name_buffer = VariableMgr.PACK_VARIABLE_NAME(default_info.var_name)
        vendorguid = default_info.var_guid.split('-')
        if default_info.var_attribute:
            var_attr_value, _ = VariableAttributes.GetVarAttributes(default_info.var_attribute)
        else:
            # 0x07 default attribute value -- presumably NV|BS|RT; confirm
            # against VariableAttributes.
            var_attr_value = 0x07
        DataBuffer = VariableMgr.AlignData(var_name_buffer + default_data)
        data_size = len(DataBuffer)
        # The per-variable header is larger for the authenticated store.
        if GlobalData.gCommandLineDefines.get(TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE, "FALSE").upper() == "TRUE":
            offset += AuthenticatedVariableHeaderSize + len(default_info.var_name.split(","))
        else:
            offset += VariableHeaderSize + len(default_info.var_name.split(","))
        var_data_offset[default_info.pcdindex] = offset
        offset += data_size - len(default_info.var_name.split(","))
        if GlobalData.gCommandLineDefines.get(TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE, "FALSE").upper() == "TRUE":
            var_header_buffer = VariableMgr.PACK_AUTHENTICATED_VARIABLE_HEADER(var_attr_value, len(default_info.var_name.split(",")), len(default_data), vendorguid)
        else:
            var_header_buffer = VariableMgr.PACK_VARIABLE_HEADER(var_attr_value, len(default_info.var_name.split(",")), len(default_data), vendorguid)
        NvStoreDataBuffer += (var_header_buffer + DataBuffer)
    if GlobalData.gCommandLineDefines.get(TAB_DSC_DEFINES_VPD_AUTHENTICATED_VARIABLE_STORE, "FALSE").upper() == "TRUE":
        variable_storage_header_buffer = VariableMgr.PACK_AUTHENTICATED_VARIABLE_STORE_HEADER(len(NvStoreDataBuffer) + 28)
    else:
        variable_storage_header_buffer = VariableMgr.PACK_VARIABLE_STORE_HEADER(len(NvStoreDataBuffer) + 28)
    # Default data part (skuid 0 / defaultstore 0), 8-byte aligned.
    nv_default_part = VariableMgr.AlignData(VariableMgr.PACK_DEFAULT_DATA(0, 0, VariableMgr.unpack_data(variable_storage_header_buffer + NvStoreDataBuffer)), 8)
    data_delta_structure_buffer = bytearray()
    for skuname, defaultstore in var_data:
        if (skuname, defaultstore) == (DataType.TAB_DEFAULT, DataType.TAB_DEFAULT_STORES_DEFAULT):
            continue
        pcds_sku_data = var_data[(skuname, defaultstore)]
        delta_data_set = []
        for pcdindex in pcds_sku_data:
            offset = var_data_offset[pcdindex]
            delta_data, _ = pcds_sku_data[pcdindex]
            # Rebase per-variable delta offsets to store-relative offsets.
            delta_data = [(item[0] + offset, item[1]) for item in delta_data]
            delta_data_set.extend(delta_data)
        data_delta_structure_buffer += VariableMgr.AlignData(self.PACK_DELTA_DATA(skuname, defaultstore, delta_data_set), 8)
    size = len(nv_default_part + data_delta_structure_buffer) + 16
    # Cap reported max size at the VPD region size when one is configured.
    maxsize = self.VpdRegionSize if self.VpdRegionSize else size
    NV_Store_Default_Header = VariableMgr.PACK_NV_STORE_DEFAULT_HEADER(size, maxsize)
    self.NVHeaderBuff = NV_Store_Default_Header
    self.VarDefaultBuff = nv_default_part
    self.VarDeltaBuff = data_delta_structure_buffer
    return VariableMgr.format_data(NV_Store_Default_Header + nv_default_part + data_delta_structure_buffer)
@staticmethod
def format_data(data):
    """Render the bytes of *data* as a list of hex strings, one per byte."""
    byte_values = VariableMgr.unpack_data(data)
    return [hex(byte_value) for byte_value in byte_values]
@staticmethod
def unpack_data(data):
final_data = ()
for item in range(len(data)):
final_data += unpack("B", data[item:item + 1])
return final_data
@staticmethod
def calculate_delta(default, theother):
if len(default) - len(theother) != 0:
EdkLogger.error("build", FORMAT_INVALID, 'The variable data length is not the same for the same PCD.')
data_delta = []
for i in range(len(default)):
if default[i] != theother[i]:
data_delta.append((i, theother[i]))
return data_delta
def dump(self):
    """Return the NV store default data formatted as a C-style byte-array
    initializer string ("{0x..,0x..}"), or "" when there is no data."""
    formatted_bytes = self.new_process_varinfo()
    if not formatted_bytes:
        return ""
    stripped = [entry.strip("""'""") for entry in formatted_bytes]
    return "{" + ",".join(stripped) + "}"
@staticmethod
def PACK_VARIABLE_STORE_HEADER(size):
    """Pack a variable-store header: GUID, UINT32 size, format byte,
    state byte and reserved fields, signed with gEfiVariableGuid."""
    # Signature: gEfiVariableGuid
    Guid = "{ 0xddcf3616, 0x3275, 0x4164, { 0x98, 0xb6, 0xfe, 0x85, 0x70, 0x7f, 0xfe, 0x7d }}"
    Guid = GuidStructureStringToGuidString(Guid)
    GuidBuffer = PackGUID(Guid.split('-'))
    SizeBuffer = pack('=L', size)
    # 0x5A format / 0xFE state -- presumably the "formatted"/"healthy"
    # store markers; confirm against the variable-store definitions.
    FormatBuffer = pack('=B', 0x5A)
    StateBuffer = pack('=B', 0xFE)
    reservedBuffer = pack('=H', 0)
    reservedBuffer += pack('=L', 0)
    return GuidBuffer + SizeBuffer + FormatBuffer + StateBuffer + reservedBuffer
@staticmethod
def PACK_AUTHENTICATED_VARIABLE_STORE_HEADER(size):
    """Pack a variable-store header for the authenticated variable store.

    Same layout as PACK_VARIABLE_STORE_HEADER but signed with
    gEfiAuthenticatedVariableGuid.

    Fix: decorated with @staticmethod for consistency with every sibling
    PACK_* helper -- the function takes no self/cls and is invoked through
    the class (VariableMgr.PACK_AUTHENTICATED_VARIABLE_STORE_HEADER(...)),
    and would previously have broken if called on an instance.
    """
    # Signature: gEfiAuthenticatedVariableGuid
    Guid = "{ 0xaaf32c78, 0x947b, 0x439a, { 0xa1, 0x80, 0x2e, 0x14, 0x4e, 0xc3, 0x77, 0x92 }}"
    Guid = GuidStructureStringToGuidString(Guid)
    GuidBuffer = PackGUID(Guid.split('-'))
    SizeBuffer = pack('=L', size)
    FormatBuffer = pack('=B', 0x5A)
    StateBuffer = pack('=B', 0xFE)
    reservedBuffer = pack('=H', 0)
    reservedBuffer += pack('=L', 0)
    return GuidBuffer + SizeBuffer + FormatBuffer + StateBuffer + reservedBuffer
@staticmethod
def PACK_NV_STORE_DEFAULT_HEADER(size, maxsize):
Signature = pack('=B', ord('N'))
Signature += pack("=B", ord('S'))
Signature += pack("=B", ord('D'))
Signature += pack("=B", ord('B'))
SizeBuffer = pack("=L", size)
MaxSizeBuffer = pack("=Q", maxsize)
return Signature + SizeBuffer + MaxSizeBuffer
@staticmethod
def PACK_VARIABLE_HEADER(attribute, namesize, datasize, vendorguid):
    """Pack a (non-authenticated) variable header.

    @param attribute   UINT32 variable attribute bits
    @param namesize    UINT32 size of the variable name in bytes
    @param datasize    UINT32 size of the variable data in bytes
    @param vendorguid  GUID fields list, as produced by guid.split('-')
    """
    Buffer = pack('=H', 0x55AA) # pack StartID
    Buffer += pack('=B', 0x3F)  # pack State
    Buffer += pack('=B', 0)     # pack reserved
    Buffer += pack('=L', attribute)
    Buffer += pack('=L', namesize)
    Buffer += pack('=L', datasize)
    Buffer += PackGUID(vendorguid)
    return Buffer
@staticmethod
def PACK_AUTHENTICATED_VARIABLE_HEADER(attribute, namesize, datasize, vendorguid):
    """Pack an authenticated variable header.

    Same fields as PACK_VARIABLE_HEADER plus a zeroed MonotonicCount,
    TimeStamp and PubKeyIndex, as required by the authenticated
    variable-store layout.
    """
    Buffer = pack('=H', 0x55AA) # pack StartID
    Buffer += pack('=B', 0x3F)  # pack State
    Buffer += pack('=B', 0)     # pack reserved
    Buffer += pack('=L', attribute)
    Buffer += pack('=Q', 0)     # pack MonotonicCount
    Buffer += pack('=HBBBBBBLhBB', # pack TimeStamp (all fields zero)
                   0,  # UINT16 Year
                   0,  # UINT8  Month
                   0,  # UINT8  Day
                   0,  # UINT8  Hour
                   0,  # UINT8  Minute
                   0,  # UINT8  Second
                   0,  # UINT8  Pad1
                   0,  # UINT32 Nanosecond
                   0,  # INT16  TimeZone
                   0,  # UINT8  Daylight
                   0)  # UINT8  Pad2
    Buffer += pack('=L', 0)     # pack PubKeyIndex
    Buffer += pack('=L', namesize)
    Buffer += pack('=L', datasize)
    Buffer += PackGUID(vendorguid)
    return Buffer
@staticmethod
def PACK_VARIABLES_DATA(var_value, data_type, tail = None):
    """Pack a PCD value string into bytes according to its data type.

    @param var_value  value string: "{0x..,0x..}" byte list for VOID*,
                      "TRUE"/"1"/"FALSE" for BOOLEAN, or an integer
                      expression for the UINTn types
    @param data_type  DataType.TAB_VOID / "BOOLEAN" / DataType.TAB_UINT8..64
    @param tail       optional extra comma-separated hex bytes used to pad
                      a VOID* value up to the longest sku variant

    @retval bytearray packed value (empty for an unknown data_type)

    Fix: dropped the local byte counter that was accumulated in every
    branch but never read or returned.
    """
    Buffer = bytearray()
    if data_type == DataType.TAB_VOID:
        for value_char in var_value.strip("{").strip("}").split(","):
            Buffer += pack("=B", int(value_char, 16))
        if tail:
            # Zero-padding bytes appended by the caller for shorter skus.
            for value_char in tail.split(","):
                Buffer += pack("=B", int(value_char, 16))
    elif data_type == "BOOLEAN":
        Buffer += pack("=B", True) if var_value.upper() in ["TRUE", "1"] else pack("=B", False)
    elif data_type == DataType.TAB_UINT8:
        Buffer += pack("=B", GetIntegerValue(var_value))
    elif data_type == DataType.TAB_UINT16:
        Buffer += pack("=H", GetIntegerValue(var_value))
    elif data_type == DataType.TAB_UINT32:
        Buffer += pack("=L", GetIntegerValue(var_value))
    elif data_type == DataType.TAB_UINT64:
        Buffer += pack("=Q", GetIntegerValue(var_value))
    return Buffer
@staticmethod
def PACK_DEFAULT_DATA(defaultstoragename, skuid, var_value):
Buffer = bytearray()
Buffer += pack("=L", 4+8+8)
Buffer += pack("=Q", int(skuid))
Buffer += pack("=Q", int(defaultstoragename))
for item in var_value:
Buffer += pack("=B", item)
Buffer = pack("=L", len(Buffer)+4) + Buffer
return Buffer
def GetSkuId(self, skuname):
    """Return the numeric sku id for *skuname*, or None when unknown."""
    if skuname in self.SkuIdMap:
        # Map entries are tuples whose first element is the id.
        return self.SkuIdMap.get(skuname)[0]
    return None
def GetDefaultStoreId(self, dname):
    """Return the numeric default-store id for *dname*, or None when unknown."""
    if dname in self.DefaultStoreMap:
        # Map entries are tuples whose first element is the id.
        return self.DefaultStoreMap.get(dname)[0]
    return None
def PACK_DELTA_DATA(self, skuname, defaultstoragename, delta_list):
    """Pack a delta record for one (sku, default store) pair.

    @param skuname            sku name, resolved to an id via GetSkuId
    @param defaultstoragename default-store name, resolved via GetDefaultStoreId
    @param delta_list         list of (offset, value) byte deltas against
                              the DEFAULT store image
    """
    skuid = self.GetSkuId(skuname)
    defaultstorageid = self.GetDefaultStoreId(defaultstoragename)
    Buffer = bytearray()
    Buffer += pack("=L", 4+8+8)               # HeaderSize
    Buffer += pack("=Q", int(skuid))
    Buffer += pack("=Q", int(defaultstorageid))
    for (delta_offset, value) in delta_list:
        Buffer += pack("=L", delta_offset)
        # Overwrite the final byte of the just-packed 32-bit offset with
        # the data byte: each entry is 3 bytes of offset + 1 byte of value.
        Buffer = Buffer[:-1] + pack("=B", value)
    # Prepend total length, including the 4-byte length field itself.
    Buffer = pack("=L", len(Buffer) + 4) + Buffer
    return Buffer
@staticmethod
def AlignData(data, align = 4):
mybuffer = data
if (len(data) % align) > 0:
for i in range(align - (len(data) % align)):
mybuffer += pack("=B", 0)
return mybuffer
@staticmethod
def PACK_VARIABLE_NAME(var_name):
Buffer = bytearray()
for name_char in var_name.strip("{").strip("}").split(","):
Buffer += pack("=B", int(name_char, 16))
return Buffer
| edk2-master | BaseTools/Source/Python/AutoGen/GenVar.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import multiprocessing as mp
import threading
from Common.Misc import PathClass
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo,AutoGenInfo
import Common.GlobalData as GlobalData
import Common.EdkLogger as EdkLogger
import os
from Common.MultipleWorkspace import MultipleWorkspace as mws
from AutoGen.AutoGen import AutoGen
from Workspace.WorkspaceDatabase import BuildDB
try:
from queue import Empty
except:
from Queue import Empty
import traceback
import sys
from AutoGen.DataPipe import MemoryDataPipe
import logging
import time
def clearQ(q):
    """Drain every pending item from queue *q*, discarding the items."""
    while True:
        try:
            q.get_nowait()
        except Empty:
            return
class LogAgent(threading.Thread):
    """Thread that replays log records from a queue through local handlers.

    Build worker processes cannot use the parent's logging handlers
    directly, so they push LogRecord objects into log_q; this agent
    emits them on stdout/stderr (and an optional log file) in the
    parent process.
    """
    def __init__(self, log_q, log_level, log_file=None):
        # log_q:     queue of logging.LogRecord objects from workers
        # log_level: level applied to all three local loggers
        # log_file:  optional file path; every record is mirrored there
        super(LogAgent, self).__init__()
        self.log_q = log_q
        self.log_level = log_level
        self.log_file = log_file
    def InitLogger(self):
        """Create the local debug/info/error loggers and their handlers."""
        # For DEBUG level (All DEBUG_0~9 are applicable)
        self._DebugLogger_agent = logging.getLogger("tool_debug_agent")
        _DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")
        self._DebugLogger_agent.setLevel(self.log_level)
        _DebugChannel = logging.StreamHandler(sys.stdout)
        _DebugChannel.setFormatter(_DebugFormatter)
        self._DebugLogger_agent.addHandler(_DebugChannel)
        # For VERBOSE, INFO, WARN level
        self._InfoLogger_agent = logging.getLogger("tool_info_agent")
        _InfoFormatter = logging.Formatter("%(message)s")
        self._InfoLogger_agent.setLevel(self.log_level)
        _InfoChannel = logging.StreamHandler(sys.stdout)
        _InfoChannel.setFormatter(_InfoFormatter)
        self._InfoLogger_agent.addHandler(_InfoChannel)
        # For ERROR level
        self._ErrorLogger_agent = logging.getLogger("tool_error_agent")
        _ErrorFormatter = logging.Formatter("%(message)s")
        self._ErrorLogger_agent.setLevel(self.log_level)
        _ErrorCh = logging.StreamHandler(sys.stderr)
        _ErrorCh.setFormatter(_ErrorFormatter)
        self._ErrorLogger_agent.addHandler(_ErrorCh)
        # Mirror all three loggers into the log file when one was given;
        # a stale file from a previous run is removed first.
        if self.log_file:
            if os.path.exists(self.log_file):
                os.remove(self.log_file)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_DebugFormatter)
            self._DebugLogger_agent.addHandler(_Ch)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_InfoFormatter)
            self._InfoLogger_agent.addHandler(_Ch)
            _Ch = logging.FileHandler(self.log_file)
            _Ch.setFormatter(_ErrorFormatter)
            self._ErrorLogger_agent.addHandler(_Ch)
    def run(self):
        """Consume records until the None sentinel arrives (see kill())."""
        self.InitLogger()
        while True:
            log_message = self.log_q.get()
            if log_message is None:
                break
            # Route each record by the name of its originating logger;
            # unknown names fall back to the info logger.
            if log_message.name == "tool_error":
                self._ErrorLogger_agent.log(log_message.levelno, log_message.getMessage())
            elif log_message.name == "tool_info":
                self._InfoLogger_agent.log(log_message.levelno, log_message.getMessage())
            elif log_message.name == "tool_debug":
                self._DebugLogger_agent.log(log_message.levelno, log_message.getMessage())
            else:
                self._InfoLogger_agent.log(log_message.levelno, log_message.getMessage())
    def kill(self):
        """Ask the agent thread to stop by queueing the None sentinel."""
        self.log_q.put(None)
class AutoGenManager(threading.Thread):
    """Supervisor thread for the AutoGen worker processes.

    Reads status strings from feedback_q: workers send per-task names on
    failure, "QueueEmpty" when the task queue is drained, and "Done" when
    a worker exits.  Once every worker reports "Done" the shared queues
    are drained and the workers joined.
    """
    def __init__(self, autogen_workers, feedback_q, error_event):
        # autogen_workers: list of AutoGenWorkerInProcess instances
        # feedback_q:      queue of status messages from the workers
        # error_event:     event used to tell all workers to stop
        super(AutoGenManager, self).__init__()
        self.autogen_workers = autogen_workers
        self.feedback_q = feedback_q
        self.Status = True  # set to False when any worker reports an error
        self.error_event = error_event
    def run(self):
        """Process worker feedback until all workers finish or kill() is called."""
        try:
            fin_num = 0
            while True:
                badnews = self.feedback_q.get()
                if badnews is None:
                    # Sentinel from kill(): stop monitoring.
                    break
                if badnews == "Done":
                    fin_num += 1
                elif badnews == "QueueEmpty":
                    # Task queue drained: ask the remaining workers to finish.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
                    self.TerminateWorkers()
                else:
                    # Any other message is a task failure report.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), badnews))
                    self.Status = False
                    self.TerminateWorkers()
                if fin_num == len(self.autogen_workers):
                    # Every worker has exited: clean up and join them.
                    self.clearQueue()
                    for w in self.autogen_workers:
                        w.join()
                    break
        except Exception:
            return
    def clearQueue(self):
        """Drain the shared queues and collect the workers' cache status."""
        taskq = self.autogen_workers[0].module_queue
        logq = self.autogen_workers[0].log_q
        clearQ(taskq)
        clearQ(self.feedback_q)
        clearQ(logq)
        # Copy the cache queue items to the parent thread before clearing;
        # each worker terminates its stream with a "CacheDone" marker.
        cacheq = self.autogen_workers[0].cache_q
        try:
            cache_num = 0
            while True:
                item = cacheq.get()
                if item == "CacheDone":
                    cache_num += 1
                else:
                    GlobalData.gModuleAllCacheStatus.add(item)
                if cache_num == len(self.autogen_workers):
                    break
        except:
            print("cache_q error")
    def TerminateWorkers(self):
        """Signal every worker process to stop via the shared error event."""
        self.error_event.set()
    def kill(self):
        """Stop this manager thread by queueing the None sentinel."""
        self.feedback_q.put(None)
class AutoGenWorkerInProcess(mp.Process):
    """Worker process that runs per-module AutoGen tasks from a queue.

    Each worker restores the build context from a MemoryDataPipe file,
    then loops: pop a module description from module_queue, run its
    AutoGen steps (cache checks, code/makefile generation) and report
    results through feedback_q and cache_q.
    """
    def __init__(self, module_queue, data_pipe_file_path, feedback_q, file_lock, cache_q, log_q, error_event):
        # module_queue:        queue of module descriptions to process
        # data_pipe_file_path: file holding the serialized build context
        # feedback_q:          status reports back to AutoGenManager
        # file_lock:           inter-process lock guarding shared file access
        # cache_q:             per-module cache hit/miss results
        # log_q:               queue feeding the parent's LogAgent
        # error_event:         set by the manager to abort all workers
        mp.Process.__init__(self)
        self.module_queue = module_queue
        self.data_pipe_file_path = data_pipe_file_path
        self.data_pipe = None
        self.feedback_q = feedback_q
        self.PlatformMetaFileSet = {}
        self.file_lock = file_lock
        self.cache_q = cache_q
        self.log_q = log_q
        self.error_event = error_event
    def GetPlatformMetaFile(self, filepath, root):
        """Return the cached platform meta-file entry for (filepath, root),
        creating it on first use."""
        try:
            return self.PlatformMetaFileSet[(filepath, root)]
        except:
            self.PlatformMetaFileSet[(filepath, root)] = filepath
            return self.PlatformMetaFileSet[(filepath, root)]
    def run(self):
        """Worker main loop: restore build context, then process modules
        until the queue's None sentinel or the error event is seen."""
        try:
            taskname = "Init"
            # Load the shared build context produced by the main process.
            with self.file_lock:
                try:
                    self.data_pipe = MemoryDataPipe()
                    self.data_pipe.load(self.data_pipe_file_path)
                except:
                    self.feedback_q.put(taskname + ":" + "load data pipe %s failed." % self.data_pipe_file_path)
            EdkLogger.LogClientInitialize(self.log_q)
            loglevel = self.data_pipe.Get("LogLevel")
            if not loglevel:
                loglevel = EdkLogger.INFO
            EdkLogger.SetLevel(loglevel)
            target = self.data_pipe.Get("P_Info").get("Target")
            toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
            archlist = self.data_pipe.Get("P_Info").get("ArchList")
            active_p = self.data_pipe.Get("P_Info").get("ActivePlatform")
            workspacedir = self.data_pipe.Get("P_Info").get("WorkspaceDir")
            PackagesPath = os.getenv("PACKAGES_PATH")
            mws.setWs(workspacedir, PackagesPath)
            self.Wa = WorkSpaceInfo(
                workspacedir, active_p, target, toolchain, archlist
            )
            self.Wa._SrcTimeStamp = self.data_pipe.Get("Workspace_timestamp")
            # Mirror the parent process's global build state into this worker.
            GlobalData.gGlobalDefines = self.data_pipe.Get("G_defines")
            GlobalData.gCommandLineDefines = self.data_pipe.Get("CL_defines")
            GlobalData.gCommandMaxLength = self.data_pipe.Get('gCommandMaxLength')
            # NOTE(review): restores the parent's environment via os.environ
            # internals (_data) -- implementation-specific but intentional here.
            os.environ._data = self.data_pipe.Get("Env_Var")
            GlobalData.gWorkspace = workspacedir
            GlobalData.gDisableIncludePathCheck = False
            GlobalData.gFdfParser = self.data_pipe.Get("FdfParser")
            GlobalData.gDatabasePath = self.data_pipe.Get("DatabasePath")
            # Build-cache configuration and per-worker cache bookkeeping.
            GlobalData.gUseHashCache = self.data_pipe.Get("UseHashCache")
            GlobalData.gBinCacheSource = self.data_pipe.Get("BinCacheSource")
            GlobalData.gBinCacheDest = self.data_pipe.Get("BinCacheDest")
            GlobalData.gPlatformHashFile = self.data_pipe.Get("PlatformHashFile")
            GlobalData.gModulePreMakeCacheStatus = dict()
            GlobalData.gModuleMakeCacheStatus = dict()
            GlobalData.gHashChainStatus = dict()
            GlobalData.gCMakeHashFile = dict()
            GlobalData.gModuleHashFile = dict()
            GlobalData.gFileHashDict = dict()
            GlobalData.gEnableGenfdsMultiThread = self.data_pipe.Get("EnableGenfdsMultiThread")
            GlobalData.gPlatformFinalPcds = self.data_pipe.Get("gPlatformFinalPcds")
            GlobalData.file_lock = self.file_lock
            CommandTarget = self.data_pipe.Get("CommandTarget")
            # Rebuild the command-line PCD override list in the form
            # "TokenSpace.PcdName[.Field]=Value".
            pcd_from_build_option = []
            for pcd_tuple in self.data_pipe.Get("BuildOptPcd"):
                pcd_id = ".".join((pcd_tuple[0], pcd_tuple[1]))
                if pcd_tuple[2].strip():
                    pcd_id = ".".join((pcd_id, pcd_tuple[2]))
                pcd_from_build_option.append("=".join((pcd_id, pcd_tuple[3])))
            GlobalData.BuildOptionPcd = pcd_from_build_option
            module_count = 0
            FfsCmd = self.data_pipe.Get("FfsCommand")
            if FfsCmd is None:
                FfsCmd = {}
            GlobalData.FfsCmd = FfsCmd
            PlatformMetaFile = self.GetPlatformMetaFile(self.data_pipe.Get("P_Info").get("ActivePlatform"),
                                                        self.data_pipe.Get("P_Info").get("WorkspaceDir"))
            while True:
                if self.error_event.is_set():
                    # Manager requested an abort.
                    break
                module_count += 1
                try:
                    module_file, module_root, module_path, module_basename, module_originalpath, module_arch, IsLib = self.module_queue.get_nowait()
                except Empty:
                    # Queue momentarily empty; poll again shortly.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Fake Empty."))
                    time.sleep(0.01)
                    continue
                if module_file is None:
                    # End-of-queue sentinel: report and keep polling so the
                    # sentinel-based shutdown handshake can complete.
                    EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Worker get the last item in the queue."))
                    self.feedback_q.put("QueueEmpty")
                    time.sleep(0.01)
                    continue
                modulefullpath = os.path.join(module_root, module_file)
                taskname = " : ".join((modulefullpath, module_arch))
                module_metafile = PathClass(module_file, module_root)
                if module_path:
                    module_metafile.Path = module_path
                if module_basename:
                    module_metafile.BaseName = module_basename
                if module_originalpath:
                    module_metafile.OriginalPath = PathClass(module_originalpath, module_root)
                arch = module_arch
                target = self.data_pipe.Get("P_Info").get("Target")
                toolchain = self.data_pipe.Get("P_Info").get("ToolChain")
                Ma = ModuleAutoGen(self.Wa, module_metafile, target, toolchain, arch, PlatformMetaFile, self.data_pipe)
                Ma.IsLibrary = IsLib
                # SourceFileList calling sequence impact the makefile string sequence.
                # Create cached SourceFileList here to unify its calling sequence for both
                # CanSkipbyPreMakeCache and CreateCodeFile/CreateMakeFile.
                RetVal = Ma.SourceFileList
                if GlobalData.gUseHashCache and not GlobalData.gBinCacheDest and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyPreMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)
                    if CacheResult:
                        # Pre-make cache hit: skip generation for this module.
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "PreMakeCache", False))
                Ma.CreateCodeFile(False)
                Ma.CreateMakeFile(False, GenFfsList=FfsCmd.get((Ma.MetaFile.Path, Ma.Arch), []))
                Ma.CreateAsBuiltInf()
                if GlobalData.gBinCacheSource and CommandTarget in [None, "", "all"]:
                    try:
                        CacheResult = Ma.CanSkipbyMakeCache()
                    except:
                        CacheResult = False
                        self.feedback_q.put(taskname)
                    if CacheResult:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", True))
                        continue
                    else:
                        self.cache_q.put((Ma.MetaFile.Path, Ma.Arch, "MakeCache", False))
        except Exception as e:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), str(e)))
            self.feedback_q.put(taskname)
        finally:
            # Always tell the manager this worker is done and terminate the
            # cache result stream, even after an error.
            EdkLogger.debug(EdkLogger.DEBUG_9, "Worker %s: %s" % (os.getpid(), "Done"))
            self.feedback_q.put("Done")
            self.cache_q.put("CacheDone")
    def printStatus(self):
        """Print debugging statistics about this worker's caches and database."""
        print("Processs ID: %d Run %d modules in AutoGen " % (os.getpid(), len(AutoGen.Cache())))
        print("Processs ID: %d Run %d modules in AutoGenInfo " % (os.getpid(), len(AutoGenInfo.GetCache())))
        # Group build objects by meta-file type for a per-kind summary.
        groupobj = {}
        for buildobj in BuildDB.BuildObject.GetCache().values():
            if str(buildobj).lower().endswith("dec"):
                try:
                    groupobj['dec'].append(str(buildobj))
                except:
                    groupobj['dec'] = [str(buildobj)]
            if str(buildobj).lower().endswith("dsc"):
                try:
                    groupobj['dsc'].append(str(buildobj))
                except:
                    groupobj['dsc'] = [str(buildobj)]
            if str(buildobj).lower().endswith("inf"):
                try:
                    groupobj['inf'].append(str(buildobj))
                except:
                    groupobj['inf'] = [str(buildobj)]
        print("Processs ID: %d Run %d pkg in WDB " % (os.getpid(), len(groupobj.get("dec", []))))
        print("Processs ID: %d Run %d pla in WDB " % (os.getpid(), len(groupobj.get("dsc", []))))
        print("Processs ID: %d Run %d inf in WDB " % (os.getpid(), len(groupobj.get("inf", []))))
| edk2-master | BaseTools/Source/Python/AutoGen/AutoGenWorker.py |
## @file
# Generate AutoGen.h, AutoGen.c and *.depex files
#
# Copyright (c) 2007 - 2019, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2018, Hewlett Packard Enterprise Development, L.P.<BR>
# Copyright (c) 2019, American Megatrends, Inc. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
from Common.DataType import TAB_STAR
class AutoGen(object):
    """Base class implementing the instance cache for AutoGen objects.

    Constructing an AutoGen (or subclass) with an already-seen
    (Target, Toolchain, Arch, MetaFile) key returns the previously
    created instance instead of building a new one.
    """

    # (BuildTarget, ToolChain, ARCH, platform file) -> AutoGen object,
    # shared by every child class.
    __ObjectCache = {}

    def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        """Factory: return the cached instance for the key, creating one on first use.

        Workspace, *args and **kwargs are accepted for subclass __init__
        signatures but do not participate in the cache key.
        """
        Key = (Target, Toolchain, Arch, MetaFile)
        try:
            # Cache hit: hand back the existing object.
            return cls.__ObjectCache[Key]
        except KeyError:
            # First request for this key: create, remember, return.
            Instance = super(AutoGen, cls).__new__(cls)
            cls.__ObjectCache[Key] = Instance
            return Instance

    def __hash__(self):
        """Hash on the platform/meta file path."""
        return hash(self.MetaFile)

    def __str__(self):
        """The string form is the meta file path."""
        return str(self.MetaFile)

    def __eq__(self, Other):
        """Equal when Other is truthy and matches this object's meta file."""
        if not Other:
            return Other
        return self.MetaFile == Other

    @classmethod
    def Cache(cls):
        """Expose the shared object cache."""
        return cls.__ObjectCache
#
# The priority list while override build option
#
# Each key is a 5-digit bitmask string: one digit per field of a
# TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE build-option key, 1 when the
# field is given explicitly and 0 when it is the wildcard (*).  The value
# is the priority (higher wins) used by CalculatePriorityValue().
#
PrioList = {"0x11111" : 16,     #  TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
            "0x01111" : 15,     #  ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x10111" : 14,     #  TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x00111" : 13,     #  ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
            "0x11011" : 12,     #  TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x01011" : 11,     #  ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
            "0x10011" : 10,     #  TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x00011" : 9,      #  ******_*********_****_COMMANDTYPE_ATTRIBUTE
            "0x11101" : 8,      #  TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x01101" : 7,      #  ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
            "0x10101" : 6,      #  TARGET_*********_ARCH_***********_ATTRIBUTE
            "0x00101" : 5,      #  ******_*********_ARCH_***********_ATTRIBUTE
            "0x11001" : 4,      #  TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x01001" : 3,      #  ******_TOOLCHAIN_****_***********_ATTRIBUTE
            "0x10001" : 2,      #  TARGET_*********_****_***********_ATTRIBUTE
            "0x00001" : 1}      #  ******_*********_****_***********_ATTRIBUTE (Lowest)
def CalculatePriorityValue(Key):
    """Calculate the priority value of a build-option key.

    @param Key  Build option definition of the form
                TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE

    @retval     Priority value from PrioList (higher wins).
    """
    Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
    # Clear the bitmask digit that corresponds to each wildcard field.
    WildcardMasks = (0x01111, 0x10111, 0x11011, 0x11101, 0x11110)
    PriorityValue = 0x11111
    for FieldValue, Mask in zip((Target, ToolChain, Arch, CommandType, Attr), WildcardMasks):
        if FieldValue == TAB_STAR:
            PriorityValue &= Mask
    return PrioList["0x%0.5x" % PriorityValue]
| edk2-master | BaseTools/Source/Python/AutoGen/AutoGen.py |
## @file
# Build cache intermediate result and state
#
# Copyright (c) 2019 - 2020, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from Common.caching import cached_property
import Common.EdkLogger as EdkLogger
import Common.LongFilePathOs as os
from Common.BuildToolError import *
from Common.Misc import SaveFileOnChange, PathClass
from Common.Misc import TemplateString
import sys
# Path -> file-existence cache.  NOTE(review): not referenced by the code
# visible in this file; presumably shared with other BaseTools modules.
gIsFileMap = {}
# Sentinel line appended to a .deps file once it has been normalized, so it
# is not processed a second time.
DEP_FILE_TAIL = "# Updated \n"
class IncludesAutoGen():
""" This class is to manage the dependent files witch are used in Makefile to support incremental build.
1. C files:
1. MSVS.
cl.exe has a build option /showIncludes to display include files on stdout. Build tool captures
that messages and generate dependency files, .deps files.
2. CLANG and GCC
-MMD -MF build option are used to generate dependency files by compiler. Build tool updates the
.deps files.
2. ASL files:
1. Trim find out all the included files with asl specific include format and generate .trim.deps file.
2. ASL PP use c preprocessor to find out all included files with #include format and generate a .deps file
3. build tool updates the .deps file
3. ASM files (.asm, .s or .nasm):
1. Trim find out all the included files with asl specific include format and generate .trim.deps file.
2. ASM PP use c preprocessor to find out all included files with #include format and generate a deps file
3. build tool updates the .deps file
"""
def __init__(self, makefile_folder, ModuleAuto):
self.d_folder = makefile_folder
self.makefile_folder = makefile_folder
self.module_autogen = ModuleAuto
self.ToolChainFamily = ModuleAuto.ToolChainFamily
self.workspace = ModuleAuto.WorkspaceDir
def CreateModuleDeps(self):
SaveFileOnChange(os.path.join(self.makefile_folder,"deps.txt"),"\n".join(self.DepsCollection),False)
def CreateDepsInclude(self):
deps_file = {'deps_file':self.deps_files}
MakePath = self.module_autogen.BuildOption.get('MAKE', {}).get('PATH')
if not MakePath:
EdkLogger.error("build", PARAMETER_MISSING, Message="No Make path available.")
elif "nmake" in MakePath:
_INCLUDE_DEPS_TEMPLATE = TemplateString('''
${BEGIN}
!IF EXIST(${deps_file})
!INCLUDE ${deps_file}
!ENDIF
${END}
''')
else:
_INCLUDE_DEPS_TEMPLATE = TemplateString('''
${BEGIN}
-include ${deps_file}
${END}
''')
try:
deps_include_str = _INCLUDE_DEPS_TEMPLATE.Replace(deps_file)
except Exception as e:
print(e)
SaveFileOnChange(os.path.join(self.makefile_folder,"dependency"),deps_include_str,False)
def CreateDepsTarget(self):
SaveFileOnChange(os.path.join(self.makefile_folder,"deps_target"),"\n".join([item +":" for item in self.DepsCollection]),False)
@cached_property
def deps_files(self):
""" Get all .deps file under module build folder. """
deps_files = []
for root, _, files in os.walk(self.d_folder, topdown=False):
for name in files:
if not name.endswith(".deps"):
continue
abspath = os.path.join(root, name)
deps_files.append(abspath)
return deps_files
@cached_property
def DepsCollection(self):
""" Collect all the dependency files list from all .deps files under a module's build folder """
includes = set()
targetname = [item[0].Name for item in self.TargetFileList.values()]
for abspath in self.deps_files:
try:
with open(abspath,"r") as fd:
lines = fd.readlines()
firstlineitems = lines[0].split(": ")
dependency_file = firstlineitems[1].strip(" \\\n")
dependency_file = dependency_file.strip('''"''')
if dependency_file:
if os.path.normpath(dependency_file +".deps") == abspath:
continue
filename = os.path.basename(dependency_file).strip()
if filename not in targetname:
includes.add(dependency_file.strip())
for item in lines[1:]:
if item == DEP_FILE_TAIL:
continue
dependency_file = item.strip(" \\\n")
dependency_file = dependency_file.strip('''"''')
if dependency_file == '':
continue
if os.path.normpath(dependency_file +".deps") == abspath:
continue
filename = os.path.basename(dependency_file).strip()
if filename in targetname:
continue
includes.add(dependency_file.strip())
except Exception as e:
EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
continue
rt = sorted(list(set([item.strip(' " \\\n') for item in includes])))
return rt
@cached_property
def SourceFileList(self):
""" Get a map of module's source files name to module's source files path """
source = {os.path.basename(item.File):item.Path for item in self.module_autogen.SourceFileList}
middle_file = {}
for afile in source:
if afile.upper().endswith(".VFR"):
middle_file.update({afile.split(".")[0]+".c":os.path.join(self.module_autogen.DebugDir,afile.split(".")[0]+".c")})
if afile.upper().endswith((".S","ASM")):
middle_file.update({afile.split(".")[0]+".i":os.path.join(self.module_autogen.OutputDir,afile.split(".")[0]+".i")})
if afile.upper().endswith(".ASL"):
middle_file.update({afile.split(".")[0]+".i":os.path.join(self.module_autogen.OutputDir,afile.split(".")[0]+".i")})
source.update({"AutoGen.c":os.path.join(self.module_autogen.OutputDir,"AutoGen.c")})
source.update(middle_file)
return source
@cached_property
def HasNamesakeSourceFile(self):
source_base_name = set([os.path.basename(item.File) for item in self.module_autogen.SourceFileList])
rt = len(source_base_name) != len(self.module_autogen.SourceFileList)
return rt
@cached_property
def CcPPCommandPathSet(self):
rt = set()
rt.add(self.module_autogen.BuildOption.get('CC',{}).get('PATH'))
rt.add(self.module_autogen.BuildOption.get('ASLCC',{}).get('PATH'))
rt.add(self.module_autogen.BuildOption.get('ASLPP',{}).get('PATH'))
rt.add(self.module_autogen.BuildOption.get('VFRPP',{}).get('PATH'))
rt.add(self.module_autogen.BuildOption.get('PP',{}).get('PATH'))
rt.add(self.module_autogen.BuildOption.get('APP',{}).get('PATH'))
rt.discard(None)
return rt
@cached_property
def TargetFileList(self):
""" Get a map of module's target name to a tuple of module's targets path and whose input file path """
targets = {}
targets["AutoGen.obj"] = (PathClass(os.path.join(self.module_autogen.OutputDir,"AutoGen.obj")),PathClass(os.path.join(self.module_autogen.DebugDir,"AutoGen.c")))
for item in self.module_autogen.Targets.values():
for block in item:
targets[block.Target.Path] = (block.Target,block.Inputs[0])
return targets
def GetRealTarget(self,source_file_abs):
""" Get the final target file based on source file abspath """
source_target_map = {item[1].Path:item[0].Path for item in self.TargetFileList.values()}
source_name_map = {item[1].File:item[0].Path for item in self.TargetFileList.values()}
target_abs = source_target_map.get(source_file_abs)
if target_abs is None:
if source_file_abs.strip().endswith(".i"):
sourcefilename = os.path.basename(source_file_abs.strip())
for sourcefile in source_name_map:
if sourcefilename.split(".")[0] == sourcefile.split(".")[0]:
target_abs = source_name_map[sourcefile]
break
else:
target_abs = source_file_abs
else:
target_abs = source_file_abs
return target_abs
def CreateDepsFileForMsvc(self, DepList):
""" Generate dependency files, .deps file from /showIncludes output message """
if not DepList:
return
ModuleDepDict = {}
current_source = ""
SourceFileAbsPathMap = self.SourceFileList
for line in DepList:
line = line.strip()
if self.HasNamesakeSourceFile:
for cc_cmd in self.CcPPCommandPathSet:
if cc_cmd in line:
if '''"'''+cc_cmd+'''"''' in line:
cc_options = line[len(cc_cmd)+2:].split()
else:
cc_options = line[len(cc_cmd):].split()
for item in cc_options:
if not item.startswith("/"):
if item.endswith(".txt") and item.startswith("@"):
with open(item[1:], "r") as file:
source_files = file.readlines()[0].split()
SourceFileAbsPathMap = {os.path.basename(file): file for file in source_files if
os.path.exists(file)}
else:
if os.path.exists(item):
SourceFileAbsPathMap.update({os.path.basename(item): item.strip()})
# SourceFileAbsPathMap = {os.path.basename(item):item for item in cc_options if not item.startswith("/") and os.path.exists(item)}
if line in SourceFileAbsPathMap:
current_source = line
if current_source not in ModuleDepDict:
ModuleDepDict[SourceFileAbsPathMap[current_source]] = []
elif "Note: including file:" == line.lstrip()[:21]:
if not current_source:
EdkLogger.error("build",BUILD_ERROR, "Parse /showIncludes output failed. line: %s. \n" % line, RaiseError=False)
else:
ModuleDepDict[SourceFileAbsPathMap[current_source]].append(line.lstrip()[22:].strip())
for source_abs in ModuleDepDict:
if ModuleDepDict[source_abs]:
target_abs = self.GetRealTarget(source_abs)
dep_file_name = os.path.basename(source_abs) + ".deps"
SaveFileOnChange(os.path.join(os.path.dirname(target_abs),dep_file_name)," \\\n".join([target_abs+":"] + ['''"''' + item +'''"''' for item in ModuleDepDict[source_abs]]),False)
    def UpdateDepsFileforNonMsvc(self):
        """Rewrite GCC/CLANG-style .deps files in place.

        1. Replace the (possibly relative or intermediate) target on the first
           line with the real final target path.
        2. Append DEP_FILE_TAIL as an end-of-file sentinel so an already
           processed file is skipped on the next pass.
        Files produced by trim (".trim.deps") are handled separately by
        UpdateDepsFileforTrim.
        """
        for abspath in self.deps_files:
            if abspath.endswith(".trim.deps"):
                continue
            try:
                newcontent = []
                with open(abspath,"r") as fd:
                    lines = fd.readlines()
                # Sentinel on the last line means this file was already updated.
                if lines[-1] == DEP_FILE_TAIL:
                    continue
                # First line is "target: source dep ..."; with more than two
                # items the source sits on the same line, otherwise the source
                # is the first item of the second line.
                firstlineitems = lines[0].strip().split(" ")
                if len(firstlineitems) > 2:
                    sourceitem = firstlineitems[1]
                else:
                    sourceitem = lines[1].strip().split(" ")[0]
                # Map the source to its absolute path when known, then derive
                # the real (final) target from it.
                source_abs = self.SourceFileList.get(sourceitem,sourceitem)
                firstlineitems[0] = self.GetRealTarget(source_abs)
                p_target = firstlineitems
                if not p_target[0].strip().endswith(":"):
                    p_target[0] += ": "
                if len(p_target) == 2:
                    # "target:" alone on line 1 - pull the dependency line up.
                    p_target[0] += lines[1]
                    newcontent.append(p_target[0])
                    newcontent.extend(lines[2:])
                else:
                    # Re-join the rewritten first line; keep the rest verbatim.
                    line1 = " ".join(p_target).strip()
                    line1 += "\n"
                    newcontent.append(line1)
                    newcontent.extend(lines[1:])
                newcontent.append("\n")
                newcontent.append(DEP_FILE_TAIL)
                with open(abspath,"w") as fw:
                    fw.write("".join(newcontent))
            except Exception as e:
                # NOTE(review): message says "doesn't exist" but this broad
                # handler also swallows parse/index errors - confirm intent.
                EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
                continue
    def UpdateDepsFileforTrim(self):
        """Rewrite .deps files generated by the trim tool (".trim.deps").

        Replaces the trim-intermediate target on the first line with the real
        final target and appends the DEP_FILE_TAIL sentinel, mirroring
        UpdateDepsFileforNonMsvc for the trim-specific layout.
        """
        for abspath in self.deps_files:
            if not abspath.endswith(".trim.deps"):
                continue
            try:
                newcontent = []
                with open(abspath,"r") as fd:
                    lines = fd.readlines()
                # Sentinel on the last line means this file was already updated.
                if lines[-1] == DEP_FILE_TAIL:
                    continue
                # First token of the first line is the (intermediate) source;
                # strip a trailing ":" before mapping it to the real target.
                source_abs = lines[0].strip().split(" ")[0]
                targetitem = self.GetRealTarget(source_abs.strip(" :"))
                targetitem += ": "
                if len(lines)>=2:
                    targetitem += lines[1]
                newcontent.append(targetitem)
                newcontent.extend(lines[2:])
                newcontent.append("\n")
                newcontent.append(DEP_FILE_TAIL)
                with open(abspath,"w") as fw:
                    fw.write("".join(newcontent))
            except Exception as e:
                # NOTE(review): same broad handler / misleading message as in
                # UpdateDepsFileforNonMsvc - confirm intent.
                EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
                continue
| edk2-master | BaseTools/Source/Python/AutoGen/IncludesAutoGen.py |
## @file
# This file is used to collect all defined strings in Image Definition files
#
# Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
# Import Modules
#
from __future__ import absolute_import
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from Common.StringUtils import GetLineNo
from Common.Misc import PathClass
from Common.LongFilePathSupport import LongFilePath
import re
import os
from Common.GlobalData import gIdentifierPattern
from .UniClassObject import StripComments
# Matches IMAGE_TOKEN (IMG_ID) references in source files; group 1 is the ID.
# Raw string avoids the invalid "\(" escape warning (a future SyntaxError).
IMAGE_TOKEN = re.compile(r'IMAGE_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
#
# Value of different image information block types
# (UEFI spec, HII Image Package: EFI_HII_IMAGE_BLOCK BlockType values)
#
EFI_HII_IIBT_END = 0x00
EFI_HII_IIBT_IMAGE_1BIT = 0x10
EFI_HII_IIBT_IMAGE_1BIT_TRANS = 0x11
EFI_HII_IIBT_IMAGE_4BIT = 0x12
EFI_HII_IIBT_IMAGE_4BIT_TRANS = 0x13
EFI_HII_IIBT_IMAGE_8BIT = 0x14
EFI_HII_IIBT_IMAGE_8BIT_TRANS = 0x15
EFI_HII_IIBT_IMAGE_24BIT = 0x16
EFI_HII_IIBT_IMAGE_24BIT_TRANS = 0x17
EFI_HII_IIBT_IMAGE_JPEG = 0x18
EFI_HII_IIBT_IMAGE_PNG = 0x19
EFI_HII_IIBT_DUPLICATE = 0x20
EFI_HII_IIBT_SKIP2 = 0x21
EFI_HII_IIBT_SKIP1 = 0x22
EFI_HII_IIBT_EXT1 = 0x30
EFI_HII_IIBT_EXT2 = 0x31
EFI_HII_IIBT_EXT4 = 0x32
#
# Value of HII package type
# (UEFI spec, EFI_HII_PACKAGE_HEADER Type field)
#
EFI_HII_PACKAGE_TYPE_ALL = 0x00
EFI_HII_PACKAGE_TYPE_GUID = 0x01
EFI_HII_PACKAGE_FORMS = 0x02
EFI_HII_PACKAGE_STRINGS = 0x04
EFI_HII_PACKAGE_FONTS = 0x05
EFI_HII_PACKAGE_IMAGES = 0x06
EFI_HII_PACKAGE_SIMPLE_FONTS = 0x07
EFI_HII_PACKAGE_DEVICE_PATH = 0x08
EFI_HII_PACKAGE_KEYBOARD_LAYOUT = 0x09
EFI_HII_PACKAGE_ANIMATIONS = 0x0A
EFI_HII_PACKAGE_END = 0xDF
EFI_HII_PACKAGE_TYPE_SYSTEM_BEGIN = 0xE0
EFI_HII_PACKAGE_TYPE_SYSTEM_END = 0xFF
class IdfFileClassObject(object):
    """Parser for Image Definition (.idf) files.

    Each valid line has the form "#image IMAGE_ID [TRANSPARENT] FileName".
    Parsed entries are collected into ImageFilesDict ({File: [ImageFileObject]})
    and every declared image ID is recorded (uniquely) in ImageIDList.
    All malformed input is reported through EdkLogger.error.
    """
    def __init__(self, FileList=None):
        # Default changed from a shared mutable [] to None (classic Python
        # mutable-default pitfall); callers passing nothing see no difference.
        self.ImageFilesDict = {}
        self.ImageIDList = []
        if FileList is None:
            FileList = []
        for File in FileList:
            if File is None:
                EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
            try:
                # 'with' guarantees the handle is closed even if read() fails
                # (the original leaked the handle on a read error).
                with open(LongFilePath(File.Path), mode='r') as IdfFile:
                    FileIn = IdfFile.read()
            except Exception:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
            ImageFileList = []
            for Line in FileIn.splitlines():
                Line = Line.strip()
                Line = StripComments(Line)
                if len(Line) == 0:
                    continue
                LineNo = GetLineNo(FileIn, Line, False)
                # Every non-comment line must be an "#image " statement.
                if not Line.startswith('#image '):
                    EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is invalid.' % (Line, LineNo, File.Path))
                if Line.find('#image ') >= 0:
                    LineDetails = Line.split()
                    Len = len(LineDetails)
                    # 3 tokens: "#image ID File"; 4 tokens: "#image ID TRANSPARENT File".
                    if Len != 3 and Len != 4:
                        EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The format is not match #image IMAGE_ID [TRANSPARENT] ImageFileName in Line %s of File %s.' % (LineNo, File.Path))
                    if Len == 4 and LineDetails[2] != 'TRANSPARENT':
                        EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
                    # The image token must be a valid C identifier.
                    MatchString = gIdentifierPattern.match(LineDetails[1])
                    if MatchString is None:
                        EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains the invalid character.' % (LineDetails[1], File.Path))
                    # Image IDs must be unique across all parsed files.
                    if LineDetails[1] not in self.ImageIDList:
                        self.ImageIDList.append(LineDetails[1])
                    else:
                        EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is already defined.' % (LineDetails[1], LineNo, File.Path))
                    if Len == 4:
                        ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], True)
                    else:
                        ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], False)
                    ImageFileList.append(ImageFile)
            if ImageFileList:
                self.ImageFilesDict[File] = ImageFileList
def SearchImageID(ImageFileObject, FileList):
    """Scan the given source files for IMAGE_TOKEN(ID) references and mark
    the matching ID on ImageFileObject via SetImageIDReferenced.

    Returns ImageFileObject when FileList is empty (legacy behavior kept for
    compatibility); otherwise returns None after scanning.
    """
    if FileList == []:
        return ImageFileObject
    for File in FileList:
        if os.path.isfile(File):
            # 'with' closes the handle; the original opened the file and
            # never closed it (resource leak across many source files).
            with open(File, 'r') as Lines:
                for Line in Lines:
                    ImageIdList = IMAGE_TOKEN.findall(Line)
                    for ID in ImageIdList:
                        EdkLogger.debug(EdkLogger.DEBUG_5, "Found ImageID identifier: " + ID)
                        ImageFileObject.SetImageIDReferenced(ID)
class ImageFileObject(object):
    """Record for one "#image" statement: file name, image ID, transparency
    flag, and whether the ID was found referenced in any source file.
    """
    def __init__(self, FileName, ImageID, TransParent = False):
        self.FileName = FileName
        self.File = ''          # resolved path, filled in later by callers
        self.ImageID = ImageID
        self.TransParent = TransParent
        self.Referenced = False

    def SetImageIDReferenced(self, ImageID):
        """Mark this image as referenced when ImageID matches; never clears."""
        self.Referenced = (ImageID == self.ImageID) or self.Referenced
| edk2-master | BaseTools/Source/Python/AutoGen/IdfClassObject.py |
## @file
# The engine for building files
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
import Common.LongFilePathOs as os
import re
import copy
import string
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.GlobalData import *
from Common.BuildToolError import *
from Common.Misc import tdict, PathClass
from Common.StringUtils import NormPath
from Common.DataType import *
from Common.TargetTxtClassObject import TargetTxtDict
# Default build-rule description file, used when target.txt does not name one.
gDefaultBuildRuleFile = 'build_rule.txt'
# Minimum build_rule.txt format version this AutoGen code accepts.
AutoGenReqBuildRuleVerNum = '0.1'
import Common.EdkLogger as EdkLogger
## Convert file type to file list macro name
#
#   @param  FileType    The name of file type (e.g. "asm-file")
#
#   @retval string      The macro name (e.g. "ASM_FILES")
#
def FileListMacro(FileType):
    normalized = FileType.replace("-", "_").upper()
    return normalized + "S"
## Convert file type to list file macro name
#
#   @param  FileType    The name of file type (e.g. "c-code")
#
#   @retval string      The macro name (e.g. "C_CODES_LIST")
#
def ListFileMacro(FileType):
    # Self-contained equivalent of FileListMacro(FileType) + "_LIST":
    # upper-case the type, map "-" to "_", then append "S_LIST".
    return "{}S_LIST".format(FileType.replace("-", "_").upper())
class TargetDescBlock(object):
    """Describes one makefile target: inputs, outputs, commands, dependencies.

    Identity (hashing and equality) is based on the path of the first output
    file; equality against a non-TargetDescBlock compares str(other) to it.
    """
    def __init__(self, Inputs, Outputs, Commands, Dependencies):
        self.InitWorker(Inputs, Outputs, Commands, Dependencies)

    def InitWorker(self, Inputs, Outputs, Commands, Dependencies):
        """(Re)initialize all fields; also used to rebuild an existing block."""
        self.Inputs = Inputs
        self.Outputs = Outputs
        self.Commands = Commands
        self.Dependencies = Dependencies
        # The first output, when present, is the canonical target.
        self.Target = Outputs[0] if Outputs else None

    def __str__(self):
        return self.Target.Path

    def __hash__(self):
        return hash(self.Target.Path)

    def __eq__(self, Other):
        if not isinstance(Other, type(self)):
            return str(Other) == self.Target.Path
        return Other.Target.Path == self.Target.Path

    def AddInput(self, Input):
        """Record an additional input file, ignoring duplicates."""
        if Input not in self.Inputs:
            self.Inputs.append(Input)

    def IsMultipleInput(self):
        """True when more than one input feeds this target."""
        return len(self.Inputs) > 1
## Class for one build rule
#
# This represents a build rule which can give out corresponding command list for
# building the given source file(s). The result can be used for generating the
# target for makefile.
#
class FileBuildRule:
    """One build rule: given source file(s) of a type, produces the command
    list and destination file(s) used to emit a makefile target.
    """
    INC_LIST_MACRO = "INC_LIST"
    INC_MACRO = "INC"
    ## constructor
    #
    #   @param  Type            The source file type this rule applies to
    #   @param  Input           Input file name pattern(s) for the rule
    #   @param  Output          Output file name pattern(s) for the rule
    #   @param  Command         Commands generating the output from the input
    #   @param  ExtraDependency Extra files the rule depends on besides Input
    #
    def __init__(self, Type, Input, Output, Command, ExtraDependency=None):
        # The Input should not be empty
        if not Input:
            Input = []
        if not Output:
            Output = []
        if not Command:
            Command = []
        self.FileListMacro = FileListMacro(Type)
        self.ListFileMacro = ListFileMacro(Type)
        self.IncListFileMacro = self.INC_LIST_MACRO
        self.SourceFileType = Type
        # source files listed not in TAB_STAR or "?" pattern format
        if not ExtraDependency:
            self.ExtraSourceFileList = []
        else:
            self.ExtraSourceFileList = ExtraDependency
        #
        # Search macros used in command lines for <FILE_TYPE>_LIST and INC_LIST.
        # If found, generate a file to keep the input files used to get over the
        # limitation of command line length
        #
        self.MacroList = []
        self.CommandList = []
        for CmdLine in Command:
            self.MacroList.extend(gMacroRefPattern.findall(CmdLine))
            # replace path separator with native one
            self.CommandList.append(CmdLine)
        # Indicate what should be generated: a file-list macro, a list file,
        # and/or an include-list file, depending on which macros the commands use.
        if self.FileListMacro in self.MacroList:
            self.GenFileListMacro = True
        else:
            self.GenFileListMacro = False
        if self.ListFileMacro in self.MacroList:
            self.GenListFile = True
            self.GenFileListMacro = True
        else:
            self.GenListFile = False
        if self.INC_LIST_MACRO in self.MacroList:
            self.GenIncListFile = True
        else:
            self.GenIncListFile = False
        # Check input files: a TAB_STAR pattern means this rule consumes many
        # sources at once; a name without wildcards is an extra fixed source.
        self.IsMultipleInput = False
        self.SourceFileExtList = set()
        for File in Input:
            Base, Ext = os.path.splitext(File)
            if Base.find(TAB_STAR) >= 0:
                # There's TAB_STAR in the file name
                self.IsMultipleInput = True
                self.GenFileListMacro = True
            elif Base.find("?") < 0:
                # There's no TAB_STAR and "?" in file name
                self.ExtraSourceFileList.append(File)
                continue
            self.SourceFileExtList.add(Ext)
        # Check output files
        self.DestFileList = []
        for File in Output:
            self.DestFileList.append(File)
        # All build targets generated by this rule for a module
        self.BuildTargets = {}
    ## str() function support
    #
    #   @retval     string
    #
    def __str__(self):
        SourceString = ""
        SourceString += " %s %s %s" % (self.SourceFileType, " ".join(self.SourceFileExtList), self.ExtraSourceFileList)
        DestString = ", ".join([str(i) for i in self.DestFileList])
        CommandString = "\n\t".join(self.CommandList)
        return "%s : %s\n\t%s" % (DestString, SourceString, CommandString)
    def Instantiate(self, Macros = None):
        """Return a shallow copy with destination patterns expanded via Macros
        and with fresh (empty) build-target bookkeeping."""
        if Macros is None:
            Macros = {}
        NewRuleObject = copy.copy(self)
        NewRuleObject.BuildTargets = {}
        NewRuleObject.DestFileList = []
        for File in self.DestFileList:
            NewRuleObject.DestFileList.append(PathClass(NormPath(File, Macros)))
        return NewRuleObject
    ## Apply the rule to given source file(s)
    #
    #   @param  SourceFile      One file or a list of files to be built
    #   @param  BuildRuleOrder  Optional extension priority list; a source with
    #                           higher priority regenerates the target commands
    #
    #   @retval TargetDescBlock for the destination, or None if the rule has no
    #           commands or no destination files
    #
    def Apply(self, SourceFile, BuildRuleOrder=None):
        if not self.CommandList or not self.DestFileList:
            return None
        # source file placeholders: empty for multi-input rules, which use the
        # file-list macro instead of a concrete path
        if self.IsMultipleInput:
            SrcFileName = ""
            SrcFileBase = ""
            SrcFileExt = ""
            SrcFileDir = ""
            SrcPath = ""
            # SourceFile must be a list
            SrcFile = "$(%s)" % self.FileListMacro
        else:
            SrcFileName, SrcFileBase, SrcFileExt = SourceFile.Name, SourceFile.BaseName, SourceFile.Ext
            if SourceFile.Root:
                SrcFileDir = SourceFile.SubDir
                if SrcFileDir == "":
                    SrcFileDir = "."
            else:
                SrcFileDir = "."
            SrcFile = SourceFile.Path
            SrcPath = SourceFile.Dir
        # destination file (the first one)
        if self.DestFileList:
            DestFile = self.DestFileList[0].Path
            DestPath = self.DestFileList[0].Dir
            DestFileName = self.DestFileList[0].Name
            DestFileBase, DestFileExt = self.DestFileList[0].BaseName, self.DestFileList[0].Ext
        else:
            DestFile = ""
            DestPath = ""
            DestFileName = ""
            DestFileBase = ""
            DestFileExt = ""
        BuildRulePlaceholderDict = {
            # source file
            "src"       :   SrcFile,
            "s_path"    :   SrcPath,
            "s_dir"     :   SrcFileDir,
            "s_name"    :   SrcFileName,
            "s_base"    :   SrcFileBase,
            "s_ext"     :   SrcFileExt,
            # destination file
            "dst"       :   DestFile,
            "d_path"    :   DestPath,
            "d_name"    :   DestFileName,
            "d_base"    :   DestFileBase,
            "d_ext"     :   DestFileExt,
        }
        DstFile = []
        for File in self.DestFileList:
            # substitution is applied twice so placeholders produced by the
            # first pass get expanded as well
            File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
            File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
            DstFile.append(PathClass(File, IsBinary=True))
        if DstFile[0] in self.BuildTargets:
            # target already known: either merge this source in, or rebuild the
            # commands when this source's extension has higher priority
            TargetDesc = self.BuildTargets[DstFile[0]]
            if BuildRuleOrder and SourceFile.Ext in BuildRuleOrder:
                Index = BuildRuleOrder.index(SourceFile.Ext)
                for Input in TargetDesc.Inputs:
                    if Input.Ext not in BuildRuleOrder or BuildRuleOrder.index(Input.Ext) > Index:
                        #
                        # Command line should be regenerated since some macros are different
                        #
                        CommandList = self._BuildCommand(BuildRulePlaceholderDict)
                        TargetDesc.InitWorker([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
                        break
            else:
                TargetDesc.AddInput(SourceFile)
        else:
            # first time this destination is seen: create the target block and
            # propagate the list-file generation flags onto it
            CommandList = self._BuildCommand(BuildRulePlaceholderDict)
            TargetDesc = TargetDescBlock([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
            TargetDesc.ListFileMacro = self.ListFileMacro
            TargetDesc.FileListMacro = self.FileListMacro
            TargetDesc.IncListFileMacro = self.IncListFileMacro
            TargetDesc.GenFileListMacro = self.GenFileListMacro
            TargetDesc.GenListFile = self.GenListFile
            TargetDesc.GenIncListFile = self.GenIncListFile
            self.BuildTargets[DstFile[0]] = TargetDesc
        return TargetDesc
    def _BuildCommand(self, Macros):
        """Expand placeholder macros in each command line (two passes, like the
        destination expansion above) and return the resulting command list."""
        CommandList = []
        for CommandString in self.CommandList:
            CommandString = string.Template(CommandString).safe_substitute(Macros)
            CommandString = string.Template(CommandString).safe_substitute(Macros)
            CommandList.append(CommandString)
        return CommandList
## Class for build rules
#
# BuildRule class parses rules defined in a file or passed by caller, and converts
# the rule into FileBuildRule object.
#
class BuildRule:
    """Parses build_rule.txt (or an equivalent string list) into a database of
    FileBuildRule objects, keyed by (file type, module type, arch, family).

    Parsing is a small state machine: section headers "[...]", sub-section
    headers "<...>", and line handlers dispatched through _StateHandler.
    """
    _SectionHeader = "SECTIONHEADER"
    _Section = "SECTION"
    _SubSectionHeader = "SUBSECTIONHEADER"
    _SubSection = "SUBSECTION"
    _InputFile = "INPUTFILE"
    _OutputFile = "OUTPUTFILE"
    _ExtraDependency = "EXTRADEPENDENCY"
    _Command = "COMMAND"
    _UnknownSection = "UNKNOWNSECTION"
    _SubSectionList = [_InputFile, _OutputFile, _Command]
    _PATH_SEP = "(+)"
    _FileTypePattern = re.compile("^[_a-zA-Z][_\-0-9a-zA-Z]*$")
    # Intrinsic fallback rule: copy a binary file into $(OUTPUT_DIR) unchanged.
    _BinaryFileRule = FileBuildRule(TAB_DEFAULT_BINARY_FILE, [], [os.path.join("$(OUTPUT_DIR)", "${s_name}")],
                                    ["$(CP) ${src} ${dst}"], [])
    ## Constructor
    #
    #   @param  File            The file containing build rules in a well defined format
    #   @param  Content         The string list of build rules in a well defined format
    #   @param  LineIndex       The line number from which the parsing will begin
    #   @param  SupportedFamily The list of supported tool chain families
    #                           NOTE(review): mutable default list is shared
    #                           across calls - harmless while only read, but
    #                           callers must not mutate it.
    #
    def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=[TAB_COMPILER_MSFT, "INTEL", "GCC"]):
        self.RuleFile = File
        # Read build rules from file if it's not none
        if File is not None:
            try:
                self.RuleContent = open(File, 'r').readlines()
            except:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
        elif Content is not None:
            self.RuleContent = Content
        else:
            EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
        self.SupportedToolChainFamilyList = SupportedFamily
        self.RuleDatabase = tdict(True, 4)  # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
        self.Ext2FileType = {}              # {ext : file-type}
        self.FileTypeList = set()
        self._LineIndex = LineIndex
        self._State = ""
        self._RuleInfo = tdict(True, 2)     # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
        self._FileType = ''
        self._BuildTypeList = set()
        self._ArchList = set()
        self._FamilyList = []
        self._TotalToolChainFamilySet = set()
        self._RuleObjectList = []           # FileBuildRule object list
        self._FileVersion = ""
        self.Parse()
        # some intrinsic rules
        self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, TAB_COMMON, TAB_COMMON, TAB_COMMON] = self._BinaryFileRule
        self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
    ## Parse the build rule strings
    def Parse(self):
        self._State = self._Section
        for Index in range(self._LineIndex, len(self.RuleContent)):
            # Clean up the line and replace path separator with native one
            Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep)
            self.RuleContent[Index] = Line
            # find the build_rule_version in a comment line, format "...=<version>"
            if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) != -1:
                if Line.find("=") != -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
                    self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]
            # skip empty or comment line
            if Line == "" or Line[0] == "#":
                continue
            # find out section header, enclosed by []
            if Line[0] == '[' and Line[-1] == ']':
                # merge last section information into rule database
                self.EndOfSection()
                self._State = self._SectionHeader
            # find out sub-section header, enclosed by <>
            elif Line[0] == '<' and Line[-1] == '>':
                if self._State != self._UnknownSection:
                    self._State = self._SubSectionHeader
            # call section handler to parse each (sub)section
            self._StateHandler[self._State](self, Index)
        # merge last section information into rule database
        self.EndOfSection()
    ## Parse definitions under a section (no-op: plain section bodies carry no data)
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def ParseSection(self, LineIndex):
        pass
    ## Parse definitions under a subsection (no-op placeholder)
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def ParseSubSection(self, LineIndex):
        # currently nothing here
        pass
    ## Placeholder for not supported sections
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def SkipSection(self, LineIndex):
        pass
    ## Merge section information just got into rule database
    def EndOfSection(self):
        Database = self.RuleDatabase
        # if there's specific toolchain family, 'COMMON' doesn't make sense any more
        if len(self._TotalToolChainFamilySet) > 1 and TAB_COMMON in self._TotalToolChainFamilySet:
            self._TotalToolChainFamilySet.remove(TAB_COMMON)
        for Family in self._TotalToolChainFamilySet:
            Input = self._RuleInfo[Family, self._InputFile]
            Output = self._RuleInfo[Family, self._OutputFile]
            Command = self._RuleInfo[Family, self._Command]
            ExtraDependency = self._RuleInfo[Family, self._ExtraDependency]
            BuildRule = FileBuildRule(self._FileType, Input, Output, Command, ExtraDependency)
            for BuildType in self._BuildTypeList:
                for Arch in self._ArchList:
                    # register the rule for every (build type, arch) combination
                    # of the section, and remember ext -> file-type mappings
                    Database[self._FileType, BuildType, Arch, Family] = BuildRule
                    for FileExt in BuildRule.SourceFileExtList:
                        self.Ext2FileType[FileExt] = self._FileType
    ## Parse section header, e.g. [C-Code-File.COMMON.IA32, ...]
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def ParseSectionHeader(self, LineIndex):
        self._RuleInfo = tdict(True, 2)
        self._BuildTypeList = set()
        self._ArchList = set()
        self._FamilyList = []
        self._TotalToolChainFamilySet = set()
        FileType = ''
        RuleNameList = self.RuleContent[LineIndex][1:-1].split(',')
        for RuleName in RuleNameList:
            Arch = TAB_COMMON
            BuildType = TAB_COMMON
            TokenList = [Token.strip().upper() for Token in RuleName.split('.')]
            # old format: Build.File-Type
            if TokenList[0] == "BUILD":
                if len(TokenList) == 1:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",
                                    File=self.RuleFile, Line=LineIndex + 1,
                                    ExtraData=self.RuleContent[LineIndex])
                FileType = TokenList[1]
                if FileType == '':
                    EdkLogger.error("build", FORMAT_INVALID, "No file type given",
                                    File=self.RuleFile, Line=LineIndex + 1,
                                    ExtraData=self.RuleContent[LineIndex])
                if self._FileTypePattern.match(FileType) is None:
                    EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
                                    ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
            # new format: File-Type.Build-Type.Arch
            else:
                if FileType == '':
                    FileType = TokenList[0]
                elif FileType != TokenList[0]:
                    EdkLogger.error("build", FORMAT_INVALID,
                                    "Different file types are not allowed in the same rule section",
                                    File=self.RuleFile, Line=LineIndex + 1,
                                    ExtraData=self.RuleContent[LineIndex])
                if len(TokenList) > 1:
                    BuildType = TokenList[1]
                if len(TokenList) > 2:
                    Arch = TokenList[2]
            self._BuildTypeList.add(BuildType)
            self._ArchList.add(Arch)
        # COMMON may not be mixed with specific build types / ARCHes
        if TAB_COMMON in self._BuildTypeList and len(self._BuildTypeList) > 1:
            EdkLogger.error("build", FORMAT_INVALID,
                            "Specific build types must not be mixed with common one",
                            File=self.RuleFile, Line=LineIndex + 1,
                            ExtraData=self.RuleContent[LineIndex])
        if TAB_COMMON in self._ArchList and len(self._ArchList) > 1:
            EdkLogger.error("build", FORMAT_INVALID,
                            "Specific ARCH must not be mixed with common one",
                            File=self.RuleFile, Line=LineIndex + 1,
                            ExtraData=self.RuleContent[LineIndex])
        self._FileType = FileType
        self._State = self._Section
        self.FileTypeList.add(FileType)
    ## Parse sub-section header, e.g. <Command.MSFT, Command.GCC>
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def ParseSubSectionHeader(self, LineIndex):
        SectionType = ""
        List = self.RuleContent[LineIndex][1:-1].split(',')
        FamilyList = []
        for Section in List:
            TokenList = Section.split('.')
            Type = TokenList[0].strip().upper()
            if SectionType == "":
                SectionType = Type
            elif SectionType != Type:
                EdkLogger.error("build", FORMAT_INVALID,
                                "Two different section types are not allowed in the same sub-section",
                                File=self.RuleFile, Line=LineIndex + 1,
                                ExtraData=self.RuleContent[LineIndex])
            if len(TokenList) > 1:
                Family = TokenList[1].strip().upper()
            else:
                Family = TAB_COMMON
            if Family not in FamilyList:
                FamilyList.append(Family)
        self._FamilyList = FamilyList
        self._TotalToolChainFamilySet.update(FamilyList)
        # the sub-section type becomes the parser state (e.g. "COMMAND")
        self._State = SectionType.upper()
        if TAB_COMMON in FamilyList and len(FamilyList) > 1:
            EdkLogger.error("build", FORMAT_INVALID,
                            "Specific tool chain family should not be mixed with general one",
                            File=self.RuleFile, Line=LineIndex + 1,
                            ExtraData=self.RuleContent[LineIndex])
        if self._State not in self._StateHandler:
            EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
                            ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])
    ## Parse <InputFile> sub-section: comma-separated file patterns on one line
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def ParseInputFileSubSection(self, LineIndex):
        FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
        for ToolChainFamily in self._FamilyList:
            if self._RuleInfo[ToolChainFamily, self._State] is None:
                self._RuleInfo[ToolChainFamily, self._State] = []
            self._RuleInfo[ToolChainFamily, self._State].extend(FileList)
    ## Parse <ExtraDependency> sub-section
    ## Parse <OutputFile> sub-section
    ## Parse <Command> sub-section
    # (one raw line is appended per call; the state selects which list)
    #
    #   @param  LineIndex   The line index of build rule text
    #
    def ParseCommonSubSection(self, LineIndex):
        for ToolChainFamily in self._FamilyList:
            if self._RuleInfo[ToolChainFamily, self._State] is None:
                self._RuleInfo[ToolChainFamily, self._State] = []
            self._RuleInfo[ToolChainFamily, self._State].append(self.RuleContent[LineIndex])
    ## Get a build rule via [] operator
    #
    #   @param  Key     (FileExt, ModuleType, Arch, ToolChainFamily); the first
    #                   element may be an extension or a file-type name
    #
    #   @retval FileBuildRule   The matching rule object, or None
    #
    def __getitem__(self, Key):
        if not Key:
            return None
        if Key[0] in self.Ext2FileType:
            Type = self.Ext2FileType[Key[0]]
        elif Key[0].upper() in self.FileTypeList:
            Type = Key[0].upper()
        else:
            return None
        if len(Key) > 1:
            Key = (Type,) + Key[1:]
        else:
            Key = (Type,)
        return self.RuleDatabase[Key]
    # dispatch table: parser state -> line handler (see Parse above)
    _StateHandler = {
        _SectionHeader     : ParseSectionHeader,
        _Section           : ParseSection,
        _SubSectionHeader  : ParseSubSectionHeader,
        _SubSection        : ParseSubSection,
        _InputFile         : ParseInputFileSubSection,
        _OutputFile        : ParseCommonSubSection,
        _ExtraDependency   : ParseCommonSubSection,
        _Command           : ParseCommonSubSection,
        _UnknownSection    : SkipSection,
    }
class ToolBuildRule():
    """Process-wide singleton holding the lazily-loaded BuildRule database."""
    def __new__(cls, *args, **kw):
        if not hasattr(cls, '_instance'):
            # Do not forward *args/**kw to object.__new__: it rejects extra
            # arguments in Python 3 and the constructor takes none anyway.
            cls._instance = super(ToolBuildRule, cls).__new__(cls)
        return cls._instance
    def __init__(self):
        # Initialize the cache slot exactly once. Checking the private
        # attribute directly is the fix: the original tested
        # hasattr(self, 'ToolBuildRule'), which invokes the property getter
        # and could load (or fail to load) the whole rule file as a side
        # effect of merely re-constructing the singleton.
        if not hasattr(self, '_ToolBuildRule'):
            self._ToolBuildRule = None
    @property
    def ToolBuildRule(self):
        """The BuildRule database, loaded on first access."""
        if not self._ToolBuildRule:
            self._GetBuildRule()
        return self._ToolBuildRule
    def _GetBuildRule(self):
        # Locate the rule file from target.txt, falling back to the default.
        BuildRuleFile = None
        TargetObj = TargetTxtDict()
        TargetTxt = TargetObj.Target
        if TAB_TAT_DEFINES_BUILD_RULE_CONF in TargetTxt.TargetTxtDictionary:
            BuildRuleFile = TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
        if not BuildRuleFile:
            BuildRuleFile = gDefaultBuildRuleFile
        RetVal = BuildRule(BuildRuleFile)
        if RetVal._FileVersion == "":
            RetVal._FileVersion = AutoGenReqBuildRuleVerNum
        else:
            if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :
                # If Build Rule's version is less than the version number required by the tools, halting the build.
                EdkLogger.error("build", AUTOGEN_ERROR,
                                ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
                                % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
        self._ToolBuildRule = RetVal
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
if __name__ == '__main__':
    import sys
    EdkLogger.Initialize()
    # Manual smoke test: parse the rule file named on the command line and dump
    # a few representative rules. Each lookup returns a tdict result whose [1]
    # element is the FileBuildRule object.
    if len(sys.argv) > 1:
        Br = BuildRule(sys.argv[1])
        print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", TAB_COMPILER_MSFT][1]))
        print()
        print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", "INTEL"][1]))
        print()
        print(str(Br[".c", SUP_MODULE_DXE_DRIVER, "IA32", "GCC"][1]))
        print()
        print(str(Br[".ac", "ACPI_TABLE", "IA32", TAB_COMPILER_MSFT][1]))
        print()
        print(str(Br[".h", "ACPI_TABLE", "IA32", "INTEL"][1]))
        print()
        print(str(Br[".ac", "ACPI_TABLE", "IA32", TAB_COMPILER_MSFT][1]))
        print()
        print(str(Br[".s", SUP_MODULE_SEC, "IPF", "COMMON"][1]))
        print()
        print(str(Br[".s", SUP_MODULE_SEC][1]))
| edk2-master | BaseTools/Source/Python/AutoGen/BuildEngine.py |
## @file
# Parser a Inf file and Get specify section data.
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from Common.DataType import *
class InfSectionParser():
    """Light-weight INF section scanner.

    Splits the file into "[section header] -> [data lines]" dictionaries
    (one per section occurrence, kept in file order) and offers accessors
    for the TianoCore user-extension sections and the [Depex] sections.
    """
    def __init__(self, FilePath):
        self._FilePath = FilePath
        self._FileSectionDataList = []
        self._ParserInf()
    def _ParserInf(self):
        """Scan the INF once, collecting each section header and its body.

        State machine: a "[...]" line opens a section (UserExtFind/FindEnd);
        the section is closed and recorded when the NEXT line is a new
        section header or the end of file is reached.
        """
        FileLinesList = []
        UserExtFind = False
        FindEnd = True
        FileLastLine = False
        SectionLine = ''
        SectionData = []
        try:
            with open(self._FilePath, "r") as File:
                FileLinesList = File.readlines()
        except BaseException:
            EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % self._FilePath)
        for Index in range(0, len(FileLinesList)):
            line = str(FileLinesList[Index]).strip()
            # Look one line ahead to detect the start of the next section.
            if Index + 1 == len(FileLinesList):
                FileLastLine = True
                NextLine = ''
            else:
                NextLine = str(FileLinesList[Index + 1]).strip()
            # Inside an open section: accumulate non-empty body lines.
            if UserExtFind and FindEnd == False:
                if line:
                    SectionData.append(line)
            # A "[...]" line opens a (new) section.
            if line.startswith(TAB_SECTION_START) and line.endswith(TAB_SECTION_END):
                SectionLine = line
                UserExtFind = True
                FindEnd = False
            # Close the section when the next line starts another one, or at EOF.
            if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
                NextLine[-1] == TAB_SECTION_END) or FileLastLine:
                UserExtFind = False
                FindEnd = True
                self._FileSectionDataList.append({SectionLine: SectionData[:]})
                del SectionData[:]
                SectionLine = ''
    # Get user extension TianoCore data
    #
    # @return: a list of dictionaries, each {section name: [data lines]} for
    #          every [UserExtensions.TianoCore...] (sub)section found.
    def GetUserExtensionTianoCore(self):
        UserExtensionTianoCore = []
        if not self._FileSectionDataList:
            return UserExtensionTianoCore
        for SectionDataDict in self._FileSectionDataList:
            for key in SectionDataDict:
                if key.lower().startswith("[userextensions") and key.lower().find('.tianocore.') > -1:
                    SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
                    SubSectionList = [SectionLine]
                    # A header may declare several comma-separated sub-sections.
                    if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
                        SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
                    for SubSection in SubSectionList:
                        if SubSection.lower().find('.tianocore.') > -1:
                            UserExtensionTianoCore.append({SubSection: SectionDataDict[key]})
        return UserExtensionTianoCore
    # Get depex expression
    #
    # @return: a list of dictionaries keyed by (ModuleType, Arch) - defaulting
    #          to TAB_ARCH_COMMON - with the section's data lines as the value.
    def GetDepexExpresionList(self):
        DepexExpressionList = []
        if not self._FileSectionDataList:
            return DepexExpressionList
        for SectionDataDict in self._FileSectionDataList:
            for key in SectionDataDict:
                if key.lower() == "[depex]" or key.lower().startswith("[depex."):
                    SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
                    SubSectionList = [SectionLine]
                    # A header may declare several comma-separated sub-sections.
                    if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
                        SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
                    for SubSection in SubSectionList:
                        # "Depex[.ModuleType[.Arch]]" -> (ModuleType, Arch) key.
                        SectionList = SubSection.split(TAB_SPLIT)
                        SubKey = ()
                        if len(SectionList) == 1:
                            SubKey = (TAB_ARCH_COMMON, TAB_ARCH_COMMON)
                        elif len(SectionList) == 2:
                            SubKey = (SectionList[1], TAB_ARCH_COMMON)
                        elif len(SectionList) == 3:
                            SubKey = (SectionList[1], SectionList[2])
                        else:
                            EdkLogger.error("build", AUTOGEN_ERROR, 'Section %s is invalid.' % key)
                        DepexExpressionList.append({SubKey: SectionDataDict[key]})
        return DepexExpressionList
| edk2-master | BaseTools/Source/Python/AutoGen/InfSectionParser.py |
## @file
# This file is used to generate DEPEX file for module's dependency expression
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
## Import Modules
#
import sys
import Common.LongFilePathOs as os
import re
import traceback
from Common.LongFilePathSupport import OpenLongFilePath as open
from io import BytesIO
from struct import pack
from Common.BuildToolError import *
from Common.Misc import SaveFileOnChange
from Common.Misc import GuidStructureStringToGuidString
from Common.Misc import GuidStructureByteArrayToGuidString
from Common.Misc import GuidStringToGuidStructureString
from Common import EdkLogger as EdkLogger
from Common.BuildVersion import gBUILD_VERSION
from Common.DataType import *
## Regular expression for matching "DEPENDENCY_START ... DEPENDENCY_END";
# group 1 captures the expression body (re.S lets "." span newlines).
gStartClosePattern = re.compile(".*DEPENDENCY_START(.+)DEPENDENCY_END.*", re.S)
## Mapping between module type and EFI phase; the phase selects which opcode
# table in DependencyExpression.Opcode applies (SUP_MODULE_BASE has no depex).
gType2Phase = {
    SUP_MODULE_BASE              :   None,
    SUP_MODULE_SEC               :   "PEI",
    SUP_MODULE_PEI_CORE          :   "PEI",
    SUP_MODULE_PEIM              :   "PEI",
    SUP_MODULE_DXE_CORE          :   "DXE",
    SUP_MODULE_DXE_DRIVER        :   "DXE",
    SUP_MODULE_DXE_SMM_DRIVER    :   "DXE",
    SUP_MODULE_DXE_RUNTIME_DRIVER:   "DXE",
    SUP_MODULE_DXE_SAL_DRIVER    :   "DXE",
    SUP_MODULE_UEFI_DRIVER       :   "DXE",
    SUP_MODULE_UEFI_APPLICATION  :   "DXE",
    SUP_MODULE_SMM_CORE          :   "DXE",
    SUP_MODULE_MM_STANDALONE     :   "MM",
    SUP_MODULE_MM_CORE_STANDALONE :  "MM",
}
## Convert dependency expression string into EFI internal representation
#
# DependencyExpression class is used to parse dependency expression string and
# convert it into its binary form.
#
class DependencyExpression:
    ## GUID values of the DXE architectural protocols.
    #  A DXE-phase depex that is an AND of exactly this set is dropped by
    #  Optimize(), since the DXE core guarantees these before dispatch.
    ArchProtocols = {
        '665e3ff6-46cc-11d4-9a38-0090273fc14d', # 'gEfiBdsArchProtocolGuid'
        '26baccb1-6f42-11d4-bce7-0080c73c8881', # 'gEfiCpuArchProtocolGuid'
        '26baccb2-6f42-11d4-bce7-0080c73c8881', # 'gEfiMetronomeArchProtocolGuid'
        '1da97072-bddc-4b30-99f1-72a0b56fff2a', # 'gEfiMonotonicCounterArchProtocolGuid'
        '27cfac87-46cc-11d4-9a38-0090273fc14d', # 'gEfiRealTimeClockArchProtocolGuid'
        '27cfac88-46cc-11d4-9a38-0090273fc14d', # 'gEfiResetArchProtocolGuid'
        'b7dfb4e1-052f-449f-87be-9818fc91b733', # 'gEfiRuntimeArchProtocolGuid'
        'a46423e3-4617-49f1-b9ff-d1bfa9115839', # 'gEfiSecurityArchProtocolGuid'
        '26baccb3-6f42-11d4-bce7-0080c73c8881', # 'gEfiTimerArchProtocolGuid'
        '6441f818-6362-4e44-b570-7dba31dd2453', # 'gEfiVariableWriteArchProtocolGuid'
        '1e5668e2-8481-11d4-bcf1-0080c73c8881', # 'gEfiVariableArchProtocolGuid'
        '665e3ff5-46cc-11d4-9a38-0090273fc14d'  # 'gEfiWatchdogTimerArchProtocolGuid'
    }

    ## Operator binding strength used by the infix-to-postfix conversion.
    OpcodePriority = {
        DEPEX_OPCODE_AND : 1,
        DEPEX_OPCODE_OR  : 1,
        DEPEX_OPCODE_NOT : 2,
    }

    ## Opcode byte encodings, keyed by EFI phase.
    Opcode = {
        "PEI" : {
            DEPEX_OPCODE_PUSH  : 0x02,
            DEPEX_OPCODE_AND   : 0x03,
            DEPEX_OPCODE_OR    : 0x04,
            DEPEX_OPCODE_NOT   : 0x05,
            DEPEX_OPCODE_TRUE  : 0x06,
            DEPEX_OPCODE_FALSE : 0x07,
            DEPEX_OPCODE_END   : 0x08
        },
        "DXE" : {
            DEPEX_OPCODE_BEFORE: 0x00,
            DEPEX_OPCODE_AFTER : 0x01,
            DEPEX_OPCODE_PUSH  : 0x02,
            DEPEX_OPCODE_AND   : 0x03,
            DEPEX_OPCODE_OR    : 0x04,
            DEPEX_OPCODE_NOT   : 0x05,
            DEPEX_OPCODE_TRUE  : 0x06,
            DEPEX_OPCODE_FALSE : 0x07,
            DEPEX_OPCODE_END   : 0x08,
            DEPEX_OPCODE_SOR   : 0x09
        },
        "MM" : {
            DEPEX_OPCODE_BEFORE: 0x00,
            DEPEX_OPCODE_AFTER : 0x01,
            DEPEX_OPCODE_PUSH  : 0x02,
            DEPEX_OPCODE_AND   : 0x03,
            DEPEX_OPCODE_OR    : 0x04,
            DEPEX_OPCODE_NOT   : 0x05,
            DEPEX_OPCODE_TRUE  : 0x06,
            DEPEX_OPCODE_FALSE : 0x07,
            DEPEX_OPCODE_END   : 0x08,
            DEPEX_OPCODE_SOR   : 0x09
        }
    }

    # all supported op codes and operands
    SupportedOpcode = [DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER, DEPEX_OPCODE_PUSH, DEPEX_OPCODE_AND, DEPEX_OPCODE_OR, DEPEX_OPCODE_NOT, DEPEX_OPCODE_END, DEPEX_OPCODE_SOR]
    SupportedOperand = [DEPEX_OPCODE_TRUE, DEPEX_OPCODE_FALSE]

    OpcodeWithSingleOperand = [DEPEX_OPCODE_NOT, DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
    OpcodeWithTwoOperand = [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR]

    # op code that should not be the last one
    NonEndingOpcode = [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR, DEPEX_OPCODE_NOT, DEPEX_OPCODE_SOR]
    # op code must not present at the same time
    ExclusiveOpcode = [DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]
    # op code that should be the first one if it presents
    AboveAllOpcode = [DEPEX_OPCODE_SOR, DEPEX_OPCODE_BEFORE, DEPEX_OPCODE_AFTER]

    #
    # open and close brace must be taken as individual tokens.
    # Raw string: avoids invalid-escape SyntaxWarning on Python >= 3.12;
    # the compiled pattern is byte-identical to the previous literal.
    #
    TokenPattern = re.compile(r"(\(|\)|\{[^{}]+\{?[^{}]+\}?[ ]*\}|\w+)")

    ## Constructor
    #
    #   @param  Expression  The list or string of dependency expression
    #   @param  ModuleType  The type of the module using the dependency expression
    #   @param  Optimize    If True, run the simple duplicate-operand optimization
    #
    def __init__(self, Expression, ModuleType, Optimize=False):
        self.ModuleType = ModuleType
        self.Phase = gType2Phase[ModuleType]
        if isinstance(Expression, list):
            self.ExpressionString = " ".join(Expression)
            self.TokenList = Expression
        else:
            self.ExpressionString = Expression
            self.GetExpressionTokenList()

        self.PostfixNotation = []
        self.OpcodeList = []

        self.GetPostfixNotation()
        self.ValidateOpcode()

        EdkLogger.debug(EdkLogger.DEBUG_8, repr(self))
        if Optimize:
            self.Optimize()
            EdkLogger.debug(EdkLogger.DEBUG_8, "\n Optimized: " + repr(self))

    def __str__(self):
        return " ".join(self.TokenList)

    def __repr__(self):
        WellForm = ''
        for Token in self.PostfixNotation:
            if Token in self.SupportedOpcode:
                WellForm += "\n " + Token
            else:
                WellForm += ' ' + Token
        return WellForm

    ## Split the expression string into token list
    def GetExpressionTokenList(self):
        self.TokenList = self.TokenPattern.findall(self.ExpressionString)

    ## Convert token list into postfix notation
    #
    #  Classic shunting-yard conversion driven by OpcodePriority; parenthesis
    #  balance and operator/operand ordering are validated along the way.
    def GetPostfixNotation(self):
        Stack = []
        LastToken = ''
        for Token in self.TokenList:
            if Token == "(":
                if LastToken not in self.SupportedOpcode + ['(', '', None]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before open parentheses",
                                    ExtraData="Near %s" % LastToken)
                Stack.append(Token)
            elif Token == ")":
                if '(' not in Stack:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
                                    ExtraData=str(self))
                elif LastToken in self.SupportedOpcode + ['', None]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before close parentheses",
                                    ExtraData="Near %s" % LastToken)
                # pop operators until the matching open parenthesis
                while len(Stack) > 0:
                    if Stack[-1] == '(':
                        Stack.pop()
                        break
                    self.PostfixNotation.append(Stack.pop())
            elif Token in self.OpcodePriority:
                if Token == DEPEX_OPCODE_NOT:
                    if LastToken not in self.SupportedOpcode + ['(', '', None]:
                        EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before NOT",
                                        ExtraData="Near %s" % LastToken)
                elif LastToken in self.SupportedOpcode + ['(', '', None]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before " + Token,
                                    ExtraData="Near %s" % LastToken)
                # pop higher-priority operators before pushing this one
                while len(Stack) > 0:
                    if Stack[-1] == "(" or self.OpcodePriority[Token] >= self.OpcodePriority[Stack[-1]]:
                        break
                    self.PostfixNotation.append(Stack.pop())
                Stack.append(Token)
                self.OpcodeList.append(Token)
            else:
                if Token not in self.SupportedOpcode:
                    # not OP, take it as GUID
                    if LastToken not in self.SupportedOpcode + ['(', '', None]:
                        EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before %s" % Token,
                                        ExtraData="Near %s" % LastToken)
                    if len(self.OpcodeList) == 0 or self.OpcodeList[-1] not in self.ExclusiveOpcode:
                        if Token not in self.SupportedOperand:
                            self.PostfixNotation.append(DEPEX_OPCODE_PUSH)
                # check if OP is valid in this phase
                elif Token in self.Opcode[self.Phase]:
                    if Token == DEPEX_OPCODE_END:
                        break
                    self.OpcodeList.append(Token)
                else:
                    EdkLogger.error("GenDepex", PARSER_ERROR,
                                    "Opcode=%s doesn't supported in %s stage " % (Token, self.Phase),
                                    ExtraData=str(self))
                self.PostfixNotation.append(Token)
            LastToken = Token

        # there should not be parentheses in Stack
        if '(' in Stack or ')' in Stack:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
                            ExtraData=str(self))
        while len(Stack) > 0:
            self.PostfixNotation.append(Stack.pop())
        if self.PostfixNotation[-1] != DEPEX_OPCODE_END:
            self.PostfixNotation.append(DEPEX_OPCODE_END)

    ## Validate the dependency expression
    def ValidateOpcode(self):
        for Op in self.AboveAllOpcode:
            if Op in self.PostfixNotation:
                if Op != self.PostfixNotation[0]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the first opcode in the expression" % Op,
                                    ExtraData=str(self))
                if len(self.PostfixNotation) < 3:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
                                    ExtraData=str(self))
        for Op in self.ExclusiveOpcode:
            if Op in self.OpcodeList:
                if len(self.OpcodeList) > 1:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the only opcode in the expression" % Op,
                                    ExtraData=str(self))
                if len(self.PostfixNotation) < 3:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
                                    ExtraData=str(self))
        if self.TokenList[-1] != DEPEX_OPCODE_END and self.TokenList[-1] in self.NonEndingOpcode:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-1],
                            ExtraData=str(self))
        if self.TokenList[-1] == DEPEX_OPCODE_END and self.TokenList[-2] in self.NonEndingOpcode:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-2],
                            ExtraData=str(self))
        if DEPEX_OPCODE_END in self.TokenList and DEPEX_OPCODE_END != self.TokenList[-1]:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Extra expressions after END",
                            ExtraData=str(self))

    ## Simply optimize the dependency expression by removing duplicated operands
    def Optimize(self):
        OpcodeSet = set(self.OpcodeList)
        # optimization only applies when exactly one opcode kind is used
        if len(OpcodeSet) != 1:
            return
        Op = OpcodeSet.pop()
        # if Op isn't either OR or AND, return
        if Op not in [DEPEX_OPCODE_AND, DEPEX_OPCODE_OR]:
            return
        NewOperand = []
        AllOperand = set()
        for Token in self.PostfixNotation:
            if Token in self.SupportedOpcode or Token in NewOperand:
                continue
            AllOperand.add(Token)
            if Token == DEPEX_OPCODE_TRUE:
                # TRUE is the identity of AND (drop it) and absorbs OR (stop)
                if Op == DEPEX_OPCODE_AND:
                    continue
                else:
                    NewOperand.append(Token)
                    break
            elif Token == DEPEX_OPCODE_FALSE:
                # FALSE is the identity of OR (drop it) and absorbs AND (stop)
                if Op == DEPEX_OPCODE_OR:
                    continue
                else:
                    NewOperand.append(Token)
                    break
            NewOperand.append(Token)

        # don't generate depex if only TRUE operand left
        if self.ModuleType == SUP_MODULE_PEIM and len(NewOperand) == 1 and NewOperand[0] == DEPEX_OPCODE_TRUE:
            self.PostfixNotation = []
            return

        # don't generate depex if all operands are architecture protocols
        if self.ModuleType in [SUP_MODULE_UEFI_DRIVER, SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_MM_STANDALONE] and \
           Op == DEPEX_OPCODE_AND and \
           self.ArchProtocols == set(GuidStructureStringToGuidString(Guid) for Guid in AllOperand):
            self.PostfixNotation = []
            return

        if len(NewOperand) == 0:
            self.TokenList = list(AllOperand)
        else:
            # rebuild the infix token list as "operand Op operand Op ..."
            self.TokenList = []
            while True:
                self.TokenList.append(NewOperand.pop(0))
                if NewOperand == []:
                    break
                self.TokenList.append(Op)
        self.PostfixNotation = []
        self.GetPostfixNotation()

    ## Convert a GUID value in C structure format into its binary form
    #
    #   @param  Guid    The GUID value in C structure format
    #
    #   @retval bytes   The 16 packed bytes representing the GUID value
    #
    def GetGuidValue(self, Guid):
        GuidValueString = Guid.replace("{", "").replace("}", "").replace(" ", "")
        GuidValueList = GuidValueString.split(",")
        if len(GuidValueList) == 16:
            # Flat 16-byte-array form: normalize to the canonical 11-field
            # C structure form first.
            GuidValueString = GuidStringToGuidStructureString(GuidStructureByteArrayToGuidString(Guid))
            GuidValueString = GuidValueString.replace("{", "").replace("}", "").replace(" ", "")
            GuidValueList = GuidValueString.split(",")
        if len(GuidValueList) != 11:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid GUID value string or opcode: %s" % Guid)
        return pack("1I2H8B", *(int(value, 16) for value in GuidValueList))

    ## Save the binary form of dependency expression in file
    #
    #   @param  File    The path of file. If None is given, put the data on console
    #
    #   @retval True    If the file doesn't exist or file is changed
    #   @retval False   If file exists and is not changed.
    #
    def Generate(self, File=None):
        Buffer = BytesIO()
        if len(self.PostfixNotation) == 0:
            return False

        for Item in self.PostfixNotation:
            if Item in self.Opcode[self.Phase]:
                Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
            elif Item in self.SupportedOpcode:
                EdkLogger.error("GenDepex", FORMAT_INVALID,
                                "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
                                ExtraData=self.ExpressionString)
            else:
                # anything that is not an opcode is a GUID operand
                Buffer.write(self.GetGuidValue(Item))

        FilePath = ""
        FileChangeFlag = True
        if File is None:
            # The depex is binary data: write through the underlying binary
            # stream. Passing bytes to the text-mode sys.stdout.write() raises
            # TypeError on Python 3.
            sys.stdout.buffer.write(Buffer.getvalue())
            FilePath = "STDOUT"
        else:
            FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)

        Buffer.close()
        return FileChangeFlag
#
# Tool version/usage strings reported by optparse for --version / --help.
#
versionNumber = ("0.04" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + versionNumber
__copyright__ = "Copyright (c) 2007-2018, Intel Corporation All rights reserved."
__usage__ = "%prog [options] [dependency_expression_file]"
## Parse command line options
#
# @retval tuple  (Options, Args) as returned by OptionParser.parse_args()
#
def GetOptions():
    from optparse import OptionParser
    Parser = OptionParser(description=__copyright__, version=__version__, usage=__usage__)
    # One (flag strings, keyword arguments) entry per command line option.
    OptionSpecs = [
        (("-o", "--output"),
         {"dest": "OutputFile", "default": None, "metavar": "FILE",
          "help": "Specify the name of depex file to be generated"}),
        (("-t", "--module-type"),
         {"dest": "ModuleType", "default": None,
          "help": "The type of module for which the dependency expression serves"}),
        (("-e", "--dependency-expression"),
         {"dest": "Expression", "default": "",
          "help": "The string of dependency expression. If this option presents, the input file will be ignored."}),
        (("-m", "--optimize"),
         {"dest": "Optimize", "default": False, "action": "store_true",
          "help": "Do some simple optimization on the expression."}),
        (("-v", "--verbose"),
         {"dest": "verbose", "default": False, "action": "store_true",
          "help": "build with verbose information"}),
        (("-d", "--debug"),
         {"action": "store", "type": "int",
          "help": "Enable debug messages at specified level."}),
        (("-q", "--quiet"),
         {"dest": "quiet", "default": False, "action": "store_true",
          "help": "build with little information"}),
    ]
    for Flags, Keywords in OptionSpecs:
        Parser.add_option(*Flags, **Keywords)
    return Parser.parse_args()
## Entrance method
#
# @retval 0     Tool was successful
# @retval 1     Tool failed
#
def Main():
    EdkLogger.Initialize()
    Option, Input = GetOptions()

    # Set log level
    if Option.quiet:
        EdkLogger.SetLevel(EdkLogger.QUIET)
    elif Option.verbose:
        EdkLogger.SetLevel(EdkLogger.VERBOSE)
    elif Option.debug is not None:
        EdkLogger.SetLevel(Option.debug + 1)
    else:
        EdkLogger.SetLevel(EdkLogger.INFO)

    try:
        if Option.ModuleType is None or Option.ModuleType not in gType2Phase:
            EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")

        DxsFile = ''
        if len(Input) > 0 and Option.Expression == "":
            # Expression comes from a DXS file: flatten it to a single line and
            # strip the DEPENDENCY_START/DEPENDENCY_END wrapper.
            # NOTE(review): the file handle is never explicitly closed here; it
            # is reclaimed by garbage collection.
            DxsFile = Input[0]
            DxsString = open(DxsFile, 'r').read().replace("\n", " ").replace("\r", " ")
            DxsString = gStartClosePattern.sub("\\1", DxsString)
        elif Option.Expression != "":
            # Expression given on the command line; drop surrounding quotes if any.
            if Option.Expression[0] == '"':
                DxsString = Option.Expression[1:-1]
            else:
                DxsString = Option.Expression
        else:
            EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")

        Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
        if Option.OutputFile is not None:
            FileChangeFlag = Dpx.Generate(Option.OutputFile)
            if not FileChangeFlag and DxsFile:
                #
                # Touch the output file if its time stamp is older than the original
                # DXS file to avoid re-invoke this tool for the dependency check in build rule.
                #
                # os.stat(...)[8] is st_mtime.
                if os.stat(DxsFile)[8] > os.stat(Option.OutputFile)[8]:
                    os.utime(Option.OutputFile, None)
        else:
            # No output file: dump the binary depex to stdout.
            Dpx.Generate()
    except BaseException as X:
        EdkLogger.quiet("")
        if Option is not None and Option.debug is not None:
            EdkLogger.quiet(traceback.format_exc())
        else:
            EdkLogger.quiet(str(X))
        return 1
    return 0

if __name__ == '__main__':
    sys.exit(Main())
| edk2-master | BaseTools/Source/Python/AutoGen/GenDepex.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
from AutoGen.AutoGen import AutoGen
from Common.LongFilePathSupport import LongFilePath, CopyLongFilePath
from Common.BuildToolError import *
from Common.DataType import *
from Common.Misc import *
from Common.StringUtils import NormPath,GetSplitList
from collections import defaultdict
from Workspace.WorkspaceCommon import OrderedListDict
import os.path as path
import copy
import hashlib
from . import InfSectionParser
from . import GenC
from . import GenMake
from . import GenDepex
from io import BytesIO
from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
from Workspace.MetaFileCommentParser import UsageList
from .GenPcdDb import CreatePcdDatabaseCode
from Common.caching import cached_class_function
from AutoGen.ModuleAutoGenHelper import PlatformInfo,WorkSpaceInfo
import json
import tempfile
## Mapping Makefile type
gMakeTypeMap = {TAB_COMPILER_MSFT:"nmake", "GCC":"gmake"}

#
# Regular expression for finding Include Directories; the difference between
# MSFT and INTEL/GCC is that the former uses /I while the latter uses -I to
# specify include directories.
#
gBuildOptIncludePatternMsft = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
gBuildOptIncludePatternOther = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)

## default file name for AutoGen
#  %(module_name)s placeholders are filled in with the module's base name.
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
gAutoGenIdfFileName = "%(module_name)sIdf.hpk"

## INF_VERSION value emitted into generated As-Built INF files
gInfSpecVersion = "0x00010017"
#
# Match "name = <identifier>" inside an efivarstore statement.
# Raw string literals: "\s"/"\w" inside plain strings are invalid escape
# sequences that raise SyntaxWarning on Python >= 3.12; the compiled
# patterns are byte-identical to the previous non-raw literals.
#
gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")
#
# The format of guid in efivarstore statement likes following and must be correct:
# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
#
gEfiVarStoreGuidPattern = re.compile(r"\s*guid\s*=\s*({.*?{.*?}\s*})")
#
# Template string used to generate the As-Built INF file.
# ${BEGIN}/${END} delimit repeated sections; ${...} names are substituted by
# TemplateString at render time.
#
gAsBuiltInfHeaderString = TemplateString("""${header_comments}
# DO NOT EDIT
# FILE auto-generated
[Defines]
INF_VERSION = ${module_inf_version}
BASE_NAME = ${module_name}
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}${BEGIN}
VERSION_STRING = ${module_version_string}${END}${BEGIN}
PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
DESTRUCTOR = ${module_destructor}${END}${BEGIN}
SHADOW = ${module_shadow}${END}${BEGIN}
PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
SPEC = ${module_spec}${END}${BEGIN}
UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
MODULE_UNI_FILE = ${module_uni_file}${END}
[Packages.${module_arch}]${BEGIN}
${package_item}${END}
[Binaries.${module_arch}]${BEGIN}
${binary_item}${END}
[PatchPcd.${module_arch}]${BEGIN}
${patchablepcd_item}
${END}
[Protocols.${module_arch}]${BEGIN}
${protocol_item}
${END}
[Ppis.${module_arch}]${BEGIN}
${ppi_item}
${END}
[Guids.${module_arch}]${BEGIN}
${guid_item}
${END}
[PcdEx.${module_arch}]${BEGIN}
${pcd_item}
${END}
[LibraryClasses.${module_arch}]
## @LIB_INSTANCES${BEGIN}
# ${libraryclasses_item}${END}
${depexsection_item}
${userextension_tianocore_item}
${tail_comments}
[BuildOptions.${module_arch}]
## @AsBuilt${BEGIN}
## ${flags_item}${END}
""")
#
# extend lists contained in a dictionary with lists stored in another dictionary
# if CopyToDict is not derived from DefaultDict(list) then this may raise exception
#
def ExtendCopyDictionaryLists(CopyToDict, CopyFromDict):
    ## Append every source list onto the destination list under the same key.
    #  CopyToDict is expected to be a defaultdict(list) so unseen keys get a
    #  fresh list automatically.
    for Key, SourceList in CopyFromDict.items():
        CopyToDict[Key].extend(SourceList)
# Create a directory specified by a set of path elements and return the full path
def _MakeDir(PathList):
    ## Join the path components, ensure the directory exists, return its path.
    FullPath = path.join(*PathList)
    CreateDirectory(FullPath)
    return FullPath
#
# Convert string to C format array
#
def _ConvertStringToByteArray(Value):
Value = Value.strip()
if not Value:
return None
if Value[0] == '{':
if not Value.endswith('}'):
return None
Value = Value.replace(' ', '').replace('{', '').replace('}', '')
ValFields = Value.split(',')
try:
for Index in range(len(ValFields)):
ValFields[Index] = str(int(ValFields[Index], 0))
except ValueError:
return None
Value = '{' + ','.join(ValFields) + '}'
return Value
Unicode = False
if Value.startswith('L"'):
if not Value.endswith('"'):
return None
Value = Value[1:]
Unicode = True
elif not Value.startswith('"') or not Value.endswith('"'):
return None
Value = eval(Value) # translate escape character
NewValue = '{'
for Index in range(0, len(Value)):
if Unicode:
NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ','
else:
NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ','
Value = NewValue + '0}'
return Value
## ModuleAutoGen class
#
# This class encapsules the AutoGen behaviors for the build tools. In addition to
# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
# to the [depex] section in module's inf file.
#
class ModuleAutoGen(AutoGen):
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args)
self._Init = True
## Cache the timestamps of metafiles of every module in a class attribute
#
TimeDict = {}
def __new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
# check if this module is employed by active platform
if not PlatformInfo(Workspace, args[0], Target, Toolchain, Arch,args[-1]).ValidModule(MetaFile):
EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
% (MetaFile, Arch))
return None
return super(ModuleAutoGen, cls).__new__(cls, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
## Initialize ModuleAutoGen
#
# @param Workspace EdkIIWorkspaceBuild object
# @param ModuleFile The path of module file
# @param Target Build target (DEBUG, RELEASE)
# @param Toolchain Name of tool chain
# @param Arch The arch the module supports
# @param PlatformFile Platform meta-file
#
def _InitWorker(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile,DataPipe):
EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
self.Workspace = Workspace
self.WorkspaceDir = ""
self.PlatformInfo = None
self.DataPipe = DataPipe
self.__init_platform_info__()
self.MetaFile = ModuleFile
self.SourceDir = self.MetaFile.SubDir
self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
self.ToolChain = Toolchain
self.BuildTarget = Target
self.Arch = Arch
self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
self.IsCodeFileCreated = False
self.IsAsBuiltInfCreated = False
self.DepexGenerated = False
self.BuildDatabase = self.Workspace.BuildDatabase
self.BuildRuleOrder = None
self.BuildTime = 0
self._GuidComments = OrderedListDict()
self._ProtocolComments = OrderedListDict()
self._PpiComments = OrderedListDict()
self._BuildTargets = None
self._IntroBuildTargetList = None
self._FinalBuildTargetList = None
self._FileTypes = None
self.AutoGenDepSet = set()
self.ReferenceModules = []
self.ConstPcd = {}
self.FileDependCache = {}
def __init_platform_info__(self):
pinfo = self.DataPipe.Get("P_Info")
self.WorkspaceDir = pinfo.get("WorkspaceDir")
self.PlatformInfo = PlatformInfo(self.Workspace,pinfo.get("ActivePlatform"),pinfo.get("Target"),pinfo.get("ToolChain"),pinfo.get("Arch"),self.DataPipe)
## hash() operator of ModuleAutoGen
#
# The module file path and arch string will be used to represent
# hash value of this object
#
# @retval int Hash value of the module file path and arch
#
@cached_class_function
def __hash__(self):
return hash((self.MetaFile, self.Arch, self.ToolChain,self.BuildTarget))
def __repr__(self):
return "%s [%s]" % (self.MetaFile, self.Arch)
# Get FixedAtBuild Pcds of this Module
@cached_property
def FixedAtBuildPcds(self):
RetVal = []
for Pcd in self.ModulePcdList:
if Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
continue
if Pcd not in RetVal:
RetVal.append(Pcd)
return RetVal
@cached_property
def FixedVoidTypePcds(self):
RetVal = {}
for Pcd in self.FixedAtBuildPcds:
if Pcd.DatumType == TAB_VOID:
if '.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName)) not in RetVal:
RetVal['.'.join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))] = Pcd.DefaultValue
return RetVal
@property
def UniqueBaseName(self):
ModuleNames = self.DataPipe.Get("M_Name")
if not ModuleNames:
return self.Name
return ModuleNames.get((self.Name,self.MetaFile),self.Name)
# Macros could be used in build_rule.txt (also Makefile)
@cached_property
def Macros(self):
return OrderedDict((
("WORKSPACE" ,self.WorkspaceDir),
("MODULE_NAME" ,self.Name),
("MODULE_NAME_GUID" ,self.UniqueBaseName),
("MODULE_GUID" ,self.Guid),
("MODULE_VERSION" ,self.Version),
("MODULE_TYPE" ,self.ModuleType),
("MODULE_FILE" ,str(self.MetaFile)),
("MODULE_FILE_BASE_NAME" ,self.MetaFile.BaseName),
("MODULE_RELATIVE_DIR" ,self.SourceDir),
("MODULE_DIR" ,self.SourceDir),
("BASE_NAME" ,self.Name),
("ARCH" ,self.Arch),
("TOOLCHAIN" ,self.ToolChain),
("TOOLCHAIN_TAG" ,self.ToolChain),
("TOOL_CHAIN_TAG" ,self.ToolChain),
("TARGET" ,self.BuildTarget),
("BUILD_DIR" ,self.PlatformInfo.BuildDir),
("BIN_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
("LIB_DIR" ,os.path.join(self.PlatformInfo.BuildDir, self.Arch)),
("MODULE_BUILD_DIR" ,self.BuildDir),
("OUTPUT_DIR" ,self.OutputDir),
("DEBUG_DIR" ,self.DebugDir),
("DEST_DIR_OUTPUT" ,self.OutputDir),
("DEST_DIR_DEBUG" ,self.DebugDir),
("PLATFORM_NAME" ,self.PlatformInfo.Name),
("PLATFORM_GUID" ,self.PlatformInfo.Guid),
("PLATFORM_VERSION" ,self.PlatformInfo.Version),
("PLATFORM_RELATIVE_DIR" ,self.PlatformInfo.SourceDir),
("PLATFORM_DIR" ,mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)),
("PLATFORM_OUTPUT_DIR" ,self.PlatformInfo.OutputDir),
("FFS_OUTPUT_DIR" ,self.FfsOutputDir)
))
## Return the module build data object
@cached_property
def Module(self):
return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
## Return the module name
@cached_property
def Name(self):
return self.Module.BaseName
## Return the module DxsFile if exist
@cached_property
def DxsFile(self):
return self.Module.DxsFile
## Return the module meta-file GUID
@cached_property
def Guid(self):
#
# To build same module more than once, the module path with FILE_GUID overridden has
# the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
# in DSC. The overridden GUID can be retrieved from file name
#
if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
#
# Length of GUID is 36
#
return os.path.basename(self.MetaFile.Path)[:36]
return self.Module.Guid
## Return the module version
@cached_property
def Version(self):
return self.Module.Version
## Return the module type
@cached_property
def ModuleType(self):
return self.Module.ModuleType
## Return the component type (for Edk.x style of module)
@cached_property
def ComponentType(self):
return self.Module.ComponentType
## Return the build type
@cached_property
def BuildType(self):
return self.Module.BuildType
## Return the PCD_IS_DRIVER setting
@cached_property
def PcdIsDriver(self):
return self.Module.PcdIsDriver
## Return the autogen version, i.e. module meta-file version
@cached_property
def AutoGenVersion(self):
return self.Module.AutoGenVersion
## Check if the module is library or not
@cached_property
def IsLibrary(self):
return bool(self.Module.LibraryClass)
## Check if the module is binary module or not
@cached_property
def IsBinaryModule(self):
return self.Module.IsBinaryModule
## Return the directory to store intermediate files of the module
@cached_property
def BuildDir(self):
return _MakeDir((
self.PlatformInfo.BuildDir,
self.Arch,
self.SourceDir,
self.MetaFile.BaseName
))
## Return the directory to store the intermediate object files of the module
@cached_property
def OutputDir(self):
return _MakeDir((self.BuildDir, "OUTPUT"))
## Return the directory path to store ffs file
@cached_property
def FfsOutputDir(self):
if GlobalData.gFdfParser:
return path.join(self.PlatformInfo.BuildDir, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
return ''
## Return the directory to store auto-gened source files of the module
@cached_property
def DebugDir(self):
return _MakeDir((self.BuildDir, "DEBUG"))
## Return the path of custom file
@cached_property
def CustomMakefile(self):
RetVal = {}
for Type in self.Module.CustomMakefile:
MakeType = gMakeTypeMap[Type] if Type in gMakeTypeMap else 'nmake'
File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
RetVal[MakeType] = File
return RetVal
## Return the directory of the makefile
#
# @retval string The directory string of module's makefile
#
@cached_property
def MakeFileDir(self):
return self.BuildDir
## Return build command string
#
# @retval string Build command string
#
@cached_property
def BuildCommand(self):
return self.PlatformInfo.BuildCommand
## Get Module package and Platform package
#
# @retval list The list of package object
#
@cached_property
def PackageList(self):
PkagList = []
if self.Module.Packages:
PkagList.extend(self.Module.Packages)
Platform = self.BuildDatabase[self.PlatformInfo.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
for Package in Platform.Packages:
if Package in PkagList:
continue
PkagList.append(Package)
return PkagList
## Get object list of all packages the module and its dependent libraries belong to and the Platform depends on
#
# @retval list The list of package object
#
@cached_property
def DerivedPackageList(self):
PackageList = []
PackageList.extend(self.PackageList)
for M in self.DependentLibraryList:
for Package in M.Packages:
if Package in PackageList:
continue
PackageList.append(Package)
return PackageList
## Get the depex string
#
# @return : a string contain all depex expression.
def _GetDepexExpresionString(self):
DepexStr = ''
DepexList = []
## DPX_SOURCE IN Define section.
if self.Module.DxsFile:
return DepexStr
for M in [self.Module] + self.DependentLibraryList:
Filename = M.MetaFile.Path
InfObj = InfSectionParser.InfSectionParser(Filename)
DepexExpressionList = InfObj.GetDepexExpresionList()
for DepexExpression in DepexExpressionList:
for key in DepexExpression:
Arch, ModuleType = key
DepexExpr = [x for x in DepexExpression[key] if not str(x).startswith('#')]
# the type of build module is USER_DEFINED.
# All different DEPEX section tags would be copied into the As Built INF file
# and there would be separate DEPEX section tags
if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
DepexList.append({(Arch, ModuleType): DepexExpr})
else:
if Arch.upper() == TAB_ARCH_COMMON or \
(Arch.upper() == self.Arch.upper() and \
ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
DepexList.append({(Arch, ModuleType): DepexExpr})
#the type of build module is USER_DEFINED.
if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED or self.ModuleType.upper() == SUP_MODULE_HOST_APPLICATION:
for Depex in DepexList:
for key in Depex:
DepexStr += '[Depex.%s.%s]\n' % key
DepexStr += '\n'.join('# '+ val for val in Depex[key])
DepexStr += '\n\n'
if not DepexStr:
return '[Depex.%s]\n' % self.Arch
return DepexStr
#the type of build module not is USER_DEFINED.
Count = 0
for Depex in DepexList:
Count += 1
if DepexStr != '':
DepexStr += ' AND '
DepexStr += '('
for D in Depex.values():
DepexStr += ' '.join(val for val in D)
Index = DepexStr.find('END')
if Index > -1 and Index == len(DepexStr) - 3:
DepexStr = DepexStr[:-3]
DepexStr = DepexStr.strip()
DepexStr += ')'
if Count == 1:
DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
if not DepexStr:
return '[Depex.%s]\n' % self.Arch
return '[Depex.%s]\n# ' % self.Arch + DepexStr
    ## Merge dependency expression
    #
    # Merges the [Depex] token lists of this module and all of its linked
    # library instances into one parenthesized expression joined with AND.
    # FixedAtBuild VOID* PCD tokens inside a depex are replaced by their
    # 16-byte GUID values.
    #
    # @retval dict {module type: token list of the merged dependency expression}
    #
    @cached_property
    def DepexList(self):
        # No depex for libraries, DXS-file modules, or modules that already
        # carry a prebuilt dependency-expression binary.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}
        DepexList = []
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        # Gather FixedAtBuild VOID* PCDs from this module and every library so
        # that 'TokenSpace.PcdName' tokens in a depex can be resolved to values.
        FixedVoidTypePcds = {}
        for M in [self] + self.LibraryAutoGenList:
            FixedVoidTypePcds.update(M.FixedVoidTypePcds)
        for M in [self] + self.LibraryAutoGenList:
            Inherited = False
            for D in M.Module.Depex[self.Arch, self.ModuleType]:
                if DepexList != []:
                    DepexList.append('AND')
                DepexList.append('(')
                #replace D with value if D is FixedAtBuild PCD
                NewList = []
                for item in D:
                    if '.' not in item:
                        NewList.append(item)
                    else:
                        # Dotted token: must be a FixedAtBuild VOID* PCD whose
                        # value is a 16-byte (GUID-sized) comma-separated array.
                        try:
                            Value = FixedVoidTypePcds[item]
                            if len(Value.split(',')) != 16:
                                EdkLogger.error("build", FORMAT_INVALID,
                                                "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type and 16 bytes in the module.".format(item))
                            NewList.append(Value)
                        except:
                            EdkLogger.error("build", FORMAT_INVALID, "{} used in [Depex] section should be used as FixedAtBuild type and VOID* datum type in the module.".format(item))
                DepexList.extend(NewList)
                if DepexList[-1] == 'END': # no need of a END at this time
                    DepexList.pop()
                DepexList.append(')')
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.Module.BaseName, DepexList))
            # BEFORE/AFTER ordering expressions cannot be merged with others.
            if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                break
        if len(DepexList) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexList}
    ## Merge dependency expression strings
    #
    # Joins the [Depex] expression strings of this module and its dependent
    # libraries with AND into a single parenthesized expression string.
    #
    # @retval dict {module type: merged dependency expression string}
    #
    @cached_property
    def DepexExpressionDict(self):
        # No depex for libraries, DXS-file modules, or modules with a
        # prebuilt dependency-expression binary.
        if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
            return {}
        DepexExpressionString = ''
        #
        # Append depex from dependent libraries, if not "BEFORE", "AFTER" expression
        #
        for M in [self.Module] + self.DependentLibraryList:
            Inherited = False
            for D in M.DepexExpression[self.Arch, self.ModuleType]:
                if DepexExpressionString != '':
                    DepexExpressionString += ' AND '
                DepexExpressionString += '('
                DepexExpressionString += D
                # NOTE(review): rstrip('END') strips the characters E, N, D from
                # the end, not the literal word 'END' — assumes expressions end
                # with an 'END' token; confirm no expression ends in E/N/D text.
                DepexExpressionString = DepexExpressionString.rstrip('END').strip()
                DepexExpressionString += ')'
                Inherited = True
            if Inherited:
                EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionString))
            # BEFORE/AFTER ordering expressions cannot be merged with others.
            if 'BEFORE' in DepexExpressionString or 'AFTER' in DepexExpressionString:
                break
        if len(DepexExpressionString) > 0:
            EdkLogger.verbose('')
        return {self.ModuleType:DepexExpressionString}
# Get the tiano core user extension, it is contain dependent library.
# @retval: a list contain tiano core userextension.
#
def _GetTianoCoreUserExtensionList(self):
TianoCoreUserExtentionList = []
for M in [self.Module] + self.DependentLibraryList:
Filename = M.MetaFile.Path
InfObj = InfSectionParser.InfSectionParser(Filename)
TianoCoreUserExtenList = InfObj.GetUserExtensionTianoCore()
for TianoCoreUserExtent in TianoCoreUserExtenList:
for Section in TianoCoreUserExtent:
ItemList = Section.split(TAB_SPLIT)
Arch = self.Arch
if len(ItemList) == 4:
Arch = ItemList[3]
if Arch.upper() == TAB_ARCH_COMMON or Arch.upper() == self.Arch.upper():
TianoCoreList = []
TianoCoreList.extend([TAB_SECTION_START + Section + TAB_SECTION_END])
TianoCoreList.extend(TianoCoreUserExtent[Section][:])
TianoCoreList.append('\n')
TianoCoreUserExtentionList.append(TianoCoreList)
return TianoCoreUserExtentionList
    ## Return the list of specification version required for the module
    #
    # @retval list The list of specification defined in module file
    #
    @cached_property
    def Specification(self):
        # Straight delegation to the parsed module metadata.
        return self.Module.Specification
## Tool option for the module build
#
# @param PlatformInfo The object of PlatformBuildInfo
# @retval dict The dict containing valid options
#
@cached_property
def BuildOption(self):
RetVal, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
if self.BuildRuleOrder:
self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
return RetVal
## Get include path list from tool option for the module build
#
# @retval list The include path list
#
@cached_property
def BuildOptionIncPathList(self):
#
# Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC
# is the former use /I , the Latter used -I to specify include directories
#
if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC'):
BuildOptIncludeRegEx = gBuildOptIncludePatternOther
else:
#
# New ToolChainFamily, don't known whether there is option to specify include directories
#
return []
RetVal = []
for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
try:
FlagOption = self.BuildOption[Tool]['FLAGS']
except KeyError:
FlagOption = ''
IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
#
# EDK II modules must not reference header files outside of the packages they depend on or
# within the module's directory tree. Report error if violation.
#
if GlobalData.gDisableIncludePathCheck == False:
for Path in IncPathList:
if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
EdkLogger.error("build",
PARAMETER_INVALID,
ExtraData=ErrMsg,
File=str(self.MetaFile))
RetVal += IncPathList
return RetVal
    ## Return a list of files which can be built from source
    #
    # What kind of files can be built is determined by build rules in
    # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
    #
    @cached_property
    def SourceFileList(self):
        RetVal = []
        # A source is accepted when tagged for any toolchain ("", *) or the active one.
        ToolChainTagSet = {"", TAB_STAR, self.ToolChain}
        ToolChainFamilySet = {"", TAB_STAR, self.ToolChainFamily, self.BuildRuleFamily}
        for F in self.Module.Sources:
            # match tool chain
            if F.TagName not in ToolChainTagSet:
                EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                "but [%s] is currently used" % (F.TagName, str(F), self.ToolChain))
                continue
            # match tool chain family or build rule family
            if F.ToolChainFamily not in ToolChainFamilySet:
                EdkLogger.debug(
                        EdkLogger.DEBUG_0,
                        "The file [%s] must be built by tools of [%s], " \
                        "but current toolchain family is [%s], buildrule family is [%s]" \
                            % (str(F), F.ToolChainFamily, self.ToolChainFamily, self.BuildRuleFamily))
                continue
            # add the file path into search path list for file including
            if F.Dir not in self.IncludePathList:
                self.IncludePathList.insert(0, F.Dir)
            RetVal.append(F)
        # Drop sources overridden by a higher-priority extension, then apply
        # the build rules to every remaining file.
        self._MatchBuildRuleOrder(RetVal)
        for F in RetVal:
            self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return RetVal
    ## Filter out source files overridden by a higher-priority extension.
    #
    # When several sources share the same base path and all their extensions
    # appear in the build-rule order, only the highest-priority extension is
    # kept; the others are removed from FileList in place.
    #
    # @param FileList  List of source file objects to filter (modified in place)
    # @retval list     The filtered FileList
    #
    def _MatchBuildRuleOrder(self, FileList):
        Order_Dict = {}
        # Touch BuildOption to make sure self.BuildRuleOrder is populated.
        self.BuildOption
        for SingleFile in FileList:
            if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
                # Group by the path with the extension stripped off the end.
                key = SingleFile.Path.rsplit(SingleFile.Ext,1)[0]
                if key in Order_Dict:
                    Order_Dict[key].append(SingleFile.Ext)
                else:
                    Order_Dict[key] = [SingleFile.Ext]
        RemoveList = []
        for F in Order_Dict:
            if len(Order_Dict[F]) > 1:
                # Sort extensions by rule priority; everything after the first
                # entry is overridden and gets removed.
                Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
                for Ext in Order_Dict[F][1:]:
                    RemoveList.append(F + Ext)
        for item in RemoveList:
            # NOTE(review): items are path strings; removal relies on the file
            # objects in FileList comparing equal to their path string — confirm.
            FileList.remove(item)
        return FileList
    ## Return the list of unicode files
    @cached_property
    def UnicodeFileList(self):
        # FileTypes is populated as build rules are applied to the sources.
        return self.FileTypes.get(TAB_UNICODE_FILE,[])
    ## Return the list of vfr files
    @cached_property
    def VfrFileList(self):
        # FileTypes is populated as build rules are applied to the sources.
        return self.FileTypes.get(TAB_VFR_FILE, [])
    ## Return the list of Image Definition files
    @cached_property
    def IdfFileList(self):
        # FileTypes is populated as build rules are applied to the sources.
        return self.FileTypes.get(TAB_IMAGE_FILE,[])
    ## Return a list of files which can be built from binary
    #
    # "Build" binary files are just to copy them to build directory.
    #
    # @retval list The list of files which can be built later
    #
    @cached_property
    def BinaryFileList(self):
        RetVal = []
        for F in self.Module.Binaries:
            # Keep only binaries for the common target or the active build target.
            if F.Target not in [TAB_ARCH_COMMON, TAB_STAR] and F.Target != self.BuildTarget:
                continue
            # Append before applying the rule: _ApplyBuildRule consults the
            # partially-built RetVal as the binary file list.
            RetVal.append(F)
            self._ApplyBuildRule(F, F.Type, BinaryFileList=RetVal)
        return RetVal
@cached_property
def BuildRules(self):
RetVal = {}
BuildRuleDatabase = self.PlatformInfo.BuildRule
for Type in BuildRuleDatabase.FileTypeList:
#first try getting build rule by BuildRuleFamily
RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
if not RuleObject:
# build type is always module type, but ...
if self.ModuleType != self.BuildType:
RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
#second try getting build rule by ToolChainFamily
if not RuleObject:
RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
if not RuleObject:
# build type is always module type, but ...
if self.ModuleType != self.BuildType:
RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
if not RuleObject:
continue
RuleObject = RuleObject.Instantiate(self.Macros)
RetVal[Type] = RuleObject
for Ext in RuleObject.SourceFileExtList:
RetVal[Ext] = RuleObject
return RetVal
    ## Apply build rules to a file, chaining rules until final targets remain.
    #
    # Starting from File, repeatedly applies the matching build rule and feeds
    # each rule's outputs back in as new sources, populating the intro/final
    # target sets and the file-type map along the way.
    #
    # @param File            The source file object to process
    # @param FileType        Initial file type for the first rule lookup
    # @param BinaryFileList  Optional binary list; defaults to self.BinaryFileList
    #
    def _ApplyBuildRule(self, File, FileType, BinaryFileList=None):
        # Lazily create the target/type bookkeeping on first use.
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)
        if not BinaryFileList:
            BinaryFileList = self.BinaryFileList
        SubDirectory = os.path.join(self.OutputDir, File.SubDir)
        if not os.path.exists(SubDirectory):
            CreateDirectory(SubDirectory)
        TargetList = set()
        FinalTargetName = set()
        RuleChain = set()
        SourceList = [File]
        Index = 0
        #
        # Make sure to get build rule order value
        #
        self.BuildOption
        # Worklist loop: SourceList grows as rules emit new outputs.
        while Index < len(SourceList):
            # Reset the FileType if not the first iteration.
            if Index > 0:
                FileType = TAB_UNKNOWN_FILE
            Source = SourceList[Index]
            Index = Index + 1
            if Source != File:
                CreateDirectory(Source.Dir)
            if File.IsBinary and File == Source and File in BinaryFileList:
                # Skip all files that are not binary libraries
                if not self.IsLibrary:
                    continue
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            elif FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            else:
                # No more rule to apply: Source is a final target.
                FinalTargetName.add(Source)
                continue
            FileType = RuleObject.SourceFileType
            self._FileTypes[FileType].add(Source)
            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                FinalTargetName.add(Source)
                continue
            Target = RuleObject.Apply(Source, self.BuildRuleOrder)
            if not Target:
                # No Target: Source is a final target.
                FinalTargetName.add(Source)
                continue
            TargetList.add(Target)
            self._BuildTargets[FileType].add(Target)
            # Targets produced directly from the original file are "intro" targets.
            if not Source.IsBinary and Source == File:
                self._IntroBuildTargetList.add(Target)
            # to avoid cyclic rule
            if FileType in RuleChain:
                EdkLogger.error("build", ERROR_STATEMENT, "Cyclic dependency detected while generating rule for %s" % str(Source))
            RuleChain.add(FileType)
            SourceList.extend(Target.Outputs)
        # For each final target name, retrieve the corresponding TargetDescBlock instance.
        for FTargetName in FinalTargetName:
            for Target in TargetList:
                if FTargetName == Target.Target:
                    self._FinalBuildTargetList.add(Target)
    ## Map of file type to the set of build targets, built on first access.
    @cached_property
    def Targets(self):
        if self._BuildTargets is None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = defaultdict(set)
            self._FileTypes = defaultdict(set)
        #TRICK: call SourceFileList property to apply build rule for source files
        self.SourceFileList
        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        self.BinaryFileList
        return self._BuildTargets
    ## Targets generated directly from the original source files.
    @cached_property
    def IntroTargetList(self):
        # Accessing Targets populates _IntroBuildTargetList as a side effect.
        self.Targets
        return self._IntroBuildTargetList
    ## Final (leaf) build targets that no further rule applies to.
    @cached_property
    def CodaTargetList(self):
        # Accessing Targets populates _FinalBuildTargetList as a side effect.
        self.Targets
        return self._FinalBuildTargetList
    ## Map of file type to the set of source files of that type.
    @cached_property
    def FileTypes(self):
        # Accessing Targets populates _FileTypes as a side effect.
        self.Targets
        return self._FileTypes
    ## Get the list of package object the module depends on and the Platform depends on
    #
    # @retval list The package object list
    #
    @cached_property
    def DependentPackageList(self):
        # Straight delegation; kept as a separate property for API compatibility.
        return self.PackageList
    ## Return the list of auto-generated code file
    #
    # @retval dict Mapping of generated PathClass -> file content (str or bytes)
    #
    @cached_property
    def AutoGenFileList(self):
        # UNI/IDF intermediate generation is skipped for UEFI_HII builds.
        AutoGenUniIdf = self.BuildType != 'UEFI_HII'
        UniStringBinBuffer = BytesIO()
        IdfGenBinBuffer = BytesIO()
        RetVal = {}
        AutoGenC = TemplateString()
        AutoGenH = TemplateString()
        StringH = TemplateString()
        StringIdf = TemplateString()
        # Fill all templates/buffers with generated AutoGen content in one pass.
        GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, AutoGenUniIdf, UniStringBinBuffer, StringIdf, AutoGenUniIdf, IdfGenBinBuffer)
        #
        # AutoGen.c is generated if there are library classes in inf, or there are object files
        #
        if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
                                    or TAB_OBJECT_FILE in self.FileTypes):
            AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenC)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(AutoGenH) != "":
            AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
            RetVal[AutoFile] = str(AutoGenH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if str(StringH) != "":
            AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringH)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary UNI string package, emitted to the output (not debug) directory.
        if UniStringBinBuffer is not None and UniStringBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = UniStringBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if UniStringBinBuffer is not None:
            UniStringBinBuffer.close()
        if str(StringIdf) != "":
            AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
            RetVal[AutoFile] = str(StringIdf)
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        # Binary image-definition data, emitted to the output (not debug) directory.
        if IdfGenBinBuffer is not None and IdfGenBinBuffer.getvalue() != b"":
            AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
            RetVal[AutoFile] = IdfGenBinBuffer.getvalue()
            AutoFile.IsBinary = True
            self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        if IdfGenBinBuffer is not None:
            IdfGenBinBuffer.close()
        return RetVal
    ## Return the list of library modules explicitly or implicitly used by this module
    @cached_property
    def DependentLibraryList(self):
        # only merge library classes and PCD for non-library module
        if self.IsLibrary:
            return []
        return self.PlatformInfo.ApplyLibraryInstance(self.Module)
    ## Get the list of PCDs from current module
    #
    # @retval list The list of PCD
    #
    @cached_property
    def ModulePcdList(self):
        # apply PCD settings from platform
        RetVal = self.PlatformInfo.ApplyPcdSetting(self, self.Module.Pcds)
        return RetVal
@cached_property
def _PcdComments(self):
ReVal = OrderedListDict()
ExtendCopyDictionaryLists(ReVal, self.Module.PcdComments)
if not self.IsLibrary:
for Library in self.DependentLibraryList:
ExtendCopyDictionaryLists(ReVal, Library.PcdComments)
return ReVal
    ## Get the list of PCDs from dependent libraries
    #
    # @retval list The list of PCD
    #
    @cached_property
    def LibraryPcdList(self):
        if self.IsLibrary:
            return []
        RetVal = []
        # Tracks PCD keys already collected so duplicates across libraries
        # (or already present in the module) are skipped.
        Pcds = set()
        # get PCDs from dependent libraries
        for Library in self.DependentLibraryList:
            PcdsInLibrary = OrderedDict()
            for Key in Library.Pcds:
                # skip duplicated PCDs
                if Key in self.Module.Pcds or Key in Pcds:
                    continue
                Pcds.add(Key)
                # Shallow-copy so platform settings don't alter the library's PCD.
                PcdsInLibrary[Key] = copy.copy(Library.Pcds[Key])
            RetVal.extend(self.PlatformInfo.ApplyPcdSetting(self, PcdsInLibrary, Library=Library))
        return RetVal
## Get the GUID value mapping
#
# @retval dict The mapping between GUID cname and its value
#
@cached_property
def GuidList(self):
RetVal = self.Module.Guids
for Library in self.DependentLibraryList:
RetVal.update(Library.Guids)
ExtendCopyDictionaryLists(self._GuidComments, Library.GuidComments)
ExtendCopyDictionaryLists(self._GuidComments, self.Module.GuidComments)
return RetVal
@cached_property
def GetGuidsUsedByPcd(self):
RetVal = OrderedDict(self.Module.GetGuidsUsedByPcd())
for Library in self.DependentLibraryList:
RetVal.update(Library.GetGuidsUsedByPcd())
return RetVal
    ## Get the protocol value mapping
    #
    # @retval dict The mapping between protocol cname and its value
    #
    @cached_property
    def ProtocolList(self):
        # Copy so library updates don't mutate the module's own protocol table.
        RetVal = OrderedDict(self.Module.Protocols)
        for Library in self.DependentLibraryList:
            RetVal.update(Library.Protocols)
            ExtendCopyDictionaryLists(self._ProtocolComments, Library.ProtocolComments)
        ExtendCopyDictionaryLists(self._ProtocolComments, self.Module.ProtocolComments)
        return RetVal
    ## Get the PPI value mapping
    #
    # @retval dict The mapping between PPI cname and its value
    #
    @cached_property
    def PpiList(self):
        # Copy so library updates don't mutate the module's own PPI table.
        RetVal = OrderedDict(self.Module.Ppis)
        for Library in self.DependentLibraryList:
            RetVal.update(Library.Ppis)
            ExtendCopyDictionaryLists(self._PpiComments, Library.PpiComments)
        ExtendCopyDictionaryLists(self._PpiComments, self.Module.PpiComments)
        return RetVal
    ## Get the list of include search path
    #
    # @retval list The list path
    #
    @cached_property
    def IncludePathList(self):
        RetVal = []
        # Module directory and its debug directory come first in search order.
        RetVal.append(self.MetaFile.Dir)
        RetVal.append(self.DebugDir)
        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            if PackageDir not in RetVal:
                RetVal.append(PackageDir)
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are visible only to modules located inside
                # the package's own directory tree.
                if not self.MetaFile.OriginalPath.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
            for Inc in IncludesList:
                if Inc not in RetVal:
                    RetVal.append(str(Inc))
        # Finally, include paths specified via tool FLAGS.
        RetVal.extend(self.IncPathFromBuildOptions)
        return RetVal
@cached_property
def IncPathFromBuildOptions(self):
IncPathList = []
for tool in self.BuildOption:
if 'FLAGS' in self.BuildOption[tool]:
flags = self.BuildOption[tool]['FLAGS']
whitespace = False
for flag in flags.split(" "):
flag = flag.strip()
if flag.startswith(("/I","-I")):
if len(flag)>2:
if os.path.exists(flag[2:]):
IncPathList.append(flag[2:])
else:
whitespace = True
continue
if whitespace and flag:
if os.path.exists(flag):
IncPathList.append(flag)
whitespace = False
return IncPathList
@cached_property
def IncludePathLength(self):
return sum(len(inc)+1 for inc in self.IncludePathList)
    ## Get the list of include paths from the packages
    #
    # @IncludesList list The list path
    #
    @cached_property
    def PackageIncludePathList(self):
        IncludesList = []
        for Package in self.PackageList:
            PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
            # NOTE(review): IncludesList is reassigned on every iteration, so
            # only the last package's includes are actually returned — confirm
            # whether accumulation across packages was intended.
            IncludesList = Package.Includes
            if Package._PrivateIncludes:
                # Private includes are visible only to modules inside the package.
                if not self.MetaFile.Path.startswith(PackageDir):
                    IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
        return IncludesList
    ## Get HII EX PCDs which maybe used by VFR
    #
    #  efivarstore used by VFR may relate with HII EX PCDs
    #  Get the variable name and GUID from efivarstore and HII EX PCD
    #  List the HII EX PCDs in As Built INF if both name and GUID match.
    #
    #  @retval list HII EX PCDs
    #
    def _GetPcdsMaybeUsedByVfr(self):
        if not self.SourceFileList:
            return []
        # Collect (variable name byte array, GUID string) pairs from every
        # preprocessed VFR (.i) file's efivarstore statements.
        NameGuids = set()
        for SrcFile in self.SourceFileList:
            if SrcFile.Ext.lower() != '.vfr':
                continue
            Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
            if not os.path.exists(Vfri):
                continue
            VfriFile = open(Vfri, 'r')
            Content = VfriFile.read()
            VfriFile.close()
            Pos = Content.find('efivarstore')
            while Pos != -1:
                #
                # Make sure 'efivarstore' is the start of efivarstore statement
                # In case of the value of 'name' (name = efivarstore) is equal to 'efivarstore'
                #
                Index = Pos - 1
                while Index >= 0 and Content[Index] in ' \t\r\n':
                    Index -= 1
                if Index >= 0 and Content[Index] != ';':
                    Pos = Content.find('efivarstore', Pos + len('efivarstore'))
                    continue
                #
                # 'efivarstore' must be followed by name and guid
                #
                Name = gEfiVarStoreNamePattern.search(Content, Pos)
                if not Name:
                    break
                Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
                if not Guid:
                    break
                NameArray = _ConvertStringToByteArray('L"' + Name.group(1) + '"')
                NameGuids.add((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
                Pos = Content.find('efivarstore', Name.end())
        if not NameGuids:
            return []
        # Match the collected (name, GUID) pairs against the platform's
        # DynamicExHii PCDs; a PCD matches if any SKU's variable matches.
        HiiExPcds = []
        for Pcd in self.PlatformInfo.Pcds.values():
            if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
                continue
            for SkuInfo in Pcd.SkuInfoList.values():
                Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
                if not Value:
                    continue
                Name = _ConvertStringToByteArray(SkuInfo.VariableName)
                Guid = GuidStructureStringToGuidString(Value)
                if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
                    HiiExPcds.append(Pcd)
                    break
        return HiiExPcds
    ## Generate <Name>Offset.bin recording the image offsets of VFR/UNI data.
    #
    # Looks up the VFR binary and UNI string offsets in the module's .map/.efi
    # files and writes fixed GUID + 64-bit offset records for each.
    #
    # @retval str   The output file name, or None when there is nothing to record
    #
    def _GenOffsetBin(self):
        VfrUniBaseName = {}
        for SourceFile in self.Module.Sources:
            if SourceFile.Type.upper() == ".VFR" :
                #
                # search the .map file to find the offset of vfr binary in the PE32+/TE file.
                #
                VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
            elif SourceFile.Type.upper() == ".UNI" :
                #
                # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
                #
                VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
        if not VfrUniBaseName:
            return None
        MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
        EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
        VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, list(VfrUniBaseName.values()))
        if not VfrUniOffsetList:
            return None
        OutputName = '%sOffset.bin' % self.Name
        UniVfrOffsetFileName    =  os.path.join( self.OutputDir, OutputName)
        try:
            fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName, None)
        # Use a instance of BytesIO to cache data
        fStringIO = BytesIO()
        for Item in VfrUniOffsetList:
            if (Item[0].find("Strings") != -1):
                #
                # UNI offset in image.
                # GUID + Offset
                # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
                #
                # Little-endian byte encoding of the GUID above; must stay exact.
                UniGuid = b'\xe0\xc5\x13\x89\xf63\x86M\x9b\xf1C\xef\x89\xfc\x06f'
                fStringIO.write(UniGuid)
                UniValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (UniValue)
            else:
                #
                # VFR binary offset in image.
                # GUID + Offset
                # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
                #
                # Little-endian byte encoding of the GUID above; must stay exact.
                VfrGuid = b'\xb4|\xbc\xd0Gj_I\xaa\x11q\x07F\xda\x06\xa2'
                fStringIO.write(VfrGuid)
                VfrValue = pack ('Q', int (Item[1], 16))
                fStringIO.write (VfrValue)
        #
        # write data into file.
        #
        try :
            fInputfile.write (fStringIO.getvalue())
        except:
            EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
                            "file been locked or using by other applications." %UniVfrOffsetFileName, None)
        fStringIO.close ()
        fInputfile.close ()
        return OutputName
@cached_property
def OutputFile(self):
retVal = set()
for Root, Dirs, Files in os.walk(self.BuildDir):
for File in Files:
# lib file is already added through above CodaTargetList, skip it here
if not (File.lower().endswith('.obj') or File.lower().endswith('.debug')):
NewFile = path.join(Root, File)
retVal.add(NewFile)
for Root, Dirs, Files in os.walk(self.FfsOutputDir):
for File in Files:
NewFile = path.join(Root, File)
retVal.add(NewFile)
return retVal
    ## Create AsBuilt INF file the module
    #
    # Generates <Name>.inf in the output directory describing the built module:
    # its binaries, packages, PCDs, protocols/PPIs/GUIDs, flags, and depex.
    # Only runs once per module, and only for binary (no-source) modules.
    #
    def CreateAsBuiltInf(self):
        if self.IsAsBuiltInfCreated:
            return
        # Skip INF file generation for libraries
        if self.IsLibrary:
            return
        # Skip the following code for modules with no source files
        if not self.SourceFileList:
            return
        # Skip the following code for modules without any binary files
        if self.BinaryFileList:
            return
        ### TODO: How to handles mixed source and binary modules
        # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
        # Also find all packages that the DynamicEx PCDs depend on
        Pcds = []
        PatchablePcds = []
        Packages = []
        PcdCheckList = []
        PcdTokenSpaceList = []
        for Pcd in self.ModulePcdList + self.LibraryPcdList:
            if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
                PatchablePcds.append(Pcd)
                PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_PATCHABLE_IN_MODULE))
            elif Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
                if Pcd not in Pcds:
                    Pcds.append(Pcd)
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX))
                    PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC))
                    PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
        GuidList = OrderedDict(self.GuidList)
        for TokenSpace in self.GetGuidsUsedByPcd:
            # If token space is not referred by patch PCD or Ex PCD, remove the GUID from GUID list
            # The GUIDs in GUIDs section should really be the GUIDs in source INF or referred by Ex an patch PCDs
            if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
                GuidList.pop(TokenSpace)
        CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
        # Keep only packages that declare at least one referenced GUID/PPI/protocol/PCD.
        for Package in self.DerivedPackageList:
            if Package in Packages:
                continue
            BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
            Found = False
            for Index in range(len(BeChecked)):
                for Item in CheckList[Index]:
                    if Item in BeChecked[Index]:
                        Packages.append(Package)
                        Found = True
                        break
                if Found:
                    break
        # Also pull in packages declaring PCDs that VFR efivarstores may use.
        VfrPcds = self._GetPcdsMaybeUsedByVfr()
        for Pkg in self.PlatformInfo.PackageList:
            if Pkg in Packages:
                continue
            for VfrPcd in VfrPcds:
                if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC_EX) in Pkg.Pcds or
                    (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, TAB_PCDS_DYNAMIC) in Pkg.Pcds):
                    Packages.append(Pkg)
                    break
        # A UEFI driver that generated a depex is reported as a DXE driver.
        ModuleType = SUP_MODULE_DXE_DRIVER if self.ModuleType == SUP_MODULE_UEFI_DRIVER and self.DepexGenerated else self.ModuleType
        DriverType = self.PcdIsDriver if self.PcdIsDriver else ''
        Guid = self.Guid
        MDefs = self.Module.Defines
        # Template substitution dictionary for the as-built INF content.
        AsBuiltInfDict = {
            'module_name'                       : self.Name,
            'module_guid'                       : Guid,
            'module_module_type'                : ModuleType,
            'module_version_string'             : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
            'pcd_is_driver_string'              : [],
            'module_uefi_specification_version' : [],
            'module_pi_specification_version'   : [],
            'module_entry_point'                : self.Module.ModuleEntryPointList,
            'module_unload_image'               : self.Module.ModuleUnloadImageList,
            'module_constructor'                : self.Module.ConstructorList,
            'module_destructor'                 : self.Module.DestructorList,
            'module_shadow'                     : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
            'module_pci_vendor_id'              : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
            'module_pci_device_id'              : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
            'module_pci_class_code'             : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
            'module_pci_revision'               : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
            'module_build_number'               : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
            'module_spec'                       : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
            'module_uefi_hii_resource_section'  : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
            'module_uni_file'                   : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
            'module_arch'                       : self.Arch,
            'package_item'                      : [Package.MetaFile.File.replace('\\', '/') for Package in Packages],
            'binary_item'                       : [],
            'patchablepcd_item'                 : [],
            'pcd_item'                          : [],
            'protocol_item'                     : [],
            'ppi_item'                          : [],
            'guid_item'                         : [],
            'flags_item'                        : [],
            'libraryclasses_item'               : []
        }
        # Ship the module's UNI file alongside the as-built INF if present.
        if 'MODULE_UNI_FILE' in MDefs:
            UNIFile = os.path.join(self.MetaFile.Dir, MDefs['MODULE_UNI_FILE'])
            if os.path.isfile(UNIFile):
                shutil.copy2(UNIFile, self.OutputDir)
        if self.AutoGenVersion > int(gInfSpecVersion, 0):
            AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
        else:
            AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
        if DriverType:
            AsBuiltInfDict['pcd_is_driver_string'].append(DriverType)
        if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_uefi_specification_version'].append(self.Specification['UEFI_SPECIFICATION_VERSION'])
        if 'PI_SPECIFICATION_VERSION' in self.Specification:
            AsBuiltInfDict['module_pi_specification_version'].append(self.Specification['PI_SPECIFICATION_VERSION'])
        OutputDir = self.OutputDir.replace('\\', '/').strip('/')
        DebugDir = self.DebugDir.replace('\\', '/').strip('/')
        # Record each final target as a [Binaries] entry, classified by extension.
        for Item in self.CodaTargetList:
            File = Item.Target.Path.replace('\\', '/').strip('/').replace(DebugDir, '').replace(OutputDir, '').strip('/')
            if os.path.isabs(File):
                File = File.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
            if Item.Target.Ext.lower() == '.aml':
                AsBuiltInfDict['binary_item'].append('ASL|' + File)
            elif Item.Target.Ext.lower() == '.acpi':
                AsBuiltInfDict['binary_item'].append('ACPI|' + File)
            elif Item.Target.Ext.lower() == '.efi':
                AsBuiltInfDict['binary_item'].append('PE32|' + self.Name + '.efi')
            else:
                AsBuiltInfDict['binary_item'].append('BIN|' + File)
        # A .depex file on disk counts even if the flag was not set earlier.
        if not self.DepexGenerated:
            DepexFile = os.path.join(self.OutputDir, self.Name + '.depex')
            if os.path.exists(DepexFile):
                self.DepexGenerated = True
        if self.DepexGenerated:
            if self.ModuleType in [SUP_MODULE_PEIM]:
                AsBuiltInfDict['binary_item'].append('PEI_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_DRIVER, SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_UEFI_DRIVER]:
                AsBuiltInfDict['binary_item'].append('DXE_DEPEX|' + self.Name + '.depex')
            elif self.ModuleType in [SUP_MODULE_DXE_SMM_DRIVER]:
                AsBuiltInfDict['binary_item'].append('SMM_DEPEX|' + self.Name + '.depex')
        Bin = self._GenOffsetBin()
        if Bin:
            AsBuiltInfDict['binary_item'].append('BIN|%s' % Bin)
        # PDB debug files are listed as disposable binaries.
        for Root, Dirs, Files in os.walk(OutputDir):
            for File in Files:
                if File.lower().endswith('.pdb'):
                    AsBuiltInfDict['binary_item'].append('DISPOSABLE|' + File)
        HeaderComments = self.Module.HeaderComments
        StartPos = 0
        for Index in range(len(HeaderComments)):
            if HeaderComments[Index].find('@BinaryHeader') != -1:
                HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
                StartPos = Index
                break
        AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
        AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
        # Emit protocols, PPIs and GUIDs with their usage comments attached.
        GenList = [
            (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
            (self.PpiList, self._PpiComments, 'ppi_item'),
            (GuidList, self._GuidComments, 'guid_item')
        ]
        for Item in GenList:
            for CName in Item[0]:
                Comments = '\n  '.join(Item[1][CName]) if CName in Item[1] else ''
                Entry = Comments + '\n  ' + CName if Comments else CName
                AsBuiltInfDict[Item[2]].append(Entry)
        PatchList = parsePcdInfoFromMapFile(
                            os.path.join(self.OutputDir, self.Name + '.map'),
                            os.path.join(self.OutputDir, self.Name + '.efi')
                        )
        if PatchList:
            for Pcd in PatchablePcds:
                TokenCName = Pcd.TokenCName
                for PcdItem in GlobalData.MixedPcd:
                    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                        TokenCName = PcdItem[0]
                        break
                # Only PCDs that appear in the map file get a patchable entry.
                for PatchPcd in PatchList:
                    if TokenCName == PatchPcd[0]:
                        break
                else:
                    continue
                PcdValue = ''
                if Pcd.DatumType == 'BOOLEAN':
                    BoolValue = Pcd.DefaultValue.upper()
                    if BoolValue == 'TRUE':
                        Pcd.DefaultValue = '1'
                    elif BoolValue == 'FALSE':
                        Pcd.DefaultValue = '0'
                if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES:
                    HexFormat = '0x%02x'
                    if Pcd.DatumType == TAB_UINT16:
                        HexFormat = '0x%04x'
                    elif Pcd.DatumType == TAB_UINT32:
                        HexFormat = '0x%08x'
                    elif Pcd.DatumType == TAB_UINT64:
                        HexFormat = '0x%016x'
                    PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
                else:
                    if Pcd.MaxDatumSize is None or Pcd.MaxDatumSize == '':
                        EdkLogger.error("build", AUTOGEN_ERROR,
                                        "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
                                        )
                    ArraySize = int(Pcd.MaxDatumSize, 0)
                    PcdValue = Pcd.DefaultValue
                    if PcdValue[0] != '{':
                        Unicode = False
                        if PcdValue[0] == 'L':
                            Unicode = True
                        PcdValue = PcdValue.lstrip('L')
                        # NOTE(review): eval on a build-supplied default value;
                        # inputs come from the platform metadata, not end users.
                        PcdValue = eval(PcdValue)
                        NewValue = '{'
                        # Expand the string into a byte array (2 bytes/char for L"").
                        for Index in range(0, len(PcdValue)):
                            if Unicode:
                                CharVal = ord(PcdValue[Index])
                                NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
                                        + '0x%02x' % (CharVal >> 8) + ', '
                            else:
                                NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
                        Padding = '0x00, '
                        if Unicode:
                            Padding = Padding * 2
                            ArraySize = ArraySize // 2
                        if ArraySize < (len(PcdValue) + 1):
                            if Pcd.MaxSizeUserSet:
                                EdkLogger.error("build", AUTOGEN_ERROR,
                                                "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                                )
                            else:
                                ArraySize = len(PcdValue) + 1
                        if ArraySize > len(PcdValue) + 1:
                            NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
                        PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
                    elif len(PcdValue.split(',')) <= ArraySize:
                        # Already a byte-array literal: pad with zeros up to ArraySize.
                        PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
                        PcdValue += '}'
                    else:
                        if Pcd.MaxSizeUserSet:
                            EdkLogger.error("build", AUTOGEN_ERROR,
                                            "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
                                            )
                        else:
                            ArraySize = len(PcdValue) + 1
                PcdItem = '%s.%s|%s|0x%X' % \
                    (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
                PcdComments = ''
                if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                    PcdComments = '\n  '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
                if PcdComments:
                    PcdItem = PcdComments + '\n  ' + PcdItem
                AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
        # DynamicEx and VFR-referenced PCDs, with HII variable info folded into
        # the usage comment when applicable.
        for Pcd in Pcds + VfrPcds:
            PcdCommentList = []
            HiiInfo = ''
            TokenCName = Pcd.TokenCName
            for PcdItem in GlobalData.MixedPcd:
                if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                    TokenCName = PcdItem[0]
                    break
            if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
                for SkuName in Pcd.SkuInfoList:
                    SkuInfo = Pcd.SkuInfoList[SkuName]
                    HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
                    break
            if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
                PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
            if HiiInfo:
                UsageIndex = -1
                UsageStr = ''
                for Index, Comment in enumerate(PcdCommentList):
                    for Usage in UsageList:
                        if Comment.find(Usage) != -1:
                            UsageStr = Usage
                            UsageIndex = Index
                            break
                if UsageIndex != -1:
                    PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
                else:
                    PcdCommentList.append('## UNDEFINED ' + HiiInfo)
            PcdComments = '\n  '.join(PcdCommentList)
            PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
            if PcdComments:
                PcdEntry = PcdComments + '\n  ' + PcdEntry
            AsBuiltInfDict['pcd_item'].append(PcdEntry)
        for Item in self.BuildOption:
            if 'FLAGS' in self.BuildOption[Item]:
                AsBuiltInfDict['flags_item'].append('%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip()))
        # Generated LibraryClasses section in comments.
        for Library in self.LibraryAutoGenList:
            AsBuiltInfDict['libraryclasses_item'].append(Library.MetaFile.File.replace('\\', '/'))
        # Generated UserExtensions TianoCore section.
        # All tianocore user extensions are copied.
        UserExtStr = ''
        for TianoCore in self._GetTianoCoreUserExtensionList():
            UserExtStr += '\n'.join(TianoCore)
            ExtensionFile = os.path.join(self.MetaFile.Dir, TianoCore[1])
            if os.path.isfile(ExtensionFile):
                shutil.copy2(ExtensionFile, self.OutputDir)
        AsBuiltInfDict['userextension_tianocore_item'] = UserExtStr
        # Generated depex expression section in comments.
        DepexExpression = self._GetDepexExpresionString()
        AsBuiltInfDict['depexsection_item'] = DepexExpression if DepexExpression else ''
        AsBuiltInf = TemplateString()
        AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
        SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
        self.IsAsBuiltInfCreated = True
def CacheCopyFile(self, DestDir, SourceDir, File):
    """Copy one cached file from SourceDir into the mirrored layout under DestDir.

    Directories are ignored. Copy failures are reported as a warning and
    swallowed so a cache problem never aborts the build.
    """
    # Only regular files are mirrored into the cache tree.
    if os.path.isdir(File):
        return
    # Rebuild the file's relative layout underneath the destination root.
    relative_name = os.path.relpath(File, SourceDir)
    target_dir = os.path.dirname(os.path.join(DestDir, relative_name))
    CreateDirectory(target_dir)
    try:
        CopyFileOnChange(File, target_dir)
    except:
        # Best effort: a failed copy only degrades the cache, so warn and go on.
        EdkLogger.quiet("[cache warning]: fail to copy file:%s to folder:%s" % (File, target_dir))
        return
def CopyModuleToCache(self):
    """Publish this module's build results into the binary cache destination.

    Recovers the MakeHash and PreMakeHash strings from the newest
    *.MakeHashFileList.* / *.PreMakeHashFileList.* files in BuildDir, records
    the (PreMakeHash, MakeHash) pair in the module's .ModuleHashPair file,
    then copies the module outputs into per-hash cache directories under
    GlobalData.gBinCacheDest.
    """
    # Find the MakeHashStr and PreMakeHashStr from latest MakeHashFileList
    # and PreMakeHashFileList files
    MakeHashStr = None
    PreMakeHashStr = None
    MakeTimeStamp = 0
    PreMakeTimeStamp = 0
    Files = [f for f in os.listdir(LongFilePath(self.BuildDir)) if path.isfile(LongFilePath(path.join(self.BuildDir, f)))]
    for File in Files:
        if ".MakeHashFileList." in File:
            #find lastest file through time stamp
            FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]  # index 8 == st_mtime
            if FileTimeStamp > MakeTimeStamp:
                MakeTimeStamp = FileTimeStamp
                MakeHashStr = File.split('.')[-1]
                if len(MakeHashStr) != 32:
                    # 32 hex chars == an md5 hexdigest; anything else is malformed
                    EdkLogger.quiet("[cache error]: wrong MakeHashFileList file:%s" % (File))
        if ".PreMakeHashFileList." in File:
            FileTimeStamp = os.stat(LongFilePath(path.join(self.BuildDir, File)))[8]
            if FileTimeStamp > PreMakeTimeStamp:
                PreMakeTimeStamp = FileTimeStamp
                PreMakeHashStr = File.split('.')[-1]
                if len(PreMakeHashStr) != 32:
                    EdkLogger.quiet("[cache error]: wrong PreMakeHashFileList file:%s" % (File))
    if not MakeHashStr:
        EdkLogger.quiet("[cache error]: No MakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
        return
    if not PreMakeHashStr:
        EdkLogger.quiet("[cache error]: No PreMakeHashFileList file for module:%s[%s]" % (self.MetaFile.Path, self.Arch))
        return
    # Create Cache destination dirs
    FileDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheDest, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
    CacheFileDir = path.join(FileDir, MakeHashStr)
    CacheFfsDir = path.join(FfsDir, MakeHashStr)
    CreateDirectory (CacheFileDir)
    CreateDirectory (CacheFfsDir)
    # Create ModuleHashPair file to support multiple version cache together
    ModuleHashPair = path.join(FileDir, self.Name + ".ModuleHashPair")
    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    if os.path.exists(ModuleHashPair):
        with open(ModuleHashPair, 'r') as f:
            ModuleHashPairList = json.load(f)
    # json.load yields lists, so normalize to tuples before the membership test
    if not (PreMakeHashStr, MakeHashStr) in set(map(tuple, ModuleHashPairList)):
        ModuleHashPairList.insert(0, (PreMakeHashStr, MakeHashStr))
    with open(ModuleHashPair, 'w') as f:
        json.dump(ModuleHashPairList, f, indent=2)
    # Copy files to Cache destination dirs
    if not self.OutputFile:
        Ma = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
        self.OutputFile = Ma.Binaries
    for File in self.OutputFile:
        if File.startswith(os.path.abspath(self.FfsOutputDir)+os.sep):
            # FFS outputs go into the per-MakeHash Ffs cache directory
            self.CacheCopyFile(CacheFfsDir, self.FfsOutputDir, File)
        else:
            if self.Name + ".autogen.hash." in File or \
                self.Name + ".autogen.hashchain." in File or \
                self.Name + ".hash." in File or \
                self.Name + ".hashchain." in File or \
                self.Name + ".PreMakeHashFileList." in File or \
                self.Name + ".MakeHashFileList." in File:
                # Hash bookkeeping files live at the module cache root (FileDir),
                # not inside the per-MakeHash directory
                self.CacheCopyFile(FileDir, self.BuildDir, File)
            else:
                self.CacheCopyFile(CacheFileDir, self.BuildDir, File)
## Create makefile for the module and its dependent libraries
#
#   @param      CreateLibraryMakeFile   Flag indicating if or not the makefiles of
#                                       dependent libraries will be created
#
# NOTE(review): GenFfsList=[] is a mutable default argument; it is assigned to
# self.GenFfsList but presumably never mutated in place - verify before relying on it.
@cached_class_function
def CreateMakeFile(self, CreateLibraryMakeFile=True, GenFfsList = []):
    # nest this function inside it's only caller.
    def CreateTimeStamp():
        # Record every file whose change must invalidate the incremental
        # skip in CanSkip(): the INF itself, sources, library INFs and
        # the autogen dependency set.
        FileSet = {self.MetaFile.Path}
        for SourceFile in self.Module.Sources:
            FileSet.add (SourceFile.Path)
        for Lib in self.DependentLibraryList:
            FileSet.add (Lib.MetaFile.Path)
        for f in self.AutoGenDepSet:
            FileSet.add (f.Path)
        if os.path.exists (self.TimeStampPath):
            os.remove (self.TimeStampPath)
        SaveFileOnChange(self.TimeStampPath, "\n".join(FileSet), False)
    # Ignore generating makefile when it is a binary module
    if self.IsBinaryModule:
        return
    self.GenFfsList = GenFfsList
    # Libraries are generated first so the module makefile can reference them.
    if not self.IsLibrary and CreateLibraryMakeFile:
        for LibraryAutoGen in self.LibraryAutoGenList:
            LibraryAutoGen.CreateMakeFile()
    # CanSkip uses timestamps to determine build skipping
    if self.CanSkip():
        return
    if len(self.CustomMakefile) == 0:
        Makefile = GenMake.ModuleMakefile(self)
    else:
        Makefile = GenMake.CustomMakefile(self)
    if Makefile.Generate():
        EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
                        (self.Name, self.Arch))
    else:
        EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
                        (self.Name, self.Arch))
    CreateTimeStamp()
    # Persist the resolved makefile path in <Name>.makefile so later cache
    # steps (GenCMakeHash) can find it.
    MakefileType = Makefile._FileType
    MakefileName = Makefile._FILE_NAME_[MakefileType]
    MakefilePath = os.path.join(self.MakeFileDir, MakefileName)
    FilePath = path.join(self.BuildDir, self.Name + ".makefile")
    SaveFileOnChange(FilePath, MakefilePath, False)
def CopyBinaryFiles(self):
    """Copy every prebuilt binary declared by the module into OutputDir."""
    for Binary in self.Module.Binaries:
        source = Binary.Path
        # Flatten: only the base file name is kept under the module output dir.
        CopyLongFilePath(source, os.path.join(self.OutputDir, os.path.basename(source)))
## Create autogen code for the module and its dependent libraries
#
#   @param      CreateLibraryCodeFile   Flag indicating if or not the code of
#                                       dependent libraries will be created
#
def CreateCodeFile(self, CreateLibraryCodeFile=True):
    """Generate AutoGen.c/.h and depex files for this module.

    Returns the list of generated file names, or None on the early-return
    paths (already created, or binary module).
    """
    if self.IsCodeFileCreated:
        return
    # Need to generate PcdDatabase even PcdDriver is binarymodule
    if self.IsBinaryModule and self.PcdIsDriver != '':
        CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
        return
    if self.IsBinaryModule:
        if self.IsLibrary:
            self.CopyBinaryFiles()
        return
    if not self.IsLibrary and CreateLibraryCodeFile:
        for LibraryAutoGen in self.LibraryAutoGenList:
            LibraryAutoGen.CreateCodeFile()
    # Bare attribute access - presumably forces evaluation of the
    # LibraryAutoGenList cached property for its side effects
    # (_ApplyBuildRule registration); verify before removing.
    self.LibraryAutoGenList
    AutoGenList = []
    IgoredAutoGenList = []
    for File in self.AutoGenFileList:
        # GenC.Generate returns truthy only when the file content changed
        if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
            AutoGenList.append(str(File))
        else:
            IgoredAutoGenList.append(str(File))
    for ModuleType in self.DepexList:
        # Ignore empty [depex] section or [depex] section for SUP_MODULE_USER_DEFINED module
        if len(self.DepexList[ModuleType]) == 0 or ModuleType == SUP_MODULE_USER_DEFINED or ModuleType == SUP_MODULE_HOST_APPLICATION:
            continue
        Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
        DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
        if len(Dpx.PostfixNotation) != 0:
            self.DepexGenerated = True
        if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
            AutoGenList.append(str(DpxFile))
        else:
            IgoredAutoGenList.append(str(DpxFile))
    if IgoredAutoGenList == []:
        EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
                        (" ".join(AutoGenList), self.Name, self.Arch))
    elif AutoGenList == []:
        EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
                        (" ".join(IgoredAutoGenList), self.Name, self.Arch))
    else:
        EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
                        (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
    self.IsCodeFileCreated = True
    return AutoGenList
## Summarize the ModuleAutoGen objects of all libraries used by this module
@cached_property
def LibraryAutoGenList(self):
    """List of ModuleAutoGen objects for this module's dependent libraries.

    Side effect: for each newly added library, its CodaTargetList targets are
    registered through _ApplyBuildRule so they join this module's build graph.
    """
    RetVal = []
    for Library in self.DependentLibraryList:
        La = ModuleAutoGen(
                self.Workspace,
                Library.MetaFile,
                self.BuildTarget,
                self.ToolChain,
                self.Arch,
                self.PlatformInfo.MetaFile,
                self.DataPipe
                )
        La.IsLibrary = True
        if La not in RetVal:
            RetVal.append(La)
            for Lib in La.CodaTargetList:
                self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
    return RetVal
def GenCMakeHash(self):
    """Hash the module's AutoGen files plus its recorded makefile path and
    write the <Name>.autogen.hashchain.<md5> file.

    Registers the chain file in GlobalData.gCMakeHashFile. Returns False only
    when saving the chain file fails; otherwise returns None.
    """
    # GenCMakeHash can only be called in --binary-destination
    # Never called in multiprocessing and always directly save result in main process,
    # so no need remote dict to share the gCMakeHashFile result with main process
    DependencyFileSet = set()
    # Add AutoGen files
    if self.AutoGenFileList:
        for File in set(self.AutoGenFileList):
            DependencyFileSet.add(File)
    # Add Makefile
    abspath = path.join(self.BuildDir, self.Name + ".makefile")
    # Fix: 'lines' was previously unbound when the open failed, because
    # EdkLogger.error(..., RaiseError=False) returns and the following
    # 'if lines:' then raised NameError instead of skipping the makefile.
    lines = None
    try:
        with open(LongFilePath(abspath), "r") as fd:
            lines = fd.readlines()
    except Exception as e:
        EdkLogger.error("build", FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
    if lines:
        DependencyFileSet.update(lines)
    # Caculate all above dependency files hash
    # Initialze hash object
    FileList = []
    m = hashlib.md5()
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        if not path.exists(LongFilePath(str(File))):
            EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            continue
        with open(LongFilePath(str(File)), 'rb') as f:
            Content = f.read()
        m.update(Content)
        # Keep a per-file digest so cache misses can be diagnosed file by file
        FileList.append((str(File), hashlib.md5(Content).hexdigest()))
    # The chain file name embeds the aggregate md5 of all contents
    HashChainFile = path.join(self.BuildDir, self.Name + ".autogen.hashchain." + m.hexdigest())
    GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
    try:
        with open(LongFilePath(HashChainFile), 'w') as f:
            json.dump(FileList, f, indent=2)
    except:
        EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
        return False
def GenModuleHash(self):
    """Hash the module meta file, its sources and included headers, then write
    the <Name>.hashchain.<md5> file and register it for later cache steps.

    Returns False only when saving the chain file fails; otherwise None.
    """
    # GenModuleHash only called after autogen phase
    # Never called in multiprocessing and always directly save result in main process,
    # so no need remote dict to share the gModuleHashFile result with main process
    #
    # GenPreMakefileHashList consume no dict.
    # GenPreMakefileHashList produce local gModuleHashFile dict.
    DependencyFileSet = set()
    # Add Module Meta file
    DependencyFileSet.add(self.MetaFile.Path)
    # Add Module's source files
    if self.SourceFileList:
        for File in set(self.SourceFileList):
            DependencyFileSet.add(File.Path)
    # Add modules's include header files
    # Directly use the deps.txt file in the module BuildDir
    abspath = path.join(self.BuildDir, "deps.txt")
    rt = None
    try:
        with open(LongFilePath(abspath),"r") as fd:
            lines = fd.readlines()
            if lines:
                # Keep only .h entries; strip leading whitespace and newline
                rt = set([item.lstrip().strip("\n") for item in lines if item.strip("\n").endswith(".h")])
    except Exception as e:
        # RaiseError=False: a missing deps.txt is logged but not fatal
        EdkLogger.error("build",FILE_NOT_FOUND, "%s doesn't exist" % abspath, ExtraData=str(e), RaiseError=False)
    if rt:
        DependencyFileSet.update(rt)
    # Caculate all above dependency files hash
    # Initialze hash object
    FileList = []
    m = hashlib.md5()
    BuildDirStr = path.abspath(self.BuildDir).lower()
    for File in sorted(DependencyFileSet, key=lambda x: str(x)):
        # Skip the AutoGen files in BuildDir which already been
        # included in .autogen.hash. file
        if BuildDirStr in path.abspath(File).lower():
            continue
        if not path.exists(LongFilePath(File)):
            EdkLogger.quiet("[cache warning]: header file %s is missing for module: %s[%s]" % (File, self.MetaFile.Path, self.Arch))
            continue
        with open(LongFilePath(File), 'rb') as f:
            Content = f.read()
        m.update(Content)
        # Keep a per-file digest so cache misses can be diagnosed file by file
        FileList.append((File, hashlib.md5(Content).hexdigest()))
    # The chain file name embeds the aggregate md5 of all contents
    HashChainFile = path.join(self.BuildDir, self.Name + ".hashchain." + m.hexdigest())
    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
    try:
        with open(LongFilePath(HashChainFile), 'w') as f:
            json.dump(FileList, f, indent=2)
    except:
        EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)
        return False
def GenPreMakefileHashList(self):
    """Aggregate platform, package, module and library hash files into the
    <Name>.PreMakeHashFileList.<md5> file in BuildDir.

    The md5 in the file name is computed over the hash file *paths*, not
    their contents.
    """
    # GenPreMakefileHashList consume below dicts:
    #     gPlatformHashFile
    #     gPackageHashFile
    #     gModuleHashFile
    # GenPreMakefileHashList produce no dict.
    # gModuleHashFile items might be produced in multiprocessing, so
    # need check gModuleHashFile remote dict
    # skip binary module
    if self.IsBinaryModule:
        return
    FileList = []
    m = hashlib.md5()
    # Add Platform level hash
    HashFile = GlobalData.gPlatformHashFile
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]: No Platform HashFile: %s" % HashFile)
    # Add Package level hash
    if self.DependentPackageList:
        for Pkg in sorted(self.DependentPackageList, key=lambda x: x.PackageName):
            if not (Pkg.PackageName, Pkg.Arch) in GlobalData.gPackageHashFile:
                EdkLogger.quiet("[cache warning]:No Package %s for module %s[%s]" % (Pkg.PackageName, self.MetaFile.Path, self.Arch))
                continue
            HashFile = GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)]
            if path.exists(LongFilePath(HashFile)):
                FileList.append(HashFile)
                m.update(HashFile.encode('utf-8'))
            else:
                EdkLogger.quiet("[cache warning]:No Package HashFile: %s" % HashFile)
    # Add Module self
    # GenPreMakefileHashList needed in both --binary-destination
    # and --hash. And --hash might save ModuleHashFile in remote dict
    # during multiprocessing.
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
        HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
    else:
        # NOTE(review): on this path HashFile still holds the previous loop's
        # value, so the exists-check below re-tests a stale file - verify
        # whether an early return/continue is intended here.
        EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
    # Add Library hash
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
            if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
                HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
            else:
                # NOTE(review): same stale-HashFile pattern as above.
                EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
            if path.exists(LongFilePath(HashFile)):
                FileList.append(HashFile)
                m.update(HashFile.encode('utf-8'))
            else:
                EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
    # Save PreMakeHashFileList
    FilePath = path.join(self.BuildDir, self.Name + ".PreMakeHashFileList." + m.hexdigest())
    try:
        with open(LongFilePath(FilePath), 'w') as f:
            json.dump(FileList, f, indent=0)
    except:
        EdkLogger.quiet("[cache warning]: fail to save PreMake HashFileList: %s" % FilePath)
def GenMakefileHashList(self):
    """Aggregate the autogen (CMake) hash, the module hash and the library
    hashes into the <Name>.MakeHashFileList.<md5> file in BuildDir.

    Like GenPreMakefileHashList, the md5 in the file name is computed over
    the hash file *paths*.
    """
    # GenMakefileHashList only need in --binary-destination which will
    # everything in local dict. So don't need check remote dict.
    # skip binary module
    if self.IsBinaryModule:
        return
    FileList = []
    m = hashlib.md5()
    # Add AutoGen hash
    HashFile = GlobalData.gCMakeHashFile[(self.MetaFile.Path, self.Arch)]
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]:No AutoGen HashFile: %s" % HashFile)
    # Add Module self
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleHashFile:
        HashFile = GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)]
    else:
        # NOTE(review): on this path HashFile still holds the previous value,
        # so the exists-check below re-tests a stale file - verify intent.
        EdkLogger.quiet("[cache error]:No ModuleHashFile for module: %s[%s]" % (self.MetaFile.Path, self.Arch))
    if path.exists(LongFilePath(HashFile)):
        FileList.append(HashFile)
        m.update(HashFile.encode('utf-8'))
    else:
        EdkLogger.quiet("[cache warning]:No Module HashFile: %s" % HashFile)
    # Add Library hash
    if self.LibraryAutoGenList:
        for Lib in sorted(self.LibraryAutoGenList, key=lambda x: x.MetaFile.Path):
            if (Lib.MetaFile.Path, Lib.Arch) in GlobalData.gModuleHashFile:
                HashFile = GlobalData.gModuleHashFile[(Lib.MetaFile.Path, Lib.Arch)]
            else:
                # NOTE(review): same stale-HashFile pattern as above.
                EdkLogger.quiet("[cache error]:No ModuleHashFile for lib: %s[%s]" % (Lib.MetaFile.Path, Lib.Arch))
            if path.exists(LongFilePath(HashFile)):
                FileList.append(HashFile)
                m.update(HashFile.encode('utf-8'))
            else:
                EdkLogger.quiet("[cache warning]:No Lib HashFile: %s" % HashFile)
    # Save MakeHashFileList
    FilePath = path.join(self.BuildDir, self.Name + ".MakeHashFileList." + m.hexdigest())
    try:
        with open(LongFilePath(FilePath), 'w') as f:
            json.dump(FileList, f, indent=0)
    except:
        EdkLogger.quiet("[cache warning]: fail to save Make HashFileList: %s" % FilePath)
def CheckHashChainFile(self, HashChainFile):
    """Verify that every (file, md5) pair recorded in HashChainFile still
    matches the current content of that file on disk.

    Returns True when the whole chain matches; False on a malformed chain
    file name, an unreadable chain file, a removed source file, or any
    content mismatch.
    """
    # Assume the HashChainFile basename format is the 'x.hashchain.16BytesHexStr'
    # The x is module name and the 16BytesHexStr is md5 hexdigest of
    # all hashchain files content
    HashStr = HashChainFile.split('.')[-1]
    if len(HashStr) != 32:
        # Fix: the original formatted an undefined name 'File' here, raising
        # NameError instead of reporting the malformed chain file name.
        EdkLogger.quiet("[cache error]: wrong format HashChainFile:%s" % (HashChainFile))
        return False
    try:
        with open(LongFilePath(HashChainFile), 'r') as f:
            HashChainList = json.load(f)
    except:
        EdkLogger.quiet("[cache error]: fail to load HashChainFile: %s" % HashChainFile)
        return False
    # Print the different file info
    # print(HashChainFile)
    # gFileHashDict memoizes per-file digests process-wide so each source
    # file is hashed at most once per build.
    for SrcFile, SrcHash in HashChainList:
        if SrcFile in GlobalData.gFileHashDict:
            DestHash = GlobalData.gFileHashDict[SrcFile]
        else:
            try:
                with open(LongFilePath(SrcFile), 'rb') as f:
                    Content = f.read()
                DestHash = hashlib.md5(Content).hexdigest()
                GlobalData.gFileHashDict[SrcFile] = DestHash
            except IOError:
                # cache miss if SrcFile is removed in new version code
                GlobalData.gFileHashDict[SrcFile] = 0
                EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
                return False
        if SrcHash != DestHash:
            EdkLogger.quiet("[cache insight]: first cache miss file in %s is %s" % (HashChainFile, SrcFile))
            return False
    return True
## Decide whether we can skip the left autogen and make process
def CanSkipbyMakeCache(self):
    """Try to satisfy this module's build from the make cache (--binary-source).

    On a cache hit the cached build outputs are restored into BuildDir /
    FfsOutputDir and True is returned; otherwise False. The verdict is
    memoized in GlobalData.gModuleMakeCacheStatus either way.
    """
    # For --binary-source only
    # CanSkipbyMakeCache consume below dicts:
    #     gModuleMakeCacheStatus
    #     gHashChainStatus
    # GenPreMakefileHashList produce gModuleMakeCacheStatus, gModuleHashFile dict.
    # all these dicts might be produced in multiprocessing, so
    # need check these remote dict
    if not GlobalData.gBinCacheSource:
        return False
    # Memoized verdict from an earlier call (possibly another process)
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModuleMakeCacheStatus:
        return GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
    # If Module is binary, which has special build rule, do not skip by cache.
    if self.IsBinaryModule:
        print("[cache miss]: MakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        return False
    # see .inc as binary file, do not skip by hash
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            print("[cache miss]: MakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
            GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False
    ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
    try:
        with open(LongFilePath(ModuleHashPair), 'r') as f:
            ModuleHashPairList = json.load(f)
    except:
        # ModuleHashPair might not exist for new added module
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
        return False
    # Check the PreMakeHash in ModuleHashPairList one by one
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        SourceHashDir = path.join(ModuleCacheDir, MakeHash)
        SourceFfsHashDir = path.join(FfsDir, MakeHash)
        PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
        MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
        try:
            with open(LongFilePath(MakeHashFileList_FilePah), 'r') as f:
                MakeHashFileList = json.load(f)
        except:
            EdkLogger.quiet("[cache error]: fail to load MakeHashFileList file: %s" % MakeHashFileList_FilePah)
            continue
        HashMiss = False
        for HashChainFile in MakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            # Convert to path start with cache source dir
            RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
            NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
            if self.CheckHashChainFile(NewFilePath):
                GlobalData.gHashChainStatus[HashChainFile] = True
                # Save the module self HashFile for GenPreMakefileHashList later usage
                if self.Name + ".hashchain." in HashChainFile:
                    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break
        if HashMiss:
            continue
        # PreMakefile cache hit, restore the module build result
        for root, dir, files in os.walk(SourceHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
        if os.path.exists(SourceFfsHashDir):
            for root, dir, files in os.walk(SourceFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
        # PCD database drivers need their database code regenerated after restore
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
        print("[cache hit]: MakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True
    print("[cache miss]: MakeCache:", self.MetaFile.Path, self.Arch)
    GlobalData.gModuleMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
    return False
## Decide whether we can skip the left autogen and make process
def CanSkipbyPreMakeCache(self):
    """Try to skip this module's build using the pre-makefile hash cache.

    Two modes: with --hash only (no --binary-source) it just validates the
    latest local PreMakeHashFileList; with --binary-source it also restores
    the cached build outputs on a hit. The verdict is memoized in
    GlobalData.gModulePreMakeCacheStatus.
    """
    # CanSkipbyPreMakeCache consume below dicts:
    #     gModulePreMakeCacheStatus
    #     gHashChainStatus
    #     gModuleHashFile
    # GenPreMakefileHashList produce gModulePreMakeCacheStatus dict.
    # all these dicts might be produced in multiprocessing, so
    # need check these remote dicts
    if not GlobalData.gUseHashCache or GlobalData.gBinCacheDest:
        return False
    # Memoized verdict from an earlier call (possibly another process)
    if (self.MetaFile.Path, self.Arch) in GlobalData.gModulePreMakeCacheStatus:
        return GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)]
    # If Module is binary, which has special build rule, do not skip by cache.
    if self.IsBinaryModule:
        print("[cache miss]: PreMakeCache: Skip BinaryModule:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        return False
    # see .inc as binary file, do not skip by hash
    for f_ext in self.SourceFileList:
        if '.inc' in str(f_ext):
            print("[cache miss]: PreMakeCache: Skip '.inc' File:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False
    # For --hash only in the incremental build
    if not GlobalData.gBinCacheSource:
        Files = [path.join(self.BuildDir, f) for f in os.listdir(self.BuildDir) if path.isfile(path.join(self.BuildDir, f))]
        PreMakeHashFileList_FilePah = None
        MakeTimeStamp = 0
        # Find latest PreMakeHashFileList file in self.BuildDir folder
        for File in Files:
            if ".PreMakeHashFileList." in File:
                FileTimeStamp = os.stat(path.join(self.BuildDir, File))[8]  # index 8 == st_mtime
                if FileTimeStamp > MakeTimeStamp:
                    MakeTimeStamp = FileTimeStamp
                    PreMakeHashFileList_FilePah = File
        if not PreMakeHashFileList_FilePah:
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False
        try:
            with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
                PreMakeHashFileList = json.load(f)
        except:
            EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False
        HashMiss = False
        for HashChainFile in PreMakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            if self.CheckHashChainFile(HashChainFile):
                GlobalData.gHashChainStatus[HashChainFile] = True
                # Save the module self HashFile for GenPreMakefileHashList later usage
                if self.Name + ".hashchain." in HashChainFile:
                    GlobalData.gModuleHashFile[(self.MetaFile.Path, self.Arch)] = HashChainFile
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break
        if HashMiss:
            print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
            return False
        else:
            print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
            GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
            return True
    ModuleCacheDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, self.Arch, self.SourceDir, self.MetaFile.BaseName)
    FfsDir = path.join(GlobalData.gBinCacheSource, self.PlatformInfo.OutputDir, self.BuildTarget + "_" + self.ToolChain, TAB_FV_DIRECTORY, "Ffs", self.Guid + self.Name)
    ModuleHashPairList = [] # tuple list: [tuple(PreMakefileHash, MakeHash)]
    ModuleHashPair = path.join(ModuleCacheDir, self.Name + ".ModuleHashPair")
    try:
        with open(LongFilePath(ModuleHashPair), 'r') as f:
            ModuleHashPairList = json.load(f)
    except:
        # ModuleHashPair might not exist for new added module
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
        EdkLogger.quiet("[cache warning]: fail to load ModuleHashPair file: %s" % ModuleHashPair)
        print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        return False
    # Check the PreMakeHash in ModuleHashPairList one by one
    for idx, (PreMakefileHash, MakeHash) in enumerate (ModuleHashPairList):
        SourceHashDir = path.join(ModuleCacheDir, MakeHash)
        SourceFfsHashDir = path.join(FfsDir, MakeHash)
        PreMakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".PreMakeHashFileList." + PreMakefileHash)
        MakeHashFileList_FilePah = path.join(ModuleCacheDir, self.Name + ".MakeHashFileList." + MakeHash)
        try:
            with open(LongFilePath(PreMakeHashFileList_FilePah), 'r') as f:
                PreMakeHashFileList = json.load(f)
        except:
            EdkLogger.quiet("[cache error]: fail to load PreMakeHashFileList file: %s" % PreMakeHashFileList_FilePah)
            continue
        HashMiss = False
        for HashChainFile in PreMakeHashFileList:
            HashChainStatus = None
            if HashChainFile in GlobalData.gHashChainStatus:
                HashChainStatus = GlobalData.gHashChainStatus[HashChainFile]
            if HashChainStatus == False:
                HashMiss = True
                break
            elif HashChainStatus == True:
                continue
            # Convert to path start with cache source dir
            RelativePath = os.path.relpath(HashChainFile, self.WorkspaceDir)
            NewFilePath = os.path.join(GlobalData.gBinCacheSource, RelativePath)
            if self.CheckHashChainFile(NewFilePath):
                GlobalData.gHashChainStatus[HashChainFile] = True
            else:
                GlobalData.gHashChainStatus[HashChainFile] = False
                HashMiss = True
                break
        if HashMiss:
            continue
        # PreMakefile cache hit, restore the module build result
        for root, dir, files in os.walk(SourceHashDir):
            for f in files:
                File = path.join(root, f)
                self.CacheCopyFile(self.BuildDir, SourceHashDir, File)
        if os.path.exists(SourceFfsHashDir):
            for root, dir, files in os.walk(SourceFfsHashDir):
                for f in files:
                    File = path.join(root, f)
                    self.CacheCopyFile(self.FfsOutputDir, SourceFfsHashDir, File)
        # PCD database drivers need their database code regenerated after restore
        if self.Name == "PcdPeim" or self.Name == "PcdDxe":
            CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
        print("[cache hit]: PreMakeCache:", self.MetaFile.Path, self.Arch)
        GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = True
        return True
    print("[cache miss]: PreMakeCache:", self.MetaFile.Path, self.Arch)
    GlobalData.gModulePreMakeCacheStatus[(self.MetaFile.Path, self.Arch)] = False
    return False
## Decide whether we can skip the Module build
def CanSkipbyCache(self, gHitSet):
    """Return True when a binary-cache restore already produced this module.

    Only meaningful with --binary-source; with the cache feature off the
    module must always be built.
    """
    return bool(GlobalData.gBinCacheSource) and self in gHitSet
## Decide whether we can skip the ModuleAutoGen process
#  If any source file is newer than the module than we cannot skip
#
def CanSkip(self):
    """Timestamp-based incremental check: True when no recorded dependency is
    newer than the module's AutoGenTimeStamp file."""
    # Don't skip if cache feature enabled
    if GlobalData.gUseHashCache or GlobalData.gBinCacheDest or GlobalData.gBinCacheSource:
        return False
    # Already verified once during this build
    if self.MakeFileDir in GlobalData.gSikpAutoGenCache:
        return True
    if not os.path.exists(self.TimeStampPath):
        return False
    #last creation time of the module
    DstTimeStamp = os.stat(self.TimeStampPath)[8]  # index 8 == st_mtime
    SrcTimeStamp = self.Workspace._SrcTimeStamp
    if SrcTimeStamp > DstTimeStamp:
        return False
    # TimeStampPath lists every dependency path, one per line (see
    # CreateTimeStamp in CreateMakeFile).
    with open(self.TimeStampPath,'r') as f:
        for source in f:
            source = source.rstrip('\n')
            if not os.path.exists(source):
                return False
            # Class-level memo: each dependency is stat'ed once per build
            if source not in ModuleAutoGen.TimeDict :
                ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
            if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
                return False
    GlobalData.gSikpAutoGenCache.add(self.MakeFileDir)
    return True
@cached_property
def TimeStampPath(self):
    """Path of the AutoGenTimeStamp file used by CanSkip() for incremental builds."""
    stamp_name = 'AutoGenTimeStamp'
    return os.path.join(self.MakeFileDir, stamp_name)
| edk2-master | BaseTools/Source/Python/AutoGen/ModuleAutoGen.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019 - 2021, Intel Corporation. All rights reserved.<BR>
# Copyright (c) 2020, ARM Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import os.path as path
import copy
from collections import defaultdict
from .BuildEngine import BuildRule,gDefaultBuildRuleFile,AutoGenReqBuildRuleVerNum
from .GenVar import VariableMgr, var_info
from . import GenMake
from AutoGen.DataPipe import MemoryDataPipe
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.AutoGen import AutoGen
from AutoGen.AutoGen import CalculatePriorityValue
from Workspace.WorkspaceCommon import GetModuleLibInstances
from CommonDataClass.CommonClass import SkuInfoClass
from Common.caching import cached_class_function
from Common.Expression import ValueExpressionEx
from Common.StringUtils import StringToArray,NormPath
from Common.BuildToolError import *
from Common.DataType import *
from Common.Misc import *
import Common.VpdInfoFile as VpdInfoFile
## Split command line option string to list
#
# subprocess.Popen needs the args to be a sequence. Otherwise there's problem
# in non-windows platform to launch command
#
def _SplitOption(OptionString):
OptionList = []
LastChar = " "
OptionStart = 0
QuotationMark = ""
for Index in range(0, len(OptionString)):
CurrentChar = OptionString[Index]
if CurrentChar in ['"', "'"]:
if QuotationMark == CurrentChar:
QuotationMark = ""
elif QuotationMark == "":
QuotationMark = CurrentChar
continue
elif QuotationMark:
continue
if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
if Index > OptionStart:
OptionList.append(OptionString[OptionStart:Index - 1])
OptionStart = Index
LastChar = CurrentChar
OptionList.append(OptionString[OptionStart:])
return OptionList
## AutoGen class for platform
#
# PlatformAutoGen class will process the original information in platform
# file in order to generate makefile for platform.
#
class PlatformAutoGen(AutoGen):
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
    # Guard so repeated construction of the same (cached) instance only
    # runs the expensive initialization once; _InitWorker does the real setup.
    if not hasattr(self, "_Init"):
        self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
        self._Init = True
#
# Used to store all PCDs for both PEI and DXE phase, in order to generate
# correct PCD database
#
# NOTE(review): these are class-level (shared) mutable containers rather than
# per-instance attributes - presumably intentional accumulators; verify
# before converting them to instance state.
_DynaPcdList_ = []
_NonDynaPcdList_ = []
_PlatformPcds = {}
## Initialize PlatformAutoGen
#
#
#   @param      Workspace       WorkspaceAutoGen object
#   @param      PlatformFile    Platform file (DSC file)
#   @param      Target          Build target (DEBUG, RELEASE)
#   @param      Toolchain       Name of tool chain
#   @param      Arch            arch of the platform supports
#
def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
    EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
    # Record what is being processed for diagnostics/error reporting
    GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
    self.MetaFile = PlatformFile
    self.Workspace = Workspace
    self.WorkspaceDir = Workspace.WorkspaceDir
    self.ToolChain = Toolchain
    self.BuildTarget = Target
    self.Arch = Arch
    self.SourceDir = PlatformFile.SubDir
    self.FdTargetList = self.Workspace.FdTargetList
    self.FvTargetList = self.Workspace.FvTargetList
    # get the original module/package/platform objects
    self.BuildDatabase = Workspace.BuildDatabase
    self.DscBuildDataObj = Workspace.Platform
    # MakeFileName is used to get the Makefile name and as a flag
    # indicating whether the file has been created.
    self.MakeFileName = ""
    self._DynamicPcdList = None    # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
    self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
    self._AsBuildInfList = []
    self._AsBuildModuleList = []
    self.VariableInfo = None
    # Collect binary (as-built) module INFs from the FDF profile, when present
    if GlobalData.gFdfParser is not None:
        self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
        for Inf in self._AsBuildInfList:
            InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
            M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
            if not M.IsBinaryModule:
                continue
            self._AsBuildModuleList.append(InfClass)
    # get library/modules for build
    self.LibraryBuildDirectoryList = []
    self.ModuleBuildDirectoryList = []
    self.DataPipe = MemoryDataPipe(self.BuildDir)
    self.DataPipe.FillData(self)
    # NOTE(review): the return value is not used by __init__ - presumably
    # historical; verify before relying on it.
    return True
def FillData_LibConstPcd(self):
libConstPcd = {}
for LibAuto in self.LibraryAutoGenList:
if LibAuto.ConstPcd:
libConstPcd[(LibAuto.MetaFile.File,LibAuto.MetaFile.Root,LibAuto.Arch,LibAuto.MetaFile.Path)] = LibAuto.ConstPcd
self.DataPipe.DataContainer = {"LibConstPcd":libConstPcd}
## hash() operator of PlatformAutoGen
#
# The platform file path and arch string will be used to represent
# hash value of this object
#
# @retval int Hash value of the platform file path and arch
#
@cached_class_function
def __hash__(self):
return hash((self.MetaFile, self.Arch,self.ToolChain,self.BuildTarget))
@cached_class_function
def __repr__(self):
return "%s [%s]" % (self.MetaFile, self.Arch)
## Create autogen code for platform and modules
#
# Since there's no autogen code for platform, this method will do nothing
# if CreateModuleCodeFile is set to False.
#
# @param CreateModuleCodeFile Flag indicating if creating module's
# autogen code file or not
#
@cached_class_function
def CreateCodeFile(self, CreateModuleCodeFile=False):
# only module has code to be created, so do nothing if CreateModuleCodeFile is False
if not CreateModuleCodeFile:
return
for Ma in self.ModuleAutoGenList:
Ma.CreateCodeFile(CreateModuleCodeFile)
    ## Generate Fds Command
    @cached_property
    def GenFdsCommand(self):
        """GenFds command, delegated unchanged to the owning WorkspaceAutoGen."""
        return self.Workspace.GenFdsCommand
## Create makefile for the platform and modules in it
#
# @param CreateModuleMakeFile Flag indicating if the makefile for
# modules will be created as well
#
def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):
if CreateModuleMakeFile:
for Ma in self._MaList:
key = (Ma.MetaFile.File, self.Arch)
if key in FfsCommand:
Ma.CreateMakeFile(CreateModuleMakeFile, FfsCommand[key])
else:
Ma.CreateMakeFile(CreateModuleMakeFile)
self.CreateLibModuelDirs()
def CreateLibModuelDirs(self):
# No need to create makefile for the platform more than once.
if self.MakeFileName:
return
# create library/module build dirs for platform
Makefile = GenMake.PlatformMakefile(self)
self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()
self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()
self.MakeFileName = Makefile.getMakefileName()
@property
def AllPcdList(self):
return self.DynamicPcdList + self.NonDynamicPcdList
## Deal with Shared FixedAtBuild Pcds
#
def CollectFixedAtBuildPcds(self):
for LibAuto in self.LibraryAutoGenList:
FixedAtBuildPcds = {}
ShareFixedAtBuildPcdsSameValue = {}
for Module in LibAuto.ReferenceModules:
for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
DefaultValue = Pcd.DefaultValue
# Cover the case: DSC component override the Pcd value and the Pcd only used in one Lib
if Pcd in Module.LibraryPcdList:
Index = Module.LibraryPcdList.index(Pcd)
DefaultValue = Module.LibraryPcdList[Index].DefaultValue
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if key not in FixedAtBuildPcds:
ShareFixedAtBuildPcdsSameValue[key] = True
FixedAtBuildPcds[key] = DefaultValue
else:
if FixedAtBuildPcds[key] != DefaultValue:
ShareFixedAtBuildPcdsSameValue[key] = False
for Pcd in LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
continue
else:
DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
continue
if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
    def CollectVariables(self, DynamicPcdSet):
        """Build a VariableMgr describing the NV variables behind the given
        dynamic PCDs.

        @param  DynamicPcdSet   iterable of dynamic PCD objects to scan
        @retval VariableMgr     holds one var_info entry per
                                (PCD, SKU, default store) combination
        """
        VpdRegionSize = 0
        VpdRegionBase = 0
        if self.Workspace.FdfFile:
            # Locate the VPD region in the current FD (identified by the
            # platform's VPD tool GUID) to bound the variable storage.
            FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
            for FdRegion in FdDict.RegionList:
                for item in FdRegion.RegionDataList:
                    if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
                        VpdRegionSize = FdRegion.Size
                        VpdRegionBase = FdRegion.Offset
                        break
        VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
        VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
        VariableInfo.SetVpdRegionOffset(VpdRegionBase)
        Index = 0
        for Pcd in sorted(DynamicPcdSet):
            pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
            for SkuName in Pcd.SkuInfoList:
                Sku = Pcd.SkuInfoList[SkuName]
                SkuId = Sku.SkuId
                if SkuId is None or SkuId == '':
                    continue
                if len(Sku.VariableName) > 0:
                    # Skip SKUs whose variable is explicitly marked non-NV.
                    if Sku.VariableAttribute and 'NV' not in Sku.VariableAttribute:
                        continue
                    VariableGuidStructure = Sku.VariableGuidValue
                    VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
                    # Numeric PCD defaults pass through as-is; others are
                    # converted to byte arrays.
                    for StorageName in Sku.DefaultStoreDict:
                        VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName] if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES else StringToArray(Sku.DefaultStoreDict[StorageName]), Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))
            Index += 1
        return VariableInfo
    def UpdateNVStoreMaxSize(self, OrgVpdFile):
        """Patch the max size of PcdNvStoreDefaultValueBuffer from the VPD map.

        Reads the NV-store buffer's offset out of the BPDG-generated VPD map
        file, derives the available size from the VPD region, and rewrites the
        PCD's default value/SkuInfoList/MaxDatumSize accordingly.

        @param  OrgVpdFile  VpdInfoFile object to read the map into
        @retval             the (possibly updated) OrgVpdFile
        """
        if self.VariableInfo:
            VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
            PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
            if PcdNvStoreDfBuffer:
                try:
                    OrgVpdFile.Read(VpdMapFilePath)
                    PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
                    NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
                except:
                    # NOTE(review): NvStoreOffset is used after this handler,
                    # so this relies on EdkLogger.error aborting the build —
                    # presumably it raises; confirm in EdkLogger.
                    EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
                NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
                default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
                # Size = rest of the VPD region after the buffer, or the current
                # value's byte count when no region size is known.
                maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))
                var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)
                if var_data and default_skuobj:
                    default_skuobj.DefaultValue = var_data
                    PcdNvStoreDfBuffer[0].DefaultValue = var_data
                    # Collapse the SKU list to the single DEFAULT entry.
                    PcdNvStoreDfBuffer[0].SkuInfoList.clear()
                    PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj
                    PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))
        return OrgVpdFile
    ## Collect dynamic PCDs
    #
    #  Gather dynamic PCDs list from each module and their settings from platform
    #  This interface should be invoked explicitly when platform action is created.
    #
    def CollectPlatformDynamicPcds(self):
        """Populate and order the platform PCD lists.

        Two ordered phases: CategoryPcds() splits module PCDs into dynamic vs
        non-dynamic, then SortDynamicPcd() orders the dynamic list and handles
        VPD offset fix-up.
        """
        self.CategoryPcds()
        self.SortDynamicPcd()
    def CategoryPcds(self):
        """Classify every module/library PCD of the platform into
        self._DynaPcdList_ (dynamic/dynamic-ex) or self._NonDynaPcdList_
        (all others), then publish the results as self._DynamicPcdList and
        self._NonDynamicPcdList.  Aborts the build on VOID* PCDs without
        MaxDatumSize or on disallowed PCD types in as-built INFs."""
        # Category Pcds into DynamicPcds and NonDynamicPcds
        # for gathering error information
        NoDatumTypePcdList = set()
        FdfModuleList = []
        for InfName in self._AsBuildInfList:
            InfName = mws.join(self.WorkspaceDir, InfName)
            FdfModuleList.append(os.path.normpath(InfName))
        for M in self._MbList:
            #  F is the Module for which M is the module autogen
            ModPcdList = self.ApplyPcdSetting(M, M.ModulePcdList)
            LibPcdList = []
            for lib in M.LibraryPcdList:
                LibPcdList.extend(self.ApplyPcdSetting(M, M.LibraryPcdList[lib], lib))
            for PcdFromModule in ModPcdList + LibPcdList:
                # make sure that the "VOID*" kind of datum has MaxDatumSize set
                if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
                    NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
                # Check the PCD from Binary INF or Source INF
                if M.IsBinaryModule == True:
                    PcdFromModule.IsFromBinaryInf = True
                # Check the PCD from DSC or not
                PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds
                if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
                    if M.MetaFile.Path not in FdfModuleList:
                        # If one of the Source built modules listed in the DSC is not listed
                        # in FDF modules, and the INF lists a PCD can only use the PcdsDynamic
                        # access method (it is only listed in the DEC file that declares the
                        # PCD as PcdsDynamic), then build tool will report warning message
                        # notify the PI that they are attempting to build a module that must
                        # be included in a flash image in order to be functional. These Dynamic
                        # PCD will not be added into the Database unless it is used by other
                        # modules that are included in the FDF file.
                        if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \
                            PcdFromModule.IsFromBinaryInf == False:
                            # Print warning message to let the developer make a determine.
                            continue
                        # If one of the Source built modules listed in the DSC is not listed in
                        # FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx
                        # access method (it is only listed in the DEC file that declares the
                        # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
                        # PCD to the Platform's PCD Database.
                        if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
                            continue
                    #
                    # If a dynamic PCD used by a PEM module/PEI module & DXE module,
                    # it should be stored in Pcd PEI database, If a dynamic only
                    # used by DXE module, it should be stored in DXE PCD database.
                    # The default Phase is DXE
                    #
                    if M.ModuleType in SUP_MODULE_SET_PEI:
                        PcdFromModule.Phase = "PEI"
                    if PcdFromModule not in self._DynaPcdList_:
                        self._DynaPcdList_.append(PcdFromModule)
                    elif PcdFromModule.Phase == 'PEI':
                        # overwrite any the same PCD existing, if Phase is PEI
                        Index = self._DynaPcdList_.index(PcdFromModule)
                        self._DynaPcdList_[Index] = PcdFromModule
                elif PcdFromModule not in self._NonDynaPcdList_:
                    self._NonDynaPcdList_.append(PcdFromModule)
                elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:
                    Index = self._NonDynaPcdList_.index(PcdFromModule)
                    if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:
                        #The PCD from Binary INF will override the same one from source INF
                        self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])
                        PcdFromModule.Pending = False
                        self._NonDynaPcdList_.append (PcdFromModule)
        DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}
        # add the PCD from modules that listed in FDF but not in DSC to Database
        for InfName in FdfModuleList:
            if InfName not in DscModuleSet:
                InfClass = PathClass(InfName)
                M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
                # If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)
                # for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.
                # For binary module, if in current arch, we need to list the PCDs into database.
                if not M.IsBinaryModule:
                    continue
                # Override the module PCD setting by platform setting
                ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)
                for PcdFromModule in ModulePcdList:
                    PcdFromModule.IsFromBinaryInf = True
                    PcdFromModule.IsFromDsc = False
                    # Only allow the DynamicEx and Patchable PCD in AsBuild INF
                    if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:
                        EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
                                        File=self.MetaFile,
                                        ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"
                                        % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))
                    # make sure that the "VOID*" kind of datum has MaxDatumSize set
                    if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
                        NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))
                    if M.ModuleType in SUP_MODULE_SET_PEI:
                        PcdFromModule.Phase = "PEI"
                    if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
                        self._DynaPcdList_.append(PcdFromModule)
                    elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:
                        self._NonDynaPcdList_.append(PcdFromModule)
                    if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
                        # Overwrite the phase of any the same PCD existing, if Phase is PEI.
                        # It is to solve the case that a dynamic PCD used by a PEM module/PEI
                        # module & DXE module at a same time.
                        # Overwrite the type of the PCDs in source INF by the type of AsBuild
                        # INF file as DynamicEx.
                        Index = self._DynaPcdList_.index(PcdFromModule)
                        self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
                        self._DynaPcdList_[Index].Type = PcdFromModule.Type
        for PcdFromModule in self._NonDynaPcdList_:
            # If a PCD is not listed in the DSC file, but binary INF files used by
            # this platform all (that use this PCD) list the PCD in a [PatchPcds]
            # section, AND all source INF files used by this platform the build
            # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
            # section, then the tools must NOT add the PCD to the Platform's PCD
            # Database; the build must assign the access method for this PCD as
            # PcdsPatchableInModule.
            if PcdFromModule not in self._DynaPcdList_:
                continue
            Index = self._DynaPcdList_.index(PcdFromModule)
            if PcdFromModule.IsFromDsc == False and \
                PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \
                PcdFromModule.IsFromBinaryInf == True and \
                self._DynaPcdList_[Index].IsFromBinaryInf == False:
                Index = self._DynaPcdList_.index(PcdFromModule)
                self._DynaPcdList_.remove (self._DynaPcdList_[Index])
        # print out error information and break the build, if error found
        if len(NoDatumTypePcdList) > 0:
            NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)
            EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
                            File=self.MetaFile,
                            ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"
                            % NoDatumTypePcdListString)
        self._NonDynamicPcdList = sorted(self._NonDynaPcdList_)
        self._DynamicPcdList = self._DynaPcdList_
    def SortDynamicPcd(self):
        """Order self._DynamicPcdList (unicode-string PCDs, then HII PCDs,
        then the rest) and perform all VPD handling: populate the VPD info
        file, run the BPDG tool for TAB_STAR offsets, and clone the DEFAULT
        SKU into single-SKU PCDs for every other SKU.

        NOTE(review): most of the work only runs for the last arch in
        self.Workspace.ArchList — presumably so VPD layout is computed once
        per build; confirm before reordering.
        """
        #
        # Sort dynamic PCD list to:
        # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should
        #    try to be put header of dynamicd List
        # 2) If PCD is HII type, the PCD item should be put after unicode type PCD
        #
        # The reason of sorting is make sure the unicode string is in double-byte alignment in string table.
        #
        UnicodePcdArray = set()
        HiiPcdArray = set()
        OtherPcdArray = set()
        VpdPcdDict = {}
        VpdFile = VpdInfoFile.VpdInfoFile()
        NeedProcessVpdMapFile = False
        for pcd in self.Platform.Pcds:
            if pcd not in self._PlatformPcds:
                self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]
        for item in self._PlatformPcds:
            if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
                self._PlatformPcds[item].DatumType = TAB_VOID
        if (self.Workspace.ArchList[-1] == self.Arch):
            for Pcd in self._DynamicPcdList:
                # just pick the a value to determine whether is unicode string type
                Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
                Sku.VpdOffset = Sku.VpdOffset.strip()
                if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
                    Pcd.DatumType = TAB_VOID
                # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
                # if found HII type PCD then insert to right of UnicodeIndex
                if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
                    VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
            #Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer
            PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))
            if PcdNvStoreDfBuffer:
                self.VariableInfo = self.CollectVariables(self._DynamicPcdList)
                vardump = self.VariableInfo.dump()
                if vardump:
                    #
                    #According to PCD_DATABASE_INIT in edk2\MdeModulePkg\Include\Guid\PcdDataBaseSignatureGuid.h,
                    #the max size for string PCD should not exceed USHRT_MAX 65535(0xffff).
                    #typedef UINT16 SIZE_INFO;
                    #//SIZE_INFO  SizeTable[];
                    if len(vardump.split(",")) > 0xffff:
                        EdkLogger.error("build", RESOURCE_OVERFLOW, 'The current length of PCD %s value is %d, it exceeds to the max size of String PCD.' %(".".join([PcdNvStoreDfBuffer.TokenSpaceGuidCName,PcdNvStoreDfBuffer.TokenCName]) ,len(vardump.split(","))))
                    PcdNvStoreDfBuffer.DefaultValue = vardump
                    for skuname in PcdNvStoreDfBuffer.SkuInfoList:
                        PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump
                        PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))
            else:
                #If the end user define [DefaultStores] and [XXX.Menufacturing] in DSC, but forget to configure PcdNvStoreDefaultValueBuffer to PcdsDynamicVpd
                if [Pcd for Pcd in self._DynamicPcdList if Pcd.UserDefinedDefaultStoresFlag]:
                    EdkLogger.warn("build", "PcdNvStoreDefaultValueBuffer should be defined as PcdsDynamicExVpd in dsc file since the DefaultStores is enabled for this platform.\n%s" %self.Platform.MetaFile.Path)
            PlatformPcds = sorted(self._PlatformPcds.keys())
            #
            # Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.
            #
            VpdSkuMap = {}
            for PcdKey in PlatformPcds:
                Pcd = self._PlatformPcds[PcdKey]
                if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \
                   PcdKey in VpdPcdDict:
                    Pcd = VpdPcdDict[PcdKey]
                    SkuValueMap = {}
                    DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)
                    if DefaultSku:
                        PcdValue = DefaultSku.DefaultValue
                        if PcdValue not in SkuValueMap:
                            SkuValueMap[PcdValue] = []
                            VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)
                        SkuValueMap[PcdValue].append(DefaultSku)
                    for (SkuName, Sku) in Pcd.SkuInfoList.items():
                        Sku.VpdOffset = Sku.VpdOffset.strip()
                        PcdValue = Sku.DefaultValue
                        if PcdValue == "":
                            PcdValue = Pcd.DefaultValue
                        if Sku.VpdOffset != TAB_STAR:
                            # Byte arrays must be 8-byte aligned, unicode strings
                            # 2-byte aligned; anything else 1-byte.
                            if PcdValue.startswith("{"):
                                Alignment = 8
                            elif PcdValue.startswith("L"):
                                Alignment = 2
                            else:
                                Alignment = 1
                            try:
                                VpdOffset = int(Sku.VpdOffset)
                            except:
                                try:
                                    VpdOffset = int(Sku.VpdOffset, 16)
                                except:
                                    EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
                            if VpdOffset % Alignment != 0:
                                if PcdValue.startswith("{"):
                                    EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)
                                else:
                                    EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))
                        if PcdValue not in SkuValueMap:
                            SkuValueMap[PcdValue] = []
                            VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)
                        SkuValueMap[PcdValue].append(Sku)
                        # if the offset of a VPD is *, then it need to be fixed up by third party tool.
                        if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
                            NeedProcessVpdMapFile = True
                            if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
                                EdkLogger.error("Build", FILE_NOT_FOUND, \
                                                "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
                    VpdSkuMap[PcdKey] = SkuValueMap
            #
            # Fix the PCDs define in VPD PCD section that never referenced by module.
            # An example is PCD for signature usage.
            #
            for DscPcd in PlatformPcds:
                DscPcdEntry = self._PlatformPcds[DscPcd]
                if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
                    if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
                        FoundFlag = False
                        for VpdPcd in VpdFile._VpdArray:
                            # This PCD has been referenced by module
                            if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
                               (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
                                FoundFlag = True
                        # Not found, it should be signature
                        if not FoundFlag :
                            # just pick the a value to determine whether is unicode string type
                            SkuValueMap = {}
                            SkuObjList = list(DscPcdEntry.SkuInfoList.items())
                            DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
                            if DefaultSku:
                                defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
                                SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
                            for (SkuName, Sku) in SkuObjList:
                                Sku.VpdOffset = Sku.VpdOffset.strip()
                                # Need to iterate DEC pcd information to get the value & datumtype
                                for eachDec in self.PackageList:
                                    for DecPcd in eachDec.Pcds:
                                        DecPcdEntry = eachDec.Pcds[DecPcd]
                                        if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
                                           (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
                                            # Print warning message to let the developer make a determine.
                                            EdkLogger.warn("build", "Unreferenced vpd pcd used!",
                                                           File=self.MetaFile, \
                                                           ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
                                                           %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
                                            DscPcdEntry.DatumType = DecPcdEntry.DatumType
                                            DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
                                            DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
                                            DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
                                            # Only fix the value while no value provided in DSC file.
                                            if not Sku.DefaultValue:
                                                DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
                                if DscPcdEntry not in self._DynamicPcdList:
                                    self._DynamicPcdList.append(DscPcdEntry)
                                Sku.VpdOffset = Sku.VpdOffset.strip()
                                PcdValue = Sku.DefaultValue
                                if PcdValue == "":
                                    PcdValue = DscPcdEntry.DefaultValue
                                if Sku.VpdOffset != TAB_STAR:
                                    if PcdValue.startswith("{"):
                                        Alignment = 8
                                    elif PcdValue.startswith("L"):
                                        Alignment = 2
                                    else:
                                        Alignment = 1
                                    try:
                                        VpdOffset = int(Sku.VpdOffset)
                                    except:
                                        try:
                                            VpdOffset = int(Sku.VpdOffset, 16)
                                        except:
                                            EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))
                                    if VpdOffset % Alignment != 0:
                                        if PcdValue.startswith("{"):
                                            EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)
                                        else:
                                            EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))
                                if PcdValue not in SkuValueMap:
                                    SkuValueMap[PcdValue] = []
                                    VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
                                SkuValueMap[PcdValue].append(Sku)
                                if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
                                    NeedProcessVpdMapFile = True
                            if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):
                                UnicodePcdArray.add(DscPcdEntry)
                            elif len(Sku.VariableName) > 0:
                                HiiPcdArray.add(DscPcdEntry)
                            else:
                                OtherPcdArray.add(DscPcdEntry)
                            # if the offset of a VPD is *, then it need to be fixed up by third party tool.
                            VpdSkuMap[DscPcd] = SkuValueMap
        if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
           VpdFile.GetCount() != 0:
            EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
                            "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
        if VpdFile.GetCount() != 0:
            self.FixVpdOffset(VpdFile)
            self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))
            PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
            if PcdNvStoreDfBuffer:
                PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName
                if (PcdName,PcdGuid) in VpdSkuMap:
                    DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
                    VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}
        # Process VPD map file generated by third party BPDG tool
        if NeedProcessVpdMapFile:
            VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
            try:
                VpdFile.Read(VpdMapFilePath)
                # Fixup TAB_STAR offset
                for pcd in VpdSkuMap:
                    vpdinfo = VpdFile.GetVpdInfo(pcd)
                    if vpdinfo is None:
                        # just pick the a value to determine whether is unicode string type
                        continue
                    for pcdvalue in VpdSkuMap[pcd]:
                        for sku in VpdSkuMap[pcd][pcdvalue]:
                            for item in vpdinfo:
                                if item[2] == pcdvalue:
                                    sku.VpdOffset = item[1]
            except:
                EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
        # Delete the DynamicPcdList At the last time enter into this function
        for Pcd in self._DynamicPcdList:
            # just pick the a value to determine whether is unicode string type
            Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
            Sku.VpdOffset = Sku.VpdOffset.strip()
            if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
                Pcd.DatumType = TAB_VOID
            PcdValue = Sku.DefaultValue
            if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):
                # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
                UnicodePcdArray.add(Pcd)
            elif len(Sku.VariableName) > 0:
                # if found HII type PCD then insert to right of UnicodeIndex
                HiiPcdArray.add(Pcd)
            else:
                OtherPcdArray.add(Pcd)
        del self._DynamicPcdList[:]
        self._DynamicPcdList.extend(list(UnicodePcdArray))
        self._DynamicPcdList.extend(list(HiiPcdArray))
        self._DynamicPcdList.extend(list(OtherPcdArray))
        self._DynamicPcdList.sort()
        # Clone the DEFAULT SKU into single-SKU PCDs for every other SKU id.
        allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
        for pcd in self._DynamicPcdList:
            if len(pcd.SkuInfoList) == 1:
                for (SkuName, SkuId) in allskuset:
                    if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
                        continue
                    pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
                    pcd.SkuInfoList[SkuName].SkuId = SkuId
                    pcd.SkuInfoList[SkuName].SkuIdName = SkuName
def FixVpdOffset(self, VpdFile ):
FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
if not os.path.exists(FvPath):
try:
os.makedirs(FvPath)
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
if VpdFile.Write(VpdFilePath):
# retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
BPDGToolName = None
for ToolDef in self.ToolDefinition.values():
if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:
if "PATH" not in ToolDef:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
BPDGToolName = ToolDef["PATH"]
break
# Call third party GUID BPDG tool.
if BPDGToolName is not None:
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
    ## Return the platform build data object
    @cached_property
    def Platform(self):
        """DSC build-data object for this (file, arch, target, toolchain)."""
        return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
    ## Return platform name
    @cached_property
    def Name(self):
        """PLATFORM_NAME as carried by the platform build-data object."""
        return self.Platform.PlatformName
    ## Return the meta file GUID
    @cached_property
    def Guid(self):
        """GUID of the platform, taken from the platform build-data object."""
        return self.Platform.Guid
    ## Return the platform version
    @cached_property
    def Version(self):
        """Version string of the platform, from the platform build-data object."""
        return self.Platform.Version
## Return the FDF file name
@cached_property
def FdfFile(self):
if self.Workspace.FdfFile:
RetVal= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
else:
RetVal = ''
return RetVal
    ## Return the build output directory platform specifies
    @cached_property
    def OutputDir(self):
        """OUTPUT_DIRECTORY from the DSC; may be absolute or workspace-relative
        (BuildDir below anchors the relative case at the workspace)."""
        return self.Platform.OutputDirectory
## Return the directory to store all intermediate and final files built
@cached_property
def BuildDir(self):
if os.path.isabs(self.OutputDir):
GlobalData.gBuildDirectory = RetVal = path.join(
path.abspath(self.OutputDir),
self.BuildTarget + "_" + self.ToolChain,
)
else:
GlobalData.gBuildDirectory = RetVal = path.join(
self.WorkspaceDir,
self.OutputDir,
self.BuildTarget + "_" + self.ToolChain,
)
return RetVal
    ## Return directory of platform makefile
    #
    #   @retval     string  Makefile directory
    #
    @cached_property
    def MakeFileDir(self):
        """Per-arch subdirectory of BuildDir holding the platform makefile."""
        return path.join(self.BuildDir, self.Arch)
## Return build command string
#
# @retval string Build command string
#
@cached_property
def BuildCommand(self):
if "MAKE" in self.EdkIIBuildOption and "PATH" in self.EdkIIBuildOption["MAKE"]:
# MAKE_PATH in DSC [BuildOptions] section is higher priority
Path = self.EdkIIBuildOption["MAKE"]["PATH"]
if Path.startswith('='):
Path = Path[1:].strip()
RetVal = _SplitOption(Path)
elif "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
RetVal = _SplitOption(self.ToolDefinition["MAKE"]["PATH"])
else:
return []
if "MAKE" in self.ToolDefinition and "FLAGS" in self.ToolDefinition["MAKE"]:
NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
if NewOption != '':
RetVal += _SplitOption(NewOption)
if "MAKE" in self.EdkIIBuildOption and "FLAGS" in self.EdkIIBuildOption["MAKE"]:
Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]
if Flags.startswith('='):
RetVal = [RetVal[0]] + _SplitOption(Flags[1:].strip())
else:
RetVal = RetVal + _SplitOption(Flags.strip())
return RetVal
## Compute a tool defintion key priority value in range 0..15
#
# TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE 15
# ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE 14
# TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE 13
# ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE 12
# TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE 11
# ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE 10
# TARGET_*********_****_COMMANDTYPE_ATTRIBUTE 9
# ******_*********_****_COMMANDTYPE_ATTRIBUTE 8
# TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE 7
# ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE 6
# TARGET_*********_ARCH_***********_ATTRIBUTE 5
# ******_*********_ARCH_***********_ATTRIBUTE 4
# TARGET_TOOLCHAIN_****_***********_ATTRIBUTE 3
# ******_TOOLCHAIN_****_***********_ATTRIBUTE 2
# TARGET_*********_****_***********_ATTRIBUTE 1
# ******_*********_****_***********_ATTRIBUTE 0
#
def ToolDefinitionPriority (self,Key):
KeyList = Key.split('_')
Priority = 0
for Index in range (0, min(4, len(KeyList))):
if KeyList[Index] != '*':
Priority += (1 << Index)
return Priority
## Get tool chain definition
#
# Get each tool definition for given tool chain from tools_def.txt and platform
#
# Returns an OrderedDict mapping tool name -> {attribute -> value}, with the
# DSC [BuildOptions] values merged on top of the tools_def.txt values.
# Side effects: writes TOOLS_DEF.<arch> under MakeFileDir, prepends any DLL
# directories to os.environ["PATH"], and exports os.environ["MAKE_FLAGS"].
#
@cached_property
def ToolDefinition(self):
    ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
    if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
        EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
                        ExtraData="[%s]" % self.MetaFile)
    RetVal = OrderedDict()
    DllPathList = set()
    # Most specific keys first (see ToolDefinitionPriority); only the first
    # (highest priority) value seen for a tool/attribute pair is kept below.
    PrioritizedDefList = sorted(ToolDefinition.keys(), key=self.ToolDefinitionPriority, reverse=True)
    for Def in PrioritizedDefList:
        Target, Tag, Arch, Tool, Attr = Def.split("_")
        # Expand wildcards to the active target/toolchain/arch, then keep
        # only entries that apply to this build.
        if Target == TAB_STAR:
            Target = self.BuildTarget
        if Tag == TAB_STAR:
            Tag = self.ToolChain
        if Arch == TAB_STAR:
            Arch = self.Arch
        if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
            continue
        Value = ToolDefinition[Def]
        # don't record the DLL
        if Attr == "DLL":
            DllPathList.add(Value)
            continue
        #
        # ToolDefinition is sorted from highest priority to lowest priority.
        # Only add the first(highest priority) match to RetVal
        #
        if Tool not in RetVal:
            RetVal[Tool] = OrderedDict()
        if Attr not in RetVal[Tool]:
            RetVal[Tool][Attr] = Value
    ToolsDef = ''
    if GlobalData.gOptions.SilentMode and "MAKE" in RetVal:
        if "FLAGS" not in RetVal["MAKE"]:
            RetVal["MAKE"]["FLAGS"] = ""
        RetVal["MAKE"]["FLAGS"] += " -s"
    MakeFlags = ''
    ToolList = list(RetVal.keys())
    ToolList.sort()
    for Tool in ToolList:
        if Tool == TAB_STAR:
            continue
        AttrList = list(RetVal[Tool].keys())
        # Attributes defined under the '*' pseudo-tool apply to every tool.
        if TAB_STAR in ToolList:
            AttrList += list(RetVal[TAB_STAR])
        AttrList.sort()
        for Attr in AttrList:
            if Attr in RetVal[Tool]:
                Value = RetVal[Tool][Attr]
            else:
                Value = RetVal[TAB_STAR][Attr]
            # DSC [BuildOptions] may override ('=') or append to the value.
            if Tool in self._BuildOptionWithToolDef(RetVal) and Attr in self._BuildOptionWithToolDef(RetVal)[Tool]:
                # check if override is indicated
                if self._BuildOptionWithToolDef(RetVal)[Tool][Attr].startswith('='):
                    Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr][1:].strip()
                else:
                    # Do not append PATH or GUID
                    if Attr != 'PATH' and Attr != 'GUID':
                        Value += " " + self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
                    else:
                        Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
            if Attr == "PATH":
                # Don't put MAKE definition in the file
                if Tool != "MAKE":
                    ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
            elif Attr != "DLL":
                # Don't put MAKE definition in the file
                if Tool == "MAKE":
                    if Attr == "FLAGS":
                        MakeFlags = Value
                else:
                    ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
        ToolsDef += "\n"
    tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
    SaveFileOnChange(tool_def_file, ToolsDef, False)
    # Make the recorded DLL directories visible to child processes.
    for DllPath in DllPathList:
        os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
    os.environ["MAKE_FLAGS"] = MakeFlags
    return RetVal
## Path of the generated TOOLS_DEF.<arch> file for this platform/arch
@cached_property
def ToolDefinitionFile(self):
    FilePath = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
    # Evaluating the ToolDefinition property writes this file as a side
    # effect, so trigger it if the file is not there yet.
    if not os.path.exists(FilePath):
        self.ToolDefinition
    return FilePath
## Retrieve the toolchain family of the active toolchain tag; default 'MSFT'.
@cached_property
def ToolChainFamily(self):
    Database = self.Workspace.ToolDef.ToolsDefTxtDatabase
    Defined = (TAB_TOD_DEFINES_FAMILY in Database
               and self.ToolChain in Database[TAB_TOD_DEFINES_FAMILY]
               and bool(Database[TAB_TOD_DEFINES_FAMILY][self.ToolChain]))
    if Defined:
        return Database[TAB_TOD_DEFINES_FAMILY][self.ToolChain]
    # Nothing usable in tools_def.txt -- fall back to the MSFT family.
    EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
                      % self.ToolChain)
    return TAB_COMPILER_MSFT
## Retrieve the build-rule family of the active toolchain tag; default 'MSFT'.
@cached_property
def BuildRuleFamily(self):
    Database = self.Workspace.ToolDef.ToolsDefTxtDatabase
    Defined = (TAB_TOD_DEFINES_BUILDRULEFAMILY in Database
               and self.ToolChain in Database[TAB_TOD_DEFINES_BUILDRULEFAMILY]
               and bool(Database[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]))
    if Defined:
        return Database[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
    EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
                      % self.ToolChain)
    return TAB_COMPILER_MSFT
## Return the build options specific for all modules in this platform
@cached_property
def BuildOption(self):
    PlatformOptions = self.Platform.BuildOptions
    return self._ExpandBuildOption(PlatformOptions)
# Expand platform [BuildOptions] against a caller-supplied tool definition.
# Used while the ToolDefinition property is itself being computed, to break
# the recursion between BuildOption and ToolDefinition.
def _BuildOptionWithToolDef(self, ToolDef):
    return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)
## Return the build options specific for EDK modules in this platform
@cached_property
def EdkBuildOption(self):
    # Restrict option expansion to entries tagged with the EDK module style.
    Options = self.Platform.BuildOptions
    return self._ExpandBuildOption(Options, EDK_NAME)
## Return the build options specific for EDKII modules in this platform
@cached_property
def EdkIIBuildOption(self):
    # Restrict option expansion to entries tagged with the EDKII module style.
    Options = self.Platform.BuildOptions
    return self._ExpandBuildOption(Options, EDKII_NAME)
## Parse build_rule.txt in Conf Directory.
#
# @retval BuildRule object
#
# Note: the property name shadows the imported BuildRule class inside this
# body only until the first reference; `BuildRule(BuildRuleFile)` below still
# resolves to the class because the property is a descriptor on the class.
#
@cached_property
def BuildRule(self):
    BuildRuleFile = None
    # target.txt may name a custom build_rule file; otherwise use the default.
    if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
        BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
    if not BuildRuleFile:
        BuildRuleFile = gDefaultBuildRuleFile
    RetVal = BuildRule(BuildRuleFile)
    if RetVal._FileVersion == "":
        # No version declared in the file: assume the minimum required one.
        RetVal._FileVersion = AutoGenReqBuildRuleVerNum
    else:
        if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :
            # If Build Rule's version is less than the version number required by the tools, halting the build.
            EdkLogger.error("build", AUTOGEN_ERROR,
                            ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
                             % (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
    return RetVal
## Summarize the packages used by modules in this platform
@cached_property
def PackageList(self):
    Packages = set()
    # Packages referenced by DSC modules and their resolved library instances.
    for ModuleBuild in self._MbList:
        Packages.update(ModuleBuild.Packages)
        for LibInstance in ModuleBuild.LibInstances:
            Packages.update(LibInstance.Packages)
    # Collect package set information from INF of FDF
    for AsBuildInf in self._AsBuildModuleList:
        if AsBuildInf in self.Platform.Modules:
            continue
        Packages.update(self.BuildDatabase[AsBuildInf, self.Arch, self.BuildTarget, self.ToolChain].Packages)
    # Packages listed directly in the DSC [Packages] section.
    Packages.update(self.Platform.Packages)
    return list(Packages)
## Index the non-dynamic PCD list by (TokenCName, TokenSpaceGuidCName)
@cached_property
def NonDynamicPcdDict(self):
    Mapping = {}
    for Pcd in self.NonDynamicPcdList:
        Mapping[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
    return Mapping
## Get list of non-dynamic PCDs
#
# Classification is lazy: the first access to either PCD list runs
# CollectPlatformDynamicPcds(), which populates both backing lists.
@property
def NonDynamicPcdList(self):
    if not self._NonDynamicPcdList:
        self.CollectPlatformDynamicPcds()
    return self._NonDynamicPcdList
## Get list of dynamic PCDs
#
# Lazy like NonDynamicPcdList: the one CollectPlatformDynamicPcds() call
# fills both the dynamic and non-dynamic backing lists.
@property
def DynamicPcdList(self):
    if not self._DynamicPcdList:
        self.CollectPlatformDynamicPcds()
    return self._DynamicPcdList
## Generate Token Number for all PCD
#
# Dynamic and DynamicEx PCDs get consecutive token numbers starting at 1,
# assigned in four fixed passes so each kind occupies its own contiguous
# range: PEI/Dynamic, PEI/DynamicEx, DXE/Dynamic, DXE/DynamicEx.
# Non-dynamic PCDs do not need a real token number and all get 0.
#
@cached_property
def PcdTokenNumber(self):
    RetVal = OrderedDict()
    TokenNumber = 1
    PassOrder = (("PEI", PCD_DYNAMIC_TYPE_SET),
                 ("PEI", PCD_DYNAMIC_EX_TYPE_SET),
                 ("DXE", PCD_DYNAMIC_TYPE_SET),
                 ("DXE", PCD_DYNAMIC_EX_TYPE_SET))
    for Phase, TypeSet in PassOrder:
        for Pcd in self.DynamicPcdList:
            if Pcd.Phase == Phase and Pcd.Type in TypeSet:
                EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
                RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
                TokenNumber += 1
    for Pcd in self.NonDynamicPcdList:
        RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = 0
    return RetVal
## Build-database objects for every module listed in the DSC, each stamped
#  with the (possibly FILE_GUID-overridden) GUID from its component record.
@cached_property
def _MbList(self):
    Result = []
    for MetaFile in self.Platform.Modules:
        ModuleBuildData = self.BuildDatabase[MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
        ModuleBuildData.Guid = self.Platform.Modules[MetaFile].Guid
        Result.append(ModuleBuildData)
    return Result
## Create a ModuleAutoGen for every DSC module, remember it on the module's
#  platform record (.M), and return them all in DSC order.
@cached_property
def _MaList(self):
    for ModuleFile in self.Platform.Modules:
        self.Platform.Modules[ModuleFile].M = ModuleAutoGen(
            self.Workspace,
            ModuleFile,
            self.BuildTarget,
            self.ToolChain,
            self.Arch,
            self.MetaFile,
            self.DataPipe
        )
    return [x.M for x in self.Platform.Modules.values()]
## Summarize ModuleAutoGen objects of all modules to be built for this platform
@cached_property
def ModuleAutoGenList(self):
    # De-duplicate while keeping first-seen order.
    Unique = []
    for ModuleAG in self._MaList:
        if ModuleAG not in Unique:
            Unique.append(ModuleAG)
    return Unique
## Summarize ModuleAutoGen objects of all libraries to be built for this platform
@cached_property
def LibraryAutoGenList(self):
    Libraries = []
    for ModuleAG in self._MaList:
        for LibraryAG in ModuleAG.LibraryAutoGenList:
            if LibraryAG not in Libraries:
                Libraries.append(LibraryAG)
            # Record the reverse link from each library to its consuming module.
            if ModuleAG not in LibraryAG.ReferenceModules:
                LibraryAG.ReferenceModules.append(ModuleAG)
    return Libraries
## Test if a module is supported by the platform
#
# Returns True when the module appears in the DSC module list, the DSC
# library-instance list, or the as-built module list collected from the FDF.
#
def ValidModule(self, Module):
    KnownCollections = (self.Platform.Modules,
                        self.Platform.LibraryInstances,
                        self._AsBuildModuleList)
    return any(Module in Collection for Collection in KnownCollections)
## Collect (File, Root, Path, BaseName, OriginalPath, Arch, IsLibrary)
#  tuples for every DSC module plus every library instance linked into them.
#
#  NOTE(review): decorated with @cached_property, so callers can never pass
#  the WithoutPcd parameter -- it always keeps its default of True, meaning
#  PcdIsDriver modules are always skipped from the result. Confirm intended.
@cached_property
def GetAllModuleInfo(self,WithoutPcd=True):
    ModuleLibs = set()
    for m in self.Platform.Modules:
        module_obj = self.BuildDatabase[m,self.Arch,self.BuildTarget,self.ToolChain]
        # Library instances are resolved only for real modules, not for
        # library INFs that appear in the module list themselves.
        if not bool(module_obj.LibraryClass):
            Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain,self.MetaFile,EdkLogger)
        else:
            Libs = []
        ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.MetaFile.Path,l.MetaFile.BaseName,l.MetaFile.OriginalPath,l.Arch,True) for l in Libs]))
        if WithoutPcd and module_obj.PcdIsDriver:
            continue
        ModuleLibs.add((m.File,m.Root,m.Path,m.BaseName,m.OriginalPath,module_obj.Arch,bool(module_obj.LibraryClass)))
    return ModuleLibs
## Resolve the library classes in a module to library instances
#
# This method will not only resolve library classes but also sort the library
# instances according to the dependency-ship.
#
# @param Module The module from which the library classes will be resolved
#
# @retval library_list List of library instances sorted
#
def ApplyLibraryInstance(self, Module):
    # A binary INF listed only in the FDF (not in the DSC) links no libraries.
    if str(Module) not in self.Platform.Modules:
        return []
    return GetModuleLibInstances(Module, self.Platform, self.BuildDatabase,
                                 self.Arch, self.BuildTarget, self.ToolChain,
                                 self.MetaFile, EdkLogger)
## Override PCD setting (type, value, ...)
#
# Mutates ToPcd in place with the settings carried by FromPcd (which may be
# None).  Also normalizes the default value through ValueExpressionEx,
# validates the datum, fills in a missing MaxDatumSize for VOID* PCDs, and
# applies a default SKU for dynamic PCDs that have none.
#
# @param ToPcd The PCD to be overridden
# @param FromPcd The PCD overriding from
#
def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
    #
    # in case there's PCDs coming from FDF file, which have no type given.
    # at this point, ToPcd.Type has the type found from dependent
    # package
    #
    TokenCName = ToPcd.TokenCName
    # Mixed PCDs are reported under their original (first) token name.
    for PcdItem in GlobalData.MixedPcd:
        if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
            TokenCName = PcdItem[0]
            break
    if FromPcd is not None:
        if ToPcd.Pending and FromPcd.Type:
            ToPcd.Type = FromPcd.Type
        elif ToPcd.Type and FromPcd.Type\
            and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
            # e.g. DynamicEx widening to a more specific DynamicEx subtype
            if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
                ToPcd.Type = FromPcd.Type
        elif ToPcd.Type and FromPcd.Type \
            and ToPcd.Type != FromPcd.Type:
            if Library:
                Module = str(Module) + " 's library file (" + str(Library) + ")"
            EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
                            ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
                                      % (ToPcd.TokenSpaceGuidCName, TokenCName,
                                         ToPcd.Type, Module, FromPcd.Type, Msg),
                            File=self.MetaFile)
        if FromPcd.MaxDatumSize:
            ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
            ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
        if FromPcd.DefaultValue:
            ToPcd.DefaultValue = FromPcd.DefaultValue
        if FromPcd.TokenValue:
            ToPcd.TokenValue = FromPcd.TokenValue
        if FromPcd.DatumType:
            ToPcd.DatumType = FromPcd.DatumType
        if FromPcd.SkuInfoList:
            ToPcd.SkuInfoList = FromPcd.SkuInfoList
        if FromPcd.UserDefinedDefaultStoresFlag:
            ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
        # Add Flexible PCD format parse
        if ToPcd.DefaultValue:
            try:
                ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self.Platform._GuidDict)(True)
            except BadExpression as Value:
                EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
                                File=self.MetaFile)
        # check the validation of datum
        IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
        if not IsValid:
            EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
                            ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
        ToPcd.validateranges = FromPcd.validateranges
        ToPcd.validlists = FromPcd.validlists
        ToPcd.expressions = FromPcd.expressions
        ToPcd.CustomAttribute = FromPcd.CustomAttribute
    # Derive MaxDatumSize from the default value for VOID* PCDs lacking one.
    if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
        EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
                        % (ToPcd.TokenSpaceGuidCName, TokenCName))
        Value = ToPcd.DefaultValue
        if not Value:
            ToPcd.MaxDatumSize = '1'
        elif Value[0] == 'L':
            # L"..." unicode string: two bytes per character, quotes excluded
            ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
        elif Value[0] == '{':
            # byte array literal: one byte per comma-separated element
            ToPcd.MaxDatumSize = str(len(Value.split(',')))
        else:
            ToPcd.MaxDatumSize = str(len(Value) - 1)
    # apply default SKU for dynamic PCDS if specified one is not available
    if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
        and not ToPcd.SkuInfoList:
        if self.Platform.SkuName in self.Platform.SkuIds:
            SkuName = self.Platform.SkuName
        else:
            SkuName = TAB_DEFAULT
        ToPcd.SkuInfoList = {
            SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
        }
## Apply PCD setting defined platform to a module
#
# @param Module The module from which the PCD setting will be overridden
# @param Pcds   Dict of the module's PCDs keyed by (TokenCName, TokenSpaceGuidCName); mutated in place
# @param Library Optional library context, used only for error reporting
#
# @retval PCD_list The list PCDs with settings from platform
#
def ApplyPcdSetting(self, Module, Pcds, Library=""):
    # for each PCD in module
    for Name, Guid in Pcds:
        PcdInModule = Pcds[Name, Guid]
        # find out the PCD setting in platform
        if (Name, Guid) in self.Platform.Pcds:
            PcdInPlatform = self.Platform.Pcds[Name, Guid]
        else:
            PcdInPlatform = None
        # then override the settings if any
        self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
        # resolve the VariableGuid value
        for SkuId in PcdInModule.SkuInfoList:
            Sku = PcdInModule.SkuInfoList[SkuId]
            if Sku.VariableGuid == '': continue
            Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
            if Sku.VariableGuidValue is None:
                PackageList = "\n\t".join(str(P) for P in self.PackageList)
                EdkLogger.error(
                            'build',
                            RESOURCE_NOT_AVAILABLE,
                            "Value of GUID [%s] is not found in" % Sku.VariableGuid,
                            ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
                                                    % (Guid, Name, str(Module)),
                            File=self.MetaFile
                            )
    # override PCD settings with module specific setting
    # ([Components] <module>.inf scoped PCDs take precedence over [PcdsXXX])
    if Module in self.Platform.Modules:
        PlatformModule = self.Platform.Modules[str(Module)]
        for Key in PlatformModule.Pcds:
            # command-line --pcd overrides beat everything else
            if GlobalData.BuildOptionPcd:
                for pcd in GlobalData.BuildOptionPcd:
                    (TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
                    if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
                        PlatformModule.Pcds[Key].DefaultValue = pcdvalue
                        PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
                        break
            Flag = False
            if Key in Pcds:
                ToPcd = Pcds[Key]
                Flag = True
            elif Key in GlobalData.MixedPcd:
                # a mixed PCD may live in Pcds under one of its alias names
                for PcdItem in GlobalData.MixedPcd[Key]:
                    if PcdItem in Pcds:
                        ToPcd = Pcds[PcdItem]
                        Flag = True
                        break
            if Flag:
                self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
    # use PCD value to calculate the MaxDatumSize when it is not specified
    for Name, Guid in Pcds:
        Pcd = Pcds[Name, Guid]
        if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
            Pcd.MaxSizeUserSet = None
            Value = Pcd.DefaultValue
            if not Value:
                Pcd.MaxDatumSize = '1'
            elif Value[0] == 'L':
                # L"..." unicode string: two bytes per character, quotes excluded
                Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
            elif Value[0] == '{':
                # byte array literal: one byte per comma-separated element
                Pcd.MaxDatumSize = str(len(Value.split(',')))
            else:
                Pcd.MaxDatumSize = str(len(Value) - 1)
    return list(Pcds.values())
## Append build options in platform to a module
#
# @param Module The module to which the build options will be appended
#
# @retval options Tuple (BuildOptions dict, BuildRuleOrder string or None).
#         Sources are merged in increasing precedence: tools_def, module INF,
#         platform [BuildOptions], module-type scoped, then [Components]
#         module-scoped options.
#
def ApplyBuildOption(self, Module):
    # Get the different options for the different style module
    PlatformOptions = self.EdkIIBuildOption
    ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
    ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
    ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
    if Module in self.Platform.Modules:
        PlatformModule = self.Platform.Modules[str(Module)]
        PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
    else:
        PlatformModuleOptions = {}
    BuildRuleOrder = None
    # Last source in this precedence list that defines BUILDRULEORDER wins.
    for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
        for Tool in Options:
            for Attr in Options[Tool]:
                if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                    BuildRuleOrder = Options[Tool][Attr]
    AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
                   list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
                   list(self.ToolDefinition.keys()))
    BuildOptions = defaultdict(lambda: defaultdict(str))
    for Tool in AllTools:
        for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
            if Tool not in Options:
                continue
            for Attr in Options[Tool]:
                #
                # Do not generate it in Makefile
                #
                if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
                    continue
                Value = Options[Tool][Attr]
                # Options under the '*' pseudo-tool fan out to every real tool.
                ToolList = [Tool]
                if Tool == TAB_STAR:
                    ToolList = list(AllTools)
                    ToolList.remove(TAB_STAR)
                for ExpandedTool in ToolList:
                    # check if override is indicated
                    if Value.startswith('='):
                        BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value[1:])
                    else:
                        if Attr != 'PATH':
                            BuildOptions[ExpandedTool][Attr] += " " + mws.handleWsMacro(Value)
                        else:
                            BuildOptions[ExpandedTool][Attr] = mws.handleWsMacro(Value)
    return BuildOptions, BuildRuleOrder
## Return (module-type scoped options, [Components] module scoped options)
#  for the given module, both expanded for the active target/toolchain/arch.
def GetGlobalBuildOptions(self,Module):
    TypeScoped = self._ExpandBuildOption(
        self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType))
    if Module in self.Platform.Modules:
        ComponentScoped = self._ExpandBuildOption(self.Platform.Modules[str(Module)].BuildOptions)
    else:
        ComponentScoped = {}
    return TypeScoped, ComponentScoped
## Effective FILE_GUID of a module
#
# When the DSC overrides a module with FILE_GUID, the meta-file path is
# prefixed with that GUID ("<GUID><module>.inf") while the original file
# name is kept, so a basename mismatch signals an override.
def ModuleGuid(self,Module):
    declared_name = os.path.basename(Module.MetaFile.File)
    effective_name = os.path.basename(Module.MetaFile.Path)
    if declared_name == effective_name:
        return Module.Guid
    # A registry-format GUID is exactly 36 characters long.
    return effective_name[:36]
## Map (BaseName, MetaFile) -> "BaseName_GUID" for modules whose BaseName is
#  shared by multiple different GUIDs, so generated artifacts do not collide.
#  Fails the build when two different meta-files share BaseName AND GUID.
@cached_property
def UniqueBaseName(self):
    retVal ={}
    # unique_base_name -> list of meta-files claiming that name+GUID
    ModuleNameDict = {}
    # BaseName -> set of (GUID, meta-file) pairs using that name
    UniqueName = {}
    for Module in self._MbList:
        unique_base_name = '%s_%s' % (Module.BaseName,self.ModuleGuid(Module))
        if unique_base_name not in ModuleNameDict:
            ModuleNameDict[unique_base_name] = []
        ModuleNameDict[unique_base_name].append(Module.MetaFile)
        if Module.BaseName not in UniqueName:
            UniqueName[Module.BaseName] = set()
        UniqueName[Module.BaseName].add((self.ModuleGuid(Module),Module.MetaFile))
    for module_paths in ModuleNameDict.values():
        # Two distinct paths with the same BaseName+GUID is a hard error.
        if len(set(module_paths))>1:
            samemodules = list(set(module_paths))
            EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
                '  %s\n  %s' % (samemodules[0], samemodules[1]))
    for name in UniqueName:
        Guid_Path = UniqueName[name]
        if len(Guid_Path) > 1:
            # Same BaseName, different GUIDs: suffix each with its GUID.
            for guid,mpath in Guid_Path:
                retVal[(name,mpath)] = '%s_%s' % (name,guid)
    return retVal
## Expand * in build option key
#
# @param Options Options to be expanded
# @param ModuleStyle Optional EDK_NAME/EDKII_NAME filter on Key[2]
# @param ToolDef Use specified ToolDef instead of full version.
# This is needed during initialization to prevent
# infinite recursion betweeh BuildOptions,
# ToolDefinition, and this function.
#
# @retval options Options expanded
#
# Note: Options may be mutated (entries popped) while resolving '='-override
# conflicts between overlapping keys.
#
def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
    if not ToolDef:
        ToolDef = self.ToolDefinition
    BuildOptions = {}
    FamilyMatch = False
    FamilyIsNull = True
    OverrideList = {}
    #
    # Construct a list contain the build options which need override.
    #
    for Key in Options:
        #
        # Key[0] -- tool family
        # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
        #
        if (Key[0] == self.BuildRuleFamily and
            (ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
            Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
            # Only '='-prefixed (override) options applicable to this build
            # participate in the conflict resolution below.
            if (Target == self.BuildTarget or Target == TAB_STAR) and\
                (ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
                (Arch == self.Arch or Arch == TAB_STAR) and\
                Options[Key].startswith("="):
                if OverrideList.get(Key[1]) is not None:
                    OverrideList.pop(Key[1])
                OverrideList[Key[1]] = Options[Key]
    #
    # Use the highest priority value.
    #
    if (len(OverrideList) >= 2):
        KeyList = list(OverrideList.keys())
        for Index in range(len(KeyList)):
            NowKey = KeyList[Index]
            Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
            for Index1 in range(len(KeyList) - Index - 1):
                NextKey = KeyList[Index1 + Index + 1]
                #
                # Compare two Key, if one is included by another, choose the higher priority one
                #
                Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
                if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
                    (ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
                    (Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
                    (CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
                    (Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):
                    # Drop the lower-priority key from Options entirely.
                    if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
                        if Options.get((self.BuildRuleFamily, NextKey)) is not None:
                            Options.pop((self.BuildRuleFamily, NextKey))
                    else:
                        if Options.get((self.BuildRuleFamily, NowKey)) is not None:
                            Options.pop((self.BuildRuleFamily, NowKey))
    # First pass: match option families against BUILDRULEFAMILY.
    for Key in Options:
        if ModuleStyle is not None and len (Key) > 2:
            # Check Module style is EDK or EDKII.
            # Only append build option for the matched style module.
            if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                continue
            elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                continue
        Family = Key[0]
        Target, Tag, Arch, Tool, Attr = Key[1].split("_")
        # if tool chain family doesn't match, skip it
        if Family != "":
            Found = False
            if Tool in ToolDef:
                FamilyIsNull = False
                if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[Tool]:
                    if Family == ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                        FamilyMatch = True
                        Found = True
            if TAB_STAR in ToolDef:
                FamilyIsNull = False
                if TAB_TOD_DEFINES_BUILDRULEFAMILY in ToolDef[TAB_STAR]:
                    if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
                        FamilyMatch = True
                        Found = True
            if not Found:
                continue
        # expand any wildcard
        if Target == TAB_STAR or Target == self.BuildTarget:
            if Tag == TAB_STAR or Tag == self.ToolChain:
                if Arch == TAB_STAR or Arch == self.Arch:
                    if Tool not in BuildOptions:
                        BuildOptions[Tool] = {}
                    if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                        BuildOptions[Tool][Attr] = Options[Key]
                    else:
                        # append options for the same tool except PATH
                        if Attr != 'PATH':
                            BuildOptions[Tool][Attr] += " " + Options[Key]
                        else:
                            BuildOptions[Tool][Attr] = Options[Key]
    # Build Option Family has been checked, which need't to be checked again for family.
    if FamilyMatch or FamilyIsNull:
        return BuildOptions
    # Second pass (only when no BUILDRULEFAMILY matched): fall back to FAMILY.
    for Key in Options:
        if ModuleStyle is not None and len (Key) > 2:
            # Check Module style is EDK or EDKII.
            # Only append build option for the matched style module.
            if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
                continue
            elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
                continue
        Family = Key[0]
        Target, Tag, Arch, Tool, Attr = Key[1].split("_")
        # if tool chain family doesn't match, skip it
        if Family == "":
            continue
        # option has been added before
        Found = False
        if Tool in ToolDef:
            if TAB_TOD_DEFINES_FAMILY in ToolDef[Tool]:
                if Family == ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
                    Found = True
        if TAB_STAR in ToolDef:
            if TAB_TOD_DEFINES_FAMILY in ToolDef[TAB_STAR]:
                if Family == ToolDef[TAB_STAR][TAB_TOD_DEFINES_FAMILY]:
                    Found = True
        if not Found:
            continue
        # expand any wildcard
        if Target == TAB_STAR or Target == self.BuildTarget:
            if Tag == TAB_STAR or Tag == self.ToolChain:
                if Arch == TAB_STAR or Arch == self.Arch:
                    if Tool not in BuildOptions:
                        BuildOptions[Tool] = {}
                    if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
                        BuildOptions[Tool][Attr] = Options[Key]
                    else:
                        # append options for the same tool except PATH
                        if Attr != 'PATH':
                            BuildOptions[Tool][Attr] += " " + Options[Key]
                        else:
                            BuildOptions[Tool][Attr] = Options[Key]
    return BuildOptions
| edk2-master | BaseTools/Source/Python/AutoGen/PlatformAutoGen.py |
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import os.path as path
import hashlib
from collections import defaultdict
from GenFds.FdfParser import FdfParser
from Workspace.WorkspaceCommon import GetModuleLibInstances
from AutoGen import GenMake
from AutoGen.AutoGen import AutoGen
from AutoGen.PlatformAutoGen import PlatformAutoGen
from AutoGen.BuildEngine import gDefaultBuildRuleFile
from Common.ToolDefClassObject import gDefaultToolsDefFile
from Common.StringUtils import NormPath
from Common.BuildToolError import *
from Common.DataType import *
from Common.Misc import *
import json
## Regular expression for splitting Dependency Expression string into tokens
# Raw string: the pattern contains regex escapes (\(, \), \S, \.) that are
# invalid string escape sequences in a non-raw literal (SyntaxWarning in
# modern CPython, slated to become an error).
gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")
## Regular expression for match: PCD(xxxx.yyy)
gPCDAsGuidPattern = re.compile(r"^PCD\(.+\..+\)$")
## Workspace AutoGen class
#
# This class is used mainly to control the whole platform build for different
# architecture. This class will generate top level makefile.
#
class WorkspaceAutoGen(AutoGen):
# Run the real initializer exactly once per object; repeated construction
# of the same instance (AutoGen caching) is a no-op after the first call.
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
    if hasattr(self, "_Init"):
        return
    self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs)
    self._Init = True
## Initialize WorkspaceAutoGen
#
# @param WorkspaceDir Root directory of workspace
# @param ActivePlatform Meta-file of active platform
# @param Target Build target
# @param Toolchain Tool chain name
# @param ArchList List of architecture of current build
# @param MetaFileDb Database containing meta-files
# @param BuildConfig Configuration of build
# @param ToolDefinition Tool chain definitions
# @param FlashDefinitionFile File of flash definition
# @param Fds FD list to be generated
# @param Fvs FV list to be generated
# @param Caps Capsule list to be generated
# @param SkuId SKU id from command line
#
def _InitWorker(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
                BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
                Progress=None, BuildModule=None):
    self.BuildDatabase = MetaFileDb
    self.MetaFile = ActivePlatform
    self.WorkspaceDir = WorkspaceDir
    self.Platform = self.BuildDatabase[self.MetaFile, TAB_ARCH_COMMON, Target, Toolchain]
    GlobalData.gActivePlatform = self.Platform
    self.BuildTarget = Target
    self.ToolChain = Toolchain
    self.ArchList = ArchList
    self.SkuId = SkuId
    self.UniFlag = UniFlag
    self.TargetTxt = BuildConfig
    self.ToolDef = ToolDefinition
    self.FdfFile = FlashDefinitionFile
    self.FdTargetList = Fds if Fds else []
    self.FvTargetList = Fvs if Fvs else []
    self.CapTargetList = Caps if Caps else []
    self.AutoGenObjectList = []
    self._GuidDict = {}
    # there's many relative directory operations, so ...
    os.chdir(self.WorkspaceDir)
    # Normalize/validate the requested ARCH set and build target up front.
    self.MergeArch()
    self.ValidateBuildTarget()
    EdkLogger.info("")
    if self.ArchList:
        EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
    EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
    EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
    EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
    if BuildModule:
        EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))
    if self.FdfFile:
        EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))
        EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)
    if Progress:
        Progress.Start("\nProcessing meta-data")
    #
    # Mark now build in AutoGen Phase
    #
    #
    # Collect Platform Guids to support Guid name in Fdfparser.
    #
    self.CollectPlatformGuids()
    GlobalData.gAutoGenPhase = True
    self.ProcessModuleFromPdf()
    self.ProcessPcdType()
    self.ProcessMixedPcd()
    self.VerifyPcdsFromFDF()
    self.CollectAllPcds()
    for Pa in self.AutoGenObjectList:
        Pa.FillData_LibConstPcd()
    self.GeneratePkgLevelHash()
    #
    # Check PCDs token value conflict in each DEC file.
    #
    self._CheckAllPcdsTokenValueConflict()
    #
    # Check PCD type and definition between DSC and DEC
    #
    self._CheckPcdDefineAndType()
    self.CreateBuildOptionsFile()
    self.CreatePcdTokenNumberFile()
    self.GeneratePlatformLevelHash()
#
# Merge Arch
#
# Intersect the requested ARCH list with the platform's supported list;
# abort when nothing remains, warn when some requested ARCHs were dropped.
#
def MergeArch(self):
    if not self.ArchList:
        UsableArchSet = set(self.Platform.SupArchList)
    else:
        UsableArchSet = set(self.ArchList) & set(self.Platform.SupArchList)
    if not UsableArchSet:
        EdkLogger.error("build", PARAMETER_INVALID,
                        ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))
    elif self.ArchList and len(UsableArchSet) != len(self.ArchList):
        IgnoredArchSet = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
        EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
                          % (" ".join(IgnoredArchSet), " ".join(self.Platform.SupArchList)))
    self.ArchList = tuple(UsableArchSet)
# Validate build target: abort when the requested target is not one the
# platform DSC declares as supported.
def ValidateBuildTarget(self):
    if self.BuildTarget in self.Platform.BuildTargets:
        return
    EdkLogger.error("build", PARAMETER_INVALID,
                    ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
                              % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
## Pre-populate GlobalData.gGuidDict with every GUID declared by packages
#  referenced from DSC modules or from the DSC [Packages] section, so the
#  FDF parser can resolve GUID names.
def CollectPlatformGuids(self):
    oriInfList = []
    oriPkgSet = set()
    PlatformPkg = set()
    for Arch in self.ArchList:
        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        oriInfList = Platform.Modules
        for ModuleFile in oriInfList:
            ModuleData = self.BuildDatabase[ModuleFile, Platform._Arch, Platform._Target, Platform._Toolchain]
            oriPkgSet.update(ModuleData.Packages)
            # oriPkgSet accumulates across modules/arches; re-walking it here
            # is redundant but harmless (dict.update is idempotent).
            for Pkg in oriPkgSet:
                Guids = Pkg.Guids
                GlobalData.gGuidDict.update(Guids)
        if Platform.Packages:
            PlatformPkg.update(Platform.Packages)
            for Pkg in PlatformPkg:
                Guids = Pkg.Guids
                GlobalData.gGuidDict.update(Guids)
## Parse the FDF file (if any) and return its profile object, or None.
#
# Side effects: may set self.FdfFile from the DSC FLASH_DEFINITION, stores
# the parser in GlobalData.gFdfParser, and clears the FD/FV/Capsule target
# lists when no flash definition file is available.
@cached_property
def FdfProfile(self):
    if not self.FdfFile:
        self.FdfFile = self.Platform.FlashDefinition
    FdfProfile = None
    if self.FdfFile:
        Fdf = FdfParser(self.FdfFile.Path)
        Fdf.ParseFile()
        GlobalData.gFdfParser = Fdf
        if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
            FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
            for FdRegion in FdDict.RegionList:
                # The VPD region (located via the VPD tool GUID file) must be
                # 8-byte aligned for the VPD tool to patch it.
                if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
                    if int(FdRegion.Offset) % 8 != 0:
                        EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
        FdfProfile = Fdf.Profile
    else:
        if self.FdTargetList:
            EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))
            self.FdTargetList = []
        if self.FvTargetList:
            EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))
            self.FvTargetList = []
        if self.CapTargetList:
            EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))
            self.CapTargetList = []
    return FdfProfile
def ProcessModuleFromPdf(self):
    """Cross-check modules listed in the FDF against the platform DSC.

    Validates that every FV target named on the command line exists in the
    FDF, and that every INF referenced by the FDF either appears in the DSC
    or is a binary-only module; otherwise the build fails with PARSER_ERROR.
    """
    if self.FdfProfile:
        for fvname in self.FvTargetList:
            if fvname.upper() not in self.FdfProfile.FvDict:
                EdkLogger.error("build", OPTION_VALUE_INVALID,
                                "No such an FV in FDF file: %s" % fvname)

        # In DSC file may use FILE_GUID to override the module, then in the Platform.Modules use FILE_GUIDmodule.inf as key,
        # but the path (self.MetaFile.Path) is the real path
        for key in self.FdfProfile.InfDict:
            if key == 'ArchTBD':
                # Arch-agnostic INFs: accept the module if it is in the DSC of
                # ANY build arch (the for/else fires only when no arch matched).
                MetaFile_cache = defaultdict(set)
                for Arch in self.ArchList:
                    Current_Platform_cache = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
                    for Pkey in Current_Platform_cache.Modules:
                        MetaFile_cache[Arch].add(Current_Platform_cache.Modules[Pkey].MetaFile)
                for Inf in self.FdfProfile.InfDict[key]:
                    # NOTE(review): `Arch` here is the stale loop variable left
                    # over from the cache-building loop above (the last arch in
                    # self.ArchList) — looks intentional upstream, but confirm.
                    ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
                    for Arch in self.ArchList:
                        if ModuleFile in MetaFile_cache[Arch]:
                            break
                    else:
                        ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
                        if not ModuleData.IsBinaryModule:
                            EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
            else:
                # Arch-specific INF list: only check it against the matching arch.
                for Arch in self.ArchList:
                    if Arch == key:
                        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
                        MetaFileList = set()
                        for Pkey in Platform.Modules:
                            MetaFileList.add(Platform.Modules[Pkey].MetaFile)
                        for Inf in self.FdfProfile.InfDict[key]:
                            ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
                            if ModuleFile in MetaFileList:
                                continue
                            ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
                            if not ModuleData.IsBinaryModule:
                                EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
# parse FDF file to get PCDs in it, if any
def VerifyPcdsFromFDF(self):
    """Validate the PCDs the FDF references against the DEC declarations;
    a no-op when the platform has no FDF file."""
    Profile = self.FdfProfile
    if not Profile:
        return
    self.VerifyPcdDeclearation(Profile.PcdDict)
def ProcessPcdType(self):
    """Resolve 'Pending' PCD access types in every INF build record.

    A module PCD is Pending when the INF did not pin its access method; the
    type is then inherited from the platform DSC, from the DSC's per-module
    override section, or — for library instances — from a referencing module.
    """
    for Arch in self.ArchList:
        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        # Touch Platform.Pcds so the (lazy) platform PCD table is populated
        # before the cache walk below reads it.
        Platform.Pcds
        # generate the SourcePcdDict and BinaryPcdDict
        Libs = []
        for BuildData in list(self.BuildDatabase._CACHE_.values()):
            if BuildData.Arch != Arch:
                continue
            if BuildData.MetaFile.Ext == '.inf' and str(BuildData) in Platform.Modules :
                Libs.extend(GetModuleLibInstances(BuildData, Platform,
                                 self.BuildDatabase,
                                 Arch,
                                 self.BuildTarget,
                                 self.ToolChain,
                                 self.Platform.MetaFile,
                                 EdkLogger
                                 ))
        for BuildData in list(self.BuildDatabase._CACHE_.values()):
            if BuildData.Arch != Arch:
                continue
            if BuildData.MetaFile.Ext == '.inf':
                for key in BuildData.Pcds:
                    if BuildData.Pcds[key].Pending:
                        # 1) platform-wide DSC setting wins first
                        if key in Platform.Pcds:
                            PcdInPlatform = Platform.Pcds[key]
                            if PcdInPlatform.Type:
                                BuildData.Pcds[key].Type = PcdInPlatform.Type
                                BuildData.Pcds[key].Pending = False
                        # 2) per-module DSC override section
                        if BuildData.MetaFile in Platform.Modules:
                            PlatformModule = Platform.Modules[str(BuildData.MetaFile)]
                            if key in PlatformModule.Pcds:
                                PcdInPlatform = PlatformModule.Pcds[key]
                                if PcdInPlatform.Type:
                                    BuildData.Pcds[key].Type = PcdInPlatform.Type
                                    BuildData.Pcds[key].Pending = False
                        else:
                            #Pcd used in Library, Pcd Type from reference module if Pcd Type is Pending
                            if BuildData.Pcds[key].Pending:
                                if bool(BuildData.LibraryClass):
                                    if BuildData in set(Libs):
                                        # 3) inherit from the first referencing
                                        # module that overrides this PCD
                                        ReferenceModules = BuildData.ReferenceModules
                                        for ReferenceModule in ReferenceModules:
                                            if ReferenceModule.MetaFile in Platform.Modules:
                                                RefPlatformModule = Platform.Modules[str(ReferenceModule.MetaFile)]
                                                if key in RefPlatformModule.Pcds:
                                                    PcdInReferenceModule = RefPlatformModule.Pcds[key]
                                                    if PcdInReferenceModule.Type:
                                                        BuildData.Pcds[key].Type = PcdInReferenceModule.Type
                                                        BuildData.Pcds[key].Pending = False
                                                        break
def ProcessMixedPcd(self):
    """Detect and rename 'mixed' PCDs (same token used with different access
    methods) so each access method gets a distinct TokenCName.

    Source modules may use only ONE access method per PCD (hard error);
    binary modules may mix, in which case the PCD is registered in
    GlobalData.MixedPcd and renamed with a `_<Type>` suffix here and in the
    FDF PCD dictionary.
    """
    for Arch in self.ArchList:
        SourcePcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set(),TAB_PCDS_DYNAMIC:set(),TAB_PCDS_FIXED_AT_BUILD:set()}
        BinaryPcdDict = {TAB_PCDS_DYNAMIC_EX:set(), TAB_PCDS_PATCHABLE_IN_MODULE:set()}
        SourcePcdDict_Keys = SourcePcdDict.keys()
        BinaryPcdDict_Keys = BinaryPcdDict.keys()

        # generate the SourcePcdDict and BinaryPcdDict
        for BuildData in list(self.BuildDatabase._CACHE_.values()):
            if BuildData.Arch != Arch:
                continue
            if BuildData.MetaFile.Ext == '.inf':
                for key in BuildData.Pcds:
                    # Classify each (TokenCName, TokenSpaceGuidCName) pair by
                    # access method and by whether the module is binary-only.
                    if TAB_PCDS_DYNAMIC_EX in BuildData.Pcds[key].Type:
                        if BuildData.IsBinaryModule:
                            BinaryPcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                        else:
                            SourcePcdDict[TAB_PCDS_DYNAMIC_EX].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                    elif TAB_PCDS_PATCHABLE_IN_MODULE in BuildData.Pcds[key].Type:
                        if BuildData.MetaFile.Ext == '.inf':
                            if BuildData.IsBinaryModule:
                                BinaryPcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                            else:
                                SourcePcdDict[TAB_PCDS_PATCHABLE_IN_MODULE].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                    elif TAB_PCDS_DYNAMIC in BuildData.Pcds[key].Type:
                        SourcePcdDict[TAB_PCDS_DYNAMIC].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
                    elif TAB_PCDS_FIXED_AT_BUILD in BuildData.Pcds[key].Type:
                        SourcePcdDict[TAB_PCDS_FIXED_AT_BUILD].add((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))

        #
        # A PCD can only use one type for all source modules
        #
        for i in SourcePcdDict_Keys:
            for j in SourcePcdDict_Keys:
                if i != j:
                    Intersections = SourcePcdDict[i].intersection(SourcePcdDict[j])
                    if len(Intersections) > 0:
                        EdkLogger.error(
                            'build',
                            FORMAT_INVALID,
                            "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),
                            ExtraData='\n\t'.join(str(P[1]+'.'+P[0]) for P in Intersections)
                            )

        #
        # intersection the BinaryPCD for Mixed PCD
        #
        for i in BinaryPcdDict_Keys:
            for j in BinaryPcdDict_Keys:
                if i != j:
                    Intersections = BinaryPcdDict[i].intersection(BinaryPcdDict[j])
                    for item in Intersections:
                        NewPcd1 = (item[0] + '_' + i, item[1])
                        NewPcd2 = (item[0] + '_' + j, item[1])
                        if item not in GlobalData.MixedPcd:
                            GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
                        else:
                            if NewPcd1 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd1)
                            if NewPcd2 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd2)

        #
        # intersection the SourcePCD and BinaryPCD for Mixed PCD
        #
        for i in SourcePcdDict_Keys:
            for j in BinaryPcdDict_Keys:
                if i != j:
                    Intersections = SourcePcdDict[i].intersection(BinaryPcdDict[j])
                    for item in Intersections:
                        NewPcd1 = (item[0] + '_' + i, item[1])
                        NewPcd2 = (item[0] + '_' + j, item[1])
                        if item not in GlobalData.MixedPcd:
                            GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
                        else:
                            if NewPcd1 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd1)
                            if NewPcd2 not in GlobalData.MixedPcd[item]:
                                GlobalData.MixedPcd[item].append(NewPcd2)

        # Rename the platform's copy of each mixed PCD with a `_<Type>` suffix.
        BuildData = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        for key in BuildData.Pcds:
            for SinglePcd in GlobalData.MixedPcd:
                if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
                    for item in GlobalData.MixedPcd[SinglePcd]:
                        Pcd_Type = item[0].split('_')[-1]
                        if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in PCD_DYNAMIC_EX_TYPE_SET) or \
                           (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in PCD_DYNAMIC_TYPE_SET):
                            Value = BuildData.Pcds[key]
                            Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type
                            if len(key) == 2:
                                newkey = (Value.TokenCName, key[1])
                            elif len(key) == 3:
                                newkey = (Value.TokenCName, key[1], key[2])
                            # NOTE(review): del+insert while `for key in
                            # BuildData.Pcds` is still iterating mutates the
                            # dict mid-iteration; the breaks below return to
                            # that loop, so this relies on CPython tolerating a
                            # size-neutral mutation — fragile, confirm upstream.
                            del BuildData.Pcds[key]
                            BuildData.Pcds[newkey] = Value
                            break
                    break

    if self.FdfProfile:
        PcdSet = self.FdfProfile.PcdDict
        # handle the mixed pcd in FDF file
        # NOTE(review): this deletes and inserts keys while iterating PcdSet;
        # if a mixed PCD maps to other than one replacement this changes the
        # dict size mid-iteration and raises RuntimeError on Python 3 — the
        # conventional fix is `for key in list(PcdSet):`.
        for key in PcdSet:
            if key in GlobalData.MixedPcd:
                Value = PcdSet[key]
                del PcdSet[key]
                for item in GlobalData.MixedPcd[key]:
                    PcdSet[item] = Value
#Collect package set information from INF of FDF
@cached_property
def PkgSet(self):
    """Per-arch list of DEC packages used by the build (cached).

    Returns a dict mapping each arch to the packages referenced by the DSC's
    modules, by FDF-only modules, and by the platform itself.
    """
    if not self.FdfFile:
        self.FdfFile = self.Platform.FlashDefinition

    if self.FdfFile:
        ModuleList = self.FdfProfile.InfList
    else:
        ModuleList = []
    Pkgs = {}
    for Arch in self.ArchList:
        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        PkgSet = set()
        # Packages of every module listed in the DSC.
        for mb in [self.BuildDatabase[m, Arch, self.BuildTarget, self.ToolChain] for m in Platform.Modules]:
            PkgSet.update(mb.Packages)
        # Packages of modules that appear only in the FDF.
        for Inf in ModuleList:
            ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
            if ModuleFile in Platform.Modules:
                continue
            ModuleData = self.BuildDatabase[ModuleFile, Arch, self.BuildTarget, self.ToolChain]
            PkgSet.update(ModuleData.Packages)
        # Packages declared directly by the platform DSC.
        PkgSet.update(Platform.Packages)
        Pkgs[Arch] = list(PkgSet)
    return Pkgs
def VerifyPcdDeclearation(self,PcdSet):
    """Verify that every PCD in PcdSet is declared by a DEC package and is
    not a Dynamic/DynamicEx PCD (which may not be set from an FDF file).

    @param PcdSet  iterable of (Name, Guid, Fields) tuples collected from
                   the FDF file; build breaks with PARSER_ERROR on violation.
    """
    for Arch in self.ArchList:
        Platform = self.BuildDatabase[self.MetaFile, Arch, self.BuildTarget, self.ToolChain]
        Pkgs = self.PkgSet[Arch]
        DecPcds = set()
        DecPcdsKey = set()
        for Pkg in Pkgs:
            for Pcd in Pkg.Pcds:
                # DEC PCD keys carry (CName, TokenSpaceGuid, Type); keep both a
                # type-less and a typed lookup set.
                DecPcds.add((Pcd[0], Pcd[1]))
                DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))

        Platform.SkuName = self.SkuId
        for Name, Guid,Fileds in PcdSet:
            if (Name, Guid) not in DecPcds:
                EdkLogger.error(
                    'build',
                    PARSER_ERROR,
                    "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),
                    File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],
                    Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]
                )
            else:
                # Check whether Dynamic or DynamicEx PCD used in FDF file. If used, build break and give a error message.
                if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \
                    or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \
                    or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:
                    continue
                elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:
                    EdkLogger.error(
                        'build',
                        PARSER_ERROR,
                        "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),
                        File = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][0],
                        Line = self.FdfProfile.PcdFileLineDict[Name, Guid, Fileds][1]
                    )
def CollectAllPcds(self):
    """Build a PlatformAutoGen per arch, collect its dynamic and
    fixed-at-build PCDs, then distribute the computed PCD token numbers to
    every cached AutoGen object's data pipe."""
    for Arch in self.ArchList:
        Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
        #
        # Explicitly collect platform's dynamic PCDs
        #
        Pa.CollectPlatformDynamicPcds()
        Pa.CollectFixedAtBuildPcds()
        self.AutoGenObjectList.append(Pa)

    # We need to calculate the PcdTokenNumber after all Arch Pcds are collected.
    for Arch in self.ArchList:
        #Pcd TokenNumber
        # PlatformAutoGen construction is presumably cached per key, so this
        # returns the same object built above — TODO confirm.
        Pa = PlatformAutoGen(self, self.MetaFile, self.BuildTarget, self.ToolChain, Arch)
        self.UpdateModuleDataPipe(Arch, {"PCD_TNUM":Pa.PcdTokenNumber})
def UpdateModuleDataPipe(self,arch, attr_dict):
    """Push attr_dict into the DataPipe of every cached AutoGen object whose
    arch matches; failures for individual entries are ignored (best-effort)."""
    for (Target, Toolchain, Arch, MetaFile) in AutoGen.Cache():
        if Arch == arch:
            try:
                AutoGen.Cache()[(Target, Toolchain, Arch, MetaFile)].DataPipe.DataContainer = attr_dict
            except Exception:
                # Some cache entries may have no DataPipe yet — skip them.
                pass
#
# Generate Package level hash value
#
def GeneratePkgLevelHash(self):
    """For each build arch, reset the global package-hash table and — only
    when hash caching is enabled — hash every package used by that arch."""
    for Arch in self.ArchList:
        # Start each arch from a clean package-hash table.
        GlobalData.gPackageHash = {}
        if not GlobalData.gUseHashCache:
            continue
        for Pkg in self.PkgSet[Arch]:
            self._GenPkgLevelHash(Pkg)
def CreateBuildOptionsFile(self):
    #
    # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.
    #
    # Each entry is rendered as "label: value" followed by a line break; the
    # FDF line is emitted only when a flash definition file is in use.
    Entries = [
        'gCommandLineDefines: ' + str(GlobalData.gCommandLineDefines),
        'BuildOptionPcd: ' + str(GlobalData.BuildOptionPcd),
        'Active Platform: ' + str(self.Platform),
    ]
    if self.FdfFile:
        Entries.append('Flash Image Definition: ' + str(self.FdfFile))
    content = ''.join(Entry + TAB_LINE_BREAK for Entry in Entries)
    SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
def CreatePcdTokenNumberFile(self):
    #
    # Create PcdToken Number file for Dynamic/DynamicEx Pcd.
    #
    # One line per dynamic PCD: "(TokenCName, TokenSpaceGuidCName) : number".
    Parts = ['PcdTokenNumber: ']
    Pa = self.AutoGenObjectList[0]
    if Pa.PcdTokenNumber and Pa.DynamicPcdList:
        for Pcd in Pa.DynamicPcdList:
            Key = (Pcd.TokenCName, Pcd.TokenSpaceGuidCName)
            Parts.append(TAB_LINE_BREAK + str(Key) + ' : ' + str(Pa.PcdTokenNumber[Key]))
    SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), ''.join(Parts), False)
def GeneratePlatformLevelHash(self):
    """Hash all workspace metafiles for the hash-cache feature and write the
    'AutoGen' metafile list into the build directory.

    Side effects: records the newest metafile mtime in self._SrcTimeStamp,
    writes Platform.hash.*/Platform.hashchain.* under Hash_Platform (and
    copies them to gBinCacheDest when set). Returns True.
    """
    #
    # Get set of workspace metafiles
    #
    AllWorkSpaceMetaFiles = self._GetMetaFiles(self.BuildTarget, self.ToolChain)
    AllWorkSpaceMetaFileList = sorted(AllWorkSpaceMetaFiles, key=lambda x: str(x))
    #
    # Retrieve latest modified time of all metafiles
    #
    SrcTimeStamp = 0
    for f in AllWorkSpaceMetaFiles:
        # os.stat(f)[8] is st_mtime.
        if os.stat(f)[8] > SrcTimeStamp:
            SrcTimeStamp = os.stat(f)[8]
    self._SrcTimeStamp = SrcTimeStamp

    if GlobalData.gUseHashCache:
        FileList = []
        m = hashlib.md5()
        for file in AllWorkSpaceMetaFileList:
            # .dec files are covered by the package-level hash instead.
            if file.endswith('.dec'):
                continue
            f = open(file, 'rb')
            Content = f.read()
            f.close()
            m.update(Content)
            FileList.append((str(file), hashlib.md5(Content).hexdigest()))

        HashDir = path.join(self.BuildDir, "Hash_Platform")
        HashFile = path.join(HashDir, 'Platform.hash.' + m.hexdigest())
        SaveFileOnChange(HashFile, m.hexdigest(), False)
        HashChainFile = path.join(HashDir, 'Platform.hashchain.' + m.hexdigest())
        GlobalData.gPlatformHashFile = HashChainFile
        try:
            with open(HashChainFile, 'w') as f:
                json.dump(FileList, f, indent=2)
        except:
            EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)

        if GlobalData.gBinCacheDest:
            # Copy platform hash files to cache destination
            FileDir = path.join(GlobalData.gBinCacheDest, self.OutputDir, self.BuildTarget + "_" + self.ToolChain, "Hash_Platform")
            CacheFileDir = FileDir
            CreateDirectory(CacheFileDir)
            CopyFileOnChange(HashFile, CacheFileDir)
            CopyFileOnChange(HashChainFile, CacheFileDir)

    #
    # Write metafile list to build directory
    #
    AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')
    if os.path.exists (AutoGenFilePath):
        os.remove(AutoGenFilePath)
    if not os.path.exists(self.BuildDir):
        os.makedirs(self.BuildDir)
    with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
        for f in AllWorkSpaceMetaFileList:
            print(f, file=file)
    return True
def _GenPkgLevelHash(self, Pkg):
    """Hash one DEC package (its .dec file plus every file under its include
    paths) and persist <PackageName>.hash.* / .hashchain.* files.

    No-op if the package was already hashed this arch; results are recorded
    in GlobalData.gPackageHash and GlobalData.gPackageHashFile.
    """
    if Pkg.PackageName in GlobalData.gPackageHash:
        return

    PkgDir = os.path.join(self.BuildDir, Pkg.Arch, "Hash_Pkg", Pkg.PackageName)
    CreateDirectory(PkgDir)

    FileList = []
    m = hashlib.md5()
    # Get .dec file's hash value
    f = open(Pkg.MetaFile.Path, 'rb')
    Content = f.read()
    f.close()
    m.update(Content)
    FileList.append((str(Pkg.MetaFile.Path), hashlib.md5(Content).hexdigest()))

    # Get include files hash value
    # Sorted traversal keeps the aggregate digest stable across runs.
    if Pkg.Includes:
        for inc in sorted(Pkg.Includes, key=lambda x: str(x)):
            for Root, Dirs, Files in os.walk(str(inc)):
                for File in sorted(Files):
                    File_Path = os.path.join(Root, File)
                    f = open(File_Path, 'rb')
                    Content = f.read()
                    f.close()
                    m.update(Content)
                    FileList.append((str(File_Path), hashlib.md5(Content).hexdigest()))
    GlobalData.gPackageHash[Pkg.PackageName] = m.hexdigest()

    HashDir = PkgDir
    HashFile = path.join(HashDir, Pkg.PackageName + '.hash.' + m.hexdigest())
    SaveFileOnChange(HashFile, m.hexdigest(), False)
    HashChainFile = path.join(HashDir, Pkg.PackageName + '.hashchain.' + m.hexdigest())
    GlobalData.gPackageHashFile[(Pkg.PackageName, Pkg.Arch)] = HashChainFile
    try:
        with open(HashChainFile, 'w') as f:
            json.dump(FileList, f, indent=2)
    except:
        EdkLogger.quiet("[cache warning]: fail to save hashchain file:%s" % HashChainFile)

    if GlobalData.gBinCacheDest:
        # Copy Pkg hash files to cache destination dir
        FileDir = path.join(GlobalData.gBinCacheDest, self.OutputDir, self.BuildTarget + "_" + self.ToolChain, Pkg.Arch, "Hash_Pkg", Pkg.PackageName)
        CacheFileDir = FileDir
        CreateDirectory(CacheFileDir)
        CopyFileOnChange(HashFile, CacheFileDir)
        CopyFileOnChange(HashChainFile, CacheFileDir)
def _GetMetaFiles(self, Target, Toolchain):
    """Return the set of all metafile paths the build depends on: FDF (plus
    its includes), DSC (plus included DSCs), build_rule/tools_def, generated
    BuildOptions/PcdTokenNumber files, tool definition files, and every DEC."""
    AllWorkSpaceMetaFiles = set()
    #
    # add fdf
    #
    if self.FdfFile:
        AllWorkSpaceMetaFiles.add (self.FdfFile.Path)
        for f in GlobalData.gFdfParser.GetAllIncludedFile():
            AllWorkSpaceMetaFiles.add (f.FileName)
    #
    # add dsc
    #
    AllWorkSpaceMetaFiles.add(self.MetaFile.Path)

    #
    # add build_rule.txt & tools_def.txt
    #
    AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))
    AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))

    # add BuildOption metafile
    #
    AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))

    # add PcdToken Number file for Dynamic/DynamicEx Pcd
    #
    AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))

    for Pa in self.AutoGenObjectList:
        AllWorkSpaceMetaFiles.add(Pa.ToolDefinitionFile)

    for Arch in self.ArchList:
        #
        # add dec
        #
        for Package in PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch).PackageList:
            AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)

        #
        # add included dsc
        #
        for filePath in self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]._RawData.IncludedFiles:
            AllWorkSpaceMetaFiles.add(filePath.Path)

    return AllWorkSpaceMetaFiles
def _CheckPcdDefineAndType(self):
    """Validate that each DSC PCD's access type matches a declaration in some
    DEC package; error out on a type mismatch, and warn for PCDs that no DEC
    package declares at all for the given arches."""
    PcdTypeSet = {TAB_PCDS_FIXED_AT_BUILD,
                  TAB_PCDS_PATCHABLE_IN_MODULE,
                  TAB_PCDS_FEATURE_FLAG,
                  TAB_PCDS_DYNAMIC,
                  TAB_PCDS_DYNAMIC_EX}

    # This dict store PCDs which are not used by any modules with specified arches
    UnusedPcd = OrderedDict()
    for Pa in self.AutoGenObjectList:
        # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
        for Pcd in Pa.Platform.Pcds:
            PcdType = Pa.Platform.Pcds[Pcd].Type

            # If no PCD type, this PCD comes from FDF
            if not PcdType:
                continue

            # Try to remove Hii and Vpd suffix
            if PcdType.startswith(TAB_PCDS_DYNAMIC_EX):
                PcdType = TAB_PCDS_DYNAMIC_EX
            elif PcdType.startswith(TAB_PCDS_DYNAMIC):
                PcdType = TAB_PCDS_DYNAMIC

            # for/else: the else branch runs only when NO package produced a
            # `break`, i.e. the PCD was not declared with the expected type in
            # any package — record it as unused/undeclared for this arch.
            for Package in Pa.PackageList:
                # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
                if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
                    break
                for Type in PcdTypeSet:
                    if (Pcd[0], Pcd[1], Type) in Package.Pcds:
                        # Declared, but with a different access type than the DSC uses.
                        EdkLogger.error(
                            'build',
                            FORMAT_INVALID,
                            "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
                            % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
                            ExtraData=None
                        )
                        return
            else:
                UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)

    for Pcd in UnusedPcd:
        EdkLogger.warn(
            'build',
            "The PCD was not specified by any INF module in the platform for the given architecture.\n"
            "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
            % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
            ExtraData=None
        )
def __repr__(self):
    """Readable identity: the platform metafile followed by the arch list."""
    return "{} [{}]".format(self.MetaFile, ", ".join(self.ArchList))
## Return the directory to store FV files
@cached_property
def FvDir(self):
    """Directory under BuildDir where generated firmware volume files go."""
    return path.join(self.BuildDir, TAB_FV_DIRECTORY)
## Return the directory to store all intermediate and final files built
@cached_property
def BuildDir(self):
    """Build directory, taken from the first per-arch PlatformAutoGen."""
    return self.AutoGenObjectList[0].BuildDir
## Return the build output directory platform specifies
@cached_property
def OutputDir(self):
    """Output directory as specified by the platform DSC."""
    return self.Platform.OutputDirectory
## Return platform name
@cached_property
def Name(self):
    """Platform name from the DSC."""
    return self.Platform.PlatformName
## Return meta-file GUID
@cached_property
def Guid(self):
    """Platform GUID from the DSC."""
    return self.Platform.Guid
## Return platform version
@cached_property
def Version(self):
    """Platform version string from the DSC."""
    return self.Platform.Version
## Return paths of tools
@cached_property
def ToolDefinition(self):
    """Tool definition table, taken from the first per-arch PlatformAutoGen."""
    return self.AutoGenObjectList[0].ToolDefinition
## Return directory of platform makefile
#
# @retval string Makefile directory
#
@cached_property
def MakeFileDir(self):
    """Directory holding the top-level platform makefile (same as BuildDir)."""
    return self.BuildDir
## Return build command string
#
# @retval string Build command string
#
@cached_property
def BuildCommand(self):
    # BuildCommand should be all the same. So just get one from platform AutoGen
    return self.AutoGenObjectList[0].BuildCommand
## Check the PCDs token value conflict in each DEC file.
#
# Will cause build break and raise error message while two PCDs conflict.
#
# @return None
#
def _CheckAllPcdsTokenValueConflict(self):
    """Two passes per package: (1) different PCDs in the same token space must
    not share a TokenValue (mixed PCDs are exempt); (2) the same PCD declared
    twice must use the same TokenValue."""
    for Pa in self.AutoGenObjectList:
        for Package in Pa.PackageList:
            PcdList = list(Package.Pcds.values())
            PcdList.sort(key=lambda x: int(x.TokenValue, 0))
            Count = 0
            while (Count < len(PcdList) - 1) :
                Item = PcdList[Count]
                ItemNext = PcdList[Count + 1]
                #
                # Make sure in the same token space the TokenValue should be unique
                #
                if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):
                    SameTokenValuePcdList = []
                    SameTokenValuePcdList.append(Item)
                    SameTokenValuePcdList.append(ItemNext)
                    # Extend the run with every following entry that still has
                    # the same TokenValue (list is sorted by TokenValue).
                    RemainPcdListLength = len(PcdList) - Count - 2
                    for ValueSameCount in range(RemainPcdListLength):
                        if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):
                            SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
                        else:
                            break;
                    #
                    # Sort same token value PCD list with TokenGuid and TokenCName
                    #
                    SameTokenValuePcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
                    SameTokenValuePcdListCount = 0
                    while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
                        Flag = False
                        TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
                        TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]

                        if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
                            # Mixed PCDs (one token deliberately split into
                            # per-access-method names) legitimately share a value.
                            for PcdItem in GlobalData.MixedPcd:
                                if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \
                                    (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
                                    Flag = True
                            if not Flag:
                                EdkLogger.error(
                                    'build',
                                    FORMAT_INVALID,
                                    "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
                                    % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
                                    ExtraData=None
                                )
                        SameTokenValuePcdListCount += 1
                    # Skip past the run of equal TokenValues just examined.
                    Count += SameTokenValuePcdListCount
                Count += 1

            PcdList = list(Package.Pcds.values())
            PcdList.sort(key=lambda x: "%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName))
            Count = 0
            while (Count < len(PcdList) - 1) :
                Item = PcdList[Count]
                ItemNext = PcdList[Count + 1]
                #
                # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
                #
                if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):
                    EdkLogger.error(
                        'build',
                        FORMAT_INVALID,
                        "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
                        % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
                        ExtraData=None
                    )
                Count += 1
## Generate fds command
@property
def GenFdsCommand(self):
    """GenFds command line, rendered from the top-level makefile template.

    NOTE(review): GenMake.TopLevelMakefile(self) is constructed twice here —
    presumably cheap/cached, but confirm before restructuring.
    """
    return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()
@property
def GenFdsCommandDict(self):
    """Assemble the keyword-argument dictionary passed to GenFds: log level,
    multithread/ignore-source flags, command-line PCD overrides, macro
    definitions, and the platform/target/FD/FV/capsule settings."""
    FdsCommandDict = {}
    LogLevel = EdkLogger.GetLevel()
    if LogLevel == EdkLogger.VERBOSE:
        FdsCommandDict["verbose"] = True
    elif LogLevel <= EdkLogger.DEBUG_9:
        FdsCommandDict["debug"] = LogLevel - 1
    elif LogLevel == EdkLogger.QUIET:
        FdsCommandDict["quiet"] = True

    FdsCommandDict["GenfdsMultiThread"] = GlobalData.gEnableGenfdsMultiThread
    if GlobalData.gIgnoreSource:
        FdsCommandDict["IgnoreSources"] = True

    FdsCommandDict["OptionPcd"] = []
    for pcd in GlobalData.BuildOptionPcd:
        # pcd tuple: (TokenSpaceGuid, TokenCName, [FieldName], Value, ...);
        # include the field name only when present.
        if pcd[2]:
            pcdname = '.'.join(pcd[0:3])
        else:
            pcdname = '.'.join(pcd[0:2])
        # Byte-array values (starting with '{') get the H"..." wrapper.
        if pcd[3].startswith('{'):
            FdsCommandDict["OptionPcd"].append(pcdname + '=' + 'H' + '"' + pcd[3] + '"')
        else:
            FdsCommandDict["OptionPcd"].append(pcdname + '=' + pcd[3])

    MacroList = []
    # macros passed to GenFds
    MacroDict = {}
    MacroDict.update(GlobalData.gGlobalDefines)
    MacroDict.update(GlobalData.gCommandLineDefines)
    for MacroName in MacroDict:
        if MacroDict[MacroName] != "":
            MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
        else:
            MacroList.append('"%s"' % MacroName)
    FdsCommandDict["macro"] = MacroList

    FdsCommandDict["fdf_file"] = [self.FdfFile]
    FdsCommandDict["build_target"] = self.BuildTarget
    FdsCommandDict["toolchain_tag"] = self.ToolChain
    FdsCommandDict["active_platform"] = str(self)

    FdsCommandDict["conf_directory"] = GlobalData.gConfDirectory
    FdsCommandDict["build_architecture_list"] = ','.join(self.ArchList)
    FdsCommandDict["platform_build_directory"] = self.BuildDir

    FdsCommandDict["fd"] = self.FdTargetList
    FdsCommandDict["fv"] = self.FvTargetList
    FdsCommandDict["cap"] = self.CapTargetList
    return FdsCommandDict
## Create makefile for the platform and modules in it
#
# @param CreateDepsMakeFile Flag indicating if the makefile for
# modules will be created as well
#
def CreateMakeFile(self, CreateDepsMakeFile=False):
    """Delegate makefile creation to each per-arch PlatformAutoGen; no-op
    unless CreateDepsMakeFile is truthy."""
    if CreateDepsMakeFile:
        for PlatformAutoGenObj in self.AutoGenObjectList:
            PlatformAutoGenObj.CreateMakeFile(CreateDepsMakeFile)
## Create autogen code for platform and modules
#
# Since there's no autogen code for platform, this method will do nothing
# if CreateModuleCodeFile is set to False.
#
# @param CreateDepsCodeFile Flag indicating if creating module's
# autogen code file or not
#
def CreateCodeFile(self, CreateDepsCodeFile=False):
    """Delegate autogen-code creation to each per-arch PlatformAutoGen; no-op
    unless CreateDepsCodeFile is truthy."""
    if CreateDepsCodeFile:
        for PlatformAutoGenObj in self.AutoGenObjectList:
            PlatformAutoGenObj.CreateCodeFile(CreateDepsCodeFile)
## Create AsBuilt INF file the platform
#
def CreateAsBuiltInf(self):
    """Intentionally a no-op: there is no platform-level as-built INF."""
    return
| edk2-master | BaseTools/Source/Python/AutoGen/WorkspaceAutoGen.py |
## @file
# This file is used to parse a strings file and create or add to a string database
# file.
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
# Import Modules
#
from __future__ import absolute_import
import re
import Common.EdkLogger as EdkLogger
from Common.BuildToolError import *
from .UniClassObject import *
from io import BytesIO
from struct import pack, unpack
from Common.LongFilePathSupport import OpenLongFilePath as open
##
# Static definitions
#
# EFI_HII_SIBT_* values are the string-information-block type opcodes used by
# the generated HII string package; EFI_HII_PACKAGE_* are package type codes.
EFI_HII_SIBT_END = '0x00'
EFI_HII_SIBT_STRING_SCSU = '0x10'
EFI_HII_SIBT_STRING_SCSU_FONT = '0x11'
EFI_HII_SIBT_STRINGS_SCSU = '0x12'
EFI_HII_SIBT_STRINGS_SCSU_FONT = '0x13'
EFI_HII_SIBT_STRING_UCS2 = '0x14'
EFI_HII_SIBT_STRING_UCS2_FONT = '0x15'
EFI_HII_SIBT_STRINGS_UCS2 = '0x16'
EFI_HII_SIBT_STRINGS_UCS2_FONT = '0x17'
EFI_HII_SIBT_DUPLICATE = '0x20'
EFI_HII_SIBT_SKIP2 = '0x21'
EFI_HII_SIBT_SKIP1 = '0x22'
EFI_HII_SIBT_EXT1 = '0x30'
EFI_HII_SIBT_EXT2 = '0x31'
EFI_HII_SIBT_EXT4 = '0x32'
EFI_HII_SIBT_FONT = '0x40'

EFI_HII_PACKAGE_STRINGS = '0x04'
EFI_HII_PACKAGE_FORM = '0x02'

# Defaults used when emitting string blocks.
StringPackageType = EFI_HII_PACKAGE_STRINGS
StringPackageForm = EFI_HII_PACKAGE_FORM
StringBlockType = EFI_HII_SIBT_STRING_UCS2
StringSkipType = EFI_HII_SIBT_SKIP2

HexHeader = '0x'

# Text fragments used when writing the generated .h/.c files.
COMMENT = '// '
DEFINE_STR = '#define'
COMMENT_DEFINE_STR = COMMENT + DEFINE_STR
NOT_REFERENCED = 'not referenced'
COMMENT_NOT_REFERENCED = ' ' + COMMENT + NOT_REFERENCED
CHAR_ARRAY_DEFIN = 'unsigned char'
COMMON_FILE_NAME = 'Strings'

# Extracts the token name from "STRING_TOKEN (NAME)" occurrences.
# Raw string avoids invalid-escape warnings for \( and \) on modern Python.
STRING_TOKEN = re.compile(r'STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)

# Field widths (in bytes) of the generated HII string package header.
EFI_HII_ARRAY_SIZE_LENGTH = 4
EFI_HII_PACKAGE_HEADER_LENGTH = 4
EFI_HII_HDR_SIZE_LENGTH = 4
EFI_HII_STRING_OFFSET_LENGTH = 4
EFI_STRING_ID = 1
EFI_STRING_ID_LENGTH = 2
EFI_HII_LANGUAGE_WINDOW = 0
EFI_HII_LANGUAGE_WINDOW_LENGTH = 2
EFI_HII_LANGUAGE_WINDOW_NUMBER = 16
EFI_HII_STRING_PACKAGE_HDR_LENGTH = EFI_HII_PACKAGE_HEADER_LENGTH + EFI_HII_HDR_SIZE_LENGTH + EFI_HII_STRING_OFFSET_LENGTH + EFI_HII_LANGUAGE_WINDOW_LENGTH * EFI_HII_LANGUAGE_WINDOW_NUMBER + EFI_STRING_ID_LENGTH

# Banner placed at the top of every generated .h/.c file.
H_C_FILE_HEADER = ['//',
                   '// DO NOT EDIT -- auto-generated file',
                   '//',
                   '// This file is generated by the StrGather utility',
                   '//']
LANGUAGE_NAME_STRING_NAME = '$LANGUAGE_NAME'
PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
## Convert a dec number to a hex string
#
# Format a decimal number as a zero-padded, uppercase hex string of the
# requested digit count, prefixed with "0x".
# DecToHexStr(1000) is '0x000003E8'
# DecToHexStr(1000, 6) is '0x0003E8'
#
# @param Dec: The number in dec format
# @param Digit: The needed digit of hex string
#
# @retval: The formatted hex string
#
def DecToHexStr(Dec, Digit = 8):
    return '0x' + format(Dec, '0{}X'.format(Digit))
## Convert a dec number to a hex list
#
# Render a decimal number as its little-endian byte list, each byte as a
# '0xNN' string; Digit hex digits yield Digit/2 entries.
# DecToHexList(1000) is ['0xE8', '0x03', '0x00', '0x00']
# DecToHexList(1000, 6) is ['0xE8', '0x03', '0x00']
#
# @param Dec: The number in dec format
# @param Digit: The needed digit of hex list
#
# @retval: A list for formatted hex string
#
def DecToHexList(Dec, Digit = 8):
    HexStr = format(Dec, '0{}X'.format(Digit))
    ByteList = []
    # Walk the hex string two characters at a time, last pair first,
    # producing the least-significant byte at index 0.
    for Start in range(Digit - 2, -1, -2):
        ByteList.append('0x' + HexStr[Start:Start + 2])
    return ByteList
## Convert an ascii string to a hex list
#
# Convert an ascii string (or bytes) to a formatted hex list, one '0xNN'
# entry per character/byte.
# AscToHexList('en-US') is ['0x65', '0x6E', '0x2D', '0x55', '0x53']
#
# @param Ascii: The ascii string or bytes object
#
# @retval: A list for formatted hex string
#
def AscToHexList(Ascii):
    # Handle each item individually instead of the old whole-sequence
    # try/except fallback: bytes iterate as ints, str iterates as 1-char
    # strings needing ord(); this also makes mixed int/str sequences work.
    return ['0x{0:02X}'.format(Item if isinstance(Item, int) else ord(Item)) for Item in Ascii]
## Create content of .h file
#
# Create content of .h file
#
# @param BaseName: The basename of strings
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible mode
# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
#
# @retval Str: A string of .h file content
#
def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
    Str = []
    # Column at which the token value starts; names longer than this get a
    # single space separator instead of alignment padding.
    ValueStartPtr = 60
    # Tokens 0 and 1 are reserved for the language name strings and are
    # always emitted as commented-out (unreferenced) #defines.
    Line = COMMENT_DEFINE_STR + ' ' + LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(0, 4) + COMMENT_NOT_REFERENCED
    Str = WriteLine(Str, Line)
    Line = COMMENT_DEFINE_STR + ' ' + PRINTABLE_LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + PRINTABLE_LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(1, 4) + COMMENT_NOT_REFERENCED
    Str = WriteLine(Str, Line)
    UnusedStr = ''

    #Group the referred/Unused STRING token together.
    for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
        StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
        Name = StringItem.StringName
        Token = StringItem.Token
        Referenced = StringItem.Referenced
        if Name is not None:
            Line = ''
            if Referenced == True:
                # Referenced tokens become live #defines, appended directly.
                if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
                    Line = DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4)
                else:
                    Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
                Str = WriteLine(Str, Line)
            else:
                # Unreferenced tokens are collected separately as commented-out
                # #defines and appended after all referenced ones.
                if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
                    Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
                else:
                    Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
                UnusedStr = WriteLine(UnusedStr, Line)

    Str.extend( UnusedStr)

    Str = WriteLine(Str, '')
    if IsCompatibleMode or UniGenCFlag:
        Str = WriteLine(Str, 'extern unsigned char ' + BaseName + 'Strings[];')
    return "".join(Str)
## Create a complete .h file
#
# Build the full .h file text by wrapping the generated content in a final
# WriteLine pass and joining the result.
#
# @param BaseName: The basename of strings
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible mode
# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
#
# @retval Str: A string of complete .h file
#
def CreateHFile(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
    Content = CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag)
    return "".join(WriteLine('', Content))
## Create a buffer to store all items in an array
#
# Pack every '0xNN' text item in Array into BinBuffer as one raw byte.
#
# @param BinBuffer Buffer to contain Binary data.
# @param Array: The array need to be formatted
#
def CreateBinBuffer(BinBuffer, Array):
    Emit = BinBuffer.write
    for ByteText in Array:
        Emit(pack("B", int(ByteText, 16)))
## Create a formatted string all items in an array
#
# Join items with ', ', starting a new indented line after every Width items
# (default 16); the final line has its trailing whitespace stripped.
#
# @param Array: The array need to be formatted
# @param Width: The line length, the default value is set to 16
#
# @retval ArrayItem: A string for all formatted array items
#
def CreateArrayItem(Array, Width = 16):
    Rendered = []
    Line = ' '
    Filled = 0
    for Entry in Array:
        if Filled >= Width:
            # Current row is full - flush it and start the next one.
            Rendered = WriteLine(Rendered, Line)
            Line = ' '
            Filled = 0
        Line = Line + Entry + ', '
        Filled += 1
    Rendered = Write(Rendered, Line.rstrip())
    return "".join(Rendered)
## CreateCFileStringValue
#
# Create a line with string value
#
# @param Value: Value of the string
#
# @retval Str: A formatted string with string value
#
def CreateCFileStringValue(Value):
    # Prefix the string payload with its block-type marker byte, then
    # format the whole byte list as one comma-separated line.
    TypedValue = [StringBlockType] + Value
    return "".join(WriteLine('', CreateArrayItem(TypedValue)))
## GetFilteredLanguage
#
# apply get best language rules to the UNI language code list
#
# @param UniLanguageList: language code definition list in *.UNI file
# @param LanguageFilterList: language code filter list of RFC4646 format in DSC file
#
# @retval UniLanguageListFiltered: the filtered language code
#
def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
    # An empty filter list means "no filtering": keep every UNI language.
    if LanguageFilterList == []:
        return UniLanguageList
    Filtered = []
    for FilterLang in LanguageFilterList:
        # Rule 1: an exact match wins immediately (deduplicated).
        if FilterLang in UniLanguageList:
            if FilterLang not in Filtered:
                Filtered.append(FilterLang)
            continue
        # Rule 2: fall back to the first UNI language (not already kept)
        # whose primary tag equals the filter's primary tag.
        DashPos = FilterLang.find('-')
        PrimaryTag = FilterLang[0:DashPos].lower() if DashPos != -1 else FilterLang
        if len(PrimaryTag) == 3:
            # Map ISO 639-2 three-letter codes onto their two-letter form.
            PrimaryTag = LangConvTable.get(PrimaryTag)
        FoundPrimaryMatch = False
        for UniLang in UniLanguageList:
            UniDash = UniLang.find('-')
            UniPrimary = UniLang[0:UniDash].lower() if UniDash != -1 else UniLang
            if len(UniPrimary) == 3:
                UniPrimary = LangConvTable.get(UniPrimary)
            if PrimaryTag == UniPrimary and UniLang not in Filtered:
                Filtered.append(UniLang)
                FoundPrimaryMatch = True
                break
        if FoundPrimaryMatch:
            continue
        # Rule 3 of "get best language": nothing matched, so fall back to
        # the default tag ('en'), preferring an 'en-'/'eng-' variant that
        # the UNI file actually defines.
        DefaultTag = 'en'
        if DefaultTag not in Filtered:
            UsedEnglishVariant = False
            for UniLang in UniLanguageList:
                if (UniLang.startswith('en-') or UniLang.startswith('eng-')) and UniLang not in Filtered:
                    Filtered.append(UniLang)
                    UsedEnglishVariant = True
                    break
            if not UsedEnglishVariant:
                Filtered.append(DefaultTag)
    return Filtered
## Create content of .c file
#
# Create content of .c file
#
# @param BaseName: The basename of strings
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible mode
# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
# @param FilterInfo Platform language filter information
#
# @retval Str: A string of .c file content
#
def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer, FilterInfo):
    """Emit the C array body holding one HII string package per filtered
    language, and optionally mirror the same bytes into UniBinBuffer.

    FilterInfo is (IsEdk2Module, LanguageFilterList); the total array length
    accumulated across all packages is written as the leading size field.
    """
    #
    # Init array length
    #
    TotalLength = EFI_HII_ARRAY_SIZE_LENGTH
    Str = ''
    Offset = 0
    EDK2Module = FilterInfo[0]
    if EDK2Module:
        LanguageFilterList = FilterInfo[1]
    else:
        # EDK module is using ISO639-2 format filter, convert to the RFC4646 format
        LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
    UniLanguageList = []
    for IndexI in range(len(UniObjectClass.LanguageDef)):
        UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
    UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
    #
    # Create lines for each language's strings
    #
    for IndexI in range(len(UniObjectClass.LanguageDef)):
        Language = UniObjectClass.LanguageDef[IndexI][0]
        if Language not in UniLanguageListFiltered:
            continue
        StringBuffer = BytesIO()
        StrStringValue = ''
        ArrayLength = 0
        NumberOfUseOtherLangDef = 0
        Index = 0
        # Token 0 is skipped: iteration starts at index 1.
        # NOTE(review): assumes OrderedStringListByToken[Language] parallels
        # OrderedStringList[Language] in length - confirm in UniClassObject.
        for IndexJ in range(1, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[IndexI][0]])):
            Item = UniObjectClass.OrderedStringListByToken[Language][IndexJ]
            Name = Item.StringName
            Value = Item.StringValueByteList
            Referenced = Item.Referenced
            Token = Item.Token
            UseOtherLangDef = Item.UseOtherLangDef
            if UseOtherLangDef != '' and Referenced:
                # Strings borrowed from another language are batched and
                # emitted later as a single SIBT skip block.
                NumberOfUseOtherLangDef = NumberOfUseOtherLangDef + 1
                Index = Index + 1
            else:
                if NumberOfUseOtherLangDef > 0:
                    # Flush the pending skip block (1 type byte + 2 count bytes).
                    StrStringValue = WriteLine(StrStringValue, CreateArrayItem([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
                    CreateBinBuffer (StringBuffer, ([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
                    NumberOfUseOtherLangDef = 0
                    ArrayLength = ArrayLength + 3
                if Referenced and Item.Token > 0:
                    Index = Index + 1
                    StrStringValue = WriteLine(StrStringValue, "// %s: %s:%s" % (DecToHexStr(Index, 4), Name, DecToHexStr(Token, 4)))
                    StrStringValue = Write(StrStringValue, CreateCFileStringValue(Value))
                    CreateBinBuffer (StringBuffer, [StringBlockType] + Value)
                    ArrayLength = ArrayLength + Item.Length + 1 # 1 is for the length of string type
        #
        # EFI_HII_PACKAGE_HEADER
        #
        # Offset covers the fixed package header plus the NUL-terminated
        # language name; the extra +1 on ArrayLength is the SIBT_END byte.
        Offset = EFI_HII_STRING_PACKAGE_HDR_LENGTH + len(Language) + 1
        ArrayLength = Offset + ArrayLength + 1
        #
        # Create PACKAGE HEADER
        #
        Str = WriteLine(Str, '// PACKAGE HEADER\n')
        TotalLength = TotalLength + ArrayLength
        List = DecToHexList(ArrayLength, 6) + \
               [StringPackageType] + \
               DecToHexList(Offset) + \
               DecToHexList(Offset) + \
               DecToHexList(EFI_HII_LANGUAGE_WINDOW, EFI_HII_LANGUAGE_WINDOW_LENGTH * 2) * EFI_HII_LANGUAGE_WINDOW_NUMBER + \
               DecToHexList(EFI_STRING_ID, 4) + \
               AscToHexList(Language) + \
               DecToHexList(0, 2)
        Str = WriteLine(Str, CreateArrayItem(List, 16) + '\n')
        #
        # Create PACKAGE DATA
        #
        Str = WriteLine(Str, '// PACKAGE DATA\n')
        Str = Write(Str, StrStringValue)
        #
        # Add an EFI_HII_SIBT_END at last
        #
        Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
        #
        # Create binary UNI string
        #
        if UniBinBuffer:
            CreateBinBuffer (UniBinBuffer, List)
            UniBinBuffer.write (StringBuffer.getvalue())
            UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
        # The scratch buffer is closed whether or not it was copied out.
        StringBuffer.close()
    #
    # Create line for string variable name
    # "unsigned char $(BaseName)Strings[] = {"
    #
    AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')
    if IsCompatibleMode:
        #
        # Create FRAMEWORK_EFI_HII_PACK_HEADER in compatible mode
        #
        AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Length')
        AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength + 2)) + '\n')
        AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Type')
        AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(2, 4)) + '\n')
    else:
        #
        # Create whole array length in UEFI mode
        #
        AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
        AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
    #
    # Join package data
    #
    AllStr = Write(AllStr, Str)
    return "".join(AllStr)
## Create end of .c file
#
# Create end of .c file
#
# @retval Str: A string of .h file end
#
def CreateCFileEnd():
    # Equivalent to Write('', '};'): a two-element fragment list holding
    # the (empty) leading fragment and the closing brace of the array.
    return ['', '};']
## Create a .c file
#
# Create a complete .c file
#
# @param BaseName: The basename of strings
# @param UniObjectClass A UniObjectClass instance
# @param IsCompatibleMode Compatible Mode
# @param FilterInfo Platform language filter information
#
# @retval CFile: A string of complete .c file
#
def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
    # Body first (no binary buffer requested here), then the closing brace.
    Body = CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, None, FilterInfo)
    Fragments = WriteLine('', Body)
    Fragments = WriteLine(Fragments, CreateCFileEnd())
    return "".join(Fragments)
## GetFileList
#
# Get a list for all files
#
# @param SourceFileList: A list of source file objects, each carrying a .Path
# @param IncludeList: A list of all paths to be searched
# @param SkipList: A list of file extensions that should be skipped
#
# @retval FileList: A list of all files found
#
def GetFileList(SourceFileList, IncludeList, SkipList):
    if IncludeList is None:
        EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
    FileList = []
    if SkipList is None:
        SkipList = []
    # Upper-case the skip extensions once instead of once per candidate file.
    SkipExtList = [Skip.upper() for Skip in SkipList]
    for File in SourceFileList:
        for Dir in IncludeList:
            if not os.path.exists(Dir):
                continue
            # Do not rebind the loop variable (the original reassigned
            # 'File' here); build the candidate path in its own name.
            FullPath = os.path.join(Dir, File.Path)
            #
            # Ignore directories
            #
            if not os.path.isfile(FullPath):
                continue
            #
            # Ignore file listed in skip list
            #
            if os.path.splitext(FullPath)[1].upper() in SkipExtList:
                EdkLogger.verbose("Skipped %s for string token uses search" % FullPath)
            else:
                FileList.append(FullPath)
            # The file was located under this include directory; stop
            # probing the remaining directories (original behavior).
            break
    return FileList
## SearchString
#
# Search whether all string defined in UniObjectClass are referenced
# All string used should be set to Referenced
#
# @param UniObjectClass: Input UniObjectClass
# @param FileList: Search path list
# @param IsCompatibleMode Compatible Mode
#
# @retval UniObjectClass: UniObjectClass after searched
#
def SearchString(UniObjectClass, FileList, IsCompatibleMode):
    if FileList == []:
        return UniObjectClass
    for File in FileList:
        try:
            if os.path.isfile(File):
                # Fix: use a context manager so the file handle is always
                # closed; the original leaked the handle returned by open().
                with open(File, 'r') as Lines:
                    for Line in Lines:
                        for StrName in STRING_TOKEN.findall(Line):
                            EdkLogger.debug(EdkLogger.DEBUG_5, "Found string identifier: " + StrName)
                            UniObjectClass.SetStringReferenced(StrName)
        except:
            EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "SearchString: Error while processing file", File=File, RaiseError=False)
            raise
    UniObjectClass.ReToken()
    return UniObjectClass
## GetStringFiles
#
# This function is used for UEFI2.1 spec
#
#
def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
    if len(UniFilList) > 0:
        SortedUniList = sorted(UniFilList, key=lambda x: x.File)
        # EDK Shell .UNI files use ISO 639-2 language codes, which the
        # compatible parsing mode understands; otherwise honor the caller's
        # compatibility flag.
        CompatParse = True if ShellMode else IsCompatibleMode
        Uni = UniFileClassObject(SortedUniList, CompatParse, IncludePathList)
    else:
        EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
    # Mark every referenced string token found in the source files.
    FileList = GetFileList(SourceFileList, IncludeList, SkipList)
    Uni = SearchString(Uni, sorted(FileList), IsCompatibleMode)
    # Generate the .h output, and the .c output when requested.
    HFile = CreateHFile(BaseName, Uni, IsCompatibleMode, UniGenCFlag)
    CFile = None
    if IsCompatibleMode or UniGenCFlag:
        CFile = CreateCFile(BaseName, Uni, IsCompatibleMode, FilterInfo)
    if UniGenBinBuffer:
        CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer, FilterInfo)
    return HFile, CFile
#
# Write an item
#
def Write(Target, Item):
    # Normalize Target into a mutable list: a plain string becomes a
    # single-element list, while None/empty values become a fresh list.
    if isinstance(Target, str):
        Target = [Target]
    if not Target:
        Target = []
    # A list Item is spliced in element by element; anything else is
    # appended as a single entry.
    Appender = Target.extend if isinstance(Item, list) else Target.append
    Appender(Item)
    return Target
#
# Write an item with a break line
#
def WriteLine(Target, Item):
    # Same Target normalization as Write(); a string seeds the list and a
    # falsy value (None, empty list) starts a fresh one.
    if isinstance(Target, str):
        Target = [Target]
    elif not Target:
        Target = []
    # Splice in list items, append anything else, then mark the line break.
    Additions = Item if isinstance(Item, list) else [Item]
    Target.extend(Additions)
    Target.append('\n')
    return Target
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
if __name__ == '__main__':
    EdkLogger.info('start')
    UniFileList = [
                   r'C:\\Edk\\Strings2.uni',
                   r'C:\\Edk\\Strings.uni'
                  ]
    SrcFileList = []
    for Root, Dirs, Files in os.walk('C:\\Edk'):
        for File in Files:
            SrcFileList.append(File)
    IncludeList = [
                   r'C:\\Edk'
                  ]
    SkipList = ['.inf', '.uni']
    BaseName = 'DriverSample'
    # Fix: GetStringFiles() takes an IncludePathList argument between
    # IncludeList and SkipList; the old call omitted it, shifting SkipList,
    # BaseName and the compatibility flag each one position to the left.
    (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, IncludeList, SkipList, BaseName, True)
    # Fix: close the generated files deterministically instead of leaking
    # the handles.
    with open('unistring.h', 'w') as hfile:
        hfile.write(h)
    with open('unistring.c', 'w') as cfile:
        cfile.write(c)
    EdkLogger.info('end')
| edk2-master | BaseTools/Source/Python/AutoGen/StrGather.py |
## @file
# This tool adds EFI_FIRMWARE_IMAGE_AUTHENTICATION for a binary.
#
# This tool only support CertType - EFI_CERT_TYPE_PKCS7_GUID
# {0x4aafd29d, 0x68df, 0x49ee, {0x8a, 0xa9, 0x34, 0x7d, 0x37, 0x56, 0x65, 0xa7}}
#
# This tool has been tested with OpenSSL.
#
# Copyright (c) 2016 - 2017, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Pkcs7Sign
'''
from __future__ import print_function
import os
import sys
import argparse
import subprocess
import uuid
import struct
import collections
from Common.BuildVersion import gBUILD_VERSION
#
# Globals for help information
#
__prog__ = 'Pkcs7Sign'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
__copyright__ = 'Copyright (c) 2016, Intel Corporation. All rights reserved.'
__usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
#
# GUID for PKCS7 from UEFI Specification
#
# WIN_CERTIFICATE.wRevision / wCertificateType values and the PKCS7
# certificate-type GUID used inside WIN_CERTIFICATE_UEFI_GUID.
WIN_CERT_REVISION = 0x0200
WIN_CERT_TYPE_EFI_GUID = 0x0EF1
EFI_CERT_TYPE_PKCS7_GUID = uuid.UUID('{4aafd29d-68df-49ee-8aa9-347d375665a7}')
#
# Layout of the authentication header this tool produces/consumes:
#
# typedef struct _WIN_CERTIFICATE {
#   UINT32 dwLength;
#   UINT16 wRevision;
#   UINT16 wCertificateType;
#   //UINT8 bCertificate[ANYSIZE_ARRAY];
# } WIN_CERTIFICATE;
#
# typedef struct _WIN_CERTIFICATE_UEFI_GUID {
#   WIN_CERTIFICATE Hdr;
#   EFI_GUID CertType;
#   //UINT8 CertData[ANYSIZE_ARRAY];
# } WIN_CERTIFICATE_UEFI_GUID;
#
# typedef struct {
#   UINT64 MonotonicCount;
#   WIN_CERTIFICATE_UEFI_GUID AuthInfo;
# } EFI_FIRMWARE_IMAGE_AUTHENTICATION;
#
#
# Filename of test signing private cert that is stored in same directory as this tool
#
TEST_SIGNER_PRIVATE_CERT_FILENAME = 'TestCert.pem'
TEST_OTHER_PUBLIC_CERT_FILENAME = 'TestSub.pub.pem'
TEST_TRUSTED_PUBLIC_CERT_FILENAME = 'TestRoot.pub.pem'
## Locate a default test certificate stored in the same directory as this tool.
#
# Handles both frozen-executable and plain-script invocation, and strips a
# surrounding pair of double quotes from the tool path if present.
#
# @param CertFileName  basename of the certificate file to locate
# @retval              full path of the certificate next to the tool
#
def _DefaultCertFilePath(CertFileName):
  if hasattr(sys, 'frozen'):
    ToolPath = sys.executable
  else:
    ToolPath = sys.argv[0]
  if ToolPath.startswith('"'):
    ToolPath = ToolPath[1:]
  if ToolPath.endswith('"'):
    # Bug fix: the three inline copies of this logic assigned from the
    # undefined name 'RsaToolPath' (a leftover from Rsa2048Sha256Sign.py),
    # raising NameError whenever the tool path ended with a quote.
    ToolPath = ToolPath[:-1]
  return os.path.join(os.path.dirname(os.path.realpath(ToolPath)), CertFileName)

if __name__ == '__main__':
  #
  # Create command line argument parser object
  #
  parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
  group = parser.add_mutually_exclusive_group(required=True)
  group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
  group.add_argument("-d", action="store_true", dest='Decode', help='decode file')
  group.add_argument("--version", action='version', version=__version__)
  parser.add_argument("-o", "--output", dest='OutputFile', type=str, metavar='filename', help="specify the output filename", required=True)
  parser.add_argument("--signer-private-cert", dest='SignerPrivateCertFile', type=argparse.FileType('rb'), help="specify the signer private cert filename. If not specified, a test signer private cert is used.")
  parser.add_argument("--other-public-cert", dest='OtherPublicCertFile', type=argparse.FileType('rb'), help="specify the other public cert filename. If not specified, a test other public cert is used.")
  parser.add_argument("--trusted-public-cert", dest='TrustedPublicCertFile', type=argparse.FileType('rb'), help="specify the trusted public cert filename. If not specified, a test trusted public cert is used.")
  parser.add_argument("--monotonic-count", dest='MonotonicCountStr', type=str, help="specify the MonotonicCount in FMP capsule. If not specified, 0 is used.")
  parser.add_argument("--signature-size", dest='SignatureSizeStr', type=str, help="specify the signature size for decode process.")
  parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
  parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
  parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
  parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename")
  #
  # Parse command line arguments
  #
  args = parser.parse_args()
  #
  # Generate file path to Open SSL command
  #
  OpenSslCommand = 'openssl'
  try:
    OpenSslPath = os.environ['OPENSSL_PATH']
    OpenSslCommand = os.path.join(OpenSslPath, OpenSslCommand)
    if ' ' in OpenSslCommand:
      OpenSslCommand = '"' + OpenSslCommand + '"'
  except:
    pass
  #
  # Verify that Open SSL command is available
  #
  try:
    Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
  except:
    print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
    sys.exit(1)
  Version = Process.communicate()
  if Process.returncode != 0:
    print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
    sys.exit(Process.returncode)
  print(Version[0].decode())
  #
  # Read input file into a buffer and save input filename
  #
  args.InputFileName = args.InputFile.name
  args.InputFileBuffer = args.InputFile.read()
  args.InputFile.close()
  #
  # Save output filename and check if path exists
  #
  OutputDir = os.path.dirname(args.OutputFile)
  if not os.path.exists(OutputDir):
    print('ERROR: The output path does not exist: %s' % OutputDir)
    sys.exit(1)
  args.OutputFileName = args.OutputFile
  # MonotonicCount accepts hex ('0x...') or decimal; anything unparsable
  # (including an omitted option) falls back to 0.
  try:
    if args.MonotonicCountStr.upper().startswith('0X'):
      args.MonotonicCountValue = int(args.MonotonicCountStr, 16)
    else:
      args.MonotonicCountValue = int(args.MonotonicCountStr)
  except:
    args.MonotonicCountValue = int(0)
  if args.Encode:
    #
    # Save signer private cert filename and close private cert file
    #
    try:
      args.SignerPrivateCertFileName = args.SignerPrivateCertFile.name
      args.SignerPrivateCertFile.close()
    except:
      try:
        #
        # Fall back to the test signer private cert shipped with the tool
        #
        args.SignerPrivateCertFileName = _DefaultCertFilePath(TEST_SIGNER_PRIVATE_CERT_FILENAME)
        args.SignerPrivateCertFile = open(args.SignerPrivateCertFileName, 'rb')
        args.SignerPrivateCertFile.close()
      except:
        print('ERROR: test signer private cert file %s missing' % (args.SignerPrivateCertFileName))
        sys.exit(1)
    #
    # Save other public cert filename and close public cert file
    #
    try:
      args.OtherPublicCertFileName = args.OtherPublicCertFile.name
      args.OtherPublicCertFile.close()
    except:
      try:
        #
        # Fall back to the test other public cert shipped with the tool
        #
        args.OtherPublicCertFileName = _DefaultCertFilePath(TEST_OTHER_PUBLIC_CERT_FILENAME)
        args.OtherPublicCertFile = open(args.OtherPublicCertFileName, 'rb')
        args.OtherPublicCertFile.close()
      except:
        print('ERROR: test other public cert file %s missing' % (args.OtherPublicCertFileName))
        sys.exit(1)
    # The signed payload is the input data followed by the 64-bit
    # monotonic count (EFI_FIRMWARE_IMAGE_AUTHENTICATION.MonotonicCount).
    format = "%dsQ" % len(args.InputFileBuffer)
    FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
    #
    # Sign the input file using the specified private key and capture signature from STDOUT
    #
    Process = subprocess.Popen('%s smime -sign -binary -signer "%s" -outform DER -md sha256 -certfile "%s"' % (OpenSslCommand, args.SignerPrivateCertFileName, args.OtherPublicCertFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    Signature = Process.communicate(input=FullInputFileBuffer)[0]
    if Process.returncode != 0:
      sys.exit(Process.returncode)
    #
    # Write output file that contains Signature, and Input data
    #
    args.OutputFile = open(args.OutputFileName, 'wb')
    args.OutputFile.write(Signature)
    args.OutputFile.write(args.InputFileBuffer)
    args.OutputFile.close()
  if args.Decode:
    #
    # Save trusted public cert filename and close public cert file
    #
    try:
      args.TrustedPublicCertFileName = args.TrustedPublicCertFile.name
      args.TrustedPublicCertFile.close()
    except:
      try:
        #
        # Fall back to the test trusted public cert shipped with the tool
        #
        args.TrustedPublicCertFileName = _DefaultCertFilePath(TEST_TRUSTED_PUBLIC_CERT_FILENAME)
        args.TrustedPublicCertFile = open(args.TrustedPublicCertFileName, 'rb')
        args.TrustedPublicCertFile.close()
      except:
        print('ERROR: test trusted public cert file %s missing' % (args.TrustedPublicCertFileName))
        sys.exit(1)
    if not args.SignatureSizeStr:
      print("ERROR: please use the option --signature-size to specify the size of the signature data!")
      sys.exit(1)
    else:
      if args.SignatureSizeStr.upper().startswith('0X'):
        SignatureSize = int(args.SignatureSizeStr, 16)
      else:
        SignatureSize = int(args.SignatureSizeStr)
    if SignatureSize < 0:
      print("ERROR: The value of option --signature-size can't be set to negative value!")
      sys.exit(1)
    elif SignatureSize > len(args.InputFileBuffer):
      print("ERROR: The value of option --signature-size is exceed the size of the input file !")
      sys.exit(1)
    # Split the input into the leading signature and the trailing payload.
    args.SignatureBuffer = args.InputFileBuffer[0:SignatureSize]
    args.InputFileBuffer = args.InputFileBuffer[SignatureSize:]
    format = "%dsQ" % len(args.InputFileBuffer)
    FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
    #
    # Save output file contents from input file
    #
    open(args.OutputFileName, 'wb').write(FullInputFileBuffer)
    #
    # Verify signature
    #
    Process = subprocess.Popen('%s smime -verify -inform DER -content %s -CAfile %s' % (OpenSslCommand, args.OutputFileName, args.TrustedPublicCertFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    Process.communicate(input=args.SignatureBuffer)[0]
    if Process.returncode != 0:
      print('ERROR: Verification failed')
      os.remove (args.OutputFileName)
      sys.exit(Process.returncode)
    # On success, rewrite the output with the bare payload (without the
    # appended monotonic count used only during verification).
    open(args.OutputFileName, 'wb').write(args.InputFileBuffer)
| edk2-master | BaseTools/Source/Python/Pkcs7Sign/Pkcs7Sign.py |
## @file
# This file is used to parse INF file of EDK project
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import Common.LongFilePathOs as os
import Common.EdkLogger as EdkLogger
from Common.DataType import *
from CommonDataClass.DataClass import *
from Eot.Identification import Identification
from Common.StringUtils import *
from Eot.Parser import *
from Eot import Database
from Eot import EotGlobalData
## EdkInfParser() class
#
# This class defined basic INF object which is used by inheriting
#
# @param object: Inherited from object class
#
class EdkInfParser(object):
    ## The constructor
    #
    # @param self: The object pointer
    # @param Filename: INF file name
    # @param Database: Eot database
    # @param SourceFileList: A list for all source file belonging this INF file
    #
    def __init__(self, Filename = None, Database = None, SourceFileList = None):
        self.Identification = Identification()
        self.Sources = []
        self.Macros = {}
        # Shared handles into the Eot database.
        self.Cur = Database.Cur
        self.TblFile = Database.TblFile
        self.TblInf = Database.TblInf
        self.FileID = -1
        # Load Inf file if filename is not None
        if Filename is not None:
            self.LoadInfFile(Filename)
        # Register any pre-collected source files of this INF in the database.
        if SourceFileList:
            for Item in SourceFileList:
                self.TblInf.Insert(MODEL_EFI_SOURCE_FILE, Item, '', '', '', '', 'COMMON', -1, self.FileID, -1, -1, -1, -1, 0)
    ## LoadInffile() method
    #
    # Load INF file and insert a record in database
    #
    # @param self: The object pointer
    # @param Filename: Input value for filename of Inf file
    #
    def LoadInfFile(self, Filename = None):
        # Insert a record for file
        Filename = NormPath(Filename)
        self.Identification.FileFullPath = Filename
        (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
        self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
        self.ParseInf(PreProcess(Filename, False), self.Identification.FileRelativePath, Filename)
    ## ParserSource() method
    #
    # Parse Source section and insert records in database
    #
    # @param self: The object pointer
    # @param CurrentSection: current section name
    # @param SectionItemList: the item belonging current section
    # @param ArchList: A list for arch for this section
    # @param ThirdList: A list for third item for this section
    #
    def ParserSource(self, CurrentSection, SectionItemList, ArchList, ThirdList):
        for Index in range(0, len(ArchList)):
            Arch = ArchList[Index]
            Third = ThirdList[Index]
            if Arch == '':
                Arch = TAB_ARCH_COMMON
            for Item in SectionItemList:
                # Only [Defines]-style sections contribute macro records here.
                if CurrentSection.upper() == 'defines'.upper():
                    (Name, Value) = AddToSelfMacro(self.Macros, Item[0])
                    self.TblInf.Insert(MODEL_META_DATA_HEADER, Name, Value, Third, '', '', Arch, -1, self.FileID, Item[1], -1, Item[1], -1, 0)
    ## ParseInf() method
    #
    # Parse INF file and get sections information
    #
    # @param self: The object pointer
    # @param Lines: contents of INF file (list of preprocessed lines)
    # @param FileRelativePath: relative path of the file
    # @param Filename: file name of INF file
    #
    def ParseInf(self, Lines = None, FileRelativePath = '', Filename = ''):
        # Fix: avoid the mutable-default-argument pitfall - default to a
        # fresh empty list per call instead of one shared list object.
        if Lines is None:
            Lines = []
        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
        [], [], TAB_UNKNOWN, [], [], []
        LineNo = 0
        for Line in Lines:
            LineNo = LineNo + 1
            if Line == '':
                continue
            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
                # Flush the items collected for the previous section.
                self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
                # Parse the new section
                SectionItemList = []
                ArchList = []
                ThirdList = []
                # Parse section name
                CurrentSection = ''
                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
                for Item in LineList:
                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
                    if CurrentSection == '':
                        CurrentSection = ItemList[0]
                    else:
                        if CurrentSection != ItemList[0]:
                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo)
                    # Pad so ItemList always exposes [Name, Arch, Third].
                    ItemList.append('')
                    ItemList.append('')
                    if len(ItemList) > 5:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    else:
                        ArchList.append(ItemList[1].upper())
                        ThirdList.append(ItemList[2])
                continue
            # Add a section item
            SectionItemList.append([Line, LineNo])
        # End of parse
        self.ParserSource(CurrentSection, SectionItemList, ArchList, ThirdList)
        #End of For
| edk2-master | BaseTools/Source/Python/Eot/InfParserLite.py |
## @file
# This file is used to create a database used by EOT tool
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import sqlite3
import Common.LongFilePathOs as os, time
import Common.EdkLogger as EdkLogger
import CommonDataClass.DataClass as DataClass
from Table.TableDataModel import TableDataModel
from Table.TableFile import TableFile
from Table.TableFunction import TableFunction
from Table.TableIdentifier import TableIdentifier
from Table.TableEotReport import TableEotReport
from Table.TableInf import TableInf
from Table.TableDec import TableDec
from Table.TableDsc import TableDsc
from Table.TableFdf import TableFdf
from Table.TableQuery import TableQuery
##
# Static definitions
#
DATABASE_PATH = "Eot.db"
## Database class
#
# This class defined the EOT database
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
#
class Database(object):
    ## The constructor
    #
    # @param self: The object pointer
    # @param DbPath: The file path of the database
    #
    def __init__(self, DbPath):
        self.DbPath = DbPath
        self.Conn = None    # sqlite3 connection, opened in InitDatabase()
        self.Cur = None     # shared cursor handed to every table wrapper below
        self.TblDataModel = None
        self.TblFile = None
        self.TblFunction = None
        self.TblIdentifier = None
        self.TblReport = None
        self.TblInf = None
        self.TblDec = None
        self.TblDsc = None
        self.TblFdf = None
        self.TblQuery = None
        self.TblQuery2 = None
    ## InitDatabase() method
    # 1. Delete all old existing tables
    # 2. Create new tables
    # 3. Initialize table DataModel
    #
    # @param self: The object pointer
    # @param NewDatabase: Check if it needs to create a new database
    #
    def InitDatabase(self, NewDatabase = True):
        EdkLogger.verbose("\nInitialize EOT database started ...")
        #
        # Drop all old existing tables
        #
        if NewDatabase:
            if os.path.exists(self.DbPath):
                os.remove(self.DbPath)
        # DEFERRED isolation postpones locking until the first write;
        # large pages and synchronous=OFF trade durability for speed in
        # this throwaway analysis database.
        self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
        self.Conn.execute("PRAGMA page_size=8192")
        self.Conn.execute("PRAGMA synchronous=OFF")
        # to avoid non-ascii character conversion error
        self.Conn.text_factory = str
        self.Cur = self.Conn.cursor()
        self.TblDataModel = TableDataModel(self.Cur)
        self.TblFile = TableFile(self.Cur)
        self.TblFunction = TableFunction(self.Cur)
        self.TblIdentifier = TableIdentifier(self.Cur)
        self.TblReport = TableEotReport(self.Cur)
        self.TblInf = TableInf(self.Cur)
        self.TblDec = TableDec(self.Cur)
        self.TblDsc = TableDsc(self.Cur)
        self.TblFdf = TableFdf(self.Cur)
        self.TblQuery = TableQuery(self.Cur)
        # Second query table sharing the TableQuery schema under a
        # different SQL table name.
        self.TblQuery2 = TableQuery(self.Cur)
        self.TblQuery2.Table = 'Query2'
        # Create new tables
        if NewDatabase:
            self.TblDataModel.Create()
            self.TblFile.Create()
            self.TblFunction.Create()
            self.TblReport.Create()
            self.TblInf.Create()
            self.TblDec.Create()
            self.TblDsc.Create()
            self.TblFdf.Create()
            self.TblQuery.Create()
            self.TblQuery2.Create()
        # Init each table's ID
        self.TblDataModel.InitID()
        self.TblFile.InitID()
        self.TblFunction.InitID()
        self.TblReport.InitID()
        self.TblInf.InitID()
        self.TblDec.InitID()
        self.TblDsc.InitID()
        self.TblFdf.InitID()
        # The two query tables are always rebuilt from scratch, even when
        # reusing an existing database file.
        self.TblQuery.Drop()
        self.TblQuery.Create()
        self.TblQuery.InitID()
        self.TblQuery2.Drop()
        self.TblQuery2.Create()
        self.TblQuery2.InitID()
        # Initialize table DataModel
        if NewDatabase:
            self.TblDataModel.InitTable()
        EdkLogger.verbose("Initialize EOT database ... DONE!")
    ## QueryTable() method
    #
    # Query a table
    #
    # @param self: The object pointer
    # @param Table: The instance of the table to be queried
    #
    def QueryTable(self, Table):
        Table.Query()
    ## Close() method
    #
    # Commit all first
    # Close the connection and cursor
    #
    def Close(self):
        # Commit to file
        self.Conn.commit()
        # Close connection and cursor
        self.Cur.close()
        self.Conn.close()
    ## InsertOneFile() method
    #
    # Insert one file's information to the database
    # 1. Create a record in TableFile
    # 2. Create functions one by one
    #    2.1 Create variables of function one by one
    #    2.2 Create pcds of function one by one
    # 3. Create variables one by one
    # 4. Create pcds one by one
    #
    # @param self: The object pointer
    # @param File: The object of the file to be inserted
    #
    def InsertOneFile(self, File):
        # Insert a record for file
        FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
        # Each source file gets its own per-file identifier table, named
        # 'Identifier<FileID>'.
        IdTable = TableIdentifier(self.Cur)
        IdTable.Table = "Identifier%s" % FileID
        IdTable.Create()
        # Insert function of file
        for Function in File.FunctionList:
            FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
                                    Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
                                    Function.BodyStartLine, Function.BodyStartColumn, FileID, \
                                    Function.FunNameStartLine, Function.FunNameStartColumn)
            # Insert Identifier of function
            for Identifier in Function.IdentifierList:
                IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                                              FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
        # Insert Identifier of file (BelongsToFunction = -1 marks file scope)
        for Identifier in File.IdentifierList:
            IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                                          FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
        EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)
    ## UpdateIdentifierBelongsToFunction() method
    #
    # Update the field "BelongsToFunction" for each Identifier
    #
    # @param self: The object pointer
    #
    def UpdateIdentifierBelongsToFunction(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
        Records = self.TblFunction.Exec(SqlCommand)
        # NOTE(review): Data1/Data2 are never used below - they look like
        # leftovers from an earlier batching implementation.
        Data1 = []
        Data2 = []
        for Record in Records:
            FunctionID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            # Identifiers whose span is strictly inside the function's line
            # range are attached to that function.
            SqlCommand = """Update Identifier%s set BelongsToFunction = %s where BelongsToFile = %s and StartLine > %s and EndLine < %s""" % \
                        (BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine)
            self.TblIdentifier.Exec(SqlCommand)
            # A comment ending on the line directly above the function start
            # is reclassified as the function's header comment.
            SqlCommand = """Update Identifier%s set BelongsToFunction = %s, Model = %s where BelongsToFile = %s and Model = %s and EndLine = %s""" % \
                         (BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1)
            self.TblIdentifier.Exec(SqlCommand)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Ad-hoc smoke test: build a tiny fake file model (one function, four
    # identifiers) and push it through the database round trip.
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))

    Db = Database(DATABASE_PATH)
    Db.InitDatabase()
    Db.QueryTable(Db.TblDataModel)

    # Identifiers with deliberately awkward names (embedded quotes) to
    # exercise SQL escaping in the insert path
    identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
    identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
    identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
    identifier4 = DataClass.IdentifierClass(-1, '', '', "i1'", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 77, 43, 88, 43)
    fun1 = DataClass.FunctionClass(-1, '', '', 'fun1', '', 21, 2, 60, 45, 1, 23, 0, [], [])
    file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
    Db.InsertOneFile(file)

    # Dump each table so the inserted rows can be inspected in the log
    Db.QueryTable(Db.TblFile)
    Db.QueryTable(Db.TblFunction)
    Db.QueryTable(Db.TblIdentifier)

    Db.Close()
    EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
| edk2-master | BaseTools/Source/Python/Eot/Database.py |
## @file
# fragments of source file
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import re
import Common.LongFilePathOs as os
from .ParserWarning import Warning
from Common.LongFilePathSupport import OpenLongFilePath as open
# Profile contents of a file
# Module-level accumulators: each list collects one category of construct
# found while a source file is parsed.
PPDirectiveList = []            # preprocessor directives
AssignmentExpressionList = []   # assignment expressions
PredicateExpressionList = []    # predicate (conditional) expressions
FunctionDefinitionList = []     # function definitions
VariableDeclarationList = []    # variable declarations
EnumerationDefinitionList = []  # enum definitions
StructUnionDefinitionList = []  # struct/union definitions
TypedefDefinitionList = []      # typedef definitions
FunctionCallingList = []        # function call sites
## Class FileProfile
#
# Holds the raw line content of a source file that is being parsed.
#
# May raise Exception when opening file.
#
class FileProfile :
    ## The constructor
    #
    # Read the whole file into FileLinesListFromFile as raw byte lines.
    #
    # @param self: The object pointer
    # @param FileName: The file that to be parsed
    #
    def __init__(self, FileName):
        # Working copy of the lines, filled in later by the parser
        self.FileLinesList = []
        # Untouched lines exactly as read from disk (binary mode, unbuffered)
        self.FileLinesListFromFile = []
        try:
            SourceFile = open(FileName, "rb", 0)
            try:
                self.FileLinesListFromFile = SourceFile.readlines()
            finally:
                SourceFile.close()
        except IOError:
            raise Warning("Error when opening file %s" % FileName)
| edk2-master | BaseTools/Source/Python/Eot/FileProfile.py |
## @file
# Python 'Eot' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/Eot/__init__.py |
## @file
# This file is used to define common parsing related functions used in parsing
# Inf/Dsc/Makefile process
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os, re
import Common.EdkLogger as EdkLogger
from Common.DataType import *
from CommonDataClass.DataClass import *
from Common.StringUtils import CleanString, GetSplitValueList, ReplaceMacro
from . import EotGlobalData
from Common.StringUtils import GetSplitList
from Common.LongFilePathSupport import OpenLongFilePath as open
import subprocess
## DeCompress
#
# Call an external decompress tool to decompress an FV section.
#
# @param Method: Compression scheme: 'Lzma', 'Efi' or 'Framework'
# @param Input:  The compressed bytes
#
# @return The decompressed bytes, or None when the tool produced no output
#
def DeCompress(Method, Input):
    # Write the input to a temp file so the external tool can read it.
    # Close it explicitly (the original relied on refcount GC to close the
    # handle before the tool opened the file).
    with open('_Temp.bin', 'wb') as TempFile:
        TempFile.write(Input)
    cmd = ''
    if Method == 'Lzma':
        cmd = r'LzmaCompress -o _New.bin -d _Temp.bin'
    if Method == 'Efi':
        cmd = r'TianoCompress -d --uefi -o _New.bin _Temp.bin'
    if Method == 'Framework':
        cmd = r'TianoCompress -d -o _New.bin _Temp.bin'

    # Run the tool and wait for completion; its console output is discarded
    Process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    Process.communicate()

    # Return the buffer of _New.bin, or None when the tool failed and
    # produced no output file (the original returned None implicitly)
    if os.path.exists('_New.bin'):
        with open('_New.bin', 'rb') as NewFile:
            return NewFile.read()
    return None
## PreProcess() method
#
# Pre process a file
#
# 1. Remove all comments
# 2. Merge multiple lines code to one line
#
# Blank placeholder entries are appended wherever lines are removed or
# merged so that indexes into the returned list still correspond to the
# original file's line numbers.
#
# @param Filename: Name of the file to be parsed
# @param MergeMultipleLines: Switch for if merge multiple lines
# @param LineNo: Default line no
#
# @return Lines: The file contents after removing comments
#
def PreProcess(Filename, MergeMultipleLines = True, LineNo = -1):
    Lines = []
    Filename = os.path.normpath(Filename)
    if not os.path.isfile(Filename):
        EdkLogger.error("Eot", EdkLogger.FILE_NOT_FOUND, ExtraData=Filename)

    IsFindBlockComment = False
    IsFindBlockCode = False
    ReservedLine = ''
    ReservedLineLength = 0
    for Line in open(Filename, 'r'):
        Line = Line.strip()
        # Remove comment block
        if Line.find(TAB_COMMENT_EDK_START) > -1:
            ReservedLine = GetSplitList(Line, TAB_COMMENT_EDK_START, 1)[0]
            IsFindBlockComment = True
        if Line.find(TAB_COMMENT_EDK_END) > -1:
            Line = ReservedLine + GetSplitList(Line, TAB_COMMENT_EDK_END, 1)[1]
            ReservedLine = ''
            IsFindBlockComment = False
        if IsFindBlockComment:
            # Placeholder keeps line numbering aligned with the input
            Lines.append('')
            continue

        # Remove comments at tail and remove spaces again
        Line = CleanString(Line)
        if Line == '':
            Lines.append('')
            continue

        if MergeMultipleLines:
            # Add multiple lines to one line
            if IsFindBlockCode and Line[-1] != TAB_SLASH:
                ReservedLine = (ReservedLine + TAB_SPACE_SPLIT + Line).strip()
                Lines.append(ReservedLine)
                # BUGFIX: append one placeholder per merged continuation line so
                # later line numbers still match the original file.  The original
                # iterated the tuple (0, ReservedLineLength), which always
                # appended exactly two placeholders regardless of how many
                # lines were actually merged.
                for Index in range(ReservedLineLength):
                    Lines.append('')
                ReservedLine = ''
                ReservedLineLength = 0
                IsFindBlockCode = False
                continue
            if Line[-1] == TAB_SLASH:
                ReservedLine = ReservedLine + TAB_SPACE_SPLIT + Line[0:-1].strip()
                ReservedLineLength = ReservedLineLength + 1
                IsFindBlockCode = True
                continue

        Lines.append(Line)
    return Lines
## AddToGlobalMacro() method
#
# Expand a macro's value against the global macro set and record it in
# EotGlobalData.gMACRO.
#
# @param Name: Name of the macro
# @param Value: Value of the macro
#
def AddToGlobalMacro(Name, Value):
    EotGlobalData.gMACRO[Name] = ReplaceMacro(Value, EotGlobalData.gMACRO, True)
## AddToSelfMacro() method
#
# Parse a "NAME = VALUE" macro definition line, expand the value against
# both the global and the given macro sets, and store it in SelfMacro.
#
# @param SelfMacro: The self macro set
# @param Line: The line of a macro definition
#
# @return Name: Name of macro ('' when the line is not a definition)
# @return Value: Value of macro ('' when the line is not a definition)
#
def AddToSelfMacro(SelfMacro, Line):
    Name, Value = '', ''
    TokenList = GetSplitValueList(Line, TAB_EQUAL_SPLIT, 1)
    if len(TokenList) == 2:
        Name, Value = TokenList[0], TokenList[1]
        # Expand first with global macros, then with the local set
        Value = ReplaceMacro(Value, EotGlobalData.gMACRO, True)
        Value = ReplaceMacro(Value, SelfMacro, True)
        SelfMacro[Name] = Value
    return (Name, Value)
## GetIncludeListOfFile() method
#
# Get the include path list for a source file
#
# 1. Find the source file belongs to which INF file
# 2. Find the inf's package
# 3. Return the include path list of the package
#
# @param WorkSpace: WORKSPACE path
# @param Filepath: File path
# @param Db: Eot database
#
# @return IncludeList: A list of include directories
#
def GetIncludeListOfFile(WorkSpace, Filepath, Db):
    IncludeList = []
    Filepath = os.path.normpath(Filepath)
    # Join the file table with the INF table: the source file's path is the
    # INF's directory plus the relative file name recorded in the INF.
    # NOTE(review): the path separator is hard-coded to '\\', which assumes
    # Windows-style paths in the database — confirm for other hosts.
    SqlCommand = """
                select Value1 from Inf where Model = %s and BelongsToFile in(
                    select distinct B.BelongsToFile from File as A left join Inf as B
                        where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')""" \
                % (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
    RecordSet = Db.TblFile.Exec(SqlCommand)
    for Record in RecordSet:
        DecFullPath = os.path.normpath(os.path.join(WorkSpace, Record[0]))
        (DecPath, DecName) = os.path.split(DecFullPath)
        # Fetch the include directories declared by the package's DEC file
        SqlCommand = """select Value1 from Dec where BelongsToFile =
                        (select ID from File where FullPath = '%s') and Model = %s""" \
                    % (DecFullPath, MODEL_EFI_INCLUDE)
        NewRecordSet = Db.TblDec.Exec(SqlCommand)
        for NewRecord in NewRecordSet:
            IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
            if IncludePath not in IncludeList:
                IncludeList.append(IncludePath)
    return IncludeList
## GetTableList() method
#
# Search the file table and build the list of per-file identifier tables
# (table name derived from the file ID) for the given file models.
#
# @param FileModelList: Model code for the file list
# @param Table: Table name prefix
# @param Db: Eot database
#
# @return TableList: A list of [TableName, FullPath] pairs
#
def GetTableList(FileModelList, Table, Db):
    SqlCommand = """select ID, FullPath from File where Model in %s""" % str(FileModelList)
    return [[Table + str(Record[0]), Record[1]] for Record in Db.TblFile.Exec(SqlCommand)]
## GetAllIncludeDirs() method
#
# Find all include directories recorded in the INF table.
#
# @param Db: Eot database
#
# @return IncludeList: A list of include directories
#
def GetAllIncludeDirs(Db):
    SqlCommand = """select distinct Value1 from Inf where Model = %s order by Value1""" % MODEL_EFI_INCLUDE
    return [Record[0] for Record in Db.TblInf.Exec(SqlCommand)]
## GetAllIncludeFiles() method
#
# Find all files inside the known include directories.
#
# @param Db: Eot database
#
# @return IncludeFileList: A list of include file paths
#
def GetAllIncludeFiles(Db):
    IncludeList = GetAllIncludeDirs(Db)
    IncludeFileList = []
    for Dir in IncludeList:
        if os.path.isdir(Dir):
            for Item in os.listdir(Dir):
                # BUGFIX: os.listdir returns names relative to Dir, so join
                # before testing; the original checked the bare name against
                # the current working directory and almost always missed.
                FullItem = os.path.join(Dir, Item)
                if os.path.isfile(FullItem):
                    IncludeFileList.append(FullItem)
    return IncludeFileList
## GetAllSourceFiles() method
#
# Find all source files recorded in the INF table.
#
# @param Db: Eot database
#
# @return SourceFileList: A list of source files
#
def GetAllSourceFiles(Db):
    SqlCommand = """select distinct Value1 from Inf where Model = %s order by Value1""" % MODEL_EFI_SOURCE_FILE
    return [Record[0] for Record in Db.TblInf.Exec(SqlCommand)]
## GetAllFiles() method
#
# Find all files, both source files and include files, deduplicated and
# filtered to those that exist on disk.
#
# @param Db: Eot database
#
# @return FileList: A list of files
#
def GetAllFiles(Db):
    FileList = []
    # Include files first, then source files — same order as before
    for Item in GetAllIncludeFiles(Db) + GetAllSourceFiles(Db):
        if os.path.isfile(Item) and Item not in FileList:
            FileList.append(Item)
    return FileList
## ParseConditionalStatement() method
#
# Parse conditional statement
#
# @param Line: One line to be parsed
# @param Macros: A set of all macro
# @param StatusSet: A set of all status
#
# @retval True: Find keyword of conditional statement
# @retval False: Not find keyword of conditional statement
#
# NOTE(review): keyword detection uses substring find over the upper-cased
# line, so the check order matters: '!IF EXIST' / '!IFDEF' / '!IFNDEF' must
# precede the plain '!IF' check because '!IF' is a substring of the others.
# This also assumes the TAB_* constants are already upper-case, since the
# slice offsets use the un-upper-cased constants — confirm.
#
def ParseConditionalStatement(Line, Macros, StatusSet):
    NewLine = Line.upper()
    # !IF EXIST <path>: status is whether the (macro-expanded) path exists
    if NewLine.find(TAB_IF_EXIST.upper()) > -1:
        IfLine = Line[NewLine.find(TAB_IF_EXIST) + len(TAB_IF_EXIST) + 1:].strip()
        IfLine = ReplaceMacro(IfLine, EotGlobalData.gMACRO, True)
        IfLine = ReplaceMacro(IfLine, Macros, True)
        IfLine = IfLine.replace("\"", '')
        IfLine = IfLine.replace("(", '')
        IfLine = IfLine.replace(")", '')
        Status = os.path.exists(os.path.normpath(IfLine))
        StatusSet.append([Status])
        return True
    # !IFDEF <macro>: status is whether the macro is defined anywhere
    if NewLine.find(TAB_IF_DEF.upper()) > -1:
        IfLine = Line[NewLine.find(TAB_IF_DEF) + len(TAB_IF_DEF) + 1:].strip()
        Status = False
        if IfLine in Macros or IfLine in EotGlobalData.gMACRO:
            Status = True
        StatusSet.append([Status])
        return True
    # !IFNDEF <macro>: inverse of !IFDEF
    if NewLine.find(TAB_IF_N_DEF.upper()) > -1:
        IfLine = Line[NewLine.find(TAB_IF_N_DEF) + len(TAB_IF_N_DEF) + 1:].strip()
        Status = False
        if IfLine not in Macros and IfLine not in EotGlobalData.gMACRO:
            Status = True
        StatusSet.append([Status])
        return True
    # !IF <expr>: evaluate the expression; opens a new nesting level
    if NewLine.find(TAB_IF.upper()) > -1:
        IfLine = Line[NewLine.find(TAB_IF) + len(TAB_IF) + 1:].strip()
        Status = ParseConditionalStatementMacros(IfLine, Macros)
        StatusSet.append([Status])
        return True
    # !ELSEIF <expr>: adds a branch status to the current nesting level
    if NewLine.find(TAB_ELSE_IF.upper()) > -1:
        IfLine = Line[NewLine.find(TAB_ELSE_IF) + len(TAB_ELSE_IF) + 1:].strip()
        Status = ParseConditionalStatementMacros(IfLine, Macros)
        StatusSet[-1].append(Status)
        return True
    # !ELSE: true only when no earlier branch of this level was true
    if NewLine.find(TAB_ELSE.upper()) > -1:
        Status = False
        for Item in StatusSet[-1]:
            Status = Status or Item
        StatusSet[-1].append(not Status)
        return True
    # !ENDIF: close the current nesting level
    if NewLine.find(TAB_END_IF.upper()) > -1:
        StatusSet.pop()
        return True
    return False
## ParseConditionalStatementMacros() method
#
# Evaluate the expression of a conditional statement after macro expansion
#
# @param Line: The expression to be evaluated
# @param Macros: A set of macros
#
# @return The result of the evaluated expression, or False when the
#         expression uses defined()/EXIST (not supported here)
#
def ParseConditionalStatementMacros(Line, Macros):
    # defined()/EXIST operators cannot be handled by eval; treat as False
    if Line.upper().find('DEFINED(') > -1 or Line.upper().find('EXIST') > -1:
        return False
    Line = ReplaceMacro(Line, EotGlobalData.gMACRO, True)
    Line = ReplaceMacro(Line, Macros, True)
    # Translate C-style logical operators into Python before evaluation
    Line = Line.replace("&&", "and")
    Line = Line.replace("||", "or")
    # SECURITY NOTE(review): eval() executes arbitrary Python expressions.
    # This is tolerable only because the input comes from build description
    # files that are assumed trusted — confirm no untrusted content reaches
    # this point.
    return eval(Line)
## GetConditionalStatementStatus() method
#
# A conditional region is active only if, at every nesting level, the most
# recently evaluated branch is true: AND together the last entry of each
# level in the status set.
#
# @param StatusSet: A set of all status
#
# @return Status: The final status
#
def GetConditionalStatementStatus(StatusSet):
    Status = True
    for LevelStatusList in StatusSet:
        Status = Status and LevelStatusList[-1]
    return Status
## SearchBelongsToFunction() method
#
# Find the function of a file that encloses the given line range.
#
# @param BelongsToFile: File id
# @param StartLine: Start line of search scope
# @param EndLine: End line of search scope
#
# @return (ID, Name) of the first enclosing function, or (-1, '') when none
#
def SearchBelongsToFunction(BelongsToFile, StartLine, EndLine):
    SqlCommand = """select ID, Name from Function where BelongsToFile = %s and StartLine <= %s and EndLine >= %s""" %(BelongsToFile, StartLine, EndLine)
    RecordSet = EotGlobalData.gDb.TblFunction.Exec(SqlCommand)
    if RecordSet:
        return RecordSet[0][0], RecordSet[0][1]
    return -1, ''
## SearchPpiCallFunction() method
#
# Search all used PPI calling function 'PeiServicesReInstallPpi' and 'PeiServicesInstallPpi'
# Store the result to database
#
# @param Identifier: Table id
# @param SourceFileID: Source file id
# @param SourceFileFullPath: Source file full path
# @param ItemMode: Mode of the item
#
def SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode):
    ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Ppi', '', '', ''
    # Pass 1: calls to PeiServicesReInstallPpi
    SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
                    where (Name like '%%%s%%' and Model = %s)""" \
                    % (Identifier, 'PeiServicesReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
    BelongsToFunctionID, BelongsToFunction = -1, ''
    Db = EotGlobalData.gDb.TblReport
    RecordSet = Db.Exec(SqlCommand)
    for Record in RecordSet:
        Index = 0
        BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
        BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
        VariableList = Record[0].split(',')
        for Variable in VariableList:
            Variable = Variable.strip()
            # Get index of the variable (array subscript, if any)
            if Variable.find('[') > -1:
                Index = int(Variable[Variable.find('[') + 1 : Variable.find(']')])
                Variable = Variable[:Variable.find('[')]
            # Get variable name (strip address-of operator)
            if Variable.startswith('&'):
                Variable = Variable[1:]
            # Get variable value: look up the PPI descriptor initializer
            SqlCommand = """select Value from %s where (Name like '%%%s%%') and Model = %s""" \
                         % (Identifier, Variable, MODEL_IDENTIFIER_VARIABLE)
            NewRecordSet = Db.Exec(SqlCommand)
            if NewRecordSet:
                NewRecord = NewRecordSet[0][0]
                VariableValueList = NewRecord.split('},')
                if len(VariableValueList) > Index:
                    VariableValue = VariableValueList[Index]
                    NewVariableValueList = VariableValue.split(',')
                    if len(NewVariableValueList) > 1:
                        NewVariableValue = NewVariableValueList[1].strip()
                        if NewVariableValue.startswith('&'):
                            Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, NewVariableValue[1:], GuidMacro, GuidValue, BelongsToFunction, 0)
                            continue
                        else:
                            # BUGFIX: log the value that failed to match; the
                            # original referenced an undefined name
                            # 'NewParameter' and raised NameError here.
                            EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, NewVariableValue))

    # Pass 2: calls/assignments involving PeiServicesInstallPpi
    ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Ppi', '', '', ''
    SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
                    where (Value like '%%%s%%' and Model = %s)""" \
                    % (Identifier, 'PeiServicesInstallPpi', MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
    BelongsToFunctionID, BelongsToFunction = -1, ''
    Db = EotGlobalData.gDb.TblReport
    RecordSet = Db.Exec(SqlCommand)

    SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
                    where (Name like '%%%s%%' and Model = %s)""" \
                    % (Identifier, 'PeiServicesInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
    Db = EotGlobalData.gDb.TblReport
    RecordSet2 = Db.Exec(SqlCommand)

    for Record in RecordSet + RecordSet2:
        if Record == []:
            continue
        Index = 0
        BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
        BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
        # Strip the call syntax down to the descriptor argument
        Variable = Record[0].replace('PeiServicesInstallPpi', '').replace('(', '').replace(')', '').replace('&', '').strip()
        Variable = Variable[Variable.find(',') + 1:].strip()
        # Get index of the variable (array subscript, if any)
        if Variable.find('[') > -1:
            Index = int(Variable[Variable.find('[') + 1 : Variable.find(']')])
            Variable = Variable[:Variable.find('[')]
        # Get variable name (strip address-of operator)
        if Variable.startswith('&'):
            Variable = Variable[1:]
        # Get variable value: look up the PPI descriptor initializer
        SqlCommand = """select Value from %s where (Name like '%%%s%%') and Model = %s""" \
                     % (Identifier, Variable, MODEL_IDENTIFIER_VARIABLE)
        NewRecordSet = Db.Exec(SqlCommand)
        if NewRecordSet:
            NewRecord = NewRecordSet[0][0]
            VariableValueList = NewRecord.split('},')
            for VariableValue in VariableValueList[Index:]:
                NewVariableValueList = VariableValue.split(',')
                if len(NewVariableValueList) > 1:
                    NewVariableValue = NewVariableValueList[1].strip()
                    if NewVariableValue.startswith('&'):
                        Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, NewVariableValue[1:], GuidMacro, GuidValue, BelongsToFunction, 0)
                        continue
                    else:
                        # BUGFIX: same undefined 'NewParameter' NameError as above
                        EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, NewVariableValue))
## SearchPpi() method
#
# Search all used PPI calling function
# Store the result to database
#
# Tries a sequence of increasingly indirect strategies to resolve the GUID
# of the PPI named in each matching record; unresolved records are written
# to the unmatched log.
#
# @param SqlCommand: SQL command statement
# @param Table: Table id
# @param SourceFileID: Source file id
# @param SourceFileFullPath: Source file full path
# @param ItemMode: Mode of the item
# @param PpiMode: Mode of PPI
#
def SearchPpi(SqlCommand, Table, SourceFileID, SourceFileFullPath, ItemMode, PpiMode = 1):
    ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Ppi', '', '', ''
    BelongsToFunctionID, BelongsToFunction = -1, ''
    Db = EotGlobalData.gDb.TblReport
    RecordSet = Db.Exec(SqlCommand)
    for Record in RecordSet:
        Parameter = GetPpiParameter(Record[0], PpiMode)
        BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
        # Get BelongsToFunction
        BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
        # Default is Not Found
        IsFound = False

        # For Consumed Ppi: a 'g'-prefixed name is taken as the GUID directly
        if ItemMode == 'Consumed':
            if Parameter.startswith('g'):
                Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, Parameter, GuidMacro, GuidValue, BelongsToFunction, 0)
            else:
                EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
            continue

        # Strategy 1: direct Parameter.Guid / Parameter->Guid assignment
        SqlCommand = """select Value from %s where (Name like '%%%s.Guid%%' or Name like '%%%s->Guid%%') and Model = %s""" % (Table, Parameter, Parameter, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
        NewRecordSet = Db.Exec(SqlCommand)
        for NewRecord in NewRecordSet:
            GuidName = GetParameterName(NewRecord[0])
            Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
            IsFound = True

        # Strategy 2: parameter defined as a variable; follow its value chain
        if not IsFound:
            Key = Parameter
            if Key.rfind(' ') > -1:
                Key = Key[Key.rfind(' ') : ].strip().replace('&', '')
            Value = FindKeyValue(EotGlobalData.gDb.TblFile, Table, Key)
            List = GetSplitValueList(Value.replace('\n', ''), TAB_COMMA_SPLIT)
            if len(List) > 1:
                GuidName = GetParameterName(List[1])
                Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                IsFound = True

        # Strategy 3: parameter is an element of a descriptor array
        if not IsFound:
            Start = Parameter.find('[')
            End = Parameter.find(']')
            if Start > -1 and End > -1 and Start < End:
                try:
                    Index = int(Parameter[Start + 1 : End])
                    Parameter = Parameter[0 : Start]
                    SqlCommand = """select Value from %s where Name = '%s' and Model = %s""" % (Table, Parameter, MODEL_IDENTIFIER_VARIABLE)
                    NewRecordSet = Db.Exec(SqlCommand)
                    for NewRecord in NewRecordSet:
                        NewParameter = GetSplitValueList(NewRecord[0], '}')[Index]
                        GuidName = GetPpiParameter(NewParameter[NewParameter.find('{') : ])
                        Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                        IsFound = True
                # A non-integer subscript or malformed initializer is simply
                # not resolvable by this strategy
                except Exception:
                    pass

        # Strategy 4: parameter declared in another C file of the same module
        # NOTE(review): this reassigns the 'Table' parameter, so subsequent
        # records of this loop query the last visited identifier table —
        # confirm whether that is intended.
        if not IsFound:
            SqlCommand = """select File.ID from Inf, File
                            where BelongsToFile = (select BelongsToFile from Inf where Value1 = '%s')
                            and Inf.Model = %s and Inf.Value1 = File.FullPath and File.Model = %s""" % (SourceFileFullPath, MODEL_EFI_SOURCE_FILE, MODEL_FILE_C)
            NewRecordSet = Db.Exec(SqlCommand)
            for NewRecord in NewRecordSet:
                Table = 'Identifier' + str(NewRecord[0])
                SqlCommand = """select Value from %s where Name = '%s' and Modifier = 'EFI_PEI_PPI_DESCRIPTOR' and Model = %s""" % (Table, Parameter, MODEL_IDENTIFIER_VARIABLE)
                PpiSet = Db.Exec(SqlCommand)
                if PpiSet != []:
                    GuidName = GetPpiParameter(PpiSet[0][0])
                    if GuidName != '':
                        Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                        IsFound = True
                        break

        if not IsFound:
            EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
## SearchProtocols() method
#
# Search all used PROTOCOL calling function
# Store the result to database
#
# @param SqlCommand: SQL command statement
# @param Table: Table id
# @param SourceFileID: Source file id
# @param SourceFileFullPath: Source file full path
# @param ItemMode: Mode of the item
# @param ProtocolMode: Mode of PROTOCOL; 0/1 select a positional parameter,
#                      2 scans the whole comma-separated argument list
#
def SearchProtocols(SqlCommand, Table, SourceFileID, SourceFileFullPath, ItemMode, ProtocolMode):
    ItemName, ItemType, GuidName, GuidMacro, GuidValue = '', 'Protocol', '', '', ''
    BelongsToFunctionID, BelongsToFunction = -1, ''
    Db = EotGlobalData.gDb.TblReport
    RecordSet = Db.Exec(SqlCommand)
    for Record in RecordSet:
        Parameter = ''
        BelongsToFile, StartLine, EndLine = Record[2], Record[3], Record[4]
        # Get BelongsToFunction
        BelongsToFunctionID, BelongsToFunction = SearchBelongsToFunction(BelongsToFile, StartLine, EndLine)
        # Default is Not Found
        IsFound = False

        # Positional mode: pull the parameter at the given index and accept
        # names that look like GUID references
        if ProtocolMode == 0 or ProtocolMode == 1:
            Parameter = GetProtocolParameter(Record[0], ProtocolMode)
            if Parameter.startswith('g') or Parameter.endswith('Guid') or Parameter == 'ShellEnvProtocol' or Parameter == 'ShellInterfaceProtocol':
                GuidName = GetParameterName(Parameter)
                Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                IsFound = True

        # Scan mode: check every argument, following variable values when the
        # argument itself is not a direct &...Guid reference
        if ProtocolMode == 2:
            Protocols = GetSplitValueList(Record[0], TAB_COMMA_SPLIT)
            for Protocol in Protocols:
                if Protocol.startswith('&') and Protocol.endswith('Guid'):
                    GuidName = GetParameterName(Protocol)
                    Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                    IsFound = True
                else:
                    NewValue = FindKeyValue(EotGlobalData.gDb.TblFile, Table, Protocol)
                    if Protocol != NewValue and NewValue.endswith('Guid'):
                        GuidName = GetParameterName(NewValue)
                        Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                        IsFound = True

        # Unresolved: log to the library-calling log when the call happened
        # inside a known protocol library function, otherwise to the general
        # unmatched log
        if not IsFound:
            if BelongsToFunction in EotGlobalData.gProducedProtocolLibrary or BelongsToFunction in EotGlobalData.gConsumedProtocolLibrary:
                EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s, %s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter, BelongsToFunction))
            else:
                EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
## SearchFunctionCalling() method
#
# Search all used PPI/PROTOCOL calling function by library
# Store the result to database
#
# @param Table: Table id
# @param SourceFileID: Source file id
# @param SourceFileFullPath: Source file full path
# @param ItemType: Type of the item, PPI or PROTOCOL
# @param ItemMode: Mode of item
#
def SearchFunctionCalling(Table, SourceFileID, SourceFileFullPath, ItemType, ItemMode):
    LibraryList = {}
    Db = EotGlobalData.gDb.TblReport
    Parameters, ItemName, GuidName, GuidMacro, GuidValue, BelongsToFunction = [], '', '', '', '', ''
    # Pick the library-function -> parameter-index map for this item kind;
    # an index of -1 means "scan every argument"
    if ItemType == 'Protocol' and ItemMode == 'Produced':
        LibraryList = EotGlobalData.gProducedProtocolLibrary
    elif ItemType == 'Protocol' and ItemMode == 'Consumed':
        LibraryList = EotGlobalData.gConsumedProtocolLibrary
    elif ItemType == 'Protocol' and ItemMode == 'Callback':
        LibraryList = EotGlobalData.gCallbackProtocolLibrary
    elif ItemType == 'Ppi' and ItemMode == 'Produced':
        LibraryList = EotGlobalData.gProducedPpiLibrary
    elif ItemType == 'Ppi' and ItemMode == 'Consumed':
        LibraryList = EotGlobalData.gConsumedPpiLibrary

    for Library in LibraryList:
        Index = LibraryList[Library]
        SqlCommand = """select Value, StartLine from %s
                        where Name like '%%%s%%' and Model = %s""" \
                    % (Table, Library, MODEL_IDENTIFIER_FUNCTION_CALLING)
        RecordSet = Db.Exec(SqlCommand)
        for Record in RecordSet:
            IsFound = False
            # BUGFIX: rebuild the parameter list for every record.  The
            # original appended into one list shared across records, so
            # parameters from earlier records were re-processed (and
            # re-logged) for every later record.
            if Index == -1:
                ParameterList = GetSplitValueList(Record[0], TAB_COMMA_SPLIT)
                Parameters = [GetParameterName(Parameter) for Parameter in ParameterList]
            else:
                Parameters = [GetProtocolParameter(Record[0], Index)]
            StartLine = Record[1]
            for Parameter in Parameters:
                if Parameter.startswith('g') or Parameter.endswith('Guid') or Parameter == 'ShellEnvProtocol' or Parameter == 'ShellInterfaceProtocol':
                    GuidName = GetParameterName(Parameter)
                    Db.Insert(-1, '', '', SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, 0)
                    IsFound = True
            if not IsFound:
                EotGlobalData.gOP_UN_MATCHED.write('%s, %s, %s, %s, %s, %s\n' % (ItemType, ItemMode, SourceFileID, SourceFileFullPath, StartLine, Parameter))
## FindProtocols() method
#
# Find defined protocols
#
# @param SqlCommand: SQL command statement
# @param Table: Table id
# @param SourceFileID: Source file id
# @param SourceFileFullPath: Source file full path
# @param ItemName: String of protocol definition
# @param ItemType: Type of the item, PPI or PROTOCOL
# @param ItemMode: Mode of item
#
#def FindProtocols(Db, SqlCommand, Table, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue):
#    BelongsToFunction = ''
#    RecordSet = Db.Exec(SqlCommand)
#    for Record in RecordSet:
#        IsFound = True
#        Parameter = GetProtocolParameter(Record[0])

## GetProtocolParameter() method
#
# Parse string of protocol and find parameters
#
# @param Parameter: Parameter to be parsed
# @param Index: The index of the parameter; the default of 1 selects the
#               second comma-separated element
#
# @return: call common GetParameter
#
def GetProtocolParameter(Parameter, Index = 1):
    # Thin wrapper kept for readability at call sites
    return GetParameter(Parameter, Index)
## GetPpiParameter() method
#
# Parse string of ppi and find parameters
#
# @param Parameter: Parameter to be parsed
# @param Index: The index of the parameter; the default of 1 selects the
#               second comma-separated element
#
# @return: call common GetParameter
#
def GetPpiParameter(Parameter, Index = 1):
    # Thin wrapper kept for readability at call sites
    return GetParameter(Parameter, Index)
## GetParameter() method
#
# Split a comma-separated parameter string and return the cleaned name of
# the element at the given index.
#
# @param Parameter: Parameter to be parsed
# @param Index: The index of the parameter
#
# @return The found parameter, or '' when the index is out of range
#
def GetParameter(Parameter, Index = 1):
    ParameterList = GetSplitValueList(Parameter, TAB_COMMA_SPLIT)
    if len(ParameterList) > Index:
        return GetParameterName(ParameterList[Index])
    return ''
## GetParameterName() method
#
# Clean up a parameter token: for a string starting with the address-of
# operator, strip '&', braces and CR/LF; otherwise just strip whitespace.
#
# @param Parameter: Parameter to be parsed
#
# @return: The name of parameter
#
def GetParameterName(Parameter):
    if isinstance(Parameter, str) and Parameter.startswith('&'):
        Cleaned = Parameter[1:]
        for Junk in ('{', '}', '\r', '\n'):
            Cleaned = Cleaned.replace(Junk, '')
        return Cleaned.strip()
    return Parameter.strip()
## FindKeyValue() method
#
# Find the terminal value of a variable by following the chain of
# variable/assignment records.
#
# @param Db: Database to be searched
# @param Table: Table to be searched
# @param Key: The keyword
#
# @return Value: The deepest resolvable value, or Key itself when no
#                further record exists
#
def FindKeyValue(Db, Table, Key):
    SqlCommand = """select Value from %s where Name = '%s' and (Model = %s or Model = %s)""" % (Table, Key, MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
    RecordSet = Db.Exec(SqlCommand)
    for Record in RecordSet:
        if Record[0] != 'NULL':
            # Recurse: the record's value may itself be a variable name
            Value = FindKeyValue(Db, Table, GetParameterName(Record[0]))
            if Value != '':
                return Value
            else:
                return Key
    # BUGFIX: no usable record means the key itself is the terminal value.
    # The original fell off the end and implicitly returned None, which
    # crashed callers that invoke string methods (e.g. .endswith) on the
    # result.
    return Key
## ParseMapFile() method
#
# Parse map files to get a dict of 'ModuleName' : {FunName : FunAddress}
#
# @param Files: A list of map files
#
# @return AllMaps: An object of all map files
#
def ParseMapFile(Files):
    AllMaps = {}
    for File in Files:
        # Reset per file so symbols cannot bleed across file boundaries
        CurrentModule = ''
        CurrentMaps = {}
        with open(File, 'r') as MapFile:
            Content = MapFile.readlines()
        for Line in Content:
            Line = CleanString(Line)
            # skip empty line
            if Line == '':
                continue

            # A line containing '(' and ')' starts a new module section
            if Line.find('(') > -1 and Line.find(')') > -1:
                # Store what was collected for the previous module
                if CurrentModule != '' and CurrentMaps != {}:
                    AllMaps[CurrentModule] = CurrentMaps
                CurrentModule = Line[:Line.find('(')]
                CurrentMaps = {}
                continue
            else:
                # Symbol line: "<address> [F|FS] <name>"
                List = Line.split()
                Address = List[0]
                if List[1] == 'F' or List[1] == 'FS':
                    Name = List[2]
                else:
                    Name = List[1]
                CurrentMaps[Name] = Address
                continue
        # BUGFIX: flush the last module of each file.  The original only
        # stored a module's map when the NEXT module header was seen, so the
        # final module of every file was silently dropped.
        if CurrentModule != '' and CurrentMaps != {}:
            AllMaps[CurrentModule] = CurrentMaps
    return AllMaps
## ConvertGuid
#
# Convert a camel-case GUID name to an upper-case, underscore-separated
# form: a leading 'g' is dropped, each non-digit upper-case letter starts a
# new '_'-prefixed segment, and stray leading/trailing underscores are
# trimmed.
#
# @param guid: The GUID to be converted
#
# @return newGuid: The GUID with all upper letters.
#
def ConvertGuid(guid):
    Digits = '0123456789'
    if guid.startswith('g'):
        guid = guid[1:]
    Pieces = []
    for Char in guid:
        if Char.upper() == Char and Char not in Digits:
            # Upper-case letter marks a word boundary
            Pieces.append('_' + Char)
        else:
            Pieces.append(Char.upper())
    newGuid = ''.join(Pieces)
    if newGuid.startswith('_'):
        newGuid = newGuid[1:]
    if newGuid.endswith('_'):
        newGuid = newGuid[:-1]
    return newGuid
## ConvertGuid2() method
#
# Convert a GUID name as ConvertGuid does, then substitute one substring
# for another in the result.
#
# @param guid: The GUID to be converted
# @param old: Old string to be replaced
# @param new: New string to replace the old one
#
# @return newGuid: The GUID after replacement
#
def ConvertGuid2(guid, old, new):
    return ConvertGuid(guid).replace(old, new)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # This module only provides helper functions; there is no standalone behavior.
    pass
| edk2-master | BaseTools/Source/Python/Eot/Parser.py |
## @file
# This file is used to save global datas
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from collections import OrderedDict
from Common.LongFilePathSupport import OpenLongFilePath as open
gEFI_SOURCE = ''
gEDK_SOURCE = ''
gWORKSPACE = ''
# Raw string: as a plain literal '\S' is an invalid escape sequence
# (DeprecationWarning today, an error in future Python); the value is unchanged.
gSHELL_INF = r'Application\Shell'
gMAKE_FILE = ''
gDSC_FILE = ''
gFV_FILE = []
gFV = []
gMAP_FILE = []
gMap = {}
gDb = ''
gIdentifierTableList = []

# Global macro table, seeded from the path variables above
gMACRO = {}
gMACRO['EFI_SOURCE'] = gEFI_SOURCE
gMACRO['EDK_SOURCE'] = gEDK_SOURCE
gMACRO['SHELL_INF'] = gSHELL_INF
gMACRO['CAPSULE_INF'] = ''

# Log file for unmatched variables
gUN_MATCHED_LOG = 'Log_UnMatched.log'
gOP_UN_MATCHED = open(gUN_MATCHED_LOG, 'w+')

# Log file for all INF files
gINF_FILES = 'Log_Inf_File.log'
gOP_INF = open(gINF_FILES, 'w+')

# Log file for not dispatched PEIM/DRIVER
gUN_DISPATCHED_LOG = 'Log_UnDispatched.log'
gOP_UN_DISPATCHED = open(gUN_DISPATCHED_LOG, 'w+')

# Log file for unmatched variables in function calling
gUN_MATCHED_IN_LIBRARY_CALLING_LOG = 'Log_UnMatchedInLibraryCalling.log'
gOP_UN_MATCHED_IN_LIBRARY_CALLING = open(gUN_MATCHED_IN_LIBRARY_CALLING_LOG, 'w+')

# Log file for order of dispatched PEIM/DRIVER
gDISPATCH_ORDER_LOG = 'Log_DispatchOrder.log'
gOP_DISPATCH_ORDER = open(gDISPATCH_ORDER_LOG, 'w+')

# Log file for found source files
gSOURCE_FILES = 'Log_SourceFiles.log'
gOP_SOURCE_FILES = open(gSOURCE_FILES, 'w+')

# Dict for GUID found in DEC files
gGuidDict = dict()

# Dict for PROTOCOL
gProtocolList = {}

# Dict for PPI
gPpiList = {}

# Dict for consumed PPI function calling
gConsumedPpiLibrary = OrderedDict()
gConsumedPpiLibrary['EfiCommonLocateInterface'] = 0
gConsumedPpiLibrary['PeiServicesLocatePpi'] = 0

# Dict for produced PROTOCOL function calling
gProducedProtocolLibrary = OrderedDict()
gProducedProtocolLibrary['RegisterEsalClass'] = 0
gProducedProtocolLibrary['CoreInstallProtocolInterface'] = 1
gProducedProtocolLibrary['CoreInstallMultipleProtocolInterfaces'] = -1
gProducedProtocolLibrary['EfiInstallProtocolInterface'] = 1
gProducedProtocolLibrary['EfiReinstallProtocolInterface'] = 1
gProducedProtocolLibrary['EfiLibNamedEventSignal'] = 0
gProducedProtocolLibrary['LibInstallProtocolInterfaces'] = 1
gProducedProtocolLibrary['LibReinstallProtocolInterfaces'] = 1

# Dict for consumed PROTOCOL function calling
gConsumedProtocolLibrary = OrderedDict()
gConsumedProtocolLibrary['EfiHandleProtocol'] = 0
gConsumedProtocolLibrary['EfiLocateProtocolHandleBuffers'] = 0
gConsumedProtocolLibrary['EfiLocateProtocolInterface'] = 0
# NOTE(review): 'EfiHandleProtocol' is assigned twice; this later value (1)
# silently overwrites the earlier 0 -- confirm which value is intended.
gConsumedProtocolLibrary['EfiHandleProtocol'] = 1

# Dict for callback PROTOCOL function calling
gCallbackProtocolLibrary = OrderedDict()
gCallbackProtocolLibrary['EfiRegisterProtocolCallback'] = 2

# GUIDs of the DXE architectural protocols
gArchProtocolGuids = {'665e3ff6-46cc-11d4-9a38-0090273fc14d',
                      '26baccb1-6f42-11d4-bce7-0080c73c8881',
                      '26baccb2-6f42-11d4-bce7-0080c73c8881',
                      '1da97072-bddc-4b30-99f1-72a0b56fff2a',
                      '27cfac87-46cc-11d4-9a38-0090273fc14d',
                      '27cfac88-46cc-11d4-9a38-0090273fc14d',
                      'b7dfb4e1-052f-449f-87be-9818fc91b733',
                      'a46423e3-4617-49f1-b9ff-d1bfa9115839',
                      'd2b2b828-0826-48a7-b3df-983c006024f0',
                      '26baccb3-6f42-11d4-bce7-0080c73c8881',
                      '1e5668e2-8481-11d4-bcf1-0080c73c8881',
                      '6441f818-6362-4e44-b570-7dba31dd2453',
                      '665e3ff5-46cc-11d4-9a38-0090273fc14d'}
| edk2-master | BaseTools/Source/Python/Eot/EotGlobalData.py |
## @file
# preprocess source file
#
# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import sys
import Common.LongFilePathOs as os
import re
from . import CodeFragmentCollector
from . import FileProfile
from CommonDataClass import DataClass
from Common import EdkLogger
from .EotToolError import *
from . import EotGlobalData
# Global Dicts
# Module-level caches shared by the parsing helpers in this file.
IncludeFileListDict = {}
IncludePathListDict = {}
ComplexTypeDict = {}
SUDict = {}
## GetFuncDeclPattern() method
#
# Get the pattern of function declaration
#
# @return p: the pattern of function declaration
#
def GetFuncDeclPattern():
    """Return the compiled regex that recognizes a C function declaration.

    Accepts an optional calling-convention prefix followed by an identifier
    and a parenthesized parameter list; DOTALL lets declarations span lines.
    """
    pattern = (r'(EFIAPI|EFI_BOOT_SERVICE|EFI_RUNTIME_SERVICE)?'
               r'\s*[_\w]+\s*\(.*\).*')
    return re.compile(pattern, re.DOTALL)
## GetArrayPattern() method
#
# Get the pattern of array
#
# @return p: the pattern of array
#
def GetArrayPattern():
    """Return the compiled regex that recognizes a C array declarator.

    NOTE(review): the trailing `[\[.*\]]+` is a character class matching any
    of '[', '.', '*', ']' -- kept as-is to preserve existing matching behavior.
    """
    return re.compile(r'[_\w]*\s*[\[.*\]]+')
## GetTypedefFuncPointerPattern() method
#
# Get the pattern of function pointer
#
# @return p: the pattern of function pointer
#
def GetTypedefFuncPointerPattern():
    """Return the compiled regex that recognizes a typedef'd function pointer.

    The pattern is now a raw string: the original plain literal relied on
    '\\(' , '\\w', '\\*' etc. being *unrecognized* escape sequences, which
    raises a DeprecationWarning and will become a SyntaxError in future
    Python.  The compiled pattern is byte-identical to the original.
    """
    return re.compile(r'[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
## GetDB() method
#
# Get global database instance
#
# @return EotGlobalData.gDb: the global database instance
#
def GetDB():
    """Return the global Eot database instance held by EotGlobalData."""
    DbInstance = EotGlobalData.gDb
    return DbInstance
## PrintErrorMsg() method
#
# print error message
#
# @param ErrorType: Type of error
# @param Msg: Error message
# @param TableName: table name of error found
# @param ItemId: id of item
#
def PrintErrorMsg(ErrorType, Msg, TableName, ItemId):
    """Record an error message in the report table.

    @param ErrorType: type of the error
    @param Msg: message text; newlines are removed and runs of whitespace are
                collapsed to single spaces (a trailing space remains, as before)
    @param TableName: table name of the error found
    @param ItemId: id of the related item
    """
    Flattened = Msg.replace('\n', '').replace('\r', '')
    # each word followed by one space reproduces the original concatenation,
    # including the trailing space on non-empty messages
    Normalized = ''.join(Word + ' ' for Word in Flattened.split())
    GetDB().TblReport.Insert(ErrorType, OtherMsg=Normalized, BelongsToTable=TableName, BelongsToItem=ItemId)
## GetIdType() method
#
# Find type of input string
#
# @param Str: String to be parsed
#
# @return Type: The type of the string
#
def GetIdType(Str):
    """Classify a preprocessor directive string.

    @param Str: the directive text (e.g. '#include <foo.h>')
    @return one of the DataClass.MODEL_IDENTIFIER_* constants, or
            DataClass.MODEL_UNKNOWN when the keyword is absent or unrecognized
    """
    # insert a space after '#' so '#define' and '# define' split identically
    List = Str.replace('#', '# ').split()
    # guard: a bare '#' (or empty string) has no keyword token; the original
    # raised IndexError here
    if len(List) < 2:
        return DataClass.MODEL_UNKNOWN
    # dispatch table instead of an if/elif chain
    TypeMap = {
        'include': DataClass.MODEL_IDENTIFIER_INCLUDE,
        'define':  DataClass.MODEL_IDENTIFIER_MACRO_DEFINE,
        'ifdef':   DataClass.MODEL_IDENTIFIER_MACRO_IFDEF,
        'ifndef':  DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF,
        'endif':   DataClass.MODEL_IDENTIFIER_MACRO_ENDIF,
        'pragma':  DataClass.MODEL_IDENTIFIER_MACRO_PROGMA,
    }
    return TypeMap.get(List[1], DataClass.MODEL_UNKNOWN)
## GetIdentifierList() method
#
# Get id of all files
#
# @return IdList: The list of all id of files
#
def GetIdentifierList():
    """Collect IdentifierClass objects for everything parsed into FileProfile.

    Walks the module-level FileProfile lists (PP directives, assignments,
    variable declarations, enums, structs/unions, typedefs, function calls)
    and wraps each item in a DataClass.IdentifierClass.

    @return IdList: list of IdentifierClass objects for the current file
    """
    IdList = []
    # preprocessor directives
    for pp in FileProfile.PPDirectiveList:
        Type = GetIdType(pp.Content)
        IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0], pp.StartPos[1], pp.EndPos[0], pp.EndPos[1])
        IdList.append(IdPP)
    # assignment expressions
    for ae in FileProfile.AssignmentExpressionList:
        IdAE = DataClass.IdentifierClass(-1, ae.Operator, '', ae.Name, ae.Value, DataClass.MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION, -1, -1, ae.StartPos[0], ae.StartPos[1], ae.EndPos[0], ae.EndPos[1])
        IdList.append(IdAE)
    FuncDeclPattern = GetFuncDeclPattern()
    ArrayPattern = GetArrayPattern()
    # variable declarations (NOTE: mutates var.Modifier/var.Declarator in place)
    for var in FileProfile.VariableDeclarationList:
        # move leading '*' pointer markers from the declarator into the modifier
        DeclText = var.Declarator.strip()
        while DeclText.startswith('*'):
            var.Modifier += '*'
            DeclText = DeclText.lstrip('*').strip()
        var.Declarator = DeclText
        if FuncDeclPattern.match(var.Declarator):
            # declaration shaped like a function: split name from parameter list
            DeclSplitList = var.Declarator.split('(')
            FuncName = DeclSplitList[0]
            FuncNamePartList = FuncName.split()
            if len(FuncNamePartList) > 1:
                FuncName = FuncNamePartList[-1]
                Index = 0
                while Index < len(FuncNamePartList) - 1:
                    var.Modifier += ' ' + FuncNamePartList[Index]
                    # NOTE(review): str.lstrip(arg) strips a CHARACTER SET, not a
                    # prefix string -- this can over-strip when the declarator
                    # starts with characters from the modifier word; confirm.
                    var.Declarator = var.Declarator.lstrip().lstrip(FuncNamePartList[Index])
                    Index += 1
            IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, '', DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
            IdList.append(IdVar)
            continue
        if var.Declarator.find('{') == -1:
            # plain declaration(s): may declare several names separated by ','
            for decl in var.Declarator.split(','):
                DeclList = decl.split('=')
                Name = DeclList[0].strip()
                if ArrayPattern.match(Name):
                    # move the '[...]' array suffix into the modifier
                    LSBPos = var.Declarator.find('[')
                    var.Modifier += ' ' + Name[LSBPos:]
                    Name = Name[0:LSBPos]
                IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
                IdList.append(IdVar)
        else:
            # brace initializer present: treat the whole declarator as one name
            DeclList = var.Declarator.split('=')
            Name = DeclList[0].strip()
            if ArrayPattern.match(Name):
                LSBPos = var.Declarator.find('[')
                var.Modifier += ' ' + Name[LSBPos:]
                Name = Name[0:LSBPos]
            IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]]or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0], var.StartPos[1], var.EndPos[0], var.EndPos[1])
            IdList.append(IdVar)
    # enumerations: name is between 'enum' (4 chars) and '{', value is the body
    for enum in FileProfile.EnumerationDefinitionList:
        LBPos = enum.Content.find('{')
        RBPos = enum.Content.find('}')
        Name = enum.Content[4:LBPos].strip()
        Value = enum.Content[LBPos+1:RBPos]
        IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0], enum.StartPos[1], enum.EndPos[0], enum.EndPos[1])
        IdList.append(IdEnum)
    # structs and unions: skip the 'struct '/'union ' keyword prefix
    for su in FileProfile.StructUnionDefinitionList:
        Type = DataClass.MODEL_IDENTIFIER_STRUCTURE
        SkipLen = 6
        if su.Content.startswith('union'):
            Type = DataClass.MODEL_IDENTIFIER_UNION
            SkipLen = 5
        LBPos = su.Content.find('{')
        RBPos = su.Content.find('}')
        if LBPos == -1 or RBPos == -1:
            # forward declaration without a body
            Name = su.Content[SkipLen:].strip()
            Value = ''
        else:
            Name = su.Content[SkipLen:LBPos].strip()
            Value = su.Content[LBPos+1:RBPos]
        IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0], su.StartPos[1], su.EndPos[0], su.EndPos[1])
        IdList.append(IdPE)
    # typedefs, with special handling for function-pointer typedefs
    TdFuncPointerPattern = GetTypedefFuncPointerPattern()
    for td in FileProfile.TypedefDefinitionList:
        Modifier = ''
        Name = td.ToType
        Value = td.FromType
        if TdFuncPointerPattern.match(td.ToType):
            # pull the pointer name out of '(... *Name)(...)'; value becomes 'FP' + params
            Modifier = td.FromType
            LBPos = td.ToType.find('(')
            TmpStr = td.ToType[LBPos+1:].strip()
            StarPos = TmpStr.find('*')
            if StarPos != -1:
                Modifier += ' ' + TmpStr[0:StarPos]
                while TmpStr[StarPos] == '*':
                    Modifier += ' ' + '*'
                    StarPos += 1
                TmpStr = TmpStr[StarPos:].strip()
            RBPos = TmpStr.find(')')
            Name = TmpStr[0:RBPos]
            Value = 'FP' + TmpStr[RBPos + 1:]
        IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0], td.StartPos[1], td.EndPos[0], td.EndPos[1])
        IdList.append(IdTd)
    # function calls
    for funcCall in FileProfile.FunctionCallingList:
        IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0], funcCall.StartPos[1], funcCall.EndPos[0], funcCall.EndPos[1])
        IdList.append(IdFC)
    return IdList
## GetParamList() method
#
# Get a list of parameters
#
# @param FuncDeclarator: Function declarator
# @param FuncNameLine: Line number of function name
# @param FuncNameOffset: Offset of function name
#
# @return ParamIdList: A list of parameters
#
def GetParamList(FuncDeclarator, FuncNameLine = 0, FuncNameOffset = 0):
    """Extract parameter identifiers from a function declarator string.

    @param FuncDeclarator: the full declarator text, e.g. 'Foo (UINTN A, ...)'
    @param FuncNameLine: line number where the declarator starts
    @param FuncNameOffset: column offset where the declarator starts (unused
                           in the position math below)
    @return ParamIdList: list of DataClass.IdentifierClass, one per parameter
    """
    ParamIdList = []
    DeclSplitList = FuncDeclarator.split('(')
    # no '(' means this is not a function declarator at all
    if len(DeclSplitList) < 2:
        return ParamIdList
    FuncName = DeclSplitList[0]
    ParamStr = DeclSplitList[1].rstrip(')')
    # track line/offset consumed by the name portion so parameter positions
    # can be reported relative to the original source
    LineSkipped = 0
    OffsetSkipped = 0
    Start = 0
    while FuncName.find('\n', Start) != -1:
        LineSkipped += 1
        OffsetSkipped = 0
        # NOTE(review): Start accumulates the absolute find() index plus one;
        # for names containing more than one newline this overshoots -- verify
        # against the intended "advance past the newline" behavior.
        Start += FuncName.find('\n', Start)
        Start += 1
    OffsetSkipped += len(FuncName[Start:])
    OffsetSkipped += 1 #skip '('
    ParamBeginLine = FuncNameLine + LineSkipped
    ParamBeginOffset = OffsetSkipped
    for p in ParamStr.split(','):
        ListP = p.split()
        if len(ListP) == 0:
            continue
        # last whitespace-separated token is assumed to be the parameter name
        ParamName = ListP[-1]
        DeclText = ParamName.strip()
        RightSpacePos = p.rfind(ParamName)
        ParamModifier = p[0:RightSpacePos]
        if ParamName == 'OPTIONAL':
            # 'OPTIONAL' is an EDK2 decoration, not the name: shift to the
            # token before it (when present) and append OPTIONAL to the modifier
            if ParamModifier == '':
                ParamModifier += ' ' + 'OPTIONAL'
                DeclText = ''
            else:
                ParamName = ListP[-2]
                DeclText = ParamName.strip()
                RightSpacePos = p.rfind(ParamName)
                ParamModifier = p[0:RightSpacePos]
                ParamModifier += 'OPTIONAL'
        # move leading '*' pointer markers into the modifier
        while DeclText.startswith('*'):
            ParamModifier += ' ' + '*'
            DeclText = DeclText.lstrip('*').strip()
        ParamName = DeclText
        # advance the position bookkeeping across this parameter's text
        Start = 0
        while p.find('\n', Start) != -1:
            LineSkipped += 1
            OffsetSkipped = 0
            # NOTE(review): same accumulating-Start pattern as above -- verify.
            Start += p.find('\n', Start)
            Start += 1
        OffsetSkipped += len(p[Start:])
        ParamEndLine = ParamBeginLine + LineSkipped
        ParamEndOffset = OffsetSkipped
        IdParam = DataClass.IdentifierClass(-1, ParamModifier, '', ParamName, '', DataClass.MODEL_IDENTIFIER_PARAMETER, -1, -1, ParamBeginLine, ParamBeginOffset, ParamEndLine, ParamEndOffset)
        ParamIdList.append(IdParam)
        ParamBeginLine = ParamEndLine
        ParamBeginOffset = OffsetSkipped + 1 #skip ','
    return ParamIdList
## GetFunctionList()
#
# Get a list of functions
#
# @return FuncObjList: A list of function objects
#
def GetFunctionList():
    """Build FunctionClass objects from FileProfile.FunctionDefinitionList.

    NOTE: mutates each FuncDef's Modifier/Declarator in place while splitting
    the return-type/modifier words off the function name.

    @return FuncObjList: list of DataClass.FunctionClass objects
    """
    FuncObjList = []
    for FuncDef in FileProfile.FunctionDefinitionList:
        ParamIdList = []
        # move leading '*' pointer markers from the declarator into the modifier
        DeclText = FuncDef.Declarator.strip()
        while DeclText.startswith('*'):
            FuncDef.Modifier += '*'
            DeclText = DeclText.lstrip('*').strip()
        FuncDef.Declarator = FuncDef.Declarator.lstrip('*')
        DeclSplitList = FuncDef.Declarator.split('(')
        # no parameter list -> not a function definition we can use
        if len(DeclSplitList) < 2:
            continue
        FuncName = DeclSplitList[0]
        FuncNamePartList = FuncName.split()
        if len(FuncNamePartList) > 1:
            # last word is the name; earlier words are modifiers/return type
            FuncName = FuncNamePartList[-1]
            Index = 0
            while Index < len(FuncNamePartList) - 1:
                FuncDef.Modifier += ' ' + FuncNamePartList[Index]
                Index += 1
        FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0], FuncDef.StartPos[1], FuncDef.EndPos[0], FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, [])
        FuncObjList.append(FuncObj)
    return FuncObjList
## CreateCCodeDB() method
#
# Create database for all c code
#
# @param FileNameList: A list of all c code file names
#
def CreateCCodeDB(FileNameList):
    """Parse every .c/.h file in FileNameList and store the results in the
    global Eot database.

    @param FileNameList: iterable of C source/header file paths
    """
    FileObjList = []
    ParseErrorFileList = []
    ParsedFiles = {}
    for FullName in FileNameList:
        if os.path.splitext(FullName)[1] in ('.h', '.c'):
            # skip files already parsed (case-insensitive path comparison)
            if FullName.lower() in ParsedFiles:
                continue
            ParsedFiles[FullName.lower()] = 1
            EdkLogger.info("Parsing " + FullName)
            model = FullName.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
            collector = CodeFragmentCollector.CodeFragmentCollector(FullName)
            try:
                collector.ParseFile()
            except Exception:
                # narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
                # still propagate; parse failures are reported in bulk below
                ParseErrorFileList.append(FullName)
            BaseName = os.path.basename(FullName)
            DirName = os.path.dirname(FullName)
            Ext = os.path.splitext(BaseName)[1].lstrip('.')
            ModifiedTime = os.path.getmtime(FullName)
            FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
            FileObjList.append(FileObj)
            collector.CleanFileProfileBuffer()
    if len(ParseErrorFileList) > 0:
        EdkLogger.info("Found unrecoverable error during parsing:\n\t%s\n" % "\n\t".join(ParseErrorFileList))
    Db = EotGlobalData.gDb
    # renamed from 'file' to avoid shadowing the builtin
    for FileItem in FileObjList:
        Db.InsertOneFile(FileItem)
    Db.UpdateIdentifierBelongsToFunction()
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
# Script entry point for ad-hoc testing of this module.
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.QUIET)
    # NOTE(review): CollectSourceCodeDataIntoDB is not defined or imported in
    # this module (CreateCCodeDB looks like the intended call) -- running this
    # file directly would raise NameError; confirm before use.
    CollectSourceCodeDataIntoDB(sys.argv[1])
    print('Done!')
| edk2-master | BaseTools/Source/Python/Eot/c.py |
## @file
# Warning information of Eot
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
class Warning (Exception):
    """Exception recording an EOT parse problem together with its location.

    Note: intentionally named 'Warning' (shadowing the builtin) to match the
    existing callers of this module.
    """

    ## The constructor
    #
    # @param self  The object pointer
    # @param Str   The message to record
    # @param File  The FDF name, if known
    # @param Line  The line number where the problem occurred, if known
    #
    def __init__(self, Str, File=None, Line=None):
        self.ToolName = 'EOT'
        self.message = Str
        self.FileName = File
        self.LineNumber = Line
| edk2-master | BaseTools/Source/Python/Eot/ParserWarning.py |
## @file
# preprocess source file
#
# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import re
import Common.LongFilePathOs as os
import sys
if sys.version_info.major == 3:
import antlr4 as antlr
from Eot.CParser4.CLexer import CLexer
from Eot.CParser4.CParser import CParser
else:
import antlr3 as antlr
antlr.InputStream = antlr.StringStream
from Eot.CParser3.CLexer import CLexer
from Eot.CParser3.CParser import CParser
from Eot import FileProfile
from Eot.CodeFragment import PP_Directive
from Eot.ParserWarning import Warning
##define T_CHAR_SPACE ' '
##define T_CHAR_NULL '\0'
##define T_CHAR_CR '\r'
##define T_CHAR_TAB '\t'
##define T_CHAR_LF '\n'
##define T_CHAR_SLASH '/'
##define T_CHAR_BACKSLASH '\\'
##define T_CHAR_DOUBLE_QUOTE '\"'
##define T_CHAR_SINGLE_QUOTE '\''
##define T_CHAR_STAR '*'
##define T_CHAR_HASH '#'
# Single-character token constants used throughout the scanner
# (these mirror the '#define'-style comments above)
(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')
# Characters that act as value separators while scanning
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')
# Comment style tags
(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)
# Preprocessor directive categories
(T_PP_INCLUDE, T_PP_DEFINE, T_PP_OTHERS) = (0, 1, 2)
## The collector for source code fragments.
#
# PreprocessFile method should be called prior to ParseFile
#
# GetNext*** procedures mean these procedures will get next token first, then make judgement.
# Get*** procedures mean these procedures will make judgement on current token only.
#
class CodeFragmentCollector:
    """Collects source-code fragments (PP directives, comments) from a C file.

    PreprocessFileWithClear should be called prior to ParseFile.
    """
    ## The constructor
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.Profile = FileProfile.FileProfile(FileName)
        self.Profile.FileLinesList.append(T_CHAR_LF)
        self.FileName = FileName
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0
        self.__Token = ""
        self.__SkippedChars = ""

    ## __EndOfFile() method
    #
    # Judge current buffer pos is at file end
    #
    # @param self The object pointer
    # @retval True Current File buffer position is at file end
    # @retval False Current File buffer position is NOT at file end
    #
    def __EndOfFile(self):
        NumberOfLines = len(self.Profile.FileLinesList)
        SizeOfLastLine = len(self.Profile.FileLinesList[-1])
        if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
            return True
        elif self.CurrentLineNumber > NumberOfLines:
            return True
        else:
            return False

    ## __EndOfLine() method
    #
    # Judge current buffer pos is at line end
    #
    # @param self The object pointer
    # @retval True Current File buffer position is at line end
    # @retval False Current File buffer position is NOT at line end
    #
    def __EndOfLine(self):
        SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
        if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
            return True
        else:
            return False

    ## Rewind() method
    #
    # Reset file data buffer to the initial state
    #
    # @param self The object pointer
    #
    def Rewind(self):
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0

    ## __UndoOneChar() method
    #
    # Go back one char in the file buffer
    #
    # @param self The object pointer
    # @retval True Successfully go back one char
    # @retval False Not able to go back one char as file beginning reached
    #
    def __UndoOneChar(self):
        if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
            return False
        elif self.CurrentOffsetWithinLine == 0:
            self.CurrentLineNumber -= 1
            self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
        else:
            self.CurrentOffsetWithinLine -= 1
        return True

    ## __GetOneChar() method
    #
    # Move forward one char in the file buffer
    #
    # @param self The object pointer
    #
    def __GetOneChar(self):
        if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
            self.CurrentLineNumber += 1
            self.CurrentOffsetWithinLine = 0
        else:
            self.CurrentOffsetWithinLine += 1

    ## __CurrentChar() method
    #
    # Get the char pointed to by the file buffer pointer
    #
    # @param self The object pointer
    # @retval Char Current char
    #
    def __CurrentChar(self):
        CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
        return CurrentChar

    ## __NextChar() method
    #
    # Get the one char pass the char pointed to by the file buffer pointer
    #
    # @param self The object pointer
    # @retval Char Next char
    #
    def __NextChar(self):
        if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
            return self.Profile.FileLinesList[self.CurrentLineNumber][0]
        else:
            return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]

    ## __SetCurrentCharValue() method
    #
    # Modify the value of current char
    #
    # @param self The object pointer
    # @param Value The new value of current char
    #
    def __SetCurrentCharValue(self, Value):
        self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value

    ## __SetCharValue() method
    #
    # Modify the value of the char at the given position
    #
    # @param self The object pointer
    # @param Line The line of the char to modify (1-based)
    # @param Offset The offset of the char within the line (0-based)
    # @param Value The new value of the char
    #
    def __SetCharValue(self, Line, Offset, Value):
        self.Profile.FileLinesList[Line - 1][Offset] = Value

    ## __CurrentLine() method
    #
    # Get the list that contains current line contents
    #
    # @param self The object pointer
    # @retval List current line contents
    #
    def __CurrentLine(self):
        return self.Profile.FileLinesList[self.CurrentLineNumber - 1]

    ## __InsertComma() method
    #
    # Insert ',' to replace PP
    #
    # @param self The object pointer
    # @param Line The line to inspect (1-based)
    #
    def __InsertComma(self, Line):
        if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
            BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
            # fix: the original referenced T_CHAR_COMMA, which is never defined
            # in this module and raised NameError when this branch executed
            if BeforeHashPart.rstrip().endswith(',') or BeforeHashPart.rstrip().endswith(';'):
                return
        if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
            return
        if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
            return
        if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
            return
        self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')

    ## PreprocessFileWithClear() method
    #
    # Run a preprocess for the file to clean all comments
    #
    # @param self The object pointer
    #
    def PreprocessFileWithClear(self):
        self.Rewind()
        InComment = False
        DoubleSlashComment = False
        HashComment = False
        PPExtend = False
        PPDirectiveObj = None
        # HashComment in quoted string " " is ignored.
        InString = False
        InCharLiteral = False
        self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
        while not self.__EndOfFile():
            if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
                InString = not InString
            if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
                InCharLiteral = not InCharLiteral
            # meet new line, then no longer in a comment for // and '#'
            if self.__CurrentChar() == T_CHAR_LF:
                if HashComment and PPDirectiveObj is not None:
                    # a backslash-continued PP directive extends to the next line
                    if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
                        PPDirectiveObj.Content += T_CHAR_LF
                        PPExtend = True
                    else:
                        PPExtend = False
                EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
                if InComment and DoubleSlashComment:
                    InComment = False
                    DoubleSlashComment = False
                if InComment and HashComment and not PPExtend:
                    InComment = False
                    HashComment = False
                    PPDirectiveObj.Content += T_CHAR_LF
                    PPDirectiveObj.EndPos = EndLinePos
                    FileProfile.PPDirectiveList.append(PPDirectiveObj)
                    PPDirectiveObj = None
                if InString or InCharLiteral:
                    # erase a line-continuation backslash inside a literal
                    CurrentLine = "".join(self.__CurrentLine())
                    if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
                        SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
                        self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
                self.CurrentLineNumber += 1
                self.CurrentOffsetWithinLine = 0
            # check for */ comment end
            elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = False
            # set comments to spaces
            elif InComment:
                if HashComment:
                    # // follows hash PP directive
                    if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
                        InComment = False
                        HashComment = False
                        PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
                        FileProfile.PPDirectiveList.append(PPDirectiveObj)
                        PPDirectiveObj = None
                        continue
                    else:
                        PPDirectiveObj.Content += self.__CurrentChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
            # check for // comment
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
                InComment = True
                DoubleSlashComment = True
            # check for '#' comment
            elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
                InComment = True
                HashComment = True
                PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
            # check for /* comment start
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = True
            else:
                self.__GetOneChar()
        # flush a PP directive still open when EOF is reached
        EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
        if InComment and HashComment and not PPExtend:
            PPDirectiveObj.EndPos = EndLinePos
            FileProfile.PPDirectiveList.append(PPDirectiveObj)
        self.Rewind()

    ## ParseFile() method
    #
    # Parse the file profile buffer to extract fd, fv ... information
    # Exception will be raised if syntax error found
    #
    # @param self The object pointer
    #
    def ParseFile(self):
        self.PreprocessFileWithClear()
        # restore from ListOfList to ListOfString
        # (loop variable renamed: the original shadowed the builtin 'list')
        self.Profile.FileLinesList = ["".join(Line) for Line in self.Profile.FileLinesList]
        FileStringContents = ''
        for fileLine in self.Profile.FileLinesList:
            FileStringContents += fileLine
        cStream = antlr.InputStream(FileStringContents)
        lexer = CLexer(cStream)
        tStream = antlr.CommonTokenStream(lexer)
        parser = CParser(tStream)
        parser.translation_unit()

    ## CleanFileProfileBuffer() method
    #
    # Reset all contents of the profile of a file
    #
    def CleanFileProfileBuffer(self):
        FileProfile.PPDirectiveList = []
        FileProfile.AssignmentExpressionList = []
        FileProfile.FunctionDefinitionList = []
        FileProfile.VariableDeclarationList = []
        FileProfile.EnumerationDefinitionList = []
        FileProfile.StructUnionDefinitionList = []
        FileProfile.TypedefDefinitionList = []
        FileProfile.FunctionCallingList = []

    ## PrintFragments() method
    #
    # Print the contents of the profile of a file
    #
    def PrintFragments(self):
        print('################# ' + self.FileName + '#####################')
        print('/****************************************/')
        print('/************** ASSIGNMENTS *************/')
        print('/****************************************/')
        for assign in FileProfile.AssignmentExpressionList:
            print(str(assign.StartPos) + assign.Name + assign.Operator + assign.Value)
        print('/****************************************/')
        print('/********* PREPROCESS DIRECTIVES ********/')
        print('/****************************************/')
        for pp in FileProfile.PPDirectiveList:
            print(str(pp.StartPos) + pp.Content)
        print('/****************************************/')
        print('/********* VARIABLE DECLARATIONS ********/')
        print('/****************************************/')
        for var in FileProfile.VariableDeclarationList:
            print(str(var.StartPos) + var.Modifier + ' '+ var.Declarator)
        print('/****************************************/')
        print('/********* FUNCTION DEFINITIONS *********/')
        print('/****************************************/')
        for func in FileProfile.FunctionDefinitionList:
            print(str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos))
        print('/****************************************/')
        print('/************ ENUMERATIONS **************/')
        print('/****************************************/')
        for enum in FileProfile.EnumerationDefinitionList:
            print(str(enum.StartPos) + enum.Content)
        print('/****************************************/')
        print('/*********** STRUCTS/UNIONS *************/')
        print('/****************************************/')
        for su in FileProfile.StructUnionDefinitionList:
            print(str(su.StartPos) + su.Content)
        print('/****************************************/')
        print('/************** TYPEDEFS ****************/')
        print('/****************************************/')
        for typedef in FileProfile.TypedefDefinitionList:
            print(str(typedef.StartPos) + typedef.ToType)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == "__main__":
print("For Test.")
| edk2-master | BaseTools/Source/Python/Eot/CodeFragmentCollector.py |
## @file
# This file is used to be the main entrance of EOT tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os, time, glob
import Common.EdkLogger as EdkLogger
import Eot.EotGlobalData as EotGlobalData
from optparse import OptionParser
from Common.StringUtils import NormPath
from Common import BuildToolError
from Common.Misc import GuidStructureStringToGuidString
from collections import OrderedDict as sdict
from Eot.Parser import *
from Eot.InfParserLite import EdkInfParser
from Common.StringUtils import GetSplitValueList
from Eot import c
from Eot import Database
from array import array
from Eot.Report import Report
from Common.BuildVersion import gBUILD_VERSION
from Eot.Parser import ConvertGuid
from Common.LongFilePathSupport import OpenLongFilePath as open
import struct
import uuid
import copy
import codecs
from GenFds.AprioriSection import DXE_APRIORI_GUID, PEI_APRIORI_GUID
# printf-style format that renders a 16-byte GUID value tuple as the
# canonical "8-4-4-4-12" registry string
gGuidStringFormat = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X"
# shared indentation counter used by the __str__ dumpers below; starts at -4
# so the first increment prints at column 0
gIndention = -4
class Image(array):
    """Base class for firmware-image pieces: a byte array ('B' typecode) plus
    buffer bookkeeping and a header/data split described by _HEADER_."""
    # subclasses override with the struct describing their fixed header
    _HEADER_ = struct.Struct("")
    _HEADER_SIZE_ = _HEADER_.size

    def __new__(cls, *args, **kwargs):
        # always an unsigned-byte array regardless of constructor arguments
        return array.__new__(cls, 'B')

    def __init__(self, ID=None):
        """Initialize bookkeeping; a random uppercase UUID is used when no ID
        is supplied."""
        if ID is None:
            self._ID_ = str(uuid.uuid1()).upper()
        else:
            self._ID_ = ID
        self._BUF_ = None
        self._LEN_ = None
        self._OFF_ = None
        self._SubImages = sdict() # {offset: Image()}
        array.__init__(self)

    def __repr__(self):
        return self._ID_

    def __len__(self):
        # total length includes all nested sub-images
        Len = array.__len__(self)
        for Offset in self._SubImages.keys():
            Len += len(self._SubImages[Offset])
        return Len

    def _Unpack(self):
        """Copy _LEN_ bytes from the backing buffer into self; subclasses
        override to parse their structure."""
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _Pack(self, PadByte=0xFF):
        # serialization is subclass-specific
        raise NotImplementedError

    def frombuffer(self, Buffer, Offset=0, Size=None):
        """Load this image from Buffer starting at Offset; Size may be None
        when _Unpack can determine the length itself."""
        self._BUF_ = Buffer
        self._OFF_ = Offset
        # we may need the Size information in advance if it's given
        self._LEN_ = Size
        self._LEN_ = self._Unpack()

    def empty(self):
        # discard all bytes held by this array
        del self[0:]

    def GetField(self, FieldStruct, Offset=0):
        """Unpack FieldStruct from this image at Offset; returns a tuple."""
        return FieldStruct.unpack_from(self, Offset)

    def SetField(self, FieldStruct, Offset, *args):
        """Pack *args into this image at Offset, growing the array if needed."""
        # check if there's enough space
        Size = FieldStruct.size
        if Size > len(self):
            self.extend([0] * (Size - len(self)))
        FieldStruct.pack_into(self, Offset, *args)

    def _SetData(self, Data):
        # keep (or zero-pad) the header, replace everything after it
        if len(self) < self._HEADER_SIZE_:
            self.extend([0] * (self._HEADER_SIZE_ - len(self)))
        else:
            del self[self._HEADER_SIZE_:]
        self.extend(Data)

    def _GetData(self):
        # bytes after the fixed header, or None when only the header is present
        if len(self) > self._HEADER_SIZE_:
            return self[self._HEADER_SIZE_:]
        return None

    Data = property(_GetData, _SetData)
## CompressedImage() class
#
# A class for Compressed Image
#
class CompressedImage(Image):
    """A compressed firmware section: 4-byte uncompressed length followed by a
    1-byte compression type, then the compressed payload."""
    # UncompressedLength = 4-byte
    # CompressionType = 1-byte
    _HEADER_ = struct.Struct("1I 1B")
    _HEADER_SIZE_ = _HEADER_.size

    _ORIG_SIZE_ = struct.Struct("1I")
    _CMPRS_TYPE_ = struct.Struct("4x 1B")

    def __init__(self, CompressedData=None, CompressionType=None, UncompressedLength=None):
        Image.__init__(self)
        if UncompressedLength is not None:
            self.UncompressedLength = UncompressedLength
        if CompressionType is not None:
            self.CompressionType = CompressionType
        if CompressedData is not None:
            self.Data = CompressedData

    def __str__(self):
        global gIndention
        S = "algorithm=%s uncompressed=%x" % (self.CompressionType, self.UncompressedLength)
        for Sec in self.Sections:
            S += '\n' + str(Sec)
        return S

    def _SetOriginalSize(self, Size):
        self.SetField(self._ORIG_SIZE_, 0, Size)

    def _GetOriginalSize(self):
        return self.GetField(self._ORIG_SIZE_)[0]

    def _SetCompressionType(self, Type):
        self.SetField(self._CMPRS_TYPE_, 0, Type)

    def _GetCompressionType(self):
        return self.GetField(self._CMPRS_TYPE_)[0]

    def _GetSections(self):
        """Decompress the payload (trying 'Efi' then 'Framework' decoding)
        and parse the result into a list of Section objects."""
        try:
            TmpData = DeCompress('Efi', self[self._HEADER_SIZE_:])
            DecData = array('B')
            # fix: array.fromstring() was removed in Python 3.9;
            # frombytes() is the drop-in replacement
            DecData.frombytes(TmpData)
        except Exception:
            # narrowed from a bare 'except:' so Ctrl-C still aborts
            TmpData = DeCompress('Framework', self[self._HEADER_SIZE_:])
            DecData = array('B')
            DecData.frombytes(TmpData)
        SectionList = []
        Offset = 0
        while Offset < len(DecData):
            Sec = Section()
            try:
                Sec.frombuffer(DecData, Offset)
                Offset += Sec.Size
                # the section is aligned to 4-byte boundary
            except Exception:
                break
            SectionList.append(Sec)
        return SectionList

    UncompressedLength = property(_GetOriginalSize, _SetOriginalSize)
    CompressionType = property(_GetCompressionType, _SetCompressionType)
    Sections = property(_GetSections)
## Ui() class
#
# A class for Ui
#
class Ui(Image):
    """A USER_INTERFACE section body: a UTF-16 string with a 2-byte NUL
    terminator."""
    _HEADER_ = struct.Struct("")
    _HEADER_SIZE_ = 0

    def __init__(self):
        Image.__init__(self)

    def __str__(self):
        return self.String

    def _Unpack(self):
        # keep the whole section body in this Image object (no header)
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _GetUiString(self):
        """Decode the UI string, dropping the trailing 2-byte NUL."""
        # BUGFIX: array.tostring() was removed in Python 3.9; tobytes() is
        # the direct, behavior-identical replacement.
        return codecs.utf_16_decode(self[0:-2].tobytes())[0]

    String = property(_GetUiString)
## Depex() class
#
# A class for Depex
#
class Depex(Image):
    """Parser for a DEPEX (dependency expression) section body.

    The body is a postfix byte stream of 1-byte opcodes; BEFORE/AFTER/PUSH
    are followed by a 16-byte GUID operand.  The Expression property decodes
    the stream into a flat token list (opcode ints and GUID tuples).
    """
    _HEADER_ = struct.Struct("")
    _HEADER_SIZE_ = 0

    # 16-byte GUID operand layout
    _GUID_ = struct.Struct("1I2H8B")
    _OPCODE_ = struct.Struct("1B")

    # opcode value -> mnemonic
    _OPCODE_STRING_ = {
        0x00 : "BEFORE",
        0x01 : "AFTER",
        0x02 : "PUSH",
        0x03 : "AND",
        0x04 : "OR",
        0x05 : "NOT",
        0x06 : "TRUE",
        0x07 : "FALSE",
        0x08 : "END",
        0x09 : "SOR"
    }

    # opcode value -> struct of the item that FOLLOWS it in the stream
    # (None means END: nothing follows, parsing stops)
    _NEXT_ = {
        -1   : _OPCODE_,        # first one in depex must be an opcode
        0x00 : _GUID_,          # "BEFORE"
        0x01 : _GUID_,          # "AFTER"
        0x02 : _GUID_,          # "PUSH"
        0x03 : _OPCODE_,        # "AND"
        0x04 : _OPCODE_,        # "OR"
        0x05 : _OPCODE_,        # "NOT"
        0x06 : _OPCODE_,        # "TRUE"
        0x07 : _OPCODE_,        # "FALSE"
        0x08 : None,            # "END"
        0x09 : _OPCODE_         # "SOR"
    }

    def __init__(self):
        Image.__init__(self)
        # decoded token cache, filled lazily by _GetExpression()
        self._ExprList = []

    def __str__(self):
        global gIndention
        gIndention += 4
        Indention = ' ' * gIndention
        S = '\n'
        for T in self.Expression:
            if T in self._OPCODE_STRING_:
                S += Indention + self._OPCODE_STRING_[T]
                # BEFORE/AFTER/PUSH keep the line open: the GUID operand
                # token is printed on the same line by the else branch
                if T not in [0x00, 0x01, 0x02]:
                    S += '\n'
            else:
                S += ' ' + gGuidStringFormat % T + '\n'
        gIndention -= 4
        return S

    def _Unpack(self):
        # keep the whole section body in this Image object (no header)
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _GetExpression(self):
        """Decode the byte stream into self._ExprList (cached).

        Each decoded item is either an opcode int or an 11-element GUID
        tuple; _NEXT_ drives what kind of item to expect next.
        """
        if self._ExprList == []:
            Offset = 0
            CurrentData = self._OPCODE_
            while Offset < len(self):
                Token = CurrentData.unpack_from(self, Offset)
                Offset += CurrentData.size
                if len(Token) == 1:
                    # single byte -> opcode; look up what follows it
                    Token = Token[0]
                    if Token in self._NEXT_:
                        CurrentData = self._NEXT_[Token]
                    else:
                        CurrentData = self._GUID_
                else:
                    # a GUID operand was just read; an opcode must follow
                    CurrentData = self._OPCODE_
                self._ExprList.append(Token)
                # END opcode (0x08) maps to None: stop parsing
                if CurrentData is None:
                    break
        return self._ExprList

    Expression = property(_GetExpression)
## FirmwareVolume() class
#
# A class for Firmware Volume
#
class FirmwareVolume(Image):
    """Parser/model of a Firmware Volume (FV) binary image.

    _Unpack() walks the FFS files contained in the FV; Dispatch() then
    simulates the PEI/DXE dispatch order by repeatedly evaluating each
    module's dependency expression against the PPIs/protocols produced so
    far (looked up through the EOT report database).
    """
    # Read FvLength, Attributes, HeaderLength, Checksum
    _HEADER_ = struct.Struct("16x 1I2H8B 1Q 4x 1I 1H 1H")
    _HEADER_SIZE_ = _HEADER_.size

    # FFS file-system GUID expected in an FV header
    _FfsGuid = "8C8CE578-8A3D-4F1C-9935-896185C32DD3"

    # field accessors into the FV header (the "Nx" pads skip earlier fields)
    _GUID_ = struct.Struct("16x 1I2H8B")
    _LENGTH_ = struct.Struct("16x 16x 1Q")
    _SIG_ = struct.Struct("16x 16x 8x 1I")
    _ATTR_ = struct.Struct("16x 16x 8x 4x 1I")
    _HLEN_ = struct.Struct("16x 16x 8x 4x 4x 1H")
    _CHECKSUM_ = struct.Struct("16x 16x 8x 4x 4x 2x 1H")

    def __init__(self, Name=''):
        Image.__init__(self)
        self.Name = Name
        # GUID string -> Ffs, in on-media order
        self.FfsDict = sdict()
        # FFS files in simulated dispatch order
        self.OrderedFfsDict = sdict()
        # FFS files whose depex has not been satisfied yet
        self.UnDispatchedFfsDict = sdict()
        self.ProtocolList = sdict()

    def CheckArchProtocol(self):
        """Return True when every DXE architectural protocol is installed."""
        for Item in EotGlobalData.gArchProtocolGuids:
            if Item.lower() not in EotGlobalData.gProtocolList:
                return False
        return True

    def ParseDepex(self, Depex, Type):
        """Evaluate a decoded Depex against the installed Ppi/Protocol list.

        Type selects the lookup table ('Ppi' or 'Protocol').  Returns a
        tuple (CouldBeLoaded, DepexString, FileDepex) where FileDepex is
        only set for the BEFORE/AFTER shortcut forms.
        """
        List = None
        if Type == 'Ppi':
            List = EotGlobalData.gPpiList
        if Type == 'Protocol':
            List = EotGlobalData.gProtocolList
        DepexStack = []
        DepexList = []
        DepexString = ''
        FileDepex = None

        CouldBeLoaded = True
        for Index in range(0, len(Depex.Expression)):
            Item = Depex.Expression[Index]
            if Item == 0x00:
                # BEFORE <guid> END
                Index = Index + 1
                Guid = gGuidStringFormat % Depex.Expression[Index]
                if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
                    return (True, 'BEFORE %s' % Guid, [Guid, 'BEFORE'])
            elif Item == 0x01:
                # AFTER <guid> END
                Index = Index + 1
                Guid = gGuidStringFormat % Depex.Expression[Index]
                if Guid in self.OrderedFfsDict and Depex.Expression[Index + 1] == 0x08:
                    return (True, 'AFTER %s' % Guid, [Guid, 'AFTER'])
            elif Item == 0x02:
                # PUSH <guid>: push whether the GUID is installed
                Index = Index + 1
                Guid = gGuidStringFormat % Depex.Expression[Index]
                if Guid.lower() in List:
                    DepexStack.append(True)
                    DepexList.append(Guid)
                else:
                    DepexStack.append(False)
                    DepexList.append(Guid)
                continue
            elif Item == 0x03 or Item == 0x04:
                # AND / OR: combine the two topmost operands.
                # NOTE(review): combination is done by eval() on strings like
                # "True and False" -- inputs are internal booleans, not
                # untrusted data, but confirm before reusing elsewhere.
                DepexStack.append(eval(str(DepexStack.pop()) + ' ' + Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
                DepexList.append(str(DepexList.pop()) + ' ' + Depex._OPCODE_STRING_[Item].upper() + ' ' + str(DepexList.pop()))
            elif Item == 0x05:
                # NOT: invert the top operand
                DepexStack.append(eval(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexStack.pop())))
                DepexList.append(Depex._OPCODE_STRING_[Item].lower() + ' ' + str(DepexList.pop()))
            elif Item == 0x06:
                DepexStack.append(True)
                DepexList.append('TRUE')
                DepexString = DepexString + 'TRUE' + ' '
            elif Item == 0x07:
                DepexStack.append(False)
                DepexList.append('False')
                DepexString = DepexString + 'FALSE' + ' '
            elif Item == 0x08:
                # END must be the last token; the final stack value is the
                # overall result
                if Index != len(Depex.Expression) - 1:
                    CouldBeLoaded = False
                else:
                    CouldBeLoaded = DepexStack.pop()
            else:
                CouldBeLoaded = False
        if DepexList != []:
            DepexString = DepexList[0].strip()
        return (CouldBeLoaded, DepexString, FileDepex)

    def Dispatch(self, Db=None):
        """Simulate SEC -> PEI -> DXE dispatch over the parsed FFS files.

        Db is the EOT report database used to resolve produced PPIs and
        protocols; returns False when no database is supplied.
        """
        if Db is None:
            return False
        self.UnDispatchedFfsDict = copy.copy(self.FfsDict)
        # Find PeiCore, DxeCore, PeiPriori, DxePriori first
        FfsSecCoreGuid = None
        FfsPeiCoreGuid = None
        FfsDxeCoreGuid = None
        FfsPeiPrioriGuid = None
        FfsDxePrioriGuid = None
        for FfsID in list(self.UnDispatchedFfsDict.keys()):
            Ffs = self.UnDispatchedFfsDict[FfsID]
            if Ffs.Type == 0x03:
                FfsSecCoreGuid = FfsID
                continue
            if Ffs.Type == 0x04:
                FfsPeiCoreGuid = FfsID
                continue
            if Ffs.Type == 0x05:
                FfsDxeCoreGuid = FfsID
                continue
            if Ffs.Guid.lower() == PEI_APRIORI_GUID.lower():
                FfsPeiPrioriGuid = FfsID
                continue
            if Ffs.Guid.lower() == DXE_APRIORI_GUID.lower():
                FfsDxePrioriGuid = FfsID
                continue

        # Parse SEC_CORE first
        if FfsSecCoreGuid is not None:
            self.OrderedFfsDict[FfsSecCoreGuid] = self.UnDispatchedFfsDict.pop(FfsSecCoreGuid)
            self.LoadPpi(Db, FfsSecCoreGuid)

        # Parse PEI first
        if FfsPeiCoreGuid is not None:
            self.OrderedFfsDict[FfsPeiCoreGuid] = self.UnDispatchedFfsDict.pop(FfsPeiCoreGuid)
            self.LoadPpi(Db, FfsPeiCoreGuid)
            if FfsPeiPrioriGuid is not None:
                # Load PEIM described in priori file
                FfsPeiPriori = self.UnDispatchedFfsDict.pop(FfsPeiPrioriGuid)
                if len(FfsPeiPriori.Sections) == 1:
                    Section = FfsPeiPriori.Sections.popitem()[1]
                    if Section.Type == 0x19:
                        # RAW section: a flat array of 16-byte GUIDs after a
                        # 4-byte section header
                        GuidStruct = struct.Struct('1I2H8B')
                        Start = 4
                        while len(Section) > Start:
                            Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
                            GuidString = gGuidStringFormat % Guid
                            Start = Start + 16
                            if GuidString in self.UnDispatchedFfsDict:
                                self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
                                self.LoadPpi(Db, GuidString)

        self.DisPatchPei(Db)

        # Parse DXE then
        if FfsDxeCoreGuid is not None:
            self.OrderedFfsDict[FfsDxeCoreGuid] = self.UnDispatchedFfsDict.pop(FfsDxeCoreGuid)
            self.LoadProtocol(Db, FfsDxeCoreGuid)
            if FfsDxePrioriGuid is not None:
                # Load drivers described in priori file
                FfsDxePriori = self.UnDispatchedFfsDict.pop(FfsDxePrioriGuid)
                if len(FfsDxePriori.Sections) == 1:
                    Section = FfsDxePriori.Sections.popitem()[1]
                    if Section.Type == 0x19:
                        GuidStruct = struct.Struct('1I2H8B')
                        Start = 4
                        while len(Section) > Start:
                            Guid = GuidStruct.unpack_from(Section[Start : Start + 16])
                            GuidString = gGuidStringFormat % Guid
                            Start = Start + 16
                            if GuidString in self.UnDispatchedFfsDict:
                                self.OrderedFfsDict[GuidString] = self.UnDispatchedFfsDict.pop(GuidString)
                                self.LoadProtocol(Db, GuidString)

        self.DisPatchDxe(Db)

    def LoadProtocol(self, Db, ModuleGuid):
        """Record every protocol produced by the module as installed."""
        SqlCommand = """select GuidValue from Report
                        where SourceFileFullPath in
                        (select Value1 from Inf where BelongsToFile =
                        (select BelongsToFile from Inf
                        where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
                        and Model = %s)
                        and ItemType = 'Protocol' and ItemMode = 'Produced'""" \
                        % (ModuleGuid, 5001, 3007)
        RecordSet = Db.TblReport.Exec(SqlCommand)
        for Record in RecordSet:
            SqlCommand = """select Value2 from Inf where BelongsToFile =
                            (select DISTINCT BelongsToFile from Inf
                            where Value1 =
                            (select SourceFileFullPath from Report
                            where GuidValue like '%s' and ItemMode = 'Callback'))
                            and Value1 = 'FILE_GUID'""" % Record[0]
            CallBackSet = Db.TblReport.Exec(SqlCommand)
            # NOTE(review): both branches below are identical -- the callback
            # lookup above currently has no effect on the result; confirm the
            # intended distinction before simplifying.
            if CallBackSet != []:
                EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid
            else:
                EotGlobalData.gProtocolList[Record[0].lower()] = ModuleGuid

    def LoadPpi(self, Db, ModuleGuid):
        """Record every PPI produced by the module as installed."""
        SqlCommand = """select GuidValue from Report
                        where SourceFileFullPath in
                        (select Value1 from Inf where BelongsToFile =
                        (select BelongsToFile from Inf
                        where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
                        and Model = %s)
                        and ItemType = 'Ppi' and ItemMode = 'Produced'""" \
                        % (ModuleGuid, 5001, 3007)
        RecordSet = Db.TblReport.Exec(SqlCommand)
        for Record in RecordSet:
            EotGlobalData.gPpiList[Record[0].lower()] = ModuleGuid

    def DisPatchDxe(self, Db):
        """One DXE dispatch pass; recurses while progress is being made."""
        IsInstalled = False
        ScheduleList = sdict()
        for FfsID in list(self.UnDispatchedFfsDict.keys()):
            CouldBeLoaded = False
            DepexString = ''
            FileDepex = None
            Ffs = self.UnDispatchedFfsDict[FfsID]
            if Ffs.Type == 0x07:
                # DRIVER file: locate its DXE depex, possibly nested inside
                # compressed (0x01) / GUID-defined (0x02) sections
                IsFoundDepex = False
                for Section in Ffs.Sections.values():
                    # Find Depex
                    if Section.Type == 0x13:
                        IsFoundDepex = True
                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Protocol')
                        break
                    if Section.Type == 0x01:
                        CompressSections = Section._SubImages[4]
                        for CompressSection in CompressSections.Sections:
                            if CompressSection.Type == 0x13:
                                IsFoundDepex = True
                                CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Protocol')
                                break
                            if CompressSection.Type == 0x02:
                                NewSections = CompressSection._SubImages[4]
                                for NewSection in NewSections.Sections:
                                    if NewSection.Type == 0x13:
                                        IsFoundDepex = True
                                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Protocol')
                                        break

                # No depex found: loadable once all arch protocols exist
                if not IsFoundDepex:
                    CouldBeLoaded = self.CheckArchProtocol()
                    DepexString = ''
                    FileDepex = None

                # Append New Ffs
                if CouldBeLoaded:
                    IsInstalled = True
                    NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
                    NewFfs.Depex = DepexString
                    if FileDepex is not None:
                        # BEFORE/AFTER: insert at a specific position
                        ScheduleList.insert(FileDepex[1], FfsID, NewFfs, FileDepex[0])
                    else:
                        ScheduleList[FfsID] = NewFfs
                else:
                    self.UnDispatchedFfsDict[FfsID].Depex = DepexString

        for FfsID in ScheduleList.keys():
            NewFfs = ScheduleList.pop(FfsID)
            FfsName = 'UnKnown'
            self.OrderedFfsDict[FfsID] = NewFfs
            self.LoadProtocol(Db, FfsID)
            # NOTE(review): FfsName/RecordSet below are computed but never
            # used after this loop iteration -- possibly leftover debug code.
            SqlCommand = """select Value2 from Inf
                            where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
                            and Model = %s and Value1='BASE_NAME'""" % (FfsID, 5001, 5001)
            RecordSet = Db.TblReport.Exec(SqlCommand)
            if RecordSet != []:
                FfsName = RecordSet[0][0]

        if IsInstalled:
            self.DisPatchDxe(Db)

    def DisPatchPei(self, Db):
        """One PEI dispatch pass; recurses while progress is being made."""
        IsInstalled = False
        for FfsID in list(self.UnDispatchedFfsDict.keys()):
            CouldBeLoaded = True
            DepexString = ''
            FileDepex = None
            Ffs = self.UnDispatchedFfsDict[FfsID]
            if Ffs.Type == 0x06 or Ffs.Type == 0x08:
                # PEIM or combined PEIM/driver: locate its PEI depex (0x1B),
                # possibly nested inside compressed/GUID-defined sections
                for Section in Ffs.Sections.values():
                    if Section.Type == 0x1B:
                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(Section._SubImages[4], 'Ppi')
                        break
                    if Section.Type == 0x01:
                        CompressSections = Section._SubImages[4]
                        for CompressSection in CompressSections.Sections:
                            if CompressSection.Type == 0x1B:
                                CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(CompressSection._SubImages[4], 'Ppi')
                                break
                            if CompressSection.Type == 0x02:
                                NewSections = CompressSection._SubImages[4]
                                for NewSection in NewSections.Sections:
                                    if NewSection.Type == 0x1B:
                                        CouldBeLoaded, DepexString, FileDepex = self.ParseDepex(NewSection._SubImages[4], 'Ppi')
                                        break

                # Append New Ffs
                if CouldBeLoaded:
                    IsInstalled = True
                    NewFfs = self.UnDispatchedFfsDict.pop(FfsID)
                    NewFfs.Depex = DepexString
                    self.OrderedFfsDict[FfsID] = NewFfs
                    self.LoadPpi(Db, FfsID)
                else:
                    self.UnDispatchedFfsDict[FfsID].Depex = DepexString

        if IsInstalled:
            self.DisPatchPei(Db)

    def __str__(self):
        global gIndention
        gIndention += 4
        FvInfo = '\n' + ' ' * gIndention
        FvInfo += "[FV:%s] file_system=%s size=%x checksum=%s\n" % (self.Name, self.FileSystemGuid, self.Size, self.Checksum)
        FfsInfo = "\n".join([str(self.FfsDict[FfsId]) for FfsId in self.FfsDict])
        gIndention -= 4
        return FvInfo + FfsInfo

    def _Unpack(self):
        """Copy the FV bytes into self and parse every contained FFS file."""
        Size = self._LENGTH_.unpack_from(self._BUF_, self._OFF_)[0]
        self.empty()
        self.extend(self._BUF_[self._OFF_:self._OFF_ + Size])

        # traverse the FFS
        EndOfFv = Size
        FfsStartAddress = self.HeaderSize
        LastFfsObj = None
        while FfsStartAddress < EndOfFv:
            FfsObj = Ffs()
            FfsObj.frombuffer(self, FfsStartAddress)
            FfsId = repr(FfsObj)
            # a length of 0xFFFFFF (erase polarity 1) or 0 (polarity 0)
            # marks free space at the end of the FV
            if ((self.Attributes & 0x00000800) != 0 and len(FfsObj) == 0xFFFFFF) \
                or ((self.Attributes & 0x00000800) == 0 and len(FfsObj) == 0):
                if LastFfsObj is not None:
                    LastFfsObj.FreeSpace = EndOfFv - LastFfsObj._OFF_ - len(LastFfsObj)
            else:
                if FfsId in self.FfsDict:
                    EdkLogger.error("FV", 0, "Duplicate GUID in FFS",
                                    ExtraData="\t%s @ %s\n\t%s @ %s" \
                                    % (FfsObj.Guid, FfsObj.Offset,
                                       self.FfsDict[FfsId].Guid, self.FfsDict[FfsId].Offset))
                self.FfsDict[FfsId] = FfsObj
                if LastFfsObj is not None:
                    LastFfsObj.FreeSpace = FfsStartAddress - LastFfsObj._OFF_ - len(LastFfsObj)

            FfsStartAddress += len(FfsObj)
            #
            # align to next 8-byte aligned address: A = (A + 8 - 1) & (~(8 - 1))
            # The next FFS must be at the latest next 8-byte aligned address
            #
            FfsStartAddress = (FfsStartAddress + 7) & (~7)
            LastFfsObj = FfsObj

    def _GetAttributes(self):
        return self.GetField(self._ATTR_, 0)[0]

    def _GetSize(self):
        return self.GetField(self._LENGTH_, 0)[0]

    def _GetChecksum(self):
        return self.GetField(self._CHECKSUM_, 0)[0]

    def _GetHeaderLength(self):
        return self.GetField(self._HLEN_, 0)[0]

    def _GetFileSystemGuid(self):
        return gGuidStringFormat % self.GetField(self._GUID_, 0)

    Attributes = property(_GetAttributes)
    Size = property(_GetSize)
    Checksum = property(_GetChecksum)
    HeaderSize = property(_GetHeaderLength)
    FileSystemGuid = property(_GetFileSystemGuid)
## GuidDefinedImage() class
#
# A class for GUID Defined Image
#
class GuidDefinedImage(Image):
    """A GUID-defined section.

    Header: EFI_GUID (section-definition GUID), 2-byte DataOffset, 2-byte
    Attributes.  The Sections property decodes the payload according to the
    GUID: CRC32-wrapped, Tiano-compressed, or LZMA-compressed.
    """
    _HEADER_ = struct.Struct("1I2H8B 1H 1H")
    _HEADER_SIZE_ = _HEADER_.size

    _GUID_ = struct.Struct("1I2H8B")
    _DATA_OFFSET_ = struct.Struct("16x 1H")
    _ATTR_ = struct.Struct("18x 1H")

    CRC32_GUID = "FC1BCDB0-7D31-49AA-936A-A4600D9DD083"
    TIANO_COMPRESS_GUID = 'A31280AD-481E-41B6-95E8-127F4C984779'
    LZMA_COMPRESS_GUID = 'EE4E5898-3914-4259-9D6E-DC7BD79403CF'

    def __init__(self, SectionDefinitionGuid=None, DataOffset=None, Attributes=None, Data=None):
        Image.__init__(self)
        if SectionDefinitionGuid is not None:
            self.SectionDefinitionGuid = SectionDefinitionGuid
        if DataOffset is not None:
            self.DataOffset = DataOffset
        if Attributes is not None:
            self.Attributes = Attributes
        if Data is not None:
            self.Data = Data

    def __str__(self):
        S = "guid=%s" % (gGuidStringFormat % self.SectionDefinitionGuid)
        for Sec in self.Sections:
            S += "\n" + str(Sec)
        return S

    def _Unpack(self):
        # keep header in this Image object
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._LEN_])
        return len(self)

    def _SetAttribute(self, Attribute):
        self.SetField(self._ATTR_, 0, Attribute)

    def _GetAttribute(self):
        return self.GetField(self._ATTR_)[0]

    def _SetGuid(self, Guid):
        self.SetField(self._GUID_, 0, Guid)

    def _GetGuid(self):
        return self.GetField(self._GUID_)

    def _SetDataOffset(self, Offset):
        self.SetField(self._DATA_OFFSET_, 0, Offset)

    def _GetDataOffset(self):
        return self.GetField(self._DATA_OFFSET_)[0]

    def _GetSections(self):
        """Decode the payload into Section objects per the section GUID.

        Unknown GUIDs (and any decode failure in the compressed branches)
        yield an empty list.
        """
        SectionList = []
        Guid = gGuidStringFormat % self.SectionDefinitionGuid
        if Guid == self.CRC32_GUID:
            # skip the CRC32 value, we don't do CRC32 verification here
            Offset = self.DataOffset - 4
            while Offset < len(self):
                Sec = Section()
                try:
                    Sec.frombuffer(self, Offset)
                    Offset += Sec.Size
                    # the section is aligned to 4-byte boundary
                    Offset = (Offset + 3) & (~3)
                except:
                    break
                SectionList.append(Sec)
        elif Guid == self.TIANO_COMPRESS_GUID:
            try:
                # skip the header
                Offset = self.DataOffset - 4
                # NOTE(review): 'self.Offset' below looks suspect -- the local
                # 'Offset' computed above is never used.  Confirm that Image
                # exposes an 'Offset' attribute before changing this.
                TmpData = DeCompress('Framework', self[self.Offset:])
                DecData = array('B')
                # BUGFIX: array.fromstring() was removed in Python 3.9;
                # frombytes() is the direct, behavior-identical replacement.
                DecData.frombytes(TmpData)
                Offset = 0
                while Offset < len(DecData):
                    Sec = Section()
                    try:
                        Sec.frombuffer(DecData, Offset)
                        Offset += Sec.Size
                        # the section is aligned to 4-byte boundary
                        Offset = (Offset + 3) & (~3)
                    except:
                        break
                    SectionList.append(Sec)
            except:
                pass
        elif Guid == self.LZMA_COMPRESS_GUID:
            try:
                # skip the header
                Offset = self.DataOffset - 4
                # NOTE(review): see the 'self.Offset' remark above.
                TmpData = DeCompress('Lzma', self[self.Offset:])
                DecData = array('B')
                DecData.frombytes(TmpData)
                Offset = 0
                while Offset < len(DecData):
                    Sec = Section()
                    try:
                        Sec.frombuffer(DecData, Offset)
                        Offset += Sec.Size
                        # the section is aligned to 4-byte boundary
                        Offset = (Offset + 3) & (~3)
                    except:
                        break
                    SectionList.append(Sec)
            except:
                pass

        return SectionList

    Attributes = property(_GetAttribute, _SetAttribute)
    SectionDefinitionGuid = property(_GetGuid, _SetGuid)
    DataOffset = property(_GetDataOffset, _SetDataOffset)
    Sections = property(_GetSections)
## Section() class
#
# A class for Section
#
class Section(Image):
    """Model of a generic FFS section: 3-byte size + 1-byte type header.

    For section types with a known payload model (_SectionSubImages), the
    payload is parsed into a sub-image stored in self._SubImages keyed by
    its offset within this section.
    """
    # section type -> display name
    _TypeName = {
        0x00 : "<unknown>",
        0x01 : "COMPRESSION",
        0x02 : "GUID_DEFINED",
        0x10 : "PE32",
        0x11 : "PIC",
        0x12 : "TE",
        0x13 : "DXE_DEPEX",
        0x14 : "VERSION",
        0x15 : "USER_INTERFACE",
        0x16 : "COMPATIBILITY16",
        0x17 : "FIRMWARE_VOLUME_IMAGE",
        0x18 : "FREEFORM_SUBTYPE_GUID",
        0x19 : "RAW",
        0x1B : "PEI_DEPEX"
    }

    # section type -> Image subclass used to parse the payload
    _SectionSubImages = {
        0x01 : CompressedImage,
        0x02 : GuidDefinedImage,
        0x17 : FirmwareVolume,
        0x13 : Depex,
        0x1B : Depex,
        0x15 : Ui
    }

    # Size = 3-byte
    # Type = 1-byte
    _HEADER_ = struct.Struct("3B 1B")
    _HEADER_SIZE_ = _HEADER_.size

    # SubTypeGuid
    # _FREE_FORM_SUBTYPE_GUID_HEADER_ = struct.Struct("1I2H8B")
    _SIZE_ = struct.Struct("3B")
    _TYPE_ = struct.Struct("3x 1B")

    def __init__(self, Type=None, Size=None):
        Image.__init__(self)
        self._Alignment = 1
        if Type is not None:
            self.Type = Type
        if Size is not None:
            self.Size = Size

    def __str__(self):
        global gIndention
        gIndention += 4
        SectionInfo = ' ' * gIndention
        if self.Type in self._TypeName:
            SectionInfo += "[SECTION:%s] offset=%x size=%x" % (self._TypeName[self.Type], self._OFF_, self.Size)
        else:
            SectionInfo += "[SECTION:%x<unknown>] offset=%x size=%x " % (self.Type, self._OFF_, self.Size)
        for Offset in self._SubImages.keys():
            SectionInfo += ", " + str(self._SubImages[Offset])
        gIndention -= 4
        return SectionInfo

    def _Unpack(self):
        """Parse the 4-byte header; delegate payload parsing when the type
        has a registered sub-image class."""
        self.empty()
        Type, = self._TYPE_.unpack_from(self._BUF_, self._OFF_)
        Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_)
        # section size is a 24-bit little-endian value
        Size = Size1 + (Size2 << 8) + (Size3 << 16)

        if Type not in self._SectionSubImages:
            # no need to extract sub-image, keep all in this Image object
            self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size])
        else:
            # keep header in this Image object
            self.extend(self._BUF_[self._OFF_ : self._OFF_ + self._HEADER_SIZE_])
            #
            # use new Image object to represent payload, which may be another kind
            # of image such as PE32
            #
            PayloadOffset = self._HEADER_SIZE_
            PayloadLen = self.Size - self._HEADER_SIZE_
            Payload = self._SectionSubImages[self.Type]()
            Payload.frombuffer(self._BUF_, self._OFF_ + self._HEADER_SIZE_, PayloadLen)
            self._SubImages[PayloadOffset] = Payload

        return Size

    def _SetSize(self, Size):
        # split the 24-bit size into three bytes, LSB first
        Size1 = Size & 0xFF
        Size2 = (Size & 0xFF00) >> 8
        Size3 = (Size & 0xFF0000) >> 16
        self.SetField(self._SIZE_, 0, Size1, Size2, Size3)

    def _GetSize(self):
        Size1, Size2, Size3 = self.GetField(self._SIZE_)
        return Size1 + (Size2 << 8) + (Size3 << 16)

    def _SetType(self, Type):
        self.SetField(self._TYPE_, 0, Type)

    def _GetType(self):
        return self.GetField(self._TYPE_)[0]

    def _GetAlignment(self):
        return self._Alignment

    def _SetAlignment(self, Alignment):
        self._Alignment = Alignment
        AlignmentMask = Alignment - 1
        # section alignment is actually for payload, so we need to add header size
        PayloadOffset = self._OFF_ + self._HEADER_SIZE_
        if (PayloadOffset & (~AlignmentMask)) == 0:
            return
        NewOffset = (PayloadOffset + AlignmentMask) & (~AlignmentMask)
        # NOTE(review): NewOffset is computed but never applied -- the loop
        # below has no effect on the section.  Possibly unfinished relocation
        # logic; confirm before removing.
        while (NewOffset - PayloadOffset) < self._HEADER_SIZE_:
            NewOffset += self._Alignment

    def tofile(self, f):
        self.Size = len(self)
        Image.tofile(self, f)
        for Offset in self._SubImages:
            self._SubImages[Offset].tofile(f)

    Type = property(_GetType, _SetType)
    Size = property(_GetSize, _SetSize)
    Alignment = property(_GetAlignment, _SetAlignment)
## Ffs() class
#
# A class for Ffs Section
#
class Ffs(Image):
    """Model of a single FFS (Firmware File System) file inside an FV."""
    _FfsFormat = "24B%(payload_size)sB"
    # skip IntegrityCheck
    _HEADER_ = struct.Struct("1I2H8B 2x 1B 1B 3B 1B")
    _HEADER_SIZE_ = _HEADER_.size

    # field accessors into the FFS header
    _NAME_ = struct.Struct("1I2H8B")
    _INT_CHECK_ = struct.Struct("16x 1H")
    _TYPE_ = struct.Struct("18x 1B")
    _ATTR_ = struct.Struct("19x 1B")
    _SIZE_ = struct.Struct("20x 3B")
    _STATE_ = struct.Struct("23x 1B")

    # attribute bit masks
    FFS_ATTRIB_FIXED = 0x04
    FFS_ATTRIB_DATA_ALIGNMENT = 0x38
    FFS_ATTRIB_CHECKSUM = 0x40

    # file type -> display name
    _TypeName = {
        0x00 : "<unknown>",
        0x01 : "RAW",
        0x02 : "FREEFORM",
        0x03 : "SECURITY_CORE",
        0x04 : "PEI_CORE",
        0x05 : "DXE_CORE",
        0x06 : "PEIM",
        0x07 : "DRIVER",
        0x08 : "COMBINED_PEIM_DRIVER",
        0x09 : "APPLICATION",
        0x0A : "SMM",
        0x0B : "FIRMWARE_VOLUME_IMAGE",
        0x0C : "COMBINED_SMM_DXE",
        0x0D : "SMM_CORE",
        0x0E : "MM_STANDALONE",
        0x0F : "MM_CORE_STANDALONE",
        0xc0 : "OEM_MIN",
        0xdf : "OEM_MAX",
        0xe0 : "DEBUG_MIN",
        0xef : "DEBUG_MAX",
        0xf0 : "FFS_MIN",
        0xff : "FFS_MAX",
        # NOTE(review): duplicate key 0xf0 -- this entry silently shadows
        # "FFS_MIN" above, so 0xf0 always displays as "FFS_PAD".
        0xf0 : "FFS_PAD",
    }

    def __init__(self):
        Image.__init__(self)
        self.FreeSpace = 0
        # offset within this file -> Section object
        self.Sections = sdict()
        self.Depex = ''
        # unique key used by repr(); pad files get a random UUID instead of
        # their (shared) GUID
        self.__ID__ = None

    def __str__(self):
        global gIndention
        gIndention += 4
        Indention = ' ' * gIndention
        FfsInfo = Indention
        FfsInfo += "[FFS:%s] offset=%x size=%x guid=%s free_space=%x alignment=%s\n" % \
                   (Ffs._TypeName[self.Type], self._OFF_, self.Size, self.Guid, self.FreeSpace, self.Alignment)
        SectionInfo = '\n'.join([str(self.Sections[Offset]) for Offset in self.Sections.keys()])
        gIndention -= 4
        return FfsInfo + SectionInfo + "\n"

    def __len__(self):
        return self.Size

    def __repr__(self):
        return self.__ID__

    def _Unpack(self):
        """Copy the file body into self and parse its contained sections."""
        Size1, Size2, Size3 = self._SIZE_.unpack_from(self._BUF_, self._OFF_)
        # file size is a 24-bit little-endian value
        Size = Size1 + (Size2 << 8) + (Size3 << 16)
        self.empty()
        self.extend(self._BUF_[self._OFF_ : self._OFF_ + Size])

        # Pad FFS may use the same GUID. We need to avoid it.
        if self.Type == 0xf0:
            self.__ID__ = str(uuid.uuid1()).upper()
        else:
            self.__ID__ = self.Guid

        # Traverse the SECTION. RAW and PAD do not have sections
        if self.Type not in [0xf0, 0x01] and Size > 0 and Size < 0xFFFFFF:
            EndOfFfs = Size
            SectionStartAddress = self._HEADER_SIZE_
            while SectionStartAddress < EndOfFfs:
                SectionObj = Section()
                SectionObj.frombuffer(self, SectionStartAddress)
                #f = open(repr(SectionObj), 'wb')
                #SectionObj.Size = 0
                #SectionObj.tofile(f)
                #f.close()
                self.Sections[SectionStartAddress] = SectionObj
                SectionStartAddress += len(SectionObj)
                # sections are 4-byte aligned
                SectionStartAddress = (SectionStartAddress + 3) & (~3)

    def Pack(self):
        pass

    def SetFreeSpace(self, Size):
        self.FreeSpace = Size

    def _GetGuid(self):
        return gGuidStringFormat % self.Name

    def _SetName(self, Value):
        # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11
        self.SetField(self._NAME_, 0, Value)

    def _GetName(self):
        # Guid1, Guid2, Guid3, Guid4, Guid5, Guid6, Guid7, Guid8, Guid9, Guid10, Guid11
        return self.GetField(self._NAME_)

    def _SetSize(self, Size):
        # split the 24-bit size into three bytes, LSB first
        Size1 = Size & 0xFF
        Size2 = (Size & 0xFF00) >> 8
        Size3 = (Size & 0xFF0000) >> 16
        self.SetField(self._SIZE_, 0, Size1, Size2, Size3)

    def _GetSize(self):
        Size1, Size2, Size3 = self.GetField(self._SIZE_)
        return Size1 + (Size2 << 8) + (Size3 << 16)

    def _SetType(self, Type):
        self.SetField(self._TYPE_, 0, Type)

    def _GetType(self):
        return self.GetField(self._TYPE_)[0]

    def _SetAttributes(self, Value):
        self.SetField(self._ATTR_, 0, Value)

    def _GetAttributes(self):
        return self.GetField(self._ATTR_)[0]

    def _GetFixed(self):
        if (self.Attributes & self.FFS_ATTRIB_FIXED) != 0:
            return True
        return False

    def _GetCheckSum(self):
        if (self.Attributes & self.FFS_ATTRIB_CHECKSUM) != 0:
            return True
        return False

    def _GetAlignment(self):
        return (self.Attributes & self.FFS_ATTRIB_DATA_ALIGNMENT) >> 3

    def _SetState(self, Value):
        self.SetField(self._STATE_, 0, Value)

    def _GetState(self):
        return self.GetField(self._STATE_)[0]

    Name = property(_GetName, _SetName)
    Guid = property(_GetGuid)
    Type = property(_GetType, _SetType)
    Size = property(_GetSize, _SetSize)
    Attributes = property(_GetAttributes, _SetAttributes)
    Fixed = property(_GetFixed)
    Checksum = property(_GetCheckSum)
    Alignment = property(_GetAlignment)
    State = property(_GetState, _SetState)
## MultipleFv() class
#
# A class for Multiple FV
#
class MultipleFv(FirmwareVolume):
    """Aggregate of several FV binaries.

    BasicInfo collects [Name, FileSystemGuid, Size] for each parsed FV, and
    FfsDict becomes the union of all member FfsDicts.
    """
    def __init__(self, FvList):
        FirmwareVolume.__init__(self)
        self.BasicInfo = []
        for FvPath in FvList:
            FvName = os.path.splitext(os.path.split(FvPath)[1])[0]
            # BUGFIX: the original only guarded the open() call, so a blank
            # entry left the file handle as None and crashed below -- skip
            # blank paths entirely instead.
            if not FvPath.strip():
                continue
            Buf = array('B')
            Fd = open(FvPath, 'rb')
            try:
                try:
                    Buf.fromfile(Fd, os.path.getsize(FvPath))
                except EOFError:
                    pass
            finally:
                # BUGFIX: the original leaked the file handle
                Fd.close()
            Fv = FirmwareVolume(FvName)
            Fv.frombuffer(Buf, 0, len(Buf))

            self.BasicInfo.append([Fv.Name, Fv.FileSystemGuid, Fv.Size])
            self.FfsDict.update(Fv.FfsDict)
## Class Eot
#
# This class is used to define Eot main entrance
#
# @param object: Inherited from object class
#
class Eot(object):
## The constructor
#
# @param self: The object pointer
#
def __init__(self, CommandLineOption=True, IsInit=True, SourceFileList=None, \
    IncludeDirList=None, DecFileList=None, GuidList=None, LogFile=None,
    FvFileList="", MapFileList="", Report='Report.html', Dispatch=None):
    """Run the whole EOT pipeline.

    Note this constructor does all the work: it validates the workspace
    environment, parses inputs, builds the databases, simulates dispatch,
    and generates the HTML report before returning.
    """
    # Version and Copyright
    self.VersionNumber = ("0.02" + " " + gBUILD_VERSION)
    self.Version = "%prog Version " + self.VersionNumber
    self.Copyright = "Copyright (c) 2008 - 2018, Intel Corporation All rights reserved."
    self.Report = Report

    self.IsInit = IsInit
    self.SourceFileList = SourceFileList
    self.IncludeDirList = IncludeDirList
    self.DecFileList = DecFileList
    self.GuidList = GuidList
    self.LogFile = LogFile
    self.FvFileList = FvFileList
    self.MapFileList = MapFileList
    self.Dispatch = Dispatch

    # Check workspace environment: EFI_SOURCE/EDK_SOURCE are optional,
    # WORKSPACE is mandatory
    if "EFI_SOURCE" not in os.environ:
        if "EDK_SOURCE" not in os.environ:
            pass
        else:
            EotGlobalData.gEDK_SOURCE = os.path.normpath(os.getenv("EDK_SOURCE"))
    else:
        EotGlobalData.gEFI_SOURCE = os.path.normpath(os.getenv("EFI_SOURCE"))
        EotGlobalData.gEDK_SOURCE = os.path.join(EotGlobalData.gEFI_SOURCE, 'Edk')

    if "WORKSPACE" not in os.environ:
        EdkLogger.error("EOT", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="WORKSPACE")
    else:
        EotGlobalData.gWORKSPACE = os.path.normpath(os.getenv("WORKSPACE"))

    EotGlobalData.gMACRO['WORKSPACE'] = EotGlobalData.gWORKSPACE
    EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gEFI_SOURCE
    EotGlobalData.gMACRO['EDK_SOURCE'] = EotGlobalData.gEDK_SOURCE

    # Parse the options and args
    if CommandLineOption:
        self.ParseOption()

    # Validate and register the FV binaries (mandatory input)
    if self.FvFileList:
        for FvFile in GetSplitValueList(self.FvFileList, ' '):
            FvFile = os.path.normpath(FvFile)
            if not os.path.isfile(FvFile):
                EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % FvFile)
            EotGlobalData.gFV_FILE.append(FvFile)
    else:
        EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "The fv file list of target platform was not specified")

    # Validate and register the map files (optional input)
    if self.MapFileList:
        for MapFile in GetSplitValueList(self.MapFileList, ' '):
            MapFile = os.path.normpath(MapFile)
            if not os.path.isfile(MapFile):
                EdkLogger.error("Eot", EdkLogger.EOT_ERROR, "Can not find file %s " % MapFile)
            EotGlobalData.gMAP_FILE.append(MapFile)

    # Generate source file list
    self.GenerateSourceFileList(self.SourceFileList, self.IncludeDirList)

    # Generate guid list of dec file list
    self.ParseDecFile(self.DecFileList)

    # Generate guid list from GUID list file
    self.ParseGuidList(self.GuidList)

    # Init Eot database
    EotGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
    EotGlobalData.gDb.InitDatabase(self.IsInit)

    # Build ECC database
    self.BuildDatabase()

    # Parse Ppi/Protocol
    self.ParseExecutionOrder()

    # Merge Identifier tables
    self.GenerateQueryTable()

    # Generate report database
    self.GenerateReportDatabase()

    # Load Fv Info
    self.LoadFvInfo()

    # Load Map Info
    self.LoadMapInfo()

    # Generate Report
    self.GenerateReport()

    # Convert log file
    self.ConvertLogFile(self.LogFile)

    # DONE
    EdkLogger.quiet("EOT FINISHED!")

    # Close Database
    EotGlobalData.gDb.Close()
## ParseDecFile() method
#
# parse DEC file and get all GUID names with GUID values as {GuidName : GuidValue}
# The Dict is stored in EotGlobalData.gGuidDict
#
# @param self: The object pointer
# @param DecFileList: A list of all DEC files
#
def ParseDecFile(self, DecFileList):
    """Parse all DEC files listed in DecFileList and record their GUIDs.

    DecFileList names a text file containing one workspace-relative DEC
    path per line; each "Name = GuidStruct" assignment found is stored as
    {GuidName: GuidValueString} in EotGlobalData.gGuidDict.
    """
    if not DecFileList:
        return
    ListPath = os.path.normpath(DecFileList)
    # BUGFIX: the original opened both files in 'rb' mode but then applied
    # str operations to the lines (a TypeError on Python 3); text mode is
    # what the parsing below expects.  with-blocks also close the handles,
    # which the original leaked.
    with open(ListPath, 'r') as ListFile:
        for ListLine in ListFile:
            DecPath = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, ListLine.strip()))
            if not os.path.exists(DecPath):
                continue
            with open(DecPath, 'r') as DecFile:
                for DecLine in DecFile:
                    DecLine = CleanString(DecLine)
                    # avoid shadowing the 'list' builtin (original used it)
                    TokenList = DecLine.split('=')
                    if len(TokenList) == 2:
                        EotGlobalData.gGuidDict[TokenList[0].strip()] = GuidStructureStringToGuidString(TokenList[1].strip())
## ParseGuidList() method
#
# Parse Guid list and get all GUID names with GUID values as {GuidName : GuidValue}
# The Dict is stored in EotGlobalData.gGuidDict
#
# @param self: The object pointer
# @param GuidList: A list of all GUID and its value
#
def ParseGuidList(self, GuidList):
    """Parse a "GuidName GuidValue" list file into EotGlobalData.gGuidDict.

    GuidList is resolved relative to the workspace; a missing file is
    silently ignored (matching the original behavior).
    """
    Path = os.path.join(EotGlobalData.gWORKSPACE, GuidList)
    if not os.path.isfile(Path):
        return
    # BUGFIX: with-block closes the handle the original leaked
    with open(Path) as GuidFile:
        for Line in GuidFile:
            if Line.strip():
                (GuidName, GuidValue) = Line.split()
                EotGlobalData.gGuidDict[GuidName] = GuidValue
## ConvertLogFile() method
#
# Parse a real running log file to get real dispatch order
# The result is saved to old file name + '.new'
#
# @param self: The object pointer
# @param LogFile: A real running log file name
#
def ConvertLogFile(self, LogFile):
    """Convert a real boot log into a plain dispatch-order module list.

    Scans LogFile for "Loading PEIM at"/"Loading driver at" lines, strips
    the '.efi' suffix, and writes the module names (one per line, CRLF
    terminated) to LogFile + '.new'.  Does nothing when LogFile is empty.
    """
    if not LogFile:
        return
    ModuleNames = []
    # BUGFIX: the original opened the files in 'rb'/'wb' mode but mixed the
    # bytes lines with str operations (a TypeError on Python 3); text mode
    # matches the str processing below.  with-blocks also guarantee the
    # handles are closed, which the original leaked on any exception.
    with open(LogFile, 'r') as LogReader:
        for Line in LogReader:
            Line = Line.strip()
            Line = Line.replace('.efi', '')
            # the 55/57 offsets skip the fixed-width load-address text that
            # follows each marker (preserved from the original)
            Index = Line.find("Loading PEIM at ")
            if Index > -1:
                ModuleNames.append(Line[Index + 55:])
                continue
            Index = Line.find("Loading driver at ")
            if Index > -1:
                ModuleNames.append(Line[Index + 57:])
                continue
    with open(LogFile + '.new', 'w') as LogWriter:
        for Name in ModuleNames:
            LogWriter.write(Name + '\r\n')
## GenerateSourceFileList() method
#
# Generate a list of all source files
# 1. Search the file list one by one
# 2. Store inf file name with source file names under it like
# { INF file name: [source file1, source file2, ...]}
# 3. Search the include list to find all .h files
# 4. Store source file list to EotGlobalData.gSOURCE_FILES
# 5. Store INF file list to EotGlobalData.gINF_FILES
#
# @param self: The object pointer
# @param SourceFileList: A list of all source files
# @param IncludeFileList: A list of all include files
#
def GenerateSourceFileList(self, SourceFileList, IncludeFileList):
    """Build the global source/INF file lists for later parsing.

    SourceFileList names a text file of .c/.h/.inf paths (grouped per INF);
    IncludeFileList names a text file of directories to walk for extra .h
    and .dec files.  Results are stored in EotGlobalData.gSOURCE_FILES and
    EotGlobalData.gINF_FILES, and echoed to the gOP_* log files.
    """
    EdkLogger.quiet("Generating source files list ... ")
    mSourceFileList = []
    mDecFileList = []
    mFileList = {}
    mCurrentInfFile = ''
    mCurrentSourceFileList = []

    if SourceFileList:
        # with-block closes the handle the original leaked
        with open(SourceFileList, 'r') as sfl:
            for line in sfl:
                line = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
                if line[-2:].upper() == '.C' or line[-2:].upper() == '.H':
                    if line not in mCurrentSourceFileList:
                        mCurrentSourceFileList.append(line)
                        mSourceFileList.append(line)
                        EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % line)
                if line[-4:].upper() == '.INF':
                    # a new INF starts a new source group
                    if mCurrentInfFile != '':
                        mFileList[mCurrentInfFile] = mCurrentSourceFileList
                        mCurrentSourceFileList = []
                    mCurrentInfFile = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line))
                    EotGlobalData.gOP_INF.write('%s\n' % mCurrentInfFile)
        if mCurrentInfFile not in mFileList:
            mFileList[mCurrentInfFile] = mCurrentSourceFileList

    # Get all include files from packages
    if IncludeFileList:
        # BUGFIX: the original opened this list in 'rb' mode and then joined
        # the bytes lines with str paths (a TypeError on Python 3)
        with open(IncludeFileList, 'r') as ifl:
            for line in ifl:
                if not line.strip():
                    continue
                newline = os.path.normpath(os.path.join(EotGlobalData.gWORKSPACE, line.strip()))
                for Root, Dirs, Files in os.walk(str(newline)):
                    for File in Files:
                        FullPath = os.path.normpath(os.path.join(Root, File))
                        if FullPath not in mSourceFileList and File[-2:].upper() == '.H':
                            mSourceFileList.append(FullPath)
                            EotGlobalData.gOP_SOURCE_FILES.write('%s\n' % FullPath)
                        if FullPath not in mDecFileList and File.upper().find('.DEC') > -1:
                            mDecFileList.append(FullPath)

    EotGlobalData.gSOURCE_FILES = mSourceFileList
    EotGlobalData.gOP_SOURCE_FILES.close()
    EotGlobalData.gINF_FILES = mFileList
    EotGlobalData.gOP_INF.close()
## GenerateReport() method
#
# Generate final HTML report
#
# @param self: The object pointer
#
def GenerateReport(self):
    """Create the Report helper and let it write the final HTML report."""
    EdkLogger.quiet("Generating report file ... ")
    Report(self.Report, EotGlobalData.gFV, self.Dispatch).GenerateReport()
## LoadMapInfo() method
#
# Load map files and parse them
#
# @param self: The object pointer
#
def LoadMapInfo(self):
    """Parse all configured map files into EotGlobalData.gMap.

    Does nothing when no map file was supplied on the command line.
    """
    if EotGlobalData.gMAP_FILE == []:
        return
    EdkLogger.quiet("Parsing Map file ... ")
    EotGlobalData.gMap = ParseMapFile(EotGlobalData.gMAP_FILE)
## LoadFvInfo() method
#
# Load FV binary files and parse them
#
# @param self: The object pointer
#
def LoadFvInfo(self):
    """Parse the FV image(s), run the dispatch simulation, and log every
    protocol that remained unmatched after library-calling analysis.
    """
    EdkLogger.quiet("Parsing FV file ... ")
    Fv = MultipleFv(EotGlobalData.gFV_FILE)
    EotGlobalData.gFV = Fv
    Fv.Dispatch(EotGlobalData.gDb)
    for UnmatchedProtocol in EotGlobalData.gProtocolList:
        EotGlobalData.gOP_UN_MATCHED_IN_LIBRARY_CALLING.write('%s\n' % UnmatchedProtocol)
## GenerateReportDatabase() method
#
# Generate data for the information needed by report
# 1. Update name, macro and value of all found PPI/PROTOCOL GUID
# 2. Install hard coded PPI/PROTOCOL
#
# @param self: The object pointer
#
def GenerateReportDatabase(self):
    """Resolve the macro and GUID value for every GUID name in the Report
    table, then register the hard-coded (ModuleID == -2) Ppi/Protocol GUIDs
    into the global Ppi/Protocol lists.
    """
    EdkLogger.quiet("Generating the cross-reference table of GUID for Ppi/Protocol ... ")

    # Update Protocol/Ppi Guid
    SqlCommand = """select DISTINCT GuidName from Report"""
    RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
    for Record in RecordSet:
        GuidName = Record[0]
        GuidMacro = ''
        GuidMacro2 = ''  # NOTE(review): assigned but never used in this method
        GuidValue = ''

        # Find guid value defined in Dec file
        if GuidName in EotGlobalData.gGuidDict:
            GuidValue = EotGlobalData.gGuidDict[GuidName]
            SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName)
            EotGlobalData.gDb.TblReport.Exec(SqlCommand)
            continue

        # Search defined Macros for guid name
        SqlCommand ="""select DISTINCT Value, Modifier from Query where Name like '%s'""" % GuidName
        GuidMacroSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        # Ignore NULL result
        if not GuidMacroSet:
            continue
        GuidMacro = GuidMacroSet[0][0].strip()
        if not GuidMacro:
            continue

        # Find Guid value of Guid Macro
        SqlCommand ="""select DISTINCT Value from Query2 where Value like '%%%s%%' and Model = %s""" % (GuidMacro, MODEL_IDENTIFIER_MACRO_DEFINE)
        GuidValueSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        if GuidValueSet != []:
            GuidValue = GuidValueSet[0][0]
            # Strip everything up to (and including) the macro name, then
            # normalize: drop continuations, CR/LF and the 'L' integer
            # suffixes before converting the C GUID struct text to a string.
            GuidValue = GuidValue[GuidValue.find(GuidMacro) + len(GuidMacro) :]
            GuidValue = GuidValue.lower().replace('\\', '').replace('\r', '').replace('\n', '').replace('l', '').strip()
            GuidValue = GuidStructureStringToGuidString(GuidValue)
            SqlCommand = """update Report set GuidMacro = '%s', GuidValue = '%s' where GuidName = '%s'""" %(GuidMacro, GuidValue, GuidName)
            EotGlobalData.gDb.TblReport.Exec(SqlCommand)
            continue

    # Update Hard Coded Ppi/Protocol
    SqlCommand = """select DISTINCT GuidValue, ItemType from Report where ModuleID = -2 and ItemMode = 'Produced'"""
    RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
    for Record in RecordSet:
        if Record[1] == 'Ppi':
            EotGlobalData.gPpiList[Record[0].lower()] = -2
        if Record[1] == 'Protocol':
            EotGlobalData.gProtocolList[Record[0].lower()] = -2
## GenerateQueryTable() method
#
# Generate two tables improve query performance
#
# @param self: The object pointer
#
def GenerateQueryTable(self):
    """Populate the Query/Query2 helper tables so later GUID lookups do
    not need to scan every per-file Identifier table.

    Query  collects variables and assignment expressions (GUID name
    candidates); Query2 collects macro definitions (GUID value candidates).
    """
    EdkLogger.quiet("Generating temp query table for analysis ... ")
    for Identifier in EotGlobalData.gIdentifierTableList:
        # Identifier[0] is the per-source-file Identifier table name.
        SqlCommand = """insert into Query (Name, Modifier, Value, Model)
select Name, Modifier, Value, Model from %s where (Model = %s or Model = %s)""" \
            % (Identifier[0], MODEL_IDENTIFIER_VARIABLE, MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION)
        EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        SqlCommand = """insert into Query2 (Name, Modifier, Value, Model)
select Name, Modifier, Value, Model from %s where Model = %s""" \
            % (Identifier[0], MODEL_IDENTIFIER_MACRO_DEFINE)
        EotGlobalData.gDb.TblReport.Exec(SqlCommand)
## ParseExecutionOrder() method
#
# Get final execution order
# 1. Search all PPI
# 2. Search all PROTOCOL
#
# @param self: The object pointer
#
def ParseExecutionOrder(self):
    """Search every per-file Identifier table for Ppi/Protocol install,
    locate, and notify calls, recording each produce/consume/callback into
    the Report table.  Finally insert hard-coded PPIs (NT32/SEC platform
    PPIs) that have no source-level evidence.
    """
    EdkLogger.quiet("Searching Ppi/Protocol ... ")
    for Identifier in EotGlobalData.gIdentifierTableList:
        # Reset the full record template for each source file.
        ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled = \
        -1, '', '', -1, '', '', '', '', '', '', '', '', 0
        SourceFileID = Identifier[0].replace('Identifier', '')
        SourceFileFullPath = Identifier[1]
        Identifier = Identifier[0]

        # Find Ppis
        ItemMode = 'Produced'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.InstallPpi', '->InstallPpi', 'PeiInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)

        ItemMode = 'Produced'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.ReInstallPpi', '->ReInstallPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
        # ReInstallPpi carries the PPI descriptor in its 2nd argument.
        SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2)

        SearchPpiCallFunction(Identifier, SourceFileID, SourceFileFullPath, ItemMode)

        ItemMode = 'Consumed'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.LocatePpi', '->LocatePpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)

        SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Ppi', ItemMode)

        ItemMode = 'Callback'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.NotifyPpi', '->NotifyPpi', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchPpi(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode)

        # Find Protocols
        ItemMode = 'Produced'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.InstallProtocolInterface', '.ReInstallProtocolInterface', '->InstallProtocolInterface', '->ReInstallProtocolInterface', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1)

        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.InstallMultipleProtocolInterfaces', '->InstallMultipleProtocolInterfaces', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 2)

        SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode)

        ItemMode = 'Consumed'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.LocateProtocol', '->LocateProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0)

        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.HandleProtocol', '->HandleProtocol', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 1)

        SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode)

        ItemMode = 'Callback'
        SqlCommand = """select Value, Name, BelongsToFile, StartLine, EndLine from %s
where (Name like '%%%s%%' or Name like '%%%s%%') and Model = %s""" \
            % (Identifier, '.RegisterProtocolNotify', '->RegisterProtocolNotify', MODEL_IDENTIFIER_FUNCTION_CALLING)
        SearchProtocols(SqlCommand, Identifier, SourceFileID, SourceFileFullPath, ItemMode, 0)

        SearchFunctionCalling(Identifier, SourceFileID, SourceFileFullPath, 'Protocol', ItemMode)

    # Hard Code: platform PPIs produced outside any parsed source file
    # (ModuleID == -2 marks them as hard coded).
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiSecPlatformInformationPpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gEfiNtLoadAsDllPpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtPeiLoadFileGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtAutoScanPpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gNtFwhPpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiNtThunkPpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiPlatformTypePpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiFrequencySelectionCpuPpiGuid', '', '', '', 0)
    EotGlobalData.gDb.TblReport.Insert(-2, '', '', -1, '', '', 'Ppi', 'Produced', 'gPeiCachePpiGuid', '', '', '', 0)

    EotGlobalData.gDb.Conn.commit()
## BuildDatabase() methoc
#
# Build the database for target
#
# @param self: The object pointer
#
def BuildDatabase(self):
    """Reset the Report table and (unless -k kept the old database)
    rebuild the meta-data and C-source databases, then refresh the list
    of per-file Identifier tables.
    """
    ReportTable = EotGlobalData.gDb.TblReport
    # Start from an empty report table.
    ReportTable.Drop()
    ReportTable.Create()

    if self.IsInit:
        self.BuildMetaDataFileDatabase(EotGlobalData.gINF_FILES)
        EdkLogger.quiet("Building database for source code ...")
        c.CreateCCodeDB(EotGlobalData.gSOURCE_FILES)
        EdkLogger.quiet("Building database for source code done!")

    EotGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EotGlobalData.gDb)
## BuildMetaDataFileDatabase() method
#
# Build the database for meta data files
#
# @param self: The object pointer
# @param Inf_Files: A list for all INF files
#
def BuildMetaDataFileDatabase(self, Inf_Files):
    """Parse each INF file together with its associated source list into
    the database, then commit.

    @param Inf_Files: mapping of INF file path -> list of its source files
    """
    EdkLogger.quiet("Building database for meta data files ...")
    for InfFile in Inf_Files:
        if InfFile:
            EdkLogger.quiet("Parsing %s ..." % str(InfFile))
            EdkInfParser(InfFile, EotGlobalData.gDb, Inf_Files[InfFile])
    EotGlobalData.gDb.Conn.commit()
    EdkLogger.quiet("Building database for meta data files done!")
## ParseOption() method
#
# Parse command line options
#
# @param self: The object pointer
#
def ParseOption(self):
    """Parse command line options and copy each supplied value onto the
    matching instance attribute.
    """
    (Options, Target) = self.EotOptionParser()

    # Set log level
    self.SetLogLevel(Options)

    if Options.FvFileList:
        self.FvFileList = Options.FvFileList

    if Options.MapFileList:
        # BUGFIX: the -a/--map option's dest is 'MapFileList' (see
        # EotOptionParser); the old code read the non-existent attribute
        # 'FvMapFileList', which raised AttributeError whenever a map file
        # list was supplied.
        self.MapFileList = Options.MapFileList

    if Options.SourceFileList:
        self.SourceFileList = Options.SourceFileList

    if Options.IncludeDirList:
        self.IncludeDirList = Options.IncludeDirList

    if Options.DecFileList:
        self.DecFileList = Options.DecFileList

    if Options.GuidList:
        self.GuidList = Options.GuidList

    if Options.LogFile:
        self.LogFile = Options.LogFile

    if Options.keepdatabase:
        self.IsInit = False
## SetLogLevel() method
#
# Set current log level of the tool based on args
#
# @param self: The object pointer
# @param Option: The option list including log level setting
#
def SetLogLevel(self, Option):
    """Apply the logging level implied by the parsed options.

    Precedence: verbose > quiet > debug > default INFO.  The explicit
    'is not None' checks are kept on purpose: a debug level of 0 is
    still a deliberate user choice.

    @param Option: parsed option values including the log-level flags
    """
    if Option.verbose is not None:
        EdkLogger.SetLevel(EdkLogger.VERBOSE)
        return
    if Option.quiet is not None:
        EdkLogger.SetLevel(EdkLogger.QUIET)
        return
    if Option.debug is not None:
        EdkLogger.SetLevel(Option.debug + 1)
        return
    EdkLogger.SetLevel(EdkLogger.INFO)
## EotOptionParser() method
#
# Using standard Python module optparse to parse command line option of this tool.
#
# @param self: The object pointer
#
# @retval Opt A optparse.Values object containing the parsed options
# @retval Args Target of build command
#
def EotOptionParser(self):
    """Build the optparse command-line parser for Eot and parse sys.argv.

    @retval Opt:  optparse.Values holding the parsed options
    @retval Args: positional arguments (build target)
    """
    Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Eot.exe", usage = "%prog [options]")
    Parser.add_option("-m", "--makefile filename", action="store", type="string", dest='MakeFile',
        help="Specify a makefile for the platform.")
    Parser.add_option("-c", "--dsc filename", action="store", type="string", dest="DscFile",
        help="Specify a dsc file for the platform.")
    Parser.add_option("-f", "--fv filename", action="store", type="string", dest="FvFileList",
        help="Specify fv file list, quoted by \"\".")
    Parser.add_option("-a", "--map filename", action="store", type="string", dest="MapFileList",
        help="Specify map file list, quoted by \"\".")
    Parser.add_option("-s", "--source files", action="store", type="string", dest="SourceFileList",
        help="Specify source file list by a file")
    Parser.add_option("-i", "--include dirs", action="store", type="string", dest="IncludeDirList",
        help="Specify include dir list by a file")
    Parser.add_option("-e", "--dec files", action="store", type="string", dest="DecFileList",
        help="Specify dec file list by a file")
    Parser.add_option("-g", "--guid list", action="store", type="string", dest="GuidList",
        help="Specify guid file list by a file")
    Parser.add_option("-l", "--log filename", action="store", type="string", dest="LogFile",
        help="Specify real execution log file")
    # The three flags below take no value; their dest defaults to the long
    # option name (keepdatabase / quiet / verbose).
    Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Eot database will not be cleaned except report information if this option is specified.")
    Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
    Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
        "including library instances selected, final dependency expression, "\
        "and warning messages, etc.")
    Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")

    (Opt, Args)=Parser.parse_args()
    return (Opt, Args)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Initialize log system
    EdkLogger.Initialize()
    EdkLogger.IsRaiseError = False
    EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")

    # BUGFIX: time.clock() was removed in Python 3.8; perf_counter() is the
    # documented replacement for measuring elapsed wall-clock intervals.
    StartTime = time.perf_counter()

    # Bind the instance to a distinct name so the Eot class itself is not
    # shadowed by its own instance.
    EotApp = Eot(CommandLineOption=False,
                 SourceFileList=r'C:\TestEot\Source.txt',
                 GuidList=r'C:\TestEot\Guid.txt',
                 FvFileList=r'C:\TestEot\FVRECOVERY.Fv')
    FinishTime = time.perf_counter()

    BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
    EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
| edk2-master | BaseTools/Source/Python/Eot/EotMain.py |
## @file
# Standardized Error Handling infrastructures.
#
# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
# Base identifier for Eot tool errors.
ERROR_1 = 1000

# Maps an error id to its human-readable message text.
gEccErrorMessage = {ERROR_1: "RESERVED"}
| edk2-master | BaseTools/Source/Python/Eot/EotToolError.py |
## @file
# This file is used to define the identification of INF/DEC/DSC files
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
## Identification
#
# This class defined basic Identification information structure which is used by INF/DEC/DSC files
#
# @param object: Inherited from object class
#
# @var FileName: To store data for Filename
# @var FileFullPath: To store data for full path of the file
# @var FileRelativePath: To store data for relative path of the file
# @var RunStatus: Status of build system running
#
class Identification(object):
    """Holds the identifying paths of one INF/DEC/DSC file.

    Attributes:
        FileName:         bare file name
        FileFullPath:     absolute path of the file
        FileRelativePath: path relative to the workspace
        PackagePath:      path of the owning package
    """

    def __init__(self):
        # All fields start empty; the file parsers fill them in later.
        self.FileName = ''
        self.FileFullPath = ''
        self.FileRelativePath = ''
        self.PackagePath = ''

    def GetFileName(self, FileFullPath, FileRelativePath):
        """Reserved for future use; intentionally a no-op."""
        return None

    def GetFileFullPath(self, FileName, FileRelativePath):
        """Reserved for future use; intentionally a no-op."""
        return None

    def GetFileRelativePath(self, FileName, FileFullPath):
        """Reserved for future use; intentionally a no-op."""
        return None
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # BUGFIX: the old name 'id' shadowed the Python builtin id(); use a
    # descriptive name instead.
    Ident = Identification()
| edk2-master | BaseTools/Source/Python/Eot/Identification.py |
## @file
# fragments of source file
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## The description of comment contents and start & end position
#
#
class Comment:
    """A source comment: its text, its (line, column) span and its
    lexical type (two-slash or slash-star)."""

    def __init__(self, Str, Begin, End, CommentType):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
        self.Type = CommentType


class PP_Directive:
    """A preprocessor directive's text and position span."""

    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End


class AssignmentExpression:
    """An assignment expression: l-value name, operator, r-value
    expression, and position span."""

    def __init__(self, Lvalue, Op, Exp, Begin, End):
        self.Name, self.Operator, self.Value = Lvalue, Op, Exp
        self.StartPos, self.EndPos = Begin, End


class PredicateExpression:
    """A predicate (conditional) expression's text and span."""

    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End


class FunctionDefinition:
    """A function definition: modifier string, declarator string, the
    overall span, and the positions of the opening brace and the name."""

    def __init__(self, ModifierStr, DeclStr, Begin, End, LBPos, NamePos):
        self.Modifier, self.Declarator = ModifierStr, DeclStr
        self.StartPos, self.EndPos = Begin, End
        self.LeftBracePos, self.NamePos = LBPos, NamePos


class VariableDeclaration:
    """A variable declaration: modifier string, declarator string, span."""

    def __init__(self, ModifierStr, DeclStr, Begin, End):
        self.Modifier, self.Declarator = ModifierStr, DeclStr
        self.StartPos, self.EndPos = Begin, End


class EnumerationDefinition:
    """An enum definition's text and position span."""

    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End


class StructUnionDefinition:
    """A struct/union definition's text and position span."""

    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End


class TypedefDefinition:
    """A typedef: the source type, the new type name, and the span."""

    def __init__(self, FromStr, ToStr, Begin, End):
        self.FromType, self.ToType = FromStr, ToStr
        self.StartPos, self.EndPos = Begin, End


class FunctionCalling:
    """A function call: callee name, parameter list, and position span."""

    def __init__(self, Name, Param, Begin, End):
        self.FuncName, self.ParamList = Name, Param
        self.StartPos, self.EndPos = Begin, End
| edk2-master | BaseTools/Source/Python/Eot/CodeFragment.py |
## @file
# This file is used to create report for Eot tool
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import absolute_import
import Common.LongFilePathOs as os
from . import EotGlobalData
from Common.LongFilePathSupport import OpenLongFilePath as open
## Report() class
#
# This class defined Report
#
# @param object: Inherited from object class
#
class Report(object):
## The constructor
#
# @param self: The object pointer
# @param ReportName: name of the report
# @param FvObj: FV object after parsing FV images
#
def __init__(self, ReportName = 'Report.html', FvObj = None, DispatchName=None):
    """Open the report output file (and the optional dispatch list file).

    @param ReportName:   path of the HTML report file to create
    @param FvObj:        parsed FV image object the report describes
    @param DispatchName: optional path for a plain-text dispatch list
    """
    self.ReportName = ReportName
    self.Op = open(ReportName, 'w+')
    self.DispatchList = None
    if DispatchName:
        self.DispatchList = open(DispatchName, 'w+')
    self.FvObj = FvObj
    # Running indexes used to generate unique HTML element ids per section.
    self.FfsIndex = 0
    self.PpiIndex = 0
    self.ProtocolIndex = 0
    # Fall back to EDK_SOURCE when EFI_SOURCE is not configured.
    if EotGlobalData.gMACRO['EFI_SOURCE'] == '':
        EotGlobalData.gMACRO['EFI_SOURCE'] = EotGlobalData.gMACRO['EDK_SOURCE']
## WriteLn() method
#
# Write a line in the report
#
# @param self: The object pointer
# @param Line: The lint to be written into
#
def WriteLn(self, Line):
self.Op.write('%s\n' % Line)
## GenerateReport() method
#
# A caller to generate report
#
# @param self: The object pointer
#
def GenerateReport(self):
    """Emit the whole HTML report (header, FV body, tail), close the
    output file, then write the un-dispatched module list."""
    for EmitSection in (self.GenerateHeader, self.GenerateFv, self.GenerateTail):
        EmitSection()
    self.Op.close()
    self.GenerateUnDispatchedList()
## GenerateUnDispatchedList() method
#
# Create a list for not dispatched items
#
# @param self: The object pointer
#
def GenerateUnDispatchedList(self):
    """Append the FV name and every un-dispatched FFS entry to the global
    un-dispatched log file."""
    Output = EotGlobalData.gOP_UN_DISPATCHED
    Output.write('%s\n' % self.FvObj.Name)
    for Ffs in self.FvObj.UnDispatchedFfsDict.values():
        Output.write('%s\n' % Ffs)
## GenerateFv() method
#
# Generate FV information
#
# @param self: The object pointer
#
def GenerateFv(self):
    """Render the FV summary table, then the dispatched FFS list (in
    dispatch order) followed by the un-dispatched FFS list."""
    FvObj = self.FvObj
    # Column headers for the FV basic-information table.
    Content = """  <tr>
    <td width="20%%"><strong>Name</strong></td>
    <td width="60%%"><strong>Guid</strong></td>
    <td width="20%%"><strong>Size</strong></td>
  </tr>"""
    self.WriteLn(Content)

    # One name/guid/size row per FV image.
    for Info in FvObj.BasicInfo:
        FvName = Info[0]
        FvGuid = Info[1]
        FvSize = Info[2]

        Content = """  <tr>
    <td>%s</td>
    <td>%s</td>
    <td>%s</td>
  </tr>""" % (FvName, FvGuid, FvSize)
        self.WriteLn(Content)

    Content = """  <td colspan="3"><table width="100%%" border="1">
    <tr>"""
    self.WriteLn(Content)

    # Dispatched modules, in execution order.
    EotGlobalData.gOP_DISPATCH_ORDER.write('Dispatched:\n')
    for FfsId in FvObj.OrderedFfsDict.keys():
        self.GenerateFfs(FvObj.OrderedFfsDict[FfsId])
    Content = """  </table></td>
  </tr>"""
    self.WriteLn(Content)

    # For UnDispatched
    Content = """  <td colspan="3"><table width="100%%" border="1">
    <tr>
      <tr><strong>UnDispatched</strong></tr>"""
    self.WriteLn(Content)
    EotGlobalData.gOP_DISPATCH_ORDER.write('\nUnDispatched:\n')
    for FfsId in FvObj.UnDispatchedFfsDict.keys():
        self.GenerateFfs(FvObj.UnDispatchedFfsDict[FfsId])
    Content = """  </table></td>
  </tr>"""
    self.WriteLn(Content)
## GenerateDepex() method
#
# Generate Depex information
#
# @param self: The object pointer
# @param DepexString: A DEPEX string needed to be parsed
#
def GenerateDepex(self, DepexString):
    """Render one DEPEX expression row, replacing each GUID value with
    the producing GUID's C name when the Report table can resolve it.

    @param DepexString: space-separated DEPEX expression to translate
    """
    NonGuidList = ['AND', 'OR', 'NOT', 'BEFORE', 'AFTER', 'TRUE', 'FALSE']
    ItemList = DepexString.split(' ')
    DepexString = ''
    for Item in ItemList:
        # DEPEX operators/keywords pass through untranslated.
        if Item not in NonGuidList:
            SqlCommand = """select DISTINCT GuidName from Report where GuidValue like '%s' and ItemMode = 'Produced' group by GuidName""" % (Item)
            RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
            if RecordSet != []:
                Item = RecordSet[0][0]
        DepexString = DepexString + Item + ' '
    Content = """  <tr>
    <td width="5%%"></td>
    <td width="95%%">%s</td>
  </tr>""" % (DepexString)
    self.WriteLn(Content)
## GeneratePpi() method
#
# Generate PPI information
#
# @param self: The object pointer
# @param Name: CName of a GUID
# @param Guid: Value of a GUID
# @param Type: Type of a GUID
#
def GeneratePpi(self, Name, Guid, Type):
self.GeneratePpiProtocol('Ppi', Name, Guid, Type, self.PpiIndex)
## GenerateProtocol() method
#
# Generate PROTOCOL information
#
# @param self: The object pointer
# @param Name: CName of a GUID
# @param Guid: Value of a GUID
# @param Type: Type of a GUID
#
def GenerateProtocol(self, Name, Guid, Type):
self.GeneratePpiProtocol('Protocol', Name, Guid, Type, self.ProtocolIndex)
## GeneratePpiProtocol() method
#
# Generate PPI/PROTOCOL information
#
# @param self: The object pointer
# @param Model: Model of a GUID, PPI or PROTOCOL
# @param Name: Name of a GUID
# @param Guid: Value of a GUID
# @param Type: Type of a GUID
# @param CName: CName(Index) of a GUID
#
def GeneratePpiProtocol(self, Model, Name, Guid, Type, CName):
    """Render one Ppi/Protocol row; for produced items also list every
    registered callback with its owning module, function name, and the
    function address resolved from the map files.

    @param Model: 'Ppi' or 'Protocol'
    @param Name:  C name of the GUID
    @param Guid:  GUID value string (emitted as an HTML comment)
    @param Type:  'Produced', 'Consumed' or 'Callback'
    @param CName: caller-supplied running index (not used in the markup)
    """
    Content = """  <tr>
    <td width="5%%"></td>
    <td width="10%%">%s</td>
    <td width="85%%" colspan="3">%s</td>
    <!-- %s -->
  </tr>""" % (Model, Name, Guid)
    self.WriteLn(Content)

    if Type == 'Produced':
        # List every callback registered against this GUID.
        SqlCommand = """select DISTINCT SourceFileFullPath, BelongsToFunction from Report where GuidName like '%s' and ItemMode = 'Callback'""" % Name
        RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        for Record in RecordSet:
            # Resolve the INF module that owns the callback's source file.
            SqlCommand = """select FullPath from File
                            where ID = (
                            select DISTINCT BelongsToFile from Inf
                            where Value1 like '%s')""" % Record[0]
            ModuleSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
            Inf = ModuleSet[0][0].replace(EotGlobalData.gMACRO['WORKSPACE'], '.')
            Function = Record[1]
            Address = ''
            # Look up the function address in the parsed map files; some
            # toolchains prefix symbols with '_', so try that form too.
            for Item in EotGlobalData.gMap:
                if Function in EotGlobalData.gMap[Item]:
                    Address = EotGlobalData.gMap[Item][Function]
                    break
                if '_' + Function in EotGlobalData.gMap[Item]:
                    Address = EotGlobalData.gMap[Item]['_' + Function]
                    break
            Content = """  <tr>
    <td width="5%%"></td>
    <td width="10%%">%s</td>
    <td width="40%%">%s</td>
    <td width="35%%">%s</td>
    <td width="10%%">%s</td>
  </tr>""" % ('Callback', Inf, Function, Address)
            self.WriteLn(Content)
## GenerateFfs() method
#
# Generate FFS information
#
# @param self: The object pointer
# @param FfsObj: FFS object after FV image is parsed
#
def GenerateFfs(self, FfsObj):
    """Render one FFS file as a collapsible HTML section: its identity
    row, its DEPEX expression, and its consumed/produced Ppi/Protocol
    lists.  Optionally appends the module to the plain-text dispatch list.

    @param FfsObj: FFS object obtained from the parsed FV image
    """
    self.FfsIndex = self.FfsIndex + 1
    # Only driver/application-like FFS types are reported.
    if FfsObj is not None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
        FfsGuid = FfsObj.Guid
        FfsOffset = FfsObj._OFF_
        FfsName = 'Unknown-Module'
        FfsPath = FfsGuid
        FfsType = FfsObj._TypeName[FfsObj.Type]

        # Hard code for Binary INF: well-known GUIDs that carry no INF.
        if FfsGuid.upper() == '7BB28B99-61BB-11D5-9A5D-0090273FC14D':
            FfsName = 'Logo'

        if FfsGuid.upper() == '7E374E25-8E01-4FEE-87F2-390C23C606CD':
            FfsName = 'AcpiTables'

        if FfsGuid.upper() == '961578FE-B6B7-44C3-AF35-6BC705CD2B1F':
            FfsName = 'Fat'

        # Find FFS Path and Name from the INF whose FILE_GUID matches.
        SqlCommand = """select Value2 from Inf
                        where BelongsToFile = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
                        and Model = %s and Value1='BASE_NAME'""" % (FfsGuid, 5001, 5001)
        RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        if RecordSet != []:
            FfsName = RecordSet[0][0]

        SqlCommand = """select FullPath from File
                        where ID = (select BelongsToFile from Inf where Value1 = 'FILE_GUID' and lower(Value2) = lower('%s') and Model = %s)
                        and Model = %s""" % (FfsGuid, 5001, 1011)
        RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        if RecordSet != []:
            FfsPath = RecordSet[0][0]

        # Collapsible header row for this FFS; FfsIndex keeps ids unique.
        Content = """  <tr>
      <tr class='styleFfs' id='FfsHeader%s'>
        <td width="55%%"><span onclick="Display('FfsHeader%s', 'Ffs%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">%s</span></td>
        <td width="15%%">%s</td>
        <!--<td width="20%%">%s</td>-->
        <!--<td width="20%%">%s</td>-->
        <td width="10%%">%s</td>
      </tr>
      <tr id='Ffs%s' style='display:none;'>
        <td colspan="4"><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, FfsPath, FfsName, FfsGuid, FfsOffset, FfsType, self.FfsIndex)

        # Dispatch list: 'P' marks PEI modules, 'D' marks DXE modules.
        if self.DispatchList:
            if FfsObj.Type in [0x04, 0x06]:
                self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "P", FfsName, FfsPath))
            if FfsObj.Type in [0x05, 0x07, 0x08, 0x0A]:
                self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "D", FfsName, FfsPath))

        self.WriteLn(Content)

        EotGlobalData.gOP_DISPATCH_ORDER.write('%s\n' %FfsName)

        if FfsObj.Depex != '':
            Content = """  <tr>
          <td><span id='DepexHeader%s' class="styleDepex" onclick="Display('DepexHeader%s', 'Depex%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">&nbsp&nbspDEPEX expression</span></td>
        </tr>
        <tr id='Depex%s' style='display:none;'>
          <td><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, self.FfsIndex)
            self.WriteLn(Content)
            self.GenerateDepex(FfsObj.Depex)
            Content = """  </table></td>
        </tr>"""
            self.WriteLn(Content)
        # End of DEPEX

        # Find Consumed Ppi/Protocol of this module's source files.
        SqlCommand = """select ModuleName, ItemType, GuidName, GuidValue, GuidMacro from Report
                        where SourceFileFullPath in
                        (select Value1 from Inf where BelongsToFile =
                        (select BelongsToFile from Inf
                        where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
                        and Model = %s)
                        and ItemMode = 'Consumed' group by GuidName order by ItemType""" \
                        % (FfsGuid, 5001, 3007)
        RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        if RecordSet != []:
            Count = len(RecordSet)
            Content = """  <tr>
          <td><span id='ConsumedHeader%s' class="styleConsumed" onclick="Display('ConsumedHeader%s', 'Consumed%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">&nbsp&nbspConsumed Ppis/Protocols List (%s)</span></td>
        </tr>
        <tr id='Consumed%s' style='display:none;'>
          <td><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, Count, self.FfsIndex)
            self.WriteLn(Content)
            self.ProtocolIndex = 0
            for Record in RecordSet:
                self.ProtocolIndex = self.ProtocolIndex + 1
                Name = Record[2]
                CName = Record[4]
                Guid = Record[3]
                Type = Record[1]
                self.GeneratePpiProtocol(Type, Name, Guid, 'Consumed', CName)
            Content = """  </table></td>
        </tr>"""
            self.WriteLn(Content)
        #End of Consumed Ppi/Protocol

        # Find Produced Ppi/Protocol of this module's source files.
        SqlCommand = """select ModuleName, ItemType, GuidName, GuidValue, GuidMacro from Report
                        where SourceFileFullPath in
                        (select Value1 from Inf where BelongsToFile =
                        (select BelongsToFile from Inf
                        where Value1 = 'FILE_GUID' and Value2 like '%s' and Model = %s)
                        and Model = %s)
                        and ItemMode = 'Produced' group by GuidName order by ItemType""" \
                        % (FfsGuid, 5001, 3007)
        RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
        if RecordSet != []:
            Count = len(RecordSet)
            Content = """  <tr>
          <td><span id='ProducedHeader%s' class="styleProduced" onclick="Display('ProducedHeader%s', 'Produced%s')" onMouseOver="funOnMouseOver()" onMouseOut="funOnMouseOut()">&nbsp&nbspProduced Ppis/Protocols List (%s)</span></td>
        </tr>
        <tr id='Produced%s' style='display:none;'>
          <td><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, Count, self.FfsIndex)
            self.WriteLn(Content)
            self.PpiIndex = 0
            for Record in RecordSet:
                self.PpiIndex = self.PpiIndex + 1
                Name = Record[2]
                CName = Record[4]
                Guid = Record[3]
                Type = Record[1]
                self.GeneratePpiProtocol(Type, Name, Guid, 'Produced', CName)
            Content = """  </table></td>
        </tr>"""
            self.WriteLn(Content)
        RecordSet = None
        # End of Produced Ppi/Protocol

        Content = """  </table></td>
      </tr>"""
        self.WriteLn(Content)
## GenerateTail() method
#
# Generate end tags of HTML report
#
# @param self: The object pointer
#
def GenerateTail(self):
Tail = """</table>
</body>
</html>"""
self.WriteLn(Tail)
## GenerateHeader() method
#
# Generate start tags of HTML report
#
# @param self: The object pointer
#
def GenerateHeader(self):
    """Write the opening HTML, CSS and JavaScript of the report.

    Emits the document head (styles for the Ffs/Depex/Produced/Consumed
    rows, the Display() show/hide helper and the mouse-cursor handlers)
    and opens the top-level table that GenerateTail() later closes.
    """
    # NOTE(fix): this literal previously said width="100%%".  '%%' is only
    # an escape inside %-formatted strings; this string is never
    # %-formatted, so the report contained the invalid attribute
    # width="100%%".  It is now the intended width="100%".
    Header = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>Execution Order Tool Report</title>
<meta http-equiv="Content-Type" content="text/html">
<style type="text/css">
<!--
.styleFfs {
color: #006600;
font-weight: bold;
}
.styleDepex {
color: #FF0066;
font-weight: bold;
}
.styleProduced {
color: #0000FF;
font-weight: bold;
}
.styleConsumed {
color: #FF00FF;
font-weight: bold;
}
-->
</style>
<Script type="text/javascript">
function Display(ParentID, SubID)
{
SubItem = document.getElementById(SubID);
ParentItem = document.getElementById(ParentID);
if (SubItem.style.display == 'none')
{
SubItem.style.display = ''
ParentItem.style.fontWeight = 'normal'
}
else
{
SubItem.style.display = 'none'
ParentItem.style.fontWeight = 'bold'
}
}
function funOnMouseOver()
{
document.body.style.cursor = "hand";
}
function funOnMouseOut()
{
document.body.style.cursor = "";
}
</Script>
</head>
<body>
<table width="100%" border="1">"""
    self.WriteLn(Header)
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Initialize log system
    FilePath = 'FVRECOVERYFLOPPY.fv'
    if FilePath.lower().endswith(".fv"):
        # Read the firmware-volume image into a byte array.  Use a context
        # manager so the file handle is always closed — the original opened
        # the file and never closed it, leaking the descriptor if
        # fromfile() raised.
        buf = array('B')
        with open(FilePath, 'rb') as fd:
            try:
                buf.fromfile(fd, os.path.getsize(FilePath))
            except EOFError:
                # A short read leaves buf holding the bytes read so far,
                # matching the original best-effort behavior.
                pass
        fv = FirmwareVolume("FVRECOVERY", buf, 0)
        report = Report('Report.html', fv)
        report.GenerateReport()
| edk2-master | BaseTools/Source/Python/Eot/Report.py |
# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
from antlr3 import *
from antlr3.compat import set, frozenset
## @file
# The file defines the Lexer for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# for convenience in actions
HIDDEN = BaseRecognizer.HIDDEN
# token types
# NOTE(review): these numeric token-type IDs were assigned by the ANTLR 3.0.1
# code generator from C.g.  The Txx names are the grammar's anonymous keyword
# and punctuation tokens (their literals appear in the mTxx lexer methods
# below).  The values are baked into the generated DFA tables, so they must
# never be renumbered by hand — regenerate from C.g instead.
T114=114
T115=115
T116=116
T117=117
FloatTypeSuffix=16
LETTER=11
T29=29
T28=28
T27=27
T26=26
T25=25
EOF=-1
STRING_LITERAL=9
FLOATING_POINT_LITERAL=10
T38=38
T37=37
T39=39
T34=34
COMMENT=22
T33=33
T36=36
T35=35
T30=30
T32=32
T31=31
LINE_COMMENT=23
IntegerTypeSuffix=14
CHARACTER_LITERAL=8
T49=49
T48=48
T100=100
T43=43
T42=42
T102=102
T41=41
T101=101
T40=40
T47=47
T46=46
T45=45
T44=44
T109=109
T107=107
T108=108
T105=105
WS=19
T106=106
T103=103
T104=104
T50=50
LINE_COMMAND=24
T59=59
T113=113
T52=52
T112=112
T51=51
T111=111
T54=54
T110=110
EscapeSequence=12
DECIMAL_LITERAL=7
T53=53
T56=56
T55=55
T58=58
T57=57
T75=75
T76=76
T73=73
T74=74
T79=79
T77=77
T78=78
Exponent=15
HexDigit=13
T72=72
T71=71
T70=70
T62=62
T63=63
T64=64
T65=65
T66=66
T67=67
T68=68
T69=69
IDENTIFIER=4
UnicodeVocabulary=21
HEX_LITERAL=5
T61=61
T60=60
T99=99
T97=97
BS=20
T98=98
T95=95
T96=96
OCTAL_LITERAL=6
T94=94
Tokens=118
T93=93
T92=92
T91=91
T90=90
T88=88
T89=89
T84=84
T85=85
T86=86
T87=87
UnicodeEscape=18
T81=81
T80=80
T83=83
OctalEscape=17
T82=82
class CLexer(Lexer):
grammarFileName = "C.g"
def __init__(self, input=None):
Lexer.__init__(self, input)
self.dfa25 = self.DFA25(
self, 25,
eot = self.DFA25_eot,
eof = self.DFA25_eof,
min = self.DFA25_min,
max = self.DFA25_max,
accept = self.DFA25_accept,
special = self.DFA25_special,
transition = self.DFA25_transition
)
self.dfa35 = self.DFA35(
self, 35,
eot = self.DFA35_eot,
eof = self.DFA35_eof,
min = self.DFA35_min,
max = self.DFA35_max,
accept = self.DFA35_accept,
special = self.DFA35_special,
transition = self.DFA35_transition
)
# $ANTLR start T25
def mT25(self, ):
try:
self.type = T25
# C.g:27:5: ( ';' )
# C.g:27:7: ';'
self.match(u';')
finally:
pass
# $ANTLR end T25
# $ANTLR start T26
def mT26(self, ):
try:
self.type = T26
# C.g:28:5: ( 'typedef' )
# C.g:28:7: 'typedef'
self.match("typedef")
finally:
pass
# $ANTLR end T26
# $ANTLR start T27
def mT27(self, ):
try:
self.type = T27
# C.g:29:5: ( ',' )
# C.g:29:7: ','
self.match(u',')
finally:
pass
# $ANTLR end T27
# $ANTLR start T28
def mT28(self, ):
try:
self.type = T28
# C.g:30:5: ( '=' )
# C.g:30:7: '='
self.match(u'=')
finally:
pass
# $ANTLR end T28
# $ANTLR start T29
def mT29(self, ):
try:
self.type = T29
# C.g:31:5: ( 'extern' )
# C.g:31:7: 'extern'
self.match("extern")
finally:
pass
# $ANTLR end T29
# $ANTLR start T30
def mT30(self, ):
try:
self.type = T30
# C.g:32:5: ( 'static' )
# C.g:32:7: 'static'
self.match("static")
finally:
pass
# $ANTLR end T30
# $ANTLR start T31
def mT31(self, ):
try:
self.type = T31
# C.g:33:5: ( 'auto' )
# C.g:33:7: 'auto'
self.match("auto")
finally:
pass
# $ANTLR end T31
# $ANTLR start T32
def mT32(self, ):
try:
self.type = T32
# C.g:34:5: ( 'register' )
# C.g:34:7: 'register'
self.match("register")
finally:
pass
# $ANTLR end T32
# $ANTLR start T33
def mT33(self, ):
try:
self.type = T33
# C.g:35:5: ( 'STATIC' )
# C.g:35:7: 'STATIC'
self.match("STATIC")
finally:
pass
# $ANTLR end T33
# $ANTLR start T34
def mT34(self, ):
try:
self.type = T34
# C.g:36:5: ( 'void' )
# C.g:36:7: 'void'
self.match("void")
finally:
pass
# $ANTLR end T34
# $ANTLR start T35
def mT35(self, ):
try:
self.type = T35
# C.g:37:5: ( 'char' )
# C.g:37:7: 'char'
self.match("char")
finally:
pass
# $ANTLR end T35
# $ANTLR start T36
def mT36(self, ):
try:
self.type = T36
# C.g:38:5: ( 'short' )
# C.g:38:7: 'short'
self.match("short")
finally:
pass
# $ANTLR end T36
# $ANTLR start T37
def mT37(self, ):
try:
self.type = T37
# C.g:39:5: ( 'int' )
# C.g:39:7: 'int'
self.match("int")
finally:
pass
# $ANTLR end T37
# $ANTLR start T38
def mT38(self, ):
try:
self.type = T38
# C.g:40:5: ( 'long' )
# C.g:40:7: 'long'
self.match("long")
finally:
pass
# $ANTLR end T38
# $ANTLR start T39
def mT39(self, ):
try:
self.type = T39
# C.g:41:5: ( 'float' )
# C.g:41:7: 'float'
self.match("float")
finally:
pass
# $ANTLR end T39
# $ANTLR start T40
def mT40(self, ):
try:
self.type = T40
# C.g:42:5: ( 'double' )
# C.g:42:7: 'double'
self.match("double")
finally:
pass
# $ANTLR end T40
# $ANTLR start T41
def mT41(self, ):
try:
self.type = T41
# C.g:43:5: ( 'signed' )
# C.g:43:7: 'signed'
self.match("signed")
finally:
pass
# $ANTLR end T41
# $ANTLR start T42
def mT42(self, ):
try:
self.type = T42
# C.g:44:5: ( 'unsigned' )
# C.g:44:7: 'unsigned'
self.match("unsigned")
finally:
pass
# $ANTLR end T42
# $ANTLR start T43
def mT43(self, ):
try:
self.type = T43
# C.g:45:5: ( '{' )
# C.g:45:7: '{'
self.match(u'{')
finally:
pass
# $ANTLR end T43
# $ANTLR start T44
def mT44(self, ):
try:
self.type = T44
# C.g:46:5: ( '}' )
# C.g:46:7: '}'
self.match(u'}')
finally:
pass
# $ANTLR end T44
# $ANTLR start T45
def mT45(self, ):
try:
self.type = T45
# C.g:47:5: ( 'struct' )
# C.g:47:7: 'struct'
self.match("struct")
finally:
pass
# $ANTLR end T45
# $ANTLR start T46
def mT46(self, ):
try:
self.type = T46
# C.g:48:5: ( 'union' )
# C.g:48:7: 'union'
self.match("union")
finally:
pass
# $ANTLR end T46
# $ANTLR start T47
def mT47(self, ):
try:
self.type = T47
# C.g:49:5: ( ':' )
# C.g:49:7: ':'
self.match(u':')
finally:
pass
# $ANTLR end T47
# $ANTLR start T48
def mT48(self, ):
try:
self.type = T48
# C.g:50:5: ( 'enum' )
# C.g:50:7: 'enum'
self.match("enum")
finally:
pass
# $ANTLR end T48
# $ANTLR start T49
def mT49(self, ):
try:
self.type = T49
# C.g:51:5: ( 'const' )
# C.g:51:7: 'const'
self.match("const")
finally:
pass
# $ANTLR end T49
# $ANTLR start T50
def mT50(self, ):
try:
self.type = T50
# C.g:52:5: ( 'volatile' )
# C.g:52:7: 'volatile'
self.match("volatile")
finally:
pass
# $ANTLR end T50
# $ANTLR start T51
def mT51(self, ):
try:
self.type = T51
# C.g:53:5: ( 'IN' )
# C.g:53:7: 'IN'
self.match("IN")
finally:
pass
# $ANTLR end T51
# $ANTLR start T52
def mT52(self, ):
try:
self.type = T52
# C.g:54:5: ( 'OUT' )
# C.g:54:7: 'OUT'
self.match("OUT")
finally:
pass
# $ANTLR end T52
# $ANTLR start T53
def mT53(self, ):
try:
self.type = T53
# C.g:55:5: ( 'OPTIONAL' )
# C.g:55:7: 'OPTIONAL'
self.match("OPTIONAL")
finally:
pass
# $ANTLR end T53
# $ANTLR start T54
def mT54(self, ):
try:
self.type = T54
# C.g:56:5: ( 'CONST' )
# C.g:56:7: 'CONST'
self.match("CONST")
finally:
pass
# $ANTLR end T54
# $ANTLR start T55
def mT55(self, ):
try:
self.type = T55
# C.g:57:5: ( 'UNALIGNED' )
# C.g:57:7: 'UNALIGNED'
self.match("UNALIGNED")
finally:
pass
# $ANTLR end T55
# $ANTLR start T56
def mT56(self, ):
try:
self.type = T56
# C.g:58:5: ( 'VOLATILE' )
# C.g:58:7: 'VOLATILE'
self.match("VOLATILE")
finally:
pass
# $ANTLR end T56
# $ANTLR start T57
def mT57(self, ):
try:
self.type = T57
# C.g:59:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
# C.g:59:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
self.match("GLOBAL_REMOVE_IF_UNREFERENCED")
finally:
pass
# $ANTLR end T57
# $ANTLR start T58
def mT58(self, ):
try:
self.type = T58
# C.g:60:5: ( 'EFIAPI' )
# C.g:60:7: 'EFIAPI'
self.match("EFIAPI")
finally:
pass
# $ANTLR end T58
# $ANTLR start T59
def mT59(self, ):
try:
self.type = T59
# C.g:61:5: ( 'EFI_BOOTSERVICE' )
# C.g:61:7: 'EFI_BOOTSERVICE'
self.match("EFI_BOOTSERVICE")
finally:
pass
# $ANTLR end T59
# $ANTLR start T60
def mT60(self, ):
try:
self.type = T60
# C.g:62:5: ( 'EFI_RUNTIMESERVICE' )
# C.g:62:7: 'EFI_RUNTIMESERVICE'
self.match("EFI_RUNTIMESERVICE")
finally:
pass
# $ANTLR end T60
# $ANTLR start T61
def mT61(self, ):
try:
self.type = T61
# C.g:63:5: ( 'PACKED' )
# C.g:63:7: 'PACKED'
self.match("PACKED")
finally:
pass
# $ANTLR end T61
# $ANTLR start T62
def mT62(self, ):
try:
self.type = T62
# C.g:64:5: ( '(' )
# C.g:64:7: '('
self.match(u'(')
finally:
pass
# $ANTLR end T62
# $ANTLR start T63
def mT63(self, ):
try:
self.type = T63
# C.g:65:5: ( ')' )
# C.g:65:7: ')'
self.match(u')')
finally:
pass
# $ANTLR end T63
# $ANTLR start T64
def mT64(self, ):
try:
self.type = T64
# C.g:66:5: ( '[' )
# C.g:66:7: '['
self.match(u'[')
finally:
pass
# $ANTLR end T64
# $ANTLR start T65
def mT65(self, ):
try:
self.type = T65
# C.g:67:5: ( ']' )
# C.g:67:7: ']'
self.match(u']')
finally:
pass
# $ANTLR end T65
# $ANTLR start T66
def mT66(self, ):
try:
self.type = T66
# C.g:68:5: ( '*' )
# C.g:68:7: '*'
self.match(u'*')
finally:
pass
# $ANTLR end T66
# $ANTLR start T67
def mT67(self, ):
try:
self.type = T67
# C.g:69:5: ( '...' )
# C.g:69:7: '...'
self.match("...")
finally:
pass
# $ANTLR end T67
# $ANTLR start T68
def mT68(self, ):
try:
self.type = T68
# C.g:70:5: ( '+' )
# C.g:70:7: '+'
self.match(u'+')
finally:
pass
# $ANTLR end T68
# $ANTLR start T69
def mT69(self, ):
try:
self.type = T69
# C.g:71:5: ( '-' )
# C.g:71:7: '-'
self.match(u'-')
finally:
pass
# $ANTLR end T69
# $ANTLR start T70
def mT70(self, ):
try:
self.type = T70
# C.g:72:5: ( '/' )
# C.g:72:7: '/'
self.match(u'/')
finally:
pass
# $ANTLR end T70
# $ANTLR start T71
def mT71(self, ):
try:
self.type = T71
# C.g:73:5: ( '%' )
# C.g:73:7: '%'
self.match(u'%')
finally:
pass
# $ANTLR end T71
# $ANTLR start T72
def mT72(self, ):
try:
self.type = T72
# C.g:74:5: ( '++' )
# C.g:74:7: '++'
self.match("++")
finally:
pass
# $ANTLR end T72
# $ANTLR start T73
def mT73(self, ):
try:
self.type = T73
# C.g:75:5: ( '--' )
# C.g:75:7: '--'
self.match("--")
finally:
pass
# $ANTLR end T73
# $ANTLR start T74
def mT74(self, ):
try:
self.type = T74
# C.g:76:5: ( 'sizeof' )
# C.g:76:7: 'sizeof'
self.match("sizeof")
finally:
pass
# $ANTLR end T74
# $ANTLR start T75
def mT75(self, ):
try:
self.type = T75
# C.g:77:5: ( '.' )
# C.g:77:7: '.'
self.match(u'.')
finally:
pass
# $ANTLR end T75
# $ANTLR start T76
def mT76(self, ):
try:
self.type = T76
# C.g:78:5: ( '->' )
# C.g:78:7: '->'
self.match("->")
finally:
pass
# $ANTLR end T76
# $ANTLR start T77
def mT77(self, ):
try:
self.type = T77
# C.g:79:5: ( '&' )
# C.g:79:7: '&'
self.match(u'&')
finally:
pass
# $ANTLR end T77
# $ANTLR start T78
def mT78(self, ):
try:
self.type = T78
# C.g:80:5: ( '~' )
# C.g:80:7: '~'
self.match(u'~')
finally:
pass
# $ANTLR end T78
# $ANTLR start T79
def mT79(self, ):
try:
self.type = T79
# C.g:81:5: ( '!' )
# C.g:81:7: '!'
self.match(u'!')
finally:
pass
# $ANTLR end T79
# $ANTLR start T80
def mT80(self, ):
try:
self.type = T80
# C.g:82:5: ( '*=' )
# C.g:82:7: '*='
self.match("*=")
finally:
pass
# $ANTLR end T80
# $ANTLR start T81
def mT81(self, ):
try:
self.type = T81
# C.g:83:5: ( '/=' )
# C.g:83:7: '/='
self.match("/=")
finally:
pass
# $ANTLR end T81
# $ANTLR start T82
def mT82(self, ):
try:
self.type = T82
# C.g:84:5: ( '%=' )
# C.g:84:7: '%='
self.match("%=")
finally:
pass
# $ANTLR end T82
# $ANTLR start T83
def mT83(self, ):
try:
self.type = T83
# C.g:85:5: ( '+=' )
# C.g:85:7: '+='
self.match("+=")
finally:
pass
# $ANTLR end T83
# $ANTLR start T84
def mT84(self, ):
try:
self.type = T84
# C.g:86:5: ( '-=' )
# C.g:86:7: '-='
self.match("-=")
finally:
pass
# $ANTLR end T84
# $ANTLR start T85
def mT85(self, ):
try:
self.type = T85
# C.g:87:5: ( '<<=' )
# C.g:87:7: '<<='
self.match("<<=")
finally:
pass
# $ANTLR end T85
# $ANTLR start T86
def mT86(self, ):
try:
self.type = T86
# C.g:88:5: ( '>>=' )
# C.g:88:7: '>>='
self.match(">>=")
finally:
pass
# $ANTLR end T86
# $ANTLR start T87
def mT87(self, ):
try:
self.type = T87
# C.g:89:5: ( '&=' )
# C.g:89:7: '&='
self.match("&=")
finally:
pass
# $ANTLR end T87
# $ANTLR start T88
def mT88(self, ):
try:
self.type = T88
# C.g:90:5: ( '^=' )
# C.g:90:7: '^='
self.match("^=")
finally:
pass
# $ANTLR end T88
# $ANTLR start T89
def mT89(self, ):
try:
self.type = T89
# C.g:91:5: ( '|=' )
# C.g:91:7: '|='
self.match("|=")
finally:
pass
# $ANTLR end T89
# $ANTLR start T90
def mT90(self, ):
try:
self.type = T90
# C.g:92:5: ( '?' )
# C.g:92:7: '?'
self.match(u'?')
finally:
pass
# $ANTLR end T90
# $ANTLR start T91
def mT91(self, ):
try:
self.type = T91
# C.g:93:5: ( '||' )
# C.g:93:7: '||'
self.match("||")
finally:
pass
# $ANTLR end T91
# $ANTLR start T92
def mT92(self, ):
try:
self.type = T92
# C.g:94:5: ( '&&' )
# C.g:94:7: '&&'
self.match("&&")
finally:
pass
# $ANTLR end T92
# $ANTLR start T93
def mT93(self, ):
try:
self.type = T93
# C.g:95:5: ( '|' )
# C.g:95:7: '|'
self.match(u'|')
finally:
pass
# $ANTLR end T93
# $ANTLR start T94
def mT94(self, ):
try:
self.type = T94
# C.g:96:5: ( '^' )
# C.g:96:7: '^'
self.match(u'^')
finally:
pass
# $ANTLR end T94
# $ANTLR start T95
def mT95(self, ):
try:
self.type = T95
# C.g:97:5: ( '==' )
# C.g:97:7: '=='
self.match("==")
finally:
pass
# $ANTLR end T95
# $ANTLR start T96
def mT96(self, ):
try:
self.type = T96
# C.g:98:5: ( '!=' )
# C.g:98:7: '!='
self.match("!=")
finally:
pass
# $ANTLR end T96
# $ANTLR start T97
def mT97(self, ):
try:
self.type = T97
# C.g:99:5: ( '<' )
# C.g:99:7: '<'
self.match(u'<')
finally:
pass
# $ANTLR end T97
# $ANTLR start T98
def mT98(self, ):
try:
self.type = T98
# C.g:100:5: ( '>' )
# C.g:100:7: '>'
self.match(u'>')
finally:
pass
# $ANTLR end T98
# $ANTLR start T99
def mT99(self, ):
try:
self.type = T99
# C.g:101:5: ( '<=' )
# C.g:101:7: '<='
self.match("<=")
finally:
pass
# $ANTLR end T99
# $ANTLR start T100
def mT100(self, ):
try:
self.type = T100
# C.g:102:6: ( '>=' )
# C.g:102:8: '>='
self.match(">=")
finally:
pass
# $ANTLR end T100
# $ANTLR start T101
def mT101(self, ):
try:
self.type = T101
# C.g:103:6: ( '<<' )
# C.g:103:8: '<<'
self.match("<<")
finally:
pass
# $ANTLR end T101
# $ANTLR start T102
def mT102(self, ):
try:
self.type = T102
# C.g:104:6: ( '>>' )
# C.g:104:8: '>>'
self.match(">>")
finally:
pass
# $ANTLR end T102
# $ANTLR start T103
def mT103(self, ):
try:
self.type = T103
# C.g:105:6: ( '__asm__' )
# C.g:105:8: '__asm__'
self.match("__asm__")
finally:
pass
# $ANTLR end T103
# $ANTLR start T104
def mT104(self, ):
try:
self.type = T104
# C.g:106:6: ( '_asm' )
# C.g:106:8: '_asm'
self.match("_asm")
finally:
pass
# $ANTLR end T104
# $ANTLR start T105
def mT105(self, ):
try:
self.type = T105
# C.g:107:6: ( '__asm' )
# C.g:107:8: '__asm'
self.match("__asm")
finally:
pass
# $ANTLR end T105
# $ANTLR start T106
def mT106(self, ):
try:
self.type = T106
# C.g:108:6: ( 'case' )
# C.g:108:8: 'case'
self.match("case")
finally:
pass
# $ANTLR end T106
# $ANTLR start T107
def mT107(self, ):
try:
self.type = T107
# C.g:109:6: ( 'default' )
# C.g:109:8: 'default'
self.match("default")
finally:
pass
# $ANTLR end T107
# $ANTLR start T108
def mT108(self, ):
try:
self.type = T108
# C.g:110:6: ( 'if' )
# C.g:110:8: 'if'
self.match("if")
finally:
pass
# $ANTLR end T108
# $ANTLR start T109
def mT109(self, ):
try:
self.type = T109
# C.g:111:6: ( 'else' )
# C.g:111:8: 'else'
self.match("else")
finally:
pass
# $ANTLR end T109
# $ANTLR start T110
def mT110(self, ):
try:
self.type = T110
# C.g:112:6: ( 'switch' )
# C.g:112:8: 'switch'
self.match("switch")
finally:
pass
# $ANTLR end T110
# $ANTLR start T111
def mT111(self, ):
try:
self.type = T111
# C.g:113:6: ( 'while' )
# C.g:113:8: 'while'
self.match("while")
finally:
pass
# $ANTLR end T111
# $ANTLR start T112
def mT112(self, ):
try:
self.type = T112
# C.g:114:6: ( 'do' )
# C.g:114:8: 'do'
self.match("do")
finally:
pass
# $ANTLR end T112
# $ANTLR start T113
def mT113(self, ):
try:
self.type = T113
# C.g:115:6: ( 'for' )
# C.g:115:8: 'for'
self.match("for")
finally:
pass
# $ANTLR end T113
# $ANTLR start T114
def mT114(self, ):
try:
self.type = T114
# C.g:116:6: ( 'goto' )
# C.g:116:8: 'goto'
self.match("goto")
finally:
pass
# $ANTLR end T114
# $ANTLR start T115
def mT115(self, ):
try:
self.type = T115
# C.g:117:6: ( 'continue' )
# C.g:117:8: 'continue'
self.match("continue")
finally:
pass
# $ANTLR end T115
# $ANTLR start T116
def mT116(self, ):
try:
self.type = T116
# C.g:118:6: ( 'break' )
# C.g:118:8: 'break'
self.match("break")
finally:
pass
# $ANTLR end T116
# $ANTLR start T117
def mT117(self, ):
try:
self.type = T117
# C.g:119:6: ( 'return' )
# C.g:119:8: 'return'
self.match("return")
finally:
pass
# $ANTLR end T117
# $ANTLR start IDENTIFIER
def mIDENTIFIER(self, ):
try:
self.type = IDENTIFIER
# C.g:586:2: ( LETTER ( LETTER | '0' .. '9' )* )
# C.g:586:4: LETTER ( LETTER | '0' .. '9' )*
self.mLETTER()
# C.g:586:11: ( LETTER | '0' .. '9' )*
while True: #loop1
alt1 = 2
LA1_0 = self.input.LA(1)
if (LA1_0 == u'$' or (u'0' <= LA1_0 <= u'9') or (u'A' <= LA1_0 <= u'Z') or LA1_0 == u'_' or (u'a' <= LA1_0 <= u'z')) :
alt1 = 1
if alt1 == 1:
# C.g:
if self.input.LA(1) == u'$' or (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
else:
break #loop1
finally:
pass
# $ANTLR end IDENTIFIER
# $ANTLR start LETTER
def mLETTER(self, ):
try:
# C.g:591:2: ( '$' | 'A' .. 'Z' | 'a' .. 'z' | '_' )
# C.g:
if self.input.LA(1) == u'$' or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end LETTER
# $ANTLR start CHARACTER_LITERAL
def mCHARACTER_LITERAL(self, ):
try:
self.type = CHARACTER_LITERAL
# C.g:598:5: ( ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\'' )
# C.g:598:9: ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\''
# C.g:598:9: ( 'L' )?
alt2 = 2
LA2_0 = self.input.LA(1)
if (LA2_0 == u'L') :
alt2 = 1
if alt2 == 1:
# C.g:598:10: 'L'
self.match(u'L')
self.match(u'\'')
# C.g:598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )
alt3 = 2
LA3_0 = self.input.LA(1)
if (LA3_0 == u'\\') :
alt3 = 1
elif ((u'\u0000' <= LA3_0 <= u'&') or (u'(' <= LA3_0 <= u'[') or (u']' <= LA3_0 <= u'\uFFFE')) :
alt3 = 2
else:
nvae = NoViableAltException("598:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )", 3, 0, self.input)
raise nvae
if alt3 == 1:
# C.g:598:23: EscapeSequence
self.mEscapeSequence()
elif alt3 == 2:
# C.g:598:40: ~ ( '\\'' | '\\\\' )
if (u'\u0000' <= self.input.LA(1) <= u'&') or (u'(' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'):
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
self.match(u'\'')
finally:
pass
# $ANTLR end CHARACTER_LITERAL
# $ANTLR start STRING_LITERAL
def mSTRING_LITERAL(self, ):
try:
self.type = STRING_LITERAL
# C.g:602:5: ( ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"' )
# C.g:602:8: ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"'
# C.g:602:8: ( 'L' )?
alt4 = 2
LA4_0 = self.input.LA(1)
if (LA4_0 == u'L') :
alt4 = 1
if alt4 == 1:
# C.g:602:9: 'L'
self.match(u'L')
self.match(u'"')
# C.g:602:19: ( EscapeSequence | ~ ( '\\\\' | '\"' ) )*
while True: #loop5
alt5 = 3
LA5_0 = self.input.LA(1)
if (LA5_0 == u'\\') :
alt5 = 1
elif ((u'\u0000' <= LA5_0 <= u'!') or (u'#' <= LA5_0 <= u'[') or (u']' <= LA5_0 <= u'\uFFFE')) :
alt5 = 2
if alt5 == 1:
# C.g:602:21: EscapeSequence
self.mEscapeSequence()
elif alt5 == 2:
# C.g:602:38: ~ ( '\\\\' | '\"' )
if (u'\u0000' <= self.input.LA(1) <= u'!') or (u'#' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'):
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
else:
break #loop5
self.match(u'"')
finally:
pass
# $ANTLR end STRING_LITERAL
# $ANTLR start HEX_LITERAL
def mHEX_LITERAL(self, ):
try:
self.type = HEX_LITERAL
# C.g:605:13: ( '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )? )
# C.g:605:15: '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )?
self.match(u'0')
if self.input.LA(1) == u'X' or self.input.LA(1) == u'x':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
# C.g:605:29: ( HexDigit )+
cnt6 = 0
while True: #loop6
alt6 = 2
LA6_0 = self.input.LA(1)
if ((u'0' <= LA6_0 <= u'9') or (u'A' <= LA6_0 <= u'F') or (u'a' <= LA6_0 <= u'f')) :
alt6 = 1
if alt6 == 1:
# C.g:605:29: HexDigit
self.mHexDigit()
else:
if cnt6 >= 1:
break #loop6
eee = EarlyExitException(6, self.input)
raise eee
cnt6 += 1
# C.g:605:39: ( IntegerTypeSuffix )?
alt7 = 2
LA7_0 = self.input.LA(1)
if (LA7_0 == u'L' or LA7_0 == u'U' or LA7_0 == u'l' or LA7_0 == u'u') :
alt7 = 1
if alt7 == 1:
# C.g:605:39: IntegerTypeSuffix
self.mIntegerTypeSuffix()
finally:
pass
# $ANTLR end HEX_LITERAL
# $ANTLR start DECIMAL_LITERAL
def mDECIMAL_LITERAL(self, ):
try:
self.type = DECIMAL_LITERAL
# C.g:607:17: ( ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )? )
# C.g:607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )?
# C.g:607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )
alt9 = 2
LA9_0 = self.input.LA(1)
if (LA9_0 == u'0') :
alt9 = 1
elif ((u'1' <= LA9_0 <= u'9')) :
alt9 = 2
else:
nvae = NoViableAltException("607:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )", 9, 0, self.input)
raise nvae
if alt9 == 1:
# C.g:607:20: '0'
self.match(u'0')
elif alt9 == 2:
# C.g:607:26: '1' .. '9' ( '0' .. '9' )*
self.matchRange(u'1', u'9')
# C.g:607:35: ( '0' .. '9' )*
while True: #loop8
alt8 = 2
LA8_0 = self.input.LA(1)
if ((u'0' <= LA8_0 <= u'9')) :
alt8 = 1
if alt8 == 1:
# C.g:607:35: '0' .. '9'
self.matchRange(u'0', u'9')
else:
break #loop8
# C.g:607:46: ( IntegerTypeSuffix )?
alt10 = 2
LA10_0 = self.input.LA(1)
if (LA10_0 == u'L' or LA10_0 == u'U' or LA10_0 == u'l' or LA10_0 == u'u') :
alt10 = 1
if alt10 == 1:
# C.g:607:46: IntegerTypeSuffix
self.mIntegerTypeSuffix()
finally:
pass
# $ANTLR end DECIMAL_LITERAL
# $ANTLR start OCTAL_LITERAL
def mOCTAL_LITERAL(self, ):
try:
self.type = OCTAL_LITERAL
# C.g:609:15: ( '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? )
# C.g:609:17: '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )?
self.match(u'0')
# C.g:609:21: ( '0' .. '7' )+
cnt11 = 0
while True: #loop11
alt11 = 2
LA11_0 = self.input.LA(1)
if ((u'0' <= LA11_0 <= u'7')) :
alt11 = 1
if alt11 == 1:
# C.g:609:22: '0' .. '7'
self.matchRange(u'0', u'7')
else:
if cnt11 >= 1:
break #loop11
eee = EarlyExitException(11, self.input)
raise eee
cnt11 += 1
# C.g:609:33: ( IntegerTypeSuffix )?
alt12 = 2
LA12_0 = self.input.LA(1)
if (LA12_0 == u'L' or LA12_0 == u'U' or LA12_0 == u'l' or LA12_0 == u'u') :
alt12 = 1
if alt12 == 1:
# C.g:609:33: IntegerTypeSuffix
self.mIntegerTypeSuffix()
finally:
pass
# $ANTLR end OCTAL_LITERAL
# $ANTLR start HexDigit
def mHexDigit(self, ):
try:
# C.g:612:10: ( ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) )
# C.g:612:12: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' )
if (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'F') or (u'a' <= self.input.LA(1) <= u'f'):
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end HexDigit
# $ANTLR start IntegerTypeSuffix
def mIntegerTypeSuffix(self, ):
try:
# C.g:616:2: ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) )
alt13 = 4
LA13_0 = self.input.LA(1)
if (LA13_0 == u'U' or LA13_0 == u'u') :
LA13_1 = self.input.LA(2)
if (LA13_1 == u'L' or LA13_1 == u'l') :
LA13_3 = self.input.LA(3)
if (LA13_3 == u'L' or LA13_3 == u'l') :
alt13 = 4
else:
alt13 = 3
else:
alt13 = 1
elif (LA13_0 == u'L' or LA13_0 == u'l') :
alt13 = 2
else:
nvae = NoViableAltException("614:1: fragment IntegerTypeSuffix : ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) );", 13, 0, self.input)
raise nvae
if alt13 == 1:
# C.g:616:4: ( 'u' | 'U' )
if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
elif alt13 == 2:
# C.g:617:4: ( 'l' | 'L' )
if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
elif alt13 == 3:
# C.g:618:4: ( 'u' | 'U' ) ( 'l' | 'L' )
if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
elif alt13 == 4:
# C.g:619:4: ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' )
if self.input.LA(1) == u'U' or self.input.LA(1) == u'u':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
if self.input.LA(1) == u'L' or self.input.LA(1) == u'l':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end IntegerTypeSuffix
# $ANTLR start FLOATING_POINT_LITERAL
def mFLOATING_POINT_LITERAL(self, ):
try:
self.type = FLOATING_POINT_LITERAL
# C.g:623:5: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )? | '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )? | ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )? | ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix )
alt25 = 4
alt25 = self.dfa25.predict(self.input)
if alt25 == 1:
# C.g:623:9: ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )?
# C.g:623:9: ( '0' .. '9' )+
cnt14 = 0
while True: #loop14
alt14 = 2
LA14_0 = self.input.LA(1)
if ((u'0' <= LA14_0 <= u'9')) :
alt14 = 1
if alt14 == 1:
# C.g:623:10: '0' .. '9'
self.matchRange(u'0', u'9')
else:
if cnt14 >= 1:
break #loop14
eee = EarlyExitException(14, self.input)
raise eee
cnt14 += 1
self.match(u'.')
# C.g:623:25: ( '0' .. '9' )*
while True: #loop15
alt15 = 2
LA15_0 = self.input.LA(1)
if ((u'0' <= LA15_0 <= u'9')) :
alt15 = 1
if alt15 == 1:
# C.g:623:26: '0' .. '9'
self.matchRange(u'0', u'9')
else:
break #loop15
# C.g:623:37: ( Exponent )?
alt16 = 2
LA16_0 = self.input.LA(1)
if (LA16_0 == u'E' or LA16_0 == u'e') :
alt16 = 1
if alt16 == 1:
# C.g:623:37: Exponent
self.mExponent()
# C.g:623:47: ( FloatTypeSuffix )?
alt17 = 2
LA17_0 = self.input.LA(1)
if (LA17_0 == u'D' or LA17_0 == u'F' or LA17_0 == u'd' or LA17_0 == u'f') :
alt17 = 1
if alt17 == 1:
# C.g:623:47: FloatTypeSuffix
self.mFloatTypeSuffix()
elif alt25 == 2:
# C.g:624:9: '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )?
self.match(u'.')
# C.g:624:13: ( '0' .. '9' )+
cnt18 = 0
while True: #loop18
alt18 = 2
LA18_0 = self.input.LA(1)
if ((u'0' <= LA18_0 <= u'9')) :
alt18 = 1
if alt18 == 1:
# C.g:624:14: '0' .. '9'
self.matchRange(u'0', u'9')
else:
if cnt18 >= 1:
break #loop18
eee = EarlyExitException(18, self.input)
raise eee
cnt18 += 1
# C.g:624:25: ( Exponent )?
alt19 = 2
LA19_0 = self.input.LA(1)
if (LA19_0 == u'E' or LA19_0 == u'e') :
alt19 = 1
if alt19 == 1:
# C.g:624:25: Exponent
self.mExponent()
# C.g:624:35: ( FloatTypeSuffix )?
alt20 = 2
LA20_0 = self.input.LA(1)
if (LA20_0 == u'D' or LA20_0 == u'F' or LA20_0 == u'd' or LA20_0 == u'f') :
alt20 = 1
if alt20 == 1:
# C.g:624:35: FloatTypeSuffix
self.mFloatTypeSuffix()
elif alt25 == 3:
# C.g:625:9: ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )?
# C.g:625:9: ( '0' .. '9' )+
cnt21 = 0
while True: #loop21
alt21 = 2
LA21_0 = self.input.LA(1)
if ((u'0' <= LA21_0 <= u'9')) :
alt21 = 1
if alt21 == 1:
# C.g:625:10: '0' .. '9'
self.matchRange(u'0', u'9')
else:
if cnt21 >= 1:
break #loop21
eee = EarlyExitException(21, self.input)
raise eee
cnt21 += 1
self.mExponent()
# C.g:625:30: ( FloatTypeSuffix )?
alt22 = 2
LA22_0 = self.input.LA(1)
if (LA22_0 == u'D' or LA22_0 == u'F' or LA22_0 == u'd' or LA22_0 == u'f') :
alt22 = 1
if alt22 == 1:
# C.g:625:30: FloatTypeSuffix
self.mFloatTypeSuffix()
elif alt25 == 4:
# C.g:626:9: ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix
# C.g:626:9: ( '0' .. '9' )+
cnt23 = 0
while True: #loop23
alt23 = 2
LA23_0 = self.input.LA(1)
if ((u'0' <= LA23_0 <= u'9')) :
alt23 = 1
if alt23 == 1:
# C.g:626:10: '0' .. '9'
self.matchRange(u'0', u'9')
else:
if cnt23 >= 1:
break #loop23
eee = EarlyExitException(23, self.input)
raise eee
cnt23 += 1
# C.g:626:21: ( Exponent )?
alt24 = 2
LA24_0 = self.input.LA(1)
if (LA24_0 == u'E' or LA24_0 == u'e') :
alt24 = 1
if alt24 == 1:
# C.g:626:21: Exponent
self.mExponent()
self.mFloatTypeSuffix()
finally:
pass
# $ANTLR end FLOATING_POINT_LITERAL
# $ANTLR start Exponent
def mExponent(self, ):
try:
# C.g:630:10: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ )
# C.g:630:12: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+
if self.input.LA(1) == u'E' or self.input.LA(1) == u'e':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
# C.g:630:22: ( '+' | '-' )?
alt26 = 2
LA26_0 = self.input.LA(1)
if (LA26_0 == u'+' or LA26_0 == u'-') :
alt26 = 1
if alt26 == 1:
# C.g:
if self.input.LA(1) == u'+' or self.input.LA(1) == u'-':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
# C.g:630:33: ( '0' .. '9' )+
cnt27 = 0
while True: #loop27
alt27 = 2
LA27_0 = self.input.LA(1)
if ((u'0' <= LA27_0 <= u'9')) :
alt27 = 1
if alt27 == 1:
# C.g:630:34: '0' .. '9'
self.matchRange(u'0', u'9')
else:
if cnt27 >= 1:
break #loop27
eee = EarlyExitException(27, self.input)
raise eee
cnt27 += 1
finally:
pass
# $ANTLR end Exponent
# $ANTLR start FloatTypeSuffix
def mFloatTypeSuffix(self, ):
try:
# C.g:633:17: ( ( 'f' | 'F' | 'd' | 'D' ) )
# C.g:633:19: ( 'f' | 'F' | 'd' | 'D' )
if self.input.LA(1) == u'D' or self.input.LA(1) == u'F' or self.input.LA(1) == u'd' or self.input.LA(1) == u'f':
self.input.consume();
else:
mse = MismatchedSetException(None, self.input)
self.recover(mse)
raise mse
finally:
pass
# $ANTLR end FloatTypeSuffix
    # $ANTLR start EscapeSequence
    def mEscapeSequence(self, ):
        """Fragment rule EscapeSequence: match a backslash escape.

        Two alternatives, chosen on two characters of lookahead:
          alt 1: a simple escape (b, t, n, f, r, single quote,
                 double quote, or a second backslash)
          alt 2: an octal escape, delegated to mOctalEscape().
        Raises NoViableAltException when neither alternative applies.
        """
        try:
            # C.g:637:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape )
            alt28 = 2
            LA28_0 = self.input.LA(1)
            if (LA28_0 == u'\\') :
                # Disambiguate on the character following the backslash.
                LA28_1 = self.input.LA(2)
                if (LA28_1 == u'"' or LA28_1 == u'\'' or LA28_1 == u'\\' or LA28_1 == u'b' or LA28_1 == u'f' or LA28_1 == u'n' or LA28_1 == u'r' or LA28_1 == u't') :
                    alt28 = 1
                elif ((u'0' <= LA28_1 <= u'7')) :
                    alt28 = 2
                else:
                    nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 1, self.input)
                    raise nvae
            else:
                nvae = NoViableAltException("635:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 0, self.input)
                raise nvae
            if alt28 == 1:
                # C.g:637:8: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' )
                self.match(u'\\')
                if self.input.LA(1) == u'"' or self.input.LA(1) == u'\'' or self.input.LA(1) == u'\\' or self.input.LA(1) == u'b' or self.input.LA(1) == u'f' or self.input.LA(1) == u'n' or self.input.LA(1) == u'r' or self.input.LA(1) == u't':
                    self.input.consume();
                else:
                    mse = MismatchedSetException(None, self.input)
                    self.recover(mse)
                    raise mse
            elif alt28 == 2:
                # C.g:638:9: OctalEscape
                self.mOctalEscape()
        finally:
            pass
    # $ANTLR end EscapeSequence
    # $ANTLR start OctalEscape
    def mOctalEscape(self, ):
        """Fragment rule OctalEscape: match a backslash plus 1-3 octal digits.

        Alternatives (longest match preferred, decided on up to four
        characters of lookahead):
          alt 1: backslash, digit 0-3, digit 0-7, digit 0-7
          alt 2: backslash, digit 0-7, digit 0-7
          alt 3: backslash, digit 0-7
        Raises NoViableAltException if the lookahead is not a backslash
        followed by an octal digit.
        """
        try:
            # C.g:643:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) )
            alt29 = 3
            LA29_0 = self.input.LA(1)
            if (LA29_0 == u'\\') :
                LA29_1 = self.input.LA(2)
                if ((u'0' <= LA29_1 <= u'3')) :
                    # A leading 0-3 may start a three-digit escape; probe
                    # further lookahead to pick the longest viable form.
                    LA29_2 = self.input.LA(3)
                    if ((u'0' <= LA29_2 <= u'7')) :
                        LA29_4 = self.input.LA(4)
                        if ((u'0' <= LA29_4 <= u'7')) :
                            alt29 = 1
                        else:
                            alt29 = 2
                    else:
                        alt29 = 3
                elif ((u'4' <= LA29_1 <= u'7')) :
                    # A leading 4-7 allows at most two digits.
                    LA29_3 = self.input.LA(3)
                    if ((u'0' <= LA29_3 <= u'7')) :
                        alt29 = 2
                    else:
                        alt29 = 3
                else:
                    nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 1, self.input)
                    raise nvae
            else:
                nvae = NoViableAltException("641:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 0, self.input)
                raise nvae
            if alt29 == 1:
                # C.g:643:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' )
                self.match(u'\\')
                # C.g:643:14: ( '0' .. '3' )
                # C.g:643:15: '0' .. '3'
                self.matchRange(u'0', u'3')
                # C.g:643:25: ( '0' .. '7' )
                # C.g:643:26: '0' .. '7'
                self.matchRange(u'0', u'7')
                # C.g:643:36: ( '0' .. '7' )
                # C.g:643:37: '0' .. '7'
                self.matchRange(u'0', u'7')
            elif alt29 == 2:
                # C.g:644:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' )
                self.match(u'\\')
                # C.g:644:14: ( '0' .. '7' )
                # C.g:644:15: '0' .. '7'
                self.matchRange(u'0', u'7')
                # C.g:644:25: ( '0' .. '7' )
                # C.g:644:26: '0' .. '7'
                self.matchRange(u'0', u'7')
            elif alt29 == 3:
                # C.g:645:9: '\\\\' ( '0' .. '7' )
                self.match(u'\\')
                # C.g:645:14: ( '0' .. '7' )
                # C.g:645:15: '0' .. '7'
                self.matchRange(u'0', u'7')
        finally:
            pass
    # $ANTLR end OctalEscape
# $ANTLR start UnicodeEscape
def mUnicodeEscape(self, ):
try:
# C.g:650:5: ( '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit )
# C.g:650:9: '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit
self.match(u'\\')
self.match(u'u')
self.mHexDigit()
self.mHexDigit()
self.mHexDigit()
self.mHexDigit()
finally:
pass
# $ANTLR end UnicodeEscape
    # $ANTLR start WS
    def mWS(self, ):
        """Lexer rule WS: match one whitespace character.

        Accepts space, tab, newline, carriage return or form feed and
        places the token on the hidden channel so the parser skips it.
        Raises MismatchedSetException on any other character.
        """
        try:
            self.type = WS
            # C.g:653:5: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) )
            # C.g:653:8: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' )
            # The two range tests cover tab/newline and formfeed/CR.
            if (u'\t' <= self.input.LA(1) <= u'\n') or (u'\f' <= self.input.LA(1) <= u'\r') or self.input.LA(1) == u' ':
                self.input.consume();
            else:
                mse = MismatchedSetException(None, self.input)
                self.recover(mse)
                raise mse
            #action start
            # Whitespace is hidden from the parser, not discarded.
            self.channel=HIDDEN;
            #action end
        finally:
            pass
    # $ANTLR end WS
    # $ANTLR start BS
    def mBS(self, ):
        """Lexer rule BS: match a single backslash character.

        The resulting token is placed on the hidden channel (presumably
        a line continuation in preprocessed source -- not verifiable
        from this file alone).
        """
        try:
            self.type = BS
            # C.g:657:5: ( ( '\\\\' ) )
            # C.g:657:7: ( '\\\\' )
            # C.g:657:7: ( '\\\\' )
            # C.g:657:8: '\\\\'
            self.match(u'\\')
            #action start
            self.channel=HIDDEN;
            #action end
        finally:
            pass
    # $ANTLR end BS
    # $ANTLR start UnicodeVocabulary
    def mUnicodeVocabulary(self, ):
        """Lexer rule UnicodeVocabulary: catch-all for any single
        character in U+0003..U+FFFE not claimed by an earlier rule."""
        try:
            self.type = UnicodeVocabulary
            # C.g:665:5: ( '\\u0003' .. '\\uFFFE' )
            # C.g:665:7: '\\u0003' .. '\\uFFFE'
            self.matchRange(u'\u0003', u'\uFFFE')
        finally:
            pass
    # $ANTLR end UnicodeVocabulary
    # $ANTLR start COMMENT
    def mCOMMENT(self, ):
        """Lexer rule COMMENT: match a block comment '/*' ... '*/'.

        The interior is matched non-greedily (stops at the first '*/')
        and the token is placed on the hidden channel.
        """
        try:
            self.type = COMMENT
            # C.g:668:5: ( '/*' ( options {greedy=false; } : . )* '*/' )
            # C.g:668:9: '/*' ( options {greedy=false; } : . )* '*/'
            self.match("/*")
            # C.g:668:14: ( options {greedy=false; } : . )*
            # Consume arbitrary characters until '*/' is next in the input.
            while True: #loop30
                alt30 = 2
                LA30_0 = self.input.LA(1)
                if (LA30_0 == u'*') :
                    LA30_1 = self.input.LA(2)
                    if (LA30_1 == u'/') :
                        # '*/' found: exit and let match("*/") consume it.
                        alt30 = 2
                    elif ((u'\u0000' <= LA30_1 <= u'.') or (u'0' <= LA30_1 <= u'\uFFFE')) :
                        alt30 = 1
                elif ((u'\u0000' <= LA30_0 <= u')') or (u'+' <= LA30_0 <= u'\uFFFE')) :
                    alt30 = 1
                if alt30 == 1:
                    # C.g:668:42: .
                    self.matchAny()
                else:
                    break #loop30
            self.match("*/")
            #action start
            self.channel=HIDDEN;
            #action end
        finally:
            pass
    # $ANTLR end COMMENT
    # $ANTLR start LINE_COMMENT
    def mLINE_COMMENT(self, ):
        """Lexer rule LINE_COMMENT: match '//' through end of line.

        Consumes '//', everything up to (but not including) CR/LF, an
        optional carriage return, and a mandatory newline.  The token is
        placed on the hidden channel.
        """
        try:
            self.type = LINE_COMMENT
            # C.g:673:5: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
            # C.g:673:7: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
            self.match("//")
            # C.g:673:12: (~ ( '\\n' | '\\r' ) )*
            # Consume the remainder of the line (anything but CR/LF).
            while True: #loop31
                alt31 = 2
                LA31_0 = self.input.LA(1)
                if ((u'\u0000' <= LA31_0 <= u'\t') or (u'\u000B' <= LA31_0 <= u'\f') or (u'\u000E' <= LA31_0 <= u'\uFFFE')) :
                    alt31 = 1
                if alt31 == 1:
                    # C.g:673:12: ~ ( '\\n' | '\\r' )
                    if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'):
                        self.input.consume();
                    else:
                        mse = MismatchedSetException(None, self.input)
                        self.recover(mse)
                        raise mse
                else:
                    break #loop31
            # C.g:673:26: ( '\\r' )?
            # Optional CR so both LF and CRLF line endings terminate.
            alt32 = 2
            LA32_0 = self.input.LA(1)
            if (LA32_0 == u'\r') :
                alt32 = 1
            if alt32 == 1:
                # C.g:673:26: '\\r'
                self.match(u'\r')
            self.match(u'\n')
            #action start
            self.channel=HIDDEN;
            #action end
        finally:
            pass
    # $ANTLR end LINE_COMMENT
    # $ANTLR start LINE_COMMAND
    def mLINE_COMMAND(self, ):
        """Lexer rule LINE_COMMAND: match a '#'-initiated line.

        Consumes '#', the rest of the line, an optional carriage return,
        and a mandatory newline (covers preprocessor-style directives).
        The token is placed on the hidden channel.
        """
        try:
            self.type = LINE_COMMAND
            # C.g:678:5: ( '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' )
            # C.g:678:7: '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n'
            self.match(u'#')
            # C.g:678:11: (~ ( '\\n' | '\\r' ) )*
            # Consume the remainder of the line (anything but CR/LF).
            while True: #loop33
                alt33 = 2
                LA33_0 = self.input.LA(1)
                if ((u'\u0000' <= LA33_0 <= u'\t') or (u'\u000B' <= LA33_0 <= u'\f') or (u'\u000E' <= LA33_0 <= u'\uFFFE')) :
                    alt33 = 1
                if alt33 == 1:
                    # C.g:678:11: ~ ( '\\n' | '\\r' )
                    if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'):
                        self.input.consume();
                    else:
                        mse = MismatchedSetException(None, self.input)
                        self.recover(mse)
                        raise mse
                else:
                    break #loop33
            # C.g:678:25: ( '\\r' )?
            # Optional CR so both LF and CRLF line endings terminate.
            alt34 = 2
            LA34_0 = self.input.LA(1)
            if (LA34_0 == u'\r') :
                alt34 = 1
            if alt34 == 1:
                # C.g:678:25: '\\r'
                self.match(u'\r')
            self.match(u'\n')
            #action start
            self.channel=HIDDEN;
            #action end
        finally:
            pass
    # $ANTLR end LINE_COMMAND
def mTokens(self):
# C.g:1:8: ( T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | T33 | T34 | T35 | T36 | T37 | T38 | T39 | T40 | T41 | T42 | T43 | T44 | T45 | T46 | T47 | T48 | T49 | T50 | T51 | T52 | T53 | T54 | T55 | T56 | T57 | T58 | T59 | T60 | T61 | T62 | T63 | T64 | T65 | T66 | T67 | T68 | T69 | T70 | T71 | T72 | T73 | T74 | T75 | T76 | T77 | T78 | T79 | T80 | T81 | T82 | T83 | T84 | T85 | T86 | T87 | T88 | T89 | T90 | T91 | T92 | T93 | T94 | T95 | T96 | T97 | T98 | T99 | T100 | T101 | T102 | T103 | T104 | T105 | T106 | T107 | T108 | T109 | T110 | T111 | T112 | T113 | T114 | T115 | T116 | T117 | IDENTIFIER | CHARACTER_LITERAL | STRING_LITERAL | HEX_LITERAL | DECIMAL_LITERAL | OCTAL_LITERAL | FLOATING_POINT_LITERAL | WS | BS | UnicodeVocabulary | COMMENT | LINE_COMMENT | LINE_COMMAND )
alt35 = 106
alt35 = self.dfa35.predict(self.input)
if alt35 == 1:
# C.g:1:10: T25
self.mT25()
elif alt35 == 2:
# C.g:1:14: T26
self.mT26()
elif alt35 == 3:
# C.g:1:18: T27
self.mT27()
elif alt35 == 4:
# C.g:1:22: T28
self.mT28()
elif alt35 == 5:
# C.g:1:26: T29
self.mT29()
elif alt35 == 6:
# C.g:1:30: T30
self.mT30()
elif alt35 == 7:
# C.g:1:34: T31
self.mT31()
elif alt35 == 8:
# C.g:1:38: T32
self.mT32()
elif alt35 == 9:
# C.g:1:42: T33
self.mT33()
elif alt35 == 10:
# C.g:1:46: T34
self.mT34()
elif alt35 == 11:
# C.g:1:50: T35
self.mT35()
elif alt35 == 12:
# C.g:1:54: T36
self.mT36()
elif alt35 == 13:
# C.g:1:58: T37
self.mT37()
elif alt35 == 14:
# C.g:1:62: T38
self.mT38()
elif alt35 == 15:
# C.g:1:66: T39
self.mT39()
elif alt35 == 16:
# C.g:1:70: T40
self.mT40()
elif alt35 == 17:
# C.g:1:74: T41
self.mT41()
elif alt35 == 18:
# C.g:1:78: T42
self.mT42()
elif alt35 == 19:
# C.g:1:82: T43
self.mT43()
elif alt35 == 20:
# C.g:1:86: T44
self.mT44()
elif alt35 == 21:
# C.g:1:90: T45
self.mT45()
elif alt35 == 22:
# C.g:1:94: T46
self.mT46()
elif alt35 == 23:
# C.g:1:98: T47
self.mT47()
elif alt35 == 24:
# C.g:1:102: T48
self.mT48()
elif alt35 == 25:
# C.g:1:106: T49
self.mT49()
elif alt35 == 26:
# C.g:1:110: T50
self.mT50()
elif alt35 == 27:
# C.g:1:114: T51
self.mT51()
elif alt35 == 28:
# C.g:1:118: T52
self.mT52()
elif alt35 == 29:
# C.g:1:122: T53
self.mT53()
elif alt35 == 30:
# C.g:1:126: T54
self.mT54()
elif alt35 == 31:
# C.g:1:130: T55
self.mT55()
elif alt35 == 32:
# C.g:1:134: T56
self.mT56()
elif alt35 == 33:
# C.g:1:138: T57
self.mT57()
elif alt35 == 34:
# C.g:1:142: T58
self.mT58()
elif alt35 == 35:
# C.g:1:146: T59
self.mT59()
elif alt35 == 36:
# C.g:1:150: T60
self.mT60()
elif alt35 == 37:
# C.g:1:154: T61
self.mT61()
elif alt35 == 38:
# C.g:1:158: T62
self.mT62()
elif alt35 == 39:
# C.g:1:162: T63
self.mT63()
elif alt35 == 40:
# C.g:1:166: T64
self.mT64()
elif alt35 == 41:
# C.g:1:170: T65
self.mT65()
elif alt35 == 42:
# C.g:1:174: T66
self.mT66()
elif alt35 == 43:
# C.g:1:178: T67
self.mT67()
elif alt35 == 44:
# C.g:1:182: T68
self.mT68()
elif alt35 == 45:
# C.g:1:186: T69
self.mT69()
elif alt35 == 46:
# C.g:1:190: T70
self.mT70()
elif alt35 == 47:
# C.g:1:194: T71
self.mT71()
elif alt35 == 48:
# C.g:1:198: T72
self.mT72()
elif alt35 == 49:
# C.g:1:202: T73
self.mT73()
elif alt35 == 50:
# C.g:1:206: T74
self.mT74()
elif alt35 == 51:
# C.g:1:210: T75
self.mT75()
elif alt35 == 52:
# C.g:1:214: T76
self.mT76()
elif alt35 == 53:
# C.g:1:218: T77
self.mT77()
elif alt35 == 54:
# C.g:1:222: T78
self.mT78()
elif alt35 == 55:
# C.g:1:226: T79
self.mT79()
elif alt35 == 56:
# C.g:1:230: T80
self.mT80()
elif alt35 == 57:
# C.g:1:234: T81
self.mT81()
elif alt35 == 58:
# C.g:1:238: T82
self.mT82()
elif alt35 == 59:
# C.g:1:242: T83
self.mT83()
elif alt35 == 60:
# C.g:1:246: T84
self.mT84()
elif alt35 == 61:
# C.g:1:250: T85
self.mT85()
elif alt35 == 62:
# C.g:1:254: T86
self.mT86()
elif alt35 == 63:
# C.g:1:258: T87
self.mT87()
elif alt35 == 64:
# C.g:1:262: T88
self.mT88()
elif alt35 == 65:
# C.g:1:266: T89
self.mT89()
elif alt35 == 66:
# C.g:1:270: T90
self.mT90()
elif alt35 == 67:
# C.g:1:274: T91
self.mT91()
elif alt35 == 68:
# C.g:1:278: T92
self.mT92()
elif alt35 == 69:
# C.g:1:282: T93
self.mT93()
elif alt35 == 70:
# C.g:1:286: T94
self.mT94()
elif alt35 == 71:
# C.g:1:290: T95
self.mT95()
elif alt35 == 72:
# C.g:1:294: T96
self.mT96()
elif alt35 == 73:
# C.g:1:298: T97
self.mT97()
elif alt35 == 74:
# C.g:1:302: T98
self.mT98()
elif alt35 == 75:
# C.g:1:306: T99
self.mT99()
elif alt35 == 76:
# C.g:1:310: T100
self.mT100()
elif alt35 == 77:
# C.g:1:315: T101
self.mT101()
elif alt35 == 78:
# C.g:1:320: T102
self.mT102()
elif alt35 == 79:
# C.g:1:325: T103
self.mT103()
elif alt35 == 80:
# C.g:1:330: T104
self.mT104()
elif alt35 == 81:
# C.g:1:335: T105
self.mT105()
elif alt35 == 82:
# C.g:1:340: T106
self.mT106()
elif alt35 == 83:
# C.g:1:345: T107
self.mT107()
elif alt35 == 84:
# C.g:1:350: T108
self.mT108()
elif alt35 == 85:
# C.g:1:355: T109
self.mT109()
elif alt35 == 86:
# C.g:1:360: T110
self.mT110()
elif alt35 == 87:
# C.g:1:365: T111
self.mT111()
elif alt35 == 88:
# C.g:1:370: T112
self.mT112()
elif alt35 == 89:
# C.g:1:375: T113
self.mT113()
elif alt35 == 90:
# C.g:1:380: T114
self.mT114()
elif alt35 == 91:
# C.g:1:385: T115
self.mT115()
elif alt35 == 92:
# C.g:1:390: T116
self.mT116()
elif alt35 == 93:
# C.g:1:395: T117
self.mT117()
elif alt35 == 94:
# C.g:1:400: IDENTIFIER
self.mIDENTIFIER()
elif alt35 == 95:
# C.g:1:411: CHARACTER_LITERAL
self.mCHARACTER_LITERAL()
elif alt35 == 96:
# C.g:1:429: STRING_LITERAL
self.mSTRING_LITERAL()
elif alt35 == 97:
# C.g:1:444: HEX_LITERAL
self.mHEX_LITERAL()
elif alt35 == 98:
# C.g:1:456: DECIMAL_LITERAL
self.mDECIMAL_LITERAL()
elif alt35 == 99:
# C.g:1:472: OCTAL_LITERAL
self.mOCTAL_LITERAL()
elif alt35 == 100:
# C.g:1:486: FLOATING_POINT_LITERAL
self.mFLOATING_POINT_LITERAL()
elif alt35 == 101:
# C.g:1:509: WS
self.mWS()
elif alt35 == 102:
# C.g:1:512: BS
self.mBS()
elif alt35 == 103:
# C.g:1:515: UnicodeVocabulary
self.mUnicodeVocabulary()
elif alt35 == 104:
# C.g:1:533: COMMENT
self.mCOMMENT()
elif alt35 == 105:
# C.g:1:541: LINE_COMMENT
self.mLINE_COMMENT()
elif alt35 == 106:
# C.g:1:554: LINE_COMMAND
self.mLINE_COMMAND()
# lookup tables for DFA #25
DFA25_eot = DFA.unpack(
u"\7\uffff\1\10\2\uffff"
)
DFA25_eof = DFA.unpack(
u"\12\uffff"
)
DFA25_min = DFA.unpack(
u"\2\56\2\uffff\1\53\1\uffff\2\60\2\uffff"
)
DFA25_max = DFA.unpack(
u"\1\71\1\146\2\uffff\1\71\1\uffff\1\71\1\146\2\uffff"
)
DFA25_accept = DFA.unpack(
u"\2\uffff\1\2\1\1\1\uffff\1\4\2\uffff\2\3"
)
DFA25_special = DFA.unpack(
u"\12\uffff"
)
DFA25_transition = [
DFA.unpack(u"\1\2\1\uffff\12\1"),
DFA.unpack(u"\1\3\1\uffff\12\1\12\uffff\1\5\1\4\1\5\35\uffff\1\5"
u"\1\4\1\5"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\6\1\uffff\1\6\2\uffff\12\7"),
DFA.unpack(u""),
DFA.unpack(u"\12\7"),
DFA.unpack(u"\12\7\12\uffff\1\11\1\uffff\1\11\35\uffff\1\11\1\uffff"
u"\1\11"),
DFA.unpack(u""),
DFA.unpack(u"")
]
# class definition for DFA #25
DFA25 = DFA
# lookup tables for DFA #35
DFA35_eot = DFA.unpack(
u"\2\uffff\1\76\1\uffff\1\101\14\76\3\uffff\10\76\4\uffff\1\151\1"
u"\153\1\157\1\163\1\167\1\171\1\174\1\uffff\1\177\1\u0082\1\u0085"
u"\1\u0087\1\u008a\1\uffff\5\76\1\uffff\2\73\2\u0095\2\uffff\1\73"
u"\2\uffff\1\76\4\uffff\16\76\1\u00ad\5\76\1\u00b4\1\76\3\uffff\1"
u"\u00b7\10\76\34\uffff\1\u00c1\2\uffff\1\u00c3\10\uffff\5\76\3\uffff"
u"\1\u00c9\1\uffff\1\u0095\3\uffff\23\76\1\uffff\1\u00de\1\76\1\u00e0"
u"\3\76\1\uffff\2\76\1\uffff\1\76\1\u00e7\6\76\4\uffff\5\76\1\uffff"
u"\1\76\1\u00f5\1\76\1\u00f7\6\76\1\u00fe\4\76\1\u0103\1\u0104\2"
u"\76\1\u0107\1\uffff\1\u0108\1\uffff\6\76\1\uffff\10\76\1\u0118"
u"\1\76\1\u011a\2\76\1\uffff\1\76\1\uffff\5\76\1\u0123\1\uffff\4"
u"\76\2\uffff\1\76\1\u0129\2\uffff\1\u012a\3\76\1\u012e\1\76\1\u0130"
u"\7\76\1\u0139\1\uffff\1\u013a\1\uffff\1\u013b\1\76\1\u013d\1\u013e"
u"\1\u013f\1\u0140\1\u0141\1\u0142\1\uffff\1\76\1\u0144\1\u0145\2"
u"\76\2\uffff\1\76\1\u0149\1\76\1\uffff\1\76\1\uffff\5\76\1\u0151"
u"\1\u0152\1\76\3\uffff\1\u0154\6\uffff\1\76\2\uffff\2\76\1\u0158"
u"\1\uffff\7\76\2\uffff\1\u0160\1\uffff\1\u0161\1\u0162\1\u0163\1"
u"\uffff\1\u0164\1\u0165\1\76\1\u0167\3\76\6\uffff\1\u016b\1\uffff"
u"\3\76\1\uffff\21\76\1\u0180\2\76\1\uffff\3\76\1\u0186\1\76\1\uffff"
u"\11\76\1\u0191\1\uffff"
)
DFA35_eof = DFA.unpack(
u"\u0192\uffff"
)
DFA35_min = DFA.unpack(
u"\1\3\1\uffff\1\171\1\uffff\1\75\1\154\1\150\1\165\1\145\1\124\1"
u"\157\1\141\1\146\1\157\1\154\1\145\1\156\3\uffff\1\116\1\120\1"
u"\117\1\116\1\117\1\114\1\106\1\101\4\uffff\1\75\1\56\1\53\1\55"
u"\1\52\1\75\1\46\1\uffff\1\75\1\74\3\75\1\uffff\1\137\1\150\1\157"
u"\1\162\1\42\1\uffff\2\0\2\56\2\uffff\1\0\2\uffff\1\160\4\uffff"
u"\1\163\1\164\1\165\1\151\1\141\1\147\1\157\1\164\1\147\1\101\1"
u"\151\1\163\1\156\1\141\1\44\1\164\1\156\1\162\1\157\1\146\1\44"
u"\1\151\3\uffff\1\44\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34"
u"\uffff\1\75\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145"
u"\3\uffff\1\56\1\uffff\1\56\3\uffff\3\145\1\155\2\164\1\165\1\145"
u"\1\156\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\163\1"
u"\162\1\uffff\1\44\1\147\1\44\2\141\1\142\1\uffff\1\151\1\157\1"
u"\uffff\1\111\1\44\1\123\1\114\1\101\1\102\1\101\1\113\4\uffff\1"
u"\163\1\155\1\154\1\157\1\141\1\uffff\1\144\1\44\1\162\1\44\1\143"
u"\1\151\1\143\1\157\1\145\1\164\1\44\1\163\1\162\1\111\1\164\2\44"
u"\1\151\1\164\1\44\1\uffff\1\44\1\uffff\1\164\1\165\1\154\1\147"
u"\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\102\1\120\1\105"
u"\1\155\1\44\1\145\1\44\1\153\1\145\1\uffff\1\156\1\uffff\1\150"
u"\1\143\1\164\1\146\1\144\1\44\1\uffff\1\164\1\156\1\103\1\151\2"
u"\uffff\1\156\1\44\2\uffff\1\44\1\154\1\145\1\156\1\44\1\116\1\44"
u"\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\44\1\uffff\1\44\1"
u"\uffff\1\44\1\146\6\44\1\uffff\1\145\2\44\1\154\1\165\2\uffff\1"
u"\164\1\44\1\145\1\uffff\1\101\1\uffff\1\116\1\114\1\137\1\116\1"
u"\117\2\44\1\137\3\uffff\1\44\6\uffff\1\162\2\uffff\2\145\1\44\1"
u"\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff\1\44\1\uffff\3\44"
u"\1\uffff\2\44\1\104\1\44\1\105\1\111\1\123\6\uffff\1\44\1\uffff"
u"\2\115\1\105\1\uffff\1\117\1\105\1\122\1\126\1\123\1\126\2\105"
u"\1\111\1\137\1\122\1\103\1\111\1\126\1\105\1\106\1\111\1\44\1\137"
u"\1\103\1\uffff\1\125\1\105\1\116\1\44\1\122\1\uffff\1\105\1\106"
u"\1\105\1\122\1\105\1\116\1\103\1\105\1\104\1\44\1\uffff"
)
DFA35_max = DFA.unpack(
u"\1\ufffe\1\uffff\1\171\1\uffff\1\75\1\170\1\167\1\165\1\145\1\124"
u"\2\157\1\156\3\157\1\156\3\uffff\1\116\1\125\1\117\1\116\1\117"
u"\1\114\1\106\1\101\4\uffff\1\75\1\71\1\75\1\76\3\75\1\uffff\2\75"
u"\1\76\1\75\1\174\1\uffff\1\141\1\150\1\157\1\162\1\47\1\uffff\2"
u"\ufffe\1\170\1\146\2\uffff\1\ufffe\2\uffff\1\160\4\uffff\1\163"
u"\1\164\1\165\1\151\1\162\1\172\1\157\2\164\1\101\1\154\1\163\1"
u"\156\1\141\1\172\1\164\1\156\1\162\1\157\1\146\1\172\1\163\3\uffff"
u"\1\172\2\124\1\116\1\101\1\114\1\117\1\111\1\103\34\uffff\1\75"
u"\2\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145\3\uffff\1"
u"\146\1\uffff\1\146\3\uffff\3\145\1\155\2\164\1\165\1\145\1\156"
u"\1\162\1\157\1\151\1\165\1\124\1\141\1\144\1\145\1\164\1\162\1"
u"\uffff\1\172\1\147\1\172\2\141\1\142\1\uffff\1\151\1\157\1\uffff"
u"\1\111\1\172\1\123\1\114\1\101\1\102\1\137\1\113\4\uffff\1\163"
u"\1\155\1\154\1\157\1\141\1\uffff\1\144\1\172\1\162\1\172\1\143"
u"\1\151\1\143\1\157\1\145\1\164\1\172\1\163\1\162\1\111\1\164\2"
u"\172\1\151\1\164\1\172\1\uffff\1\172\1\uffff\1\164\1\165\1\154"
u"\1\147\1\156\1\117\1\uffff\1\124\1\111\1\124\1\101\1\122\1\120"
u"\1\105\1\155\1\172\1\145\1\172\1\153\1\145\1\uffff\1\156\1\uffff"
u"\1\150\1\143\1\164\1\146\1\144\1\172\1\uffff\1\164\1\156\1\103"
u"\1\151\2\uffff\1\156\1\172\2\uffff\1\172\1\154\1\145\1\156\1\172"
u"\1\116\1\172\1\107\1\111\1\114\1\125\1\117\1\111\1\104\1\172\1"
u"\uffff\1\172\1\uffff\1\172\1\146\6\172\1\uffff\1\145\2\172\1\154"
u"\1\165\2\uffff\1\164\1\172\1\145\1\uffff\1\101\1\uffff\1\116\1"
u"\114\1\137\1\116\1\117\2\172\1\137\3\uffff\1\172\6\uffff\1\162"
u"\2\uffff\2\145\1\172\1\uffff\1\144\1\114\2\105\1\122\2\124\2\uffff"
u"\1\172\1\uffff\3\172\1\uffff\2\172\1\104\1\172\1\105\1\111\1\123"
u"\6\uffff\1\172\1\uffff\2\115\1\105\1\uffff\1\117\1\105\1\122\1"
u"\126\1\123\1\126\2\105\1\111\1\137\1\122\1\103\1\111\1\126\1\105"
u"\1\106\1\111\1\172\1\137\1\103\1\uffff\1\125\1\105\1\116\1\172"
u"\1\122\1\uffff\1\105\1\106\1\105\1\122\1\105\1\116\1\103\1\105"
u"\1\104\1\172\1\uffff"
)
DFA35_accept = DFA.unpack(
u"\1\uffff\1\1\1\uffff\1\3\15\uffff\1\23\1\24\1\27\10\uffff\1\46"
u"\1\47\1\50\1\51\7\uffff\1\66\5\uffff\1\102\5\uffff\1\136\4\uffff"
u"\1\145\1\146\1\uffff\1\147\1\1\1\uffff\1\136\1\3\1\107\1\4\26\uffff"
u"\1\23\1\24\1\27\11\uffff\1\46\1\47\1\50\1\51\1\70\1\52\1\53\1\63"
u"\1\144\1\73\1\60\1\54\1\74\1\64\1\61\1\55\1\150\1\151\1\71\1\56"
u"\1\72\1\57\1\77\1\104\1\65\1\66\1\110\1\67\1\uffff\1\113\1\111"
u"\1\uffff\1\114\1\112\1\100\1\106\1\103\1\101\1\105\1\102\5\uffff"
u"\1\140\1\137\1\141\1\uffff\1\142\1\uffff\1\145\1\146\1\152\23\uffff"
u"\1\124\6\uffff\1\130\2\uffff\1\33\10\uffff\1\75\1\115\1\76\1\116"
u"\5\uffff\1\143\24\uffff\1\15\1\uffff\1\131\6\uffff\1\34\15\uffff"
u"\1\125\1\uffff\1\30\6\uffff\1\7\4\uffff\1\12\1\122\2\uffff\1\13"
u"\1\16\17\uffff\1\120\1\uffff\1\132\10\uffff\1\14\5\uffff\1\31\1"
u"\17\3\uffff\1\26\1\uffff\1\36\10\uffff\1\121\1\127\1\134\1\uffff"
u"\1\5\1\126\1\6\1\25\1\62\1\21\1\uffff\1\135\1\11\3\uffff\1\20\7"
u"\uffff\1\42\1\45\1\uffff\1\2\3\uffff\1\123\7\uffff\1\117\1\10\1"
u"\32\1\133\1\22\1\35\1\uffff\1\40\3\uffff\1\37\24\uffff\1\43\5\uffff"
u"\1\44\12\uffff\1\41"
)
DFA35_special = DFA.unpack(
u"\u0192\uffff"
)
DFA35_transition = [
DFA.unpack(u"\6\73\2\70\1\73\2\70\22\73\1\70\1\50\1\65\1\72\1\63"
u"\1\45\1\46\1\64\1\34\1\35\1\40\1\42\1\3\1\43\1\41\1\44\1\66\11"
u"\67\1\23\1\1\1\51\1\4\1\52\1\55\1\73\2\63\1\26\1\63\1\32\1\63\1"
u"\31\1\63\1\24\2\63\1\62\2\63\1\25\1\33\2\63\1\11\1\63\1\27\1\30"
u"\4\63\1\36\1\71\1\37\1\53\1\56\1\73\1\7\1\61\1\13\1\17\1\5\1\16"
u"\1\60\1\63\1\14\2\63\1\15\5\63\1\10\1\6\1\2\1\20\1\12\1\57\3\63"
u"\1\21\1\54\1\22\1\47\uff80\73"),
DFA.unpack(u""),
DFA.unpack(u"\1\75"),
DFA.unpack(u""),
DFA.unpack(u"\1\100"),
DFA.unpack(u"\1\102\1\uffff\1\104\11\uffff\1\103"),
DFA.unpack(u"\1\110\1\107\12\uffff\1\106\2\uffff\1\105"),
DFA.unpack(u"\1\111"),
DFA.unpack(u"\1\112"),
DFA.unpack(u"\1\113"),
DFA.unpack(u"\1\114"),
DFA.unpack(u"\1\115\6\uffff\1\117\6\uffff\1\116"),
DFA.unpack(u"\1\120\7\uffff\1\121"),
DFA.unpack(u"\1\122"),
DFA.unpack(u"\1\124\2\uffff\1\123"),
DFA.unpack(u"\1\125\11\uffff\1\126"),
DFA.unpack(u"\1\127"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\133"),
DFA.unpack(u"\1\134\4\uffff\1\135"),
DFA.unpack(u"\1\136"),
DFA.unpack(u"\1\137"),
DFA.unpack(u"\1\140"),
DFA.unpack(u"\1\141"),
DFA.unpack(u"\1\142"),
DFA.unpack(u"\1\143"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\150"),
DFA.unpack(u"\1\152\1\uffff\12\154"),
DFA.unpack(u"\1\156\21\uffff\1\155"),
DFA.unpack(u"\1\162\17\uffff\1\160\1\161"),
DFA.unpack(u"\1\164\4\uffff\1\165\15\uffff\1\166"),
DFA.unpack(u"\1\170"),
DFA.unpack(u"\1\173\26\uffff\1\172"),
DFA.unpack(u""),
DFA.unpack(u"\1\176"),
DFA.unpack(u"\1\u0080\1\u0081"),
DFA.unpack(u"\1\u0084\1\u0083"),
DFA.unpack(u"\1\u0086"),
DFA.unpack(u"\1\u0089\76\uffff\1\u0088"),
DFA.unpack(u""),
DFA.unpack(u"\1\u008c\1\uffff\1\u008d"),
DFA.unpack(u"\1\u008e"),
DFA.unpack(u"\1\u008f"),
DFA.unpack(u"\1\u0090"),
DFA.unpack(u"\1\u0091\4\uffff\1\u0092"),
DFA.unpack(u""),
DFA.unpack(u"\47\u0092\1\uffff\uffd7\u0092"),
DFA.unpack(u"\uffff\u0091"),
DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\21\uffff"
u"\1\u0093\13\uffff\3\154\21\uffff\1\u0093"),
DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\uffff\u0099"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u009a"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u009b"),
DFA.unpack(u"\1\u009c"),
DFA.unpack(u"\1\u009d"),
DFA.unpack(u"\1\u009e"),
DFA.unpack(u"\1\u009f\20\uffff\1\u00a0"),
DFA.unpack(u"\1\u00a2\22\uffff\1\u00a1"),
DFA.unpack(u"\1\u00a3"),
DFA.unpack(u"\1\u00a4"),
DFA.unpack(u"\1\u00a5\14\uffff\1\u00a6"),
DFA.unpack(u"\1\u00a7"),
DFA.unpack(u"\1\u00a9\2\uffff\1\u00a8"),
DFA.unpack(u"\1\u00aa"),
DFA.unpack(u"\1\u00ab"),
DFA.unpack(u"\1\u00ac"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00ae"),
DFA.unpack(u"\1\u00af"),
DFA.unpack(u"\1\u00b0"),
DFA.unpack(u"\1\u00b1"),
DFA.unpack(u"\1\u00b2"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\24\76\1\u00b3\5\76"),
DFA.unpack(u"\1\u00b6\11\uffff\1\u00b5"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00b8"),
DFA.unpack(u"\1\u00b9"),
DFA.unpack(u"\1\u00ba"),
DFA.unpack(u"\1\u00bb"),
DFA.unpack(u"\1\u00bc"),
DFA.unpack(u"\1\u00bd"),
DFA.unpack(u"\1\u00be"),
DFA.unpack(u"\1\u00bf"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00c0"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00c2"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00c4"),
DFA.unpack(u"\1\u00c5"),
DFA.unpack(u"\1\u00c6"),
DFA.unpack(u"\1\u00c7"),
DFA.unpack(u"\1\u00c8"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\154\1\uffff\10\u0094\2\154\12\uffff\3\154\35\uffff"
u"\3\154"),
DFA.unpack(u""),
DFA.unpack(u"\1\154\1\uffff\12\u0096\12\uffff\3\154\35\uffff\3\154"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00ca"),
DFA.unpack(u"\1\u00cb"),
DFA.unpack(u"\1\u00cc"),
DFA.unpack(u"\1\u00cd"),
DFA.unpack(u"\1\u00ce"),
DFA.unpack(u"\1\u00cf"),
DFA.unpack(u"\1\u00d0"),
DFA.unpack(u"\1\u00d1"),
DFA.unpack(u"\1\u00d2"),
DFA.unpack(u"\1\u00d3"),
DFA.unpack(u"\1\u00d4"),
DFA.unpack(u"\1\u00d5"),
DFA.unpack(u"\1\u00d6"),
DFA.unpack(u"\1\u00d7"),
DFA.unpack(u"\1\u00d8"),
DFA.unpack(u"\1\u00d9"),
DFA.unpack(u"\1\u00da"),
DFA.unpack(u"\1\u00dc\1\u00db"),
DFA.unpack(u"\1\u00dd"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00df"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00e1"),
DFA.unpack(u"\1\u00e2"),
DFA.unpack(u"\1\u00e3"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00e4"),
DFA.unpack(u"\1\u00e5"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00e6"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00e8"),
DFA.unpack(u"\1\u00e9"),
DFA.unpack(u"\1\u00ea"),
DFA.unpack(u"\1\u00eb"),
DFA.unpack(u"\1\u00ed\35\uffff\1\u00ec"),
DFA.unpack(u"\1\u00ee"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u00ef"),
DFA.unpack(u"\1\u00f0"),
DFA.unpack(u"\1\u00f1"),
DFA.unpack(u"\1\u00f2"),
DFA.unpack(u"\1\u00f3"),
DFA.unpack(u""),
DFA.unpack(u"\1\u00f4"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00f6"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00f8"),
DFA.unpack(u"\1\u00f9"),
DFA.unpack(u"\1\u00fa"),
DFA.unpack(u"\1\u00fb"),
DFA.unpack(u"\1\u00fc"),
DFA.unpack(u"\1\u00fd"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u00ff"),
DFA.unpack(u"\1\u0100"),
DFA.unpack(u"\1\u0101"),
DFA.unpack(u"\1\u0102"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0105"),
DFA.unpack(u"\1\u0106"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0109"),
DFA.unpack(u"\1\u010a"),
DFA.unpack(u"\1\u010b"),
DFA.unpack(u"\1\u010c"),
DFA.unpack(u"\1\u010d"),
DFA.unpack(u"\1\u010e"),
DFA.unpack(u""),
DFA.unpack(u"\1\u010f"),
DFA.unpack(u"\1\u0110"),
DFA.unpack(u"\1\u0111"),
DFA.unpack(u"\1\u0112"),
DFA.unpack(u"\1\u0114\17\uffff\1\u0113"),
DFA.unpack(u"\1\u0115"),
DFA.unpack(u"\1\u0116"),
DFA.unpack(u"\1\u0117"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0119"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u011b"),
DFA.unpack(u"\1\u011c"),
DFA.unpack(u""),
DFA.unpack(u"\1\u011d"),
DFA.unpack(u""),
DFA.unpack(u"\1\u011e"),
DFA.unpack(u"\1\u011f"),
DFA.unpack(u"\1\u0120"),
DFA.unpack(u"\1\u0121"),
DFA.unpack(u"\1\u0122"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0124"),
DFA.unpack(u"\1\u0125"),
DFA.unpack(u"\1\u0126"),
DFA.unpack(u"\1\u0127"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0128"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u012b"),
DFA.unpack(u"\1\u012c"),
DFA.unpack(u"\1\u012d"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u012f"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0131"),
DFA.unpack(u"\1\u0132"),
DFA.unpack(u"\1\u0133"),
DFA.unpack(u"\1\u0134"),
DFA.unpack(u"\1\u0135"),
DFA.unpack(u"\1\u0136"),
DFA.unpack(u"\1\u0137"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\u0138\1"
u"\uffff\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u013c"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0143"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0146"),
DFA.unpack(u"\1\u0147"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0148"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u014a"),
DFA.unpack(u""),
DFA.unpack(u"\1\u014b"),
DFA.unpack(u""),
DFA.unpack(u"\1\u014c"),
DFA.unpack(u"\1\u014d"),
DFA.unpack(u"\1\u014e"),
DFA.unpack(u"\1\u014f"),
DFA.unpack(u"\1\u0150"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0153"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0155"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\u0156"),
DFA.unpack(u"\1\u0157"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0159"),
DFA.unpack(u"\1\u015a"),
DFA.unpack(u"\1\u015b"),
DFA.unpack(u"\1\u015c"),
DFA.unpack(u"\1\u015d"),
DFA.unpack(u"\1\u015e"),
DFA.unpack(u"\1\u015f"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0166"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0168"),
DFA.unpack(u"\1\u0169"),
DFA.unpack(u"\1\u016a"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u""),
DFA.unpack(u"\1\u016c"),
DFA.unpack(u"\1\u016d"),
DFA.unpack(u"\1\u016e"),
DFA.unpack(u""),
DFA.unpack(u"\1\u016f"),
DFA.unpack(u"\1\u0170"),
DFA.unpack(u"\1\u0171"),
DFA.unpack(u"\1\u0172"),
DFA.unpack(u"\1\u0173"),
DFA.unpack(u"\1\u0174"),
DFA.unpack(u"\1\u0175"),
DFA.unpack(u"\1\u0176"),
DFA.unpack(u"\1\u0177"),
DFA.unpack(u"\1\u0178"),
DFA.unpack(u"\1\u0179"),
DFA.unpack(u"\1\u017a"),
DFA.unpack(u"\1\u017b"),
DFA.unpack(u"\1\u017c"),
DFA.unpack(u"\1\u017d"),
DFA.unpack(u"\1\u017e"),
DFA.unpack(u"\1\u017f"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0181"),
DFA.unpack(u"\1\u0182"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0183"),
DFA.unpack(u"\1\u0184"),
DFA.unpack(u"\1\u0185"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"\1\u0187"),
DFA.unpack(u""),
DFA.unpack(u"\1\u0188"),
DFA.unpack(u"\1\u0189"),
DFA.unpack(u"\1\u018a"),
DFA.unpack(u"\1\u018b"),
DFA.unpack(u"\1\u018c"),
DFA.unpack(u"\1\u018d"),
DFA.unpack(u"\1\u018e"),
DFA.unpack(u"\1\u018f"),
DFA.unpack(u"\1\u0190"),
DFA.unpack(u"\1\76\13\uffff\12\76\7\uffff\32\76\4\uffff\1\76\1\uffff"
u"\32\76"),
DFA.unpack(u"")
]
# class definition for DFA #35
DFA35 = DFA
| edk2-master | BaseTools/Source/Python/Eot/CParser3/CLexer.py |
edk2-master | BaseTools/Source/Python/Eot/CParser3/__init__.py |
|
# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
from __future__ import print_function
from __future__ import absolute_import
from antlr3 import *
from antlr3.compat import set, frozenset
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
from . import CodeFragment
from . import FileProfile
# for convenience in actions
# Channel id for tokens hidden from the parser (whitespace/comments).
HIDDEN = BaseRecognizer.HIDDEN

# token types
# NOTE(review): numeric token-type ids generated by ANTLR from C.g; they must
# stay in sync with the tokenNames table below — do not renumber by hand.
BS=20
LINE_COMMENT=23
FloatTypeSuffix=16
IntegerTypeSuffix=14
LETTER=11
OCTAL_LITERAL=6
CHARACTER_LITERAL=8
Exponent=15
EOF=-1
HexDigit=13
STRING_LITERAL=9
WS=19
FLOATING_POINT_LITERAL=10
IDENTIFIER=4
UnicodeEscape=18
LINE_COMMAND=24
UnicodeVocabulary=21
HEX_LITERAL=5
COMMENT=22
DECIMAL_LITERAL=7
EscapeSequence=12
OctalEscape=17

# token names
# List index corresponds to the token-type constants above (index 4 is
# IDENTIFIER, ...); the first four entries are ANTLR built-in token types.
tokenNames = [
    "<invalid>", "<EOR>", "<DOWN>", "<UP>",
    "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL",
    "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence",
    "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape",
    "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
    "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
    "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'",
    "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'",
    "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'",
    "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'",
    "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
    "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'",
    "'++'", "'--'", "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='",
    "'/='", "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
    "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='",
    "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'",
    "'default'", "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'",
    "'goto'", "'continue'", "'break'", "'return'"
]
class function_definition_scope(object):
    """Dynamic scope for the function_definition rule.

    Holds the modifier and declarator text plus the source positions of the
    declarator and of the opening '{'; the rule actions fill these in while
    a function definition is being parsed.
    """

    def __init__(self):
        # All fields start unset; the parser actions assign real values.
        self.ModifierText = self.DeclText = None
        self.LBLine = self.LBOffset = None
        self.DeclLine = self.DeclOffset = None
class postfix_expression_scope(object):
    """Dynamic scope for the postfix_expression rule.

    Accumulates the textual name of a function-call target while the
    rule is being parsed.
    """

    def __init__(self):
        # None until the rule actions record a call target.
        self.FuncCallText = None
class CParser(Parser):
grammarFileName = "C.g"
tokenNames = tokenNames
def __init__(self, input):
    """Create a CParser over *input* and reset all per-parse state."""
    Parser.__init__(self, input)
    # Dynamic rule scopes used by grammar actions (pushed/popped per rule).
    self.function_definition_stack = []
    self.postfix_expression_stack = []
    # Backtracking memoization table (rule index -> input-position map).
    self.ruleMemo = {}
def printTokenInfo(self, line, offset, tokenText):
    """Debug helper: print a token as 'line,offset:text'."""
    info = str(line) + ',' + str(offset) + ':' + str(tokenText)
    print(info)
def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
    """Record a predicate-expression fragment spanning the given positions."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    FileProfile.PredicateExpressionList.append(
        CodeFragment.PredicateExpression(Text, begin, end))
def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
    """Record an enum definition fragment spanning the given positions."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    FileProfile.EnumerationDefinitionList.append(
        CodeFragment.EnumerationDefinition(Text, begin, end))
def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
    """Record a struct/union definition fragment spanning the given positions."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    FileProfile.StructUnionDefinitionList.append(
        CodeFragment.StructUnionDefinition(Text, begin, end))
def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
    """Record a typedef (FromText -> ToText) spanning the given positions."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    FileProfile.TypedefDefinitionList.append(
        CodeFragment.TypedefDefinition(FromText, ToText, begin, end))
def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
    """Record a function definition: modifier/declarator text plus the
    rule span, '{' position, and declarator position."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    brace_pos = (LeftBraceLine, LeftBraceOffset)
    decl_pos = (DeclLine, DeclOffset)
    FileProfile.FunctionDefinitionList.append(
        CodeFragment.FunctionDefinition(ModifierText, DeclText, begin, end, brace_pos, decl_pos))
def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
    """Record a variable declaration fragment spanning the given positions."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    FileProfile.VariableDeclarationList.append(
        CodeFragment.VariableDeclaration(ModifierText, DeclText, begin, end))
def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
    """Record a function-call site (name + parameter text) and its span."""
    begin = (StartLine, StartOffset)
    end = (EndLine, EndOffset)
    FileProfile.FunctionCallingList.append(
        CodeFragment.FunctionCalling(FuncName, ParamList, begin, end))
# $ANTLR start translation_unit
# C.g:102:1: translation_unit : ( external_declaration )* ;
def translation_unit(self, ):
    """Parse rule translation_unit (C.g:102): ( external_declaration )* .

    Grammar entry point: consumes external declarations until the
    lookahead no longer starts one.
    """
    translation_unit_StartIndex = self.input.index()
    try:
        try:
            # When backtracking, skip the rule if it was already memoized here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1):
                return

            # C.g:103:2: ( ( external_declaration )* )
            # C.g:103:4: ( external_declaration )*
            # C.g:103:4: ( external_declaration )*
            while True: #loop1
                alt1 = 2
                LA1_0 = self.input.LA(1)
                # Token types that can begin an external declaration.
                if (LA1_0 == IDENTIFIER or LA1_0 == 26 or (29 <= LA1_0 <= 42) or (45 <= LA1_0 <= 46) or (48 <= LA1_0 <= 62) or LA1_0 == 66) :
                    alt1 = 1

                if alt1 == 1:
                    # C.g:0:0: external_declaration
                    self.following.append(self.FOLLOW_external_declaration_in_translation_unit74)
                    self.external_declaration()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop1

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 1, translation_unit_StartIndex)
        pass
    return
# $ANTLR end translation_unit
# $ANTLR start external_declaration
# C.g:114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );
def external_declaration(self, ):
    """Parse rule external_declaration (C.g:114).

    Grammar:
        external_declaration options {k=1;}
            : ( (declaration_specifiers)? declarator (declaration)* '{' )=> function_definition
            | declaration
            | macro_statement (';')?
            ;

    Chooses between the three alternatives from the first lookahead token,
    resolving function-definition vs declaration with the synpred4/synpred5
    syntactic predicates (backtracking).
    """
    external_declaration_StartIndex = self.input.index()
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2):
                return

            # C.g:119:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? )
            alt3 = 3
            LA3_0 = self.input.LA(1)
            # Each branch below keys off one lookahead token and then uses the
            # predicates to disambiguate; unmatched input raises NoViableAlt.
            if ((29 <= LA3_0 <= 33)) :
                LA3_1 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input)
                    raise nvae
            elif (LA3_0 == 34) :
                LA3_2 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input)
                    raise nvae
            elif (LA3_0 == 35) :
                LA3_3 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input)
                    raise nvae
            elif (LA3_0 == 36) :
                LA3_4 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 4, self.input)
                    raise nvae
            elif (LA3_0 == 37) :
                LA3_5 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input)
                    raise nvae
            elif (LA3_0 == 38) :
                LA3_6 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input)
                    raise nvae
            elif (LA3_0 == 39) :
                LA3_7 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input)
                    raise nvae
            elif (LA3_0 == 40) :
                LA3_8 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 8, self.input)
                    raise nvae
            elif (LA3_0 == 41) :
                LA3_9 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input)
                    raise nvae
            elif (LA3_0 == 42) :
                LA3_10 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input)
                    raise nvae
            elif ((45 <= LA3_0 <= 46)) :
                LA3_11 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input)
                    raise nvae
            elif (LA3_0 == 48) :
                LA3_12 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 12, self.input)
                    raise nvae
            elif (LA3_0 == IDENTIFIER) :
                LA3_13 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                elif (True) :
                    # Only an IDENTIFIER start can fall through to macro_statement.
                    alt3 = 3
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input)
                    raise nvae
            elif (LA3_0 == 58) :
                LA3_14 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input)
                    raise nvae
            elif (LA3_0 == 66) and (self.synpred4()):
                alt3 = 1
            elif (LA3_0 == 59) :
                LA3_16 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input)
                    raise nvae
            elif (LA3_0 == 60) :
                LA3_17 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 17, self.input)
                    raise nvae
            elif ((49 <= LA3_0 <= 57) or LA3_0 == 61) :
                LA3_18 = self.input.LA(2)
                if (self.synpred4()) :
                    alt3 = 1
                elif (self.synpred5()) :
                    alt3 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input)
                    raise nvae
            elif (LA3_0 == 62) and (self.synpred4()):
                alt3 = 1
            elif (LA3_0 == 26) :
                alt3 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return

                nvae = NoViableAltException("114:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input)
                raise nvae

            if alt3 == 1:
                # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition
                self.following.append(self.FOLLOW_function_definition_in_external_declaration113)
                self.function_definition()
                self.following.pop()
                if self.failed:
                    return
            elif alt3 == 2:
                # C.g:120:4: declaration
                self.following.append(self.FOLLOW_declaration_in_external_declaration118)
                self.declaration()
                self.following.pop()
                if self.failed:
                    return
            elif alt3 == 3:
                # C.g:121:4: macro_statement ( ';' )?
                self.following.append(self.FOLLOW_macro_statement_in_external_declaration123)
                self.macro_statement()
                self.following.pop()
                if self.failed:
                    return
                # C.g:121:20: ( ';' )?  -- optional trailing semicolon
                alt2 = 2
                LA2_0 = self.input.LA(1)
                if (LA2_0 == 25) :
                    alt2 = 1
                if alt2 == 1:
                    # C.g:121:21: ';'
                    self.match(self.input, 25, self.FOLLOW_25_in_external_declaration126)
                    if self.failed:
                        return

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 2, external_declaration_StartIndex)
        pass
    return
# $ANTLR end external_declaration
class function_definition_return(object):
    """Return value of the function_definition rule: start/stop tokens."""

    def __init__(self):
        # Token boundaries of the matched rule; set by the rule body.
        self.start = self.stop = None
# $ANTLR start function_definition
# C.g:126:1: function_definition : (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) ;
def function_definition(self, ):
    """Parse rule function_definition (C.g:126).

    Grammar:
        function_definition
            : (d=declaration_specifiers)? declarator
              ( (declaration)+ a=compound_statement    # K&R-style parameter decls
              | b=compound_statement )                  # ANSI-style body
            ;

    On a successful non-backtracking parse the collected modifier text,
    declarator text, '{' position and declarator position are recorded
    via StoreFunctionDefinition.
    """
    self.function_definition_stack.append(function_definition_scope())
    retval = self.function_definition_return()
    retval.start = self.input.LT(1)
    function_definition_StartIndex = self.input.index()
    d = None
    a = None
    b = None
    declarator1 = None

    # Reset this invocation's dynamic scope.
    self.function_definition_stack[-1].ModifierText = ''
    self.function_definition_stack[-1].DeclText = ''
    self.function_definition_stack[-1].LBLine = 0
    self.function_definition_stack[-1].LBOffset = 0
    self.function_definition_stack[-1].DeclLine = 0
    self.function_definition_stack[-1].DeclOffset = 0

    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 3):
                return retval

            # C.g:146:2: ( (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) )
            # C.g:146:4: (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement )
            # C.g:146:5: (d= declaration_specifiers )?
            alt4 = 2
            LA4 = self.input.LA(1)
            if LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33 or LA4 == 34 or LA4 == 35 or LA4 == 36 or LA4 == 37 or LA4 == 38 or LA4 == 39 or LA4 == 40 or LA4 == 41 or LA4 == 42 or LA4 == 45 or LA4 == 46 or LA4 == 48 or LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
                alt4 = 1
            elif LA4 == IDENTIFIER:
                # An IDENTIFIER start needs deeper lookahead plus synpred7 to
                # decide whether it begins the optional declaration_specifiers.
                LA4 = self.input.LA(2)
                if LA4 == 66:
                    alt4 = 1
                elif LA4 == 58:
                    LA4_21 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 59:
                    LA4_22 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 60:
                    LA4_23 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == IDENTIFIER:
                    LA4_24 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 62:
                    LA4_25 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33:
                    LA4_26 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 34:
                    LA4_27 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 35:
                    LA4_28 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 36:
                    LA4_29 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 37:
                    LA4_30 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 38:
                    LA4_31 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 39:
                    LA4_32 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 40:
                    LA4_33 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 41:
                    LA4_34 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 42:
                    LA4_35 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 45 or LA4 == 46:
                    LA4_36 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 48:
                    LA4_37 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
                elif LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57 or LA4 == 61:
                    LA4_38 = self.input.LA(3)
                    if (self.synpred7()) :
                        alt4 = 1
            elif LA4 == 58:
                LA4_14 = self.input.LA(2)
                if (self.synpred7()) :
                    alt4 = 1
            elif LA4 == 59:
                LA4_16 = self.input.LA(2)
                if (self.synpred7()) :
                    alt4 = 1
            elif LA4 == 60:
                LA4_17 = self.input.LA(2)
                if (self.synpred7()) :
                    alt4 = 1
            if alt4 == 1:
                # C.g:0:0: d= declaration_specifiers
                self.following.append(self.FOLLOW_declaration_specifiers_in_function_definition157)
                d = self.declaration_specifiers()
                self.following.pop()
                if self.failed:
                    return retval

            self.following.append(self.FOLLOW_declarator_in_function_definition160)
            declarator1 = self.declarator()
            self.following.pop()
            if self.failed:
                return retval

            # C.g:147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )
            alt6 = 2
            LA6_0 = self.input.LA(1)
            if (LA6_0 == IDENTIFIER or LA6_0 == 26 or (29 <= LA6_0 <= 42) or (45 <= LA6_0 <= 46) or (48 <= LA6_0 <= 61)) :
                alt6 = 1
            elif (LA6_0 == 43) :
                alt6 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval

                nvae = NoViableAltException("147:3: ( ( declaration )+ a= compound_statement | b= compound_statement )", 6, 0, self.input)
                raise nvae

            if alt6 == 1:
                # C.g:147:5: ( declaration )+ a= compound_statement
                # C.g:147:5: ( declaration )+
                cnt5 = 0
                while True: #loop5
                    alt5 = 2
                    LA5_0 = self.input.LA(1)
                    if (LA5_0 == IDENTIFIER or LA5_0 == 26 or (29 <= LA5_0 <= 42) or (45 <= LA5_0 <= 46) or (48 <= LA5_0 <= 61)) :
                        alt5 = 1

                    if alt5 == 1:
                        # C.g:0:0: declaration
                        self.following.append(self.FOLLOW_declaration_in_function_definition166)
                        self.declaration()
                        self.following.pop()
                        if self.failed:
                            return retval
                    else:
                        # (...)+ requires at least one match before exiting.
                        if cnt5 >= 1:
                            break #loop5

                        if self.backtracking > 0:
                            self.failed = True
                            return retval

                        eee = EarlyExitException(5, self.input)
                        raise eee

                    cnt5 += 1

                self.following.append(self.FOLLOW_compound_statement_in_function_definition171)
                a = self.compound_statement()
                self.following.pop()
                if self.failed:
                    return retval
            elif alt6 == 2:
                # C.g:148:5: b= compound_statement
                self.following.append(self.FOLLOW_compound_statement_in_function_definition180)
                b = self.compound_statement()
                self.following.pop()
                if self.failed:
                    return retval

            if self.backtracking == 0:
                # Copy the matched text/positions into the rule scope.
                if d is not None:
                    self.function_definition_stack[-1].ModifierText = self.input.toString(d.start, d.stop)
                else:
                    self.function_definition_stack[-1].ModifierText = ''
                self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start, declarator1.stop)
                self.function_definition_stack[-1].DeclLine = declarator1.start.line
                self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
                if a is not None:
                    self.function_definition_stack[-1].LBLine = a.start.line
                    self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
                else:
                    self.function_definition_stack[-1].LBLine = b.start.line
                    self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine

            retval.stop = self.input.LT(-1)

            if self.backtracking == 0:
                self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 3, function_definition_StartIndex)
        self.function_definition_stack.pop()
        pass
    return retval
# $ANTLR end function_definition
# $ANTLR start declaration
# C.g:166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );
def declaration(self, ):
    """Parse rule declaration (C.g:166).

    Grammar:
        declaration
            : a='typedef' (b=declaration_specifiers)? c=init_declarator_list d=';'
            | s=declaration_specifiers (t=init_declarator_list)? e=';'
            ;

    Records typedefs via StoreTypedefDefinition and variable declarations
    via StoreVariableDeclaration (only when not backtracking).
    """
    declaration_StartIndex = self.input.index()
    a = None
    d = None
    e = None
    b = None
    c = None
    s = None
    t = None

    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4):
                return

            # C.g:167:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' )
            alt9 = 2
            LA9_0 = self.input.LA(1)
            if (LA9_0 == 26) :
                alt9 = 1
            elif (LA9_0 == IDENTIFIER or (29 <= LA9_0 <= 42) or (45 <= LA9_0 <= 46) or (48 <= LA9_0 <= 61)) :
                alt9 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return

                nvae = NoViableAltException("166:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input)
                raise nvae

            if alt9 == 1:
                # C.g:167:4: a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';'
                a = self.input.LT(1)
                self.match(self.input, 26, self.FOLLOW_26_in_declaration203)
                if self.failed:
                    return
                # C.g:167:17: (b= declaration_specifiers )?
                alt7 = 2
                LA7 = self.input.LA(1)
                if LA7 == 29 or LA7 == 30 or LA7 == 31 or LA7 == 32 or LA7 == 33 or LA7 == 34 or LA7 == 35 or LA7 == 36 or LA7 == 37 or LA7 == 38 or LA7 == 39 or LA7 == 40 or LA7 == 41 or LA7 == 42 or LA7 == 45 or LA7 == 46 or LA7 == 48 or LA7 == 49 or LA7 == 50 or LA7 == 51 or LA7 == 52 or LA7 == 53 or LA7 == 54 or LA7 == 55 or LA7 == 56 or LA7 == 57 or LA7 == 61:
                    alt7 = 1
                elif LA7 == IDENTIFIER:
                    LA7_13 = self.input.LA(2)
                    if (LA7_13 == 62) :
                        LA7_21 = self.input.LA(3)
                        if (self.synpred10()) :
                            alt7 = 1
                    elif (LA7_13 == IDENTIFIER or (29 <= LA7_13 <= 42) or (45 <= LA7_13 <= 46) or (48 <= LA7_13 <= 61) or LA7_13 == 66) :
                        alt7 = 1
                elif LA7 == 58:
                    LA7_14 = self.input.LA(2)
                    if (self.synpred10()) :
                        alt7 = 1
                elif LA7 == 59:
                    LA7_16 = self.input.LA(2)
                    if (self.synpred10()) :
                        alt7 = 1
                elif LA7 == 60:
                    LA7_17 = self.input.LA(2)
                    if (self.synpred10()) :
                        alt7 = 1
                if alt7 == 1:
                    # C.g:0:0: b= declaration_specifiers
                    self.following.append(self.FOLLOW_declaration_specifiers_in_declaration207)
                    b = self.declaration_specifiers()
                    self.following.pop()
                    if self.failed:
                        return

                self.following.append(self.FOLLOW_init_declarator_list_in_declaration216)
                c = self.init_declarator_list()
                self.following.pop()
                if self.failed:
                    return
                d = self.input.LT(1)
                self.match(self.input, 25, self.FOLLOW_25_in_declaration220)
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Record the typedef; specifier text may legitimately be empty.
                    if b is not None:
                        self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start, b.stop), self.input.toString(c.start, c.stop))
                    else:
                        self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start, c.stop))

            elif alt9 == 2:
                # C.g:175:4: s= declaration_specifiers (t= init_declarator_list )? e= ';'
                self.following.append(self.FOLLOW_declaration_specifiers_in_declaration234)
                s = self.declaration_specifiers()
                self.following.pop()
                if self.failed:
                    return
                # C.g:175:30: (t= init_declarator_list )?
                alt8 = 2
                LA8_0 = self.input.LA(1)
                if (LA8_0 == IDENTIFIER or (58 <= LA8_0 <= 60) or LA8_0 == 62 or LA8_0 == 66) :
                    alt8 = 1
                if alt8 == 1:
                    # C.g:0:0: t= init_declarator_list
                    self.following.append(self.FOLLOW_init_declarator_list_in_declaration238)
                    t = self.init_declarator_list()
                    self.following.pop()
                    if self.failed:
                        return

                e = self.input.LT(1)
                self.match(self.input, 25, self.FOLLOW_25_in_declaration243)
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Declarations without an init_declarator_list are not recorded.
                    if t is not None:
                        self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start, s.stop), self.input.toString(t.start, t.stop))

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 4, declaration_StartIndex)
        pass
    return
# $ANTLR end declaration
class declaration_specifiers_return(object):
    """Return value of the declaration_specifiers rule: start/stop tokens."""

    def __init__(self):
        # Token boundaries of the matched rule; set by the rule body.
        self.start = self.stop = None
# $ANTLR start declaration_specifiers
# C.g:182:1: declaration_specifiers : ( storage_class_specifier | type_specifier | type_qualifier )+ ;
def declaration_specifiers(self, ):
    """Parse rule declaration_specifiers (C.g:182):
    ( storage_class_specifier | type_specifier | type_qualifier )+ .

    Returns a declaration_specifiers_return carrying the start/stop tokens
    of the matched specifier sequence.
    """
    retval = self.declaration_specifiers_return()
    retval.start = self.input.LT(1)
    declaration_specifiers_StartIndex = self.input.index()
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 5):
                return retval

            # C.g:183:2: ( ( storage_class_specifier | type_specifier | type_qualifier )+ )
            # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
            # C.g:183:6: ( storage_class_specifier | type_specifier | type_qualifier )+
            cnt10 = 0
            while True: #loop10
                alt10 = 4
                LA10 = self.input.LA(1)
                # Decide among the three specifier kinds; synpred14/15 resolve
                # the ambiguous tokens via backtracking.
                if LA10 == 58:
                    LA10_2 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == 59:
                    LA10_3 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == 60:
                    LA10_4 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == IDENTIFIER:
                    LA10_5 = self.input.LA(2)
                    if (self.synpred14()) :
                        alt10 = 2
                elif LA10 == 53:
                    LA10_9 = self.input.LA(2)
                    if (self.synpred15()) :
                        alt10 = 3
                elif LA10 == 29 or LA10 == 30 or LA10 == 31 or LA10 == 32 or LA10 == 33:
                    alt10 = 1
                elif LA10 == 34 or LA10 == 35 or LA10 == 36 or LA10 == 37 or LA10 == 38 or LA10 == 39 or LA10 == 40 or LA10 == 41 or LA10 == 42 or LA10 == 45 or LA10 == 46 or LA10 == 48:
                    alt10 = 2
                elif LA10 == 49 or LA10 == 50 or LA10 == 51 or LA10 == 52 or LA10 == 54 or LA10 == 55 or LA10 == 56 or LA10 == 57 or LA10 == 61:
                    alt10 = 3

                if alt10 == 1:
                    # C.g:183:10: storage_class_specifier
                    self.following.append(self.FOLLOW_storage_class_specifier_in_declaration_specifiers264)
                    self.storage_class_specifier()
                    self.following.pop()
                    if self.failed:
                        return retval
                elif alt10 == 2:
                    # C.g:184:7: type_specifier
                    self.following.append(self.FOLLOW_type_specifier_in_declaration_specifiers272)
                    self.type_specifier()
                    self.following.pop()
                    if self.failed:
                        return retval
                elif alt10 == 3:
                    # C.g:185:13: type_qualifier
                    self.following.append(self.FOLLOW_type_qualifier_in_declaration_specifiers286)
                    self.type_qualifier()
                    self.following.pop()
                    if self.failed:
                        return retval
                else:
                    # (...)+ requires at least one match before exiting.
                    if cnt10 >= 1:
                        break #loop10

                    if self.backtracking > 0:
                        self.failed = True
                        return retval

                    eee = EarlyExitException(10, self.input)
                    raise eee

                cnt10 += 1

            retval.stop = self.input.LT(-1)

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 5, declaration_specifiers_StartIndex)
        pass
    return retval
# $ANTLR end declaration_specifiers
class init_declarator_list_return(object):
    """Return value of the init_declarator_list rule: start/stop tokens."""

    def __init__(self):
        # Token boundaries of the matched rule; set by the rule body.
        self.start = self.stop = None
# $ANTLR start init_declarator_list
# C.g:189:1: init_declarator_list : init_declarator ( ',' init_declarator )* ;
def init_declarator_list(self, ):
    """Parse rule init_declarator_list (C.g:189):
    init_declarator ( ',' init_declarator )* .

    Returns an init_declarator_list_return carrying start/stop tokens.
    """
    retval = self.init_declarator_list_return()
    retval.start = self.input.LT(1)
    init_declarator_list_StartIndex = self.input.index()
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 6):
                return retval

            # C.g:190:2: ( init_declarator ( ',' init_declarator )* )
            # C.g:190:4: init_declarator ( ',' init_declarator )*
            self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list308)
            self.init_declarator()
            self.following.pop()
            if self.failed:
                return retval
            # C.g:190:20: ( ',' init_declarator )*
            while True: #loop11
                alt11 = 2
                LA11_0 = self.input.LA(1)
                # Token 27 is ','.
                if (LA11_0 == 27) :
                    alt11 = 1

                if alt11 == 1:
                    # C.g:190:21: ',' init_declarator
                    self.match(self.input, 27, self.FOLLOW_27_in_init_declarator_list311)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list313)
                    self.init_declarator()
                    self.following.pop()
                    if self.failed:
                        return retval
                else:
                    break #loop11

            retval.stop = self.input.LT(-1)

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 6, init_declarator_list_StartIndex)
        pass
    return retval
# $ANTLR end init_declarator_list
# $ANTLR start init_declarator
# C.g:193:1: init_declarator : declarator ( '=' initializer )? ;
def init_declarator(self, ):
    """Parse rule init_declarator (C.g:193): declarator ( '=' initializer )? ."""
    init_declarator_StartIndex = self.input.index()
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7):
                return

            # C.g:194:2: ( declarator ( '=' initializer )? )
            # C.g:194:4: declarator ( '=' initializer )?
            self.following.append(self.FOLLOW_declarator_in_init_declarator326)
            self.declarator()
            self.following.pop()
            if self.failed:
                return
            # C.g:194:15: ( '=' initializer )?  -- token 28 is '='
            alt12 = 2
            LA12_0 = self.input.LA(1)
            if (LA12_0 == 28) :
                alt12 = 1
            if alt12 == 1:
                # C.g:194:16: '=' initializer
                self.match(self.input, 28, self.FOLLOW_28_in_init_declarator329)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_initializer_in_init_declarator331)
                self.initializer()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 7, init_declarator_StartIndex)
        pass
    return
# $ANTLR end init_declarator
# $ANTLR start storage_class_specifier
# C.g:197:1: storage_class_specifier : ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' );
def storage_class_specifier(self, ):
    """Parse rule storage_class_specifier (C.g:197): one of
    'extern' | 'static' | 'auto' | 'register' | 'STATIC'."""
    storage_class_specifier_StartIndex = self.input.index()
    try:
        try:
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8):
                return

            # C.g:198:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' )
            # C.g:
            # Token types 29..33 cover exactly the five storage-class keywords.
            if (29 <= self.input.LA(1) <= 33):
                self.input.consume();
                self.errorRecovery = False
                self.failed = False
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return

                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_storage_class_specifier0
                    )
                raise mse

        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 8, storage_class_specifier_StartIndex)
        pass
    return
# $ANTLR end storage_class_specifier
# $ANTLR start type_specifier
# C.g:205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );
def type_specifier(self, ):
    """Parse a C type specifier (ANTLR rule index 9).

    Grammar (C.g:205): one of the built-in type keywords ('void'..'unsigned',
    token types 34..42), a struct/union specifier, an enum specifier, or a
    typedef name (IDENTIFIER, guarded by syntactic predicate synpred34).
    For struct/union and enum alternatives, the matched source span is
    forwarded to StoreStructUnionDefinition / StoreEnumerationDefinition
    when not backtracking.
    """
    type_specifier_StartIndex = self.input.index()
    s = None  # struct_or_union_specifier return (start/stop tokens)
    e = None  # enum_specifier return (start/stop tokens)
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9):
                return
            # C.g:206:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id )
            # Alternative selection on one token of lookahead.
            alt13 = 12
            LA13_0 = self.input.LA(1)
            if (LA13_0 == 34) :
                alt13 = 1
            elif (LA13_0 == 35) :
                alt13 = 2
            elif (LA13_0 == 36) :
                alt13 = 3
            elif (LA13_0 == 37) :
                alt13 = 4
            elif (LA13_0 == 38) :
                alt13 = 5
            elif (LA13_0 == 39) :
                alt13 = 6
            elif (LA13_0 == 40) :
                alt13 = 7
            elif (LA13_0 == 41) :
                alt13 = 8
            elif (LA13_0 == 42) :
                alt13 = 9
            elif ((45 <= LA13_0 <= 46)) :
                alt13 = 10
            elif (LA13_0 == 48) :
                alt13 = 11
            elif (LA13_0 == IDENTIFIER) and (self.synpred34()):
                alt13 = 12
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("205:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input)
                raise nvae
            if alt13 == 1:
                # C.g:206:4: 'void'
                self.match(self.input, 34, self.FOLLOW_34_in_type_specifier376)
                if self.failed:
                    return
            elif alt13 == 2:
                # C.g:207:4: 'char'
                self.match(self.input, 35, self.FOLLOW_35_in_type_specifier381)
                if self.failed:
                    return
            elif alt13 == 3:
                # C.g:208:4: 'short'
                self.match(self.input, 36, self.FOLLOW_36_in_type_specifier386)
                if self.failed:
                    return
            elif alt13 == 4:
                # C.g:209:4: 'int'
                self.match(self.input, 37, self.FOLLOW_37_in_type_specifier391)
                if self.failed:
                    return
            elif alt13 == 5:
                # C.g:210:4: 'long'
                self.match(self.input, 38, self.FOLLOW_38_in_type_specifier396)
                if self.failed:
                    return
            elif alt13 == 6:
                # C.g:211:4: 'float'
                self.match(self.input, 39, self.FOLLOW_39_in_type_specifier401)
                if self.failed:
                    return
            elif alt13 == 7:
                # C.g:212:4: 'double'
                self.match(self.input, 40, self.FOLLOW_40_in_type_specifier406)
                if self.failed:
                    return
            elif alt13 == 8:
                # C.g:213:4: 'signed'
                self.match(self.input, 41, self.FOLLOW_41_in_type_specifier411)
                if self.failed:
                    return
            elif alt13 == 9:
                # C.g:214:4: 'unsigned'
                self.match(self.input, 42, self.FOLLOW_42_in_type_specifier416)
                if self.failed:
                    return
            elif alt13 == 10:
                # C.g:215:4: s= struct_or_union_specifier
                self.following.append(self.FOLLOW_struct_or_union_specifier_in_type_specifier423)
                s = self.struct_or_union_specifier()
                self.following.pop()
                if self.failed:
                    return
                # Record the struct/union definition span only on a real
                # (non-backtracking) parse that produced a stop token.
                if self.backtracking == 0:
                    if s.stop is not None:
                        self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start, s.stop))
            elif alt13 == 11:
                # C.g:220:4: e= enum_specifier
                self.following.append(self.FOLLOW_enum_specifier_in_type_specifier433)
                e = self.enum_specifier()
                self.following.pop()
                if self.failed:
                    return
                # Record the enum definition span only on a real parse.
                if self.backtracking == 0:
                    if e.stop is not None:
                        self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
            elif alt13 == 12:
                # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )=> type_id
                self.following.append(self.FOLLOW_type_id_in_type_specifier451)
                self.type_id()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 9, type_specifier_StartIndex)
        pass
    return
# $ANTLR end type_specifier
# $ANTLR start type_id
# C.g:228:1: type_id : IDENTIFIER ;
def type_id(self, ):
    """Parse a typedef name (ANTLR rule index 10).

    Grammar (C.g:228): type_id : IDENTIFIER ;  A single IDENTIFIER token is
    matched; selection between type_id and other IDENTIFIER uses is done by
    the syntactic predicate in the caller (type_specifier).
    """
    type_id_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10):
                return
            # C.g:229:5: ( IDENTIFIER )
            # C.g:229:9: IDENTIFIER
            self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id467)
            if self.failed:
                return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 10, type_id_StartIndex)
        pass
    return
# $ANTLR end type_id
class struct_or_union_specifier_return(object):
    """Return-value holder for the struct_or_union_specifier rule: carries
    the first and last tokens spanning the matched specifier text."""
    def __init__(self):
        self.start = None  # first token of the matched span
        self.stop = None   # last token; stays None if the rule bailed early
# $ANTLR start struct_or_union_specifier
# C.g:233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );
def struct_or_union_specifier(self, ):
    """Parse a struct or union specifier (ANTLR rule index 11).

    Grammar (C.g:233, k=3):
        struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}'
      | struct_or_union IDENTIFIER
    Returns a struct_or_union_specifier_return whose start/stop tokens
    delimit the matched text (stop is None on early exit).
    """
    retval = self.struct_or_union_specifier_return()
    retval.start = self.input.LT(1)
    struct_or_union_specifier_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 11):
                return retval
            # C.g:235:2: ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER )
            # Up to three tokens of lookahead decide between the two alts
            # (token 43 is '{'; 45/46 are 'struct'/'union').
            alt15 = 2
            LA15_0 = self.input.LA(1)
            if ((45 <= LA15_0 <= 46)) :
                LA15_1 = self.input.LA(2)
                if (LA15_1 == IDENTIFIER) :
                    LA15_2 = self.input.LA(3)
                    if (LA15_2 == 43) :
                        alt15 = 1
                    elif (LA15_2 == EOF or LA15_2 == IDENTIFIER or LA15_2 == 25 or LA15_2 == 27 or (29 <= LA15_2 <= 42) or (45 <= LA15_2 <= 64) or LA15_2 == 66) :
                        alt15 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 2, self.input)
                        raise nvae
                elif (LA15_1 == 43) :
                    alt15 = 1
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 1, self.input)
                    raise nvae
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("233:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 0, self.input)
                raise nvae
            if alt15 == 1:
                # C.g:235:4: struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}'
                self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier494)
                self.struct_or_union()
                self.following.pop()
                if self.failed:
                    return retval
                # C.g:235:20: ( IDENTIFIER )? -- optional tag name
                alt14 = 2
                LA14_0 = self.input.LA(1)
                if (LA14_0 == IDENTIFIER) :
                    alt14 = 1
                if alt14 == 1:
                    # C.g:0:0: IDENTIFIER
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier496)
                    if self.failed:
                        return retval
                self.match(self.input, 43, self.FOLLOW_43_in_struct_or_union_specifier499)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_struct_declaration_list_in_struct_or_union_specifier501)
                self.struct_declaration_list()
                self.following.pop()
                if self.failed:
                    return retval
                self.match(self.input, 44, self.FOLLOW_44_in_struct_or_union_specifier503)
                if self.failed:
                    return retval
            elif alt15 == 2:
                # C.g:236:4: struct_or_union IDENTIFIER -- reference by tag only
                self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier508)
                self.struct_or_union()
                self.following.pop()
                if self.failed:
                    return retval
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier510)
                if self.failed:
                    return retval
            # Mark the last consumed token as the end of the matched span.
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 11, struct_or_union_specifier_StartIndex)
        pass
    return retval
# $ANTLR end struct_or_union_specifier
# $ANTLR start struct_or_union
# C.g:239:1: struct_or_union : ( 'struct' | 'union' );
def struct_or_union(self, ):
    """Parse the 'struct' or 'union' keyword (ANTLR rule index 12).

    Grammar (C.g:239): struct_or_union : ( 'struct' | 'union' ).  Consumes a
    single token of type 45 or 46; on mismatch either flags failure (while
    backtracking) or raises MismatchedSetException after recovery.
    """
    struct_or_union_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12):
                return
            # C.g:240:2: ( 'struct' | 'union' )
            # C.g:
            if (45 <= self.input.LA(1) <= 46):
                self.input.consume();
                self.errorRecovery = False
                self.failed = False
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_struct_or_union0
                )
                raise mse
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 12, struct_or_union_StartIndex)
        pass
    return
# $ANTLR end struct_or_union
# $ANTLR start struct_declaration_list
# C.g:244:1: struct_declaration_list : ( struct_declaration )+ ;
def struct_declaration_list(self, ):
    """Parse one or more struct member declarations (ANTLR rule index 13).

    Grammar (C.g:244): struct_declaration_list : ( struct_declaration )+ ;
    Loops while the lookahead can start a struct_declaration; requires at
    least one iteration, otherwise raises EarlyExitException.
    """
    struct_declaration_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13):
                return
            # C.g:245:2: ( ( struct_declaration )+ )
            # C.g:245:4: ( struct_declaration )+
            # C.g:245:4: ( struct_declaration )+
            cnt16 = 0  # number of struct_declarations matched so far
            while True: #loop16
                alt16 = 2
                LA16_0 = self.input.LA(1)
                # Tokens that can begin a struct_declaration: IDENTIFIER,
                # type keywords (34..42), struct/union (45..46), qualifiers
                # and 'enum' (48..61).
                if (LA16_0 == IDENTIFIER or (34 <= LA16_0 <= 42) or (45 <= LA16_0 <= 46) or (48 <= LA16_0 <= 61)) :
                    alt16 = 1
                if alt16 == 1:
                    # C.g:0:0: struct_declaration
                    self.following.append(self.FOLLOW_struct_declaration_in_struct_declaration_list537)
                    self.struct_declaration()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    # (...)+ must match at least once before exiting.
                    if cnt16 >= 1:
                        break #loop16
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    eee = EarlyExitException(16, self.input)
                    raise eee
                cnt16 += 1
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 13, struct_declaration_list_StartIndex)
        pass
    return
# $ANTLR end struct_declaration_list
# $ANTLR start struct_declaration
# C.g:248:1: struct_declaration : specifier_qualifier_list struct_declarator_list ';' ;
def struct_declaration(self, ):
    """Parse a single struct member declaration (ANTLR rule index 14).

    Grammar (C.g:248):
        struct_declaration : specifier_qualifier_list struct_declarator_list ';' ;
    Token 25 is ';'.
    """
    struct_declaration_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14):
                return
            # C.g:249:2: ( specifier_qualifier_list struct_declarator_list ';' )
            # C.g:249:4: specifier_qualifier_list struct_declarator_list ';'
            self.following.append(self.FOLLOW_specifier_qualifier_list_in_struct_declaration549)
            self.specifier_qualifier_list()
            self.following.pop()
            if self.failed:
                return
            self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration551)
            self.struct_declarator_list()
            self.following.pop()
            if self.failed:
                return
            self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration553)
            if self.failed:
                return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 14, struct_declaration_StartIndex)
        pass
    return
# $ANTLR end struct_declaration
# $ANTLR start specifier_qualifier_list
# C.g:252:1: specifier_qualifier_list : ( type_qualifier | type_specifier )+ ;
def specifier_qualifier_list(self, ):
    """Parse one or more type qualifiers / type specifiers (ANTLR rule 15).

    Grammar (C.g:252):
        specifier_qualifier_list : ( type_qualifier | type_specifier )+ ;
    Ambiguities between the two alternatives (e.g. EFIAPI-style qualifier
    tokens 58..60, or a leading IDENTIFIER) are resolved with syntactic
    predicates synpred39 (qualifier) and synpred40 (specifier).
    """
    specifier_qualifier_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15):
                return
            # C.g:253:2: ( ( type_qualifier | type_specifier )+ )
            # C.g:253:4: ( type_qualifier | type_specifier )+
            # C.g:253:4: ( type_qualifier | type_specifier )+
            cnt17 = 0  # number of qualifier/specifier items matched
            while True: #loop17
                alt17 = 3
                LA17 = self.input.LA(1)
                if LA17 == 58:
                    LA17_2 = self.input.LA(2)
                    if (self.synpred39()) :
                        alt17 = 1
                elif LA17 == 59:
                    LA17_3 = self.input.LA(2)
                    if (self.synpred39()) :
                        alt17 = 1
                elif LA17 == 60:
                    LA17_4 = self.input.LA(2)
                    if (self.synpred39()) :
                        alt17 = 1
                elif LA17 == IDENTIFIER:
                    # IDENTIFIER may be a typedef name; inspect LA(2) (and
                    # LA(3) via synpred40) to decide.
                    LA17 = self.input.LA(2)
                    if LA17 == EOF or LA17 == IDENTIFIER or LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48 or LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 58 or LA17 == 59 or LA17 == 60 or LA17 == 61 or LA17 == 63 or LA17 == 66:
                        alt17 = 2
                    elif LA17 == 62:
                        LA17_94 = self.input.LA(3)
                        if (self.synpred40()) :
                            alt17 = 2
                    elif LA17 == 47:
                        LA17_95 = self.input.LA(3)
                        if (self.synpred40()) :
                            alt17 = 2
                    elif LA17 == 64:
                        LA17_96 = self.input.LA(3)
                        if (self.synpred40()) :
                            alt17 = 2
                elif LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 61:
                    alt17 = 1
                elif LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48:
                    alt17 = 2
                if alt17 == 1:
                    # C.g:253:6: type_qualifier
                    self.following.append(self.FOLLOW_type_qualifier_in_specifier_qualifier_list566)
                    self.type_qualifier()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt17 == 2:
                    # C.g:253:23: type_specifier
                    self.following.append(self.FOLLOW_type_specifier_in_specifier_qualifier_list570)
                    self.type_specifier()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    # (...)+ must match at least once before exiting.
                    if cnt17 >= 1:
                        break #loop17
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    eee = EarlyExitException(17, self.input)
                    raise eee
                cnt17 += 1
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 15, specifier_qualifier_list_StartIndex)
        pass
    return
# $ANTLR end specifier_qualifier_list
# $ANTLR start struct_declarator_list
# C.g:256:1: struct_declarator_list : struct_declarator ( ',' struct_declarator )* ;
def struct_declarator_list(self, ):
    """Parse a comma-separated list of struct declarators (ANTLR rule 16).

    Grammar (C.g:256):
        struct_declarator_list : struct_declarator ( ',' struct_declarator )* ;
    Token 27 is ','.
    """
    struct_declarator_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16):
                return
            # C.g:257:2: ( struct_declarator ( ',' struct_declarator )* )
            # C.g:257:4: struct_declarator ( ',' struct_declarator )*
            self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list584)
            self.struct_declarator()
            self.following.pop()
            if self.failed:
                return
            # C.g:257:22: ( ',' struct_declarator )*
            while True: #loop18
                alt18 = 2
                LA18_0 = self.input.LA(1)
                if (LA18_0 == 27) :
                    alt18 = 1
                if alt18 == 1:
                    # C.g:257:23: ',' struct_declarator
                    self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list587)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list589)
                    self.struct_declarator()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop18
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 16, struct_declarator_list_StartIndex)
        pass
    return
# $ANTLR end struct_declarator_list
# $ANTLR start struct_declarator
# C.g:260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );
def struct_declarator(self, ):
    """Parse one struct declarator, with optional bit-field (ANTLR rule 17).

    Grammar (C.g:260):
        struct_declarator : declarator ( ':' constant_expression )?
                          | ':' constant_expression ;
    Token 47 is ':'; alt 2 is an anonymous bit-field.
    """
    struct_declarator_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17):
                return
            # C.g:261:2: ( declarator ( ':' constant_expression )? | ':' constant_expression )
            alt20 = 2
            LA20_0 = self.input.LA(1)
            # Declarator starters: IDENTIFIER, qualifiers 58..60, '(' (62), '*' (66).
            if (LA20_0 == IDENTIFIER or (58 <= LA20_0 <= 60) or LA20_0 == 62 or LA20_0 == 66) :
                alt20 = 1
            elif (LA20_0 == 47) :
                alt20 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("260:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input)
                raise nvae
            if alt20 == 1:
                # C.g:261:4: declarator ( ':' constant_expression )?
                self.following.append(self.FOLLOW_declarator_in_struct_declarator602)
                self.declarator()
                self.following.pop()
                if self.failed:
                    return
                # C.g:261:15: ( ':' constant_expression )? -- optional bit width
                alt19 = 2
                LA19_0 = self.input.LA(1)
                if (LA19_0 == 47) :
                    alt19 = 1
                if alt19 == 1:
                    # C.g:261:16: ':' constant_expression
                    self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator605)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_constant_expression_in_struct_declarator607)
                    self.constant_expression()
                    self.following.pop()
                    if self.failed:
                        return
            elif alt20 == 2:
                # C.g:262:4: ':' constant_expression
                self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator614)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_constant_expression_in_struct_declarator616)
                self.constant_expression()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 17, struct_declarator_StartIndex)
        pass
    return
# $ANTLR end struct_declarator
class enum_specifier_return(object):
    """Return-value holder for the enum_specifier rule: carries the first
    and last tokens spanning the matched enum text."""
    def __init__(self):
        self.start = None  # first token of the matched span
        self.stop = None   # last token; stays None if the rule bailed early
# $ANTLR start enum_specifier
# C.g:265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );
def enum_specifier(self, ):
    """Parse an enum specifier (ANTLR rule index 18).

    Grammar (C.g:265, k=3):
        'enum' '{' enumerator_list ( ',' )? '}'
      | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}'
      | 'enum' IDENTIFIER
    Tokens: 48 'enum', 43 '{', 44 '}', 27 ','.  Returns an
    enum_specifier_return with start/stop tokens for the matched span.
    """
    retval = self.enum_specifier_return()
    retval.start = self.input.LT(1)
    enum_specifier_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 18):
                return retval
            # C.g:267:2: ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER )
            # Up to three tokens of lookahead choose among the three alts.
            alt23 = 3
            LA23_0 = self.input.LA(1)
            if (LA23_0 == 48) :
                LA23_1 = self.input.LA(2)
                if (LA23_1 == IDENTIFIER) :
                    LA23_2 = self.input.LA(3)
                    if (LA23_2 == 43) :
                        alt23 = 2
                    elif (LA23_2 == EOF or LA23_2 == IDENTIFIER or LA23_2 == 25 or LA23_2 == 27 or (29 <= LA23_2 <= 42) or (45 <= LA23_2 <= 64) or LA23_2 == 66) :
                        alt23 = 3
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return retval
                        nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 2, self.input)
                        raise nvae
                elif (LA23_1 == 43) :
                    alt23 = 1
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 1, self.input)
                    raise nvae
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("265:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 0, self.input)
                raise nvae
            if alt23 == 1:
                # C.g:267:4: 'enum' '{' enumerator_list ( ',' )? '}' -- anonymous enum
                self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier634)
                if self.failed:
                    return retval
                self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier636)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier638)
                self.enumerator_list()
                self.following.pop()
                if self.failed:
                    return retval
                # C.g:267:31: ( ',' )? -- optional trailing comma
                alt21 = 2
                LA21_0 = self.input.LA(1)
                if (LA21_0 == 27) :
                    alt21 = 1
                if alt21 == 1:
                    # C.g:0:0: ','
                    self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier640)
                    if self.failed:
                        return retval
                self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier643)
                if self.failed:
                    return retval
            elif alt23 == 2:
                # C.g:268:4: 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' -- named enum definition
                self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier648)
                if self.failed:
                    return retval
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier650)
                if self.failed:
                    return retval
                self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier652)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier654)
                self.enumerator_list()
                self.following.pop()
                if self.failed:
                    return retval
                # C.g:268:42: ( ',' )? -- optional trailing comma
                alt22 = 2
                LA22_0 = self.input.LA(1)
                if (LA22_0 == 27) :
                    alt22 = 1
                if alt22 == 1:
                    # C.g:0:0: ','
                    self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier656)
                    if self.failed:
                        return retval
                self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier659)
                if self.failed:
                    return retval
            elif alt23 == 3:
                # C.g:269:4: 'enum' IDENTIFIER -- reference by tag only
                self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier664)
                if self.failed:
                    return retval
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier666)
                if self.failed:
                    return retval
            # Mark the last consumed token as the end of the matched span.
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 18, enum_specifier_StartIndex)
        pass
    return retval
# $ANTLR end enum_specifier
# $ANTLR start enumerator_list
# C.g:272:1: enumerator_list : enumerator ( ',' enumerator )* ;
def enumerator_list(self, ):
    """Parse a comma-separated list of enumerators (ANTLR rule index 19).

    Grammar (C.g:272): enumerator_list : enumerator ( ',' enumerator )* ;
    A ',' (token 27) continues the loop only when followed by IDENTIFIER,
    so a trailing comma is left for the caller (enum_specifier) to consume.
    """
    enumerator_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 19):
                return
            # C.g:273:2: ( enumerator ( ',' enumerator )* )
            # C.g:273:4: enumerator ( ',' enumerator )*
            self.following.append(self.FOLLOW_enumerator_in_enumerator_list677)
            self.enumerator()
            self.following.pop()
            if self.failed:
                return
            # C.g:273:15: ( ',' enumerator )*
            while True: #loop24
                alt24 = 2
                LA24_0 = self.input.LA(1)
                if (LA24_0 == 27) :
                    LA24_1 = self.input.LA(2)
                    if (LA24_1 == IDENTIFIER) :
                        alt24 = 1
                if alt24 == 1:
                    # C.g:273:16: ',' enumerator
                    self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list680)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_enumerator_in_enumerator_list682)
                    self.enumerator()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop24
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 19, enumerator_list_StartIndex)
        pass
    return
# $ANTLR end enumerator_list
# $ANTLR start enumerator
# C.g:276:1: enumerator : IDENTIFIER ( '=' constant_expression )? ;
def enumerator(self, ):
    """Parse a single enumerator with optional initializer (ANTLR rule 20).

    Grammar (C.g:276): enumerator : IDENTIFIER ( '=' constant_expression )? ;
    Token 28 is '='.
    """
    enumerator_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20):
                return
            # C.g:277:2: ( IDENTIFIER ( '=' constant_expression )? )
            # C.g:277:4: IDENTIFIER ( '=' constant_expression )?
            self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator695)
            if self.failed:
                return
            # C.g:277:15: ( '=' constant_expression )? -- optional explicit value
            alt25 = 2
            LA25_0 = self.input.LA(1)
            if (LA25_0 == 28) :
                alt25 = 1
            if alt25 == 1:
                # C.g:277:16: '=' constant_expression
                self.match(self.input, 28, self.FOLLOW_28_in_enumerator698)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_constant_expression_in_enumerator700)
                self.constant_expression()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 20, enumerator_StartIndex)
        pass
    return
# $ANTLR end enumerator
# $ANTLR start type_qualifier
# C.g:280:1: type_qualifier : ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' );
def type_qualifier(self, ):
    """Parse one type qualifier keyword (ANTLR rule index 21).

    Grammar (C.g:280): 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' |
    'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' |
    'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED'
    (EDK2-specific qualifiers included; token types 49..61).
    """
    type_qualifier_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21):
                return
            # C.g:281:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' | 'PACKED' )
            # C.g:
            if (49 <= self.input.LA(1) <= 61):
                self.input.consume();
                self.errorRecovery = False
                self.failed = False
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_type_qualifier0
                )
                raise mse
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 21, type_qualifier_StartIndex)
        pass
    return
# $ANTLR end type_qualifier
class declarator_return(object):
    """Return-value holder for the declarator rule: carries the first and
    last tokens spanning the matched declarator text."""
    def __init__(self):
        self.start = None  # first token of the matched span
        self.stop = None   # last token; stays None if the rule bailed early
# $ANTLR start declarator
# C.g:296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );
def declarator(self, ):
    """Parse a C declarator (ANTLR rule index 22).

    Grammar (C.g:296):
        declarator : ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )?
                     ( 'EFI_RUNTIMESERVICE' )? direct_declarator
                   | pointer ;
    Tokens: 66 '*' (pointer start), 58..60 EDK2 calling-convention
    qualifiers.  A leading '*' is disambiguated between the two alts by
    syntactic predicate synpred66.  Returns a declarator_return with the
    start/stop tokens of the matched span.
    """
    retval = self.declarator_return()
    retval.start = self.input.LT(1)
    declarator_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 22):
                return retval
            # C.g:297:2: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer )
            alt30 = 2
            LA30_0 = self.input.LA(1)
            if (LA30_0 == 66) :
                LA30_1 = self.input.LA(2)
                if (self.synpred66()) :
                    alt30 = 1
                elif (True) :
                    alt30 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 1, self.input)
                    raise nvae
            elif (LA30_0 == IDENTIFIER or (58 <= LA30_0 <= 60) or LA30_0 == 62) :
                alt30 = 1
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("296:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 0, self.input)
                raise nvae
            if alt30 == 1:
                # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
                # C.g:297:4: ( pointer )?
                alt26 = 2
                LA26_0 = self.input.LA(1)
                if (LA26_0 == 66) :
                    alt26 = 1
                if alt26 == 1:
                    # C.g:0:0: pointer
                    self.following.append(self.FOLLOW_pointer_in_declarator784)
                    self.pointer()
                    self.following.pop()
                    if self.failed:
                        return retval
                # C.g:297:13: ( 'EFIAPI' )?
                alt27 = 2
                LA27_0 = self.input.LA(1)
                if (LA27_0 == 58) :
                    alt27 = 1
                if alt27 == 1:
                    # C.g:297:14: 'EFIAPI'
                    self.match(self.input, 58, self.FOLLOW_58_in_declarator788)
                    if self.failed:
                        return retval
                # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
                alt28 = 2
                LA28_0 = self.input.LA(1)
                if (LA28_0 == 59) :
                    alt28 = 1
                if alt28 == 1:
                    # C.g:297:26: 'EFI_BOOTSERVICE'
                    self.match(self.input, 59, self.FOLLOW_59_in_declarator793)
                    if self.failed:
                        return retval
                # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
                alt29 = 2
                LA29_0 = self.input.LA(1)
                if (LA29_0 == 60) :
                    alt29 = 1
                if alt29 == 1:
                    # C.g:297:47: 'EFI_RUNTIMESERVICE'
                    self.match(self.input, 60, self.FOLLOW_60_in_declarator798)
                    if self.failed:
                        return retval
                self.following.append(self.FOLLOW_direct_declarator_in_declarator802)
                self.direct_declarator()
                self.following.pop()
                if self.failed:
                    return retval
            elif alt30 == 2:
                # C.g:299:4: pointer -- bare pointer declarator
                self.following.append(self.FOLLOW_pointer_in_declarator808)
                self.pointer()
                self.following.pop()
                if self.failed:
                    return retval
            # Mark the last consumed token as the end of the matched span.
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 22, declarator_StartIndex)
        pass
    return retval
# $ANTLR end declarator
# $ANTLR start direct_declarator
# C.g:302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );
def direct_declarator(self, ):
    """Parse a direct declarator (ANTLR rule index 23).

    Grammar (C.g:302):
        direct_declarator : IDENTIFIER ( declarator_suffix )*
                          | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ ;
    Tokens: 62 '(', 63 ')', 64 '[', 58 'EFIAPI'.  The suffix loops use up
    to three tokens of lookahead plus syntactic predicates (synpred67 for
    loop31, synpred69/synpred70 for alt 2) to decide whether a following
    '(' or '[' begins another declarator_suffix.
    """
    direct_declarator_StartIndex = self.input.index()
    try:
        try:
            # Memoization fast path: skip if this rule already ran here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23):
                return
            # C.g:303:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ )
            alt34 = 2
            LA34_0 = self.input.LA(1)
            if (LA34_0 == IDENTIFIER) :
                alt34 = 1
            elif (LA34_0 == 62) :
                alt34 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("302:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ );", 34, 0, self.input)
                raise nvae
            if alt34 == 1:
                # C.g:303:4: IDENTIFIER ( declarator_suffix )*
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator819)
                if self.failed:
                    return
                # C.g:303:15: ( declarator_suffix )* -- zero or more suffixes
                while True: #loop31
                    alt31 = 2
                    LA31_0 = self.input.LA(1)
                    if (LA31_0 == 62) :
                        # '(' -- could open a parameter list; check LA(2)/LA(3)
                        # plus synpred67 before committing to another suffix.
                        LA31 = self.input.LA(2)
                        if LA31 == 63:
                            LA31_30 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 58:
                            LA31_31 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 66:
                            LA31_32 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 59:
                            LA31_33 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 60:
                            LA31_34 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == IDENTIFIER:
                            LA31_35 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 29 or LA31 == 30 or LA31 == 31 or LA31 == 32 or LA31 == 33:
                            LA31_37 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 34:
                            LA31_38 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 35:
                            LA31_39 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 36:
                            LA31_40 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 37:
                            LA31_41 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 38:
                            LA31_42 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 39:
                            LA31_43 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 40:
                            LA31_44 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 41:
                            LA31_45 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 42:
                            LA31_46 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 45 or LA31 == 46:
                            LA31_47 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 48:
                            LA31_48 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 49 or LA31 == 50 or LA31 == 51 or LA31 == 52 or LA31 == 53 or LA31 == 54 or LA31 == 55 or LA31 == 56 or LA31 == 57 or LA31 == 61:
                            LA31_49 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                    elif (LA31_0 == 64) :
                        # '[' -- could open an array suffix; same predicate scheme.
                        LA31 = self.input.LA(2)
                        if LA31 == 65:
                            LA31_51 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 62:
                            LA31_52 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == IDENTIFIER:
                            LA31_53 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == HEX_LITERAL:
                            LA31_54 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == OCTAL_LITERAL:
                            LA31_55 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == DECIMAL_LITERAL:
                            LA31_56 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == CHARACTER_LITERAL:
                            LA31_57 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == STRING_LITERAL:
                            LA31_58 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == FLOATING_POINT_LITERAL:
                            LA31_59 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 72:
                            LA31_60 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 73:
                            LA31_61 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 66 or LA31 == 68 or LA31 == 69 or LA31 == 77 or LA31 == 78 or LA31 == 79:
                            LA31_62 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                        elif LA31 == 74:
                            LA31_63 = self.input.LA(3)
                            if (self.synpred67()) :
                                alt31 = 1
                    if alt31 == 1:
                        # C.g:0:0: declarator_suffix
                        self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator821)
                        self.declarator_suffix()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop31
            elif alt34 == 2:
                # C.g:304:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+
                self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator827)
                if self.failed:
                    return
                # C.g:304:8: ( 'EFIAPI' )? -- optional calling convention inside parens
                alt32 = 2
                LA32_0 = self.input.LA(1)
                if (LA32_0 == 58) :
                    LA32_1 = self.input.LA(2)
                    if (self.synpred69()) :
                        alt32 = 1
                if alt32 == 1:
                    # C.g:304:9: 'EFIAPI'
                    self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator830)
                    if self.failed:
                        return
                self.following.append(self.FOLLOW_declarator_in_direct_declarator834)
                self.declarator()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_direct_declarator836)
                if self.failed:
                    return
                # C.g:304:35: ( declarator_suffix )+ -- at least one suffix required
                cnt33 = 0
                while True: #loop33
                    alt33 = 2
                    LA33_0 = self.input.LA(1)
                    if (LA33_0 == 62) :
                        # '(' -- same lookahead scheme as loop31, with synpred70.
                        LA33 = self.input.LA(2)
                        if LA33 == 63:
                            LA33_30 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 58:
                            LA33_31 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 66:
                            LA33_32 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 59:
                            LA33_33 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 60:
                            LA33_34 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == IDENTIFIER:
                            LA33_35 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 29 or LA33 == 30 or LA33 == 31 or LA33 == 32 or LA33 == 33:
                            LA33_37 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 34:
                            LA33_38 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 35:
                            LA33_39 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 36:
                            LA33_40 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 37:
                            LA33_41 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 38:
                            LA33_42 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 39:
                            LA33_43 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 40:
                            LA33_44 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 41:
                            LA33_45 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 42:
                            LA33_46 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 45 or LA33 == 46:
                            LA33_47 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 48:
                            LA33_48 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 49 or LA33 == 50 or LA33 == 51 or LA33 == 52 or LA33 == 53 or LA33 == 54 or LA33 == 55 or LA33 == 56 or LA33 == 57 or LA33 == 61:
                            LA33_49 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                    elif (LA33_0 == 64) :
                        # '[' -- array suffix candidates.
                        LA33 = self.input.LA(2)
                        if LA33 == 65:
                            LA33_51 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 62:
                            LA33_52 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == IDENTIFIER:
                            LA33_53 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == HEX_LITERAL:
                            LA33_54 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == OCTAL_LITERAL:
                            LA33_55 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == DECIMAL_LITERAL:
                            LA33_56 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == CHARACTER_LITERAL:
                            LA33_57 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == STRING_LITERAL:
                            LA33_58 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == FLOATING_POINT_LITERAL:
                            LA33_59 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 72:
                            LA33_60 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 73:
                            LA33_61 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 66 or LA33 == 68 or LA33 == 69 or LA33 == 77 or LA33 == 78 or LA33 == 79:
                            LA33_62 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                        elif LA33 == 74:
                            LA33_63 = self.input.LA(3)
                            if (self.synpred70()) :
                                alt33 = 1
                    if alt33 == 1:
                        # C.g:0:0: declarator_suffix
                        self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator838)
                        self.declarator_suffix()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        # (...)+ must match at least once before exiting.
                        if cnt33 >= 1:
                            break #loop33
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        eee = EarlyExitException(33, self.input)
                        raise eee
                    cnt33 += 1
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Record the outcome of this rule invocation for memoization.
        if self.backtracking > 0:
            self.memoize(self.input, 23, direct_declarator_StartIndex)
        pass
    return
# $ANTLR end direct_declarator
# $ANTLR start declarator_suffix
# C.g:307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );
    def declarator_suffix(self, ):
        """Parse one C declarator suffix (ANTLR-generated; memoized rule 24).

        Grammar (C.g:307): declarator_suffix :
            '[' constant_expression ']' | '[' ']'
          | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')'

        While backtracking (self.backtracking > 0) a mismatch sets
        self.failed and returns instead of raising an exception.
        """
        # Start position, recorded for memoization in the finally clause.
        declarator_suffix_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24):
                    return
                # C.g:308:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' )
                # Pick one of the 5 alternatives from up to 3 tokens of
                # lookahead; token 64 is '[', 62 is '(', 63 is ')', 65 is ']'.
                alt35 = 5
                LA35_0 = self.input.LA(1)
                if (LA35_0 == 64) :
                    LA35_1 = self.input.LA(2)
                    if (LA35_1 == 65) :
                        alt35 = 2
                    elif ((IDENTIFIER <= LA35_1 <= FLOATING_POINT_LITERAL) or LA35_1 == 62 or LA35_1 == 66 or (68 <= LA35_1 <= 69) or (72 <= LA35_1 <= 74) or (77 <= LA35_1 <= 79)) :
                        alt35 = 1
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input)
                        raise nvae
                elif (LA35_0 == 62) :
                    LA35 = self.input.LA(2)
                    if LA35 == 63:
                        alt35 = 5
                    elif LA35 == 29 or LA35 == 30 or LA35 == 31 or LA35 == 32 or LA35 == 33 or LA35 == 34 or LA35 == 35 or LA35 == 36 or LA35 == 37 or LA35 == 38 or LA35 == 39 or LA35 == 40 or LA35 == 41 or LA35 == 42 or LA35 == 45 or LA35 == 46 or LA35 == 48 or LA35 == 49 or LA35 == 50 or LA35 == 51 or LA35 == 52 or LA35 == 53 or LA35 == 54 or LA35 == 55 or LA35 == 56 or LA35 == 57 or LA35 == 58 or LA35 == 59 or LA35 == 60 or LA35 == 61 or LA35 == 66:
                        alt35 = 3
                    elif LA35 == IDENTIFIER:
                        # Ambiguous '(' IDENTIFIER: disambiguate between a
                        # parameter_type_list and an identifier_list via
                        # syntactic predicates (speculative parses).
                        LA35_29 = self.input.LA(3)
                        if (self.synpred73()) :
                            alt35 = 3
                        elif (self.synpred74()) :
                            alt35 = 4
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 29, self.input)
                            raise nvae
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("307:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input)
                    raise nvae
                if alt35 == 1:
                    # C.g:308:6: '[' constant_expression ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix852)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix854)
                    self.constant_expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix856)
                    if self.failed:
                        return
                elif alt35 == 2:
                    # C.g:309:9: '[' ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix866)
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_declarator_suffix868)
                    if self.failed:
                        return
                elif alt35 == 3:
                    # C.g:310:9: '(' parameter_type_list ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix878)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix880)
                    self.parameter_type_list()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix882)
                    if self.failed:
                        return
                elif alt35 == 4:
                    # C.g:311:9: '(' identifier_list ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix892)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix894)
                    self.identifier_list()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix896)
                    if self.failed:
                        return
                elif alt35 == 5:
                    # C.g:312:9: '(' ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix906)
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix908)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome so speculative re-parses of this rule at
            # the same position can be skipped.
            if self.backtracking > 0:
                self.memoize(self.input, 24, declarator_suffix_StartIndex)
            pass
        return
# $ANTLR end declarator_suffix
# $ANTLR start pointer
# C.g:315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );
    def pointer(self, ):
        """Parse a C pointer declarator (ANTLR-generated; memoized rule 25).

        Grammar (C.g:315): pointer :
            '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*'
        Token 66 is '*'. Syntactic predicates (synpred75..78) resolve the
        ambiguity between the three alternatives.
        """
        # Start position, recorded for memoization in the finally clause.
        pointer_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25):
                    return
                # C.g:316:2: ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' )
                alt38 = 3
                LA38_0 = self.input.LA(1)
                if (LA38_0 == 66) :
                    LA38 = self.input.LA(2)
                    if LA38 == 66:
                        LA38_2 = self.input.LA(3)
                        if (self.synpred78()) :
                            alt38 = 2
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input)
                            raise nvae
                    elif LA38 == 58:
                        LA38_3 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input)
                            raise nvae
                    elif LA38 == 59:
                        LA38_4 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input)
                            raise nvae
                    elif LA38 == 60:
                        LA38_5 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 5, self.input)
                            raise nvae
                    elif LA38 == EOF or LA38 == IDENTIFIER or LA38 == 25 or LA38 == 26 or LA38 == 27 or LA38 == 28 or LA38 == 29 or LA38 == 30 or LA38 == 31 or LA38 == 32 or LA38 == 33 or LA38 == 34 or LA38 == 35 or LA38 == 36 or LA38 == 37 or LA38 == 38 or LA38 == 39 or LA38 == 40 or LA38 == 41 or LA38 == 42 or LA38 == 43 or LA38 == 45 or LA38 == 46 or LA38 == 47 or LA38 == 48 or LA38 == 62 or LA38 == 63 or LA38 == 64:
                        alt38 = 3
                    elif LA38 == 53:
                        LA38_21 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 21, self.input)
                            raise nvae
                    elif LA38 == 49 or LA38 == 50 or LA38 == 51 or LA38 == 52 or LA38 == 54 or LA38 == 55 or LA38 == 56 or LA38 == 57 or LA38 == 61:
                        LA38_29 = self.input.LA(3)
                        if (self.synpred77()) :
                            alt38 = 1
                        elif (True) :
                            alt38 = 3
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input)
                            raise nvae
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("315:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input)
                    raise nvae
                if alt38 == 1:
                    # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
                    self.match(self.input, 66, self.FOLLOW_66_in_pointer919)
                    if self.failed:
                        return
                    # C.g:316:8: ( type_qualifier )+
                    # One-or-more loop over type_qualifier; each candidate
                    # token is confirmed with synpred75 before committing.
                    cnt36 = 0
                    while True: #loop36
                        alt36 = 2
                        LA36 = self.input.LA(1)
                        if LA36 == 58:
                            LA36_2 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 59:
                            LA36_3 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 60:
                            LA36_4 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 53:
                            LA36_20 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        elif LA36 == 49 or LA36 == 50 or LA36 == 51 or LA36 == 52 or LA36 == 54 or LA36 == 55 or LA36 == 56 or LA36 == 57 or LA36 == 61:
                            LA36_28 = self.input.LA(2)
                            if (self.synpred75()) :
                                alt36 = 1
                        if alt36 == 1:
                            # C.g:0:0: type_qualifier
                            self.following.append(self.FOLLOW_type_qualifier_in_pointer921)
                            self.type_qualifier()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            # Loop exit: at least one qualifier is required.
                            if cnt36 >= 1:
                                break #loop36
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            eee = EarlyExitException(36, self.input)
                            raise eee
                        cnt36 += 1
                    # C.g:316:24: ( pointer )?
                    alt37 = 2
                    LA37_0 = self.input.LA(1)
                    if (LA37_0 == 66) :
                        LA37_1 = self.input.LA(2)
                        if (self.synpred76()) :
                            alt37 = 1
                    if alt37 == 1:
                        # C.g:0:0: pointer
                        self.following.append(self.FOLLOW_pointer_in_pointer924)
                        self.pointer()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt38 == 2:
                    # C.g:317:4: '*' pointer
                    self.match(self.input, 66, self.FOLLOW_66_in_pointer930)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_pointer_in_pointer932)
                    self.pointer()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt38 == 3:
                    # C.g:318:4: '*'
                    self.match(self.input, 66, self.FOLLOW_66_in_pointer937)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for backtracking re-parses.
            if self.backtracking > 0:
                self.memoize(self.input, 25, pointer_StartIndex)
            pass
        return
# $ANTLR end pointer
# $ANTLR start parameter_type_list
# C.g:321:1: parameter_type_list : parameter_list ( ',' ( 'OPTIONAL' )? '...' )? ;
def parameter_type_list(self, ):
parameter_type_list_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26):
return
# C.g:322:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? )
# C.g:322:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )?
self.following.append(self.FOLLOW_parameter_list_in_parameter_type_list948)
self.parameter_list()
self.following.pop()
if self.failed:
return
# C.g:322:19: ( ',' ( 'OPTIONAL' )? '...' )?
alt40 = 2
LA40_0 = self.input.LA(1)
if (LA40_0 == 27) :
alt40 = 1
if alt40 == 1:
# C.g:322:20: ',' ( 'OPTIONAL' )? '...'
self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list951)
if self.failed:
return
# C.g:322:24: ( 'OPTIONAL' )?
alt39 = 2
LA39_0 = self.input.LA(1)
if (LA39_0 == 53) :
alt39 = 1
if alt39 == 1:
# C.g:322:25: 'OPTIONAL'
self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list954)
if self.failed:
return
self.match(self.input, 67, self.FOLLOW_67_in_parameter_type_list958)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 26, parameter_type_list_StartIndex)
pass
return
# $ANTLR end parameter_type_list
# $ANTLR start parameter_list
# C.g:325:1: parameter_list : parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* ;
    def parameter_list(self, ):
        """Parse parameter_list (ANTLR-generated; memoized rule 27).

        Grammar (C.g:325):
            parameter_list : parameter_declaration
                             ( ',' ( 'OPTIONAL' )? parameter_declaration )* ;
        Token 27 is ',', 53 is 'OPTIONAL'.
        """
        # Start position, recorded for memoization in the finally clause.
        parameter_list_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27):
                    return
                # C.g:326:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* )
                # C.g:326:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )*
                self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list971)
                self.parameter_declaration()
                self.following.pop()
                if self.failed:
                    return
                # C.g:326:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )*
                while True: #loop42
                    alt42 = 2
                    LA42_0 = self.input.LA(1)
                    if (LA42_0 == 27) :
                        LA42_1 = self.input.LA(2)
                        if (LA42_1 == 53) :
                            # ',' followed by 'OPTIONAL': confirm via
                            # syntactic predicate before iterating.
                            LA42_3 = self.input.LA(3)
                            if (self.synpred82()) :
                                alt42 = 1
                        elif (LA42_1 == IDENTIFIER or (29 <= LA42_1 <= 42) or (45 <= LA42_1 <= 46) or (48 <= LA42_1 <= 52) or (54 <= LA42_1 <= 61) or LA42_1 == 66) :
                            alt42 = 1
                    if alt42 == 1:
                        # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
                        self.match(self.input, 27, self.FOLLOW_27_in_parameter_list974)
                        if self.failed:
                            return
                        # C.g:326:31: ( 'OPTIONAL' )?
                        alt41 = 2
                        LA41_0 = self.input.LA(1)
                        if (LA41_0 == 53) :
                            LA41_1 = self.input.LA(2)
                            if (self.synpred81()) :
                                alt41 = 1
                        if alt41 == 1:
                            # C.g:326:32: 'OPTIONAL'
                            self.match(self.input, 53, self.FOLLOW_53_in_parameter_list977)
                            if self.failed:
                                return
                        self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list981)
                        self.parameter_declaration()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop42
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for backtracking re-parses.
            if self.backtracking > 0:
                self.memoize(self.input, 27, parameter_list_StartIndex)
            pass
        return
# $ANTLR end parameter_list
# $ANTLR start parameter_declaration
# C.g:329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );
    def parameter_declaration(self, ):
        """Parse parameter_declaration (ANTLR-generated; memoized rule 28).

        Grammar (C.g:329): parameter_declaration :
            declaration_specifiers ( declarator | abstract_declarator )*
                                   ( 'OPTIONAL' )?
          | ( pointer )* IDENTIFIER
        Token 66 is '*', 62 is '(', 64 is '[', 53 is 'OPTIONAL'.
        """
        # Start position, recorded for memoization in the finally clause.
        parameter_declaration_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28):
                    return
                # C.g:330:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER )
                # Choose the alternative: any type-specifier token starts
                # alt 1; a lone IDENTIFIER is disambiguated by synpred86.
                alt46 = 2
                LA46 = self.input.LA(1)
                if LA46 == 29 or LA46 == 30 or LA46 == 31 or LA46 == 32 or LA46 == 33 or LA46 == 34 or LA46 == 35 or LA46 == 36 or LA46 == 37 or LA46 == 38 or LA46 == 39 or LA46 == 40 or LA46 == 41 or LA46 == 42 or LA46 == 45 or LA46 == 46 or LA46 == 48 or LA46 == 49 or LA46 == 50 or LA46 == 51 or LA46 == 52 or LA46 == 53 or LA46 == 54 or LA46 == 55 or LA46 == 56 or LA46 == 57 or LA46 == 58 or LA46 == 59 or LA46 == 60 or LA46 == 61:
                    alt46 = 1
                elif LA46 == IDENTIFIER:
                    LA46_13 = self.input.LA(2)
                    if (self.synpred86()) :
                        alt46 = 1
                    elif (True) :
                        alt46 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input)
                        raise nvae
                elif LA46 == 66:
                    alt46 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("329:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input)
                    raise nvae
                if alt46 == 1:
                    # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
                    self.following.append(self.FOLLOW_declaration_specifiers_in_parameter_declaration994)
                    self.declaration_specifiers()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:330:27: ( declarator | abstract_declarator )*
                    # Zero-or-more loop; synpred83/84 decide between a named
                    # declarator and an abstract (nameless) one.
                    while True: #loop43
                        alt43 = 3
                        LA43 = self.input.LA(1)
                        if LA43 == 66:
                            LA43_5 = self.input.LA(2)
                            if (self.synpred83()) :
                                alt43 = 1
                            elif (self.synpred84()) :
                                alt43 = 2
                        elif LA43 == IDENTIFIER or LA43 == 58 or LA43 == 59 or LA43 == 60:
                            alt43 = 1
                        elif LA43 == 62:
                            # NOTE: this inner lookup rebinds LA43 to LA(2);
                            # generated code relies on that shadowing.
                            LA43 = self.input.LA(2)
                            if LA43 == 29 or LA43 == 30 or LA43 == 31 or LA43 == 32 or LA43 == 33 or LA43 == 34 or LA43 == 35 or LA43 == 36 or LA43 == 37 or LA43 == 38 or LA43 == 39 or LA43 == 40 or LA43 == 41 or LA43 == 42 or LA43 == 45 or LA43 == 46 or LA43 == 48 or LA43 == 49 or LA43 == 50 or LA43 == 51 or LA43 == 52 or LA43 == 53 or LA43 == 54 or LA43 == 55 or LA43 == 56 or LA43 == 57 or LA43 == 61 or LA43 == 63 or LA43 == 64:
                                alt43 = 2
                            elif LA43 == IDENTIFIER:
                                LA43_37 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 58:
                                LA43_38 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 66:
                                LA43_39 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 59:
                                LA43_40 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 60:
                                LA43_41 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                            elif LA43 == 62:
                                LA43_43 = self.input.LA(3)
                                if (self.synpred83()) :
                                    alt43 = 1
                                elif (self.synpred84()) :
                                    alt43 = 2
                        elif LA43 == 64:
                            alt43 = 2
                        if alt43 == 1:
                            # C.g:330:28: declarator
                            self.following.append(self.FOLLOW_declarator_in_parameter_declaration997)
                            self.declarator()
                            self.following.pop()
                            if self.failed:
                                return
                        elif alt43 == 2:
                            # C.g:330:39: abstract_declarator
                            self.following.append(self.FOLLOW_abstract_declarator_in_parameter_declaration999)
                            self.abstract_declarator()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            break #loop43
                    # C.g:330:61: ( 'OPTIONAL' )?
                    alt44 = 2
                    LA44_0 = self.input.LA(1)
                    if (LA44_0 == 53) :
                        alt44 = 1
                    if alt44 == 1:
                        # C.g:330:62: 'OPTIONAL'
                        self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration1004)
                        if self.failed:
                            return
                elif alt46 == 2:
                    # C.g:332:4: ( pointer )* IDENTIFIER
                    # C.g:332:4: ( pointer )*
                    while True: #loop45
                        alt45 = 2
                        LA45_0 = self.input.LA(1)
                        if (LA45_0 == 66) :
                            alt45 = 1
                        if alt45 == 1:
                            # C.g:0:0: pointer
                            self.following.append(self.FOLLOW_pointer_in_parameter_declaration1013)
                            self.pointer()
                            self.following.pop()
                            if self.failed:
                                return
                        else:
                            break #loop45
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1016)
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for backtracking re-parses.
            if self.backtracking > 0:
                self.memoize(self.input, 28, parameter_declaration_StartIndex)
            pass
        return
# $ANTLR end parameter_declaration
# $ANTLR start identifier_list
# C.g:335:1: identifier_list : IDENTIFIER ( ',' IDENTIFIER )* ;
def identifier_list(self, ):
identifier_list_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29):
return
# C.g:336:2: ( IDENTIFIER ( ',' IDENTIFIER )* )
# C.g:336:4: IDENTIFIER ( ',' IDENTIFIER )*
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1027)
if self.failed:
return
# C.g:337:2: ( ',' IDENTIFIER )*
while True: #loop47
alt47 = 2
LA47_0 = self.input.LA(1)
if (LA47_0 == 27) :
alt47 = 1
if alt47 == 1:
# C.g:337:3: ',' IDENTIFIER
self.match(self.input, 27, self.FOLLOW_27_in_identifier_list1031)
if self.failed:
return
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1033)
if self.failed:
return
else:
break #loop47
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 29, identifier_list_StartIndex)
pass
return
# $ANTLR end identifier_list
# $ANTLR start type_name
# C.g:340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );
    def type_name(self, ):
        """Parse type_name (ANTLR-generated; memoized rule 30).

        Grammar (C.g:340): type_name :
            specifier_qualifier_list ( abstract_declarator )? | type_id
        A leading IDENTIFIER is ambiguous (typedef name vs. type_id) and is
        resolved with syntactic predicate synpred90.
        """
        # Start position, recorded for memoization in the finally clause.
        type_name_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30):
                    return
                # C.g:341:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id )
                alt49 = 2
                LA49_0 = self.input.LA(1)
                if ((34 <= LA49_0 <= 42) or (45 <= LA49_0 <= 46) or (48 <= LA49_0 <= 61)) :
                    alt49 = 1
                elif (LA49_0 == IDENTIFIER) :
                    LA49_13 = self.input.LA(2)
                    if (self.synpred90()) :
                        alt49 = 1
                    elif (True) :
                        alt49 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("340:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input)
                    raise nvae
                if alt49 == 1:
                    # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
                    self.following.append(self.FOLLOW_specifier_qualifier_list_in_type_name1046)
                    self.specifier_qualifier_list()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:341:29: ( abstract_declarator )?
                    # Optional abstract declarator: '(' (62), '[' (64), '*' (66).
                    alt48 = 2
                    LA48_0 = self.input.LA(1)
                    if (LA48_0 == 62 or LA48_0 == 64 or LA48_0 == 66) :
                        alt48 = 1
                    if alt48 == 1:
                        # C.g:0:0: abstract_declarator
                        self.following.append(self.FOLLOW_abstract_declarator_in_type_name1048)
                        self.abstract_declarator()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt49 == 2:
                    # C.g:342:4: type_id
                    self.following.append(self.FOLLOW_type_id_in_type_name1054)
                    self.type_id()
                    self.following.pop()
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for backtracking re-parses.
            if self.backtracking > 0:
                self.memoize(self.input, 30, type_name_StartIndex)
            pass
        return
# $ANTLR end type_name
# $ANTLR start abstract_declarator
# C.g:345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );
    def abstract_declarator(self, ):
        """Parse abstract_declarator (ANTLR-generated; memoized rule 31).

        Grammar (C.g:345): abstract_declarator :
            pointer ( direct_abstract_declarator )? | direct_abstract_declarator
        Token 66 is '*', 62 is '(', 64 is '['. The optional
        direct_abstract_declarator after a pointer is confirmed token-set by
        token-set with syntactic predicate synpred91.
        """
        # Start position, recorded for memoization in the finally clause.
        abstract_declarator_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31):
                    return
                # C.g:346:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator )
                alt51 = 2
                LA51_0 = self.input.LA(1)
                if (LA51_0 == 66) :
                    alt51 = 1
                elif (LA51_0 == 62 or LA51_0 == 64) :
                    alt51 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("345:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input)
                    raise nvae
                if alt51 == 1:
                    # C.g:346:4: pointer ( direct_abstract_declarator )?
                    self.following.append(self.FOLLOW_pointer_in_abstract_declarator1065)
                    self.pointer()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:346:12: ( direct_abstract_declarator )?
                    # Generated DFA: enumerate every token that can follow
                    # '(' or '[' and gate each on synpred91.
                    alt50 = 2
                    LA50_0 = self.input.LA(1)
                    if (LA50_0 == 62) :
                        LA50 = self.input.LA(2)
                        if LA50 == 63:
                            LA50_12 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 58:
                            LA50_13 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 66:
                            LA50_14 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 59:
                            LA50_15 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 60:
                            LA50_16 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == IDENTIFIER:
                            LA50_17 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 62:
                            LA50_18 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 64:
                            LA50_19 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 29 or LA50 == 30 or LA50 == 31 or LA50 == 32 or LA50 == 33:
                            LA50_20 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 34:
                            LA50_21 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 35:
                            LA50_22 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 36:
                            LA50_23 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 37:
                            LA50_24 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 38:
                            LA50_25 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 39:
                            LA50_26 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 40:
                            LA50_27 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 41:
                            LA50_28 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 42:
                            LA50_29 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 45 or LA50 == 46:
                            LA50_30 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 48:
                            LA50_31 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 49 or LA50 == 50 or LA50 == 51 or LA50 == 52 or LA50 == 53 or LA50 == 54 or LA50 == 55 or LA50 == 56 or LA50 == 57 or LA50 == 61:
                            LA50_32 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                    elif (LA50_0 == 64) :
                        LA50 = self.input.LA(2)
                        if LA50 == 65:
                            LA50_33 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 62:
                            LA50_34 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == IDENTIFIER:
                            LA50_35 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == HEX_LITERAL:
                            LA50_36 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == OCTAL_LITERAL:
                            LA50_37 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == DECIMAL_LITERAL:
                            LA50_38 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == CHARACTER_LITERAL:
                            LA50_39 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == STRING_LITERAL:
                            LA50_40 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == FLOATING_POINT_LITERAL:
                            LA50_41 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 72:
                            LA50_42 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 73:
                            LA50_43 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 66 or LA50 == 68 or LA50 == 69 or LA50 == 77 or LA50 == 78 or LA50 == 79:
                            LA50_44 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                        elif LA50 == 74:
                            LA50_45 = self.input.LA(3)
                            if (self.synpred91()) :
                                alt50 = 1
                    if alt50 == 1:
                        # C.g:0:0: direct_abstract_declarator
                        self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1067)
                        self.direct_abstract_declarator()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt51 == 2:
                    # C.g:347:4: direct_abstract_declarator
                    self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1073)
                    self.direct_abstract_declarator()
                    self.following.pop()
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for backtracking re-parses.
            if self.backtracking > 0:
                self.memoize(self.input, 31, abstract_declarator_StartIndex)
            pass
        return
# $ANTLR end abstract_declarator
# $ANTLR start direct_abstract_declarator
# C.g:350:1: direct_abstract_declarator : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* ;
    def direct_abstract_declarator(self, ):
        """Parse direct_abstract_declarator (ANTLR-generated; memoized rule 32).

        Grammar (C.g:350): direct_abstract_declarator :
            ( '(' abstract_declarator ')' | abstract_declarator_suffix )
            ( abstract_declarator_suffix )* ;
        Token 62 is '(', 63 is ')', 64 is '['. Each further suffix in the
        trailing loop is confirmed with syntactic predicate synpred94.
        """
        # Start position, recorded for memoization in the finally clause.
        direct_abstract_declarator_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32):
                    return
                # C.g:351:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* )
                # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )*
                # C.g:351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )
                alt52 = 2
                LA52_0 = self.input.LA(1)
                if (LA52_0 == 62) :
                    LA52 = self.input.LA(2)
                    if LA52 == IDENTIFIER or LA52 == 29 or LA52 == 30 or LA52 == 31 or LA52 == 32 or LA52 == 33 or LA52 == 34 or LA52 == 35 or LA52 == 36 or LA52 == 37 or LA52 == 38 or LA52 == 39 or LA52 == 40 or LA52 == 41 or LA52 == 42 or LA52 == 45 or LA52 == 46 or LA52 == 48 or LA52 == 49 or LA52 == 50 or LA52 == 51 or LA52 == 52 or LA52 == 53 or LA52 == 54 or LA52 == 55 or LA52 == 56 or LA52 == 57 or LA52 == 58 or LA52 == 59 or LA52 == 60 or LA52 == 61 or LA52 == 63:
                        alt52 = 2
                    elif LA52 == 66:
                        # '(' '*': could be a parenthesized abstract
                        # declarator or a function suffix — ask synpred93.
                        LA52_18 = self.input.LA(3)
                        if (self.synpred93()) :
                            alt52 = 1
                        elif (True) :
                            alt52 = 2
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input)
                            raise nvae
                    elif LA52 == 62 or LA52 == 64:
                        alt52 = 1
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input)
                        raise nvae
                elif (LA52_0 == 64) :
                    alt52 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("351:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input)
                    raise nvae
                if alt52 == 1:
                    # C.g:351:6: '(' abstract_declarator ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1086)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1088)
                    self.abstract_declarator()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_direct_abstract_declarator1090)
                    if self.failed:
                        return
                elif alt52 == 2:
                    # C.g:351:36: abstract_declarator_suffix
                    self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094)
                    self.abstract_declarator_suffix()
                    self.following.pop()
                    if self.failed:
                        return
                # C.g:351:65: ( abstract_declarator_suffix )*
                # Zero-or-more additional suffixes; the generated DFA checks
                # each possible 2nd token and gates iteration on synpred94.
                while True: #loop53
                    alt53 = 2
                    LA53_0 = self.input.LA(1)
                    if (LA53_0 == 62) :
                        LA53 = self.input.LA(2)
                        if LA53 == 63:
                            LA53_12 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 58:
                            LA53_13 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 66:
                            LA53_14 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 59:
                            LA53_15 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 60:
                            LA53_16 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == IDENTIFIER:
                            LA53_17 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 29 or LA53 == 30 or LA53 == 31 or LA53 == 32 or LA53 == 33:
                            LA53_19 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 34:
                            LA53_20 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 35:
                            LA53_21 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 36:
                            LA53_22 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 37:
                            LA53_23 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 38:
                            LA53_24 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 39:
                            LA53_25 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 40:
                            LA53_26 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 41:
                            LA53_27 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 42:
                            LA53_28 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 45 or LA53 == 46:
                            LA53_29 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 48:
                            LA53_30 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 49 or LA53 == 50 or LA53 == 51 or LA53 == 52 or LA53 == 53 or LA53 == 54 or LA53 == 55 or LA53 == 56 or LA53 == 57 or LA53 == 61:
                            LA53_31 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                    elif (LA53_0 == 64) :
                        LA53 = self.input.LA(2)
                        if LA53 == 65:
                            LA53_33 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 62:
                            LA53_34 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == IDENTIFIER:
                            LA53_35 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == HEX_LITERAL:
                            LA53_36 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == OCTAL_LITERAL:
                            LA53_37 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == DECIMAL_LITERAL:
                            LA53_38 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == CHARACTER_LITERAL:
                            LA53_39 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == STRING_LITERAL:
                            LA53_40 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == FLOATING_POINT_LITERAL:
                            LA53_41 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 72:
                            LA53_42 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 73:
                            LA53_43 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 66 or LA53 == 68 or LA53 == 69 or LA53 == 77 or LA53 == 78 or LA53 == 79:
                            LA53_44 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                        elif LA53 == 74:
                            LA53_45 = self.input.LA(3)
                            if (self.synpred94()) :
                                alt53 = 1
                    if alt53 == 1:
                        # C.g:0:0: abstract_declarator_suffix
                        self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098)
                        self.abstract_declarator_suffix()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        break #loop53
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for backtracking re-parses.
            if self.backtracking > 0:
                self.memoize(self.input, 32, direct_abstract_declarator_StartIndex)
            pass
        return
# $ANTLR end direct_abstract_declarator
# $ANTLR start abstract_declarator_suffix
# C.g:354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );
def abstract_declarator_suffix(self, ):
    """Parse the 'abstract_declarator_suffix' rule (C.g:354):

        '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')'

    Generated ANTLR rule: on failure during backtracking it sets
    ``self.failed`` and returns; otherwise it raises/reports a
    NoViableAltException.  Results are memoized under rule index 33.
    """
    abstract_declarator_suffix_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33):
                return
            # C.g:355:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' )
            alt54 = 4
            LA54_0 = self.input.LA(1)
            if (LA54_0 == 64) :
                # Token 64 is '[' (see the match in alt 1/2 below); second
                # lookahead decides empty vs. sized brackets.
                LA54_1 = self.input.LA(2)
                if (LA54_1 == 65) :
                    alt54 = 1
                elif ((IDENTIFIER <= LA54_1 <= FLOATING_POINT_LITERAL) or LA54_1 == 62 or LA54_1 == 66 or (68 <= LA54_1 <= 69) or (72 <= LA54_1 <= 74) or (77 <= LA54_1 <= 79)) :
                    alt54 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 1, self.input)
                    raise nvae
            elif (LA54_0 == 62) :
                # Token 62 is '(' (see alt 3/4 below); second lookahead
                # decides empty parens vs. a parameter_type_list.
                LA54_2 = self.input.LA(2)
                if (LA54_2 == 63) :
                    alt54 = 3
                elif (LA54_2 == IDENTIFIER or (29 <= LA54_2 <= 42) or (45 <= LA54_2 <= 46) or (48 <= LA54_2 <= 61) or LA54_2 == 66) :
                    alt54 = 4
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input)
                    raise nvae
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("354:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input)
                raise nvae
            if alt54 == 1:
                # C.g:355:4: '[' ']'
                self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1110)
                if self.failed:
                    return
                self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1112)
                if self.failed:
                    return
            elif alt54 == 2:
                # C.g:356:4: '[' constant_expression ']'
                self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1117)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1119)
                self.constant_expression()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 65, self.FOLLOW_65_in_abstract_declarator_suffix1121)
                if self.failed:
                    return
            elif alt54 == 3:
                # C.g:357:4: '(' ')'
                self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1126)
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1128)
                if self.failed:
                    return
            elif alt54 == 4:
                # C.g:358:4: '(' parameter_type_list ')'
                self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1133)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135)
                self.parameter_type_list()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1137)
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 33, abstract_declarator_suffix_StartIndex)
        pass
    return
# $ANTLR end abstract_declarator_suffix
# $ANTLR start initializer
# C.g:361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );
def initializer(self, ):
    """Parse the 'initializer' rule (C.g:361):

        assignment_expression | '{' initializer_list ( ',' )? '}'

    Generated ANTLR rule; memoized under rule index 34.
    """
    initializer_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34):
                return
            # C.g:363:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' )
            alt56 = 2
            LA56_0 = self.input.LA(1)
            if ((IDENTIFIER <= LA56_0 <= FLOATING_POINT_LITERAL) or LA56_0 == 62 or LA56_0 == 66 or (68 <= LA56_0 <= 69) or (72 <= LA56_0 <= 74) or (77 <= LA56_0 <= 79)) :
                alt56 = 1
            elif (LA56_0 == 43) :
                # Token 43 is '{' (matched in alt 2 below).
                alt56 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("361:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input)
                raise nvae
            if alt56 == 1:
                # C.g:363:4: assignment_expression
                self.following.append(self.FOLLOW_assignment_expression_in_initializer1150)
                self.assignment_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt56 == 2:
                # C.g:364:4: '{' initializer_list ( ',' )? '}'
                self.match(self.input, 43, self.FOLLOW_43_in_initializer1155)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_initializer_list_in_initializer1157)
                self.initializer_list()
                self.following.pop()
                if self.failed:
                    return
                # C.g:364:25: ( ',' )?  -- optional trailing comma before '}'
                alt55 = 2
                LA55_0 = self.input.LA(1)
                if (LA55_0 == 27) :
                    alt55 = 1
                if alt55 == 1:
                    # C.g:0:0: ','
                    self.match(self.input, 27, self.FOLLOW_27_in_initializer1159)
                    if self.failed:
                        return
                self.match(self.input, 44, self.FOLLOW_44_in_initializer1162)
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 34, initializer_StartIndex)
        pass
    return
# $ANTLR end initializer
# $ANTLR start initializer_list
# C.g:367:1: initializer_list : initializer ( ',' initializer )* ;
def initializer_list(self, ):
    """Parse the 'initializer_list' rule (C.g:367):

        initializer ( ',' initializer )*

    Generated-rule contract preserved: sets ``self.failed`` and returns
    early on sub-rule failure; memoized under rule index 35.
    """
    rule_start_index = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35):
                return
            # Mandatory first initializer.
            self.following.append(self.FOLLOW_initializer_in_initializer_list1173)
            self.initializer()
            self.following.pop()
            if self.failed:
                return
            # ( ',' initializer )* -- keep consuming while a ',' is
            # followed by a token that can start an initializer.
            while True:
                take_another = False
                if self.input.LA(1) == 27:
                    nxt = self.input.LA(2)
                    if ((IDENTIFIER <= nxt <= FLOATING_POINT_LITERAL) or nxt == 43 or nxt == 62 or nxt == 66 or (68 <= nxt <= 69) or (72 <= nxt <= 74) or (77 <= nxt <= 79)):
                        take_another = True
                if not take_another:
                    break
                self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1176)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_initializer_in_initializer_list1178)
                self.initializer()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 35, rule_start_index)
        pass
    return
# $ANTLR end initializer_list
class argument_expression_list_return(object):
    """Rule-return scope for ``argument_expression_list``.

    Holds the first (``start``) and last (``stop``) tokens matched by the
    rule; both are filled in by the rule body and begin as ``None``.
    """

    def __init__(self):
        self.start = self.stop = None
# $ANTLR start argument_expression_list
# C.g:373:1: argument_expression_list : assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* ;
def argument_expression_list(self, ):
    """Parse the 'argument_expression_list' rule (C.g:373):

        assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )*

    Returns an ``argument_expression_list_return`` whose ``start``/``stop``
    tokens delimit the matched text (used by postfix_expression to record
    function-call argument text).  Memoized under rule index 36.
    """
    retval = self.argument_expression_list_return()
    retval.start = self.input.LT(1)
    argument_expression_list_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 36):
                return retval
            # C.g:374:6: assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )*
            self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1196)
            self.assignment_expression()
            self.following.pop()
            if self.failed:
                return retval
            # C.g:374:28: ( 'OPTIONAL' )?  -- token 53 is 'OPTIONAL'
            alt58 = 2
            LA58_0 = self.input.LA(1)
            if (LA58_0 == 53) :
                alt58 = 1
            if alt58 == 1:
                # C.g:374:29: 'OPTIONAL'
                self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1199)
                if self.failed:
                    return retval
            # C.g:374:42: ( ',' assignment_expression ( 'OPTIONAL' )? )*
            while True: #loop60
                alt60 = 2
                LA60_0 = self.input.LA(1)
                if (LA60_0 == 27) :
                    alt60 = 1
                if alt60 == 1:
                    # C.g:374:43: ',' assignment_expression ( 'OPTIONAL' )?
                    self.match(self.input, 27, self.FOLLOW_27_in_argument_expression_list1204)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1206)
                    self.assignment_expression()
                    self.following.pop()
                    if self.failed:
                        return retval
                    # C.g:374:69: ( 'OPTIONAL' )?
                    alt59 = 2
                    LA59_0 = self.input.LA(1)
                    if (LA59_0 == 53) :
                        alt59 = 1
                    if alt59 == 1:
                        # C.g:374:70: 'OPTIONAL'
                        self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1209)
                        if self.failed:
                            return retval
                else:
                    break #loop60
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 36, argument_expression_list_StartIndex)
        pass
    return retval
# $ANTLR end argument_expression_list
# $ANTLR start additive_expression
# C.g:377:1: additive_expression : ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* ;
def additive_expression(self, ):
    """Parse the 'additive_expression' rule (C.g:377):

        ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*

    Generated ANTLR rule; memoized under rule index 37.
    """
    additive_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37):
                return
            # C.g:378:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )*
            # C.g:378:5: multiplicative_expression
            self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1225)
            self.multiplicative_expression()
            self.following.pop()
            if self.failed:
                return
            # C.g:378:32: ( '+' ... | '-' ... )*  -- token 68 is '+', 69 is '-'
            while True: #loop61
                alt61 = 3
                LA61_0 = self.input.LA(1)
                if (LA61_0 == 68) :
                    alt61 = 1
                elif (LA61_0 == 69) :
                    alt61 = 2
                if alt61 == 1:
                    # C.g:378:33: '+' multiplicative_expression
                    self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1229)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1231)
                    self.multiplicative_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt61 == 2:
                    # C.g:378:65: '-' multiplicative_expression
                    self.match(self.input, 69, self.FOLLOW_69_in_additive_expression1235)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1237)
                    self.multiplicative_expression()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop61
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 37, additive_expression_StartIndex)
        pass
    return
# $ANTLR end additive_expression
# $ANTLR start multiplicative_expression
# C.g:381:1: multiplicative_expression : ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* ;
def multiplicative_expression(self, ):
    """Parse the 'multiplicative_expression' rule (C.g:381):

        ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*

    Generated ANTLR rule; memoized under rule index 38.
    """
    multiplicative_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
                return
            # C.g:382:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
            # C.g:382:5: cast_expression
            self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1251)
            self.cast_expression()
            self.following.pop()
            if self.failed:
                return
            # C.g:382:22: ( ... )*  -- token 66 is '*', 70 is '/', 71 is '%'
            while True: #loop62
                alt62 = 4
                LA62 = self.input.LA(1)
                if LA62 == 66:
                    alt62 = 1
                elif LA62 == 70:
                    alt62 = 2
                elif LA62 == 71:
                    alt62 = 3
                if alt62 == 1:
                    # C.g:382:23: '*' cast_expression
                    self.match(self.input, 66, self.FOLLOW_66_in_multiplicative_expression1255)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1257)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt62 == 2:
                    # C.g:382:45: '/' cast_expression
                    self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1261)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1263)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt62 == 3:
                    # C.g:382:67: '%' cast_expression
                    self.match(self.input, 71, self.FOLLOW_71_in_multiplicative_expression1267)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1269)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return
                else:
                    break #loop62
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 38, multiplicative_expression_StartIndex)
        pass
    return
# $ANTLR end multiplicative_expression
# $ANTLR start cast_expression
# C.g:385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );
def cast_expression(self, ):
    """Parse the 'cast_expression' rule (C.g:385):

        '(' type_name ')' cast_expression | unary_expression

    Disambiguation between a cast and a parenthesized expression uses
    up to 3 tokens of lookahead plus the syntactic predicate
    ``synpred109``.  Memoized under rule index 39.
    """
    cast_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
                return
            # C.g:386:2: ( '(' type_name ')' cast_expression | unary_expression )
            alt63 = 2
            LA63_0 = self.input.LA(1)
            if (LA63_0 == 62) :
                # Token 62 is '(' -- could open either a cast or a
                # parenthesized unary_expression.
                LA63 = self.input.LA(2)
                if LA63 == 34 or LA63 == 35 or LA63 == 36 or LA63 == 37 or LA63 == 38 or LA63 == 39 or LA63 == 40 or LA63 == 41 or LA63 == 42 or LA63 == 45 or LA63 == 46 or LA63 == 48 or LA63 == 49 or LA63 == 50 or LA63 == 51 or LA63 == 52 or LA63 == 53 or LA63 == 54 or LA63 == 55 or LA63 == 56 or LA63 == 57 or LA63 == 58 or LA63 == 59 or LA63 == 60 or LA63 == 61:
                    alt63 = 1
                elif LA63 == IDENTIFIER:
                    # '(' IDENTIFIER ... is ambiguous (typedef name vs.
                    # variable); resolved by the syntactic predicate.
                    LA63_25 = self.input.LA(3)
                    if (self.synpred109()) :
                        alt63 = 1
                    elif (True) :
                        alt63 = 2
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 25, self.input)
                        raise nvae
                elif LA63 == HEX_LITERAL or LA63 == OCTAL_LITERAL or LA63 == DECIMAL_LITERAL or LA63 == CHARACTER_LITERAL or LA63 == STRING_LITERAL or LA63 == FLOATING_POINT_LITERAL or LA63 == 62 or LA63 == 66 or LA63 == 68 or LA63 == 69 or LA63 == 72 or LA63 == 73 or LA63 == 74 or LA63 == 77 or LA63 == 78 or LA63 == 79:
                    alt63 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)
                    raise nvae
            elif ((IDENTIFIER <= LA63_0 <= FLOATING_POINT_LITERAL) or LA63_0 == 66 or (68 <= LA63_0 <= 69) or (72 <= LA63_0 <= 74) or (77 <= LA63_0 <= 79)) :
                alt63 = 2
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("385:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)
                raise nvae
            if alt63 == 1:
                # C.g:386:4: '(' type_name ')' cast_expression
                self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1282)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_type_name_in_cast_expression1284)
                self.type_name()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_cast_expression1286)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_cast_expression_in_cast_expression1288)
                self.cast_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt63 == 2:
                # C.g:387:4: unary_expression
                self.following.append(self.FOLLOW_unary_expression_in_cast_expression1293)
                self.unary_expression()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 39, cast_expression_StartIndex)
        pass
    return
# $ANTLR end cast_expression
# $ANTLR start unary_expression
# C.g:390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );
def unary_expression(self, ):
    """Parse the 'unary_expression' rule (C.g:390):

        postfix_expression | '++' unary_expression | '--' unary_expression
        | unary_operator cast_expression | 'sizeof' unary_expression
        | 'sizeof' '(' type_name ')'

    The two 'sizeof' forms are disambiguated with the syntactic
    predicate ``synpred114``.  Memoized under rule index 40.
    """
    unary_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
                return
            # C.g:391:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
            alt64 = 6
            LA64 = self.input.LA(1)
            if LA64 == IDENTIFIER or LA64 == HEX_LITERAL or LA64 == OCTAL_LITERAL or LA64 == DECIMAL_LITERAL or LA64 == CHARACTER_LITERAL or LA64 == STRING_LITERAL or LA64 == FLOATING_POINT_LITERAL or LA64 == 62:
                alt64 = 1
            elif LA64 == 72:
                # Token 72 is '++' (matched in alt 2 below).
                alt64 = 2
            elif LA64 == 73:
                # Token 73 is '--' (matched in alt 3 below).
                alt64 = 3
            elif LA64 == 66 or LA64 == 68 or LA64 == 69 or LA64 == 77 or LA64 == 78 or LA64 == 79:
                alt64 = 4
            elif LA64 == 74:
                # Token 74 is 'sizeof' (matched in alts 5/6 below).
                LA64_12 = self.input.LA(2)
                if (LA64_12 == 62) :
                    # 'sizeof' '(' ... : expression vs. type_name decided
                    # by the syntactic predicate.
                    LA64_13 = self.input.LA(3)
                    if (self.synpred114()) :
                        alt64 = 5
                    elif (True) :
                        alt64 = 6
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)
                        raise nvae
                elif ((IDENTIFIER <= LA64_12 <= FLOATING_POINT_LITERAL) or LA64_12 == 66 or (68 <= LA64_12 <= 69) or (72 <= LA64_12 <= 74) or (77 <= LA64_12 <= 79)) :
                    alt64 = 5
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)
                    raise nvae
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("390:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)
                raise nvae
            if alt64 == 1:
                # C.g:391:4: postfix_expression
                self.following.append(self.FOLLOW_postfix_expression_in_unary_expression1304)
                self.postfix_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt64 == 2:
                # C.g:392:4: '++' unary_expression
                self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1309)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_unary_expression_in_unary_expression1311)
                self.unary_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt64 == 3:
                # C.g:393:4: '--' unary_expression
                self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1316)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_unary_expression_in_unary_expression1318)
                self.unary_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt64 == 4:
                # C.g:394:4: unary_operator cast_expression
                self.following.append(self.FOLLOW_unary_operator_in_unary_expression1323)
                self.unary_operator()
                self.following.pop()
                if self.failed:
                    return
                self.following.append(self.FOLLOW_cast_expression_in_unary_expression1325)
                self.cast_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt64 == 5:
                # C.g:395:4: 'sizeof' unary_expression
                self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1330)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_unary_expression_in_unary_expression1332)
                self.unary_expression()
                self.following.pop()
                if self.failed:
                    return
            elif alt64 == 6:
                # C.g:396:4: 'sizeof' '(' type_name ')'
                self.match(self.input, 74, self.FOLLOW_74_in_unary_expression1337)
                if self.failed:
                    return
                self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1339)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_type_name_in_unary_expression1341)
                self.type_name()
                self.following.pop()
                if self.failed:
                    return
                self.match(self.input, 63, self.FOLLOW_63_in_unary_expression1343)
                if self.failed:
                    return
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 40, unary_expression_StartIndex)
        pass
    return
# $ANTLR end unary_expression
# $ANTLR start postfix_expression
# C.g:399:1: postfix_expression : p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* ;
def postfix_expression(self, ):
    """Parse the 'postfix_expression' rule (C.g:399):

        p= primary_expression ( '[' expression ']' | '(' a= ')'
        | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')'
        | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER
        | '++' | '--' )*

    Besides parsing, the rule builds ``FuncCallText`` in the scope pushed
    onto ``self.postfix_expression_stack`` and, when a call form is
    recognized (alts 2 and 3), records it via ``self.StoreFunctionCalling``
    with the start/end positions and the argument text.  Memoized under
    rule index 41; the scope is popped in the ``finally`` block.
    """
    self.postfix_expression_stack.append(postfix_expression_scope())
    postfix_expression_StartIndex = self.input.index()
    # Labeled tokens/rule results from the grammar (a, b: ')', x/y/z:
    # IDENTIFIER, p: primary_expression, c: argument_expression_list).
    a = None
    b = None
    x = None
    y = None
    z = None
    p = None
    c = None
    self.postfix_expression_stack[-1].FuncCallText = ''
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
                return
            # C.g:406:6: p= primary_expression ( ... )*
            self.following.append(self.FOLLOW_primary_expression_in_postfix_expression1367)
            p = self.primary_expression()
            self.following.pop()
            if self.failed:
                return
            if self.backtracking == 0:
                # Semantic action: seed FuncCallText with the primary
                # expression's source text.
                self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start, p.stop)
            # C.g:407:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
            while True: #loop65
                alt65 = 10
                LA65 = self.input.LA(1)
                if LA65 == 66:
                    # '*' -- only a postfix suffix when followed by an
                    # IDENTIFIER and the predicate accepts (alt 6).
                    LA65_1 = self.input.LA(2)
                    if (LA65_1 == IDENTIFIER) :
                        LA65_30 = self.input.LA(3)
                        if (self.synpred120()) :
                            alt65 = 6
                elif LA65 == 64:
                    # '[' -- array subscript (alt 1).
                    alt65 = 1
                elif LA65 == 62:
                    # '(' -- call/macro forms; decide by second lookahead.
                    LA65 = self.input.LA(2)
                    if LA65 == 63:
                        alt65 = 2
                    elif LA65 == 29 or LA65 == 30 or LA65 == 31 or LA65 == 32 or LA65 == 33 or LA65 == 34 or LA65 == 35 or LA65 == 36 or LA65 == 37 or LA65 == 38 or LA65 == 39 or LA65 == 40 or LA65 == 41 or LA65 == 42 or LA65 == 45 or LA65 == 46 or LA65 == 48 or LA65 == 49 or LA65 == 50 or LA65 == 51 or LA65 == 52 or LA65 == 53 or LA65 == 54 or LA65 == 55 or LA65 == 56 or LA65 == 57 or LA65 == 58 or LA65 == 59 or LA65 == 60 or LA65 == 61:
                        alt65 = 4
                    elif LA65 == IDENTIFIER:
                        # Argument list vs. macro parameter list resolved
                        # by syntactic predicates.
                        LA65_55 = self.input.LA(3)
                        if (self.synpred117()) :
                            alt65 = 3
                        elif (self.synpred118()) :
                            alt65 = 4
                    elif LA65 == 66:
                        LA65_57 = self.input.LA(3)
                        if (self.synpred117()) :
                            alt65 = 3
                        elif (self.synpred118()) :
                            alt65 = 4
                    elif LA65 == HEX_LITERAL or LA65 == OCTAL_LITERAL or LA65 == DECIMAL_LITERAL or LA65 == CHARACTER_LITERAL or LA65 == STRING_LITERAL or LA65 == FLOATING_POINT_LITERAL or LA65 == 62 or LA65 == 68 or LA65 == 69 or LA65 == 72 or LA65 == 73 or LA65 == 74 or LA65 == 77 or LA65 == 78 or LA65 == 79:
                        alt65 = 3
                elif LA65 == 75:
                    # '.' member access (alt 5).
                    alt65 = 5
                elif LA65 == 76:
                    # '->' member access (alt 7).
                    alt65 = 7
                elif LA65 == 72:
                    # '++' postfix increment (alt 8).
                    alt65 = 8
                elif LA65 == 73:
                    # '--' postfix decrement (alt 9).
                    alt65 = 9
                if alt65 == 1:
                    # C.g:407:13: '[' expression ']'
                    self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1383)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_postfix_expression1385)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1387)
                    if self.failed:
                        return
                elif alt65 == 2:
                    # C.g:408:13: '(' a= ')'  -- zero-argument call
                    self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1401)
                    if self.failed:
                        return
                    a = self.input.LT(1)
                    self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1405)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Record the call with empty argument text.
                        self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')
                elif alt65 == 3:
                    # C.g:409:13: '(' c= argument_expression_list b= ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1420)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1424)
                    c = self.argument_expression_list()
                    self.following.pop()
                    if self.failed:
                        return
                    b = self.input.LT(1)
                    self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1428)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Record the call with the matched argument text.
                        self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start, c.stop))
                elif alt65 == 4:
                    # C.g:410:13: '(' macro_parameter_list ')'
                    self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1444)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1446)
                    self.macro_parameter_list()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1448)
                    if self.failed:
                        return
                elif alt65 == 5:
                    # C.g:411:13: '.' x= IDENTIFIER
                    self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1462)
                    if self.failed:
                        return
                    x = self.input.LT(1)
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1466)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        self.postfix_expression_stack[-1].FuncCallText += '.' + x.text
                elif alt65 == 6:
                    # C.g:412:13: '*' y= IDENTIFIER
                    self.match(self.input, 66, self.FOLLOW_66_in_postfix_expression1482)
                    if self.failed:
                        return
                    y = self.input.LT(1)
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1486)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Note: assignment (not +=) resets FuncCallText here.
                        self.postfix_expression_stack[-1].FuncCallText = y.text
                elif alt65 == 7:
                    # C.g:413:13: '->' z= IDENTIFIER
                    self.match(self.input, 76, self.FOLLOW_76_in_postfix_expression1502)
                    if self.failed:
                        return
                    z = self.input.LT(1)
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1506)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        self.postfix_expression_stack[-1].FuncCallText += '->' + z.text
                elif alt65 == 8:
                    # C.g:414:13: '++'
                    self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1522)
                    if self.failed:
                        return
                elif alt65 == 9:
                    # C.g:415:13: '--'
                    self.match(self.input, 73, self.FOLLOW_73_in_postfix_expression1536)
                    if self.failed:
                        return
                else:
                    break #loop65
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 41, postfix_expression_StartIndex)
        self.postfix_expression_stack.pop()
        pass
    return
# $ANTLR end postfix_expression
# $ANTLR start macro_parameter_list
# C.g:419:1: macro_parameter_list : parameter_declaration ( ',' parameter_declaration )* ;
def macro_parameter_list(self, ):
    """Parse the 'macro_parameter_list' rule (C.g:419):

        parameter_declaration ( ',' parameter_declaration )*

    Generated-rule contract preserved: sets ``self.failed`` and returns
    early on sub-rule failure; memoized under rule index 42.
    """
    rule_start_index = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
                return
            # Mandatory first parameter_declaration.
            self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1559)
            self.parameter_declaration()
            self.following.pop()
            if self.failed:
                return
            # Zero or more ',' parameter_declaration (token 27 is ',').
            while self.input.LA(1) == 27:
                self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1562)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1564)
                self.parameter_declaration()
                self.following.pop()
                if self.failed:
                    return
        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 42, rule_start_index)
        pass
    return
# $ANTLR end macro_parameter_list
# $ANTLR start unary_operator
# C.g:423:1: unary_operator : ( '&' | '*' | '+' | '-' | '~' | '!' );
def unary_operator(self, ):
    """Match one unary-operator token (C.g:423): '&' | '*' | '+' | '-' | '~' | '!'.

    Consumes exactly one token on success; on a mismatch during
    backtracking only sets ``self.failed``, otherwise raises a
    MismatchedSetException after attempting recovery.  Memoized under
    rule index 43.
    """
    rule_start_index = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
                return
            # Token-type set covering the six unary operators.
            lookahead = self.input.LA(1)
            in_operator_set = (lookahead == 66
                               or 68 <= lookahead <= 69
                               or 77 <= lookahead <= 79)
            if not in_operator_set:
                if self.backtracking > 0:
                    # Speculative parse: flag failure, no diagnostics.
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_unary_operator0
                )
                raise mse
            self.input.consume()
            self.errorRecovery = False
            self.failed = False
        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 43, rule_start_index)
        pass
    return
# $ANTLR end unary_operator
class primary_expression_return(object):
    """Rule-return scope for ``primary_expression``.

    Holds the first (``start``) and last (``stop``) tokens matched by the
    rule; both are filled in by the rule body and begin as ``None``.
    """

    def __init__(self):
        self.start = self.stop = None
# $ANTLR start primary_expression
# C.g:432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );
def primary_expression(self, ):
    """Parse the 'primary_expression' rule (C.g:432):

        IDENTIFIER | constant | '(' expression ')'

    Returns a ``primary_expression_return`` whose ``start``/``stop``
    tokens delimit the matched text (consumed by postfix_expression to
    build function-call text).  Memoized under rule index 44.
    """
    retval = self.primary_expression_return()
    retval.start = self.input.LT(1)
    primary_expression_StartIndex = self.input.index()
    try:
        try:
            # Memoization short-circuit while backtracking.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 44):
                return retval
            # C.g:433:2: ( IDENTIFIER | constant | '(' expression ')' )
            alt67 = 3
            LA67 = self.input.LA(1)
            if LA67 == IDENTIFIER:
                # IDENTIFIER alone vs. an IDENTIFIER that starts a
                # string-concatenation 'constant' (see the constant rule):
                # decided by the second lookahead token.
                LA67_1 = self.input.LA(2)
                if (LA67_1 == EOF or LA67_1 == 25 or (27 <= LA67_1 <= 28) or LA67_1 == 44 or LA67_1 == 47 or LA67_1 == 53 or (62 <= LA67_1 <= 66) or (68 <= LA67_1 <= 73) or (75 <= LA67_1 <= 77) or (80 <= LA67_1 <= 102)) :
                    alt67 = 1
                elif (LA67_1 == IDENTIFIER or LA67_1 == STRING_LITERAL) :
                    alt67 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 1, self.input)
                    raise nvae
            elif LA67 == HEX_LITERAL or LA67 == OCTAL_LITERAL or LA67 == DECIMAL_LITERAL or LA67 == CHARACTER_LITERAL or LA67 == STRING_LITERAL or LA67 == FLOATING_POINT_LITERAL:
                alt67 = 2
            elif LA67 == 62:
                # Token 62 is '(' (matched in alt 3 below).
                alt67 = 3
            else:
                if self.backtracking > 0:
                    self.failed = True
                    return retval
                nvae = NoViableAltException("432:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 0, self.input)
                raise nvae
            if alt67 == 1:
                # C.g:433:4: IDENTIFIER
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_primary_expression1613)
                if self.failed:
                    return retval
            elif alt67 == 2:
                # C.g:434:4: constant
                self.following.append(self.FOLLOW_constant_in_primary_expression1618)
                self.constant()
                self.following.pop()
                if self.failed:
                    return retval
            elif alt67 == 3:
                # C.g:435:4: '(' expression ')'
                self.match(self.input, 62, self.FOLLOW_62_in_primary_expression1623)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_expression_in_primary_expression1625)
                self.expression()
                self.following.pop()
                if self.failed:
                    return retval
                self.match(self.input, 63, self.FOLLOW_63_in_primary_expression1627)
                if self.failed:
                    return retval
            retval.stop = self.input.LT(-1)
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 44, primary_expression_StartIndex)
        pass
    return retval
# $ANTLR end primary_expression
# $ANTLR start constant
# C.g:438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );
def constant(self, ):
    """Parse the ``constant`` rule of the C grammar.

    Auto-generated by ANTLR3. Selects one of six alternatives based on the
    lookahead token: a hex/octal/decimal/character/floating-point literal,
    or a (possibly macro-prefixed) string-literal concatenation sequence.
    Returns nothing; on mismatch either raises a recognition exception or,
    while backtracking, sets ``self.failed`` and returns early.
    """
    # Remember the rule's starting input position for memoization (rule #45).
    constant_StartIndex = self.input.index()
    try:
        try:
            # While backtracking, skip re-parsing if this rule was already
            # memoized at this input position.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45):
                return

            # C.g:439:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL )
            # Decision 72: pick the alternative from one token of lookahead.
            alt72 = 6
            LA72 = self.input.LA(1)
            if LA72 == HEX_LITERAL:
                alt72 = 1
            elif LA72 == OCTAL_LITERAL:
                alt72 = 2
            elif LA72 == DECIMAL_LITERAL:
                alt72 = 3
            elif LA72 == CHARACTER_LITERAL:
                alt72 = 4
            elif LA72 == IDENTIFIER or LA72 == STRING_LITERAL:
                alt72 = 5
            elif LA72 == FLOATING_POINT_LITERAL:
                alt72 = 6
            else:
                # No viable alternative: fail quietly while backtracking,
                # otherwise raise for normal error reporting/recovery.
                if self.backtracking > 0:
                    self.failed = True
                    return
                nvae = NoViableAltException("438:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input)
                raise nvae

            if alt72 == 1:
                # C.g:439:9: HEX_LITERAL
                self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1643)
                if self.failed:
                    return
            elif alt72 == 2:
                # C.g:440:9: OCTAL_LITERAL
                self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1653)
                if self.failed:
                    return
            elif alt72 == 3:
                # C.g:441:9: DECIMAL_LITERAL
                self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1663)
                if self.failed:
                    return
            elif alt72 == 4:
                # C.g:442:7: CHARACTER_LITERAL
                self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1671)
                if self.failed:
                    return
            elif alt72 == 5:
                # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )*
                # C.g:443:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+
                # loop70: one-or-more groups of "IDENTIFIER* STRING_LITERAL+"
                # (handles macro names adjacent to string concatenations).
                cnt70 = 0
                while True: #loop70
                    alt70 = 2
                    LA70_0 = self.input.LA(1)
                    if (LA70_0 == IDENTIFIER) :
                        LA70_1 = self.input.LA(2)
                        if (LA70_1 == STRING_LITERAL) :
                            alt70 = 1
                        elif (LA70_1 == IDENTIFIER) :
                            # Ambiguous IDENTIFIER IDENTIFIER prefix: resolve
                            # with syntactic predicate 138 (backtracking probe).
                            LA70_33 = self.input.LA(3)
                            if (self.synpred138()) :
                                alt70 = 1
                    elif (LA70_0 == STRING_LITERAL) :
                        alt70 = 1
                    if alt70 == 1:
                        # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
                        # C.g:443:8: ( IDENTIFIER )*
                        while True: #loop68
                            alt68 = 2
                            LA68_0 = self.input.LA(1)
                            if (LA68_0 == IDENTIFIER) :
                                alt68 = 1
                            if alt68 == 1:
                                # C.g:0:0: IDENTIFIER
                                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1680)
                                if self.failed:
                                    return
                            else:
                                break #loop68
                        # C.g:443:20: ( STRING_LITERAL )+
                        # loop69: at least one string literal must follow.
                        cnt69 = 0
                        while True: #loop69
                            alt69 = 2
                            LA69_0 = self.input.LA(1)
                            if (LA69_0 == STRING_LITERAL) :
                                LA69_31 = self.input.LA(2)
                                # Predicate 137 decides whether this string
                                # still belongs to the current group.
                                if (self.synpred137()) :
                                    alt69 = 1
                            if alt69 == 1:
                                # C.g:0:0: STRING_LITERAL
                                self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1683)
                                if self.failed:
                                    return
                            else:
                                if cnt69 >= 1:
                                    break #loop69
                                # Zero iterations of a (...)+ loop: early exit.
                                if self.backtracking > 0:
                                    self.failed = True
                                    return
                                eee = EarlyExitException(69, self.input)
                                raise eee
                            cnt69 += 1
                    else:
                        if cnt70 >= 1:
                            break #loop70
                        # Zero iterations of a (...)+ loop: early exit.
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        eee = EarlyExitException(70, self.input)
                        raise eee
                    cnt70 += 1
                # C.g:443:38: ( IDENTIFIER )*
                # loop71: trailing identifiers after the last string group.
                while True: #loop71
                    alt71 = 2
                    LA71_0 = self.input.LA(1)
                    if (LA71_0 == IDENTIFIER) :
                        alt71 = 1
                    if alt71 == 1:
                        # C.g:0:0: IDENTIFIER
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1688)
                        if self.failed:
                            return
                    else:
                        break #loop71
            elif alt72 == 6:
                # C.g:444:9: FLOATING_POINT_LITERAL
                self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1699)
                if self.failed:
                    return
        # NOTE(review): "re" shadows the module-level `import re`; harmless in
        # generated code since the regex module is not used inside this rule.
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Memoize the parse result for rule #45 when backtracking so repeated
        # probes at this position are O(1).
        if self.backtracking > 0:
            self.memoize(self.input, 45, constant_StartIndex)
        pass
    return
# $ANTLR end constant
class expression_return(object):
    """Rule-return scope for the ``expression`` rule.

    Carries the first and last tokens consumed by the rule; both are
    filled in by :meth:`expression` during parsing.
    """

    def __init__(self):
        # Token boundaries of the matched input span; unset until parsed.
        self.start = self.stop = None
# $ANTLR start expression
# C.g:449:1: expression : assignment_expression ( ',' assignment_expression )* ;
def expression(self, ):
    """Parse the ``expression`` rule: one assignment_expression optionally
    followed by comma-separated assignment_expressions.

    Auto-generated by ANTLR3. Returns an ``expression_return`` object whose
    ``start``/``stop`` hold the first and last matched tokens; on a
    backtracking failure it returns early with ``self.failed`` set.
    """
    retval = self.expression_return()
    # Record the first lookahead token as the rule's start token.
    retval.start = self.input.LT(1)
    # Remember the starting input position for memoization (rule #46).
    expression_StartIndex = self.input.index()
    try:
        try:
            # While backtracking, skip re-parsing if already memoized here.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 46):
                return retval

            # C.g:450:2: ( assignment_expression ( ',' assignment_expression )* )
            # C.g:450:4: assignment_expression ( ',' assignment_expression )*
            self.following.append(self.FOLLOW_assignment_expression_in_expression1715)
            self.assignment_expression()
            self.following.pop()
            if self.failed:
                return retval
            # C.g:450:26: ( ',' assignment_expression )*
            # loop73: consume ", assignment_expression" while the lookahead
            # is token 27 (the ',' token).
            while True: #loop73
                alt73 = 2
                LA73_0 = self.input.LA(1)
                if (LA73_0 == 27) :
                    alt73 = 1
                if alt73 == 1:
                    # C.g:450:27: ',' assignment_expression
                    self.match(self.input, 27, self.FOLLOW_27_in_expression1718)
                    if self.failed:
                        return retval
                    self.following.append(self.FOLLOW_assignment_expression_in_expression1720)
                    self.assignment_expression()
                    self.following.pop()
                    if self.failed:
                        return retval
                else:
                    break #loop73
            # The previously consumed token closes the rule's span.
            retval.stop = self.input.LT(-1)
        # NOTE(review): "re" shadows the module-level `import re`; harmless
        # here since the regex module is not used inside this rule.
        except RecognitionException as re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        # Memoize the parse result for rule #46 when backtracking.
        if self.backtracking > 0:
            self.memoize(self.input, 46, expression_StartIndex)
        pass
    return retval
# $ANTLR end expression
# $ANTLR start constant_expression
# C.g:453:1: constant_expression : conditional_expression ;
def constant_expression(self, ):
    """Parse the ``constant_expression`` rule, which simply delegates to
    ``conditional_expression``.

    Returns nothing; on a backtracking failure it sets ``self.failed``
    and returns early. Results are memoized under rule index 47.
    """
    rule_start_index = self.input.index()
    try:
        try:
            # Skip re-parsing while backtracking if this position is memoized.
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47):
                return
            # C.g:454:4: conditional_expression
            self.following.append(self.FOLLOW_conditional_expression_in_constant_expression1733)
            self.conditional_expression()
            self.following.pop()
            if self.failed:
                return
        except RecognitionException as parse_err:
            # Report and resynchronize; avoids shadowing the `re` module.
            self.reportError(parse_err)
            self.recover(self.input, parse_err)
    finally:
        # Record the outcome for rule #47 so backtracking probes are O(1).
        if self.backtracking > 0:
            self.memoize(self.input, 47, rule_start_index)
    return
# $ANTLR end constant_expression
# $ANTLR start assignment_expression
# C.g:457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );
def assignment_expression(self, ):
assignment_expression_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48):
return
# C.g:458:2: ( lvalue assignment_operator assignment_expression | conditional_expression )
alt74 = 2
LA74 = self.input.LA(1)
if LA74 == IDENTIFIER:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_13 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input)
raise nvae
elif LA74 == 62:
LA74_14 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input)
raise nvae
elif LA74 == 75:
LA74_15 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input)
raise nvae
elif LA74 == 66:
LA74_16 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input)
raise nvae
elif LA74 == 76:
LA74_17 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input)
raise nvae
elif LA74 == 72:
LA74_18 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input)
raise nvae
elif LA74 == 73:
LA74_19 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input)
raise nvae
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
elif LA74 == STRING_LITERAL:
LA74_21 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input)
raise nvae
elif LA74 == IDENTIFIER:
LA74_22 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 22, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_44 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input)
raise nvae
elif LA74 == 62:
LA74_45 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input)
raise nvae
elif LA74 == 75:
LA74_46 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input)
raise nvae
elif LA74 == 66:
LA74_47 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 47, self.input)
raise nvae
elif LA74 == 76:
LA74_48 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input)
raise nvae
elif LA74 == 72:
LA74_49 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input)
raise nvae
elif LA74 == 73:
LA74_50 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_73 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input)
raise nvae
elif LA74 == 62:
LA74_74 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input)
raise nvae
elif LA74 == 75:
LA74_75 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input)
raise nvae
elif LA74 == 66:
LA74_76 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input)
raise nvae
elif LA74 == 76:
LA74_77 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input)
raise nvae
elif LA74 == 72:
LA74_78 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input)
raise nvae
elif LA74 == 73:
LA74_79 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input)
raise nvae
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_102 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input)
raise nvae
elif LA74 == 62:
LA74_103 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input)
raise nvae
elif LA74 == 75:
LA74_104 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input)
raise nvae
elif LA74 == 66:
LA74_105 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input)
raise nvae
elif LA74 == 76:
LA74_106 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input)
raise nvae
elif LA74 == 72:
LA74_107 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input)
raise nvae
elif LA74 == 73:
LA74_108 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_131 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input)
raise nvae
elif LA74 == 62:
LA74_132 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input)
raise nvae
elif LA74 == 75:
LA74_133 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input)
raise nvae
elif LA74 == 66:
LA74_134 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input)
raise nvae
elif LA74 == 76:
LA74_135 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input)
raise nvae
elif LA74 == 72:
LA74_136 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input)
raise nvae
elif LA74 == 73:
LA74_137 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_160 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input)
raise nvae
elif LA74 == 64:
LA74_161 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input)
raise nvae
elif LA74 == 62:
LA74_162 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input)
raise nvae
elif LA74 == 75:
LA74_163 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input)
raise nvae
elif LA74 == 66:
LA74_164 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input)
raise nvae
elif LA74 == 76:
LA74_165 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input)
raise nvae
elif LA74 == 72:
LA74_166 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input)
raise nvae
elif LA74 == 73:
LA74_167 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == STRING_LITERAL:
LA74_189 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 189, self.input)
raise nvae
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74 = self.input.LA(2)
if LA74 == 64:
LA74_191 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input)
raise nvae
elif LA74 == 62:
LA74_192 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input)
raise nvae
elif LA74 == 75:
LA74_193 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input)
raise nvae
elif LA74 == 66:
LA74_194 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input)
raise nvae
elif LA74 == 76:
LA74_195 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input)
raise nvae
elif LA74 == 72:
LA74_196 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input)
raise nvae
elif LA74 == 73:
LA74_197 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input)
raise nvae
elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 63 or LA74 == 65 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 71 or LA74 == 77 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101 or LA74 == 102:
alt74 = 2
elif LA74 == 28 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88 or LA74 == 89:
alt74 = 1
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input)
raise nvae
elif LA74 == 62:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_220 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_221 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_222 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_223 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_224 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_225 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_226 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input)
raise nvae
elif LA74 == 62:
LA74_227 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input)
raise nvae
elif LA74 == 72:
LA74_228 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input)
raise nvae
elif LA74 == 73:
LA74_229 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_230 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input)
raise nvae
elif LA74 == 74:
LA74_231 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input)
raise nvae
elif LA74 == 34 or LA74 == 35 or LA74 == 36 or LA74 == 37 or LA74 == 38 or LA74 == 39 or LA74 == 40 or LA74 == 41 or LA74 == 42 or LA74 == 45 or LA74 == 46 or LA74 == 48 or LA74 == 49 or LA74 == 50 or LA74 == 51 or LA74 == 52 or LA74 == 53 or LA74 == 54 or LA74 == 55 or LA74 == 56 or LA74 == 57 or LA74 == 58 or LA74 == 59 or LA74 == 60 or LA74 == 61:
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input)
raise nvae
elif LA74 == 72:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_244 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_245 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_246 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_247 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_248 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_249 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_250 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input)
raise nvae
elif LA74 == 62:
LA74_251 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input)
raise nvae
elif LA74 == 72:
LA74_252 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input)
raise nvae
elif LA74 == 73:
LA74_253 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_254 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input)
raise nvae
elif LA74 == 74:
LA74_255 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input)
raise nvae
elif LA74 == 73:
LA74 = self.input.LA(2)
if LA74 == IDENTIFIER:
LA74_256 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_257 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_258 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_259 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_260 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_261 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_262 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input)
raise nvae
elif LA74 == 62:
LA74_263 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input)
raise nvae
elif LA74 == 72:
LA74_264 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input)
raise nvae
elif LA74 == 73:
LA74_265 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_266 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input)
raise nvae
elif LA74 == 74:
LA74_267 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74 = self.input.LA(2)
if LA74 == 62:
LA74_268 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input)
raise nvae
elif LA74 == IDENTIFIER:
LA74_269 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_270 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 270, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_271 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_272 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_273 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_274 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_275 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input)
raise nvae
elif LA74 == 72:
LA74_276 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input)
raise nvae
elif LA74 == 73:
LA74_277 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_278 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input)
raise nvae
elif LA74 == 74:
LA74_279 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input)
raise nvae
elif LA74 == 74:
LA74 = self.input.LA(2)
if LA74 == 62:
LA74_280 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input)
raise nvae
elif LA74 == IDENTIFIER:
LA74_281 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input)
raise nvae
elif LA74 == HEX_LITERAL:
LA74_282 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input)
raise nvae
elif LA74 == OCTAL_LITERAL:
LA74_283 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input)
raise nvae
elif LA74 == DECIMAL_LITERAL:
LA74_284 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input)
raise nvae
elif LA74 == CHARACTER_LITERAL:
LA74_285 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 285, self.input)
raise nvae
elif LA74 == STRING_LITERAL:
LA74_286 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input)
raise nvae
elif LA74 == FLOATING_POINT_LITERAL:
LA74_287 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input)
raise nvae
elif LA74 == 72:
LA74_288 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input)
raise nvae
elif LA74 == 73:
LA74_289 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input)
raise nvae
elif LA74 == 66 or LA74 == 68 or LA74 == 69 or LA74 == 77 or LA74 == 78 or LA74 == 79:
LA74_290 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, self.input)
raise nvae
elif LA74 == 74:
LA74_291 = self.input.LA(3)
if (self.synpred142()) :
alt74 = 1
elif (True) :
alt74 = 2
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input)
raise nvae
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("457:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input)
raise nvae
if alt74 == 1:
# C.g:458:4: lvalue assignment_operator assignment_expression
self.following.append(self.FOLLOW_lvalue_in_assignment_expression1744)
self.lvalue()
self.following.pop()
if self.failed:
return
self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1746)
self.assignment_operator()
self.following.pop()
if self.failed:
return
self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1748)
self.assignment_expression()
self.following.pop()
if self.failed:
return
elif alt74 == 2:
# C.g:459:4: conditional_expression
self.following.append(self.FOLLOW_conditional_expression_in_assignment_expression1753)
self.conditional_expression()
self.following.pop()
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 48, assignment_expression_StartIndex)
pass
return
# $ANTLR end assignment_expression
# $ANTLR start lvalue
# C.g:462:1: lvalue : unary_expression ;
def lvalue(self):
    """Parse an lvalue, which the C.g grammar defines simply as a unary_expression."""
    lvalue_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization: while backtracking, skip a rule already tried
            # at this input position (rule index 49).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49):
                return

            # C.g:463:4: unary_expression
            self.following.append(self.FOLLOW_unary_expression_in_lvalue1765)
            self.unary_expression()
            self.following.pop()
            if self.failed:
                return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        # Record the result of this rule attempt for later backtracking passes.
        if self.backtracking > 0:
            self.memoize(self.input, 49, lvalue_StartIndex)
    return
# $ANTLR end lvalue
# $ANTLR start assignment_operator
# C.g:466:1: assignment_operator : ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' );
def assignment_operator(self):
    """Match a single C assignment-operator token: '=' or a compound form."""
    assignment_operator_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 50).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50):
                return

            # Token 28 is '='; tokens 80..89 are the compound assignment operators.
            la1 = self.input.LA(1)
            if la1 == 28 or (80 <= la1 <= 89):
                self.input.consume()
                self.errorRecovery = False
                self.failed = False
            else:
                # While backtracking a mismatch only flags failure; otherwise
                # attempt standard ANTLR set-mismatch recovery.
                if self.backtracking > 0:
                    self.failed = True
                    return
                mse = MismatchedSetException(None, self.input)
                self.recoverFromMismatchedSet(
                    self.input, mse, self.FOLLOW_set_in_assignment_operator0
                )
                raise mse

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 50, assignment_operator_StartIndex)
    return
# $ANTLR end assignment_operator
# $ANTLR start conditional_expression
# C.g:480:1: conditional_expression : e= logical_or_expression ( '?' expression ':' conditional_expression )? ;
def conditional_expression(self):
    """Parse a conditional expression: a logical-OR expression optionally
    followed by a ternary '?' expression ':' conditional_expression tail.
    When a ternary tail is matched (and not backtracking), the condition's
    source span is recorded via StorePredicateExpression for the ECC checks."""
    conditional_expression_StartIndex = self.input.index()
    e = None
    try:
        try:
            # Rule memoization for backtracking (rule index 51).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51):
                return

            # Mandatory condition operand.
            self.following.append(self.FOLLOW_logical_or_expression_in_conditional_expression1839)
            e = self.logical_or_expression()
            self.following.pop()
            if self.failed:
                return

            # Optional ternary tail; token 90 is '?'.
            if self.input.LA(1) == 90:
                self.match(self.input, 90, self.FOLLOW_90_in_conditional_expression1842)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_expression_in_conditional_expression1844)
                self.expression()
                self.following.pop()
                if self.failed:
                    return
                # Token 47 is ':'.
                self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1846)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1848)
                self.conditional_expression()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    # Record the '?:' predicate expression's source location/text.
                    self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 51, conditional_expression_StartIndex)
    return
# $ANTLR end conditional_expression
class logical_or_expression_return(object):
    """Return-value holder for the logical_or_expression rule.

    Carries the first and last tokens matched by the rule so callers
    (e.g. conditional_expression) can recover the matched source span.
    """
    def __init__(self):
        # Both are filled in by logical_or_expression(); None until then.
        self.start = None
        self.stop = None
# $ANTLR start logical_or_expression
# C.g:484:1: logical_or_expression : logical_and_expression ( '||' logical_and_expression )* ;
def logical_or_expression(self):
    """Parse logical_and_expression ('||' logical_and_expression)* and return
    a logical_or_expression_return holding the start/stop tokens of the match."""
    retval = self.logical_or_expression_return()
    retval.start = self.input.LT(1)
    logical_or_expression_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 52).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 52):
                return retval

            # Mandatory first operand.
            self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1863)
            self.logical_and_expression()
            self.following.pop()
            if self.failed:
                return retval

            # Zero or more "'||' operand" tails; token 91 is '||'.
            while self.input.LA(1) == 91:
                self.match(self.input, 91, self.FOLLOW_91_in_logical_or_expression1866)
                if self.failed:
                    return retval
                self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1868)
                self.logical_and_expression()
                self.following.pop()
                if self.failed:
                    return retval

            retval.stop = self.input.LT(-1)

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 52, logical_or_expression_StartIndex)
    return retval
# $ANTLR end logical_or_expression
# $ANTLR start logical_and_expression
# C.g:488:1: logical_and_expression : inclusive_or_expression ( '&&' inclusive_or_expression )* ;
def logical_and_expression(self):
    """Parse inclusive_or_expression ('&&' inclusive_or_expression)*."""
    logical_and_expression_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 53).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53):
                return

            # Mandatory first operand.
            self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1881)
            self.inclusive_or_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'&&' operand" tails; token 92 is '&&'.
            while self.input.LA(1) == 92:
                self.match(self.input, 92, self.FOLLOW_92_in_logical_and_expression1884)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1886)
                self.inclusive_or_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 53, logical_and_expression_StartIndex)
    return
# $ANTLR end logical_and_expression
# $ANTLR start inclusive_or_expression
# C.g:492:1: inclusive_or_expression : exclusive_or_expression ( '|' exclusive_or_expression )* ;
def inclusive_or_expression(self):
    """Parse exclusive_or_expression ('|' exclusive_or_expression)*."""
    inclusive_or_expression_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 54).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54):
                return

            # Mandatory first operand.
            self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899)
            self.exclusive_or_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'|' operand" tails; token 93 is '|'.
            while self.input.LA(1) == 93:
                self.match(self.input, 93, self.FOLLOW_93_in_inclusive_or_expression1902)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904)
                self.exclusive_or_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 54, inclusive_or_expression_StartIndex)
    return
# $ANTLR end inclusive_or_expression
# $ANTLR start exclusive_or_expression
# C.g:496:1: exclusive_or_expression : and_expression ( '^' and_expression )* ;
def exclusive_or_expression(self):
    """Parse and_expression ('^' and_expression)*."""
    exclusive_or_expression_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 55).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55):
                return

            # Mandatory first operand.
            self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1917)
            self.and_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'^' operand" tails; token 94 is '^'.
            while self.input.LA(1) == 94:
                self.match(self.input, 94, self.FOLLOW_94_in_exclusive_or_expression1920)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1922)
                self.and_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 55, exclusive_or_expression_StartIndex)
    return
# $ANTLR end exclusive_or_expression
# $ANTLR start and_expression
# C.g:500:1: and_expression : equality_expression ( '&' equality_expression )* ;
def and_expression(self):
    """Parse equality_expression ('&' equality_expression)*."""
    and_expression_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 56).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56):
                return

            # Mandatory first operand.
            self.following.append(self.FOLLOW_equality_expression_in_and_expression1935)
            self.equality_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "'&' operand" tails; token 77 is '&'.
            while self.input.LA(1) == 77:
                self.match(self.input, 77, self.FOLLOW_77_in_and_expression1938)
                if self.failed:
                    return
                self.following.append(self.FOLLOW_equality_expression_in_and_expression1940)
                self.equality_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 56, and_expression_StartIndex)
    return
# $ANTLR end and_expression
# $ANTLR start equality_expression
# C.g:503:1: equality_expression : relational_expression ( ( '==' | '!=' ) relational_expression )* ;
def equality_expression(self):
    """Parse relational_expression (('==' | '!=') relational_expression)*."""
    equality_expression_StartIndex = self.input.index()
    try:
        try:
            # Rule memoization for backtracking (rule index 57).
            if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57):
                return

            # Mandatory first operand.
            self.following.append(self.FOLLOW_relational_expression_in_equality_expression1952)
            self.relational_expression()
            self.following.pop()
            if self.failed:
                return

            # Zero or more "operator operand" tails; tokens 95..96 are '==' / '!='.
            while 95 <= self.input.LA(1) <= 96:
                # Set-match of the operator token. The mismatch branch is kept
                # for parity with the generated set-match idiom, though the
                # loop guard already guarantees a match here.
                if 95 <= self.input.LA(1) <= 96:
                    self.input.consume()
                    self.errorRecovery = False
                    self.failed = False
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    mse = MismatchedSetException(None, self.input)
                    self.recoverFromMismatchedSet(
                        self.input, mse, self.FOLLOW_set_in_equality_expression1955
                    )
                    raise mse
                self.following.append(self.FOLLOW_relational_expression_in_equality_expression1961)
                self.relational_expression()
                self.following.pop()
                if self.failed:
                    return

        except RecognitionException as exc:
            self.reportError(exc)
            self.recover(self.input, exc)
    finally:
        if self.backtracking > 0:
            self.memoize(self.input, 57, equality_expression_StartIndex)
    return
# $ANTLR end equality_expression
# $ANTLR start relational_expression
# C.g:507:1: relational_expression : shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ;
def relational_expression(self, ):
relational_expression_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58):
return
# C.g:508:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* )
# C.g:508:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
self.following.append(self.FOLLOW_shift_expression_in_relational_expression1975)
self.shift_expression()
self.following.pop()
if self.failed:
return
# C.g:508:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )*
while True: #loop82
alt82 = 2
LA82_0 = self.input.LA(1)
if ((97 <= LA82_0 <= 100)) :
alt82 = 1
if alt82 == 1:
# C.g:508:22: ( '<' | '>' | '<=' | '>=' ) shift_expression
if (97 <= self.input.LA(1) <= 100):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_relational_expression1978
)
raise mse
self.following.append(self.FOLLOW_shift_expression_in_relational_expression1988)
self.shift_expression()
self.following.pop()
if self.failed:
return
else:
break #loop82
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 58, relational_expression_StartIndex)
pass
return
# $ANTLR end relational_expression
# $ANTLR start shift_expression
# C.g:511:1: shift_expression : additive_expression ( ( '<<' | '>>' ) additive_expression )* ;
def shift_expression(self, ):
shift_expression_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59):
return
# C.g:512:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* )
# C.g:512:4: additive_expression ( ( '<<' | '>>' ) additive_expression )*
self.following.append(self.FOLLOW_additive_expression_in_shift_expression2001)
self.additive_expression()
self.following.pop()
if self.failed:
return
# C.g:512:24: ( ( '<<' | '>>' ) additive_expression )*
while True: #loop83
alt83 = 2
LA83_0 = self.input.LA(1)
if ((101 <= LA83_0 <= 102)) :
alt83 = 1
if alt83 == 1:
# C.g:512:25: ( '<<' | '>>' ) additive_expression
if (101 <= self.input.LA(1) <= 102):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_shift_expression2004
)
raise mse
self.following.append(self.FOLLOW_additive_expression_in_shift_expression2010)
self.additive_expression()
self.following.pop()
if self.failed:
return
else:
break #loop83
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 59, shift_expression_StartIndex)
pass
return
# $ANTLR end shift_expression
    # $ANTLR start statement
    # C.g:517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );
    def statement(self, ):
        """Parse one C statement (grammar rule 60).

        Generated ANTLR code: a hand-unrolled lookahead DFA inspects up to
        three tokens and, where the prefix is ambiguous, falls back to
        syntactic predicates (synpredNNN) that try-parse an alternative
        while backtracking.  The chosen alternative number is stored in
        alt84 and the matching sub-rule is then invoked.  Predicate order
        is significant and must not be changed.
        """
        statement_StartIndex = self.input.index()
        try:
            try:
                # Memoization fast path, used only while backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60):
                    return
                # C.g:518:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration )
                alt84 = 11
                LA84 = self.input.LA(1)
                if LA84 == IDENTIFIER:
                    # An identifier can begin a label, an expression statement,
                    # a macro statement, an asm2 statement, or a declaration;
                    # disambiguate on the second token (and predicates on the third).
                    LA84 = self.input.LA(2)
                    if LA84 == 62:
                        LA84_43 = self.input.LA(3)
                        if (self.synpred169()) :
                            alt84 = 3
                        elif (self.synpred173()) :
                            alt84 = 7
                        elif (self.synpred174()) :
                            alt84 = 8
                        elif (True) :
                            alt84 = 11
                        else:
                            # Unreachable after the catch-all predicate above;
                            # kept as generated.
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 43, self.input)
                            raise nvae
                    elif LA84 == 47:
                        alt84 = 1
                    elif LA84 == STRING_LITERAL or LA84 == 27 or LA84 == 28 or LA84 == 64 or LA84 == 68 or LA84 == 69 or LA84 == 70 or LA84 == 71 or LA84 == 72 or LA84 == 73 or LA84 == 75 or LA84 == 76 or LA84 == 77 or LA84 == 80 or LA84 == 81 or LA84 == 82 or LA84 == 83 or LA84 == 84 or LA84 == 85 or LA84 == 86 or LA84 == 87 or LA84 == 88 or LA84 == 89 or LA84 == 90 or LA84 == 91 or LA84 == 92 or LA84 == 93 or LA84 == 94 or LA84 == 95 or LA84 == 96 or LA84 == 97 or LA84 == 98 or LA84 == 99 or LA84 == 100 or LA84 == 101 or LA84 == 102:
                        alt84 = 3
                    elif LA84 == 66:
                        LA84_47 = self.input.LA(3)
                        if (self.synpred169()) :
                            alt84 = 3
                        elif (True) :
                            alt84 = 11
                        else:
                            # Unreachable after the catch-all predicate above;
                            # kept as generated.
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input)
                            raise nvae
                    elif LA84 == IDENTIFIER:
                        LA84_53 = self.input.LA(3)
                        if (self.synpred169()) :
                            alt84 = 3
                        elif (True) :
                            alt84 = 11
                        else:
                            # Unreachable after the catch-all predicate above;
                            # kept as generated.
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 53, self.input)
                            raise nvae
                    elif LA84 == 25:
                        LA84_68 = self.input.LA(3)
                        if (self.synpred169()) :
                            alt84 = 3
                        elif (True) :
                            alt84 = 11
                        else:
                            # Unreachable after the catch-all predicate above;
                            # kept as generated.
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 68, self.input)
                            raise nvae
                    elif LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60 or LA84 == 61:
                        alt84 = 11
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input)
                        raise nvae
                elif LA84 == 106 or LA84 == 107:
                    alt84 = 1
                elif LA84 == 43:
                    alt84 = 2
                elif LA84 == HEX_LITERAL or LA84 == OCTAL_LITERAL or LA84 == DECIMAL_LITERAL or LA84 == CHARACTER_LITERAL or LA84 == STRING_LITERAL or LA84 == FLOATING_POINT_LITERAL or LA84 == 25 or LA84 == 62 or LA84 == 66 or LA84 == 68 or LA84 == 69 or LA84 == 72 or LA84 == 73 or LA84 == 74 or LA84 == 77 or LA84 == 78 or LA84 == 79:
                    alt84 = 3
                elif LA84 == 108 or LA84 == 110:
                    alt84 = 4
                elif LA84 == 111 or LA84 == 112 or LA84 == 113:
                    alt84 = 5
                elif LA84 == 114 or LA84 == 115 or LA84 == 116 or LA84 == 117:
                    alt84 = 6
                elif LA84 == 103:
                    alt84 = 8
                elif LA84 == 104:
                    alt84 = 9
                elif LA84 == 105:
                    alt84 = 10
                elif LA84 == 26 or LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60 or LA84 == 61:
                    alt84 = 11
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("517:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input)
                    raise nvae
                # Dispatch to the sub-rule selected by the DFA above.
                if alt84 == 1:
                    # C.g:518:4: labeled_statement
                    self.following.append(self.FOLLOW_labeled_statement_in_statement2025)
                    self.labeled_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 2:
                    # C.g:519:4: compound_statement
                    self.following.append(self.FOLLOW_compound_statement_in_statement2030)
                    self.compound_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 3:
                    # C.g:520:4: expression_statement
                    self.following.append(self.FOLLOW_expression_statement_in_statement2035)
                    self.expression_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 4:
                    # C.g:521:4: selection_statement
                    self.following.append(self.FOLLOW_selection_statement_in_statement2040)
                    self.selection_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 5:
                    # C.g:522:4: iteration_statement
                    self.following.append(self.FOLLOW_iteration_statement_in_statement2045)
                    self.iteration_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 6:
                    # C.g:523:4: jump_statement
                    self.following.append(self.FOLLOW_jump_statement_in_statement2050)
                    self.jump_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 7:
                    # C.g:524:4: macro_statement
                    self.following.append(self.FOLLOW_macro_statement_in_statement2055)
                    self.macro_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 8:
                    # C.g:525:4: asm2_statement
                    self.following.append(self.FOLLOW_asm2_statement_in_statement2060)
                    self.asm2_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 9:
                    # C.g:526:4: asm1_statement
                    self.following.append(self.FOLLOW_asm1_statement_in_statement2065)
                    self.asm1_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 10:
                    # C.g:527:4: asm_statement
                    self.following.append(self.FOLLOW_asm_statement_in_statement2070)
                    self.asm_statement()
                    self.following.pop()
                    if self.failed:
                        return
                elif alt84 == 11:
                    # C.g:528:4: declaration
                    self.following.append(self.FOLLOW_declaration_in_statement2075)
                    self.declaration()
                    self.following.pop()
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse result for this rule/position while backtracking.
            if self.backtracking > 0:
                self.memoize(self.input, 60, statement_StartIndex)
            pass
        return
    # $ANTLR end statement
    # $ANTLR start asm2_statement
    # C.g:531:1: asm2_statement : ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' ;
    def asm2_statement(self, ):
        """Parse a GCC-style asm statement (rule 61):
        ( '__asm__' )? IDENTIFIER '(' (~ ';')* ')' ';'
        """
        asm2_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoization fast path, used only while backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
                    return
                # C.g:532:2: ( ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' )
                # C.g:532:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
                # C.g:532:4: ( '__asm__' )?
                # Optional '__asm__' keyword (token 103).
                alt85 = 2
                LA85_0 = self.input.LA(1)
                if (LA85_0 == 103) :
                    alt85 = 1
                if alt85 == 1:
                    # C.g:0:0: '__asm__'
                    self.match(self.input, 103, self.FOLLOW_103_in_asm2_statement2086)
                    if self.failed:
                        return
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2089)
                if self.failed:
                    return
                # Opening '(' (token 62).
                self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2091)
                if self.failed:
                    return
                # C.g:532:30: (~ ( ';' ) )*
                # Consume everything up to the closing ") ;".  When the next
                # token is ')' (63), only keep consuming if the token after it
                # is NOT ';' (i.e. this ')' is not the closing paren); otherwise
                # exit the loop so the ')' is matched below.
                while True: #loop86
                    alt86 = 2
                    LA86_0 = self.input.LA(1)
                    if (LA86_0 == 63) :
                        LA86_1 = self.input.LA(2)
                        if ((IDENTIFIER <= LA86_1 <= LINE_COMMAND) or (26 <= LA86_1 <= 117)) :
                            alt86 = 1
                    elif ((IDENTIFIER <= LA86_0 <= LINE_COMMAND) or (26 <= LA86_0 <= 62) or (64 <= LA86_0 <= 117)) :
                        alt86 = 1
                    if alt86 == 1:
                        # C.g:532:31: ~ ( ';' )
                        # Any token except ';' (25) is accepted here.
                        if (IDENTIFIER <= self.input.LA(1) <= LINE_COMMAND) or (26 <= self.input.LA(1) <= 117):
                            self.input.consume();
                            self.errorRecovery = False
                            self.failed = False
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return
                            mse = MismatchedSetException(None, self.input)
                            self.recoverFromMismatchedSet(
                                self.input, mse, self.FOLLOW_set_in_asm2_statement2094
                            )
                            raise mse
                    else:
                        break #loop86
                # Closing ')' (token 63) then terminating ';' (token 25).
                self.match(self.input, 63, self.FOLLOW_63_in_asm2_statement2101)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2103)
                if self.failed:
                    return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse result for this rule/position while backtracking.
            if self.backtracking > 0:
                self.memoize(self.input, 61, asm2_statement_StartIndex)
            pass
        return
    # $ANTLR end asm2_statement
# $ANTLR start asm1_statement
# C.g:535:1: asm1_statement : '_asm' '{' (~ ( '}' ) )* '}' ;
def asm1_statement(self, ):
asm1_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
return
# C.g:536:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
# C.g:536:4: '_asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 104, self.FOLLOW_104_in_asm1_statement2115)
if self.failed:
return
self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2117)
if self.failed:
return
# C.g:536:15: (~ ( '}' ) )*
while True: #loop87
alt87 = 2
LA87_0 = self.input.LA(1)
if ((IDENTIFIER <= LA87_0 <= 43) or (45 <= LA87_0 <= 117)) :
alt87 = 1
if alt87 == 1:
# C.g:536:16: ~ ( '}' )
if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_asm1_statement2120
)
raise mse
else:
break #loop87
self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2127)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 62, asm1_statement_StartIndex)
pass
return
# $ANTLR end asm1_statement
# $ANTLR start asm_statement
# C.g:539:1: asm_statement : '__asm' '{' (~ ( '}' ) )* '}' ;
def asm_statement(self, ):
asm_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
return
# C.g:540:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
# C.g:540:4: '__asm' '{' (~ ( '}' ) )* '}'
self.match(self.input, 105, self.FOLLOW_105_in_asm_statement2138)
if self.failed:
return
self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2140)
if self.failed:
return
# C.g:540:16: (~ ( '}' ) )*
while True: #loop88
alt88 = 2
LA88_0 = self.input.LA(1)
if ((IDENTIFIER <= LA88_0 <= 43) or (45 <= LA88_0 <= 117)) :
alt88 = 1
if alt88 == 1:
# C.g:540:17: ~ ( '}' )
if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 117):
self.input.consume();
self.errorRecovery = False
self.failed = False
else:
if self.backtracking > 0:
self.failed = True
return
mse = MismatchedSetException(None, self.input)
self.recoverFromMismatchedSet(
self.input, mse, self.FOLLOW_set_in_asm_statement2143
)
raise mse
else:
break #loop88
self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2150)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 63, asm_statement_StartIndex)
pass
return
# $ANTLR end asm_statement
# $ANTLR start macro_statement
# C.g:543:1: macro_statement : IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' ;
def macro_statement(self, ):
macro_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
return
# C.g:544:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' )
# C.g:544:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')'
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2162)
if self.failed:
return
self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2164)
if self.failed:
return
# C.g:544:19: ( declaration )*
while True: #loop89
alt89 = 2
LA89 = self.input.LA(1)
if LA89 == IDENTIFIER:
LA89 = self.input.LA(2)
if LA89 == 62:
LA89_45 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_47 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 66:
LA89_50 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_68 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_71 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_72 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_73 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_74 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_75 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_76 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_77 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_78 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_79 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_80 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_81 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_82 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_83 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_84 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_85 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_86 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 26:
LA89 = self.input.LA(2)
if LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_87 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_88 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_89 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_90 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_91 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_92 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_93 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_94 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_95 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_96 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_97 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_98 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_99 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_100 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 66:
LA89_101 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_102 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_103 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_104 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_105 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_106 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_107 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_108 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_109 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_110 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_111 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_112 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_113 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_114 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_115 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_116 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_117 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_118 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_119 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_120 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_121 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_122 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_123 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_124 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_125 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_126 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_127 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_128 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_129 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_130 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_131 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_132 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_133 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_134 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_135 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_136 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_137 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_138 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_139 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_140 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_141 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_142 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_143 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_144 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_145 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_146 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_147 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_148 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_149 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_150 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_151 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_152 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_153 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_154 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_155 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_156 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_157 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_158 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_159 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_160 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_161 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_162 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_163 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_164 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_165 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_166 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_167 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_168 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_169 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_170 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_171 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_172 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_173 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_174 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_175 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_176 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_177 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_178 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_179 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_180 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_181 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_182 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_183 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_184 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_185 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_186 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_187 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_188 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_189 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_190 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_191 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_192 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_193 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_194 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_195 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_196 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_197 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_198 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_199 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_200 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_201 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_202 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_203 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_204 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_205 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_206 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_207 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_208 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_209 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_210 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_211 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_212 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_213 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_214 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_215 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_216 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_217 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_218 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_219 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_220 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_221 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_222 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_223 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_224 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_225 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_226 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_227 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_228 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_229 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_230 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_231 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_232 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_233 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_234 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_235 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_236 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_237 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_238 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_239 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_240 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_241 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_242 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_243 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_244 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_245 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_246 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_247 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_248 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_249 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_250 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_251 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_252 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_253 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_254 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_255 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_256 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_257 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_258 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_259 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_260 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_261 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_262 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_263 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_264 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_265 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_266 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_267 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_268 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_269 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_270 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_271 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_272 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_273 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_274 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_275 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_276 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_277 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_278 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_279 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_280 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_281 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_282 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_283 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_284 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_285 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_286 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_287 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_288 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_289 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_290 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_291 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_292 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_293 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_294 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_295 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_296 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_297 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_298 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_299 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_300 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_301 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_302 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_303 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_304 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_305 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_40 = self.input.LA(2)
if (LA89_40 == IDENTIFIER) :
LA89_306 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif (LA89_40 == 43) :
LA89_307 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_41 = self.input.LA(2)
if (LA89_41 == 43) :
LA89_308 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif (LA89_41 == IDENTIFIER) :
LA89_309 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 58 or LA89 == 59 or LA89 == 60 or LA89 == 61:
LA89 = self.input.LA(2)
if LA89 == 66:
LA89_310 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 58:
LA89_311 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 59:
LA89_312 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 60:
LA89_313 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == IDENTIFIER:
LA89_314 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 62:
LA89_315 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 25:
LA89_316 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33:
LA89_317 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 34:
LA89_318 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 35:
LA89_319 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 36:
LA89_320 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 37:
LA89_321 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 38:
LA89_322 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 39:
LA89_323 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 40:
LA89_324 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 41:
LA89_325 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 42:
LA89_326 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 45 or LA89 == 46:
LA89_327 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 48:
LA89_328 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 61:
LA89_329 = self.input.LA(3)
if (self.synpred181()) :
alt89 = 1
if alt89 == 1:
# C.g:0:0: declaration
self.following.append(self.FOLLOW_declaration_in_macro_statement2166)
self.declaration()
self.following.pop()
if self.failed:
return
else:
break #loop89
# C.g:544:33: ( statement_list )?
alt90 = 2
LA90 = self.input.LA(1)
if LA90 == IDENTIFIER:
LA90 = self.input.LA(2)
if LA90 == 25 or LA90 == 29 or LA90 == 30 or LA90 == 31 or LA90 == 32 or LA90 == 33 or LA90 == 34 or LA90 == 35 or LA90 == 36 or LA90 == 37 or LA90 == 38 or LA90 == 39 or LA90 == 40 or LA90 == 41 or LA90 == 42 or LA90 == 45 or LA90 == 46 or LA90 == 47 or LA90 == 48 or LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61:
alt90 = 1
elif LA90 == 62:
LA90_45 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_46 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == IDENTIFIER:
LA90_47 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 64:
LA90_48 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_49 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_50 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_51 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_52 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_53 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_54 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_55 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_56 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_57 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_58 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_59 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_60 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_61 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_62 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_63 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_64 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_65 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_66 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_67 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_70 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25 or LA90 == 26 or LA90 == 29 or LA90 == 30 or LA90 == 31 or LA90 == 32 or LA90 == 33 or LA90 == 34 or LA90 == 35 or LA90 == 36 or LA90 == 37 or LA90 == 38 or LA90 == 39 or LA90 == 40 or LA90 == 41 or LA90 == 42 or LA90 == 43 or LA90 == 45 or LA90 == 46 or LA90 == 48 or LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61 or LA90 == 103 or LA90 == 104 or LA90 == 105 or LA90 == 106 or LA90 == 107 or LA90 == 108 or LA90 == 110 or LA90 == 111 or LA90 == 112 or LA90 == 113 or LA90 == 114 or LA90 == 115 or LA90 == 116 or LA90 == 117:
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_87 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_88 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_89 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_90 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_91 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_92 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_93 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_94 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_95 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_96 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_97 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_98 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_99 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_100 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_101 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_102 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_103 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_104 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_105 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_106 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_107 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_108 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_111 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_112 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_113 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_114 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_115 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_116 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_117 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_118 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_119 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_120 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_121 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_122 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_123 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_124 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_125 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_126 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_127 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_128 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_129 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_130 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_131 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_134 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_135 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_136 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_137 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_138 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_139 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_140 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_141 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_142 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_143 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_144 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_145 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_146 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_147 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_148 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_149 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_150 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_151 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_152 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_153 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_154 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_155 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_156 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_159 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_160 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_161 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_162 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_163 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_164 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_165 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_166 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_167 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_168 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_169 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_170 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_171 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_172 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_173 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_174 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_175 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_176 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_177 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_178 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_179 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_181 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_183 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 64:
LA90_184 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_185 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_186 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_187 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_188 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_189 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_190 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_191 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_192 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_193 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_194 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_195 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_196 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_197 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_198 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_199 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_200 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_201 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_202 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_203 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_204 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_205 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_206 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90 = self.input.LA(2)
if LA90 == 64:
LA90_209 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_210 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 75:
LA90_211 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66:
LA90_212 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 76:
LA90_213 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_214 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_215 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 28 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88 or LA90 == 89:
LA90_216 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 70:
LA90_217 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 71:
LA90_218 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 68:
LA90_219 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 69:
LA90_220 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 101 or LA90 == 102:
LA90_221 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 97 or LA90 == 98 or LA90 == 99 or LA90 == 100:
LA90_222 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 95 or LA90 == 96:
LA90_223 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 77:
LA90_224 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 94:
LA90_225 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 93:
LA90_226 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 92:
LA90_227 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 91:
LA90_228 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 90:
LA90_229 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 27:
LA90_230 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 25:
alt90 = 1
elif LA90 == 62:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_233 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_234 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_235 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_236 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_237 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_238 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_239 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_240 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_241 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_242 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_243 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_244 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 61:
LA90_245 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 34:
LA90_246 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 35:
LA90_247 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 36:
LA90_248 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 37:
LA90_249 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 38:
LA90_250 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 39:
LA90_251 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 40:
LA90_252 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 41:
LA90_253 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 42:
LA90_254 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 45 or LA90 == 46:
LA90_255 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 48:
LA90_256 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_257 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_258 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_259 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_260 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_261 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_262 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_263 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_264 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_265 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_266 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_267 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_268 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90 = self.input.LA(2)
if LA90 == IDENTIFIER:
LA90_269 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_270 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_271 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_272 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_273 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_274 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_275 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 62:
LA90_276 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_277 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_278 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_279 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_280 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90 = self.input.LA(2)
if LA90 == 62:
LA90_281 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == IDENTIFIER:
LA90_282 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_283 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_284 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_285 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_286 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_287 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_288 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_289 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_290 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_291 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_292 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90 = self.input.LA(2)
if LA90 == 62:
LA90_293 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == IDENTIFIER:
LA90_294 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == HEX_LITERAL:
LA90_295 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == OCTAL_LITERAL:
LA90_296 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == DECIMAL_LITERAL:
LA90_297 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == CHARACTER_LITERAL:
LA90_298 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == STRING_LITERAL:
LA90_299 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == FLOATING_POINT_LITERAL:
LA90_300 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 72:
LA90_301 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 73:
LA90_302 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 66 or LA90 == 68 or LA90 == 69 or LA90 == 77 or LA90 == 78 or LA90 == 79:
LA90_303 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
elif LA90 == 74:
LA90_304 = self.input.LA(3)
if (self.synpred182()) :
alt90 = 1
if alt90 == 1:
# C.g:0:0: statement_list
self.following.append(self.FOLLOW_statement_list_in_macro_statement2170)
self.statement_list()
self.following.pop()
if self.failed:
return
# C.g:544:49: ( expression )?
alt91 = 2
LA91_0 = self.input.LA(1)
if ((IDENTIFIER <= LA91_0 <= FLOATING_POINT_LITERAL) or LA91_0 == 62 or LA91_0 == 66 or (68 <= LA91_0 <= 69) or (72 <= LA91_0 <= 74) or (77 <= LA91_0 <= 79)) :
alt91 = 1
if alt91 == 1:
# C.g:0:0: expression
self.following.append(self.FOLLOW_expression_in_macro_statement2173)
self.expression()
self.following.pop()
if self.failed:
return
self.match(self.input, 63, self.FOLLOW_63_in_macro_statement2176)
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 64, macro_statement_StartIndex)
pass
return
# $ANTLR end macro_statement
# $ANTLR start labeled_statement
# C.g:547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );
def labeled_statement(self, ):
labeled_statement_StartIndex = self.input.index()
try:
try:
if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65):
return
# C.g:548:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement )
alt92 = 3
LA92 = self.input.LA(1)
if LA92 == IDENTIFIER:
alt92 = 1
elif LA92 == 106:
alt92 = 2
elif LA92 == 107:
alt92 = 3
else:
if self.backtracking > 0:
self.failed = True
return
nvae = NoViableAltException("547:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input)
raise nvae
if alt92 == 1:
# C.g:548:4: IDENTIFIER ':' statement
self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_labeled_statement2188)
if self.failed:
return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2190)
if self.failed:
return
self.following.append(self.FOLLOW_statement_in_labeled_statement2192)
self.statement()
self.following.pop()
if self.failed:
return
elif alt92 == 2:
# C.g:549:4: 'case' constant_expression ':' statement
self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2197)
if self.failed:
return
self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2199)
self.constant_expression()
self.following.pop()
if self.failed:
return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2201)
if self.failed:
return
self.following.append(self.FOLLOW_statement_in_labeled_statement2203)
self.statement()
self.following.pop()
if self.failed:
return
elif alt92 == 3:
# C.g:550:4: 'default' ':' statement
self.match(self.input, 107, self.FOLLOW_107_in_labeled_statement2208)
if self.failed:
return
self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2210)
if self.failed:
return
self.following.append(self.FOLLOW_statement_in_labeled_statement2212)
self.statement()
self.following.pop()
if self.failed:
return
except RecognitionException as re:
self.reportError(re)
self.recover(self.input, re)
finally:
if self.backtracking > 0:
self.memoize(self.input, 65, labeled_statement_StartIndex)
pass
return
# $ANTLR end labeled_statement
class compound_statement_return(object):
def __init__(self):
self.start = None
self.stop = None
# $ANTLR start compound_statement
# C.g:553:1: compound_statement : '{' ( declaration )* ( statement_list )? '}' ;
    def compound_statement(self, ):
        """Parse rule 'compound_statement' (C.g:553):
        '{' ( declaration )* ( statement_list )? '}'.

        ANTLR-generated; the large elif chains below are machine-built
        lookahead tables and must not be hand-tuned.  Returns a
        compound_statement_return whose start/stop attributes record the
        first and last tokens matched.  While backtracking, failures set
        self.failed and the (partially filled) retval is still returned;
        results are memoized as rule 66.
        """
        retval = self.compound_statement_return()
        # First token of the rule; retval.stop is set after the closing brace.
        retval.start = self.input.LT(1)
        compound_statement_StartIndex = self.input.index()
        try:
            try:
                # Reuse a previously memoized parse of this rule at this position.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 66):
                    return retval
                # C.g:554:2: ( '{' ( declaration )* ( statement_list )? '}' )
                # C.g:554:4: '{' ( declaration )* ( statement_list )? '}'
                self.match(self.input, 43, self.FOLLOW_43_in_compound_statement2223)
                if self.failed:
                    return retval
                # C.g:554:8: ( declaration )*
                # loop93: greedily consume leading declarations.  The generated
                # dispatch below inspects up to three tokens of lookahead and then
                # runs the syntactic predicate synpred186 to decide whether the
                # upcoming tokens start a declaration (alt93 = 1) or the loop ends.
                while True: #loop93
                    alt93 = 2
                    LA93 = self.input.LA(1)
                    if LA93 == IDENTIFIER:
                        LA93 = self.input.LA(2)
                        if LA93 == 62:
                            LA93_44 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_47 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 66:
                            LA93_48 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_49 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_50 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_51 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_52 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_53 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_54 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_55 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_56 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_57 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_58 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_59 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_60 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_61 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_62 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_63 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_64 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_65 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 26:
                        LA93 = self.input.LA(2)
                        if LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_86 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_87 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_88 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_89 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_90 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_91 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_92 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_93 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_94 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_95 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_96 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_97 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_98 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_99 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 66:
                            LA93_100 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_101 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_102 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_103 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_104 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_105 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_106 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_107 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_108 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_109 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_110 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_111 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_112 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_113 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_114 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_115 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_116 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_117 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_118 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_119 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_120 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_121 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_122 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_123 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_124 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 34:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_125 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_126 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_127 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_128 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_129 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_130 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_131 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_132 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_133 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_134 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_135 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_136 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_137 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_138 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_139 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_140 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_141 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_142 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_143 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_144 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 35:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_145 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_146 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_147 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_148 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_149 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_150 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_151 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_152 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_153 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_154 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_155 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_156 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_157 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_158 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_159 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_160 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_161 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_162 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_163 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_164 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 36:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_165 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_166 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_167 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_168 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_169 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_170 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_171 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_172 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_173 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_174 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_175 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_176 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_177 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_178 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_179 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_180 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_181 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_182 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_183 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_184 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 37:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_185 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_186 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_187 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_188 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_189 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_190 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_191 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_192 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_193 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_194 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_195 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_196 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_197 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_198 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_199 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_200 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_201 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_202 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_203 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_204 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 38:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_205 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_206 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_207 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_208 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_209 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_210 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_211 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_212 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_213 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_214 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_215 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_216 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_217 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_218 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_219 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_220 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_221 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_222 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_223 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_224 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 39:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_225 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_226 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_227 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_228 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_229 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_230 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_231 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_232 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_233 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_234 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_235 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_236 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_237 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_238 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_239 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_240 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_241 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_242 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_243 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_244 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 40:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_245 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_246 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_247 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_248 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_249 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_250 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_251 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_252 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_253 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_254 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_255 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_256 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_257 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_258 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_259 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_260 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_261 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_262 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_263 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_264 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 41:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_265 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_266 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_267 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_268 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_269 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_270 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_271 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_272 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_273 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_274 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_275 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_276 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_277 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_278 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_279 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_280 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_281 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_282 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_283 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_284 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 42:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_285 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_286 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_287 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_288 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_289 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_290 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_291 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_292 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_293 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_294 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_295 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_296 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_297 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_298 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_299 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_300 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_301 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_302 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_303 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_304 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 45 or LA93 == 46:
                        LA93_40 = self.input.LA(2)
                        if (LA93_40 == IDENTIFIER) :
                            LA93_305 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif (LA93_40 == 43) :
                            LA93_306 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 48:
                        LA93_41 = self.input.LA(2)
                        if (LA93_41 == 43) :
                            LA93_307 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif (LA93_41 == IDENTIFIER) :
                            LA93_308 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 58 or LA93 == 59 or LA93 == 60 or LA93 == 61:
                        LA93 = self.input.LA(2)
                        if LA93 == 66:
                            LA93_309 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 58:
                            LA93_310 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 59:
                            LA93_311 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 60:
                            LA93_312 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == IDENTIFIER:
                            LA93_313 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 62:
                            LA93_314 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 25:
                            LA93_315 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33:
                            LA93_316 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 34:
                            LA93_317 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 35:
                            LA93_318 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 36:
                            LA93_319 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 37:
                            LA93_320 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 38:
                            LA93_321 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 39:
                            LA93_322 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 40:
                            LA93_323 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 41:
                            LA93_324 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 42:
                            LA93_325 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 45 or LA93 == 46:
                            LA93_326 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 48:
                            LA93_327 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                        elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 61:
                            LA93_328 = self.input.LA(3)
                            if (self.synpred186()) :
                                alt93 = 1
                    # alt93 == 1: the predicate confirmed a declaration; parse it
                    # and iterate.  Otherwise exit the '( declaration )*' loop.
                    if alt93 == 1:
                        # C.g:0:0: declaration
                        self.following.append(self.FOLLOW_declaration_in_compound_statement2225)
                        self.declaration()
                        self.following.pop()
                        if self.failed:
                            return retval
                    else:
                        break #loop93
                # C.g:554:21: ( statement_list )?
                # One token of lookahead decides whether an optional statement_list
                # follows the declarations.
                alt94 = 2
                LA94_0 = self.input.LA(1)
                if ((IDENTIFIER <= LA94_0 <= FLOATING_POINT_LITERAL) or (25 <= LA94_0 <= 26) or (29 <= LA94_0 <= 43) or (45 <= LA94_0 <= 46) or (48 <= LA94_0 <= 62) or LA94_0 == 66 or (68 <= LA94_0 <= 69) or (72 <= LA94_0 <= 74) or (77 <= LA94_0 <= 79) or (103 <= LA94_0 <= 108) or (110 <= LA94_0 <= 117)) :
                    alt94 = 1
                if alt94 == 1:
                    # C.g:0:0: statement_list
                    self.following.append(self.FOLLOW_statement_list_in_compound_statement2228)
                    self.statement_list()
                    self.following.pop()
                    if self.failed:
                        return retval
                # Closing '}' of the compound statement (token type 44).
                self.match(self.input, 44, self.FOLLOW_44_in_compound_statement2231)
                if self.failed:
                    return retval
                # Record the last token consumed as the rule's stop boundary.
                retval.stop = self.input.LT(-1)
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Memoize the outcome for this start position when backtracking.
            if self.backtracking > 0:
                self.memoize(self.input, 66, compound_statement_StartIndex)
            pass
        return retval
# $ANTLR end compound_statement
# $ANTLR start statement_list
# C.g:557:1: statement_list : ( statement )+ ;
    def statement_list(self, ):
        """Parse grammar rule ``statement_list : ( statement )+``.

        Auto-generated by ANTLR from C.g (rule 67).  The large if/elif
        ladder below is the generated lookahead DFA for the one-or-more
        loop: it inspects up to three tokens of lookahead (LA(1)..LA(3))
        and, where that is not decisive, runs the syntactic predicate
        ``synpred188`` (speculative backtracking parse) to decide whether
        another ``statement`` follows.  ``alt95 == 1`` means "parse one
        more statement"; otherwise the loop exits.  Returns None; raises
        EarlyExitException if no statement was matched at all (outside
        backtracking mode).
        """
        statement_list_StartIndex = self.input.index()
        try:
            try:
                # Memoized-rule fast path used while backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67):
                    return
                # C.g:558:2: ( ( statement )+ )
                # C.g:558:4: ( statement )+
                # C.g:558:4: ( statement )+
                cnt95 = 0
                while True: #loop95
                    alt95 = 2
                    # Lookahead DFA: branch on LA(1) token type, then LA(2)
                    # token value; unused LA95_nn locals only prime LA(3).
                    LA95 = self.input.LA(1)
                    if LA95 == IDENTIFIER:
                        LA95 = self.input.LA(2)
                        if LA95 == 62:
                            LA95_46 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 45 or LA95 == 46 or LA95 == 47 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
                            alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_48 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == IDENTIFIER:
                            LA95_49 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 64:
                            LA95_50 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_51 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_52 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_53 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_54 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_55 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_56 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_57 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_58 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_59 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_60 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_61 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_62 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_63 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_64 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_65 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_66 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_67 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_68 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_69 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_88 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == HEX_LITERAL:
                        LA95 = self.input.LA(2)
                        if LA95 == 64:
                            LA95_89 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_90 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_91 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_92 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_93 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_94 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_95 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_96 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_97 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_98 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_99 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_100 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_101 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_102 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_103 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_104 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_105 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_106 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_107 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_108 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_109 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_110 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25:
                            alt95 = 1
                    elif LA95 == OCTAL_LITERAL:
                        LA95 = self.input.LA(2)
                        if LA95 == 64:
                            LA95_113 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_114 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_115 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_116 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_117 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_118 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_119 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_120 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_121 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_122 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_123 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_124 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_125 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_126 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_127 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_128 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_129 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_130 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_131 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_132 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_133 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_135 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25:
                            alt95 = 1
                    elif LA95 == DECIMAL_LITERAL:
                        LA95 = self.input.LA(2)
                        if LA95 == 64:
                            LA95_137 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_138 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_139 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_140 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_141 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_142 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_143 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_144 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_145 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_146 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_147 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_148 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_149 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_150 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_151 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_152 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_153 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_154 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_155 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_156 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_157 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_158 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25:
                            alt95 = 1
                    elif LA95 == CHARACTER_LITERAL:
                        LA95 = self.input.LA(2)
                        if LA95 == 64:
                            LA95_161 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_162 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_163 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_164 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_165 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_166 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_167 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_168 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_169 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_170 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_171 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_172 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_173 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_174 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_175 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_176 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_177 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_178 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_179 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_180 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_181 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_182 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25:
                            alt95 = 1
                    elif LA95 == STRING_LITERAL:
                        LA95 = self.input.LA(2)
                        if LA95 == IDENTIFIER:
                            LA95_185 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 64:
                            LA95_186 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_187 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_188 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_189 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_190 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_191 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_192 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_193 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_194 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_195 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_196 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_197 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_198 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_199 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_200 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_201 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_202 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_203 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_204 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_205 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_206 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25:
                            alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_208 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_209 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == FLOATING_POINT_LITERAL:
                        LA95 = self.input.LA(2)
                        if LA95 == 64:
                            LA95_211 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_212 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 75:
                            LA95_213 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66:
                            LA95_214 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 76:
                            LA95_215 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_216 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_217 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 70:
                            LA95_218 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 71:
                            LA95_219 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 68:
                            LA95_220 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 69:
                            LA95_221 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 101 or LA95 == 102:
                            LA95_222 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 97 or LA95 == 98 or LA95 == 99 or LA95 == 100:
                            LA95_223 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 95 or LA95 == 96:
                            LA95_224 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 77:
                            LA95_225 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 94:
                            LA95_226 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 93:
                            LA95_227 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 92:
                            LA95_228 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 91:
                            LA95_229 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 90:
                            LA95_230 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 27:
                            LA95_231 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 25:
                            alt95 = 1
                        elif LA95 == 28 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88 or LA95 == 89:
                            LA95_234 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == 62:
                        LA95 = self.input.LA(2)
                        if LA95 == IDENTIFIER:
                            LA95_235 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == HEX_LITERAL:
                            LA95_236 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == OCTAL_LITERAL:
                            LA95_237 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == DECIMAL_LITERAL:
                            LA95_238 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == CHARACTER_LITERAL:
                            LA95_239 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_240 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == FLOATING_POINT_LITERAL:
                            LA95_241 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_242 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_243 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_244 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
                            LA95_245 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 74:
                            LA95_246 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61:
                            LA95_247 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 34:
                            LA95_248 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 35:
                            LA95_249 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 36:
                            LA95_250 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 37:
                            LA95_251 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 38:
                            LA95_252 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 39:
                            LA95_253 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 40:
                            LA95_254 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 41:
                            LA95_255 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 42:
                            LA95_256 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 45 or LA95 == 46:
                            LA95_257 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 48:
                            LA95_258 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == 72:
                        LA95 = self.input.LA(2)
                        if LA95 == IDENTIFIER:
                            LA95_259 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == HEX_LITERAL:
                            LA95_260 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == OCTAL_LITERAL:
                            LA95_261 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == DECIMAL_LITERAL:
                            LA95_262 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == CHARACTER_LITERAL:
                            LA95_263 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_264 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == FLOATING_POINT_LITERAL:
                            LA95_265 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_266 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_267 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_268 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
                            LA95_269 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 74:
                            LA95_270 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == 73:
                        LA95 = self.input.LA(2)
                        if LA95 == IDENTIFIER:
                            LA95_271 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == HEX_LITERAL:
                            LA95_272 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == OCTAL_LITERAL:
                            LA95_273 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == DECIMAL_LITERAL:
                            LA95_274 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == CHARACTER_LITERAL:
                            LA95_275 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_276 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == FLOATING_POINT_LITERAL:
                            LA95_277 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 62:
                            LA95_278 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_279 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_280 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
                            LA95_281 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 74:
                            LA95_282 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
                        LA95 = self.input.LA(2)
                        if LA95 == 62:
                            LA95_283 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == IDENTIFIER:
                            LA95_284 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == HEX_LITERAL:
                            LA95_285 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == OCTAL_LITERAL:
                            LA95_286 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == DECIMAL_LITERAL:
                            LA95_287 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == CHARACTER_LITERAL:
                            LA95_288 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_289 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == FLOATING_POINT_LITERAL:
                            LA95_290 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_291 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_292 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
                            LA95_293 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 74:
                            LA95_294 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == 74:
                        LA95 = self.input.LA(2)
                        if LA95 == 62:
                            LA95_295 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == IDENTIFIER:
                            LA95_296 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == HEX_LITERAL:
                            LA95_297 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == OCTAL_LITERAL:
                            LA95_298 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == DECIMAL_LITERAL:
                            LA95_299 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == CHARACTER_LITERAL:
                            LA95_300 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == STRING_LITERAL:
                            LA95_301 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == FLOATING_POINT_LITERAL:
                            LA95_302 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 72:
                            LA95_303 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 73:
                            LA95_304 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 66 or LA95 == 68 or LA95 == 69 or LA95 == 77 or LA95 == 78 or LA95 == 79:
                            LA95_305 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                        elif LA95 == 74:
                            LA95_306 = self.input.LA(3)
                            if (self.synpred188()) :
                                alt95 = 1
                    elif LA95 == 25 or LA95 == 26 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 43 or LA95 == 45 or LA95 == 46 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 61 or LA95 == 103 or LA95 == 104 or LA95 == 105 or LA95 == 106 or LA95 == 107 or LA95 == 108 or LA95 == 110 or LA95 == 111 or LA95 == 112 or LA95 == 113 or LA95 == 114 or LA95 == 115 or LA95 == 116 or LA95 == 117:
                        alt95 = 1
                    # DFA decision made: alt95 == 1 parses one more statement,
                    # anything else terminates the (statement)+ loop.
                    if alt95 == 1:
                        # C.g:0:0: statement
                        self.following.append(self.FOLLOW_statement_in_statement_list2242)
                        self.statement()
                        self.following.pop()
                        if self.failed:
                            return
                    else:
                        if cnt95 >= 1:
                            break #loop95
                        # Zero statements matched: a (...)+ loop requires at
                        # least one iteration, so report an early exit.
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        eee = EarlyExitException(95, self.input)
                        raise eee
                    cnt95 += 1
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse result for this rule/position so backtracking
            # attempts can reuse it via alreadyParsedRule().
            if self.backtracking > 0:
                self.memoize(self.input, 67, statement_list_StartIndex)
            pass
        return
# $ANTLR end statement_list
    class expression_statement_return(object):
        """ANTLR-generated return-value holder for rule expression_statement.

        Carries the first (``start``) and last (``stop``) tokens consumed by
        the rule so callers can recover the matched source span.
        """
        def __init__(self):
            # First token of the matched rule (set from input.LT(1)).
            self.start = None
            # Last token of the matched rule (set from input.LT(-1)).
            self.stop = None
# $ANTLR start expression_statement
# C.g:561:1: expression_statement : ( ';' | expression ';' );
    def expression_statement(self, ):
        """Parse grammar rule ``expression_statement : ';' | expression ';'``.

        Auto-generated by ANTLR from C.g (rule 68).  One token of lookahead
        picks the alternative: a bare ';' (token 25) is an empty statement,
        anything that can start an expression selects alt 2.  Returns an
        expression_statement_return whose start/stop tokens delimit the
        matched text; raises NoViableAltException when neither alternative
        applies (outside backtracking mode).
        """
        retval = self.expression_statement_return()
        retval.start = self.input.LT(1)
        expression_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoized-rule fast path used while backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 68):
                    return retval
                # C.g:562:2: ( ';' | expression ';' )
                alt96 = 2
                LA96_0 = self.input.LA(1)
                if (LA96_0 == 25) :
                    alt96 = 1
                elif ((IDENTIFIER <= LA96_0 <= FLOATING_POINT_LITERAL) or LA96_0 == 62 or LA96_0 == 66 or (68 <= LA96_0 <= 69) or (72 <= LA96_0 <= 74) or (77 <= LA96_0 <= 79)) :
                    alt96 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return retval
                    nvae = NoViableAltException("561:1: expression_statement : ( ';' | expression ';' );", 96, 0, self.input)
                    raise nvae
                if alt96 == 1:
                    # C.g:562:4: ';'
                    self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2254)
                    if self.failed:
                        return retval
                elif alt96 == 2:
                    # C.g:563:4: expression ';'
                    self.following.append(self.FOLLOW_expression_in_expression_statement2259)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return retval
                    self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2261)
                    if self.failed:
                        return retval
                retval.stop = self.input.LT(-1)
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse result for backtracking reuse.
            if self.backtracking > 0:
                self.memoize(self.input, 68, expression_statement_StartIndex)
            pass
        return retval
# $ANTLR end expression_statement
# $ANTLR start selection_statement
# C.g:566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );
    def selection_statement(self, ):
        """Parse C selection statements: ``if (...) ... [else ...]`` and
        ``switch (...) ...``.

        Auto-generated by ANTLR from C.g (rule 69).  For the 'if' form the
        condition expression's source span is handed to
        StorePredicateExpression so the ECC predicate-expression checks can
        inspect it later.  Token 108 is 'if', 109 'else', 110 'switch',
        62/63 are '(' / ')'.  Returns None; raises NoViableAltException when
        the lookahead matches neither keyword (outside backtracking mode).
        """
        selection_statement_StartIndex = self.input.index()
        e = None
        try:
            try:
                # Memoized-rule fast path used while backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
                    return
                # C.g:567:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
                alt98 = 2
                LA98_0 = self.input.LA(1)
                if (LA98_0 == 108) :
                    alt98 = 1
                elif (LA98_0 == 110) :
                    alt98 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("566:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)
                    raise nvae
                if alt98 == 1:
                    # C.g:567:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
                    self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2272)
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2274)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_selection_statement2278)
                    e = self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2280)
                    if self.failed:
                        return
                    # Only record the predicate text on a real (non-speculative)
                    # parse.
                    if self.backtracking == 0:
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
                    self.following.append(self.FOLLOW_statement_in_selection_statement2284)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:567:167: ( options {k=1; backtrack=false; } : 'else' statement )?
                    alt97 = 2
                    LA97_0 = self.input.LA(1)
                    if (LA97_0 == 109) :
                        alt97 = 1
                    if alt97 == 1:
                        # C.g:567:200: 'else' statement
                        self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2299)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_statement_in_selection_statement2301)
                        self.statement()
                        self.following.pop()
                        if self.failed:
                            return
                elif alt98 == 2:
                    # C.g:568:4: 'switch' '(' expression ')' statement
                    self.match(self.input, 110, self.FOLLOW_110_in_selection_statement2308)
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2310)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_selection_statement2312)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_selection_statement2314)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_selection_statement2316)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse result for backtracking reuse.
            if self.backtracking > 0:
                self.memoize(self.input, 69, selection_statement_StartIndex)
            pass
        return
# $ANTLR end selection_statement
# $ANTLR start iteration_statement
# C.g:571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );
    def iteration_statement(self, ):
        """Parse C iteration statements: ``while``, ``do-while`` and ``for``.

        Auto-generated by ANTLR from C.g (rule 70).  Token 111 is 'while',
        112 'do', 113 'for'; 62/63 are '(' / ')' and 25 is ';'.  In each
        alternative the loop-condition expression span is passed to
        StorePredicateExpression for the ECC predicate-expression checks
        (for the 'for' form, the condition is the second
        expression_statement).  Returns None; raises NoViableAltException on
        an unexpected keyword (outside backtracking mode).
        """
        iteration_statement_StartIndex = self.input.index()
        e = None
        try:
            try:
                # Memoized-rule fast path used while backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
                    return
                # C.g:572:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
                alt100 = 3
                LA100 = self.input.LA(1)
                if LA100 == 111:
                    alt100 = 1
                elif LA100 == 112:
                    alt100 = 2
                elif LA100 == 113:
                    alt100 = 3
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("571:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)
                    raise nvae
                if alt100 == 1:
                    # C.g:572:4: 'while' '(' e= expression ')' statement
                    self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2327)
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2329)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_iteration_statement2333)
                    e = self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2335)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_iteration_statement2337)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    # Record the predicate text only on a real (non-speculative)
                    # parse.
                    if self.backtracking == 0:
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
                elif alt100 == 2:
                    # C.g:573:4: 'do' statement 'while' '(' e= expression ')' ';'
                    self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2344)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_iteration_statement2346)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2348)
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2350)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_iteration_statement2354)
                    e = self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2356)
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2358)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
                elif alt100 == 3:
                    # C.g:574:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
                    self.match(self.input, 113, self.FOLLOW_113_in_iteration_statement2365)
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2367)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2369)
                    self.expression_statement()
                    self.following.pop()
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2373)
                    e = self.expression_statement()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:574:58: ( expression )?
                    alt99 = 2
                    LA99_0 = self.input.LA(1)
                    if ((IDENTIFIER <= LA99_0 <= FLOATING_POINT_LITERAL) or LA99_0 == 62 or LA99_0 == 66 or (68 <= LA99_0 <= 69) or (72 <= LA99_0 <= 74) or (77 <= LA99_0 <= 79)) :
                        alt99 = 1
                    if alt99 == 1:
                        # C.g:0:0: expression
                        self.following.append(self.FOLLOW_expression_in_iteration_statement2375)
                        self.expression()
                        self.following.pop()
                        if self.failed:
                            return
                    self.match(self.input, 63, self.FOLLOW_63_in_iteration_statement2378)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_iteration_statement2380)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start, e.stop))
            except RecognitionException as re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse result for backtracking reuse.
            if self.backtracking > 0:
                self.memoize(self.input, 70, iteration_statement_StartIndex)
            pass
        return
# $ANTLR end iteration_statement
    # $ANTLR start jump_statement
    # C.g:577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );
    def jump_statement(self, ):
        """Parse a C jump-statement: goto/continue/break/return (optionally with an expression).

        Generated by ANTLR from rule 71 of C.g; sets self.failed (rather than
        raising) when called while backtracking.
        """
        jump_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoized fast path: skip re-parsing during backtracking.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
                    return
                # C.g:578:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
                alt101 = 5
                LA101 = self.input.LA(1)
                if LA101 == 114:
                    alt101 = 1
                elif LA101 == 115:
                    alt101 = 2
                elif LA101 == 116:
                    alt101 = 3
                elif LA101 == 117:
                    # 'return': second lookahead token decides bare return (';' == 25)
                    # vs. return-with-expression.
                    LA101_4 = self.input.LA(2)
                    if (LA101_4 == 25) :
                        alt101 = 4
                    elif ((IDENTIFIER <= LA101_4 <= FLOATING_POINT_LITERAL) or LA101_4 == 62 or LA101_4 == 66 or (68 <= LA101_4 <= 69) or (72 <= LA101_4 <= 74) or (77 <= LA101_4 <= 79)) :
                        alt101 = 5
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return
                        nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input)
                        raise nvae
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return
                    nvae = NoViableAltException("577:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input)
                    raise nvae
                if alt101 == 1:
                    # C.g:578:4: 'goto' IDENTIFIER ';'
                    self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2393)
                    if self.failed:
                        return
                    self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2395)
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2397)
                    if self.failed:
                        return
                elif alt101 == 2:
                    # C.g:579:4: 'continue' ';'
                    self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2402)
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2404)
                    if self.failed:
                        return
                elif alt101 == 3:
                    # C.g:580:4: 'break' ';'
                    self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2409)
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2411)
                    if self.failed:
                        return
                elif alt101 == 4:
                    # C.g:581:4: 'return' ';'
                    self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2416)
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2418)
                    if self.failed:
                        return
                elif alt101 == 5:
                    # C.g:582:4: 'return' expression ';'
                    self.match(self.input, 117, self.FOLLOW_117_in_jump_statement2423)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_jump_statement2425)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2427)
                    if self.failed:
                        return
            except RecognitionException as re:
                # NOTE: local name 're' shadows the module-level 'import re' here.
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            # Record the parse attempt for rule 71 so backtracking can reuse it.
            if self.backtracking > 0:
                self.memoize(self.input, 71, jump_statement_StartIndex)
            pass
        return
    # $ANTLR end jump_statement
    # $ANTLR start synpred2
    def synpred2_fragment(self, ):
        """Backtracking fragment: match one declaration_specifiers (C.g:119:6)."""
        # C.g:119:6: ( declaration_specifiers )
        # C.g:119:6: declaration_specifiers
        self.following.append(self.FOLLOW_declaration_specifiers_in_synpred2100)
        self.declaration_specifiers()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred2
    # $ANTLR start synpred4
    def synpred4_fragment(self, ):
        """Backtracking fragment: ( declaration_specifiers )? declarator ( declaration )* '{' (C.g:119)."""
        # C.g:119:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )
        # C.g:119:6: ( declaration_specifiers )? declarator ( declaration )* '{'
        # C.g:119:6: ( declaration_specifiers )?
        alt102 = 2
        LA102 = self.input.LA(1)
        if LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33 or LA102 == 34 or LA102 == 35 or LA102 == 36 or LA102 == 37 or LA102 == 38 or LA102 == 39 or LA102 == 40 or LA102 == 41 or LA102 == 42 or LA102 == 45 or LA102 == 46 or LA102 == 48 or LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
            alt102 = 1
        elif LA102 == IDENTIFIER:
            # Leading IDENTIFIER is ambiguous (typedef name vs. declarator):
            # look one token further and resolve via the synpred2 predicate.
            # NOTE: LA102 is deliberately rebound to LA(2) for the nested chain.
            LA102 = self.input.LA(2)
            if LA102 == 62:
                LA102_21 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33:
                LA102_23 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 34:
                LA102_24 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 35:
                LA102_25 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 36:
                LA102_26 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 37:
                LA102_27 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 38:
                LA102_28 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 39:
                LA102_29 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 40:
                LA102_30 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 41:
                LA102_31 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 42:
                LA102_32 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 45 or LA102 == 46:
                LA102_33 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 48:
                LA102_34 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == IDENTIFIER:
                LA102_35 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 58:
                LA102_36 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 66:
                alt102 = 1
            elif LA102 == 59:
                LA102_39 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 60:
                LA102_40 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
            elif LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57 or LA102 == 61:
                LA102_41 = self.input.LA(3)
                if (self.synpred2()) :
                    alt102 = 1
        elif LA102 == 58:
            LA102_14 = self.input.LA(2)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 59:
            LA102_16 = self.input.LA(2)
            if (self.synpred2()) :
                alt102 = 1
        elif LA102 == 60:
            LA102_17 = self.input.LA(2)
            if (self.synpred2()) :
                alt102 = 1
        if alt102 == 1:
            # C.g:0:0: declaration_specifiers
            self.following.append(self.FOLLOW_declaration_specifiers_in_synpred4100)
            self.declaration_specifiers()
            self.following.pop()
            if self.failed:
                return
        self.following.append(self.FOLLOW_declarator_in_synpred4103)
        self.declarator()
        self.following.pop()
        if self.failed:
            return
        # C.g:119:41: ( declaration )*
        while True: #loop103
            alt103 = 2
            LA103_0 = self.input.LA(1)
            if (LA103_0 == IDENTIFIER or LA103_0 == 26 or (29 <= LA103_0 <= 42) or (45 <= LA103_0 <= 46) or (48 <= LA103_0 <= 61)) :
                alt103 = 1
            if alt103 == 1:
                # C.g:0:0: declaration
                self.following.append(self.FOLLOW_declaration_in_synpred4105)
                self.declaration()
                self.following.pop()
                if self.failed:
                    return
            else:
                break #loop103
        self.match(self.input, 43, self.FOLLOW_43_in_synpred4108)
        if self.failed:
            return
    # $ANTLR end synpred4
    # $ANTLR start synpred5
    def synpred5_fragment(self, ):
        """Backtracking fragment: match one declaration (C.g:120:4)."""
        # C.g:120:4: ( declaration )
        # C.g:120:4: declaration
        self.following.append(self.FOLLOW_declaration_in_synpred5118)
        self.declaration()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred5
    # $ANTLR start synpred7
    def synpred7_fragment(self, ):
        """Backtracking fragment: match one declaration_specifiers (C.g:146:6)."""
        # C.g:146:6: ( declaration_specifiers )
        # C.g:146:6: declaration_specifiers
        self.following.append(self.FOLLOW_declaration_specifiers_in_synpred7157)
        self.declaration_specifiers()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred7
    # $ANTLR start synpred10
    def synpred10_fragment(self, ):
        """Backtracking fragment: match one declaration_specifiers (C.g:167:18)."""
        # C.g:167:18: ( declaration_specifiers )
        # C.g:167:18: declaration_specifiers
        self.following.append(self.FOLLOW_declaration_specifiers_in_synpred10207)
        self.declaration_specifiers()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred10
    # $ANTLR start synpred14
    def synpred14_fragment(self, ):
        """Backtracking fragment: match one type_specifier (C.g:184:7)."""
        # C.g:184:7: ( type_specifier )
        # C.g:184:7: type_specifier
        self.following.append(self.FOLLOW_type_specifier_in_synpred14272)
        self.type_specifier()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred14
    # $ANTLR start synpred15
    def synpred15_fragment(self, ):
        """Backtracking fragment: match one type_qualifier (C.g:185:13)."""
        # C.g:185:13: ( type_qualifier )
        # C.g:185:13: type_qualifier
        self.following.append(self.FOLLOW_type_qualifier_in_synpred15286)
        self.type_qualifier()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred15
    # $ANTLR start synpred33
    def synpred33_fragment(self, ):
        """Backtracking fragment: match one type_qualifier (C.g:225:16)."""
        # C.g:225:16: ( type_qualifier )
        # C.g:225:16: type_qualifier
        self.following.append(self.FOLLOW_type_qualifier_in_synpred33444)
        self.type_qualifier()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred33
    # $ANTLR start synpred34
    def synpred34_fragment(self, ):
        """Backtracking fragment: IDENTIFIER ( type_qualifier )* declarator (C.g:225)."""
        # C.g:225:4: ( IDENTIFIER ( type_qualifier )* declarator )
        # C.g:225:5: IDENTIFIER ( type_qualifier )* declarator
        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34442)
        if self.failed:
            return
        # C.g:225:16: ( type_qualifier )*
        while True: #loop106
            alt106 = 2
            LA106 = self.input.LA(1)
            if LA106 == 58:
                # Tokens 58/59/60 need a further look plus the synpred33 predicate.
                LA106_2 = self.input.LA(2)
                if (self.synpred33()) :
                    alt106 = 1
            elif LA106 == 59:
                LA106_3 = self.input.LA(2)
                if (self.synpred33()) :
                    alt106 = 1
            elif LA106 == 60:
                LA106_4 = self.input.LA(2)
                if (self.synpred33()) :
                    alt106 = 1
            elif LA106 == 49 or LA106 == 50 or LA106 == 51 or LA106 == 52 or LA106 == 53 or LA106 == 54 or LA106 == 55 or LA106 == 56 or LA106 == 57 or LA106 == 61:
                alt106 = 1
            if alt106 == 1:
                # C.g:0:0: type_qualifier
                self.following.append(self.FOLLOW_type_qualifier_in_synpred34444)
                self.type_qualifier()
                self.following.pop()
                if self.failed:
                    return
            else:
                break #loop106
        self.following.append(self.FOLLOW_declarator_in_synpred34447)
        self.declarator()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred34
    # $ANTLR start synpred39
    def synpred39_fragment(self, ):
        """Backtracking fragment: match one type_qualifier (C.g:253:6)."""
        # C.g:253:6: ( type_qualifier )
        # C.g:253:6: type_qualifier
        self.following.append(self.FOLLOW_type_qualifier_in_synpred39566)
        self.type_qualifier()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred39
    # $ANTLR start synpred40
    def synpred40_fragment(self, ):
        """Backtracking fragment: match one type_specifier (C.g:253:23)."""
        # C.g:253:23: ( type_specifier )
        # C.g:253:23: type_specifier
        self.following.append(self.FOLLOW_type_specifier_in_synpred40570)
        self.type_specifier()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred40
    # $ANTLR start synpred66
    def synpred66_fragment(self, ):
        """Backtracking fragment: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator (C.g:297)."""
        # C.g:297:4: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator )
        # C.g:297:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator
        # C.g:297:4: ( pointer )?
        alt111 = 2
        LA111_0 = self.input.LA(1)
        if (LA111_0 == 66) :
            alt111 = 1
        if alt111 == 1:
            # C.g:0:0: pointer
            self.following.append(self.FOLLOW_pointer_in_synpred66784)
            self.pointer()
            self.following.pop()
            if self.failed:
                return
        # C.g:297:13: ( 'EFIAPI' )?
        alt112 = 2
        LA112_0 = self.input.LA(1)
        if (LA112_0 == 58) :
            alt112 = 1
        if alt112 == 1:
            # C.g:297:14: 'EFIAPI'
            self.match(self.input, 58, self.FOLLOW_58_in_synpred66788)
            if self.failed:
                return
        # C.g:297:25: ( 'EFI_BOOTSERVICE' )?
        alt113 = 2
        LA113_0 = self.input.LA(1)
        if (LA113_0 == 59) :
            alt113 = 1
        if alt113 == 1:
            # C.g:297:26: 'EFI_BOOTSERVICE'
            self.match(self.input, 59, self.FOLLOW_59_in_synpred66793)
            if self.failed:
                return
        # C.g:297:46: ( 'EFI_RUNTIMESERVICE' )?
        alt114 = 2
        LA114_0 = self.input.LA(1)
        if (LA114_0 == 60) :
            alt114 = 1
        if alt114 == 1:
            # C.g:297:47: 'EFI_RUNTIMESERVICE'
            self.match(self.input, 60, self.FOLLOW_60_in_synpred66798)
            if self.failed:
                return
        self.following.append(self.FOLLOW_direct_declarator_in_synpred66802)
        self.direct_declarator()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred66
    # $ANTLR start synpred67
    def synpred67_fragment(self, ):
        """Backtracking fragment: match one declarator_suffix (C.g:303:15)."""
        # C.g:303:15: ( declarator_suffix )
        # C.g:303:15: declarator_suffix
        self.following.append(self.FOLLOW_declarator_suffix_in_synpred67821)
        self.declarator_suffix()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred67
    # $ANTLR start synpred69
    def synpred69_fragment(self, ):
        """Backtracking fragment: match the 'EFIAPI' keyword (token 58, C.g:304:9)."""
        # C.g:304:9: ( 'EFIAPI' )
        # C.g:304:9: 'EFIAPI'
        self.match(self.input, 58, self.FOLLOW_58_in_synpred69830)
        if self.failed:
            return
    # $ANTLR end synpred69
    # $ANTLR start synpred70
    def synpred70_fragment(self, ):
        """Backtracking fragment: match one declarator_suffix (C.g:304:35)."""
        # C.g:304:35: ( declarator_suffix )
        # C.g:304:35: declarator_suffix
        self.following.append(self.FOLLOW_declarator_suffix_in_synpred70838)
        self.declarator_suffix()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred70
    # $ANTLR start synpred73
    def synpred73_fragment(self, ):
        """Backtracking fragment: '(' parameter_type_list ')' (C.g:310:9)."""
        # C.g:310:9: ( '(' parameter_type_list ')' )
        # C.g:310:9: '(' parameter_type_list ')'
        self.match(self.input, 62, self.FOLLOW_62_in_synpred73878)
        if self.failed:
            return
        self.following.append(self.FOLLOW_parameter_type_list_in_synpred73880)
        self.parameter_type_list()
        self.following.pop()
        if self.failed:
            return
        self.match(self.input, 63, self.FOLLOW_63_in_synpred73882)
        if self.failed:
            return
    # $ANTLR end synpred73
    # $ANTLR start synpred74
    def synpred74_fragment(self, ):
        """Backtracking fragment: '(' identifier_list ')' (C.g:311:9)."""
        # C.g:311:9: ( '(' identifier_list ')' )
        # C.g:311:9: '(' identifier_list ')'
        self.match(self.input, 62, self.FOLLOW_62_in_synpred74892)
        if self.failed:
            return
        self.following.append(self.FOLLOW_identifier_list_in_synpred74894)
        self.identifier_list()
        self.following.pop()
        if self.failed:
            return
        self.match(self.input, 63, self.FOLLOW_63_in_synpred74896)
        if self.failed:
            return
    # $ANTLR end synpred74
    # $ANTLR start synpred75
    def synpred75_fragment(self, ):
        """Backtracking fragment: match one type_qualifier (C.g:316:8)."""
        # C.g:316:8: ( type_qualifier )
        # C.g:316:8: type_qualifier
        self.following.append(self.FOLLOW_type_qualifier_in_synpred75921)
        self.type_qualifier()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred75
    # $ANTLR start synpred76
    def synpred76_fragment(self, ):
        """Backtracking fragment: match one pointer (C.g:316:24)."""
        # C.g:316:24: ( pointer )
        # C.g:316:24: pointer
        self.following.append(self.FOLLOW_pointer_in_synpred76924)
        self.pointer()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred76
    # $ANTLR start synpred77
    def synpred77_fragment(self, ):
        """Backtracking fragment: '*' ( type_qualifier )+ ( pointer )? (C.g:316)."""
        # C.g:316:4: ( '*' ( type_qualifier )+ ( pointer )? )
        # C.g:316:4: '*' ( type_qualifier )+ ( pointer )?
        self.match(self.input, 66, self.FOLLOW_66_in_synpred77919)
        if self.failed:
            return
        # C.g:316:8: ( type_qualifier )+  -- at least one required (EarlyExit otherwise)
        cnt116 = 0
        while True: #loop116
            alt116 = 2
            LA116_0 = self.input.LA(1)
            if ((49 <= LA116_0 <= 61)) :
                alt116 = 1
            if alt116 == 1:
                # C.g:0:0: type_qualifier
                self.following.append(self.FOLLOW_type_qualifier_in_synpred77921)
                self.type_qualifier()
                self.following.pop()
                if self.failed:
                    return
            else:
                if cnt116 >= 1:
                    break #loop116
                if self.backtracking > 0:
                    self.failed = True
                    return
                eee = EarlyExitException(116, self.input)
                raise eee
            cnt116 += 1
        # C.g:316:24: ( pointer )?
        alt117 = 2
        LA117_0 = self.input.LA(1)
        if (LA117_0 == 66) :
            alt117 = 1
        if alt117 == 1:
            # C.g:0:0: pointer
            self.following.append(self.FOLLOW_pointer_in_synpred77924)
            self.pointer()
            self.following.pop()
            if self.failed:
                return
    # $ANTLR end synpred77
    # $ANTLR start synpred78
    def synpred78_fragment(self, ):
        """Backtracking fragment: '*' pointer (C.g:317:4)."""
        # C.g:317:4: ( '*' pointer )
        # C.g:317:4: '*' pointer
        self.match(self.input, 66, self.FOLLOW_66_in_synpred78930)
        if self.failed:
            return
        self.following.append(self.FOLLOW_pointer_in_synpred78932)
        self.pointer()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred78
    # $ANTLR start synpred81
    def synpred81_fragment(self, ):
        """Backtracking fragment: match the 'OPTIONAL' keyword (token 53, C.g:326:32)."""
        # C.g:326:32: ( 'OPTIONAL' )
        # C.g:326:32: 'OPTIONAL'
        self.match(self.input, 53, self.FOLLOW_53_in_synpred81977)
        if self.failed:
            return
    # $ANTLR end synpred81
    # $ANTLR start synpred82
    def synpred82_fragment(self, ):
        """Backtracking fragment: ',' ( 'OPTIONAL' )? parameter_declaration (C.g:326:27)."""
        # C.g:326:27: ( ',' ( 'OPTIONAL' )? parameter_declaration )
        # C.g:326:27: ',' ( 'OPTIONAL' )? parameter_declaration
        self.match(self.input, 27, self.FOLLOW_27_in_synpred82974)
        if self.failed:
            return
        # C.g:326:31: ( 'OPTIONAL' )? -- resolved through the synpred81 predicate
        alt119 = 2
        LA119_0 = self.input.LA(1)
        if (LA119_0 == 53) :
            LA119_1 = self.input.LA(2)
            if (self.synpred81()) :
                alt119 = 1
        if alt119 == 1:
            # C.g:326:32: 'OPTIONAL'
            self.match(self.input, 53, self.FOLLOW_53_in_synpred82977)
            if self.failed:
                return
        self.following.append(self.FOLLOW_parameter_declaration_in_synpred82981)
        self.parameter_declaration()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred82
    # $ANTLR start synpred83
    def synpred83_fragment(self, ):
        """Backtracking fragment: match one declarator (C.g:330:28)."""
        # C.g:330:28: ( declarator )
        # C.g:330:28: declarator
        self.following.append(self.FOLLOW_declarator_in_synpred83997)
        self.declarator()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred83
    # $ANTLR start synpred84
    def synpred84_fragment(self, ):
        """Backtracking fragment: match one abstract_declarator (C.g:330:39)."""
        # C.g:330:39: ( abstract_declarator )
        # C.g:330:39: abstract_declarator
        self.following.append(self.FOLLOW_abstract_declarator_in_synpred84999)
        self.abstract_declarator()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred84
    # $ANTLR start synpred86
    def synpred86_fragment(self, ):
        """Backtracking fragment: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? (C.g:330)."""
        # C.g:330:4: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? )
        # C.g:330:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )?
        self.following.append(self.FOLLOW_declaration_specifiers_in_synpred86994)
        self.declaration_specifiers()
        self.following.pop()
        if self.failed:
            return
        # C.g:330:27: ( declarator | abstract_declarator )*
        while True: #loop120
            alt120 = 3
            LA120 = self.input.LA(1)
            if LA120 == 66:
                # '*' could start either alternative: decide via synpred83/synpred84.
                LA120_3 = self.input.LA(2)
                if (self.synpred83()) :
                    alt120 = 1
                elif (self.synpred84()) :
                    alt120 = 2
            elif LA120 == IDENTIFIER or LA120 == 58 or LA120 == 59 or LA120 == 60:
                alt120 = 1
            elif LA120 == 62:
                # '(' needs a second lookahead token.
                # NOTE: LA120 is deliberately rebound to LA(2) for the nested chain.
                LA120 = self.input.LA(2)
                if LA120 == 29 or LA120 == 30 or LA120 == 31 or LA120 == 32 or LA120 == 33 or LA120 == 34 or LA120 == 35 or LA120 == 36 or LA120 == 37 or LA120 == 38 or LA120 == 39 or LA120 == 40 or LA120 == 41 or LA120 == 42 or LA120 == 45 or LA120 == 46 or LA120 == 48 or LA120 == 49 or LA120 == 50 or LA120 == 51 or LA120 == 52 or LA120 == 53 or LA120 == 54 or LA120 == 55 or LA120 == 56 or LA120 == 57 or LA120 == 61 or LA120 == 63 or LA120 == 64:
                    alt120 = 2
                elif LA120 == 58:
                    LA120_21 = self.input.LA(3)
                    if (self.synpred83()) :
                        alt120 = 1
                    elif (self.synpred84()) :
                        alt120 = 2
                elif LA120 == 66:
                    LA120_22 = self.input.LA(3)
                    if (self.synpred83()) :
                        alt120 = 1
                    elif (self.synpred84()) :
                        alt120 = 2
                elif LA120 == 59:
                    LA120_23 = self.input.LA(3)
                    if (self.synpred83()) :
                        alt120 = 1
                    elif (self.synpred84()) :
                        alt120 = 2
                elif LA120 == 60:
                    LA120_24 = self.input.LA(3)
                    if (self.synpred83()) :
                        alt120 = 1
                    elif (self.synpred84()) :
                        alt120 = 2
                elif LA120 == IDENTIFIER:
                    LA120_25 = self.input.LA(3)
                    if (self.synpred83()) :
                        alt120 = 1
                    elif (self.synpred84()) :
                        alt120 = 2
                elif LA120 == 62:
                    LA120_26 = self.input.LA(3)
                    if (self.synpred83()) :
                        alt120 = 1
                    elif (self.synpred84()) :
                        alt120 = 2
            elif LA120 == 64:
                alt120 = 2
            if alt120 == 1:
                # C.g:330:28: declarator
                self.following.append(self.FOLLOW_declarator_in_synpred86997)
                self.declarator()
                self.following.pop()
                if self.failed:
                    return
            elif alt120 == 2:
                # C.g:330:39: abstract_declarator
                self.following.append(self.FOLLOW_abstract_declarator_in_synpred86999)
                self.abstract_declarator()
                self.following.pop()
                if self.failed:
                    return
            else:
                break #loop120
        # C.g:330:61: ( 'OPTIONAL' )?
        alt121 = 2
        LA121_0 = self.input.LA(1)
        if (LA121_0 == 53) :
            alt121 = 1
        if alt121 == 1:
            # C.g:330:62: 'OPTIONAL'
            self.match(self.input, 53, self.FOLLOW_53_in_synpred861004)
            if self.failed:
                return
    # $ANTLR end synpred86
    # $ANTLR start synpred90
    def synpred90_fragment(self, ):
        """Backtracking fragment: specifier_qualifier_list ( abstract_declarator )? (C.g:341)."""
        # C.g:341:4: ( specifier_qualifier_list ( abstract_declarator )? )
        # C.g:341:4: specifier_qualifier_list ( abstract_declarator )?
        self.following.append(self.FOLLOW_specifier_qualifier_list_in_synpred901046)
        self.specifier_qualifier_list()
        self.following.pop()
        if self.failed:
            return
        # C.g:341:29: ( abstract_declarator )?
        alt122 = 2
        LA122_0 = self.input.LA(1)
        if (LA122_0 == 62 or LA122_0 == 64 or LA122_0 == 66) :
            alt122 = 1
        if alt122 == 1:
            # C.g:0:0: abstract_declarator
            self.following.append(self.FOLLOW_abstract_declarator_in_synpred901048)
            self.abstract_declarator()
            self.following.pop()
            if self.failed:
                return
    # $ANTLR end synpred90
    # $ANTLR start synpred91
    def synpred91_fragment(self, ):
        """Backtracking fragment: match one direct_abstract_declarator (C.g:346:12)."""
        # C.g:346:12: ( direct_abstract_declarator )
        # C.g:346:12: direct_abstract_declarator
        self.following.append(self.FOLLOW_direct_abstract_declarator_in_synpred911067)
        self.direct_abstract_declarator()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred91
    # $ANTLR start synpred93
    def synpred93_fragment(self, ):
        """Backtracking fragment: '(' abstract_declarator ')' (C.g:351:6)."""
        # C.g:351:6: ( '(' abstract_declarator ')' )
        # C.g:351:6: '(' abstract_declarator ')'
        self.match(self.input, 62, self.FOLLOW_62_in_synpred931086)
        if self.failed:
            return
        self.following.append(self.FOLLOW_abstract_declarator_in_synpred931088)
        self.abstract_declarator()
        self.following.pop()
        if self.failed:
            return
        self.match(self.input, 63, self.FOLLOW_63_in_synpred931090)
        if self.failed:
            return
    # $ANTLR end synpred93
    # $ANTLR start synpred94
    def synpred94_fragment(self, ):
        """Backtracking fragment: match one abstract_declarator_suffix (C.g:351:65)."""
        # C.g:351:65: ( abstract_declarator_suffix )
        # C.g:351:65: abstract_declarator_suffix
        self.following.append(self.FOLLOW_abstract_declarator_suffix_in_synpred941098)
        self.abstract_declarator_suffix()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred94
    # $ANTLR start synpred109
    def synpred109_fragment(self, ):
        """Backtracking fragment: '(' type_name ')' cast_expression (C.g:386:4)."""
        # C.g:386:4: ( '(' type_name ')' cast_expression )
        # C.g:386:4: '(' type_name ')' cast_expression
        self.match(self.input, 62, self.FOLLOW_62_in_synpred1091282)
        if self.failed:
            return
        self.following.append(self.FOLLOW_type_name_in_synpred1091284)
        self.type_name()
        self.following.pop()
        if self.failed:
            return
        self.match(self.input, 63, self.FOLLOW_63_in_synpred1091286)
        if self.failed:
            return
        self.following.append(self.FOLLOW_cast_expression_in_synpred1091288)
        self.cast_expression()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred109
    # $ANTLR start synpred114
    def synpred114_fragment(self, ):
        """Backtracking fragment: 'sizeof' unary_expression (C.g:395:4)."""
        # C.g:395:4: ( 'sizeof' unary_expression )
        # C.g:395:4: 'sizeof' unary_expression
        self.match(self.input, 74, self.FOLLOW_74_in_synpred1141330)
        if self.failed:
            return
        self.following.append(self.FOLLOW_unary_expression_in_synpred1141332)
        self.unary_expression()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred114
    # $ANTLR start synpred117
    def synpred117_fragment(self, ):
        """Backtracking fragment: '(' argument_expression_list ')' (C.g:409:13)."""
        # C.g:409:13: ( '(' argument_expression_list ')' )
        # C.g:409:13: '(' argument_expression_list ')'
        self.match(self.input, 62, self.FOLLOW_62_in_synpred1171420)
        if self.failed:
            return
        self.following.append(self.FOLLOW_argument_expression_list_in_synpred1171424)
        self.argument_expression_list()
        self.following.pop()
        if self.failed:
            return
        self.match(self.input, 63, self.FOLLOW_63_in_synpred1171428)
        if self.failed:
            return
    # $ANTLR end synpred117
    # $ANTLR start synpred118
    def synpred118_fragment(self, ):
        """Backtracking fragment: '(' macro_parameter_list ')' (C.g:410:13)."""
        # C.g:410:13: ( '(' macro_parameter_list ')' )
        # C.g:410:13: '(' macro_parameter_list ')'
        self.match(self.input, 62, self.FOLLOW_62_in_synpred1181444)
        if self.failed:
            return
        self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1181446)
        self.macro_parameter_list()
        self.following.pop()
        if self.failed:
            return
        self.match(self.input, 63, self.FOLLOW_63_in_synpred1181448)
        if self.failed:
            return
    # $ANTLR end synpred118
    # $ANTLR start synpred120
    def synpred120_fragment(self, ):
        """Backtracking fragment: '*' IDENTIFIER (C.g:412:13)."""
        # C.g:412:13: ( '*' IDENTIFIER )
        # C.g:412:13: '*' IDENTIFIER
        self.match(self.input, 66, self.FOLLOW_66_in_synpred1201482)
        if self.failed:
            return
        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1201486)
        if self.failed:
            return
    # $ANTLR end synpred120
    # $ANTLR start synpred137
    def synpred137_fragment(self, ):
        """Backtracking fragment: match one STRING_LITERAL (C.g:443:20)."""
        # C.g:443:20: ( STRING_LITERAL )
        # C.g:443:20: STRING_LITERAL
        self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371683)
        if self.failed:
            return
    # $ANTLR end synpred137
    # $ANTLR start synpred138
    def synpred138_fragment(self, ):
        """Backtracking fragment: ( IDENTIFIER )* ( STRING_LITERAL )+ (C.g:443:8)."""
        # C.g:443:8: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )
        # C.g:443:8: ( IDENTIFIER )* ( STRING_LITERAL )+
        # C.g:443:8: ( IDENTIFIER )*
        while True: #loop125
            alt125 = 2
            LA125_0 = self.input.LA(1)
            if (LA125_0 == IDENTIFIER) :
                alt125 = 1
            if alt125 == 1:
                # C.g:0:0: IDENTIFIER
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1381680)
                if self.failed:
                    return
            else:
                break #loop125
        # C.g:443:20: ( STRING_LITERAL )+  -- at least one required (EarlyExit otherwise)
        cnt126 = 0
        while True: #loop126
            alt126 = 2
            LA126_0 = self.input.LA(1)
            if (LA126_0 == STRING_LITERAL) :
                alt126 = 1
            if alt126 == 1:
                # C.g:0:0: STRING_LITERAL
                self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1381683)
                if self.failed:
                    return
            else:
                if cnt126 >= 1:
                    break #loop126
                if self.backtracking > 0:
                    self.failed = True
                    return
                eee = EarlyExitException(126, self.input)
                raise eee
            cnt126 += 1
    # $ANTLR end synpred138
    # $ANTLR start synpred142
    def synpred142_fragment(self, ):
        """Backtracking fragment: lvalue assignment_operator assignment_expression (C.g:458:4)."""
        # C.g:458:4: ( lvalue assignment_operator assignment_expression )
        # C.g:458:4: lvalue assignment_operator assignment_expression
        self.following.append(self.FOLLOW_lvalue_in_synpred1421744)
        self.lvalue()
        self.following.pop()
        if self.failed:
            return
        self.following.append(self.FOLLOW_assignment_operator_in_synpred1421746)
        self.assignment_operator()
        self.following.pop()
        if self.failed:
            return
        self.following.append(self.FOLLOW_assignment_expression_in_synpred1421748)
        self.assignment_expression()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred142
    # $ANTLR start synpred169
    def synpred169_fragment(self, ):
        """Backtracking fragment: match one expression_statement (C.g:520:4)."""
        # C.g:520:4: ( expression_statement )
        # C.g:520:4: expression_statement
        self.following.append(self.FOLLOW_expression_statement_in_synpred1692035)
        self.expression_statement()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred169
    # $ANTLR start synpred173
    def synpred173_fragment(self, ):
        """Backtracking fragment: match one macro_statement (C.g:524:4)."""
        # C.g:524:4: ( macro_statement )
        # C.g:524:4: macro_statement
        self.following.append(self.FOLLOW_macro_statement_in_synpred1732055)
        self.macro_statement()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred173
    # $ANTLR start synpred174
    def synpred174_fragment(self, ):
        """Backtracking fragment: match one asm2_statement (C.g:525:4)."""
        # C.g:525:4: ( asm2_statement )
        # C.g:525:4: asm2_statement
        self.following.append(self.FOLLOW_asm2_statement_in_synpred1742060)
        self.asm2_statement()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred174
    # $ANTLR start synpred181
    def synpred181_fragment(self, ):
        """Backtracking fragment: match one declaration (C.g:544:19)."""
        # C.g:544:19: ( declaration )
        # C.g:544:19: declaration
        self.following.append(self.FOLLOW_declaration_in_synpred1812166)
        self.declaration()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred181
    # $ANTLR start synpred182
    def synpred182_fragment(self, ):
        """Backtracking fragment: match one statement_list (C.g:544:33)."""
        # C.g:544:33: ( statement_list )
        # C.g:544:33: statement_list
        self.following.append(self.FOLLOW_statement_list_in_synpred1822170)
        self.statement_list()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred182
    # $ANTLR start synpred186
    def synpred186_fragment(self, ):
        """Backtracking fragment: match one declaration (C.g:554:8)."""
        # C.g:554:8: ( declaration )
        # C.g:554:8: declaration
        self.following.append(self.FOLLOW_declaration_in_synpred1862225)
        self.declaration()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred186
    # $ANTLR start synpred188
    def synpred188_fragment(self, ):
        """Backtracking fragment: match one statement (C.g:558:4)."""
        # C.g:558:4: ( statement )
        # C.g:558:4: statement
        self.following.append(self.FOLLOW_statement_in_synpred1882242)
        self.statement()
        self.following.pop()
        if self.failed:
            return
    # $ANTLR end synpred188
    def synpred69(self):
        """Speculatively run synpred69_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred69_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred81(self):
        """Speculatively run synpred81_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred81_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred82(self):
        """Speculatively run synpred82_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred82_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred66(self):
        """Speculatively run synpred66_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred66_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred83(self):
        """Speculatively run synpred83_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred83_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred84(self):
        """Speculatively run synpred84_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred84_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred67(self):
        """Speculatively run synpred67_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred67_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred86(self):
        """Speculatively run synpred86_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred86_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred120(self):
        """Speculatively run synpred120_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred120_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred40(self):
        """Speculatively run synpred40_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred40_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred142(self):
        """Speculatively run synpred142_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred142_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred182(self):
        """Speculatively run synpred182_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred182_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred109(self):
        """Speculatively run synpred109_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred109_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred181(self):
        """Speculatively run synpred181_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred181_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred186(self):
        """Speculatively run synpred186_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred186_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred188(self):
        """Speculatively run synpred188_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred188_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred169(self):
        """Speculatively run synpred169_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred169_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred117(self):
        """Speculatively run synpred117_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred117_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred70(self):
        """Speculatively run synpred70_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred70_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred118(self):
        """Speculatively run synpred118_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred118_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred34(self):
        """Speculatively run synpred34_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred34_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred33(self):
        """Speculatively run synpred33_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred33_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred94(self):
        """Speculatively run synpred94_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred94_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred39(self):
        """Speculatively run synpred39_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred39_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred74(self):
        """Speculatively run synpred74_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred74_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
    def synpred114(self):
        """Speculatively run synpred114_fragment on marked input; rewind and return whether it matched."""
        self.backtracking += 1
        start = self.input.mark()
        self.synpred114_fragment()
        success = not self.failed
        self.input.rewind(start)
        self.backtracking -= 1
        self.failed = False
        return success
def synpred93(self):
self.backtracking += 1
start = self.input.mark()
self.synpred93_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred75(self):
self.backtracking += 1
start = self.input.mark()
self.synpred75_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred137(self):
self.backtracking += 1
start = self.input.mark()
self.synpred137_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred90(self):
self.backtracking += 1
start = self.input.mark()
self.synpred90_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred138(self):
self.backtracking += 1
start = self.input.mark()
self.synpred138_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred91(self):
self.backtracking += 1
start = self.input.mark()
self.synpred91_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred73(self):
self.backtracking += 1
start = self.input.mark()
self.synpred73_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred5(self):
self.backtracking += 1
start = self.input.mark()
self.synpred5_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred78(self):
self.backtracking += 1
start = self.input.mark()
self.synpred78_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred7(self):
self.backtracking += 1
start = self.input.mark()
self.synpred7_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred76(self):
self.backtracking += 1
start = self.input.mark()
self.synpred76_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred77(self):
self.backtracking += 1
start = self.input.mark()
self.synpred77_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred2(self):
self.backtracking += 1
start = self.input.mark()
self.synpred2_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred4(self):
self.backtracking += 1
start = self.input.mark()
self.synpred4_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred174(self):
self.backtracking += 1
start = self.input.mark()
self.synpred174_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred173(self):
self.backtracking += 1
start = self.input.mark()
self.synpred173_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred14(self):
self.backtracking += 1
start = self.input.mark()
self.synpred14_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred15(self):
self.backtracking += 1
start = self.input.mark()
self.synpred15_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
def synpred10(self):
self.backtracking += 1
start = self.input.mark()
self.synpred10_fragment()
success = not self.failed
self.input.rewind(start)
self.backtracking -= 1
self.failed = False
return success
FOLLOW_external_declaration_in_translation_unit74 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_function_definition_in_external_declaration113 = frozenset([1])
FOLLOW_declaration_in_external_declaration118 = frozenset([1])
FOLLOW_macro_statement_in_external_declaration123 = frozenset([1, 25])
FOLLOW_25_in_external_declaration126 = frozenset([1])
FOLLOW_declaration_specifiers_in_function_definition157 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_declarator_in_function_definition160 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_declaration_in_function_definition166 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_compound_statement_in_function_definition171 = frozenset([1])
FOLLOW_compound_statement_in_function_definition180 = frozenset([1])
FOLLOW_26_in_declaration203 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_declaration_specifiers_in_declaration207 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_init_declarator_list_in_declaration216 = frozenset([25])
FOLLOW_25_in_declaration220 = frozenset([1])
FOLLOW_declaration_specifiers_in_declaration234 = frozenset([4, 25, 58, 59, 60, 62, 66])
FOLLOW_init_declarator_list_in_declaration238 = frozenset([25])
FOLLOW_25_in_declaration243 = frozenset([1])
FOLLOW_storage_class_specifier_in_declaration_specifiers264 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_specifier_in_declaration_specifiers272 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_qualifier_in_declaration_specifiers286 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_init_declarator_in_init_declarator_list308 = frozenset([1, 27])
FOLLOW_27_in_init_declarator_list311 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_init_declarator_in_init_declarator_list313 = frozenset([1, 27])
FOLLOW_declarator_in_init_declarator326 = frozenset([1, 28])
FOLLOW_28_in_init_declarator329 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_initializer_in_init_declarator331 = frozenset([1])
FOLLOW_set_in_storage_class_specifier0 = frozenset([1])
FOLLOW_34_in_type_specifier376 = frozenset([1])
FOLLOW_35_in_type_specifier381 = frozenset([1])
FOLLOW_36_in_type_specifier386 = frozenset([1])
FOLLOW_37_in_type_specifier391 = frozenset([1])
FOLLOW_38_in_type_specifier396 = frozenset([1])
FOLLOW_39_in_type_specifier401 = frozenset([1])
FOLLOW_40_in_type_specifier406 = frozenset([1])
FOLLOW_41_in_type_specifier411 = frozenset([1])
FOLLOW_42_in_type_specifier416 = frozenset([1])
FOLLOW_struct_or_union_specifier_in_type_specifier423 = frozenset([1])
FOLLOW_enum_specifier_in_type_specifier433 = frozenset([1])
FOLLOW_type_id_in_type_specifier451 = frozenset([1])
FOLLOW_IDENTIFIER_in_type_id467 = frozenset([1])
FOLLOW_struct_or_union_in_struct_or_union_specifier494 = frozenset([4, 43])
FOLLOW_IDENTIFIER_in_struct_or_union_specifier496 = frozenset([43])
FOLLOW_43_in_struct_or_union_specifier499 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_struct_declaration_list_in_struct_or_union_specifier501 = frozenset([44])
FOLLOW_44_in_struct_or_union_specifier503 = frozenset([1])
FOLLOW_struct_or_union_in_struct_or_union_specifier508 = frozenset([4])
FOLLOW_IDENTIFIER_in_struct_or_union_specifier510 = frozenset([1])
FOLLOW_set_in_struct_or_union0 = frozenset([1])
FOLLOW_struct_declaration_in_struct_declaration_list537 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_specifier_qualifier_list_in_struct_declaration549 = frozenset([4, 47, 58, 59, 60, 62, 66])
FOLLOW_struct_declarator_list_in_struct_declaration551 = frozenset([25])
FOLLOW_25_in_struct_declaration553 = frozenset([1])
FOLLOW_type_qualifier_in_specifier_qualifier_list566 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_specifier_in_specifier_qualifier_list570 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_struct_declarator_in_struct_declarator_list584 = frozenset([1, 27])
FOLLOW_27_in_struct_declarator_list587 = frozenset([4, 47, 58, 59, 60, 62, 66])
FOLLOW_struct_declarator_in_struct_declarator_list589 = frozenset([1, 27])
FOLLOW_declarator_in_struct_declarator602 = frozenset([1, 47])
FOLLOW_47_in_struct_declarator605 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_struct_declarator607 = frozenset([1])
FOLLOW_47_in_struct_declarator614 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_struct_declarator616 = frozenset([1])
FOLLOW_48_in_enum_specifier634 = frozenset([43])
FOLLOW_43_in_enum_specifier636 = frozenset([4])
FOLLOW_enumerator_list_in_enum_specifier638 = frozenset([27, 44])
FOLLOW_27_in_enum_specifier640 = frozenset([44])
FOLLOW_44_in_enum_specifier643 = frozenset([1])
FOLLOW_48_in_enum_specifier648 = frozenset([4])
FOLLOW_IDENTIFIER_in_enum_specifier650 = frozenset([43])
FOLLOW_43_in_enum_specifier652 = frozenset([4])
FOLLOW_enumerator_list_in_enum_specifier654 = frozenset([27, 44])
FOLLOW_27_in_enum_specifier656 = frozenset([44])
FOLLOW_44_in_enum_specifier659 = frozenset([1])
FOLLOW_48_in_enum_specifier664 = frozenset([4])
FOLLOW_IDENTIFIER_in_enum_specifier666 = frozenset([1])
FOLLOW_enumerator_in_enumerator_list677 = frozenset([1, 27])
FOLLOW_27_in_enumerator_list680 = frozenset([4])
FOLLOW_enumerator_in_enumerator_list682 = frozenset([1, 27])
FOLLOW_IDENTIFIER_in_enumerator695 = frozenset([1, 28])
FOLLOW_28_in_enumerator698 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_enumerator700 = frozenset([1])
FOLLOW_set_in_type_qualifier0 = frozenset([1])
FOLLOW_pointer_in_declarator784 = frozenset([4, 58, 59, 60, 62])
FOLLOW_58_in_declarator788 = frozenset([4, 59, 60, 62])
FOLLOW_59_in_declarator793 = frozenset([4, 60, 62])
FOLLOW_60_in_declarator798 = frozenset([4, 62])
FOLLOW_direct_declarator_in_declarator802 = frozenset([1])
FOLLOW_pointer_in_declarator808 = frozenset([1])
FOLLOW_IDENTIFIER_in_direct_declarator819 = frozenset([1, 62, 64])
FOLLOW_declarator_suffix_in_direct_declarator821 = frozenset([1, 62, 64])
FOLLOW_62_in_direct_declarator827 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_58_in_direct_declarator830 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_declarator_in_direct_declarator834 = frozenset([63])
FOLLOW_63_in_direct_declarator836 = frozenset([62, 64])
FOLLOW_declarator_suffix_in_direct_declarator838 = frozenset([1, 62, 64])
FOLLOW_64_in_declarator_suffix852 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_declarator_suffix854 = frozenset([65])
FOLLOW_65_in_declarator_suffix856 = frozenset([1])
FOLLOW_64_in_declarator_suffix866 = frozenset([65])
FOLLOW_65_in_declarator_suffix868 = frozenset([1])
FOLLOW_62_in_declarator_suffix878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_type_list_in_declarator_suffix880 = frozenset([63])
FOLLOW_63_in_declarator_suffix882 = frozenset([1])
FOLLOW_62_in_declarator_suffix892 = frozenset([4])
FOLLOW_identifier_list_in_declarator_suffix894 = frozenset([63])
FOLLOW_63_in_declarator_suffix896 = frozenset([1])
FOLLOW_62_in_declarator_suffix906 = frozenset([63])
FOLLOW_63_in_declarator_suffix908 = frozenset([1])
FOLLOW_66_in_pointer919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_qualifier_in_pointer921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_pointer_in_pointer924 = frozenset([1])
FOLLOW_66_in_pointer930 = frozenset([66])
FOLLOW_pointer_in_pointer932 = frozenset([1])
FOLLOW_66_in_pointer937 = frozenset([1])
FOLLOW_parameter_list_in_parameter_type_list948 = frozenset([1, 27])
FOLLOW_27_in_parameter_type_list951 = frozenset([53, 67])
FOLLOW_53_in_parameter_type_list954 = frozenset([67])
FOLLOW_67_in_parameter_type_list958 = frozenset([1])
FOLLOW_parameter_declaration_in_parameter_list971 = frozenset([1, 27])
FOLLOW_27_in_parameter_list974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_53_in_parameter_list977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_declaration_in_parameter_list981 = frozenset([1, 27])
FOLLOW_declaration_specifiers_in_parameter_declaration994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_declarator_in_parameter_declaration997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_abstract_declarator_in_parameter_declaration999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_53_in_parameter_declaration1004 = frozenset([1])
FOLLOW_pointer_in_parameter_declaration1013 = frozenset([4, 66])
FOLLOW_IDENTIFIER_in_parameter_declaration1016 = frozenset([1])
FOLLOW_IDENTIFIER_in_identifier_list1027 = frozenset([1, 27])
FOLLOW_27_in_identifier_list1031 = frozenset([4])
FOLLOW_IDENTIFIER_in_identifier_list1033 = frozenset([1, 27])
FOLLOW_specifier_qualifier_list_in_type_name1046 = frozenset([1, 62, 64, 66])
FOLLOW_abstract_declarator_in_type_name1048 = frozenset([1])
FOLLOW_type_id_in_type_name1054 = frozenset([1])
FOLLOW_pointer_in_abstract_declarator1065 = frozenset([1, 62, 64])
FOLLOW_direct_abstract_declarator_in_abstract_declarator1067 = frozenset([1])
FOLLOW_direct_abstract_declarator_in_abstract_declarator1073 = frozenset([1])
FOLLOW_62_in_direct_abstract_declarator1086 = frozenset([62, 64, 66])
FOLLOW_abstract_declarator_in_direct_abstract_declarator1088 = frozenset([63])
FOLLOW_63_in_direct_abstract_declarator1090 = frozenset([1, 62, 64])
FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1094 = frozenset([1, 62, 64])
FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1098 = frozenset([1, 62, 64])
FOLLOW_64_in_abstract_declarator_suffix1110 = frozenset([65])
FOLLOW_65_in_abstract_declarator_suffix1112 = frozenset([1])
FOLLOW_64_in_abstract_declarator_suffix1117 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_abstract_declarator_suffix1119 = frozenset([65])
FOLLOW_65_in_abstract_declarator_suffix1121 = frozenset([1])
FOLLOW_62_in_abstract_declarator_suffix1126 = frozenset([63])
FOLLOW_63_in_abstract_declarator_suffix1128 = frozenset([1])
FOLLOW_62_in_abstract_declarator_suffix1133 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_type_list_in_abstract_declarator_suffix1135 = frozenset([63])
FOLLOW_63_in_abstract_declarator_suffix1137 = frozenset([1])
FOLLOW_assignment_expression_in_initializer1150 = frozenset([1])
FOLLOW_43_in_initializer1155 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_initializer_list_in_initializer1157 = frozenset([27, 44])
FOLLOW_27_in_initializer1159 = frozenset([44])
FOLLOW_44_in_initializer1162 = frozenset([1])
FOLLOW_initializer_in_initializer_list1173 = frozenset([1, 27])
FOLLOW_27_in_initializer_list1176 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_initializer_in_initializer_list1178 = frozenset([1, 27])
FOLLOW_assignment_expression_in_argument_expression_list1196 = frozenset([1, 27, 53])
FOLLOW_53_in_argument_expression_list1199 = frozenset([1, 27])
FOLLOW_27_in_argument_expression_list1204 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_argument_expression_list1206 = frozenset([1, 27, 53])
FOLLOW_53_in_argument_expression_list1209 = frozenset([1, 27])
FOLLOW_multiplicative_expression_in_additive_expression1225 = frozenset([1, 68, 69])
FOLLOW_68_in_additive_expression1229 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_multiplicative_expression_in_additive_expression1231 = frozenset([1, 68, 69])
FOLLOW_69_in_additive_expression1235 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_multiplicative_expression_in_additive_expression1237 = frozenset([1, 68, 69])
FOLLOW_cast_expression_in_multiplicative_expression1251 = frozenset([1, 66, 70, 71])
FOLLOW_66_in_multiplicative_expression1255 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_multiplicative_expression1257 = frozenset([1, 66, 70, 71])
FOLLOW_70_in_multiplicative_expression1261 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_multiplicative_expression1263 = frozenset([1, 66, 70, 71])
FOLLOW_71_in_multiplicative_expression1267 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_multiplicative_expression1269 = frozenset([1, 66, 70, 71])
FOLLOW_62_in_cast_expression1282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_name_in_cast_expression1284 = frozenset([63])
FOLLOW_63_in_cast_expression1286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_cast_expression1288 = frozenset([1])
FOLLOW_unary_expression_in_cast_expression1293 = frozenset([1])
FOLLOW_postfix_expression_in_unary_expression1304 = frozenset([1])
FOLLOW_72_in_unary_expression1309 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_unary_expression1311 = frozenset([1])
FOLLOW_73_in_unary_expression1316 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_unary_expression1318 = frozenset([1])
FOLLOW_unary_operator_in_unary_expression1323 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_unary_expression1325 = frozenset([1])
FOLLOW_74_in_unary_expression1330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_unary_expression1332 = frozenset([1])
FOLLOW_74_in_unary_expression1337 = frozenset([62])
FOLLOW_62_in_unary_expression1339 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_name_in_unary_expression1341 = frozenset([63])
FOLLOW_63_in_unary_expression1343 = frozenset([1])
FOLLOW_primary_expression_in_postfix_expression1367 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_64_in_postfix_expression1383 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_postfix_expression1385 = frozenset([65])
FOLLOW_65_in_postfix_expression1387 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_62_in_postfix_expression1401 = frozenset([63])
FOLLOW_63_in_postfix_expression1405 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_62_in_postfix_expression1420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_argument_expression_list_in_postfix_expression1424 = frozenset([63])
FOLLOW_63_in_postfix_expression1428 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_62_in_postfix_expression1444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_macro_parameter_list_in_postfix_expression1446 = frozenset([63])
FOLLOW_63_in_postfix_expression1448 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_75_in_postfix_expression1462 = frozenset([4])
FOLLOW_IDENTIFIER_in_postfix_expression1466 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_66_in_postfix_expression1482 = frozenset([4])
FOLLOW_IDENTIFIER_in_postfix_expression1486 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_76_in_postfix_expression1502 = frozenset([4])
FOLLOW_IDENTIFIER_in_postfix_expression1506 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_72_in_postfix_expression1522 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_73_in_postfix_expression1536 = frozenset([1, 62, 64, 66, 72, 73, 75, 76])
FOLLOW_parameter_declaration_in_macro_parameter_list1559 = frozenset([1, 27])
FOLLOW_27_in_macro_parameter_list1562 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_declaration_in_macro_parameter_list1564 = frozenset([1, 27])
FOLLOW_set_in_unary_operator0 = frozenset([1])
FOLLOW_IDENTIFIER_in_primary_expression1613 = frozenset([1])
FOLLOW_constant_in_primary_expression1618 = frozenset([1])
FOLLOW_62_in_primary_expression1623 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_primary_expression1625 = frozenset([63])
FOLLOW_63_in_primary_expression1627 = frozenset([1])
FOLLOW_HEX_LITERAL_in_constant1643 = frozenset([1])
FOLLOW_OCTAL_LITERAL_in_constant1653 = frozenset([1])
FOLLOW_DECIMAL_LITERAL_in_constant1663 = frozenset([1])
FOLLOW_CHARACTER_LITERAL_in_constant1671 = frozenset([1])
FOLLOW_IDENTIFIER_in_constant1680 = frozenset([4, 9])
FOLLOW_STRING_LITERAL_in_constant1683 = frozenset([1, 4, 9])
FOLLOW_IDENTIFIER_in_constant1688 = frozenset([1, 4])
FOLLOW_FLOATING_POINT_LITERAL_in_constant1699 = frozenset([1])
FOLLOW_assignment_expression_in_expression1715 = frozenset([1, 27])
FOLLOW_27_in_expression1718 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_expression1720 = frozenset([1, 27])
FOLLOW_conditional_expression_in_constant_expression1733 = frozenset([1])
FOLLOW_lvalue_in_assignment_expression1744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
FOLLOW_assignment_operator_in_assignment_expression1746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_assignment_expression1748 = frozenset([1])
FOLLOW_conditional_expression_in_assignment_expression1753 = frozenset([1])
FOLLOW_unary_expression_in_lvalue1765 = frozenset([1])
FOLLOW_set_in_assignment_operator0 = frozenset([1])
FOLLOW_logical_or_expression_in_conditional_expression1839 = frozenset([1, 90])
FOLLOW_90_in_conditional_expression1842 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_conditional_expression1844 = frozenset([47])
FOLLOW_47_in_conditional_expression1846 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_conditional_expression_in_conditional_expression1848 = frozenset([1])
FOLLOW_logical_and_expression_in_logical_or_expression1863 = frozenset([1, 91])
FOLLOW_91_in_logical_or_expression1866 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_logical_and_expression_in_logical_or_expression1868 = frozenset([1, 91])
FOLLOW_inclusive_or_expression_in_logical_and_expression1881 = frozenset([1, 92])
FOLLOW_92_in_logical_and_expression1884 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_inclusive_or_expression_in_logical_and_expression1886 = frozenset([1, 92])
FOLLOW_exclusive_or_expression_in_inclusive_or_expression1899 = frozenset([1, 93])
FOLLOW_93_in_inclusive_or_expression1902 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_exclusive_or_expression_in_inclusive_or_expression1904 = frozenset([1, 93])
FOLLOW_and_expression_in_exclusive_or_expression1917 = frozenset([1, 94])
FOLLOW_94_in_exclusive_or_expression1920 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_and_expression_in_exclusive_or_expression1922 = frozenset([1, 94])
FOLLOW_equality_expression_in_and_expression1935 = frozenset([1, 77])
FOLLOW_77_in_and_expression1938 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_equality_expression_in_and_expression1940 = frozenset([1, 77])
FOLLOW_relational_expression_in_equality_expression1952 = frozenset([1, 95, 96])
FOLLOW_set_in_equality_expression1955 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_relational_expression_in_equality_expression1961 = frozenset([1, 95, 96])
FOLLOW_shift_expression_in_relational_expression1975 = frozenset([1, 97, 98, 99, 100])
FOLLOW_set_in_relational_expression1978 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_shift_expression_in_relational_expression1988 = frozenset([1, 97, 98, 99, 100])
FOLLOW_additive_expression_in_shift_expression2001 = frozenset([1, 101, 102])
FOLLOW_set_in_shift_expression2004 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_additive_expression_in_shift_expression2010 = frozenset([1, 101, 102])
FOLLOW_labeled_statement_in_statement2025 = frozenset([1])
FOLLOW_compound_statement_in_statement2030 = frozenset([1])
FOLLOW_expression_statement_in_statement2035 = frozenset([1])
FOLLOW_selection_statement_in_statement2040 = frozenset([1])
FOLLOW_iteration_statement_in_statement2045 = frozenset([1])
FOLLOW_jump_statement_in_statement2050 = frozenset([1])
FOLLOW_macro_statement_in_statement2055 = frozenset([1])
FOLLOW_asm2_statement_in_statement2060 = frozenset([1])
FOLLOW_asm1_statement_in_statement2065 = frozenset([1])
FOLLOW_asm_statement_in_statement2070 = frozenset([1])
FOLLOW_declaration_in_statement2075 = frozenset([1])
FOLLOW_103_in_asm2_statement2086 = frozenset([4])
FOLLOW_IDENTIFIER_in_asm2_statement2089 = frozenset([62])
FOLLOW_62_in_asm2_statement2091 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_set_in_asm2_statement2094 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_63_in_asm2_statement2101 = frozenset([25])
FOLLOW_25_in_asm2_statement2103 = frozenset([1])
FOLLOW_104_in_asm1_statement2115 = frozenset([43])
FOLLOW_43_in_asm1_statement2117 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_set_in_asm1_statement2120 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_44_in_asm1_statement2127 = frozenset([1])
FOLLOW_105_in_asm_statement2138 = frozenset([43])
FOLLOW_43_in_asm_statement2140 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_set_in_asm_statement2143 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_44_in_asm_statement2150 = frozenset([1])
FOLLOW_IDENTIFIER_in_macro_statement2162 = frozenset([62])
FOLLOW_62_in_macro_statement2164 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_declaration_in_macro_statement2166 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_list_in_macro_statement2170 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_macro_statement2173 = frozenset([63])
FOLLOW_63_in_macro_statement2176 = frozenset([1])
FOLLOW_IDENTIFIER_in_labeled_statement2188 = frozenset([47])
FOLLOW_47_in_labeled_statement2190 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_labeled_statement2192 = frozenset([1])
FOLLOW_106_in_labeled_statement2197 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_constant_expression_in_labeled_statement2199 = frozenset([47])
FOLLOW_47_in_labeled_statement2201 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_labeled_statement2203 = frozenset([1])
FOLLOW_107_in_labeled_statement2208 = frozenset([47])
FOLLOW_47_in_labeled_statement2210 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_labeled_statement2212 = frozenset([1])
FOLLOW_43_in_compound_statement2223 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_declaration_in_compound_statement2225 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_list_in_compound_statement2228 = frozenset([44])
FOLLOW_44_in_compound_statement2231 = frozenset([1])
FOLLOW_statement_in_statement_list2242 = frozenset([1, 4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_25_in_expression_statement2254 = frozenset([1])
FOLLOW_expression_in_expression_statement2259 = frozenset([25])
FOLLOW_25_in_expression_statement2261 = frozenset([1])
FOLLOW_108_in_selection_statement2272 = frozenset([62])
FOLLOW_62_in_selection_statement2274 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_selection_statement2278 = frozenset([63])
FOLLOW_63_in_selection_statement2280 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_selection_statement2284 = frozenset([1, 109])
FOLLOW_109_in_selection_statement2299 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_selection_statement2301 = frozenset([1])
FOLLOW_110_in_selection_statement2308 = frozenset([62])
FOLLOW_62_in_selection_statement2310 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_selection_statement2312 = frozenset([63])
FOLLOW_63_in_selection_statement2314 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_selection_statement2316 = frozenset([1])
FOLLOW_111_in_iteration_statement2327 = frozenset([62])
FOLLOW_62_in_iteration_statement2329 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_iteration_statement2333 = frozenset([63])
FOLLOW_63_in_iteration_statement2335 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_iteration_statement2337 = frozenset([1])
FOLLOW_112_in_iteration_statement2344 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_iteration_statement2346 = frozenset([111])
FOLLOW_111_in_iteration_statement2348 = frozenset([62])
FOLLOW_62_in_iteration_statement2350 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_iteration_statement2354 = frozenset([63])
FOLLOW_63_in_iteration_statement2356 = frozenset([25])
FOLLOW_25_in_iteration_statement2358 = frozenset([1])
FOLLOW_113_in_iteration_statement2365 = frozenset([62])
FOLLOW_62_in_iteration_statement2367 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_statement_in_iteration_statement2369 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_statement_in_iteration_statement2373 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 63, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_iteration_statement2375 = frozenset([63])
FOLLOW_63_in_iteration_statement2378 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117])
FOLLOW_statement_in_iteration_statement2380 = frozenset([1])
FOLLOW_114_in_jump_statement2393 = frozenset([4])
FOLLOW_IDENTIFIER_in_jump_statement2395 = frozenset([25])
FOLLOW_25_in_jump_statement2397 = frozenset([1])
FOLLOW_115_in_jump_statement2402 = frozenset([25])
FOLLOW_25_in_jump_statement2404 = frozenset([1])
FOLLOW_116_in_jump_statement2409 = frozenset([25])
FOLLOW_25_in_jump_statement2411 = frozenset([1])
FOLLOW_117_in_jump_statement2416 = frozenset([25])
FOLLOW_25_in_jump_statement2418 = frozenset([1])
FOLLOW_117_in_jump_statement2423 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_expression_in_jump_statement2425 = frozenset([25])
FOLLOW_25_in_jump_statement2427 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred2100 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred4100 = frozenset([4, 58, 59, 60, 62, 66])
FOLLOW_declarator_in_synpred4103 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_declaration_in_synpred4105 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_43_in_synpred4108 = frozenset([1])
FOLLOW_declaration_in_synpred5118 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred7157 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred10207 = frozenset([1])
FOLLOW_type_specifier_in_synpred14272 = frozenset([1])
FOLLOW_type_qualifier_in_synpred15286 = frozenset([1])
FOLLOW_type_qualifier_in_synpred33444 = frozenset([1])
FOLLOW_IDENTIFIER_in_synpred34442 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_type_qualifier_in_synpred34444 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 66])
FOLLOW_declarator_in_synpred34447 = frozenset([1])
FOLLOW_type_qualifier_in_synpred39566 = frozenset([1])
FOLLOW_type_specifier_in_synpred40570 = frozenset([1])
FOLLOW_pointer_in_synpred66784 = frozenset([4, 58, 59, 60, 62])
FOLLOW_58_in_synpred66788 = frozenset([4, 59, 60, 62])
FOLLOW_59_in_synpred66793 = frozenset([4, 60, 62])
FOLLOW_60_in_synpred66798 = frozenset([4, 62])
FOLLOW_direct_declarator_in_synpred66802 = frozenset([1])
FOLLOW_declarator_suffix_in_synpred67821 = frozenset([1])
FOLLOW_58_in_synpred69830 = frozenset([1])
FOLLOW_declarator_suffix_in_synpred70838 = frozenset([1])
FOLLOW_62_in_synpred73878 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_type_list_in_synpred73880 = frozenset([63])
FOLLOW_63_in_synpred73882 = frozenset([1])
FOLLOW_62_in_synpred74892 = frozenset([4])
FOLLOW_identifier_list_in_synpred74894 = frozenset([63])
FOLLOW_63_in_synpred74896 = frozenset([1])
FOLLOW_type_qualifier_in_synpred75921 = frozenset([1])
FOLLOW_pointer_in_synpred76924 = frozenset([1])
FOLLOW_66_in_synpred77919 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_qualifier_in_synpred77921 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_pointer_in_synpred77924 = frozenset([1])
FOLLOW_66_in_synpred78930 = frozenset([66])
FOLLOW_pointer_in_synpred78932 = frozenset([1])
FOLLOW_53_in_synpred81977 = frozenset([1])
FOLLOW_27_in_synpred82974 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_53_in_synpred82977 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_parameter_declaration_in_synpred82981 = frozenset([1])
FOLLOW_declarator_in_synpred83997 = frozenset([1])
FOLLOW_abstract_declarator_in_synpred84999 = frozenset([1])
FOLLOW_declaration_specifiers_in_synpred86994 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_declarator_in_synpred86997 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_abstract_declarator_in_synpred86999 = frozenset([1, 4, 53, 58, 59, 60, 62, 64, 66])
FOLLOW_53_in_synpred861004 = frozenset([1])
FOLLOW_specifier_qualifier_list_in_synpred901046 = frozenset([1, 62, 64, 66])
FOLLOW_abstract_declarator_in_synpred901048 = frozenset([1])
FOLLOW_direct_abstract_declarator_in_synpred911067 = frozenset([1])
FOLLOW_62_in_synpred931086 = frozenset([62, 64, 66])
FOLLOW_abstract_declarator_in_synpred931088 = frozenset([63])
FOLLOW_63_in_synpred931090 = frozenset([1])
FOLLOW_abstract_declarator_suffix_in_synpred941098 = frozenset([1])
FOLLOW_62_in_synpred1091282 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61])
FOLLOW_type_name_in_synpred1091284 = frozenset([63])
FOLLOW_63_in_synpred1091286 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_cast_expression_in_synpred1091288 = frozenset([1])
FOLLOW_74_in_synpred1141330 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_unary_expression_in_synpred1141332 = frozenset([1])
FOLLOW_62_in_synpred1171420 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_argument_expression_list_in_synpred1171424 = frozenset([63])
FOLLOW_63_in_synpred1171428 = frozenset([1])
FOLLOW_62_in_synpred1181444 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 66])
FOLLOW_macro_parameter_list_in_synpred1181446 = frozenset([63])
FOLLOW_63_in_synpred1181448 = frozenset([1])
FOLLOW_66_in_synpred1201482 = frozenset([4])
FOLLOW_IDENTIFIER_in_synpred1201486 = frozenset([1])
FOLLOW_STRING_LITERAL_in_synpred1371683 = frozenset([1])
FOLLOW_IDENTIFIER_in_synpred1381680 = frozenset([4, 9])
FOLLOW_STRING_LITERAL_in_synpred1381683 = frozenset([1, 9])
FOLLOW_lvalue_in_synpred1421744 = frozenset([28, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])
FOLLOW_assignment_operator_in_synpred1421746 = frozenset([4, 5, 6, 7, 8, 9, 10, 62, 66, 68, 69, 72, 73, 74, 77, 78, 79])
FOLLOW_assignment_expression_in_synpred1421748 = frozenset([1])
FOLLOW_expression_statement_in_synpred1692035 = frozenset([1])
FOLLOW_macro_statement_in_synpred1732055 = frozenset([1])
FOLLOW_asm2_statement_in_synpred1742060 = frozenset([1])
FOLLOW_declaration_in_synpred1812166 = frozenset([1])
FOLLOW_statement_list_in_synpred1822170 = frozenset([1])
FOLLOW_declaration_in_synpred1862225 = frozenset([1])
FOLLOW_statement_in_synpred1882242 = frozenset([1])
| edk2-master | BaseTools/Source/Python/Eot/CParser3/CParser.py |
# Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENENERATED. PLEASE DON NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2k")
buf.write("\u0383\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
buf.write("p\tp\4q\tq\4r\tr\3\2\3\2\3\3\3\3\3\4\3\4\3\4\3\4\3\4\3")
buf.write("\4\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7")
buf.write("\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\t\3\n\3")
buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13")
buf.write("\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\16")
buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\20\3\20")
buf.write("\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22")
buf.write("\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23")
buf.write("\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25")
buf.write("\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27")
buf.write("\3\27\3\27\3\27\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\32")
buf.write("\3\32\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33")
buf.write("\3\33\3\33\3\33\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\36")
buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37")
buf.write("\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3!\3")
buf.write("!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3")
buf.write("\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"")
buf.write("\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#")
buf.write("\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3")
buf.write("%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3")
buf.write("&\3&\3&\3&\3&\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3+\3+\3")
buf.write(",\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\61")
buf.write("\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64")
buf.write("\3\64\3\65\3\65\3\65\3\66\3\66\3\67\3\67\38\38\39\39\3")
buf.write("9\3:\3:\3:\3;\3;\3;\3<\3<\3<\3=\3=\3=\3>\3>\3>\3>\3?\3")
buf.write("?\3?\3?\3@\3@\3@\3A\3A\3A\3B\3B\3B\3C\3C\3D\3D\3D\3E\3")
buf.write("E\3E\3F\3F\3G\3G\3H\3H\3H\3I\3I\3I\3J\3J\3K\3K\3L\3L\3")
buf.write("L\3M\3M\3M\3N\3N\3N\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\3P\3")
buf.write("Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\3R\3S\3S\3S\3S\3S\3T\3T\3")
buf.write("T\3T\3T\3T\3T\3T\3U\3U\3U\3V\3V\3V\3V\3V\3W\3W\3W\3W\3")
buf.write("W\3W\3W\3X\3X\3X\3X\3X\3X\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3[\3")
buf.write("[\3[\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3")
buf.write("]\3]\3]\3]\3]\3^\3^\3^\7^\u02b2\n^\f^\16^\u02b5\13^\3")
buf.write("_\3_\3`\5`\u02ba\n`\3`\3`\3`\5`\u02bf\n`\3`\3`\3a\5a\u02c4")
buf.write("\na\3a\3a\3a\7a\u02c9\na\fa\16a\u02cc\13a\3a\3a\3b\3b")
buf.write("\3b\6b\u02d3\nb\rb\16b\u02d4\3b\5b\u02d8\nb\3c\3c\3c\7")
buf.write("c\u02dd\nc\fc\16c\u02e0\13c\5c\u02e2\nc\3c\5c\u02e5\n")
buf.write("c\3d\3d\6d\u02e9\nd\rd\16d\u02ea\3d\5d\u02ee\nd\3e\3e")
buf.write("\3f\3f\3f\3f\3f\3f\5f\u02f8\nf\3g\6g\u02fb\ng\rg\16g\u02fc")
buf.write("\3g\3g\7g\u0301\ng\fg\16g\u0304\13g\3g\5g\u0307\ng\3g")
buf.write("\5g\u030a\ng\3g\3g\6g\u030e\ng\rg\16g\u030f\3g\5g\u0313")
buf.write("\ng\3g\5g\u0316\ng\3g\6g\u0319\ng\rg\16g\u031a\3g\3g\5")
buf.write("g\u031f\ng\3g\6g\u0322\ng\rg\16g\u0323\3g\5g\u0327\ng")
buf.write("\3g\5g\u032a\ng\3h\3h\5h\u032e\nh\3h\6h\u0331\nh\rh\16")
buf.write("h\u0332\3i\3i\3j\3j\3j\5j\u033a\nj\3k\3k\3k\3k\3k\3k\3")
buf.write("k\3k\3k\5k\u0345\nk\3l\3l\3l\3l\3l\3l\3l\3m\3m\3m\3m\3")
buf.write("n\3n\3n\3n\3o\3o\3p\3p\3p\3p\7p\u035c\np\fp\16p\u035f")
buf.write("\13p\3p\3p\3p\3p\3p\3q\3q\3q\3q\7q\u036a\nq\fq\16q\u036d")
buf.write("\13q\3q\5q\u0370\nq\3q\3q\3q\3q\3r\3r\7r\u0378\nr\fr\16")
buf.write("r\u037b\13r\3r\5r\u037e\nr\3r\3r\3r\3r\3\u035d\2s\3\3")
buf.write("\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16")
buf.write("\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61")
buf.write("\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*")
buf.write("S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w")
buf.write("=y>{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008b")
buf.write("G\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009b")
buf.write("O\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab")
buf.write("W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb")
buf.write("_\u00bd\2\u00bf`\u00c1a\u00c3b\u00c5c\u00c7d\u00c9\2\u00cb")
buf.write("\2\u00cde\u00cf\2\u00d1\2\u00d3\2\u00d5\2\u00d7\2\u00d9")
buf.write("f\u00dbg\u00ddh\u00dfi\u00e1j\u00e3k\3\2\20\6\2&&C\\a")
buf.write("ac|\4\2))^^\4\2$$^^\4\2ZZzz\5\2\62;CHch\6\2NNWWnnww\4")
buf.write("\2WWww\4\2NNnn\4\2GGgg\4\2--//\6\2FFHHffhh\t\2))^^ddh")
buf.write("hppttvv\5\2\13\f\16\17\"\"\4\2\f\f\17\17\2\u03a2\2\3\3")
buf.write("\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2")
buf.write("\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2")
buf.write("\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2")
buf.write("\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2")
buf.write("\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3")
buf.write("\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2")
buf.write("\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3")
buf.write("\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K")
buf.write("\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2")
buf.write("U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2")
buf.write("\2_\3\2\2\2\2a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2")
buf.write("\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o\3\2\2\2\2q\3\2")
buf.write("\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3")
buf.write("\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083")
buf.write("\3\2\2\2\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2")
buf.write("\2\2\u008b\3\2\2\2\2\u008d\3\2\2\2\2\u008f\3\2\2\2\2\u0091")
buf.write("\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097\3\2\2")
buf.write("\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f")
buf.write("\3\2\2\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2")
buf.write("\2\2\u00a7\3\2\2\2\2\u00a9\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad")
buf.write("\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2\2\2\u00b3\3\2\2")
buf.write("\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb")
buf.write("\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2")
buf.write("\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00cd\3\2\2\2\2\u00d9")
buf.write("\3\2\2\2\2\u00db\3\2\2\2\2\u00dd\3\2\2\2\2\u00df\3\2\2")
buf.write("\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\3\u00e5\3\2\2\2\5\u00e7")
buf.write("\3\2\2\2\7\u00e9\3\2\2\2\t\u00f1\3\2\2\2\13\u00f3\3\2")
buf.write("\2\2\r\u00f5\3\2\2\2\17\u00fc\3\2\2\2\21\u0103\3\2\2\2")
buf.write("\23\u0108\3\2\2\2\25\u0111\3\2\2\2\27\u0118\3\2\2\2\31")
buf.write("\u011d\3\2\2\2\33\u0122\3\2\2\2\35\u0128\3\2\2\2\37\u012c")
buf.write("\3\2\2\2!\u0131\3\2\2\2#\u0137\3\2\2\2%\u013e\3\2\2\2")
buf.write("\'\u0145\3\2\2\2)\u014e\3\2\2\2+\u0150\3\2\2\2-\u0157")
buf.write("\3\2\2\2/\u015d\3\2\2\2\61\u015f\3\2\2\2\63\u0164\3\2")
buf.write("\2\2\65\u016a\3\2\2\2\67\u0173\3\2\2\29\u0176\3\2\2\2")
buf.write(";\u017a\3\2\2\2=\u0183\3\2\2\2?\u0189\3\2\2\2A\u0193\3")
buf.write("\2\2\2C\u019c\3\2\2\2E\u01ba\3\2\2\2G\u01c1\3\2\2\2I\u01d1")
buf.write("\3\2\2\2K\u01e4\3\2\2\2M\u01eb\3\2\2\2O\u01ed\3\2\2\2")
buf.write("Q\u01ef\3\2\2\2S\u01f1\3\2\2\2U\u01f3\3\2\2\2W\u01f5\3")
buf.write("\2\2\2Y\u01f9\3\2\2\2[\u01fb\3\2\2\2]\u01fd\3\2\2\2_\u01ff")
buf.write("\3\2\2\2a\u0201\3\2\2\2c\u0204\3\2\2\2e\u0207\3\2\2\2")
buf.write("g\u020e\3\2\2\2i\u0210\3\2\2\2k\u0213\3\2\2\2m\u0215\3")
buf.write("\2\2\2o\u0217\3\2\2\2q\u0219\3\2\2\2s\u021c\3\2\2\2u\u021f")
buf.write("\3\2\2\2w\u0222\3\2\2\2y\u0225\3\2\2\2{\u0228\3\2\2\2")
buf.write("}\u022c\3\2\2\2\177\u0230\3\2\2\2\u0081\u0233\3\2\2\2")
buf.write("\u0083\u0236\3\2\2\2\u0085\u0239\3\2\2\2\u0087\u023b\3")
buf.write("\2\2\2\u0089\u023e\3\2\2\2\u008b\u0241\3\2\2\2\u008d\u0243")
buf.write("\3\2\2\2\u008f\u0245\3\2\2\2\u0091\u0248\3\2\2\2\u0093")
buf.write("\u024b\3\2\2\2\u0095\u024d\3\2\2\2\u0097\u024f\3\2\2\2")
buf.write("\u0099\u0252\3\2\2\2\u009b\u0255\3\2\2\2\u009d\u0258\3")
buf.write("\2\2\2\u009f\u025b\3\2\2\2\u00a1\u0263\3\2\2\2\u00a3\u0268")
buf.write("\3\2\2\2\u00a5\u026e\3\2\2\2\u00a7\u0273\3\2\2\2\u00a9")
buf.write("\u027b\3\2\2\2\u00ab\u027e\3\2\2\2\u00ad\u0283\3\2\2\2")
buf.write("\u00af\u028a\3\2\2\2\u00b1\u0290\3\2\2\2\u00b3\u0293\3")
buf.write("\2\2\2\u00b5\u0298\3\2\2\2\u00b7\u02a1\3\2\2\2\u00b9\u02a7")
buf.write("\3\2\2\2\u00bb\u02ae\3\2\2\2\u00bd\u02b6\3\2\2\2\u00bf")
buf.write("\u02b9\3\2\2\2\u00c1\u02c3\3\2\2\2\u00c3\u02cf\3\2\2\2")
buf.write("\u00c5\u02e1\3\2\2\2\u00c7\u02e6\3\2\2\2\u00c9\u02ef\3")
buf.write("\2\2\2\u00cb\u02f7\3\2\2\2\u00cd\u0329\3\2\2\2\u00cf\u032b")
buf.write("\3\2\2\2\u00d1\u0334\3\2\2\2\u00d3\u0339\3\2\2\2\u00d5")
buf.write("\u0344\3\2\2\2\u00d7\u0346\3\2\2\2\u00d9\u034d\3\2\2\2")
buf.write("\u00db\u0351\3\2\2\2\u00dd\u0355\3\2\2\2\u00df\u0357\3")
buf.write("\2\2\2\u00e1\u0365\3\2\2\2\u00e3\u0375\3\2\2\2\u00e5\u00e6")
buf.write("\7}\2\2\u00e6\4\3\2\2\2\u00e7\u00e8\7=\2\2\u00e8\6\3\2")
buf.write("\2\2\u00e9\u00ea\7v\2\2\u00ea\u00eb\7{\2\2\u00eb\u00ec")
buf.write("\7r\2\2\u00ec\u00ed\7g\2\2\u00ed\u00ee\7f\2\2\u00ee\u00ef")
buf.write("\7g\2\2\u00ef\u00f0\7h\2\2\u00f0\b\3\2\2\2\u00f1\u00f2")
buf.write("\7.\2\2\u00f2\n\3\2\2\2\u00f3\u00f4\7?\2\2\u00f4\f\3\2")
buf.write("\2\2\u00f5\u00f6\7g\2\2\u00f6\u00f7\7z\2\2\u00f7\u00f8")
buf.write("\7v\2\2\u00f8\u00f9\7g\2\2\u00f9\u00fa\7t\2\2\u00fa\u00fb")
buf.write("\7p\2\2\u00fb\16\3\2\2\2\u00fc\u00fd\7u\2\2\u00fd\u00fe")
buf.write("\7v\2\2\u00fe\u00ff\7c\2\2\u00ff\u0100\7v\2\2\u0100\u0101")
buf.write("\7k\2\2\u0101\u0102\7e\2\2\u0102\20\3\2\2\2\u0103\u0104")
buf.write("\7c\2\2\u0104\u0105\7w\2\2\u0105\u0106\7v\2\2\u0106\u0107")
buf.write("\7q\2\2\u0107\22\3\2\2\2\u0108\u0109\7t\2\2\u0109\u010a")
buf.write("\7g\2\2\u010a\u010b\7i\2\2\u010b\u010c\7k\2\2\u010c\u010d")
buf.write("\7u\2\2\u010d\u010e\7v\2\2\u010e\u010f\7g\2\2\u010f\u0110")
buf.write("\7t\2\2\u0110\24\3\2\2\2\u0111\u0112\7U\2\2\u0112\u0113")
buf.write("\7V\2\2\u0113\u0114\7C\2\2\u0114\u0115\7V\2\2\u0115\u0116")
buf.write("\7K\2\2\u0116\u0117\7E\2\2\u0117\26\3\2\2\2\u0118\u0119")
buf.write("\7x\2\2\u0119\u011a\7q\2\2\u011a\u011b\7k\2\2\u011b\u011c")
buf.write("\7f\2\2\u011c\30\3\2\2\2\u011d\u011e\7e\2\2\u011e\u011f")
buf.write("\7j\2\2\u011f\u0120\7c\2\2\u0120\u0121\7t\2\2\u0121\32")
buf.write("\3\2\2\2\u0122\u0123\7u\2\2\u0123\u0124\7j\2\2\u0124\u0125")
buf.write("\7q\2\2\u0125\u0126\7t\2\2\u0126\u0127\7v\2\2\u0127\34")
buf.write("\3\2\2\2\u0128\u0129\7k\2\2\u0129\u012a\7p\2\2\u012a\u012b")
buf.write("\7v\2\2\u012b\36\3\2\2\2\u012c\u012d\7n\2\2\u012d\u012e")
buf.write("\7q\2\2\u012e\u012f\7p\2\2\u012f\u0130\7i\2\2\u0130 \3")
buf.write("\2\2\2\u0131\u0132\7h\2\2\u0132\u0133\7n\2\2\u0133\u0134")
buf.write("\7q\2\2\u0134\u0135\7c\2\2\u0135\u0136\7v\2\2\u0136\"")
buf.write("\3\2\2\2\u0137\u0138\7f\2\2\u0138\u0139\7q\2\2\u0139\u013a")
buf.write("\7w\2\2\u013a\u013b\7d\2\2\u013b\u013c\7n\2\2\u013c\u013d")
buf.write("\7g\2\2\u013d$\3\2\2\2\u013e\u013f\7u\2\2\u013f\u0140")
buf.write("\7k\2\2\u0140\u0141\7i\2\2\u0141\u0142\7p\2\2\u0142\u0143")
buf.write("\7g\2\2\u0143\u0144\7f\2\2\u0144&\3\2\2\2\u0145\u0146")
buf.write("\7w\2\2\u0146\u0147\7p\2\2\u0147\u0148\7u\2\2\u0148\u0149")
buf.write("\7k\2\2\u0149\u014a\7i\2\2\u014a\u014b\7p\2\2\u014b\u014c")
buf.write("\7g\2\2\u014c\u014d\7f\2\2\u014d(\3\2\2\2\u014e\u014f")
buf.write("\7\177\2\2\u014f*\3\2\2\2\u0150\u0151\7u\2\2\u0151\u0152")
buf.write("\7v\2\2\u0152\u0153\7t\2\2\u0153\u0154\7w\2\2\u0154\u0155")
buf.write("\7e\2\2\u0155\u0156\7v\2\2\u0156,\3\2\2\2\u0157\u0158")
buf.write("\7w\2\2\u0158\u0159\7p\2\2\u0159\u015a\7k\2\2\u015a\u015b")
buf.write("\7q\2\2\u015b\u015c\7p\2\2\u015c.\3\2\2\2\u015d\u015e")
buf.write("\7<\2\2\u015e\60\3\2\2\2\u015f\u0160\7g\2\2\u0160\u0161")
buf.write("\7p\2\2\u0161\u0162\7w\2\2\u0162\u0163\7o\2\2\u0163\62")
buf.write("\3\2\2\2\u0164\u0165\7e\2\2\u0165\u0166\7q\2\2\u0166\u0167")
buf.write("\7p\2\2\u0167\u0168\7u\2\2\u0168\u0169\7v\2\2\u0169\64")
buf.write("\3\2\2\2\u016a\u016b\7x\2\2\u016b\u016c\7q\2\2\u016c\u016d")
buf.write("\7n\2\2\u016d\u016e\7c\2\2\u016e\u016f\7v\2\2\u016f\u0170")
buf.write("\7k\2\2\u0170\u0171\7n\2\2\u0171\u0172\7g\2\2\u0172\66")
buf.write("\3\2\2\2\u0173\u0174\7K\2\2\u0174\u0175\7P\2\2\u01758")
buf.write("\3\2\2\2\u0176\u0177\7Q\2\2\u0177\u0178\7W\2\2\u0178\u0179")
buf.write("\7V\2\2\u0179:\3\2\2\2\u017a\u017b\7Q\2\2\u017b\u017c")
buf.write("\7R\2\2\u017c\u017d\7V\2\2\u017d\u017e\7K\2\2\u017e\u017f")
buf.write("\7Q\2\2\u017f\u0180\7P\2\2\u0180\u0181\7C\2\2\u0181\u0182")
buf.write("\7N\2\2\u0182<\3\2\2\2\u0183\u0184\7E\2\2\u0184\u0185")
buf.write("\7Q\2\2\u0185\u0186\7P\2\2\u0186\u0187\7U\2\2\u0187\u0188")
buf.write("\7V\2\2\u0188>\3\2\2\2\u0189\u018a\7W\2\2\u018a\u018b")
buf.write("\7P\2\2\u018b\u018c\7C\2\2\u018c\u018d\7N\2\2\u018d\u018e")
buf.write("\7K\2\2\u018e\u018f\7I\2\2\u018f\u0190\7P\2\2\u0190\u0191")
buf.write("\7G\2\2\u0191\u0192\7F\2\2\u0192@\3\2\2\2\u0193\u0194")
buf.write("\7X\2\2\u0194\u0195\7Q\2\2\u0195\u0196\7N\2\2\u0196\u0197")
buf.write("\7C\2\2\u0197\u0198\7V\2\2\u0198\u0199\7K\2\2\u0199\u019a")
buf.write("\7N\2\2\u019a\u019b\7G\2\2\u019bB\3\2\2\2\u019c\u019d")
buf.write("\7I\2\2\u019d\u019e\7N\2\2\u019e\u019f\7Q\2\2\u019f\u01a0")
buf.write("\7D\2\2\u01a0\u01a1\7C\2\2\u01a1\u01a2\7N\2\2\u01a2\u01a3")
buf.write("\7a\2\2\u01a3\u01a4\7T\2\2\u01a4\u01a5\7G\2\2\u01a5\u01a6")
buf.write("\7O\2\2\u01a6\u01a7\7Q\2\2\u01a7\u01a8\7X\2\2\u01a8\u01a9")
buf.write("\7G\2\2\u01a9\u01aa\7a\2\2\u01aa\u01ab\7K\2\2\u01ab\u01ac")
buf.write("\7H\2\2\u01ac\u01ad\7a\2\2\u01ad\u01ae\7W\2\2\u01ae\u01af")
buf.write("\7P\2\2\u01af\u01b0\7T\2\2\u01b0\u01b1\7G\2\2\u01b1\u01b2")
buf.write("\7H\2\2\u01b2\u01b3\7G\2\2\u01b3\u01b4\7T\2\2\u01b4\u01b5")
buf.write("\7G\2\2\u01b5\u01b6\7P\2\2\u01b6\u01b7\7E\2\2\u01b7\u01b8")
buf.write("\7G\2\2\u01b8\u01b9\7F\2\2\u01b9D\3\2\2\2\u01ba\u01bb")
buf.write("\7G\2\2\u01bb\u01bc\7H\2\2\u01bc\u01bd\7K\2\2\u01bd\u01be")
buf.write("\7C\2\2\u01be\u01bf\7R\2\2\u01bf\u01c0\7K\2\2\u01c0F\3")
buf.write("\2\2\2\u01c1\u01c2\7G\2\2\u01c2\u01c3\7H\2\2\u01c3\u01c4")
buf.write("\7K\2\2\u01c4\u01c5\7a\2\2\u01c5\u01c6\7D\2\2\u01c6\u01c7")
buf.write("\7Q\2\2\u01c7\u01c8\7Q\2\2\u01c8\u01c9\7V\2\2\u01c9\u01ca")
buf.write("\7U\2\2\u01ca\u01cb\7G\2\2\u01cb\u01cc\7T\2\2\u01cc\u01cd")
buf.write("\7X\2\2\u01cd\u01ce\7K\2\2\u01ce\u01cf\7E\2\2\u01cf\u01d0")
buf.write("\7G\2\2\u01d0H\3\2\2\2\u01d1\u01d2\7G\2\2\u01d2\u01d3")
buf.write("\7H\2\2\u01d3\u01d4\7K\2\2\u01d4\u01d5\7a\2\2\u01d5\u01d6")
buf.write("\7T\2\2\u01d6\u01d7\7W\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9")
buf.write("\7V\2\2\u01d9\u01da\7K\2\2\u01da\u01db\7O\2\2\u01db\u01dc")
buf.write("\7G\2\2\u01dc\u01dd\7U\2\2\u01dd\u01de\7G\2\2\u01de\u01df")
buf.write("\7T\2\2\u01df\u01e0\7X\2\2\u01e0\u01e1\7K\2\2\u01e1\u01e2")
buf.write("\7E\2\2\u01e2\u01e3\7G\2\2\u01e3J\3\2\2\2\u01e4\u01e5")
buf.write("\7R\2\2\u01e5\u01e6\7C\2\2\u01e6\u01e7\7E\2\2\u01e7\u01e8")
buf.write("\7M\2\2\u01e8\u01e9\7G\2\2\u01e9\u01ea\7F\2\2\u01eaL\3")
buf.write("\2\2\2\u01eb\u01ec\7*\2\2\u01ecN\3\2\2\2\u01ed\u01ee\7")
buf.write("+\2\2\u01eeP\3\2\2\2\u01ef\u01f0\7]\2\2\u01f0R\3\2\2\2")
buf.write("\u01f1\u01f2\7_\2\2\u01f2T\3\2\2\2\u01f3\u01f4\7,\2\2")
buf.write("\u01f4V\3\2\2\2\u01f5\u01f6\7\60\2\2\u01f6\u01f7\7\60")
buf.write("\2\2\u01f7\u01f8\7\60\2\2\u01f8X\3\2\2\2\u01f9\u01fa\7")
buf.write("-\2\2\u01faZ\3\2\2\2\u01fb\u01fc\7/\2\2\u01fc\\\3\2\2")
buf.write("\2\u01fd\u01fe\7\61\2\2\u01fe^\3\2\2\2\u01ff\u0200\7\'")
buf.write("\2\2\u0200`\3\2\2\2\u0201\u0202\7-\2\2\u0202\u0203\7-")
buf.write("\2\2\u0203b\3\2\2\2\u0204\u0205\7/\2\2\u0205\u0206\7/")
buf.write("\2\2\u0206d\3\2\2\2\u0207\u0208\7u\2\2\u0208\u0209\7k")
buf.write("\2\2\u0209\u020a\7|\2\2\u020a\u020b\7g\2\2\u020b\u020c")
buf.write("\7q\2\2\u020c\u020d\7h\2\2\u020df\3\2\2\2\u020e\u020f")
buf.write("\7\60\2\2\u020fh\3\2\2\2\u0210\u0211\7/\2\2\u0211\u0212")
buf.write("\7@\2\2\u0212j\3\2\2\2\u0213\u0214\7(\2\2\u0214l\3\2\2")
buf.write("\2\u0215\u0216\7\u0080\2\2\u0216n\3\2\2\2\u0217\u0218")
buf.write("\7#\2\2\u0218p\3\2\2\2\u0219\u021a\7,\2\2\u021a\u021b")
buf.write("\7?\2\2\u021br\3\2\2\2\u021c\u021d\7\61\2\2\u021d\u021e")
buf.write("\7?\2\2\u021et\3\2\2\2\u021f\u0220\7\'\2\2\u0220\u0221")
buf.write("\7?\2\2\u0221v\3\2\2\2\u0222\u0223\7-\2\2\u0223\u0224")
buf.write("\7?\2\2\u0224x\3\2\2\2\u0225\u0226\7/\2\2\u0226\u0227")
buf.write("\7?\2\2\u0227z\3\2\2\2\u0228\u0229\7>\2\2\u0229\u022a")
buf.write("\7>\2\2\u022a\u022b\7?\2\2\u022b|\3\2\2\2\u022c\u022d")
buf.write("\7@\2\2\u022d\u022e\7@\2\2\u022e\u022f\7?\2\2\u022f~\3")
buf.write("\2\2\2\u0230\u0231\7(\2\2\u0231\u0232\7?\2\2\u0232\u0080")
buf.write("\3\2\2\2\u0233\u0234\7`\2\2\u0234\u0235\7?\2\2\u0235\u0082")
buf.write("\3\2\2\2\u0236\u0237\7~\2\2\u0237\u0238\7?\2\2\u0238\u0084")
buf.write("\3\2\2\2\u0239\u023a\7A\2\2\u023a\u0086\3\2\2\2\u023b")
buf.write("\u023c\7~\2\2\u023c\u023d\7~\2\2\u023d\u0088\3\2\2\2\u023e")
buf.write("\u023f\7(\2\2\u023f\u0240\7(\2\2\u0240\u008a\3\2\2\2\u0241")
buf.write("\u0242\7~\2\2\u0242\u008c\3\2\2\2\u0243\u0244\7`\2\2\u0244")
buf.write("\u008e\3\2\2\2\u0245\u0246\7?\2\2\u0246\u0247\7?\2\2\u0247")
buf.write("\u0090\3\2\2\2\u0248\u0249\7#\2\2\u0249\u024a\7?\2\2\u024a")
buf.write("\u0092\3\2\2\2\u024b\u024c\7>\2\2\u024c\u0094\3\2\2\2")
buf.write("\u024d\u024e\7@\2\2\u024e\u0096\3\2\2\2\u024f\u0250\7")
buf.write(">\2\2\u0250\u0251\7?\2\2\u0251\u0098\3\2\2\2\u0252\u0253")
buf.write("\7@\2\2\u0253\u0254\7?\2\2\u0254\u009a\3\2\2\2\u0255\u0256")
buf.write("\7>\2\2\u0256\u0257\7>\2\2\u0257\u009c\3\2\2\2\u0258\u0259")
buf.write("\7@\2\2\u0259\u025a\7@\2\2\u025a\u009e\3\2\2\2\u025b\u025c")
buf.write("\7a\2\2\u025c\u025d\7a\2\2\u025d\u025e\7c\2\2\u025e\u025f")
buf.write("\7u\2\2\u025f\u0260\7o\2\2\u0260\u0261\7a\2\2\u0261\u0262")
buf.write("\7a\2\2\u0262\u00a0\3\2\2\2\u0263\u0264\7a\2\2\u0264\u0265")
buf.write("\7c\2\2\u0265\u0266\7u\2\2\u0266\u0267\7o\2\2\u0267\u00a2")
buf.write("\3\2\2\2\u0268\u0269\7a\2\2\u0269\u026a\7a\2\2\u026a\u026b")
buf.write("\7c\2\2\u026b\u026c\7u\2\2\u026c\u026d\7o\2\2\u026d\u00a4")
buf.write("\3\2\2\2\u026e\u026f\7e\2\2\u026f\u0270\7c\2\2\u0270\u0271")
buf.write("\7u\2\2\u0271\u0272\7g\2\2\u0272\u00a6\3\2\2\2\u0273\u0274")
buf.write("\7f\2\2\u0274\u0275\7g\2\2\u0275\u0276\7h\2\2\u0276\u0277")
buf.write("\7c\2\2\u0277\u0278\7w\2\2\u0278\u0279\7n\2\2\u0279\u027a")
buf.write("\7v\2\2\u027a\u00a8\3\2\2\2\u027b\u027c\7k\2\2\u027c\u027d")
buf.write("\7h\2\2\u027d\u00aa\3\2\2\2\u027e\u027f\7g\2\2\u027f\u0280")
buf.write("\7n\2\2\u0280\u0281\7u\2\2\u0281\u0282\7g\2\2\u0282\u00ac")
buf.write("\3\2\2\2\u0283\u0284\7u\2\2\u0284\u0285\7y\2\2\u0285\u0286")
buf.write("\7k\2\2\u0286\u0287\7v\2\2\u0287\u0288\7e\2\2\u0288\u0289")
buf.write("\7j\2\2\u0289\u00ae\3\2\2\2\u028a\u028b\7y\2\2\u028b\u028c")
buf.write("\7j\2\2\u028c\u028d\7k\2\2\u028d\u028e\7n\2\2\u028e\u028f")
buf.write("\7g\2\2\u028f\u00b0\3\2\2\2\u0290\u0291\7f\2\2\u0291\u0292")
buf.write("\7q\2\2\u0292\u00b2\3\2\2\2\u0293\u0294\7i\2\2\u0294\u0295")
buf.write("\7q\2\2\u0295\u0296\7v\2\2\u0296\u0297\7q\2\2\u0297\u00b4")
buf.write("\3\2\2\2\u0298\u0299\7e\2\2\u0299\u029a\7q\2\2\u029a\u029b")
buf.write("\7p\2\2\u029b\u029c\7v\2\2\u029c\u029d\7k\2\2\u029d\u029e")
buf.write("\7p\2\2\u029e\u029f\7w\2\2\u029f\u02a0\7g\2\2\u02a0\u00b6")
buf.write("\3\2\2\2\u02a1\u02a2\7d\2\2\u02a2\u02a3\7t\2\2\u02a3\u02a4")
buf.write("\7g\2\2\u02a4\u02a5\7c\2\2\u02a5\u02a6\7m\2\2\u02a6\u00b8")
buf.write("\3\2\2\2\u02a7\u02a8\7t\2\2\u02a8\u02a9\7g\2\2\u02a9\u02aa")
buf.write("\7v\2\2\u02aa\u02ab\7w\2\2\u02ab\u02ac\7t\2\2\u02ac\u02ad")
buf.write("\7p\2\2\u02ad\u00ba\3\2\2\2\u02ae\u02b3\5\u00bd_\2\u02af")
buf.write("\u02b2\5\u00bd_\2\u02b0\u02b2\4\62;\2\u02b1\u02af\3\2")
buf.write("\2\2\u02b1\u02b0\3\2\2\2\u02b2\u02b5\3\2\2\2\u02b3\u02b1")
buf.write("\3\2\2\2\u02b3\u02b4\3\2\2\2\u02b4\u00bc\3\2\2\2\u02b5")
buf.write("\u02b3\3\2\2\2\u02b6\u02b7\t\2\2\2\u02b7\u00be\3\2\2\2")
buf.write("\u02b8\u02ba\7N\2\2\u02b9\u02b8\3\2\2\2\u02b9\u02ba\3")
buf.write("\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02be\7)\2\2\u02bc\u02bf")
buf.write("\5\u00d3j\2\u02bd\u02bf\n\3\2\2\u02be\u02bc\3\2\2\2\u02be")
buf.write("\u02bd\3\2\2\2\u02bf\u02c0\3\2\2\2\u02c0\u02c1\7)\2\2")
buf.write("\u02c1\u00c0\3\2\2\2\u02c2\u02c4\7N\2\2\u02c3\u02c2\3")
buf.write("\2\2\2\u02c3\u02c4\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5\u02ca")
buf.write("\7$\2\2\u02c6\u02c9\5\u00d3j\2\u02c7\u02c9\n\4\2\2\u02c8")
buf.write("\u02c6\3\2\2\2\u02c8\u02c7\3\2\2\2\u02c9\u02cc\3\2\2\2")
buf.write("\u02ca\u02c8\3\2\2\2\u02ca\u02cb\3\2\2\2\u02cb\u02cd\3")
buf.write("\2\2\2\u02cc\u02ca\3\2\2\2\u02cd\u02ce\7$\2\2\u02ce\u00c2")
buf.write("\3\2\2\2\u02cf\u02d0\7\62\2\2\u02d0\u02d2\t\5\2\2\u02d1")
buf.write("\u02d3\5\u00c9e\2\u02d2\u02d1\3\2\2\2\u02d3\u02d4\3\2")
buf.write("\2\2\u02d4\u02d2\3\2\2\2\u02d4\u02d5\3\2\2\2\u02d5\u02d7")
buf.write("\3\2\2\2\u02d6\u02d8\5\u00cbf\2\u02d7\u02d6\3\2\2\2\u02d7")
buf.write("\u02d8\3\2\2\2\u02d8\u00c4\3\2\2\2\u02d9\u02e2\7\62\2")
buf.write("\2\u02da\u02de\4\63;\2\u02db\u02dd\4\62;\2\u02dc\u02db")
buf.write("\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2\u02de")
buf.write("\u02df\3\2\2\2\u02df\u02e2\3\2\2\2\u02e0\u02de\3\2\2\2")
buf.write("\u02e1\u02d9\3\2\2\2\u02e1\u02da\3\2\2\2\u02e2\u02e4\3")
buf.write("\2\2\2\u02e3\u02e5\5\u00cbf\2\u02e4\u02e3\3\2\2\2\u02e4")
buf.write("\u02e5\3\2\2\2\u02e5\u00c6\3\2\2\2\u02e6\u02e8\7\62\2")
buf.write("\2\u02e7\u02e9\4\629\2\u02e8\u02e7\3\2\2\2\u02e9\u02ea")
buf.write("\3\2\2\2\u02ea\u02e8\3\2\2\2\u02ea\u02eb\3\2\2\2\u02eb")
buf.write("\u02ed\3\2\2\2\u02ec\u02ee\5\u00cbf\2\u02ed\u02ec\3\2")
buf.write("\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00c8\3\2\2\2\u02ef\u02f0")
buf.write("\t\6\2\2\u02f0\u00ca\3\2\2\2\u02f1\u02f8\t\7\2\2\u02f2")
buf.write("\u02f3\t\b\2\2\u02f3\u02f8\t\t\2\2\u02f4\u02f5\t\b\2\2")
buf.write("\u02f5\u02f6\t\t\2\2\u02f6\u02f8\t\t\2\2\u02f7\u02f1\3")
buf.write("\2\2\2\u02f7\u02f2\3\2\2\2\u02f7\u02f4\3\2\2\2\u02f8\u00cc")
buf.write("\3\2\2\2\u02f9\u02fb\4\62;\2\u02fa\u02f9\3\2\2\2\u02fb")
buf.write("\u02fc\3\2\2\2\u02fc\u02fa\3\2\2\2\u02fc\u02fd\3\2\2\2")
buf.write("\u02fd\u02fe\3\2\2\2\u02fe\u0302\7\60\2\2\u02ff\u0301")
buf.write("\4\62;\2\u0300\u02ff\3\2\2\2\u0301\u0304\3\2\2\2\u0302")
buf.write("\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0306\3\2\2\2")
buf.write("\u0304\u0302\3\2\2\2\u0305\u0307\5\u00cfh\2\u0306\u0305")
buf.write("\3\2\2\2\u0306\u0307\3\2\2\2\u0307\u0309\3\2\2\2\u0308")
buf.write("\u030a\5\u00d1i\2\u0309\u0308\3\2\2\2\u0309\u030a\3\2")
buf.write("\2\2\u030a\u032a\3\2\2\2\u030b\u030d\7\60\2\2\u030c\u030e")
buf.write("\4\62;\2\u030d\u030c\3\2\2\2\u030e\u030f\3\2\2\2\u030f")
buf.write("\u030d\3\2\2\2\u030f\u0310\3\2\2\2\u0310\u0312\3\2\2\2")
buf.write("\u0311\u0313\5\u00cfh\2\u0312\u0311\3\2\2\2\u0312\u0313")
buf.write("\3\2\2\2\u0313\u0315\3\2\2\2\u0314\u0316\5\u00d1i\2\u0315")
buf.write("\u0314\3\2\2\2\u0315\u0316\3\2\2\2\u0316\u032a\3\2\2\2")
buf.write("\u0317\u0319\4\62;\2\u0318\u0317\3\2\2\2\u0319\u031a\3")
buf.write("\2\2\2\u031a\u0318\3\2\2\2\u031a\u031b\3\2\2\2\u031b\u031c")
buf.write("\3\2\2\2\u031c\u031e\5\u00cfh\2\u031d\u031f\5\u00d1i\2")
buf.write("\u031e\u031d\3\2\2\2\u031e\u031f\3\2\2\2\u031f\u032a\3")
buf.write("\2\2\2\u0320\u0322\4\62;\2\u0321\u0320\3\2\2\2\u0322\u0323")
buf.write("\3\2\2\2\u0323\u0321\3\2\2\2\u0323\u0324\3\2\2\2\u0324")
buf.write("\u0326\3\2\2\2\u0325\u0327\5\u00cfh\2\u0326\u0325\3\2")
buf.write("\2\2\u0326\u0327\3\2\2\2\u0327\u0328\3\2\2\2\u0328\u032a")
buf.write("\5\u00d1i\2\u0329\u02fa\3\2\2\2\u0329\u030b\3\2\2\2\u0329")
buf.write("\u0318\3\2\2\2\u0329\u0321\3\2\2\2\u032a\u00ce\3\2\2\2")
buf.write("\u032b\u032d\t\n\2\2\u032c\u032e\t\13\2\2\u032d\u032c")
buf.write("\3\2\2\2\u032d\u032e\3\2\2\2\u032e\u0330\3\2\2\2\u032f")
buf.write("\u0331\4\62;\2\u0330\u032f\3\2\2\2\u0331\u0332\3\2\2\2")
buf.write("\u0332\u0330\3\2\2\2\u0332\u0333\3\2\2\2\u0333\u00d0\3")
buf.write("\2\2\2\u0334\u0335\t\f\2\2\u0335\u00d2\3\2\2\2\u0336\u0337")
buf.write("\7^\2\2\u0337\u033a\t\r\2\2\u0338\u033a\5\u00d5k\2\u0339")
buf.write("\u0336\3\2\2\2\u0339\u0338\3\2\2\2\u033a\u00d4\3\2\2\2")
buf.write("\u033b\u033c\7^\2\2\u033c\u033d\4\62\65\2\u033d\u033e")
buf.write("\4\629\2\u033e\u0345\4\629\2\u033f\u0340\7^\2\2\u0340")
buf.write("\u0341\4\629\2\u0341\u0345\4\629\2\u0342\u0343\7^\2\2")
buf.write("\u0343\u0345\4\629\2\u0344\u033b\3\2\2\2\u0344\u033f\3")
buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0345\u00d6\3\2\2\2\u0346\u0347")
buf.write("\7^\2\2\u0347\u0348\7w\2\2\u0348\u0349\5\u00c9e\2\u0349")
buf.write("\u034a\5\u00c9e\2\u034a\u034b\5\u00c9e\2\u034b\u034c\5")
buf.write("\u00c9e\2\u034c\u00d8\3\2\2\2\u034d\u034e\t\16\2\2\u034e")
buf.write("\u034f\3\2\2\2\u034f\u0350\bm\2\2\u0350\u00da\3\2\2\2")
buf.write("\u0351\u0352\7^\2\2\u0352\u0353\3\2\2\2\u0353\u0354\b")
buf.write("n\2\2\u0354\u00dc\3\2\2\2\u0355\u0356\4\5\0\2\u0356\u00de")
buf.write("\3\2\2\2\u0357\u0358\7\61\2\2\u0358\u0359\7,\2\2\u0359")
buf.write("\u035d\3\2\2\2\u035a\u035c\13\2\2\2\u035b\u035a\3\2\2")
buf.write("\2\u035c\u035f\3\2\2\2\u035d\u035e\3\2\2\2\u035d\u035b")
buf.write("\3\2\2\2\u035e\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360")
buf.write("\u0361\7,\2\2\u0361\u0362\7\61\2\2\u0362\u0363\3\2\2\2")
buf.write("\u0363\u0364\bp\2\2\u0364\u00e0\3\2\2\2\u0365\u0366\7")
buf.write("\61\2\2\u0366\u0367\7\61\2\2\u0367\u036b\3\2\2\2\u0368")
buf.write("\u036a\n\17\2\2\u0369\u0368\3\2\2\2\u036a\u036d\3\2\2")
buf.write("\2\u036b\u0369\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036f")
buf.write("\3\2\2\2\u036d\u036b\3\2\2\2\u036e\u0370\7\17\2\2\u036f")
buf.write("\u036e\3\2\2\2\u036f\u0370\3\2\2\2\u0370\u0371\3\2\2\2")
buf.write("\u0371\u0372\7\f\2\2\u0372\u0373\3\2\2\2\u0373\u0374\b")
buf.write("q\2\2\u0374\u00e2\3\2\2\2\u0375\u0379\7%\2\2\u0376\u0378")
buf.write("\n\17\2\2\u0377\u0376\3\2\2\2\u0378\u037b\3\2\2\2\u0379")
buf.write("\u0377\3\2\2\2\u0379\u037a\3\2\2\2\u037a\u037d\3\2\2\2")
buf.write("\u037b\u0379\3\2\2\2\u037c\u037e\7\17\2\2\u037d\u037c")
buf.write("\3\2\2\2\u037d\u037e\3\2\2\2\u037e\u037f\3\2\2\2\u037f")
buf.write("\u0380\7\f\2\2\u0380\u0381\3\2\2\2\u0381\u0382\br\2\2")
buf.write("\u0382\u00e4\3\2\2\2\'\2\u02b1\u02b3\u02b9\u02be\u02c3")
buf.write("\u02c8\u02ca\u02d4\u02d7\u02de\u02e1\u02e4\u02ea\u02ed")
buf.write("\u02f7\u02fc\u0302\u0306\u0309\u030f\u0312\u0315\u031a")
buf.write("\u031e\u0323\u0326\u0329\u032d\u0332\u0339\u0344\u035d")
buf.write("\u036b\u036f\u0379\u037d\3\2\3\2")
return buf.getvalue()
class CLexer(Lexer):
    """ANTLR-generated lexer for the EDK2 C grammar (C.g4).

    Token-type constants, name tables and the ATN below are emitted by the
    ANTLR 4.7.1 tool and must stay in sync with the serialized ATN; do not
    edit them by hand.  The Store*/printTokenInfo helpers are grammar-action
    hooks that record recognized code fragments into FileProfile lists for
    later analysis by the Eot/Ecc tools.
    """
    # Deserialize the ATN once at class-definition time; shared by all instances.
    atn = ATNDeserializer().deserialize(serializedATN())
    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
    # Anonymous token types for the grammar's literal tokens (see literalNames).
    T__0 = 1
    T__1 = 2
    T__2 = 3
    T__3 = 4
    T__4 = 5
    T__5 = 6
    T__6 = 7
    T__7 = 8
    T__8 = 9
    T__9 = 10
    T__10 = 11
    T__11 = 12
    T__12 = 13
    T__13 = 14
    T__14 = 15
    T__15 = 16
    T__16 = 17
    T__17 = 18
    T__18 = 19
    T__19 = 20
    T__20 = 21
    T__21 = 22
    T__22 = 23
    T__23 = 24
    T__24 = 25
    T__25 = 26
    T__26 = 27
    T__27 = 28
    T__28 = 29
    T__29 = 30
    T__30 = 31
    T__31 = 32
    T__32 = 33
    T__33 = 34
    T__34 = 35
    T__35 = 36
    T__36 = 37
    T__37 = 38
    T__38 = 39
    T__39 = 40
    T__40 = 41
    T__41 = 42
    T__42 = 43
    T__43 = 44
    T__44 = 45
    T__45 = 46
    T__46 = 47
    T__47 = 48
    T__48 = 49
    T__49 = 50
    T__50 = 51
    T__51 = 52
    T__52 = 53
    T__53 = 54
    T__54 = 55
    T__55 = 56
    T__56 = 57
    T__57 = 58
    T__58 = 59
    T__59 = 60
    T__60 = 61
    T__61 = 62
    T__62 = 63
    T__63 = 64
    T__64 = 65
    T__65 = 66
    T__66 = 67
    T__67 = 68
    T__68 = 69
    T__69 = 70
    T__70 = 71
    T__71 = 72
    T__72 = 73
    T__73 = 74
    T__74 = 75
    T__75 = 76
    T__76 = 77
    T__77 = 78
    T__78 = 79
    T__79 = 80
    T__80 = 81
    T__81 = 82
    T__82 = 83
    T__83 = 84
    T__84 = 85
    T__85 = 86
    T__86 = 87
    T__87 = 88
    T__88 = 89
    T__89 = 90
    T__90 = 91
    T__91 = 92
    # Named token types (see symbolicNames).
    IDENTIFIER = 93
    CHARACTER_LITERAL = 94
    STRING_LITERAL = 95
    HEX_LITERAL = 96
    DECIMAL_LITERAL = 97
    OCTAL_LITERAL = 98
    FLOATING_POINT_LITERAL = 99
    WS = 100
    BS = 101
    UnicodeVocabulary = 102
    COMMENT = 103
    LINE_COMMENT = 104
    LINE_COMMAND = 105
    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
    modeNames = [ "DEFAULT_MODE" ]
    # Literal spellings indexed by token type (index 0 is unused).
    literalNames = [ "<INVALID>",
            "'{'", "';'", "'typedef'", "','", "'='", "'extern'", "'static'",
            "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'",
            "'int'", "'long'", "'float'", "'double'", "'signed'", "'unsigned'",
            "'}'", "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'",
            "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'",
            "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'", "'EFI_BOOTSERVICE'",
            "'EFI_RUNTIMESERVICE'", "'PACKED'", "'('", "')'", "'['", "']'",
            "'*'", "'...'", "'+'", "'-'", "'/'", "'%'", "'++'", "'--'",
            "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='", "'/='",
            "'%='", "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
            "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'",
            "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'", "'_asm'",
            "'__asm'", "'case'", "'default'", "'if'", "'else'", "'switch'",
            "'while'", "'do'", "'goto'", "'continue'", "'break'", "'return'" ]
    # Symbolic names for the named token types (starting at type 93).
    symbolicNames = [ "<INVALID>",
            "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL", "HEX_LITERAL",
            "DECIMAL_LITERAL", "OCTAL_LITERAL", "FLOATING_POINT_LITERAL",
            "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
            "LINE_COMMAND" ]
    # Lexer rule names in grammar order (includes fragment rules such as
    # LETTER, HexDigit, Exponent that do not produce tokens themselves).
    ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
                  "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
                  "T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
                  "T__20", "T__21", "T__22", "T__23", "T__24", "T__25",
                  "T__26", "T__27", "T__28", "T__29", "T__30", "T__31",
                  "T__32", "T__33", "T__34", "T__35", "T__36", "T__37",
                  "T__38", "T__39", "T__40", "T__41", "T__42", "T__43",
                  "T__44", "T__45", "T__46", "T__47", "T__48", "T__49",
                  "T__50", "T__51", "T__52", "T__53", "T__54", "T__55",
                  "T__56", "T__57", "T__58", "T__59", "T__60", "T__61",
                  "T__62", "T__63", "T__64", "T__65", "T__66", "T__67",
                  "T__68", "T__69", "T__70", "T__71", "T__72", "T__73",
                  "T__74", "T__75", "T__76", "T__77", "T__78", "T__79",
                  "T__80", "T__81", "T__82", "T__83", "T__84", "T__85",
                  "T__86", "T__87", "T__88", "T__89", "T__90", "T__91",
                  "IDENTIFIER", "LETTER", "CHARACTER_LITERAL", "STRING_LITERAL",
                  "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL", "HexDigit",
                  "IntegerTypeSuffix", "FLOATING_POINT_LITERAL", "Exponent",
                  "FloatTypeSuffix", "EscapeSequence", "OctalEscape", "UnicodeEscape",
                  "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT",
                  "LINE_COMMAND" ]
    grammarFileName = "C.g4"
    # @param output= sys.stdout Type: TextIO
    def __init__(self,input=None,output= sys.stdout):
        """Create a lexer over *input* (an ANTLR InputStream or None).

        Verifies the runtime matches the generating tool version (4.7.1)
        and installs the ATN-driven simulator that does the actual lexing.
        """
        super().__init__(input, output)
        self.checkVersion("4.7.1")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
    def printTokenInfo(self,line,offset,tokenText):
        """Debug helper: print a token as 'line,offset:text'."""
        print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
    def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
        """Record a predicate expression fragment (text plus its start/end
        (line, offset) positions) into FileProfile.PredicateExpressionList."""
        PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.PredicateExpressionList.append(PredExp)
    def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
        """Record an enum definition fragment into
        FileProfile.EnumerationDefinitionList."""
        EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.EnumerationDefinitionList.append(EnumDef)
    def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
        """Record a struct/union definition fragment into
        FileProfile.StructUnionDefinitionList."""
        SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.StructUnionDefinitionList.append(SUDef)
    def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
        """Record a typedef (FromText aliased as ToText) into
        FileProfile.TypedefDefinitionList."""
        Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.TypedefDefinitionList.append(Tdef)
    def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
        """Record a function definition (modifiers, declarator, and the
        positions of the span, opening brace, and declarator) into
        FileProfile.FunctionDefinitionList."""
        FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
        FileProfile.FunctionDefinitionList.append(FuncDef)
    def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
        """Record a variable declaration (modifiers plus declarator text)
        into FileProfile.VariableDeclarationList."""
        VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.VariableDeclarationList.append(VarDecl)
    def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
        """Record a function call site (callee name and parameter list text)
        into FileProfile.FunctionCallingList."""
        FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.FunctionCallingList.append(FuncCall)
| edk2-master | BaseTools/Source/Python/Eot/CParser4/CLexer.py |
edk2-master | BaseTools/Source/Python/Eot/CParser4/__init__.py |
|
# Generated from C.g4 by ANTLR 4.7.1
# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3k")
buf.write("\u0380\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\3\2\7\2\u0092\n\2\f\2\16\2\u0095")
buf.write("\13\2\3\3\5\3\u0098\n\3\3\3\3\3\7\3\u009c\n\3\f\3\16\3")
buf.write("\u009f\13\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u00a7\n\3\5\3")
buf.write("\u00a9\n\3\3\4\5\4\u00ac\n\4\3\4\3\4\6\4\u00b0\n\4\r\4")
buf.write("\16\4\u00b1\3\4\3\4\3\4\5\4\u00b7\n\4\3\4\3\4\3\5\3\5")
buf.write("\3\5\6\5\u00be\n\5\r\5\16\5\u00bf\3\6\3\6\5\6\u00c4\n")
buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6\u00cc\n\6\3\6\3\6\3\6\5")
buf.write("\6\u00d1\n\6\3\7\3\7\3\7\7\7\u00d6\n\7\f\7\16\7\u00d9")
buf.write("\13\7\3\b\3\b\3\b\5\b\u00de\n\b\3\t\3\t\3\n\3\n\3\n\3")
buf.write("\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n")
buf.write("\7\n\u00f3\n\n\f\n\16\n\u00f6\13\n\3\n\3\n\5\n\u00fa\n")
buf.write("\n\3\13\3\13\3\f\3\f\5\f\u0100\n\f\3\f\3\f\3\f\3\f\3\f")
buf.write("\3\f\3\f\5\f\u0109\n\f\3\r\3\r\3\16\6\16\u010e\n\16\r")
buf.write("\16\16\16\u010f\3\17\3\17\3\17\3\17\3\20\3\20\6\20\u0118")
buf.write("\n\20\r\20\16\20\u0119\3\21\3\21\3\21\7\21\u011f\n\21")
buf.write("\f\21\16\21\u0122\13\21\3\22\3\22\3\22\5\22\u0127\n\22")
buf.write("\3\22\3\22\5\22\u012b\n\22\3\23\3\23\3\23\3\23\5\23\u0131")
buf.write("\n\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u013a\n")
buf.write("\23\3\23\3\23\3\23\3\23\5\23\u0140\n\23\3\24\3\24\3\24")
buf.write("\7\24\u0145\n\24\f\24\16\24\u0148\13\24\3\25\3\25\3\25")
buf.write("\5\25\u014d\n\25\3\26\3\26\3\27\5\27\u0152\n\27\3\27\5")
buf.write("\27\u0155\n\27\3\27\5\27\u0158\n\27\3\27\5\27\u015b\n")
buf.write("\27\3\27\3\27\5\27\u015f\n\27\3\30\3\30\7\30\u0163\n\30")
buf.write("\f\30\16\30\u0166\13\30\3\30\3\30\5\30\u016a\n\30\3\30")
buf.write("\3\30\3\30\6\30\u016f\n\30\r\30\16\30\u0170\5\30\u0173")
buf.write("\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\31")
buf.write("\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u0185\n\31\3\32\3")
buf.write("\32\6\32\u0189\n\32\r\32\16\32\u018a\3\32\5\32\u018e\n")
buf.write("\32\3\32\3\32\3\32\5\32\u0193\n\32\3\33\3\33\3\33\5\33")
buf.write("\u0198\n\33\3\33\5\33\u019b\n\33\3\34\3\34\3\34\5\34\u01a0")
buf.write("\n\34\3\34\7\34\u01a3\n\34\f\34\16\34\u01a6\13\34\3\35")
buf.write("\3\35\3\35\7\35\u01ab\n\35\f\35\16\35\u01ae\13\35\3\35")
buf.write("\5\35\u01b1\n\35\3\35\7\35\u01b4\n\35\f\35\16\35\u01b7")
buf.write("\13\35\3\35\5\35\u01ba\n\35\3\36\3\36\3\36\7\36\u01bf")
buf.write("\n\36\f\36\16\36\u01c2\13\36\3\37\3\37\5\37\u01c6\n\37")
buf.write("\3\37\5\37\u01c9\n\37\3 \3 \5 \u01cd\n \3 \5 \u01d0\n")
buf.write(" \3!\3!\3!\3!\3!\5!\u01d7\n!\3!\7!\u01da\n!\f!\16!\u01dd")
buf.write("\13!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5")
buf.write("\"\u01eb\n\"\3#\3#\3#\3#\5#\u01f1\n#\3#\3#\5#\u01f5\n")
buf.write("#\3$\3$\3$\7$\u01fa\n$\f$\16$\u01fd\13$\3%\3%\5%\u0201")
buf.write("\n%\3%\3%\3%\5%\u0206\n%\7%\u0208\n%\f%\16%\u020b\13%")
buf.write("\3&\3&\3&\3&\3&\7&\u0212\n&\f&\16&\u0215\13&\3\'\3\'\3")
buf.write("\'\3\'\3\'\3\'\3\'\7\'\u021e\n\'\f\'\16\'\u0221\13\'\3")
buf.write("(\3(\3(\3(\3(\3(\5(\u0229\n(\3)\3)\3)\3)\3)\3)\3)\3)\3")
buf.write(")\3)\3)\3)\3)\3)\3)\5)\u023a\n)\3*\3*\3*\3*\3*\3*\3*\3")
buf.write("*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3*\3")
buf.write("*\3*\3*\3*\7*\u0259\n*\f*\16*\u025c\13*\3+\3+\3+\7+\u0261")
buf.write("\n+\f+\16+\u0264\13+\3,\3,\3-\3-\3-\3-\3-\3-\5-\u026e")
buf.write("\n-\3.\3.\3.\3.\3.\7.\u0275\n.\f.\16.\u0278\13.\3.\6.")
buf.write("\u027b\n.\r.\16.\u027c\6.\u027f\n.\r.\16.\u0280\3.\7.")
buf.write("\u0284\n.\f.\16.\u0287\13.\3.\5.\u028a\n.\3/\3/\3/\7/")
buf.write("\u028f\n/\f/\16/\u0292\13/\3\60\3\60\3\61\3\61\3\61\3")
buf.write("\61\3\61\5\61\u029b\n\61\3\62\3\62\3\63\3\63\3\64\3\64")
buf.write("\3\64\3\64\3\64\3\64\3\64\5\64\u02a8\n\64\3\65\3\65\3")
buf.write("\65\7\65\u02ad\n\65\f\65\16\65\u02b0\13\65\3\66\3\66\3")
buf.write("\66\7\66\u02b5\n\66\f\66\16\66\u02b8\13\66\3\67\3\67\3")
buf.write("\67\7\67\u02bd\n\67\f\67\16\67\u02c0\13\67\38\38\38\7")
buf.write("8\u02c5\n8\f8\168\u02c8\138\39\39\39\79\u02cd\n9\f9\16")
buf.write("9\u02d0\139\3:\3:\3:\7:\u02d5\n:\f:\16:\u02d8\13:\3;\3")
buf.write(";\3;\7;\u02dd\n;\f;\16;\u02e0\13;\3<\3<\3<\7<\u02e5\n")
buf.write("<\f<\16<\u02e8\13<\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\5")
buf.write("=\u02f5\n=\3>\5>\u02f8\n>\3>\3>\3>\7>\u02fd\n>\f>\16>")
buf.write("\u0300\13>\3>\3>\3>\3?\3?\3?\7?\u0308\n?\f?\16?\u030b")
buf.write("\13?\3?\3?\3@\3@\3@\7@\u0312\n@\f@\16@\u0315\13@\3@\3")
buf.write("@\3A\3A\3A\7A\u031c\nA\fA\16A\u031f\13A\3A\5A\u0322\n")
buf.write("A\3A\5A\u0325\nA\3A\3A\3B\3B\3B\3B\3B\3B\3B\3B\3B\3B\3")
buf.write("B\5B\u0334\nB\3C\3C\7C\u0338\nC\fC\16C\u033b\13C\3C\5")
buf.write("C\u033e\nC\3C\3C\3D\6D\u0343\nD\rD\16D\u0344\3E\3E\3E")
buf.write("\3E\5E\u034b\nE\3F\3F\3F\3F\3F\3F\3F\3F\5F\u0355\nF\3")
buf.write("F\3F\3F\3F\3F\3F\5F\u035d\nF\3G\3G\3G\3G\3G\3G\3G\3G\3")
buf.write("G\3G\3G\3G\3G\3G\3G\3G\5G\u036f\nG\3H\3H\3H\3H\3H\3H\3")
buf.write("H\3H\3H\3H\3H\3H\3H\5H\u037e\nH\3H\2\2I\2\4\6\b\n\f\16")
buf.write("\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDF")
buf.write("HJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084\u0086")
buf.write("\u0088\u008a\u008c\u008e\2\f\3\2\b\f\3\2\27\30\3\2\33")
buf.write("\'\5\2,,./\679\4\2\7\7:C\3\2IJ\3\2KN\3\2OP\3\2\4\4\3\2")
buf.write("\26\26\2\u03d8\2\u0093\3\2\2\2\4\u00a8\3\2\2\2\6\u00ab")
buf.write("\3\2\2\2\b\u00bd\3\2\2\2\n\u00d0\3\2\2\2\f\u00d2\3\2\2")
buf.write("\2\16\u00da\3\2\2\2\20\u00df\3\2\2\2\22\u00f9\3\2\2\2")
buf.write("\24\u00fb\3\2\2\2\26\u0108\3\2\2\2\30\u010a\3\2\2\2\32")
buf.write("\u010d\3\2\2\2\34\u0111\3\2\2\2\36\u0117\3\2\2\2 \u011b")
buf.write("\3\2\2\2\"\u012a\3\2\2\2$\u013f\3\2\2\2&\u0141\3\2\2\2")
buf.write("(\u0149\3\2\2\2*\u014e\3\2\2\2,\u015e\3\2\2\2.\u0172\3")
buf.write("\2\2\2\60\u0184\3\2\2\2\62\u0192\3\2\2\2\64\u0194\3\2")
buf.write("\2\2\66\u019c\3\2\2\28\u01b9\3\2\2\2:\u01bb\3\2\2\2<\u01c8")
buf.write("\3\2\2\2>\u01cf\3\2\2\2@\u01d6\3\2\2\2B\u01ea\3\2\2\2")
buf.write("D\u01f4\3\2\2\2F\u01f6\3\2\2\2H\u01fe\3\2\2\2J\u020c\3")
buf.write("\2\2\2L\u0216\3\2\2\2N\u0228\3\2\2\2P\u0239\3\2\2\2R\u023b")
buf.write("\3\2\2\2T\u025d\3\2\2\2V\u0265\3\2\2\2X\u026d\3\2\2\2")
buf.write("Z\u0289\3\2\2\2\\\u028b\3\2\2\2^\u0293\3\2\2\2`\u029a")
buf.write("\3\2\2\2b\u029c\3\2\2\2d\u029e\3\2\2\2f\u02a0\3\2\2\2")
buf.write("h\u02a9\3\2\2\2j\u02b1\3\2\2\2l\u02b9\3\2\2\2n\u02c1\3")
buf.write("\2\2\2p\u02c9\3\2\2\2r\u02d1\3\2\2\2t\u02d9\3\2\2\2v\u02e1")
buf.write("\3\2\2\2x\u02f4\3\2\2\2z\u02f7\3\2\2\2|\u0304\3\2\2\2")
buf.write("~\u030e\3\2\2\2\u0080\u0318\3\2\2\2\u0082\u0333\3\2\2")
buf.write("\2\u0084\u0335\3\2\2\2\u0086\u0342\3\2\2\2\u0088\u034a")
buf.write("\3\2\2\2\u008a\u035c\3\2\2\2\u008c\u036e\3\2\2\2\u008e")
buf.write("\u037d\3\2\2\2\u0090\u0092\5\4\3\2\u0091\u0090\3\2\2\2")
buf.write("\u0092\u0095\3\2\2\2\u0093\u0091\3\2\2\2\u0093\u0094\3")
buf.write("\2\2\2\u0094\3\3\2\2\2\u0095\u0093\3\2\2\2\u0096\u0098")
buf.write("\5\b\5\2\u0097\u0096\3\2\2\2\u0097\u0098\3\2\2\2\u0098")
buf.write("\u0099\3\2\2\2\u0099\u009d\5,\27\2\u009a\u009c\5\n\6\2")
buf.write("\u009b\u009a\3\2\2\2\u009c\u009f\3\2\2\2\u009d\u009b\3")
buf.write("\2\2\2\u009d\u009e\3\2\2\2\u009e\u00a0\3\2\2\2\u009f\u009d")
buf.write("\3\2\2\2\u00a0\u00a1\7\3\2\2\u00a1\u00a9\3\2\2\2\u00a2")
buf.write("\u00a9\5\6\4\2\u00a3\u00a9\5\n\6\2\u00a4\u00a6\5\u0080")
buf.write("A\2\u00a5\u00a7\7\4\2\2\u00a6\u00a5\3\2\2\2\u00a6\u00a7")
buf.write("\3\2\2\2\u00a7\u00a9\3\2\2\2\u00a8\u0097\3\2\2\2\u00a8")
buf.write("\u00a2\3\2\2\2\u00a8\u00a3\3\2\2\2\u00a8\u00a4\3\2\2\2")
buf.write("\u00a9\5\3\2\2\2\u00aa\u00ac\5\b\5\2\u00ab\u00aa\3\2\2")
buf.write("\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b6")
buf.write("\5,\27\2\u00ae\u00b0\5\n\6\2\u00af\u00ae\3\2\2\2\u00b0")
buf.write("\u00b1\3\2\2\2\u00b1\u00af\3\2\2\2\u00b1\u00b2\3\2\2\2")
buf.write("\u00b2\u00b3\3\2\2\2\u00b3\u00b4\5\u0084C\2\u00b4\u00b7")
buf.write("\3\2\2\2\u00b5\u00b7\5\u0084C\2\u00b6\u00af\3\2\2\2\u00b6")
buf.write("\u00b5\3\2\2\2\u00b7\u00b8\3\2\2\2\u00b8\u00b9\b\4\1\2")
buf.write("\u00b9\7\3\2\2\2\u00ba\u00be\5\20\t\2\u00bb\u00be\5\22")
buf.write("\n\2\u00bc\u00be\5*\26\2\u00bd\u00ba\3\2\2\2\u00bd\u00bb")
buf.write("\3\2\2\2\u00bd\u00bc\3\2\2\2\u00be\u00bf\3\2\2\2\u00bf")
buf.write("\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\t\3\2\2\2\u00c1")
buf.write("\u00c3\7\5\2\2\u00c2\u00c4\5\b\5\2\u00c3\u00c2\3\2\2\2")
buf.write("\u00c3\u00c4\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c6\5")
buf.write("\f\7\2\u00c6\u00c7\7\4\2\2\u00c7\u00c8\b\6\1\2\u00c8\u00d1")
buf.write("\3\2\2\2\u00c9\u00cb\5\b\5\2\u00ca\u00cc\5\f\7\2\u00cb")
buf.write("\u00ca\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cd\3\2\2\2")
buf.write("\u00cd\u00ce\7\4\2\2\u00ce\u00cf\b\6\1\2\u00cf\u00d1\3")
buf.write("\2\2\2\u00d0\u00c1\3\2\2\2\u00d0\u00c9\3\2\2\2\u00d1\13")
buf.write("\3\2\2\2\u00d2\u00d7\5\16\b\2\u00d3\u00d4\7\6\2\2\u00d4")
buf.write("\u00d6\5\16\b\2\u00d5\u00d3\3\2\2\2\u00d6\u00d9\3\2\2")
buf.write("\2\u00d7\u00d5\3\2\2\2\u00d7\u00d8\3\2\2\2\u00d8\r\3\2")
buf.write("\2\2\u00d9\u00d7\3\2\2\2\u00da\u00dd\5,\27\2\u00db\u00dc")
buf.write("\7\7\2\2\u00dc\u00de\5D#\2\u00dd\u00db\3\2\2\2\u00dd\u00de")
buf.write("\3\2\2\2\u00de\17\3\2\2\2\u00df\u00e0\t\2\2\2\u00e0\21")
buf.write("\3\2\2\2\u00e1\u00fa\7\r\2\2\u00e2\u00fa\7\16\2\2\u00e3")
buf.write("\u00fa\7\17\2\2\u00e4\u00fa\7\20\2\2\u00e5\u00fa\7\21")
buf.write("\2\2\u00e6\u00fa\7\22\2\2\u00e7\u00fa\7\23\2\2\u00e8\u00fa")
buf.write("\7\24\2\2\u00e9\u00fa\7\25\2\2\u00ea\u00eb\5\26\f\2\u00eb")
buf.write("\u00ec\b\n\1\2\u00ec\u00fa\3\2\2\2\u00ed\u00ee\5$\23\2")
buf.write("\u00ee\u00ef\b\n\1\2\u00ef\u00fa\3\2\2\2\u00f0\u00f4\7")
buf.write("_\2\2\u00f1\u00f3\5*\26\2\u00f2\u00f1\3\2\2\2\u00f3\u00f6")
buf.write("\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4\u00f5\3\2\2\2\u00f5")
buf.write("\u00f7\3\2\2\2\u00f6\u00f4\3\2\2\2\u00f7\u00fa\5,\27\2")
buf.write("\u00f8\u00fa\5\24\13\2\u00f9\u00e1\3\2\2\2\u00f9\u00e2")
buf.write("\3\2\2\2\u00f9\u00e3\3\2\2\2\u00f9\u00e4\3\2\2\2\u00f9")
buf.write("\u00e5\3\2\2\2\u00f9\u00e6\3\2\2\2\u00f9\u00e7\3\2\2\2")
buf.write("\u00f9\u00e8\3\2\2\2\u00f9\u00e9\3\2\2\2\u00f9\u00ea\3")
buf.write("\2\2\2\u00f9\u00ed\3\2\2\2\u00f9\u00f0\3\2\2\2\u00f9\u00f8")
buf.write("\3\2\2\2\u00fa\23\3\2\2\2\u00fb\u00fc\7_\2\2\u00fc\25")
buf.write("\3\2\2\2\u00fd\u00ff\5\30\r\2\u00fe\u0100\7_\2\2\u00ff")
buf.write("\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\u0101\3\2\2\2")
buf.write("\u0101\u0102\7\3\2\2\u0102\u0103\5\32\16\2\u0103\u0104")
buf.write("\7\26\2\2\u0104\u0109\3\2\2\2\u0105\u0106\5\30\r\2\u0106")
buf.write("\u0107\7_\2\2\u0107\u0109\3\2\2\2\u0108\u00fd\3\2\2\2")
buf.write("\u0108\u0105\3\2\2\2\u0109\27\3\2\2\2\u010a\u010b\t\3")
buf.write("\2\2\u010b\31\3\2\2\2\u010c\u010e\5\34\17\2\u010d\u010c")
buf.write("\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u010d\3\2\2\2\u010f")
buf.write("\u0110\3\2\2\2\u0110\33\3\2\2\2\u0111\u0112\5\36\20\2")
buf.write("\u0112\u0113\5 \21\2\u0113\u0114\7\4\2\2\u0114\35\3\2")
buf.write("\2\2\u0115\u0118\5*\26\2\u0116\u0118\5\22\n\2\u0117\u0115")
buf.write("\3\2\2\2\u0117\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119")
buf.write("\u0117\3\2\2\2\u0119\u011a\3\2\2\2\u011a\37\3\2\2\2\u011b")
buf.write("\u0120\5\"\22\2\u011c\u011d\7\6\2\2\u011d\u011f\5\"\22")
buf.write("\2\u011e\u011c\3\2\2\2\u011f\u0122\3\2\2\2\u0120\u011e")
buf.write("\3\2\2\2\u0120\u0121\3\2\2\2\u0121!\3\2\2\2\u0122\u0120")
buf.write("\3\2\2\2\u0123\u0126\5,\27\2\u0124\u0125\7\31\2\2\u0125")
buf.write("\u0127\5^\60\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2")
buf.write("\u0127\u012b\3\2\2\2\u0128\u0129\7\31\2\2\u0129\u012b")
buf.write("\5^\60\2\u012a\u0123\3\2\2\2\u012a\u0128\3\2\2\2\u012b")
buf.write("#\3\2\2\2\u012c\u012d\7\32\2\2\u012d\u012e\7\3\2\2\u012e")
buf.write("\u0130\5&\24\2\u012f\u0131\7\6\2\2\u0130\u012f\3\2\2\2")
buf.write("\u0130\u0131\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u0133\7")
buf.write("\26\2\2\u0133\u0140\3\2\2\2\u0134\u0135\7\32\2\2\u0135")
buf.write("\u0136\7_\2\2\u0136\u0137\7\3\2\2\u0137\u0139\5&\24\2")
buf.write("\u0138\u013a\7\6\2\2\u0139\u0138\3\2\2\2\u0139\u013a\3")
buf.write("\2\2\2\u013a\u013b\3\2\2\2\u013b\u013c\7\26\2\2\u013c")
buf.write("\u0140\3\2\2\2\u013d\u013e\7\32\2\2\u013e\u0140\7_\2\2")
buf.write("\u013f\u012c\3\2\2\2\u013f\u0134\3\2\2\2\u013f\u013d\3")
buf.write("\2\2\2\u0140%\3\2\2\2\u0141\u0146\5(\25\2\u0142\u0143")
buf.write("\7\6\2\2\u0143\u0145\5(\25\2\u0144\u0142\3\2\2\2\u0145")
buf.write("\u0148\3\2\2\2\u0146\u0144\3\2\2\2\u0146\u0147\3\2\2\2")
buf.write("\u0147\'\3\2\2\2\u0148\u0146\3\2\2\2\u0149\u014c\7_\2")
buf.write("\2\u014a\u014b\7\7\2\2\u014b\u014d\5^\60\2\u014c\u014a")
buf.write("\3\2\2\2\u014c\u014d\3\2\2\2\u014d)\3\2\2\2\u014e\u014f")
buf.write("\t\4\2\2\u014f+\3\2\2\2\u0150\u0152\5\62\32\2\u0151\u0150")
buf.write("\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0154\3\2\2\2\u0153")
buf.write("\u0155\7$\2\2\u0154\u0153\3\2\2\2\u0154\u0155\3\2\2\2")
buf.write("\u0155\u0157\3\2\2\2\u0156\u0158\7%\2\2\u0157\u0156\3")
buf.write("\2\2\2\u0157\u0158\3\2\2\2\u0158\u015a\3\2\2\2\u0159\u015b")
buf.write("\7&\2\2\u015a\u0159\3\2\2\2\u015a\u015b\3\2\2\2\u015b")
buf.write("\u015c\3\2\2\2\u015c\u015f\5.\30\2\u015d\u015f\5\62\32")
buf.write("\2\u015e\u0151\3\2\2\2\u015e\u015d\3\2\2\2\u015f-\3\2")
buf.write("\2\2\u0160\u0164\7_\2\2\u0161\u0163\5\60\31\2\u0162\u0161")
buf.write("\3\2\2\2\u0163\u0166\3\2\2\2\u0164\u0162\3\2\2\2\u0164")
buf.write("\u0165\3\2\2\2\u0165\u0173\3\2\2\2\u0166\u0164\3\2\2\2")
buf.write("\u0167\u0169\7(\2\2\u0168\u016a\7$\2\2\u0169\u0168\3\2")
buf.write("\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u016c")
buf.write("\5,\27\2\u016c\u016e\7)\2\2\u016d\u016f\5\60\31\2\u016e")
buf.write("\u016d\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u016e\3\2\2\2")
buf.write("\u0170\u0171\3\2\2\2\u0171\u0173\3\2\2\2\u0172\u0160\3")
buf.write("\2\2\2\u0172\u0167\3\2\2\2\u0173/\3\2\2\2\u0174\u0175")
buf.write("\7*\2\2\u0175\u0176\5^\60\2\u0176\u0177\7+\2\2\u0177\u0185")
buf.write("\3\2\2\2\u0178\u0179\7*\2\2\u0179\u0185\7+\2\2\u017a\u017b")
buf.write("\7(\2\2\u017b\u017c\5\64\33\2\u017c\u017d\7)\2\2\u017d")
buf.write("\u0185\3\2\2\2\u017e\u017f\7(\2\2\u017f\u0180\5:\36\2")
buf.write("\u0180\u0181\7)\2\2\u0181\u0185\3\2\2\2\u0182\u0183\7")
buf.write("(\2\2\u0183\u0185\7)\2\2\u0184\u0174\3\2\2\2\u0184\u0178")
buf.write("\3\2\2\2\u0184\u017a\3\2\2\2\u0184\u017e\3\2\2\2\u0184")
buf.write("\u0182\3\2\2\2\u0185\61\3\2\2\2\u0186\u0188\7,\2\2\u0187")
buf.write("\u0189\5*\26\2\u0188\u0187\3\2\2\2\u0189\u018a\3\2\2\2")
buf.write("\u018a\u0188\3\2\2\2\u018a\u018b\3\2\2\2\u018b\u018d\3")
buf.write("\2\2\2\u018c\u018e\5\62\32\2\u018d\u018c\3\2\2\2\u018d")
buf.write("\u018e\3\2\2\2\u018e\u0193\3\2\2\2\u018f\u0190\7,\2\2")
buf.write("\u0190\u0193\5\62\32\2\u0191\u0193\7,\2\2\u0192\u0186")
buf.write("\3\2\2\2\u0192\u018f\3\2\2\2\u0192\u0191\3\2\2\2\u0193")
buf.write("\63\3\2\2\2\u0194\u019a\5\66\34\2\u0195\u0197\7\6\2\2")
buf.write("\u0196\u0198\7\37\2\2\u0197\u0196\3\2\2\2\u0197\u0198")
buf.write("\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019b\7-\2\2\u019a")
buf.write("\u0195\3\2\2\2\u019a\u019b\3\2\2\2\u019b\65\3\2\2\2\u019c")
buf.write("\u01a4\58\35\2\u019d\u019f\7\6\2\2\u019e\u01a0\7\37\2")
buf.write("\2\u019f\u019e\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a1")
buf.write("\3\2\2\2\u01a1\u01a3\58\35\2\u01a2\u019d\3\2\2\2\u01a3")
buf.write("\u01a6\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2")
buf.write("\u01a5\67\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a7\u01ac\5\b")
buf.write("\5\2\u01a8\u01ab\5,\27\2\u01a9\u01ab\5> \2\u01aa\u01a8")
buf.write("\3\2\2\2\u01aa\u01a9\3\2\2\2\u01ab\u01ae\3\2\2\2\u01ac")
buf.write("\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad\u01b0\3\2\2\2")
buf.write("\u01ae\u01ac\3\2\2\2\u01af\u01b1\7\37\2\2\u01b0\u01af")
buf.write("\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01ba\3\2\2\2\u01b2")
buf.write("\u01b4\5\62\32\2\u01b3\u01b2\3\2\2\2\u01b4\u01b7\3\2\2")
buf.write("\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\u01b8")
buf.write("\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01ba\7_\2\2\u01b9")
buf.write("\u01a7\3\2\2\2\u01b9\u01b5\3\2\2\2\u01ba9\3\2\2\2\u01bb")
buf.write("\u01c0\7_\2\2\u01bc\u01bd\7\6\2\2\u01bd\u01bf\7_\2\2\u01be")
buf.write("\u01bc\3\2\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01be\3\2\2\2")
buf.write("\u01c0\u01c1\3\2\2\2\u01c1;\3\2\2\2\u01c2\u01c0\3\2\2")
buf.write("\2\u01c3\u01c5\5\36\20\2\u01c4\u01c6\5> \2\u01c5\u01c4")
buf.write("\3\2\2\2\u01c5\u01c6\3\2\2\2\u01c6\u01c9\3\2\2\2\u01c7")
buf.write("\u01c9\5\24\13\2\u01c8\u01c3\3\2\2\2\u01c8\u01c7\3\2\2")
buf.write("\2\u01c9=\3\2\2\2\u01ca\u01cc\5\62\32\2\u01cb\u01cd\5")
buf.write("@!\2\u01cc\u01cb\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd\u01d0")
buf.write("\3\2\2\2\u01ce\u01d0\5@!\2\u01cf\u01ca\3\2\2\2\u01cf\u01ce")
buf.write("\3\2\2\2\u01d0?\3\2\2\2\u01d1\u01d2\7(\2\2\u01d2\u01d3")
buf.write("\5> \2\u01d3\u01d4\7)\2\2\u01d4\u01d7\3\2\2\2\u01d5\u01d7")
buf.write("\5B\"\2\u01d6\u01d1\3\2\2\2\u01d6\u01d5\3\2\2\2\u01d7")
buf.write("\u01db\3\2\2\2\u01d8\u01da\5B\"\2\u01d9\u01d8\3\2\2\2")
buf.write("\u01da\u01dd\3\2\2\2\u01db\u01d9\3\2\2\2\u01db\u01dc\3")
buf.write("\2\2\2\u01dcA\3\2\2\2\u01dd\u01db\3\2\2\2\u01de\u01df")
buf.write("\7*\2\2\u01df\u01eb\7+\2\2\u01e0\u01e1\7*\2\2\u01e1\u01e2")
buf.write("\5^\60\2\u01e2\u01e3\7+\2\2\u01e3\u01eb\3\2\2\2\u01e4")
buf.write("\u01e5\7(\2\2\u01e5\u01eb\7)\2\2\u01e6\u01e7\7(\2\2\u01e7")
buf.write("\u01e8\5\64\33\2\u01e8\u01e9\7)\2\2\u01e9\u01eb\3\2\2")
buf.write("\2\u01ea\u01de\3\2\2\2\u01ea\u01e0\3\2\2\2\u01ea\u01e4")
buf.write("\3\2\2\2\u01ea\u01e6\3\2\2\2\u01ebC\3\2\2\2\u01ec\u01f5")
buf.write("\5`\61\2\u01ed\u01ee\7\3\2\2\u01ee\u01f0\5F$\2\u01ef\u01f1")
buf.write("\7\6\2\2\u01f0\u01ef\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1")
buf.write("\u01f2\3\2\2\2\u01f2\u01f3\7\26\2\2\u01f3\u01f5\3\2\2")
buf.write("\2\u01f4\u01ec\3\2\2\2\u01f4\u01ed\3\2\2\2\u01f5E\3\2")
buf.write("\2\2\u01f6\u01fb\5D#\2\u01f7\u01f8\7\6\2\2\u01f8\u01fa")
buf.write("\5D#\2\u01f9\u01f7\3\2\2\2\u01fa\u01fd\3\2\2\2\u01fb\u01f9")
buf.write("\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fcG\3\2\2\2\u01fd\u01fb")
buf.write("\3\2\2\2\u01fe\u0200\5`\61\2\u01ff\u0201\7\37\2\2\u0200")
buf.write("\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0209\3\2\2\2")
buf.write("\u0202\u0203\7\6\2\2\u0203\u0205\5`\61\2\u0204\u0206\7")
buf.write("\37\2\2\u0205\u0204\3\2\2\2\u0205\u0206\3\2\2\2\u0206")
buf.write("\u0208\3\2\2\2\u0207\u0202\3\2\2\2\u0208\u020b\3\2\2\2")
buf.write("\u0209\u0207\3\2\2\2\u0209\u020a\3\2\2\2\u020aI\3\2\2")
buf.write("\2\u020b\u0209\3\2\2\2\u020c\u0213\5L\'\2\u020d\u020e")
buf.write("\7.\2\2\u020e\u0212\5L\'\2\u020f\u0210\7/\2\2\u0210\u0212")
buf.write("\5L\'\2\u0211\u020d\3\2\2\2\u0211\u020f\3\2\2\2\u0212")
buf.write("\u0215\3\2\2\2\u0213\u0211\3\2\2\2\u0213\u0214\3\2\2\2")
buf.write("\u0214K\3\2\2\2\u0215\u0213\3\2\2\2\u0216\u021f\5N(\2")
buf.write("\u0217\u0218\7,\2\2\u0218\u021e\5N(\2\u0219\u021a\7\60")
buf.write("\2\2\u021a\u021e\5N(\2\u021b\u021c\7\61\2\2\u021c\u021e")
buf.write("\5N(\2\u021d\u0217\3\2\2\2\u021d\u0219\3\2\2\2\u021d\u021b")
buf.write("\3\2\2\2\u021e\u0221\3\2\2\2\u021f\u021d\3\2\2\2\u021f")
buf.write("\u0220\3\2\2\2\u0220M\3\2\2\2\u0221\u021f\3\2\2\2\u0222")
buf.write("\u0223\7(\2\2\u0223\u0224\5<\37\2\u0224\u0225\7)\2\2\u0225")
buf.write("\u0226\5N(\2\u0226\u0229\3\2\2\2\u0227\u0229\5P)\2\u0228")
buf.write("\u0222\3\2\2\2\u0228\u0227\3\2\2\2\u0229O\3\2\2\2\u022a")
buf.write("\u023a\5R*\2\u022b\u022c\7\62\2\2\u022c\u023a\5P)\2\u022d")
buf.write("\u022e\7\63\2\2\u022e\u023a\5P)\2\u022f\u0230\5V,\2\u0230")
buf.write("\u0231\5N(\2\u0231\u023a\3\2\2\2\u0232\u0233\7\64\2\2")
buf.write("\u0233\u023a\5P)\2\u0234\u0235\7\64\2\2\u0235\u0236\7")
buf.write("(\2\2\u0236\u0237\5<\37\2\u0237\u0238\7)\2\2\u0238\u023a")
buf.write("\3\2\2\2\u0239\u022a\3\2\2\2\u0239\u022b\3\2\2\2\u0239")
buf.write("\u022d\3\2\2\2\u0239\u022f\3\2\2\2\u0239\u0232\3\2\2\2")
buf.write("\u0239\u0234\3\2\2\2\u023aQ\3\2\2\2\u023b\u023c\5X-\2")
buf.write("\u023c\u025a\b*\1\2\u023d\u023e\7*\2\2\u023e\u023f\5\\")
buf.write("/\2\u023f\u0240\7+\2\2\u0240\u0259\3\2\2\2\u0241\u0242")
buf.write("\7(\2\2\u0242\u0243\7)\2\2\u0243\u0259\b*\1\2\u0244\u0245")
buf.write("\7(\2\2\u0245\u0246\5H%\2\u0246\u0247\7)\2\2\u0247\u0248")
buf.write("\b*\1\2\u0248\u0259\3\2\2\2\u0249\u024a\7(\2\2\u024a\u024b")
buf.write("\5T+\2\u024b\u024c\7)\2\2\u024c\u0259\3\2\2\2\u024d\u024e")
buf.write("\7\65\2\2\u024e\u024f\7_\2\2\u024f\u0259\b*\1\2\u0250")
buf.write("\u0251\7,\2\2\u0251\u0252\7_\2\2\u0252\u0259\b*\1\2\u0253")
buf.write("\u0254\7\66\2\2\u0254\u0255\7_\2\2\u0255\u0259\b*\1\2")
buf.write("\u0256\u0259\7\62\2\2\u0257\u0259\7\63\2\2\u0258\u023d")
buf.write("\3\2\2\2\u0258\u0241\3\2\2\2\u0258\u0244\3\2\2\2\u0258")
buf.write("\u0249\3\2\2\2\u0258\u024d\3\2\2\2\u0258\u0250\3\2\2\2")
buf.write("\u0258\u0253\3\2\2\2\u0258\u0256\3\2\2\2\u0258\u0257\3")
buf.write("\2\2\2\u0259\u025c\3\2\2\2\u025a\u0258\3\2\2\2\u025a\u025b")
buf.write("\3\2\2\2\u025bS\3\2\2\2\u025c\u025a\3\2\2\2\u025d\u0262")
buf.write("\58\35\2\u025e\u025f\7\6\2\2\u025f\u0261\58\35\2\u0260")
buf.write("\u025e\3\2\2\2\u0261\u0264\3\2\2\2\u0262\u0260\3\2\2\2")
buf.write("\u0262\u0263\3\2\2\2\u0263U\3\2\2\2\u0264\u0262\3\2\2")
buf.write("\2\u0265\u0266\t\5\2\2\u0266W\3\2\2\2\u0267\u026e\7_\2")
buf.write("\2\u0268\u026e\5Z.\2\u0269\u026a\7(\2\2\u026a\u026b\5")
buf.write("\\/\2\u026b\u026c\7)\2\2\u026c\u026e\3\2\2\2\u026d\u0267")
buf.write("\3\2\2\2\u026d\u0268\3\2\2\2\u026d\u0269\3\2\2\2\u026e")
buf.write("Y\3\2\2\2\u026f\u028a\7b\2\2\u0270\u028a\7d\2\2\u0271")
buf.write("\u028a\7c\2\2\u0272\u028a\7`\2\2\u0273\u0275\7_\2\2\u0274")
buf.write("\u0273\3\2\2\2\u0275\u0278\3\2\2\2\u0276\u0274\3\2\2\2")
buf.write("\u0276\u0277\3\2\2\2\u0277\u027a\3\2\2\2\u0278\u0276\3")
buf.write("\2\2\2\u0279\u027b\7a\2\2\u027a\u0279\3\2\2\2\u027b\u027c")
buf.write("\3\2\2\2\u027c\u027a\3\2\2\2\u027c\u027d\3\2\2\2\u027d")
buf.write("\u027f\3\2\2\2\u027e\u0276\3\2\2\2\u027f\u0280\3\2\2\2")
buf.write("\u0280\u027e\3\2\2\2\u0280\u0281\3\2\2\2\u0281\u0285\3")
buf.write("\2\2\2\u0282\u0284\7_\2\2\u0283\u0282\3\2\2\2\u0284\u0287")
buf.write("\3\2\2\2\u0285\u0283\3\2\2\2\u0285\u0286\3\2\2\2\u0286")
buf.write("\u028a\3\2\2\2\u0287\u0285\3\2\2\2\u0288\u028a\7e\2\2")
buf.write("\u0289\u026f\3\2\2\2\u0289\u0270\3\2\2\2\u0289\u0271\3")
buf.write("\2\2\2\u0289\u0272\3\2\2\2\u0289\u027e\3\2\2\2\u0289\u0288")
buf.write("\3\2\2\2\u028a[\3\2\2\2\u028b\u0290\5`\61\2\u028c\u028d")
buf.write("\7\6\2\2\u028d\u028f\5`\61\2\u028e\u028c\3\2\2\2\u028f")
buf.write("\u0292\3\2\2\2\u0290\u028e\3\2\2\2\u0290\u0291\3\2\2\2")
buf.write("\u0291]\3\2\2\2\u0292\u0290\3\2\2\2\u0293\u0294\5f\64")
buf.write("\2\u0294_\3\2\2\2\u0295\u0296\5b\62\2\u0296\u0297\5d\63")
buf.write("\2\u0297\u0298\5`\61\2\u0298\u029b\3\2\2\2\u0299\u029b")
buf.write("\5f\64\2\u029a\u0295\3\2\2\2\u029a\u0299\3\2\2\2\u029b")
buf.write("a\3\2\2\2\u029c\u029d\5P)\2\u029dc\3\2\2\2\u029e\u029f")
buf.write("\t\6\2\2\u029fe\3\2\2\2\u02a0\u02a7\5h\65\2\u02a1\u02a2")
buf.write("\7D\2\2\u02a2\u02a3\5\\/\2\u02a3\u02a4\7\31\2\2\u02a4")
buf.write("\u02a5\5f\64\2\u02a5\u02a6\b\64\1\2\u02a6\u02a8\3\2\2")
buf.write("\2\u02a7\u02a1\3\2\2\2\u02a7\u02a8\3\2\2\2\u02a8g\3\2")
buf.write("\2\2\u02a9\u02ae\5j\66\2\u02aa\u02ab\7E\2\2\u02ab\u02ad")
buf.write("\5j\66\2\u02ac\u02aa\3\2\2\2\u02ad\u02b0\3\2\2\2\u02ae")
buf.write("\u02ac\3\2\2\2\u02ae\u02af\3\2\2\2\u02afi\3\2\2\2\u02b0")
buf.write("\u02ae\3\2\2\2\u02b1\u02b6\5l\67\2\u02b2\u02b3\7F\2\2")
buf.write("\u02b3\u02b5\5l\67\2\u02b4\u02b2\3\2\2\2\u02b5\u02b8\3")
buf.write("\2\2\2\u02b6\u02b4\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7k")
buf.write("\3\2\2\2\u02b8\u02b6\3\2\2\2\u02b9\u02be\5n8\2\u02ba\u02bb")
buf.write("\7G\2\2\u02bb\u02bd\5n8\2\u02bc\u02ba\3\2\2\2\u02bd\u02c0")
buf.write("\3\2\2\2\u02be\u02bc\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf")
buf.write("m\3\2\2\2\u02c0\u02be\3\2\2\2\u02c1\u02c6\5p9\2\u02c2")
buf.write("\u02c3\7H\2\2\u02c3\u02c5\5p9\2\u02c4\u02c2\3\2\2\2\u02c5")
buf.write("\u02c8\3\2\2\2\u02c6\u02c4\3\2\2\2\u02c6\u02c7\3\2\2\2")
buf.write("\u02c7o\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c9\u02ce\5r:\2")
buf.write("\u02ca\u02cb\7\67\2\2\u02cb\u02cd\5r:\2\u02cc\u02ca\3")
buf.write("\2\2\2\u02cd\u02d0\3\2\2\2\u02ce\u02cc\3\2\2\2\u02ce\u02cf")
buf.write("\3\2\2\2\u02cfq\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d6")
buf.write("\5t;\2\u02d2\u02d3\t\7\2\2\u02d3\u02d5\5t;\2\u02d4\u02d2")
buf.write("\3\2\2\2\u02d5\u02d8\3\2\2\2\u02d6\u02d4\3\2\2\2\u02d6")
buf.write("\u02d7\3\2\2\2\u02d7s\3\2\2\2\u02d8\u02d6\3\2\2\2\u02d9")
buf.write("\u02de\5v<\2\u02da\u02db\t\b\2\2\u02db\u02dd\5v<\2\u02dc")
buf.write("\u02da\3\2\2\2\u02dd\u02e0\3\2\2\2\u02de\u02dc\3\2\2\2")
buf.write("\u02de\u02df\3\2\2\2\u02dfu\3\2\2\2\u02e0\u02de\3\2\2")
buf.write("\2\u02e1\u02e6\5J&\2\u02e2\u02e3\t\t\2\2\u02e3\u02e5\5")
buf.write("J&\2\u02e4\u02e2\3\2\2\2\u02e5\u02e8\3\2\2\2\u02e6\u02e4")
buf.write("\3\2\2\2\u02e6\u02e7\3\2\2\2\u02e7w\3\2\2\2\u02e8\u02e6")
buf.write("\3\2\2\2\u02e9\u02f5\5\u0082B\2\u02ea\u02f5\5\u0084C\2")
buf.write("\u02eb\u02f5\5\u0088E\2\u02ec\u02f5\5\u008aF\2\u02ed\u02f5")
buf.write("\5\u008cG\2\u02ee\u02f5\5\u008eH\2\u02ef\u02f5\5\u0080")
buf.write("A\2\u02f0\u02f5\5z>\2\u02f1\u02f5\5|?\2\u02f2\u02f5\5")
buf.write("~@\2\u02f3\u02f5\5\n\6\2\u02f4\u02e9\3\2\2\2\u02f4\u02ea")
buf.write("\3\2\2\2\u02f4\u02eb\3\2\2\2\u02f4\u02ec\3\2\2\2\u02f4")
buf.write("\u02ed\3\2\2\2\u02f4\u02ee\3\2\2\2\u02f4\u02ef\3\2\2\2")
buf.write("\u02f4\u02f0\3\2\2\2\u02f4\u02f1\3\2\2\2\u02f4\u02f2\3")
buf.write("\2\2\2\u02f4\u02f3\3\2\2\2\u02f5y\3\2\2\2\u02f6\u02f8")
buf.write("\7Q\2\2\u02f7\u02f6\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8")
buf.write("\u02f9\3\2\2\2\u02f9\u02fa\7_\2\2\u02fa\u02fe\7(\2\2\u02fb")
buf.write("\u02fd\n\n\2\2\u02fc\u02fb\3\2\2\2\u02fd\u0300\3\2\2\2")
buf.write("\u02fe\u02fc\3\2\2\2\u02fe\u02ff\3\2\2\2\u02ff\u0301\3")
buf.write("\2\2\2\u0300\u02fe\3\2\2\2\u0301\u0302\7)\2\2\u0302\u0303")
buf.write("\7\4\2\2\u0303{\3\2\2\2\u0304\u0305\7R\2\2\u0305\u0309")
buf.write("\7\3\2\2\u0306\u0308\n\13\2\2\u0307\u0306\3\2\2\2\u0308")
buf.write("\u030b\3\2\2\2\u0309\u0307\3\2\2\2\u0309\u030a\3\2\2\2")
buf.write("\u030a\u030c\3\2\2\2\u030b\u0309\3\2\2\2\u030c\u030d\7")
buf.write("\26\2\2\u030d}\3\2\2\2\u030e\u030f\7S\2\2\u030f\u0313")
buf.write("\7\3\2\2\u0310\u0312\n\13\2\2\u0311\u0310\3\2\2\2\u0312")
buf.write("\u0315\3\2\2\2\u0313\u0311\3\2\2\2\u0313\u0314\3\2\2\2")
buf.write("\u0314\u0316\3\2\2\2\u0315\u0313\3\2\2\2\u0316\u0317\7")
buf.write("\26\2\2\u0317\177\3\2\2\2\u0318\u0319\7_\2\2\u0319\u031d")
buf.write("\7(\2\2\u031a\u031c\5\n\6\2\u031b\u031a\3\2\2\2\u031c")
buf.write("\u031f\3\2\2\2\u031d\u031b\3\2\2\2\u031d\u031e\3\2\2\2")
buf.write("\u031e\u0321\3\2\2\2\u031f\u031d\3\2\2\2\u0320\u0322\5")
buf.write("\u0086D\2\u0321\u0320\3\2\2\2\u0321\u0322\3\2\2\2\u0322")
buf.write("\u0324\3\2\2\2\u0323\u0325\5\\/\2\u0324\u0323\3\2\2\2")
buf.write("\u0324\u0325\3\2\2\2\u0325\u0326\3\2\2\2\u0326\u0327\7")
buf.write(")\2\2\u0327\u0081\3\2\2\2\u0328\u0329\7_\2\2\u0329\u032a")
buf.write("\7\31\2\2\u032a\u0334\5x=\2\u032b\u032c\7T\2\2\u032c\u032d")
buf.write("\5^\60\2\u032d\u032e\7\31\2\2\u032e\u032f\5x=\2\u032f")
buf.write("\u0334\3\2\2\2\u0330\u0331\7U\2\2\u0331\u0332\7\31\2\2")
buf.write("\u0332\u0334\5x=\2\u0333\u0328\3\2\2\2\u0333\u032b\3\2")
buf.write("\2\2\u0333\u0330\3\2\2\2\u0334\u0083\3\2\2\2\u0335\u0339")
buf.write("\7\3\2\2\u0336\u0338\5\n\6\2\u0337\u0336\3\2\2\2\u0338")
buf.write("\u033b\3\2\2\2\u0339\u0337\3\2\2\2\u0339\u033a\3\2\2\2")
buf.write("\u033a\u033d\3\2\2\2\u033b\u0339\3\2\2\2\u033c\u033e\5")
buf.write("\u0086D\2\u033d\u033c\3\2\2\2\u033d\u033e\3\2\2\2\u033e")
buf.write("\u033f\3\2\2\2\u033f\u0340\7\26\2\2\u0340\u0085\3\2\2")
buf.write("\2\u0341\u0343\5x=\2\u0342\u0341\3\2\2\2\u0343\u0344\3")
buf.write("\2\2\2\u0344\u0342\3\2\2\2\u0344\u0345\3\2\2\2\u0345\u0087")
buf.write("\3\2\2\2\u0346\u034b\7\4\2\2\u0347\u0348\5\\/\2\u0348")
buf.write("\u0349\7\4\2\2\u0349\u034b\3\2\2\2\u034a\u0346\3\2\2\2")
buf.write("\u034a\u0347\3\2\2\2\u034b\u0089\3\2\2\2\u034c\u034d\7")
buf.write("V\2\2\u034d\u034e\7(\2\2\u034e\u034f\5\\/\2\u034f\u0350")
buf.write("\7)\2\2\u0350\u0351\bF\1\2\u0351\u0354\5x=\2\u0352\u0353")
buf.write("\7W\2\2\u0353\u0355\5x=\2\u0354\u0352\3\2\2\2\u0354\u0355")
buf.write("\3\2\2\2\u0355\u035d\3\2\2\2\u0356\u0357\7X\2\2\u0357")
buf.write("\u0358\7(\2\2\u0358\u0359\5\\/\2\u0359\u035a\7)\2\2\u035a")
buf.write("\u035b\5x=\2\u035b\u035d\3\2\2\2\u035c\u034c\3\2\2\2\u035c")
buf.write("\u0356\3\2\2\2\u035d\u008b\3\2\2\2\u035e\u035f\7Y\2\2")
buf.write("\u035f\u0360\7(\2\2\u0360\u0361\5\\/\2\u0361\u0362\7)")
buf.write("\2\2\u0362\u0363\5x=\2\u0363\u0364\bG\1\2\u0364\u036f")
buf.write("\3\2\2\2\u0365\u0366\7Z\2\2\u0366\u0367\5x=\2\u0367\u0368")
buf.write("\7Y\2\2\u0368\u0369\7(\2\2\u0369\u036a\5\\/\2\u036a\u036b")
buf.write("\7)\2\2\u036b\u036c\7\4\2\2\u036c\u036d\bG\1\2\u036d\u036f")
buf.write("\3\2\2\2\u036e\u035e\3\2\2\2\u036e\u0365\3\2\2\2\u036f")
buf.write("\u008d\3\2\2\2\u0370\u0371\7[\2\2\u0371\u0372\7_\2\2\u0372")
buf.write("\u037e\7\4\2\2\u0373\u0374\7\\\2\2\u0374\u037e\7\4\2\2")
buf.write("\u0375\u0376\7]\2\2\u0376\u037e\7\4\2\2\u0377\u0378\7")
buf.write("^\2\2\u0378\u037e\7\4\2\2\u0379\u037a\7^\2\2\u037a\u037b")
buf.write("\5\\/\2\u037b\u037c\7\4\2\2\u037c\u037e\3\2\2\2\u037d")
buf.write("\u0370\3\2\2\2\u037d\u0373\3\2\2\2\u037d\u0375\3\2\2\2")
buf.write("\u037d\u0377\3\2\2\2\u037d\u0379\3\2\2\2\u037e\u008f\3")
buf.write("\2\2\2o\u0093\u0097\u009d\u00a6\u00a8\u00ab\u00b1\u00b6")
buf.write("\u00bd\u00bf\u00c3\u00cb\u00d0\u00d7\u00dd\u00f4\u00f9")
buf.write("\u00ff\u0108\u010f\u0117\u0119\u0120\u0126\u012a\u0130")
buf.write("\u0139\u013f\u0146\u014c\u0151\u0154\u0157\u015a\u015e")
buf.write("\u0164\u0169\u0170\u0172\u0184\u018a\u018d\u0192\u0197")
buf.write("\u019a\u019f\u01a4\u01aa\u01ac\u01b0\u01b5\u01b9\u01c0")
buf.write("\u01c5\u01c8\u01cc\u01cf\u01d6\u01db\u01ea\u01f0\u01f4")
buf.write("\u01fb\u0200\u0205\u0209\u0211\u0213\u021d\u021f\u0228")
buf.write("\u0239\u0258\u025a\u0262\u026d\u0276\u027c\u0280\u0285")
buf.write("\u0289\u0290\u029a\u02a7\u02ae\u02b6\u02be\u02c6\u02ce")
buf.write("\u02d6\u02de\u02e6\u02f4\u02f7\u02fe\u0309\u0313\u031d")
buf.write("\u0321\u0324\u0333\u0339\u033d\u0344\u034a\u0354\u035c")
buf.write("\u036e\u037d")
return buf.getvalue()
## CParser — recursive-descent parser for the C grammar, generated by ANTLR
#  from C.g4.  NOTE(review): generated code; regenerate from the grammar
#  rather than hand-editing these tables.
class CParser ( Parser ):

    grammarFileName = "C.g4"

    # Augmented transition network, deserialized once at class-creation time
    # and shared by every parser instance.
    atn = ATNDeserializer().deserialize(serializedATN())

    # One DFA object per ATN decision point, used by adaptive prediction.
    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    sharedContextCache = PredictionContextCache()

    # Display strings for literal (anonymous) tokens, indexed by token type.
    literalNames = [ "<INVALID>", "'{'", "';'", "'typedef'", "','", "'='",
                     "'extern'", "'static'", "'auto'", "'register'", "'STATIC'",
                     "'void'", "'char'", "'short'", "'int'", "'long'", "'float'",
                     "'double'", "'signed'", "'unsigned'", "'}'", "'struct'",
                     "'union'", "':'", "'enum'", "'const'", "'volatile'",
                     "'IN'", "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'",
                     "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'", "'EFIAPI'",
                     "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'PACKED'",
                     "'('", "')'", "'['", "']'", "'*'", "'...'", "'+'",
                     "'-'", "'/'", "'%'", "'++'", "'--'", "'sizeof'", "'.'",
                     "'->'", "'&'", "'~'", "'!'", "'*='", "'/='", "'%='",
                     "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='",
                     "'?'", "'||'", "'&&'", "'|'", "'^'", "'=='", "'!='",
                     "'<'", "'>'", "'<='", "'>='", "'<<'", "'>>'", "'__asm__'",
                     "'_asm'", "'__asm'", "'case'", "'default'", "'if'",
                     "'else'", "'switch'", "'while'", "'do'", "'goto'",
                     "'continue'", "'break'", "'return'" ]

    # Symbolic token names; literal tokens have none ("<INVALID>").
    symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
                      "<INVALID>", "IDENTIFIER", "CHARACTER_LITERAL", "STRING_LITERAL",
                      "HEX_LITERAL", "DECIMAL_LITERAL", "OCTAL_LITERAL",
                      "FLOATING_POINT_LITERAL", "WS", "BS", "UnicodeVocabulary",
                      "COMMENT", "LINE_COMMENT", "LINE_COMMAND" ]

    # Rule indexes, one per grammar production in declaration order; returned
    # by each context class's getRuleIndex().
    RULE_translation_unit = 0
    RULE_external_declaration = 1
    RULE_function_definition = 2
    RULE_declaration_specifiers = 3
    RULE_declaration = 4
    RULE_init_declarator_list = 5
    RULE_init_declarator = 6
    RULE_storage_class_specifier = 7
    RULE_type_specifier = 8
    RULE_type_id = 9
    RULE_struct_or_union_specifier = 10
    RULE_struct_or_union = 11
    RULE_struct_declaration_list = 12
    RULE_struct_declaration = 13
    RULE_specifier_qualifier_list = 14
    RULE_struct_declarator_list = 15
    RULE_struct_declarator = 16
    RULE_enum_specifier = 17
    RULE_enumerator_list = 18
    RULE_enumerator = 19
    RULE_type_qualifier = 20
    RULE_declarator = 21
    RULE_direct_declarator = 22
    RULE_declarator_suffix = 23
    RULE_pointer = 24
    RULE_parameter_type_list = 25
    RULE_parameter_list = 26
    RULE_parameter_declaration = 27
    RULE_identifier_list = 28
    RULE_type_name = 29
    RULE_abstract_declarator = 30
    RULE_direct_abstract_declarator = 31
    RULE_abstract_declarator_suffix = 32
    RULE_initializer = 33
    RULE_initializer_list = 34
    RULE_argument_expression_list = 35
    RULE_additive_expression = 36
    RULE_multiplicative_expression = 37
    RULE_cast_expression = 38
    RULE_unary_expression = 39
    RULE_postfix_expression = 40
    RULE_macro_parameter_list = 41
    RULE_unary_operator = 42
    RULE_primary_expression = 43
    RULE_constant = 44
    RULE_expression = 45
    RULE_constant_expression = 46
    RULE_assignment_expression = 47
    RULE_lvalue = 48
    RULE_assignment_operator = 49
    RULE_conditional_expression = 50
    RULE_logical_or_expression = 51
    RULE_logical_and_expression = 52
    RULE_inclusive_or_expression = 53
    RULE_exclusive_or_expression = 54
    RULE_and_expression = 55
    RULE_equality_expression = 56
    RULE_relational_expression = 57
    RULE_shift_expression = 58
    RULE_statement = 59
    RULE_asm2_statement = 60
    RULE_asm1_statement = 61
    RULE_asm_statement = 62
    RULE_macro_statement = 63
    RULE_labeled_statement = 64
    RULE_compound_statement = 65
    RULE_statement_list = 66
    RULE_expression_statement = 67
    RULE_selection_statement = 68
    RULE_iteration_statement = 69
    RULE_jump_statement = 70

    # Rule names, indexed by the RULE_* constants above.
    ruleNames = [ "translation_unit", "external_declaration", "function_definition",
                  "declaration_specifiers", "declaration", "init_declarator_list",
                  "init_declarator", "storage_class_specifier", "type_specifier",
                  "type_id", "struct_or_union_specifier", "struct_or_union",
                  "struct_declaration_list", "struct_declaration", "specifier_qualifier_list",
                  "struct_declarator_list", "struct_declarator", "enum_specifier",
                  "enumerator_list", "enumerator", "type_qualifier", "declarator",
                  "direct_declarator", "declarator_suffix", "pointer",
                  "parameter_type_list", "parameter_list", "parameter_declaration",
                  "identifier_list", "type_name", "abstract_declarator",
                  "direct_abstract_declarator", "abstract_declarator_suffix",
                  "initializer", "initializer_list", "argument_expression_list",
                  "additive_expression", "multiplicative_expression", "cast_expression",
                  "unary_expression", "postfix_expression", "macro_parameter_list",
                  "unary_operator", "primary_expression", "constant", "expression",
                  "constant_expression", "assignment_expression", "lvalue",
                  "assignment_operator", "conditional_expression", "logical_or_expression",
                  "logical_and_expression", "inclusive_or_expression",
                  "exclusive_or_expression", "and_expression", "equality_expression",
                  "relational_expression", "shift_expression", "statement",
                  "asm2_statement", "asm1_statement", "asm_statement",
                  "macro_statement", "labeled_statement", "compound_statement",
                  "statement_list", "expression_statement", "selection_statement",
                  "iteration_statement", "jump_statement" ]

    # Token type constants; T__<n> are the anonymous literal tokens, the
    # named constants correspond to the lexer's named token rules.
    EOF = Token.EOF
    T__0=1
    T__1=2
    T__2=3
    T__3=4
    T__4=5
    T__5=6
    T__6=7
    T__7=8
    T__8=9
    T__9=10
    T__10=11
    T__11=12
    T__12=13
    T__13=14
    T__14=15
    T__15=16
    T__16=17
    T__17=18
    T__18=19
    T__19=20
    T__20=21
    T__21=22
    T__22=23
    T__23=24
    T__24=25
    T__25=26
    T__26=27
    T__27=28
    T__28=29
    T__29=30
    T__30=31
    T__31=32
    T__32=33
    T__33=34
    T__34=35
    T__35=36
    T__36=37
    T__37=38
    T__38=39
    T__39=40
    T__40=41
    T__41=42
    T__42=43
    T__43=44
    T__44=45
    T__45=46
    T__46=47
    T__47=48
    T__48=49
    T__49=50
    T__50=51
    T__51=52
    T__52=53
    T__53=54
    T__54=55
    T__55=56
    T__56=57
    T__57=58
    T__58=59
    T__59=60
    T__60=61
    T__61=62
    T__62=63
    T__63=64
    T__64=65
    T__65=66
    T__66=67
    T__67=68
    T__68=69
    T__69=70
    T__70=71
    T__71=72
    T__72=73
    T__73=74
    T__74=75
    T__75=76
    T__76=77
    T__77=78
    T__78=79
    T__79=80
    T__80=81
    T__81=82
    T__82=83
    T__83=84
    T__84=85
    T__85=86
    T__86=87
    T__87=88
    T__88=89
    T__89=90
    T__90=91
    T__91=92
    IDENTIFIER=93
    CHARACTER_LITERAL=94
    STRING_LITERAL=95
    HEX_LITERAL=96
    DECIMAL_LITERAL=97
    OCTAL_LITERAL=98
    FLOATING_POINT_LITERAL=99
    WS=100
    BS=101
    UnicodeVocabulary=102
    COMMENT=103
    LINE_COMMENT=104
    LINE_COMMAND=105
# @param input Type: TokenStream
# @param output= sys.stdout Type: TextIO
def __init__(self,input,output= sys.stdout):
super().__init__(input, output)
self.checkVersion("4.7.1")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
def printTokenInfo(self,line,offset,tokenText):
print(str(line)+ ',' + str(offset) + ':' + str(tokenText))
def StorePredicateExpression(self,StartLine,StartOffset,EndLine,EndOffset,Text):
PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.PredicateExpressionList.append(PredExp)
def StoreEnumerationDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.EnumerationDefinitionList.append(EnumDef)
def StoreStructUnionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,Text):
SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.StructUnionDefinitionList.append(SUDef)
def StoreTypedefDefinition(self,StartLine,StartOffset,EndLine,EndOffset,FromText,ToText):
Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.TypedefDefinitionList.append(Tdef)
def StoreFunctionDefinition(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText,LeftBraceLine,LeftBraceOffset,DeclLine,DeclOffset):
FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
FileProfile.FunctionDefinitionList.append(FuncDef)
def StoreVariableDeclaration(self,StartLine,StartOffset,EndLine,EndOffset,ModifierText,DeclText):
VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.VariableDeclarationList.append(VarDecl)
def StoreFunctionCalling(self,StartLine,StartOffset,EndLine,EndOffset,FuncName,ParamList):
FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
FileProfile.FunctionCallingList.append(FuncCall)
class Translation_unitContext(ParserRuleContext):
# @param parent=None Type: ParserRuleContext
# @param invokingState=-1 Type: int
def __init__(self,parser,parent=None,invokingState=-1):
super().__init__(parent, invokingState)
self.parser = parser
# @param i=None Type: int
def external_declaration(self,i=None):
if i is None:
return self.getTypedRuleContexts(CParser.External_declarationContext)
else:
return self.getTypedRuleContext(CParser.External_declarationContext,i)
def getRuleIndex(self):
return CParser.RULE_translation_unit
# @param listener Type: ParseTreeListener
def enterRule(self,listener):
if hasattr( listener, "enterTranslation_unit" ):
listener.enterTranslation_unit(self)
# @param listener Type: ParseTreeListener
def exitRule(self,listener):
if hasattr( listener, "exitTranslation_unit" ):
listener.exitTranslation_unit(self)
    def translation_unit(self):
        """Parse the ``translation_unit`` rule: zero or more
        external_declaration children until EOF-compatible lookahead.

        Returns the Translation_unitContext for the parsed subtree; on a
        syntax error the exception is recorded on the context and standard
        ANTLR error recovery is applied.
        """
        localctx = CParser.Translation_unitContext(self, self._ctx, self.state)
        self.enterRule(localctx, 0, self.RULE_translation_unit)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 145
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Loop while the lookahead token can start an external_declaration
            # (bitmask over token types 0..63, plus IDENTIFIER checked apart).
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41))) != 0) or _la==CParser.IDENTIFIER:
                self.state = 142
                self.external_declaration()
                self.state = 147
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            # Standard generated recovery: attach, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class External_declarationContext(ParserRuleContext):
# @param parent=None Type: ParserRuleContext
# @param invokingState=-1 Type: int
def __init__(self,parser,parent=None,invokingState=-1):
super().__init__(parent, invokingState)
self.parser = parser
def declarator(self):
return self.getTypedRuleContext(CParser.DeclaratorContext,0)
def declaration_specifiers(self):
return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
# @param i=None Type: int
def declaration(self,i=None):
if i is None:
return self.getTypedRuleContexts(CParser.DeclarationContext)
else:
return self.getTypedRuleContext(CParser.DeclarationContext,i)
def function_definition(self):
return self.getTypedRuleContext(CParser.Function_definitionContext,0)
def macro_statement(self):
return self.getTypedRuleContext(CParser.Macro_statementContext,0)
def getRuleIndex(self):
return CParser.RULE_external_declaration
# @param listener Type: ParseTreeListener
def enterRule(self,listener):
if hasattr( listener, "enterExternal_declaration" ):
listener.enterExternal_declaration(self)
# @param listener Type: ParseTreeListener
def exitRule(self,listener):
if hasattr( listener, "exitExternal_declaration" ):
listener.exitExternal_declaration(self)
    def external_declaration(self):
        """Parse one ``external_declaration``.

        Four alternatives, chosen by adaptive prediction:
          1. optional declaration_specifiers, declarator, declaration*, '{'
             (old-style function header);
          2. a function_definition;
          3. a declaration;
          4. a macro_statement with an optional trailing ';'.

        Returns the External_declarationContext for the parsed subtree.
        """
        localctx = CParser.External_declarationContext(self, self._ctx, self.state)
        self.enterRule(localctx, 2, self.RULE_external_declaration)
        self._la = 0 # Token type
        try:
            self.state = 166
            self._errHandler.sync(self)
            # Decision 4 of the ATN selects among the four alternatives.
            la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 149
                self._errHandler.sync(self)
                # Decision 1: is the optional declaration_specifiers present?
                la_ = self._interp.adaptivePredict(self._input,1,self._ctx)
                if la_ == 1:
                    self.state = 148
                    self.declaration_specifiers()
                self.state = 151
                self.declarator()
                self.state = 155
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Zero or more declarations while lookahead can start one.
                while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER:
                    self.state = 152
                    self.declaration()
                    self.state = 157
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                self.state = 158
                self.match(CParser.T__0)
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 160
                self.function_definition()
                pass
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 161
                self.declaration()
                pass
            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 162
                self.macro_statement()
                self.state = 164
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                # Optional ';' terminator after the macro statement.
                if _la==CParser.T__1:
                    self.state = 163
                    self.match(CParser.T__1)
                pass
        except RecognitionException as re:
            # Standard generated recovery: attach, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Function_definitionContext(ParserRuleContext):
# @param parent=None Type: ParserRuleContext
# @param invokingState=-1 Type: int
def __init__(self,parser,parent=None,invokingState=-1):
super().__init__(parent, invokingState)
self.parser = parser
self.ModifierText = ''
self.DeclText = ''
self.LBLine = 0
self.LBOffset = 0
self.DeclLine = 0
self.DeclOffset = 0
self.d = None # Declaration_specifiersContext
self._declaration_specifiers = None # Declaration_specifiersContext
self._declarator = None # DeclaratorContext
self.a = None # Compound_statementContext
self.b = None # Compound_statementContext
def declarator(self):
return self.getTypedRuleContext(CParser.DeclaratorContext,0)
def compound_statement(self):
return self.getTypedRuleContext(CParser.Compound_statementContext,0)
def declaration_specifiers(self):
return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
# @param i=None Type: int
def declaration(self,i=None):
if i is None:
return self.getTypedRuleContexts(CParser.DeclarationContext)
else:
return self.getTypedRuleContext(CParser.DeclarationContext,i)
def getRuleIndex(self):
return CParser.RULE_function_definition
# @param listener Type: ParseTreeListener
def enterRule(self,listener):
if hasattr( listener, "enterFunction_definition" ):
listener.enterFunction_definition(self)
# @param listener Type: ParseTreeListener
def exitRule(self,listener):
if hasattr( listener, "exitFunction_definition" ):
listener.exitFunction_definition(self)
    def function_definition(self):
        """Parse one ``function_definition`` and record it via
        StoreFunctionDefinition.

        Grammar shape: optional declaration_specifiers (label ``d``), a
        declarator, then either one-or-more declarations followed by a
        compound_statement (label ``a``, K&R style) or a compound_statement
        directly (label ``b``).  The action code at the end extracts the
        modifier/declarator text and the '{' and declarator positions before
        appending the fragment to the global file profile.
        """
        localctx = CParser.Function_definitionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 4, self.RULE_function_definition)
        # Local scratch copies mirroring the context's action fields.
        ModifierText = '';
        DeclText = '';
        LBLine = 0;
        LBOffset = 0;
        DeclLine = 0;
        DeclOffset = 0;
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 169
            self._errHandler.sync(self)
            # Decision 5: is the optional declaration_specifiers present?
            la_ = self._interp.adaptivePredict(self._input,5,self._ctx)
            if la_ == 1:
                self.state = 168
                localctx.d = localctx._declaration_specifiers = self.declaration_specifiers()
            self.state = 171
            localctx._declarator = self.declarator()
            self.state = 180
            self._errHandler.sync(self)
            token = self._input.LA(1)
            # K&R-style: declarations between the declarator and the body.
            if token in [CParser.T__2, CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
                self.state = 173
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while True:
                    self.state = 172
                    self.declaration()
                    self.state = 175
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                    if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
                        break
                self.state = 177
                localctx.a = self.compound_statement()
                pass
            elif token in [CParser.T__0]:
                # Body follows the declarator directly.
                self.state = 179
                localctx.b = self.compound_statement()
                pass
            else:
                raise NoViableAltException(self)
            # --- embedded action: extract texts/positions for the ECC profile ---
            if localctx.d != None:
                ModifierText = (None if localctx._declaration_specifiers is None else self._input.getText((localctx._declaration_specifiers.start,localctx._declaration_specifiers.stop)))
            else:
                ModifierText = ''
            DeclText = (None if localctx._declarator is None else self._input.getText((localctx._declarator.start,localctx._declarator.stop)))
            DeclLine = (None if localctx._declarator is None else localctx._declarator.start).line
            DeclOffset = (None if localctx._declarator is None else localctx._declarator.start).column
            # '{' position comes from whichever compound_statement matched.
            if localctx.a != None:
                LBLine = (None if localctx.a is None else localctx.a.start).line
                LBOffset = (None if localctx.a is None else localctx.a.start).column
            else:
                LBLine = (None if localctx.b is None else localctx.b.start).line
                LBOffset = (None if localctx.b is None else localctx.b.start).column
            self._ctx.stop = self._input.LT(-1)
            self.StoreFunctionDefinition(localctx.start.line, localctx.start.column, localctx.stop.line, localctx.stop.column, ModifierText, DeclText, LBLine, LBOffset, DeclLine, DeclOffset)
        except RecognitionException as re:
            # Standard generated recovery: attach, report, resynchronize.
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Declaration_specifiersContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'declaration_specifiers' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Child accessor: with i=None return the list of all matching child
    # contexts, otherwise only the i-th one (same pattern below).
    # @param i=None Type: int
    def storage_class_specifier(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Storage_class_specifierContext)
        else:
            return self.getTypedRuleContext(CParser.Storage_class_specifierContext,i)
    # @param i=None Type: int
    def type_specifier(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Type_specifierContext)
        else:
            return self.getTypedRuleContext(CParser.Type_specifierContext,i)
    # @param i=None Type: int
    def type_qualifier(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Type_qualifierContext)
        else:
            return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
    def getRuleIndex(self):
        return CParser.RULE_declaration_specifiers
    # Listener hooks are invoked only if the listener implements them.
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterDeclaration_specifiers" ):
            listener.enterDeclaration_specifiers(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitDeclaration_specifiers" ):
            listener.exitDeclaration_specifiers(self)
def declaration_specifiers(self):
    """Parse one or more storage-class specifiers, type specifiers, or type
    qualifiers (ANTLR-generated rule method; returns the rule context)."""
    localctx = CParser.Declaration_specifiersContext(self, self._ctx, self.state)
    self.enterRule(localctx, 6, self.RULE_declaration_specifiers)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 187
        self._errHandler.sync(self)
        _alt = 1
        # (...)+ loop: repeat until the adaptive predictor returns the exit
        # alternative (2) or an invalid alternative number.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 187
                self._errHandler.sync(self)
                token = self._input.LA(1)
                # Dispatch on the lookahead token class.
                if token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9]:
                    self.state = 184
                    self.storage_class_specifier()
                    pass
                elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
                    self.state = 185
                    self.type_specifier()
                    pass
                elif token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
                    self.state = 186
                    self.type_qualifier()
                    pass
                else:
                    raise NoViableAltException(self)
            else:
                raise NoViableAltException(self)
            self.state = 189
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,9,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclarationContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'declaration' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        # Labels filled by alternative 1 of declaration() (typedef form).
        self.a = None # Token
        self.b = None # Declaration_specifiersContext
        self.c = None # Init_declarator_listContext
        self.d = None # Token
        # Labels filled by alternative 2 (ordinary declaration form).
        self.s = None # Declaration_specifiersContext
        self.t = None # Init_declarator_listContext
        self.e = None # Token
    def init_declarator_list(self):
        return self.getTypedRuleContext(CParser.Init_declarator_listContext,0)
    def declaration_specifiers(self):
        return self.getTypedRuleContext(CParser.Declaration_specifiersContext,0)
    def getRuleIndex(self):
        return CParser.RULE_declaration
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterDeclaration" ):
            listener.enterDeclaration(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitDeclaration" ):
            listener.exitDeclaration(self)
def declaration(self):
    """Parse the 'declaration' rule (ANTLR-generated).

    Alternative 1 (leading T__2 token) records a typedef definition and
    alternative 2 records a variable declaration, via the Store* hooks
    defined elsewhere in this parser class.
    """
    localctx = CParser.DeclarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 8, self.RULE_declaration)
    self._la = 0 # Token type
    try:
        self.state = 206
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__2]:
            self.enterOuterAlt(localctx, 1)
            self.state = 191
            localctx.a = self.match(CParser.T__2)
            self.state = 193
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,10,self._ctx)
            if la_ == 1:
                # Optional declaration_specifiers before the declarator list.
                self.state = 192
                localctx.b = self.declaration_specifiers()
            self.state = 195
            localctx.c = self.init_declarator_list()
            self.state = 196
            localctx.d = self.match(CParser.T__1)
            if localctx.b is not None:
                self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, (None if localctx.b is None else self._input.getText((localctx.b.start,localctx.b.stop))), (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
            else:
                # No specifiers matched: record an empty modifier string.
                self.StoreTypedefDefinition(localctx.a.line, localctx.a.column, (0 if localctx.d is None else localctx.d.line), localctx.d.column, '', (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
            pass
        elif token in [CParser.T__5, CParser.T__6, CParser.T__7, CParser.T__8, CParser.T__9, CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36, CParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 2)
            self.state = 199
            localctx.s = self.declaration_specifiers()
            self.state = 201
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Generated bitset test: is the lookahead a token that can start
            # an (optional) init_declarator_list?
            if ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
                self.state = 200
                localctx.t = self.init_declarator_list()
            self.state = 203
            localctx.e = self.match(CParser.T__1)
            if localctx.t is not None:
                self.StoreVariableDeclaration((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.t is None else localctx.t.start).line, (None if localctx.t is None else localctx.t.start).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))), (None if localctx.t is None else self._input.getText((localctx.t.start,localctx.t.stop))))
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Init_declarator_listContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'init_declarator_list' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Child accessor: all init_declarator children, or the i-th one.
    # @param i=None Type: int
    def init_declarator(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Init_declaratorContext)
        else:
            return self.getTypedRuleContext(CParser.Init_declaratorContext,i)
    def getRuleIndex(self):
        return CParser.RULE_init_declarator_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterInit_declarator_list" ):
            listener.enterInit_declarator_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitInit_declarator_list" ):
            listener.exitInit_declarator_list(self)
def init_declarator_list(self):
    """Parse a comma-separated (T__3) list of init_declarator
    (ANTLR-generated rule method; returns the rule context)."""
    localctx = CParser.Init_declarator_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 10, self.RULE_init_declarator_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 208
        self.init_declarator()
        self.state = 213
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( ',' init_declarator )* tail loop.
        while _la==CParser.T__3:
            self.state = 209
            self.match(CParser.T__3)
            self.state = 210
            self.init_declarator()
            self.state = 215
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Init_declaratorContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'init_declarator' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def declarator(self):
        return self.getTypedRuleContext(CParser.DeclaratorContext,0)
    def initializer(self):
        return self.getTypedRuleContext(CParser.InitializerContext,0)
    def getRuleIndex(self):
        return CParser.RULE_init_declarator
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterInit_declarator" ):
            listener.enterInit_declarator(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitInit_declarator" ):
            listener.exitInit_declarator(self)
def init_declarator(self):
    """Parse 'declarator' optionally followed by '=' (T__4) and an
    initializer (ANTLR-generated rule method)."""
    localctx = CParser.Init_declaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 12, self.RULE_init_declarator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 216
        self.declarator()
        self.state = 219
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CParser.T__4:
            # Optional initializer part.
            self.state = 217
            self.match(CParser.T__4)
            self.state = 218
            self.initializer()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Storage_class_specifierContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'storage_class_specifier' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def getRuleIndex(self):
        return CParser.RULE_storage_class_specifier
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStorage_class_specifier" ):
            listener.enterStorage_class_specifier(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStorage_class_specifier" ):
            listener.exitStorage_class_specifier(self)
def storage_class_specifier(self):
    """Match exactly one storage-class token (T__5..T__9); recover inline
    if the lookahead is not in that set (ANTLR-generated)."""
    localctx = CParser.Storage_class_specifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 14, self.RULE_storage_class_specifier)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 221
        _la = self._input.LA(1)
        # Bitset membership test over the allowed token set.
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Type_specifierContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'type_specifier' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        # Labels set by type_specifier() for the struct/union and enum
        # alternatives (used by the Store* actions there).
        self.s = None # Struct_or_union_specifierContext
        self.e = None # Enum_specifierContext
    def struct_or_union_specifier(self):
        return self.getTypedRuleContext(CParser.Struct_or_union_specifierContext,0)
    def enum_specifier(self):
        return self.getTypedRuleContext(CParser.Enum_specifierContext,0)
    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)
    def declarator(self):
        return self.getTypedRuleContext(CParser.DeclaratorContext,0)
    # Child accessor: all type_qualifier children, or the i-th one.
    # @param i=None Type: int
    def type_qualifier(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Type_qualifierContext)
        else:
            return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
    def type_id(self):
        return self.getTypedRuleContext(CParser.Type_idContext,0)
    def getRuleIndex(self):
        return CParser.RULE_type_specifier
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterType_specifier" ):
            listener.enterType_specifier(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitType_specifier" ):
            listener.exitType_specifier(self)
def type_specifier(self):
    """Parse the 'type_specifier' rule (ANTLR-generated).

    Thirteen alternatives chosen by adaptive prediction: nine keyword
    tokens, struct/union specifier, enum specifier, a qualified
    IDENTIFIER declarator form, and a bare type_id.  The struct/union
    and enum alternatives record their definitions via the Store* hooks
    defined elsewhere in this parser class.
    """
    localctx = CParser.Type_specifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 16, self.RULE_type_specifier)
    try:
        self.state = 247
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,16,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 223
            self.match(CParser.T__10)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 224
            self.match(CParser.T__11)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 225
            self.match(CParser.T__12)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 226
            self.match(CParser.T__13)
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 227
            self.match(CParser.T__14)
            pass
        elif la_ == 6:
            self.enterOuterAlt(localctx, 6)
            self.state = 228
            self.match(CParser.T__15)
            pass
        elif la_ == 7:
            self.enterOuterAlt(localctx, 7)
            self.state = 229
            self.match(CParser.T__16)
            pass
        elif la_ == 8:
            self.enterOuterAlt(localctx, 8)
            self.state = 230
            self.match(CParser.T__17)
            pass
        elif la_ == 9:
            self.enterOuterAlt(localctx, 9)
            self.state = 231
            self.match(CParser.T__18)
            pass
        elif la_ == 10:
            self.enterOuterAlt(localctx, 10)
            self.state = 232
            localctx.s = self.struct_or_union_specifier()
            # Only record the definition when the sub-rule consumed input.
            if localctx.s.stop is not None:
                self.StoreStructUnionDefinition((None if localctx.s is None else localctx.s.start).line, (None if localctx.s is None else localctx.s.start).column, (None if localctx.s is None else localctx.s.stop).line, (None if localctx.s is None else localctx.s.stop).column, (None if localctx.s is None else self._input.getText((localctx.s.start,localctx.s.stop))))
            pass
        elif la_ == 11:
            self.enterOuterAlt(localctx, 11)
            self.state = 235
            localctx.e = self.enum_specifier()
            if localctx.e.stop is not None:
                self.StoreEnumerationDefinition((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            pass
        elif la_ == 12:
            self.enterOuterAlt(localctx, 12)
            self.state = 238
            self.match(CParser.IDENTIFIER)
            self.state = 242
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
            # ( type_qualifier )* loop before the declarator.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 239
                    self.type_qualifier()
                self.state = 244
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,15,self._ctx)
            self.state = 245
            self.declarator()
            pass
        elif la_ == 13:
            self.enterOuterAlt(localctx, 13)
            self.state = 246
            self.type_id()
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Type_idContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'type_id' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)
    def getRuleIndex(self):
        return CParser.RULE_type_id
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterType_id" ):
            listener.enterType_id(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitType_id" ):
            listener.exitType_id(self)
def type_id(self):
    """Parse a bare IDENTIFIER as a type name (ANTLR-generated)."""
    localctx = CParser.Type_idContext(self, self._ctx, self.state)
    self.enterRule(localctx, 18, self.RULE_type_id)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 249
        self.match(CParser.IDENTIFIER)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Struct_or_union_specifierContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'struct_or_union_specifier' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def struct_or_union(self):
        return self.getTypedRuleContext(CParser.Struct_or_unionContext,0)
    def struct_declaration_list(self):
        return self.getTypedRuleContext(CParser.Struct_declaration_listContext,0)
    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)
    def getRuleIndex(self):
        return CParser.RULE_struct_or_union_specifier
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStruct_or_union_specifier" ):
            listener.enterStruct_or_union_specifier(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStruct_or_union_specifier" ):
            listener.exitStruct_or_union_specifier(self)
def struct_or_union_specifier(self):
    """Parse the 'struct_or_union_specifier' rule (ANTLR-generated).

    Alternative 1: struct/union keyword, optional tag IDENTIFIER, then a
    braced struct_declaration_list.  Alternative 2: struct/union keyword
    followed by just a tag IDENTIFIER (forward reference form).
    """
    localctx = CParser.Struct_or_union_specifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 20, self.RULE_struct_or_union_specifier)
    self._la = 0 # Token type
    try:
        self.state = 262
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 251
            self.struct_or_union()
            self.state = 253
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.IDENTIFIER:
                # Optional tag name before the body.
                self.state = 252
                self.match(CParser.IDENTIFIER)
            self.state = 255
            self.match(CParser.T__0)
            self.state = 256
            self.struct_declaration_list()
            self.state = 257
            self.match(CParser.T__19)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 259
            self.struct_or_union()
            self.state = 260
            self.match(CParser.IDENTIFIER)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Struct_or_unionContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'struct_or_union' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def getRuleIndex(self):
        return CParser.RULE_struct_or_union
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStruct_or_union" ):
            listener.enterStruct_or_union(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStruct_or_union" ):
            listener.exitStruct_or_union(self)
def struct_or_union(self):
    """Match either of the two struct/union keyword tokens (T__20/T__21);
    recover inline otherwise (ANTLR-generated)."""
    localctx = CParser.Struct_or_unionContext(self, self._ctx, self.state)
    self.enterRule(localctx, 22, self.RULE_struct_or_union)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 264
        _la = self._input.LA(1)
        if not(_la==CParser.T__20 or _la==CParser.T__21):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Struct_declaration_listContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'struct_declaration_list' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Child accessor: all struct_declaration children, or the i-th one.
    # @param i=None Type: int
    def struct_declaration(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Struct_declarationContext)
        else:
            return self.getTypedRuleContext(CParser.Struct_declarationContext,i)
    def getRuleIndex(self):
        return CParser.RULE_struct_declaration_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStruct_declaration_list" ):
            listener.enterStruct_declaration_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStruct_declaration_list" ):
            listener.exitStruct_declaration_list(self)
def struct_declaration_list(self):
    """Parse one or more struct_declaration entries, looping while the
    lookahead can still start a declaration (ANTLR-generated)."""
    localctx = CParser.Struct_declaration_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 24, self.RULE_struct_declaration_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 267
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        while True:
            self.state = 266
            self.struct_declaration()
            self.state = 269
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Exit once the lookahead falls outside the rule's start set
            # (generated bitset test plus the IDENTIFIER special case).
            if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0) or _la==CParser.IDENTIFIER):
                break
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Struct_declarationContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'struct_declaration' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def specifier_qualifier_list(self):
        return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext,0)
    def struct_declarator_list(self):
        return self.getTypedRuleContext(CParser.Struct_declarator_listContext,0)
    def getRuleIndex(self):
        return CParser.RULE_struct_declaration
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStruct_declaration" ):
            listener.enterStruct_declaration(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStruct_declaration" ):
            listener.exitStruct_declaration(self)
def struct_declaration(self):
    """Parse one struct member: specifier_qualifier_list,
    struct_declarator_list, then a ';' (T__1) (ANTLR-generated)."""
    localctx = CParser.Struct_declarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 26, self.RULE_struct_declaration)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 271
        self.specifier_qualifier_list()
        self.state = 272
        self.struct_declarator_list()
        self.state = 273
        self.match(CParser.T__1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Specifier_qualifier_listContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'specifier_qualifier_list' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Child accessor: all type_qualifier children, or the i-th one.
    # @param i=None Type: int
    def type_qualifier(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Type_qualifierContext)
        else:
            return self.getTypedRuleContext(CParser.Type_qualifierContext,i)
    # @param i=None Type: int
    def type_specifier(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Type_specifierContext)
        else:
            return self.getTypedRuleContext(CParser.Type_specifierContext,i)
    def getRuleIndex(self):
        return CParser.RULE_specifier_qualifier_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterSpecifier_qualifier_list" ):
            listener.enterSpecifier_qualifier_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitSpecifier_qualifier_list" ):
            listener.exitSpecifier_qualifier_list(self)
def specifier_qualifier_list(self):
    """Parse one or more type qualifiers or type specifiers, in any order
    (ANTLR-generated rule method)."""
    localctx = CParser.Specifier_qualifier_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 28, self.RULE_specifier_qualifier_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 277
        self._errHandler.sync(self)
        _alt = 1
        # (...)+ loop: exit when the adaptive predictor returns 2.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 277
                self._errHandler.sync(self)
                token = self._input.LA(1)
                if token in [CParser.T__24, CParser.T__25, CParser.T__26, CParser.T__27, CParser.T__28, CParser.T__29, CParser.T__30, CParser.T__31, CParser.T__32, CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__36]:
                    self.state = 275
                    self.type_qualifier()
                    pass
                elif token in [CParser.T__10, CParser.T__11, CParser.T__12, CParser.T__13, CParser.T__14, CParser.T__15, CParser.T__16, CParser.T__17, CParser.T__18, CParser.T__20, CParser.T__21, CParser.T__23, CParser.IDENTIFIER]:
                    self.state = 276
                    self.type_specifier()
                    pass
                else:
                    raise NoViableAltException(self)
            else:
                raise NoViableAltException(self)
            self.state = 279
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,21,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Struct_declarator_listContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'struct_declarator_list' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Child accessor: all struct_declarator children, or the i-th one.
    # @param i=None Type: int
    def struct_declarator(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.Struct_declaratorContext)
        else:
            return self.getTypedRuleContext(CParser.Struct_declaratorContext,i)
    def getRuleIndex(self):
        return CParser.RULE_struct_declarator_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStruct_declarator_list" ):
            listener.enterStruct_declarator_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStruct_declarator_list" ):
            listener.exitStruct_declarator_list(self)
def struct_declarator_list(self):
    """Parse a comma-separated (T__3) list of struct_declarator
    (ANTLR-generated rule method)."""
    localctx = CParser.Struct_declarator_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 30, self.RULE_struct_declarator_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 281
        self.struct_declarator()
        self.state = 286
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # ( ',' struct_declarator )* tail loop.
        while _la==CParser.T__3:
            self.state = 282
            self.match(CParser.T__3)
            self.state = 283
            self.struct_declarator()
            self.state = 288
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Struct_declaratorContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'struct_declarator' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def declarator(self):
        return self.getTypedRuleContext(CParser.DeclaratorContext,0)
    def constant_expression(self):
        return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
    def getRuleIndex(self):
        return CParser.RULE_struct_declarator
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterStruct_declarator" ):
            listener.enterStruct_declarator(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitStruct_declarator" ):
            listener.exitStruct_declarator(self)
def struct_declarator(self):
    """Parse the 'struct_declarator' rule (ANTLR-generated).

    Alternative 1: declarator with an optional ':' (T__22)
    constant_expression bit-field width.  Alternative 2: an anonymous
    bit-field, just ':' constant_expression.
    """
    localctx = CParser.Struct_declaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 32, self.RULE_struct_declarator)
    self._la = 0 # Token type
    try:
        self.state = 296
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__33, CParser.T__34, CParser.T__35, CParser.T__37, CParser.T__41, CParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 289
            self.declarator()
            self.state = 292
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__22:
                # Optional bit-field width.
                self.state = 290
                self.match(CParser.T__22)
                self.state = 291
                self.constant_expression()
            pass
        elif token in [CParser.T__22]:
            self.enterOuterAlt(localctx, 2)
            self.state = 294
            self.match(CParser.T__22)
            self.state = 295
            self.constant_expression()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Enum_specifierContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'enum_specifier' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    def enumerator_list(self):
        return self.getTypedRuleContext(CParser.Enumerator_listContext,0)
    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)
    def getRuleIndex(self):
        return CParser.RULE_enum_specifier
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterEnum_specifier" ):
            listener.enterEnum_specifier(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitEnum_specifier" ):
            listener.exitEnum_specifier(self)
def enum_specifier(self):
    """Parse the 'enum_specifier' rule (ANTLR-generated).

    Alternative 1: anonymous enum body with optional trailing comma.
    Alternative 2: named enum with body.  Alternative 3: enum keyword
    plus tag IDENTIFIER only (reference form).
    """
    localctx = CParser.Enum_specifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 34, self.RULE_enum_specifier)
    self._la = 0 # Token type
    try:
        self.state = 317
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,27,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 298
            self.match(CParser.T__23)
            self.state = 299
            self.match(CParser.T__0)
            self.state = 300
            self.enumerator_list()
            self.state = 302
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__3:
                # Optional trailing comma before the closing brace.
                self.state = 301
                self.match(CParser.T__3)
            self.state = 304
            self.match(CParser.T__19)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 306
            self.match(CParser.T__23)
            self.state = 307
            self.match(CParser.IDENTIFIER)
            self.state = 308
            self.match(CParser.T__0)
            self.state = 309
            self.enumerator_list()
            self.state = 311
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__3:
                self.state = 310
                self.match(CParser.T__3)
            self.state = 313
            self.match(CParser.T__19)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 315
            self.match(CParser.T__23)
            self.state = 316
            self.match(CParser.IDENTIFIER)
            pass
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Enumerator_listContext(ParserRuleContext):
    """ANTLR-generated parse-tree node for the 'enumerator_list' rule."""
    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self,parser,parent=None,invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
    # Child accessor: all enumerator children, or the i-th one.
    # @param i=None Type: int
    def enumerator(self,i=None):
        if i is None:
            return self.getTypedRuleContexts(CParser.EnumeratorContext)
        else:
            return self.getTypedRuleContext(CParser.EnumeratorContext,i)
    def getRuleIndex(self):
        return CParser.RULE_enumerator_list
    # @param listener Type: ParseTreeListener
    def enterRule(self,listener):
        if hasattr( listener, "enterEnumerator_list" ):
            listener.enterEnumerator_list(self)
    # @param listener Type: ParseTreeListener
    def exitRule(self,listener):
        if hasattr( listener, "exitEnumerator_list" ):
            listener.exitEnumerator_list(self)
def enumerator_list(self):
    """Parse a comma-separated (T__3) list of enumerator entries; the loop
    is driven by adaptive prediction so a trailing comma (consumed by the
    caller) does not belong to the list (ANTLR-generated)."""
    localctx = CParser.Enumerator_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 36, self.RULE_enumerator_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 319
        self.enumerator()
        self.state = 324
        self._errHandler.sync(self)
        _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 320
                self.match(CParser.T__3)
                self.state = 321
                self.enumerator()
            self.state = 326
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,28,self._ctx)
    except RecognitionException as re:
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class EnumeratorContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``enumerator``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)

    def constant_expression(self):
        return self.getTypedRuleContext(CParser.Constant_expressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_enumerator

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterEnumerator"):
            listener.enterEnumerator(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitEnumerator"):
            listener.exitEnumerator(self)
def enumerator(self):
    """Parse rule ``enumerator : IDENTIFIER ('=' constant_expression)?``.

    Generated by ANTLR4; ``self.state`` numbers index the serialized ATN.
    """
    localctx = CParser.EnumeratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 38, self.RULE_enumerator)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 327
        self.match(CParser.IDENTIFIER)
        self.state = 330
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional initializer: '=' followed by a constant expression.
        if _la==CParser.T__4:
            self.state = 328
            self.match(CParser.T__4)
            self.state = 329
            self.constant_expression()
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Type_qualifierContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``type_qualifier``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def getRuleIndex(self):
        return CParser.RULE_type_qualifier

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterType_qualifier"):
            listener.enterType_qualifier(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitType_qualifier"):
            listener.exitType_qualifier(self)
def type_qualifier(self):
    """Parse rule ``type_qualifier``: match one qualifier keyword token.

    The bitmask test below is the generated set-membership check for
    tokens T__24..T__36; do not edit by hand.
    """
    localctx = CParser.Type_qualifierContext(self, self._ctx, self.state)
    self.enterRule(localctx, 40, self.RULE_type_qualifier)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 332
        _la = self._input.LA(1)
        if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36))) != 0)):
            self._errHandler.recoverInline(self)
        else:
            self._errHandler.reportMatch(self)
            self.consume()
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class DeclaratorContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``declarator``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def direct_declarator(self):
        return self.getTypedRuleContext(CParser.Direct_declaratorContext, 0)

    def pointer(self):
        return self.getTypedRuleContext(CParser.PointerContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_declarator

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterDeclarator"):
            listener.enterDeclarator(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitDeclarator"):
            listener.exitDeclarator(self)
def declarator(self):
    """Parse rule ``declarator``.

    Alt 1: optional pointer, then optional modifier tokens (T__33/T__34/
    T__35 -- calling-convention style keywords), then direct_declarator.
    Alt 2: a bare pointer. Generated by ANTLR4; state numbers index the
    serialized ATN.
    """
    localctx = CParser.DeclaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 42, self.RULE_declarator)
    self._la = 0 # Token type
    try:
        self.state = 348
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 335
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional leading pointer.
            if _la==CParser.T__41:
                self.state = 334
                self.pointer()

            self.state = 338
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__33:
                self.state = 337
                self.match(CParser.T__33)

            self.state = 341
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__34:
                self.state = 340
                self.match(CParser.T__34)

            self.state = 344
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__35:
                self.state = 343
                self.match(CParser.T__35)

            self.state = 346
            self.direct_declarator()
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 347
            self.pointer()
            pass
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Direct_declaratorContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``direct_declarator``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)

    def declarator_suffix(self, i=None):
        # With i=None return every declarator_suffix child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.Declarator_suffixContext, i)
        return self.getTypedRuleContexts(CParser.Declarator_suffixContext)

    def declarator(self):
        return self.getTypedRuleContext(CParser.DeclaratorContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_direct_declarator

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterDirect_declarator"):
            listener.enterDirect_declarator(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitDirect_declarator"):
            listener.exitDirect_declarator(self)
def direct_declarator(self):
    """Parse rule ``direct_declarator``.

    Alt 1: IDENTIFIER followed by zero or more declarator_suffix.
    Alt 2: parenthesized declarator ('(' declarator ')') followed by one
    or more declarator_suffix. Generated by ANTLR4; state numbers index
    the serialized ATN.
    """
    localctx = CParser.Direct_declaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 44, self.RULE_direct_declarator)
    try:
        self.state = 368
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.IDENTIFIER]:
            self.enterOuterAlt(localctx, 1)
            self.state = 350
            self.match(CParser.IDENTIFIER)
            self.state = 354
            self._errHandler.sync(self)
            # Zero-or-more suffixes ((...) or [...]), ATN-predicted.
            _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 351
                    self.declarator_suffix()
                self.state = 356
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,35,self._ctx)
            pass
        elif token in [CParser.T__37]:
            self.enterOuterAlt(localctx, 2)
            self.state = 357
            self.match(CParser.T__37)
            self.state = 359
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,36,self._ctx)
            # Optional modifier token before the nested declarator.
            if la_ == 1:
                self.state = 358
                self.match(CParser.T__33)

            self.state = 361
            self.declarator()
            self.state = 362
            self.match(CParser.T__38)
            self.state = 364
            self._errHandler.sync(self)
            # One-or-more suffixes (generated (+) loop: first pass forced).
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 363
                    self.declarator_suffix()

                else:
                    raise NoViableAltException(self)
                self.state = 366
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,37,self._ctx)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Declarator_suffixContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``declarator_suffix``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def constant_expression(self):
        return self.getTypedRuleContext(CParser.Constant_expressionContext, 0)

    def parameter_type_list(self):
        return self.getTypedRuleContext(CParser.Parameter_type_listContext, 0)

    def identifier_list(self):
        return self.getTypedRuleContext(CParser.Identifier_listContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_declarator_suffix

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterDeclarator_suffix"):
            listener.enterDeclarator_suffix(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitDeclarator_suffix"):
            listener.exitDeclarator_suffix(self)
def declarator_suffix(self):
    """Parse rule ``declarator_suffix``.

    Five alternatives: '[' constant_expression ']', '[' ']',
    '(' parameter_type_list ')', '(' identifier_list ')', '(' ')'.
    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Declarator_suffixContext(self, self._ctx, self.state)
    self.enterRule(localctx, 46, self.RULE_declarator_suffix)
    try:
        self.state = 386
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,39,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 370
            self.match(CParser.T__39)
            self.state = 371
            self.constant_expression()
            self.state = 372
            self.match(CParser.T__40)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 374
            self.match(CParser.T__39)
            self.state = 375
            self.match(CParser.T__40)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 376
            self.match(CParser.T__37)
            self.state = 377
            self.parameter_type_list()
            self.state = 378
            self.match(CParser.T__38)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 380
            self.match(CParser.T__37)
            self.state = 381
            self.identifier_list()
            self.state = 382
            self.match(CParser.T__38)
            pass
        elif la_ == 5:
            self.enterOuterAlt(localctx, 5)
            self.state = 384
            self.match(CParser.T__37)
            self.state = 385
            self.match(CParser.T__38)
            pass
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class PointerContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``pointer``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def type_qualifier(self, i=None):
        # With i=None return every type_qualifier child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.Type_qualifierContext, i)
        return self.getTypedRuleContexts(CParser.Type_qualifierContext)

    def pointer(self):
        return self.getTypedRuleContext(CParser.PointerContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_pointer

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterPointer"):
            listener.enterPointer(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitPointer"):
            listener.exitPointer(self)
def pointer(self):
    """Parse rule ``pointer``.

    Alt 1: '*' type_qualifier+ pointer?; Alt 2: '*' pointer; Alt 3: '*'.
    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.PointerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 48, self.RULE_pointer)
    try:
        self.state = 400
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 388
            self.match(CParser.T__41)
            self.state = 390
            self._errHandler.sync(self)
            # One-or-more type qualifiers (generated (+) loop).
            _alt = 1
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt == 1:
                    self.state = 389
                    self.type_qualifier()

                else:
                    raise NoViableAltException(self)
                self.state = 392
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,40,self._ctx)

            self.state = 395
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,41,self._ctx)
            # Optional trailing nested pointer.
            if la_ == 1:
                self.state = 394
                self.pointer()

            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 397
            self.match(CParser.T__41)
            self.state = 398
            self.pointer()
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 399
            self.match(CParser.T__41)
            pass
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Parameter_type_listContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``parameter_type_list``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def parameter_list(self):
        return self.getTypedRuleContext(CParser.Parameter_listContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_parameter_type_list

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterParameter_type_list"):
            listener.enterParameter_type_list(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitParameter_type_list"):
            listener.exitParameter_type_list(self)
def parameter_type_list(self):
    """Parse rule ``parameter_type_list``: parameter_list with an optional
    trailing ``',' T__28? '...'`` (variadic marker).

    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Parameter_type_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 50, self.RULE_parameter_type_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 402
        self.parameter_list()
        self.state = 408
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        if _la==CParser.T__3:
            self.state = 403
            self.match(CParser.T__3)
            self.state = 405
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional modifier token before the ellipsis.
            if _la==CParser.T__28:
                self.state = 404
                self.match(CParser.T__28)

            self.state = 407
            self.match(CParser.T__42)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Parameter_listContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``parameter_list``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def parameter_declaration(self, i=None):
        # With i=None return every parameter_declaration child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.Parameter_declarationContext, i)
        return self.getTypedRuleContexts(CParser.Parameter_declarationContext)

    def getRuleIndex(self):
        return CParser.RULE_parameter_list

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterParameter_list"):
            listener.enterParameter_list(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitParameter_list"):
            listener.exitParameter_list(self)
def parameter_list(self):
    """Parse rule ``parameter_list``: parameter_declaration
    (',' T__28? parameter_declaration)*.

    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Parameter_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 52, self.RULE_parameter_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 410
        self.parameter_declaration()
        self.state = 418
        self._errHandler.sync(self)
        # Greedy loop over the remaining comma-separated declarations.
        _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 411
                self.match(CParser.T__3)
                self.state = 413
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,45,self._ctx)
                # Optional modifier token before the declaration.
                if la_ == 1:
                    self.state = 412
                    self.match(CParser.T__28)

                self.state = 415
                self.parameter_declaration()
            self.state = 420
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,46,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Parameter_declarationContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``parameter_declaration``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def declaration_specifiers(self):
        return self.getTypedRuleContext(CParser.Declaration_specifiersContext, 0)

    def declarator(self, i=None):
        # With i=None return every declarator child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.DeclaratorContext, i)
        return self.getTypedRuleContexts(CParser.DeclaratorContext)

    def abstract_declarator(self, i=None):
        # With i=None return every abstract_declarator child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.Abstract_declaratorContext, i)
        return self.getTypedRuleContexts(CParser.Abstract_declaratorContext)

    def IDENTIFIER(self):
        return self.getToken(CParser.IDENTIFIER, 0)

    def pointer(self, i=None):
        # With i=None return every pointer child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.PointerContext, i)
        return self.getTypedRuleContexts(CParser.PointerContext)

    def getRuleIndex(self):
        return CParser.RULE_parameter_declaration

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterParameter_declaration"):
            listener.enterParameter_declaration(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitParameter_declaration"):
            listener.exitParameter_declaration(self)
def parameter_declaration(self):
    """Parse rule ``parameter_declaration``.

    Alt 1: declaration_specifiers followed by any mix of declarators /
    abstract declarators, then an optional T__28 modifier token.
    Alt 2: pointer* IDENTIFIER. The bitmask below is the generated
    follow-set test for tokens in [T__33..T__41, IDENTIFIER].
    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Parameter_declarationContext(self, self._ctx, self.state)
    self.enterRule(localctx, 54, self.RULE_parameter_declaration)
    self._la = 0 # Token type
    try:
        self.state = 439
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,51,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 421
            self.declaration_specifiers()
            self.state = 426
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while ((((_la - 34)) & ~0x3f) == 0 and ((1 << (_la - 34)) & ((1 << (CParser.T__33 - 34)) | (1 << (CParser.T__34 - 34)) | (1 << (CParser.T__35 - 34)) | (1 << (CParser.T__37 - 34)) | (1 << (CParser.T__39 - 34)) | (1 << (CParser.T__41 - 34)) | (1 << (CParser.IDENTIFIER - 34)))) != 0):
                self.state = 424
                self._errHandler.sync(self)
                la_ = self._interp.adaptivePredict(self._input,47,self._ctx)
                if la_ == 1:
                    self.state = 422
                    self.declarator()
                    pass
                elif la_ == 2:
                    self.state = 423
                    self.abstract_declarator()
                    pass

                self.state = 428
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            self.state = 430
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__28:
                self.state = 429
                self.match(CParser.T__28)

            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 435
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Zero-or-more pointers, then the parameter name.
            while _la==CParser.T__41:
                self.state = 432
                self.pointer()
                self.state = 437
                self._errHandler.sync(self)
                _la = self._input.LA(1)

            self.state = 438
            self.match(CParser.IDENTIFIER)
            pass
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Identifier_listContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``identifier_list``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self, i=None):
        # With i=None return every IDENTIFIER token, otherwise the i-th one.
        if i is not None:
            return self.getToken(CParser.IDENTIFIER, i)
        return self.getTokens(CParser.IDENTIFIER)

    def getRuleIndex(self):
        return CParser.RULE_identifier_list

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterIdentifier_list"):
            listener.enterIdentifier_list(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitIdentifier_list"):
            listener.exitIdentifier_list(self)
def identifier_list(self):
    """Parse rule ``identifier_list : IDENTIFIER (',' IDENTIFIER)*``.

    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Identifier_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 56, self.RULE_identifier_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 441
        self.match(CParser.IDENTIFIER)
        self.state = 446
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop over trailing ", IDENTIFIER" pairs.
        while _la==CParser.T__3:
            self.state = 442
            self.match(CParser.T__3)
            self.state = 443
            self.match(CParser.IDENTIFIER)
            self.state = 448
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Type_nameContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``type_name``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def specifier_qualifier_list(self):
        return self.getTypedRuleContext(CParser.Specifier_qualifier_listContext, 0)

    def abstract_declarator(self):
        return self.getTypedRuleContext(CParser.Abstract_declaratorContext, 0)

    def type_id(self):
        return self.getTypedRuleContext(CParser.Type_idContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_type_name

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterType_name"):
            listener.enterType_name(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitType_name"):
            listener.exitType_name(self)
def type_name(self):
    """Parse rule ``type_name``.

    Alt 1: specifier_qualifier_list with an optional abstract_declarator
    (lookahead set {'(', '[', '*'}); Alt 2: a bare type_id.
    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Type_nameContext(self, self._ctx, self.state)
    self.enterRule(localctx, 58, self.RULE_type_name)
    self._la = 0 # Token type
    try:
        self.state = 454
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,54,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 449
            self.specifier_qualifier_list()
            self.state = 451
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__37) | (1 << CParser.T__39) | (1 << CParser.T__41))) != 0):
                self.state = 450
                self.abstract_declarator()

            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 453
            self.type_id()
            pass
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Abstract_declaratorContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``abstract_declarator``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def pointer(self):
        return self.getTypedRuleContext(CParser.PointerContext, 0)

    def direct_abstract_declarator(self):
        return self.getTypedRuleContext(CParser.Direct_abstract_declaratorContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_abstract_declarator

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterAbstract_declarator"):
            listener.enterAbstract_declarator(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitAbstract_declarator"):
            listener.exitAbstract_declarator(self)
def abstract_declarator(self):
    """Parse rule ``abstract_declarator``.

    Alt 1 (on '*'): pointer followed by an optional
    direct_abstract_declarator; Alt 2 (on '(' or '['): a bare
    direct_abstract_declarator. Generated by ANTLR4; state numbers index
    the serialized ATN.
    """
    localctx = CParser.Abstract_declaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 60, self.RULE_abstract_declarator)
    try:
        self.state = 461
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__41]:
            self.enterOuterAlt(localctx, 1)
            self.state = 456
            self.pointer()
            self.state = 458
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,55,self._ctx)
            if la_ == 1:
                self.state = 457
                self.direct_abstract_declarator()

            pass
        elif token in [CParser.T__37, CParser.T__39]:
            self.enterOuterAlt(localctx, 2)
            self.state = 460
            self.direct_abstract_declarator()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Direct_abstract_declaratorContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``direct_abstract_declarator``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def abstract_declarator(self):
        return self.getTypedRuleContext(CParser.Abstract_declaratorContext, 0)

    def abstract_declarator_suffix(self, i=None):
        # With i=None return every abstract_declarator_suffix child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.Abstract_declarator_suffixContext, i)
        return self.getTypedRuleContexts(CParser.Abstract_declarator_suffixContext)

    def getRuleIndex(self):
        return CParser.RULE_direct_abstract_declarator

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterDirect_abstract_declarator"):
            listener.enterDirect_abstract_declarator(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitDirect_abstract_declarator"):
            listener.exitDirect_abstract_declarator(self)
def direct_abstract_declarator(self):
    """Parse rule ``direct_abstract_declarator``.

    Head is either '(' abstract_declarator ')' or a single
    abstract_declarator_suffix; then zero or more further suffixes.
    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Direct_abstract_declaratorContext(self, self._ctx, self.state)
    self.enterRule(localctx, 62, self.RULE_direct_abstract_declarator)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 468
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
        if la_ == 1:
            self.state = 463
            self.match(CParser.T__37)
            self.state = 464
            self.abstract_declarator()
            self.state = 465
            self.match(CParser.T__38)
            pass
        elif la_ == 2:
            self.state = 467
            self.abstract_declarator_suffix()
            pass

        self.state = 473
        self._errHandler.sync(self)
        # Greedy loop over additional suffixes.
        _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 470
                self.abstract_declarator_suffix()
            self.state = 475
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,58,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Abstract_declarator_suffixContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``abstract_declarator_suffix``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def constant_expression(self):
        return self.getTypedRuleContext(CParser.Constant_expressionContext, 0)

    def parameter_type_list(self):
        return self.getTypedRuleContext(CParser.Parameter_type_listContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_abstract_declarator_suffix

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterAbstract_declarator_suffix"):
            listener.enterAbstract_declarator_suffix(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitAbstract_declarator_suffix"):
            listener.exitAbstract_declarator_suffix(self)
def abstract_declarator_suffix(self):
    """Parse rule ``abstract_declarator_suffix``.

    Four alternatives: '[' ']', '[' constant_expression ']', '(' ')',
    '(' parameter_type_list ')'. Generated by ANTLR4; state numbers
    index the serialized ATN.
    """
    localctx = CParser.Abstract_declarator_suffixContext(self, self._ctx, self.state)
    self.enterRule(localctx, 64, self.RULE_abstract_declarator_suffix)
    try:
        self.state = 488
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,59,self._ctx)
        if la_ == 1:
            self.enterOuterAlt(localctx, 1)
            self.state = 476
            self.match(CParser.T__39)
            self.state = 477
            self.match(CParser.T__40)
            pass
        elif la_ == 2:
            self.enterOuterAlt(localctx, 2)
            self.state = 478
            self.match(CParser.T__39)
            self.state = 479
            self.constant_expression()
            self.state = 480
            self.match(CParser.T__40)
            pass
        elif la_ == 3:
            self.enterOuterAlt(localctx, 3)
            self.state = 482
            self.match(CParser.T__37)
            self.state = 483
            self.match(CParser.T__38)
            pass
        elif la_ == 4:
            self.enterOuterAlt(localctx, 4)
            self.state = 484
            self.match(CParser.T__37)
            self.state = 485
            self.parameter_type_list()
            self.state = 486
            self.match(CParser.T__38)
            pass
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class InitializerContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``initializer``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def assignment_expression(self):
        return self.getTypedRuleContext(CParser.Assignment_expressionContext, 0)

    def initializer_list(self):
        return self.getTypedRuleContext(CParser.Initializer_listContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_initializer

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterInitializer"):
            listener.enterInitializer(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitInitializer"):
            listener.exitInitializer(self)
def initializer(self):
    """Parse rule ``initializer``.

    Alt 1 (expression-start tokens): assignment_expression.
    Alt 2 (on '{'): '{' initializer_list ','? '}'.
    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.InitializerContext(self, self._ctx, self.state)
    self.enterRule(localctx, 66, self.RULE_initializer)
    self._la = 0 # Token type
    try:
        self.state = 498
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
            self.enterOuterAlt(localctx, 1)
            self.state = 490
            self.assignment_expression()
            pass
        elif token in [CParser.T__0]:
            self.enterOuterAlt(localctx, 2)
            self.state = 491
            self.match(CParser.T__0)
            self.state = 492
            self.initializer_list()
            self.state = 494
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional trailing comma before the closing brace.
            if _la==CParser.T__3:
                self.state = 493
                self.match(CParser.T__3)

            self.state = 496
            self.match(CParser.T__19)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Initializer_listContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``initializer_list``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def initializer(self, i=None):
        # With i=None return every initializer child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.InitializerContext, i)
        return self.getTypedRuleContexts(CParser.InitializerContext)

    def getRuleIndex(self):
        return CParser.RULE_initializer_list

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterInitializer_list"):
            listener.enterInitializer_list(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitInitializer_list"):
            listener.exitInitializer_list(self)
def initializer_list(self):
    """Parse rule ``initializer_list : initializer (',' initializer)*``.

    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Initializer_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 68, self.RULE_initializer_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 500
        self.initializer()
        self.state = 505
        self._errHandler.sync(self)
        # Greedy loop over trailing ", initializer" pairs.
        _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt==1:
                self.state = 501
                self.match(CParser.T__3)
                self.state = 502
                self.initializer()
            self.state = 507
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,62,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Argument_expression_listContext(ParserRuleContext):
    """Parse-tree node for the grammar rule ``argument_expression_list``."""

    def __init__(self, parser, parent=None, invokingState=-1):
        # parent: ParserRuleContext; invokingState: int
        super().__init__(parent, invokingState)
        self.parser = parser

    def assignment_expression(self, i=None):
        # With i=None return every assignment_expression child, otherwise the i-th one.
        if i is not None:
            return self.getTypedRuleContext(CParser.Assignment_expressionContext, i)
        return self.getTypedRuleContexts(CParser.Assignment_expressionContext)

    def getRuleIndex(self):
        return CParser.RULE_argument_expression_list

    def enterRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "enterArgument_expression_list"):
            listener.enterArgument_expression_list(self)

    def exitRule(self, listener):
        # listener: ParseTreeListener
        if hasattr(listener, "exitArgument_expression_list"):
            listener.exitArgument_expression_list(self)
def argument_expression_list(self):
    """Parse rule ``argument_expression_list``: assignment_expression
    T__28? (',' assignment_expression T__28?)*.

    Generated by ANTLR4; state numbers index the serialized ATN.
    """
    localctx = CParser.Argument_expression_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 70, self.RULE_argument_expression_list)
    self._la = 0 # Token type
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 508
        self.assignment_expression()
        self.state = 510
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Optional modifier token after the first argument.
        if _la==CParser.T__28:
            self.state = 509
            self.match(CParser.T__28)

        self.state = 519
        self._errHandler.sync(self)
        _la = self._input.LA(1)
        # Loop over the remaining comma-separated arguments.
        while _la==CParser.T__3:
            self.state = 512
            self.match(CParser.T__3)
            self.state = 513
            self.assignment_expression()
            self.state = 515
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__28:
                self.state = 514
                self.match(CParser.T__28)

            self.state = 521
            self._errHandler.sync(self)
            _la = self._input.LA(1)
    except RecognitionException as re:
        # Standard ANTLR error path: record, report, resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
    class Additive_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the additive_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def multiplicative_expression(self,i=None):
            # i is None -> all child multiplicative_expression contexts;
            # otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Multiplicative_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Multiplicative_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_additive_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAdditive_expression" ):
                listener.enterAdditive_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAdditive_expression" ):
                listener.exitAdditive_expression(self)
    def additive_expression(self):
        """Parse the additive_expression rule (ANTLR-generated; do not hand-edit).

        Matches a multiplicative_expression followed by zero or more
        (T__43 | T__44) multiplicative_expression repetitions.
        """
        localctx = CParser.Additive_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 72, self.RULE_additive_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 522
            self.multiplicative_expression()
            self.state = 529
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__43 or _la==CParser.T__44:
                self.state = 527
                self._errHandler.sync(self)
                token = self._input.LA(1)
                if token in [CParser.T__43]:
                    self.state = 523
                    self.match(CParser.T__43)
                    self.state = 524
                    self.multiplicative_expression()
                    pass
                elif token in [CParser.T__44]:
                    self.state = 525
                    self.match(CParser.T__44)
                    self.state = 526
                    self.multiplicative_expression()
                    pass
                else:
                    raise NoViableAltException(self)
                self.state = 531
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Multiplicative_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the multiplicative_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def cast_expression(self,i=None):
            # i is None -> all child cast_expression contexts; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Cast_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Cast_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_multiplicative_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterMultiplicative_expression" ):
                listener.enterMultiplicative_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitMultiplicative_expression" ):
                listener.exitMultiplicative_expression(self)
    def multiplicative_expression(self):
        """Parse the multiplicative_expression rule (ANTLR-generated; do not hand-edit).

        Matches a cast_expression followed by zero or more
        (T__41 | T__45 | T__46) cast_expression repetitions.
        """
        localctx = CParser.Multiplicative_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 74, self.RULE_multiplicative_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 532
            self.cast_expression()
            self.state = 541
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Bitset membership test generated by ANTLR for {T__41, T__45, T__46}.
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__45) | (1 << CParser.T__46))) != 0):
                self.state = 539
                self._errHandler.sync(self)
                token = self._input.LA(1)
                if token in [CParser.T__41]:
                    self.state = 533
                    self.match(CParser.T__41)
                    self.state = 534
                    self.cast_expression()
                    pass
                elif token in [CParser.T__45]:
                    self.state = 535
                    self.match(CParser.T__45)
                    self.state = 536
                    self.cast_expression()
                    pass
                elif token in [CParser.T__46]:
                    self.state = 537
                    self.match(CParser.T__46)
                    self.state = 538
                    self.cast_expression()
                    pass
                else:
                    raise NoViableAltException(self)
                self.state = 543
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Cast_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the cast_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def type_name(self):
            return self.getTypedRuleContext(CParser.Type_nameContext,0)
        def cast_expression(self):
            return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
        def unary_expression(self):
            return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_cast_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterCast_expression" ):
                listener.enterCast_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitCast_expression" ):
                listener.exitCast_expression(self)
    def cast_expression(self):
        """Parse the cast_expression rule (ANTLR-generated; do not hand-edit).

        Alternative 1: T__37 type_name T__38 cast_expression (a cast form);
        alternative 2: a plain unary_expression.  The choice is made by
        adaptive prediction (decision 70).
        """
        localctx = CParser.Cast_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 76, self.RULE_cast_expression)
        try:
            self.state = 550
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,70,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 544
                self.match(CParser.T__37)
                self.state = 545
                self.type_name()
                self.state = 546
                self.match(CParser.T__38)
                self.state = 547
                self.cast_expression()
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 549
                self.unary_expression()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Unary_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the unary_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def postfix_expression(self):
            return self.getTypedRuleContext(CParser.Postfix_expressionContext,0)
        def unary_expression(self):
            return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
        def unary_operator(self):
            return self.getTypedRuleContext(CParser.Unary_operatorContext,0)
        def cast_expression(self):
            return self.getTypedRuleContext(CParser.Cast_expressionContext,0)
        def type_name(self):
            return self.getTypedRuleContext(CParser.Type_nameContext,0)
        def getRuleIndex(self):
            return CParser.RULE_unary_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterUnary_expression" ):
                listener.enterUnary_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitUnary_expression" ):
                listener.exitUnary_expression(self)
    def unary_expression(self):
        """Parse the unary_expression rule (ANTLR-generated; do not hand-edit).

        Six alternatives, chosen by adaptive prediction (decision 71):
        postfix_expression; T__47/T__48 prefix followed by unary_expression;
        unary_operator cast_expression; T__49 unary_expression; or
        T__49 T__37 type_name T__38 (a sizeof-style form over a type).
        """
        localctx = CParser.Unary_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 78, self.RULE_unary_expression)
        try:
            self.state = 567
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,71,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 552
                self.postfix_expression()
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 553
                self.match(CParser.T__47)
                self.state = 554
                self.unary_expression()
                pass
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 555
                self.match(CParser.T__48)
                self.state = 556
                self.unary_expression()
                pass
            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 557
                self.unary_operator()
                self.state = 558
                self.cast_expression()
                pass
            elif la_ == 5:
                self.enterOuterAlt(localctx, 5)
                self.state = 560
                self.match(CParser.T__49)
                self.state = 561
                self.unary_expression()
                pass
            elif la_ == 6:
                self.enterOuterAlt(localctx, 6)
                self.state = 562
                self.match(CParser.T__49)
                self.state = 563
                self.match(CParser.T__37)
                self.state = 564
                self.type_name()
                self.state = 565
                self.match(CParser.T__38)
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Postfix_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the postfix_expression rule.

        Carries rule-local labels used by the embedded semantic actions in
        CParser.postfix_expression (p, a, b, c, x, y, z) plus FuncCallText,
        the accumulated textual name of a call target.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            self.FuncCallText = ''
            self.p = None # Primary_expressionContext
            self.a = None # Token
            self.c = None # Argument_expression_listContext
            self.b = None # Token
            self.x = None # Token
            self.y = None # Token
            self.z = None # Token
        def primary_expression(self):
            return self.getTypedRuleContext(CParser.Primary_expressionContext,0)
        # @param i=None Type: int
        def expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.ExpressionContext)
            else:
                return self.getTypedRuleContext(CParser.ExpressionContext,i)
        # @param i=None Type: int
        def macro_parameter_list(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Macro_parameter_listContext)
            else:
                return self.getTypedRuleContext(CParser.Macro_parameter_listContext,i)
        # @param i=None Type: int
        def argument_expression_list(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Argument_expression_listContext)
            else:
                return self.getTypedRuleContext(CParser.Argument_expression_listContext,i)
        # @param i=None Type: int
        def IDENTIFIER(self,i=None):
            if i is None:
                return self.getTokens(CParser.IDENTIFIER)
            else:
                return self.getToken(CParser.IDENTIFIER, i)
        def getRuleIndex(self):
            return CParser.RULE_postfix_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterPostfix_expression" ):
                listener.enterPostfix_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitPostfix_expression" ):
                listener.exitPostfix_expression(self)
    def postfix_expression(self):
        """Parse the postfix_expression rule (ANTLR-generated; do not hand-edit).

        Matches a primary_expression followed by any number of postfix
        suffixes: indexing, call forms, member access (T__50 '.' / T__51
        '->' style suffixes), and T__47/T__48 postfix operators.  Embedded
        semantic actions build self.FuncCallText from the matched text and
        invoke self.StoreFunctionCalling on call forms — presumably to
        record function calls for later ECC analysis (helper defined
        outside this chunk; verify there).
        """
        localctx = CParser.Postfix_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 80, self.RULE_postfix_expression)
        # Reset the accumulated call-target text on the parser for this rule.
        self.FuncCallText=''
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 569
            localctx.p = self.primary_expression()
            self.FuncCallText += (None if localctx.p is None else self._input.getText((localctx.p.start,localctx.p.stop)))
            self.state = 600
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 598
                    self._errHandler.sync(self)
                    la_ = self._interp.adaptivePredict(self._input,72,self._ctx)
                    if la_ == 1:
                        # Index suffix: T__39 expression T__40
                        self.state = 571
                        self.match(CParser.T__39)
                        self.state = 572
                        self.expression()
                        self.state = 573
                        self.match(CParser.T__40)
                        pass
                    elif la_ == 2:
                        # Call with no arguments: T__37 T__38
                        self.state = 575
                        self.match(CParser.T__37)
                        self.state = 576
                        localctx.a = self.match(CParser.T__38)
                        self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.a is None else localctx.a.line), localctx.a.column, self.FuncCallText, '')
                        pass
                    elif la_ == 3:
                        # Call with arguments: T__37 argument_expression_list T__38
                        self.state = 578
                        self.match(CParser.T__37)
                        self.state = 579
                        localctx.c = self.argument_expression_list()
                        self.state = 580
                        localctx.b = self.match(CParser.T__38)
                        self.StoreFunctionCalling((None if localctx.p is None else localctx.p.start).line, (None if localctx.p is None else localctx.p.start).column, (0 if localctx.b is None else localctx.b.line), localctx.b.column, self.FuncCallText, (None if localctx.c is None else self._input.getText((localctx.c.start,localctx.c.stop))))
                        pass
                    elif la_ == 4:
                        # Macro-style parameter list: T__37 macro_parameter_list T__38
                        self.state = 583
                        self.match(CParser.T__37)
                        self.state = 584
                        self.macro_parameter_list()
                        self.state = 585
                        self.match(CParser.T__38)
                        pass
                    elif la_ == 5:
                        # Member access; append ".<identifier>" to the call text.
                        self.state = 587
                        self.match(CParser.T__50)
                        self.state = 588
                        localctx.x = self.match(CParser.IDENTIFIER)
                        self.FuncCallText += '.' + (None if localctx.x is None else localctx.x.text)
                        pass
                    elif la_ == 6:
                        # T__41 suffix; note the call text is RESET to the identifier here.
                        self.state = 590
                        self.match(CParser.T__41)
                        self.state = 591
                        localctx.y = self.match(CParser.IDENTIFIER)
                        self.FuncCallText = (None if localctx.y is None else localctx.y.text)
                        pass
                    elif la_ == 7:
                        # Pointer member access; append "-><identifier>".
                        self.state = 593
                        self.match(CParser.T__51)
                        self.state = 594
                        localctx.z = self.match(CParser.IDENTIFIER)
                        self.FuncCallText += '->' + (None if localctx.z is None else localctx.z.text)
                        pass
                    elif la_ == 8:
                        self.state = 596
                        self.match(CParser.T__47)
                        pass
                    elif la_ == 9:
                        self.state = 597
                        self.match(CParser.T__48)
                        pass
                self.state = 602
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,73,self._ctx)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Macro_parameter_listContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the macro_parameter_list rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def parameter_declaration(self,i=None):
            # i is None -> all child parameter_declaration contexts; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Parameter_declarationContext)
            else:
                return self.getTypedRuleContext(CParser.Parameter_declarationContext,i)
        def getRuleIndex(self):
            return CParser.RULE_macro_parameter_list
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterMacro_parameter_list" ):
                listener.enterMacro_parameter_list(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitMacro_parameter_list" ):
                listener.exitMacro_parameter_list(self)
    def macro_parameter_list(self):
        """Parse the macro_parameter_list rule (ANTLR-generated; do not hand-edit).

        Matches parameter_declaration (T__3 parameter_declaration)*.
        """
        localctx = CParser.Macro_parameter_listContext(self, self._ctx, self.state)
        self.enterRule(localctx, 82, self.RULE_macro_parameter_list)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 603
            self.parameter_declaration()
            self.state = 608
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__3:
                self.state = 604
                self.match(CParser.T__3)
                self.state = 605
                self.parameter_declaration()
                self.state = 610
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Unary_operatorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the unary_operator rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def getRuleIndex(self):
            return CParser.RULE_unary_operator
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterUnary_operator" ):
                listener.enterUnary_operator(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitUnary_operator" ):
                listener.exitUnary_operator(self)
    def unary_operator(self):
        """Parse the unary_operator rule (ANTLR-generated; do not hand-edit).

        Consumes a single token from the set {T__41, T__43, T__44, T__52,
        T__53, T__54}; any other token triggers inline error recovery.
        """
        localctx = CParser.Unary_operatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 84, self.RULE_unary_operator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 611
            _la = self._input.LA(1)
            # Bitset membership test generated by ANTLR for the operator token set.
            if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Primary_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the primary_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        def constant(self):
            return self.getTypedRuleContext(CParser.ConstantContext,0)
        def expression(self):
            return self.getTypedRuleContext(CParser.ExpressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_primary_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterPrimary_expression" ):
                listener.enterPrimary_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitPrimary_expression" ):
                listener.exitPrimary_expression(self)
    def primary_expression(self):
        """Parse the primary_expression rule (ANTLR-generated; do not hand-edit).

        Three alternatives, chosen by adaptive prediction (decision 75):
        IDENTIFIER; constant; or a parenthesized T__37 expression T__38.
        """
        localctx = CParser.Primary_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 86, self.RULE_primary_expression)
        try:
            self.state = 619
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,75,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 613
                self.match(CParser.IDENTIFIER)
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 614
                self.constant()
                pass
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 615
                self.match(CParser.T__37)
                self.state = 616
                self.expression()
                self.state = 617
                self.match(CParser.T__38)
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ConstantContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the constant rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def HEX_LITERAL(self):
            return self.getToken(CParser.HEX_LITERAL, 0)
        def OCTAL_LITERAL(self):
            return self.getToken(CParser.OCTAL_LITERAL, 0)
        def DECIMAL_LITERAL(self):
            return self.getToken(CParser.DECIMAL_LITERAL, 0)
        def CHARACTER_LITERAL(self):
            return self.getToken(CParser.CHARACTER_LITERAL, 0)
        # @param i=None Type: int
        def IDENTIFIER(self,i=None):
            # i is None -> all IDENTIFIER tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(CParser.IDENTIFIER)
            else:
                return self.getToken(CParser.IDENTIFIER, i)
        # @param i=None Type: int
        def STRING_LITERAL(self,i=None):
            # i is None -> all STRING_LITERAL tokens; otherwise the i-th one.
            if i is None:
                return self.getTokens(CParser.STRING_LITERAL)
            else:
                return self.getToken(CParser.STRING_LITERAL, i)
        def FLOATING_POINT_LITERAL(self):
            return self.getToken(CParser.FLOATING_POINT_LITERAL, 0)
        def getRuleIndex(self):
            return CParser.RULE_constant
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterConstant" ):
                listener.enterConstant(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitConstant" ):
                listener.exitConstant(self)
    def constant(self):
        """Parse the constant rule (ANTLR-generated; do not hand-edit).

        Alternatives keyed on the lookahead token: hex/octal/decimal/
        character literals; a concatenated-string form (runs of IDENTIFIER
        prefixes followed by one or more STRING_LITERALs, optionally
        trailed by IDENTIFIERs); or a floating-point literal.
        """
        localctx = CParser.ConstantContext(self, self._ctx, self.state)
        self.enterRule(localctx, 88, self.RULE_constant)
        self._la = 0 # Token type
        try:
            self.state = 647
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.HEX_LITERAL]:
                self.enterOuterAlt(localctx, 1)
                self.state = 621
                self.match(CParser.HEX_LITERAL)
                pass
            elif token in [CParser.OCTAL_LITERAL]:
                self.enterOuterAlt(localctx, 2)
                self.state = 622
                self.match(CParser.OCTAL_LITERAL)
                pass
            elif token in [CParser.DECIMAL_LITERAL]:
                self.enterOuterAlt(localctx, 3)
                self.state = 623
                self.match(CParser.DECIMAL_LITERAL)
                pass
            elif token in [CParser.CHARACTER_LITERAL]:
                self.enterOuterAlt(localctx, 4)
                self.state = 624
                self.match(CParser.CHARACTER_LITERAL)
                pass
            elif token in [CParser.IDENTIFIER, CParser.STRING_LITERAL]:
                self.enterOuterAlt(localctx, 5)
                self.state = 636
                self._errHandler.sync(self)
                _alt = 1
                # One or more groups of (IDENTIFIER* STRING_LITERAL+)
                while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                    if _alt == 1:
                        self.state = 628
                        self._errHandler.sync(self)
                        _la = self._input.LA(1)
                        while _la==CParser.IDENTIFIER:
                            self.state = 625
                            self.match(CParser.IDENTIFIER)
                            self.state = 630
                            self._errHandler.sync(self)
                            _la = self._input.LA(1)
                        self.state = 632
                        self._errHandler.sync(self)
                        _alt = 1
                        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                            if _alt == 1:
                                self.state = 631
                                self.match(CParser.STRING_LITERAL)
                            else:
                                raise NoViableAltException(self)
                            self.state = 634
                            self._errHandler.sync(self)
                            _alt = self._interp.adaptivePredict(self._input,77,self._ctx)
                    else:
                        raise NoViableAltException(self)
                    self.state = 638
                    self._errHandler.sync(self)
                    _alt = self._interp.adaptivePredict(self._input,78,self._ctx)
                # Optional trailing IDENTIFIERs after the string run.
                self.state = 643
                self._errHandler.sync(self)
                _la = self._input.LA(1)
                while _la==CParser.IDENTIFIER:
                    self.state = 640
                    self.match(CParser.IDENTIFIER)
                    self.state = 645
                    self._errHandler.sync(self)
                    _la = self._input.LA(1)
                pass
            elif token in [CParser.FLOATING_POINT_LITERAL]:
                self.enterOuterAlt(localctx, 6)
                self.state = 646
                self.match(CParser.FLOATING_POINT_LITERAL)
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class ExpressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def assignment_expression(self,i=None):
            # i is None -> all child assignment_expression contexts; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Assignment_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Assignment_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterExpression" ):
                listener.enterExpression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitExpression" ):
                listener.exitExpression(self)
    def expression(self):
        """Parse the expression rule (ANTLR-generated; do not hand-edit).

        Matches assignment_expression (T__3 assignment_expression)*.
        """
        localctx = CParser.ExpressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 90, self.RULE_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 649
            self.assignment_expression()
            self.state = 654
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__3:
                self.state = 650
                self.match(CParser.T__3)
                self.state = 651
                self.assignment_expression()
                self.state = 656
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Constant_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the constant_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def conditional_expression(self):
            return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_constant_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterConstant_expression" ):
                listener.enterConstant_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitConstant_expression" ):
                listener.exitConstant_expression(self)
    def constant_expression(self):
        """Parse the constant_expression rule (ANTLR-generated; do not hand-edit).

        Simply delegates to conditional_expression.
        """
        localctx = CParser.Constant_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 92, self.RULE_constant_expression)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 657
            self.conditional_expression()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Assignment_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the assignment_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def lvalue(self):
            return self.getTypedRuleContext(CParser.LvalueContext,0)
        def assignment_operator(self):
            return self.getTypedRuleContext(CParser.Assignment_operatorContext,0)
        def assignment_expression(self):
            return self.getTypedRuleContext(CParser.Assignment_expressionContext,0)
        def conditional_expression(self):
            return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_assignment_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAssignment_expression" ):
                listener.enterAssignment_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAssignment_expression" ):
                listener.exitAssignment_expression(self)
    def assignment_expression(self):
        """Parse the assignment_expression rule (ANTLR-generated; do not hand-edit).

        Alternative 1: lvalue assignment_operator assignment_expression;
        alternative 2: conditional_expression.  Chosen by adaptive
        prediction (decision 82).
        """
        localctx = CParser.Assignment_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 94, self.RULE_assignment_expression)
        try:
            self.state = 664
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,82,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 659
                self.lvalue()
                self.state = 660
                self.assignment_operator()
                self.state = 661
                self.assignment_expression()
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 663
                self.conditional_expression()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class LvalueContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the lvalue rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def unary_expression(self):
            return self.getTypedRuleContext(CParser.Unary_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_lvalue
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterLvalue" ):
                listener.enterLvalue(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitLvalue" ):
                listener.exitLvalue(self)
    def lvalue(self):
        """Parse the lvalue rule (ANTLR-generated; do not hand-edit).

        Simply delegates to unary_expression.
        """
        localctx = CParser.LvalueContext(self, self._ctx, self.state)
        self.enterRule(localctx, 96, self.RULE_lvalue)
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 666
            self.unary_expression()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Assignment_operatorContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the assignment_operator rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def getRuleIndex(self):
            return CParser.RULE_assignment_operator
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAssignment_operator" ):
                listener.enterAssignment_operator(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAssignment_operator" ):
                listener.exitAssignment_operator(self)
    def assignment_operator(self):
        """Parse the assignment_operator rule (ANTLR-generated; do not hand-edit).

        Consumes a single token from the set {T__4, T__55..T__64};
        any other token triggers inline error recovery.
        """
        localctx = CParser.Assignment_operatorContext(self, self._ctx, self.state)
        self.enterRule(localctx, 98, self.RULE_assignment_operator)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 668
            _la = self._input.LA(1)
            # Offset bitset test generated by ANTLR (token types shifted by 5).
            if not(((((_la - 5)) & ~0x3f) == 0 and ((1 << (_la - 5)) & ((1 << (CParser.T__4 - 5)) | (1 << (CParser.T__55 - 5)) | (1 << (CParser.T__56 - 5)) | (1 << (CParser.T__57 - 5)) | (1 << (CParser.T__58 - 5)) | (1 << (CParser.T__59 - 5)) | (1 << (CParser.T__60 - 5)) | (1 << (CParser.T__61 - 5)) | (1 << (CParser.T__62 - 5)) | (1 << (CParser.T__63 - 5)) | (1 << (CParser.T__64 - 5)))) != 0)):
                self._errHandler.recoverInline(self)
            else:
                self._errHandler.reportMatch(self)
                self.consume()
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Conditional_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the conditional_expression rule.

        Label `e` holds the logical_or_expression context used by the
        embedded predicate-recording action in the rule method.
        """
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
            self.e = None # Logical_or_expressionContext
        def logical_or_expression(self):
            return self.getTypedRuleContext(CParser.Logical_or_expressionContext,0)
        def expression(self):
            return self.getTypedRuleContext(CParser.ExpressionContext,0)
        def conditional_expression(self):
            return self.getTypedRuleContext(CParser.Conditional_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_conditional_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterConditional_expression" ):
                listener.enterConditional_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitConditional_expression" ):
                listener.exitConditional_expression(self)
    def conditional_expression(self):
        """Parse the conditional_expression rule (ANTLR-generated; do not hand-edit).

        Matches logical_or_expression, optionally followed by the ternary
        form T__65 expression T__22 conditional_expression.  When the
        ternary form matched, the embedded action calls
        self.StorePredicateExpression with the condition's source span and
        text — presumably to record predicates for ECC's
        PredicateExpressionCheck (helper defined outside this chunk;
        verify there).
        """
        localctx = CParser.Conditional_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 100, self.RULE_conditional_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 670
            localctx.e = self.logical_or_expression()
            self.state = 677
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            if _la==CParser.T__65:
                self.state = 671
                self.match(CParser.T__65)
                self.state = 672
                self.expression()
                self.state = 673
                self.match(CParser.T__22)
                self.state = 674
                self.conditional_expression()
                self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Logical_or_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the logical_or_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def logical_and_expression(self,i=None):
            # i is None -> all child logical_and_expression contexts; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Logical_and_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Logical_and_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_logical_or_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterLogical_or_expression" ):
                listener.enterLogical_or_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitLogical_or_expression" ):
                listener.exitLogical_or_expression(self)
    def logical_or_expression(self):
        """Parse the logical_or_expression rule (ANTLR-generated; do not hand-edit).

        Matches logical_and_expression (T__66 logical_and_expression)*.
        """
        localctx = CParser.Logical_or_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 102, self.RULE_logical_or_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 679
            self.logical_and_expression()
            self.state = 684
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__66:
                self.state = 680
                self.match(CParser.T__66)
                self.state = 681
                self.logical_and_expression()
                self.state = 686
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Logical_and_expressionContext(ParserRuleContext):
        """ANTLR-generated parse-tree node for the logical_and_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # @param i=None Type: int
        def inclusive_or_expression(self,i=None):
            # i is None -> all child inclusive_or_expression contexts; otherwise the i-th one.
            if i is None:
                return self.getTypedRuleContexts(CParser.Inclusive_or_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Inclusive_or_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_logical_and_expression
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterLogical_and_expression" ):
                listener.enterLogical_and_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitLogical_and_expression" ):
                listener.exitLogical_and_expression(self)
    def logical_and_expression(self):
        """Parse the logical_and_expression rule: an inclusive_or_expression
        followed by zero or more (T__67 inclusive_or_expression) pairs.
        T__67 is presumably the '&&' operator token — confirm in the token map.
        Returns the populated Logical_and_expressionContext."""
        localctx = CParser.Logical_and_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 104, self.RULE_logical_and_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 687
            self.inclusive_or_expression()
            self.state = 692
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Loop while the lookahead is the rule's operator token.
            while _la==CParser.T__67:
                self.state = 688
                self.match(CParser.T__67)
                self.state = 689
                self.inclusive_or_expression()
                self.state = 694
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Inclusive_or_expressionContext(ParserRuleContext):
        """Parse-tree node produced by the inclusive_or_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child exclusive_or_expression contexts: all of them
        # when i is None, otherwise the i-th one.
        # @param i=None Type: int
        def exclusive_or_expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Exclusive_or_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Exclusive_or_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_inclusive_or_expression
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterInclusive_or_expression" ):
                listener.enterInclusive_or_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitInclusive_or_expression" ):
                listener.exitInclusive_or_expression(self)
    def inclusive_or_expression(self):
        """Parse the inclusive_or_expression rule: an exclusive_or_expression
        followed by zero or more (T__68 exclusive_or_expression) pairs.
        T__68 is presumably the '|' operator token — confirm in the token map.
        Returns the populated Inclusive_or_expressionContext."""
        localctx = CParser.Inclusive_or_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 106, self.RULE_inclusive_or_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 695
            self.exclusive_or_expression()
            self.state = 700
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Loop while the lookahead is the rule's operator token.
            while _la==CParser.T__68:
                self.state = 696
                self.match(CParser.T__68)
                self.state = 697
                self.exclusive_or_expression()
                self.state = 702
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Exclusive_or_expressionContext(ParserRuleContext):
        """Parse-tree node produced by the exclusive_or_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child and_expression contexts: all of them when i is
        # None, otherwise the i-th one.
        # @param i=None Type: int
        def and_expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.And_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.And_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_exclusive_or_expression
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterExclusive_or_expression" ):
                listener.enterExclusive_or_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitExclusive_or_expression" ):
                listener.exitExclusive_or_expression(self)
    def exclusive_or_expression(self):
        """Parse the exclusive_or_expression rule: an and_expression followed
        by zero or more (T__69 and_expression) pairs.
        T__69 is presumably the '^' operator token — confirm in the token map.
        Returns the populated Exclusive_or_expressionContext."""
        localctx = CParser.Exclusive_or_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 108, self.RULE_exclusive_or_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 703
            self.and_expression()
            self.state = 708
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Loop while the lookahead is the rule's operator token.
            while _la==CParser.T__69:
                self.state = 704
                self.match(CParser.T__69)
                self.state = 705
                self.and_expression()
                self.state = 710
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class And_expressionContext(ParserRuleContext):
        """Parse-tree node produced by the and_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child equality_expression contexts: all of them when
        # i is None, otherwise the i-th one.
        # @param i=None Type: int
        def equality_expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Equality_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Equality_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_and_expression
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAnd_expression" ):
                listener.enterAnd_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAnd_expression" ):
                listener.exitAnd_expression(self)
    def and_expression(self):
        """Parse the and_expression rule: an equality_expression followed by
        zero or more (T__52 equality_expression) pairs.
        T__52 is presumably the '&' operator token — confirm in the token map.
        Returns the populated And_expressionContext."""
        localctx = CParser.And_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 110, self.RULE_and_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 711
            self.equality_expression()
            self.state = 716
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Loop while the lookahead is the rule's operator token.
            while _la==CParser.T__52:
                self.state = 712
                self.match(CParser.T__52)
                self.state = 713
                self.equality_expression()
                self.state = 718
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Equality_expressionContext(ParserRuleContext):
        """Parse-tree node produced by the equality_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child relational_expression contexts: all of them when
        # i is None, otherwise the i-th one.
        # @param i=None Type: int
        def relational_expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Relational_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Relational_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_equality_expression
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterEquality_expression" ):
                listener.enterEquality_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitEquality_expression" ):
                listener.exitEquality_expression(self)
    def equality_expression(self):
        """Parse the equality_expression rule: a relational_expression
        followed by zero or more ((T__70|T__71) relational_expression) pairs.
        T__70/T__71 are presumably the '=='/'!=' operator tokens — confirm in
        the token map. Returns the populated Equality_expressionContext."""
        localctx = CParser.Equality_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 112, self.RULE_equality_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 719
            self.relational_expression()
            self.state = 724
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__70 or _la==CParser.T__71:
                self.state = 720
                _la = self._input.LA(1)
                # Consume whichever of the two operator tokens is present;
                # anything else triggers inline error recovery.
                if not(_la==CParser.T__70 or _la==CParser.T__71):
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
                self.state = 721
                self.relational_expression()
                self.state = 726
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Relational_expressionContext(ParserRuleContext):
        """Parse-tree node produced by the relational_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child shift_expression contexts: all of them when i is
        # None, otherwise the i-th one.
        # @param i=None Type: int
        def shift_expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Shift_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Shift_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_relational_expression
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterRelational_expression" ):
                listener.enterRelational_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitRelational_expression" ):
                listener.exitRelational_expression(self)
    def relational_expression(self):
        """Parse the relational_expression rule: a shift_expression followed
        by zero or more ((T__72..T__75) shift_expression) pairs.
        T__72..T__75 are presumably the '<','>','<=','>=' operator tokens —
        confirm in the token map. The bitmask test is the generated constant
        membership check for that 4-token set (tokens offset by 73).
        Returns the populated Relational_expressionContext."""
        localctx = CParser.Relational_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 114, self.RULE_relational_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 727
            self.shift_expression()
            self.state = 732
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while ((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0):
                self.state = 728
                _la = self._input.LA(1)
                # Consume whichever relational operator is present; anything
                # else triggers inline error recovery.
                if not(((((_la - 73)) & ~0x3f) == 0 and ((1 << (_la - 73)) & ((1 << (CParser.T__72 - 73)) | (1 << (CParser.T__73 - 73)) | (1 << (CParser.T__74 - 73)) | (1 << (CParser.T__75 - 73)))) != 0)):
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
                self.state = 729
                self.shift_expression()
                self.state = 734
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Shift_expressionContext(ParserRuleContext):
        """Parse-tree node produced by the shift_expression rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child additive_expression contexts: all of them when
        # i is None, otherwise the i-th one.
        # @param i=None Type: int
        def additive_expression(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.Additive_expressionContext)
            else:
                return self.getTypedRuleContext(CParser.Additive_expressionContext,i)
        def getRuleIndex(self):
            return CParser.RULE_shift_expression
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterShift_expression" ):
                listener.enterShift_expression(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitShift_expression" ):
                listener.exitShift_expression(self)
    def shift_expression(self):
        """Parse the shift_expression rule: an additive_expression followed
        by zero or more ((T__76|T__77) additive_expression) pairs.
        T__76/T__77 are presumably the '<<'/'>>' operator tokens — confirm in
        the token map. Returns the populated Shift_expressionContext."""
        localctx = CParser.Shift_expressionContext(self, self._ctx, self.state)
        self.enterRule(localctx, 116, self.RULE_shift_expression)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 735
            self.additive_expression()
            self.state = 740
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            while _la==CParser.T__76 or _la==CParser.T__77:
                self.state = 736
                _la = self._input.LA(1)
                # Consume whichever of the two operator tokens is present;
                # anything else triggers inline error recovery.
                if not(_la==CParser.T__76 or _la==CParser.T__77):
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
                self.state = 737
                self.additive_expression()
                self.state = 742
                self._errHandler.sync(self)
                _la = self._input.LA(1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class StatementContext(ParserRuleContext):
        """Parse-tree node produced by the statement rule. Exposes one typed
        accessor per statement alternative; only the accessor matching the
        alternative actually parsed returns a non-empty context."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def labeled_statement(self):
            return self.getTypedRuleContext(CParser.Labeled_statementContext,0)
        def compound_statement(self):
            return self.getTypedRuleContext(CParser.Compound_statementContext,0)
        def expression_statement(self):
            return self.getTypedRuleContext(CParser.Expression_statementContext,0)
        def selection_statement(self):
            return self.getTypedRuleContext(CParser.Selection_statementContext,0)
        def iteration_statement(self):
            return self.getTypedRuleContext(CParser.Iteration_statementContext,0)
        def jump_statement(self):
            return self.getTypedRuleContext(CParser.Jump_statementContext,0)
        def macro_statement(self):
            return self.getTypedRuleContext(CParser.Macro_statementContext,0)
        def asm2_statement(self):
            return self.getTypedRuleContext(CParser.Asm2_statementContext,0)
        def asm1_statement(self):
            return self.getTypedRuleContext(CParser.Asm1_statementContext,0)
        def asm_statement(self):
            return self.getTypedRuleContext(CParser.Asm_statementContext,0)
        def declaration(self):
            return self.getTypedRuleContext(CParser.DeclarationContext,0)
        def getRuleIndex(self):
            return CParser.RULE_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterStatement" ):
                listener.enterStatement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitStatement" ):
                listener.exitStatement(self)
    def statement(self):
        """Parse the statement rule by choosing among 11 alternatives
        (labeled, compound, expression, selection, iteration, jump, macro,
        asm2, asm1, asm, declaration) via ATN adaptive prediction (decision
        92). Returns the populated StatementContext."""
        localctx = CParser.StatementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 118, self.RULE_statement)
        try:
            self.state = 754
            self._errHandler.sync(self)
            # la_ selects the predicted alternative; the elif chain below
            # mirrors the grammar's alternative order.
            la_ = self._interp.adaptivePredict(self._input,92,self._ctx)
            if la_ == 1:
                self.enterOuterAlt(localctx, 1)
                self.state = 743
                self.labeled_statement()
                pass
            elif la_ == 2:
                self.enterOuterAlt(localctx, 2)
                self.state = 744
                self.compound_statement()
                pass
            elif la_ == 3:
                self.enterOuterAlt(localctx, 3)
                self.state = 745
                self.expression_statement()
                pass
            elif la_ == 4:
                self.enterOuterAlt(localctx, 4)
                self.state = 746
                self.selection_statement()
                pass
            elif la_ == 5:
                self.enterOuterAlt(localctx, 5)
                self.state = 747
                self.iteration_statement()
                pass
            elif la_ == 6:
                self.enterOuterAlt(localctx, 6)
                self.state = 748
                self.jump_statement()
                pass
            elif la_ == 7:
                self.enterOuterAlt(localctx, 7)
                self.state = 749
                self.macro_statement()
                pass
            elif la_ == 8:
                self.enterOuterAlt(localctx, 8)
                self.state = 750
                self.asm2_statement()
                pass
            elif la_ == 9:
                self.enterOuterAlt(localctx, 9)
                self.state = 751
                self.asm1_statement()
                pass
            elif la_ == 10:
                self.enterOuterAlt(localctx, 10)
                self.state = 752
                self.asm_statement()
                pass
            elif la_ == 11:
                self.enterOuterAlt(localctx, 11)
                self.state = 753
                self.declaration()
                pass
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Asm2_statementContext(ParserRuleContext):
        """Parse-tree node produced by the asm2_statement rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for the rule's single IDENTIFIER token.
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        def getRuleIndex(self):
            return CParser.RULE_asm2_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAsm2_statement" ):
                listener.enterAsm2_statement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAsm2_statement" ):
                listener.exitAsm2_statement(self)
    def asm2_statement(self):
        """Parse the asm2_statement rule: an optional T__78 prefix, an
        IDENTIFIER, T__37, any tokens except T__1 (decided by adaptive
        prediction 94), then T__38 and T__1. The literal token texts are not
        visible here — confirm against the generated token map.
        Returns the populated Asm2_statementContext."""
        localctx = CParser.Asm2_statementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 120, self.RULE_asm2_statement)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 757
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional leading token.
            if _la==CParser.T__78:
                self.state = 756
                self.match(CParser.T__78)
            self.state = 759
            self.match(CParser.IDENTIFIER)
            self.state = 760
            self.match(CParser.T__37)
            self.state = 764
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
            # Consume an arbitrary token run; _alt==2 / INVALID ends the loop.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 761
                    _la = self._input.LA(1)
                    # Any token except T__1 is accepted inside the body.
                    if _la <= 0 or _la==CParser.T__1:
                        self._errHandler.recoverInline(self)
                    else:
                        self._errHandler.reportMatch(self)
                        self.consume()
                self.state = 766
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,94,self._ctx)
            self.state = 767
            self.match(CParser.T__38)
            self.state = 768
            self.match(CParser.T__1)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Asm1_statementContext(ParserRuleContext):
        """Parse-tree node produced by the asm1_statement rule (no typed
        child accessors; the rule matches only raw tokens)."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def getRuleIndex(self):
            return CParser.RULE_asm1_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAsm1_statement" ):
                listener.enterAsm1_statement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAsm1_statement" ):
                listener.exitAsm1_statement(self)
    def asm1_statement(self):
        """Parse the asm1_statement rule: T__79, T__0, then any run of tokens
        except T__19, terminated by T__19. The giant bitmask pair below is
        the generated membership test for "every token type except T__19";
        the literal token texts are not visible here — confirm against the
        generated token map. Returns the populated Asm1_statementContext."""
        localctx = CParser.Asm1_statementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 122, self.RULE_asm1_statement)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 770
            self.match(CParser.T__79)
            self.state = 771
            self.match(CParser.T__0)
            self.state = 775
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Two 64-bit masks cover token types 0-63 and 64-127.
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
                self.state = 772
                _la = self._input.LA(1)
                # Any token except T__19 is accepted inside the body.
                if _la <= 0 or _la==CParser.T__19:
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
                self.state = 777
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 778
            self.match(CParser.T__19)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Asm_statementContext(ParserRuleContext):
        """Parse-tree node produced by the asm_statement rule (no typed
        child accessors; the rule matches only raw tokens)."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        def getRuleIndex(self):
            return CParser.RULE_asm_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterAsm_statement" ):
                listener.enterAsm_statement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitAsm_statement" ):
                listener.exitAsm_statement(self)
    def asm_statement(self):
        """Parse the asm_statement rule: T__80, T__0, then any run of tokens
        except T__19, terminated by T__19 — structurally identical to
        asm1_statement but introduced by T__80. The literal token texts are
        not visible here — confirm against the generated token map.
        Returns the populated Asm_statementContext."""
        localctx = CParser.Asm_statementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 124, self.RULE_asm_statement)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 780
            self.match(CParser.T__80)
            self.state = 781
            self.match(CParser.T__0)
            self.state = 785
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Two 64-bit masks cover token types 0-63 and 64-127.
            while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__3) | (1 << CParser.T__4) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__22) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__38) | (1 << CParser.T__39) | (1 << CParser.T__40) | (1 << CParser.T__41) | (1 << CParser.T__42) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__45) | (1 << CParser.T__46) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__50) | (1 << CParser.T__51) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54) | (1 << CParser.T__55) | (1 << CParser.T__56) | (1 << CParser.T__57) | (1 << CParser.T__58) | (1 << CParser.T__59) | (1 << CParser.T__60) | (1 << CParser.T__61) | (1 << CParser.T__62))) != 0) or ((((_la - 64)) & ~0x3f) == 0 and ((1 << (_la - 64)) & ((1 << (CParser.T__63 - 64)) | (1 << (CParser.T__64 - 64)) | (1 << (CParser.T__65 - 64)) | (1 << (CParser.T__66 - 64)) | (1 << (CParser.T__67 - 64)) | (1 << (CParser.T__68 - 64)) | (1 << (CParser.T__69 - 64)) | (1 << (CParser.T__70 - 64)) | (1 << (CParser.T__71 - 64)) | (1 << (CParser.T__72 - 64)) | (1 << (CParser.T__73 - 64)) | (1 << (CParser.T__74 - 64)) | (1 << (CParser.T__75 - 64)) | (1 << (CParser.T__76 - 64)) | (1 << (CParser.T__77 - 64)) | (1 << (CParser.T__78 - 64)) | (1 << (CParser.T__79 - 64)) | (1 << (CParser.T__80 - 64)) | (1 << (CParser.T__81 - 64)) | (1 << (CParser.T__82 - 64)) | (1 << (CParser.T__83 - 64)) | (1 << (CParser.T__84 - 64)) | (1 << (CParser.T__85 - 64)) | (1 << (CParser.T__86 - 64)) | (1 << (CParser.T__87 - 64)) | (1 << (CParser.T__88 - 64)) | (1 << (CParser.T__89 - 64)) | (1 << (CParser.T__90 - 64)) | (1 << (CParser.T__91 - 64)) | (1 << (CParser.IDENTIFIER - 64)) | (1 << (CParser.CHARACTER_LITERAL - 64)) | (1 << (CParser.STRING_LITERAL - 64)) | (1 << (CParser.HEX_LITERAL - 64)) | (1 << (CParser.DECIMAL_LITERAL - 64)) | (1 << (CParser.OCTAL_LITERAL - 64)) | (1 << (CParser.FLOATING_POINT_LITERAL - 64)) | (1 << (CParser.WS - 64)) | (1 << (CParser.BS - 64)) | (1 << (CParser.UnicodeVocabulary - 64)) | (1 << (CParser.COMMENT - 64)) | (1 << (CParser.LINE_COMMENT - 64)) | (1 << (CParser.LINE_COMMAND - 64)))) != 0):
                self.state = 782
                _la = self._input.LA(1)
                # Any token except T__19 is accepted inside the body.
                if _la <= 0 or _la==CParser.T__19:
                    self._errHandler.recoverInline(self)
                else:
                    self._errHandler.reportMatch(self)
                    self.consume()
                self.state = 787
                self._errHandler.sync(self)
                _la = self._input.LA(1)
            self.state = 788
            self.match(CParser.T__19)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Macro_statementContext(ParserRuleContext):
        """Parse-tree node produced by the macro_statement rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for the rule's single IDENTIFIER token.
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        # Accessor for child declaration contexts: all of them when i is
        # None, otherwise the i-th one.
        # @param i=None Type: int
        def declaration(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.DeclarationContext)
            else:
                return self.getTypedRuleContext(CParser.DeclarationContext,i)
        def statement_list(self):
            return self.getTypedRuleContext(CParser.Statement_listContext,0)
        def expression(self):
            return self.getTypedRuleContext(CParser.ExpressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_macro_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterMacro_statement" ):
                listener.enterMacro_statement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitMacro_statement" ):
                listener.exitMacro_statement(self)
    def macro_statement(self):
        """Parse the macro_statement rule: IDENTIFIER T__37, zero or more
        declarations (decision 97), an optional statement_list (decision 98),
        an optional expression (bitmask-gated), then T__38. T__37/T__38 are
        presumably '('/')' — confirm against the generated token map.
        Returns the populated Macro_statementContext."""
        localctx = CParser.Macro_statementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 126, self.RULE_macro_statement)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 790
            self.match(CParser.IDENTIFIER)
            self.state = 791
            self.match(CParser.T__37)
            self.state = 795
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
            # Zero or more leading declarations.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 792
                    self.declaration()
                self.state = 797
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,97,self._ctx)
            self.state = 799
            self._errHandler.sync(self)
            la_ = self._interp.adaptivePredict(self._input,98,self._ctx)
            # Optional statement list.
            if la_ == 1:
                self.state = 798
                self.statement_list()
            self.state = 802
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional trailing expression, gated on the expression FIRST set
            # (tokens offset by 38 in the generated bitmask).
            if ((((_la - 38)) & ~0x3f) == 0 and ((1 << (_la - 38)) & ((1 << (CParser.T__37 - 38)) | (1 << (CParser.T__41 - 38)) | (1 << (CParser.T__43 - 38)) | (1 << (CParser.T__44 - 38)) | (1 << (CParser.T__47 - 38)) | (1 << (CParser.T__48 - 38)) | (1 << (CParser.T__49 - 38)) | (1 << (CParser.T__52 - 38)) | (1 << (CParser.T__53 - 38)) | (1 << (CParser.T__54 - 38)) | (1 << (CParser.IDENTIFIER - 38)) | (1 << (CParser.CHARACTER_LITERAL - 38)) | (1 << (CParser.STRING_LITERAL - 38)) | (1 << (CParser.HEX_LITERAL - 38)) | (1 << (CParser.DECIMAL_LITERAL - 38)) | (1 << (CParser.OCTAL_LITERAL - 38)) | (1 << (CParser.FLOATING_POINT_LITERAL - 38)))) != 0):
                self.state = 801
                self.expression()
            self.state = 804
            self.match(CParser.T__38)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Labeled_statementContext(ParserRuleContext):
        """Parse-tree node produced by the labeled_statement rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for the label IDENTIFIER (present only for the
        # identifier-label alternative).
        def IDENTIFIER(self):
            return self.getToken(CParser.IDENTIFIER, 0)
        def statement(self):
            return self.getTypedRuleContext(CParser.StatementContext,0)
        # Present only for the case-label alternative.
        def constant_expression(self):
            return self.getTypedRuleContext(CParser.Constant_expressionContext,0)
        def getRuleIndex(self):
            return CParser.RULE_labeled_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterLabeled_statement" ):
                listener.enterLabeled_statement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitLabeled_statement" ):
                listener.exitLabeled_statement(self)
    def labeled_statement(self):
        """Parse the labeled_statement rule, selected by one-token lookahead:
        IDENTIFIER T__22 statement | T__81 constant_expression T__22
        statement | T__82 T__22 statement. T__22/T__81/T__82 are presumably
        ':'/'case'/'default' — confirm against the generated token map.
        Raises NoViableAltException (caught below) if the lookahead matches
        no alternative. Returns the populated Labeled_statementContext."""
        localctx = CParser.Labeled_statementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 128, self.RULE_labeled_statement)
        try:
            self.state = 817
            self._errHandler.sync(self)
            token = self._input.LA(1)
            if token in [CParser.IDENTIFIER]:
                self.enterOuterAlt(localctx, 1)
                self.state = 806
                self.match(CParser.IDENTIFIER)
                self.state = 807
                self.match(CParser.T__22)
                self.state = 808
                self.statement()
                pass
            elif token in [CParser.T__81]:
                self.enterOuterAlt(localctx, 2)
                self.state = 809
                self.match(CParser.T__81)
                self.state = 810
                self.constant_expression()
                self.state = 811
                self.match(CParser.T__22)
                self.state = 812
                self.statement()
                pass
            elif token in [CParser.T__82]:
                self.enterOuterAlt(localctx, 3)
                self.state = 814
                self.match(CParser.T__82)
                self.state = 815
                self.match(CParser.T__22)
                self.state = 816
                self.statement()
                pass
            else:
                raise NoViableAltException(self)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
    class Compound_statementContext(ParserRuleContext):
        """Parse-tree node produced by the compound_statement rule."""
        # @param parent=None Type: ParserRuleContext
        # @param invokingState=-1 Type: int
        def __init__(self,parser,parent=None,invokingState=-1):
            super().__init__(parent, invokingState)
            self.parser = parser
        # Accessor for child declaration contexts: all of them when i is
        # None, otherwise the i-th one.
        # @param i=None Type: int
        def declaration(self,i=None):
            if i is None:
                return self.getTypedRuleContexts(CParser.DeclarationContext)
            else:
                return self.getTypedRuleContext(CParser.DeclarationContext,i)
        def statement_list(self):
            return self.getTypedRuleContext(CParser.Statement_listContext,0)
        def getRuleIndex(self):
            return CParser.RULE_compound_statement
        # Listener dispatch uses hasattr so partial listeners are allowed.
        # @param listener Type: ParseTreeListener
        def enterRule(self,listener):
            if hasattr( listener, "enterCompound_statement" ):
                listener.enterCompound_statement(self)
        # @param listener Type: ParseTreeListener
        def exitRule(self,listener):
            if hasattr( listener, "exitCompound_statement" ):
                listener.exitCompound_statement(self)
    def compound_statement(self):
        """Parse the compound_statement rule: T__0, zero or more declarations
        (decision 101), an optional statement_list (bitmask-gated on the
        statement FIRST set), then T__19. T__0/T__19 are presumably '{'/'}' —
        confirm against the generated token map.
        Returns the populated Compound_statementContext."""
        localctx = CParser.Compound_statementContext(self, self._ctx, self.state)
        self.enterRule(localctx, 130, self.RULE_compound_statement)
        self._la = 0 # Token type
        try:
            self.enterOuterAlt(localctx, 1)
            self.state = 819
            self.match(CParser.T__0)
            self.state = 823
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
            # Zero or more leading declarations.
            while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
                if _alt==1:
                    self.state = 820
                    self.declaration()
                self.state = 825
                self._errHandler.sync(self)
                _alt = self._interp.adaptivePredict(self._input,101,self._ctx)
            self.state = 827
            self._errHandler.sync(self)
            _la = self._input.LA(1)
            # Optional statement list; two 64-bit masks cover token types
            # 0-63 and 79-142.
            if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << CParser.T__0) | (1 << CParser.T__1) | (1 << CParser.T__2) | (1 << CParser.T__5) | (1 << CParser.T__6) | (1 << CParser.T__7) | (1 << CParser.T__8) | (1 << CParser.T__9) | (1 << CParser.T__10) | (1 << CParser.T__11) | (1 << CParser.T__12) | (1 << CParser.T__13) | (1 << CParser.T__14) | (1 << CParser.T__15) | (1 << CParser.T__16) | (1 << CParser.T__17) | (1 << CParser.T__18) | (1 << CParser.T__20) | (1 << CParser.T__21) | (1 << CParser.T__23) | (1 << CParser.T__24) | (1 << CParser.T__25) | (1 << CParser.T__26) | (1 << CParser.T__27) | (1 << CParser.T__28) | (1 << CParser.T__29) | (1 << CParser.T__30) | (1 << CParser.T__31) | (1 << CParser.T__32) | (1 << CParser.T__33) | (1 << CParser.T__34) | (1 << CParser.T__35) | (1 << CParser.T__36) | (1 << CParser.T__37) | (1 << CParser.T__41) | (1 << CParser.T__43) | (1 << CParser.T__44) | (1 << CParser.T__47) | (1 << CParser.T__48) | (1 << CParser.T__49) | (1 << CParser.T__52) | (1 << CParser.T__53) | (1 << CParser.T__54))) != 0) or ((((_la - 79)) & ~0x3f) == 0 and ((1 << (_la - 79)) & ((1 << (CParser.T__78 - 79)) | (1 << (CParser.T__79 - 79)) | (1 << (CParser.T__80 - 79)) | (1 << (CParser.T__81 - 79)) | (1 << (CParser.T__82 - 79)) | (1 << (CParser.T__83 - 79)) | (1 << (CParser.T__85 - 79)) | (1 << (CParser.T__86 - 79)) | (1 << (CParser.T__87 - 79)) | (1 << (CParser.T__88 - 79)) | (1 << (CParser.T__89 - 79)) | (1 << (CParser.T__90 - 79)) | (1 << (CParser.T__91 - 79)) | (1 << (CParser.IDENTIFIER - 79)) | (1 << (CParser.CHARACTER_LITERAL - 79)) | (1 << (CParser.STRING_LITERAL - 79)) | (1 << (CParser.HEX_LITERAL - 79)) | (1 << (CParser.DECIMAL_LITERAL - 79)) | (1 << (CParser.OCTAL_LITERAL - 79)) | (1 << (CParser.FLOATING_POINT_LITERAL - 79)))) != 0):
                self.state = 826
                self.statement_list()
            self.state = 829
            self.match(CParser.T__19)
        except RecognitionException as re:
            localctx.exception = re
            self._errHandler.reportError(self, re)
            self._errHandler.recover(self, re)
        finally:
            self.exitRule()
        return localctx
class Statement_listContext(ParserRuleContext):
    """Parse-tree context produced by the 'statement_list' grammar rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    # @param i=None Type: int
    def statement(self, i=None):
        # Without an index, return every child statement context; with one,
        # return only the i-th child statement context.
        if i is not None:
            return self.getTypedRuleContext(CParser.StatementContext, i)
        return self.getTypedRuleContexts(CParser.StatementContext)

    def getRuleIndex(self):
        return CParser.RULE_statement_list

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        handler = getattr(listener, "enterStatement_list", None)
        if handler is not None:
            handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        handler = getattr(listener, "exitStatement_list", None)
        if handler is not None:
            handler(self)
def statement_list(self):
    """Parse the 'statement_list' rule (one or more statements).

    Returns the Statement_listContext for the parsed subtree; on a syntax
    error the exception is recorded on the context and recovery is attempted.
    """
    localctx = CParser.Statement_listContext(self, self._ctx, self.state)
    self.enterRule(localctx, 132, self.RULE_statement_list)
    try:
        self.enterOuterAlt(localctx, 1)
        self.state = 832
        self._errHandler.sync(self)
        _alt = 1
        # (statement)+ : keep consuming statements while adaptivePredict
        # chooses alternative 1; _alt == 2 or INVALID_ALT_NUMBER ends the loop.
        while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
            if _alt == 1:
                self.state = 831
                self.statement()
            else:
                raise NoViableAltException(self)
            self.state = 834
            self._errHandler.sync(self)
            _alt = self._interp.adaptivePredict(self._input,103,self._ctx)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, then resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Expression_statementContext(ParserRuleContext):
    """Parse-tree context produced by the 'expression_statement' grammar rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def expression(self):
        # Child expression context (may be absent for a bare ';').
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_expression_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        handler = getattr(listener, "enterExpression_statement", None)
        if handler is not None:
            handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        handler = getattr(listener, "exitExpression_statement", None)
        if handler is not None:
            handler(self)
def expression_statement(self):
    """Parse the 'expression_statement' rule: a lone ';' or an expression ';'.

    Returns the Expression_statementContext for the parsed subtree.
    """
    localctx = CParser.Expression_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 134, self.RULE_expression_statement)
    try:
        self.state = 840
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__1]:
            # Alternative 1: empty statement, just the ';' token.
            self.enterOuterAlt(localctx, 1)
            self.state = 836
            self.match(CParser.T__1)
            pass
        elif token in [CParser.T__37, CParser.T__41, CParser.T__43, CParser.T__44, CParser.T__47, CParser.T__48, CParser.T__49, CParser.T__52, CParser.T__53, CParser.T__54, CParser.IDENTIFIER, CParser.CHARACTER_LITERAL, CParser.STRING_LITERAL, CParser.HEX_LITERAL, CParser.DECIMAL_LITERAL, CParser.OCTAL_LITERAL, CParser.FLOATING_POINT_LITERAL]:
            # Alternative 2: an expression terminated by ';'.
            self.enterOuterAlt(localctx, 2)
            self.state = 837
            self.expression()
            self.state = 838
            self.match(CParser.T__1)
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, then resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Selection_statementContext(ParserRuleContext):
    """Parse-tree context produced by the 'selection_statement' grammar rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        # Grammar label 'e': the controlling expression, filled in while parsing.
        self.e = None  # ExpressionContext

    # @param i=None Type: int
    def statement(self, i=None):
        # Without an index, return every child statement context; with one,
        # return only the i-th child statement context.
        if i is not None:
            return self.getTypedRuleContext(CParser.StatementContext, i)
        return self.getTypedRuleContexts(CParser.StatementContext)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_selection_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        handler = getattr(listener, "enterSelection_statement", None)
        if handler is not None:
            handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        handler = getattr(listener, "exitSelection_statement", None)
        if handler is not None:
            handler(self)
def selection_statement(self):
    """Parse the 'selection_statement' rule (two keyword-introduced alternatives).

    Alternative 1 additionally records the controlling expression via
    StorePredicateExpression for the ECC predicate-expression checks.
    Returns the Selection_statementContext for the parsed subtree.
    """
    localctx = CParser.Selection_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 136, self.RULE_selection_statement)
    try:
        self.state = 858
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__83]:
            # Alternative 1: T__83 '(' e=expression ')' statement (T__84 statement)?
            # — shaped like an if/else; TODO confirm token T__83 maps to 'if'.
            self.enterOuterAlt(localctx, 1)
            self.state = 842
            self.match(CParser.T__83)
            self.state = 843
            self.match(CParser.T__37)
            self.state = 844
            localctx.e = self.expression()
            self.state = 845
            self.match(CParser.T__38)
            # Record line/column span and text of the condition for ECC checks.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            self.state = 847
            self.statement()
            self.state = 850
            self._errHandler.sync(self)
            # Optional trailing branch (T__84 statement), decided adaptively.
            la_ = self._interp.adaptivePredict(self._input,105,self._ctx)
            if la_ == 1:
                self.state = 848
                self.match(CParser.T__84)
                self.state = 849
                self.statement()
            pass
        elif token in [CParser.T__85]:
            # Alternative 2: T__85 '(' expression ')' statement — no predicate
            # expression is recorded for this form.
            self.enterOuterAlt(localctx, 2)
            self.state = 852
            self.match(CParser.T__85)
            self.state = 853
            self.match(CParser.T__37)
            self.state = 854
            self.expression()
            self.state = 855
            self.match(CParser.T__38)
            self.state = 856
            self.statement()
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, then resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Iteration_statementContext(ParserRuleContext):
    """Parse-tree context produced by the 'iteration_statement' grammar rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser
        # Grammar label 'e': the loop condition expression, filled in while parsing.
        self.e = None  # ExpressionContext

    def statement(self):
        return self.getTypedRuleContext(CParser.StatementContext, 0)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_iteration_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        handler = getattr(listener, "enterIteration_statement", None)
        if handler is not None:
            handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        handler = getattr(listener, "exitIteration_statement", None)
        if handler is not None:
            handler(self)
def iteration_statement(self):
    """Parse the 'iteration_statement' rule (two loop forms).

    Both alternatives record the loop condition via StorePredicateExpression
    for the ECC predicate-expression checks.  Returns the
    Iteration_statementContext for the parsed subtree.
    """
    localctx = CParser.Iteration_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 138, self.RULE_iteration_statement)
    try:
        self.state = 876
        self._errHandler.sync(self)
        token = self._input.LA(1)
        if token in [CParser.T__86]:
            # Alternative 1: T__86 '(' e=expression ')' statement — shaped like
            # a 'while' loop; TODO confirm token T__86 maps to 'while'.
            self.enterOuterAlt(localctx, 1)
            self.state = 860
            self.match(CParser.T__86)
            self.state = 861
            self.match(CParser.T__37)
            self.state = 862
            localctx.e = self.expression()
            self.state = 863
            self.match(CParser.T__38)
            self.state = 864
            self.statement()
            # Record line/column span and text of the condition for ECC checks.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            pass
        elif token in [CParser.T__87]:
            # Alternative 2: T__87 statement T__86 '(' e=expression ')' ';' —
            # shaped like a do/while loop; TODO confirm token mapping.
            self.enterOuterAlt(localctx, 2)
            self.state = 867
            self.match(CParser.T__87)
            self.state = 868
            self.statement()
            self.state = 869
            self.match(CParser.T__86)
            self.state = 870
            self.match(CParser.T__37)
            self.state = 871
            localctx.e = self.expression()
            self.state = 872
            self.match(CParser.T__38)
            self.state = 873
            self.match(CParser.T__1)
            # Record line/column span and text of the condition for ECC checks.
            self.StorePredicateExpression((None if localctx.e is None else localctx.e.start).line, (None if localctx.e is None else localctx.e.start).column, (None if localctx.e is None else localctx.e.stop).line, (None if localctx.e is None else localctx.e.stop).column, (None if localctx.e is None else self._input.getText((localctx.e.start,localctx.e.stop))))
            pass
        else:
            raise NoViableAltException(self)
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, then resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
class Jump_statementContext(ParserRuleContext):
    """Parse-tree context produced by the 'jump_statement' grammar rule."""

    # @param parent=None Type: ParserRuleContext
    # @param invokingState=-1 Type: int
    def __init__(self, parser, parent=None, invokingState=-1):
        super().__init__(parent, invokingState)
        self.parser = parser

    def IDENTIFIER(self):
        # Terminal IDENTIFIER child, if this jump form carries one.
        return self.getToken(CParser.IDENTIFIER, 0)

    def expression(self):
        return self.getTypedRuleContext(CParser.ExpressionContext, 0)

    def getRuleIndex(self):
        return CParser.RULE_jump_statement

    # @param listener Type: ParseTreeListener
    def enterRule(self, listener):
        handler = getattr(listener, "enterJump_statement", None)
        if handler is not None:
            handler(self)

    # @param listener Type: ParseTreeListener
    def exitRule(self, listener):
        handler = getattr(listener, "exitJump_statement", None)
        if handler is not None:
            handler(self)
def jump_statement(self):
    """Parse the 'jump_statement' rule (five alternatives chosen adaptively).

    The alternatives are: keyword+IDENTIFIER ';' (goto-shaped), two bare
    keyword ';' forms, keyword ';', and keyword expression ';' (the last two
    share token T__91, i.e. a return with or without a value — TODO confirm
    keyword/token mapping).  Returns the Jump_statementContext.
    """
    localctx = CParser.Jump_statementContext(self, self._ctx, self.state)
    self.enterRule(localctx, 140, self.RULE_jump_statement)
    try:
        self.state = 891
        self._errHandler.sync(self)
        la_ = self._interp.adaptivePredict(self._input,108,self._ctx)
        if la_ == 1:
            # T__88 IDENTIFIER ';'
            self.enterOuterAlt(localctx, 1)
            self.state = 878
            self.match(CParser.T__88)
            self.state = 879
            self.match(CParser.IDENTIFIER)
            self.state = 880
            self.match(CParser.T__1)
            pass
        elif la_ == 2:
            # T__89 ';'
            self.enterOuterAlt(localctx, 2)
            self.state = 881
            self.match(CParser.T__89)
            self.state = 882
            self.match(CParser.T__1)
            pass
        elif la_ == 3:
            # T__90 ';'
            self.enterOuterAlt(localctx, 3)
            self.state = 883
            self.match(CParser.T__90)
            self.state = 884
            self.match(CParser.T__1)
            pass
        elif la_ == 4:
            # T__91 ';'
            self.enterOuterAlt(localctx, 4)
            self.state = 885
            self.match(CParser.T__91)
            self.state = 886
            self.match(CParser.T__1)
            pass
        elif la_ == 5:
            # T__91 expression ';'
            self.enterOuterAlt(localctx, 5)
            self.state = 887
            self.match(CParser.T__91)
            self.state = 888
            self.expression()
            self.state = 889
            self.match(CParser.T__1)
            pass
    except RecognitionException as re:
        # Standard ANTLR recovery: record, report, then resynchronize.
        localctx.exception = re
        self._errHandler.reportError(self, re)
        self._errHandler.recover(self, re)
    finally:
        self.exitRule()
    return localctx
| edk2-master | BaseTools/Source/Python/Eot/CParser4/CParser.py |
# Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
if __name__ is not None and "." in __name__:
from .CParser import CParser
else:
from CParser import CParser
## @file
# The file defines the parser for C source files.
#
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
# This file is generated by running:
# java org.antlr.Tool C.g
#
# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import Ecc.CodeFragment as CodeFragment
import Ecc.FileProfile as FileProfile
# This class defines a complete listener for a parse tree produced by CParser.
class CListener(ParseTreeListener):
# Enter a parse tree produced by CParser#translation_unit.
# @param ctx Type: CParser.Translation_unitContext
def enterTranslation_unit(self,ctx):
pass
# Exit a parse tree produced by CParser#translation_unit.
# @param ctx Type: CParser.Translation_unitContext
def exitTranslation_unit(self,ctx):
pass
# Enter a parse tree produced by CParser#external_declaration.
# @param ctx Type: CParser.External_declarationContext
def enterExternal_declaration(self,ctx):
pass
# Exit a parse tree produced by CParser#external_declaration.
# @param ctx Type: CParser.External_declarationContext
def exitExternal_declaration(self,ctx):
pass
# Enter a parse tree produced by CParser#function_definition.
# @param ctx Type: CParser.Function_definitionContext
def enterFunction_definition(self,ctx):
pass
# Exit a parse tree produced by CParser#function_definition.
# @param ctx Type: CParser.Function_definitionContext
def exitFunction_definition(self,ctx):
pass
# Enter a parse tree produced by CParser#declaration_specifiers.
# @param ctx Type: CParser.Declaration_specifiersContext
def enterDeclaration_specifiers(self,ctx):
pass
# Exit a parse tree produced by CParser#declaration_specifiers.
# @param ctx Type: CParser.Declaration_specifiersContext
def exitDeclaration_specifiers(self,ctx):
pass
# Enter a parse tree produced by CParser#declaration.
# @param ctx Type: CParser.DeclarationContext
def enterDeclaration(self,ctx):
pass
# Exit a parse tree produced by CParser#declaration.
# @param ctx Type: CParser.DeclarationContext
def exitDeclaration(self,ctx):
pass
# Enter a parse tree produced by CParser#init_declarator_list.
# @param ctx Type: CParser.Init_declarator_listContext
def enterInit_declarator_list(self,ctx):
pass
# Exit a parse tree produced by CParser#init_declarator_list.
# @param ctx Type: CParser.Init_declarator_listContext
def exitInit_declarator_list(self,ctx):
pass
# Enter a parse tree produced by CParser#init_declarator.
# @param ctx Type: CParser.Init_declaratorContext
def enterInit_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#init_declarator.
# @param ctx Type: CParser.Init_declaratorContext
def exitInit_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#storage_class_specifier.
# @param ctx Type: CParser.Storage_class_specifierContext
def enterStorage_class_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#storage_class_specifier.
# @param ctx Type: CParser.Storage_class_specifierContext
def exitStorage_class_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#type_specifier.
# @param ctx Type: CParser.Type_specifierContext
def enterType_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#type_specifier.
# @param ctx Type: CParser.Type_specifierContext
def exitType_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#type_id.
# @param ctx Type: CParser.Type_idContext
def enterType_id(self,ctx):
pass
# Exit a parse tree produced by CParser#type_id.
# @param ctx Type: CParser.Type_idContext
def exitType_id(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_or_union_specifier.
# @param ctx Type: CParser.Struct_or_union_specifierContext
def enterStruct_or_union_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_or_union_specifier.
# @param ctx Type: CParser.Struct_or_union_specifierContext
def exitStruct_or_union_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_or_union.
# @param ctx Type: CParser.Struct_or_unionContext
def enterStruct_or_union(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_or_union.
# @param ctx Type: CParser.Struct_or_unionContext
def exitStruct_or_union(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declaration_list.
# @param ctx Type: CParser.Struct_declaration_listContext
def enterStruct_declaration_list(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declaration_list.
# @param ctx Type: CParser.Struct_declaration_listContext
def exitStruct_declaration_list(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declaration.
# @param ctx Type: CParser.Struct_declarationContext
def enterStruct_declaration(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declaration.
# @param ctx Type: CParser.Struct_declarationContext
def exitStruct_declaration(self,ctx):
pass
# Enter a parse tree produced by CParser#specifier_qualifier_list.
# @param ctx Type: CParser.Specifier_qualifier_listContext
def enterSpecifier_qualifier_list(self,ctx):
pass
# Exit a parse tree produced by CParser#specifier_qualifier_list.
# @param ctx Type: CParser.Specifier_qualifier_listContext
def exitSpecifier_qualifier_list(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declarator_list.
# @param ctx Type: CParser.Struct_declarator_listContext
def enterStruct_declarator_list(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declarator_list.
# @param ctx Type: CParser.Struct_declarator_listContext
def exitStruct_declarator_list(self,ctx):
pass
# Enter a parse tree produced by CParser#struct_declarator.
# @param ctx Type: CParser.Struct_declaratorContext
def enterStruct_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#struct_declarator.
# @param ctx Type: CParser.Struct_declaratorContext
def exitStruct_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#enum_specifier.
# @param ctx Type: CParser.Enum_specifierContext
def enterEnum_specifier(self,ctx):
pass
# Exit a parse tree produced by CParser#enum_specifier.
# @param ctx Type: CParser.Enum_specifierContext
def exitEnum_specifier(self,ctx):
pass
# Enter a parse tree produced by CParser#enumerator_list.
# @param ctx Type: CParser.Enumerator_listContext
def enterEnumerator_list(self,ctx):
pass
# Exit a parse tree produced by CParser#enumerator_list.
# @param ctx Type: CParser.Enumerator_listContext
def exitEnumerator_list(self,ctx):
pass
# Enter a parse tree produced by CParser#enumerator.
# @param ctx Type: CParser.EnumeratorContext
def enterEnumerator(self,ctx):
pass
# Exit a parse tree produced by CParser#enumerator.
# @param ctx Type: CParser.EnumeratorContext
def exitEnumerator(self,ctx):
pass
# Enter a parse tree produced by CParser#type_qualifier.
# @param ctx Type: CParser.Type_qualifierContext
def enterType_qualifier(self,ctx):
pass
# Exit a parse tree produced by CParser#type_qualifier.
# @param ctx Type: CParser.Type_qualifierContext
def exitType_qualifier(self,ctx):
pass
# Enter a parse tree produced by CParser#declarator.
# @param ctx Type: CParser.DeclaratorContext
def enterDeclarator(self,ctx):
pass
# Exit a parse tree produced by CParser#declarator.
# @param ctx Type: CParser.DeclaratorContext
def exitDeclarator(self,ctx):
pass
# Enter a parse tree produced by CParser#direct_declarator.
# @param ctx Type: CParser.Direct_declaratorContext
def enterDirect_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#direct_declarator.
# @param ctx Type: CParser.Direct_declaratorContext
def exitDirect_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#declarator_suffix.
# @param ctx Type: CParser.Declarator_suffixContext
def enterDeclarator_suffix(self,ctx):
pass
# Exit a parse tree produced by CParser#declarator_suffix.
# @param ctx Type: CParser.Declarator_suffixContext
def exitDeclarator_suffix(self,ctx):
pass
# Enter a parse tree produced by CParser#pointer.
# @param ctx Type: CParser.PointerContext
def enterPointer(self,ctx):
pass
# Exit a parse tree produced by CParser#pointer.
# @param ctx Type: CParser.PointerContext
def exitPointer(self,ctx):
pass
# Enter a parse tree produced by CParser#parameter_type_list.
# @param ctx Type: CParser.Parameter_type_listContext
def enterParameter_type_list(self,ctx):
pass
# Exit a parse tree produced by CParser#parameter_type_list.
# @param ctx Type: CParser.Parameter_type_listContext
def exitParameter_type_list(self,ctx):
pass
# Enter a parse tree produced by CParser#parameter_list.
# @param ctx Type: CParser.Parameter_listContext
def enterParameter_list(self,ctx):
pass
# Exit a parse tree produced by CParser#parameter_list.
# @param ctx Type: CParser.Parameter_listContext
def exitParameter_list(self,ctx):
pass
# Enter a parse tree produced by CParser#parameter_declaration.
# @param ctx Type: CParser.Parameter_declarationContext
def enterParameter_declaration(self,ctx):
pass
# Exit a parse tree produced by CParser#parameter_declaration.
# @param ctx Type: CParser.Parameter_declarationContext
def exitParameter_declaration(self,ctx):
pass
# Enter a parse tree produced by CParser#identifier_list.
# @param ctx Type: CParser.Identifier_listContext
def enterIdentifier_list(self,ctx):
pass
# Exit a parse tree produced by CParser#identifier_list.
# @param ctx Type: CParser.Identifier_listContext
def exitIdentifier_list(self,ctx):
pass
# Enter a parse tree produced by CParser#type_name.
# @param ctx Type: CParser.Type_nameContext
def enterType_name(self,ctx):
pass
# Exit a parse tree produced by CParser#type_name.
# @param ctx Type: CParser.Type_nameContext
def exitType_name(self,ctx):
pass
# Enter a parse tree produced by CParser#abstract_declarator.
# @param ctx Type: CParser.Abstract_declaratorContext
def enterAbstract_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#abstract_declarator.
# @param ctx Type: CParser.Abstract_declaratorContext
def exitAbstract_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#direct_abstract_declarator.
# @param ctx Type: CParser.Direct_abstract_declaratorContext
def enterDirect_abstract_declarator(self,ctx):
pass
# Exit a parse tree produced by CParser#direct_abstract_declarator.
# @param ctx Type: CParser.Direct_abstract_declaratorContext
def exitDirect_abstract_declarator(self,ctx):
pass
# Enter a parse tree produced by CParser#abstract_declarator_suffix.
# @param ctx Type: CParser.Abstract_declarator_suffixContext
def enterAbstract_declarator_suffix(self,ctx):
pass
# Exit a parse tree produced by CParser#abstract_declarator_suffix.
# @param ctx Type: CParser.Abstract_declarator_suffixContext
def exitAbstract_declarator_suffix(self,ctx):
pass
# Enter a parse tree produced by CParser#initializer.
# @param ctx Type: CParser.InitializerContext
def enterInitializer(self,ctx):
pass
# Exit a parse tree produced by CParser#initializer.
# @param ctx Type: CParser.InitializerContext
def exitInitializer(self,ctx):
pass
# Enter a parse tree produced by CParser#initializer_list.
# @param ctx Type: CParser.Initializer_listContext
def enterInitializer_list(self,ctx):
pass
# Exit a parse tree produced by CParser#initializer_list.
# @param ctx Type: CParser.Initializer_listContext
def exitInitializer_list(self,ctx):
pass
# Enter a parse tree produced by CParser#argument_expression_list.
# @param ctx Type: CParser.Argument_expression_listContext
def enterArgument_expression_list(self,ctx):
pass
# Exit a parse tree produced by CParser#argument_expression_list.
# @param ctx Type: CParser.Argument_expression_listContext
def exitArgument_expression_list(self,ctx):
pass
# Enter a parse tree produced by CParser#additive_expression.
# @param ctx Type: CParser.Additive_expressionContext
def enterAdditive_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#additive_expression.
# @param ctx Type: CParser.Additive_expressionContext
def exitAdditive_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#multiplicative_expression.
# @param ctx Type: CParser.Multiplicative_expressionContext
def enterMultiplicative_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#multiplicative_expression.
# @param ctx Type: CParser.Multiplicative_expressionContext
def exitMultiplicative_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#cast_expression.
# @param ctx Type: CParser.Cast_expressionContext
def enterCast_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#cast_expression.
# @param ctx Type: CParser.Cast_expressionContext
def exitCast_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#unary_expression.
# @param ctx Type: CParser.Unary_expressionContext
def enterUnary_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#unary_expression.
# @param ctx Type: CParser.Unary_expressionContext
def exitUnary_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#postfix_expression.
# @param ctx Type: CParser.Postfix_expressionContext
def enterPostfix_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#postfix_expression.
# @param ctx Type: CParser.Postfix_expressionContext
def exitPostfix_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#macro_parameter_list.
# @param ctx Type: CParser.Macro_parameter_listContext
def enterMacro_parameter_list(self,ctx):
pass
# Exit a parse tree produced by CParser#macro_parameter_list.
# @param ctx Type: CParser.Macro_parameter_listContext
def exitMacro_parameter_list(self,ctx):
pass
# Enter a parse tree produced by CParser#unary_operator.
# @param ctx Type: CParser.Unary_operatorContext
def enterUnary_operator(self,ctx):
pass
# Exit a parse tree produced by CParser#unary_operator.
# @param ctx Type: CParser.Unary_operatorContext
def exitUnary_operator(self,ctx):
pass
# Enter a parse tree produced by CParser#primary_expression.
# @param ctx Type: CParser.Primary_expressionContext
def enterPrimary_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#primary_expression.
# @param ctx Type: CParser.Primary_expressionContext
def exitPrimary_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#constant.
# @param ctx Type: CParser.ConstantContext
def enterConstant(self,ctx):
pass
# Exit a parse tree produced by CParser#constant.
# @param ctx Type: CParser.ConstantContext
def exitConstant(self,ctx):
pass
# Enter a parse tree produced by CParser#expression.
# @param ctx Type: CParser.ExpressionContext
def enterExpression(self,ctx):
pass
# Exit a parse tree produced by CParser#expression.
# @param ctx Type: CParser.ExpressionContext
def exitExpression(self,ctx):
pass
# Enter a parse tree produced by CParser#constant_expression.
# @param ctx Type: CParser.Constant_expressionContext
def enterConstant_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#constant_expression.
# @param ctx Type: CParser.Constant_expressionContext
def exitConstant_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#assignment_expression.
# @param ctx Type: CParser.Assignment_expressionContext
def enterAssignment_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#assignment_expression.
# @param ctx Type: CParser.Assignment_expressionContext
def exitAssignment_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#lvalue.
# @param ctx Type: CParser.LvalueContext
def enterLvalue(self,ctx):
pass
# Exit a parse tree produced by CParser#lvalue.
# @param ctx Type: CParser.LvalueContext
def exitLvalue(self,ctx):
pass
# Enter a parse tree produced by CParser#assignment_operator.
# @param ctx Type: CParser.Assignment_operatorContext
def enterAssignment_operator(self,ctx):
pass
# Exit a parse tree produced by CParser#assignment_operator.
# @param ctx Type: CParser.Assignment_operatorContext
def exitAssignment_operator(self,ctx):
pass
# Enter a parse tree produced by CParser#conditional_expression.
# @param ctx Type: CParser.Conditional_expressionContext
def enterConditional_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#conditional_expression.
# @param ctx Type: CParser.Conditional_expressionContext
def exitConditional_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#logical_or_expression.
# @param ctx Type: CParser.Logical_or_expressionContext
def enterLogical_or_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#logical_or_expression.
# @param ctx Type: CParser.Logical_or_expressionContext
def exitLogical_or_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#logical_and_expression.
# @param ctx Type: CParser.Logical_and_expressionContext
def enterLogical_and_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#logical_and_expression.
# @param ctx Type: CParser.Logical_and_expressionContext
def exitLogical_and_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#inclusive_or_expression.
# @param ctx Type: CParser.Inclusive_or_expressionContext
def enterInclusive_or_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#inclusive_or_expression.
# @param ctx Type: CParser.Inclusive_or_expressionContext
def exitInclusive_or_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#exclusive_or_expression.
# @param ctx Type: CParser.Exclusive_or_expressionContext
def enterExclusive_or_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#exclusive_or_expression.
# @param ctx Type: CParser.Exclusive_or_expressionContext
def exitExclusive_or_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#and_expression.
# @param ctx Type: CParser.And_expressionContext
def enterAnd_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#and_expression.
# @param ctx Type: CParser.And_expressionContext
def exitAnd_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#equality_expression.
# @param ctx Type: CParser.Equality_expressionContext
def enterEquality_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#equality_expression.
# @param ctx Type: CParser.Equality_expressionContext
def exitEquality_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#relational_expression.
# @param ctx Type: CParser.Relational_expressionContext
def enterRelational_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#relational_expression.
# @param ctx Type: CParser.Relational_expressionContext
def exitRelational_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#shift_expression.
# @param ctx Type: CParser.Shift_expressionContext
def enterShift_expression(self,ctx):
pass
# Exit a parse tree produced by CParser#shift_expression.
# @param ctx Type: CParser.Shift_expressionContext
def exitShift_expression(self,ctx):
pass
# Enter a parse tree produced by CParser#statement.
# @param ctx Type: CParser.StatementContext
def enterStatement(self,ctx):
pass
# Exit a parse tree produced by CParser#statement.
# @param ctx Type: CParser.StatementContext
def exitStatement(self,ctx):
pass
# Enter a parse tree produced by CParser#asm2_statement.
# @param ctx Type: CParser.Asm2_statementContext
def enterAsm2_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#asm2_statement.
# @param ctx Type: CParser.Asm2_statementContext
def exitAsm2_statement(self,ctx):
pass
# Enter a parse tree produced by CParser#asm1_statement.
# @param ctx Type: CParser.Asm1_statementContext
def enterAsm1_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#asm1_statement.
# @param ctx Type: CParser.Asm1_statementContext
def exitAsm1_statement(self,ctx):
pass
# Enter a parse tree produced by CParser#asm_statement.
# @param ctx Type: CParser.Asm_statementContext
def enterAsm_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#asm_statement.
# @param ctx Type: CParser.Asm_statementContext
def exitAsm_statement(self,ctx):
pass
# Enter a parse tree produced by CParser#macro_statement.
# @param ctx Type: CParser.Macro_statementContext
def enterMacro_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#macro_statement.
# @param ctx Type: CParser.Macro_statementContext
def exitMacro_statement(self,ctx):
pass
# Enter a parse tree produced by CParser#labeled_statement.
# @param ctx Type: CParser.Labeled_statementContext
def enterLabeled_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#labeled_statement.
# @param ctx Type: CParser.Labeled_statementContext
def exitLabeled_statement(self,ctx):
pass
# Enter a parse tree produced by CParser#compound_statement.
# @param ctx Type: CParser.Compound_statementContext
def enterCompound_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#compound_statement.
# @param ctx Type: CParser.Compound_statementContext
def exitCompound_statement(self,ctx):
pass
# Enter a parse tree produced by CParser#statement_list.
# @param ctx Type: CParser.Statement_listContext
def enterStatement_list(self,ctx):
pass
# Exit a parse tree produced by CParser#statement_list.
# @param ctx Type: CParser.Statement_listContext
def exitStatement_list(self,ctx):
pass
# Enter a parse tree produced by CParser#expression_statement.
# @param ctx Type: CParser.Expression_statementContext
def enterExpression_statement(self,ctx):
pass
# Exit a parse tree produced by CParser#expression_statement.
# @param ctx Type: CParser.Expression_statementContext
    def exitExpression_statement(self,ctx):
        """Listener hook: parser exited a CParser#expression_statement rule; default no-op."""
        pass
# Enter a parse tree produced by CParser#selection_statement.
# @param ctx Type: CParser.Selection_statementContext
    def enterSelection_statement(self,ctx):
        """Listener hook: parser entered a CParser#selection_statement rule; default no-op."""
        pass
# Exit a parse tree produced by CParser#selection_statement.
# @param ctx Type: CParser.Selection_statementContext
    def exitSelection_statement(self,ctx):
        """Listener hook: parser exited a CParser#selection_statement rule; default no-op."""
        pass
# Enter a parse tree produced by CParser#iteration_statement.
# @param ctx Type: CParser.Iteration_statementContext
    def enterIteration_statement(self,ctx):
        """Listener hook: parser entered a CParser#iteration_statement rule; default no-op."""
        pass
# Exit a parse tree produced by CParser#iteration_statement.
# @param ctx Type: CParser.Iteration_statementContext
    def exitIteration_statement(self,ctx):
        """Listener hook: parser exited a CParser#iteration_statement rule; default no-op."""
        pass
# Enter a parse tree produced by CParser#jump_statement.
# @param ctx Type: CParser.Jump_statementContext
    def enterJump_statement(self,ctx):
        """Listener hook: parser entered a CParser#jump_statement rule; default no-op."""
        pass
# Exit a parse tree produced by CParser#jump_statement.
# @param ctx Type: CParser.Jump_statementContext
    def exitJump_statement(self,ctx):
        """Listener hook: parser exited a CParser#jump_statement rule; default no-op."""
        pass
| edk2-master | BaseTools/Source/Python/Eot/CParser4/CListener.py |
## @file
# Python 'GenPatchPcdTable' package initialization file.
#
# This file is required to make Python interpreter treat the directory
# as containing package.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
| edk2-master | BaseTools/Source/Python/GenPatchPcdTable/__init__.py |
## @file
# Generate PCD table for 'Patchable In Module' type PCD with given .map file.
# The Patch PCD table like:
#
# PCD Name Offset in binary
# ======== ================
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#
#====================================== External Libraries ========================================
from __future__ import print_function
import optparse
import Common.LongFilePathOs as os
import re
import array
from Common.BuildToolError import *
import Common.EdkLogger as EdkLogger
from Common.Misc import PeImageClass, startPatternGeneral, addressPatternGeneral, valuePatternGcc, pcdPatternGcc, secReGeneral
from Common.BuildVersion import gBUILD_VERSION
from Common.LongFilePathSupport import OpenLongFilePath as open
# Version and Copyright
__version_number__ = ("0.10" + " " + gBUILD_VERSION)
__version__ = "%prog Version " + __version_number__
__copyright__ = "Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved."
#====================================== Internal Libraries ========================================
#============================================== Code ===============================================
# Matches one symbol-table row of an MSFT/ICC/EBC map file:
# "<section_no>:<offset>  <symbol_name>  <virtual_address>" (all hex fields).
symRe = re.compile('^([\da-fA-F]+):([\da-fA-F]+) +([\.\-:\\\\\w\?@\$<>]+) +([\da-fA-F]+)', re.UNICODE)
def parsePcdInfoFromMapFile(mapfilepath, efifilepath):
    """ Parse map file to get binary patch pcd information

    Detects the tool chain that produced the map file from its first
    line and dispatches to the matching parser.

    @param mapfilepath  absolute path of the module map file
    @param efifilepath  absolute path of the matching EFI image
    @return a list which element hold (PcdName, Offset, SectionName),
            or None if the map file cannot be read or is empty
    """
    try:
        # 'open' is the long-file-path-aware wrapper imported at module
        # scope; the context manager guarantees the handle is closed.
        with open(mapfilepath, 'r') as f:
            lines = f.readlines()
    except OSError:
        # Missing/unreadable map file: callers treat None as "no PCD info".
        # (Was a bare 'except:', which also swallowed KeyboardInterrupt.)
        return None

    if not lines:
        return None
    firstline = lines[0].strip()
    # XCODE/CLANG9 map files start with a column header line ...
    if re.match('^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', firstline):
        return _parseForXcodeAndClang9(lines, efifilepath)
    # GCC map files start with the archive-member cross-reference table.
    if (firstline.startswith("Archive member included ") and
        firstline.endswith(" file (symbol)")):
        return _parseForGCC(lines, efifilepath)
    # ... or with a "# Path:" line for the newer XCODE/CLANG9 format.
    if firstline.startswith("# Path:"):
        return _parseForXcodeAndClang9(lines, efifilepath)
    # Everything else is treated as an MSFT/ICC/EBC style map file.
    return _parseGeneral(lines, efifilepath)
def _parseForXcodeAndClang9(lines, efifilepath):
valuePattern = re.compile('^([\da-fA-FxX]+)([\s\S]*)([_]*_gPcd_BinaryPatch_([\w]+))')
status = 0
pcds = []
for line in lines:
line = line.strip()
if status == 0 and (re.match('^\s*Address\s*Size\s*Align\s*Out\s*In\s*Symbol\s*$', line) \
or line == "# Symbols:"):
status = 1
continue
if status == 1 and len(line) != 0:
if '_gPcd_BinaryPatch_' in line:
m = valuePattern.match(line)
if m is not None:
pcds.append((m.groups(0)[3], int(m.groups(0)[0], 16)))
return pcds
def _parseForGCC(lines, efifilepath):
    """ Parse map file generated by GCC linker

    Walks the map file with a small state machine, records the linker
    sections, collects every .data._gPcd_BinaryPatch_* entry, then maps
    the symbol addresses onto the section layout of the EFI image.

    @param lines        lines read from the map file
    @param efifilepath  path of the EFI image matching the map file
    @return list of [PcdName, OffsetInEfi, SectionName], or None when the
            EFI image yields no section headers
    """
    # Matches a ".data._gPcd_BinaryPatch_<PcdName>" section line.
    dataPattern = re.compile('^.data._gPcd_BinaryPatch_([\w_\d]+)$')
    status = 0
    imageBase = -1
    sections = []
    bpcds = []
    for index, line in enumerate(lines):
        line = line.strip()
        # status machine transection:
        # 0 = preamble, 1 = after "Memory Configuration",
        # 2 = after "Linker script and memory map", 3 = inside START GROUP
        if status == 0 and line == "Memory Configuration":
            status = 1
            continue
        elif status == 1 and line == 'Linker script and memory map':
            status = 2
            continue
        elif status ==2 and line == 'START GROUP':
            status = 3
            continue

        # status handler
        if status == 3:
            # Section line: remember (name, address, ...) for later lookup.
            m = valuePatternGcc.match(line)
            if m is not None:
                sections.append(m.groups(0))
        if status == 3:
            m = dataPattern.match(line)
            if m is not None:
                # The symbol's address/size is on the following line.
                if lines[index + 1]:
                    PcdName = m.groups(0)[0]
                    m = pcdPatternGcc.match(lines[index + 1].strip())
                    if m is not None:
                        bpcds.append((PcdName, int(m.groups(0)[0], 16), int(sections[-1][1], 16), sections[-1][0]))

    # get section information from efi file
    efisecs = PeImageClass(efifilepath).SectionHeaderList
    if efisecs is None or len(efisecs) == 0:
        return None
    #redirection: difference between the .text address in the map file and
    # in the EFI image; applied to translate symbol addresses to offsets.
    redirection = 0
    for efisec in efisecs:
        for section in sections:
            if section[0].strip() == efisec[0].strip() and section[0].strip() == '.text':
                redirection = int(section[1], 16) - efisec[1]
    pcds = []
    for pcd in bpcds:
        for efisec in efisecs:
            # Find the EFI section containing the symbol address and convert
            # it to a file offset within that section.
            if pcd[1] >= efisec[1] and pcd[1] < efisec[1]+efisec[3]:
                #assert efisec[0].strip() == pcd[3].strip() and efisec[1] + redirection == pcd[2], "There are some differences between map file and efi file"
                pcds.append([pcd[0], efisec[2] + pcd[1] - efisec[1] - redirection, efisec[0]])
    return pcds
def _parseGeneral(lines, efifilepath):
    """ For MSFT, ICC, EBC

    @param lines        line array for map file
    @param efifilepath  path of the EFI image matching the map file
    @return a list which element hold (PcdName, Offset, SectionName),
            or None when no patchable PCD symbol or EFI section is found
    """
    status = 0    # 0 - beginning of file; 1 - PE section definition; 2 - symbol table; 3 - after entry point
    secs = []     # list of [sec_no, sec_start, sec_length, sec_name, sec_class]
    bPcds = []    # collected [PcdName, SectionName, SymOffset, vir_addr, sec_no]
    # Matches a "__gPcd_BinaryPatch_<PcdName>" symbol name.
    symPattern = re.compile('^[_]+gPcd_BinaryPatch_([\w]+)')

    for line in lines:
        line = line.strip()
        # State transitions driven by the well-known map file markers.
        if startPatternGeneral.match(line):
            status = 1
            continue
        if addressPatternGeneral.match(line):
            status = 2
            continue
        if line.startswith("entry point at"):
            status = 3
            continue
        if status == 1 and len(line) != 0:
            # Section table row.
            m = secReGeneral.match(line)
            assert m is not None, "Fail to parse the section in map file , line is %s" % line
            sec_no, sec_start, sec_length, sec_name, sec_class = m.groups(0)
            secs.append([int(sec_no, 16), int(sec_start, 16), int(sec_length, 16), sec_name, sec_class])
        if status == 2 and len(line) != 0:
            # Symbol table row.
            m = symRe.match(line)
            assert m is not None, "Fail to parse the symbol in map file, line is %s" % line
            sec_no, sym_offset, sym_name, vir_addr = m.groups(0)
            sec_no = int(sec_no, 16)
            sym_offset = int(sym_offset, 16)
            vir_addr = int(vir_addr, 16)
            m2 = symPattern.match(sym_name)
            if m2 is not None:
                # found a binary pcd entry in map file; attach it to the
                # section whose address range contains the symbol.
                for sec in secs:
                    if sec[0] == sec_no and (sym_offset >= sec[1] and sym_offset < sec[1] + sec[2]):
                        bPcds.append([m2.groups(0)[0], sec[3], sym_offset, vir_addr, sec_no])

    if len(bPcds) == 0: return None

    # get section information from efi file
    efisecs = PeImageClass(efifilepath).SectionHeaderList
    if efisecs is None or len(efisecs) == 0:
        return None

    pcds = []
    for pcd in bPcds:
        index = 0
        for efisec in efisecs:
            index = index + 1
            # Match the map-file section to the EFI section by name, falling
            # back to the 1-based section index when the names differ.
            if pcd[1].strip() == efisec[0].strip():
                pcds.append([pcd[0], efisec[2] + pcd[2], efisec[0]])
            elif pcd[4] == index:
                pcds.append([pcd[0], efisec[2] + pcd[2], efisec[0]])
    return pcds
def generatePcdTable(list, pcdpath):
    """Write the patchable PCD table to a text file.

    @param list     list of (PcdName, Offset, SectionName) entries
    @param pcdpath  path of the output table file
    @raise OSError  if the output file cannot be created
    """
    # BUGFIX: the original wrapped open() in "try/except: pass" and then
    # used the (undefined) handle anyway, turning an open failure into a
    # confusing NameError.  Let the OSError propagate with its real cause
    # and use a context manager so the file is always closed.
    with open(pcdpath, 'w') as f:
        f.write('PCD Name Offset Section Name\r\n')
        for pcditem in list:
            f.write('%-30s 0x%-08X %-6s\r\n' % (pcditem[0], pcditem[1], pcditem[2]))
    #print 'Success to generate Binary Patch PCD table at %s!' % pcdpath
if __name__ == '__main__':
    # Command line: -m <MapFile> -e <EfiFile> [-o <OutFile>]
    UsageString = "%prog -m <MapFile> -e <EfiFile> -o <OutFile>"
    AdditionalNotes = "\nPCD table is generated in file name with .BinaryPcdTable.txt postfix"
    parser = optparse.OptionParser(description=__copyright__, version=__version__, usage=UsageString)
    parser.add_option('-m', '--mapfile', action='store', dest='mapfile',
                      help='Absolute path of module map file.')
    parser.add_option('-e', '--efifile', action='store', dest='efifile',
                      help='Absolute path of EFI binary file.')
    parser.add_option('-o', '--outputfile', action='store', dest='outfile',
                      help='Absolute path of output file to store the got patchable PCD table.')

    (options, args) = parser.parse_args()
    if options.mapfile is None or options.efifile is None:
        # Both inputs are mandatory; show usage when either is missing.
        print(parser.get_usage())
    elif os.path.exists(options.mapfile) and os.path.exists(options.efifile):
        list = parsePcdInfoFromMapFile(options.mapfile, options.efifile)
        if list is not None:
            if options.outfile is not None:
                generatePcdTable(list, options.outfile)
            else:
                # Default output: <mapfile base>.BinaryPcdTable.txt
                generatePcdTable(list, options.mapfile.replace('.map', '.BinaryPcdTable.txt'))
        else:
            print('Fail to generate Patch PCD Table based on map file and efi file')
    else:
        print('Fail to generate Patch PCD Table for fail to find map file or efi file!')
| edk2-master | BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py |
## @file
# This tool encodes and decodes GUIDed FFS sections or FMP capsule for a GUID type of
# EFI_CERT_TYPE_RSA2048_SHA256_GUID defined in the UEFI 2.4 Specification as
# {0xa7717414, 0xc616, 0x4977, {0x94, 0x20, 0x84, 0x47, 0x12, 0xa7, 0x35, 0xbf}}
# This tool has been tested with OpenSSL 1.0.1e 11 Feb 2013
#
# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Rsa2048Sha256Sign
'''
from __future__ import print_function
import os
import sys
import argparse
import subprocess
import uuid
import struct
import collections
from Common.BuildVersion import gBUILD_VERSION
#
# Globals for help information
#
__prog__ = 'Rsa2048Sha256Sign'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
__usage__ = '%s -e|-d [options] <input_file>' % (__prog__)
#
# GUID for SHA 256 Hash Algorithm from UEFI Specification
#
EFI_HASH_ALGORITHM_SHA256_GUID = uuid.UUID('{51aa59de-fdf2-4ea3-bc63-875fb7842ee9}')

#
# Structure definition to unpack EFI_CERT_BLOCK_RSA_2048_SHA256 from UEFI 2.4 Specification
#
#   typedef struct _EFI_CERT_BLOCK_RSA_2048_SHA256 {
#     EFI_GUID HashType;
#     UINT8 PublicKey[256];
#     UINT8 Signature[256];
#   } EFI_CERT_BLOCK_RSA_2048_SHA256;
#
EFI_CERT_BLOCK_RSA_2048_SHA256 = collections.namedtuple('EFI_CERT_BLOCK_RSA_2048_SHA256', ['HashType', 'PublicKey', 'Signature'])
# '16s256s256s' = 16-byte GUID + 256-byte public key + 256-byte signature.
EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT = struct.Struct('16s256s256s')

#
# Filename of test signing private key that is stored in same directory as this tool
#
TEST_SIGNING_PRIVATE_KEY_FILENAME = 'TestSigningPrivateKey.pem'
if __name__ == '__main__':
    #
    # Create command line argument parser object
    #
    parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-e", action="store_true", dest='Encode', help='encode file')
    group.add_argument("-d", action="store_true", dest='Decode', help='decode file')
    group.add_argument("--version", action='version', version=__version__)
    parser.add_argument("-o", "--output", dest='OutputFile', type=str, metavar='filename', help="specify the output filename", required=True)
    parser.add_argument("--monotonic-count", dest='MonotonicCountStr', type=str, help="specify the MonotonicCount in FMP capsule.")
    parser.add_argument("--private-key", dest='PrivateKeyFile', type=argparse.FileType('rb'), help="specify the private key filename. If not specified, a test signing key is used.")
    parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
    parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
    parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")
    parser.add_argument(metavar="input_file", dest='InputFile', type=argparse.FileType('rb'), help="specify the input filename")

    #
    # Parse command line arguments
    #
    args = parser.parse_args()

    #
    # Generate file path to Open SSL command
    #
    OpenSslCommand = 'openssl'
    try:
        OpenSslPath = os.environ['OPENSSL_PATH']
        OpenSslCommand = os.path.join(OpenSslPath, OpenSslCommand)
        if ' ' in OpenSslCommand:
            OpenSslCommand = '"' + OpenSslCommand + '"'
    except:
        # OPENSSL_PATH not set: deliberately fall back to 'openssl' on PATH.
        pass

    #
    # Verify that Open SSL command is available
    #
    try:
        Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    except:
        print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
        sys.exit(1)

    Version = Process.communicate()
    if Process.returncode != 0:
        print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
        sys.exit(Process.returncode)
    print(Version[0].decode('utf-8'))

    #
    # Read input file into a buffer and save input filename
    #
    args.InputFileName = args.InputFile.name
    args.InputFileBuffer = args.InputFile.read()
    args.InputFile.close()

    #
    # Save output filename and check if path exists
    #
    OutputDir = os.path.dirname(args.OutputFile)
    # BUGFIX: dirname() is '' for a bare filename in the current directory;
    # only validate the directory when one was actually given.
    if OutputDir and not os.path.exists(OutputDir):
        print('ERROR: The output path does not exist: %s' % OutputDir)
        sys.exit(1)
    args.OutputFileName = args.OutputFile

    #
    # Save private key filename and close private key file
    #
    try:
        args.PrivateKeyFileName = args.PrivateKeyFile.name
        args.PrivateKeyFile.close()
    except:
        # No --private-key given: fall back to the test signing key that
        # lives next to this tool.
        try:
            #
            # Get path to currently executing script or executable
            #
            if hasattr(sys, 'frozen'):
                RsaToolPath = sys.executable
            else:
                RsaToolPath = sys.argv[0]
            if RsaToolPath.startswith('"'):
                RsaToolPath = RsaToolPath[1:]
            if RsaToolPath.endswith('"'):
                RsaToolPath = RsaToolPath[:-1]
            args.PrivateKeyFileName = os.path.join(os.path.dirname(os.path.realpath(RsaToolPath)), TEST_SIGNING_PRIVATE_KEY_FILENAME)
            args.PrivateKeyFile = open(args.PrivateKeyFileName, 'rb')
            args.PrivateKeyFile.close()
        except:
            print('ERROR: test signing private key file %s missing' % (args.PrivateKeyFileName))
            sys.exit(1)

    #
    # Extract public key from private key into STDOUT
    #
    Process = subprocess.Popen('%s rsa -in "%s" -modulus -noout' % (OpenSslCommand, args.PrivateKeyFileName), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip()
    PublicKeyHexString = PublicKeyHexString.decode('utf-8')
    PublicKey = ''
    while len(PublicKeyHexString) > 0:
        PublicKey = PublicKey + PublicKeyHexString[0:2]
        PublicKeyHexString=PublicKeyHexString[2:]
    if Process.returncode != 0:
        sys.exit(Process.returncode)

    if args.MonotonicCountStr:
        try:
            if args.MonotonicCountStr.upper().startswith('0X'):
                args.MonotonicCountValue = int(args.MonotonicCountStr, 16)
            else:
                args.MonotonicCountValue = int(args.MonotonicCountStr)
        except ValueError:
            # BUGFIX: the original silently ignored a malformed value here
            # ("except: pass") and later died with an AttributeError at
            # struct.pack(); fail up front with a clear message instead.
            print('ERROR: --monotonic-count value %s is not a valid integer' % (args.MonotonicCountStr))
            sys.exit(1)

    if args.Encode:
        # When a monotonic count is supplied it is appended to the signed
        # payload as a UINT64 per the FMP capsule authentication format.
        FullInputFileBuffer = args.InputFileBuffer
        if args.MonotonicCountStr:
            format = "%dsQ" % len(args.InputFileBuffer)
            FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)
        #
        # Sign the input file using the specified private key and capture signature from STDOUT
        #
        Process = subprocess.Popen('%s dgst -sha256 -sign "%s"' % (OpenSslCommand, args.PrivateKeyFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        Signature = Process.communicate(input=FullInputFileBuffer)[0]
        if Process.returncode != 0:
            sys.exit(Process.returncode)

        #
        # Write output file that contains hash GUID, Public Key, Signature, and Input data
        #
        args.OutputFile = open(args.OutputFileName, 'wb')
        args.OutputFile.write(EFI_HASH_ALGORITHM_SHA256_GUID.bytes_le)
        args.OutputFile.write(bytearray.fromhex(str(PublicKey)))
        args.OutputFile.write(Signature)
        args.OutputFile.write(args.InputFileBuffer)
        args.OutputFile.close()

    if args.Decode:
        #
        # Parse Hash Type, Public Key, and Signature from the section header
        #
        Header = EFI_CERT_BLOCK_RSA_2048_SHA256._make(EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.unpack_from(args.InputFileBuffer))
        args.InputFileBuffer = args.InputFileBuffer[EFI_CERT_BLOCK_RSA_2048_SHA256_STRUCT.size:]

        #
        # Verify that the Hash Type matches the expected SHA256 type
        #
        if uuid.UUID(bytes_le = Header.HashType) != EFI_HASH_ALGORITHM_SHA256_GUID:
            print('ERROR: unsupported hash GUID')
            sys.exit(1)

        #
        # Verify the public key
        #
        if Header.PublicKey != bytearray.fromhex(PublicKey):
            print('ERROR: Public key in input file does not match public key from private key file')
            sys.exit(1)

        FullInputFileBuffer = args.InputFileBuffer
        if args.MonotonicCountStr:
            format = "%dsQ" % len(args.InputFileBuffer)
            FullInputFileBuffer = struct.pack(format, args.InputFileBuffer, args.MonotonicCountValue)

        #
        # Write Signature to output file
        #
        open(args.OutputFileName, 'wb').write(Header.Signature)

        #
        # Verify signature
        #
        Process = subprocess.Popen('%s dgst -sha256 -prverify "%s" -signature %s' % (OpenSslCommand, args.PrivateKeyFileName, args.OutputFileName), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        Process.communicate(input=FullInputFileBuffer)
        if Process.returncode != 0:
            print('ERROR: Verification failed')
            os.remove (args.OutputFileName)
            sys.exit(Process.returncode)

        #
        # Save output file contents from input file
        #
        open(args.OutputFileName, 'wb').write(args.InputFileBuffer)
| edk2-master | BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256Sign.py |
## @file
# This tool can be used to generate new RSA 2048 bit private/public key pairs
# in a PEM file format using OpenSSL command line utilities that are installed
# on the path specified by the system environment variable OPENSSL_PATH.
# This tool can also optionally write one or more SHA 256 hashes of 2048 bit
# public keys to a binary file, write one or more SHA 256 hashes of 2048 bit
# public keys to a file in a C structure format, and in verbose mode display
# one or more SHA 256 hashes of 2048 bit public keys in a C structure format
# on STDOUT.
# This tool has been tested with OpenSSL 1.0.1e 11 Feb 2013
#
# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
Rsa2048Sha256GenerateKeys
'''
from __future__ import print_function
import os
import sys
import argparse
import subprocess
from Common.BuildVersion import gBUILD_VERSION
#
# Globals for help information
#
__prog__ = 'Rsa2048Sha256GenerateKeys'
__version__ = '%s Version %s' % (__prog__, '0.9 ' + gBUILD_VERSION)
__copyright__ = 'Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.'
__usage__ = '%s [options]' % (__prog__)
if __name__ == '__main__':
    #
    # Create command line argument parser object
    #
    parser = argparse.ArgumentParser(prog=__prog__, usage=__usage__, description=__copyright__, conflict_handler='resolve')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("--version", action='version', version=__version__)
    group.add_argument("-o", "--output", dest='OutputFile', type=argparse.FileType('wb'), metavar='filename', nargs='*', help="specify the output private key filename in PEM format")
    group.add_argument("-i", "--input", dest='InputFile', type=argparse.FileType('rb'), metavar='filename', nargs='*', help="specify the input private key filename in PEM format")
    parser.add_argument("--public-key-hash", dest='PublicKeyHashFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in binary format")
    parser.add_argument("--public-key-hash-c", dest='PublicKeyHashCFile', type=argparse.FileType('wb'), help="specify the public key hash filename that is SHA 256 hash of 2048 bit RSA public key in C structure format")
    parser.add_argument("-v", "--verbose", dest='Verbose', action="store_true", help="increase output messages")
    parser.add_argument("-q", "--quiet", dest='Quiet', action="store_true", help="reduce output messages")
    parser.add_argument("--debug", dest='Debug', type=int, metavar='[0-9]', choices=range(0, 10), default=0, help="set debug level")

    #
    # Parse command line arguments
    #
    args = parser.parse_args()

    #
    # Generate file path to Open SSL command
    #
    OpenSslCommand = 'openssl'
    try:
        OpenSslPath = os.environ['OPENSSL_PATH']
        OpenSslCommand = os.path.join(OpenSslPath, OpenSslCommand)
        if ' ' in OpenSslCommand:
            OpenSslCommand = '"' + OpenSslCommand + '"'
    except:
        # OPENSSL_PATH not set: deliberately fall back to 'openssl' on PATH.
        pass

    #
    # Verify that Open SSL command is available
    #
    try:
        Process = subprocess.Popen('%s version' % (OpenSslCommand), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    except:
        print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
        sys.exit(1)

    Version = Process.communicate()
    if Process.returncode != 0:
        print('ERROR: Open SSL command not available. Please verify PATH or set OPENSSL_PATH')
        sys.exit(Process.returncode)
    print(Version[0].decode())

    args.PemFileName = []

    #
    # Check for output file argument
    #
    if args.OutputFile is not None:
        for Item in args.OutputFile:
            #
            # Save PEM filename and close output file
            #
            args.PemFileName.append(Item.name)
            Item.close()

            #
            # Generate private key and save it to output file in a PEM file format
            #
            Process = subprocess.Popen('%s genrsa -out %s 2048' % (OpenSslCommand, Item.name), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            Process.communicate()
            if Process.returncode != 0:
                print('ERROR: RSA 2048 key generation failed')
                sys.exit(Process.returncode)

    #
    # Check for input file argument
    #
    if args.InputFile is not None:
        for Item in args.InputFile:
            #
            # Save PEM filename and close input file
            #
            args.PemFileName.append(Item.name)
            Item.close()

    PublicKeyHash = bytearray()
    for Item in args.PemFileName:
        #
        # Extract public key from private key into STDOUT
        #
        Process = subprocess.Popen('%s rsa -in %s -modulus -noout' % (OpenSslCommand, Item), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        # BUGFIX: communicate() returns bytes, so split on the bytes
        # separator first and decode the hex string afterwards.  The
        # original decoded first and then split on b'=', which raises
        # TypeError on Python 3.
        PublicKeyHexString = Process.communicate()[0].split(b'=')[1].strip().decode()
        if Process.returncode != 0:
            print('ERROR: Unable to extract public key from private key')
            sys.exit(Process.returncode)
        # BUGFIX: convert the ASCII-hex modulus into its raw binary bytes
        # (the original concatenated str slices onto a bytearray, which
        # raises TypeError).  The published hash is the SHA-256 of the
        # binary public key, matching the PublicKey field that
        # Rsa2048Sha256Sign stores via bytearray.fromhex().
        PublicKey = bytearray.fromhex(PublicKeyHexString)

        #
        # Generate SHA 256 hash of RSA 2048 bit public key into STDOUT
        #
        Process = subprocess.Popen('%s dgst -sha256 -binary' % (OpenSslCommand), stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        Process.stdin.write (PublicKey)
        # BUGFIX: the digest is raw binary; append the bytes unchanged
        # (the original called .decode() here, which fails on arbitrary
        # digest bytes and cannot be added to a bytearray anyway).
        PublicKeyHash = PublicKeyHash + Process.communicate()[0]
        if Process.returncode != 0:
            print('ERROR: Unable to extract SHA 256 hash of public key')
            sys.exit(Process.returncode)

    #
    # Write SHA 256 hash of 2048 bit binary public key to public key hash file
    #
    try:
        args.PublicKeyHashFile.write (PublicKeyHash)
        args.PublicKeyHashFile.close ()
    except:
        # --public-key-hash not supplied (file object is None); optional.
        pass

    #
    # Convert public key hash to a C structure string
    #
    PublicKeyHashC = '{'
    for Item in PublicKeyHash:
        PublicKeyHashC = PublicKeyHashC + '0x%02x, ' % (Item)
    PublicKeyHashC = PublicKeyHashC[:-2] + '}'

    #
    # Write SHA 256 of 2048 bit binary public key to public key hash C structure file
    #
    try:
        # BUGFIX: the file is opened in binary mode; bytes(str) without an
        # encoding raises TypeError on Python 3, so encode explicitly.
        args.PublicKeyHashCFile.write (PublicKeyHashC.encode('utf-8'))
        args.PublicKeyHashCFile.close ()
    except:
        # --public-key-hash-c not supplied (file object is None); optional.
        pass

    #
    # If verbose is enabled display the public key in C structure format
    #
    if args.Verbose:
        print('PublicKeySha256 = ' + PublicKeyHashC)
| edk2-master | BaseTools/Source/Python/Rsa2048Sha256Sign/Rsa2048Sha256GenerateKeys.py |
## @file
# package and install PyEfiCompressor extension
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from distutils.core import setup, Extension
import os

# Build the EfiCompressor C extension against the BaseTools C sources.
# BASE_TOOLS_PATH must point at the root of the base tools tree.
if 'BASE_TOOLS_PATH' not in os.environ:
    # BUGFIX: raising a plain string is a TypeError in Python 3
    # ("exceptions must derive from BaseException"); raise a real
    # exception so the user actually sees the intended message.
    raise RuntimeError("Please define BASE_TOOLS_PATH to the root of base tools tree")

BaseToolsDir = os.environ['BASE_TOOLS_PATH']
setup(
    name="EfiCompressor",
    version="0.01",
    ext_modules=[
        Extension(
            'EfiCompressor',
            sources=[
                os.path.join(BaseToolsDir, 'Source', 'C', 'Common', 'Decompress.c'),
                'EfiCompressor.c'
            ],
            include_dirs=[
                os.path.join(BaseToolsDir, 'Source', 'C', 'Include'),
                os.path.join(BaseToolsDir, 'Source', 'C', 'Include', 'Ia32'),
                os.path.join(BaseToolsDir, 'Source', 'C', 'Common')
            ],
        )
    ],
)
| edk2-master | BaseTools/Source/C/PyEfiCompressor/setup.py |
# @file NmakeSubdirs.py
# This script support parallel build for nmake in windows environment.
# It supports Python2.x and Python3.x both.
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#
# Import Modules
#
from __future__ import print_function
import argparse
import threading
import time
import os
import subprocess
import multiprocessing
import copy
import sys
# Tool identification shown by --version.
__prog__ = 'NmakeSubdirs'
__version__ = '%s Version %s' % (__prog__, '0.10 ')
__copyright__ = 'Copyright (c) 2018, Intel Corporation. All rights reserved.'
__description__ = 'Replace for NmakeSubdirs.bat in windows ,support parallel build for nmake.\n'

# Default worker count: one thread per logical CPU.
cpu_count = multiprocessing.cpu_count()
# Serializes console output produced by concurrent RunCommand calls.
output_lock = threading.Lock()
def RunCommand(WorkDir=None, *Args, **kwargs):
    """Run an external command in WorkDir and echo its output.

    @param WorkDir  directory to run the command in; defaults to the
                    current directory when None
    @param Args     the command line, one argument per element
    @param kwargs   optional 'stdout'/'stderr' overrides for Popen
    @return (returncode, stdout) of the finished process
    @raise RuntimeError if the command exits with a non-zero status
    """
    if WorkDir is None:
        WorkDir = os.curdir
    # Default to capturing a merged stdout/stderr stream.
    if "stderr" not in kwargs:
        kwargs["stderr"] = subprocess.STDOUT
    if "stdout" not in kwargs:
        kwargs["stdout"] = subprocess.PIPE
    p = subprocess.Popen(Args, cwd=WorkDir, stderr=kwargs["stderr"], stdout=kwargs["stdout"])
    stdout, stderr = p.communicate()
    message = ""
    if stdout is not None:
        message = stdout.decode(errors='ignore')  # for compatibility in python 2 and 3

    if p.returncode != 0:
        # BUGFIX: "direcotry" typo in the user-visible error message.
        raise RuntimeError("Error while execute command \'{0}\' in directory {1}\n{2}".format(" ".join(Args), WorkDir, message))

    # BUGFIX: the original acquired the lock and printed before entering
    # its try block, so an exception there leaked the lock forever; the
    # 'with' statement guarantees release on every path.
    with output_lock:
        print("execute command \"{0}\" in directory {1}".format(" ".join(Args), WorkDir))
        try:
            print(message)
        except:
            # Tolerate console encoding errors in the child's output.
            pass

    return p.returncode, stdout
class TaskUnit(object):
    """A deferred call: a function bundled with the arguments to invoke it with."""

    def __init__(self, func, args, kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs

    def __eq__(self, other):
        # Identity comparison only: two task units are never "equal" unless
        # they are the very same object, so list.remove() stays exact.
        return id(self) == id(other)

    def run(self):
        """Invoke the stored function and return whatever it returns."""
        return self.func(*self.args, **self.kwargs)

    def __str__(self):
        # Render as "funcname(arg1,arg2,key=value)" for diagnostics.
        parts = list(self.args)
        parts.extend("{0}={1}".format(key, value) for key, value in self.kwargs.items())
        return "{0}({1})".format(self.func.__name__, ",".join(parts))
class ThreadControl(object):
    """Runs queued TaskUnits on a fixed-size pool of worker threads.

    The pool stops scheduling new tasks as soon as one task raises
    RuntimeError; the first error message is kept in errorMsg.
    """

    def __init__(self, maxthread):
        # Number of worker threads to spawn.
        self._processNum = maxthread
        self.pending = []               # tasks not yet started (guarded by pendingLock)
        self.running = []               # worker threads still alive (guarded by runningLock)
        self.pendingLock = threading.Lock()
        self.runningLock = threading.Lock()
        self.error = False              # set once any task fails
        self.errorLock = threading.Lock()
        self.errorMsg = "errorMsg"

    def addTask(self, func, *args, **kwargs):
        """Queue one deferred call; only valid before startSchedule()."""
        self.pending.append(TaskUnit(func, args, kwargs))

    def waitComplete(self):
        """Block until the scheduler thread (and thus all workers) finished."""
        self._schedule.join()

    def startSchedule(self):
        """Kick off the scheduler on its own thread and return immediately."""
        self._schedule = threading.Thread(target=self.Schedule)
        self._schedule.start()

    def Schedule(self):
        # Create all workers first, then start them while holding
        # runningLock so workers removing themselves don't race the starts.
        for i in range(self._processNum):
            task = threading.Thread(target=self.startTask)
            task.daemon = False
            self.running.append(task)

        self.runningLock.acquire(True)
        for thread in self.running:
            thread.start()
        self.runningLock.release()

        # Poll until every worker has removed itself from 'running'.
        while len(self.running) > 0:
            time.sleep(0.1)
        if self.error:
            print("subprocess not exit successfully")
            print(self.errorMsg)

    def startTask(self):
        # Worker loop: pop and run pending tasks until the queue drains or
        # another worker has flagged an error.
        while True:
            if self.error:
                break
            self.pendingLock.acquire(True)
            if len(self.pending) == 0:
                self.pendingLock.release()
                break
            task = self.pending.pop(0)
            self.pendingLock.release()
            try:
                task.run()
            except RuntimeError as e:
                # Record only the first failure; later workers just stop.
                if self.error: break
                self.errorLock.acquire(True)
                self.error = True
                self.errorMsg = str(e)
                time.sleep(0.1)
                self.errorLock.release()
                break

        # Deregister this worker so Schedule()'s wait loop can finish.
        self.runningLock.acquire(True)
        self.running.remove(threading.current_thread())
        self.runningLock.release()
def Run():
    """Build every requested subdirectory with 'nmake <target>'.

    Uses the module-global 'args' parsed in __main__.  A single
    subdirectory (or --jobs 1) is built inline with live console output;
    otherwise builds are fanned out over a ThreadControl worker pool.
    Exits the process with status 1 on any build failure.
    """
    curdir = os.path.abspath(os.curdir)
    if len(args.subdirs) == 1:
        # No point spinning up worker threads for a single directory.
        args.jobs = 1
    if args.jobs == 1:
        try:
            for dir in args.subdirs:
                # Stream nmake output straight to this console.
                RunCommand(os.path.join(curdir, dir), "nmake", args.target, stdout=sys.stdout, stderr=subprocess.STDOUT)
        except RuntimeError:
            exit(1)
    else:
        controller = ThreadControl(args.jobs)
        for dir in args.subdirs:
            controller.addTask(RunCommand, os.path.join(curdir, dir), "nmake", args.target)
        controller.startSchedule()
        controller.waitComplete()
        if controller.error:
            exit(1)
if __name__ == "__main__":
    # Command line: <target> <subdir>... [--jobs N]
    parser = argparse.ArgumentParser(prog=__prog__, description=__description__ + __copyright__, conflict_handler='resolve')
    parser.add_argument("target", help="the target for nmake")
    parser.add_argument("subdirs", nargs="+", help="the relative dir path of makefile")
    parser.add_argument("--jobs", type=int, dest="jobs", default=cpu_count, help="thread number")
    parser.add_argument('--version', action='version', version=__version__)
    args = parser.parse_args()
    Run()
| edk2-master | BaseTools/Source/C/Makefiles/NmakeSubdirs.py |
## @file
# Unit tests for AutoGen.UniClassObject
#
# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import os
import unittest
import codecs
import TestTools
from Common.Misc import PathClass
import AutoGen.UniClassObject as BtUni
from Common import EdkLogger
EdkLogger.InitializeForUnitTest()
class Tests(TestTools.BaseToolsTest):
    """Checks that .uni string files are restricted to UCS-2 code points.

    Each test encodes sample string-table data into a temporary .uni
    file, feeds it to AutoGen.UniClassObject.UniFileClassObject and
    verifies that valid encodings are accepted while non-UCS-2 content
    (Supplementary Plane characters, lone surrogates) is rejected.
    """

    SampleData = u'''
        #langdef en-US "English"
        #string STR_A #language en-US "STR_A for en-US"
        '''

    def EncodeToFile(self, encoding, string=None):
        """Write 'string' (default: SampleData), optionally encoded, to a
        temporary 'input.uni' file and return its PathClass."""
        if string is None:
            string = self.SampleData
        if encoding is not None:
            data = codecs.encode(string, encoding)
        else:
            data = string
        path = 'input.uni'
        self.WriteTmpFile(path, data)
        return PathClass(self.GetTmpFilePath(path))

    def ErrorFailure(self, error, encoding, shouldPass):
        """Fail the test with a message describing the unexpected outcome."""
        msg = error + ' should '
        if shouldPass:
            msg += 'not '
        msg += 'be generated for '
        msg += '%s data in a .uni file' % encoding
        self.fail(msg)

    def UnicodeErrorFailure(self, encoding, shouldPass):
        self.ErrorFailure('UnicodeError', encoding, shouldPass)

    def EdkErrorFailure(self, encoding, shouldPass):
        self.ErrorFailure('EdkLogger.FatalError', encoding, shouldPass)

    def CheckFile(self, encoding, shouldPass, string=None):
        """Parse the encoded data and assert it passes/fails as expected."""
        path = self.EncodeToFile(encoding, string)
        try:
            BtUni.UniFileClassObject([path])
            if shouldPass:
                return
        except UnicodeError:
            if not shouldPass:
                return
            else:
                self.UnicodeErrorFailure(encoding, shouldPass)
        except EdkLogger.FatalError:
            if not shouldPass:
                return
            else:
                self.EdkErrorFailure(encoding, shouldPass)
        except Exception:
            # Any other exception type also counts as the failure reported
            # below.
            pass

        self.EdkErrorFailure(encoding, shouldPass)

    def testUtf16InUniFile(self):
        self.CheckFile('utf_16', shouldPass=True)

    def testSupplementaryPlaneUnicodeCharInUtf16File(self):
        #
        # Supplementary Plane characters can exist in UTF-16 files,
        # but they are not valid UCS-2 characters.
        #
        # This test makes sure that BaseTools rejects these characters
        # if seen in a .uni file.
        #
        data = u'''
            #langdef en-US "English"
            #string STR_A #language en-US "CodePoint (\U00010300) > 0xFFFF"
            '''
        self.CheckFile('utf_16', shouldPass=False, string=data)

    def testSurrogatePairUnicodeCharInUtf16File(self):
        #
        # Surrogate Pair code points are used in UTF-16 files to
        # encode the Supplementary Plane characters. But, a Surrogate
        # Pair code point which is not followed by another Surrogate
        # Pair code point might be interpreted as a single code point
        # with the Surrogate Pair code point.
        #
        # This test makes sure that BaseTools rejects these characters
        # if seen in a .uni file.
        #
        data = codecs.BOM_UTF16_LE + b'//\x01\xd8 '
        self.CheckFile(encoding=None, shouldPass=False, string=data)

    def testValidUtf8File(self):
        self.CheckFile(encoding='utf_8', shouldPass=True)

    def testValidUtf8FileWithBom(self):
        #
        # Same test as testValidUtf8File, but add the UTF-8 BOM
        #
        data = codecs.BOM_UTF8 + codecs.encode(self.SampleData, 'utf_8')
        self.CheckFile(encoding=None, shouldPass=True, string=data)

    def test32bitUnicodeCharInUtf16File(self):
        # BUGFIX: this method previously had the same name as the UTF-8
        # variant below, so it was shadowed and never ran.
        data = u'''
            #langdef en-US "English"
            #string STR_A #language en-US "CodePoint (\U00010300) > 0xFFFF"
            '''
        self.CheckFile('utf_16', shouldPass=False, string=data)

    def test32bitUnicodeCharInUtf8File(self):
        data = u'''
            #langdef en-US "English"
            #string STR_A #language en-US "CodePoint (\U00010300) > 0xFFFF"
            '''
        self.CheckFile('utf_8', shouldPass=False, string=data)

    def test32bitUnicodeCharInUtf8Comment(self):
        data = u'''
            // Even in comments, we reject non-UCS-2 chars: \U00010300
            #langdef en-US "English"
            #string STR_A #language en-US "A"
            '''
        self.CheckFile('utf_8', shouldPass=False, string=data)

    def testSurrogatePairUnicodeCharInUtf8File(self):
        #
        # Surrogate Pair code points are used in UTF-16 files to
        # encode the Supplementary Plane characters. In UTF-8, it is
        # trivial to encode these code points, but they are not valid
        # code points for characters, since they are reserved for the
        # UTF-16 Surrogate Pairs.
        #
        # This test makes sure that BaseTools rejects these characters
        # if seen in a .uni file.
        #
        data = b'\xed\xa0\x81'
        self.CheckFile(encoding=None, shouldPass=False, string=data)

    def testSurrogatePairUnicodeCharInUtf8FileWithBom(self):
        #
        # Same test as testSurrogatePairUnicodeCharInUtf8File, but add
        # the UTF-8 BOM
        #
        data = codecs.BOM_UTF8 + b'\xed\xa0\x81'
        self.CheckFile(encoding=None, shouldPass=False, string=data)
# Aggregate every TestCase class defined at module scope into a suite factory.
TheTestSuite = TestTools.MakeTheTestSuite(locals())
# Allow running this module directly as a standalone test script.
if __name__ == '__main__':
    allTests = TheTestSuite()
    unittest.TextTestRunner().run(allTests)
| edk2-master | BaseTools/Tests/CheckUnicodeSourceFiles.py |
## @file
# Unit tests for checking syntax of Python source code
#
# Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import os
import unittest
import py_compile
import TestTools
class Tests(TestTools.BaseToolsTest):
    """py_compile based syntax checks. One test method per Python source
    file is attached dynamically by MakePythonSyntaxCheckTests below."""
    def setUp(self):
        TestTools.BaseToolsTest.setUp(self)
    def SingleFileTest(self, filename):
        # doraise=True turns a syntax error into PyCompileError so we can
        # report the offending file in the failure message.
        try:
            py_compile.compile(filename, doraise=True)
        except Exception as err:
            self.fail('syntax error: %s, Error is %s' % (filename, str(err)))
def MakePythonSyntaxCheckTests():
    """Attach one syntax-check test method to Tests for every .py file
    found under the BaseTools Python source tree."""
    def GetAllPythonSourceFiles():
        # Recursively gather every *.py path below PythonSourceDir.
        found = []
        for root, _dirs, files in os.walk(TestTools.PythonSourceDir):
            found.extend(
                os.path.join(root, name)
                for name in files
                if name.lower().endswith('.py')
            )
        return found
    def MakeTestName(filename):
        # Derive a unique test-method name from the file's relative path.
        assert filename.lower().endswith('.py')
        name = filename[:-3]
        name = name.replace(TestTools.PythonSourceDir, '')
        name = name.replace(os.path.sep, '_')
        return 'test' + name
    def MakeNewTest(filename):
        # A def (instead of the original lambda) closing over this call's
        # filename; behavior is identical.
        def newmethod(self):
            return self.SingleFileTest(filename)
        setattr(Tests, MakeTestName(filename), newmethod)
    for filename in GetAllPythonSourceFiles():
        MakeNewTest(filename)
# Populate Tests with the dynamically generated per-file methods, then drop
# the factory so only the generated tests remain visible in this module.
MakePythonSyntaxCheckTests()
del MakePythonSyntaxCheckTests
# Aggregate every TestCase class defined at module scope into a suite factory.
TheTestSuite = TestTools.MakeTheTestSuite(locals())
# Allow running this module directly as a standalone test script.
if __name__ == '__main__':
    allTests = TheTestSuite()
    unittest.TextTestRunner().run(allTests)
| edk2-master | BaseTools/Tests/CheckPythonSyntax.py |
from __future__ import print_function
## @file
# Utility functions and classes for BaseTools unit tests
#
# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import base64
import codecs
import os
import os.path
import random
import shutil
import stat
import subprocess
import sys
import unittest
# Directory of the invoked script (expected to be BaseTools/Tests).
TestsDir = os.path.realpath(os.path.split(sys.argv[0])[0])
# BaseTools root is one level above the Tests directory.
BaseToolsDir = os.path.realpath(os.path.join(TestsDir, '..'))
# Locations of the C and Python tool sources under BaseTools.
CSourceDir = os.path.join(BaseToolsDir, 'Source', 'C')
PythonSourceDir = os.path.join(BaseToolsDir, 'Source', 'Python')
# Scratch directory used by BaseToolsTest for per-test temporary files.
TestTempDir = os.path.join(TestsDir, 'TestTempDir')
if PythonSourceDir not in sys.path:
    #
    # Allow unit tests to import BaseTools python modules. This is very useful
    # for writing unit tests.
    #
    sys.path.append(PythonSourceDir)
def MakeTheTestSuite(localItems):
tests = []
for name, item in localItems.items():
if isinstance(item, type):
if issubclass(item, unittest.TestCase):
tests.append(unittest.TestLoader().loadTestsFromTestCase(item))
elif issubclass(item, unittest.TestSuite):
tests.append(item())
return lambda: unittest.TestSuite(tests)
def GetBaseToolsPaths():
    """Return the candidate BaseTools binary directories for this host."""
    if sys.platform in ('win32', 'win64'):
        # Windows binaries live under e.g. Bin/Win32.
        return [os.path.join(BaseToolsDir, 'Bin', sys.platform.title())]
    # POSIX: derive the Bin/BinWrappers suffix from `uname -sm`,
    # e.g. "Linux x86_64" -> "Linux-x86_64".
    uname = os.popen('uname -sm').read().strip()
    for char in (' ', '/'):
        uname = uname.replace(char, '-')
    return [
        os.path.join(BaseToolsDir, 'Bin', uname),
        os.path.join(BaseToolsDir, 'BinWrappers', uname),
        os.path.join(BaseToolsDir, 'BinWrappers', 'PosixLike'),
    ]
# Computed once at import time; setUp() below appends these to PATH.
BaseToolsBinPaths = GetBaseToolsPaths()
class BaseToolsTest(unittest.TestCase):
    """Shared base class for BaseTools unit tests: manages a scratch
    directory (TestTempDir), locates and runs tool binaries, and
    saves/restores PATH and sys.path around each test."""
    # Delete everything inside *dir* without removing *dir* itself.
    def cleanOutDir(self, dir):
        for dirItem in os.listdir(dir):
            if dirItem in ('.', '..'): continue
            dirItem = os.path.join(dir, dirItem)
            self.RemoveFileOrDir(dirItem)
    def CleanUpTmpDir(self):
        if os.path.exists(self.testDir):
            self.cleanOutDir(self.testDir)
    # shutil.rmtree onerror callback: clear the read-only bit and retry.
    # NOTE(review): requires `import stat` at module level; it is missing
    # from this module's imports, so triggering this handler raises
    # NameError instead of retrying -- confirm and add the import.
    def HandleTreeDeleteError(self, function, path, excinfo):
        os.chmod(path, stat.S_IWRITE)
        function(path)
    def RemoveDir(self, dir):
        shutil.rmtree(dir, False, self.HandleTreeDeleteError)
    # Remove *path* whether it is a file or a directory; no-op if absent.
    def RemoveFileOrDir(self, path):
        if not os.path.exists(path):
            return
        elif os.path.isdir(path):
            self.RemoveDir(path)
        else:
            os.remove(path)
    # Print *data* base64-encoded so binary payloads survive the console.
    def DisplayBinaryData(self, description, data):
        print(description, '(base64 encoded):')
        b64data = base64.b64encode(data)
        print(b64data)
    # NOTE(review): ReadTmpFile returns bytes, but sys.stdout.write expects
    # str on Python 3 -- this would raise TypeError; confirm before use.
    def DisplayFile(self, fileName):
        sys.stdout.write(self.ReadTmpFile(fileName))
        sys.stdout.flush()
    # Search the known BaseTools bin directories for *toolName*.
    def FindToolBin(self, toolName):
        for binPath in BaseToolsBinPaths:
            bin = os.path.join(binPath, toolName)
            if os.path.exists(bin):
                break
        assert os.path.exists(bin)
        return bin
    # Run a tool binary; returns its exit status. kwd supports:
    #   toolName -- override self.toolName
    #   logFile  -- capture stdout/stderr to this file in the test dir
    # NOTE(review): the opened logFile handle is never closed explicitly;
    # it is left to garbage collection.
    def RunTool(self, *args, **kwd):
        if 'toolName' in kwd: toolName = kwd['toolName']
        else: toolName = None
        if 'logFile' in kwd: logFile = kwd['logFile']
        else: logFile = None
        if toolName is None: toolName = self.toolName
        if sys.platform == "win32":
            toolName += ".exe"
        bin = self.FindToolBin(toolName)
        if logFile is not None:
            logFile = open(os.path.join(self.testDir, logFile), 'w')
            popenOut = logFile
        else:
            popenOut = subprocess.PIPE
        args = [toolName] + list(args)
        Proc = subprocess.Popen(
            args, executable=bin,
            stdout=popenOut, stderr=subprocess.STDOUT
            )
        # Drain the pipe (when not logging to a file) so the child cannot
        # block on a full stdout buffer before exiting.
        if logFile is None:
            Proc.stdout.read()
        return Proc.wait()
    def GetTmpFilePath(self, fileName):
        return os.path.join(self.testDir, fileName)
    def OpenTmpFile(self, fileName, mode = 'r'):
        return open(os.path.join(self.testDir, fileName), mode)
    # Read a scratch file as raw bytes.
    def ReadTmpFile(self, fileName):
        f = open(self.GetTmpFilePath(fileName), 'rb')
        data = f.read()
        f.close()
        return data
    # Write bytes verbatim, or encode text as UTF-8.
    def WriteTmpFile(self, fileName, data):
        if isinstance(data, bytes):
            with open(self.GetTmpFilePath(fileName), 'wb') as f:
                f.write(data)
        else:
            with codecs.open(self.GetTmpFilePath(fileName), 'w', encoding='utf-8') as f:
                f.write(data)
    # NOTE(review): writes chr(0)..chr(255) through a text-mode handle;
    # code points 128-255 are subject to the platform text encoding --
    # confirm this matches the tools' expectations.
    def GenRandomFileData(self, fileName, minlen = None, maxlen = None):
        if maxlen is None: maxlen = minlen
        f = self.OpenTmpFile(fileName, 'w')
        f.write(self.GetRandomString(minlen, maxlen))
        f.close()
    # Random string of length in [minlen, maxlen] over code points 0-255.
    def GetRandomString(self, minlen = None, maxlen = None):
        if minlen is None: minlen = 1024
        if maxlen is None: maxlen = minlen
        return ''.join(
            [chr(random.randint(0, 255))
             for x in range(random.randint(minlen, maxlen))
            ])
    # Save PATH/sys.path, extend PATH with the tool bin dirs, and prepare
    # an empty scratch directory for the test.
    def setUp(self):
        self.savedEnvPath = os.environ['PATH']
        self.savedSysPath = sys.path[:]
        for binPath in BaseToolsBinPaths:
            os.environ['PATH'] = \
                os.path.pathsep.join((os.environ['PATH'], binPath))
        self.testDir = TestTempDir
        if not os.path.exists(self.testDir):
            os.mkdir(self.testDir)
        else:
            self.cleanOutDir(self.testDir)
    # Remove the scratch directory and restore PATH/sys.path.
    def tearDown(self):
        self.RemoveFileOrDir(self.testDir)
        os.environ['PATH'] = self.savedEnvPath
        sys.path = self.savedSysPath
| edk2-master | BaseTools/Tests/TestTools.py |
## @file
# Test whether PYTHON_COMMAND is available and the
# minimum Python version is installed.
#
# Copyright (c) 2013 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
import sys
if __name__ == '__main__':
    # Check if the major and minor versions required were specified.
    if len(sys.argv) >= 3:
        req_major_version = int(sys.argv[1])
        req_minor_version = int(sys.argv[2])
    else:
        # If the minimum version wasn't specified on the command line,
        # default to 3.6 because BaseTools uses syntax from PEP 526
        # (https://peps.python.org/pep-0526/)
        req_major_version = 3
        req_minor_version = 6
    # Exit 0 (success) only when running under exactly the required major
    # version with at least the required minor version; the build scripts
    # that invoke this file test the process exit code.
    if sys.version_info.major == req_major_version and \
       sys.version_info.minor >= req_minor_version:
        sys.exit(0)
    else:
        sys.exit(1)
| edk2-master | BaseTools/Tests/PythonTest.py |
## @file
# Unit tests for TianoCompress utility
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
from __future__ import print_function
import os
import random
import sys
import unittest
import TestTools
class Tests(TestTools.BaseToolsTest):
    """Round-trip tests for the TianoCompress command-line utility."""
    def setUp(self):
        TestTools.BaseToolsTest.setUp(self)
        # Binary name resolved by BaseToolsTest.RunTool/FindToolBin.
        self.toolName = 'TianoCompress'
    def testHelp(self):
        """--help must run and exit with status 0."""
        result = self.RunTool('--help', logFile='help')
        self.assertEqual(result, 0)
    def compressionTestCycle(self, data):
        """Compress *data*, decompress the result, and verify the round
        trip reproduces the original bytes exactly."""
        # (Removed an unused `path` local that shadowed GetTmpFilePath's
        # result without ever being read.)
        self.WriteTmpFile('input', data)
        result = self.RunTool(
            '-e',
            '-o', self.GetTmpFilePath('output1'),
            self.GetTmpFilePath('input')
        )
        self.assertEqual(result, 0)
        result = self.RunTool(
            '-d',
            '-o', self.GetTmpFilePath('output2'),
            self.GetTmpFilePath('output1')
        )
        self.assertEqual(result, 0)
        start = self.ReadTmpFile('input')
        finish = self.ReadTmpFile('output2')
        startEqualsFinish = start == finish
        if not startEqualsFinish:
            # Dump all three stages to aid debugging before failing.
            print()
            print('Original data did not match decompress(compress(data))')
            self.DisplayBinaryData('original data', start)
            self.DisplayBinaryData('after compression', self.ReadTmpFile('output1'))
            self.DisplayBinaryData('after decompression', finish)
        self.assertTrue(startEqualsFinish)
    def testRandomDataCycles(self):
        """Run several round-trip cycles over random payloads."""
        for _ in range(8):
            data = self.GetRandomString(1024, 2048)
            self.compressionTestCycle(data)
            self.CleanUpTmpDir()
# Aggregate the TestCase classes in this module into a suite factory.
TheTestSuite = TestTools.MakeTheTestSuite(locals())
# Allow direct execution of this module as a test script.
if __name__ == '__main__':
    allTests = TheTestSuite()
    unittest.TextTestRunner().run(allTests)
| edk2-master | BaseTools/Tests/TianoCompress.py |
## @file
# Routines for generating Pcd Database
#
# Copyright (c) 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
import unittest
from Common.Misc import RemoveCComments
from Workspace.BuildClassObject import ArrayIndex
class TestRe(unittest.TestCase):
    """Unit tests for the shared regular-expression helpers
    (RemoveCComments and ArrayIndex)."""
    def test_ccomments(self):
        """RemoveCComments must strip //... and /*...*/ comments while
        leaving string/char literals and initializer data untouched.
        (assertEquals was replaced by assertEqual: the former is a
        deprecated alias removed in Python 3.12.)"""
        # Plain initializer data: nothing to strip.
        TestStr1 = """ {0x01,0x02} """
        self.assertEqual(TestStr1, RemoveCComments(TestStr1))
        # Wide-char and plain char literals must survive unchanged.
        TestStr2 = """ L'TestString' """
        self.assertEqual(TestStr2, RemoveCComments(TestStr2))
        TestStr3 = """ 'TestString' """
        self.assertEqual(TestStr3, RemoveCComments(TestStr3))
        # Mixed line and block comments around CODE(...) data.
        TestStr4 = """
                    {CODE({
                        {0x01, {0x02, 0x03, 0x04 }},// Data comment
                        {0x01, {0x02, 0x03, 0x04 }},// Data comment
                        })
                    } /*
                     This is multiple line comments
                      The seconde line comment
                    */
                    // This is a comment
                    """
        Expect_TestStr4 = """{CODE({
                        {0x01, {0x02, 0x03, 0x04 }},
                        {0x01, {0x02, 0x03, 0x04 }},
                        })
                    }"""
        self.assertEqual(Expect_TestStr4, RemoveCComments(TestStr4).strip())
    def test_ArrayIndex(self):
        """ArrayIndex must find every [...] index expression, including
        empty brackets. Renamed from Test_ArrayIndex: unittest only
        discovers methods whose names start with 'test', so the original
        capitalized name was never run."""
        TestStr1 = """[1]"""
        self.assertEqual(['[1]'], ArrayIndex.findall(TestStr1))
        TestStr2 = """[1][2][0x1][0x01][]"""
        self.assertEqual(['[1]', '[2]', '[0x1]', '[0x01]', '[]'],
                         ArrayIndex.findall(TestStr2))
# Allow direct execution; unittest.main() discovers the tests above.
if __name__ == '__main__':
    unittest.main()
| edk2-master | BaseTools/Tests/TestRegularExpression.py |
## @file
# Unit tests for Python based BaseTools
#
# Copyright (c) 2008 - 2015, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import os
import sys
import unittest
def TheTestSuite():
    """Build the combined suite of all Python-side BaseTools unit tests."""
    # Imports are deferred so merely importing this module stays cheap;
    # the module import order (and any side effects) is unchanged.
    import CheckPythonSyntax
    import CheckUnicodeSourceFiles
    suites = [
        CheckPythonSyntax.TheTestSuite(),
        CheckUnicodeSourceFiles.TheTestSuite(),
    ]
    return unittest.TestSuite(suites)
# Allow direct execution of the combined Python-tools test suite.
if __name__ == '__main__':
    allTests = TheTestSuite()
    unittest.TextTestRunner().run(allTests)
| edk2-master | BaseTools/Tests/PythonToolsTests.py |
## @file
# Unit tests for C based BaseTools
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import os
import sys
import unittest
import TianoCompress
# Test modules that contribute C-tool suites; extend this tuple to add more.
modules = (
    TianoCompress,
    )
def TheTestSuite():
    """Build the combined suite of all C-tool unit tests."""
    # A comprehension replaces the original list(map(lambda ...)) -- same
    # result, clearer and more idiomatic.
    suites = [module.TheTestSuite() for module in modules]
    return unittest.TestSuite(suites)
# Allow direct execution of the combined C-tools test suite.
if __name__ == '__main__':
    allTests = TheTestSuite()
    unittest.TextTestRunner().run(allTests)
| edk2-master | BaseTools/Tests/CToolsTests.py |
## @file
# Unit tests for BaseTools utilities
#
# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
# Import Modules
#
import os
import sys
import unittest
import TestTools
def GetCTestSuite():
    """Return the suite covering the C-based BaseTools utilities.
    The import is deferred until the suite is actually requested."""
    import CToolsTests
    return CToolsTests.TheTestSuite()
def GetPythonTestSuite():
    """Return the suite covering the Python-based BaseTools code.
    The import is deferred until the suite is actually requested."""
    import PythonToolsTests
    return PythonToolsTests.TheTestSuite()
def GetAllTestsSuite():
    """Combine the C-tool and Python-tool suites into a single suite."""
    combined = [GetCTestSuite(), GetPythonTestSuite()]
    return unittest.TestSuite(combined)
# Allow direct execution of every BaseTools test with verbose output.
if __name__ == '__main__':
    allTests = GetAllTestsSuite()
    unittest.TextTestRunner(verbosity=2).run(allTests)
| edk2-master | BaseTools/Tests/RunTests.py |
##
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
import logging
import json
try:
    from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
    class BuildToolsReportGenerator(IUefiBuildPlugin):
        # Build plugin that writes a report of the tool versions used,
        # both before and after the firmware build.
        def do_report(self, thebuilder):
            try:
                from edk2toolext.environment import version_aggregator
            except ImportError:
                logging.critical("Loading BuildToolsReportGenerator failed, please update your Edk2-PyTool-Extensions")
                return 0
            # Report goes next to the build output; create the folder if needed.
            OutputReport = os.path.join(thebuilder.env.GetValue("BUILD_OUTPUT_BASE"), "BUILD_TOOLS_REPORT")
            OutputReport = os.path.normpath(OutputReport)
            if not os.path.isdir(os.path.dirname(OutputReport)):
                os.makedirs(os.path.dirname(OutputReport))
            Report = BuildToolsReport()
            Report.MakeReport(version_aggregator.GetVersionAggregator().GetAggregatedVersionInformation(), OutputReport=OutputReport)
        def do_pre_build(self, thebuilder):
            self.do_report(thebuilder)
            return 0
        def do_post_build(self, thebuilder):
            self.do_report(thebuilder)
            return 0
except ImportError:
    # edk2toolext is optional here: without it this plugin class is simply
    # skipped and only the standalone BuildToolsReport class below exists.
    pass
class BuildToolsReport(object):
    """Standalone generator for the build-tools version report.

    MakeReport produces <OutputReport>.html (from a bundled template) and
    <OutputReport>.json describing the tool versions used by the build.
    """
    # Folder containing this plugin and its HTML template.
    MY_FOLDER = os.path.dirname(os.path.realpath(__file__))
    # Report format version embedded in the JSON payload.
    VERSION = "1.00"
    def __init__(self):
        pass
    def MakeReport(self, BuildTools, OutputReport="BuildToolsReport"):
        """Write the HTML and JSON reports.

        BuildTools   -- mapping of name -> version-info dict; each dict
                        must carry a 'type' key, used for sorting.
        OutputReport -- output path prefix; '.html'/'.json' are appended.
        """
        logging.info("Writing BuildToolsReports to {0}".format(OutputReport))
        # Sort the version entries by their 'type' field for stable output.
        versions_list = sorted(BuildTools.values(), key=lambda k: k['type'])
        json_dict = {"modules": versions_list,
                     "PluginVersion": BuildToolsReport.VERSION}
        template_path = os.path.join(BuildToolsReport.MY_FOLDER,
                                     "BuildToolsReport_Template.html")
        # Context managers guarantee the handles are closed even if an
        # exception occurs mid-write (the original leaked open files).
        with open(template_path, "r") as template, \
             open(OutputReport + ".html", "w") as htmlfile:
            for line in template.readlines():
                if "%TO_BE_FILLED_IN_BY_PYTHON_SCRIPT%" in line:
                    line = line.replace("%TO_BE_FILLED_IN_BY_PYTHON_SCRIPT%",
                                        json.dumps(json_dict))
                htmlfile.write(line)
        with open(OutputReport + ".json", "w") as jsonfile:
            jsonfile.write(json.dumps(versions_list, indent=4))
| edk2-master | BaseTools/Plugin/BuildToolsReport/BuildToolsReportGenerator.py |
# @file LinuxGcc5ToolChain.py
# Plugin to configures paths for GCC5 ARM/AARCH64 Toolchain
##
# This plugin works in conjuncture with the tools_def
#
# Copyright (c) Microsoft Corporation
# Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
import logging
from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
from edk2toolext.environment import shell_environment
class LinuxGcc5ToolChain(IUefiBuildPlugin):
    # Resolves the GCC5 cross-toolchain prefix shell variables
    # (GCC5_<ARCH>_PREFIX) from the corresponding GCC5_<ARCH>_INSTALL
    # variables when TOOL_CHAIN_TAG == GCC5, and validates that the
    # resulting compiler paths exist.
    def do_post_build(self, thebuilder):
        # No post-build work is required by this plugin.
        return 0
    def do_pre_build(self, thebuilder):
        self.Logger = logging.getLogger("LinuxGcc5ToolChain")
        #
        # GCC5 - The ARM and AARCH64 compilers need their paths set if available
        if thebuilder.env.GetValue("TOOL_CHAIN_TAG") == "GCC5":
            # Start with AARACH64 compiler
            ret = self._check_aarch64()
            if ret != 0:
                self.Logger.critical("Failed in check aarch64")
                return ret
            # Check arm compiler
            ret = self._check_arm()
            if ret != 0:
                self.Logger.critical("Failed in check arm")
                return ret
            # Check RISCV64 compiler
            ret = self._check_riscv64()
            if ret != 0:
                self.Logger.critical("Failed in check riscv64")
                return ret
            # Check LoongArch64 compiler
            ret = self._check_loongarch64()
            if ret != 0:
                self.Logger.critical("Failed in check loongarch64")
                return ret
        return 0
    def _check_arm(self):
        # check to see if full path already configured
        if shell_environment.GetEnvironment().get_shell_var("GCC5_ARM_PREFIX") is not None:
            self.Logger.info("GCC5_ARM_PREFIX is already set.")
        else:
            # now check for install dir. If set then set the Prefix
            install_path = shell_environment.GetEnvironment().get_shell_var("GCC5_ARM_INSTALL")
            if install_path is None:
                # Neither PREFIX nor INSTALL set: nothing to configure.
                return 0
            # make GCC5_ARM_PREFIX to align with tools_def.txt
            prefix = os.path.join(install_path, "bin", "arm-none-linux-gnueabihf-")
            shell_environment.GetEnvironment().set_shell_var("GCC5_ARM_PREFIX", prefix)
        # now confirm it exists
        if not os.path.exists(shell_environment.GetEnvironment().get_shell_var("GCC5_ARM_PREFIX") + "gcc"):
            self.Logger.error("Path for GCC5_ARM_PREFIX toolchain is invalid")
            return -2
        return 0
    def _check_aarch64(self):
        # check to see if full path already configured
        if shell_environment.GetEnvironment().get_shell_var("GCC5_AARCH64_PREFIX") is not None:
            self.Logger.info("GCC5_AARCH64_PREFIX is already set.")
        else:
            # now check for install dir. If set then set the Prefix
            install_path = shell_environment.GetEnvironment(
            ).get_shell_var("GCC5_AARCH64_INSTALL")
            if install_path is None:
                return 0
            # make GCC5_AARCH64_PREFIX to align with tools_def.txt
            prefix = os.path.join(install_path, "bin", "aarch64-none-linux-gnu-")
            shell_environment.GetEnvironment().set_shell_var("GCC5_AARCH64_PREFIX", prefix)
        # now confirm it exists
        if not os.path.exists(shell_environment.GetEnvironment().get_shell_var("GCC5_AARCH64_PREFIX") + "gcc"):
            self.Logger.error(
                "Path for GCC5_AARCH64_PREFIX toolchain is invalid")
            return -2
        return 0
    def _check_riscv64(self):
        # now check for install dir. If set then set the Prefix
        install_path = shell_environment.GetEnvironment(
        ).get_shell_var("GCC5_RISCV64_INSTALL")
        if install_path is None:
            return 0
        # check to see if full path already configured
        if shell_environment.GetEnvironment().get_shell_var("GCC5_RISCV64_PREFIX") is not None:
            self.Logger.info("GCC5_RISCV64_PREFIX is already set.")
        else:
            # make GCC5_RISCV64_PREFIX to align with tools_def.txt
            prefix = os.path.join(install_path, "bin", "riscv64-unknown-elf-")
            shell_environment.GetEnvironment().set_shell_var("GCC5_RISCV64_PREFIX", prefix)
        # now confirm it exists
        if not os.path.exists(shell_environment.GetEnvironment().get_shell_var("GCC5_RISCV64_PREFIX") + "gcc"):
            self.Logger.error(
                "Path for GCC5_RISCV64_PREFIX toolchain is invalid")
            return -2
        # Check if LD_LIBRARY_PATH is set for the libraries of RISC-V GCC toolchain
        # NOTE(review): unlike the *_PREFIX handling above there is no else
        # branch here, so any pre-existing LD_LIBRARY_PATH value is
        # unconditionally overwritten with the toolchain lib directory --
        # confirm this is intended.
        if shell_environment.GetEnvironment().get_shell_var("LD_LIBRARY_PATH") is not None:
            self.Logger.info("LD_LIBRARY_PATH is already set.")
        prefix = os.path.join(install_path, "lib")
        shell_environment.GetEnvironment().set_shell_var("LD_LIBRARY_PATH", prefix)
        return 0
    def _check_loongarch64(self):
        # check to see if full path already configured
        if shell_environment.GetEnvironment().get_shell_var("GCC5_LOONGARCH64_PREFIX") is not None:
            self.Logger.info("GCC5_LOONGARCH64_PREFIX is already set.")
        else:
            # now check for install dir. If set then set the Prefix
            install_path = shell_environment.GetEnvironment(
            ).get_shell_var("GCC5_LOONGARCH64_INSTALL")
            if install_path is None:
                return 0
            # make GCC5_LOONGARCH64_PREFIX to align with tools_def.txt
            prefix = os.path.join(install_path, "bin", "loongarch64-unknown-linux-gnu-")
            shell_environment.GetEnvironment().set_shell_var("GCC5_LOONGARCH64_PREFIX", prefix)
        # now confirm it exists
        if not os.path.exists(shell_environment.GetEnvironment().get_shell_var("GCC5_LOONGARCH64_PREFIX") + "gcc"):
            self.Logger.error(
                "Path for GCC5_LOONGARCH64_PREFIX toolchain is invalid")
            return -2
        return 0
| edk2-master | BaseTools/Plugin/LinuxGcc5ToolChain/LinuxGcc5ToolChain.py |
# @file WindowsVsToolChain.py
# Plugin to configures paths for the VS2017 and VS2019 tool chain
##
# This plugin works in conjuncture with the tools_def
#
# Copyright (c) Microsoft Corporation
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
import logging
from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
import edk2toollib.windows.locate_tools as locate_tools
from edk2toollib.windows.locate_tools import FindWithVsWhere
from edk2toolext.environment import shell_environment
from edk2toolext.environment import version_aggregator
from edk2toollib.utility_functions import GetHostInfo
class WindowsVsToolChain(IUefiBuildPlugin):
    # Configures the VS2017/VS2019 tool-chain environment for a build:
    # resolves the VS install path and VC tool version, sets the
    # VS<year>_PREFIX and VS<year>_HOST shell variables expected by
    # tools_def, and imports the relevant MSVC environment variables.
    def do_post_build(self, thebuilder):
        # No post-build work is required by this plugin.
        return 0
    def do_pre_build(self, thebuilder):
        self.Logger = logging.getLogger("WindowsVsToolChain")
        # MSVC environment variables imported into the shell environment.
        interesting_keys = ["ExtensionSdkDir", "INCLUDE", "LIB", "LIBPATH", "UniversalCRTSdkDir",
                            "UCRTVersion", "WindowsLibPath", "WindowsSdkBinPath", "WindowsSdkDir", "WindowsSdkVerBinPath",
                            "WindowsSDKVersion", "VCToolsInstallDir", "Path"]
        #
        # VS2017 - Follow VS2017 where there is potential for many versions of the tools.
        # If a specific version is required then the user must set both env variables:
        # VS150INSTALLPATH: base install path on system to VC install dir. Here you will find the VC folder, etc
        # VS150TOOLVER: version number for the VC compiler tools
        # VS2017_PREFIX: path to MSVC compiler folder with trailing slash (can be used instead of two vars above)
        # VS2017_HOST: set the host architecture to use for host tools, and host libs, etc
        if thebuilder.env.GetValue("TOOL_CHAIN_TAG") == "VS2017":
            # check to see if host is configured
            # HostType for VS2017 should be (defined in tools_def):
            # x86 == 32bit Intel
            # x64 == 64bit Intel
            # arm == 32bit Arm
            # arm64 == 64bit Arm
            #
            HostType = shell_environment.GetEnvironment().get_shell_var("VS2017_HOST")
            if HostType is not None:
                HostType = HostType.lower()
                self.Logger.info(
                    f"HOST TYPE defined by environment. Host Type is {HostType}")
            else:
                HostInfo = GetHostInfo()
                if HostInfo.arch == "x86":
                    if HostInfo.bit == "32":
                        HostType = "x86"
                    elif HostInfo.bit == "64":
                        HostType = "x64"
                else:
                    # Non-Intel hosts are not supported by this plugin.
                    raise NotImplementedError()
            # VS2017_HOST options are not exactly the same as QueryVcVariables. This translates.
            VC_HOST_ARCH_TRANSLATOR = {
                "x86": "x86", "x64": "AMD64", "arm": "not supported", "arm64": "not supported"}
            # check to see if full path already configured
            if shell_environment.GetEnvironment().get_shell_var("VS2017_PREFIX") != None:
                self.Logger.info("VS2017_PREFIX is already set.")
            else:
                install_path = self._get_vs_install_path(
                    "VS2017".lower(), "VS150INSTALLPATH")
                vc_ver = self._get_vc_version(install_path, "VS150TOOLVER")
                if install_path is None or vc_ver is None:
                    self.Logger.error(
                        "Failed to configure environment for VS2017")
                    return -1
                version_aggregator.GetVersionAggregator().ReportVersion(
                    "Visual Studio Install Path", install_path, version_aggregator.VersionTypes.INFO)
                version_aggregator.GetVersionAggregator().ReportVersion(
                    "VC Version", vc_ver, version_aggregator.VersionTypes.TOOL)
                # make VS2017_PREFIX to align with tools_def.txt
                prefix = os.path.join(install_path, "VC",
                                      "Tools", "MSVC", vc_ver)
                prefix = prefix + os.path.sep
                shell_environment.GetEnvironment().set_shell_var("VS2017_PREFIX", prefix)
                shell_environment.GetEnvironment().set_shell_var("VS2017_HOST", HostType)
            shell_env = shell_environment.GetEnvironment()
            # Use the tools lib to determine the correct values for the vars that interest us.
            vs_vars = locate_tools.QueryVcVariables(
                interesting_keys, VC_HOST_ARCH_TRANSLATOR[HostType], vs_version="vs2017")
            for (k, v) in vs_vars.items():
                shell_env.set_shell_var(k, v)
            # now confirm it exists
            if not os.path.exists(shell_environment.GetEnvironment().get_shell_var("VS2017_PREFIX")):
                self.Logger.error("Path for VS2017 toolchain is invalid")
                return -2
        #
        # VS2019 - Follow VS2019 where there is potential for many versions of the tools.
        # If a specific version is required then the user must set both env variables:
        # VS160INSTALLPATH: base install path on system to VC install dir. Here you will find the VC folder, etc
        # VS160TOOLVER: version number for the VC compiler tools
        # VS2019_PREFIX: path to MSVC compiler folder with trailing slash (can be used instead of two vars above)
        # VS2019_HOST: set the host architecture to use for host tools, and host libs, etc
        elif thebuilder.env.GetValue("TOOL_CHAIN_TAG") == "VS2019":
            # check to see if host is configured
            # HostType for VS2019 should be (defined in tools_def):
            # x86 == 32bit Intel
            # x64 == 64bit Intel
            # arm == 32bit Arm
            # arm64 == 64bit Arm
            #
            HostType = shell_environment.GetEnvironment().get_shell_var("VS2019_HOST")
            if HostType is not None:
                HostType = HostType.lower()
                self.Logger.info(
                    f"HOST TYPE defined by environment. Host Type is {HostType}")
            else:
                HostInfo = GetHostInfo()
                if HostInfo.arch == "x86":
                    if HostInfo.bit == "32":
                        HostType = "x86"
                    elif HostInfo.bit == "64":
                        HostType = "x64"
                else:
                    raise NotImplementedError()
            # VS2019_HOST options are not exactly the same as QueryVcVariables. This translates.
            VC_HOST_ARCH_TRANSLATOR = {
                "x86": "x86", "x64": "AMD64", "arm": "not supported", "arm64": "not supported"}
            # check to see if full path already configured
            if shell_environment.GetEnvironment().get_shell_var("VS2019_PREFIX") != None:
                self.Logger.info("VS2019_PREFIX is already set.")
            else:
                install_path = self._get_vs_install_path(
                    "VS2019".lower(), "VS160INSTALLPATH")
                vc_ver = self._get_vc_version(install_path, "VS160TOOLVER")
                if install_path is None or vc_ver is None:
                    self.Logger.error(
                        "Failed to configure environment for VS2019")
                    return -1
                version_aggregator.GetVersionAggregator().ReportVersion(
                    "Visual Studio Install Path", install_path, version_aggregator.VersionTypes.INFO)
                version_aggregator.GetVersionAggregator().ReportVersion(
                    "VC Version", vc_ver, version_aggregator.VersionTypes.TOOL)
                # make VS2019_PREFIX to align with tools_def.txt
                prefix = os.path.join(install_path, "VC",
                                      "Tools", "MSVC", vc_ver)
                prefix = prefix + os.path.sep
                shell_environment.GetEnvironment().set_shell_var("VS2019_PREFIX", prefix)
                shell_environment.GetEnvironment().set_shell_var("VS2019_HOST", HostType)
            shell_env = shell_environment.GetEnvironment()
            # Use the tools lib to determine the correct values for the vars that interest us.
            vs_vars = locate_tools.QueryVcVariables(
                interesting_keys, VC_HOST_ARCH_TRANSLATOR[HostType], vs_version="vs2019")
            for (k, v) in vs_vars.items():
                shell_env.set_shell_var(k, v)
            # now confirm it exists
            if not os.path.exists(shell_environment.GetEnvironment().get_shell_var("VS2019_PREFIX")):
                self.Logger.error("Path for VS2019 toolchain is invalid")
                return -2
        return 0
    # Resolve the VS install path: honor the override env var *varname*,
    # otherwise ask vswhere for the requested VS version.
    def _get_vs_install_path(self, vs_version, varname):
        # check if already specified
        path = None
        if varname is not None:
            path = shell_environment.GetEnvironment().get_shell_var(varname)
        if(path is None):
            # Not specified...find latest
            try:
                path = FindWithVsWhere(vs_version=vs_version)
            except (EnvironmentError, ValueError, RuntimeError) as e:
                self.Logger.error(str(e))
                return None
            if path is not None and os.path.exists(path):
                self.Logger.debug("Found VS instance for %s", vs_version)
            else:
                self.Logger.error(
                    f"VsWhere successfully executed, but could not find VS instance for {vs_version}.")
        return path
    # Resolve the VC tools version: honor the override env var *varname*,
    # otherwise pick the last entry of VC/Tools/MSVC under *path*.
    def _get_vc_version(self, path, varname):
        # check if already specified
        vc_ver = shell_environment.GetEnvironment().get_shell_var(varname)
        if (path is None):
            self.Logger.critical(
                "Failed to find Visual Studio tools. Might need to check for VS install")
            return vc_ver
        if(vc_ver is None):
            # Not specified...find latest
            p2 = os.path.join(path, "VC", "Tools", "MSVC")
            if not os.path.isdir(p2):
                self.Logger.critical(
                    "Failed to find VC tools. Might need to check for VS install")
                return vc_ver
            # NOTE(review): relies on the lexicographic order of os.listdir,
            # which can mis-pick the newest version (e.g. '14.9' sorts after
            # '14.10') -- confirm whether a version-aware sort is needed.
            vc_ver = os.listdir(p2)[-1].strip()  # get last in list
            self.Logger.debug("Found VC Tool version is %s" % vc_ver)
        return vc_ver
| edk2-master | BaseTools/Plugin/WindowsVsToolChain/WindowsVsToolChain.py |
## @file WinRcPath.py
# Plugin to find Windows SDK Resource Compiler rc.exe
##
# This plugin works in conjuncture with the tools_def to support rc.exe
#
# Copyright (c) Microsoft Corporation
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
import edk2toollib.windows.locate_tools as locate_tools
from edk2toolext.environment import shell_environment
from edk2toolext.environment import version_aggregator
class WinRcPath(IUefiBuildPlugin):
    """Build plugin that locates rc.exe in the installed Windows SDK and
    publishes its directory as WINSDK_PATH_FOR_RC_EXE for tools_def."""
    def do_post_build(self, thebuilder):
        return 0
    def do_pre_build(self, thebuilder):
        # Ask the locate_tools helper to search the installed Windows SDKs.
        rc_path = locate_tools.FindToolInWinSdk("rc.exe")
        if rc_path is None:
            thebuilder.logging.warning("Failed to find rc.exe")
            return 0
        sdk_dir = os.path.abspath(os.path.dirname(rc_path))
        shell_environment.GetEnvironment().set_shell_var("WINSDK_PATH_FOR_RC_EXE", sdk_dir)
        version_aggregator.GetVersionAggregator().ReportVersion("WINSDK_PATH_FOR_RC_EXE", sdk_dir, version_aggregator.VersionTypes.INFO)
        return 0
| edk2-master | BaseTools/Plugin/WindowsResourceCompiler/WinRcPath.py |
# @file HostBasedUnitTestRunner.py
# Plugin to located any host-based unit tests in the output directory and execute them.
##
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
import os
import logging
import glob
import stat
import xml.etree.ElementTree
from edk2toolext.environment.plugintypes.uefi_build_plugin import IUefiBuildPlugin
from edk2toolext import edk2_logging
import edk2toollib.windows.locate_tools as locate_tools
from edk2toolext.environment import shell_environment
from edk2toollib.utility_functions import RunCmd
from edk2toollib.utility_functions import GetHostInfo
from textwrap import dedent
class HostBasedUnitTestRunner(IUefiBuildPlugin):
def do_pre_build(self, thebuilder):
'''
Run Prebuild
'''
return 0
    def do_post_build(self, thebuilder):
        '''
        After a build, will automatically locate and run all host-based unit tests. Logs any
        failures with Warning severity and will return a count of the failures as the return code.
        EXPECTS:
        - Build Var 'CI_BUILD_TYPE' - If not set to 'host_unit_test', will not do anything.
        UPDATES:
        - Shell Var 'CMOCKA_XML_FILE'
        '''
        # Only act on host-based unit-test builds.
        ci_type = thebuilder.env.GetValue('CI_BUILD_TYPE')
        if ci_type != 'host_unit_test':
            return 0
        shell_env = shell_environment.GetEnvironment()
        logging.log(edk2_logging.get_section_level(),
                    "Run Host based Unit Tests")
        path = thebuilder.env.GetValue("BUILD_OUTPUT_BASE")
        # Returned at the end: number of test failures found in the XML results.
        failure_count = 0
        # Set up the reporting type for Cmocka.
        shell_env.set_shell_var('CMOCKA_MESSAGE_OUTPUT', 'xml')
        for arch in thebuilder.env.GetValue("TARGET_ARCH").split():
            logging.log(edk2_logging.get_subsection_level(),
                        "Testing for architecture: " + arch)
            cp = os.path.join(path, arch)
            # If any old results XML files exist, clean them up.
            for old_result in glob.iglob(os.path.join(cp, "*.result.xml")):
                os.remove(old_result)
            # Find and Run any Host Tests
            if GetHostInfo().os.upper() == "LINUX":
                testList = glob.glob(os.path.join(cp, "*Test*"))
                # Iterate a copy so entries can be removed while filtering.
                for a in testList[:]:
                    p = os.path.join(cp, a)
                    # It must be a file
                    if not os.path.isfile(p):
                        testList.remove(a)
                        logging.debug(f"Remove directory file: {p}")
                        continue
                    # It must be executable
                    if os.stat(p).st_mode & (stat.S_IEXEC | stat.S_IXGRP | stat.S_IXOTH) == 0:
                        testList.remove(a)
                        logging.debug(f"Remove non-executable file: {p}")
                        continue
                    logging.info(f"Test file found: {p}")
            elif GetHostInfo().os.upper() == "WINDOWS":
                testList = glob.glob(os.path.join(cp, "*Test*.exe"))
            else:
                raise NotImplementedError("Unsupported Operating System")
            if not testList:
                # NOTE(review): returning here skips any remaining
                # architectures and the coverage step below -- confirm.
                logging.warning(dedent("""
                    UnitTest Coverage:
                    No unit tests discovered. Test coverage will not be generated.
                    Prevent this message by:
                    1. Adding host-based unit tests to this package
                    2. Ensuring tests have the word "Test" in their name
                    3. Disabling HostUnitTestCompilerPlugin in the package CI YAML file
                    """).strip())
                return 0
            for test in testList:
                # Configure output name if test uses cmocka.
                shell_env.set_shell_var(
                    'CMOCKA_XML_FILE', test + ".CMOCKA.%g." + arch + ".result.xml")
                # Configure output name if test uses gtest.
                shell_env.set_shell_var(
                    'GTEST_OUTPUT', "xml:" + test + ".GTEST." + arch + ".result.xml")
                # Run the test.
                ret = RunCmd('"' + test + '"', "", workingdir=cp)
                if ret != 0:
                    logging.error("UnitTest Execution Error: " +
                                  os.path.basename(test))
                else:
                    logging.info("UnitTest Completed: " +
                                 os.path.basename(test))
                    file_match_pattern = test + ".*." + arch + ".result.xml"
                    xml_results_list = glob.glob(file_match_pattern)
                    # Walk suite/case/result elements; count each <failure>.
                    for xml_result_file in xml_results_list:
                        root = xml.etree.ElementTree.parse(
                            xml_result_file).getroot()
                        for suite in root:
                            for case in suite:
                                for result in case:
                                    if result.tag == 'failure':
                                        logging.warning(
                                            "%s Test Failed" % os.path.basename(test))
                                        logging.warning(
                                            "  %s - %s" % (case.attrib['name'], result.text))
                                        failure_count += 1
        # Optionally generate code coverage; a coverage failure counts as
        # one additional failure in the return code.
        if thebuilder.env.GetValue("CODE_COVERAGE") != "FALSE":
            if thebuilder.env.GetValue("TOOL_CHAIN_TAG") == "GCC5":
                ret = self.gen_code_coverage_gcc(thebuilder)
                if ret != 0:
                    failure_count += 1
            elif thebuilder.env.GetValue("TOOL_CHAIN_TAG").startswith ("VS"):
                ret = self.gen_code_coverage_msvc(thebuilder)
                if ret != 0:
                    failure_count += 1
            else:
                logging.info("Skipping code coverage. Currently, support GCC and MSVC compiler.")
        return failure_count
def gen_code_coverage_gcc(self, thebuilder):
    '''
    Generate cobertura coverage XML from lcov data produced by GCC builds.

    thebuilder -- build environment object (env.GetValue supplies
                  BUILD_OUTPUT_BASE and WORKSPACE).
    Returns 0 on success, 1 if any lcov / lcov_cobertura step fails.
    '''
    logging.info("Generating UnitTest code coverage")
    buildOutputBase = thebuilder.env.GetValue("BUILD_OUTPUT_BASE")
    workspace = thebuilder.env.GetValue("WORKSPACE")
    # Generate base code coverage for all source files
    ret = RunCmd("lcov", f"--no-external --capture --initial --directory {buildOutputBase} --output-file {buildOutputBase}/cov-base.info --rc lcov_branch_coverage=1")
    if ret != 0:
        logging.error("UnitTest Coverage: Failed to build initial coverage data.")
        return 1
    # Coverage data for tested files only
    ret = RunCmd("lcov", f"--capture --directory {buildOutputBase}/ --output-file {buildOutputBase}/coverage-test.info --rc lcov_branch_coverage=1")
    if ret != 0:
        logging.error("UnitTest Coverage: Failed to build coverage data for tested files.")
        return 1
    # Aggregate all coverage data
    ret = RunCmd("lcov", f"--add-tracefile {buildOutputBase}/cov-base.info --add-tracefile {buildOutputBase}/coverage-test.info --output-file {buildOutputBase}/total-coverage.info --rc lcov_branch_coverage=1")
    if ret != 0:
        logging.error("UnitTest Coverage: Failed to aggregate coverage data.")
        return 1
    # Generate coverage XML
    ret = RunCmd("lcov_cobertura", f"{buildOutputBase}/total-coverage.info -o {buildOutputBase}/compare.xml")
    if ret != 0:
        logging.error("UnitTest Coverage: Failed to generate coverage XML.")
        return 1
    # Filter out auto-generated and test code.  Raw f-string: the \| is a
    # literal backslash-pipe for the tool's regex, not a string escape
    # (the previous non-raw string triggered a SyntaxWarning on py3.12+).
    ret = RunCmd("lcov_cobertura", rf"{buildOutputBase}/total-coverage.info --excludes ^.*UnitTest\|^.*MU\|^.*Mock\|^.*DEBUG -o {buildOutputBase}/coverage.xml")
    if ret != 0:
        logging.error("UnitTest Coverage: Failed generate filtered coverage XML.")
        return 1
    # Aggregate the per-package tracefiles into one workspace-wide file.
    testCoverageList = glob.glob(f"{workspace}/Build/**/total-coverage.info", recursive=True)
    coverageFile = ""
    for testCoverage in testCoverageList:
        coverageFile += " --add-tracefile " + testCoverage
    ret = RunCmd("lcov", f"{coverageFile} --output-file {workspace}/Build/all-coverage.info --rc lcov_branch_coverage=1")
    if ret != 0:
        logging.error("UnitTest Coverage: Failed generate all coverage file.")
        return 1
    # Generate one XML report covering all packages.
    if os.path.isfile(f"{workspace}/Build/coverage.xml"):
        os.remove(f"{workspace}/Build/coverage.xml")
    ret = RunCmd("lcov_cobertura", rf"{workspace}/Build/all-coverage.info --excludes ^.*UnitTest\|^.*MU\|^.*Mock\|^.*DEBUG -o {workspace}/Build/coverage.xml")
    if ret != 0:
        # Previously this result was silently dropped; surface the failure
        # like every other step in this function.
        logging.error("UnitTest Coverage: Failed to generate workspace coverage XML.")
        return 1
    return 0
def gen_code_coverage_msvc(self, thebuilder):
    '''
    Generate cobertura coverage XML for MSVC builds via OpenCppCoverage.

    Collects a .cov file per discovered *Test*.exe, incrementally merges
    them into coverage.cov, then exports cobertura XML for the package
    and for the whole workspace Build tree.
    Returns 0 on success, 1 if any OpenCppCoverage step fails.
    '''
    logging.info("Generating UnitTest code coverage")
    buildOutputBase = thebuilder.env.GetValue("BUILD_OUTPUT_BASE")
    testList = glob.glob(os.path.join(buildOutputBase, "**", "*Test*.exe"), recursive=True)
    workspace = thebuilder.env.GetValue("WORKSPACE")
    # Ensure the workspace path ends with a separator for --source.
    # NOTE(review): this indexes workspace[-1] -- raises IndexError if
    # WORKSPACE is ever the empty string; confirm callers guarantee it.
    workspace = (workspace + os.sep) if workspace[-1] != os.sep else workspace
    workspaceBuild = os.path.join(workspace, 'Build')
    # Collect coverage per test and fold it into a running coverage.cov.
    coverageFile = ""
    for testFile in testList:
        # Run the test under OpenCppCoverage, producing <test>.cov.
        ret = RunCmd("OpenCppCoverage", f"--source {workspace} --export_type binary:{testFile}.cov -- {testFile}")
        if ret != 0:
            logging.error("UnitTest Coverage: Failed to collect coverage data.")
            return 1
        coverageFile = f" --input_coverage={testFile}.cov"
        totalCoverageFile = os.path.join(buildOutputBase, 'coverage.cov')
        # Merge the new .cov with the accumulated total, if one exists yet.
        if os.path.isfile(totalCoverageFile):
            coverageFile += f" --input_coverage={totalCoverageFile}"
        ret = RunCmd(
            "OpenCppCoverage",
            f"--export_type binary:{totalCoverageFile} " +
            f"--working_dir={workspaceBuild} " +
            f"{coverageFile}"
        )
        if ret != 0:
            logging.error("UnitTest Coverage: Failed to collect coverage data.")
            return 1
    # Export the per-package cobertura XML.
    # NOTE(review): if testList was empty, totalCoverageFile is unbound
    # here and this raises NameError -- confirm an empty test list cannot
    # reach this point.
    ret = RunCmd(
        "OpenCppCoverage",
        f"--export_type cobertura:{os.path.join(buildOutputBase, 'coverage.xml')} " +
        f"--working_dir={workspaceBuild} " +
        f"--input_coverage={totalCoverageFile} "
    )
    if ret != 0:
        logging.error("UnitTest Coverage: Failed to generate cobertura format xml in single package.")
        return 1
    # Build the workspace-wide report from every package's *.exe.cov.
    testCoverageList = glob.glob(os.path.join(workspace, "Build", "**", "*Test*.exe.cov"), recursive=True)
    coverageFile = ""
    totalCoverageFile = os.path.join(workspaceBuild, 'coverage.cov')
    for testCoverage in testCoverageList:
        coverageFile = f" --input_coverage={testCoverage}"
        if os.path.isfile(totalCoverageFile):
            coverageFile += f" --input_coverage={totalCoverageFile}"
        ret = RunCmd(
            "OpenCppCoverage",
            f"--export_type binary:{totalCoverageFile} " +
            f"--working_dir={workspaceBuild} " +
            f"{coverageFile}"
        )
        if ret != 0:
            logging.error("UnitTest Coverage: Failed to collect coverage data.")
            return 1
    # Export the workspace-wide cobertura XML.
    ret = RunCmd(
        "OpenCppCoverage",
        f"--export_type cobertura:{os.path.join(workspaceBuild, 'coverage.xml')} " +
        f"--working_dir={workspaceBuild} " +
        f"--input_coverage={totalCoverageFile}"
    )
    if ret != 0:
        logging.error("UnitTest Coverage: Failed to generate cobertura format xml.")
        return 1
    return 0
| edk2-master | BaseTools/Plugin/HostBasedUnitTestRunner/HostBasedUnitTestRunner.py |
## @file
# Detect unreferenced PCD and GUID/Protocols/PPIs.
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
'''
DetectNotUsedItem
'''
import re
import os
import sys
import argparse
#
# Globals for help information
#
__prog__ = 'DetectNotUsedItem'
__version__ = '%s Version %s' % (__prog__, '0.1')
__copyright__ = 'Copyright (c) 2019, Intel Corporation. All rights reserved.'
__description__ = "Detect unreferenced PCD and GUID/Protocols/PPIs.\n"
SectionList = ["LibraryClasses", "Guids", "Ppis", "Protocols", "Pcd"]
class PROCESS(object):
    '''
    Scan a DEC file for declared items (GUIDs, PPIs, protocols, PCDs,
    library classes) and detect the ones never referenced by any DSC,
    FDF or INF file under the given directories.
    '''

    def __init__(self, DecPath, InfDirs):
        self.Dec = DecPath      # path of the DEC file to audit
        self.InfPath = InfDirs  # directories to search for consumers
        self.Log = []           # human readable report lines

    def ParserDscFdfInfFile(self):
        '''Return every comment-stripped line of all DSC/FDF/INF files.'''
        AllContentList = []
        for File in self.SearchbyExt([".dsc", ".fdf", ".inf"]):
            AllContentList += self.ParseDscFdfInfContent(File)
        return AllContentList

    # Search File by extension name
    def SearchbyExt(self, ExtList):
        '''Recursively collect files under self.InfPath whose name ends
        with one of ExtList (a single extension string or a list).'''
        FileList = []
        # isinstance() instead of the old type() comparison; a single
        # string is normalized to a one-element list.
        if isinstance(ExtList, str):
            ExtList = [ExtList]
        Exts = tuple(ExtList)
        for path in self.InfPath:
            for root, _, files in os.walk(path, topdown=True, followlinks=False):
                for filename in files:
                    # str.endswith accepts a tuple of suffixes.
                    if filename.endswith(Exts):
                        FileList.append(os.path.join(root, filename))
        return FileList

    # Parse DEC file to get Line number and Name
    # return the Item Name and comments line number
    def ParseDecContent(self):
        '''Parse self.Dec.

        Returns:
            ItemName -- {line index: declared item name}
            Comments -- {line index: [line indices attached to the item]}
        '''
        SectionRE = re.compile(r'\[(.*)\]')
        Flag = False
        Comments = {}
        Comment_Line = []
        ItemName = {}
        with open(self.Dec, 'r') as F:
            for Index, content in enumerate(F):
                NotComment = not content.strip().startswith("#")
                Section = SectionRE.findall(content)
                if Section and NotComment:
                    # Only track lines inside section types we audit.
                    Flag = self.IsNeedParseSection(Section[0])
                if Flag:
                    Comment_Line.append(Index)
                    if NotComment:
                        if content != "\n" and content != "\r\n":
                            ItemName[Index] = content.split('=')[0].split('|')[0].split('#')[0].strip()
                            Comments[Index] = Comment_Line
                        Comment_Line = []
        return ItemName, Comments

    def IsNeedParseSection(self, SectionName):
        '''True if SectionName belongs to a section type in SectionList.'''
        return any(item in SectionName for item in SectionList)

    # Parse DSC, FDF, INF File, remove comments, return Lines list
    def ParseDscFdfInfContent(self, File):
        '''Read File and return its lines with comments and blank lines
        removed and surrounding whitespace stripped.'''
        with open(File, 'r') as F:
            lines = F.readlines()
        cleaned = []
        for line in lines:
            # Drop whole-line comments and completely empty lines.
            # (The old code used lines.remove(lines[Index]) while walking
            # backwards, which deletes the FIRST equal line and therefore
            # corrupts files containing duplicate lines.)
            if line.strip().startswith("#") or line in ("\n", "\r\n"):
                continue
            # Strip a trailing comment, if any, then whitespace.
            cleaned.append(line.split("#")[0].strip() if "#" in line else line.strip())
        return cleaned

    def DetectNotUsedItem(self):
        '''Report and return DEC items not referenced anywhere.'''
        NotUsedItem = {}
        DecItem, DecComments = self.ParseDecContent()
        InfDscFdfContent = self.ParserDscFdfInfFile()
        for LineNum in list(DecItem.keys()):
            DecItemName = DecItem[LineNum]
            # re.escape keeps regex metacharacters (e.g. the '.' in
            # TokenSpace.PcdName) literal so a name never accidentally
            # matches unrelated text.
            Match_reg = re.compile("(?<![a-zA-Z0-9_-])%s(?![a-zA-Z0-9_-])" % re.escape(DecItemName))
            MatchFlag = False
            for Line in InfDscFdfContent:
                if Match_reg.search(Line):
                    MatchFlag = True
                    break
            if not MatchFlag:
                NotUsedItem[LineNum] = DecItemName
        self.Display(NotUsedItem)
        return NotUsedItem, DecComments

    def Display(self, UnuseDict):
        '''Print the unused items and append the same text to self.Log.'''
        print("DEC File:\n%s\n%s%s" % (self.Dec, "{:<15}".format("Line Number"), "{:<0}".format("Unused Item")))
        self.Log.append(
            "DEC File:\n%s\n%s%s\n" % (self.Dec, "{:<15}".format("Line Number"), "{:<0}".format("Unused Item")))
        for num in list(sorted(UnuseDict.keys())):
            ItemName = UnuseDict[num]
            print("%s%s%s" % (" " * 3, "{:<12}".format(num + 1), "{:<1}".format(ItemName)))
            self.Log.append(("%s%s%s\n" % (" " * 3, "{:<12}".format(num + 1), "{:<1}".format(ItemName))))

    def Clean(self, UnUseDict, Comments):
        '''Rewrite the DEC file, dropping unused items and their comments.'''
        removednum = []
        for num in list(UnUseDict.keys()):
            if num in list(Comments.keys()):
                removednum += Comments[num]
        with open(self.Dec, 'r') as Dec:
            lines = Dec.readlines()
        try:
            with open(self.Dec, 'w+') as T:
                for linenum in range(len(lines)):
                    if linenum in removednum:
                        continue
                    T.write(lines[linenum])
            print("DEC File has been clean: %s" % (self.Dec))
        except Exception as err:
            print(err)
class Main(object):
    '''Drive the detection workflow plus optional cleanup and logging.'''

    def mainprocess(self, Dec, Dirs, Isclean, LogPath):
        '''Validate Dirs, run detection on Dec, optionally clean and log.'''
        for dir in Dirs:
            if not os.path.exists(dir):
                print("Error: Invalid path for '--dirs': %s" % dir)
                sys.exit(1)
        Pa = PROCESS(Dec, Dirs)
        unuse, comment = Pa.DetectNotUsedItem()
        if Isclean:
            Pa.Clean(unuse, comment)
        self.Logging(Pa.Log, LogPath)

    def Logging(self, content, LogPath):
        '''Write the report lines to LogPath, creating parent directories
        as needed; errors are printed, never raised.'''
        if LogPath:
            try:
                # Make sure the parent directory exists before opening the
                # file.  (The old check only ran when LogPath itself was an
                # already-existing directory, so the log silently failed
                # whenever the parent directory was missing.)
                FilePath = os.path.dirname(LogPath)
                if FilePath and not os.path.exists(FilePath):
                    os.makedirs(FilePath)
                with open(LogPath, 'w+') as log:
                    for line in content:
                        log.write(line)
                print("Log save to file: %s" % LogPath)
            except Exception as e:
                print("Save log Error: %s" % e)
def main():
    '''Command line entry: parse arguments and run the detection.

    Exits with status 1 on any argument error instead of falling
    through (the old code printed "Invalid DEC file input" but then
    processed the bad file anyway).
    '''
    parser = argparse.ArgumentParser(prog=__prog__,
                                     description=__description__ + __copyright__,
                                     conflict_handler='resolve')
    parser.add_argument('-i', '--input', metavar="", dest='InputDec', help="Input DEC file name.")
    parser.add_argument('--dirs', metavar="", action='append', dest='Dirs',
                        help="The package directory. To specify more directories, please repeat this option.")
    parser.add_argument('--clean', action='store_true', default=False, dest='Clean',
                        help="Clean the unreferenced items from DEC file.")
    parser.add_argument('--log', metavar="", dest="Logfile", default=False,
                        help="Put log in specified file as well as on console.")
    options = parser.parse_args()
    if not options.InputDec:
        print("Error: the following argument is required:'-i/--input'.")
        sys.exit(1)
    if not (os.path.exists(options.InputDec) and options.InputDec.endswith(".dec")):
        print("Error: Invalid DEC file input: %s" % options.InputDec)
        sys.exit(1)
    if not options.Dirs:
        print("Error: the following argument is required:'--dirs'.")
        sys.exit(1)
    M = Main()
    M.mainprocess(options.InputDec, options.Dirs, options.Clean, options.Logfile)
# Standard script entry guard.
if __name__ == '__main__':
    main()
| edk2-master | BaseTools/Scripts/DetectNotUsedItem.py |
#!/usr/bin/python3
'''
Copyright (c) Apple Inc. 2021
SPDX-License-Identifier: BSD-2-Clause-Patent
Class that abstracts PE/COFF debug info parsing via a Python file like
object. You can port this code into an arbitrary debugger by invoking
the classes and passing in a file like object that abstracts the debugger
reading memory.
If you run this file directly it will parse the passed in PE/COFF files
for debug info:
python3 ./efi_pefcoff.py DxeCore.efi
IA32`<path...>/DxeCore.dll load = 0x00000000
EntryPoint = 0x000030d2 TextAddress = 0x00000240 DataAddress = 0x000042c0
.text 0x00000240 (0x04080) flags:0x60000020
.data 0x000042C0 (0x001C0) flags:0xC0000040
.reloc 0x00004480 (0x00240) flags:0x42000040
Note: PeCoffClass uses virtual addresses and not file offsets.
It needs to work when images are loaded into memory.
as long as virtual address map to file addresses this
code can process binary files.
Note: This file can also contain generic worker functions (like GuidNames)
that abstract debugger agnostic services to the debugger.
This file should never import debugger specific modules.
'''
import sys
import os
import uuid
import struct
import re
from ctypes import c_char, c_uint8, c_uint16, c_uint32, c_uint64, c_void_p
from ctypes import ARRAY, sizeof
from ctypes import Structure, LittleEndianStructure
#
# The empty LittleEndianStructure must have _fields_ assigned prior to use or
# sizeof(). Anything that is size UINTN may need to get adjusted.
#
# The issue is ctypes matches our local machine, not the machine we are
# trying to debug. Call patch_ctypes() passing in the byte width from the
# debugger python to make sure you are in sync.
#
# Splitting out the _field_ from the Structure (LittleEndianStructure) class
# allows it to be patched.
#
class EFI_LOADED_IMAGE_PROTOCOL(LittleEndianStructure):
    # _fields_ is assigned later by patch_ctypes() so pointer-sized
    # members can match the debug target's pointer width, not the host's.
    pass


# Field layout for EFI_LOADED_IMAGE_PROTOCOL; c_void_p entries are
# pointer/UINTN values that patch_ctypes() rewrites to the target width.
EFI_LOADED_IMAGE_PROTOCOL_fields_ = [
    ('Revision', c_uint32),
    ('ParentHandle', c_void_p),
    ('SystemTable', c_void_p),
    ('DeviceHandle', c_void_p),
    ('FilePath', c_void_p),
    ('Reserved', c_void_p),
    ('LoadOptionsSize', c_uint32),
    ('LoadOptions', c_void_p),
    ('ImageBase', c_void_p),
    ('ImageSize', c_uint64),
    ('ImageCodeType', c_uint32),
    ('ImageDataType', c_uint32),
    ('Unload', c_void_p),
]
class EFI_GUID(LittleEndianStructure):
    '''128-bit EFI_GUID in its little-endian in-memory layout.'''
    _fields_ = [
        ('Data1', c_uint32),
        ('Data2', c_uint16),
        ('Data3', c_uint16),
        ('Data4', ARRAY(c_uint8, 8))
    ]
class EFI_SYSTEM_TABLE_POINTER(LittleEndianStructure):
    '''ctypes mirror of the UEFI EFI_SYSTEM_TABLE_POINTER structure.'''
    _fields_ = [
        ('Signature', c_uint64),
        ('EfiSystemTableBase', c_uint64),
        ('Crc32', c_uint32)
    ]
class EFI_DEBUG_IMAGE_INFO_NORMAL(LittleEndianStructure):
    # _fields_ is assigned by patch_ctypes() (pointer-width dependent).
    pass


# Field layout for EFI_DEBUG_IMAGE_INFO_NORMAL.
EFI_DEBUG_IMAGE_INFO_NORMAL_fields_ = [
    ('ImageInfoType', c_uint32),
    ('LoadedImageProtocolInstance', c_void_p),
    ('ImageHandle', c_void_p)
]
class EFI_DEBUG_IMAGE_INFO(LittleEndianStructure):
    # _fields_ is assigned by patch_ctypes() (pointer-width dependent).
    pass


# Field layout for EFI_DEBUG_IMAGE_INFO (a union in C; only the
# NormalImage pointer variant is modeled here).
EFI_DEBUG_IMAGE_INFO_fields_ = [
    ('NormalImage', c_void_p),
]
class EFI_DEBUG_IMAGE_INFO_TABLE_HEADER(LittleEndianStructure):
    # _fields_ is assigned by patch_ctypes() (pointer-width dependent).
    pass


# Field layout for EFI_DEBUG_IMAGE_INFO_TABLE_HEADER.
EFI_DEBUG_IMAGE_INFO_TABLE_HEADER_fields_ = [
    ('UpdateStatus', c_uint32),
    ('TableSize', c_uint32),
    ('EfiDebugImageInfoTable', c_void_p),
]
class EFI_TABLE_HEADER(LittleEndianStructure):
    '''Common header preceding the UEFI system/service tables.'''
    _fields_ = [
        ('Signature', c_uint64),
        ('Revision', c_uint32),
        ('HeaderSize', c_uint32),
        ('CRC32', c_uint32),
        ('Reserved', c_uint32),
    ]
class EFI_CONFIGURATION_TABLE(LittleEndianStructure):
    # _fields_ is assigned by patch_ctypes() (pointer-width dependent).
    pass


# Field layout for one EFI configuration table entry (GUID -> pointer).
EFI_CONFIGURATION_TABLE_fields_ = [
    ('VendorGuid', EFI_GUID),
    ('VendorTable', c_void_p)
]
class EFI_SYSTEM_TABLE(LittleEndianStructure):
    # _fields_ is assigned by patch_ctypes() (pointer-width dependent).
    pass


# Field layout for EFI_SYSTEM_TABLE; handles/pointers/UINTNs are
# modeled as c_void_p and patched to the target pointer width.
EFI_SYSTEM_TABLE_fields_ = [
    ('Hdr', EFI_TABLE_HEADER),
    ('FirmwareVendor', c_void_p),
    ('FirmwareRevision', c_uint32),
    ('ConsoleInHandle', c_void_p),
    ('ConIn', c_void_p),
    ('ConsoleOutHandle', c_void_p),
    ('ConOut', c_void_p),
    ('StandardErrHandle', c_void_p),
    ('StdErr', c_void_p),
    ('RuntimeService', c_void_p),
    ('BootService', c_void_p),
    ('NumberOfTableEntries', c_void_p),
    ('ConfigurationTable', c_void_p),
]
class EFI_IMAGE_DATA_DIRECTORY(LittleEndianStructure):
    '''PE/COFF data directory entry: RVA plus size.'''
    _fields_ = [
        ('VirtualAddress', c_uint32),
        ('Size', c_uint32)
    ]
class EFI_TE_IMAGE_HEADER(LittleEndianStructure):
    '''Terse Executable (TE) image header from the PI specification.'''
    _fields_ = [
        ('Signature', ARRAY(c_char, 2)),
        ('Machine', c_uint16),
        ('NumberOfSections', c_uint8),
        ('Subsystem', c_uint8),
        # Bytes removed from the original PE header when converting to TE.
        ('StrippedSize', c_uint16),
        ('AddressOfEntryPoint', c_uint32),
        ('BaseOfCode', c_uint32),
        ('ImageBase', c_uint64),
        ('DataDirectoryBaseReloc', EFI_IMAGE_DATA_DIRECTORY),
        ('DataDirectoryDebug', EFI_IMAGE_DATA_DIRECTORY)
    ]
class EFI_IMAGE_DOS_HEADER(LittleEndianStructure):
    '''Legacy DOS ("MZ") header; e_lfanew gives the PE header offset.'''
    _fields_ = [
        ('e_magic', c_uint16),
        ('e_cblp', c_uint16),
        ('e_cp', c_uint16),
        ('e_crlc', c_uint16),
        ('e_cparhdr', c_uint16),
        ('e_minalloc', c_uint16),
        ('e_maxalloc', c_uint16),
        ('e_ss', c_uint16),
        ('e_sp', c_uint16),
        ('e_csum', c_uint16),
        ('e_ip', c_uint16),
        ('e_cs', c_uint16),
        ('e_lfarlc', c_uint16),
        ('e_ovno', c_uint16),
        ('e_res', ARRAY(c_uint16, 4)),
        ('e_oemid', c_uint16),
        ('e_oeminfo', c_uint16),
        ('e_res2', ARRAY(c_uint16, 10)),
        # NOTE(review): the PE spec defines e_lfanew as a 32-bit value;
        # c_uint16 works only while the PE header offset fits in 16 bits
        # -- confirm against images with large DOS stubs.
        ('e_lfanew', c_uint16)
    ]
class EFI_IMAGE_FILE_HEADER(LittleEndianStructure):
    '''COFF file header (follows the PE signature).'''
    _fields_ = [
        ('Machine', c_uint16),
        ('NumberOfSections', c_uint16),
        ('TimeDateStamp', c_uint32),
        ('PointerToSymbolTable', c_uint32),
        ('NumberOfSymbols', c_uint32),
        ('SizeOfOptionalHeader', c_uint16),
        ('Characteristics', c_uint16)
    ]
class EFI_IMAGE_OPTIONAL_HEADER32(LittleEndianStructure):
    '''PE32 optional header (Magic == 0x10b).'''
    _fields_ = [
        ('Magic', c_uint16),
        ('MajorLinkerVersion', c_uint8),
        ('MinorLinkerVersion', c_uint8),
        ('SizeOfCode', c_uint32),
        ('SizeOfInitializedData', c_uint32),
        ('SizeOfUninitializedData', c_uint32),
        ('AddressOfEntryPoint', c_uint32),
        ('BaseOfCode', c_uint32),
        ('BaseOfData', c_uint32),
        ('ImageBase', c_uint32),
        ('SectionAlignment', c_uint32),
        ('FileAlignment', c_uint32),
        ('MajorOperatingSystemVersion', c_uint16),
        ('MinorOperatingSystemVersion', c_uint16),
        ('MajorImageVersion', c_uint16),
        ('MinorImageVersion', c_uint16),
        ('MajorSubsystemVersion', c_uint16),
        ('MinorSubsystemVersion', c_uint16),
        ('Win32VersionValue', c_uint32),
        ('SizeOfImage', c_uint32),
        ('SizeOfHeaders', c_uint32),
        ('CheckSum', c_uint32),
        ('Subsystem', c_uint16),
        ('DllCharacteristics', c_uint16),
        ('SizeOfStackReserve', c_uint32),
        ('SizeOfStackCommit', c_uint32),
        ('SizeOfHeapReserve', c_uint32),
        ('SizeOfHeapCommit', c_uint32),
        ('LoaderFlags', c_uint32),
        ('NumberOfRvaAndSizes', c_uint32),
        ('DataDirectory', ARRAY(EFI_IMAGE_DATA_DIRECTORY, 16))
    ]
class EFI_IMAGE_NT_HEADERS32(LittleEndianStructure):
    '''PE32 NT headers: "PE\\0\\0" signature, COFF header, optional header.'''
    _fields_ = [
        ('Signature', c_uint32),
        ('FileHeader', EFI_IMAGE_FILE_HEADER),
        ('OptionalHeader', EFI_IMAGE_OPTIONAL_HEADER32)
    ]
class EFI_IMAGE_OPTIONAL_HEADER64(LittleEndianStructure):
    '''PE32+ optional header (Magic == 0x20b).

    NOTE(review): per the PE spec, PE32+ has no BaseOfData field and
    ImageBase is a 64-bit value.  Here the same 8 bytes are split into
    two c_uint32 fields ('BaseOfData' + 'ImageBase'), so the offsets of
    all later fields and the total size still line up, but ImageBase
    reads only the low 32 bits -- confirm before relying on it for
    images loaded above 4 GiB.
    '''
    _fields_ = [
        ('Magic', c_uint16),
        ('MajorLinkerVersion', c_uint8),
        ('MinorLinkerVersion', c_uint8),
        ('SizeOfCode', c_uint32),
        ('SizeOfInitializedData', c_uint32),
        ('SizeOfUninitializedData', c_uint32),
        ('AddressOfEntryPoint', c_uint32),
        ('BaseOfCode', c_uint32),
        ('BaseOfData', c_uint32),
        ('ImageBase', c_uint32),
        ('SectionAlignment', c_uint32),
        ('FileAlignment', c_uint32),
        ('MajorOperatingSystemVersion', c_uint16),
        ('MinorOperatingSystemVersion', c_uint16),
        ('MajorImageVersion', c_uint16),
        ('MinorImageVersion', c_uint16),
        ('MajorSubsystemVersion', c_uint16),
        ('MinorSubsystemVersion', c_uint16),
        ('Win32VersionValue', c_uint32),
        ('SizeOfImage', c_uint32),
        ('SizeOfHeaders', c_uint32),
        ('CheckSum', c_uint32),
        ('Subsystem', c_uint16),
        ('DllCharacteristics', c_uint16),
        ('SizeOfStackReserve', c_uint64),
        ('SizeOfStackCommit', c_uint64),
        ('SizeOfHeapReserve', c_uint64),
        ('SizeOfHeapCommit', c_uint64),
        ('LoaderFlags', c_uint32),
        ('NumberOfRvaAndSizes', c_uint32),
        ('DataDirectory', ARRAY(EFI_IMAGE_DATA_DIRECTORY, 16))
    ]
class EFI_IMAGE_NT_HEADERS64(LittleEndianStructure):
    '''PE32+ NT headers: signature, COFF header, 64-bit optional header.'''
    _fields_ = [
        ('Signature', c_uint32),
        ('FileHeader', EFI_IMAGE_FILE_HEADER),
        ('OptionalHeader', EFI_IMAGE_OPTIONAL_HEADER64)
    ]
class EFI_IMAGE_DEBUG_DIRECTORY_ENTRY(LittleEndianStructure):
    '''One entry of the PE/COFF debug directory.'''
    _fields_ = [
        ('Characteristics', c_uint32),
        ('TimeDateStamp', c_uint32),
        ('MajorVersion', c_uint16),
        ('MinorVersion', c_uint16),
        ('Type', c_uint32),
        ('SizeOfData', c_uint32),
        ('RVA', c_uint32),
        ('FileOffset', c_uint32),
    ]
class EFI_IMAGE_SECTION_HEADER(LittleEndianStructure):
    '''PE/COFF section table entry (.text, .data, .reloc, ...).'''
    _fields_ = [
        ('Name', ARRAY(c_char, 8)),
        ('VirtualSize', c_uint32),
        ('VirtualAddress', c_uint32),
        ('SizeOfRawData', c_uint32),
        ('PointerToRawData', c_uint32),
        ('PointerToRelocations', c_uint32),
        ('PointerToLinenumbers', c_uint32),
        ('NumberOfRelocations', c_uint16),
        ('NumberOfLinenumbers', c_uint16),
        ('Characteristics', c_uint32),
    ]
# PE/COFF OptionalHeader.Magic values: PE32 vs PE32+ images.
EFI_IMAGE_NT_OPTIONAL_HDR32_MAGIC = 0x10b
EFI_IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b
# DataDirectory index of the debug directory.
DIRECTORY_DEBUG = 6
# IMAGE_FILE_HEADER.Machine value -> short CPU architecture name.
# NOTE(review): 0x5032/0x5064 are the RISC-V 32/64 machine types; the
# names "RISC32"/"RISC64" look like they mean RISCV32/RISCV64 -- confirm
# before renaming, since these strings appear in user-facing output.
image_machine_dict = {
    0x014c: "IA32",
    0x0200: "IPF",
    0x0EBC: "EBC",
    0x8664: "X64",
    0x01c2: "ARM",
    0xAA64: "AArch64",
    0x5032: "RISC32",
    0x5064: "RISC64",
    0x5128: "RISCV128",
}
def patch_void_p_to_ctype(patch_type, to_patch):
    '''
    Return a copy of a ctypes _fields_ list with every c_void_p entry
    replaced by patch_type (the integer type matching the debug target's
    pointer width).  If patch_type is None, to_patch is returned as-is.

    Fixes two defects in the original:
    * patch_type was ignored and c_uint32 was always substituted, which
      was wrong whenever the target pointer width was 8.
    * `type(c_type) == type(c_void_p)` compared metaclasses, so EVERY
      simple ctype (c_uint16, c_uint64, ...) matched and was clobbered;
      `c_type is c_void_p` replaces only genuine pointer fields.
    '''
    if patch_type is None:
        return to_patch
    return [(name, patch_type if c_type is c_void_p else c_type)
            for name, c_type in to_patch]
def patch_ctypes(pointer_width=8):
    '''
    Pass in the pointer width of the system being debugged. If it is not
    the same as c_void_p then patch the _fields_ with the correct type.
    For any ctypes Structure that has a c_void_p this function needs to be
    called prior to use or sizeof() to initialize _fields_.
    '''
    if sizeof(c_void_p) == pointer_width:
        # Host pointer width matches the target: no patching required.
        patch_type = None
    elif pointer_width == 16:
        # 128-bit targets are not supported.
        assert False
    elif pointer_width == 8:
        patch_type = c_uint64
    elif pointer_width == 4:
        patch_type = c_uint32
    else:
        raise Exception(f'ERROR: Unkown pointer_width = {pointer_width}')
    # If you add a ctypes Structure class with a c_void_p you need to add
    # it to this list. Note: you should use c_void_p for UINTN values.
    EFI_LOADED_IMAGE_PROTOCOL._fields_ = patch_void_p_to_ctype(
        patch_type, EFI_LOADED_IMAGE_PROTOCOL_fields_)
    EFI_DEBUG_IMAGE_INFO_NORMAL._fields_ = patch_void_p_to_ctype(
        patch_type, EFI_DEBUG_IMAGE_INFO_NORMAL_fields_)
    EFI_DEBUG_IMAGE_INFO._fields_ = patch_void_p_to_ctype(
        patch_type, EFI_DEBUG_IMAGE_INFO_fields_)
    EFI_DEBUG_IMAGE_INFO_TABLE_HEADER._fields_ = patch_void_p_to_ctype(
        patch_type, EFI_DEBUG_IMAGE_INFO_TABLE_HEADER_fields_)
    EFI_CONFIGURATION_TABLE._fields_ = patch_void_p_to_ctype(
        patch_type, EFI_CONFIGURATION_TABLE_fields_)
    EFI_SYSTEM_TABLE._fields_ = patch_void_p_to_ctype(
        patch_type, EFI_SYSTEM_TABLE_fields_)
    # patch up anything else that needs to know pointer_width
    EfiStatusClass(pointer_width)
def ctype_to_str(ctype, indent='', hide_list=()):
    '''
    Render a ctypes Structure instance as a human-readable string by
    walking its _fields_.

    ctype     -- ctypes Structure instance to dump.
    indent    -- string prepended to every output line.
    hide_list -- field names to omit (default changed from the mutable
                 [] to an immutable tuple; callers may still pass lists).
    '''
    result = ''
    for field in ctype._fields_:
        attr = getattr(ctype, field[0])
        tname = type(attr).__name__
        if field[0] in hide_list:
            continue
        result += indent + f'{field[0]} = '
        if tname == 'EFI_GUID':
            # GUIDs print as their C global name when known.
            result += GuidNames.to_name(GuidNames.to_uuid(attr)) + '\n'
        elif issubclass(type(attr), Structure):
            # Nested structure: recurse with extra indent.
            result += f'{tname}\n' + \
                ctype_to_str(attr, indent + ' ', hide_list)
        elif isinstance(attr, int):
            result += f'0x{attr:x}\n'
        else:
            result += f'{attr}\n'
    return result
def hexline(addr, data):
    '''Format one row of a hex dump: 4-digit offset, hex bytes, ASCII.'''
    hex_part = ''.join(f'{byte:02x} ' for byte in data)
    ascii_part = ''.join(
        chr(byte) if 0x20 < byte < 0x7f else '.' for byte in data)
    return f'{addr:04x} {hex_part:48s} |{ascii_part:s}|'
def hexdump(data, indent=''):
    '''Return a multi-line hex dump of data, 16 bytes per row.'''
    buf = data if isinstance(data, bytearray) else bytearray(data)
    rows = (indent + hexline(offset, buf[offset:offset + 16]) + '\n'
            for offset in range(0, len(buf), 16))
    return ''.join(rows)
class EfiTpl:
    '''Render an EFI task priority level (TPL) as a readable string.'''

    def __init__(self, tpl):
        self.tpl = tpl

    def __str__(self):
        level = self.tpl
        # Levels below TPL_APPLICATION print as plain numbers.
        if level < 4:
            return f'{level:d}'
        if level == 31:
            return 'TPL_HIGH_LEVEL'
        if level > 31:
            return f'Invalid TPL = {level:d}'
        # Map the level to its named base; express the remainder, if
        # any, as "+ offset".
        for base, name in ((16, 'TPL_NOTIFY'),
                           (8, 'TPL_CALLBACK'),
                           (4, 'TPL_APPLICATION')):
            if level >= base:
                offset = level - base
                return name if offset == 0 else f'{name} + {offset:d}'
class EfiBootMode:
    '''
    Class to return human readable string for EFI_BOOT_MODE

    Methods
    -------
    to_str(boot_mode, default)
        return string for boot_mode, and return default if there is not a
        match.
    '''
    # EFI_BOOT_MODE value -> PI specification name.
    EFI_BOOT_MODE_dict = {
        0x00: "BOOT_WITH_FULL_CONFIGURATION",
        0x01: "BOOT_WITH_MINIMAL_CONFIGURATION",
        0x02: "BOOT_ASSUMING_NO_CONFIGURATION_CHANGES",
        0x03: "BOOT_WITH_FULL_CONFIGURATION_PLUS_DIAGNOSTICS",
        0x04: "BOOT_WITH_DEFAULT_SETTINGS",
        0x05: "BOOT_ON_S4_RESUME",
        0x06: "BOOT_ON_S5_RESUME",
        0x07: "BOOT_WITH_MFG_MODE_SETTINGS",
        0x10: "BOOT_ON_S2_RESUME",
        0x11: "BOOT_ON_S3_RESUME",
        0x12: "BOOT_ON_FLASH_UPDATE",
        0x20: "BOOT_IN_RECOVERY_MODE",
    }

    def __init__(self, boot_mode):
        self._boot_mode = boot_mode

    def __str__(self):
        return self.to_str(self._boot_mode)

    @classmethod
    def to_str(cls, boot_mode, default=''):
        # Unknown values fall back to the caller-supplied default.
        return cls.EFI_BOOT_MODE_dict.get(boot_mode, default)
class EfiStatusClass:
    '''
    Decode EFI_STATUS to a human readable string. You need to pass in
    pointer_width to get the correct value since the EFI_STATUS code
    values are different based on the sizeof UINTN. The default is
    sizeof(UINTN) == 8.

    Attributes
    ----------
    _dict_ : dictionary
        dictionary of EFI_STATUS that has been updated to match
        pointer_width.

    Methods
    -------
    patch_dictionary(pointer_width)
    to_str(status, default)
    '''
    _dict_ = {}
    # Status values in their 32-bit (UINTN == 4) encoding; the top three
    # bits are severity/warning flags and are widened for 64-bit targets.
    _EFI_STATUS_UINT32_dict = {
        0: "Success",
        1: "Warning Unknown Glyph",
        2: "Warning Delete Failure",
        3: "Warning Write Failure",
        4: "Warning Buffer Too Small",
        5: "Warning Stale Data",
        6: "Warning File System",  # typo fixed ("Warngin")
        (0x20000000 | 0): "Warning interrupt source pending",
        (0x20000000 | 1): "Warning interrupt source quiesced",
        (0x80000000 | 1): "Load Error",
        (0x80000000 | 2): "Invalid Parameter",
        (0x80000000 | 3): "Unsupported",
        (0x80000000 | 4): "Bad Buffer Size",
        (0x80000000 | 5): "Buffer Too Small",
        (0x80000000 | 6): "Not Ready",
        (0x80000000 | 7): "Device Error",
        (0x80000000 | 8): "Write Protected",
        (0x80000000 | 9): "Out of Resources",
        (0x80000000 | 10): "Volume Corrupt",
        (0x80000000 | 11): "Volume Full",
        (0x80000000 | 12): "No Media",
        (0x80000000 | 13): "Media changed",
        (0x80000000 | 14): "Not Found",
        (0x80000000 | 15): "Access Denied",
        (0x80000000 | 16): "No Response",
        (0x80000000 | 17): "No mapping",
        (0x80000000 | 18): "Time out",
        (0x80000000 | 19): "Not started",
        (0x80000000 | 20): "Already started",
        (0x80000000 | 21): "Aborted",
        (0x80000000 | 22): "ICMP Error",
        (0x80000000 | 23): "TFTP Error",
        (0x80000000 | 24): "Protocol Error",
        (0x80000000 | 25): "Incompatible Version",
        (0x80000000 | 26): "Security Violation",
        (0x80000000 | 27): "CRC Error",
        (0x80000000 | 28): "End of Media",
        (0x80000000 | 31): "End of File",
        (0x80000000 | 32): "Invalid Language",
        (0x80000000 | 33): "Compromised Data",
        (0x80000000 | 35): "HTTP Error",
        (0xA0000000 | 0): "Interrupt Pending",
    }

    def __init__(self, status=None, pointer_width=8):
        self.status = status
        # this will convert to the 64-bit encoding if needed
        self.patch_dictionary(pointer_width)

    def __str__(self):
        return self.to_str(self.status)

    @classmethod
    def to_str(cls, status, default=''):
        return cls._dict_.get(status, default)

    @classmethod
    def patch_dictionary(cls, pointer_width):
        '''Populate _dict_ for the target UINTN width; True on success.'''
        if cls._dict_:
            # only patch the class variable once
            return False
        if pointer_width == 4:
            # Fixed: the original assigned cls._dict (missing trailing
            # underscore) and fell through, so the 32-bit table was never
            # installed and the call returned None.
            cls._dict_ = dict(cls._EFI_STATUS_UINT32_dict)
            return True
        if pointer_width == 8:
            for key, value in cls._EFI_STATUS_UINT32_dict.items():
                # Move the severity/warning bits up to bits 61..63.
                mask = (key & 0xE0000000) << 32
                new_key = (key & 0x1FFFFFFF) | mask
                cls._dict_[new_key] = value
            return True
        return False
class GuidNames:
    '''
    Class to expose the C names of EFI_GUID's. The _dict_ starts with
    common EFI System Table entry EFI_GUID's. _dict_ can get updated
    with the build generated Guid.xref file if a path to a module is
    passed into add_build_guid_file(). If symbols are loaded for any
    module in the build, the path of the build product implies the
    relative location of that build's Guid.xref file.

    Attributes
    ----------
    _dict_ : dictionary
        dictionary of EFI_GUID (uuid) strings to C global names

    Methods
    -------
    to_uuid(uuid)
        convert a hex UUID string or bytearray to a uuid.UUID
    to_name(uuid)
        convert a UUID string to a C global constant name.
    to_guid(guid_name)
        convert a C global constant EFI_GUID name to uuid hex string.
    is_guid_str(name)
        name is a hex UUID string.
        Example: 49152E77-1ADA-4764-B7A2-7AFEFED95E8B
    to_c_guid(value)
        convert a uuid.UUID or UUID string to a c_guid string
        (see is_c_guid())
    from_c_guid(value)
        convert a C guid string to a hex UUID string.
    is_c_guid(name)
        name is the C initialization value for an EFI_GUID. Example:
        { 0x414e6bdd, 0xe47b, 0x47cc, { 0xb2, 0x44, 0xbb, 0x61,
        0x02, 0x0c, 0xf5, 0x16 }}
    add_build_guid_file(module_path, custom_file):
        assume module_path is an edk2 build product and load the
        Guid.xref file from that build to fill in _dict_. If you know
        the path and file name of a custom Guid.xref you can pass it
        in as custom_file.
    '''
    _dict_ = {  # Common EFI System Table values
        '05AD34BA-6F02-4214-952E-4DA0398E2BB9':
            'gEfiDxeServicesTableGuid',
        '7739F24C-93D7-11D4-9A3A-0090273FC14D':
            'gEfiHobListGuid',
        '4C19049F-4137-4DD3-9C10-8B97A83FFDFA':
            'gEfiMemoryTypeInformationGuid',
        '49152E77-1ADA-4764-B7A2-7AFEFED95E8B':
            'gEfiDebugImageInfoTableGuid',
        '060CC026-4C0D-4DDA-8F41-595FEF00A502':
            'gMemoryStatusCodeRecordGuid',
        'EB9D2D31-2D88-11D3-9A16-0090273FC14D':
            'gEfiSmbiosTableGuid',
        'EB9D2D30-2D88-11D3-9A16-0090273FC14D':
            'gEfiAcpi10TableGuid',
        '8868E871-E4F1-11D3-BC22-0080C73C8881':
            'gEfiAcpi20TableGuid',
    }
    # Guid.xref files that have already been merged into _dict_.
    guid_files = []

    def __init__(self, uuid=None, pointer_width=8):
        self.uuid = None if uuid is None else self.to_uuid(uuid)

    def __str__(self):
        if self.uuid is None:
            # No GUID given: dump the whole translation table.
            result = ''
            for key, value in GuidNames._dict_.items():
                result += f'{key}: {value}\n'
        else:
            result = self.to_name(self.uuid)
        return result

    @classmethod
    def to_uuid(cls, obj):
        '''Convert a ctypes buffer, bytes, or GUID string to uuid.UUID.'''
        try:
            return uuid.UUID(bytes_le=bytes(obj))
        except (ValueError, TypeError):
            try:
                return uuid.UUID(bytes_le=obj)
            except (ValueError, TypeError):
                return uuid.UUID(obj)

    @classmethod
    def to_name(cls, uuid):
        '''Return the C name for a GUID, or the uppercase GUID itself.'''
        if not isinstance(uuid, str):
            uuid = str(uuid)
        if cls.is_c_guid(uuid):
            uuid = cls.from_c_guid(uuid)
        return cls._dict_.get(uuid.upper(), uuid.upper())

    @classmethod
    def to_guid(cls, guid_name):
        '''Return the GUID string for a C name; KeyError if unknown.'''
        for key, value in cls._dict_.items():
            if guid_name == value:
                return key.upper()
        # Fixed: raise with the requested name.  The original raised
        # KeyError(key) -- whatever key the loop happened to end on --
        # and a NameError when the dictionary was empty.
        raise KeyError(guid_name)

    @classmethod
    def is_guid_str(cls, name):
        '''True if name looks like xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx.'''
        if not isinstance(name, str):
            return False
        return name.count('-') >= 4

    @classmethod
    def to_c_guid(cls, value):
        '''Convert a uuid.UUID or GUID string to C initializer text.'''
        if isinstance(value, uuid.UUID):
            guid = value
        else:
            guid = uuid.UUID(value)
        (data1, data2, data3,
         data4_0, data4_1, data4_2, data4_3,
         data4_4, data4_5, data4_6, data4_7) = struct.unpack(
            '<IHH8B', guid.bytes_le)
        return (f'{{ 0x{data1:08X}, 0x{data2:04X}, 0x{data3:04X}, '
                f'{{ 0x{data4_0:02X}, 0x{data4_1:02X}, 0x{data4_2:02X}, '
                f'0x{data4_3:02X}, 0x{data4_4:02X}, 0x{data4_5:02X}, '
                f'0x{data4_6:02X}, 0x{data4_7:02X} }} }}')

    @classmethod
    def from_c_guid(cls, value):
        '''Convert C initializer text back to a GUID string; on parse
        failure the input is returned unchanged.'''
        try:
            hex = [int(x, 16) for x in re.findall(r"[\w']+", value)]
            return (f'{hex[0]:08X}-{hex[1]:04X}-{hex[2]:04X}'
                    + f'-{hex[3]:02X}{hex[4]:02X}-{hex[5]:02X}{hex[6]:02X}'
                    + f'{hex[7]:02X}{hex[8]:02X}{hex[9]:02X}{hex[10]:02X}')
        except ValueError:
            return value

    @classmethod
    def is_c_guid(cls, name):
        '''True if name looks like a C EFI_GUID initializer.'''
        if not isinstance(name, str):
            return False
        return name.count('{') == 2 and name.count('}') == 2

    @classmethod
    def add_build_guid_file(cls, module_path, custom_file=None):
        '''Merge a build's Guid.xref name database into _dict_.'''
        if custom_file is not None:
            xref = custom_file
        else:
            # module_path will look like:
            # <repo>/Build/OvmfX64/DEBUG_XCODE5/X64/../DxeCore.dll
            # Walk backwards looking for a toolchain like name.
            # Then look for GUID database:
            # Build/OvmfX64/DEBUG_XCODE5/FV/Guid.xref
            # NOTE(review): build_root is unbound (NameError) if no
            # toolchain-style path element is found -- confirm callers
            # always pass build-product paths.
            for i in reversed(module_path.split(os.sep)):
                if (i.startswith('DEBUG_') or
                        i.startswith('RELEASE_') or
                        i.startswith('NOOPT_')):
                    build_root = os.path.join(
                        module_path.rsplit(i, 1)[0], i)
                    break
            xref = os.path.join(build_root, 'FV', 'Guid.xref')
        if xref in cls.guid_files:
            # only processes the file one time
            return True
        with open(xref) as f:
            content = f.readlines()
            cls.guid_files.append(xref)
            for lines in content:
                try:
                    if cls.is_guid_str(lines):
                        # a regex would be more pedantic
                        words = lines.split()
                        cls._dict_[words[0].upper()] = words[1].strip('\n')
                except ValueError:
                    pass
            # (The original also had an unreachable `return False` after
            # this; it has been removed.)
            return True
class EFI_HOB_GENERIC_HEADER(LittleEndianStructure):
    '''Common header that starts every HOB (type, length).'''
    _fields_ = [
        ('HobType', c_uint16),
        ('HobLength', c_uint16),
        ('Reserved', c_uint32)
    ]
class EFI_HOB_HANDOFF_INFO_TABLE(LittleEndianStructure):
    '''PHIT HOB: first HOB in the list, describes memory extents.'''
    _fields_ = [
        ('Header', EFI_HOB_GENERIC_HEADER),
        ('Version', c_uint32),
        ('BootMode', c_uint32),
        ('EfiMemoryTop', c_uint64),
        ('EfiMemoryBottom', c_uint64),
        ('EfiFreeMemoryTop', c_uint64),
        ('EfiFreeMemoryBottom', c_uint64),
        ('EfiEndOfHobList', c_uint64),
    ]
class EFI_HOB_MEMORY_ALLOCATION(LittleEndianStructure):
    '''Memory allocation HOB.'''
    _fields_ = [
        ('Header', EFI_HOB_GENERIC_HEADER),
        ('Name', EFI_GUID),
        ('MemoryBaseAddress', c_uint64),
        ('MemoryLength', c_uint64),
        ('MemoryType', c_uint32),
        ('Reserved', c_uint32),
    ]
class EFI_HOB_RESOURCE_DESCRIPTOR(LittleEndianStructure):
    '''Resource descriptor HOB (physical memory/IO ranges).'''
    _fields_ = [
        ('Header', EFI_HOB_GENERIC_HEADER),
        ('Owner', EFI_GUID),
        ('ResourceType', c_uint32),
        ('ResourceAttribute', c_uint32),
        ('PhysicalStart', c_uint64),
        ('ResourceLength', c_uint64),
    ]
class EFI_HOB_GUID_TYPE(LittleEndianStructure):
    # GUID-extension HOB (type 4): GUID-identified payload follows.
    _fields_ = [
        ('Header',              EFI_HOB_GENERIC_HEADER),
        ('Name',                EFI_GUID),
    ]
class EFI_HOB_FIRMWARE_VOLUME(LittleEndianStructure):
    # Firmware volume HOB (type 5): location of an FV in memory.
    _fields_ = [
        ('Header',              EFI_HOB_GENERIC_HEADER),
        ('BaseAddress',         c_uint64),
        ('Length',              c_uint64),
    ]
class EFI_HOB_CPU(LittleEndianStructure):
    # CPU HOB (type 6): physical memory/IO address-space widths in bits.
    _fields_ = [
        ('Header',              EFI_HOB_GENERIC_HEADER),
        ('SizeOfMemorySpace',   c_uint8),
        ('SizeOfIoSpace',       c_uint8),
        ('Reserved',            ARRAY(c_uint8, 6)),
    ]
class EFI_HOB_MEMORY_POOL(LittleEndianStructure):
    # Memory pool HOB (type 7): header only; pool data follows it.
    _fields_ = [
        ('Header',              EFI_HOB_GENERIC_HEADER),
    ]
class EFI_HOB_FIRMWARE_VOLUME2(LittleEndianStructure):
    # FV2 HOB (type 9): FV extracted from an FFS file (names the FV/file).
    _fields_ = [
        ('Header',              EFI_HOB_GENERIC_HEADER),
        ('BaseAddress',         c_uint64),
        ('Length',              c_uint64),
        ('FvName',              EFI_GUID),
        ('FileName',            EFI_GUID)
    ]
class EFI_HOB_FIRMWARE_VOLUME3(LittleEndianStructure):
    # FV3 HOB (type 0xc): adds authentication status.
    # Header fields are inlined here rather than nested as a
    # EFI_HOB_GENERIC_HEADER member.
    _fields_ = [
        ('HobType',             c_uint16),
        ('HobLength',           c_uint16),
        ('Reserved',            c_uint32),
        ('BaseAddress',         c_uint64),
        ('Length',              c_uint64),
        ('AuthenticationStatus', c_uint32),
        ('ExtractedFv',         c_uint8),
        ('FvName',              EFI_GUID),
        ('FileName',            EFI_GUID),
    ]
class EFI_HOB_UEFI_CAPSULE(LittleEndianStructure):
    # UEFI capsule HOB (type 0xb); header fields inlined as in FV3.
    _fields_ = [
        ('HobType',             c_uint16),
        ('HobLength',           c_uint16),
        ('Reserved',            c_uint32),
        ('BaseAddress',         c_uint64),
        ('Length',              c_uint64),
    ]
class EfiHob:
    '''
    Parse EFI Hand Off Blocks (HOBs) based on the edk2 C Structures defined
    above. In the context of this class verbose means hexdump extra data.

    Attributes
    ----------
    Hob : list
        List of HOBs. Each entry contains the name, HOB type, HOB length,
        the ctype struct for the HOB, and any extra data.

    Methods
    -------
    get_hob_by_type(hob_type)
        return string that decodes the HOBs of hob_type. If hob_type is
        None then return all HOBs.
    '''
    # Class-level cache: the HOB list is parsed once and shared by every
    # instance unless an explicit address forces a re-read.
    Hob = []
    verbose = False

    # EFI_HOB_TYPE_* value -> ctype structure used to decode that HOB.
    hob_dict = {
        1: EFI_HOB_HANDOFF_INFO_TABLE,
        2: EFI_HOB_MEMORY_ALLOCATION,
        3: EFI_HOB_RESOURCE_DESCRIPTOR,
        4: EFI_HOB_GUID_TYPE,
        5: EFI_HOB_FIRMWARE_VOLUME,
        6: EFI_HOB_CPU,
        7: EFI_HOB_MEMORY_POOL,
        9: EFI_HOB_FIRMWARE_VOLUME2,
        0xb: EFI_HOB_UEFI_CAPSULE,
        0xc: EFI_HOB_FIRMWARE_VOLUME3,
        0xffff: EFI_HOB_GENERIC_HEADER,
    }

    def __init__(self, file, address=None, verbose=False, count=1000):
        '''
        Parse up to count HOBs from address, or, when address is None,
        from the HOB list located via the EFI Configuration Table, reading
        through the memory-like file object.
        '''
        self._file = file
        EfiHob.verbose = verbose

        # Reuse the cached parse unless the caller supplied an address.
        if len(EfiHob.Hob) != 0 and address is None:
            return

        if address is not None:
            hob_ptr = address
        else:
            # HOB_LIST_GUID entry in the EFI Configuration Table.
            hob_ptr = EfiConfigurationTable(file).GetConfigTable(
                '7739F24C-93D7-11D4-9A3A-0090273FC14D')

        # Bug fix: forward the caller's count limit. It was previously
        # dropped, so read_hobs always used its own default of 1000.
        self.read_hobs(hob_ptr, count)

    @classmethod
    def __str__(cls):
        '''Return all cached HOBs decoded as a string.'''
        return cls.get_hob_by_type(None)

    @classmethod
    def get_hob_by_type(cls, hob_type):
        '''Decode cached HOBs of hob_type (None means all) to a string.'''
        result = ""
        for (Name, HobType, HobLen, chob, extra) in cls.Hob:
            if hob_type is not None:
                if hob_type != HobType:
                    continue

            result += f'Type: {Name:s} (0x{HobType:01x}) Len: 0x{HobLen:03x}\n'
            result += ctype_to_str(chob, ' ', ['Reserved'])
            if cls.verbose:
                if extra is not None:
                    result += hexdump(extra, ' ')

        return result

    def read_hobs(self, hob_ptr, count=1000):
        '''
        Walk the HOB list at hob_ptr caching up to count entries in
        EfiHob.Hob. Stops at the end-of-list HOB (type 0xffff).
        '''
        if hob_ptr is None:
            return

        try:
            for _ in range(count):  # while True
                hdr, _ = self._ctype_read_ex(EFI_HOB_GENERIC_HEADER, hob_ptr)
                if hdr.HobType == 0xffff:
                    break

                type_str = self.hob_dict.get(
                    hdr.HobType, EFI_HOB_GENERIC_HEADER)
                hob, extra = self._ctype_read_ex(
                    type_str, hob_ptr, hdr.HobLength)
                EfiHob.Hob.append(
                    (type(hob).__name__,
                     hdr.HobType,
                     hdr.HobLength,
                     hob,
                     extra))
                hob_ptr += hdr.HobLength
        except ValueError:
            # Ran off the end of readable memory; keep what was parsed.
            pass

    def _ctype_read_ex(self, ctype_struct, offset=0, rsize=None):
        '''
        Read rsize bytes (default sizeof(ctype_struct)) at offset and
        return (decoded struct, bytes beyond the struct or None).
        '''
        if offset != 0:
            self._file.seek(offset)

        type_size = sizeof(ctype_struct)
        size = rsize if rsize else type_size
        data = self._file.read(size)
        cdata = ctype_struct.from_buffer(bytearray(data))

        if size > type_size:
            return cdata, data[type_size:]
        else:
            return cdata, None
class EFI_DEVICE_PATH(LittleEndianStructure):
    # Common 4-byte header that starts every Device Path node.
    _pack_ = 1
    _fields_ = [
        ('Type',             c_uint8),
        ('SubType',          c_uint8),
        # UINT8 Length[2]
        # Cheat and use c_uint16 since we don't care about alignment
        ('Length',           c_uint16)
    ]
class PCI_DEVICE_PATH(LittleEndianStructure):
    # Hardware Device Path (1,1): PCI function/device on the parent bus.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('Function',         c_uint8),
        ('Device',           c_uint8)
    ]
class PCCARD_DEVICE_PATH(LittleEndianStructure):
    # Hardware Device Path (1,2): PC Card function number.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('FunctionNumber',   c_uint8),
    ]
class MEMMAP_DEVICE_PATH(LittleEndianStructure):
    # Hardware Device Path (1,3): memory-mapped device address range.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('StartingAddress',  c_uint64),
        ('EndingAddress',    c_uint64),
    ]
class VENDOR_DEVICE_PATH(LittleEndianStructure):
    # Vendor-defined node, used for (1,4), (3,10) and (4,3); the GUID
    # selects the vendor-specific layout of any trailing data.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('Guid',             EFI_GUID),
    ]
class CONTROLLER_DEVICE_PATH(LittleEndianStructure):
    # Hardware Device Path (1,5): controller number.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('ControllerNumber', c_uint32),
    ]
class BMC_DEVICE_PATH(LittleEndianStructure):
    # Hardware Device Path (1,6): baseboard management controller.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('InterfaceType',    c_uint8),
        ('BaseAddress',      ARRAY(c_uint8, 8)),
    ]
class BBS_BBS_DEVICE_PATH(LittleEndianStructure):
    # BIOS Boot Specification Device Path (5,1): legacy boot device.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('DeviceType',       c_uint16),
        ('StatusFlag',       c_uint16)
    ]
class ACPI_HID_DEVICE_PATH(LittleEndianStructure):
    # ACPI Device Path (2,1): compressed EISA-format _HID and _UID.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('HID',              c_uint32),
        ('UID',              c_uint32)
    ]
class ACPI_EXTENDED_HID_DEVICE_PATH(LittleEndianStructure):
    # ACPI Device Path (2,2): adds _CID; optional ID strings may follow.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('HID',              c_uint32),
        ('UID',              c_uint32),
        ('CID',              c_uint32)
    ]
class ACPI_ADR_DEVICE_PATH(LittleEndianStructure):
    # ACPI Device Path (2,3): _ADR value.
    # NOTE(review): UEFI spec names this field ADR; 'ARD' looks like a
    # typo, but renaming would change ctype_to_str output, so left as-is.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('ARD',              c_uint32)
    ]
class ACPI_NVDIMM_DEVICE_PATH(LittleEndianStructure):
    # ACPI Device Path (2,4): NVDIMM identified by its NFIT handle.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('NFITDeviceHandle', c_uint32)
    ]
class ATAPI_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,1): ATA/ATAPI device addressing.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("PrimarySecondary", c_uint8),
        ("SlaveMaster",      c_uint8),
        ("Lun",              c_uint16)
    ]
class SCSI_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,2): SCSI target/LUN.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Pun",              c_uint16),
        ("Lun",              c_uint16)
    ]
class FIBRECHANNEL_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,3): Fibre Channel WWN/LUN.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Reserved",         c_uint32),
        ("WWN",              c_uint64),
        ("Lun",              c_uint64)
    ]
class F1394_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,4): IEEE 1394 (FireWire) GUID.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Reserved",         c_uint32),
        ("Guid",             c_uint64)
    ]
class USB_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,5): USB port/interface.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("ParentPortNumber", c_uint8),
        ("InterfaceNumber",  c_uint8),
    ]
class I2O_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,6): I2O target id.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Tid",              c_uint32)
    ]
class INFINIBAND_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,9): InfiniBand endpoint.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("ResourceFlags",    c_uint32),
        ("PortGid",          ARRAY(c_uint8, 16)),
        ("ServiceId",        c_uint64),
        ("TargetPortId",     c_uint64),
        ("DeviceId",         c_uint64)
    ]
class UART_FLOW_CONTROL_DEVICE_PATH(LittleEndianStructure):
    # Vendor-GUID messaging node: UART flow control (selected via
    # guid_override_dict, not by Type/SubType).
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Guid",             EFI_GUID),
        ("FlowControlMap",   c_uint32)
    ]
class SAS_DEVICE_PATH(LittleEndianStructure):
    # Vendor-GUID messaging node: Serial Attached SCSI (selected via
    # guid_override_dict, not by Type/SubType).
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Guid",             EFI_GUID),
        ("Reserved",         c_uint32),
        ("SasAddress",       c_uint64),
        ("Lun",              c_uint64),
        ("DeviceTopology",   c_uint16),
        ("RelativeTargetPort", c_uint16)
    ]
class EFI_MAC_ADDRESS(LittleEndianStructure):
    # Helper: 32-byte MAC address buffer (UEFI pads beyond 6 bytes).
    _pack_ = 1
    _fields_ = [
        ("Addr", ARRAY(c_uint8, 32)),
    ]
class MAC_ADDR_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,11): network interface MAC address.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('MacAddress',       EFI_MAC_ADDRESS),
        ('IfType',           c_uint8)
    ]
class IPv4_ADDRESS(LittleEndianStructure):
    # Helper: 4-byte IPv4 address.
    _fields_ = [
        ("Addr", ARRAY(c_uint8, 4)),
    ]
class IPv4_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,12): IPv4 endpoint.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('LocalIpAddress',   IPv4_ADDRESS),
        ('RemoteIpAddress',  IPv4_ADDRESS),
        ('LocalPort',        c_uint16),
        ('RemotePort',       c_uint16),
        ('Protocol',         c_uint16),
        ('StaticIpAddress',  c_uint8),
        ('GatewayIpAddress', IPv4_ADDRESS),
        ('SubnetMask',       IPv4_ADDRESS)
    ]
class IPv6_ADDRESS(LittleEndianStructure):
    # Helper: 16-byte IPv6 address.
    _fields_ = [
        ("Addr", ARRAY(c_uint8, 16)),
    ]
class IPv6_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,13): IPv6 endpoint.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('LocalIpAddress',   IPv6_ADDRESS),
        ('RemoteIpAddress',  IPv6_ADDRESS),
        ('LocalPort',        c_uint16),
        ('RemotePort',       c_uint16),
        ('Protocol',         c_uint16),
        ('IpAddressOrigin',  c_uint8),
        ('PrefixLength',     c_uint8),
        ('GatewayIpAddress', IPv6_ADDRESS)
    ]
class UART_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,14): serial port settings.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('Reserved',         c_uint32),
        ('BaudRate',         c_uint64),
        ('DataBits',         c_uint8),
        ('Parity',           c_uint8),
        ('StopBits',         c_uint8)
    ]
class USB_CLASS_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,15): USB device matched by class codes.
    # NOTE(review): UEFI spec names the 5th field DeviceSubClass;
    # 'DeviceCSjblass' looks garbled, but renaming it would change the
    # field names printed by ctype_to_str, so it is left untouched here.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('VendorId',         c_uint16),
        ('ProductId',        c_uint16),
        ('DeviceClass',      c_uint8),
        ('DeviceCSjblass',   c_uint8),
        ('DeviceProtocol',   c_uint8),
    ]
class USB_WWID_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,16): USB WWID; serial string may follow.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('InterfaceNumber',  c_uint16),
        ('VendorId',         c_uint16),
        ('ProductId',        c_uint16),
    ]
class DEVICE_LOGICAL_UNIT_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,17): logical unit number.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('Lun',              c_uint8)
    ]
class SATA_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,18): SATA port / port multiplier / LUN.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('HBAPortNumber',    c_uint16),
        ('PortMultiplierPortNumber', c_uint16),
        ('Lun',              c_uint16),
    ]
class ISCSI_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,19): iSCSI target; name string follows.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('NetworkProtocol',  c_uint16),
        ('LoginOption',      c_uint16),
        ('Lun',              c_uint64),
        ('TargetPortalGroupTag', c_uint16),
    ]
class VLAN_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,20): VLAN id.
    # NOTE(review): spec names this VlanId; 'VlandId' kept to preserve
    # the printed field name.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("VlandId",          c_uint16)
    ]
class FIBRECHANNELEX_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,21): Fibre Channel Ex (byte-array WWN/LUN).
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Reserved",         c_uint16),
        ("WWN",              ARRAY(c_uint8, 8)),
        ("Lun",              ARRAY(c_uint8, 8)),
    ]
class SASEX_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,22): Serial Attached SCSI Ex.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("SasAddress",       ARRAY(c_uint8, 8)),
        ("Lun",              ARRAY(c_uint8, 8)),
        ("DeviceTopology",   c_uint16),
        ("RelativeTargetPort", c_uint16)
    ]
class NVME_NAMESPACE_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,23): NVMe namespace id + EUI-64.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("NamespaceId",      c_uint32),
        ("NamespaceUuid",    c_uint64)
    ]
class DNS_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,24)/(3,31): DNS server address.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("IsIPv6",           c_uint8),
        ("DnsServerIp",      IPv6_ADDRESS)
    ]
class UFS_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,25): Universal Flash Storage target/LUN.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Pun",              c_uint8),
        ("Lun",              c_uint8),
    ]
class SD_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,26): SD card slot.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("SlotNumber",       c_uint8)
    ]
class BLUETOOTH_ADDRESS(LittleEndianStructure):
    # Helper: 48-bit Bluetooth device address.
    _pack_ = 1
    _fields_ = [
        ("Address", ARRAY(c_uint8, 6))
    ]
class BLUETOOTH_LE_ADDRESS(LittleEndianStructure):
    # Helper for the Bluetooth LE node.
    # NOTE(review): the UEFI spec defines BLUETOOTH_LE_ADDRESS as
    # Address[6] + Type; this 3-byte Format/Class layout differs --
    # verify against the spec revision this tool targets.
    _pack_ = 1
    _fields_ = [
        ("Format",           c_uint8),
        ("Class",            c_uint16)
    ]
class BLUETOOTH_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,27): Bluetooth device address.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("BD_ADDR",          BLUETOOTH_ADDRESS)
    ]
class WIFI_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,28): Wi-Fi SSID.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("SSId",             ARRAY(c_uint8, 32))
    ]
class EMMC_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,29): eMMC slot.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("SlotNumber",       c_uint8)
    ]
class BLUETOOTH_LE_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,30): Bluetooth LE device address.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("BD_ADDR",          BLUETOOTH_LE_ADDRESS)
    ]
class NVDIMM_NAMESPACE_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,32): NVDIMM namespace UUID.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("Uuid",             EFI_GUID)
    ]
class REST_SERVICE_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,33): REST (Redfish) service.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("RESTService",      c_uint8),
        ("AccessMode",       c_uint8)
    ]
class REST_VENDOR_SERVICE_DEVICE_PATH(LittleEndianStructure):
    # Messaging Device Path (3,34): vendor-specific REST service.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ("RESTService",      c_uint8),
        ("AccessMode",       c_uint8),
        ("Guid",             EFI_GUID),
    ]
class HARDDRIVE_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,1): partition on a hard drive (MBR or GPT).
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('PartitionNumber',  c_uint32),
        ('PartitionStart',   c_uint64),
        ('PartitionSize',    c_uint64),
        ('Signature',        ARRAY(c_uint8, 16)),
        ('MBRType',          c_uint8),
        ('SignatureType',    c_uint8)
    ]
class CDROM_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,2): El Torito boot entry on a CD-ROM.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('BootEntry',        c_uint32),
        ('PartitionStart',   c_uint64),
        ('PartitionSize',    c_uint64)
    ]
class MEDIA_PROTOCOL_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,5): protocol identified by GUID.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('Protocol',         EFI_GUID)
    ]
class MEDIA_FW_VOL_FILEPATH_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,6): FFS file inside a firmware volume.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('FvFileName',       EFI_GUID)
    ]
class MEDIA_FW_VOL_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,7): firmware volume identified by GUID.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('FvName',           EFI_GUID)
    ]
class MEDIA_RELATIVE_OFFSET_RANGE_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,8): byte range relative to the parent device.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('Reserved',         c_uint32),
        ('StartingOffset',   c_uint64),
        ('EndingOffset',     c_uint64)
    ]
class MEDIA_RAM_DISK_DEVICE_PATH(LittleEndianStructure):
    # Media Device Path (4,9): RAM disk memory range and type.
    _pack_ = 1
    _fields_ = [
        ('Header',           EFI_DEVICE_PATH),
        ('StartingAddr',     c_uint64),
        ('EndingAddr',       c_uint64),
        ('TypeGuid',         EFI_GUID),
        ('Instance',         c_uint16)
    ]
class EfiDevicePath:
    '''
    Parse EFI Device Paths based on the edk2 C Structures defined above.
    In the context of this class verbose means hexdump extra data.

    Attributes
    ----------
    DevicePath : list
        List of device path instances. Each instance is a list of nodes
        for the given Device Path instance.

    Methods
    -----------
    device_path_node(address)
        return the Device Path ctype hdr, ctype, and any extra data in
        the Device Path node. This is just a single Device Path node,
        not the entire Device Path.
    device_path_node_str(address)
        return the device path node (not the entire Device Path) as a string
    '''
    # NOTE(review): class-level list -- parsed instances are shared by
    # every EfiDevicePath object; a second parse appends to the same list.
    DevicePath = []
    # (Type, SubType) -> ctype used to decode that Device Path node.
    device_path_dict = {
        # ( Type, SubType ) : Device Path C typedef
        # HARDWARE_DEVICE_PATH
        (1, 1): PCI_DEVICE_PATH,
        (1, 2): PCCARD_DEVICE_PATH,
        (1, 3): MEMMAP_DEVICE_PATH,
        (1, 4): VENDOR_DEVICE_PATH,
        (1, 5): CONTROLLER_DEVICE_PATH,
        (1, 6): BMC_DEVICE_PATH,
        # ACPI_DEVICE_PATH
        (2, 1): ACPI_HID_DEVICE_PATH,
        (2, 2): ACPI_EXTENDED_HID_DEVICE_PATH,
        (2, 3): ACPI_ADR_DEVICE_PATH,
        (2, 4): ACPI_NVDIMM_DEVICE_PATH,
        # MESSAGING_DEVICE_PATH
        (3, 1): ATAPI_DEVICE_PATH,
        (3, 2): SCSI_DEVICE_PATH,
        (3, 3): FIBRECHANNEL_DEVICE_PATH,
        (3, 4): F1394_DEVICE_PATH,
        (3, 5): USB_DEVICE_PATH,
        (3, 6): I2O_DEVICE_PATH,
        (3, 9): INFINIBAND_DEVICE_PATH,
        (3, 10): VENDOR_DEVICE_PATH,
        (3, 11): MAC_ADDR_DEVICE_PATH,
        (3, 12): IPv4_DEVICE_PATH,
        (3, 13): IPv6_DEVICE_PATH,
        (3, 14): UART_DEVICE_PATH,
        (3, 15): USB_CLASS_DEVICE_PATH,
        (3, 16): USB_WWID_DEVICE_PATH,
        (3, 17): DEVICE_LOGICAL_UNIT_DEVICE_PATH,
        (3, 18): SATA_DEVICE_PATH,
        (3, 19): ISCSI_DEVICE_PATH,
        (3, 20): VLAN_DEVICE_PATH,
        (3, 21): FIBRECHANNELEX_DEVICE_PATH,
        (3, 22): SASEX_DEVICE_PATH,
        (3, 23): NVME_NAMESPACE_DEVICE_PATH,
        (3, 24): DNS_DEVICE_PATH,
        (3, 25): UFS_DEVICE_PATH,
        (3, 26): SD_DEVICE_PATH,
        (3, 27): BLUETOOTH_DEVICE_PATH,
        (3, 28): WIFI_DEVICE_PATH,
        (3, 29): EMMC_DEVICE_PATH,
        (3, 30): BLUETOOTH_LE_DEVICE_PATH,
        (3, 31): DNS_DEVICE_PATH,
        (3, 32): NVDIMM_NAMESPACE_DEVICE_PATH,
        (3, 33): REST_SERVICE_DEVICE_PATH,
        (3, 34): REST_VENDOR_SERVICE_DEVICE_PATH,
        # MEDIA_DEVICE_PATH
        (4, 1): HARDDRIVE_DEVICE_PATH,
        (4, 2): CDROM_DEVICE_PATH,
        (4, 3): VENDOR_DEVICE_PATH,
        (4, 4): EFI_DEVICE_PATH,
        (4, 5): MEDIA_PROTOCOL_DEVICE_PATH,
        (4, 6): MEDIA_FW_VOL_FILEPATH_DEVICE_PATH,
        (4, 7): MEDIA_FW_VOL_DEVICE_PATH,
        (4, 8): MEDIA_RELATIVE_OFFSET_RANGE_DEVICE_PATH,
        (4, 9): MEDIA_RAM_DISK_DEVICE_PATH,
        # BBS_DEVICE_PATH
        (5, 1): BBS_BBS_DEVICE_PATH,
    }
    # Vendor nodes whose GUID selects a more specific ctype layout.
    guid_override_dict = {
        uuid.UUID('37499A9D-542F-4C89-A026-35DA142094E4'):
            UART_FLOW_CONTROL_DEVICE_PATH,
        uuid.UUID('D487DDB4-008B-11D9-AFDC-001083FFCA4D'):
            SAS_DEVICE_PATH,
    }
    def __init__(self, file, ptr=None, verbose=False, count=64):
        '''
        Convert ptr into a list of Device Path nodes. If verbose also hexdump
        extra data.
        '''
        self._file = file
        self._verbose = verbose
        if ptr is None:
            return
        try:
            instance = []
            # count bounds the walk in case the end node is never found.
            for _ in range(count):  # while True
                hdr, _ = self._ctype_read_ex(EFI_DEVICE_PATH, ptr)
                if hdr.Length < sizeof(EFI_DEVICE_PATH):
                    # Not a valid device path
                    break
                if hdr.Type == 0x7F:  # END_DEVICE_PATH_TYPE
                    self.DevicePath.append(instance)
                    if hdr.SubType == 0xFF:  # END_ENTIRE_DEVICE_PATH_SUBTYPE
                        break
                    if hdr.SubType == 0x01:  # END_INSTANCE_DEVICE_PATH_SUBTYPE
                        # start new device path instance
                        instance = []
                type_str = self.device_path_dict.get(
                    (hdr.Type, hdr.SubType), EFI_DEVICE_PATH)
                node, extra = self._ctype_read_ex(type_str, ptr, hdr.Length)
                if 'VENDOR_DEVICE_PATH' in type(node).__name__:
                    guid_type = self.guid_override_dict.get(
                        GuidNames.to_uuid(node.Guid), None)
                    if guid_type:
                        # use the ctype associated with the GUID
                        node, extra = self._ctype_read_ex(
                            guid_type, ptr, hdr.Length)
                instance.append((type(node).__name__, hdr.Type,
                                 hdr.SubType, hdr.Length, node, extra))
                ptr += hdr.Length
        except ValueError:
            # Ran off the end of readable memory; keep what was parsed.
            pass
    def __str__(self):
        '''Decode every parsed Device Path node as a string.'''
        if not self.valid():
            return '<class: EfiDevicePath>'
        result = ""
        for instance in self.DevicePath:
            for (Name, Type, SubType, Length, cnode, extra) in instance:
                result += f'{Name:s} {Type:2d}:{SubType:2d} Len: {Length:3d}\n'
                result += ctype_to_str(cnode, ' ', ['Reserved'])
                if self._verbose:
                    if extra is not None:
                        result += hexdump(extra, ' ')
            result += '\n'
        return result
    def valid(self):
        # True once at least one Device Path instance has been parsed.
        return True if self.DevicePath else False
    def device_path_node(self, address):
        '''Decode the single Device Path node at address; returns
        (header, node ctype, extra bytes) or (None, None, None).'''
        try:
            hdr, _ = self._ctype_read_ex(EFI_DEVICE_PATH, address)
            if hdr.Length < sizeof(EFI_DEVICE_PATH):
                return None, None, None
            type_str = self.device_path_dict.get(
                (hdr.Type, hdr.SubType), EFI_DEVICE_PATH)
            cnode, extra = self._ctype_read_ex(type_str, address, hdr.Length)
            return hdr, cnode, extra
        except ValueError:
            return None, None, None
    def device_path_node_str(self, address, verbose=False):
        '''Decode the single Device Path node at address as a string.'''
        hdr, cnode, extra = self.device_path_node(address)
        if hdr is None:
            return ''
        cname = type(cnode).__name__
        result = f'{cname:s} {hdr.Type:2d}:{hdr.SubType:2d} '
        result += f'Len: 0x{hdr.Length:03x}\n'
        result += ctype_to_str(cnode, ' ', ['Reserved'])
        if verbose:
            if extra is not None:
                result += hexdump(extra, ' ')
        return result
    def _ctype_read_ex(self, ctype_struct, offset=0, rsize=None):
        '''Read rsize bytes (default sizeof(ctype_struct)) at offset and
        return (decoded struct, bytes beyond the struct or None).'''
        if offset != 0:
            self._file.seek(offset)
        type_size = sizeof(ctype_struct)
        size = rsize if rsize else type_size
        data = self._file.read(size)
        if data is None:
            return None, None
        cdata = ctype_struct.from_buffer(bytearray(data))
        if size > type_size:
            return cdata, data[type_size:]
        else:
            return cdata, None
class EfiConfigurationTable:
    '''
    A class to abstract EFI Configuration Tables from gST->ConfigurationTable
    and gST->NumberOfTableEntries. Pass in the gST pointer from EFI,
    likely you need to look up this address after you have loaded symbols

    Attributes
    ----------
    ConfigurationTableDict : dictionary
        dictionary of EFI Configuration Table entries

    Methods
    -----------
    GetConfigTable(uuid)
        pass in VendorGuid and return VendorTable from EFI System Table
    DebugImageInfo(table)
        return tuple of load address and size of PE/COFF images
    '''
    # Class-level cache: upper-cased GUID string -> VendorTable pointer.
    ConfigurationTableDict = {}
    def __init__(self, file, gST_addr=None):
        self._file = file
        if gST_addr is None:
            # ToDo add code to search for gST via EFI_SYSTEM_TABLE_POINTER
            return
        gST = self._ctype_read(EFI_SYSTEM_TABLE, gST_addr)
        self.read_efi_config_table(gST.NumberOfTableEntries,
                                   gST.ConfigurationTable,
                                   self._ctype_read)
    @ classmethod
    def __str__(cls):
        '''return EFI_CONFIGURATION_TABLE entries as a string'''
        result = ""
        for key, value in cls.ConfigurationTableDict.items():
            result += f'{GuidNames().to_name(key):>37s}: '
            result += f'VendorTable = 0x{value:08x}\n'
        return result
    def _ctype_read(self, ctype_struct, offset=0):
        '''ctype worker function to read data'''
        if offset != 0:
            self._file.seek(offset)
        data = self._file.read(sizeof(ctype_struct))
        return ctype_struct.from_buffer(bytearray(data))
    @ classmethod
    def read_efi_config_table(cls, table_cnt, table_ptr, ctype_read):
        '''Create a dictionary of EFI Configuration table entries'''
        EmptryTables = EFI_CONFIGURATION_TABLE * table_cnt
        Tables = ctype_read(EmptryTables, table_ptr)
        for i in range(table_cnt):
            cls.ConfigurationTableDict[str(GuidNames.to_uuid(
                Tables[i].VendorGuid)).upper()] = Tables[i].VendorTable
        return cls.ConfigurationTableDict
    def GetConfigTable(self, uuid):
        ''' Return VendorTable for VendorGuid (uuid.UUID) or None'''
        # NOTE(review): the uuid parameter (a GUID string here) shadows the
        # uuid module within this method; renaming would alter the public
        # keyword interface, so it is left as-is.
        return self.ConfigurationTableDict.get(uuid.upper())
    def DebugImageInfo(self, table=None):
        '''
        Walk the debug image info table to find the LoadedImage protocols
        for all the loaded PE/COFF images and return a list of load address
        and image size.
        '''
        ImageLoad = []
        if table is None:
            # EFI_DEBUG_IMAGE_INFO_TABLE configuration table GUID.
            table = self.GetConfigTable('49152e77-1ada-4764-b7a2-7afefed95e8b')
        DbgInfoHdr = self._ctype_read(EFI_DEBUG_IMAGE_INFO_TABLE_HEADER, table)
        NormalImageArray = EFI_DEBUG_IMAGE_INFO * DbgInfoHdr.TableSize
        NormalImageArray = self._ctype_read(
            NormalImageArray, DbgInfoHdr.EfiDebugImageInfoTable)
        for i in range(DbgInfoHdr.TableSize):
            ImageInfo = self._ctype_read(
                EFI_DEBUG_IMAGE_INFO_NORMAL, NormalImageArray[i].NormalImage)
            LoadedImage = self._ctype_read(
                EFI_LOADED_IMAGE_PROTOCOL,
                ImageInfo.LoadedImageProtocolInstance)
            ImageLoad.append((LoadedImage.ImageBase, LoadedImage.ImageSize))
        return ImageLoad
class PeTeImage:
    '''
    A class to abstract PE/COFF or TE image processing via passing in a
    Python file like object. If you pass in an address the PE/COFF is parsed,
    if you pass in NULL for an address then you get a class instance you can
    use to search memory for a PE/COFF header given a pc value.

    Attributes
    ----------
    LoadAddress : int
        Load address of the PE/COFF image
    AddressOfEntryPoint : int
        Address of the Entry point of the PE/COFF image
    TextAddress : int
        Start of the PE/COFF text section
    DataAddress : int
        Start of the PE/COFF data section
    CodeViewPdb : str
        File name of the symbols file
    CodeViewUuid : uuid:UUID
        GUID for "RSDS" Debug Directory entry, or Mach-O UUID for "MTOC"

    Methods
    -----------
    pcToPeCoff(address, step, max_range, rom_range)
        Given an address(pc) find the PE/COFF image it is in
    sections_to_str()
        return a string giving info for all the PE/COFF sections
    '''
    def __init__(self, file, address=0):
        self._file = file
        # book keeping, but public
        self.PeHdr = None
        self.TeHdr = None
        self.Machine = None
        self.Subsystem = None
        self.CodeViewSig = None
        self.e_lfanew = 0
        self.NumberOfSections = 0
        self.Sections = None
        # Things debuggers may want to know
        self.LoadAddress = 0 if address is None else address
        self.EndLoadAddress = 0
        self.AddressOfEntryPoint = 0
        self.TextAddress = 0
        self.DataAddress = 0
        self.CodeViewPdb = None
        self.CodeViewUuid = None
        # TE images have their original headers stripped; TeAdjust is the
        # (negative of the) stripped size used to rebase RVAs.
        self.TeAdjust = 0
        # PE/COFF data-directory index -> human-readable name.
        self.dir_name = {
            0: 'Export Table',
            1: 'Import Table',
            2: 'Resource Table',
            3: 'Exception Table',
            4: 'Certificate Table',
            5: 'Relocation Table',
            6: 'Debug',
            7: 'Architecture',
            8: 'Global Ptr',
            9: 'TLS Table',
            10: 'Load Config Table',
            11: 'Bound Import',
            12: 'IAT',
            13: 'Delay Import Descriptor',
            14: 'CLR Runtime Header',
            15: 'Reserved',
        }
        if address is not None:
            if self.maybe():
                self.parse()
    def __str__(self):
        if self.PeHdr is None and self.TeHdr is None:
            # no PE/COFF header found
            return "<class: PeTeImage>"
        if self.CodeViewPdb:
            pdb = f'{self.Machine}`{self.CodeViewPdb}'
        else:
            pdb = 'No Debug Info:'
        if self.CodeViewUuid:
            guid = f'{self.CodeViewUuid}:'
        else:
            guid = ''
        slide = f'slide = {self.TeAdjust:d} ' if self.TeAdjust != 0 else ' '
        res = guid + f'{pdb} load = 0x{self.LoadAddress:08x} ' + slide
        return res
    def _seek(self, offset):
        """
        seek() relative to start of PE/COFF (TE) image
        """
        self._file.seek(self.LoadAddress + offset)
    def _read_offset(self, size, offset=None):
        """
        read() relative to start of PE/COFF (TE) image
        if offset is not None then seek() before the read
        """
        if offset is not None:
            self._seek(offset)
        return self._file.read(size)
    def _read_ctype(self, ctype_struct, offset=None):
        # Decode a ctype struct at offset relative to the image start.
        data = self._read_offset(sizeof(ctype_struct), offset)
        return ctype_struct.from_buffer(bytearray(data), 0)
    def _unsigned(self, i):
        """return a 32-bit unsigned int (UINT32) """
        return int.from_bytes(i, byteorder='little', signed=False)
    def pcToPeCoff(self,
                   address,
                   step=None,
                   max_range=None,
                   rom_range=[0xFE800000, 0xFFFFFFFF]):
        """
        Given an address search backwards for PE/COFF (TE) header
        For DXE 4K is probably OK
        For PEI you might have to search every 4 bytes.
        """
        # NOTE(review): mutable default argument rom_range is safe here
        # because it is only read, never modified.
        if step is None:
            step = 0x1000
        if max_range is None:
            max_range = 0x200000
        if address in range(*rom_range):
            # The XIP code in the ROM ends up 4 byte aligned.
            step = 4
            max_range = min(max_range, 0x100000)
        # Align address to page boundary for memory image search.
        address = address & ~(step-1)
        # Search every step backward
        offset_range = list(range(0, min(max_range, address), step))
        for offset in offset_range:
            if self.maybe(address - offset):
                if self.parse():
                    return True
        return False
    def maybe(self, offset=None):
        """Probe to see if this offset is likely a PE/COFF or TE file """
        self.LoadAddress = 0
        # 'MZ' is a DOS/PE image; 'VZ' is an edk2 TE image.
        e_magic = self._read_offset(2, offset)
        header_ok = e_magic == b'MZ' or e_magic == b'VZ'
        if offset is not None and header_ok:
            self.LoadAddress = offset
        return header_ok
    def parse(self):
        """Parse PE/COFF (TE) debug directory entry """
        DosHdr = self._read_ctype(EFI_IMAGE_DOS_HEADER, 0)
        if DosHdr.e_magic == self._unsigned(b'VZ'):
            # TE image
            self.TeHdr = self._read_ctype(EFI_TE_IMAGE_HEADER, 0)
            # StrippedSize bytes were removed ahead of the TE header, so
            # RVAs must be rebased by this (negative) adjustment.
            self.TeAdjust = sizeof(self.TeHdr) - self.TeHdr.StrippedSize
            self.Machine = image_machine_dict.get(self.TeHdr.Machine, None)
            self.Subsystem = self.TeHdr.Subsystem
            self.AddressOfEntryPoint = self.TeHdr.AddressOfEntryPoint
            debug_dir_size = self.TeHdr.DataDirectoryDebug.Size
            debug_dir_offset = (self.TeAdjust +
                                self.TeHdr.DataDirectoryDebug.VirtualAddress)
        else:
            if DosHdr.e_magic == self._unsigned(b'MZ'):
                self.e_lfanew = DosHdr.e_lfanew
            else:
                self.e_lfanew = 0
            # Read as PE32+ first; re-read as PE32 if the magic says so.
            self.PeHdr = self._read_ctype(
                EFI_IMAGE_NT_HEADERS64, self.e_lfanew)
            if self.PeHdr.Signature != self._unsigned(b'PE\0\0'):
                return False
            if self.PeHdr.OptionalHeader.Magic == \
                    EFI_IMAGE_NT_OPTIONAL_HDR32_MAGIC:
                self.PeHdr = self._read_ctype(
                    EFI_IMAGE_NT_HEADERS32, self.e_lfanew)
            if self.PeHdr.OptionalHeader.NumberOfRvaAndSizes <= \
                    DIRECTORY_DEBUG:
                return False
            self.Machine = image_machine_dict.get(
                self.PeHdr.FileHeader.Machine, None)
            self.Subsystem = self.PeHdr.OptionalHeader.Subsystem
            self.AddressOfEntryPoint = \
                self.PeHdr.OptionalHeader.AddressOfEntryPoint
            self.TeAdjust = 0
            debug_dir_size = self.PeHdr.OptionalHeader.DataDirectory[
                DIRECTORY_DEBUG].Size
            debug_dir_offset = self.PeHdr.OptionalHeader.DataDirectory[
                DIRECTORY_DEBUG].VirtualAddress
        # Only accept unknown (0) or the EFI subsystems (10, 11, 12).
        if self.Machine is None or self.Subsystem not in [0, 10, 11, 12]:
            return False
        self.AddressOfEntryPoint += self.LoadAddress
        self.sections()
        return self.processDebugDirEntry(debug_dir_offset, debug_dir_size)
    def sections(self):
        '''Parse the PE/COFF (TE) section table'''
        if self.Sections is not None:
            return
        elif self.TeHdr is not None:
            self.NumberOfSections = self.TeHdr.NumberOfSections
            offset = sizeof(EFI_TE_IMAGE_HEADER)
        elif self.PeHdr is not None:
            self.NumberOfSections = self.PeHdr.FileHeader.NumberOfSections
            # Section table follows Signature + file header + optional hdr.
            offset = sizeof(c_uint32) + \
                sizeof(EFI_IMAGE_FILE_HEADER)
            offset += self.PeHdr.FileHeader.SizeOfOptionalHeader
            offset += self.e_lfanew
        else:
            return
        self.Sections = EFI_IMAGE_SECTION_HEADER * self.NumberOfSections
        self.Sections = self._read_ctype(self.Sections, offset)
        for i in range(self.NumberOfSections):
            name = str(self.Sections[i].Name, 'ascii', 'ignore')
            addr = self.Sections[i].VirtualAddress
            addr += self.LoadAddress + self.TeAdjust
            if name == '.text':
                self.TextAddress = addr
            elif name == '.data':
                self.DataAddress = addr
            # Track the highest section end as the image's end address.
            end_addr = addr + self.Sections[i].VirtualSize - 1
            if end_addr > self.EndLoadAddress:
                self.EndLoadAddress = end_addr
    def sections_to_str(self):
        # return text summary of sections
        # name virt addr (virt size) flags:Characteristics
        result = ''
        for i in range(self.NumberOfSections):
            name = str(self.Sections[i].Name, 'ascii', 'ignore')
            result += f'{name:8s} '
            result += f'0x{self.Sections[i].VirtualAddress:08X} '
            result += f'(0x{self.Sections[i].VirtualSize:05X}) '
            result += f'flags:0x{self.Sections[i].Characteristics:08X}\n'
        return result
    def directory_to_str(self):
        # Summarize the data directories present in the image.
        result = ''
        if self.TeHdr:
            debug_size = self.TeHdr.DataDirectoryDebug.Size
            if debug_size > 0:
                debug_offset = (self.TeAdjust
                                + self.TeHdr.DataDirectoryDebug.VirtualAddress)
                # NOTE(review): '0x{debug_size}' lacks an :X conversion, so
                # the size prints as decimal digits after a '0x' prefix;
                # same for relocation_size below.
                result += f"Debug 0x{debug_offset:08X} 0x{debug_size}\n"
            relocation_size = self.TeHdr.DataDirectoryBaseReloc.Size
            if relocation_size > 0:
                relocation_offset = (
                    self.TeAdjust
                    + self.TeHdr.DataDirectoryBaseReloc.VirtualAddress)
                result += f'Relocation 0x{relocation_offset:08X} '
                result += f' 0x{relocation_size}\n'
        elif self.PeHdr:
            for i in range(self.PeHdr.OptionalHeader.NumberOfRvaAndSizes):
                size = self.PeHdr.OptionalHeader.DataDirectory[i].Size
                if size == 0:
                    continue
                virt_addr = self.PeHdr.OptionalHeader.DataDirectory[
                    i].VirtualAddress
                name = self.dir_name.get(i, '?')
                result += f'{name:s} 0x{virt_addr:08X} 0x{size:X}\n'
        return result
    def processDebugDirEntry(self, virt_address, virt_size):
        """Process PE/COFF Debug Directory Entry"""
        if (virt_address == 0 or
                virt_size < sizeof(EFI_IMAGE_DEBUG_DIRECTORY_ENTRY)):
            return False
        data = bytearray(self._read_offset(virt_size, virt_address))
        for offset in range(0,
                            virt_size,
                            sizeof(EFI_IMAGE_DEBUG_DIRECTORY_ENTRY)):
            DirectoryEntry = EFI_IMAGE_DEBUG_DIRECTORY_ENTRY.from_buffer(
                data[offset:])
            # Type 2 == EFI_IMAGE_DEBUG_TYPE_CODEVIEW
            if DirectoryEntry.Type != 2:
                continue
            entry = self._read_offset(
                DirectoryEntry.SizeOfData, DirectoryEntry.RVA + self.TeAdjust)
            self.CodeViewSig = entry[:4]
            if self.CodeViewSig == b'MTOC':
                self.CodeViewUuid = uuid.UUID(bytes_le=entry[4:4+16])
                PdbOffset = 20
            elif self.CodeViewSig == b'RSDS':
                self.CodeViewUuid = uuid.UUID(bytes_le=entry[4:4+16])
                PdbOffset = 24
            elif self.CodeViewSig == b'NB10':
                PdbOffset = 16
            else:
                continue
            # can't find documentation about Pdb string encoding?
            # guessing utf-8 since that will match file systems in macOS
            # and Linux Windows is UTF-16, or ANSI adjusted for local.
            # We might need a different value for Windows here?
            self.CodeViewPdb = entry[PdbOffset:].split(b'\x00')[
                0].decode('utf-8')
            return True
        return False
def main():
    '''Process arguments as PE/COFF files'''
    for path in sys.argv[1:]:
        with open(path, 'rb') as pe_file:
            image = PeTeImage(pe_file)
            print(image)
            summary = (
                f'EntryPoint = 0x{image.AddressOfEntryPoint:08x} '
                f'TextAddress = 0x{image.TextAddress:08x} '
                f'DataAddress = 0x{image.DataAddress:08x}'
            )
            print(summary)
            print(image.sections_to_str())
            print('Data Directories:')
            print(image.directory_to_str())


if __name__ == "__main__":
    main()
| edk2-master | BaseTools/Scripts/efi_debugging.py |
## @file
# Retrieves the people to request review from on submission of a commit.
#
# Copyright (c) 2019, Linaro Ltd. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
from collections import defaultdict
from collections import OrderedDict
import argparse
import os
import re
import SetupGit
# Tag parsers for Maintainers.txt entries ("X:", "F:", "M:", ...).
# Each regex captures the value after the tag into a named group whose
# name matches the dictionary key; '\r*$' tolerates CRLF checkouts.
EXPRESSIONS = {
    'exclude': re.compile(r'^X:\s*(?P<exclude>.*?)\r*$'),
    'file': re.compile(r'^F:\s*(?P<file>.*?)\r*$'),
    'list': re.compile(r'^L:\s*(?P<list>.*?)\r*$'),
    'maintainer': re.compile(r'^M:\s*(?P<maintainer>.*?)\r*$'),
    'reviewer': re.compile(r'^R:\s*(?P<reviewer>.*?)\r*$'),
    'status': re.compile(r'^S:\s*(?P<status>.*?)\r*$'),
    'tree': re.compile(r'^T:\s*(?P<tree>.*?)\r*$'),
    'webpage': re.compile(r'^W:\s*(?P<webpage>.*?)\r*$')
}
def printsection(section):
    """Prints out the dictionary describing a Maintainers.txt section."""
    print('===')
    for key, items in section.items():
        print("Key: %s" % key)
        for item in items:
            print(' %s' % item)
def pattern_to_regex(pattern):
    """Takes a string containing regular UNIX path wildcards
    and returns a string suitable for matching with regex."""
    # Escape literal dots, then translate the glob wildcards.
    translated = (pattern.replace('.', r'\.')
                         .replace('?', r'.')
                         .replace('*', r'.*'))
    if translated.endswith('/'):
        # A directory pattern covers everything below it.
        return translated + r'.*'
    if translated.endswith('.*'):
        # A trailing '*' glob matches entries in this directory only,
        # not in subdirectories.
        return translated[:-2] + r'(?!.*?/.*?)'
    return translated
def path_in_section(path, section):
    """Returns True or False indicating whether the path is covered by
    the current section.

    A path is covered when it matches one of the section's F: patterns
    and none of its X: exclude patterns.
    """
    if 'file' not in section:
        return False

    for pattern in section['file']:
        if re.match(pattern_to_regex(pattern), path):
            # Check if there is an exclude pattern that applies.
            # .get() avoids a KeyError (or defaultdict auto-vivification)
            # when the section has no X: entries.
            for exclude in section.get('exclude', []):
                if re.match(pattern_to_regex(exclude), path):
                    return False
            return True

    return False
def get_section_maintainers(path, section):
    """Returns a list with email addresses to any M: and R: entries
       matching the provided path in the provided section.

    Returns:
        (maintainers, lists) - two lists of address strings.
    """
    maintainers = []
    lists = []
    # Statuses that do not warrant a warning when matched.
    nowarn_status = ['Supported', 'Maintained']

    if path_in_section(path, section):
        for status in section['status']:
            if status not in nowarn_status:
                print('WARNING: Maintained status for "%s" is \'%s\'!' % (path, status))
        for address in section['maintainer'], section['reviewer']:
            # Convert to list if necessary
            if isinstance(address, list):
                maintainers += address
            else:
                # Bug fix: a single (non-list) M:/R: entry belongs with the
                # maintainers, not with the mailing lists.
                maintainers += [address]
        for address in section['list']:
            # Convert to list if necessary
            if isinstance(address, list):
                lists += address
            else:
                lists += [address]

    return maintainers, lists
def get_maintainers(path, sections, level=0):
    """For 'path', iterates over all sections, returning maintainers
       for matching ones.

    Args:
        path:     repo-relative path (or '<default>') to look up.
        sections: list of parsed Maintainers.txt section dicts.
        level:    recursion depth guard; 1 when resolving '<default>'.

    Returns:
        Combined list of maintainer/reviewer and mailing-list addresses,
        or None when nothing matched (not even the project default).
    """
    maintainers = []
    lists = []
    for section in sections:
        tmp_maint, tmp_lists = get_section_maintainers(path, section)
        if tmp_maint:
            maintainers += tmp_maint
        if tmp_lists:
            lists += tmp_lists

    if not maintainers:
        # If no match found, look for match for (nonexistent) file
        # REPO.working_dir/<default>
        print('"%s": no maintainers found, looking for default' % path)
        if level == 0:
            maintainers = get_maintainers('<default>', sections, level=level + 1)
        else:
            # Already resolving '<default>'; give up instead of recursing.
            print("No <default> maintainers set for project.")

    if not maintainers:
        return None

    return maintainers + lists
def parse_maintainers_line(line):
    """Parse one line of Maintainers.txt, returning any match group and its key."""
    for tag, regex in EXPRESSIONS.items():
        mo = regex.match(line)
        if mo is not None:
            return tag, mo.group(tag)
    # Not a recognized tag line.
    return None, None
def parse_maintainers_file(filename):
    """Parse the Maintainers.txt from top-level of repo and
       return a list containing dictionaries of all sections."""
    sectionlist = []
    section = defaultdict(list)
    with open(filename, 'r') as text:
        line = text.readline()
        while line:
            key, value = parse_maintainers_line(line)
            if key and value:
                section[key].append(value)

            line = text.readline()
            # A non-tag line or end of file terminates the current section.
            if not key or not value or not line:
                if section:
                    # Snapshot the section; the defaultdict is reused.
                    sectionlist.append(section.copy())
                    section.clear()
    return sectionlist
def get_modified_files(repo, args):
    """Returns a list of the files modified by the commit specified in 'args'."""
    commit_obj = repo.commit(args.commit)
    # GitPython: stats.files maps each touched path to its diff statistics;
    # iterating the mapping yields the file names.
    return commit_obj.stats.files
if __name__ == '__main__':
    # Entry point: print the maintainers/reviewers/lists to cc for a given
    # commit (default HEAD), or for an explicit path when --lookup is used.
    PARSER = argparse.ArgumentParser(
        description='Retrieves information on who to cc for review on a given commit')
    PARSER.add_argument('commit',
                        action="store",
                        help='git revision to examine (default: HEAD)',
                        nargs='?',
                        default='HEAD')
    PARSER.add_argument('-l', '--lookup',
                        help='Find section matches for path LOOKUP',
                        required=False)
    ARGS = PARSER.parse_args()

    REPO = SetupGit.locate_repo()
    CONFIG_FILE = os.path.join(REPO.working_dir, 'Maintainers.txt')

    SECTIONS = parse_maintainers_file(CONFIG_FILE)

    if ARGS.lookup:
        # Normalize Windows-style separators so patterns match.
        FILES = [ARGS.lookup.replace('\\','/')]
    else:
        FILES = get_modified_files(REPO, ARGS)

    ADDRESSES = []
    for file in FILES:
        print(file)
        addresslist = get_maintainers(file, SECTIONS)
        if addresslist:
            ADDRESSES += addresslist

    # Deduplicate while preserving first-seen order.
    for address in list(OrderedDict.fromkeys(ADDRESSES)):
        # Reduce "Name <email> trailing-comment" to just "Name <email>".
        if '<' in address and '>' in address:
            address = address.split('>', 1)[0] + '>'
        print(' %s' % address)
| edk2-master | BaseTools/Scripts/GetMaintainer.py |
#!/usr/bin/python3
'''
Copyright (c) Apple Inc. 2021
SPDX-License-Identifier: BSD-2-Clause-Patent
Example usage:
OvmfPkg/build.sh qemu -gdb tcp::9000
lldb -o "gdb-remote localhost:9000" -o "command script import efi_lldb.py"
'''
import optparse
import shlex
import subprocess
import uuid
import sys
import os
from pathlib import Path
from efi_debugging import EfiDevicePath, EfiConfigurationTable, EfiTpl
from efi_debugging import EfiHob, GuidNames, EfiStatusClass, EfiBootMode
from efi_debugging import PeTeImage, patch_ctypes
try:
    # Just try for LLDB in case PYTHONPATH is already correctly setup
    import lldb
except ImportError:
    try:
        # Ask Xcode's lldb where its Python bindings live and retry.
        env = os.environ.copy()
        env['LLDB_DEFAULT_PYTHON_VERSION'] = str(sys.version_info.major)
        lldb_python_path = subprocess.check_output(
            ["xcrun", "lldb", "-P"], env=env).decode("utf-8").strip()
        sys.path.append(lldb_python_path)
        import lldb
    except ValueError:
        # NOTE(review): a failing xcrun raises CalledProcessError/OSError and
        # a failed import raises ImportError, none of which is ValueError --
        # confirm this fallback can actually trigger.
        print("Couldn't find LLDB.framework from lldb -P")
        print("PYTHONPATH should match the currently selected lldb")
        sys.exit(-1)
class LldbFileObject(object):
    '''
    Class that fakes out file object to abstract lldb from the generic code.
    For lldb this is memory so we don't have a concept of the end of the file.
    '''

    def __init__(self, process):
        # _exe_ctx is lldb.SBExecutionContext
        self._process = process
        # Current position; doubles as an absolute target memory address.
        self._offset = 0
        self._SBError = lldb.SBError()

    def tell(self):
        """Return the current position (absolute memory address)."""
        return self._offset

    def read(self, size=-1):
        """Read 'size' bytes of target memory at the current position.

        Raises MemoryError when lldb cannot read the requested range.
        """
        if size == -1:
            # arbitrary default size
            size = 0x1000000

        data = self._process.ReadMemory(self._offset, size, self._SBError)
        if self._SBError.fail:
            raise MemoryError(
                f'lldb could not read memory 0x{size:x} '
                f' bytes from 0x{self._offset:08x}')
        else:
            return data

    def readable(self):
        return True

    def seek(self, offset, whence=0):
        """Reposition like io.IOBase.seek; seeking from the end is not
        possible since target memory has no end-of-file."""
        if whence == 0:
            self._offset = offset
        elif whence == 1:
            self._offset += offset
        else:
            # whence == 2 is seek from end
            raise NotImplementedError

    def seekable(self):
        return True

    def write(self, data):
        """Write 'data' into target memory at the current position.

        Raises MemoryError when lldb cannot write the range.
        """
        result = self._process.WriteMemory(self._offset, data, self._SBError)
        if self._SBError.fail:
            raise MemoryError(
                f'lldb could not write memory to 0x{self._offset:08x}')
        return result

    def writable(self):
        return True

    def truncate(self, size=None):
        # Target memory cannot be truncated.
        raise NotImplementedError

    def flush(self):
        # Writes go straight to the target; there is nothing to flush.
        raise NotImplementedError

    def fileno(self):
        # There is no underlying OS file descriptor.
        raise NotImplementedError
class EfiSymbols:
    """
    Class to manage EFI Symbols
    You need to pass file, and exe_ctx to load symbols.
    You can print(EfiSymbols()) to see the currently loaded symbols
    """

    # Class-level registry shared by every instance:
    #   LoadAddress -> (PeTeImage, lldb.SBModule)
    loaded = {}
    # Header-probe parameters; see configure_search().
    stride = None
    range = None
    verbose = False

    def __init__(self, target=None):
        if target:
            # Cache the target and a memory-backed file object on the class
            # so the classmethods below can use them.
            EfiSymbols.target = target
            EfiSymbols._file = LldbFileObject(target.process)

    # NOTE(review): __str__ as a classmethod is unusual, but instance lookup
    # still resolves it, so print(EfiSymbols()) works.
    @ classmethod
    def __str__(cls):
        return ''.join(f'{pecoff}\n' for (pecoff, _) in cls.loaded.values())

    @ classmethod
    def configure_search(cls, stride, range, verbose=False):
        """Set the alignment (stride) and distance (range) used when probing
        backwards for a PE/COFF or TE header."""
        cls.stride = stride
        cls.range = range
        cls.verbose = verbose

    @ classmethod
    def clear(cls):
        # Forget every image previously registered.
        cls.loaded = {}

    @ classmethod
    def add_symbols_for_pecoff(cls, pecoff):
        '''Tell lldb the location of the .text and .data sections.'''

        if pecoff.LoadAddress in cls.loaded:
            return 'Already Loaded: '

        # Try to find the module by build UUID first, then by PDB name.
        module = cls.target.AddModule(None, None, str(pecoff.CodeViewUuid))
        if not module:
            module = cls.target.AddModule(pecoff.CodeViewPdb,
                                          None,
                                          str(pecoff.CodeViewUuid))
        if module.IsValid():
            SBError = cls.target.SetModuleLoadAddress(
                module, pecoff.LoadAddress + pecoff.TeAdjust)
            if SBError.success:
                cls.loaded[pecoff.LoadAddress] = (pecoff, module)
                return ''

        return 'Symbols NOT FOUND: '

    @ classmethod
    def address_to_symbols(cls, address, reprobe=False):
        '''
        Given an address search backwards for a PE/COFF (or TE) header
        and load symbols. Return a status string.
        '''
        if not isinstance(address, int):
            address = int(address)

        pecoff, _ = cls.address_in_loaded_pecoff(address)
        if not reprobe and pecoff is not None:
            # skip the probe of the remote
            return f'{pecoff} is already loaded'

        pecoff = PeTeImage(cls._file, None)
        if pecoff.pcToPeCoff(address, cls.stride, cls.range):
            res = cls.add_symbols_for_pecoff(pecoff)
            return f'{res}{pecoff}'
        else:
            return f'0x{address:08x} not in a PE/COFF (or TE) image'

    @ classmethod
    def address_in_loaded_pecoff(cls, address):
        """Linear-search the registry for an image whose load range contains
        'address'; return (pecoff, module) or (None, None)."""
        if not isinstance(address, int):
            address = int(address)

        for (pecoff, module) in cls.loaded.values():
            if (address >= pecoff.LoadAddress and
                    address <= pecoff.EndLoadAddress):
                return pecoff, module

        return None, None

    @ classmethod
    def unload_symbols(cls, address):
        """Remove the image covering 'address' from lldb and the registry;
        return a status string."""
        pecoff, module = cls.address_in_loaded_pecoff(address)
        if module:
            name = str(module)
            cls.target.ClearModuleLoadAddress(module)
            cls.target.RemoveModule(module)
            del cls.loaded[pecoff.LoadAddress]
            return f'{name:s} was unloaded'
        return f'0x{address:x} was not in a loaded image'
def arg_to_address(frame, arg):
    ''' convert an lldb command arg into a memory address (addr_t)'''
    if arg is None:
        return None

    # Let lldb evaluate the argument so registers, variables and arbitrary
    # expressions all work, not just numeric literals.
    arg_str = arg if isinstance(arg, str) else str(arg)
    SBValue = frame.EvaluateExpression(arg_str)
    if SBValue.error.fail:
        # Could not evaluate; hand the argument back unchanged.
        return arg

    if (SBValue.TypeIsPointerType() or
            SBValue.value_type == lldb.eValueTypeRegister or
            SBValue.value_type == lldb.eValueTypeRegisterSet or
            SBValue.value_type == lldb.eValueTypeConstResult):
        # The value itself is the address.
        try:
            addr = SBValue.GetValueAsAddress()
        except ValueError:
            addr = SBValue.unsigned
    else:
        # A non-pointer lvalue: take the address of the value instead.
        try:
            addr = SBValue.address_of.GetValueAsAddress()
        except ValueError:
            addr = SBValue.address_of.unsigned

    return addr
def arg_to_data(frame, arg):
    '''convert an lldb command arg into a data value (uint32_t/uint64_t)

    The argument is evaluated by lldb, so registers, variables and
    expressions are all accepted; the unsigned result is returned.
    '''
    # Bug fix: the original assigned arg_str = str(str) (stringifying the
    # str type itself) and left arg_str unbound when 'arg' was already a
    # string, raising NameError on the next line.
    arg_str = arg if isinstance(arg, str) else str(arg)
    SBValue = frame.EvaluateExpression(arg_str)
    return SBValue.unsigned
class EfiDevicePathCommand:
    """lldb command ('devicepath') that parses and prints EFI device paths."""

    def create_options(self):
        ''' standard lldb command help/options parser'''
        usage = "usage: %prog [options]"
        # Bug fix: the description previously read "Command that can EFI
        # Config Tables" -- a copy/paste from EfiTableCommand.
        description = '''Command that can display EFI device paths
'''

        # Pass add_help_option = False, since this keeps the command in line
        # with lldb commands, and we wire up "help command" to work by
        # providing the long & short help methods below.
        self.parser = optparse.OptionParser(
            description=description,
            prog='devicepath',
            usage=usage,
            add_help_option=False)

        self.parser.add_option(
            '-v',
            '--verbose',
            action='store_true',
            dest='verbose',
            help='hex dump extra data',
            default=False)

        self.parser.add_option(
            '-n',
            '--node',
            action='store_true',
            dest='node',
            help='dump a single device path node',
            default=False)

        self.parser.add_option(
            '-h',
            '--help',
            action='store_true',
            dest='help',
            help='Show help for the command',
            default=False)

    def get_short_help(self):
        '''standard lldb function method'''
        # Bug fix: used to say "Display EFI Tables" (copy/paste).
        return "Display EFI device paths"

    def get_long_help(self):
        '''standard lldb function method'''
        return self.help_string

    def __init__(self, debugger, internal_dict):
        '''standard lldb function method'''
        self.create_options()
        self.help_string = self.parser.format_help()

    def __call__(self, debugger, command, exe_ctx, result):
        '''standard lldb function method'''
        # Use the Shell Lexer to properly parse up command options just like a
        # shell would
        command_args = shlex.split(command)
        try:
            (options, args) = self.parser.parse_args(command_args)
            dev_list = []
            for arg in args:
                dev_list.append(arg_to_address(exe_ctx.frame, arg))
        except ValueError:
            # if you don't handle exceptions, passing an incorrect argument
            # to the OptionParser will cause LLDB to exit (courtesy of
            # OptParse dealing with argument errors by throwing SystemExit)
            result.SetError("option parsing failed")
            return

        if options.help:
            self.parser.print_help()
            return

        file = LldbFileObject(exe_ctx.process)

        for dev_addr in dev_list:
            if options.node:
                # Dump just one node instead of walking the whole path.
                print(EfiDevicePath(file).device_path_node_str(
                    dev_addr, options.verbose))
            else:
                device_path = EfiDevicePath(file, dev_addr, options.verbose)
                if device_path.valid():
                    print(device_path)
class EfiHobCommand:
    """lldb command ('hob') that dumps EFI Hand-Off Blocks."""

    def create_options(self):
        ''' standard lldb command help/options parser'''
        usage = "usage: %prog [options]"
        # Bug fix: was the garbled "Command that can EFI dump EFI HOBs".
        description = '''Command that can dump EFI HOBs'''

        # Pass add_help_option = False, since this keeps the command in line
        # with lldb commands, and we wire up "help command" to work by
        # providing the long & short help methods below.
        self.parser = optparse.OptionParser(
            description=description,
            # Bug fix: prog used to say 'table' (copy/paste from
            # EfiTableCommand), which made the usage line misleading.
            prog='hob',
            usage=usage,
            add_help_option=False)

        self.parser.add_option(
            '-a',
            '--address',
            type="int",
            dest='address',
            help='Parse HOBs from address',
            default=None)

        self.parser.add_option(
            '-t',
            '--type',
            type="int",
            dest='type',
            help='Only dump HOBS of his type',
            default=None)

        self.parser.add_option(
            '-v',
            '--verbose',
            action='store_true',
            dest='verbose',
            help='hex dump extra data',
            default=False)

        self.parser.add_option(
            '-h',
            '--help',
            action='store_true',
            dest='help',
            help='Show help for the command',
            default=False)

    def get_short_help(self):
        '''standard lldb function method'''
        return "Display EFI Hobs"

    def get_long_help(self):
        '''standard lldb function method'''
        return self.help_string

    def __init__(self, debugger, internal_dict):
        '''standard lldb function method'''
        self.create_options()
        self.help_string = self.parser.format_help()

    def __call__(self, debugger, command, exe_ctx, result):
        '''standard lldb function method'''
        # Use the Shell Lexer to properly parse up command options just like a
        # shell would
        command_args = shlex.split(command)
        try:
            (options, _) = self.parser.parse_args(command_args)
        except ValueError:
            # if you don't handle exceptions, passing an incorrect argument
            # to the OptionParser will cause LLDB to exit (courtesy of
            # OptParse dealing with argument errors by throwing SystemExit)
            result.SetError("option parsing failed")
            return

        if options.help:
            self.parser.print_help()
            return

        address = arg_to_address(exe_ctx.frame, options.address)

        file = LldbFileObject(exe_ctx.process)
        hob = EfiHob(file, address, options.verbose).get_hob_by_type(
            options.type)
        print(hob)
class EfiTableCommand:
    # lldb command ('table') that prints the EFI configuration table,
    # located via the gST (EFI System Table) global.

    def create_options(self):
        ''' standard lldb command help/options parser'''
        usage = "usage: %prog [options]"
        description = '''Command that can display EFI Config Tables
'''

        # Pass add_help_option = False, since this keeps the command in line
        # with lldb commands, and we wire up "help command" to work by
        # providing the long & short help methods below.
        self.parser = optparse.OptionParser(
            description=description,
            prog='table',
            usage=usage,
            add_help_option=False)

        self.parser.add_option(
            '-h',
            '--help',
            action='store_true',
            dest='help',
            help='Show help for the command',
            default=False)

    def get_short_help(self):
        '''standard lldb function method'''
        return "Display EFI Tables"

    def get_long_help(self):
        '''standard lldb function method'''
        return self.help_string

    def __init__(self, debugger, internal_dict):
        '''standard lldb function method'''
        self.create_options()
        self.help_string = self.parser.format_help()

    def __call__(self, debugger, command, exe_ctx, result):
        '''standard lldb function method'''
        # Use the Shell Lexer to properly parse up command options just like a
        # shell would
        command_args = shlex.split(command)
        try:
            (options, _) = self.parser.parse_args(command_args)
        except ValueError:
            # if you don't handle exceptions, passing an incorrect argument
            # to the OptionParser will cause LLDB to exit (courtesy of
            # OptParse dealing with argument errors by throwing SystemExit)
            result.SetError("option parsing failed")
            return

        if options.help:
            self.parser.print_help()
            return

        # The config table is reached through the gST global, so its symbols
        # must already be loaded.
        gST = exe_ctx.target.FindFirstGlobalVariable('gST')
        if gST.error.fail:
            print('Error: This command requires symbols for gST to be loaded')
            return

        file = LldbFileObject(exe_ctx.process)
        table = EfiConfigurationTable(file, gST.unsigned)
        if table:
            print(table, '\n')
class EfiGuidCommand:
    # lldb command ('guid') that lists known EFI GUIDs, resolves a single
    # GUID (registry format, C struct format, or symbolic name), or
    # generates a fresh one.

    def create_options(self):
        ''' standard lldb command help/options parser'''
        usage = "usage: %prog [options]"
        description = '''
Command that can display all EFI GUID's or give info on a
specific GUID's
'''
        self.parser = optparse.OptionParser(
            description=description,
            prog='guid',
            usage=usage,
            add_help_option=False)

        self.parser.add_option(
            '-n',
            '--new',
            action='store_true',
            dest='new',
            help='Generate a new GUID',
            default=False)

        self.parser.add_option(
            '-v',
            '--verbose',
            action='store_true',
            dest='verbose',
            help='Also display GUID C structure values',
            default=False)

        self.parser.add_option(
            '-h',
            '--help',
            action='store_true',
            dest='help',
            help='Show help for the command',
            default=False)

    def get_short_help(self):
        '''standard lldb function method'''
        return "Display EFI GUID's"

    def get_long_help(self):
        '''standard lldb function method'''
        return self.help_string

    def __init__(self, debugger, internal_dict):
        '''standard lldb function method'''
        self.create_options()
        self.help_string = self.parser.format_help()

    def __call__(self, debugger, command, exe_ctx, result):
        '''standard lldb function method'''
        # Use the Shell Lexer to properly parse up command options just like a
        # shell would
        command_args = shlex.split(command)
        try:
            (options, args) = self.parser.parse_args(command_args)
            if len(args) >= 1:
                # guid { 0x414e6bdd, 0xe47b, 0x47cc,
                #        { 0xb2, 0x44, 0xbb, 0x61, 0x02, 0x0c,0xf5, 0x16 }}
                # this generates multiple args
                arg = ' '.join(args)
        except ValueError:
            # if you don't handle exceptions, passing an incorrect argument
            # to the OptionParser will cause LLDB to exit (courtesy of
            # OptParse dealing with argument errors by throwing SystemExit)
            result.SetError("option parsing failed")
            return

        if options.help:
            self.parser.print_help()
            return

        if options.new:
            # Generate and print a fresh random GUID in both formats.
            guid = uuid.uuid4()
            print(str(guid).upper())
            print(GuidNames.to_c_guid(guid))
            return

        if len(args) > 0:
            if GuidNames.is_guid_str(arg):
                # guid 05AD34BA-6F02-4214-952E-4DA0398E2BB9
                key = arg.lower()
                name = GuidNames.to_name(key)
            elif GuidNames.is_c_guid(arg):
                # guid { 0x414e6bdd, 0xe47b, 0x47cc,
                #        { 0xb2, 0x44, 0xbb, 0x61, 0x02, 0x0c,0xf5, 0x16 }}
                key = GuidNames.from_c_guid(arg)
                name = GuidNames.to_name(key)
            else:
                # guid gEfiDxeServicesTableGuid
                name = arg
                try:
                    key = GuidNames.to_guid(name)
                    name = GuidNames.to_name(key)
                except ValueError:
                    # Unknown symbolic name: nothing to display.
                    return

            extra = f'{GuidNames.to_c_guid(key)}: ' if options.verbose else ''
            print(f'{key}: {extra}{name}')
        else:
            # No argument: list every GUID the xref knows about.
            for key, value in GuidNames._dict_.items():
                if options.verbose:
                    extra = f'{GuidNames.to_c_guid(key)}: '
                else:
                    extra = ''
                print(f'{key}: {extra}{value}')
class EfiSymbolicateCommand(object):
    '''Class to abstract an lldb command'''

    def create_options(self):
        ''' standard lldb command help/options parser'''
        usage = "usage: %prog [options]"
        description = '''Command that can load EFI PE/COFF and TE image
symbols. If you are having trouble in PEI try adding --pei.
'''
        # Pass add_help_option = False, since this keeps the command in line
        # with lldb commands, and we wire up "help command" to work by
        # providing the long & short help methods below.
        self.parser = optparse.OptionParser(
            description=description,
            prog='efi_symbols',
            usage=usage,
            add_help_option=False)

        self.parser.add_option(
            '-a',
            '--address',
            type="int",
            dest='address',
            help='Load symbols for image at address',
            default=None)

        self.parser.add_option(
            '-f',
            '--frame',
            action='store_true',
            dest='frame',
            help='Load symbols for current stack frame',
            default=False)

        self.parser.add_option(
            '-p',
            '--pc',
            action='store_true',
            dest='pc',
            help='Load symbols for pc',
            default=False)

        self.parser.add_option(
            '--pei',
            action='store_true',
            dest='pei',
            help='Load symbols for PEI (searches every 4 bytes)',
            default=False)

        self.parser.add_option(
            '-e',
            '--extended',
            action='store_true',
            dest='extended',
            help='Try to load all symbols based on config tables.',
            default=False)

        self.parser.add_option(
            '-r',
            '--range',
            type="long",
            dest='range',
            help='How far to search backward for start of PE/COFF Image',
            default=None)

        self.parser.add_option(
            '-s',
            '--stride',
            type="long",
            dest='stride',
            help='Boundary to search for PE/COFF header',
            default=None)

        self.parser.add_option(
            '-t',
            '--thread',
            action='store_true',
            dest='thread',
            help='Load symbols for the frames of all threads',
            default=False)

        self.parser.add_option(
            '-h',
            '--help',
            action='store_true',
            dest='help',
            help='Show help for the command',
            default=False)

    def get_short_help(self):
        '''standard lldb function method'''
        return (
            "Load symbols based on an address that is part of"
            " a PE/COFF EFI image.")

    def get_long_help(self):
        '''standard lldb function method'''
        return self.help_string

    def __init__(self, debugger, unused):
        '''standard lldb function method'''
        self.create_options()
        self.help_string = self.parser.format_help()

    def lldb_print(self, lldb_str):
        # capture command out like an lldb command
        self.result.PutCString(lldb_str)
        # flush the output right away
        self.result.SetImmediateOutputFile(
            self.exe_ctx.target.debugger.GetOutputFile())

    def __call__(self, debugger, command, exe_ctx, result):
        '''standard lldb function method'''
        # Use the Shell Lexer to properly parse up command options just like a
        # shell would
        command_args = shlex.split(command)
        try:
            (options, _) = self.parser.parse_args(command_args)
        except ValueError:
            # if you don't handle exceptions, passing an incorrect argument
            # to the OptionParser will cause LLDB to exit (courtesy of
            # OptParse dealing with argument errors by throwing SystemExit)
            result.SetError("option parsing failed")
            return

        if options.help:
            self.parser.print_help()
            return

        file = LldbFileObject(exe_ctx.process)
        efi_symbols = EfiSymbols(exe_ctx.target)
        self.result = result
        self.exe_ctx = exe_ctx

        if options.pei:
            # XIP code ends up on a 4 byte boundary.
            options.stride = 4
            options.range = 0x100000
        efi_symbols.configure_search(options.stride, options.range)

        if not options.pc and options.address is None:
            # default to
            options.frame = True

        if options.frame:
            if not exe_ctx.frame.IsValid():
                result.SetError("invalid frame")
                return

            threads = exe_ctx.process.threads if options.thread else [
                exe_ctx.thread]

            for thread in threads:
                for frame in thread:
                    res = efi_symbols.address_to_symbols(frame.pc)
                    self.lldb_print(res)
        else:
            if options.address is not None:
                address = options.address
            elif options.pc:
                try:
                    address = exe_ctx.thread.GetSelectedFrame().pc
                except ValueError:
                    result.SetError("invalid pc")
                    return
            else:
                address = 0

            # Bug fix: this used to call address_to_symbols(address.pc), but
            # 'address' is a plain int here, so '.pc' raised AttributeError.
            res = efi_symbols.address_to_symbols(address)
            print(res)

        if options.extended:
            # Walk the debug image info table to symbolicate everything.
            gST = exe_ctx.target.FindFirstGlobalVariable('gST')
            if gST.error.fail:
                print('Error: This command requires symbols to be loaded')
            else:
                table = EfiConfigurationTable(file, gST.unsigned)
                for address, _ in table.DebugImageInfo():
                    res = efi_symbols.address_to_symbols(address)
                    self.lldb_print(res)

        # keep trying module file names until we find a GUID xref file
        for m in exe_ctx.target.modules:
            if GuidNames.add_build_guid_file(str(m.file)):
                break
def CHAR16_TypeSummary(valobj, internal_dict):
    '''
    Display CHAR16 as a String in the debugger.
    Note: utf-8 is returned as that is the value for the debugger.
    '''
    SBError = lldb.SBError()
    Str = ''
    if valobj.TypeIsPointerType():
        if valobj.GetValueAsUnsigned() == 0:
            return "NULL"

        # CHAR16 * max string size 1024
        for i in range(1024):
            Char = valobj.GetPointeeData(i, 1).GetUnsignedInt16(SBError, 0)
            if SBError.fail or Char == 0:
                break
            Str += chr(Char)
        return 'L"' + Str + '"'

    if valobj.num_children == 0:
        # CHAR16 scalar
        return "L'" + chr(valobj.unsigned) + "'"

    # CHAR16 []
    for i in range(valobj.num_children):
        Char = valobj.GetChildAtIndex(i).data.GetUnsignedInt16(SBError, 0)
        # Consistency fix: also stop on a read error, matching the
        # CHAR8_TypeSummary array loop. (The unreachable trailing
        # 'return Str' was removed as well.)
        if SBError.fail or Char == 0:
            break
        Str += chr(Char)
    return 'L"' + Str + '"'
def CHAR8_TypeSummary(valobj, internal_dict):
    '''
    Display CHAR8 as a String in the debugger.
    Note: utf-8 is returned as that is the value for the debugger.
    '''
    SBError = lldb.SBError()
    Str = ''
    if valobj.TypeIsPointerType():
        if valobj.GetValueAsUnsigned() == 0:
            return "NULL"

        # CHAR8 * max string size 1024
        for i in range(1024):
            Char = valobj.GetPointeeData(i, 1).GetUnsignedInt8(SBError, 0)
            if SBError.fail or Char == 0:
                break
            Str += chr(Char)
        return '"' + Str + '"'

    if valobj.num_children == 0:
        # CHAR8 scalar
        return "'" + chr(valobj.unsigned) + "'"

    # CHAR8 []
    # (The unreachable trailing 'return Str' after this branch was removed.)
    for i in range(valobj.num_children):
        Char = valobj.GetChildAtIndex(i).data.GetUnsignedInt8(SBError, 0)
        if SBError.fail or Char == 0:
            break
        Str += chr(Char)
    return '"' + Str + '"'
def EFI_STATUS_TypeSummary(valobj, internal_dict):
    """Render an EFI_STATUS value by its symbolic name; pointers are left
    to the default formatter."""
    if valobj.TypeIsPointerType():
        return ''
    summary = EfiStatusClass(valobj.unsigned)
    return str(summary)
def EFI_TPL_TypeSummary(valobj, internal_dict):
    """Render an EFI_TPL value by its symbolic name; pointers are left
    to the default formatter."""
    if valobj.TypeIsPointerType():
        return ''
    summary = EfiTpl(valobj.unsigned)
    return str(summary)
def EFI_GUID_TypeSummary(valobj, internal_dict):
    """Render an EFI_GUID by its registry-format string / known name;
    pointers are left to the default formatter."""
    if valobj.TypeIsPointerType():
        return ''
    raw = bytes(valobj.data.uint8)
    return str(GuidNames(raw))
def EFI_BOOT_MODE_TypeSummary(valobj, internal_dict):
    '''Return #define name for EFI_BOOT_MODE'''
    # Fix: the docstring above used to sit after the 'if', where it was a
    # stranded no-op string statement rather than the function docstring.
    if valobj.TypeIsPointerType():
        return ''
    return str(EfiBootMode(valobj.unsigned))
def lldb_type_formaters(debugger, mod_name):
    '''Teach lldb about EFI types'''

    category = debugger.GetDefaultCategory()
    FormatBool = lldb.SBTypeFormat(lldb.eFormatBoolean)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("BOOLEAN"), FormatBool)

    # Display the fixed-width EFI integer types in hex by default.
    FormatHex = lldb.SBTypeFormat(lldb.eFormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("UINT64"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("INT64"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("UINT32"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("INT32"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("UINT16"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("INT16"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("UINT8"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("INT8"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("UINTN"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("INTN"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("CHAR8"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("CHAR16"), FormatHex)

    category.AddTypeFormat(lldb.SBTypeNameSpecifier(
        "EFI_PHYSICAL_ADDRESS"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier(
        "PHYSICAL_ADDRESS"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier("EFI_LBA"), FormatHex)
    category.AddTypeFormat(
        lldb.SBTypeNameSpecifier("EFI_BOOT_MODE"), FormatHex)
    category.AddTypeFormat(lldb.SBTypeNameSpecifier(
        "EFI_FV_FILETYPE"), FormatHex)

    #
    # Smart type printing for EFI
    #
    # Bug fix: the GUID and EFI_TPL commands below used the broken flag
    # "- -python-function" (stray space); lldb rejects that option.
    debugger.HandleCommand(
        f'type summary add GUID --python-function '
        f'{mod_name}.EFI_GUID_TypeSummary')
    debugger.HandleCommand(
        f'type summary add EFI_GUID --python-function '
        f'{mod_name}.EFI_GUID_TypeSummary')
    debugger.HandleCommand(
        f'type summary add EFI_STATUS --python-function '
        f'{mod_name}.EFI_STATUS_TypeSummary')
    debugger.HandleCommand(
        f'type summary add EFI_TPL --python-function '
        f'{mod_name}.EFI_TPL_TypeSummary')
    debugger.HandleCommand(
        f'type summary add EFI_BOOT_MODE --python-function '
        f'{mod_name}.EFI_BOOT_MODE_TypeSummary')
    debugger.HandleCommand(
        f'type summary add CHAR16 --python-function '
        f'{mod_name}.CHAR16_TypeSummary')
    # W605 this is the correct escape sequence for the lldb command
    debugger.HandleCommand(
        f'type summary add --regex "CHAR16 \[[0-9]+\]" '  # noqa: W605
        f'--python-function {mod_name}.CHAR16_TypeSummary')
    debugger.HandleCommand(
        f'type summary add CHAR8 --python-function '
        f'{mod_name}.CHAR8_TypeSummary')
    # W605 this is the correct escape sequence for the lldb command
    debugger.HandleCommand(
        f'type summary add --regex "CHAR8 \[[0-9]+\]" '  # noqa: W605
        f'--python-function {mod_name}.CHAR8_TypeSummary')
class LldbWorkaround:
    """One-shot helper that stops lldb from notifying on SIGALRM, which the
    emulator uses internally."""
    needed = True

    @classmethod
    def activate(cls):
        """Apply the workaround on first use; later calls are no-ops."""
        if not cls.needed:
            return
        lldb.debugger.HandleCommand("process handle SIGALRM -n false")
        cls.needed = False
def LoadEmulatorEfiSymbols(frame, bp_loc, internal_dict):
    #
    # This is an lldb breakpoint script, and assumes the breakpoint is on a
    # function with the same prototype as SecGdbScriptBreak(). The
    # argument names are important as lldb looks them up.
    #
    # VOID
    # SecGdbScriptBreak (
    #     char *FileName,
    #     int FileNameLength,
    #     long unsigned int LoadAddress,
    #     int AddSymbolFlag
    #     )
    # {
    #     return;
    # }
    #
    # When the emulator loads a PE/COFF image, it calls the stub function with
    # the filename of the symbol file, the length of the FileName, the
    # load address and a flag to indicate if this is a load or unload operation
    #
    LldbWorkaround().activate()

    symbols = EfiSymbols(frame.thread.process.target)
    load_address = frame.FindVariable("LoadAddress").unsigned
    adding = frame.FindVariable("AddSymbolFlag").unsigned == 1
    if adding:
        status = symbols.address_to_symbols(load_address)
    else:
        status = symbols.unload_symbols(load_address)
    print(status)

    # make breakpoint command continue
    return False
def __lldb_init_module(debugger, internal_dict):
    '''
    This initializer is being run from LLDB in the embedded command interpreter
    '''

    mod_name = Path(__file__).stem
    lldb_type_formaters(debugger, mod_name)

    # Add any commands contained in this module to LLDB
    debugger.HandleCommand(
        f'command script add -c {mod_name}.EfiSymbolicateCommand efi_symbols')
    debugger.HandleCommand(
        f'command script add -c {mod_name}.EfiGuidCommand guid')
    debugger.HandleCommand(
        f'command script add -c {mod_name}.EfiTableCommand table')
    debugger.HandleCommand(
        f'command script add -c {mod_name}.EfiHobCommand hob')
    debugger.HandleCommand(
        f'command script add -c {mod_name}.EfiDevicePathCommand devicepath')

    print('EFI specific commands have been installed.')

    # patch the ctypes c_void_p values if the debuggers OS and EFI have
    # different ideas on the size of the debug.
    try:
        patch_ctypes(debugger.GetSelectedTarget().addr_size)
    except ValueError:
        # incase the script is imported and the debugger has not target
        # defaults to sizeof(UINTN) == sizeof(UINT64)
        patch_ctypes()

    try:
        target = debugger.GetSelectedTarget()
        if target.FindFunctions('SecGdbScriptBreak').symbols:
            # Set the emulator breakpoints, if we are in the emulator
            breakpoint = target.BreakpointCreateByName('SecGdbScriptBreak')
            cmd = 'breakpoint command add -s python -F '
            cmd += f'efi_lldb.LoadEmulatorEfiSymbols {breakpoint.GetID()}'
            debugger.HandleCommand(cmd)
            print('Type r to run emulator.')
        else:
            raise ValueError("No Emulator Symbols")
    except ValueError:
        # default action when the script is imported
        debugger.HandleCommand("efi_symbols --frame --extended")
        debugger.HandleCommand("register read")
        debugger.HandleCommand("bt all")
# Nothing to do when run directly; this module only makes sense when imported
# into lldb's embedded script interpreter (see __lldb_init_module above).
if __name__ == '__main__':
    pass
| edk2-master | BaseTools/Scripts/efi_lldb.py |
# @file ConvertMasmToNasm.py
# This script assists with conversion of MASM assembly syntax to NASM
#
# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
#
# Import Modules
#
import argparse
import io
import os.path
import re
import subprocess
import sys
class UnsupportedConversion(Exception):
    """Raised when the MASM source uses syntax this script cannot convert."""
    pass
class NoSourceFile(Exception):
    """Raised when the expected assembly source file does not exist."""
    pass
class UnsupportedArch(Exception):
    """Raised for architectures whose assembly this script does not handle."""
    # Architecture directory names (lowercase) the converter refuses.
    unsupported = ('aarch64', 'arm', 'ebc', 'ipf')
class CommonUtils:
    """Shared command-line, git, and string helpers for the MASM->NASM
    conversion classes."""

    # Version and Copyright
    VersionNumber = "0.01"
    __version__ = "%prog Version " + VersionNumber
    __copyright__ = "Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved."
    __usage__ = "%prog [options] source.asm [destination.nasm]"
    def __init__(self, clone=None):
        """Initialize from the command line, or share state with 'clone'
        (an already-constructed CommonUtils)."""
        if clone is None:
            self.args = self.ProcessCommandLine()
        else:
            self.args = clone.args

        self.unsupportedSyntaxSeen = False
        self.src = self.args.source
        self.keep = self.args.keep
        assert(os.path.exists(self.src))
        self.dirmode = os.path.isdir(self.src)
        srcExt = os.path.splitext(self.src)[1]
        # A .nasm file is already converted; refuse it as a single source.
        assert (self.dirmode or srcExt != '.nasm')
        # .inf mode: convert the sources referenced by an INF build file.
        self.infmode = not self.dirmode and srcExt == '.inf'
        self.diff = self.args.diff
        self.git = self.args.git
        self.force = self.args.force

        if clone is None:
            self.rootdir = os.getcwd()
            self.DetectGit()
        else:
            # Reuse the clone's git discovery instead of repeating it.
            self.rootdir = clone.rootdir
            self.gitdir = clone.gitdir
            self.gitemail = clone.gitemail
    def ProcessCommandLine(self):
        """Build the argparse parser and return the parsed arguments."""
        parser = argparse.ArgumentParser(description=self.__copyright__)
        parser.add_argument('--version', action='version',
                            version='%(prog)s ' + self.VersionNumber)
        parser.add_argument("-q", "--quiet", action="store_true",
                            help="Disable all messages except FATAL ERRORS.")
        parser.add_argument("--git", action="store_true",
                            help="Use git to create commits for each file converted")
        parser.add_argument("--keep", action="append", choices=('asm', 's'),
                            default=[],
                            help="Don't remove files with this extension")
        parser.add_argument("--diff", action="store_true",
                            help="Show diff of conversion")
        parser.add_argument("-f", "--force", action="store_true",
                            help="Force conversion even if unsupported")
        parser.add_argument('source', help='MASM input file')
        parser.add_argument('dest', nargs='?',
                            help='NASM output file (default=input.nasm; - for stdout)')
        return parser.parse_args()
def RootRelative(self, path):
result = path
if result.startswith(self.rootdir):
result = result[len(self.rootdir):]
while len(result) > 0 and result[0] in '/\\':
result = result[1:]
return result
def MatchAndSetMo(self, regexp, string):
self.mo = regexp.match(string)
return self.mo is not None
def SearchAndSetMo(self, regexp, string):
self.mo = regexp.search(string)
return self.mo is not None
def ReplacePreserveSpacing(self, string, find, replace):
if len(find) >= len(replace):
padded = replace + (' ' * (len(find) - len(replace)))
return string.replace(find, padded)
elif find.find(replace) >= 0:
return string.replace(find, replace)
else:
lenDiff = len(replace) - len(find)
result = string
for i in range(lenDiff, -1, -1):
padded = find + (' ' * i)
result = result.replace(padded, replace)
return result
def DetectGit(self):
lastpath = os.path.realpath(self.src)
self.gitdir = None
while True:
path = os.path.split(lastpath)[0]
if path == lastpath:
self.gitemail = None
return
candidate = os.path.join(path, '.git')
if os.path.isdir(candidate):
self.gitdir = candidate
self.gitemail = self.FormatGitEmailAddress()
return
lastpath = path
def FormatGitEmailAddress(self):
if not self.git or not self.gitdir:
return ''
cmd = ('git', 'config', 'user.name')
name = self.RunAndCaptureOutput(cmd).strip()
cmd = ('git', 'config', 'user.email')
email = self.RunAndCaptureOutput(cmd).strip()
if name.find(',') >= 0:
name = '"' + name + '"'
return name + ' <' + email + '>'
def RunAndCaptureOutput(self, cmd, checkExitCode=True, pipeIn=None):
if pipeIn:
subpStdin = subprocess.PIPE
else:
subpStdin = None
p = subprocess.Popen(args=cmd, stdout=subprocess.PIPE, stdin=subpStdin)
(stdout, stderr) = p.communicate(pipeIn)
if checkExitCode:
if p.returncode != 0:
print('command:', ' '.join(cmd))
print('stdout:', stdout)
print('stderr:', stderr)
print('return:', p.returncode)
assert p.returncode == 0
return stdout.decode('utf-8', 'ignore')
def FileUpdated(self, path):
if not self.git or not self.gitdir:
return
cmd = ('git', 'add', path)
self.RunAndCaptureOutput(cmd)
def FileAdded(self, path):
self.FileUpdated(path)
def RemoveFile(self, path):
if not self.git or not self.gitdir:
return
if self.ShouldKeepFile(path):
return
cmd = ('git', 'rm', path)
self.RunAndCaptureOutput(cmd)
def ShouldKeepFile(self, path):
ext = os.path.splitext(path)[1].lower()
if ext.startswith('.'):
ext = ext[1:]
return ext in self.keep
def FileConversionFinished(self, pkg, module, src, dst):
if not self.git or not self.gitdir:
return
if not self.args.quiet:
print('Committing: Conversion of', dst)
prefix = ' '.join(filter(lambda a: a, [pkg, module]))
message = ''
if self.unsupportedSyntaxSeen:
message += 'ERROR! '
message += '%s: Convert %s to NASM\n' % (prefix, src)
message += '\n'
message += 'The %s script was used to convert\n' % sys.argv[0]
message += '%s to %s\n' % (src, dst)
message += '\n'
message += 'Contributed-under: TianoCore Contribution Agreement 1.0\n'
assert(self.gitemail is not None)
message += 'Signed-off-by: %s\n' % self.gitemail
message = message.encode('utf-8', 'ignore')
cmd = ('git', 'commit', '-F', '-')
self.RunAndCaptureOutput(cmd, pipeIn=message)
class ConvertAsmFile(CommonUtils):
    """Convert one MASM .asm file to NASM syntax.

    The conversion is line oriented: each input line is split into
    indentation, assembly text, and trailing comment, and the assembly
    text is rewritten through a chain of regex-driven translations.
    """
    def __init__(self, src, dst, clone):
        CommonUtils.__init__(self, clone)
        self.ConvertAsmFile(src, dst)
        self.FileAdded(dst)
        self.RemoveFile(src)

    def ConvertAsmFile(self, inputFile, outputFile=None):
        """Read *inputFile*, translate it, and emit the NASM output.

        outputFile defaults to the input name with a .nasm extension;
        '-' writes to stdout (unless --diff is active).
        """
        self.globals = set()
        self.unsupportedSyntaxSeen = False
        self.inputFilename = inputFile
        if not outputFile:
            outputFile = os.path.splitext(inputFile)[0] + '.nasm'
        self.outputFilename = outputFile

        fullSrc = os.path.realpath(inputFile)
        srcParentDir = os.path.basename(os.path.split(fullSrc)[0])
        maybeArch = srcParentDir.lower()
        # The architecture is inferred from the parent directory name.
        if maybeArch in UnsupportedArch.unsupported:
            raise UnsupportedArch
        self.ia32 = maybeArch == 'ia32'
        self.x64 = maybeArch == 'x64'

        self.inputFileBase = os.path.basename(self.inputFilename)
        self.outputFileBase = os.path.basename(self.outputFilename)
        # Output is buffered in memory and written in one shot at the end.
        self.output = io.BytesIO()
        if not self.args.quiet:
            dirpath, src = os.path.split(self.inputFilename)
            dirpath = self.RootRelative(dirpath)
            dst = os.path.basename(self.outputFilename)
            print('Converting:', dirpath, src, '->', dst)
        lines = io.open(self.inputFilename).readlines()
        self.Convert(lines)
        if self.outputFilename == '-' and not self.diff:
            output_data = self.output.getvalue()
            if sys.version_info >= (3, 0):
                output_data = output_data.decode('utf-8', 'ignore')
            sys.stdout.write(output_data)
            self.output.close()
        else:
            f = io.open(self.outputFilename, 'wb')
            f.write(self.output.getvalue())
            f.close()
            self.output.close()

    # Trailing (optional) ';' comment plus newline at end of a line.
    endOfLineRe = re.compile(r'''
                                 \s* ( ; .* )? \n $
                             ''',
                             re.VERBOSE | re.MULTILINE
                             )
    # Leading whitespace (indentation) of a line.
    begOfLineRe = re.compile(r'''
                                 \s*
                             ''',
                             re.VERBOSE
                             )

    def Convert(self, lines):
        """Translate every line, tracking anonymous labels and blank runs."""
        self.proc = None
        self.anonLabelCount = -1
        output = self.output
        self.oldAsmEmptyLineCount = 0
        self.newAsmEmptyLineCount = 0
        for line in lines:
            mo = self.begOfLineRe.search(line)
            assert mo is not None
            self.indent = mo.group()
            lineWithoutBeginning = line[len(self.indent):]
            mo = self.endOfLineRe.search(lineWithoutBeginning)
            if mo is None:
                endOfLine = ''
            else:
                endOfLine = mo.group()
            # oldAsm is the line minus indentation and trailing comment.
            oldAsm = line[len(self.indent):len(line) - len(endOfLine)]
            self.originalLine = line.rstrip()
            if line.strip() == '':
                self.oldAsmEmptyLineCount += 1
            self.TranslateAsm(oldAsm, endOfLine)
            if line.strip() != '':
                self.oldAsmEmptyLineCount = 0

    # MASM 'name PROC [NEAR|FAR] [C] [PUBLIC|PRIVATE] [USES regs]'.
    procDeclRe = re.compile(r'''
                                (?: ASM_PFX \s* [(] \s* )?
                                ([\w@][\w@0-9]*) \s*
                                [)]? \s+
                                PROC
                                (?: \s+ NEAR | FAR )?
                                (?: \s+ C )?
                                (?: \s+ (PUBLIC | PRIVATE) )?
                                (?: \s+ USES ( (?: \s+ \w[\w0-9]* )+ ) )?
                                \s* $
                            ''',
                            re.VERBOSE | re.IGNORECASE
                            )

    procEndRe = re.compile(r'''
                               ([\w@][\w@0-9]*) \s+
                               ENDP
                               \s* $
                           ''',
                           re.VERBOSE | re.IGNORECASE
                           )

    # 'name' optionally followed by ':type' (used in PUBLIC lists).
    varAndTypeSubRe = r' (?: [\w@][\w@0-9]* ) (?: \s* : \s* \w+ )? '
    publicRe = re.compile(r'''
                              PUBLIC \s+
                              ( %s (?: \s* , \s* %s )* )
                              \s* $
                          ''' % (varAndTypeSubRe, varAndTypeSubRe),
                          re.VERBOSE | re.IGNORECASE
                          )

    varAndTypeSubRe = re.compile(varAndTypeSubRe, re.VERBOSE | re.IGNORECASE)

    macroDeclRe = re.compile(r'''
                                 ([\w@][\w@0-9]*) \s+
                                 MACRO
                                 \s* $
                             ''',
                             re.VERBOSE | re.IGNORECASE
                             )

    sectionDeclRe = re.compile(r'''
                                   ([\w@][\w@0-9]*) \s+
                                   ( SECTION | ENDS )
                                   \s* $
                               ''',
                               re.VERBOSE | re.IGNORECASE
                               )

    externRe = re.compile(r'''
                              EXTE?RN \s+ (?: C \s+ )?
                              ([\w@][\w@0-9]*) \s* : \s* (\w+)
                              \s* $
                          ''',
                          re.VERBOSE | re.IGNORECASE
                          )

    externdefRe = re.compile(r'''
                                 EXTERNDEF \s+ (?: C \s+ )?
                                 ([\w@][\w@0-9]*) \s* : \s* (\w+)
                                 \s* $
                             ''',
                             re.VERBOSE | re.IGNORECASE
                             )

    protoRe = re.compile(r'''
                             ([\w@][\w@0-9]*) \s+
                             PROTO
                             (?: \s+ .* )?
                             \s* $
                         ''',
                         re.VERBOSE | re.IGNORECASE
                         )

    defineDataRe = re.compile(r'''
                                  ([\w@][\w@0-9]*) \s+
                                  ( db | dw | dd | dq ) \s+
                                  ( .*? )
                                  \s* $
                              ''',
                              re.VERBOSE | re.IGNORECASE
                              )

    equRe = re.compile(r'''
                           ([\w@][\w@0-9]*) \s+ EQU \s+ (\S.*?)
                           \s* $
                       ''',
                       re.VERBOSE | re.IGNORECASE
                       )

    # Directives that have no NASM equivalent and are simply dropped.
    ignoreRe = re.compile(r'''
                              \. (?: const |
                                     mmx |
                                     model |
                                     xmm |
                                     x?list |
                                     [3-6]86p?
                                  ) |
                              page
                              (?: \s+ .* )?
                              \s* $
                          ''',
                          re.VERBOSE | re.IGNORECASE
                          )

    whitespaceRe = re.compile(r'\s+', re.MULTILINE)

    def TranslateAsm(self, oldAsm, endOfLine):
        """Translate one line of assembly text and emit the result.

        The elif chain below is order sensitive: earlier, more specific
        patterns must win over the generic fall-through conversion.
        """
        assert(oldAsm.strip() == oldAsm)

        endOfLine = endOfLine.replace(self.inputFileBase, self.outputFileBase)

        oldOp = oldAsm.split()
        if len(oldOp) >= 1:
            oldOp = oldOp[0]
        else:
            oldOp = ''

        if oldAsm == '':
            newAsm = oldAsm
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldOp in ('#include', ):
            newAsm = oldAsm
            self.EmitLine(oldAsm + endOfLine)
        elif oldOp.lower() in ('end', 'title', 'text'):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldAsm.lower() == '@@:':
            # MASM anonymous label; replaced by a numbered local label.
            self.anonLabelCount += 1
            self.EmitLine(self.anonLabel(self.anonLabelCount) + ':')
        elif self.MatchAndSetMo(self.ignoreRe, oldAsm):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldAsm.lower() == 'ret':
            # Pop the USES registers (in reverse) before returning.
            for i in range(len(self.uses) - 1, -1, -1):
                register = self.uses[i]
                self.EmitNewContent('pop     ' + register)
            newAsm = 'ret'
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
            self.uses = tuple()
        elif oldOp.lower() == 'lea':
            newAsm = self.ConvertLea(oldAsm)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldAsm.lower() == 'end':
            # NOTE(review): unreachable — a bare 'end' is already consumed
            # by the ('end', 'title', 'text') branch above.
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
            self.uses = tuple()
        elif self.MatchAndSetMo(self.equRe, oldAsm):
            equ = self.mo.group(1)
            newAsm = '%%define %s %s' % (equ, self.mo.group(2))
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.externRe, oldAsm) or \
                self.MatchAndSetMo(self.protoRe, oldAsm):
            extern = self.mo.group(1)
            self.NewGlobal(extern)
            newAsm = 'extern ' + extern
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.externdefRe, oldAsm):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.macroDeclRe, oldAsm):
            newAsm = '%%macro %s 0' % self.mo.group(1)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif oldOp.lower() == 'endm':
            newAsm = r'%endmacro'
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.sectionDeclRe, oldAsm):
            name = self.mo.group(1)
            ty = self.mo.group(2)
            if ty.lower() == 'section':
                newAsm = '.' + name
            else:
                newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.procDeclRe, oldAsm):
            proc = self.proc = self.mo.group(1)
            visibility = self.mo.group(2)
            if visibility is None:
                visibility = ''
            else:
                visibility = visibility.lower()
            if visibility != 'private':
                self.NewGlobal(self.proc)
                proc = 'ASM_PFX(' + proc + ')'
                self.EmitNewContent('global ' + proc)
            newAsm = proc + ':'
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
            uses = self.mo.group(3)
            if uses is not None:
                # Registers named after USES are pushed here and popped
                # again by the 'ret' branch above.
                uses = tuple(filter(None, uses.split()))
            else:
                uses = tuple()
            self.uses = uses
            for register in self.uses:
                self.EmitNewContent('    push    ' + register)
        elif self.MatchAndSetMo(self.procEndRe, oldAsm):
            newAsm = ''
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.publicRe, oldAsm):
            publics = re.findall(self.varAndTypeSubRe, self.mo.group(1))
            # Strip any ':type' suffix from each public name.
            publics = tuple(map(lambda p: p.split(':')[0].strip(), publics))
            for i in range(len(publics) - 1):
                name = publics[i]
                self.EmitNewContent('global ASM_PFX(%s)' % publics[i])
                self.NewGlobal(name)
            name = publics[-1]
            self.NewGlobal(name)
            newAsm = 'global ASM_PFX(%s)' % name
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        elif self.MatchAndSetMo(self.defineDataRe, oldAsm):
            name = self.mo.group(1)
            ty = self.mo.group(2)
            value = self.mo.group(3)
            if value == '?':
                # MASM uninitialized data; NASM gets an explicit zero.
                value = 0
            newAsm = '%s: %s %s' % (name, ty, value)
            newAsm = self.CommonConversions(newAsm)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)
        else:
            newAsm = self.CommonConversions(oldAsm)
            self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)

    def NewGlobal(self, name):
        """Register *name* so later lines get it wrapped in ASM_PFX()."""
        regex = re.compile(r'(?<![_\w\d])(?<!ASM_PFX\()(' + re.escape(name) +
                           r')(?![_\w\d])')
        self.globals.add(regex)

    def ConvertAnonymousLabels(self, oldAsm):
        """Rewrite @b/@f references to the numbered local labels."""
        newAsm = oldAsm
        anonLabel = self.anonLabel(self.anonLabelCount)
        newAsm = newAsm.replace('@b', anonLabel)
        newAsm = newAsm.replace('@B', anonLabel)
        anonLabel = self.anonLabel(self.anonLabelCount + 1)
        newAsm = newAsm.replace('@f', anonLabel)
        newAsm = newAsm.replace('@F', anonLabel)
        return newAsm

    def anonLabel(self, count):
        """NASM local-label name for anonymous label number *count*."""
        return '.%d' % count

    def EmitString(self, string):
        self.output.write(string.encode('utf-8', 'ignore'))

    def EmitLineWithDiff(self, old, new):
        """Write one output line; print a diff against *old* if requested."""
        newLine = (self.indent + new).rstrip()
        if self.diff:
            if old is None:
                print('+%s' % newLine)
            elif newLine != old:
                print('-%s' % old)
                print('+%s' % newLine)
            else:
                print('', newLine)
        if newLine != '':
            self.newAsmEmptyLineCount = 0
        # Output uses DOS line endings regardless of platform.
        self.EmitString(newLine + '\r\n')

    def EmitLine(self, string):
        self.EmitLineWithDiff(self.originalLine, string)

    def EmitNewContent(self, string):
        # Lines with no counterpart in the input (diffed as pure additions).
        self.EmitLineWithDiff(None, string)

    def EmitAsmReplaceOp(self, oldAsm, oldOp, newOp, endOfLine):
        newAsm = oldAsm.replace(oldOp, newOp, 1)
        self.EmitAsmWithComment(oldAsm, newAsm, endOfLine)

    # MASM hex literal '1234h' -> captured digits (leading zeros dropped).
    hexNumRe = re.compile(r'0*((?=[\da-f])\d*(?<=\d)[\da-f]*)h', re.IGNORECASE)

    def EmitAsmWithComment(self, oldAsm, newAsm, endOfLine):
        """Apply global-name prefixing and hex rewriting, then emit.

        At most one consecutive blank output line is kept.
        """
        for glblRe in self.globals:
            newAsm = glblRe.sub(r'ASM_PFX(\1)', newAsm)

        newAsm = self.hexNumRe.sub(r'0x\1', newAsm)

        newLine = newAsm + endOfLine
        emitNewLine = ((newLine.strip() != '') or
                       ((oldAsm + endOfLine).strip() == ''))
        if emitNewLine and newLine.strip() == '':
            self.newAsmEmptyLineCount += 1
            if self.newAsmEmptyLineCount > 1:
                emitNewLine = False

        if emitNewLine:
            self.EmitLine(newLine.rstrip())
        elif self.diff:
            print('-%s' % self.originalLine)

    leaRe = re.compile(r'''
                           (lea \s+) ([\w@][\w@0-9]*) \s* , \s* (\S (?:.*\S)?)
                           \s* $
                       ''',
                       re.VERBOSE | re.IGNORECASE
                       )

    def ConvertLea(self, oldAsm):
        """NASM 'lea' requires the source in brackets; add them if missing."""
        newAsm = oldAsm
        if self.MatchAndSetMo(self.leaRe, oldAsm):
            lea = self.mo.group(1)
            dst = self.mo.group(2)
            src = self.mo.group(3)
            if src.find('[') < 0:
                src = '[' + src + ']'
            newAsm = lea + dst + ', ' + src
        newAsm = self.CommonConversions(newAsm)
        return newAsm

    ptrRe = re.compile(r'''
                           (?<! \S )
                           ([dfq]?word|byte) \s+ (?: ptr ) (\s*)
                           (?= [[\s] )
                       ''',
                       re.VERBOSE | re.IGNORECASE
                       )

    def ConvertPtr(self, oldAsm):
        """Drop MASM's 'ptr' keyword (and 'fword' size, which NASM lacks)."""
        newAsm = oldAsm
        while self.SearchAndSetMo(self.ptrRe, newAsm):
            ty = self.mo.group(1)
            if ty.lower() == 'fword':
                ty = ''
            else:
                ty += self.mo.group(2)
            newAsm = newAsm[:self.mo.start(0)] + ty + newAsm[self.mo.end(0):]
        return newAsm

    labelByteRe = re.compile(r'''
                                 (?: \s+ label \s+ (?: [dfq]?word | byte ) )
                                 (?! \S )
                             ''',
                             re.VERBOSE | re.IGNORECASE
                             )

    def ConvertLabelByte(self, oldAsm):
        """Rewrite 'name label byte/word/...' as a plain 'name:' label."""
        newAsm = oldAsm
        if self.SearchAndSetMo(self.labelByteRe, newAsm):
            newAsm = newAsm[:self.mo.start(0)] + ':' + newAsm[self.mo.end(0):]
        return newAsm

    unaryBitwiseOpRe = re.compile(r'''
                                      ( NOT )
                                      (?= \s+ \S )
                                  ''',
                                  re.VERBOSE | re.IGNORECASE
                                  )
    binaryBitwiseOpRe = re.compile(r'''
                                       ( \S \s+ )
                                       ( AND | OR | SHL | SHR )
                                       (?= \s+ \S )
                                   ''',
                                   re.VERBOSE | re.IGNORECASE
                                   )
    # MASM word operators -> NASM/C style symbolic operators.
    bitwiseOpReplacements = {
        'not': '~',
        'and': '&',
        'shl': '<<',
        'shr': '>>',
        'or': '|',
    }

    def ConvertBitwiseOp(self, oldAsm):
        """Replace word-form bitwise operators with symbolic ones."""
        newAsm = oldAsm
        while self.SearchAndSetMo(self.binaryBitwiseOpRe, newAsm):
            prefix = self.mo.group(1)
            op = self.bitwiseOpReplacements[self.mo.group(2).lower()]
            newAsm = newAsm[:self.mo.start(0)] + prefix + op + \
                newAsm[self.mo.end(0):]
        while self.SearchAndSetMo(self.unaryBitwiseOpRe, newAsm):
            op = self.bitwiseOpReplacements[self.mo.group(1).lower()]
            newAsm = newAsm[:self.mo.start(0)] + op + newAsm[self.mo.end(0):]
        return newAsm

    sectionRe = re.compile(r'''
                               \. ( code |
                                    data
                                  )
                               (?: \s+ .* )?
                               \s* $
                           ''',
                           re.VERBOSE | re.IGNORECASE
                           )

    segmentRe = re.compile(r'''
                               ( code |
                                 data )
                               (?: \s+ SEGMENT )
                               (?: \s+ .* )?
                               \s* $
                           ''',
                           re.VERBOSE | re.IGNORECASE
                           )

    def ConvertSection(self, oldAsm):
        """Translate .code/.data and SEGMENT directives to SECTION lines."""
        newAsm = oldAsm
        if self.MatchAndSetMo(self.sectionRe, newAsm) or \
           self.MatchAndSetMo(self.segmentRe, newAsm):
            name = self.mo.group(1).lower()
            if name == 'code':
                if self.x64:
                    # X64 code defaults to RIP-relative addressing.
                    self.EmitLine('DEFAULT REL')
                name = 'text'
            newAsm = 'SECTION .' + name
        return newAsm

    fwordRe = re.compile(r'''
                             (?<! \S )
                             fword
                             (?! \S )
                         ''',
                         re.VERBOSE | re.IGNORECASE
                         )

    def FwordUnsupportedCheck(self, oldAsm):
        """'fword' has no NASM equivalent; flag it as unsupported."""
        newAsm = oldAsm
        if self.SearchAndSetMo(self.fwordRe, newAsm):
            newAsm = self.Unsupported(newAsm, 'fword used')
        return newAsm

    # Conversions applied to every line that is not handled by a
    # specialized branch in TranslateAsm.
    __common_conversion_routines__ = (
        ConvertAnonymousLabels,
        ConvertPtr,
        FwordUnsupportedCheck,
        ConvertBitwiseOp,
        ConvertLabelByte,
        ConvertSection,
    )

    def CommonConversions(self, oldAsm):
        newAsm = oldAsm
        for conv in self.__common_conversion_routines__:
            newAsm = conv(self, newAsm)
        return newAsm

    def Unsupported(self, asm, message=None):
        """Raise unless --force; otherwise emit a %error marker line."""
        if not self.force:
            raise UnsupportedConversion

        self.unsupportedSyntaxSeen = True
        newAsm = '%error conversion unsupported'
        if message:
            newAsm += '; ' + message
        newAsm += ': ' + asm
        return newAsm
class ConvertInfFile(CommonUtils):
    """Convert the MASM sources referenced by one .inf file.

    Scans the INF's source lines for .asm/.s entries, converts each to a
    .nasm file, and rewrites the INF so it references the new files.
    Iterating the object yields the destination (.nasm) names in order.
    """

    def __init__(self, inf, clone):
        CommonUtils.__init__(self, clone)
        self.inf = inf
        self.ScanInfAsmFiles()
        if self.infmode:
            self.ConvertInfAsmFiles()

    # Matches an INF sources line naming a .asm/.s file, optionally
    # followed by '|'-separated build options and/or a '#' comment.
    infSrcRe = re.compile(r'''
                          \s*
                          ( [\w@][\w@0-9/]* \.(asm|s) )
                          \s* (?: \| [^#]* )?
                          \s* (?: \# .* )?
                          $
                          ''',
                          re.VERBOSE | re.IGNORECASE
                          )

    def GetInfAsmFileMapping(self):
        """Return {src: dst, 'order': [src, ...]} for convertible sources.

        Sources whose .nasm twin already exists on disk are skipped.
        """
        srcToDst = {'order': []}
        for line in self.lines:
            line = line.rstrip()
            if self.MatchAndSetMo(self.infSrcRe, line):
                src = self.mo.group(1)
                dst = os.path.splitext(src)[0] + '.nasm'
                fullDst = os.path.join(self.dir, dst)
                if src not in srcToDst and not os.path.exists(fullDst):
                    srcToDst[src] = dst
                    srcToDst['order'].append(src)
        return srcToDst

    def ScanInfAsmFiles(self):
        """Read the INF, derive package/module names, build src<->dst maps."""
        src = self.inf
        assert os.path.isfile(src)
        f = io.open(src, 'rt')
        self.lines = f.readlines()
        f.close()

        path = os.path.realpath(self.inf)
        (self.dir, inf) = os.path.split(path)
        parent = os.path.normpath(self.dir)
        (lastpath, self.moduleName) = os.path.split(parent)
        self.packageName = None
        # Walk upward until a '*Pkg' directory (the package) or the root.
        while True:
            lastpath = os.path.normpath(lastpath)
            (parent, basename) = os.path.split(lastpath)
            if parent == lastpath:
                break
            if basename.endswith('Pkg'):
                self.packageName = basename
                break
            lastpath = parent

        self.srcToDst = self.GetInfAsmFileMapping()

        # Invert the mapping: several sources (e.g. .asm and .s variants)
        # may map onto the same destination name.
        self.dstToSrc = {'order': []}
        for src in self.srcToDst['order']:
            dst = self.srcToDst[src]
            if dst not in self.dstToSrc:
                self.dstToSrc[dst] = [src]
                self.dstToSrc['order'].append(dst)
            else:
                self.dstToSrc[dst].append(src)

    def __len__(self):
        return len(self.dstToSrc['order'])

    def __iter__(self):
        return iter(self.dstToSrc['order'])

    def ConvertInfAsmFiles(self):
        """Convert every destination file of this INF, reporting failures."""
        notConverted = []
        unsupportedArchCount = 0
        for dst in self:
            didSomething = False
            try:
                self.UpdateInfAsmFile(dst)
                didSomething = True
            except UnsupportedConversion:
                if not self.args.quiet:
                    print('MASM=>NASM conversion unsupported for', dst)
                notConverted.append(dst)
            except NoSourceFile:
                if not self.args.quiet:
                    # Fix: this previously printed 'reldst', which is not
                    # defined at this point (NameError at runtime).
                    print('Source file missing for', dst)
                notConverted.append(dst)
            except UnsupportedArch:
                unsupportedArchCount += 1
            else:
                if didSomething:
                    self.ConversionFinished(dst)
        if len(notConverted) > 0 and not self.args.quiet:
            for dst in notConverted:
                reldst = self.RootRelative(dst)
                print('Unabled to convert', reldst)
        if unsupportedArchCount > 0 and not self.args.quiet:
            print('Skipped', unsupportedArchCount, 'files based on architecture')

    def UpdateInfAsmFile(self, dst, IgnoreMissingAsm=False):
        """Convert the .asm behind *dst* and rewrite matching INF lines.

        Raises UnsupportedArch / NoSourceFile / UnsupportedConversion when
        the conversion cannot proceed.
        """
        infPath = os.path.split(os.path.realpath(self.inf))[0]
        asmSrc = os.path.splitext(dst)[0] + '.asm'
        fullSrc = os.path.join(infPath, asmSrc)
        fullDst = os.path.join(infPath, dst)
        srcParentDir = os.path.basename(os.path.split(fullSrc)[0])
        if srcParentDir.lower() in UnsupportedArch.unsupported:
            raise UnsupportedArch
        elif not os.path.exists(fullSrc):
            if not IgnoreMissingAsm:
                raise NoSourceFile
        else:  # not os.path.exists(fullDst):
            conv = ConvertAsmFile(fullSrc, fullDst, self)
            self.unsupportedSyntaxSeen = conv.unsupportedSyntaxSeen

        fileChanged = False
        recentSources = list()
        i = 0
        while i < len(self.lines):
            line = self.lines[i].rstrip()
            updatedLine = line
            lineChanged = False
            preserveOldSource = False
            for src in self.dstToSrc[dst]:
                assert self.srcToDst[src] == dst
                updatedLine = self.ReplacePreserveSpacing(
                    updatedLine, src, dst)
                lineChanged = updatedLine != line
                if lineChanged:
                    preserveOldSource = self.ShouldKeepFile(src)
                    break

            if lineChanged:
                if preserveOldSource:
                    # --keep: keep the old source line and insert the new
                    # .nasm line above it (once).
                    if updatedLine.strip() not in recentSources:
                        self.lines.insert(i, updatedLine + '\n')
                        recentSources.append(updatedLine.strip())
                        i += 1
                        if self.diff:
                            print('+%s' % updatedLine)
                    if self.diff:
                        print('', line)
                else:
                    if self.diff:
                        print('-%s' % line)
                    if updatedLine.strip() in recentSources:
                        # Duplicate of a line just emitted; mark for removal.
                        self.lines[i] = None
                    else:
                        self.lines[i] = updatedLine + '\n'
                        recentSources.append(updatedLine.strip())
                        if self.diff:
                            print('+%s' % updatedLine)
            else:
                if len(recentSources) > 0:
                    recentSources = list()
                if self.diff:
                    print('', line)

            fileChanged |= lineChanged
            i += 1

        if fileChanged:
            self.lines = list(filter(lambda l: l is not None, self.lines))

        for src in self.dstToSrc[dst]:
            if not src.endswith('.asm'):
                fullSrc = os.path.join(infPath, src)
                if os.path.exists(fullSrc):
                    self.RemoveFile(fullSrc)

        if fileChanged:
            f = io.open(self.inf, 'w', newline='\r\n')
            f.writelines(self.lines)
            f.close()
            self.FileUpdated(self.inf)

    def ConversionFinished(self, dst):
        """Commit the conversion of *dst* (git mode only)."""
        asmSrc = os.path.splitext(dst)[0] + '.asm'
        self.FileConversionFinished(
            self.packageName, self.moduleName, asmSrc, dst)
class ConvertInfFiles(CommonUtils):
    """Convert the sources of many INF files, de-duplicating destination
    files that are shared between several INFs."""
    def __init__(self, infs, clone):
        CommonUtils.__init__(self, clone)
        infs = map(lambda i: ConvertInfFile(i, self), infs)
        infs = filter(lambda i: len(i) > 0, infs)
        # Map each absolute destination path to the (inf, relative dst)
        # pairs referencing it; 'order' preserves first-seen ordering.
        dstToInfs = {'order': []}
        for inf in infs:
            for dst in inf:
                fulldst = os.path.realpath(os.path.join(inf.dir, dst))
                pair = (inf, dst)
                if fulldst in dstToInfs:
                    dstToInfs[fulldst].append(pair)
                else:
                    dstToInfs['order'].append(fulldst)
                    dstToInfs[fulldst] = [pair]

        notConverted = []
        unsupportedArchCount = 0
        for dst in dstToInfs['order']:
            didSomething = False
            try:
                for inf, reldst in dstToInfs[dst]:
                    # Only the first INF triggers the actual conversion;
                    # the rest just get their source lines rewritten.
                    inf.UpdateInfAsmFile(reldst, IgnoreMissingAsm=didSomething)
                    didSomething = True
            except UnsupportedConversion:
                if not self.args.quiet:
                    print('MASM=>NASM conversion unsupported for', reldst)
                notConverted.append(dst)
            except NoSourceFile:
                if not self.args.quiet:
                    print('Source file missing for', reldst)
                notConverted.append(dst)
            except UnsupportedArch:
                unsupportedArchCount += 1
            else:
                if didSomething:
                    inf.ConversionFinished(reldst)
        if len(notConverted) > 0 and not self.args.quiet:
            for dst in notConverted:
                reldst = self.RootRelative(dst)
                print('Unabled to convert', reldst)
        if unsupportedArchCount > 0 and not self.args.quiet:
            print('Skipped', unsupportedArchCount, 'files based on architecture')
class ConvertDirectories(CommonUtils):
    """Recursively collect every .inf under the given paths and convert
    the assembly sources they reference."""

    def __init__(self, paths, clone):
        CommonUtils.__init__(self, clone)
        self.paths = paths
        self.ConvertInfAndAsmFiles()

    def ConvertInfAndAsmFiles(self):
        """Walk self.paths, gather all INF files, and hand them off."""
        # Validate every path up front before touching anything.
        for path in self.paths:
            assert(os.path.exists(path))
        collected = []
        for path in self.paths:
            for root, dirs, files in os.walk(path):
                # Don't descend into version-control metadata directories.
                for vcs in ('.svn', '.git'):
                    if vcs in dirs:
                        dirs.remove(vcs)
                collected.extend(
                    os.path.realpath(os.path.join(root, name))
                    for name in files
                    if name.lower().endswith('.inf')
                )
        ConvertInfFiles(collected, self)
class ConvertAsmApp(CommonUtils):
    """Top-level driver: dispatch on the kind of source argument."""

    def __init__(self):
        CommonUtils.__init__(self)
        source, dest = self.args.source, self.args.dest
        if self.infmode:
            # A single .inf file: convert the sources it references.
            ConvertInfFiles((source,), self)
        elif self.dirmode:
            # A directory: recurse and convert every INF found.
            ConvertDirectories((source,), self)
        else:
            # A single .asm file.
            ConvertAsmFile(source, dest, self)
# Script entry point: the conversion runs entirely in __init__.
ConvertAsmApp()
| edk2-master | BaseTools/Scripts/ConvertMasmToNasm.py |
## @file
# Check a patch for various format issues
#
# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
VersionNumber = '0.1'
__copyright__ = "Copyright (c) 2015, Intel Corporation All rights reserved."
import argparse
import codecs
import os
import sys
class ConvertOneArg:
    """Converts utf-16 to utf-8 for one command line argument.

    This could be a single file, or a directory.
    """

    def __init__(self, utf8, source):
        # utf8=True converts utf-16 -> utf-8; False converts the other way.
        self.utf8 = utf8
        self.source = source

        self.ok = True

        if not os.path.exists(source):
            self.ok = False
        elif os.path.isdir(source):
            # Convert every .uni file under the directory, stopping at the
            # first failure.
            for (root, dirs, files) in os.walk(source):
                files = filter(lambda a: a.endswith('.uni'), files)
                for filename in files:
                    path = os.path.join(root, filename)
                    self.ok &= self.convert_one_file(path)
                    if not self.ok:
                        break

                if not self.ok:
                    break
        else:
            self.ok &= self.convert_one_file(source)

    def convert_one_file(self, source):
        """Convert one .uni file in place; return True on success/no-op."""
        if self.utf8:
            new_enc, old_enc = 'utf-8', 'utf-16'
        else:
            new_enc, old_enc = 'utf-16', 'utf-8'
        #
        # Read file (fix: use a context manager so the handle cannot leak
        # if a later step raises).
        #
        with open(source, mode='rb') as f:
            file_content = f.read()

        #
        # Detect UTF-16 Byte Order Mark at beginning of file.
        #
        bom = (file_content.startswith(codecs.BOM_UTF16_BE) or
               file_content.startswith(codecs.BOM_UTF16_LE))
        if bom != self.utf8:
            print("%s: already %s" % (source, new_enc))
            return True

        #
        # Decode old string data, then encode with the new encoding.
        #
        str_content = file_content.decode(old_enc, 'ignore')
        new_content = str_content.encode(new_enc, 'ignore')

        #
        # Write converted data back to file
        #
        with open(source, mode='wb') as f:
            f.write(new_content)

        print(source + ": converted, size", len(file_content), '=>', len(new_content))
        return True
class ConvertUniApp:
    """Converts .uni files between utf-16 and utf-8."""

    def __init__(self):
        """Parse the command line and convert every source argument."""
        self.parse_options()
        self.ok = True
        for source in self.args.source:
            self.process_one_arg(source)
        # Process exit status: 0 when everything converted, -1 otherwise.
        self.retval = 0 if self.ok else -1

    def process_one_arg(self, arg):
        """Convert one file-or-directory argument and fold in its result."""
        self.ok &= ConvertOneArg(self.utf8, arg).ok

    def parse_options(self):
        """Build the argument parser and record the conversion direction."""
        parser = argparse.ArgumentParser(description=__copyright__)
        parser.add_argument('--version', action='version',
                            version='%(prog)s ' + VersionNumber)
        parser.add_argument('source', nargs='+',
                            help='[uni file | directory]')
        direction = parser.add_mutually_exclusive_group()
        direction.add_argument("--utf-8",
                               action="store_true",
                               help="Convert from utf-16 to utf-8 [default]")
        direction.add_argument("--utf-16",
                               action="store_true",
                               help="Convert from utf-8 to utf-16")
        self.args = parser.parse_args()
        # utf-8 is the default direction unless --utf-16 was given.
        self.utf8 = not self.args.utf_16
if __name__ == "__main__":
    # Exit with 0 on success, -1 if any conversion failed.
    sys.exit(ConvertUniApp().retval)
| edk2-master | BaseTools/Scripts/ConvertUni.py |
## @file
# Get current UTC date and time information and output as ascii code.
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
VersionNumber = '0.1'
import sys
import datetime
import argparse
def Main():
    """Print requested UTC values (year, then date, then time).

    Each value is reversed and then emitted as the concatenation of its
    characters' ASCII codes in hex (e.g. 2019 -> '39313032').
    """
    PARSER = argparse.ArgumentParser(
        description='Retrieves UTC date and time information (output ordering: year, date, time) - Version ' + VersionNumber)
    PARSER.add_argument('--year',
                        action='store_true',
                        help='Return UTC year of now. [Example output (2019): 39313032]')
    PARSER.add_argument('--date',
                        action='store_true',
                        help='Return UTC date MMDD of now. [Example output (7th August): 37303830]')
    PARSER.add_argument('--time',
                        action='store_true',
                        help='Return 24-hour-format UTC time HHMM of now. [Example output (14:25): 35323431]')
    ARGS = PARSER.parse_args()
    if len(sys.argv) == 1:
        print ("ERROR: At least one argument is required!\n")
        PARSER.print_help()
        # Fix: stop here; previously execution fell through and silently
        # did nothing after printing the help text.
        return

    def _reversed_ascii_hex(text):
        # Reverse the string, then concatenate each character's ASCII
        # value rendered in hex (two digits per digit character).
        return ''.join(hex(ord(ch))[2:] for ch in text[::-1])

    today = datetime.datetime.utcnow()
    if ARGS.year:
        print (_reversed_ascii_hex(str(today.year)))
    if ARGS.date:
        print (_reversed_ascii_hex(today.strftime("%m%d")))
    if ARGS.time:
        print (_reversed_ascii_hex(today.strftime("%H%M")))
if __name__ == '__main__':
    # Script entry point.
    Main()
| edk2-master | BaseTools/Scripts/GetUtcDateTime.py |
##
# Generate symbal for memory profile info.
#
# This tool depends on DIA2Dump.exe (VS) or nm (gcc) to parse debug entry.
#
# Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
##
from __future__ import print_function
import os
import re
import sys
from optparse import OptionParser
versionNumber = "1.1"
__copyright__ = "Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved."
class Symbols:
    """Symbol table for one driver module.

    Parses either 'nm -l' output (GCC toolchains) or Dia2Dump output
    (MSVC PDBs) into a sorted list of [rva, function, line, source]
    records, and resolves an RVA back to 'function() - source:line'.
    """
    def __init__(self):
        self.listLineAddress = []
        self.pdbName = ""
        # Cache for function (parse_pdb_file carries the last '**' name
        # forward to the line records that follow it).
        self.functionName = ""
        # Cache for line
        self.sourceName = ""

    def getSymbol(self, rva):
        """Return ' (func() - source:line)' for *rva*, or ' (unknown)'.

        listLineAddress must be sorted by RVA and self.lineCount set
        (both are done by the parse_* methods).
        """
        for index in range(self.lineCount - 1):
            # Find the record whose half-open range [start, next) holds rva.
            if self.listLineAddress[index][0] <= rva < self.listLineAddress[index + 1][0]:
                functionName = self.listLineAddress[index][1]
                lineName = self.listLineAddress[index][2]
                sourceName = self.listLineAddress[index][3]
                if lineName == 0:
                    # No source line recorded for this symbol.
                    return " (" + functionName + "() - " + ")"
                return " (" + functionName + "() - " + sourceName + ":" + str(lineName) + ")"
        return " (unknown)"

    def parse_debug_file(self, driverName, pdbName):
        """Populate the table from 'nm -l' output for *pdbName*."""
        if pdbName == "":  # fix: cmp() does not exist on Python 3
            return
        self.pdbName = pdbName
        try:
            nmCommand = "nm"
            nmLineOption = "-l"
            print("parsing (debug) - " + pdbName)
            os.system('%s %s %s > nmDump.line.log' % (nmCommand, nmLineOption, pdbName))
        except Exception:
            print('ERROR: nm command not available. Please verify PATH')
            return

        #
        # parse line
        #
        linefile = open("nmDump.line.log")
        reportLines = linefile.readlines()
        linefile.close()

        # Example input line:
        # 000113ca T AllocatePool c:\home\edk-ii\MdePkg\Library\UefiMemoryAllocationLib\MemoryAllocationLib.c:399
        # (raw string: '\s'/'\w' are regex escapes, not string escapes)
        patchLineFileMatchString = r"([0-9a-fA-F]*)\s+[T|D|t|d]\s+(\w+)\s*((?:[a-zA-Z]:)?[\w+\-./_a-zA-Z0-9\\]*):?([0-9]*)"
        for reportLine in reportLines:
            match = re.match(patchLineFileMatchString, reportLine)
            if match is not None:
                rva = int(match.group(1), 16)
                functionName = match.group(2)
                sourceName = match.group(3)
                if match.group(4) != "":
                    lineName = int(match.group(4))
                else:
                    lineName = 0
                self.listLineAddress.append([rva, functionName, lineName, sourceName])

        self.lineCount = len(self.listLineAddress)
        self.listLineAddress = sorted(self.listLineAddress, key=lambda symbolAddress: symbolAddress[0])

    def parse_pdb_file(self, driverName, pdbName):
        """Populate the table from 'Dia2Dump.exe -l' output for *pdbName*."""
        if pdbName == "":  # fix: cmp() does not exist on Python 3
            return
        self.pdbName = pdbName
        try:
            DIA2DumpCommand = "Dia2Dump.exe"
            DIA2LinesOption = "-l"
            print("parsing (pdb) - " + pdbName)
            os.system('%s %s %s > DIA2Dump.line.log' % (DIA2DumpCommand, DIA2LinesOption, pdbName))
        except Exception:
            print('ERROR: DIA2Dump command not available. Please verify PATH')
            return

        #
        # parse line
        #
        linefile = open("DIA2Dump.line.log")
        reportLines = linefile.readlines()
        linefile.close()

        # Example input:
        # ** GetDebugPrintErrorLevel
        #  line 32 at [0000C790][0001:0000B790], len = 0x3 c:\...\basedebugprinterrorlevellib.c (MD5: ...)
        #  line 36 at [0000C793][0001:0000B793], len = 0x5
        patchLineFileMatchString = r"\s+line ([0-9]+) at \[([0-9a-fA-F]{8})\]\[[0-9a-fA-F]{4}\:[0-9a-fA-F]{8}\], len = 0x[0-9a-fA-F]+\s*([\w+\-\:./_a-zA-Z0-9\\]*)\s*"
        patchLineFileMatchStringFunc = r"\*\*\s+(\w+)\s*"
        for reportLine in reportLines:
            match = re.match(patchLineFileMatchString, reportLine)
            if match is not None:
                # The source file is only printed on the first line of a
                # run; cache it for the following records.
                if match.group(3) != "":
                    self.sourceName = match.group(3)
                sourceName = self.sourceName
                functionName = self.functionName
                rva = int(match.group(2), 16)
                lineName = int(match.group(1))
                self.listLineAddress.append([rva, functionName, lineName, sourceName])
            else:
                match = re.match(patchLineFileMatchStringFunc, reportLine)
                if match is not None:
                    self.functionName = match.group(1)

        self.lineCount = len(self.listLineAddress)
        self.listLineAddress = sorted(self.listLineAddress, key=lambda symbolAddress: symbolAddress[0])
class SymbolsFile:
    """Aggregates per-driver Symbols objects, keyed by driver name."""
    def __init__(self):
        # Map: driver name -> Symbols instance for that driver's symbols.
        self.symbolsTable = {}
# Module-level state shared between processLine() and getSymbolName().
symbolsFile = ""  # replaced with a SymbolsFile instance in main()
driverName = ""   # driver currently being processed
rvaName = ""      # RVA string parsed from the current entry line
symbolName = ""   # resolved symbol text for the current entry line
def getSymbolName(driverName, rva):
    """Resolve *rva* inside *driverName*'s module to symbol text.

    Falls back to ' (???)' whenever the driver has no parsed symbol
    table or the global table is not initialized yet.
    """
    global symbolsFile
    try:
        symbols = symbolsFile.symbolsTable[driverName]
        if symbols is None:
            return " (???)"
        return symbols.getSymbol(rva)
    except Exception:
        return " (???)"
def processLine(newline):
    """Annotate one line of the memory profile log.

    'Driver - ...' header lines load the driver's symbol table into the
    module-level state; entry lines containing '<==' get the resolved
    symbol text appended.  Returns the (possibly annotated) line.
    """
    global driverName
    global rvaName
    driverPrefixLen = len("Driver - ")
    # get driver name
    # (fix: cmp() does not exist on Python 3; use plain equality)
    if newline[0:driverPrefixLen] == "Driver - ":
        driverlineList = newline.split(" ")
        driverName = driverlineList[2]

        # EDKII application output
        # (raw string: '\w'/'\s' are regex escapes, not string escapes)
        pdbMatchString = r"Driver - \w* \(Usage - 0x[0-9a-fA-F]+\) \(Pdb - ([:\-.\w\\/]*)\)\s*"
        pdbName = ""
        match = re.match(pdbMatchString, newline)
        if match is not None:
            pdbName = match.group(1)
            symbolsFile.symbolsTable[driverName] = Symbols()

            if pdbName[-3:] == "pdb":
                symbolsFile.symbolsTable[driverName].parse_pdb_file(driverName, pdbName)
            else:
                symbolsFile.symbolsTable[driverName].parse_debug_file(driverName, pdbName)

    elif newline == "":
        driverName = ""

    # check entry line
    if newline.find("<==") != -1:
        entry_list = newline.split(" ")
        rvaName = entry_list[4]
        symbolName = getSymbolName(driverName, int(rvaName, 16))
    else:
        rvaName = ""
        symbolName = ""

    if rvaName == "":
        return newline
    else:
        return newline + symbolName
def myOptionParser():
    """Parse the command line and return the validated option set.

    Requires -i/--inputfile; defaults -o/--outputfile to
    MemoryProfileInfoSymbol.txt when it is not given.
    """
    usage = "%prog [--version] [-h] [--help] [-i inputfile [-o outputfile]]"
    parser = OptionParser(usage=usage, description=__copyright__,
                          version="%prog " + str(versionNumber))
    parser.add_option("-i", "--inputfile", dest="inputfilename", type="string", help="The input memory profile info file output from MemoryProfileInfo application in MdeModulePkg")
    parser.add_option("-o", "--outputfile", dest="outputfilename", type="string", help="The output memory profile info file with symbol, MemoryProfileInfoSymbol.txt will be used if it is not specified")

    options, _ = parser.parse_args()

    if options.inputfilename is None:
        parser.error("no input file specified")
    if options.outputfilename is None:
        options.outputfilename = "MemoryProfileInfoSymbol.txt"

    return options
def main():
    """Annotate the memory profile report with symbol names.

    Opens the input report, runs every line through processLine() and writes
    the annotated result to the output file.  Returns 1 on open failure,
    None on success.

    Fix: the original leaked the already-opened input file when opening the
    output file failed; it is now closed on that early-exit path.  The local
    'file' was also renamed to avoid shadowing the builtin.
    """
    global symbolsFile
    global Options
    Options = myOptionParser()
    symbolsFile = SymbolsFile()

    try:
        infile = open(Options.inputfilename)
    except Exception:
        print("fail to open " + Options.inputfilename)
        return 1
    try:
        outfile = open(Options.outputfilename, "w")
    except Exception:
        print("fail to open " + Options.outputfilename)
        infile.close()  # fix: do not leak the input file on this error path
        return 1

    try:
        for line in infile:
            # NOTE(review): assumes every line ends with '\n'; a final line
            # without one would lose its last character - preserved as-is.
            newline = line[:-1]
            newline = processLine(newline)
            outfile.write(newline)
            outfile.write("\n")
    finally:
        infile.close()
        outfile.close()
# Script entry point: propagate main()'s result as the process exit status.
if __name__ == '__main__':
    sys.exit(main())
| edk2-master | BaseTools/Scripts/MemoryProfileSymbolGen.py |
## @file
# Set up the git configuration for contributing to TianoCore projects
#
# Copyright (c) 2019, Linaro Ltd. All rights reserved.<BR>
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import print_function
import argparse
import os.path
import re
import sys
try:
import git
except ImportError:
print('Unable to load gitpython module - please install and try again.')
sys.exit(1)
try:
# Try Python 2 'ConfigParser' module first since helpful lib2to3 will
# otherwise automagically load it with the name 'configparser'
import ConfigParser
except ImportError:
# Otherwise, try loading the Python 3 'configparser' under an alias
try:
import configparser as ConfigParser
except ImportError:
print("Unable to load configparser/ConfigParser module - please install and try again!")
sys.exit(1)
# Assumptions: Script is in edk2/BaseTools/Scripts,
# templates in edk2/BaseTools/Conf
CONFDIR = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
                       'Conf')

# Known TianoCore upstreams.  Each entry carries the project name, its
# canonical repository URL, the mailing list patches are sent to ('list'),
# and optionally a subject-prefix suffix ('prefix') appended after "PATCH".
UPSTREAMS = [
    {'name': 'edk2',
     'repo': 'https://github.com/tianocore/edk2.git',
     'list': '[email protected]'},
    {'name': 'edk2-platforms',
     'repo': 'https://github.com/tianocore/edk2-platforms.git',
     'list': '[email protected]', 'prefix': 'edk2-platforms'},
    {'name': 'edk2-non-osi',
     'repo': 'https://github.com/tianocore/edk2-non-osi.git',
     'list': '[email protected]', 'prefix': 'edk2-non-osi'},
    {'name': 'edk2-test',
     'repo': 'https://github.com/tianocore/edk2-test.git',
     'list': '[email protected]', 'prefix': 'edk2-test'}
    ]

# The minimum version required for all of the below options to work
MIN_GIT_VERSION = (1, 9, 0)

# Set of options to be set identically for all repositories
OPTIONS = [
    {'section': 'alias', 'option': 'fp',
     'value': 'format-patch -M --stat=1000 --stat-graph-width=20'},
    {'section': 'am', 'option': 'keepcr', 'value': True},
    {'section': 'am', 'option': 'signoff', 'value': True},
    {'section': 'cherry-pick', 'option': 'signoff', 'value': True},
    {'section': 'color', 'option': 'diff', 'value': True},
    {'section': 'color', 'option': 'grep', 'value': 'auto'},
    {'section': 'commit', 'option': 'signoff', 'value': True},
    {'section': 'core', 'option': 'abbrev', 'value': 12},
    {'section': 'core', 'option': 'attributesFile',
     'value': os.path.join(CONFDIR, 'gitattributes').replace('\\', '/')},
    {'section': 'core', 'option': 'whitespace', 'value': 'cr-at-eol'},
    {'section': 'diff', 'option': 'algorithm', 'value': 'patience'},
    {'section': 'diff', 'option': 'orderFile',
     'value': os.path.join(CONFDIR, 'diff.order').replace('\\', '/')},
    {'section': 'diff', 'option': 'renames', 'value': 'copies'},
    {'section': 'diff', 'option': 'statGraphWidth', 'value': '20'},
    {'section': 'diff "ini"', 'option': 'xfuncname',
     'value': '^\\\\[[A-Za-z0-9_., ]+]'},
    {'section': 'format', 'option': 'coverLetter', 'value': True},
    {'section': 'format', 'option': 'numbered', 'value': True},
    {'section': 'format', 'option': 'signoff', 'value': False},
    {'section': 'log', 'option': 'mailmap', 'value': True},
    {'section': 'notes', 'option': 'rewriteRef', 'value': 'refs/notes/commits'},
    {'section': 'sendemail', 'option': 'chainreplyto', 'value': False},
    {'section': 'sendemail', 'option': 'thread', 'value': True},
    {'section': 'sendemail', 'option': 'transferEncoding', 'value': '8bit'},
    ]
def locate_repo():
    """Opens a Repo object for the current tree, searching upwards in the directory hierarchy."""
    try:
        return git.Repo(path='.', search_parent_directories=True)
    except (git.InvalidGitRepositoryError, git.NoSuchPathError):
        # Not inside any working tree - nothing sensible to configure.
        print("It doesn't look like we're inside a git repository - aborting.")
        sys.exit(2)
def fuzzy_match_repo_url(one, other):
    """Compares two repository URLs, ignoring protocol and optional trailing '.git'."""
    # Strip "<proto>://" and any trailing ".git" before comparing.
    stem = r'.*://(?P<stem>.*?)(\.git)*$'
    first = re.match(stem, one)
    second = re.match(stem, other)
    return bool(first and second
                and first.group('stem') == second.group('stem'))
def get_upstream(url, name):
    """Extracts the dict for the current repo origin.

    Matches either by fuzzy URL comparison or by explicit name; exits the
    program when no known upstream matches.
    """
    for candidate in UPSTREAMS:
        url_matches = fuzzy_match_repo_url(candidate['repo'], url)
        if url_matches or candidate['name'] == name:
            return candidate
    print("Unknown upstream '%s' - aborting!" % url)
    sys.exit(3)
def check_versions():
    """Checks versions of dependencies.

    Exits with status 4 when the installed git is older than
    MIN_GIT_VERSION.

    Fix: the error message printed the *detected* (too old) version instead
    of the required minimum, producing nonsense like "Need git version 1.7
    or later!" on a 1.7 installation.
    """
    version = git.cmd.Git().version_info
    if version < MIN_GIT_VERSION:
        print('Need git version %d.%d or later!' % (MIN_GIT_VERSION[0],
                                                    MIN_GIT_VERSION[1]))
        sys.exit(4)
def write_config_value(repo, section, option, data):
    """Set section.option to data in repo's repository-local git config."""
    with repo.config_writer(config_level='repository') as configwriter:
        configwriter.set_value(section, option, data)
# Main driver: parse arguments, identify the upstream for the current repo,
# then apply (or just report, with -c) every entry in OPTIONS.
#
# Fix: the -n/--name choices list omitted 'edk2-test' even though that
# upstream is declared in UPSTREAMS, so it could never be selected manually.
if __name__ == '__main__':
    check_versions()
    PARSER = argparse.ArgumentParser(
        description='Sets up a git repository according to TianoCore rules.')
    PARSER.add_argument('-c', '--check',
                        help='check current config only, printing what would be changed',
                        action='store_true',
                        required=False)
    PARSER.add_argument('-f', '--force',
                        help='overwrite existing settings conflicting with program defaults',
                        action='store_true',
                        required=False)
    # Choices kept in sync with the 'name' keys of UPSTREAMS above.
    PARSER.add_argument('-n', '--name', type=str, metavar='repo',
                        choices=['edk2', 'edk2-platforms', 'edk2-non-osi',
                                 'edk2-test'],
                        help='set the repo name to configure for, if not '
                             'detected automatically',
                        required=False)
    PARSER.add_argument('-v', '--verbose',
                        help='enable more detailed output',
                        action='store_true',
                        required=False)
    ARGS = PARSER.parse_args()

    REPO = locate_repo()
    if REPO.bare:
        print('Bare repo - please check out an upstream one!')
        sys.exit(6)

    URL = REPO.remotes.origin.url

    UPSTREAM = get_upstream(URL, ARGS.name)
    # Defensive: get_upstream() exits on failure, so this should not trigger.
    if not UPSTREAM:
        print("Upstream '%s' unknown, aborting!" % URL)
        sys.exit(7)

    # Set a list email address if our upstream wants it
    if 'list' in UPSTREAM:
        OPTIONS.append({'section': 'sendemail', 'option': 'to',
                        'value': UPSTREAM['list']})
    # Append a subject prefix entry to OPTIONS if our upstream wants it
    if 'prefix' in UPSTREAM:
        OPTIONS.append({'section': 'format', 'option': 'subjectPrefix',
                        'value': "PATCH " + UPSTREAM['prefix']})

    CONFIG = REPO.config_reader(config_level='repository')

    for entry in OPTIONS:
        exists = False
        try:
            # Make sure to read boolean/int settings as real type rather than strings
            if isinstance(entry['value'], bool):
                value = CONFIG.getboolean(entry['section'], entry['option'])
            elif isinstance(entry['value'], int):
                value = CONFIG.getint(entry['section'], entry['option'])
            else:
                value = CONFIG.get(entry['section'], entry['option'])
            exists = True
        # Don't bail out from options not already being set
        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
            pass

        if exists:
            if value == entry['value']:
                if ARGS.verbose:
                    print("%s.%s already set (to '%s')" % (entry['section'],
                                                           entry['option'], value))
            else:
                # Existing value conflicts: overwrite only with -f.
                if ARGS.force:
                    write_config_value(REPO, entry['section'], entry['option'], entry['value'])
                else:
                    print("Not overwriting existing %s.%s value:" % (entry['section'],
                                                                     entry['option']))
                    print("  '%s' != '%s'" % (value, entry['value']))
                    print("  add '-f' to command line to force overwriting existing settings")
        else:
            print("%s.%s => '%s'" % (entry['section'], entry['option'], entry['value']))
            if not ARGS.check:
                write_config_value(REPO, entry['section'], entry['option'], entry['value'])
| edk2-master | BaseTools/Scripts/SetupGit.py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.