add more lexer tokens

Josh Holtrop 2011-08-31 10:41:31 -04:00
parent 49a061da2f
commit 9063eb10ce

@@ -5,11 +5,28 @@ class Lexer(object):
     def __init__(self):
         self.reserved = {
             'C': 'C',
+            # types
+            'char': 'CHAR',
+            'short': 'SHORT',
+            'int': 'INT',
+            'long': 'LONG',
+            # control
+            'if': 'IF',
+            'else': 'ELSE',
+            'while': 'WHILE',
+            'for': 'FOR',
+            'return': 'RETURN',
         }
         self.tokens = [
             'LPAREN',
             'RPAREN',
+            'LBRACKET',
+            'RBRACKET',
+            'LCURLY',
+            'RCURLY',
             'SEMICOLON',
             'STRING',
             'ID',
@@ -21,6 +38,10 @@ class Lexer(object):
         self.t_LPAREN = r'\('
         self.t_RPAREN = r'\)'
+        self.t_LBRACKET = r'\['
+        self.t_RBRACKET = r'\]'
+        self.t_LCURLY = r'\{'
+        self.t_RCURLY = r'\}'
         self.t_SEMICOLON = r';'
         self.t_ignore = ' \t\r'
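
The class in this diff follows PLY's object-oriented lexer pattern: token names in self.tokens, one t_TOKEN regex string per punctuation token, and a reserved-word map that an identifier rule can consult. The commit itself only shows the added names and regexes; the sketch below is a minimal, self-contained illustration of how such a lexer is typically built and driven with ply.lex. The t_ID and t_error rules, the lex.lex(module=self) call, and the sample input are assumptions for illustration, not part of this repository's code.

    # Minimal PLY lexer sketch (assumed wiring, trimmed to a few tokens).
    import ply.lex as lex

    class Lexer(object):
        def __init__(self):
            # Reserved words map source keywords to their token types.
            self.reserved = {
                'char': 'CHAR', 'int': 'INT', 'if': 'IF', 'return': 'RETURN',
            }
            # All token names PLY is allowed to emit.
            self.tokens = ['LPAREN', 'RPAREN', 'LBRACKET', 'RBRACKET',
                           'LCURLY', 'RCURLY', 'SEMICOLON', 'ID'] + \
                          list(self.reserved.values())
            # Single-character tokens as plain regex strings, as in the diff.
            self.t_LPAREN = r'\('
            self.t_RPAREN = r'\)'
            self.t_LBRACKET = r'\['
            self.t_RBRACKET = r'\]'
            self.t_LCURLY = r'\{'
            self.t_RCURLY = r'\}'
            self.t_SEMICOLON = r';'
            self.t_ignore = ' \t\r'
            # Build the lexer from this instance's t_* attributes.
            self.lexer = lex.lex(module=self)

        def t_ID(self, t):
            r'[A-Za-z_][A-Za-z0-9_]*'
            # Identifiers fall back to the reserved-word table, so 'int',
            # 'if', etc. come out as their own token types instead of ID.
            t.type = self.reserved.get(t.value, 'ID')
            return t

        def t_error(self, t):
            print("Illegal character %r" % t.value[0])
            t.lexer.skip(1)

    if __name__ == '__main__':
        lexer = Lexer()
        lexer.lexer.input('int x; if (x) { return x; }')
        for tok in lexer.lexer:
            print(tok)

Running the sketch prints one LexToken per token, with 'int', 'if', and 'return' reported as INT, IF, and RETURN thanks to the reserved-word lookup in t_ID.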