🎉
This commit is contained in:
118
plthy_impl/parser.py
Normal file
118
plthy_impl/parser.py
Normal file
@ -0,0 +1,118 @@
|
||||
from rply import ParserGenerator
|
||||
|
||||
from plthy_impl.lexer import ALL_TOKENS
|
||||
from plthy_impl import ast_nodes
|
||||
|
||||
class Parser():
    """LR(1) parser for the plthy language, built on rply.

    The grammar (token list, operator precedence, and all productions) is
    registered once at construction time and the generated parse table is
    cached.  The original implementation registered every production and
    called ``self.pg.build()`` inside ``parse()``, which meant a second
    call to ``parse()`` re-registered every production (duplicating them
    in the generator) and rebuilt the table from scratch each time.
    """

    def __init__(self):
        # Token names come from the lexer's (NAME, pattern) pairs.
        self.pg = ParserGenerator(
            [i[0] for i in ALL_TOKENS],
            precedence=[
                ('left', ["KEYWORD_MAYBE", "KEYWORD_RETURN"]),
                ('left', ["KEYWORD_IF", "KEYWORD_DEFINE", "KEYWORD_AS"]),
                ('left', ["KEYWORD_DO", "BUILTIN"]),
                ('left', ["SYMBOL_EQUALS", "SYMBOL_SET"]),
                ('left', ["SYMBOL_PLUS", "SYMBOL_MINUS"]),
                ('left', ["SYMBOL_TIMES", "SYMBOL_DIVIDE"])
            ]
        )
        self._register_productions()
        # Build the parse table exactly once; every parse() call reuses it.
        self._parser = self.pg.build()

    def parse(self, token_input) -> ast_nodes.BaseBox:
        """Parse a lexer token stream and return the root AST node.

        :param token_input: iterable of rply tokens (as produced by the
            project's lexer).
        :returns: an ``ast_nodes.Program`` (a ``BaseBox`` subclass).
        :raises Exception: on any syntax error, carrying the offending
            token's name, value, and source position.
        """
        return self._parser.parse(token_input)

    def _register_productions(self):
        """Attach every grammar production to ``self.pg`` (called once)."""

        # Top-level program stuff
        @self.pg.production('program : KEYWORD_HELLO SYMBOL_PIPE statements KEYWORD_GOODBYE SYMBOL_PIPE')
        def program(tokens):
            return ast_nodes.Program(tokens[2])

        ## statements ##
        @self.pg.production('statements : ')
        def statements_none(_):
            # Empty statement list (e.g. an empty program or scope).
            return []

        @self.pg.production('statements : statement SYMBOL_PIPE statements')
        def statements(tokens):
            # Right-recursive list: prepend the head to the parsed tail.
            return [tokens[0]] + tokens[2]

        ## statement ##
        @self.pg.production('statement : SYMBOL_DOLLAR expression SYMBOL_SET ID', precedence="SYMBOL_SET")
        def statement_set(tokens):
            return ast_nodes.StatementSet(tokens[1], tokens[3].value)

        @self.pg.production('statement : KEYWORD_DO command')
        def statement_do(tokens):
            return ast_nodes.Do(tokens[1])

        @self.pg.production('statement : KEYWORD_MAYBE statement')
        def statement_maybe(tokens):
            return ast_nodes.Maybe(tokens[1])

        @self.pg.production('statement : statement KEYWORD_IF expression')
        def statement_if(tokens):
            return ast_nodes.StatementIf(tokens[0], tokens[2])

        @self.pg.production('statement : KEYWORD_DEFINE ID SYMBOL_LT DATA_NUMERAL SYMBOL_GT KEYWORD_AS statement', precedence="KEYWORD_DEFINE")
        def statement_define(tokens):
            # Name, declared argument count, and the body statement.
            return ast_nodes.StatementDefine(tokens[1].value, int(tokens[3].value), tokens[6])

        @self.pg.production('statement : KEYWORD_RETURN expression')
        def statement_return(tokens):
            return ast_nodes.StatementReturn(tokens[1])

        @self.pg.production('statement : SYMBOL_LBRACKET statements SYMBOL_RBRACKET')
        def statement_scope(tokens):
            return ast_nodes.Scope(tokens[1])

        ## command ##
        @self.pg.production('command : BUILTIN SYMBOL_LT expressions SYMBOL_GT')
        def command_builtin(tokens):
            return ast_nodes.Builtin(tokens[0].value, tokens[2])

        @self.pg.production('command : SYMBOL_QUOTE ID SYMBOL_QUOTE SYMBOL_LT expressions SYMBOL_GT')
        def command_call(tokens):
            return ast_nodes.Call(tokens[1].value, tokens[4])

        ## expressions ##
        @self.pg.production('expressions : ')
        def expressions_none(_):
            # Empty argument list.
            return []

        @self.pg.production('expressions : expression SYMBOL_SEMICOLON expressions ')
        def expressions(tokens):
            return [tokens[0]] + tokens[2]

        ## expression ##
        @self.pg.production('expression : DATA_NUMERAL')
        def exp_numeral(tokens):
            # All numerals are floats at runtime.
            return ast_nodes.ExpNumeral(float(tokens[0].value))

        @self.pg.production('expression : DATA_STRING')
        def exp_string(tokens):
            # Slice off the surrounding quote characters.
            return ast_nodes.ExpString(tokens[0].value[1:-1])

        @self.pg.production('expression : statement', precedence="KEYWORD_IF")
        def exp_statement(tokens):
            # Statements are usable as expressions; pass the node through.
            return tokens[0]

        @self.pg.production('expression : KEYWORD_VARIABLE ID')
        def exp_variable(tokens):
            return ast_nodes.ExpVariable(tokens[1].value)

        @self.pg.production('expression : expression SYMBOL_PLUS expression')
        @self.pg.production('expression : expression SYMBOL_MINUS expression')
        @self.pg.production('expression : expression SYMBOL_TIMES expression')
        @self.pg.production('expression : expression SYMBOL_DIVIDE expression')
        @self.pg.production('expression : expression SYMBOL_EQUALS expression')
        def exp_a_binop(tokens):
            # Operator token text selects the binary operation.
            return ast_nodes.ExpABinop(tokens[1].value, tokens[0], tokens[2])

        @self.pg.production('expression : KEYWORD_ARGUMENT ARG')
        def exp_arg(tokens):
            # ARG looks like e.g. "@3"; strip the sigil and parse the index.
            return ast_nodes.ExpArg(int(tokens[1].value[1:]))

        ## Error Handling ##
        @self.pg.error
        def error_handle(token):
            raise Exception(token.name, token.value, token.source_pos)
|
Reference in New Issue
Block a user