from rply import ParserGenerator

from plthy_impl.lexer import ALL_TOKENS
from plthy_impl import ast_nodes


class Parser():
    def __init__(self):
        self.pg = ParserGenerator(
            [i[0] for i in ALL_TOKENS],
            # Precedence table, lowest-binding first; each entry is
            # (associativity, [token names]).
            precedence=[
                ('left', ["KEYWORD_SET", "KEYWORD_IF", "KEYWORD_MAYBE", "KEYWORD_RETURN"]),
                ('left', ["KEYWORD_BECAUSE", "KEYWORD_UNTIL", "KEYWORD_DEFINE", "KEYWORD_AS"]),
                ('left', ["KEYWORD_DO", "BUILTIN"]),
                ('left', ["SYMBOL_EQUALS", "SYMBOL_LT", "SYMBOL_GT"]),
                ('left', ["SYMBOL_PLUS", "SYMBOL_MINUS", "SYMBOL_OR", "SYMBOL_AND"]),
                ('left', ["SYMBOL_LCURL", "SYMBOL_TIMES", "SYMBOL_DIVIDE", "SYMBOL_TILDE", "SYMBOL_MOD", "ID"])
            ]
        )

    def parse(self, token_input) -> ast_nodes.BaseBox:
        # Top-level program stuff
        @self.pg.production('program : KEYWORD_HELLO SYMBOL_PIPE statements KEYWORD_GOODBYE SYMBOL_PIPE')
        def program(tokens):
            return ast_nodes.Program(tokens[2])

        ## statements ##
        @self.pg.production('statements : ')
        def statements_none(_):
            return []

        @self.pg.production('statements : statement SYMBOL_PIPE statements')
        def statements(tokens):
            return [tokens[0]] + tokens[2]

        ## statement ##
        @self.pg.production('statement : KEYWORD_SKIP')
        def statement_skip(tokens):
            return ast_nodes.StatementSkip()

        @self.pg.production('statement : KEYWORD_MAYBE statement')
        def statement_maybe(tokens):
            return ast_nodes.Maybe(tokens[1])

        @self.pg.production('statement : KEYWORD_DO command')
        def statement_do(tokens):
            return ast_nodes.Do(tokens[1])

        @self.pg.production('statement : SYMBOL_LBRACKET statements SYMBOL_RBRACKET')
        def statement_scope(tokens):
            return ast_nodes.Scope(tokens[1])

        @self.pg.production('statement : KEYWORD_SET expression SYMBOL_SET ID')
        def statement_set(tokens):
            return ast_nodes.StatementSet(tokens[1], tokens[3].value)

        @self.pg.production('statement : KEYWORD_DEFINE ID SYMBOL_LT DATA_INT SYMBOL_GT KEYWORD_AS statement', precedence="KEYWORD_DEFINE")
        def statement_define(tokens):
            return ast_nodes.StatementDefine(tokens[1].value, int(tokens[3].value), tokens[6])

        @self.pg.production('statement : KEYWORD_RETURN expression')
        def statement_return(tokens):
            return ast_nodes.StatementReturn(tokens[1])

        ## command ##
        @self.pg.production('command : statement KEYWORD_IF expression')
        def command_if(tokens):
            return ast_nodes.CommandIf(tokens[0], tokens[2])

        @self.pg.production('command : statement KEYWORD_ELSE statement KEYWORD_IF expression')
        def command_ifelse(tokens):
            return ast_nodes.CommandIfElse(tokens[0], tokens[2], tokens[4])

        @self.pg.production('command : statement KEYWORD_BECAUSE expression')
        def command_because(tokens):
            return ast_nodes.CommandBecause(tokens[0], tokens[2])

        @self.pg.production('command : statement KEYWORD_UNTIL expression')
        def command_until(tokens):
            return ast_nodes.CommandUntil(tokens[0], tokens[2])

        @self.pg.production('command : BUILTIN expressions SYMBOL_GT')
        def command_builtin(tokens):
            return ast_nodes.Builtin(tokens[0], tokens[1])

        @self.pg.production('command : SYMBOL_QUOTE ID SYMBOL_QUOTE SYMBOL_LT expressions SYMBOL_GT')
        def command_call(tokens):
            return ast_nodes.Call(tokens[1].value, tokens[4])

        ## expressions ##
        @self.pg.production('expressions : ')
        def expressions_none(_):
            return []

        @self.pg.production('expressions : expression SYMBOL_SEMICOLON expressions')
        def expressions(tokens):
            return [tokens[0]] + tokens[2]

        ## expression ##
        @self.pg.production('expression : DATA_STRING')
        def exp_string(tokens):
            # Drop the surrounding quote characters from the string literal.
            return ast_nodes.ExpString(tokens[0].value[1:-1])

        @self.pg.production('expression : DATA_INT')
        def exp_int(tokens):
            return ast_nodes.ExpInt(int(tokens[0].value))

        @self.pg.production('expression : DATA_FLOAT')
        def exp_float(tokens):
            return ast_nodes.ExpFloat(float(tokens[0].value))

        @self.pg.production('expression : DATA_BOOL')
        def exp_bool(tokens):
            return ast_nodes.ExpBool(tokens[0].value == "true")

        @self.pg.production('expression : SYMBOL_LCURL expressions SYMBOL_RCURL')
        def exp_list(tokens):
            return ast_nodes.ExpList(tokens[1])

        @self.pg.production('expression : expression SYMBOL_LCURL expression SYMBOL_RCURL')
        def exp_index(tokens):
            return ast_nodes.ExpIndex(tokens[0], tokens[2])

        @self.pg.production('expression : SYMBOL_LPARENS expression SYMBOL_RPARENS')
        def exp_paren(tokens):
            return tokens[1]

        # All arithmetic and comparison operators share one handler; the
        # operator's lexeme is forwarded to the AST node.
        @self.pg.production('expression : expression SYMBOL_PLUS expression')
        @self.pg.production('expression : expression SYMBOL_MINUS expression')
        @self.pg.production('expression : expression SYMBOL_TIMES expression')
        @self.pg.production('expression : expression SYMBOL_DIVIDE expression')
        @self.pg.production('expression : expression SYMBOL_EQUALS expression')
        @self.pg.production('expression : expression SYMBOL_LT expression')
        @self.pg.production('expression : expression SYMBOL_GT expression')
        @self.pg.production('expression : expression SYMBOL_MOD expression')
        def exp_a_binop(tokens):
            return ast_nodes.ExpABinop(tokens[1].value, tokens[0], tokens[2])

        @self.pg.production('expression : expression SYMBOL_OR expression')
        @self.pg.production('expression : expression SYMBOL_AND expression')
        def exp_b_binop(tokens):
            # Boolean binary operators are not implemented yet; this
            # production currently yields None.
            pass

        @self.pg.production('expression : SYMBOL_TILDE expression')
        def exp_not(tokens):
            # Logical negation is not implemented yet; this production
            # currently yields None.
            pass

        @self.pg.production('expression : KEYWORD_VARIABLE ID')
        def exp_variable(tokens):
            return ast_nodes.ExpVariable(tokens[1].value)

        @self.pg.production('expression : KEYWORD_ARGUMENT ARG')
        def exp_arg(tokens):
            # ARG lexemes carry a one-character prefix before the index;
            # strip it and parse the remainder as an integer.
            return ast_nodes.ExpArg(int(tokens[1].value[1:]))

        @self.pg.production('expression : statement')
        def exp_statement(tokens):
            return tokens[0]

        ## Error Handling ##
        @self.pg.error
        def error_handle(token):
            if token.source_pos is None:
                print("E002: No valediction")
            elif token.source_pos.lineno == 1 and token.source_pos.colno == 1:
                print("E001: No greeting")
            else:
                print(f"E003: Unexpected token '{token.value}' ({token.name}) at line {token.source_pos.lineno}, column {token.source_pos.colno}.")
            # rply expects the error callback to abort rather than return,
            # so bail out after reporting.
            exit()

        ## Finish ##
        parser = self.pg.build()
        return parser.parse(token_input)
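
# Usage sketch (an assumption about how this class is driven: in the real
# project the token stream would come from plthy_impl.lexer, the module path
# below is hypothetical, and the concrete lexeme strings are placeholders;
# only the token names matter to the parser here):
#
#     from rply.token import Token
#     from plthy_impl.parser import Parser
#
#     tokens = iter([
#         Token("KEYWORD_HELLO", "hello"),
#         Token("SYMBOL_PIPE", "|"),
#         Token("KEYWORD_GOODBYE", "goodbye"),
#         Token("SYMBOL_PIPE", "|"),
#     ])
#     tree = Parser().parse(tokens)  # -> ast_nodes.Program([]) for the empty program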