# https://www.codewars.com/kata/52a78825cdfc2cfc87000005/solutions/python
import re
from enum import Enum
from dataclasses import dataclass


class TokenType(Enum):
    INTEGER = 1
    FLOATING = 2
    PLUS = 3
    MINUS = 4
    MULT = 5
    DIV = 6
    OPEN_PAREN = 7
    CLOSE_PAREN = 8


@dataclass
class Token:
    type: TokenType
    value: str  # the raw matched text


def tokenize(expr):
    scanner = re.Scanner((
        # FLOATING must be tried before INTEGER so "3.14" is not split into "3" + ".14"
        (r"\d+\.\d+", lambda scanner, token: Token(TokenType.FLOATING, token)),
        (r"\d+", lambda scanner, token: Token(TokenType.INTEGER, token)),
        (r"\+", lambda scanner, token: Token(TokenType.PLUS, token)),
        (r"\-", lambda scanner, token: Token(TokenType.MINUS, token)),
        (r"\*", lambda scanner, token: Token(TokenType.MULT, token)),
        (r"\/", lambda scanner, token: Token(TokenType.DIV, token)),
        (r"\(", lambda scanner, token: Token(TokenType.OPEN_PAREN, token)),
        (r"\)", lambda scanner, token: Token(TokenType.CLOSE_PAREN, token)),
        (r"\s+", None),  # skip whitespace
    ))

    tokens, unknown = scanner.scan(expr)
    assert len(unknown) == 0, f"Unknown token: {unknown}"
    return tokens


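# Quick illustration (not part of the original submission): tokenize() returns a flat
# list of Token objects, e.g. tokenize("1 + 2 * 3") yields, roughly,
#   [Token(INTEGER, '1'), Token(PLUS, '+'), Token(INTEGER, '2'),
#    Token(MULT, '*'), Token(INTEGER, '3')]
# which the recursive-descent Parser below consumes left to right.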
class Parser:
    def __init__(self, tokens):
        self.tokens = tokens
        self.pos = 0

    def _accept(self, token_type) -> Token | None:
        """Consume and return the next token if it has the given type, else return None."""
        if self.pos < len(self.tokens) and self.tokens[self.pos].type == token_type:
            token = self.tokens[self.pos]
            self.pos += 1
            return token
        return None

    def parse_expr(self):
        tree = self.parse_expr_plus()
        assert self.pos == len(self.tokens), "Not all tokens were consumed!"
        return tree

    def parse_expr_plus(self):
        # expr_plus := expr_mult (("+" | "-") expr_mult)*
        lhs = self.parse_expr_mult()
        while ((token := self._accept(TokenType.PLUS)) is not None
               or (token := self._accept(TokenType.MINUS)) is not None):
            rhs = self.parse_expr_mult()
            if token.type == TokenType.PLUS:
                lhs += rhs
            elif token.type == TokenType.MINUS:
                lhs -= rhs
            else:
                raise SyntaxError(f"Unexpected token {token}")
        return lhs

    def parse_expr_mult(self):
        # expr_mult := primary (("*" | "/") primary)*
        lhs = self.parse_primary()
        while ((token := self._accept(TokenType.MULT)) is not None
               or (token := self._accept(TokenType.DIV)) is not None):
            if token.type == TokenType.MULT:
                lhs *= self.parse_primary()
            elif token.type == TokenType.DIV:
                lhs /= self.parse_primary()
            else:
                raise SyntaxError(f"Unexpected token {token}")
        return lhs

    def parse_primary(self):
        # primary := "-"? (INTEGER | FLOATING | "(" expr_plus ")")
        assert self.pos <= len(self.tokens)
        sign = 1
        if self._accept(TokenType.MINUS) is not None:
            sign *= -1
        if (token := self._accept(TokenType.INTEGER)) is not None:
            return sign * int(token.value)
        elif (token := self._accept(TokenType.FLOATING)) is not None:
            return sign * float(token.value)
        elif self._accept(TokenType.OPEN_PAREN) is not None:
            expr = self.parse_expr_plus()
            if self._accept(TokenType.CLOSE_PAREN) is None:
                raise SyntaxError(f"Expected token {TokenType.CLOSE_PAREN}")
            return sign * expr
        assert False, (
            "Unexpected token for parse_primary: "
            f"{self.tokens[self.pos] if self.pos < len(self.tokens) else 'end of input'}"
        )


def calc(expression):
    p = Parser(tokenize(expression))
    try:
        return p.parse_expr()
    except Exception:
        # Print the failing expression and its token stream to ease debugging, then re-raise.
        print(expression)
        print(p.tokens)
        raise
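

# A minimal usage sketch (an addition, not part of the original submission). The exact
# test expressions are assumptions; the arithmetic itself is easy to verify by hand.
if __name__ == "__main__":
    assert calc("2 + 3 * 4") == 14      # "*" binds tighter than "+"
    assert calc("(2 + 3) * 4") == 20    # parentheses override precedence
    assert calc("-(1 - 2) * 3") == 3    # unary minus on a parenthesised expression
    assert calc("7 / 2") == 3.5         # "/" is true division
    print("all examples passed")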