"""
This module implements a simple calculator that parses and evaluates
arithmetic expressions whose tokens are separated by whitespace:
expression ::= term | expression operator expression
operator ::= + | - | * | /
with the usual precedence rules.
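
Example (a sketch; assumes STANDARD_OPERATORS maps "+", "-", "*" and "/" to
Operator instances with conventional precedence):

    calc = Calculator()
    calc("2 + 3 * 4")   # expected to evaluate to 14.0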
"""

from Operators import Operator, STANDARD_OPERATORS
from Expression import Token, Term, Expression, TermExpression, OperatorExpression

class Calculator:
    def __init__(self, operators=STANDARD_OPERATORS):
        self.operators = operators

    def _tokenize(self, line: str) -> list[Token]:
        """
        Tokenize a whitespace-separated expression into a list of tokens
        (numbers and operators). Raises ValueError on unknown tokens.
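
        For example (assuming STANDARD_OPERATORS maps "+" and "*" to Operator
        instances), "1 + 2 * 3" is expected to tokenize to
        [1.0, STANDARD_OPERATORS["+"], 2.0, STANDARD_OPERATORS["*"], 3.0].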
        """
        tokens = []
        for token in line.split():
            if token in self.operators:
                tokens.append(self.operators[token])
            else:
                try:
                    # Anything that is not an operator must be a numeric term.
                    tokens.append(float(token))
                except ValueError:
                    raise ValueError(f"Unknown token: {token!r}") from None
        return tokens

    def _parse(self, tokens: list[Token]) -> Expression:
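        """
        Recursively parse a token list into an Expression tree.

        The split point is the rightmost operator with the lowest precedence,
        so lower-precedence operators end up closer to the root and operators
        of equal precedence group left-associatively
        (e.g. "1 - 2 - 3" parses as "(1 - 2) - 3").
        """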
        if not tokens:
            raise ValueError("Empty expression")
        if len(tokens) == 1:
            if isinstance(tokens[0], Term):
                return TermExpression(tokens[0])
            raise ValueError(f"Expected a term, got {tokens[0]}")
        if len(tokens) == 2:
            raise ValueError("Invalid expression")

        # Find the rightmost operator with the lowest precedence; choosing the
        # rightmost occurrence makes equal-precedence operators left-associative.
        operator = None
        operator_index = -1
        for i, token in enumerate(tokens):
            if isinstance(token, Operator):
                if operator is None or token.precedence <= operator.precedence:
                    operator = token
                    operator_index = i
        if operator is None:
            raise ValueError(f"Expected an operator in: {tokens}")

        # Split the expression into two parts
        left = tokens[:operator_index]
        right = tokens[operator_index + 1:]

        # Parse the left and right parts recursively
        return OperatorExpression(operator, self._parse(left), self._parse(right))

    def __call__(self, expression: str) -> Term:
        """Parse and evaluate an expression string, returning its value."""
        return self._parse(self._tokenize(expression))()
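

# Minimal usage sketch. This assumes STANDARD_OPERATORS defines "+", "-", "*"
# and "/" with conventional precedence, and that calling the parsed Expression
# returns a numeric Term.
if __name__ == "__main__":
    calc = Calculator()
    for line in ("1 + 2", "2 + 3 * 4", "10 - 4 / 2"):
        print(f"{line} = {calc(line)}")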