# Kyler Olsen
# Feb 2024

from enum import Enum
from typing import ClassVar, Sequence
from textwrap import indent


class FileInfo:

    _filename: str
    _line: int
    _col: int
    _length: int
    _lines: int

    def __init__(
        self,
        filename: str,
        line: int,
        col: int,
        length: int,
        lines: int = 0,
    ):
        self._filename = filename
        self._line = line
        self._col = col
        self._length = length
        self._lines = lines

    def __repr__(self) -> str:
        return (
            f"{type(self).__name__}"
            f"('{self._filename}',{self._line},{self._col},{self._length})"
        )

    def __str__(self) -> str:
        return f"Ln {self.line}, Col {self.col} in file {self.filename}"

    def __add__(self, other: "FileInfo") -> "FileInfo":
        filename = self.filename
        line = self.line
        col = self.col
        if self.line != other.line:
            if other.lines == 0:
                length = other.col + other.length
            else:
                length = other.length
            lines = other.line - self.line
        else:
            length = (other.col + other.length) - col
            lines = 0
        return FileInfo(
            filename,
            line,
            col,
            length,
            lines,
        )

    @property
    def filename(self) -> str: return self._filename
    @property
    def line(self) -> int: return self._line
    @property
    def col(self) -> int: return self._col
    @property
    def length(self) -> int: return self._length
    @property
    def lines(self) -> int: return self._lines


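# A minimal usage sketch of FileInfo span merging (illustrative values only;
# "example.graph" is a hypothetical file name):
#
#     start = FileInfo("example.graph", line=3, col=5, length=4)
#     end = FileInfo("example.graph", line=3, col=12, length=2)
#     span = start + end
#     # span covers cols 5 through 13 on line 3:
#     # repr(span) == "FileInfo('example.graph',3,5,9)"

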
class CompilerError(Exception):

    _compiler_error_type = "Compiler"

    def __init__(
        self,
        message: str,
        file_info: FileInfo,
        file_info_context: FileInfo | None = None,
    ):
        new_message = message
        new_message += (
            f"\nIn file {file_info.filename} at line {file_info.line} "
        )
        # A multi-line context cannot be underlined, so drop it.
        if file_info_context is not None and file_info_context.lines:
            file_info_context = None
        if file_info.lines:
            new_message += f"to line {file_info.line + file_info.lines}"
            with open(file_info.filename, 'r', encoding='utf-8') as file:
                new_message += ''.join(
                    file.readlines()[
                        file_info.line-1:file_info.line + file_info.lines])
        else:
            new_message += f"col {file_info.col}\n\n"
            with open(file_info.filename, 'r', encoding='utf-8') as file:
                new_message += file.readlines()[file_info.line-1]
            if file_info_context is not None:
                context_line = [' '] * max(
                    file_info.col + file_info.length,
                    file_info_context.col + file_info_context.length,
                )
                for i in range(
                    file_info_context.col - 1,
                    file_info_context.col - 1 + file_info_context.length
                ):
                    context_line[i] = '~'
                for i in range(
                    file_info.col - 1,
                    file_info.col - 1 + file_info.length
                ):
                    context_line[i] = '^'
                new_message += ''.join(context_line)
            else:
                new_message += ' ' * (
                    file_info.col - 1) + '^' * file_info.length

        super().__init__(new_message)

    def compiler_error(self) -> str:
        return (
            f"[{self._compiler_error_type} Error] {type(self).__name__}:\n"
            f"{indent(str(self), ' |', lambda _: True)}"
        )


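# Roughly, `compiler_error()` renders output of this shape (the quoted source
# line depends on the file being compiled; this is only a sketch):
#
#     [Lexical Error] LexerError:
#      |Identifier Too Long
#      |In file example.graph at line 2 col 1
#      |
#      |somereallyverylongname: 1
#      |^^^^^^^^^^^^^^^^^^^^^^

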
# -- Lexical --

class LexerError(CompilerError):

    _compiler_error_type = "Lexical"


class _InterTokenType(Enum):
    Generic = 'Generic'
    Comment = 'Comment'
    Word = 'Word'
    NumberLiteral = 'NumberLiteral'
    Punctuation = 'Punctuation'


class _NumberLiteralType(Enum):
    Number = 'Number'
    Real = 'Real'
    Exp = 'Exp'


_OnlyNewLineTerminatedTokens = (
    _InterTokenType.Comment,
)

_NewLineTerminatedTokens = _OnlyNewLineTerminatedTokens + (
    _InterTokenType.Word,
    _InterTokenType.NumberLiteral,
    _InterTokenType.Punctuation,
)

_ID_Start = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz"

_ID_Continue = _ID_Start  # + "0123456789"

_Keywords = (
    'screen', 'graph', 'anim', 'const',
)

_Num_Start = "0123456789"

_Num_Start_Next = {
    _NumberLiteralType.Number: {
        '.': _NumberLiteralType.Real,
    },
}

_Num_Continue = {
    _NumberLiteralType.Number: _Num_Start + ".eE_",
    _NumberLiteralType.Real: _Num_Start + "eE_",
    _NumberLiteralType.Exp: _Num_Start + "_",
}

_Num_Continue_Next = {
    _NumberLiteralType.Number: {
        '.': _NumberLiteralType.Real,
        'e': _NumberLiteralType.Exp,
        'E': _NumberLiteralType.Exp,
    },
    _NumberLiteralType.Real: {
        'e': _NumberLiteralType.Exp,
        'E': _NumberLiteralType.Exp,
    },
}

# 'θ' is listed in _Punctuation, so it must also be a starting character here.
_Punctuation_Any = "_+-*/%^<>=!{[(}]),;:∑∏∞≠≤≥∫αβθπ→"

_Punctuation = (
    "+", "-", "*", "/", "%", "^",
    "=", "!", "<", "<=", ">", ">=",
    "{", "}", "[", "]", "(", ")",
    "_", "->", ",", ";", ":", "∑",
    "∏", "∞", "≠", "≤", "≥", "∫",
    "α", "β", "θ", "π", "→",
)

_Punctuation_Conversion = {
    "<=": "≤",
    ">=": "≥",
    "->": "→",
}

_Punctuation_Enclosing = {
    '(': ')',
    ')': '(',
    '[': ']',
    ']': '[',
    '{': '}',
    '}': '{',
}

_ID_Conversion = {
    "sum": "∑",
    "pi": "π",
    "alpha": "α",
    "beta": "β",
    "inf": "∞",
    "product": "∏",
    "integral": "∫",
    "theta": "θ",
}


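# The lexer folds ASCII spellings into the symbols above: the identifier `pi`
# becomes the punctuation token `π`, and the two-character operator `<=`
# becomes `≤`. So, for example, `anim a { R: 0 <= x < 2*pi }` and
# `anim a { R: 0 ≤ x < 2*π }` tokenize identically.

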
class Token:

    _type: ClassVar[str] = 'Generic'
    _value: str
    _file_info: FileInfo

    def __init__(self, value: str, file_info: FileInfo):
        self._value = value
        self._file_info = file_info

    def __str__(self) -> str:
        return f"Type: {self._type}, Value: {self.value}"

    @property
    def value(self) -> str: return self._value

    @property
    def file_info(self) -> FileInfo: return self._file_info


class Identifier(Token): _type = 'Identifier'
class Keyword(Token): _type = 'Keyword'
class NumberLiteral(Token): _type = 'NumberLiteral'
class Punctuation(Token): _type = 'Punctuation'


def lexer(file: str, filename: str) -> Sequence[Token]:
    tokens: list[Token] = []
    current: str = ""
    current_line: int = 0
    current_col: int = 0
    number_type: _NumberLiteralType = _NumberLiteralType.Number
    token_type: _InterTokenType = _InterTokenType.Generic

    for line, line_str in enumerate(file.splitlines()):
        # Flush any token that is terminated by the end of a line.
        fi = FileInfo(filename, current_line, current_col, len(current))
        if token_type in _NewLineTerminatedTokens:
            if token_type is _InterTokenType.Word:
                if len(current) > 15:
                    raise LexerError("Identifier Too Long", fi)
                if current.lower() in _Keywords:
                    tokens.append(Keyword(current, fi))
                elif current.lower() in _ID_Conversion:
                    tokens.append(
                        Punctuation(_ID_Conversion[current.lower()], fi))
                else:
                    tokens.append(Identifier(current, fi))
            elif token_type is _InterTokenType.NumberLiteral:
                tokens.append(NumberLiteral(current, fi))
                number_type = _NumberLiteralType.Number
            elif token_type is _InterTokenType.Punctuation:
                if current not in _Punctuation:
                    raise LexerError("Invalid Punctuation", fi)
                if current in _Punctuation_Conversion:
                    current = _Punctuation_Conversion[current]
                tokens.append(Punctuation(current, fi))
            token_type = _InterTokenType.Generic

        for col, char in enumerate(line_str):
            if token_type in _OnlyNewLineTerminatedTokens:
                current += char
            elif token_type is _InterTokenType.Word:
                if char in _ID_Continue:
                    current += char
                else:
                    fi = FileInfo(
                        filename, current_line, current_col, len(current))
                    if len(current) > 15:
                        raise LexerError("Identifier Too Long", fi)
                    if current.lower() in _Keywords:
                        tokens.append(Keyword(current, fi))
                    elif current.lower() in _ID_Conversion:
                        tokens.append(
                            Punctuation(_ID_Conversion[current.lower()], fi))
                    else:
                        tokens.append(Identifier(current, fi))
                    token_type = _InterTokenType.Generic
            elif token_type is _InterTokenType.NumberLiteral:
                if (
                    number_type in _Num_Continue and
                    char in _Num_Continue[number_type]
                ):
                    current += char
                    if (
                        number_type in _Num_Continue_Next and
                        char in _Num_Continue_Next[number_type]
                    ):
                        number_type = _Num_Continue_Next[number_type][char]
                else:
                    fi = FileInfo(
                        filename, current_line, current_col, len(current))
                    tokens.append(NumberLiteral(current, fi))
                    number_type = _NumberLiteralType.Number
                    token_type = _InterTokenType.Generic
            elif token_type is _InterTokenType.Punctuation:
                if char in _Punctuation_Any and current + char in _Punctuation:
                    current += char
                else:
                    fi = FileInfo(
                        filename, current_line, current_col, len(current))
                    if current not in _Punctuation:
                        raise LexerError("Invalid Punctuation", fi)
                    if current in _Punctuation_Conversion:
                        current = _Punctuation_Conversion[current]
                    tokens.append(Punctuation(current, fi))
                    token_type = _InterTokenType.Generic

            # A flushed (or still Generic) state starts a new token here.
            if token_type is _InterTokenType.Generic:
                current = char
                current_line = line + 1
                current_col = col + 1
                if char == '#':
                    token_type = _InterTokenType.Comment
                elif char in _ID_Start:
                    token_type = _InterTokenType.Word
                elif (
                    char == '.' and
                    col + 1 < len(line_str) and
                    line_str[col+1] in _Num_Continue[_NumberLiteralType.Real]
                ):
                    token_type = _InterTokenType.NumberLiteral
                    if char in _Num_Start_Next[number_type]:
                        number_type = _Num_Start_Next[number_type][char]
                elif char in _Num_Start:
                    token_type = _InterTokenType.NumberLiteral
                    if char in _Num_Start_Next[number_type]:
                        number_type = _Num_Start_Next[number_type][char]
                elif char in _Punctuation_Any:
                    token_type = _InterTokenType.Punctuation

    # Flush whatever token is still open at the end of the input.
    fi = FileInfo(filename, current_line, current_col, len(current))
    if token_type in _NewLineTerminatedTokens:
        if token_type is _InterTokenType.Word:
            if len(current) > 15:
                raise LexerError("Identifier Too Long", fi)
            if current.lower() in _Keywords:
                tokens.append(Keyword(current, fi))
            elif current.lower() in _ID_Conversion:
                tokens.append(Punctuation(_ID_Conversion[current.lower()], fi))
            else:
                tokens.append(Identifier(current, fi))
        elif token_type is _InterTokenType.NumberLiteral:
            tokens.append(NumberLiteral(current, fi))
            number_type = _NumberLiteralType.Number
        elif token_type is _InterTokenType.Punctuation:
            if current not in _Punctuation:
                raise LexerError("Invalid Punctuation", fi)
            if current in _Punctuation_Conversion:
                current = _Punctuation_Conversion[current]
            tokens.append(Punctuation(current, fi))
        token_type = _InterTokenType.Generic

    return tokens


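# A minimal sketch of the lexer's output (printed via Token.__str__):
#
#     >>> for t in lexer("const tau = 2 * pi;", "<demo>"):
#     ...     print(t)
#     Type: Keyword, Value: const
#     Type: Identifier, Value: tau
#     Type: Punctuation, Value: =
#     Type: NumberLiteral, Value: 2
#     Type: Punctuation, Value: *
#     Type: Punctuation, Value: π
#     Type: Punctuation, Value: ;

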
# -- Syntax --


class SyntaxError(CompilerError):
    # Note: deliberately shadows the builtin SyntaxError within this module.

    _compiler_error_type = "Syntax"


class UnexpectedEndOfTokenStream(SyntaxError): pass


class _ExpectedTokenBase(SyntaxError):

    _token_type: ClassVar = Token

    def __init__(
        self,
        token: Token,
        expected: str | None = None,
        found: str | None = None,
    ):
        if expected is None:
            if isinstance(self._token_type, tuple):
                expected = "' or '".join(
                    t.__name__ for t in self._token_type)
            else:
                expected = self._token_type.__name__
            found = found or type(token).__name__
        else:
            found = found or token.value
        message = f"Expected '{expected}' but found '{found}'."
        super().__init__(message, token.file_info)


class ExpectedIdentifier(_ExpectedTokenBase): _token_type = Identifier
class ExpectedKeyword(_ExpectedTokenBase): _token_type = Keyword
class ExpectedNumberLiteral(_ExpectedTokenBase): _token_type = NumberLiteral
class ExpectedPunctuation(_ExpectedTokenBase): _token_type = Punctuation


class ExpectedLiteral(_ExpectedTokenBase):
    _token_type = (NumberLiteral, Punctuation)


class _UnexpectedTokenBase(_ExpectedTokenBase):

    def __init__(
        self,
        token: Token,
        expected: str | list[str] | None = None,
        found: str | None = None,
    ):
        if isinstance(expected, list):
            if len(expected) > 1:
                s = ""
                for i in expected[:-1]:
                    s += i + "', '"
                s = s[:-1] + "or '" + expected[-1]
                expected = s
            else:
                expected = expected[0]
        super().__init__(token, expected, found)


class UnexpectedToken(_UnexpectedTokenBase):

    def __init__(
        self,
        token: Token,
        expected: str | list[str],
        found: str | None = None,
    ):
        if isinstance(expected, list):
            if len(expected) > 1:
                s = ""
                for i in expected[:-1]:
                    s += i + "', '"
                s = s[:-1] + "or '" + expected[-1]
                expected = s
        found = found or type(token).__name__
        super().__init__(token, expected, found)


class UnexpectedIdentifier(_UnexpectedTokenBase): _token_type = Identifier
class UnexpectedKeyword(_UnexpectedTokenBase): _token_type = Keyword
class UnexpectedNumberLiteral(_UnexpectedTokenBase): _token_type = NumberLiteral
class UnexpectedPunctuation(_UnexpectedTokenBase): _token_type = Punctuation


class ExpressionError(Exception): pass


class ExpectedExpression(SyntaxError):

    def __init__(
        self,
        message: str,
        token: Token,
    ):
        super().__init__(message, token.file_info)


_Id_Punctuation = [
    '∑',
    'π',
    'α',
    'β',
    '∞',
    '∏',
    '∫',
    'θ',
]


class Expression:

    _file_info: FileInfo

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def has_pi(self) -> bool: return False

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Expression\n"
        return s


class LiteralExpression(Expression):

    _file_info: FileInfo
    _value: NumberLiteral | Punctuation

    def __init__(
        self,
        file_info: FileInfo,
        value: NumberLiteral | Punctuation,
    ):
        self._file_info = file_info
        self._value = value

    def has_pi(self) -> bool: return self._value.value == 'π'

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Literal Expression ({self._value.value})\n"
        return s


class EnclosedExpression(Expression):

    _file_info: FileInfo
    _expression: Expression

    def __init__(
        self,
        file_info: FileInfo,
        expression: Expression,
    ):
        self._file_info = file_info
        self._expression = expression

    def has_pi(self) -> bool: return self._expression.has_pi()

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Enclosed Expression\n"
        s += self._expression.tree_str(f"{pre_cont}└─", f"{pre_cont} ")
        return s


class UnaryOperator(Enum):
    Negate = "-"
    Factorial = "!"


class UnaryExpression(Expression):

    _file_info: FileInfo
    _expression: Expression
    _operator: UnaryOperator

    def __init__(
        self,
        file_info: FileInfo,
        expression: Expression,
        operator: UnaryOperator,
    ):
        self._file_info = file_info
        self._expression = expression
        self._operator = operator

    def has_pi(self) -> bool: return self._expression.has_pi()

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Unary Expression ({self._operator})\n"
        s += self._expression.tree_str(f"{pre_cont}└─", f"{pre_cont} ")
        return s


class BinaryOperator(Enum):
    Exponential = "^"
    Subscript = "_"
    Division = "/"
    Modulus = "%"
    Multiplication = "*"
    Subtraction = "-"
    Addition = "+"


class BinaryExpression(Expression):

    _file_info: FileInfo
    _expression1: Expression
    _expression2: Expression
    _operator: BinaryOperator

    def __init__(
        self,
        file_info: FileInfo,
        expression1: Expression,
        expression2: Expression,
        operator: BinaryOperator,
    ):
        self._file_info = file_info
        self._expression1 = expression1
        self._expression2 = expression2
        self._operator = operator

    def has_pi(self) -> bool:
        return self._expression1.has_pi() or self._expression2.has_pi()

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Binary Expression ({self._operator})\n"
        s += self._expression1.tree_str(f"{pre_cont}├─", f"{pre_cont}│ ")
        s += self._expression2.tree_str(f"{pre_cont}└─", f"{pre_cont} ")
        return s


# Operators later in this tuple bind more loosely; `_expression_sa` splits an
# expression at the loosest operator found at nesting depth zero.
_Operator_Precedence: tuple[
    UnaryOperator |
    BinaryOperator,
    ...
] = (
    UnaryOperator.Negate,
    UnaryOperator.Factorial,
    BinaryOperator.Exponential,
    BinaryOperator.Subscript,
    BinaryOperator.Division,
    BinaryOperator.Modulus,
    BinaryOperator.Multiplication,
    BinaryOperator.Subtraction,
    BinaryOperator.Addition,
)


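# For example, in `1 - 2 * 3` the loosest zero-depth operator is `-`
# (Subtraction sits later in the tuple than Multiplication), so the tree is
# built as (1) - (2 * 3); see `_expression_sa` below.

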
class FunctionCall(Expression):

    _file_info: FileInfo
    _identifier: Identifier
    _arguments: list[Expression]

    def __init__(
        self,
        file_info: FileInfo,
        identifier: Identifier,
        arguments: list[Expression],
    ):
        self._file_info = file_info
        self._identifier = identifier
        self._arguments = arguments

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Function Call ({self._identifier.value})\n"
        # Guard against calls with no arguments.
        if self._arguments:
            for arg in self._arguments[:-1]:
                s += arg.tree_str(f"{pre_cont}├─", f"{pre_cont}│ ")
            s += self._arguments[-1].tree_str(f"{pre_cont}└─", f"{pre_cont} ")
        return s


class Constant:

    _file_info: FileInfo
    _identifier: Identifier
    _expression: Expression

    def __init__(
        self,
        file_info: FileInfo,
        identifier: Identifier,
        expression: Expression,
    ):
        self._file_info = file_info
        self._identifier = identifier
        self._expression = expression

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Constant ({self._identifier.value})\n"
        s += self._expression.tree_str(f"{pre_cont}└─", f"{pre_cont} ")
        return s

    @staticmethod
    def _sa(tokens: list[Token], first_token: Token) -> "Constant":
        constant_tokens, last_token = _get_to_symbol(tokens, ';')
        name, _ = _get_to_symbol(constant_tokens, '=')
        if len(name) > 1:
            raise UnexpectedToken(name[1], '=')
        _assert_token(ExpectedIdentifier, name[0])
        identifier: Identifier = name[0]  # type: ignore
        fi = first_token.file_info + last_token.file_info
        try:
            return Constant(
                fi, identifier, _expression_sa(constant_tokens))
        except ExpressionError as err:
            raise ExpectedExpression(str(err), last_token)


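# A sketch of the surface syntax `Constant._sa` accepts (inferred from the
# token handling above): `const tau = 2 * pi;` parses to a Constant whose
# expression multiplies the literal 2 by the π punctuation literal.

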
class AnimationDirection(Enum):
    Increase = "increase"
    Decrease = "decrease"
    Bounce = "bounce"


class InlineAnimation:

    _file_info: FileInfo
    _range_start: Expression
    _range_start_inclusive: bool
    _range_end: Expression
    _range_end_inclusive: bool
    _step: Expression
    _direction: AnimationDirection

    def __init__(
        self,
        file_info: FileInfo,
        range_start: Expression,
        range_start_inclusive: bool,
        range_end: Expression,
        range_end_inclusive: bool,
        step: Expression,
        direction: AnimationDirection,
    ):
        self._file_info = file_info
        self._range_start = range_start
        self._range_start_inclusive = range_start_inclusive
        self._range_end = range_end
        self._range_end_inclusive = range_end_inclusive
        self._step = step
        self._direction = direction

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Inline Animation\n"
        s += (
            f"{pre_cont}├─ Range Start "
            f"({'≤' if self._range_start_inclusive else '<'})\n"
        )
        s += self._range_start.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
        s += (
            f"{pre_cont}├─ Range End "
            f"({'≤' if self._range_end_inclusive else '<'})\n"
        )
        s += self._range_end.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
        s += f"{pre_cont}├─ Step\n"
        s += self._step.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
        s += f"{pre_cont}└─ Direction: {self._direction}\n"
        return s

    @staticmethod
    def _sa(tokens: list[Token], token: Token) -> "InlineAnimation":
        _, anim_tokens, last_token = _get_nested_group(tokens, ('{', '}'))
        fi = token.file_info + last_token.file_info
        return InlineAnimation(fi, *_animation_sa(anim_tokens, last_token))


class Animation:

    _file_info: FileInfo
    _identifier: Identifier
    _range_start: Expression
    _range_start_inclusive: bool
    _range_end: Expression
    _range_end_inclusive: bool
    _step: Expression
    _direction: AnimationDirection

    def __init__(
        self,
        file_info: FileInfo,
        identifier: Identifier,
        range_start: Expression,
        range_start_inclusive: bool,
        range_end: Expression,
        range_end_inclusive: bool,
        step: Expression,
        direction: AnimationDirection,
    ):
        self._file_info = file_info
        self._identifier = identifier
        self._range_start = range_start
        self._range_start_inclusive = range_start_inclusive
        self._range_end = range_end
        self._range_end_inclusive = range_end_inclusive
        self._step = step
        self._direction = direction

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Animation ({self._identifier.value})\n"
        s += (
            f"{pre_cont}├─ Range Start "
            f"({'≤' if self._range_start_inclusive else '<'})\n"
        )
        s += self._range_start.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
        s += (
            f"{pre_cont}├─ Range End "
            f"({'≤' if self._range_end_inclusive else '<'})\n"
        )
        s += self._range_end.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
        s += f"{pre_cont}├─ Step\n"
        s += self._step.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
        s += f"{pre_cont}└─ Direction: {self._direction}\n"
        return s

    @staticmethod
    def _sa(tokens: list[Token], token: Token) -> "Animation":
        _assert_token(ExpectedIdentifier, tokens[0])
        identifier: Identifier = tokens.pop(0)  # type: ignore
        _, anim_tokens, last_token = _get_nested_group(tokens, ('{', '}'))
        fi = token.file_info + last_token.file_info
        return Animation(
            fi, identifier, *_animation_sa(anim_tokens, last_token))


class Graph:

    _parameter_conversions: ClassVar[dict] = {
        ("x",): "x",
        ("y",): "y",
        ("t",): "t",
        ("r",): "r",
        ("θ",): "theta",
        ("c", "_", "a"): "color_alpha",
        ("c", "_", "w"): "color_grey",
        ("c", "_", "r"): "color_red",
        ("c", "_", "g"): "color_green",
        ("c", "_", "b"): "color_blue",
        ("c", "_", "h"): "color_hue",
        ("c", "_", "s"): "color_saturation",
        ("c", "_", "l"): "color_luminosity",
    }

    _file_info: FileInfo
    _x: None | Expression | InlineAnimation
    _y: None | Expression | InlineAnimation
    _t: None | InlineAnimation
    _r: None | Expression
    _theta: None | InlineAnimation
    _color_alpha: None | Expression
    _color_grey: None | Expression
    _color_red: None | Expression
    _color_green: None | Expression
    _color_blue: None | Expression
    _color_hue: None | Expression
    _color_saturation: None | Expression
    _color_luminosity: None | Expression

    def __init__(
        self,
        file_info: FileInfo,
        x: None | Expression | InlineAnimation = None,
        y: None | Expression | InlineAnimation = None,
        t: None | InlineAnimation = None,
        r: None | Expression = None,
        theta: None | InlineAnimation = None,
        color_alpha: None | Expression = None,
        color_grey: None | Expression = None,
        color_red: None | Expression = None,
        color_green: None | Expression = None,
        color_blue: None | Expression = None,
        color_hue: None | Expression = None,
        color_saturation: None | Expression = None,
        color_luminosity: None | Expression = None,
    ):
        self._file_info = file_info
        self._x = x
        self._y = y
        self._t = t
        self._r = r
        self._theta = theta
        self._color_alpha = color_alpha
        self._color_grey = color_grey
        self._color_red = color_red
        self._color_green = color_green
        self._color_blue = color_blue
        self._color_hue = color_hue
        self._color_saturation = color_saturation
        self._color_luminosity = color_luminosity

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Graph\n"
        # Draw each parameter that is set, using '└─' only for the last one.
        branches = [
            (label, value) for label, value in (
                ("X", self._x),
                ("Y", self._y),
                ("T", self._t),
                ("R", self._r),
                ("θ", self._theta),
                ("C_a", self._color_alpha),
                ("C_w", self._color_grey),
                ("C_r", self._color_red),
                ("C_g", self._color_green),
                ("C_b", self._color_blue),
                ("C_h", self._color_hue),
                ("C_s", self._color_saturation),
                ("C_l", self._color_luminosity),
            ) if value is not None
        ]
        for i, (label, value) in enumerate(branches):
            if i < len(branches) - 1:
                s += f"{pre_cont}├─ {label}\n"
                s += value.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ")
            else:
                s += f"{pre_cont}└─ {label}\n"
                s += value.tree_str(f"{pre_cont} └─", f"{pre_cont} ")
        return s

    @staticmethod
    def _sa(tokens: list[Token], first_token: Token) -> "Graph":
        values: dict = {}
        _, graph_tokens, last_token = _get_nested_group(tokens, ('{', '}'))
        while graph_tokens:
            name, _ = _get_to_symbol(graph_tokens, ':')
            key = tuple(i.value.lower() for i in name)
            if key not in Graph._parameter_conversions:
                name_str = ''.join(i.value for i in name)
                fi = FileInfo(
                    name[0].file_info.filename,
                    name[0].file_info.line,
                    name[0].file_info.col,
                    len(name_str),
                )
                raise UnexpectedIdentifier(Identifier(name_str, fi), [
                    "x", "y", "t", "r", "θ", "C_a", "C_w", "C_r",
                    "C_g", "C_b", "C_h", "C_s", "C_l",
                ])
            try:
                value, _ = _get_to_symbol(graph_tokens, ',', '}')
            except UnexpectedEndOfTokenStream:
                value = graph_tokens[:]
                del graph_tokens[:]
            values[key] = value
        args: dict = {}
        for key, value in values.items():
            param = Graph._parameter_conversions[key]
            if param in ("x", "y") and isinstance(value[0], Keyword):
                # x and y accept either an expression or an inline animation.
                if value[0].value.lower() != 'anim':
                    raise ExpectedKeyword(value[0], 'anim')
                args[param] = InlineAnimation._sa(value[1:], value[0])
            elif param in ("t", "theta"):
                # t and θ must be inline animations.
                if value[0].value.lower() != 'anim':
                    raise ExpectedKeyword(value[0], 'anim')
                args[param] = InlineAnimation._sa(value[1:], value[0])
            else:
                try:
                    args[param] = _expression_sa(value)
                except ExpressionError as err:
                    raise ExpectedExpression(str(err), last_token)
        fi = first_token.file_info + last_token.file_info
        return Graph(fi, **args)


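# Sketch of a graph block in the surface syntax (shape inferred from `_sa`;
# parameter names come from `_parameter_conversions`):
#
#     graph {
#         x: anim { R: 0 ≤ x < 2*π },
#         y: 1 / 2,
#         C_r: 1,
#     }

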
class Screen:

    _parameter_conversions: ClassVar[dict] = {
        ("top",): "top",
        ("bottom",): "bottom",
        ("right",): "right",
        ("left",): "left",
        ("width",): "width",
        ("height",): "height",
        ("width", "scale"): "width_scale",
        ("height", "scale"): "height_scale",
        ("fps",): "fps",
    }

    _file_info: FileInfo
    _top: None | int
    _bottom: None | int
    _right: None | int
    _left: None | int
    _width: None | int
    _height: None | int
    _width_scale: float
    _height_scale: float
    _fps: int

    def __init__(
        self,
        file_info: FileInfo,
        top: None | int = None,
        bottom: None | int = None,
        right: None | int = None,
        left: None | int = None,
        width: None | int = None,
        height: None | int = None,
        width_scale: float = 20,
        height_scale: float = 20,
        fps: int = 30,
    ):
        self._file_info = file_info
        self._top = top
        self._bottom = bottom
        self._right = right
        self._left = left
        self._width = width
        self._height = height
        self._width_scale = width_scale
        self._height_scale = height_scale
        self._fps = fps

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def tree_str(self, pre: str = "", pre_cont: str = "") -> str:
        s: str = f"{pre} Screen\n"
        # List each parameter that differs from its default, using '└─' only
        # for the last one shown.
        shown = [
            (label, value) for label, value, show in (
                ("Top", self._top, self._top is not None),
                ("Bottom", self._bottom, self._bottom is not None),
                ("Right", self._right, self._right is not None),
                ("Left", self._left, self._left is not None),
                ("Width", self._width, self._width is not None),
                ("Height", self._height, self._height is not None),
                ("Width Scale", self._width_scale, self._width_scale != 20),
                ("Height Scale", self._height_scale,
                    self._height_scale != 20),
                ("FPS", self._fps, self._fps != 30),
            ) if show
        ]
        for i, (label, value) in enumerate(shown):
            branch = "├─" if i < len(shown) - 1 else "└─"
            s += f"{pre_cont}{branch} {label}: {value}\n"
        return s

    @staticmethod
    def _sa(tokens: list[Token], first_token: Token) -> "Screen":
        values: dict = {}
        _, screen_tokens, last_token = _get_nested_group(tokens, ('{', '}'))
        while screen_tokens:
            name, _ = _get_to_symbol(screen_tokens, ':')
            try:
                value, _ = _get_to_symbol(screen_tokens, ',')
            except UnexpectedEndOfTokenStream:
                value = screen_tokens[:]
                del screen_tokens[:]
            key = Screen._parameter_conversions[
                tuple(i.value.lower() for i in name)]
            if len(value) > 1:
                raise UnexpectedToken(value[1], [",", "}"])
            # Token values are text; convert them to the numbers Screen's
            # fields expect.
            number = value[0].value
            try:
                values[key] = int(number)
            except ValueError:
                values[key] = float(number)
        fi = first_token.file_info + last_token.file_info
        return Screen(fi, **values)


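# Sketch of a screen block (names from `_parameter_conversions`; each value
# is a single number literal):
#
#     screen {
#         width: 640,
#         height: 480,
#         fps: 60,
#     }

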
class File:

    _children: list[Screen | Graph | Animation | Constant]
    _file_info: FileInfo

    def __init__(
        self,
        children: list[Screen | Graph | Animation | Constant],
        file_info: FileInfo,
    ):
        self._children = children[:]
        self._file_info = file_info

    @property
    def children(self) -> list[
        Screen |
        Graph |
        Animation |
        Constant
    ]:
        return self._children[:]

    @property
    def file_info(self) -> FileInfo: return self._file_info

    def tree_str(self) -> str:
        s: str = "File\n"
        if self._children:
            for child in self._children[:-1]:
                s += child.tree_str("├─", "│ ")
            s += self._children[-1].tree_str("└─", " ")
        return s

    @staticmethod
    def _sa(tokens: list[Token]) -> "File":
        children: list[Screen | Graph | Animation | Constant] = []
        file_fi: FileInfo = tokens[0].file_info + tokens[-1].file_info

        while tokens:
            token = tokens.pop(0)
            if isinstance(token, Keyword):
                match token.value.lower():
                    case 'screen':
                        children.append(Screen._sa(tokens, token))
                    case 'graph':
                        children.append(Graph._sa(tokens, token))
                    case 'anim':
                        children.append(Animation._sa(tokens, token))
                    case 'const':
                        children.append(Constant._sa(tokens, token))
                    case _:
                        raise ExpectedKeyword(
                            token,
                            "screen', 'graph', 'anim', or 'const",
                            token.value.lower(),
                        )
            else:
                raise UnexpectedToken(token, "keyword")

        return File(children, file_fi)


def _assert_token(
    exception: type[_ExpectedTokenBase],
    token: Token,
    value: str | None = None,
    token_type: type[Token] | None = None,
):
    if not isinstance(token, token_type or exception._token_type):
        raise exception(token)
    if value is not None and token.value != value:
        raise exception(token, value)


def _get_nested_group(
    tokens: list[Token],
    encloses: tuple[str, str] = ('(', ')'),
) -> tuple[Token, list[Token], Token]:
    first_token = tokens.pop(0)
    _assert_token(ExpectedPunctuation, first_token, encloses[0])
    nested = 1
    expr_len = -1
    for i in range(len(tokens)):
        if tokens[i].value == encloses[0]: nested += 1
        elif tokens[i].value == encloses[1]: nested -= 1
        if nested == 0:
            expr_len = i
            break
    else:
        raise UnexpectedEndOfTokenStream(
            f"Expected '{encloses[1]}' but found '{tokens[-1].value}'.",
            tokens[-1].file_info,
        )
    expr_tokens = tokens[:expr_len]
    last_token = tokens[expr_len]
    del tokens[:expr_len+1]
    return first_token, expr_tokens, last_token


def _get_to_symbol(
    tokens: list[Token],
    symbols: str | Sequence[str] = ';',
    end: None | str = None,
) -> tuple[list[Token], Token]:
    expr_len = -1
    if end:
        start = _Punctuation_Enclosing[end]
        nested = 0
        for i in range(len(tokens)):
            if tokens[i].value == start: nested += 1
            elif tokens[i].value == end:
                if nested == 0 and end in symbols:
                    expr_len = i
                    break
                elif nested == 0:
                    raise UnexpectedPunctuation(
                        tokens[i],
                        f"{start}' before '{end}",
                        tokens[i].value,
                    )
                nested -= 1
            elif nested == 0 and tokens[i].value in symbols:
                expr_len = i
                break
        else:
            raise UnexpectedEndOfTokenStream(
                "Unexpected End of Token Stream.", tokens[-1].file_info)
    else:
        for i in range(len(tokens)):
            if tokens[i].value in symbols:
                expr_len = i
                break
        else:
            raise UnexpectedEndOfTokenStream(
                "Unexpected End of Token Stream.", tokens[-1].file_info)
    expr_tokens = tokens[:expr_len]
    last_token = tokens[expr_len]
    del tokens[:expr_len+1]
    return expr_tokens, last_token


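# A minimal sketch of how these helpers consume their input in place
# (token values shown instead of Token objects):
#
#     tokens = [x, :, 1, ,, y, :, 2]          # contents of a '{...}' group
#     name, _ = _get_to_symbol(tokens, ':')   # name == [x]; tokens == [1, ,, y, :, 2]
#     value, _ = _get_to_symbol(tokens, ',')  # value == [1]; tokens == [y, :, 2]

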
def _animation_sa(tokens: list[Token], last_token: Token) -> tuple[
    Expression,
    bool,
    Expression,
    bool,
    Expression,
    AnimationDirection,
]:
    # Range: `R: <expr> (<|≤) x (<|≤) <expr>`
    _assert_token(ExpectedIdentifier, tokens[0], 'R')
    _assert_token(ExpectedPunctuation, tokens[1], ':')
    del tokens[:2]
    range_tokens, comparison = _get_to_symbol(tokens, ('<', '≤'))
    try:
        range_start = _expression_sa(range_tokens)
    except ExpressionError as err:
        raise ExpectedExpression(str(err), last_token)
    range_start_inclusive = comparison.value == '≤'
    value, comparison = _get_to_symbol(tokens, ('<', '≤'))
    if not value:
        raise UnexpectedEndOfTokenStream(
            "Expected 'x' between comparisons.", comparison.file_info)
    if len(value) > 1:
        raise ExpectedPunctuation(value[1], "<' or '≤")
    _assert_token(ExpectedIdentifier, value[0], 'x')
    try:
        range_tokens, _ = _get_to_symbol(tokens, ',')
    except UnexpectedEndOfTokenStream:
        range_tokens = tokens[:]
        del tokens[:]
    try:
        range_end = _expression_sa(range_tokens)
    except ExpressionError as err:
        raise ExpectedExpression(str(err), last_token)
    range_end_inclusive = comparison.value == '≤'

    has_pi = range_start.has_pi() or range_end.has_pi()

    # Optional step: `S: <expr>`
    if tokens:
        _assert_token(ExpectedIdentifier, tokens[0], 'S')
        _assert_token(ExpectedPunctuation, tokens[1], ':')
        del tokens[:2]
        try:
            step_tokens, _ = _get_to_symbol(tokens, ',')
        except UnexpectedEndOfTokenStream:
            step_tokens = tokens[:]
            del tokens[:]
        try:
            step = _expression_sa(step_tokens)
        except ExpressionError as err:
            raise ExpectedExpression(str(err), last_token)
        # Optional direction: `D: increase|decrease|bounce`
        if tokens:
            _assert_token(ExpectedIdentifier, tokens[0], 'D')
            _assert_token(ExpectedPunctuation, tokens[1], ':')
            del tokens[:2]
            token = tokens.pop(0)
            _assert_token(ExpectedIdentifier, token)
            if token.value.lower() in ["increase", "decrease", "bounce"]:
                direction = AnimationDirection(token.value.lower())
            else:
                raise ExpectedIdentifier(
                    token,
                    "increase', 'decrease', or 'bounce",
                    token.value.lower(),
                )
        else:
            direction = AnimationDirection.Increase
    else:
        # Default step: π/32 if either range bound mentions π, else 1/10.
        if has_pi:
            step = BinaryExpression(
                last_token.file_info,
                LiteralExpression(
                    last_token.file_info,
                    Punctuation("π", last_token.file_info)
                ),
                LiteralExpression(
                    last_token.file_info,
                    NumberLiteral("32", last_token.file_info)
                ),
                BinaryOperator.Division,
            )
        else:
            step = BinaryExpression(
                last_token.file_info,
                LiteralExpression(
                    last_token.file_info,
                    NumberLiteral("1", last_token.file_info)
                ),
                LiteralExpression(
                    last_token.file_info,
                    NumberLiteral("10", last_token.file_info)
                ),
                BinaryOperator.Division,
            )
        direction = AnimationDirection.Increase

    return (
        range_start,
        range_start_inclusive,
        range_end,
        range_end_inclusive,
        step,
        direction,
    )


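# Sketch of the animation clause this accepts (inferred from the assertions
# above; the step and direction clauses are optional):
#
#     anim t { R: 0 ≤ x < 2*π, S: π/16, D: bounce }
#
# Omitting `S:` falls back to a step of π/32 when a range bound mentions π,
# and 1/10 otherwise.

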
def _expression_sa(tokens: list[Token]) -> Expression:
    if not tokens:
        raise ExpressionError("Expected Expression.")
    elif len(tokens) == 1:
        token = tokens.pop(0)
        _assert_token(ExpectedLiteral, token)
        if isinstance(token, Punctuation):
            # Only the symbol literals may stand alone as expressions.
            if token.value not in _Id_Punctuation:
                raise ExpectedPunctuation(
                    token, "', '".join(_Id_Punctuation))
        return LiteralExpression(token.file_info, token)  # type: ignore

    # Find the weakest-binding operator at nesting depth zero; it becomes the
    # root of the expression tree.
    max_operator: int = -1
    max_operator_precedence: int = -1
    nested = 0
    one_enclosed = True
    for i, token in enumerate(tokens):
        if token.value == '(': nested += 1
        elif token.value == ')':
            if nested == 0:
                raise UnexpectedPunctuation(token, "(' before ')", token.value)
            nested -= 1
        elif nested == 0 and isinstance(token, Punctuation):
            one_enclosed = False
            for j, operator in reversed(list(enumerate(_Operator_Precedence))):
                if j <= max_operator_precedence:
                    break
                elif operator.value == token.value:
                    max_operator = i
                    max_operator_precedence = j
                    break
        elif nested == 0:
            one_enclosed = False

    if one_enclosed and tokens[0].value == '(' and tokens[-1].value == ')':
        if not tokens[1:-1]:
            fi = tokens[0].file_info + tokens[-1].file_info
            raise UnexpectedEndOfTokenStream(
                "Expected expression between '(' and ')'.", fi)
        token = tokens.pop(0)
        last_token = tokens.pop(-1)
        fi = token.file_info + last_token.file_info
        try:
            return EnclosedExpression(fi, _expression_sa(tokens))
        except ExpressionError as err:
            raise ExpectedExpression(str(err), last_token)

    if max_operator == -1:
        # No top-level operator: this must be a function call.
        function_identifier = tokens.pop(0)
        _assert_token(ExpectedIdentifier, function_identifier)
        token = tokens.pop(0)
        _assert_token(ExpectedPunctuation, token, '(')
        last_token = token  # in case the argument loop never runs
        function_args: list[Expression] = []
        while tokens:
            arg_tokens, last_token = _get_to_symbol(tokens, (',', ')'), ')')
            if arg_tokens:
                if len(arg_tokens) > 1 and arg_tokens[1].value == '=':
                    _assert_token(ExpectedIdentifier, arg_tokens[0])
                    arg_identifier = Identifier(
                        arg_tokens[0].value,
                        arg_tokens[0].file_info,
                    )
                    del arg_tokens[:2]
                else:
                    arg_identifier = None
                if not arg_tokens:
                    fi = last_token.file_info
                    raise UnexpectedEndOfTokenStream("Expected Expression.", fi)
                try:
                    expression = _expression_sa(arg_tokens)
                except ExpressionError as err:
                    raise ExpectedExpression(str(err), last_token)
                if arg_identifier is not None:
                    fi = arg_identifier.file_info + expression.file_info
                else:
                    fi = expression.file_info
                function_args.append(expression)
        fi = function_identifier.file_info + last_token.file_info
        return FunctionCall(
            fi,
            Identifier(
                function_identifier.value,
                function_identifier.file_info,
            ),
            function_args,
        )

    if (
        tokens[max_operator].value in UnaryOperator and
        max_operator == 0
    ):
        operator = UnaryOperator(tokens[max_operator].value)
        if not tokens[max_operator + 1:]:
            fi = tokens[max_operator].file_info
            raise UnexpectedEndOfTokenStream(
                f"Expected expression after '{tokens[max_operator].value}'.",
                fi,
            )
        try:
            expression = _expression_sa(tokens[max_operator + 1:])
        except ExpressionError as err:
            raise ExpectedExpression(str(err), tokens[max_operator])
        fi = tokens[max_operator].file_info + expression.file_info
        return UnaryExpression(fi, expression, operator)
    elif tokens[max_operator].value in BinaryOperator:
        operator = BinaryOperator(tokens[max_operator].value)
        if not tokens[:max_operator]:
            fi = tokens[max_operator].file_info
            raise UnexpectedEndOfTokenStream(
                f"Expected expression before '{tokens[max_operator].value}'.",
                fi,
            )
        try:
            expression1 = _expression_sa(tokens[:max_operator])
        except ExpressionError as err:
            raise ExpectedExpression(str(err), tokens[max_operator])
        if not tokens[max_operator + 1:]:
            fi = tokens[max_operator].file_info
            raise UnexpectedEndOfTokenStream(
                f"Expected expression after '{tokens[max_operator].value}'.",
                fi,
            )
        try:
            expression2 = _expression_sa(tokens[max_operator + 1:])
        except ExpressionError as err:
            raise ExpectedExpression(str(err), tokens[max_operator])
        fi = expression1.file_info + expression2.file_info
        return BinaryExpression(fi, expression1, expression2, operator)
    else:
        raise SyntaxError("Expression Error", tokens[max_operator].file_info)


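# A rough sketch of the resulting tree for `const a = 1 + 2 * 3;` (via
# `Constant._sa` -> `_expression_sa`): the weakest operator `+` becomes the
# root, so `tree_str` prints approximately:
#
#     Constant (a)
#     └─ Binary Expression (BinaryOperator.Addition)
#        ├─ Literal Expression (1)
#        └─ Binary Expression (BinaryOperator.Multiplication)
#           ├─ Literal Expression (2)
#           └─ Literal Expression (3)

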
def syntactical_analyzer(tokens: Sequence[Token]) -> File:
    return File._sa(list(tokens))


if __name__ == '__main__':
    try:
        with open("example.graph", encoding='utf-8') as file:
            code = file.read()
        tokens = lexer(code, "example.graph")
        with open("tokens.txt", 'w', encoding='utf-8') as file:
            file.write('\n'.join([str(t) for t in tokens]))
        syntax_tree = syntactical_analyzer(tokens)
        with open("syntax.txt", 'w', encoding='utf-8') as file:
            file.write(syntax_tree.tree_str())
    except CompilerError as err:
        print(err.compiler_error())
        # raise