diff --git a/compiler.py b/compiler.py index be98bd4..fbbb6cf 100644 --- a/compiler.py +++ b/compiler.py @@ -88,13 +88,13 @@ class CompilerError(Exception): file_info_context = None if file_info.lines: new_message += f"to line {file_info.line + file_info.lines}" - with open(file_info.filename, 'r') as file: + with open(file_info.filename, 'r', encoding='utf-8') as file: new_message += ''.join( file.readlines()[ file_info.line-1:file_info.line + file_info.lines]) else: new_message += f"col {file_info.col}\n\n" - with open(file_info.filename, 'r') as file: + with open(file_info.filename, 'r', encoding='utf-8') as file: new_message += file.readlines()[file_info.line-1] if file_info_context is not None: context_line = [' '] * max( @@ -125,36 +125,6 @@ class CompilerError(Exception): ) -class CompilerWarning(Warning): - - _compiler_warning_type = "Compiler" - - def __init__(self, message: str, file_info: FileInfo): - new_message = message - new_message += ( - f"\nIn file {file_info.filename} at line {file_info.line} " - ) - if file_info.lines: - new_message += f"to line {file_info.line + file_info.lines}" - with open(file_info.filename, 'r') as file: - new_message += ''.join( - file.readlines()[ - file_info.line-1:file_info.line + file_info.lines]) - else: - new_message += f"col {file_info.col}\n\n" - with open(file_info.filename, 'r') as file: - new_message += file.readlines()[file_info.line-1] - new_message += ' ' * ( - file_info.col - 1) + '^' * file_info.length - - super().__init__(new_message) - - def compiler_error(self) -> str: - return ( - f"[{self._compiler_warning_type} Warning] {type(self).__name__}:\n" - f"{indent(str(self), ' |', lambda _: True)}" - ) - # -- Lexical -- class LexerError(CompilerError): @@ -191,7 +161,7 @@ _ID_Start = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz" _ID_Continue = _ID_Start# + "0123456789" _Keywords = ( - 'screen', 'graph', 'anim', 'img', + 'screen', 'graph', 'anim', 'const', ) _Num_Start = "0123456789" @@ -237,6 +207,15 @@ _Punctuation_Conversion = { "->": "→", } +_Punctuation_Enclosing = { + '(':')', + ')':'(', + '[':']', + ']':'[', + '{':'}', + '}':'{', +} + _ID_Conversion = { "sum": "∑", "pi": "π", @@ -268,7 +247,6 @@ class Token: @property def file_info(self) -> FileInfo: return self._file_info -class Directive(Token): _type = 'Directive' class Identifier(Token): _type = 'Identifier' class Keyword(Token): _type = 'Keyword' class NumberLiteral(Token): _type = 'NumberLiteral' @@ -289,7 +267,7 @@ def lexer(file: str, filename: str) -> Sequence[Token]: if token_type is _InterTokenType.Word: if len(current) > 15: raise LexerError("Identifier Too Long", fi) - if current in _Keywords: + if current.lower() in _Keywords: tokens.append(Keyword(current, fi)) elif current.lower() in _ID_Conversion.keys(): tokens.append( @@ -318,7 +296,7 @@ def lexer(file: str, filename: str) -> Sequence[Token]: filename, current_line, current_col, len(current)) if len(current) > 15: raise LexerError("Identifier Too Long", fi) - if current in _Keywords: + if current.lower() in _Keywords: tokens.append(Keyword(current, fi)) elif current.lower() in _ID_Conversion.keys(): tokens.append( @@ -383,7 +361,7 @@ def lexer(file: str, filename: str) -> Sequence[Token]: if token_type is _InterTokenType.Word: if len(current) > 31: raise LexerError("Identifier Too Long", fi) - if current in _Keywords: + if current.lower() in _Keywords: tokens.append(Keyword(current, fi)) elif current.lower() in _ID_Conversion.keys(): tokens.append(Punctuation(_ID_Conversion[current.lower()], fi)) 
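A quick way to sanity-check the case-insensitive keyword handling introduced above is to run the lexer on the same declaration in several casings. This is only an illustrative sketch: it assumes the patched file is importable as `compiler`, and `snippet.graph` is a placeholder filename (the lexer only opens it when rendering an error message).

```python
from compiler import Keyword, lexer  # module name assumed

for source in ("const a = 1;", "CONST a = 1;", "Const a = 1;"):
    tokens = list(lexer(source, "snippet.graph"))  # filename only used in error reports
    # With the .lower() comparison above, every casing should yield a Keyword first.
    assert isinstance(tokens[0], Keyword), tokens[0]
    print(tokens[0])  # e.g. "Type: Keyword, Value: const"
```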
@@ -417,11 +395,11 @@ class UnexpectedEndOfTokenStream(SyntaxError): pass class _ExpectedTokenBase(SyntaxError): - _token_type = lexer.Token + _token_type = Token def __init__( self, - token: lexer.Token, + token: Token, expected: str | None = None, found: str | None = None, ): @@ -434,14 +412,13 @@ class _ExpectedTokenBase(SyntaxError): super().__init__(message, token.file_info) -class ExpectedIdentifier(_ExpectedTokenBase): - _type_name = Identifier -class ExpectedKeyword(_ExpectedTokenBase): - _type_name = Keyword -class ExpectedNumberLiteral(_ExpectedTokenBase): - _type_name = NumberLiteral -class ExpectedPunctuation(_ExpectedTokenBase): - _type_name = Punctuation +class ExpectedIdentifier(_ExpectedTokenBase): _type_name = Identifier +class ExpectedKeyword(_ExpectedTokenBase): _type_name = Keyword +class ExpectedNumberLiteral(_ExpectedTokenBase): _type_name = NumberLiteral +class ExpectedPunctuation(_ExpectedTokenBase): _type_name = Punctuation + +class ExpectedLiteral(_ExpectedTokenBase): + _type_name = (NumberLiteral, Punctuation) class _UnexpectedTokenBase(_ExpectedTokenBase): @@ -483,71 +460,740 @@ class UnexpectedToken(_UnexpectedTokenBase): super().__init__(token, expected, found) -class Statement: +class UnexpectedIdentifier(_UnexpectedTokenBase): _type_name = Identifier +class UnexpectedKeyword(_UnexpectedTokenBase): _type_name = Keyword +class UnexpectedNumberLiteral(_UnexpectedTokenBase): _type_name = NumberLiteral +class UnexpectedPunctuation(_UnexpectedTokenBase): _type_name = Punctuation + + +class ExpressionError(Exception): pass + +class ExpectedExpression(SyntaxError): + + def __init__( + self, + message: str, + token: Token, + ): + super().__init__(message, token.file_info) + + +_Id_Punctuation = [ + '∑', + 'π', + 'α', + 'β', + '∞', + '∏', + '∫', + 'θ', +] + + +class Expression: _file_info: FileInfo @property def file_info(self) -> FileInfo: return self._file_info + def has_pi(self) -> bool: return False + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: - s: str = f"{pre}Statement\n" + s: str = f"{pre} Expression\n" + return s + + +class LiteralExpression(Expression): + + _file_info: FileInfo + _value: NumberLiteral | Punctuation + + def __init__( + self, + file_info: FileInfo, + value: NumberLiteral | Punctuation, + ): + self._file_info = file_info + self._value = value + + def has_pi(self) -> bool: return self._value.value == 'π' + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Literal Expression ({self._value.value})\n" + return s + + +class EnclosedExpression(Expression): + + _file_info: FileInfo + _expression: Expression + + def __init__( + self, + file_info: FileInfo, + expression: Expression, + ): + self._file_info = file_info + self._expression = expression + + def has_pi(self) -> bool: return self._expression.has_pi() + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Enclosed Expression\n" + s += self._expression.tree_str(f"{pre_cont}└─", f"{pre_cont} ") + return s + + +class UnaryOperator(Enum): + Negate = "-" + Factorial = "!" 
+ + +class UnaryExpression(Expression): + + _file_info: FileInfo + _expression: Expression + _operator: UnaryOperator + + def __init__( + self, + file_info: FileInfo, + expression: Expression, + operator: UnaryOperator, + ): + self._file_info = file_info + self._expression = expression + self._operator = operator + + def has_pi(self) -> bool: return self._expression.has_pi() + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Unary Expression ({self._operator})\n" + s += self._expression.tree_str(f"{pre_cont}└─", f"{pre_cont} ") + return s + + +class BinaryOperator(Enum): + Exponential = "^" + Subscript = "_" + Division = "/" + Modulus = "%" + Multiplication = "*" + Subtraction = "-" + Addition = "+" + + +class BinaryExpression(Expression): + + _file_info: FileInfo + _expression1: Expression + _expression2: Expression + _operator: BinaryOperator + + def __init__( + self, + file_info: FileInfo, + expression1: Expression, + expression2: Expression, + operator: BinaryOperator, + ): + self._file_info = file_info + self._expression1 = expression1 + self._expression2 = expression2 + self._operator = operator + + def has_pi(self) -> bool: + return self._expression1.has_pi() or self._expression2.has_pi() + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Binary Expression ({self._operator})\n" + s += self._expression1.tree_str(f"{pre_cont}├─", f"{pre_cont}│ ") + s += self._expression2.tree_str(f"{pre_cont}└─", f"{pre_cont} ") + return s + + +_Operator_Precedence: tuple[ + UnaryOperator | + BinaryOperator, + ... +] = ( + UnaryOperator.Negate, + UnaryOperator.Factorial, + BinaryOperator.Exponential, + BinaryOperator.Subscript, + BinaryOperator.Division, + BinaryOperator.Modulus, + BinaryOperator.Multiplication, + BinaryOperator.Subtraction, + BinaryOperator.Addition, +) + + +class FunctionCall(Expression): + + _file_info: FileInfo + _identifier: Identifier + _arguments: list[Expression] + + def __init__( + self, + file_info: FileInfo, + identifier: Identifier, + arguments: list[Expression], + ): + self._file_info = file_info + self._identifier = identifier + self._arguments = arguments + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Function Call ({self._identifier.value})\n" + for arg in self._arguments[:-1]: + s += arg.tree_str(f"{pre_cont}├─", f"{pre_cont}│ ") + s += self._arguments[-1].tree_str(f"{pre_cont}└─", f"{pre_cont} ") + return s + + +class Constant: + + _file_info: FileInfo + _identifier: Identifier + _expression: Expression + + def __init__( + self, + file_info: FileInfo, + identifier: Identifier, + expression: Expression, + ): + self._file_info = file_info + self._identifier = identifier + self._expression = expression + + @property + def file_info(self) -> FileInfo: return self._file_info + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Constant ({self._identifier.value})\n" + s += self._expression.tree_str(f"{pre_cont}└─", f"{pre_cont} ") return s @staticmethod - def _sa(tokens: list[Token], token: Token) -> "Statement": - pass + def _sa(tokens: list[Token], first_token: Token) -> "Constant": + constant_tokens, last_token = _get_to_symbol(tokens, ';') + name, _ = _get_to_symbol(constant_tokens, '=') + if len(name) > 1: + raise UnexpectedToken(name[1], '=') + _assert_token(ExpectedIdentifier, name[0]) + identifier: Identifier = name[0] # type: ignore + fi = first_token.file_info + last_token.file_info + try: return Constant( + fi, identifier, 
_expression_sa(constant_tokens)) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + + +class AnimationDirection(Enum): + Increase = "increase" + Decrease = "decrease" + Bounce = "bounce" + + +class InlineAnimation: + + _file_info: FileInfo + _range_start: Expression + _range_start_inclusive: bool + _range_end: Expression + _range_end_inclusive: bool + _step: Expression + _direction: AnimationDirection + + def __init__( + self, + file_info: FileInfo, + range_start: Expression, + range_start_inclusive: bool, + range_end: Expression, + range_end_inclusive: bool, + step: Expression, + direction: AnimationDirection, + ): + self._file_info = file_info + self._range_start = range_start + self._range_start_inclusive = range_start_inclusive + self._range_end = range_end + self._range_end_inclusive = range_end_inclusive + self._step = step + self._direction = direction + + @property + def file_info(self) -> FileInfo: return self._file_info + + def tree_str(self, pre: str = "", pre_cont: str = "") -> str: + s: str = f"{pre} Inline Animation\n" + s += f"{pre_cont}├─ Range Start \ +({'≤' if self._range_start_inclusive else '<'})\n" + s += self._range_start.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + s += f"{pre_cont}├─ Range End \ +({'≤' if self._range_end_inclusive else '<'})\n" + s += self._range_end.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + s += f"{pre_cont}├─ Step\n" + s += self._step.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + s += f"{pre_cont}└─ Direction: {self._direction}\n" + return s + + @staticmethod + def _sa(tokens: list[Token], token: Token) -> "InlineAnimation": + _, anim_tokens, last_token = _get_nested_group(tokens, ('{','}')) + fi = token.file_info + last_token.file_info + return InlineAnimation(fi, *_animation_sa(anim_tokens, last_token)) class Animation: _file_info: FileInfo + _identifier: Identifier + _range_start: Expression + _range_start_inclusive: bool + _range_end: Expression + _range_end_inclusive: bool + _step: Expression + _direction: AnimationDirection + + def __init__( + self, + file_info: FileInfo, + identifier: Identifier, + range_start: Expression, + range_start_inclusive: bool, + range_end: Expression, + range_end_inclusive: bool, + step: Expression, + direction: AnimationDirection, + ): + self._file_info = file_info + self._identifier = identifier + self._range_start = range_start + self._range_start_inclusive = range_start_inclusive + self._range_end = range_end + self._range_end_inclusive = range_end_inclusive + self._step = step + self._direction = direction @property def file_info(self) -> FileInfo: return self._file_info def tree_str(self, pre: str = "", pre_cont: str = "") -> str: - s: str = f"{pre}Animation\n" + s: str = f"{pre} Animation ({self._identifier.value})\n" + s += f"{pre_cont}├─ Range Start \ +({'≤' if self._range_start_inclusive else '<'})\n" + s += self._range_start.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + s += f"{pre_cont}├─ Range End \ +({'≤' if self._range_end_inclusive else '<'})\n" + s += self._range_end.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + s += f"{pre_cont}├─ Step\n" + s += self._step.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + s += f"{pre_cont}└─ Direction: {self._direction}\n" return s @staticmethod def _sa(tokens: list[Token], token: Token) -> "Animation": - pass + _assert_token(ExpectedIdentifier, tokens[0]) + identifier: Identifier = tokens.pop(0) # type: ignore + _, anim_tokens, last_token = _get_nested_group(tokens, ('{','}')) + fi = token.file_info + last_token.file_info + 
return Animation( + fi, identifier, *_animation_sa(anim_tokens, last_token)) class Graph: + _parameter_conversions: ClassVar[dict] = { + ("x",): "x", + ("y",): "y", + ("t",): "t", + ("r",): "r", + ("θ",): "theta", + ("c","_","a",): "color_alpha", + ("c","_","w",): "color_grey", + ("c","_","r",): "color_red", + ("c","_","g",): "color_green", + ("c","_","b",): "color_blue", + ("c","_","h",): "color_hue", + ("c","_","s",): "color_saturation", + ("c","_","l",): "color_luminosity", + } + _file_info: FileInfo - _x: None | Statement | Animation - _y: None | Statement | Animation - _t: None | Animation - _r: None | Statement - _theta: None | Animation + _x: None | Expression | InlineAnimation + _y: None | Expression | InlineAnimation + _t: None | InlineAnimation + _r: None | Expression + _theta: None | InlineAnimation + _color_alpha: None | Expression + _color_grey: None | Expression + _color_red: None | Expression + _color_green: None | Expression + _color_blue: None | Expression + _color_hue: None | Expression + _color_saturation: None | Expression + _color_luminosity: None | Expression + + def __init__( + self, + file_info: FileInfo, + x: None | Expression | InlineAnimation = None, + y: None | Expression | InlineAnimation = None, + t: None | InlineAnimation = None, + r: None | Expression = None, + theta: None | InlineAnimation = None, + color_alpha: None | Expression = None, + color_grey: None | Expression = None, + color_red: None | Expression = None, + color_green: None | Expression = None, + color_blue: None | Expression = None, + color_hue: None | Expression = None, + color_saturation: None | Expression = None, + color_luminosity: None | Expression = None, + ): + self._file_info = file_info + self._x = x + self._y = y + self._t = t + self._r = r + self._theta = theta + self._color_alpha = color_alpha + self._color_grey = color_grey + self._color_red = color_red + self._color_green = color_green + self._color_blue = color_blue + self._color_hue = color_hue + self._color_saturation = color_saturation + self._color_luminosity = color_luminosity @property def file_info(self) -> FileInfo: return self._file_info def tree_str(self, pre: str = "", pre_cont: str = "") -> str: - s: str = f"{pre}Graph\n" + s: str = f"{pre} Graph\n" + if self._x is not None: + s += pre_cont + if ( + self._y is not None or + self._t is not None or + self._r is not None or + self._theta is not None or + self._color_alpha is not None or + self._color_grey is not None or + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ X\n' + s += self._x.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ X\n' + s += self._x.tree_str(f"{pre_cont} └─", f"{pre_cont} ") + if self._y is not None: + s += pre_cont + if ( + self._t is not None or + self._r is not None or + self._theta is not None or + self._color_alpha is not None or + self._color_grey is not None or + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ Y\n' + s += self._y.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ Y\n' + s += self._y.tree_str(f"{pre_cont} └─", f"{pre_cont} ") + if self._t is not None: + s += pre_cont + if ( + self._r is not None or + self._theta is not None or + self._color_alpha is 
not None or + self._color_grey is not None or + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ T\n' + s += self._t.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ T\n' + s += self._t.tree_str(f"{pre_cont} └─", f"{pre_cont} ") + if self._r is not None: + s += pre_cont + if ( + self._theta is not None or + self._color_alpha is not None or + self._color_grey is not None or + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ R\n' + s += self._r.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ R\n' + s += self._r.tree_str(f"{pre_cont} └─", f"{pre_cont} ") + if self._theta is not None: + s += pre_cont + if ( + self._color_alpha is not None or + self._color_grey is not None or + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ θ\n' + s += self._theta.tree_str(f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ θ\n' + s += self._theta.tree_str(f"{pre_cont} └─", f"{pre_cont} ") + if self._color_alpha is not None: + s += pre_cont + if ( + self._color_grey is not None or + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ C_a\n' + s += self._color_alpha.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_a\n' + s += self._color_alpha.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") + if self._color_grey is not None: + s += pre_cont + if ( + self._color_red is not None or + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ C_w\n' + s += self._color_grey.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_w\n' + s += self._color_grey.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") + if self._color_red is not None: + s += pre_cont + if ( + self._color_green is not None or + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ C_r\n' + s += self._color_red.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_r\n' + s += self._color_red.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") + if self._color_green is not None: + s += pre_cont + if ( + self._color_blue is not None or + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ C_g\n' + s += self._color_green.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_g\n' + s += self._color_green.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") + if self._color_blue is not None: + s += pre_cont + if ( + self._color_hue is not None or + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ C_b\n' + s += self._color_blue.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_b\n' + s += self._color_blue.tree_str( + 
f"{pre_cont} └─", f"{pre_cont} ") + if self._color_hue is not None: + s += pre_cont + if ( + self._color_saturation is not None or + self._color_luminosity is not None + ): + s += '├─ C_h\n' + s += self._color_hue.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_h\n' + s += self._color_hue.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") + if self._color_saturation is not None: + s += pre_cont + if self._color_luminosity is not None: + s += '├─ C_s\n' + s += self._color_saturation.tree_str( + f"{pre_cont}│ └─", f"{pre_cont}│ ") + else: + s+= '└─ C_s\n' + s += self._color_saturation.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") + if self._color_luminosity is not None: + s+= f'{pre_cont}└─ C_l\n' + s += self._color_luminosity.tree_str( + f"{pre_cont} └─", f"{pre_cont} ") return s @staticmethod - def _sa(tokens: list[Token], token: Token) -> "Graph": - pass + def _sa(tokens: list[Token], first_token: Token) -> "Graph": + values: dict = {} + _, anim_tokens, last_token = _get_nested_group(tokens, ('{','}')) + while anim_tokens: + name, _ = _get_to_symbol(anim_tokens, ':') + key = tuple(i.value.lower() for i in name) + if key not in Graph._parameter_conversions: + fi = name[0].file_info + name = ''.join(i.value for i in name) + fi._length = len(name) + token = Identifier(name, fi) + raise UnexpectedIdentifier(token, [ + "x", + "y", + "t", + "r", + "θ", + "C_a", + "C_w", + "C_r", + "C_g", + "C_b", + "C_h", + "C_s", + "C_l", + ]) + try: value, _ = _get_to_symbol(anim_tokens, ',', '}') + except UnexpectedEndOfTokenStream: + value = anim_tokens[:] + del anim_tokens[:] + values[key] = value + args: dict = {} + if ('x',) in values: + if isinstance(values[('x',)][0], Keyword): + if values[('x',)][0].value.lower() != 'anim': + raise ExpectedKeyword(values[('x',)][0], 'anim') + args['x'] = InlineAnimation._sa( + values[('x',)][1:], values[('x',)][0]) + else: + try: args['x'] = _expression_sa(values[('x',)]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('y',) in values: + if isinstance(values[('y',)][0], Keyword): + if values[('y',)][0].value.lower() != 'anim': + raise ExpectedKeyword(values[('y',)][0], 'anim') + args['y'] = InlineAnimation._sa( + values[('y',)][1:], values[('y',)][0]) + else: + try: args['y'] = _expression_sa(values[('y',)]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('t',) in values: + if values[('t',)][0].value.lower() != 'anim': + raise ExpectedKeyword(values[('t',)][0], 'anim') + args['t'] = InlineAnimation._sa( + values[('t',)][1:], values[('t',)][0]) + if ('r',) in values: + try: args['r'] = _expression_sa(values[('r',)]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('θ',) in values: + if values[('θ',)][0].value.lower() != 'anim': + raise ExpectedKeyword(values[('θ',)][0], 'anim') + args['theta'] = InlineAnimation._sa( + values[('θ',)][1:], values[('θ',)][0]) + if ('c','_','a') in values: + try: args['color_alpha'] = _expression_sa(values[('c','_','a')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('c','_','w') in values: + try: args['color_grey'] = _expression_sa(values[('c','_','w')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('c','_','r') in values: + try: args['color_red'] = _expression_sa(values[('c','_','r')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('c','_','g') in values: + try: args['color_green'] = 
_expression_sa(values[('c','_','g')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('c','_','b') in values: + try: args['color_blue'] = _expression_sa(values[('c','_','b')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('c','_','h') in values: + try: args['color_hue'] = _expression_sa(values[('c','_','h')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if ('c','_','s') in values: + args['color_saturation'] = _expression_sa(values[('c','_','s')]) + if ('c','_','l') in values: + try: args['color_luminosity'] = _expression_sa(values[('c','_','l')]) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + fi = first_token.file_info + last_token.file_info + return Graph(fi, **args) class Screen: - _parameter_convertions: ClassVar[dict] = { - ("top"): "top", - ("bottom"): "bottom", - ("right"): "right", - ("left"): "left", - ("width"): "width", - ("height"): "height", - ("width","scale"): "width_scale", - ("height","scale"): "height_scale", - ("fps"): "fps", + _parameter_conversions: ClassVar[dict] = { + ("top",): "top", + ("bottom",): "bottom", + ("right",): "right", + ("left",): "left", + ("width",): "width", + ("height",): "height", + ("width","scale",): "width_scale", + ("height","scale",): "height_scale", + ("fps",): "fps", } _file_info: FileInfo @@ -660,7 +1306,7 @@ class Screen: ) else f'└─ Height Scale: {self._height_scale}\n' if self._fps != 30: s += pre_cont - s += f'└─ FPS: {self._height_scale}\n' + s += f'└─ FPS: {self._fps}\n' return s @staticmethod @@ -671,9 +1317,10 @@ class Screen: name, _ = _get_to_symbol(screen_tokens, ':') try: value, _ = _get_to_symbol(screen_tokens, ',') except UnexpectedEndOfTokenStream: - value = screen_tokens + value = screen_tokens[:] del screen_tokens[:] - key = Screen._parameter_convertions[tuple(i.value for i in name)] + key = Screen._parameter_conversions[ + tuple(i.value.lower() for i in name)] if len(value) > 1: raise UnexpectedToken(value[1], [",","}"]) values[key] = value[0].value @@ -683,12 +1330,12 @@ class Screen: class File: - _children: list[Screen | Graph | Animation | Statement] + _children: list[Screen | Graph | Animation | Constant] _file_info: FileInfo def __init__( self, - children: list[Screen | Graph | Animation | Statement], + children: list[Screen | Graph | Animation | Constant], file_info: FileInfo, ): self._children = children[:] @@ -699,7 +1346,7 @@ class File: Screen | Graph | Animation | - Statement + Constant ]: return self._children[:] @@ -707,7 +1354,7 @@ class File: def file_info(self) -> FileInfo: return self._file_info def tree_str(self) -> str: - s: str = " File\n" + s: str = "File\n" if self._children: for child in self._children[:-1]: s += child.tree_str("├─", "│ ") @@ -716,7 +1363,7 @@ class File: @staticmethod def _sa(tokens: list[Token]) -> "File": - children: list[Screen | Graph | Animation | Statement] = [] + children: list[Screen | Graph | Animation | Constant] = [] file_fi: FileInfo = tokens[0].file_info + tokens[-1].file_info while tokens: @@ -729,8 +1376,14 @@ class File: children.append(Graph._sa(tokens, token)) case 'anim': children.append(Animation._sa(tokens, token)) + case 'const': + children.append(Constant._sa(tokens, token)) case _: - raise ExpectedKeyword(token, "screen', 'graph', or 'anim") + raise ExpectedKeyword( + token, + "screen', 'graph', 'anim', or 'const", + token.value.lower(), + ) else: raise UnexpectedToken(token, "keyword") @@ -775,26 +1428,278 
@@ def _get_nested_group( def _get_to_symbol( tokens: list[Token], symbols: str | Sequence[str] = ';', + end: None | str = None ) -> tuple[list[Token], Token]: expr_len = -1 - for i in range(len(tokens)): - if tokens[i].value in symbols: - expr_len = i - break + if end: + start = _Punctuation_Enclosing[end] + nested = 0 + for i in range(len(tokens)): + if tokens[i].value == start: nested += 1 + elif tokens[i].value == end: + if nested == 0 and end in symbols: + expr_len = i + break + elif nested == 0: + raise UnexpectedPunctuation( + tokens[i], + f"{start}' before '{end}", + tokens[i].value, + ) + nested -= 1 + elif nested == 0 and tokens[i].value in symbols: + expr_len = i + break + else: + raise UnexpectedEndOfTokenStream( + "Unexpected End of Token Stream.", tokens[-1].file_info) else: - raise UnexpectedEndOfTokenStream( - "Unexpected End of Token Stream.", tokens[-1].file_info) + for i in range(len(tokens)): + if tokens[i].value in symbols: + expr_len = i + break + else: + raise UnexpectedEndOfTokenStream( + "Unexpected End of Token Stream.", tokens[-1].file_info) expr_tokens = tokens[:expr_len] last_token = tokens[expr_len] del tokens[:expr_len+1] return expr_tokens, last_token +def _animation_sa(tokens: list[Token], last_token: Token) -> tuple[ + Expression, + bool, + Expression, + bool, + Expression, + AnimationDirection, +]: + _assert_token(ExpectedIdentifier, tokens[0], 'R') + _assert_token(ExpectedPunctuation, tokens[1], ':') + del tokens[:2] + range_tokens, comparison = _get_to_symbol(tokens, ('<','≤')) + try: range_start = _expression_sa(range_tokens) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + range_start_inclusive = comparison.value == '≤' + value, comparison = _get_to_symbol(tokens, ('<','≤')) + if len(value) != 1: + raise ExpectedPunctuation(value[1], "<' or '≤") + _assert_token(ExpectedIdentifier, value[0], 'x') + try: range_tokens, _ = _get_to_symbol(tokens, ',') + except UnexpectedEndOfTokenStream: + range_tokens = tokens[:] + del tokens[:] + try: range_end = _expression_sa(range_tokens) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + range_end_inclusive = comparison.value == '≤' + + has_pi = range_start.has_pi() or range_end.has_pi() + + if tokens: + _assert_token(ExpectedIdentifier, tokens[0], 'S') + _assert_token(ExpectedPunctuation, tokens[1], ':') + del tokens[:2] + try: step_tokens, _ = _get_to_symbol(tokens, ',') + except UnexpectedEndOfTokenStream: + step_tokens = tokens[:] + del tokens[:] + try: step = _expression_sa(step_tokens) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if tokens: + _assert_token(ExpectedIdentifier, tokens[0], 'D') + _assert_token(ExpectedPunctuation, tokens[1], ':') + del tokens[:2] + token = tokens.pop(0) + _assert_token(ExpectedIdentifier, token) + if token.value.lower() in ["increase","decrease","bounce"]: + direction = AnimationDirection(token.value.lower()) + else: + raise ExpectedIdentifier( + token, + "increase', 'decrease', or 'bounce", + token.value.lower(), + ) + else: + direction = AnimationDirection.Increase + else: + if has_pi: + step = BinaryExpression( + last_token.file_info, + LiteralExpression( + last_token.file_info, + Punctuation("π", last_token.file_info) + ), + LiteralExpression( + last_token.file_info, + NumberLiteral("32", last_token.file_info) + ), + BinaryOperator.Division, + ) + else: + step = BinaryExpression( + last_token.file_info, + LiteralExpression( + last_token.file_info, + NumberLiteral("1", 
last_token.file_info) + ), + LiteralExpression( + last_token.file_info, + NumberLiteral("10", last_token.file_info) + ), + BinaryOperator.Division, + ) + direction = AnimationDirection.Increase + + return ( + range_start, + range_start_inclusive, + range_end, + range_end_inclusive, + step, + direction, + ) + +def _expression_sa(tokens: list[Token]) -> Expression: + if not tokens: + raise ExpressionError("Expected Expression.") + elif len(tokens) == 1: + token = tokens.pop(0) + _assert_token(ExpectedLiteral,token) + if isinstance(token, Punctuation): + if token.value not in ['π']: + raise ExpectedPunctuation( + token, "', '".join(_Id_Punctuation)) + return LiteralExpression(token.file_info, token) # type: ignore + + max_operator: int = -1 + max_operator_precedence: int = -1 + nested = 0 + one_enclosed = True + for i, token in enumerate(tokens): + if token.value == '(': nested += 1 + elif token.value == ')': + if nested == 0: + raise UnexpectedPunctuation(token, "(' before ')", token.value) + nested -= 1 + elif nested == 0 and isinstance(token, Punctuation): + one_enclosed = False + for j, operator in reversed(list(enumerate(_Operator_Precedence))): + if j <= max_operator_precedence: + break + elif operator.value == token.value: + max_operator = i + max_operator_precedence = j + break + elif nested == 0: + one_enclosed = False + + if one_enclosed and tokens[0].value == '(' and tokens[-1].value == ')': + if not tokens[1:-1]: + fi = tokens[0].file_info + tokens[-1].file_info + raise UnexpectedEndOfTokenStream( + "Expected expression between '(' and ')'.", fi) + token = tokens.pop(0) + last_token = tokens.pop(-1) + fi = token.file_info + last_token.file_info + try: return EnclosedExpression(fi,_expression_sa(tokens)) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + + if max_operator == -1: + function_identifier = tokens.pop(0) + _assert_token(ExpectedIdentifier, function_identifier) + token = tokens.pop(0) + _assert_token(ExpectedPunctuation, token, '(') + function_args: list[Expression] = [] + while tokens: + arg_tokens, last_token = _get_to_symbol(tokens, (',', ')'), ')') + if arg_tokens: + if len(arg_tokens) > 1 and arg_tokens[1].value == '=': + _assert_token(ExpectedIdentifier, arg_tokens[0]) + arg_identifier = Identifier( + arg_tokens[0].value, + arg_tokens[0].file_info, + ) + del arg_tokens[:2] + else: + arg_identifier = None + if not arg_tokens: + fi = last_token.file_info + raise UnexpectedEndOfTokenStream("Expected Expression.", fi) + try: expression = _expression_sa(arg_tokens) + except ExpressionError as err: + raise ExpectedExpression(str(err),last_token) + if arg_identifier is not None: + fi = arg_identifier.file_info + expression.file_info + else: + fi = expression.file_info + function_args.append(expression) + fi = function_identifier.file_info + last_token.file_info + return FunctionCall( + fi, + Identifier( + function_identifier.value, + function_identifier.file_info, + ), + function_args, + ) + + if ( + tokens[max_operator].value in UnaryOperator and + max_operator == 0 + ): + operator = UnaryOperator(tokens[max_operator].value) + if not tokens[max_operator + 1:]: + fi = tokens[max_operator].file_info + raise UnexpectedEndOfTokenStream( + f"Expected expression after '{tokens[max_operator].value}'.", + fi, + ) + try: expression = _expression_sa(tokens[max_operator + 1:]) + except ExpressionError as err: + raise ExpectedExpression(str(err),tokens[max_operator]) + fi = tokens[max_operator].file_info + expression.file_info + return 
UnaryExpression(fi, expression, operator) + elif tokens[max_operator].value in BinaryOperator: + operator = BinaryOperator(tokens[max_operator].value) + if not tokens[:max_operator]: + fi = tokens[max_operator].file_info + raise UnexpectedEndOfTokenStream( + f"Expected expression before '{tokens[max_operator].value}'.", + fi, + ) + try: expression1 = _expression_sa(tokens[:max_operator]) + except ExpressionError as err: + raise ExpectedExpression(str(err),tokens[max_operator]) + if not tokens[max_operator + 1:]: + fi = tokens[max_operator].file_info + raise UnexpectedEndOfTokenStream( + f"Expected expression after '{tokens[max_operator].value}'.", + fi, + ) + try: expression2 = _expression_sa(tokens[max_operator + 1:]) + except ExpressionError as err: + raise ExpectedExpression(str(err),tokens[max_operator]) + fi = expression1.file_info + expression2.file_info + return BinaryExpression(fi, expression1, expression2, operator) + else: raise SyntaxError("Expression Error", tokens[max_operator].file_info) + def syntactical_analyzer(tokens: Sequence[Token]) -> File: return File._sa(list(tokens)) if __name__ == '__main__': - with open("example.graph", encoding='utf-8') as file: - code = file.read() - tokens = lexer(code, "example.graph") - with open("tokens.txt", 'w', encoding='utf-8') as file: - file.write('\n'.join([str(t) for t in tokens])) + try: + with open("example.graph", encoding='utf-8') as file: + code = file.read() + tokens = lexer(code, "example.graph") + with open("tokens.txt", 'w', encoding='utf-8') as file: + file.write('\n'.join([str(t) for t in tokens])) + syntax_tree = syntactical_analyzer(tokens) + with open("syntax.txt", 'w', encoding='utf-8') as file: + file.write(syntax_tree.tree_str()) + except CompilerError as err: + print(err.compiler_error()) + # raise diff --git a/example.graph b/example.graph index aa5c9ee..0895b0f 100644 --- a/example.graph +++ b/example.graph @@ -9,14 +9,15 @@ SCREEN { FPS: 60, } -R = 7; -r = 4; -d = 1; +const R = 7; +const r = 4; +const d = 1; -s = 0; -e = 8*π; +const s = 0; +const e = 8*pi; +const m = cos(π/32); -ANIM offset {R:s <= x <= e,S:π/128} +ANIM offset {R:s <= x <= e,S:π/128,D:INCREASE} GRAPH { X: (R - r) * cos(t) + d * cos(((R - r) / r) * t), diff --git a/readme.md b/readme.md index 77704ab..7157af7 100644 --- a/readme.md +++ b/readme.md @@ -25,7 +25,7 @@ ID_Start ::= Keywords are not case sensitive. ``` -screen graph anim img +screen graph anim const sum pi alpha beta theta inf product integral ``` @@ -86,7 +86,7 @@ A `file` can contain any number of the following elements: - *Screen* - *Graph* - *Animation* -- *Statement* +- *Constant* ### Screen @@ -148,6 +148,7 @@ Listed bellow are the possible parameters. - Possible Values: Natural Numbers, `Unset` - Note: Centered on zero if `right` and `left` are unset. + The range uses `right` or `left` if one is set. Has no affect if `right` and `left` are set. **Height** @@ -160,6 +161,7 @@ Listed bellow are the possible parameters. - Possible Values: Natural Numbers, `Unset` - Note: Centered on zero if `top` and `bottom` are unset. + The range uses `top` or `bottom` if one is set. Has no affect if `top` and `bottom` are set. **Width Scale** @@ -173,7 +175,7 @@ Listed bellow are the possible parameters. - Possible Values: Positive Real Numbers - Note: Centered on zero if `right` and `left` are unset. - Has no affect if `right` and `left` are set or if `width` are set. + Has no affect if `right` and `left` are set or if `width` is set. 
**Height Scale** - Role: @@ -186,7 +188,7 @@ Listed bellow are the possible parameters. - Possible Values: Positive Real Numbers - Note: Centered on zero if `top` and `bottom` are unset. - Has no affect if `top` and `bottom` are set or if `height` are set. + Has no effect if `top` and `bottom` are set or if `height` is set. **FPS** - Role: @@ -246,9 +248,10 @@ If only alpha is set the default color space is **Grey-scale**. Listed bellow are the available color spaces and their subscripts. Default value in parenthesis for the unused subscripts of the color spaces. -- **Grey-scale**: `wa` - Grey-scale (1), Alpha (1) -- **RGB**: `rgba` - Red (0), Green (0), Blue (0), Alpha (1) -- **HSL**: `hsla` - Hue (0), Saturation (1), Luminosity (0.5), Alpha (1) +- **Grey-scale**: `w`, `a` - Grey-scale (1), Alpha (1) +- **RGB**: `r`, `g`, `b`, `a` - Red (0), Green (0), Blue (0), Alpha (1) +- **HSL**: `h`, `s`, `l`, `a` - Hue (0), Saturation (1), Luminosity (0.5), +Alpha (1) All values can be a real number between 0 and 1 inclusive. Values bellow this range will be interpreted as 0. @@ -284,6 +287,12 @@ is `π/32`. - `decrease` - `bounce` +#### Constant + +A *constant* begins with the `const` keyword. It is then followed by an +identifier, the assignment operator (`=`), an `expression`, and a closing +semicolon (`;`). + ### Expressions @@ -329,6 +338,8 @@ Here are all operators and their types and names in order of precedence. | `+` | Binary | Addition *Operator* | | `=` | Binary | Assignment *Operator* | +**Note**: The assignment operator (`=`) can only be used in a *Constant*. + #### Literal A `literal` is just simply the content of the literal. @@ -337,10 +348,6 @@ A `identifier` is just simply the name of the identifier. -#### Statement - -A `statement` is made up of an `expression` followed by a semicolon (`;`).
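To see the new *Constant* form end to end, the `const m = cos(pi/32);` line from `example.graph` can be pushed through the public entry points on its own. A sketch, assuming the module is importable as `compiler` (the filename is a placeholder that is only opened for error reporting):

```python
from compiler import lexer, syntactical_analyzer  # module name assumed

source = "const m = cos(pi/32);"
tree = syntactical_analyzer(lexer(source, "snippet.graph"))
print(tree.tree_str())
# Expected shape: File -> Constant (m) -> Function Call (cos)
#                 -> Binary Expression (Division) of π and 32
```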
- ## Semantics ### Screen Block diff --git a/syntax.txt b/syntax.txt new file mode 100644 index 0000000..bdfc832 --- /dev/null +++ b/syntax.txt @@ -0,0 +1,101 @@ +File +├─ Screen +│ ├─ Width Scale: 100 +│ ├─ Height Scale: 100 +│ └─ FPS: 60 +├─ Constant (R) +│ └─ Literal Expression (7) +├─ Constant (r) +│ └─ Literal Expression (4) +├─ Constant (d) +│ └─ Literal Expression (1) +├─ Constant (s) +│ └─ Literal Expression (0) +├─ Constant (e) +│ └─ Binary Expression (BinaryOperator.Multiplication) +│ ├─ Literal Expression (8) +│ └─ Literal Expression (π) +├─ Constant (m) +│ └─ Function Call (cos) +│ └─ Binary Expression (BinaryOperator.Division) +│ ├─ Literal Expression (π) +│ └─ Literal Expression (32) +├─ Animation (offset) +│ ├─ Range Start (≤) +│ │ └─ Literal Expression (s) +│ ├─ Range End (≤) +│ │ └─ Literal Expression (e) +│ ├─ Step +│ │ └─ Binary Expression (BinaryOperator.Division) +│ │ ├─ Literal Expression (π) +│ │ └─ Literal Expression (128) +│ └─ Direction: AnimationDirection.Increase +└─ Graph + ├─ X + │ └─ Binary Expression (BinaryOperator.Addition) + │ ├─ Binary Expression (BinaryOperator.Multiplication) + │ │ ├─ Enclosed Expression + │ │ │ └─ Binary Expression (BinaryOperator.Subtraction) + │ │ │ ├─ Literal Expression (R) + │ │ │ └─ Literal Expression (r) + │ │ └─ Function Call (cos) + │ │ └─ Literal Expression (t) + │ └─ Binary Expression (BinaryOperator.Multiplication) + │ ├─ Literal Expression (d) + │ └─ Function Call (cos) + │ └─ Binary Expression (BinaryOperator.Multiplication) + │ ├─ Enclosed Expression + │ │ └─ Binary Expression (BinaryOperator.Division) + │ │ ├─ Enclosed Expression + │ │ │ └─ Binary Expression (BinaryOperator.Subtraction) + │ │ │ ├─ Literal Expression (R) + │ │ │ └─ Literal Expression (r) + │ │ └─ Literal Expression (r) + │ └─ Literal Expression (t) + ├─ Y + │ └─ Binary Expression (BinaryOperator.Subtraction) + │ ├─ Binary Expression (BinaryOperator.Multiplication) + │ │ ├─ Enclosed Expression + │ │ │ └─ Binary Expression (BinaryOperator.Subtraction) + │ │ │ ├─ Literal Expression (R) + │ │ │ └─ Literal Expression (r) + │ │ └─ Function Call (sin) + │ │ └─ Literal Expression (t) + │ └─ Binary Expression (BinaryOperator.Multiplication) + │ ├─ Literal Expression (d) + │ └─ Function Call (sin) + │ └─ Binary Expression (BinaryOperator.Multiplication) + │ ├─ Enclosed Expression + │ │ └─ Binary Expression (BinaryOperator.Division) + │ │ ├─ Enclosed Expression + │ │ │ └─ Binary Expression (BinaryOperator.Subtraction) + │ │ │ ├─ Literal Expression (R) + │ │ │ └─ Literal Expression (r) + │ │ └─ Literal Expression (r) + │ └─ Literal Expression (t) + ├─ T + │ └─ Inline Animation + │ ├─ Range Start (≤) + │ │ └─ Binary Expression (BinaryOperator.Addition) + │ │ ├─ Literal Expression (s) + │ │ └─ Literal Expression (offset) + │ ├─ Range End (≤) + │ │ └─ Binary Expression (BinaryOperator.Addition) + │ │ ├─ Literal Expression (e) + │ │ └─ Literal Expression (offset) + │ ├─ Step + │ │ └─ Binary Expression (BinaryOperator.Division) + │ │ ├─ Literal Expression (π) + │ │ └─ Literal Expression (32) + │ └─ Direction: AnimationDirection.Increase + └─ C_w + └─ Enclosed Expression + └─ Binary Expression (BinaryOperator.Division) + ├─ Enclosed Expression + │ └─ Binary Expression (BinaryOperator.Subtraction) + │ ├─ Literal Expression (t) + │ └─ Literal Expression (s) + └─ Enclosed Expression + └─ Binary Expression (BinaryOperator.Subtraction) + ├─ Literal Expression (e) + └─ Literal Expression (s) diff --git a/tokens.txt b/tokens.txt index dc929d6..e56c386 100644 --- a/tokens.txt +++ 
b/tokens.txt @@ -1,4 +1,4 @@ -Type: Identifier, Value: SCREEN +Type: Keyword, Value: SCREEN Type: Punctuation, Value: { Type: Identifier, Value: Width Type: Identifier, Value: Scale @@ -15,38 +15,65 @@ Type: Punctuation, Value: : Type: NumberLiteral, Value: 60 Type: Punctuation, Value: , Type: Punctuation, Value: } +Type: Keyword, Value: const Type: Identifier, Value: R Type: Punctuation, Value: = Type: NumberLiteral, Value: 7 Type: Punctuation, Value: ; +Type: Keyword, Value: const Type: Identifier, Value: r Type: Punctuation, Value: = Type: NumberLiteral, Value: 4 Type: Punctuation, Value: ; +Type: Keyword, Value: const Type: Identifier, Value: d Type: Punctuation, Value: = Type: NumberLiteral, Value: 1 Type: Punctuation, Value: ; -Type: Identifier, Value: ANIM +Type: Keyword, Value: const +Type: Identifier, Value: s +Type: Punctuation, Value: = +Type: NumberLiteral, Value: 0 +Type: Punctuation, Value: ; +Type: Keyword, Value: const +Type: Identifier, Value: e +Type: Punctuation, Value: = +Type: NumberLiteral, Value: 8 +Type: Punctuation, Value: * +Type: Punctuation, Value: π +Type: Punctuation, Value: ; +Type: Keyword, Value: const +Type: Identifier, Value: m +Type: Punctuation, Value: = +Type: Identifier, Value: cos +Type: Punctuation, Value: ( +Type: Punctuation, Value: π +Type: Punctuation, Value: / +Type: NumberLiteral, Value: 32 +Type: Punctuation, Value: ) +Type: Punctuation, Value: ; +Type: Keyword, Value: ANIM Type: Identifier, Value: offset Type: Punctuation, Value: { Type: Identifier, Value: R Type: Punctuation, Value: : -Type: NumberLiteral, Value: 0 +Type: Identifier, Value: s Type: Punctuation, Value: ≤ Type: Identifier, Value: x Type: Punctuation, Value: ≤ -Type: NumberLiteral, Value: 8 -Type: Punctuation, Value: * -Type: Punctuation, Value: π +Type: Identifier, Value: e Type: Punctuation, Value: , Type: Identifier, Value: S Type: Punctuation, Value: : Type: Punctuation, Value: π Type: Punctuation, Value: / Type: NumberLiteral, Value: 128 +Type: Punctuation, Value: , +Type: Identifier, Value: D +Type: Punctuation, Value: : +Type: Identifier, Value: INCREASE Type: Punctuation, Value: } -Type: Identifier, Value: GRAPH +Type: Keyword, Value: GRAPH Type: Punctuation, Value: { Type: Identifier, Value: X Type: Punctuation, Value: : @@ -110,19 +137,17 @@ Type: Punctuation, Value: ) Type: Punctuation, Value: , Type: Identifier, Value: T Type: Punctuation, Value: : -Type: Identifier, Value: ANIM +Type: Keyword, Value: ANIM Type: Punctuation, Value: { Type: Identifier, Value: R Type: Punctuation, Value: : -Type: NumberLiteral, Value: 0 +Type: Identifier, Value: s Type: Punctuation, Value: + Type: Identifier, Value: offset Type: Punctuation, Value: ≤ Type: Identifier, Value: x Type: Punctuation, Value: ≤ -Type: NumberLiteral, Value: 8 -Type: Punctuation, Value: * -Type: Punctuation, Value: π +Type: Identifier, Value: e Type: Punctuation, Value: + Type: Identifier, Value: offset Type: Punctuation, Value: , @@ -141,22 +166,13 @@ Type: Punctuation, Value: ( Type: Punctuation, Value: ( Type: Identifier, Value: t Type: Punctuation, Value: - -Type: Identifier, Value: r -Type: Punctuation, Value: [ -Type: NumberLiteral, Value: 0 -Type: Punctuation, Value: ] +Type: Identifier, Value: s Type: Punctuation, Value: ) Type: Punctuation, Value: / Type: Punctuation, Value: ( -Type: Identifier, Value: r -Type: Punctuation, Value: [ -Type: NumberLiteral, Value: 1 -Type: Punctuation, Value: ] +Type: Identifier, Value: e Type: Punctuation, Value: - -Type: Identifier, Value: r -Type: Punctuation, Value: 
[ -Type: NumberLiteral, Value: 0 -Type: Punctuation, Value: ] +Type: Identifier, Value: s Type: Punctuation, Value: ) Type: Punctuation, Value: ) Type: Punctuation, Value: } \ No newline at end of file
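Finally, an end-to-end sketch that mirrors the new `__main__` flow but parses an in-memory snippet instead of `example.graph`; it also shows how `_Operator_Precedence` surfaces in the parsed tree. As above, the `compiler` module name and the placeholder filename are assumptions.

```python
from compiler import CompilerError, lexer, syntactical_analyzer  # module name assumed

source = "const z = 1 + 2 * 3;"
try:
    tree = syntactical_analyzer(lexer(source, "snippet.graph"))
    # The loosest-binding operator becomes the root, so the constant parses
    # as 1 + (2 * 3): Addition at the top, Multiplication in its right subtree.
    print(tree.tree_str())
except CompilerError as err:  # same error path the new __main__ block uses
    print(err.compiler_error())
```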