Started Syntax Analysis

Kyler Olsen 2024-04-10 09:20:50 -06:00
parent 5e686d246a
commit 20887b1c7a
3 changed files with 56 additions and 54 deletions


@@ -1972,63 +1972,61 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
# return f"{pre} Directive: {self._content}\n"
# class File:
class File:
# _children: list[Directive | StructBlock | FunctionBlock | EnumBlock]
# _file_info: FileInfo
_children: list[Screen | Graph | Animation | Statement]
_file_info: FileInfo
# def __init__(
# self,
# children: list[Directive | StructBlock | FunctionBlock | EnumBlock],
# file_info: FileInfo,
# ):
# self._children = children[:]
# self._file_info = file_info
def __init__(
self,
children: list[Screen | Graph | Animation | Statement],
file_info: FileInfo,
):
self._children = children[:]
self._file_info = file_info
# @property
# def children(self) -> list[
# Directive |
# StructBlock |
# FunctionBlock |
# EnumBlock
# ]:
# return self._children[:]
@property
def children(self) -> list[
Screen |
Graph |
Animation |
Statement
]:
return self._children[:]
# @property
# def file_info(self) -> FileInfo: return self._file_info
@property
def file_info(self) -> FileInfo: return self._file_info
# def tree_str(self) -> str:
# s: str = " File\n"
# if self._children:
# for child in self._children[:-1]:
# s += child.tree_str("├─", "│ ")
# s += self._children[-1].tree_str("└─", " ")
# return s
def tree_str(self) -> str:
s: str = " File\n"
if self._children:
for child in self._children[:-1]:
s += child.tree_str("├─", "│ ")
s += self._children[-1].tree_str("└─", " ")
return s
# @staticmethod
# def _sa(tokens: list[Token]) -> "File":
# children: list[Directive | StructBlock | FunctionBlock | EnumBlock] = []
# file_fi: FileInfo = tokens[0].file_info + tokens[-1].file_info
@staticmethod
def _sa(tokens: list[Token]) -> "File":
children: list[Screen | Graph | Animation | Statement] = []
file_fi: FileInfo = tokens[0].file_info + tokens[-1].file_info
# while tokens:
# token = tokens.pop(0)
# _assert_token_mult(token, (Directive, Keyword))
# if isinstance(token, Directive):
# children.append(Directive(token.value, token.file_info))
# elif isinstance(token, Keyword):
# match token.value:
# case 'struct':
# children.append(StructBlock._sa(tokens, token))
# case 'enum':
# children.append(EnumBlock._sa(tokens, token))
# case 'fn':
# children.append(FunctionBlock._sa(tokens, token))
# case _:
# raise ExpectedKeyword(token, "struct', 'enum', or 'fn")
# else:
# raise UnexpectedToken(token, "directive' or 'keyword")
while tokens:
token = tokens.pop(0)
_assert_token_mult(token, (Keyword,))
if isinstance(token, Keyword):
match token.value.lower():
case 'screen':
children.append(Screen._sa(tokens, token))
case 'graph':
children.append(Graph._sa(tokens, token))
case 'anim':
children.append(Animation._sa(tokens, token))
case _:
raise ExpectedKeyword(token, "screen', 'graph', or 'anim")
else:
raise UnexpectedToken(token, "keyword")
# return File(children, file_fi)
return File(children, file_fi)
# def _assert_token(
@@ -2394,8 +2392,8 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
# expr_tokens: list[Token] = [token] + _get_to_symbol(tokens)[0]
# return _expression_sa(expr_tokens)
# def syntactical_analyzer(tokens: Sequence[Token]) -> File:
# return File._sa(list(tokens))
def syntactical_analyzer(tokens: Sequence[Token]) -> File:
return File._sa(list(tokens))
if __name__ == '__main__':
with open("example.graph", encoding='utf-8') as file:
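
For context, the new entry point can be exercised end to end roughly as follows. This is an illustrative sketch, not the project's actual `__main__` block (which is truncated above); it assumes only the signatures visible in this diff: `lexer(file: str, filename: str) -> Sequence[Token]`, `syntactical_analyzer(tokens: Sequence[Token]) -> File`, and `File.tree_str()`.

# Illustrative driver sketch -- assumes only the signatures shown in this diff.
if __name__ == '__main__':
    with open("example.graph", encoding='utf-8') as file:
        tokens = lexer(file.read(), "example.graph")  # tokenize the source text
    syntax_tree = syntactical_analyzer(tokens)        # File._sa over the token list
    print(syntax_tree.tree_str())                     # pretty-print the resulting tree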


@@ -13,11 +13,14 @@ R = 7;
r = 4;
d = 1;
ANIM offset {R:0 <= x <= 8*π,S:π/128}
s = 0;
e = 8*π;
ANIM offset {R:s <= x <= e,S:π/128}
GRAPH {
X: (R - r) * cos(t) + d * cos(((R - r) / r) * t),
Y: (R - r) * sin(t) - d * sin(((R - r) / r) * t),
T: ANIM {R:0+offset <= x <= 8*π+offset,S:π/32},
C_w: ((t - r[0]) / (r[1] - r[0]))
T: ANIM {R:s+offset <= x <= e+offset,S:π/32},
C_w: ((t - s) / (e - s))
}
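
Aside: the X/Y expressions above trace a hypotrochoid. A purely illustrative sketch (plain Python/NumPy, not part of the commit) of the same arithmetic, using the new `s`/`e` bounds and the reworked `C_w` weight:

import numpy as np

R, r, d = 7, 4, 1            # constants from the example file
s, e = 0.0, 8 * np.pi        # new animation bounds replacing the hard-coded 0 and 8*π
t = np.linspace(s, e, 1000)  # parameter values the animation sweeps over
x = (R - r) * np.cos(t) + d * np.cos(((R - r) / r) * t)
y = (R - r) * np.sin(t) - d * np.sin(((R - r) / r) * t)
c_w = (t - s) / (e - s)      # colour weight, normalized to [0, 1] in terms of s and e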


@@ -86,6 +86,7 @@ A `file` can contain any number of the following elements:
- *Screen*
- *Graph*
- *Animation*
- *Statement*
<!-- - *Image* -->
### Screen