Updated Exception Types

Kyler 2024-02-28 00:46:37 -07:00
parent d14cb9f034
commit 4a3d6ee8d2
2 changed files with 19 additions and 13 deletions

File 1 of 2:

@@ -22,4 +22,7 @@ class FileInfo:
         self._length = length
 
-class CompilerError(Exception): pass
+class CompilerError(Exception):
+    def __init__(self, message: str, file_info: FileInfo):
+        super().__init__(message, file_info)
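With this change, CompilerError carries a FileInfo alongside the message instead of being a bare Exception. A minimal usage sketch, assuming the FileInfo(filename, line, col, length) argument order visible in the lexer calls below (the filename and positions here are made up):

    # Hypothetical location; the argument order mirrors the
    # FileInfo(filename, current_line, current_col, len(current))
    # calls in the lexer diff below.
    fi = FileInfo("example.src", 12, 4, 7)
    raise CompilerError("Something Went Wrong", fi)

Since __init__ forwards both arguments to Exception, handlers can recover them from e.args as (message, file_info).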

File 2 of 2:

@@ -127,7 +127,10 @@ _Punctuation = (
 )
 
-class LexerError(CompilerError): pass
+class LexerError(CompilerError):
+    def __init__(self, message: str, file_info: FileInfo):
+        super().__init__(message, file_info)
 
 class Token:
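LexerError forwards the same pair up to CompilerError, so a handler can catch the base class and still get the location. A hedged sketch (values again hypothetical):

    try:
        raise LexerError("Unexpected Newline", FileInfo("example.src", 3, 0, 1))
    except CompilerError as e:       # LexerError is-a CompilerError
        message, file_info = e.args  # both args stored by Exception.__init__
        print("lexer error:", message)

The override is textually identical to the base class's; it mainly pins down the signature subclasses are expected to honor.
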
@@ -165,15 +168,15 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
     token_type: _InterTokenType = _InterTokenType.Generic
     for line, line_str in enumerate(file.splitlines()):
-        if token_type in _NewLineErrorTokens:
-            raise LexerError("Unexpected Newline")
-        if token_type in _NewLineTerminatedTokens:
-            fi = FileInfo(filename, current_line, current_col, len(current))
+        fi = FileInfo(filename, current_line, current_col, len(current))
+        if token_type in _NewLineErrorTokens:
+            raise LexerError("Unexpected Newline", fi)
+        if token_type in _NewLineTerminatedTokens:
             if token_type is _InterTokenType.Directive:
                 tokens.append(Directive(current, fi))
             elif token_type is _InterTokenType.Word:
                 if len(current) > 15:
-                    raise LexerError("Identifier Too Long")
+                    raise LexerError("Identifier Too Long", fi)
                 if current in _Keywords:
                     tokens.append(Keyword(current, fi))
                 else:
@@ -182,7 +185,7 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
                 tokens.append(NumberLiteral(current, fi))
             elif token_type is _InterTokenType.Punctuation:
                 if current not in _Punctuation:
-                    raise LexerError("Invalid Punctuation")
+                    raise LexerError("Invalid Punctuation", fi)
                 tokens.append(Punctuation(current, fi))
             token_type = _InterTokenType.Generic
@@ -198,10 +201,10 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
             if char in _ID_Continue:
                 current += char
             else:
-                if len(current) > 15:
-                    raise LexerError("Identifier Too Long")
                 fi = FileInfo(
                     filename, current_line, current_col, len(current))
+                if len(current) > 15:
+                    raise LexerError("Identifier Too Long", fi)
                 if current in _Keywords:
                     tokens.append(Keyword(current, fi))
                 else:
@@ -228,14 +231,14 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
                 escaped = True
             elif char == "'":
                 current += char
+                fi = FileInfo(
+                    filename, current_line, current_col, len(current))
                 if (
                     current[1] != '\\' and
                     len(current) == 3 or
                     len(current) > 3
                 ):
-                    raise LexerError("Character Literal Too Long")
-                fi = FileInfo(
-                    filename, current_line, current_col, len(current))
+                    raise LexerError("Character Literal Too Long", fi)
                 tokens.append(StringLiteral(current, fi))
                 token_type = _InterTokenType.Generic
                 continue
@@ -257,10 +260,10 @@ def lexer(file: str, filename: str) -> Sequence[Token]:
             if char in _Punctuation_Any:
                 current += char
             else:
-                if current not in _Punctuation:
-                    raise LexerError("Invalid Punctuation")
                 fi = FileInfo(
                     filename, current_line, current_col, len(current))
+                if current not in _Punctuation:
+                    raise LexerError("Invalid Punctuation", fi)
                 tokens.append(Punctuation(current, fi))
                 token_type = _InterTokenType.Generic
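
Every raise site in the lexer now builds its FileInfo before the check and passes it along, so callers can report where lexing failed. A sketch under the same assumptions (the input string is invented, and e.args is unpacked rather than guessing FileInfo accessor names):

    source = "let this_identifier_is_too_long = 1"  # hypothetical input
    try:
        tokens = lexer(source, "example.src")
    except LexerError as e:
        # (message, file_info) as stored by Exception.__init__
        message, file_info = e.args
        print(f"example.src: {message}")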