Adjusted keywords

Kyler Olsen 2025-06-19 22:20:53 -06:00
parent 14158ed9f4
commit 35e4b97575
2 changed files with 44 additions and 11 deletions

View File

@@ -35,20 +35,20 @@ typedef enum {
     TOKEN_KW_UNION,
     // Control Flow
     TOKEN_KW_BREAK,
-    TOKEN_KW_CASE,
     TOKEN_KW_CONTINUE,
-    TOKEN_KW_DEFAULT,
     TOKEN_KW_DO,
     TOKEN_KW_ELSE,
     TOKEN_KW_FOR,
     TOKEN_KW_IF,
-    TOKEN_KW_MATCH,
     TOKEN_KW_RETURN,
-    TOKEN_KW_SWITCH,
     TOKEN_KW_WHILE,
     // Values
+    TOKEN_KW_ERROR,
     TOKEN_KW_FALSE,
+    TOKEN_KW_NONE,
+    TOKEN_KW_SOME,
     TOKEN_KW_TRUE,
+    TOKEN_KW_VALUE,
     // Types
     TOKEN_KW_BOOL,
     TOKEN_KW_F32,
@@ -57,6 +57,8 @@ typedef enum {
     TOKEN_KW_I16,
     TOKEN_KW_I32,
     TOKEN_KW_I64,
+    TOKEN_KW_OPTION,
+    TOKEN_KW_RESULT,
     TOKEN_KW_U8,
     TOKEN_KW_U16,
     TOKEN_KW_U32,
@@ -67,6 +69,7 @@ typedef enum {
     TOKEN_KW_IMPORT,
     // Operators
     TOKEN_KW_AND,
+    TOKEN_KW_CAST,
     TOKEN_KW_IS,
     TOKEN_KW_NOT,
     TOKEN_KW_OR,

View File

@@ -32,20 +32,20 @@ const KeywordPair KEYWORDS[] = {
     {"union", TOKEN_KW_UNION},
     // Control Flow
     {"break", TOKEN_KW_BREAK},
-    {"case", TOKEN_KW_CASE},
     {"continue", TOKEN_KW_CONTINUE},
-    {"default", TOKEN_KW_DEFAULT},
     {"do", TOKEN_KW_DO},
     {"else", TOKEN_KW_ELSE},
     {"for", TOKEN_KW_FOR},
     {"if", TOKEN_KW_IF},
-    {"match", TOKEN_KW_MATCH},
     {"return", TOKEN_KW_RETURN},
-    {"switch", TOKEN_KW_SWITCH},
     {"while", TOKEN_KW_WHILE},
     // Values
+    {"Error", TOKEN_KW_ERROR},
     {"False", TOKEN_KW_FALSE},
+    {"None", TOKEN_KW_NONE},
+    {"Some", TOKEN_KW_SOME},
     {"True", TOKEN_KW_TRUE},
+    {"Value", TOKEN_KW_VALUE},
     // Types
     {"bool", TOKEN_KW_BOOL},
     {"f32", TOKEN_KW_F32},
@@ -54,6 +54,8 @@ const KeywordPair KEYWORDS[] = {
     {"i16", TOKEN_KW_I16},
     {"i32", TOKEN_KW_I32},
     {"i64", TOKEN_KW_I64},
+    {"option", TOKEN_KW_OPTION},
+    {"result", TOKEN_KW_RESULT},
     {"u8", TOKEN_KW_U8},
     {"u16", TOKEN_KW_U16},
     {"u32", TOKEN_KW_U32},
@@ -64,6 +66,7 @@ const KeywordPair KEYWORDS[] = {
     {"import", TOKEN_KW_IMPORT},
     // Operators
     {"and", TOKEN_KW_AND},
+    {"cast", TOKEN_KW_CAST},
     {"is", TOKEN_KW_IS},
     {"not", TOKEN_KW_NOT},
     {"or", TOKEN_KW_OR},
@@ -72,8 +75,34 @@ const KeywordPair KEYWORDS[] = {
 };
 
 const char *RESERVED_KEYWORDS[] = {
-    "extern", "f16", "f128", "i128", "impl", "in", "move", "new", "self",
-    "super", "trait", "tuple", "type", "u128", "use", "where", "yeet",
+    "case",
+    "default",
+    "defer",
+    "del",
+    "extern",
+    "f16",
+    "f128",
+    "handle",
+    "i128",
+    "impl",
+    "in",
+    "match",
+    "move",
+    "new",
+    "pointer",
+    "Pointer",
+    "Result",
+    "self",
+    "super",
+    "switch",
+    "trait",
+    "transform",
+    "tuple",
+    "type",
+    "u128",
+    "use",
+    "where",
+    "yeet",
 };
 
 void lexer_init(Lexer* lexer, const char* filename, const char* source) {
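
As a reading aid only: a minimal, self-contained sketch of how an identifier lexeme is typically checked against tables shaped like KEYWORDS and RESERVED_KEYWORDS once it has been scanned. None of the names below (classify_word, ident_token, reserved_error_token) come from this repository, and the real lexer may do this differently.

#include <stddef.h>
#include <string.h>

/* Hypothetical pair type mirroring the KEYWORDS entries in the diff above. */
typedef struct { const char *word; int token; } KeywordPair;

/* Return the keyword token for lexeme, a designated error token if the word
 * is merely reserved for future use, or ident_token for a plain identifier. */
static int classify_word(const char *lexeme,
                         const KeywordPair *keywords, size_t n_keywords,
                         const char *const *reserved, size_t n_reserved,
                         int ident_token, int reserved_error_token) {
    for (size_t i = 0; i < n_keywords; i++)
        if (strcmp(lexeme, keywords[i].word) == 0)
            return keywords[i].token;          /* e.g. "cast" -> TOKEN_KW_CAST */
    for (size_t i = 0; i < n_reserved; i++)
        if (strcmp(lexeme, reserved[i]) == 0)
            return reserved_error_token;       /* reserved, not yet usable */
    return ident_token;                        /* ordinary identifier */
}

Since KEYWORDS is grouped by category rather than globally sorted, a plain linear scan (or a small hash) is the natural lookup here; with only a few dozen entries the cost is negligible.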
@@ -311,6 +340,7 @@ static LexerResult lexer_next(Lexer* lexer) {
             return lexer_result(lexer, TOKEN_OPERATOR, start, start_line);
         case '.': return lexer_result(lexer, TOKEN_OPERATOR, start, start_line);
         case ',': return lexer_result(lexer, TOKEN_OPERATOR, start, start_line);
+        case ':': return lexer_result(lexer, TOKEN_OPERATOR, start, start_line);
         case '(': return lexer_result(lexer, TOKEN_LPAREN, start, start_line);
         case ')': return lexer_result(lexer, TOKEN_RPAREN, start, start_line);
         case ';': return lexer_result(lexer, TOKEN_SEMICOLON, start, start_line);
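
The final hunk extends the single-character dispatch so ':' is emitted as an operator token, the same way '.' and ',' already are. As a rough standalone illustration of that pattern (not the project's actual lexer_next or token enum; the kinds below are placeholders):

/* Cut-down single-character classifier; a real lexer also records the start
 * offset and line, as the lexer_result calls in the diff do. */
typedef enum { TOK_OPERATOR, TOK_LPAREN, TOK_RPAREN, TOK_SEMICOLON, TOK_OTHER } TokKind;

static TokKind classify_punct(char c) {
    switch (c) {
    case '.': case ',': case ':':      /* ':' is the newly added operator */
        return TOK_OPERATOR;
    case '(': return TOK_LPAREN;
    case ')': return TOK_RPAREN;
    case ';': return TOK_SEMICOLON;
    default:  return TOK_OTHER;
    }
}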