Compare commits
2 Commits
ab690f4bb5 ... 68c114ce1d

Author | SHA1 | Date |
---|---|---|
 | 68c114ce1d | |
 | c9b0d0875a | |
include/sync/lexer.h

@@ -22,14 +22,14 @@ typedef enum {
 
 typedef struct {
     TokenType type;
-    const char *start;
+    const char* start;
     size_t length;
     FileInfo file_info;
 } Token;
 
 typedef struct {
-    const char *filename;
-    const char *source;
+    const char* filename;
+    const char* source;
     size_t pos;
     size_t column;
     size_t line;
@@ -43,7 +43,7 @@ typedef struct {
     };
 } TokenResult;
 
-void lexer_init(Lexer *lexer, const char *filename, const char *source);
-TokenResult lexer_next(Lexer *lexer);
+void lexer_init(Lexer* lexer, const char* filename, const char* source);
+TokenResult lexer_next(Lexer* lexer);
 
 #endif // SYNC_LEXER_H
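For orientation, a minimal sketch of how the API declared in this header might be driven, pieced together from the declarations above, the print_token helper in src/main.c, and the lexer_init calls in the tests below. The name of the result-tag field (kind here) and the end-of-file enumerator (TOKEN_EOF here) are not visible in this diff and are assumed.

#include <stdio.h>

#include "sync/lexer.h"

int main(void) {
    Lexer lexer;
    lexer_init(&lexer, "<stdin>", "x = 42;");       /* same setup as in the tests */

    for (;;) {
        TokenResult r = lexer_next(&lexer);
        if (r.kind == SYNC_ERROR) {                 /* 'kind' is an assumed name for the tag field */
            fprintf(stderr, "lexer error\n");
            return 1;
        }
        Token tok = r.result;
        printf("%.*s\n", (int)tok.length, tok.start);
        if (tok.type == TOKEN_EOF) {                /* TOKEN_EOF is an assumed enumerator name */
            break;
        }
    }
    return 0;
}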
23 src/lexer.c

@@ -3,7 +3,7 @@
 #include "sync/types.h"
 #include "sync/lexer.h"
 
-void lexer_init(Lexer *lexer, const char *filename, const char *source) {
+void lexer_init(Lexer* lexer, const char* filename, const char* source) {
     lexer->filename = filename;
     lexer->source = source;
     lexer->pos = 0;
@@ -11,7 +11,7 @@ void lexer_init(Lexer *lexer, const char *filename, const char *source) {
     lexer->line = 1;
 }
 
-static FileInfo get_file_info(Lexer *lexer, size_t start, size_t start_line) {
+static FileInfo get_file_info(Lexer* lexer, size_t start, size_t start_line) {
     return (FileInfo){
         .filename = lexer->filename,
         .line = lexer->line,
@@ -21,15 +21,15 @@ static FileInfo get_file_info(Lexer *lexer, size_t start, size_t start_line) {
     };
 }
 
-static char peek(Lexer *lexer) {
+static char peek(Lexer* lexer) {
     return lexer->source[lexer->pos];
 }
 
-static char far_peek(Lexer *lexer, size_t index) {
+static char far_peek(Lexer* lexer, size_t index) {
     return lexer->source[lexer->pos + index];
 }
 
-static void advance(Lexer *lexer) {
+static void advance(Lexer* lexer) {
     if (lexer->source[lexer->pos] == '\n') {
         lexer->line++;
         lexer->column = 1;
@@ -78,15 +78,20 @@ static char is_identifier_char(char c) {
     return isalnum(c) || c == '_';
 }
 
-static TokenResult lexer_result(Lexer *lexer, TokenType type, size_t start, size_t start_line) {
-    return (TokenResult){SYNC_RESULT, .result = (Token){type, &lexer->source[start], lexer->pos - start, get_file_info(lexer, start, start_line)}};
+static TokenResult lexer_result(Lexer* lexer, TokenType type, size_t start, size_t start_line) {
+    return (TokenResult){SYNC_RESULT, .result = (Token){
+        type,
+        &lexer->source[start],
+        lexer->pos - start,
+        get_file_info(lexer, start, start_line)
+    }};
 }
 
-static TokenResult lexer_error(Lexer *lexer, const char *message, size_t start, size_t start_line) {
+static TokenResult lexer_error(Lexer* lexer, const char* message, size_t start, size_t start_line) {
     return (TokenResult){SYNC_ERROR, .error = (SyncError){SYNC_LEXER_ERROR, message, get_file_info(lexer, start, start_line)}};
 }
 
-TokenResult lexer_next(Lexer *lexer) {
+TokenResult lexer_next(Lexer* lexer) {
     // Gets the next token from the source
 
     while (isspace(peek(lexer)) || peek(lexer) == '/') {
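A note on the reformatted lexer_result above: the Token compound literal is initialized positionally. Written with designated initializers (purely illustrative, using the field names from sync/lexer.h; not part of this change), the same helper would read:

static TokenResult lexer_result(Lexer* lexer, TokenType type, size_t start, size_t start_line) {
    return (TokenResult){SYNC_RESULT, .result = (Token){
        .type      = type,                                    /* kind of token just scanned */
        .start     = &lexer->source[start],                   /* slice into the source buffer */
        .length    = lexer->pos - start,                      /* number of characters consumed */
        .file_info = get_file_info(lexer, start, start_line)  /* filename/line for diagnostics */
    }};
}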
14 src/main.c

@@ -3,21 +3,23 @@
 #include "sync/types.h"
 #include "sync/lexer.h"
 
-static void print_token(Token token) {
-    printf("Token: %-15s | Text: %.*s\n",
-        (const char *[]){
+const char* TOKEN_TYPES[] = {
     "EOF", "IDENTIFIER", "NUMBER", "OPERATOR",
     "LPAREN", "RPAREN", "SEMICOLON", "LBRACE",
     "RBRACE", "LBRACKET", "RBRACKET", "CHARACTER",
     "STRING"
-        }[token.type],
+};
+
+static void print_token(Token token) {
+    printf("Token: %-15s | Text: %.*s\n",
+        TOKEN_TYPES[token.type],
         (int)token.length, token.start
     );
 }
 
 int main(void) {
-    const char *filename = "test/example1.zn";
-    FILE *file = fopen(filename, "rb");
+    const char* filename = "test/example1.zn";
+    FILE* file = fopen(filename, "rb");
     if (!file) {
         fprintf(stderr, "Failed to open file: %s\n", filename);
         return 1;
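One consequence of hoisting TOKEN_TYPES to file scope in src/main.c is that the array is indexed by TokenType, so its order has to stay in step with the enum in sync/lexer.h. A compile-time check along these lines could guard that; TOKEN_TYPE_COUNT is a hypothetical trailing enumerator that this diff does not show.

/* Sketch only: assumes the TokenType enum ends with a TOKEN_TYPE_COUNT sentinel. */
_Static_assert(sizeof(TOKEN_TYPES) / sizeof(TOKEN_TYPES[0]) == TOKEN_TYPE_COUNT,
               "TOKEN_TYPES must provide one name per TokenType value");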
@@ -5,7 +5,7 @@
 #include "../include/sync/lexer.h"
 
 void test_tokenize_simple_assignment(void) {
-    const char *src = "x = 42;";
+    const char* src = "x = 42;";
     Lexer lexer;
     lexer_init(&lexer, "<stdin>", src);
 
@@ -26,7 +26,7 @@ void test_tokenize_simple_assignment(void) {
 }
 
 void test_tokenize_function_call(void) {
-    const char *src = "print(x);";
+    const char* src = "print(x);";
     Lexer lexer;
     lexer_init(&lexer, "<stdin>", src);
 