Compare commits

2 Commits
ab690f4bb5...68c114ce1d

Author | SHA1 | Date
---|---|---
 | 68c114ce1d |
 | c9b0d0875a |
@@ -22,14 +22,14 @@ typedef enum {
 
 typedef struct {
     TokenType type;
-    const char *start;
+    const char* start;
     size_t length;
     FileInfo file_info;
 } Token;
 
 typedef struct {
-    const char *filename;
-    const char *source;
+    const char* filename;
+    const char* source;
     size_t pos;
     size_t column;
     size_t line;
@@ -43,7 +43,7 @@ typedef struct {
     };
 } TokenResult;
 
-void lexer_init(Lexer *lexer, const char *filename, const char *source);
-TokenResult lexer_next(Lexer *lexer);
+void lexer_init(Lexer* lexer, const char* filename, const char* source);
+TokenResult lexer_next(Lexer* lexer);
 
 #endif // SYNC_LEXER_H
src/lexer.c (23 changes)
@@ -3,7 +3,7 @@
 #include "sync/types.h"
 #include "sync/lexer.h"
 
-void lexer_init(Lexer *lexer, const char *filename, const char *source) {
+void lexer_init(Lexer* lexer, const char* filename, const char* source) {
     lexer->filename = filename;
     lexer->source = source;
     lexer->pos = 0;
@@ -11,7 +11,7 @@ void lexer_init(Lexer *lexer, const char *filename, const char *source) {
     lexer->line = 1;
 }
 
-static FileInfo get_file_info(Lexer *lexer, size_t start, size_t start_line) {
+static FileInfo get_file_info(Lexer* lexer, size_t start, size_t start_line) {
     return (FileInfo){
         .filename = lexer->filename,
         .line = lexer->line,
@@ -21,15 +21,15 @@ static FileInfo get_file_info(Lexer *lexer, size_t start, size_t start_line) {
     };
 }
 
-static char peek(Lexer *lexer) {
+static char peek(Lexer* lexer) {
     return lexer->source[lexer->pos];
 }
 
-static char far_peek(Lexer *lexer, size_t index) {
+static char far_peek(Lexer* lexer, size_t index) {
     return lexer->source[lexer->pos + index];
 }
 
-static void advance(Lexer *lexer) {
+static void advance(Lexer* lexer) {
     if (lexer->source[lexer->pos] == '\n') {
         lexer->line++;
         lexer->column = 1;
@@ -78,15 +78,20 @@ static char is_identifier_char(char c) {
     return isalnum(c) || c == '_';
 }
 
-static TokenResult lexer_result(Lexer *lexer, TokenType type, size_t start, size_t start_line) {
-    return (TokenResult){SYNC_RESULT, .result = (Token){type, &lexer->source[start], lexer->pos - start, get_file_info(lexer, start, start_line)}};
+static TokenResult lexer_result(Lexer* lexer, TokenType type, size_t start, size_t start_line) {
+    return (TokenResult){SYNC_RESULT, .result = (Token){
+        type,
+        &lexer->source[start],
+        lexer->pos - start,
+        get_file_info(lexer, start, start_line)
+    }};
 }
 
-static TokenResult lexer_error(Lexer *lexer, const char *message, size_t start, size_t start_line) {
+static TokenResult lexer_error(Lexer* lexer, const char* message, size_t start, size_t start_line) {
     return (TokenResult){SYNC_ERROR, .error = (SyncError){SYNC_LEXER_ERROR, message, get_file_info(lexer, start, start_line)}};
 }
 
-TokenResult lexer_next(Lexer *lexer) {
+TokenResult lexer_next(Lexer* lexer) {
     // Gets the next token from the source
 
     while (isspace(peek(lexer)) || peek(lexer) == '/') {
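Taken together, the header and lexer.c hunks above fix the lexer's public surface: `lexer_init` binds a `Lexer` to a filename and source buffer, and `lexer_next` returns a `TokenResult` carrying either a `Token` (tagged `SYNC_RESULT`) or a `SyncError` (tagged `SYNC_ERROR`). A minimal consuming loop might look like the sketch below; the tag field name `status`, the error field `message`, and the end-of-input enumerator `TOKEN_EOF` are illustrative guesses, since those definitions live in sync/types.h and are not part of this diff.

```c
#include <stdio.h>
#include "sync/types.h"
#include "sync/lexer.h"

/* Sketch of a driver loop over the API declared in sync/lexer.h.
 * NOTE: `status`, `message`, and TOKEN_EOF are assumed names; the real
 * fields and enumerators are defined in sync/types.h, not in this diff. */
static int dump_tokens(const char* filename, const char* source) {
    Lexer lexer;
    lexer_init(&lexer, filename, source);

    for (;;) {
        TokenResult res = lexer_next(&lexer);
        if (res.status == SYNC_ERROR) {              /* assumed tag field */
            fprintf(stderr, "lex error: %s\n", res.error.message);
            return 1;
        }
        Token tok = res.result;
        /* token text points into the source buffer and is not NUL-terminated */
        printf("%.*s\n", (int)tok.length, tok.start);
        if (tok.type == TOKEN_EOF)                   /* assumed enumerator */
            return 0;
    }
}
```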
src/main.c (18 changes)
@@ -3,21 +3,23 @@
 #include "sync/types.h"
 #include "sync/lexer.h"
 
+const char* TOKEN_TYPES[] = {
+    "EOF", "IDENTIFIER", "NUMBER", "OPERATOR",
+    "LPAREN", "RPAREN", "SEMICOLON", "LBRACE",
+    "RBRACE", "LBRACKET", "RBRACKET", "CHARACTER",
+    "STRING"
+};
+
 static void print_token(Token token) {
     printf("Token: %-15s | Text: %.*s\n",
-        (const char *[]){
-            "EOF", "IDENTIFIER", "NUMBER", "OPERATOR",
-            "LPAREN", "RPAREN", "SEMICOLON", "LBRACE",
-            "RBRACE", "LBRACKET", "RBRACKET", "CHARACTER",
-            "STRING"
-        }[token.type],
+        TOKEN_TYPES[token.type],
         (int)token.length, token.start
     );
 }
 
 int main(void) {
-    const char *filename = "test/example1.zn";
-    FILE *file = fopen(filename, "rb");
+    const char* filename = "test/example1.zn";
+    FILE* file = fopen(filename, "rb");
     if (!file) {
         fprintf(stderr, "Failed to open file: %s\n", filename);
         return 1;
@@ -5,7 +5,7 @@
 #include "../include/sync/lexer.h"
 
 void test_tokenize_simple_assignment(void) {
-    const char *src = "x = 42;";
+    const char* src = "x = 42;";
     Lexer lexer;
     lexer_init(&lexer, "<stdin>", src);
 
@@ -26,7 +26,7 @@ void test_tokenize_simple_assignment(void) {
 }
 
 void test_tokenize_function_call(void) {
-    const char *src = "print(x);";
+    const char* src = "print(x);";
     Lexer lexer;
     lexer_init(&lexer, "<stdin>", src);
 