101 lines
3.0 KiB
C
101 lines
3.0 KiB
C
// Kyler Olsen
// ZINC Bootstrap compiler
// Main
// June 2025
|
#include <stdio.h>
|
|
#include <stdlib.h>
|
|
#include "sync/types.h"
|
|
#include "sync/lexer.h"
|
|
#include "sync/syntax.h"
|
|
|
|
// Human-readable names for token types, indexed by a token's `type` field
// (see print_token below).
// NOTE(review): the order of these strings must mirror the token-type enum
// (presumably declared in sync/lexer.h or sync/types.h) — confirm when
// adding or reordering token kinds.
const char* TOKEN_TYPES[] = {
    "EOF", "IDENTIFIER", "NUMBER", "OPERATOR", "LPAREN", "RPAREN", "SEMICOLON",
    "LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "CHARACTER", "STRING",
    // Definitions and Declarations
    "KW_CONST", "KW_ENUM", "KW_FN", "KW_LET", "KW_MUT", "KW_PUBLIC",
    "KW_STATIC", "KW_STRUCT", "KW_UNION",
    // Control Flow
    "KW_BREAK", "KW_CASE", "KW_CONTINUE", "KW_DEFAULT", "KW_DO", "KW_ELSE",
    "KW_FOR", "KW_IF", "KW_MATCH", "KW_RETURN", "KW_SWITCH", "KW_WHILE",
    // Values
    "KW_FALSE", "KW_TRUE",
    // Types
    "KW_BOOL", "KW_F32", "KW_F64", "KW_I8", "KW_I16", "KW_I32", "KW_I64",
    "KW_U8", "KW_U16", "KW_U32", "KW_U64", "KW_VOID",
    // Modules
    "KW_AS", "KW_IMPORT",
    // Operators
    "KW_AND", "KW_IS", "KW_NOT", "KW_OR", "KW_SIZEOF", "KW_XOR",
};
|
|
|
|
// Print a one-line, human-readable description of a token to stdout:
// its type name (looked up in TOKEN_TYPES) followed by the raw source
// text the token spans.
static void print_token(Token token)
{
    const char *name = TOKEN_TYPES[token.type];
    int length = (int)token.length;

    printf("Token: %-15s | Text: %.*s\n", name, length, token.start);
}
|
|
|
|
int main(void) {
|
|
const char* filename = "test/example1.zn";
|
|
FILE* file = fopen(filename, "rb");
|
|
if (!file) {
|
|
fprintf(stderr, "Failed to open file: %s\n", filename);
|
|
return 1;
|
|
}
|
|
fseek(file, 0, SEEK_END);
|
|
long filesize = ftell(file);
|
|
fseek(file, 0, SEEK_SET);
|
|
char *source = malloc(filesize + 1);
|
|
if (!source) {
|
|
fprintf(stderr, "Failed to allocate memory.\n");
|
|
fclose(file);
|
|
return 1;
|
|
}
|
|
fread(source, 1, filesize, file);
|
|
source[filesize] = '\0';
|
|
fclose(file);
|
|
|
|
Lexer lexer;
|
|
lexer_init(&lexer, filename, source);
|
|
|
|
LexerResult lexer_result = lexical_analysis(&lexer);
|
|
if (lexer_result.type == SYNC_ERROR) {
|
|
fprintf(stderr, "Error: %s\n", lexer_result.error.message);
|
|
free(source);
|
|
return lexer_result.error.code;
|
|
}
|
|
|
|
TokenResult* token_result = lexer_result.result;
|
|
int error_count = 0;
|
|
while (token_result != NULL) {
|
|
if (token_result->type == SYNC_ERROR) {
|
|
fprintf(stderr, "Error: %s\n", token_result->error.message);
|
|
fprintf(stderr, "\tFilename: %s\n", token_result->error.file_info.filename);
|
|
fprintf(stderr, "\tLn: %zi, Col: %zi\n", token_result->error.file_info.line, token_result->error.file_info.column);
|
|
clean_token_result(lexer_result.result);
|
|
free(source);
|
|
return 1;
|
|
} else {
|
|
print_token(token_result->result);
|
|
}
|
|
token_result = token_result->next;
|
|
}
|
|
|
|
TokenArrayResult array_result = token_result_array(lexer_result.result);
|
|
clean_token_result(lexer_result.result);
|
|
if (array_result.type == SYNC_ERROR) {
|
|
fprintf(stderr, "Error: %s\n", array_result.error.message);
|
|
free(source);
|
|
return array_result.error.code;
|
|
}
|
|
TokenArray tokens = array_result.result;
|
|
|
|
free(tokens.tokens);
|
|
free(source);
|
|
|
|
printf("Compilation Completed.");
|
|
|
|
return 0;
|
|
}
|