Tokenizing and printing

vegowotenks 2024-10-01 10:53:26 +02:00
parent 371eca6269
commit b5912b95f7
5 changed files with 259 additions and 1 deletion

@@ -22,6 +22,20 @@
#include <stdio.h>
#include "../include/utilitiec/argumentc/argumentc.h"
#include "interpreter.h"
#include "tokenizer.h"

int tokenize_all(StringView source, DynamicArray* a)
{
    Token t;
    /* Pull tokens until the tokenizer signals the end of the source. */
    while ((t = Tokenizer_NextToken(&source)).type != TOKENTYPE_NONE) {
        int append_code = DynamicArray_Append(a, &t);
        if (append_code) return append_code;
        /* An error token is still appended, then tokenizing stops. */
        if (t.type == TOKENTYPE_ERROR) break;
    }
    return EXIT_SUCCESS;
}

char* load_file_string(StringView path)
{
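
Note: tokenize_all drives a small tokenizer API that is declared in tokenizer.h but is not part of this diff. A minimal sketch of what those declarations might look like, going only by the names used above; the enum name, the extra text field, and the comments are assumptions, not the repository's actual definitions:

    /* Sketch only: the real definitions live in tokenizer.h. */
    typedef enum {
        TOKENTYPE_NONE,   /* no token left, the source view is exhausted */
        TOKENTYPE_ERROR,  /* malformed input; tokenize_all appends it, then stops */
        /* ... the remaining token kinds of the language ... */
    } TokenType;

    typedef struct {
        TokenType  type;
        StringView text;  /* assumed: the slice of source the token covers */
    } Token;

    /* Returns the next token and advances *source past it. */
    Token Tokenizer_NextToken(StringView* source);
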
@@ -59,6 +73,8 @@ char* load_file_string(StringView path)
    fclose(stream);
    buffer[length] = '\0';
    return buffer;
}
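
The hunk above only touches the tail of load_file_string; the part that opens and reads the file is unchanged and elided by the diff. Purely for orientation, a hedged sketch of the conventional read-whole-file pattern this tail belongs to; path_cstring is a hypothetical NUL-terminated copy of the StringView, and none of this is the repository's actual code:

    FILE* stream = fopen(path_cstring, "rb");
    if (stream == NULL) return NULL;

    fseek(stream, 0, SEEK_END);          /* measure the file ...            */
    long length = ftell(stream);
    rewind(stream);

    char* buffer = malloc(length + 1);   /* ... allocate room plus the '\0' */
    if (buffer == NULL) { fclose(stream); return NULL; }

    fread(buffer, 1, (size_t)length, stream);
    /* ...followed by the fclose / terminate / return lines shown above. */
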
@@ -72,11 +88,33 @@ int main(int argc, const char* argv [])
fprintf(stderr, "Usage: [program] --file path/to/script_file\n");
return 1;
}
Argumentc_Destroy(&arguments);
char* script_string = load_file_string(script_file.content);
if (script_string == NULL) return 1;
puts(script_string);
StringView source = StringView_FromString(script_string);
DynamicArray tokens;
if (DynamicArray_Create(&tokens, sizeof(Token), 128, NULL)) {
fprintf(stderr, "Fatal Error: Failed to create dynamicarray\n");
return 1;
}
if (tokenize_all(source, &tokens)) {
fprintf(stderr, "Fatal Error: Out of Memory in tokenizing\n");
return 1;
}
Interpreter interpreter;
Interpreter_Create(&interpreter, &tokens);
Interpreter_Interpret(&interpreter);
Interpreter_Destroy(&interpreter);
DynamicArray_Destroy(&tokens);
free(script_string);
return EXIT_SUCCESS;
}
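
For reference, the helper entry points that main() now leans on, written out as prototypes. Only the function names and the argument order come from the call sites above; the return types, the size_t parameters, and the reading of the final NULL passed to DynamicArray_Create as an optional element destructor are assumptions:

    StringView StringView_FromString(const char* string);

    int  DynamicArray_Create(DynamicArray* array, size_t element_size,
                             size_t initial_capacity, void (*element_destructor)(void*));
    int  DynamicArray_Append(DynamicArray* array, const void* element);
    void DynamicArray_Destroy(DynamicArray* array);

    void Interpreter_Create(Interpreter* interpreter, DynamicArray* tokens);
    void Interpreter_Interpret(Interpreter* interpreter);
    void Interpreter_Destroy(Interpreter* interpreter);

Per the usage string, the program is invoked as "[program] --file path/to/script_file"; the loaded script is echoed with puts and then tokenized and interpreted.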