implementation of useful error messages

vegowotenks 2024-10-10 18:59:17 +02:00
parent 2ce8200366
commit c12e763b28
14 changed files with 295 additions and 68 deletions

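The diff below threads a TokenContext through every tokenizer function so each token records the position where it starts. The struct definition itself is not part of this hunk; judging from the fields used (row, col), it is presumably a small position cursor roughly like the sketch below, where TokenContext_Advance is a hypothetical helper illustrating the update rule the commit applies inline (stamp the token with the current position, then advance the cursor past the consumed characters, resetting the column on every newline):

#include <stddef.h>
#include <stdint.h>

/* Assumed shape of TokenContext: only the row and col fields are
 * confirmed by the diff; the exact definition lives elsewhere in the
 * repository. */
typedef struct TokenContext {
    uint32_t row; /* line of the token's first character */
    uint32_t col; /* column of the token's first character */
} TokenContext;

/* Hypothetical helper, not part of the commit: advance the cursor
 * past `length` consumed characters, resetting the column on '\n'. */
static void TokenContext_Advance(TokenContext* current, const char* consumed, size_t length)
{
    for (size_t i = 0; i < length; i++) {
        if (consumed[i] == '\n') {
            current->row++;
            current->col = 1;
        } else {
            current->col++;
        }
    }
}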

@@ -53,27 +53,40 @@ static Token _Tokenizer_ParseDouble(bool negative, StringView integerPart, Strin
};
}
static Token _Tokenizer_NumberToken(StringView* source)
static Token _Tokenizer_NumberToken(StringView* source, TokenContext* current)
{
uint32_t token_length = 0;
bool negative = false;
if (StringView_StartsWith(*source, StringView_FromString("-"))) {
negative = true;
*source = StringView_Drop(*source, 1);
token_length++;
}
StringView integerPart = StringViewOfNumberTillNextNonDigit(source);
token_length += integerPart.length;
bool has_point = false;
if (source->length != 0 && source->source[0] == '.') {
*source = StringView_Drop(*source, 1);
has_point = true;
token_length++;
}
StringView decimalPart = StringViewOfNumberTillNextNonDigit(source);
token_length += decimalPart.length;
Token token;
if (has_point) {
return _Tokenizer_ParseDouble(negative, integerPart, decimalPart);
token = _Tokenizer_ParseDouble(negative, integerPart, decimalPart);
} else {
return _Tokenizer_ParseInt64(negative, integerPart);
token = _Tokenizer_ParseInt64(negative, integerPart);
}
// update context
token.context = *current;
current->col += token_length;
return token;
}
static bool _Tokenizer_IdentifierLetter(char c)
@@ -81,17 +94,21 @@ static bool _Tokenizer_IdentifierLetter(char c)
return isalnum(c);
}
static Token _Tokenizer_IdentifierToken(StringView* source)
static Token _Tokenizer_IdentifierToken(StringView* source, TokenContext* current)
{
StringView identifier = StringView_TakeWhile(*source, _Tokenizer_IdentifierLetter);
*source = StringView_Drop(*source, identifier.length);
return (Token) {
Token token = (Token) {
.type = TOKENTYPE_IDENTIFIER,
.get = {
.identifier = identifier,
}
},
.context = *current
};
current->col += identifier.length;
return token;
}
static bool _Tokenizer_ContinueCommentFunction(char c)
@@ -104,7 +121,7 @@ static bool _Tokenizer_ContinueStringFunction(char c)
return c != '"';
}
static Token _Tokenizer_SimpleToken(StringView* source)
static Token _Tokenizer_SimpleToken(StringView* source, TokenContext* current)
{
const char* literal_table[] = { "{", "}", "&", ":", "+", "->", "-", "*", "/", "|", "==", "!=", "<", "<=", ">", ">=", ",", ";", "bind", "as", "(", ")" };
const enum TokenType type_table[] = {
@@ -136,26 +153,32 @@ static Token _Tokenizer_SimpleToken(StringView* source)
StringView literal_view = StringView_FromString(literal_table[i]);
if (StringView_StartsWith(*source, literal_view)) {
*source = StringView_Drop(*source, literal_view.length);
return (Token) {
Token token = (Token) {
.type = type_table[i],
.get = { .identifier = STRINGVIEW_NONE }
.get = { .identifier = STRINGVIEW_NONE },
.context = *current
};
current->col += literal_view.length;
return token;
}
}
return TOKEN_NONE;
}
Token _Tokenizer_CommentToken(StringView* source)
Token _Tokenizer_CommentToken(StringView* source, TokenContext* current)
{
StringView comment = StringView_SpanWhile(source, _Tokenizer_ContinueCommentFunction);
return (Token) {
Token token = (Token) {
.type = TOKENTYPE_COMMENT,
.get = { .identifier = comment },
.context = *current
};
current->col += comment.length;
return token;
}
Token _Tokenizer_StringToken(StringView* source)
Token _Tokenizer_StringToken(StringView* source, TokenContext* current)
{
*source = StringView_Drop(*source, 1);
StringView string = StringView_SpanWhile(source, _Tokenizer_ContinueStringFunction);
@@ -163,15 +186,34 @@ Token _Tokenizer_StringToken(StringView* source)
string.length += 2;
*source = StringView_Drop(*source, 1);
return (Token) {
Token token = (Token) {
.type = TOKENTYPE_STRING,
.get = { .identifier = string },
.context = *current
};
size_t newline_count = StringView_Count(string, StringView_FromString("\n"));
if(newline_count == 0) {
current->col += string.length;
} else {
current->row += newline_count;
current->col = 1;
StringView last_newline_split;
StringView_LastSplit(&last_newline_split, &string, StringView_FromString("\n"));
current->col += last_newline_split.length;
}
return token;
}
Token Tokenizer_NextToken(StringView* source)
Token Tokenizer_NextToken(StringView* source, TokenContext* context)
{
while (source->length != 0 && isspace(source->source[0])) {
if (source->source[0] == '\n') {
context->col = 1;
context->row++;
} else {
context->col++;
}
0[source] = StringView_Slice(*source, 1, source->length);
}
@@ -180,7 +222,7 @@ Token Tokenizer_NextToken(StringView* source)
}
{
Token simple_token = _Tokenizer_SimpleToken(source);
Token simple_token = _Tokenizer_SimpleToken(source, context);
if (simple_token.type != TOKENTYPE_NONE) {
return simple_token;
}
@@ -188,16 +230,18 @@ Token Tokenizer_NextToken(StringView* source)
if (isdigit(source->source[0]) || StringView_StartsWith(*source, StringView_FromString("-"))) {
// parse int/double
return _Tokenizer_NumberToken(source);
return _Tokenizer_NumberToken(source, context);
} else if (isalpha(source->source[0])) {
// parse name
return _Tokenizer_IdentifierToken(source);
return _Tokenizer_IdentifierToken(source, context);
} else if (StringView_StartsWith(*source, StringView_FromString("#"))) {
return _Tokenizer_CommentToken(source);
return _Tokenizer_CommentToken(source, context);
} else if (StringView_StartsWith(*source, StringView_FromString("\""))) {
return _Tokenizer_StringToken(source);
return _Tokenizer_StringToken(source, context);
} else {
return (Token) {.type = TOKENTYPE_ERROR, .get = {.error = *source } };
Token non_token = (Token) {.type = TOKENTYPE_ERROR, .get = {.error = *source }, .context = *context };
context->col++;
return non_token;
}
}
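Taken together, a caller can now seed a TokenContext, pass it to Tokenizer_NextToken, and read the stored position back off any token, which is what makes the error messages useful. A hypothetical driver, assuming the repository's StringView/Token declarations are in scope, that positions start at row 1, column 1, and that the tokenizer yields TOKENTYPE_NONE once only whitespace remains:

#include <stdio.h>

/* Hypothetical usage sketch; Tokenizer_NextToken, Token, StringView and
 * the TOKENTYPE_* values come from the diff above, while the starting
 * values and the end-of-input behaviour are assumptions. */
static void ReportFirstError(StringView source)
{
    TokenContext context = { .row = 1, .col = 1 };
    while (source.length != 0) {
        Token token = Tokenizer_NextToken(&source, &context);
        if (token.type == TOKENTYPE_NONE) {
            break; /* nothing but whitespace was left */
        }
        if (token.type == TOKENTYPE_ERROR) {
            fprintf(stderr, "unexpected input at line %u, column %u\n",
                    (unsigned)token.context.row, (unsigned)token.context.col);
            return;
        }
    }
}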