Commit 31fd928
fix(EOF): fixed EOF recognition
wesuRage committed Dec 2, 2024
1 parent 12145af commit 31fd928
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions src/frontend/lexer/lexer.c
@@ -256,7 +256,7 @@ Token getNextToken() {
 char buffer[1024];
 int i = 0;
 eat_char(); // Consume the opening quote
-while (pick_char() != '"' && pick_char() != CEOF) {
+while (pick_char() != '"' && pick_char() != CEOF && pick_char() != EOF) {
 if (i >= (int)sizeof(buffer) - 1) {
 lexer_error(filename, line, col, position, position, currentChar, "String too long");
 break;
@@ -294,7 +294,7 @@ Token getNextToken() {
 }

 // EOF
-if (pick_char() == CEOF) {
+if (pick_char() == CEOF || pick_char() == EOF) {
 return (Token){TOKEN_EOF, strdup("EOF"), line, col, col, position, position, strdup(filename), strdup("")};
 }

@@ -320,7 +320,7 @@ Token *tokenize(FILE *sourceFile, const char *fileName, int *count) {
 tokenCount = 0;
 Token token = getNextToken();

-while (currentChar != CEOF) {
+while (pick_char() != CEOF && pick_char() != EOF) {
 addToken(
 token.type,
 token.lexeme,
@@ ... @@ Token *tokenize(FILE *sourceFile, const char *fileName, int *count) {
 token.filename,
 token.message
 );
-/*
+
 addToken(
 TOKEN_EOF,
 "EOF",
@@ ... @@ Token *tokenize(FILE *sourceFile, const char *fileName, int *count) {
 filename,
 ""
 );
-*/
+

 *count = tokenCount;
 return tokens;
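Taken together, the change treats end of input as reached when the lookahead returns either the project's CEOF sentinel or stdio's EOF: in the string-scanning loop, in the EOF-token check, and in the main tokenize loop, and it also re-enables the previously commented-out final TOKEN_EOF emission. Below is a minimal sketch of that guard pattern, assuming hypothetical stand-ins for pick_char, eat_char, and CEOF (their real definitions live elsewhere in this lexer and may differ):

#include <stdio.h>

/* Hypothetical stand-ins: CEOF is assumed here to be a project-defined
 * end-of-input sentinel, distinct from stdio's EOF (-1). */
#define CEOF '\0'

static const char *src; /* cursor into an in-memory source buffer */

/* Look at the next character without consuming it; CEOF at end of buffer. */
static int pick_char(void) {
    return *src ? (unsigned char)*src : CEOF;
}

/* Consume and return the next character. */
static int eat_char(void) {
    int c = pick_char();
    if (*src)
        src++;
    return c;
}

int main(void) {
    src = "\"unterminated string"; /* opening quote, no closing quote */

    eat_char(); /* consume the opening quote */

    /* Guard the scan against both sentinels: if the lookahead ever yields
     * stdio's EOF instead of CEOF, the loop still terminates. */
    while (pick_char() != '"' && pick_char() != CEOF && pick_char() != EOF)
        eat_char();

    if (pick_char() == CEOF || pick_char() == EOF)
        puts("end of input reached: emit TOKEN_EOF instead of spinning");

    return 0;
}

Whether the real pick_char can ever return stdio's EOF depends on how the lexer reads its input, which is presumably why the commit checks both values rather than relying on CEOF alone.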

0 comments on commit 31fd928
