Fixed Lexer tests

This commit is contained in:
Pasha Bibko
2025-08-20 21:25:33 +01:00
parent d604de28fd
commit b6db61b2c3
5 changed files with 26 additions and 8 deletions

2
.vscode/launch.json vendored
View File

@@ -74,7 +74,7 @@
"stopAtEntry": false,
"cwd": "${workspaceFolder}",
"environment": [],
"externalConsole": true,
"console": "externalTerminal",
"preLaunchTask": "build",
},

View File

@@ -1,8 +1,8 @@
func<int> fib(int: num)
{
# Base cases (temp excluded) #
# if (n == 0) { return 0 } #
# if (n == 1) { return 1 } #
# Base cases #
if (n == 0) { return 0 }
if (n == 1) { return 1 }
# RECURSION BABYYYY #
return fib(n - 1) + fib(n - 2)

View File

@@ -131,6 +131,9 @@ namespace PashaBibko::LXC::Lexer
char* contents;
};
// Function for converting token types to their equivalent C-Strings //
const char* TokenTypeToCStr(Token::TokenType type);
// Typedef for the Lexer's output type //
typedef std::vector<Token> LexerOutput;
}

View File

@@ -55,7 +55,7 @@ namespace PashaBibko::LXC::Lexer
// Helper macro for converting type to string //
#define TOKEN_TYPE_CASE(type) case type: return #type;
static constexpr const char* TokenTypeToCStr(Token::TokenType type)
const char* TokenTypeToCStr(Token::TokenType type)
{
switch (type)
{

View File

@@ -8,9 +8,19 @@ namespace PashaBibko::LXC::Internal
{
static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::Token::TokenType>& expected)
{
ASSERT_EQ(tokens.size(), expected.size());
for (size_t i = 0; i < tokens.size(); i++)
EXPECT_EQ(tokens[i].type, expected[i]);
size_t length = std::min(tokens.size(), expected.size());
for (size_t i = 0; i < length; i++)
{
bool equal = tokens[i].type == expected[i];
if (equal == false)
{
EXPECT_EQ(i, -1);
EXPECT_STREQ(LXC::Lexer::TokenTypeToCStr(tokens[i].type), LXC::Lexer::TokenTypeToCStr(expected[i]));
return; // Early return avoids flooding the console with follow-on mismatch errors that may be misleading
}
}
}
}
@@ -130,9 +140,11 @@ namespace PashaBibko::LXC::Lexer
Token::Identifier, // add
Token::OpenParen, // (
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // a
Token::Comma, // ,
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // b
Token::CloseParen, // )
Token::OpenBrace, // {
@@ -151,6 +163,7 @@ namespace PashaBibko::LXC::Lexer
Token::CloseParen, // )
Token::OpenBrace, // {
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // c
Token::Assign, // =
Token::Identifier, // add
@@ -192,6 +205,7 @@ namespace PashaBibko::LXC::Lexer
Token::Identifier, // fib
Token::OpenParen, // (
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // num
Token::CloseParen, // )
Token::OpenBrace, // {
@@ -246,6 +260,7 @@ namespace PashaBibko::LXC::Lexer
Token::OpenBrace, // {
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // res
Token::Assign, // =
Token::Identifier, // fib