diff --git a/.vscode/launch.json b/.vscode/launch.json
index aa19ecd..0592b8b 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -74,7 +74,7 @@
             "stopAtEntry": false,
             "cwd": "${workspaceFolder}",
             "environment": [],
-            "externalConsole": true,
+            "console": "externalTerminal",
             "preLaunchTask": "build",
         },
diff --git a/examples/Fib.lx b/examples/Fib.lx
index 59d1c9a..61fb2ec 100644
--- a/examples/Fib.lx
+++ b/examples/Fib.lx
@@ -1,8 +1,8 @@
 func fib(int: num) {
-    # Base cases (temp excluded) #
-    # if (n == 0) { return 0 } #
-    # if (n == 1) { return 1 } #
+    # Base cases #
+    if (n == 0) { return 0 }
+    if (n == 1) { return 1 }
 
     # RECURSION BABYYYY #
     return fib(n - 1) + fib(n - 2)
diff --git a/lexer/inc/Token.h b/lexer/inc/Token.h
index 5ece791..925ba1b 100644
--- a/lexer/inc/Token.h
+++ b/lexer/inc/Token.h
@@ -131,6 +131,9 @@ namespace PashaBibko::LXC::Lexer
         char* contents;
     };
 
+    // Function for converting token types to their equivalent C-Strings //
+    const char* TokenTypeToCStr(Token::TokenType type);
+
     // Typedef for the output type of how the Lexer outputs //
     typedef std::vector<Token> LexerOutput;
 }
diff --git a/lexer/src/Token.cpp b/lexer/src/Token.cpp
index d20116d..63adcbb 100644
--- a/lexer/src/Token.cpp
+++ b/lexer/src/Token.cpp
@@ -55,7 +55,7 @@ namespace PashaBibko::LXC::Lexer
     // Helper macro for converting type to string //
     #define TOKEN_TYPE_CASE(type) case type: return #type;
 
-    static constexpr const char* TokenTypeToCStr(Token::TokenType type)
+    const char* TokenTypeToCStr(Token::TokenType type)
     {
         switch (type)
         {
diff --git a/tests/src/LexerTests.cpp b/tests/src/LexerTests.cpp
index bff9d05..d9cdf48 100644
--- a/tests/src/LexerTests.cpp
+++ b/tests/src/LexerTests.cpp
@@ -8,9 +8,19 @@ namespace PashaBibko::LXC::Internal
 {
     static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Token::TokenType>& expected)
     {
-        ASSERT_EQ(tokens.size(), expected.size());
-        for (size_t i = 0; i < tokens.size(); i++)
-            EXPECT_EQ(tokens[i].type, expected[i]);
+        size_t length = std::min(tokens.size(), expected.size());
+        for (size_t i = 0; i < length; i++)
+        {
+            bool equal = tokens[i].type == expected[i];
+
+            if (equal == false)
+            {
+                EXPECT_EQ(i, -1);
+                EXPECT_STREQ(LXC::Lexer::TokenTypeToCStr(tokens[i].type), LXC::Lexer::TokenTypeToCStr(expected[i]));
+
+                return; // Early return stops the console from being filled with errors that may be incorrect
+            }
+        }
     }
 }
@@ -130,9 +140,11 @@ namespace PashaBibko::LXC::Lexer
             Token::Identifier,  // add
             Token::OpenParen,   // (
             Token::Identifier,  // int
+            Token::Colon,       // :
             Token::Identifier,  // a
             Token::Comma,       // ,
             Token::Identifier,  // int
+            Token::Colon,       // :
             Token::Identifier,  // b
             Token::CloseParen,  // )
             Token::OpenBrace,   // {
@@ -151,6 +163,7 @@
             Token::CloseParen,  // )
             Token::OpenBrace,   // {
             Token::Identifier,  // int
+            Token::Colon,       // :
             Token::Identifier,  // c
             Token::Assign,      // =
             Token::Identifier,  // add
@@ -192,6 +205,7 @@
             Token::Identifier,  // fib
             Token::OpenParen,   // (
             Token::Identifier,  // int
+            Token::Colon,       // :
             Token::Identifier,  // num
             Token::CloseParen,  // )
             Token::OpenBrace,   // {
@@ -246,6 +260,7 @@
             Token::OpenBrace,   // {
 
             Token::Identifier,  // int
+            Token::Colon,       // :
             Token::Identifier,  // res
             Token::Assign,      // =
             Token::Identifier,  // fib