From 18f5a3954d0169a5a82bde7a15c26ef1b251ea8f Mon Sep 17 00:00:00 2001 From: Pasha Bibko <156938226+PashaBibko@users.noreply.github.com> Date: Thu, 21 Aug 2025 20:27:02 +0100 Subject: [PATCH] Implemented parser-context tests --- .vscode/settings.json | 6 -- tests/src/ParserTests.cpp | 118 ++++++++++++++++++++++++++++++++++++-- 2 files changed, 114 insertions(+), 10 deletions(-) delete mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index ba291a3..0000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "files.associations": - { - "type_traits": "cpp" - } -} diff --git a/tests/src/ParserTests.cpp b/tests/src/ParserTests.cpp index d4fc2e5..d19c51a 100644 --- a/tests/src/ParserTests.cpp +++ b/tests/src/ParserTests.cpp @@ -8,21 +8,131 @@ namespace PashaBibko::LXC::Parser { TEST(ParserTests, ParserContextAtAndInBounds) { - EXPECT_TRUE(true); + { + std::string src = "576 hello return { null }"; + Util::ReturnVal tokens = Lexer::TokenizeFile(src); + ASSERT_FALSE(tokens.Failed()); + + ParserContext ctx(tokens.Result()); + ASSERT_FALSE(ctx.At() == nullptr); + EXPECT_TRUE(ctx.At()->type == Lexer::Token::NumLiteral); + EXPECT_STREQ(ctx.At()->Str(), "576"); + EXPECT_TRUE(ctx.InBounds()); // At() non-null and InBounds() true agree while tokens remain + } + + { + std::string src = " "; + Util::ReturnVal tokens = Lexer::TokenizeFile(src); + ASSERT_FALSE(tokens.Failed()); + + ParserContext ctx(tokens.Result()); + EXPECT_TRUE(ctx.At() == nullptr); + EXPECT_FALSE(ctx.InBounds()); // whitespace-only source: lexing succeeds but yields no tokens + } } TEST(ParserTests, ParserContextPeek) { - EXPECT_TRUE(true); + { + std::string src = "int hello"; + Util::ReturnVal tokens = Lexer::TokenizeFile(src); + ASSERT_FALSE(tokens.Failed()); + + ParserContext ctx(tokens.Result()); + ASSERT_FALSE(ctx.Peek() == nullptr); + EXPECT_TRUE(ctx.Peek()->type == Lexer::Token::Identifier); + EXPECT_STREQ(ctx.Peek()->Str(), "hello"); // Peek() reports the token after the current one ("int") without consuming it + } + + { + std::string src = "null"; + Util::ReturnVal tokens = Lexer::TokenizeFile(src); + 
ASSERT_FALSE(tokens.Failed()); // NOTE(review): this scope never constructs a ParserContext nor checks Peek() == nullptr past the last token — looks incomplete, confirm intent + } } TEST(ParserTests, ParserContextAdvance) { - EXPECT_TRUE(true); + std::string src = "func main() { return 23 }"; + Util::ReturnVal tokens = Lexer::TokenizeFile(src); + ASSERT_FALSE(tokens.Failed()); + + ParserContext ctx(tokens.Result()); + + for (size_t idx = 0; idx < 10; idx++) + { + const Lexer::Token* current = ctx.Advance(); + ASSERT_FALSE(current == nullptr); + const Lexer::Token::TokenType cType = current->type; // NOTE(review): ParserContextExpect sees FunctionDef first for the same source; confirm Advance() deliberately starts past it + + switch (idx) + { + case 0: + EXPECT_TRUE(cType == Lexer::Token::OpenCrocodile); + break; + + case 1: + EXPECT_TRUE(cType == Lexer::Token::Identifier); + break; + + case 2: + EXPECT_TRUE(cType == Lexer::Token::CloseCrocodile); + break; + + case 3: + EXPECT_TRUE(cType == Lexer::Token::Identifier); + break; + + case 4: + EXPECT_TRUE(cType == Lexer::Token::OpenParen); + break; + + case 5: + EXPECT_TRUE(cType == Lexer::Token::CloseParen); + break; + + case 6: + EXPECT_TRUE(cType == Lexer::Token::OpenBrace); + break; + + case 7: + EXPECT_TRUE(cType == Lexer::Token::Return); + break; + + case 8: + EXPECT_TRUE(cType == Lexer::Token::NumLiteral); + break; + + case 9: + EXPECT_TRUE(cType == Lexer::Token::CloseBrace); + break; + + default: + FAIL(); // There were too many tokens generated by the lexer (ASSERT_FALSE(false) could never fail) + } + } } TEST(ParserTests, ParserContextExpect) { - EXPECT_TRUE(true); + std::string src = "func main() { return 42 }"; + Util::ReturnVal tokens = Lexer::TokenizeFile(src); + ASSERT_FALSE(tokens.Failed()); + + ParserContext ctx(tokens.Result()); + ASSERT_TRUE(ctx.Expect(std::array + { + Lexer::Token::FunctionDef, + Lexer::Token::OpenCrocodile, + Lexer::Token::Identifier, + Lexer::Token::CloseCrocodile, + Lexer::Token::Identifier, + Lexer::Token::OpenParen, + Lexer::Token::CloseParen, + Lexer::Token::OpenBrace, + Lexer::Token::Return, + Lexer::Token::NumLiteral, + Lexer::Token::CloseBrace + })); } }