diff --git a/CMakeLists.txt b/CMakeLists.txt index f661140..e58ec2c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,13 +10,15 @@ project(LXC_Project LANGUAGES CXX) # Makes .exes be outputted within the root directory # set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}) -# Adds the G-Tests # +# Enables testing # enable_testing() add_subdirectory(external/googletest) -add_subdirectory(tests) # Adds the sub-directories of all of the binaries # add_subdirectory(Lexer) # The app subdirectory # add_subdirectory(LXC) + +# Compiles the tests # +add_subdirectory(tests) diff --git a/Common/LXC.h b/Common/LXC.h index 19ea8c3..9404982 100644 --- a/Common/LXC.h +++ b/Common/LXC.h @@ -19,6 +19,6 @@ // LXC util files // -#include -#include -#include +#include +#include +#include diff --git a/Common/File.h b/Common/modules/File.h similarity index 100% rename from Common/File.h rename to Common/modules/File.h diff --git a/Common/IO.h b/Common/modules/IO.h similarity index 99% rename from Common/IO.h rename to Common/modules/IO.h index 61dec89..e7000ee 100644 --- a/Common/IO.h +++ b/Common/modules/IO.h @@ -1,6 +1,6 @@ #pragma once -#include +#include #include #include diff --git a/Common/OS.h b/Common/modules/OS.h similarity index 100% rename from Common/OS.h rename to Common/modules/OS.h diff --git a/Common/Result.h b/Common/modules/Result.h similarity index 99% rename from Common/Result.h rename to Common/modules/Result.h index 18886e5..756ca00 100644 --- a/Common/Result.h +++ b/Common/modules/Result.h @@ -1,6 +1,6 @@ #pragma once -#include +#include #include #include diff --git a/LXC/LXC.cpp b/LXC/LXC.cpp index c8f7e8e..a7de45c 100644 --- a/LXC/LXC.cpp +++ b/LXC/LXC.cpp @@ -9,7 +9,7 @@ int main(int argc, char** argv) // Creates the debug log // Util::CreateLog("LXC.log"); - std::filesystem::path src = "example/example.lx"; + std::filesystem::path src = "examples/Fib.lx"; // Reads the given file to a string // Util::ReturnVal fileContents = Util::ReadFile(src); diff 
--git a/Lexer/inc/Token.h b/Lexer/inc/Token.h index 56f26e4..b354322 100644 --- a/Lexer/inc/Token.h +++ b/Lexer/inc/Token.h @@ -88,12 +88,6 @@ namespace LXC::Lexer using T = std::underlying_type_t; return static_cast(type) & static_cast(mask); } - - template static constexpr bool IsTypeClass(Token token) - { - using T = std::underlying_type_t; - return static_cast(token.type) & static_cast(mask); - } // Constructor to set the data of the token for more complex token types // Token(const LexerContext& ctx, uint32_t start, unsigned short len, TokenType _type); diff --git a/examples/Fib.lx b/examples/Fib.lx new file mode 100644 index 0000000..8fecfcc --- /dev/null +++ b/examples/Fib.lx @@ -0,0 +1,15 @@ +int fib(int n) +{ + # Base cases # + if (n == 0) { return 0 } + if (n == 1) { return 1 } + + # RECURSION BABYYYY # + return fib(n - 1) + fib(n - 2) +} + +int main(void) +{ + int res = fib(8) + return res == 21 +} diff --git a/example/example.lx b/examples/LawsOfMath.lx similarity index 100% rename from example/example.lx rename to examples/LawsOfMath.lx diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index ba1e46d..3a3b985 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -2,10 +2,23 @@ file (GLOB TestSources src/*.cpp inc/*.h) add_executable(LXC_Tests ${TestSources}) +# Creates the shared precompiled header # +target_include_directories(LXC_Tests PRIVATE ${CMAKE_SOURCE_DIR}/Common) +target_precompile_headers(LXC_Tests PRIVATE ${CMAKE_SOURCE_DIR}/Common/LXC.h) + +# Includes headers for modules to test # +target_include_directories(LXC_Tests PRIVATE + ${CMAKE_SOURCE_DIR}/Lexer/inc +) + # Links with GoogleTest # target_link_libraries(LXC_Tests + # Testing libraries # gtest gtest_main + + # Libraries to test # + Lexer ) # Registers the test # diff --git a/tests/src/LXC_Tests.cpp b/tests/src/LXC_Tests.cpp deleted file mode 100644 index f9c9f90..0000000 --- a/tests/src/LXC_Tests.cpp +++ /dev/null @@ -1,11 +0,0 @@ -#include - 
-TEST(IWantSomeTestsToPass, BasicTest1) -{ - EXPECT_EQ(1 + 1, 2); -} - -TEST(IWantSomeTestsToPass, BasicTest2) -{ - EXPECT_EQ(1 + 2, 2); -} diff --git a/tests/src/LexerTests.cpp b/tests/src/LexerTests.cpp new file mode 100644 index 0000000..ec758da --- /dev/null +++ b/tests/src/LexerTests.cpp @@ -0,0 +1,250 @@ +#include + +#include +#include + +// Local util functions // +namespace LXC::Internal +{ + static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector& expected) + { + ASSERT_EQ(tokens.size(), expected.size()); + for (size_t i = 0; i < tokens.size(); i++) + { + EXPECT_EQ(tokens[i].type, expected[i]); + } + } +} + +// The tests for the lexer // +namespace LXC::Lexer +{ + TEST(LexerTests, ReturnsTrueForMatching) + { + EXPECT_TRUE(Token::IsTypeClass(Token::Add)); + EXPECT_TRUE(Token::IsTypeClass(Token::Sub)); + EXPECT_TRUE(Token::IsTypeClass(Token::Mul)); + + EXPECT_TRUE(Token::IsTypeClass(Token::If)); + EXPECT_TRUE(Token::IsTypeClass(Token::While)); + + EXPECT_TRUE(Token::IsTypeClass(Token::StringLiteral)); + EXPECT_TRUE(Token::IsTypeClass(Token::NumLiteral)); + + EXPECT_TRUE(Token::IsTypeClass(Token::CloseBracket)); + EXPECT_TRUE(Token::IsTypeClass(Token::Comma)); + + EXPECT_TRUE(Token::IsTypeClass(Token::End_of_file)); + } + + TEST(LexerTests, ReturnsFalseForNonMatching) + { + EXPECT_FALSE(Token::IsTypeClass(Token::StringLiteral)); + EXPECT_FALSE(Token::IsTypeClass(Token::End_of_file)); + EXPECT_FALSE(Token::IsTypeClass(Token::If)); + + EXPECT_FALSE(Token::IsTypeClass(Token::NumLiteral)); + EXPECT_FALSE(Token::IsTypeClass(Token::Comma)); + + EXPECT_FALSE(Token::IsTypeClass(Token::Add)); + EXPECT_FALSE(Token::IsTypeClass(Token::CloseBracket)); + + EXPECT_FALSE(Token::IsTypeClass(Token::While)); + EXPECT_FALSE(Token::IsTypeClass(Token::Mul)); + + EXPECT_FALSE(Token::IsTypeClass(Token::Sub)); + } + + TEST(LexerTests, EmptyInput) + { + Util::ReturnVal result = TokenizeFile(""); + ASSERT_TRUE(result.Suceeded()); + 
EXPECT_TRUE(result.Result().empty()); + } + + TEST(LexerTests, SingleIdentifier) + { + Util::ReturnVal result = TokenizeFile("hello"); + ASSERT_TRUE(result.Suceeded()); + Internal::ExpectTokens(result, { Token::Identifier }); + } + + TEST(LexerTests, SingleNumber) + { + Util::ReturnVal result = TokenizeFile("12345"); + ASSERT_TRUE(result.Suceeded()); + Internal::ExpectTokens(result, { Token::NumLiteral }); + } + + TEST(LexerTests, SingleStringLiteral) + { + Util::ReturnVal result = TokenizeFile("\"string literal\""); + ASSERT_TRUE(result.Suceeded()); + Internal::ExpectTokens(result, { Token::StringLiteral }); + } + + TEST(LexerTests, MultipleSymbolsAndOperators) + { + Util::ReturnVal result = TokenizeFile("+ = ("); + ASSERT_TRUE(result.Suceeded()); + Internal::ExpectTokens(result, { Token::Add, Token::Assign, Token::OpenParen }); + } + + TEST(LexerTests, WhileTrueTokenTest) + { + Util::ReturnVal result = TokenizeFile("while (true)"); + ASSERT_TRUE(result.Suceeded()); + Internal::ExpectTokens(result, { Token::While, Token::OpenParen, Token::Identifier, Token::CloseParen }); + } + + TEST(LexerTests, UnterminatedString) + { + Util::ReturnVal result = TokenizeFile("\"This is supposed to be unterminated"); + ASSERT_FALSE(result.Suceeded()); + EXPECT_EQ(result.Error().reason, LexerError::UnterminatedStringLiteral); + } + + TEST(LexerTests, InvalidCharacter) + { + Util::ReturnVal result = TokenizeFile("^^^"); + ASSERT_FALSE(result.Suceeded()); + EXPECT_EQ(result.Error().reason, LexerError::InvalidCharacter); + } + + TEST(LexerTests, InvalidOperand) + { + Util::ReturnVal result = TokenizeFile("+/*"); + ASSERT_FALSE(result.Suceeded()); + EXPECT_EQ(result.Error().reason, LexerError::UnknownSymbolOrOperand); + } + + TEST(LexerTests, ExampleFile_LawsOfMath) + { + Util::ReturnVal fileContents = Util::ReadFile("examples/LawsOfMath.lx"); + ASSERT_TRUE(fileContents.Suceeded()); + Util::ReturnVal tokens = TokenizeFile(fileContents); + Internal::ExpectTokens(tokens, + { + 
Token::Identifier, // int + Token::Identifier, // add + Token::OpenParen, // ( + Token::Identifier, // int + Token::Identifier, // a + Token::Comma, // , + Token::Identifier, // int + Token::Identifier, // b + Token::CloseParen, // ) + Token::OpenBrace, // { + Token::Return, // return + Token::Identifier, // a + Token::Add, // + + Token::Identifier, // b + Token::CloseBrace, // } + + Token::Identifier, // int + Token::Identifier, // main + Token::OpenParen, // ( + Token::Identifier, // void + Token::CloseParen, // ) + Token::OpenBrace, // { + Token::Identifier, // int + Token::Identifier, // c + Token::Assign, // = + Token::Identifier, // add + Token::OpenParen, // ( + Token::NumLiteral, // 3 + Token::Comma, // , + Token::NumLiteral, // 4 + Token::CloseParen, // ) + Token::If, // if + Token::OpenParen, // ( + Token::Identifier, // c + Token::Eql, // == + Token::NumLiteral, // 7 + Token::CloseParen, // ) + Token::OpenBrace, // { + Token::Return, // return + Token::NumLiteral, // 0 + Token::CloseBrace, // } + Token::Else, // else + Token::OpenBrace, // { + Token::Return, // return + Token::NumLiteral, // 1 + Token::CloseBrace, // } + Token::CloseBrace // } + }); + } + + TEST(LexerTests, ExampleFile_Fib) + { + Util::ReturnVal fileContents = Util::ReadFile("examples/Fib.lx"); + ASSERT_TRUE(fileContents.Suceeded()); + Util::ReturnVal tokens = TokenizeFile(fileContents); + Internal::ExpectTokens(tokens, + { + Token::Identifier, // int + Token::Identifier, // fib + Token::OpenParen, // ( + Token::Identifier, // int + Token::Identifier, // n + Token::CloseParen, // ) + Token::OpenBrace, // { + + Token::If, // if + Token::OpenParen, // ( + Token::Identifier, // n + Token::Eql, // == + Token::NumLiteral, // 0 + Token::CloseParen, // ) + Token::OpenBrace, // { + Token::Return, // return + Token::NumLiteral, // 0 + Token::CloseBrace, // } + + Token::If, // if + Token::OpenParen, // ( + Token::Identifier, // n + Token::Eql, // == + Token::NumLiteral, // 1 + Token::CloseParen, // ) + Token::OpenBrace, // { + Token::Return, // return + 
Token::NumLiteral, // 1 + Token::CloseBrace, // } + + Token::Return, // return + Token::Identifier, // fib + Token::OpenParen, // ( + Token::Identifier, // n + Token::Sub, // - + Token::NumLiteral, // 1 + Token::CloseParen, // ) + + Token::Add, // + + + Token::Identifier, // fib + Token::OpenParen, // ( + Token::Identifier, // n + Token::Sub, // - + Token::NumLiteral, // 2 + Token::CloseParen, // ) + Token::CloseBrace, // } + + Token::Identifier, // int + Token::Identifier, // main + Token::OpenParen, // ( + Token::Identifier, // void + Token::CloseParen, // ) + + Token::OpenBrace, // { + Token::Identifier, // int + Token::Identifier, // res + Token::Assign, // = + Token::Identifier, // fib + Token::OpenParen, // ( + Token::NumLiteral, // 8 + Token::CloseParen, // ) + Token::Return, // return + Token::Identifier, // res + Token::Eql, // == + Token::NumLiteral, // 21 + Token::CloseBrace // } + }); + } +}