// Unit tests (GoogleTest) for the PashaBibko::LXC lexer: token classification and tokenization.
#include <gtest/gtest.h>
|
|
|
|
#include <Lexer.h>
|
|
#include <Token.h>
|
|
|
|
// Local util functions //
|
|
namespace PashaBibko::LXC::Internal
|
|
{
|
|
static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::Token::TokenType>& expected)
|
|
{
|
|
ASSERT_EQ(tokens.size(), expected.size());
|
|
for (size_t i = 0; i < tokens.size(); i++)
|
|
EXPECT_EQ(tokens[i].type, expected[i]);
|
|
}
|
|
}
|
|
|
|
// The tests for the lexer //
|
|
namespace PashaBibko::LXC::Lexer
|
|
{
|
|
TEST(LexerTests, ReturnsTrueForMatching)
{
    // Every token below must be classified as a member of its own class.

    for (const auto token : { Token::Add, Token::Sub, Token::Mul })
        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(token));

    for (const auto token : { Token::If, Token::While })
        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(token));

    for (const auto token : { Token::StringLiteral, Token::NumLiteral })
        EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(token));

    for (const auto token : { Token::CloseBracket, Token::Comma })
        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(token));

    EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(Token::End_of_file));
}
|
|
|
|
TEST(LexerTests, ReturnsFalseForNonMatching)
{
    // Every token below belongs to some OTHER class, so classification must reject it.

    for (const auto token : { Token::StringLiteral, Token::End_of_file, Token::If })
        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(token));

    for (const auto token : { Token::NumLiteral, Token::Comma })
        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(token));

    for (const auto token : { Token::Add, Token::CloseBracket })
        EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(token));

    for (const auto token : { Token::While, Token::Mul })
        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(token));

    EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(Token::Sub));
}
|
|
|
|
TEST(LexerTests, EmptyInput)
{
    // An empty source file should lex successfully and produce no tokens.
    const Util::ReturnVal lexed = TokenizeFile("");

    ASSERT_TRUE(lexed.Suceeded());
    EXPECT_TRUE(lexed.Result().empty());
}
|
|
|
|
TEST(LexerTests, SingleIdentifier)
{
    // A bare word should lex as exactly one Identifier token.
    const Util::ReturnVal lexed = TokenizeFile("hello");

    ASSERT_TRUE(lexed.Suceeded());
    Internal::ExpectTokens(lexed, { Token::Identifier });
}
|
|
|
|
TEST(LexerTests, SingleNumber)
{
    // A run of digits should lex as exactly one NumLiteral token.
    const Util::ReturnVal lexed = TokenizeFile("12345");

    ASSERT_TRUE(lexed.Suceeded());
    Internal::ExpectTokens(lexed, { Token::NumLiteral });
}
|
|
|
|
TEST(LexerTests, SingleStringLiteral)
{
    // A quoted string should lex as exactly one StringLiteral token.
    const Util::ReturnVal lexed = TokenizeFile("\"string literal\"");

    ASSERT_TRUE(lexed.Suceeded());
    Internal::ExpectTokens(lexed, { Token::StringLiteral });
}
|
|
|
|
TEST(LexerTests, MultipleSymbolsAndOperators)
{
    // Whitespace-separated operators/symbols should each become their own token.
    const Util::ReturnVal lexed = TokenizeFile("+ = (");

    ASSERT_TRUE(lexed.Suceeded());
    Internal::ExpectTokens(lexed, { Token::Add, Token::Assign, Token::OpenParen });
}
|
|
|
|
TEST(LexerTests, WhileTrueTokenTest)
{
    const Util::ReturnVal lexed = TokenizeFile("while (true)");

    ASSERT_TRUE(lexed.Suceeded());

    // NOTE(review): "true" is expected to lex as a plain Identifier - presumably
    // the lexer has no boolean-literal token yet; confirm against Token.h.
    Internal::ExpectTokens(lexed, { Token::While, Token::OpenParen, Token::Identifier, Token::CloseParen });
}
|
|
|
|
TEST(LexerTests, UnterminatedString)
{
    // A string literal with no closing quote must fail with the dedicated error.
    const Util::ReturnVal lexed = TokenizeFile("\"This is supposed to be unterminated");

    ASSERT_FALSE(lexed.Suceeded());
    EXPECT_EQ(lexed.Error().reason, LexerError::UnterminatedStringLiteral);
}
|
|
|
|
TEST(LexerTests, InvalidCharacter)
{
    // Characters outside the language's alphabet must be rejected.
    const Util::ReturnVal lexed = TokenizeFile("^^^");

    ASSERT_FALSE(lexed.Suceeded());
    EXPECT_EQ(lexed.Error().reason, LexerError::InvalidCharacter);
}
|
|
|
|
TEST(LexerTests, InvalidOperand)
{
    // Valid characters combined into an unknown operator sequence must be rejected.
    const Util::ReturnVal lexed = TokenizeFile("+/*");

    ASSERT_FALSE(lexed.Suceeded());
    EXPECT_EQ(lexed.Error().reason, LexerError::UnknownSymbolOrOperand);
}
|
|
|
|
TEST(LexerTests, ExampleFile_LawsOfMath)
{
    // End-to-end check: lex a real example file and verify the full token stream.
    Util::ReturnVal fileContents = Util::ReadFile("examples/LawsOfMath.lx");
    ASSERT_TRUE(fileContents.Suceeded());

    Util::ReturnVal tokens = TokenizeFile(fileContents);

    // FIX: assert the lexer succeeded before inspecting the result (every other
    // tokenize test does this); otherwise a lexing failure shows up as a
    // confusing token-count mismatch instead of a clear assertion.
    ASSERT_TRUE(tokens.Suceeded());

    Internal::ExpectTokens(tokens,
    {
        Token::FunctionDef,     // func
        Token::OpenCrocodile,   // <
        Token::Identifier,      // int
        Token::CloseCrocodile,  // >
        Token::Identifier,      // add
        Token::OpenParen,       // (
        Token::Identifier,      // int
        Token::Identifier,      // a
        Token::Comma,           // ,
        Token::Identifier,      // int
        Token::Identifier,      // b
        Token::CloseParen,      // )
        Token::OpenBrace,       // {
        Token::Return,          // return
        Token::Identifier,      // a
        Token::Add,             // +
        Token::Identifier,      // b
        Token::CloseBrace,      // }

        Token::FunctionDef,     // func
        Token::OpenCrocodile,   // <
        Token::Identifier,      // int
        Token::CloseCrocodile,  // >
        Token::Identifier,      // main
        Token::OpenParen,       // (
        Token::CloseParen,      // )
        Token::OpenBrace,       // {
        Token::Identifier,      // int
        Token::Identifier,      // c
        Token::Assign,          // =
        Token::Identifier,      // add
        Token::OpenParen,       // (
        Token::NumLiteral,      // 3
        Token::Comma,           // ,
        Token::NumLiteral,      // 4
        Token::CloseParen,      // )
        Token::If,              // if
        Token::OpenParen,       // (
        Token::Identifier,      // c
        Token::Eql,             // ==
        Token::NumLiteral,      // 7
        Token::CloseParen,      // )
        Token::OpenBrace,       // {
        Token::Return,          // return
        Token::NumLiteral,      // 0
        Token::CloseBrace,      // }
        Token::Else,            // else
        Token::OpenBrace,       // {
        Token::Return,          // return
        Token::NumLiteral,      // 1
        Token::CloseBrace,      // }
        Token::CloseBrace       // }
    });
}
|
|
|
|
TEST(LexerTests, ExampleFile_Fib)
{
    // End-to-end check: lex the Fibonacci example file and verify the full token stream.
    Util::ReturnVal fileContents = Util::ReadFile("examples/Fib.lx");
    ASSERT_TRUE(fileContents.Suceeded());

    Util::ReturnVal tokens = TokenizeFile(fileContents);

    // FIX: assert the lexer succeeded before inspecting the result (every other
    // tokenize test does this); otherwise a lexing failure shows up as a
    // confusing token-count mismatch instead of a clear assertion.
    ASSERT_TRUE(tokens.Suceeded());

    Internal::ExpectTokens(tokens,
    {
        Token::FunctionDef,     // func
        Token::OpenCrocodile,   // <
        Token::Identifier,      // int
        Token::CloseCrocodile,  // >
        Token::Identifier,      // fib
        Token::OpenParen,       // (
        Token::Identifier,      // int
        Token::Identifier,      // num
        Token::CloseParen,      // )
        Token::OpenBrace,       // {

        Token::If,              // if
        Token::OpenParen,       // (
        Token::Identifier,      // n
        Token::Eql,             // ==
        Token::NumLiteral,      // 0
        Token::CloseParen,      // )
        Token::OpenBrace,       // {
        Token::Return,          // return
        Token::NumLiteral,      // 0
        Token::CloseBrace,      // }

        Token::If,              // if
        Token::OpenParen,       // (
        Token::Identifier,      // n
        Token::Eql,             // ==
        Token::NumLiteral,      // 1
        Token::CloseParen,      // )
        Token::OpenBrace,       // {
        Token::Return,          // return
        Token::NumLiteral,      // 1
        Token::CloseBrace,      // }

        Token::Return,          // return
        Token::Identifier,      // fib
        Token::OpenParen,       // (
        Token::Identifier,      // n
        Token::Sub,             // -
        Token::NumLiteral,      // 1
        Token::CloseParen,      // )

        Token::Add,             // +

        Token::Identifier,      // fib
        Token::OpenParen,       // (
        Token::Identifier,      // n
        Token::Sub,             // -
        Token::NumLiteral,      // 2
        Token::CloseParen,      // )
        Token::CloseBrace,      // }

        Token::FunctionDef,     // func
        Token::OpenCrocodile,   // <
        Token::Identifier,      // int
        Token::CloseCrocodile,  // >
        Token::Identifier,      // main
        Token::OpenParen,       // (
        Token::CloseParen,      // )

        Token::OpenBrace,       // {
        Token::Identifier,      // int
        Token::Identifier,      // res
        Token::Assign,          // =
        Token::Identifier,      // fib
        Token::OpenParen,       // (
        Token::NumLiteral,      // 8
        Token::CloseParen,      // )
        Token::Return,          // return
        Token::Identifier,      // res
        Token::Eql,             // ==
        Token::NumLiteral,      // 21
        Token::CloseBrace       // }
    });
}
|
|
}
|