Removed raw-enums

Pasha Bibko
2025-08-24 21:17:50 +01:00
parent 2427d67269
commit 025a1ee0b4
9 changed files with 315 additions and 326 deletions
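This diff touches call sites only, so the declarations are inferred from the rename pattern: Token::Add becomes TokenType::Add, and Lexer::Token::TokenType becomes Lexer::TokenType, which suggests a raw enum nested inside Token was replaced by a scoped enum class at Lexer namespace scope. A minimal sketch of that before/after shape (OldToken is a hypothetical stand-in; only TokenType appears in the commit):

#include <type_traits>

// Sketch only: the real declarations are not part of this diff, and
// OldToken is a hypothetical stand-in for the pre-commit layout.

// Before: a raw (unscoped) enum nested in the token struct. Enumerators
// leak into the struct's scope (Token::Add) and convert to int implicitly.
struct OldToken
{
    enum TokenType { Add, Sub, Mul };
};

// After: a scoped enum at namespace scope. Values are spelled
// TokenType::Add, and there is no implicit conversion to int.
enum class TokenType { Add, Sub, Mul };

static_assert(std::is_convertible_v<decltype(OldToken::Add), int>, "raw enums convert to int");
static_assert(!std::is_convertible_v<decltype(TokenType::Add), int>, "scoped enums do not");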


@@ -6,7 +6,7 @@
// Local util functions //
namespace PashaBibko::LXC::Internal
{
-static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::Token::TokenType>& expected)
+static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::TokenType>& expected)
{
size_t length = std::min(tokens.size(), expected.size());
for (size_t i = 0; i < length; i++)
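The hunk cuts the helper off at the loop; under the obvious reading it compares each produced token's type against the expected list (the parser tests later in this commit show the token struct exposing a type member). A hedged sketch of such a completion, with gtest assertions standing in for however the real helper reports mismatches:

#include <gtest/gtest.h>
#include <algorithm>
#include <vector>

// Sketch of a plausible completion; the real body is truncated by the
// hunk above, so the exact assertions are assumptions.
template<typename TokenT, typename TypeT>
static void ExpectTokensSketch(const std::vector<TokenT>& tokens, const std::vector<TypeT>& expected)
{
    // Sizes should agree; compare as far as both lists reach either way.
    EXPECT_EQ(tokens.size(), expected.size());
    size_t length = std::min(tokens.size(), expected.size());
    for (size_t i = 0; i < length; i++)
        EXPECT_EQ(tokens[i].type, expected[i]); // each token's type, in order
}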
@@ -29,38 +29,38 @@ namespace PashaBibko::LXC::Lexer
{
TEST(LexerTests, ReturnsTrueForMatching)
{
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Add));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Sub));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Mul));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Add));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Sub));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Mul));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(Token::If));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(Token::While));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::If));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::While));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(Token::StringLiteral));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(Token::NumLiteral));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::StringLiteral));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::NumLiteral));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(Token::CloseBracket));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(Token::Comma));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::CloseBracket));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::Comma));
-EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(Token::End_of_file));
+EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(TokenType::End_of_file));
}
TEST(LexerTests, ReturnsFalseForNonMatching)
{
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::StringLiteral));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::End_of_file));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::If));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::StringLiteral));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::End_of_file));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::If));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(Token::NumLiteral));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(Token::Comma));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::NumLiteral));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::Comma));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(Token::Add));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(Token::CloseBracket));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::Add));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::CloseBracket));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(Token::While));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(Token::Mul));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::While));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::Mul));
-EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(Token::Sub));
+EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(TokenType::Sub));
}
TEST(LexerTests, EmptyInput)
@@ -74,35 +74,35 @@ namespace PashaBibko::LXC::Lexer
{
Util::ReturnVal result = TokenizeFile("hello");
ASSERT_TRUE(result.Success());
-Internal::ExpectTokens(result.Result(), { Token::Identifier });
+Internal::ExpectTokens(result.Result(), { TokenType::Identifier });
}
TEST(LexerTests, SingleNumber)
{
Util::ReturnVal result = TokenizeFile("12345");
ASSERT_TRUE(result.Success());
-Internal::ExpectTokens(result.Result(), { Token::NumLiteral });
+Internal::ExpectTokens(result.Result(), { TokenType::NumLiteral });
}
TEST(LexerTests, SingleStringLiteral)
{
Util::ReturnVal result = TokenizeFile("\"string literal\"");
ASSERT_TRUE(result.Success());
-Internal::ExpectTokens(result.Result(), { Token::StringLiteral });
+Internal::ExpectTokens(result.Result(), { TokenType::StringLiteral });
}
TEST(LexerTests, MultipleSymbolsAndOperators)
{
Util::ReturnVal result = TokenizeFile("+ = (");
ASSERT_TRUE(result.Success());
-Internal::ExpectTokens(result.Result(), { Token::Add, Token::Assign, Token::OpenParen });
+Internal::ExpectTokens(result.Result(), { TokenType::Add, TokenType::Assign, TokenType::OpenParen });
}
TEST(LexerTests, WhileTrueTokenTest)
{
Util::ReturnVal result = TokenizeFile("while (true)");
ASSERT_TRUE(result.Success());
-Internal::ExpectTokens(result.Result(), { Token::While, Token::OpenParen, Token::Identifier, Token::CloseParen });
+Internal::ExpectTokens(result.Result(), { TokenType::While, TokenType::OpenParen, TokenType::Identifier, TokenType::CloseParen });
}
TEST(LexerTests, UnterminatedString)
@@ -133,61 +133,61 @@ namespace PashaBibko::LXC::Lexer
Util::ReturnVal tokens = TokenizeFile(fileContents.Result());
Internal::ExpectTokens(tokens.Result(),
{
-Token::FunctionDef, // func
-Token::OpenCrocodile, // <
-Token::Identifier, // int
-Token::CloseCrocodile, // >
-Token::Identifier, // add
-Token::OpenParen, // (
-Token::Identifier, // int
-Token::Colon, // :
-Token::Identifier, // a
-Token::Comma, // ,
-Token::Identifier, // int
-Token::Colon, // :
-Token::Identifier, // b
-Token::CloseParen, // )
-Token::OpenBrace, // {
-Token::Return, // return
-Token::Identifier, // a
-Token::Add, // +
-Token::Identifier, // b
-Token::CloseBrace, // }
+TokenType::FunctionDef, // func
+TokenType::OpenCrocodile, // <
+TokenType::Identifier, // int
+TokenType::CloseCrocodile, // >
+TokenType::Identifier, // add
+TokenType::OpenParen, // (
+TokenType::Identifier, // int
+TokenType::Colon, // :
+TokenType::Identifier, // a
+TokenType::Comma, // ,
+TokenType::Identifier, // int
+TokenType::Colon, // :
+TokenType::Identifier, // b
+TokenType::CloseParen, // )
+TokenType::OpenBrace, // {
+TokenType::Return, // return
+TokenType::Identifier, // a
+TokenType::Add, // +
+TokenType::Identifier, // b
+TokenType::CloseBrace, // }
-Token::FunctionDef, // func
-Token::OpenCrocodile, // <
-Token::Identifier, // int
-Token::CloseCrocodile, // >
-Token::Identifier, // main
-Token::OpenParen, // (
-Token::CloseParen, // )
-Token::OpenBrace, // {
-Token::Identifier, // int
-Token::Colon, // :
-Token::Identifier, // c
-Token::Assign, // =
-Token::Identifier, // add
-Token::OpenParen, // (
-Token::NumLiteral, // 3
-Token::Comma, // ,
-Token::NumLiteral, // 4
-Token::CloseParen, // )
-Token::If, // if
-Token::OpenParen, // (
-Token::Identifier, // c
-Token::Eql, // ==
-Token::NumLiteral, // 7
-Token::CloseParen, // )
-Token::OpenBrace, // {
-Token::Return, // return
-Token::NumLiteral, // 0
-Token::CloseBrace, // }
-Token::Else, // else
-Token::OpenBrace, // {
-Token::Return, // return
-Token::NumLiteral, // 1
-Token::CloseBrace, // }
-Token::CloseBrace // }
+TokenType::FunctionDef, // func
+TokenType::OpenCrocodile, // <
+TokenType::Identifier, // int
+TokenType::CloseCrocodile, // >
+TokenType::Identifier, // main
+TokenType::OpenParen, // (
+TokenType::CloseParen, // )
+TokenType::OpenBrace, // {
+TokenType::Identifier, // int
+TokenType::Colon, // :
+TokenType::Identifier, // c
+TokenType::Assign, // =
+TokenType::Identifier, // add
+TokenType::OpenParen, // (
+TokenType::NumLiteral, // 3
+TokenType::Comma, // ,
+TokenType::NumLiteral, // 4
+TokenType::CloseParen, // )
+TokenType::If, // if
+TokenType::OpenParen, // (
+TokenType::Identifier, // c
+TokenType::Eql, // ==
+TokenType::NumLiteral, // 7
+TokenType::CloseParen, // )
+TokenType::OpenBrace, // {
+TokenType::Return, // return
+TokenType::NumLiteral, // 0
+TokenType::CloseBrace, // }
+TokenType::Else, // else
+TokenType::OpenBrace, // {
+TokenType::Return, // return
+TokenType::NumLiteral, // 1
+TokenType::CloseBrace, // }
+TokenType::CloseBrace // }
});
}
@@ -198,80 +198,80 @@ namespace PashaBibko::LXC::Lexer
Util::ReturnVal tokens = TokenizeFile(fileContents.Result());
Internal::ExpectTokens(tokens.Result(),
{
-Token::FunctionDef, // func
-Token::OpenCrocodile, // <
-Token::Identifier, // int
-Token::CloseCrocodile, // >
-Token::Identifier, // fib
-Token::OpenParen, // (
-Token::Identifier, // int
-Token::Colon, // :
-Token::Identifier, // num
-Token::CloseParen, // )
-Token::OpenBrace, // {
+TokenType::FunctionDef, // func
+TokenType::OpenCrocodile, // <
+TokenType::Identifier, // int
+TokenType::CloseCrocodile, // >
+TokenType::Identifier, // fib
+TokenType::OpenParen, // (
+TokenType::Identifier, // int
+TokenType::Colon, // :
+TokenType::Identifier, // num
+TokenType::CloseParen, // )
+TokenType::OpenBrace, // {
-Token::If, // if
-Token::OpenParen, // (
-Token::Identifier, // n
-Token::Eql, // ==
-Token::NumLiteral, // 0
-Token::CloseParen, // )
-Token::OpenBrace, // {
-Token::Return, // return
-Token::NumLiteral, // 0
-Token::CloseBrace, // }
+TokenType::If, // if
+TokenType::OpenParen, // (
+TokenType::Identifier, // n
+TokenType::Eql, // ==
+TokenType::NumLiteral, // 0
+TokenType::CloseParen, // )
+TokenType::OpenBrace, // {
+TokenType::Return, // return
+TokenType::NumLiteral, // 0
+TokenType::CloseBrace, // }
-Token::If, // if
-Token::OpenParen, // (
-Token::Identifier, // n
-Token::Eql, // ==
-Token::NumLiteral, // 1
-Token::CloseParen, // )
-Token::OpenBrace, // {
-Token::Return, // return
-Token::NumLiteral, // 1
-Token::CloseBrace, // }
+TokenType::If, // if
+TokenType::OpenParen, // (
+TokenType::Identifier, // n
+TokenType::Eql, // ==
+TokenType::NumLiteral, // 1
+TokenType::CloseParen, // )
+TokenType::OpenBrace, // {
+TokenType::Return, // return
+TokenType::NumLiteral, // 1
+TokenType::CloseBrace, // }
-Token::Return, // return
-Token::Identifier, // fib
-Token::OpenParen, // (
-Token::Identifier, // n
-Token::Sub, // -
-Token::NumLiteral, // 1
-Token::CloseParen, // )
+TokenType::Return, // return
+TokenType::Identifier, // fib
+TokenType::OpenParen, // (
+TokenType::Identifier, // n
+TokenType::Sub, // -
+TokenType::NumLiteral, // 1
+TokenType::CloseParen, // )
-Token::Add, // +
+TokenType::Add, // +
-Token::Identifier, // fib
-Token::OpenParen, // (
-Token::Identifier, // n
-Token::Sub, // -
-Token::NumLiteral, // 2
-Token::CloseParen, // )
-Token::CloseBrace, // }
+TokenType::Identifier, // fib
+TokenType::OpenParen, // (
+TokenType::Identifier, // n
+TokenType::Sub, // -
+TokenType::NumLiteral, // 2
+TokenType::CloseParen, // )
+TokenType::CloseBrace, // }
-Token::FunctionDef, // func
-Token::OpenCrocodile, // <
-Token::Identifier, // int
-Token::CloseCrocodile, // >
-Token::Identifier, // main
-Token::OpenParen, // (
-Token::CloseParen, // )
+TokenType::FunctionDef, // func
+TokenType::OpenCrocodile, // <
+TokenType::Identifier, // int
+TokenType::CloseCrocodile, // >
+TokenType::Identifier, // main
+TokenType::OpenParen, // (
+TokenType::CloseParen, // )
-Token::OpenBrace, // {
-Token::Identifier, // int
-Token::Colon, // :
-Token::Identifier, // res
-Token::Assign, // =
-Token::Identifier, // fib
-Token::OpenParen, // (
-Token::NumLiteral, // 8
-Token::CloseParen, // )
-Token::Return, // return
-Token::Identifier, // res
-Token::Eql, // ==
-Token::NumLiteral, // 21
-Token::CloseBrace // }
+TokenType::OpenBrace, // {
+TokenType::Identifier, // int
+TokenType::Colon, // :
+TokenType::Identifier, // res
+TokenType::Assign, // =
+TokenType::Identifier, // fib
+TokenType::OpenParen, // (
+TokenType::NumLiteral, // 8
+TokenType::CloseParen, // )
+TokenType::Return, // return
+TokenType::Identifier, // res
+TokenType::Eql, // ==
+TokenType::NumLiteral, // 21
+TokenType::CloseBrace // }
});
}
}
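The Token::IsTypeClass implementation the tests above exercise is not shown in this diff. A common way to classify a scoped enum is a contiguous-range check per class; the sketch below assumes an enumerator order grouped by class, which is an assumption, not the repository's actual layout.

#include <iostream>

// Sketch only: the real enumerator order and class boundaries live
// outside this diff, so the ranges below are illustrative assumptions.
enum class TokenType
{
    Add, Sub, Mul,              // operators
    If, While,                  // keywords
    StringLiteral, NumLiteral,  // user-defined
    CloseBracket, Comma,        // symbols
    End_of_file                 // misc
};

enum class TokenClass { Operator, Keyword, UserDefined, Symbols, Misc };

struct Token
{
    // Classifies a TokenType by the contiguous enumerator range it falls
    // into; scoped enums keep a well-defined underlying order, so the
    // relational comparisons below are valid.
    template<TokenClass C>
    static constexpr bool IsTypeClass(TokenType t)
    {
        if constexpr (C == TokenClass::Operator)
            return t >= TokenType::Add && t <= TokenType::Mul;
        else if constexpr (C == TokenClass::Keyword)
            return t >= TokenType::If && t <= TokenType::While;
        else if constexpr (C == TokenClass::UserDefined)
            return t >= TokenType::StringLiteral && t <= TokenType::NumLiteral;
        else if constexpr (C == TokenClass::Symbols)
            return t >= TokenType::CloseBracket && t <= TokenType::Comma;
        else
            return t == TokenType::End_of_file;
    }
};

int main()
{
    std::cout << std::boolalpha
              << Token::IsTypeClass<TokenClass::Operator>(TokenType::Add) << '\n'  // true
              << Token::IsTypeClass<TokenClass::Keyword>(TokenType::Mul) << '\n';  // false
}

The remaining hunks come from the parser-context tests, where the same rename applies: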


@@ -15,7 +15,7 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
ASSERT_FALSE(ctx.At() == nullptr);
-EXPECT_TRUE(ctx.At()->type == Lexer::Token::NumLiteral);
+EXPECT_TRUE(ctx.At()->type == Lexer::TokenType::NumLiteral);
EXPECT_STREQ(ctx.At()->Str(), "576");
EXPECT_TRUE(ctx.InBounds());
}
@@ -40,7 +40,7 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
ASSERT_FALSE(ctx.Peek() == nullptr);
-EXPECT_TRUE(ctx.Peek()->type == Lexer::Token::Identifier);
+EXPECT_TRUE(ctx.Peek()->type == Lexer::TokenType::Identifier);
EXPECT_STREQ(ctx.Peek()->Str(), "hello");
}
@@ -59,18 +59,18 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
-static const Lexer::Token::TokenType results[] =
+static const Lexer::TokenType results[] =
{
-Lexer::Token::OpenCrocodile,
-Lexer::Token::Identifier,
-Lexer::Token::CloseCrocodile,
-Lexer::Token::Identifier,
-Lexer::Token::OpenParen,
-Lexer::Token::CloseParen,
-Lexer::Token::OpenBrace,
-Lexer::Token::Return,
-Lexer::Token::NumLiteral,
-Lexer::Token::CloseBrace
+Lexer::TokenType::OpenCrocodile,
+Lexer::TokenType::Identifier,
+Lexer::TokenType::CloseCrocodile,
+Lexer::TokenType::Identifier,
+Lexer::TokenType::OpenParen,
+Lexer::TokenType::CloseParen,
+Lexer::TokenType::OpenBrace,
+Lexer::TokenType::Return,
+Lexer::TokenType::NumLiteral,
+Lexer::TokenType::CloseBrace
};
std::size_t resultLength = tokens.Result().size();
@@ -92,24 +92,24 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
ASSERT_TRUE(ctx.Expect(std::array
{
-Lexer::Token::FunctionDef,
-Lexer::Token::OpenCrocodile,
-Lexer::Token::Identifier,
-Lexer::Token::CloseCrocodile,
-Lexer::Token::Identifier,
-Lexer::Token::OpenParen,
-Lexer::Token::CloseParen,
-Lexer::Token::OpenBrace,
-Lexer::Token::Return,
-Lexer::Token::NumLiteral,
-Lexer::Token::CloseBrace
+Lexer::TokenType::FunctionDef,
+Lexer::TokenType::OpenCrocodile,
+Lexer::TokenType::Identifier,
+Lexer::TokenType::CloseCrocodile,
+Lexer::TokenType::Identifier,
+Lexer::TokenType::OpenParen,
+Lexer::TokenType::CloseParen,
+Lexer::TokenType::OpenBrace,
+Lexer::TokenType::Return,
+Lexer::TokenType::NumLiteral,
+Lexer::TokenType::CloseBrace
}));
ASSERT_FALSE(ctx.Expect(std::array
{
-Lexer::Token::FunctionDef,
-Lexer::Token::OpenCrocodile,
-Lexer::Token::NumLiteral
+Lexer::TokenType::FunctionDef,
+Lexer::TokenType::OpenCrocodile,
+Lexer::TokenType::NumLiteral
}));
}
}
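The Expect calls above lean on class template argument deduction: std::array{ Lexer::TokenType::FunctionDef, ... } deduces both the element type and the length, so Expect can be a template over the array size. A minimal sketch of that shape, assuming a simple in-order comparison (MiniContext is hypothetical; the real ParserContext carries more state):

#include <array>
#include <cstddef>
#include <iostream>
#include <vector>

// Hypothetical sketch; the real ParserContext is not shown in this diff.
enum class TokenType { FunctionDef, OpenCrocodile, Identifier, NumLiteral };

struct MiniContext
{
    std::vector<TokenType> tokens;
    std::size_t pos = 0;

    // Matches the next N tokens against expected, in order. Taking
    // std::array lets call sites write Expect(std::array{ ... }) and
    // have N deduced via CTAD, as the tests above do.
    template<std::size_t N>
    bool Expect(const std::array<TokenType, N>& expected) const
    {
        if (pos + N > tokens.size())
            return false;
        for (std::size_t i = 0; i < N; i++)
            if (tokens[pos + i] != expected[i])
                return false;
        return true;
    }
};

int main()
{
    MiniContext ctx{ { TokenType::FunctionDef, TokenType::OpenCrocodile, TokenType::Identifier } };
    std::cout << std::boolalpha
              << ctx.Expect(std::array{ TokenType::FunctionDef, TokenType::OpenCrocodile }) << '\n' // true
              << ctx.Expect(std::array{ TokenType::NumLiteral }) << '\n';                           // false
}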