Removed raw-enums

This commit is contained in:
Pasha Bibko
2025-08-24 21:17:50 +01:00
parent 2427d67269
commit 025a1ee0b4
9 changed files with 315 additions and 326 deletions

View File

@@ -33,14 +33,14 @@ namespace PashaBibko::LXC::AST
class Operation final : public NodeValue
{
public:
Operation(NodeValuePtr& left, Lexer::Token::TokenType operand, NodeValuePtr& right);
Operation(NodeValuePtr& left, Lexer::TokenType operand, NodeValuePtr& right);
private:
// The sides of the operation //
NodeValuePtr m_Lhs, m_Rhs;
// The operand of the operation //
Lexer::Token::TokenType m_Operand;
Lexer::TokenType m_Operand;
};
class VarDeclaration final : public Node

View File

@@ -12,7 +12,7 @@ namespace PashaBibko::LXC::AST
: NodeValue(NodeType::IntLiteral), m_NumberValue(value)
{}
Operation::Operation(NodeValuePtr& left, Lexer::Token::TokenType operand, NodeValuePtr& right)
Operation::Operation(NodeValuePtr& left, Lexer::TokenType operand, NodeValuePtr& right)
: NodeValue(NodeType::Operation), m_Lhs(std::move(left)), m_Operand(operand), m_Rhs(std::move(right))
{}

View File

@@ -4,10 +4,7 @@
namespace PashaBibko::LXC::Lexer
{
namespace TokenClass
{
// Bitmask for different token classes //
enum ClassMask : unsigned short
enum class TokenClass : unsigned short
{
// Mathematical and logic operators //
Operator = 1 << (1 + 8),
@@ -24,72 +21,64 @@ namespace PashaBibko::LXC::Lexer
// Tokens not defined by previous classes //
Misc = 1 << (5 + 8)
};
};
struct LexerContext;
// Data type for storing the output of the lexer //
class Token final
{
public:
// Enum of token type organised by their token class //
enum TokenType : unsigned short
enum class TokenType : unsigned short
{
// === Operators === //
Add = TokenClass::Operator,
Add = (unsigned short)TokenClass::Operator,
Sub,
Mul,
Div,
Mod,
Eql,
// === Keywords === //
For = TokenClass::Keyword,
For = (unsigned short)TokenClass::Keyword,
While,
If,
ElseIf,
Else,
Return,
FunctionDef,
// === User defined === //
StringLiteral = TokenClass::UserDefined,
StringLiteral = (unsigned short)TokenClass::UserDefined,
NumLiteral,
Identifier,
// === Symbols === //
Assign = TokenClass::Symbols,
Assign = (unsigned short)TokenClass::Symbols,
Colon,
CloseBracket,
OpenBracket,
CloseBrace,
OpenBrace,
CloseParen,
OpenParen,
CloseCrocodile,
OpenCrocodile,
Comma,
// === Misc === //
End_of_file = TokenClass::Misc,
End_of_file = (unsigned short)TokenClass::Misc,
UNDEFINED = 65535 // Invalid token type (max number)
};
// Data type for storing the output of the lexer //
class Token final
{
public:
// Util function calculating whether a token is of a given class //
template<TokenClass::ClassMask mask> static constexpr bool IsTypeClass(TokenType type)
template<TokenClass mask> static constexpr bool IsTypeClass(TokenType type)
{
using T = std::underlying_type_t<TokenType>;
return static_cast<T>(type) & static_cast<T>(mask);
@@ -132,7 +121,7 @@ namespace PashaBibko::LXC::Lexer
};
// Function for converting token types to their equivalent C-Strings //
const char* TokenTypeToCStr(Token::TokenType type);
const char* TokenTypeToCStr(TokenType type);
// Typedef for the Lexer's output type //
typedef std::vector<Token> LexerOutput;

View File

@@ -38,46 +38,46 @@ namespace PashaBibko::LXC::Internal
c == '>' || c == ':';
}
static const std::unordered_map<std::string_view, Lexer::Token::TokenType> operatorMap =
static const std::unordered_map<std::string_view, Lexer::TokenType> operatorMap =
{
{ "+", Lexer::Token::Add },
{ "-", Lexer::Token::Sub },
{ "*", Lexer::Token::Mul },
{ "/", Lexer::Token::Div },
{ "%", Lexer::Token::Mod },
{ "+", Lexer::TokenType::Add },
{ "-", Lexer::TokenType::Sub },
{ "*", Lexer::TokenType::Mul },
{ "/", Lexer::TokenType::Div },
{ "%", Lexer::TokenType::Mod },
{ "==", Lexer::Token::Eql },
{ "==", Lexer::TokenType::Eql },
{ "=", Lexer::Token::Assign }
{ "=", Lexer::TokenType::Assign }
};
static const std::unordered_map<char, Lexer::Token::TokenType> symbolMap =
static const std::unordered_map<char, Lexer::TokenType> symbolMap =
{
{ ',', Lexer::Token::Comma },
{ ':', Lexer::Token::Colon },
{ ',', Lexer::TokenType::Comma },
{ ':', Lexer::TokenType::Colon },
{ '[', Lexer::Token::CloseBracket },
{ ']', Lexer::Token::OpenBracket },
{ '[', Lexer::TokenType::CloseBracket },
{ ']', Lexer::TokenType::OpenBracket },
{ '}', Lexer::Token::CloseBrace },
{ '{', Lexer::Token::OpenBrace },
{ '}', Lexer::TokenType::CloseBrace },
{ '{', Lexer::TokenType::OpenBrace },
{ ')', Lexer::Token::CloseParen },
{ '(', Lexer::Token::OpenParen },
{ ')', Lexer::TokenType::CloseParen },
{ '(', Lexer::TokenType::OpenParen },
{ '>', Lexer::Token::CloseCrocodile },
{ '<', Lexer::Token::OpenCrocodile }
{ '>', Lexer::TokenType::CloseCrocodile },
{ '<', Lexer::TokenType::OpenCrocodile }
};
static const std::unordered_map<std::string_view, Lexer::Token::TokenType> keywords =
static const std::unordered_map<std::string_view, Lexer::TokenType> keywords =
{
{ "for", Lexer::Token::For },
{ "while", Lexer::Token::While },
{ "if", Lexer::Token::If },
{ "elif", Lexer::Token::ElseIf },
{ "else", Lexer::Token::Else },
{ "return", Lexer::Token::Return },
{ "func", Lexer::Token::FunctionDef },
{ "for", Lexer::TokenType::For },
{ "while", Lexer::TokenType::While },
{ "if", Lexer::TokenType::If },
{ "elif", Lexer::TokenType::ElseIf },
{ "else", Lexer::TokenType::Else },
{ "return", Lexer::TokenType::Return },
{ "func", Lexer::TokenType::FunctionDef },
};
}
@@ -126,7 +126,7 @@ namespace PashaBibko::LXC::Lexer
// Creates the token (if at the end of the string literal) //
if (!trackers.inStrLiteral)
ctx.out.emplace_back(ctx, trackers.sectionStart + 1, (unsigned short)(ctx.index - trackers.sectionStart - 1), Token::StringLiteral);
ctx.out.emplace_back(ctx, trackers.sectionStart + 1, (unsigned short)(ctx.index - trackers.sectionStart - 1), TokenType::StringLiteral);
} else if (trackers.inStrLiteral) {}
@@ -140,7 +140,7 @@ namespace PashaBibko::LXC::Lexer
// Checks for the end of the number literal to create the token //
if (!Internal::IsNumeric(next)) _UNLIKELY
{
ctx.out.emplace_back(ctx, trackers.sectionStart, (unsigned short)(ctx.index - trackers.sectionStart + 1), Token::NumLiteral);
ctx.out.emplace_back(ctx, trackers.sectionStart, (unsigned short)(ctx.index - trackers.sectionStart + 1), TokenType::NumLiteral);
trackers.inNumLiteral = false;
}
}
@@ -158,7 +158,7 @@ namespace PashaBibko::LXC::Lexer
// Finds out if the word is a keyword or not //
std::string_view fullWord(ctx.source.data() + trackers.sectionStart, ctx.index - trackers.sectionStart + 1);
auto it = Internal::keywords.find(fullWord);
Token::TokenType tType = (it != Internal::keywords.end()) ? it->second : Token::Identifier;
TokenType tType = (it != Internal::keywords.end()) ? it->second : TokenType::Identifier;
ctx.out.emplace_back(ctx, trackers.sectionStart, (unsigned short)(ctx.index - trackers.sectionStart + 1), tType);
trackers.inIdentifier = false;

View File

@@ -55,46 +55,46 @@ namespace PashaBibko::LXC::Lexer
// Helper macro for converting type to string //
#define TOKEN_TYPE_CASE(type) case type: return #type;
const char* TokenTypeToCStr(Token::TokenType type)
const char* TokenTypeToCStr(TokenType type)
{
switch (type)
{
// All the different types of tokens //
TOKEN_TYPE_CASE(Token::Add);
TOKEN_TYPE_CASE(Token::Sub);
TOKEN_TYPE_CASE(Token::Mul);
TOKEN_TYPE_CASE(Token::Div);
TOKEN_TYPE_CASE(Token::Mod);
TOKEN_TYPE_CASE(TokenType::Add);
TOKEN_TYPE_CASE(TokenType::Sub);
TOKEN_TYPE_CASE(TokenType::Mul);
TOKEN_TYPE_CASE(TokenType::Div);
TOKEN_TYPE_CASE(TokenType::Mod);
TOKEN_TYPE_CASE(Token::Eql);
TOKEN_TYPE_CASE(TokenType::Eql);
TOKEN_TYPE_CASE(Token::For);
TOKEN_TYPE_CASE(Token::While);
TOKEN_TYPE_CASE(Token::If);
TOKEN_TYPE_CASE(Token::ElseIf);
TOKEN_TYPE_CASE(Token::Else);
TOKEN_TYPE_CASE(Token::Return);
TOKEN_TYPE_CASE(TokenType::For);
TOKEN_TYPE_CASE(TokenType::While);
TOKEN_TYPE_CASE(TokenType::If);
TOKEN_TYPE_CASE(TokenType::ElseIf);
TOKEN_TYPE_CASE(TokenType::Else);
TOKEN_TYPE_CASE(TokenType::Return);
TOKEN_TYPE_CASE(Token::FunctionDef);
TOKEN_TYPE_CASE(TokenType::FunctionDef);
TOKEN_TYPE_CASE(Token::StringLiteral);
TOKEN_TYPE_CASE(Token::NumLiteral);
TOKEN_TYPE_CASE(Token::Identifier);
TOKEN_TYPE_CASE(TokenType::StringLiteral);
TOKEN_TYPE_CASE(TokenType::NumLiteral);
TOKEN_TYPE_CASE(TokenType::Identifier);
TOKEN_TYPE_CASE(Token::Assign);
TOKEN_TYPE_CASE(Token::Colon);
TOKEN_TYPE_CASE(Token::CloseBracket);
TOKEN_TYPE_CASE(Token::OpenBracket);
TOKEN_TYPE_CASE(Token::CloseBrace);
TOKEN_TYPE_CASE(Token::OpenBrace);
TOKEN_TYPE_CASE(Token::CloseParen);
TOKEN_TYPE_CASE(Token::OpenParen);
TOKEN_TYPE_CASE(Token::CloseCrocodile);
TOKEN_TYPE_CASE(Token::OpenCrocodile);
TOKEN_TYPE_CASE(Token::Comma);
TOKEN_TYPE_CASE(TokenType::Assign);
TOKEN_TYPE_CASE(TokenType::Colon);
TOKEN_TYPE_CASE(TokenType::CloseBracket);
TOKEN_TYPE_CASE(TokenType::OpenBracket);
TOKEN_TYPE_CASE(TokenType::CloseBrace);
TOKEN_TYPE_CASE(TokenType::OpenBrace);
TOKEN_TYPE_CASE(TokenType::CloseParen);
TOKEN_TYPE_CASE(TokenType::OpenParen);
TOKEN_TYPE_CASE(TokenType::CloseCrocodile);
TOKEN_TYPE_CASE(TokenType::OpenCrocodile);
TOKEN_TYPE_CASE(TokenType::Comma);
TOKEN_TYPE_CASE(Token::End_of_file);
TOKEN_TYPE_CASE(Token::UNDEFINED);
TOKEN_TYPE_CASE(TokenType::End_of_file);
TOKEN_TYPE_CASE(TokenType::UNDEFINED);
// When the case has not been defined yet //
default:

View File

@@ -38,7 +38,7 @@ namespace PashaBibko::LXC::Parser
}
// Checks if the tokens are the correct types //
inline bool Expect(const std::span<const Lexer::Token::TokenType>& tokens) const
inline bool Expect(const std::span<const Lexer::TokenType>& tokens) const
{
for (int i = 0; i < tokens.size(); i++)
{

View File

@@ -34,12 +34,12 @@ namespace PashaBibko::LXC::Parser
switch (current->type)
{
// Forwards to the ParseIdentifier and returns the result/error //
case Lexer::Token::Identifier:
case Lexer::TokenType::Identifier:
ctx.Advance();
return Internal::CreateNodeV<AST::VarAccess>(current->Str());
// Returns an integer literal node //
case Lexer::Token::NumLiteral:
case Lexer::TokenType::NumLiteral:
ctx.Advance();
return Internal::CreateNodeV<AST::IntLiteral>(current->Str());
@@ -52,7 +52,7 @@ namespace PashaBibko::LXC::Parser
static Util::ReturnVal<AST::NodeValuePtr, ParserError> ParseFunctionCall(ParserContext& ctx)
{
// Checks if the upcoming pattern matches a function signature: [Identifier(function name), OpenBracket(Start of params)] //
if (ctx.Expect(std::array{ Lexer::Token::Identifier, Lexer::Token::OpenParen }))
if (ctx.Expect(std::array{ Lexer::TokenType::Identifier, Lexer::TokenType::OpenParen }))
{
// Captures the function name and advances over it and the start paren //
const Lexer::Token* functionNameToken = ctx.At();
@@ -64,11 +64,11 @@ namespace PashaBibko::LXC::Parser
while (current != nullptr)
{
// End of the function call //
if (current->type == Lexer::Token::CloseParen)
if (current->type == Lexer::TokenType::CloseParen)
return Internal::CreateNodeV<AST::FunctionCall>(functionNameToken->Str(), arguments);
// Checks for separating comma //
if (!ctx.Expect(std::array{ Lexer::Token::Comma }))
if (!ctx.Expect(std::array{ Lexer::TokenType::Comma }))
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
ctx.Advance();
@@ -116,7 +116,7 @@ namespace PashaBibko::LXC::Parser
if (at == nullptr)
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
if (at->type == Lexer::Token::Return)
if (at->type == Lexer::TokenType::Return)
{
// Iterates over the return token and parses the value to be returned //
ctx.Advance();
@@ -139,7 +139,7 @@ namespace PashaBibko::LXC::Parser
static Util::ReturnVal<AST::NodePtr, ParserError> ParseVarDeclaration(ParserContext& ctx)
{
// Checks for the pattern of a variable declaration //
if (ctx.Expect(std::array{ Lexer::Token::Identifier, Lexer::Token::Colon }))
if (ctx.Expect(std::array{ Lexer::TokenType::Identifier, Lexer::TokenType::Colon }))
{
// Can safely advance over the pattern (types are not checked/stored yet) //
ctx.Advance(2);
@@ -149,7 +149,7 @@ namespace PashaBibko::LXC::Parser
if (varName == nullptr)
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
if (varName->type != Lexer::Token::Identifier)
if (varName->type != Lexer::TokenType::Identifier)
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
// Checks for a default value for the variable //
@@ -157,7 +157,7 @@ namespace PashaBibko::LXC::Parser
if (varAssign == nullptr)
return Internal::CreateNode<AST::VarDeclaration>(varNameStr);
if (varAssign->type != Lexer::Token::Assign)
if (varAssign->type != Lexer::TokenType::Assign)
return Internal::CreateNode<AST::VarDeclaration>(varNameStr);
// Creates a node with the default value of the variable //
@@ -182,7 +182,7 @@ namespace PashaBibko::LXC::Parser
Util::ReturnVal<FunctionAST, ParserError> ParseFunction(ParserContext& ctx)
{
// Checks for the sequence of: func<T> funcName( //
if (!ctx.Expect(std::array{ Lexer::Token::FunctionDef, Lexer::Token::OpenCrocodile, Lexer::Token::Identifier, Lexer::Token::CloseCrocodile, Lexer::Token::Identifier, Lexer::Token::OpenParen }))
if (!ctx.Expect(std::array{ Lexer::TokenType::FunctionDef, Lexer::TokenType::OpenCrocodile, Lexer::TokenType::Identifier, Lexer::TokenType::CloseCrocodile, Lexer::TokenType::Identifier, Lexer::TokenType::OpenParen }))
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
// Assumes int for now so skips over func<T> //
@@ -197,10 +197,10 @@ namespace PashaBibko::LXC::Parser
if (paramsStart == nullptr )
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
while (ctx.At()->type != Lexer::Token::CloseParen)
while (ctx.At()->type != Lexer::TokenType::CloseParen)
{
// Checks for parameter pattern: identifier : identifier //
if (!ctx.Expect(std::array{ Lexer::Token::Identifier, Lexer::Token::Colon, Lexer::Token::Identifier }))
if (!ctx.Expect(std::array{ Lexer::TokenType::Identifier, Lexer::TokenType::Colon, Lexer::TokenType::Identifier }))
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
const Lexer::Token* paramType = ctx.At();
@@ -213,7 +213,7 @@ namespace PashaBibko::LXC::Parser
if (end == nullptr)
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
if (end->type == Lexer::Token::Comma || end->type == Lexer::Token::CloseParen)
if (end->type == Lexer::TokenType::Comma || end->type == Lexer::TokenType::CloseParen)
continue;
return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
@@ -226,7 +226,7 @@ namespace PashaBibko::LXC::Parser
const Lexer::Token* current = ctx.At();
while (current != nullptr)
{
if (current->type == Lexer::Token::CloseBrace)
if (current->type == Lexer::TokenType::CloseBrace)
{
// Advances over closing brace before returning the function //
ctx.Advance();
@@ -260,7 +260,7 @@ namespace PashaBibko::LXC::Parser
switch (current->type)
{
// Only functions are currently supported //
case Lexer::Token::FunctionDef:
case Lexer::TokenType::FunctionDef:
{
// Parses the function and add it to the vector if there are no errors //
Util::ReturnVal func = ParseFunction(ctx);

View File

@@ -6,7 +6,7 @@
// Local util functions //
namespace PashaBibko::LXC::Internal
{
static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::Token::TokenType>& expected)
static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::TokenType>& expected)
{
size_t length = std::min(tokens.size(), expected.size());
for (size_t i = 0; i < length; i++)
@@ -29,38 +29,38 @@ namespace PashaBibko::LXC::Lexer
{
TEST(LexerTests, ReturnsTrueForMatching)
{
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Add));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Sub));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Mul));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Add));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Sub));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Mul));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(Token::If));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(Token::While));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::If));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::While));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(Token::StringLiteral));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(Token::NumLiteral));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::StringLiteral));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::NumLiteral));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(Token::CloseBracket));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(Token::Comma));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::CloseBracket));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::Comma));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(Token::End_of_file));
EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(TokenType::End_of_file));
}
TEST(LexerTests, ReturnsFalseForNonMatching)
{
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::StringLiteral));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::End_of_file));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::If));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::StringLiteral));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::End_of_file));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::If));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(Token::NumLiteral));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(Token::Comma));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::NumLiteral));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::Comma));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(Token::Add));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(Token::CloseBracket));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::Add));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::CloseBracket));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(Token::While));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(Token::Mul));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::While));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::Mul));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(Token::Sub));
EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(TokenType::Sub));
}
TEST(LexerTests, EmptyInput)
@@ -74,35 +74,35 @@ namespace PashaBibko::LXC::Lexer
{
Util::ReturnVal result = TokenizeFile("hello");
ASSERT_TRUE(result.Success());
Internal::ExpectTokens(result.Result(), { Token::Identifier });
Internal::ExpectTokens(result.Result(), { TokenType::Identifier });
}
TEST(LexerTests, SingleNumber)
{
Util::ReturnVal result = TokenizeFile("12345");
ASSERT_TRUE(result.Success());
Internal::ExpectTokens(result.Result(), { Token::NumLiteral });
Internal::ExpectTokens(result.Result(), { TokenType::NumLiteral });
}
TEST(LexerTests, SingleStringLiteral)
{
Util::ReturnVal result = TokenizeFile("\"string literal\"");
ASSERT_TRUE(result.Success());
Internal::ExpectTokens(result.Result(), { Token::StringLiteral });
Internal::ExpectTokens(result.Result(), { TokenType::StringLiteral });
}
TEST(LexerTests, MultipleSymbolsAndOperators)
{
Util::ReturnVal result = TokenizeFile("+ = (");
ASSERT_TRUE(result.Success());
Internal::ExpectTokens(result.Result(), { Token::Add, Token::Assign, Token::OpenParen });
Internal::ExpectTokens(result.Result(), { TokenType::Add, TokenType::Assign, TokenType::OpenParen });
}
TEST(LexerTests, WhileTrueTokenTest)
{
Util::ReturnVal result = TokenizeFile("while (true)");
ASSERT_TRUE(result.Success());
Internal::ExpectTokens(result.Result(), { Token::While, Token::OpenParen, Token::Identifier, Token::CloseParen });
Internal::ExpectTokens(result.Result(), { TokenType::While, TokenType::OpenParen, TokenType::Identifier, TokenType::CloseParen });
}
TEST(LexerTests, UnterminatedString)
@@ -133,61 +133,61 @@ namespace PashaBibko::LXC::Lexer
Util::ReturnVal tokens = TokenizeFile(fileContents.Result());
Internal::ExpectTokens(tokens.Result(),
{
Token::FunctionDef, // func
Token::OpenCrocodile, // <
Token::Identifier, // int
Token::CloseCrocodile, // >
Token::Identifier, // add
Token::OpenParen, // (
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // a
Token::Comma, // ,
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // b
Token::CloseParen, // )
Token::OpenBrace, // {
Token::Return, // return
Token::Identifier, // a
Token::Add, // +
Token::Identifier, // b
Token::CloseBrace, // }
TokenType::FunctionDef, // func
TokenType::OpenCrocodile, // <
TokenType::Identifier, // int
TokenType::CloseCrocodile, // >
TokenType::Identifier, // add
TokenType::OpenParen, // (
TokenType::Identifier, // int
TokenType::Colon, // :
TokenType::Identifier, // a
TokenType::Comma, // ,
TokenType::Identifier, // int
TokenType::Colon, // :
TokenType::Identifier, // b
TokenType::CloseParen, // )
TokenType::OpenBrace, // {
TokenType::Return, // return
TokenType::Identifier, // a
TokenType::Add, // +
TokenType::Identifier, // b
TokenType::CloseBrace, // }
Token::FunctionDef, // func
Token::OpenCrocodile, // <
Token::Identifier, // int
Token::CloseCrocodile, // >
Token::Identifier, // main
Token::OpenParen, // (
Token::CloseParen, // )
Token::OpenBrace, // {
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // c
Token::Assign, // =
Token::Identifier, // add
Token::OpenParen, // (
Token::NumLiteral, // 3
Token::Comma, // ,
Token::NumLiteral, // 4
Token::CloseParen, // )
Token::If, // if
Token::OpenParen, // (
Token::Identifier, // c
Token::Eql, // ==
Token::NumLiteral, // 7
Token::CloseParen, // )
Token::OpenBrace, // {
Token::Return, // return
Token::NumLiteral, // 0
Token::CloseBrace, // }
Token::Else, // else
Token::OpenBrace, // {
Token::Return, // return
Token::NumLiteral, // 1
Token::CloseBrace, // }
Token::CloseBrace // }
TokenType::FunctionDef, // func
TokenType::OpenCrocodile, // <
TokenType::Identifier, // int
TokenType::CloseCrocodile, // >
TokenType::Identifier, // main
TokenType::OpenParen, // (
TokenType::CloseParen, // )
TokenType::OpenBrace, // {
TokenType::Identifier, // int
TokenType::Colon, // :
TokenType::Identifier, // c
TokenType::Assign, // =
TokenType::Identifier, // add
TokenType::OpenParen, // (
TokenType::NumLiteral, // 3
TokenType::Comma, // ,
TokenType::NumLiteral, // 4
TokenType::CloseParen, // )
TokenType::If, // if
TokenType::OpenParen, // (
TokenType::Identifier, // c
TokenType::Eql, // ==
TokenType::NumLiteral, // 7
TokenType::CloseParen, // )
TokenType::OpenBrace, // {
TokenType::Return, // return
TokenType::NumLiteral, // 0
TokenType::CloseBrace, // }
TokenType::Else, // else
TokenType::OpenBrace, // {
TokenType::Return, // return
TokenType::NumLiteral, // 1
TokenType::CloseBrace, // }
TokenType::CloseBrace // }
});
}
@@ -198,80 +198,80 @@ namespace PashaBibko::LXC::Lexer
Util::ReturnVal tokens = TokenizeFile(fileContents.Result());
Internal::ExpectTokens(tokens.Result(),
{
Token::FunctionDef, // func
Token::OpenCrocodile, // <
Token::Identifier, // int
Token::CloseCrocodile, // >
Token::Identifier, // fib
Token::OpenParen, // (
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // num
Token::CloseParen, // )
Token::OpenBrace, // {
TokenType::FunctionDef, // func
TokenType::OpenCrocodile, // <
TokenType::Identifier, // int
TokenType::CloseCrocodile, // >
TokenType::Identifier, // fib
TokenType::OpenParen, // (
TokenType::Identifier, // int
TokenType::Colon, // :
TokenType::Identifier, // num
TokenType::CloseParen, // )
TokenType::OpenBrace, // {
Token::If, // if
Token::OpenParen, // (
Token::Identifier, // n
Token::Eql, // ==
Token::NumLiteral, // 0
Token::CloseParen, // )
Token::OpenBrace, // {
Token::Return, // return
Token::NumLiteral, // 0
Token::CloseBrace, // }
TokenType::If, // if
TokenType::OpenParen, // (
TokenType::Identifier, // n
TokenType::Eql, // ==
TokenType::NumLiteral, // 0
TokenType::CloseParen, // )
TokenType::OpenBrace, // {
TokenType::Return, // return
TokenType::NumLiteral, // 0
TokenType::CloseBrace, // }
Token::If, // if
Token::OpenParen, // (
Token::Identifier, // n
Token::Eql, // ==
Token::NumLiteral, // 1
Token::CloseParen, // )
Token::OpenBrace, // {
Token::Return, // return
Token::NumLiteral, // 1
Token::CloseBrace, // }
TokenType::If, // if
TokenType::OpenParen, // (
TokenType::Identifier, // n
TokenType::Eql, // ==
TokenType::NumLiteral, // 1
TokenType::CloseParen, // )
TokenType::OpenBrace, // {
TokenType::Return, // return
TokenType::NumLiteral, // 1
TokenType::CloseBrace, // }
Token::Return, // return
Token::Identifier, // fib
Token::OpenParen, // (
Token::Identifier, // n
Token::Sub, // -
Token::NumLiteral, // 1
Token::CloseParen, // )
TokenType::Return, // return
TokenType::Identifier, // fib
TokenType::OpenParen, // (
TokenType::Identifier, // n
TokenType::Sub, // -
TokenType::NumLiteral, // 1
TokenType::CloseParen, // )
Token::Add, // +
TokenType::Add, // +
Token::Identifier, // fib
Token::OpenParen, // (
Token::Identifier, // n
Token::Sub, // -
Token::NumLiteral, // 2
Token::CloseParen, // )
Token::CloseBrace, // }
TokenType::Identifier, // fib
TokenType::OpenParen, // (
TokenType::Identifier, // n
TokenType::Sub, // -
TokenType::NumLiteral, // 2
TokenType::CloseParen, // )
TokenType::CloseBrace, // }
Token::FunctionDef, // func
Token::OpenCrocodile, // <
Token::Identifier, // int
Token::CloseCrocodile, // >
Token::Identifier, // main
Token::OpenParen, // (
Token::CloseParen, // )
TokenType::FunctionDef, // func
TokenType::OpenCrocodile, // <
TokenType::Identifier, // int
TokenType::CloseCrocodile, // >
TokenType::Identifier, // main
TokenType::OpenParen, // (
TokenType::CloseParen, // )
Token::OpenBrace, // {
Token::Identifier, // int
Token::Colon, // :
Token::Identifier, // res
Token::Assign, // =
Token::Identifier, // fib
Token::OpenParen, // (
Token::NumLiteral, // 8
Token::CloseParen, // )
Token::Return, // return
Token::Identifier, // res
Token::Eql, // ==
Token::NumLiteral, // 21
Token::CloseBrace // }
TokenType::OpenBrace, // {
TokenType::Identifier, // int
TokenType::Colon, // :
TokenType::Identifier, // res
TokenType::Assign, // =
TokenType::Identifier, // fib
TokenType::OpenParen, // (
TokenType::NumLiteral, // 8
TokenType::CloseParen, // )
TokenType::Return, // return
TokenType::Identifier, // res
TokenType::Eql, // ==
TokenType::NumLiteral, // 21
TokenType::CloseBrace // }
});
}
}

View File

@@ -15,7 +15,7 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
ASSERT_FALSE(ctx.At() == nullptr);
EXPECT_TRUE(ctx.At()->type == Lexer::Token::NumLiteral);
EXPECT_TRUE(ctx.At()->type == Lexer::TokenType::NumLiteral);
EXPECT_STREQ(ctx.At()->Str(), "576");
EXPECT_TRUE(ctx.InBounds());
}
@@ -40,7 +40,7 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
ASSERT_FALSE(ctx.Peek() == nullptr);
EXPECT_TRUE(ctx.Peek()->type == Lexer::Token::Identifier);
EXPECT_TRUE(ctx.Peek()->type == Lexer::TokenType::Identifier);
EXPECT_STREQ(ctx.Peek()->Str(), "hello");
}
@@ -59,18 +59,18 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
static const Lexer::Token::TokenType results[] =
static const Lexer::TokenType results[] =
{
Lexer::Token::OpenCrocodile,
Lexer::Token::Identifier,
Lexer::Token::CloseCrocodile,
Lexer::Token::Identifier,
Lexer::Token::OpenParen,
Lexer::Token::CloseParen,
Lexer::Token::OpenBrace,
Lexer::Token::Return,
Lexer::Token::NumLiteral,
Lexer::Token::CloseBrace
Lexer::TokenType::OpenCrocodile,
Lexer::TokenType::Identifier,
Lexer::TokenType::CloseCrocodile,
Lexer::TokenType::Identifier,
Lexer::TokenType::OpenParen,
Lexer::TokenType::CloseParen,
Lexer::TokenType::OpenBrace,
Lexer::TokenType::Return,
Lexer::TokenType::NumLiteral,
Lexer::TokenType::CloseBrace
};
std::size_t resultLength = tokens.Result().size();
@@ -92,24 +92,24 @@ namespace PashaBibko::LXC::Parser
ParserContext ctx(tokens.Result());
ASSERT_TRUE(ctx.Expect(std::array
{
Lexer::Token::FunctionDef,
Lexer::Token::OpenCrocodile,
Lexer::Token::Identifier,
Lexer::Token::CloseCrocodile,
Lexer::Token::Identifier,
Lexer::Token::OpenParen,
Lexer::Token::CloseParen,
Lexer::Token::OpenBrace,
Lexer::Token::Return,
Lexer::Token::NumLiteral,
Lexer::Token::CloseBrace
Lexer::TokenType::FunctionDef,
Lexer::TokenType::OpenCrocodile,
Lexer::TokenType::Identifier,
Lexer::TokenType::CloseCrocodile,
Lexer::TokenType::Identifier,
Lexer::TokenType::OpenParen,
Lexer::TokenType::CloseParen,
Lexer::TokenType::OpenBrace,
Lexer::TokenType::Return,
Lexer::TokenType::NumLiteral,
Lexer::TokenType::CloseBrace
}));
ASSERT_FALSE(ctx.Expect(std::array
{
Lexer::Token::FunctionDef,
Lexer::Token::OpenCrocodile,
Lexer::Token::NumLiteral
Lexer::TokenType::FunctionDef,
Lexer::TokenType::OpenCrocodile,
Lexer::TokenType::NumLiteral
}));
}
}