Removed raw-enums

Author: Pasha Bibko
Date:   2025-08-24 21:17:50 +01:00
parent 2427d67269
commit 025a1ee0b4
9 changed files with 315 additions and 326 deletions


@@ -33,14 +33,14 @@ namespace PashaBibko::LXC::AST
 class Operation final : public NodeValue
 {
 public:
-    Operation(NodeValuePtr& left, Lexer::Token::TokenType operand, NodeValuePtr& right);
+    Operation(NodeValuePtr& left, Lexer::TokenType operand, NodeValuePtr& right);

 private:
     // The sides of the operation //
     NodeValuePtr m_Lhs, m_Rhs;

     // The operand of the operation //
-    Lexer::Token::TokenType m_Operand;
+    Lexer::TokenType m_Operand;
 };

 class VarDeclaration final : public Node


@@ -12,7 +12,7 @@ namespace PashaBibko::LXC::AST
     : NodeValue(NodeType::IntLiteral), m_NumberValue(value)
 {}

-Operation::Operation(NodeValuePtr& left, Lexer::Token::TokenType operand, NodeValuePtr& right)
+Operation::Operation(NodeValuePtr& left, Lexer::TokenType operand, NodeValuePtr& right)
     : NodeValue(NodeType::Operation), m_Lhs(std::move(left)), m_Operand(operand), m_Rhs(std::move(right))
 {}


@@ -4,92 +4,81 @@
 namespace PashaBibko::LXC::Lexer
 {
-    namespace TokenClass
+    enum class TokenClass : unsigned short
     {
-        // Bitmask for different token classes //
-        enum ClassMask : unsigned short
-        {
-            // Mathematical and logic operators //
-            Operator = 1 << (1 + 8),
+        // Mathematical and logic operators //
+        Operator = 1 << (1 + 8),

         // Special words defined by the compiler //
         Keyword = 1 << (2 + 8),

         // Words such as literals and identifiers //
         UserDefined = 1 << (3 + 8),

         // Symbols in the source like (? , . ! <) //
         Symbols = 1 << (4 + 8),

         // Tokens not defined by previous classes //
         Misc = 1 << (5 + 8)
-        };
     };

     struct LexerContext;

+    // Enum of token type organised by their token class //
+    enum class TokenType : unsigned short
+    {
+        // === Operators === //
+        Add = (unsigned short)TokenClass::Operator,
+        Sub,
+        Mul,
+        Div,
+        Mod,
+        Eql,
+
+        // === Keywords === //
+        For = (unsigned short)TokenClass::Keyword,
+        While,
+        If,
+        ElseIf,
+        Else,
+        Return,
+        FunctionDef,
+
+        // === User defined === //
+        StringLiteral = (unsigned short)TokenClass::UserDefined,
+        NumLiteral,
+        Identifier,
+
+        // === Symbols === //
+        Assign = (unsigned short)TokenClass::Symbols,
+        Colon,
+        CloseBracket,
+        OpenBracket,
+        CloseBrace,
+        OpenBrace,
+        CloseParen,
+        OpenParen,
+        CloseCrocodile,
+        OpenCrocodile,
+        Comma,
+
+        // === Misc === //
+        End_of_file = (unsigned short)TokenClass::Misc,
+        UNDEFINED = 65535 // Invalid token type (max number)
+    };
+
     // Data type for storing the output of the lexer //
     class Token final
     {
     public:
-        // Enum of token type organised by their token class //
-        enum TokenType : unsigned short
-        {
-            // === Operators === //
-            Add = TokenClass::Operator,
-            Sub,
-            Mul,
-            Div,
-            Mod,
-            Eql,
-
-            // === Keywords === //
-            For = TokenClass::Keyword,
-            While,
-            If,
-            ElseIf,
-            Else,
-            Return,
-            FunctionDef,
-
-            // === User defined === //
-            StringLiteral = TokenClass::UserDefined,
-            NumLiteral,
-            Identifier,
-
-            // === Symbols === //
-            Assign = TokenClass::Symbols,
-            Colon,
-            CloseBracket,
-            OpenBracket,
-            CloseBrace,
-            OpenBrace,
-            CloseParen,
-            OpenParen,
-            CloseCrocodile,
-            OpenCrocodile,
-            Comma,
-
-            // === Misc === //
-            End_of_file = TokenClass::Misc,
-            UNDEFINED = 65535 // Invalid token type (max number)
-        };
-
         // Util function calculating wether a token is of a given class //
-        template<TokenClass::ClassMask mask> static constexpr bool IsTypeClass(TokenType type)
+        template<TokenClass mask> static constexpr bool IsTypeClass(TokenType type)
         {
             using T = std::underlying_type_t<TokenType>;
             return static_cast<T>(type) & static_cast<T>(mask);
@@ -132,7 +121,7 @@ namespace PashaBibko::LXC::Lexer
     };

     // Function for converting token types to their equivalent C-Strings //
-    const char* TokenTypeToCStr(Token::TokenType type);
+    const char* TokenTypeToCStr(TokenType type);

     // Typedef for the output type of how the Lexer outputs //
     typedef std::vector<Token> LexerOutput;
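
For reference, a minimal standalone sketch of the encoding used by the new enums above (not part of the commit; the enumerator values here are illustrative). Each TokenType starts at its TokenClass bit, so class membership is a single bitwise AND on the underlying values:

#include <iostream>
#include <type_traits>

enum class TokenClass : unsigned short { Operator = 1 << 9, Keyword = 1 << 10 };

enum class TokenType : unsigned short
{
    Add = (unsigned short)TokenClass::Operator, Sub,  // operators share the Operator bit
    For = (unsigned short)TokenClass::Keyword, While  // keywords share the Keyword bit
};

template<TokenClass mask> constexpr bool IsTypeClass(TokenType type)
{
    using T = std::underlying_type_t<TokenType>;
    return static_cast<T>(type) & static_cast<T>(mask);
}

int main()
{
    std::cout << IsTypeClass<TokenClass::Operator>(TokenType::Sub) << '\n'; // prints 1
    std::cout << IsTypeClass<TokenClass::Keyword>(TokenType::Add) << '\n';  // prints 0
}

Because TokenType is now a scoped enum at namespace level, call sites spell values as Lexer::TokenType::Add instead of Lexer::Token::Add, which accounts for the mechanical renames in the files below.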


@@ -38,46 +38,46 @@ namespace PashaBibko::LXC::Internal
             c == '>' || c == ':';
     }

-    static const std::unordered_map<std::string_view, Lexer::Token::TokenType> operatorMap =
+    static const std::unordered_map<std::string_view, Lexer::TokenType> operatorMap =
     {
-        { "+", Lexer::Token::Add },
-        { "-", Lexer::Token::Sub },
-        { "*", Lexer::Token::Mul },
-        { "/", Lexer::Token::Div },
-        { "%", Lexer::Token::Mod },
-        { "==", Lexer::Token::Eql },
-        { "=", Lexer::Token::Assign }
+        { "+", Lexer::TokenType::Add },
+        { "-", Lexer::TokenType::Sub },
+        { "*", Lexer::TokenType::Mul },
+        { "/", Lexer::TokenType::Div },
+        { "%", Lexer::TokenType::Mod },
+        { "==", Lexer::TokenType::Eql },
+        { "=", Lexer::TokenType::Assign }
     };

-    static const std::unordered_map<char, Lexer::Token::TokenType> symbolMap =
+    static const std::unordered_map<char, Lexer::TokenType> symbolMap =
     {
-        { ',', Lexer::Token::Comma },
-        { ':', Lexer::Token::Colon },
-        { '[', Lexer::Token::CloseBracket },
-        { ']', Lexer::Token::OpenBracket },
-        { '}', Lexer::Token::CloseBrace },
-        { '{', Lexer::Token::OpenBrace },
-        { ')', Lexer::Token::CloseParen },
-        { '(', Lexer::Token::OpenParen },
-        { '>', Lexer::Token::CloseCrocodile },
-        { '<', Lexer::Token::OpenCrocodile }
+        { ',', Lexer::TokenType::Comma },
+        { ':', Lexer::TokenType::Colon },
+        { '[', Lexer::TokenType::CloseBracket },
+        { ']', Lexer::TokenType::OpenBracket },
+        { '}', Lexer::TokenType::CloseBrace },
+        { '{', Lexer::TokenType::OpenBrace },
+        { ')', Lexer::TokenType::CloseParen },
+        { '(', Lexer::TokenType::OpenParen },
+        { '>', Lexer::TokenType::CloseCrocodile },
+        { '<', Lexer::TokenType::OpenCrocodile }
     };

-    static const std::unordered_map<std::string_view, Lexer::Token::TokenType> keywords =
+    static const std::unordered_map<std::string_view, Lexer::TokenType> keywords =
     {
-        { "for", Lexer::Token::For },
-        { "while", Lexer::Token::While },
-        { "if", Lexer::Token::If },
-        { "elif", Lexer::Token::ElseIf },
-        { "else", Lexer::Token::Else },
-        { "return", Lexer::Token::Return },
-        { "func", Lexer::Token::FunctionDef },
+        { "for", Lexer::TokenType::For },
+        { "while", Lexer::TokenType::While },
+        { "if", Lexer::TokenType::If },
+        { "elif", Lexer::TokenType::ElseIf },
+        { "else", Lexer::TokenType::Else },
+        { "return", Lexer::TokenType::Return },
+        { "func", Lexer::TokenType::FunctionDef },
     };
 }
@@ -126,7 +126,7 @@ namespace PashaBibko::LXC::Lexer
             // Creates the token (if at the end of the string literal) //
             if (!trackers.inStrLiteral)
-                ctx.out.emplace_back(ctx, trackers.sectionStart + 1, (unsigned short)(ctx.index - trackers.sectionStart - 1), Token::StringLiteral);
+                ctx.out.emplace_back(ctx, trackers.sectionStart + 1, (unsigned short)(ctx.index - trackers.sectionStart - 1), TokenType::StringLiteral);
         } else if (trackers.inStrLiteral) {}
@@ -140,7 +140,7 @@ namespace PashaBibko::LXC::Lexer
             // Checks for the end of the number literal to create the token //
             if (!Internal::IsNumeric(next)) _UNLIKELY
             {
-                ctx.out.emplace_back(ctx, trackers.sectionStart, (unsigned short)(ctx.index - trackers.sectionStart + 1), Token::NumLiteral);
+                ctx.out.emplace_back(ctx, trackers.sectionStart, (unsigned short)(ctx.index - trackers.sectionStart + 1), TokenType::NumLiteral);
                 trackers.inNumLiteral = false;
             }
         }
@@ -158,7 +158,7 @@ namespace PashaBibko::LXC::Lexer
             // Finds out if the word is a keyword or not //
             std::string_view fullWord(ctx.source.data() + trackers.sectionStart, ctx.index - trackers.sectionStart + 1);
             auto it = Internal::keywords.find(fullWord);
-            Token::TokenType tType = (it != Internal::keywords.end()) ? it->second : Token::Identifier;
+            TokenType tType = (it != Internal::keywords.end()) ? it->second : TokenType::Identifier;

             ctx.out.emplace_back(ctx, trackers.sectionStart, (unsigned short)(ctx.index - trackers.sectionStart + 1), tType);
             trackers.inIdentifier = false;


@@ -55,46 +55,46 @@ namespace PashaBibko::LXC::Lexer
     // Helper macro for converting type to string //
     #define TOKEN_TYPE_CASE(type) case type: return #type;

-    const char* TokenTypeToCStr(Token::TokenType type)
+    const char* TokenTypeToCStr(TokenType type)
     {
         switch (type)
         {
             // All the different types of tokens //
-            TOKEN_TYPE_CASE(Token::Add);
-            TOKEN_TYPE_CASE(Token::Sub);
-            TOKEN_TYPE_CASE(Token::Mul);
-            TOKEN_TYPE_CASE(Token::Div);
-            TOKEN_TYPE_CASE(Token::Mod);
-            TOKEN_TYPE_CASE(Token::Eql);
-            TOKEN_TYPE_CASE(Token::For);
-            TOKEN_TYPE_CASE(Token::While);
-            TOKEN_TYPE_CASE(Token::If);
-            TOKEN_TYPE_CASE(Token::ElseIf);
-            TOKEN_TYPE_CASE(Token::Else);
-            TOKEN_TYPE_CASE(Token::Return);
-            TOKEN_TYPE_CASE(Token::FunctionDef);
-            TOKEN_TYPE_CASE(Token::StringLiteral);
-            TOKEN_TYPE_CASE(Token::NumLiteral);
-            TOKEN_TYPE_CASE(Token::Identifier);
-            TOKEN_TYPE_CASE(Token::Assign);
-            TOKEN_TYPE_CASE(Token::Colon);
-            TOKEN_TYPE_CASE(Token::CloseBracket);
-            TOKEN_TYPE_CASE(Token::OpenBracket);
-            TOKEN_TYPE_CASE(Token::CloseBrace);
-            TOKEN_TYPE_CASE(Token::OpenBrace);
-            TOKEN_TYPE_CASE(Token::CloseParen);
-            TOKEN_TYPE_CASE(Token::OpenParen);
-            TOKEN_TYPE_CASE(Token::CloseCrocodile);
-            TOKEN_TYPE_CASE(Token::OpenCrocodile);
-            TOKEN_TYPE_CASE(Token::Comma);
-            TOKEN_TYPE_CASE(Token::End_of_file);
-            TOKEN_TYPE_CASE(Token::UNDEFINED);
+            TOKEN_TYPE_CASE(TokenType::Add);
+            TOKEN_TYPE_CASE(TokenType::Sub);
+            TOKEN_TYPE_CASE(TokenType::Mul);
+            TOKEN_TYPE_CASE(TokenType::Div);
+            TOKEN_TYPE_CASE(TokenType::Mod);
+            TOKEN_TYPE_CASE(TokenType::Eql);
+            TOKEN_TYPE_CASE(TokenType::For);
+            TOKEN_TYPE_CASE(TokenType::While);
+            TOKEN_TYPE_CASE(TokenType::If);
+            TOKEN_TYPE_CASE(TokenType::ElseIf);
+            TOKEN_TYPE_CASE(TokenType::Else);
+            TOKEN_TYPE_CASE(TokenType::Return);
+            TOKEN_TYPE_CASE(TokenType::FunctionDef);
+            TOKEN_TYPE_CASE(TokenType::StringLiteral);
+            TOKEN_TYPE_CASE(TokenType::NumLiteral);
+            TOKEN_TYPE_CASE(TokenType::Identifier);
+            TOKEN_TYPE_CASE(TokenType::Assign);
+            TOKEN_TYPE_CASE(TokenType::Colon);
+            TOKEN_TYPE_CASE(TokenType::CloseBracket);
+            TOKEN_TYPE_CASE(TokenType::OpenBracket);
+            TOKEN_TYPE_CASE(TokenType::CloseBrace);
+            TOKEN_TYPE_CASE(TokenType::OpenBrace);
+            TOKEN_TYPE_CASE(TokenType::CloseParen);
+            TOKEN_TYPE_CASE(TokenType::OpenParen);
+            TOKEN_TYPE_CASE(TokenType::CloseCrocodile);
+            TOKEN_TYPE_CASE(TokenType::OpenCrocodile);
+            TOKEN_TYPE_CASE(TokenType::Comma);
+            TOKEN_TYPE_CASE(TokenType::End_of_file);
+            TOKEN_TYPE_CASE(TokenType::UNDEFINED);

             // When the case has not been defined yet //
             default:
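
A side effect worth noting (standalone sketch, not from the repository): because the macro stringises its argument with #type, the C-strings returned by this function change from "Token::Add" to "TokenType::Add" along with the rename.

#include <cstdio>

#define TOKEN_TYPE_CASE(type) case type: return #type;

enum class TokenType : unsigned short { Add, Sub };

static const char* ToCStr(TokenType type)
{
    switch (type)
    {
        // Expands to: case TokenType::Add: return "TokenType::Add"; //
        TOKEN_TYPE_CASE(TokenType::Add);
        TOKEN_TYPE_CASE(TokenType::Sub);
        default: return "UNKNOWN";
    }
}

int main() { std::puts(ToCStr(TokenType::Add)); } // prints TokenType::Add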


@@ -38,7 +38,7 @@ namespace PashaBibko::LXC::Parser
     }

     // Checks if the tokens are the correct types //
-    inline bool Expect(const std::span<const Lexer::Token::TokenType>& tokens) const
+    inline bool Expect(const std::span<const Lexer::TokenType>& tokens) const
     {
         for (int i = 0; i < tokens.size(); i++)
         {
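
A hedged usage sketch of the pattern this signature supports (standalone; the names here are illustrative, not the project's API): a braced std::array of scoped-enum values binds implicitly to std::span<const TokenType>, so existing Expect call sites only needed the Token:: to TokenType:: qualifier change.

#include <array>
#include <cstddef>
#include <span>

enum class TokenType : unsigned short { Identifier, Colon, OpenParen };

// Mirrors the shape of Expect(): compare the upcoming token types against a pattern //
static bool Matches(std::span<const TokenType> stream, std::span<const TokenType> pattern)
{
    if (stream.size() < pattern.size())
        return false;

    for (std::size_t i = 0; i < pattern.size(); i++)
        if (stream[i] != pattern[i])
            return false;

    return true;
}

int main()
{
    std::array stream { TokenType::Identifier, TokenType::Colon, TokenType::OpenParen };
    std::array pattern { TokenType::Identifier, TokenType::Colon };
    return Matches(stream, pattern) ? 0 : 1; // exits with 0 because the pattern matches
}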


@@ -34,12 +34,12 @@ namespace PashaBibko::LXC::Parser
         switch (current->type)
         {
             // Fowards to the ParseIdentifier and returns the result/error //
-            case Lexer::Token::Identifier:
+            case Lexer::TokenType::Identifier:
                 ctx.Advance();
                 return Internal::CreateNodeV<AST::VarAccess>(current->Str());

             // Returns an integer literal node //
-            case Lexer::Token::NumLiteral:
+            case Lexer::TokenType::NumLiteral:
                 ctx.Advance();
                 return Internal::CreateNodeV<AST::IntLiteral>(current->Str());
@@ -52,7 +52,7 @@ namespace PashaBibko::LXC::Parser
     static Util::ReturnVal<AST::NodeValuePtr, ParserError> ParseFunctionCall(ParserContext& ctx)
     {
         // Checks if the upcoming pattern matches a function signature: [Identifier(function name), OpenBracket(Start of params)] //
-        if (ctx.Expect(std::array{ Lexer::Token::Identifier, Lexer::Token::OpenParen }))
+        if (ctx.Expect(std::array{ Lexer::TokenType::Identifier, Lexer::TokenType::OpenParen }))
         {
             // Captures the function name and advances over it and the start paren //
             const Lexer::Token* functionNameToken = ctx.At();
@@ -64,11 +64,11 @@ namespace PashaBibko::LXC::Parser
             while (current != nullptr)
             {
                 // End of the function call //
-                if (current->type == Lexer::Token::CloseParen)
+                if (current->type == Lexer::TokenType::CloseParen)
                     return Internal::CreateNodeV<AST::FunctionCall>(functionNameToken->Str(), arguments);

                 // Checks for seperating comma //
-                if (!ctx.Expect(std::array{ Lexer::Token::Comma }))
+                if (!ctx.Expect(std::array{ Lexer::TokenType::Comma }))
                     return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

                 ctx.Advance();
@@ -116,7 +116,7 @@ namespace PashaBibko::LXC::Parser
         if (at == nullptr)
             return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

-        if (at->type == Lexer::Token::Return)
+        if (at->type == Lexer::TokenType::Return)
         {
             // Iterates over the return token and parses the value to be returned //
             ctx.Advance();
@@ -139,7 +139,7 @@ namespace PashaBibko::LXC::Parser
     static Util::ReturnVal<AST::NodePtr, ParserError> ParseVarDeclaration(ParserContext& ctx)
     {
         // Checks for the pattern of a variable declaration //
-        if (ctx.Expect(std::array{ Lexer::Token::Identifier, Lexer::Token::Colon }))
+        if (ctx.Expect(std::array{ Lexer::TokenType::Identifier, Lexer::TokenType::Colon }))
         {
             // Can safely advance over the pattern (types are not checked/stored yet) //
             ctx.Advance(2);
@@ -149,7 +149,7 @@ namespace PashaBibko::LXC::Parser
         if (varName == nullptr)
             return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

-        if (varName->type != Lexer::Token::Identifier)
+        if (varName->type != Lexer::TokenType::Identifier)
             return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

         // Checks for a default value for the variable //
@@ -157,7 +157,7 @@ namespace PashaBibko::LXC::Parser
         if (varAssign == nullptr)
             return Internal::CreateNode<AST::VarDeclaration>(varNameStr);

-        if (varAssign->type != Lexer::Token::Assign)
+        if (varAssign->type != Lexer::TokenType::Assign)
             return Internal::CreateNode<AST::VarDeclaration>(varNameStr);

         // Creates a node with the default value of the variable //
@@ -182,7 +182,7 @@ namespace PashaBibko::LXC::Parser
     Util::ReturnVal<FunctionAST, ParserError> ParseFunction(ParserContext& ctx)
     {
         // Checks for the sequence of: func<T> funcName( //
-        if (!ctx.Expect(std::array{ Lexer::Token::FunctionDef, Lexer::Token::OpenCrocodile, Lexer::Token::Identifier, Lexer::Token::CloseCrocodile, Lexer::Token::Identifier, Lexer::Token::OpenParen }))
+        if (!ctx.Expect(std::array{ Lexer::TokenType::FunctionDef, Lexer::TokenType::OpenCrocodile, Lexer::TokenType::Identifier, Lexer::TokenType::CloseCrocodile, Lexer::TokenType::Identifier, Lexer::TokenType::OpenParen }))
             return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

         // Assumes int for now so skips over func<T> //
@@ -197,10 +197,10 @@ namespace PashaBibko::LXC::Parser
         if (paramsStart == nullptr)
             return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

-        while (ctx.At()->type != Lexer::Token::CloseParen)
+        while (ctx.At()->type != Lexer::TokenType::CloseParen)
         {
             // Checks for parameter pattern: identifier, identifier //
-            if (!ctx.Expect(std::array{ Lexer::Token::Identifier, Lexer::Token::Colon, Lexer::Token::Identifier }))
+            if (!ctx.Expect(std::array{ Lexer::TokenType::Identifier, Lexer::TokenType::Colon, Lexer::TokenType::Identifier }))
                 return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

             const Lexer::Token* paramType = ctx.At();
@@ -213,7 +213,7 @@ namespace PashaBibko::LXC::Parser
             if (end == nullptr)
                 return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error

-            if (end->type == Lexer::Token::Comma || end->type == Lexer::Token::CloseParen)
+            if (end->type == Lexer::TokenType::Comma || end->type == Lexer::TokenType::CloseParen)
                 continue;

             return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
@@ -226,7 +226,7 @@ namespace PashaBibko::LXC::Parser
         const Lexer::Token* current = ctx.At();
         while (current != nullptr)
         {
-            if (current->type == Lexer::Token::CloseBrace)
+            if (current->type == Lexer::TokenType::CloseBrace)
             {
                 // Advances over closing brace before returning the function //
                 ctx.Advance();
@@ -260,7 +260,7 @@ namespace PashaBibko::LXC::Parser
         switch (current->type)
         {
             // Only functions are currently supported //
-            case Lexer::Token::FunctionDef:
+            case Lexer::TokenType::FunctionDef:
             {
                 // Parses the function and add it to the vector if there are no errors //
                 Util::ReturnVal func = ParseFunction(ctx);


@@ -6,7 +6,7 @@
 // Local util functions //
 namespace PashaBibko::LXC::Internal
 {
-    static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::Token::TokenType>& expected)
+    static void ExpectTokens(const Lexer::LexerOutput& tokens, const std::vector<Lexer::TokenType>& expected)
     {
         size_t length = std::min(tokens.size(), expected.size());
         for (size_t i = 0; i < length; i++)
@@ -29,38 +29,38 @@ namespace PashaBibko::LXC::Lexer
 {
     TEST(LexerTests, ReturnsTrueForMatching)
     {
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Add));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Sub));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(Token::Mul));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(Token::If));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(Token::While));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(Token::StringLiteral));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(Token::NumLiteral));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(Token::CloseBracket));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(Token::Comma));
-        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(Token::End_of_file));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Add));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Sub));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Operator>(TokenType::Mul));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::If));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::While));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::StringLiteral));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::NumLiteral));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::CloseBracket));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::Comma));
+        EXPECT_TRUE(Token::IsTypeClass<TokenClass::Misc>(TokenType::End_of_file));
     }

     TEST(LexerTests, ReturnsFalseForNonMatching)
     {
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::StringLiteral));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::End_of_file));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(Token::If));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(Token::NumLiteral));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(Token::Comma));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(Token::Add));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(Token::CloseBracket));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(Token::While));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(Token::Mul));
-        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(Token::Sub));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::StringLiteral));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::End_of_file));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Operator>(TokenType::If));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::NumLiteral));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Keyword>(TokenType::Comma));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::Add));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::UserDefined>(TokenType::CloseBracket));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::While));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Symbols>(TokenType::Mul));
+        EXPECT_FALSE(Token::IsTypeClass<TokenClass::Misc>(TokenType::Sub));
     }

     TEST(LexerTests, EmptyInput)
@@ -74,35 +74,35 @@ namespace PashaBibko::LXC::Lexer
     {
         Util::ReturnVal result = TokenizeFile("hello");
         ASSERT_TRUE(result.Success());

-        Internal::ExpectTokens(result.Result(), { Token::Identifier });
+        Internal::ExpectTokens(result.Result(), { TokenType::Identifier });
     }

     TEST(LexerTests, SingleNumber)
     {
         Util::ReturnVal result = TokenizeFile("12345");
         ASSERT_TRUE(result.Success());

-        Internal::ExpectTokens(result.Result(), { Token::NumLiteral });
+        Internal::ExpectTokens(result.Result(), { TokenType::NumLiteral });
     }

     TEST(LexerTests, SingleStringLiteral)
     {
         Util::ReturnVal result = TokenizeFile("\"string literal\"");
         ASSERT_TRUE(result.Success());

-        Internal::ExpectTokens(result.Result(), { Token::StringLiteral });
+        Internal::ExpectTokens(result.Result(), { TokenType::StringLiteral });
     }

     TEST(LexerTests, MultipleSymbolsAndOperators)
     {
         Util::ReturnVal result = TokenizeFile("+ = (");
         ASSERT_TRUE(result.Success());

-        Internal::ExpectTokens(result.Result(), { Token::Add, Token::Assign, Token::OpenParen });
+        Internal::ExpectTokens(result.Result(), { TokenType::Add, TokenType::Assign, TokenType::OpenParen });
     }

     TEST(LexerTests, WhileTrueTokenTest)
     {
         Util::ReturnVal result = TokenizeFile("while (true)");
         ASSERT_TRUE(result.Success());

-        Internal::ExpectTokens(result.Result(), { Token::While, Token::OpenParen, Token::Identifier, Token::CloseParen });
+        Internal::ExpectTokens(result.Result(), { TokenType::While, TokenType::OpenParen, TokenType::Identifier, TokenType::CloseParen });
     }

     TEST(LexerTests, UnterminatedString)
@@ -133,61 +133,61 @@ namespace PashaBibko::LXC::Lexer
         Util::ReturnVal tokens = TokenizeFile(fileContents.Result());
         Internal::ExpectTokens(tokens.Result(),
         {
-            Token::FunctionDef, // func
-            Token::OpenCrocodile, // <
-            Token::Identifier, // int
-            Token::CloseCrocodile, // >
-            Token::Identifier, // add
-            Token::OpenParen, // (
-            Token::Identifier, // int
-            Token::Colon, // :
-            Token::Identifier, // a
-            Token::Comma, // ,
-            Token::Identifier, // int
-            Token::Colon, // :
-            Token::Identifier, // b
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::Return, // return
-            Token::Identifier, // a
-            Token::Add, // +
-            Token::Identifier, // b
-            Token::CloseBrace, // }
-            Token::FunctionDef, // func
-            Token::OpenCrocodile, // <
-            Token::Identifier, // int
-            Token::CloseCrocodile, // >
-            Token::Identifier, // main
-            Token::OpenParen, // (
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::Identifier, // int
-            Token::Colon, // :
-            Token::Identifier, // c
-            Token::Assign, // =
-            Token::Identifier, // add
-            Token::OpenParen, // (
-            Token::NumLiteral, // 3
-            Token::Comma, // ,
-            Token::NumLiteral, // 4
-            Token::CloseParen, // )
-            Token::If, // if
-            Token::OpenParen, // (
-            Token::Identifier, // c
-            Token::Eql, // ==
-            Token::NumLiteral, // 7
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::Return, // return
-            Token::NumLiteral, // 0
-            Token::CloseBrace, // }
-            Token::Else, // else
-            Token::OpenBrace, // {
-            Token::Return, // return
-            Token::NumLiteral, // 1
-            Token::CloseBrace, // }
-            Token::CloseBrace // }
+            TokenType::FunctionDef, // func
+            TokenType::OpenCrocodile, // <
+            TokenType::Identifier, // int
+            TokenType::CloseCrocodile, // >
+            TokenType::Identifier, // add
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // int
+            TokenType::Colon, // :
+            TokenType::Identifier, // a
+            TokenType::Comma, // ,
+            TokenType::Identifier, // int
+            TokenType::Colon, // :
+            TokenType::Identifier, // b
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::Return, // return
+            TokenType::Identifier, // a
+            TokenType::Add, // +
+            TokenType::Identifier, // b
+            TokenType::CloseBrace, // }
+            TokenType::FunctionDef, // func
+            TokenType::OpenCrocodile, // <
+            TokenType::Identifier, // int
+            TokenType::CloseCrocodile, // >
+            TokenType::Identifier, // main
+            TokenType::OpenParen, // (
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::Identifier, // int
+            TokenType::Colon, // :
+            TokenType::Identifier, // c
+            TokenType::Assign, // =
+            TokenType::Identifier, // add
+            TokenType::OpenParen, // (
+            TokenType::NumLiteral, // 3
+            TokenType::Comma, // ,
+            TokenType::NumLiteral, // 4
+            TokenType::CloseParen, // )
+            TokenType::If, // if
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // c
+            TokenType::Eql, // ==
+            TokenType::NumLiteral, // 7
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::Return, // return
+            TokenType::NumLiteral, // 0
+            TokenType::CloseBrace, // }
+            TokenType::Else, // else
+            TokenType::OpenBrace, // {
+            TokenType::Return, // return
+            TokenType::NumLiteral, // 1
+            TokenType::CloseBrace, // }
+            TokenType::CloseBrace // }
         });
     }
@@ -198,80 +198,80 @@ namespace PashaBibko::LXC::Lexer
         Util::ReturnVal tokens = TokenizeFile(fileContents.Result());
         Internal::ExpectTokens(tokens.Result(),
         {
-            Token::FunctionDef, // func
-            Token::OpenCrocodile, // <
-            Token::Identifier, // int
-            Token::CloseCrocodile, // >
-            Token::Identifier, // fib
-            Token::OpenParen, // (
-            Token::Identifier, // int
-            Token::Colon, // :
-            Token::Identifier, // num
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::If, // if
-            Token::OpenParen, // (
-            Token::Identifier, // n
-            Token::Eql, // ==
-            Token::NumLiteral, // 0
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::Return, // return
-            Token::NumLiteral, // 0
-            Token::CloseBrace, // }
-            Token::If, // if
-            Token::OpenParen, // (
-            Token::Identifier, // n
-            Token::Eql, // ==
-            Token::NumLiteral, // 1
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::Return, // return
-            Token::NumLiteral, // 1
-            Token::CloseBrace, // }
-            Token::Return, // return
-            Token::Identifier, // fib
-            Token::OpenParen, // (
-            Token::Identifier, // n
-            Token::Sub, // -
-            Token::NumLiteral, // 1
-            Token::CloseParen, // )
-            Token::Add, // +
-            Token::Identifier, // fib
-            Token::OpenParen, // (
-            Token::Identifier, // n
-            Token::Sub, // -
-            Token::NumLiteral, // 2
-            Token::CloseParen, // )
-            Token::CloseBrace, // }
-            Token::FunctionDef, // func
-            Token::OpenCrocodile, // <
-            Token::Identifier, // int
-            Token::CloseCrocodile, // >
-            Token::Identifier, // main
-            Token::OpenParen, // (
-            Token::CloseParen, // )
-            Token::OpenBrace, // {
-            Token::Identifier, // int
-            Token::Colon, // :
-            Token::Identifier, // res
-            Token::Assign, // =
-            Token::Identifier, // fib
-            Token::OpenParen, // (
-            Token::NumLiteral, // 8
-            Token::CloseParen, // )
-            Token::Return, // return
-            Token::Identifier, // res
-            Token::Eql, // ==
-            Token::NumLiteral, // 21
-            Token::CloseBrace // }
+            TokenType::FunctionDef, // func
+            TokenType::OpenCrocodile, // <
+            TokenType::Identifier, // int
+            TokenType::CloseCrocodile, // >
+            TokenType::Identifier, // fib
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // int
+            TokenType::Colon, // :
+            TokenType::Identifier, // num
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::If, // if
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // n
+            TokenType::Eql, // ==
+            TokenType::NumLiteral, // 0
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::Return, // return
+            TokenType::NumLiteral, // 0
+            TokenType::CloseBrace, // }
+            TokenType::If, // if
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // n
+            TokenType::Eql, // ==
+            TokenType::NumLiteral, // 1
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::Return, // return
+            TokenType::NumLiteral, // 1
+            TokenType::CloseBrace, // }
+            TokenType::Return, // return
+            TokenType::Identifier, // fib
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // n
+            TokenType::Sub, // -
+            TokenType::NumLiteral, // 1
+            TokenType::CloseParen, // )
+            TokenType::Add, // +
+            TokenType::Identifier, // fib
+            TokenType::OpenParen, // (
+            TokenType::Identifier, // n
+            TokenType::Sub, // -
+            TokenType::NumLiteral, // 2
+            TokenType::CloseParen, // )
+            TokenType::CloseBrace, // }
+            TokenType::FunctionDef, // func
+            TokenType::OpenCrocodile, // <
+            TokenType::Identifier, // int
+            TokenType::CloseCrocodile, // >
+            TokenType::Identifier, // main
+            TokenType::OpenParen, // (
+            TokenType::CloseParen, // )
+            TokenType::OpenBrace, // {
+            TokenType::Identifier, // int
+            TokenType::Colon, // :
+            TokenType::Identifier, // res
+            TokenType::Assign, // =
+            TokenType::Identifier, // fib
+            TokenType::OpenParen, // (
+            TokenType::NumLiteral, // 8
+            TokenType::CloseParen, // )
+            TokenType::Return, // return
+            TokenType::Identifier, // res
+            TokenType::Eql, // ==
+            TokenType::NumLiteral, // 21
+            TokenType::CloseBrace // }
         });
     }
 }


@@ -15,7 +15,7 @@ namespace PashaBibko::LXC::Parser
         ParserContext ctx(tokens.Result());

         ASSERT_FALSE(ctx.At() == nullptr);
-        EXPECT_TRUE(ctx.At()->type == Lexer::Token::NumLiteral);
+        EXPECT_TRUE(ctx.At()->type == Lexer::TokenType::NumLiteral);
         EXPECT_STREQ(ctx.At()->Str(), "576");
         EXPECT_TRUE(ctx.InBounds());
     }
@@ -40,7 +40,7 @@ namespace PashaBibko::LXC::Parser
         ParserContext ctx(tokens.Result());

         ASSERT_FALSE(ctx.Peek() == nullptr);
-        EXPECT_TRUE(ctx.Peek()->type == Lexer::Token::Identifier);
+        EXPECT_TRUE(ctx.Peek()->type == Lexer::TokenType::Identifier);
         EXPECT_STREQ(ctx.Peek()->Str(), "hello");
     }
@@ -59,18 +59,18 @@ namespace PashaBibko::LXC::Parser
         ParserContext ctx(tokens.Result());

-        static const Lexer::Token::TokenType results[] =
+        static const Lexer::TokenType results[] =
         {
-            Lexer::Token::OpenCrocodile,
-            Lexer::Token::Identifier,
-            Lexer::Token::CloseCrocodile,
-            Lexer::Token::Identifier,
-            Lexer::Token::OpenParen,
-            Lexer::Token::CloseParen,
-            Lexer::Token::OpenBrace,
-            Lexer::Token::Return,
-            Lexer::Token::NumLiteral,
-            Lexer::Token::CloseBrace
+            Lexer::TokenType::OpenCrocodile,
+            Lexer::TokenType::Identifier,
+            Lexer::TokenType::CloseCrocodile,
+            Lexer::TokenType::Identifier,
+            Lexer::TokenType::OpenParen,
+            Lexer::TokenType::CloseParen,
+            Lexer::TokenType::OpenBrace,
+            Lexer::TokenType::Return,
+            Lexer::TokenType::NumLiteral,
+            Lexer::TokenType::CloseBrace
         };

         std::size_t resultLength = tokens.Result().size();
@@ -92,24 +92,24 @@ namespace PashaBibko::LXC::Parser
         ParserContext ctx(tokens.Result());

         ASSERT_TRUE(ctx.Expect(std::array
         {
-            Lexer::Token::FunctionDef,
-            Lexer::Token::OpenCrocodile,
-            Lexer::Token::Identifier,
-            Lexer::Token::CloseCrocodile,
-            Lexer::Token::Identifier,
-            Lexer::Token::OpenParen,
-            Lexer::Token::CloseParen,
-            Lexer::Token::OpenBrace,
-            Lexer::Token::Return,
-            Lexer::Token::NumLiteral,
-            Lexer::Token::CloseBrace
+            Lexer::TokenType::FunctionDef,
+            Lexer::TokenType::OpenCrocodile,
+            Lexer::TokenType::Identifier,
+            Lexer::TokenType::CloseCrocodile,
+            Lexer::TokenType::Identifier,
+            Lexer::TokenType::OpenParen,
+            Lexer::TokenType::CloseParen,
+            Lexer::TokenType::OpenBrace,
+            Lexer::TokenType::Return,
+            Lexer::TokenType::NumLiteral,
+            Lexer::TokenType::CloseBrace
         }));

         ASSERT_FALSE(ctx.Expect(std::array
         {
-            Lexer::Token::FunctionDef,
-            Lexer::Token::OpenCrocodile,
-            Lexer::Token::NumLiteral
+            Lexer::TokenType::FunctionDef,
+            Lexer::TokenType::OpenCrocodile,
+            Lexer::TokenType::NumLiteral
         }));
     }
 }