Started implementation of parser

This commit is contained in:
Pasha Bibko
2025-07-24 17:49:23 +01:00
parent 28049ece94
commit 90a958a30b
3 changed files with 94 additions and 8 deletions

View File

@@ -7,7 +7,7 @@
namespace LXC::Util
{
// Util function to stop/ the program //
// Util function to stop the program //
inline void Stop()
{
// Only checks for a debugger when compiled in Debug mode //
@@ -64,10 +64,15 @@ namespace LXC::Util
{
public:
// Constructor for function success //
ReturnVal(ResultType result)
ReturnVal(const ResultType& result)
: m_Result(result), m_FunctionFailed(false)
{}
// Move constructor (for large objects) //
ReturnVal(ResultType&& result)
: m_Result(std::move(result)), m_FunctionFailed(false)
{}
// Constructor for function fail //
ReturnVal(FunctionFail<ErrorType> error)
: m_Error(error.error), m_FunctionFailed(true)

View File

@@ -11,6 +11,14 @@ namespace LXC::Parser
struct FunctionAST
{
FunctionAST() :
name{}, contents{}
{}
FunctionAST(FunctionAST&& other) :
name{}, contents{}
{}
std::string name;
AST::SyntaxBranch contents;
};

View File

@@ -4,13 +4,86 @@
namespace LXC::Parser
{
Util::ReturnVal<std::vector<FunctionAST>, ParserError> TurnTokensIntoAST(const Lexer::LexerOutput& input)
// Cursor over the lexer's token stream plus the parser's accumulated output. //
// Holds a reference to the tokens, so the LexerOutput must outlive the context. //
class ParserContext final
{
public:
    // Sets the information of the context //
    // (init list matches member declaration order to avoid -Wreorder) //
    ParserContext(const Lexer::LexerOutput& inputTokens) :
        index(0), input(inputTokens), length(inputTokens.size())
    {}
    // Returns a pointer to the token x-distance away (default 1); Peek(0) is the current token //
    // Returns nullptr instead of reading past the end of the stream //
    inline const Lexer::Token* Peek(size_t peekDistance = 1) const
    {
        // Checks if at the end of the tokens before fetching to stop errors //
        size_t peekIndex = index + peekDistance;
        if (peekIndex >= length)
            return nullptr;
        return &input[peekIndex];
    }
    // Advances x-distance in the tokens (default 1) and returns the new current token //
    // Returns nullptr once the index has moved past the end //
    inline const Lexer::Token* Next(size_t distance = 1)
    {
        // Adds the distance and checks if in bounds to stop read errors //
        index += distance;
        if (index >= length)
            return nullptr;
        return &input[index];
    }
    // Is the current index in bounds //
    inline bool InBounds() const { return index < length; }
    // Output: the functions parsed so far (moved out by the caller when done) //
    std::vector<FunctionAST> output;
private:
    size_t index;                    // Current position in the token stream //
    const Lexer::LexerOutput& input; // Borrowed token stream (must outlive the context) //
    const size_t length;             // Cached token count //
};
// Parses a single function definition starting at the context's current token. //
// Currently a stub: dumps the remaining tokens and reports failure. //
// BUG FIX: the old body iterated `input`, which is not in scope here (it was a //
// parameter of the old TurnTokensIntoAST) - walk the tokens through ctx instead. //
// NOTE(review): ctx is taken by const& so this cannot advance the stream; the //
// caller must advance past whatever this consumes once it is implemented. //
static Util::ReturnVal<void*, ParserError> ParseFunction(const ParserContext& ctx)
{
    // Peek(offset) returns nullptr at the end of the stream, bounding the loop //
    for (size_t offset = 0; const Lexer::Token* token = ctx.Peek(offset); ++offset)
    {
        Util::PrintLn(*token);
    }
    // Function parsing is not implemented yet, so always fail //
    return Util::FunctionFail<ParserError>();
}
// Turns the lexer's token stream into a list of function ASTs, or a ParserError //
// on the first unsupported or malformed token. //
Util::ReturnVal<std::vector<FunctionAST>, ParserError> TurnTokensIntoAST(const Lexer::LexerOutput& input)
{
    // Creates the context of the Parser from the tokens and the output of the function //
    ParserContext ctx(input);
    // Continues whilst in the bounds of the tokens //
    while (ctx.InBounds())
    {
        // BUG FIX: Peek() defaults to distance 1, which skipped the first token and //
        // returned nullptr at the last one (nullptr deref below). Peek(0) is the //
        // current token and is guaranteed non-null while InBounds() holds. //
        const Lexer::Token* current = ctx.Peek(0);
        // Switches over the type of the current token to run the correct logic //
        switch (current->type)
        {
            // Only functions are currently supported //
            case Lexer::Token::FunctionDef:
            {
                // Parses the function and propagates the inner error on failure //
                Util::ReturnVal func = ParseFunction(ctx);
                if (func.Failed())
                    return Util::FunctionFail<ParserError>(func.Error());
                // BUG FIX: ParseFunction takes ctx by const& and cannot advance the //
                // stream, so advance here to guarantee forward progress (the old loop //
                // never moved the index and would spin forever once parsing succeeds) //
                ctx.Next();
                break;
            }
            // The rest return a function-fail //
            default:
                return Util::FunctionFail<ParserError>(); // <- TODO: Make an actual error
        }
    }
    // Returns the output as it has gone through all the tokens //
    return std::move(ctx.output); // Moves instead of copying, (required due to std::unique_ptr<>)
}
}