Added foundation for Lexer
This commit is contained in:
@@ -2,7 +2,7 @@
|
||||
|
||||
// Standard libraries //
|
||||
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
// LXC util files //
|
||||
|
||||
|
||||
@@ -9,7 +9,9 @@ namespace LXC::Util
|
||||
// Wraps the error value produced by a failed function call so callers //
// can retrieve it from a ReturnVal-style result.                      //
template<typename ErrorType> struct FunctionFail
{
    // Stores the given error value; explicit so an ErrorType never //
    // silently converts into a failure result.                     //
    explicit FunctionFail(ErrorType _err)
        : error(_err)
    {
    }

    // The error describing why the call failed //
    ErrorType error;
};
|
||||
|
||||
10
LXC/LXC.cpp
10
LXC/LXC.cpp
@@ -7,7 +7,7 @@ int main(int argc, char** argv)
|
||||
using namespace LXC;
|
||||
|
||||
// Reads the given file to a string //
|
||||
Util::ReturnVal fileContents = Util::ReadFile("example/example.lx");
|
||||
Util::ReturnVal fileContents = Util::ReadFile("example/example.jk.lx");
|
||||
if (fileContents.Failed())
|
||||
{
|
||||
// Stores the error for easier access //
|
||||
@@ -26,5 +26,13 @@ int main(int argc, char** argv)
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Turns the file contents into a vector of tokens //
|
||||
Util::ReturnVal tokens = Lexer::TokenizeFile(fileContents);
|
||||
if (tokens.Failed())
|
||||
{
|
||||
// Returns with default error code //
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -6,12 +6,24 @@ namespace LXC::Lexer
|
||||
{
|
||||
struct LexerContext
|
||||
{
|
||||
// Constructor to set the information of the context //
|
||||
LexerContext(const std::string& _source);
|
||||
|
||||
// Trackers for the Lexer itself //
|
||||
std::string source;
|
||||
const std::string& source;
|
||||
size_t index;
|
||||
|
||||
LexerOutput out;
|
||||
const size_t len;
|
||||
|
||||
// Trackers for where the Lexer is within the user version of source //
|
||||
unsigned short column;
|
||||
unsigned short line;
|
||||
};
|
||||
|
||||
struct LexerError
|
||||
{};
|
||||
|
||||
// Turns a file into a vector of tokens //
|
||||
Util::ReturnVal<LexerOutput, LexerError> TokenizeFile(const std::string& fileContents);
|
||||
}
|
||||
|
||||
@@ -112,4 +112,7 @@ namespace LXC::Lexer
|
||||
// The data of the token //
|
||||
const char* contents;
|
||||
};
|
||||
|
||||
// Typedef for the output type of how the Lexer outputs //
|
||||
typedef std::vector<Token> LexerOutput;
|
||||
}
|
||||
|
||||
@@ -5,4 +5,22 @@
|
||||
|
||||
namespace LXC::Lexer
|
||||
{
|
||||
LexerContext::LexerContext(const std::string& _source) :
|
||||
source(_source), index(0), out{}, len(_source.length()), column(0), line(0)
|
||||
{}
|
||||
|
||||
Util::ReturnVal<LexerOutput, LexerError> TokenizeFile(const std::string& fileContents)
|
||||
{
|
||||
// Creates the context for the lexer //
|
||||
LexerContext context(fileContents);
|
||||
|
||||
while (context.index > context.len)
|
||||
{
|
||||
// Iterates to the next index //
|
||||
context.column++;
|
||||
context.index++;
|
||||
}
|
||||
|
||||
return context.out;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user