start on lexer

This commit is contained in:
SimonFJ20 2023-01-09 21:00:16 +01:00
parent ca6fde3c0b
commit a023474bdb
3 changed files with 103 additions and 0 deletions

14
markup/lexer.cpp Normal file
View File

@@ -0,0 +1,14 @@
#include "lexer.hpp"
#include "result.hpp"
#include <cctype>
#include <string_view>
namespace markup {

/// Advance the lexer and produce the next token.
///
/// Only the end-of-input case is implemented so far: at end of input it
/// yields an Eof token anchored at the current index.  All other input
/// currently falls through to a default (error) Result until the
/// remaining token rules are written.
///
/// NOTE(review): this member is declared `constexpr` but defined in a
/// .cpp file, so its definition is not visible to other translation
/// units that might need it at compile time — confirm whether it should
/// live in the header instead.
auto constexpr Lexer::next() noexcept -> Result<Token, void>
{
    if (done())
        return token(TokenTypes::Eof, index);
    // TODO: dispatch on current() to whitespace / comment / number /
    // name / string / punctuation rules.  Returning a default Result
    // here avoids falling off the end of a non-void function, which is
    // undefined behavior; `peek()` uses the same `return {};` idiom for
    // its error case.
    return {};
}

}

88
markup/lexer.hpp Normal file
View File

@@ -0,0 +1,88 @@
#pragma once
#include "utils.hpp"
#include <optional>
#include <string_view>
namespace markup {
// Kinds of tokens the markup lexer can produce.
enum class TokenTypes {
Eof, // end of input
Whitespace,
MultilineComment,
SinglelineComment,
Name, // bare identifier
Int,
Float,
String,
Id, // Example = `#my_id`
Class, // Example = `.my_class`
True,
False,
Null,
LBrace, // `{`
RBrace, // `}`
Comma,
Equal,
};
// A single lexed token: its kind plus where it sits in the source text.
struct Token {
TokenTypes type;
size_t index, length; // byte offset into the input and span length
int line, column; // 1-based position (see Lexer's counters)
};
/// Hand-written lexer over a borrowed string_view.  The caller must keep
/// the viewed text alive for the lifetime of the Lexer (string_view does
/// not own its data).
class Lexer final {
public:
    Lexer(std::string_view text)
        : text { text }
    { }

    /// Produce the next token; defined out-of-line (lexer.cpp).
    auto constexpr next() noexcept -> Result<Token, void>;

    /// Return the most recently produced token without advancing.
    /// Yields a default (error) Result if next() has not yet been called.
    auto peek() noexcept -> Result<Token, void>
    {
        if (last_token)
            return Result<Token, void>::create_ok(*last_token);
        return {};
    }

private:
    auto constexpr make_number() noexcept -> Result<Token, void>;
    auto constexpr make_id() noexcept -> Result<Token, void>;

    /// Build a token spanning [begin, index) at the current line/column
    /// and remember it so peek() can return it again.
    [[nodiscard]] auto constexpr inline token(
        TokenTypes type, size_t begin) noexcept -> Token
    {
        auto token = Token { type, begin, index - begin, line, column };
        last_token = token;
        return token;
    }

    /// True once the whole input has been consumed.
    [[nodiscard]] auto constexpr inline done() const noexcept -> bool
    {
        return index >= text.size();
    }

    /// Character under the cursor.  Only valid when !done(): string_view's
    /// at() throws on out-of-range, which inside this noexcept function
    /// would terminate — every caller must check done() first.
    [[nodiscard]] auto constexpr inline current() const noexcept -> char
    {
        return text.at(index);
    }

    /// Advance one character while maintaining the 1-based line/column
    /// counters.  The character being stepped OVER is examined before
    /// advancing: consuming a '\n' moves the cursor to column 1 of the
    /// next line.  (The previous version tested the character at the NEW
    /// index after advancing, which reset the column while standing on
    /// the newline itself and gave every first character after a newline
    /// column 2 instead of 1.)
    auto constexpr inline step() noexcept -> void
    {
        if (done())
            return;
        if (text.at(index) == '\n') {
            line++;
            column = 1;
        } else {
            column++;
        }
        index++;
    }

    std::string_view text;
    size_t index = 0;
    int line = 1;   // 1-based
    int column = 1; // 1-based
    std::optional<Token> last_token;
};
}

View File

@@ -1,6 +1,7 @@
markup_sources = files(
'parser.cpp',
'lexer.cpp',
)
markup_inc = include_directories('.')