Add length field to LexedToken

Josh Holtrop 2021-07-06 11:59:35 -04:00
parent d9e4f64d2e
commit 3158e51059
2 changed files with 11 additions and 9 deletions

@@ -132,6 +132,7 @@ class <%= classname %>
 {
     size_t row;
     size_t col;
+    size_t length;
     uint token;
 }
@@ -161,7 +162,7 @@ class <%= classname %>
 private LexedToken attempt_lex_token()
 {
-    LexedToken lt = LexedToken(m_input_row, m_input_col, TOKEN_NONE);
+    LexedToken lt = LexedToken(m_input_row, m_input_col, 0, TOKEN_NONE);
     struct LexedTokenState
     {
         size_t length;
@@ -216,6 +217,7 @@ class <%= classname %>
 if (last_accepts_info.token != TOKEN_NONE)
 {
     lt.token = last_accepts_info.token;
+    lt.length = last_accepts_info.length;
     m_input_position += last_accepts_info.length;
     m_input_row += last_accepts_info.delta_row;
     if (last_accepts_info.delta_row != 0u)
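To show what the new field buys a caller, here is a minimal caller-side sketch, not part of the generated template, reusing the Testparser.Lexer names from the unit test below. It assumes a single-line ASCII input so that col doubles as the token's byte offset, which lets length slice the matched text back out of the source:

// Hypothetical caller-side sketch; only the Testparser.Lexer API shown
// in this commit is assumed. With a single-line ASCII input, col is the
// token's byte offset, so col + length slices out the matched text.
unittest
{
    string input = "677 + 567";
    Testparser.Lexer lexer = new Testparser.Lexer(cast(const(ubyte) *)input.ptr, input.length);
    auto lt = lexer.lex_token();
    assert(lt.token == Testparser.TOKEN_INT);
    assert(lt.length == 3);
    assert(input[lt.col .. lt.col + lt.length] == "677");
}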

@@ -52,12 +52,12 @@ unittest
     alias LT = Testparser.Lexer.LexedToken;
     string input = "5 + 4 * \n677 + 567";
     Testparser.Lexer lexer = new Testparser.Lexer(cast(const(ubyte) *)input.ptr, input.length);
-    assert(lexer.lex_token() == LT(0, 0, Testparser.TOKEN_INT));
-    assert(lexer.lex_token() == LT(0, 2, Testparser.TOKEN_PLUS));
-    assert(lexer.lex_token() == LT(0, 4, Testparser.TOKEN_INT));
-    assert(lexer.lex_token() == LT(0, 6, Testparser.TOKEN_TIMES));
-    assert(lexer.lex_token() == LT(1, 0, Testparser.TOKEN_INT));
-    assert(lexer.lex_token() == LT(1, 4, Testparser.TOKEN_PLUS));
-    assert(lexer.lex_token() == LT(1, 6, Testparser.TOKEN_INT));
-    assert(lexer.lex_token() == LT(1, 9, Testparser.TOKEN_EOF));
+    assert(lexer.lex_token() == LT(0, 0, 1, Testparser.TOKEN_INT));
+    assert(lexer.lex_token() == LT(0, 2, 1, Testparser.TOKEN_PLUS));
+    assert(lexer.lex_token() == LT(0, 4, 1, Testparser.TOKEN_INT));
+    assert(lexer.lex_token() == LT(0, 6, 1, Testparser.TOKEN_TIMES));
+    assert(lexer.lex_token() == LT(1, 0, 3, Testparser.TOKEN_INT));
+    assert(lexer.lex_token() == LT(1, 4, 1, Testparser.TOKEN_PLUS));
+    assert(lexer.lex_token() == LT(1, 6, 3, Testparser.TOKEN_INT));
+    assert(lexer.lex_token() == LT(1, 9, 0, Testparser.TOKEN_EOF));
 }
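One consequence worth noting from the updated assertions: TOKEN_EOF is reported with length 0, so the new field also gives callers a clean stopping condition. A small illustrative loop in the same style as the spec above (the loop is a sketch, not part of the test suite):

// Drive the lexer to EOF, counting tokens; EOF consumes no input and
// carries length 0, so it serves as the natural termination marker.
unittest
{
    string input = "5 + 4 * \n677 + 567";
    Testparser.Lexer lexer = new Testparser.Lexer(cast(const(ubyte) *)input.ptr, input.length);
    size_t n_tokens = 0;
    for (;;)
    {
        auto lt = lexer.lex_token();
        if (lt.token == Testparser.TOKEN_EOF)
        {
            assert(lt.length == 0);
            break;
        }
        n_tokens++;
    }
    assert(n_tokens == 7); // INT PLUS INT TIMES INT PLUS INT, as asserted above
}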