Use P_SUCCESS instead of P_TOKEN

Josh Holtrop 2023-07-11 15:43:15 -04:00
parent eee6513384
commit 779ba052c1
2 changed files with 14 additions and 15 deletions


@@ -16,7 +16,6 @@ public enum : size_t
P_DECODE_ERROR,
P_UNEXPECTED_INPUT,
P_UNEXPECTED_TOKEN,
- P_TOKEN,
P_DROP,
P_EOF,
}
@@ -242,7 +241,7 @@ public static class Lexer
* Lex the next token in the input stream.
*
* Returns one of:
- * - P_TOKEN
+ * - P_SUCCESS
* - P_DECODE_ERROR
* - P_UNEXPECTED_INPUT
*/
@@ -289,7 +288,7 @@ public static class Lexer
* Attempt to lex the next token in the input stream.
*
* Returns one of:
- * - P_TOKEN
+ * - P_SUCCESS
* - P_DECODE_ERROR
* - P_UNEXPECTED_INPUT
* - P_DROP
@@ -338,12 +337,12 @@ public static class Lexer
token_info.token = token_to_accept;
token_info.length = match_info.length;
*out_token_info = token_info;
- return P_TOKEN;
+ return P_SUCCESS;
case P_EOF:
token_info.token = TOKEN___EOF;
*out_token_info = token_info;
- return P_TOKEN;
+ return P_SUCCESS;
default:
return result;
@@ -544,7 +543,7 @@ public static class Parser
if (token == INVALID_TOKEN_ID)
{
size_t lexer_result = m_lexer.lex_token(&token_info);
- if (lexer_result != P_TOKEN)
+ if (lexer_result != P_SUCCESS)
{
return lexer_result;
}
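
With P_TOKEN removed from the enum, every call site now compares the lexer's return value against P_SUCCESS, which the enum already provides. A minimal caller sketch under that convention (not part of this commit; it only assumes the Lexer, TokenInfo, lex_token, P_* and TOKEN___EOF symbols shown in the diff above, with the function name and printing purely illustrative):

import std.stdio;

// Sketch only: Lexer, TokenInfo, lex_token(), P_SUCCESS, P_DECODE_ERROR,
// P_UNEXPECTED_INPUT, and TOKEN___EOF are taken from the diff above; the
// rest is illustrative.
bool lex_all(string input)
{
    Lexer lexer = new Lexer(input);
    TokenInfo token_info;
    for (;;)
    {
        size_t result = lexer.lex_token(&token_info);
        if (result != P_SUCCESS)
        {
            // Per the doc comment above, this is P_DECODE_ERROR or
            // P_UNEXPECTED_INPUT.
            writeln("lex error, code ", result);
            return false;
        }
        if (token_info.token == TOKEN___EOF)
        {
            return true;
        }
        writeln("token ", token_info.token, ", length ", token_info.length);
    }
}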


@@ -45,24 +45,24 @@ unittest
TokenInfo token_info;
string input = "5 + 4 * \n677 + 567";
Lexer lexer = new Lexer(input);
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(0, 0), 1, TOKEN_int));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(0, 2), 1, TOKEN_plus));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(0, 4), 1, TOKEN_int));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(0, 6), 1, TOKEN_times));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(1, 0), 3, TOKEN_int));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(1, 4), 1, TOKEN_plus));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(1, 6), 3, TOKEN_int));
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(1, 9), 0, TOKEN___EOF));
lexer = new Lexer("");
- assert(lexer.lex_token(&token_info) == P_TOKEN);
+ assert(lexer.lex_token(&token_info) == P_SUCCESS);
assert(token_info == TokenInfo(Position(0, 0), 0, TOKEN___EOF));
}
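
The unittest above only exercises successful lexes. A hedged negative-path sketch in the same style, under the assumption (not confirmed by this commit) that the example grammar defines no token whose pattern matches "@":

unittest
{
    // Assumption: "@" matches no token pattern in this grammar, so lex_token
    // should return one of the error codes listed in its doc comment
    // (presumably P_UNEXPECTED_INPUT) rather than P_SUCCESS.
    TokenInfo token_info;
    Lexer lexer = new Lexer("@");
    assert(lexer.lex_token(&token_info) != P_SUCCESS);
}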