Josh Holtrop 2023-08-20 16:13:18 -04:00
parent 4ffdea07bb
commit 2b515e1a7a


@@ -666,6 +666,28 @@ void state_values_stack_init(state_values_stack_t * stack)
stack->entries = (state_value_t *)malloc(initial_capacity * sizeof(state_value_t));
}
/**
* Index a state_values stack.
*
* @param stack
* state_values stack structure.
* @param index
* Index into the stack. A negative index counts back from the top of the stack (-1 refers to the most recently pushed entry).
*
* @return Pointer to the state value structure at the given index.
*/
state_value_t * state_values_stack_index(state_values_stack_t * stack, int index)
{
if (index >= 0)
{
return &stack->entries[index];
}
else
{
return &stack->entries[stack->length - (size_t)(unsigned int)(-index)];
}
}
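As a quick illustration of the negative-index behaviour (a hedged sketch using only the state_values_stack_* helpers added in this commit; the 5u state id is a placeholder value), an index of -1 resolves to the most recently pushed entry, which is what the parse loop below uses in place of the old D expression statevalues[$-1]:

    /* After one push, length == 1, so index 0 and index -1 name the same entry. */
    state_values_stack_t stack;
    state_values_stack_init(&stack);
    state_values_stack_push(&stack);
    state_values_stack_index(&stack, 0)->state_id = 5u;
    state_value_t * top = state_values_stack_index(&stack, -1); /* same entry as index 0 */
    state_values_stack_free(&stack);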
/**
* Push a new state_value to the state_values stack.
*
@@ -689,6 +711,30 @@ void state_values_stack_push(state_values_stack_t * stack)
stack->length = current_length + 1u;
}
/**
* Pop entries from a state_values stack.
*
* @param stack
* state_values stack structure.
* @param n
* Number of states to pop.
*/
void state_values_stack_pop(state_values_stack_t * stack, size_t n)
{
stack->length -= n;
}
/**
* Free memory for a state_values stack structure.
*
* @param stack
* state_values stack structure.
*/
void state_values_stack_free(state_values_stack_t * stack)
{
free(stack->entries);
}
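Taken together, these helpers give the generated parser an explicit stack lifecycle: init before use, push/index/pop while parsing, free when done. A minimal, hedged sketch of that call order (the 0u state id is a placeholder); note that state_values_stack_pop only decrements the length, so the backing storage stays allocated until state_values_stack_free is called:

    state_values_stack_t statevalues;
    state_values_stack_init(&statevalues);                     /* allocate backing storage */
    state_values_stack_push(&statevalues);                     /* grow by one entry */
    state_values_stack_index(&statevalues, -1)->state_id = 0u; /* write the top entry */
    state_values_stack_pop(&statevalues, 1u);                  /* drop it; memory stays allocated */
    state_values_stack_free(&statevalues);                     /* release backing storage */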
/**
* Execute user code associated with a parser rule.
*
@@ -696,7 +742,7 @@ void state_values_stack_push(state_values_stack_t * stack)
*
* @return Parse value.
*/
static <%= @grammar.prefix %>value_t parser_user_code(uint32_t rule, state_value_t[] statevalues, uint32_t n_states)
static <%= @grammar.prefix %>value_t parser_user_code(uint32_t rule, state_values_stack_t * statevalues, uint32_t n_states)
{
<%= @grammar.prefix %>value_t _pvalue;
@@ -785,9 +831,12 @@ size_t <%= @grammar.prefix %>parse(<%= @grammar.prefix %>context_t * context)
{
<%= @grammar.prefix %>token_info_t token_info;
<%= @grammar.prefix %>token_t token = INVALID_TOKEN_ID;
state_value_t[] statevalues = new state_value_t[](1);
state_values_stack_t statevalues;
size_t reduced_rule_set = INVALID_ID;
<%= @grammar.prefix %>value_t reduced_parser_value;
state_values_stack_init(&statevalues);
state_values_stack_push(&statevalues);
size_t result;
for (;;)
{
if (token == INVALID_TOKEN_ID)
@@ -795,39 +844,42 @@ size_t <%= @grammar.prefix %>parse(<%= @grammar.prefix %>context_t * context)
size_t lexer_result = <%= @grammar.prefix %>lex(context, &token_info);
if (lexer_result != P_SUCCESS)
{
return lexer_result;
result = lexer_result;
break;
}
token = token_info.token;
}
size_t shift_state = INVALID_ID;
if (reduced_rule_set != INVALID_ID)
{
shift_state = check_shift(statevalues[$-1].state_id, reduced_rule_set);
shift_state = check_shift(state_values_stack_index(&statevalues, -1)->state_id, reduced_rule_set);
}
if (shift_state == INVALID_ID)
{
shift_state = check_shift(statevalues[$-1].state_id, token);
shift_state = check_shift(state_values_stack_index(&statevalues, -1)->state_id, token);
if ((shift_state != INVALID_ID) && (token == TOKEN___EOF))
{
/* Successful parse. */
context.parse_result = statevalues[$-1].pvalue;
return P_SUCCESS;
context.parse_result = state_values_stack_index(&statevalues, -1)->pvalue;
result = P_SUCCESS;
break;
}
}
if (shift_state != INVALID_ID)
{
/* We have something to shift. */
statevalues ~= state_value_t(shift_state);
state_values_stack_push(&statevalues);
state_values_stack_index(&statevalues, -1)->state_id = shift_state;
if (reduced_rule_set == INVALID_ID)
{
/* We shifted a token, mark it consumed. */
token = INVALID_TOKEN_ID;
statevalues[$-1].pvalue = token_info.pvalue;
state_values_stack_index(&statevalues, -1)->pvalue = token_info.pvalue;
}
else
{
/* We shifted a RuleSet. */
statevalues[$-1].pvalue = reduced_parser_value;
state_values_stack_index(&statevalues, -1)->pvalue = reduced_parser_value;
<%= @grammar.prefix %>value_t new_parse_result;
reduced_parser_value = new_parse_result;
reduced_rule_set = INVALID_ID;
@@ -835,13 +887,13 @@ size_t <%= @grammar.prefix %>parse(<%= @grammar.prefix %>context_t * context)
continue;
}
size_t reduce_index = check_reduce(statevalues[$-1].state_id, token);
size_t reduce_index = check_reduce(state_values_stack_index(&statevalues, -1)->state_id, token);
if (reduce_index != INVALID_ID)
{
/* We have something to reduce. */
reduced_parser_value = parser_user_code(parser_reduce_table[reduce_index].rule, statevalues, parser_reduce_table[reduce_index].n_states);
reduced_parser_value = parser_user_code(parser_reduce_table[reduce_index].rule, &statevalues, parser_reduce_table[reduce_index].n_states);
reduced_rule_set = parser_reduce_table[reduce_index].rule_set;
statevalues.length -= parser_reduce_table[reduce_index].n_states;
state_values_stack_pop(&statevalues, parser_reduce_table[reduce_index].n_states);
continue;
}
@@ -852,8 +904,10 @@ size_t <%= @grammar.prefix %>parse(<%= @grammar.prefix %>context_t * context)
* it will point to the correct position of the unexpected token. */
context.text_position = token_info.position;
context.token = token;
return P_UNEXPECTED_TOKEN;
result = P_UNEXPECTED_TOKEN;
break;
}
state_values_stack_free(&statevalues);
return result;
}
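The parse-loop hunks above all follow the same pattern: each early return is replaced by result = ...; break; so that the single state_values_stack_free call after the loop runs on every exit path. A hedged skeleton of that single-exit shape (the parse_skeleton name and the elided loop body are illustrative only; P_SUCCESS and P_UNEXPECTED_TOKEN are the result codes used above):

    size_t parse_skeleton(void)
    {
        state_values_stack_t statevalues;
        size_t result;
        state_values_stack_init(&statevalues);
        for (;;)
        {
            /* ... lex, shift, reduce ... */
            result = P_SUCCESS;                /* or P_UNEXPECTED_TOKEN on error */
            break;
        }
        state_values_stack_free(&statevalues); /* runs for every outcome */
        return result;
    }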
/**