Add p_context_delete() and p_tree_delete() for D targets

This commit is contained in:
Josh Holtrop 2026-02-28 21:11:53 -05:00
parent d440d0442d
commit dd687d0299
11 changed files with 105 additions and 24 deletions

View File

@ -9,7 +9,7 @@ module <%= @grammar.modulename %>;
<% end %>
import core.memory;
import core.stdc.stdlib : malloc;
import core.stdc.stdlib : malloc, free;
/**************************************************************************
* User code blocks
@ -94,6 +94,8 @@ private struct TreeNode
{
<%= @grammar.prefix %>position_t position;
<%= @grammar.prefix %>position_t end_position;
ushort n_fields;
bool is_token;
void *[0] fields;
}
@ -103,6 +105,8 @@ public struct <%= @grammar.tree_prefix %>Token<%= @grammar.tree_suffix %>
/* TreeNode fields must be present in the same order here. */
<%= @grammar.prefix %>position_t position;
<%= @grammar.prefix %>position_t end_position;
ushort n_fields;
bool is_token;
<%= @grammar.prefix %>token_t token;
<%= @grammar.prefix %>value_t pvalue;
<%= @grammar.token_user_fields %>
@ -115,6 +119,8 @@ public struct <%= @grammar.tree_prefix %><%= name %><%= @grammar.tree_suffix %>
{
<%= @grammar.prefix %>position_t position;
<%= @grammar.prefix %>position_t end_position;
ushort n_fields;
bool is_token;
<% rule_set.tree_fields.each do |fields| %>
union
{
@ -225,6 +231,7 @@ private enum size_t INVALID_ID = cast(size_t)-1;
*************************************************************************/
/**
* Allocate and initialize lexer/parser context structure.
*
* Deinitialize and deallocate with <%= @grammar.prefix %>context_delete().
*
@ -249,6 +256,16 @@ private enum size_t INVALID_ID = cast(size_t)-1;
return context;
}
/**
 * Deinitialize and deallocate lexer/parser context structure.
 *
 * For the D target this is a no-op: the context appears to be GC-managed,
 * so no explicit deallocation is required. The function exists so that user
 * code can be written identically against the C/C++ and D targets.
 * NOTE(review): assumes <%= @grammar.prefix %>context_new() allocates via the
 * GC — confirm against the allocator implementation.
 *
 * @param context
 *   Lexer/parser context structure allocated with <%= @grammar.prefix %>context_new().
 */
void <%= @grammar.prefix %>context_delete(<%= @grammar.prefix %>context_t * context)
{
    /* Intentionally empty; see note above. */
}
/**************************************************************************
* Decoder
*************************************************************************/
@ -1054,7 +1071,7 @@ private size_t parse_from(<%= @grammar.prefix %>context_t * context, size_t star
{
/* We shifted a token, mark it consumed. */
<% if @grammar.tree %>
<%= @grammar.tree_prefix %>Token<%= @grammar.tree_suffix %> * token_tree_node = new <%= @grammar.tree_prefix %>Token<%= @grammar.tree_suffix %>(token_info.position, token_info.end_position, token, token_info.pvalue);
<%= @grammar.tree_prefix %>Token<%= @grammar.tree_suffix %> * token_tree_node = new <%= @grammar.tree_prefix %>Token<%= @grammar.tree_suffix %>(token_info.position, token_info.end_position, 0u, true, token, token_info.pvalue);
<%= expand_code(@grammar.on_token_node, false, nil, nil) %>
statevalues[$-1].tree_node = token_tree_node;
<% else %>
@ -1094,6 +1111,8 @@ private size_t parse_from(<%= @grammar.prefix %>context_t * context, size_t star
GC.addRange(node, node_size);
node.position = <%= @grammar.prefix %>position_t.INVALID;
node.end_position = <%= @grammar.prefix %>position_t.INVALID;
node.n_fields = cast(ushort)n_fields;
node.is_token = false;
foreach (i; 0..n_fields)
{
node.fields[i] = null;
@ -1201,6 +1220,35 @@ public <%= start_rule_type(i)[1] %> <%= @grammar.prefix %>result_<%= start_rule
<% end %>
<% end %>
<% if @grammar.tree %>
/**
 * Recursively release a parse tree node and all of its descendants.
 *
 * Non-token nodes were allocated with malloc() and registered with the GC
 * via GC.addRange(), so they are unregistered and freed explicitly here.
 * Token nodes are skipped: they are GC-allocated (`new`), so the garbage
 * collector reclaims them once no references remain.
 *
 * @param node
 *   Root of the (sub)tree to release; must not be null.
 */
private void tree_delete(TreeNode * node)
{
    if (node.is_token)
    {
        /* GC-managed; nothing to do. */
        return;
    }
    foreach (size_t i; 0 .. node.n_fields)
    {
        void * child = node.fields[i];
        if (child !is null)
        {
            tree_delete(cast(TreeNode *)child);
        }
    }
    GC.removeRange(node);
    free(node);
}
/**
 * Deinitialize and deallocate a parse tree.
 *
 * @param tree
 *   Parse tree root as returned by <%= @grammar.prefix %>result(); the tree
 *   and all of its non-token child nodes are freed recursively.
 */
void <%= @grammar.prefix %>tree_delete(<%= @grammar.tree_prefix %><%= @grammar.start_rules[0] %><%= @grammar.tree_suffix %> * tree)
{
    tree_delete(cast(TreeNode *)tree);
}
<% @grammar.start_rules.each_with_index do |start_rule, i| %>
/**
 * Deinitialize and deallocate a parse tree for start rule `<%= start_rule %>`.
 *
 * @param tree
 *   Parse tree root as returned by <%= @grammar.prefix %>result_<%= start_rule %>();
 *   the tree and all of its non-token child nodes are freed recursively.
 */
void <%= @grammar.prefix %>tree_delete_<%= start_rule %>(<%= @grammar.tree_prefix %><%= start_rule %><%= @grammar.tree_suffix %> * tree)
{
    tree_delete(cast(TreeNode *)tree);
}
<% end %>
<% end %>
/**
* Get the current text input position.
*

View File

@ -1130,8 +1130,6 @@ p_context_t * context = p_context_new(input);
The `p_context_delete()` function must be called to deinitialize and deallocate
a context structure allocated by `p_context_new()`.
This function is not available for the D language since D has a garbage collector.
### `p_lex`
The `p_lex()` function is the main entry point to the lexer.
@ -1320,8 +1318,6 @@ assert(code_point_length == 4u);
The `p_tree_delete()` function can be used to free the memory used by the tree.
It should be passed the same value that is returned by `p_result()`.
The `p_tree_delete()` function is only available for C/C++ output targets.
Note that if any lexer user code block allocates memory to store in a token's
`pvalue`, in order to properly free this memory the `free_token_node` statement
should be used to provide a code block that frees this memory.

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "b";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
assert(start.pToken1 is null);
@ -21,6 +20,8 @@ unittest
assert(start.pR is null);
assert(start.r is null);
p_tree_delete(start);
input = "abcd";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -35,6 +36,8 @@ unittest
assert(start.pR == start.r);
assert_eq(TOKEN_c, start.pR.pToken1.token);
p_tree_delete(start);
input = "bdc";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -43,4 +46,6 @@ unittest
assert(start.pToken2 !is null);
assert(start.pR !is null);
assert_eq(TOKEN_d, start.pR.pToken1.token);
p_tree_delete(start);
}

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "b";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
assert(start.pToken1 is null);
@ -20,6 +19,8 @@ unittest
assert(start.pR3 is null);
assert(start.pR is null);
p_tree_delete(start);
input = "abcd";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -32,6 +33,8 @@ unittest
assert(start.pR == start.pR3);
assert_eq(TOKEN_c, start.pR.pToken1.token);
p_tree_delete(start);
input = "bdc";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -40,4 +43,6 @@ unittest
assert(start.pToken2 !is null);
assert(start.pR !is null);
assert_eq(TOKEN_d, start.pR.pToken1.token);
p_tree_delete(start);
}

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "bbbb";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
assert(start.bs);
@ -20,6 +19,8 @@ unittest
assert(start.bs.bs.bs.b);
assert(start.bs.bs.bs.bs.b);
p_tree_delete(start);
context = p_context_new(input);
assert(p_parse_Bs(context) == P_SUCCESS);
Bs * bs = p_result_Bs(context);
@ -28,9 +29,13 @@ unittest
assert(bs.bs.bs.b);
assert(bs.bs.bs.bs.b);
p_tree_delete_Bs(bs);
input = "c";
context = p_context_new(input);
assert(p_parse_R(context) == P_SUCCESS);
R * r = p_result_R(context);
assert(r.c);
p_tree_delete_R(r);
}

View File

@ -17,8 +17,7 @@ unittest
" # s1\n" ~
" # s2\n" ~
"second\n";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
assert(start.pIDs);
@ -27,4 +26,6 @@ unittest
assert(start.pIDs.pIDs);
assert(start.pIDs.pIDs.id);
assert(start.pIDs.pIDs.id.comments == "# s1\n# s2\n");
p_tree_delete(start);
}

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "a, ((b)), b";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert_eq(P_SUCCESS, p_parse(context));
Start * start = p_result(context);
assert(start.pItems1 !is null);
@ -37,12 +36,16 @@ unittest
assert_eq(22, itemsmore.pItem.pToken1.pvalue);
assert(itemsmore.pItemsMore is null);
p_tree_delete(start);
input = "";
context = p_context_new(input);
assert_eq(P_SUCCESS, p_parse(context));
start = p_result(context);
assert(start.pItems is null);
p_tree_delete(start);
input = "2 1";
context = p_context_new(input);
assert_eq(P_SUCCESS, p_parse(context));
@ -54,4 +57,6 @@ unittest
assert(start.pItems.pItem.pDual.pOne2 !is null);
assert(start.pItems.pItem.pDual.pTwo2 is null);
assert(start.pItems.pItem.pDual.pOne1 is null);
p_tree_delete(start);
}

View File

@ -10,12 +10,13 @@ int main()
unittest
{
string input = "\na\nb\nc";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
assert_eq(TOKEN_a, start.first.pToken.token);
assert_eq(TOKEN_b, start.second.pToken.token);
assert_eq(TOKEN_c, start.third.pToken.token);
p_tree_delete(start);
}

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "\na\n bb ccc";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
@ -34,6 +33,8 @@ unittest
assert_eq(3, start.end_position.row);
assert_eq(8, start.end_position.col);
p_tree_delete(start);
input = "a\nbb";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -58,6 +59,8 @@ unittest
assert_eq(2, start.end_position.row);
assert_eq(2, start.end_position.col);
p_tree_delete(start);
input = "a\nc\nc";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -82,6 +85,8 @@ unittest
assert_eq(3, start.end_position.row);
assert_eq(1, start.end_position.col);
p_tree_delete(start);
input = "a";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -101,4 +106,6 @@ unittest
assert_eq(1, start.position.col);
assert_eq(1, start.end_position.row);
assert_eq(1, start.end_position.col);
p_tree_delete(start);
}

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "a, ((b)), b";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert_eq(P_SUCCESS, p_parse(context));
PStartS * start = p_result(context);
assert(start.pItems1 !is null);
@ -37,12 +36,16 @@ unittest
assert_eq(22, itemsmore.pItem.pToken1.pvalue);
assert(itemsmore.pItemsMore is null);
p_tree_delete(start);
input = "";
context = p_context_new(input);
assert_eq(P_SUCCESS, p_parse(context));
start = p_result(context);
assert(start.pItems is null);
p_tree_delete(start);
input = "2 1";
context = p_context_new(input);
assert_eq(P_SUCCESS, p_parse(context));
@ -54,4 +57,6 @@ unittest
assert(start.pItems.pItem.pDual.pOne2 !is null);
assert(start.pItems.pItem.pDual.pTwo2 is null);
assert(start.pItems.pItem.pDual.pOne1 is null);
p_tree_delete(start);
}

View File

@ -10,8 +10,7 @@ int main()
unittest
{
string input = "abbccc";
p_context_t * context;
context = p_context_new(input);
p_context_t * context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
Start * start = p_result(context);
@ -47,6 +46,8 @@ unittest
assert_eq(1, start.end_position.row);
assert_eq(6, start.end_position.col);
p_tree_delete(start);
input = "\n\n bb\nc\ncc\n\n a";
context = p_context_new(input);
assert(p_parse(context) == P_SUCCESS);
@ -83,4 +84,6 @@ unittest
assert_eq(3, start.position.col);
assert_eq(7, start.end_position.row);
assert_eq(6, start.end_position.col);
p_tree_delete(start);
}