Remove outer namespacing class

Josh Holtrop 2023-07-10 22:54:33 -04:00
parent 78ce7fb77a
commit eee6513384
14 changed files with 638 additions and 625 deletions
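
For illustration, here is a minimal sketch of the caller-side effect of this commit: the generated symbols used to be nested inside an outer class named after the grammar (the test files below use "Testparser"), and after this change the same symbols are declared at module scope. The import line and module name are assumptions made for the sketch; the qualified-versus-unqualified references are taken from the test diffs below.

    // Illustration only; "testparser" is a hypothetical module name for the
    // generated parser source.
    import testparser;

    unittest
    {
        string input = "a b";

        // Before this commit: every generated symbol lived inside the outer class.
        //   auto parser = new Testparser.Parser(input);
        //   assert(parser.parse() == Testparser.P_SUCCESS);

        // After this commit: Parser and the result codes are module-level.
        auto parser = new Parser(input);
        assert(parser.parse() == P_SUCCESS);
    }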

View File

@@ -9,11 +9,9 @@ import std.stdio;
<%= code %>
<% end %>
class <%= @classname %>
/* Result codes. */
public enum : size_t
{
/* Result codes. */
public enum : size_t
{
P_SUCCESS,
P_DECODE_ERROR,
P_UNEXPECTED_INPUT,
@@ -21,15 +19,17 @@ class <%= @classname %>
P_TOKEN,
P_DROP,
P_EOF,
}
}
/* An invalid ID value. */
private enum size_t INVALID_ID = cast(size_t)-1;
/* An invalid ID value. */
private enum size_t INVALID_ID = cast(size_t)-1;
alias Token = <%= get_type_for(@grammar.invalid_token_id) %>;
/** Token ID type. */
public alias Token = <%= get_type_for(@grammar.invalid_token_id) %>;
enum : Token
{
/** Token IDs. */
public enum : Token
{
<% @grammar.tokens.each_with_index do |token, index| %>
TOKEN_<%= token.code_name %> = <%= index %>,
<% unless token.id == index %>
@@ -37,39 +37,46 @@ class <%= @classname %>
<% end %>
<% end %>
INVALID_TOKEN_ID = <%= @grammar.invalid_token_id %>,
}
}
alias CodePoint = uint;
static immutable string[] token_names = [
/** Token names. */
public static immutable string[] token_names = [
<% @grammar.tokens.each_with_index do |token, index| %>
"<%= token.name %>",
<% end %>
];
];
static union ParserValue
{
/** Code point type. */
public alias CodePoint = uint;
/** Parser values type(s). */
public static union ParserValue
{
<% @grammar.ptypes.each do |name, typestring| %>
<%= typestring %> v_<%= name %>;
<% end %>
}
}
/**
/**
* A structure to keep track of parser position.
*
* This is useful for reporting errors, etc...
*/
static struct Position
{
public static struct Position
{
/** Input text row (0-based). */
uint row;
/** Input text column (0-based). */
uint col;
}
}
static class Decoder
{
/**************************************************************************
* Decoder
*************************************************************************/
public static class Decoder
{
/**
* Decode a UTF-8 code point.
*
@@ -145,77 +152,81 @@ class <%= @classname %>
*out_code_point_length = code_point_length;
return P_SUCCESS;
}
}
}
static class Lexer
{
alias LexerStateID = <%= get_type_for(@lexer.state_table.size) %>;
enum LexerStateID INVALID_LEXER_STATE_ID = <%= @lexer.state_table.size %>u;
/**************************************************************************
* Lexer
*************************************************************************/
private alias LexerStateID = <%= get_type_for(@lexer.state_table.size) %>;
private enum LexerStateID INVALID_LEXER_STATE_ID = <%= @lexer.state_table.size %>u;
<% user_code_id_count = (@grammar.patterns.map(&:code_id).compact.max || 0) + 1 %>
alias UserCodeID = <%= get_type_for(user_code_id_count) %>;
enum UserCodeID INVALID_USER_CODE_ID = <%= user_code_id_count %>u;
private alias UserCodeID = <%= get_type_for(user_code_id_count) %>;
private enum UserCodeID INVALID_USER_CODE_ID = <%= user_code_id_count %>u;
private struct Transition
{
private struct Transition
{
CodePoint first;
CodePoint last;
LexerStateID destination_state;
}
}
private struct LexerState
{
private struct LexerState
{
<%= get_type_for(@lexer.transition_table.size - 1) %> transition_table_index;
<%= get_type_for(@lexer.state_table.map {|ste| ste[:n_transitions]}.max) %> n_transitions;
Token token;
UserCodeID code_id;
bool accepts;
}
}
private struct Mode
{
private struct Mode
{
uint state_table_offset;
}
}
private static immutable Transition[] transitions = [
private static immutable Transition[] lexer_transitions = [
<% @lexer.transition_table.each do |transition_table_entry| %>
Transition(<%= transition_table_entry[:first] %>u,
<%= transition_table_entry[:last] %>u,
<%= transition_table_entry[:destination] %>u),
<% end %>
];
];
private static immutable LexerState[] states = [
private static immutable LexerState[] lexer_states = [
<% @lexer.state_table.each do |state_table_entry| %>
LexerState(<%= state_table_entry[:transition_table_index] %>u,
<%= state_table_entry[:n_transitions] %>u,
LexerState(<%= state_table_entry[:transition_table_index] %>u,
<%= state_table_entry[:n_transitions] %>u,
<% if state_table_entry[:token] %>
Token(<%= state_table_entry[:token] %>u),
Token(<%= state_table_entry[:token] %>u),
<% else %>
INVALID_TOKEN_ID,
INVALID_TOKEN_ID,
<% end %>
<% if state_table_entry[:code_id] %>
<%= state_table_entry[:code_id] %>u,
<%= state_table_entry[:code_id] %>u,
<% else %>
INVALID_USER_CODE_ID,
INVALID_USER_CODE_ID,
<% end %>
<%= state_table_entry[:accepts] %>),
<%= state_table_entry[:accepts] %>),
<% end %>
];
];
private static immutable Mode[] modes = [
private static immutable Mode[] modes = [
<% @lexer.mode_table.each do |mode_table_entry| %>
Mode(<%= mode_table_entry[:state_table_offset] %>),
<% end %>
];
];
public static struct TokenInfo
{
public static struct TokenInfo
{
Position position;
size_t length;
Token token;
ParserValue pvalue;
}
}
public static class Lexer
{
private string m_input;
private size_t m_input_index;
private Position m_input_position;
@@ -385,9 +396,9 @@ class <%= @classname %>
attempt_match.delta_position.col++;
}
current_state = transition_state;
if (states[current_state].accepts)
if (lexer_states[current_state].accepts)
{
attempt_match.accepting_state = &states[current_state];
attempt_match.accepting_state = &lexer_states[current_state];
longest_match = attempt_match;
}
}
@@ -432,55 +443,57 @@ class <%= @classname %>
private LexerStateID transition(uint current_state, uint code_point)
{
uint transition_table_index = states[current_state].transition_table_index;
for (uint i = 0u; i < states[current_state].n_transitions; i++)
uint transition_table_index = lexer_states[current_state].transition_table_index;
for (uint i = 0u; i < lexer_states[current_state].n_transitions; i++)
{
if ((transitions[transition_table_index + i].first <= code_point) &&
(code_point <= transitions[transition_table_index + i].last))
if ((lexer_transitions[transition_table_index + i].first <= code_point) &&
(code_point <= lexer_transitions[transition_table_index + i].last))
{
return transitions[transition_table_index + i].destination_state;
return lexer_transitions[transition_table_index + i].destination_state;
}
}
return INVALID_LEXER_STATE_ID;
}
}
}
static class Parser
{
alias ReduceID = <%= get_type_for(@parser.reduce_table.size) %>;
/**************************************************************************
* Parser
*************************************************************************/
private alias ReduceID = <%= get_type_for(@parser.reduce_table.size) %>;
<% # A "symbol" is either a token ID or a rule set ID. %>
<% # %>
<% # Rule set IDs start after token IDs, so to store either a token ID %>
<% # or a rule set ID, we just need to know the maximum rule set ID. %>
alias SymbolID = <%= get_type_for(@parser.rule_sets.map(&:last).map(&:id).max) %>;
alias StateID = <%= get_type_for(@parser.state_table.size) %>;
alias RuleID = <%= get_type_for(@grammar.rules.size) %>;
alias ShiftID = <%= get_type_for(@parser.shift_table.size) %>;
private alias SymbolID = <%= get_type_for(@parser.rule_sets.map(&:last).map(&:id).max) %>;
private alias StateID = <%= get_type_for(@parser.state_table.size) %>;
private alias RuleID = <%= get_type_for(@grammar.rules.size) %>;
private alias ShiftID = <%= get_type_for(@parser.shift_table.size) %>;
private struct Shift
{
private struct Shift
{
SymbolID symbol;
StateID state;
}
}
private struct Reduce
{
private struct Reduce
{
Token token;
RuleID rule;
SymbolID rule_set;
StateID n_states;
}
}
private struct ParserState
{
private struct ParserState
{
ShiftID shift_table_index;
ShiftID n_shift_entries;
ReduceID reduce_table_index;
ReduceID n_reduce_entries;
}
}
private struct StateValue
{
private struct StateValue
{
size_t state;
ParserValue pvalue;
@@ -488,26 +501,28 @@ class <%= @classname %>
{
this.state = state;
}
}
}
private static immutable Shift[] shifts = [
private static immutable Shift[] parser_shifts = [
<% @parser.shift_table.each do |shift| %>
Shift(<%= shift[:token_id] %>u, <%= shift[:state_id] %>u),
<% end %>
];
];
private static immutable Reduce[] reduces = [
private static immutable Reduce[] parser_reduces = [
<% @parser.reduce_table.each do |reduce| %>
Reduce(<%= reduce[:token_id] %>u, <%= reduce[:rule_id] %>u, <%= reduce[:rule_set_id] %>u, <%= reduce[:n_states] %>u),
<% end %>
];
];
private static immutable ParserState[] states = [
private static immutable ParserState[] parser_states = [
<% @parser.state_table.each do |state| %>
ParserState(<%= state[:shift_index] %>u, <%= state[:n_shifts] %>u, <%= state[:reduce_index] %>u, <%= state[:n_reduces] %>u),
<% end %>
];
];
public static class Parser
{
private Lexer m_lexer;
private ParserValue parse_result;
@@ -519,7 +534,7 @@ class <%= @classname %>
size_t parse()
{
Lexer.TokenInfo token_info;
TokenInfo token_info;
Token token = INVALID_TOKEN_ID;
StateValue[] statevalues = new StateValue[](1);
size_t reduced_rule_set = INVALID_ID;
@@ -575,9 +590,9 @@ class <%= @classname %>
if (reduce_index != INVALID_ID)
{
/* We have something to reduce. */
reduced_parser_value = user_code(reduces[reduce_index].rule, statevalues, reduces[reduce_index].n_states);
reduced_rule_set = reduces[reduce_index].rule_set;
statevalues.length -= reduces[reduce_index].n_states;
reduced_parser_value = user_code(parser_reduces[reduce_index].rule, statevalues, parser_reduces[reduce_index].n_states);
reduced_rule_set = parser_reduces[reduce_index].rule_set;
statevalues.length -= parser_reduces[reduce_index].n_states;
continue;
}
@@ -602,11 +617,11 @@ class <%= @classname %>
private size_t check_shift(size_t state, size_t symbol)
{
uint start = states[state].shift_table_index;
uint end = start + states[state].n_shift_entries;
uint start = parser_states[state].shift_table_index;
uint end = start + parser_states[state].n_shift_entries;
for (uint i = start; i < end; i++)
{
if (shifts[i].symbol == symbol)
if (parser_shifts[i].symbol == symbol)
{
// if (symbol != INVALID_TOKEN_ID)
// {
@@ -616,7 +631,7 @@ class <%= @classname %>
// {
// writeln("Shifting rule set ", symbol);
// }
return shifts[i].state;
return parser_shifts[i].state;
}
}
return INVALID_ID;
@@ -624,14 +639,14 @@ class <%= @classname %>
private size_t check_reduce(size_t state, Token token)
{
size_t start = states[state].reduce_table_index;
size_t end = start + states[state].n_reduce_entries;
size_t start = parser_states[state].reduce_table_index;
size_t end = start + parser_states[state].n_reduce_entries;
for (size_t i = start; i < end; i++)
{
if ((reduces[i].token == token) ||
(reduces[i].token == INVALID_TOKEN_ID))
if ((parser_reduces[i].token == token) ||
(parser_reduces[i].token == INVALID_TOKEN_ID))
{
// write("Reducing rule ", reduces[i].rule, ", rule set ", reduces[i].rule_set, " lookahead ");
// write("Reducing rule ", parser_reduces[i].rule, ", rule set ", parser_reduces[i].rule_set, " lookahead ");
// if (token != INVALID_TOKEN_ID)
// {
// writeln(token_names[token]);
@@ -671,5 +686,4 @@ class <%= @classname %>
return _pvalue;
}
}
}
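
To summarize the template change above, the generated D source now has roughly the following top-level shape. This is an abridged sketch assembled from the hunks above, not the full template output: the concrete integer widths are chosen by get_type_for(), the enum member list is shortened, and the class bodies are elided.

    /* Result codes. */
    public enum : size_t
    {
        P_SUCCESS,
        P_DECODE_ERROR,
        P_UNEXPECTED_INPUT,
        // ... remaining result codes from the template ...
        P_EOF,
    }

    /** Token ID type (actual width is chosen by get_type_for()). */
    public alias Token = uint;

    /** Code point type. */
    public alias CodePoint = uint;

    /** Input position, used for error reporting. */
    public static struct Position { uint row; uint col; }

    public static class Decoder { /* UTF-8 decoding, unchanged apart from the removed nesting */ }
    public static class Lexer { /* internal tables renamed to lexer_transitions / lexer_states */ }
    public static class Parser { /* internal tables renamed to parser_shifts / parser_reduces / parser_states */ }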

View File

@@ -9,61 +9,60 @@ int main()
unittest
{
size_t result;
Testparser.CodePoint code_point;
CodePoint code_point;
ubyte code_point_length;
result = Testparser.Decoder.decode_code_point("5", &code_point, &code_point_length);
assert(result == Testparser.P_SUCCESS);
result = Decoder.decode_code_point("5", &code_point, &code_point_length);
assert(result == P_SUCCESS);
assert(code_point == '5');
assert(code_point_length == 1u);
result = Testparser.Decoder.decode_code_point("", &code_point, &code_point_length);
assert(result == Testparser.P_EOF);
result = Decoder.decode_code_point("", &code_point, &code_point_length);
assert(result == P_EOF);
result = Testparser.Decoder.decode_code_point("\xC2\xA9", &code_point, &code_point_length);
assert(result == Testparser.P_SUCCESS);
result = Decoder.decode_code_point("\xC2\xA9", &code_point, &code_point_length);
assert(result == P_SUCCESS);
assert(code_point == 0xA9u);
assert(code_point_length == 2u);
result = Testparser.Decoder.decode_code_point("\xf0\x9f\xa7\xa1", &code_point, &code_point_length);
assert(result == Testparser.P_SUCCESS);
result = Decoder.decode_code_point("\xf0\x9f\xa7\xa1", &code_point, &code_point_length);
assert(result == P_SUCCESS);
assert(code_point == 0x1F9E1u);
assert(code_point_length == 4u);
result = Testparser.Decoder.decode_code_point("\xf0\x9f\x27", &code_point, &code_point_length);
assert(result == Testparser.P_DECODE_ERROR);
result = Decoder.decode_code_point("\xf0\x9f\x27", &code_point, &code_point_length);
assert(result == P_DECODE_ERROR);
result = Testparser.Decoder.decode_code_point("\xf0\x9f\xa7\xFF", &code_point, &code_point_length);
assert(result == Testparser.P_DECODE_ERROR);
result = Decoder.decode_code_point("\xf0\x9f\xa7\xFF", &code_point, &code_point_length);
assert(result == P_DECODE_ERROR);
result = Testparser.Decoder.decode_code_point("\xfe", &code_point, &code_point_length);
assert(result == Testparser.P_DECODE_ERROR);
result = Decoder.decode_code_point("\xfe", &code_point, &code_point_length);
assert(result == P_DECODE_ERROR);
}
unittest
{
alias TokenInfo = Testparser.Lexer.TokenInfo;
TokenInfo token_info;
string input = "5 + 4 * \n677 + 567";
Testparser.Lexer lexer = new Testparser.Lexer(input);
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(0, 0), 1, Testparser.TOKEN_int));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(0, 2), 1, Testparser.TOKEN_plus));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(0, 4), 1, Testparser.TOKEN_int));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(0, 6), 1, Testparser.TOKEN_times));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(1, 0), 3, Testparser.TOKEN_int));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(1, 4), 1, Testparser.TOKEN_plus));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(1, 6), 3, Testparser.TOKEN_int));
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(1, 9), 0, Testparser.TOKEN___EOF));
Lexer lexer = new Lexer(input);
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(0, 0), 1, TOKEN_int));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(0, 2), 1, TOKEN_plus));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(0, 4), 1, TOKEN_int));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(0, 6), 1, TOKEN_times));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(1, 0), 3, TOKEN_int));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(1, 4), 1, TOKEN_plus));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(1, 6), 3, TOKEN_int));
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(1, 9), 0, TOKEN___EOF));
lexer = new Testparser.Lexer("");
assert(lexer.lex_token(&token_info) == Testparser.P_TOKEN);
assert(token_info == TokenInfo(Testparser.Position(0, 0), 0, Testparser.TOKEN___EOF));
lexer = new Lexer("");
assert(lexer.lex_token(&token_info) == P_TOKEN);
assert(token_info == TokenInfo(Position(0, 0), 0, TOKEN___EOF));
}

View File

@@ -9,10 +9,10 @@ int main()
unittest
{
string input = "aba";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
input = "abb";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
}

View File

@@ -9,14 +9,14 @@ int main()
unittest
{
string input = "a";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_UNEXPECTED_TOKEN);
auto parser = new Parser(input);
assert(parser.parse() == P_UNEXPECTED_TOKEN);
input = "a b";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
input = "bb";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
}

View File

@@ -9,7 +9,7 @@ int main()
unittest
{
string input = `identifier_123`;
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass1");
}

View File

@@ -9,12 +9,12 @@ int main()
unittest
{
string input = `abc "a string" def`;
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass1");
input = `abc "abc def" def`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass2");
}

View File

@@ -9,12 +9,12 @@ int main()
unittest
{
string input = `x`;
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(parser.result == 1u);
input = `fabulous`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(parser.result == 8u);
}

View File

@@ -9,11 +9,11 @@ int main()
unittest
{
string input = `x`;
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_UNEXPECTED_INPUT);
auto parser = new Parser(input);
assert(parser.parse() == P_UNEXPECTED_INPUT);
input = `123`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(parser.result == 123u);
}

View File

@@ -9,6 +9,6 @@ int main()
unittest
{
string input = "ab";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
}

View File

@@ -10,42 +10,42 @@ int main()
unittest
{
string input = ``;
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
input = `{}`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(cast(JSONObject)parser.result);
input = `[]`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(cast(JSONArray)parser.result);
input = `-45.6`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(cast(JSONNumber)parser.result);
assert((cast(JSONNumber)parser.result).value == -45.6);
input = `2E-2`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(cast(JSONNumber)parser.result);
assert((cast(JSONNumber)parser.result).value == 0.02);
input = `{"hi":true}`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(cast(JSONObject)parser.result);
JSONObject o = cast(JSONObject)parser.result;
assert(o.value["hi"]);
assert(cast(JSONTrue)o.value["hi"]);
input = `{"ff": false, "nn": null}`;
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(cast(JSONObject)parser.result);
o = cast(JSONObject)parser.result;
assert(o.value["ff"]);

View File

@@ -9,17 +9,17 @@ int main()
unittest
{
string input = "a";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(parser.result == 1u);
input = "";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(parser.result == 0u);
input = "aaaaaaaaaaaaaaaa";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
assert(parser.result == 16u);
}

View File

@@ -9,12 +9,12 @@ int main()
unittest
{
string input = "abcdef";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass1");
input = "defabcdef";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass2");
}

View File

@@ -9,6 +9,6 @@ int main()
unittest
{
string input = "defghidef";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
}

View File

@@ -9,12 +9,12 @@ int main()
unittest
{
string input = "abcdef";
auto parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
auto parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass1");
input = "abcabcdef";
parser = new Testparser.Parser(input);
assert(parser.parse() == Testparser.P_SUCCESS);
parser = new Parser(input);
assert(parser.parse() == P_SUCCESS);
writeln("pass2");
}