update trimString(), add tokenize()

Josh Holtrop 2011-04-20 14:45:55 -04:00
parent cf4e09814f
commit 874be25658


@@ -20,14 +20,12 @@ using namespace std;
 /****** static functions ******/
-static string trimString(string s)
+static string trimString(const string in)
 {
-    size_t lastpos = s.find_last_not_of(WHITESPACE);
-    if (lastpos == string::npos)
+    size_t firstpos = in.find_first_not_of(WHITESPACE);
+    if (firstpos == string::npos)
         return "";
-    s.erase(lastpos + 1);
-    s.erase(0, s.find_first_not_of(WHITESPACE));
-    return s;
+    return string(in, firstpos, in.find_last_not_of(WHITESPACE) - firstpos + 1);
 }
 
 static string stripFirstToken(string & input)
@@ -41,6 +39,20 @@ static string stripFirstToken(string & input)
     return token;
 }
 
+vector<string> tokenize(const string & input)
+{
+    vector<string> tokens;
+    string in = input;
+    for (;;)
+    {
+        string token = stripFirstToken(in);
+        if (token == "")
+            break;
+        tokens.push_back(token);
+    }
+    return tokens;
+}
+
 static vector<string> splitString(const string & str, char delim)
 {
     vector<string> ret;
@@ -134,8 +146,7 @@ bool WFObj::load(const WFObj::Buffer &buff)
     size_t idx = 0;
     while (idx < buff.length)
     {
-        string line = getLine(buff, idx, &idx);
-        string input = trimString(line);
+        string input = trimString(getLine(buff, idx, &idx));
         int sz = input.size();
         if (sz == 0 || input[0] == '#')
             continue;
@@ -182,14 +193,7 @@ string WFObj::getLine(const Buffer & buff, size_t idx, size_t *update_idx)
 void WFObj::processInputLine(const std::string & input)
 {
     string line = input;
-    vector<string> tokens;
-    for (;;)
-    {
-        string token = stripFirstToken(line);
-        if (token == "")
-            break;
-        tokens.push_back(token);
-    }
+    vector<string> tokens = tokenize(line);
     if (tokens.size() == 0)
         return;
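For reference, a minimal standalone sketch of how the new tokenize() helper behaves. tokenize() is copied from the diff above; the WHITESPACE definition and the body of stripFirstToken() are assumptions here, since the commit only shows how they are used.

// Self-contained sketch; WHITESPACE and stripFirstToken() below are assumed,
// only tokenize() matches the commit.
#include <iostream>
#include <string>
#include <vector>
using namespace std;

#define WHITESPACE " \t\r\n"

// Assumed helper: remove and return the first whitespace-delimited token
// from input, or "" when no token remains.
static string stripFirstToken(string & input)
{
    size_t start = input.find_first_not_of(WHITESPACE);
    if (start == string::npos)
    {
        input.clear();
        return "";
    }
    size_t end = input.find_first_of(WHITESPACE, start);
    // substr() and erase() both clamp when end is string::npos (last token).
    string token = input.substr(start, end - start);
    input.erase(0, end);
    return token;
}

// tokenize() as added in this commit: split an input line into tokens.
vector<string> tokenize(const string & input)
{
    vector<string> tokens;
    string in = input;
    for (;;)
    {
        string token = stripFirstToken(in);
        if (token == "")
            break;
        tokens.push_back(token);
    }
    return tokens;
}

int main()
{
    // An OBJ-style face line; expect "f", "1/1", "2/2", "3/3".
    vector<string> tokens = tokenize("  f 1/1 2/2 3/3  ");
    for (size_t i = 0; i < tokens.size(); i++)
        cout << "[" << tokens[i] << "]" << endl;
    return 0;
}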