Corrected parsing of the 'expr' report query term

John Wiegley 2009-11-15 05:17:17 -05:00
parent 38e165a994
commit 2a411b5c1d
2 changed files with 14 additions and 10 deletions
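The old special case for a leading '=' (which set consume_whitespace so the remainder of the string was taken as an expression) is removed; instead the expr keyword sets a new consume_next_arg flag, and on the next call next_token() returns the entire following argument as one TERM token. A minimal, self-contained C++ sketch of that pattern follows; toy_lexer and the printed token strings are illustrative names only, not ledger's actual query_t::lexer_t API.

// Minimal standalone sketch (illustrative names, not ledger's real classes):
// after seeing the "expr" keyword, the lexer flags consume_next_arg so the
// entire next argument is returned as a single term instead of re-scanning it.
#include <iostream>
#include <string>
#include <vector>

struct toy_lexer {
  std::vector<std::string>::const_iterator arg, end;
  bool consume_next_arg;

  toy_lexer(std::vector<std::string>::const_iterator b,
            std::vector<std::string>::const_iterator e)
    : arg(b), end(e), consume_next_arg(false) {}

  std::string next_token() {
    if (arg == end)
      return "<end>";
    if (consume_next_arg) {             // take the whole next argument verbatim
      consume_next_arg = false;
      return "TERM(" + *arg++ + ")";
    }
    std::string word = *arg++;
    if (word == "expr") {               // next argument is a raw value expression
      consume_next_arg = true;
      return "TOK_EXPR";
    }
    return "TERM(" + word + ")";
  }
};

int main() {
  std::vector<std::string> args = {"expr", "amount > 100", "food"};
  toy_lexer lex(args.begin(), args.end());
  for (std::string tok; (tok = lex.next_token()) != "<end>"; )
    std::cout << tok << '\n';           // TOK_EXPR, TERM(amount > 100), TERM(food)
  return 0;
}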

@@ -53,6 +53,12 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
     }
   }
+  if (consume_next_arg) {
+    consume_next_arg = false;
+    arg_i = arg_end;
+    return token_t(token_t::TERM, (*begin).as_string());
+  }
 resume:
   bool consume_next = false;
   switch (*arg_i) {
@@ -95,13 +101,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
   case '@': ++arg_i; return token_t(token_t::TOK_PAYEE);
   case '#': ++arg_i; return token_t(token_t::TOK_CODE);
   case '%': ++arg_i; return token_t(token_t::TOK_META);
-  case '=':
-    // The '=' keyword at the beginning of a string causes the entire string
-    // to be taken as an expression.
-    if (arg_i == (*begin).as_string().begin())
-      consume_whitespace = true;
-    ++arg_i;
-    return token_t(token_t::TOK_EQ);
+  case '=': ++arg_i; return token_t(token_t::TOK_EQ);
   case '\\':
     consume_next = true;
@@ -140,7 +140,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
   }
   consume_whitespace = false;
- test_ident:
+test_ident:
   if (ident == "and")
     return token_t(token_t::TOK_AND);
   else if (ident == "or")
@@ -177,7 +177,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
 #endif
   else if (ident == "expr") {
     // The expr keyword takes the whole of the next string as its argument.
-    consume_whitespace = true;
+    consume_next_arg = true;
     return token_t(token_t::TOK_EXPR);
   }
   else

@@ -61,6 +61,7 @@ public:
   string::const_iterator arg_end;
   bool consume_whitespace;
+  bool consume_next_arg;
 public:
   struct token_t
@@ -175,7 +176,9 @@ public:
   lexer_t(value_t::sequence_t::const_iterator _begin,
           value_t::sequence_t::const_iterator _end)
-    : begin(_begin), end(_end), consume_whitespace(false)
+    : begin(_begin), end(_end),
+      consume_whitespace(false),
+      consume_next_arg(false)
   {
     TRACE_CTOR(lexer_t, "");
     assert(begin != end);
@@ -186,6 +189,7 @@ public:
     : begin(lexer.begin), end(lexer.end),
       arg_i(lexer.arg_i), arg_end(lexer.arg_end),
       consume_whitespace(lexer.consume_whitespace),
+      consume_next_arg(lexer.consume_next_arg),
       token_cache(lexer.token_cache)
   {
     TRACE_CTOR(lexer_t, "copy");