Corrected a bug in report query parsing

This commit is contained in:
John Wiegley 2010-06-19 14:02:44 -04:00
parent 9b905f2b44
commit 7995e16762
2 changed files with 6 additions and 6 deletions

View file

@@ -53,6 +53,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
}
}
resume:
switch (*arg_i) {
case '\'':
case '"':
@@ -88,7 +89,6 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
return tok;
}
resume:
bool consume_next = false;
switch (*arg_i) {
case ' ':
@@ -125,7 +125,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '\t':
case '\n':
case '\r':
if (! consume_whitespace)
if (! multiple_args && ! consume_whitespace)
goto test_ident;
else
ident.push_back(*arg_i);

View file

@@ -63,7 +63,7 @@ void ValueExprTestCase::testPredicateTokenizer2()
args.push_back(string_value("foo and bar"));
#ifndef NOT_FOR_PYTHON
query_t::lexer_t tokens(args.begin(), args.end());
query_t::lexer_t tokens(args.begin(), args.end(), false);
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
@@ -119,7 +119,7 @@ void ValueExprTestCase::testPredicateTokenizer5()
args.push_back(string_value("bar)"));
#ifndef NOT_FOR_PYTHON
query_t::lexer_t tokens(args.begin(), args.end());
query_t::lexer_t tokens(args.begin(), args.end(), false);
assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
@@ -168,7 +168,7 @@ void ValueExprTestCase::testPredicateTokenizer8()
args.push_back(string_value("expr 'foo and bar'"));
#ifndef NOT_FOR_PYTHON
query_t::lexer_t tokens(args.begin(), args.end());
query_t::lexer_t tokens(args.begin(), args.end(), false);
assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
@@ -318,7 +318,7 @@ void ValueExprTestCase::testPredicateTokenizer16()
args.push_back(string_value("and bar|baz"));
#ifndef NOT_FOR_PYTHON
query_t::lexer_t tokens(args.begin(), args.end());
query_t::lexer_t tokens(args.begin(), args.end(), false);
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);