More report query fixes
commit c2c11d43ca
parent f4fd2ab1e5
2 changed files with 21 additions and 19 deletions

 src/query.cc | 36 +++++++++++++++++++-----------------
 src/query.h  |  4 ++--
--- a/src/query.cc
+++ b/src/query.cc
@@ -106,7 +106,7 @@ query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
   case '\r':
   case '\n':
     if (++arg_i == arg_end)
-      return next_token();
+      return next_token(tok_context);
     goto resume;
 
   case '(':
@@ -152,8 +152,10 @@ query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
         ident.push_back(*arg_i);
         break;
 
-      case '(':
       case ')':
+        if (! consume_next && tok_context == token_t::TOK_EXPR)
+          goto test_ident;
+      case '(':
       case '&':
       case '|':
       case '!':
@@ -161,7 +163,7 @@ query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
       case '#':
       case '%':
       case '=':
-        if (! consume_next)
+        if (! consume_next && tok_context != token_t::TOK_EXPR)
          goto test_ident;
         // fall through...
       default:
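
The two lexer hunks above make next_token() context-sensitive: in TOK_EXPR context a ')' now terminates the identifier being scanned (so it can close a sub-expression), while '=' and the other operator characters terminate it only outside TOK_EXPR context. A simplified stand-alone sketch of the idea follows; Context and read_ident are illustrative names, not ledger's:

#include <cctype>
#include <cstddef>
#include <iostream>
#include <string>

enum class Context { Pattern, Expr };  // stand-in for token_t::TOK_EXPR etc.

// Read one identifier starting at 'pos'. Which characters terminate it
// depends on the lexing context, mirroring the guards added above.
std::string read_ident(const std::string& in, std::size_t& pos, Context ctx) {
  std::string ident;
  for (; pos < in.size(); ++pos) {
    char c = in[pos];
    if (std::isspace(static_cast<unsigned char>(c)))
      break;                                  // whitespace always terminates
    if (c == ')' && ctx == Context::Expr)
      break;                                  // ')' closes a sub-expression
    if ((c == '=' || c == '&' || c == '|') && ctx != Context::Expr)
      break;                                  // operators split terms outside Expr
    ident.push_back(c);
  }
  return ident;
}

int main() {
  std::size_t p = 0;
  std::cout << read_ident("foo=bar", p, Context::Pattern) << '\n';  // "foo"
  p = 0;
  std::cout << read_ident("foo=bar", p, Context::Expr) << '\n';     // "foo=bar"
}

The real lexer also consults consume_next and falls through several case labels, but the context test added in this commit is the part the sketch isolates.
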
@@ -297,10 +299,10 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
       expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
       arg1->set_value(mask_t(*tok.value));
 
-      tok = lexer.peek_token();
+      tok = lexer.peek_token(tok_context);
       if (tok.kind == lexer_t::token_t::TOK_EQ) {
-        tok = lexer.next_token();
-        tok = lexer.next_token();
+        tok = lexer.next_token(tok_context);
+        tok = lexer.next_token(tok_context);
         if (tok.kind != lexer_t::token_t::TERM)
           throw_(parse_error,
                  _("Metadata equality operator not followed by term"));
@@ -349,7 +351,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
 
   case lexer_t::token_t::LPAREN:
     node = parse_query_expr(tok_context, true);
-    tok = lexer.next_token();
+    tok = lexer.next_token(tok_context);
     if (tok.kind != lexer_t::token_t::RPAREN)
       tok.expected(')');
     break;
@@ -367,7 +369,7 @@ query_t::parser_t::parse_unary_expr(lexer_t::token_t::kind_t tok_context)
 {
   expr_t::ptr_op_t node;
 
-  lexer_t::token_t tok = lexer.next_token();
+  lexer_t::token_t tok = lexer.next_token(tok_context);
   switch (tok.kind) {
   case lexer_t::token_t::TOK_NOT: {
     expr_t::ptr_op_t term(parse_query_term(tok_context));
@@ -394,7 +396,7 @@ query_t::parser_t::parse_and_expr(lexer_t::token_t::kind_t tok_context)
 {
   if (expr_t::ptr_op_t node = parse_unary_expr(tok_context)) {
     while (true) {
-      lexer_t::token_t tok = lexer.next_token();
+      lexer_t::token_t tok = lexer.next_token(tok_context);
       if (tok.kind == lexer_t::token_t::TOK_AND) {
         expr_t::ptr_op_t prev(node);
         node = new expr_t::op_t(expr_t::op_t::O_AND);
@@ -418,7 +420,7 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
 {
   if (expr_t::ptr_op_t node = parse_and_expr(tok_context)) {
     while (true) {
-      lexer_t::token_t tok = lexer.next_token();
+      lexer_t::token_t tok = lexer.next_token(tok_context);
       if (tok.kind == lexer_t::token_t::TOK_OR) {
         expr_t::ptr_op_t prev(node);
         node = new expr_t::op_t(expr_t::op_t::O_OR);
@@ -460,13 +462,13 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
       (query_map_t::value_type
        (QUERY_LIMIT, predicate_t(limiter, what_to_keep).print_to_str()));
 
-  lexer_t::token_t tok = lexer.peek_token();
+  lexer_t::token_t tok = lexer.peek_token(tok_context);
   while (tok.kind != lexer_t::token_t::END_REACHED) {
     switch (tok.kind) {
     case lexer_t::token_t::TOK_SHOW:
     case lexer_t::token_t::TOK_ONLY:
     case lexer_t::token_t::TOK_BOLD: {
-      lexer.next_token();
+      lexer.next_token(tok_context);
 
       kind_t kind;
       switch (tok.kind) {
@@ -505,7 +507,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
     case lexer_t::token_t::TOK_FOR:
     case lexer_t::token_t::TOK_SINCE:
     case lexer_t::token_t::TOK_UNTIL: {
-      tok = lexer.next_token();
+      tok = lexer.next_token(tok_context);
 
       string for_string;
 
@@ -515,10 +517,10 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
         for_string = "until";
 
       lexer.consume_next_arg = true;
-      tok = lexer.peek_token();
+      tok = lexer.peek_token(tok_context);
 
      while (tok.kind != lexer_t::token_t::END_REACHED) {
-        tok = lexer.next_token();
+        tok = lexer.next_token(tok_context);
        assert(tok.kind == lexer_t::token_t::TERM);
 
        if (*tok.value == "show" || *tok.value == "bold" ||
@@ -535,7 +537,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
           for_string += *tok.value;
 
         lexer.consume_next_arg = true;
-        tok = lexer.peek_token();
+        tok = lexer.peek_token(tok_context);
       }
 
       if (! for_string.empty())
@@ -547,7 +549,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
       goto done;
     }
 
-    tok = lexer.peek_token();
+    tok = lexer.peek_token(tok_context);
   }
 done:
   ;
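
Every query.cc hunk after the lexer changes is the same mechanical fix: parse_query_term, parse_unary_expr, parse_and_expr, parse_or_expr, and parse_query_expr now forward the tok_context they were given into each next_token()/peek_token() call, instead of letting the lexer fall back to its default context. A toy sketch of the threading pattern, using hypothetical simplified types rather than ledger's API:

#include <cstddef>
#include <string>
#include <vector>

enum class Ctx { Account, Payee, Expr };  // stand-ins for the token contexts

struct Token { std::string text; };

struct Lexer {
  std::vector<std::string> args;
  std::size_t i = 0;
  // In the real lexer the context changes how an argument is split into
  // tokens; here it is just carried along to show the flow.
  Token next_token(Ctx /*ctx*/) {
    return Token{i < args.size() ? args[i++] : std::string()};
  }
};

struct Parser {
  Lexer lexer;
  // Each level hands its ctx down unchanged, so a token consumed several
  // levels deep is lexed under the same rules as the token that started
  // the parse. Dropping the argument anywhere reintroduces the bug this
  // commit fixes: that call would silently lex under the default context.
  Token parse_term(Ctx ctx)  { return lexer.next_token(ctx); }
  Token parse_unary(Ctx ctx) { return parse_term(ctx); }
  Token parse_and(Ctx ctx)   { return parse_unary(ctx); }
  Token parse_or(Ctx ctx)    { return parse_and(ctx); }
};

int main() {
  Parser p{Lexer{{"expenses", "and", "@payee"}}};
  Token t = p.parse_or(Ctx::Account);   // the context flows through every level
  return t.text.empty();
}
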
--- a/src/query.h
+++ b/src/query.h
@@ -227,9 +227,9 @@ public:
     assert(token_cache.kind == token_t::UNKNOWN);
     token_cache = tok;
   }
-  token_t peek_token() {
+  token_t peek_token(token_t::kind_t tok_context = token_t::UNKNOWN) {
     if (token_cache.kind == token_t::UNKNOWN)
-      token_cache = next_token();
+      token_cache = next_token(tok_context);
     return token_cache;
   }
 };
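
Finally, the header change gives peek_token() the same optional context parameter that next_token() takes, defaulting to token_t::UNKNOWN, and forwards it when filling the one-token lookahead cache. The subtlety is that the cached token is exactly what a later next_token() call will hand back, so it must be lexed under the peeker's context. A sketch of that cache, reusing the toy Token/Lexer/Ctx types from the previous sketch:

struct CachingLexer {
  Lexer inner;
  Token cache;     // empty text plays the role of token_t::UNKNOWN here

  Token peek_token(Ctx ctx) {
    if (cache.text.empty())
      cache = inner.next_token(ctx);  // fill the cache under the caller's context
    return cache;
  }

  Token next_token(Ctx ctx) {
    if (! cache.text.empty()) {       // a peeked token is pending:
      Token tok = cache;              // return it and clear the cache,
      cache = Token{};                // whatever context is passed now
      return tok;
    }
    return inner.next_token(ctx);
  }
};
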