Fixed "show" keywords, and added // syntax

This commit is contained in:
John Wiegley 2009-10-28 23:41:51 -04:00
parent d6d8ed6d99
commit 52433e56e5
4 changed files with 82 additions and 32 deletions

View file

@ -226,24 +226,20 @@ value_t args_command(call_scope_t& args)
args.value().dump(out);
out << std::endl << std::endl;
std::pair<value_t::sequence_t::const_iterator, expr_t>
info = args_to_predicate(begin, end);
begin = info.first;
string predicate = info.second.text();
std::pair<expr_t, query_parser_t> info = args_to_predicate(begin, end);
call_scope_t sub_args(static_cast<scope_t&>(args));
sub_args.push_back(string_value(predicate));
sub_args.push_back(string_value(info.first.text()));
parse_command(sub_args);
if (begin != end) {
if (info.second.tokens_remaining()) {
out << std::endl << _("====== Display predicate ======")
<< std::endl << std::endl;
predicate = args_to_predicate(begin, end).second.text();
call_scope_t disp_sub_args(static_cast<scope_t&>(args));
disp_sub_args.push_back(string_value(predicate));
disp_sub_args.push_back
(string_value(args_to_predicate(info.second).first.text()));
parse_command(disp_sub_args);
}

View file

@ -64,6 +64,29 @@ query_lexer_t::token_t query_lexer_t::next_token()
return next_token();
goto resume;
case '/': {
string pat;
bool found_end_slash = false;
for (++arg_i; arg_i != arg_end; ++arg_i) {
if (*arg_i == '\\') {
if (++arg_i == arg_end)
throw_(parse_error, _("Unexpected '\\' at end of pattern"));
}
else if (*arg_i == '/') {
++arg_i;
found_end_slash = true;
break;
}
pat.push_back(*arg_i);
}
if (! found_end_slash)
throw_(parse_error, _("Expected '/' at end of pattern"));
if (pat.empty())
throw_(parse_error, _("Match pattern is empty"));
return token_t(token_t::TERM, pat);
}
case '(': ++arg_i; return token_t(token_t::LPAREN);
case ')': ++arg_i; return token_t(token_t::RPAREN);
case '&': ++arg_i; return token_t(token_t::TOK_AND);
@ -143,7 +166,7 @@ query_lexer_t::token_t query_lexer_t::next_token()
else if (ident == "show") {
// The "show" keyword is special, and separates a limiting predicate
// from a display predicate.
++begin;
DEBUG("pred.show", "string = " << (*begin).as_string());
return token_t(token_t::END_REACHED);
}
else if (ident == "expr") {
@ -357,14 +380,19 @@ expr_t::ptr_op_t query_parser_t::parse()
return parse_query_expr(query_lexer_t::token_t::TOK_ACCOUNT);
}
std::pair<value_t::sequence_t::const_iterator, expr_t>
std::pair<expr_t, query_parser_t>
args_to_predicate(value_t::sequence_t::const_iterator begin,
value_t::sequence_t::const_iterator end)
{
query_parser_t parser(begin, end);
expr_t expr(parser.parse());
return std::pair<value_t::sequence_t::const_iterator, expr_t>
(parser.begin(), expr);
return std::pair<expr_t, query_parser_t>(expr, parser);
}
// Parse the tokens remaining in `parser` into a predicate expression.
// Returns the parsed expression paired with a copy of the parser whose
// position reflects how far parsing advanced, so the caller can keep
// consuming tokens (e.g. a display predicate after "show").
std::pair<expr_t, query_parser_t> args_to_predicate(query_parser_t parser)
{
  // Parse first, THEN copy the parser: parse() advances the parser's
  // internal lexer position, and the returned copy must include that.
  expr_t parsed(parser.parse());
  return std::make_pair(parsed, parser);
}
} // namespace ledger

View file

@ -106,7 +106,7 @@ class query_lexer_t
string::const_iterator arg_i;
string::const_iterator arg_end;
bool consume_whitespace;
bool consume_whitespace;
public:
struct token_t
@ -220,16 +220,33 @@ public:
value_t::sequence_t::const_iterator _end)
: begin(_begin), end(_end), consume_whitespace(false)
{
TRACE_CTOR(query_lexer_t, "");
assert(begin != end);
arg_i = (*begin).as_string().begin();
arg_end = (*begin).as_string().end();
}
// Copy constructor: duplicates the lexer's full state -- the argument
// sequence bounds, the scan position within the current string argument,
// the whitespace-consumption mode, and any cached (peeked/pushed-back)
// token -- so a copy resumes lexing exactly where the original stood.
query_lexer_t(const query_lexer_t& lexer)
: begin(lexer.begin), end(lexer.end),
arg_i(lexer.arg_i), arg_end(lexer.arg_end),
consume_whitespace(lexer.consume_whitespace),
token_cache(lexer.token_cache)
{
TRACE_CTOR(query_lexer_t, "copy"); // ctor-tracing instrumentation only
}
// Destructor: no owned resources to release; only emits dtor-tracing
// instrumentation. Declared throw() so destruction never throws.
~query_lexer_t() throw() {
TRACE_DTOR(query_lexer_t);
}
token_t next_token();
// Store a single token of pushback/lookahead in token_cache.  Only one
// slot exists, so the cache must be empty (UNKNOWN) when this is called.
// NOTE(review): presumably next_token() drains token_cache before lexing
// fresh input -- the consuming side is outside this view; confirm there.
void push_token(token_t tok) {
assert(token_cache.kind == token_t::UNKNOWN);
token_cache = tok;
}
// Return the upcoming token without logically consuming it.  The token
// is lexed at most once and memoized in token_cache for the real read.
token_t peek_token() {
if (token_cache.kind != token_t::UNKNOWN)
return token_cache;          // already have a cached lookahead token
token_cache = next_token();    // lex and memoize it
return token_cache;
}
};
class query_parser_t
@ -245,22 +262,33 @@ class query_parser_t
public:
query_parser_t(value_t::sequence_t::const_iterator begin,
value_t::sequence_t::const_iterator end)
: lexer(begin, end) {}
: lexer(begin, end) {
TRACE_CTOR(query_parser_t, "");
}
// Copy constructor: copying the embedded lexer captures the complete
// parse position, so the copy can continue parsing independently of
// (and without disturbing) the original parser.
query_parser_t(const query_parser_t& parser)
: lexer(parser.lexer) {
TRACE_CTOR(query_parser_t, "copy"); // ctor-tracing instrumentation only
}
// Destructor: nothing to release; only emits dtor-tracing
// instrumentation. Declared throw() so destruction never throws.
~query_parser_t() throw() {
TRACE_DTOR(query_parser_t);
}
expr_t::ptr_op_t parse();
value_t::sequence_t::const_iterator begin() const {
return lexer.begin;
}
value_t::sequence_t::const_iterator end() const {
return lexer.end;
bool tokens_remaining() {
query_lexer_t::token_t tok = lexer.peek_token();
assert(tok.kind != query_lexer_t::token_t::UNKNOWN);
return tok.kind != query_lexer_t::token_t::END_REACHED;
}
};
std::pair<value_t::sequence_t::const_iterator, expr_t>
std::pair<expr_t, query_parser_t>
args_to_predicate(value_t::sequence_t::const_iterator begin,
value_t::sequence_t::const_iterator end);
std::pair<expr_t, query_parser_t>
args_to_predicate(query_parser_t parser);
} // namespace ledger
#endif // _PREDICATE_H

View file

@ -380,26 +380,24 @@ namespace {
value_t::sequence_t::const_iterator end =
args.value().as_sequence().end();
std::pair<value_t::sequence_t::const_iterator, expr_t>
info = args_to_predicate(begin, end);
begin = info.first;
std::pair<expr_t, query_parser_t> info = args_to_predicate(begin, end);
string limit = info.second.text();
string limit = info.first.text();
if (! limit.empty())
report.HANDLER(limit_).on(whence, limit);
DEBUG("report.predicate",
"Predicate = " << report.HANDLER(limit_).str());
string display;
if (begin != end)
display = args_to_predicate(begin, end).second.text();
if (info.second.tokens_remaining()) {
string display = args_to_predicate(info.second).first.text();
if (! display.empty())
report.HANDLER(display_).on(whence, display);
if (! display.empty())
report.HANDLER(display_).on(whence, display);
DEBUG("report.predicate",
"Display predicate = " << report.HANDLER(display_).str());
DEBUG("report.predicate",
"Display predicate = " << report.HANDLER(display_).str());
}
}
(report.*report_method)(handler_ptr(handler));