Added new "bold" modifier to query expressions
For example:

    ledger bal assets bold checking

Or you can use expressions:

    ledger bal assets bold '=total > 1000'

This last is identical to saying:

    ledger bal -l 'account =~ /assets/' --bold-if='total > 1000'
parent 7da2701295
commit 3f899c93e6

7 changed files with 175 additions and 133 deletions
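Before the per-file diff, a minimal usage sketch of the interface this commit introduces (not part of the commit itself; the include line and the helper name are illustrative, and in the real tree "system.h" is included first):

    #include <iostream>
    #include "query.h"

    using namespace ledger;

    // Hypothetical helper: parse the same arguments the commit message uses
    // and print the predicates the parser filed away for them.
    void dump_bold_predicate()
    {
      query_t query("assets bold checking");   // default keep_details_t()

      if (query.has_predicate(query_t::QUERY_LIMIT))
        std::cout << "limit: "
                  << query.get_predicate(query_t::QUERY_LIMIT).print_to_str()
                  << std::endl;

      if (query.has_predicate(query_t::QUERY_BOLD))
        // This text is what report_t::parse_query_args() hands to HANDLER(bold_if_).
        std::cout << "bold:  "
                  << query.get_predicate(query_t::QUERY_BOLD).print_to_str()
                  << std::endl;
    }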
@@ -202,25 +202,23 @@ value_t query_command(call_scope_t& args)
  query_t query(args.value(), report.what_to_keep(),
                ! report.HANDLED(collapse));
  if (query) {
  if (query.has_predicate(query_t::QUERY_LIMIT)) {
    call_scope_t sub_args(static_cast<scope_t&>(args));
    sub_args.push_back(string_value(query.text()));
    sub_args.push_back
      (string_value(query.get_predicate(query_t::QUERY_LIMIT).print_to_str()));

    parse_command(sub_args);
  }

  if (query.tokens_remaining()) {
  if (query.has_predicate(query_t::QUERY_SHOW)) {
    out << std::endl << _("====== Display predicate ======")
        << std::endl << std::endl;

    query.parse_again();
    call_scope_t disp_sub_args(static_cast<scope_t&>(args));
    disp_sub_args.push_back
      (string_value(query.get_predicate(query_t::QUERY_SHOW).print_to_str()));

    if (query) {
      call_scope_t disp_sub_args(static_cast<scope_t&>(args));
      disp_sub_args.push_back(string_value(query.text()));

      parse_command(disp_sub_args);
    }
    parse_command(disp_sub_args);
  }

  return NULL_VALUE;
@@ -37,10 +37,4 @@

namespace ledger {

predicate_t::predicate_t(const query_t& other)
  : expr_t(other), what_to_keep(other.what_to_keep)
{
  TRACE_CTOR(predicate_t, "query_t");
}

} // namespace ledger
@@ -48,8 +48,6 @@

namespace ledger {

class query_t;

class predicate_t : public expr_t
{
public:
@@ -63,15 +61,21 @@ public:
    : expr_t(other), what_to_keep(other.what_to_keep) {
    TRACE_CTOR(predicate_t, "copy");
  }
  predicate_t(const query_t& other);

  predicate_t(const string& str, const keep_details_t& _what_to_keep,
              const parse_flags_t& flags = PARSE_DEFAULT)
  predicate_t(ptr_op_t _ptr,
              const keep_details_t& _what_to_keep,
              scope_t * _context = NULL)
    : expr_t(_ptr, _context), what_to_keep(_what_to_keep) {
    TRACE_CTOR(predicate_t, "ptr_op_t, keep_details_t, scope_t *");
  }
  predicate_t(const string& str,
              const keep_details_t& _what_to_keep,
              const parse_flags_t& flags = PARSE_DEFAULT)
    : expr_t(str, flags), what_to_keep(_what_to_keep) {
    TRACE_CTOR(predicate_t, "string, keep_details_t, parse_flags_t");
  }
  predicate_t(std::istream& in, const keep_details_t& _what_to_keep,
              const parse_flags_t& flags = PARSE_DEFAULT)
  predicate_t(std::istream& in,
              const keep_details_t& _what_to_keep,
              const parse_flags_t& flags = PARSE_DEFAULT)
    : expr_t(in, flags), what_to_keep(_what_to_keep) {
    TRACE_CTOR(predicate_t, "std::istream&, keep_details_t, parse_flags_t");
  }
src/query.cc
@@ -170,20 +170,10 @@ test_ident:
      return token_t(token_t::TOK_META);
    else if (ident == "data")
      return token_t(token_t::TOK_META);
    else if (ident == "show") {
      // The "show" keyword is special, and separates a limiting predicate
      // from a display predicate.
      DEBUG("pred.show", "string = " << (*begin).as_string());
      return token_t(token_t::END_REACHED);
    }
#if 0
    // jww (2009-11-06): This is disabled for the time being.
    else if (ident == "date") {
      // The date keyword takes the whole of the next string as its argument.
      consume_whitespace = true;
      return token_t(token_t::TOK_DATE);
    }
#endif
    else if (ident == "show")
      return token_t(token_t::TOK_SHOW);
    else if (ident == "bold")
      return token_t(token_t::TOK_BOLD);
    else if (ident == "expr") {
      // The expr keyword takes the whole of the next string as its argument.
      consume_next_arg = true;
@@ -238,10 +228,12 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context
  lexer_t::token_t tok = lexer.next_token();
  switch (tok.kind) {
  case lexer_t::token_t::TOK_SHOW:
  case lexer_t::token_t::TOK_BOLD:
  case lexer_t::token_t::END_REACHED:
    lexer.push_token(tok);
    break;

  case lexer_t::token_t::TOK_DATE:
  case lexer_t::token_t::TOK_CODE:
  case lexer_t::token_t::TOK_PAYEE:
  case lexer_t::token_t::TOK_NOTE:
@@ -257,41 +249,6 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context
  case lexer_t::token_t::TERM:
    assert(tok.value);
    switch (tok_context) {
    case lexer_t::token_t::TOK_DATE: {
      expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);
      ident->set_ident("date");

      date_interval_t interval(*tok.value);

      if (interval.start) {
        node = new expr_t::op_t(expr_t::op_t::O_GTE);
        node->set_left(ident);

        expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
        arg1->set_value(*interval.start);
        node->set_right(arg1);
      }

      if (interval.finish) {
        expr_t::ptr_op_t lt = new expr_t::op_t(expr_t::op_t::O_LT);
        lt->set_left(ident);

        expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
        arg1->set_value(*interval.finish);
        lt->set_right(arg1);

        if (node) {
          expr_t::ptr_op_t prev(node);
          node = new expr_t::op_t(expr_t::op_t::O_AND);
          node->set_left(prev);
          node->set_right(lt);
        } else {
          node = lt;
        }
      }
      break;
    }

    case lexer_t::token_t::TOK_EXPR:
      node = expr_t(*tok.value).get_op();
      break;
@@ -357,7 +314,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context
    break;

  case lexer_t::token_t::LPAREN:
    node = parse_query_expr(tok_context);
    node = parse_query_expr(tok_context, true);
    tok = lexer.next_token();
    if (tok.kind != lexer_t::token_t::RPAREN)
      tok.expected(')');
@@ -447,18 +404,77 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
}

expr_t::ptr_op_t
query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context)
query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
                                    bool subexpression)
{
  if (expr_t::ptr_op_t node = parse_or_expr(tok_context)) {
    if (expr_t::ptr_op_t next = parse_query_expr(tok_context)) {
      expr_t::ptr_op_t prev(node);
      node = new expr_t::op_t(expr_t::op_t::O_OR);
      node->set_left(prev);
      node->set_right(next);
  expr_t::ptr_op_t limiter;

  while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
    if (! limiter) {
      limiter = next;
    } else {
      expr_t::ptr_op_t prev(limiter);
      limiter = new expr_t::op_t(expr_t::op_t::O_OR);
      limiter->set_left(prev);
      limiter->set_right(next);
    }
    return node;
  }
  return expr_t::ptr_op_t();

  if (! subexpression) {
    if (limiter)
      query_map.insert
        (query_map_t::value_type(QUERY_LIMIT, predicate_t(limiter, what_to_keep)));

    lexer_t::token_t tok = lexer.peek_token();
    while (tok.kind != lexer_t::token_t::END_REACHED) {
      switch (tok.kind) {
      case lexer_t::token_t::TOK_SHOW: {
        lexer.next_token();

        expr_t::ptr_op_t node;
        while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
          if (! node) {
            node = next;
          } else {
            expr_t::ptr_op_t prev(node);
            node = new expr_t::op_t(expr_t::op_t::O_OR);
            node->set_left(prev);
            node->set_right(next);
          }
        }

        if (node)
          query_map.insert
            (query_map_t::value_type(QUERY_SHOW, predicate_t(node, what_to_keep)));
        break;
      }

      case lexer_t::token_t::TOK_BOLD: {
        lexer.next_token();

        expr_t::ptr_op_t node = parse_or_expr(tok_context);
        while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
          expr_t::ptr_op_t prev(node);
          node = new expr_t::op_t(expr_t::op_t::O_OR);
          node->set_left(prev);
          node->set_right(next);
        }

        if (node)
          query_map.insert
            (query_map_t::value_type(QUERY_BOLD, predicate_t(node, what_to_keep)));
        break;
      }

      default:
        break;
      }

      tok = lexer.peek_token();
    }
  }

  return limiter;
}

} // namespace ledger
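A standalone sketch of the folding pattern the rewritten parse_query_expr() above uses (plain C++, not ledger's types; the names are illustrative): every term returned by parse_or_expr() is OR-chained onto a running node, so the words before "show"/"bold" collapse into the QUERY_LIMIT predicate and the words after each keyword collapse into QUERY_SHOW / QUERY_BOLD the same way.

    #include <iostream>
    #include <memory>
    #include <string>

    struct op_t {
      std::string           value;        // leaf term; empty for OR nodes
      std::shared_ptr<op_t> left, right;  // children when this is an OR node
    };
    using ptr_op_t = std::shared_ptr<op_t>;

    static ptr_op_t term(const std::string& v) {
      return std::make_shared<op_t>(op_t{v, nullptr, nullptr});
    }

    // Mirrors "if (! node) node = next; else node = new O_OR(prev, next)".
    static ptr_op_t or_fold(ptr_op_t node, ptr_op_t next) {
      if (!node) return next;
      return std::make_shared<op_t>(op_t{"", node, next});
    }

    static std::string print(const ptr_op_t& n) {
      return n->left ? "(" + print(n->left) + " or " + print(n->right) + ")"
                     : n->value;
    }

    int main() {
      ptr_op_t limiter;
      for (const char * t : {"assets", "cash"})
        limiter = or_fold(limiter, term(t));
      std::cout << print(limiter) << std::endl;   // prints "(assets or cash)"
    }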
src/query.h
@@ -46,7 +46,7 @@

namespace ledger {

class query_t : public predicate_t
class query_t
{
protected:
  class parser_t;
@@ -81,7 +81,6 @@ public:
      TOK_OR,
      TOK_EQ,

      TOK_DATE,
      TOK_CODE,
      TOK_PAYEE,
      TOK_NOTE,
@@ -89,6 +88,9 @@ public:
      TOK_META,
      TOK_EXPR,

      TOK_SHOW,
      TOK_BOLD,

      TERM,

      END_REACHED
@@ -131,13 +133,14 @@ public:
      case TOK_AND: return "TOK_AND";
      case TOK_OR: return "TOK_OR";
      case TOK_EQ: return "TOK_EQ";
      case TOK_DATE: return "TOK_DATE";
      case TOK_CODE: return "TOK_CODE";
      case TOK_PAYEE: return "TOK_PAYEE";
      case TOK_NOTE: return "TOK_NOTE";
      case TOK_ACCOUNT: return "TOK_ACCOUNT";
      case TOK_META: return "TOK_META";
      case TOK_EXPR: return "TOK_EXPR";
      case TOK_SHOW: return "TOK_SHOW";
      case TOK_BOLD: return "TOK_BOLD";
      case TERM: return string("TERM(") + *value + ")";
      case END_REACHED: return "END_REACHED";
      }
@@ -153,13 +156,14 @@ public:
      case TOK_AND: return "and";
      case TOK_OR: return "or";
      case TOK_EQ: return "=";
      case TOK_DATE: return "date";
      case TOK_CODE: return "code";
      case TOK_PAYEE: return "payee";
      case TOK_NOTE: return "note";
      case TOK_ACCOUNT: return "account";
      case TOK_META: return "meta";
      case TOK_EXPR: return "expr";
      case TOK_SHOW: return "show";
      case TOK_BOLD: return "bold";

      case END_REACHED: return "<EOF>";
@@ -218,24 +222,38 @@ public:
    }
  };

  enum kind_t {
    QUERY_LIMIT,
    QUERY_SHOW,
    QUERY_BOLD
  };

  typedef std::map<kind_t, predicate_t> query_map_t;

protected:
  class parser_t
  {
    friend class query_t;

    value_t args;
    lexer_t lexer;
    value_t        args;
    lexer_t        lexer;
    keep_details_t what_to_keep;
    query_map_t    query_map;

    expr_t::ptr_op_t parse_query_term(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_unary_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_and_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_or_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_query_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_query_expr(lexer_t::token_t::kind_t tok_context,
                                      bool subexpression = false);

  public:
    parser_t(const value_t& _args, bool multiple_args = true)
      : args(_args), lexer(args.begin(), args.end(), multiple_args) {
      TRACE_CTOR(query_t::parser_t, "");
    parser_t(const value_t& _args,
             const keep_details_t& _what_to_keep = keep_details_t(),
             bool multiple_args = true)
      : args(_args), lexer(args.begin(), args.end(), multiple_args),
        what_to_keep(_what_to_keep) {
      TRACE_CTOR(query_t::parser_t, "value_t, keep_details_t, bool");
    }
    parser_t(const parser_t& parser)
      : args(parser.args), lexer(parser.lexer) {
@@ -245,8 +263,8 @@ protected:
      TRACE_DTOR(query_t::parser_t);
    }

    expr_t::ptr_op_t parse() {
      return parse_query_expr(lexer_t::token_t::TOK_ACCOUNT);
    expr_t::ptr_op_t parse(bool subexpression = false) {
      return parse_query_expr(lexer_t::token_t::TOK_ACCOUNT, subexpression);
    }

    bool tokens_remaining() {
@@ -257,55 +275,61 @@ protected:
  };

  optional<parser_t> parser;
  query_map_t        predicates;

public:
  query_t() {
    TRACE_CTOR(query_t, "");
  }
  query_t(const query_t& other)
    : predicate_t(other) {
    : parser(other.parser), predicates(other.predicates) {
    TRACE_CTOR(query_t, "copy");
  }
  query_t(const string& arg,
          const keep_details_t& _what_to_keep = keep_details_t(),
          bool multiple_args = true)
    : predicate_t(_what_to_keep) {
    TRACE_CTOR(query_t, "string, keep_details_t");
  query_t(const string& arg,
          const keep_details_t& what_to_keep = keep_details_t(),
          bool multiple_args = true) {
    TRACE_CTOR(query_t, "string, keep_details_t, bool");
    if (! arg.empty()) {
      value_t temp(string_value(arg));
      parse_args(temp.to_sequence(), multiple_args);
      parse_args(temp.to_sequence(), what_to_keep, multiple_args);
    }
  }
  query_t(const value_t& args,
          const keep_details_t& _what_to_keep = keep_details_t(),
          bool multiple_args = true)
    : predicate_t(_what_to_keep) {
    TRACE_CTOR(query_t, "value_t, keep_details_t");
  query_t(const value_t& args,
          const keep_details_t& what_to_keep = keep_details_t(),
          bool multiple_args = true) {
    TRACE_CTOR(query_t, "value_t, keep_details_t, bool");
    if (! args.empty())
      parse_args(args, multiple_args);
      parse_args(args, what_to_keep, multiple_args);
  }
  virtual ~query_t() {
    TRACE_DTOR(query_t);
  }

  void parse_args(const value_t& args, bool multiple_args = true) {
  expr_t::ptr_op_t
  parse_args(const value_t& args,
             const keep_details_t& what_to_keep = keep_details_t(),
             bool multiple_args = true,
             bool subexpression = false) {
    if (! parser)
      parser = parser_t(args, multiple_args);
    ptr = parser->parse(); // expr_t::ptr
      parser = parser_t(args, what_to_keep, multiple_args);
    return parser->parse(subexpression);
  }

  void parse_again() {
    assert(parser);
    ptr = parser->parse(); // expr_t::ptr
  bool has_predicate(const kind_t& id) const {
    return parser && parser->query_map.find(id) != parser->query_map.end();
  }
  predicate_t get_predicate(const kind_t& id) const {
    if (parser) {
      query_map_t::const_iterator i = parser->query_map.find(id);
      if (i != parser->query_map.end())
        return (*i).second;
    }
    return predicate_t();
  }

  bool tokens_remaining() {
    return parser && parser->tokens_remaining();
  }

  virtual string text() {
    return print_to_str();
  }
};

} // namespace ledger
@@ -257,21 +257,23 @@ void report_t::normalize_options(const string& verb)
void report_t::parse_query_args(const value_t& args, const string& whence)
{
  query_t query(args, what_to_keep());
  if (query) {
    HANDLER(limit_).on(whence, query.text());

    DEBUG("report.predicate",
          "Predicate = " << HANDLER(limit_).str());
  if (query.has_predicate(query_t::QUERY_LIMIT)) {
    HANDLER(limit_)
      .on(whence, query.get_predicate(query_t::QUERY_LIMIT).print_to_str());
    DEBUG("report.predicate", "Predicate = " << HANDLER(limit_).str());
  }

  if (query.tokens_remaining()) {
    query.parse_again();
    if (query) {
      HANDLER(display_).on(whence, query.text());
  if (query.has_predicate(query_t::QUERY_SHOW)) {
    HANDLER(display_)
      .on(whence, query.get_predicate(query_t::QUERY_SHOW).print_to_str());
    DEBUG("report.predicate", "Display predicate = " << HANDLER(display_).str());
  }

      DEBUG("report.predicate",
            "Display predicate = " << HANDLER(display_).str());
    }
  if (query.has_predicate(query_t::QUERY_BOLD)) {
    HANDLER(bold_if_)
      .set_expr(whence, query.get_predicate(query_t::QUERY_BOLD).print_to_str());
    DEBUG("report.predicate", "Bolding predicate = " << HANDLER(display_).str());
  }
}
@@ -534,9 +534,13 @@ void instance_t::automated_xact_directive(char * line)
  bool reveal_context = true;

  try {
    std::auto_ptr<auto_xact_t> ae
      (new auto_xact_t(query_t(string(skip_ws(line + 1)),
                               keep_details_t(true, true, true), false)));
    query_t        query;
    keep_details_t keeper(true, true, true);
    expr_t::ptr_op_t expr =
      query.parse_args(string_value(skip_ws(line + 1)).to_sequence(),
                       keeper, false, true);

    std::auto_ptr<auto_xact_t> ae(new auto_xact_t(predicate_t(expr, keeper)));
    ae->pos = position_t();
    ae->pos->pathname = pathname;
    ae->pos->beg_pos = line_beg_pos;
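The change above can be read as a hypothetical helper (assuming the modified query.h/predicate.h interfaces; the function name is not in the sources): the directive line is parsed as a subexpression with metadata kept, and the resulting op tree is wrapped directly in a predicate_t rather than carrying a whole query_t inside the auto_xact_t.

    #include "query.h"
    #include "predicate.h"

    using namespace ledger;

    // Sketch of the new construction path for automated-xact ("= <query>") lines.
    predicate_t make_auto_xact_predicate(const string& line)
    {
      query_t        query;
      keep_details_t keeper(true, true, true);

      expr_t::ptr_op_t expr =
        query.parse_args(string_value(line).to_sequence(),
                         keeper, /* multiple_args = */ false,
                         /* subexpression = */ true);

      return predicate_t(expr, keeper);
    }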