Redesigned the expr_t, predicate_t, query_t classes

parent 523d4243e8
commit c3535d06c8

37 changed files with 975 additions and 785 deletions
src/amount.h | 25

@@ -66,6 +66,20 @@ class commodity_pool_t;
 DECLARE_EXCEPTION(amount_error, std::runtime_error);

+enum parse_flags_enum_t {
+  PARSE_DEFAULT    = 0x00,
+  PARSE_PARTIAL    = 0x01,
+  PARSE_SINGLE     = 0x02,
+  PARSE_NO_MIGRATE = 0x04,
+  PARSE_NO_REDUCE  = 0x08,
+  PARSE_NO_ASSIGN  = 0x10,
+  PARSE_NO_DATES   = 0x20,
+  PARSE_OP_CONTEXT = 0x40,
+  PARSE_SOFT_FAIL  = 0x80
+};
+
+typedef basic_flags_t<parse_flags_enum_t, uint_least8_t> parse_flags_t;
+
 /**
  * @brief Encapsulate infinite-precision commoditized amounts
  *
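With the parse flags promoted from amount_t to namespace scope, call sites drop the amount_t:: qualifier and can still OR the enumerators together into a parse_flags_t, as the later derive/xact hunks in this diff do. A minimal sketch of that call pattern, assuming the ledger headers from this commit are on the include path (the amount string is only an example):

    #include "amount.h"

    using namespace ledger;

    void parse_example()
    {
      amount_t amt;
      // Enumerators combine directly and convert to parse_flags_t,
      // mirroring the cost.parse(...) call sites later in this diff.
      amt.parse(string("$100.00"), PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
    }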
@@ -612,17 +626,8 @@ public:
     amount_t::parse_conversion("1.0m", "60s"); // a minute is 60 seconds
     amount_t::parse_conversion("1.0h", "60m"); // an hour is 60 minutes
     @endcode
-  */
-
-  enum parse_flags_enum_t {
-    PARSE_DEFAULT    = 0x00,
-    PARSE_NO_MIGRATE = 0x01,
-    PARSE_NO_REDUCE  = 0x02,
-    PARSE_SOFT_FAIL  = 0x04
-  };
-
-  typedef basic_flags_t<parse_flags_enum_t, uint_least8_t> parse_flags_t;
-
-  /** The method parse() is used to parse an amount from an input stream
+
+     The method parse() is used to parse an amount from an input stream
      or a string.  A global operator>>() is also defined which simply
      calls parse on the input stream.  The parse() method has two forms:
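A hedged illustration of the two entry points that comment describes (the amount strings are examples only; operator>>() simply forwards to the stream form of parse()):

    amount_t x;
    std::istringstream stream("$123.45");
    stream >> x;                                   // stream form, via operator>>()

    amount_t y;
    y.parse(string("10 AAPL"), PARSE_NO_MIGRATE);  // string form, with optional flags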
@@ -63,7 +63,7 @@ void annotation_t::parse(std::istream& in)
       throw_(amount_error, _("Commodity price lacks closing brace"));

     amount_t temp;
-    temp.parse(buf, amount_t::PARSE_NO_MIGRATE);
+    temp.parse(buf, PARSE_NO_MIGRATE);

     DEBUG("commodity.annotations", "Parsed annotation price: " << temp);
@@ -43,7 +43,7 @@
 #include "xact.h"

 #define LEDGER_MAGIC    0x4c454447
-#define ARCHIVE_VERSION 0x03000003
+#define ARCHIVE_VERSION 0x03000004

 //BOOST_IS_ABSTRACT(ledger::scope_t)
 BOOST_CLASS_EXPORT(ledger::scope_t)
src/chain.cc | 32

@@ -44,8 +44,8 @@ post_handler_ptr chain_post_handlers(report_t& report,
                                      bool only_preliminaries)
 {
   post_handler_ptr handler(base_handler);
-  item_predicate display_predicate;
-  item_predicate only_predicate;
+  predicate_t display_predicate;
+  predicate_t only_predicate;

   assert(report.HANDLED(amount_));
   expr_t& expr(report.HANDLER(amount_).expr);

@@ -55,8 +55,8 @@ post_handler_ptr chain_post_handlers(report_t& report,
   // Make sure only forecast postings which match are allowed through
   if (report.HANDLED(forecast_while_)) {
     handler.reset(new filter_posts
-                  (handler, item_predicate(report.HANDLER(forecast_while_).str(),
+                  (handler, predicate_t(report.HANDLER(forecast_while_).str(),
                             report.what_to_keep()),
                    report));
   }

@@ -73,8 +73,8 @@ post_handler_ptr chain_post_handlers(report_t& report,
   // filter_posts will only pass through posts matching the
   // `display_predicate'.
   if (report.HANDLED(display_)) {
-    display_predicate = item_predicate(report.HANDLER(display_).str(),
+    display_predicate = predicate_t(report.HANDLER(display_).str(),
                                     report.what_to_keep());
     handler.reset(new filter_posts(handler, display_predicate, report));
   }

@@ -100,8 +100,8 @@ post_handler_ptr chain_post_handlers(report_t& report,
   // filter_posts will only pass through posts matching the
   // `secondary_predicate'.
   if (report.HANDLED(only_)) {
-    only_predicate = item_predicate(report.HANDLER(only_).str(),
+    only_predicate = predicate_t(report.HANDLER(only_).str(),
                                  report.what_to_keep());
     handler.reset(new filter_posts(handler, only_predicate, report));
   }

@@ -187,8 +187,8 @@ post_handler_ptr chain_post_handlers(report_t& report,
     DEBUG("report.predicate",
           "Report predicate expression = " << report.HANDLER(limit_).str());
     handler.reset(new filter_posts
-                  (handler, item_predicate(report.HANDLER(limit_).str(),
+                  (handler, predicate_t(report.HANDLER(limit_).str(),
                             report.what_to_keep()),
                    report));
   }

@@ -211,15 +211,15 @@ post_handler_ptr chain_post_handlers(report_t& report,
     // the filter get reported.
     if (report.HANDLED(limit_))
       handler.reset(new filter_posts
-                    (handler, item_predicate(report.HANDLER(limit_).str(),
+                    (handler, predicate_t(report.HANDLER(limit_).str(),
                               report.what_to_keep()),
                      report));
   }
   else if (report.HANDLED(forecast_while_)) {
     forecast_posts * forecast_handler
       = new forecast_posts(handler,
-                           item_predicate(report.HANDLER(forecast_while_).str(),
+                           predicate_t(report.HANDLER(forecast_while_).str(),
                                        report.what_to_keep()),
                            report,
                            report.HANDLED(forecast_years_) ?
                              static_cast<std::size_t>

@@ -231,8 +231,8 @@ post_handler_ptr chain_post_handlers(report_t& report,
   // See above, under budget_posts.
   if (report.HANDLED(limit_))
     handler.reset(new filter_posts
-                  (handler, item_predicate(report.HANDLER(limit_).str(),
+                  (handler, predicate_t(report.HANDLER(limit_).str(),
                             report.what_to_keep()),
                    report));
 }
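Because predicate_t now carries both the expression and the keep-details, each filter link is built in one step. A hedged sketch with a literal expression string in place of the option lookup used above (the expression and variable names are illustrative only; report and handler stand for the locals of chain_post_handlers()):

    predicate_t only_positive("amount > 0", report.what_to_keep());
    handler.reset(new filter_posts(handler, only_positive, report));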
@@ -193,8 +193,7 @@ namespace {
       if (begin == end)
         throw std::runtime_error(_("Invalid xact command arguments"));
       arg = (*++begin).to_string();
-      if (! cost.parse(arg, amount_t::PARSE_SOFT_FAIL |
-                            amount_t::PARSE_NO_MIGRATE))
+      if (! cost.parse(arg, PARSE_SOFT_FAIL | PARSE_NO_MIGRATE))
         throw std::runtime_error(_("Invalid xact command arguments"));
       post->cost = cost;
     }

@@ -213,8 +212,7 @@ namespace {
       amount_t         amt;
       optional<mask_t> account;

-      if (! amt.parse(arg, amount_t::PARSE_SOFT_FAIL |
-                           amount_t::PARSE_NO_MIGRATE))
+      if (! amt.parse(arg, PARSE_SOFT_FAIL | PARSE_NO_MIGRATE))
        account = mask_t(arg);

      if (! post ||
src/expr.cc | 136

@@ -36,122 +36,26 @@

 namespace ledger {

-expr_t::expr_t() : context(NULL), compiled(false)
-{
-  TRACE_CTOR(expr_t, "");
-}
-
-expr_t::expr_t(const expr_t& other)
-  : ptr(other.ptr), context(other.context), str(other.str), compiled(false)
-{
-  TRACE_CTOR(expr_t, "copy");
-}
-
-expr_t::expr_t(const string& _str, const uint_least8_t flags)
-  : context(NULL), str(_str), compiled(false)
-{
-  TRACE_CTOR(expr_t, "const string&");
-  if (! _str.empty())
-    parse(str, flags);
-}
-
-expr_t::expr_t(std::istream& in, const uint_least8_t flags)
-  : context(NULL), compiled(false)
-{
-  TRACE_CTOR(expr_t, "std::istream&");
-  parse(in, flags);
-}
-
-expr_t::expr_t(const ptr_op_t& _ptr, scope_t * _context, const string& _str)
-  : ptr(_ptr), context(_context), str(_str), compiled(false)
-{
-  TRACE_CTOR(expr_t, "const ptr_op_t&, scope_t *, const string&");
-}
-
-expr_t::~expr_t() throw()
-{
-  TRACE_DTOR(expr_t);
-}
-
-expr_t::ptr_op_t expr_t::get_op() throw()
-{
-  return ptr;
-}
-
-string expr_t::text()
-{
-  if (str.empty()) {
-    std::ostringstream out;
-    ptr->print(out);
-    set_text(out.str());
-  }
-  return str;
-}
-
-expr_t& expr_t::operator=(const expr_t& _expr)
-{
-  if (this != &_expr) {
-    str      = _expr.str;
-    ptr      = _expr.ptr;
-    context  = _expr.context;
-    compiled = _expr.compiled;
-  }
-  return *this;
-}
-
-void expr_t::parse(const string& _str, const uint32_t flags)
-{
+void expr_t::parse(std::istream& in, const parse_flags_t& flags,
+                   const optional<string>& original_string)
+{
+  base_type::parse(in, flags, original_string);
+
   parser_t parser;
-  str = _str;
-  ptr = parser.parse(str, parser_t::parse_flags_t
-                     (static_cast<uint_least8_t>(flags)));
-  context  = NULL;
-  compiled = false;
-}
-
-void expr_t::parse(std::istream& in, const uint32_t flags,
-                   const string * original_string)
-{
-  parser_t parser;
-  str = "<stream>";
-  ptr = parser.parse(in, parser_t::parse_flags_t
-                     (static_cast<uint_least8_t>(flags)), original_string);
-  context  = NULL;
-  compiled = false;
-}
-
-void expr_t::recompile(scope_t& scope)
-{
-  if (ptr.get()) {
-    ptr = ptr->compile(scope);
-    context  = &scope;
-    compiled = true;
-  }
+  ptr = parser.parse(in, flags, original_string);
 }

 void expr_t::compile(scope_t& scope)
 {
-  if (! compiled)
-    recompile(scope);
+  if (! compiled && ptr) {
+    ptr = ptr->compile(scope);
+    base_type::compile(scope);
+  }
 }

-value_t expr_t::calc(scope_t& scope)
+value_t expr_t::real_calc(scope_t& scope)
 {
-  if (ptr.get()) {
-    if (! compiled) {
-      if (SHOW_DEBUG("expr.compile")) {
-        DEBUG("expr.compile", "Before compilation:");
-        dump(*_log_stream);
-      }
-
-      compile(scope);
-
-      if (SHOW_DEBUG("expr.compile")) {
-        DEBUG("expr.compile", "After compilation:");
-        dump(*_log_stream);
-      }
-    }
-
+  if (ptr) {
     ptr_op_t locus;
     try {
       return ptr->calc(scope, &locus);

@@ -170,13 +74,13 @@ value_t expr_t::calc(scope_t& scope)
 bool expr_t::is_constant() const
 {
   assert(compiled);
-  return ptr.get() && ptr->is_value();
+  return ptr && ptr->is_value();
 }

 bool expr_t::is_function() const
 {
   assert(compiled);
-  return ptr.get() && ptr->is_function();
+  return ptr && ptr->is_function();
 }

 value_t& expr_t::constant_value()

@@ -191,15 +95,15 @@ const value_t& expr_t::constant_value() const
   return ptr->as_value();
 }

-function_t& expr_t::get_function()
+expr_t::func_t& expr_t::get_function()
 {
   assert(is_function());
   return ptr->as_function_lval();
 }

-value_t expr_t::eval(const string& _expr, scope_t& scope)
+string expr_t::context_to_str() const
 {
-  return expr_t(_expr).calc(scope);
+  return ptr ? op_context(ptr) : _("<empty expression>");
 }

 void expr_t::print(std::ostream& out) const

@@ -213,14 +117,4 @@ void expr_t::dump(std::ostream& out) const
   if (ptr) ptr->dump(out, 0);
 }

-std::ostream& operator<<(std::ostream& out, const expr_t& expr) {
-  expr.print(out);
-  return out;
-}
-
-string expr_context(const expr_t& expr)
-{
-  return expr ? op_context(expr.ptr) : _("<empty expression>");
-}
-
 } // namespace ledger
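The static convenience expr_t::eval(str, scope) is gone; as its removed body shows, the equivalent is to build a temporary expression and evaluate it. A hedged sketch (the expression string and scope are hypothetical stand-ins):

    expr_t expr("amount * 2");          // parses immediately
    value_t result = expr.calc(scope);  // base-class calc() compiles on first use,
                                        // then dispatches to expr_t::real_calc()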
src/expr.h | 144

@@ -42,118 +42,91 @@
 #ifndef _EXPR_H
 #define _EXPR_H

+#include "exprbase.h"
 #include "value.h"

 namespace ledger {

-DECLARE_EXCEPTION(parse_error, std::runtime_error);
-DECLARE_EXCEPTION(compile_error, std::runtime_error);
-DECLARE_EXCEPTION(calc_error, std::runtime_error);
-DECLARE_EXCEPTION(usage_error, std::runtime_error);
-
-class scope_t;
-class call_scope_t;
-
-typedef function<value_t (call_scope_t&)> function_t;
-
-class expr_t
+class expr_t : public expr_base_t<value_t>
 {
   struct token_t;
   class  parser_t;

-  friend string expr_context(const expr_t& expr);
+  typedef expr_base_t<value_t> base_type;

 public:
   class op_t;
   typedef intrusive_ptr<op_t>       ptr_op_t;
   typedef intrusive_ptr<const op_t> const_ptr_op_t;

-  enum parse_flags_enum_t {
-    PARSE_NORMAL     = 0x00,
-    PARSE_PARTIAL    = 0x01,
-    PARSE_SINGLE     = 0x02,
-    PARSE_NO_MIGRATE = 0x04,
-    PARSE_NO_REDUCE  = 0x08,
-    PARSE_NO_ASSIGN  = 0x10,
-    PARSE_NO_DATES   = 0x20,
-    PARSE_OP_CONTEXT = 0x40
-  };
-
-private:
-  ptr_op_t  ptr;
-  scope_t * context;
-  string    str;
-  bool      compiled;
+protected:
+  ptr_op_t ptr;

 public:
-  expr_t();
-  expr_t(const expr_t& other);
-  expr_t(const ptr_op_t& _ptr, scope_t * context = NULL,
-         const string& _str = "");
+  expr_t() : base_type() {
+    TRACE_CTOR(expr_t, "");
+  }
+  expr_t(const expr_t& other)
+    : base_type(other), ptr(other.ptr) {
+    TRACE_CTOR(expr_t, "copy");
+  }
+  expr_t(ptr_op_t _ptr, scope_t * _context = NULL)
+    : base_type(_context), ptr(_ptr) {
+    TRACE_CTOR(expr_t, "const ptr_op_t&, scope_t *");
+  }

-  expr_t(const string& _str, const uint_least8_t flags = 0);
-  expr_t(std::istream& in, const uint_least8_t flags = 0);
+  expr_t(const string& _str, const parse_flags_t& flags = PARSE_DEFAULT)
+    : base_type() {
+    TRACE_CTOR(expr_t, "string, parse_flags_t");
+    if (! _str.empty())
+      parse(_str, flags);
+  }
+  expr_t(std::istream& in, const parse_flags_t& flags = PARSE_DEFAULT)
+    : base_type() {
+    TRACE_CTOR(expr_t, "std::istream&, parse_flags_t");
+    parse(in, flags);
+  }

-  ~expr_t() throw();
+  ~expr_t() throw() {
+    TRACE_DTOR(expr_t);
+  }

-  expr_t& operator=(const expr_t& _expr);
-  expr_t& operator=(const string& _expr) {
-    parse(_expr);
+  expr_t& operator=(const expr_t& _expr) {
+    if (this != &_expr) {
+      base_type::operator=(_expr);
+      ptr = _expr.ptr;
+    }
     return *this;
   }

-  operator bool() const throw() {
+  virtual operator bool() const throw() {
     return ptr.get() != NULL;
   }

-  ptr_op_t get_op() throw();
-  string   text();
-
-  // This has special use in the textual parser
-  void set_text(const string& txt) {
-    str = txt;
+  ptr_op_t get_op() throw() {
+    return ptr;
   }

-  void parse(const string& _str, const uint32_t flags = 0);
-  void parse(std::istream& in, const uint32_t flags = 0,
-             const string * original_string = NULL);
-
-  void mark_uncompiled() {
-    compiled = false;
-  }
-  void recompile(scope_t& scope);
-  void compile(scope_t& scope);
-  value_t calc(scope_t& scope);
-  value_t calc(scope_t& scope) const;
-
-  value_t calc() {
-    assert(context);
-    return calc(*context);
-  }
-  value_t calc() const {
-    assert(context);
-    return calc(*context);
+  void parse(const string& str, const parse_flags_t& flags = PARSE_DEFAULT) {
+    std::istringstream stream(str);
+    return parse(stream, flags, str);
   }

-  scope_t * get_context() {
-    return context;
-  }
-  void set_context(scope_t * scope) {
-    context = scope;
-  }
+  virtual void parse(std::istream& in,
+                     const parse_flags_t& flags = PARSE_DEFAULT,
+                     const optional<string>& original_string = none);
+  virtual void compile(scope_t& scope);
+  virtual value_t real_calc(scope_t& scope);

   bool is_constant() const;
-  bool is_function() const;
-
-  value_t&       constant_value();
-  const value_t& constant_value() const;
-
-  function_t& get_function();
-
-  void print(std::ostream& out) const;
-  void dump(std::ostream& out) const;
-
-  static value_t eval(const string& _expr, scope_t& scope);
+  value_t&       constant_value();
+  const value_t& constant_value() const;
+  bool is_function() const;
+  func_t& get_function();
+
+  virtual string context_to_str() const;
+  virtual void   print(std::ostream& out) const;
+  virtual void   dump(std::ostream& out) const;

 #if defined(HAVE_BOOST_SERIALIZATION)
 private:

@@ -163,19 +136,12 @@ private:
   template<class Archive>
   void serialize(Archive& ar, const unsigned int /* version */) {
+    ar & boost::serialization::base_object<base_type>(*this);
     ar & ptr;
-    ar & context;
-    ar & str;
-    if (Archive::is_loading::value)
-      compiled = false;
   }
 #endif // HAVE_BOOST_SERIALIZATION
 };

-std::ostream& operator<<(std::ostream& out, const expr_t& expr);
-
-string expr_context(const expr_t& expr);
-
 } // namespace ledger

 #endif // _EXPR_H
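A hedged sketch of the redesigned constructors in use; the expression strings are hypothetical, and in, op and scope stand for an open input stream, an existing ptr_op_t and a scope_t:

    expr_t by_string("total > 100", PARSE_SINGLE);  // parses now unless the string is empty
    expr_t by_stream(in, PARSE_PARTIAL);            // parses from an input stream
    expr_t wrapped(op, &scope);                     // adopts an op tree plus optional context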
src/exprbase.h | 253 (new file)

@@ -0,0 +1,253 @@
/*
 * Copyright (c) 2003-2009, John Wiegley.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * - Neither the name of New Artisans LLC nor the names of its
 *   contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * @addtogroup expr
 */

/**
 * @file   exprbase.h
 * @author John Wiegley
 *
 * @ingroup expr
 *
 * This class provides basic behavior for all the domain specific expression
 * languages used in Ledger:
 *
 * | Typename    | Description                | result_type     | Derives     |
 * |-------------+----------------------------+-----------------+-------------|
 * | expr_t      | Value expressions          | value_t         |             |
 * | predicate_t | Special form of expr_t     | bool            | expr_t      |
 * | query_t     | Report queries             | bool            | predicate_t |
 * | period_t    | Time periods and durations | date_interval_t |             |
 * | draft_t     | Partially filled xacts     | xact_t *        |             |
 * | format_t    | Format strings             | string          |             |
 */
#ifndef _EXPRBASE_H
#define _EXPRBASE_H

#include "utils.h"
#include "amount.h"

namespace ledger {

DECLARE_EXCEPTION(parse_error, std::runtime_error);
DECLARE_EXCEPTION(compile_error, std::runtime_error);
DECLARE_EXCEPTION(calc_error, std::runtime_error);
DECLARE_EXCEPTION(usage_error, std::runtime_error);

class scope_t;
class call_scope_t;

template <typename ResultType>
class expr_base_t
{
public:
  typedef ResultType result_type;

  typedef function<result_type (call_scope_t&)> func_t;

protected:
  scope_t * context;
  string    str;
  bool      compiled;

  virtual result_type real_calc(scope_t& scope) = 0;

public:
  expr_base_t(const expr_base_t& other)
    : context(other.context), str(other.str), compiled(false) {
    TRACE_CTOR(expr_base_t, "copy");
  }
  expr_base_t(scope_t * _context = NULL)
    : context(_context), compiled(false)
  {
    TRACE_CTOR(expr_base_t, "scope_t *");
  }

  ~expr_base_t() throw() {
    TRACE_DTOR(expr_base_t);
  }

  expr_base_t& operator=(const expr_base_t& _expr) {
    if (this != &_expr) {
      str      = _expr.str;
      context  = _expr.context;
      compiled = _expr.compiled;
    }
    return *this;
  }
  expr_base_t& operator=(const string& _expr) {
    parse(_expr);
    return *this;
  }

  virtual operator bool() const throw() {
    return ! str.empty();
  }

  virtual string text() {
    return str;
  }
  void set_text(const string& txt) {
    str = txt;
  }

  void parse(const string& str, const parse_flags_t& flags = PARSE_DEFAULT) {
    std::istringstream stream(str);
    return parse(stream, flags, str);
  }
  virtual void parse(std::istream&,
                     const parse_flags_t& = PARSE_DEFAULT,
                     const optional<string>& original_string = none) {
    str      = original_string ? *original_string : "<stream>";
    context  = NULL;
    compiled = false;
  }

  void mark_uncompiled() {
    compiled = false;
  }

  void recompile(scope_t& scope) {
    compiled = false;
    compile(scope);
  }

  virtual void compile(scope_t& scope) {
    if (! compiled) {
      // Derived classes need to do something here.
      context  = &scope;
      compiled = true;
    }
  }

  result_type operator()(scope_t& scope) {
    return calc(scope);
  }

  result_type calc(scope_t& scope)
  {
    if (! compiled) {
      if (SHOW_DEBUG("expr.compile")) {
        DEBUG("expr.compile", "Before compilation:");
        dump(*_log_stream);
      }

      compile(scope);

      if (SHOW_DEBUG("expr.compile")) {
        DEBUG("expr.compile", "After compilation:");
        dump(*_log_stream);
      }
    }

    return real_calc(scope);
  }

  result_type calc() {
    assert(context);
    return calc(*context);
  }

  scope_t * get_context() {
    return context;
  }
  void set_context(scope_t * scope) {
    context = scope;
  }

  virtual string context_to_str() const = 0;

  string print_to_str() const {
    std::ostringstream out;
    print(out);
    return out.str();
  }
  string dump_to_str() const {
    std::ostringstream out;
    dump(out);
    return out.str();
  }
  string preview_to_str(scope_t& scope) const {
    std::ostringstream out;
    preview(out);
    return out.str();
  }

  virtual void print(std::ostream& out) const = 0;
  virtual void dump(std::ostream& out) const = 0;

  result_type preview(std::ostream& out, scope_t& scope) const {
    out << _("--- Input expression ---") << std::endl;
    out << text() << std::endl;

    out << std::endl << _("--- Text as parsed ---") << std::endl;
    print(out);
    out << std::endl;

    out << std::endl << _("--- Expression tree ---") << std::endl;
    dump(out);

    out << std::endl << _("--- Compiled tree ---") << std::endl;
    compile(scope);
    dump(out);

    out << std::endl << _("--- Result value ---") << std::endl;
    return calc();
  }

#if defined(HAVE_BOOST_SERIALIZATION)
private:
  /** Serialization. */

  friend class boost::serialization::access;

  template<class Archive>
  void serialize(Archive& ar, const unsigned int /* version */) {
    ar & context;
    ar & str;
    if (Archive::is_loading::value)
      compiled = false;
  }
#endif // HAVE_BOOST_SERIALIZATION
};

template <typename ResultType>
std::ostream& operator<<(std::ostream& out,
                         const expr_base_t<ResultType>& expr) {
  expr.print(out);
  return out;
}

} // namespace ledger

#endif // _EXPRBASE_H
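A minimal sketch, not part of the commit, of how a further domain-specific expression type could plug into expr_base_t in the spirit of the table in the header comment; count_expr_t and its one-token "language" are hypothetical:

    #include <sstream>
    #include "exprbase.h"

    namespace ledger {

    class count_expr_t : public expr_base_t<long>
    {
      typedef expr_base_t<long> base_type;

      long value;

    public:
      explicit count_expr_t(const string& s) : base_type(), value(0) {
        std::istringstream in(s);
        parse(in);                       // dispatches to the override below
      }

      virtual void parse(std::istream& in,
                         const parse_flags_t& flags = PARSE_DEFAULT,
                         const optional<string>& original_string = none) {
        in >> value;                                  // the whole grammar: one integer
        base_type::parse(in, flags, original_string); // records text, resets `compiled'
      }

      virtual long real_calc(scope_t&) {
        return value;                    // called by expr_base_t::calc() after compile()
      }

      virtual string context_to_str() const { return str; }
      virtual void print(std::ostream& out) const { out << value; }
      virtual void dump(std::ostream& out) const { out << "count(" << value << ")"; }
    };

    } // namespace ledger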
@@ -947,10 +947,10 @@ void forecast_posts::flush()
   item_handler<post_t>::flush();
 }

 pass_down_accounts::pass_down_accounts(acct_handler_ptr handler,
                                        accounts_iterator& iter,
-                                       const optional<item_predicate>& _pred,
+                                       const optional<predicate_t>& _pred,
                                        const optional<scope_t&>& _context)
   : item_handler<account_t>(handler), pred(_pred), context(_context)
 {
   TRACE_CTOR(pass_down_accounts, "acct_handler_ptr, accounts_iterator, ...");
@@ -202,18 +202,18 @@ public:

 class filter_posts : public item_handler<post_t>
 {
-  item_predicate pred;
+  predicate_t pred;
   scope_t&    context;

   filter_posts();

 public:
   filter_posts(post_handler_ptr handler,
-               const item_predicate& predicate,
+               const predicate_t& predicate,
                scope_t& _context)
     : item_handler<post_t>(handler), pred(predicate), context(_context) {
     TRACE_CTOR(filter_posts,
-               "post_handler_ptr, const item_predicate&, scope_t&");
+               "post_handler_ptr, const predicate_t&, scope_t&");
   }
   virtual ~filter_posts() {
     TRACE_DTOR(filter_posts);

@@ -273,8 +273,8 @@ public:
 class collapse_posts : public item_handler<post_t>
 {
   expr_t&     amount_expr;
-  item_predicate display_predicate;
-  item_predicate only_predicate;
+  predicate_t display_predicate;
+  predicate_t only_predicate;
   value_t     subtotal;
   std::size_t count;
   xact_t *    last_xact;

@@ -289,8 +289,8 @@ class collapse_posts : public item_handler<post_t>
 public:
   collapse_posts(post_handler_ptr handler,
                  expr_t&     _amount_expr,
-                 item_predicate _display_predicate,
-                 item_predicate _only_predicate,
+                 predicate_t _display_predicate,
+                 predicate_t _only_predicate,
                  bool        _only_collapse_if_zero = false)
     : item_handler<post_t>(handler), amount_expr(_amount_expr),
       display_predicate(_display_predicate),

@@ -646,19 +646,19 @@ public:

 class forecast_posts : public generate_posts
 {
-  item_predicate    pred;
+  predicate_t       pred;
   scope_t&          context;
   const std::size_t forecast_years;

 public:
   forecast_posts(post_handler_ptr handler,
-                 const item_predicate& predicate,
+                 const predicate_t& predicate,
                  scope_t& _context,
                  const std::size_t _forecast_years)
     : generate_posts(handler), pred(predicate), context(_context),
       forecast_years(_forecast_years) {
     TRACE_CTOR(forecast_posts,
-               "post_handler_ptr, item_predicate, scope_t&, std::size_t");
+               "post_handler_ptr, predicate_t, scope_t&, std::size_t");
   }
   virtual ~forecast_posts() throw() {
     TRACE_DTOR(forecast_posts);

@@ -679,14 +679,14 @@ class pass_down_accounts : public item_handler<account_t>
 {
   pass_down_accounts();

-  optional<item_predicate> pred;
+  optional<predicate_t> pred;
   optional<scope_t&>    context;

 public:
   pass_down_accounts(acct_handler_ptr handler,
                      accounts_iterator& iter,
-                     const optional<item_predicate>& _pred = none,
+                     const optional<predicate_t>& _pred = none,
                      const optional<scope_t&>& _context = none);

   virtual ~pass_down_accounts() {
     TRACE_DTOR(pass_down_accounts);
@@ -72,8 +72,7 @@ namespace {
     string temp(p);
     ptristream str(const_cast<char *&>(p));
     expr_t expr;
-    expr.parse(str, single_expr ? expr_t::PARSE_SINGLE : expr_t::PARSE_PARTIAL,
-               &temp);
+    expr.parse(str, single_expr ? PARSE_SINGLE : PARSE_PARTIAL, temp);

     if (str.eof()) {
       expr.set_text(p);
       p += std::strlen(p);

@@ -349,7 +348,7 @@ void format_t::format(std::ostream& out_str, scope_t& scope)
       }
       catch (const calc_error&) {
        add_error_context(_("While calculating format expression:"));
-       add_error_context(expr_context(elem->expr));
+       add_error_context(elem->expr.context_to_str());
        throw;
       }
       break;
@@ -179,9 +179,9 @@ void global_scope_t::execute_command(strings_list args, bool at_repl)
   // If such a command is found, create the output stream for the result and
   // then invoke the command.

-  function_t command;
+  expr_t::func_t command;
   bool is_precommand = false;
   bind_scope_t bound_scope(*this, report());

   if (bool(command = look_for_precommand(bound_scope, verb)))
     is_precommand = true;

@@ -398,22 +398,22 @@ void global_scope_t::normalize_session_options()
   INFO("Journal file is " << pathname.string());
 }

-function_t global_scope_t::look_for_precommand(scope_t& scope,
+expr_t::func_t global_scope_t::look_for_precommand(scope_t& scope,
                                                const string& verb)
 {
   if (expr_t::ptr_op_t def = scope.lookup(symbol_t::PRECOMMAND, verb))
     return def->as_function();
   else
-    return function_t();
+    return expr_t::func_t();
 }

-function_t global_scope_t::look_for_command(scope_t& scope,
+expr_t::func_t global_scope_t::look_for_command(scope_t& scope,
                                             const string& verb)
 {
   if (expr_t::ptr_op_t def = scope.lookup(symbol_t::COMMAND, verb))
     return def->as_function();
   else
-    return function_t();
+    return expr_t::func_t();
 }

 void global_scope_t::normalize_report_options(const string& verb)
src/global.h | 14

@@ -56,13 +56,13 @@ public:
   global_scope_t(char ** envp);
   ~global_scope_t();

   void read_init();
   void read_environment_settings(char * envp[]);
   strings_list read_command_arguments(scope_t& scope, strings_list args);
   void normalize_session_options();
-  function_t look_for_precommand(scope_t& scope, const string& verb);
-  function_t look_for_command(scope_t& scope, const string& verb);
+  expr_t::func_t look_for_precommand(scope_t& scope, const string& verb);
+  expr_t::func_t look_for_command(scope_t& scope, const string& verb);
   void normalize_report_options(const string& verb);

   char * prompt_string();
@@ -181,12 +181,11 @@ string join_args(call_scope_t& args)
   bool first = true;

   for (std::size_t i = 0; i < args.size(); i++) {
-    if (first) {
-      buf << args[i];
+    if (first)
       first = false;
-    } else {
-      buf << ' ' << args[i];
-    }
+    else
+      buf << ' ';
+    buf << args[i];
   }

   return buf.str();
src/op.h | 17

@@ -61,7 +61,7 @@ private:
   variant<ptr_op_t,       // used by all binary operators
           value_t,        // used by constant VALUE
           string,         // used by constant IDENT
-          function_t      // used by terminal FUNCTION
+          expr_t::func_t  // used by terminal FUNCTION
          > data;

 public:

@@ -171,14 +171,14 @@ public:
   bool is_function() const {
     return kind == FUNCTION;
   }
-  function_t& as_function_lval() {
+  expr_t::func_t& as_function_lval() {
     assert(kind == FUNCTION);
-    return boost::get<function_t>(data);
+    return boost::get<expr_t::func_t>(data);
   }
-  const function_t& as_function() const {
+  const expr_t::func_t& as_function() const {
     return const_cast<op_t *>(this)->as_function_lval();
   }
-  void set_function(const function_t& val) {
+  void set_function(const expr_t::func_t& val) {
     data = val;
   }

@@ -280,7 +280,7 @@ public:
   void dump(std::ostream& out, const int depth) const;

   static ptr_op_t wrap_value(const value_t& val);
-  static ptr_op_t wrap_functor(const function_t& fobj);
+  static ptr_op_t wrap_functor(const expr_t::func_t& fobj);

 #if defined(HAVE_BOOST_SERIALIZATION)
 private:

@@ -303,7 +303,7 @@ private:
         (! has_right() || ! right()->is_function()))) {
       ar & data;
     } else {
-      variant<ptr_op_t, value_t, string, function_t> temp_data;
+      variant<ptr_op_t, value_t, string, expr_t::func_t> temp_data;
       ar & temp_data;
     }
   }

@@ -325,7 +325,8 @@ inline expr_t::ptr_op_t expr_t::op_t::wrap_value(const value_t& val) {
   return temp;
 }

-inline expr_t::ptr_op_t expr_t::op_t::wrap_functor(const function_t& fobj) {
+inline expr_t::ptr_op_t
+expr_t::op_t::wrap_functor(const expr_t::func_t& fobj) {
   ptr_op_t temp(new op_t(op_t::FUNCTION));
   temp->set_function(fobj);
   return temp;
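A hedged sketch of wrapping a callable under the renamed expr_t::func_t typedef (formerly the free-standing function_t), assuming op.h and its dependencies are included; noop_command is a hypothetical name:

    value_t noop_command(call_scope_t&)
    {
      return NULL_VALUE;
    }

    expr_t::ptr_op_t wrap_noop()
    {
      expr_t::func_t f(&noop_command);
      return expr_t::op_t::wrap_functor(f);   // builds a FUNCTION node, as above
    }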
@@ -74,7 +74,7 @@ namespace {
     return op_bool_tuple(scope.lookup(symbol_t::OPTION, buf), false);
   }

-  void process_option(const string& whence, const function_t& opt,
+  void process_option(const string& whence, const expr_t::func_t& opt,
                       scope_t& scope, const char * arg, const string& name)
   {
     try {
@@ -200,7 +200,7 @@ void format_accounts::flush()
   if (report.HANDLED(display_)) {
     DEBUG("account.display",
           "Account display predicate: " << report.HANDLER(display_).str());
-    disp_pred.predicate.parse(report.HANDLER(display_).str());
+    disp_pred.parse(report.HANDLER(display_).str());
   }

   mark_accounts(*report.session.journal->master, report.HANDLED(flat));
src/output.h | 10

@@ -78,11 +78,11 @@ public:
 class format_accounts : public item_handler<account_t>
 {
 protected:
   report_t& report;
   format_t  account_line_format;
   format_t  total_line_format;
   format_t  separator_format;
-  item_predicate disp_pred;
+  predicate_t disp_pred;

   std::list<account_t *> posted_accounts;
@@ -473,8 +473,9 @@ expr_t::parser_t::parse_value_expr(std::istream& in,
 }

 expr_t::ptr_op_t
-expr_t::parser_t::parse(std::istream& in, const parse_flags_t& flags,
-                        const string * original_string)
+expr_t::parser_t::parse(std::istream& in,
+                        const parse_flags_t& flags,
+                        const optional<string>& original_string)
 {
   try {
     ptr_op_t top_node = parse_value_expr(in, flags);
src/parser.h | 15

@@ -49,10 +49,6 @@ namespace ledger {

 class expr_t::parser_t : public noncopyable
 {
-public:
-  typedef basic_flags_t<parse_flags_enum_t, uint_least8_t> parse_flags_t;
-
-private:
   mutable token_t lookahead;
   mutable bool    use_lookahead;

@@ -101,14 +97,9 @@ public:
     TRACE_DTOR(parser_t);
   }

   ptr_op_t parse(std::istream& in,
-                 const parse_flags_t& flags = PARSE_NORMAL,
-                 const string * original_string = NULL);
-  ptr_op_t parse(const string& str,
-                 const parse_flags_t& flags = PARSE_NORMAL) {
-    std::istringstream stream(str);
-    return parse(stream, flags, &str);
-  }
+                 const parse_flags_t& flags = PARSE_DEFAULT,
+                 const optional<string>& original_string = NULL);
 };

 } // namespace ledger
@@ -35,6 +35,7 @@
 #include "xact.h"
 #include "post.h"
 #include "account.h"
+#include "query.h"
 #include "session.h"
 #include "report.h"
 #include "format.h"

@@ -223,37 +224,36 @@ value_t args_command(call_scope_t& args)
   report_t&     report(find_scope<report_t>(args));
   std::ostream& out(report.output_stream);

-  value_t::sequence_t::const_iterator begin = args.value().begin();
-  value_t::sequence_t::const_iterator end   = args.value().end();
-
   out << _("--- Input arguments ---") << std::endl;
   args.value().dump(out);
   out << std::endl << std::endl;

-  std::pair<expr_t, query_parser_t> info = args_to_predicate(begin, end);
-  if (! info.first)
+  query_t query(args.value(), report.what_to_keep());
+  if (! query)
     throw_(std::runtime_error,
            _("Invalid query predicate: %1") << join_args(args));

   call_scope_t sub_args(static_cast<scope_t&>(args));
-  sub_args.push_back(string_value(info.first.text()));
+  sub_args.push_back(string_value(query.text()));

   parse_command(sub_args);

-  if (info.second.tokens_remaining()) {
+  if (query.tokens_remaining()) {
     out << std::endl << _("====== Display predicate ======")
         << std::endl << std::endl;

-    call_scope_t disp_sub_args(static_cast<scope_t&>(args));
-    info = args_to_predicate(info.second);
-    if (! info.first)
+    query.parse_again();
+    if (! query)
       throw_(std::runtime_error,
              _("Invalid display predicate: %1") << join_args(args));

-    disp_sub_args.push_back(string_value(info.first.text()));
+    call_scope_t disp_sub_args(static_cast<scope_t&>(args));
+    disp_sub_args.push_back(string_value(query.text()));

     parse_command(disp_sub_args);
   }

   return NULL_VALUE;
 }
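The same query_t flow, pulled out of the hunk above as a hedged sketch; query_t itself is declared in src/query.h, which is part of this commit but not excerpted here, so only the calls shown above are assumed:

    query_t query(args.value(), report.what_to_keep());
    if (! query)
      throw_(std::runtime_error,
             _("Invalid query predicate: %1") << join_args(args));

    string limit_predicate = query.text();     // first pass: the limit predicate
    if (query.tokens_remaining()) {
      query.parse_again();                     // second pass over the leftover tokens
      string display_predicate = query.text();
    }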
src/predicate.h | 253

@@ -48,42 +48,40 @@

 namespace ledger {

-class item_predicate
+class predicate_t : public expr_t
 {
 public:
-  expr_t         predicate;
   keep_details_t what_to_keep;

-  item_predicate() {
-    TRACE_CTOR(item_predicate, "");
+  predicate_t(const keep_details_t& _what_to_keep = keep_details_t())
+    : what_to_keep(_what_to_keep) {
+    TRACE_CTOR(predicate_t, "");
   }
-  item_predicate(const item_predicate& other)
-    : predicate(other.predicate), what_to_keep(other.what_to_keep) {
-    TRACE_CTOR(item_predicate, "copy");
-  }
-  item_predicate(const expr_t& _predicate,
-                 const keep_details_t& _what_to_keep)
-    : predicate(_predicate), what_to_keep(_what_to_keep) {
-    TRACE_CTOR(item_predicate, "const expr_t&, const keep_details_t&");
-  }
-  item_predicate(const string& _predicate,
-                 const keep_details_t& _what_to_keep)
-    : predicate(expr_t(_predicate)), what_to_keep(_what_to_keep) {
-    TRACE_CTOR(item_predicate, "const string&, const keep_details_t&");
-  }
-  ~item_predicate() throw() {
-    TRACE_DTOR(item_predicate);
+  predicate_t(const predicate_t& other)
+    : expr_t(other), what_to_keep(other.what_to_keep) {
+    TRACE_CTOR(predicate_t, "copy");
   }

-  bool operator()(scope_t& item) {
-    try {
-      return ! predicate || predicate.calc(item).strip_annotations(what_to_keep);
-    }
-    catch (const std::exception& err) {
-      add_error_context(_("While determining truth of predicate expression:"));
-      add_error_context(expr_context(predicate));
-      throw;
-    }
+  predicate_t(const string& str, const keep_details_t& _what_to_keep,
+              const parse_flags_t& flags = PARSE_DEFAULT)
+    : expr_t(str, flags), what_to_keep(_what_to_keep) {
+    TRACE_CTOR(predicate_t, "string, keep_details_t, parse_flags_t");
+  }
+  predicate_t(std::istream& in, const keep_details_t& _what_to_keep,
+              const parse_flags_t& flags = PARSE_DEFAULT)
+    : expr_t(in, flags), what_to_keep(_what_to_keep) {
+    TRACE_CTOR(predicate_t, "std::istream&, keep_details_t, parse_flags_t");
+  }
+  ~predicate_t() throw() {
+    TRACE_DTOR(predicate_t);
+  }
+
+  virtual value_t real_calc(scope_t& scope) {
+    return (*this ?
+            expr_t::real_calc(scope)
+              .strip_annotations(what_to_keep)
+              .to_boolean() :
+            true);
   }

 #if defined(HAVE_BOOST_SERIALIZATION)

@@ -94,207 +92,12 @@ private:

   template<class Archive>
   void serialize(Archive& ar, const unsigned int /* version */) {
-    ar & predicate;
+    ar & boost::serialization::base_object<expr_t>(*this);
     ar & what_to_keep;
   }
 #endif // HAVE_BOOST_SERIALIZATION
 };

-class query_lexer_t
-{
-  friend class query_parser_t;
-
-  value_t::sequence_t::const_iterator begin;
-  value_t::sequence_t::const_iterator end;
-
-  string::const_iterator arg_i;
-  string::const_iterator arg_end;
-
-  bool consume_whitespace;
-
-public:
-  struct token_t
-  {
-    enum kind_t {
-      UNKNOWN,
-
-      LPAREN,
-      RPAREN,
-
-      TOK_NOT,
-      TOK_AND,
-      TOK_OR,
-      TOK_EQ,
-
-      TOK_DATE,
-      TOK_CODE,
-      TOK_PAYEE,
-      TOK_NOTE,
-      TOK_ACCOUNT,
-      TOK_META,
-      TOK_EXPR,
-
-      TERM,
-
-      END_REACHED
-
-    } kind;
-
-    optional<string> value;
-
-    explicit token_t(kind_t _kind = UNKNOWN,
-                     const optional<string>& _value = none)
-      : kind(_kind), value(_value) {
-      TRACE_CTOR(query_lexer_t::token_t, "");
-    }
-    token_t(const token_t& tok)
-      : kind(tok.kind), value(tok.value) {
-      TRACE_CTOR(query_lexer_t::token_t, "copy");
-    }
-    ~token_t() throw() {
-      TRACE_DTOR(query_lexer_t::token_t);
-    }
-
-    token_t& operator=(const token_t& tok) {
-      if (this != &tok) {
-        kind  = tok.kind;
-        value = tok.value;
-      }
-      return *this;
-    }
-
-    operator bool() const {
-      return kind != END_REACHED;
-    }
-
-    string to_string() const {
-      switch (kind) {
-      case UNKNOWN:     return "UNKNOWN";
-      case LPAREN:      return "LPAREN";
-      case RPAREN:      return "RPAREN";
-      case TOK_NOT:     return "TOK_NOT";
-      case TOK_AND:     return "TOK_AND";
-      case TOK_OR:      return "TOK_OR";
-      case TOK_EQ:      return "TOK_EQ";
-      case TOK_DATE:    return "TOK_DATE";
-      case TOK_CODE:    return "TOK_CODE";
-      case TOK_PAYEE:   return "TOK_PAYEE";
-      case TOK_NOTE:    return "TOK_NOTE";
-      case TOK_ACCOUNT: return "TOK_ACCOUNT";
-      case TOK_META:    return "TOK_META";
-      case TOK_EXPR:    return "TOK_EXPR";
-      case TERM:        return string("TERM(") + *value + ")";
-      case END_REACHED: return "END_REACHED";
-      }
-    }
-
-    string symbol() const {
-      switch (kind) {
-      case LPAREN:      return "(";
-      case RPAREN:      return ")";
-      case TOK_NOT:     return "not";
-      case TOK_AND:     return "and";
-      case TOK_OR:      return "or";
-      case TOK_EQ:      return "=";
-      case TOK_DATE:    return "date";
-      case TOK_CODE:    return "code";
-      case TOK_PAYEE:   return "payee";
-      case TOK_NOTE:    return "note";
-      case TOK_ACCOUNT: return "account";
-      case TOK_META:    return "meta";
-      case TOK_EXPR:    return "expr";
-
-      case END_REACHED: return "<EOF>";
-
-      case TERM:
-        assert(0);
-        return "<TERM>";
-
-      case UNKNOWN:
-      default:
-        assert(0);
-        return "<UNKNOWN>";
-      }
-    }
-
-    void unexpected();
-    void expected(char wanted, char c = '\0');
-  };
-
-  token_t token_cache;
-
-  query_lexer_t(value_t::sequence_t::const_iterator _begin,
-                value_t::sequence_t::const_iterator _end)
-    : begin(_begin), end(_end), consume_whitespace(false)
-  {
-    TRACE_CTOR(query_lexer_t, "");
-    assert(begin != end);
-    arg_i   = (*begin).as_string().begin();
-    arg_end = (*begin).as_string().end();
-  }
-  query_lexer_t(const query_lexer_t& lexer)
-    : begin(lexer.begin), end(lexer.end),
-      arg_i(lexer.arg_i), arg_end(lexer.arg_end),
-      consume_whitespace(lexer.consume_whitespace),
-      token_cache(lexer.token_cache)
-  {
-    TRACE_CTOR(query_lexer_t, "copy");
-  }
-  ~query_lexer_t() throw() {
-    TRACE_DTOR(query_lexer_t);
-  }
-
-  token_t next_token();
-  void push_token(token_t tok) {
-    assert(token_cache.kind == token_t::UNKNOWN);
-    token_cache = tok;
-  }
-  token_t peek_token() {
-    if (token_cache.kind == token_t::UNKNOWN)
-      token_cache = next_token();
-    return token_cache;
-  }
-};
-
-class query_parser_t
-{
-  query_lexer_t lexer;
-
-  expr_t::ptr_op_t parse_query_term(query_lexer_t::token_t::kind_t tok_context);
-  expr_t::ptr_op_t parse_unary_expr(query_lexer_t::token_t::kind_t tok_context);
-  expr_t::ptr_op_t parse_and_expr(query_lexer_t::token_t::kind_t tok_context);
-  expr_t::ptr_op_t parse_or_expr(query_lexer_t::token_t::kind_t tok_context);
-  expr_t::ptr_op_t parse_query_expr(query_lexer_t::token_t::kind_t tok_context);
-
-public:
-  query_parser_t(value_t::sequence_t::const_iterator begin,
-                 value_t::sequence_t::const_iterator end)
-    : lexer(begin, end) {
-    TRACE_CTOR(query_parser_t, "");
-  }
-  query_parser_t(const query_parser_t& parser)
-    : lexer(parser.lexer) {
-    TRACE_CTOR(query_parser_t, "copy");
-  }
-  ~query_parser_t() throw() {
-    TRACE_DTOR(query_parser_t);
-  }
-
-  expr_t::ptr_op_t parse();
-
-  bool tokens_remaining() {
-    query_lexer_t::token_t tok = lexer.peek_token();
-    assert(tok.kind != query_lexer_t::token_t::UNKNOWN);
-    return tok.kind != query_lexer_t::token_t::END_REACHED;
-  }
-};
-
-std::pair<expr_t, query_parser_t>
-args_to_predicate(value_t::sequence_t::const_iterator begin,
-                  value_t::sequence_t::const_iterator end);
-
-std::pair<expr_t, query_parser_t> args_to_predicate(query_parser_t parser);
-
 } // namespace ledger

 #endif // _PREDICATE_H
@ -285,11 +285,16 @@ internal precision."))
      .def("valid", &amount_t::valid)
      ;

    enum_< amount_t::parse_flags_enum_t >("AmountParse")
      .value("DEFAULT",    amount_t::PARSE_DEFAULT)
      .value("NO_MIGRATE", amount_t::PARSE_NO_MIGRATE)
      .value("NO_REDUCE",  amount_t::PARSE_NO_REDUCE)
      .value("SOFT_FAIL",  amount_t::PARSE_SOFT_FAIL)
    enum_< parse_flags_enum_t >("ParseFlags")
      .value("Default",   PARSE_DEFAULT)
      .value("Partial",   PARSE_PARTIAL)
      .value("Single",    PARSE_SINGLE)
      .value("NoMigrate", PARSE_NO_MIGRATE)
      .value("NoReduce",  PARSE_NO_REDUCE)
      .value("NoAssign",  PARSE_NO_ASSIGN)
      .value("NoDates",   PARSE_NO_DATES)
      .value("OpContext", PARSE_OP_CONTEXT)
      .value("SoftFail",  PARSE_SOFT_FAIL)
      ;

    register_optional_to_python<amount_t>();
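The same flag set is what the C++ side now passes around. A minimal sketch of combining the relocated enumerators, assuming basic_flags_t accepts an OR'd value the way the unit tests later in this commit do; the stream handling around it is illustrative only:

    #include "amount.h"

    using namespace ledger;

    void parse_example(std::istream& in)
    {
      // Flags are now free-standing, so no amount_t:: qualification is needed.
      parse_flags_t flags(PARSE_NO_MIGRATE | PARSE_NO_REDUCE);

      amount_t amt;
      // plus_flags() yields a copy with the extra bit set, leaving `flags`
      // untouched; PARSE_SOFT_FAIL makes a failed parse report false instead
      // of throwing.
      if (! amt.parse(in, flags.plus_flags(PARSE_SOFT_FAIL)))
        return;                 // not an amount; caller decides what to do next
    }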
@ -133,7 +133,7 @@ void export_xact()
      ;

    class_< auto_xact_t, bases<xact_base_t> > ("AutomatedTransaction")
      .def(init<item_predicate>())
      .def(init<predicate_t>())

      .add_property("predicate",
                    make_getter(&auto_xact_t::predicate),
@ -31,12 +31,12 @@

#include <system.hh>

#include "predicate.h"
#include "query.h"
#include "op.h"

namespace ledger {

query_lexer_t::token_t query_lexer_t::next_token()
query_t::lexer_t::token_t query_t::lexer_t::next_token()
{
  if (token_cache.kind != token_t::UNKNOWN) {
    token_t tok = token_cache;

@ -191,7 +191,7 @@ query_lexer_t::token_t query_lexer_t::next_token()
  return token_t(token_t::UNKNOWN);
}

void query_lexer_t::token_t::unexpected()
void query_t::lexer_t::token_t::unexpected()
{
  kind_t prev_kind = kind;

@ -207,7 +207,7 @@ void query_lexer_t::token_t::unexpected()
  }
}

void query_lexer_t::token_t::expected(char wanted, char c)
void query_t::lexer_t::token_t::expected(char wanted, char c)
{
  kind = UNKNOWN;

@ -225,32 +225,32 @@ void query_lexer_t::token_t::expected(char wanted, char c)
}

expr_t::ptr_op_t
query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)
query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
{
  expr_t::ptr_op_t node;

  query_lexer_t::token_t tok = lexer.next_token();
  lexer_t::token_t tok = lexer.next_token();
  switch (tok.kind) {
  case query_lexer_t::token_t::END_REACHED:
  case lexer_t::token_t::END_REACHED:
    break;

  case query_lexer_t::token_t::TOK_DATE:
  case lexer_t::token_t::TOK_DATE:
  case query_lexer_t::token_t::TOK_CODE:
  case lexer_t::token_t::TOK_CODE:
  case query_lexer_t::token_t::TOK_PAYEE:
  case lexer_t::token_t::TOK_PAYEE:
  case query_lexer_t::token_t::TOK_NOTE:
  case lexer_t::token_t::TOK_NOTE:
  case query_lexer_t::token_t::TOK_ACCOUNT:
  case lexer_t::token_t::TOK_ACCOUNT:
  case query_lexer_t::token_t::TOK_META:
  case lexer_t::token_t::TOK_META:
  case query_lexer_t::token_t::TOK_EXPR:
  case lexer_t::token_t::TOK_EXPR:
    node = parse_query_term(tok.kind);
    if (! node)
      throw_(parse_error,
             _("%1 operator not followed by argument") << tok.symbol());
    break;

  case query_lexer_t::token_t::TERM:
  case lexer_t::token_t::TERM:
    assert(tok.value);
    switch (tok_context) {
    case query_lexer_t::token_t::TOK_DATE: {
    case lexer_t::token_t::TOK_DATE: {
      expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);
      ident->set_ident("date");

@ -285,11 +285,11 @@ query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)
      break;
    }

    case query_lexer_t::token_t::TOK_EXPR:
    case lexer_t::token_t::TOK_EXPR:
      node = expr_t(*tok.value).get_op();
      break;

    case query_lexer_t::token_t::TOK_META: {
    case lexer_t::token_t::TOK_META: {
      node = new expr_t::op_t(expr_t::op_t::O_CALL);

      expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);

@ -300,10 +300,10 @@ query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)
      arg1->set_value(mask_t(*tok.value));

      tok = lexer.peek_token();
      if (tok.kind == query_lexer_t::token_t::TOK_EQ) {
      if (tok.kind == lexer_t::token_t::TOK_EQ) {
        tok = lexer.next_token();
        tok = lexer.next_token();
        if (tok.kind != query_lexer_t::token_t::TERM)
        if (tok.kind != lexer_t::token_t::TERM)
          throw_(parse_error,
                 _("Metadata equality operator not followed by term"));

@ -327,13 +327,13 @@ query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)

      expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);
      switch (tok_context) {
      case query_lexer_t::token_t::TOK_ACCOUNT:
      case lexer_t::token_t::TOK_ACCOUNT:
        ident->set_ident("account"); break;
      case query_lexer_t::token_t::TOK_PAYEE:
      case lexer_t::token_t::TOK_PAYEE:
        ident->set_ident("payee"); break;
      case query_lexer_t::token_t::TOK_CODE:
      case lexer_t::token_t::TOK_CODE:
        ident->set_ident("code"); break;
      case query_lexer_t::token_t::TOK_NOTE:
      case lexer_t::token_t::TOK_NOTE:
        ident->set_ident("note"); break;
      default:
        assert(0); break;

@ -348,10 +348,10 @@ query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)
    }
    break;

  case query_lexer_t::token_t::LPAREN:
  case lexer_t::token_t::LPAREN:
    node = parse_query_expr(tok_context);
    tok = lexer.next_token();
    if (tok.kind != query_lexer_t::token_t::RPAREN)
    if (tok.kind != lexer_t::token_t::RPAREN)
      tok.expected(')');
    break;

@ -364,13 +364,13 @@ query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)
}

expr_t::ptr_op_t
query_parser_t::parse_unary_expr(query_lexer_t::token_t::kind_t tok_context)
query_t::parser_t::parse_unary_expr(lexer_t::token_t::kind_t tok_context)
{
  expr_t::ptr_op_t node;

  query_lexer_t::token_t tok = lexer.next_token();
  lexer_t::token_t tok = lexer.next_token();
  switch (tok.kind) {
  case query_lexer_t::token_t::TOK_NOT: {
  case lexer_t::token_t::TOK_NOT: {
    expr_t::ptr_op_t term(parse_query_term(tok_context));
    if (! term)
      throw_(parse_error,

@ -391,12 +391,12 @@ query_parser_t::parse_unary_expr(query_lexer_t::token_t::kind_t tok_context)
}

expr_t::ptr_op_t
query_parser_t::parse_and_expr(query_lexer_t::token_t::kind_t tok_context)
query_t::parser_t::parse_and_expr(lexer_t::token_t::kind_t tok_context)
{
  if (expr_t::ptr_op_t node = parse_unary_expr(tok_context)) {
    while (true) {
      query_lexer_t::token_t tok = lexer.next_token();
      lexer_t::token_t tok = lexer.next_token();
      if (tok.kind == query_lexer_t::token_t::TOK_AND) {
      if (tok.kind == lexer_t::token_t::TOK_AND) {
        expr_t::ptr_op_t prev(node);
        node = new expr_t::op_t(expr_t::op_t::O_AND);
        node->set_left(prev);

@ -415,12 +415,12 @@ query_parser_t::parse_and_expr(query_lexer_t::token_t::kind_t tok_context)
}

expr_t::ptr_op_t
query_parser_t::parse_or_expr(query_lexer_t::token_t::kind_t tok_context)
query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
{
  if (expr_t::ptr_op_t node = parse_and_expr(tok_context)) {
    while (true) {
      query_lexer_t::token_t tok = lexer.next_token();
      lexer_t::token_t tok = lexer.next_token();
      if (tok.kind == query_lexer_t::token_t::TOK_OR) {
      if (tok.kind == lexer_t::token_t::TOK_OR) {
        expr_t::ptr_op_t prev(node);
        node = new expr_t::op_t(expr_t::op_t::O_OR);
        node->set_left(prev);

@ -439,7 +439,7 @@ query_parser_t::parse_or_expr(query_lexer_t::token_t::kind_t tok_context)
}

expr_t::ptr_op_t
query_parser_t::parse_query_expr(query_lexer_t::token_t::kind_t tok_context)
query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context)
{
  if (expr_t::ptr_op_t node = parse_or_expr(tok_context)) {
    if (expr_t::ptr_op_t next = parse_query_expr(tok_context)) {

@ -453,24 +453,4 @@ query_parser_t::parse_query_expr(query_lexer_t::token_t::kind_t tok_context)
  return expr_t::ptr_op_t();
}

expr_t::ptr_op_t query_parser_t::parse()
{
  return parse_query_expr(query_lexer_t::token_t::TOK_ACCOUNT);
}

std::pair<expr_t, query_parser_t>
args_to_predicate(value_t::sequence_t::const_iterator begin,
                  value_t::sequence_t::const_iterator end)
{
  query_parser_t parser(begin, end);
  expr_t expr(parser.parse());
  return std::pair<expr_t, query_parser_t>(expr, parser);
}

std::pair<expr_t, query_parser_t> args_to_predicate(query_parser_t parser)
{
  expr_t expr(parser.parse());
  return std::pair<expr_t, query_parser_t>(expr, parser);
}

} // namespace ledger
291 src/query.h Normal file

@ -0,0 +1,291 @@
/*
 * Copyright (c) 2003-2009, John Wiegley.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * - Redistributions of source code must retain the above copyright
 *   notice, this list of conditions and the following disclaimer.
 *
 * - Redistributions in binary form must reproduce the above copyright
 *   notice, this list of conditions and the following disclaimer in the
 *   documentation and/or other materials provided with the distribution.
 *
 * - Neither the name of New Artisans LLC nor the names of its
 *   contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * @addtogroup expr
 */

/**
 * @file   predicate.h
 * @author John Wiegley
 *
 * @ingroup expr
 */
#ifndef _QUERY_H
#define _QUERY_H

#include "predicate.h"

namespace ledger {

class query_t : public predicate_t
{
public:
  class lexer_t
  {
    friend class query_t;
    friend class parser_t;

    value_t::sequence_t::const_iterator begin;
    value_t::sequence_t::const_iterator end;

    string::const_iterator arg_i;
    string::const_iterator arg_end;

    bool consume_whitespace;

  public:
    struct token_t
    {
      enum kind_t {
        UNKNOWN,

        LPAREN,
        RPAREN,

        TOK_NOT,
        TOK_AND,
        TOK_OR,
        TOK_EQ,

        TOK_DATE,
        TOK_CODE,
        TOK_PAYEE,
        TOK_NOTE,
        TOK_ACCOUNT,
        TOK_META,
        TOK_EXPR,

        TERM,

        END_REACHED
      } kind;

      optional<string> value;

      explicit token_t(kind_t _kind = UNKNOWN,
                       const optional<string>& _value = none)
        : kind(_kind), value(_value) {
        TRACE_CTOR(lexer_t::token_t, "");
      }
      token_t(const token_t& tok)
        : kind(tok.kind), value(tok.value) {
        TRACE_CTOR(lexer_t::token_t, "copy");
      }
      ~token_t() throw() {
        TRACE_DTOR(lexer_t::token_t);
      }

      token_t& operator=(const token_t& tok) {
        if (this != &tok) {
          kind  = tok.kind;
          value = tok.value;
        }
        return *this;
      }

      operator bool() const {
        return kind != END_REACHED;
      }

      string to_string() const {
        switch (kind) {
        case UNKNOWN:     return "UNKNOWN";
        case LPAREN:      return "LPAREN";
        case RPAREN:      return "RPAREN";
        case TOK_NOT:     return "TOK_NOT";
        case TOK_AND:     return "TOK_AND";
        case TOK_OR:      return "TOK_OR";
        case TOK_EQ:      return "TOK_EQ";
        case TOK_DATE:    return "TOK_DATE";
        case TOK_CODE:    return "TOK_CODE";
        case TOK_PAYEE:   return "TOK_PAYEE";
        case TOK_NOTE:    return "TOK_NOTE";
        case TOK_ACCOUNT: return "TOK_ACCOUNT";
        case TOK_META:    return "TOK_META";
        case TOK_EXPR:    return "TOK_EXPR";
        case TERM:        return string("TERM(") + *value + ")";
        case END_REACHED: return "END_REACHED";
        }
      }

      string symbol() const {
        switch (kind) {
        case LPAREN:      return "(";
        case RPAREN:      return ")";
        case TOK_NOT:     return "not";
        case TOK_AND:     return "and";
        case TOK_OR:      return "or";
        case TOK_EQ:      return "=";
        case TOK_DATE:    return "date";
        case TOK_CODE:    return "code";
        case TOK_PAYEE:   return "payee";
        case TOK_NOTE:    return "note";
        case TOK_ACCOUNT: return "account";
        case TOK_META:    return "meta";
        case TOK_EXPR:    return "expr";

        case END_REACHED: return "<EOF>";

        case TERM:
          assert(0);
          return "<TERM>";

        case UNKNOWN:
        default:
          assert(0);
          return "<UNKNOWN>";
        }
      }

      void unexpected();
      void expected(char wanted, char c = '\0');
    };

    token_t token_cache;

    lexer_t(value_t::sequence_t::const_iterator _begin,
            value_t::sequence_t::const_iterator _end)
      : begin(_begin), end(_end), consume_whitespace(false)
    {
      TRACE_CTOR(lexer_t, "");
      assert(begin != end);
      arg_i   = (*begin).as_string().begin();
      arg_end = (*begin).as_string().end();
    }
    lexer_t(const lexer_t& lexer)
      : begin(lexer.begin), end(lexer.end),
        arg_i(lexer.arg_i), arg_end(lexer.arg_end),
        consume_whitespace(lexer.consume_whitespace),
        token_cache(lexer.token_cache)
    {
      TRACE_CTOR(lexer_t, "copy");
    }
    ~lexer_t() throw() {
      TRACE_DTOR(lexer_t);
    }

    token_t next_token();
    void    push_token(token_t tok) {
      assert(token_cache.kind == token_t::UNKNOWN);
      token_cache = tok;
    }
    token_t peek_token() {
      if (token_cache.kind == token_t::UNKNOWN)
        token_cache = next_token();
      return token_cache;
    }
  };

protected:
  class parser_t
  {
    friend class query_t;

    value_t args;
    lexer_t lexer;

    expr_t::ptr_op_t parse_query_term(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_unary_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_and_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_or_expr(lexer_t::token_t::kind_t tok_context);
    expr_t::ptr_op_t parse_query_expr(lexer_t::token_t::kind_t tok_context);

  public:
    parser_t(const value_t& _args)
      : args(_args), lexer(args.begin(), args.end()) {
      TRACE_CTOR(parser_t, "");
    }
    parser_t(const parser_t& parser)
      : args(parser.args), lexer(parser.lexer) {
      TRACE_CTOR(parser_t, "copy");
    }
    ~parser_t() throw() {
      TRACE_DTOR(parser_t);
    }

    expr_t::ptr_op_t parse() {
      return parse_query_expr(lexer_t::token_t::TOK_ACCOUNT);
    }

    bool tokens_remaining() {
      lexer_t::token_t tok = lexer.peek_token();
      assert(tok.kind != lexer_t::token_t::UNKNOWN);
      return tok.kind != lexer_t::token_t::END_REACHED;
    }
  };

  optional<parser_t> parser;

public:
  query_t() {
    TRACE_CTOR(query_t, "");
  }
  query_t(const query_t& other)
    : predicate_t(other) {
    TRACE_CTOR(query_t, "copy");
  }
  query_t(const value_t& args,
          const keep_details_t& _what_to_keep = keep_details_t())
    : predicate_t(_what_to_keep) {
    TRACE_CTOR(query_t, "string, keep_details_t");
    if (! args.empty())
      parse_args(args);
  }
  ~query_t() throw() {
    TRACE_DTOR(query_t);
  }

  void parse_args(const value_t& args) {
    if (! parser)
      parser = parser_t(args);
    ptr = parser->parse();      // expr_t::ptr
  }

  void parse_again() {
    assert(parser);
    ptr = parser->parse();      // expr_t::ptr
  }

  bool tokens_remaining() {
    return parser && parser->tokens_remaining();
  }

  virtual string text() {
    return print_to_str();
  }
};

} // namespace ledger

#endif // _QUERY_H
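Before the call sites change below, a rough sketch of how the new query_t is meant to be driven, mirroring what report.cc does next; the argument sequence and the keep_details_t value passed in are assumptions:

    #include "query.h"
    #include <stdexcept>

    using namespace ledger;

    void apply_query(const value_t& args, const keep_details_t& keep)
    {
      query_t query(args, keep);          // parse the first (limit) predicate
      if (! query)
        throw std::runtime_error("Invalid query predicate");

      string limit = query.text();        // predicate rendered back as text

      // Whatever tokens remain form the display predicate, parsed in place.
      if (query.tokens_remaining()) {
        query.parse_again();
        string display = query.text();
      }
    }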
@ -35,6 +35,7 @@
#include "session.h"
#include "unistring.h"
#include "format.h"
#include "query.h"
#include "output.h"
#include "iterators.h"
#include "filters.h"

@ -96,7 +97,7 @@ void report_t::accounts_report(acct_handler_ptr handler)

  if (HANDLED(display_))
    pass_down_accounts(handler, *iter.get(),
                       item_predicate(HANDLER(display_).str(), what_to_keep()),
                       predicate_t(HANDLER(display_).str(), what_to_keep()),
                       *this);
  else
    pass_down_accounts(handler, *iter.get());

@ -431,33 +432,23 @@ namespace {
    value_t operator()(call_scope_t& args)
    {
      if (args.size() > 0) {
        value_t::sequence_t::const_iterator begin =
          args.value().as_sequence().begin();
        value_t::sequence_t::const_iterator end =
          args.value().as_sequence().end();

        std::pair<expr_t, query_parser_t> info = args_to_predicate(begin, end);
        if (! info.first)
          throw_(std::runtime_error,
                 _("Invalid query predicate: %1") << join_args(args));

        string limit = info.first.text();
        if (! limit.empty())
          report.HANDLER(limit_).on(whence, limit);
        query_t query(args.value(), report.what_to_keep());
        if (! query)
          throw_(std::runtime_error,
                 _("Invalid query predicate: %1") << query.text());

        report.HANDLER(limit_).on(whence, query.text());

        DEBUG("report.predicate",
              "Predicate = " << report.HANDLER(limit_).str());

        if (info.second.tokens_remaining()) {
          info = args_to_predicate(info.second);
          if (! info.first)
            throw_(std::runtime_error,
                   _("Invalid display predicate: %1") << join_args(args));

          string display = info.first.text();
          if (! display.empty())
            report.HANDLER(display_).on(whence, display);
        if (query.tokens_remaining()) {
          query.parse_again();
          if (! query)
            throw_(std::runtime_error,
                   _("Invalid display predicate: %1") << query.text());

          report.HANDLER(display_).on(whence, query.text());

          DEBUG("report.predicate",
                "Display predicate = " << report.HANDLER(display_).str());
@ -144,13 +144,13 @@ namespace {
                             const string& name);
  };

  void parse_amount_expr(scope_t&      scope,
                         std::istream& in,
                         amount_t&     amount,
                         post_t *      post,
                         uint_least8_t flags = 0)
                         const parse_flags_t& flags = PARSE_DEFAULT)
  {
    expr_t expr(in, flags | static_cast<uint_least8_t>(expr_t::PARSE_PARTIAL));
    expr_t expr(in, flags.plus_flags(PARSE_PARTIAL));

    DEBUG("textual.parse", "Parsed an amount expression");

@ -506,8 +506,8 @@ void instance_t::automated_xact_directive(char * line)
  }

  std::auto_ptr<auto_xact_t> ae
    (new auto_xact_t(item_predicate(skip_ws(line + 1),
    (new auto_xact_t(predicate_t(skip_ws(line + 1),
                     keep_details_t(true, true, true))));

  reveal_context = false;

@ -852,12 +852,10 @@ post_t * instance_t::parse_post(char * line,
    ptristream stream(next, len - beg);

    if (*next != '(')           // indicates a value expression
      post->amount.parse(stream, amount_t::PARSE_NO_REDUCE);
      post->amount.parse(stream, PARSE_NO_REDUCE);
    else
      parse_amount_expr(scope, stream, post->amount, post.get(),
                        static_cast<uint_least8_t>(expr_t::PARSE_NO_REDUCE) |
                        static_cast<uint_least8_t>(expr_t::PARSE_SINGLE) |
                        static_cast<uint_least8_t>(expr_t::PARSE_NO_ASSIGN));
                        PARSE_NO_REDUCE | PARSE_SINGLE | PARSE_NO_ASSIGN);

    if (! post->amount.is_null() && honor_strict && strict &&
        post->amount.has_commodity() &&

@ -900,12 +898,11 @@ post_t * instance_t::parse_post(char * line,
      ptristream cstream(p, len - beg);

      if (*p != '(')            // indicates a value expression
        post->cost->parse(cstream, amount_t::PARSE_NO_MIGRATE);
        post->cost->parse(cstream, PARSE_NO_MIGRATE);
      else
        parse_amount_expr(scope, cstream, *post->cost, post.get(),
                          static_cast<uint_least8_t>(expr_t::PARSE_NO_MIGRATE) |
                          static_cast<uint_least8_t>(expr_t::PARSE_SINGLE) |
                          static_cast<uint_least8_t>(expr_t::PARSE_NO_ASSIGN));
                          PARSE_NO_MIGRATE | PARSE_SINGLE |
                          PARSE_NO_ASSIGN);

      if (post->cost->sign() < 0)
        throw parse_error(_("A posting's cost may not be negative"));

@ -953,11 +950,10 @@ post_t * instance_t::parse_post(char * line,
      ptristream stream(p, len - beg);

      if (*p != '(')            // indicates a value expression
        post->assigned_amount->parse(stream, amount_t::PARSE_NO_MIGRATE);
        post->assigned_amount->parse(stream, PARSE_NO_MIGRATE);
      else
        parse_amount_expr(scope, stream, *post->assigned_amount, post.get(),
                          static_cast<uint_least8_t>(expr_t::PARSE_SINGLE) |
                          static_cast<uint_least8_t>(expr_t::PARSE_NO_MIGRATE));
                          PARSE_SINGLE | PARSE_NO_MIGRATE);

      if (post->assigned_amount->is_null()) {
        if (post->amount.is_null())

19 src/token.cc

@ -138,7 +138,7 @@ void expr_t::token_t::parse_ident(std::istream& in)
    value.set_string(buf);
}

void expr_t::token_t::next(std::istream& in, const uint_least8_t pflags)
void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags)
{
  if (in.eof()) {
    kind = TOK_EOF;

@ -232,7 +232,7 @@ void expr_t::token_t::next(std::istream& in, const uint_least8_t pflags)
  case '{': {
    in.get(c);
    amount_t temp;
    temp.parse(in, amount_t::PARSE_NO_MIGRATE);
    temp.parse(in, PARSE_NO_MIGRATE);
    in.get(c);
    if (c != '}')
      expected('}', c);

@ -298,7 +298,7 @@ void expr_t::token_t::next(std::istream& in, const uint_least8_t pflags)

  case '/': {
    in.get(c);
    if (pflags & PARSE_OP_CONTEXT) { // operator context
    if (pflags.has_flags(PARSE_OP_CONTEXT)) { // operator context
      kind = SLASH;
    } else {                    // terminal context
      // Read in the regexp

@ -399,17 +399,16 @@ void expr_t::token_t::next(std::istream& in, const uint_least8_t pflags)
    // When in relaxed parsing mode, we want to migrate commodity flags
    // so that any precision specified by the user updates the current
    // maximum displayed precision.
    amount_t::parse_flags_t parse_flags;
    parser_t::parse_flags_t pflags_copy(pflags);

    if (pflags_copy.has_flags(PARSE_NO_MIGRATE))
      parse_flags.add_flags(amount_t::PARSE_NO_MIGRATE);
    if (pflags_copy.has_flags(PARSE_NO_REDUCE))
      parse_flags.add_flags(amount_t::PARSE_NO_REDUCE);
    parse_flags_t parse_flags;

    if (pflags.has_flags(PARSE_NO_MIGRATE))
      parse_flags.add_flags(PARSE_NO_MIGRATE);
    if (pflags.has_flags(PARSE_NO_REDUCE))
      parse_flags.add_flags(PARSE_NO_REDUCE);

    try {
      amount_t temp;
      if (! temp.parse(in, parse_flags.plus_flags(amount_t::PARSE_SOFT_FAIL))) {
      if (! temp.parse(in, parse_flags.plus_flags(PARSE_SOFT_FAIL))) {
        // If the amount had no commodity, it must be an unambiguous
        // variable reference

@ -124,7 +124,7 @@ struct expr_t::token_t : public noncopyable

  int  parse_reserved_word(std::istream& in);
  void parse_ident(std::istream& in);
  void next(std::istream& in, const uint_least8_t flags);
  void next(std::istream& in, const parse_flags_t& flags);
  void rewind(std::istream& in);
  void unexpected();
  void expected(char wanted, char c = '\0');

@ -870,6 +870,10 @@ public:
    return 1;
  }

  bool empty() const {
    return size() == 0;
  }

  /**
   * Informational methods.
   */

@ -148,7 +148,7 @@ struct xact_finalizer_t {
class auto_xact_t : public xact_base_t
{
public:
  item_predicate predicate;
  predicate_t predicate;

  auto_xact_t() {
    TRACE_CTOR(auto_xact_t, "");

@ -157,10 +157,10 @@ public:
    : xact_base_t(), predicate(other.predicate) {
    TRACE_CTOR(auto_xact_t, "copy");
  }
  auto_xact_t(const item_predicate& _predicate)
  auto_xact_t(const predicate_t& _predicate)
    : predicate(_predicate)
  {
    TRACE_CTOR(auto_xact_t, "const item_predicate<post_t>&");
    TRACE_CTOR(auto_xact_t, "const predicate_t&");
  }

  virtual ~auto_xact_t() {

@ -132,6 +132,16 @@ for line in fd.readlines():
    line = re.sub('balance_pair_t', 'BalancePair', line)
    line = re.sub('value_t', 'Value', line)

    line = re.sub("PARSE_DEFAULT",    "ParseFlags.Default", line)
    line = re.sub("PARSE_PARTIAL",    "ParseFlags.Partial", line)
    line = re.sub("PARSE_SINGLE",     "ParseFlags.Single", line)
    line = re.sub("PARSE_NO_MIGRATE", "ParseFlags.NoMigrate", line)
    line = re.sub("PARSE_NO_REDUCE",  "ParseFlags.NoReduce", line)
    line = re.sub("PARSE_NO_ASSIGN",  "ParseFlags.NoAssign", line)
    line = re.sub("PARSE_NO_DATES",   "ParseFlags.NoDates", line)
    line = re.sub("PARSE_OP_CONTEXT", "ParseFlags.OpContext", line)
    line = re.sub("PARSE_SOFT_FAIL",  "ParseFlags.SoftFail", line)

    line = re.sub('ledger::', '', line)
    line = re.sub('std::istringstream', 'StringIO', line)
    line = re.sub('std::ostringstream', 'StringIO', line)
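As a small illustration of the predicate_t/auto_xact_t pairing touched in the xact.h and textual.cc hunks above, a hypothetical helper that builds an automated transaction by hand, using the constructor shapes shown there; wiring it into a journal is left out:

    #include "xact.h"
    #include "predicate.h"
    #include <memory>

    using namespace ledger;

    // keep_details_t(true, true, true) mirrors what the '=' directive parser passes.
    std::auto_ptr<auto_xact_t> make_expenses_auto_xact()
    {
      predicate_t pred("expenses", keep_details_t(true, true, true));
      return std::auto_ptr<auto_xact_t>(new auto_xact_t(pred));
    }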
@ -4,16 +4,16 @@
>>>2
While parsing file "$sourcepath/src/amount.h", line 67:
Error: No quantity specified for amount
While parsing file "$sourcepath/src/amount.h", line 707:
While parsing file "$sourcepath/src/amount.h", line 712:
Error: Invalid date/time: line amount_t amoun
While parsing file "$sourcepath/src/amount.h", line 713:
While parsing file "$sourcepath/src/amount.h", line 718:
Error: Invalid date/time: line string amount_
While parsing file "$sourcepath/src/amount.h", line 719:
While parsing file "$sourcepath/src/amount.h", line 724:
Error: Invalid date/time: line string amount_
While parsing file "$sourcepath/src/amount.h", line 725:
While parsing file "$sourcepath/src/amount.h", line 730:
Error: Invalid date/time: line string amount_
While parsing file "$sourcepath/src/amount.h", line 731:
While parsing file "$sourcepath/src/amount.h", line 736:
Error: Invalid date/time: line std::ostream&
While parsing file "$sourcepath/src/amount.h", line 738:
While parsing file "$sourcepath/src/amount.h", line 743:
Error: Invalid date/time: line std::istream&
=== 7

@ -83,7 +83,7 @@ void AmountTestCase::testParser()
  assertEqual(string("EUR 1000"), x19.to_string());
  assertEqual(string("EUR 1000"), x20.to_string());

  x1.parse("$100.0000", amount_t::PARSE_NO_MIGRATE);
  x1.parse("$100.0000", PARSE_NO_MIGRATE);
  assertEqual(amount_t::precision_t(2), x12.commodity().precision());
  assertEqual(x1.commodity(), x12.commodity());
  assertEqual(x1, x12);

@ -93,27 +93,27 @@ void AmountTestCase::testParser()
  assertEqual(x0.commodity(), x12.commodity());
  assertEqual(x0, x12);

  x2.parse("$100.00", amount_t::PARSE_NO_REDUCE);
  x2.parse("$100.00", PARSE_NO_REDUCE);
  assertEqual(x2, x12);
  x3.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
  x3.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
  assertEqual(x3, x12);

  x4.parse("$100.00");
  assertEqual(x4, x12);
  x5.parse("$100.00", amount_t::PARSE_NO_MIGRATE);
  x5.parse("$100.00", PARSE_NO_MIGRATE);
  assertEqual(x5, x12);
  x6.parse("$100.00", amount_t::PARSE_NO_REDUCE);
  x6.parse("$100.00", PARSE_NO_REDUCE);
  assertEqual(x6, x12);
  x7.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
  x7.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
  assertEqual(x7, x12);

  x8.parse("$100.00");
  assertEqual(x8, x12);
  x9.parse("$100.00", amount_t::PARSE_NO_MIGRATE);
  x9.parse("$100.00", PARSE_NO_MIGRATE);
  assertEqual(x9, x12);
  x10.parse("$100.00", amount_t::PARSE_NO_REDUCE);
  x10.parse("$100.00", PARSE_NO_REDUCE);
  assertEqual(x10, x12);
  x11.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
  x11.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
  assertEqual(x11, x12);

  assertValid(x0);
@ -4,6 +4,8 @@
|
||||||
|
|
||||||
#include "expr.h"
|
#include "expr.h"
|
||||||
#include "predicate.h"
|
#include "predicate.h"
|
||||||
|
#include "query.h"
|
||||||
|
#include "op.h"
|
||||||
|
|
||||||
using namespace ledger;
|
using namespace ledger;
|
||||||
|
|
||||||
|
|
@ -46,12 +48,12 @@ void ValueExprTestCase::testPredicateTokenizer1()
|
||||||
args.push_back(string_value("bar"));
|
args.push_back(string_value("bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -61,12 +63,12 @@ void ValueExprTestCase::testPredicateTokenizer2()
|
||||||
args.push_back(string_value("foo and bar"));
|
args.push_back(string_value("foo and bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -78,14 +80,14 @@ void ValueExprTestCase::testPredicateTokenizer3()
|
||||||
args.push_back(string_value("bar)"));
|
args.push_back(string_value("bar)"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::RPAREN, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -99,14 +101,14 @@ void ValueExprTestCase::testPredicateTokenizer4()
|
||||||
args.push_back(string_value(")"));
|
args.push_back(string_value(")"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::RPAREN, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -117,14 +119,14 @@ void ValueExprTestCase::testPredicateTokenizer5()
|
||||||
args.push_back(string_value("bar)"));
|
args.push_back(string_value("bar)"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::RPAREN, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -136,13 +138,13 @@ void ValueExprTestCase::testPredicateTokenizer6()
|
||||||
args.push_back(string_value("bar"));
|
args.push_back(string_value("bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -152,11 +154,11 @@ void ValueExprTestCase::testPredicateTokenizer7()
|
||||||
args.push_back(string_value("=foo and bar"));
|
args.push_back(string_value("=foo and bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -166,11 +168,11 @@ void ValueExprTestCase::testPredicateTokenizer8()
|
||||||
args.push_back(string_value("expr foo and bar"));
|
args.push_back(string_value("expr foo and bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -181,11 +183,11 @@ void ValueExprTestCase::testPredicateTokenizer9()
|
||||||
args.push_back(string_value("foo and bar"));
|
args.push_back(string_value("foo and bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -198,13 +200,13 @@ void ValueExprTestCase::testPredicateTokenizer10()
|
||||||
args.push_back(string_value("bar"));
|
args.push_back(string_value("bar"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -218,14 +220,14 @@ void ValueExprTestCase::testPredicateTokenizer11()
|
||||||
args.push_back(string_value("baz"));
|
args.push_back(string_value("baz"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -239,14 +241,14 @@ void ValueExprTestCase::testPredicateTokenizer12()
|
||||||
args.push_back(string_value("baz"));
|
args.push_back(string_value("baz"));
|
||||||
|
|
||||||
#ifndef NOT_FOR_PYTHON
|
#ifndef NOT_FOR_PYTHON
|
||||||
query_lexer_t tokens(args.begin(), args.end());
|
query_t::lexer_t tokens(args.begin(), args.end());
|
||||||
|
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
|
||||||
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
|
||||||
#endif
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -259,14 +261,14 @@ void ValueExprTestCase::testPredicateTokenizer13()
   args.push_back(string_value("|baz"));

 #ifndef NOT_FOR_PYTHON
-  query_lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end());

-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
 #endif
 }
@@ -279,14 +281,14 @@ void ValueExprTestCase::testPredicateTokenizer14()
   args.push_back(string_value("baz"));

 #ifndef NOT_FOR_PYTHON
-  query_lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end());

-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
 #endif
 }
@@ -298,14 +300,14 @@ void ValueExprTestCase::testPredicateTokenizer15()
   args.push_back(string_value("bar|baz"));

 #ifndef NOT_FOR_PYTHON
-  query_lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end());

-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
 #endif
 }
@@ -316,13 +318,13 @@ void ValueExprTestCase::testPredicateTokenizer16()
   args.push_back(string_value("and bar|baz"));

 #ifndef NOT_FOR_PYTHON
-  query_lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end());

-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+  assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
 #endif
 }
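The test hunks above are a pure rename: the free-standing query_lexer_t becomes the nested query_t::lexer_t, while the token stream the assertions expect (TERM, TOK_AND, TOK_OR, END_REACHED) is unchanged. The fragment below is a minimal usage sketch derived only from the calls visible in these tests, not from anything else in this commit; the value_t::sequence_t argument type, the by-value token_t return of next_token(), and the contents of query.h are assumptions.

#include <cstddef>

#include "query.h"   // assumed to declare ledger::query_t; the header is added to pkginclude_HEADERS below

using namespace ledger;

// Count the search terms in a query argument list such as
// {"foo", "and", "bar|baz"} -- the same shape the tests build
// with args.push_back(string_value(...)).
static std::size_t count_query_terms(value_t::sequence_t& args)   // sequence type is an assumption
{
  query_t::lexer_t tokens(args.begin(), args.end());              // constructor call as used in the tests

  std::size_t terms = 0;
  for (;;) {
    query_t::lexer_t::token_t tok = tokens.next_token();          // assumption: next_token() returns token_t by value
    if (tok.kind == query_t::lexer_t::token_t::END_REACHED)
      break;                                                      // input exhausted
    if (tok.kind == query_t::lexer_t::token_t::TERM)
      ++terms;                                                    // "foo", "bar", "baz", ...
    // TOK_AND / TOK_OR merely connect adjacent terms, as the assertions above verify.
  }
  return terms;
}

Nesting the lexer inside query_t presumably scopes the token kinds to the query API, which is what forces the mechanical s/query_lexer_t/query_t::lexer_t/ edits throughout these tests.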
@@ -42,7 +42,7 @@ libledger_math_la_LDFLAGS = -release $(VERSION).0
 libledger_expr_la_SOURCES = \
   src/option.cc \
   src/format.cc \
-  src/predicate.cc \
+  src/query.cc \
   src/scope.cc \
   src/interactive.cc \
   src/expr.cc \
@@ -107,10 +107,12 @@ pkginclude_HEADERS = \
   src/token.h \
   src/parser.h \
   src/op.h \
+  src/exprbase.h \
   src/expr.h \
   src/scope.h \
   src/interactive.h \
   src/predicate.h \
+  src/query.h \
   src/format.h \
   src/option.h \
   \