Merge branch 'next'

This commit is contained in:
John Wiegley 2009-10-28 23:46:14 -04:00
commit 2b3f3e9867
12 changed files with 940 additions and 216 deletions

2
acprep
View file

@ -1049,7 +1049,7 @@ class PrepareBuild(CommandLineApp):
line = re.sub('^\tmv -f', '\t@mv -f', line)
line = re.sub('^\t\$\(am__mv\)', '\t@$(am__mv)', line)
line = re.sub('^\t(\$\((.*?)LINK\).*)',
'\t@echo " " LD \$@;\\1 > /dev/null', line)
'\t@echo " LD " \$@;\\1 > /dev/null', line)
Makefile_new.write(line)
Makefile_new.close()
Makefile.close()

View file

@ -78,6 +78,16 @@ expr_t::ptr_op_t expr_t::get_op() throw()
return ptr;
}
// Return the textual form of this expression.  If no source text has been
// recorded yet (str is empty), render the parsed expression tree through
// op_t::print into a string and cache it via set_text() before returning.
string expr_t::text()
{
  if (str.empty()) {
    std::ostringstream out;
    ptr->print(out);
    set_text(out.str());
  }
  return str;
}
expr_t& expr_t::operator=(const expr_t& _expr)
{
if (this != &_expr) {

View file

@ -116,10 +116,7 @@ public:
}
ptr_op_t get_op() throw();
string text() const throw() {
return str;
}
string text();
// This has special use in the textual parser
void set_text(const string& txt) {

View file

@ -62,6 +62,8 @@ void interactive_t::verify_arguments() const
for (; ! wrong_arg && ! exit_loop && *p && next_arg; p++) {
DEBUG("interactive.verify",
"Want " << *p << " got: " << next_arg->label());
wrong_arg = false;
switch (*p) {
case 'a':
label = _("an amount");
@ -86,24 +88,24 @@ void interactive_t::verify_arguments() const
case 'i':
case 'l':
label = _("an integer");
if (next_arg->is_long() ||
(next_arg->is_amount() &&
! next_arg->as_amount().has_commodity())) {
wrong_arg = false;
}
else if (next_arg->is_string()) {
wrong_arg = false;
for (const char * q = next_arg->as_string().c_str(); *q; q++) {
if (! std::isdigit(*q) && *q != '-') {
wrong_arg = true;
break;
}
if (next_arg->is_long() ||
(next_arg->is_amount() &&
! next_arg->as_amount().has_commodity())) {
wrong_arg = false;
}
else if (next_arg->is_string()) {
wrong_arg = false;
for (const char * q = next_arg->as_string().c_str(); *q; q++) {
if (! std::isdigit(*q) && *q != '-') {
wrong_arg = true;
break;
}
}
else {
wrong_arg = true;
}
break;
}
else {
wrong_arg = true;
}
break;
case 'm':
label = _("a regex");
wrong_arg = ! next_arg->is_mask();
@ -134,6 +136,8 @@ void interactive_t::verify_arguments() const
dont_skip = true;
break;
}
if (wrong_arg && optional && next_arg->is_null())
wrong_arg = false;
if (wrong_arg)
vlabel = next_arg->label();

View file

@ -226,21 +226,20 @@ value_t args_command(call_scope_t& args)
args.value().dump(out);
out << std::endl << std::endl;
string predicate = args_to_predicate_expr(begin, end);
std::pair<expr_t, query_parser_t> info = args_to_predicate(begin, end);
call_scope_t sub_args(static_cast<scope_t&>(args));
sub_args.push_back(string_value(predicate));
sub_args.push_back(string_value(info.first.text()));
parse_command(sub_args);
if (begin != end) {
if (info.second.tokens_remaining()) {
out << std::endl << _("====== Display predicate ======")
<< std::endl << std::endl;
predicate = args_to_predicate_expr(begin, end);
call_scope_t disp_sub_args(static_cast<scope_t&>(args));
disp_sub_args.push_back(string_value(predicate));
disp_sub_args.push_back
(string_value(args_to_predicate(info.second).first.text()));
parse_command(disp_sub_args);
}

View file

@ -32,175 +32,367 @@
#include <system.hh>
#include "predicate.h"
#include "op.h"
namespace ledger {
string args_to_predicate_expr(value_t::sequence_t::const_iterator& begin,
value_t::sequence_t::const_iterator end)
// Produce the next token from the sequence of query argument strings.
//
// Scanning state lives in the lexer: `begin`/`end` walk the argument
// sequence, `arg_i`/`arg_end` walk the characters of the current argument,
// and `token_cache` holds a single pushed-back token.  `consume_whitespace`
// makes the following term greedy (set by a leading '=' or the "expr"
// keyword, which take the remainder of the string as one term).
//
// Fix: the keyword "note" previously returned TOK_NOT (copy-paste error),
// making a "note <pattern>" query negate instead of matching the note field
// and leaving the parser's TOK_NOTE keyword case unreachable.  It now
// returns TOK_NOTE.  Also removed stale interleaved remnants of the old
// args_to_predicate_expr implementation and the unused local `beg`.
query_lexer_t::token_t query_lexer_t::next_token()
{
  // A token pushed back by the parser is returned before any new scanning.
  if (token_cache.kind != token_t::UNKNOWN) {
    token_t tok = token_cache;
    token_cache = token_t();
    return tok;
  }

  // Current argument exhausted: advance to the next argument string, or
  // signal end of input if there are no more.
  if (arg_i == arg_end) {
    if (begin == end || ++begin == end) {
      return token_t(token_t::END_REACHED);
    } else {
      arg_i   = (*begin).as_string().begin();
      arg_end = (*begin).as_string().end();
    }
  }

 resume:
  bool consume_next = false;
  switch (*arg_i) {
  case ' ':
  case '\t':
  case '\r':
  case '\n':
    // Skip whitespace between tokens; recurse if it ended this argument so
    // the next argument is loaded.
    if (++arg_i == arg_end)
      return next_token();
    goto resume;

  case '/': {
    // A /.../ delimited match pattern; backslash escapes the next char.
    string pat;
    bool found_end_slash = false;
    for (++arg_i; arg_i != arg_end; ++arg_i) {
      if (*arg_i == '\\') {
        if (++arg_i == arg_end)
          throw_(parse_error, _("Unexpected '\\' at end of pattern"));
      }
      else if (*arg_i == '/') {
        ++arg_i;
        found_end_slash = true;
        break;
      }
      pat.push_back(*arg_i);
    }
    if (! found_end_slash)
      throw_(parse_error, _("Expected '/' at end of pattern"));
    if (pat.empty())
      throw_(parse_error, _("Match pattern is empty"));

    return token_t(token_t::TERM, pat);
  }

  case '(': ++arg_i; return token_t(token_t::LPAREN);
  case ')': ++arg_i; return token_t(token_t::RPAREN);
  case '&': ++arg_i; return token_t(token_t::TOK_AND);
  case '|': ++arg_i; return token_t(token_t::TOK_OR);
  case '!': ++arg_i; return token_t(token_t::TOK_NOT);
  case '@': ++arg_i; return token_t(token_t::TOK_PAYEE);
  case '#': ++arg_i; return token_t(token_t::TOK_CODE);
  case '%': ++arg_i; return token_t(token_t::TOK_META);

  case '=':
    // The '=' keyword at the beginning of a string causes the entire string
    // to be taken as an expression.
    if (arg_i == (*begin).as_string().begin())
      consume_whitespace = true;
    ++arg_i;
    return token_t(token_t::TOK_EQ);

  case '\\':
    // Escape: the next character loses any operator meaning.
    consume_next = true;
    ++arg_i;
    // fall through...
  default: {
    // Accumulate an identifier/term until whitespace or an operator char.
    string ident;
    for (; arg_i != arg_end; ++arg_i) {
      switch (*arg_i) {
      case ' ':
      case '\t':
      case '\n':
      case '\r':
        if (! consume_whitespace)
          goto test_ident;
        else
          ident.push_back(*arg_i);
        break;
      case '(':
      case ')':
      case '&':
      case '|':
      case '!':
      case '@':
      case '#':
      case '%':
      case '=':
        if (! consume_next)
          goto test_ident;
        // fall through...
      default:
        ident.push_back(*arg_i);
        break;
      }
    }
    consume_whitespace = false;

   test_ident:
    // Map reserved words to their token kinds; anything else is a TERM.
    if (ident == "and")
      return token_t(token_t::TOK_AND);
    else if (ident == "or")
      return token_t(token_t::TOK_OR);
    else if (ident == "not")
      return token_t(token_t::TOK_NOT);
    else if (ident == "account")
      return token_t(token_t::TOK_ACCOUNT);
    else if (ident == "desc")
      return token_t(token_t::TOK_PAYEE);
    else if (ident == "payee")
      return token_t(token_t::TOK_PAYEE);
    else if (ident == "code")
      return token_t(token_t::TOK_CODE);
    else if (ident == "note")
      return token_t(token_t::TOK_NOTE); // was TOK_NOT: bug fix
    else if (ident == "tag")
      return token_t(token_t::TOK_META);
    else if (ident == "meta")
      return token_t(token_t::TOK_META);
    else if (ident == "data")
      return token_t(token_t::TOK_META);
    else if (ident == "show") {
      // The "show" keyword is special, and separates a limiting predicate
      // from a display predicate.
      DEBUG("pred.show", "string = " << (*begin).as_string());
      return token_t(token_t::END_REACHED);
    }
    else if (ident == "expr") {
      // The expr keyword takes the whole of the next string as its
      // argument.
      consume_whitespace = true;
      return token_t(token_t::TOK_EXPR);
    }
    else
      return token_t(token_t::TERM, ident);
    break;
  }
  }
  return token_t(token_t::UNKNOWN);
}
// Report this token as unexpected by throwing a parse_error whose message
// depends on the token kind.  The kind is reset to UNKNOWN *before* throwing
// so a token held by a caller cannot be reported a second time.
void query_lexer_t::token_t::unexpected()
{
  kind_t prev_kind = kind;

  kind = UNKNOWN;

  switch (prev_kind) {
  case END_REACHED:
    throw_(parse_error, _("Unexpected end of expression"));
  case TERM:
    // TERM always carries a value (see next_token), so *value is safe here.
    throw_(parse_error, _("Unexpected string '%1'") << *value);
  default:
    throw_(parse_error, _("Unexpected token '%1'") << symbol());
  }
}
// Throw a parse_error describing a character mismatch: `wanted` is what the
// parser required, `c` is what was found.  '\0' (or -1) in either position
// means "nothing"/end of input.  The token kind is invalidated first.
// NOTE(review): `c == -1` only matches where plain char is signed — verify
// intended on platforms with unsigned char.
void query_lexer_t::token_t::expected(char wanted, char c)
{
  kind = UNKNOWN;

  if (c == '\0' || c == -1) {
    if (wanted == '\0' || wanted == -1)
      throw_(parse_error, _("Unexpected end"));
    else
      throw_(parse_error, _("Missing '%1'") << wanted);
  } else {
    if (wanted == '\0' || wanted == -1)
      throw_(parse_error, _("Invalid char '%1'") << c);
    else
      throw_(parse_error, _("Invalid char '%1' (wanted '%2')") << c << wanted);
  }
}
// Parse a single query term in the given matching context.  A context
// keyword token (account/payee/code/note/meta/expr) recursively parses the
// following term in that new context; a TERM token becomes an O_MATCH node
// comparing the context's identifier against the term as a regex mask; a
// '(' opens a nested query expression.  Returns a null ptr_op_t when no
// term is present (caller decides whether that is an error).
expr_t::ptr_op_t
query_parser_t::parse_query_term(query_lexer_t::token_t::kind_t tok_context)
{
  expr_t::ptr_op_t node;

  query_lexer_t::token_t tok = lexer.next_token();
  switch (tok.kind) {
  case query_lexer_t::token_t::END_REACHED:
    break;

  case query_lexer_t::token_t::TOK_ACCOUNT:
  case query_lexer_t::token_t::TOK_PAYEE:
  case query_lexer_t::token_t::TOK_CODE:
  case query_lexer_t::token_t::TOK_NOTE:
  case query_lexer_t::token_t::TOK_META:
  case query_lexer_t::token_t::TOK_EXPR:
    // The keyword itself produces no node; it changes the context for the
    // term that must follow it.
    node = parse_query_term(tok.kind);
    if (! node)
      throw_(parse_error,
             _("%1 operator not followed by argument") << tok.symbol());
    break;

  case query_lexer_t::token_t::TERM:
    assert(tok.value);
    if (tok_context == query_lexer_t::token_t::TOK_META) {
      // Metadata term matching is not implemented in this path yet.
      assert(0);
    } else {
      // Build "<ident> =~ /term/".  NOTE(review): a TERM in TOK_EXPR
      // context falls into the default branch below and asserts — expr
      // handling appears incomplete here.
      node = new expr_t::op_t(expr_t::op_t::O_MATCH);

      expr_t::ptr_op_t ident;
      ident = new expr_t::op_t(expr_t::op_t::IDENT);
      switch (tok_context) {
      case query_lexer_t::token_t::TOK_ACCOUNT:
        ident->set_ident("account"); break;
      case query_lexer_t::token_t::TOK_PAYEE:
        ident->set_ident("payee"); break;
      case query_lexer_t::token_t::TOK_CODE:
        ident->set_ident("code"); break;
      case query_lexer_t::token_t::TOK_NOTE:
        ident->set_ident("note"); break;
      default:
        assert(0); break;
      }

      expr_t::ptr_op_t mask;
      mask = new expr_t::op_t(expr_t::op_t::VALUE);
      mask->set_value(mask_t(*tok.value));

      node->set_left(ident);
      node->set_right(mask);
    }
    break;

  case query_lexer_t::token_t::LPAREN:
    node = parse_query_expr(tok_context);
    tok = lexer.next_token();
    if (tok.kind != query_lexer_t::token_t::RPAREN)
      tok.expected(')');
    break;

  default:
    // Not a term: push the token back for the caller and return null.
    lexer.push_token(tok);
    break;
  }

  return node;
}
// Parse a unary query expression: either "not <term>" — yielding an O_NOT
// node — or a bare query term.  Returns null when no term is present.
expr_t::ptr_op_t
query_parser_t::parse_unary_expr(query_lexer_t::token_t::kind_t tok_context)
{
  query_lexer_t::token_t tok = lexer.next_token();

  if (tok.kind == query_lexer_t::token_t::TOK_NOT) {
    expr_t::ptr_op_t operand(parse_query_term(tok_context));
    if (! operand)
      throw_(parse_error,
             _("%1 operator not followed by argument") << tok.symbol());

    expr_t::ptr_op_t negation(new expr_t::op_t(expr_t::op_t::O_NOT));
    negation->set_left(operand);
    return negation;
  }

  // Anything else is the start of a plain term; hand the token back.
  lexer.push_token(tok);
  return parse_query_term(tok_context);
}
// Parse a left-associative chain of unary expressions joined by AND,
// producing nested O_AND nodes.  Returns null when no expression is present.
expr_t::ptr_op_t
query_parser_t::parse_and_expr(query_lexer_t::token_t::kind_t tok_context)
{
  expr_t::ptr_op_t node = parse_unary_expr(tok_context);
  if (! node)
    return expr_t::ptr_op_t();

  for (;;) {
    query_lexer_t::token_t tok = lexer.next_token();
    if (tok.kind != query_lexer_t::token_t::TOK_AND) {
      // Not ours; give the token back and stop extending the chain.
      lexer.push_token(tok);
      return node;
    }

    expr_t::ptr_op_t rhs(parse_unary_expr(tok_context));
    if (! rhs)
      throw_(parse_error,
             _("%1 operator not followed by argument") << tok.symbol());

    expr_t::ptr_op_t conjunction(new expr_t::op_t(expr_t::op_t::O_AND));
    conjunction->set_left(node);
    conjunction->set_right(rhs);
    node = conjunction;
  }
}
// Parse a left-associative chain of AND-expressions joined by OR, producing
// nested O_OR nodes.  Returns null when no expression is present.
expr_t::ptr_op_t
query_parser_t::parse_or_expr(query_lexer_t::token_t::kind_t tok_context)
{
  expr_t::ptr_op_t node = parse_and_expr(tok_context);
  if (! node)
    return expr_t::ptr_op_t();

  for (;;) {
    query_lexer_t::token_t tok = lexer.next_token();
    if (tok.kind != query_lexer_t::token_t::TOK_OR) {
      // Not an OR: return the token and finish this chain.
      lexer.push_token(tok);
      return node;
    }

    expr_t::ptr_op_t rhs(parse_and_expr(tok_context));
    if (! rhs)
      throw_(parse_error,
             _("%1 operator not followed by argument") << tok.symbol());

    expr_t::ptr_op_t disjunction(new expr_t::op_t(expr_t::op_t::O_OR));
    disjunction->set_left(node);
    disjunction->set_right(rhs);
    node = disjunction;
  }
}
// Parse a full query expression.  After one OR-expression, any further
// adjacent expression is joined with an implicit O_OR — so "foo bar" means
// "foo or bar".  Returns null when the input contains no expression.
expr_t::ptr_op_t
query_parser_t::parse_query_expr(query_lexer_t::token_t::kind_t tok_context)
{
  if (expr_t::ptr_op_t node = parse_or_expr(tok_context)) {
    if (expr_t::ptr_op_t next = parse_query_expr(tok_context)) {
      expr_t::ptr_op_t prev(node);
      node = new expr_t::op_t(expr_t::op_t::O_OR);
      node->set_left(prev);
      node->set_right(next);
    }
    return node;
  }
  return expr_t::ptr_op_t();
}
// Entry point: parse the whole query.  Bare terms default to matching the
// account name (TOK_ACCOUNT context).
expr_t::ptr_op_t query_parser_t::parse()
{
  return parse_query_expr(query_lexer_t::token_t::TOK_ACCOUNT);
}
// Build a predicate expression from a range of argument strings.  Returns
// both the parsed expression and the parser itself, so the caller can check
// tokens_remaining() and resume parsing (e.g. after a "show" separator).
std::pair<expr_t, query_parser_t>
args_to_predicate(value_t::sequence_t::const_iterator begin,
                  value_t::sequence_t::const_iterator end)
{
  query_parser_t parser(begin, end);
  expr_t expr(parser.parse());
  return std::pair<expr_t, query_parser_t>(expr, parser);
}
// Resume parsing with an existing parser (taken by value: the caller's
// parser state is copied, not advanced), e.g. for the display predicate
// that follows the "show" keyword.
std::pair<expr_t, query_parser_t> args_to_predicate(query_parser_t parser)
{
  expr_t expr(parser.parse());
  return std::pair<expr_t, query_parser_t>(expr, parser);
}
} // namespace ledger

View file

@ -96,8 +96,198 @@ public:
}
};
string args_to_predicate_expr(value_t::sequence_t::const_iterator& begin,
value_t::sequence_t::const_iterator end);
/**
 * @brief Lexer for report query arguments.
 *
 * Tokenizes a sequence of argument strings (value_t strings) into query
 * tokens: parentheses, boolean operators, context keywords
 * (account/payee/code/note/meta/expr), bare TERMs and /regex/ patterns.
 * Supports a one-token pushback cache used by query_parser_t.
 */
class query_lexer_t
{
  friend class query_parser_t;

  value_t::sequence_t::const_iterator begin;  // current argument in sequence
  value_t::sequence_t::const_iterator end;    // one past the last argument

  string::const_iterator arg_i;    // scan position inside current argument
  string::const_iterator arg_end;  // end of current argument string

  bool consume_whitespace;  // true => whitespace is folded into the term
                            // (set by leading '=' and the "expr" keyword)

public:
  struct token_t
  {
    enum kind_t {
      UNKNOWN,      // no token / invalidated
      LPAREN,
      RPAREN,
      TOK_NOT,
      TOK_AND,
      TOK_OR,
      TOK_EQ,
      TOK_ACCOUNT,
      TOK_PAYEE,
      TOK_CODE,
      TOK_NOTE,
      TOK_META,
      TOK_EXPR,
      TERM,         // a bare word or /pattern/; carries `value`
      END_REACHED
    } kind;

    optional<string> value;  // set only for TERM tokens

    explicit token_t(kind_t _kind = UNKNOWN,
                     const optional<string>& _value = none)
      : kind(_kind), value(_value) {
      TRACE_CTOR(query_lexer_t::token_t, "");
    }
    token_t(const token_t& tok)
      : kind(tok.kind), value(tok.value) {
      TRACE_CTOR(query_lexer_t::token_t, "copy");
    }
    ~token_t() throw() {
      TRACE_DTOR(query_lexer_t::token_t);
    }

    token_t& operator=(const token_t& tok) {
      if (this != &tok) {
        kind  = tok.kind;
        value = tok.value;
      }
      return *this;
    }

    // A token is "true" until the end of input is reached.
    operator bool() const {
      return kind != END_REACHED;
    }

    // Debug name of the token kind.
    // NOTE(review): no return after the switch — if kind ever held a value
    // outside the enum this would fall off the end; confirm acceptable.
    string to_string() const {
      switch (kind) {
      case UNKNOWN:     return "UNKNOWN";
      case LPAREN:      return "LPAREN";
      case RPAREN:      return "RPAREN";
      case TOK_NOT:     return "TOK_NOT";
      case TOK_AND:     return "TOK_AND";
      case TOK_OR:      return "TOK_OR";
      case TOK_EQ:      return "TOK_EQ";
      case TOK_ACCOUNT: return "TOK_ACCOUNT";
      case TOK_PAYEE:   return "TOK_PAYEE";
      case TOK_CODE:    return "TOK_CODE";
      case TOK_NOTE:    return "TOK_NOTE";
      case TOK_META:    return "TOK_META";
      case TOK_EXPR:    return "TOK_EXPR";
      case TERM:        return string("TERM(") + *value + ")";
      case END_REACHED: return "END_REACHED";
      }
    }

    // User-facing spelling of the token, for error messages.  TERM and
    // UNKNOWN have no fixed spelling and assert.
    string symbol() const {
      switch (kind) {
      case LPAREN:      return "(";
      case RPAREN:      return ")";
      case TOK_NOT:     return "not";
      case TOK_AND:     return "and";
      case TOK_OR:      return "or";
      case TOK_EQ:      return "=";
      case TOK_ACCOUNT: return "account";
      case TOK_PAYEE:   return "payee";
      case TOK_CODE:    return "code";
      case TOK_NOTE:    return "note";
      case TOK_META:    return "meta";
      case TOK_EXPR:    return "expr";
      case END_REACHED: return "<EOF>";

      case TERM:
        assert(0);
        return "<TERM>";

      case UNKNOWN:
      default:
        assert(0);
        return "<UNKNOWN>";
      }
    }

    void unexpected();
    void expected(char wanted, char c = '\0');
  };

  token_t token_cache;  // single-token pushback slot (UNKNOWN = empty)

  // Requires a non-empty argument sequence (asserted); positions the scan
  // at the start of the first argument string.
  query_lexer_t(value_t::sequence_t::const_iterator _begin,
                value_t::sequence_t::const_iterator _end)
    : begin(_begin), end(_end), consume_whitespace(false)
  {
    TRACE_CTOR(query_lexer_t, "");
    assert(begin != end);
    arg_i   = (*begin).as_string().begin();
    arg_end = (*begin).as_string().end();
  }
  query_lexer_t(const query_lexer_t& lexer)
    : begin(lexer.begin), end(lexer.end),
      arg_i(lexer.arg_i), arg_end(lexer.arg_end),
      consume_whitespace(lexer.consume_whitespace),
      token_cache(lexer.token_cache)
  {
    TRACE_CTOR(query_lexer_t, "copy");
  }
  ~query_lexer_t() throw() {
    TRACE_DTOR(query_lexer_t);
  }

  token_t next_token();

  // Push one token back; only a single pushback is supported (asserted).
  void push_token(token_t tok) {
    assert(token_cache.kind == token_t::UNKNOWN);
    token_cache = tok;
  }

  // Look at the next token without consuming it.
  token_t peek_token() {
    if (token_cache.kind == token_t::UNKNOWN)
      token_cache = next_token();
    return token_cache;
  }
};
/**
 * @brief Recursive-descent parser over query_lexer_t tokens.
 *
 * Grammar (loosely): query := or-expr [query]  — adjacent queries are OR'd;
 * or-expr := and-expr ("or" and-expr)*; and-expr := unary ("and" unary)*;
 * unary := "not" term | term; term := keyword term | TERM | "(" query ")".
 * Produces an expr_t operator tree.
 */
class query_parser_t
{
  query_lexer_t lexer;

  expr_t::ptr_op_t parse_query_term(query_lexer_t::token_t::kind_t tok_context);
  expr_t::ptr_op_t parse_unary_expr(query_lexer_t::token_t::kind_t tok_context);
  expr_t::ptr_op_t parse_and_expr(query_lexer_t::token_t::kind_t tok_context);
  expr_t::ptr_op_t parse_or_expr(query_lexer_t::token_t::kind_t tok_context);
  expr_t::ptr_op_t parse_query_expr(query_lexer_t::token_t::kind_t tok_context);

public:
  query_parser_t(value_t::sequence_t::const_iterator begin,
                 value_t::sequence_t::const_iterator end)
    : lexer(begin, end) {
    TRACE_CTOR(query_parser_t, "");
  }
  query_parser_t(const query_parser_t& parser)
    : lexer(parser.lexer) {
    TRACE_CTOR(query_parser_t, "copy");
  }
  ~query_parser_t() throw() {
    TRACE_DTOR(query_parser_t);
  }

  expr_t::ptr_op_t parse();

  // True while unconsumed tokens remain (e.g. a display predicate after
  // "show"); peeks without consuming.
  bool tokens_remaining() {
    query_lexer_t::token_t tok = lexer.peek_token();
    assert(tok.kind != query_lexer_t::token_t::UNKNOWN);
    return tok.kind != query_lexer_t::token_t::END_REACHED;
  }
};
std::pair<expr_t, query_parser_t>
args_to_predicate(value_t::sequence_t::const_iterator begin,
value_t::sequence_t::const_iterator end);
std::pair<expr_t, query_parser_t>
args_to_predicate(query_parser_t parser);
} // namespace ledger

View file

@ -380,23 +380,24 @@ namespace {
value_t::sequence_t::const_iterator end =
args.value().as_sequence().end();
string limit = args_to_predicate_expr(begin, end);
std::pair<expr_t, query_parser_t> info = args_to_predicate(begin, end);
string limit = info.first.text();
if (! limit.empty())
report.HANDLER(limit_).on(whence, limit);
DEBUG("report.predicate",
"Predicate = " << report.HANDLER(limit_).str());
string display;
if (begin != end)
display = args_to_predicate_expr(begin, end);
if (info.second.tokens_remaining()) {
string display = args_to_predicate(info.second).first.text();
if (! display.empty())
report.HANDLER(display_).on(whence, display);
if (! display.empty())
report.HANDLER(display_).on(whence, display);
DEBUG("report.predicate",
"Display predicate = " << report.HANDLER(display_).str());
DEBUG("report.predicate",
"Display predicate = " << report.HANDLER(display_).str());
}
}
(report.*report_method)(handler_ptr(handler));

View file

@ -108,10 +108,10 @@ struct expr_t::token_t : public noncopyable
std::size_t length;
explicit token_t() : kind(UNKNOWN), length(0) {
TRACE_CTOR(token_t, "");
TRACE_CTOR(expr_t::token_t, "");
}
~token_t() throw() {
TRACE_DTOR(token_t);
TRACE_DTOR(expr_t::token_t);
}
token_t& operator=(const token_t& other) {

View file

@ -818,13 +818,11 @@ public:
}
void push_back(const value_t& val) {
if (! val.is_null()) {
if (is_null())
*this = sequence_t();
if (! is_sequence())
in_place_cast(SEQUENCE);
as_sequence_lval().push_back(val);
}
if (is_null())
*this = sequence_t();
if (! is_sequence())
in_place_cast(SEQUENCE);
as_sequence_lval().push_back(val);
}
void pop_back() {
@ -855,24 +853,18 @@ public:
}
sequence_t::iterator begin() {
VERIFY(is_sequence());
return as_sequence_lval().begin();
}
sequence_t::iterator end() {
VERIFY(is_sequence());
// This special hack is because we never used end() in a context which
// needs us to call _dup().
return boost::get<sequence_t *>(storage->data)->end();
return as_sequence_lval().end();
}
sequence_t::const_iterator begin() const {
VERIFY(is_sequence());
return as_sequence().begin();
}
sequence_t::const_iterator end() const {
VERIFY(is_sequence());
return as_sequence().end();
}

View file

@ -3,6 +3,7 @@
#include "t_expr.h"
#include "expr.h"
#include "predicate.h"
using namespace ledger;
@ -19,3 +20,309 @@ void ValueExprTestCase::tearDown()
amount_t::shutdown();
times_shutdown();
}
// 1. foo and bar
// 2. 'foo and bar'
// 3. (foo and bar)
// 4. ( foo and bar )
// 5. '( foo and' bar)
// 6. =foo and bar
// 7. ='foo and bar'
// 8. 'expr foo and bar'
// 9. expr 'foo and bar'
// 10. expr foo and bar
// 11. foo and bar or baz
// 12. foo and bar | baz
// 13. foo and bar |baz
// 14. foo and bar| baz
// 15. foo and bar|baz
// 16. foo 'and bar|baz'
// Case 1: "foo and bar" supplied as three separate arguments must lex to
// TERM, TOK_AND, TERM, then END_REACHED.
void ValueExprTestCase::testPredicateTokenizer1()
{
  value_t args;
  args.push_back(string_value("foo"));
  args.push_back(string_value("and"));
  args.push_back(string_value("bar"));

#ifndef NOT_FOR_PYTHON
  query_lexer_t tokens(args.begin(), args.end());

  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
  assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
  assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
  assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer2()
{
value_t args;
args.push_back(string_value("foo and bar"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer3()
{
value_t args;
args.push_back(string_value("(foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar)"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer4()
{
value_t args;
args.push_back(string_value("("));
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar"));
args.push_back(string_value(")"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer5()
{
value_t args;
args.push_back(string_value("( foo and"));
args.push_back(string_value("bar)"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer6()
{
value_t args;
args.push_back(string_value("=foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer7()
{
value_t args;
args.push_back(string_value("=foo and bar"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer8()
{
value_t args;
args.push_back(string_value("expr foo and bar"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer9()
{
value_t args;
args.push_back(string_value("expr"));
args.push_back(string_value("foo and bar"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer10()
{
value_t args;
args.push_back(string_value("expr"));
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer11()
{
value_t args;
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar"));
args.push_back(string_value("or"));
args.push_back(string_value("baz"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer12()
{
value_t args;
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar"));
args.push_back(string_value("|"));
args.push_back(string_value("baz"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer13()
{
value_t args;
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar"));
args.push_back(string_value("|baz"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer14()
{
value_t args;
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar|"));
args.push_back(string_value("baz"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer15()
{
value_t args;
args.push_back(string_value("foo"));
args.push_back(string_value("and"));
args.push_back(string_value("bar|baz"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
void ValueExprTestCase::testPredicateTokenizer16()
{
value_t args;
args.push_back(string_value("foo"));
args.push_back(string_value("and bar|baz"));
#ifndef NOT_FOR_PYTHON
query_lexer_t tokens(args.begin(), args.end());
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}

View file

@ -8,6 +8,22 @@ class ValueExprTestCase : public CPPUNIT_NS::TestCase
CPPUNIT_TEST_SUITE(ValueExprTestCase);
//CPPUNIT_TEST(testConstructors);
CPPUNIT_TEST(testPredicateTokenizer1);
CPPUNIT_TEST(testPredicateTokenizer2);
CPPUNIT_TEST(testPredicateTokenizer3);
CPPUNIT_TEST(testPredicateTokenizer4);
CPPUNIT_TEST(testPredicateTokenizer5);
CPPUNIT_TEST(testPredicateTokenizer6);
CPPUNIT_TEST(testPredicateTokenizer7);
CPPUNIT_TEST(testPredicateTokenizer8);
CPPUNIT_TEST(testPredicateTokenizer9);
CPPUNIT_TEST(testPredicateTokenizer10);
CPPUNIT_TEST(testPredicateTokenizer11);
CPPUNIT_TEST(testPredicateTokenizer12);
CPPUNIT_TEST(testPredicateTokenizer13);
CPPUNIT_TEST(testPredicateTokenizer14);
CPPUNIT_TEST(testPredicateTokenizer15);
CPPUNIT_TEST(testPredicateTokenizer16);
CPPUNIT_TEST_SUITE_END();
@ -19,6 +35,22 @@ public:
virtual void tearDown();
//void testConstructors();
void testPredicateTokenizer1();
void testPredicateTokenizer2();
void testPredicateTokenizer3();
void testPredicateTokenizer4();
void testPredicateTokenizer5();
void testPredicateTokenizer6();
void testPredicateTokenizer7();
void testPredicateTokenizer8();
void testPredicateTokenizer9();
void testPredicateTokenizer10();
void testPredicateTokenizer11();
void testPredicateTokenizer12();
void testPredicateTokenizer13();
void testPredicateTokenizer14();
void testPredicateTokenizer15();
void testPredicateTokenizer16();
private:
ValueExprTestCase(const ValueExprTestCase &copy);