Diffstat (limited to 'src/query.cc')
-rw-r--r--  src/query.cc  317
1 file changed, 201 insertions(+), 116 deletions(-)
diff --git a/src/query.cc b/src/query.cc
index 363c6f73..5480336c 100644
--- a/src/query.cc
+++ b/src/query.cc
@@ -53,22 +53,27 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
}
}
+ resume:
switch (*arg_i) {
+ case '\0':
+ assert(false);
+ break;
+
case '\'':
case '"':
case '/': {
string pat;
- char closing = *arg_i;
+ char closing = *arg_i;
bool found_closing = false;
for (++arg_i; arg_i != arg_end; ++arg_i) {
if (*arg_i == '\\') {
- if (++arg_i == arg_end)
- throw_(parse_error, _("Unexpected '\\' at end of pattern"));
+ if (++arg_i == arg_end)
+ throw_(parse_error, _("Unexpected '\\' at end of pattern"));
}
else if (*arg_i == closing) {
- ++arg_i;
- found_closing = true;
- break;
+ ++arg_i;
+ found_closing = true;
+ break;
}
pat.push_back(*arg_i);
}
@@ -84,13 +89,17 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
if (multiple_args && consume_next_arg) {
consume_next_arg = false;
token_t tok(token_t::TERM, string(arg_i, arg_end));
+ prev_arg_i = arg_i;
arg_i = arg_end;
return tok;
}
- resume:
bool consume_next = false;
switch (*arg_i) {
+ case '\0':
+ assert(false);
+ break;
+
case ' ':
case '\t':
case '\r':
@@ -121,15 +130,20 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
string::const_iterator beg = arg_i;
for (; arg_i != arg_end; ++arg_i) {
switch (*arg_i) {
+ case '\0':
+ assert(false);
+ break;
+
case ' ':
case '\t':
case '\n':
case '\r':
- if (! consume_whitespace)
- goto test_ident;
- else
- ident.push_back(*arg_i);
- break;
+ if (! multiple_args && ! consume_whitespace)
+ goto test_ident;
+ else
+ ident.push_back(*arg_i);
+ break;
+
case '(':
case ')':
case '&':
@@ -139,12 +153,12 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '#':
case '%':
case '=':
- if (! consume_next)
- goto test_ident;
+ if (! consume_next)
+ goto test_ident;
// fall through...
default:
- ident.push_back(*arg_i);
- break;
+ ident.push_back(*arg_i);
+ break;
}
}
consume_whitespace = false;
@@ -170,20 +184,18 @@ test_ident:
return token_t(token_t::TOK_META);
else if (ident == "data")
return token_t(token_t::TOK_META);
- else if (ident == "show") {
- // The "show" keyword is special, and separates a limiting predicate
- // from a display predicate.
- DEBUG("pred.show", "string = " << (*begin).as_string());
- return token_t(token_t::END_REACHED);
- }
-#if 0
- // jww (2009-11-06): This is disabled for the time being.
- else if (ident == "date") {
- // The date keyword takes the whole of the next string as its argument.
- consume_whitespace = true;
- return token_t(token_t::TOK_DATE);
- }
-#endif
+ else if (ident == "show")
+ return token_t(token_t::TOK_SHOW);
+ else if (ident == "only")
+ return token_t(token_t::TOK_ONLY);
+ else if (ident == "bold")
+ return token_t(token_t::TOK_BOLD);
+ else if (ident == "for")
+ return token_t(token_t::TOK_FOR);
+ else if (ident == "since")
+ return token_t(token_t::TOK_SINCE);
+ else if (ident == "until")
+ return token_t(token_t::TOK_UNTIL);
else if (ident == "expr") {
// The expr keyword takes the whole of the next string as its argument.
consume_next_arg = true;
@@ -238,10 +250,16 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
lexer_t::token_t tok = lexer.next_token();
switch (tok.kind) {
+ case lexer_t::token_t::TOK_SHOW:
+ case lexer_t::token_t::TOK_ONLY:
+ case lexer_t::token_t::TOK_BOLD:
+ case lexer_t::token_t::TOK_FOR:
+ case lexer_t::token_t::TOK_SINCE:
+ case lexer_t::token_t::TOK_UNTIL:
case lexer_t::token_t::END_REACHED:
+ lexer.push_token(tok);
break;
- case lexer_t::token_t::TOK_DATE:
case lexer_t::token_t::TOK_CODE:
case lexer_t::token_t::TOK_PAYEE:
case lexer_t::token_t::TOK_NOTE:
@@ -251,47 +269,12 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
node = parse_query_term(tok.kind);
if (! node)
throw_(parse_error,
- _("%1 operator not followed by argument") << tok.symbol());
+ _("%1 operator not followed by argument") << tok.symbol());
break;
case lexer_t::token_t::TERM:
assert(tok.value);
switch (tok_context) {
- case lexer_t::token_t::TOK_DATE: {
- expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);
- ident->set_ident("date");
-
- date_interval_t interval(*tok.value);
-
- if (interval.start) {
- node = new expr_t::op_t(expr_t::op_t::O_GTE);
- node->set_left(ident);
-
- expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
- arg1->set_value(*interval.start);
- node->set_right(arg1);
- }
-
- if (interval.finish) {
- expr_t::ptr_op_t lt = new expr_t::op_t(expr_t::op_t::O_LT);
- lt->set_left(ident);
-
- expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
- arg1->set_value(*interval.finish);
- lt->set_right(arg1);
-
- if (node) {
- expr_t::ptr_op_t prev(node);
- node = new expr_t::op_t(expr_t::op_t::O_AND);
- node->set_left(prev);
- node->set_right(lt);
- } else {
- node = lt;
- }
- }
- break;
- }
-
case lexer_t::token_t::TOK_EXPR:
node = expr_t(*tok.value).get_op();
break;
@@ -308,22 +291,22 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
tok = lexer.peek_token();
if (tok.kind == lexer_t::token_t::TOK_EQ) {
- tok = lexer.next_token();
- tok = lexer.next_token();
- if (tok.kind != lexer_t::token_t::TERM)
- throw_(parse_error,
- _("Metadata equality operator not followed by term"));
-
- expr_t::ptr_op_t arg2 = new expr_t::op_t(expr_t::op_t::VALUE);
- assert(tok.value);
- arg2->set_value(mask_t(*tok.value));
-
- node->set_right(expr_t::op_t::new_node
- (expr_t::op_t::O_SEQ,
- expr_t::op_t::new_node
- (expr_t::op_t::O_CONS, arg1, arg2)));
+ tok = lexer.next_token();
+ tok = lexer.next_token();
+ if (tok.kind != lexer_t::token_t::TERM)
+ throw_(parse_error,
+ _("Metadata equality operator not followed by term"));
+
+ expr_t::ptr_op_t arg2 = new expr_t::op_t(expr_t::op_t::VALUE);
+ assert(tok.value);
+ arg2->set_value(mask_t(*tok.value));
+
+ node->set_right(expr_t::op_t::new_node
+ (expr_t::op_t::O_SEQ,
+ expr_t::op_t::new_node
+ (expr_t::op_t::O_CONS, arg1, arg2)));
} else {
- node->set_right(arg1);
+ node->set_right(arg1);
}
break;
}
@@ -334,15 +317,15 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);
switch (tok_context) {
case lexer_t::token_t::TOK_ACCOUNT:
- ident->set_ident("account"); break;
+ ident->set_ident("account"); break;
case lexer_t::token_t::TOK_PAYEE:
- ident->set_ident("payee"); break;
+ ident->set_ident("payee"); break;
case lexer_t::token_t::TOK_CODE:
- ident->set_ident("code"); break;
+ ident->set_ident("code"); break;
case lexer_t::token_t::TOK_NOTE:
- ident->set_ident("note"); break;
+ ident->set_ident("note"); break;
default:
- assert(false); break;
+ assert(false); break;
}
expr_t::ptr_op_t mask = new expr_t::op_t(expr_t::op_t::VALUE);
@@ -357,7 +340,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
break;
case lexer_t::token_t::LPAREN:
- node = parse_query_expr(tok_context);
+ node = parse_query_expr(tok_context, true);
tok = lexer.next_token();
if (tok.kind != lexer_t::token_t::RPAREN)
tok.expected(')');
@@ -382,7 +365,7 @@ query_t::parser_t::parse_unary_expr(lexer_t::token_t::kind_t tok_context)
expr_t::ptr_op_t term(parse_query_term(tok_context));
if (! term)
throw_(parse_error,
- _("%1 operator not followed by argument") << tok.symbol());
+ _("%1 operator not followed by argument") << tok.symbol());
node = new expr_t::op_t(expr_t::op_t::O_NOT);
node->set_left(term);
@@ -405,16 +388,16 @@ query_t::parser_t::parse_and_expr(lexer_t::token_t::kind_t tok_context)
while (true) {
lexer_t::token_t tok = lexer.next_token();
if (tok.kind == lexer_t::token_t::TOK_AND) {
- expr_t::ptr_op_t prev(node);
- node = new expr_t::op_t(expr_t::op_t::O_AND);
- node->set_left(prev);
- node->set_right(parse_unary_expr(tok_context));
- if (! node->right())
- throw_(parse_error,
- _("%1 operator not followed by argument") << tok.symbol());
+ expr_t::ptr_op_t prev(node);
+ node = new expr_t::op_t(expr_t::op_t::O_AND);
+ node->set_left(prev);
+ node->set_right(parse_unary_expr(tok_context));
+ if (! node->right())
+ throw_(parse_error,
+ _("%1 operator not followed by argument") << tok.symbol());
} else {
- lexer.push_token(tok);
- break;
+ lexer.push_token(tok);
+ break;
}
}
return node;
@@ -429,16 +412,16 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
while (true) {
lexer_t::token_t tok = lexer.next_token();
if (tok.kind == lexer_t::token_t::TOK_OR) {
- expr_t::ptr_op_t prev(node);
- node = new expr_t::op_t(expr_t::op_t::O_OR);
- node->set_left(prev);
- node->set_right(parse_and_expr(tok_context));
- if (! node->right())
- throw_(parse_error,
- _("%1 operator not followed by argument") << tok.symbol());
+ expr_t::ptr_op_t prev(node);
+ node = new expr_t::op_t(expr_t::op_t::O_OR);
+ node->set_left(prev);
+ node->set_right(parse_and_expr(tok_context));
+ if (! node->right())
+ throw_(parse_error,
+ _("%1 operator not followed by argument") << tok.symbol());
} else {
- lexer.push_token(tok);
- break;
+ lexer.push_token(tok);
+ break;
}
}
return node;
@@ -447,18 +430,120 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
}
expr_t::ptr_op_t
-query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context)
+query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
+ bool subexpression)
{
- if (expr_t::ptr_op_t node = parse_or_expr(tok_context)) {
- if (expr_t::ptr_op_t next = parse_query_expr(tok_context)) {
- expr_t::ptr_op_t prev(node);
- node = new expr_t::op_t(expr_t::op_t::O_OR);
- node->set_left(prev);
- node->set_right(next);
+ expr_t::ptr_op_t limiter;
+
+ while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
+ if (! limiter) {
+ limiter = next;
+ } else {
+ expr_t::ptr_op_t prev(limiter);
+ limiter = new expr_t::op_t(expr_t::op_t::O_OR);
+ limiter->set_left(prev);
+ limiter->set_right(next);
}
- return node;
}
- return expr_t::ptr_op_t();
+
+ if (! subexpression) {
+ if (limiter)
+ query_map.insert
+ (query_map_t::value_type
+ (QUERY_LIMIT, predicate_t(limiter, what_to_keep).print_to_str()));
+
+ lexer_t::token_t tok = lexer.peek_token();
+ while (tok.kind != lexer_t::token_t::END_REACHED) {
+ switch (tok.kind) {
+ case lexer_t::token_t::TOK_SHOW:
+ case lexer_t::token_t::TOK_ONLY:
+ case lexer_t::token_t::TOK_BOLD: {
+ lexer.next_token();
+
+ kind_t kind;
+ switch (tok.kind) {
+ case lexer_t::token_t::TOK_SHOW:
+ kind = QUERY_SHOW;
+ break;
+ case lexer_t::token_t::TOK_ONLY:
+ kind = QUERY_ONLY;
+ break;
+ case lexer_t::token_t::TOK_BOLD:
+ kind = QUERY_BOLD;
+ break;
+ default:
+ break;
+ }
+
+ expr_t::ptr_op_t node;
+ while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
+ if (! node) {
+ node = next;
+ } else {
+ expr_t::ptr_op_t prev(node);
+ node = new expr_t::op_t(expr_t::op_t::O_OR);
+ node->set_left(prev);
+ node->set_right(next);
+ }
+ }
+
+ if (node)
+ query_map.insert
+ (query_map_t::value_type
+ (kind, predicate_t(node, what_to_keep).print_to_str()));
+ break;
+ }
+
+ case lexer_t::token_t::TOK_FOR:
+ case lexer_t::token_t::TOK_SINCE:
+ case lexer_t::token_t::TOK_UNTIL: {
+ tok = lexer.next_token();
+
+ string for_string;
+
+ if (tok.kind == lexer_t::token_t::TOK_SINCE)
+ for_string = "since";
+ else if (tok.kind == lexer_t::token_t::TOK_UNTIL)
+ for_string = "until";
+
+ lexer.consume_next_arg = true;
+ tok = lexer.peek_token();
+
+ while (tok.kind != lexer_t::token_t::END_REACHED) {
+ tok = lexer.next_token();
+ assert(tok.kind == lexer_t::token_t::TERM);
+
+ if (*tok.value == "show" || *tok.value == "bold" ||
+ *tok.value == "for" || *tok.value == "since" ||
+ *tok.value == "until") {
+ lexer.token_cache = lexer_t::token_t();
+ lexer.arg_i = lexer.prev_arg_i;
+ lexer.consume_next_arg = false;
+ break;
+ }
+
+ if (! for_string.empty())
+ for_string += " ";
+ for_string += *tok.value;
+
+ lexer.consume_next_arg = true;
+ tok = lexer.peek_token();
+ }
+
+ if (! for_string.empty())
+ query_map.insert(query_map_t::value_type(QUERY_FOR, for_string));
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ tok = lexer.peek_token();
+ }
+ }
+
+ return limiter;
}
} // namespace ledger
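
For orientation, here is a minimal standalone sketch of the sectioning behavior
the rewritten parse_query_expr implements: words before any keyword form the
limiting predicate (QUERY_LIMIT), "show"/"only"/"bold" open display predicates,
and "for"/"since"/"until" collect a date string (QUERY_FOR), much as the code
above stores predicate strings in query_map. This is a toy model, not ledger's
API; split_query, section_map, and the keyword handling here are hypothetical
simplifications.

// Toy sketch only: split_query and section_map are hypothetical names and
// deliberately simpler than query_t::parser_t::parse_query_expr above.
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

enum kind_t { QUERY_LIMIT, QUERY_SHOW, QUERY_ONLY, QUERY_BOLD, QUERY_FOR };

typedef std::multimap<kind_t, std::string> section_map;

section_map split_query(const std::vector<std::string>& words) {
  section_map out;
  kind_t      current = QUERY_LIMIT; // words before any keyword limit the report
  std::string buf;

  for (std::vector<std::string>::const_iterator w = words.begin();
       w != words.end(); ++w) {
    bool is_keyword = (*w == "show" || *w == "only" || *w == "bold" ||
                       *w == "for"  || *w == "since" || *w == "until");
    if (is_keyword) {
      if (! buf.empty())        // flush the section gathered so far
        out.insert(std::make_pair(current, buf));
      buf.clear();

      if      (*w == "show") current = QUERY_SHOW;
      else if (*w == "only") current = QUERY_ONLY;
      else if (*w == "bold") current = QUERY_BOLD;
      else {
        current = QUERY_FOR;
        if (*w == "since" || *w == "until")
          buf = *w;             // "since"/"until" stay in the date string,
      }                         // mirroring for_string in the diff above
    } else {
      if (! buf.empty())
        buf += ' ';
      buf += *w;
    }
  }
  if (! buf.empty())
    out.insert(std::make_pair(current, buf));
  return out;
}

int main() {
  std::vector<std::string> words;
  words.push_back("expenses");
  words.push_back("show");
  words.push_back("payee");
  words.push_back("since");
  words.push_back("2009/11");

  section_map m = split_query(words);
  for (section_map::const_iterator i = m.begin(); i != m.end(); ++i)
    std::cout << i->first << " -> " << i->second << '\n';
}

Given "expenses show payee since 2009/11", this prints one QUERY_LIMIT section
("expenses"), one QUERY_SHOW section ("payee"), and one QUERY_FOR section
("since 2009/11"), which is the shape of the entries the real parser inserts
into query_map.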