author     John Wiegley <johnw@newartisans.com>  2010-06-23 19:31:06 -0400
committer  John Wiegley <johnw@newartisans.com>  2010-06-23 19:31:06 -0400
commit     eda6cbd0146d371653feec70e0eb3ee4e4c56379 (patch)
tree       aeb4e1ace0f9b284789f66f3ceb30bed66946b93 /src/query.cc
parent     014fde3418783d1ee1ec7fe4ea6c8b04ae7f6cd8 (diff)
parent     93807fade69dd4f0ec575eda78fe1a77a85c24e3 (diff)
Merge branch 'next'
Diffstat (limited to 'src/query.cc')
-rw-r--r--  src/query.cc | 207
1 file changed, 145 insertions(+), 62 deletions(-)
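
This merge replaces the old behaviour, where "show" simply ended the query,
with five first-class section keywords. The lexer now emits dedicated tokens
for "show", "bold", "for", "since" and "until", and parse_query_expr() files
each section into query_map: the leading terms become the QUERY_LIMIT
predicate, terms after "show" become QUERY_SHOW, terms after "bold" become
QUERY_BOLD, and the text of a "for"/"since"/"until" clause is stored verbatim
under QUERY_FOR. As a rough illustration (hypothetical values, not output
produced by this diff), a query such as

    expenses for last month show payee

would populate the map along these lines:

    QUERY_LIMIT -> predicate printed from the "expenses" term
    QUERY_FOR   -> "last month"
    QUERY_SHOW  -> predicate printed from the "payee" term
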
diff --git a/src/query.cc b/src/query.cc
index 404c101f..bed6afae 100644
--- a/src/query.cc
+++ b/src/query.cc
@@ -53,7 +53,12 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
     }
   }
 
+ resume:
   switch (*arg_i) {
+  case '\0':
+    assert(false);
+    break;
+
   case '\'':
   case '"':
   case '/': {
@@ -84,13 +89,17 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
   if (multiple_args && consume_next_arg) {
     consume_next_arg = false;
     token_t tok(token_t::TERM, string(arg_i, arg_end));
+    prev_arg_i = arg_i;
     arg_i = arg_end;
     return tok;
   }
 
- resume:
   bool consume_next = false;
   switch (*arg_i) {
+  case '\0':
+    assert(false);
+    break;
+
   case ' ':
   case '\t':
   case '\r':
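
The prev_arg_i bookmark recorded here is what makes the date-clause scanner
added later in this diff reversible: when the TOK_FOR loop in
parse_query_expr() reads one whole argument too far, it clears the cached
lookahead token and rewinds arg_i to prev_arg_i, so the keyword it overshot
is lexed again as an ordinary token.
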
@@ -121,15 +130,20 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
     string::const_iterator beg = arg_i;
     for (; arg_i != arg_end; ++arg_i) {
       switch (*arg_i) {
+      case '\0':
+        assert(false);
+        break;
+
       case ' ':
       case '\t':
       case '\n':
       case '\r':
-        if (! consume_whitespace)
+        if (! multiple_args && ! consume_whitespace)
           goto test_ident;
         else
           ident.push_back(*arg_i);
         break;
+
       case '(':
       case ')':
       case '&':
@@ -170,20 +184,16 @@ test_ident:
     return token_t(token_t::TOK_META);
   else if (ident == "data")
     return token_t(token_t::TOK_META);
-  else if (ident == "show") {
-    // The "show" keyword is special, and separates a limiting predicate
-    // from a display predicate.
-    DEBUG("pred.show", "string = " << (*begin).as_string());
-    return token_t(token_t::END_REACHED);
-  }
-#if 0
-  // jww (2009-11-06): This is disabled for the time being.
-  else if (ident == "date") {
-    // The date keyword takes the whole of the next string as its argument.
-    consume_whitespace = true;
-    return token_t(token_t::TOK_DATE);
-  }
-#endif
+  else if (ident == "show")
+    return token_t(token_t::TOK_SHOW);
+  else if (ident == "bold")
+    return token_t(token_t::TOK_BOLD);
+  else if (ident == "for")
+    return token_t(token_t::TOK_FOR);
+  else if (ident == "since")
+    return token_t(token_t::TOK_SINCE);
+  else if (ident == "until")
+    return token_t(token_t::TOK_UNTIL);
   else if (ident == "expr") {
     // The expr keyword takes the whole of the next string as its argument.
     consume_next_arg = true;
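
These five identifiers now map onto ordinary token kinds instead of the old
special cases ("show" used to end the token stream outright, and TOK_DATE had
been disabled under #if 0). The kinds themselves are declared in query.h,
which is outside this diff; judging from the usage here, the enum grows
roughly like this (a sketch, exact set and order assumed):

    enum kind_t {
      // kinds already used in this file (exact set assumed, see query.h):
      UNKNOWN, LPAREN, RPAREN, TOK_CODE, TOK_PAYEE, TOK_NOTE,
      TOK_ACCOUNT, TOK_META, TOK_EXPR,
      // the five kinds this commit adds:
      TOK_SHOW, TOK_BOLD, TOK_FOR, TOK_SINCE, TOK_UNTIL,
      // a bare term and the end-of-input sentinel:
      TERM, END_REACHED
    };
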
@@ -238,10 +248,15 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
   lexer_t::token_t tok = lexer.next_token();
 
   switch (tok.kind) {
+  case lexer_t::token_t::TOK_SHOW:
+  case lexer_t::token_t::TOK_BOLD:
+  case lexer_t::token_t::TOK_FOR:
+  case lexer_t::token_t::TOK_SINCE:
+  case lexer_t::token_t::TOK_UNTIL:
   case lexer_t::token_t::END_REACHED:
+    lexer.push_token(tok);
     break;
 
-  case lexer_t::token_t::TOK_DATE:
   case lexer_t::token_t::TOK_CODE:
   case lexer_t::token_t::TOK_PAYEE:
   case lexer_t::token_t::TOK_NOTE:
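
In parse_query_term() a section keyword now ends the current term exactly as
END_REACHED does, but the terminating token (END_REACHED included) is pushed
back with lexer.push_token() instead of being discarded, so the section loop
at the end of parse_query_expr() can dispatch on whichever keyword stopped
the term parser.
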
@@ -257,41 +272,6 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
   case lexer_t::token_t::TERM:
     assert(tok.value);
     switch (tok_context) {
-    case lexer_t::token_t::TOK_DATE: {
-      expr_t::ptr_op_t ident = new expr_t::op_t(expr_t::op_t::IDENT);
-      ident->set_ident("date");
-
-      date_interval_t interval(*tok.value);
-
-      if (interval.start) {
-        node = new expr_t::op_t(expr_t::op_t::O_GTE);
-        node->set_left(ident);
-
-        expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
-        arg1->set_value(*interval.start);
-        node->set_right(arg1);
-      }
-
-      if (interval.finish) {
-        expr_t::ptr_op_t lt = new expr_t::op_t(expr_t::op_t::O_LT);
-        lt->set_left(ident);
-
-        expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
-        arg1->set_value(*interval.finish);
-        lt->set_right(arg1);
-
-        if (node) {
-          expr_t::ptr_op_t prev(node);
-          node = new expr_t::op_t(expr_t::op_t::O_AND);
-          node->set_left(prev);
-          node->set_right(lt);
-        } else {
-          node = lt;
-        }
-      }
-      break;
-    }
-
     case lexer_t::token_t::TOK_EXPR:
       node = expr_t(*tok.value).get_op();
       break;
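
The removed TOK_DATE branch lowered a date interval directly into the
expression tree as "date >= start & date < finish". After this merge the same
intent flows through the for/since/until clause instead: the raw text is
stored under QUERY_FOR, and interpreting it is left to whatever consumes
query_map (that consumer is outside this diff).
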
@@ -357,7 +337,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
     break;
 
   case lexer_t::token_t::LPAREN:
-    node = parse_query_expr(tok_context);
+    node = parse_query_expr(tok_context, true);
     tok = lexer.next_token();
     if (tok.kind != lexer_t::token_t::RPAREN)
       tok.expected(')');
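
Passing true here marks a parenthesised group as a subexpression: as the
rewritten parse_query_expr() below shows, only the outermost call
(subexpression == false) writes QUERY_* entries into query_map, so a nested
"( ... )" contributes an expression subtree to the enclosing predicate and
nothing more.
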
@@ -447,18 +427,121 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
 }
 
 expr_t::ptr_op_t
-query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context)
+query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
+                                    bool subexpression)
 {
-  if (expr_t::ptr_op_t node = parse_or_expr(tok_context)) {
-    if (expr_t::ptr_op_t next = parse_query_expr(tok_context)) {
-      expr_t::ptr_op_t prev(node);
-      node = new expr_t::op_t(expr_t::op_t::O_OR);
-      node->set_left(prev);
-      node->set_right(next);
+  expr_t::ptr_op_t limiter;
+
+  while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
+    if (! limiter) {
+      limiter = next;
+    } else {
+      expr_t::ptr_op_t prev(limiter);
+      limiter = new expr_t::op_t(expr_t::op_t::O_OR);
+      limiter->set_left(prev);
+      limiter->set_right(next);
     }
-    return node;
   }
-  return expr_t::ptr_op_t();
+
+  if (! subexpression) {
+    if (limiter)
+      query_map.insert
+        (query_map_t::value_type
+         (QUERY_LIMIT, predicate_t(limiter, what_to_keep).print_to_str()));
+
+    lexer_t::token_t tok = lexer.peek_token();
+    while (tok.kind != lexer_t::token_t::END_REACHED) {
+      switch (tok.kind) {
+      case lexer_t::token_t::TOK_SHOW: {
+        lexer.next_token();
+
+        expr_t::ptr_op_t node;
+        while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
+          if (! node) {
+            node = next;
+          } else {
+            expr_t::ptr_op_t prev(node);
+            node = new expr_t::op_t(expr_t::op_t::O_OR);
+            node->set_left(prev);
+            node->set_right(next);
+          }
+        }
+
+        if (node)
+          query_map.insert
+            (query_map_t::value_type
+             (QUERY_SHOW, predicate_t(node, what_to_keep).print_to_str()));
+        break;
+      }
+
+      case lexer_t::token_t::TOK_BOLD: {
+        lexer.next_token();
+
+        expr_t::ptr_op_t node = parse_or_expr(tok_context);
+        while (expr_t::ptr_op_t next = parse_or_expr(tok_context)) {
+          expr_t::ptr_op_t prev(node);
+          node = new expr_t::op_t(expr_t::op_t::O_OR);
+          node->set_left(prev);
+          node->set_right(next);
+        }
+
+        if (node)
+          query_map.insert
+            (query_map_t::value_type
+             (QUERY_BOLD, predicate_t(node, what_to_keep).print_to_str()));
+        break;
+      }
+
+      case lexer_t::token_t::TOK_FOR:
+      case lexer_t::token_t::TOK_SINCE:
+      case lexer_t::token_t::TOK_UNTIL: {
+        tok = lexer.next_token();
+
+        string for_string;
+
+        if (tok.kind == lexer_t::token_t::TOK_SINCE)
+          for_string = "since";
+        else if (tok.kind == lexer_t::token_t::TOK_UNTIL)
+          for_string = "until";
+
+        lexer.consume_next_arg = true;
+        tok = lexer.peek_token();
+
+        while (tok.kind != lexer_t::token_t::END_REACHED) {
+          tok = lexer.next_token();
+          assert(tok.kind == lexer_t::token_t::TERM);
+
+          if (*tok.value == "show" || *tok.value == "bold" ||
+              *tok.value == "for" || *tok.value == "since" ||
+              *tok.value == "until") {
+            lexer.token_cache = lexer_t::token_t();
+            lexer.arg_i = lexer.prev_arg_i;
+            lexer.consume_next_arg = false;
+            break;
+          }
+
+          if (! for_string.empty())
+            for_string += " ";
+          for_string += *tok.value;
+
+          lexer.consume_next_arg = true;
+          tok = lexer.peek_token();
+        }
+
+        if (! for_string.empty())
+          query_map.insert(query_map_t::value_type(QUERY_FOR, for_string));
+        break;
+      }
+
+      default:
+        break;
+      }
+
+      tok = lexer.peek_token();
+    }
+  }
+
+  return limiter;
 }
 
 } // namespace ledger
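
The rewritten parse_query_expr() leans on one idiom three times (the limiter,
the "show" section and the "bold" section): keep calling parse_or_expr()
until it yields a null op, folding the results left to right into an O_OR
chain. A minimal sketch of that idiom, using the expr_t/op_t shapes from the
diff; this helper is an illustration only, not code from the commit:

    // Fold successive or-expressions into a left-deep O_OR tree.
    // Returns a null ptr_op_t when no term could be parsed at all.
    static expr_t::ptr_op_t
    fold_or_terms(query_t::parser_t& parser,
                  query_t::lexer_t::token_t::kind_t tok_context)
    {
      expr_t::ptr_op_t node;
      while (expr_t::ptr_op_t next = parser.parse_or_expr(tok_context)) {
        if (! node) {
          node = next;           // first term stands on its own
        } else {
          expr_t::ptr_op_t prev(node);
          node = new expr_t::op_t(expr_t::op_t::O_OR);
          node->set_left(prev);  // everything folded so far
          node->set_right(next); // the newly parsed term
        }
      }
      return node;
    }

At this level a query like "a b c" therefore ends up as ((a OR b) OR c), and
the three call sites differ only in which query_map slot receives the printed
predicate.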