summary refs log tree commit diff
path: root/src/query.cc
diff options
context:
space:
mode:
Diffstat (limited to 'src/query.cc')
-rw-r--r--  src/query.cc  55
1 file changed, 33 insertions(+), 22 deletions(-)
diff --git a/src/query.cc b/src/query.cc
index 5c11add5..b0f8d691 100644
--- a/src/query.cc
+++ b/src/query.cc
@@ -36,7 +36,8 @@
namespace ledger {
-query_t::lexer_t::token_t query_t::lexer_t::next_token()
+query_t::lexer_t::token_t
+query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
{
if (token_cache.kind != token_t::UNKNOWN) {
token_t tok = token_cache;
@@ -105,11 +106,19 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '\r':
case '\n':
if (++arg_i == arg_end)
- return next_token();
+ return next_token(tok_context);
goto resume;
- case '(': ++arg_i; return token_t(token_t::LPAREN);
- case ')': ++arg_i; return token_t(token_t::RPAREN);
+ case '(':
+ ++arg_i;
+ if (tok_context == token_t::TOK_EXPR)
+ consume_whitespace = true;
+ return token_t(token_t::LPAREN);
+ case ')':
+ ++arg_i;
+ if (tok_context == token_t::TOK_EXPR)
+ consume_whitespace = false;
+ return token_t(token_t::RPAREN);
case '&': ++arg_i; return token_t(token_t::TOK_AND);
case '|': ++arg_i; return token_t(token_t::TOK_OR);
case '!': ++arg_i; return token_t(token_t::TOK_NOT);
@@ -118,7 +127,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '%': ++arg_i; return token_t(token_t::TOK_META);
case '=':
++arg_i;
- consume_next_arg = true;
+ consume_next = true;
return token_t(token_t::TOK_EQ);
case '\\':
@@ -143,8 +152,10 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
ident.push_back(*arg_i);
break;
- case '(':
case ')':
+ if (! consume_next && tok_context == token_t::TOK_EXPR)
+ goto test_ident;
+ case '(':
case '&':
case '|':
case '!':
@@ -152,7 +163,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '#':
case '%':
case '=':
- if (! consume_next)
+ if (! consume_next && tok_context != token_t::TOK_EXPR)
goto test_ident;
// fall through...
default:
@@ -247,7 +258,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
{
expr_t::ptr_op_t node;
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
switch (tok.kind) {
case lexer_t::token_t::TOK_SHOW:
case lexer_t::token_t::TOK_ONLY:
@@ -288,10 +299,10 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
arg1->set_value(mask_t(*tok.value));
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
if (tok.kind == lexer_t::token_t::TOK_EQ) {
- tok = lexer.next_token();
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
+ tok = lexer.next_token(tok_context);
if (tok.kind != lexer_t::token_t::TERM)
throw_(parse_error,
_("Metadata equality operator not followed by term"));
@@ -340,7 +351,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
case lexer_t::token_t::LPAREN:
node = parse_query_expr(tok_context, true);
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
if (tok.kind != lexer_t::token_t::RPAREN)
tok.expected(')');
break;
@@ -358,7 +369,7 @@ query_t::parser_t::parse_unary_expr(lexer_t::token_t::kind_t tok_context)
{
expr_t::ptr_op_t node;
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
switch (tok.kind) {
case lexer_t::token_t::TOK_NOT: {
expr_t::ptr_op_t term(parse_query_term(tok_context));
@@ -385,7 +396,7 @@ query_t::parser_t::parse_and_expr(lexer_t::token_t::kind_t tok_context)
{
if (expr_t::ptr_op_t node = parse_unary_expr(tok_context)) {
while (true) {
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
if (tok.kind == lexer_t::token_t::TOK_AND) {
expr_t::ptr_op_t prev(node);
node = new expr_t::op_t(expr_t::op_t::O_AND);
@@ -409,7 +420,7 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
{
if (expr_t::ptr_op_t node = parse_and_expr(tok_context)) {
while (true) {
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
if (tok.kind == lexer_t::token_t::TOK_OR) {
expr_t::ptr_op_t prev(node);
node = new expr_t::op_t(expr_t::op_t::O_OR);
@@ -451,13 +462,13 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
(query_map_t::value_type
(QUERY_LIMIT, predicate_t(limiter, what_to_keep).print_to_str()));
- lexer_t::token_t tok = lexer.peek_token();
+ lexer_t::token_t tok = lexer.peek_token(tok_context);
while (tok.kind != lexer_t::token_t::END_REACHED) {
switch (tok.kind) {
case lexer_t::token_t::TOK_SHOW:
case lexer_t::token_t::TOK_ONLY:
case lexer_t::token_t::TOK_BOLD: {
- lexer.next_token();
+ lexer.next_token(tok_context);
kind_t kind;
switch (tok.kind) {
@@ -496,7 +507,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
case lexer_t::token_t::TOK_FOR:
case lexer_t::token_t::TOK_SINCE:
case lexer_t::token_t::TOK_UNTIL: {
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
string for_string;
@@ -506,10 +517,10 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
for_string = "until";
lexer.consume_next_arg = true;
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
while (tok.kind != lexer_t::token_t::END_REACHED) {
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
assert(tok.kind == lexer_t::token_t::TERM);
if (*tok.value == "show" || *tok.value == "bold" ||
@@ -526,7 +537,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
for_string += *tok.value;
lexer.consume_next_arg = true;
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
}
if (! for_string.empty())
@@ -538,7 +549,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
goto done;
}
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
}
done:
;