summaryrefslogtreecommitdiff
path: root/src/query.cc
diff options
context:
space:
mode:
authorJohn Wiegley <john@dfinity.org>2019-09-17 16:54:58 -0700
committerJohn Wiegley <john@dfinity.org>2019-09-17 16:54:58 -0700
commit49b07a1c19489547b859d61fbc5c240aff224dda (patch)
tree5ca9ca47638ac1b0c4f8f13e73b64b07ace410a9 /src/query.cc
parentdba961a6c35719e2ca7da3cd9c5627b9ef6036df (diff)
downloadfork-ledger-49b07a1c19489547b859d61fbc5c240aff224dda.tar.gz
fork-ledger-49b07a1c19489547b859d61fbc5c240aff224dda.tar.bz2
fork-ledger-49b07a1c19489547b859d61fbc5c240aff224dda.zip
Correction to the way parens are parsed in query expressions
Diffstat (limited to 'src/query.cc')
-rw-r--r--  src/query.cc  55
1 file changed, 46 insertions(+), 9 deletions(-)
diff --git a/src/query.cc b/src/query.cc
index 883bea40..75c017ce 100644
--- a/src/query.cc
+++ b/src/query.cc
@@ -36,6 +36,17 @@
namespace ledger {
+// Advance the lexer to the next argument value in [begin, end).
+// Returns false when the sequence is exhausted (or was never started);
+// otherwise repoints arg_i/arg_end at the new argument's string and
+// returns true.
+// NOTE(review): assumes as_string() returns a stable reference whose
+// iterators outlive this call -- confirm against value_t's contract.
+bool query_t::lexer_t::advance()
+{
+ if (begin == end || ++begin == end) {
+ return false;
+ } else {
+ arg_i = (*begin).as_string().begin();
+ arg_end = (*begin).as_string().end();
+ }
+ return true;
+}
+
query_t::lexer_t::token_t
query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
{
@@ -46,12 +57,8 @@ query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
}
if (arg_i == arg_end) {
- if (begin == end || ++begin == end) {
+ if (! advance())
return token_t(token_t::END_REACHED);
- } else {
- arg_i = (*begin).as_string().begin();
- arg_end = (*begin).as_string().end();
- }
}
resume:
@@ -153,9 +160,6 @@ query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
break;
case ')':
- if (! consume_next && tok_context == token_t::TOK_EXPR)
- goto test_ident;
- // fall through...
case '(':
case '&':
case '|':
@@ -253,6 +257,23 @@ void query_t::lexer_t::token_t::expected(char wanted, char c)
}
}
+// Strip leading characters listed in `chars` (default: ASCII whitespace)
+// from `str` in place; returns `str` itself so calls can be chained.
+std::string& ltrim(std::string& str, const std::string& chars = "\t\n\v\f\r ")
+{
+ str.erase(0, str.find_first_not_of(chars));
+ return str;
+}
+
+// Strip trailing characters listed in `chars` (default: ASCII whitespace)
+// from `str` in place; returns `str` itself so calls can be chained.
+// find_last_not_of returning npos (all-trim string) erases from index 0,
+// since npos + 1 == 0 -- i.e. the whole string, which is the desired result.
+std::string& rtrim(std::string& str, const std::string& chars = "\t\n\v\f\r ")
+{
+ str.erase(str.find_last_not_of(chars) + 1);
+ return str;
+}
+
+// Strip both leading and trailing `chars` from `str` in place by composing
+// rtrim then ltrim; returns `str` for chaining.
+std::string& trim(std::string& str, const std::string& chars = "\t\n\v\f\r ")
+{
+ return ltrim(rtrim(str, chars), chars);
+}
+
expr_t::ptr_op_t
query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_context)
{
@@ -275,13 +296,29 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
case lexer_t::token_t::TOK_NOTE:
case lexer_t::token_t::TOK_ACCOUNT:
case lexer_t::token_t::TOK_META:
- case lexer_t::token_t::TOK_EXPR:
node = parse_query_term(tok.kind);
if (! node)
throw_(parse_error,
_f("%1% operator not followed by argument") % tok.symbol());
break;
+ case lexer_t::token_t::TOK_EXPR: {
+ string arg(lexer.arg_i, lexer.arg_end);
+ lexer.arg_i = lexer.arg_end;
+ trim(arg);
+
+ if (arg.length() == 0) {
+ if (lexer.advance()) {
+ arg = string(lexer.arg_i, lexer.arg_end);
+ lexer.arg_i = lexer.arg_end;
+ trim(arg);
+ }
+ }
+
+ node = expr_t(arg).get_op();
+ break;
+ }
+
case lexer_t::token_t::TERM:
assert(tok.value);
switch (tok_context) {