Diffstat (limited to 'src')
-rw-r--r--  src/expr.h    |    6
-rw-r--r--  src/parser.cc |    4
-rw-r--r--  src/parser.h  |    8
-rw-r--r--  src/query.cc  |   55
-rw-r--r--  src/query.h   |    6
-rw-r--r--  src/report.h  |    2
-rw-r--r--  src/token.cc  |  108
-rw-r--r--  src/token.h   |    9
8 files changed, 152 insertions, 46 deletions
diff --git a/src/expr.h b/src/expr.h
index a3ae5669..c4cd5dc5 100644
--- a/src/expr.h
+++ b/src/expr.h
@@ -49,15 +49,15 @@ namespace ledger {
class expr_t : public expr_base_t<value_t>
{
- struct token_t;
- class parser_t;
-
+ class parser_t;
typedef expr_base_t<value_t> base_type;
public:
+ struct token_t;
class op_t;
typedef intrusive_ptr<op_t> ptr_op_t;
typedef intrusive_ptr<const op_t> const_ptr_op_t;
+
protected:
ptr_op_t ptr;
diff --git a/src/parser.cc b/src/parser.cc
index f0085295..a18fa552 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -74,7 +74,7 @@ expr_t::parser_t::parse_value_term(std::istream& in,
case token_t::LPAREN:
node = parse_value_expr(in, tflags.plus_flags(PARSE_PARTIAL)
.minus_flags(PARSE_SINGLE));
- tok = next_token(in, tflags, ')');
+ tok = next_token(in, tflags, token_t::RPAREN);
break;
default:
@@ -367,7 +367,7 @@ expr_t::parser_t::parse_querycolon_expr(std::istream& in,
throw_(parse_error,
_("%1 operator not followed by argument") << tok.symbol);
- next_token(in, tflags.plus_flags(PARSE_OP_CONTEXT), ':');
+ next_token(in, tflags.plus_flags(PARSE_OP_CONTEXT), token_t::COLON);
prev = node->right();
ptr_op_t subnode = new op_t(op_t::O_COLON);
subnode->set_left(prev);
diff --git a/src/parser.h b/src/parser.h
index 9a65765d..09e12d95 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -53,11 +53,15 @@ class expr_t::parser_t : public noncopyable
mutable bool use_lookahead;
token_t& next_token(std::istream& in, const parse_flags_t& tflags,
- const char expecting = '\0') const {
+ const optional<token_t::kind_t>& expecting = none) const {
if (use_lookahead)
use_lookahead = false;
else
- lookahead.next(in, tflags, expecting);
+ lookahead.next(in, tflags);
+
+ if (expecting && lookahead.kind != *expecting)
+ lookahead.expected(*expecting);
+
return lookahead;
}
diff --git a/src/query.cc b/src/query.cc
index 5c11add5..b0f8d691 100644
--- a/src/query.cc
+++ b/src/query.cc
@@ -36,7 +36,8 @@
namespace ledger {
-query_t::lexer_t::token_t query_t::lexer_t::next_token()
+query_t::lexer_t::token_t
+query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
{
if (token_cache.kind != token_t::UNKNOWN) {
token_t tok = token_cache;
@@ -105,11 +106,19 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '\r':
case '\n':
if (++arg_i == arg_end)
- return next_token();
+ return next_token(tok_context);
goto resume;
- case '(': ++arg_i; return token_t(token_t::LPAREN);
- case ')': ++arg_i; return token_t(token_t::RPAREN);
+ case '(':
+ ++arg_i;
+ if (tok_context == token_t::TOK_EXPR)
+ consume_whitespace = true;
+ return token_t(token_t::LPAREN);
+ case ')':
+ ++arg_i;
+ if (tok_context == token_t::TOK_EXPR)
+ consume_whitespace = false;
+ return token_t(token_t::RPAREN);
case '&': ++arg_i; return token_t(token_t::TOK_AND);
case '|': ++arg_i; return token_t(token_t::TOK_OR);
case '!': ++arg_i; return token_t(token_t::TOK_NOT);
@@ -118,7 +127,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '%': ++arg_i; return token_t(token_t::TOK_META);
case '=':
++arg_i;
- consume_next_arg = true;
+ consume_next = true;
return token_t(token_t::TOK_EQ);
case '\\':
@@ -143,8 +152,10 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
ident.push_back(*arg_i);
break;
- case '(':
case ')':
+ if (! consume_next && tok_context == token_t::TOK_EXPR)
+ goto test_ident;
+ case '(':
case '&':
case '|':
case '!':
@@ -152,7 +163,7 @@ query_t::lexer_t::token_t query_t::lexer_t::next_token()
case '#':
case '%':
case '=':
- if (! consume_next)
+ if (! consume_next && tok_context != token_t::TOK_EXPR)
goto test_ident;
// fall through...
default:
@@ -247,7 +258,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
{
expr_t::ptr_op_t node;
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
switch (tok.kind) {
case lexer_t::token_t::TOK_SHOW:
case lexer_t::token_t::TOK_ONLY:
@@ -288,10 +299,10 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
expr_t::ptr_op_t arg1 = new expr_t::op_t(expr_t::op_t::VALUE);
arg1->set_value(mask_t(*tok.value));
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
if (tok.kind == lexer_t::token_t::TOK_EQ) {
- tok = lexer.next_token();
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
+ tok = lexer.next_token(tok_context);
if (tok.kind != lexer_t::token_t::TERM)
throw_(parse_error,
_("Metadata equality operator not followed by term"));
@@ -340,7 +351,7 @@ query_t::parser_t::parse_query_term(query_t::lexer_t::token_t::kind_t tok_contex
case lexer_t::token_t::LPAREN:
node = parse_query_expr(tok_context, true);
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
if (tok.kind != lexer_t::token_t::RPAREN)
tok.expected(')');
break;
@@ -358,7 +369,7 @@ query_t::parser_t::parse_unary_expr(lexer_t::token_t::kind_t tok_context)
{
expr_t::ptr_op_t node;
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
switch (tok.kind) {
case lexer_t::token_t::TOK_NOT: {
expr_t::ptr_op_t term(parse_query_term(tok_context));
@@ -385,7 +396,7 @@ query_t::parser_t::parse_and_expr(lexer_t::token_t::kind_t tok_context)
{
if (expr_t::ptr_op_t node = parse_unary_expr(tok_context)) {
while (true) {
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
if (tok.kind == lexer_t::token_t::TOK_AND) {
expr_t::ptr_op_t prev(node);
node = new expr_t::op_t(expr_t::op_t::O_AND);
@@ -409,7 +420,7 @@ query_t::parser_t::parse_or_expr(lexer_t::token_t::kind_t tok_context)
{
if (expr_t::ptr_op_t node = parse_and_expr(tok_context)) {
while (true) {
- lexer_t::token_t tok = lexer.next_token();
+ lexer_t::token_t tok = lexer.next_token(tok_context);
if (tok.kind == lexer_t::token_t::TOK_OR) {
expr_t::ptr_op_t prev(node);
node = new expr_t::op_t(expr_t::op_t::O_OR);
@@ -451,13 +462,13 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
(query_map_t::value_type
(QUERY_LIMIT, predicate_t(limiter, what_to_keep).print_to_str()));
- lexer_t::token_t tok = lexer.peek_token();
+ lexer_t::token_t tok = lexer.peek_token(tok_context);
while (tok.kind != lexer_t::token_t::END_REACHED) {
switch (tok.kind) {
case lexer_t::token_t::TOK_SHOW:
case lexer_t::token_t::TOK_ONLY:
case lexer_t::token_t::TOK_BOLD: {
- lexer.next_token();
+ lexer.next_token(tok_context);
kind_t kind;
switch (tok.kind) {
@@ -496,7 +507,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
case lexer_t::token_t::TOK_FOR:
case lexer_t::token_t::TOK_SINCE:
case lexer_t::token_t::TOK_UNTIL: {
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
string for_string;
@@ -506,10 +517,10 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
for_string = "until";
lexer.consume_next_arg = true;
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
while (tok.kind != lexer_t::token_t::END_REACHED) {
- tok = lexer.next_token();
+ tok = lexer.next_token(tok_context);
assert(tok.kind == lexer_t::token_t::TERM);
if (*tok.value == "show" || *tok.value == "bold" ||
@@ -526,7 +537,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
for_string += *tok.value;
lexer.consume_next_arg = true;
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
}
if (! for_string.empty())
@@ -538,7 +549,7 @@ query_t::parser_t::parse_query_expr(lexer_t::token_t::kind_t tok_context,
goto done;
}
- tok = lexer.peek_token();
+ tok = lexer.peek_token(tok_context);
}
done:
;
diff --git a/src/query.h b/src/query.h
index b5b3b0fc..aca31b61 100644
--- a/src/query.h
+++ b/src/query.h
@@ -222,14 +222,14 @@ public:
TRACE_DTOR(query_t::lexer_t);
}
- token_t next_token();
+ token_t next_token(token_t::kind_t tok_context = token_t::UNKNOWN);
void push_token(token_t tok) {
assert(token_cache.kind == token_t::UNKNOWN);
token_cache = tok;
}
- token_t peek_token() {
+ token_t peek_token(token_t::kind_t tok_context = token_t::UNKNOWN) {
if (token_cache.kind == token_t::UNKNOWN)
- token_cache = next_token();
+ token_cache = next_token(tok_context);
return token_cache;
}
};
diff --git a/src/report.h b/src/report.h
index 58c12f24..5b403205 100644
--- a/src/report.h
+++ b/src/report.h
@@ -770,7 +770,7 @@ public:
parent->HANDLER(total_)
.set_expr(string("--percent"),
"((is_account&parent&parent.total)?"
- " percent(scrub(total), scrub(parent.total)):0");
+ " percent(scrub(total), scrub(parent.total)):0)");
});
OPTION__
diff --git a/src/token.cc b/src/token.cc
index 735f5825..fe7ce7cd 100644
--- a/src/token.cc
+++ b/src/token.cc
@@ -137,8 +137,7 @@ void expr_t::token_t::parse_ident(std::istream& in)
value.set_string(buf);
}
-void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags,
- const char expecting)
+void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags)
{
if (in.eof()) {
kind = TOK_EOF;
@@ -230,7 +229,6 @@ void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags,
break;
}
-#if 0
case '{': {
in.get(c);
amount_t temp;
@@ -243,7 +241,6 @@ void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags,
value = temp;
break;
}
-#endif
case '!':
in.get(c);
@@ -427,7 +424,7 @@ void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags,
throw_(parse_error, _("Failed to reset input stream"));
c = static_cast<char>(in.peek());
- if (std::isdigit(c) || c == '.')
+ if (! std::isalpha(c))
expected('\0', c);
parse_ident(in);
@@ -436,7 +433,7 @@ void expr_t::token_t::next(std::istream& in, const parse_flags_t& pflags,
kind = ERROR;
symbol[0] = c;
symbol[1] = '\0';
- unexpected(expecting);
+ throw_(parse_error, _("Failed to parse identifier"));
}
} else {
if (! in.good()) {
@@ -505,10 +502,8 @@ void expr_t::token_t::unexpected(const char wanted)
}
}
-void expr_t::token_t::expected(char wanted, char c)
+void expr_t::token_t::expected(const char wanted, char c)
{
- kind = ERROR;
-
if (c == '\0' || c == -1) {
if (wanted == '\0' || wanted == -1)
throw_(parse_error, _("Unexpected end"));
@@ -518,8 +513,101 @@ void expr_t::token_t::expected(char wanted, char c)
if (wanted == '\0' || wanted == -1)
throw_(parse_error, _("Invalid char '%1'") << c);
else
- throw_(parse_error, _("Invalid char '%1' (wanted '%2')") << c << wanted);
+ throw_(parse_error,
+ _("Invalid char '%1' (wanted '%2')") << c << wanted);
+ }
+}
+
+void expr_t::token_t::expected(const kind_t wanted)
+{
+ try {
+ if (wanted == '\0' || wanted == -1)
+ throw_(parse_error, _("Invalid token '%1'") << *this);
+ else
+ throw_(parse_error,
+ _("Invalid token '%1' (wanted '%2')") << *this << wanted);
+ }
+ catch (...) {
+ kind = ERROR;
+ throw;
}
}
+std::ostream& operator<<(std::ostream& out, const expr_t::token_t::kind_t& kind)
+{
+ switch (kind) {
+ case expr_t::token_t::ERROR: out << "<error token>"; break;
+ case expr_t::token_t::VALUE: out << "<value>"; break;
+ case expr_t::token_t::IDENT: out << "<identifier>"; break;
+ case expr_t::token_t::MASK: out << "<regex mask>"; break;
+
+ case expr_t::token_t::LPAREN: out << "("; break;
+ case expr_t::token_t::RPAREN: out << ")"; break;
+ case expr_t::token_t::LBRACE: out << "{"; break;
+ case expr_t::token_t::RBRACE: out << "}"; break;
+
+ case expr_t::token_t::EQUAL: out << "=="; break;
+ case expr_t::token_t::NEQUAL: out << "!="; break;
+ case expr_t::token_t::LESS: out << "<"; break;
+ case expr_t::token_t::LESSEQ: out << "<="; break;
+ case expr_t::token_t::GREATER: out << ">"; break;
+ case expr_t::token_t::GREATEREQ: out << ">="; break;
+
+ case expr_t::token_t::ASSIGN: out << "="; break;
+ case expr_t::token_t::MATCH: out << "=~"; break;
+ case expr_t::token_t::NMATCH: out << "!~"; break;
+ case expr_t::token_t::MINUS: out << "-"; break;
+ case expr_t::token_t::PLUS: out << "+"; break;
+ case expr_t::token_t::STAR: out << "*"; break;
+ case expr_t::token_t::SLASH: out << "/"; break;
+ case expr_t::token_t::ARROW: out << "->"; break;
+ case expr_t::token_t::KW_DIV: out << "div"; break;
+
+ case expr_t::token_t::EXCLAM: out << "!"; break;
+ case expr_t::token_t::KW_AND: out << "and"; break;
+ case expr_t::token_t::KW_OR: out << "or"; break;
+ case expr_t::token_t::KW_MOD: out << "mod"; break;
+
+ case expr_t::token_t::KW_IF: out << "if"; break;
+ case expr_t::token_t::KW_ELSE: out << "else"; break;
+
+ case expr_t::token_t::QUERY: out << "?"; break;
+ case expr_t::token_t::COLON: out << ":"; break;
+
+ case expr_t::token_t::DOT: out << "."; break;
+ case expr_t::token_t::COMMA: out << ","; break;
+ case expr_t::token_t::SEMI: out << ";"; break;
+
+ case expr_t::token_t::TOK_EOF: out << "<end of input>"; break;
+ case expr_t::token_t::UNKNOWN: out << "<unknown>"; break;
+
+ default:
+ assert(false);
+ break;
+ }
+
+ return out;
+}
+
+std::ostream& operator<<(std::ostream& out, const expr_t::token_t& token)
+{
+ switch (token.kind) {
+ case expr_t::token_t::VALUE:
+ out << "<value '" << token.value << "'>";
+ break;
+ case expr_t::token_t::IDENT:
+ out << "<ident '" << token.value << "'>";
+ break;
+ case expr_t::token_t::MASK:
+ out << "<mask '" << token.value << "'>";
+ break;
+
+ default:
+ out << token.kind;
+ break;
+ }
+
+ return out;
+}
+
} // namespace ledger
diff --git a/src/token.h b/src/token.h
index 13a799cb..cbdf1258 100644
--- a/src/token.h
+++ b/src/token.h
@@ -123,13 +123,16 @@ struct expr_t::token_t : public noncopyable
int parse_reserved_word(std::istream& in);
void parse_ident(std::istream& in);
- void next(std::istream& in, const parse_flags_t& flags,
- const char expecting = '\0');
+ void next(std::istream& in, const parse_flags_t& flags);
void rewind(std::istream& in);
void unexpected(const char wanted = '\0');
- void expected(const char wanted, char c = '\0');
+ void expected(const char wanted, const char c = '\0');
+ void expected(const kind_t wanted);
};
+std::ostream& operator<<(std::ostream& out, const expr_t::token_t::kind_t& kind);
+std::ostream& operator<<(std::ostream& out, const expr_t::token_t& token);
+
} // namespace ledger
#endif // _TOKEN_H
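
Note on the token.cc/token.h changes above: the commit replaces character-based error reporting with kind-based reporting by giving the token-kind enum a stream-output operator and adding an expected(kind_t) overload. The fragment below is a minimal standalone sketch of that pattern only; the struct, enumerators, and main() driver are invented for illustration and are not ledger code.

// Minimal standalone sketch (not ledger code) of the diagnostic pattern the
// commit introduces: stream a token-kind enum so parse errors can name the
// token that was wanted instead of printing a hard-coded character.
#include <iostream>
#include <sstream>
#include <stdexcept>

struct token_t {
  enum kind_t { LPAREN, RPAREN, COLON, IDENT, TOK_EOF };

  kind_t kind;

  // Printable names for each kind, analogous to the operator<< added in
  // src/token.cc above.
  friend std::ostream& operator<<(std::ostream& out, kind_t k) {
    switch (k) {
    case LPAREN:  out << "(";              break;
    case RPAREN:  out << ")";              break;
    case COLON:   out << ":";              break;
    case IDENT:   out << "<identifier>";   break;
    case TOK_EOF: out << "<end of input>"; break;
    }
    return out;
  }

  // Analogous to expr_t::token_t::expected(kind_t): build the message from
  // token kinds rather than raw characters.
  void expected(kind_t wanted) const {
    std::ostringstream msg;
    msg << "Invalid token '" << kind << "' (wanted '" << wanted << "')";
    throw std::runtime_error(msg.str());
  }
};

int main() {
  token_t tok{token_t::COLON};
  try {
    if (tok.kind != token_t::RPAREN)
      tok.expected(token_t::RPAREN);
  } catch (const std::exception& err) {
    std::cout << err.what() << std::endl;  // Invalid token ':' (wanted ')')
  }
  return 0;
}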