author     John Wiegley <johnw@newartisans.com>    2009-11-08 23:40:42 -0500
committer  John Wiegley <johnw@newartisans.com>    2009-11-09 02:06:06 -0500
commit     c3535d06c89732a0ba4c13274702b0f48198ae79 (patch)
tree       5a8153d4c627cc3c7eff687b50a59a1cc9c3d04a /src/query.h
parent     523d4243e8c347cb7cbd1f68b03a5098ceb73b70 (diff)
download   fork-ledger-c3535d06c89732a0ba4c13274702b0f48198ae79.tar.gz
           fork-ledger-c3535d06c89732a0ba4c13274702b0f48198ae79.tar.bz2
           fork-ledger-c3535d06c89732a0ba4c13274702b0f48198ae79.zip
Redesigned the expr_t, predicate_t, query_t classes
Diffstat (limited to 'src/query.h')
-rw-r--r--   src/query.h   291
1 file changed, 291 insertions, 0 deletions
diff --git a/src/query.h b/src/query.h
new file mode 100644
index 00000000..e64588ad
--- /dev/null
+++ b/src/query.h
@@ -0,0 +1,291 @@
+/*
+ * Copyright (c) 2003-2009, John Wiegley. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * - Redistributions of source code must retain the above copyright
+ *   notice, this list of conditions and the following disclaimer.
+ *
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ *
+ * - Neither the name of New Artisans LLC nor the names of its
+ *   contributors may be used to endorse or promote products derived from
+ *   this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/**
+ * @addtogroup expr
+ */
+
+/**
+ * @file   predicate.h
+ * @author John Wiegley
+ *
+ * @ingroup expr
+ */
+#ifndef _QUERY_H
+#define _QUERY_H
+
+#include "predicate.h"
+
+namespace ledger {
+
+class query_t : public predicate_t
+{
+public:
+  class lexer_t
+  {
+    friend class query_t;
+    friend class parser_t;
+
+    value_t::sequence_t::const_iterator begin;
+    value_t::sequence_t::const_iterator end;
+
+    string::const_iterator arg_i;
+    string::const_iterator arg_end;
+
+    bool consume_whitespace;
+
+  public:
+    struct token_t
+    {
+      enum kind_t {
+        UNKNOWN,
+
+        LPAREN,
+        RPAREN,
+
+        TOK_NOT,
+        TOK_AND,
+        TOK_OR,
+        TOK_EQ,
+
+        TOK_DATE,
+        TOK_CODE,
+        TOK_PAYEE,
+        TOK_NOTE,
+        TOK_ACCOUNT,
+        TOK_META,
+        TOK_EXPR,
+
+        TERM,
+
+        END_REACHED
+
+      } kind;
+
+      optional<string> value;
+
+      explicit token_t(kind_t _kind = UNKNOWN,
+                       const optional<string>& _value = none)
+        : kind(_kind), value(_value) {
+        TRACE_CTOR(lexer_t::token_t, "");
+      }
+      token_t(const token_t& tok)
+        : kind(tok.kind), value(tok.value) {
+        TRACE_CTOR(lexer_t::token_t, "copy");
+      }
+      ~token_t() throw() {
+        TRACE_DTOR(lexer_t::token_t);
+      }
+
+      token_t& operator=(const token_t& tok) {
+        if (this != &tok) {
+          kind = tok.kind;
+          value = tok.value;
+        }
+        return *this;
+      }
+
+      operator bool() const {
+        return kind != END_REACHED;
+      }
+
+      string to_string() const {
+        switch (kind) {
+        case UNKNOWN:     return "UNKNOWN";
+        case LPAREN:      return "LPAREN";
+        case RPAREN:      return "RPAREN";
+        case TOK_NOT:     return "TOK_NOT";
+        case TOK_AND:     return "TOK_AND";
+        case TOK_OR:      return "TOK_OR";
+        case TOK_EQ:      return "TOK_EQ";
+        case TOK_DATE:    return "TOK_DATE";
+        case TOK_CODE:    return "TOK_CODE";
+        case TOK_PAYEE:   return "TOK_PAYEE";
+        case TOK_NOTE:    return "TOK_NOTE";
+        case TOK_ACCOUNT: return "TOK_ACCOUNT";
+        case TOK_META:    return "TOK_META";
+        case TOK_EXPR:    return "TOK_EXPR";
+        case TERM:        return string("TERM(") + *value + ")";
+        case END_REACHED: return "END_REACHED";
+        }
+      }
+
+      string symbol() const {
+        switch (kind) {
+        case LPAREN:      return "(";
+        case RPAREN:      return ")";
+        case TOK_NOT:     return "not";
+        case TOK_AND:     return "and";
+        case TOK_OR:      return "or";
+        case TOK_EQ:      return "=";
+        case TOK_DATE:    return "date";
+        case TOK_CODE:    return "code";
+        case TOK_PAYEE:   return "payee";
+        case TOK_NOTE:    return "note";
+        case TOK_ACCOUNT: return "account";
+        case TOK_META:    return "meta";
+        case TOK_EXPR:    return "expr";
+
+        case END_REACHED: return "<EOF>";
+
+        case TERM:
+          assert(0);
+          return "<TERM>";
+
+        case UNKNOWN:
+        default:
+          assert(0);
+          return "<UNKNOWN>";
+        }
+      }
+
+      void unexpected();
+      void expected(char wanted, char c = '\0');
+    };
+
+    token_t token_cache;
+
+    lexer_t(value_t::sequence_t::const_iterator _begin,
+            value_t::sequence_t::const_iterator _end)
+      : begin(_begin), end(_end), consume_whitespace(false)
+    {
+      TRACE_CTOR(lexer_t, "");
+      assert(begin != end);
+      arg_i   = (*begin).as_string().begin();
+      arg_end = (*begin).as_string().end();
+    }
+    lexer_t(const lexer_t& lexer)
+      : begin(lexer.begin), end(lexer.end),
+        arg_i(lexer.arg_i), arg_end(lexer.arg_end),
+        consume_whitespace(lexer.consume_whitespace),
+        token_cache(lexer.token_cache)
+    {
+      TRACE_CTOR(lexer_t, "copy");
+    }
+    ~lexer_t() throw() {
+      TRACE_DTOR(lexer_t);
+    }
+
+    token_t next_token();
+    void    push_token(token_t tok) {
+      assert(token_cache.kind == token_t::UNKNOWN);
+      token_cache = tok;
+    }
+    token_t peek_token() {
+      if (token_cache.kind == token_t::UNKNOWN)
+        token_cache = next_token();
+      return token_cache;
+    }
+  };
+
+protected:
+  class parser_t
+  {
+    friend class query_t;
+
+    value_t args;
+    lexer_t lexer;
+
+    expr_t::ptr_op_t parse_query_term(lexer_t::token_t::kind_t tok_context);
+    expr_t::ptr_op_t parse_unary_expr(lexer_t::token_t::kind_t tok_context);
+    expr_t::ptr_op_t parse_and_expr(lexer_t::token_t::kind_t tok_context);
+    expr_t::ptr_op_t parse_or_expr(lexer_t::token_t::kind_t tok_context);
+    expr_t::ptr_op_t parse_query_expr(lexer_t::token_t::kind_t tok_context);
+
+  public:
+    parser_t(const value_t& _args)
+      : args(_args), lexer(args.begin(), args.end()) {
+      TRACE_CTOR(parser_t, "");
+    }
+    parser_t(const parser_t& parser)
+      : args(parser.args), lexer(parser.lexer) {
+      TRACE_CTOR(parser_t, "copy");
+    }
+    ~parser_t() throw() {
+      TRACE_DTOR(parser_t);
+    }
+
+    expr_t::ptr_op_t parse() {
+      return parse_query_expr(lexer_t::token_t::TOK_ACCOUNT);
+    }
+
+    bool tokens_remaining() {
+      lexer_t::token_t tok = lexer.peek_token();
+      assert(tok.kind != lexer_t::token_t::UNKNOWN);
+      return tok.kind != lexer_t::token_t::END_REACHED;
+    }
+  };
+
+  optional<parser_t> parser;
+
+public:
+  query_t() {
+    TRACE_CTOR(query_t, "");
+  }
+  query_t(const query_t& other)
+    : predicate_t(other) {
+    TRACE_CTOR(query_t, "copy");
+  }
+
+  query_t(const value_t& args,
+          const keep_details_t& _what_to_keep = keep_details_t())
+    : predicate_t(_what_to_keep) {
+    TRACE_CTOR(query_t, "string, keep_details_t");
+    if (! args.empty())
+      parse_args(args);
+  }
+  ~query_t() throw() {
+    TRACE_DTOR(query_t);
+  }
+
+  void parse_args(const value_t& args) {
+    if (! parser)
+      parser = parser_t(args);
+    ptr = parser->parse();      // expr_t::ptr
+  }
+
+  void parse_again() {
+    assert(parser);
+    ptr = parser->parse();      // expr_t::ptr
+  }
+
+  bool tokens_remaining() {
+    return parser && parser->tokens_remaining();
+  }
+
+  virtual string text() {
+    return print_to_str();
+  }
+};
+
+} // namespace ledger
+
+#endif // _QUERY_H
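
For orientation (not part of the commit): query_t turns the raw words a user types after a report command into the expression held by its predicate_t/expr_t base. Below is a minimal, hypothetical usage sketch; the helper name example_build_query is invented, and it assumes value_t::push_back and the string_value helper behave as they do elsewhere in ledger.

// Hypothetical sketch only -- not part of this commit.
#include <string>
#include <vector>
#include "query.h"

namespace ledger {

void example_build_query(const std::vector<std::string>& words)
{
  value_t args;                           // sequence of the raw query words
  for (std::vector<std::string>::const_iterator i = words.begin();
       i != words.end();
       i++)
    args.push_back(string_value(*i));     // assumed value_t helpers

  // The (args, keep_details_t) constructor parses immediately when args
  // is non-empty, storing the resulting expression in the predicate.
  query_t query(args);

  // Input the first parse did not consume can be handled in a second pass,
  // e.g. when one command line carries several query expressions.
  if (query.tokens_remaining())
    query.parse_again();
}

} // namespace ledger

The parser_t helpers declared above (parse_or_expr over parse_and_expr over parse_unary_expr over parse_query_term) suggest the usual recursive-descent precedence, so "not", "and", "or", parentheses, and category keywords such as payee or account combine into a single predicate expression.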