author    John Wiegley <johnw@newartisans.com>  2009-11-08 23:40:42 -0500
committer John Wiegley <johnw@newartisans.com>  2009-11-09 02:06:06 -0500
commit    c3535d06c89732a0ba4c13274702b0f48198ae79 (patch)
tree      5a8153d4c627cc3c7eff687b50a59a1cc9c3d04a /test/unit
parent    523d4243e8c347cb7cbd1f68b03a5098ceb73b70 (diff)
Redesigned the expr_t, predicate_t, query_t classes
Diffstat (limited to 'test/unit')
 -rw-r--r--  test/unit/t_amount.cc |  18
 -rw-r--r--  test/unit/t_expr.cc   | 214
 2 files changed, 117 insertions(+), 115 deletions(-)
diff --git a/test/unit/t_amount.cc b/test/unit/t_amount.cc
index c2a6b8c9..2c91ee98 100644
--- a/test/unit/t_amount.cc
+++ b/test/unit/t_amount.cc
@@ -83,7 +83,7 @@ void AmountTestCase::testParser()
assertEqual(string("EUR 1000"), x19.to_string());
assertEqual(string("EUR 1000"), x20.to_string());
- x1.parse("$100.0000", amount_t::PARSE_NO_MIGRATE);
+ x1.parse("$100.0000", PARSE_NO_MIGRATE);
assertEqual(amount_t::precision_t(2), x12.commodity().precision());
assertEqual(x1.commodity(), x12.commodity());
assertEqual(x1, x12);
@@ -93,27 +93,27 @@ void AmountTestCase::testParser()
assertEqual(x0.commodity(), x12.commodity());
assertEqual(x0, x12);
- x2.parse("$100.00", amount_t::PARSE_NO_REDUCE);
+ x2.parse("$100.00", PARSE_NO_REDUCE);
assertEqual(x2, x12);
- x3.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
+ x3.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
assertEqual(x3, x12);
x4.parse("$100.00");
assertEqual(x4, x12);
- x5.parse("$100.00", amount_t::PARSE_NO_MIGRATE);
+ x5.parse("$100.00", PARSE_NO_MIGRATE);
assertEqual(x5, x12);
- x6.parse("$100.00", amount_t::PARSE_NO_REDUCE);
+ x6.parse("$100.00", PARSE_NO_REDUCE);
assertEqual(x6, x12);
- x7.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
+ x7.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
assertEqual(x7, x12);
x8.parse("$100.00");
assertEqual(x8, x12);
- x9.parse("$100.00", amount_t::PARSE_NO_MIGRATE);
+ x9.parse("$100.00", PARSE_NO_MIGRATE);
assertEqual(x9, x12);
- x10.parse("$100.00", amount_t::PARSE_NO_REDUCE);
+ x10.parse("$100.00", PARSE_NO_REDUCE);
assertEqual(x10, x12);
- x11.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
+ x11.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
assertEqual(x11, x12);
assertValid(x0);
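
A minimal sketch (not part of the commit) of how an amount parse call reads after this rename: the PARSE_* flags move out of amount_t and become plain namespace-level symbols. The flag names and the parse() calls are taken verbatim from the t_amount.cc hunks above; the header name "amount.h" and the local variable are assumptions for illustration only.

    #include "amount.h"          // assumed header for amount_t

    using namespace ledger;

    void parse_flags_sketch()
    {
      amount_t x;
      // Previously: x.parse("$100.00", amount_t::PARSE_NO_MIGRATE | amount_t::PARSE_NO_REDUCE);
      // After the redesign the flags are namespace-level:
      x.parse("$100.00", PARSE_NO_MIGRATE | PARSE_NO_REDUCE);
    }
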
diff --git a/test/unit/t_expr.cc b/test/unit/t_expr.cc
index 2576379e..5e5d44fb 100644
--- a/test/unit/t_expr.cc
+++ b/test/unit/t_expr.cc
@@ -4,6 +4,8 @@
#include "expr.h"
#include "predicate.h"
+#include "query.h"
+#include "op.h"
using namespace ledger;
@@ -46,12 +48,12 @@ void ValueExprTestCase::testPredicateTokenizer1()
args.push_back(string_value("bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -61,12 +63,12 @@ void ValueExprTestCase::testPredicateTokenizer2()
args.push_back(string_value("foo and bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -78,14 +80,14 @@ void ValueExprTestCase::testPredicateTokenizer3()
args.push_back(string_value("bar)"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::RPAREN, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -99,14 +101,14 @@ void ValueExprTestCase::testPredicateTokenizer4()
args.push_back(string_value(")"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::RPAREN, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -117,14 +119,14 @@ void ValueExprTestCase::testPredicateTokenizer5()
args.push_back(string_value("bar)"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::LPAREN, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::RPAREN, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::RPAREN, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -136,13 +138,13 @@ void ValueExprTestCase::testPredicateTokenizer6()
args.push_back(string_value("bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -152,11 +154,11 @@ void ValueExprTestCase::testPredicateTokenizer7()
args.push_back(string_value("=foo and bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -166,11 +168,11 @@ void ValueExprTestCase::testPredicateTokenizer8()
args.push_back(string_value("expr foo and bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -181,11 +183,11 @@ void ValueExprTestCase::testPredicateTokenizer9()
args.push_back(string_value("foo and bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -198,13 +200,13 @@ void ValueExprTestCase::testPredicateTokenizer10()
args.push_back(string_value("bar"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
+ query_t::lexer_t tokens(args.begin(), args.end());
- assertEqual(query_lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -218,14 +220,14 @@ void ValueExprTestCase::testPredicateTokenizer11()
args.push_back(string_value("baz"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -239,14 +241,14 @@ void ValueExprTestCase::testPredicateTokenizer12()
args.push_back(string_value("baz"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -259,14 +261,14 @@ void ValueExprTestCase::testPredicateTokenizer13()
args.push_back(string_value("|baz"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -279,14 +281,14 @@ void ValueExprTestCase::testPredicateTokenizer14()
args.push_back(string_value("baz"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -298,14 +300,14 @@ void ValueExprTestCase::testPredicateTokenizer15()
args.push_back(string_value("bar|baz"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
@@ -316,13 +318,13 @@ void ValueExprTestCase::testPredicateTokenizer16()
args.push_back(string_value("and bar|baz"));
#ifndef NOT_FOR_PYTHON
- query_lexer_t tokens(args.begin(), args.end());
-
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_AND, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TOK_OR, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::TERM, tokens.next_token().kind);
- assertEqual(query_lexer_t::token_t::END_REACHED, tokens.next_token().kind);
+ query_t::lexer_t tokens(args.begin(), args.end());
+
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TOK_OR, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
+ assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
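
For reference, a minimal sketch (not part of the commit) of walking the query tokenizer under the new nested-class names. query_t::lexer_t, next_token(), .kind, and the token kinds are taken from the t_expr.cc hunks above; the declaration of args as a value_t is an assumption, since it falls outside the hunks.

    #include "query.h"

    using namespace ledger;

    void lexer_sketch()
    {
      value_t args;                       // assumed type; declaration not shown in the hunks
      args.push_back(string_value("foo and bar"));

      // The lexer is now a nested class of query_t rather than a free query_lexer_t.
      query_t::lexer_t tokens(args.begin(), args.end());

      // Pull tokens until the lexer reports the end of input;
      // for "foo and bar" the kinds come out as TERM, TOK_AND, TERM, END_REACHED.
      while (tokens.next_token().kind != query_t::lexer_t::token_t::END_REACHED)
        ;
    }
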