diff options
author | John Wiegley <johnw@newartisans.com> | 2010-06-23 19:31:06 -0400 |
---|---|---|
committer | John Wiegley <johnw@newartisans.com> | 2010-06-23 19:31:06 -0400 |
commit | eda6cbd0146d371653feec70e0eb3ee4e4c56379 (patch) | |
tree | aeb4e1ace0f9b284789f66f3ceb30bed66946b93 /test/unit | |
parent | 014fde3418783d1ee1ec7fe4ea6c8b04ae7f6cd8 (diff) | |
parent | 93807fade69dd4f0ec575eda78fe1a77a85c24e3 (diff) | |
download | fork-ledger-eda6cbd0146d371653feec70e0eb3ee4e4c56379.tar.gz fork-ledger-eda6cbd0146d371653feec70e0eb3ee4e4c56379.tar.bz2 fork-ledger-eda6cbd0146d371653feec70e0eb3ee4e4c56379.zip |
Merge branch 'next'
Diffstat (limited to 'test/unit')
-rw-r--r-- | test/unit/t_expr.cc | 8 |
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/test/unit/t_expr.cc b/test/unit/t_expr.cc
index 0d88be9e..d9dc1f1f 100644
--- a/test/unit/t_expr.cc
+++ b/test/unit/t_expr.cc
@@ -63,7 +63,7 @@ void ValueExprTestCase::testPredicateTokenizer2()
   args.push_back(string_value("foo and bar"));
 
 #ifndef NOT_FOR_PYTHON
-  query_t::lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end(), false);
 
   assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
   assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
@@ -119,7 +119,7 @@ void ValueExprTestCase::testPredicateTokenizer5()
   args.push_back(string_value("bar)"));
 
 #ifndef NOT_FOR_PYTHON
-  query_t::lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end(), false);
 
   assertEqual(query_t::lexer_t::token_t::LPAREN, tokens.next_token().kind);
   assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
@@ -168,7 +168,7 @@ void ValueExprTestCase::testPredicateTokenizer8()
   args.push_back(string_value("expr 'foo and bar'"));
 
 #ifndef NOT_FOR_PYTHON
-  query_t::lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end(), false);
 
   assertEqual(query_t::lexer_t::token_t::TOK_EXPR, tokens.next_token().kind);
   assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
@@ -318,7 +318,7 @@ void ValueExprTestCase::testPredicateTokenizer16()
   args.push_back(string_value("and bar|baz"));
 
 #ifndef NOT_FOR_PYTHON
-  query_t::lexer_t tokens(args.begin(), args.end());
+  query_t::lexer_t tokens(args.begin(), args.end(), false);
 
   assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
   assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);