Diffstat (limited to 'test/unit')
-rw-r--r--  test/unit/t_amount.cc  6
-rw-r--r--  test/unit/t_expr.cc    6
2 files changed, 5 insertions, 7 deletions
diff --git a/test/unit/t_amount.cc b/test/unit/t_amount.cc
index 2c91ee98..63d82675 100644
--- a/test/unit/t_amount.cc
+++ b/test/unit/t_amount.cc
@@ -64,9 +64,9 @@ void AmountTestCase::testParser()
   x16.parse("$2000,00");
   assertEqual(string("$2.000,00"), x16.to_string());
 
-  // Since European-ness is an additive quality, we must switch back
-  // to American-ness manually
-  x15.commodity().drop_flags(COMMODITY_STYLE_EUROPEAN);
+  // Since use of a decimal comma is an additive quality, we must switch
+  // back to decimal-period style manually.
+  x15.commodity().drop_flags(COMMODITY_STYLE_DECIMAL_COMMA);
 
   amount_t x17("$1,000,000.00"); // parsing this switches back to American
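The comment being rewritten above describes a sticky flag: parsing one decimal-comma
amount changes how every later amount in that commodity is rendered. A minimal sketch
of that behavior, assuming ledger's amount_t/commodity_t API exactly as the test
exercises it (the include path and helper name are guesses, not part of the patch):

#include "amount.h"            // assumed include; ledger's amount_t/commodity_t

using namespace ledger;

void sketch_decimal_comma_is_sticky()  // hypothetical helper, not in the suite
{
  amount_t x;
  x.parse("$2000,00");         // sets COMMODITY_STYLE_DECIMAL_COMMA on '$'
  // x.to_string() == "$2.000,00"; the style lives on the commodity, not on
  // this one amount, so every later '$' amount keeps the comma style ...
  x.commodity().drop_flags(COMMODITY_STYLE_DECIMAL_COMMA);
  // ... until the flag is dropped here, or an unambiguous amount such as
  // "$1,000,000.00" is parsed, which switches the style back as noted above.
}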
diff --git a/test/unit/t_expr.cc b/test/unit/t_expr.cc
index b5865948..0d88be9e 100644
--- a/test/unit/t_expr.cc
+++ b/test/unit/t_expr.cc
@@ -158,8 +158,6 @@ void ValueExprTestCase::testPredicateTokenizer7()
   assertEqual(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
   assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
-  assertEqual(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
-  assertEqual(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
   assertEqual(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
 #endif
 }
@@ -167,7 +165,7 @@ void ValueExprTestCase::testPredicateTokenizer7()
 void ValueExprTestCase::testPredicateTokenizer8()
 {
   value_t args;
-  args.push_back(string_value("expr foo and bar"));
+  args.push_back(string_value("expr 'foo and bar'"));
 
 #ifndef NOT_FOR_PYTHON
   query_t::lexer_t tokens(args.begin(), args.end());
@@ -182,7 +180,7 @@ void ValueExprTestCase::testPredicateTokenizer9()
 {
   value_t args;
   args.push_back(string_value("expr"));
-  args.push_back(string_value("'foo and bar'"));
+  args.push_back(string_value("'foo and bar'"));
 
 #ifndef NOT_FOR_PYTHON
   query_t::lexer_t tokens(args.begin(), args.end());
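All three t_expr.cc hunks make the same correction: once the term is quoted, the
lexer no longer sees a bare "and", so the TOK_AND/TERM assertions removed above
would fail. A minimal sketch of the two tokenizations, reusing the lexer setup
shown in these tests (the include paths and helper name are assumptions):

#include "query.h"              // assumed include; query_t::lexer_t
#include "value.h"              // assumed include; value_t, string_value

using namespace ledger;

void sketch_quoting()           // hypothetical helper, not in the suite
{
  value_t unquoted;
  unquoted.push_back(string_value("expr foo and bar"));
  query_t::lexer_t a(unquoted.begin(), unquoted.end());
  // kinds per the old assertions: TOK_EQ, TERM, TOK_AND, TERM, END_REACHED

  value_t quoted;
  quoted.push_back(string_value("expr 'foo and bar'"));
  query_t::lexer_t b(quoted.begin(), quoted.end());
  // kinds per the new assertions: TOK_EQ, TERM, END_REACHED -- one term, no TOK_AND
}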