author     Gwyneth Morgan <gwymor@tilde.club>        2023-09-12 19:50:24 +0000
committer  John Wiegley <johnw@newartisans.com>      2023-10-12 13:55:48 -0700
commit     f480f37a847100d351a970a1b83a86d9eeccbcc7 (patch)
tree       1d47f591c525e948496a38195e22d5b11b80b484
parent     4355c4faf157d5ef47b126286aa501742732708d (diff)
Fix =regex note query syntax
The manpage documents `=regex` as equivalent to the `note regex` query
syntax, but the former does not actually work: the parser only handles an
equals sign as part of the `tag type=dining` syntax and does not handle an
equals sign that starts a note query.
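
To illustrate the intended equivalence, here is a minimal tokenizer sketch,
not part of this commit, written as if added to test/unit/t_expr.cc and
assuming its harness (value_t, string_value, query_t::lexer_t); the test
case name is hypothetical:

```cpp
// Hypothetical test case, modelled on testPredicateTokenizer7 below.
BOOST_AUTO_TEST_CASE(testNoteQueryLeadingEquals)
{
  value_t args;
  args.push_back(string_value("=eat"));   // documented as equivalent to "note eat"

#ifndef NOT_FOR_PYTHON
  query_t::lexer_t tokens(args.begin(), args.end());

  // Before this fix the leading '=' lexed as TOK_EQ and the query was
  // rejected; with the fix it begins a note query.
  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_NOTE, tokens.next_token().kind);
  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
```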
Fixing this does break queries like `tag type = dining` with spaces
around the equals sign, but that syntax was not intended or documented.
Closes: #2275
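
For the unchanged case, a similar sketch under the same assumptions
(hypothetical test case name, tokenizing only the `type=dining` term from
the `tag type=dining` example): an equals sign in the middle of a term still
lexes as the comparison operator, so only the spaced, undocumented form
changes meaning:

```cpp
// Hypothetical test case; token sequence inferred from the lexer change below.
BOOST_AUTO_TEST_CASE(testTagValueEquals)
{
  value_t args;
  args.push_back(string_value("type=dining"));

#ifndef NOT_FOR_PYTHON
  query_t::lexer_t tokens(args.begin(), args.end());

  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);    // "type"
  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);  // '=' not at term start
  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);    // "dining"
  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
#endif
}
```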
-rw-r--r--  src/query.cc                          |  4
-rw-r--r--  test/manual/transaction-notes-1.test  | 11
-rw-r--r--  test/regress/1182_2.test              |  2
-rw-r--r--  test/unit/t_expr.cc                   |  4
4 files changed, 15 insertions, 6 deletions
diff --git a/src/query.cc b/src/query.cc
index 945bd34c..705ba151 100644
--- a/src/query.cc
+++ b/src/query.cc
@@ -126,6 +126,10 @@ query_t::lexer_t::next_token(query_t::lexer_t::token_t::kind_t tok_context)
   case '#': ++arg_i; return token_t(token_t::TOK_CODE);
   case '%': ++arg_i; return token_t(token_t::TOK_META);
   case '=':
+    if (arg_i == (*begin).as_string().begin()) {
+      ++arg_i;
+      return token_t(token_t::TOK_NOTE);
+    }
     ++arg_i;
     consume_next = true;
     return token_t(token_t::TOK_EQ);
diff --git a/test/manual/transaction-notes-1.test b/test/manual/transaction-notes-1.test
index 05ab3412..4085a6e2 100644
--- a/test/manual/transaction-notes-1.test
+++ b/test/manual/transaction-notes-1.test
@@ -2,14 +2,14 @@
     Expenses:Food                $4.50
     Assets:Checking
 
-2009/11/01 Panera Bread
+2009/11/02 Panera Bread
     ; Type: Coffee
     ; Let’s see, I ate a whole bunch of stuff, drank some coffee,
     ; pondered a bagel, then decided against the donut.
     Expenses:Food                $4.50
     Assets:Checking
 
-2009/11/01 Panera Bread
+2009/11/03 Panera Bread
     ; Type: Dining
     ; :Eating:
     ; This is another long note, after the metadata.
@@ -18,5 +18,10 @@
 
 test reg --columns=60 food and note eat
 09-Nov-01 Panera Bread        Expenses:Food        $4.50    $4.50
-09-Nov-01 Panera Bread        Expenses:Food        $4.50    $9.00
+09-Nov-03 Panera Bread        Expenses:Food        $4.50    $9.00
+end test
+
+test reg --columns=60 food and =eat
+09-Nov-01 Panera Bread        Expenses:Food        $4.50    $4.50
+09-Nov-03 Panera Bread        Expenses:Food        $4.50    $9.00
 end test
diff --git a/test/regress/1182_2.test b/test/regress/1182_2.test
index d3c88dd8..6c018e94 100644
--- a/test/regress/1182_2.test
+++ b/test/regress/1182_2.test
@@ -13,5 +13,5 @@ __ERROR__
 While parsing file "$FILE", line 5:
 While parsing automated transaction:
 > ============
-Error: Expected predicate after '='
+Error: note operator not followed by argument
 end test
diff --git a/test/unit/t_expr.cc b/test/unit/t_expr.cc
index c10ee029..ff30b3ed 100644
--- a/test/unit/t_expr.cc
+++ b/test/unit/t_expr.cc
@@ -142,7 +142,7 @@ BOOST_AUTO_TEST_CASE(testPredicateTokenizer6)
 #ifndef NOT_FOR_PYTHON
   query_t::lexer_t tokens(args.begin(), args.end());
 
-  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
+  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_NOTE, tokens.next_token().kind);
   BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
   BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_AND, tokens.next_token().kind);
   BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
@@ -158,7 +158,7 @@ BOOST_AUTO_TEST_CASE(testPredicateTokenizer7)
 #ifndef NOT_FOR_PYTHON
   query_t::lexer_t tokens(args.begin(), args.end());
 
-  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_EQ, tokens.next_token().kind);
+  BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TOK_NOTE, tokens.next_token().kind);
   BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::TERM, tokens.next_token().kind);
   BOOST_CHECK_EQUAL(query_t::lexer_t::token_t::END_REACHED, tokens.next_token().kind);
 #endif