Eradicate the dedupe_qs filter

The new `extend_query` and `urlencode` filters are much cleaner ways
to achieve what we previously did with `dedupe_qs`.

Signed-off-by: Kevin Morris <kevr@0cost.org>
This commit is contained in:
Kevin Morris 2021-08-31 14:27:16 -07:00
parent c9374732c0
commit 210e459ba9
No known key found for this signature in database
GPG key ID: F7E46DED420788F3
5 changed files with 11 additions and 53 deletions

View file

@ -1,4 +1,3 @@
from collections import OrderedDict
from datetime import datetime
from zoneinfo import ZoneInfo
@ -17,21 +16,6 @@ def test_as_timezone():
assert util.as_timezone(dt, "UTC") == dt.astimezone(tz=ZoneInfo("UTC"))
def test_dedupe_qs():
    """Check that util.dedupe_qs keeps only the last value given for a key."""
    params = OrderedDict(key1="test", key2="blah", key3=1)

    # Assemble the query string by hand and sanity-check the result.
    qs = "&".join(f"{key}={value}" for key, value in params.items())
    assert qs == "key1=test&key2=blah&key3=1"

    # Supplying new values for key1 and key3 should drop their
    # earlier occurrences while appending the overrides.
    result = util.dedupe_qs(qs, "key1=changed", "key3=changed")
    assert result == "key2=blah&key1=changed&key3=changed"
def test_number_format():
assert util.number_format(0.222, 2) == "0.22"
assert util.number_format(0.226, 2) == "0.23"