Merge pull request #39029 from kaulith/fix/safe-filters-notation
fix: preserve docnames matching scientific notation in get_safe_filters
commit 4a0be898af
3 changed files with 40 additions and 9 deletions
@@ -291,3 +291,12 @@ class TestClient(IntegrationTestCase):
 		# cleanup
 		for doc in docs:
 			frappe.delete_doc("Note", doc)
+
+	def test_get_value_scientific_notation_docname(self):
+		from frappe.client import get_value
+
+		tag = frappe.get_doc({"doctype": "Tag", "name": "3E002"}).insert(ignore_if_duplicate=True)
+		try:
+			self.assertEqual(get_value("Tag", ["name"], "3E002"), {"name": "3E002"})
+		finally:
+			tag.delete()
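For context on how this surfaces outside the test suite: when the filter is sent as a raw query-string value rather than a JSON-encoded one, it reaches get_safe_filters as the bare string "3E002". The sketch below is illustrative only; the site URL and API token are placeholders and not part of this change.

import requests

# Placeholders: substitute a real site URL and api_key:api_secret pair.
resp = requests.get(
	"https://example.com/api/method/frappe.client.get_value",
	params={"doctype": "Tag", "fieldname": "name", "filters": "3E002"},
	headers={"Authorization": "token <api_key>:<api_secret>"},
)
# Before this fix the bare filter string was parsed as the JSON number 300.0
# and stringified back to "300.0", so the Tag named "3E002" was never found.
print(resp.json())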
@@ -32,6 +32,7 @@ from frappe.utils import (
 	get_file_timestamp,
 	get_gravatar,
 	get_link_to_report,
+	get_safe_filters,
 	get_site_info,
 	get_sites,
 	get_url,
@@ -352,6 +353,28 @@ class TestFilters(IntegrationTestCase):
 		link = get_link_to_report(name="ToDo", filters=filters)
 		self.assertIn('creation=["between",["2024-01-01","2024-12-31"]]', link)
 
+	def test_safe_filters_scientific_notation(self):
+		self.assertEqual(get_safe_filters("3E002"), "3E002")
+		self.assertEqual(get_safe_filters("1E5"), "1E5")
+		self.assertEqual(get_safe_filters("2e10"), "2e10")
+		self.assertEqual(get_safe_filters("1.5"), "1.5")
+		self.assertEqual(get_safe_filters("Infinity"), "Infinity")
+		self.assertEqual(get_safe_filters("NaN"), "NaN")
+
+	def test_safe_filters_json(self):
+		self.assertEqual(get_safe_filters('{"name": "ABC"}'), {"name": "ABC"})
+		self.assertEqual(get_safe_filters('[["name", "=", "ABC"]]'), [["name", "=", "ABC"]])
+		# FrappeClient encodes scalar filters via frappe.as_json — must still unwrap
+		self.assertEqual(get_safe_filters('"ABC"'), "ABC")
+		self.assertIsNone(get_safe_filters("null"))
+		self.assertIs(get_safe_filters("true"), True)
+		self.assertIs(get_safe_filters("false"), False)
+
+	def test_safe_filters_non_string(self):
+		self.assertEqual(get_safe_filters({"name": "ABC"}), {"name": "ABC"})
+		self.assertEqual(get_safe_filters([["name", "=", "ABC"]]), [["name", "=", "ABC"]])
+		self.assertIsNone(get_safe_filters(None))
+
 
 class TestMoney(IntegrationTestCase):
 	def test_money_in_words(self):
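To see which of these literals orjson actually treats as JSON, here is a small standalone check; it assumes nothing beyond the orjson dependency that get_safe_filters itself uses.

import orjson

for raw in ["3E002", "1E5", "2e10", "1.5", "Infinity", "NaN", '"ABC"', "null", "true", "false"]:
	try:
		# "3E002", "1E5", "2e10" and "1.5" all parse as numbers (300.0, 100000.0, ...),
		# which is exactly why they must not be stringified back into filters.
		print(raw, "->", orjson.loads(raw))
	except orjson.JSONDecodeError:
		# "Infinity" and "NaN" are not valid JSON, so they already fall through unchanged.
		print(raw, "-> not JSON, passed through as-is")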
@@ -905,16 +905,15 @@ def call(fn, *args, **kwargs):
 
 def get_safe_filters(filters):
 	try:
-		filters = orjson.loads(filters)
-
-		if isinstance(filters, int | float):
-			filters = frappe.as_unicode(filters)
-
+		parsed = orjson.loads(filters)
 	except (TypeError, ValueError):
-		# filters are not passed, not json
-		pass
-
-	return filters
+		# not a string, or not valid json
+		return filters
+	# numeric JSON is ambiguous: docnames like "3E002" parse as floats and
+	# would be corrupted by stringifying back, so keep the original string
+	if isinstance(parsed, int | float) and not isinstance(parsed, bool):
+		return filters
+	return parsed
 
 
 def create_batch(iterable: Iterable, size: int) -> Generator[Iterable]:
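The behavioural difference in one self-contained snippet; str() stands in for the frappe.as_unicode call the old code made, which is an assumption for illustration rather than the exact old code path.

import orjson

docname = "3E002"
parsed = orjson.loads(docname)  # 300.0, a perfectly valid JSON number

# Old behaviour (sketch): numeric parses were stringified back, corrupting the docname.
print(str(parsed))  # "300.0"

# New behaviour: a numeric parse of a plain string is discarded and the
# original value is returned, so the docname survives intact.
print(docname if isinstance(parsed, int | float) and not isinstance(parsed, bool) else parsed)  # "3E002"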