refactor: support new function style

- Migrate all SQL function usage from string format to dict format
- Old: fields=['count(*) as count']
- New: fields=[{'COUNT': '*', 'as': 'count'}]
- Add `NULLIF`

Signed-off-by: Akhil Narang <me@akhilnarang.dev>
This commit is contained in:
Akhil Narang 2025-11-06 18:27:43 +05:30
parent 5d3f7aaab9
commit 90ed0502fa
No known key found for this signature in database
GPG key ID: 9DCC61E211BF645F
14 changed files with 31 additions and 26 deletions

View file

@@ -236,7 +236,7 @@ def get_import_status(data_import_name: str):
import_status = {"status": data_import.status} import_status = {"status": data_import.status}
logs = frappe.get_all( logs = frappe.get_all(
"Data Import Log", "Data Import Log",
fields=["count(*) as count", "success"], fields=[{"COUNT": "*", "as": "count"}, "success"],
filters={"data_import": data_import_name}, filters={"data_import": data_import_name},
group_by="success", group_by="success",
) )

View file

@@ -20,7 +20,7 @@ def get_things_todo(as_list=False):
"""Return a count of incomplete ToDos.""" """Return a count of incomplete ToDos."""
data = frappe.get_list( data = frappe.get_list(
"ToDo", "ToDo",
fields=["name", "description"] if as_list else "count(*)", fields=["name", "description"] if as_list else [{"COUNT": "*"}],
filters=[["ToDo", "status", "=", "Open"]], filters=[["ToDo", "status", "=", "Open"]],
or_filters=[ or_filters=[
["ToDo", "allocated_to", "=", frappe.session.user], ["ToDo", "allocated_to", "=", frappe.session.user],

View file

@@ -52,6 +52,7 @@ FUNCTION_MAPPING = {
"IFNULL": functions.IfNull, "IFNULL": functions.IfNull,
"CONCAT": functions.Concat, "CONCAT": functions.Concat,
"NOW": functions.Now, "NOW": functions.Now,
"NULLIF": functions.NullIf,
} }
# Mapping from operator names to pypika Arithmetic enum values # Mapping from operator names to pypika Arithmetic enum values
@@ -1579,6 +1580,12 @@ class SQLFunctionParser:
if not arg: if not arg:
frappe.throw(_("Empty string arguments are not allowed"), frappe.ValidationError) frappe.throw(_("Empty string arguments are not allowed"), frappe.ValidationError)
# Special case: allow '*' for COUNT(*) and similar aggregate functions
if arg == "*":
# Return as-is for SQL star expansion (COUNT(*), etc.)
# pypika will handle this correctly when used with aggregate functions
return Column("*")
# Check for string literals (quoted strings) # Check for string literals (quoted strings)
if self._is_string_literal(arg): if self._is_string_literal(arg):
return self._validate_string_literal(arg) return self._validate_string_literal(arg)

View file

@@ -201,7 +201,7 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date):
data = frappe.get_list( data = frappe.get_list(
doctype, doctype,
fields=[datefield, f"SUM({value_field})", "COUNT(*)"], fields=[datefield, {"SUM": value_field}, {"COUNT": "*"}],
filters=filters, filters=filters,
group_by=datefield, group_by=datefield,
order_by=datefield, order_by=datefield,
@@ -244,7 +244,7 @@ def get_heatmap_chart_config(chart, filters, heatmap_year):
doctype, doctype,
fields=[ fields=[
timestamp_field, timestamp_field,
f"{aggregate_function}({value_field})", {aggregate_function: value_field},
], ],
filters=filters, filters=filters,
group_by=f"date({datefield})", group_by=f"date({datefield})",
@@ -270,7 +270,7 @@ def get_group_by_chart_config(chart, filters) -> dict | None:
doctype, doctype,
fields=[ fields=[
f"{group_by_field} as name", f"{group_by_field} as name",
f"{aggregate_function}({value_field}) as count", {aggregate_function: value_field, "as": "count"},
], ],
filters=filters, filters=filters,
parent_doctype=chart.parent_document_type, parent_doctype=chart.parent_document_type,

View file

@@ -46,7 +46,7 @@ def deferred_insert(routes):
def frequently_visited_links(): def frequently_visited_links():
return frappe.get_all( return frappe.get_all(
"Route History", "Route History",
fields=["route", "count(name) as count"], fields=["route", {"COUNT": "name", "as": "count"}],
filters={"user": frappe.session.user}, filters={"user": frappe.session.user},
group_by="route", group_by="route",
order_by="count desc", order_by="count desc",

View file

@@ -69,7 +69,7 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
doctype, doctype,
filters=current_filters, filters=current_filters,
group_by=f"`tab{doctype}`.{field}", group_by=f"`tab{doctype}`.{field}",
fields=["count(*) as count", f"`{field}` as name"], fields=[{"COUNT": "*", "as": "count"}, f"`{field}` as name"],
order_by="count desc", order_by="count desc",
limit=1000, limit=1000,
) )

View file

@@ -65,7 +65,7 @@ def get_notifications_for_doctypes(config, notification_count):
try: try:
if isinstance(condition, dict): if isinstance(condition, dict):
result = frappe.get_list( result = frappe.get_list(
d, fields=["count(*) as count"], filters=condition, ignore_ifnull=True d, fields=[{"COUNT": "*", "as": "count"}], filters=condition, ignore_ifnull=True
)[0].count )[0].count
else: else:
result = frappe.get_attr(condition)() result = frappe.get_attr(condition)()

View file

@@ -686,7 +686,7 @@ def get_stats(stats, doctype, filters=None):
try: try:
tag_count = frappe.get_list( tag_count = frappe.get_list(
doctype, doctype,
fields=[column, "count(*)"], fields=[column, {"COUNT": "*"}],
filters=[*filters, [column, "!=", ""]], filters=[*filters, [column, "!=", ""]],
group_by=column, group_by=column,
as_list=True, as_list=True,
@@ -697,7 +697,7 @@ def get_stats(stats, doctype, filters=None):
results[column] = scrub_user_tags(tag_count) results[column] = scrub_user_tags(tag_count)
no_tag_count = frappe.get_list( no_tag_count = frappe.get_list(
doctype, doctype,
fields=[column, "count(*)"], fields=[column, {"COUNT": "1"}],
filters=[*filters, [column, "in", ("", ",")]], filters=[*filters, [column, "in", ("", ",")]],
as_list=True, as_list=True,
group_by=column, group_by=column,
@@ -736,7 +736,7 @@ def get_filter_dashboard_data(stats, doctype, filters=None):
if tag["type"] not in ["Date", "Datetime"]: if tag["type"] not in ["Date", "Datetime"]:
tagcount = frappe.get_list( tagcount = frappe.get_list(
doctype, doctype,
fields=[tag["name"], "count(*)"], fields=[tag["name"], {"COUNT": "*"}],
filters=[*filters, "ifnull(`{}`,'')!=''".format(tag["name"])], filters=[*filters, "ifnull(`{}`,'')!=''".format(tag["name"])],
group_by=tag["name"], group_by=tag["name"],
as_list=True, as_list=True,
@@ -758,7 +758,7 @@ def get_filter_dashboard_data(stats, doctype, filters=None):
"No Data", "No Data",
frappe.get_list( frappe.get_list(
doctype, doctype,
fields=[tag["name"], "count(*)"], fields=[tag["name"], {"COUNT": "*"}],
filters=[*filters, "({0} = '' or {0} is null)".format(tag["name"])], filters=[*filters, "({0} = '' or {0} is null)".format(tag["name"])],
as_list=True, as_list=True,
)[0][1], )[0][1],

View file

@@ -176,8 +176,8 @@ def search_widget(
if not meta.translated_doctype: if not meta.translated_doctype:
_txt = frappe.db.escape((txt or "").replace("%", "").replace("@", "")) _txt = frappe.db.escape((txt or "").replace("%", "").replace("@", ""))
# locate returns 0 if string is not found, convert 0 to null and then sort null to end in order by # locate returns 0 if string is not found, convert 0 to null and then sort null to end in order by
_relevance = f"(1 / nullif(locate({_txt}, `tab{doctype}`.`name`), 0))" _relevance = {"DIV": [1, {"NULLIF": [{"LOCATE": [_txt, "name"]}, 0]}], "as": "_relevance"}
formatted_fields.append(f"""{_relevance} as `_relevance`""") formatted_fields.append(f"{_relevance} as _relevance")
# Since we are sorting by alias postgres needs to know number of column we are sorting # Since we are sorting by alias postgres needs to know number of column we are sorting
if frappe.db.db_type == "mariadb": if frappe.db.db_type == "mariadb":
order_by = f"ifnull(_relevance, -9999) desc, {order_by}" order_by = f"ifnull(_relevance, -9999) desc, {order_by}"

View file

@@ -950,7 +950,7 @@ def get_max_email_uid(email_account):
"sent_or_received": "Received", "sent_or_received": "Received",
"email_account": email_account, "email_account": email_account,
}, },
fields=["max(uid) as uid"], fields=[{"MAX": "uid", "as": "uid"}],
): ):
return cint(result[0].get("uid", 0)) + 1 return cint(result[0].get("uid", 0)) + 1
return 1 return 1

View file

@@ -403,7 +403,7 @@ class TestDB(IntegrationTestCase):
random_field, random_field,
) )
self.assertEqual( self.assertEqual(
next(iter(frappe.get_all("ToDo", fields=[f"count(`{random_field}`)"], limit=1)[0])), next(iter(frappe.get_all("ToDo", fields=[{"COUNT": f"`{random_field}`"}], limit=1)[0])),
"count" if frappe.conf.db_type == "postgres" else f"count(`{random_field}`)", "count" if frappe.conf.db_type == "postgres" else f"count(`{random_field}`)",
) )

View file

@@ -483,15 +483,13 @@ class TestDBQuery(IntegrationTestCase):
self.assertTrue("count" in data[0]) self.assertTrue("count" in data[0])
data = DatabaseQuery("DocType").execute( data = DatabaseQuery("DocType").execute(
fields=["name", "issingle", "locate('', name) as _relevance"], fields=["name", "issingle", "locate('','name') as _relevance"], limit_start=0, limit_page_length=1
limit_start=0,
limit_page_length=1,
) )
self.assertTrue("_relevance" in data[0]) self.assertTrue("_relevance" in data[0])
# Test that fields with keywords in strings are allowed # Test that fields with keywords in strings are allowed
data = DatabaseQuery("DocType").execute( data = DatabaseQuery("DocType").execute(
fields=["name", "locate('select', name)"], fields=["name", "locate('select', 'name')"],
limit_start=0, limit_start=0,
limit_page_length=1, limit_page_length=1,
) )
@@ -818,7 +816,7 @@ class TestDBQuery(IntegrationTestCase):
frappe.db.get_list( frappe.db.get_list(
"Web Form", "Web Form",
filters=[["Web Form Field", "reqd", "=", 1]], filters=[["Web Form Field", "reqd", "=", 1]],
fields=["count(*) as count"], fields=[{"COUNT": "*", "as": "count"}],
order_by="count desc", order_by="count desc",
limit=50, limit=50,
) )
@@ -846,7 +844,7 @@ class TestDBQuery(IntegrationTestCase):
"DocType", "DocType",
filters={"docstatus": 0, "document_type": ("!=", "")}, filters={"docstatus": 0, "document_type": ("!=", "")},
group_by="document_type", group_by="document_type",
fields=["document_type", "sum(is_submittable) as is_submittable"], fields=["document_type", {"SUM": "is_submittable", "as": "is_submittable"}],
limit=1, limit=1,
as_list=True, as_list=True,
) )
@@ -1222,7 +1220,7 @@ class TestDBQuery(IntegrationTestCase):
self.assertEqual(len(data["values"]), 1) self.assertEqual(len(data["values"]), 1)
def test_select_star_expansion(self): def test_select_star_expansion(self):
count = frappe.get_list("Language", ["SUM(1)", "COUNT(*)"], as_list=1, order_by=None)[0] count = frappe.get_list("Language", [{"SUM": 1}, {"COUNT": "*"}], as_list=1, order_by=None)[0]
self.assertEqual(count[0], frappe.db.count("Language")) self.assertEqual(count[0], frappe.db.count("Language"))
self.assertEqual(count[1], frappe.db.count("Language")) self.assertEqual(count[1], frappe.db.count("Language"))

View file

@@ -70,13 +70,13 @@ class TestFrappeClient(IntegrationTestCase):
getlist_users = server.get_list( getlist_users = server.get_list(
"User", "User",
fields=["count(name) as user_count"], fields=[{"COUNT": "name", "as": "user_count"}],
filters={"user_type": "System User"}, filters={"user_type": "System User"},
group_by="user_type", group_by="user_type",
) )
getall_users = frappe.db.get_all( getall_users = frappe.db.get_all(
"User", "User",
fields=["count(name) as system_user_count"], fields=[{"COUNT": "name", "as": "system_user_count"}],
filters={"user_type": "System User"}, filters={"user_type": "System User"},
group_by="user_type", group_by="user_type",
) )

View file

@@ -127,7 +127,7 @@ def get_workflow_state_count(doctype, workflow_state_field, states):
if workflow_state_field in frappe.get_meta(doctype).get_valid_columns(): if workflow_state_field in frappe.get_meta(doctype).get_valid_columns():
result = frappe.get_all( result = frappe.get_all(
doctype, doctype,
fields=[workflow_state_field, "count(*) as count"], fields=[workflow_state_field, {"COUNT": "*", "as": "count"}],
filters={workflow_state_field: ["not in", states]}, filters={workflow_state_field: ["not in", states]},
group_by=workflow_state_field, group_by=workflow_state_field,
) )