perf: misc client cache improvements (#29070)

* perf: Reduce penalty for lack of redis connection

If redis isn't running, then this client cache is slower than the default
implementation because of the extra locking overhead.

* test: update perf redis counts

* perf: cache table columns in client-cache

* fix: race condition on cache-client_cache init

Rare but apparent in synthetic benchmarks.

If the cache is set while the client cache is still being initialized, the
request will fail.

* perf: Don't run notifications when loading document

WHAT?

* fix: use cached doc to repopulate

* perf: reduce get_meta calls
This commit is contained in:
Ankush Menat 2025-01-07 16:14:43 +05:30 committed by GitHub
parent 8e101182f2
commit fdba41c682
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 42 additions and 24 deletions

View file

@ -274,7 +274,7 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force: bool =
local.dev_server = _dev_server
local.qb = get_query_builder(local.conf.db_type)
local.qb.get_query = get_query
if not cache:
if not cache or not client_cache:
setup_redis_cache_connection()
if not _one_time_setup.get(local.conf.db_type):

View file

@ -67,7 +67,6 @@ user_cache_keys = (
doctype_cache_keys = (
"doctype_form_meta",
"table_columns",
"last_modified",
"linked_doctypes",
"notifications",
@ -75,6 +74,11 @@ doctype_cache_keys = (
"data_import_column_header_map",
)
wildcard_keys = (
"document_cache::*",
"table_columns::*",
)
def clear_user_cache(user=None):
from frappe.desk.notifications import clear_notifications
@ -158,7 +162,8 @@ def _clear_doctype_cache_from_redis(doctype: str | None = None):
else:
# clear all
to_del += doctype_cache_keys
to_del += frappe.cache.get_keys("document_cache::")
for pattern in wildcard_keys:
to_del += frappe.cache.get_keys(pattern)
clear_meta_cache()
frappe.cache.delete_value(to_del)

View file

@ -229,7 +229,7 @@ def get_system_settings(key: str):
try:
system_settings = frappe.client_cache.get_value(cache_key)
if not system_settings:
system_settings = frappe.get_doc("System Settings")
system_settings = frappe.get_cached_doc("System Settings")
frappe.client_cache.set_value(cache_key, system_settings)
frappe.local.system_settings = system_settings
except frappe.DoesNotExistError: # possible during new install

View file

@ -1278,7 +1278,8 @@ class Database:
def get_db_table_columns(self, table) -> list[str]:
"""Return list of column names from given table."""
columns = frappe.cache.hget("table_columns", table)
key = f"table_columns::{table}"
columns = frappe.client_cache.get_value(key)
if columns is None:
information_schema = frappe.qb.Schema("information_schema")
@ -1290,7 +1291,7 @@ class Database:
)
if columns:
frappe.cache.hset("table_columns", table, columns)
frappe.cache.set_value(key, columns)
return columns

View file

@ -250,7 +250,8 @@ class PostgresDatabase(PostgresExceptionUtil, Database):
def get_db_table_columns(self, table) -> list[str]:
"""Returns list of column names from given table."""
if (columns := frappe.cache.hget("table_columns", table)) is not None:
key = f"table_columns::{table}"
if (columns := frappe.client_cache.get_value(key)) is not None:
return columns
information_schema = frappe.qb.Schema("information_schema")
@ -265,7 +266,7 @@ class PostgresDatabase(PostgresExceptionUtil, Database):
.run(pluck=True)
)
frappe.cache.hset("table_columns", table, columns)
frappe.client_cache.set_value(key, columns)
return columns

View file

@ -44,7 +44,7 @@ class DBTable:
if self.is_new():
self.create()
else:
frappe.cache.hdel("table_columns", self.table_name)
frappe.client_cache.delete_value(f"table_columns::{self.table_name}")
self.alter()
def create(self):

View file

@ -8,6 +8,7 @@ import json
import re
from collections import Counter
from collections.abc import Mapping, Sequence
from functools import cached_property
import frappe
import frappe.defaults
@ -46,6 +47,7 @@ STRICT_FIELD_PATTERN = re.compile(r".*/\*.*")
STRICT_UNION_PATTERN = re.compile(r".*\s(union).*\s")
ORDER_GROUP_PATTERN = re.compile(r".*[^a-z0-9-_ ,`'\"\.\(\)].*")
SPECIAL_FIELD_CHARS = frozenset(("(", "`", ".", "'", '"', "*"))
# XXX: These are just matching brackets to not confuse code formatters: ))
class DatabaseQuery:
@ -65,12 +67,16 @@ class DatabaseQuery:
self.permission_map = {}
self.shared = []
self._fetch_shared_documents = False
self._metas = {}
@property
@cached_property
def doctype_meta(self):
if not hasattr(self, "_doctype_meta"):
self._doctype_meta = frappe.get_meta(self.doctype)
return self._doctype_meta
return self.get_meta(self.doctype)
def get_meta(self, doctype: str):
if doctype not in self._metas:
self._metas[doctype] = frappe.get_meta(doctype)
return self._metas[doctype]
@property
def query_tables(self):
@ -358,7 +364,7 @@ class DatabaseQuery:
if " as " in field:
field, alias = field.split(" as ", 1)
linked_fieldname, fieldname = field.split(".", 1)
linked_field = frappe.get_meta(self.doctype).get_field(linked_fieldname)
linked_field = self.get_meta(self.doctype).get_field(linked_fieldname)
# this is not a link field
if not linked_field:
continue
@ -701,7 +707,7 @@ class DatabaseQuery:
if f.operator.lower() in additional_filters_config:
f.update(get_additional_filter_field(additional_filters_config, f, f.value))
meta = frappe.get_meta(f.doctype)
meta = self.get_meta(f.doctype)
df = meta.get("fields", {"fieldname": f.fieldname})
df = df[0] if df else None

View file

@ -1106,7 +1106,8 @@ class Document(BaseDocument, DocRef):
def run_notifications(self, method):
"""Run notifications for this method"""
if (
(frappe.flags.in_import and frappe.flags.mute_emails)
method == "onload"
or (frappe.flags.in_import and frappe.flags.mute_emails)
or frappe.flags.in_patch
or frappe.flags.in_install
):

View file

@ -1251,14 +1251,15 @@ class TestPostgresSchemaQueryIndependence(ExtIntegrationTestCase):
self.assertEqual(columns, ["col_a", "col_b"])
frappe.conf["db_schema"] = "alt_schema"
frappe.cache.delete_key("table_columns") # remove table columns cache for next try from alt_schema
# remove table columns cache for next try from alt_schema
frappe.client_cache.delete_keys("table_columns::*")
# should have received the columns of the table from alt_schema
columns = frappe.db.get_table_columns(self.test_table_name)
self.assertEqual(columns, ["col_c", "col_d"])
del frappe.conf["db_schema"]
frappe.cache.delete_key("table_columns")
frappe.client_cache.delete_keys("table_columns::*")
def test_describe(self) -> None:
self.assertSequenceEqual([("col_a",), ("col_b",)], frappe.db.describe(self.test_table_name))
@ -1274,7 +1275,7 @@ class TestPostgresSchemaQueryIndependence(ExtIntegrationTestCase):
frappe.db.add_index("User", ("col_c",))
del frappe.conf["db_schema"]
frappe.cache.delete_key("table_columns")
frappe.client_cache.delete_keys("table_columns::*")
# the index creation in the default schema should fail
with self.assertSqlException():

View file

@ -291,20 +291,20 @@ class TestOverheadCalls(FrappeAPITestCase):
def test_ping_overheads(self):
self.get(self.method("ping"), {"sid": "Guest"})
with self.assertRedisCallCounts(10), self.assertQueryCount(self.BASE_SQL_CALLS):
with self.assertRedisCallCounts(2), self.assertQueryCount(self.BASE_SQL_CALLS):
self.get(self.method("ping"), {"sid": "Guest"})
def test_ping_overheads_authenticated(self):
sid = self.sid
self.get(self.method("ping"), {"sid": sid})
with self.assertRedisCallCounts(10), self.assertQueryCount(self.BASE_SQL_CALLS):
with self.assertRedisCallCounts(3), self.assertQueryCount(self.BASE_SQL_CALLS):
self.get(self.method("ping"), {"sid": sid})
def test_list_view_overheads(self):
sid = self.sid
self.get(self.resource("ToDo"), {"sid": sid})
self.get(self.resource("ToDo"), {"sid": sid})
with self.assertRedisCallCounts(24), self.assertQueryCount(self.BASE_SQL_CALLS + 1):
with self.assertRedisCallCounts(6), self.assertQueryCount(self.BASE_SQL_CALLS + 1):
self.get(self.resource("ToDo"), {"sid": sid})
def test_get_doc_overheads(self):
@ -312,7 +312,7 @@ class TestOverheadCalls(FrappeAPITestCase):
tables = len(frappe.get_meta("User").get_table_fields())
self.get(self.resource("User", "Administrator"), {"sid": sid})
self.get(self.resource("User", "Administrator"), {"sid": sid})
with self.assertRedisCallCounts(19), self.assertQueryCount(self.BASE_SQL_CALLS + 1 + tables):
with self.assertRedisCallCounts(3), self.assertQueryCount(self.BASE_SQL_CALLS + 1 + tables):
self.get(self.resource("User", "Administrator"), {"sid": sid})

View file

@ -483,6 +483,9 @@ class ClientCache:
self.invalidator_thread = self.run_invalidator_thread()
def get_value(self, key):
if not self.healthy:
return self.redis.get_value(key)
key = self.redis.make_key(key)
try:
val = self.cache[key]
@ -526,7 +529,7 @@ class ClientCache:
# - Client A writes a key and reads it again from local cache
# - Client B overwrites this key, but since client A never "read" it from Redis, Redis
# doesn't send invalidation.
_ = self.redis.get_value(key, shared=True, use_local_cache=False)
_ = self.redis.get_value(key, shared=True, use_local_cache=not self.healthy)
def ensure_max_size(self):
if len(self.cache) >= self.maxsize: