feat: cache documents in client cache

There is no need to query one key at a time; documents are now fetched and stored through the client cache.
This commit is contained in:
Ankush Menat 2025-01-14 14:06:21 +05:30
parent d6f47e8060
commit b49d512404
4 changed files with 18 additions and 5 deletions

View file

@ -227,10 +227,7 @@ def get_system_settings(key: str):
"""Return the value associated with the given `key` from System Settings DocType."""
if not (system_settings := getattr(frappe.local, "system_settings", None)):
try:
system_settings = frappe.client_cache.get_value(cache_key)
if not system_settings:
system_settings = frappe.get_cached_doc("System Settings")
frappe.client_cache.set_value(cache_key, system_settings)
system_settings = frappe.client_cache.get_doc("System Settings")
frappe.local.system_settings = system_settings
except frappe.DoesNotExistError: # possible during new install
frappe.clear_last_message()

View file

@ -47,6 +47,6 @@ def get_locale_value(key: str, language: str | None = None) -> str | None:
"""
lang = language or frappe.local.lang
if lang:
value = frappe.db.get_value("Language", lang, key)
value = frappe.client_cache.get_doc("Language", lang).get(key)
return value or frappe.db.get_default(key)

View file

@ -111,3 +111,8 @@ class TestClientCache(IntegrationTestCase):
with self.assertRedisCallCounts(0):
self.assertEqual(frappe.client_cache.get_value(TEST_KEY, generator=lambda: val), val)
def test_get_doc(self):
    """A repeat fetch of the same document must be served from the local client cache."""
    # First fetch warms the client cache.
    frappe.client_cache.get_doc("User", "Guest")
    # The second fetch should not hit Redis at all.
    with self.assertRedisCallCounts(0):
        frappe.client_cache.get_doc("User", "Guest")

View file

@ -531,6 +531,17 @@ class ClientCache:
# doesn't send invalidation.
_ = self.redis.get_value(key, shared=True, use_local_cache=not self.healthy)
def get_doc(self, doctype: str, name: str | None = None):
    """Fetch a document and keep it in the client cache.

    Use sparingly: this is best suited for settings and for doctypes
    with a small, known number of documents.
    """
    # Single doctypes are cached under their own doctype name.
    docname = name or doctype
    cache_key = frappe.get_document_cache_key(doctype, docname)
    return self.get_value(cache_key, generator=lambda: frappe.get_doc(doctype, docname))
def ensure_max_size(self):
if len(self.cache) >= self.maxsize:
with self.lock, suppress(RuntimeError):