refactor: move to http_cache
This commit is contained in:
parent
b0800cacf6
commit
cce23d6699
4 changed files with 34 additions and 160 deletions
|
|
@ -15,7 +15,7 @@ from frappe.database.schema import SPECIAL_CHAR_PATTERN
|
|||
from frappe.model.db_query import get_order_by
|
||||
from frappe.permissions import has_permission
|
||||
from frappe.utils import cint, cstr, escape_html, unique
|
||||
from frappe.utils.caching import redis_cache
|
||||
from frappe.utils.caching import http_cache
|
||||
from frappe.utils.data import make_filter_tuple
|
||||
|
||||
|
||||
|
|
@ -33,15 +33,9 @@ class LinkSearchResults(TypedDict):
|
|||
label: NotRequired[str]
|
||||
|
||||
|
||||
def should_cache(doctype, txt, *args, **kwargs):
|
||||
"""Return True if there is no search text and the filters are cacheable."""
|
||||
filters = kwargs.get("filters")
|
||||
return not txt and not isinstance(filters, dict | list)
|
||||
|
||||
|
||||
# this is called by the Link Field
|
||||
@frappe.whitelist()
|
||||
@redis_cache(ttl=60 * 5, user=True, condition=should_cache)
|
||||
@http_cache(max_age=60 * 5, stale_while_revalidate=60 * 5)
|
||||
def search_link(
|
||||
doctype: str,
|
||||
txt: str,
|
||||
|
|
|
|||
|
|
@ -340,6 +340,34 @@ frappe.ui.form.ControlLink = class ControlLink extends frappe.ui.form.ControlDat
|
|||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if we should use GET (enables HTTP caching) or POST.
|
||||
* Use GET for empty searches with filters that fit in URL.
|
||||
* Use POST for searches with text or large filters.
|
||||
*/
|
||||
should_use_post_for_search(txt, filters, max_get_size = 2000) {
|
||||
// Always use POST if there's search text
|
||||
if (txt) return true;
|
||||
|
||||
// If no filters, use GET
|
||||
if (!filters) return false;
|
||||
|
||||
// Check size of filters when stringified
|
||||
let filters_str = filters;
|
||||
if (typeof filters !== "string") {
|
||||
try {
|
||||
filters_str = JSON.stringify(filters);
|
||||
} catch (e) {
|
||||
// If stringification fails, use POST
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// URL-encoded params add ~30% overhead on average
|
||||
const estimated_size = filters_str.length * 1.3;
|
||||
return estimated_size > max_get_size;
|
||||
}
|
||||
|
||||
on_input(e) {
|
||||
var doctype = this.get_options();
|
||||
if (!doctype) return;
|
||||
|
|
@ -364,10 +392,12 @@ frappe.ui.form.ControlLink = class ControlLink extends frappe.ui.form.ControlDat
|
|||
|
||||
this.set_custom_query(args);
|
||||
|
||||
const use_get = !this.should_use_post_for_search(term, args.filters);
|
||||
frappe.call({
|
||||
type: "POST",
|
||||
type: use_get ? "GET" : "POST",
|
||||
method: "frappe.desk.search.search_link",
|
||||
no_spinner: true,
|
||||
cache: use_get,
|
||||
args: args,
|
||||
callback: (r) => {
|
||||
if (!window.Cypress && !this.$input.is(":focus")) {
|
||||
|
|
|
|||
|
|
@ -226,147 +226,6 @@ class TestRedisCache(FrappeAPITestCase):
|
|||
self.assertEqual(calculate_area(1), PI)
|
||||
self.assertEqual(function_call_count, 2)
|
||||
|
||||
def test_condition_returns_true_uses_cache(self):
	"""A condition that always returns True must not disable caching."""
	call_count = 0

	def accept_all(*args, **kwargs):
		return True

	@redis_cache(ttl=CACHE_TTL, condition=accept_all)
	def calculate_area(radius: float) -> float:
		nonlocal call_count
		call_count += 1
		return 3.14 * radius**2

	calculate_area.clear_cache()

	# First invocation computes and stores the value.
	self.assertEqual(calculate_area(10), 314)
	self.assertEqual(call_count, 1)

	# Repeat invocation is served from the cache — body not re-run.
	self.assertEqual(calculate_area(10), 314)
	self.assertEqual(call_count, 1)

	calculate_area.clear_cache()
|
||||
|
||||
def test_condition_returns_false_bypasses_cache(self):
	"""A condition that always returns False must force a fresh call every time."""
	call_count = 0

	def reject_all(*args, **kwargs):
		return False

	@redis_cache(ttl=CACHE_TTL, condition=reject_all)
	def calculate_area(radius: float) -> float:
		nonlocal call_count
		call_count += 1
		return 3.14 * radius**2

	calculate_area.clear_cache()

	# Every invocation recomputes because caching is vetoed each time.
	for expected_calls in (1, 2, 3):
		self.assertEqual(calculate_area(10), 314)
		self.assertEqual(call_count, expected_calls)

	calculate_area.clear_cache()
|
||||
|
||||
def test_condition_receives_correct_arguments(self):
	"""The condition callable must see exactly the args/kwargs of the cached call."""
	seen_args = []
	seen_kwargs = []

	def record_call(*args, **kwargs):
		seen_args.append(args)
		seen_kwargs.append(kwargs)
		return True

	@redis_cache(ttl=CACHE_TTL, condition=record_call)
	def calculate_area(radius: float, precision: int = 2) -> float:
		return round(3.14159 * radius**2, precision)

	calculate_area.clear_cache()

	# Positional arguments only.
	calculate_area(10)
	self.assertEqual(seen_args[-1], (10,))
	self.assertEqual(seen_kwargs[-1], {})

	# Keyword arguments only.
	calculate_area(radius=5, precision=3)
	self.assertEqual(seen_args[-1], ())
	self.assertEqual(seen_kwargs[-1], {"radius": 5, "precision": 3})

	# Positional and keyword mixed.
	calculate_area(7, precision=4)
	self.assertEqual(seen_args[-1], (7,))
	self.assertEqual(seen_kwargs[-1], {"precision": 4})

	calculate_area.clear_cache()
|
||||
|
||||
def test_condition_none_uses_cache_by_default(self):
	"""Omitting the condition keeps the decorator's always-cache behaviour."""
	call_count = 0

	@redis_cache(ttl=CACHE_TTL)
	def calculate_area(radius: float) -> float:
		nonlocal call_count
		call_count += 1
		return 3.14 * radius**2

	calculate_area.clear_cache()

	# Miss, then hit, for the same argument.
	self.assertEqual(calculate_area(10), 314)
	self.assertEqual(call_count, 1)
	self.assertEqual(calculate_area(10), 314)
	self.assertEqual(call_count, 1)

	# A different argument misses once, then hits on repeat.
	self.assertEqual(calculate_area(5), 78.5)
	self.assertEqual(call_count, 2)
	self.assertEqual(calculate_area(5), 78.5)
	self.assertEqual(call_count, 2)

	calculate_area.clear_cache()
|
||||
|
||||
def test_condition_dynamic_behavior(self):
	"""The condition can gate caching on the call's own arguments."""
	call_count = 0

	def big_radius_only(radius, **kwargs):
		# Permit caching only for radius >= 100.
		return radius >= 100

	@redis_cache(ttl=CACHE_TTL, condition=big_radius_only)
	def calculate_area(radius: float) -> float:
		nonlocal call_count
		call_count += 1
		return 3.14 * radius**2

	calculate_area.clear_cache()

	# Below the threshold: never cached, body runs on every call.
	self.assertEqual(calculate_area(10), 314)
	self.assertEqual(call_count, 1)
	self.assertEqual(calculate_area(10), 314)
	self.assertEqual(call_count, 2)

	# At the threshold: cached after the first call.
	self.assertEqual(calculate_area(100), 31400)
	self.assertEqual(call_count, 3)
	self.assertEqual(calculate_area(100), 31400)
	self.assertEqual(call_count, 3)

	calculate_area.clear_cache()
|
||||
|
||||
|
||||
class TestDocumentCache(FrappeAPITestCase):
|
||||
TEST_DOCTYPE = "User"
|
||||
|
|
|
|||
|
|
@ -167,19 +167,13 @@ def site_cache(ttl: int | None = None, maxsize: int | None = None) -> Callable:
|
|||
return time_cache_wrapper
|
||||
|
||||
|
||||
def redis_cache(
|
||||
ttl: int | None = 3600,
|
||||
user: str | bool | None = None,
|
||||
shared: bool = False,
|
||||
condition: Callable | None = None,
|
||||
) -> Callable:
|
||||
def redis_cache(ttl: int | None = 3600, user: str | bool | None = None, shared: bool = False) -> Callable:
|
||||
"""Decorator to cache method calls and its return values in Redis
|
||||
|
||||
args:
|
||||
ttl: time to expiry in seconds, defaults to 1 hour
|
||||
user: `true` should cache be specific to session user.
|
||||
shared: `true` should cache be shared across sites
|
||||
condition: A callable that returns `True` if the cache should be used, `False` otherwise.
|
||||
"""
|
||||
|
||||
def wrapper(func: Callable | None = None) -> Callable:
|
||||
|
|
@ -193,9 +187,6 @@ def redis_cache(
|
|||
|
||||
@wraps(func)
|
||||
def redis_cache_wrapper(*args, **kwargs):
|
||||
if condition and not condition(*args, **kwargs):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
func_call_key = f"{func_key}::{hash(__generate_request_cache_key(args, kwargs))}"
|
||||
cached_val = frappe.cache.get_value(func_call_key, user=user, shared=shared)
|
||||
if cached_val is not None:
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue