fix: Sample link counts and flush frequently (#32713)

We recently applied a limit on how many links can be buffered. That
pretty much "samples" only the records created at the start of the hour.

This change makes it flush 4x more frequently and samples 10% of the input
to reduce updates. Again, statistically this serves the same purpose.
This commit is contained in:
Ankush Menat 2025-05-29 12:18:28 +05:30 committed by GitHub
parent 4d8ebcce29
commit f17658c4c6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 3 additions and 2 deletions

View file

@ -209,6 +209,7 @@ scheduler_events = {
"frappe.utils.global_search.sync_global_search",
"frappe.deferred_insert.save_to_db",
"frappe.automation.doctype.reminder.reminder.send_reminders",
"frappe.model.utils.link_count.update_link_count",
],
# 10 minutes
"0/10 * * * *": [
@ -231,7 +232,6 @@ scheduler_events = {
# Use these for when you don't care about when the job runs but just need some guarantee for
# frequency.
"hourly_maintenance": [
"frappe.model.utils.link_count.update_link_count",
"frappe.model.utils.user_settings.sync_user_settings",
"frappe.desk.page.backups.backups.delete_downloadable_backups",
"frappe.desk.form.document_follow.send_hourly_updates",

View file

@ -2,6 +2,7 @@
# License: MIT. See LICENSE
from collections import defaultdict
from random import random
import frappe
@ -35,7 +36,7 @@ LINK_COUNT_BUFFER_SIZE = 256
def notify_link_count(doctype, name):
"""updates link count for given document"""
if doctype in ignore_doctypes or not frappe.request:
if doctype in ignore_doctypes or not frappe.request or random() < 0.9: # Sample 10%
return
if not hasattr(frappe.local, "_link_count"):