Merge remote-tracking branch 'frappe/develop' into fix-col-input-focus

This commit is contained in:
Devin Slauenwhite 2023-06-06 10:26:18 -04:00
commit dcfc29eb5a
43 changed files with 614 additions and 363 deletions

View file

@ -16,6 +16,7 @@ import inspect
import json
import os
import re
import unicodedata
import warnings
from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, TypeAlias, overload
@ -190,7 +191,6 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force=False)
local.error_log = []
local.message_log = []
local.debug_log = []
local.realtime_log = []
local.flags = _dict(
{
"currently_saving": [],
@ -207,9 +207,7 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force=False)
"read_only": False,
}
)
local.rollback_observers = []
local.locked_documents = []
local.before_commit = []
local.test_objects = {}
local.site = site
@ -233,7 +231,6 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force=False)
local.role_permissions = {}
local.valid_columns = {}
local.new_doc_templates = {}
local.link_count = {}
local.jenv = None
local.jloader = None
@ -879,6 +876,7 @@ def clear_cache(user: str | None = None, doctype: str | None = None):
:param doctype: If doctype is given, only DocType cache is cleared."""
import frappe.cache_manager
import frappe.utils.caching
from frappe.website.router import clear_routing_cache
if doctype:
frappe.cache_manager.clear_doctype_cache(doctype)
@ -907,6 +905,8 @@ def clear_cache(user: str | None = None, doctype: str | None = None):
if hasattr(local, "website_settings"):
del local.website_settings
clear_routing_cache()
def only_has_select_perm(doctype, user=None, ignore_permissions=False):
if ignore_permissions:
@ -1079,7 +1079,7 @@ def set_value(doctype, docname, fieldname, value=None):
def get_cached_doc(*args, **kwargs) -> "Document":
if (key := can_cache_doc(args)) and (doc := cache().hget("document_cache", key)):
if (key := can_cache_doc(args)) and (doc := cache().get_value(key)):
return doc
# Not found in cache, fetch from DB
@ -1095,7 +1095,7 @@ def get_cached_doc(*args, **kwargs) -> "Document":
def _set_document_in_cache(key: str, doc: "Document") -> None:
	"""Store the fully-loaded *doc* in Redis under *key* so that later
	``get_cached_doc`` calls can skip the database round-trip."""
	cache().set_value(key, doc)
def can_cache_doc(args) -> str | None:
@ -1116,12 +1116,20 @@ def can_cache_doc(args) -> str | None:
def get_document_cache_key(doctype: str, name: str):
	"""Return the namespaced Redis key under which a single document is cached."""
	return "::".join(("document_cache", doctype, name))
def clear_document_cache(doctype, name):
cache().hdel("last_modified", doctype)
cache().hdel("document_cache", get_document_cache_key(doctype, name))
def clear_document_cache(doctype: str, name: str | None = None) -> None:
	"""Evict cached documents of *doctype* — a single one when *name* is given,
	otherwise every document of that doctype.

	The eviction runs immediately and, when the database connection supports
	transaction hooks, is also re-run after commit/rollback so concurrent
	writers cannot leave stale entries behind.
	"""

	def clear_in_redis():
		# Delete one exact key, or every key sharing the doctype prefix.
		if name is None:
			cache().delete_keys(get_document_cache_key(doctype, ""))
		else:
			cache().delete_value(get_document_cache_key(doctype, name))

	clear_in_redis()
	if hasattr(db, "after_commit"):
		db.after_commit.add(clear_in_redis)
		db.after_rollback.add(clear_in_redis)

	# System Settings is additionally memoised on frappe.local — drop that too.
	if doctype == "System Settings" and hasattr(local, "system_settings"):
		delattr(local, "system_settings")
@ -1206,7 +1214,7 @@ def get_doc(*args, **kwargs):
doc = frappe.model.document.get_doc(*args, **kwargs)
# Replace cache if stale one exists
if (key := can_cache_doc(args)) and cache().hexists("document_cache", key):
if (key := can_cache_doc(args)) and cache().exists(key):
_set_document_in_cache(key, doc)
return doc
@ -2271,6 +2279,7 @@ def bold(text):
def safe_eval(code, eval_globals=None, eval_locals=None):
"""A safer `eval`"""
whitelisted_globals = {"int": int, "float": float, "long": int, "round": round}
code = unicodedata.normalize("NFKC", code)
UNSAFE_ATTRIBUTES = {
# Generator Attributes

View file

@ -19,7 +19,6 @@ import frappe.recorder
import frappe.utils.response
from frappe import _
from frappe.auth import SAFE_HTTP_METHODS, UNSAFE_HTTP_METHODS, HTTPRequest
from frappe.core.doctype.comment.comment import update_comments_in_parent_after_request
from frappe.middlewares import StaticDataMiddleware
from frappe.utils import cint, get_site_name, sanitize_html
from frappe.utils.error import make_error_snapshot
@ -351,8 +350,6 @@ def sync_database(rollback: bool) -> bool:
frappe.db.commit()
rollback = False
update_comments_in_parent_after_request()
return rollback

View file

@ -123,12 +123,21 @@ def clear_defaults_cache(user=None):
def clear_doctype_cache(doctype=None):
clear_controller_cache(doctype)
_clear_doctype_cache_form_redis()
if hasattr(frappe.db, "after_commit"):
frappe.db.after_commit.add(_clear_doctype_cache_form_redis)
frappe.db.after_rollback.add(_clear_doctype_cache_form_redis)
def _clear_doctype_cache_form_redis(doctype: str | None = None):
cache = frappe.cache()
for key in ("is_table", "doctype_modules", "document_cache"):
for key in ("is_table", "doctype_modules"):
cache.delete_value(key)
def clear_single(dt):
frappe.clear_document_cache(dt)
for name in doctype_cache_keys:
cache.hdel(name, dt)
@ -155,6 +164,7 @@ def clear_doctype_cache(doctype=None):
# clear all
for name in doctype_cache_keys:
cache.delete_value(name)
cache.delete_keys("document_cache::")
def clear_controller_cache(doctype=None):

View file

@ -623,7 +623,7 @@ frappe.db.connect()
def _console_cleanup():
# Execute rollback_observers on console close
# Execute after_rollback on console close
frappe.db.rollback()
frappe.destroy()

View file

@ -152,14 +152,9 @@ def update_comments_in_parent(reference_doctype, reference_name, _comments):
except Exception as e:
if frappe.db.is_column_missing(e) and getattr(frappe.local, "request", None):
# missing column and in request, add column and update after commit
frappe.local._comments = getattr(frappe.local, "_comments", []) + [
(reference_doctype, reference_name, _comments)
]
pass
elif frappe.db.is_data_too_long(e):
raise frappe.DataTooLongException
else:
raise
else:
@ -169,13 +164,3 @@ def update_comments_in_parent(reference_doctype, reference_name, _comments):
# Clear route cache
if route := frappe.get_cached_value(reference_doctype, reference_name, "route"):
clear_cache(route)
def update_comments_in_parent_after_request():
"""update _comments in parent if _comments column is missing"""
if hasattr(frappe.local, "_comments"):
for (reference_doctype, reference_name, _comments) in frappe.local._comments:
add_column(reference_doctype, "_comments", "Text")
update_comments_in_parent(reference_doctype, reference_name, _comments)
frappe.db.commit()

View file

@ -6,16 +6,16 @@ frappe.listview_settings["DocType"] = {
setup_select_primary_button: function (me) {
let actions = [
{
label: __("Add DocType"),
description: __("Create a new DocType"),
action: () => frappe.new_doc("DocType"),
},
{
label: __("Add DocType (Form Builder)"),
description: __("Use the form builder to create a new DocType"),
action: () => frappe.set_route("form-builder", "new-doctype"),
},
{
label: __("Add DocType"),
description: __("Create a new DocType"),
action: () => frappe.new_doc("DocType"),
},
];
frappe.utils.add_select_group_button(

View file

@ -2,6 +2,7 @@
# See license.txt
import frappe
from frappe.core.doctype.doctype.test_doctype import new_doctype
from frappe.core.doctype.document_naming_settings.document_naming_settings import (
DocumentNamingSettings,
)
@ -11,6 +12,25 @@ from frappe.utils import cint
class TestNamingSeries(FrappeTestCase):
@classmethod
def setUpClass(cls):
	"""Create a throwaway DocType that names itself via a naming series.

	Using a freshly generated doctype (instead of a shipped one such as
	Webhook) keeps these tests independent of core doctype changes.
	"""
	super().setUpClass()
	series_doctype = new_doctype(
		fields=[
			{
				"label": "Series",
				"fieldname": "naming_series",
				"fieldtype": "Select",
				"options": f"\n{frappe.generate_hash()}-.###",
			}
		],
		autoname="naming_series:",
	)
	cls.ns_doctype = series_doctype.insert().name
def setUp(self):
self.dns: DocumentNamingSettings = frappe.get_doc("Document Naming Settings")
@ -23,7 +43,7 @@ class TestNamingSeries(FrappeTestCase):
return VALID_SERIES + exisiting_series
def test_naming_preview(self):
self.dns.transaction_type = "Webhook"
self.dns.transaction_type = self.ns_doctype
self.dns.try_naming_series = "AXBZ.####"
serieses = self.dns.preview_series().split("\n")
@ -35,23 +55,22 @@ class TestNamingSeries(FrappeTestCase):
def test_get_transactions(self):
naming_info = self.dns.get_transactions_and_prefixes()
self.assertIn("Webhook", naming_info["transactions"])
self.assertIn(self.ns_doctype, naming_info["transactions"])
existing_naming_series = frappe.get_meta("Webhook").get_field("naming_series").options
existing_naming_series = frappe.get_meta(self.ns_doctype).get_field("naming_series").options
for series in existing_naming_series.split("\n"):
self.assertIn(NamingSeries(series).get_prefix(), naming_info["prefixes"])
def test_default_naming_series(self):
self.assertIn("HOOK", get_default_naming_series("Webhook"))
self.assertIsNone(get_default_naming_series("DocType"))
def test_updates_naming_options(self):
self.dns.transaction_type = "Webhook"
self.dns.transaction_type = self.ns_doctype
test_series = "KOOHBEW.###"
self.dns.naming_series_options = self.dns.get_options() + "\n" + test_series
self.dns.update_series()
self.assertIn(test_series, frappe.get_meta("Webhook").get_naming_series_options())
self.assertIn(test_series, frappe.get_meta(self.ns_doctype).get_naming_series_options())
def test_update_series_counter(self):
for series in self.get_valid_serieses():

View file

@ -69,7 +69,7 @@ class File(Document):
else:
self.save_file(content=self.get_content())
self.flags.new_file = True
frappe.local.rollback_observers.append(self)
frappe.db.after_rollback.add(self.on_rollback)
def after_insert(self):
if not self.is_folder:
@ -121,10 +121,16 @@ class File(Document):
self.add_comment_in_reference_doc("Attachment Removed", _("Removed {0}").format(self.file_name))
def on_rollback(self):
rollback_flags = ("new_file", "original_content", "original_path")
def pop_rollback_flags():
for flag in rollback_flags:
self.flags.pop(flag, None)
# following condition is only executed when an insert has been rolledback
if self.flags.new_file:
self._delete_file_on_disk()
self.flags.pop("new_file")
pop_rollback_flags()
return
# if original_content flag is set, this rollback should revert the file to its original state
@ -139,14 +145,14 @@ class File(Document):
with open(file_path, mode) as f:
f.write(self.flags.original_content)
os.fsync(f.fileno())
self.flags.pop("original_content")
pop_rollback_flags()
# used in case file path (File.file_url) has been changed
if self.flags.original_path:
target = self.flags.original_path["old"]
source = self.flags.original_path["new"]
shutil.move(source, target)
self.flags.pop("original_path")
pop_rollback_flags()
def get_name_based_on_parent_folder(self) -> str | None:
if self.folder:
@ -218,7 +224,7 @@ class File(Document):
# Uses os.rename which is an atomic operation
shutil.move(source, target)
self.flags.original_path = {"old": source, "new": target}
frappe.local.rollback_observers.append(self)
frappe.db.after_rollback.add(self.on_rollback)
self.file_url = updated_file_url
update_existing_file_docs(self)
@ -520,7 +526,7 @@ class File(Document):
f.write(self._content)
os.fsync(f.fileno())
frappe.local.rollback_observers.append(self)
frappe.db.after_rollback.add(self.on_rollback)
return file_path

View file

@ -17,7 +17,7 @@ from frappe.core.api.file import (
move_file,
unzip_file,
)
from frappe.core.doctype.file.utils import get_extension
from frappe.core.doctype.file.utils import delete_file, get_extension
from frappe.exceptions import ValidationError
from frappe.tests.utils import FrappeTestCase
from frappe.utils import get_files_path
@ -77,6 +77,16 @@ class TestSimpleFile(FrappeTestCase):
self.assertEqual(content, self.test_content)
class TestFSRollbacks(FrappeTestCase):
	def test_rollback_from_file_system(self):
		"""A rolled-back insert must also remove the file written to disk."""
		file_name = content = frappe.generate_hash()
		# renamed from `file`, which shadowed the builtin
		file_doc = frappe.new_doc("File", file_name=file_name, content=content).insert()
		self.assertTrue(file_doc.exists_on_disk())

		# db.after_rollback should trigger File.on_rollback and delete the file
		frappe.db.rollback()
		self.assertFalse(file_doc.exists_on_disk())
class TestBase64File(FrappeTestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()

View file

@ -11,7 +11,7 @@ import frappe
from frappe.core.doctype.rq_job.rq_job import RQJob, remove_failed_jobs, stop_job
from frappe.tests.utils import FrappeTestCase, timeout
from frappe.utils import cstr, execute_in_shell
from frappe.utils.background_jobs import is_job_enqueued
from frappe.utils.background_jobs import get_job_status, is_job_enqueued
class TestRQJob(FrappeTestCase):
@ -104,6 +104,26 @@ class TestRQJob(FrappeTestCase):
self.check_status(job, "finished")
self.assertFalse(is_job_enqueued(job_id))
@timeout(20)
def test_enqueue_after_commit(self):
	"""Jobs queued with ``enqueue_after_commit`` must reach the queue only on commit."""
	# Committed transaction: the job should become visible after commit.
	committed_job = frappe.generate_hash()
	frappe.enqueue(self.BG_JOB, enqueue_after_commit=True, job_id=committed_job)
	self.assertIsNone(get_job_status(committed_job))
	frappe.db.commit()
	self.assertIsNotNone(get_job_status(committed_job))

	# Rolled-back transaction: the job must be dropped for good,
	# even after a subsequent commit.
	discarded_job = frappe.generate_hash()
	frappe.enqueue(self.BG_JOB, enqueue_after_commit=True, job_id=discarded_job)
	self.assertIsNone(get_job_status(discarded_job))
	frappe.db.rollback()
	self.assertIsNone(get_job_status(discarded_job))
	frappe.db.commit()
	self.assertIsNone(get_job_status(discarded_job))
def test_func(fail=False, sleep=0):
if fail:

View file

@ -212,6 +212,7 @@
"read_only": 1
},
{
"allow_in_quick_entry": 1,
"fieldname": "role_profile_name",
"fieldtype": "Link",
"label": "Role Profile",
@ -761,7 +762,7 @@
"link_fieldname": "user"
}
],
"modified": "2023-05-24 15:20:06.434506",
"modified": "2023-06-05 17:26:04.127555",
"modified_by": "Administrator",
"module": "Core",
"name": "User",

View file

@ -75,6 +75,7 @@ class User(Document):
self.validate_email_type(self.email)
self.validate_email_type(self.name)
self.add_system_manager_role()
self.check_roles_added()
self.set_system_user()
self.set_full_name()
self.check_enable_disable()
@ -673,6 +674,21 @@ class User(Document):
if not self.time_zone:
self.time_zone = get_system_timezone()
def check_roles_added(self):
	"""Warn interactively when a brand-new System User is saved with no roles."""
	if self.user_type != "System User":
		return
	if self.roles or not self.is_new():
		return

	frappe.msgprint(
		_("Newly created user {0} has no roles enabled.").format(frappe.bold(self.name)),
		title=_("No Roles Specified"),
		indicator="orange",
		primary_action={
			"label": _("Add Roles"),
			"client_action": "frappe.set_route",
			"args": ["Form", self.doctype, self.name],
		},
	)
@frappe.whitelist()
def get_timezones():

View file

@ -192,7 +192,9 @@ class CustomizeForm(Document):
if self.flags.rebuild_doctype_for_global_search:
frappe.enqueue(
"frappe.utils.global_search.rebuild_for_doctype", now=True, doctype=self.doc_type
"frappe.utils.global_search.rebuild_for_doctype",
doctype=self.doc_type,
enqueue_after_commit=True,
)
def set_property_setters(self):

View file

@ -29,8 +29,8 @@ from frappe.database.utils import (
is_query_type,
)
from frappe.exceptions import DoesNotExistError, ImplicitCommitError
from frappe.model.utils.link_count import flush_local_link_count
from frappe.query_builder.functions import Count
from frappe.utils import CallbackManager
from frappe.utils import cast as cast_fieldtype
from frappe.utils import cint, get_datetime, get_table_name, getdate, now, sbool
from frappe.utils.deprecations import deprecated, deprecation_warning
@ -107,6 +107,12 @@ class Database:
self.value_cache = {}
self.logger = frappe.logger("database")
self.logger.setLevel("WARNING")
self.before_commit = CallbackManager()
self.after_commit = CallbackManager()
self.before_rollback = CallbackManager()
self.after_rollback = CallbackManager()
# self.db_type: str
# self.last_query (lazy) attribute of last sql query executed
@ -118,7 +124,6 @@ class Database:
self.cur_db_name = self.user
self._conn = self.get_connection()
self._cursor = self._conn.cursor()
frappe.local.rollback_observers = []
try:
if execution_timeout := get_query_execution_timeout():
@ -915,10 +920,8 @@ class Database:
if isinstance(dn, str):
frappe.clear_document_cache(dt, dn)
else:
# TODO: Fix this; doesn't work rn - gavin@frappe.io
# frappe.cache().hdel_keys(dt, "document_cache")
# Workaround: clear all document caches
frappe.cache().delete_value("document_cache")
# No way to guess which documents are modified, clear all of them
frappe.clear_document_cache(dt)
for column, value in to_update.items():
query = query.set(column, value)
@ -970,26 +973,30 @@ class Database:
def commit(self):
	"""Commit current transaction. Calls SQL `COMMIT`."""
	# A commit makes any pending rollback callbacks moot.
	self.before_rollback.reset()
	self.after_rollback.reset()

	self.before_commit.run()
	self.sql("commit")
	self.begin()  # explicitly start a new transaction
	self.after_commit.run()
def add_before_commit(self, method, args=None, kwargs=None):
frappe.local.before_commit.append([method, args, kwargs])
def rollback(self, *, save_point=None):
	"""`ROLLBACK` current transaction. Optionally rollback to a known save_point."""
	if save_point:
		# Partial rollback: callbacks for the whole transaction stay armed.
		self.sql(f"rollback to savepoint {save_point}")
		return

	# A full rollback invalidates any pending commit callbacks.
	self.before_commit.reset()
	self.after_commit.reset()

	self.before_rollback.run()
	self.sql("rollback")
	self.begin()
	self.after_rollback.run()
def savepoint(self, save_point):
"""Savepoints work as a nested transaction.
@ -1004,21 +1011,6 @@ class Database:
def release_savepoint(self, save_point):
self.sql(f"release savepoint {save_point}")
def rollback(self, *, save_point=None):
"""`ROLLBACK` current transaction. Optionally rollback to a known save_point."""
if save_point:
self.sql(f"rollback to savepoint {save_point}")
else:
self.sql("rollback")
self.begin()
for obj in dict.fromkeys(frappe.local.rollback_observers):
if hasattr(obj, "on_rollback"):
obj.on_rollback()
frappe.local.rollback_observers = []
frappe.local.realtime_log = []
frappe.flags.enqueue_after_commit = []
def field_exists(self, dt, fn):
"""Return true of field exists."""
return self.exists("DocField", {"fieldname": fn, "parent": dt})
@ -1304,28 +1296,6 @@ class Database:
raise NotImplementedError
def enqueue_jobs_after_commit():
from frappe.utils.background_jobs import (
RQ_JOB_FAILURE_TTL,
RQ_RESULTS_TTL,
execute_job,
get_queue,
)
if frappe.flags.enqueue_after_commit and len(frappe.flags.enqueue_after_commit) > 0:
for job in frappe.flags.enqueue_after_commit:
q = get_queue(job.get("queue"), is_async=job.get("is_async"))
q.enqueue_call(
execute_job,
timeout=job.get("timeout"),
kwargs=job.get("queue_args"),
failure_ttl=frappe.conf.get("rq_job_failure_ttl") or RQ_JOB_FAILURE_TTL,
result_ttl=frappe.conf.get("rq_results_ttl") or RQ_RESULTS_TTL,
job_id=job.get("job_id"),
)
frappe.flags.enqueue_after_commit = []
@contextmanager
def savepoint(catch: type | tuple[type, ...] = Exception):
"""Wrapper for wrapping blocks of DB operations in a savepoint.

View file

@ -95,7 +95,7 @@ frappe.ui.form.on("Form Tour", {
},
});
add_custom_button = (frm) => {
let add_custom_button = (frm) => {
if (frm.doc.ui_tour) {
frm.add_custom_button(__("Reset"), function () {
frappe.confirm(

View file

@ -6,8 +6,7 @@ from frappe.model.document import Document
class ListViewSettings(Document):
def on_update(self):
frappe.clear_document_cache(self.doctype, self.name)
pass
@frappe.whitelist()

View file

@ -14,7 +14,10 @@ from frappe.tests.utils import FrappeTestCase
@contextmanager
def get_test_webhook(config):
wh = frappe.get_doc(config).insert()
wh = frappe.get_doc(config)
if not wh.name:
wh.name = frappe.generate_hash()
wh.insert()
wh.reload()
try:
yield wh
@ -37,6 +40,7 @@ class TestWebhook(FrappeTestCase):
def create_sample_webhooks(cls):
samples_webhooks_data = [
{
"name": frappe.generate_hash(),
"webhook_doctype": "User",
"webhook_docevent": "after_insert",
"request_url": "https://httpbin.org/post",
@ -44,6 +48,7 @@ class TestWebhook(FrappeTestCase):
"enabled": True,
},
{
"name": frappe.generate_hash(),
"webhook_doctype": "User",
"webhook_docevent": "after_insert",
"request_url": "https://httpbin.org/post",

View file

@ -1,13 +1,12 @@
{
"actions": [],
"autoname": "naming_series:",
"autoname": "prompt",
"creation": "2017-09-08 16:16:13.060641",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"sb_doc_events",
"naming_series",
"webhook_doctype",
"cb_doc_events",
"webhook_docevent",
@ -46,6 +45,7 @@
{
"fieldname": "webhook_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "DocType",
"options": "DocType",
"reqd": 1,
@ -136,12 +136,6 @@
"label": "JSON Request Body",
"options": "JSON"
},
{
"fieldname": "naming_series",
"fieldtype": "Select",
"label": "Naming Series",
"options": "\nHOOK-.####"
},
{
"fieldname": "sb_security",
"fieldtype": "Section Break",
@ -218,11 +212,11 @@
"link_fieldname": "webhook"
}
],
"modified": "2023-05-22 16:30:10.740512",
"modified": "2023-06-02 17:25:12.598232",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Webhook",
"naming_rule": "By \"Naming Series\" field",
"naming_rule": "Set by user",
"owner": "Administrator",
"permissions": [
{
@ -241,6 +235,5 @@
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "webhook_doctype",
"track_changes": 1
}

View file

@ -1200,7 +1200,6 @@ class Document(BaseDocument):
if notify:
self.notify_update()
self.clear_cache()
if commit:
frappe.db.commit()

View file

@ -1,6 +1,8 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
from collections import defaultdict
import frappe
ignore_doctypes = ("DocType", "Print Format", "Role", "Module Def", "Communication", "ToDo")
@ -8,29 +10,29 @@ ignore_doctypes = ("DocType", "Print Format", "Role", "Module Def", "Communicati
def notify_link_count(doctype, name):
	"""Record one more link to ``(doctype, name)``; counters are buffered on
	``frappe.local`` and flushed to Redis after the transaction commits."""
	if not hasattr(frappe.local, "_link_count"):
		frappe.local._link_count = defaultdict(int)
		# First hit in this transaction: schedule the flush exactly once.
		frappe.db.after_commit.add(flush_local_link_count)

	frappe.local._link_count[(doctype, name)] += 1
def flush_local_link_count():
	"""Merge this request's buffered link counters into the shared Redis counter."""
	local_counts = getattr(frappe.local, "_link_count", None)
	if not local_counts:
		return

	merged = frappe.cache().get_value("_link_count") or {}
	for key, delta in local_counts.items():
		merged[key] = merged.get(key, 0) + delta

	frappe.cache().set_value("_link_count", merged)
	local_counts.clear()
def update_link_count():
@ -38,14 +40,12 @@ def update_link_count():
link_count = frappe.cache().get_value("_link_count")
if link_count:
for key, count in link_count.items():
if key[0] not in ignore_doctypes:
for (doctype, name), count in link_count.items():
if doctype not in ignore_doctypes:
try:
frappe.db.sql(
f"update `tab{key[0]}` set idx = idx + {count} where name=%s",
key[1],
auto_commit=1,
)
table = frappe.qb.DocType(doctype)
frappe.qb.update(table).set(table.idx, table.idx + count).where(table.name == name).run()
frappe.db.commit()
except Exception as e:
if not frappe.db.is_table_missing(e): # table not found, single
raise e

View file

@ -5,26 +5,28 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
async make() {
super.make();
$(this.input_area).addClass("hidden");
}
make_wrapper() {
set_disp_area(value) {
// Create the elements for map area
super.make_wrapper();
if (!this.disp_area) {
return;
}
let $input_wrapper = this.$wrapper.find(".control-input-wrapper");
this.map_id = frappe.dom.get_unique_id();
this.map_area = $(
`<div class="map-wrapper border">
<div id="` +
this.map_id +
`" style="min-height: 400px; z-index: 1; max-width:100%"></div>
<div id="${this.map_id}" style="min-height: 400px; z-index: 1; max-width:100%"></div>
</div>`
);
this.map_area.prependTo($input_wrapper);
this.$wrapper.find(".control-input").addClass("hidden");
$(this.disp_area).html(this.map_area);
$(this.disp_area).removeClass("like-disabled-input");
$(this.disp_area).css("display", "block");
if (this.frm) {
this.make_map();
this.make_map(value);
} else {
$(document).on("frappe.ui.Dialog:shown", () => {
this.make_map();
@ -32,7 +34,7 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
}
}
make_map() {
make_map(value) {
this.bind_leaflet_map();
if (this.disabled) {
this.map.dragging.disable();
@ -44,52 +46,50 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
this.map.zoomControl.remove();
} else {
this.bind_leaflet_draw_control();
this.bind_leaflet_event_listeners();
this.bind_leaflet_locate_control();
this.bind_leaflet_refresh_button();
this.bind_leaflet_data(value);
}
this.map.setView(frappe.utils.map_defaults.center, frappe.utils.map_defaults.zoom);
}
format_for_input(value) {
if (!this.map) return;
// render raw value from db into map
this.clear_editable_layers();
if (value) {
var data_layers = new L.FeatureGroup().addLayer(
L.geoJson(JSON.parse(value), {
pointToLayer: function (geoJsonPoint, latlng) {
if (geoJsonPoint.properties.point_type == "circle") {
return L.circle(latlng, { radius: geoJsonPoint.properties.radius });
} else if (geoJsonPoint.properties.point_type == "circlemarker") {
return L.circleMarker(latlng, {
radius: geoJsonPoint.properties.radius,
});
} else {
return L.marker(latlng);
}
},
})
);
this.add_non_group_layers(data_layers, this.editableLayers);
try {
this.map.fitBounds(this.editableLayers.getBounds(), {
padding: [50, 50],
});
} catch (err) {
// suppress error if layer has a point.
}
this.editableLayers.addTo(this.map);
} else {
this.map.setView(frappe.utils.map_defaults.center, frappe.utils.map_defaults.zoom);
bind_leaflet_data(value) {
/* render raw value from db into map */
if (!this.map || !value) {
return;
}
this.clear_editable_layers();
const data_layers = new L.FeatureGroup().addLayer(
L.geoJson(JSON.parse(value), { pointToLayer: this.point_to_layer })
);
this.add_non_group_layers(data_layers, this.editableLayers);
this.editableLayers.addTo(this.map);
this.fit_and_recenter_map();
}
/**
* Defines custom rules for how geoJSON data is rendered on the map.
*
* @param {Object} geoJsonPoint - The geoJSON object to be rendered on the map.
* @param {Object} latlng - The latitude and longitude where the geoJSON data should be rendered on the map.
* @returns {Object} - Returns the Leaflet layer object to be rendered on the map.
*/
point_to_layer(geoJsonPoint, latlng) {
// Custom rules for how geojson data is rendered on the map
if (geoJsonPoint.properties.point_type == "circle") {
return L.circle(latlng, { radius: geoJsonPoint.properties.radius });
} else if (geoJsonPoint.properties.point_type == "circlemarker") {
return L.circleMarker(latlng, { radius: geoJsonPoint.properties.radius });
} else {
return L.marker(latlng);
}
this.map.invalidateSize();
}
bind_leaflet_map() {
var circleToGeoJSON = L.Circle.prototype.toGeoJSON;
const circleToGeoJSON = L.Circle.prototype.toGeoJSON;
L.Circle.include({
toGeoJSON: function () {
var feature = circleToGeoJSON.call(this);
const feature = circleToGeoJSON.call(this);
feature.properties = {
point_type: "circle",
radius: this.getRadius(),
@ -100,7 +100,7 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
L.CircleMarker.include({
toGeoJSON: function () {
var feature = circleToGeoJSON.call(this);
const feature = circleToGeoJSON.call(this);
feature.properties = {
point_type: "circlemarker",
radius: this.getRadius(),
@ -111,10 +111,13 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
L.Icon.Default.imagePath = "/assets/frappe/images/leaflet/";
this.map = L.map(this.map_id);
this.map.setView(frappe.utils.map_defaults.center, frappe.utils.map_defaults.zoom);
L.tileLayer(frappe.utils.map_defaults.tiles, frappe.utils.map_defaults.options).addTo(
this.map
);
this.editableLayers = new L.FeatureGroup();
}
bind_leaflet_locate_control() {
@ -124,9 +127,18 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
}
bind_leaflet_draw_control() {
this.editableLayers = new L.FeatureGroup();
if (
!frappe.perm.has_perm(this.doctype, this.df.permlevel, "write", this.doc) ||
this.df.read_only
) {
return;
}
var options = {
this.map.addControl(this.get_leaflet_controls());
}
get_leaflet_controls() {
return new L.Control.Draw({
position: "topleft",
draw: {
polyline: {
@ -156,12 +168,10 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
featureGroup: this.editableLayers, //REQUIRED!!
remove: true,
},
};
// create control and add to map
this.drawControl = new L.Control.Draw(options);
this.map.addControl(this.drawControl);
});
}
bind_leaflet_event_listeners() {
this.map.on("draw:created", (e) => {
var type = e.layerType,
layer = e.layer;
@ -173,31 +183,12 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
});
this.map.on("draw:deleted draw:edited", (e) => {
var layer = e.layer;
const { layer } = e;
this.editableLayers.removeLayer(layer);
this.set_value(JSON.stringify(this.editableLayers.toGeoJSON()));
});
}
bind_leaflet_refresh_button() {
L.easyButton({
id: "refresh-map-" + this.df.fieldname,
position: "topright",
type: "replace",
leafletClasses: true,
states: [
{
stateName: "refresh-map",
onClick: function (button, map) {
map._onResize();
},
title: "Refresh map",
icon: "fa fa-refresh",
},
],
}).addTo(this.map);
}
add_non_group_layers(source_layer, target_group) {
// https://gis.stackexchange.com/a/203773
// Would benefit from https://github.com/Leaflet/Leaflet/issues/4461
@ -215,4 +206,20 @@ frappe.ui.form.ControlGeolocation = class ControlGeolocation extends frappe.ui.f
this.editableLayers.removeLayer(l);
});
}
fit_and_recenter_map() {
// Spread map across the wrapper, recenter and zoom w.r.t bounds
try {
this.map.invalidateSize();
this.map.fitBounds(this.editableLayers.getBounds(), {
padding: [50, 50],
});
} catch (err) {
// suppress error if layer has a point.
}
}
on_section_collapse(hide) {
!hide && this.fit_and_recenter_map();
}
};

View file

@ -133,4 +133,7 @@ frappe.ui.form.ControlSignature = class ControlSignature extends frappe.ui.form.
this.set_my_value(base64_img);
this.set_image(this.get_value());
}
// Re-render the signature control when its collapsible section toggles
// (presumably the canvas cannot be drawn while the section is hidden —
// NOTE(review): confirm against Section.set_collapsible behavior).
on_section_collapse() {
this.refresh();
}
};

View file

@ -103,8 +103,9 @@ frappe.form.formatters = {
},
Currency: function (value, docfield, options, doc) {
var currency = frappe.meta.get_field_currency(docfield, doc);
var precision =
docfield.precision || cint(frappe.boot.sysdefaults.currency_precision) || 2;
var precision = cint(
docfield.precision ?? frappe.boot.sysdefaults.currency_precision ?? 2
);
// If you change anything below, it's going to hurt a company in UAE, a bit.
if (precision > 2) {

View file

@ -110,12 +110,7 @@ export default class Section {
this.set_icon(hide);
// refresh signature fields
this.fields_list.forEach((f) => {
if (f.df.fieldtype == "Signature") {
f.refresh();
}
});
this.fields_list.forEach((f) => f.on_section_collapse && f.on_section_collapse(hide));
// save state for next reload ('' is falsy)
if (this.df.css_class)

View file

@ -22,6 +22,7 @@ frappe.views.Workspace = class Workspace {
this.page = wrapper.page;
this.blocks = frappe.workspace_block.blocks;
this.is_read_only = true;
this.is_page_loaded = false;
this.pages = {};
this.sorted_public_items = [];
this.sorted_private_items = [];
@ -248,10 +249,14 @@ frappe.views.Workspace = class Workspace {
this.update_selected_sidebar(page, true); //add selected on new page
if (!frappe.router.current_route[0]) {
this.is_page_loaded = true;
frappe.set_route(frappe.router.slug(page.public ? page.name : "private/" + page.name));
}
this.show_page(page);
if (!this.is_page_loaded) {
this.show_page(page);
this.is_page_loaded = false;
}
}
update_selected_sidebar(page, add) {

View file

@ -73,13 +73,29 @@ def publish_realtime(
room = get_site_room()
if after_commit:
if not hasattr(frappe.local, "_realtime_log"):
frappe.local._realtime_log = []
frappe.db.after_commit.add(flush_realtime_log)
frappe.db.after_rollback.add(clear_realtime_log)
params = [event, message, room]
if params not in frappe.local.realtime_log:
frappe.local.realtime_log.append(params)
if params not in frappe.local._realtime_log:
frappe.local._realtime_log.append(params)
else:
emit_via_redis(event, message, room)
def flush_realtime_log():
	"""Publish every realtime event queued on this request, then empty the queue.

	Registered as an ``after_commit`` callback (see ``publish_realtime``) so
	events are only emitted once the transaction has actually been committed.
	"""
	for queued_args in frappe.local._realtime_log:
		frappe.realtime.emit_via_redis(*queued_args)
	frappe.local._realtime_log = []
def clear_realtime_log():
	# Drop queued realtime events without emitting them; registered as an
	# after_rollback callback (see publish_realtime) so events queued inside a
	# rolled-back transaction are never published.
	frappe.local._realtime_log = []
def emit_via_redis(event, message, room):
"""Publish real-time updates via redis

View file

@ -163,3 +163,65 @@ class TestRedisCache(FrappeAPITestCase):
calculate_area(radius=10)
# kwargs should hit cache too
self.assertEqual(function_call_count, 4)
class TestDocumentCache(FrappeAPITestCase):
	# Fixture document used by every test; Administrator always exists.
	TEST_DOCTYPE = "User"
	TEST_DOCNAME = "Administrator"
	TEST_FIELD = "middle_name"

	def setUp(self) -> None:
		# Fresh random value per test so a stale cache hit is detectable.
		self.test_value = frappe.generate_hash()

	def test_caching(self):
		"""Cached reads must be query-free and invalidate on write and rollback."""
		doc = frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)
		# Second read must be served entirely from cache (zero SQL queries).
		with self.assertQueryCount(0):
			doc = frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)

		doc.db_set(self.TEST_FIELD, self.test_value)
		new_doc = frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)
		self.assertIsNot(doc, new_doc)  # Shouldn't be same object from frappe.local
		self.assertEqual(new_doc.get(self.TEST_FIELD), self.test_value)  # Cache invalidated and fetched

		frappe.db.rollback()
		doc_after_rollback = frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)
		self.assertIsNot(new_doc, doc_after_rollback)
		# Cache invalidated after rollback
		self.assertNotEqual(doc_after_rollback.get(self.TEST_FIELD), self.test_value)
		# After the post-rollback fetch, subsequent reads are cached again.
		with self.assertQueryCount(0):
			frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)

	def test_cache_invalidation_set_value(self):
		"""set_value with a filter dict (not a plain name) must also invalidate."""
		doc = frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)
		frappe.db.set_value(
			self.TEST_DOCTYPE,
			{"name": ("like", "%Admin%")},
			self.TEST_FIELD,
			self.test_value,
		)

		new_doc = frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)
		self.assertIsNot(doc, new_doc)
		self.assertEqual(new_doc.get(self.TEST_FIELD), self.test_value)
		with self.assertQueryCount(0):
			frappe.get_cached_doc(self.TEST_DOCTYPE, self.TEST_DOCNAME)
class TestRedisWrapper(FrappeAPITestCase):
	def test_delete_keys(self):
		"""delete_keys() must remove every key matching the given prefix."""
		cache = frappe.cache()
		prefix = "test_del_"

		for idx in range(5):
			cache.set_value(f"{prefix}{idx}", 1)
		self.assertEqual(len(cache.get_keys(prefix)), 5)

		cache.delete_keys(prefix)
		self.assertEqual(len(cache.get_keys(prefix)), 0)

View file

@ -593,6 +593,37 @@ class TestDB(FrappeTestCase):
modify_values((23, 23.0, 23.00004345, "wow", [1, 2, 3, "abc"])),
)
def test_callbacks(self):
	# Commit/rollback must each run exactly their own callback queues, in
	# registration order, and discard the queues of the opposite event.

	order_of_execution = []

	def f(val):
		nonlocal order_of_execution
		order_of_execution.append(val)

	frappe.db.before_commit.add(lambda: f(0))
	frappe.db.before_commit.add(lambda: f(1))
	frappe.db.after_commit.add(lambda: f(2))
	frappe.db.after_commit.add(lambda: f(3))
	# rollback callbacks must NOT fire on commit
	frappe.db.before_rollback.add(lambda: f("IGNORED"))
	frappe.db.before_rollback.add(lambda: f("IGNORED"))

	frappe.db.commit()

	# commit callbacks registered after a commit must NOT fire on rollback
	frappe.db.after_commit.add(lambda: f("IGNORED"))
	frappe.db.after_commit.add(lambda: f("IGNORED"))
	frappe.db.before_rollback.add(lambda: f(4))
	frappe.db.before_rollback.add(lambda: f(5))
	frappe.db.after_rollback.add(lambda: f(6))
	frappe.db.after_rollback.add(lambda: f(7))

	frappe.db.rollback()
	# strict FIFO order across both events: 0..7, no "IGNORED" entries
	self.assertEqual(order_of_execution, list(range(0, 8)))
@run_only_if(db_type_is.MARIADB)
class TestDDLCommandsMaria(FrappeTestCase):
@ -765,21 +796,20 @@ class TestDBSetValue(FrappeTestCase):
def test_set_value(self):
	"""db.set_value must issue an UPDATE on the target row.

	Asserts against ``frappe.db.last_query`` (the statement that actually
	reached the database) rather than patching ``Database.sql``.
	"""
	# NOTE: the previous version of this test interleaved two variants
	# (a patched-``Database.sql`` probe and the ``last_query`` check); only the
	# last_query variant is kept so set_value runs exactly once.
	self.todo1.reload()

	frappe.db.set_value(
		self.todo1.doctype,
		self.todo1.name,
		"description",
		f"{self.todo1.description}-edit by `test_for_update`",
	)
	query = str(frappe.db.last_query)

	if frappe.conf.db_type == "postgres":
		from frappe.database.postgres.database import modify_query

		# Postgres rewrites backtick-quoted identifiers; compare the rewritten form.
		self.assertTrue(modify_query("UPDATE `tabToDo` SET") in query)
	if frappe.conf.db_type == "mariadb":
		self.assertTrue("UPDATE `tabToDo` SET" in query)
def test_cleared_cache(self):
self.todo2.reload()

View file

@ -69,10 +69,12 @@ class TestFixtureImport(FrappeTestCase):
import_doc(path_to_exported_fixtures)
delete_doc("DocType", "temp_singles", delete_permanently=True)
os.remove(path_to_exported_fixtures)
data = frappe.db.get_single_value("temp_singles", "member_name")
truncate_query.run()
self.assertEqual(data, dummy_name_list[0])
delete_doc("DocType", "temp_singles", delete_permanently=True)
os.remove(path_to_exported_fixtures)
frappe.db.commit()

View file

@ -32,7 +32,7 @@ class FrappeTestCase(unittest.TestCase):
# flush changes done so far to avoid flake
frappe.db.commit()
if cls.SHOW_TRANSACTION_COMMIT_WARNINGS:
frappe.db.add_before_commit(_commit_watcher)
frappe.db.before_commit.add(_commit_watcher)
# enqueue teardown actions (executed in LIFO order)
cls.addClassCleanup(_restore_thread_locals, copy.deepcopy(frappe.local.flags))
@ -101,8 +101,6 @@ def _commit_watcher():
def _rollback_db():
	# NOTE(review): the old frappe.local.before_commit / rollback_observers
	# thread-locals were removed in this codebase in favour of the
	# frappe.db.before_commit / after_commit CallbackManager queues, so
	# resetting them here is obsolete — confirm no site code still sets them.
	frappe.db.value_cache = {}
	frappe.db.rollback()
@ -112,7 +110,6 @@ def _restore_thread_locals(flags):
frappe.local.error_log = []
frappe.local.message_log = []
frappe.local.debug_log = []
frappe.local.realtime_log = []
frappe.local.conf = frappe._dict(frappe.get_site_config())
frappe.local.cache = {}
frappe.local.lang = "en"

View file

@ -9,6 +9,7 @@ import os
import re
import sys
import traceback
from collections import deque
from collections.abc import (
Container,
Generator,
@ -20,7 +21,7 @@ from collections.abc import (
from email.header import decode_header, make_header
from email.utils import formataddr, parseaddr
from gzip import GzipFile
from typing import Any, Literal
from typing import Any, Callable, Literal
from urllib.parse import quote, urlparse
from redis.exceptions import ConnectionError
@ -1092,3 +1093,42 @@ def is_git_url(url: str) -> bool:
# modified to allow without the tailing .git from https://github.com/jonschlinkert/is-git-url.git
pattern = r"(?:git|ssh|https?|\w*@[-\w.]+):(\/\/)?(.*?)(\.git)?(\/?|\#[-\d\w._]+?)$"
return bool(re.match(pattern, url))
class CallbackManager:
	"""FIFO queue of zero-argument callables.

	```
	# Capture callbacks
	callbacks = CallbackManager()

	# Put a function call in queue
	callbacks.add(func)

	# Run all pending functions in queue
	callbacks.run()

	# Reset queue
	callbacks.reset()
	```

	Example usage: frappe.db.after_commit
	"""

	__slots__ = ("_functions",)

	def __init__(self) -> None:
		self._functions = deque()

	def add(self, func: Callable) -> None:
		"""Queue *func*; queued callbacks execute in insertion (FIFO) order."""
		self._functions.append(func)

	def run(self):
		"""Execute and drain the queue.

		Callbacks added while running are executed too, since the queue is
		re-checked on every iteration.
		"""
		pending = self._functions
		while pending:
			pending.popleft()()

	def reset(self):
		"""Discard all pending callbacks without executing them."""
		self._functions.clear()

View file

@ -3,13 +3,14 @@ import socket
import time
from collections import defaultdict
from functools import lru_cache
from typing import TYPE_CHECKING, Any, Literal, NoReturn, Union
from typing import Any, Callable, Literal, NoReturn
from uuid import uuid4
import redis
from redis.exceptions import BusyLoadingError, ConnectionError
from rq import Connection, Queue, Worker
from rq.exceptions import NoSuchJobError
from rq.job import Job, JobStatus
from rq.logutils import setup_loghandlers
from rq.worker import RandomWorker, RoundRobinWorker
from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_fixed
@ -22,10 +23,6 @@ from frappe.utils.commands import log
from frappe.utils.deprecations import deprecation_warning
from frappe.utils.redis_queue import RedisQueue
if TYPE_CHECKING:
from rq.job import Job
# TTL to keep RQ job logs in redis for.
RQ_JOB_FAILURE_TTL = 7 * 24 * 60 * 60 # 7 days instead of 1 year (default)
RQ_RESULTS_TTL = 10 * 60
@ -54,21 +51,21 @@ redis_connection = None
def enqueue(
method,
queue="default",
timeout=None,
on_success=None,
on_failure=None,
method: str | Callable,
queue: str = "default",
timeout: int | None = None,
event=None,
is_async=True,
job_name=None,
now=False,
enqueue_after_commit=False,
is_async: bool = True,
job_name: str | None = None,
now: bool = False,
enqueue_after_commit: bool = False,
*,
at_front=False,
job_id=None,
on_success: Callable = None,
on_failure: Callable = None,
at_front: bool = False,
job_id: str = None,
**kwargs,
) -> Union["Job", Any]:
) -> Job | Any:
"""
Enqueue method to be executed using a background worker
@ -113,6 +110,7 @@ def enqueue(
if not timeout:
timeout = get_queues_timeout().get(queue) or 300
queue_args = {
"site": frappe.local.site,
"user": frappe.session.user,
@ -122,32 +120,25 @@ def enqueue(
"is_async": is_async,
"kwargs": kwargs,
}
if enqueue_after_commit:
if not frappe.flags.enqueue_after_commit:
frappe.flags.enqueue_after_commit = []
frappe.flags.enqueue_after_commit.append(
{
"queue": queue,
"is_async": is_async,
"timeout": timeout,
"queue_args": queue_args,
"job_id": job_id,
}
def enqueue_call():
return q.enqueue_call(
execute_job,
on_success=on_success,
on_failure=on_failure,
timeout=timeout,
kwargs=queue_args,
at_front=at_front,
failure_ttl=frappe.conf.get("rq_job_failure_ttl") or RQ_JOB_FAILURE_TTL,
result_ttl=frappe.conf.get("rq_results_ttl") or RQ_RESULTS_TTL,
job_id=job_id,
)
return frappe.flags.enqueue_after_commit
return q.enqueue_call(
execute_job,
on_success=on_success,
on_failure=on_failure,
timeout=timeout,
kwargs=queue_args,
at_front=at_front,
failure_ttl=frappe.conf.get("rq_job_failure_ttl") or RQ_JOB_FAILURE_TTL,
result_ttl=frappe.conf.get("rq_results_ttl") or RQ_RESULTS_TTL,
job_id=job_id,
)
if enqueue_after_commit:
frappe.db.after_commit.add(enqueue_call)
return
return enqueue_call()
def enqueue_doc(
@ -437,12 +428,15 @@ def create_job_id(job_id: str) -> str:
return f"{frappe.local.site}::{job_id}"
def is_job_enqueued(job_id: str) -> bool:
	"""Return True if the RQ job is currently queued or started."""
	return get_job_status(job_id) in (JobStatus.QUEUED, JobStatus.STARTED)


def get_job_status(job_id: str) -> JobStatus | None:
	"""Get RQ job status, returns None if job is not found.

	``job_id`` is namespaced with the site via ``create_job_id`` before lookup.
	"""
	# NOTE: the diff-merged version duplicated is_job_enqueued's old body
	# (local `from rq.job import Job`, unreachable boolean return) inside this
	# function; Job/JobStatus are imported at module level now.
	try:
		job = Job.fetch(create_job_id(job_id), connection=get_redis_conn())
	except NoSuchJobError:
		return None
	return job.get_status()

View file

@ -150,7 +150,7 @@ def redis_cache(ttl: int | None = 3600, user: str | bool | None = None) -> Calla
@wraps(func)
def redis_cache_wrapper(*args, **kwargs):
func_call_key = func_key + str(__generate_request_cache_key(args, kwargs))
func_call_key = func_key + "::" + str(__generate_request_cache_key(args, kwargs))
if frappe.cache().exists(func_call_key):
return frappe.cache().get_value(func_call_key, user=user)
else:

View file

@ -127,20 +127,22 @@ class RedisWrapper(redis.Redis):
def delete_value(self, keys, user=None, make_keys=True, shared=False):
	"""Delete value, list of values.

	:param keys: a single key or a list/tuple of keys
	:param make_keys: when True, namespace each key via ``make_key`` first
	"""
	# NOTE: the diff-merged version kept both the old per-key loop (which
	# re-applied make_key and issued one DELETE per key) and the new bulk
	# path; keys must be namespaced once and deleted in a single round trip.
	if not keys:
		return

	if not isinstance(keys, (list, tuple)):
		keys = (keys,)

	if make_keys:
		keys = [self.make_key(k, shared=shared, user=user) for k in keys]

	for key in keys:
		# keep the request-local cache consistent with redis
		frappe.local.cache.pop(key, None)

	try:
		self.delete(*keys)
	except redis.exceptions.ConnectionError:
		# best-effort: an unreachable cache server must not break the request
		pass
def lpush(self, key, value):
super().lpush(self.make_key(key), value)
@ -197,7 +199,11 @@ class RedisWrapper(redis.Redis):
def exists(self, *names: str, user=None, shared=None) -> int:
	"""Return how many of the given keys exist (keys are namespaced first).

	Returns False (falsy, like a count of 0) when redis is unreachable.
	"""
	# Fix: the merged diff left an unconditional early return above the
	# try/except, making the ConnectionError fallback unreachable dead code.
	names = [self.make_key(n, user=user, shared=shared) for n in names]
	try:
		return super().exists(*names)
	except redis.exceptions.ConnectionError:
		return False
def hgetall(self, name):
value = super().hgetall(self.make_key(name))

View file

@ -31,6 +31,10 @@
"allow_incomplete",
"section_break_2",
"max_attachment_size",
"section_break_xzqr",
"condition",
"column_break_tjgl",
"condition_description",
"section_break_3",
"list_setting_message",
"show_list",
@ -279,10 +283,6 @@
"fieldtype": "Tab Break",
"label": "Form"
},
{
"fieldname": "column_break_1",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_1",
"fieldtype": "Section Break"
@ -297,7 +297,6 @@
"fieldtype": "Column Break"
},
{
"collapsible": 1,
"fieldname": "section_break_2",
"fieldtype": "Section Break"
},
@ -374,13 +373,33 @@
"fieldname": "anonymous",
"fieldtype": "Check",
"label": "Anonymous"
},
{
"fieldname": "condition",
"fieldtype": "Code",
"label": "Condition",
"max_height": "150px"
},
{
"fieldname": "section_break_xzqr",
"fieldtype": "Section Break"
},
{
"fieldname": "column_break_tjgl",
"fieldtype": "Column Break"
},
{
"fieldname": "condition_description",
"fieldtype": "HTML",
"label": "Condition Description",
"options": "<p>Multiple webforms can be created for a single doctype. Write a condition specific to this webform to display correct record after submission.</p><p>For Example:</p>\n<p>If you create a separate webform every year to capture feedback from employees add a \n field named year in doctype and add a condition <b>doc.year==\"2023\"</b></p>\n"
}
],
"has_web_view": 1,
"icon": "icon-edit",
"is_published_field": "published",
"links": [],
"modified": "2023-04-20 17:24:42.657731",
"modified": "2023-06-03 19:18:56.760479",
"modified_by": "Administrator",
"module": "Website",
"name": "Web Form",

View file

@ -12,6 +12,7 @@ from frappe.desk.form.meta import get_code_files_via_hooks
from frappe.modules.utils import export_module_json, get_doc_module
from frappe.rate_limiter import rate_limit
from frappe.utils import cstr, dict_with_keys, strip_html
from frappe.utils.caching import redis_cache
from frappe.website.utils import get_boot_data, get_comment_list, get_sidebar_items
from frappe.website.website_generator import WebsiteGenerator
@ -19,9 +20,6 @@ from frappe.website.website_generator import WebsiteGenerator
class WebForm(WebsiteGenerator):
website = frappe._dict(no_cache=1)
def onload(self):
super().onload()
def validate(self):
super().validate()
@ -153,10 +151,16 @@ def get_context(context):
and not frappe.form_dict.name
and not frappe.form_dict.is_list
):
name = frappe.db.get_value(self.doc_type, {"owner": frappe.session.user}, "name")
if name:
context.in_view_mode = True
frappe.redirect(f"/{self.route}/{name}")
names = frappe.db.get_values(self.doc_type, {"owner": frappe.session.user}, pluck="name")
for name in names:
if self.condition:
doc = frappe.get_doc(self.doc_type, name)
if frappe.safe_eval(self.condition, None, {"doc": doc.as_dict()}):
context.in_view_mode = True
frappe.redirect(f"/{self.route}/{name}")
else:
context.in_view_mode = True
frappe.redirect(f"/{self.route}/{name}")
# Show new form when
# - User is Guest
@ -633,3 +637,8 @@ def get_link_options(web_form_name, doctype, allow_read_on_all_link_options=Fals
raise frappe.PermissionError(
_("You don't have permission to access the {0} DocType.").format(doctype)
)
@redis_cache(ttl=60 * 60)
def get_published_web_forms() -> list[dict]:
	# All published Web Forms (name, route, modified), used for route matching.
	# Cached in redis for an hour; cleared by
	# frappe.website.router.clear_routing_cache().
	return frappe.get_all("Web Form", ["name", "route", "modified"], {"published": 1})

View file

@ -8,6 +8,7 @@ from jinja2.exceptions import TemplateSyntaxError
import frappe
from frappe import _
from frappe.utils import get_datetime, now, quoted, strip_html
from frappe.utils.caching import redis_cache
from frappe.utils.jinja import render_template
from frappe.utils.safe_exec import safe_exec
from frappe.website.doctype.website_slideshow.website_slideshow import get_slideshow
@ -30,12 +31,6 @@ class WebPage(WebsiteGenerator):
if not self.dynamic_route:
self.route = quoted(self.route)
def on_update(self):
super().on_update()
def on_trash(self):
super().on_trash()
def get_context(self, context):
context.main_section = get_html_content_based_on_type(self, "main_section", self.content_type)
context.source_content_type = self.content_type
@ -247,3 +242,10 @@ def extract_script_and_style_tags(html):
style.extract()
return str(soup), scripts, styles
@redis_cache(ttl=60 * 60)
def get_dynamic_web_pages() -> list[dict]:
	# Published Web Pages that declare a dynamic route, used to build routing
	# rules. Cached in redis for an hour; cleared by
	# frappe.website.router.clear_routing_cache().
	return frappe.get_all(
		"Web Page", fields=["name", "route", "modified"], filters=dict(published=1, dynamic_route=1)
	)

View file

@ -1,5 +1,6 @@
import frappe
from frappe.model.document import get_controller
from frappe.utils.caching import redis_cache
from frappe.website.page_renderers.base_template_page import BaseTemplatePage
from frappe.website.router import (
get_doctypes_with_web_view,
@ -22,22 +23,9 @@ class DocumentPage(BaseTemplatePage):
return False
def search_in_doctypes_with_web_view(self):
	"""Resolve self.path against doctypes with a web view.

	Sets ``self.doctype`` / ``self.docname`` and returns True on a match.
	"""
	# NOTE: the diff-merged version kept the old per-doctype query loop above
	# this call, shadowing the redis-cached lookup; the cached helper is the
	# single code path now.
	if document := _find_matching_document_webview(self.path):
		self.doctype, self.docname = document
		return True
def search_web_page_dynamic_routes(self):
d = get_page_info_from_web_page_with_dynamic_routes(self.path)
@ -83,7 +71,8 @@ class DocumentPage(BaseTemplatePage):
if prop not in self.context:
self.context[prop] = getattr(self.doc, prop, False)
def get_condition_field(self, meta):
@staticmethod
def get_condition_field(meta):
condition_field = None
if meta.is_published_field:
condition_field = meta.is_published_field
@ -92,3 +81,22 @@ class DocumentPage(BaseTemplatePage):
condition_field = controller.website.condition_field
return condition_field
@redis_cache(ttl=60 * 60)
def _find_matching_document_webview(route: str) -> tuple[str, str] | None:
	"""Return (doctype, docname) of the web-view document matching *route*.

	When a doctype declares a condition field (e.g. its published flag), only
	documents with that field set are considered. Returns None when nothing
	matches. Cached in redis for an hour; cleared by
	frappe.website.router.clear_routing_cache().
	"""
	for doctype in get_doctypes_with_web_view():
		filters = dict(route=route)
		meta = frappe.get_meta(doctype)
		condition_field = DocumentPage.get_condition_field(meta)

		if condition_field:
			filters[condition_field] = 1

		try:
			docname = frappe.db.get_value(doctype, filters, "name")
			if docname:
				return (doctype, docname)
		except Exception as e:
			# A doctype may not have the route/condition column yet (e.g. mid
			# migration); any other failure is re-raised.
			if not frappe.db.is_missing_column(e):
				raise  # bare raise preserves the original traceback (was `raise e`)

View file

@ -16,12 +16,11 @@ def get_page_info_from_web_page_with_dynamic_routes(path):
"""
Query Web Page with dynamic_route = 1 and evaluate if any of the routes match
"""
from frappe.website.doctype.web_page.web_page import get_dynamic_web_pages
rules, page_info = [], {}
# build rules from all web page with `dynamic_route = 1`
for d in frappe.get_all(
"Web Page", fields=["name", "route", "modified"], filters=dict(published=1, dynamic_route=1)
):
for d in get_dynamic_web_pages():
rules.append(Rule("/" + d.route, endpoint=d.name))
d.doctype = "Web Page"
page_info[d.name] = d
@ -33,9 +32,10 @@ def get_page_info_from_web_page_with_dynamic_routes(path):
def get_page_info_from_web_form(path):
"""Query published web forms and evaluate if the route matches"""
from frappe.website.doctype.web_form.web_form import get_published_web_forms
rules, page_info = [], {}
web_forms = frappe.get_all("Web Form", ["name", "route", "modified"], {"published": 1})
for d in web_forms:
for d in get_published_web_forms():
rules.append(Rule(f"/{d.route}", endpoint=d.name))
rules.append(Rule(f"/{d.route}/list", endpoint=d.name))
rules.append(Rule(f"/{d.route}/new", endpoint=d.name))
@ -315,3 +315,13 @@ def get_doctypes_with_web_view():
def get_start_folders():
	# Folders scanned for static website pages; a request can override the
	# default set via frappe.local.flags.web_pages_folders.
	return frappe.local.flags.web_pages_folders or ("www", "templates/pages")
def clear_routing_cache():
	"""Invalidate every redis-cached website routing lookup."""
	# Imported lazily to avoid circular imports with the website doctypes.
	from frappe.website.doctype.web_form.web_form import get_published_web_forms
	from frappe.website.doctype.web_page.web_page import get_dynamic_web_pages
	from frappe.website.page_renderers.document_page import _find_matching_document_webview

	for cached_lookup in (
		_find_matching_document_webview,
		get_dynamic_web_pages,
		get_published_web_forms,
	):
		cached_lookup.clear_cache()

View file

@ -360,9 +360,13 @@ def get_html_content_based_on_type(doc, fieldname, content_type):
def clear_cache(path=None):
	"""Clear website caches

	:param path: (optional) for the given path"""
	from frappe.website.router import clear_routing_cache

	for key in ("website_generator_routes", "website_pages", "website_full_index", "sitemap_routes"):
		frappe.cache().delete_value(key)

	# routing lookups (web forms, dynamic web pages, document web views) are
	# redis-cached separately and must be invalidated together
	clear_routing_cache()

	frappe.cache().delete_value("website_404")
	if path:
		# drop only the redirect entry for this specific path
		frappe.cache().hdel("website_redirects", path)

View file

@ -49,7 +49,7 @@
},
{
"fieldname": "card_1_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -79,7 +79,7 @@
},
{
"fieldname": "card_2_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -109,7 +109,7 @@
},
{
"fieldname": "card_3_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -139,7 +139,7 @@
},
{
"fieldname": "card_4_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -169,7 +169,7 @@
},
{
"fieldname": "card_5_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -199,7 +199,7 @@
},
{
"fieldname": "card_6_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -229,7 +229,7 @@
},
{
"fieldname": "card_7_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -259,7 +259,7 @@
},
{
"fieldname": "card_8_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -289,13 +289,13 @@
},
{
"fieldname": "card_9_url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
}
],
"idx": 0,
"modified": "2021-05-03 13:26:34.470232",
"modified": "2023-06-05 13:26:34.470232",
"modified_by": "Administrator",
"module": "Website",
"name": "Section with Cards",

View file

@ -43,7 +43,7 @@
},
{
"fieldname": "url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
},
@ -55,7 +55,7 @@
}
],
"idx": 2,
"modified": "2020-10-26 17:43:08.219285",
"modified": "2023-06-05 13:26:34.470232",
"modified_by": "Administrator",
"module": "Website",
"name": "Section with Features",

View file

@ -56,13 +56,13 @@
},
{
"fieldname": "url",
"fieldtype": "Data",
"fieldtype": "Small Text",
"label": "URL",
"reqd": 0
}
],
"idx": 0,
"modified": "2022-03-21 15:39:39.044104",
"modified": "2023-06-05 13:26:34.470232",
"modified_by": "Administrator",
"module": "Website",
"name": "Section with Testimonials",