Merge remote-tracking branch 'upstream/develop' into feat-translation-customizations

This commit is contained in:
barredterra 2024-06-27 17:22:50 +02:00
commit 4c924a5a16
241 changed files with 67078 additions and 40418 deletions

View file

@ -6,8 +6,8 @@ on:
workflow_dispatch:
jobs:
regeneratee-pot-file:
name: Release
regenerate-pot-file:
name: Regenerate POT file
runs-on: ubuntu-latest
strategy:
fail-fast: false

View file

@ -1,64 +1,53 @@
context("Control Autocomplete", () => {
before(() => {
cy.login();
cy.visit("/app/website");
cy.visit("/app");
cy.wait(4000);
});
function get_dialog_with_autocomplete(options) {
cy.visit("/app/website");
const get_dialog_with_autocomplete = (fieldname, options) => {
return cy.dialog({
title: "Autocomplete",
fields: [
{
label: "Select an option",
fieldname: "autocomplete",
fieldname: fieldname,
fieldtype: "Autocomplete",
options: options || ["Option 1", "Option 2", "Option 3"],
options: options,
},
],
});
}
};
it("should set the valid value", () => {
get_dialog_with_autocomplete().as("dialog");
cy.get(".frappe-control[data-fieldname=autocomplete] input").focus().as("input");
cy.wait(1000);
cy.get("@input").type("2", { delay: 300 });
cy.get(".frappe-control[data-fieldname=autocomplete]")
.findByRole("listbox")
.should("be.visible");
cy.get(".frappe-control[data-fieldname=autocomplete] input").type("{enter}", {
delay: 300,
});
cy.get(".frappe-control[data-fieldname=autocomplete] input").blur();
const fieldname = "autocomplete_1";
get_dialog_with_autocomplete(fieldname, ["Option 1", "Option 2", "Option 3"]).as("dialog");
cy.get(`.control-input > .awesomplete > input[data-fieldname=${fieldname}]`).as("input");
cy.wait(500);
cy.get("@input").type("2{enter}", { delay: 300 });
cy.get("@dialog").then((dialog) => {
let value = dialog.get_value("autocomplete");
let value = dialog.get_value(fieldname);
expect(value).to.eq("Option 2");
dialog.clear();
dialog.hide();
});
});
it("should set the valid value with different label", () => {
const options_with_label = [
const fieldname = "autocomplete_2";
get_dialog_with_autocomplete(fieldname, [
{ label: "Option 1", value: "option_1" },
{ label: "Option 2", value: "option_2" },
];
get_dialog_with_autocomplete(options_with_label).as("dialog");
]).as("dialog");
cy.get(".frappe-control[data-fieldname=autocomplete] input").focus().as("input");
cy.get(".frappe-control[data-fieldname=autocomplete]")
.findByRole("listbox")
.should("be.visible");
cy.get("@input").type("2", { delay: 300 });
cy.get(".frappe-control[data-fieldname=autocomplete] input").type("{enter}", {
delay: 300,
});
cy.get(".frappe-control[data-fieldname=autocomplete] input").blur();
cy.get(`.control-input > .awesomplete > input[data-fieldname=${fieldname}]`).as("input");
cy.wait(500);
cy.get("@input").type("2{enter}", { delay: 300 });
cy.get("@dialog").then((dialog) => {
let value = dialog.get_value("autocomplete");
let value = dialog.get_value(fieldname);
expect(value).to.eq("option_2");
dialog.clear();
dialog.hide();
});
});
});

View file

@ -20,11 +20,14 @@ context("Control Duration", () => {
it("should set duration", () => {
get_dialog_with_duration().as("dialog");
cy.wait(500);
cy.get(".frappe-control[data-fieldname=duration] input").first().click();
cy.get(".duration-input[data-duration=days]")
.type(45, { force: true })
.blur({ force: true });
cy.wait(500);
cy.get(".duration-input[data-duration=minutes]").type(30).blur({ force: true });
cy.wait(500);
cy.get(".frappe-control[data-fieldname=duration] input")
.first()
.should("have.value", "45d 30m");

View file

@ -27,7 +27,7 @@ context("Control Link", () => {
}
function get_dialog_with_gender_link() {
return cy.dialog({
let dialog = cy.dialog({
title: "Link",
fields: [
{
@ -38,6 +38,8 @@ context("Control Link", () => {
},
],
});
cy.wait(500);
return dialog;
}
it("should set the valid value", () => {
@ -62,6 +64,7 @@ context("Control Link", () => {
cy.wait("@search_link");
cy.get("@input").type("todo for link", { delay: 200 });
cy.wait("@search_link");
cy.wait(500);
cy.get(".frappe-control[data-fieldname=link]").findByRole("listbox").should("be.visible");
cy.get(".frappe-control[data-fieldname=link] input").type("{enter}", { delay: 100 });
cy.get(".frappe-control[data-fieldname=link] input").blur();
@ -82,6 +85,7 @@ context("Control Link", () => {
.type("invalid value", { delay: 100 })
.blur();
cy.wait("@validate_link");
cy.wait(500);
cy.get(".frappe-control[data-fieldname=link] input").should("have.value", "");
});
@ -92,6 +96,7 @@ context("Control Link", () => {
cy.get(".frappe-control[data-fieldname=link] input").type(" ", { delay: 100 }).blur();
cy.wait("@validate_link");
cy.wait(500);
cy.get(".frappe-control[data-fieldname=link] input").should("have.value", "");
cy.window()
.its("cur_dialog")
@ -262,6 +267,7 @@ context("Control Link", () => {
cy.wait("@search_link");
cy.get("@input").type("Sonstiges", { delay: 200 });
cy.wait("@search_link");
cy.wait(500);
cy.get(".frappe-control[data-fieldname=link] ul").should("be.visible");
cy.get(".frappe-control[data-fieldname=link] input").type("{enter}", { delay: 100 });
cy.get(".frappe-control[data-fieldname=link] input").blur();
@ -284,7 +290,7 @@ context("Control Link", () => {
});
cy.clear_cache();
cy.wait(500);
cy.wait(1000);
get_dialog_with_gender_link().as("dialog");
cy.intercept("POST", "/api/method/frappe.desk.search.search_link").as("search_link");
@ -293,6 +299,7 @@ context("Control Link", () => {
cy.wait("@search_link");
cy.get("@input").type("Non-Conforming", { delay: 200 });
cy.wait("@search_link");
cy.wait(500);
cy.get(".frappe-control[data-fieldname=link] ul").should("be.visible");
cy.get(".frappe-control[data-fieldname=link] input").type("{enter}", { delay: 100 });
cy.get(".frappe-control[data-fieldname=link] input").blur();

View file

@ -1,7 +1,11 @@
context("FileUploader", () => {
before(() => {
cy.login();
});
beforeEach(() => {
cy.visit("/app");
cy.wait(2000); // workspace can load async and clear active dialog
});
function open_upload_dialog() {
@ -10,6 +14,7 @@ context("FileUploader", () => {
.then((frappe) => {
new frappe.ui.FileUploader();
});
cy.wait(500);
}
it("upload dialog api works", () => {
@ -47,40 +52,4 @@ context("FileUploader", () => {
.should("have.property", "file_name", "example.json");
cy.get(".modal:visible").should("not.exist");
});
it("should accept web links", () => {
open_upload_dialog();
cy.get_open_dialog().findByRole("button", { name: "Link" }).click();
cy.get_open_dialog()
.findByPlaceholderText("Attach a web link")
.type("https://github.com", { delay: 100, force: true });
cy.intercept("POST", "/api/method/upload_file").as("upload_file");
cy.get_open_dialog().findByRole("button", { name: "Upload" }).click();
cy.wait("@upload_file")
.its("response.body.message")
.should("have.property", "file_url", "https://github.com");
cy.get(".modal:visible").should("not.exist");
});
it("should allow cropping and optimization for valid images", () => {
open_upload_dialog();
cy.get_open_dialog()
.find(".file-upload-area")
.selectFile("cypress/fixtures/sample_image.jpg", {
action: "drag-drop",
});
cy.get_open_dialog().findAllByText("sample_image.jpg").should("exist");
cy.get_open_dialog().find(".btn-crop").first().click();
cy.get_open_dialog().findByRole("button", { name: "Crop" }).click();
cy.get_open_dialog().findAllByRole("checkbox", { name: "Optimize" }).should("exist");
cy.get_open_dialog().findAllByLabelText("Optimize").first().click();
cy.intercept("POST", "/api/method/upload_file").as("upload_file");
cy.get_open_dialog().findByRole("button", { name: "Upload" }).click();
cy.wait("@upload_file").its("response.statusCode").should("eq", 200);
cy.get(".modal:visible").should("not.exist");
});
});

View file

@ -30,7 +30,7 @@ context("Navigation", () => {
cy.get("@reload").get(".page-card .btn-primary").contains("Login").click();
cy.location("pathname").should("eq", "/login");
cy.login();
cy.visit("/app");
cy.reload().as("reload");
cy.location("pathname").should("eq", "/app/todo");
});
});

View file

@ -1,72 +0,0 @@
context.skip("Recorder", () => {
before(() => {
cy.login();
});
beforeEach(() => {
cy.visit("/app/recorder");
return cy
.window()
.its("frappe")
.then((frappe) => {
// reset recorder
return frappe.xcall("frappe.recorder.stop").then(() => {
return frappe.xcall("frappe.recorder.delete");
});
});
});
it("Recorder Empty State", () => {
cy.get(".page-head").findByTitle("Recorder").should("exist");
cy.get(".indicator-pill").should("contain", "Inactive").should("have.class", "red");
cy.get(".page-actions").findByRole("button", { name: "Start" }).should("exist");
cy.get(".page-actions").findByRole("button", { name: "Clear" }).should("exist");
cy.get(".msg-box").should("contain", "Recorder is Inactive");
cy.get(".msg-box").findByRole("button", { name: "Start Recording" }).should("exist");
});
it("Recorder Start", () => {
cy.get(".page-actions").findByRole("button", { name: "Start" }).click();
cy.get(".indicator-pill").should("contain", "Active").should("have.class", "green");
cy.get(".msg-box").should("contain", "No Requests found");
cy.visit("/app/List/DocType/List");
cy.intercept("POST", "/api/method/frappe.desk.reportview.get").as("list_refresh");
cy.wait("@list_refresh");
cy.get(".page-head").findByTitle("DocType").should("exist");
cy.get(".list-count").should("contain", "20 of ");
cy.visit("/app/recorder");
cy.get(".page-head").findByTitle("Recorder").should("exist");
cy.get(".frappe-list .result-list").should(
"contain",
"/api/method/frappe.desk.reportview.get"
);
});
it("Recorder View Request", () => {
cy.get(".page-actions").findByRole("button", { name: "Start" }).click();
cy.visit("/app/List/DocType/List");
cy.intercept("POST", "/api/method/frappe.desk.reportview.get").as("list_refresh");
cy.wait("@list_refresh");
cy.get(".page-head").findByTitle("DocType").should("exist");
cy.get(".list-count").should("contain", "20 of ");
cy.visit("/app/recorder");
cy.get(".frappe-list .list-row-container span")
.contains("/api/method/frappe")
.should("be.visible")
.click({ force: true });
cy.url().should("include", "/recorder/request");
cy.get("form").should("contain", "/api/method/frappe");
});
});

View file

@ -422,7 +422,7 @@ def get_site_config(sites_path: str | None = None, site_path: str | None = None)
from frappe.database.mariadb.database import MariaDBDatabase
return {
"mariadb": MariaDBDatabase.default_port, # 3306
"mariadb": MariaDBDatabase.default_port,
"postgres": 5432,
}[db_type]
@ -435,7 +435,7 @@ def get_site_config(sites_path: str | None = None, site_path: str | None = None)
config["db_type"] = os.environ.get("FRAPPE_DB_TYPE") or config.get("db_type") or "mariadb"
config["db_socket"] = os.environ.get("FRAPPE_DB_SOCKET") or config.get("db_socket")
config["db_host"] = os.environ.get("FRAPPE_DB_HOST") or config.get("db_host") or "127.0.0.1"
config["db_port"] = (
config["db_port"] = int(
os.environ.get("FRAPPE_DB_PORT") or config.get("db_port") or db_default_ports(config["db_type"])
)
@ -1049,7 +1049,11 @@ def clear_cache(user: str | None = None, doctype: str | None = None):
frappe.cache_manager.clear_user_cache(user)
else: # everything
# Delete ALL keys associated with this site.
frappe.cache.delete_keys("")
keys_to_delete = set(frappe.cache.get_keys(""))
for key in frappe.get_hooks("persistent_cache_keys"):
keys_to_delete.difference_update(frappe.cache.get_keys(key))
frappe.cache.delete_value(list(keys_to_delete), make_keys=False)
reset_metadata_version()
local.cache = {}
local.new_doc_templates = {}
@ -1713,34 +1717,37 @@ def setup_module_map(include_all_apps: bool = True) -> None:
"""
if include_all_apps:
local.app_modules = cache.get_value("app_modules")
local.module_app = cache.get_value("module_app")
else:
local.app_modules = cache.get_value("installed_app_modules")
local.module_app = cache.get_value("module_installed_app")
if not (local.app_modules and local.module_app):
local.module_app, local.app_modules = {}, {}
if not local.app_modules:
local.app_modules = {}
if include_all_apps:
apps = get_all_apps(with_internal_apps=True)
else:
apps = get_installed_apps(_ensure_on_bench=True)
for app in apps:
local.app_modules.setdefault(app, [])
for module in get_module_list(app):
module = scrub(module)
if module in local.module_app:
print(
f"WARNING: module `{module}` found in apps `{local.module_app[module]}` and `{app}`"
)
local.module_app[module] = app
local.app_modules[app].append(module)
if include_all_apps:
cache.set_value("app_modules", local.app_modules)
cache.set_value("module_app", local.module_app)
else:
cache.set_value("installed_app_modules", local.app_modules)
cache.set_value("module_installed_app", local.module_app)
# Init module_app (reverse mapping)
local.module_app = {}
for app, modules in local.app_modules.items():
for module in modules:
if module in local.module_app:
warnings.warn(
f"WARNING: module `{module}` found in apps `{local.module_app[module]}` and `{app}`",
stacklevel=1,
)
local.module_app[module] = app
def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):
@ -2565,8 +2572,8 @@ def _register_fault_handler():
import io
# Some libraries monkey patch stderr, we need actual fd
if isinstance(sys.stderr, io.TextIOWrapper):
faulthandler.register(signal.SIGUSR1, file=sys.stderr)
if isinstance(sys.__stderr__, io.TextIOWrapper):
faulthandler.register(signal.SIGUSR1, file=sys.__stderr__)
from frappe.utils.error import log_error

View file

@ -115,6 +115,11 @@ def delete_doc(doctype: str, name: str):
return "ok"
def get_meta(doctype: str):
frappe.only_for("All")
return frappe.get_meta(doctype)
def execute_doc_method(doctype: str, name: str, method: str | None = None):
"""Get a document from DB and execute method on it.
@ -188,6 +193,6 @@ url_rules = [
endpoint=execute_doc_method,
),
# Collection level APIs
Rule("/doctype/<doctype>/meta", methods=["GET"], endpoint=frappe.get_meta),
Rule("/doctype/<doctype>/meta", methods=["GET"], endpoint=get_meta),
Rule("/doctype/<doctype>/count", methods=["GET"], endpoint=count),
]

View file

@ -36,7 +36,12 @@ _sites_path = os.environ.get("SITES_PATH", ".")
# If gc.freeze is done then importing modules before forking allows us to share the memory
if frappe._tune_gc:
import gettext
import babel
import babel.messages
import bleach
import num2words
import pydantic
import frappe.boot
@ -74,7 +79,6 @@ def after_response_wrapper(app):
app(environ, start_response),
(
frappe.rate_limiter.update,
frappe.monitor.stop,
frappe.recorder.dump,
frappe.request.after_response.run,
frappe.destroy,
@ -409,7 +413,6 @@ def sync_database(rollback: bool) -> bool:
# update session
if session := getattr(frappe.local, "session_obj", None):
if session.update():
frappe.db.commit()
rollback = False
return rollback

View file

@ -125,6 +125,8 @@ class LoginManager:
self.set_user_info()
def login(self):
self.run_trigger("before_login")
if frappe.get_system_settings("disable_user_pass_login"):
frappe.throw(_("Login with username and password is not allowed."), frappe.AuthenticationError)
@ -419,7 +421,18 @@ def clear_cookies():
def validate_ip_address(user):
"""check if IP Address is valid"""
"""
Method to check if the user has IP restrictions enabled, and if so is the IP address they are
connecting from allowlisted.
Certain methods called from our socketio backend need direct access, and so the IP is not
checked for those
"""
if hasattr(frappe.local, "request") and frappe.local.request.path.startswith(
"/api/method/frappe.realtime."
):
return True
from frappe.core.doctype.user.user import get_restricted_ip_list
# Only fetch required fields - for perf

View file

@ -4,6 +4,8 @@
bootstrap client session
"""
import os
import frappe
import frappe.defaults
import frappe.desk.desk_page
@ -24,6 +26,7 @@ from frappe.social.doctype.energy_point_settings.energy_point_settings import (
)
from frappe.utils import add_user_info, cstr, get_system_timezone
from frappe.utils.change_log import get_versions
from frappe.utils.frappecloud import on_frappecloud
from frappe.website.doctype.web_page_view.web_page_view import is_tracking_enabled
@ -46,7 +49,6 @@ def get_bootinfo():
if frappe.session["user"] != "Guest":
bootinfo.user_info = get_user_info()
bootinfo.sid = frappe.session["sid"]
bootinfo.modules = {}
bootinfo.module_list = []
@ -110,12 +112,18 @@ def get_bootinfo():
bootinfo.marketplace_apps = get_marketplace_apps()
bootinfo.changelog_feed = get_changelog_feed_items()
if sentry_dsn := get_sentry_dsn():
bootinfo.sentry_dsn = sentry_dsn
return bootinfo
def get_letter_heads():
letter_heads = {}
for letter_head in frappe.get_all("Letter Head", fields=["name", "content", "footer"]):
if not frappe.has_permission("Letter Head"):
return letter_heads
for letter_head in frappe.get_list("Letter Head", fields=["name", "content", "footer"]):
letter_heads.setdefault(
letter_head.name, {"header": letter_head.content, "footer": letter_head.footer}
)
@ -164,7 +172,9 @@ def get_user_pages_or_reports(parent, cache=False):
page = DocType("Page")
report = DocType("Report")
if parent == "Report":
is_report = parent == "Report"
if is_report:
columns = (report.name.as_("title"), report.ref_doctype, report.report_type)
else:
columns = (page.title.as_("title"),)
@ -206,7 +216,7 @@ def get_user_pages_or_reports(parent, cache=False):
.distinct()
)
if parent == "Report":
if is_report:
pages_with_standard_roles = pages_with_standard_roles.where(report.disabled == 0)
pages_with_standard_roles = pages_with_standard_roles.run(as_dict=True)
@ -221,19 +231,20 @@ def get_user_pages_or_reports(parent, cache=False):
frappe.qb.from_(hasRole).select(Count("*")).where(hasRole.parent == parentTable.name)
)
# pages with no role are allowed
if parent == "Page":
pages_with_no_roles = (
frappe.qb.from_(parentTable)
.select(parentTable.name, parentTable.modified, *columns)
.where(no_of_roles == 0)
).run(as_dict=True)
# pages and reports with no role are allowed
rows_with_no_roles = (
frappe.qb.from_(parentTable)
.select(parentTable.name, parentTable.modified, *columns)
.where(no_of_roles == 0)
).run(as_dict=True)
for p in pages_with_no_roles:
if p.name not in has_role:
has_role[p.name] = {"modified": p.modified, "title": p.title}
for r in rows_with_no_roles:
if r.name not in has_role:
has_role[r.name] = {"modified": r.modified, "title": r.title}
if is_report:
has_role[r.name] |= {"ref_doctype": r.ref_doctype}
elif parent == "Report":
if is_report:
if not has_permission("Report", print_logs=False):
return {}
@ -442,7 +453,7 @@ def get_marketplace_apps():
apps = []
cache_key = "frappe_marketplace_apps"
if frappe.conf.developer_mode:
if frappe.conf.developer_mode or not on_frappecloud():
return apps
def get_apps_from_fc():
@ -467,3 +478,10 @@ def add_subscription_conf():
return frappe.conf.subscription
except Exception:
return ""
def get_sentry_dsn():
if not frappe.get_system_settings("enable_telemetry"):
return
return os.getenv("FRAPPE_SENTRY_DSN")

View file

@ -174,8 +174,11 @@ def purge_jobs(site=None, queue=None, event=None):
@click.command("schedule")
def start_scheduler():
"""Start scheduler process which is responsible for enqueueing the scheduled job types."""
import time
from frappe.utils.scheduler import start_scheduler
time.sleep(0.5) # Delayed start. TODO: find better way to handle this.
start_scheduler()
@ -222,12 +225,7 @@ def start_worker_pool(queue, quiet=False, num_workers=2, burst=False):
"""Start a pool of background workers"""
from frappe.utils.background_jobs import start_worker_pool
start_worker_pool(
queue=queue,
quiet=quiet,
burst=burst,
num_workers=num_workers,
)
start_worker_pool(queue=queue, quiet=quiet, burst=burst, num_workers=num_workers)
@click.command("ready-for-migration")

View file

@ -587,103 +587,6 @@ def add_db_index(context, doctype, column):
raise SiteNotSpecifiedError
@click.command("describe-database-table")
@click.option("--doctype", help="DocType to describe")
@click.option(
"--column",
multiple=True,
help="Explicitly fetch accurate cardinality from table data. This can be quite slow on large tables.",
)
@pass_context
def describe_database_table(context, doctype, column):
"""Describes various statistics about the table.
This is useful to build integration like
This includes:
1. Schema
2. Indexes
3. stats - total count of records
4. if column is specified then extra stats are generated for column:
Distinct values count in column
"""
import json
for site in context.sites:
frappe.init(site=site)
frappe.connect()
try:
data = _extract_table_stats(doctype, column)
# NOTE: Do not print anything else in this to avoid clobbering the output.
print(json.dumps(data, indent=2))
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
def _extract_table_stats(doctype: str, columns: list[str]) -> dict:
from frappe.utils import cint, cstr, get_table_name
def sql_bool(val):
return cstr(val).lower() in ("yes", "1", "true")
table = get_table_name(doctype, wrap_in_backticks=True)
schema = []
for field in frappe.db.sql(f"describe {table}", as_dict=True):
schema.append(
{
"column": field["Field"],
"type": field["Type"],
"is_nullable": sql_bool(field["Null"]),
"default": field["Default"],
}
)
def update_cardinality(column, value):
for col in schema:
if col["column"] == column:
col["cardinality"] = value
break
indexes = []
for idx in frappe.db.sql(f"show index from {table}", as_dict=True):
indexes.append(
{
"unique": not sql_bool(idx["Non_unique"]),
"cardinality": idx["Cardinality"],
"name": idx["Key_name"],
"sequence": idx["Seq_in_index"],
"nullable": sql_bool(idx["Null"]),
"column": idx["Column_name"],
"type": idx["Index_type"],
}
)
if idx["Seq_in_index"] == 1:
update_cardinality(idx["Column_name"], idx["Cardinality"])
total_rows = cint(
frappe.db.sql(
f"""select table_rows
from information_schema.tables
where table_name = 'tab{doctype}'"""
)[0][0]
)
# fetch accurate cardinality for columns by query. WARN: This can take a lot of time.
for column in columns:
cardinality = frappe.db.sql(f"select count(distinct {column}) from {table}")[0][0]
update_cardinality(column, cardinality)
return {
"table_name": table.strip("`"),
"total_rows": total_rows,
"schema": schema,
"indexes": indexes,
}
@click.command("add-system-manager")
@click.argument("email")
@click.option("--first-name")
@ -1331,7 +1234,10 @@ def start_ngrok(context, bind_tls, use_default_authtoken):
ngrok.set_auth_token(ngrok_authtoken)
port = frappe.conf.http_port or frappe.conf.webserver_port
port = frappe.conf.http_port
if not port and frappe.conf.developer_mode:
port = frappe.conf.webserver_port
tunnel = ngrok.connect(addr=str(port), host_header=site, bind_tls=bind_tls)
print(f"Public URL: {tunnel.public_url}")
print("Inspect logs at http://127.0.0.1:4040")
@ -1595,11 +1501,37 @@ def add_new_user(
update_password(user=user.name, pwd=password)
@click.command("bypass-patch")
@click.argument("patch_name")
@click.option("--yes", "-y", is_flag=True, default=False, help="Pass --yes to skip confirmation")
@pass_context
def bypass_patch(context, patch_name: str, yes: bool):
"""Bypass a patch permanently instead of migrating using the --skip-failing flag."""
from frappe.modules.patch_handler import update_patch_log
if not context.sites:
raise SiteNotSpecifiedError
if not yes:
click.confirm(
f"This will bypass the patch {patch_name!r} forever and register it as successful.\nAre you sure you want to continue?",
abort=True,
)
for site in context.sites:
frappe.init(site=site)
frappe.connect()
try:
update_patch_log(patch_name)
frappe.db.commit()
finally:
frappe.destroy()
commands = [
add_system_manager,
add_user_for_sites,
add_db_index,
describe_database_table,
backup,
drop_site,
install_app,
@ -1630,4 +1562,5 @@ commands = [
trim_tables,
trim_database,
clear_log_table,
bypass_patch,
]

View file

@ -214,15 +214,12 @@ def get_address_list(doctype, txt, filters, limit_start, limit_page_length=20, o
from frappe.www.list import get_list
user = frappe.session.user
ignore_permissions = True
if not filters:
filters = []
filters.append(("Address", "owner", "=", user))
return get_list(
doctype, txt, filters, limit_start, limit_page_length, ignore_permissions=ignore_permissions
)
return get_list(doctype, txt, filters, limit_start, limit_page_length)
def has_website_permission(doc, ptype, user, verbose=False):
@ -291,9 +288,21 @@ def address_query(doctype, txt, searchfield, start, page_len, filters):
else:
search_condition += f" or `tabAddress`.`{field}` like %(txt)s"
# Use custom title field if set
if meta.show_title_field_in_link and meta.title_field:
title = f"`tabAddress`.{meta.title_field}"
else:
title = "`tabAddress`.city"
# Get additional search fields
if searchfields:
extra_query_fields = ",".join([f"`tabAddress`.{field}" for field in searchfields])
else:
extra_query_fields = "`tabAddress`.country"
return frappe.db.sql(
"""select
`tabAddress`.name, `tabAddress`.city, `tabAddress`.country
`tabAddress`.name, {title}, {extra_query_fields}
from
`tabAddress`
join `tabDynamic Link`
@ -315,6 +324,8 @@ def address_query(doctype, txt, searchfield, start, page_len, filters):
mcond=get_match_cond(doctype),
search_condition=search_condition,
condition=condition or "",
title=title,
extra_query_fields=extra_query_fields,
),
{
"txt": "%" + txt + "%",

View file

@ -88,6 +88,17 @@ frappe.ui.form.on("Contact", {
);
}
}
if (!frm.is_dirty()) {
frm.page.add_menu_item(__("Download vCard"), function () {
window.open(
`/api/method/frappe.contacts.doctype.contact.contact.download_vcard?contact=${encodeURIComponent(
frm.doc.name
)}`,
"_blank"
);
});
}
},
validate: function (frm) {
// clear linked customer / supplier / sales partner on saving...

View file

@ -3,6 +3,7 @@
import frappe
from frappe import _
from frappe.contacts.address_and_contact import set_link_title
from frappe.core.doctype.access_log.access_log import make_access_log
from frappe.core.doctype.dynamic_link.dynamic_link import deduplicate_dynamic_links
from frappe.model.document import Document
from frappe.model.naming import append_number_if_name_exists
@ -53,7 +54,7 @@ class Contact(Document):
# concat party name if reqd
for link in self.links:
self.name = self.name + "-" + link.link_name.strip()
self.name = self.name + "-" + cstr(link.link_name).strip()
break
if frappe.db.exists("Contact", self.name):
@ -129,6 +130,9 @@ class Contact(Document):
if len([email.email_id for email in self.email_ids if email.is_primary]) > 1:
frappe.throw(_("Only one {0} can be set as primary.").format(frappe.bold("Email ID")))
if len(self.email_ids) == 1:
self.email_ids[0].is_primary = 1
primary_email_exists = False
for d in self.email_ids:
if d.is_primary == 1:
@ -167,6 +171,88 @@ class Contact(Document):
def _get_full_name(self) -> str:
return get_full_name(self.first_name, self.middle_name, self.last_name, self.company_name)
def get_vcard(self):
from vobject import vCard
from vobject.vcard import Name
vcard = vCard()
vcard.add("fn").value = self.full_name
name = Name()
if self.first_name:
name.given = self.first_name
if self.last_name:
name.family = self.last_name
if self.middle_name:
name.additional = self.middle_name
vcard.add("n").value = name
if self.designation:
vcard.add("title").value = self.designation
for row in self.email_ids:
email = vcard.add("email")
email.value = row.email_id
if row.is_primary:
email.type_param = "pref"
for row in self.phone_nos:
tel = vcard.add("tel")
tel.value = row.phone
if row.is_primary_phone:
tel.type_param = "home"
if row.is_primary_mobile_no:
tel.type_param = "cell"
return vcard
@frappe.whitelist()
def download_vcard(contact: str):
"""Download vCard for the contact"""
contact = frappe.get_doc("Contact", contact)
contact.check_permission()
vcard = contact.get_vcard()
make_access_log(doctype="Contact", document=contact.name, file_type="vcf")
frappe.response["filename"] = f"{contact.name}.vcf"
frappe.response["filecontent"] = vcard.serialize().encode("utf-8")
frappe.response["type"] = "binary"
@frappe.whitelist()
def download_vcards(contacts: str):
"""Download vCard for the contact"""
import json
from frappe.utils.data import now
contact_ids = frappe.parse_json(contacts)
vcards = []
for contact_id in contact_ids:
contact = frappe.get_doc("Contact", contact_id)
contact.check_permission()
vcard = contact.get_vcard()
vcards.append(vcard.serialize())
make_access_log(
doctype="Contact",
filters=json.dumps([["name", "in", contact_ids]], ensure_ascii=False, indent="\t"),
file_type="vcf",
)
timestamp = now()[:19] # remove milliseconds
frappe.response["filename"] = f"{timestamp} Contacts.vcf"
frappe.response["filecontent"] = "\n".join(vcards).encode("utf-8")
frappe.response["type"] = "binary"
def get_default_contact(doctype, name):
"""Return default contact for the given doctype, name."""

View file

@ -1,3 +1,11 @@
frappe.listview_settings["Contact"] = {
add_fields: ["image"],
onload: function (listview) {
listview.page.add_action_item(__("Download vCards"), function () {
const contacts = listview.get_checked_items();
open_url_post("/api/method/frappe.contacts.doctype.contact.contact.download_vcards", {
contacts: contacts.map((c) => c.name),
});
});
},
};

View file

@ -192,7 +192,7 @@ def update_comments_in_parent(reference_doctype, reference_name, _comments):
)
except Exception as e:
if frappe.db.is_column_missing(e) and getattr(frappe.local, "request", None):
if frappe.db.is_missing_column(e) and getattr(frappe.local, "request", None):
pass
elif frappe.db.is_data_too_long(e):
raise frappe.DataTooLongException

View file

@ -404,15 +404,9 @@ frappe.ui.form.on("Data Import", {
render_import_log(frm) {
frappe.call({
method: "frappe.client.get_list",
method: "frappe.core.doctype.data_import.data_import.get_import_logs",
args: {
doctype: "Data Import Log",
filters: {
data_import: frm.doc.name,
},
fields: ["success", "docname", "messages", "exception", "row_indexes"],
limit_page_length: 5000,
order_by: "log_index",
data_import: frm.doc.name,
},
callback: function (r) {
let logs = r.message;
@ -503,7 +497,7 @@ frappe.ui.form.on("Data Import", {
show_import_log(frm) {
frm.toggle_display("import_log_section", false);
if (frm.import_in_progress) {
if (frm.is_new() || frm.import_in_progress) {
return;
}

View file

@ -16,6 +16,8 @@
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"custom_delimiters",
"delimiter_options",
"status",
"submit_after_import",
"mute_emails",
@ -167,11 +169,25 @@
"hidden": 1,
"label": "Payload Count",
"read_only": 1
},
{
"default": ",;\\t|",
"depends_on": "custom_delimiters",
"description": "If your CSV uses a different delimiter, add that character here, ensuring no spaces or additional characters are included.",
"fieldname": "delimiter_options",
"fieldtype": "Data",
"label": "Delimiter Options"
},
{
"default": "0",
"fieldname": "custom_delimiters",
"fieldtype": "Check",
"label": "Custom Delimiters"
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2024-03-23 16:02:16.953820",
"modified": "2024-04-27 20:42:35.843158",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
@ -195,4 +211,4 @@
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View file

@ -27,6 +27,8 @@ class DataImport(Document):
if TYPE_CHECKING:
from frappe.types import DF
custom_delimiters: DF.Check
delimiter_options: DF.Data | None
google_sheets_url: DF.Data | None
import_file: DF.Attach | None
import_type: DF.Literal["", "Insert New Records", "Update Existing Records"]
@ -50,11 +52,16 @@ class DataImport(Document):
self.template_options = ""
self.template_warnings = ""
self.set_delimiters_flag()
self.validate_doctype()
self.validate_import_file()
self.validate_google_sheets_url()
self.set_payload_count()
def set_delimiters_flag(self):
if self.import_file:
frappe.flags.delimiter_options = self.delimiter_options or ","
def validate_doctype(self):
if self.reference_doctype in BLOCKED_DOCTYPES:
frappe.throw(_("Importing {0} is not allowed.").format(self.reference_doctype))
@ -79,6 +86,7 @@ class DataImport(Document):
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
if import_file:
self.import_file = import_file
self.set_delimiters_flag()
if google_sheets_url:
self.google_sheets_url = google_sheets_url
@ -218,6 +226,20 @@ def get_import_status(data_import_name):
return import_status
@frappe.whitelist()
def get_import_logs(data_import: str):
	"""Return up to 5000 log rows for a Data Import, in execution order.

	Read permission on the parent Data Import document is checked before
	querying the logs.
	"""
	doc = frappe.get_doc("Data Import", data_import)
	doc.check_permission("read")

	return frappe.get_all(
		"Data Import Log",
		fields=["success", "docname", "messages", "exception", "row_indexes"],
		filters={"data_import": data_import},
		limit_page_length=5000,  # cap payload size for very large imports
		order_by="log_index",
	)
def import_file(doctype, file_path, import_type, submit_after_import=False, console=False):
"""
Import documents in from CSV or XLSX using data import.

View file

@ -118,7 +118,6 @@ class Exporter:
for doc in data:
rows = []
rows = self.add_data_row(self.doctype, None, doc, rows, 0)
if table_fields:
# add child table data
for f in table_fields:
@ -144,6 +143,8 @@ class Exporter:
if df.fieldtype == "Duration":
value = format_duration(flt(value), df.hide_days)
if df.fieldtype == "Text Editor":
value = frappe.core.utils.html2text(value)
row[i] = value
return rows

View file

@ -0,0 +1,5 @@
Title ;Description ;Number ;another_number ;ID (Table Field 1) ;Child Title (Table Field 1) ;Child Description (Table Field 1) ;Child 2 Title (Table Field 2) ;Child 2 Date (Table Field 2) ;Child 2 Number (Table Field 2) ;Child Title (Table Field 1 Again) ;Child Date (Table Field 1 Again) ;Child Number (Table Field 1 Again) ;table_field_1_again.child_another_number
Test 5 ;test description ;1 ;2 ;"" ; ;"child description with ,comma and" ;child title ;14-08-2019 ;4 ;child title again ;22-09-2020 ;5 ; 7
; ; ; ; ;child title 2 ;child description 2 ;title child ;30-10-2019 ;5 ; ;22-09-2021 ; ;
;test description 2 ;1 ;2 ; ;child mandatory title ; ;title child man ; ; ;child mandatory again ; ; ;
Test 4 ;test description 3 ;4 ;5 ;"" ;child title asdf ;child description asdf ;child title asdf adsf ;15-08-2019 ;6 ;child title again asdf ;22-09-2022 ;9 ; 71
Can't render this file because it contains an unexpected character in line 2 and column 55.

View file

@ -1012,7 +1012,13 @@ class Column:
)
elif self.df.fieldtype in ("Date", "Time", "Datetime"):
# guess date/time format
# TODO: add possibility for user, to define the date format explicitly in the Data Import UI
# for example, if date column in file is in %d-%m-%y format -> 23-04-24.
# The date guesser might fail, as, this can be also parsed as %y-%m-%d, as both 23 and 24 are valid for year & for day
# This is an issue that cannot be handled automatically, no matter how we try, as it completely depends on the user's input.
# Letting the user define an explicit date format is the only reliable way to resolve the ambiguity.
self.date_format = self.guess_date_format_for_column()
if not self.date_format:
if self.df.fieldtype == "Time":
self.date_format = "%H:%M:%S"

View file

@ -50,6 +50,25 @@ class TestImporter(FrappeTestCase):
self.assertEqual(doc3.another_number, 5)
self.assertEqual(format_duration(doc3.duration), "5d 5h 45m")
def test_data_validation_semicolon_success(self):
	# The sample file uses ";" as delimiter; the default delimiter options
	# include ";", so preview parsing should split columns correctly.
	import_file = get_import_file("sample_import_file_semicolon")
	data_import = self.get_importer(doctype_name, import_file, update=True)
	doc = data_import.get_preview_from_template().get("data", [{}])
	# A quoted comma inside a field must not be treated as a column separator.
	self.assertEqual(doc[0][7], "child description with ,comma and")
	# Column count should be 14 (+1 ID)
	self.assertEqual(len(doc[0]), 15)
def test_data_validation_semicolon_failure(self):
	import_file = get_import_file("sample_import_file_semicolon")
	# Importer restricted to "," only (see get_importer_semicolon), so the ";"
	# separators in the file are not recognized as delimiters.
	data_import = self.get_importer_semicolon(doctype_name, import_file)
	doc = data_import.get_preview_from_template().get("data", [{}])
	# if semicolon delimiter detection fails, and falls back to comma,
	# column number will be less than 15 -> 2 (+1 id)
	self.assertLessEqual(len(doc[0]), 15)
def test_data_import_preview(self):
import_file = get_import_file("sample_import_file")
data_import = self.get_importer(doctype_name, import_file)
@ -138,6 +157,18 @@ class TestImporter(FrappeTestCase):
return data_import
def get_importer_semicolon(self, doctype, import_file, update=False):
	# Like get_importer, but restricts delimiter detection to "," so that
	# semicolon-delimited files fail to split into separate columns.
	data_import = frappe.new_doc("Data Import")
	data_import.import_type = "Insert New Records" if not update else "Update Existing Records"
	data_import.reference_doctype = doctype
	data_import.import_file = import_file.file_url
	# deliberately overwrite default delimiter options here, causing to fail when parsing `;`
	data_import.delimiter_options = ","
	data_import.insert()
	frappe.db.commit()  # nosemgrep
	return data_import
def create_doctype_if_not_exists(doctype_name, force=False):
if force:

View file

@ -58,9 +58,8 @@
}
],
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2024-03-23 16:02:17.334396",
"modified": "2024-04-29 18:44:17.050909",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Log",
@ -79,7 +78,8 @@
"write": 1
}
],
"read_only": 1,
"sort_field": "creation",
"sort_order": "DESC",
"states": []
}
}

View file

@ -870,8 +870,10 @@ class DocType(Document):
def make_amendable(self):
"""If is_submittable is set, add amended_from docfields."""
if self.is_submittable:
docfield_exists = [f for f in self.fields if f.fieldname == "amended_from"]
if not docfield_exists:
docfield = [f for f in self.fields if f.fieldname == "amended_from"]
if docfield:
docfield[0].options = self.name
else:
self.append(
"fields",
{
@ -903,7 +905,7 @@ class DocType(Document):
no_copy=1,
print_hide=1,
)
create_custom_field(self.name, df)
create_custom_field(self.name, df, ignore_validate=True)
def validate_nestedset(self):
if not self.get("is_tree"):
@ -1558,9 +1560,21 @@ def validate_fields(meta: Meta):
options_list.append(_option)
field.options = "\n".join(options_list)
def scrub_fetch_from(field):
if hasattr(field, "fetch_from") and field.fetch_from:
field.fetch_from = field.fetch_from.strip("\n").strip()
def validate_fetch_from(field):
	"""Normalize a docfield's fetch_from expression and reject self-referential ones.

	A fetch_from of the form "<source_field>.<target_field>" whose source_field
	equals the field's own fieldname would fetch from itself and can never
	resolve, so it is blocked with a validation error.
	"""
	if not field.get("fetch_from"):
		return
	field.fetch_from = field.fetch_from.strip()
	if "." not in field.fetch_from:
		return
	# Split only on the first dot: target may itself be a dotted path.
	source_field, _target_field = field.fetch_from.split(".", maxsplit=1)

	if source_field == field.fieldname:
		msg = _(
			"{0} contains an invalid Fetch From expression, Fetch From can't be self-referential."
		).format(_(field.label, context=field.parent))
		frappe.throw(msg, title=_("Recursive Fetch From"))
def validate_data_field_type(docfield):
if docfield.get("is_virtual"):
@ -1636,7 +1650,7 @@ def validate_fields(meta: Meta):
check_unique_and_text(meta.get("name"), d)
check_table_multiselect_option(d)
scrub_options_in_select(d)
scrub_fetch_from(d)
validate_fetch_from(d)
validate_data_field_type(d)
if not frappe.flags.in_migrate or in_ci:

View file

@ -774,6 +774,19 @@ class TestDocType(FrappeTestCase):
self.assertTrue(doctype.fields[1].in_list_view)
frappe.delete_doc("DocType", doctype.name)
def test_no_recursive_fetch(self):
recursive_dt = new_doctype(
fields=[
{
"label": "User",
"fieldname": "user",
"fieldtype": "Link",
"fetch_from": "user.email",
}
],
)
self.assertRaises(frappe.ValidationError, recursive_dt.insert)
def new_doctype(
name: str | None = None,

View file

@ -12,6 +12,9 @@ frappe.ui.form.on("Error Log", {
reference_name: frm.doc.reference_name,
});
});
frm.add_custom_button(__("Open reference document"), function () {
frappe.set_route("Form", frm.doc.reference_doctype, frm.doc.reference_name);
});
}
},
});

View file

@ -48,6 +48,8 @@
{
"fieldname": "reference_name",
"fieldtype": "Data",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Reference Name",
"read_only": 1
},
@ -70,7 +72,7 @@
"idx": 1,
"in_create": 1,
"links": [],
"modified": "2024-03-23 16:03:25.381120",
"modified": "2024-06-05 05:34:35.048489",
"modified_by": "Administrator",
"module": "Core",
"name": "Error Log",
@ -92,4 +94,4 @@
"sort_order": "DESC",
"states": [],
"title_field": "method"
}
}

View file

@ -24,6 +24,14 @@ class ErrorLog(Document):
trace_id: DF.Data | None
# end: auto-generated types
def validate(self):
	"""Coerce method/error to strings and keep `method` within its 140-char field limit.

	When the method string overflows, the full value is preserved by prepending
	it to the error text before truncation.
	"""
	self.method = str(self.method)
	self.error = str(self.error)
	overflows = len(self.method) > 140
	if overflows:
		self.error = f"{self.method}\n{self.error}"
		self.method = self.method[:140]
def onload(self):
	# Mark the log as seen on first open; skipped on read-only connections
	# where writes are not permitted.
	if not self.seen and not frappe.flags.read_only:
		self.db_set("seen", 1, update_modified=0)  # don't bump `modified`

View file

@ -82,7 +82,16 @@ frappe.ui.form.on("File", {
if (frm.doc.file_name) {
file_url = file_url.replace(/#/g, "%23");
}
window.open(file_url);
// create temporary link element to simulate a download click
var link = document.createElement("a");
link.href = file_url;
link.download = frm.doc.file_name;
link.style.display = "none";
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
},
optimize: function (frm) {

View file

@ -107,6 +107,7 @@
"fieldtype": "Link",
"hidden": 1,
"label": "Folder",
"length": 255,
"options": "File",
"read_only": 1
},
@ -189,7 +190,7 @@
"icon": "fa fa-file",
"idx": 1,
"links": [],
"modified": "2024-03-23 16:03:25.814224",
"modified": "2024-05-09 11:46:42.917146",
"modified_by": "Administrator",
"module": "Core",
"name": "File",

View file

@ -31,6 +31,7 @@ from .utils import *
exclude_from_linked_with = True
ImageFile.LOAD_TRUNCATED_IMAGES = True
URL_PREFIXES = ("http://", "https://")
FILE_ENCODING_OPTIONS = ("utf-8-sig", "utf-8", "windows-1250", "windows-1252")
class File(Document):
@ -515,10 +516,11 @@ class File(Document):
def exists_on_disk(self):
return os.path.exists(self.get_full_path())
def get_content(self) -> bytes:
def get_content(self, encodings=None) -> bytes | str:
if self.is_folder:
frappe.throw(_("Cannot get file contents of a Folder"))
# if doc was just created, content field is already populated, return it as-is
if self.get("content"):
self._content = self.content
if self.decode:
@ -531,15 +533,20 @@ class File(Document):
self.validate_file_url()
file_path = self.get_full_path()
# read the file
if encodings is None:
encodings = FILE_ENCODING_OPTIONS
with open(file_path, mode="rb") as f:
self._content = f.read()
try:
# for plain text files
self._content = self._content.decode()
except UnicodeDecodeError:
# for .png, .jpg, etc
pass
# looping will not result in slowdown, as the content is usually utf-8 or utf-8-sig
# encoded so the first iteration will be enough most of the time
for encoding in encodings:
try:
# read file with proper encoding
self._content = self._content.decode(encoding)
break
except UnicodeDecodeError:
# for .png, .jpg, etc
continue
return self._content

View file

@ -1,7 +1,6 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import base64
import json
import os
import shutil
import tempfile
@ -111,7 +110,7 @@ class TestBase64File(FrappeTestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content = base64.b64encode(test_content1.encode("utf-8"))
_file: "File" = frappe.get_doc(
_file: frappe.Document = frappe.get_doc(
{
"doctype": "File",
"file_name": "test_base64.txt",
@ -125,7 +124,7 @@ class TestBase64File(FrappeTestCase):
self.saved_file_url = _file.file_url
def test_saved_content(self):
_file = frappe.get_doc("File", {"file_url": self.saved_file_url})
_file: frappe.Document = frappe.get_doc("File", {"file_url": self.saved_file_url})
content = _file.get_content()
self.assertEqual(content, test_content1)
@ -255,6 +254,25 @@ class TestSameContent(FrappeTestCase):
limit_property.delete()
frappe.clear_cache(doctype="ToDo")
def test_utf8_bom_content_decoding(self):
utf8_bom_content = test_content1.encode("utf-8-sig")
_file: frappe.Document = frappe.get_doc(
{
"doctype": "File",
"file_name": "utf8bom.txt",
"attached_to_doctype": self.attached_to_doctype1,
"attached_to_name": self.attached_to_docname1,
"content": utf8_bom_content,
"decode": False,
}
)
_file.save()
saved_file = frappe.get_doc("File", _file.name)
file_content_decoded = saved_file.get_content(encodings=["utf-8"])
self.assertEqual(file_content_decoded[0], "\ufeff")
file_content_properly_decoded = saved_file.get_content(encodings=["utf-8-sig", "utf-8"])
self.assertEqual(file_content_properly_decoded, test_content1)
class TestFile(FrappeTestCase):
def setUp(self):

View file

@ -12,7 +12,6 @@ import frappe
from frappe import _, safe_decode
from frappe.utils import cint, cstr, encode, get_files_path, random_string, strip
from frappe.utils.file_manager import safe_b64decode
from frappe.utils.image import optimize_image
if TYPE_CHECKING:
from PIL.ImageFile import ImageFile
@ -237,8 +236,6 @@ def extract_images_from_html(doc: "Document", content: str, is_private: bool = F
content = content.split(b",")[1]
content = safe_b64decode(content)
content = optimize_image(content, mtype)
if "filename=" in headers:
filename = headers.split("filename=")[-1]
filename = safe_decode(filename).split(";", 1)[0]

View file

@ -1,6 +1,5 @@
{
"actions": [],
"allow_rename": 1,
"autoname": "field:language_code",
"creation": "2014-08-22 16:12:17.249590",
"doctype": "DocType",
@ -27,7 +26,8 @@
"fieldtype": "Data",
"in_list_view": 1,
"label": "Language Name",
"reqd": 1
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "flag",
@ -51,7 +51,7 @@
"icon": "fa fa-globe",
"in_create": 1,
"links": [],
"modified": "2024-03-23 16:03:28.477169",
"modified": "2024-06-06 18:25:01.010821",
"modified_by": "Administrator",
"module": "Core",
"name": "Language",

View file

@ -1,4 +1,8 @@
// Copyright (c) 2020, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on("Navbar Settings", {});
frappe.ui.form.on("Navbar Settings", {
	// Rebuild the cached navbar after save so logo/menu changes take effect
	// without a manual browser refresh.
	after_save(frm) {
		frappe.ui.toolbar.clear_cache();
	},
});

View file

@ -7,8 +7,6 @@
"field_order": [
"logo_section",
"app_logo",
"column_break_3",
"logo_width",
"section_break_2",
"settings_dropdown",
"help_dropdown",
@ -43,15 +41,6 @@
"fieldtype": "Section Break",
"label": "Application Logo"
},
{
"fieldname": "column_break_3",
"fieldtype": "Column Break"
},
{
"fieldname": "logo_width",
"fieldtype": "Int",
"label": "Logo Width"
},
{
"fieldname": "announcements_section",
"fieldtype": "Section Break",
@ -67,7 +56,7 @@
],
"issingle": 1,
"links": [],
"modified": "2024-03-23 17:03:30.561647",
"modified": "2024-05-01 14:09:54.587137",
"modified_by": "Administrator",
"module": "Core",
"name": "Navbar Settings",
@ -89,4 +78,4 @@
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View file

@ -19,7 +19,6 @@ class NavbarSettings(Document):
announcement_widget: DF.TextEditor | None
app_logo: DF.AttachImage | None
help_dropdown: DF.Table[NavbarItem]
logo_width: DF.Int
settings_dropdown: DF.Table[NavbarItem]
# end: auto-generated types

View file

@ -122,5 +122,8 @@ class PackageRelease(Document):
attached_to_name=self.name,
)
# Set path to tarball
self.path = file.file_url
file.flags.ignore_duplicate_entry_error = True
file.insert()

View file

@ -114,9 +114,8 @@ def generate_report(prepared_report):
instance.status = "Completed"
except Exception:
instance.status = "Error"
instance.error_message = frappe.get_traceback(with_context=True)
_save_instance(instance) # we need to ensure that error gets stored
# we need to ensure that error gets stored
_save_error(instance, error=frappe.get_traceback(with_context=True))
instance.report_end_time = frappe.utils.now()
instance.save(ignore_permissions=True)
@ -129,7 +128,10 @@ def generate_report(prepared_report):
@dangerously_reconnect_on_connection_abort
def _save_instance(instance):
def _save_error(instance, error):
	"""Persist an error state (with traceback text) on the prepared report.

	Reloads the document first so the write is based on a fresh copy rather
	than a possibly-stale in-memory one.
	"""
	instance.reload()
	instance.status = "Error"
	instance.error_message = error
	instance.save(ignore_permissions=True)

View file

@ -0,0 +1,283 @@
"""Basic DB optimizer for Frappe Framework based app.
This is largely based on heuristics and known good practices for indexing.
"""
from collections import defaultdict
from dataclasses import dataclass
from typing import TypeVar
from sql_metadata import Parser
import frappe
from frappe.utils import flt
# Any index that reads more than 30% table on average is not "useful"
INDEX_SCORE_THRESHOLD = 0.3
# Anything reading less than this percent of table is considered optimal
OPTIMIZATION_THRESHOLD = 0.1
T = TypeVar("T")
@dataclass
class DBColumn:
	"""Schema information for a single database column, as reported by
	Frappe's describe-database-table command."""

	# Column name
	name: str
	# Number of distinct values; None when no statistics are available
	cardinality: int | None
	is_nullable: bool
	default: str
	# Raw SQL type string, e.g. "varchar(140)"
	data_type: str

	@classmethod
	def from_frappe_ouput(cls, data) -> "DBColumn":
		# NOTE: "ouput" (sic) — method name kept as-is; callers use it.
		"Parse DBColumn from output of describe-database-table command in Frappe"
		return cls(
			name=data["column"],
			cardinality=data.get("cardinality"),  # may be absent -> None
			is_nullable=data["is_nullable"],
			default=data["default"],
			data_type=data["type"],
		)
@dataclass
class DBIndex:
name: str
column: str
table: str
unique: bool | None = None
cardinality: int | None = None
sequence: int = 1
nullable: bool = True
_score: float = 0.0
def __eq__(self, other: "DBIndex") -> bool:
return self.column == other.column and self.sequence == other.sequence and self.table == other.table
def __repr__(self):
return f"DBIndex(`{self.table}`.`{self.column}`)"
@classmethod
def from_frappe_ouput(cls, data, table) -> "DBIndex":
"Parse DBIndex from output of describe-database-table command in Frappe"
return cls(
name=data["name"],
table=table,
unique=data["unique"],
cardinality=data["cardinality"],
sequence=data["sequence"],
nullable=data["nullable"],
column=data["column"],
)
@dataclass
class ColumnStat:
column_name: str
avg_frequency: float
avg_length: float
nulls_ratio: float | None = None
histogram: list[float] = None
def __post_init__(self):
if not self.histogram:
self.histogram = []
@classmethod
def from_frappe_ouput(cls, data) -> "ColumnStat":
return cls(
column_name=data["column_name"],
avg_frequency=data["avg_frequency"],
avg_length=data["avg_length"],
nulls_ratio=data["nulls_ratio"],
histogram=[flt(bin) for bin in data["histogram"].split(",")] if data["histogram"] else [],
)
@dataclass
class DBTable:
	"""Schema and index information for one physical database table."""

	name: str
	total_rows: int
	schema: list[DBColumn] | None = None
	indexes: list[DBIndex] | None = None

	def __post_init__(self):
		# Normalize missing/falsy collections to fresh lists.
		self.schema = self.schema or []
		self.indexes = self.indexes or []

	def update_cardinality(self, column_stats: list[ColumnStat]) -> None:
		"""Estimate cardinality using mysql.column_stat"""
		for stat in column_stats:
			for col in self.schema:
				if col.name != stat.column_name or col.cardinality or not stat.avg_frequency:
					continue
				# avg_frequency is roughly total_rows / cardinality, so invert it.
				col.cardinality = self.total_rows / stat.avg_frequency

	@classmethod
	def from_frappe_ouput(cls, data) -> "DBTable":
		"Parse DBTable from output of describe-database-table command in Frappe"
		table_name = data["table_name"]
		return cls(
			name=table_name,
			total_rows=data["total_rows"],
			schema=[DBColumn.from_frappe_ouput(c) for c in data["schema"]],
			indexes=[DBIndex.from_frappe_ouput(i, table_name) for i in data["indexes"]],
		)

	def has_column(self, column: str) -> bool:
		"""True if a column with this exact name exists in the schema."""
		return any(col.name == column for col in self.schema)
@dataclass
class DBOptimizer:
	"""Heuristic index suggester for a single SQL query.

	Feed it the raw query plus `DBTable` stats for every table the query
	touches (via `update_table_data`), then call `suggest_index`.
	"""

	query: str  # raw query in string format
	tables: dict[str, DBTable] = None
	parsed_query: Parser = None

	def __post_init__(self):
		if not self.tables:
			self.tables = {}
		self.parsed_query = Parser(self.query)

	def tables_examined(self) -> list[str]:
		"""All table names referenced by the query."""
		return self.parsed_query.tables

	def update_table_data(self, table: DBTable):
		self.tables[table.name] = table

	def _convert_to_db_index(self, column: str) -> DBIndex:
		"""Resolve a (possibly table-qualified) column reference to a DBIndex candidate."""
		column_name, table = None, None
		if "." in column:
			# Qualified reference like "tabUser.email". Split only on the first
			# dot so pathological names with extra dots don't raise ValueError.
			table, column_name = column.split(".", 1)
		else:
			column_name = column
			# Unqualified column: attribute it to the first known table that has it.
			for table_name, db_table in self.tables.items():
				if db_table.has_column(column_name):
					table = table_name
					break
		return DBIndex(column=column_name, name=column_name, table=table)

	def _remove_existing_indexes(self, potential_indexes: list[DBIndex]) -> list[DBIndex]:
		"""Given list of potential index candidates remove the ones that already exist.

		This also removes multi-column indexes for parts that are applicable to query.
		Example: If multi-col index A+B+C exists and query utilizes A+B then
		A+B are removed from potential indexes.
		"""

		def remove_maximum_indexes(idx: list[DBIndex]):
			"""Try to remove entire index from potential indexes, if not possible, reduce one part and try again until no parts are left."""
			if not idx:
				return
			matched_sub_index = []
			for idx_part in list(idx):
				matching_part = [
					i for i in potential_indexes if i.column == idx_part.column and i.table == idx_part.table
				]
				if not matching_part:
					# pop and recurse: retry with one fewer trailing index part
					idx.pop()
					return remove_maximum_indexes(idx)
				else:
					matched_sub_index.extend(matching_part)
			# Every part matched now, lets remove those parts
			for i in matched_sub_index:
				potential_indexes.remove(i)

		# Reconstruct multi-col indexes from their per-column rows, ordered by sequence
		for table in self.tables.values():
			merged_indexes = defaultdict(list)
			for index in table.indexes:
				merged_indexes[index.name].append(index)

			for idx in merged_indexes.values():
				idx.sort(key=lambda x: x.sequence)

			for idx in merged_indexes.values():
				remove_maximum_indexes(idx)
		return potential_indexes

	def potential_indexes(self) -> list[DBIndex]:
		"""Get all columns that can potentially be indexed to speed up this query."""

		possible_indexes = []

		# Where clause columns using these operators benefit from index
		# 1. = (equality)
		# 2. >, <, >=, <=
		# 3. LIKE 'xyz%' (Prefix search)
		# 4. BETWEEN (for date[time] fields)
		# 5. IN (similar to equality)
		if not self.parsed_query.columns_dict:
			return []
		if where_columns := self.parsed_query.columns_dict.get("where"):
			# TODO: Apply some heuristics here, not all columns in where clause are actually useful
			possible_indexes.extend(where_columns)

		# Join clauses - Both sides of join should ideally be indexed. One will *usually* be primary key.
		if join_columns := self.parsed_query.columns_dict.get("join"):
			possible_indexes.extend(join_columns)

		# Top N query variant - Order by column can possibly speed up the query
		if order_by_columns := self.parsed_query.columns_dict.get("order_by"):
			if self.parsed_query.limit_and_offset:
				possible_indexes.extend(order_by_columns)

		possible_db_indexes = [self._convert_to_db_index(i) for i in possible_indexes]
		# "*" is not a real column; "name" is the primary key and already indexed.
		possible_db_indexes = [i for i in possible_db_indexes if i.column not in ("*", "name")]
		possible_db_indexes.sort(key=lambda i: (i.table, i.column))

		return self._remove_existing_indexes(possible_db_indexes)

	def suggest_index(self) -> DBIndex | None:
		"""Suggest best possible column to index given query and table stats."""
		if missing_tables := (set(self.tables_examined()) - set(self.tables.keys())):
			# NOTE: fixed typo "infomation" in user-facing message
			frappe.throw("DBTable information missing for: " + ", ".join(missing_tables))

		potential_indexes = self.potential_indexes()

		for index in list(potential_indexes):
			table = self.tables[index.table]

			# Guard: column may be missing from (stale) schema stats — can't score it.
			column = next((c for c in table.schema if c.name == index.column), None)
			if column is None:
				potential_indexes.remove(index)
				continue

			# Data type is not easily indexable - skip
			if "text" in column.data_type.lower() or "json" in column.data_type.lower():
				potential_indexes.remove(index)
				continue

			# Update cardinality from column so scoring can be done
			index.cardinality = column.cardinality

		for index in potential_indexes:
			index._score = self.index_score(index)

		potential_indexes.sort(key=lambda i: i._score)
		if (
			potential_indexes
			and (best_index := potential_indexes[0])
			and best_index._score < INDEX_SCORE_THRESHOLD
		):
			return best_index

	def index_score(self, index: DBIndex) -> float:
		"""Score an index from 0 to 1 based on usefulness.

		A score of 0.5 indicates on average this index will read 50% of the table. (e.g. checkboxes)"""
		table = self.tables[index.table]

		cardinality = index.cardinality or 2
		total_rows = table.total_rows or cardinality or 1

		# We assume most unique values are evenly distributed, this is
		# definitely not the case IRL but it should be good enough assumptions
		# Score is roughly what percentage of table we will end up reading on typical query
		rows_fetched_on_average = (table.total_rows or cardinality) / cardinality
		return rows_fetched_on_average / total_rows

View file

@ -9,6 +9,39 @@ frappe.ui.form.on("Recorder", {
frm.disable_save();
frm._sort_order = {};
frm.trigger("setup_sort");
frm.fields_dict.sql_queries.grid.grid_pagination.page_length = 500;
refresh_field("sql_queries");
frm.trigger("format_grid");
frm.add_custom_button(__("Suggest Optimizations"), () => {
frappe.xcall("frappe.core.doctype.recorder.recorder.optimize", {
recorder_id: frm.doc.name,
});
});
frappe.realtime.on("recorder-analysis-complete", () => {
frm.reload_doc();
setTimeout(() => frm.scroll_to_field("suggested_indexes"), 1500);
});
let index_grid = frm.fields_dict.suggested_indexes.grid;
index_grid.wrapper.find(".grid-footer").toggle(true);
index_grid.toggle_checkboxes(true);
index_grid.df.cannot_delete_rows = true;
index_grid.add_custom_button(__("Add Indexes"), function () {
let indexes_to_add = index_grid.get_selected_children().map((row) => {
return {
column: row.column,
table: row.table,
};
});
if (!indexes_to_add.length) {
frappe.toast(__("You need to select indexes you want to add first."));
return;
}
frappe.xcall("frappe.core.doctype.recorder.recorder.add_indexes", {
indexes: indexes_to_add,
});
});
},
setup_sort: function (frm) {
@ -22,9 +55,25 @@ frappe.ui.form.on("Recorder", {
frm._sort_order[field] = -1 * sort_order; // reverse for next click
grid.refresh();
frm.trigger("setup_sort"); // grid creates new elements again, resetup listeners.
frm.trigger("format_grid");
});
});
},
// Format duration cells of the SQL query grid as a heatmap.
format_grid(frm) {
	// Scale against the slowest query, with a floor of 20 so uniformly
	// fast recordings don't render entirely red.
	const max_duration = Math.max(20, ...frm.doc.sql_queries.map((d) => d.duration));
	const heatmap = (table, field, max) => {
		frm.fields_dict[table].grid.grid_rows.forEach((row) => {
			// Tint each cell's background proportionally to its share of `max`.
			const percent = Math.round((row.doc[field] / max) * 100);
			$(row.columns[field]).css({
				"background-color": `color-mix(in srgb, var(--bg-red) ${percent}%, var(--bg-color))`,
			});
		});
	};
	heatmap("sql_queries", "duration", max_duration);
},
});
frappe.ui.form.on("Recorder Query", "form_render", function (frm, cdt, cdn) {

View file

@ -20,6 +20,7 @@
"section_break_sgro",
"form_dict",
"section_break_9jhm",
"suggested_indexes",
"sql_queries",
"section_break_optn",
"profile"
@ -119,6 +120,13 @@
"fieldtype": "Code",
"label": "cProfile Output",
"read_only": 1
},
{
"description": "Disclaimer: These indexes are suggested based on data and queries performed during this recording. These suggestions may or may not help.",
"fieldname": "suggested_indexes",
"fieldtype": "Table",
"label": "Suggested Indexes",
"options": "Recorder Suggested Index"
}
],
"hide_toolbar": 1,
@ -126,7 +134,7 @@
"index_web_pages_for_search": 1,
"is_virtual": 1,
"links": [],
"modified": "2024-02-01 22:13:26.505174",
"modified": "2024-05-14 15:16:55.626656",
"modified_by": "Administrator",
"module": "Core",
"name": "Recorder",

View file

@ -1,10 +1,18 @@
# Copyright (c) 2023, Frappe Technologies and contributors
# For license information, please see license.txt
import json
from collections import Counter, defaultdict
import frappe
from frappe import _
from frappe.core.doctype.recorder.db_optimizer import DBOptimizer, DBTable
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
from frappe.model.document import Document
from frappe.recorder import RECORDER_REQUEST_HASH
from frappe.recorder import get as get_recorder_data
from frappe.utils import cint, evaluate_filters
from frappe.utils import cint, cstr, evaluate_filters, get_table_name
from frappe.utils.caching import redis_cache
class Recorder(Document):
@ -15,6 +23,9 @@ class Recorder(Document):
if TYPE_CHECKING:
from frappe.core.doctype.recorder_query.recorder_query import RecorderQuery
from frappe.core.doctype.recorder_suggested_index.recorder_suggested_index import (
RecorderSuggestedIndex,
)
from frappe.types import DF
cmd: DF.Data | None
@ -27,6 +38,7 @@ class Recorder(Document):
profile: DF.Code | None
request_headers: DF.Code | None
sql_queries: DF.Table[RecorderQuery]
suggested_indexes: DF.Table[RecorderSuggestedIndex]
time: DF.Datetime | None
time_in_queries: DF.Float
# end: auto-generated types
@ -95,8 +107,192 @@ def serialize_request(request):
request_headers=frappe.as_json(request.get("headers"), indent=4),
form_dict=frappe.as_json(request.get("form_dict"), indent=4),
sql_queries=request.get("calls"),
suggested_indexes=request.get("suggested_indexes"),
modified=request.get("time"),
creation=request.get("time"),
)
return request
@frappe.whitelist()
def add_indexes(indexes):
	"""Whitelisted endpoint: queue background creation of the selected indexes.

	`indexes` is a JSON-encoded list of {"table": ..., "column": ...} dicts.
	Restricted to Administrator.
	"""
	frappe.only_for("Administrator")
	for entry in json.loads(indexes):
		frappe.enqueue(_add_index, table=entry["table"], column=entry["column"])
	frappe.msgprint(_("Enqueued creation of indexes"), alert=True)
def _add_index(table, column):
	"""Background job: create a DB index on `table`.`column` and record it as a
	`search_index` property setter so the schema change survives migrations."""
	doctype = get_doctype_name(table)
	frappe.db.add_index(doctype, [column])
	make_property_setter(
		doctype,
		column,
		property="search_index",
		value="1",
		property_type="Check",
		for_doctype=False,  # Applied on docfield
	)
	frappe.msgprint(
		_("Index created successfully on column {0} of doctype {1}").format(column, doctype),
		alert=True,
		realtime=True,  # job runs in a worker; push the toast to the user's browser
	)
@frappe.whitelist()
def optimize(recorder_id: str):
	"""Kick off query analysis for a recording on the long queue. Administrator only."""
	frappe.only_for("Administrator")
	frappe.enqueue(_optimize, recorder_id=recorder_id, queue="long")
def _optimize(recorder_id):
	"""Analyze every significant query in a recording and store up to three
	index suggestions back into the recorder cache.

	Progress and results are pushed to the user via realtime events; the
	Recorder doctype is virtual, so suggestions are written to the underlying
	Redis hash rather than the database.
	"""
	record: Recorder = frappe.get_doc("Recorder", recorder_id)
	total_duration = record.time_in_queries

	# Any index with query time less than 5% of total time is not suggested
	PERCENT_DURATION_THRESHOLD_OVERALL = 0.05
	# Any query with duration less than 0.5% of total duration is not analyzed
	PERCENT_DURATION_THRESHOLD_QUERY = 0.005

	# Index suggestion -> Query duration
	index_suggestions = Counter()

	for idx, captured_query in enumerate(record.sql_queries, start=1):
		query = cstr(captured_query.query)
		frappe.publish_progress(
			idx / len(record.sql_queries) * 100,
			title="Analyzing Queries",
			doctype=record.doctype,
			docname=record.name,
			description=f"Analyzing query: {query[:140]}",
		)
		# Skip queries too cheap to matter.
		if captured_query.duration < total_duration * PERCENT_DURATION_THRESHOLD_QUERY:
			continue
		# Only DML that can benefit from an index is analyzed.
		if not query.lower().strip().startswith(("select", "update", "delete")):
			continue
		if index := _optimize_query(query):
			# Weight each suggestion by the time spent in queries it would help.
			index_suggestions[(index.table, index.column)] += captured_query.duration

	# Keep the top three suggestions that would affect a meaningful share of runtime.
	suggested_indexes = index_suggestions.most_common(3)
	suggested_indexes = [
		idx for idx in suggested_indexes if idx[1] > total_duration * PERCENT_DURATION_THRESHOLD_OVERALL
	]

	if not suggested_indexes:
		frappe.msgprint(
			_("No automatic optimization suggestions available."),
			title=_("No Suggestions"),
			realtime=True,
		)
		return

	# Persist suggestions into the cached recording payload (Recorder is virtual).
	data = frappe.cache.hget(RECORDER_REQUEST_HASH, record.name)
	data["suggested_indexes"] = [{"table": idx[0][0], "column": idx[0][1]} for idx in suggested_indexes]
	frappe.cache.hset(RECORDER_REQUEST_HASH, record.name, data)
	frappe.publish_realtime("recorder-analysis-complete", user=frappe.session.user)
	frappe.msgprint(_("Query analysis complete. Check suggested indexes."), realtime=True, alert=True)
def _optimize_query(query):
	"""Suggest the single best index for one SQL query, or None.

	Two analysis passes are required: the first collects basic schema/index
	data so the optimizer can shortlist candidate columns, the second fetches
	accurate cardinality for just those columns.
	"""
	optimizer = DBOptimizer(query=query)

	# Pass 1: basic table data, enough to shortlist candidate columns.
	for table in optimizer.tables_examined():
		stats = _fetch_table_stats(get_doctype_name(table), columns=[])
		if not stats:
			# Skip optimization if data isn't available (e.g. missing table).
			return
		optimizer.update_table_data(DBTable.from_frappe_ouput(stats))

	candidate_columns = defaultdict(list)
	for candidate in optimizer.potential_indexes():
		candidate_columns[candidate.table].append(candidate.column)

	# Pass 2: accurate cardinality for only the shortlisted columns.
	for table in optimizer.tables_examined():
		stats = _fetch_table_stats(get_doctype_name(table), columns=candidate_columns[table])
		if not stats:
			return
		optimizer.update_table_data(DBTable.from_frappe_ouput(stats))

	return optimizer.suggest_index()
def _fetch_table_stats(doctype: str, columns: list[str]) -> dict | None:
	"""Collect schema, index and row-count statistics for a doctype's table.

	Returns a dict shaped for DBTable.from_frappe_ouput, or None when the
	table doesn't exist. Accurate cardinality is computed only for `columns`
	(via COUNT(DISTINCT)), since that query can be expensive.
	"""

	def sql_bool(val):
		# MariaDB reports booleans as "YES"/"NO"/"1"/"0" strings.
		return cstr(val).lower() in ("yes", "1", "true")

	if not frappe.db.table_exists(doctype):
		return

	table = get_table_name(doctype, wrap_in_backticks=True)

	schema = []
	for field in frappe.db.sql(f"describe {table}", as_dict=True):
		schema.append(
			{
				"column": field["Field"],
				"type": field["Type"],
				"is_nullable": sql_bool(field["Null"]),
				"default": field["Default"],
			}
		)

	def update_cardinality(column, value):
		# Attach a cardinality estimate to the matching schema entry.
		for col in schema:
			if col["column"] == column:
				col["cardinality"] = value
				break

	indexes = []
	for idx in frappe.db.sql(f"show index from {table}", as_dict=True):
		indexes.append(
			{
				"unique": not sql_bool(idx["Non_unique"]),
				"cardinality": idx["Cardinality"],
				"name": idx["Key_name"],
				"sequence": idx["Seq_in_index"],
				"nullable": sql_bool(idx["Null"]),
				"column": idx["Column_name"],
				"type": idx["Index_type"],
			}
		)
		# The first part of an index carries a usable cardinality estimate.
		if idx["Seq_in_index"] == 1:
			update_cardinality(idx["Column_name"], idx["Cardinality"])

	# Approximate row count from information_schema (fast, may be stale).
	total_rows = cint(
		frappe.db.sql(
			f"""select table_rows
			from information_schema.tables
			where table_name = 'tab{doctype}'"""
		)[0][0]
	)

	# fetch accurate cardinality for columns by query. WARN: This can take A LOT of time.
	for column in columns:
		cardinality = _get_column_cardinality(table, column)
		update_cardinality(column, cardinality)

	return {
		"table_name": table.strip("`"),
		"total_rows": total_rows,
		"schema": schema,
		"indexes": indexes,
	}
@redis_cache
def _get_column_cardinality(table, column):
	# Exact distinct-value count for one column; cached in Redis since the
	# underlying scan can be expensive on large tables.
	# NOTE(review): `table` and `column` are interpolated directly into SQL —
	# callers must pass trusted identifiers only (confirm no user input reaches here).
	return frappe.db.sql(f"select count(distinct {column}) from {table}")[0][0]
def get_doctype_name(table_name: str) -> str:
	"""Map a database table name (e.g. ``"tabUser"``) to its DocType name (``"User"``).

	Names without the ``tab`` prefix are returned unchanged.
	"""
	prefix = "tab"
	if table_name.startswith(prefix):
		return table_name[len(prefix) :]
	return table_name

View file

@ -5,8 +5,10 @@ import re
import frappe
import frappe.recorder
from frappe.core.doctype.recorder.recorder import serialize_request
from frappe.core.doctype.recorder.recorder import _optimize_query, serialize_request
from frappe.query_builder.utils import db_type_is
from frappe.recorder import get as get_recorder_data
from frappe.tests.test_query_builder import run_only_if
from frappe.tests.utils import FrappeTestCase
from frappe.utils import set_request
@ -75,3 +77,20 @@ class TestRecorder(FrappeTestCase):
requests = frappe.get_all("Recorder")
request_doc = get_recorder_data(requests[0].name)
self.assertIsInstance(serialize_request(request_doc), dict)
class TestQueryOptimization(FrappeTestCase):
	# Smoke test for the recorder's index suggestion engine
	# (runs on MariaDB only — the optimizer relies on MariaDB metadata queries).
	@run_only_if(db_type_is.MARIADB)
	def test_query_optimizer(self):
		# The query filters on email (equality), creation (range) and bio
		# (leading-wildcard LIKE); the optimizer is expected to suggest an
		# index on tabUser.email for it.
		suggested_index = _optimize_query(
			"""select name from
			`tabUser` u
			join `tabHas Role` r
			on r.parent = u.name
			where email='xyz'
			and creation > '2023'
			and bio like '%xyz%'
			"""
		)
		self.assertEqual(suggested_index.table, "tabUser")
		self.assertEqual(suggested_index.column, "email")

View file

@ -33,20 +33,24 @@
"label": "Normalized Query"
},
{
"columns": 1,
"fieldname": "duration",
"fieldtype": "Float",
"in_list_view": 1,
"label": "Duration"
},
{
"columns": 1,
"fieldname": "exact_copies",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Exact Copies"
},
{
"columns": 1,
"fieldname": "normalized_copies",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Normalized Copies"
},
{
@ -84,6 +88,7 @@
"label": "SQL Explain"
},
{
"columns": 1,
"fieldname": "index",
"fieldtype": "Int",
"in_list_view": 1,
@ -94,7 +99,7 @@
"is_virtual": 1,
"istable": 1,
"links": [],
"modified": "2024-03-23 16:03:36.052756",
"modified": "2024-05-13 17:13:20.785329",
"modified_by": "Administrator",
"module": "Core",
"name": "Recorder Query",

View file

@ -0,0 +1,39 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2024-05-14 16:23:33.466465",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"table",
"column"
],
"fields": [
{
"fieldname": "table",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Table"
},
{
"fieldname": "column",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Column"
}
],
"index_web_pages_for_search": 1,
"is_virtual": 1,
"istable": 1,
"links": [],
"modified": "2024-05-14 17:43:57.231051",
"modified_by": "Administrator",
"module": "Core",
"name": "Recorder Suggested Index",
"owner": "Administrator",
"permissions": [],
"sort_field": "creation",
"sort_order": "DESC",
"states": []
}

View file

@ -0,0 +1,46 @@
# Copyright (c) 2024, Frappe Technologies and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class RecorderSuggestedIndex(Document):
	"""Child-table row holding one (table, column) index suggestion.

	Declared as a virtual DocType (``is_virtual: 1`` in its JSON definition),
	so all database persistence hooks below are intentionally disabled.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		column: DF.Data | None
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
		table: DF.Data | None
	# end: auto-generated types

	def db_insert(self, *args, **kwargs):
		# Virtual doctype: rows are never written to the database.
		raise NotImplementedError

	def load_from_db(self):
		raise NotImplementedError

	def db_update(self):
		raise NotImplementedError

	def delete(self):
		raise NotImplementedError

	@staticmethod
	def get_list(filters=None, page_length=20, **kwargs):
		# List/report views are not backed by data; return nothing.
		pass

	@staticmethod
	def get_count(filters=None, **kwargs):
		pass

	@staticmethod
	def get_stats(**kwargs):
		pass

View file

@ -71,8 +71,9 @@ class ScheduledJobType(Document):
enqueue(
"frappe.core.doctype.scheduled_job_type.scheduled_job_type.run_scheduled_job",
queue=self.get_queue_name(),
job_type=self.method,
job_type=self.method, # Not actually used, kept for logging
job_id=self.rq_job_id,
scheduled_job_type=self.name,
)
return True
else:
@ -93,7 +94,7 @@ class ScheduledJobType(Document):
@property
def rq_job_id(self):
"""Unique ID created to deduplicate jobs with single RQ call."""
return f"scheduled_job::{self.method}"
return f"scheduled_job::{self.name}"
@property
def next_execution(self):
@ -115,7 +116,7 @@ class ScheduledJobType(Document):
}
if not self.cron_format:
self.cron_format = CRON_MAP[self.frequency]
self.cron_format = CRON_MAP.get(self.frequency)
# If this is a cold start then last_execution will not be set.
# Creation is set as fallback because if very old fallback is set job might trigger
@ -157,9 +158,8 @@ class ScheduledJobType(Document):
def update_scheduler_log(self, status):
if not self.create_log:
# self.get_next_execution will work properly iff self.last_execution is properly set
if self.frequency == "All" and status == "Start":
self.db_set("last_execution", now_datetime(), update_modified=False)
frappe.db.commit()
self.db_set("last_execution", now_datetime(), update_modified=False)
frappe.db.commit()
return
if not self.scheduler_log:
self.scheduler_log = frappe.get_doc(
@ -189,10 +189,10 @@ def execute_event(doc: str):
return doc
def run_scheduled_job(job_type: str):
def run_scheduled_job(scheduled_job_type: str, job_type: str | None = None):
"""This is a wrapper function that runs a hooks.scheduler_events method"""
try:
frappe.get_doc("Scheduled Job Type", dict(method=job_type)).execute()
frappe.get_doc("Scheduled Job Type", scheduled_job_type).execute()
except Exception:
print(frappe.get_traceback())
@ -200,8 +200,8 @@ def run_scheduled_job(job_type: str):
def sync_jobs(hooks: dict | None = None):
frappe.reload_doc("core", "doctype", "scheduled_job_type")
scheduler_events = hooks or frappe.get_hooks("scheduler_events")
all_events = insert_events(scheduler_events)
clear_events(all_events)
insert_events(scheduler_events)
clear_events(scheduler_events)
def insert_events(scheduler_events: dict) -> list:
@ -262,12 +262,19 @@ def insert_single_event(frequency: str, event: str, cron_format: str | None = No
doc.insert()
def clear_events(all_events: list):
for event in frappe.get_all("Scheduled Job Type", fields=["name", "method", "server_script"]):
is_server_script = event.server_script
is_defined_in_hooks = event.method in all_events
def clear_events(scheduler_events: dict):
def event_exists(event) -> bool:
if event.server_script:
return True
if not (is_defined_in_hooks or is_server_script):
freq = frappe.scrub(event.frequency)
if freq == "cron":
return event.method in scheduler_events.get(freq, {}).get(event.cron_format, [])
else:
return event.method in scheduler_events.get(freq, [])
for event in frappe.get_all("Scheduled Job Type", fields=["*"]):
if not event_exists(event):
frappe.delete_doc("Scheduled Job Type", event.name)

View file

@ -65,6 +65,37 @@ if doc.allocated_to:
</code>
</pre>
<h5>Payment processing</h5>
<p>Payment processing events have a special state. See the <a href="https://github.com/frappe/payments/blob/develop/payments/controllers/payment_controller.py">PaymentController in Frappe Payments</a> for details.</p>
<pre>
<code>
# retrieve payment session state
ps = doc.flags.payment_session
if ps.changed: # could be an idempotent run
if ps.flags.status_changed_to in ps.flowstates.success:
doc.set_as_paid()
# custom process return values
doc.flags.payment_result = {
"message": "Thank you for your payment",
"action": {"href": "https://shop.example.com", "label": "Return to shop"},
}
if ps.flags.status_changed_to in ps.flowstates.pre_authorized:
# do something else
if ps.flags.status_changed_to in ps.flowstates.processing:
# do something else
if ps.flags.status_changed_to in ps.flowstates.declined:
# do something else
</code>
</pre>
<p>The <i>On Payment Failed</i> (<code>on_payment_failed</code>) event only transports the error message which the controller implementation had extracted from the transaction.</p>
<pre>
<code>
msg = doc.flags.payment_failure_message
doc.my_failure_message_field = msg
</code>
</pre>
<hr>
<h4>API Call</h4>

View file

@ -57,7 +57,7 @@
"fieldname": "doctype_event",
"fieldtype": "Select",
"label": "DocType Event",
"options": "Before Insert\nBefore Validate\nBefore Save\nAfter Insert\nAfter Save\nBefore Rename\nAfter Rename\nBefore Submit\nAfter Submit\nBefore Cancel\nAfter Cancel\nBefore Delete\nAfter Delete\nBefore Save (Submitted Document)\nAfter Save (Submitted Document)\nBefore Print\nOn Payment Authorization\nOn Payment Paid\nOn Payment Failed"
"options": "Before Insert\nBefore Validate\nBefore Save\nAfter Insert\nAfter Save\nBefore Rename\nAfter Rename\nBefore Submit\nAfter Submit\nBefore Cancel\nAfter Cancel\nBefore Discard\nAfter Discard\nBefore Delete\nAfter Delete\nBefore Save (Submitted Document)\nAfter Save (Submitted Document)\nBefore Print\nOn Payment Authorization\nOn Payment Paid\nOn Payment Failed\nOn Payment Charge Processed\nOn Payment Mandate Charge Processed\nOn Payment Mandate Acquisition Processed"
},
{
"depends_on": "eval:doc.script_type==='API'",
@ -151,7 +151,7 @@
"link_fieldname": "server_script"
}
],
"modified": "2024-04-08 16:18:52.901097",
"modified": "2024-05-08 03:21:54.169380",
"modified_by": "Administrator",
"module": "Core",
"name": "Server Script",
@ -175,4 +175,4 @@
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View file

@ -3,6 +3,7 @@
from functools import partial
from types import FunctionType, MethodType, ModuleType
from typing import TYPE_CHECKING
import frappe
from frappe import _
@ -16,6 +17,9 @@ from frappe.utils.safe_exec import (
safe_exec,
)
if TYPE_CHECKING:
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import ScheduledJobType
class ServerScript(Document):
# begin: auto-generated types
@ -42,6 +46,8 @@ class ServerScript(Document):
"After Submit",
"Before Cancel",
"After Cancel",
"Before Discard",
"After Discard",
"Before Delete",
"After Delete",
"Before Save (Submitted Document)",
@ -50,6 +56,9 @@ class ServerScript(Document):
"On Payment Authorization",
"On Payment Paid",
"On Payment Failed",
"On Payment Charge Processed",
"On Payment Mandate Charge Processed",
"On Payment Mandate Acquisition Processed",
]
enable_rate_limit: DF.Check
event_frequency: DF.Literal[
@ -75,12 +84,10 @@ class ServerScript(Document):
def validate(self):
frappe.only_for("Script Manager", True)
self.sync_scheduled_jobs()
self.clear_scheduled_events()
self.check_if_compilable_in_restricted_context()
def on_update(self):
self.sync_scheduler_events()
self.sync_scheduled_job_type()
def clear_cache(self):
frappe.cache.delete_value("server_script_map")
@ -90,7 +97,10 @@ class ServerScript(Document):
frappe.cache.delete_value("server_script_map")
if self.script_type == "Scheduler Event":
for job in self.scheduled_jobs:
frappe.delete_doc("Scheduled Job Type", job.name)
scheduled_job_type: "ScheduledJobType" = frappe.get_doc("Scheduled Job Type", job.name)
scheduled_job_type.stopped = True
scheduled_job_type.server_script = None
scheduled_job_type.save()
def get_code_fields(self):
return {"script": "py"}
@ -103,33 +113,38 @@ class ServerScript(Document):
fields=["name", "stopped"],
)
def sync_scheduled_jobs(self):
"""Sync Scheduled Job Type statuses if Server Script's disabled status is changed"""
if self.script_type != "Scheduler Event" or not self.has_value_changed("disabled"):
def sync_scheduled_job_type(self):
"""Create or update Scheduled Job Type documents for Scheduler Event Server Scripts"""
def get_scheduled_job() -> "ScheduledJobType":
if scheduled_script := frappe.db.get_value("Scheduled Job Type", {"server_script": self.name}):
return frappe.get_doc("Scheduled Job Type", scheduled_script)
else:
return frappe.get_doc({"doctype": "Scheduled Job Type", "server_script": self.name})
previous_script_type = self.get_value_before_save("script_type")
if previous_script_type != self.script_type and previous_script_type == "Scheduler Event":
get_scheduled_job().update({"stopped": 1}).save()
return
for scheduled_job in self.scheduled_jobs:
if bool(scheduled_job.stopped) != bool(self.disabled):
job = frappe.get_doc("Scheduled Job Type", scheduled_job.name)
job.stopped = self.disabled
job.save()
if self.script_type != "Scheduler Event" or not (
self.has_value_changed("event_frequency")
or self.has_value_changed("cron_format")
or self.has_value_changed("disabled")
or self.has_value_changed("script_type")
):
return
def sync_scheduler_events(self):
"""Create or update Scheduled Job Type documents for Scheduler Event Server Scripts"""
if not self.disabled and self.event_frequency and self.script_type == "Scheduler Event":
cron_format = self.cron_format if self.event_frequency == "Cron" else None
setup_scheduler_events(
script_name=self.name, frequency=self.event_frequency, cron_format=cron_format
)
get_scheduled_job().update(
{
"method": frappe.scrub(f"{self.name}-{self.event_frequency}"),
"frequency": self.event_frequency,
"cron_format": self.cron_format,
"stopped": self.disabled,
}
).save()
def clear_scheduled_events(self):
"""Deletes existing scheduled jobs by Server Script if self.event_frequency or self.cron_format has changed"""
if (
self.script_type == "Scheduler Event"
and (self.has_value_changed("event_frequency") or self.has_value_changed("cron_format"))
) or (self.has_value_changed("script_type") and self.script_type != "Scheduler Event"):
for scheduled_job in self.scheduled_jobs:
frappe.delete_doc("Scheduled Job Type", scheduled_job.name, delete_permanently=1)
frappe.msgprint(_("Scheduled execution for script {0} has updated").format(self.name), alert=True)
def check_if_compilable_in_restricted_context(self):
"""Check compilation errors and send them back as warnings."""
@ -245,43 +260,7 @@ class ServerScript(Document):
return items
def setup_scheduler_events(script_name: str, frequency: str, cron_format: str | None = None):
"""Creates or Updates Scheduled Job Type documents based on the specified script name and frequency
Args:
script_name (str): Name of the Server Script document
frequency (str): Event label compatible with the Frappe scheduler
"""
method = frappe.scrub(f"{script_name}-{frequency}")
scheduled_script = frappe.db.get_value("Scheduled Job Type", {"method": method})
if not scheduled_script:
frappe.get_doc(
{
"doctype": "Scheduled Job Type",
"method": method,
"frequency": frequency,
"server_script": script_name,
"cron_format": cron_format,
}
).insert()
frappe.msgprint(_("Enabled scheduled execution for script {0}").format(script_name))
else:
doc = frappe.get_doc("Scheduled Job Type", scheduled_script)
if doc.frequency == frequency:
return
doc.frequency = frequency
doc.cron_format = cron_format
doc.save()
frappe.msgprint(_("Scheduled execution for script {0} has updated").format(script_name))
def execute_api_server_script(script=None, *args, **kwargs):
def execute_api_server_script(script: ServerScript, *args, **kwargs):
# These are only added for compatibility with rate limiter.
del args
del kwargs

View file

@ -15,6 +15,8 @@ EVENT_MAP = {
"on_submit": "After Submit",
"before_cancel": "Before Cancel",
"on_cancel": "After Cancel",
"before_discard": "Before Discard",
"on_discard": "After Discard",
"on_trash": "Before Delete",
"after_delete": "After Delete",
"before_update_after_submit": "Before Save (Submitted Document)",
@ -23,6 +25,9 @@ EVENT_MAP = {
"on_payment_paid": "On Payment Paid",
"on_payment_failed": "On Payment Failed",
"on_payment_authorized": "On Payment Authorization",
"on_payment_charge_processed": "On Payment Charge Processed",
"on_payment_mandated_charge_processed": "On Payment Mandate Charge Processed",
"on_payment_mandate_acquisition_processed": "On Payment Mandate Acquisition Processed",
}
@ -41,7 +46,7 @@ def run_server_script_for_doc_event(doc, event):
if scripts:
# run all scripts for this doctype + event
for script_name in scripts:
frappe.get_doc("Server Script", script_name).execute_doc(doc)
frappe.get_cached_doc("Server Script", script_name).execute_doc(doc)
def get_server_script_map():

View file

@ -3,7 +3,8 @@
import requests
import frappe
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import sync_jobs
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import ScheduledJobType, sync_jobs
from frappe.core.doctype.server_script.server_script import ServerScript
from frappe.frappeclient import FrappeClient, FrappeException
from frappe.tests.utils import FrappeTestCase
from frappe.utils import get_site_url
@ -342,3 +343,44 @@ frappe.qb.from_(todo).select(todo.name).where(todo.name == "{todo.name}").run()
updated_cron_job_name = frappe.db.get_value("Scheduled Job Type", {"server_script": cron_script.name})
updated_cron_job = frappe.get_doc("Scheduled Job Type", updated_cron_job_name)
self.assertEqual(updated_cron_job.next_execution.day, 2)
def test_server_script_state_changes(self):
script: ServerScript = frappe.get_doc(
doctype="Server Script",
name="scheduled_script_state_change",
script_type="Scheduler Event",
script="""frappe.flags = {"test": True}""",
event_frequency="Hourly",
).insert()
job: ScheduledJobType = frappe.get_doc("Scheduled Job Type", {"server_script": script.name})
script.script_type = "API"
script.save()
self.assertTrue(job.reload().stopped)
script.script_type = "Scheduler Event"
script.save()
self.assertFalse(job.reload().stopped)
# Change to different frequency
script.event_frequency = "Monthly"
script.save()
self.assertEqual(job.reload().frequency, "Monthly")
# change cron expr
script.event_frequency = "Cron"
script.cron_format = "* * * * *"
script.save()
self.assertEqual(job.reload().frequency, "Cron")
self.assertEqual(job.reload().cron_format, script.cron_format)
# manually disable
script.disabled = 1
script.save()
self.assertTrue(job.reload().stopped)
script.disabled = 0
script.save()
self.assertFalse(job.reload().stopped)

View file

@ -5,7 +5,7 @@ import json
import frappe
from frappe.model.document import Document
from frappe.translate import MERGED_TRANSLATION_KEY, USER_TRANSLATION_KEY, get_translator_url
from frappe.translate import MERGED_TRANSLATION_KEY, USER_TRANSLATION_KEY
from frappe.utils import is_html, strip_html_tags

View file

@ -11,6 +11,7 @@ from werkzeug.http import parse_cookie
import frappe
import frappe.exceptions
from frappe.core.doctype.user.user import (
User,
handle_password_test_fail,
reset_password,
sign_up,
@ -475,7 +476,7 @@ def test_user(
try:
first_name = first_name or frappe.generate_hash()
email = email or (first_name + "@example.com")
user = frappe.new_doc(
user: User = frappe.new_doc(
"User",
send_welcome_email=0,
email=email,

View file

@ -280,7 +280,7 @@ frappe.ui.form.on("User", {
frm.set_df_property("enabled", "read_only", 0);
}
if (frappe.session.user !== "Administrator") {
if (frm.doc.name !== "Administrator") {
frm.toggle_enable("email", frm.is_new());
}
},

View file

@ -157,9 +157,9 @@ class User(Document):
self.password_strength_test()
if self.name not in STANDARD_USERS:
self.validate_email_type(self.email)
self.email = self.name
self.validate_email_type(self.name)
self.add_system_manager_role()
self.move_role_profile_name_to_role_profiles()
self.populate_role_profile_roles()
self.check_roles_added()
@ -279,9 +279,6 @@ class User(Document):
if not cint(self.enabled) and self.name in STANDARD_USERS:
frappe.throw(_("User {0} cannot be disabled").format(self.name))
if not cint(self.enabled):
self.a_system_manager_should_exist()
# clear sessions if disabled
if not cint(self.enabled) and getattr(frappe.local, "login_manager", None):
frappe.local.login_manager.logout(user=self.name)
@ -289,38 +286,6 @@ class User(Document):
# toggle notifications based on the user's status
toggle_notifications(self.name, enable=cint(self.enabled), ignore_permissions=True)
def add_system_manager_role(self):
if self.is_system_manager_disabled():
return
# if adding system manager, do nothing
if not cint(self.enabled) or (
"System Manager" in [user_role.role for user_role in self.get("roles")]
):
return
if (
self.name not in STANDARD_USERS
and self.user_type == "System User"
and not self.get_other_system_managers()
and cint(frappe.db.get_single_value("System Settings", "setup_complete"))
):
msgprint(_("Adding System Manager to this User as there must be atleast one System Manager"))
self.append("roles", {"doctype": "Has Role", "role": "System Manager"})
if self.name == "Administrator":
# Administrator should always have System Manager Role
self.extend(
"roles",
[
{"doctype": "Has Role", "role": "System Manager"},
{"doctype": "Has Role", "role": "Administrator"},
],
)
def is_system_manager_disabled(self):
return frappe.db.get_value("Role", {"name": "System Manager"}, ["disabled"])
def email_new_password(self, new_password=None):
if new_password and not self.flags.in_insert:
_update_password(user=self.name, pwd=new_password, logout_all_sessions=self.logout_all_sessions)
@ -430,20 +395,6 @@ class User(Document):
return link
def get_other_system_managers(self):
user_doctype = DocType("User").as_("user")
user_role_doctype = DocType("Has Role").as_("user_role")
return (
frappe.qb.from_(user_doctype)
.from_(user_role_doctype)
.select(user_doctype.name)
.where(user_role_doctype.role == "System Manager")
.where(user_doctype.enabled == 1)
.where(user_role_doctype.parent == user_doctype.name)
.where(user_role_doctype.parent.notin(["Administrator", self.name]))
.limit(1)
).run()
def get_fullname(self):
"""get first_name space last_name"""
return (self.first_name or "") + (self.first_name and " " or "") + (self.last_name or "")
@ -528,20 +479,11 @@ class User(Document):
retry=3,
)
def a_system_manager_should_exist(self):
if self.is_system_manager_disabled():
return
if not self.get_other_system_managers():
throw(_("There should remain at least one System Manager"))
def on_trash(self):
frappe.clear_cache(user=self.name)
if self.name in STANDARD_USERS:
throw(_("User {0} cannot be deleted").format(self.name))
self.a_system_manager_should_exist()
# disable the user and log him/her out
self.enabled = 0
if getattr(frappe.local, "login_manager", None):
@ -601,6 +543,10 @@ class User(Document):
frappe.throw(_("You can disable the user instead of deleting it."), frappe.LinkExistsError)
def before_rename(self, old_name, new_name, merge=False):
# if merging, delete the old user notification settings
if merge:
frappe.delete_doc("Notification Settings", old_name, ignore_permissions=True)
frappe.clear_cache(user=old_name)
self.validate_rename(old_name, new_name)

View file

@ -335,10 +335,14 @@ frappe.ui.form.on("Customize Form Field", {
},
});
let parenttype, parent; // used in the form events for the child tables: links, actions and states
// can't delete standard links
frappe.ui.form.on("DocType Link", {
before_links_remove: function (frm, doctype, name) {
let row = frappe.get_doc(doctype, name);
parenttype = row.parenttype; // used in the event links_remove
parent = row.parent; // used in the event links_remove
if (!(row.custom || row.__islocal)) {
frappe.msgprint(__("Cannot delete standard link. You can hide it if you want"));
throw "cannot delete standard link";
@ -348,12 +352,19 @@ frappe.ui.form.on("DocType Link", {
let f = frappe.model.get_doc(cdt, cdn);
f.custom = 1;
},
links_remove: function (frm, doctype, name) {
// replicate the changed rows from the browser's copy of the parent doc to the current 'Customize Form' doc
let parent_doc = locals[parenttype][parent];
frm.doc.links = parent_doc.links;
},
});
// can't delete standard actions
frappe.ui.form.on("DocType Action", {
before_actions_remove: function (frm, doctype, name) {
let row = frappe.get_doc(doctype, name);
parenttype = row.parenttype; // used in the event actions_remove
parent = row.parent; // used in the event actions_remove
if (!(row.custom || row.__islocal)) {
frappe.msgprint(__("Cannot delete standard action. You can hide it if you want"));
throw "cannot delete standard action";
@ -363,12 +374,19 @@ frappe.ui.form.on("DocType Action", {
let f = frappe.model.get_doc(cdt, cdn);
f.custom = 1;
},
actions_remove: function (frm, doctype, name) {
// replicate the changed rows from the browser's copy of the parent doc to the current 'Customize Form' doc
let parent_doc = locals[parenttype][parent];
frm.doc.actions = parent_doc.actions;
},
});
// can't delete standard states
frappe.ui.form.on("DocType State", {
before_states_remove: function (frm, doctype, name) {
let row = frappe.get_doc(doctype, name);
parenttype = row.parenttype; // used in the event states_remove
parent = row.parent; // used in the event states_remove
if (!(row.custom || row.__islocal)) {
frappe.msgprint(__("Cannot delete standard document state."));
throw "cannot delete standard document state";
@ -378,6 +396,11 @@ frappe.ui.form.on("DocType State", {
let f = frappe.model.get_doc(cdt, cdn);
f.custom = 1;
},
states_remove: function (frm, doctype, name) {
// replicate the changed rows from the browser's copy of the parent doc to the current 'Customize Form' doc
let parent_doc = locals[parenttype][parent];
frm.doc.states = parent_doc.states;
},
});
frappe.customize_form.save_customization = function (frm) {

View file

@ -32,7 +32,7 @@ from frappe.monitor import get_trace_id
from frappe.query_builder.functions import Count
from frappe.utils import CallbackManager, cint, get_datetime, get_table_name, getdate, now, sbool
from frappe.utils import cast as cast_fieldtype
from frappe.utils.deprecations import deprecation_warning
from frappe.utils.deprecations import deprecated, deprecation_warning
if TYPE_CHECKING:
from psycopg2 import connection as PostgresConnection
@ -196,7 +196,7 @@ class Database:
"""
if isinstance(query, MySQLQueryBuilder | PostgreSQLQueryBuilder):
frappe.log("Use run method to execute SQL queries generated by Query Engine")
frappe.log("Use run method to execute SQL queries generated by Query Builder")
debug = debug or getattr(self, "debug", False)
query = str(query)
@ -430,7 +430,7 @@ class Database:
if query and is_query_type(query, ("commit", "rollback")):
self.transaction_writes = 0
if query[:6].lower() in ("update", "insert", "delete"):
if query.lstrip()[:6].lower() in ("update", "insert", "delete"):
self.transaction_writes += 1
if self.transaction_writes > self.MAX_WRITES_PER_TRANSACTION:
if self.auto_commit_on_many_writes:
@ -1105,7 +1105,7 @@ class Database:
"""Return True if at least one row exists."""
return frappe.get_all(doctype, limit=1, order_by=None, as_list=True)
def exists(self, dt, dn=None, cache=False):
def exists(self, dt, dn=None, cache=False, *, debug=False):
"""Return the document name of a matching document, or None.
Note: `cache` only works if `dt` and `dn` are of type `str`.
@ -1138,7 +1138,7 @@ class Database:
dt = dt.copy() # don't modify the original dict
dt, dn = dt.pop("doctype"), dt
return self.get_value(dt, dn, ignore=True, cache=cache, order_by=None)
return self.get_value(dt, dn, ignore=True, cache=cache, order_by=None, debug=debug)
def count(self, dt, filters=None, debug=False, cache=False, distinct: bool = True):
"""Return `COUNT(*)` for given DocType and filters."""
@ -1244,8 +1244,9 @@ class Database:
raise NotImplementedError
@staticmethod
@deprecated
def is_column_missing(e):
raise NotImplementedError
return frappe.db.is_missing_column(e)
def get_descendants(self, doctype, name):
"""Return descendants of the group node in tree"""
@ -1267,7 +1268,7 @@ class Database:
def delete(self, doctype: str, filters: dict | list | None = None, debug=False, **kwargs):
"""Delete rows from a table in site which match the passed filters. This
does trigger DocType hooks. Simply runs a DELETE query in the database.
does not trigger DocType hooks. Simply runs a DELETE query in the database.
Doctype name can be passed directly, it will be pre-pended with `tab`.
"""

View file

@ -50,25 +50,38 @@ class DbManager:
return self.db.sql("SHOW DATABASES", pluck=True)
@staticmethod
def restore_database(verbose, target, source, user, password):
def restore_database(verbose: bool, target: str, source: str, user: str, password: str) -> None:
"""
Function to restore the given SQL file to the target database.
:param target: The database to restore to.
:param source: The SQL dump to restore
:param user: The database username
:param password: The database password
:return: Nothing
"""
import shlex
from shutil import which
from frappe.database import get_command
from frappe.utils import execute_in_shell
# Ensure that the entire process fails if any part of the pipeline fails
command = ["set -o pipefail;"]
# Handle gzipped backups
if source.endswith(".gz"):
if gzip := which("gzip"):
command.extend([gzip, "-cd", source, "|"])
source = []
else:
raise Exception("`gzip` not installed")
else:
source = ["<", source]
command.extend(["cat", source, "|"])
# Newer versions of MariaDB add in a line that'll break on older versions, so remove it
command.extend(["sed", r"'/\/\*!999999\\- enable the sandbox mode \*\//d'", "|"])
# Generate the restore command
bin, args, bin_name = get_command(
socket=frappe.conf.db_socket,
host=frappe.conf.db_host,
@ -84,6 +97,6 @@ class DbManager:
)
command.append(bin)
command.append(shlex.join(args))
command.extend(source)
execute_in_shell(" ".join(command), check_exit_code=True, verbose=verbose)
frappe.cache.delete_keys("") # Delete all keys associated with this site.

View file

@ -532,6 +532,9 @@ class MariaDBDatabase(MariaDBConnectionUtil, MariaDBExceptionUtil, Database):
from pymysql.cursors import SSCursor
try:
if not self._conn:
self.connect()
original_cursor = self._cursor
new_cursor = self._cursor = self._conn.cursor(SSCursor)
yield

View file

@ -200,7 +200,12 @@ class DbColumn:
self.not_nullable = not_nullable
def get_definition(self, for_modification=False):
column_def = get_definition(self.fieldtype, precision=self.precision, length=self.length)
column_def = get_definition(
self.fieldtype,
precision=self.precision,
length=self.length,
options=self.options,
)
if not column_def:
return column_def
@ -356,9 +361,20 @@ def validate_column_length(fieldname):
frappe.throw(_("Fieldname is limited to 64 characters ({0})").format(fieldname))
def get_definition(fieldtype, precision=None, length=None):
def get_definition(fieldtype, precision=None, length=None, *, options=None):
d = frappe.db.type_map.get(fieldtype)
if (
fieldtype == "Link"
and options
# XXX: This might not trigger if referred doctype is not yet created
# This is largely limitation of how migration happens though.
# Maybe we can sort by creation and then modified?
and frappe.db.exists("DocType", options)
and frappe.get_meta(options).autoname == "UUID"
):
d = ("uuid", None)
if not d:
return

View file

@ -416,7 +416,7 @@ def get_workspace_sidebar_items():
has_access = "Workspace Manager" in frappe.get_roles()
# don't get domain restricted pages
blocked_modules = frappe.get_doc("User", frappe.session.user).get_blocked_modules()
blocked_modules = frappe.get_cached_doc("User", frappe.session.user).get_blocked_modules()
blocked_modules.append("Dummy Module")
# adding None to allowed_domains to include pages without domain restriction
@ -469,7 +469,11 @@ def get_workspace_sidebar_items():
pages = [frappe.get_doc("Workspace", "Welcome Workspace").as_dict()]
pages[0]["label"] = _("Welcome Workspace")
return {"pages": pages, "has_access": has_access}
return {
"pages": pages,
"has_access": has_access,
"has_create_access": frappe.has_permission(doctype="Workspace", ptype="create"),
}
def get_table_with_counts():

View file

@ -71,19 +71,17 @@ def get_permission_query_conditions(user):
if not user:
user = frappe.session.user
if user == "Administrator":
if user == "Administrator" or "System Manager" in frappe.get_roles(user):
return
roles = frappe.get_roles(user)
if "System Manager" in roles:
return None
module_not_set = " ifnull(`tabDashboard`.`module`, '') = '' "
allowed_modules = [
frappe.db.escape(module.get("module_name")) for module in get_modules_from_all_apps_for_user()
]
return "`tabDashboard`.`module` in ({allowed_modules}) or `tabDashboard`.`module` is NULL".format(
allowed_modules=",".join(allowed_modules)
)
if not allowed_modules:
return module_not_set
return f" `tabDashboard`.`module` in ({','.join(allowed_modules)}) or {module_not_set} "
@frappe.whitelist()

View file

@ -1,7 +1,23 @@
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import frappe
from frappe.config import get_modules_from_all_apps_for_user
from frappe.core.doctype.user.test_user import test_user
from frappe.tests.utils import FrappeTestCase
class TestDashboard(FrappeTestCase):
pass
def test_permission_query(self):
for user in ["Administrator", "test@example.com"]:
with self.set_user(user):
frappe.get_list("Dashboard")
with test_user(roles=["_Test Role"]) as user:
with self.set_user(user.name):
frappe.get_list("Dashboard")
with self.set_user("Administrator"):
all_modules = get_modules_from_all_apps_for_user("Administrator")
for module in all_modules:
user.append("block_modules", {"module": module.get("module_name")})
user.save()
frappe.get_list("Dashboard")

View file

@ -243,6 +243,7 @@
"fieldname": "is_standard",
"fieldtype": "Check",
"label": "Is Standard",
"no_copy": 1,
"read_only_depends_on": "eval: !frappe.boot.developer_mode"
},
{
@ -288,7 +289,7 @@
}
],
"links": [],
"modified": "2024-03-23 16:02:16.230433",
"modified": "2024-06-03 13:29:57.960271",
"modified_by": "Administrator",
"module": "Desk",
"name": "Dashboard Chart",

View file

@ -71,7 +71,7 @@ frappe.ui.form.on("Event", {
frappe.ui.form.on("Event Participants", {
event_participants_remove: function (frm, cdt, cdn) {
if (cdt && !cdn.includes("New Event Participants")) {
if (cdt && !cdn.includes("new-event-participants")) {
frappe.call({
type: "POST",
method: "frappe.desk.doctype.event.event.delete_communication",

View file

@ -147,7 +147,8 @@ def update_order_for_single_card(board_name, docname, from_colname, to_colname,
if from_colname == to_colname:
from_col_order = to_col_order
to_col_order.insert(new_index, from_col_order.pop(old_index))
if from_col_order:
to_col_order.insert(new_index, from_col_order.pop(old_index))
# save updated order
board.columns[from_col_idx].order = frappe.as_json(from_col_order)

View file

@ -31,7 +31,7 @@
"fieldtype": "Select",
"in_list_view": 1,
"label": "Indicator",
"options": "Blue\nCyan\nGray\nGreen\nLight Blue\nOrange\nPink\nPurple\nRed\nRed\nYellow"
"options": "Blue\nCyan\nGray\nGreen\nLight Blue\nOrange\nPink\nPurple\nRed\nYellow"
},
{
"fieldname": "order",

View file

@ -15,7 +15,7 @@ class KanbanBoardColumn(Document):
column_name: DF.Data | None
indicator: DF.Literal[
"Blue", "Cyan", "Gray", "Green", "Light Blue", "Orange", "Pink", "Purple", "Red", "Red", "Yellow"
"Blue", "Cyan", "Gray", "Green", "Light Blue", "Orange", "Pink", "Purple", "Red", "Yellow"
]
order: DF.Code | None
parent: DF.Data

View file

@ -273,6 +273,9 @@ frappe.ui.form.on("Number Card", {
}
table.on("click", () => {
if (!frappe.boot.developer_mode && frm.doc.is_standard) {
frappe.throw(__("Cannot edit filters for standard number cards"));
}
let dialog = new frappe.ui.Dialog({
title: __("Set Filters"),
fields: fields.filter((f) => !is_dynamic_filter(f)),
@ -357,6 +360,9 @@ frappe.ui.form.on("Number Card", {
);
frm.dynamic_filter_table.on("click", () => {
if (!frappe.boot.developer_mode && frm.doc.is_standard) {
frappe.throw(__("Cannot edit filters for standard number cards"));
}
let dialog = new frappe.ui.Dialog({
title: __("Set Dynamic Filters"),
fields: fields,

View file

@ -56,6 +56,7 @@ frappe.ui.form.on("System Health Report", {
val > 3 &&
frm.doc.total_outgoing_emails > 3 &&
val / frm.doc.total_outgoing_emails > 0.1,
oldest_unscheduled_job: (val) => !!val,
"queue_status.pending_jobs": (val) => val > 50,
"background_workers.utilization": (val) => val > 70,
"background_workers.failed_jobs": (val) => val > 50,
@ -72,6 +73,9 @@ frappe.ui.form.on("System Health Report", {
document.head.appendChild(style);
const update_fields = () => {
if (!frappe.get_route().includes(frm.doc.name)) {
clearInterval(interval);
}
Object.entries(conditions).forEach(([field, condition]) => {
try {
if (field.includes(".")) {
@ -93,6 +97,6 @@ frappe.ui.form.on("System Health Report", {
};
update_fields();
setInterval(update_fields, 1000);
const interval = setInterval(update_fields, 1000);
},
});

View file

@ -17,6 +17,9 @@
"background_workers",
"scheduler_section",
"scheduler_status",
"column_break_bxog",
"oldest_unscheduled_job",
"section_break_vpuw",
"failing_scheduled_jobs",
"database_section",
"database",
@ -368,6 +371,7 @@
{
"fieldname": "scheduler_section",
"fieldtype": "Section Break",
"hide_border": 1,
"label": "Scheduler"
},
{
@ -375,6 +379,20 @@
"fieldtype": "Table",
"label": "Failing Scheduled Jobs (last 7 days)",
"options": "System Health Report Failing Jobs"
},
{
"fieldname": "column_break_bxog",
"fieldtype": "Column Break"
},
{
"fieldname": "oldest_unscheduled_job",
"fieldtype": "Link",
"label": "Oldest Unscheduled Job",
"options": "Scheduled Job Type"
},
{
"fieldname": "section_break_vpuw",
"fieldtype": "Section Break"
}
],
"hide_toolbar": 1,
@ -382,7 +400,7 @@
"is_virtual": 1,
"issingle": 1,
"links": [],
"modified": "2024-04-22 11:47:52.194784",
"modified": "2024-05-02 13:32:16.495750",
"modified_by": "Administrator",
"module": "Desk",
"name": "System Health Report",

View file

@ -19,13 +19,28 @@ import functools
import os
from collections import defaultdict
from collections.abc import Callable
from contextlib import contextmanager
import frappe
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import ScheduledJobType
from frappe.model.document import Document
from frappe.utils.background_jobs import get_queue, get_queue_list
from frappe.utils.background_jobs import get_queue, get_queue_list, get_redis_conn
from frappe.utils.caching import redis_cache
from frappe.utils.data import add_to_date
from frappe.utils.scheduler import get_scheduler_status
from frappe.utils.scheduler import get_scheduler_status, get_scheduler_tick
@contextmanager
def no_wait(func):
	"""Temporarily disable tenacity's retry waiting on `func`.

	Replaces the retry stop condition with `stop_after_attempt(1)` so the
	decorated callable fails fast instead of blocking the health check,
	restoring the original stop condition on exit.

	Also usable as a decorator via contextlib's ContextDecorator support.
	"""
	from tenacity import stop_after_attempt

	# Save and patch BEFORE entering try: if `func.retry.stop` itself raised
	# inside the try block, `original_stop` would be unbound and the finally
	# clause would raise NameError, masking the real error.
	original_stop = func.retry.stop
	func.retry.stop = stop_after_attempt(1)
	try:
		yield
	finally:
		func.retry.stop = original_stop
def health_check(step: str):
@ -37,8 +52,11 @@ def health_check(step: str):
try:
return func(*args, **kwargs)
except Exception as e:
frappe.log(frappe.get_traceback())
# nosemgrep
frappe.msgprint(f"System Health check step {frappe.bold(step)} failed: {e}", alert=True)
frappe.msgprint(
f"System Health check step {frappe.bold(step)} failed: {e}", alert=True, indicator="red"
)
return wrapper
@ -86,6 +104,7 @@ class SystemHealthReport(Document):
handled_emails: DF.Int
last_10_active_users: DF.Code | None
new_users: DF.Int
oldest_unscheduled_job: DF.Link | None
onsite_backups: DF.Int
pending_emails: DF.Int
private_files_size: DF.Float
@ -126,7 +145,10 @@ class SystemHealthReport(Document):
self.fetch_user_stats()
@health_check("Background Jobs")
@no_wait(get_redis_conn)
def fetch_background_jobs(self):
self.background_jobs_check = "failed"
# This just checks connection life
self.test_job_id = frappe.enqueue("frappe.ping", at_front=True).id
self.background_jobs_check = "queued"
self.scheduler_status = get_scheduler_status().get("status")
@ -184,6 +206,18 @@ class SystemHealthReport(Document):
for job in failing_jobs:
self.append("failing_scheduled_jobs", job)
threshold = add_to_date(None, seconds=-30 * get_scheduler_tick(), as_datetime=True)
for job_type in frappe.get_all(
"Scheduled Job Type",
filters={"stopped": 0, "last_execution": ("<", threshold)},
fields="*",
order_by="last_execution asc",
):
job_type: ScheduledJobType = frappe.get_doc(doctype="Scheduled Job Type", **job_type)
if job_type.is_event_due():
self.oldest_unscheduled_job = job_type.name
break
@health_check("Emails")
def fetch_email_stats(self):
threshold = add_to_date(None, days=-7, as_datetime=True)
@ -292,6 +326,7 @@ class SystemHealthReport(Document):
@frappe.whitelist()
@no_wait(get_redis_conn)
def get_job_status(job_id: str | None = None):
frappe.only_for("System Manager")
try:

View file

@ -28,7 +28,7 @@ def check_user_tags(dt):
doctype = DocType(dt)
frappe.qb.from_(doctype).select(doctype._user_tags).limit(1).run()
except Exception as e:
if frappe.db.is_column_missing(e):
if frappe.db.is_missing_column(e):
DocTags(dt).setup()
@ -118,7 +118,7 @@ class DocTags:
doc = frappe.get_doc(self.dt, dn)
update_tags(doc, tags)
except Exception as e:
if frappe.db.is_column_missing(e):
if frappe.db.is_missing_column(e):
if not tags:
# no tags, nothing to do
return

View file

@ -90,15 +90,17 @@ class ToDo(Document):
return
try:
assignments = frappe.get_all(
assignments = frappe.db.get_values(
"ToDo",
filters={
{
"reference_type": self.reference_type,
"reference_name": self.reference_name,
"status": ("not in", ("Cancelled", "Closed")),
"allocated_to": ("is", "set"),
},
pluck="allocated_to",
"allocated_to",
pluck=True,
for_update=True,
)
assignments.reverse()
@ -106,7 +108,7 @@ class ToDo(Document):
frappe.db.set_single_value(
self.reference_type,
"_assign",
json.dumps(assignments),
json.dumps(assignments) if assignments else "",
update_modified=False,
)
else:
@ -114,7 +116,7 @@ class ToDo(Document):
self.reference_type,
self.reference_name,
"_assign",
json.dumps(assignments),
json.dumps(assignments) if assignments else "",
update_modified=False,
)
@ -123,7 +125,7 @@ class ToDo(Document):
# no table
return
elif frappe.db.is_column_missing(e):
elif frappe.db.is_missing_column(e):
from frappe.database.schema import add_column
add_column(self.reference_type, "_assign", "Text")

View file

@ -219,7 +219,7 @@
],
"in_create": 1,
"links": [],
"modified": "2024-03-23 16:04:05.604044",
"modified": "2024-05-30 17:30:36.791171",
"modified_by": "Administrator",
"module": "Desk",
"name": "Workspace",
@ -237,6 +237,18 @@
"role": "Workspace Manager",
"share": 1,
"write": 1
},
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Desk User",
"share": 1,
"write": 1
}
],
"sort_field": "creation",

View file

@ -23,9 +23,7 @@ class Workspace(Document):
if TYPE_CHECKING:
from frappe.core.doctype.has_role.has_role import HasRole
from frappe.desk.doctype.workspace_chart.workspace_chart import WorkspaceChart
from frappe.desk.doctype.workspace_custom_block.workspace_custom_block import (
WorkspaceCustomBlock,
)
from frappe.desk.doctype.workspace_custom_block.workspace_custom_block import WorkspaceCustomBlock
from frappe.desk.doctype.workspace_link.workspace_link import WorkspaceLink
from frappe.desk.doctype.workspace_number_card.workspace_number_card import WorkspaceNumberCard
from frappe.desk.doctype.workspace_quick_list.workspace_quick_list import WorkspaceQuickList
@ -251,6 +249,12 @@ def new_page(new_page):
):
frappe.throw(_("Cannot create private workspace of other users"), frappe.PermissionError)
elif not frappe.has_permission(doctype="Workspace", ptype="create"):
frappe.flags.error_message = _("User {0} does not have the permission to create a Workspace.").format(
frappe.bold(frappe.session.user)
)
raise frappe.PermissionError
doc = frappe.new_doc("Workspace")
doc.title = page.get("title")
doc.icon = page.get("icon")

View file

@ -10,7 +10,7 @@ from frappe.utils import get_url_to_form
@frappe.whitelist()
def update_follow(doctype, doc_name, following):
def update_follow(doctype: str, doc_name: str, following: bool):
if following:
return follow_document(doctype, doc_name, frappe.session.user)
else:

View file

@ -59,6 +59,17 @@ def cancel(doctype=None, name=None, workflow_state_fieldname=None, workflow_stat
frappe.msgprint(frappe._("Cancelled"), indicator="red", alert=True)
@frappe.whitelist()
def discard(doctype: str, name: str | int):
	"""Discard a draft document and notify the user via a red alert."""
	document = frappe.get_doc(doctype, name)
	# Record the action for audit/capture before mutating the document.
	capture_doc(document, "Discard")
	document.discard()
	# Push the refreshed doc state back to connected clients.
	send_updated_docs(document)
	frappe.msgprint(frappe._("Discarded"), indicator="red", alert=True)
def send_updated_docs(doc):
from .load import get_docinfo

View file

@ -58,7 +58,7 @@ def _toggle_like(doctype, name, add, user=None):
frappe.db.set_value(doctype, name, "_liked_by", json.dumps(liked_by), update_modified=False)
except frappe.db.ProgrammingError as e:
if frappe.db.is_column_missing(e):
if frappe.db.is_missing_column(e):
add_column(doctype, "_liked_by", "Text")
_toggle_like(doctype, name, add, user)
else:

View file

@ -60,10 +60,12 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
.run(as_dict=True)
)
if not frappe.get_meta(doctype).has_field(field) and not is_default_field(field):
meta = frappe.get_meta(doctype)
if not meta.has_field(field) and not is_default_field(field):
raise ValueError("Field does not belong to doctype")
return frappe.get_list(
data = frappe.get_list(
doctype,
filters=current_filters,
group_by=f"`tab{doctype}`.{field}",
@ -71,3 +73,13 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
order_by="count desc",
limit=50,
)
# Add in title if it's a link field and `show_title_field_in_link` is set
if (field_meta := meta.get_field(field)) and field_meta.fieldtype == "Link":
link_meta = frappe.get_meta(field_meta.options)
if link_meta.show_title_field_in_link:
title_field = link_meta.get_title_field()
for item in data:
item.title = frappe.get_value(field_meta.options, item.name, title_field)
return data

View file

@ -32,8 +32,8 @@ def get_setup_stages(args): # nosemgrep
stages.append(
{
# post executing hooks
"status": "Wrapping up",
"fail_msg": "Failed to complete setup",
"status": _("Wrapping up"),
"fail_msg": _("Failed to complete setup"),
"tasks": [{"fn": run_post_setup_complete, "args": args, "fail_msg": "Failed to complete setup"}],
}
)
@ -220,6 +220,7 @@ def create_or_update_user(args): # nosemgrep
}
)
user.append_roles(*_get_default_roles())
user.append_roles("System Manager")
user.flags.no_welcome_mail = True
user.insert()
@ -305,7 +306,7 @@ def load_languages():
}
@frappe.whitelist()
@frappe.whitelist(allow_guest=True)
def load_country():
from frappe.sessions import get_geo_ip_country

View file

@ -34,6 +34,9 @@ def get_report_doc(report_name):
doc.custom_filters = data.get("filters")
doc.is_custom_report = True
# Follow whatever the custom report has set for prepared report field
doc.prepared_report = custom_report_doc.prepared_report
if not doc.is_permitted():
frappe.throw(
_("You don't have access to Report: {0}").format(report_name),

View file

@ -216,7 +216,8 @@ def update_wildcard_field_param(data):
if (isinstance(data.fields, str) and data.fields == "*") or (
isinstance(data.fields, list | tuple) and len(data.fields) == 1 and data.fields[0] == "*"
):
data.fields = get_permitted_fields(data.doctype, parenttype=data.parenttype)
parent_type = data.parenttype or data.parent_doctype
data.fields = get_permitted_fields(data.doctype, parenttype=parent_type, ignore_virtual=True)
return True
return False
@ -517,6 +518,16 @@ def delete_bulk(doctype, items):
if undeleted_items and len(items) != len(undeleted_items):
frappe.clear_messages()
delete_bulk(doctype, undeleted_items)
elif undeleted_items:
frappe.msgprint(
_("Failed to delete {0} documents: {1}").format(len(undeleted_items), ", ".join(undeleted_items)),
realtime=True,
title=_("Bulk Operation Failed"),
)
else:
frappe.msgprint(
_("Deleted all documents successfully"), realtime=True, title=_("Bulk Operation Successful")
)
@frappe.whitelist()

View file

@ -43,7 +43,7 @@ def get_children(doctype, parent="", include_disabled=False, **filters):
def _get_children(doctype, parent="", ignore_permissions=False, include_disabled=False):
parent_field = "parent_" + doctype.lower().replace(" ", "_")
parent_field = "parent_" + frappe.scrub(doctype)
filters = [[f"ifnull(`{parent_field}`,'')", "=", parent], ["docstatus", "<", 2]]
if frappe.db.has_column(doctype, "disabled") and not include_disabled:
filters.append(["disabled", "=", False])
@ -75,13 +75,15 @@ def make_tree_args(**kwarg):
kwarg.pop("cmd", None)
doctype = kwarg["doctype"]
parent_field = "parent_" + doctype.lower().replace(" ", "_")
parent_field = "parent_" + frappe.scrub(doctype)
if kwarg["is_root"] == "false":
kwarg["is_root"] = False
if kwarg["is_root"] == "true":
kwarg["is_root"] = True
kwarg.update({parent_field: kwarg.get("parent") or kwarg.get(parent_field)})
parent = kwarg.get("parent") or kwarg.get(parent_field)
if doctype != parent:
kwarg.update({parent_field: parent})
return frappe._dict(kwarg)

View file

@ -347,7 +347,9 @@ def make_links(columns, data):
elif col.fieldtype == "Currency":
doc = None
if doc_name and col.get("parent") and not frappe.get_meta(col.parent).istable:
doc = frappe.get_doc(col.parent, doc_name)
if frappe.db.exists(col.parent, doc_name):
doc = frappe.get_doc(col.parent, doc_name)
# Pass the Document to get the currency based on docfield option
row[col.fieldname] = frappe.format_value(row[col.fieldname], col, doc=doc)
return columns, data

View file

@ -237,6 +237,7 @@ class SendMailContext:
self.sent_to_atleast_one_recipient = any(
rec.recipient for rec in self.queue_doc.recipients if rec.is_mail_sent()
)
self.email_account_doc = None
def fetch_smtp_server(self):
self.email_account_doc = self.queue_doc.get_email_account(raise_error=True)
@ -326,7 +327,11 @@ class SendMailContext:
}
tracker_url = get_url(f"{email_read_tracker_url}?{get_signed_params(params)}")
elif frappe.conf.use_ssl and self.email_account_doc.track_email_status:
elif (
self.email_account_doc
and self.email_account_doc.track_email_status
and self.queue_doc.communication
):
tracker_url = f"{get_url()}/api/method/frappe.core.doctype.communication.email.mark_email_as_seen?name={self.queue_doc.communication}"
if tracker_url:

View file

@ -231,7 +231,8 @@
"fieldname": "message",
"fieldtype": "Code",
"ignore_xss_filter": 1,
"label": "Message"
"label": "Message",
"options": "Jinja"
},
{
"fieldname": "message_examples",
@ -291,7 +292,7 @@
"icon": "fa fa-envelope",
"index_web_pages_for_search": 1,
"links": [],
"modified": "2024-03-23 16:03:31.519921",
"modified": "2024-06-17 04:03:22.591781",
"modified_by": "Administrator",
"module": "Email",
"name": "Notification",
@ -314,4 +315,4 @@
"states": [],
"title_field": "subject",
"track_changes": 1
}
}

View file

@ -300,3 +300,10 @@ class InvalidKeyError(ValidationError):
http_status_code = 401
title = "Invalid Key"
message = "The document key is invalid"
class CommandFailedError(Exception):
	"""Raised when an external command fails.

	Carries the command's captured stdout (`out`) and stderr (`err`)
	alongside the standard exception message.
	"""

	def __init__(self, message: str, out: str, err: str):
		# Stash captured streams first, then delegate message handling.
		self.out = out
		self.err = err
		super().__init__(message)

View file

@ -497,7 +497,7 @@
"currency_fraction_units": 100,
"currency_name": "Canadian Dollar",
"currency_symbol": "$",
"date_format": "mm-dd-yyyy",
"date_format": "yyyy-mm-dd",
"number_format": "#,###.##",
"timezones": [
"America/Atikokan",

View file

@ -3,9 +3,7 @@ import json
EXCLUDE_SELECT_OPTIONS = [
"naming_series",
"number_format",
"float_precision",
"currency_precision",
"minimum_password_score",
"icon", # primarily for the Workflow State doctype
]
@ -55,9 +53,6 @@ def extract(fileobj, *args, **kwargs):
select_options = [option for option in message.split("\n") if option and not option.isdigit()]
if select_options and "icon" in select_options[0]:
continue
messages.extend(
(
option,
@ -78,7 +73,7 @@ def extract(fileobj, *args, **kwargs):
messages.append((link_doctype, f"Linked DocType in {doctype}'s connections"))
# By using "pgettext" as the function name we can supply the doctype as context
yield from ((None, "pgettext", (doctype, message), [comment]) for message, comment in messages)
yield from ((None, "_", message, [comment]) for message, comment in messages)
# Role names do not get context because they are used with multiple doctypes
yield from (

View file

@ -9,7 +9,7 @@ def extract(*args, **kwargs):
Reuse the babel_extract function from jinja2.ext, but handle our own implementation of `_()`.
To handle JS microtemplates, parse all code again using regex."""
fileobj = args[0] or kwargs["fileobj"]
print(fileobj.name)
code = fileobj.read().decode("utf-8")
for lineno, funcname, messages, comments in babel_extract(*args, **kwargs):

View file

@ -35,6 +35,15 @@ def extract(fileobj, *args, **kwargs):
)
for link in data.get("links", [])
)
yield from (
(
None,
"pgettext",
(link.get("link_to") if link.get("link_type") == "DocType" else None, link.get("description")),
[f"Description of a {link.get('type')} in the {workspace_name} Workspace"],
)
for link in data.get("links", [])
)
yield from (
(
None,
@ -44,3 +53,12 @@ def extract(fileobj, *args, **kwargs):
)
for shortcut in data.get("shortcuts", [])
)
yield from (
(
None,
"pgettext",
(shortcut.get("link_to") if shortcut.get("type") == "DocType" else None, shortcut.get("format")),
[f"Count format of shortcut in the {workspace_name} Workspace"],
)
for shortcut in data.get("shortcuts", [])
)

View file

@ -299,9 +299,6 @@ def get_translations_from_mo(lang, app):
if m.context:
context = m.context.decode() # context is encoded as bytes
translations[f"{key}:{context}"] = m.string
if m.id not in translations:
# better a translation with context than no translation
translations[m.id] = m.string
else:
translations[m.id] = m.string
return translations

Some files were not shown because too many files have changed in this diff Show more