Merge remote-tracking branch 'upstream/develop' into address-autocomplete

This commit is contained in:
barredterra 2024-05-19 20:18:14 +02:00
commit 99918674f8
70 changed files with 2414 additions and 1820 deletions

View file

@ -1,72 +0,0 @@
// Cypress E2E tests for the Recorder page (/app/recorder).
// NOTE(review): the entire suite is disabled via context.skip — confirm whether this is intentional.
context.skip("Recorder", () => {
before(() => {
// Authenticate once for the whole suite.
cy.login();
});
beforeEach(() => {
cy.visit("/app/recorder");
// Reset recorder state before every test: stop any running recording, then delete captured requests.
return cy
.window()
.its("frappe")
.then((frappe) => {
// reset recorder
return frappe.xcall("frappe.recorder.stop").then(() => {
return frappe.xcall("frappe.recorder.delete");
});
});
});
// Inactive state: red "Inactive" pill, Start/Clear page actions and an empty-state message box.
it("Recorder Empty State", () => {
cy.get(".page-head").findByTitle("Recorder").should("exist");
cy.get(".indicator-pill").should("contain", "Inactive").should("have.class", "red");
cy.get(".page-actions").findByRole("button", { name: "Start" }).should("exist");
cy.get(".page-actions").findByRole("button", { name: "Clear" }).should("exist");
cy.get(".msg-box").should("contain", "Recorder is Inactive");
cy.get(".msg-box").findByRole("button", { name: "Start Recording" }).should("exist");
});
// Starting a recording flips the pill to green "Active"; visiting a list view afterwards
// should make its reportview call show up in the recorder's result list.
it("Recorder Start", () => {
cy.get(".page-actions").findByRole("button", { name: "Start" }).click();
cy.get(".indicator-pill").should("contain", "Active").should("have.class", "green");
cy.get(".msg-box").should("contain", "No Requests found");
cy.visit("/app/List/DocType/List");
cy.intercept("POST", "/api/method/frappe.desk.reportview.get").as("list_refresh");
cy.wait("@list_refresh");
cy.get(".page-head").findByTitle("DocType").should("exist");
cy.get(".list-count").should("contain", "20 of ");
cy.visit("/app/recorder");
cy.get(".page-head").findByTitle("Recorder").should("exist");
cy.get(".frappe-list .result-list").should(
"contain",
"/api/method/frappe.desk.reportview.get"
);
});
// Clicking a captured request row opens its detail form at /recorder/request.
it("Recorder View Request", () => {
cy.get(".page-actions").findByRole("button", { name: "Start" }).click();
cy.visit("/app/List/DocType/List");
cy.intercept("POST", "/api/method/frappe.desk.reportview.get").as("list_refresh");
cy.wait("@list_refresh");
cy.get(".page-head").findByTitle("DocType").should("exist");
cy.get(".list-count").should("contain", "20 of ");
cy.visit("/app/recorder");
// force: true because the span may be partially covered by the row UI.
cy.get(".frappe-list .list-row-container span")
.contains("/api/method/frappe")
.should("be.visible")
.click({ force: true });
cy.url().should("include", "/recorder/request");
cy.get("form").should("contain", "/api/method/frappe");
});
});

View file

@ -36,7 +36,12 @@ _sites_path = os.environ.get("SITES_PATH", ".")
# If gc.freeze is done then importing modules before forking allows us to share the memory
if frappe._tune_gc:
import gettext
import babel
import babel.messages
import bleach
import num2words
import pydantic
import frappe.boot

View file

@ -125,6 +125,8 @@ class LoginManager:
self.set_user_info()
def login(self):
self.run_trigger("before_login")
if frappe.get_system_settings("disable_user_pass_login"):
frappe.throw(_("Login with username and password is not allowed."), frappe.AuthenticationError)

View file

@ -4,6 +4,8 @@
bootstrap client session
"""
import os
import frappe
import frappe.defaults
import frappe.desk.desk_page
@ -113,6 +115,9 @@ def get_bootinfo():
"Address Autocomplete Settings", "enabled"
)
if sentry_dsn := get_sentry_dsn():
bootinfo.sentry_dsn = sentry_dsn
return bootinfo
@ -473,3 +478,10 @@ def add_subscription_conf():
return frappe.conf.subscription
except Exception:
return ""
def get_sentry_dsn():
	"""Return the Sentry DSN from the environment, or None when telemetry is disabled.

	Respects the user's "enable_telemetry" system setting: without opt-in,
	no DSN is handed to the client and error reporting stays off.
	"""
	if frappe.get_system_settings("enable_telemetry"):
		return os.getenv("FRAPPE_SENTRY_DSN")
	return None

View file

@ -587,103 +587,6 @@ def add_db_index(context, doctype, column):
raise SiteNotSpecifiedError
@click.command("describe-database-table")
@click.option("--doctype", help="DocType to describe")
@click.option(
"--column",
multiple=True,
help="Explicitly fetch accurate cardinality from table data. This can be quite slow on large tables.",
)
@pass_context
def describe_database_table(context, doctype, column):
"""Describes various statistics about the table.
This is useful to build integration like
This includes:
1. Schema
2. Indexes
3. stats - total count of records
4. if column is specified then extra stats are generated for column:
Distinct values count in column
"""
import json
# One site connection per invocation; stats are emitted as a single JSON document on stdout.
for site in context.sites:
frappe.init(site=site)
frappe.connect()
try:
data = _extract_table_stats(doctype, column)
# NOTE: Do not print anything else in this to avoid clobbering the output.
print(json.dumps(data, indent=2))
finally:
# Always tear down the site connection, even when stat extraction fails.
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
def _extract_table_stats(doctype: str, columns: list[str]) -> dict:
"""Collect schema, index and row-count statistics for *doctype*'s table.

For each column name in *columns*, an exact cardinality is additionally
computed via `count(distinct ...)` — accurate but potentially slow.
Returns a dict with keys: table_name, total_rows, schema, indexes.
"""
from frappe.utils import cint, cstr, get_table_name
def sql_bool(val):
# MariaDB reports flags as "YES"/"NO" (or 0/1) strings; normalize to bool.
return cstr(val).lower() in ("yes", "1", "true")
table = get_table_name(doctype, wrap_in_backticks=True)
schema = []
for field in frappe.db.sql(f"describe {table}", as_dict=True):
schema.append(
{
"column": field["Field"],
"type": field["Type"],
"is_nullable": sql_bool(field["Null"]),
"default": field["Default"],
}
)
def update_cardinality(column, value):
# Attach a cardinality estimate to the matching schema entry in place.
for col in schema:
if col["column"] == column:
col["cardinality"] = value
break
indexes = []
for idx in frappe.db.sql(f"show index from {table}", as_dict=True):
indexes.append(
{
"unique": not sql_bool(idx["Non_unique"]),
"cardinality": idx["Cardinality"],
"name": idx["Key_name"],
"sequence": idx["Seq_in_index"],
"nullable": sql_bool(idx["Null"]),
"column": idx["Column_name"],
"type": idx["Index_type"],
}
)
# An index's cardinality estimate doubles as the column's estimate, but
# only for the leading (Seq_in_index == 1) column of the index.
if idx["Seq_in_index"] == 1:
update_cardinality(idx["Column_name"], idx["Cardinality"])
# Approximate row count from information_schema — fast but not exact.
total_rows = cint(
frappe.db.sql(
f"""select table_rows
from information_schema.tables
where table_name = 'tab{doctype}'"""
)[0][0]
)
# fetch accurate cardinality for columns by query. WARN: This can take a lot of time.
for column in columns:
cardinality = frappe.db.sql(f"select count(distinct {column}) from {table}")[0][0]
update_cardinality(column, cardinality)
return {
"table_name": table.strip("`"),
"total_rows": total_rows,
"schema": schema,
"indexes": indexes,
}
@click.command("add-system-manager")
@click.argument("email")
@click.option("--first-name")
@ -1602,7 +1505,6 @@ commands = [
add_system_manager,
add_user_for_sites,
add_db_index,
describe_database_table,
backup,
drop_site,
install_app,

View file

@ -16,6 +16,8 @@
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"custom_delimiters",
"delimiter_options",
"status",
"submit_after_import",
"mute_emails",
@ -167,11 +169,25 @@
"hidden": 1,
"label": "Payload Count",
"read_only": 1
},
{
"default": ",;\\t|",
"depends_on": "custom_delimiters",
"description": "If your CSV uses a different delimiter, add that character here, ensuring no spaces or additional characters are included.",
"fieldname": "delimiter_options",
"fieldtype": "Data",
"label": "Delimiter Options"
},
{
"default": "0",
"fieldname": "custom_delimiters",
"fieldtype": "Check",
"label": "Custom Delimiters"
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2024-03-23 16:02:16.953820",
"modified": "2024-04-27 20:42:35.843158",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
@ -195,4 +211,4 @@
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View file

@ -27,6 +27,8 @@ class DataImport(Document):
if TYPE_CHECKING:
from frappe.types import DF
custom_delimiters: DF.Check
delimiter_options: DF.Data | None
google_sheets_url: DF.Data | None
import_file: DF.Attach | None
import_type: DF.Literal["", "Insert New Records", "Update Existing Records"]
@ -50,11 +52,16 @@ class DataImport(Document):
self.template_options = ""
self.template_warnings = ""
self.set_delimiters_flag()
self.validate_doctype()
self.validate_import_file()
self.validate_google_sheets_url()
self.set_payload_count()
def set_delimiters_flag(self):
# Expose the user-configured delimiter candidates to the import parser via
# a global flag; falls back to "," when none are configured.
if self.import_file:
frappe.flags.delimiter_options = self.delimiter_options or ","
def validate_doctype(self):
if self.reference_doctype in BLOCKED_DOCTYPES:
frappe.throw(_("Importing {0} is not allowed.").format(self.reference_doctype))
@ -79,6 +86,7 @@ class DataImport(Document):
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
if import_file:
self.import_file = import_file
self.set_delimiters_flag()
if google_sheets_url:
self.google_sheets_url = google_sheets_url

View file

@ -0,0 +1,5 @@
Title ;Description ;Number ;another_number ;ID (Table Field 1) ;Child Title (Table Field 1) ;Child Description (Table Field 1) ;Child 2 Title (Table Field 2) ;Child 2 Date (Table Field 2) ;Child 2 Number (Table Field 2) ;Child Title (Table Field 1 Again) ;Child Date (Table Field 1 Again) ;Child Number (Table Field 1 Again) ;table_field_1_again.child_another_number
Test 5 ;test description ;1 ;2 ;"" ; ;"child description with ,comma and" ;child title ;14-08-2019 ;4 ;child title again ;22-09-2020 ;5 ; 7
; ; ; ; ;child title 2 ;child description 2 ;title child ;30-10-2019 ;5 ; ;22-09-2021 ; ;
;test description 2 ;1 ;2 ; ;child mandatory title ; ;title child man ; ; ;child mandatory again ; ; ;
Test 4 ;test description 3 ;4 ;5 ;"" ;child title asdf ;child description asdf ;child title asdf adsf ;15-08-2019 ;6 ;child title again asdf ;22-09-2022 ;9 ; 71
Can't render this file because it contains an unexpected character in line 2 and column 55.

View file

@ -1012,7 +1012,13 @@ class Column:
)
elif self.df.fieldtype in ("Date", "Time", "Datetime"):
# guess date/time format
# TODO: add possibility for user, to define the date format explicitly in the Data Import UI
# for example, if date column in file is in %d-%m-%y format -> 23-04-24.
# The date guesser might fail, as, this can be also parsed as %y-%m-%d, as both 23 and 24 are valid for year & for day
# This is an issue that cannot be handled automatically, no matter how we try, as it completely depends on the user's input.
# Defining an explicit value which surely recognizes
self.date_format = self.guess_date_format_for_column()
if not self.date_format:
if self.df.fieldtype == "Time":
self.date_format = "%H:%M:%S"

View file

@ -50,6 +50,25 @@ class TestImporter(FrappeTestCase):
self.assertEqual(doc3.another_number, 5)
self.assertEqual(format_duration(doc3.duration), "5d 5h 45m")
def test_data_validation_semicolon_success(self):
# A semicolon-delimited sample file should parse into all 15 columns when
# the default delimiter options (which include ";") are in effect.
import_file = get_import_file("sample_import_file_semicolon")
data_import = self.get_importer(doctype_name, import_file, update=True)
doc = data_import.get_preview_from_template().get("data", [{}])
# A quoted field containing a comma must survive intact (comma is data, not a delimiter).
self.assertEqual(doc[0][7], "child description with ,comma and")
# Column count should be 14 (+1 ID)
self.assertEqual(len(doc[0]), 15)
def test_data_validation_semicolon_failure(self):
# With delimiter options restricted to "," (see get_importer_semicolon),
# the semicolon file cannot be split correctly.
import_file = get_import_file("sample_import_file_semicolon")
data_import = self.get_importer_semicolon(doctype_name, import_file)
doc = data_import.get_preview_from_template().get("data", [{}])
# if semicolon delimiter detection fails, and falls back to comma,
# column number will be less than 15 -> 2 (+1 id)
self.assertLessEqual(len(doc[0]), 15)
def test_data_import_preview(self):
import_file = get_import_file("sample_import_file")
data_import = self.get_importer(doctype_name, import_file)
@ -138,6 +157,18 @@ class TestImporter(FrappeTestCase):
return data_import
def get_importer_semicolon(self, doctype, import_file, update=False):
# Like get_importer, but restricts the delimiter options to "," so that
# parsing a semicolon-separated file is expected to fail (used by
# test_data_validation_semicolon_failure).
data_import = frappe.new_doc("Data Import")
data_import.import_type = "Insert New Records" if not update else "Update Existing Records"
data_import.reference_doctype = doctype
data_import.import_file = import_file.file_url
# deliberately overwrite default delimiter options here, causing to fail when parsing `;`
data_import.delimiter_options = ","
data_import.insert()
frappe.db.commit() # nosemgrep
return data_import
def create_doctype_if_not_exists(doctype_name, force=False):
if force:

View file

@ -82,4 +82,4 @@
"sort_field": "creation",
"sort_order": "DESC",
"states": []
}
}

View file

@ -107,6 +107,7 @@
"fieldtype": "Link",
"hidden": 1,
"label": "Folder",
"length": 255,
"options": "File",
"read_only": 1
},
@ -189,7 +190,7 @@
"icon": "fa fa-file",
"idx": 1,
"links": [],
"modified": "2024-03-23 16:03:25.814224",
"modified": "2024-05-09 11:46:42.917146",
"modified_by": "Administrator",
"module": "Core",
"name": "File",

View file

@ -31,6 +31,7 @@ from .utils import *
exclude_from_linked_with = True
ImageFile.LOAD_TRUNCATED_IMAGES = True
URL_PREFIXES = ("http://", "https://")
FILE_ENCODING_OPTIONS = ("utf-8-sig", "utf-8", "windows-1250", "windows-1252")
class File(Document):
@ -515,10 +516,11 @@ class File(Document):
def exists_on_disk(self):
return os.path.exists(self.get_full_path())
def get_content(self) -> bytes:
def get_content(self, encodings=None) -> bytes | str:
if self.is_folder:
frappe.throw(_("Cannot get file contents of a Folder"))
# if doc was just created, content field is already populated, return it as-is
if self.get("content"):
self._content = self.content
if self.decode:
@ -531,15 +533,20 @@ class File(Document):
self.validate_file_url()
file_path = self.get_full_path()
# read the file
if encodings is None:
encodings = FILE_ENCODING_OPTIONS
with open(file_path, mode="rb") as f:
self._content = f.read()
try:
# for plain text files
self._content = self._content.decode()
except UnicodeDecodeError:
# for .png, .jpg, etc
pass
# looping will not result in slowdown, as the content is usually utf-8 or utf-8-sig
# encoded so the first iteration will be enough most of the time
for encoding in encodings:
try:
# read file with proper encoding
self._content = self._content.decode(encoding)
break
except UnicodeDecodeError:
# for .png, .jpg, etc
continue
return self._content

View file

@ -1,7 +1,6 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import base64
import json
import os
import shutil
import tempfile
@ -111,7 +110,7 @@ class TestBase64File(FrappeTestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content = base64.b64encode(test_content1.encode("utf-8"))
_file: "File" = frappe.get_doc(
_file: frappe.Document = frappe.get_doc(
{
"doctype": "File",
"file_name": "test_base64.txt",
@ -125,7 +124,7 @@ class TestBase64File(FrappeTestCase):
self.saved_file_url = _file.file_url
def test_saved_content(self):
_file = frappe.get_doc("File", {"file_url": self.saved_file_url})
_file: frappe.Document = frappe.get_doc("File", {"file_url": self.saved_file_url})
content = _file.get_content()
self.assertEqual(content, test_content1)
@ -255,6 +254,25 @@ class TestSameContent(FrappeTestCase):
limit_property.delete()
frappe.clear_cache(doctype="ToDo")
def test_utf8_bom_content_decoding(self):
utf8_bom_content = test_content1.encode("utf-8-sig")
_file: frappe.Document = frappe.get_doc(
{
"doctype": "File",
"file_name": "utf8bom.txt",
"attached_to_doctype": self.attached_to_doctype1,
"attached_to_name": self.attached_to_docname1,
"content": utf8_bom_content,
"decode": False,
}
)
_file.save()
saved_file = frappe.get_doc("File", _file.name)
file_content_decoded = saved_file.get_content(encodings=["utf-8"])
self.assertEqual(file_content_decoded[0], "\ufeff")
file_content_properly_decoded = saved_file.get_content(encodings=["utf-8-sig", "utf-8"])
self.assertEqual(file_content_properly_decoded, test_content1)
class TestFile(FrappeTestCase):
def setUp(self):

View file

@ -0,0 +1,283 @@
"""Basic DB optimizer for Frappe Framework based app.
This is largely based on heuristics and known good practices for indexing.
"""
from collections import defaultdict
from dataclasses import dataclass
from typing import TypeVar
from sql_metadata import Parser
import frappe
from frappe.utils import flt
# Any index that reads more than 30% table on average is not "useful"
INDEX_SCORE_THRESHOLD = 0.3
# Anything reading less than this percent of table is considered optimal
OPTIMIZATION_THRESHOLD = 0.1
T = TypeVar("T")
@dataclass
class DBColumn:
name: str
cardinality: int | None
is_nullable: bool
default: str
data_type: str
@classmethod
def from_frappe_ouput(cls, data) -> "DBColumn":
"Parse DBColumn from output of describe-database-table command in Frappe"
return cls(
name=data["column"],
cardinality=data.get("cardinality"),
is_nullable=data["is_nullable"],
default=data["default"],
data_type=data["type"],
)
@dataclass
class DBIndex:
name: str
column: str
table: str
unique: bool | None = None
cardinality: int | None = None
sequence: int = 1
nullable: bool = True
_score: float = 0.0
def __eq__(self, other: "DBIndex") -> bool:
return self.column == other.column and self.sequence == other.sequence and self.table == other.table
def __repr__(self):
return f"DBIndex(`{self.table}`.`{self.column}`)"
@classmethod
def from_frappe_ouput(cls, data, table) -> "DBIndex":
"Parse DBIndex from output of describe-database-table command in Frappe"
return cls(
name=data["name"],
table=table,
unique=data["unique"],
cardinality=data["cardinality"],
sequence=data["sequence"],
nullable=data["nullable"],
column=data["column"],
)
@dataclass
class ColumnStat:
column_name: str
avg_frequency: float
avg_length: float
nulls_ratio: float | None = None
histogram: list[float] = None
def __post_init__(self):
if not self.histogram:
self.histogram = []
@classmethod
def from_frappe_ouput(cls, data) -> "ColumnStat":
return cls(
column_name=data["column_name"],
avg_frequency=data["avg_frequency"],
avg_length=data["avg_length"],
nulls_ratio=data["nulls_ratio"],
histogram=[flt(bin) for bin in data["histogram"].split(",")] if data["histogram"] else [],
)
@dataclass
class DBTable:
	"""Schema and index summary for one database table."""

	name: str
	total_rows: int
	schema: list[DBColumn] | None = None
	indexes: list[DBIndex] | None = None

	def __post_init__(self):
		# Normalize missing collections to empty lists.
		self.schema = self.schema or []
		self.indexes = self.indexes or []

	def update_cardinality(self, column_stats: list[ColumnStat]) -> None:
		"""Estimate cardinality using mysql.column_stat"""
		for stat in column_stats:
			for col in self.schema:
				if col.name != stat.column_name:
					continue
				if col.cardinality or not stat.avg_frequency:
					continue
				# "hack" or "math" - average frequency is on average how
				# frequently a row value appears. Avg = total_rows / cardinality,
				# so invert it to recover cardinality.
				col.cardinality = self.total_rows / stat.avg_frequency

	@classmethod
	def from_frappe_ouput(cls, data) -> "DBTable":
		"Parse DBTable from output of describe-database-table command in Frappe"
		table_name = data["table_name"]
		return cls(
			name=table_name,
			total_rows=data["total_rows"],
			schema=[DBColumn.from_frappe_ouput(c) for c in data["schema"]],
			indexes=[DBIndex.from_frappe_ouput(i, table_name) for i in data["indexes"]],
		)

	def has_column(self, column: str) -> bool:
		"""Return True when *column* is present in this table's schema."""
		return any(col.name == column for col in self.schema)
@dataclass
class DBOptimizer:
# Heuristic index suggester: parses one SQL query (sql_metadata.Parser) and,
# given per-table statistics, suggests at most one column worth indexing.
query: str # raw query in string format
tables: dict[str, DBTable] = None
parsed_query: Parser = None
def __post_init__(self):
# Normalize the table map and parse the query exactly once up front.
if not self.tables:
self.tables = {}
self.parsed_query = Parser(self.query)
def tables_examined(self) -> list[str]:
# All table names referenced by the query, as reported by the parser.
return self.parsed_query.tables
def update_table_data(self, table: DBTable):
# Register (or refresh) statistics for one table.
self.tables[table.name] = table
def _convert_to_db_index(self, column: str) -> DBIndex:
# Resolve a possibly table-qualified column ("table.column" or bare
# "column") to a DBIndex, inferring the owning table from known schemas
# when the column is unqualified.
column_name, table = None, None
if "." in column:
table, column_name = column.split(".")
else:
column_name = column
for table_name, db_table in self.tables.items():
if db_table.has_column(column):
table = table_name
break
return DBIndex(column=column_name, name=column_name, table=table)
def _remove_existing_indexes(self, potential_indexes: list[DBIndex]) -> list[DBIndex]:
"""Given list of potential index candidates remove the ones that already exist.
This also removes multi-column indexes for parts that are applicable to query.
Example: If multi-col index A+B+C exists and query utilizes A+B then
A+B are removed from potential indexes.
"""
def remove_maximum_indexes(idx: list[DBIndex]):
"""Try to remove entire index from potential indexes, if not possible, reduce one part and try again until no parts are left."""
if not idx:
return
matched_sub_index = []
for idx_part in list(idx):
matching_part = [
i for i in potential_indexes if i.column == idx_part.column and i.table == idx_part.table
]
if not matching_part:
# pop and recurse
idx.pop()
return remove_maximum_indexes(idx)
else:
matched_sub_index.extend(matching_part)
# Every part matched now, lets remove those parts
for i in matched_sub_index:
potential_indexes.remove(i)
# Reconstruct multi-col index
for table in self.tables.values():
merged_indexes = defaultdict(list)
for index in table.indexes:
merged_indexes[index.name].append(index)
for idx in merged_indexes.values():
# Parts of a multi-column index must be considered in column order.
idx.sort(key=lambda x: x.sequence)
for idx in merged_indexes.values():
remove_maximum_indexes(idx)
return potential_indexes
def potential_indexes(self) -> list[DBIndex]:
"""Get all columns that can potentially be indexed to speed up this query."""
possible_indexes = []
# Where claus columns using these operators benefit from index
# 1. = (equality)
# 2. >, <, >=, <=
# 3. LIKE 'xyz%' (Prefix search)
# 4. BETWEEN (for date[time] fields)
# 5. IN (similar to equality)
if not self.parsed_query.columns_dict:
return []
if where_columns := self.parsed_query.columns_dict.get("where"):
# TODO: Apply some heuristics here, not all columns in where clause are actually useful
possible_indexes.extend(where_columns)
# Join clauses - Both sides of join should ideally be indexed. One will *usually* be primary key.
if join_columns := self.parsed_query.columns_dict.get("join"):
possible_indexes.extend(join_columns)
# Top N query variant - Order by column can possibly speed up the query
if order_by_columns := self.parsed_query.columns_dict.get("order_by"):
if self.parsed_query.limit_and_offset:
possible_indexes.extend(order_by_columns)
possible_db_indexes = [self._convert_to_db_index(i) for i in possible_indexes]
# "*" is not a real column and "name" is the primary key — already indexed.
possible_db_indexes = [i for i in possible_db_indexes if i.column not in ("*", "name")]
possible_db_indexes.sort(key=lambda i: (i.table, i.column))
return self._remove_existing_indexes(possible_db_indexes)
def suggest_index(self) -> DBIndex | None:
"""Suggest best possible column to index given query and table stats."""
if missing_tables := (set(self.tables_examined()) - set(self.tables.keys())):
frappe.throw("DBTable infomation missing for: " + ", ".join(missing_tables))
potential_indexes = self.potential_indexes()
for index in list(potential_indexes):
table = self.tables[index.table]
# Data type is not easily indexable - skip
column = next(c for c in table.schema if c.name == index.column)
if "text" in column.data_type.lower() or "json" in column.data_type.lower():
potential_indexes.remove(index)
# Update cardinality from column so scoring can be done
index.cardinality = column.cardinality
for index in potential_indexes:
index._score = self.index_score(index)
# Lower score = smaller fraction of the table read = better candidate.
potential_indexes.sort(key=lambda i: i._score)
if (
potential_indexes
and (best_index := potential_indexes[0])
and best_index._score < INDEX_SCORE_THRESHOLD
):
return best_index
def index_score(self, index: DBIndex) -> float:
"""Score an index from 0 to 1 based on usefulness.
A score of 0.5 indicates on average this index will read 50% of the table. (e.g. checkboxes)"""
table = self.tables[index.table]
# Unknown cardinality falls back to 2 → a conservative 50% read estimate.
cardinality = index.cardinality or 2
total_rows = table.total_rows or cardinality or 1
# We assume most unique values are evenly distributed, this is
# definitely not the case IRL but it should be good enough assumptions
# Score is rouhgly what percentage of table we will end up reading on typical query
rows_fetched_on_average = (table.total_rows or cardinality) / cardinality
return rows_fetched_on_average / total_rows

View file

@ -9,6 +9,39 @@ frappe.ui.form.on("Recorder", {
frm.disable_save();
frm._sort_order = {};
frm.trigger("setup_sort");
frm.fields_dict.sql_queries.grid.grid_pagination.page_length = 500;
refresh_field("sql_queries");
frm.trigger("format_grid");
frm.add_custom_button(__("Suggest Optimizations"), () => {
frappe.xcall("frappe.core.doctype.recorder.recorder.optimize", {
recorder_id: frm.doc.name,
});
});
frappe.realtime.on("recorder-analysis-complete", () => {
frm.reload_doc();
setTimeout(() => frm.scroll_to_field("suggested_indexes"), 1500);
});
let index_grid = frm.fields_dict.suggested_indexes.grid;
index_grid.wrapper.find(".grid-footer").toggle(true);
index_grid.toggle_checkboxes(true);
index_grid.df.cannot_delete_rows = true;
index_grid.add_custom_button(__("Add Indexes"), function () {
let indexes_to_add = index_grid.get_selected_children().map((row) => {
return {
column: row.column,
table: row.table,
};
});
if (!indexes_to_add.length) {
frappe.toast(__("You need to select indexes you want to add first."));
return;
}
frappe.xcall("frappe.core.doctype.recorder.recorder.add_indexes", {
indexes: indexes_to_add,
});
});
},
setup_sort: function (frm) {
@ -22,9 +55,25 @@ frappe.ui.form.on("Recorder", {
frm._sort_order[field] = -1 * sort_order; // reverse for next click
grid.refresh();
frm.trigger("setup_sort"); // grid creates new elements again, resetup listeners.
frm.trigger("format_grid");
});
});
},
/// Format duration and copy cells
format_grid(frm) {
const max_duration = Math.max(20, ...frm.doc.sql_queries.map((d) => d.duration));
const heatmap = (table, field, max) => {
frm.fields_dict[table].grid.grid_rows.forEach((row) => {
const percent = Math.round((row.doc[field] / max) * 100);
$(row.columns[field]).css({
"background-color": `color-mix(in srgb, var(--bg-red) ${percent}%, var(--bg-color))`,
});
});
};
heatmap("sql_queries", "duration", max_duration);
},
});
frappe.ui.form.on("Recorder Query", "form_render", function (frm, cdt, cdn) {

View file

@ -20,6 +20,7 @@
"section_break_sgro",
"form_dict",
"section_break_9jhm",
"suggested_indexes",
"sql_queries",
"section_break_optn",
"profile"
@ -119,6 +120,13 @@
"fieldtype": "Code",
"label": "cProfile Output",
"read_only": 1
},
{
"description": "Disclaimer: These indexes are suggested based on data and queries performed during this recording. These suggestions may or may not help.",
"fieldname": "suggested_indexes",
"fieldtype": "Table",
"label": "Suggested Indexes",
"options": "Recorder Suggested Index"
}
],
"hide_toolbar": 1,
@ -126,7 +134,7 @@
"index_web_pages_for_search": 1,
"is_virtual": 1,
"links": [],
"modified": "2024-02-01 22:13:26.505174",
"modified": "2024-05-14 15:16:55.626656",
"modified_by": "Administrator",
"module": "Core",
"name": "Recorder",

View file

@ -1,10 +1,18 @@
# Copyright (c) 2023, Frappe Technologies and contributors
# For license information, please see license.txt
import json
from collections import Counter, defaultdict
import frappe
from frappe import _
from frappe.core.doctype.recorder.db_optimizer import DBOptimizer, DBTable
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
from frappe.model.document import Document
from frappe.recorder import RECORDER_REQUEST_HASH
from frappe.recorder import get as get_recorder_data
from frappe.utils import cint, evaluate_filters
from frappe.utils import cint, cstr, evaluate_filters, get_table_name
from frappe.utils.caching import redis_cache
class Recorder(Document):
@ -15,6 +23,9 @@ class Recorder(Document):
if TYPE_CHECKING:
from frappe.core.doctype.recorder_query.recorder_query import RecorderQuery
from frappe.core.doctype.recorder_suggested_index.recorder_suggested_index import (
RecorderSuggestedIndex,
)
from frappe.types import DF
cmd: DF.Data | None
@ -27,6 +38,7 @@ class Recorder(Document):
profile: DF.Code | None
request_headers: DF.Code | None
sql_queries: DF.Table[RecorderQuery]
suggested_indexes: DF.Table[RecorderSuggestedIndex]
time: DF.Datetime | None
time_in_queries: DF.Float
# end: auto-generated types
@ -95,8 +107,192 @@ def serialize_request(request):
request_headers=frappe.as_json(request.get("headers"), indent=4),
form_dict=frappe.as_json(request.get("form_dict"), indent=4),
sql_queries=request.get("calls"),
suggested_indexes=request.get("suggested_indexes"),
modified=request.get("time"),
creation=request.get("time"),
)
return request
@frappe.whitelist()
def add_indexes(indexes):
"""Whitelisted endpoint: enqueue background creation of the given indexes.

*indexes* is a JSON-encoded list of {"table": ..., "column": ...} dicts.
Restricted to Administrator.
"""
frappe.only_for("Administrator")
indexes = json.loads(indexes)
# One background job per index so a failure on one doesn't block the rest.
for index in indexes:
frappe.enqueue(_add_index, table=index["table"], column=index["column"])
frappe.msgprint(_("Enqueued creation of indexes"), alert=True)
def _add_index(table, column):
"""Create a DB index on *table*.*column* (background job for add_indexes).

Also records the index via a `search_index` property setter on the
docfield and notifies the user in realtime when done.
"""
doctype = get_doctype_name(table)
frappe.db.add_index(doctype, [column])
make_property_setter(
doctype,
column,
property="search_index",
value="1",
property_type="Check",
for_doctype=False, # Applied on docfield
)
frappe.msgprint(
_("Index created successfully on column {0} of doctype {1}").format(column, doctype),
alert=True,
realtime=True,
)
@frappe.whitelist()
def optimize(recorder_id: str):
"""Whitelisted endpoint: analyze a recording's queries in a background job.

Analysis runs two stat-gathering passes per query, hence the long queue.
"""
frappe.only_for("Administrator")
frappe.enqueue(_optimize, recorder_id=recorder_id, queue="long")
def _optimize(recorder_id):
"""Background job: analyze all captured queries of a recording and store
the top index suggestions back on the cached recorder request.

Publishes progress while analyzing and a `recorder-analysis-complete`
realtime event when finished.
"""
record: Recorder = frappe.get_doc("Recorder", recorder_id)
total_duration = record.time_in_queries
# Any index with query time less than 5% of total time is not suggested
PERCENT_DURATION_THRESHOLD_OVERALL = 0.05
# Any query with duration less than 0.5% of total duration is not analyzed
PERCENT_DURATION_THRESHOLD_QUERY = 0.005
# Index suggestion -> Query duration
index_suggestions = Counter()
for idx, captured_query in enumerate(record.sql_queries, start=1):
query = cstr(captured_query.query)
# Report progress so the Recorder form can show a progress bar.
frappe.publish_progress(
idx / len(record.sql_queries) * 100,
title="Analyzing Queries",
doctype=record.doctype,
docname=record.name,
description=f"Analyzing query: {query[:140]}",
)
# Skip cheap queries and statement types that can't benefit from an index.
if captured_query.duration < total_duration * PERCENT_DURATION_THRESHOLD_QUERY:
continue
if not query.lower().strip().startswith(("select", "update", "delete")):
continue
if index := _optimize_query(query):
# Weight each suggestion by the total query time it could improve.
index_suggestions[(index.table, index.column)] += captured_query.duration
suggested_indexes = index_suggestions.most_common(3)
suggested_indexes = [
idx for idx in suggested_indexes if idx[1] > total_duration * PERCENT_DURATION_THRESHOLD_OVERALL
]
if not suggested_indexes:
frappe.msgprint(
_("No automatic optimization suggestions available."),
title=_("No Suggestions"),
realtime=True,
)
return
# Recorder is a virtual doctype backed by this redis hash; persist suggestions there.
data = frappe.cache.hget(RECORDER_REQUEST_HASH, record.name)
data["suggested_indexes"] = [{"table": idx[0][0], "column": idx[0][1]} for idx in suggested_indexes]
frappe.cache.hset(RECORDER_REQUEST_HASH, record.name, data)
frappe.publish_realtime("recorder-analysis-complete", user=frappe.session.user)
frappe.msgprint(_("Query analysis complete. Check suggested indexes."), realtime=True, alert=True)
def _optimize_query(query):
"""Analyze a single SQL query and return the best index suggestion, or None."""
optimizer = DBOptimizer(query=query)
tables = optimizer.tables_examined()
# Note: Two passes are required here because we first need basic data to understand which
# columns need to be analyzed to get accurate cardinality.
# Pass 1: cheap stats (schema + existing indexes) for every referenced table.
for table in tables:
doctype = get_doctype_name(table)
stats = _fetch_table_stats(doctype, columns=[])
if not stats:
# Bail out entirely when any referenced table can't be resolved.
return
db_table = DBTable.from_frappe_ouput(stats)
optimizer.update_table_data(db_table)
potential_indexes = optimizer.potential_indexes()
# Group candidate columns by table for the accurate-cardinality pass.
tablewise_columns = defaultdict(list)
for idx in potential_indexes:
tablewise_columns[idx.table].append(idx.column)
# Pass 2: exact cardinality for candidate columns only (expensive queries).
for table in tables:
doctype = get_doctype_name(table)
stats = _fetch_table_stats(doctype, columns=tablewise_columns[table])
if not stats:
return
db_table = DBTable.from_frappe_ouput(stats)
optimizer.update_table_data(db_table)
return optimizer.suggest_index()
def _fetch_table_stats(doctype: str, columns: list[str]) -> dict | None:
"""Collect schema, index and row-count statistics for *doctype*'s table.

Returns None when the table does not exist. For each name in *columns*,
an exact (cached) cardinality is fetched — this can be slow on large tables.
"""
def sql_bool(val):
# MariaDB reports flags as "YES"/"NO" (or 0/1) strings; normalize to bool.
return cstr(val).lower() in ("yes", "1", "true")
if not frappe.db.table_exists(doctype):
return
table = get_table_name(doctype, wrap_in_backticks=True)
schema = []
for field in frappe.db.sql(f"describe {table}", as_dict=True):
schema.append(
{
"column": field["Field"],
"type": field["Type"],
"is_nullable": sql_bool(field["Null"]),
"default": field["Default"],
}
)
def update_cardinality(column, value):
# Attach a cardinality estimate to the matching schema entry in place.
for col in schema:
if col["column"] == column:
col["cardinality"] = value
break
indexes = []
for idx in frappe.db.sql(f"show index from {table}", as_dict=True):
indexes.append(
{
"unique": not sql_bool(idx["Non_unique"]),
"cardinality": idx["Cardinality"],
"name": idx["Key_name"],
"sequence": idx["Seq_in_index"],
"nullable": sql_bool(idx["Null"]),
"column": idx["Column_name"],
"type": idx["Index_type"],
}
)
# An index's cardinality estimate doubles as the leading column's estimate.
if idx["Seq_in_index"] == 1:
update_cardinality(idx["Column_name"], idx["Cardinality"])
# Approximate row count from information_schema — fast but not exact.
total_rows = cint(
frappe.db.sql(
f"""select table_rows
from information_schema.tables
where table_name = 'tab{doctype}'"""
)[0][0]
)
# fetch accurate cardinality for columns by query. WARN: This can take A LOT of time.
for column in columns:
cardinality = _get_column_cardinality(table, column)
update_cardinality(column, cardinality)
return {
"table_name": table.strip("`"),
"total_rows": total_rows,
"schema": schema,
"indexes": indexes,
}
@redis_cache
def _get_column_cardinality(table, column):
	"""Return the number of distinct values in `column` of `table`.

	Performs a full column scan, hence cached; can be slow on large
	tables. `table` is expected to arrive already wrapped in backticks.
	"""
	# Quote the identifier so reserved-word column names (e.g. `order`)
	# don't break the query. Column names come from `describe` / `show
	# index` output, so they are real identifiers, not untrusted input.
	return frappe.db.sql(f"select count(distinct `{column}`) from {table}")[0][0]
def get_doctype_name(table_name: str) -> str:
	"""Strip the conventional ``tab`` prefix from a database table name."""
	prefix = "tab"
	if table_name.startswith(prefix):
		return table_name[len(prefix) :]
	return table_name

View file

@ -5,8 +5,10 @@ import re
import frappe
import frappe.recorder
from frappe.core.doctype.recorder.recorder import serialize_request
from frappe.core.doctype.recorder.recorder import _optimize_query, serialize_request
from frappe.query_builder.utils import db_type_is
from frappe.recorder import get as get_recorder_data
from frappe.tests.test_query_builder import run_only_if
from frappe.tests.utils import FrappeTestCase
from frappe.utils import set_request
@ -75,3 +77,20 @@ class TestRecorder(FrappeTestCase):
requests = frappe.get_all("Recorder")
request_doc = get_recorder_data(requests[0].name)
self.assertIsInstance(serialize_request(request_doc), dict)
class TestQueryOptimization(FrappeTestCase):
	# MariaDB-only: the optimizer relies on `show index` / information_schema
	# output shapes from MariaDB.
	@run_only_if(db_type_is.MARIADB)
	def test_query_optimizer(self):
		"""The equality filter on `tabUser.email` should win as the suggested index.

		The range filter (creation) and the leading-wildcard LIKE (bio) are
		weaker candidates than the exact-match email predicate.
		"""
		suggested_index = _optimize_query(
			"""select name from
			`tabUser` u
			join `tabHas Role` r
			on r.parent = u.name
			where email='xyz'
			and creation > '2023'
			and bio like '%xyz%'
			"""
		)
		self.assertEqual(suggested_index.table, "tabUser")
		self.assertEqual(suggested_index.column, "email")

View file

@ -33,20 +33,24 @@
"label": "Normalized Query"
},
{
"columns": 1,
"fieldname": "duration",
"fieldtype": "Float",
"in_list_view": 1,
"label": "Duration"
},
{
"columns": 1,
"fieldname": "exact_copies",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Exact Copies"
},
{
"columns": 1,
"fieldname": "normalized_copies",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Normalized Copies"
},
{
@ -84,6 +88,7 @@
"label": "SQL Explain"
},
{
"columns": 1,
"fieldname": "index",
"fieldtype": "Int",
"in_list_view": 1,
@ -94,7 +99,7 @@
"is_virtual": 1,
"istable": 1,
"links": [],
"modified": "2024-03-23 16:03:36.052756",
"modified": "2024-05-13 17:13:20.785329",
"modified_by": "Administrator",
"module": "Core",
"name": "Recorder Query",

View file

@ -0,0 +1,39 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2024-05-14 16:23:33.466465",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"table",
"column"
],
"fields": [
{
"fieldname": "table",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Table"
},
{
"fieldname": "column",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Column"
}
],
"index_web_pages_for_search": 1,
"is_virtual": 1,
"istable": 1,
"links": [],
"modified": "2024-05-14 17:43:57.231051",
"modified_by": "Administrator",
"module": "Core",
"name": "Recorder Suggested Index",
"owner": "Administrator",
"permissions": [],
"sort_field": "creation",
"sort_order": "DESC",
"states": []
}

View file

@ -0,0 +1,46 @@
# Copyright (c) 2024, Frappe Technologies and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class RecorderSuggestedIndex(Document):
	"""Virtual child DocType holding index suggestions produced by the Recorder.

	Rows exist only in memory: every persistence hook is stubbed out, so
	nothing is ever written to or read from the database for this doctype.
	"""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		column: DF.Data | None
		parent: DF.Data
		parentfield: DF.Data
		parenttype: DF.Data
		table: DF.Data | None
	# end: auto-generated types

	def db_insert(self, *args, **kwargs):
		# Virtual doctype: direct DB inserts are not supported.
		raise NotImplementedError

	def load_from_db(self):
		# Virtual doctype: there is no DB row to load.
		raise NotImplementedError

	def db_update(self):
		# Virtual doctype: there is no DB row to update.
		raise NotImplementedError

	def delete(self):
		# Virtual doctype: there is no DB row to delete.
		raise NotImplementedError

	@staticmethod
	def get_list(filters=None, page_length=20, **kwargs):
		# Intentionally a no-op: list views for this doctype are not queried
		# from the database.
		pass

	@staticmethod
	def get_count(filters=None, **kwargs):
		# Intentionally a no-op; see get_list.
		pass

	@staticmethod
	def get_stats(**kwargs):
		# Intentionally a no-op; see get_list.
		pass

View file

@ -3,6 +3,7 @@
from functools import partial
from types import FunctionType, MethodType, ModuleType
from typing import TYPE_CHECKING
import frappe
from frappe import _
@ -16,6 +17,9 @@ from frappe.utils.safe_exec import (
safe_exec,
)
if TYPE_CHECKING:
from frappe.core.doctype.scheduled_job_type.scheduled_job_type import ScheduledJobType
class ServerScript(Document):
# begin: auto-generated types
@ -77,12 +81,10 @@ class ServerScript(Document):
def validate(self):
frappe.only_for("Script Manager", True)
self.sync_scheduled_jobs()
self.clear_scheduled_events()
self.check_if_compilable_in_restricted_context()
def on_update(self):
self.sync_scheduler_events()
self.sync_scheduled_job_type()
def clear_cache(self):
frappe.cache.delete_value("server_script_map")
@ -92,7 +94,10 @@ class ServerScript(Document):
frappe.cache.delete_value("server_script_map")
if self.script_type == "Scheduler Event":
for job in self.scheduled_jobs:
frappe.delete_doc("Scheduled Job Type", job.name)
scheduled_job_type: "ScheduledJobType" = frappe.get_doc("Scheduled Job Type", job.name)
scheduled_job_type.stopped = True
scheduled_job_type.server_script = None
scheduled_job_type.save()
def get_code_fields(self):
return {"script": "py"}
@ -105,33 +110,35 @@ class ServerScript(Document):
fields=["name", "stopped"],
)
def sync_scheduled_jobs(self):
"""Sync Scheduled Job Type statuses if Server Script's disabled status is changed"""
if self.script_type != "Scheduler Event" or not self.has_value_changed("disabled"):
def sync_scheduled_job_type(self):
"""Create or update Scheduled Job Type documents for Scheduler Event Server Scripts"""
if self.script_type != "Scheduler Event" or (
(previous_script_type := self.has_value_changed("script_type"))
# True will be sent if its a new record
and previous_script_type.value not in (True, "Scheduler Event")
):
return
for scheduled_job in self.scheduled_jobs:
if bool(scheduled_job.stopped) != bool(self.disabled):
job = frappe.get_doc("Scheduled Job Type", scheduled_job.name)
job.stopped = self.disabled
job.save()
def sync_scheduler_events(self):
"""Create or update Scheduled Job Type documents for Scheduler Event Server Scripts"""
if not self.disabled and self.event_frequency and self.script_type == "Scheduler Event":
cron_format = self.cron_format if self.event_frequency == "Cron" else None
setup_scheduler_events(
script_name=self.name, frequency=self.event_frequency, cron_format=cron_format
if scheduled_script := frappe.db.get_value("Scheduled Job Type", {"server_script": self.name}):
scheduled_job_type: "ScheduledJobType" = frappe.get_doc("Scheduled Job Type", scheduled_script)
else:
scheduled_job_type: "ScheduledJobType" = frappe.get_doc(
{
"doctype": "Scheduled Job Type",
"server_script": self.name,
}
)
def clear_scheduled_events(self):
"""Deletes existing scheduled jobs by Server Script if self.event_frequency or self.cron_format has changed"""
if (
self.script_type == "Scheduler Event"
and (self.has_value_changed("event_frequency") or self.has_value_changed("cron_format"))
) or (self.has_value_changed("script_type") and self.script_type != "Scheduler Event"):
for scheduled_job in self.scheduled_jobs:
frappe.delete_doc("Scheduled Job Type", scheduled_job.name, delete_permanently=1)
scheduled_job_type.update(
{
"method": frappe.scrub(f"{self.name}-{self.event_frequency}"),
"frequency": self.event_frequency,
"cron_format": self.cron_format,
"stopped": self.disabled,
}
).save()
frappe.msgprint(_("Scheduled execution for script {0} has updated").format(self.name))
def check_if_compilable_in_restricted_context(self):
"""Check compilation errors and send them back as warnings."""
@ -247,43 +254,7 @@ class ServerScript(Document):
return items
def setup_scheduler_events(script_name: str, frequency: str, cron_format: str | None = None):
"""Creates or Updates Scheduled Job Type documents based on the specified script name and frequency
Args:
script_name (str): Name of the Server Script document
frequency (str): Event label compatible with the Frappe scheduler
"""
method = frappe.scrub(f"{script_name}-{frequency}")
scheduled_script = frappe.db.get_value("Scheduled Job Type", {"method": method})
if not scheduled_script:
frappe.get_doc(
{
"doctype": "Scheduled Job Type",
"method": method,
"frequency": frequency,
"server_script": script_name,
"cron_format": cron_format,
}
).insert()
frappe.msgprint(_("Enabled scheduled execution for script {0}").format(script_name))
else:
doc = frappe.get_doc("Scheduled Job Type", scheduled_script)
if doc.frequency == frequency:
return
doc.frequency = frequency
doc.cron_format = cron_format
doc.save()
frappe.msgprint(_("Scheduled execution for script {0} has updated").format(script_name))
def execute_api_server_script(script=None, *args, **kwargs):
def execute_api_server_script(script: ServerScript, *args, **kwargs):
# These are only added for compatibility with rate limiter.
del args
del kwargs

View file

@ -43,7 +43,7 @@ def run_server_script_for_doc_event(doc, event):
if scripts:
# run all scripts for this doctype + event
for script_name in scripts:
frappe.get_doc("Server Script", script_name).execute_doc(doc)
frappe.get_cached_doc("Server Script", script_name).execute_doc(doc)
def get_server_script_map():

View file

@ -1105,7 +1105,7 @@ class Database:
"""Return True if at least one row exists."""
return frappe.get_all(doctype, limit=1, order_by=None, as_list=True)
def exists(self, dt, dn=None, cache=False):
def exists(self, dt, dn=None, cache=False, *, debug=False):
"""Return the document name of a matching document, or None.
Note: `cache` only works if `dt` and `dn` are of type `str`.
@ -1138,7 +1138,7 @@ class Database:
dt = dt.copy() # don't modify the original dict
dt, dn = dt.pop("doctype"), dt
return self.get_value(dt, dn, ignore=True, cache=cache, order_by=None)
return self.get_value(dt, dn, ignore=True, cache=cache, order_by=None, debug=debug)
def count(self, dt, filters=None, debug=False, cache=False, distinct: bool = True):
"""Return `COUNT(*)` for given DocType and filters."""
@ -1268,7 +1268,7 @@ class Database:
def delete(self, doctype: str, filters: dict | list | None = None, debug=False, **kwargs):
"""Delete rows from a table in site which match the passed filters. This
does trigger DocType hooks. Simply runs a DELETE query in the database.
does not trigger DocType hooks. Simply runs a DELETE query in the database.
Doctype name can be passed directly, it will be pre-pended with `tab`.
"""

View file

@ -71,7 +71,7 @@ frappe.ui.form.on("Event", {
frappe.ui.form.on("Event Participants", {
event_participants_remove: function (frm, cdt, cdn) {
if (cdt && !cdn.includes("New Event Participants")) {
if (cdt && !cdn.includes("new-event-participants")) {
frappe.call({
type: "POST",
method: "frappe.desk.doctype.event.event.delete_communication",

View file

@ -106,7 +106,7 @@ class ToDo(Document):
frappe.db.set_single_value(
self.reference_type,
"_assign",
json.dumps(assignments),
json.dumps(assignments) if assignments else "",
update_modified=False,
)
else:
@ -114,7 +114,7 @@ class ToDo(Document):
self.reference_type,
self.reference_name,
"_assign",
json.dumps(assignments),
json.dumps(assignments) if assignments else "",
update_modified=False,
)

View file

@ -10,7 +10,7 @@ from frappe.utils import get_url_to_form
@frappe.whitelist()
def update_follow(doctype, doc_name, following):
def update_follow(doctype: str, doc_name: str, following: bool):
if following:
return follow_document(doctype, doc_name, frappe.session.user)
else:

View file

@ -60,10 +60,12 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
.run(as_dict=True)
)
if not frappe.get_meta(doctype).has_field(field) and not is_default_field(field):
meta = frappe.get_meta(doctype)
if not meta.has_field(field) and not is_default_field(field):
raise ValueError("Field does not belong to doctype")
return frappe.get_list(
data = frappe.get_list(
doctype,
filters=current_filters,
group_by=f"`tab{doctype}`.{field}",
@ -71,3 +73,13 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
order_by="count desc",
limit=50,
)
# Add in title if it's a link field and `show_title_field_in_link` is set
if (field_meta := meta.get_field(field)) and field_meta.fieldtype == "Link":
link_meta = frappe.get_meta(field_meta.options)
if link_meta.show_title_field_in_link:
title_field = link_meta.get_title_field()
for item in data:
item.title = frappe.get_value(field_meta.options, item.name, title_field)
return data

View file

@ -82,6 +82,8 @@ def make_tree_args(**kwarg):
if kwarg["is_root"] == "true":
kwarg["is_root"] = True
kwarg.update({parent_field: kwarg.get("parent") or kwarg.get(parent_field)})
parent = kwarg.get("parent") or kwarg.get(parent_field)
if doctype != parent:
kwarg.update({parent_field: parent})
return frappe._dict(kwarg)

View file

@ -299,9 +299,6 @@ def get_translations_from_mo(lang, app):
if m.context:
context = m.context.decode() # context is encoded as bytes
translations[f"{key}:{context}"] = m.string
if m.id not in translations:
# better a translation with context than no translation
translations[m.id] = m.string
else:
translations[m.id] = m.string
return translations

View file

@ -442,7 +442,6 @@ after_job = [
extend_bootinfo = [
"frappe.utils.telemetry.add_bootinfo",
"frappe.core.doctype.user_permission.user_permission.send_user_permissions",
"frappe.utils.sentry.add_bootinfo",
]
get_changelog_feed = "frappe.desk.doctype.changelog_feed.changelog_feed.get_feed"
@ -549,7 +548,7 @@ default_log_clearing_doctypes = {
# These keys will not be erased when doing frappe.clear_cache()
persistent_cache_keys = [
"update-user-set",
"update-info",
"changelog-*", # version update notifications
"insert_queue_for_*", # Deferred Insert
"recorder-*", # Recorder
]

File diff suppressed because it is too large Load diff

View file

@ -803,13 +803,14 @@ class BaseDocument:
# that are mapped as link_fieldname.source_fieldname in Options of
# Readonly or Data or Text type fields
meta = frappe.get_meta(doctype)
fields_to_fetch = [
_df
for _df in self.meta.get_fields_to_fetch(df.fieldname)
if not _df.get("fetch_if_empty")
or (_df.get("fetch_if_empty") and not self.get(_df.fieldname))
]
if not frappe.get_meta(doctype).get("is_virtual"):
if not meta.get("is_virtual"):
if not fields_to_fetch:
# cache a single value type
values = _dict(name=frappe.db.get_value(doctype, docname, "name", cache=True))
@ -827,10 +828,10 @@ class BaseDocument:
or empty_values
)
if getattr(frappe.get_meta(doctype), "issingle", 0):
if getattr(meta, "issingle", 0):
values.name = doctype
if frappe.get_meta(doctype).get("is_virtual"):
if meta.get("is_virtual"):
values = frappe.get_doc(doctype, docname).as_dict()
if values:
@ -840,7 +841,8 @@ class BaseDocument:
if self.is_new() or not self.docstatus.is_submitted() or _df.allow_on_submit:
self.set_fetch_from_value(doctype, _df, values)
notify_link_count(doctype, docname)
if not meta.istable:
notify_link_count(doctype, docname)
if not values.name:
invalid_links.append((df.fieldname, docname, get_msg(df, docname)))
@ -1027,7 +1029,7 @@ class BaseDocument:
frappe.throw(
_("{0}: '{1}' ({3}) will get truncated, as max characters allowed is {2}").format(
reference, _(df.label, context=df.parent), max_length, value
reference, frappe.bold(_(df.label, context=df.parent)), max_length, value
),
frappe.CharacterLengthExceededError,
title=_("Value too big"),

View file

@ -21,7 +21,7 @@ from frappe.model.naming import set_new_name, validate_name
from frappe.model.utils import is_virtual_doctype
from frappe.model.workflow import set_workflow_state_on_action, validate_workflow
from frappe.types import DF
from frappe.utils import compare, cstr, date_diff, file_lock, flt, now
from frappe.utils import Truthy, compare, cstr, date_diff, file_lock, flt, now
from frappe.utils.data import get_absolute_url, get_datetime, get_timedelta, getdate
from frappe.utils.global_search import update_global_search
@ -468,7 +468,7 @@ class Document(BaseDocument):
previous = self.get_doc_before_save()
if not previous:
return True
return Truthy(context="New Document")
previous_value = previous.get(fieldname)
current_value = self.get(fieldname)
@ -480,7 +480,10 @@ class Document(BaseDocument):
elif isinstance(previous_value, timedelta):
current_value = get_timedelta(current_value)
return previous_value != current_value
if previous_value != current_value:
return Truthy(value=previous_value)
return False
def set_new_name(self, force=False, set_name=None, set_child_names=True):
"""Calls `frappe.naming.set_new_name` for parent and child docs."""

View file

@ -451,27 +451,29 @@ def get_link_fields(doctype: str) -> list[dict]:
frappe.flags.link_fields = {}
if doctype not in frappe.flags.link_fields:
virtual_doctypes = frappe.get_all("DocType", {"is_virtual": 1}, pluck="name")
dt = frappe.qb.DocType("DocType")
df = frappe.qb.DocType("DocField")
cf = frappe.qb.DocType("Custom Field")
ps = frappe.qb.DocType("Property Setter")
standard_fields = (
standard_fields_query = (
frappe.qb.from_(df)
.inner_join(dt)
.on(df.parent == dt.name)
.select(df.parent, df.fieldname, dt.issingle.as_("issingle"))
.where(
(df.options == doctype)
& (df.fieldtype == "Link")
& (df.is_virtual == 0)
& (dt.is_virtual == 0)
)
.run(as_dict=True)
.where((df.options == doctype) & (df.fieldtype == "Link"))
)
if frappe.db.has_column("DocField", "is_virtual"):
standard_fields_query = standard_fields_query.where(df.is_virtual == 0)
virtual_doctypes = []
if frappe.db.has_column("DocType", "is_virtual"):
virtual_doctypes = frappe.get_all("DocType", {"is_virtual": 1}, pluck="name")
standard_fields_query = standard_fields_query.where(dt.is_virtual == 0)
standard_fields = standard_fields_query.run(as_dict=True)
cf_issingle = frappe.qb.from_(dt).select(dt.issingle).where(dt.name == cf.dt).as_("issingle")
custom_fields = (
frappe.qb.from_(cf)

View file

@ -0,0 +1,8 @@
import { Calendar as FullCalendar } from "@fullcalendar/core";
import dayGridPlugin from "@fullcalendar/daygrid";
import listPlugin from "@fullcalendar/list";
import timeGridPlugin from "@fullcalendar/timegrid";
import interactionPlugin from "@fullcalendar/interaction";
frappe.FullCalendar = FullCalendar;
frappe.FullCalendar.Plugins = [listPlugin, dayGridPlugin, timeGridPlugin, interactionPlugin];

View file

@ -117,7 +117,7 @@ frappe.ui.form.ControlTable = class ControlTable extends frappe.ui.form.Control
get_field(field_name) {
let fieldname;
field_name = field_name.toLowerCase();
this.grid.meta.fields.some((field) => {
this.grid?.meta?.fields.some((field) => {
if (frappe.model.no_value_type.includes(field.fieldtype)) {
return false;
}

View file

@ -196,7 +196,7 @@ frappe.ui.form.ControlTextEditor = class ControlTextEditor extends frappe.ui.for
}
get_quill_options() {
return {
const options = {
modules: {
toolbar: Object.keys(this.df).includes("get_toolbar_options")
? this.df.get_toolbar_options()
@ -211,6 +211,14 @@ frappe.ui.form.ControlTextEditor = class ControlTextEditor extends frappe.ui.for
bounds: this.quill_container[0],
placeholder: this.df.placeholder || "",
};
// In a grid row where space is constrained, hide the toolbar.
if (this.grid_row) {
options.theme = null;
options.modules.toolbar = [];
}
return options;
}
get_mention_options() {

View file

@ -1101,12 +1101,6 @@ export default class GridRow {
parent = column.field_area,
df = column.df;
// no text editor in grid
if (df.fieldtype == "Text Editor") {
df = Object.assign({}, df);
df.fieldtype = "Text";
}
var field = frappe.ui.form.make_control({
df: df,
parent: parent,

View file

@ -629,7 +629,10 @@ frappe.ui.form.Layout = class Layout {
// show grid row (if exists)
field.grid.grid_rows[0].show_form();
return true;
} else if (!frappe.model.no_value_type.includes(field.df.fieldtype)) {
} else if (
field.df.fieldtype === "Table MultiSelect" ||
!frappe.model.no_value_type.includes(field.df.fieldtype)
) {
this.set_focus(field);
return true;
}

View file

@ -16,6 +16,8 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
this.fields = this.get_fields();
this.make();
this.selected_fields = new Set();
}
get_fields() {
@ -337,12 +339,25 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
if (!$(e.target).is(":checkbox") && !$(e.target).is("a")) {
$(this).find(":checkbox").trigger("click");
}
let name = $(this).attr("data-item-name").trim();
if ($(this).find(":checkbox").is(":checked")) {
me.selected_fields.add(name);
} else {
me.selected_fields.delete(name);
}
});
this.$results.on("click", ".list-item--head :checkbox", (e) => {
this.$results
.find(".list-item-container .list-row-check")
.prop("checked", $(e.target).is(":checked"));
let checked = $(e.target).is(":checked");
this.$results.find(".list-item-container .list-row-check").each(function () {
$(this).prop("checked", checked);
const name = $(this).closest(".list-item-container").attr("data-item-name").trim();
if (checked) {
me.selected_fields.add(name);
} else {
me.selected_fields.delete(name);
}
});
});
this.$parent.find(".input-with-feedback").on("change", () => {
@ -510,12 +525,12 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
empty_list() {
// Store all checked items
let checked = this.get_checked_items().map((item) => {
return {
let checked = this.results
.filter((result) => this.selected_fields.has(result.name))
.map((item) => ({
...item,
checked: true,
};
});
}));
// Remove **all** items
this.$results.find(".list-item-container").remove();

View file

@ -139,6 +139,25 @@ frappe.ui.form.AssignToDialog = class AssignToDialog {
me.dialog.set_value("assign_to", assign_to);
}
user_group_list() {
let me = this;
let user_group = me.dialog.get_value("assign_to_user_group");
me.dialog.set_value("assign_to_me", 0);
if (user_group) {
let user_group_members = [];
frappe.db
.get_list("User Group Member", {
parent_doctype: "User Group",
filters: { parent: user_group },
fields: ["user"],
})
.then((response) => {
user_group_members = response.map((group_member) => group_member.user);
me.dialog.set_value("assign_to", user_group_members);
});
}
}
set_description_from_doc() {
let me = this;
@ -157,6 +176,13 @@ frappe.ui.form.AssignToDialog = class AssignToDialog {
default: 0,
onchange: () => me.assign_to_me(),
},
{
label: __("Assign To User Group"),
fieldtype: "Link",
fieldname: "assign_to_user_group",
options: "User Group",
onchange: () => me.user_group_list(),
},
{
fieldtype: "MultiSelectPills",
fieldname: "assign_to",

View file

@ -221,6 +221,8 @@ frappe.views.ListGroupBy = class ListGroupBy {
label = __("Me");
} else if (fieldtype && fieldtype == "Check") {
label = field.name == "0" ? __("No") : __("Yes");
} else if (fieldtype && fieldtype == "Link" && field.title) {
label = __(field.title);
} else {
label = __(field.name);
}

View file

@ -231,11 +231,15 @@ frappe.router = {
} else if (frappe.model.is_single(doctype_route.doctype)) {
route = ["Form", doctype_route.doctype, doctype_route.doctype];
} else if (meta.default_view) {
route = [
"List",
doctype_route.doctype,
this.list_views_route[meta.default_view.toLowerCase()],
];
if (meta.default_view === "Tree") {
route = ["Tree", doctype_route.doctype];
} else {
route = [
"List",
doctype_route.doctype,
this.list_views_route[meta.default_view.toLowerCase()],
];
}
} else {
route = ["List", doctype_route.doctype, "List"];
}

View file

@ -127,6 +127,10 @@ frappe.ui.Dialog = class Dialog extends frappe.ui.FieldGroup {
});
}
get $backdrop() {
return $(this.$wrapper.data("bs.modal")?._backdrop);
}
set_modal_size() {
if (!this.fields) {
this.size = "";
@ -241,7 +245,7 @@ frappe.ui.Dialog = class Dialog extends frappe.ui.FieldGroup {
this.$wrapper.removeClass("modal-minimize");
if (this.minimizable && this.is_minimized) {
$(".modal-backdrop").toggle();
this.$backdrop.show();
this.is_minimized = false;
}
@ -256,7 +260,7 @@ frappe.ui.Dialog = class Dialog extends frappe.ui.FieldGroup {
hide() {
if (this.animate && this.animation_speed === "slow") {
this.$wrapper.addClass("slow");
$(".modal-backdrop").addClass("slow");
this.$backdrop.addClass("slow");
}
this.$wrapper.modal("hide");
this.is_visible = false;
@ -279,7 +283,7 @@ frappe.ui.Dialog = class Dialog extends frappe.ui.FieldGroup {
}
toggle_minimize() {
$(".modal-backdrop").toggle();
this.$backdrop.toggle();
let modal = this.$wrapper.closest(".modal").toggleClass("modal-minimize");
modal.attr("tabindex") ? modal.removeAttr("tabindex") : modal.attr("tabindex", -1);
this.is_minimized = !this.is_minimized;

View file

@ -62,7 +62,7 @@ frappe.ui.FilterGroup = class {
}
set_popover_events() {
$(document.body).on("click", (e) => {
$(document.body).on("mousedown", (e) => {
if (this.wrapper && this.wrapper.is(":visible")) {
const in_datepicker =
$(e.target).is(".datepicker--cell") ||

View file

@ -28,7 +28,7 @@ frappe.views.CalendarView = class CalendarView extends frappe.views.ListView {
setup_defaults() {
return super.setup_defaults().then(() => {
this.page_title = __("{0} Calendar", [this.page_title]);
this.calendar_settings = frappe.views.calendar[this.doctype] || {};
this.calendar_settings = frappe.views.Calendar[this.doctype] || {};
this.calendar_name = frappe.get_route()[3];
});
}
@ -101,15 +101,7 @@ frappe.views.CalendarView = class CalendarView extends frappe.views.ListView {
}
get required_libs() {
let assets = [
"assets/frappe/js/lib/fullcalendar/fullcalendar.min.css",
"assets/frappe/js/lib/fullcalendar/fullcalendar.min.js",
];
let user_language = frappe.boot.lang;
if (user_language && user_language !== "en") {
assets.push("assets/frappe/js/lib/fullcalendar/locale-all.js");
}
return assets;
return "calendar.bundle.js";
}
};
@ -133,10 +125,10 @@ frappe.views.Calendar = class Calendar {
}
get_default_options() {
return new Promise((resolve) => {
let defaultView = localStorage.getItem("cal_defaultView");
let initialView = localStorage.getItem("cal_initialView");
let weekends = localStorage.getItem("cal_weekends");
let defaults = {
defaultView: defaultView ? defaultView : "month",
initialView: initialView ? initialView : "dayGridMonth",
weekends: weekends ? weekends : true,
};
resolve(defaults);
@ -162,13 +154,13 @@ frappe.views.Calendar = class Calendar {
});
$(this.parent).on("show", function () {
me.$cal.fullCalendar("refetchEvents");
me.$cal.fullCalendar.refetchEvents();
});
}
make() {
this.$wrapper = this.parent;
this.$cal = $("<div>").appendTo(this.$wrapper);
this.$cal = $("<div id='fc-calendar-wrapper'>").appendTo(this.$wrapper);
this.footnote_area = frappe.utils.set_footnote(
this.footnote_area,
this.$wrapper,
@ -176,7 +168,9 @@ frappe.views.Calendar = class Calendar {
);
this.footnote_area.css({ "border-top": "0px" });
this.$cal.fullCalendar(this.cal_options);
this.fullCalendar = new frappe.FullCalendar(this.$cal[0], this.cal_options);
this.fullCalendar.render();
this.set_css();
}
setup_view_mode_button(defaults) {
@ -194,143 +188,152 @@ frappe.views.Calendar = class Calendar {
const me = this;
let btn_group = me.$wrapper.find(".fc-button-group");
btn_group.on("click", ".btn", function () {
let value = $(this).hasClass("fc-agendaWeek-button")
? "agendaWeek"
: $(this).hasClass("fc-agendaDay-button")
? "agendaDay"
: "month";
me.set_localStorage_option("cal_defaultView", value);
let value = $(this).hasClass("fc-dayGridWeek-button")
? "dayGridWeek"
: $(this).hasClass("fc-dayGridDay-button")
? "dayGridDay"
: "dayGridMonth";
me.set_localStorage_option("cal_initialView", value);
});
me.$wrapper.on("click", ".btn-weekend", function () {
me.cal_options.weekends = !me.cal_options.weekends;
me.$cal.fullCalendar("option", "weekends", me.cal_options.weekends);
me.fullCalendar.setOption("weekends", me.cal_options.weekends);
me.set_localStorage_option("cal_weekends", me.cal_options.weekends);
me.set_css();
me.setup_view_mode_button(me.cal_options);
});
}
set_css() {
// flatify buttons
const viewButtons =
".fc-dayGridMonth-button, .fc-dayGridWeek-button, .fc-dayGridDay-button, .fc-today-button";
const fcViewButtonClasses = "fc-button fc-button-primary fc-button-active";
// remove fc-button styles
this.$wrapper
.find("button.fc-state-default")
.removeClass("fc-state-default")
.find("button.fc-button")
.removeClass(fcViewButtonClasses)
.addClass("btn btn-default");
this.$wrapper
.find(".fc-month-button, .fc-agendaWeek-button, .fc-agendaDay-button")
.wrapAll('<div class="btn-group" />');
// group all view buttons
this.$wrapper.find(viewButtons).wrapAll('<div class="btn-group" />');
// add icons
this.$wrapper
.find(".fc-prev-button span")
.find(`.fc-prev-button span`)
.attr("class", "")
.html(frappe.utils.icon("left"));
this.$wrapper
.find(".fc-next-button span")
.find(`.fc-next-button span`)
.attr("class", "")
.html(frappe.utils.icon("right"));
if (this.$wrapper.find(".fc-today-button svg").length == 0)
this.$wrapper.find(".fc-today-button").prepend(frappe.utils.icon("today"));
this.$wrapper.find(".fc-today-button").prepend(frappe.utils.icon("today"));
this.$wrapper.find(".fc-day-number").wrap('<div class="fc-day"></div>');
// v6.x of fc has weird behaviour which removes all the custom classes
// on header buttons on click, event below re-adds all the classes
var btn_group = this.$wrapper.find(".fc-button-group");
btn_group.find(".fc-state-active").addClass("active");
btn_group.find(".fc-button-active").addClass("active");
btn_group.find(".btn").on("click", function () {
btn_group.find(".btn").removeClass("active");
btn_group
.find(viewButtons)
.removeClass(`active ${fcViewButtonClasses}`)
.addClass("btn btn-default");
$(this).addClass("active");
});
}
get_system_datetime(date) {
date._offset = moment(date).tz(frappe.sys_defaults.time_zone)._offset;
return frappe.datetime.convert_to_system_tz(moment(date).locale("en"));
return frappe.datetime.convert_to_system_tz(date, true);
}
setup_options(defaults) {
var me = this;
defaults.meridiem = "false";
this.cal_options = {
plugins: frappe.FullCalendar.Plugins,
initialView: defaults.initialView || "dayGridMonth",
locale: frappe.boot.lang,
header: {
left: "prev, title, next",
right: "today, month, agendaWeek, agendaDay",
firstDay: 1,
headerToolbar: {
left: "prev,title,next",
center: "",
right: "today,dayGridMonth,dayGridWeek,dayGridDay",
},
editable: true,
droppable: true,
selectable: true,
selectHelper: true,
selectMirror: true,
forceEventDuration: true,
displayEventTime: true,
defaultView: defaults.defaultView,
weekends: defaults.weekends,
nowIndicator: true,
themeSystem: null,
buttonText: {
today: __("Today"),
month: __("Month"),
week: __("Week"),
day: __("Day"),
},
events: function (start, end, timezone, callback) {
events: function (info, successCallback, failureCallback) {
return frappe.call({
method: me.get_events_method || "frappe.desk.calendar.get_events",
type: "GET",
args: me.get_args(start, end),
args: me.get_args(info.start, info.end),
callback: function (r) {
var events = r.message || [];
events = me.prepare_events(events);
callback(events);
successCallback(events);
},
});
},
displayEventEnd: true,
eventRender: function (event, element) {
element.attr("title", event.tooltip);
},
eventClick: function (event) {
eventClick: function (info) {
// edit event description or delete
var doctype = event.doctype || me.doctype;
var doctype = info.doctype || me.doctype;
if (frappe.model.can_read(doctype)) {
frappe.set_route("Form", doctype, event.name);
frappe.set_route("Form", doctype, info.event.id);
}
},
eventDrop: function (event, delta, revertFunc) {
me.update_event(event, revertFunc);
eventDrop: function (info) {
me.update_event(info.event, info.revert);
},
eventResize: function (event, delta, revertFunc) {
me.update_event(event, revertFunc);
eventResize: function (info) {
me.update_event(info.event, info.revert);
},
select: function (startDate, endDate, jsEvent, view) {
if (view.name === "month" && endDate - startDate === 86400000) {
select: function (info) {
const seconds = info.end - info.start;
const allDay = seconds === 86400000;
if (info.view.type === "dayGridMonth" && allDay) {
// detect single day click in month view
return;
}
var event = frappe.model.get_new_doc(me.doctype);
event[me.field_map.start] = me.get_system_datetime(startDate);
event[me.field_map.start] = me.get_system_datetime(info.start);
if (me.field_map.end) event[me.field_map.end] = me.get_system_datetime(info.end);
if (me.field_map.end) event[me.field_map.end] = me.get_system_datetime(endDate);
if (me.field_map.allDay) {
var all_day = startDate._ambigTime && endDate._ambigTime ? 1 : 0;
event[me.field_map.allDay] = all_day;
if (all_day)
event[me.field_map.end] = me.get_system_datetime(
moment(endDate).subtract(1, "s")
);
if (seconds >= 86400000) {
if (allDay) {
// all-day click
event[me.field_map.allDay] = 1;
}
// incase of all day or multiple day events -1 sec
event[me.field_map.end] = me.get_system_datetime(info.end - 1);
}
frappe.set_route("Form", me.doctype, event.name);
},
dayClick: function (date, jsEvent, view) {
if (view.name === "month") {
const $date_cell = $("td[data-date=" + date.format("YYYY-MM-DD") + "]");
dateClick: function (info) {
if (info.view.type === "dayGridMonth") {
const $date_cell = $(
"td[data-date=" + info.date.toISOString().slice(0, 10) + "]"
);
if ($date_cell.hasClass("date-clicked")) {
me.$cal.fullCalendar("changeView", "agendaDay");
me.$cal.fullCalendar("gotoDate", date);
me.fullCalendar.changeView("timeGridDay", info.date);
me.$wrapper.find(".date-clicked").removeClass("date-clicked");
// update "active view" btn
@ -340,6 +343,13 @@ frappe.views.Calendar = class Calendar {
me.$wrapper.find(".date-clicked").removeClass("date-clicked");
$date_cell.addClass("date-clicked");
// explicitly remove the fc primary button styling that is append on view change
// from month -> day
$("#fc-calendar-wrapper")
.find("button.fc-button")
.removeClass("fc-button fc-button-primary fc-button-active")
.addClass("btn btn-default");
}
return false;
},
@ -361,7 +371,7 @@ frappe.views.Calendar = class Calendar {
return args;
}
refresh() {
this.$cal.fullCalendar("refetchEvents");
this.fullCalendar.refetchEvents();
}
prepare_events(events) {
var me = this;
@ -400,7 +410,6 @@ frappe.views.Calendar = class Calendar {
d.end = frappe.datetime.add_days(d.start, 1);
}
me.fix_end_date_for_event_render(d);
me.prepare_colors(d);
d.title = frappe.utils.html2text(d.title);
@ -431,7 +440,7 @@ frappe.views.Calendar = class Calendar {
}
update_event(event, revertFunc) {
var me = this;
frappe.model.remove_from_locals(me.doctype, event.name);
frappe.model.remove_from_locals(me.doctype, event.id);
return frappe.call({
method: me.update_event_method || "frappe.desk.calendar.update_event",
args: me.get_update_args(event),
@ -449,13 +458,14 @@ frappe.views.Calendar = class Calendar {
get_update_args(event) {
var me = this;
var args = {
name: event[this.field_map.id],
name: event.id,
};
args[this.field_map.start] = me.get_system_datetime(event.start);
if (this.field_map.allDay)
args[this.field_map.allDay] = event.start._ambigTime && event.end._ambigTime ? 1 : 0;
if (this.field_map.allDay) {
args[this.field_map.allDay] = event.end - event.start === 86400000 ? 1 : 0;
}
if (this.field_map.end) {
if (!event.end) {
@ -463,11 +473,8 @@ frappe.views.Calendar = class Calendar {
}
args[this.field_map.end] = me.get_system_datetime(event.end);
if (args[this.field_map.allDay]) {
args[this.field_map.end] = me.get_system_datetime(
moment(event.end).subtract(1, "s")
);
args[this.field_map.end] = me.get_system_datetime(new Date(event.end - 1000));
}
}
@ -475,14 +482,4 @@ frappe.views.Calendar = class Calendar {
return { args: args, field_map: this.field_map };
}
fix_end_date_for_event_render(event) {
if (event.allDay) {
// We use inclusive end dates. This workaround fixes the rendering of events
event.start = event.start ? $.fullCalendar.moment(event.start).stripTime() : null;
event.end = event.end
? $.fullCalendar.moment(event.end).add(1, "day").stripTime()
: null;
}
}
};

View file

@ -180,13 +180,8 @@ frappe.views.TreeView = class TreeView {
args: me.args,
callback: function (r) {
if (r.message) {
if (r.message.length > 1) {
me.root_label = me.doctype;
me.root_value = "";
} else {
me.root_label = r.message[0]["value"];
me.root_value = me.root_label;
}
me.root_label = me.doctype;
me.root_value = "";
me.make_tree();
}
},

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -230,7 +230,8 @@
height: 46px !important;
}
.form-control {
.form-control,
.ql-editor {
border-radius: 0px;
border: 0px;
padding-top: 10px;
@ -300,6 +301,14 @@
.grid-static-col[data-fieldtype="Text Editor"] {
overflow: hidden;
margin: 0 !important;
.ql-editor {
overflow-y: auto !important;
min-height: 0 !important;
max-height: unset !important;
line-height: 1.3 !important;
}
}
}

View file

@ -306,3 +306,24 @@ body.modal-open[style^="padding-right"] {
margin-right: var(--margin-md);
}
}
// Stack minimized modals
@for $i from 1 through 5 {
// 5n + 1, 5n + 2, ...
body > .modal:nth-child(5n + #{$i} of .show.modal-minimize) {
--minimized-modal-index: #{$i};
}
}
.modal-minimize ~ .modal-minimize {
.modal-dialog {
bottom: calc(44px * (var(--minimized-modal-index) - 1));
}
.modal-header {
border-bottom: 0px;
}
.modal-content {
// Rounded chip style
border-radius: var(--border-radius-md);
overflow: hidden;
}
}

View file

@ -1,52 +1,53 @@
.fc-unthemed {
.fc-theme-standard {
padding: 20px;
color: var(--text-light) !important;
}
.fc-theme-standard a {
color: var(--text-light);
}
.fc-toolbar {
// padding-top: 30px;
padding-bottom: 15px;
margin-bottom: 0px !important;
}
.fc-toolbar-chunk div {
display: flex;
}
.fc-view-container {
margin-left: -1px;
margin-right: -1px;
}
.fc-head-container {
// border-top: 0 !important;
border: none !important;
}
th.fc-widget-header {
border: none !important;
th.fc-col-header-cell {
color: var(--gray-500);
font-weight: 600;
}
// th {
// border: none !important;
// }
.fc-unthemed td,
.fc-unthemed hr,
.fc-unthemed thead,
.fc-unthemed tbody,
.fc-unthemed .fc-row,
.fc-unthemed .fc-popover {
.fc-theme-standard td,
.fc-theme-standard hr,
.fc-theme-standard thead,
.fc-theme-standard tbody,
.fc-theme-standard .fc-row,
.fc-theme-standard .fc-popover {
border-color: var(--gray-300) !important;
}
.fc-unthemed td.fc-sun {
.fc-theme-standard td.fc-day-sun {
background: var(--highlight-color);
}
.fc-unthemed .fc-today {
.fc-theme-standard .fc-day-today {
background-color: var(--fg-color) !important;
.fc-day-number {
background-color: var(--blue-500);
.fc-daygrid-day-number {
background-color: var(--gray-700);
border-radius: 50%;
color: $white;
height: 22px;
@ -55,61 +56,64 @@ th.fc-widget-header {
display: flex;
justify-content: center;
text-align: center;
padding: 0;
}
}
// .fc-highlight {
// background-color: $light-yellow !important;
// }
.fc-event {
// border: 1px solid #E8DDFF; /* default BORDER color */
background-color: #e8ddff;
background-color: rgb(237, 246, 253);
border: none !important;
}
.fc-event-main .fc-event-time {
display: none;
}
.fc-time-grid-event {
border: none !important;
}
// @media (max-width: $screen-xs) {
// .fc-scroller {
// height: auto !important;
// }
// }
.fc-day-top {
padding: 5px 10px 0 0 !important;
}
.fc-day {
margin-left: 10px;
.fc-day-number {
.fc-daygrid-day-top {
margin: 5px 0 0 10px;
flex-direction: row !important;
.fc-daygrid-day-number {
float: left !important;
}
}
th.fc-day-header {
th.fc-col-header-cell {
padding: 10px 12px 10px 0 !important;
text-transform: uppercase;
font-size: 12px;
}
.fc-event-container .fc-content {
.fc-daygrid-dot-event {
padding: 3px;
display: flex;
flex-direction: column-reverse;
align-items: normal;
color: rgb(0, 112, 204) !important;
.fc-time {
.fc-event-time {
font-weight: normal;
margin-top: 2px;
}
.fc-title {
.fc-event-title {
font-weight: 600;
}
.fc-daygrid-event-dot {
display: none;
}
}
.fc-left h2 {
font-size: $font-size-lg;
.fc-toolbar-title {
font-size: $font-size-lg !important;
font-weight: 500;
line-height: 28px;
height: 28px;
@ -120,9 +124,6 @@ th.fc-day-header {
font-size: var(--text-md) !important;
outline: none !important;
text-transform: capitalize;
// .fc-icon {
// top: -1px !important;
// }
}
.fc-right button {
@ -131,29 +132,51 @@ th.fc-day-header {
.fc-left button {
width: 80px;
// svg {
// margin-right: 5px;
// }
}
.fc-state-active {
.fc-button-active {
box-shadow: none !important;
background: var(--gray-500) !important;
color: var(--fg-color) !important;
z-index: 0 !important;
}
.fc-day-grid-event {
//override default and fc-button styles
.fc-dayGridMonth-button,
.fc-dayGridWeek-button,
.fc-dayGridDay-button {
border: none !important;
border-radius: 0;
background-color: var(--control-bg);
color: var(--text-color);
}
.fc-dayGridMonth-button {
border-top-left-radius: var(--border-radius) !important;
border-bottom-left-radius: var(--border-radius) !important;
}
.fc-dayGridDay-button {
border-top-right-radius: var(--border-radius) !important;
border-bottom-right-radius: var(--border-radius) !important;
}
.fc-prev-button {
margin-right: 10px !important;
}
.fc-next-button {
margin-left: 10px;
}
.fc-today-button {
margin-right: 10px;
border-radius: var(--border-radius) !important;
}
.fc-daygrid-event {
border: none !important;
margin: 5px 4px 0 !important;
padding: 1px 5px !important;
}
// .result .footnote-area {
// padding: 15px 10px 0 30px;
// }
.fc-time-grid .fc-slats .fc-minor td {
border-top-style: none !important;
}
@ -185,7 +208,6 @@ th.fc-day-header {
.fc-day-grid {
border-bottom: 1px solid var(--gray-300);
// height: 2em !important;
}
.fc-divider {

View file

@ -1,10 +1,11 @@
from datetime import time, timedelta
from datetime import datetime, time, timedelta
from typing import Any
from pypika.queries import QueryBuilder
from pypika.terms import Criterion, Function, ValueWrapper
from pypika.utils import format_alias_sql
import frappe
from frappe.utils.data import format_time, format_timedelta
@ -56,6 +57,8 @@ class ParameterizedValueWrapper(ValueWrapper):
self.value = format_timedelta(self.value)
elif isinstance(self.value, time):
self.value = format_time(self.value)
elif isinstance(self.value, datetime):
self.value = frappe.db.format_datetime(self.value)
sql = self.get_value_sql(
quote_char=quote_char,

View file

@ -446,6 +446,9 @@ def make_test_objects(doctype, test_records=None, verbose=None, reset=False, com
test_records = frappe.get_test_records(doctype)
for doc in test_records:
if not reset:
frappe.db.savepoint("creating_test_record")
if not doc.get("doctype"):
doc["doctype"] = doctype
@ -461,7 +464,7 @@ def make_test_objects(doctype, test_records=None, verbose=None, reset=False, com
d.set_new_name()
if frappe.db.exists(d.doctype, d.name) and not reset:
frappe.db.rollback()
frappe.db.rollback(save_point="creating_test_record")
# do not create test records, if already exists
continue

View file

@ -185,6 +185,34 @@ class TestRedisCache(FrappeAPITestCase):
calculate_area(10)
self.assertEqual(function_call_count, 2)
def test_user_cache(self):
function_call_count = 0
PI = 3.1415
ENGINEERING_PI = _E = 3
@redis_cache(user=True)
def calculate_area(radius: float) -> float:
nonlocal function_call_count
PI_APPROX = ENGINEERING_PI if frappe.session.user == "Engineer" else PI
function_call_count += 1
return PI_APPROX * radius**2
with self.set_user("Engineer"):
self.assertEqual(calculate_area(1), ENGINEERING_PI)
self.assertEqual(function_call_count, 1)
with self.set_user("Mathematician"):
self.assertEqual(calculate_area(1), PI)
self.assertEqual(function_call_count, 2)
with self.set_user("Engineer"):
self.assertEqual(calculate_area(1), ENGINEERING_PI)
self.assertEqual(function_call_count, 2)
with self.set_user("Mathematician"):
self.assertEqual(calculate_area(1), PI)
self.assertEqual(function_call_count, 2)
class TestDocumentCache(FrappeAPITestCase):
TEST_DOCTYPE = "User"

View file

@ -892,12 +892,12 @@ class TestAddNewUser(BaseTestCommands):
class TestBenchBuild(BaseTestCommands):
def test_build_assets_size_check(self):
with cli(frappe.commands.utils.build, "--force --production") as result:
with cli(frappe.commands.utils.build, "--force --production --app frappe") as result:
self.assertEqual(result.exit_code, 0)
self.assertEqual(result.exception, None)
CURRENT_SIZE = 3.5 # MB
JS_ASSET_THRESHOLD = 0.1
CURRENT_SIZE = 3.3 # MB
JS_ASSET_THRESHOLD = 0.01
hooks = frappe.get_hooks()
default_bundle = hooks["app_include_js"]
@ -925,15 +925,6 @@ class TestDBUtils(BaseTestCommands):
meta = frappe.get_meta("User", cached=False)
self.assertTrue(meta.get_field(field).search_index)
@run_only_if(db_type_is.MARIADB)
def test_describe_table(self):
self.execute("bench --site {site} describe-database-table --doctype User", {})
self.assertIn("user_type", self.stdout)
# Ensure that output is machine parseable
stats = json.loads(self.stdout)
self.assertIn("total_rows", stats)
class TestSchedulerUtils(BaseTestCommands):
# Retry just in case there are stuck queued jobs

View file

@ -17,6 +17,7 @@ from frappe.query_builder.functions import Concat_ws
from frappe.tests.test_query_builder import db_type_is, run_only_if
from frappe.tests.utils import FrappeTestCase, patch_hooks, timeout
from frappe.utils import add_days, now, random_string, set_request
from frappe.utils.data import now_datetime
from frappe.utils.testutils import clear_custom_fields
@ -504,6 +505,19 @@ class TestDB(FrappeTestCase):
self.assertEqual(frappe.db.exists(dt, [["name", "=", dn]]), dn)
def test_datetime_serialization(self):
dt = now_datetime()
dt = dt.replace(microsecond=0)
self.assertEqual(str(dt), str(frappe.db.sql("select %s", dt)[0][0]))
frappe.db.exists("User", {"creation": (">", dt)})
self.assertIn(str(dt), str(frappe.db.last_query))
before = now_datetime()
note = frappe.get_doc(doctype="Note", title=frappe.generate_hash(), content="something").insert()
after = now_datetime()
self.assertEqual(note.name, frappe.db.exists("Note", {"creation": ("between", (before, after))}))
def test_bulk_insert(self):
current_count = frappe.db.count("ToDo")
test_body = f"test_bulk_insert - {random_string(10)}"

View file

@ -17,6 +17,7 @@ from frappe.translate import (
extract_messages_from_javascript_code,
extract_messages_from_python_code,
get_language,
get_messages_for_app,
get_parent_language,
get_translation_dict_from_file,
)
@ -314,6 +315,8 @@ def verify_translation_files(app):
lang = file.stem # basename of file = lang
get_translation_dict_from_file(file, lang, app, throw=True)
get_messages_for_app(app)
expected_output = [
("Warning: Unable to find {0} in any table related to {1}", "This is some context", 2),

View file

@ -42,6 +42,8 @@ EMAIL_MATCH_PATTERN = re.compile(
re.IGNORECASE,
)
UNSET = object()
def get_fullname(user=None):
"""get the full name (first name + last name) of the user from User"""
@ -1166,3 +1168,21 @@ class CallbackManager:
def reset(self):
self._functions.clear()
class Truthy:
def __init__(self, value=True, context=UNSET):
self.value = value
self.context = context
def __bool__(self):
return True
def __eq__(self, other: object) -> bool:
return True == other # noqa: E712
def __repr__(self) -> str:
_val = "UNSET" if self.value is UNSET else self.value
_ctx = "UNSET" if self.context is UNSET else self.context
return f"Truthy(value={_val}, context={_ctx})"

View file

@ -659,6 +659,11 @@ jobs:
- name: Clone
uses: actions/checkout@v3
- name: Find tests
run: |
echo "Finding tests"
grep -rn "def test" > /dev/null
- name: Setup Python
uses: actions/setup-python@v4
with:

View file

@ -132,12 +132,13 @@ def site_cache(ttl: int | None = None, maxsize: int | None = None) -> Callable:
return time_cache_wrapper
def redis_cache(ttl: int | None = 3600, user: str | bool | None = None) -> Callable:
def redis_cache(ttl: int | None = 3600, user: str | bool | None = None, shared: bool = False) -> Callable:
"""Decorator to cache method calls and its return values in Redis
args:
ttl: time to expiry in seconds, defaults to 1 hour
user: `true` should cache be specific to session user.
shared: `true` should cache be shared across sites
"""
def wrapper(func: Callable | None = None) -> Callable:
@ -152,11 +153,11 @@ def redis_cache(ttl: int | None = 3600, user: str | bool | None = None) -> Calla
@wraps(func)
def redis_cache_wrapper(*args, **kwargs):
func_call_key = func_key + "::" + str(__generate_request_cache_key(args, kwargs))
if frappe.cache.exists(func_call_key):
return frappe.cache.get_value(func_call_key, user=user)
if frappe.cache.exists(func_call_key, user=user, shared=shared):
return frappe.cache.get_value(func_call_key, user=user, shared=shared)
val = func(*args, **kwargs)
ttl = getattr(func, "ttl", 3600)
frappe.cache.set_value(func_call_key, val, expires_in_sec=ttl, user=user)
frappe.cache.set_value(func_call_key, val, expires_in_sec=ttl, user=user, shared=shared)
return val
return redis_cache_wrapper

View file

@ -11,6 +11,7 @@ from semantic_version import SimpleSpec, Version
import frappe
from frappe import _, safe_decode
from frappe.utils import cstr
from frappe.utils.caching import redis_cache
from frappe.utils.frappecloud import on_frappecloud
@ -211,7 +212,7 @@ def check_for_update():
def has_app_update_notifications() -> bool:
return bool(frappe.cache.sismember("update-user-set", frappe.session.user))
return bool(frappe.cache.sismember("changelog-update-user-set", frappe.session.user))
def parse_latest_non_beta_release(response: list, current_version: Version) -> list | None:
@ -250,22 +251,16 @@ def check_release_on_github(
raise ValueError("Repo cannot be empty")
# Get latest version from GitHub
r = requests.get(f"https://api.github.com/repos/{owner}/{repo}/releases")
if r.ok:
latest_non_beta_release = parse_latest_non_beta_release(r.json(), current_version)
if latest_non_beta_release:
return Version(latest_non_beta_release), owner
releases = _get_latest_releases(owner, repo)
latest_non_beta_release = parse_latest_non_beta_release(releases, current_version)
if latest_non_beta_release:
return Version(latest_non_beta_release), owner
return None, None
def security_issues_count(owner: str, repo: str, current_version: Version, target_version: Version) -> int:
import requests
r = requests.get(f"https://api.github.com/repos/{owner}/{repo}/security-advisories")
if not r.ok:
return 0
advisories = r.json()
advisories = _get_security_issues(owner, repo)
def applicable(advisory) -> bool:
# Current version is in vulnerable range
@ -285,6 +280,28 @@ def security_issues_count(owner: str, repo: str, current_version: Version, targe
return len([sa for sa in advisories if applicable(sa)])
@redis_cache(ttl=6 * 24 * 60 * 60, shared=True)
def _get_latest_releases(owner, repo):
import requests
r = requests.get(f"https://api.github.com/repos/{owner}/{repo}/releases")
if not r.ok:
return []
return r.json()
@redis_cache(ttl=6 * 24 * 60 * 60, shared=True)
def _get_security_issues(owner, repo):
import requests
r = requests.get(f"https://api.github.com/repos/{owner}/{repo}/security-advisories")
if not r.ok:
return []
return r.json()
def parse_github_url(remote_url: str) -> tuple[str, str] | tuple[None, None]:
"""Parse the remote URL to get the owner and repo name."""
import re
@ -307,11 +324,11 @@ def get_source_url(app: str) -> str | None:
def add_message_to_redis(update_json):
# "update-message" will store the update message string
# "update-user-set" will be a set of users
frappe.cache.set_value("update-info", json.dumps(update_json))
# "changelog-update-user-set" will be a set of users
frappe.cache.set_value("changelog-update-info", json.dumps(update_json))
user_list = [x.name for x in frappe.get_all("User", filters={"enabled": True})]
system_managers = [user for user in user_list if "System Manager" in frappe.get_roles(user)]
frappe.cache.sadd("update-user-set", *system_managers)
frappe.cache.sadd("changelog-update-user-set", *system_managers)
@frappe.whitelist()
@ -320,7 +337,7 @@ def show_update_popup():
return
user = frappe.session.user
update_info = frappe.cache.get_value("update-info")
update_info = frappe.cache.get_value("changelog-update-info")
if not update_info:
return
@ -328,7 +345,7 @@ def show_update_popup():
# Check if user is int the set of users to send update message to
update_message = ""
if frappe.cache.sismember("update-user-set", user):
if frappe.cache.sismember("changelog-update-user-set", user):
for update_type in updates:
release_links = ""
for app in updates[update_type]:
@ -373,7 +390,7 @@ def show_update_popup():
indicator="green",
primary_action=primary_action,
)
frappe.cache.srem("update-user-set", user)
frappe.cache.srem("changelog-update-user-set", user)
def get_pyproject(app: str) -> dict | None:

View file

@ -2,12 +2,14 @@
# License: MIT. See LICENSE
import csv
import json
from csv import Sniffer
from io import StringIO
import requests
import frappe
from frappe import _, msgprint
from frappe.core.doctype.file.file import FILE_ENCODING_OPTIONS
from frappe.utils import cint, comma_or, cstr, flt
@ -39,7 +41,7 @@ def read_csv_content_from_attached_file(doc):
def read_csv_content(fcontent):
if not isinstance(fcontent, str):
decoded = False
for encoding in ["utf-8", "windows-1250", "windows-1252"]:
for encoding in FILE_ENCODING_OPTIONS:
try:
fcontent = str(fcontent, encoding)
decoded = True
@ -49,15 +51,35 @@ def read_csv_content(fcontent):
if not decoded:
frappe.msgprint(
_("Unknown file encoding. Tried utf-8, windows-1250, windows-1252."), raise_exception=True
_("Unknown file encoding. Tried to use: {0}").format(", ".join(FILE_ENCODING_OPTIONS)),
raise_exception=True,
)
fcontent = fcontent.encode("utf-8")
content = [frappe.safe_decode(line) for line in fcontent.splitlines(True)]
sniffer = Sniffer()
# Don't need to use whole csv, if more than 20 rows, use just first 20
sample_content = content[:20] if len(content) > 20 else content
# only testing for most common delimiter types, this later can be extended
# init default dialect, to avoid lint errors
dialect = csv.get_dialect("excel")
try:
# csv by default uses excel dialect, which is not always correct
dialect = sniffer.sniff(sample="\n".join(sample_content), delimiters=frappe.flags.delimiter_options)
except csv.Error:
# if sniff fails, show alert on user interface. Fall back to use default dialect (excel)
frappe.msgprint(
_(
"Delimiter detection failed. Try to enable custom delimiters and adjust the delimiter options as per your data."
),
indicator="orange",
alert=True,
)
try:
rows = []
for row in csv.reader(content):
for row in csv.reader(content, dialect=dialect):
r = []
for val in row:
# decode everything

View file

@ -66,7 +66,7 @@ def validate_template(html):
try:
jenv.from_string(html)
except TemplateSyntaxError as e:
frappe.throw(frappe._(f"Syntax error in template as line {e.lineno}: {e.message}"))
frappe.throw(f"Syntax error in template as line {e.lineno}: {e.message}")
def render_template(template, context=None, is_path=None, safe_render=True):

View file

@ -140,13 +140,3 @@ def capture_exception(message: str | None = None) -> None:
except Exception:
frappe.logger().error("Failed to capture exception", exc_info=True)
pass
def add_bootinfo(bootinfo):
"""Called from hook, sends DSN so client side can setup error monitoring."""
if not frappe.get_system_settings("enable_telemetry"):
return
if sentry_dsn := os.getenv("FRAPPE_SENTRY_DSN"):
bootinfo.sentry_dsn = sentry_dsn

View file

@ -22,6 +22,11 @@
"dependencies": {
"@editorjs/editorjs": "^2.28.2",
"@frappe/esbuild-plugin-postcss2": "^0.1.3",
"@fullcalendar/core": "^6.1.11",
"@fullcalendar/daygrid": "^6.1.11",
"@fullcalendar/list": "^6.1.11",
"@fullcalendar/timegrid": "^6.1.11",
"@fullcalendar/interaction": "^6.1.11",
"@headlessui/vue": "^1.7.16",
"@popperjs/core": "^2.11.2",
"@redis/client": "^1.5.8",
@ -31,7 +36,7 @@
"@vue/component-compiler": "^4.2.4",
"@vueuse/core": "^9.5.0",
"ace-builds": "^1.4.8",
"air-datepicker": "github:frappe/air-datepicker",
"air-datepicker": "git+https://github.com/frappe/air-datepicker",
"autoprefixer": "10",
"awesomplete": "^1.1.5",
"bootstrap": "4.6.2",
@ -87,4 +92,4 @@
"bufferutil": "^4.0.8",
"utf-8-validate": "^6.0.3"
}
}
}

View file

@ -55,6 +55,35 @@
stylus "^0.x"
tmp "^0.2.1"
"@fullcalendar/core@^6.1.11":
version "6.1.11"
resolved "https://registry.yarnpkg.com/@fullcalendar/core/-/core-6.1.11.tgz#f9630e83ae977e774992507635b1e7af4c339d37"
integrity sha512-TjG7c8sUz+Vkui2FyCNJ+xqyu0nq653Ibe99A66LoW95oBo6tVhhKIaG1Wh0GVKymYiqAQN/OEdYTuj4ay27kA==
dependencies:
preact "~10.12.1"
"@fullcalendar/daygrid@^6.1.11", "@fullcalendar/daygrid@~6.1.11":
version "6.1.11"
resolved "https://registry.yarnpkg.com/@fullcalendar/daygrid/-/daygrid-6.1.11.tgz#83a5d4a94c314cf3a14b06bebba03b1b40e6d2ba"
integrity sha512-hF5jJB7cgUIxWD5MVjj8IU407HISyLu7BWXcEIuTytkfr8oolOXeCazqnnjmRbnFOncoJQVstTtq6SIhaT32Xg==
"@fullcalendar/interaction@^6.1.11":
version "6.1.11"
resolved "https://registry.yarnpkg.com/@fullcalendar/interaction/-/interaction-6.1.11.tgz#baa3beec8f5c489fb6904973b175a5f4797abdf3"
integrity sha512-ynOKjzuPwEAMgTQ6R/Z2zvzIIqG4p8/Qmnhi1q0vzPZZxSIYx3rlZuvpEK2WGBZZ1XEafDOP/LGfbWoNZe+qdg==
"@fullcalendar/list@^6.1.11":
version "6.1.11"
resolved "https://registry.yarnpkg.com/@fullcalendar/list/-/list-6.1.11.tgz#4cd23700ea48b382b37387e29a706f2da692e174"
integrity sha512-9Qx8uvik9pXD12u50FiHwNzlHv4wkhfsr+r03ycahW7vEeIAKCsIZGTkUfFP+96I5wHihrfLazu1cFQG4MPiuw==
"@fullcalendar/timegrid@^6.1.11":
version "6.1.11"
resolved "https://registry.yarnpkg.com/@fullcalendar/timegrid/-/timegrid-6.1.11.tgz#76b2fc4446d1e97819a4395dab4f3a7e44c7a9eb"
integrity sha512-0seUHK/ferH89IeuCvV4Bib0zWjgK0nsptNdmAc9wDBxD/d9hm5Mdti0URJX6bDoRtsSfRDu5XsRcrzwoc+AUQ==
dependencies:
"@fullcalendar/daygrid" "~6.1.11"
"@headlessui/vue@^1.7.16":
version "1.7.16"
resolved "https://registry.yarnpkg.com/@headlessui/vue/-/vue-1.7.16.tgz#bdc9d32d329248910325539b99e6abfce0c69f89"
@ -398,9 +427,9 @@ acorn@^7.1.1:
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
"air-datepicker@github:frappe/air-datepicker":
"air-datepicker@git+https://github.com/frappe/air-datepicker":
version "2.2.3"
resolved "https://codeload.github.com/frappe/air-datepicker/tar.gz/ed37b94d95c68d8544357e330be0c89d044a3eea"
resolved "git+https://github.com/frappe/air-datepicker#ed37b94d95c68d8544357e330be0c89d044a3eea"
dependencies:
jquery ">=2.0.0 <4.0.0"
@ -2666,6 +2695,11 @@ postcss@^7.0.36:
picocolors "^0.2.1"
source-map "^0.6.1"
preact@~10.12.1:
version "10.12.1"
resolved "https://registry.yarnpkg.com/preact/-/preact-10.12.1.tgz#8f9cb5442f560e532729b7d23d42fd1161354a21"
integrity sha512-l8386ixSsBdbreOAkqtrwqHwdvR35ID8c3rKPa8lCWuO86dBi32QWHV4vfsZK1utLLFMvw+Z5Ad4XLkZzchscg==
"prettier@^1.18.2 || ^2.0.0":
version "2.8.8"
resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.8.tgz#e8c5d7e98a4305ffe3de2e1fc4aca1a71c28b1da"