refactor: for append to extend, merge list extend
Replace for-append loops with a single list.extend call or, where a new list is built from scratch, a list comprehension: create the list with its values directly instead of creating an empty list and appending to it (or extending it with another list) one step at a time. A minimal sketch of the pattern follows the change summary below.
This commit is contained in: parent b553ed98d8, commit 88c8baa9ee
64 changed files with 307 additions and 430 deletions
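Every hunk below applies the same mechanical rewrite. As an illustrative sketch only, not code from this repository (parse_all_before, parse_all_after, collect and their arguments are invented names; json.loads just mirrors the first hunk): a loop that merely appends to a freshly created list becomes a list comprehension, and a loop that appends into a list that already exists becomes one extend call fed by a generator expression.

import json

# Before: build a list by appending inside a loop.
def parse_all_before(records):
    out = []
    for record in records:
        out.append(json.loads(record))
    return out

# After: create the list with its values in one expression.
def parse_all_after(records):
    return [json.loads(record) for record in records]

# When the target list already exists, one extend call replaces the loop;
# the generator expression avoids building a temporary list first.
def collect(base_items, extra_records):
    items = list(base_items)
    items.extend(json.loads(record) for record in extra_records if record)
    return items

Passing a generator expression to extend, as most hunks that keep an existing list do, merges the new items without materialising an intermediate list.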
@@ -523,11 +523,7 @@ def clear_messages():


 def get_message_log():
-    log = []
-    for msg_out in local.message_log:
-        log.append(json.loads(msg_out))
-
-    return log
+    return [json.loads(msg_out) for msg_out in local.message_log]


 def clear_last_message():
@ -141,17 +141,20 @@ class AssignmentRule(Document):
|
|||
|
||||
def get_user_load_balancing(self):
|
||||
"""Assign to the user with least number of open assignments"""
|
||||
counts = []
|
||||
for d in self.users:
|
||||
counts.append(
|
||||
dict(
|
||||
user=d.user,
|
||||
count=frappe.db.count(
|
||||
"ToDo", dict(reference_type=self.document_type, allocated_to=d.user, status="Open")
|
||||
counts = [
|
||||
dict(
|
||||
user=d.user,
|
||||
count=frappe.db.count(
|
||||
"ToDo",
|
||||
dict(
|
||||
reference_type=self.document_type,
|
||||
allocated_to=d.user,
|
||||
status="Open",
|
||||
),
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
for d in self.users
|
||||
]
|
||||
# sort by dict value
|
||||
sorted_counts = sorted(counts, key=lambda k: k["count"])
|
||||
|
||||
|
|
|
|||
|
|
@@ -208,11 +208,7 @@ def insert_many(docs=None):
     if len(docs) > 200:
         frappe.throw(_("Only 200 inserts allowed in one request"))

-    out = []
-    for doc in docs:
-        out.append(insert_doc(doc).name)
-
-    return out
+    return [insert_doc(doc).name for doc in docs]


 @frappe.whitelist(methods=["POST", "PUT"])
@ -51,21 +51,17 @@ def get_permission_query_conditions(doctype):
|
|||
return ""
|
||||
|
||||
elif not links.get("permitted_links"):
|
||||
conditions = []
|
||||
|
||||
# when everything is not permitted
|
||||
for df in links.get("not_permitted_links"):
|
||||
# like ifnull(customer, '')='' and ifnull(supplier, '')=''
|
||||
conditions.append(f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')=''")
|
||||
conditions = [
|
||||
f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')=''" for df in links.get("not_permitted_links")
|
||||
]
|
||||
|
||||
return "( " + " and ".join(conditions) + " )"
|
||||
|
||||
else:
|
||||
conditions = []
|
||||
|
||||
for df in links.get("permitted_links"):
|
||||
# like ifnull(customer, '')!='' or ifnull(supplier, '')!=''
|
||||
conditions.append(f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')!=''")
|
||||
conditions = [
|
||||
f"ifnull(`tab{doctype}`.`{df.fieldname}`, '')!=''" for df in links.get("permitted_links")
|
||||
]
|
||||
|
||||
return "( " + " or ".join(conditions) + " )"
|
||||
|
||||
|
|
|
|||
|
|
@@ -115,10 +115,7 @@ def get_reference_details(reference_doctype, doctype, reference_list, reference_
     fields = ["`tabDynamic Link`.link_name"] + field_map.get(doctype, [])

     records = frappe.get_list(doctype, filters=filters, fields=fields, as_list=True)
-    temp_records = list()
-
-    for d in records:
-        temp_records.append(d[1:])
+    temp_records = [d[1:] for d in records]

     if not reference_list:
         frappe.throw(_("No records present in {0}").format(reference_doctype))
@@ -553,9 +553,7 @@ def get_emails(email_strings: list[str]) -> list[str]:
     for email_string in email_strings:
         if email_string:
             result = getaddresses([email_string])
-            for email in result:
-                email_addrs.append(email[1])
-
+            email_addrs.extend(email[1] for email in result)
     return email_addrs


@@ -189,9 +189,7 @@ class CommunicationEmailMixin:
             }
             final_attachments.append(d)

-        for a in self.get_attachments() or []:
-            final_attachments.append({"fid": a["name"]})
-
+        final_attachments.extend({"fid": a["name"]} for a in self.get_attachments() or [])
         return final_attachments

     def get_unsubscribe_message(self):
@ -120,9 +120,10 @@ class DataExporter:
|
|||
self.column_start_end = {}
|
||||
|
||||
if self.all_doctypes:
|
||||
self.child_doctypes = []
|
||||
for df in frappe.get_meta(self.doctype).get_table_fields():
|
||||
self.child_doctypes.append(dict(doctype=df.options, parentfield=df.fieldname))
|
||||
self.child_doctypes = [
|
||||
dict(doctype=df.options, parentfield=df.fieldname)
|
||||
for df in frappe.get_meta(self.doctype).get_table_fields()
|
||||
]
|
||||
|
||||
def build_response(self):
|
||||
self.writer = UnicodeWriter()
|
||||
|
|
|
|||
|
|
@ -290,10 +290,11 @@ class Report(Document):
|
|||
columns = params.get("fields")
|
||||
else:
|
||||
columns = [["name", self.ref_doctype]]
|
||||
for df in frappe.get_meta(self.ref_doctype).fields:
|
||||
if df.in_list_view:
|
||||
columns.append([df.fieldname, self.ref_doctype])
|
||||
|
||||
columns.extend(
|
||||
[df.fieldname, self.ref_doctype]
|
||||
for df in frappe.get_meta(self.ref_doctype).fields
|
||||
if df.in_list_view
|
||||
)
|
||||
return columns
|
||||
|
||||
def get_standard_report_filters(self, params, filters):
|
||||
|
|
|
|||
|
|
@ -95,11 +95,9 @@ class RolePermissionforPageandReport(Document):
|
|||
return {check_for_field: name}
|
||||
|
||||
def get_roles(self):
|
||||
roles = []
|
||||
for data in self.roles:
|
||||
if data.role != "All":
|
||||
roles.append({"role": data.role, "parenttype": "Custom Role"})
|
||||
return roles
|
||||
return [
|
||||
{"role": data.role, "parenttype": "Custom Role"} for data in self.roles if data.role != "All"
|
||||
]
|
||||
|
||||
def update_status(self):
|
||||
return frappe.render_template
|
||||
|
|
|
|||
|
|
@ -569,10 +569,7 @@ class User(Document):
|
|||
tables = frappe.db.get_tables()
|
||||
for tab in tables:
|
||||
desc = frappe.db.get_table_columns_description(tab)
|
||||
has_fields = []
|
||||
for d in desc:
|
||||
if d.get("name") in ["owner", "modified_by"]:
|
||||
has_fields.append(d.get("name"))
|
||||
has_fields = [d.get("name") for d in desc if d.get("name") in ["owner", "modified_by"]]
|
||||
for field in has_fields:
|
||||
frappe.db.sql(
|
||||
"""UPDATE `%s`
|
||||
|
|
|
|||
|
|
@@ -169,11 +169,7 @@ def get_applicable_for_doctype_list(doctype, txt, searchfield, start, page_len,

     linked_doctypes.sort()

-    return_list = []
-    for doctype in linked_doctypes[start:page_len]:
-        return_list.append([doctype])
-
-    return return_list
+    return [[doctype] for doctype in linked_doctypes[start:page_len]]


 def get_permitted_documents(doctype):
@ -193,9 +193,7 @@ class UserType(Document):
|
|||
doctypes.append("File")
|
||||
|
||||
for doctype in ["select_doctypes", "custom_select_doctypes"]:
|
||||
for dt in self.get(doctype):
|
||||
doctypes.append(dt.document_type)
|
||||
|
||||
doctypes.extend(dt.document_type for dt in self.get(doctype))
|
||||
for perm in frappe.get_all(
|
||||
"Custom DocPerm", filters={"role": self.role, "parent": ["not in", doctypes]}
|
||||
):
|
||||
|
|
|
|||
|
|
@ -43,9 +43,7 @@ def get_roles_and_doctypes():
|
|||
restricted_roles = ["Administrator"]
|
||||
if frappe.session.user != "Administrator":
|
||||
custom_user_type_roles = frappe.get_all("User Type", filters={"is_standard": 0}, fields=["role"])
|
||||
for row in custom_user_type_roles:
|
||||
restricted_roles.append(row.role)
|
||||
|
||||
restricted_roles.extend(row.role for row in custom_user_type_roles)
|
||||
restricted_roles.append("All")
|
||||
|
||||
roles = frappe.get_all(
|
||||
|
|
|
|||
|
|
@ -56,11 +56,9 @@ def query_doctypes(doctype, txt, searchfield, start, page_len, filters):
|
|||
|
||||
single_doctypes = [d[0] for d in frappe.db.get_values("DocType", {"issingle": 1})]
|
||||
|
||||
out = []
|
||||
for dt in can_read:
|
||||
if txt.lower().replace("%", "") in dt.lower() and (
|
||||
include_single_doctypes or dt not in single_doctypes
|
||||
):
|
||||
out.append([dt])
|
||||
|
||||
return out
|
||||
return [
|
||||
[dt]
|
||||
for dt in can_read
|
||||
if txt.lower().replace("%", "") in dt.lower()
|
||||
and (include_single_doctypes or dt not in single_doctypes)
|
||||
]
|
||||
|
|
|
|||
|
|
@@ -66,11 +66,7 @@ def find_all(list_of_dict, match_function):

     red_shapes = find_all(colored_shapes, lambda d: d['color'] == 'red')
     """
-    found = []
-    for entry in list_of_dict:
-        if match_function(entry):
-            found.append(entry)
-    return found
+    return [entry for entry in list_of_dict if match_function(entry)]


 def ljust_list(_list, length, fill_word=None):
@ -600,11 +600,8 @@ class CustomizeForm(Document):
|
|||
),
|
||||
as_dict=True,
|
||||
)
|
||||
links = []
|
||||
label = df.label
|
||||
for doc in docs:
|
||||
links.append(frappe.utils.get_link_to_form(self.doc_type, doc.name))
|
||||
links_str = ", ".join(links)
|
||||
links_str = ", ".join(frappe.utils.get_link_to_form(self.doc_type, doc.name) for doc in docs)
|
||||
|
||||
if docs:
|
||||
frappe.throw(
|
||||
|
|
|
|||
|
|
@ -70,29 +70,26 @@ class MariaDBTable(DBTable):
|
|||
for col in self.columns.values():
|
||||
col.build_for_alter_table(self.current_columns.get(col.fieldname.lower()))
|
||||
|
||||
add_column_query = []
|
||||
modify_column_query = []
|
||||
add_index_query = []
|
||||
drop_index_query = []
|
||||
|
||||
for col in self.add_column:
|
||||
add_column_query.append(f"ADD COLUMN `{col.fieldname}` {col.get_definition()}")
|
||||
|
||||
add_column_query = [
|
||||
f"ADD COLUMN `{col.fieldname}` {col.get_definition()}" for col in self.add_column
|
||||
]
|
||||
columns_to_modify = set(self.change_type + self.set_default)
|
||||
for col in columns_to_modify:
|
||||
modify_column_query.append(
|
||||
f"MODIFY `{col.fieldname}` {col.get_definition(for_modification=True)}"
|
||||
)
|
||||
|
||||
for col in self.add_unique:
|
||||
modify_column_query.append(
|
||||
modify_column_query = [
|
||||
f"MODIFY `{col.fieldname}` {col.get_definition(for_modification=True)}"
|
||||
for col in columns_to_modify
|
||||
]
|
||||
modify_column_query.extend(
|
||||
[
|
||||
f"ADD UNIQUE INDEX IF NOT EXISTS {col.fieldname} (`{col.fieldname}`)"
|
||||
)
|
||||
|
||||
for col in self.add_index:
|
||||
# if index key does not exists
|
||||
if not frappe.db.get_column_index(self.table_name, col.fieldname, unique=False):
|
||||
add_index_query.append(f"ADD INDEX `{col.fieldname}_index`(`{col.fieldname}`)")
|
||||
for col in self.add_unique
|
||||
]
|
||||
)
|
||||
add_index_query = [
|
||||
f"ADD INDEX `{col.fieldname}_index`(`{col.fieldname}`)"
|
||||
for col in self.add_index
|
||||
if not frappe.db.get_column_index(self.table_name, col.fieldname, unique=False)
|
||||
]
|
||||
drop_index_query = []
|
||||
|
||||
for col in {*self.drop_index, *self.drop_unique}:
|
||||
if col.fieldname == "name":
|
||||
|
|
|
|||
|
|
@ -76,10 +76,7 @@ class PostgresTable(DBTable):
|
|||
for col in self.columns.values():
|
||||
col.build_for_alter_table(self.current_columns.get(col.fieldname.lower()))
|
||||
|
||||
query = []
|
||||
|
||||
for col in self.add_column:
|
||||
query.append(f"ADD COLUMN `{col.fieldname}` {col.get_definition()}")
|
||||
query = [f"ADD COLUMN `{col.fieldname}` {col.get_definition()}" for col in self.add_column]
|
||||
|
||||
for col in self.change_type:
|
||||
using_clause = ""
|
||||
|
|
|
|||
|
|
@ -58,16 +58,16 @@ class DBTable:
|
|||
return ret
|
||||
|
||||
def get_index_definitions(self):
|
||||
ret = []
|
||||
for key, col in self.columns.items():
|
||||
return [
|
||||
"index `" + key + "`(`" + key + "`)"
|
||||
for key, col in self.columns.items()
|
||||
if (
|
||||
col.set_index
|
||||
and not col.unique
|
||||
and col.fieldtype in frappe.db.type_map
|
||||
and frappe.db.type_map.get(col.fieldtype)[0] not in ("text", "longtext")
|
||||
):
|
||||
ret.append("index `" + key + "`(`" + key + "`)")
|
||||
return ret
|
||||
)
|
||||
]
|
||||
|
||||
def get_columns_from_docfields(self):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -493,11 +493,15 @@ def get_custom_doctype_list(module):
|
|||
order_by="name",
|
||||
)
|
||||
|
||||
out = []
|
||||
for d in doctypes:
|
||||
out.append({"type": "Link", "link_type": "doctype", "link_to": d.name, "label": _(d.name)})
|
||||
|
||||
return out
|
||||
return [
|
||||
{
|
||||
"type": "Link",
|
||||
"link_type": "doctype",
|
||||
"link_to": d.name,
|
||||
"label": _(d.name),
|
||||
}
|
||||
for d in doctypes
|
||||
]
|
||||
|
||||
|
||||
def get_custom_report_list(module):
|
||||
|
|
@ -509,23 +513,20 @@ def get_custom_report_list(module):
|
|||
order_by="name",
|
||||
)
|
||||
|
||||
out = []
|
||||
for r in reports:
|
||||
out.append(
|
||||
{
|
||||
"type": "Link",
|
||||
"link_type": "report",
|
||||
"doctype": r.ref_doctype,
|
||||
"dependencies": r.ref_doctype,
|
||||
"is_query_report": 1
|
||||
if r.report_type in ("Query Report", "Script Report", "Custom Report")
|
||||
else 0,
|
||||
"label": _(r.name),
|
||||
"link_to": r.name,
|
||||
}
|
||||
)
|
||||
|
||||
return out
|
||||
return [
|
||||
{
|
||||
"type": "Link",
|
||||
"link_type": "report",
|
||||
"doctype": r.ref_doctype,
|
||||
"dependencies": r.ref_doctype,
|
||||
"is_query_report": 1
|
||||
if r.report_type in ("Query Report", "Script Report", "Custom Report")
|
||||
else 0,
|
||||
"label": _(r.name),
|
||||
"link_to": r.name,
|
||||
}
|
||||
for r in reports
|
||||
]
|
||||
|
||||
|
||||
def save_new_widget(doc, page, blocks, new_widgets):
|
||||
|
|
|
|||
|
|
@ -105,12 +105,8 @@ def get_permitted_charts(dashboard_name):
|
|||
|
||||
@frappe.whitelist()
|
||||
def get_permitted_cards(dashboard_name):
|
||||
permitted_cards = []
|
||||
dashboard = frappe.get_doc("Dashboard", dashboard_name)
|
||||
for card in dashboard.cards:
|
||||
if frappe.has_permission("Number Card", doc=card.card):
|
||||
permitted_cards.append(card)
|
||||
return permitted_cards
|
||||
return [card for card in dashboard.cards if frappe.has_permission("Number Card", doc=card.card)]
|
||||
|
||||
|
||||
def get_non_standard_charts_in_dashboard(dashboard):
|
||||
|
|
|
|||
|
|
@ -188,16 +188,19 @@ def get_documents_for_tag(tag):
|
|||
"""
|
||||
# remove hastag `#` from tag
|
||||
tag = tag[1:]
|
||||
results = []
|
||||
|
||||
result = frappe.get_list(
|
||||
"Tag Link", filters={"tag": tag}, fields=["document_type", "document_name", "title", "tag"]
|
||||
)
|
||||
|
||||
for res in result:
|
||||
results.append({"doctype": res.document_type, "name": res.document_name, "content": res.title})
|
||||
|
||||
return results
|
||||
return [
|
||||
{
|
||||
"doctype": res.document_type,
|
||||
"name": res.document_name,
|
||||
"content": res.title,
|
||||
}
|
||||
for res in result
|
||||
]
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
|
|
|
|||
|
|
@ -263,19 +263,17 @@ def get_row_changed(row_changed, time, doctype, doc_name, v):
|
|||
|
||||
|
||||
def get_added_row(added, time, doctype, doc_name, v):
|
||||
items = []
|
||||
for d in added:
|
||||
items.append(
|
||||
{
|
||||
"time": v.modified,
|
||||
"data": {"to": d[0], "time": time},
|
||||
"doctype": doctype,
|
||||
"doc_name": doc_name,
|
||||
"type": "row added",
|
||||
"by": v.modified_by,
|
||||
}
|
||||
)
|
||||
return items
|
||||
return [
|
||||
{
|
||||
"time": v.modified,
|
||||
"data": {"to": d[0], "time": time},
|
||||
"doctype": doctype,
|
||||
"doc_name": doc_name,
|
||||
"type": "row added",
|
||||
"by": v.modified_by,
|
||||
}
|
||||
for d in added
|
||||
]
|
||||
|
||||
|
||||
def get_field_changed(changed, time, doctype, doc_name, v):
|
||||
|
|
|
|||
|
|
@@ -407,10 +407,7 @@ def validate_linked_doc(docinfo, ignore_doctypes_on_cancel_all=None):

 def get_exempted_doctypes():
     """Get list of doctypes exempted from being auto-cancelled"""
-    auto_cancel_exempt_doctypes = []
-    for doctypes in frappe.get_hooks("auto_cancel_exempted_doctypes"):
-        auto_cancel_exempt_doctypes.append(doctypes)
-    return auto_cancel_exempt_doctypes
+    return list(frappe.get_hooks("auto_cancel_exempted_doctypes"))


 def get_linked_docs(doctype: str, name: str, linkinfo: dict | None = None) -> dict[str, list]:
@ -77,9 +77,11 @@ def getdoctype(doctype, with_parent=False, cached_timestamp=None):
|
|||
|
||||
def get_meta_bundle(doctype):
|
||||
bundle = [frappe.desk.form.meta.get_meta(doctype)]
|
||||
for df in bundle[0].fields:
|
||||
if df.fieldtype in frappe.model.table_fields:
|
||||
bundle.append(frappe.desk.form.meta.get_meta(df.options))
|
||||
bundle.extend(
|
||||
frappe.desk.form.meta.get_meta(df.options)
|
||||
for df in bundle[0].fields
|
||||
if df.fieldtype in frappe.model.table_fields
|
||||
)
|
||||
return bundle
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -242,9 +242,7 @@ class FormMeta(Meta):
|
|||
workflow = frappe.get_doc("Workflow", workflow_name)
|
||||
workflow_docs.append(workflow)
|
||||
|
||||
for d in workflow.get("states"):
|
||||
workflow_docs.append(frappe.get_doc("Workflow State", d.state))
|
||||
|
||||
workflow_docs.extend(frappe.get_doc("Workflow State", d.state) for d in workflow.get("states"))
|
||||
self.set("__workflow_docs", workflow_docs)
|
||||
|
||||
def load_templates(self):
|
||||
|
|
|
|||
|
|
@ -129,18 +129,20 @@ def get_stages_hooks(args):
|
|||
|
||||
|
||||
def get_setup_complete_hooks(args):
|
||||
stages = []
|
||||
for method in frappe.get_hooks("setup_wizard_complete"):
|
||||
stages.append(
|
||||
{
|
||||
"status": "Executing method",
|
||||
"fail_msg": "Failed to execute method",
|
||||
"tasks": [
|
||||
{"fn": frappe.get_attr(method), "args": args, "fail_msg": "Failed to execute method"}
|
||||
],
|
||||
}
|
||||
)
|
||||
return stages
|
||||
return [
|
||||
{
|
||||
"status": "Executing method",
|
||||
"fail_msg": "Failed to execute method",
|
||||
"tasks": [
|
||||
{
|
||||
"fn": frappe.get_attr(method),
|
||||
"args": args,
|
||||
"fail_msg": "Failed to execute method",
|
||||
}
|
||||
],
|
||||
}
|
||||
for method in frappe.get_hooks("setup_wizard_complete")
|
||||
]
|
||||
|
||||
|
||||
def handle_setup_exception(args):
|
||||
|
|
@ -339,8 +341,7 @@ def prettify_args(args):
|
|||
args[key] = f"Image Attached: '{filename}' of size {size} MB"
|
||||
|
||||
pretty_args = []
|
||||
for key in sorted(args):
|
||||
pretty_args.append(f"{key} = {args[key]}")
|
||||
pretty_args.extend(f"{key} = {args[key]}" for key in sorted(args))
|
||||
return pretty_args
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -261,10 +261,7 @@ def compress(data, args=None):
|
|||
values = []
|
||||
keys = list(data[0])
|
||||
for row in data:
|
||||
new_row = []
|
||||
for key in keys:
|
||||
new_row.append(row.get(key))
|
||||
values.append(new_row)
|
||||
values.append([row.get(key) for key in keys])
|
||||
|
||||
# add user info for assignments (avatar)
|
||||
if row.get("_assign", ""):
|
||||
|
|
@ -644,11 +641,7 @@ def scrub_user_tags(tagcount):
|
|||
|
||||
rdict[tag] += tagdict[t]
|
||||
|
||||
rlist = []
|
||||
for tag in rdict:
|
||||
rlist.append([tag, rdict[tag]])
|
||||
|
||||
return rlist
|
||||
return [[tag, rdict[tag]] for tag in rdict]
|
||||
|
||||
|
||||
# used in building query in queries.py
|
||||
|
|
|
|||
|
|
@ -94,11 +94,9 @@ def get_communication_doctype(doctype, txt, searchfield, start, page_len, filter
|
|||
d[0] for d in frappe.db.get_values("DocType", {"issingle": 0, "istable": 0, "hide_toolbar": 0})
|
||||
]
|
||||
|
||||
out = []
|
||||
for dt in com_doctypes:
|
||||
if txt.lower().replace("%", "") in dt.lower() and dt in can_read:
|
||||
out.append([dt])
|
||||
return out
|
||||
return [
|
||||
[dt] for dt in com_doctypes if txt.lower().replace("%", "") in dt.lower() and dt in can_read
|
||||
]
|
||||
|
||||
|
||||
def get_cached_contacts(txt):
|
||||
|
|
|
|||
|
|
@ -115,10 +115,9 @@ class AutoEmailReport(Document):
|
|||
# Check if all Mandatory Report Filters are filled by the User
|
||||
filters = frappe.parse_json(self.filters) if self.filters else {}
|
||||
filter_meta = frappe.parse_json(self.filter_meta) if self.filter_meta else {}
|
||||
throw_list = []
|
||||
for meta in filter_meta:
|
||||
if meta.get("reqd") and not filters.get(meta["fieldname"]):
|
||||
throw_list.append(meta["label"])
|
||||
throw_list = [
|
||||
meta["label"] for meta in filter_meta if meta.get("reqd") and not filters.get(meta["fieldname"])
|
||||
]
|
||||
if throw_list:
|
||||
frappe.throw(
|
||||
title=_("Missing Filters Required"),
|
||||
|
|
|
|||
|
|
@ -734,19 +734,21 @@ def get_append_to(
|
|||
doctype=None, txt=None, searchfield=None, start=None, page_len=None, filters=None
|
||||
):
|
||||
txt = txt if txt else ""
|
||||
email_append_to_list = []
|
||||
|
||||
# Set Email Append To DocTypes via DocType
|
||||
filters = {"istable": 0, "issingle": 0, "email_append_to": 1}
|
||||
for dt in frappe.get_all("DocType", filters=filters, fields=["name", "email_append_to"]):
|
||||
email_append_to_list.append(dt.name)
|
||||
|
||||
# Set Email Append To DocTypes via DocType
|
||||
email_append_to_list = [
|
||||
dt.name for dt in frappe.get_all("DocType", filters=filters, fields=["name", "email_append_to"])
|
||||
]
|
||||
# Set Email Append To DocTypes set via Customize Form
|
||||
for dt in frappe.get_list(
|
||||
"Property Setter", filters={"property": "email_append_to", "value": 1}, fields=["doc_type"]
|
||||
):
|
||||
email_append_to_list.append(dt.doc_type)
|
||||
|
||||
email_append_to_list.extend(
|
||||
dt.doc_type
|
||||
for dt in frappe.get_list(
|
||||
"Property Setter",
|
||||
filters={"property": "email_append_to", "value": 1},
|
||||
fields=["doc_type"],
|
||||
)
|
||||
)
|
||||
return [[d] for d in set(email_append_to_list) if txt in d]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -310,10 +310,7 @@ def get_dropbox_settings(redirect_uri=False):
|
|||
|
||||
def delete_older_backups(dropbox_client, folder_path, to_keep):
|
||||
res = dropbox_client.files_list_folder(path=folder_path)
|
||||
files = []
|
||||
for f in res.entries:
|
||||
if isinstance(f, dropbox.files.FileMetadata) and "sql" in f.name:
|
||||
files.append(f)
|
||||
files = [f for f in res.entries if isinstance(f, dropbox.files.FileMetadata) and "sql" in f.name]
|
||||
|
||||
if len(files) <= to_keep:
|
||||
return
|
||||
|
|
|
|||
|
|
@@ -299,9 +299,7 @@ def sync_events_from_google_calendar(g_calendar, method=None):
             else:
                 frappe.throw(msg)

-        for event in events.get("items", []):
-            results.append(event)
-
+        results.extend(event for event in events.get("items", []))
         if not events.get("nextPageToken"):
             if events.get("nextSyncToken"):
                 account.next_sync_token = events.get("nextSyncToken")
@@ -140,9 +140,7 @@ def sync_contacts_from_google_contacts(g_contact):
                 ).format(account.name, err.resp.status)
             )

-        for contact in contacts.get("connections", []):
-            results.append(contact)
-
+        results.extend(contact for contact in contacts.get("connections", []))
         if not contacts.get("nextPageToken"):
             if contacts.get("nextSyncToken"):
                 frappe.db.set_value(
@@ -300,10 +300,7 @@ class LDAPSettings(Document):
         )  # Build search query

         if len(conn.entries) >= 1:
-            fetch_ldap_groups = []
-            for group in conn.entries:
-                fetch_ldap_groups.append(group["cn"].value)
-
+            fetch_ldap_groups = [group["cn"].value for group in conn.entries]
             return fetch_ldap_groups

     def authenticate(self, username: str, password: str):
@@ -104,10 +104,7 @@ class Webhook(Document):

     def validate_repeating_fields(self):
         """Error when Same Field is entered multiple times in webhook_data"""
-        webhook_data = []
-        for entry in self.webhook_data:
-            webhook_data.append(entry.fieldname)
-
+        webhook_data = [entry.fieldname for entry in self.webhook_data]
         if len(webhook_data) != len(set(webhook_data)):
             frappe.throw(_("Same Field is entered more than once"))

@ -1182,15 +1182,15 @@ class BaseDocument:
|
|||
|
||||
def reset_values_if_no_permlevel_access(self, has_access_to, high_permlevel_fields):
|
||||
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
|
||||
to_reset = []
|
||||
|
||||
for df in high_permlevel_fields:
|
||||
to_reset = [
|
||||
df
|
||||
for df in high_permlevel_fields
|
||||
if (
|
||||
df.permlevel not in has_access_to
|
||||
and df.fieldtype not in display_fieldtypes
|
||||
and df.fieldname not in self.flags.get("ignore_permlevel_for_fields", [])
|
||||
):
|
||||
to_reset.append(df)
|
||||
)
|
||||
]
|
||||
|
||||
if to_reset:
|
||||
if self.is_new():
|
||||
|
|
|
|||
|
|
@ -376,9 +376,7 @@ class DatabaseQuery:
|
|||
|
||||
if isinstance(filters, dict):
|
||||
fdict = filters
|
||||
filters = []
|
||||
for key, value in fdict.items():
|
||||
filters.append(make_filter_tuple(self.doctype, key, value))
|
||||
filters = [make_filter_tuple(self.doctype, key, value) for key, value in fdict.items()]
|
||||
setattr(self, filter_name, filters)
|
||||
|
||||
def sanitize_fields(self):
|
||||
|
|
@ -564,10 +562,7 @@ class DatabaseQuery:
|
|||
# remove from fields
|
||||
to_remove = []
|
||||
for fld in self.fields:
|
||||
for f in optional_fields:
|
||||
if f in fld and not f in self.columns:
|
||||
to_remove.append(fld)
|
||||
|
||||
to_remove.extend(fld for f in optional_fields if f in fld and f not in self.columns)
|
||||
for fld in to_remove:
|
||||
del self.fields[self.fields.index(fld)]
|
||||
|
||||
|
|
@ -577,10 +572,9 @@ class DatabaseQuery:
|
|||
if isinstance(each, str):
|
||||
each = [each]
|
||||
|
||||
for element in each:
|
||||
if element in optional_fields and element not in self.columns:
|
||||
to_remove.append(each)
|
||||
|
||||
to_remove.extend(
|
||||
each for element in each if element in optional_fields and element not in self.columns
|
||||
)
|
||||
for each in to_remove:
|
||||
if isinstance(self.filters, dict):
|
||||
del self.filters[each]
|
||||
|
|
|
|||
|
|
@ -421,11 +421,7 @@ class Meta(Document):
|
|||
order = json.loads(self.get(f"{fieldname}_order") or "[]")
|
||||
if order:
|
||||
name_map = {d.name: d for d in self.get(fieldname)}
|
||||
new_list = []
|
||||
for name in order:
|
||||
if name in name_map:
|
||||
new_list.append(name_map[name])
|
||||
|
||||
new_list = [name_map[name] for name in order if name in name_map]
|
||||
# add the missing items that have not be added
|
||||
# maybe these items were added to the standard product
|
||||
# after the customization was done
|
||||
|
|
@ -564,11 +560,7 @@ class Meta(Document):
|
|||
def get_high_permlevel_fields(self):
|
||||
"""Build list of fields with high perm level and all the higher perm levels defined."""
|
||||
if not hasattr(self, "high_permlevel_fields"):
|
||||
self.high_permlevel_fields = []
|
||||
for df in self.fields:
|
||||
if df.permlevel > 0:
|
||||
self.high_permlevel_fields.append(df)
|
||||
|
||||
self.high_permlevel_fields = [df for df in self.fields if df.permlevel > 0]
|
||||
return self.high_permlevel_fields
|
||||
|
||||
def get_permitted_fieldnames(self, parenttype=None, *, user=None, permission_type="read"):
|
||||
|
|
@ -594,10 +586,11 @@ class Meta(Document):
|
|||
self.get_permlevel_access(permission_type=permission_type, parenttype=parenttype, user=user)
|
||||
)
|
||||
|
||||
for df in self.get_fieldnames_with_value(with_field_meta=True, with_virtual_fields=True):
|
||||
if df.permlevel in permlevel_access:
|
||||
permitted_fieldnames.append(df.fieldname)
|
||||
|
||||
permitted_fieldnames.extend(
|
||||
df.fieldname
|
||||
for df in self.get_fieldnames_with_value(with_field_meta=True, with_virtual_fields=True)
|
||||
if df.permlevel in permlevel_access
|
||||
)
|
||||
return permitted_fieldnames
|
||||
|
||||
def get_permlevel_access(self, permission_type="read", parenttype=None, *, user=None):
|
||||
|
|
|
|||
|
|
@ -42,14 +42,17 @@ ignore_doctypes = [""]
|
|||
|
||||
def import_files(module, dt=None, dn=None, force=False, pre_process=None, reset_permissions=False):
|
||||
if type(module) is list:
|
||||
out = []
|
||||
for m in module:
|
||||
out.append(
|
||||
import_file(
|
||||
m[0], m[1], m[2], force=force, pre_process=pre_process, reset_permissions=reset_permissions
|
||||
)
|
||||
return [
|
||||
import_file(
|
||||
m[0],
|
||||
m[1],
|
||||
m[2],
|
||||
force=force,
|
||||
pre_process=pre_process,
|
||||
reset_permissions=reset_permissions,
|
||||
)
|
||||
return out
|
||||
for m in module
|
||||
]
|
||||
else:
|
||||
return import_file(
|
||||
module, dt, dn, force=force, pre_process=pre_process, reset_permissions=reset_permissions
|
||||
|
|
|
|||
|
|
@ -149,11 +149,7 @@ class OAuthWebRequestValidator(RequestValidator):
|
|||
filters={"client": client_id, "validity": "Valid"},
|
||||
)
|
||||
|
||||
checkcodes = []
|
||||
for vcode in validcodes:
|
||||
checkcodes.append(vcode["name"])
|
||||
|
||||
if code in checkcodes:
|
||||
if code in [vcode["name"] for vcode in validcodes]:
|
||||
request.scopes = frappe.db.get_value("OAuth Authorization Code", code, "scopes").split(
|
||||
get_url_delimiter()
|
||||
)
|
||||
|
|
|
|||
|
|
@ -230,10 +230,11 @@ def get_all_tests(app):
|
|||
# in /doctype/doctype/boilerplate/
|
||||
continue
|
||||
|
||||
for filename in files:
|
||||
if filename.startswith("test_") and filename.endswith(".py") and filename != "test_runner.py":
|
||||
test_file_list.append([path, filename])
|
||||
|
||||
test_file_list.extend(
|
||||
[path, filename]
|
||||
for filename in files
|
||||
if filename.startswith("test_") and filename.endswith(".py") and filename != "test_runner.py"
|
||||
)
|
||||
return test_file_list
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -55,21 +55,20 @@ def execute():
|
|||
user_permissions_to_delete.append(user_permission.name)
|
||||
user_permission.name = None
|
||||
user_permission.skip_for_doctype = None
|
||||
for doctype in applicable_for_doctypes:
|
||||
if doctype:
|
||||
# Maintain sequence (name, user, allow, for_value, applicable_for, apply_to_all_doctypes, creation, modified)
|
||||
new_user_permissions_list.append(
|
||||
(
|
||||
frappe.generate_hash(length=10),
|
||||
user_permission.user,
|
||||
user_permission.allow,
|
||||
user_permission.for_value,
|
||||
doctype,
|
||||
0,
|
||||
user_permission.creation,
|
||||
user_permission.modified,
|
||||
)
|
||||
)
|
||||
new_user_permissions_list.extend(
|
||||
(
|
||||
frappe.generate_hash(length=10),
|
||||
user_permission.user,
|
||||
user_permission.allow,
|
||||
user_permission.for_value,
|
||||
doctype,
|
||||
0,
|
||||
user_permission.creation,
|
||||
user_permission.modified,
|
||||
)
|
||||
for doctype in applicable_for_doctypes
|
||||
if doctype
|
||||
)
|
||||
else:
|
||||
# No skip_for_doctype found! Just update apply_to_all_doctypes.
|
||||
frappe.db.set_value("User Permission", user_permission.name, "apply_to_all_doctypes", 1)
|
||||
|
|
|
|||
|
|
@ -3,11 +3,11 @@ import frappe
|
|||
|
||||
def execute():
|
||||
navbar_settings = frappe.get_single("Navbar Settings")
|
||||
duplicate_items = []
|
||||
|
||||
for navbar_item in navbar_settings.settings_dropdown:
|
||||
if navbar_item.item_label == "Toggle Full Width":
|
||||
duplicate_items.append(navbar_item)
|
||||
duplicate_items = [
|
||||
navbar_item
|
||||
for navbar_item in navbar_settings.settings_dropdown
|
||||
if navbar_item.item_label == "Toggle Full Width"
|
||||
]
|
||||
|
||||
if len(duplicate_items) > 1:
|
||||
navbar_settings.remove(duplicate_items[0])
|
||||
|
|
|
|||
|
|
@ -37,9 +37,10 @@ class NetworkPrinterSettings(Document):
|
|||
cups.setPort(self.port)
|
||||
conn = cups.Connection()
|
||||
printers = conn.getPrinters()
|
||||
for printer_id, printer in printers.items():
|
||||
printer_list.append({"value": printer_id, "label": printer["printer-make-and-model"]})
|
||||
|
||||
printer_list.extend(
|
||||
{"value": printer_id, "label": printer["printer-make-and-model"]}
|
||||
for printer_id, printer in printers.items()
|
||||
)
|
||||
except RuntimeError:
|
||||
frappe.throw(_("Failed to connect to server"))
|
||||
except frappe.ValidationError:
|
||||
|
|
|
|||
|
|
@ -121,7 +121,6 @@ class FullTextSearch:
|
|||
ix = self.get_index()
|
||||
|
||||
results = None
|
||||
out = []
|
||||
|
||||
search_fields = self.get_fields_to_search()
|
||||
fieldboosts = {}
|
||||
|
|
@ -143,10 +142,7 @@ class FullTextSearch:
|
|||
filter_scoped = Prefix(self.id, scope)
|
||||
results = searcher.search(query, limit=limit, filter=filter_scoped)
|
||||
|
||||
for r in results:
|
||||
out.append(self.parse_result(r))
|
||||
|
||||
return out
|
||||
return [self.parse_result(r) for r in results]
|
||||
|
||||
|
||||
class FuzzyTermExtended(FuzzyTerm):
|
||||
|
|
|
|||
|
|
@ -175,15 +175,9 @@ class TestBoilerPlate(unittest.TestCase):
|
|||
self.check_parsable_python_files(new_app_dir)
|
||||
|
||||
def get_paths(self, app_dir, app_name):
|
||||
all_paths = list()
|
||||
|
||||
for path in self.root_paths:
|
||||
all_paths.append(os.path.join(app_dir, path))
|
||||
|
||||
all_paths = [os.path.join(app_dir, path) for path in self.root_paths]
|
||||
all_paths.append(os.path.join(app_dir, app_name))
|
||||
|
||||
for path in self.paths_inside_app:
|
||||
all_paths.append(os.path.join(app_dir, app_name, path))
|
||||
all_paths.extend(os.path.join(app_dir, app_name, path) for path in self.paths_inside_app)
|
||||
|
||||
return all_paths
|
||||
|
||||
|
|
|
|||
|
|
@ -48,8 +48,7 @@ class TestCachingUtils(FrappeTestCase):
|
|||
|
||||
# ensure that external service was called only once
|
||||
# thereby return value of request_specific_api is cached
|
||||
for _ in range(5):
|
||||
retval.append(request_specific_api(120, 23))
|
||||
retval.extend(request_specific_api(120, 23) for _ in range(5))
|
||||
external_service.assert_called_once()
|
||||
self.assertTrue(same_output_received())
|
||||
|
||||
|
|
|
|||
|
|
@ -462,10 +462,7 @@ def get_messages_from_doctype(name):
|
|||
messages.append(d.options)
|
||||
|
||||
# translations of roles
|
||||
for d in meta.get("permissions"):
|
||||
if d.role:
|
||||
messages.append(d.role)
|
||||
|
||||
messages.extend(d.role for d in meta.get("permissions") if d.role)
|
||||
messages = [message for message in messages if message]
|
||||
messages = [("DocType: " + name, message) for message in messages if is_translatable(message)]
|
||||
|
||||
|
|
@ -579,10 +576,11 @@ def get_messages_from_custom_fields(app_name):
|
|||
continue
|
||||
messages.append(("Custom Field - {}: {}".format(prop, cf["name"]), cf[prop]))
|
||||
if cf["fieldtype"] == "Selection" and cf.get("options"):
|
||||
for option in cf["options"].split("\n"):
|
||||
if option and "icon" not in option and is_translatable(option):
|
||||
messages.append(("Custom Field - Description: " + cf["name"], option))
|
||||
|
||||
messages.extend(
|
||||
("Custom Field - Description: " + cf["name"], option)
|
||||
for option in cf["options"].split("\n")
|
||||
if option and "icon" not in option and is_translatable(option)
|
||||
)
|
||||
return messages
|
||||
|
||||
|
||||
|
|
@ -1199,12 +1197,9 @@ def send_translations(translation_dict):
|
|||
|
||||
|
||||
def deduplicate_messages(messages):
|
||||
ret = []
|
||||
op = operator.itemgetter(1)
|
||||
messages = sorted(messages, key=op)
|
||||
for k, g in itertools.groupby(messages, op):
|
||||
ret.append(next(g))
|
||||
return ret
|
||||
return [next(g) for k, g in itertools.groupby(messages, op)]
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
|
|
|
|||
|
|
@ -114,11 +114,14 @@ class TypeExporter:
|
|||
)
|
||||
|
||||
def _create_fields_code_block(self):
|
||||
fields = []
|
||||
|
||||
for field, typehint in self.field_types.items():
|
||||
fields.append(field_template.format(field=field, type=typehint))
|
||||
return "\n".join(sorted(fields))
|
||||
return "\n".join(
|
||||
sorted(
|
||||
[
|
||||
field_template.format(field=field, type=typehint)
|
||||
for field, typehint in self.field_types.items()
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
def _create_imports_block(self) -> str:
|
||||
return "\n".join(sorted(self.imports))
|
||||
|
|
|
|||
|
|
@ -362,10 +362,7 @@ def dict_to_str(args: dict[str, Any], sep: str = "&") -> str:
|
|||
"""
|
||||
Converts a dictionary to URL
|
||||
"""
|
||||
t = []
|
||||
for k in list(args):
|
||||
t.append(str(k) + "=" + quote(str(args[k] or "")))
|
||||
return sep.join(t)
|
||||
return sep.join(f"{str(k)}=" + quote(str(args[k] or "")) for k in list(args))
|
||||
|
||||
|
||||
def list_to_str(seq, sep=", "):
|
||||
|
|
@ -758,15 +755,14 @@ def get_installed_apps_info():
|
|||
out = []
|
||||
from frappe.utils.change_log import get_versions
|
||||
|
||||
for app, version_details in get_versions().items():
|
||||
out.append(
|
||||
{
|
||||
"app_name": app,
|
||||
"version": version_details.get("branch_version") or version_details.get("version"),
|
||||
"branch": version_details.get("branch"),
|
||||
}
|
||||
)
|
||||
|
||||
out.extend(
|
||||
{
|
||||
"app_name": app,
|
||||
"version": version_details.get("branch_version") or version_details.get("version"),
|
||||
"branch": version_details.get("branch"),
|
||||
}
|
||||
for app, version_details in get_versions().items()
|
||||
)
|
||||
return out
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -53,9 +53,7 @@ def read_csv_content(fcontent):
|
|||
)
|
||||
|
||||
fcontent = fcontent.encode("utf-8")
|
||||
content = []
|
||||
for line in fcontent.splitlines(True):
|
||||
content.append(frappe.safe_decode(line))
|
||||
content = [frappe.safe_decode(line) for line in fcontent.splitlines(True)]
|
||||
|
||||
try:
|
||||
rows = []
|
||||
|
|
|
|||
|
|
@ -1651,10 +1651,9 @@ def get_link_to_report(
|
|||
conditions = []
|
||||
for k, v in filters.items():
|
||||
if isinstance(v, list):
|
||||
for value in v:
|
||||
conditions.append(
|
||||
str(k) + "=" + '["' + str(value[0] + '"' + "," + '"' + str(value[1]) + '"]')
|
||||
)
|
||||
conditions.extend(
|
||||
str(k) + "=" + '["' + str(value[0] + '"' + "," + '"' + str(value[1]) + '"]') for value in v
|
||||
)
|
||||
else:
|
||||
conditions.append(str(k) + "=" + str(v))
|
||||
|
||||
|
|
@ -1795,9 +1794,7 @@ def get_filter(doctype: str, f: dict | list | tuple, filters_config=None) -> "fr
|
|||
) + NestedSetHierarchy
|
||||
|
||||
if filters_config:
|
||||
additional_operators = []
|
||||
for key in filters_config:
|
||||
additional_operators.append(key.lower())
|
||||
additional_operators = [key.lower() for key in filters_config]
|
||||
valid_operators = tuple(set(valid_operators + tuple(additional_operators)))
|
||||
|
||||
if f.operator.lower() not in valid_operators:
|
||||
|
|
|
|||
|
|
@ -208,10 +208,10 @@ def get_children_data(doctype, meta):
|
|||
|
||||
|
||||
def insert_values_for_multiple_docs(all_contents):
|
||||
values = []
|
||||
for content in all_contents:
|
||||
values.append("({doctype}, {name}, {content}, {published}, {title}, {route})".format(**content))
|
||||
|
||||
values = [
|
||||
"({doctype}, {name}, {content}, {published}, {title}, {route})".format(**content)
|
||||
for content in all_contents
|
||||
]
|
||||
batch_size = 50000
|
||||
for i in range(0, len(values), batch_size):
|
||||
batch_values = values[i : i + batch_size]
|
||||
|
|
@ -249,19 +249,21 @@ def update_global_search(doc):
|
|||
):
|
||||
return
|
||||
|
||||
content = []
|
||||
for field in doc.meta.get_global_search_fields():
|
||||
if doc.get(field.fieldname) and field.fieldtype not in frappe.model.table_fields:
|
||||
content.append(get_formatted_value(doc.get(field.fieldname), field))
|
||||
content = [
|
||||
get_formatted_value(doc.get(field.fieldname), field)
|
||||
for field in doc.meta.get_global_search_fields()
|
||||
if doc.get(field.fieldname) and field.fieldtype not in frappe.model.table_fields
|
||||
]
|
||||
|
||||
# Get children
|
||||
for child in doc.meta.get_table_fields():
|
||||
for d in doc.get(child.fieldname):
|
||||
if d.parent == doc.name:
|
||||
for field in d.meta.get_global_search_fields():
|
||||
if d.get(field.fieldname):
|
||||
content.append(get_formatted_value(d.get(field.fieldname), field))
|
||||
|
||||
content.extend(
|
||||
get_formatted_value(d.get(field.fieldname), field)
|
||||
for field in d.meta.get_global_search_fields()
|
||||
if d.get(field.fieldname)
|
||||
)
|
||||
if content:
|
||||
published = 0
|
||||
if hasattr(doc, "is_website_published") and doc.meta.allow_guest_to_view:
|
||||
|
|
|
|||
|
|
@ -389,19 +389,16 @@ def is_system_user(username: str | None = None) -> str | None:
|
|||
def get_users() -> list[dict]:
|
||||
from frappe.core.doctype.user.user import get_system_users
|
||||
|
||||
users = []
|
||||
system_managers = get_system_managers(only_name=True)
|
||||
|
||||
for user in get_system_users():
|
||||
users.append(
|
||||
{
|
||||
"full_name": get_user_fullname(user),
|
||||
"email": user,
|
||||
"is_system_manager": user in system_managers,
|
||||
}
|
||||
)
|
||||
|
||||
return users
|
||||
return [
|
||||
{
|
||||
"full_name": get_user_fullname(user),
|
||||
"email": user,
|
||||
"is_system_manager": user in system_managers,
|
||||
}
|
||||
for user in get_system_users()
|
||||
]
|
||||
|
||||
|
||||
def get_users_with_role(role: str) -> list[str]:
|
||||
|
|
|
|||
|
|
@@ -88,10 +88,7 @@ def read_xlsx_file_from_attached_file(file_url=None, fcontent=None, filepath=Non
     wb1 = load_workbook(filename=filename, read_only=True, data_only=True)
     ws1 = wb1.active
     for row in ws1.iter_rows():
-        tmp_list = []
-        for cell in row:
-            tmp_list.append(cell.value)
-        rows.append(tmp_list)
+        rows.append([cell.value for cell in row])
     return rows


@@ -99,10 +96,7 @@ def read_xls_file_from_attached_file(content):
     book = xlrd.open_workbook(file_contents=content)
     sheets = book.sheets()
     sheet = sheets[0]
-    rows = []
-    for i in range(sheet.nrows):
-        rows.append(sheet.row_values(i))
-    return rows
+    return [sheet.row_values(i) for i in range(sheet.nrows)]


 def build_xlsx_response(data, filename):
@ -96,11 +96,12 @@ class WebForm(WebsiteGenerator):
|
|||
"""Validate all fields are present"""
|
||||
from frappe.model import no_value_fields
|
||||
|
||||
missing = []
|
||||
meta = frappe.get_meta(self.doc_type)
|
||||
for df in self.web_form_fields:
|
||||
if df.fieldname and (df.fieldtype not in no_value_fields and not meta.has_field(df.fieldname)):
|
||||
missing.append(df.fieldname)
|
||||
missing = [
|
||||
df.fieldname
|
||||
for df in self.web_form_fields
|
||||
if df.fieldname and (df.fieldtype not in no_value_fields and not meta.has_field(df.fieldname))
|
||||
]
|
||||
|
||||
if missing:
|
||||
frappe.throw(_("Following fields are missing:") + "<br>" + "<br>".join(missing))
|
||||
|
|
@ -387,11 +388,7 @@ def get_context(context):
|
|||
|
||||
def validate_mandatory(self, doc):
|
||||
"""Validate mandatory web form fields"""
|
||||
missing = []
|
||||
for f in self.web_form_fields:
|
||||
if f.reqd and doc.get(f.fieldname) in (None, [], ""):
|
||||
missing.append(f)
|
||||
|
||||
missing = [f for f in self.web_form_fields if f.reqd and doc.get(f.fieldname) in (None, [], "")]
|
||||
if missing:
|
||||
frappe.throw(
|
||||
_("Mandatory Information missing:")
|
||||
|
|
|
|||
|
|
@ -34,9 +34,6 @@ class WebsiteSidebar(Document):
|
|||
else:
|
||||
items_without_group.append(item)
|
||||
|
||||
out = []
|
||||
for group, items in items_by_group.items():
|
||||
out.append({"group_title": group, "group_items": items})
|
||||
|
||||
out = [{"group_title": group, "group_items": items} for group, items in items_by_group.items()]
|
||||
out += items_without_group
|
||||
return out
|
||||
|
|
|
|||
|
|
@@ -144,10 +144,7 @@ class WebsiteTheme(Document):
         from frappe.utils.change_log import get_versions

         apps = get_versions()
-        out = []
-        for app, values in apps.items():
-            out.append({"name": app, "title": values["title"]})
-        return out
+        return [{"name": app, "title": values["title"]} for app, values in apps.items()]


 def get_active_theme() -> Optional["WebsiteTheme"]:
@ -562,15 +562,9 @@ def set_content_type(response, data, path):
|
|||
|
||||
|
||||
def add_preload_for_bundled_assets(response):
|
||||
links = [f"<{css}>; rel=preload; as=style" for css in frappe.local.preload_assets["style"]]
|
||||
|
||||
links = []
|
||||
|
||||
for css in frappe.local.preload_assets["style"]:
|
||||
links.append(f"<{css}>; rel=preload; as=style")
|
||||
|
||||
for js in frappe.local.preload_assets["script"]:
|
||||
links.append(f"<{js}>; rel=preload; as=script")
|
||||
|
||||
links.extend(f"<{js}>; rel=preload; as=script" for js in frappe.local.preload_assets["script"])
|
||||
if links:
|
||||
response.headers["Link"] = ",".join(links)
|
||||
|
||||
|
|
|
|||
|
|
@ -461,13 +461,12 @@ def filter_allowed_users(users, doc, transition):
|
|||
"""
|
||||
from frappe.permissions import has_permission
|
||||
|
||||
filtered_users = []
|
||||
for user in users:
|
||||
if has_approval_access(user, doc, transition) and has_permission(
|
||||
doctype=doc, user=user, raise_exception=False
|
||||
):
|
||||
filtered_users.append(user)
|
||||
return filtered_users
|
||||
return [
|
||||
user
|
||||
for user in users
|
||||
if has_approval_access(user, doc, transition)
|
||||
and has_permission(doctype=doc, user=user, raise_exception=False)
|
||||
]
|
||||
|
||||
|
||||
def get_common_email_args(doc):
|
||||
|
|
|
|||
|
|
@ -227,9 +227,11 @@ def get_list(
|
|||
|
||||
if txt:
|
||||
if meta.search_fields:
|
||||
for f in meta.get_search_fields():
|
||||
if f == "name" or meta.get_field(f).fieldtype in ("Data", "Text", "Small Text", "Text Editor"):
|
||||
or_filters.append([doctype, f, "like", "%" + txt + "%"])
|
||||
or_filters.extend(
|
||||
[doctype, f, "like", "%" + txt + "%"]
|
||||
for f in meta.get_search_fields()
|
||||
if f == "name" or meta.get_field(f).fieldtype in ("Data", "Text", "Small Text", "Text Editor")
|
||||
)
|
||||
else:
|
||||
if isinstance(filters, dict):
|
||||
filters["name"] = ("like", "%" + txt + "%")
|
||||
|
|
|
|||
|
|
@ -14,19 +14,19 @@ base_template_path = "www/sitemap.xml"
|
|||
|
||||
def get_context(context):
|
||||
"""generate the sitemap XML"""
|
||||
links = []
|
||||
links = [
|
||||
{"loc": get_url(quote(page.name.encode("utf-8"))), "lastmod": nowdate()}
|
||||
for route, page in get_pages().items()
|
||||
if page.sitemap
|
||||
]
|
||||
|
||||
for route, page in get_pages().items():
|
||||
if page.sitemap:
|
||||
links.append({"loc": get_url(quote(page.name.encode("utf-8"))), "lastmod": nowdate()})
|
||||
|
||||
for route, data in get_public_pages_from_doctypes().items():
|
||||
links.append(
|
||||
{
|
||||
"loc": get_url(quote((route or "").encode("utf-8"))),
|
||||
"lastmod": f"{data['modified']:%Y-%m-%d}",
|
||||
}
|
||||
)
|
||||
links.extend(
|
||||
{
|
||||
"loc": get_url(quote((route or "").encode("utf-8"))),
|
||||
"lastmod": f"{data['modified']:%Y-%m-%d}",
|
||||
}
|
||||
for route, data in get_public_pages_from_doctypes().items()
|
||||
)
|
||||
|
||||
return {"links": links}
|
||||
|
||||
|
|
|
|||