perf(minor): remove unnecessary comprehensions

- remove several unnecessary comprehensions from functions that accept a generator.
- Using `[x for x in iter]` causes a list to be built first and then passed to the outer function.
- `any` and `all` can take a generator instead. This makes memory usage O(1) and actually makes these functions short-circuiting. E.g. if the first condition fails then `all` will immediately return `False` instead of evaluating all the entries.
- `sum`, `min`, `max` => memory usage becomes O(1)
- `list`, `set`, `.join()` => roughly halves memory usage, as an intermediate list is not required to be built.
- lastly, it's two fewer characters to read/think about.
This commit is contained in:
Ankush Menat 2021-05-09 19:44:58 +05:30
parent 4d7f5a8f8d
commit 4754ab71d1
No known key found for this signature in database
GPG key ID: 8EA82E09BBD13AAF
32 changed files with 62 additions and 62 deletions

View file

@ -334,7 +334,7 @@ class AutoRepeat(Document):
if self.reference_doctype and self.reference_document:
res = get_contacts_linking_to(self.reference_doctype, self.reference_document, fields=['email_id'])
res += get_contacts_linked_from(self.reference_doctype, self.reference_document, fields=['email_id'])
email_ids = list(set([d.email_id for d in res]))
email_ids = set(d.email_id for d in res)
if not email_ids:
frappe.msgprint(_('No contacts linked to document'), alert=True)
else:

View file

@ -154,7 +154,7 @@ def filter_dynamic_link_doctypes(doctype, txt, searchfield, start, page_len, fil
doctypes = frappe.db.get_all("DocField", filters=filters, fields=["parent"],
distinct=True, as_list=True)
doctypes = tuple([d for d in doctypes if re.search(txt+".*", _(d[0]), re.IGNORECASE)])
doctypes = tuple(d for d in doctypes if re.search(txt+".*", _(d[0]), re.IGNORECASE))
filters.update({
"dt": ("not in", [d[0] for d in doctypes])

View file

@ -263,7 +263,7 @@ def address_query(doctype, txt, searchfield, start, page_len, filters):
def get_condensed_address(doc):
fields = ["address_title", "address_line1", "address_line2", "city", "county", "state", "country"]
return ", ".join([doc.get(d) for d in fields if doc.get(d)])
return ", ".join(doc.get(d) for d in fields if doc.get(d))
def update_preferred_address(address, field):
frappe.db.set_value('Address', address, field, 0)

View file

@ -450,7 +450,7 @@ class ImportFile:
for row in data_without_first_row:
row_values = row.get_values(parent_column_indexes)
# if the row is blank, it's a child row doc
if all([v in INVALID_VALUES for v in row_values]):
if all(v in INVALID_VALUES for v in row_values):
rows.append(row)
continue
# if we encounter a row which has values in parent columns,
@ -607,7 +607,7 @@ class Row:
if df.fieldtype == "Select":
select_options = get_select_options(df)
if select_options and value not in select_options:
options_string = ", ".join([frappe.bold(d) for d in select_options])
options_string = ", ".join(frappe.bold(d) for d in select_options)
msg = _("Value must be one of {0}").format(options_string)
self.warnings.append(
{"row": self.row_number, "field": df_as_json(df), "message": msg,}
@ -903,7 +903,7 @@ class Column:
if self.df.fieldtype == "Link":
# find all values that dont exist
values = list(set([cstr(v) for v in self.column_values[1:] if v]))
values = list(set(cstr(v) for v in self.column_values[1:] if v))
exists = [
d.name for d in frappe.db.get_all(self.df.options, filters={"name": ("in", values)})
]
@ -939,11 +939,11 @@ class Column:
elif self.df.fieldtype == "Select":
options = get_select_options(self.df)
if options:
values = list(set([cstr(v) for v in self.column_values[1:] if v]))
values = list(set(cstr(v) for v in self.column_values[1:] if v))
invalid = list(set(values) - set(options))
if invalid:
valid_values = ", ".join([frappe.bold(o) for o in options])
invalid_values = ", ".join([frappe.bold(i) for i in invalid])
valid_values = ", ".join(frappe.bold(o) for o in options)
invalid_values = ", ".join(frappe.bold(i) for i in invalid)
self.warnings.append(
{
"col": self.column_number,

View file

@ -181,7 +181,7 @@ def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False,
if d.get("name") and d["name"].startswith('"'):
d["name"] = d["name"][1:-1]
if sum([0 if not val else 1 for val in d.values()]):
if sum(0 if not val else 1 for val in d.values()):
d['doctype'] = dt
if dt == doctype:
doc.update(d)
@ -537,6 +537,6 @@ def get_parent_field(doctype, parenttype):
def delete_child_rows(rows, doctype):
"""delete child rows for all parents"""
for p in list(set([r[1] for r in rows])):
for p in list(set(r[1] for r in rows)):
if p:
frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p)

View file

@ -196,7 +196,7 @@ class DocType(Document):
self.flags.update_fields_to_fetch_queries = []
if set(old_fields_to_fetch) != set([df.fieldname for df in new_meta.get_fields_to_fetch()]):
if set(old_fields_to_fetch) != set(df.fieldname for df in new_meta.get_fields_to_fetch()):
for df in new_meta.get_fields_to_fetch():
if df.fieldname not in old_fields_to_fetch:
link_fieldname, source_fieldname = df.fetch_from.split('.', 1)
@ -765,7 +765,7 @@ def validate_fields(meta):
invalid_fields = ('doctype',)
if fieldname in invalid_fields:
frappe.throw(_("{0}: Fieldname cannot be one of {1}")
.format(docname, ", ".join([frappe.bold(d) for d in invalid_fields])))
.format(docname, ", ".join(frappe.bold(d) for d in invalid_fields)))
def check_unique_fieldname(docname, fieldname):
duplicates = list(filter(None, map(lambda df: df.fieldname==fieldname and str(df.idx) or None, fields)))
@ -999,7 +999,7 @@ def validate_fields(meta):
if docfield.options and (docfield.options not in data_field_options):
df_str = frappe.bold(_(docfield.label))
text_str = _("{0} is an invalid Data field.").format(df_str) + "<br>" * 2 + _("Only Options allowed for Data field are:") + "<br>"
df_options_str = "<ul><li>" + "</li><li>".join([_(x) for x in data_field_options]) + "</ul>"
df_options_str = "<ul><li>" + "</li><li>".join(_(x) for x in data_field_options) + "</ul>"
frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True)

View file

@ -111,7 +111,7 @@ class Domain(Document):
# enable
frappe.db.sql('''update `tabPortal Menu Item` set enabled=1
where route in ({0})'''.format(', '.join(['"{0}"'.format(d) for d in self.data.allow_sidebar_items])))
where route in ({0})'''.format(', '.join('"{0}"'.format(d) for d in self.data.allow_sidebar_items)))
if self.data.remove_sidebar_items:
# disable all
@ -119,4 +119,4 @@ class Domain(Document):
# enable
frappe.db.sql('''update `tabPortal Menu Item` set enabled=0
where route in ({0})'''.format(', '.join(['"{0}"'.format(d) for d in self.data.remove_sidebar_items])))
where route in ({0})'''.format(', '.join('"{0}"'.format(d) for d in self.data.remove_sidebar_items)))

View file

@ -935,7 +935,7 @@ def user_query(doctype, txt, searchfield, start, page_len, filters):
LIMIT %(page_len)s OFFSET %(start)s
""".format(
user_type_condition = user_type_condition,
standard_users=", ".join([frappe.db.escape(u) for u in STANDARD_USERS]),
standard_users=", ".join(frappe.db.escape(u) for u in STANDARD_USERS),
key=searchfield,
fcond=get_filters_cond(doctype, filters, conditions),
mcond=get_match_cond(doctype)

View file

@ -114,7 +114,7 @@ class UserType(Document):
self.select_doctypes = []
select_doctypes = []
user_doctypes = tuple([row.document_type for row in self.user_doctypes])
user_doctypes = tuple(row.document_type for row in self.user_doctypes)
for doctype in user_doctypes:
doc = frappe.get_meta(doctype)

View file

@ -356,7 +356,7 @@ class CustomizeForm(Document):
def delete_custom_fields(self):
meta = frappe.get_meta(self.doc_type)
fields_to_remove = (set([df.fieldname for df in meta.get("fields")])
fields_to_remove = (set(df.fieldname for df in meta.get("fields"))
- set(df.fieldname for df in self.get("fields")))
for fieldname in fields_to_remove:

View file

@ -344,7 +344,7 @@ class Database(object):
values[key] = value[1]
if isinstance(value[1], (tuple, list)):
# value is a list in tuple ("in", ("A", "B"))
_rhs = " ({0})".format(", ".join([self.escape(v) for v in value[1]]))
_rhs = " ({0})".format(", ".join(self.escape(v) for v in value[1]))
del values[key]
if _operator not in ["=", "!=", ">", ">=", "<", "<=", "like", "in", "not in", "not like"]:
@ -1019,7 +1019,7 @@ class Database(object):
:params values: list of list of values
"""
insert_list = []
fields = ", ".join(["`"+field+"`" for field in fields])
fields = ", ".join("`"+field+"`" for field in fields)
for idx, value in enumerate(values):
insert_list.append(tuple(value))

View file

@ -22,7 +22,7 @@ class GlobalSearchSettings(Document):
dts.append(dt.document_type)
if core_dts:
core_dts = (", ".join([frappe.bold(dt) for dt in core_dts]))
core_dts = (", ".join(frappe.bold(dt) for dt in core_dts))
frappe.throw(_("Core Modules {0} cannot be searched in Global Search.").format(core_dts))
if repeated_dts:
@ -61,7 +61,7 @@ def update_global_search_doctypes():
if search_doctypes.get(domain):
global_search_doctypes.extend(search_doctypes.get(domain))
doctype_list = set([dt.name for dt in frappe.get_all("DocType")])
doctype_list = set(dt.name for dt in frappe.get_all("DocType"))
allowed_in_global_search = []
for dt in global_search_doctypes:

View file

@ -132,7 +132,7 @@ def update_tags(doc, tags):
:param doc: Document to be added to global tags
"""
new_tags = list(set([tag.strip() for tag in tags.split(",") if tag]))
new_tags = list(set(tag.strip() for tag in tags.split(",") if tag))
for tag in new_tags:
if not frappe.db.exists("Tag Link", {"parenttype": doc.doctype, "parent": doc.name, "tag": tag}):

View file

@ -594,7 +594,7 @@ class EmailAccount(Document):
# get email account user and set communication as seen
users = frappe.get_all("User Email", filters={ "email_account": self.name },
fields=["parent"])
users = list(set([ user.get("parent") for user in users ]))
users = list(set(user.get("parent") for user in users))
communication._seen = json.dumps(users)
communication.flags.in_receive = True
@ -851,8 +851,8 @@ class EmailAccount(Document):
email_server.update_flag(uid_list=uid_list)
# mark communication as read
docnames = ",".join([ "'%s'"%flag.get("communication") for flag in flags \
if flag.get("action") == "Read" ])
docnames = ",".join("'%s'"%flag.get("communication") for flag in flags \
if flag.get("action") == "Read")
self.set_communication_seen_status(docnames, seen=1)
# mark communication as unread

View file

@ -44,7 +44,7 @@ class TestNewsletter(unittest.TestCase):
email_queue_list = [frappe.get_doc("Email Queue", e.name) for e in frappe.get_all("Email Queue")]
self.assertEqual(len(email_queue_list), 4)
recipients = set([e.recipients[0].recipient for e in email_queue_list])
recipients = set(e.recipients[0].recipient for e in email_queue_list)
self.assertTrue(set(emails).issubset(recipients))
def test_unsubscribe(self):

View file

@ -18,7 +18,7 @@ def get_email_accounts(user=None):
"all_accounts": ""
}
all_accounts = ",".join([ account.get("email_account") for account in accounts ])
all_accounts = ",".join(account.get("email_account") for account in accounts)
if len(accounts) > 1:
email_accounts.append({
"email_account": all_accounts,

View file

@ -540,7 +540,7 @@ def is_downgrade(sql_file_path, verbose=False):
def is_partial(sql_file_path):
with open(sql_file_path) as f:
header = " ".join([f.readline() for _ in range(5)])
header = " ".join(f.readline() for _ in range(5))
if "Partial Backup" in header:
return True
return False

View file

@ -80,7 +80,7 @@ class LDAPSettings(Document):
def sync_roles(self, user, additional_groups=None):
current_roles = set([d.role for d in user.get("roles")])
current_roles = set(d.role for d in user.get("roles"))
needed_roles = set()
needed_roles.add(self.default_role)

View file

@ -166,7 +166,7 @@ def delete_fields(args_dict, delete=0):
frappe.db.sql("""
DELETE FROM `tabSingles`
WHERE doctype='%s' AND field IN (%s)
""" % (dt, ", ".join(["'{}'".format(f) for f in fields])))
""" % (dt, ", ".join("'{}'".format(f) for f in fields)))
else:
existing_fields = frappe.db.multisql({
"mariadb": "DESC `tab%s`" % dt,
@ -189,7 +189,7 @@ def delete_fields(args_dict, delete=0):
frappe.db.commit()
query = "ALTER TABLE `tab%s` " % dt + \
", ".join(["DROP COLUMN `%s`" % f for f in fields_need_to_delete])
", ".join("DROP COLUMN `%s`" % f for f in fields_need_to_delete)
frappe.db.sql(query)
if frappe.db.db_type == 'postgres':

View file

@ -358,7 +358,7 @@ class BaseDocument(object):
frappe.db.sql("""INSERT INTO `tab{doctype}` ({columns})
VALUES ({values})""".format(
doctype = self.doctype,
columns = ", ".join(["`"+c+"`" for c in columns]),
columns = ", ".join("`"+c+"`" for c in columns),
values = ", ".join(["%s"] * len(columns))
), list(d.values()))
except Exception as e:
@ -401,7 +401,7 @@ class BaseDocument(object):
frappe.db.sql("""UPDATE `tab{doctype}`
SET {values} WHERE `name`=%s""".format(
doctype = self.doctype,
values = ", ".join(["`"+c+"`=%s" for c in columns])
values = ", ".join("`"+c+"`=%s" for c in columns)
), list(d.values()) + [name])
except Exception as e:
if frappe.db.is_unique_key_violation(e):

View file

@ -620,7 +620,7 @@ class DatabaseQuery(object):
def get_share_condition(self):
return """`tab{0}`.name in ({1})""".format(self.doctype, ", ".join(["%s"] * len(self.shared))) % \
tuple([frappe.db.escape(s, percent=False) for s in self.shared])
tuple(frappe.db.escape(s, percent=False) for s in self.shared)
def add_user_permissions(self, user_permissions):
meta = frappe.get_meta(self.doctype)
@ -726,8 +726,8 @@ class DatabaseQuery(object):
# `idx desc, modified desc`
# will covert to
# `tabItem`.`idx` desc, `tabItem`.`modified` desc
args.order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(self.doctype,
f.split()[0].strip(), f.split()[1].strip()) for f in meta.sort_field.split(',')])
args.order_by = ', '.join('`tab{0}`.`{1}` {2}'.format(self.doctype,
f.split()[0].strip(), f.split()[1].strip()) for f in meta.sort_field.split(','))
else:
sort_field = meta.sort_field or 'modified'
sort_order = (meta.sort_field and meta.sort_order) or 'desc'
@ -807,8 +807,8 @@ def get_order_by(doctype, meta):
# `idx desc, modified desc`
# will covert to
# `tabItem`.`idx` desc, `tabItem`.`modified` desc
order_by = ', '.join(['`tab{0}`.`{1}` {2}'.format(doctype,
f.split()[0].strip(), f.split()[1].strip()) for f in meta.sort_field.split(',')])
order_by = ', '.join('`tab{0}`.`{1}` {2}'.format(doctype,
f.split()[0].strip(), f.split()[1].strip()) for f in meta.sort_field.split(','))
else:
sort_field = meta.sort_field or 'modified'
sort_order = (meta.sort_field and meta.sort_order) or 'desc'

View file

@ -667,7 +667,7 @@ def trim_tables(doctype=None):
and not f.startswith("_")]
if columns_to_remove:
print(doctype, "columns removed:", columns_to_remove)
columns_to_remove = ", ".join(["drop `{0}`".format(c) for c in columns_to_remove])
columns_to_remove = ", ".join("drop `{0}`".format(c) for c in columns_to_remove)
query = """alter table `tab{doctype}` {columns}""".format(
doctype=doctype, columns=columns_to_remove)
frappe.db.sql_ddl(query)

View file

@ -144,7 +144,7 @@ def update_user_settings(old, new, link_fields):
if not link_fields: return
# find the user settings for the linked doctypes
linked_doctypes = set([d.parent for d in link_fields if not d.issingle])
linked_doctypes = set(d.parent for d in link_fields if not d.issingle)
user_settings_details = frappe.db.sql('''SELECT `user`, `doctype`, `data`
FROM `__UserSettings`
WHERE `data` like %s

View file

@ -312,7 +312,7 @@ def has_controller_permissions(doc, ptype, user=None):
return None
def get_doctypes_with_read():
return list(set([p.parent if type(p.parent) == str else p.parent.encode('UTF8') for p in get_valid_perms()]))
return list(set(p.parent if type(p.parent) == str else p.parent.encode('UTF8') for p in get_valid_perms()))
def get_valid_perms(doctype=None, user=None):
'''Get valid permissions for the current user from DocPerm and Custom DocPerm'''

View file

@ -80,7 +80,7 @@ def exists_in_backup(doctypes, file):
)
with gzip.open(file, "rb") as f:
content = f.read().decode("utf8")
return all([predicate.format(doctype).lower() in content.lower() for doctype in doctypes])
return all(predicate.format(doctype).lower() in content.lower() for doctype in doctypes)
class BaseTestCommands(unittest.TestCase):
@ -355,12 +355,12 @@ class TestCommands(BaseTestCommands):
# test 2: bare functionality for single site
self.execute("bench --site {site} list-apps")
self.assertEqual(self.returncode, 0)
list_apps = set([
list_apps = set(
_x.split()[0] for _x in self.stdout.split("\n")
])
)
doctype = frappe.get_single("Installed Applications").installed_applications
if doctype:
installed_apps = set([x.app_name for x in doctype])
installed_apps = set(x.app_name for x in doctype)
else:
installed_apps = set(frappe.get_installed_apps())
self.assertSetEqual(list_apps, installed_apps)

View file

@ -283,8 +283,8 @@ def clear_cache():
def get_messages_for_app(app, deduplicate=True):
"""Returns all messages (list) for a specified `app`"""
messages = []
modules = ", ".join(['"{}"'.format(m.title().replace("_", " ")) \
for m in frappe.local.app_modules[app]])
modules = ", ".join('"{}"'.format(m.title().replace("_", " ")) \
for m in frappe.local.app_modules[app])
# doctypes
if modules:

View file

@ -186,7 +186,7 @@ def random_string(length):
"""generate a random string"""
import string
from random import choice
return ''.join([choice(string.ascii_letters + string.digits) for i in range(length)])
return ''.join(choice(string.ascii_letters + string.digits) for i in range(length))
def has_gravatar(email):
@ -305,7 +305,7 @@ def make_esc(esc_chars):
"""
Function generator for Escaping special characters
"""
return lambda s: ''.join(['\\' + c if c in esc_chars else c for c in s])
return lambda s: ''.join('\\' + c if c in esc_chars else c for c in s)
# esc / unescape characters -- used for command line
def esc(s, esc_chars):

View file

@ -307,8 +307,8 @@ class BackupGenerator:
backup_summary = self.get_summary()
print("Backup Summary for {0} at {1}".format(frappe.local.site, now()))
title = max([len(x) for x in backup_summary])
path = max([len(x["path"]) for x in backup_summary.values()])
title = max(len(x) for x in backup_summary)
path = max(len(x["path"]) for x in backup_summary.values())
for _type, info in backup_summary.items():
template = "{{0:{0}}}: {{1:{1}}} {{2}}".format(title, path)
@ -381,7 +381,7 @@ class BackupGenerator:
"",
])
generated_header = "\n".join([f"-- {x}" for x in database_header_content]) + "\n"
generated_header = "\n".join(f"-- {x}" for x in database_header_content) + "\n"
with gzip.open(args.backup_path_db, "wt") as f:
f.write(generated_header)

View file

@ -40,10 +40,10 @@ class BotParser(object):
def format_list(self, data):
'''Format list as markdown'''
return _('I found these: ') + ', '.join([' [{title}](/app/Form/{doctype}/{name})'.format(
return _('I found these: ') + ', '.join(' [{title}](/app/Form/{doctype}/{name})'.format(
title = d.title or d.name,
doctype=self.get_doctype(),
name=d.name) for d in data])
name=d.name) for d in data)
def get_doctype(self):
'''returns the doctype name from self.tables'''
@ -58,8 +58,8 @@ class ShowNotificationBot(BotParser):
if open_items:
return ("Following items need your attention:\n\n"
+ "\n\n".join(["{0} [{1}](/app/List/{1})".format(d[1], d[0])
for d in open_items if d[1] > 0]))
+ "\n\n".join("{0} [{1}](/app/List/{1})".format(d[1], d[0])
for d in open_items if d[1] > 0))
else:
return 'Take it easy, nothing urgent needs your attention'

View file

@ -14,10 +14,10 @@ def resolve_class(classes):
return classes
if isinstance(classes, (list, tuple)):
return " ".join([resolve_class(c) for c in classes]).strip()
return " ".join(resolve_class(c) for c in classes).strip()
if isinstance(classes, dict):
return " ".join([classname for classname in classes if classes[classname]]).strip()
return " ".join(classname for classname in classes if classes[classname]).strip()
return classes

View file

@ -346,7 +346,7 @@ def get_context(context):
if missing:
frappe.throw(_('Mandatory Information missing:') + '<br><br>'
+ '<br>'.join(['{0} ({1})'.format(d.label, d.fieldtype) for d in missing]))
+ '<br>'.join('{0} ({1})'.format(d.label, d.fieldtype) for d in missing))
def allow_website_search_indexing(self):
return False

View file

@ -23,7 +23,7 @@ class WebsiteSlideshow(Document):
files = map(lambda row: row.image, self.slideshow_items)
if files:
result = frappe.get_all("File", filters={ "file_url":("in", list(files)) }, fields="is_private")
if any([file.is_private for file in result]):
if any(file.is_private for file in result):
frappe.throw(_("All Images attached to Website Slideshow should be public"))
def get_slideshow(doc):