refactor: remove redundant lock_rows
This commit is contained in:
parent
8a0825fe6d
commit
b1a723f514
1 changed file with 6 additions and 8 deletions
|
|
@@ -198,7 +198,7 @@ def get_docs(
|
||||||
(df.fieldname, df.options) for df in meta.get_table_fields() if not is_virtual_doctype(df.options)
|
(df.fieldname, df.options) for df in meta.get_table_fields() if not is_virtual_doctype(df.options)
|
||||||
]
|
]
|
||||||
controller = get_controller(doctype)
|
controller = get_controller(doctype)
|
||||||
lock_rows = for_update and frappe.db.db_type != "sqlite"
|
for_update = for_update and frappe.db.db_type != "sqlite"
|
||||||
|
|
||||||
if as_iterator:
|
if as_iterator:
|
||||||
return _get_docs_generator(
|
return _get_docs_generator(
|
||||||
|
|
@@ -210,7 +210,6 @@ def get_docs(
|
||||||
limit=limit,
|
limit=limit,
|
||||||
limit_start=limit_start,
|
limit_start=limit_start,
|
||||||
order_by=order_by,
|
order_by=order_by,
|
||||||
lock_rows=lock_rows,
|
|
||||||
for_update=for_update,
|
for_update=for_update,
|
||||||
distinct=distinct,
|
distinct=distinct,
|
||||||
)
|
)
|
||||||
|
|
@@ -222,7 +221,7 @@ def get_docs(
|
||||||
order_by=order_by,
|
order_by=order_by,
|
||||||
limit=limit,
|
limit=limit,
|
||||||
offset=limit_start,
|
offset=limit_start,
|
||||||
lock_rows=lock_rows,
|
for_update=for_update,
|
||||||
child_tables=child_tables,
|
child_tables=child_tables,
|
||||||
distinct=distinct,
|
distinct=distinct,
|
||||||
)
|
)
|
||||||
|
|
@@ -240,7 +239,6 @@ def _get_docs_generator(
|
||||||
limit,
|
limit,
|
||||||
limit_start,
|
limit_start,
|
||||||
order_by,
|
order_by,
|
||||||
lock_rows,
|
|
||||||
for_update,
|
for_update,
|
||||||
distinct,
|
distinct,
|
||||||
) -> Generator["Document"]:
|
) -> Generator["Document"]:
|
||||||
|
|
@@ -261,7 +259,7 @@ def _get_docs_generator(
|
||||||
order_by=order_by,
|
order_by=order_by,
|
||||||
limit=current_chunk_size,
|
limit=current_chunk_size,
|
||||||
offset=current_offset,
|
offset=current_offset,
|
||||||
lock_rows=lock_rows,
|
for_update=for_update,
|
||||||
child_tables=child_tables,
|
child_tables=child_tables,
|
||||||
distinct=distinct,
|
distinct=distinct,
|
||||||
)
|
)
|
||||||
|
|
@@ -276,7 +274,7 @@ def _get_docs_generator(
|
||||||
current_offset += len(chunk_data)
|
current_offset += len(chunk_data)
|
||||||
|
|
||||||
|
|
||||||
def _fetch_rows(doctype, *, filters, order_by, limit, offset, lock_rows, child_tables, distinct=False):
|
def _fetch_rows(doctype, *, filters, order_by, limit, offset, for_update, child_tables, distinct=False):
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
if limit is not None:
|
if limit is not None:
|
||||||
kwargs["limit"] = limit
|
kwargs["limit"] = limit
|
||||||
|
|
@@ -288,7 +286,7 @@ def _fetch_rows(doctype, *, filters, order_by, limit, offset, lock_rows, child_t
|
||||||
filters=filters or {},
|
filters=filters or {},
|
||||||
fields=["*"],
|
fields=["*"],
|
||||||
order_by=order_by,
|
order_by=order_by,
|
||||||
for_update=lock_rows,
|
for_update=for_update,
|
||||||
distinct=distinct,
|
distinct=distinct,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
).run(as_dict=True)
|
).run(as_dict=True)
|
||||||
|
|
@@ -308,7 +306,7 @@ def _fetch_rows(doctype, *, filters, order_by, limit, offset, lock_rows, child_t
|
||||||
filters={"parent": ("in", parent_names), "parenttype": doctype, "parentfield": fieldname},
|
filters={"parent": ("in", parent_names), "parenttype": doctype, "parentfield": fieldname},
|
||||||
fields=["*"],
|
fields=["*"],
|
||||||
order_by="idx asc",
|
order_by="idx asc",
|
||||||
for_update=lock_rows,
|
for_update=for_update,
|
||||||
).run(as_dict=True)
|
).run(as_dict=True)
|
||||||
|
|
||||||
for child in child_table_data:
|
for child in child_table_data:
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue