Merge branch 'develop' of https://github.com/frappe/frappe into v16-ui-fixes

This commit is contained in:
Saqib Ansari 2026-01-01 12:23:32 +05:30
commit 2d141aca75
9 changed files with 132 additions and 45 deletions

View file

@ -126,6 +126,7 @@ def _bulk_action(doctype, docnames, action, data, task_id=None):
)
except Exception:
frappe.log_error("Bulk action failed")
failed.append(docname)
frappe.db.rollback()

View file

@ -857,7 +857,8 @@ export default class Grid {
}
add_new_row(idx, callback, show, copy_doc, go_to_last_page = false, go_to_first_page = false) {
if (this.is_editable()) {
let cannot_add_rows = this.cannot_add_rows || (this.df && this.df.cannot_add_rows);
if (this.is_editable() && !cannot_add_rows) {
if (go_to_last_page) {
this.grid_pagination.go_to_last_page_to_add_row();
} else if (go_to_first_page) {

View file

@ -34,6 +34,7 @@ frappe.ui.form.QuickEntryForm = class QuickEntryForm extends frappe.ui.Dialog {
this.doc = doc;
this.force = force ? force : false;
this.dialog = this; // for backward compatibility
this.layout = this;
}
setup() {
@ -326,6 +327,51 @@ frappe.ui.form.QuickEntryForm = class QuickEntryForm extends frappe.ui.Dialog {
this.add_custom_action(__("Edit Full Form"), () => this.open_doc(true));
}
set_intro(txt, color) {
if (txt) {
this.set_alert(txt, color || "info");
} else {
this.clear_alert();
}
}
set_df_property(fieldname, prop, value) {
const field = this.fields_dict?.[fieldname];
if (!field) return;
field.df[prop] = value;
field.refresh?.();
}
toggle_display(fnames, show) {
this._apply_on_fields(fnames, (field) => {
field.df.hidden = show ? 0 : 1;
field.refresh?.();
});
}
toggle_enable(fnames, enable) {
this._apply_on_fields(fnames, (field) => {
field.df.read_only = enable ? 0 : 1;
field.refresh?.();
});
}
toggle_reqd(fnames, mandatory) {
this._apply_on_fields(fnames, (field) => {
field.df.reqd = mandatory ? 1 : 0;
field.refresh?.();
});
}
_apply_on_fields(fnames, fn) {
if (!fnames) return;
const names = Array.isArray(fnames) ? fnames : [fnames];
names.forEach((fname) => {
const field = this.fields_dict?.[fname];
if (field) fn(field);
});
}
set_defaults() {
var me = this;
// set defaults

View file

@ -195,7 +195,8 @@ function get_number_format_info(format) {
}
// get the precision from the number format
info.precision = format.split(info.decimal_str).slice(1)[0].length;
info.precision =
info.decimal_str == "" ? 0 : format.split(info.decimal_str).slice(1)[0].length;
return info;
}

View file

@ -961,6 +961,7 @@ class SQLiteSearch(ABC):
for i in range(0, len(documents), chunk_size):
chunk = documents[i : i + chunk_size]
doc_ids_to_delete = []
values_to_insert = []
for doc in chunk:
@ -983,6 +984,7 @@ class SQLiteSearch(ABC):
# Build values tuple dynamically based on schema
values = []
doc_id = None
for field in all_fields:
# Build doc_id automatically from doctype:name
if field == "doc_id":
@ -991,8 +993,15 @@ class SQLiteSearch(ABC):
else:
values.append(doc.get(field, ""))
doc_ids_to_delete.append(doc_id)
values_to_insert.append(tuple(values))
# Delete existing rows for these doc_ids first using a single statement
if doc_ids_to_delete:
placeholders_for_delete = ",".join(["?" for _ in doc_ids_to_delete])
delete_sql = f"DELETE FROM search_fts WHERE doc_id IN ({placeholders_for_delete})"
cursor.execute(delete_sql, doc_ids_to_delete)
# Insert the chunk
if values_to_insert:
cursor.executemany(insert_sql, values_to_insert)

View file

@ -481,6 +481,70 @@ class TestSQLiteSearchAPI(IntegrationTestCase):
disabled_search.build_index() # Should not raise error but do nothing
self.assertFalse(disabled_search.index_exists())
def test_deduplication_on_reindex(self):
	"""Test that re-indexing the same document does not create duplicates."""
	self.search.build_index()

	# Create a throwaway document that we will index repeatedly.
	note = frappe.get_doc(
		{
			"doctype": "Note",
			"title": "Deduplication Test Document",
			"content": "This document tests deduplication functionality",
		}
	)
	note.insert()

	def hits_for(query):
		# Return the search results whose name matches the test document.
		found = self.search.search(query)["results"]
		return [r for r in found if r["name"] == note.name]

	try:
		# First index: exactly one hit expected.
		self.search.index_doc("Note", note.name)
		self.assertEqual(
			len(hits_for("Deduplication Test")), 1, "Should find exactly one instance of the document"
		)

		# Re-indexing the same document repeatedly must not add rows.
		for _ in range(3):
			self.search.index_doc("Note", note.name)
		self.assertEqual(
			len(hits_for("Deduplication Test")),
			1,
			"Should still find exactly one instance after re-indexing",
		)

		# Updating then re-indexing should replace the old entry, not duplicate it.
		note.content = "Updated content for deduplication testing"
		note.save()
		self.search.index_doc("Note", note.name)
		updated_results = hits_for("Updated content deduplication")
		self.assertEqual(len(updated_results), 1, "Should find exactly one instance with updated content")
		# Content may contain HTML markup from search highlighting, so check for words individually
		self.assertIn("Updated", updated_results[0]["content"])
		self.assertIn("content", updated_results[0]["content"])

		# A full rebuild of the index should also keep a single copy.
		self.search.build_index()
		self.assertEqual(
			len(hits_for("Deduplication Test")),
			1,
			"Should still find exactly one instance after full rebuild",
		)

		# Verify at database level - check raw count in FTS table.
		conn = sqlite3.connect(self.search.db_path)
		cursor = conn.cursor()
		cursor.execute("SELECT COUNT(*) FROM search_fts WHERE doc_id = ?", (f"Note:{note.name}",))
		db_count = cursor.fetchone()[0]
		conn.close()
		self.assertEqual(db_count, 1, "Database should contain exactly one entry for the document")
	finally:
		note.delete()
@patch("frappe.enqueue")
def test_background_operations(self, mock_enqueue):
"""Test background job integration and module-level functions."""

View file

@ -37,13 +37,8 @@ def strip_exif_data(content, content_type) -> bytes:
if content_type == "image/jpeg" and original_image.mode in ("RGBA", "P"):
original_image = original_image.convert("RGB")
new_image = Image.new(original_image.mode, original_image.size)
new_image.putdata(list(original_image.getdata()))
new_image.save(output, format=content_type.split("/")[1])
content = output.getvalue()
return content
original_image.save(output, format=content_type.split("/")[1], exif=b"")
return output.getvalue()
def optimize_image(content, content_type, max_width=1024, max_height=768, optimize=True, quality=85):

View file

@ -9,17 +9,12 @@ import requests
import frappe
from frappe import _
from frappe.utils.print_utils import find_or_download_chromium_executable
# TODO: close browser when worker is killed.
class ChromePDFGenerator:
EXECUTABLE_PATHS: ClassVar[dict[str, list[str]]] = {
"linux": ["chrome-linux", "headless_shell"],
"darwin": ["chrome-mac", "headless_shell"],
"windows": ["chrome-win", "headless_shell.exe"],
}
_instance = None
_browsers: ClassVar[list] = []
@ -62,8 +57,6 @@ class ChromePDFGenerator:
self._devtools_url = self.CHROMIUM_WEBSOCKET_URL
return
# only when we want to use chromium from a specific path ( incase we don't have chromium in bench folder )
self.CHROMIUM_BINARY_PATH = site_config.get("chromium_binary_path", "")
"""
Number of allowed open websocket connections to chromium.
This number will basically define how many concurrent requests can be handled by one chromium instance.
@ -77,38 +70,11 @@ class ChromePDFGenerator:
# time to wait for chromium to start and provide dev tools url used in _set_devtools_url.
self.START_TIMEOUT = site_config.get("chromium_start_timeout", 3)
self._chromium_path = (
self._find_chromium_executable() if not self.CHROMIUM_BINARY_PATH else self.CHROMIUM_BINARY_PATH
)
self._chromium_path = find_or_download_chromium_executable()
if self._verify_chromium_installation():
if not self._devtools_url:
self.start_chromium_process()
def _find_chromium_executable(self):
"""Finds the Chromium executable or raises an error if not found."""
bench_path = frappe.utils.get_bench_path()
"""Determine the path to the Chromium executable. chromium is downloaded by download_chromium in print_designer/install.py"""
chromium_dir = os.path.join(bench_path, "chromium")
if not os.path.exists(chromium_dir):
frappe.throw(_("Chromium is not downloaded. Please run the setup first."))
platform_name = platform.system().lower()
if platform_name not in ["linux", "darwin", "windows"]:
frappe.throw(f"Unsupported platform: {platform_name}")
executable_name = self.EXECUTABLE_PATHS.get(platform_name)
# Construct the full path to the executable
exec_path = Path(chromium_dir).joinpath(*executable_name)
if not exec_path.exists():
frappe.throw(
f"Chromium executable not found: {exec_path}. please run bench setup-new-pdf-backend"
)
return str(exec_path)
def _verify_chromium_installation(self):
"""Ensures Chromium is available and executable, raising clearer errors if not."""
if not os.path.exists(self._chromium_path):

View file

@ -172,8 +172,12 @@ def setup_chromium():
def find_or_download_chromium_executable():
"""Finds the Chromium executable or downloads if not found."""
import platform
import shutil
from pathlib import Path
if chromium_path := shutil.which(frappe.get_common_site_config().chromium_path):
return chromium_path
bench_path = frappe.utils.get_bench_path()
"""Determine the path to the Chromium executable."""
chromium_dir = os.path.join(bench_path, "chromium")