feat: website_search_field in DocType doctype

Add website_search_field to declare the content field to index, and
remove the background-job execution of search indexing (it now runs inline).
This commit is contained in:
hasnain2808 2021-06-18 00:12:07 +05:30
parent 292d7f914c
commit 9bea81e30c
3 changed files with 27 additions and 20 deletions

View file

@ -76,6 +76,7 @@
"index_web_pages_for_search",
"route",
"is_published_field",
"website_search_field",
"advanced",
"engine"
],
@ -547,6 +548,12 @@
{
"fieldname": "column_break_51",
"fieldtype": "Column Break"
},
{
"depends_on": "has_web_view",
"fieldname": "website_search_field",
"fieldtype": "Data",
"label": "Website Search Field"
}
],
"icon": "fa fa-bolt",
@ -628,7 +635,7 @@
"link_fieldname": "reference_doctype"
}
],
"modified": "2021-04-16 12:26:41.031135",
"modified": "2021-06-17 23:31:44.974199",
"modified_by": "Administrator",
"module": "Core",
"name": "DocType",
@ -662,4 +669,4 @@
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}
}

View file

@ -89,6 +89,12 @@ Otherwise, check the server logs and ensure that all the required services are r
for fn in frappe.get_hooks('after_migrate', app_name=app):
frappe.get_attr(fn)()
# build web_routes index
if not skip_search_index:
# Run this last as it updates the current session
print('Building search index for {}'.format(frappe.local.site))
build_index_for_all_routes()
frappe.db.commit()
clear_notifications()
@ -96,18 +102,6 @@ Otherwise, check the server logs and ensure that all the required services are r
frappe.publish_realtime("version-update")
frappe.flags.in_migrate = False
# build web_routes index
if not skip_search_index:
# Run this last as it updates the current session
print('Queuing search index build for {}'.format(frappe.local.site))
enqueue(
method=build_index_for_all_routes,
job_name='Search index build for {}'.format(frappe.local.site),
now=0,
queue='background',
timeout=10000
)
finally:
with open(touched_tables_file, 'w') as f:
json.dump(list(frappe.flags.touched_tables), f, sort_keys=True, indent=4)

View file

@ -35,10 +35,12 @@ class WebsiteSearch(FullTextSearch):
if getattr(self, "_items_to_index", False):
return self._items_to_index
routes = get_static_pages_from_all_apps() + slugs_with_web_view()
self._items_to_index = []
routes = get_static_pages_from_all_apps() + slugs_with_web_view(self._items_to_index )
for i, route in enumerate(routes):
update_progress_bar("Retrieving Routes", i, len(routes))
self._items_to_index += [self.get_document_to_index(route)]
@ -85,16 +87,20 @@ class WebsiteSearch(FullTextSearch):
)
def slugs_with_web_view(_items_to_index):
	"""Collect indexable website routes from all DocTypes that have web views.

	Scans every DocType with ``has_web_view``, ``allow_guest_to_view`` and
	``index_web_pages_for_search`` enabled. For each published document:

	* If the DocType declares a ``website_search_field``, the document's
	  content is appended directly to ``_items_to_index`` (the list is
	  mutated in place, so the caller observes the additions) and no page
	  render is required.
	* Otherwise the document's route is collected and returned so the caller
	  can render the page to extract its content.

	:param _items_to_index: list extended in place with ``frappe._dict``
		entries carrying ``title`` / ``content`` / ``path``.
	:return: list of route strings that still need to be rendered and indexed.
	"""
	all_routes = []
	filters = {"has_web_view": 1, "allow_guest_to_view": 1, "index_web_pages_for_search": 1}
	fields = ["name", "is_published_field", "website_search_field"]
	doctype_with_web_views = frappe.get_all("DocType", filters=filters, fields=fields)

	for doctype in doctype_with_web_views:
		# Without a publish flag we cannot tell published docs apart; skip.
		if not doctype.is_published_field:
			continue

		# BUG FIX: "title" must be selected explicitly — the original only
		# fetched "route" and the search field, so doc.title was always None.
		# Also avoid passing a falsy website_search_field (None/"") into the
		# field list, which the original did unconditionally.
		doc_fields = ["route", "title"]
		if doctype.website_search_field:
			doc_fields.append(doctype.website_search_field)

		docs = frappe.get_all(
			doctype.name,
			filters={doctype.is_published_field: 1},
			fields=doc_fields,
		)

		if doctype.website_search_field:
			# Content is available directly on the document; index it without
			# rendering the page.
			for doc in docs:
				_items_to_index += [
					frappe._dict(
						title=doc.title,
						content=getattr(doc, doctype.website_search_field),
						path=doc.route,
					)
				]
		else:
			all_routes += [doc.route for doc in docs]

	return all_routes