Merge branch 'link_title_refactor' of https://github.com/nextchamp-saqib/frappe into link_title_refactor

This commit is contained in:
Saqib Ansari 2022-02-09 10:32:47 +05:30
commit 073fbdd3bc
78 changed files with 1736 additions and 1077 deletions

View file

@ -58,6 +58,23 @@ context('Control Link', () => {
cy.get('.frappe-control[data-fieldname=link] input').should('have.value', '');
});
it("should be possible set empty value explicitly", () => {
get_dialog_with_link().as("dialog");
cy.intercept("POST", "/api/method/frappe.client.validate_link").as("validate_link");
cy.get(".frappe-control[data-fieldname=link] input")
.type(" ", { delay: 100 })
.blur();
cy.wait("@validate_link");
cy.get(".frappe-control[data-fieldname=link] input").should("have.value", "");
cy.window()
.its("cur_dialog")
.then((dialog) => {
expect(dialog.get_value("link")).to.equal('');
});
});
it('should route to form on arrow click', () => {
get_dialog_with_link().as('dialog');
@ -109,7 +126,7 @@ context('Control Link', () => {
});
});
it('should fetch valid value', () => {
it('should update dependant fields (via fetch_from)', () => {
cy.get('@todos').then(todos => {
cy.visit(`/app/todo/${todos[0]}`);
cy.intercept('POST', '/api/method/frappe.client.validate_link').as('validate_link');
@ -120,7 +137,67 @@ context('Control Link', () => {
cy.get('.frappe-control[data-fieldname=assigned_by_full_name] .control-value').should(
'contain', 'Administrator'
);
cy.window()
.its("cur_frm.doc.assigned_by")
.should("eq", "Administrator");
// invalid input
cy.get('@input').clear().type('invalid input', {delay: 100}).blur();
cy.get('.frappe-control[data-fieldname=assigned_by_full_name] .control-value').should(
'contain', ''
);
cy.window()
.its("cur_frm.doc.assigned_by")
.should("eq", null);
// set valid value again
cy.get('@input').clear().type('Administrator', {delay: 100}).blur();
cy.wait('@validate_link');
cy.window()
.its("cur_frm.doc.assigned_by")
.should("eq", "Administrator");
// clear input
cy.get('@input').clear().blur();
cy.get('.frappe-control[data-fieldname=assigned_by_full_name] .control-value').should(
'contain', ''
);
cy.window()
.its("cur_frm.doc.assigned_by")
.should("eq", "");
});
});
it("should set default values", () => {
cy.insert_doc("Property Setter", {
"doctype_or_field": "DocField",
"doc_type": "ToDo",
"field_name": "assigned_by",
"property": "default",
"property_type": "Text",
"value": "Administrator"
}, true);
cy.reload();
cy.new_form("ToDo");
cy.fill_field("description", "new", "Text Editor");
cy.intercept("POST", "/api/method/frappe.desk.form.save.savedocs").as("save_form");
cy.findByRole("button", {name: "Save"}).click();
cy.wait("@save_form");
cy.get(".frappe-control[data-fieldname=assigned_by_full_name] .control-value").should(
"contain", "Administrator"
);
// if user clears default value explicitly, system should not reset default again
cy.get_field("assigned_by").clear().blur();
cy.intercept("POST", "/api/method/frappe.desk.form.save.savedocs").as("save_form");
cy.findByRole("button", {name: "Save"}).click();
cy.wait("@save_form");
cy.get_field("assigned_by").should("have.value", "");
cy.get(".frappe-control[data-fieldname=assigned_by_full_name] .control-value").should(
"contain", ""
);
});
});

View file

@ -110,34 +110,6 @@ Cypress.Commands.add('get_doc', (doctype, name) => {
});
});
// Inserts a document of `doctype` through the REST API. When
// `ignore_duplicate` is truthy, an HTTP 409 (document already exists)
// is tolerated instead of failing the test. Yields the response body.
Cypress.Commands.add('insert_doc', (doctype, args, ignore_duplicate) => {
	return cy
		.window()
		.its('frappe.csrf_token')
		.then((csrf_token) => {
			// 200 is always acceptable; 409 only when duplicates are ignored.
			const allowed_statuses = ignore_duplicate ? [200, 409] : [200];
			return cy
				.request({
					method: 'POST',
					url: `/api/resource/${doctype}`,
					body: args,
					headers: {
						Accept: 'application/json',
						'Content-Type': 'application/json',
						'X-Frappe-CSRF-Token': csrf_token
					},
					failOnStatusCode: !ignore_duplicate
				})
				.then((res) => {
					expect(res.status).to.be.oneOf(allowed_statuses);
					return res.body;
				});
		});
});
Cypress.Commands.add('remove_doc', (doctype, name) => {
return cy
.window()

View file

@ -143,6 +143,8 @@ lang = local("lang")
# This if block is never executed when running the code. It is only used for
# telling static code analyzer where to find dynamically defined attributes.
if typing.TYPE_CHECKING:
from frappe.utils.redis_wrapper import RedisWrapper
from frappe.database.mariadb.database import MariaDBDatabase
from frappe.database.postgres.database import PostgresDatabase
from frappe.query_builder.builder import MariaDB, Postgres
@ -150,6 +152,7 @@ if typing.TYPE_CHECKING:
db: typing.Union[MariaDBDatabase, PostgresDatabase]
qb: typing.Union[MariaDB, Postgres]
# end: static analysis hack
def init(site, sites_path=None, new_site=False):
@ -311,9 +314,8 @@ def destroy():
release_local(local)
# memcache
redis_server = None
def cache():
def cache() -> "RedisWrapper":
"""Returns redis connection."""
global redis_server
if not redis_server:

View file

@ -99,7 +99,6 @@ def get_value(doctype, fieldname, filters=None, as_dict=True, debug=False, paren
if not filters:
filters = None
if frappe.get_meta(doctype).issingle:
value = frappe.db.get_values_from_single(fields, filters, doctype, as_dict=as_dict, debug=debug)
else:

View file

@ -623,6 +623,7 @@ def transform_database(context, table, engine, row_format, failfast):
@click.command('run-tests')
@click.option('--app', help="For App")
@click.option('--doctype', help="For DocType")
@click.option('--case', help="Select particular TestCase")
@click.option('--doctype-list-path', help="Path to .txt file for list of doctypes. Example erpnext/tests/server/agriculture.txt")
@click.option('--test', multiple=True, help="Specific test")
@click.option('--ui-tests', is_flag=True, default=False, help="Run UI Tests")
@ -636,7 +637,7 @@ def transform_database(context, table, engine, row_format, failfast):
@pass_context
def run_tests(context, app=None, module=None, doctype=None, test=(), profile=False,
coverage=False, junit_xml_output=False, ui_tests = False, doctype_list_path=None,
skip_test_records=False, skip_before_tests=False, failfast=False):
skip_test_records=False, skip_before_tests=False, failfast=False, case=None):
with CodeCoverage(coverage, app):
import frappe.test_runner
@ -658,7 +659,7 @@ def run_tests(context, app=None, module=None, doctype=None, test=(), profile=Fal
ret = frappe.test_runner.main(app, module, doctype, context.verbose, tests=tests,
force=context.force, profile=profile, junit_xml_output=junit_xml_output,
ui_tests=ui_tests, doctype_list_path=doctype_list_path, failfast=failfast)
ui_tests=ui_tests, doctype_list_path=doctype_list_path, failfast=failfast, case=case)
if len(ret.failures) == 0 and len(ret.errors) == 0:
ret = 0

View file

@ -44,6 +44,7 @@ frappe.ui.form.on('Data Import', {
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
frm.trigger('update_primary_action');
// hide progress when complete
if (data.current === data.total) {
@ -80,7 +81,10 @@ frappe.ui.form.on('Data Import', {
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
if (frm.doc.status != 'Pending')
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
@ -128,40 +132,49 @@ frappe.ui.form.on('Data Import', {
},
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
frappe.call({
'method': 'frappe.core.doctype.data_import.data_import.get_import_status',
'args': {
'data_import_name': frm.doc.name
},
'callback': function(r) {
let successful_records = cint(r.message.success);
let failed_records = cint(r.message.failed);
let total_records = cint(r.message.total_records);
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
if (!total_records) return;
let message;
if (failed_records === 0) {
let message_args = [successful_records];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records, total_records];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
});
},
show_report_error_button(frm) {
@ -275,7 +288,7 @@ frappe.ui.form.on('Data Import', {
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let import_log = preview_data.import_log;
if (
frm.import_preview &&
@ -316,6 +329,15 @@ frappe.ui.form.on('Data Import', {
);
},
export_import_log(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import.data_import.download_import_log',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let columns = preview_data.columns;
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
@ -391,92 +413,131 @@ frappe.ui.form.on('Data Import', {
frm.trigger('show_import_log');
},
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
render_import_log(frm) {
frappe.call({
'method': 'frappe.client.get_list',
'args': {
'doctype': 'Data Import Log',
'filters': {
'data_import': frm.doc.name
},
'fields': ['success', 'docname', 'messages', 'exception', 'row_indexes'],
'limit_page_length': 5000,
'order_by': 'log_index'
},
callback: function(r) {
let logs = r.message;
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
if (logs.length === 0) return;
frm.toggle_display('import_log_section', true);
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = (JSON.parse(log.messages || '[]'))
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}" style="margin-top: 15px;">
${__('Show Traceback')}
</button>
<div class="collapse" id="${id}" style="margin-top: 15px;">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${JSON.parse(log.row_indexes).join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
}
});
},
show_import_log(frm) {
frm.toggle_display('import_log_section', false);
if (frm.import_in_progress) {
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
frappe.call({
'method': 'frappe.client.get_count',
'args': {
'doctype': 'Data Import Log',
'filters': {
'data_import': frm.doc.name
}
},
'callback': function(r) {
let count = r.message;
if (count < 5000) {
frm.trigger('render_import_log');
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}" style="margin-top: 15px;">
${__('Show Traceback')}
</button>
<div class="collapse" id="${id}" style="margin-top: 15px;">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
frm.toggle_display('import_log_section', false);
frm.add_custom_button(__('Export Import Log'), () =>
frm.trigger('export_import_log')
);
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
}
});
},
});

View file

@ -1,194 +1,197 @@
{
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"html_5",
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File",
"read_only_depends_on": "eval: ['Success', 'Partial Success'].includes(doc.status)"
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Import File Errors and Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file",
"fieldname": "html_5",
"fieldtype": "HTML",
"options": "<h5 class=\"text-muted uppercase\">Or</h5>"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file\n",
"description": "Must be a publicly accessible Google Sheets URL",
"fieldname": "google_sheets_url",
"fieldtype": "Data",
"label": "Import from Google Sheets",
"read_only_depends_on": "eval: ['Success', 'Partial Success'].includes(doc.status)"
},
{
"depends_on": "eval:doc.google_sheets_url && !doc.__unsaved && ['Success', 'Partial Success'].includes(doc.status)",
"fieldname": "refresh_google_sheet",
"fieldtype": "Button",
"label": "Refresh Google Sheet"
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2021-04-11 01:50:42.074623",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"payload_count",
"html_5",
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File",
"read_only_depends_on": "eval: ['Success', 'Partial Success'].includes(doc.status)"
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Import File Errors and Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file",
"fieldname": "html_5",
"fieldtype": "HTML",
"options": "<h5 class=\"text-muted uppercase\">Or</h5>"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file\n",
"description": "Must be a publicly accessible Google Sheets URL",
"fieldname": "google_sheets_url",
"fieldtype": "Data",
"label": "Import from Google Sheets",
"read_only_depends_on": "eval: ['Success', 'Partial Success'].includes(doc.status)"
},
{
"depends_on": "eval:doc.google_sheets_url && !doc.__unsaved && ['Success', 'Partial Success'].includes(doc.status)",
"fieldname": "refresh_google_sheet",
"fieldtype": "Button",
"label": "Refresh Google Sheet"
},
{
"fieldname": "payload_count",
"fieldtype": "Int",
"hidden": 1,
"label": "Payload Count",
"read_only": 1
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2022-02-01 20:08:37.624914",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"naming_rule": "Expression",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -27,6 +27,7 @@ class DataImport(Document):
self.validate_import_file()
self.validate_google_sheets_url()
self.set_payload_count()
def validate_import_file(self):
if self.import_file:
@ -38,6 +39,12 @@ class DataImport(Document):
return
validate_google_sheets_url(self.google_sheets_url)
def set_payload_count(self):
	"""Cache the number of payloads in the attached import file on the document."""
	if not self.import_file:
		return
	importer = self.get_importer()
	self.payload_count = len(importer.import_file.get_payloads_for_import())
@frappe.whitelist()
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
if import_file:
@ -67,7 +74,7 @@ class DataImport(Document):
enqueue(
start_import,
queue="default",
timeout=6000,
timeout=10000,
event="data_import",
job_name=self.name,
data_import=self.name,
@ -80,6 +87,9 @@ class DataImport(Document):
def export_errored_rows(self):
return self.get_importer().export_errored_rows()
def download_import_log(self):
	"""Delegate CSV export of this import's log to the Importer."""
	importer = self.get_importer()
	return importer.export_import_log()
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
@ -90,7 +100,6 @@ def get_preview_from_template(data_import, import_file=None, google_sheets_url=N
import_file, google_sheets_url
)
@frappe.whitelist()
def form_start_import(data_import):
return frappe.get_doc("Data Import", data_import).start_import()
@ -145,6 +154,30 @@ def download_errored_template(data_import_name):
data_import = frappe.get_doc("Data Import", data_import_name)
data_import.export_errored_rows()
@frappe.whitelist()
def download_import_log(data_import_name):
	"""Stream the import log of the given Data Import as a CSV download."""
	doc = frappe.get_doc("Data Import", data_import_name)
	doc.download_import_log()
@frappe.whitelist()
def get_import_status(data_import_name):
	"""Return import progress counts for a Data Import.

	Returns a dict with:
	- success: number of successfully imported payloads (0 if none)
	- failed: number of failed payloads (0 if none)
	- total_records: total payloads expected, from Data Import.payload_count
	"""
	# Default both counters to 0 so the response always has a complete
	# shape; previously a key was missing entirely when no log rows of
	# that kind existed, forcing every caller to guard against undefined.
	import_status = {'success': 0, 'failed': 0}
	logs = frappe.get_all(
		'Data Import Log',
		fields=['count(*) as count', 'success'],
		filters={'data_import': data_import_name},
		group_by='success',
	)
	for log in logs:
		if log.get('success'):
			import_status['success'] = log.get('count')
		else:
			import_status['failed'] = log.get('count')

	# payload_count may be None for documents created before the field
	# existed — TODO confirm callers treat a falsy total as "unknown".
	import_status['total_records'] = frappe.db.get_value(
		'Data Import', data_import_name, 'payload_count'
	)
	return import_status
def import_file(
doctype, file_path, import_type, submit_after_import=False, console=False

View file

@ -24,12 +24,14 @@ frappe.listview_settings['Data Import'] = {
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
if (status == 'Pending') {
status = 'Not Started';
}
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {

View file

@ -47,7 +47,13 @@ class Importer:
)
def get_data_for_import_preview(self):
return self.import_file.get_data_for_import_preview()
out = self.import_file.get_data_for_import_preview()
out.import_log = frappe.db.get_all("Data Import Log", fields=["row_indexes", "success"],
filters={"data_import": self.data_import.name},
order_by="log_index", limit=10)
return out
def before_import(self):
# set user lang for translations
@ -58,7 +64,6 @@ class Importer:
frappe.flags.in_import = True
frappe.flags.mute_emails = self.data_import.mute_emails
self.data_import.db_set("status", "Pending")
self.data_import.db_set("template_warnings", "")
def import_data(self):
@ -79,20 +84,25 @@ class Importer:
return
# setup import log
if self.data_import.import_log:
import_log = frappe.parse_json(self.data_import.import_log)
else:
import_log = []
import_log = frappe.db.get_all("Data Import Log", fields=["row_indexes", "success", "log_index"],
filters={"data_import": self.data_import.name},
order_by="log_index") or []
# remove previous failures from import log
import_log = [log for log in import_log if log.get("success")]
log_index = 0
# Do not remove rows in case of retry after an error or pending data import
if self.data_import.status == "Partial Success" and len(import_log) >= self.data_import.payload_count:
# remove previous failures from import log only in case of retry after partial success
import_log = [log for log in import_log if log.get("success")]
# get successfully imported rows
imported_rows = []
for log in import_log:
log = frappe._dict(log)
if log.success:
imported_rows += log.row_indexes
if log.success or len(import_log) < self.data_import.payload_count:
imported_rows += json.loads(log.row_indexes)
log_index = log.log_index
# start import
total_payload_count = len(payloads)
@ -146,25 +156,41 @@ class Importer:
},
)
import_log.append(
frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes)
)
create_import_log(self.data_import.name, log_index, {
'success': True,
'docname': doc.name,
'row_indexes': row_indexes
})
log_index += 1
if not self.data_import.status == "Partial Success":
self.data_import.db_set("status", "Partial Success")
# commit after every successful import
frappe.db.commit()
except Exception:
import_log.append(
frappe._dict(
success=False,
exception=frappe.get_traceback(),
messages=frappe.local.message_log,
row_indexes=row_indexes,
)
)
messages = frappe.local.message_log
frappe.clear_messages()
# rollback if exception
frappe.db.rollback()
create_import_log(self.data_import.name, log_index, {
'success': False,
'exception': frappe.get_traceback(),
'messages': messages,
'row_indexes': row_indexes
})
log_index += 1
# Logs are db inserted directly so will have to be fetched again
import_log = frappe.db.get_all("Data Import Log", fields=["row_indexes", "success", "log_index"],
filters={"data_import": self.data_import.name},
order_by="log_index") or []
# set status
failures = [log for log in import_log if not log.get("success")]
if len(failures) == total_payload_count:
@ -178,7 +204,6 @@ class Importer:
self.print_import_log(import_log)
else:
self.data_import.db_set("status", status)
self.data_import.db_set("import_log", json.dumps(import_log))
self.after_import()
@ -248,11 +273,14 @@ class Importer:
if not self.data_import:
return
import_log = frappe.parse_json(self.data_import.import_log or "[]")
import_log = frappe.db.get_all("Data Import Log", fields=["row_indexes", "success"],
filters={"data_import": self.data_import.name},
order_by="log_index") or []
failures = [log for log in import_log if not log.get("success")]
row_indexes = []
for f in failures:
row_indexes.extend(f.get("row_indexes", []))
row_indexes.extend(json.loads(f.get("row_indexes", [])))
# de duplicate
row_indexes = list(set(row_indexes))
@ -264,6 +292,30 @@ class Importer:
build_csv_response(rows, _(self.doctype))
def export_import_log(self):
	"""Build a CSV download containing every Data Import Log row for this import."""
	from frappe.utils.csvutils import build_csv_response

	if not self.data_import:
		return

	logs = frappe.db.get_all(
		"Data Import Log",
		fields=["row_indexes", "success", "messages", "exception", "docname"],
		filters={"data_import": self.data_import.name},
		order_by="log_index",
	)

	rows = [["Row Numbers", "Status", "Message", "Exception"]]
	for entry in logs:
		# Only the first row index of the payload is reported in the CSV.
		first_row = json.loads(entry.get("row_indexes"))[0]
		if entry.get("success"):
			status = "Success"
			message = "Successfully Imported {0}".format(entry.get("docname"))
		else:
			status = "Failure"
			message = entry.get("messages")
		exception = frappe.utils.cstr(entry.get("exception", ""))
		rows.append([first_row, status, message, exception])

	build_csv_response(rows, self.doctype)
def print_import_log(self, import_log):
failed_records = [log for log in import_log if not log.success]
successful_records = [log for log in import_log if log.success]
@ -1172,3 +1224,17 @@ def df_as_json(df):
def get_select_options(df):
return [d for d in (df.options or "").split("\n") if d]
def create_import_log(data_import, log_index, log_details):
	"""Insert one Data Import Log row recording the outcome of a payload.

	Args:
		data_import: name of the parent Data Import document.
		log_index: ordinal position of this payload in the import run.
		log_details: dict with keys success, row_indexes, and optionally
			docname (on success) or messages/exception (on failure).
	"""
	frappe.get_doc({
		'doctype': 'Data Import Log',
		'log_index': log_index,
		'success': log_details.get('success'),
		'data_import': data_import,
		'row_indexes': json.dumps(log_details.get('row_indexes')),
		'docname': log_details.get('docname'),
		# Default to an empty list, not the string '[]': json.dumps('[]')
		# would double-encode the value into '"[]"', which readers that do
		# JSON.parse(messages) would see as a string instead of a list.
		'messages': json.dumps(log_details.get('messages', [])),
		'exception': log_details.get('exception')
	}).db_insert()

View file

@ -60,15 +60,19 @@ class TestImporter(unittest.TestCase):
frappe.local.message_log = []
data_import.start_import()
data_import.reload()
import_log = frappe.parse_json(data_import.import_log)
self.assertEqual(import_log[0]['row_indexes'], [2,3])
expected_error = "Error: <strong>Child 1 of DocType for Import</strong> Row #1: Value missing for: Child Title"
self.assertEqual(frappe.parse_json(import_log[0]['messages'][0])['message'], expected_error)
expected_error = "Error: <strong>Child 1 of DocType for Import</strong> Row #2: Value missing for: Child Title"
self.assertEqual(frappe.parse_json(import_log[0]['messages'][1])['message'], expected_error)
self.assertEqual(import_log[1]['row_indexes'], [4])
self.assertEqual(frappe.parse_json(import_log[1]['messages'][0])['message'], "Title is required")
import_log = frappe.db.get_all("Data Import Log", fields=["row_indexes", "success", "messages", "exception", "docname"],
filters={"data_import": data_import.name},
order_by="log_index")
self.assertEqual(frappe.parse_json(import_log[0]['row_indexes']), [2,3])
expected_error = "Error: <strong>Child 1 of DocType for Import</strong> Row #1: Value missing for: Child Title"
self.assertEqual(frappe.parse_json(frappe.parse_json(import_log[0]['messages'])[0])['message'], expected_error)
expected_error = "Error: <strong>Child 1 of DocType for Import</strong> Row #2: Value missing for: Child Title"
self.assertEqual(frappe.parse_json(frappe.parse_json(import_log[0]['messages'])[1])['message'], expected_error)
self.assertEqual(frappe.parse_json(import_log[1]['row_indexes']), [4])
self.assertEqual(frappe.parse_json(frappe.parse_json(import_log[1]['messages'])[0])['message'], "Title is required")
def test_data_import_update(self):
existing_doc = frappe.get_doc(

View file

@ -0,0 +1,8 @@
// Copyright (c) 2021, Frappe Technologies and contributors
// For license information, please see license.txt
// Client-side form controller for the "Data Import Log" doctype.
// No custom behaviour is defined yet; the commented-out `refresh`
// handler below is the scaffold left for future event hooks.
frappe.ui.form.on('Data Import Log', {
	// refresh: function(frm) {
	// }
});

View file

@ -0,0 +1,84 @@
{
"actions": [],
"creation": "2021-12-25 16:12:20.205889",
"doctype": "DocType",
"editable_grid": 1,
"engine": "MyISAM",
"field_order": [
"data_import",
"row_indexes",
"success",
"docname",
"messages",
"exception",
"log_index"
],
"fields": [
{
"fieldname": "data_import",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Data Import",
"options": "Data Import"
},
{
"fieldname": "docname",
"fieldtype": "Data",
"label": "Reference Name"
},
{
"fieldname": "exception",
"fieldtype": "Text",
"label": "Exception"
},
{
"fieldname": "row_indexes",
"fieldtype": "Code",
"label": "Row Indexes",
"options": "JSON"
},
{
"default": "0",
"fieldname": "success",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Success"
},
{
"fieldname": "log_index",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Log Index"
},
{
"fieldname": "messages",
"fieldtype": "Code",
"label": "Messages",
"options": "JSON"
}
],
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2021-12-29 11:19:19.646076",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Log",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC"
}

View file

@ -0,0 +1,8 @@
# Copyright (c) 2021, Frappe Technologies and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class DataImportLog(Document):
	"""Model class for the "Data Import Log" DocType.

	All behaviour is inherited from ``Document``; no overrides are
	needed, hence the empty body.
	"""
	pass

View file

@ -0,0 +1,8 @@
# Copyright (c) 2021, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestDataImportLog(unittest.TestCase):
	# Placeholder test case for the Data Import Log doctype; no unit
	# tests have been written yet.
	pass

View file

@ -699,6 +699,13 @@ class DocType(Document):
if not name:
name = self.name
# a Doctype name is the tablename created in database
# `tab<Doctype Name>` the length of tablename is limited to 64 characters
max_length = frappe.db.MAX_COLUMN_LENGTH - 3
if len(name) > max_length:
# length(tab + <Doctype Name>) should be equal to 64 characters hence doctype should be 61 characters
frappe.throw(_("Doctype name is limited to {0} characters ({1})").format(max_length, name), frappe.NameError)
flags = {"flags": re.ASCII}
# a DocType name should not start or end with an empty space

View file

@ -23,6 +23,7 @@ class TestDocType(unittest.TestCase):
self.assertRaises(frappe.NameError, new_doctype("_Some DocType").insert)
self.assertRaises(frappe.NameError, new_doctype("8Some DocType").insert)
self.assertRaises(frappe.NameError, new_doctype("Some (DocType)").insert)
self.assertRaises(frappe.NameError, new_doctype("Some Doctype with a name whose length is more than 61 characters").insert)
for name in ("Some DocType", "Some_DocType"):
if frappe.db.exists("DocType", name):
frappe.delete_doc("DocType", name)
@ -353,7 +354,6 @@ class TestDocType(unittest.TestCase):
dump_docs = json.dumps(docs.get('docs'))
cancel_all_linked_docs(dump_docs)
data_link_doc.cancel()
data_doc.name = '{}-CANC-0'.format(data_doc.name)
data_doc.load_from_db()
self.assertEqual(data_link_doc.docstatus, 2)
self.assertEqual(data_doc.docstatus, 2)
@ -377,7 +377,7 @@ class TestDocType(unittest.TestCase):
for data in link_doc.get('permissions'):
data.submit = 1
data.cancel = 1
link_doc.insert(ignore_if_duplicate=True)
link_doc.insert()
#create first parent doctype
test_doc_1 = new_doctype('Test Doctype 1')
@ -392,7 +392,7 @@ class TestDocType(unittest.TestCase):
for data in test_doc_1.get('permissions'):
data.submit = 1
data.cancel = 1
test_doc_1.insert(ignore_if_duplicate=True)
test_doc_1.insert()
#crete second parent doctype
doc = new_doctype('Test Doctype 2')
@ -407,7 +407,7 @@ class TestDocType(unittest.TestCase):
for data in link_doc.get('permissions'):
data.submit = 1
data.cancel = 1
doc.insert(ignore_if_duplicate=True)
doc.insert()
# create doctype data
data_link_doc_1 = frappe.new_doc('Test Linked Doctype 1')
@ -438,7 +438,6 @@ class TestDocType(unittest.TestCase):
# checking that doc for Test Doctype 2 is not canceled
self.assertRaises(frappe.LinkExistsError, data_link_doc_1.cancel)
data_doc_2.name = '{}-CANC-0'.format(data_doc_2.name)
data_doc.load_from_db()
data_doc_2.load_from_db()
self.assertEqual(data_link_doc_1.docstatus, 2)

View file

@ -1,4 +1,4 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
"""
@ -7,7 +7,6 @@ record of files
naming for same name files: file.gif, file-1.gif, file-2.gif etc
"""
import base64
import hashlib
import imghdr
import io
@ -17,9 +16,10 @@ import os
import re
import shutil
import zipfile
from typing import TYPE_CHECKING, Tuple
import requests
import requests.exceptions
from requests.exceptions import HTTPError, SSLError
from PIL import Image, ImageFile, ImageOps
from io import BytesIO
from urllib.parse import quote, unquote
@ -31,6 +31,11 @@ from frappe.utils import call_hook_method, cint, cstr, encode, get_files_path, g
from frappe.utils.image import strip_exif_data, optimize_image
from frappe.utils.file_manager import safe_b64decode
if TYPE_CHECKING:
from PIL.ImageFile import ImageFile
from requests.models import Response
class MaxFileSizeReachedError(frappe.ValidationError):
pass
@ -276,7 +281,7 @@ class File(Document):
image, filename, extn = get_local_image(self.file_url)
else:
image, filename, extn = get_web_image(self.file_url)
except (requests.exceptions.HTTPError, requests.exceptions.SSLError, IOError, TypeError):
except (HTTPError, SSLError, IOError, TypeError):
return
size = width, height
@ -648,9 +653,17 @@ def setup_folder_path(filename, new_parent):
from frappe.model.rename_doc import rename_doc
rename_doc("File", file.name, file.get_name_based_on_parent_folder(), ignore_permissions=True)
def get_extension(filename, extn, content):
def get_extension(filename, extn, content: bytes = None, response: "Response" = None) -> str:
mimetype = None
if response:
content_type = response.headers.get("Content-Type")
if content_type:
_extn = mimetypes.guess_extension(content_type)
if _extn:
return _extn[1:]
if extn:
# remove '?' char and parameters from extn if present
if '?' in extn:
@ -693,14 +706,14 @@ def get_local_image(file_url):
return image, filename, extn
def get_web_image(file_url):
def get_web_image(file_url: str) -> Tuple["ImageFile", str, str]:
# download
file_url = frappe.utils.get_url(file_url)
r = requests.get(file_url, stream=True)
try:
r.raise_for_status()
except requests.exceptions.HTTPError as e:
if "404" in e.args[0]:
except HTTPError:
if r.status_code == 404:
frappe.msgprint(_("File '{0}' not found").format(file_url))
else:
frappe.msgprint(_("Unable to read file format for {0}").format(file_url))
@ -719,7 +732,10 @@ def get_web_image(file_url):
filename = get_random_filename()
extn = None
extn = get_extension(filename, extn, r.content)
extn = get_extension(filename, extn, response=r)
if extn == "bin":
extn = get_extension(filename, extn, content=r.content) or "png"
filename = "/files/" + strip(unquote(filename))
return image, filename, extn

View file

@ -1,15 +1,14 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import base64
import json
import frappe
import os
import unittest
from frappe import _
from frappe.core.doctype.file.file import get_attached_images, move_file, get_files_in_folder, unzip_file
from frappe.core.doctype.file.file import File, get_attached_images, move_file, get_files_in_folder, unzip_file
from frappe.utils import get_files_path
# test_records = frappe.get_test_records('File')
test_content1 = 'Hello'
test_content2 = 'Hello World'
@ -24,8 +23,6 @@ def make_test_doc():
class TestSimpleFile(unittest.TestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content = test_content1
@ -38,21 +35,13 @@ class TestSimpleFile(unittest.TestCase):
_file.save()
self.saved_file_url = _file.file_url
def test_save(self):
_file = frappe.get_doc("File", {"file_url": self.saved_file_url})
content = _file.get_content()
self.assertEqual(content, self.test_content)
def tearDown(self):
# File gets deleted on rollback, so blank
pass
class TestBase64File(unittest.TestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content = base64.b64encode(test_content1.encode('utf-8'))
@ -66,18 +55,12 @@ class TestBase64File(unittest.TestCase):
_file.save()
self.saved_file_url = _file.file_url
def test_saved_content(self):
_file = frappe.get_doc("File", {"file_url": self.saved_file_url})
content = _file.get_content()
self.assertEqual(content, test_content1)
def tearDown(self):
# File gets deleted on rollback, so blank
pass
class TestSameFileName(unittest.TestCase):
def test_saved_content(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
@ -130,8 +113,6 @@ class TestSameFileName(unittest.TestCase):
class TestSameContent(unittest.TestCase):
def setUp(self):
self.attached_to_doctype1, self.attached_to_docname1 = make_test_doc()
self.attached_to_doctype2, self.attached_to_docname2 = make_test_doc()
@ -186,10 +167,6 @@ class TestSameContent(unittest.TestCase):
limit_property.delete()
frappe.clear_cache(doctype='ToDo')
def tearDown(self):
# File gets deleted on rollback, so blank
pass
class TestFile(unittest.TestCase):
def setUp(self):
@ -398,7 +375,7 @@ class TestFile(unittest.TestCase):
def test_make_thumbnail(self):
# test web image
test_file = frappe.get_doc({
test_file: File = frappe.get_doc({
"doctype": "File",
"file_name": 'logo',
"file_url": frappe.utils.get_url('/_test/assets/image.jpg'),
@ -407,6 +384,16 @@ class TestFile(unittest.TestCase):
test_file.make_thumbnail()
self.assertEquals(test_file.thumbnail_url, '/files/image_small.jpg')
# test web image without extension
test_file = frappe.get_doc({
"doctype": "File",
"file_name": 'logo',
"file_url": frappe.utils.get_url('/_test/assets/image'),
}).insert(ignore_permissions=True)
test_file.make_thumbnail()
self.assertTrue(test_file.thumbnail_url.endswith("_small.jpeg"))
# test local image
test_file.db_set('thumbnail_url', None)
test_file.reload()

View file

@ -3,6 +3,7 @@
from frappe.core.doctype.user_permission.user_permission import add_user_permissions, remove_applicable
from frappe.permissions import has_user_permission
from frappe.core.doctype.doctype.test_doctype import new_doctype
from frappe.website.doctype.blog_post.test_blog_post import make_test_blog
import frappe
import unittest
@ -31,6 +32,18 @@ class TestUserPermission(unittest.TestCase):
param = get_params(user, 'User', perm_user.name, is_default=1)
self.assertRaises(frappe.ValidationError, add_user_permissions, param)
def test_default_user_permission_corectness(self):
user = create_user('test_default_corectness_permission_1@example.com')
param = get_params(user, 'User', user.name, is_default=1, hide_descendants= 1)
add_user_permissions(param)
#create a duplicate entry with default
perm_user = create_user('test_default_corectness2@example.com')
test_blog = make_test_blog()
param = get_params(perm_user, 'Blog Post', test_blog.name, is_default=1, hide_descendants= 1)
add_user_permissions(param)
frappe.db.delete('User Permission', filters={'for_value': test_blog.name})
frappe.delete_doc('Blog Post', test_blog.name)
def test_default_user_permission(self):
frappe.set_user('Administrator')
user = create_user('test_user_perm1@example.com', 'Website Manager')

View file

@ -48,7 +48,6 @@ class UserPermission(Document):
}, or_filters={
'applicable_for': cstr(self.applicable_for),
'apply_to_all_doctypes': 1,
'hide_descendants': cstr(self.hide_descendants)
}, limit=1)
if overlap_exists:
ref_link = frappe.get_desk_link(self.doctype, overlap_exists[0].name)

View file

@ -30,6 +30,7 @@ class Dashboard {
show() {
this.route = frappe.get_route();
this.set_breadcrumbs();
if (this.route.length > 1) {
// from route
this.show_dashboard(this.route.slice(-1)[0]);
@ -75,6 +76,10 @@ class Dashboard {
frappe.last_dashboard = current_dashboard_name;
}
set_breadcrumbs() {
frappe.breadcrumbs.add("Desk", "Dashboard");
}
refresh() {
frappe.run_serially([
() => this.render_cards(),

View file

@ -10,19 +10,20 @@ import re
import string
from contextlib import contextmanager
from time import time
from typing import Dict, List, Union, Tuple
from typing import Dict, List, Tuple, Union
from pypika.terms import Criterion, NullValue, PseudoColumn
import frappe
import frappe.defaults
import frappe.model.meta
from frappe import _
from frappe.utils import now, getdate, cast, get_datetime
from frappe.model.utils.link_count import flush_local_link_count
from frappe.query_builder.functions import Count
from frappe.query_builder.functions import Min, Max, Avg, Sum
from frappe.query_builder.utils import Column
from frappe.query_builder.utils import DocType
from frappe.utils import cast, get_datetime, getdate, now, sbool
from .query import Query
from pypika.terms import Criterion, PseudoColumn
class Database(object):
@ -557,7 +558,21 @@ class Database(object):
def get_list(*args, **kwargs):
return frappe.get_list(*args, **kwargs)
def get_single_value(self, doctype, fieldname, cache=False):
def set_single_value(self, doctype, fieldname, value, *args, **kwargs):
	"""Set a field's value in a Single DocType.

	A Single DocType has exactly one document whose name equals the
	doctype name, so this delegates to ``set_value`` with ``dn == dt``.

	:param doctype: DocType of the single object
	:param fieldname: `fieldname` of the property
	:param value: `value` of the property

	Example:

		# Update the `deny_multiple_sessions` field in System Settings DocType.
		frappe.db.set_single_value("System Settings", "deny_multiple_sessions", True)
	"""
	return self.set_value(doctype, doctype, fieldname, value, *args, **kwargs)
def get_single_value(self, doctype, fieldname, cache=True):
"""Get property of Single DocType. Cache locally by default
:param doctype: DocType of the single object whose value is requested
@ -572,7 +587,7 @@ class Database(object):
if not doctype in self.value_cache:
self.value_cache[doctype] = {}
if fieldname in self.value_cache[doctype]:
if cache and fieldname in self.value_cache[doctype]:
return self.value_cache[doctype][fieldname]
val = self.query.get_sql(
@ -679,53 +694,55 @@ class Database(object):
:param debug: Print the query in the developer / js console.
:param for_update: Will add a row-level lock to the value that is being set so that it can be released on commit.
"""
if not modified:
modified = now()
if not modified_by:
modified_by = frappe.session.user
is_single_doctype = not (dn and dt != dn)
to_update = field if isinstance(field, dict) else {field: val}
to_update = {}
if update_modified:
to_update = {"modified": modified, "modified_by": modified_by}
modified = modified or now()
modified_by = modified_by or frappe.session.user
to_update.update({"modified": modified, "modified_by": modified_by})
if is_single_doctype:
frappe.db.delete(
"Singles",
filters={"field": ("in", tuple(to_update)), "doctype": dt}, debug=debug
)
singles_data = ((dt, key, sbool(value)) for key, value in to_update.items())
query = (
frappe.qb.into("Singles")
.columns("doctype", "field", "value")
.insert(*singles_data)
).run(debug=debug)
frappe.clear_document_cache(dt, dt)
if isinstance(field, dict):
to_update.update(field)
else:
to_update.update({field: val})
table = DocType(dt)
if dn and dt!=dn:
# with table
set_values = []
for key in to_update:
set_values.append('`{0}`=%({0})s'.format(key))
if for_update:
docnames = tuple(
self.get_values(dt, dn, "name", debug=debug, for_update=for_update, pluck=True)
) or (NullValue(),)
query = frappe.qb.update(table).where(table.name.isin(docnames))
for name in self.get_values(dt, dn, 'name', for_update=for_update, debug=debug):
values = dict(name=name[0])
values.update(to_update)
for docname in docnames:
frappe.clear_document_cache(dt, docname)
self.sql("""update `tab{0}`
set {1} where name=%(name)s""".format(dt, ', '.join(set_values)),
values, debug=debug)
else:
query = self.query.build_conditions(table=dt, filters=dn, update=True)
# TODO: Fix this; doesn't work rn - gavin@frappe.io
# frappe.cache().hdel_keys(dt, "document_cache")
# Workaround: clear all document caches
frappe.cache().delete_value('document_cache')
frappe.clear_document_cache(dt, values['name'])
else:
# for singles
keys = list(to_update)
self.sql('''
delete from `tabSingles`
where field in ({0}) and
doctype=%s'''.format(', '.join(['%s']*len(keys))),
list(keys) + [dt], debug=debug)
for key, value in to_update.items():
self.sql('''insert into `tabSingles` (doctype, field, value) values (%s, %s, %s)''',
(dt, key, value), debug=debug)
for column, value in to_update.items():
query = query.set(column, value)
frappe.clear_document_cache(dt, dn)
query.run(debug=debug)
if dt in self.value_cache:
del self.value_cache[dt]
@staticmethod
def set(doc, field, val):
"""Set value in document. **Avoid**"""

View file

@ -42,13 +42,13 @@ def submit_cancel_or_update_docs(doctype, docnames, action='submit', data=None):
doc = frappe.get_doc(doctype, d)
try:
message = ''
if action == 'submit' and doc.docstatus==0:
if action == 'submit' and doc.docstatus.is_draft():
doc.submit()
message = _('Submiting {0}').format(doctype)
elif action == 'cancel' and doc.docstatus==1:
elif action == 'cancel' and doc.docstatus.is_submitted():
doc.cancel()
message = _('Cancelling {0}').format(doctype)
elif action == 'update' and doc.docstatus < 2:
elif action == 'update' and not doc.docstatus.is_cancelled():
doc.update(data)
doc.save()
message = _('Updating {0}').format(doctype)

View file

@ -319,7 +319,7 @@ def export_query():
if add_totals_row:
ret = append_totals_row(ret)
data = [['Sr'] + get_labels(db_query.fields, doctype)]
data = [[_('Sr')] + get_labels(db_query.fields, doctype)]
for i, row in enumerate(ret):
data.append([i+1] + list(row))
@ -378,7 +378,8 @@ def get_labels(fields, doctype):
for key in fields:
key = key.split(" as ")[0]
if key.startswith(('count(', 'sum(', 'avg(')): continue
if key.startswith(('count(', 'sum(', 'avg(')):
continue
if "." in key:
parenttype, fieldname = key.split(".")[0][4:-1], key.split(".")[1].strip("`")
@ -386,10 +387,16 @@ def get_labels(fields, doctype):
parenttype = doctype
fieldname = fieldname.strip("`")
df = frappe.get_meta(parenttype).get_field(fieldname)
label = df.label if df else fieldname.title()
if label in labels:
label = doctype + ": " + label
if parenttype == doctype and fieldname == "name":
label = _("ID", context="Label of name column in report")
else:
df = frappe.get_meta(parenttype).get_field(fieldname)
label = _(df.label if df else fieldname.title())
if parenttype != doctype:
# If the column is from a child table, append the child doctype.
# For example, "Item Code (Sales Invoice Item)".
label += f" ({ _(parenttype) })"
labels.append(label)
return labels

View file

@ -137,7 +137,7 @@ def get_context(context):
if self.set_property_after_alert:
allow_update = True
if doc.docstatus == 1 and not doc.meta.get_field(self.set_property_after_alert).allow_on_submit:
if doc.docstatus.is_submitted() and not doc.meta.get_field(self.set_property_after_alert).allow_on_submit:
allow_update = False
try:
if allow_update and not doc.flags.in_notification_update:

View file

@ -1,5 +1,6 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe
import datetime
from frappe import _
@ -11,6 +12,7 @@ from frappe.model import display_fieldtypes
from frappe.utils import (cint, flt, now, cstr, strip_html,
sanitize_html, sanitize_email, cast_fieldtype)
from frappe.utils.html_utils import unescape_html
from frappe.model.docstatus import DocStatus
max_positive_value = {
'smallint': 2 ** 15,
@ -20,6 +22,7 @@ max_positive_value = {
DOCTYPES_FOR_DOCTYPE = ('DocType', 'DocField', 'DocPerm', 'DocType Action', 'DocType Link')
def get_controller(doctype):
"""Returns the **class** object of the given DocType.
For `custom` type, returns `frappe.model.document.Document`.
@ -224,7 +227,7 @@ class BaseDocument(object):
value.parentfield = key
if value.docstatus is None:
value.docstatus = 0
value.docstatus = DocStatus.draft()
if not getattr(value, "idx", None):
value.idx = len(self.get(key) or []) + 1
@ -282,8 +285,11 @@ class BaseDocument(object):
if key not in self.__dict__:
self.__dict__[key] = None
if key in ("idx", "docstatus") and self.__dict__[key] is None:
self.__dict__[key] = 0
if self.__dict__[key] is None:
if key == "docstatus":
self.docstatus = DocStatus.draft()
elif key == "idx":
self.__dict__[key] = 0
for key in self.get_valid_columns():
if key not in self.__dict__:
@ -304,6 +310,14 @@ class BaseDocument(object):
def is_new(self):
return self.get("__islocal")
@property
def docstatus(self):
return DocStatus(self.get("docstatus"))
@docstatus.setter
def docstatus(self, value):
self.__dict__["docstatus"] = DocStatus(cint(value))
def as_dict(self, no_nulls=False, no_default_fields=False, convert_dates_to_str=False):
doc = self.get_valid_dict(convert_dates_to_str=convert_dates_to_str)
doc["doctype"] = self.doctype
@ -492,7 +506,7 @@ class BaseDocument(object):
self.set(df.fieldname, flt(self.get(df.fieldname)))
if self.docstatus is not None:
self.docstatus = cint(self.docstatus)
self.docstatus = DocStatus(cint(self.docstatus))
def _get_missing_mandatory_fields(self):
"""Get mandatory fields that do not have any values"""
@ -581,7 +595,7 @@ class BaseDocument(object):
setattr(self, df.fieldname, values.name)
for _df in fields_to_fetch:
if self.is_new() or self.docstatus != 1 or _df.allow_on_submit:
if self.is_new() or not self.docstatus.is_submitted() or _df.allow_on_submit:
self.set_fetch_from_value(doctype, _df, values)
notify_link_count(doctype, docname)
@ -591,7 +605,7 @@ class BaseDocument(object):
elif (df.fieldname != "amended_from"
and (is_submittable or self.meta.is_submittable) and frappe.get_meta(doctype).is_submittable
and cint(frappe.db.get_value(doctype, docname, "docstatus"))==2):
and cint(frappe.db.get_value(doctype, docname, "docstatus")) == DocStatus.cancelled()):
cancelled_links.append((df.fieldname, docname, get_msg(df, docname)))
@ -805,8 +819,8 @@ class BaseDocument(object):
or df.get("fieldtype") in ("Attach", "Attach Image", "Barcode", "Code")
# cancelled and submit but not update after submit should be ignored
or self.docstatus==2
or (self.docstatus==1 and not df.get("allow_on_submit"))):
or self.docstatus.is_cancelled()
or (self.docstatus.is_submitted() and not df.get("allow_on_submit"))):
continue
else:

View file

@ -212,7 +212,7 @@ def check_permission_and_not_submitted(doc):
.format(doc.doctype, doc.name), raise_exception=frappe.PermissionError)
# check if submitted
if doc.docstatus == 1:
if doc.docstatus.is_submitted():
frappe.msgprint(_("{0} {1}: Submitted Record cannot be deleted. You must {2} Cancel {3} it first.").format(_(doc.doctype), doc.name, "<a href='https://docs.erpnext.com//docs/user/manual/en/setting-up/articles/delete-submitted-document' target='_blank'>", "</a>"),
raise_exception=True)

25
frappe/model/docstatus.py Normal file
View file

@ -0,0 +1,25 @@
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
class DocStatus(int):
	"""Integer wrapper for a document's lifecycle status.

	The three valid states are 0 (draft), 1 (submitted) and 2
	(cancelled). Prefer the classmethod constructors over raw integers
	and the ``is_*`` predicates over numeric comparisons.
	"""

	@classmethod
	def draft(cls):
		# A new, still-editable document.
		return cls(0)

	@classmethod
	def submitted(cls):
		# A finalized document.
		return cls(1)

	@classmethod
	def cancelled(cls):
		# A document that was submitted and then cancelled.
		return cls(2)

	def is_draft(self):
		return self == DocStatus.draft()

	def is_submitted(self):
		return self == DocStatus.submitted()

	def is_cancelled(self):
		return self == DocStatus.cancelled()

View file

@ -1,13 +1,16 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe
import hashlib
import json
import time
from werkzeug.exceptions import NotFound
import frappe
from frappe import _, msgprint, is_whitelisted
from frappe.utils import flt, cstr, now, get_datetime_str, file_lock, date_diff
from frappe.model.base_document import BaseDocument, get_controller
from frappe.model.naming import set_new_name, gen_new_name_for_cancelled_doc
from werkzeug.exceptions import NotFound, Forbidden
import hashlib, json
from frappe.model.naming import set_new_name
from frappe.model.docstatus import DocStatus
from frappe.model import optional_fields, table_fields
from frappe.model.workflow import validate_workflow
from frappe.model.workflow import set_workflow_state_on_action
@ -17,6 +20,7 @@ from frappe.desk.form.document_follow import follow_document
from frappe.core.doctype.server_script.server_script_utils import run_server_script_for_doc_event
from frappe.utils.data import get_absolute_url
# once_only validation
# methods
@ -307,9 +311,6 @@ class Document(BaseDocument):
self.check_permission("write", "save")
if self.docstatus == 2:
self._rename_doc_on_cancel()
self.set_user_and_timestamp()
self.set_docstatus()
self.check_if_latest()
@ -490,7 +491,7 @@ class Document(BaseDocument):
def set_docstatus(self):
if self.docstatus is None:
self.docstatus=0
self.docstatus = DocStatus.draft()
for d in self.get_all_children():
d.docstatus = self.docstatus
@ -720,6 +721,7 @@ class Document(BaseDocument):
else:
tmp = frappe.db.sql("""select modified, docstatus from `tab{0}`
where name = %s for update""".format(self.doctype), self.name, as_dict=True)
if not tmp:
frappe.throw(_("Record does not exist"))
else:
@ -740,7 +742,7 @@ class Document(BaseDocument):
else:
self.check_docstatus_transition(0)
def check_docstatus_transition(self, docstatus):
def check_docstatus_transition(self, to_docstatus):
"""Ensures valid `docstatus` transition.
Valid transitions are (number in brackets is `docstatus`):
@ -751,31 +753,32 @@ class Document(BaseDocument):
"""
if not self.docstatus:
self.docstatus = 0
if docstatus==0:
if self.docstatus==0:
self.docstatus = DocStatus.draft()
if to_docstatus == DocStatus.draft():
if self.docstatus.is_draft():
self._action = "save"
elif self.docstatus==1:
elif self.docstatus.is_submitted():
self._action = "submit"
self.check_permission("submit")
elif self.docstatus==2:
elif self.docstatus.is_cancelled():
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 0 (Draft) to 2 (Cancelled)"))
else:
raise frappe.ValidationError(_("Invalid docstatus"), self.docstatus)
elif docstatus==1:
if self.docstatus==1:
elif to_docstatus == DocStatus.submitted():
if self.docstatus.is_submitted():
self._action = "update_after_submit"
self.check_permission("submit")
elif self.docstatus==2:
elif self.docstatus.is_cancelled():
self._action = "cancel"
self.check_permission("cancel")
elif self.docstatus==0:
elif self.docstatus.is_draft():
raise frappe.DocstatusTransitionError(_("Cannot change docstatus from 1 (Submitted) to 0 (Draft)"))
else:
raise frappe.ValidationError(_("Invalid docstatus"), self.docstatus)
elif docstatus==2:
elif to_docstatus == DocStatus.cancelled():
raise frappe.ValidationError(_("Cannot edit cancelled document"))
def set_parent_in_children(self):
@ -929,14 +932,14 @@ class Document(BaseDocument):
@whitelist.__func__
def _submit(self):
"""Submit the document. Sets `docstatus` = 1, then saves."""
self.docstatus = 1
self.docstatus = DocStatus.submitted()
return self.save()
@whitelist.__func__
def _cancel(self):
"""Cancel the document. Sets `docstatus` = 2, then saves.
"""
self.docstatus = 2
self.docstatus = DocStatus.cancelled()
return self.save()
@whitelist.__func__
@ -954,7 +957,7 @@ class Document(BaseDocument):
frappe.delete_doc(self.doctype, self.name, ignore_permissions = ignore_permissions, flags=self.flags)
def run_before_save_methods(self):
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
"""Run standard methods before `INSERT` or `UPDATE`. Standard Methods are:
- `validate`, `before_save` for **Save**.
- `validate`, `before_submit` for **Submit**.
@ -1371,11 +1374,6 @@ class Document(BaseDocument):
from frappe.desk.doctype.tag.tag import DocTags
return DocTags(self.doctype).get_tags(self.name).split(",")[1:]
def _rename_doc_on_cancel(self):
new_name = gen_new_name_for_cancelled_doc(self)
frappe.rename_doc(self.doctype, self.name, new_name, force=True, show_alert=False)
self.name = new_name
def __repr__(self):
name = self.name or "unsaved"
doctype = self.__class__.__name__

View file

@ -1,14 +1,3 @@
"""utilities to generate a document name based on various rules defined.
NOTE:
Till version 13, whenever a submittable document is amended it's name is set to orig_name-X,
where X is a counter and it increments when amended again and so on.
From Version 14, The naming pattern is changed in a way that amended documents will
have the original name `orig_name` instead of `orig_name-X`. To make this happen
the cancelled document naming pattern is changed to 'orig_name-CANC-X'.
"""
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
@ -40,7 +29,7 @@ def set_new_name(doc):
doc.name = None
if getattr(doc, "amended_from", None):
doc.name = _get_amended_name(doc)
_set_amended_name(doc)
return
elif getattr(doc.meta, "issingle", False):
@ -256,18 +245,6 @@ def revert_series_if_last(key, name, doc=None):
* prefix = #### and hashes = 2021 (hash doesn't exist)
* will search hash in key then accordingly get prefix = ""
"""
if hasattr(doc, 'amended_from'):
# Do not revert the series if the document is amended.
if doc.amended_from:
return
# Get document name by parsing incase of fist cancelled document
if doc.docstatus == 2 and not doc.amended_from:
if doc.name.endswith('-CANC'):
name, _ = NameParser.parse_docname(doc.name, sep='-CANC')
else:
name, _ = NameParser.parse_docname(doc.name, sep='-CANC-')
if ".#" in key:
prefix, hashes = key.rsplit(".", 1)
if "#" not in hashes:
@ -356,9 +333,16 @@ def append_number_if_name_exists(doctype, value, fieldname="name", separator="-"
return value
def _get_amended_name(doc):
	"""Return the base (original) name parsed from ``doc.amended_from``.

	The counter component of the parsed name is discarded; only the
	original name is returned.
	"""
	original_name, _counter = NameParser(doc).parse_amended_from()
	return original_name
def _set_amended_name(doc):
	"""Assign the next amendment name to ``doc`` and return it.

	When the document being amended is itself an amendment (its record
	carries an ``amended_from`` value), its name already ends in a
	``-<counter>`` suffix: that counter is incremented and the prefix is
	kept. Otherwise the name is ``<amended_from>-1``.
	"""
	counter = 1
	prefix = doc.amended_from
	if frappe.db.get_value(doc.doctype, doc.amended_from, "amended_from"):
		parts = doc.amended_from.split("-")
		counter = cint(parts[-1]) + 1
		prefix = "-".join(parts[:-1])  # everything before the last hyphen
	doc.name = prefix + "-" + str(counter)
	return doc.name
def _field_autoname(autoname, doc, skip_slicing=None):
"""
@ -399,83 +383,3 @@ def _format_autoname(autoname, doc):
name = re.sub(r"(\{[\w | #]+\})", get_param_value_for_match, autoname_value)
return name
class NameParser:
	"""Split a document name into its base part and trailing counter.

	Currently understands the cancelled/amended naming schemes; the class
	can be extended for other patterns.
	"""

	def __init__(self, doc):
		self.doc = doc

	def parse_amended_from(self):
		"""Parse ``self.doc.amended_from`` into ``(base_name, counter)``.

		Cancelled document names come in one of three formats:

		* ``original_name-X-CANC`` - old style, migrated to new style by patch
		* ``original_name-CANC``   - old style, migrated to new style by patch
		* ``original_name-CANC-X`` - new style

		New style: amended documents reuse the original name, so when a
		document is cancelled it is renamed with a ``-CANC-X`` suffix to
		free up the original name.  Old style: cancelled documents kept the
		original name and the amendment was named ``original_name-X``; a
		patch appended ``-CANC`` to such cancelled documents.

		Returns ``(None, None)`` when the document is not an amendment.
		"""
		amended_from = getattr(self.doc, 'amended_from', None)
		if not amended_from:
			return (None, None)

		if not amended_from.endswith('-CANC'):
			# New style cancelled document: original_name-CANC-X
			return self.parse_docname(amended_from, '-CANC-')

		# Old style cancelled document: original_name-X-CANC or original_name-CANC
		base, _ = self.parse_docname(amended_from, '-CANC')
		source = frappe.get_all(
			self.doc.doctype,
			filters={'name': amended_from},
			fields=['amended_from'],
			limit=1,
		)
		if source and source[0].amended_from:
			# original_name-X-CANC: strip the trailing amendment counter too
			return self.parse_docname(base, '-')
		return base, None

	@classmethod
	def parse_docname(cls, name, sep='-'):
		"""Split ``name`` on the last occurrence of ``sep``.

		Returns ``(name, None)`` when ``sep`` does not occur, otherwise
		``(head, tail)``.
		"""
		head, separator, tail = name.rpartition(sep)
		if not separator:
			return (name, None)
		return (head, tail)
def get_cancelled_doc_latest_counter(tname, docname):
	"""Get the latest counter used for cancelled docs of given docname.

	Scans cancelled (docstatus=2) records named ``<docname>-CANC-<X>`` in
	table ``tab<tname>`` and returns the highest ``X`` found, or -1 when no
	such record exists (so the next counter becomes 0).
	"""
	name_prefix = f'{docname}-CANC-'

	# NOTE(review): `tname` is interpolated directly into the SQL string; it
	# must always be a trusted doctype name, never user input.
	rows = frappe.db.sql("""
		select
			name
		from `tab{tname}`
		where
			name like %(name_prefix)s and docstatus=2
		""".format(tname=tname), {'name_prefix': name_prefix+'%'}, as_dict=1)

	if not rows:
		return -1
	# A name that is exactly the prefix leaves '' after replace -> counted as -1.
	return max([int(row.name.replace(name_prefix, '') or -1) for row in rows])
def gen_new_name_for_cancelled_doc(doc):
	"""Return the ``<base>-CANC-<X>`` name a cancelled document should get.

	The base name is the original (amended-from) name when the document is
	an amendment, otherwise the document's own name; ``X`` is one more than
	the highest counter already used by cancelled siblings of that base name.
	"""
	if getattr(doc, "amended_from", None):
		base_name, _counter = NameParser(doc).parse_amended_from()
	else:
		base_name = doc.name

	next_counter = get_cancelled_doc_latest_counter(doc.doctype, base_name) + 1
	return f'{base_name}-CANC-{next_counter}'

View file

@ -1,10 +1,11 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import json
import frappe
from frappe.utils import cint
from frappe import _
import json
from frappe.utils import cint
from frappe.model.docstatus import DocStatus
class WorkflowStateError(frappe.ValidationError): pass
class WorkflowTransitionError(frappe.ValidationError): pass
@ -102,13 +103,13 @@ def apply_workflow(doc, action):
doc.set(next_state.update_field, next_state.update_value)
new_docstatus = cint(next_state.doc_status)
if doc.docstatus == 0 and new_docstatus == 0:
if doc.docstatus.is_draft() and new_docstatus == DocStatus.draft():
doc.save()
elif doc.docstatus == 0 and new_docstatus == 1:
elif doc.docstatus.is_draft() and new_docstatus == DocStatus.submitted():
doc.submit()
elif doc.docstatus == 1 and new_docstatus == 1:
elif doc.docstatus.is_submitted() and new_docstatus == DocStatus.submitted():
doc.save()
elif doc.docstatus == 1 and new_docstatus == 2:
elif doc.docstatus.is_submitted() and new_docstatus == DocStatus.cancelled():
doc.cancel()
else:
frappe.throw(_('Illegal Document Status for {0}').format(next_state.state))
@ -212,10 +213,10 @@ def bulk_workflow_approval(docnames, doctype, action):
frappe.db.commit()
except Exception as e:
if not frappe.message_log:
# Exception is raised manually and not from msgprint or throw
# Exception is raised manually and not from msgprint or throw
message = "{0}".format(e.__class__.__name__)
if e.args:
message += " : {0}".format(e.args[0])
message += " : {0}".format(e.args[0])
message_dict = {"docname": docname, "message": message}
failed_transactions[docname].append(message_dict)

View file

@ -185,7 +185,6 @@ frappe.patches.v13_0.queryreport_columns
frappe.patches.v13_0.jinja_hook
frappe.patches.v13_0.update_notification_channel_if_empty
frappe.patches.v13_0.set_first_day_of_the_week
frappe.patches.v14_0.rename_cancelled_documents
frappe.patches.v14_0.update_workspace2 # 20.09.2021
frappe.patches.v14_0.save_ratings_in_fraction #23-12-2021
frappe.patches.v14_0.transform_todo_schema

View file

@ -3,6 +3,7 @@ import click
def execute():
frappe.delete_doc_if_exists("DocType", "Chat Message")
frappe.delete_doc_if_exists("DocType", "Chat Message Attachment")
frappe.delete_doc_if_exists("DocType", "Chat Profile")
frappe.delete_doc_if_exists("DocType", "Chat Token")
frappe.delete_doc_if_exists("DocType", "Chat Room User")

View file

@ -1,213 +0,0 @@
import functools
import traceback
import frappe
def execute():
	"""Rename cancelled documents by adding a postfix.

	Patch entry point: delegates the whole migration to
	``rename_cancelled_docs()``.
	"""
	rename_cancelled_docs()
def get_submittable_doctypes():
	"""Return the names of all submittable doctypes in the system."""
	submittable = frappe.db.get_all(
		'DocType',
		filters={'is_submittable': 1},
		pluck='name',
	)
	return submittable
def get_cancelled_doc_names(doctype):
	"""Return names of cancelled documents that still use the old format.

	Old-format names carry neither a trailing ``-CANC`` suffix nor an
	embedded ``-CANC-`` marker.
	"""
	cancelled = frappe.db.get_all(doctype, filters={'docstatus': 2}, pluck='name')

	def is_old_format(name):
		return not name.endswith('-CANC') and '-CANC-' not in name

	return [name for name in cancelled if is_old_format(name)]
@functools.lru_cache()
def get_linked_doctypes():
	"""Return doctypes that are linked to each doctype via 'Link' fields.

	Collects Link fields from both standard DocFields and Custom Fields and
	returns ``{linked_to_doctype: [(doctype, fieldname), ...]}``.  Cached for
	the lifetime of the process (this runs inside a one-shot patch).
	"""
	filters=[['fieldtype','=', 'Link']]
	links = frappe.get_all("DocField",
		fields=["parent", "fieldname", "options as linked_to"],
		filters=filters,
		as_list=1)

	# Custom Fields store their parent doctype in `dt` instead of `parent`.
	links+= frappe.get_all("Custom Field",
		fields=["dt as parent", "fieldname", "options as linked_to"],
		filters=filters,
		as_list=1)

	links_by_doctype = {}
	for doctype, fieldname, linked_to in links:
		links_by_doctype.setdefault(linked_to, []).append((doctype, fieldname))
	return links_by_doctype
@functools.lru_cache()
def get_single_doctypes():
	"""Return the names of all Single doctypes (cached per process)."""
	return frappe.get_all("DocType", filters={'issingle': 1}, pluck='name')
@functools.lru_cache()
def get_dynamic_linked_doctypes():
	"""Return Dynamic Link fields as ``(doctype, fieldname, doctype_fieldname)``.

	``doctype_fieldname`` names the column holding the target doctype for the
	dynamic link.  Collected from both DocFields and Custom Fields; cached
	per process.
	"""
	filters=[['fieldtype','=', 'Dynamic Link']]

	# find dynamic links of parents
	links = frappe.get_all("DocField",
		fields=["parent as doctype", "fieldname", "options as doctype_fieldname"],
		filters=filters,
		as_list=1)

	links+= frappe.get_all("Custom Field",
		fields=["dt as doctype", "fieldname", "options as doctype_fieldname"],
		filters=filters,
		as_list=1)
	return links
@functools.lru_cache()
def get_child_tables():
	"""Map each doctype to the child-table doctypes it embeds.

	Collects 'Table' / 'Table MultiSelect' fields from both standard
	DocFields and Custom Fields and returns
	``{parent_doctype: [child_table_doctype, ...]}``.  Cached per process.
	"""
	filters =[['fieldtype', 'in', ('Table', 'Table MultiSelect')]]
	links = frappe.get_all("DocField",
		fields=["parent as doctype", "options as child_table"],
		filters=filters,
		as_list=1)

	links+= frappe.get_all("Custom Field",
		fields=["dt as doctype", "options as child_table"],
		filters=filters,
		as_list=1)

	# renamed from `map` to avoid shadowing the builtin
	child_table_map = {}
	for doctype, child_table in links:
		child_table_map.setdefault(doctype, []).append(child_table)

	return child_table_map
def update_cancelled_document_names(doctype, cancelled_doc_names):
	"""Append '-CANC' to the names of the given cancelled documents.

	Only rows with docstatus=2 whose name is in ``cancelled_doc_names`` are
	touched.  NOTE(review): ``doctype`` is interpolated into the SQL string;
	it must be a trusted doctype name.
	"""
	return frappe.db.sql("""
		update
			`tab{doctype}`
		set
			name=CONCAT(name, '-CANC')
		where
			docstatus=2
		and
			name in %(cancelled_doc_names)s;
		""".format(doctype=doctype), {'cancelled_doc_names': cancelled_doc_names})
def update_amended_field(doctype, cancelled_doc_names):
	"""Append '-CANC' to ``amended_from`` values pointing at renamed docs.

	Keeps amendment references consistent after the cancelled documents
	themselves were renamed.  NOTE(review): ``doctype`` is interpolated into
	the SQL string; it must be a trusted doctype name.
	"""
	return frappe.db.sql("""
		update
			`tab{doctype}`
		set
			amended_from=CONCAT(amended_from, '-CANC')
		where
			amended_from in %(cancelled_doc_names)s;
		""".format(doctype=doctype), {'cancelled_doc_names': cancelled_doc_names})
def update_attachments(doctype, cancelled_doc_names):
	"""Repoint File attachments at the renamed (``-CANC``) document names."""
	frappe.db.sql("""
		update
			`tabFile`
		set
			attached_to_name=CONCAT(attached_to_name, '-CANC')
		where
			attached_to_doctype=%(dt)s and attached_to_name in %(cancelled_doc_names)s
		""", {'cancelled_doc_names': cancelled_doc_names, 'dt': doctype})
def update_versions(doctype, cancelled_doc_names):
	"""Repoint Version history records at the renamed (``-CANC``) names."""
	frappe.db.sql("""
		UPDATE
			`tabVersion`
		SET
			docname=CONCAT(docname, '-CANC')
		WHERE
			ref_doctype=%(dt)s AND docname in %(cancelled_doc_names)s
		""", {'cancelled_doc_names': cancelled_doc_names, 'dt': doctype})
def update_linked_doctypes(doctype, cancelled_doc_names):
	"""Update Link fields elsewhere that point at the renamed documents.

	Regular doctypes are updated with one bulk SQL statement per linking
	field; Single doctypes have no table rows per document, so their stored
	value is updated through the document API instead.
	"""
	single_doctypes = get_single_doctypes()

	for linked_dt, field in get_linked_doctypes().get(doctype, []):
		if linked_dt not in single_doctypes:
			# NOTE(review): linked_dt/field come from DocField metadata and
			# are interpolated into SQL — trusted source only.
			frappe.db.sql("""
				update
					`tab{linked_dt}`
				set
					`{column}`=CONCAT(`{column}`, '-CANC')
				where
					`{column}` in %(cancelled_doc_names)s;
				""".format(linked_dt=linked_dt, column=field),
				{'cancelled_doc_names': cancelled_doc_names})
		else:
			doc = frappe.get_single(linked_dt)
			if getattr(doc, field) in cancelled_doc_names:
				setattr(doc, field, getattr(doc, field)+'-CANC')
				# skip validation: this is a mechanical rename during a patch
				doc.flags.ignore_mandatory=True
				doc.flags.ignore_validate=True
				doc.save(ignore_permissions=True)
def update_dynamic_linked_doctypes(doctype, cancelled_doc_names):
	"""Update Dynamic Link fields pointing at the renamed documents.

	A dynamic link row matches only when its doctype column equals
	``doctype`` and its value column is one of the renamed names.  Single
	doctypes are updated through the document API (no per-document rows).
	"""
	single_doctypes = get_single_doctypes()

	for linked_dt, fieldname, doctype_fieldname in get_dynamic_linked_doctypes():
		if linked_dt not in single_doctypes:
			# NOTE(review): identifiers come from DocField metadata and are
			# interpolated into SQL — trusted source only.
			frappe.db.sql("""
				update
					`tab{linked_dt}`
				set
					`{column}`=CONCAT(`{column}`, '-CANC')
				where
					`{column}` in %(cancelled_doc_names)s and {doctype_fieldname}=%(dt)s;
				""".format(linked_dt=linked_dt, column=fieldname, doctype_fieldname=doctype_fieldname),
				{'cancelled_doc_names': cancelled_doc_names, 'dt': doctype})
		else:
			doc = frappe.get_single(linked_dt)
			if getattr(doc, doctype_fieldname) == doctype and getattr(doc, fieldname) in cancelled_doc_names:
				setattr(doc, fieldname, getattr(doc, fieldname)+'-CANC')
				# skip validation: mechanical rename during a patch
				doc.flags.ignore_mandatory=True
				doc.flags.ignore_validate=True
				doc.save(ignore_permissions=True)
def update_child_tables(doctype, cancelled_doc_names):
	"""Update child-table rows whose parent document was renamed.

	Child rows reference the parent via (parenttype, parent); the parent
	column gets the same ``-CANC`` suffix.  Single child doctypes are
	handled through the document API.
	"""
	child_tables = get_child_tables().get(doctype, [])
	single_doctypes = get_single_doctypes()

	for table in child_tables:
		if table not in single_doctypes:
			# NOTE(review): `table` comes from DocField metadata and is
			# interpolated into SQL — trusted source only.
			frappe.db.sql("""
				update
					`tab{table}`
				set
					parent=CONCAT(parent, '-CANC')
				where
					parenttype=%(dt)s and parent in %(cancelled_doc_names)s;
				""".format(table=table), {'cancelled_doc_names': cancelled_doc_names, 'dt': doctype})
		else:
			doc = frappe.get_single(table)
			if getattr(doc, 'parenttype')==doctype and getattr(doc, 'parent') in cancelled_doc_names:
				setattr(doc, 'parent', getattr(doc, 'parent')+'-CANC')
				# skip validation: mechanical rename during a patch
				doc.flags.ignore_mandatory=True
				doc.flags.ignore_validate=True
				doc.save(ignore_permissions=True)
def rename_cancelled_docs():
	"""Rename old-format cancelled documents across all submittable doctypes.

	For each doctype: rename the cancelled documents, then fix every
	reference to them (amended_from, child rows, Link/Dynamic Link fields,
	attachments, versions) and commit.  Each doctype gets up to two
	attempts; on failure the transaction is rolled back, and only the second
	failure is reported before moving on to the next doctype.
	"""
	submittable_doctypes = get_submittable_doctypes()
	for dt in submittable_doctypes:
		for retry in range(2):
			try:
				cancelled_doc_names = tuple(get_cancelled_doc_names(dt))
				if not cancelled_doc_names:
					# nothing (left) to rename for this doctype
					break
				update_cancelled_document_names(dt, cancelled_doc_names)
				update_amended_field(dt, cancelled_doc_names)
				update_child_tables(dt, cancelled_doc_names)
				update_linked_doctypes(dt, cancelled_doc_names)
				update_dynamic_linked_doctypes(dt, cancelled_doc_names)
				update_attachments(dt, cancelled_doc_names)
				update_versions(dt, cancelled_doc_names)
				print(f"Renaming cancelled records of {dt} doctype")
				frappe.db.commit()
				break
			except Exception:
				if retry == 1:
					print(f"Failed to rename the cancelled records of {dt} doctype, moving on!")
					traceback.print_exc()
				frappe.db.rollback()

View file

@ -2,6 +2,7 @@ import "./jquery-bootstrap";
import "./frappe/class.js";
import "./frappe/polyfill.js";
import "./lib/md5.min.js";
import "./lib/moment.js";
import "./frappe/provide.js";
import "./frappe/format.js";
import "./frappe/utils/number_format.js";

View file

@ -331,7 +331,7 @@ frappe.data_import.ImportPreview = class ImportPreview {
is_row_imported(row) {
let serial_no = row[0].content;
return this.import_log.find(log => {
return log.success && log.row_indexes.includes(serial_no);
return log.success && JSON.parse(log.row_indexes || '[]').includes(serial_no);
});
}
};

View file

@ -534,22 +534,21 @@ export default {
});
},
show_google_drive_picker() {
let dialog = cur_dialog;
dialog.hide();
this.close_dialog = true;
let google_drive = new GoogleDrivePicker({
pickerCallback: data => this.google_drive_callback(data, dialog),
pickerCallback: data => this.google_drive_callback(data),
...this.google_drive_settings
});
google_drive.loadPicker();
},
google_drive_callback(data, dialog) {
google_drive_callback(data) {
if (data.action == google.picker.Action.PICKED) {
this.upload_file({
file_url: data.docs[0].url,
file_name: data.docs[0].name
});
} else if (data.action == google.picker.Action.CANCEL) {
dialog.show();
cur_frm.attachments.new_attachment()
}
},
url_to_file(url, filename, mime_type) {

View file

@ -437,10 +437,22 @@ frappe.ui.form.ControlLink = class ControlLink extends frappe.ui.form.ControlDat
}
set_custom_query(args) {
var set_nulls = function(obj) {
$.each(obj, function(key, value) {
if(value!==undefined) {
obj[key] = value;
const is_valid_value = (value, key) => {
if (value) return true;
// check if empty value is valid
if (this.frm) {
let field = frappe.meta.get_docfield(this.frm.doctype, key);
// empty value link fields is invalid
return !field || !["Link", "Dynamic Link"].includes(field.fieldtype);
} else {
return value !== undefined;
}
}
const set_nulls = (obj) => {
$.each(obj, (key, value) => {
if (!is_valid_value(value, key)) {
delete obj[key];
}
});
return obj;
@ -527,8 +539,6 @@ frappe.ui.form.ControlLink = class ControlLink extends frappe.ui.form.ControlDat
return;
};
let field_value = "";
const fetch_map = this.fetch_map;
const columns_to_fetch = Object.values(fetch_map);
@ -537,16 +547,10 @@ frappe.ui.form.ControlLink = class ControlLink extends frappe.ui.form.ControlDat
return value;
}
return frappe.xcall("frappe.client.validate_link", {
doctype: options,
docname: value,
fields: columns_to_fetch,
}).then((response) => {
if (!docname || !columns_to_fetch.length) return response.name;
function update_dependant_fields(response) {
let field_value = "";
for (const [target_field, source_field] of Object.entries(fetch_map)) {
if (value) field_value = response[source_field];
frappe.model.set_value(
df.parent,
docname,
@ -555,9 +559,23 @@ frappe.ui.form.ControlLink = class ControlLink extends frappe.ui.form.ControlDat
df.fieldtype,
);
}
}
return response.name;
});
// to avoid unnecessary request
if (value) {
return frappe.xcall("frappe.client.validate_link", {
doctype: options,
docname: value,
fields: columns_to_fetch,
}).then((response) => {
if (!docname || !columns_to_fetch.length) return response.name;
update_dependant_fields(response);
return response.name;
});
} else {
update_dependant_fields({});
return value;
}
}
get fetch_map() {

View file

@ -860,36 +860,32 @@ frappe.ui.form.Form = class FrappeForm {
}
_cancel(btn, callback, on_error, skip_confirm) {
const me = this;
const cancel_doc = () => {
frappe.validated = true;
this.script_manager.trigger("before_cancel").then(() => {
me.script_manager.trigger("before_cancel").then(() => {
if (!frappe.validated) {
return this.handle_save_fail(btn, on_error);
return me.handle_save_fail(btn, on_error);
}
const original_name = this.docname;
const after_cancel = (r) => {
var after_cancel = function(r) {
if (r.exc) {
this.handle_save_fail(btn, on_error);
me.handle_save_fail(btn, on_error);
} else {
frappe.utils.play_sound("cancel");
me.refresh();
callback && callback();
this.script_manager.trigger("after_cancel");
frappe.run_serially([
() => this.rename_notify(this.doctype, original_name, r.docs[0].name),
() => frappe.router.clear_re_route(this.doctype, original_name),
() => this.refresh(),
]);
me.script_manager.trigger("after_cancel");
}
};
frappe.ui.form.save(this, "cancel", after_cancel, btn);
frappe.ui.form.save(me, "cancel", after_cancel, btn);
});
}
if (skip_confirm) {
cancel_doc();
} else {
frappe.confirm(__("Permanently Cancel {0}?", [this.docname]), cancel_doc, this.handle_save_fail(btn, on_error));
frappe.confirm(__("Permanently Cancel {0}?", [this.docname]), cancel_doc, me.handle_save_fail(btn, on_error));
}
};
@ -911,7 +907,7 @@ frappe.ui.form.Form = class FrappeForm {
'docname': this.doc.name
}).then(is_amended => {
if (is_amended) {
frappe.throw(__('This document is already amended, you cannot amend it again'));
frappe.throw(__('This document is already amended, you cannot amend it again'));
}
this.validate_form_action("Amend");
var me = this;

View file

@ -150,8 +150,12 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
});
}
is_child_selection_enabled() {
return this.dialog.fields_dict['allow_child_item_selection'].get_value();
}
toggle_child_selection() {
if (this.dialog.fields_dict['allow_child_item_selection'].get_value()) {
if (this.is_child_selection_enabled()) {
this.show_child_results();
} else {
this.child_results = [];
@ -289,7 +293,11 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
parent: this.dialog.get_field('filter_area').$wrapper,
doctype: this.doctype,
on_change: () => {
this.get_results();
if (this.is_child_selection_enabled()) {
this.show_child_results();
} else {
this.get_results();
}
}
});
// 'Apply Filter' breaks since the filers are not in a popover
@ -325,7 +333,11 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
this.$parent.find('.input-with-feedback').on('change', () => {
frappe.flags.auto_scroll = false;
this.get_results();
if (this.is_child_selection_enabled()) {
this.show_child_results();
} else {
this.get_results();
}
});
this.$parent.find('[data-fieldtype="Data"]').on('input', () => {
@ -333,8 +345,12 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
clearTimeout($this.data('timeout'));
$this.data('timeout', setTimeout(function () {
frappe.flags.auto_scroll = false;
me.empty_list();
me.get_results();
if (me.is_child_selection_enabled()) {
me.show_child_results();
} else {
me.empty_list();
me.get_results();
}
}, 300));
});
}

View file

@ -250,12 +250,6 @@ frappe.router = {
}
},
clear_re_route(doctype, docname) {
delete frappe.re_route[
`${encodeURIComponent(frappe.router.slug(doctype))}/${encodeURIComponent(docname)}`
];
},
set_title(sub_path) {
if (frappe.route_titles[sub_path]) {
frappe.utils.set_title(frappe.route_titles[sub_path]);

View file

@ -70,6 +70,9 @@ frappe.breadcrumbs = {
this.set_form_breadcrumb(breadcrumbs, view);
} else if (breadcrumbs.doctype && view === 'list') {
this.set_list_breadcrumb(breadcrumbs);
} else if (breadcrumbs.doctype && view == 'dashboard-view') {
this.set_list_breadcrumb(breadcrumbs);
this.set_dashboard_breadcrumb(breadcrumbs);
}
}
@ -164,6 +167,14 @@ frappe.breadcrumbs = {
},
set_dashboard_breadcrumb(breadcrumbs) {
const doctype = breadcrumbs.doctype;
const docname = frappe.get_route()[1];
let dashboard_route = `/app/${frappe.router.slug(doctype)}/${docname}`;
$(`<li><a href="${dashboard_route}">${__(docname)}</a></li>`)
.appendTo(this.$breadcrumbs);
},
setup_modules() {
if (!frappe.visible_modules) {
frappe.visible_modules = $.map(frappe.boot.allowed_workspaces, (m) => {

View file

@ -866,7 +866,7 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView {
}
doctype_fields = [{
label: __('ID'),
label: __('ID', null, 'Label of name column in report'),
fieldname: 'name',
fieldtype: 'Data',
reqd: 1

View file

@ -44,9 +44,16 @@ export default class GoogleDrivePicker {
}
handleAuthResult(authResult) {
let error_map = {
"popup_closed_by_user": __("Google Authentication was closed abruptly by the user")
};
if (authResult && !authResult.error) {
frappe.boot.user.google_drive_token = authResult.access_token;
this.createPicker();
} else {
let error = error_map[authResult.error] || __("Google Authentication Error");
frappe.throw(error);
}
}
@ -58,20 +65,34 @@ export default class GoogleDrivePicker {
createPicker() {
// Create and render a Picker object for searching images.
if (this.pickerApiLoaded && frappe.boot.user.google_drive_token) {
var view = new google.picker.DocsView(google.picker.ViewId.DOCS)
this.view = new google.picker.DocsView(google.picker.ViewId.DOCS)
.setParent('root') // show the root folder by default
.setIncludeFolders(true); // also show folders, not just files
var picker = new google.picker.PickerBuilder()
this.picker = new google.picker.PickerBuilder()
.setAppId(this.appId)
.setDeveloperKey(this.developerKey)
.setOAuthToken(frappe.boot.user.google_drive_token)
.addView(view)
.addView(this.view)
.setLocale(frappe.boot.lang)
.setCallback(this.pickerCallback)
.build();
picker.setVisible(true);
this.picker.setVisible(true);
this.setupHide();
}
}
setupHide() {
	// Close the picker when the user clicks the dialog backdrop, and report
	// the dismissal to the registered callback as a CANCEL action so callers
	// can react the same way as to the picker's own Cancel button.
	let bg = $(".picker-dialog-bg");
	for (let el of bg) {
		el.onclick = () => {
			this.picker.setVisible(false);
			// NOTE(review): `Ob` is an obfuscated/internal Google Picker
			// method, not public API — presumably it dispatches the picker
			// callback manually. This may break on a Picker library update;
			// verify against the current Picker build.
			this.picker.Ob({
				action: google.picker.Action.CANCEL
			});
		};
	}
}
}

View file

@ -0,0 +1,5 @@
// This file is used to make sure that `moment` is bound to the window
// before the bundle finishes loading, due to imports (datetime.js) in the bundle
// that depend on `moment`.
import momentTimezone from "moment-timezone/builds/moment-timezone-with-data.js";
window.moment = momentTimezone;

View file

@ -1,15 +1,12 @@
import "./jquery-bootstrap";
import Vue from "vue/dist/vue.esm.js";
import moment from "moment/min/moment-with-locales.js";
import momentTimezone from "moment-timezone/builds/moment-timezone-with-data.js";
import "./lib/moment";
import io from "socket.io-client/dist/socket.io.slim.js";
import Sortable from "./lib/Sortable.min.js";
// TODO: esbuild
// Don't think jquery.hotkeys is being used anywhere. Will remove this after being sure.
// import "./lib/jquery/jquery.hotkeys.js";
window.moment = moment;
window.moment = momentTimezone;
window.Vue = Vue;
window.Sortable = Sortable;
window.io = io;

View file

@ -1,2 +1,3 @@
import "./lib/moment.js";
import "./frappe/utils/datetime.js";
import "./frappe/web_form/webform_script.js";

View file

@ -249,6 +249,7 @@
--checkbox-right-margin: var(--margin-xs);
--checkbox-size: 14px;
--checkbox-focus-shadow: 0 0 0 2px var(--gray-300);
--checkbox-gradient: linear-gradient(180deg, #4AC3F8 -124.51%, var(--primary) 100%);
--right-arrow-svg: url("data: image/svg+xml;utf8, <svg width='6' height='8' viewBox='0 0 6 8' fill='none' xmlns='http://www.w3.org/2000/svg'><path d='M1.25 7.5L4.75 4L1.25 0.5' stroke='%231F272E' stroke-linecap='round' stroke-linejoin='round'/></svg>");
--left-arrow-svg: url("data: image/svg+xml;utf8, <svg width='6' height='8' viewBox='0 0 6 8' fill='none' xmlns='http://www.w3.org/2000/svg'><path d='M7.5 9.5L4 6l3.5-3.5' stroke='%231F272E' stroke-linecap='round' stroke-linejoin='round'></path></svg>");

View file

@ -54,7 +54,7 @@ input[type="radio"] {
}
&:checked::before {
background-color: var(--blue-500);
background-color: var(--primary);
border-radius: 16px;
box-shadow: inset 0 0 0 2px white;
}
@ -85,8 +85,8 @@ input[type="checkbox"] {
}
&:checked {
background-color: var(--blue-500);
background-image: $check-icon, linear-gradient(180deg, #4AC3F8 -124.51%, #2490EF 100%);
background-color: var(--primary);
background-image: $check-icon, var(--checkbox-gradient);
background-size: 57%, 100%;
box-shadow: none;
border: none;

View file

@ -54,4 +54,6 @@ $input-height: 28px !default;
// skeleton
--skeleton-bg: var(--gray-100);
// progress bar
--progress-bar-bg: var(--primary);
}

View file

@ -118,6 +118,9 @@ $custom-control-label-color: var(--text-color);
$custom-switch-indicator-size: 8px;
$custom-control-indicator-border-width: 2px;
// progress bar
$progress-bar-bg: var(--progress-bar-bg);
$navbar-nav-link-padding-x: 1rem !default;
$navbar-padding-y: 1rem !default;
$card-border-radius: 0.75rem !default;

View file

@ -1,8 +1,21 @@
from frappe.query_builder.terms import ParameterizedValueWrapper, ParameterizedFunction
import pypika
import pypika.terms
from pypika import *
from pypika import Field
from pypika.utils import ignore_copy
from frappe.query_builder.terms import ParameterizedFunction, ParameterizedValueWrapper
from frappe.query_builder.utils import (
Column,
DocType,
get_query_builder,
patch_query_aggregation,
patch_query_execute,
)
pypika.terms.ValueWrapper = ParameterizedValueWrapper
pypika.terms.Function = ParameterizedFunction
from pypika import *
from frappe.query_builder.utils import Column, DocType, get_query_builder, patch_query_execute, patch_query_aggregation
# * Overrides the field() method and replaces it with the a `PseudoColumn` 'field' for consistency
pypika.queries.Selectable.__getattr__ = ignore_copy(lambda table, x: Field(x, table=table))
pypika.queries.Selectable.__getitem__ = ignore_copy(lambda table, x: Field(x, table=table))
pypika.queries.Selectable.field = pypika.terms.PseudoColumn("field")

View file

@ -1,8 +1,12 @@
from pypika import MySQLQuery, Order, PostgreSQLQuery, terms
from pypika.queries import Schema, Table
from frappe.utils import get_table_name
from pypika.dialects import MySQLQueryBuilder, PostgreSQLQueryBuilder
from pypika.queries import QueryBuilder, Schema, Table
from pypika.terms import Function
from frappe.query_builder.terms import ParameterizedValueWrapper
from frappe.utils import get_table_name
class Base:
terms = terms
desc = Order.desc
@ -19,13 +23,13 @@ class Base:
return Table(table_name, *args, **kwargs)
@classmethod
def into(cls, table, *args, **kwargs):
def into(cls, table, *args, **kwargs) -> QueryBuilder:
if isinstance(table, str):
table = cls.DocType(table)
return super().into(table, *args, **kwargs)
@classmethod
def update(cls, table, *args, **kwargs):
def update(cls, table, *args, **kwargs) -> QueryBuilder:
if isinstance(table, str):
table = cls.DocType(table)
return super().update(table, *args, **kwargs)
@ -34,6 +38,10 @@ class Base:
class MariaDB(Base, MySQLQuery):
Field = terms.Field
@classmethod
def _builder(cls, *args, **kwargs) -> "MySQLQueryBuilder":
return super()._builder(*args, wrapper_cls=ParameterizedValueWrapper, **kwargs)
@classmethod
def from_(cls, table, *args, **kwargs):
if isinstance(table, str):
@ -53,6 +61,10 @@ class Postgres(Base, PostgreSQLQuery):
# they are two different objects. The quick fix used here is to replace the
# Field names in the "Field" function.
@classmethod
def _builder(cls, *args, **kwargs) -> "PostgreSQLQueryBuilder":
return super()._builder(*args, wrapper_cls=ParameterizedValueWrapper, **kwargs)
@classmethod
def Field(cls, field_name, *args, **kwargs):
if field_name in cls.field_translation:

View file

@ -1,33 +1,77 @@
from datetime import timedelta
from typing import Any, Dict, Optional
from frappe.utils.data import format_timedelta
from pypika.terms import Function, ValueWrapper
from pypika.utils import format_alias_sql
class NamedParameterWrapper():
def __init__(self, parameters: Dict[str, Any]):
self.parameters = parameters
class NamedParameterWrapper:
"""Utility class to hold parameter values and keys"""
def update_parameters(self, param_key: Any, param_value: Any, **kwargs):
def __init__(self) -> None:
self.parameters = {}
def get_sql(self, param_value: Any, **kwargs) -> str:
"""returns SQL for a parameter, while adding the real value in a dict
Args:
param_value (Any): Value of the parameter
Returns:
str: parameter used in the SQL query
"""
param_key = f"%(param{len(self.parameters) + 1})s"
self.parameters[param_key[2:-2]] = param_value
return param_key
def get_sql(self, **kwargs):
return f'%(param{len(self.parameters) + 1})s'
def get_parameters(self) -> Dict[str, Any]:
"""get dict with parameters and values
Returns:
Dict[str, Any]: parameter dict
"""
return self.parameters
class ParameterizedValueWrapper(ValueWrapper):
def get_sql(self, quote_char: Optional[str] = None, secondary_quote_char: str = "'", param_wrapper= None, **kwargs: Any) -> str:
if param_wrapper is None:
sql = self.get_value_sql(quote_char=quote_char, secondary_quote_char=secondary_quote_char, **kwargs)
return format_alias_sql(sql, self.alias, quote_char=quote_char, **kwargs)
"""
Class to monkey patch ValueWrapper
Adds functionality to parameterize queries when a `param wrapper` is passed in get_sql()
"""
def get_sql(
self,
quote_char: Optional[str] = None,
secondary_quote_char: str = "'",
param_wrapper: Optional[NamedParameterWrapper] = None,
**kwargs: Any,
) -> str:
if param_wrapper and isinstance(self.value, str):
# add quotes if it's a string value
value_sql = self.get_value_sql(quote_char=quote_char, **kwargs)
sql = param_wrapper.get_sql(param_value=value_sql, **kwargs)
else:
value_sql = self.get_value_sql(quote_char=quote_char, **kwargs) if not isinstance(self.value,int) else self.value
param_sql = param_wrapper.get_sql(**kwargs)
param_wrapper.update_parameters(param_key=param_sql, param_value=value_sql, **kwargs)
return format_alias_sql(param_sql, self.alias, quote_char=quote_char, **kwargs)
# * BUG: pypika doesen't parse timedeltas
if isinstance(self.value, timedelta):
self.value = format_timedelta(self.value)
sql = self.get_value_sql(
quote_char=quote_char,
secondary_quote_char=secondary_quote_char,
param_wrapper=param_wrapper,
**kwargs,
)
return format_alias_sql(sql, self.alias, quote_char=quote_char, **kwargs)
class ParameterizedFunction(Function):
"""
Class to monkey patch pypika.terms.Functions
Only to pass `param_wrapper` in `get_function_sql`.
"""
def get_sql(self, **kwargs: Any) -> str:
with_alias = kwargs.pop("with_alias", False)
with_namespace = kwargs.pop("with_namespace", False)
@ -35,15 +79,24 @@ class ParameterizedFunction(Function):
dialect = kwargs.pop("dialect", None)
param_wrapper = kwargs.pop("param_wrapper", None)
function_sql = self.get_function_sql(with_namespace=with_namespace, quote_char=quote_char, param_wrapper=param_wrapper, dialect=dialect)
function_sql = self.get_function_sql(
with_namespace=with_namespace,
quote_char=quote_char,
param_wrapper=param_wrapper,
dialect=dialect,
)
if self.schema is not None:
function_sql = "{schema}.{function}".format(
schema=self.schema.get_sql(quote_char=quote_char, dialect=dialect, **kwargs),
schema=self.schema.get_sql(
quote_char=quote_char, dialect=dialect, **kwargs
),
function=function_sql,
)
if with_alias:
return format_alias_sql(function_sql, self.alias, quote_char=quote_char, **kwargs)
return format_alias_sql(
function_sql, self.alias, quote_char=quote_char, **kwargs
)
return function_sql

View file

@ -1,17 +1,17 @@
from enum import Enum
from typing import Any, Callable, Dict, Union, get_type_hints
from importlib import import_module
from typing import Any, Callable, Dict, Union, get_type_hints
from pypika import Query
from pypika.queries import Column
import frappe
from .builder import MariaDB, Postgres
from pypika.terms import PseudoColumn
import frappe
from frappe.query_builder.terms import NamedParameterWrapper
from .builder import MariaDB, Postgres
class db_type_is(Enum):
MARIADB = "mariadb"
POSTGRES = "postgres"
@ -59,11 +59,11 @@ def patch_query_execute():
return frappe.db.sql(query, params, *args, **kwargs) # nosemgrep
def prepare_query(query):
params = {}
query = query.get_sql(param_wrapper = NamedParameterWrapper(params))
param_collector = NamedParameterWrapper()
query = query.get_sql(param_wrapper=param_collector)
if frappe.flags.in_safe_exec and not query.lower().strip().startswith("select"):
raise frappe.PermissionError('Only SELECT SQL allowed in scripting')
return query, params
return query, param_collector.get_parameters()
query_class = get_attr(str(frappe.qb).split("'")[1])
builder_class = get_type_hints(query_class._builder).get('return')
@ -78,7 +78,7 @@ def patch_query_execute():
def patch_query_aggregation():
"""Patch aggregation functions to frappe.qb
"""
from frappe.query_builder.functions import _max, _min, _avg, _sum
from frappe.query_builder.functions import _avg, _max, _min, _sum
frappe.qb.max = _max
frappe.qb.min = _min

View file

@ -66,7 +66,7 @@ class FullTextSearch:
ix = self.get_index()
with ix.searcher():
writer = ix.writer()
writer = AsyncWriter(ix)
writer.delete_by_term(self.id, doc_name)
writer.commit(optimize=True)
@ -98,7 +98,7 @@ class FullTextSearch:
def build_index(self):
"""Build index for all parsed documents"""
ix = self.create_index()
writer = ix.writer()
writer = AsyncWriter(ix)
for i, document in enumerate(self.documents):
if document:

View file

@ -59,9 +59,9 @@ class EnergyPointRule(Document):
# indicates that this was a new doc
return doc.get_doc_before_save() is None
if self.for_doc_event == 'Submit':
return doc.docstatus == 1
return doc.docstatus.is_submitted()
if self.for_doc_event == 'Cancel':
return doc.docstatus == 2
return doc.docstatus.is_cancelled()
if self.for_doc_event == 'Value Change':
field_to_check = self.field_to_check
if not field_to_check: return False
@ -96,7 +96,7 @@ def process_energy_points(doc, state):
old_doc = doc.get_doc_before_save()
# check if doc has been cancelled
if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
if old_doc and old_doc.docstatus.is_submitted() and doc.docstatus.is_cancelled():
return revert_points_for_cancelled_doc(doc)
for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,

View file

@ -105,8 +105,6 @@
// for backward compatibility of some libs
frappe.sys_defaults = frappe.boot.sysdefaults;
</script>
<script type="text/javascript" src="/assets/frappe/node_modules/moment/min/moment-with-locales.min.js"></script>
<script type="text/javascript" src="/assets/frappe/node_modules/moment-timezone/builds/moment-timezone-with-data.min.js"></script>
{{ include_script('frappe-web.bundle.js') }}
{% endblock %}

View file

@ -30,7 +30,7 @@ def xmlrunner_wrapper(output):
def main(app=None, module=None, doctype=None, verbose=False, tests=(),
force=False, profile=False, junit_xml_output=None, ui_tests=False,
doctype_list_path=None, skip_test_records=False, failfast=False):
doctype_list_path=None, skip_test_records=False, failfast=False, case=None):
global unittest_runner
if doctype_list_path:
@ -76,7 +76,7 @@ def main(app=None, module=None, doctype=None, verbose=False, tests=(),
if doctype:
ret = run_tests_for_doctype(doctype, verbose, tests, force, profile, failfast=failfast, junit_xml_output=junit_xml_output)
elif module:
ret = run_tests_for_module(module, verbose, tests, profile, failfast=failfast, junit_xml_output=junit_xml_output)
ret = run_tests_for_module(module, verbose, tests, profile, failfast=failfast, junit_xml_output=junit_xml_output, case=case)
else:
ret = run_all_tests(app, verbose, profile, ui_tests, failfast=failfast, junit_xml_output=junit_xml_output)
@ -182,16 +182,16 @@ def run_tests_for_doctype(doctypes, verbose=False, tests=(), force=False, profil
return _run_unittest(modules, verbose=verbose, tests=tests, profile=profile, failfast=failfast, junit_xml_output=junit_xml_output)
def run_tests_for_module(module, verbose=False, tests=(), profile=False, failfast=False, junit_xml_output=False):
def run_tests_for_module(module, verbose=False, tests=(), profile=False, failfast=False, junit_xml_output=False, case=None):
module = importlib.import_module(module)
if hasattr(module, "test_dependencies"):
for doctype in module.test_dependencies:
make_test_records(doctype, verbose=verbose)
frappe.db.commit()
return _run_unittest(module, verbose=verbose, tests=tests, profile=profile, failfast=failfast, junit_xml_output=junit_xml_output)
return _run_unittest(module, verbose=verbose, tests=tests, profile=profile, failfast=failfast, junit_xml_output=junit_xml_output, case=case)
def _run_unittest(modules, verbose=False, tests=(), profile=False, failfast=False, junit_xml_output=False):
def _run_unittest(modules, verbose=False, tests=(), profile=False, failfast=False, junit_xml_output=False, case=None):
frappe.db.begin()
test_suite = unittest.TestSuite()
@ -200,7 +200,10 @@ def _run_unittest(modules, verbose=False, tests=(), profile=False, failfast=Fals
modules = [modules]
for module in modules:
module_test_cases = unittest.TestLoader().loadTestsFromModule(module)
if case:
module_test_cases = unittest.TestLoader().loadTestsFromTestCase(getattr(module, case))
else:
module_test_cases = unittest.TestLoader().loadTestsFromModule(module)
if tests:
for each in module_test_cases:
for test_case in each.__dict__["_tests"]:
@ -337,7 +340,7 @@ def make_test_records_for_doctype(doctype, verbose=0, force=False):
elif hasattr(test_module, "test_records"):
if doctype in frappe.local.test_objects:
frappe.local.test_objects[doctype] += make_test_objects(doctype, test_module.test_records, verbose, force)
else:
else:
frappe.local.test_objects[doctype] = make_test_objects(doctype, test_module.test_records, verbose, force)
else:

View file

@ -0,0 +1,18 @@
import unittest
from frappe.model.base_document import BaseDocument
class TestBaseDocument(unittest.TestCase):
def test_docstatus(self):
doc = BaseDocument({"docstatus": 0})
self.assertTrue(doc.docstatus.is_draft())
self.assertEquals(doc.docstatus, 0)
doc.docstatus = 1
self.assertTrue(doc.docstatus.is_submitted())
self.assertEquals(doc.docstatus, 1)
doc.docstatus = 2
self.assertTrue(doc.docstatus.is_cancelled())
self.assertEquals(doc.docstatus, 2)

View file

@ -1,21 +1,21 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import datetime
import inspect
import unittest
from random import choice
import datetime
from unittest.mock import patch
import frappe
from frappe.custom.doctype.custom_field.custom_field import create_custom_field
from frappe.utils import random_string
from frappe.utils.testutils import clear_custom_fields
from frappe.query_builder import Field
from frappe.database import savepoint
from .test_query_builder import run_only_if, db_type_is
from frappe.database.database import Database
from frappe.query_builder import Field
from frappe.query_builder.functions import Concat_ws
from frappe.tests.test_query_builder import db_type_is, run_only_if
from frappe.utils import add_days, now, random_string
from frappe.utils.testutils import clear_custom_fields
class TestDB(unittest.TestCase):
@ -84,20 +84,6 @@ class TestDB(unittest.TestCase):
),
)
def test_set_value(self):
todo1 = frappe.get_doc(dict(doctype='ToDo', description = 'test_set_value 1')).insert()
todo2 = frappe.get_doc(dict(doctype='ToDo', description = 'test_set_value 2')).insert()
frappe.db.set_value('ToDo', todo1.name, 'description', 'test_set_value change 1')
self.assertEqual(frappe.db.get_value('ToDo', todo1.name, 'description'), 'test_set_value change 1')
# multiple set-value
frappe.db.set_value('ToDo', dict(description=('like', '%test_set_value%')),
'description', 'change 2')
self.assertEqual(frappe.db.get_value('ToDo', todo1.name, 'description'), 'change 2')
self.assertEqual(frappe.db.get_value('ToDo', todo2.name, 'description'), 'change 2')
def test_escape(self):
frappe.db.escape("香港濟生堂製藥有限公司 - IT".encode("utf-8"))
@ -246,7 +232,6 @@ class TestDB(unittest.TestCase):
frappe.delete_doc(test_doctype, doc)
clear_custom_fields(test_doctype)
def test_savepoints(self):
frappe.db.rollback()
save_point = "todonope"
@ -365,6 +350,143 @@ class TestDDLCommandsMaria(unittest.TestCase):
self.assertEquals(len(indexs_in_table), 2)
class TestDBSetValue(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.todo1 = frappe.get_doc(doctype="ToDo", description="test_set_value 1").insert()
cls.todo2 = frappe.get_doc(doctype="ToDo", description="test_set_value 2").insert()
def test_update_single_doctype_field(self):
value = frappe.db.get_single_value("System Settings", "deny_multiple_sessions")
changed_value = not value
frappe.db.set_value("System Settings", "System Settings", "deny_multiple_sessions", changed_value)
current_value = frappe.db.get_single_value("System Settings", "deny_multiple_sessions")
self.assertEqual(current_value, changed_value)
changed_value = not current_value
frappe.db.set_value("System Settings", None, "deny_multiple_sessions", changed_value)
current_value = frappe.db.get_single_value("System Settings", "deny_multiple_sessions")
self.assertEqual(current_value, changed_value)
changed_value = not current_value
frappe.db.set_single_value("System Settings", "deny_multiple_sessions", changed_value)
current_value = frappe.db.get_single_value("System Settings", "deny_multiple_sessions")
self.assertEqual(current_value, changed_value)
def test_update_single_row_single_column(self):
frappe.db.set_value("ToDo", self.todo1.name, "description", "test_set_value change 1")
updated_value = frappe.db.get_value("ToDo", self.todo1.name, "description")
self.assertEqual(updated_value, "test_set_value change 1")
def test_update_single_row_multiple_columns(self):
description, status = "Upated by test_update_single_row_multiple_columns", "Closed"
frappe.db.set_value("ToDo", self.todo1.name, {
"description": description,
"status": status,
}, update_modified=False)
updated_desciption, updated_status = frappe.db.get_value("ToDo",
filters={"name": self.todo1.name},
fieldname=["description", "status"]
)
self.assertEqual(description, updated_desciption)
self.assertEqual(status, updated_status)
def test_update_multiple_rows_single_column(self):
frappe.db.set_value("ToDo", {"description": ("like", "%test_set_value%")}, "description", "change 2")
self.assertEqual(frappe.db.get_value("ToDo", self.todo1.name, "description"), "change 2")
self.assertEqual(frappe.db.get_value("ToDo", self.todo2.name, "description"), "change 2")
def test_update_multiple_rows_multiple_columns(self):
todos_to_update = frappe.get_all("ToDo", filters={
"description": ("like", "%test_set_value%"),
"status": ("!=", "Closed")
}, pluck="name")
frappe.db.set_value("ToDo", {
"description": ("like", "%test_set_value%"),
"status": ("!=", "Closed")
}, {
"status": "Closed",
"priority": "High"
})
test_result = frappe.get_all("ToDo", filters={"name": ("in", todos_to_update)}, fields=["status", "priority"])
self.assertTrue(all(x for x in test_result if x["status"] == "Closed"))
self.assertTrue(all(x for x in test_result if x["priority"] == "High"))
def test_update_modified_options(self):
self.todo2.reload()
todo = self.todo2
updated_description = f"{todo.description} - by `test_update_modified_options`"
custom_modified = datetime.datetime.fromisoformat(add_days(now(), 10))
custom_modified_by = "user_that_doesnt_exist@example.com"
frappe.db.set_value("ToDo", todo.name, "description", updated_description, update_modified=False)
self.assertEqual(updated_description, frappe.db.get_value("ToDo", todo.name, "description"))
self.assertEqual(todo.modified, frappe.db.get_value("ToDo", todo.name, "modified"))
frappe.db.set_value("ToDo", todo.name, "description", "test_set_value change 1", modified=custom_modified, modified_by=custom_modified_by)
self.assertTupleEqual(
(custom_modified, custom_modified_by),
frappe.db.get_value("ToDo", todo.name, ["modified", "modified_by"])
)
def test_for_update(self):
self.todo1.reload()
with patch.object(Database, "sql") as sql_called:
frappe.db.set_value(
self.todo1.doctype,
self.todo1.name,
"description",
f"{self.todo1.description}-edit by `test_for_update`"
)
first_query = sql_called.call_args_list[0].args[0]
second_query = sql_called.call_args_list[1].args[0]
self.assertTrue(sql_called.call_count == 2)
self.assertTrue("FOR UPDATE" in first_query)
if frappe.conf.db_type == "postgres":
from frappe.database.postgres.database import modify_query
self.assertTrue(modify_query("UPDATE `tabToDo` SET") in second_query)
if frappe.conf.db_type == "mariadb":
self.assertTrue("UPDATE `tabToDo` SET" in second_query)
def test_cleared_cache(self):
self.todo2.reload()
with patch.object(frappe, "clear_document_cache") as clear_cache:
frappe.db.set_value(
self.todo2.doctype,
self.todo2.name,
"description",
f"{self.todo2.description}-edit by `test_cleared_cache`"
)
clear_cache.assert_called()
def test_update_alias(self):
args = (self.todo1.doctype, self.todo1.name, "description", "Updated by `test_update_alias`")
kwargs = {"for_update": False, "modified": None, "modified_by": None, "update_modified": True, "debug": False}
self.assertTrue("return self.set_value(" in inspect.getsource(frappe.db.update))
with patch.object(Database, "set_value") as set_value:
frappe.db.update(*args, **kwargs)
set_value.assert_called_once()
set_value.assert_called_with(*args, **kwargs)
@classmethod
def tearDownClass(cls):
frappe.db.rollback()
@run_only_if(db_type_is.POSTGRES)
class TestDDLCommandsPost(unittest.TestCase):
test_table_name = "TestNotes"

View file

@ -0,0 +1,26 @@
import unittest
from frappe.model.docstatus import DocStatus
class TestDocStatus(unittest.TestCase):
def test_draft(self):
self.assertEqual(DocStatus(0), DocStatus.draft())
self.assertTrue(DocStatus.draft().is_draft())
self.assertFalse(DocStatus.draft().is_cancelled())
self.assertFalse(DocStatus.draft().is_submitted())
def test_submitted(self):
self.assertEqual(DocStatus(1), DocStatus.submitted())
self.assertFalse(DocStatus.submitted().is_draft())
self.assertTrue(DocStatus.submitted().is_submitted())
self.assertFalse(DocStatus.submitted().is_cancelled())
def test_cancelled(self):
self.assertEqual(DocStatus(2), DocStatus.cancelled())
self.assertFalse(DocStatus.cancelled().is_draft())
self.assertFalse(DocStatus.cancelled().is_submitted())
self.assertTrue(DocStatus.cancelled().is_cancelled())

View file

@ -144,6 +144,7 @@ class TestNaming(unittest.TestCase):
current_index = frappe.db.sql("""SELECT current from `tabSeries` where name = %s""", series, as_dict=True)[0]
self.assertEqual(current_index.get('current'), 2)
frappe.db.delete("Series", {"name": series})
def test_naming_for_cancelled_and_amended_doc(self):
@ -166,25 +167,20 @@ class TestNaming(unittest.TestCase):
doc.submit()
doc.cancel()
cancelled_name = doc.name
self.assertEqual(cancelled_name, "{}-CANC-0".format(original_name))
self.assertEqual(cancelled_name, original_name)
amended_doc = frappe.copy_doc(doc)
amended_doc.docstatus = 0
amended_doc.amended_from = doc.name
amended_doc.save()
self.assertEqual(amended_doc.name, original_name)
self.assertEqual(amended_doc.name, "{}-1".format(original_name))
amended_doc.submit()
amended_doc.cancel()
self.assertEqual(amended_doc.name, "{}-CANC-1".format(original_name))
self.assertEqual(amended_doc.name, "{}-1".format(original_name))
submittable_doctype.delete()
def test_parse_naming_series_for_consecutive_week_number(self):
week = determine_consecutive_week_number(now_datetime())
name = parse_naming_series('PREFIX-.WW.-SUFFIX')
expected_name = 'PREFIX-{}-SUFFIX'.format(week)
self.assertEqual(name, expected_name)
def test_determine_consecutive_week_number(self):
from datetime import datetime
@ -207,4 +203,4 @@ class TestNaming(unittest.TestCase):
dt = datetime.fromisoformat("2021-12-31")
w = determine_consecutive_week_number(dt)
self.assertEqual(w, "52")
self.assertEqual(w, "52")

View file

@ -5,7 +5,7 @@ import frappe
from frappe.query_builder.custom import ConstantColumn
from frappe.query_builder.functions import Coalesce, GroupConcat, Match
from frappe.query_builder.utils import db_type_is
from frappe.query_builder import Case
def run_only_if(dbtype: db_type_is) -> Callable:
return unittest.skipIf(
@ -25,8 +25,14 @@ class TestCustomFunctionsMariaDB(unittest.TestCase):
)
def test_constant_column(self):
query = frappe.qb.from_("DocType").select("name", ConstantColumn("John").as_("User"))
self.assertEqual(query.get_sql(), "SELECT `name`,'John' `User` FROM `tabDocType`")
query = frappe.qb.from_("DocType").select(
"name", ConstantColumn("John").as_("User")
)
self.assertEqual(
query.get_sql(), "SELECT `name`,'John' `User` FROM `tabDocType`"
)
@run_only_if(db_type_is.POSTGRES)
class TestCustomFunctionsPostgres(unittest.TestCase):
def test_concat(self):
@ -39,8 +45,13 @@ class TestCustomFunctionsPostgres(unittest.TestCase):
)
def test_constant_column(self):
query = frappe.qb.from_("DocType").select("name", ConstantColumn("John").as_("User"))
self.assertEqual(query.get_sql(), 'SELECT "name",\'John\' "User" FROM "tabDocType"')
query = frappe.qb.from_("DocType").select(
"name", ConstantColumn("John").as_("User")
)
self.assertEqual(
query.get_sql(), 'SELECT "name",\'John\' "User" FROM "tabDocType"'
)
class TestBuilderBase(object):
def test_adding_tabs(self):
@ -55,23 +66,95 @@ class TestBuilderBase(object):
self.assertIsInstance(query.run, Callable)
self.assertIsInstance(data, list)
def test_walk(self):
DocType = frappe.qb.DocType('DocType')
class TestParameterization(unittest.TestCase):
def test_where_conditions(self):
DocType = frappe.qb.DocType("DocType")
query = (
frappe.qb.from_(DocType)
.select(DocType.name)
.where((DocType.owner == "Administrator' --")
& (Coalesce(DocType.search_fields == "subject"))
)
.where((DocType.owner == "Administrator' --"))
)
self.assertTrue("walk" in dir(query))
query, params = query.walk()
self.assertIn("%(param1)s", query)
self.assertIn("%(param2)s", query)
self.assertIn("param1",params)
self.assertEqual(params["param1"],"Administrator' --")
self.assertEqual(params["param2"],"subject")
self.assertIn("param1", params)
self.assertEqual(params["param1"], "Administrator' --")
def test_set_cnoditions(self):
DocType = frappe.qb.DocType("DocType")
query = frappe.qb.update(DocType).set(DocType.value, "some_value")
self.assertTrue("walk" in dir(query))
query, params = query.walk()
self.assertIn("%(param1)s", query)
self.assertIn("param1", params)
self.assertEqual(params["param1"], "some_value")
def test_where_conditions_functions(self):
DocType = frappe.qb.DocType("DocType")
query = (
frappe.qb.from_(DocType)
.select(DocType.name)
.where(Coalesce(DocType.search_fields == "subject"))
)
self.assertTrue("walk" in dir(query))
query, params = query.walk()
self.assertIn("%(param1)s", query)
self.assertIn("param1", params)
self.assertEqual(params["param1"], "subject")
def test_case(self):
DocType = frappe.qb.DocType("DocType")
query = (
frappe.qb.from_(DocType)
.select(
Case()
.when(DocType.search_fields == "value", "other_value")
.when(Coalesce(DocType.search_fields == "subject_in_function"), "true_value")
.else_("Overdue")
)
)
self.assertTrue("walk" in dir(query))
query, params = query.walk()
self.assertIn("%(param1)s", query)
self.assertIn("param1", params)
self.assertEqual(params["param1"], "value")
self.assertEqual(params["param2"], "other_value")
self.assertEqual(params["param3"], "subject_in_function")
self.assertEqual(params["param4"], "true_value")
self.assertEqual(params["param5"], "Overdue")
def test_case_in_update(self):
DocType = frappe.qb.DocType("DocType")
query = (
frappe.qb.update(DocType)
.set(
"parent",
Case()
.when(DocType.search_fields == "value", "other_value")
.when(Coalesce(DocType.search_fields == "subject_in_function"), "true_value")
.else_("Overdue")
)
)
self.assertTrue("walk" in dir(query))
query, params = query.walk()
self.assertIn("%(param1)s", query)
self.assertIn("param1", params)
self.assertEqual(params["param1"], "value")
self.assertEqual(params["param2"], "other_value")
self.assertEqual(params["param3"], "subject_in_function")
self.assertEqual(params["param4"], "true_value")
self.assertEqual(params["param5"], "Overdue")
@run_only_if(db_type_is.MARIADB)
@ -84,6 +167,7 @@ class TestBuilderMaria(unittest.TestCase, TestBuilderBase):
"SELECT * FROM `__Auth`", frappe.qb.from_("__Auth").select("*").get_sql()
)
@run_only_if(db_type_is.POSTGRES)
class TestBuilderPostgres(unittest.TestCase, TestBuilderBase):
def test_adding_tabs_in_from(self):

View file

@ -1,22 +1,28 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import unittest
import frappe
from frappe.utils import evaluate_filters, money_in_words, scrub_urls, get_url
from frappe.utils import validate_url, validate_email_address
from frappe.utils import ceil, floor
from frappe.utils.data import cast, validate_python_code
from frappe.utils.diff import get_version_diff, version_query, _get_value_from_version
from PIL import Image
from frappe.utils.image import strip_exif_data, optimize_image
import io
import json
import unittest
from datetime import date, datetime, time, timedelta
from decimal import Decimal
from enum import Enum
from mimetypes import guess_type
from datetime import datetime, timedelta, date
from unittest.mock import patch
import pytz
from PIL import Image
import frappe
from frappe.utils import ceil, evaluate_filters, floor, format_timedelta
from frappe.utils import get_url, money_in_words, parse_timedelta, scrub_urls
from frappe.utils import validate_email_address, validate_url
from frappe.utils.data import cast, get_time, get_timedelta, nowtime, now_datetime, validate_python_code
from frappe.utils.diff import _get_value_from_version, get_version_diff, version_query
from frappe.utils.image import optimize_image, strip_exif_data
from frappe.utils.response import json_handler
class TestFilters(unittest.TestCase):
def test_simple_dict(self):
self.assertTrue(evaluate_filters({'doctype': 'User', 'status': 'Open'}, {'status': 'Open'}))
@ -273,9 +279,7 @@ class TestPythonExpressions(unittest.TestCase):
for expr in invalid_expressions:
self.assertRaises(frappe.ValidationError, validate_python_code, expr)
class TestDiffUtils(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.doc = frappe.get_doc(doctype="Client Script", dt="Client Script")
@ -330,8 +334,85 @@ class TestDateUtils(unittest.TestCase):
self.assertEqual(frappe.utils.get_last_day_of_week("2020-12-28"),
frappe.utils.getdate("2021-01-02"))
class TestXlsxUtils(unittest.TestCase):
def test_get_time(self):
datetime_input = now_datetime()
timedelta_input = get_timedelta()
time_input = nowtime()
self.assertIsInstance(get_time(datetime_input), time)
self.assertIsInstance(get_time(timedelta_input), time)
self.assertIsInstance(get_time(time_input), time)
self.assertIsInstance(get_time("100:2:12"), time)
self.assertIsInstance(get_time(str(datetime_input)), time)
self.assertIsInstance(get_time(str(timedelta_input)), time)
self.assertIsInstance(get_time(str(time_input)), time)
def test_get_timedelta(self):
datetime_input = now_datetime()
timedelta_input = get_timedelta()
time_input = nowtime()
self.assertIsInstance(get_timedelta(), timedelta)
self.assertIsInstance(get_timedelta("100:2:12"), timedelta)
self.assertIsInstance(get_timedelta("17:21:00"), timedelta)
self.assertIsInstance(get_timedelta("2012-01-19 17:21:00"), timedelta)
self.assertIsInstance(get_timedelta(str(datetime_input)), timedelta)
self.assertIsInstance(get_timedelta(str(timedelta_input)), timedelta)
self.assertIsInstance(get_timedelta(str(time_input)), timedelta)
class TestResponse(unittest.TestCase):
def test_json_handler(self):
class TEST(Enum):
ABC = "!@)@)!"
BCE = "ENJD"
GOOD_OBJECT = {
"time_types": [
date(year=2020, month=12, day=2),
datetime(year=2020, month=12, day=2, hour=23, minute=23, second=23, microsecond=23, tzinfo=pytz.utc),
time(hour=23, minute=23, second=23, microsecond=23, tzinfo=pytz.utc),
timedelta(days=10, hours=12, minutes=120, seconds=10),
],
"float": [
Decimal(29.21),
],
"doc": [
frappe.get_doc("System Settings"),
],
"iter": [
{1, 2, 3},
(1, 2, 3),
"abcdef",
],
"string": "abcdef"
}
BAD_OBJECT = {"Enum": TEST}
processed_object = json.loads(json.dumps(GOOD_OBJECT, default=json_handler))
self.assertTrue(all([isinstance(x, str) for x in processed_object["time_types"]]))
self.assertTrue(all([isinstance(x, float) for x in processed_object["float"]]))
self.assertTrue(all([isinstance(x, (list, str)) for x in processed_object["iter"]]))
self.assertIsInstance(processed_object["string"], str)
with self.assertRaises(TypeError):
json.dumps(BAD_OBJECT, default=json_handler)
class TestTimeDeltaUtils(unittest.TestCase):
def test_format_timedelta(self):
self.assertEqual(format_timedelta(timedelta(seconds=0)), "0:00:00")
self.assertEqual(format_timedelta(timedelta(hours=10)), "10:00:00")
self.assertEqual(format_timedelta(timedelta(hours=100)), "100:00:00")
self.assertEqual(format_timedelta(timedelta(seconds=100, microseconds=129)), "0:01:40.000129")
self.assertEqual(format_timedelta(timedelta(seconds=100, microseconds=12212199129)), "3:25:12.199129")
def test_parse_timedelta(self):
self.assertEqual(parse_timedelta("0:0:0"), timedelta(seconds=0))
self.assertEqual(parse_timedelta("10:0:0"), timedelta(hours=10))
self.assertEqual(parse_timedelta("7 days, 0:32:18.192221"), timedelta(days=7, seconds=1938, microseconds=192221))
self.assertEqual(parse_timedelta("7 days, 0:32:18"), timedelta(days=7, seconds=1938))
class TestXlsxUtils(unittest.TestCase):
def test_unescape(self):
from frappe.utils.xlsxutils import handle_html

View file

@ -1,7 +1,9 @@
import unittest
from unittest.mock import patch
import frappe
from frappe.utils import set_request
from frappe.website.page_renderers.static_page import StaticPage
from frappe.website.serve import get_response, get_response_content
from frappe.website.utils import (build_response, clear_website_cache, get_home_page)
@ -20,6 +22,7 @@ class TestWebsite(unittest.TestCase):
doctype='User',
email='test-user-for-home-page@example.com',
first_name='test')).insert(ignore_if_duplicate=True)
user.reload()
role = frappe.get_doc(dict(
doctype = 'Role',
@ -96,6 +99,19 @@ class TestWebsite(unittest.TestCase):
response = get_response()
self.assertEqual(response.status_code, 200)
set_request(method="GET", path="/_test/assets/image.jpg")
response = get_response()
self.assertEqual(response.status_code, 200)
set_request(method="GET", path="/_test/assets/image")
response = get_response()
self.assertEqual(response.status_code, 200)
with patch.object(StaticPage, "render") as static_render:
set_request(method="GET", path="/_test/assets/image")
response = get_response()
static_render.assert_called()
def test_error_page(self):
set_request(method='GET', path='/_test/problematic_page')
response = get_response()
@ -126,7 +142,6 @@ class TestWebsite(unittest.TestCase):
response = get_response()
self.assertEqual(response.status_code, 404)
def test_redirect(self):
import frappe.hooks
frappe.set_user('Administrator')

View file

@ -151,6 +151,7 @@ My Account,Mein Konto,
New Address,Neue Adresse,
New Contact,Neuer Kontakt,
Next,Weiter,
No,Nein,
No Data,Keine Daten,
No address added yet.,Noch keine Adresse hinzugefügt.,
No contacts added yet.,Noch keine Kontakte hinzugefügt.,
@ -349,7 +350,7 @@ Add a New Role,Neue Rolle hinzufügen,
Add a column,Spalte einfügen,
Add a comment,Einen Kommentar hinzufügen,
Add a new section,Fügen Sie einen neuen Abschnitt hinzu,
Add a tag ...,Füge einen Tag hinzu ...,
Add a tag ...,Füge ein Schlagwort hinzu ...,
Add all roles,Alle Rollen hinzufügen,
Add custom forms.,Benutzerdefinierte Formulare hinzufügen,
Add custom javascript to forms.,Benutzerdefiniertes Javascript zum Formular hinzufügen,
@ -946,6 +947,7 @@ Edit Auto Email Report Settings,Bearbeiten Sie die Einstellungen für automatisc
Edit Custom HTML,Benutzerdefiniertes HTML bearbeiten,
Edit DocType,DocType bearbeiten,
Edit Filter,Filter bearbeiten,
Edit Filters,Filter bearbeiten,
Edit Format,Format bearbeiten,
Edit HTML,HTML bearbeiten,
Edit Heading,Kopf bearbeiten,
@ -1230,6 +1232,7 @@ Hide Copy,Kopie ausblenden,
Hide Footer Signup,Fußzeilen-Anmeldung ausblenden,
Hide Sidebar and Menu,Seitenleiste und Menü ausblenden,
Hide Standard Menu,Standardmenü ausblenden,
Hide Tags,Schlagworte ausblenden,
Hide Weekends,Wochenenden ausblenden,
Hide details,Details ausblenden,
Hide footer in auto email reports,Fußzeile in automatischen E-Mail-Berichten ausblenden,
@ -1650,7 +1653,7 @@ No Preview,Keine Vorschau,
No Preview Available,Keine Vorschau vorhanden,
No Printer is Available.,Es ist kein Drucker verfügbar.,
No Results,Keine Ergebnisse,
No Tags,No Tags,
No Tags,Keine Schlagworte,
No alerts for today,Keine Warnungen für heute,
No comments yet,Noch keine Kommentare,
No comments yet. Start a new discussion.,Noch keine Kommentare. Starten Sie eine neue Diskussion.,
@ -2040,7 +2043,7 @@ Remove,Entfernen,
Remove Field,Feld entfernen,
Remove Filter,Filter entfernen,
Remove Section,Abschnitt entfernen,
Remove Tag,Markierung entfernen,
Remove Tag,Schlagwort entfernen,
Remove all customizations?,Alle Anpassungen entfernen?,
Removed {0},{0} entfernt,
Rename many items by uploading a .csv file.,Viele Elemente auf einmal umbenennen durch Hochladen einer .CSV-Datei,
@ -3250,7 +3253,7 @@ DocType Action,DocType-Aktion,
DocType Event,DocType-Ereignis,
DocType Link,DocType Link,
Document Share,Dokumentenfreigabe,
Document Tag,Dokument-Tag,
Document Tag,Dokument-Schlagwort,
Document Title,Dokumenttitel,
Document Type Field Mapping,Dokumenttyp-Feldzuordnung,
Document Type Mapping,Dokumenttypzuordnung,
@ -3558,9 +3561,9 @@ Skipping column {0},Spalte {0} wird übersprungen,
Social Home,Soziales Zuhause,
Some columns might get cut off when printing to PDF. Try to keep number of columns under 10.,"Einige Spalten werden beim Drucken in PDF möglicherweise abgeschnitten. Versuchen Sie, die Anzahl der Spalten unter 10 zu halten.",
Something went wrong during the token generation. Click on {0} to generate a new one.,"Während der Token-Generierung ist ein Fehler aufgetreten. Klicken Sie auf {0}, um eine neue zu erstellen.",
Submit After Import,Nach dem Import einreichen,
Submitting...,Einreichen ...,
Success! You are good to go 👍,Erfolg! Du bist gut zu gehen 👍,
Submit After Import,Nach dem Import buchen,
Submitting...,wird verbucht...,
Success! You are good to go 👍,Erfolg! Du kannst nun durchstarten 👍,
Successful Transactions,Erfolgreiche Transaktionen,
Successfully Submitted!,Erfolgreich eingereicht!,
Successfully imported {0} record.,{0} Datensatz erfolgreich importiert.,
@ -3572,9 +3575,9 @@ Sync Contacts,Kontakte synchronisieren,
Sync with Google Calendar,Mit Google Kalender synchronisieren,
Sync with Google Contacts,Mit Google-Kontakten synchronisieren,
Synced,Synchronisiert,
Syncing,Synchronisierung,
Syncing,Synchronisiert,
Syncing {0} of {1},{0} von {1} synchronisieren,
Tag Link,Tag-Link,
Tag Link,Schlagwortverknüpfung,
Take Backup,Backup erstellen,
Template Error,Vorlagenfehler,
Template Options,Vorlagenoptionen,
@ -3796,7 +3799,7 @@ Start,Start,
Start Time,Startzeit,
Status,Status,
Submitted,Gebucht,
Tag,Etikett,
Tag,Schlagwort,
Template,Vorlage,
Thursday,Donnerstag,
Title,Bezeichnung,
@ -4028,7 +4031,7 @@ Please select target language for translation,Bitte wählen Sie die Zielsprache
Select Language,Sprache auswählen,
Confirm Translations,Übersetzungen bestätigen,
Contributed Translations,Beigetragene Übersetzungen,
Show Tags,Tags anzeigen,
Show Tags,Schlagworte anzeigen,
Do not have permission to access {0} bucket.,Sie haben keine Berechtigung zum Zugriff auf den Bucket {0}.,
Allow document creation via Email,Dokumenterstellung per E-Mail zulassen,
Sender Field,Absenderfeld,

1 A4 A4
151 New Address Neue Adresse
152 New Contact Neuer Kontakt
153 Next Weiter
154 No Nein
155 No Data Keine Daten
156 No address added yet. Noch keine Adresse hinzugefügt.
157 No contacts added yet. Noch keine Kontakte hinzugefügt.
350 Add a column Spalte einfügen
351 Add a comment Einen Kommentar hinzufügen
352 Add a new section Fügen Sie einen neuen Abschnitt hinzu
353 Add a tag ... Füge einen Tag hinzu ... Füge ein Schlagwort hinzu ...
354 Add all roles Alle Rollen hinzufügen
355 Add custom forms. Benutzerdefinierte Formulare hinzufügen
356 Add custom javascript to forms. Benutzerdefiniertes Javascript zum Formular hinzufügen
947 Edit Custom HTML Benutzerdefiniertes HTML bearbeiten
948 Edit DocType DocType bearbeiten
949 Edit Filter Filter bearbeiten
950 Edit Filters Filter bearbeiten
951 Edit Format Format bearbeiten
952 Edit HTML HTML bearbeiten
953 Edit Heading Kopf bearbeiten
1232 Hide Footer Signup Fußzeilen-Anmeldung ausblenden
1233 Hide Sidebar and Menu Seitenleiste und Menü ausblenden
1234 Hide Standard Menu Standardmenü ausblenden
1235 Hide Tags Schlagworte ausblenden
1236 Hide Weekends Wochenenden ausblenden
1237 Hide details Details ausblenden
1238 Hide footer in auto email reports Fußzeile in automatischen E-Mail-Berichten ausblenden
1653 No Preview Available Keine Vorschau vorhanden
1654 No Printer is Available. Es ist kein Drucker verfügbar.
1655 No Results Keine Ergebnisse
1656 No Tags No Tags Keine Schlagworte
1657 No alerts for today Keine Warnungen für heute
1658 No comments yet Noch keine Kommentare
1659 No comments yet. Start a new discussion. Noch keine Kommentare. Starten Sie eine neue Diskussion.
2043 Remove Field Feld entfernen
2044 Remove Filter Filter entfernen
2045 Remove Section Abschnitt entfernen
2046 Remove Tag Markierung entfernen Schlagwort entfernen
2047 Remove all customizations? Alle Anpassungen entfernen?
2048 Removed {0} {0} entfernt
2049 Rename many items by uploading a .csv file. Viele Elemente auf einmal umbenennen durch Hochladen einer .CSV-Datei
3253 DocType Event DocType-Ereignis
3254 DocType Link DocType Link
3255 Document Share Dokumentenfreigabe
3256 Document Tag Dokument-Tag Dokument-Schlagwort
3257 Document Title Dokumenttitel
3258 Document Type Field Mapping Dokumenttyp-Feldzuordnung
3259 Document Type Mapping Dokumenttypzuordnung
3561 Social Home Soziales Zuhause
3562 Some columns might get cut off when printing to PDF. Try to keep number of columns under 10. Einige Spalten werden beim Drucken in PDF möglicherweise abgeschnitten. Versuchen Sie, die Anzahl der Spalten unter 10 zu halten.
3563 Something went wrong during the token generation. Click on {0} to generate a new one. Während der Token-Generierung ist ein Fehler aufgetreten. Klicken Sie auf {0}, um eine neue zu erstellen.
3564 Submit After Import Nach dem Import einreichen Nach dem Import buchen
3565 Submitting... Einreichen ... wird verbucht...
3566 Success! You are good to go 👍 Erfolg! Du bist gut zu gehen 👍 Erfolg! Du kannst nun durchstarten 👍
3567 Successful Transactions Erfolgreiche Transaktionen
3568 Successfully Submitted! Erfolgreich eingereicht!
3569 Successfully imported {0} record. {0} Datensatz erfolgreich importiert.
3575 Sync with Google Calendar Mit Google Kalender synchronisieren
3576 Sync with Google Contacts Mit Google-Kontakten synchronisieren
3577 Synced Synchronisiert
3578 Syncing Synchronisierung Synchronisiert
3579 Syncing {0} of {1} {0} von {1} synchronisieren
3580 Tag Link Tag-Link Schlagwortverknüpfung
3581 Take Backup Backup erstellen
3582 Template Error Vorlagenfehler
3583 Template Options Vorlagenoptionen
3799 Start Time Startzeit
3800 Status Status
3801 Submitted Gebucht
3802 Tag Etikett Schlagwort
3803 Template Vorlage
3804 Thursday Donnerstag
3805 Title Bezeichnung
4031 Select Language Sprache auswählen
4032 Confirm Translations Übersetzungen bestätigen
4033 Contributed Translations Beigetragene Übersetzungen
4034 Show Tags Tags anzeigen Schlagworte anzeigen
4035 Do not have permission to access {0} bucket. Sie haben keine Berechtigung zum Zugriff auf den Bucket {0}.
4036 Allow document creation via Email Dokumenterstellung per E-Mail zulassen
4037 Sender Field Absenderfeld

View file

@ -1,4 +1,4 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import functools
@ -56,8 +56,8 @@ def get_email_address(user=None):
def get_formatted_email(user, mail=None):
"""get Email Address of user formatted as: `John Doe <johndoe@example.com>`"""
fullname = get_fullname(user)
method = get_hook_method('get_sender_details')
if method:
sender_name, mail = method()
# if method exists but sender_name is ""

View file

@ -1,17 +1,22 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
from typing import Optional
import frappe
import operator
import json
import base64
import re, datetime, math, time
import datetime
import json
import math
import operator
import re
import time
from code import compile_command
from urllib.parse import quote, urljoin
from frappe.desk.utils import slug
from click import secho
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple, Union
from urllib.parse import quote, urljoin
from click import secho
import frappe
from frappe.desk.utils import slug
DATE_FORMAT = "%Y-%m-%d"
TIME_FORMAT = "%H:%M:%S.%f"
@ -99,11 +104,17 @@ def get_timedelta(time: Optional[str] = None) -> Optional[datetime.timedelta]:
datetime.timedelta: Timedelta object equivalent of the passed `time` string
"""
from dateutil import parser
from dateutil.parser import ParserError
time = time or "0:0:0"
try:
t = parser.parse(time)
try:
t = parser.parse(time)
except ParserError as e:
if "day" in e.args[1] or "hour must be in" in e.args[0]:
return parse_timedelta(time)
raise e
return datetime.timedelta(
hours=t.hour, minutes=t.minute, seconds=t.second, microseconds=t.microsecond
)
@ -201,7 +212,7 @@ def get_time_zone():
return frappe.cache().get_value("time_zone", _get_time_zone)
def convert_utc_to_timezone(utc_timestamp, time_zone):
from pytz import timezone, UnknownTimeZoneError
from pytz import UnknownTimeZoneError, timezone
utcnow = timezone('UTC').localize(utc_timestamp)
try:
return utcnow.astimezone(timezone(time_zone))
@ -318,17 +329,24 @@ def get_year_ending(date):
# last day of this month
return add_to_date(date, days=-1)
def get_time(time_str):
def get_time(time_str: str) -> datetime.time:
from dateutil import parser
from dateutil.parser import ParserError
if isinstance(time_str, datetime.datetime):
return time_str.time()
elif isinstance(time_str, datetime.time):
return time_str
else:
if isinstance(time_str, datetime.timedelta):
time_str = str(time_str)
elif isinstance(time_str, datetime.timedelta):
return (datetime.datetime.min + time_str).time()
try:
return parser.parse(time_str).time()
except ParserError as e:
if "day" in e.args[1] or "hour must be in" in e.args[0]:
return (
datetime.datetime.min + parse_timedelta(time_str)
).time()
raise e
def get_datetime_str(datetime_obj):
if isinstance(datetime_obj, str):
@ -610,7 +628,7 @@ def cast(fieldtype, value=None):
value = flt(value)
elif fieldtype in ("Int", "Check"):
value = cint(value)
value = cint(sbool(value))
elif fieldtype in ("Data", "Text", "Small Text", "Long Text",
"Text Editor", "Select", "Link", "Dynamic Link"):
@ -726,7 +744,7 @@ def ceil(s):
def cstr(s, encoding='utf-8'):
return frappe.as_unicode(s, encoding)
def sbool(x):
def sbool(x: str) -> Union[bool, Any]:
"""Converts str object to Boolean if possible.
Example:
"true" becomes True
@ -737,12 +755,15 @@ def sbool(x):
x (str): String to be converted to Bool
Returns:
object: Returns Boolean or type(x)
object: Returns Boolean or x
"""
from distutils.util import strtobool
try:
return bool(strtobool(x))
val = x.lower()
if val in ('true', '1'):
return True
elif val in ('false', '0'):
return False
return x
except Exception:
return x
@ -917,13 +938,13 @@ number_format_info = {
"#.########": (".", "", 8)
}
def get_number_format_info(format):
def get_number_format_info(format: str) -> Tuple[str, str, int]:
return number_format_info.get(format) or (".", ",", 2)
#
# convert currency to words
#
def money_in_words(number, main_currency = None, fraction_currency=None):
def money_in_words(number: str, main_currency: Optional[str] = None, fraction_currency: Optional[str] = None):
"""
Returns string in words with currency and fraction currency.
"""
@ -1009,9 +1030,11 @@ def is_image(filepath):
def get_thumbnail_base64_for_image(src):
from os.path import exists as file_exists
from PIL import Image
from frappe import cache, safe_decode
from frappe.core.doctype.file.file import get_local_image
from frappe import safe_decode, cache
if not src:
frappe.throw('Invalid source for image: {0}'.format(src))
@ -1302,7 +1325,7 @@ operator_map = {
"None": lambda a, b: (not a) and True or False
}
def evaluate_filters(doc, filters):
def evaluate_filters(doc, filters: Union[Dict, List, Tuple]):
'''Returns true if doc matches filters'''
if isinstance(filters, dict):
for key, value in filters.items():
@ -1319,7 +1342,7 @@ def evaluate_filters(doc, filters):
return True
def compare(val1, condition, val2, fieldtype=None):
def compare(val1: Any, condition: str, val2: Any, fieldtype: Optional[str] = None):
ret = False
if fieldtype:
val2 = cast(fieldtype, val2)
@ -1328,7 +1351,7 @@ def compare(val1, condition, val2, fieldtype=None):
return ret
def get_filter(doctype, f, filters_config=None):
def get_filter(doctype: str, f: Union[Dict, List, Tuple], filters_config=None) -> "frappe._dict":
"""Returns a _dict like
{
@ -1415,8 +1438,10 @@ def make_filter_dict(filters):
return _filter
def sanitize_column(column_name):
from frappe import _
import sqlparse
from frappe import _
regex = re.compile("^.*[,'();].*")
column_name = sqlparse.format(column_name, strip_comments=True, keyword_case="lower")
blacklisted_keywords = ['select', 'create', 'insert', 'delete', 'drop', 'update', 'case', 'and', 'or']
@ -1492,9 +1517,10 @@ def strip(val, chars=None):
return (val or "").replace("\ufeff", "").replace("\u200b", "").strip(chars)
def to_markdown(html):
from html2text import html2text
from html.parser import HTMLParser
from html2text import html2text
text = None
try:
text = html2text(html or '')
@ -1504,7 +1530,8 @@ def to_markdown(html):
return text
def md_to_html(markdown_text):
from markdown2 import markdown as _markdown, MarkdownError
from markdown2 import MarkdownError
from markdown2 import markdown as _markdown
extras = {
'fenced-code-blocks': None,
@ -1529,14 +1556,14 @@ def md_to_html(markdown_text):
def markdown(markdown_text):
return md_to_html(markdown_text)
def is_subset(list_a, list_b):
def is_subset(list_a: List, list_b: List) -> bool:
'''Returns whether list_a is a subset of list_b'''
return len(list(set(list_a) & set(list_b))) == len(list_a)
def generate_hash(*args, **kwargs):
def generate_hash(*args, **kwargs) -> str:
return frappe.generate_hash(*args, **kwargs)
def guess_date_format(date_string):
def guess_date_format(date_string: str) -> str:
DATE_FORMATS = [
r"%d/%b/%y",
r"%d-%m-%Y",
@ -1611,13 +1638,13 @@ def guess_date_format(date_string):
if date_format and time_format:
return (date_format + ' ' + time_format).strip()
def validate_json_string(string):
def validate_json_string(string: str) -> None:
try:
json.loads(string)
except (TypeError, ValueError):
raise frappe.ValidationError
def get_user_info_for_avatar(user_id):
def get_user_info_for_avatar(user_id: str) -> Dict:
user_info = {
"email": user_id,
"image": "",
@ -1664,3 +1691,30 @@ class UnicodeWithAttrs(str):
def __init__(self, text):
self.toc_html = text.toc_html
self.metadata = text.metadata
def format_timedelta(o: datetime.timedelta) -> str:
	"""Render a ``datetime.timedelta`` as an ``H:MM:SS[.ffffff]`` string.

	mariadb allows a wide diff range - https://mariadb.com/kb/en/time/
	but frappe doesn't - i think via babel: only allows 0..23 range for hour,
	so hours are emitted as the raw total (may exceed 23) rather than wrapping.
	"""
	secs_total = o.total_seconds()
	full_hours, leftover = divmod(secs_total, 3600)
	full_minutes, secs = divmod(leftover, 60)
	# Collapse the fractional part when it is zero (to 6 decimal places),
	# so whole seconds print as e.g. "03" instead of "3.0".
	if round(secs, 6) == int(secs):
		secs = int(secs)
	else:
		secs = round(secs, 6)
	return "{:01}:{:02}:{:02}".format(int(full_hours), int(full_minutes), secs)
def parse_timedelta(s: str) -> datetime.timedelta:
	"""Parse a MariaDB-style TIME string (``H:MM:SS[.ffffff]``, optionally
	prefixed with ``"<days> day(s), "``) into a ``datetime.timedelta``.

	NOTE(review): a non-matching input raises AttributeError (``m`` is None)
	— this mirrors the historical behaviour and is left intact.
	"""
	# ref: https://stackoverflow.com/a/21074460/10309266
	if 'day' in s:
		pattern = r"(?P<days>[-\d]+) day[s]*, (?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d[\.\d+]*)"
	else:
		pattern = r"(?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d[\.\d+]*)"
	match = re.match(pattern, s)
	parts = {unit: float(amount) for unit, amount in match.groupdict().items()}
	return datetime.timedelta(**parts)

View file

@ -3,9 +3,11 @@
import frappe
import datetime
from frappe.utils import formatdate, fmt_money, flt, cstr, cint, format_datetime, format_time, format_duration
from frappe.utils import formatdate, fmt_money, flt, cstr, cint, format_datetime, format_time, format_duration, format_timedelta
from frappe.model.meta import get_field_currency, get_field_precision
import re
from dateutil.parser import ParserError
def format_value(value, df=None, doc=None, currency=None, translated=False, format=None):
'''Format value based on given fieldtype, document reference, currency reference.
@ -47,7 +49,10 @@ def format_value(value, df=None, doc=None, currency=None, translated=False, form
return format_datetime(value)
elif df.get("fieldtype")=="Time":
return format_time(value)
try:
return format_time(value)
except ParserError:
return format_timedelta(value)
elif value==0 and df.get("fieldtype") in ("Int", "Float", "Currency", "Percent") and df.get("print_hide_if_no_value"):
# this is required to show 0 as blank in table columns

View file

@ -1,4 +1,4 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import json
@ -16,7 +16,7 @@ from werkzeug.local import LocalProxy
from werkzeug.wsgi import wrap_file
from werkzeug.wrappers import Response
from werkzeug.exceptions import NotFound, Forbidden
from frappe.utils import cint
from frappe.utils import cint, format_timedelta
from urllib.parse import quote
from frappe.core.doctype.access_log.access_log import make_access_log
@ -122,12 +122,14 @@ def make_logs(response = None):
def json_handler(obj):
"""serialize non-serializable data for json"""
# serialize date
import collections.abc
from collections.abc import Iterable
if isinstance(obj, (datetime.date, datetime.timedelta, datetime.datetime, datetime.time)):
if isinstance(obj, (datetime.date, datetime.datetime, datetime.time)):
return str(obj)
elif isinstance(obj, datetime.timedelta):
return format_timedelta(obj)
elif isinstance(obj, decimal.Decimal):
return float(obj)
@ -138,7 +140,7 @@ def json_handler(obj):
doc = obj.as_dict(no_nulls=True)
return doc
elif isinstance(obj, collections.abc.Iterable):
elif isinstance(obj, Iterable):
return list(obj)
elif type(obj)==type or isinstance(obj, Exception):

View file

@ -92,18 +92,12 @@ $(".file-size").each(function() {
});
</script>
{{ include_script("controls.bundle.js") }}
{% if is_list %}
{# web form list #}
<script type="text/javascript" src="/assets/frappe/node_modules/moment/min/moment-with-locales.min.js"></script>
<script type="text/javascript" src="/assets/frappe/node_modules/moment-timezone/builds/moment-timezone-with-data.min.js"></script>
{% if is_list %} <!-- web form list -->
{{ include_script("dialog.bundle.js") }}
{{ include_script("web_form.bundle.js") }}
{{ include_script("bootstrap-4-web.bundle.js") }}
{% else %}
{# web form #}
{% else %} <!-- web form -->
{{ include_script("dialog.bundle.js") }}
<script type="text/javascript" src="/assets/frappe/node_modules/moment/min/moment-with-locales.min.js"></script>
<script type="text/javascript" src="/assets/frappe/node_modules/moment-timezone/builds/moment-timezone-with-data.min.js"></script>
<script type="text/javascript" src="/assets/frappe/node_modules/vue/dist/vue.js"></script>
<script>
Vue.prototype.__ = window.__;

View file

@ -6,6 +6,7 @@ from werkzeug.wsgi import wrap_file
import frappe
from frappe.website.page_renderers.base_renderer import BaseRenderer
from frappe.website.utils import is_binary_file
UNSUPPORTED_STATIC_PAGE_TYPES = ('html', 'md', 'js', 'xml', 'css', 'txt', 'py', 'json')
@ -20,21 +21,20 @@ class StaticPage(BaseRenderer):
return
for app in frappe.get_installed_apps():
file_path = frappe.get_app_path(app, 'www') + '/' + self.path
if os.path.isfile(file_path):
if os.path.isfile(file_path) and is_binary_file(file_path):
self.file_path = file_path
def can_render(self):
return self.is_valid_file_path() and self.file_path
def is_valid_file_path(self):
if ('.' not in self.path):
return False
extension = self.path.rsplit('.', 1)[-1]
if extension in UNSUPPORTED_STATIC_PAGE_TYPES:
return False
return True
def render(self):
# file descriptor to be left open, closed by middleware
f = open(self.file_path, 'rb')
response = Response(wrap_file(frappe.local.request.environ, f), direct_passthrough=True)
response.mimetype = mimetypes.guess_type(self.file_path)[0] or 'application/octet-stream'

View file

@ -7,7 +7,7 @@ from frappe.website.router import get_page_info
from frappe.website.page_renderers.base_template_page import BaseTemplatePage
from frappe.website.router import get_base_template
from frappe.website.utils import (extract_comment_tag, extract_title, get_next_link,
get_toc, get_frontmatter, cache_html, get_sidebar_items)
get_toc, get_frontmatter, is_binary_file, cache_html, get_sidebar_items)
WEBPAGE_PY_MODULE_PROPERTIES = ("base_template_path", "template", "no_cache", "sitemap", "condition_field")
@ -39,7 +39,7 @@ class TemplatePage(BaseTemplatePage):
for dirname in folders:
search_path = os.path.join(app_path, dirname, self.path)
for file_path in self.get_index_path_options(search_path):
if os.path.isfile(file_path):
if os.path.isfile(file_path) and not is_binary_file(file_path):
self.app = app
self.app_path = app_path
self.file_dir = dirname

View file

@ -1,10 +1,10 @@
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# Copyright (c) 2022, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import json
import mimetypes
import os
import re
from functools import wraps
from functools import lru_cache, wraps
from typing import Dict, Optional
import yaml
@ -88,7 +88,7 @@ def get_home_page():
# portal default
if not home_page:
home_page = frappe.db.get_value("Portal Settings", None, "default_portal_home")
home_page = frappe.db.get_single_value("Portal Settings", "default_portal_home")
# by hooks
if not home_page:
@ -96,7 +96,7 @@ def get_home_page():
# global
if not home_page:
home_page = frappe.db.get_value("Website Settings", None, "home_page")
home_page = frappe.db.get_single_value("Website Settings", "home_page")
if not home_page:
home_page = "login" if frappe.session.user == 'Guest' else "me"
@ -511,3 +511,11 @@ def add_preload_headers(response):
except Exception:
import traceback
traceback.print_exc()
@lru_cache()
def is_binary_file(path):
	"""Heuristically report whether the file at ``path`` is binary.

	Reads the first 1024 bytes and strips every byte commonly found in text
	(printable ASCII/Latin-1 plus BEL/BS/TAB/LF/FF/CR/ESC); anything left
	over marks the file as binary. Results are memoized per path via
	``lru_cache``, so a file that changes on disk keeps its first verdict
	for the process lifetime.
	"""
	# ref: https://stackoverflow.com/a/7392391/10309266
	text_bytes = bytearray({7, 8, 9, 10, 12, 13, 27} | set(range(0x20, 0x100)) - {0x7f})
	with open(path, 'rb') as handle:
		sample = handle.read(1024)
	return bool(sample.translate(None, text_bytes))

Binary file not shown.

After

Width:  |  Height:  |  Size: 158 KiB