diff --git a/.github/semantic.yml b/.github/semantic.yml new file mode 100644 index 0000000000..e1e53bc1a4 --- /dev/null +++ b/.github/semantic.yml @@ -0,0 +1,13 @@ +# Always validate the PR title AND all the commits +titleAndCommits: true + +# Allow use of Merge commits (eg on github: "Merge branch 'master' into feature/ride-unicorns") +# this is only relevant when using commitsOnly: true (or titleAndCommits: true) +allowMergeCommits: true + +# Allow use of Revert commits (eg on github: "Revert "feat: ride unicorns"") +# this is only relevant when using commitsOnly: true (or titleAndCommits: true) +allowRevertCommits: true + +# For allowed PR types: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json +# Tool Reference: https://github.com/zeke/semantic-pull-requests diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml new file mode 100644 index 0000000000..4b1147e79f --- /dev/null +++ b/.github/workflows/docker-release.yml @@ -0,0 +1,14 @@ +name: Trigger Docker build on release +on: + release: + types: [released] +jobs: + curl: + runs-on: ubuntu-latest + container: + image: alpine:latest + steps: + - name: curl + run: | + apk add curl bash + curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests diff --git a/.mergify.yml b/.mergify.yml index b145834cc4..582bbc2ee5 100644 --- a/.mergify.yml +++ b/.mergify.yml @@ -4,8 +4,7 @@ pull_request_rules: - status-success=Sider - status-success=Semantic Pull Request - status-success=Travis CI - Pull Request - - status-success=security/snyk - package.json (frappe) - - status-success=security/snyk - requirements.txt (frappe) + - status-success=security/snyk (frappe) - label!=don't-merge - label!=squash - "#approved-reviews-by>=1" @@ -17,8 +16,7 @@ 
pull_request_rules: - status-success=Sider - status-success=Semantic Pull Request - status-success=Travis CI - Pull Request - - status-success=security/snyk - package.json (frappe) - - status-success=security/snyk - requirements.txt (frappe) + - status-success=security/snyk (frappe) - label!=don't-merge - label=squash - "#approved-reviews-by>=1" diff --git a/.travis/roulette.py b/.travis/roulette.py index ce6c08be7c..4d83137199 100644 --- a/.travis/roulette.py +++ b/.travis/roulette.py @@ -27,8 +27,11 @@ if __name__ == "__main__": commit_range = os.environ.get("TRAVIS_COMMIT_RANGE") print("Build Type: {}".format(build_type)) print("Commit Range: {}".format(commit_range)) - - files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False) + + try: + files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False) + except Exception: + sys.exit(2) if "fatal" not in files_changed: files_list = files_changed.split() diff --git a/frappe/__init__.py b/frappe/__init__.py index 8f36c0c4d3..d644d2a473 100644 --- a/frappe/__init__.py +++ b/frappe/__init__.py @@ -490,7 +490,8 @@ def sendmail(recipients=[], sender="", subject="No Subject", message="No Message message = content or message if as_markdown: - message = frappe.utils.md_to_html(message) + from frappe.utils import md_to_html + message = md_to_html(message) if not delayed: now = True @@ -1145,8 +1146,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp def import_doc(path, ignore_links=False, ignore_insert=False, insert=False): """Import a file using Data Import.""" - from frappe.core.doctype.data_import import data_import - data_import.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert) + from frappe.core.doctype.data_import.data_import import import_doc + import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert) def copy_doc(doc, ignore_no_copy=True): """ No_copy fields 
also get copied.""" diff --git a/frappe/api.py b/frappe/api.py index b73ee4c128..6a09b795b0 100644 --- a/frappe/api.py +++ b/frappe/api.py @@ -159,14 +159,14 @@ def validate_auth(): authorization_type = authorization_header[0].lower() if len(authorization_header) == 1: - frappe.throw(_('Invalid Authorization headers, add a token with a prefix from one of the following: {0}.'.format(VALID_AUTH_PREFIX_STRING)), frappe.InvalidAuthorizationHeader) + frappe.throw(_('Invalid Authorization headers, add a token with a prefix from one of the following: {0}.').format(VALID_AUTH_PREFIX_STRING), frappe.InvalidAuthorizationHeader) if authorization_type == "bearer": validate_oauth(authorization_header) elif authorization_type in VALID_AUTH_PREFIX_TYPES: validate_auth_via_api_keys(authorization_header) else: - frappe.throw(_('Invalid Authorization Type {0}, must be one of {1}.'.format(authorization_type, VALID_AUTH_PREFIX_STRING)), frappe.InvalidAuthorizationPrefix) + frappe.throw(_('Invalid Authorization Type {0}, must be one of {1}.').format(authorization_type, VALID_AUTH_PREFIX_STRING), frappe.InvalidAuthorizationPrefix) def validate_oauth(authorization_header): @@ -245,5 +245,6 @@ def validate_api_key_secret(api_key, api_secret, frappe_authorization_source=Non ) else: user = frappe.db.get_value(doctype, doc, 'user') - frappe.set_user(user) + if frappe.local.login_manager.user in ('', 'Guest'): + frappe.set_user(user) frappe.local.form_dict = form_dict diff --git a/frappe/app.py b/frappe/app.py index 50d09177d6..57db867882 100644 --- a/frappe/app.py +++ b/frappe/app.py @@ -99,7 +99,7 @@ def application(request): frappe.monitor.stop(response) frappe.recorder.dump() - frappe.logger("web").info({ + frappe.logger("frappe.web").info({ "site": get_site_name(request.host), "remote_addr": getattr(request, "remote_addr", "NOTFOUND"), "base_url": getattr(request, "base_url", "NOTFOUND"), diff --git a/frappe/automation/doctype/assignment_rule/assignment_rule.py 
b/frappe/automation/doctype/assignment_rule/assignment_rule.py index bf45347c4f..78f05e7fe9 100644 --- a/frappe/automation/doctype/assignment_rule/assignment_rule.py +++ b/frappe/automation/doctype/assignment_rule/assignment_rule.py @@ -21,7 +21,7 @@ class AssignmentRule(Document): def on_update(self): # pylint: disable=no-self-use frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name) - def after_rename(self): # pylint: disable=no-self-use + def after_rename(self, old, new, merge): # pylint: disable=no-self-use frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name) def apply_unassign(self, doc, assignments): diff --git a/frappe/automation/doctype/auto_repeat/auto_repeat.py b/frappe/automation/doctype/auto_repeat/auto_repeat.py index c447c55727..a946fcc81c 100644 --- a/frappe/automation/doctype/auto_repeat/auto_repeat.py +++ b/frappe/automation/doctype/auto_repeat/auto_repeat.py @@ -146,7 +146,7 @@ class AutoRepeat(Document): def make_new_document(self): reference_doc = frappe.get_doc(self.reference_doctype, self.reference_document) - new_doc = frappe.copy_doc(reference_doc, ignore_no_copy = False) + new_doc = frappe.copy_doc(reference_doc) self.update_doc(new_doc, reference_doc) new_doc.insert(ignore_permissions = True) @@ -372,7 +372,8 @@ def make_auto_repeat(doctype, docname, frequency = 'Daily', start_date = None, e doc.save() return doc -#method for reference_doctype filter +# method for reference_doctype filter +@frappe.whitelist() def get_auto_repeat_doctypes(doctype, txt, searchfield, start, page_len, filters): res = frappe.db.get_all('Property Setter', { 'property': 'allow_auto_repeat', diff --git a/frappe/commands/site.py b/frappe/commands/site.py index 28e61282eb..0f51f21104 100755 --- a/frappe/commands/site.py +++ b/frappe/commands/site.py @@ -108,12 +108,14 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N @click.option('--install-app', multiple=True, help='Install app after installation') 
@click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file') @click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file') +@click.option('--force', is_flag=True, default=False, help='Use a bit of force to get the job done') @pass_context def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None): "Restore site database from an sql file" - from frappe.installer import extract_sql_gzip, extract_tar_files - # Extract the gzip file if user has passed *.sql.gz file instead of *.sql file + from frappe.installer import extract_sql_gzip, extract_tar_files, is_downgrade + force = context.force or force + # Extract the gzip file if user has passed *.sql.gz file instead of *.sql file if not os.path.exists(sql_file_path): base_path = '..' sql_file_path = os.path.join(base_path, sql_file_path) @@ -125,7 +127,6 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas else: base_path = '.' - if sql_file_path.endswith('sql.gz'): decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path)) else: @@ -133,10 +134,16 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas site = get_site(context) frappe.init(site=site) + + # dont allow downgrading to older versions of frappe without force + if not force and is_downgrade(decompressed_file_name, verbose=True): + warn_message = "This is not recommended and may lead to unexpected behaviour. Do you want to continue anyway?" 
+ click.confirm(warn_message, abort=True) + _new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username, mariadb_root_password=mariadb_root_password, admin_password=admin_password, verbose=context.verbose, install_apps=install_app, source_sql=decompressed_file_name, - force=True) + force=True, db_type=frappe.conf.db_type) # Extract public and/or private files to the restored site, if user has given the path if with_public_files: @@ -414,15 +421,16 @@ def remove_from_installed_apps(context, app): @click.argument('app') @click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True) @click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False) +@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False) @pass_context -def uninstall(context, app, dry_run=False, yes=False): +def uninstall(context, app, dry_run=False, yes=False, no_backup=False): "Remove app and linked modules from site" from frappe.installer import remove_app for site in context.sites: try: frappe.init(site=site) frappe.connect() - remove_app(app, dry_run, yes) + remove_app(app, dry_run, yes, no_backup) finally: frappe.destroy() if not context.sites: diff --git a/frappe/commands/utils.py b/frappe/commands/utils.py index f340e53b54..28b6344b8e 100644 --- a/frappe/commands/utils.py +++ b/frappe/commands/utils.py @@ -215,12 +215,12 @@ def export_doc(context, doctype, docname): @pass_context def export_json(context, doctype, path, name=None): "Export doclist as json to the given path, use '-' as name for Singles." 
- from frappe.core.doctype.data_import import data_import + from frappe.core.doctype.data_import.data_import import export_json for site in context.sites: try: frappe.init(site=site) frappe.connect() - data_import.export_json(doctype, path, name=name) + export_json(doctype, path, name=name) finally: frappe.destroy() if not context.sites: @@ -232,12 +232,12 @@ def export_json(context, doctype, path, name=None): @pass_context def export_csv(context, doctype, path): "Export data import template with data for DocType" - from frappe.core.doctype.data_import import data_import + from frappe.core.doctype.data_import.data_import import export_csv for site in context.sites: try: frappe.init(site=site) frappe.connect() - data_import.export_csv(doctype, path) + export_csv(doctype, path) finally: frappe.destroy() if not context.sites: @@ -264,7 +264,7 @@ def export_fixtures(context, app=None): @pass_context def import_doc(context, path, force=False): "Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported" - from frappe.core.doctype.data_import import data_import + from frappe.core.doctype.data_import.data_import import import_doc if not os.path.exists(path): path = os.path.join('..', path) @@ -276,7 +276,7 @@ def import_doc(context, path, force=False): try: frappe.init(site=site) frappe.connect() - data_import.import_doc(path, overwrite=context.force) + import_doc(path, overwrite=context.force) finally: frappe.destroy() if not context.sites: @@ -293,7 +293,7 @@ def import_doc(context, path, force=False): @pass_context def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True): "Import CSV using data import" - from frappe.core.doctype.data_import import importer + from frappe.core.doctype.data_import_legacy import importer from frappe.utils.csvutils import read_csv_content site = get_site(context) @@ -329,20 +329,12 @@ def import_csv(context, path, only_insert=False, 
submit_after_import=False, igno @pass_context def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True): "Import documents in bulk from CSV or XLSX using data import" - from frappe.core.doctype.data_import_beta.importer import Importer + from frappe.core.doctype.data_import.data_import import import_file site = get_site(context) frappe.init(site=site) frappe.connect() - - data_import = frappe.new_doc('Data Import Beta') - data_import.submit_after_import = submit_after_import - data_import.mute_emails = mute_emails - data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records' - - i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True) - i.import_data() - + import_file(doctype, file_path, import_type, submit_after_import, console=True) frappe.destroy() diff --git a/frappe/contacts/doctype/contact/contact.js b/frappe/contacts/doctype/contact/contact.js index 5285f8b85c..fae6e6515e 100644 --- a/frappe/contacts/doctype/contact/contact.js +++ b/frappe/contacts/doctype/contact/contact.js @@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", { }); frm.refresh_field("links"); + let numbers = frm.doc.phone_nos; + if (numbers && numbers.length && frappe.phone_call.handler) { + frm.add_custom_button(__('Call'), () => { + numbers = frm.doc.phone_nos + .sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no) + .map(d => d.phone); + frappe.phone_call.handler(numbers); + }); + } + if (frm.doc.links) { frappe.call({ method: "frappe.contacts.doctype.contact.contact.address_query", diff --git a/frappe/core/doctype/data_export/exporter.py b/frappe/core/doctype/data_export/exporter.py index 6518c59653..e4d2ff2af6 100644 --- a/frappe/core/doctype/data_export/exporter.py +++ b/frappe/core/doctype/data_export/exporter.py @@ -9,7 +9,7 @@ import frappe.permissions import re, csv, os from frappe.utils.csvutils import UnicodeWriter from 
frappe.utils import cstr, formatdate, format_datetime, parse_json, cint -from frappe.core.doctype.data_import.importer import get_data_keys +from frappe.core.doctype.data_import_legacy.importer import get_data_keys from six import string_types from frappe.core.doctype.access_log.access_log import make_access_log diff --git a/frappe/core/doctype/data_import/README.md b/frappe/core/doctype/data_import/README.md deleted file mode 100644 index 7bd4ac809b..0000000000 --- a/frappe/core/doctype/data_import/README.md +++ /dev/null @@ -1 +0,0 @@ -Bulk import / update of data via file upload in Excel or CSV. \ No newline at end of file diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.css b/frappe/core/doctype/data_import/data_import.css similarity index 100% rename from frappe/core/doctype/data_import_beta/data_import_beta.css rename to frappe/core/doctype/data_import/data_import.css diff --git a/frappe/core/doctype/data_import/data_import.js b/frappe/core/doctype/data_import/data_import.js index 9391b262d7..6a922618cb 100644 --- a/frappe/core/doctype/data_import/data_import.js +++ b/frappe/core/doctype/data_import/data_import.js @@ -1,324 +1,518 @@ -// Copyright (c) 2017, Frappe Technologies and contributors +// Copyright (c) 2019, Frappe Technologies and contributors // For license information, please see license.txt frappe.ui.form.on('Data Import', { - onload: function(frm) { - if (frm.doc.__islocal) { - frm.set_value("action", ""); - } - - frappe.call({ - method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes", - callback: function (r) { - let importable_doctypes = r.message; - frm.set_query("reference_doctype", function () { - return { - "filters": { - "issingle": 0, - "istable": 0, - "name": ['in', importable_doctypes] - } - }; - }); + setup(frm) { + frappe.realtime.on('data_import_refresh', ({ data_import }) => { + frm.import_in_progress = false; + if (data_import !== frm.doc.name) return; + frappe.model.clear_doc('Data Import', 
frm.doc.name); + frappe.model.with_doc('Data Import', frm.doc.name).then(() => { + frm.refresh(); + }); + }); + frappe.realtime.on('data_import_progress', data => { + frm.import_in_progress = true; + if (data.data_import !== frm.doc.name) { + return; } - }), + let percent = Math.floor((data.current * 100) / data.total); + let seconds = Math.floor(data.eta); + let minutes = Math.floor(data.eta / 60); + let eta_message = + // prettier-ignore + seconds < 60 + ? __('About {0} seconds remaining', [seconds]) + : minutes === 1 + ? __('About {0} minute remaining', [minutes]) + : __('About {0} minutes remaining', [minutes]); - // should never check public - frm.fields_dict["import_file"].df.is_private = 1; + let message; + if (data.success) { + let message_args = [data.current, data.total, eta_message]; + message = + frm.doc.import_type === 'Insert New Records' + ? __('Importing {0} of {1}, {2}', message_args) + : __('Updating {0} of {1}, {2}', message_args); + } + if (data.skipping) { + message = __('Skipping {0} of {1}, {2}', [ + data.current, + data.total, + eta_message + ]); + } + frm.dashboard.show_progress(__('Import Progress'), percent, message); + frm.page.set_indicator(__('In Progress'), 'orange'); - frappe.realtime.on("data_import_progress", function(data) { - if (data.data_import === frm.doc.name) { - if (data.reload && data.reload === true) { - frm.reload_doc(); - } - if (data.progress) { - let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar"); - if (progress_bar) { - $(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped"); - $(progress_bar).css("width", data.progress + "%"); - } - } + // hide progress when complete + if (data.current === data.total) { + setTimeout(() => { + frm.dashboard.hide(); + frm.refresh(); + }, 2000); } }); + + frm.set_query('reference_doctype', () => { + return { + filters: { + name: ['in', frappe.boot.user.can_import] + } + }; + }); + + 
frm.get_field('import_file').df.options = { + restrictions: { + allowed_file_types: ['.csv', '.xls', '.xlsx'] + } + }; + + frm.has_import_file = () => { + return frm.doc.import_file || frm.doc.google_sheets_url; + }; }, - reference_doctype: function(frm){ - if (frm.doc.reference_doctype) { - frappe.model.with_doctype(frm.doc.reference_doctype); + refresh(frm) { + frm.page.hide_icon_group(); + frm.trigger('update_indicators'); + frm.trigger('import_file'); + frm.trigger('show_import_log'); + frm.trigger('show_import_warnings'); + frm.trigger('toggle_submit_after_import'); + frm.trigger('show_import_status'); + frm.trigger('show_report_error_button'); + + if (frm.doc.status === 'Partial Success') { + frm.add_custom_button(__('Export Errored Rows'), () => + frm.trigger('export_errored_rows') + ); + } + + if (frm.doc.status.includes('Success')) { + frm.add_custom_button( + __('Go to {0} List', [frm.doc.reference_doctype]), + () => frappe.set_route('List', frm.doc.reference_doctype) + ); } }, - refresh: function(frm) { + onload_post_render(frm) { + frm.trigger('update_primary_action'); + }, + + update_primary_action(frm) { + if (frm.is_dirty()) { + frm.enable_save(); + return; + } frm.disable_save(); - frm.dashboard.clear_headline(); - if (frm.doc.reference_doctype && !frm.doc.import_file) { - frm.page.set_indicator(__('Attach file'), 'orange'); - } else { - if (frm.doc.import_status) { - const listview_settings = frappe.listview_settings['Data Import']; - const indicator = listview_settings.get_indicator(frm.doc); - - frm.page.set_indicator(indicator[0], indicator[1]); - - if (frm.doc.import_status === "In Progress") { - frm.dashboard.add_progress("Data Import Progress", "0"); - frm.set_read_only(); - frm.refresh_fields(); - } + if (frm.doc.status !== 'Success') { + if (!frm.is_new() && (frm.has_import_file())) { + let label = + frm.doc.status === 'Pending' ? 
__('Start Import') : __('Retry'); + frm.page.set_primary_action(label, () => frm.events.start_import(frm)); + } else { + frm.page.set_primary_action(__('Save'), () => frm.save()); } } + }, - if (frm.doc.reference_doctype) { - frappe.model.with_doctype(frm.doc.reference_doctype); + update_indicators(frm) { + const indicator = frappe.get_indicator(frm.doc); + if (indicator) { + frm.page.set_indicator(indicator[0], indicator[1]); + } else { + frm.page.clear_indicator(); } + }, - if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") { - frm.set_df_property("action", "read_only", 1); + show_import_status(frm) { + let import_log = JSON.parse(frm.doc.import_log || '[]'); + let successful_records = import_log.filter(log => log.success); + let failed_records = import_log.filter(log => !log.success); + if (successful_records.length === 0) return; + + let message; + if (failed_records.length === 0) { + let message_args = [successful_records.length]; + if (frm.doc.import_type === 'Insert New Records') { + message = + successful_records.length > 1 + ? __('Successfully imported {0} records.', message_args) + : __('Successfully imported {0} record.', message_args); + } else { + message = + successful_records.length > 1 + ? __('Successfully updated {0} records.', message_args) + : __('Successfully updated {0} record.', message_args); + } + } else { + let message_args = [successful_records.length, import_log.length]; + if (frm.doc.import_type === 'Insert New Records') { + message = + successful_records.length > 1 + ? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args) + : __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args); + } else { + message = + successful_records.length > 1 + ? __('Successfully updated {0} records out of {1}. 
Click on Export Errored Rows, fix the errors and import again.', message_args) + : __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args); + } } + frm.dashboard.set_headline(message); + }, - frm.add_custom_button(__("Help"), function() { - frappe.help.show_video("6wiriRKPhmg"); + show_report_error_button(frm) { + if (frm.doc.status === 'Error') { + frappe.db + .get_list('Error Log', { + filters: { method: frm.doc.name }, + fields: ['method', 'error'], + order_by: 'creation desc', + limit: 1 + }) + .then(result => { + if (result.length > 0) { + frm.add_custom_button('Report Error', () => { + let fake_xhr = { + responseText: JSON.stringify({ + exc: result[0].error + }) + }; + frappe.request.report_error(fake_xhr, {}); + }); + } + }); + } + }, + + start_import(frm) { + frm + .call({ + method: 'form_start_import', + args: { data_import: frm.doc.name }, + btn: frm.page.btn_primary + }) + .then(r => { + if (r.message === true) { + frm.disable_save(); + } + }); + }, + + download_template(frm) { + frappe.require('/assets/js/data_import_tools.min.js', () => { + frm.data_exporter = new frappe.data_import.DataExporter( + frm.doc.reference_doctype, + frm.doc.import_type + ); }); + }, - if (frm.doc.reference_doctype && frm.doc.docstatus === 0) { - frm.add_custom_button(__("Download template"), function() { - frappe.data_import.download_dialog(frm).show(); + reference_doctype(frm) { + frm.trigger('toggle_submit_after_import'); + }, + + toggle_submit_after_import(frm) { + frm.toggle_display('submit_after_import', false); + let doctype = frm.doc.reference_doctype; + if (doctype) { + frappe.model.with_doctype(doctype, () => { + let meta = frappe.get_meta(doctype); + frm.toggle_display('submit_after_import', meta.is_submittable); }); } + }, - if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows && - frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) { 
- frm.page.set_primary_action(__("Start Import"), function() { - frappe.call({ - btn: frm.page.btn_primary, - method: "frappe.core.doctype.data_import.data_import.import_data", - args: { - data_import: frm.doc.name - } - }); - }).addClass('btn btn-primary'); - } - - if (frm.doc.log_details) { - frm.events.create_log_table(frm); + google_sheets_url(frm) { + if (!frm.is_dirty()) { + frm.trigger('import_file'); } else { - $(frm.fields_dict.import_log.wrapper).empty(); + frm.trigger('update_primary_action'); } }, - action: function(frm) { - if(!frm.doc.action) return; - if(!frm.doc.reference_doctype) { - frappe.msgprint(__("Please select document type first.")); - frm.set_value("action", ""); + refresh_google_sheet(frm) { + frm.trigger('import_file'); + }, + + import_file(frm) { + frm.toggle_display('section_import_preview', frm.has_import_file()); + if (!frm.has_import_file()) { + frm.get_field('import_preview').$wrapper.empty(); + return; + } else { + frm.trigger('update_primary_action'); + } + + // load import preview + frm.get_field('import_preview').$wrapper.empty(); + $('') + .html(__('Loading import file...')) + .appendTo(frm.get_field('import_preview').$wrapper); + + frm + .call({ + method: 'get_preview_from_template', + args: { + data_import: frm.doc.name, + import_file: frm.doc.import_file, + google_sheets_url: frm.doc.google_sheets_url + }, + error_handlers: { + TimestampMismatchError() { + // ignore this error + } + } + }) + .then(r => { + let preview_data = r.message; + frm.events.show_import_preview(frm, preview_data); + frm.events.show_import_warnings(frm, preview_data); + }); + }, + + show_import_preview(frm, preview_data) { + let import_log = JSON.parse(frm.doc.import_log || '[]'); + + if ( + frm.import_preview && + frm.import_preview.doctype === frm.doc.reference_doctype + ) { + frm.import_preview.preview_data = preview_data; + frm.import_preview.import_log = import_log; + frm.import_preview.refresh(); return; } - if(frm.doc.action == "Insert new 
records") { - frm.doc.insert_new = 1; - } else if (frm.doc.action == "Update records"){ - frm.doc.overwrite = 1; + frappe.require('/assets/js/data_import_tools.min.js', () => { + frm.import_preview = new frappe.data_import.ImportPreview({ + wrapper: frm.get_field('import_preview').$wrapper, + doctype: frm.doc.reference_doctype, + preview_data, + import_log, + frm, + events: { + remap_column(changed_map) { + let template_options = JSON.parse(frm.doc.template_options || '{}'); + template_options.column_to_field_map = template_options.column_to_field_map || {}; + Object.assign(template_options.column_to_field_map, changed_map); + frm.set_value('template_options', JSON.stringify(template_options)); + frm.save().then(() => frm.trigger('import_file')); + } + } + }); + }); + }, + + export_errored_rows(frm) { + open_url_post( + '/api/method/frappe.core.doctype.data_import.data_import.download_errored_template', + { + data_import_name: frm.doc.name + } + ); + }, + + show_import_warnings(frm, preview_data) { + let warnings = JSON.parse(frm.doc.template_warnings || '[]'); + warnings = warnings.concat(preview_data.warnings || []); + + frm.toggle_display('import_warnings_section', warnings.length > 0); + if (warnings.length === 0) { + frm.get_field('import_warnings').$wrapper.html(''); + return; } - frm.save(); + + // group warnings by row + let warnings_by_row = {}; + let other_warnings = []; + for (let warning of warnings) { + if (warning.row) { + warnings_by_row[warning.row] = warnings_by_row[warning.row] || []; + warnings_by_row[warning.row].push(warning); + } else { + other_warnings.push(warning); + } + } + + let html = ''; + html += Object.keys(warnings_by_row) + .map(row_number => { + let message = warnings_by_row[row_number] + .map(w => { + if (w.field) { + let label = + w.field.label + + (w.field.parent !== frm.doc.reference_doctype + ? ` (${w.field.parent})` + : ''); + return `
  • ${label}: ${w.message}
  • `; + } + return `
  • ${w.message}
  • `; + }) + .join(''); + return ` +
    +
    ${__('Row {0}', [row_number])}
    +
      ${message}
    +
    + `; + }) + .join(''); + + html += other_warnings + .map(warning => { + let header = ''; + if (warning.col) { + header = __('Column {0}', [warning.col]); + } + return ` +
    +
    ${header}
    +
    ${warning.message}
    +
    + `; + }) + .join(''); + frm.get_field('import_warnings').$wrapper.html(` +
    +
    ${html}
    +
    + `); }, - only_update: function(frm) { - frm.save(); + show_failed_logs(frm) { + frm.trigger('show_import_log'); }, - submit_after_import: function(frm) { - frm.save(); + show_import_log(frm) { + let import_log = JSON.parse(frm.doc.import_log || '[]'); + let logs = import_log; + frm.toggle_display('import_log', false); + frm.toggle_display('import_log_section', logs.length > 0); + + if (logs.length === 0) { + frm.get_field('import_log_preview').$wrapper.empty(); + return; + } + + let rows = logs + .map(log => { + let html = ''; + if (log.success) { + if (frm.doc.import_type === 'Insert New Records') { + html = __('Successfully imported {0}', [ + `${frappe.utils.get_form_link( + frm.doc.reference_doctype, + log.docname, + true + )}` + ]); + } else { + html = __('Successfully updated {0}', [ + `${frappe.utils.get_form_link( + frm.doc.reference_doctype, + log.docname, + true + )}` + ]); + } + } else { + let messages = log.messages + .map(JSON.parse) + .map(m => { + let title = m.title ? `${m.title}` : ''; + let message = m.message ? `
    ${m.message}
    ` : ''; + return title + message; + }) + .join(''); + let id = frappe.dom.get_unique_id(); + html = `${messages} + +
    +
    +
    ${log.exception}
    +
    +
    `; + } + let indicator_color = log.success ? 'green' : 'red'; + let title = log.success ? __('Success') : __('Failure'); + + if (frm.doc.show_failed_logs && log.success) { + return ''; + } + + return ` + ${log.row_indexes.join(', ')} + +
    ${title}
    + + + ${html} + + `; + }) + .join(''); + + if (!rows && frm.doc.show_failed_logs) { + rows = ` + ${__('No failed logs')} + `; + } + + frm.get_field('import_log_preview').$wrapper.html(` + + + + + + + ${rows} +
    ${__('Row Number')}${__('Status')}${__('Message')}
    + `); }, - skip_errors: function(frm) { - frm.save(); - }, + show_missing_link_values(frm, missing_link_values) { + let can_be_created_automatically = missing_link_values.every( + d => d.has_one_mandatory_field + ); - ignore_encoding_errors: function(frm) { - frm.save(); - }, + let html = missing_link_values + .map(d => { + let doctype = d.doctype; + let values = d.missing_values; + return ` +
    ${doctype}
    +
      ${values.map(v => `
    • ${v}
    • `).join('')}
    + `; + }) + .join(''); - no_email: function(frm) { - frm.save(); - }, - - show_only_errors: function(frm) { - frm.events.create_log_table(frm); - }, - - create_log_table: function(frm) { - let msg = JSON.parse(frm.doc.log_details); - var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty(); - $(frappe.render_template("log_details", { - data: msg.messages, - import_status: frm.doc.import_status, - show_only_errors: frm.doc.show_only_errors, - })).appendTo($log_wrapper); + if (can_be_created_automatically) { + // prettier-ignore + let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?'); + frappe.confirm(message + html, () => { + frm + .call('create_missing_link_values', { + missing_link_values + }) + .then(r => { + let records = r.message; + frappe.msgprint( + __('Created {0} records successfully.', [records.length]) + ); + }); + }); + } else { + frappe.msgprint( + // prettier-ignore + __('The following records needs to be created before we can import your file.') + html + ); + } } }); - -frappe.provide('frappe.data_import'); -frappe.data_import.download_dialog = function(frm) { - var dialog; - const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden; - const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields); - - const get_doctype_checkbox_fields = () => { - return dialog.fields.filter(df => df.fieldname.endsWith('_fields')) - .map(df => dialog.fields_dict[df.fieldname]); - }; - - const doctype_fields = get_fields(frm.doc.reference_doctype) - .map(df => { - let reqd = (df.reqd || df.fieldname == 'naming_series') ? 
1 : 0; - return { - label: df.label, - reqd: reqd, - danger: reqd, - value: df.fieldname, - checked: 1 - }; - }); - - let fields = [ - { - "label": __("Select Columns"), - "fieldname": "select_columns", - "fieldtype": "Select", - "options": "All\nMandatory\nManually", - "reqd": 1, - "onchange": function() { - const fields = get_doctype_checkbox_fields(); - fields.map(f => f.toggle(true)); - if(this.value == 'Mandatory' || this.value == 'Manually') { - checkbox_toggle(true); - fields.map(multicheck_field => { - multicheck_field.options.map(option => { - if(!option.reqd) return; - $(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`) - .prop('checked', false) - .trigger('click'); - }); - }); - } else if(this.value == 'All'){ - $(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`) - .prop('disabled', true); - } - } - }, - { - "label": __("File Type"), - "fieldname": "file_type", - "fieldtype": "Select", - "options": "Excel\nCSV", - "default": "Excel" - }, - { - "label": __("Download with Data"), - "fieldname": "with_data", - "fieldtype": "Check", - "hidden": !frm.doc.overwrite, - "default": 1 - }, - { - "label": __("Select All"), - "fieldname": "select_all", - "fieldtype": "Button", - "depends_on": "eval:doc.select_columns=='Manually'", - click: function() { - checkbox_toggle(); - } - }, - { - "label": __("Unselect All"), - "fieldname": "unselect_all", - "fieldtype": "Button", - "depends_on": "eval:doc.select_columns=='Manually'", - click: function() { - checkbox_toggle(true); - } - }, - { - "label": frm.doc.reference_doctype, - "fieldname": "doctype_fields", - "fieldtype": "MultiCheck", - "options": doctype_fields, - "columns": 2, - "hidden": 1 - } - ]; - - const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype) - .map(df => { - return { - "label": df.options, - "fieldname": df.fieldname + '_fields', - "fieldtype": "MultiCheck", - "options": frappe.meta.get_docfields(df.options) - .filter(filter_fields) - 
.map(df => ({ - label: df.label, - reqd: df.reqd ? 1 : 0, - value: df.fieldname, - checked: 1, - danger: df.reqd - })), - "columns": 2, - "hidden": 1 - }; - }); - - fields = fields.concat(child_table_fields); - - dialog = new frappe.ui.Dialog({ - title: __('Download Template'), - fields: fields, - primary_action: function(values) { - var data = values; - if (frm.doc.reference_doctype) { - var export_params = () => { - let columns = {}; - if(values.select_columns) { - columns = get_doctype_checkbox_fields().reduce((columns, field) => { - const options = field.get_checked_options(); - columns[field.df.label] = options; - return columns; - }, {}); - } - - return { - doctype: frm.doc.reference_doctype, - parent_doctype: frm.doc.reference_doctype, - select_columns: JSON.stringify(columns), - with_data: frm.doc.overwrite && data.with_data, - all_doctypes: true, - file_type: data.file_type, - template: true - }; - }; - let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data'; - open_url_post(get_template_url, export_params()); - } else { - frappe.msgprint(__("Please select the Document Type.")); - } - dialog.hide(); - }, - primary_action_label: __('Download') - }); - - $(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]') - .wrapAll('
    '); - const button_container = $(dialog.body).find('.inline-buttons'); - button_container.addClass('flex'); - $(button_container).find('.frappe-control').map((index, button) => { - $(button).css({"margin-right": "1em"}); - }); - - function checkbox_toggle(checked=false) { - $(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => { - $(element).find(`:checkbox`).prop("checked", checked).trigger('click'); - }); - } - - return dialog; -}; diff --git a/frappe/core/doctype/data_import/data_import.json b/frappe/core/doctype/data_import/data_import.json index 11c8368e00..8b1b6c4e07 100644 --- a/frappe/core/doctype/data_import/data_import.json +++ b/frappe/core/doctype/data_import/data_import.json @@ -1,767 +1,192 @@ { - "allow_copy": 1, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, - "autoname": "", - "beta": 0, - "creation": "2016-12-09 14:27:32.720061", - "custom": 0, - "docstatus": 0, - "doctype": "DocType", - "document_type": "Document", - "editable_grid": 1, - "engine": "InnoDB", + "actions": [], + "autoname": "format:{reference_doctype} Import on {creation}", + "beta": 1, + "creation": "2019-08-04 14:16:08.318714", + "doctype": "DocType", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "import_type", + "download_template", + "import_file", + "html_5", + "google_sheets_url", + "refresh_google_sheet", + "column_break_5", + "status", + "submit_after_import", + "mute_emails", + "template_options", + "import_warnings_section", + "template_warnings", + "import_warnings", + "section_import_preview", + "import_preview", + "import_log_section", + "import_log", + "show_failed_logs", + "import_log_preview" + ], "fields": [ { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", - "fieldname": "reference_doctype", - "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 1, - 
"ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 1, - "in_standard_filter": 0, - "label": "Document Type", - "length": 0, - "no_copy": 0, - "options": "DocType", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "action", - "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Action", - "length": 0, - "no_copy": 0, - "options": "Insert new records\nUpdate records", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, + "fieldname": "reference_doctype", + "fieldtype": "Link", + "in_list_view": 1, + "label": "Document Type", + "options": "DocType", "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "set_only_once": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "eval:!doc.overwrite", - "description": "New data will be inserted.", - "fieldname": "insert_new", - "fieldtype": "Check", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Insert new records", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - 
"reqd": 0, - "search_index": 0, - "set_only_once": 1, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_type", + "fieldtype": "Select", + "in_list_view": 1, + "label": "Import Type", + "options": "\nInsert New Records\nUpdate Existing Records", + "reqd": 1, + "set_only_once": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "eval:!doc.insert_new", - "description": "If you are updating/overwriting already created records.", - "fieldname": "overwrite", - "fieldtype": "Check", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Update records", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 1, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "overwrite", - "description": "If you don't want to create any new records while updating the older records.", - "fieldname": "only_update", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Don't create new records", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - 
"bold": 0, - "collapsible": 0, - "collapsible_depends_on": "", - "columns": 0, - "depends_on": "eval:(!doc.__islocal)", - "fieldname": "section_break_4", - "fieldtype": "Section Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", + "depends_on": "eval:!doc.__islocal", "fieldname": "import_file", "fieldtype": "Attach", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Attach file for Import", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "in_list_view": 1, + "label": "Import File" }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "column_break_4", - "fieldtype": "Column Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - 
"set_only_once": 0, - "translatable": 0, - "unique": 0 + "fieldname": "import_preview", + "fieldtype": "HTML", + "label": "Import Preview" }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "eval: doc.import_status == \"Partially Successful\"", - "description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.", - "fieldname": "error_file", - "fieldtype": "Attach", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Generated File", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "collapsible_depends_on": "", - "columns": 0, - "depends_on": "eval:(!doc.__islocal)", - "fieldname": "section_break_6", + "fieldname": "section_import_preview", "fieldtype": "Section Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "label": "Preview" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "description": "If this is checked, rows with valid data 
will be imported and invalid rows will be dumped into a new file for you to import later.", - "fieldname": "skip_errors", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Skip rows with errors", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "column_break_5", + "fieldtype": "Column Break" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "", - "fieldname": "submit_after_import", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Submit after importing", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "template_options", + "fieldtype": "Code", + "hidden": 1, + "label": "Template Options", + "options": "JSON", + "read_only": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "", - "fieldname": "ignore_encoding_errors", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Ignore encoding errors", 
- "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_log", + "fieldtype": "Code", + "label": "Import Log", + "options": "JSON" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "1", - "depends_on": "", - "fieldname": "no_email", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Do not send Emails", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_log_section", + "fieldtype": "Section Break", + "label": "Import Log" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 1, - "collapsible_depends_on": "eval: doc.import_status == \"Failed\"", - "columns": 0, - "depends_on": "import_status", - "fieldname": "import_detail", - "fieldtype": "Section Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Import Log", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": 
"import_log_preview", + "fieldtype": "HTML", + "label": "Import Log Preview" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", - "fieldname": "import_status", - "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Import Status", - "length": 0, - "no_copy": 0, - "options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "default": "Pending", + "fieldname": "status", + "fieldtype": "Select", + "hidden": 1, + "label": "Status", + "options": "Pending\nSuccess\nPartial Success\nError", + "read_only": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 1, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "1", - "fieldname": "show_only_errors", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Show only errors", - "length": 0, - "no_copy": 1, - "permlevel": 0, - "precision": "", - "print_hide": 1, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "template_warnings", + "fieldtype": "Code", + "hidden": 1, + "label": "Template Warnings", + "options": "JSON" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 1, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "", - 
"depends_on": "import_status", - "fieldname": "import_log", - "fieldtype": "HTML", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Import Log", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "default": "0", + "fieldname": "submit_after_import", + "fieldtype": "Check", + "label": "Submit After Import", + "set_only_once": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 1, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", - "fieldname": "log_details", - "fieldtype": "Code", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Log Details", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_warnings_section", + "fieldtype": "Section Break", + "label": "Import File Errors and Warnings" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "amended_from", - "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Amended From", - "length": 0, - "no_copy": 1, - "options": "Data Import", - "permlevel": 0, - "print_hide": 1, - 
"print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_warnings", + "fieldtype": "HTML", + "label": "Import Warnings" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "total_rows", - "fieldtype": "Int", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Total Rows", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "depends_on": "eval:!doc.__islocal", + "fieldname": "download_template", + "fieldtype": "Button", + "label": "Download Template" + }, + { + "default": "1", + "fieldname": "mute_emails", + "fieldtype": "Check", + "label": "Don't Send Emails", + "set_only_once": 1 + }, + { + "default": "0", + "fieldname": "show_failed_logs", + "fieldtype": "Check", + "label": "Show Failed Logs" + }, + { + "depends_on": "eval:!doc.__islocal && !doc.import_file", + "fieldname": "html_5", + "fieldtype": "HTML", + "options": "
    Or
    " + }, + { + "depends_on": "eval:!doc.__islocal && !doc.import_file\n", + "description": "Must be a publicly accessible Google Sheets URL", + "fieldname": "google_sheets_url", + "fieldtype": "Data", + "label": "Import from Google Sheets" + }, + { + "depends_on": "eval:doc.google_sheets_url && !doc.__unsaved", + "fieldname": "refresh_google_sheet", + "fieldtype": "Button", + "label": "Refresh Google Sheet" } - ], - "has_web_view": 0, - "hide_heading": 0, - "hide_toolbar": 0, - "idx": 0, - "image_view": 0, - "in_create": 0, - "is_submittable": 1, - "issingle": 0, - "istable": 0, - "max_attachments": 1, - "modified": "2018-08-28 15:05:56.787108", - "modified_by": "Administrator", - "module": "Core", - "name": "Data Import", - "name_case": "", - "owner": "Administrator", + ], + "hide_toolbar": 1, + "links": [], + "modified": "2020-06-24 14:33:03.173876", + "modified_by": "Administrator", + "module": "Core", + "name": "Data Import", + "owner": "Administrator", "permissions": [ { - "amend": 0, - "cancel": 0, - "create": 1, - "delete": 1, - "email": 1, - "export": 0, - "if_owner": 0, - "import": 0, - "permlevel": 0, - "print": 0, - "read": 1, - "report": 0, - "role": "System Manager", - "set_user_permissions": 0, - "share": 1, - "submit": 1, + "create": 1, + "delete": 1, + "email": 1, + "export": 1, + "print": 1, + "read": 1, + "report": 1, + "role": "System Manager", + "share": 1, "write": 1 } - ], - "quick_entry": 0, - "read_only": 0, - "read_only_onload": 0, - "show_name_in_global_search": 0, - "sort_field": "modified", - "sort_order": "DESC", - "title_field": "", - "track_changes": 1, - "track_seen": 1, - "track_views": 0 -} + ], + "sort_field": "modified", + "sort_order": "DESC", + "track_changes": 1 +} \ No newline at end of file diff --git a/frappe/core/doctype/data_import/data_import.py b/frappe/core/doctype/data_import/data_import.py index ecf34d24b0..72de092461 100644 --- a/frappe/core/doctype/data_import/data_import.py +++ 
b/frappe/core/doctype/data_import/data_import.py @@ -1,54 +1,187 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017, Frappe Technologies and contributors +# Copyright (c) 2019, Frappe Technologies and contributors # For license information, please see license.txt from __future__ import unicode_literals -import frappe, os -from frappe import _ -import frappe.modules.import_file +import os +import frappe from frappe.model.document import Document -from frappe.utils.data import format_datetime -from frappe.core.doctype.data_import.importer import upload + +from frappe.core.doctype.data_import.importer import Importer +from frappe.core.doctype.data_import.exporter import Exporter from frappe.utils.background_jobs import enqueue +from frappe.utils.csvutils import validate_google_sheets_url +from frappe import _ class DataImport(Document): - def autoname(self): - if not self.name: - self.name = "Import on " +format_datetime(self.creation) - def validate(self): - if not self.import_file: - self.db_set("total_rows", 0) - if self.import_status == "In Progress": - frappe.throw(_("Can't save the form as data import is in progress.")) + doc_before_save = self.get_doc_before_save() + if ( + not (self.import_file or self.google_sheets_url) + or (doc_before_save and doc_before_save.import_file != self.import_file) + or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url) + ): + self.template_options = "" + self.template_warnings = "" - # validate the template just after the upload - # if there is total_rows in the doc, it means that the template is already validated and error free - if self.import_file and not self.total_rows: - upload(data_import_doc=self, from_data_import="Yes", validate_template=True) + self.validate_import_file() + self.validate_google_sheets_url() + + def validate_import_file(self): + if self.import_file: + # validate template + self.get_importer() + + def validate_google_sheets_url(self): + if not self.google_sheets_url: + return 
+ validate_google_sheets_url(self.google_sheets_url) + + def get_preview_from_template(self, import_file=None, google_sheets_url=None): + if import_file: + self.import_file = import_file + + if google_sheets_url: + self.google_sheets_url = google_sheets_url + + if not (self.import_file or self.google_sheets_url): + return + + i = self.get_importer() + return i.get_data_for_import_preview() + + def start_import(self): + from frappe.core.page.background_jobs.background_jobs import get_info + from frappe.utils.scheduler import is_scheduler_inactive + + if is_scheduler_inactive() and not frappe.flags.in_test: + frappe.throw( + _("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive") + ) + + enqueued_jobs = [d.get("job_name") for d in get_info()] + + if self.name not in enqueued_jobs: + enqueue( + start_import, + queue="default", + timeout=6000, + event="data_import", + job_name=self.name, + data_import=self.name, + now=frappe.conf.developer_mode or frappe.flags.in_test, + ) + return True + + return False + + def export_errored_rows(self): + return self.get_importer().export_errored_rows() + + def get_importer(self): + return Importer(self.reference_doctype, data_import=self) @frappe.whitelist() -def get_importable_doctypes(): - return frappe.cache().hget("can_import", frappe.session.user) +def get_preview_from_template(data_import, import_file=None, google_sheets_url=None): + return frappe.get_doc("Data Import", data_import).get_preview_from_template( + import_file, google_sheets_url + ) + @frappe.whitelist() -def import_data(data_import): - frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False) - frappe.publish_realtime("data_import_progress", {"progress": "0", - "data_import": data_import, "reload": True}, user=frappe.session.user) - - from frappe.core.page.background_jobs.background_jobs import get_info - enqueued_jobs = [d.get("job_name") for d in get_info()] - - if data_import not in 
enqueued_jobs: - enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import, - data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user) +def form_start_import(data_import): + return frappe.get_doc("Data Import", data_import).start_import() -def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, - insert=False, submit=False, pre_process=None): +def start_import(data_import): + """This method runs in background job""" + data_import = frappe.get_doc("Data Import", data_import) + try: + i = Importer(data_import.reference_doctype, data_import=data_import) + i.import_data() + except Exception: + frappe.db.rollback() + data_import.db_set("status", "Error") + frappe.log_error(title=data_import.name) + finally: + frappe.flags.in_import = False + + frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name}) + + +@frappe.whitelist() +def download_template( + doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV" +): + """ + Download template from Exporter + :param doctype: Document Type + :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']} + :param export_records=None: One of 'all', 'by_filter', 'blank_template' + :param export_filters: Filter dict + :param file_type: File type to export into + """ + + export_fields = frappe.parse_json(export_fields) + export_filters = frappe.parse_json(export_filters) + export_data = export_records != "blank_template" + + e = Exporter( + doctype, + export_fields=export_fields, + export_data=export_data, + export_filters=export_filters, + file_type=file_type, + export_page_length=5 if export_records == "5_records" else None, + ) + e.build_response() + + +@frappe.whitelist() +def download_errored_template(data_import_name): + data_import = frappe.get_doc("Data Import", data_import_name) + data_import.export_errored_rows() + + +def 
import_file( + doctype, file_path, import_type, submit_after_import=False, console=False +): + """ + Import documents in from CSV or XLSX using data import. + + :param doctype: DocType to import + :param file_path: Path to .csv, .xls, or .xlsx file to import + :param import_type: One of "Insert" or "Update" + :param submit_after_import: Whether to submit documents after import + :param console: Set to true if this is to be used from command line. Will print errors or progress to stdout. + """ + + data_import = frappe.new_doc("Data Import") + data_import.submit_after_import = submit_after_import + data_import.import_type = ( + "Insert New Records" if import_type.lower() == "insert" else "Update Existing Records" + ) + + i = Importer( + doctype=doctype, file_path=file_path, data_import=data_import, console=console + ) + i.import_data() + + +############## + + +def import_doc( + path, + overwrite=False, + ignore_links=False, + ignore_insert=False, + insert=False, + submit=False, + pre_process=None, +): if os.path.isdir(path): files = [os.path.join(path, f) for f in os.listdir(path)] else: @@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, for f in files: if f.endswith(".json"): frappe.flags.mute_emails = True - frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True) + frappe.modules.import_file.import_file_by_path( + f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True + ) frappe.flags.mute_emails = False frappe.db.commit() elif f.endswith(".csv"): - import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process) + import_file_by_path( + f, + ignore_links=ignore_links, + overwrite=overwrite, + submit=submit, + pre_process=pre_process, + ) frappe.db.commit() -def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True): - 
from frappe.utils.csvutils import read_csv_content - print("Importing " + path) - with open(path, "r") as infile: - upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite, - submit_after_import=submit, pre_process=pre_process) +def import_file_by_path( + path, + ignore_links=False, + overwrite=False, + submit=False, + pre_process=None, + no_email=True, +): + if path.endswith(".csv"): + print() + print("This method is deprecated.") + print('Import CSV files using the command "bench --site sitename data-import"') + print("Or use the method frappe.core.doctype.data_import.data_import.import_file") + print() + raise Exception("Method deprecated") -def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"): +def export_json( + doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc" +): def post_process(out): - del_keys = ('modified_by', 'creation', 'owner', 'idx') + del_keys = ("modified_by", "creation", "owner", "idx") for doc in out: for key in del_keys: if key in doc: @@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b for k, v in doc.items(): if isinstance(v, list): for child in v: - for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'): + for key in del_keys + ("docstatus", "doctype", "modified", "name"): if key in child: del child[key] @@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b elif frappe.db.get_value("DocType", doctype, "issingle"): out.append(frappe.get_doc(doctype).as_dict()) else: - for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by): + for doc in frappe.get_all( + doctype, + fields=["name"], + filters=filters, + or_filters=or_filters, + limit_page_length=0, + order_by=order_by, + ): out.append(frappe.get_doc(doctype, doc.name).as_dict()) 
post_process(out) dirname = os.path.dirname(path) if not os.path.exists(dirname): - path = os.path.join('..', path) + path = os.path.join("..", path) with open(path, "w") as outfile: outfile.write(frappe.as_json(out)) @@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b def export_csv(doctype, path): from frappe.core.doctype.data_export.exporter import export_data + with open(path, "wb") as csvfile: export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True) csvfile.write(frappe.response.result.encode("utf-8")) - - -@frappe.whitelist() -def export_fixture(doctype, app): - if frappe.session.user != "Administrator": - raise frappe.PermissionError - - if not os.path.exists(frappe.get_app_path(app, "fixtures")): - os.mkdir(frappe.get_app_path(app, "fixtures")) - - export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc") diff --git a/frappe/core/doctype/data_import/data_import_list.js b/frappe/core/doctype/data_import/data_import_list.js index dc06f44f59..1dee4319f9 100644 --- a/frappe/core/doctype/data_import/data_import_list.js +++ b/frappe/core/doctype/data_import/data_import_list.js @@ -1,31 +1,40 @@ +let imports_in_progress = []; + frappe.listview_settings['Data Import'] = { - add_fields: ["import_status"], - has_indicator_for_draft: 1, - get_indicator: function(doc) { - - let status = { - 'Successful': [__("Success"), "green", "import_status,=,Successful"], - 'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"], - 'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"], - 'Failed': [__("Failed"), "red", "import_status,=,Failed"], - 'Pending': [__("Pending"), "orange", "import_status,=,"] - } - - if (doc.import_status) { - return status[doc.import_status]; - } - - if (doc.docstatus == 0) { - return status['Pending']; - } - - return status['Pending']; - }, onload(listview) { - 
listview.page.set_title_sub(` - - ${__('Try the new Data Import')} - - `); - } + frappe.realtime.on('data_import_progress', data => { + if (!imports_in_progress.includes(data.data_import)) { + imports_in_progress.push(data.data_import); + } + }); + frappe.realtime.on('data_import_refresh', data => { + imports_in_progress = imports_in_progress.filter( + d => d !== data.data_import + ); + listview.refresh(); + }); + }, + get_indicator: function(doc) { + var colors = { + 'Pending': 'orange', + 'Partial Success': 'orange', + 'Success': 'green', + 'In Progress': 'orange', + 'Error': 'red' + }; + let status = doc.status; + if (imports_in_progress.includes(doc.name)) { + status = 'In Progress'; + } + return [__(status), colors[status], 'status,=,' + doc.status]; + }, + formatters: { + import_type(value) { + return { + 'Insert New Records': __('Insert'), + 'Update Existing Records': __('Update') + }[value]; + } + }, + hide_name_column: true }; diff --git a/frappe/core/doctype/data_import_beta/exporter.py b/frappe/core/doctype/data_import/exporter.py similarity index 100% rename from frappe/core/doctype/data_import_beta/exporter.py rename to frappe/core/doctype/data_import/exporter.py diff --git a/frappe/core/doctype/data_import_beta/fixtures/sample_import_file.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file.csv similarity index 100% rename from frappe/core/doctype/data_import_beta/fixtures/sample_import_file.csv rename to frappe/core/doctype/data_import/fixtures/sample_import_file.csv diff --git a/frappe/core/doctype/data_import_beta/fixtures/sample_import_file_for_update.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv similarity index 100% rename from frappe/core/doctype/data_import_beta/fixtures/sample_import_file_for_update.csv rename to frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv diff --git a/frappe/core/doctype/data_import_beta/fixtures/sample_import_file_without_mandatory.csv 
b/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv similarity index 100% rename from frappe/core/doctype/data_import_beta/fixtures/sample_import_file_without_mandatory.csv rename to frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv diff --git a/frappe/core/doctype/data_import/importer.py b/frappe/core/doctype/data_import/importer.py index b6d410d072..ec3cccc1b1 100644 --- a/frappe/core/doctype/data_import/importer.py +++ b/frappe/core/doctype/data_import/importer.py @@ -1,541 +1,1164 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors +# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt -from __future__ import unicode_literals, print_function - -from six.moves import range -import requests -import frappe, json, os -import frappe.permissions - +from __future__ import unicode_literals +import os +import io +import frappe +import timeit +import json +from datetime import datetime from frappe import _ +from frappe.utils import cint, flt, update_progress_bar, cstr +from frappe.utils.csvutils import read_csv_content, get_csv_content_from_google_sheets +from frappe.utils.xlsxutils import ( + read_xlsx_file_from_attached_file, + read_xls_file_from_attached_file, +) +from frappe.model import no_value_fields, table_fields as table_fieldtypes +from frappe.core.doctype.version.version import get_diff -from frappe.utils.csvutils import getlink -from frappe.utils.dateutils import parse_date - -from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url -from six import text_type, string_types +INVALID_VALUES = ("", None) +MAX_ROWS_IN_PREVIEW = 10 +INSERT = "Insert New Records" +UPDATE = "Update Existing Records" -@frappe.whitelist() -def get_data_keys(): - return frappe._dict({ - "data_separator": _('Start entering data below this line'), - "main_table": _("Table") + 
":", - "parent_table": _("Parent Table") + ":", - "columns": _("Column Name") + ":", - "doctype": _("DocType") + ":" - }) +class Importer: + def __init__( + self, doctype, data_import=None, file_path=None, import_type=None, console=False + ): + self.doctype = doctype + self.console = console + self.data_import = data_import + if not self.data_import: + self.data_import = frappe.get_doc(doctype="Data Import") + if import_type: + self.data_import.import_type = import_type + self.template_options = frappe.parse_json(self.data_import.template_options or "{}") + self.import_type = self.data_import.import_type -@frappe.whitelist() -def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, - update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No", - skip_errors = True, data_import_doc=None, validate_template=False, user=None): - """upload data""" + self.import_file = ImportFile( + doctype, + file_path or data_import.google_sheets_url or data_import.import_file, + self.template_options, + self.import_type, + ) - # for translations - if user: - frappe.cache().hdel("lang", user) - frappe.set_user_lang(user) + def get_data_for_import_preview(self): + return self.import_file.get_data_for_import_preview() - if data_import_doc and isinstance(data_import_doc, string_types): - data_import_doc = frappe.get_doc("Data Import", data_import_doc) - if data_import_doc and from_data_import == "Yes": - no_email = data_import_doc.no_email - ignore_encoding_errors = data_import_doc.ignore_encoding_errors - update_only = data_import_doc.only_update - submit_after_import = data_import_doc.submit_after_import - overwrite = data_import_doc.overwrite - skip_errors = data_import_doc.skip_errors - else: - # extra input params - params = json.loads(frappe.form_dict.get("params") or '{}') - if params.get("submit_after_import"): - submit_after_import = True - if params.get("ignore_encoding_errors"): - 
ignore_encoding_errors = True - if not params.get("no_email"): - no_email = False - if params.get('update_only'): - update_only = True - if params.get('from_data_import'): - from_data_import = params.get('from_data_import') - if not params.get('skip_errors'): - skip_errors = params.get('skip_errors') + def before_import(self): + # set user lang for translations + frappe.cache().hdel("lang", frappe.session.user) + frappe.set_user_lang(frappe.session.user) - frappe.flags.in_import = True - frappe.flags.mute_emails = no_email + # set flags + frappe.flags.in_import = True + frappe.flags.mute_emails = self.data_import.mute_emails - def get_data_keys_definition(): - return get_data_keys() + self.data_import.db_set("template_warnings", "") - def bad_template(): - frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator)) + def import_data(self): + self.before_import() - def check_data_length(): - if not data: - frappe.throw(_("No data found in the file. 
Please reattach the new file with data.")) + # parse docs from rows + payloads = self.import_file.get_payloads_for_import() - def get_start_row(): - for i, row in enumerate(rows): - if row and row[0]==get_data_keys_definition().data_separator: - return i+1 - bad_template() + # dont import if there are non-ignorable warnings + warnings = self.import_file.get_warnings() + warnings = [w for w in warnings if w.get("type") != "info"] - def get_header_row(key): - return get_header_row_and_idx(key)[0] - - def get_header_row_and_idx(key): - for i, row in enumerate(header): - if row and row[0]==key: - return row, i - return [], -1 - - def filter_empty_columns(columns): - empty_cols = list(filter(lambda x: x in ("", None), columns)) - - if empty_cols: - if columns[-1*len(empty_cols):] == empty_cols: - # filter empty columns if they exist at the end - columns = columns[:-1*len(empty_cols)] + if warnings: + if self.console: + self.print_grouped_warnings(warnings) else: - frappe.msgprint(_("Please make sure that there are no empty columns in the file."), - raise_exception=1) - - return columns - - def make_column_map(): - doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype) - if row_idx == -1: # old style + self.data_import.db_set("template_warnings", json.dumps(warnings)) return - dt = None - for i, d in enumerate(doctype_row[1:]): - if d not in ("~", "-"): - if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"): - dt, parentfield = d, None - # xls format truncates the row, so it may not have more columns - if len(doctype_row) > i+2: - parentfield = doctype_row[i+2] - doctypes.append((dt, parentfield)) - column_idx_to_fieldname[(dt, parentfield)] = {} - column_idx_to_fieldtype[(dt, parentfield)] = {} - if dt: - column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1] - column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1] - - def get_doc(start_idx): - if doctypes: - doc = {} - attachments = [] - 
last_error_row_idx = None - for idx in range(start_idx, len(rows)): - last_error_row_idx = idx # pylint: disable=W0612 - if (not doc) or main_doc_empty(rows[idx]): - for dt, parentfield in doctypes: - d = {} - for column_idx in column_idx_to_fieldname[(dt, parentfield)]: - try: - fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx] - fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx] - - if not fieldname or not rows[idx][column_idx]: - continue - - d[fieldname] = rows[idx][column_idx] - if fieldtype in ("Int", "Check"): - d[fieldname] = cint(d[fieldname]) - elif fieldtype in ("Float", "Currency", "Percent"): - d[fieldname] = flt(d[fieldname]) - elif fieldtype == "Date": - if d[fieldname] and isinstance(d[fieldname], string_types): - d[fieldname] = getdate(parse_date(d[fieldname])) - elif fieldtype == "Datetime": - if d[fieldname]: - if " " in d[fieldname]: - _date, _time = d[fieldname].split() - else: - _date, _time = d[fieldname], '00:00:00' - _date = parse_date(d[fieldname]) - d[fieldname] = get_datetime(_date + " " + _time) - else: - d[fieldname] = None - - elif fieldtype in ("Image", "Attach Image", "Attach"): - # added file to attachments list - attachments.append(d[fieldname]) - - elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]: - # as fields can be saved in the number format(long type) in data import template - d[fieldname] = cstr(d[fieldname]) - - except IndexError: - pass - - # scrub quotes from name and modified - if d.get("name") and d["name"].startswith('"'): - d["name"] = d["name"][1:-1] - - if sum([0 if not val else 1 for val in d.values()]): - d['doctype'] = dt - if dt == doctype: - doc.update(d) - else: - if not overwrite and doc.get("name"): - d['parent'] = doc["name"] - d['parenttype'] = doctype - d['parentfield'] = parentfield - doc.setdefault(d['parentfield'], []).append(d) - else: - break - - return doc, attachments, last_error_row_idx + # setup import log + if self.data_import.import_log: + 
import_log = frappe.parse_json(self.data_import.import_log) else: - doc = frappe._dict(zip(columns, rows[start_idx][1:])) - doc['doctype'] = doctype - return doc, [], None + import_log = [] - # used in testing whether a row is empty or parent row or child row - # checked only 3 first columns since first two columns can be blank for example the case of - # importing the item variant where item code and item name will be blank. - def main_doc_empty(row): - if row: - for i in range(3,0,-1): - if len(row) > i and row[i]: - return False - return True + # remove previous failures from import log + import_log = [log for log in import_log if log.get("success")] - def validate_naming(doc): - autoname = frappe.get_meta(doctype).autoname - if autoname: - if autoname[0:5] == 'field': - autoname = autoname[6:] - elif autoname == 'naming_series:': - autoname = 'naming_series' - else: - return True + # get successfully imported rows + imported_rows = [] + for log in import_log: + log = frappe._dict(log) + if log.success: + imported_rows += log.row_indexes - if (autoname not in doc) or (not doc[autoname]): - from frappe.model.base_document import get_controller - if not hasattr(get_controller(doctype), "autoname"): - frappe.throw(_("{0} is a mandatory field").format(autoname)) - return True + # start import + total_payload_count = len(payloads) + batch_size = frappe.conf.data_import_batch_size or 1000 - users = frappe.db.sql_list("select name from tabUser") - def prepare_for_insert(doc): - # don't block data import if user is not set - # migrating from another system - if not doc.owner in users: - doc.owner = frappe.session.user - if not doc.modified_by in users: - doc.modified_by = frappe.session.user + for batch_index, batched_payloads in enumerate( + frappe.utils.create_batch(payloads, batch_size) + ): + for i, payload in enumerate(batched_payloads): + doc = payload.doc + row_indexes = [row.row_number for row in payload.rows] + current_index = (i + 1) + (batch_index * 
batch_size) - def is_valid_url(url): - is_valid = False - if url.startswith("/files") or url.startswith("/private/files"): - url = get_url(url) + if set(row_indexes).intersection(set(imported_rows)): + print("Skipping imported rows", row_indexes) + if total_payload_count > 5: + frappe.publish_realtime( + "data_import_progress", + { + "current": current_index, + "total": total_payload_count, + "skipping": True, + "data_import": self.data_import.name, + }, + ) + continue - try: - r = requests.get(url) - is_valid = True if r.status_code == 200 else False - except Exception: - pass + try: + start = timeit.default_timer() + doc = self.process_doc(doc) + processing_time = timeit.default_timer() - start + eta = self.get_eta(current_index, total_payload_count, processing_time) - return is_valid + if self.console: + update_progress_bar( + "Importing {0} records".format(total_payload_count), + current_index, + total_payload_count, + ) + elif total_payload_count > 5: + frappe.publish_realtime( + "data_import_progress", + { + "current": current_index, + "total": total_payload_count, + "docname": doc.name, + "data_import": self.data_import.name, + "success": True, + "row_indexes": row_indexes, + "eta": eta, + }, + ) - def attach_file_to_doc(doctype, docname, file_url): - # check if attachment is already available - # check if the attachement link is relative or not - if not file_url: - return - if not is_valid_url(file_url): - return + import_log.append( + frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes) + ) + # commit after every successful import + frappe.db.commit() - files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and - attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format( - doctype=doctype, - docname=docname, - file_url=file_url - )) - - if files: - # file is already attached - return - - _file = frappe.get_doc({ - "doctype": "File", - "file_url": file_url, - 
"attached_to_name": docname, - "attached_to_doctype": doctype, - "attached_to_field": 0, - "folder": "Home/Attachments"}) - _file.save() - - - # header - filename, file_extension = ['',''] - if not rows: - _file = frappe.get_doc("File", {"file_url": data_import_doc.import_file}) - fcontent = _file.get_content() - filename, file_extension = _file.get_extension() - - if file_extension == '.xlsx' and from_data_import == 'Yes': - from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file - rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file) - - elif file_extension == '.csv': - from frappe.utils.csvutils import read_csv_content - rows = read_csv_content(fcontent, ignore_encoding_errors) + except Exception: + import_log.append( + frappe._dict( + success=False, + exception=frappe.get_traceback(), + messages=frappe.local.message_log, + row_indexes=row_indexes, + ) + ) + frappe.clear_messages() + # rollback if exception + frappe.db.rollback() + # set status + failures = [log for log in import_log if not log.get("success")] + if len(failures) == total_payload_count: + status = "Pending" + elif len(failures) > 0: + status = "Partial Success" else: - frappe.throw(_("Unsupported File Format")) + status = "Success" - start_row = get_start_row() - header = rows[:start_row] - data = rows[start_row:] - try: - doctype = get_header_row(get_data_keys_definition().main_table)[1] - columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:]) - except: - frappe.throw(_("Cannot change header content")) - doctypes = [] - column_idx_to_fieldname = {} - column_idx_to_fieldtype = {} + if self.console: + self.print_import_log(import_log) + else: + self.data_import.db_set("status", status) + self.data_import.db_set("import_log", json.dumps(import_log)) - if skip_errors: - data_rows_with_error = header + self.after_import() - if submit_after_import and not cint(frappe.db.get_value("DocType", - doctype, "is_submittable")): - 
submit_after_import = False + return import_log - parenttype = get_header_row(get_data_keys_definition().parent_table) - - if len(parenttype) > 1: - parenttype = parenttype[1] - - # check permissions - if not frappe.permissions.can_import(parenttype or doctype): + def after_import(self): + frappe.flags.in_import = False frappe.flags.mute_emails = False - return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True} - # Throw expception in case of the empty data file - check_data_length() - make_column_map() - total = len(data) + def process_doc(self, doc): + if self.import_type == INSERT: + return self.insert_record(doc) + elif self.import_type == UPDATE: + return self.update_record(doc) - if validate_template: - if total: - data_import_doc.total_rows = total - return True + def insert_record(self, doc): + meta = frappe.get_meta(self.doctype) + new_doc = frappe.new_doc(self.doctype) + new_doc.update(doc) - if overwrite==None: - overwrite = params.get('overwrite') + if (meta.autoname or "").lower() != "prompt": + # name can only be set directly if autoname is prompt + new_doc.set("name", None) - # delete child rows (if parenttype) - parentfield = None - if parenttype: - parentfield = get_parent_field(doctype, parenttype) + new_doc.flags.updater_reference = { + "doctype": self.data_import.doctype, + "docname": self.data_import.name, + "label": _("via Data Import"), + } - if overwrite: - delete_child_rows(data, doctype) + new_doc.insert() + if meta.is_submittable and self.data_import.submit_after_import: + new_doc.submit() + return new_doc - import_log = [] - def log(**kwargs): - if via_console: - print((kwargs.get("title") + kwargs.get("message")).encode('utf-8')) + def update_record(self, doc): + id_field = get_id_field(self.doctype) + existing_doc = frappe.get_doc(self.doctype, doc.get(id_field.fieldname)) + + updated_doc = frappe.get_doc(self.doctype, doc.get(id_field.fieldname)) + updated_doc.update(doc) + + if get_diff(existing_doc, 
updated_doc): + # update doc if there are changes + updated_doc.flags.updater_reference = { + "doctype": self.data_import.doctype, + "docname": self.data_import.name, + "label": _("via Data Import"), + } + updated_doc.save() + return updated_doc else: - import_log.append(kwargs) + # throw if no changes + frappe.throw('No changes to update') - def as_link(doctype, name): - if via_console: - return "{0}: {1}".format(doctype, name) - else: - return getlink(doctype, name) + def get_eta(self, current, total, processing_time): + self.last_eta = getattr(self, "last_eta", 0) + remaining = total - current + eta = processing_time * remaining + if not self.last_eta or eta < self.last_eta: + self.last_eta = eta + return self.last_eta - # publish realtime task update - def publish_progress(achieved, reload=False): - if data_import_doc: - frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)), - "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user) + def export_errored_rows(self): + from frappe.utils.csvutils import build_csv_response + + if not self.data_import: + return + + import_log = frappe.parse_json(self.data_import.import_log or "[]") + failures = [log for log in import_log if not log.get("success")] + row_indexes = [] + for f in failures: + row_indexes.extend(f.get("row_indexes", [])) + + # de duplicate + row_indexes = list(set(row_indexes)) + row_indexes.sort() + + header_row = [col.header_title for col in self.import_file.columns] + rows = [header_row] + rows += [row.data for row in self.import_file.data if row.row_number in row_indexes] + + build_csv_response(rows, self.doctype) + + def print_import_log(self, import_log): + failed_records = [log for log in import_log if not log.success] + successful_records = [log for log in import_log if log.success] + + if successful_records: + print() + print( + "Successfully imported {0} records out of {1}".format( + len(successful_records), len(import_log) + ) + ) 
+ + if failed_records: + print("Failed to import {0} records".format(len(failed_records))) + file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now()) + print("Check {0} for errors".format(os.path.join("sites", file_name))) + text = "" + for w in failed_records: + text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", []))) + text += "Messages:\n{0}\n".format("\n".join(w.get("messages", []))) + text += "Traceback:\n{0}\n\n".format(w.get("exception")) + + with open(file_name, "w") as f: + f.write(text) + + def print_grouped_warnings(self, warnings): + warnings_by_row = {} + other_warnings = [] + for w in warnings: + if w.get("row"): + warnings_by_row.setdefault(w.get("row"), []).append(w) + else: + other_warnings.append(w) + + for row_number, warnings in warnings_by_row.items(): + print("Row {0}".format(row_number)) + for w in warnings: + print(w.get("message")) + + for w in other_warnings: + print(w.get("message")) - error_flag = rollback_flag = False +class ImportFile: + def __init__(self, doctype, file, template_options=None, import_type=None): + self.doctype = doctype + self.template_options = template_options or frappe._dict( + column_to_field_map=frappe._dict() + ) + self.column_to_field_map = self.template_options.column_to_field_map + self.import_type = import_type + self.warnings = [] - batch_size = frappe.conf.data_import_batch_size or 1000 + self.file_doc = self.file_path = self.google_sheets_url = None + if isinstance(file, frappe.string_types): + if frappe.db.exists("File", {"file_url": file}): + self.file_doc = frappe.get_doc("File", {"file_url": file}) + elif 'docs.google.com/spreadsheets' in file: + self.google_sheets_url = file + elif os.path.exists(file): + self.file_path = file - for batch_start in range(0, total, batch_size): - batch = data[batch_start:batch_start + batch_size] + if not self.file_doc and not self.file_path and not self.google_sheets_url: + frappe.throw(_("Invalid template file for import")) - for i, row 
in enumerate(batch): - # bypass empty rows - if main_doc_empty(row): + self.raw_data = self.get_data_from_template_file() + self.parse_data_from_template() + + def get_data_from_template_file(self): + content = None + extension = None + + if self.file_doc: + parts = self.file_doc.get_extension() + extension = parts[1] + content = self.file_doc.get_content() + extension = extension.lstrip(".") + + elif self.file_path: + content, extension = self.read_file(self.file_path) + + elif self.google_sheets_url: + content = get_csv_content_from_google_sheets(self.google_sheets_url) + extension = 'csv' + + if not content: + frappe.throw(_("Invalid or corrupted content for import")) + + if not extension: + extension = "csv" + + if content: + return self.read_content(content, extension) + + def parse_data_from_template(self): + header = None + data = [] + + for i, row in enumerate(self.raw_data): + if all(v in INVALID_VALUES for v in row): + # empty row continue - row_idx = i + start_row - doc = None - - publish_progress(i) - - try: - doc, attachments, last_error_row_idx = get_doc(row_idx) - validate_naming(doc) - if pre_process: - pre_process(doc) - - original = None - if parentfield: - parent = frappe.get_doc(parenttype, doc["parent"]) - doc = parent.append(parentfield, doc) - parent.save() - else: - if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]): - original = frappe.get_doc(doctype, doc["name"]) - original_name = original.name - original.update(doc) - # preserve original name for case sensitivity - original.name = original_name - original.flags.ignore_links = ignore_links - original.save() - doc = original - else: - if not update_only: - doc = frappe.get_doc(doc) - prepare_for_insert(doc) - doc.flags.ignore_links = ignore_links - doc.insert() - if attachments: - # check file url and create a File document - for file_url in attachments: - attach_file_to_doc(doc.doctype, doc.name, file_url) - if submit_after_import: - doc.submit() - - # log errors 
- if parentfield: - log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)), - "link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"}) - elif submit_after_import: - log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)), - "message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"}) - elif original: - log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)), - "message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) - elif not update_only: - log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)), - "message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) - else: - log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None, - "message": "Document updation ignored", "indicator": "orange"}) - - except Exception as e: - error_flag = True - - # build error message - if frappe.local.message_log: - err_msg = "\n".join(['

    {}

    '.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) - else: - err_msg = '

    {}

    '.format(cstr(e)) - - error_trace = frappe.get_traceback() - if error_trace: - error_log_doc = frappe.log_error(error_trace) - error_link = get_absolute_url("Error Log", error_log_doc.name) - else: - error_link = None - - log(**{ - "row": row_idx + 1, - "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), - "message": err_msg, - "indicator": "red", - "link":error_link - }) - - # data with error to create a new file - # include the errored data in the last row as last_error_row_idx will not be updated for the last row - if skip_errors: - if last_error_row_idx == len(rows)-1: - last_error_row_idx = len(rows) - data_rows_with_error += rows[row_idx:last_error_row_idx] - else: - rollback_flag = True - finally: - frappe.local.message_log = [] - - start_row += batch_size - if rollback_flag: - frappe.db.rollback() - else: - frappe.db.commit() - - frappe.flags.mute_emails = False - frappe.flags.in_import = False - - log_message = {"messages": import_log, "error": error_flag} - if data_import_doc: - data_import_doc.log_details = json.dumps(log_message) - - import_status = None - if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): - import_status = "Partially Successful" - # write the file with the faulty row - file_name = 'error_' + filename + file_extension - if file_extension == '.xlsx': - from frappe.utils.xlsxutils import make_xlsx - xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") - file_data = xlsx_file.getvalue() + if not header: + header = Header(i, row, self.doctype, self.raw_data, self.column_to_field_map) else: - from frappe.utils.csvutils import to_csv - file_data = to_csv(data_rows_with_error) - _file = frappe.get_doc({ - "doctype": "File", - "file_name": file_name, - "attached_to_doctype": "Data Import", - "attached_to_name": data_import_doc.name, - "folder": "Home/Attachments", - "content": file_data}) - _file.save() - data_import_doc.error_file = _file.file_url + row_obj 
= Row(i, row, self.doctype, header, self.import_type) + data.append(row_obj) - elif error_flag: - import_status = "Failed" - else: - import_status = "Successful" + self.header = header + self.columns = self.header.columns + self.data = data - data_import_doc.import_status = import_status - data_import_doc.save() - if data_import_doc.import_status in ["Successful", "Partially Successful"]: - data_import_doc.submit() - publish_progress(100, True) - else: - publish_progress(0, True) - frappe.db.commit() - else: - return log_message + if len(data) < 1: + frappe.throw( + _("Import template should contain a Header and atleast one row."), + title=_("Template Error"), + ) -def get_parent_field(doctype, parenttype): - parentfield = None + def get_data_for_import_preview(self): + """Adds a serial number column as the first column""" - # get parentfield - if parenttype: - for d in frappe.get_meta(parenttype).get_table_fields(): - if d.options==doctype: - parentfield = d.fieldname + columns = [frappe._dict({"header_title": "Sr. 
No", "skip_import": True})] + columns += [col.as_dict() for col in self.columns] + for col in columns: + # only pick useful fields in docfields to minimise the payload + if col.df: + col.df = { + "fieldtype": col.df.fieldtype, + "fieldname": col.df.fieldname, + "label": col.df.label, + "options": col.df.options, + "parent": col.df.parent, + "reqd": col.df.reqd, + "default": col.df.default, + "read_only": col.df.read_only, + } + + data = [[row.row_number] + row.as_list() for row in self.data] + + warnings = self.get_warnings() + + out = frappe._dict() + out.data = data + out.columns = columns + out.warnings = warnings + total_number_of_rows = len(out.data) + if total_number_of_rows > MAX_ROWS_IN_PREVIEW: + out.data = out.data[:MAX_ROWS_IN_PREVIEW] + out.max_rows_exceeded = True + out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW + out.total_number_of_rows = total_number_of_rows + return out + + def get_payloads_for_import(self): + payloads = [] + # make a copy + data = list(self.data) + while data: + doc, rows, data = self.parse_next_row_for_import(data) + payloads.append(frappe._dict(doc=doc, rows=rows)) + return payloads + + def parse_next_row_for_import(self, data): + """ + Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. + Returns the doc, rows, and data without the rows. 
+ """ + doctypes = self.header.doctypes + + # first row is included by default + first_row = data[0] + rows = [first_row] + + # if there are child doctypes, find the subsequent rows + if len(doctypes) > 1: + # subsequent rows either dont have any parent value set + # or have the same value as the parent row + # we include a row if either of conditions match + parent_column_indexes = self.header.get_column_indexes(self.doctype) + parent_row_values = first_row.get_values(parent_column_indexes) + + data_without_first_row = data[1:] + for row in data_without_first_row: + row_values = row.get_values(parent_column_indexes) + # if the row is blank, it's a child row doc + if all([v in INVALID_VALUES for v in row_values]): + rows.append(row) + continue + # if the row has same values as parent row, it's a child row doc + if row_values == parent_row_values: + rows.append(row) + continue + # if any of those conditions dont match, it's the next doc break - if not parentfield: - frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) - raise Exception + parent_doc = None + for row in rows: + for doctype, table_df in doctypes: + if doctype == self.doctype and not parent_doc: + parent_doc = row.parse_doc(doctype) - return parentfield + if doctype != self.doctype and table_df: + child_doc = row.parse_doc(doctype, parent_doc, table_df) + parent_doc[table_df.fieldname] = parent_doc.get(table_df.fieldname, []) + parent_doc[table_df.fieldname].append(child_doc) -def delete_child_rows(rows, doctype): - """delete child rows for all parents""" - for p in list(set([r[1] for r in rows])): - if p: - frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) + doc = parent_doc + + if self.import_type == INSERT: + # check if there is atleast one row for mandatory table fields + meta = frappe.get_meta(self.doctype) + mandatory_table_fields = [ + df + for df in meta.fields + if df.fieldtype in table_fieldtypes + and df.reqd + and 
len(doc.get(df.fieldname, [])) == 0 + ] + if len(mandatory_table_fields) == 1: + self.warnings.append( + { + "row": first_row.row_number, + "message": _("There should be atleast one row for {0} table").format( + frappe.bold(mandatory_table_fields[0].label) + ), + } + ) + elif mandatory_table_fields: + fields_string = ", ".join([df.label for df in mandatory_table_fields]) + message = _("There should be atleast one row for the following tables: {0}").format( + fields_string + ) + self.warnings.append({"row": first_row.row_number, "message": message}) + + return doc, rows, data[len(rows) :] + + def get_warnings(self): + warnings = [] + + # ImportFile warnings + warnings += self.warnings + + # Column warnings + for col in self.header.columns: + warnings += col.warnings + + # Row warnings + for row in self.data: + warnings += row.warnings + + return warnings + + ###### + + def read_file(self, file_path): + extn = file_path.split(".")[1] + + file_content = None + with io.open(file_path, mode="rb") as f: + file_content = f.read() + + return file_content, extn + + def read_content(self, content, extension): + error_title = _("Template Error") + if extension not in ("csv", "xlsx", "xls"): + frappe.throw( + _("Import template should be of type .csv, .xlsx or .xls"), title=error_title + ) + + if extension == "csv": + data = read_csv_content(content) + elif extension == "xlsx": + data = read_xlsx_file_from_attached_file(fcontent=content) + elif extension == "xls": + data = read_xls_file_from_attached_file(content) + + return data + + +class Row: + link_values_exist_map = {} + + def __init__(self, index, row, doctype, header, import_type): + self.index = index + self.row_number = index + 1 + self.doctype = doctype + self.data = row + self.header = header + self.import_type = import_type + self.warnings = [] + + len_row = len(self.data) + len_columns = len(self.header.columns) + if len_row != len_columns: + less_than_columns = len_row < len_columns + message = ( + "Row has less 
values than columns" + if less_than_columns + else "Row has more values than columns" + ) + self.warnings.append( + {"row": self.row_number, "message": message,} + ) + + def parse_doc(self, doctype, parent_doc=None, table_df=None): + col_indexes = self.header.get_column_indexes(doctype, table_df) + values = self.get_values(col_indexes) + columns = self.header.get_columns(col_indexes) + doc = self._parse_doc(doctype, columns, values, parent_doc, table_df) + return doc + + def _parse_doc(self, doctype, columns, values, parent_doc=None, table_df=None): + doc = frappe._dict() + if self.import_type == INSERT: + # new_doc returns a dict with default values set + doc = frappe.new_doc( + doctype, + parent_doc=parent_doc, + parentfield=table_df.fieldname if table_df else None, + as_dict=True, + ) + + # remove standard fields and __islocal + for key in frappe.model.default_fields + ("__islocal",): + doc.pop(key, None) + + for col, value in zip(columns, values): + df = col.df + if value in INVALID_VALUES: + value = None + + if value is not None: + value = self.validate_value(value, col) + + if value is not None: + doc[df.fieldname] = self.parse_value(value, col) + + is_table = frappe.get_meta(doctype).istable + is_update = self.import_type == UPDATE + if is_table and is_update and doc.get("name") in INVALID_VALUES: + # for table rows being inserted in update + # create a new doc with defaults set + new_doc = frappe.new_doc(doctype, as_dict=True) + new_doc.update(doc) + doc = new_doc + + self.check_mandatory_fields(doctype, doc, table_df) + return doc + + def validate_value(self, value, col): + df = col.df + if df.fieldtype == "Select": + select_options = df.get_select_options() + if select_options and value not in select_options: + options_string = ", ".join([frappe.bold(d) for d in select_options]) + msg = _("Value must be one of {0}").format(options_string) + self.warnings.append( + { + "row": self.row_number, + "field": df_as_json(df), + "message": msg, + } + ) + return + 
 + elif df.fieldtype == "Link": + exists = self.link_exists(value, df) + if not exists: + msg = _("Value {0} missing for {1}").format( + frappe.bold(value), frappe.bold(df.options) + ) + self.warnings.append( + { + "row": self.row_number, + "field": df_as_json(df), + "message": msg, + } + ) + return + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + if isinstance(value, frappe.string_types): + # value was not parsed as datetime object + self.warnings.append( + { + "row": self.row_number, + "col": col.column_number, + "field": df_as_json(df), + "message": _("Value {0} must be in {1} format").format( + frappe.bold(value), frappe.bold(get_user_format(col.date_format)) + ), + } + ) + return + + return value + + def link_exists(self, value, df): + key = df.options + "::" + cstr(value) + if Row.link_values_exist_map.get(key) is None: + Row.link_values_exist_map[key] = frappe.db.exists(df.options, value) + return Row.link_values_exist_map.get(key) + + def parse_value(self, value, col): + df = col.df + if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: + return value + + value = cstr(value) + + # convert boolean values to 0 or 1 + valid_check_values = ["t", "f", "true", "false", "yes", "no", "y", "n"] + if df.fieldtype == "Check" and value.lower().strip() in valid_check_values: + value = value.lower().strip() + value = 1 if value in ["t", "true", "y", "yes"] else 0 + + if df.fieldtype in ["Int", "Check"]: + value = cint(value) + elif df.fieldtype in ["Float", "Percent", "Currency"]: + value = flt(value) + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + + return value + + def get_date(self, value, column): + date_format = column.date_format + if date_format: + try: + return datetime.strptime(value, date_format) + except ValueError: + # ignore date values that don't match the format + # import will break for these values later + pass + return value + + def check_mandatory_fields(self, doctype, 
doc, table_df=None): + """If import type is Insert: + Check for mandatory fields (except table fields) in doc + if import type is Update: + Check for name field or autoname field in doc + """ + meta = frappe.get_meta(doctype) + if self.import_type == UPDATE: + if meta.istable: + # when updating records with table rows, + # there are two scenarios: + # 1. if row 'name' is provided in the template + # the table row will be updated + # 2. if row 'name' is not provided + # then a new row will be added + # so we don't need to check for mandatory + return + + # for update, only ID (name) field is mandatory + id_field = get_id_field(doctype) + if doc.get(id_field.fieldname) in INVALID_VALUES: + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} is a mandatory field").format(id_field.label), + } + ) + return + + fields = [ + df + for df in meta.fields + if df.fieldtype not in table_fieldtypes + and df.reqd + and doc.get(df.fieldname) in INVALID_VALUES + ] + + if not fields: + return + + def get_field_label(df): + return "{0}{1}".format(df.label, " ({})".format(table_df.label) if table_df else "") + + if len(fields) == 1: + field_label = get_field_label(fields[0]) + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} is a mandatory field").format(frappe.bold(field_label)), + } + ) + else: + fields_string = ", ".join([frappe.bold(get_field_label(df)) for df in fields]) + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} are mandatory fields").format(fields_string), + } + ) + + def get_values(self, indexes): + return [self.data[i] for i in indexes] + + def get(self, index): + return self.data[index] + + def as_list(self): + return self.data + + +class Header(Row): + def __init__(self, index, row, doctype, raw_data, column_to_field_map=None): + self.index = index + self.row_number = index + 1 + self.data = row + self.doctype = doctype + column_to_field_map = column_to_field_map or frappe._dict() + + 
self.seen = [] + self.columns = [] + + for j, header in enumerate(row): + column_values = [get_item_at_index(r, j) for r in raw_data] + map_to_field = column_to_field_map.get(str(j)) + column = Column( + j, header, self.doctype, column_values, map_to_field, self.seen + ) + self.seen.append(header) + self.columns.append(column) + + doctypes = [] + for col in self.columns: + if not col.df: + continue + if col.df.parent == self.doctype: + doctypes.append((col.df.parent, None)) + else: + doctypes.append((col.df.parent, col.df.child_table_df)) + + self.doctypes = sorted( + list(set(doctypes)), key=lambda x: -1 if x[0] == self.doctype else 1 + ) + + def get_column_indexes(self, doctype, tablefield=None): + def is_table_field(df): + if tablefield: + return df.child_table_df.fieldname == tablefield.fieldname + return True + + return [ + col.index + for col in self.columns + if not col.skip_import + and col.df + and col.df.parent == doctype + and is_table_field(col.df) + ] + + def get_columns(self, indexes): + return [self.columns[i] for i in indexes] + + +class Column: + seen = [] + fields_column_map = {} + + def __init__(self, index, header, doctype, column_values, map_to_field=None, seen=[]): + self.index = index + self.column_number = index + 1 + self.doctype = doctype + self.header_title = header + self.column_values = column_values + self.map_to_field = map_to_field + self.seen = seen + + self.date_format = None + self.df = None + self.skip_import = None + self.warnings = [] + + self.meta = frappe.get_meta(doctype) + self.parse() + self.validate_values() + + def parse(self): + header_title = self.header_title + column_number = str(self.column_number) + skip_import = False + + if self.map_to_field and self.map_to_field != "Don't Import": + df = get_df_for_column_header(self.doctype, self.map_to_field) + if df: + self.warnings.append( + { + "message": _("Mapping column {0} to field {1}").format( + frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) + 
), + "type": "info", + } + ) + else: + self.warnings.append( + { + "message": _("Could not map column {0} to field {1}").format( + column_number, self.map_to_field + ), + "type": "info", + } + ) + else: + df = get_df_for_column_header(self.doctype, header_title) + # df = df_by_labels_and_fieldnames.get(header_title) + + if not df: + skip_import = True + else: + skip_import = False + + if header_title in self.seen: + self.warnings.append( + { + "col": column_number, + "message": _("Skipping Duplicate Column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + df = None + skip_import = True + elif self.map_to_field == "Don't Import": + skip_import = True + self.warnings.append( + { + "col": column_number, + "message": _("Skipping column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + elif header_title and not df: + self.warnings.append( + { + "col": column_number, + "message": _("Cannot match column {0} with any field").format( + frappe.bold(header_title) + ), + "type": "info", + } + ) + elif not header_title and not df: + self.warnings.append( + {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} + ) + + self.df = df + self.skip_import = skip_import + + def guess_date_format_for_column(self): + """ Guesses date format for a column by parsing all the values in the column, + getting the date format and then returning the one which has the maximum frequency + """ + + date_formats = [ + frappe.utils.guess_date_format(d) for d in self.column_values if isinstance(d, str) + ] + date_formats = [d for d in date_formats if d] + if not date_formats: + return + + unique_date_formats = set(date_formats) + max_occurred_date_format = max(unique_date_formats, key=date_formats.count) + + if len(unique_date_formats) > 1: + # fmt: off + message = _("The column {0} has {1} different date formats. Automatically setting {2} as the default format as it is the most common. 
Please change other values in this column to this format.") + # fmt: on + user_date_format = get_user_format(max_occurred_date_format) + self.warnings.append( + { + "col": self.column_number, + "message": message.format( + frappe.bold(self.header_title), + len(unique_date_formats), + frappe.bold(user_date_format), + ), + "type": "info", + } + ) + + return max_occurred_date_format + + def validate_values(self): + if not self.df: + return + + if self.df.fieldtype == 'Link': + # find all values that dont exist + values = list(set([v for v in self.column_values[1:] if v])) + exists = [d.name for d in frappe.db.get_all(self.df.options, filters={'name': ('in', values)})] + not_exists = list(set(values) - set(exists)) + if not_exists: + missing_values = ', '.join(not_exists) + self.warnings.append({ + 'col': self.column_number, + 'message': "The following values do not exist for {}: {}".format(self.df.options, missing_values), + 'type': 'warning' + }) + elif self.df.fieldtype in ("Date", "Time", "Datetime"): + # guess date format + self.date_format = self.guess_date_format_for_column() + + def as_dict(self): + d = frappe._dict() + d.index = self.index + d.column_number = self.column_number + d.doctype = self.doctype + d.header_title = self.header_title + d.map_to_field = self.map_to_field + d.date_format = self.date_format + d.df = self.df + if hasattr(self.df, 'is_child_table_field'): + d.is_child_table_field = self.df.is_child_table_field + d.child_table_df = self.df.child_table_df + d.skip_import = self.skip_import + d.warnings = self.warnings + return d + + +def build_fields_dict_for_column_matching(parent_doctype): + """ + Build a dict with various keys to match with column headers and value as docfield + The keys can be label or fieldname + { + 'Customer': df1, + 'customer': df1, + 'Due Date': df2, + 'due_date': df2, + 'Item Code (Sales Invoice Item)': df3, + 'Sales Invoice Item:item_code': df3, + } + """ + + def get_standard_fields(doctype): + meta = 
frappe.get_meta(doctype) + if meta.istable: + standard_fields = [ + {"label": "Parent", "fieldname": "parent"}, + {"label": "Parent Type", "fieldname": "parenttype"}, + {"label": "Parent Field", "fieldname": "parentfield"}, + {"label": "Row Index", "fieldname": "idx"}, + ] + else: + standard_fields = [ + {"label": "Owner", "fieldname": "owner"}, + {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, + ] + + out = [] + for df in standard_fields: + df = frappe._dict(df) + df.parent = doctype + out.append(df) + return out + + parent_meta = frappe.get_meta(parent_doctype) + out = {} + + # doctypes and fieldname if it is a child doctype + doctypes = [[parent_doctype, None]] + [ + [df.options, df] for df in parent_meta.get_table_fields() + ] + + for doctype, table_df in doctypes: + # name field + name_by_label = ( + "ID" if doctype == parent_doctype else "ID ({0})".format(table_df.label) + ) + name_by_fieldname = ( + "name" if doctype == parent_doctype else "{0}.name".format(table_df.fieldname) + ) + name_df = frappe._dict( + { + "fieldtype": "Data", + "fieldname": "name", + "label": "ID", + "reqd": 1, # self.import_type == UPDATE, + "parent": doctype, + } + ) + + if doctype != parent_doctype: + name_df.is_child_table_field = True + name_df.child_table_df = table_df + + out[name_by_label] = name_df + out[name_by_fieldname] = name_df + + # other fields + fields = get_standard_fields(doctype) + frappe.get_meta(doctype).fields + for df in fields: + fieldtype = df.fieldtype or "Data" + parent = df.parent or parent_doctype + if fieldtype not in no_value_fields: + if parent_doctype == doctype: + # for parent doctypes keys will be + # Label + # label + # Label (label) + if not out.get(df.label): + # if Label is already set, don't set it again + # in case of duplicate column headers + out[df.label] = df + out[df.fieldname] = df + label_with_fieldname = "{0} ({1})".format(df.label, df.fieldname) + out[label_with_fieldname] = df + else: + # in case there are 
multiple table fields with the same doctype + # for child doctypes keys will be + # Label (Table Field Label) + # table_field.fieldname + table_fields = parent_meta.get( + "fields", {"fieldtype": ["in", table_fieldtypes], "options": parent} + ) + for table_field in table_fields: + by_label = "{0} ({1})".format(df.label, table_field.label) + by_fieldname = "{0}.{1}".format(table_field.fieldname, df.fieldname) + + # create a new df object to avoid mutation problems + if isinstance(df, dict): + new_df = frappe._dict(df.copy()) + else: + new_df = df.as_dict() + + new_df.is_child_table_field = True + new_df.child_table_df = table_field + out[by_label] = new_df + out[by_fieldname] = new_df + + # if autoname is based on field + # add an entry for "ID (Autoname Field)" + autoname_field = get_autoname_field(parent_doctype) + if autoname_field: + out["ID ({})".format(autoname_field.label)] = autoname_field + # ID field should also map to the autoname field + out["ID"] = autoname_field + out["name"] = autoname_field + + return out + + +def get_df_for_column_header(doctype, header): + def build_fields_dict_for_doctype(): + return build_fields_dict_for_column_matching(doctype) + + df_by_labels_and_fieldname = frappe.cache().hget( + "data_import_column_header_map", doctype, generator=build_fields_dict_for_doctype + ) + return df_by_labels_and_fieldname.get(header) + + +# utilities + + +def get_id_field(doctype): + autoname_field = get_autoname_field(doctype) + if autoname_field: + return autoname_field + return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) + + +def get_autoname_field(doctype): + meta = frappe.get_meta(doctype) + if meta.autoname and meta.autoname.startswith("field:"): + fieldname = meta.autoname[len("field:") :] + return meta.get_field(fieldname) + + +def get_item_at_index(_list, i, default=None): + try: + a = _list[i] + except IndexError: + a = default + return a + + +def get_user_format(date_format): + return ( + 
date_format.replace("%Y", "yyyy") + .replace("%y", "yy") + .replace("%m", "mm") + .replace("%d", "dd") + ) + +def df_as_json(df): + return { + 'fieldname': df.fieldname, + 'fieldtype': df.fieldtype, + 'label': df.label, + 'options': df.options, + 'parent': df.parent, + 'default': df.default + } diff --git a/frappe/core/doctype/data_import/test_data_import.js b/frappe/core/doctype/data_import/test_data_import.js deleted file mode 100644 index fbce7781b6..0000000000 --- a/frappe/core/doctype/data_import/test_data_import.js +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable */ -// rename this file from _test_[name] to test_[name] to activate -// and remove above this line - -QUnit.test("test: Data Import", function (assert) { - let done = assert.async(); - - // number of asserts - assert.expect(1); - - frappe.run_serially([ - // insert a new Data Import - () => frappe.tests.make('Data Import', [ - // values to be set - {key: 'value'} - ]), - () => { - assert.equal(cur_frm.doc.key, 'value'); - }, - () => done() - ]); - -}); diff --git a/frappe/core/doctype/data_import/test_data_import.py b/frappe/core/doctype/data_import/test_data_import.py index 406ea08958..15fd57744a 100644 --- a/frappe/core/doctype/data_import/test_data_import.py +++ b/frappe/core/doctype/data_import/test_data_import.py @@ -1,100 +1,10 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017, Frappe Technologies and Contributors +# Copyright (c) 2020, Frappe Technologies and Contributors # See license.txt from __future__ import unicode_literals -import frappe, unittest -from frappe.core.doctype.data_export import exporter -from frappe.core.doctype.data_import import importer -from frappe.utils.csvutils import read_csv_content +# import frappe +import unittest class TestDataImport(unittest.TestCase): - def test_export(self): - exporter.export_data("User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - - def 
test_export_with_data(self): - exporter.export_data("User", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - - def test_export_with_all_doctypes(self): - exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - self.assertEqual(content[13][0], "DocType:") - self.assertEqual(content[13][1], "User") - self.assertTrue("Has Role" in content[13]) - - def test_import(self): - if frappe.db.exists("Blog Category", "test-category"): - frappe.delete_doc("Blog Category", "test-category") - - exporter.export_data("Blog Category", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test-category", "Test Cateogry"]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category") - - # export with data - exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - - # overwrite - content[-1][3] = "New Title" - importer.upload(content, overwrite=True) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title") - - def test_import_only_children(self): - user_email = "test_import_userrole@example.com" - if frappe.db.exists("User", user_email): - frappe.delete_doc("User", user_email, force=True) - - frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert() - - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", 
"Blogger"]) - importer.upload(content) - - user = frappe.get_doc("User", user_email) - self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"})) - self.assertTrue(user.get("roles")[0].role, "Blogger") - - # overwrite - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Website Manager"]) - importer.upload(content, overwrite=True) - - user = frappe.get_doc("User", user_email) - self.assertEqual(len(user.get("roles")), 1) - self.assertTrue(user.get("roles")[0].role, "Website Manager") - - def test_import_with_children(self): #pylint: disable=R0201 - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - exporter.export_data("Event", all_doctypes="Yes", template=True) - content = read_csv_content(frappe.response.result) - - content.append([None] * len(content[-2])) - content[-1][1] = "__Test Event with children" - content[-1][2] = "Private" - content[-1][3] = "2014-01-01 10:00:00.000000" - importer.upload(content) - - frappe.get_doc("Event", {"subject":"__Test Event with children"}) - - def test_excel_import(self): - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - - exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel") - from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file - content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent) - content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name")) \ No newline at end of file + pass diff --git a/frappe/core/doctype/data_import_beta/test_exporter.py 
b/frappe/core/doctype/data_import/test_exporter.py similarity index 95% rename from frappe/core/doctype/data_import_beta/test_exporter.py rename to frappe/core/doctype/data_import/test_exporter.py index 1a61741de3..8415af2e63 100644 --- a/frappe/core/doctype/data_import_beta/test_exporter.py +++ b/frappe/core/doctype/data_import/test_exporter.py @@ -5,8 +5,8 @@ from __future__ import unicode_literals import unittest import frappe -from frappe.core.doctype.data_import_beta.exporter import Exporter -from frappe.core.doctype.data_import_beta.test_importer import ( +from frappe.core.doctype.data_import.exporter import Exporter +from frappe.core.doctype.data_import.test_importer import ( create_doctype_if_not_exists, ) diff --git a/frappe/core/doctype/data_import_beta/test_importer.py b/frappe/core/doctype/data_import/test_importer.py similarity index 98% rename from frappe/core/doctype/data_import_beta/test_importer.py rename to frappe/core/doctype/data_import/test_importer.py index ce51348237..bdadad7890 100644 --- a/frappe/core/doctype/data_import_beta/test_importer.py +++ b/frappe/core/doctype/data_import/test_importer.py @@ -84,7 +84,7 @@ class TestImporter(unittest.TestCase): self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again') def get_importer(self, doctype, import_file, update=False): - data_import = frappe.new_doc('Data Import Beta') + data_import = frappe.new_doc('Data Import') data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records' data_import.reference_doctype = doctype data_import.import_file = import_file.file_url @@ -180,4 +180,4 @@ def get_import_file(csv_file_name, force=False): def get_csv_file_path(file_name): - return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import_beta', 'fixtures', file_name) + return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name) diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.js 
b/frappe/core/doctype/data_import_beta/data_import_beta.js deleted file mode 100644 index a8e3d4eae5..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.js +++ /dev/null @@ -1,494 +0,0 @@ -// Copyright (c) 2019, Frappe Technologies and contributors -// For license information, please see license.txt - -frappe.ui.form.on('Data Import Beta', { - setup(frm) { - frappe.realtime.on('data_import_refresh', ({ data_import }) => { - frm.import_in_progress = false; - if (data_import !== frm.doc.name) return; - frappe.model.clear_doc('Data Import Beta', frm.doc.name); - frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => { - frm.refresh(); - }); - }); - frappe.realtime.on('data_import_progress', data => { - frm.import_in_progress = true; - if (data.data_import !== frm.doc.name) { - return; - } - let percent = Math.floor((data.current * 100) / data.total); - let seconds = Math.floor(data.eta); - let minutes = Math.floor(data.eta / 60); - let eta_message = - // prettier-ignore - seconds < 60 - ? __('About {0} seconds remaining', [seconds]) - : minutes === 1 - ? __('About {0} minute remaining', [minutes]) - : __('About {0} minutes remaining', [minutes]); - - let message; - if (data.success) { - let message_args = [data.current, data.total, eta_message]; - message = - frm.doc.import_type === 'Insert New Records' - ? 
__('Importing {0} of {1}, {2}', message_args) - : __('Updating {0} of {1}, {2}', message_args); - } - if (data.skipping) { - message = __('Skipping {0} of {1}, {2}', [ - data.current, - data.total, - eta_message - ]); - } - frm.dashboard.show_progress(__('Import Progress'), percent, message); - frm.page.set_indicator(__('In Progress'), 'orange'); - - // hide progress when complete - if (data.current === data.total) { - setTimeout(() => { - frm.dashboard.hide(); - frm.refresh(); - }, 2000); - } - }); - - frm.set_query('reference_doctype', () => { - return { - filters: { - name: ['in', frappe.boot.user.can_import] - } - }; - }); - - frm.get_field('import_file').df.options = { - restrictions: { - allowed_file_types: ['.csv', '.xls', '.xlsx'] - } - }; - }, - - refresh(frm) { - frm.page.hide_icon_group(); - frm.trigger('update_indicators'); - frm.trigger('import_file'); - frm.trigger('show_import_log'); - frm.trigger('show_import_warnings'); - frm.trigger('toggle_submit_after_import'); - frm.trigger('show_import_status'); - frm.trigger('show_report_error_button'); - - if (frm.doc.status === 'Partial Success') { - frm.add_custom_button(__('Export Errored Rows'), () => - frm.trigger('export_errored_rows') - ); - } - - if (frm.doc.status.includes('Success')) { - frm.add_custom_button( - __('Go to {0} List', [frm.doc.reference_doctype]), - () => frappe.set_route('List', frm.doc.reference_doctype) - ); - } - - frm.disable_save(); - if (frm.doc.status !== 'Success') { - if (!frm.is_new() && frm.doc.import_file) { - let label = - frm.doc.status === 'Pending' ? 
__('Start Import') : __('Retry'); - frm.page.set_primary_action(label, () => frm.events.start_import(frm)); - } else { - frm.page.set_primary_action(__('Save'), () => frm.save()); - } - } - }, - - update_indicators(frm) { - const indicator = frappe.get_indicator(frm.doc); - if (indicator) { - frm.page.set_indicator(indicator[0], indicator[1]); - } else { - frm.page.clear_indicator(); - } - }, - - show_import_status(frm) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - let successful_records = import_log.filter(log => log.success); - let failed_records = import_log.filter(log => !log.success); - if (successful_records.length === 0) return; - - let message; - if (failed_records.length === 0) { - let message_args = [successful_records.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records.', message_args) - : __('Successfully imported {0} record.', message_args); - } else { - message = - successful_records.length > 1 - ? __('Successfully updated {0} records.', message_args) - : __('Successfully updated {0} record.', message_args); - } - } else { - let message_args = [successful_records.length, import_log.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args) - : __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args); - } else { - message = - successful_records.length > 1 - ? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args) - : __('Successfully updated {0} record out of {1}. 
Click on Export Errored Rows, fix the errors and import again.', message_args); - } - } - frm.dashboard.set_headline(message); - }, - - show_report_error_button(frm) { - if (frm.doc.status === 'Error') { - frappe.db - .get_list('Error Log', { - filters: { method: frm.doc.name }, - fields: ['method', 'error'], - order_by: 'creation desc', - limit: 1 - }) - .then(result => { - if (result.length > 0) { - frm.add_custom_button('Report Error', () => { - let fake_xhr = { - responseText: JSON.stringify({ - exc: result[0].error - }) - }; - frappe.request.report_error(fake_xhr, {}); - }); - } - }); - } - }, - - start_import(frm) { - frm - .call({ - method: 'form_start_import', - args: { data_import: frm.doc.name }, - btn: frm.page.btn_primary - }) - .then(r => { - if (r.message === true) { - frm.disable_save(); - } - }); - }, - - download_template(frm) { - if ( - frm.data_exporter && - frm.data_exporter.doctype === frm.doc.reference_doctype - ) { - frm.data_exporter.exporting_for = frm.doc.import_type; - frm.data_exporter.dialog.show(); - } else { - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.data_exporter = new frappe.data_import.DataExporter( - frm.doc.reference_doctype, - frm.doc.import_type - ); - }); - } - }, - - reference_doctype(frm) { - frm.trigger('toggle_submit_after_import'); - }, - - toggle_submit_after_import(frm) { - frm.toggle_display('submit_after_import', false); - let doctype = frm.doc.reference_doctype; - if (doctype) { - frappe.model.with_doctype(doctype, () => { - let meta = frappe.get_meta(doctype); - frm.toggle_display('submit_after_import', meta.is_submittable); - }); - } - }, - - import_file(frm) { - frm.toggle_display('section_import_preview', frm.doc.import_file); - if (!frm.doc.import_file) { - frm.get_field('import_preview').$wrapper.empty(); - return; - } - - // load import preview - frm.get_field('import_preview').$wrapper.empty(); - $('') - .html(__('Loading import file...')) - 
.appendTo(frm.get_field('import_preview').$wrapper); - - frm - .call({ - method: 'get_preview_from_template', - args: { data_import: frm.doc.name, import_file: frm.doc.import_file }, - error_handlers: { - TimestampMismatchError() { - // ignore this error - } - } - }) - .then(r => { - let preview_data = r.message; - frm.events.show_import_preview(frm, preview_data); - frm.events.show_import_warnings(frm, preview_data); - }); - }, - - show_import_preview(frm, preview_data) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - - if ( - frm.import_preview && - frm.import_preview.doctype === frm.doc.reference_doctype - ) { - frm.import_preview.preview_data = preview_data; - frm.import_preview.import_log = import_log; - frm.import_preview.refresh(); - return; - } - - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.import_preview = new frappe.data_import.ImportPreview({ - wrapper: frm.get_field('import_preview').$wrapper, - doctype: frm.doc.reference_doctype, - preview_data, - import_log, - frm, - events: { - remap_column(changed_map) { - let template_options = JSON.parse(frm.doc.template_options || '{}'); - template_options.remap_column = template_options.remap_column || {}; - Object.assign(template_options.remap_column, changed_map); - frm.set_value('template_options', JSON.stringify(template_options)); - frm.save().then(() => frm.trigger('import_file')); - } - } - }); - }); - }, - - export_errored_rows(frm) { - open_url_post( - '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', - { - data_import_name: frm.doc.name - } - ); - }, - - show_import_warnings(frm, preview_data) { - let warnings = JSON.parse(frm.doc.template_warnings || '[]'); - warnings = warnings.concat(preview_data.warnings || []); - - frm.toggle_display('import_warnings_section', warnings.length > 0); - if (warnings.length === 0) { - frm.get_field('import_warnings').$wrapper.html(''); - return; - } - - // group warnings by row - let 
warnings_by_row = {}; - let other_warnings = []; - for (let warning of warnings) { - if (warning.row) { - warnings_by_row[warning.row] = warnings_by_row[warning.row] || []; - warnings_by_row[warning.row].push(warning); - } else { - other_warnings.push(warning); - } - } - - let html = ''; - html += Object.keys(warnings_by_row) - .map(row_number => { - let message = warnings_by_row[row_number] - .map(w => { - if (w.field) { - let label = - w.field.label + - (w.field.parent !== frm.doc.reference_doctype - ? ` (${w.field.parent})` - : ''); - return `
  • ${label}: ${w.message}
  • `; - } - return `
  • ${w.message}
  • `; - }) - .join(''); - return ` -
    -
    ${__('Row {0}', [row_number])}
    -
      ${message}
    -
    - `; - }) - .join(''); - - html += other_warnings - .map(warning => { - let header = ''; - if (warning.col) { - header = __('Column {0}', [warning.col]); - } - return ` -
    -
    ${header}
    -
    ${warning.message}
    -
    - `; - }) - .join(''); - frm.get_field('import_warnings').$wrapper.html(` -
    -
    ${html}
    -
    - `); - }, - - show_failed_logs(frm) { - frm.trigger('show_import_log'); - }, - - show_import_log(frm) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - let logs = import_log; - frm.toggle_display('import_log', false); - frm.toggle_display('import_log_section', logs.length > 0); - - if (logs.length === 0) { - frm.get_field('import_log_preview').$wrapper.empty(); - return; - } - - let rows = logs - .map(log => { - let html = ''; - if (log.success) { - if (frm.doc.import_type === 'Insert New Records') { - html = __('Successfully imported {0}', [ - `${frappe.utils.get_form_link( - frm.doc.reference_doctype, - log.docname, - true - )}` - ]); - } else { - html = __('Successfully updated {0}', [ - `${frappe.utils.get_form_link( - frm.doc.reference_doctype, - log.docname, - true - )}` - ]); - } - } else { - let messages = log.messages - .map(JSON.parse) - .map(m => { - let title = m.title ? `${m.title}` : ''; - let message = m.message ? `
    ${m.message}
    ` : ''; - return title + message; - }) - .join(''); - let id = frappe.dom.get_unique_id(); - html = `${messages} - -
    -
    -
    ${log.exception}
    -
    -
    `; - } - let indicator_color = log.success ? 'green' : 'red'; - let title = log.success ? __('Success') : __('Failure'); - - if (frm.doc.show_failed_logs && log.success) { - return ''; - } - - return ` - ${log.row_indexes.join(', ')} - -
    ${title}
    - - - ${html} - - `; - }) - .join(''); - - if (!rows && frm.doc.show_failed_logs) { - rows = ` - ${__('No failed logs')} - `; - } - - frm.get_field('import_log_preview').$wrapper.html(` - - - - - - - ${rows} -
    ${__('Row Number')}${__('Status')}${__('Message')}
    - `); - }, - - show_missing_link_values(frm, missing_link_values) { - let can_be_created_automatically = missing_link_values.every( - d => d.has_one_mandatory_field - ); - - let html = missing_link_values - .map(d => { - let doctype = d.doctype; - let values = d.missing_values; - return ` -
    ${doctype}
    -
      ${values.map(v => `
    • ${v}
    • `).join('')}
    - `; - }) - .join(''); - - if (can_be_created_automatically) { - // prettier-ignore - let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?'); - frappe.confirm(message + html, () => { - frm - .call('create_missing_link_values', { - missing_link_values - }) - .then(r => { - let records = r.message; - frappe.msgprint( - __('Created {0} records successfully.', [records.length]) - ); - }); - }); - } else { - frappe.msgprint( - // prettier-ignore - __('The following records needs to be created before we can import your file.') + html - ); - } - } -}); diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.json b/frappe/core/doctype/data_import_beta/data_import_beta.json deleted file mode 100644 index 8876d2246a..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.json +++ /dev/null @@ -1,206 +0,0 @@ -{ - "actions": [], - "autoname": "format:{reference_doctype} Import on {creation}", - "beta": 1, - "creation": "2019-08-04 14:16:08.318714", - "doctype": "DocType", - "editable_grid": 1, - "engine": "InnoDB", - "field_order": [ - "reference_doctype", - "import_type", - "download_template", - "import_file", - "column_break_5", - "status", - "submit_after_import", - "mute_emails", - "template_options", - "import_warnings_section", - "template_warnings", - "import_warnings", - "section_import_preview", - "import_preview", - "import_log_section", - "import_log", - "show_failed_logs", - "import_log_preview" - ], - "fields": [ - { - "fieldname": "reference_doctype", - "fieldtype": "Link", - "in_list_view": 1, - "label": "Document Type", - "options": "DocType", - "reqd": 1, - "set_only_once": 1, - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "import_type", - "fieldtype": "Select", - "in_list_view": 1, - "label": "Import Type", - "options": "\nInsert New Records\nUpdate Existing Records", - "reqd": 1, - 
"set_only_once": 1, - "show_days": 1, - "show_seconds": 1 - }, - { - "depends_on": "eval:!doc.__islocal", - "fieldname": "import_file", - "fieldtype": "Attach", - "in_list_view": 1, - "label": "Import File", - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "import_preview", - "fieldtype": "HTML", - "label": "Import Preview", - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "section_import_preview", - "fieldtype": "Section Break", - "label": "Preview", - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "column_break_5", - "fieldtype": "Column Break", - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "template_options", - "fieldtype": "Code", - "hidden": 1, - "label": "Template Options", - "options": "JSON", - "read_only": 1, - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "import_log", - "fieldtype": "Code", - "label": "Import Log", - "options": "JSON", - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "import_log_section", - "fieldtype": "Section Break", - "label": "Import Log", - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "import_log_preview", - "fieldtype": "HTML", - "label": "Import Log Preview", - "show_days": 1, - "show_seconds": 1 - }, - { - "default": "Pending", - "fieldname": "status", - "fieldtype": "Select", - "hidden": 1, - "label": "Status", - "options": "Pending\nSuccess\nPartial Success\nError", - "read_only": 1, - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "template_warnings", - "fieldtype": "Code", - "hidden": 1, - "label": "Template Warnings", - "options": "JSON", - "show_days": 1, - "show_seconds": 1 - }, - { - "default": "0", - "fieldname": "submit_after_import", - "fieldtype": "Check", - "label": "Submit After Import", - "set_only_once": 1, - "show_days": 1, - "show_seconds": 1 - }, - { - "fieldname": "import_warnings_section", - "fieldtype": "Section Break", - "label": "Warnings", - "show_days": 1, - "show_seconds": 1 - }, - { - 
"fieldname": "import_warnings", - "fieldtype": "HTML", - "label": "Import Warnings", - "show_days": 1, - "show_seconds": 1 - }, - { - "depends_on": "reference_doctype", - "fieldname": "download_template", - "fieldtype": "Button", - "label": "Download Template", - "show_days": 1, - "show_seconds": 1 - }, - { - "default": "1", - "fieldname": "mute_emails", - "fieldtype": "Check", - "label": "Don't Send Emails", - "set_only_once": 1, - "show_days": 1, - "show_seconds": 1 - }, - { - "default": "0", - "fieldname": "show_failed_logs", - "fieldtype": "Check", - "label": "Show Failed Logs", - "show_days": 1, - "show_seconds": 1 - } - ], - "hide_toolbar": 1, - "links": [], - "modified": "2020-05-28 22:11:38.266208", - "modified_by": "Administrator", - "module": "Core", - "name": "Data Import Beta", - "owner": "Administrator", - "permissions": [ - { - "create": 1, - "delete": 1, - "email": 1, - "export": 1, - "print": 1, - "read": 1, - "report": 1, - "role": "System Manager", - "share": 1, - "write": 1 - } - ], - "sort_field": "modified", - "sort_order": "DESC", - "track_changes": 1 -} \ No newline at end of file diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.py b/frappe/core/doctype/data_import_beta/data_import_beta.py deleted file mode 100644 index 3c6885360b..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.py +++ /dev/null @@ -1,125 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and contributors -# For license information, please see license.txt - -from __future__ import unicode_literals -import frappe -from frappe.model.document import Document - -from frappe.core.doctype.data_import_beta.importer import Importer -from frappe.core.doctype.data_import_beta.exporter import Exporter -from frappe.core.page.background_jobs.background_jobs import get_info -from frappe.utils.background_jobs import enqueue -from frappe import _ - - -class DataImportBeta(Document): - def validate(self): - doc_before_save = 
self.get_doc_before_save() - if not self.import_file or ( - doc_before_save and doc_before_save.import_file != self.import_file - ): - self.template_options = "" - self.template_warnings = "" - - if self.import_file: - # validate template - self.get_importer() - - def get_preview_from_template(self, import_file=None): - if import_file: - self.import_file = import_file - - if not self.import_file: - return - - i = self.get_importer() - return i.get_data_for_import_preview() - - def start_import(self): - if frappe.utils.scheduler.is_scheduler_inactive() and not frappe.flags.in_test: - frappe.throw( - _("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive") - ) - - enqueued_jobs = [d.get("job_name") for d in get_info()] - - if self.name not in enqueued_jobs: - enqueue( - start_import, - queue="default", - timeout=6000, - event="data_import", - job_name=self.name, - data_import=self.name, - now=frappe.conf.developer_mode or frappe.flags.in_test, - ) - return True - - return False - - def export_errored_rows(self): - return self.get_importer().export_errored_rows() - - def get_importer(self): - return Importer(self.reference_doctype, data_import=self) - - -@frappe.whitelist() -def get_preview_from_template(data_import, import_file): - return frappe.get_doc("Data Import Beta", data_import).get_preview_from_template(import_file) - - -@frappe.whitelist() -def form_start_import(data_import): - return frappe.get_doc("Data Import Beta", data_import).start_import() - - -def start_import(data_import): - """This method runs in background job""" - data_import = frappe.get_doc("Data Import Beta", data_import) - try: - i = Importer(data_import.reference_doctype, data_import=data_import) - i.import_data() - except: - frappe.db.rollback() - data_import.db_set("status", "Error") - frappe.log_error(title=data_import.name) - finally: - frappe.flags.in_import = False - - frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name}) - - 
-@frappe.whitelist() -def download_template( - doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV" -): - """ - Download template from Exporter - :param doctype: Document Type - :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']} - :param export_records=None: One of 'all', 'by_filter', 'blank_template' - :param export_filters: Filter dict - :param file_type: File type to export into - """ - - export_fields = frappe.parse_json(export_fields) - export_filters = frappe.parse_json(export_filters) - export_data = export_records != "blank_template" - - e = Exporter( - doctype, - export_fields=export_fields, - export_data=export_data, - export_filters=export_filters, - file_type=file_type, - export_page_length=5 if export_records == "5_records" else None, - ) - e.build_response() - - -@frappe.whitelist() -def download_errored_template(data_import_name): - data_import = frappe.get_doc("Data Import Beta", data_import_name) - data_import.export_errored_rows() diff --git a/frappe/core/doctype/data_import_beta/data_import_beta_list.js b/frappe/core/doctype/data_import_beta/data_import_beta_list.js deleted file mode 100644 index 58953d2531..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta_list.js +++ /dev/null @@ -1,40 +0,0 @@ -let imports_in_progress = []; - -frappe.listview_settings['Data Import Beta'] = { - onload(listview) { - frappe.realtime.on('data_import_progress', data => { - if (!imports_in_progress.includes(data.data_import)) { - imports_in_progress.push(data.data_import); - } - }); - frappe.realtime.on('data_import_refresh', data => { - imports_in_progress = imports_in_progress.filter( - d => d !== data.data_import - ); - listview.refresh(); - }); - }, - get_indicator: function(doc) { - var colors = { - 'Pending': 'orange', - 'Partial Success': 'orange', - 'Success': 'green', - 'In Progress': 'orange', - 'Error': 'red' - }; - let 
status = doc.status; - if (imports_in_progress.includes(doc.name)) { - status = 'In Progress'; - } - return [__(status), colors[status], 'status,=,' + doc.status]; - }, - formatters: { - import_type(value) { - return { - 'Insert New Records': __('Insert'), - 'Update Existing Records': __('Update') - }[value]; - } - }, - hide_name_column: true -}; diff --git a/frappe/core/doctype/data_import_beta/importer.py b/frappe/core/doctype/data_import_beta/importer.py deleted file mode 100644 index 74995f5dfe..0000000000 --- a/frappe/core/doctype/data_import_beta/importer.py +++ /dev/null @@ -1,1058 +0,0 @@ -# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors -# MIT License. See license.txt - -from __future__ import unicode_literals -import os -import io -import frappe -import timeit -import json -from datetime import datetime -from frappe import _ -from frappe.utils import cint, flt, update_progress_bar, cstr -from frappe.utils.csvutils import read_csv_content -from frappe.utils.xlsxutils import ( - read_xlsx_file_from_attached_file, - read_xls_file_from_attached_file, -) -from frappe.model import no_value_fields, table_fields as table_fieldtypes - -INVALID_VALUES = ("", None) -MAX_ROWS_IN_PREVIEW = 10 -INSERT = "Insert New Records" -UPDATE = "Update Existing Records" - - -class Importer: - def __init__(self, doctype, data_import=None, import_type=None, console=False): - self.doctype = doctype - self.console = console - - self.data_import = data_import - if not self.data_import: - self.data_import = frappe.get_doc(doctype="Data Import Beta") - if import_type: - self.data_import.import_type = import_type - - self.template_options = frappe.parse_json(self.data_import.template_options or "{}") - self.import_type = self.data_import.import_type - - self.import_file = ImportFile( - doctype, data_import.import_file, self.template_options, self.import_type - ) - - def get_data_for_import_preview(self): - return self.import_file.get_data_for_import_preview() - - def 
before_import(self): - # set user lang for translations - frappe.cache().hdel("lang", frappe.session.user) - frappe.set_user_lang(frappe.session.user) - - # set flags - frappe.flags.in_import = True - frappe.flags.mute_emails = self.data_import.mute_emails - - self.data_import.db_set("template_warnings", "") - - def import_data(self): - self.before_import() - - # parse docs from rows - payloads = self.import_file.get_payloads_for_import() - - # dont import if there are non-ignorable warnings - warnings = self.import_file.get_warnings() - warnings = [w for w in warnings if w.get("type") != "info"] - - if warnings: - if self.console: - self.print_grouped_warnings(warnings) - else: - self.data_import.db_set("template_warnings", json.dumps(warnings)) - return - - # setup import log - if self.data_import.import_log: - import_log = frappe.parse_json(self.data_import.import_log) - else: - import_log = [] - - # remove previous failures from import log - import_log = [log for log in import_log if log.get("success")] - - # get successfully imported rows - imported_rows = [] - for log in import_log: - log = frappe._dict(log) - if log.success: - imported_rows += log.row_indexes - - # start import - total_payload_count = len(payloads) - batch_size = frappe.conf.data_import_batch_size or 1000 - - for batch_index, batched_payloads in enumerate( - frappe.utils.create_batch(payloads, batch_size) - ): - for i, payload in enumerate(batched_payloads): - doc = payload.doc - row_indexes = [row.row_number for row in payload.rows] - current_index = (i + 1) + (batch_index * batch_size) - - if set(row_indexes).intersection(set(imported_rows)): - print("Skipping imported rows", row_indexes) - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "skipping": True, - "data_import": self.data_import.name, - }, - ) - continue - - try: - start = timeit.default_timer() - doc = self.process_doc(doc) - 
processing_time = timeit.default_timer() - start - eta = self.get_eta(current_index, total_payload_count, processing_time) - - if self.console: - update_progress_bar( - "Importing {0} records".format(total_payload_count), - current_index, - total_payload_count, - ) - elif total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "docname": doc.name, - "data_import": self.data_import.name, - "success": True, - "row_indexes": row_indexes, - "eta": eta, - }, - ) - - import_log.append( - frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes) - ) - # commit after every successful import - frappe.db.commit() - - except Exception: - import_log.append( - frappe._dict( - success=False, - exception=frappe.get_traceback(), - messages=frappe.local.message_log, - row_indexes=row_indexes, - ) - ) - frappe.clear_messages() - # rollback if exception - frappe.db.rollback() - - # set status - failures = [log for log in import_log if not log.get("success")] - if len(failures) == total_payload_count: - status = "Pending" - elif len(failures) > 0: - status = "Partial Success" - else: - status = "Success" - - if self.console: - self.print_import_log(import_log) - else: - self.data_import.db_set("status", status) - self.data_import.db_set("import_log", json.dumps(import_log)) - - self.after_import() - - return import_log - - def after_import(self): - frappe.flags.in_import = False - frappe.flags.mute_emails = False - - def process_doc(self, doc): - if self.import_type == INSERT: - return self.insert_record(doc) - elif self.import_type == UPDATE: - return self.update_record(doc) - - def insert_record(self, doc): - meta = frappe.get_meta(self.doctype) - new_doc = frappe.new_doc(self.doctype) - new_doc.update(doc) - - if (meta.autoname or "").lower() != "prompt": - # name can only be set directly if autoname is prompt - new_doc.set("name", None) - - new_doc.flags.updater_reference = { - 
"doctype": self.data_import.doctype, - "docname": self.data_import.name, - "label": _("via Data Import"), - } - - new_doc.insert() - if meta.is_submittable and self.data_import.submit_after_import: - new_doc.submit() - return new_doc - - def update_record(self, doc): - id_field = get_id_field(self.doctype) - existing_doc = frappe.get_doc(self.doctype, doc.get(id_field.fieldname)) - existing_doc.flags.updater_reference = { - "doctype": self.data_import.doctype, - "docname": self.data_import.name, - "label": _("via Data Import"), - } - existing_doc.update(doc) - existing_doc.save() - return existing_doc - - def get_eta(self, current, total, processing_time): - self.last_eta = getattr(self, "last_eta", 0) - remaining = total - current - eta = processing_time * remaining - if not self.last_eta or eta < self.last_eta: - self.last_eta = eta - return self.last_eta - - def export_errored_rows(self): - from frappe.utils.csvutils import build_csv_response - - if not self.data_import: - return - - import_log = frappe.parse_json(self.data_import.import_log or "[]") - failures = [log for log in import_log if not log.get("success")] - row_indexes = [] - for f in failures: - row_indexes.extend(f.get("row_indexes", [])) - - # de duplicate - row_indexes = list(set(row_indexes)) - row_indexes.sort() - - header_row = [col.header_title for col in self.import_file.columns] - rows = [header_row] - rows += [row.data for row in self.import_file.data if row.row_number in row_indexes] - - build_csv_response(rows, self.doctype) - - -class ImportFile: - def __init__(self, doctype, file, template_options=None, import_type=None): - self.doctype = doctype - self.template_options = template_options or frappe._dict( - column_to_field_map=frappe._dict() - ) - self.column_to_field_map = self.template_options.column_to_field_map - self.import_type = import_type - - self.file_doc = self.file_path = None - if isinstance(file, frappe.string_types): - if frappe.db.exists("File", {"file_url": file}): - 
self.file_doc = frappe.get_doc("File", {"file_url": file}) - elif os.path.exists(file): - self.file_path = file - - if not self.file_doc and not self.file_path: - frappe.throw(_("Invalid template file for import")) - - self.raw_data = self.get_data_from_template_file() - self.parse_data_from_template() - - def get_data_from_template_file(self): - content = None - extension = None - - if self.file_doc: - parts = self.file_doc.get_extension() - extension = parts[1] - content = self.file_doc.get_content() - extension = extension.lstrip(".") - - elif self.file_path: - content, extension = self.read_file(self.file_path) - - if not content: - frappe.throw(_("Invalid or corrupted content for import")) - - if not extension: - extension = "csv" - - if content: - return self.read_content(content, extension) - - def parse_data_from_template(self): - header = None - data = [] - - for i, row in enumerate(self.raw_data): - if all(v in INVALID_VALUES for v in row): - # empty row - continue - - if not header: - header = Header(i, row, self.doctype, self.raw_data, self.column_to_field_map) - else: - row_obj = Row(i, row, self.doctype, header, self.import_type) - data.append(row_obj) - - self.header = header - self.columns = self.header.columns - self.data = data - - if len(data) < 1: - frappe.throw( - _("Import template should contain a Header and atleast one row."), - title=_("Template Error"), - ) - - def get_data_for_import_preview(self): - """Adds a serial number column as the first column""" - - columns = [frappe._dict({"header_title": "Sr. 
No", "skip_import": True})] - columns += [col.as_dict() for col in self.columns] - for col in columns: - # only pick useful fields in docfields to minimise the payload - if col.df: - col.df = { - 'fieldtype': col.df.fieldtype, - 'fieldname': col.df.fieldname, - 'label': col.df.label, - 'options': col.df.options, - 'parent': col.df.parent, - 'reqd': col.df.reqd, - 'default': col.df.default, - 'read_only': col.df.read_only - } - - data = [[row.row_number] + row.as_list() for row in self.data] - - warnings = self.get_warnings() - - out = frappe._dict() - out.data = data - out.columns = columns - out.warnings = warnings - total_number_of_rows = len(out.data) - if total_number_of_rows > MAX_ROWS_IN_PREVIEW: - out.data = out.data[:MAX_ROWS_IN_PREVIEW] - out.max_rows_exceeded = True - out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW - out.total_number_of_rows = total_number_of_rows - return out - - def get_payloads_for_import(self): - payloads = [] - # make a copy - data = list(self.data) - while data: - doc, rows, data = self.parse_next_row_for_import(data) - payloads.append(frappe._dict(doc=doc, rows=rows)) - return payloads - - def parse_next_row_for_import(self, data): - """ - Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. - Returns the doc, rows, and data without the rows. 
- """ - doctypes = self.header.doctypes - - # first row is included by default - first_row = data[0] - rows = [first_row] - - # if there are child doctypes, find the subsequent rows - if len(doctypes) > 1: - # subsequent rows either dont have any parent value set - # or have the same value as the parent row - # we include a row if either of conditions match - parent_column_indexes = self.header.get_column_indexes(self.doctype) - parent_row_values = first_row.get_values(parent_column_indexes) - - data_without_first_row = data[1:] - for row in data_without_first_row: - row_values = row.get_values(parent_column_indexes) - # if the row is blank, it's a child row doc - if all([v in INVALID_VALUES for v in row_values]): - rows.append(row) - continue - # if the row has same values as parent row, it's a child row doc - if row_values == parent_row_values: - rows.append(row) - continue - # if any of those conditions dont match, it's the next doc - break - - parent_doc = None - for row in rows: - for doctype, table_df in doctypes: - if doctype == self.doctype and not parent_doc: - parent_doc = row.parse_doc(doctype) - - if doctype != self.doctype and table_df: - child_doc = row.parse_doc(doctype, parent_doc, table_df) - parent_doc[table_df.fieldname] = parent_doc.get(table_df.fieldname, []) - parent_doc[table_df.fieldname].append(child_doc) - - doc = parent_doc - # check if there is atleast one row for mandatory table fields - meta = frappe.get_meta(self.doctype) - mandatory_table_fields = [ - df - for df in meta.fields - if df.fieldtype in table_fieldtypes - and df.reqd - and len(doc.get(df.fieldname, [])) == 0 - ] - if len(mandatory_table_fields) == 1: - self.warnings.append( - { - "row": first_row.row_number, - "message": _("There should be atleast one row for {0} table").format( - mandatory_table_fields[0].label - ), - } - ) - elif mandatory_table_fields: - fields_string = ", ".join([df.label for df in mandatory_table_fields]) - message = _("There should be atleast one 
row for the following tables: {0}").format( - fields_string - ) - self.warnings.append({"row": first_row.row_number, "message": message}) - - return doc, rows, data[len(rows) :] - - def get_warnings(self): - warnings = [] - for col in self.header.columns: - warnings += col.warnings - - for row in self.data: - warnings += row.warnings - - return warnings - - ###### - - def read_file(self, file_path): - extn = file_path.split(".")[1] - - file_content = None - with io.open(file_path, mode="rb") as f: - file_content = f.read() - - return file_content, extn - - def read_content(self, content, extension): - error_title = _("Template Error") - if extension not in ("csv", "xlsx", "xls"): - frappe.throw( - _("Import template should be of type .csv, .xlsx or .xls"), title=error_title - ) - - if extension == "csv": - data = read_csv_content(content) - elif extension == "xlsx": - data = read_xlsx_file_from_attached_file(fcontent=content) - elif extension == "xls": - data = read_xls_file_from_attached_file(content) - - return data - - -class Row: - link_values_exist_map = {} - - def __init__(self, index, row, doctype, header, import_type): - self.index = index - self.row_number = index + 1 - self.doctype = doctype - self.data = row - self.header = header - self.import_type = import_type - self.warnings = [] - - len_row = len(self.data) - len_columns = len(self.header.columns) - if len_row != len_columns: - less_than_columns = len_row < len_columns - message = ( - "Row has less values than columns" - if less_than_columns - else "Row has more values than columns" - ) - self.warnings.append( - {"row": self.row_number, "message": message,} - ) - - def parse_doc(self, doctype, parent_doc=None, table_df=None): - col_indexes = self.header.get_column_indexes(doctype, table_df) - values = self.get_values(col_indexes) - columns = self.header.get_columns(col_indexes) - doc = self._parse_doc(doctype, columns, values, parent_doc, table_df) - return doc - - def _parse_doc(self, doctype, 
columns, values, parent_doc=None, table_df=None): - doc = frappe._dict() - if self.import_type == INSERT: - # new_doc returns a dict with default values set - doc = frappe.new_doc( - doctype, - parent_doc=parent_doc, - parentfield=table_df.fieldname if table_df else None, - as_dict=True, - ) - - # remove standard fields and __islocal - for key in frappe.model.default_fields + ("__islocal",): - doc.pop(key, None) - - for col, value in zip(columns, values): - df = col.df - if value in INVALID_VALUES: - value = None - - if value is not None: - value = self.validate_value(value, col) - - if value is not None: - doc[df.fieldname] = self.parse_value(value, col) - - is_table = frappe.get_meta(doctype).istable - is_update = self.import_type == UPDATE - if is_table and is_update and doc.get("name") in INVALID_VALUES: - # for table rows being inserted in update - # create a new doc with defaults set - new_doc = frappe.new_doc(doctype, as_dict=True) - new_doc.update(doc) - doc = new_doc - - self.check_mandatory_fields(doctype, doc, table_df) - return doc - - def validate_value(self, value, col): - df = col.df - if df.fieldtype == "Select": - select_options = df.get_select_options() - if select_options and value not in select_options: - options_string = ", ".join([frappe.bold(d) for d in select_options]) - msg = _("Value must be one of {0}").format(options_string) - self.warnings.append( - { - "row": self.row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return - - elif df.fieldtype == "Link": - exists = self.link_exists(value, df) - if not exists: - msg = _("Value {0} missing for {1}").format( - frappe.bold(value), frappe.bold(df.options) - ) - self.warnings.append( - { - "row": self.row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return - elif df.fieldtype in ["Date", "Datetime"]: - value = self.get_date(value, col) - if isinstance(value, frappe.string_types): - # value was not parsed as 
datetime object - self.warnings.append( - { - "row": self.row_number, - "col": col.column_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": _("Value {0} must be in {1} format").format( - frappe.bold(value), frappe.bold(get_user_format(col.date_format)) - ), - } - ) - return - - return value - - def link_exists(self, value, df): - key = df.options + "::" + value - if Row.link_values_exist_map.get(key) is None: - Row.link_values_exist_map[key] = frappe.db.exists(df.options, value) - return Row.link_values_exist_map.get(key) - - def parse_value(self, value, col): - df = col.df - if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: - return value - - value = cstr(value) - - # convert boolean values to 0 or 1 - valid_check_values = ["t", "f", "true", "false", "yes", "no", "y", "n"] - if df.fieldtype == "Check" and value.lower().strip() in valid_check_values: - value = value.lower().strip() - value = 1 if value in ["t", "true", "y", "yes"] else 0 - - if df.fieldtype in ["Int", "Check"]: - value = cint(value) - elif df.fieldtype in ["Float", "Percent", "Currency"]: - value = flt(value) - elif df.fieldtype in ["Date", "Datetime"]: - value = self.get_date(value, col) - - return value - - def get_date(self, value, column): - date_format = column.date_format - if date_format: - try: - return datetime.strptime(value, date_format) - except ValueError: - # ignore date values that dont match the format - # import will break for these values later - pass - return value - - def check_mandatory_fields(self, doctype, doc, table_df=None): - """If import type is Insert: - Check for mandatory fields (except table fields) in doc - if import type is Update: - Check for name field or autoname field in doc - """ - meta = frappe.get_meta(doctype) - if self.import_type == UPDATE: - if meta.istable: - # when updating records with table rows, - # there are two scenarios: - # 1. 
if row 'name' is provided in the template - # the table row will be updated - # 2. if row 'name' is not provided - # then a new row will be added - # so we dont need to check for mandatory - return - - # for update, only ID (name) field is mandatory - id_field = get_id_field(doctype) - if doc.get(id_field.fieldname) in INVALID_VALUES: - self.warnings.append( - { - "row": self.row_number, - "message": _("{0} is a mandatory field").format(id_field.label), - } - ) - return - - fields = [ - df - for df in meta.fields - if df.fieldtype not in table_fieldtypes - and df.reqd - and doc.get(df.fieldname) in INVALID_VALUES - ] - - if not fields: - return - - def get_field_label(df): - return "{0}{1}".format(df.label, " ({})".format(table_df.label) if table_df else "") - - if len(fields) == 1: - field_label = get_field_label(fields[0]) - self.warnings.append( - { - "row": self.row_number, - "message": _("{0} is a mandatory field").format(frappe.bold(field_label)), - } - ) - else: - fields_string = ", ".join([frappe.bold(get_field_label(df)) for df in fields]) - self.warnings.append( - { - "row": self.row_number, - "message": _("{0} are mandatory fields").format(fields_string), - } - ) - - def get_values(self, indexes): - return [self.data[i] for i in indexes] - - def get(self, index): - return self.data[index] - - def as_list(self): - return self.data - - -class Header(Row): - def __init__(self, index, row, doctype, raw_data, column_to_field_map): - self.index = index - self.row_number = index + 1 - self.data = row - self.doctype = doctype - - self.seen = [] - self.columns = [] - - for j, header in enumerate(row): - column_values = [get_item_at_index(r, j) for r in raw_data] - column = Column( - j, header, self.doctype, column_values, column_to_field_map.get(header), self.seen - ) - self.seen.append(header) - self.columns.append(column) - - doctypes = [] - for col in self.columns: - if not col.df: - continue - if col.df.parent == self.doctype: - 
doctypes.append((col.df.parent, None)) - else: - doctypes.append((col.df.parent, col.df.child_table_df)) - - self.doctypes = sorted( - list(set(doctypes)), key=lambda x: -1 if x[0] == self.doctype else 1 - ) - - def get_column_indexes(self, doctype, tablefield=None): - def is_table_field(df): - if tablefield: - return df.child_table_df.fieldname == tablefield.fieldname - return True - - return [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == doctype and is_table_field(col.df) - ] - - def get_columns(self, indexes): - return [self.columns[i] for i in indexes] - - - -class Column: - seen = [] - fields_column_map = {} - - def __init__(self, index, header, doctype, column_values, map_to_field=None, seen=[]): - self.index = index - self.column_number = index + 1 - self.doctype = doctype - self.header_title = header - self.column_values = column_values - self.map_to_field = map_to_field - self.seen = seen - - self.date_format = None - self.df = None - self.skip_import = None - self.warnings = [] - - self.meta = frappe.get_meta(doctype) - self.parse() - self.parse_date_format() - - def parse(self): - header_title = self.header_title - column_number = str(self.column_number) - skip_import = False - - if self.map_to_field and self.map_to_field != "Don't Import": - df = get_df_for_column_header(self.doctype, self.map_to_field) - if df: - self.warnings.append( - { - "message": _("Mapping column {0} to field {1}").format( - frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) - ), - "type": "info", - } - ) - else: - self.warnings.append( - { - "message": _("Could not map column {0} to field {1}").format( - column_number, self.map_to_field - ), - "type": "info", - } - ) - else: - df = get_df_for_column_header(self.doctype, header_title) - # df = df_by_labels_and_fieldnames.get(header_title) - - if not df: - skip_import = True - else: - skip_import = False - - if header_title in self.seen: - self.warnings.append( - 
{ - "col": column_number, - "message": _("Skipping Duplicate Column {0}").format(frappe.bold(header_title)), - "type": "info", - } - ) - df = None - skip_import = True - elif self.map_to_field == "Don't Import": - skip_import = True - self.warnings.append( - { - "col": column_number, - "message": _("Skipping column {0}").format(frappe.bold(header_title)), - "type": "info", - } - ) - elif header_title and not df: - self.warnings.append( - { - "col": column_number, - "message": _("Cannot match column {0} with any field").format( - frappe.bold(header_title) - ), - "type": "info", - } - ) - elif not header_title and not df: - self.warnings.append( - {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} - ) - - self.df = df - self.skip_import = skip_import - - def parse_date_format(self): - if self.df and self.df.fieldtype in ("Date", "Time", "Datetime"): - self.date_format = self.guess_date_format_for_column() - - def guess_date_format_for_column(self): - """ Guesses date format for a column by parsing all the values in the column, - getting the date format and then returning the one which has the maximum frequency - """ - - date_formats = [ - frappe.utils.guess_date_format(d) for d in self.column_values if isinstance(d, str) - ] - date_formats = [d for d in date_formats if d] - if not date_formats: - return - - unique_date_formats = set(date_formats) - max_occurred_date_format = max(unique_date_formats, key=date_formats.count) - - if len(unique_date_formats) > 1: - # fmt: off - message = _("The column {0} has {1} different date formats. Automatically setting {2} as the default format as it is the most common. 
Please change other values in this column to this format.") - # fmt: on - user_date_format = get_user_format(max_occurred_date_format) - self.warnings.append( - { - "col": self.column_number, - "message": message.format( - frappe.bold(self.header_title), - len(unique_date_formats), - frappe.bold(user_date_format), - ), - "type": "info", - } - ) - - return max_occurred_date_format - - def as_dict(self): - d = frappe._dict() - d.index = self.index - d.column_number = self.column_number - d.doctype = self.doctype - d.header_title = self.header_title - d.map_to_field = self.map_to_field - d.date_format = self.date_format - d.df = self.df - d.skip_import = self.skip_import - d.warnings = self.warnings - return d - - -def build_fields_dict_for_column_matching(parent_doctype): - """ - Build a dict with various keys to match with column headers and value as docfield - The keys can be label or fieldname - { - 'Customer': df1, - 'customer': df1, - 'Due Date': df2, - 'due_date': df2, - 'Item Code (Sales Invoice Item)': df3, - 'Sales Invoice Item:item_code': df3, - } - """ - - def get_standard_fields(doctype): - meta = frappe.get_meta(doctype) - if meta.istable: - standard_fields = [ - {"label": "Parent", "fieldname": "parent"}, - {"label": "Parent Type", "fieldname": "parenttype"}, - {"label": "Parent Field", "fieldname": "parentfield"}, - {"label": "Row Index", "fieldname": "idx"}, - ] - else: - standard_fields = [ - {"label": "Owner", "fieldname": "owner"}, - {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, - ] - - out = [] - for df in standard_fields: - df = frappe._dict(df) - df.parent = doctype - out.append(df) - return out - - parent_meta = frappe.get_meta(parent_doctype) - out = {} - - # doctypes and fieldname if it is a child doctype - doctypes = [[parent_doctype, None]] + [ - [df.options, df] for df in parent_meta.get_table_fields() - ] - - for doctype, table_df in doctypes: - # name field - name_by_label = ( - "ID" if doctype == 
parent_doctype else "ID ({0})".format(table_df.label) - ) - name_by_fieldname = ( - "name" if doctype == parent_doctype else "{0}.name".format(table_df.fieldname) - ) - name_df = frappe._dict( - { - "fieldtype": "Data", - "fieldname": "name", - "label": "ID", - "reqd": 1, # self.import_type == UPDATE, - "parent": doctype, - } - ) - - if doctype != parent_doctype: - name_df.is_child_table_field = True - name_df.child_table_df = table_df - - out[name_by_label] = name_df - out[name_by_fieldname] = name_df - - # other fields - fields = get_standard_fields(doctype) + frappe.get_meta(doctype).fields - for df in fields: - fieldtype = df.fieldtype or "Data" - parent = df.parent or parent_doctype - if fieldtype not in no_value_fields: - if parent_doctype == doctype: - # for parent doctypes keys will be - # Label - # label - # Label (label) - if not out.get(df.label): - # if Label is already set, don't set it again - # in case of duplicate column headers - out[df.label] = df - out[df.fieldname] = df - label_with_fieldname = "{0} ({1})".format(df.label, df.fieldname) - out[label_with_fieldname] = df - else: - # in case there are multiple table fields with the same doctype - # for child doctypes keys will be - # Label (Table Field Label) - # table_field.fieldname - table_fields = parent_meta.get( - "fields", {"fieldtype": ["in", table_fieldtypes], "options": parent} - ) - for table_field in table_fields: - by_label = "{0} ({1})".format(df.label, table_field.label) - by_fieldname = "{0}.{1}".format(table_field.fieldname, df.fieldname) - - # create a new df object to avoid mutation problems - if isinstance(df, dict): - new_df = frappe._dict(df.copy()) - else: - new_df = df.as_dict() - - new_df.is_child_table_field = True - new_df.child_table_df = table_field - out[by_label] = new_df - out[by_fieldname] = new_df - - # if autoname is based on field - # add an entry for "ID (Autoname Field)" - autoname_field = get_autoname_field(parent_doctype) - if autoname_field: - out["ID 
({})".format(autoname_field.label)] = autoname_field - # ID field should also map to the autoname field - out["ID"] = autoname_field - out["name"] = autoname_field - - return out - - -def get_df_for_column_header(doctype, header): - def build_fields_dict_for_doctype(): - return build_fields_dict_for_column_matching(doctype) - - df_by_labels_and_fieldname = frappe.cache().hget( - "data_import_column_header_map", doctype, generator=build_fields_dict_for_doctype - ) - return df_by_labels_and_fieldname.get(header) - - -# utilities - -def get_id_field(doctype): - autoname_field = get_autoname_field(doctype) - if autoname_field: - return autoname_field - return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) - -def get_autoname_field(doctype): - meta = frappe.get_meta(doctype) - if meta.autoname and meta.autoname.startswith("field:"): - fieldname = meta.autoname[len("field:") :] - return meta.get_field(fieldname) - - -def get_item_at_index(_list, i, default=None): - try: - a = _list[i] - except IndexError: - a = default - return a - - -def get_user_format(date_format): - return ( - date_format.replace("%Y", "yyyy") - .replace("%y", "yy") - .replace("%m", "mm") - .replace("%d", "dd") - ) diff --git a/frappe/core/doctype/data_import_beta/__init__.py b/frappe/core/doctype/data_import_legacy/__init__.py similarity index 100% rename from frappe/core/doctype/data_import_beta/__init__.py rename to frappe/core/doctype/data_import_legacy/__init__.py diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.js b/frappe/core/doctype/data_import_legacy/data_import_legacy.js new file mode 100644 index 0000000000..9a301af76e --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.js @@ -0,0 +1,324 @@ +// Copyright (c) 2017, Frappe Technologies and contributors +// For license information, please see license.txt + +frappe.ui.form.on('Data Import Legacy', { + onload: function(frm) { + if (frm.doc.__islocal) { + 
frm.set_value("action", ""); + } + + frappe.call({ + method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes", + callback: function (r) { + let importable_doctypes = r.message; + frm.set_query("reference_doctype", function () { + return { + "filters": { + "issingle": 0, + "istable": 0, + "name": ['in', importable_doctypes] + } + }; + }); + } + }), + + // should never check public + frm.fields_dict["import_file"].df.is_private = 1; + + frappe.realtime.on("data_import_progress", function(data) { + if (data.data_import === frm.doc.name) { + if (data.reload && data.reload === true) { + frm.reload_doc(); + } + if (data.progress) { + let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar"); + if (progress_bar) { + $(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped"); + $(progress_bar).css("width", data.progress + "%"); + } + } + } + }); + }, + + reference_doctype: function(frm){ + if (frm.doc.reference_doctype) { + frappe.model.with_doctype(frm.doc.reference_doctype); + } + }, + + refresh: function(frm) { + frm.disable_save(); + frm.dashboard.clear_headline(); + if (frm.doc.reference_doctype && !frm.doc.import_file) { + frm.page.set_indicator(__('Attach file'), 'orange'); + } else { + if (frm.doc.import_status) { + const listview_settings = frappe.listview_settings['Data Import Legacy']; + const indicator = listview_settings.get_indicator(frm.doc); + + frm.page.set_indicator(indicator[0], indicator[1]); + + if (frm.doc.import_status === "In Progress") { + frm.dashboard.add_progress("Data Import Progress", "0"); + frm.set_read_only(); + frm.refresh_fields(); + } + } + } + + if (frm.doc.reference_doctype) { + frappe.model.with_doctype(frm.doc.reference_doctype); + } + + if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") { + frm.set_df_property("action", "read_only", 1); + } + + frm.add_custom_button(__("Help"), function() { + 
frappe.help.show_video("6wiriRKPhmg"); + }); + + if (frm.doc.reference_doctype && frm.doc.docstatus === 0) { + frm.add_custom_button(__("Download template"), function() { + frappe.data_import.download_dialog(frm).show(); + }); + } + + if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows && + frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) { + frm.page.set_primary_action(__("Start Import"), function() { + frappe.call({ + btn: frm.page.btn_primary, + method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data", + args: { + data_import: frm.doc.name + } + }); + }).addClass('btn btn-primary'); + } + + if (frm.doc.log_details) { + frm.events.create_log_table(frm); + } else { + $(frm.fields_dict.import_log.wrapper).empty(); + } + }, + + action: function(frm) { + if(!frm.doc.action) return; + if(!frm.doc.reference_doctype) { + frappe.msgprint(__("Please select document type first.")); + frm.set_value("action", ""); + return; + } + + if(frm.doc.action == "Insert new records") { + frm.doc.insert_new = 1; + } else if (frm.doc.action == "Update records"){ + frm.doc.overwrite = 1; + } + frm.save(); + }, + + only_update: function(frm) { + frm.save(); + }, + + submit_after_import: function(frm) { + frm.save(); + }, + + skip_errors: function(frm) { + frm.save(); + }, + + ignore_encoding_errors: function(frm) { + frm.save(); + }, + + no_email: function(frm) { + frm.save(); + }, + + show_only_errors: function(frm) { + frm.events.create_log_table(frm); + }, + + create_log_table: function(frm) { + let msg = JSON.parse(frm.doc.log_details); + var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty(); + $(frappe.render_template("log_details", { + data: msg.messages, + import_status: frm.doc.import_status, + show_only_errors: frm.doc.show_only_errors, + })).appendTo($log_wrapper); + } +}); + +frappe.provide('frappe.data_import'); +frappe.data_import.download_dialog = function(frm) { + var dialog; + 
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden; + const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields); + + const get_doctype_checkbox_fields = () => { + return dialog.fields.filter(df => df.fieldname.endsWith('_fields')) + .map(df => dialog.fields_dict[df.fieldname]); + }; + + const doctype_fields = get_fields(frm.doc.reference_doctype) + .map(df => { + let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0; + return { + label: df.label, + reqd: reqd, + danger: reqd, + value: df.fieldname, + checked: 1 + }; + }); + + let fields = [ + { + "label": __("Select Columns"), + "fieldname": "select_columns", + "fieldtype": "Select", + "options": "All\nMandatory\nManually", + "reqd": 1, + "onchange": function() { + const fields = get_doctype_checkbox_fields(); + fields.map(f => f.toggle(true)); + if(this.value == 'Mandatory' || this.value == 'Manually') { + checkbox_toggle(true); + fields.map(multicheck_field => { + multicheck_field.options.map(option => { + if(!option.reqd) return; + $(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`) + .prop('checked', false) + .trigger('click'); + }); + }); + } else if(this.value == 'All'){ + $(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`) + .prop('disabled', true); + } + } + }, + { + "label": __("File Type"), + "fieldname": "file_type", + "fieldtype": "Select", + "options": "Excel\nCSV", + "default": "Excel" + }, + { + "label": __("Download with Data"), + "fieldname": "with_data", + "fieldtype": "Check", + "hidden": !frm.doc.overwrite, + "default": 1 + }, + { + "label": __("Select All"), + "fieldname": "select_all", + "fieldtype": "Button", + "depends_on": "eval:doc.select_columns=='Manually'", + click: function() { + checkbox_toggle(); + } + }, + { + "label": __("Unselect All"), + "fieldname": "unselect_all", + "fieldtype": "Button", + "depends_on": "eval:doc.select_columns=='Manually'", + click: function() { + checkbox_toggle(true); + } 
+ }, + { + "label": frm.doc.reference_doctype, + "fieldname": "doctype_fields", + "fieldtype": "MultiCheck", + "options": doctype_fields, + "columns": 2, + "hidden": 1 + } + ]; + + const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype) + .map(df => { + return { + "label": df.options, + "fieldname": df.fieldname + '_fields', + "fieldtype": "MultiCheck", + "options": frappe.meta.get_docfields(df.options) + .filter(filter_fields) + .map(df => ({ + label: df.label, + reqd: df.reqd ? 1 : 0, + value: df.fieldname, + checked: 1, + danger: df.reqd + })), + "columns": 2, + "hidden": 1 + }; + }); + + fields = fields.concat(child_table_fields); + + dialog = new frappe.ui.Dialog({ + title: __('Download Template'), + fields: fields, + primary_action: function(values) { + var data = values; + if (frm.doc.reference_doctype) { + var export_params = () => { + let columns = {}; + if(values.select_columns) { + columns = get_doctype_checkbox_fields().reduce((columns, field) => { + const options = field.get_checked_options(); + columns[field.df.label] = options; + return columns; + }, {}); + } + + return { + doctype: frm.doc.reference_doctype, + parent_doctype: frm.doc.reference_doctype, + select_columns: JSON.stringify(columns), + with_data: frm.doc.overwrite && data.with_data, + all_doctypes: true, + file_type: data.file_type, + template: true + }; + }; + let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data'; + open_url_post(get_template_url, export_params()); + } else { + frappe.msgprint(__("Please select the Document Type.")); + } + dialog.hide(); + }, + primary_action_label: __('Download') + }); + + $(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]') + .wrapAll('
    '); + const button_container = $(dialog.body).find('.inline-buttons'); + button_container.addClass('flex'); + $(button_container).find('.frappe-control').map((index, button) => { + $(button).css({"margin-right": "1em"}); + }); + + function checkbox_toggle(checked=false) { + $(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => { + $(element).find(`:checkbox`).prop("checked", checked).trigger('click'); + }); + } + + return dialog; +}; diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.json b/frappe/core/doctype/data_import_legacy/data_import_legacy.json new file mode 100644 index 0000000000..852ccba156 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.json @@ -0,0 +1,218 @@ +{ + "actions": [], + "allow_copy": 1, + "creation": "2020-06-11 16:13:23.813709", + "doctype": "DocType", + "document_type": "Document", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "action", + "insert_new", + "overwrite", + "only_update", + "section_break_4", + "import_file", + "column_break_4", + "error_file", + "section_break_6", + "skip_errors", + "submit_after_import", + "ignore_encoding_errors", + "no_email", + "import_detail", + "import_status", + "show_only_errors", + "import_log", + "log_details", + "amended_from", + "total_rows", + "amended_from" + ], + "fields": [ + { + "fieldname": "reference_doctype", + "fieldtype": "Link", + "ignore_user_permissions": 1, + "in_list_view": 1, + "label": "Document Type", + "options": "DocType", + "reqd": 1 + }, + { + "fieldname": "action", + "fieldtype": "Select", + "label": "Action", + "options": "Insert new records\nUpdate records", + "reqd": 1 + }, + { + "default": "0", + "depends_on": "eval:!doc.overwrite", + "description": "New data will be inserted.", + "fieldname": "insert_new", + "fieldtype": "Check", + "hidden": 1, + "label": "Insert new records", + "set_only_once": 1 + }, + { + "default": "0", + "depends_on": 
"eval:!doc.insert_new", + "description": "If you are updating/overwriting already created records.", + "fieldname": "overwrite", + "fieldtype": "Check", + "hidden": 1, + "label": "Update records", + "set_only_once": 1 + }, + { + "default": "0", + "depends_on": "overwrite", + "description": "If you don't want to create any new records while updating the older records.", + "fieldname": "only_update", + "fieldtype": "Check", + "label": "Don't create new records" + }, + { + "depends_on": "eval:(!doc.__islocal)", + "fieldname": "section_break_4", + "fieldtype": "Section Break" + }, + { + "fieldname": "import_file", + "fieldtype": "Attach", + "label": "Attach file for Import" + }, + { + "fieldname": "column_break_4", + "fieldtype": "Column Break" + }, + { + "depends_on": "eval: doc.import_status == \"Partially Successful\"", + "description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.", + "fieldname": "error_file", + "fieldtype": "Attach", + "label": "Generated File" + }, + { + "depends_on": "eval:(!doc.__islocal)", + "fieldname": "section_break_6", + "fieldtype": "Section Break" + }, + { + "default": "0", + "description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.", + "fieldname": "skip_errors", + "fieldtype": "Check", + "label": "Skip rows with errors" + }, + { + "default": "0", + "fieldname": "submit_after_import", + "fieldtype": "Check", + "label": "Submit after importing" + }, + { + "default": "0", + "fieldname": "ignore_encoding_errors", + "fieldtype": "Check", + "label": "Ignore encoding errors" + }, + { + "default": "1", + "fieldname": "no_email", + "fieldtype": "Check", + "label": "Do not send Emails" + }, + { + "collapsible": 1, + "collapsible_depends_on": "eval: doc.import_status == \"Failed\"", + "depends_on": "import_status", + "fieldname": "import_detail", + "fieldtype": "Section Break", 
+ "label": "Import Log" + }, + { + "fieldname": "import_status", + "fieldtype": "Select", + "label": "Import Status", + "options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful", + "read_only": 1 + }, + { + "allow_on_submit": 1, + "default": "1", + "fieldname": "show_only_errors", + "fieldtype": "Check", + "label": "Show only errors", + "no_copy": 1, + "print_hide": 1 + }, + { + "allow_on_submit": 1, + "depends_on": "import_status", + "fieldname": "import_log", + "fieldtype": "HTML", + "label": "Import Log" + }, + { + "allow_on_submit": 1, + "fieldname": "log_details", + "fieldtype": "Code", + "hidden": 1, + "label": "Log Details", + "read_only": 1 + }, + { + "fieldname": "amended_from", + "fieldtype": "Link", + "label": "Amended From", + "no_copy": 1, + "options": "Data Import", + "print_hide": 1, + "read_only": 1 + }, + { + "fieldname": "total_rows", + "fieldtype": "Int", + "hidden": 1, + "label": "Total Rows", + "read_only": 1 + }, + { + "fieldname": "amended_from", + "fieldtype": "Link", + "label": "Amended From", + "no_copy": 1, + "options": "Data Import Legacy", + "print_hide": 1, + "read_only": 1 + } + ], + "is_submittable": 1, + "links": [], + "max_attachments": 1, + "modified": "2020-06-11 16:13:23.813709", + "modified_by": "Administrator", + "module": "Core", + "name": "Data Import Legacy", + "owner": "Administrator", + "permissions": [ + { + "create": 1, + "delete": 1, + "email": 1, + "read": 1, + "role": "System Manager", + "share": 1, + "submit": 1, + "write": 1 + } + ], + "sort_field": "modified", + "sort_order": "DESC", + "track_changes": 1, + "track_seen": 1 +} \ No newline at end of file diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.py b/frappe/core/doctype/data_import_legacy/data_import_legacy.py new file mode 100644 index 0000000000..df3a3edd3a --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017, Frappe Technologies 
and contributors +# For license information, please see license.txt + +from __future__ import unicode_literals +import frappe, os +from frappe import _ +import frappe.modules.import_file +from frappe.model.document import Document +from frappe.utils.data import format_datetime +from frappe.core.doctype.data_import_legacy.importer import upload +from frappe.utils.background_jobs import enqueue + + +class DataImportLegacy(Document): + def autoname(self): + if not self.name: + self.name = "Import on " +format_datetime(self.creation) + + def validate(self): + if not self.import_file: + self.db_set("total_rows", 0) + if self.import_status == "In Progress": + frappe.throw(_("Can't save the form as data import is in progress.")) + + # validate the template just after the upload + # if there is total_rows in the doc, it means that the template is already validated and error free + if self.import_file and not self.total_rows: + upload(data_import_doc=self, from_data_import="Yes", validate_template=True) + + +@frappe.whitelist() +def get_importable_doctypes(): + return frappe.cache().hget("can_import", frappe.session.user) + +@frappe.whitelist() +def import_data(data_import): + frappe.db.set_value("Data Import Legacy", data_import, "import_status", "In Progress", update_modified=False) + frappe.publish_realtime("data_import_progress", {"progress": "0", + "data_import": data_import, "reload": True}, user=frappe.session.user) + + from frappe.core.page.background_jobs.background_jobs import get_info + enqueued_jobs = [d.get("job_name") for d in get_info()] + + if data_import not in enqueued_jobs: + enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import, + data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user) + + +def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, + insert=False, submit=False, pre_process=None): + if os.path.isdir(path): + files = [os.path.join(path, f) for f in 
os.listdir(path)] + else: + files = [path] + + for f in files: + if f.endswith(".json"): + frappe.flags.mute_emails = True + frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True) + frappe.flags.mute_emails = False + frappe.db.commit() + elif f.endswith(".csv"): + import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process) + frappe.db.commit() + + +def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True): + from frappe.utils.csvutils import read_csv_content + print("Importing " + path) + with open(path, "r") as infile: + upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite, + submit_after_import=submit, pre_process=pre_process) + + +def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"): + def post_process(out): + del_keys = ('modified_by', 'creation', 'owner', 'idx') + for doc in out: + for key in del_keys: + if key in doc: + del doc[key] + for k, v in doc.items(): + if isinstance(v, list): + for child in v: + for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'): + if key in child: + del child[key] + + out = [] + if name: + out.append(frappe.get_doc(doctype, name).as_dict()) + elif frappe.db.get_value("DocType", doctype, "issingle"): + out.append(frappe.get_doc(doctype).as_dict()) + else: + for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by): + out.append(frappe.get_doc(doctype, doc.name).as_dict()) + post_process(out) + + dirname = os.path.dirname(path) + if not os.path.exists(dirname): + path = os.path.join('..', path) + + with open(path, "w") as outfile: + outfile.write(frappe.as_json(out)) + + +def export_csv(doctype, path): + from frappe.core.doctype.data_export.exporter import 
export_data + with open(path, "wb") as csvfile: + export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True) + csvfile.write(frappe.response.result.encode("utf-8")) + + +@frappe.whitelist() +def export_fixture(doctype, app): + if frappe.session.user != "Administrator": + raise frappe.PermissionError + + if not os.path.exists(frappe.get_app_path(app, "fixtures")): + os.mkdir(frappe.get_app_path(app, "fixtures")) + + export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc") diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js b/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js new file mode 100644 index 0000000000..fcf2391313 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js @@ -0,0 +1,24 @@ +frappe.listview_settings['Data Import Legacy'] = { + add_fields: ["import_status"], + has_indicator_for_draft: 1, + get_indicator: function(doc) { + + let status = { + 'Successful': [__("Success"), "green", "import_status,=,Successful"], + 'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"], + 'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"], + 'Failed': [__("Failed"), "red", "import_status,=,Failed"], + 'Pending': [__("Pending"), "orange", "import_status,=,"] + } + + if (doc.import_status) { + return status[doc.import_status]; + } + + if (doc.docstatus == 0) { + return status['Pending']; + } + + return status['Pending']; + } +}; diff --git a/frappe/core/doctype/data_import_legacy/importer.py b/frappe/core/doctype/data_import_legacy/importer.py new file mode 100644 index 0000000000..5bd0daf32b --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/importer.py @@ -0,0 +1,541 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. 
See license.txt + +from __future__ import unicode_literals, print_function + +from six.moves import range +import requests +import frappe, json +import frappe.permissions + +from frappe import _ + +from frappe.utils.csvutils import getlink +from frappe.utils.dateutils import parse_date + +from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url +from six import string_types + + +@frappe.whitelist() +def get_data_keys(): + return frappe._dict({ + "data_separator": _('Start entering data below this line'), + "main_table": _("Table") + ":", + "parent_table": _("Parent Table") + ":", + "columns": _("Column Name") + ":", + "doctype": _("DocType") + ":" + }) + + + +@frappe.whitelist() +def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, + update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No", + skip_errors = True, data_import_doc=None, validate_template=False, user=None): + """upload data""" + + # for translations + if user: + frappe.cache().hdel("lang", user) + frappe.set_user_lang(user) + + if data_import_doc and isinstance(data_import_doc, string_types): + data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc) + if data_import_doc and from_data_import == "Yes": + no_email = data_import_doc.no_email + ignore_encoding_errors = data_import_doc.ignore_encoding_errors + update_only = data_import_doc.only_update + submit_after_import = data_import_doc.submit_after_import + overwrite = data_import_doc.overwrite + skip_errors = data_import_doc.skip_errors + else: + # extra input params + params = json.loads(frappe.form_dict.get("params") or '{}') + if params.get("submit_after_import"): + submit_after_import = True + if params.get("ignore_encoding_errors"): + ignore_encoding_errors = True + if not params.get("no_email"): + no_email = False + if params.get('update_only'): + update_only = True + if 
params.get('from_data_import'): + from_data_import = params.get('from_data_import') + if not params.get('skip_errors'): + skip_errors = params.get('skip_errors') + + frappe.flags.in_import = True + frappe.flags.mute_emails = no_email + + def get_data_keys_definition(): + return get_data_keys() + + def bad_template(): + frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator)) + + def check_data_length(): + if not data: + frappe.throw(_("No data found in the file. Please reattach the new file with data.")) + + def get_start_row(): + for i, row in enumerate(rows): + if row and row[0]==get_data_keys_definition().data_separator: + return i+1 + bad_template() + + def get_header_row(key): + return get_header_row_and_idx(key)[0] + + def get_header_row_and_idx(key): + for i, row in enumerate(header): + if row and row[0]==key: + return row, i + return [], -1 + + def filter_empty_columns(columns): + empty_cols = list(filter(lambda x: x in ("", None), columns)) + + if empty_cols: + if columns[-1*len(empty_cols):] == empty_cols: + # filter empty columns if they exist at the end + columns = columns[:-1*len(empty_cols)] + else: + frappe.msgprint(_("Please make sure that there are no empty columns in the file."), + raise_exception=1) + + return columns + + def make_column_map(): + doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype) + if row_idx == -1: # old style + return + + dt = None + for i, d in enumerate(doctype_row[1:]): + if d not in ("~", "-"): + if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"): + dt, parentfield = d, None + # xls format truncates the row, so it may not have more columns + if len(doctype_row) > i+2: + parentfield = doctype_row[i+2] + doctypes.append((dt, parentfield)) + column_idx_to_fieldname[(dt, parentfield)] = {} + column_idx_to_fieldtype[(dt, parentfield)] = {} + if dt: + column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1] + 
column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1] + + def get_doc(start_idx): + if doctypes: + doc = {} + attachments = [] + last_error_row_idx = None + for idx in range(start_idx, len(rows)): + last_error_row_idx = idx # pylint: disable=W0612 + if (not doc) or main_doc_empty(rows[idx]): + for dt, parentfield in doctypes: + d = {} + for column_idx in column_idx_to_fieldname[(dt, parentfield)]: + try: + fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx] + fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx] + + if not fieldname or not rows[idx][column_idx]: + continue + + d[fieldname] = rows[idx][column_idx] + if fieldtype in ("Int", "Check"): + d[fieldname] = cint(d[fieldname]) + elif fieldtype in ("Float", "Currency", "Percent"): + d[fieldname] = flt(d[fieldname]) + elif fieldtype == "Date": + if d[fieldname] and isinstance(d[fieldname], string_types): + d[fieldname] = getdate(parse_date(d[fieldname])) + elif fieldtype == "Datetime": + if d[fieldname]: + if " " in d[fieldname]: + _date, _time = d[fieldname].split() + else: + _date, _time = d[fieldname], '00:00:00' + _date = parse_date(d[fieldname]) + d[fieldname] = get_datetime(_date + " " + _time) + else: + d[fieldname] = None + + elif fieldtype in ("Image", "Attach Image", "Attach"): + # added file to attachments list + attachments.append(d[fieldname]) + + elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]: + # as fields can be saved in the number format(long type) in data import template + d[fieldname] = cstr(d[fieldname]) + + except IndexError: + pass + + # scrub quotes from name and modified + if d.get("name") and d["name"].startswith('"'): + d["name"] = d["name"][1:-1] + + if sum([0 if not val else 1 for val in d.values()]): + d['doctype'] = dt + if dt == doctype: + doc.update(d) + else: + if not overwrite and doc.get("name"): + d['parent'] = doc["name"] + d['parenttype'] = doctype + d['parentfield'] = parentfield + 
doc.setdefault(d['parentfield'], []).append(d) + else: + break + + return doc, attachments, last_error_row_idx + else: + doc = frappe._dict(zip(columns, rows[start_idx][1:])) + doc['doctype'] = doctype + return doc, [], None + + # used in testing whether a row is empty or parent row or child row + # checked only 3 first columns since first two columns can be blank for example the case of + # importing the item variant where item code and item name will be blank. + def main_doc_empty(row): + if row: + for i in range(3,0,-1): + if len(row) > i and row[i]: + return False + return True + + def validate_naming(doc): + autoname = frappe.get_meta(doctype).autoname + if autoname: + if autoname[0:5] == 'field': + autoname = autoname[6:] + elif autoname == 'naming_series:': + autoname = 'naming_series' + else: + return True + + if (autoname not in doc) or (not doc[autoname]): + from frappe.model.base_document import get_controller + if not hasattr(get_controller(doctype), "autoname"): + frappe.throw(_("{0} is a mandatory field").format(autoname)) + return True + + users = frappe.db.sql_list("select name from tabUser") + def prepare_for_insert(doc): + # don't block data import if user is not set + # migrating from another system + if not doc.owner in users: + doc.owner = frappe.session.user + if not doc.modified_by in users: + doc.modified_by = frappe.session.user + + def is_valid_url(url): + is_valid = False + if url.startswith("/files") or url.startswith("/private/files"): + url = get_url(url) + + try: + r = requests.get(url) + is_valid = True if r.status_code == 200 else False + except Exception: + pass + + return is_valid + + def attach_file_to_doc(doctype, docname, file_url): + # check if attachment is already available + # check if the attachement link is relative or not + if not file_url: + return + if not is_valid_url(file_url): + return + + files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and + attached_to_name='{docname}' and 
(file_url='{file_url}' or thumbnail_url='{file_url}')""".format( + doctype=doctype, + docname=docname, + file_url=file_url + )) + + if files: + # file is already attached + return + + _file = frappe.get_doc({ + "doctype": "File", + "file_url": file_url, + "attached_to_name": docname, + "attached_to_doctype": doctype, + "attached_to_field": 0, + "folder": "Home/Attachments"}) + _file.save() + + + # header + filename, file_extension = ['',''] + if not rows: + _file = frappe.get_doc("File", {"file_url": data_import_doc.import_file}) + fcontent = _file.get_content() + filename, file_extension = _file.get_extension() + + if file_extension == '.xlsx' and from_data_import == 'Yes': + from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file + rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file) + + elif file_extension == '.csv': + from frappe.utils.csvutils import read_csv_content + rows = read_csv_content(fcontent, ignore_encoding_errors) + + else: + frappe.throw(_("Unsupported File Format")) + + start_row = get_start_row() + header = rows[:start_row] + data = rows[start_row:] + try: + doctype = get_header_row(get_data_keys_definition().main_table)[1] + columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:]) + except: + frappe.throw(_("Cannot change header content")) + doctypes = [] + column_idx_to_fieldname = {} + column_idx_to_fieldtype = {} + + if skip_errors: + data_rows_with_error = header + + if submit_after_import and not cint(frappe.db.get_value("DocType", + doctype, "is_submittable")): + submit_after_import = False + + parenttype = get_header_row(get_data_keys_definition().parent_table) + + if len(parenttype) > 1: + parenttype = parenttype[1] + + # check permissions + if not frappe.permissions.can_import(parenttype or doctype): + frappe.flags.mute_emails = False + return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True} + + # Throw exception in case of the empty data 
file + check_data_length() + make_column_map() + total = len(data) + + if validate_template: + if total: + data_import_doc.total_rows = total + return True + + if overwrite==None: + overwrite = params.get('overwrite') + + # delete child rows (if parenttype) + parentfield = None + if parenttype: + parentfield = get_parent_field(doctype, parenttype) + + if overwrite: + delete_child_rows(data, doctype) + + import_log = [] + def log(**kwargs): + if via_console: + print((kwargs.get("title") + kwargs.get("message")).encode('utf-8')) + else: + import_log.append(kwargs) + + def as_link(doctype, name): + if via_console: + return "{0}: {1}".format(doctype, name) + else: + return getlink(doctype, name) + + # publish realtime task update + def publish_progress(achieved, reload=False): + if data_import_doc: + frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)), + "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user) + + + error_flag = rollback_flag = False + + batch_size = frappe.conf.data_import_batch_size or 1000 + + for batch_start in range(0, total, batch_size): + batch = data[batch_start:batch_start + batch_size] + + for i, row in enumerate(batch): + # bypass empty rows + if main_doc_empty(row): + continue + + row_idx = i + start_row + doc = None + + publish_progress(i) + + try: + doc, attachments, last_error_row_idx = get_doc(row_idx) + validate_naming(doc) + if pre_process: + pre_process(doc) + + original = None + if parentfield: + parent = frappe.get_doc(parenttype, doc["parent"]) + doc = parent.append(parentfield, doc) + parent.save() + else: + if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]): + original = frappe.get_doc(doctype, doc["name"]) + original_name = original.name + original.update(doc) + # preserve original name for case sensitivity + original.name = original_name + original.flags.ignore_links = ignore_links + original.save() + doc = original + else: + if not 
update_only: + doc = frappe.get_doc(doc) + prepare_for_insert(doc) + doc.flags.ignore_links = ignore_links + doc.insert() + if attachments: + # check file url and create a File document + for file_url in attachments: + attach_file_to_doc(doc.doctype, doc.name, file_url) + if submit_after_import: + doc.submit() + + # log errors + if parentfield: + log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)), + "link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"}) + elif submit_after_import: + log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)), + "message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"}) + elif original: + log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)), + "message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) + elif not update_only: + log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)), + "message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) + else: + log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None, + "message": "Document updation ignored", "indicator": "orange"}) + + except Exception as e: + error_flag = True + + # build error message + if frappe.local.message_log: + err_msg = "\n".join(['

    {}

    '.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) + else: + err_msg = '

    {}

    '.format(cstr(e)) + + error_trace = frappe.get_traceback() + if error_trace: + error_log_doc = frappe.log_error(error_trace) + error_link = get_absolute_url("Error Log", error_log_doc.name) + else: + error_link = None + + log(**{ + "row": row_idx + 1, + "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), + "message": err_msg, + "indicator": "red", + "link":error_link + }) + + # data with error to create a new file + # include the errored data in the last row as last_error_row_idx will not be updated for the last row + if skip_errors: + if last_error_row_idx == len(rows)-1: + last_error_row_idx = len(rows) + data_rows_with_error += rows[row_idx:last_error_row_idx] + else: + rollback_flag = True + finally: + frappe.local.message_log = [] + + start_row += batch_size + if rollback_flag: + frappe.db.rollback() + else: + frappe.db.commit() + + frappe.flags.mute_emails = False + frappe.flags.in_import = False + + log_message = {"messages": import_log, "error": error_flag} + if data_import_doc: + data_import_doc.log_details = json.dumps(log_message) + + import_status = None + if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): + import_status = "Partially Successful" + # write the file with the faulty row + file_name = 'error_' + filename + file_extension + if file_extension == '.xlsx': + from frappe.utils.xlsxutils import make_xlsx + xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") + file_data = xlsx_file.getvalue() + else: + from frappe.utils.csvutils import to_csv + file_data = to_csv(data_rows_with_error) + _file = frappe.get_doc({ + "doctype": "File", + "file_name": file_name, + "attached_to_doctype": "Data Import Legacy", + "attached_to_name": data_import_doc.name, + "folder": "Home/Attachments", + "content": file_data}) + _file.save() + data_import_doc.error_file = _file.file_url + + elif error_flag: + import_status = "Failed" + else: + import_status = "Successful" + + 
data_import_doc.import_status = import_status + data_import_doc.save() + if data_import_doc.import_status in ["Successful", "Partially Successful"]: + data_import_doc.submit() + publish_progress(100, True) + else: + publish_progress(0, True) + frappe.db.commit() + else: + return log_message + +def get_parent_field(doctype, parenttype): + parentfield = None + + # get parentfield + if parenttype: + for d in frappe.get_meta(parenttype).get_table_fields(): + if d.options==doctype: + parentfield = d.fieldname + break + + if not parentfield: + frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) + raise Exception + + return parentfield + +def delete_child_rows(rows, doctype): + """delete child rows for all parents""" + for p in list(set([r[1] for r in rows])): + if p: + frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) diff --git a/frappe/core/doctype/data_import/log_details.html b/frappe/core/doctype/data_import_legacy/log_details.html similarity index 100% rename from frappe/core/doctype/data_import/log_details.html rename to frappe/core/doctype/data_import_legacy/log_details.html diff --git a/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py new file mode 100644 index 0000000000..e5b244e6a0 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2020, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +# import frappe +import unittest + +class TestDataImportLegacy(unittest.TestCase): + pass diff --git a/frappe/core/doctype/doctype/doctype.py b/frappe/core/doctype/doctype/doctype.py index 6ca3cccdba..7f84555b79 100644 --- a/frappe/core/doctype/doctype/doctype.py +++ b/frappe/core/doctype/doctype/doctype.py @@ -406,9 +406,13 @@ class DocType(Document): with open(fname, 'r') as f: code 
= f.read() with open(fname, 'w') as f: - file_content = code.replace(old, new) # replace str with full str (js controllers) - file_content = file_content.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports) - file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers) + if fname.endswith('.js'): + file_content = code.replace(old, new) # replace str with full str (js controllers) + + elif fname.endswith('.py'): + file_content = code.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports) + file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers) + f.write(file_content) # updating json file with new name diff --git a/frappe/core/doctype/installed_applications/installed_applications.py b/frappe/core/doctype/installed_applications/installed_applications.py index aa0401f368..4e6eadf07e 100644 --- a/frappe/core/doctype/installed_applications/installed_applications.py +++ b/frappe/core/doctype/installed_applications/installed_applications.py @@ -12,7 +12,7 @@ class InstalledApplications(Document): for app in frappe.utils.get_installed_apps_info(): self.append("installed_applications", { "app_name": app.get("app_name"), - "app_version": app.get("version"), - "git_branch": app.get("branch") + "app_version": app.get("version") or "UNVERSIONED", + "git_branch": app.get("branch") or "UNVERSIONED" }) self.save() \ No newline at end of file diff --git a/frappe/core/doctype/role/role.py b/frappe/core/doctype/role/role.py index 7ce2537da3..657340ec24 100644 --- a/frappe/core/doctype/role/role.py +++ b/frappe/core/doctype/role/role.py @@ -22,16 +22,28 @@ class Role(Document): frappe.db.sql("delete from `tabHas Role` where role = %s", self.name) frappe.clear_cache() + def on_update(self): + '''update system user desk access if this has changed in this update''' + if frappe.flags.in_install: return + if 
self.has_value_changed('desk_access'): + for user_name in get_users(self.name): + user = frappe.get_doc('User', user_name) + user_type = user.user_type + user.set_system_user() + if user_type != user.user_type: + user.save() + # Get email addresses of all users that have been assigned this role def get_emails_from_role(role): emails = [] - users = frappe.get_list("Has Role", filters={"role": role, "parenttype": "User"}, - fields=["parent"]) - - for user in users: - user_email, enabled = frappe.db.get_value("User", user.parent, ["email", "enabled"]) + for user in get_users(role): + user_email, enabled = frappe.db.get_value("User", user, ["email", "enabled"]) if enabled and user_email not in ["admin@example.com", "guest@example.com"]: emails.append(user_email) - return emails \ No newline at end of file + return emails + +def get_users(role): + return [d.parent for d in frappe.get_all("Has Role", filters={"role": role, "parenttype": "User"}, + fields=["parent"])] diff --git a/frappe/core/doctype/role/test_role.py b/frappe/core/doctype/role/test_role.py index 31efb5d4e8..6459a72c98 100644 --- a/frappe/core/doctype/role/test_role.py +++ b/frappe/core/doctype/role/test_role.py @@ -23,3 +23,28 @@ class TestUser(unittest.TestCase): frappe.get_doc("User", "test@example.com").add_roles("_Test Role 3") self.assertTrue("_Test Role 3" in frappe.get_roles("test@example.com")) + + def test_change_desk_access(self): + '''if we change desk access from role, remove from user''' + frappe.delete_doc_if_exists('User', 'test-user-for-desk-access@example.com') + frappe.delete_doc_if_exists('Role', 'desk-access-test') + user = frappe.get_doc(dict( + doctype='User', + email='test-user-for-desk-access@example.com', + first_name='test')).insert() + role = frappe.get_doc(dict( + doctype = 'Role', + role_name = 'desk-access-test', + desk_access = 0 + )).insert() + user.add_roles(role.name) + user.save() + self.assertTrue(user.user_type=='Website User') + role.desk_access = 1 + role.save() + 
user.reload() + self.assertTrue(user.user_type=='System User') + role.desk_access = 0 + role.save() + user.reload() + self.assertTrue(user.user_type=='Website User') diff --git a/frappe/core/doctype/server_script/server_script.py b/frappe/core/doctype/server_script/server_script.py index 9522b77b4b..539ae8eb01 100644 --- a/frappe/core/doctype/server_script/server_script.py +++ b/frappe/core/doctype/server_script/server_script.py @@ -42,7 +42,7 @@ class ServerScript(Document): @frappe.whitelist() def setup_scheduler_events(script_name, frequency): - method = frappe.scrub(script_name) + '_' + frequency.lower() + method = frappe.scrub('{0}-{1}'.format(script_name, frequency)) scheduled_script = frappe.db.get_value('Scheduled Job Type', dict(method=method)) diff --git a/frappe/core/doctype/user/user.py b/frappe/core/doctype/user/user.py index 7b9266ff64..fc58f66bfc 100644 --- a/frappe/core/doctype/user/user.py +++ b/frappe/core/doctype/user/user.py @@ -811,6 +811,7 @@ def reset_password(user): frappe.clear_messages() return 'not found' +@frappe.whitelist() def user_query(doctype, txt, searchfield, start, page_len, filters): from frappe.desk.reportview import get_match_cond diff --git a/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py b/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py index 95a04360be..8b2d1e01fa 100644 --- a/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py +++ b/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py @@ -5,23 +5,23 @@ from __future__ import unicode_literals import frappe from frappe import _, throw import frappe.utils.user -from frappe.permissions import check_admin_or_system_manager +from frappe.permissions import check_admin_or_system_manager, rights from frappe.model import data_fieldtypes def execute(filters=None): user, doctype, show_permissions = filters.get("user"), filters.get("doctype"), 
filters.get("show_permissions") + if not validate(user, doctype): return [], [] columns, fields = get_columns_and_fields(doctype) data = frappe.get_list(doctype, fields=fields, as_list=True, user=user) if show_permissions: - columns = columns + ["Read", "Write", "Create", "Delete", "Submit", "Cancel", "Amend", "Print", "Email", - "Report", "Import", "Export", "Share"] + columns = columns + [frappe.unscrub(right) + ':Check:80' for right in rights] data = list(data) - for i,item in enumerate(data): - temp = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, item[0]), False,user) - data[i] = item+(temp.get("read"),temp.get("write"),temp.get("create"),temp.get("delete"),temp.get("submit"),temp.get("cancel"),temp.get("amend"),temp.get("print"),temp.get("email"),temp.get("report"),temp.get("import"),temp.get("export"),temp.get("share"),) + for i, doc in enumerate(data): + permission = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, doc[0]), user) + data[i] = doc + tuple(permission.get(right) for right in rights) return columns, data diff --git a/frappe/custom/doctype/custom_field/custom_field.py b/frappe/custom/doctype/custom_field/custom_field.py index 20da9ac0cc..bc325b654e 100644 --- a/frappe/custom/doctype/custom_field/custom_field.py +++ b/frappe/custom/doctype/custom_field/custom_field.py @@ -72,6 +72,11 @@ class CustomField(Document): frappe.db.updatedb(self.dt) def on_trash(self): + #check if Admin owned field + if self.owner == 'Administrator' and frappe.session.user != 'Administrator': + frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format( + frappe.bold(self.label))) + # delete property setter entries frappe.db.sql("""\ DELETE FROM `tabProperty Setter` diff --git a/frappe/database/db_manager.py b/frappe/database/db_manager.py index 80236b2dc2..3345fce735 100644 --- a/frappe/database/db_manager.py +++ b/frappe/database/db_manager.py @@ -49,7 +49,7 @@ class 
DbManager: host = self.get_current_host() if frappe.conf.get('rds_db', 0) == 1: - self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE ON `%s`.* TO '%s'@'%s';" % (target, user, host)) + self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES ON `%s`.* TO '%s'@'%s';" % (target, user, host)) else: self.db.sql("GRANT ALL PRIVILEGES ON `%s`.* TO '%s'@'%s';" % (target, user, host)) diff --git a/frappe/database/postgres/setup_db.py b/frappe/database/postgres/setup_db.py index 01a97178f9..1dc1ea4c97 100644 --- a/frappe/database/postgres/setup_db.py +++ b/frappe/database/postgres/setup_db.py @@ -1,7 +1,7 @@ import frappe, subprocess, os from six.moves import input -def setup_database(force, source_sql, verbose): +def setup_database(force, source_sql=None, verbose=False): root_conn = get_root_connection() root_conn.commit() root_conn.sql("DROP DATABASE IF EXISTS `{0}`".format(frappe.conf.db_name)) @@ -16,10 +16,12 @@ def setup_database(force, source_sql, verbose): subprocess_env = os.environ.copy() subprocess_env['PGPASSWORD'] = str(frappe.conf.db_password) # bootstrap db + if not source_sql: + source_sql = os.path.join(os.path.dirname(__file__), 'framework_postgres.sql') + subprocess.check_output([ 'psql', frappe.conf.db_name, '-h', frappe.conf.db_host or 'localhost', '-U', - frappe.conf.db_name, '-f', - os.path.join(os.path.dirname(__file__), 'framework_postgres.sql') + frappe.conf.db_name, '-f', source_sql ], env=subprocess_env) frappe.connect() diff --git a/frappe/desk/desktop.py b/frappe/desk/desktop.py index 0748fc05f0..655c36c703 100644 --- a/frappe/desk/desktop.py +++ b/frappe/desk/desktop.py @@ -168,7 +168,6 @@ class Workspace: 'subtitle': _(self.onboarding_doc.subtitle), 'success': 
_(self.onboarding_doc.success_message), 'docs_url': self.onboarding_doc.documentation_url, - 'user_can_dismiss': self.onboarding_doc.user_can_dismiss, 'items': self.get_onboarding_steps() } @handle_not_exist diff --git a/frappe/desk/doctype/calendar_view/calendar_view.json b/frappe/desk/doctype/calendar_view/calendar_view.json index 04839abc9f..ea220c335c 100644 --- a/frappe/desk/doctype/calendar_view/calendar_view.json +++ b/frappe/desk/doctype/calendar_view/calendar_view.json @@ -1,208 +1,81 @@ { - "allow_copy": 0, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, + "actions": [], "autoname": "Prompt", - "beta": 0, "creation": "2017-10-23 13:02:10.295824", - "custom": 0, - "docstatus": 0, "doctype": "DocType", - "document_type": "", "editable_grid": 1, "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "subject_field", + "start_date_field", + "end_date_field", + "column_break_5", + "all_day" + ], "fields": [ { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "reference_doctype", "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Reference Document Type", - "length": 0, - "no_copy": 0, "options": "DocType", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "subject_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Subject Field", - "length": 0, - "no_copy": 
0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "start_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "Start Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "end_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "End Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 + }, + { + "fieldname": "column_break_5", + "fieldtype": "Column Break" + }, + { + "default": "0", + "fieldname": "all_day", + "fieldtype": "Check", + "label": "All Day" } ], - "has_web_view": 0, - "hide_heading": 0, - "hide_toolbar": 0, - "idx": 0, - "image_view": 0, - "in_create": 0, - "is_submittable": 0, - "issingle": 0, - "istable": 0, - "max_attachments": 0, - "modified": "2019-09-05 14:22:27.664645", + "links": [], + "modified": 
"2020-06-15 11:24:57.639430", "modified_by": "Administrator", "module": "Desk", "name": "Calendar View", - "name_case": "", "owner": "faris@erpnext.com", "permissions": [ { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, "create": 1, "delete": 1, "email": 1, "export": 1, - "if_owner": 0, - "import": 0, - "permlevel": 0, "print": 1, "read": 1, "report": 1, "role": "System Manager", - "set_user_permissions": 0, "share": 1, - "submit": 0, "write": 1 }, { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, - "create": 0, - "delete": 0, - "email": 0, - "export": 0, - "if_owner": 0, - "import": 0, - "permlevel": 0, - "print": 0, "read": 1, - "report": 0, - "role": "All", - "set_user_permissions": 0, - "share": 0, - "submit": 0, - "write": 0 + "role": "All" } ], - "quick_entry": 0, - "read_only": 0, - "read_only_onload": 0, - "show_name_in_global_search": 0, "sort_field": "modified", - "sort_order": "DESC", - "track_changes": 0, - "track_seen": 0 + "sort_order": "DESC" } \ No newline at end of file diff --git a/frappe/desk/doctype/dashboard_chart/dashboard_chart.py b/frappe/desk/doctype/dashboard_chart/dashboard_chart.py index ab1863ca0b..4ad6943e0b 100644 --- a/frappe/desk/doctype/dashboard_chart/dashboard_chart.py +++ b/frappe/desk/doctype/dashboard_chart/dashboard_chart.py @@ -8,7 +8,8 @@ from frappe import _ import datetime import json from frappe.utils.dashboard import cache_source, get_from_date_from_timespan -from frappe.utils import nowdate, add_to_date, getdate, get_last_day, formatdate, get_datetime, cint +from frappe.utils import nowdate, add_to_date, getdate, get_last_day, formatdate,\ + get_datetime, cint, now_datetime from frappe.model.naming import append_number_if_name_exists from frappe.boot import get_allowed_reports from frappe.model.document import Document @@ -26,15 +27,15 @@ def get_permission_query_conditions(user): if "System Manager" in roles: return None - allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read()) - 
allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()]) + allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()] + allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()] return ''' - `tabDashboard Chart`.`document_type` in {allowed_doctypes} - or `tabDashboard Chart`.`report_name` in {allowed_reports} + `tabDashboard Chart`.`document_type` in ({allowed_doctypes}) + or `tabDashboard Chart`.`report_name` in ({allowed_reports}) '''.format( - allowed_doctypes=allowed_doctypes, - allowed_reports=allowed_reports + allowed_doctypes=','.join(allowed_doctypes), + allowed_reports=','.join(allowed_reports) ) @@ -134,7 +135,7 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date): if not from_date: from_date = get_from_date_from_timespan(to_date, timespan) if not to_date: - to_date = datetime.datetime.now() + to_date = now_datetime() doctype = chart.document_type datefield = chart.based_on @@ -258,9 +259,10 @@ def get_aggregate_function(chart_type): def get_result(data, timegrain, from_date, to_date): start_date = getdate(from_date) end_date = getdate(to_date) - result = [] - while start_date <= end_date: + result = [[start_date, 0.0]] + + while start_date < end_date: next_date = get_next_expected_date(start_date, timegrain) result.append([next_date, 0.0]) start_date = next_date @@ -276,11 +278,8 @@ def get_result(data, timegrain, from_date, to_date): def get_next_expected_date(date, timegrain): next_date = None - if timegrain=='Daily': - next_date = add_to_date(date, days=1) - else: - # given date is always assumed to be the period ending date - next_date = get_period_ending(add_to_date(date, days=1), timegrain) + # given date is always assumed to be the period ending date + next_date = get_period_ending(add_to_date(date, days=1), timegrain) return getdate(next_date) def get_period_ending(date, 
timegrain): diff --git a/frappe/desk/doctype/dashboard_chart/test_dashboard_chart.py b/frappe/desk/doctype/dashboard_chart/test_dashboard_chart.py index dfc6edbf58..5e39998e62 100644 --- a/frappe/desk/doctype/dashboard_chart/test_dashboard_chart.py +++ b/frappe/desk/doctype/dashboard_chart/test_dashboard_chart.py @@ -4,13 +4,12 @@ from __future__ import unicode_literals import unittest, frappe -from frappe.utils import getdate, formatdate +from frappe.utils import getdate, formatdate, get_last_day from frappe.desk.doctype.dashboard_chart.dashboard_chart import (get, get_period_ending) from datetime import datetime from dateutil.relativedelta import relativedelta -import calendar class TestDashboardChart(unittest.TestCase): def test_period_ending(self): @@ -35,9 +34,6 @@ class TestDashboardChart(unittest.TestCase): self.assertEqual(get_period_ending('2019-10-01', 'Quarterly'), getdate('2019-12-31')) - self.assertEqual(get_period_ending('2019-10-01', 'Yearly'), - getdate('2019-12-31')) - def test_dashboard_chart(self): if frappe.db.exists('Dashboard Chart', 'Test Dashboard Chart'): frappe.delete_doc('Dashboard Chart', 'Test Dashboard Chart') @@ -50,22 +46,24 @@ class TestDashboardChart(unittest.TestCase): based_on = 'creation', timespan = 'Last Year', time_interval = 'Monthly', - filters_json = '[]', + filters_json = '{}', timeseries = 1 )).insert() cur_date = datetime.now() - relativedelta(years=1) - result = get(chart_name ='Test Dashboard Chart', refresh = 1) - for idx in range(13): - month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1])) + result = get(chart_name='Test Dashboard Chart', refresh=1) + self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d'))) + + if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')): + cur_date += relativedelta(months=1) + + for idx in range(1, 13): + month = get_last_day(cur_date) 
month = formatdate(month.strftime('%Y-%m-%d')) self.assertEqual(result.get('labels')[idx], month) cur_date += relativedelta(months=1) - # self.assertEqual(result.get('datasets')[0].get('values')[:-1], - # [44, 28, 8, 11, 2, 6, 18, 6, 4, 5, 15, 13]) - frappe.db.rollback() def test_empty_dashboard_chart(self): @@ -88,9 +86,14 @@ class TestDashboardChart(unittest.TestCase): cur_date = datetime.now() - relativedelta(years=1) - result = get(chart_name ='Test Empty Dashboard Chart', refresh = 1) - for idx in range(13): - month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1])) + result = get(chart_name ='Test Empty Dashboard Chart', refresh=1) + self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d'))) + + if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')): + cur_date += relativedelta(months=1) + + for idx in range(1, 13): + month = get_last_day(cur_date) month = formatdate(month.strftime('%Y-%m-%d')) self.assertEqual(result.get('labels')[idx], month) cur_date += relativedelta(months=1) @@ -121,8 +124,13 @@ class TestDashboardChart(unittest.TestCase): cur_date = datetime.now() - relativedelta(years=1) result = get(chart_name ='Test Empty Dashboard Chart 2', refresh = 1) - for idx in range(13): - month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1])) + self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d'))) + + if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')): + cur_date += relativedelta(months=1) + + for idx in range(1, 13): + month = get_last_day(cur_date) month = formatdate(month.strftime('%Y-%m-%d')) self.assertEqual(result.get('labels')[idx], month) cur_date += relativedelta(months=1) @@ -132,6 +140,60 @@ class TestDashboardChart(unittest.TestCase): 
frappe.db.rollback() + def test_group_by_chart_type(self): + if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'): + frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart') + + frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert() + + frappe.get_doc(dict( + doctype = 'Dashboard Chart', + chart_name = 'Test Group By Dashboard Chart', + chart_type = 'Group By', + document_type = 'ToDo', + group_by_based_on = 'status', + filters_json = '[]', + )).insert() + + result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1) + todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]}) + + self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count) + + frappe.db.rollback() + + def test_daily_dashboard_chart(self): + insert_test_records() + + if frappe.db.exists('Dashboard Chart', 'Test Daily Dashboard Chart'): + frappe.delete_doc('Dashboard Chart', 'Test Daily Dashboard Chart') + + frappe.get_doc(dict( + doctype = 'Dashboard Chart', + chart_name = 'Test Daily Dashboard Chart', + chart_type = 'Sum', + document_type = 'Communication', + based_on = 'communication_date', + value_based_on = 'rating', + timespan = 'Select Date Range', + time_interval = 'Daily', + from_date = datetime(2019, 1, 6), + to_date = datetime(2019, 1, 11), + filters_json = '[]', + timeseries = 1 + )).insert() + + result = get(chart_name ='Test Daily Dashboard Chart', refresh = 1) + + self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 300.0, 0.0, 100.0, 0.0]) + self.assertEqual( + result.get('labels'), + [formatdate('2019-01-06'), formatdate('2019-01-07'), formatdate('2019-01-08'),\ + formatdate('2019-01-09'), formatdate('2019-01-10'), formatdate('2019-01-11')] + ) + + frappe.db.rollback() + def test_weekly_dashboard_chart(self): insert_test_records() @@ -155,37 +217,18 @@ class TestDashboardChart(unittest.TestCase): result = get(chart_name ='Test Weekly Dashboard Chart', refresh = 1) - 
self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 0.0]) - self.assertEqual(result.get('labels'), [formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')]) - - frappe.db.rollback() - - def test_group_by_chart_type(self): - if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'): - frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart') - - frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert() - - frappe.get_doc(dict( - doctype = 'Dashboard Chart', - chart_name = 'Test Group By Dashboard Chart', - chart_type = 'Group By', - document_type = 'ToDo', - group_by_based_on = 'status', - filters_json = '[]', - )).insert() - - result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1) - todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]}) - - self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count) + self.assertEqual(result.get('datasets')[0].get('values'), [50.0, 300.0, 800.0, 0.0]) + self.assertEqual(result.get('labels'), [formatdate('2018-12-30'), formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')]) frappe.db.rollback() def insert_test_records(): - create_new_communication(datetime(2019, 1, 10), 100) + create_new_communication(datetime(2018, 12, 30), 50) + create_new_communication(datetime(2019, 1, 4), 100) create_new_communication(datetime(2019, 1, 6), 200) + create_new_communication(datetime(2019, 1, 7), 400) create_new_communication(datetime(2019, 1, 8), 300) + create_new_communication(datetime(2019, 1, 10), 100) def create_new_communication(date, rating): communication = { diff --git a/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.json b/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.json index 7f6532ce1f..fbe0ae94f0 100644 --- a/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.json +++ 
b/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.json @@ -1,162 +1,69 @@ { - "allow_copy": 0, - "allow_events_in_timeline": 0, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, + "actions": [], "autoname": "field:source_name", - "beta": 0, "creation": "2019-02-06 07:55:29.579840", - "custom": 0, - "docstatus": 0, "doctype": "DocType", - "document_type": "", "editable_grid": 1, "engine": "InnoDB", + "field_order": [ + "source_name", + "module", + "timeseries" + ], "fields": [ { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fetch_if_empty": 0, "fieldname": "source_name", "fieldtype": "Data", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Source Name", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, "unique": 1 }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fetch_if_empty": 0, "fieldname": "module", "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Module", - "length": 0, - "no_copy": 0, "options": "Module Def", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, 
- "columns": 0, - "fetch_if_empty": 0, + "default": "0", "fieldname": "timeseries", "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Timeseries", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "label": "Timeseries" } ], - "has_web_view": 0, - "hide_toolbar": 0, - "idx": 0, - "in_create": 0, - "is_submittable": 0, - "issingle": 0, - "istable": 0, - "max_attachments": 0, - "modified": "2019-04-09 14:20:51.548207", + "links": [], + "modified": "2020-06-26 18:00:37.421491", "modified_by": "Administrator", "module": "Desk", "name": "Dashboard Chart Source", - "name_case": "", "owner": "Administrator", "permissions": [ { - "amend": 0, - "cancel": 0, - "create": 1, - "delete": 1, "email": 1, "export": 1, - "if_owner": 0, - "import": 0, - "permlevel": 0, "print": 1, "read": 1, "report": 1, "role": "System Manager", - "set_user_permissions": 0, + "share": 1 + }, + { + "create": 1, + "delete": 1, + "email": 1, + "export": 1, + "print": 1, + "read": 1, + "report": 1, + "role": "Administrator", "share": 1, - "submit": 0, "write": 1 } ], - "quick_entry": 0, - "read_only": 0, - "show_name_in_global_search": 0, "sort_field": "modified", "sort_order": "DESC", - "title_field": "", - "track_changes": 1, - "track_seen": 0, - "track_views": 0 + "track_changes": 1 } \ No newline at end of file diff --git a/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.py b/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.py index de83807b4b..6685009078 100644 --- a/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.py +++ 
b/frappe/desk/doctype/dashboard_chart_source/dashboard_chart_source.py @@ -18,10 +18,6 @@ def get_config(name): return f.read() class DashboardChartSource(Document): - def validate(self): - if frappe.session.user != "Administrator": - frappe.throw(_("Only Administrator is allowed to create Dashboard Chart Sources")) - def on_update(self): export_to_files(record_list=[[self.doctype, self.name]], record_module=self.module, create_init=True) diff --git a/frappe/desk/doctype/module_onboarding/module_onboarding.json b/frappe/desk/doctype/module_onboarding/module_onboarding.json index 0667ddf6ad..02a18b9c2d 100644 --- a/frappe/desk/doctype/module_onboarding/module_onboarding.json +++ b/frappe/desk/doctype/module_onboarding/module_onboarding.json @@ -13,7 +13,6 @@ "column_break_4", "success_message", "documentation_url", - "user_can_dismiss", "is_complete", "section_break_6", "steps" @@ -53,13 +52,6 @@ "label": "Success Message", "reqd": 1 }, - { - "default": "1", - "description": "Allow users to dismiss onboarding temporarily for a day", - "fieldname": "user_can_dismiss", - "fieldtype": "Check", - "label": "User Can Dismiss " - }, { "fieldname": "documentation_url", "fieldtype": "Data", @@ -90,7 +82,7 @@ } ], "links": [], - "modified": "2020-05-18 19:42:39.738869", + "modified": "2020-06-08 15:36:04.701049", "modified_by": "Administrator", "module": "Desk", "name": "Module Onboarding", diff --git a/frappe/desk/doctype/notification_log/notification_log.py b/frappe/desk/doctype/notification_log/notification_log.py index 211b3ae5e6..12f2c41274 100644 --- a/frappe/desk/doctype/notification_log/notification_log.py +++ b/frappe/desk/doctype/notification_log/notification_log.py @@ -100,14 +100,16 @@ def send_notification_email(doc): ) def get_email_header(doc): - return { + docname = doc.document_name + header_map = { 'Default': _('New Notification'), - 'Mention': _('New Mention'), - 'Assignment': _('New Assignment'), - 'Share': _('New Document Shared'), - 'Energy Point': 
_('Energy Point Update'), - }[doc.type or 'Default'] + 'Mention': _('New Mention on {0}').format(docname), + 'Assignment': _('Assignment Update on {0}').format(docname), + 'Share': _('New Document Shared {0}').format(docname), + 'Energy Point': _('Energy Point Update on {0}').format(docname), + } + return header_map[doc.type or 'Default'] @frappe.whitelist() def mark_all_as_read(): diff --git a/frappe/desk/doctype/number_card/number_card.py b/frappe/desk/doctype/number_card/number_card.py index 6bb9c7d45c..c4a427c4e0 100644 --- a/frappe/desk/doctype/number_card/number_card.py +++ b/frappe/desk/doctype/number_card/number_card.py @@ -27,12 +27,12 @@ def get_permission_query_conditions(user=None): if "System Manager" in roles: return None - allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read()) + allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()] return ''' - `tabNumber Card`.`document_type` in {allowed_doctypes} + `tabNumber Card`.`document_type` in ({allowed_doctypes}) '''.format( - allowed_doctypes=allowed_doctypes, + allowed_doctypes=','.join(allowed_doctypes) ) def has_permission(doc, ptype, user): diff --git a/frappe/desk/form/assign_to.py b/frappe/desk/form/assign_to.py index a916cbca82..26b2bd2835 100644 --- a/frappe/desk/form/assign_to.py +++ b/frappe/desk/form/assign_to.py @@ -178,7 +178,8 @@ def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE', description_html = "
    {0}
    ".format(description) if description else None if action=='CLOSE': - subject = _('Your assignment on {0} {1} has been removed').format(frappe.bold(doc_type), get_title_html(title)) + subject = _('Your assignment on {0} {1} has been removed by {2}')\ + .format(frappe.bold(doc_type), get_title_html(title), frappe.bold(user_name)) else: user_name = frappe.bold(user_name) document_type = frappe.bold(doc_type) diff --git a/frappe/desk/form/linked_with.py b/frappe/desk/form/linked_with.py index 72917d0341..5bae49ea95 100644 --- a/frappe/desk/form/linked_with.py +++ b/frappe/desk/form/linked_with.py @@ -13,7 +13,7 @@ from frappe.modules import load_doctype_module @frappe.whitelist() -def get_submitted_linked_docs(doctype, name, docs=None, linked=None): +def get_submitted_linked_docs(doctype, name, docs=None, visited=None): """ Get all nested submitted linked doctype linkinfo @@ -31,26 +31,27 @@ def get_submitted_linked_docs(doctype, name, docs=None, linked=None): if not docs: docs = [] - if not linked: - linked = {} + if not visited: + visited = {} + + if doctype not in visited: + visited[doctype] = [] + + if name in visited[doctype]: + return linkinfo = get_linked_doctypes(doctype) linked_docs = get_linked_docs(doctype, name, linkinfo) link_count = 0 + visited[doctype].append(name) + for link_doctype, link_names in linked_docs.items(): - if link_doctype not in linked: - linked[link_doctype] = [] for link in link_names: if link['name'] == name: continue - if linked and name in linked[link_doctype]: - continue - - linked[link_doctype].append(link['name']) - docinfo = link.update({"doctype": link_doctype}) validated_doc = validate_linked_doc(docinfo) @@ -58,16 +59,15 @@ def get_submitted_linked_docs(doctype, name, docs=None, linked=None): continue link_count += 1 - if link.name in [doc.get("name") for doc in docs]: - continue - links = get_submitted_linked_docs(link_doctype, link.name, docs, linked) - docs.append({ - "doctype": link_doctype, - "name": link.name, - 
"docstatus": link.docstatus, - "link_count": links.get("count") - }) + links = get_submitted_linked_docs(link_doctype, link.name, docs, visited) + if links: + docs.append({ + "doctype": link_doctype, + "name": link.name, + "docstatus": link.docstatus, + "link_count": links.get("count") + }) # sort linked documents by ascending number of links docs.sort(key=lambda doc: doc.get("link_count")) diff --git a/frappe/desk/form/load.py b/frappe/desk/form/load.py index f24f33df07..cacbd3c633 100644 --- a/frappe/desk/form/load.py +++ b/frappe/desk/form/load.py @@ -100,6 +100,7 @@ def get_docinfo(doc=None, doctype=None, name=None): "shared": frappe.share.get_users(doc.doctype, doc.name), "views": get_view_logs(doc.doctype, doc.name), "energy_point_logs": get_point_logs(doc.doctype, doc.name), + "additional_timeline_content": get_additional_timeline_content(doc.doctype, doc.name), "milestones": get_milestones(doc.doctype, doc.name), "is_document_followed": is_document_followed(doc.doctype, doc.name, frappe.session.user), "tags": get_tags(doc.doctype, doc.name), @@ -277,3 +278,14 @@ def get_document_email(doctype, name): def get_automatic_email_link(): return frappe.db.get_value("Email Account", {"enable_incoming": 1, "enable_automatic_linking": 1}, "email_id") + +def get_additional_timeline_content(doctype, docname): + contents = [] + hooks = frappe.get_hooks().get('additional_timeline_content', {}) + methods_for_all_doctype = hooks.get('*', []) + methods_for_current_doctype = hooks.get(doctype, []) + + for method in methods_for_all_doctype + methods_for_current_doctype: + contents.extend(frappe.get_attr(method)(doctype, docname) or []) + + return contents \ No newline at end of file diff --git a/frappe/desk/leaderboard.py b/frappe/desk/leaderboard.py index 1ebf32febe..e5654c853f 100644 --- a/frappe/desk/leaderboard.py +++ b/frappe/desk/leaderboard.py @@ -14,13 +14,16 @@ def get_leaderboards(): return leaderboards @frappe.whitelist() -def 
get_energy_point_leaderboard(from_date, company = None, field = None, limit = None): +def get_energy_point_leaderboard(date_range, company = None, field = None, limit = None): + filters = [ + ['type', '!=', 'Review'], + ] + if date_range: + date_range = frappe.parse_json(date_range) + filters.append(['creation', 'between', [date_range[0], date_range[1]]]) energy_point_users = frappe.db.get_all('Energy Point Log', fields = ['user as name', 'sum(points) as value'], - filters = [ - ['type', '!=', 'Review'], - ['creation', '>', from_date] - ], + filters = filters, group_by = 'user', order_by = 'value desc' ) diff --git a/frappe/desk/listview.py b/frappe/desk/listview.py index 1bce14fb2d..1d10a13930 100644 --- a/frappe/desk/listview.py +++ b/frappe/desk/listview.py @@ -35,7 +35,7 @@ def get_group_by_count(doctype, current_filters, field): from `tabToDo`, `tabUser` where - `tabToDo`.status='Open' and + `tabToDo`.status!='Cancelled' and `tabToDo`.owner = `tabUser`.name and `tabUser`.user_type = 'System User' {subquery_condition} diff --git a/frappe/desk/page/backups/backups.py b/frappe/desk/page/backups/backups.py index 663ab37cff..386af70a4e 100644 --- a/frappe/desk/page/backups/backups.py +++ b/frappe/desk/page/backups/backups.py @@ -30,7 +30,7 @@ def get_context(context): get_size(os.path.join(path, _file))) for _file in files if _file.endswith('sql.gz')] files.sort(key=lambda x: x[1], reverse=True) - return {"files": files} + return {"files": files[:backup_limit]} def get_scheduled_backup_limit(): backup_limit = frappe.db.get_singles_value('System Settings', 'backup_limit') @@ -89,4 +89,4 @@ def backup_files_and_notify_user(user_email=None): def get_downloadable_links(backup_files): for key in ['backup_path_files', 'backup_path_private_files']: path = backup_files[key] - backup_files[key] = get_url('/'.join(path.split('/')[-2:])) \ No newline at end of file + backup_files[key] = get_url('/'.join(path.split('/')[-2:])) diff --git 
a/frappe/desk/page/leaderboard/leaderboard.js b/frappe/desk/page/leaderboard/leaderboard.js index 4472a2978a..189949ac68 100644 --- a/frappe/desk/page/leaderboard/leaderboard.js +++ b/frappe/desk/page/leaderboard/leaderboard.js @@ -49,7 +49,7 @@ class Leaderboard { this.timespans = [ "This Week", "This Month", "This Quarter", "This Year", "Last Week", "Last Month", "Last Quarter", "Last Year", - "All Time", "Select From Date" + "All Time", "Select Date Range" ]; // for saving current selected filters @@ -113,7 +113,7 @@ class Leaderboard { return {"label": __(d), value: d }; }) ); - this.create_from_date_field(); + this.create_date_range_field(); this.type_select = this.page.add_select(__("Field"), this.options.selected_filter.map(d => { @@ -123,12 +123,12 @@ class Leaderboard { this.timespan_select.on("change", (e) => { this.options.selected_timespan = e.currentTarget.value; - if (this.options.selected_timespan === 'Select From Date') { - this.from_date_field.show(); + if (this.options.selected_timespan === 'Select Date Range') { + this.date_range_field.show(); } else { - this.from_date_field.hide(); - this.make_request(); + this.date_range_field.hide(); } + this.make_request(); }); this.type_select.on("change", (e) => { @@ -137,21 +137,21 @@ class Leaderboard { }); } - create_from_date_field() { + create_date_range_field() { let timespan_field = $(this.parent).find(`.frappe-control[data-original-title='Timespan']`); - this.from_date_field = $(`
    `).insertAfter(timespan_field).hide(); + this.date_range_field = $(`
    `).insertAfter(timespan_field).hide(); let date_field = frappe.ui.form.make_control({ df: { - fieldtype: 'Date', - fieldname: 'selected_from_date', - placeholder: frappe.datetime.month_start(), - default: frappe.datetime.month_start(), + fieldtype: 'DateRange', + fieldname: 'selected_date_range', + placeholder: "Date Range", + default: [frappe.datetime.month_start(), frappe.datetime.now_date()], input_class: 'input-sm', reqd: 1, change: () => { - this.selected_from_date = date_field.get_value(); - if (this.selected_from_date) this.make_request(); + this.selected_date_range = date_field.get_value(); + if (this.selected_date_range) this.make_request(); } }, parent: $(this.parent).find('.from-date-field'), @@ -225,7 +225,7 @@ class Leaderboard { frappe.call( this.leaderboard_config[this.options.selected_doctype].method, { - 'from_date': this.get_from_date(), + 'date_range': this.get_date_range(), 'company': this.options.selected_company, 'field': this.options.selected_filter_item, 'limit': this.leaderboard_limit, @@ -375,23 +375,22 @@ class Leaderboard { `); } - get_from_date() { + get_date_range() { let timespan = this.options.selected_timespan.toLowerCase(); let current_date = frappe.datetime.now_date(); - let get_from_date = { - "this week": frappe.datetime.week_start(), - "this month": frappe.datetime.month_start(), - "this quarter": frappe.datetime.quarter_start(), - "this year": frappe.datetime.year_start(), - "last week": frappe.datetime.add_days(current_date, -7), - "last month": frappe.datetime.add_months(current_date, -1), - "last quarter": frappe.datetime.add_months(current_date, -3), - "last year": frappe.datetime.add_months(current_date, -12), - "all time": "", - "select from date": this.selected_from_date || frappe.datetime.month_start() + let date_range_map = { + "this week": [frappe.datetime.week_start(), current_date], + "this month": [frappe.datetime.month_start(), current_date], + "this quarter": [frappe.datetime.quarter_start(), current_date], 
+ "this year": [frappe.datetime.year_start(), current_date], + "last week": [frappe.datetime.add_days(current_date, -7), current_date], + "last month": [frappe.datetime.add_months(current_date, -1), current_date], + "last quarter": [frappe.datetime.add_months(current_date, -3), current_date], + "last year": [frappe.datetime.add_months(current_date, -12), current_date], + "all time": null, + "select date range": this.selected_date_range || [frappe.datetime.month_start(), current_date] } - - return get_from_date[timespan]; + return date_range_map[timespan]; } } diff --git a/frappe/desk/search.py b/frappe/desk/search.py index c70b650945..b4b54b4b6e 100644 --- a/frappe/desk/search.py +++ b/frappe/desk/search.py @@ -6,6 +6,7 @@ from __future__ import unicode_literals import frappe, json from frappe.utils import cstr, unique, cint from frappe.permissions import has_permission +from frappe.handler import is_whitelisted from frappe import _ from six import string_types import re @@ -74,8 +75,17 @@ def search_widget(doctype, txt, query=None, searchfield=None, start=0, if query and query.split()[0].lower()!="select": # by method - frappe.response["values"] = frappe.call(query, doctype, txt, - searchfield, start, page_length, filters, as_dict=as_dict) + try: + is_whitelisted(frappe.get_attr(query)) + frappe.response["values"] = frappe.call(query, doctype, txt, + searchfield, start, page_length, filters, as_dict=as_dict) + except Exception as e: + if frappe.local.conf.developer_mode: + raise e + else: + frappe.respond_as_web_page(title='Invalid Method', html='Method not found', + indicator_color='red', http_status_code=404) + return elif not query and doctype in standard_queries: # from standard queries search_widget(doctype, txt, standard_queries[doctype][0], @@ -157,7 +167,7 @@ def search_widget(doctype, txt, query=None, searchfield=None, start=0, strict=False) if doctype in UNTRANSLATED_DOCTYPES: - values = tuple([v for v in list(values) if re.search(txt+".*", (_(v.name) if 
as_dict else _(v[0])), re.IGNORECASE)]) + values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)]) # remove _relevance from results if as_dict: diff --git a/frappe/email/doctype/email_account/email_account.py b/frappe/email/doctype/email_account/email_account.py index 082b16c17a..cf8c6e80c6 100755 --- a/frappe/email/doctype/email_account/email_account.py +++ b/frappe/email/doctype/email_account/email_account.py @@ -10,7 +10,7 @@ import socket import time from frappe import _ from frappe.model.document import Document -from frappe.utils import validate_email_address, cint, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days +from frappe.utils import validate_email_address, cint, cstr, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days from frappe.utils.user import is_system_user from frappe.utils.jinja import render_template from frappe.email.smtp import SMTPServer @@ -169,19 +169,20 @@ class EmailAccount(Document): try: email_server.connect() except (error_proto, imaplib.IMAP4.error) as e: - message = e.message.lower().replace(" ","") - if in_receive and any(map(lambda t: t in message, ['authenticationfail', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed + e = cstr(e) + message = e.lower().replace(" ","") + if in_receive and any(map(lambda t: t in message, ['authenticationfailed', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed 'loginfailed', 'err[auth]', 'errtemporaryerror'])): #temporary error to deal with godaddy # if called via self.receive and it leads to authentication error, disable incoming # and send email to system manager self.handle_incoming_connect_error( - description=_('Authentication failed while receiving emails from Email Account {0}. Message from server: {1}').format(self.name, e.message) + description=_('Authentication failed while receiving emails from Email Account {0}. 
Message from server: {1}').format(self.name, e) ) return None else: - frappe.throw(e.message) + frappe.throw(e) except socket.error: if in_receive: @@ -273,6 +274,8 @@ class EmailAccount(Document): for idx, msg in enumerate(incoming_mails): uid = None if not uid_list else uid_list[idx] + self.flags.notify = True + try: args = { "uid": uid, @@ -293,7 +296,11 @@ class EmailAccount(Document): else: frappe.db.commit() - if communication: + if communication and self.flags.notify: + + # If email already exists in the system + # then do not send notifications for the same email. + attachments = [] if hasattr(communication, '_attachments'): @@ -362,6 +369,9 @@ class EmailAccount(Document): name = names[0].get("name") # email is already available update communication uid instead frappe.db.set_value("Communication", name, "uid", uid, update_modified=False) + + self.flags.notify = False + return frappe.get_doc("Communication", name) if email.content_type == 'text/html': @@ -468,26 +478,38 @@ class EmailAccount(Document): if self.append_to and self.sender_field: if self.subject_field: - # try and match by subject and sender - # if sent by same sender with same subject, - # append it to old coversation - subject = frappe.as_unicode(strip(re.sub(r"(^\s*(fw|fwd|wg)[^:]*:|\s*(re|aw)[^:]*:\s*)*", - "", email.subject, 0, flags=re.IGNORECASE))) + if '#' in email.subject: + # try and match if ID is found + # document ID is appended to subject + # example "Re: Your email (#OPP-2020-2334343)" + parent_id = email.subject.rsplit('#', 1)[-1].strip(' ()') + if parent_id: + parent = frappe.db.get_all(self.append_to, filters = dict(name = parent_id), + fields = 'name') - parent = frappe.db.get_all(self.append_to, filters={ - self.sender_field: email.from_email, - self.subject_field: ("like", "%{0}%".format(subject)), - "creation": (">", (get_datetime() - relativedelta(days=60)).strftime(DATE_FORMAT)) - }, fields="name") + if not parent: + # try and match by subject and sender + # if sent by 
same sender with same subject, + # append it to old coversation + subject = frappe.as_unicode(strip(re.sub(r"(^\s*(fw|fwd|wg)[^:]*:|\s*(re|aw)[^:]*:\s*)*", + "", email.subject, 0, flags=re.IGNORECASE))) + + parent = frappe.db.get_all(self.append_to, filters={ + self.sender_field: email.from_email, + self.subject_field: ("like", "%{0}%".format(subject)), + "creation": (">", (get_datetime() - relativedelta(days=60)).strftime(DATE_FORMAT)) + }, fields = "name", limit = 1) - # match only subject field - # when the from_email is of a user in the system - # and subject is atleast 10 chars long if not parent and len(subject) > 10 and is_system_user(email.from_email): + # match only subject field + # when the from_email is of a user in the system + # and subject is atleast 10 chars long parent = frappe.db.get_all(self.append_to, filters={ self.subject_field: ("like", "%{0}%".format(subject)), "creation": (">", (get_datetime() - relativedelta(days=60)).strftime(DATE_FORMAT)) - }, fields="name") + }, fields = "name", limit = 1) + + if parent: parent = frappe._dict(doctype=self.append_to, name=parent[0].name) diff --git a/frappe/email/doctype/newsletter/newsletter.py b/frappe/email/doctype/newsletter/newsletter.py index 2dccfbead4..a82b52a663 100755 --- a/frappe/email/doctype/newsletter/newsletter.py +++ b/frappe/email/doctype/newsletter/newsletter.py @@ -191,7 +191,7 @@ def subscribe(email, email_group=_('Website')):

    {3}

    """.format(*messages) - frappe.sendmail(email, subject=getattr('email_template', 'subject', '') or _("Confirm Your Email"), content=content) + frappe.sendmail(email, subject=getattr('email_template', 'subject', '') or _("Confirm Your Email"), content=content, now=True) @frappe.whitelist(allow_guest=True) def confirm_subscription(email, email_group=_('Website')): diff --git a/frappe/email/doctype/notification/notification.json b/frappe/email/doctype/notification/notification.json index d1526f5fe4..932f0491a9 100644 --- a/frappe/email/doctype/notification/notification.json +++ b/frappe/email/doctype/notification/notification.json @@ -48,15 +48,11 @@ "default": "1", "fieldname": "enabled", "fieldtype": "Check", - "label": "Enabled", - "show_days": 1, - "show_seconds": 1 + "label": "Enabled" }, { "fieldname": "column_break_2", - "fieldtype": "Column Break", - "show_days": 1, - "show_seconds": 1 + "fieldtype": "Column Break" }, { "default": "Email", @@ -65,9 +61,7 @@ "fieldtype": "Select", "label": "Channel", "options": "Email\nSlack\nSystem Notification", - "reqd": 1, - "show_days": 1, - "show_seconds": 1 + "reqd": 1 }, { "depends_on": "eval:doc.channel=='Slack'", @@ -75,16 +69,12 @@ "fieldtype": "Link", "label": "Slack Channel", "mandatory_depends_on": "eval:doc.channel=='Slack'", - "options": "Slack Webhook URL", - "show_days": 1, - "show_seconds": 1 + "options": "Slack Webhook URL" }, { "fieldname": "filters", "fieldtype": "Section Break", - "label": "Filters", - "show_days": 1, - "show_seconds": 1 + "label": "Filters" }, { "description": "To add dynamic subject, use jinja tags like\n\n
    {{ doc.name }} Delivered
    ", @@ -93,9 +83,7 @@ "ignore_xss_filter": 1, "in_list_view": 1, "label": "Subject", - "reqd": 1, - "show_days": 1, - "show_seconds": 1 + "reqd": 1 }, { "fieldname": "document_type", @@ -105,17 +93,13 @@ "label": "Document Type", "options": "DocType", "reqd": 1, - "search_index": 1, - "show_days": 1, - "show_seconds": 1 + "search_index": 1 }, { "default": "0", "fieldname": "is_standard", "fieldtype": "Check", - "label": "Is Standard", - "show_days": 1, - "show_seconds": 1 + "label": "Is Standard" }, { "depends_on": "is_standard", @@ -123,86 +107,67 @@ "fieldtype": "Link", "in_standard_filter": 1, "label": "Module", - "options": "Module Def", - "show_days": 1, - "show_seconds": 1 + "options": "Module Def" }, { "fieldname": "col_break_1", - "fieldtype": "Column Break", - "show_days": 1, - "show_seconds": 1 + "fieldtype": "Column Break" }, { + "depends_on": "eval: doc.document_type", "fieldname": "event", "fieldtype": "Select", "in_list_view": 1, "label": "Send Alert On", "options": "\nNew\nSave\nSubmit\nCancel\nDays After\nDays Before\nValue Change\nMethod\nCustom", "reqd": 1, - "search_index": 1, - "show_days": 1, - "show_seconds": 1 + "search_index": 1 }, { "depends_on": "eval:doc.event=='Method'", "description": "Trigger on valid methods like \"before_insert\", \"after_update\", etc (will depend on the DocType selected)", "fieldname": "method", "fieldtype": "Data", - "label": "Trigger Method", - "show_days": 1, - "show_seconds": 1 + "label": "Trigger Method" }, { - "depends_on": "eval:doc.event==\"Days After\" || doc.event==\"Days Before\"", + "depends_on": "eval:doc.document_type && (doc.event==\"Days After\" || doc.event==\"Days Before\")", "description": "Send alert if date matches this field's value", "fieldname": "date_changed", "fieldtype": "Select", - "label": "Reference Date", - "show_days": 1, - "show_seconds": 1 + "label": "Reference Date" }, { "default": "0", - "depends_on": "eval:doc.event==\"Days After\" || doc.event==\"Days Before\"", + 
"depends_on": "eval:doc.document_type && (doc.event==\"Days After\" || doc.event==\"Days Before\")", "description": "Send days before or after the reference date", "fieldname": "days_in_advance", "fieldtype": "Int", - "label": "Days Before or After", - "show_days": 1, - "show_seconds": 1 + "label": "Days Before or After" }, { - "depends_on": "eval:doc.event==\"Value Change\"", + "depends_on": "eval:doc.document_type && doc.event==\"Value Change\"", "description": "Send alert if this field's value changes", "fieldname": "value_changed", "fieldtype": "Select", - "label": "Value Changed", - "show_days": 1, - "show_seconds": 1 + "label": "Value Changed" }, { "fieldname": "sender", "fieldtype": "Link", "label": "Sender", - "options": "Email Account", - "show_days": 1, - "show_seconds": 1 + "options": "Email Account" }, { "fieldname": "sender_email", "fieldtype": "Data", "label": "Sender Email", "options": "Email", - "read_only": 1, - "show_days": 1, - "show_seconds": 1 + "read_only": 1 }, { "fieldname": "section_break_9", - "fieldtype": "Section Break", - "show_days": 1, - "show_seconds": 1 + "fieldtype": "Section Break" }, { "description": "Optional: The alert will be sent if this expression is true", @@ -210,128 +175,96 @@ "fieldtype": "Code", "ignore_xss_filter": 1, "in_list_view": 1, - "label": "Condition", - "show_days": 1, - "show_seconds": 1 + "label": "Condition" }, { "fieldname": "column_break_6", - "fieldtype": "Column Break", - "show_days": 1, - "show_seconds": 1 + "fieldtype": "Column Break" }, { "fieldname": "html_7", "fieldtype": "HTML", - "options": "

    Condition Examples:

    \n
    doc.status==\"Open\"
    doc.due_date==nowdate()
    doc.total > 40000\n
    \n", - "show_days": 1, - "show_seconds": 1 + "options": "

    Condition Examples:

    \n
    doc.status==\"Open\"
    doc.due_date==nowdate()
    doc.total > 40000\n
    \n" }, { "collapsible": 1, "fieldname": "property_section", "fieldtype": "Section Break", - "label": "Set Property After Alert", - "show_days": 1, - "show_seconds": 1 + "label": "Set Property After Alert" }, { "fieldname": "set_property_after_alert", "fieldtype": "Select", - "label": "Set Property After Alert", - "show_days": 1, - "show_seconds": 1 + "label": "Set Property After Alert" }, { "fieldname": "property_value", "fieldtype": "Data", - "label": "Value To Be Set", - "show_days": 1, - "show_seconds": 1 + "label": "Value To Be Set" }, { "depends_on": "eval:doc.channel!=='Slack'", "fieldname": "column_break_5", "fieldtype": "Section Break", - "label": "Recipients", - "show_days": 1, - "show_seconds": 1 + "label": "Recipients" }, { "fieldname": "recipients", "fieldtype": "Table", "label": "Recipients", "mandatory_depends_on": "eval:doc.channel!=='Slack'", - "options": "Notification Recipient", - "show_days": 1, - "show_seconds": 1 + "options": "Notification Recipient" }, { "fieldname": "message_sb", "fieldtype": "Section Break", - "label": "Message", - "show_days": 1, - "show_seconds": 1 + "label": "Message" }, { "default": "Add your message here", "fieldname": "message", "fieldtype": "Code", "ignore_xss_filter": 1, - "label": "Message", - "show_days": 1, - "show_seconds": 1 + "label": "Message" }, { "depends_on": "eval:doc.channel=='Email'", "fieldname": "message_examples", "fieldtype": "HTML", "label": "Message Examples", - "options": "
    Message Example
    \n\n
    <h3>Order Overdue</h3>\n\n<p>Transaction {{ doc.name }} has exceeded Due Date. Please take necessary action.</p>\n\n<!-- show last comment -->\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n<h4>Details</h4>\n\n<ul>\n<li>Customer: {{ doc.customer }}\n<li>Amount: {{ doc.grand_total }}\n</ul>\n
    ", - "show_days": 1, - "show_seconds": 1 + "options": "
    Message Example
    \n\n
    <h3>Order Overdue</h3>\n\n<p>Transaction {{ doc.name }} has exceeded Due Date. Please take necessary action.</p>\n\n<!-- show last comment -->\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n<h4>Details</h4>\n\n<ul>\n<li>Customer: {{ doc.customer }}\n<li>Amount: {{ doc.grand_total }}\n</ul>\n
    " }, { "depends_on": "eval:doc.channel=='Slack'", "fieldname": "slack_message_examples", "fieldtype": "HTML", "label": "Message Examples", - "options": "
    Message Example
    \n\n
    *Order Overdue*\n\nTransaction {{ doc.name }} has exceeded Due Date. Please take necessary action.\n\n\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n*Details*\n\n\u2022 Customer: {{ doc.customer }}\n\u2022 Amount: {{ doc.grand_total }}\n
    ", - "show_days": 1, - "show_seconds": 1 + "options": "
    Message Example
    \n\n
    *Order Overdue*\n\nTransaction {{ doc.name }} has exceeded Due Date. Please take necessary action.\n\n\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n*Details*\n\n\u2022 Customer: {{ doc.customer }}\n\u2022 Amount: {{ doc.grand_total }}\n
    " }, { "fieldname": "view_properties", "fieldtype": "Button", - "label": "View Properties (via Customize Form)", - "show_days": 1, - "show_seconds": 1 + "label": "View Properties (via Customize Form)" }, { "collapsible": 1, "collapsible_depends_on": "attach_print", "fieldname": "column_break_25", "fieldtype": "Section Break", - "label": "Print Settings", - "show_days": 1, - "show_seconds": 1 + "label": "Print Settings" }, { "default": "0", "fieldname": "attach_print", "fieldtype": "Check", - "label": "Attach Print", - "show_days": 1, - "show_seconds": 1 + "label": "Attach Print" }, { "depends_on": "attach_print", "fieldname": "print_format", "fieldtype": "Link", "label": "Print Format", - "options": "Print Format", - "show_days": 1, - "show_seconds": 1 + "options": "Print Format" }, { "default": "0", @@ -339,14 +272,12 @@ "description": "If enabled, the notification will show up in the notifications dropdown on the top right corner of the navigation bar.", "fieldname": "send_system_notification", "fieldtype": "Check", - "label": "Send System Notification", - "show_days": 1, - "show_seconds": 1 + "label": "Send System Notification" } ], "icon": "fa fa-envelope", "links": [], - "modified": "2020-05-29 16:03:10.914526", + "modified": "2020-06-23 14:01:25.462544", "modified_by": "Administrator", "module": "Email", "name": "Notification", diff --git a/frappe/email/queue.py b/frappe/email/queue.py index ce512de276..8bffc108b9 100755 --- a/frappe/email/queue.py +++ b/frappe/email/queue.py @@ -347,7 +347,7 @@ def flush(from_test=False): if not smtpserver: smtpserver = SMTPServer() smtpserver_dict[email.sender] = smtpserver - + if from_test: send_one(email.name, smtpserver, auto_commit) else: @@ -390,12 +390,12 @@ def send_one(email, smtpserver=None, auto_commit=True, now=False): where name=%s for update''', email, as_dict=True) - + if len(email): email = email[0] else: return - + recipients_list = frappe.db.sql('''select name, recipient, status from `tabEmail Queue 
Recipient` where parent=%s''', email.name, as_dict=1) @@ -417,6 +417,8 @@ def send_one(email, smtpserver=None, auto_commit=True, now=False): if email.communication: frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit) + email_sent_to_any_recipient = None + try: message = None diff --git a/frappe/hooks.py b/frappe/hooks.py index f5a8701089..1f209f00a2 100644 --- a/frappe/hooks.py +++ b/frappe/hooks.py @@ -12,7 +12,7 @@ source_link = "https://github.com/frappe/frappe" app_license = "MIT" app_logo_url = '/assets/frappe/images/frappe-framework-logo.png' -develop_version = '12.x.x-develop' +develop_version = '13.x.x-develop' app_email = "info@frappe.io" @@ -273,7 +273,6 @@ setup_wizard_exception = [ before_migrate = ['frappe.patches.v11_0.sync_user_permission_doctype_before_migrate.execute'] after_migrate = [ - 'frappe.website.doctype.website_theme.website_theme.generate_theme_files_if_not_exist', 'frappe.modules.full_text_search.build_index_for_all_routes' ] diff --git a/frappe/installer.py b/frappe/installer.py index 4fc19b282a..40fdc057d6 100755 --- a/frappe/installer.py +++ b/frappe/installer.py @@ -113,23 +113,25 @@ def remove_from_installed_apps(app_name): installed_apps = frappe.get_installed_apps() if app_name in installed_apps: installed_apps.remove(app_name) - frappe.db.set_global("installed_apps", json.dumps(installed_apps)) + frappe.db.set_value("DefaultValue", {"defkey": "installed_apps"}, "defvalue", json.dumps(installed_apps)) frappe.db.commit() if frappe.flags.in_install: post_install() -def remove_app(app_name, dry_run=False, yes=False): - """Delete app and all linked to the app's module with the app.""" +def remove_app(app_name, dry_run=False, yes=False, no_backup=False): + """Remove app and all linked to the app's module with the app from a site.""" if not dry_run and not yes: confirm = input("All doctypes (including custom), modules related to this app will be deleted. Are you sure you want to continue (y/n) ? 
") if confirm!="y": return - from frappe.utils.backups import scheduled_backup - print("Backing up...") - scheduled_backup(ignore_files=True) + if not no_backup: + from frappe.utils.backups import scheduled_backup + print("Backing up...") + scheduled_backup(ignore_files=True) + frappe.flags.in_uninstall = True drop_doctypes = [] # remove modules, doctypes, roles @@ -164,6 +166,8 @@ def remove_app(app_name, dry_run=False, yes=False): for doctype in set(drop_doctypes): frappe.db.sql("drop table `tab{0}`".format(doctype)) + frappe.flags.in_uninstall = False + def post_install(rebuild_website=False): if rebuild_website: render.clear_cache() @@ -299,12 +303,15 @@ def remove_missing_apps(): def extract_sql_gzip(sql_gz_path): try: - # kdvf - keep, decompress, verbose, force - subprocess.check_call(['gzip', '-kdvf', sql_gz_path]) + # dvf - decompress, verbose, force + original_file = sql_gz_path + decompressed_file = original_file.rstrip(".gz") + cmd = 'gzip -dvf < {0} > {1}'.format(original_file, decompressed_file) + subprocess.check_call(cmd, shell=True) except: raise - return sql_gz_path[:-3] + return decompressed_file def extract_tar_files(site_name, file_path, folder_name): # Need to do frappe.init to maintain the site locals @@ -326,3 +333,34 @@ def extract_tar_files(site_name, file_path, folder_name): frappe.destroy() return tar_path + +def is_downgrade(sql_file_path, verbose=False): + """checks if input db backup will get downgraded on current bench""" + from semantic_version import Version + head = "INSERT INTO `tabInstalled Application` VALUES" + + with open(sql_file_path) as f: + for line in f: + if head in line: + # 'line' (str) format: ('2056588823','2020-05-11 18:21:31.488367','2020-06-12 11:49:31.079506','Administrator','Administrator',0,'Installed Applications','installed_applications','Installed Applications',1,'frappe','v10.1.71-74 (3c50d5e) (v10.x.x)','v10.x.x'),('855c640b8e','2020-05-11 18:21:31.488367','2020-06-12 
11:49:31.079506','Administrator','Administrator',0,'Installed Applications','installed_applications','Installed Applications',2,'your_custom_app','0.0.1','master') + line = line.strip().lstrip(head).rstrip(";").strip() + # 'all_apps' (list) format: [('frappe', '12.x.x-develop ()', 'develop'), ('your_custom_app', '0.0.1', 'master')] + all_apps = [ x[-3:] for x in frappe.safe_eval(line) ] + + for app in all_apps: + app_name = app[0] + app_version = app[1].split(" ")[0] + + if app_name == "frappe": + try: + current_version = Version(frappe.__version__) + backup_version = Version(app_version[1:] if app_version[0] == "v" else app_version) + except ValueError: + return False + + downgrade = backup_version > current_version + + if verbose and downgrade: + print("Your site will be downgraded from Frappe {0} to {1}".format(current_version, backup_version)) + + return downgrade diff --git a/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py b/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py index f177aa6620..864720174f 100644 --- a/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py +++ b/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py @@ -97,10 +97,12 @@ def backup_to_dropbox(upload_db_backup=True): if frappe.flags.create_new_backup: backup = new_backup(ignore_files=True) filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db)) + site_config = os.path.join(get_backups_path(), os.path.basename(backup.site_config_backup_path)) else: - filename = get_latest_backup_file() + filename, site_config = get_latest_backup_file() upload_file_to_dropbox(filename, "/database", dropbox_client) + upload_file_to_dropbox(site_config, "/database", dropbox_client) # delete older databases if dropbox_settings['no_of_backups']: diff --git a/frappe/integrations/doctype/google_calendar/google_calendar.py b/frappe/integrations/doctype/google_calendar/google_calendar.py index fa2eea6ce1..4a9acd9e84 100644 --- 
a/frappe/integrations/doctype/google_calendar/google_calendar.py +++ b/frappe/integrations/doctype/google_calendar/google_calendar.py @@ -12,6 +12,7 @@ from frappe import _ from frappe.model.document import Document from frappe.utils import get_request_site_address from googleapiclient.errors import HttpError +from frappe.utils.password import set_encrypted_password from frappe.utils import add_days, get_datetime, get_weekdays, now_datetime, add_to_date, get_time_zone from dateutil import parser from datetime import datetime, timedelta @@ -198,7 +199,7 @@ def check_google_calendar(account, google_calendar): except HttpError as err: frappe.throw(_("Google Calendar - Could not create Calendar for {0}, error code {1}.").format(account.name, err.resp.status)) -def sync_events_from_google_calendar(g_calendar, method=None, page_length=10): +def sync_events_from_google_calendar(g_calendar, method=None): """ Syncs Events from Google Calendar in Framework Calendar. Google Calendar returns nextSyncToken when all the events in Google Calendar are fetched. 
@@ -210,23 +211,32 @@ def sync_events_from_google_calendar(g_calendar, method=None, page_length=10): if not account.pull_from_google_calendar: return + sync_token = account.get_password(fieldname="next_sync_token", raise_exception=False) or None + events = frappe._dict() results = [] while True: try: # API Response listed at EOF - sync_token = account.get_password(fieldname="next_sync_token", raise_exception=False) or None - events = google_calendar.events().list(calendarId=account.google_calendar_id, maxResults=page_length, - singleEvents=False, showDeleted=True, syncToken=sync_token).execute() + events = google_calendar.events().list(calendarId=account.google_calendar_id, maxResults=2000, + pageToken=events.get("nextPageToken"), singleEvents=False, showDeleted=True, syncToken=sync_token).execute() except HttpError as err: - frappe.throw(_("Google Calendar - Could not fetch event from Google Calendar, error code {0}.").format(err.resp.status)) + msg = _("Google Calendar - Could not fetch event from Google Calendar, error code {0}.").format(err.resp.status) + + if err.resp.status == 410: + set_encrypted_password("Google Calendar", account.name, "", "next_sync_token") + frappe.db.commit() + msg += ' ' + _('Sync token was invalid and has been resetted, Retry syncing.') + frappe.msgprint(msg, title='Invalid Sync Token', indicator='blue') + else: + frappe.throw(msg) for event in events.get("items", []): results.append(event) if not events.get("nextPageToken"): if events.get("nextSyncToken"): - frappe.db.set_value("Google Calendar", account.name, "next_sync_token", events.get("nextSyncToken")) - frappe.db.commit() + account.next_sync_token = events.get("nextSyncToken") + account.save() break for idx, event in enumerate(results): diff --git a/frappe/integrations/doctype/google_drive/google_drive.py b/frappe/integrations/doctype/google_drive/google_drive.py index 60ee173bbf..0c28e95a24 100644 --- a/frappe/integrations/doctype/google_drive/google_drive.py +++ 
b/frappe/integrations/doctype/google_drive/google_drive.py @@ -190,12 +190,16 @@ def upload_system_backup_to_google_drive(): set_progress(1, "Backing up Data.") backup = new_backup() fileurl_backup = os.path.basename(backup.backup_path_db) + fileurl_site_config = os.path.basename(backup.site_config_backup_path) fileurl_public_files = os.path.basename(backup.backup_path_files) fileurl_private_files = os.path.basename(backup.backup_path_private_files) else: - fileurl_backup, fileurl_public_files, fileurl_private_files = get_latest_backup_file(with_files=True) + fileurl_backup, fileurl_site_config, fileurl_public_files, fileurl_private_files = get_latest_backup_file(with_files=True) + + for fileurl in [fileurl_backup, fileurl_site_config, fileurl_public_files, fileurl_private_files]: + if not fileurl: + continue - for fileurl in [fileurl_backup, fileurl_public_files, fileurl_private_files]: file_metadata = { "name": fileurl, "parents": [account.backup_folder_id] @@ -218,11 +222,13 @@ def upload_system_backup_to_google_drive(): return _("Google Drive Backup Successful.") def daily_backup(): - if frappe.db.get_single_value("Google Drive", "frequency") == "Daily": + drive_settings = frappe.db.get_singles_dict('Google Drive') + if drive_settings.enable and drive_settings.frequency == "Daily": upload_system_backup_to_google_drive() def weekly_backup(): - if frappe.db.get_single_value("Google Drive", "frequency") == "Weekly": + drive_settings = frappe.db.get_singles_dict('Google Drive') + if drive_settings.enable and drive_settings.frequency == "Weekly": upload_system_backup_to_google_drive() def get_absolute_path(filename): diff --git a/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py b/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py index 21232992f4..c8b007ba7b 100755 --- a/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py +++ b/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py @@ -19,6 +19,9 
@@ from botocore.exceptions import ClientError class S3BackupSettings(Document): def validate(self): + if not self.enabled: + return + if not self.endpoint_url: self.endpoint_url = 'https://s3.amazonaws.com' conn = boto3.client( @@ -115,19 +118,21 @@ def backup_to_s3(): backup = new_backup(ignore_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, force=True) db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db)) + site_config = os.path.join(get_backups_path(), os.path.basename(backup.site_config_backup_path)) if backup_files: files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files)) private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files)) else: if backup_files: - db_filename, files_filename, private_files = get_latest_backup_file(with_files=backup_files) + db_filename, site_config, files_filename, private_files = get_latest_backup_file(with_files=backup_files) else: - db_filename = get_latest_backup_file() + db_filename, site_config = get_latest_backup_file() folder = os.path.basename(db_filename)[:15] + '/' # for adding datetime to folder name upload_file_to_s3(db_filename, folder, conn, bucket) + upload_file_to_s3(site_config, folder, conn, bucket) if backup_files: upload_file_to_s3(private_files, folder, conn, bucket) upload_file_to_s3(files_filename, folder, conn, bucket) diff --git a/frappe/integrations/frappe_providers/frappecloud.py b/frappe/integrations/frappe_providers/frappecloud.py index 3e4b584246..16bc09d9bf 100644 --- a/frappe/integrations/frappe_providers/frappecloud.py +++ b/frappe/integrations/frappe_providers/frappecloud.py @@ -1,6 +1,7 @@ # imports - standard imports import getpass import json +import os import re import sys @@ -8,6 +9,7 @@ import sys import click from html2text import html2text import requests +from tenacity import retry, stop_after_attempt, wait_fixed # imports - module imports 
import frappe @@ -138,6 +140,7 @@ def select_team(session): return team +@retry(stop=stop_after_attempt(5)) def get_new_site_options(): site_options_sc = session.post(options_url) @@ -158,6 +161,7 @@ def is_valid_subdomain(subdomain): print("Subdomain contains invalid characters. Use lowercase characters, numbers and hyphens") +@retry(stop=stop_after_attempt(5)) def is_subdomain_available(subdomain): res = session.post(site_exists_url, {"subdomain": subdomain}) if res.ok: @@ -252,6 +256,17 @@ def get_subdomain(domain): return subdomain +@retry(stop=stop_after_attempt(2), wait=wait_fixed(5)) +def upload_backup_file(file_type, file_path): + return session.post(files_url, data={}, files={ + "file": open(file_path, "rb"), + "is_private": 1, + "folder": "Home", + "method": "press.api.site.upload_backup", + "type": file_type + }) + + @add_line_after def upload_backup(local_site): # take backup @@ -265,14 +280,11 @@ def upload_backup(local_site): ("public", odb.backup_path_files), ("private", odb.backup_path_private_files) ]): - file_upload_response = session.post(files_url, data={}, files={ - "file": open(file_path, "rb"), - "is_private": 1, - "folder": "Home", - "method": "press.api.site.upload_backup", - "type": file_type - }) - print("Uploading files ({}/3)".format(x+1), end="\r") + file_name = file_path.split(os.sep)[-1] + + print("Uploading {} file: {} ({}/3)".format(file_type, file_name, x+1)) + file_upload_response = upload_backup_file(file_type, file_path) + if file_upload_response.ok: files_session[file_type] = file_upload_response.json()["message"] else: @@ -362,7 +374,10 @@ def create_session(): if login_sc.ok: print("Authorization Successful! 
✅") team = select_team(session) - session.headers.update({"X-Press-Team": team }) + session.headers.update({ + "X-Press-Team": team, + "Connection": "keep-alive" + }) return session else: handle_request_failure(message="Authorization Failed with Error Code {}".format(login_sc.status_code), traceback=False) diff --git a/frappe/integrations/offsite_backup_utils.py b/frappe/integrations/offsite_backup_utils.py index 7e80cb68c4..9de176b2d0 100644 --- a/frappe/integrations/offsite_backup_utils.py +++ b/frappe/integrations/offsite_backup_utils.py @@ -47,16 +47,17 @@ def get_latest_backup_file(with_files=False): def get_latest(file_ext): file_list = glob.glob(os.path.join(get_backups_path(), file_ext)) - return max(file_list, key=os.path.getctime) + return max(file_list, key=os.path.getctime) if file_list else None latest_file = get_latest('*.sql.gz') + latest_site_config = get_latest('*.json') if with_files: latest_public_file_bak = get_latest('*-files.tar') latest_private_file_bak = get_latest('*-private-files.tar') - return latest_file, latest_public_file_bak, latest_private_file_bak + return latest_file, latest_site_config, latest_public_file_bak, latest_private_file_bak - return latest_file + return latest_file, latest_site_config def get_file_size(file_path, unit): @@ -76,7 +77,7 @@ def get_file_size(file_path, unit): def validate_file_size(): frappe.flags.create_new_backup = True - latest_file = get_latest_backup_file() + latest_file, site_config = get_latest_backup_file() file_size = get_file_size(latest_file, unit='GB') if file_size > 1: diff --git a/frappe/model/base_document.py b/frappe/model/base_document.py index 106d21eb51..d7028870f4 100644 --- a/frappe/model/base_document.py +++ b/frappe/model/base_document.py @@ -504,19 +504,7 @@ class BaseDocument(object): for _df in fields_to_fetch: if self.is_new() or self.docstatus != 1 or _df.allow_on_submit: - fetch_from_fieldname = _df.fetch_from.split('.')[-1] - value = values[fetch_from_fieldname] - if 
_df.fieldtype == 'Small Text' or _df.fieldtype == 'Text' or _df.fieldtype == 'Data': - if fetch_from_fieldname in default_fields: - from frappe.model.meta import get_default_df - fetch_from_df = get_default_df(fetch_from_fieldname) - else: - fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname) - - fetch_from_ft = fetch_from_df.get('fieldtype') - if fetch_from_ft == 'Text Editor' and value: - value = unescape_html(strip_html(value)) - setattr(self, _df.fieldname, value) + self.set_fetch_from_value(doctype, _df, values) notify_link_count(doctype, docname) @@ -531,6 +519,27 @@ class BaseDocument(object): return invalid_links, cancelled_links + def set_fetch_from_value(self, doctype, df, values): + fetch_from_fieldname = df.fetch_from.split('.')[-1] + value = values[fetch_from_fieldname] + if df.fieldtype in ['Small Text', 'Text', 'Data']: + if fetch_from_fieldname in default_fields: + from frappe.model.meta import get_default_df + fetch_from_df = get_default_df(fetch_from_fieldname) + else: + fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname) + + if not fetch_from_df: + frappe.throw( + _('Please check the value of "Fetch From" set for field {0}').format(frappe.bold(df.label)), + title = _('Wrong Fetch From value') + ) + + fetch_from_ft = fetch_from_df.get('fieldtype') + if fetch_from_ft == 'Text Editor' and value: + value = unescape_html(strip_html(value)) + setattr(self, df.fieldname, value) + def _validate_selects(self): if frappe.flags.in_import: return diff --git a/frappe/model/delete_doc.py b/frappe/model/delete_doc.py index c0d2c4eef9..98dbce1d8f 100644 --- a/frappe/model/delete_doc.py +++ b/frappe/model/delete_doc.py @@ -77,7 +77,7 @@ def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reloa delete_from_table(doctype, name, ignore_doctypes, None) - if not (for_reload or frappe.flags.in_migrate or frappe.flags.in_install or frappe.flags.in_test): + if not (for_reload or frappe.flags.in_migrate or 
frappe.flags.in_install or frappe.flags.in_uninstall or frappe.flags.in_test): try: delete_controllers(name, doc.module) except (FileNotFoundError, OSError, KeyError): diff --git a/frappe/model/document.py b/frappe/model/document.py index 7c283b84ce..ea693167f8 100644 --- a/frappe/model/document.py +++ b/frappe/model/document.py @@ -396,6 +396,11 @@ class Document(BaseDocument): def get_doc_before_save(self): return getattr(self, '_doc_before_save', None) + def has_value_changed(self, fieldname): + '''Returns true if value is changed before and after saving''' + previous = self.get_doc_before_save() + return previous.get(fieldname)!=self.get(fieldname) if previous else True + def set_new_name(self, force=False, set_name=None, set_child_names=True): """Calls `frappe.naming.set_new_name` for parent and child docs.""" if self.flags.name_set and not force: @@ -825,7 +830,7 @@ class Document(BaseDocument): def run_notifications(self, method): """Run notifications for this method""" - if frappe.flags.in_import or frappe.flags.in_patch or frappe.flags.in_install: + if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install: return if self.flags.notifications_executed==None: @@ -1059,6 +1064,8 @@ class Document(BaseDocument): def save_version(self): """Save version info""" + if not self._doc_before_save and frappe.flags.in_patch: return + version = frappe.new_doc('Version') if not self._doc_before_save: version.for_insert(self) diff --git a/frappe/patches.txt b/frappe/patches.txt index 582b369343..6c4c75aca9 100644 --- a/frappe/patches.txt +++ b/frappe/patches.txt @@ -19,6 +19,7 @@ execute:frappe.reload_doc('core', 'doctype', 'module_def') #2017-09-22 execute:frappe.reload_doc('core', 'doctype', 'version') #2017-04-01 execute:frappe.reload_doc('email', 'doctype', 'document_follow') execute:frappe.reload_doc('core', 'doctype', 'communication_link') #2019-10-02 +execute:frappe.reload_doc('core', 'doctype', 'has_role') 
execute:frappe.reload_doc('core', 'doctype', 'communication') #2019-10-02 frappe.patches.v11_0.replicate_old_user_permissions frappe.patches.v11_0.reload_and_rename_view_log #2019-01-03 @@ -289,3 +290,4 @@ execute:frappe.delete_doc("DocType", "Onboarding Slide Field") execute:frappe.delete_doc("DocType", "Onboarding Slide Help Link") frappe.patches.v13_0.update_date_filters_in_user_settings frappe.patches.v13_0.update_duration_options +frappe.patches.v13_0.replace_old_data_import # 2020-06-24 diff --git a/frappe/patches/v11_0/reload_and_rename_view_log.py b/frappe/patches/v11_0/reload_and_rename_view_log.py index 611de79a3c..12c71b746f 100644 --- a/frappe/patches/v11_0/reload_and_rename_view_log.py +++ b/frappe/patches/v11_0/reload_and_rename_view_log.py @@ -2,7 +2,7 @@ from __future__ import unicode_literals import frappe def execute(): - if frappe.db.exists('DocType', 'View log'): + if frappe.db.table_exists('View log'): # for mac users direct renaming would not work since mysql for mac saves table name in lower case # so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error # more info https://stackoverflow.com/a/44753093/5955589 , diff --git a/frappe/patches/v13_0/replace_old_data_import.py b/frappe/patches/v13_0/replace_old_data_import.py new file mode 100644 index 0000000000..f3eed6253c --- /dev/null +++ b/frappe/patches/v13_0/replace_old_data_import.py @@ -0,0 +1,16 @@ +# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. 
See license.txt + +from __future__ import unicode_literals +import frappe + + +def execute(): + if not frappe.db.exists("DocType", "Data Import Beta"): + return + + frappe.db.sql("DROP TABLE IF EXISTS `tabData Import Legacy`") + frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy') + frappe.db.commit() + frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`") + frappe.rename_doc('DocType', 'Data Import Beta', 'Data Import') diff --git a/frappe/public/css/desk-rtl.css b/frappe/public/css/desk-rtl.css index 31321be17d..a38f6864ff 100644 --- a/frappe/public/css/desk-rtl.css +++ b/frappe/public/css/desk-rtl.css @@ -110,4 +110,9 @@ ul.tree-children { } .section-header { direction: ltr; +} + +.ql-editor { + direction: rtl; + text-align: right; } \ No newline at end of file diff --git a/frappe/public/js/frappe/data_import/data_exporter.js b/frappe/public/js/frappe/data_import/data_exporter.js index d9685ca2c6..f6af338235 100644 --- a/frappe/public/js/frappe/data_import/data_exporter.js +++ b/frappe/public/js/frappe/data_import/data_exporter.js @@ -13,36 +13,6 @@ frappe.data_import.DataExporter = class DataExporter { this.dialog = new frappe.ui.Dialog({ title: __('Export Data'), fields: [ - { - fieldtype: 'Select', - fieldname: 'exporting_for', - label: __('Exporting For'), - options: [ - { - label: __('Insert New Records'), - value: 'Insert New Records' - }, - { - label: __('Update Existing Records'), - value: 'Update Existing Records' - } - ], - change: () => { - let exporting_for = this.dialog.get_value('exporting_for'); - this.dialog.set_value( - 'export_records', - exporting_for === 'Insert New Records' ? 
'blank_template' : 'all' - ); - - // Force ID field to be exported when updating existing records - let id_field = this.dialog.get_field(this.doctype).options[0]; - if (id_field.value === 'name' && id_field.$checkbox) { - id_field.$checkbox - .find('input') - .prop('disabled', exporting_for === 'Update Existing Records'); - } - } - }, { fieldtype: 'Select', fieldname: 'export_records', @@ -65,7 +35,7 @@ frappe.data_import.DataExporter = class DataExporter { value: 'blank_template' } ], - default: 'blank_template', + default: this.exporting_for === 'Insert New Records' ? 'blank_template' : 'all', change: () => { this.update_record_count_message(); } @@ -119,10 +89,6 @@ frappe.data_import.DataExporter = class DataExporter { on_page_show: () => this.select_mandatory() }); - if (this.exporting_for) { - this.dialog.set_value('exporting_for', this.exporting_for); - } - this.make_filter_area(); this.make_select_all_buttons(); this.update_record_count_message(); @@ -132,7 +98,7 @@ frappe.data_import.DataExporter = class DataExporter { export_records() { let method = - '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template'; + '/api/method/frappe.core.doctype.data_import.data_import.download_template'; let multicheck_fields = this.dialog.fields .filter(df => df.fieldtype === 'MultiCheck') @@ -172,15 +138,17 @@ frappe.data_import.DataExporter = class DataExporter { } make_select_all_buttons() { + let for_insert = this.exporting_for === 'Insert New Records'; + let section_title = for_insert ? __('Select Fields To Insert') : __('Select Fields To Update'); let $select_all_buttons = $(`
    -
    ${__('Select fields to export')}
    +
    ${section_title}
    - + `: ''} @@ -285,11 +253,9 @@ frappe.data_import.DataExporter = class DataExporter { } get_filters() { - return this.filter_group.get_filters().reduce((acc, filter) => { - return Object.assign(acc, { - [filter[1]]: [filter[2], filter[3]] - }); - }, {}); + return this.filter_group.get_filters().map(filter => { + return filter.slice(0, 4); + }); } get_multicheck_options(doctype, child_fieldname = null) { @@ -308,6 +274,9 @@ frappe.data_import.DataExporter = class DataExporter { ? this.column_map[child_fieldname] : this.column_map[doctype]; + let is_field_mandatory = df => (df.fieldname === 'name' && !child_fieldname) + || (df.reqd && this.exporting_for == 'Insert New Records'); + return fields .filter(df => { if (autoname_field && df.fieldname === autoname_field.fieldname) { @@ -323,7 +292,7 @@ frappe.data_import.DataExporter = class DataExporter { return { label, value: df.fieldname, - danger: df.reqd, + danger: is_field_mandatory(df), checked: false, description: `${df.fieldname} ${df.reqd ? __('(Mandatory)') : ''}` }; diff --git a/frappe/public/js/frappe/data_import/import_preview.js b/frappe/public/js/frappe/data_import/import_preview.js index 7cf8431456..4edcb87aeb 100644 --- a/frappe/public/js/frappe/data_import/import_preview.js +++ b/frappe/public/js/frappe/data_import/import_preview.js @@ -245,11 +245,12 @@ frappe.data_import.ImportPreview = class ImportPreview { let fieldname; if (!df) { fieldname = null; + } else if (col.map_to_field) { + fieldname = col.map_to_field; + } else if (col.is_child_table_field) { + fieldname = `${col.child_table_df.fieldname}.${df.fieldname}`; } else { - fieldname = - df.parent === this.doctype - ? 
df.fieldname - : `${df.parent}:${df.fieldname}`; + fieldname = df.fieldname; } return [ { @@ -272,7 +273,7 @@ frappe.data_import.ImportPreview = class ImportPreview { label: __("Don't Import"), value: "Don't Import" } - ].concat(column_picker_fields.get_fields_as_options()), + ].concat(get_fields_as_options(this.doctype, column_picker_fields)), default: fieldname || "Don't Import", change() { changed.push(i); @@ -328,3 +329,29 @@ frappe.data_import.ImportPreview = class ImportPreview { }); } }; + +function get_fields_as_options(doctype, column_map) { + let keys = [doctype]; + frappe.meta.get_table_fields(doctype).forEach(df => { + keys.push(df.fieldname); + }); + // flatten array + return [].concat( + ...keys.map(key => { + return column_map[key].map(df => { + let label = df.label; + let value = df.fieldname; + if (doctype !== key) { + let table_field = frappe.meta.get_docfield(doctype, key); + label = `${df.label} (${table_field.label})`; + value = `${table_field.fieldname}.${df.fieldname}`; + } + return { + label, + value, + description: value + }; + }); + }) + ); +} \ No newline at end of file diff --git a/frappe/public/js/frappe/db.js b/frappe/public/js/frappe/db.js index 1b6fb0e438..cf716c67e5 100644 --- a/frappe/public/js/frappe/db.js +++ b/frappe/public/js/frappe/db.js @@ -91,12 +91,26 @@ frappe.db = { }); }, count: function(doctype, args={}) { - return new Promise(resolve => { - frappe.call({ - method: 'frappe.client.get_count', - type: 'GET', - args: Object.assign(args, { doctype }) - }).then(r => resolve(r.message)); + let filters = args.filters || {}; + const with_child_table_filter = Array.isArray(filters) && filters.some(filter => { + return filter[0] !== doctype; + }); + + const fields = [ + // cannot break this line as it adds extra \n's and \t's which breaks the query + `count(${with_child_table_filter ? 
'distinct': ''} ${frappe.model.get_full_column_name('name', doctype)}) AS total_count` + ]; + + return frappe.call({ + type: 'GET', + method: 'frappe.desk.reportview.get', + args: { + doctype, + filters, + fields, + } + }).then(r => { + return r.message.values[0][0]; }); }, get_link_options(doctype, txt = '', filters={}) { diff --git a/frappe/public/js/frappe/form/controls/button.js b/frappe/public/js/frappe/form/controls/button.js index dca2a82654..ae24651892 100644 --- a/frappe/public/js/frappe/form/controls/button.js +++ b/frappe/public/js/frappe/form/controls/button.js @@ -5,7 +5,8 @@ frappe.ui.form.ControlButton = frappe.ui.form.ControlData.extend({ }, make_input: function() { var me = this; - this.$input = $('
    diff --git a/frappe/public/js/frappe/form/toolbar.js b/frappe/public/js/frappe/form/toolbar.js index 5afee9b8fb..2aa24455fd 100644 --- a/frappe/public/js/frappe/form/toolbar.js +++ b/frappe/public/js/frappe/form/toolbar.js @@ -36,6 +36,11 @@ frappe.ui.form.Toolbar = Class.extend({ this.page.set_title_sub(""); } else { this.page.set_title_sub(this.frm.docname); + this.page.$sub_title_area.css("cursor", "copy"); + this.page.$sub_title_area.on('click', (event) => { + event.stopImmediatePropagation(); + frappe.utils.copy_to_clipboard(this.frm.docname); + }); } } else { var title = this.frm.docname; @@ -376,10 +381,10 @@ frappe.ui.form.Toolbar = Class.extend({ var status = this.get_action_status(); if (status) { // When moving from a page with status amend to another page with status amend - // We need to check if document is already amened specifcally and hide + // We need to check if document is already amend specifically and hide // or clear the menu actions accordingly - if (status !== this.current_status || status === 'Amend') { + if (status !== this.current_status && status === 'Amend') { let doc = this.frm.doc; frappe.xcall('frappe.client.is_document_amended', { 'doctype': doc.doctype, @@ -396,7 +401,7 @@ frappe.ui.form.Toolbar = Class.extend({ } } else { this.page.clear_actions(); - this.current_status = null + this.current_status = null; } }, get_action_status: function() { diff --git a/frappe/public/js/frappe/list/list_view.js b/frappe/public/js/frappe/list/list_view.js index 3e8b7ad6c3..c9e3537afe 100644 --- a/frappe/public/js/frappe/list/list_view.js +++ b/frappe/public/js/frappe/list/list_view.js @@ -33,6 +33,8 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList { } show() { + this.parent.disable_scroll_to_top = true; + if (!this.has_permissions()) { frappe.set_route(""); frappe.msgprint(__(`Not permitted to view ${this.doctype}`)); @@ -859,43 +861,16 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList { let 
current_count = this.data.length; let count_without_children = this.data.uniqBy((d) => d.name).length; - const filters = this.get_filters_for_args(); - const with_child_table_filter = filters.some((filter) => { - return filter[0] !== this.doctype; + return frappe.db.count(this.doctype, { + filters: this.get_filters_for_args() + }).then(total_count => { + this.total_count = total_count || current_count; + let str = __('{0} of {1}', [current_count, this.total_count]); + if (count_without_children !== current_count) { + str = __('{0} of {1} ({2} rows with children)', [count_without_children, this.total_count, current_count]); + } + return str; }); - - const fields = [ - // cannot break this line as it adds extra \n's and \t's which breaks the query - `count(${ - with_child_table_filter ? "distinct" : "" - }${frappe.model.get_full_column_name( - "name", - this.doctype - )}) AS total_count`, - ]; - - return frappe - .call({ - type: "GET", - method: this.method, - args: { - doctype: this.doctype, - filters, - fields, - }, - }) - .then((r) => { - this.total_count = r.message.values[0][0] || current_count; - let str = __("{0} of {1}", [current_count, this.total_count]); - if (count_without_children !== current_count) { - str = __("{0} of {1} ({2} rows with children)", [ - count_without_children, - this.total_count, - current_count, - ]); - } - return str; - }); } get_form_link(doc) { diff --git a/frappe/public/js/frappe/model/indicator.js b/frappe/public/js/frappe/model/indicator.js index 028dbde472..c7290e05d5 100644 --- a/frappe/public/js/frappe/model/indicator.js +++ b/frappe/public/js/frappe/model/indicator.js @@ -41,6 +41,8 @@ frappe.get_indicator = function(doc, doctype) { "Warning": "orange", "Danger": "red", "Primary": "blue", + "Inverse": "black", + "Info": "light-blue", }[locals["Workflow State"][value].style]; } if(!colour) colour = "darkgrey"; diff --git a/frappe/public/js/frappe/request.js b/frappe/public/js/frappe/request.js index 21155492b3..980a3d0592 
100644 --- a/frappe/public/js/frappe/request.js +++ b/frappe/public/js/frappe/request.js @@ -102,6 +102,7 @@ frappe.call = function(opts) { error_handlers: opts.error_handlers || {}, // show_spinner: !opts.no_spinner, async: opts.async, + silent: opts.silent, url, }); } diff --git a/frappe/public/js/frappe/socketio_client.js b/frappe/public/js/frappe/socketio_client.js index 1411b6289d..fdc3f8619c 100644 --- a/frappe/public/js/frappe/socketio_client.js +++ b/frappe/public/js/frappe/socketio_client.js @@ -89,6 +89,14 @@ frappe.socketio = { frappe.socketio.doc_close(frm.doctype, frm.docname); }); + $(document).on('form-typing', function(e, frm) { + frappe.socketio.form_typing(frm.doctype, frm.docname); + }); + + $(document).on('form-stopped-typing', function(e, frm) { + frappe.socketio.form_stopped_typing(frm.doctype, frm.docname); + }); + window.onbeforeunload = function() { if (!cur_frm || cur_frm.is_new()) { return; @@ -161,8 +169,18 @@ frappe.socketio = { doc_close: function(doctype, docname) { // notify that the user has closed this doc frappe.socketio.socket.emit('doc_close', doctype, docname); - }, + // if the doc is closed the user has also stopped typing + frappe.socketio.socket.emit('doc_typing_stopped', doctype, docname); + }, + form_typing: function(doctype, docname) { + // notify that the user is typing on the doc + frappe.socketio.socket.emit('doc_typing', doctype, docname); + }, + form_stopped_typing: function(doctype, docname) { + // notify that the user has stopped typing + frappe.socketio.socket.emit('doc_typing_stopped', doctype, docname); + }, setup_listeners: function() { frappe.socketio.socket.on('task_status_change', function(data) { frappe.socketio.process_response(data, data.status.toLowerCase()); diff --git a/frappe/public/js/frappe/ui/filters/edit_filter.html b/frappe/public/js/frappe/ui/filters/edit_filter.html index bbaca6eb66..53ad72dbc1 100644 --- a/frappe/public/js/frappe/ui/filters/edit_filter.html +++
b/frappe/public/js/frappe/ui/filters/edit_filter.html @@ -10,6 +10,7 @@
    +
    diff --git a/frappe/public/js/frappe/ui/filters/filter.js b/frappe/public/js/frappe/ui/filters/filter.js index 7300ad3c11..4ef6bb59e3 100644 --- a/frappe/public/js/frappe/ui/filters/filter.js +++ b/frappe/public/js/frappe/ui/filters/filter.js @@ -106,6 +106,8 @@ frappe.ui.Filter = class { if (['in', 'like', 'not in', 'not like'].includes(condition)) { fieldtype = 'Data'; this.add_condition_help(condition); + } else { + this.filter_edit_area.find('.filter-description').empty(); } if ( @@ -130,6 +132,34 @@ frappe.ui.Filter = class { return this.set_values(this.doctype, fieldname, this.condition, this.value); } + setup_state(is_new) { + let promise = Promise.resolve(); + if (is_new) { + this.filter_edit_area.addClass('new-filter'); + } else { + promise = this.update_filter_tag(); + } + + if (this.hidden) { + promise.then(() => this.$filter_tag.hide()); + } + } + + freeze() { + this.update_filter_tag(); + } + + update_filter_tag() { + if (this._filter_value_set) { + return this._filter_value_set.then(() => { + !this.$filter_tag ? this.make_tag() : this.set_filter_button_text(); + this.filter_edit_area.hide(); + }); + } else { + return Promise.resolve(); + } + } + remove() { this.filter_edit_area.remove(); this.field = null; @@ -218,7 +248,7 @@ frappe.ui.Filter = class { } frappe .xcall(this.filters_config[condition].get_field, args) - .then((field) => { + .then(field => { df.fieldtype = field.fieldtype; df.options = field.options; df.fieldname = fieldname; @@ -250,6 +280,14 @@ frappe.ui.Filter = class { this.field.set_value(old_text); } + // run on enter + $(this.field.wrapper) + .find(':input') + .keydown(e => { + if (e.which == 13 && this.field.df.fieldtype !== 'MultiSelect') { + this.on_change(); + } + }); } get_value() { @@ -277,16 +315,59 @@ frappe.ui.Filter = class { } add_condition_help(condition) { - let $desc = this.field.desc_area; - if (!$desc) { - $desc = $('
    ').appendTo(this.field.wrapper); - } - // set description - $desc.html( - (in_list(['in', 'not in'], condition) === 'in' - ? __('values separated by commas') - : __('use % as wildcard')) + '
    ' + const description = ['in', 'not in'].includes(condition) + ? __('values separated by commas') + : __('use % as wildcard'); + + this.filter_edit_area.find('.filter-description').html(description); + } + + make_tag() { + if (!this.field) return; + this.$filter_tag = this.get_filter_tag_element().insertAfter( + this.parent.find('.active-tag-filters .clear-filters') ); + this.set_filter_button_text(); + this.bind_tag(); + } + + bind_tag() { + this.$filter_tag.find('.remove-filter').on('click', this.remove.bind(this)); + + let filter_button = this.$filter_tag.find('.toggle-filter'); + filter_button.on('click', () => { + filter_button + .closest('.tag-filters-area') + .find('.filter-edit-area') + .show(); + this.filter_edit_area.toggle(); + }); + } + + set_filter_button_text() { + this.$filter_tag.find('.toggle-filter').html(this.get_filter_button_text()); + } + + get_filter_button_text() { + let value = this.utils.get_formatted_value( + this.field, + this.get_selected_value() + ); + return `${__(this.field.df.label)} ${__(this.get_condition())} ${__( + value + )}`; + } + + get_filter_tag_element() { + return $(`
    + + +
    `); } hide_invalid_conditions(fieldtype, original_type) { @@ -383,12 +464,12 @@ frappe.ui.filter_utils = { // scrub if (df.fieldname == 'docstatus') { - (df.fieldtype = 'Select'), - (df.options = [ - { value: 0, label: __('Draft') }, - { value: 1, label: __('Submitted') }, - { value: 2, label: __('Cancelled') }, - ]); + df.fieldtype = 'Select'; + df.options = [ + { value: 0, label: __('Draft') }, + { value: 1, label: __('Submitted') }, + { value: 2, label: __('Cancelled') }, + ]; } else if (df.fieldtype == 'Check') { df.fieldtype = 'Select'; df.options = 'No\nYes'; diff --git a/frappe/public/js/frappe/ui/messages.js b/frappe/public/js/frappe/ui/messages.js index 8652fde4aa..cba15377db 100644 --- a/frappe/public/js/frappe/ui/messages.js +++ b/frappe/public/js/frappe/ui/messages.js @@ -167,6 +167,11 @@ frappe.msgprint = function(msg, title, is_minimizable) { method: data.primary_action.server_action, args: { args: data.primary_action.args + }, + callback() { + if (data.primary_action.hide_on_success) { + frappe.hide_msgprint(); + } } }); } diff --git a/frappe/public/js/frappe/utils/pretty_date.js b/frappe/public/js/frappe/utils/pretty_date.js index 7618d58829..84fd276068 100644 --- a/frappe/public/js/frappe/utils/pretty_date.js +++ b/frappe/public/js/frappe/utils/pretty_date.js @@ -1,8 +1,10 @@ function prettyDate(date, mini) { if (!date) return ''; - if (typeof (date) == "string") + if (typeof (date) == "string") { + date = frappe.datetime.convert_to_user_tz(date); date = new Date((date || "").replace(/-/g, "/").replace(/[TZ]/g, " ").replace(/\.[0-9]*/, "")); + } let diff = (((new Date()).getTime() - date.getTime()) / 1000); let day_diff = Math.floor(diff / 86400); diff --git a/frappe/public/js/frappe/views/calendar/calendar.js b/frappe/public/js/frappe/views/calendar/calendar.js index 4b091d502c..e053da0263 100644 --- a/frappe/public/js/frappe/views/calendar/calendar.js +++ b/frappe/public/js/frappe/views/calendar/calendar.js @@ -78,7 +78,8 @@ 
frappe.views.CalendarView = class CalendarView extends frappe.views.ListView { id: "name", start: doc.start_date_field, end: doc.end_date_field, - title: doc.subject_field + title: doc.subject_field, + allDay: doc.all_day ? 1 : 0 } }); resolve(options); diff --git a/frappe/public/js/frappe/views/communication.js b/frappe/public/js/frappe/views/communication.js index ba290417f5..53d946f75d 100755 --- a/frappe/public/js/frappe/views/communication.js +++ b/frappe/public/js/frappe/views/communication.js @@ -66,6 +66,10 @@ frappe.views.CommunicationComposer = Class.extend({ }) this.prepare(); this.dialog.show(); + + if (this.frm) { + $(document).trigger('form-typing', [this.frm]); + } }, get_fields: function() { @@ -170,17 +174,21 @@ frappe.views.CommunicationComposer = Class.extend({ } if (!this.subject) { - if (this.frm.subject_field && this.frm.doc[this.frm.subject_field]) { - this.subject = __("Re: {0}", [this.frm.doc[this.frm.subject_field]]); - } else { - let title = this.frm.doc.name; - if(this.frm.meta.title_field && this.frm.doc[this.frm.meta.title_field] - && this.frm.doc[this.frm.meta.title_field] != this.frm.doc.name) { - title = `${this.frm.doc[this.frm.meta.title_field]} (#${this.frm.doc.name})`; - } - this.subject = `${__(this.frm.doctype)}: ${title}`; + this.subject = this.frm.doc.name; + if (this.frm.meta.subject_field && this.frm.doc[this.frm.meta.subject_field]) { + this.subject = this.frm.doc[this.frm.meta.subject_field]; + } else if (this.frm.meta.title_field && this.frm.doc[this.frm.meta.title_field]) { + this.subject = this.frm.doc[this.frm.meta.title_field]; } } + + // always add an identifier to catch a reply + // some email clients (outlook) may not send the message id to identify + // the thread. 
So as a backup we use the name of the document as identifier + let identifier = `#${this.frm.doc.name}`; + if (!this.subject.includes(identifier)) { + this.subject = `${this.subject} (${identifier})`; + } } if (this.frm && !this.recipients) { @@ -262,6 +270,10 @@ frappe.views.CommunicationComposer = Class.extend({ subject: me.dialog.get_value("subject"), content: me.dialog.get_value("content"), }); + + if (me.frm) { + $(document).trigger("form-stopped-typing", [me.frm]); + } } this.dialog.on_page_show = function() { diff --git a/frappe/public/js/frappe/views/container.js b/frappe/public/js/frappe/views/container.js index 104d9bee92..a5227b4271 100644 --- a/frappe/public/js/frappe/views/container.js +++ b/frappe/public/js/frappe/views/container.js @@ -84,7 +84,7 @@ frappe.views.Container = Class.extend({ this.page._route = window.location.hash; $(this.page).trigger('show'); - frappe.utils.scroll_to(0); + !this.page.disable_scroll_to_top && frappe.utils.scroll_to(0); frappe.breadcrumbs.update(); return this.page; diff --git a/frappe/public/js/frappe/views/desktop/desktop.js b/frappe/public/js/frappe/views/desktop/desktop.js index 125a79d577..764768f87f 100644 --- a/frappe/public/js/frappe/views/desktop/desktop.js +++ b/frappe/public/js/frappe/views/desktop/desktop.js @@ -3,6 +3,7 @@ export default class Desktop { this.wrapper = wrapper; this.pages = {}; this.sidebar_items = {}; + this.mobile_sidebar_items = {}; this.sidebar_categories = [ "Modules", "Domains", @@ -84,10 +85,13 @@ export default class Desktop { this.current_page = item.name; } let $item = get_sidebar_item(item); - - $item.appendTo(this.mobile_list); - $item.clone().appendTo(this.sidebar); + let $mobile_item = $item.clone(); + + $item.appendTo(this.sidebar); this.sidebar_items[item.name] = $item; + + $mobile_item.appendTo(this.mobile_list); + this.mobile_sidebar_items[item.name] = $mobile_item; }; const make_category_title = name => { @@ -122,7 +126,10 @@ export default class Desktop { if 
(this.sidebar_items && this.sidebar_items[this.current_page]) { this.sidebar_items[this.current_page].removeClass("selected"); + this.mobile_sidebar_items[this.current_page].removeClass("selected"); + this.sidebar_items[page].addClass("selected"); + this.mobile_sidebar_items[page].addClass("selected"); } this.current_page = page; this.mobile_list.hide(); @@ -134,11 +141,13 @@ export default class Desktop { get_page_to_show() { const default_page = this.desktop_settings ? this.desktop_settings["Modules"][0].name - : "Website"; + : frappe.boot.allowed_workspaces[0].name; + let page = frappe.get_route()[1] || localStorage.current_desk_page || default_page; + return page; } @@ -289,7 +298,6 @@ class DesktopPage { steps: this.data.onboarding.items, success: this.data.onboarding.success, docs_url: this.data.onboarding.docs_url, - user_can_dismiss: this.data.onboarding.user_can_dismiss, widget_type: 'onboarding', container: this.page, options: { diff --git a/frappe/public/js/frappe/views/formview.js b/frappe/public/js/frappe/views/formview.js index 033569a03c..7440ab198d 100644 --- a/frappe/public/js/frappe/views/formview.js +++ b/frappe/public/js/frappe/views/formview.js @@ -37,7 +37,13 @@ frappe.views.FormFactory = class FormFactory extends frappe.views.Factory { }); frappe.realtime.on("doc_viewers", function(data) { - frappe.ui.form.set_viewers(data); + // set users that currently viewing the form + frappe.ui.form.set_users(data, 'viewers'); + }); + + frappe.realtime.on("doc_typers", function(data) { + // set users that currently typing on the form + frappe.ui.form.set_users(data, 'typers'); }); } diff --git a/frappe/public/js/frappe/views/gantt/gantt_view.js b/frappe/public/js/frappe/views/gantt/gantt_view.js index df01c203bb..6fb4688a48 100644 --- a/frappe/public/js/frappe/views/gantt/gantt_view.js +++ b/frappe/public/js/frappe/views/gantt/gantt_view.js @@ -79,6 +79,10 @@ frappe.views.GanttView = class GanttView extends frappe.views.ListView { }); } + render_header() 
{ + + } + render_gantt() { const me = this; const gantt_view_mode = this.view_user_settings.gantt_view_mode || 'Day'; @@ -126,8 +130,8 @@ frappe.views.GanttView = class GanttView extends frappe.views.ListView { var item = me.get_item(task.id); var html = - `
    ${task.name}
    -

    ${moment(task._start).format('MMM D')} - ${moment(task._end).format('MMM D')}

    `; + `
    ${task.name}
    +
    ${moment(task._start).format('MMM D')} - ${moment(task._end).format('MMM D')}
    `; // custom html in doctype settings var custom = me.settings.gantt_custom_popup_html; @@ -204,8 +208,8 @@ frappe.views.GanttView = class GanttView extends frappe.views.ListView { get required_libs() { return [ - "assets/frappe/js/lib/frappe-gantt/frappe-gantt.css", - "assets/frappe/js/lib/frappe-gantt/frappe-gantt.min.js" + "assets/frappe/node_modules/frappe-gantt/dist/frappe-gantt.css", + "assets/frappe/node_modules/frappe-gantt/dist/frappe-gantt.min.js" ]; } }; diff --git a/frappe/public/js/frappe/views/reports/print_grid.html b/frappe/public/js/frappe/views/reports/print_grid.html index ea510fa7bd..852c2925e8 100644 --- a/frappe/public/js/frappe/views/reports/print_grid.html +++ b/frappe/public/js/frappe/views/reports/print_grid.html @@ -8,50 +8,51 @@
    {% endif %} - - - - {% for col in columns %} - {% if col.name && col._id !== "_check" %} + + + + {% for col in columns %} + {% if col.name && col._id !== "_check" %} - {% endif %} - {% endfor %} - - - - - {% for row in data %} - - {% for col in columns %} - {% if col.name && col._id !== "_check" %} + {% endif %} + {% endfor %} + + + + + {% for row in data %} + + {% for col in columns %} + {% if col.name && col._id !== "_check" %} - {% var value = col.fieldname ? row[col.fieldname] : row[col.id]; %} + {% var value = col.fieldname ? row[col.fieldname] : row[col.id]; %} - - {% endif %} - {% endfor %} - - {% endfor %} - + + {% endif %} + {% endfor %} + + {% endfor %} +
    {{ __(col.name) }}
    - - {{ - col.formatter - ? col.formatter(row._index, col._index, value, col, row, true) - : col.format - ? col.format(value, row, col, data) - : col.docfield - ? frappe.format(value, col.docfield) - : value - }} - -
    + + {% format_data = row.is_total_row ? data[0] : row %} + {{ + col.formatter + ? col.formatter(row._index, col._index, value, col, format_data, true) + : col.format + ? col.format(value, row, col, format_data) + : col.docfield + ? frappe.format(value, col.docfield) + : value + }} + +
    diff --git a/frappe/public/js/frappe/views/reports/query_report.js b/frappe/public/js/frappe/views/reports/query_report.js index e79e43ae02..f82956adac 100644 --- a/frappe/public/js/frappe/views/reports/query_report.js +++ b/frappe/public/js/frappe/views/reports/query_report.js @@ -261,27 +261,25 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList { } get_report_settings() { - if (frappe.query_reports[this.report_name]) { - this.report_settings = this.get_local_report_settings(); - return this._load_script; - } - - this._load_script = (new Promise(resolve => frappe.call({ - method: 'frappe.desk.query_report.get_script', - args: { report_name: this.report_name }, - callback: resolve - }))).then(r => { - frappe.dom.eval(r.message.script || ''); - return r; - }).then(r => { - return frappe.after_ajax(() => { - this.report_settings = this.get_local_report_settings(); - this.report_settings.html_format = r.message.html_format; - this.report_settings.execution_time = r.message.execution_time || 0; - }); + return new Promise((resolve, reject) => { + if (frappe.query_reports[this.report_name]) { + this.report_settings = frappe.query_reports[this.report_name]; + resolve(); + } else { + frappe.xcall('frappe.desk.query_report.get_script', { + report_name: this.report_name + }).then(settings => { + frappe.dom.eval(settings.script || ''); + frappe.after_ajax(() => { + this.report_settings = this.get_local_report_settings(); + this.report_settings.html_format = settings.html_format; + this.report_settings.execution_time = settings.execution_time || 0; + frappe.query_reports[this.report_name] = this.report_settings; + resolve(); + }); + }).catch(reject); + } }); - - return this._load_script; } get_local_report_settings() { @@ -1158,6 +1156,7 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList { if (this.raw_data.add_total_row) { let totalRow = this.datatable.bodyRenderer.getTotalRow().reduce((row, cell) => { 
row[cell.column.id] = cell.content; + row.is_total_row = true; return row; }, {}); diff --git a/frappe/public/js/frappe/web_form/web_form_list.js b/frappe/public/js/frappe/web_form/web_form_list.js index ec20838dee..21eed52e5e 100644 --- a/frappe/public/js/frappe/web_form/web_form_list.js +++ b/frappe/public/js/frappe/web_form/web_form_list.js @@ -7,16 +7,15 @@ export default class WebFormList { Object.assign(this, opts); frappe.web_form_list = this; this.wrapper = document.getElementById("datatable"); - this.refresh(); this.make_actions(); this.make_filters(); - $('.link-btn').remove() + $('.link-btn').remove(); } refresh() { if (this.table) { Array.from(this.table.tBodies).forEach(tbody => tbody.remove()); - let check = document.getElementById('select-all') + let check = document.getElementById('select-all'); check.checked = false; } this.rows = []; @@ -32,8 +31,8 @@ export default class WebFormList { } make_filters() { - this.filters = {} - this.filter_input = [] + this.filters = {}; + this.filter_input = []; const filter_area = document.getElementById('list-filters'); frappe.call('frappe.website.doctype.web_form.web_form.get_web_form_filters', { @@ -41,9 +40,10 @@ export default class WebFormList { }).then(response => { let fields = response.message; fields.forEach(field => { - let col = document.createElement('div.col-sm-4') - col.classList.add('col', 'col-sm-3') - filter_area.appendChild(col) + let col = document.createElement('div.col-sm-4'); + col.classList.add('col', 'col-sm-3'); + filter_area.appendChild(col); + if (field.default) this.add_filter(field.fieldname, field.default, field.fieldtype); let input = frappe.ui.form.make_control({ df: { @@ -54,27 +54,27 @@ export default class WebFormList { label: __(field.label), onchange: (event) => { $('#more').remove(); - this.add_filter(field.fieldname, input.value, field.fieldtype) + this.add_filter(field.fieldname, input.value, field.fieldtype); + this.refresh(); } }, parent: col, value: field.default, 
render_input: 1, - }) - this.filter_input.push(input) - }) - }) + }); + this.filter_input.push(input); + }); + this.refresh(); + }); } add_filter(field, value, fieldtype) { if (!value) { - delete this.filters[field] + delete this.filters[field]; + } else { + if (fieldtype === 'Data') value = ['like', value + '%']; + Object.assign(this.filters, Object.fromEntries([[field, value]])); } - else { - if (fieldtype === 'Data') value = ['like', value + '%'] - Object.assign(this.filters, Object.fromEntries([[field, value]])) - } - this.refresh(); } get_list_view_fields() { @@ -106,13 +106,13 @@ export default class WebFormList { } more() { - this.web_list_start += this.page_length + this.web_list_start += this.page_length; this.fetch_data().then((res) => { if (res.message.length === 0) { - frappe.msgprint(__("No more items to display")) + frappe.msgprint(__("No more items to display")); } - this.append_rows(res.message) - }) + this.append_rows(res.message); + }); } @@ -125,7 +125,7 @@ export default class WebFormList { }; }); - if (! 
this.table) { + if (!this.table) { this.table = document.createElement("table"); this.table.classList.add("table"); this.make_table_head(); diff --git a/frappe/public/js/frappe/widgets/onboarding_widget.js b/frappe/public/js/frappe/widgets/onboarding_widget.js index 097fd890a6..a1b5a6216d 100644 --- a/frappe/public/js/frappe/widgets/onboarding_widget.js +++ b/frappe/public/js/frappe/widgets/onboarding_widget.js @@ -7,12 +7,6 @@ export default class OnboardingWidget extends Widget { } make_body() { - this.body.addClass("grid"); - if (this.steps.length < 5) { - this.body.addClass(`grid-rows-${this.steps.length}`); - } else if (this.steps.length >= 5) { - this.body.addClass("grid-rows-5"); - } this.steps.forEach((step) => { this.add_step(step); }); @@ -57,10 +51,14 @@ export default class OnboardingWidget extends Widget { let actions = { "Watch Video": () => this.show_video(step), "Create Entry": () => { - if (step.show_full_form) { - this.create_entry(step); + if (step.is_complete) { + frappe.set_route(`#List/${step.reference_document}`); } else { - this.show_quick_entry(step); + if (step.show_full_form) { + this.create_entry(step); + } else { + this.show_quick_entry(step); + } } }, "Show Form Tour": () => this.show_form_tour(step), @@ -138,7 +136,7 @@ export default class OnboardingWidget extends Widget { if (step.is_single) { route = `Form/${step.reference_document}`; } else { - route = `Form/${step.reference_document}/New ${step.reference_document}`; + route = `Form/${step.reference_document}/__('New')+ ' ' + ${step.reference_document}`; } let current_route = frappe.get_route(); @@ -264,7 +262,7 @@ export default class OnboardingWidget extends Widget { frappe.route_hooks.after_save = callback; } - frappe.set_route(`Form/${step.reference_document}/New ${step.reference_document} 1`); + frappe.set_route(`Form/${step.reference_document}/__('New')+ ' ' + ${step.reference_document}`); } show_quick_entry(step) { @@ -434,8 +432,6 @@ export default class OnboardingWidget 
extends Widget { set_actions() { this.action_area.empty(); - if (!this.user_can_dismiss) return; - const dismiss = $( `
    Dismiss
    ` ); diff --git a/frappe/public/js/frappe/widgets/widget_dialog.js b/frappe/public/js/frappe/widgets/widget_dialog.js index d5cd6d9643..054159116f 100644 --- a/frappe/public/js/frappe/widgets/widget_dialog.js +++ b/frappe/public/js/frappe/widgets/widget_dialog.js @@ -74,7 +74,7 @@ class WidgetDialog { this.filters = []; if (this.values && this.values.stats_filter) { - const filters_json = JSON.parse(this.values.stats_filter); + const filters_json = new Function(`return ${this.values.stats_filter}`)(); this.filters = Object.keys(filters_json).map((filter) => { let val = filters_json[filter]; return [this.values.link_to, filter, val[0], val[1], false]; diff --git a/frappe/public/js/lib/frappe-gantt/frappe-gantt.css b/frappe/public/js/lib/frappe-gantt/frappe-gantt.css deleted file mode 100644 index 4d463b281d..0000000000 --- a/frappe/public/js/lib/frappe-gantt/frappe-gantt.css +++ /dev/null @@ -1,118 +0,0 @@ -.gantt .grid-background { - fill: none; } - -.gantt .grid-header { - fill: #ffffff; - stroke: #e0e0e0; - stroke-width: 1.4; } - -.gantt .grid-row { - fill: #ffffff; } - -.gantt .grid-row:nth-child(even) { - fill: #f5f5f5; } - -.gantt .row-line { - stroke: #ebeff2; } - -.gantt .tick { - stroke: #e0e0e0; - stroke-width: 0.2; } - .gantt .tick.thick { - stroke-width: 0.4; } - -.gantt .today-highlight { - fill: #fcf8e3; - opacity: 0.5; } - -.gantt .arrow { - fill: none; - stroke: #666; - stroke-width: 1.4; } - -.gantt .bar { - fill: #b8c2cc; - stroke: #8D99A6; - stroke-width: 0; - transition: stroke-width .3s ease; - user-select: none; } - -.gantt .bar-progress { - fill: #a3a3ff; } - -.gantt .bar-invalid { - fill: transparent; - stroke: #8D99A6; - stroke-width: 1; - stroke-dasharray: 5; } - .gantt .bar-invalid ~ .bar-label { - fill: #555; } - -.gantt .bar-label { - fill: #fff; - dominant-baseline: central; - text-anchor: middle; - font-size: 12px; - font-weight: lighter; } - .gantt .bar-label.big { - fill: #555; - text-anchor: start; } - -.gantt .handle { - fill: 
#ddd; - cursor: ew-resize; - opacity: 0; - visibility: hidden; - transition: opacity .3s ease; } - -.gantt .bar-wrapper { - cursor: pointer; } - .gantt .bar-wrapper:hover .bar { - fill: #a9b5c1; } - .gantt .bar-wrapper:hover .bar-progress { - fill: #8a8aff; } - .gantt .bar-wrapper:hover .handle { - visibility: visible; - opacity: 1; } - .gantt .bar-wrapper.active .bar { - fill: #a9b5c1; } - .gantt .bar-wrapper.active .bar-progress { - fill: #8a8aff; } - -.gantt .lower-text, .gantt .upper-text { - font-size: 12px; - text-anchor: middle; } - -.gantt .upper-text { - fill: #555; } - -.gantt .lower-text { - fill: #333; } - -.gantt .hide { - display: none; } - -.gantt-container { - position: relative; - overflow: auto; - font-size: 12px; } - .gantt-container .popup-wrapper { - position: absolute; - top: 0; - left: 0; - background: rgba(0, 0, 0, 0.8); - padding: 0; - color: #959da5; - border-radius: 3px; } - .gantt-container .popup-wrapper .title { - border-bottom: 3px solid #a3a3ff; - padding: 10px; } - .gantt-container .popup-wrapper .subtitle { - padding: 10px; - color: #dfe2e5; } - .gantt-container .popup-wrapper .pointer { - position: absolute; - height: 5px; - margin: 0 0 0 -5px; - border: 5px solid transparent; - border-top-color: rgba(0, 0, 0, 0.8); } diff --git a/frappe/public/js/lib/frappe-gantt/frappe-gantt.min.js b/frappe/public/js/lib/frappe-gantt/frappe-gantt.min.js deleted file mode 100644 index 4abbba9f49..0000000000 --- a/frappe/public/js/lib/frappe-gantt/frappe-gantt.min.js +++ /dev/null @@ -1 +0,0 @@ -var Gantt=function(){"use strict";const t=["January","February","March","April","May","June","July","August","September","October","November","December"];var e={parse(t,e="-",s=":"){if(t instanceof Date)return t;if("string"==typeof t){let i,n;const a=t.split(" ");i=a[0].split(e).map(t=>parseInt(t,10)),n=a[1]&&a[1].split(s),i[1]=i[1]-1;let o=i;return n&&n.length&&(o=o.concat(n)),new Date(...o)}},to_string(t,e=!1){if(!(t instanceof Date))throw new 
TypeError("Invalid argument type");const i=this.get_date_values(t).map((t,e)=>(1===e&&(t+=1),s(t+"",2,"0"))),n=`${i[0]}-${i[1]}-${i[2]}`,a=`${i[3]}:${i[4]}:${i[5]}`;return n+(e?" "+a:"")},format(e,i="YYYY-MM-DD HH:mm:ss"){const n=this.get_date_values(e).map(t=>s(t,2,0)),a={YYYY:n[0],MM:s(+n[1]+1,2,0),DD:n[2],HH:n[3],mm:n[4],ss:n[5],D:n[2],MMMM:t[+n[1]],MMM:t[+n[1]]};let o=i;return Object.keys(a).sort((t,e)=>e.length-t.length).forEach(t=>{o=o.replace(t,a[t])}),o},diff(t,e,s="day"){let i,n,a,o,r,h,d;return d=(h=(r=(a=(o=(n=(i=t-e)/1e3)/60)/60)/24)/30)/12,s.endsWith("s")||(s+="s"),Math.floor({milliseconds:i,seconds:n,minutes:o,hours:a,days:r,months:h,years:d}[s])},today(){const t=this.get_date_values(new Date).slice(0,3);return new Date(...t)},now:()=>new Date,add(t,e,s){e=parseInt(e,10);const i=[t.getFullYear()+("year"===s?e:0),t.getMonth()+("month"===s?e:0),t.getDate()+("day"===s?e:0),t.getHours()+("hour"===s?e:0),t.getMinutes()+("minute"===s?e:0),t.getSeconds()+("second"===s?e:0),t.getMilliseconds()+("millisecond"===s?e:0)];return new Date(...i)},start_of(t,e){const s={year:6,month:5,day:4,hour:3,minute:2,second:1,millisecond:0};function i(t){const i=s[e];return s[t]<=i}const n=[t.getFullYear(),i("year")?0:t.getMonth(),i("month")?1:t.getDate(),i("day")?0:t.getHours(),i("hour")?0:t.getMinutes(),i("minute")?0:t.getSeconds(),i("second")?0:t.getMilliseconds()];return new Date(...n)},clone(t){return new Date(...this.get_date_values(t))},get_date_values:t=>[t.getFullYear(),t.getMonth(),t.getDate(),t.getHours(),t.getMinutes(),t.getSeconds(),t.getMilliseconds()],get_days_in_month(t){const e=[31,28,31,30,31,30,31,31,30,31,30,31],s=t.getMonth();if(1!==s)return e[s];const i=t.getFullYear();return i%4==0&&i%100!=0||i%400==0?29:28}};function s(t,e,s){return t+="",e>>=0,s=String(void 0!==s?s:" "),t.length>e?String(t):((e-=t.length)>s.length&&(s+=s.repeat(e/s.length)),s.slice(0,e)+String(t))}function i(t,e){return"string"==typeof t?(e||document).querySelector(t):t||null}function 
n(t,e){const s=document.createElementNS("http://www.w3.org/2000/svg",t);for(let t in e)if("append_to"===t){e.append_to.appendChild(s)}else"innerHTML"===t?s.innerHTML=e.innerHTML:s.setAttribute(t,e[t]);return s}function a(t,e,s,a){const o=function(t,e,s,a,o="0.4s",r="0.1s"){const h=t.querySelector("animate");if(h)return i.attr(h,{attributeName:e,from:s,to:a,dur:o,begin:"click + "+r}),t;const d=n("animate",{attributeName:e,from:s,to:a,dur:o,begin:r,calcMode:"spline",values:s+";"+a,keyTimes:"0; 1",keySplines:(p="ease-out",{ease:".25 .1 .25 1",linear:"0 0 1 1","ease-in":".42 0 1 1","ease-out":"0 0 .58 1","ease-in-out":".42 0 .58 1"}[p])});var p;return t.appendChild(d),t}(t,e,s,a);if(o===t){const t=document.createEvent("HTMLEvents");t.initEvent("click",!0,!0),t.eventName="click",o.dispatchEvent(t)}}i.on=((t,e,s,n)=>{n?i.delegate(t,e,s,n):(n=s,i.bind(t,e,n))}),i.off=((t,e,s)=>{t.removeEventListener(e,s)}),i.bind=((t,e,s)=>{e.split(/\s+/).forEach(function(e){t.addEventListener(e,s)})}),i.delegate=((t,e,s,i)=>{t.addEventListener(e,function(t){const e=t.target.closest(s);e&&(t.delegatedTarget=e,i.call(this,t,e))})}),i.closest=((t,e)=>e?e.matches(t)?e:i.closest(t,e.parentNode):null),i.attr=((t,e,s)=>{if(!s&&"string"==typeof e)return t.getAttribute(e);if("object"!=typeof e)t.setAttribute(e,s);else for(let s in e)i.attr(t,s,e[s])});class 
o{constructor(t,e){this.set_defaults(t,e),this.prepare(),this.draw(),this.bind()}set_defaults(t,e){this.action_completed=!1,this.gantt=t,this.task=e}prepare(){this.prepare_values(),this.prepare_helpers()}prepare_values(){this.invalid=this.task.invalid,this.height=this.gantt.options.bar_height,this.x=this.compute_x(),this.y=this.compute_y(),this.corner_radius=this.gantt.options.bar_corner_radius,this.duration=(e.diff(this.task._end,this.task._start,"hour")+24)/this.gantt.options.step,this.width=this.gantt.options.column_width*this.duration,this.progress_width=this.gantt.options.column_width*this.duration*(this.task.progress/100)||0,this.group=n("g",{class:"bar-wrapper "+(this.task.custom_class||""),"data-id":this.task.id}),this.bar_group=n("g",{class:"bar-group",append_to:this.group}),this.handle_group=n("g",{class:"handle-group",append_to:this.group})}prepare_helpers(){SVGElement.prototype.getX=function(){return+this.getAttribute("x")},SVGElement.prototype.getY=function(){return+this.getAttribute("y")},SVGElement.prototype.getWidth=function(){return+this.getAttribute("width")},SVGElement.prototype.getHeight=function(){return+this.getAttribute("height")},SVGElement.prototype.getEndX=function(){return 
this.getX()+this.getWidth()}}draw(){this.draw_bar(),this.draw_progress_bar(),this.draw_label(),this.draw_resize_handles()}draw_bar(){this.$bar=n("rect",{x:this.x,y:this.y,width:this.width,height:this.height,rx:this.corner_radius,ry:this.corner_radius,class:"bar",append_to:this.bar_group}),a(this.$bar,"width",0,this.width),this.invalid&&this.$bar.classList.add("bar-invalid")}draw_progress_bar(){this.invalid||(this.$bar_progress=n("rect",{x:this.x,y:this.y,width:this.progress_width,height:this.height,rx:this.corner_radius,ry:this.corner_radius,class:"bar-progress",append_to:this.bar_group}),a(this.$bar_progress,"width",0,this.progress_width))}draw_label(){n("text",{x:this.x+this.width/2,y:this.y+this.height/2,innerHTML:this.task.name,class:"bar-label",append_to:this.bar_group}),requestAnimationFrame(()=>this.update_label_position())}draw_resize_handles(){if(this.invalid)return;const t=this.$bar;n("rect",{x:t.getX()+t.getWidth()-9,y:t.getY()+1,width:8,height:this.height-2,rx:this.corner_radius,ry:this.corner_radius,class:"handle right",append_to:this.handle_group}),n("rect",{x:t.getX()+1,y:t.getY()+1,width:8,height:this.height-2,rx:this.corner_radius,ry:this.corner_radius,class:"handle left",append_to:this.handle_group}),this.task.progress&&this.task.progress<100&&(this.$handle_progress=n("polygon",{points:this.get_progress_polygon_points().join(","),class:"handle progress",append_to:this.handle_group}))}get_progress_polygon_points(){const t=this.$bar_progress;return[t.getEndX()-5,t.getY()+t.getHeight(),t.getEndX()+5,t.getY()+t.getHeight(),t.getEndX(),t.getY()+t.getHeight()-8.66]}bind(){this.invalid||this.setup_click_event()}setup_click_event(){i.on(this.group,"click",t=>{this.action_completed||(this.group.classList.contains("active")&&this.gantt.trigger_event("click",[this.task]),this.gantt.unselect_all(),this.group.classList.toggle("active"),this.show_popup())})}show_popup(){if(this.gantt.bar_being_dragged)return;const t=e.format(this.task._start,"MMM D")+" - 
"+e.format(this.task._end,"MMM D");this.gantt.show_popup({target_element:this.$bar,title:this.task.name,subtitle:t})}update_bar_position({x:t=null,width:e=null}){const s=this.$bar;if(t){if(!this.task.dependencies.map(t=>this.gantt.get_bar(t).$bar.getX()).reduce((e,s)=>t>=s,t))return void(e=null);this.update_attr(s,"x",t)}e&&e>=this.gantt.options.column_width&&this.update_attr(s,"width",e),this.update_label_position(),this.update_handle_position(),this.update_progressbar_position(),this.update_arrow_position()}date_changed(){const{new_start_date:t,new_end_date:e}=this.compute_start_end_date();this.task._start=t,this.task._end=e,this.gantt.trigger_event("date_change",[this.task,t,e])}progress_changed(){const t=this.compute_progress();this.task.progress=t,this.gantt.trigger_event("progress_change",[this.task,t])}set_action_completed(){this.action_completed=!0,setTimeout(()=>this.action_completed=!1,1e3)}compute_start_end_date(){const t=this.$bar,s=t.getX()/this.gantt.options.column_width,i=e.add(this.gantt.gantt_start,s*this.gantt.options.step,"hours"),n=t.getWidth()/this.gantt.options.column_width,a=e.add(i,n*this.gantt.options.step,"hours");return e.add(a,-1,"second"),{new_start_date:i,new_end_date:a}}compute_progress(){const t=this.$bar_progress.getWidth()/this.$bar.getWidth()*100;return parseInt(t,10)}compute_x(){let t=e.diff(this.task._start,this.gantt.gantt_start,"hour")/this.gantt.options.step*this.gantt.options.column_width;return this.gantt.view_is("Month")&&(t=e.diff(this.task._start,this.gantt.gantt_start,"day")*this.gantt.options.column_width/30),t}compute_y(){return this.gantt.options.header_height+this.gantt.options.padding+this.task._index*(this.height+this.gantt.options.padding)}get_snap_position(t){let e,s,i=t;return 
s=this.gantt.view_is("Week")?i-(e=t%(this.gantt.options.column_width/7))+(et.getWidth()?(e.classList.add("big"),e.setAttribute("x",t.getX()+t.getWidth()+5)):(e.classList.remove("big"),e.setAttribute("x",t.getX()+t.getWidth()/2))}update_handle_position(){const t=this.$bar;this.handle_group.querySelector(".handle.left").setAttribute("x",t.getX()+1),this.handle_group.querySelector(".handle.right").setAttribute("x",t.getEndX()-9);const e=this.group.querySelector(".handle.progress");e&&e.setAttribute("points",this.get_progress_polygon_points())}update_arrow_position(){this.arrows=this.arrows||[];for(let t of this.arrows)t.update()}}class r{constructor(t,e,s){this.gantt=t,this.from_task=e,this.to_task=s,this.calculate_path(),this.draw()}calculate_path(){let t=this.from_task.$bar.getX()+this.from_task.$bar.getWidth()/2;const e=()=>this.to_task.$bar.getX()this.from_task.$bar.getX()+this.gantt.options.padding;for(;e();)t-=10;const s=this.gantt.options.header_height+this.gantt.options.bar_height+(this.gantt.options.padding+this.gantt.options.bar_height)*this.from_task.task._index+this.gantt.options.padding,i=this.to_task.$bar.getX()-this.gantt.options.padding/2,n=this.gantt.options.header_height+this.gantt.options.bar_height/2+(this.gantt.options.padding+this.gantt.options.bar_height)*this.to_task.task._index+this.gantt.options.padding,a=this.from_task.task._index>this.to_task.task._index,o=this.gantt.options.arrow_curve,r=a?1:0,h=a?-o:o,d=a?n+this.gantt.options.arrow_curve:n-this.gantt.options.arrow_curve;if(this.path=`\n M ${t} ${s}\n V ${d}\n a ${o} ${o} 0 0 ${r} ${o} ${h}\n L ${i} ${n}\n m -5 -5\n l 5 5\n l -5 5`,this.to_task.$bar.getX()
    \n
    \n
    \n ',this.hide(),this.title=this.parent.querySelector(".title"),this.subtitle=this.parent.querySelector(".subtitle"),this.pointer=this.parent.querySelector(".pointer")}show(t){if(!t.target_element)throw new Error("target_element is required to show popup");t.position||(t.position="left");const e=t.target_element;let s;this.title.innerHTML=t.title,this.subtitle.innerHTML=t.subtitle,this.parent.style.width=this.parent.clientWidth+"px",e instanceof HTMLElement?s=e.getBoundingClientRect():e instanceof SVGElement&&(s=t.target_element.getBBox()),"left"===t.position&&(this.parent.style.left=s.x+(s.width+10)+"px",this.parent.style.top=s.y-this.title.clientHeight/2+s.height/2+"px",this.pointer.style.transform="rotateZ(90deg)",this.pointer.style.left="-7px",this.pointer.style.top=this.title.clientHeight/2-this.pointer.getBoundingClientRect().height+2+"px"),this.parent.style.opacity=1}hide(){this.parent.style.opacity=0}}return class{constructor(t,e,s){this.setup_wrapper(t),this.setup_options(s),this.setup_tasks(e),this.change_view_mode(),this.bind_events()}setup_wrapper(t){if("string"==typeof t&&(t=document.querySelector(t)),!(t instanceof HTMLElement))throw new Error("Invalid argument passed for element");this.$container=document.createElement("div"),this.$container.classList.add("gantt-container"),t.appendChild(this.$container),this.$svg=n("svg",{append_to:this.$container,class:"gantt"}),this.popup_wrapper=document.createElement("div"),this.popup_wrapper.classList.add("popup-wrapper"),this.$svg.parentElement.appendChild(this.popup_wrapper)}setup_options(t){this.options=Object.assign({},{header_height:50,column_width:30,step:24,view_modes:["Quarter Day","Half 
Day","Day","Week","Month"],bar_height:20,bar_corner_radius:3,arrow_curve:5,padding:18,view_mode:"Day",date_format:"YYYY-MM-DD",custom_popup_html:null},t)}setup_tasks(t){this.tasks=t.map((t,s)=>{if(t._start=e.parse(t.start),t._end=e.parse(t.end),e.diff(t._end,t._start,"year")>10&&(t.end=null),t._index=s,!t.start&&!t.end){const s=e.today();t._start=s,t._end=e.add(s,2,"day")}if(!t.start&&t.end&&(t._start=e.add(t._end,-2,"day")),t.start&&!t.end&&(t._end=e.add(t._start,2,"day")),t.start&&t.end||(t.invalid=!0),"string"==typeof t.dependencies||!t.dependencies){let e=[];t.dependencies&&(e=t.dependencies.split(",").map(t=>t.trim()).filter(t=>t)),t.dependencies=e}return t.id||(t.id=function(t){return t.name+"_"+Math.random().toString(36).slice(2,12)}(t)),t}),this.setup_dependencies()}setup_dependencies(){this.dependency_map={};for(let t of this.tasks)for(let e of t.dependencies)this.dependency_map[e]=this.dependency_map[e]||[],this.dependency_map[e].push(t.id)}refresh(t){this.setup_tasks(t),this.change_view_mode()}change_view_mode(t=this.options.view_mode){this.update_view_scale(t),this.setup_dates(),this.render(),this.trigger_event("view_change",[t])}update_view_scale(t){this.options.view_mode=t,"Day"===t?(this.options.step=24,this.options.column_width=38):"Half Day"===t?(this.options.step=12,this.options.column_width=38):"Quarter Day"===t?(this.options.step=6,this.options.column_width=38):"Week"===t?(this.options.step=168,this.options.column_width=140):"Month"===t&&(this.options.step=720,this.options.column_width=120)}setup_dates(){this.setup_gantt_dates(),this.setup_date_values()}setup_gantt_dates(){this.gantt_start=this.gantt_end=null;for(let t of this.tasks)(!this.gantt_start||t._startthis.gantt_end)&&(this.gantt_end=t._end);this.view_is(["Quarter Day","Half 
Day"])?(this.gantt_start=e.add(this.gantt_start,-7,"day"),this.gantt_end=e.add(this.gantt_end,7,"day")):this.view_is("Month")?(this.gantt_start=e.start_of(this.gantt_start,"year"),this.gantt_end=e.add(this.gantt_end,1,"year")):(this.gantt_start=e.add(this.gantt_start,-1,"month"),this.gantt_end=e.add(this.gantt_end,1,"month"))}setup_date_values(){this.dates=[];let t=null;for(;null===t||t=1&&a.getDate()<8&&(o+=" thick"),this.view_is("Month")&&(a.getMonth()+1)%3==0&&(o+=" thick"),n("path",{d:`M ${t} ${s} v ${i}`,class:o,append_to:this.layers.grid}),this.view_is("Month")?t+=e.get_days_in_month(a)*this.options.column_width/30:t+=this.options.column_width}}make_grid_highlights(){this.view_is("Day")&&n("rect",{x:e.diff(e.today(),this.gantt_start,"hour")/this.options.step*this.options.column_width,y:0,width:this.options.column_width,height:(this.options.bar_height+this.options.padding)*this.tasks.length+this.options.header_height+this.options.padding/2,class:"today-highlight",append_to:this.layers.grid})}make_dates(){for(let t of this.get_dates_to_draw())if(n("text",{x:t.lower_x,y:t.lower_y,innerHTML:t.lower_text,class:"lower-text",append_to:this.layers.date}),t.upper_text){const e=n("text",{x:t.upper_x,y:t.upper_y,innerHTML:t.upper_text,class:"upper-text",append_to:this.layers.date});e.getBBox().x2>this.layers.grid.getBBox().width&&e.remove()}}get_dates_to_draw(){let t=null;return this.dates.map((e,s)=>{const i=this.get_date_info(e,t,s);return t=e,i})}get_date_info(t,s,i){s||(s=e.add(t,1,"year"));const n={"Quarter Day_lower":e.format(t,"HH"),"Half Day_lower":e.format(t,"HH"),Day_lower:t.getDate()!==s.getDate()?e.format(t,"D"):"",Week_lower:t.getMonth()!==s.getMonth()?e.format(t,"D MMM"):e.format(t,"D"),Month_lower:e.format(t,"MMMM"),"Quarter Day_upper":t.getDate()!==s.getDate()?e.format(t,"D MMM"):"","Half Day_upper":t.getDate()!==s.getDate()?t.getMonth()!==s.getMonth()?e.format(t,"D 
MMM"):e.format(t,"D"):"",Day_upper:t.getMonth()!==s.getMonth()?e.format(t,"MMMM"):"",Week_upper:t.getMonth()!==s.getMonth()?e.format(t,"MMMM"):"",Month_upper:t.getFullYear()!==s.getFullYear()?e.format(t,"YYYY"):""},a={x:i*this.options.column_width,lower_y:this.options.header_height,upper_y:this.options.header_height-25},o={"Quarter Day_lower":4*this.options.column_width/2,"Quarter Day_upper":0,"Half Day_lower":2*this.options.column_width/2,"Half Day_upper":0,Day_lower:this.options.column_width/2,Day_upper:30*this.options.column_width/2,Week_lower:0,Week_upper:4*this.options.column_width/2,Month_lower:this.options.column_width/2,Month_upper:12*this.options.column_width/2};return{upper_text:n[`${this.options.view_mode}_upper`],lower_text:n[`${this.options.view_mode}_lower`],upper_x:a.x+o[`${this.options.view_mode}_upper`],upper_y:a.upper_y,lower_x:a.x+o[`${this.options.view_mode}_lower`],lower_y:a.lower_y}}make_bars(){this.bars=this.tasks.map(t=>{const e=new o(this,t);return this.layers.bar.appendChild(e.group),e})}make_arrows(){this.arrows=[];for(let t of this.tasks){let e=[];e=t.dependencies.map(e=>{const s=this.get_task(e);if(!s)return;const i=new r(this,this.bars[s._index],this.bars[t._index]);return this.layers.arrow.appendChild(i.element),i}).filter(Boolean),this.arrows=this.arrows.concat(e)}}map_arrows_on_bars(){for(let t of this.bars)t.arrows=this.arrows.filter(e=>e.from_task.task.id===t.task.id||e.to_task.task.id===t.task.id)}set_width(){const t=this.$svg.getBoundingClientRect().width,e=this.$svg.querySelector(".grid .grid-row").getAttribute("width");t{this.unselect_all(),this.hide_popup()})}bind_bar_events(){let t=!1,e=0,s=0,n=!1,a=!1,o=null,r=[];this.bar_being_dragged=null,i.on(this.layers.bar,"mousedown",".bar-wrapper, .handle",(h,d)=>{const p=i.closest(".bar-wrapper",d);d.classList.contains("left")?n=!0:d.classList.contains("right")?a=!0:d.classList.contains("bar-wrapper")&&(t=!0),p.classList.add("active"),e=h.offsetX,s=h.offsetY;const 
_=[o=p.getAttribute("data-id"),...this.get_all_dependent_tasks(o)];r=_.map(t=>this.get_bar(t)),this.bar_being_dragged=o,r.forEach(t=>{const e=t.$bar;e.ox=e.getX(),e.oy=e.getY(),e.owidth=e.getWidth(),e.finaldx=0})}),i.on(this.$svg,"mousemove",s=>{if(!(t||n||a))return;const i=s.offsetX-e;s.offsetY,r.forEach(e=>{const s=e.$bar;s.finaldx=this.get_snap_position(i),n?o===e.task.id?e.update_bar_position({x:s.ox+s.finaldx,width:s.owidth-s.finaldx}):e.update_bar_position({x:s.ox+s.finaldx}):a?o===e.task.id&&e.update_bar_position({width:s.owidth+s.finaldx}):t&&e.update_bar_position({x:s.ox+s.finaldx})})}),document.addEventListener("mouseup",e=>{(t||n||a)&&r.forEach(t=>t.group.classList.remove("active")),t=!1,n=!1,a=!1}),i.on(this.$svg,"mouseup",t=>{this.bar_being_dragged=null,r.forEach(t=>{t.$bar.finaldx&&(t.date_changed(),t.set_action_completed())})}),this.bind_bar_progress()}bind_bar_progress(){let t=0,e=0,s=null,n=null,a=null,o=null;i.on(this.$svg,"mousedown",".handle.progress",(r,h)=>{s=!0,t=r.offsetX,e=r.offsetY;const d=i.closest(".bar-wrapper",h).getAttribute("data-id");n=this.get_bar(d),a=n.$bar_progress,o=n.$bar,a.finaldx=0,a.owidth=a.getWidth(),a.min_dx=-a.getWidth(),a.max_dx=o.getWidth()-a.getWidth()}),i.on(this.$svg,"mousemove",e=>{if(!s)return;let o=e.offsetX-t;e.offsetY,o>a.max_dx&&(o=a.max_dx),o{s=!1,a&&a.finaldx&&(n.progress_changed(),n.set_action_completed())})}get_all_dependent_tasks(t){let e=[],s=[t];for(;s.length;){const t=s.reduce((t,e)=>t=t.concat(this.dependency_map[e]),[]);e=e.concat(t),s=t.filter(t=>!s.includes(t))}return e.filter(Boolean)}get_snap_position(t){let e,s,i=t;return s=this.view_is("Week")?i-(e=t%(this.options.column_width/7))+(e{t.classList.remove("active")})}view_is(t){return"string"==typeof t?this.options.view_mode===t:!!Array.isArray(t)&&t.some(t=>this.options.view_mode===t)}get_task(t){return this.tasks.find(e=>e.id===t)}get_bar(t){return this.bars.find(e=>e.task.id===t)}show_popup(t){this.popup||(this.popup=new 
h(this.popup_wrapper)),this.popup.show(t)}hide_popup(){this.popup&&this.popup.hide()}trigger_event(t,e){this.options["on_"+t]&&this.options["on_"+t].apply(null,e)}get_oldest_starting_date(){return this.tasks.map(t=>t._start).reduce((t,e)=>e<=t?e:t)}clear(){this.$svg.innerHTML=""}}}(); diff --git a/frappe/public/less/controls.less b/frappe/public/less/controls.less index fe786e3734..53f0aba43c 100644 --- a/frappe/public/less/controls.less +++ b/frappe/public/less/controls.less @@ -34,16 +34,27 @@ overflow-wrap: break-word; } +.frappe-control[data-fieldtype="Data"] .control-input { + position: relative; +} + .link-btn { position: absolute; - top: 3px; + top: 4px; right: 4px; - border-radius: 2px; padding: 3px; display: none; z-index: 3; } +.phone-btn { + position: absolute; + top: 2px; + right: 8px; + padding: 3px; + z-index: 3; +} + .markdown-preview, .html-preview { padding: 12px 15px; min-height: 300px; diff --git a/frappe/public/less/form.less b/frappe/public/less/form.less index ead0f941fe..47661ad5c3 100644 --- a/frappe/public/less/form.less +++ b/frappe/public/less/form.less @@ -326,6 +326,9 @@ h6.uppercase, .h6.uppercase { .form-section { padding: 15px 7px; } + .hide-border { + padding-top: 0; + } } .help ol { @@ -550,7 +553,13 @@ h6.uppercase, .h6.uppercase { margin-left: 5px; } - .media-body:after, .media-body:before { + .media-body { + .left-arrow; + } +} + +.left-arrow { + &::after, &::before { right: 100%; top: 15px; border: solid transparent; @@ -561,13 +570,13 @@ h6.uppercase, .h6.uppercase { pointer-events: none; } - .media-body:after { + &::after { border-color: rgba(136, 183, 213, 0); border-right-color: #fafbfc; border-width: 6px; margin-top: -6px; } - .media-body:before { + &::before { border-color: rgba(194, 225, 245, 0); border-right-color: @border-color; border-width: 7px; @@ -615,6 +624,18 @@ h6.uppercase, .h6.uppercase { top: 5px; } +.timeline-item.user-content.show-indicator { + position: relative; + .media-body { + margin-left: 50px; + } + 
&::before { + .timeline-indicator(); + left: 13px; + top: 13px; + } +} + .timeline-item.notification-content::before { .timeline-indicator(); } diff --git a/frappe/public/less/gantt.less b/frappe/public/less/gantt.less index 6190dcf4bb..89be32a2f4 100644 --- a/frappe/public/less/gantt.less +++ b/frappe/public/less/gantt.less @@ -18,4 +18,29 @@ .frappe-rtl .gantt { direction: ltr; +} + +.list-paging-area .gantt-view-mode { + margin-left: 15px; + margin-right: 15px; +} + +.gantt-container { + .details-container { + min-width: 160px; + + .heading { + margin-bottom: 10px; + font-size: 12px; + } + + .avatar-small { + width: 16px; + height: 16px; + } + + .standard-image { + display: block; + } + } } \ No newline at end of file diff --git a/frappe/public/less/list.less b/frappe/public/less/list.less index 78d21d2ebf..3c698aec77 100644 --- a/frappe/public/less/list.less +++ b/frappe/public/less/list.less @@ -68,6 +68,7 @@ // position: relative; // .tag-filters-area { // padding: 10px 150px 0 10px; +// border-bottom: 1px solid @border-color; // } // .sort-selector { // position: absolute; @@ -125,6 +126,8 @@ // } // .list-row-head { +// background-color: @panel-bg; +// border-bottom: 1px solid @border-color !important; // .list-subject { // font-weight: normal; @@ -468,30 +471,6 @@ // } // } -// // gantt -// .list-paging-area .gantt-view-mode { -// margin-left: 15px; -// margin-right: 15px; -// } - -// .gantt { -// .details-container { -// .heading { -// margin-bottom: 10px; -// font-size: 12px; -// } - -// .avatar-small { -// width: 16px; -// height: 16px; -// } - -// .standard-image { -// display: block; -// } -// } -// } - // .inbox-attachment, .inbox-link { // margin-right: 7px; // } diff --git a/frappe/public/less/print.less b/frappe/public/less/print.less index 203b1237c5..813659ffd8 100644 --- a/frappe/public/less/print.less +++ b/frappe/public/less/print.less @@ -1,15 +1 @@ -.text-editor-print { - ul li { - list-style-type: none; - padding-left: 1.5em; - } - - ul 
li:before { - content: '\2022'; - margin-left: -1.5em; - margin-right: 0.3em; - text-align: right; - white-space: nowrap; - width: 1.2em; - } -} +@import "quill.less"; diff --git a/frappe/public/less/quill.less b/frappe/public/less/quill.less index 0a1384c11d..a72602697a 100644 --- a/frappe/public/less/quill.less +++ b/frappe/public/less/quill.less @@ -1,38 +1,47 @@ -@import "variables.less"; -@import (less) "quill/dist/quill.snow.css"; -@import (less) "quill/dist/quill.bubble.css"; -@import (less) "../js/frappe/form/controls/quill-mention/quill.mention.css"; +@import 'variables.less'; +@import (less) 'quill/dist/quill.snow.css'; +@import (less) 'quill/dist/quill.bubble.css'; +@import (less) '../js/frappe/form/controls/quill-mention/quill.mention.css'; -.ql-toolbar.ql-snow, .ql-container.ql-snow { - border-color: @border-color; - font-family: inherit; +.ql-toolbar.ql-snow, +.ql-container.ql-snow { + border-color: @border-color; + font-family: inherit; } .ql-editor { - font-family: @font-stack; - line-height: 1.6; + white-space: normal; +} - h1, h2, h3, h4, h5 { +.ql-editor { + font-family: @font-stack; + line-height: 1.6; + + h1, + h2, + h3, + h4, + h5 { margin-top: 0.5em; margin-bottom: 0.25em; } } .ql-toolbar.ql-snow { - border-top-left-radius: 4px; - border-top-right-radius: 4px; - background-color: @panel-bg; - padding-bottom: 0; + border-top-left-radius: 4px; + border-top-right-radius: 4px; + background-color: @panel-bg; + padding-bottom: 0; } .ql-container.ql-snow { - border-bottom-left-radius: 4px; - border-bottom-right-radius: 4px; + border-bottom-left-radius: 4px; + border-bottom-right-radius: 4px; } .ql-snow .ql-editor { - min-height: 400px; - max-height: 600px; + min-height: 400px; + max-height: 600px; } .ql-snow .ql-picker { @@ -40,87 +49,87 @@ } .ql-snow .ql-picker-label { - outline: none; + outline: none; } .ql-formats { - margin-bottom: 8px; + margin-bottom: 8px; } .ql-bubble .ql-editor { - min-height: 100px; - max-height: 300px; - border: 1px solid 
@light-border-color; - border-radius: 4px; + min-height: 100px; + max-height: 300px; + border: 1px solid @light-border-color; + border-radius: 4px; } .ql-mention-list-container { - z-index: 1; + z-index: 1; } .ql-mention-list { - border-radius: 4px; + border-radius: 4px; } .ql-mention-list-item { - font-size: @text-small; - padding: 10px 12px; - height: initial; - line-height: initial; + font-size: @text-small; + padding: 10px 12px; + height: initial; + line-height: initial; - &.selected { - background-color: @btn-bg; - } + &.selected { + background-color: @btn-bg; + } } .ql-editor .mention { - height: auto; - width: auto; - border-radius: 10px; - border: 1px solid @light-border-color; - padding: 2px 3px; - background-color: @btn-bg; + height: auto; + width: auto; + border-radius: 10px; + border: 1px solid @light-border-color; + padding: 2px 3px; + background-color: @btn-bg; } // table .ql-table { - width: 66px; + width: 66px; - .ql-picker-label::before { - content: 'Table'; - } + .ql-picker-label::before { + content: 'Table'; + } - .ql-picker-options { - [data-value='insert-table']::before { - content: 'Insert Table'; - } - [data-value='insert-row-above']::before { - content: 'Insert Row Above'; - } - [data-value='insert-row-below']::before { - content: 'Insert Row Below'; - } - [data-value='insert-column-right']::before { - content: 'Insert Column Right'; - } - [data-value='insert-column-left']::before { - content: 'Insert Column Left'; - } - [data-value='delete-row']::before { - content: 'Delete Row'; - } - [data-value='delete-column']::before { - content: 'Delete Column'; - } - [data-value='delete-table']::before { - content: 'Delete Table'; - } - } + .ql-picker-options { + [data-value='insert-table']::before { + content: 'Insert Table'; + } + [data-value='insert-row-above']::before { + content: 'Insert Row Above'; + } + [data-value='insert-row-below']::before { + content: 'Insert Row Below'; + } + [data-value='insert-column-right']::before { + content: 'Insert 
Column Right'; + } + [data-value='insert-column-left']::before { + content: 'Insert Column Left'; + } + [data-value='delete-row']::before { + content: 'Delete Row'; + } + [data-value='delete-column']::before { + content: 'Delete Column'; + } + [data-value='delete-table']::before { + content: 'Delete Table'; + } + } } .ql-editor td { - border: 1px solid @border-color; + border: 1px solid @border-color; } .ql-snow .ql-editor blockquote { @@ -128,16 +137,3 @@ margin-top: 0px; margin-bottom: 0px; } - -.ql-editor ol { - padding-left: 2.5em; -} - -.ql-editor li[data-list=ordered] { - list-style-type: decimal; - padding-left: 0; - - &::before { - content: none; - } -} diff --git a/frappe/public/scss/blog.scss b/frappe/public/scss/blog.scss index 046158f23b..9918b490c5 100644 --- a/frappe/public/scss/blog.scss +++ b/frappe/public/scss/blog.scss @@ -26,8 +26,9 @@ height: 12rem; img { - width: 100%; - min-height: 100%; + min-height: 12rem; + min-width: 100%; + object-fit: cover; } .default-cover { diff --git a/frappe/public/scss/desktop.scss b/frappe/public/scss/desktop.scss index ae73e50e0f..295406c647 100644 --- a/frappe/public/scss/desktop.scss +++ b/frappe/public/scss/desktop.scss @@ -14,7 +14,7 @@ $panel-bg: $gray-50; border-radius: 4px; cursor: pointer; text-rendering: optimizelegibility; - + &:focus { background-color: $panel-bg; outline: 0; @@ -23,24 +23,24 @@ $panel-bg: $gray-50; &:first-child { margin-top: 100px; } - + &:hover { background-color: $panel-bg; } - + &.selected { background-color: $panel-bg; } - + a { color: $gray-800; text-decoration: none; } - + svg { margin-right: 0.875rem; } - } + } } .desk-page { @@ -255,14 +255,11 @@ $panel-bg: $gray-50; .widget-head { display: flex; - .widget-title { - font-size: 20px; - } - .widget-subtitle { - font-size: 16px; margin-top: 5px; color: $text-muted; + font-size: 14px; + font-weight: 400; } .widget-control { @@ -275,31 +272,8 @@ $panel-bg: $gray-50; .widget-body { margin-top: 20px; - &.grid { - display: grid; - 
grid-template-columns: 1fr 1fr 1fr; - grid-auto-flow: column; - - &.grid-rows-2 { - grid-template-rows: repeat(3, 1fr); - } - - &.grid-rows-3 { - grid-template-rows: repeat(3, 1fr); - } - - &.grid-rows-4 { - grid-template-rows: repeat(4, 1fr); - } - - &.grid-rows-5 { - grid-template-rows: repeat(5, 1fr); - } - } - .onboarding-step { margin-bottom: 8px; - font-size: 16px; letter-spacing: 0.015em; i { diff --git a/frappe/public/scss/doc.scss b/frappe/public/scss/doc.scss index 1eb3422042..13a59ba45b 100644 --- a/frappe/public/scss/doc.scss +++ b/frappe/public/scss/doc.scss @@ -276,3 +276,13 @@ $navbar-height-lg: 4.5rem; padding-top: 1rem; text-align: right; } + +.doc-content .breadcrumb-container { + padding-left: 0; + padding-right: 0; + margin-top: 3rem; + + .breadcrumb { + margin-bottom: 0; + } +} \ No newline at end of file diff --git a/frappe/public/scss/markdown.scss b/frappe/public/scss/markdown.scss index a77b8b941e..1cb78dcc62 100644 --- a/frappe/public/scss/markdown.scss +++ b/frappe/public/scss/markdown.scss @@ -122,6 +122,13 @@ border-radius: 0.375rem; } + .screenshot + em { + text-align: center; + display: block; + margin-top: 0.5rem; + margin-bottom: 2rem; + } + code:not(.hljs) { padding: 0 0.25rem; background: $light; diff --git a/frappe/public/scss/page-builder.scss b/frappe/public/scss/page-builder.scss index defeb19e6e..f6446a9ba9 100644 --- a/frappe/public/scss/page-builder.scss +++ b/frappe/public/scss/page-builder.scss @@ -1,3 +1,18 @@ +.hero-content { + .btn-primary { + margin-top: 1rem; + margin-right: 0.5rem; + + @include media-breakpoint-up(lg) { + margin-right: 1rem; + } + } + + .btn-primary-light { + margin-top: 1rem; + } +} + .hero-subtitle { @extend .lead; font-weight: 400; diff --git a/frappe/public/scss/portal.scss b/frappe/public/scss/portal.scss new file mode 100644 index 0000000000..33371d5693 --- /dev/null +++ b/frappe/public/scss/portal.scss @@ -0,0 +1,7 @@ +.portal-row { + padding: 1rem 0; + + a { + color: $body-color; + } +} \ No 
newline at end of file diff --git a/frappe/public/scss/sidebar.scss b/frappe/public/scss/sidebar.scss index 903a9af8c8..c07cbbd8a2 100644 --- a/frappe/public/scss/sidebar.scss +++ b/frappe/public/scss/sidebar.scss @@ -478,6 +478,11 @@ body[data-route^="Module"] .main-menu { } } +// Remove top margin from frist child +.sidebar-item:first-child a { + margin-top: 0rem; +} + .sidebar-item a.active { color: $primary; background-color: $primary-light; @@ -495,6 +500,7 @@ body[data-route^="Module"] .main-menu { h6 { font-size: $font-size-sm; margin-bottom: 0.75rem; + line-height: 1.5; } > ul { diff --git a/frappe/public/scss/website.scss b/frappe/public/scss/website.scss index 636d1a2b62..2ce81de60c 100644 --- a/frappe/public/scss/website.scss +++ b/frappe/public/scss/website.scss @@ -8,6 +8,7 @@ @import 'markdown'; @import "global"; @import 'sidebar'; +@import 'portal'; @import 'doc'; .container { @@ -110,9 +111,18 @@ color: $light; } +.page-content-wrapper { + margin: 2rem 0; +} + +.breadcrumb-container { + margin-top: 1rem; + padding-top: 0.25rem; +} + .breadcrumb { - padding-left: 0; - padding-right: 0; + padding: 0; + font-size: $font-size-sm; background-color: white; } @@ -291,3 +301,25 @@ h5.modal-title { .image-loaded { filter: blur(0rem); } + +.embed-container { + position: relative; + padding-bottom: 56.25%; + height: 0; + overflow: hidden; + max-width: 100%; +} + +.embed-container iframe { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; +} + +.ellipsis { + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; +} \ No newline at end of file diff --git a/frappe/templates/base.html b/frappe/templates/base.html index 5688ce4fc3..0b82b3dac2 100644 --- a/frappe/templates/base.html +++ b/frappe/templates/base.html @@ -10,7 +10,7 @@ {% include "templates/includes/meta_block.html" %} {% endblock %} - {% block title %} {{ title | striptags }} {% endblock %} + {% block title %}{{ title | striptags }}{% endblock %} {% block favicon 
%} {% endblock %} + + {%- block head -%} {% if head_html is defined -%} {{ head_html or "" }} diff --git a/frappe/templates/doc.html b/frappe/templates/doc.html index bb3cb6ec77..3e1cc5509a 100644 --- a/frappe/templates/doc.html +++ b/frappe/templates/doc.html @@ -1,10 +1,6 @@ {% extends "templates/base.html" %} {%- from "templates/includes/navbar/navbar_items.html" import render_item -%} -{% macro page_content() %} -{%- block page_content -%}{%- endblock -%} -{% endmacro %} - {%- block head_include %} {% endblock -%} @@ -71,17 +67,6 @@ {% block content %} -{% macro main_content() %} -
    - {% block page_container %} -
    -
    - {{ page_content() }} -
    -
    - {% endblock %} -
    -{% endmacro %} {% macro container_attributes() -%} id="page-{{ name or route | e }}" data-path="{{ pathname | e }}" @@ -99,14 +84,31 @@ id="page-{{ name or route | e }}" data-path="{{ pathname | e }}"
    - {{ main_content() }} +
    + {% block page_container %} +
    +
    + {%- if add_breadcrumbs -%} + {% include "templates/includes/breadcrumbs.html" %} + {%- endif -%} + {%- block page_content -%}{%- endblock -%} +
    +
    + {% endblock %} +
    + {%- if page_toc_html -%}
    + {% block page_toc %} + {% if page_toc_html %}
    On this page
    {{ page_toc_html }}
    + {% endif %} + {% endblock %}
    + {%- endif -%} diff --git a/frappe/templates/emails/new_notification.html b/frappe/templates/emails/new_notification.html index fb1fc98901..4eea49a712 100644 --- a/frappe/templates/emails/new_notification.html +++ b/frappe/templates/emails/new_notification.html @@ -8,6 +8,5 @@ {% endif %} - diff --git a/frappe/templates/includes/breadcrumbs.html b/frappe/templates/includes/breadcrumbs.html index 4cb3ef5c0c..e281c4b111 100644 --- a/frappe/templates/includes/breadcrumbs.html +++ b/frappe/templates/includes/breadcrumbs.html @@ -1,5 +1,5 @@ {%- if not no_breadcrumbs and parents -%} -
    +