diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml new file mode 100644 index 0000000000..4b1147e79f --- /dev/null +++ b/.github/workflows/docker-release.yml @@ -0,0 +1,14 @@ +name: Trigger Docker build on release +on: + release: + types: [released] +jobs: + curl: + runs-on: ubuntu-latest + container: + image: alpine:latest + steps: + - name: curl + run: | + apk add curl bash + curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests diff --git a/.travis.yml b/.travis.yml index 9fab56188b..63895675ea 100644 --- a/.travis.yml +++ b/.travis.yml @@ -47,23 +47,11 @@ matrix: script: bench --site test_site run-ui-tests frappe --headless before_install: - # do we really want to run travis? + # do we really want to run travis? - | - ONLY_DOCS_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.(md|png|jpg|jpeg)$|^.github|LICENSE' ; echo $?) - ONLY_JS_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.js$' ; echo $?) - ONLY_PY_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.py$' ; echo $?) - - if [[ $ONLY_DOCS_CHANGES == "1" ]]; then - echo "Only docs were updated, stopping build process."; - exit; - fi - if [[ $ONLY_JS_CHANGES == "1" && $TYPE == "server" ]]; then - echo "Only JavaScript code was updated; Stopping Python build process."; - exit; - fi - if [[ $ONLY_PY_CHANGES == "1" && $TYPE == "ui" ]]; then - echo "Only Python code was updated, stopping Cypress build process."; - exit; + python ./.travis/roulette.py + if [[ $? 
!= 2 ]];then + exit; fi # install wkhtmltopdf diff --git a/.travis/roulette.py b/.travis/roulette.py new file mode 100644 index 0000000000..4d83137199 --- /dev/null +++ b/.travis/roulette.py @@ -0,0 +1,54 @@ +# if the script ends with exit code 0, then no tests are run further, else all tests are run +import os +import re +import shlex +import subprocess +import sys + + +def get_output(command, shell=True): + print(command) + command = shlex.split(command) + return subprocess.check_output(command, shell=shell, encoding="utf8").strip() + +def is_py(file): + return file.endswith("py") + +def is_js(file): + return file.endswith("js") + +def is_docs(file): + regex = re.compile('\.(md|png|jpg|jpeg)$|^.github|LICENSE') + return bool(regex.search(file)) + + +if __name__ == "__main__": + build_type = os.environ.get("TYPE") + commit_range = os.environ.get("TRAVIS_COMMIT_RANGE") + print("Build Type: {}".format(build_type)) + print("Commit Range: {}".format(commit_range)) + + try: + files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False) + except Exception: + sys.exit(2) + + if "fatal" not in files_changed: + files_list = files_changed.split() + only_docs_changed = len(list(filter(is_docs, files_list))) == len(files_list) + only_js_changed = len(list(filter(is_js, files_list))) == len(files_list) + only_py_changed = len(list(filter(is_py, files_list))) == len(files_list) + + if only_docs_changed: + print("Only docs were updated, stopping build process.") + sys.exit(0) + + if only_js_changed and build_type == "server": + print("Only JavaScript code was updated; Stopping Python build process.") + sys.exit(0) + + if only_py_changed and build_type == "ui": + print("Only Python code was updated, stopping Cypress build process.") + sys.exit(0) + + sys.exit(2) diff --git a/README.md b/README.md index 860958087e..7545249610 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@

- + frappe

@@ -33,8 +33,8 @@ Full-stack web application framework that uses Python and MariaDB on the server side and a tightly integrated client side library. Built for [ERPNext](https://erpnext.com) ### Table of Contents -* [Installation](#installation) -* [Documentation](https://frappe.io/docs) +* [Installation](https://frappeframework.com/docs/user/en/installation) +* [Documentation](https://frappeframework.com/docs) * [License](#license) ### Installation @@ -49,7 +49,7 @@ Full-stack web application framework that uses Python and MariaDB on the server ### Website For details and documentation, see the website -[https://frappe.io](https://frappe.io) +[https://frappeframework.com](https://frappeframework.com) ### License This repository has been released under the [MIT License](LICENSE). diff --git a/cypress/integration/control_duration.js b/cypress/integration/control_duration.js index f304abd3d9..edad759216 100644 --- a/cypress/integration/control_duration.js +++ b/cypress/integration/control_duration.js @@ -4,14 +4,14 @@ context('Control Duration', () => { cy.visit('/desk#workspace/Website'); }); - function get_dialog_with_duration(show_days=1, show_seconds=1) { + function get_dialog_with_duration(hide_days=0, hide_seconds=0) { return cy.dialog({ title: 'Duration', fields: [{ 'fieldname': 'duration', 'fieldtype': 'Duration', - 'show_seconds': show_days, - 'show_days': show_seconds + 'hide_days': hide_days, + 'hide_seconds': hide_seconds }] }); } @@ -37,7 +37,7 @@ context('Control Duration', () => { }); it('should hide days or seconds according to duration options', () => { - get_dialog_with_duration(0, 0).as('dialog'); + get_dialog_with_duration(1, 1).as('dialog'); cy.get('.frappe-control[data-fieldname=duration] input').first().click(); cy.get('.duration-input[data-duration=days]').should('not.be.visible'); cy.get('.duration-input[data-duration=seconds]').should('not.be.visible'); diff --git a/cypress/integration/form.js b/cypress/integration/form.js index 
23fc57fc57..ef89a18e7d 100644 --- a/cypress/integration/form.js +++ b/cypress/integration/form.js @@ -9,6 +9,7 @@ context('Form', () => { it('create a new form', () => { cy.visit('/desk#Form/ToDo/New ToDo 1'); cy.fill_field('description', 'this is a test todo', 'Text Editor').blur(); + cy.wait(300); cy.get('.page-title').should('contain', 'Not Saved'); cy.server(); cy.route({ diff --git a/cypress/integration/grid_pagination.js b/cypress/integration/grid_pagination.js index f03384cb93..b383f30bb8 100644 --- a/cypress/integration/grid_pagination.js +++ b/cypress/integration/grid_pagination.js @@ -40,12 +40,12 @@ context('Grid Pagination', () => { cy.get('@table').find('.current-page-number').should('contain', '20'); cy.get('@table').find('.total-page-number').should('contain', '20'); }); - it('deletes all rows', ()=> { - cy.visit('/desk#Form/Contact/Test Contact'); - cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table'); - cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true}); - cy.get('@table').find('button.grid-remove-all-rows').click(); - cy.get('.modal-dialog .btn-primary').contains('Yes').click(); - cy.get('@table').find('.grid-body .grid-row').should('have.length', 0); - }); + // it('deletes all rows', ()=> { + // cy.visit('/desk#Form/Contact/Test Contact'); + // cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table'); + // cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true}); + // cy.get('@table').find('button.grid-remove-all-rows').click(); + // cy.get('.modal-dialog .btn-primary').contains('Yes').click(); + // cy.get('@table').find('.grid-body .grid-row').should('have.length', 0); + // }); }); \ No newline at end of file diff --git a/frappe/__init__.py b/frappe/__init__.py index 8f36c0c4d3..f35409fa48 100644 --- a/frappe/__init__.py +++ b/frappe/__init__.py @@ -1145,8 +1145,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp def import_doc(path, 
ignore_links=False, ignore_insert=False, insert=False): """Import a file using Data Import.""" - from frappe.core.doctype.data_import import data_import - data_import.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert) + from frappe.core.doctype.data_import.data_import import import_doc + import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert) def copy_doc(doc, ignore_no_copy=True): """ No_copy fields also get copied.""" diff --git a/frappe/app.py b/frappe/app.py index 50d09177d6..57db867882 100644 --- a/frappe/app.py +++ b/frappe/app.py @@ -99,7 +99,7 @@ def application(request): frappe.monitor.stop(response) frappe.recorder.dump() - frappe.logger("web").info({ + frappe.logger("frappe.web").info({ "site": get_site_name(request.host), "remote_addr": getattr(request, "remote_addr", "NOTFOUND"), "base_url": getattr(request, "base_url", "NOTFOUND"), diff --git a/frappe/cache_manager.py b/frappe/cache_manager.py index 4560680653..92d12289c6 100644 --- a/frappe/cache_manager.py +++ b/frappe/cache_manager.py @@ -24,7 +24,7 @@ user_cache_keys = ("bootinfo", "user_recent", "roles", "user_doc", "lang", "has_role:Page", "has_role:Report") doctype_cache_keys = ("meta", "form_meta", "table_columns", "last_modified", - "linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map') + "linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map', 'data_import_column_header_map') def clear_user_cache(user=None): diff --git a/frappe/commands/__init__.py b/frappe/commands/__init__.py index 42f4440547..b7294fff77 100644 --- a/frappe/commands/__init__.py +++ b/frappe/commands/__init__.py @@ -43,12 +43,14 @@ def pass_context(f): return click.pass_context(_func) -def get_site(context): +def get_site(context, raise_err=True): try: site = context.sites[0] return site except (IndexError, TypeError): - raise frappe.SiteNotSpecifiedError + if raise_err: + raise frappe.SiteNotSpecifiedError + return None 
def popen(command, *args, **kwargs): output = kwargs.get('output', True) diff --git a/frappe/commands/scheduler.py b/frappe/commands/scheduler.py index 511fac6e0d..bd9c9d2cb0 100755 --- a/frappe/commands/scheduler.py +++ b/frappe/commands/scheduler.py @@ -126,7 +126,7 @@ def doctor(context, site=None): "Get diagnostic info about background workers" from frappe.utils.doctor import doctor as _doctor if not site: - site = get_site(context) + site = get_site(context, raise_err=False) return _doctor(site=site) @click.command('show-pending-jobs') diff --git a/frappe/commands/site.py b/frappe/commands/site.py index 399d0efd68..55ac05bd71 100755 --- a/frappe/commands/site.py +++ b/frappe/commands/site.py @@ -83,10 +83,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N installing = touch_file(get_site_path('locks', 'installing.lock')) - if new_site: - # run cleanup only if new-site is called - atexit.register(_new_site_cleanup, site, mariadb_root_username, mariadb_root_password) - install_db(root_login=mariadb_root_username, root_password=mariadb_root_password, db_name=db_name, admin_password=admin_password, verbose=verbose, source_sql=source_sql, force=force, reinstall=reinstall, db_password=db_password, db_type=db_type, db_host=db_host, db_port=db_port, no_mariadb_socket=no_mariadb_socket) @@ -102,18 +98,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N scheduler_status = "disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled" print("*** Scheduler is", scheduler_status, "***") -def _new_site_cleanup(site, mariadb_root_username, mariadb_root_password): - try: - installing = get_site_path('locks', 'installing.lock') - except AttributeError: - installing = os.path.join(site, 'locks', 'installing.lock') - - if installing and os.path.exists(installing): - if mariadb_root_password: - _drop_site(site, mariadb_root_username, mariadb_root_password, force=True, no_backup=True) - 
shutil.rmtree(site) - - frappe.destroy() @click.command('restore') @click.argument('sql-file-path') @@ -430,15 +414,16 @@ def remove_from_installed_apps(context, app): @click.argument('app') @click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True) @click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False) +@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False) @pass_context -def uninstall(context, app, dry_run=False, yes=False): +def uninstall(context, app, dry_run=False, yes=False, no_backup=False): "Remove app and linked modules from site" from frappe.installer import remove_app for site in context.sites: try: frappe.init(site=site) frappe.connect() - remove_app(app, dry_run, yes) + remove_app(app, dry_run, yes, no_backup) finally: frappe.destroy() if not context.sites: diff --git a/frappe/commands/utils.py b/frappe/commands/utils.py index 86db7cdc8f..28b6344b8e 100644 --- a/frappe/commands/utils.py +++ b/frappe/commands/utils.py @@ -215,12 +215,12 @@ def export_doc(context, doctype, docname): @pass_context def export_json(context, doctype, path, name=None): "Export doclist as json to the given path, use '-' as name for Singles." 
- from frappe.core.doctype.data_import import data_import + from frappe.core.doctype.data_import.data_import import export_json for site in context.sites: try: frappe.init(site=site) frappe.connect() - data_import.export_json(doctype, path, name=name) + export_json(doctype, path, name=name) finally: frappe.destroy() if not context.sites: @@ -232,12 +232,12 @@ def export_json(context, doctype, path, name=None): @pass_context def export_csv(context, doctype, path): "Export data import template with data for DocType" - from frappe.core.doctype.data_import import data_import + from frappe.core.doctype.data_import.data_import import export_csv for site in context.sites: try: frappe.init(site=site) frappe.connect() - data_import.export_csv(doctype, path) + export_csv(doctype, path) finally: frappe.destroy() if not context.sites: @@ -264,7 +264,7 @@ def export_fixtures(context, app=None): @pass_context def import_doc(context, path, force=False): "Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported" - from frappe.core.doctype.data_import import data_import + from frappe.core.doctype.data_import.data_import import import_doc if not os.path.exists(path): path = os.path.join('..', path) @@ -276,7 +276,7 @@ def import_doc(context, path, force=False): try: frappe.init(site=site) frappe.connect() - data_import.import_doc(path, overwrite=context.force) + import_doc(path, overwrite=context.force) finally: frappe.destroy() if not context.sites: @@ -293,7 +293,7 @@ def import_doc(context, path, force=False): @pass_context def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True): "Import CSV using data import" - from frappe.core.doctype.data_import import importer + from frappe.core.doctype.data_import_legacy import importer from frappe.utils.csvutils import read_csv_content site = get_site(context) @@ -329,20 +329,12 @@ def import_csv(context, path, only_insert=False, 
submit_after_import=False, igno @pass_context def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True): "Import documents in bulk from CSV or XLSX using data import" - from frappe.core.doctype.data_import.importer_new import Importer + from frappe.core.doctype.data_import.data_import import import_file site = get_site(context) frappe.init(site=site) frappe.connect() - - data_import = frappe.new_doc('Data Import Beta') - data_import.submit_after_import = submit_after_import - data_import.mute_emails = mute_emails - data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records' - - i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True) - i.import_data() - + import_file(doctype, file_path, import_type, submit_after_import, console=True) frappe.destroy() @@ -502,7 +494,17 @@ def run_tests(context, app=None, module=None, doctype=None, test=(), if coverage: # Generate coverage report only for app that is being tested source_path = os.path.join(get_bench_path(), 'apps', app or 'frappe') - cov = Coverage(source=[source_path], omit=['*.html', '*.js', '*.xml', '*.css', '*/doctype/*/*_dashboard.py', '*/patches/*']) + cov = Coverage(source=[source_path], omit=[ + '*.html', + '*.js', + '*.xml', + '*.css', + '*.less', + '*.scss', + '*.vue', + '*/doctype/*/*_dashboard.py', + '*/patches/*' + ]) cov.start() ret = frappe.test_runner.main(app, module, doctype, context.verbose, tests=tests, diff --git a/frappe/contacts/doctype/contact/contact.js b/frappe/contacts/doctype/contact/contact.js index 5285f8b85c..fae6e6515e 100644 --- a/frappe/contacts/doctype/contact/contact.js +++ b/frappe/contacts/doctype/contact/contact.js @@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", { }); frm.refresh_field("links"); + let numbers = frm.doc.phone_nos; + if (numbers && numbers.length && frappe.phone_call.handler) { + frm.add_custom_button(__('Call'), () => { + numbers 
= frm.doc.phone_nos + .sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no) + .map(d => d.phone); + frappe.phone_call.handler(numbers); + }); + } + if (frm.doc.links) { frappe.call({ method: "frappe.contacts.doctype.contact.contact.address_query", diff --git a/frappe/core/doctype/communication/communication.py b/frappe/core/doctype/communication/communication.py index 20e4774add..232d485f36 100644 --- a/frappe/core/doctype/communication/communication.py +++ b/frappe/core/doctype/communication/communication.py @@ -444,24 +444,48 @@ def update_parent_document_on_communication(doc): status_field = parent.meta.get_field("status") if status_field: - options = (status_field.options or '').splitlines() + options = (status_field.options or "").splitlines() # if status has a "Replied" option, then update the status for received communication - if ('Replied' in options) and doc.sent_or_received=="Received": + if ("Replied" in options) and doc.sent_or_received == "Received": parent.db_set("status", "Open") + parent.run_method("handle_hold_time", "Replied") apply_assignment_rule(parent) else: # update the modified date for document parent.update_modified() update_mins_to_first_communication(parent, doc) - parent.run_method('notify_communication', doc) + set_avg_response_time(parent, doc) + parent.run_method("notify_communication", doc) parent.notify_update() def update_mins_to_first_communication(parent, communication): - if parent.meta.has_field('mins_to_first_response') and not parent.get('mins_to_first_response'): + if parent.meta.has_field("mins_to_first_response") and not parent.get("mins_to_first_response"): if is_system_user(communication.sender): first_responded_on = communication.creation - if parent.meta.has_field('first_responded_on') and communication.sent_or_received == "Sent": - parent.db_set('first_responded_on', first_responded_on) - parent.db_set('mins_to_first_response', round(time_diff_in_seconds(first_responded_on, parent.creation) / 
60), 2) + if parent.meta.has_field("first_responded_on") and communication.sent_or_received == "Sent": + parent.db_set("first_responded_on", first_responded_on) + parent.db_set("mins_to_first_response", round(time_diff_in_seconds(first_responded_on, parent.creation) / 60), 2) + +def set_avg_response_time(parent, communication): + if parent.meta.has_field("avg_response_time") and communication.sent_or_received == "Sent": + # avg response time for all the responses + communications = frappe.get_list("Communication", filters={ + "reference_doctype": parent.doctype, + "reference_name": parent.name + }, + fields=["sent_or_received", "name", "creation"], + order_by="creation" + ) + + if len(communications): + response_times = [] + for i in range(len(communications)): + if communications[i].sent_or_received == "Sent" and communications[i-1].sent_or_received == "Received": + response_time = round(time_diff_in_seconds(communications[i].creation, communications[i-1].creation), 2) + if response_time > 0: + response_times.append(response_time) + if response_times: + avg_response_time = sum(response_times) / len(response_times) + parent.db_set("avg_response_time", avg_response_time) \ No newline at end of file diff --git a/frappe/core/doctype/data_export/exporter.py b/frappe/core/doctype/data_export/exporter.py index 6518c59653..e4d2ff2af6 100644 --- a/frappe/core/doctype/data_export/exporter.py +++ b/frappe/core/doctype/data_export/exporter.py @@ -9,7 +9,7 @@ import frappe.permissions import re, csv, os from frappe.utils.csvutils import UnicodeWriter from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint -from frappe.core.doctype.data_import.importer import get_data_keys +from frappe.core.doctype.data_import_legacy.importer import get_data_keys from six import string_types from frappe.core.doctype.access_log.access_log import make_access_log diff --git a/frappe/core/doctype/data_import/README.md b/frappe/core/doctype/data_import/README.md deleted file 
mode 100644 index 7bd4ac809b..0000000000 --- a/frappe/core/doctype/data_import/README.md +++ /dev/null @@ -1 +0,0 @@ -Bulk import / update of data via file upload in Excel or CSV. \ No newline at end of file diff --git a/frappe/core/doctype/data_import/data_import.css b/frappe/core/doctype/data_import/data_import.css new file mode 100644 index 0000000000..5206540a33 --- /dev/null +++ b/frappe/core/doctype/data_import/data_import.css @@ -0,0 +1,3 @@ +.warnings .warning { + margin-bottom: 40px; +} diff --git a/frappe/core/doctype/data_import/data_import.js b/frappe/core/doctype/data_import/data_import.js index 9391b262d7..81a7bc9705 100644 --- a/frappe/core/doctype/data_import/data_import.js +++ b/frappe/core/doctype/data_import/data_import.js @@ -1,324 +1,522 @@ -// Copyright (c) 2017, Frappe Technologies and contributors +// Copyright (c) 2019, Frappe Technologies and contributors // For license information, please see license.txt frappe.ui.form.on('Data Import', { - onload: function(frm) { - if (frm.doc.__islocal) { - frm.set_value("action", ""); - } - - frappe.call({ - method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes", - callback: function (r) { - let importable_doctypes = r.message; - frm.set_query("reference_doctype", function () { - return { - "filters": { - "issingle": 0, - "istable": 0, - "name": ['in', importable_doctypes] - } - }; - }); + setup(frm) { + frappe.realtime.on('data_import_refresh', ({ data_import }) => { + frm.import_in_progress = false; + if (data_import !== frm.doc.name) return; + frappe.model.clear_doc('Data Import', frm.doc.name); + frappe.model.with_doc('Data Import', frm.doc.name).then(() => { + frm.refresh(); + }); + }); + frappe.realtime.on('data_import_progress', data => { + frm.import_in_progress = true; + if (data.data_import !== frm.doc.name) { + return; } - }), + let percent = Math.floor((data.current * 100) / data.total); + let seconds = Math.floor(data.eta); + let minutes = Math.floor(data.eta / 60); 
+ let eta_message = + // prettier-ignore + seconds < 60 + ? __('About {0} seconds remaining', [seconds]) + : minutes === 1 + ? __('About {0} minute remaining', [minutes]) + : __('About {0} minutes remaining', [minutes]); - // should never check public - frm.fields_dict["import_file"].df.is_private = 1; + let message; + if (data.success) { + let message_args = [data.current, data.total, eta_message]; + message = + frm.doc.import_type === 'Insert New Records' + ? __('Importing {0} of {1}, {2}', message_args) + : __('Updating {0} of {1}, {2}', message_args); + } + if (data.skipping) { + message = __('Skipping {0} of {1}, {2}', [ + data.current, + data.total, + eta_message + ]); + } + frm.dashboard.show_progress(__('Import Progress'), percent, message); + frm.page.set_indicator(__('In Progress'), 'orange'); - frappe.realtime.on("data_import_progress", function(data) { - if (data.data_import === frm.doc.name) { - if (data.reload && data.reload === true) { - frm.reload_doc(); - } - if (data.progress) { - let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar"); - if (progress_bar) { - $(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped"); - $(progress_bar).css("width", data.progress + "%"); - } - } + // hide progress when complete + if (data.current === data.total) { + setTimeout(() => { + frm.dashboard.hide(); + frm.refresh(); + }, 2000); } }); + + frm.set_query('reference_doctype', () => { + return { + filters: { + name: ['in', frappe.boot.user.can_import] + } + }; + }); + + frm.get_field('import_file').df.options = { + restrictions: { + allowed_file_types: ['.csv', '.xls', '.xlsx'] + } + }; + + frm.has_import_file = () => { + return frm.doc.import_file || frm.doc.google_sheets_url; + }; }, - reference_doctype: function(frm){ - if (frm.doc.reference_doctype) { - frappe.model.with_doctype(frm.doc.reference_doctype); + refresh(frm) { + frm.page.hide_icon_group(); + frm.trigger('update_indicators'); + 
frm.trigger('import_file'); + frm.trigger('show_import_log'); + frm.trigger('show_import_warnings'); + frm.trigger('toggle_submit_after_import'); + frm.trigger('show_import_status'); + frm.trigger('show_report_error_button'); + + if (frm.doc.status === 'Partial Success') { + frm.add_custom_button(__('Export Errored Rows'), () => + frm.trigger('export_errored_rows') + ); + } + + if (frm.doc.status.includes('Success')) { + frm.add_custom_button( + __('Go to {0} List', [frm.doc.reference_doctype]), + () => frappe.set_route('List', frm.doc.reference_doctype) + ); } }, - refresh: function(frm) { + onload_post_render(frm) { + frm.trigger('update_primary_action'); + }, + + update_primary_action(frm) { frm.disable_save(); - frm.dashboard.clear_headline(); - if (frm.doc.reference_doctype && !frm.doc.import_file) { - frm.page.set_indicator(__('Attach file'), 'orange'); - } else { - if (frm.doc.import_status) { - const listview_settings = frappe.listview_settings['Data Import']; - const indicator = listview_settings.get_indicator(frm.doc); - - frm.page.set_indicator(indicator[0], indicator[1]); - - if (frm.doc.import_status === "In Progress") { - frm.dashboard.add_progress("Data Import Progress", "0"); - frm.set_read_only(); - frm.refresh_fields(); - } + if (frm.doc.status !== 'Success') { + if (!frm.is_new() && (frm.has_import_file())) { + let label = + frm.doc.status === 'Pending' ? 
__('Start Import') : __('Retry'); + frm.page.set_primary_action(label, () => frm.events.start_import(frm)); + } else { + frm.page.set_primary_action(__('Save'), () => frm.save()); } } + }, - if (frm.doc.reference_doctype) { - frappe.model.with_doctype(frm.doc.reference_doctype); + update_indicators(frm) { + const indicator = frappe.get_indicator(frm.doc); + if (indicator) { + frm.page.set_indicator(indicator[0], indicator[1]); + } else { + frm.page.clear_indicator(); } + }, - if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") { - frm.set_df_property("action", "read_only", 1); + show_import_status(frm) { + let import_log = JSON.parse(frm.doc.import_log || '[]'); + let successful_records = import_log.filter(log => log.success); + let failed_records = import_log.filter(log => !log.success); + if (successful_records.length === 0) return; + + let message; + if (failed_records.length === 0) { + let message_args = [successful_records.length]; + if (frm.doc.import_type === 'Insert New Records') { + message = + successful_records.length > 1 + ? __('Successfully imported {0} records.', message_args) + : __('Successfully imported {0} record.', message_args); + } else { + message = + successful_records.length > 1 + ? __('Successfully updated {0} records.', message_args) + : __('Successfully updated {0} record.', message_args); + } + } else { + let message_args = [successful_records.length, import_log.length]; + if (frm.doc.import_type === 'Insert New Records') { + message = + successful_records.length > 1 + ? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args) + : __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args); + } else { + message = + successful_records.length > 1 + ? __('Successfully updated {0} records out of {1}. 
Click on Export Errored Rows, fix the errors and import again.', message_args) + : __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args); + } } + frm.dashboard.set_headline(message); + }, - frm.add_custom_button(__("Help"), function() { - frappe.help.show_video("6wiriRKPhmg"); - }); + show_report_error_button(frm) { + if (frm.doc.status === 'Error') { + frappe.db + .get_list('Error Log', { + filters: { method: frm.doc.name }, + fields: ['method', 'error'], + order_by: 'creation desc', + limit: 1 + }) + .then(result => { + if (result.length > 0) { + frm.add_custom_button('Report Error', () => { + let fake_xhr = { + responseText: JSON.stringify({ + exc: result[0].error + }) + }; + frappe.request.report_error(fake_xhr, {}); + }); + } + }); + } + }, - if (frm.doc.reference_doctype && frm.doc.docstatus === 0) { - frm.add_custom_button(__("Download template"), function() { - frappe.data_import.download_dialog(frm).show(); + start_import(frm) { + frm + .call({ + method: 'form_start_import', + args: { data_import: frm.doc.name }, + btn: frm.page.btn_primary + }) + .then(r => { + if (r.message === true) { + frm.disable_save(); + } + }); + }, + + download_template(frm) { + if ( + frm.data_exporter && + frm.data_exporter.doctype === frm.doc.reference_doctype + ) { + frm.data_exporter.exporting_for = frm.doc.import_type; + frm.data_exporter.dialog.show(); + } else { + frappe.require('/assets/js/data_import_tools.min.js', () => { + frm.data_exporter = new frappe.data_import.DataExporter( + frm.doc.reference_doctype, + frm.doc.import_type + ); }); } + }, - if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows && - frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) { - frm.page.set_primary_action(__("Start Import"), function() { - frappe.call({ - btn: frm.page.btn_primary, - method: "frappe.core.doctype.data_import.data_import.import_data", - args: 
{ - data_import: frm.doc.name - } - }); - }).addClass('btn btn-primary'); - } + reference_doctype(frm) { + frm.trigger('toggle_submit_after_import'); + }, - if (frm.doc.log_details) { - frm.events.create_log_table(frm); - } else { - $(frm.fields_dict.import_log.wrapper).empty(); + toggle_submit_after_import(frm) { + frm.toggle_display('submit_after_import', false); + let doctype = frm.doc.reference_doctype; + if (doctype) { + frappe.model.with_doctype(doctype, () => { + let meta = frappe.get_meta(doctype); + frm.toggle_display('submit_after_import', meta.is_submittable); + }); } }, - action: function(frm) { - if(!frm.doc.action) return; - if(!frm.doc.reference_doctype) { - frappe.msgprint(__("Please select document type first.")); - frm.set_value("action", ""); + google_sheets_url(frm) { + if (!frm.is_dirty()) { + frm.trigger('import_file'); + } else { + frm.trigger('update_primary_action'); + } + }, + + refresh_google_sheet(frm) { + frm.trigger('import_file'); + }, + + import_file(frm) { + frm.toggle_display('section_import_preview', frm.has_import_file()); + if (!frm.has_import_file()) { + frm.get_field('import_preview').$wrapper.empty(); + return; + } else { + frm.trigger('update_primary_action'); + } + + // load import preview + frm.get_field('import_preview').$wrapper.empty(); + $('') + .html(__('Loading import file...')) + .appendTo(frm.get_field('import_preview').$wrapper); + + frm + .call({ + method: 'get_preview_from_template', + args: { + data_import: frm.doc.name, + import_file: frm.doc.import_file, + google_sheets_url: frm.doc.google_sheets_url + }, + error_handlers: { + TimestampMismatchError() { + // ignore this error + } + } + }) + .then(r => { + let preview_data = r.message; + frm.events.show_import_preview(frm, preview_data); + frm.events.show_import_warnings(frm, preview_data); + }); + }, + + show_import_preview(frm, preview_data) { + let import_log = JSON.parse(frm.doc.import_log || '[]'); + + if ( + frm.import_preview && + 
frm.import_preview.doctype === frm.doc.reference_doctype + ) { + frm.import_preview.preview_data = preview_data; + frm.import_preview.import_log = import_log; + frm.import_preview.refresh(); return; } - if(frm.doc.action == "Insert new records") { - frm.doc.insert_new = 1; - } else if (frm.doc.action == "Update records"){ - frm.doc.overwrite = 1; + frappe.require('/assets/js/data_import_tools.min.js', () => { + frm.import_preview = new frappe.data_import.ImportPreview({ + wrapper: frm.get_field('import_preview').$wrapper, + doctype: frm.doc.reference_doctype, + preview_data, + import_log, + frm, + events: { + remap_column(changed_map) { + let template_options = JSON.parse(frm.doc.template_options || '{}'); + template_options.remap_column = template_options.remap_column || {}; + Object.assign(template_options.remap_column, changed_map); + frm.set_value('template_options', JSON.stringify(template_options)); + frm.save().then(() => frm.trigger('import_file')); + } + } + }); + }); + }, + + export_errored_rows(frm) { + open_url_post( + '/api/method/frappe.core.doctype.data_import.data_import.download_errored_template', + { + data_import_name: frm.doc.name + } + ); + }, + + show_import_warnings(frm, preview_data) { + let warnings = JSON.parse(frm.doc.template_warnings || '[]'); + warnings = warnings.concat(preview_data.warnings || []); + + frm.toggle_display('import_warnings_section', warnings.length > 0); + if (warnings.length === 0) { + frm.get_field('import_warnings').$wrapper.html(''); + return; } - frm.save(); + + // group warnings by row + let warnings_by_row = {}; + let other_warnings = []; + for (let warning of warnings) { + if (warning.row) { + warnings_by_row[warning.row] = warnings_by_row[warning.row] || []; + warnings_by_row[warning.row].push(warning); + } else { + other_warnings.push(warning); + } + } + + let html = ''; + html += Object.keys(warnings_by_row) + .map(row_number => { + let message = warnings_by_row[row_number] + .map(w => { + if (w.field) { + 
let label = + w.field.label + + (w.field.parent !== frm.doc.reference_doctype + ? ` (${w.field.parent})` + : ''); + return `
  • ${label}: ${w.message}
  • `; + } + return `
  • ${w.message}
  • `; + }) + .join(''); + return ` +
    +
    ${__('Row {0}', [row_number])}
    +
      ${message}
    +
    + `; + }) + .join(''); + + html += other_warnings + .map(warning => { + let header = ''; + if (warning.col) { + header = __('Column {0}', [warning.col]); + } + return ` +
    +
    ${header}
    +
    ${warning.message}
    +
    + `; + }) + .join(''); + frm.get_field('import_warnings').$wrapper.html(` +
    +
    ${html}
    +
    + `); }, - only_update: function(frm) { - frm.save(); + show_failed_logs(frm) { + frm.trigger('show_import_log'); }, - submit_after_import: function(frm) { - frm.save(); + show_import_log(frm) { + let import_log = JSON.parse(frm.doc.import_log || '[]'); + let logs = import_log; + frm.toggle_display('import_log', false); + frm.toggle_display('import_log_section', logs.length > 0); + + if (logs.length === 0) { + frm.get_field('import_log_preview').$wrapper.empty(); + return; + } + + let rows = logs + .map(log => { + let html = ''; + if (log.success) { + if (frm.doc.import_type === 'Insert New Records') { + html = __('Successfully imported {0}', [ + `${frappe.utils.get_form_link( + frm.doc.reference_doctype, + log.docname, + true + )}` + ]); + } else { + html = __('Successfully updated {0}', [ + `${frappe.utils.get_form_link( + frm.doc.reference_doctype, + log.docname, + true + )}` + ]); + } + } else { + let messages = log.messages + .map(JSON.parse) + .map(m => { + let title = m.title ? `${m.title}` : ''; + let message = m.message ? `
    ${m.message}
    ` : ''; + return title + message; + }) + .join(''); + let id = frappe.dom.get_unique_id(); + html = `${messages} + +
    +
    +
    ${log.exception}
    +
    +
    `; + } + let indicator_color = log.success ? 'green' : 'red'; + let title = log.success ? __('Success') : __('Failure'); + + if (frm.doc.show_failed_logs && log.success) { + return ''; + } + + return ` + ${log.row_indexes.join(', ')} + +
    ${title}
    + + + ${html} + + `; + }) + .join(''); + + if (!rows && frm.doc.show_failed_logs) { + rows = ` + ${__('No failed logs')} + `; + } + + frm.get_field('import_log_preview').$wrapper.html(` + + + + + + + ${rows} +
    ${__('Row Number')}${__('Status')}${__('Message')}
    + `); }, - skip_errors: function(frm) { - frm.save(); - }, + show_missing_link_values(frm, missing_link_values) { + let can_be_created_automatically = missing_link_values.every( + d => d.has_one_mandatory_field + ); - ignore_encoding_errors: function(frm) { - frm.save(); - }, + let html = missing_link_values + .map(d => { + let doctype = d.doctype; + let values = d.missing_values; + return ` +
    ${doctype}
    +
      ${values.map(v => `
    • ${v}
    • `).join('')}
    + `; + }) + .join(''); - no_email: function(frm) { - frm.save(); - }, - - show_only_errors: function(frm) { - frm.events.create_log_table(frm); - }, - - create_log_table: function(frm) { - let msg = JSON.parse(frm.doc.log_details); - var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty(); - $(frappe.render_template("log_details", { - data: msg.messages, - import_status: frm.doc.import_status, - show_only_errors: frm.doc.show_only_errors, - })).appendTo($log_wrapper); + if (can_be_created_automatically) { + // prettier-ignore + let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?'); + frappe.confirm(message + html, () => { + frm + .call('create_missing_link_values', { + missing_link_values + }) + .then(r => { + let records = r.message; + frappe.msgprint( + __('Created {0} records successfully.', [records.length]) + ); + }); + }); + } else { + frappe.msgprint( + // prettier-ignore + __('The following records needs to be created before we can import your file.') + html + ); + } } }); - -frappe.provide('frappe.data_import'); -frappe.data_import.download_dialog = function(frm) { - var dialog; - const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden; - const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields); - - const get_doctype_checkbox_fields = () => { - return dialog.fields.filter(df => df.fieldname.endsWith('_fields')) - .map(df => dialog.fields_dict[df.fieldname]); - }; - - const doctype_fields = get_fields(frm.doc.reference_doctype) - .map(df => { - let reqd = (df.reqd || df.fieldname == 'naming_series') ? 
1 : 0; - return { - label: df.label, - reqd: reqd, - danger: reqd, - value: df.fieldname, - checked: 1 - }; - }); - - let fields = [ - { - "label": __("Select Columns"), - "fieldname": "select_columns", - "fieldtype": "Select", - "options": "All\nMandatory\nManually", - "reqd": 1, - "onchange": function() { - const fields = get_doctype_checkbox_fields(); - fields.map(f => f.toggle(true)); - if(this.value == 'Mandatory' || this.value == 'Manually') { - checkbox_toggle(true); - fields.map(multicheck_field => { - multicheck_field.options.map(option => { - if(!option.reqd) return; - $(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`) - .prop('checked', false) - .trigger('click'); - }); - }); - } else if(this.value == 'All'){ - $(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`) - .prop('disabled', true); - } - } - }, - { - "label": __("File Type"), - "fieldname": "file_type", - "fieldtype": "Select", - "options": "Excel\nCSV", - "default": "Excel" - }, - { - "label": __("Download with Data"), - "fieldname": "with_data", - "fieldtype": "Check", - "hidden": !frm.doc.overwrite, - "default": 1 - }, - { - "label": __("Select All"), - "fieldname": "select_all", - "fieldtype": "Button", - "depends_on": "eval:doc.select_columns=='Manually'", - click: function() { - checkbox_toggle(); - } - }, - { - "label": __("Unselect All"), - "fieldname": "unselect_all", - "fieldtype": "Button", - "depends_on": "eval:doc.select_columns=='Manually'", - click: function() { - checkbox_toggle(true); - } - }, - { - "label": frm.doc.reference_doctype, - "fieldname": "doctype_fields", - "fieldtype": "MultiCheck", - "options": doctype_fields, - "columns": 2, - "hidden": 1 - } - ]; - - const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype) - .map(df => { - return { - "label": df.options, - "fieldname": df.fieldname + '_fields', - "fieldtype": "MultiCheck", - "options": frappe.meta.get_docfields(df.options) - .filter(filter_fields) - 
.map(df => ({ - label: df.label, - reqd: df.reqd ? 1 : 0, - value: df.fieldname, - checked: 1, - danger: df.reqd - })), - "columns": 2, - "hidden": 1 - }; - }); - - fields = fields.concat(child_table_fields); - - dialog = new frappe.ui.Dialog({ - title: __('Download Template'), - fields: fields, - primary_action: function(values) { - var data = values; - if (frm.doc.reference_doctype) { - var export_params = () => { - let columns = {}; - if(values.select_columns) { - columns = get_doctype_checkbox_fields().reduce((columns, field) => { - const options = field.get_checked_options(); - columns[field.df.label] = options; - return columns; - }, {}); - } - - return { - doctype: frm.doc.reference_doctype, - parent_doctype: frm.doc.reference_doctype, - select_columns: JSON.stringify(columns), - with_data: frm.doc.overwrite && data.with_data, - all_doctypes: true, - file_type: data.file_type, - template: true - }; - }; - let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data'; - open_url_post(get_template_url, export_params()); - } else { - frappe.msgprint(__("Please select the Document Type.")); - } - dialog.hide(); - }, - primary_action_label: __('Download') - }); - - $(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]') - .wrapAll('
    '); - const button_container = $(dialog.body).find('.inline-buttons'); - button_container.addClass('flex'); - $(button_container).find('.frappe-control').map((index, button) => { - $(button).css({"margin-right": "1em"}); - }); - - function checkbox_toggle(checked=false) { - $(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => { - $(element).find(`:checkbox`).prop("checked", checked).trigger('click'); - }); - } - - return dialog; -}; diff --git a/frappe/core/doctype/data_import/data_import.json b/frappe/core/doctype/data_import/data_import.json index 11c8368e00..177252ea22 100644 --- a/frappe/core/doctype/data_import/data_import.json +++ b/frappe/core/doctype/data_import/data_import.json @@ -1,767 +1,192 @@ { - "allow_copy": 1, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, - "autoname": "", - "beta": 0, - "creation": "2016-12-09 14:27:32.720061", - "custom": 0, - "docstatus": 0, - "doctype": "DocType", - "document_type": "Document", - "editable_grid": 1, - "engine": "InnoDB", + "actions": [], + "autoname": "format:{reference_doctype} Import on {creation}", + "beta": 1, + "creation": "2019-08-04 14:16:08.318714", + "doctype": "DocType", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "import_type", + "download_template", + "import_file", + "html_5", + "google_sheets_url", + "refresh_google_sheet", + "column_break_5", + "status", + "submit_after_import", + "mute_emails", + "template_options", + "import_warnings_section", + "template_warnings", + "import_warnings", + "section_import_preview", + "import_preview", + "import_log_section", + "import_log", + "show_failed_logs", + "import_log_preview" + ], "fields": [ { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", - "fieldname": "reference_doctype", - "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 1, - 
"ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 1, - "in_standard_filter": 0, - "label": "Document Type", - "length": 0, - "no_copy": 0, - "options": "DocType", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "action", - "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Action", - "length": 0, - "no_copy": 0, - "options": "Insert new records\nUpdate records", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, + "fieldname": "reference_doctype", + "fieldtype": "Link", + "in_list_view": 1, + "label": "Document Type", + "options": "DocType", "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "set_only_once": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "eval:!doc.overwrite", - "description": "New data will be inserted.", - "fieldname": "insert_new", - "fieldtype": "Check", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Insert new records", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - 
"reqd": 0, - "search_index": 0, - "set_only_once": 1, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_type", + "fieldtype": "Select", + "in_list_view": 1, + "label": "Import Type", + "options": "\nInsert New Records\nUpdate Existing Records", + "reqd": 1, + "set_only_once": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "eval:!doc.insert_new", - "description": "If you are updating/overwriting already created records.", - "fieldname": "overwrite", - "fieldtype": "Check", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Update records", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 1, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "overwrite", - "description": "If you don't want to create any new records while updating the older records.", - "fieldname": "only_update", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Don't create new records", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - 
"bold": 0, - "collapsible": 0, - "collapsible_depends_on": "", - "columns": 0, - "depends_on": "eval:(!doc.__islocal)", - "fieldname": "section_break_4", - "fieldtype": "Section Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", + "depends_on": "eval:!doc.__islocal", "fieldname": "import_file", "fieldtype": "Attach", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Attach file for Import", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "in_list_view": 1, + "label": "Import File" }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "column_break_4", - "fieldtype": "Column Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - 
"set_only_once": 0, - "translatable": 0, - "unique": 0 + "fieldname": "import_preview", + "fieldtype": "HTML", + "label": "Import Preview" }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "eval: doc.import_status == \"Partially Successful\"", - "description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.", - "fieldname": "error_file", - "fieldtype": "Attach", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Generated File", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, - { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "collapsible_depends_on": "", - "columns": 0, - "depends_on": "eval:(!doc.__islocal)", - "fieldname": "section_break_6", + "fieldname": "section_import_preview", "fieldtype": "Section Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "label": "Preview" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "description": "If this is checked, rows with valid data 
will be imported and invalid rows will be dumped into a new file for you to import later.", - "fieldname": "skip_errors", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Skip rows with errors", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "column_break_5", + "fieldtype": "Column Break" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "", - "fieldname": "submit_after_import", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Submit after importing", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "template_options", + "fieldtype": "Code", + "hidden": 1, + "label": "Template Options", + "options": "JSON", + "read_only": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "0", - "depends_on": "", - "fieldname": "ignore_encoding_errors", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Ignore encoding errors", 
- "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_log", + "fieldtype": "Code", + "label": "Import Log", + "options": "JSON" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "1", - "depends_on": "", - "fieldname": "no_email", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Do not send Emails", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_log_section", + "fieldtype": "Section Break", + "label": "Import Log" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 1, - "collapsible_depends_on": "eval: doc.import_status == \"Failed\"", - "columns": 0, - "depends_on": "import_status", - "fieldname": "import_detail", - "fieldtype": "Section Break", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Import Log", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": 
"import_log_preview", + "fieldtype": "HTML", + "label": "Import Log Preview" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", - "fieldname": "import_status", - "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Import Status", - "length": 0, - "no_copy": 0, - "options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "default": "Pending", + "fieldname": "status", + "fieldtype": "Select", + "hidden": 1, + "label": "Status", + "options": "Pending\nSuccess\nPartial Success\nError", + "read_only": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 1, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "1", - "fieldname": "show_only_errors", - "fieldtype": "Check", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Show only errors", - "length": 0, - "no_copy": 1, - "permlevel": 0, - "precision": "", - "print_hide": 1, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "template_warnings", + "fieldtype": "Code", + "hidden": 1, + "label": "Template Warnings", + "options": "JSON" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 1, - "bold": 0, - "collapsible": 0, - "columns": 0, - "default": "", - 
"depends_on": "import_status", - "fieldname": "import_log", - "fieldtype": "HTML", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Import Log", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "default": "0", + "fieldname": "submit_after_import", + "fieldtype": "Check", + "label": "Submit After Import", + "set_only_once": 1 + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 1, - "bold": 0, - "collapsible": 0, - "columns": 0, - "depends_on": "", - "fieldname": "log_details", - "fieldtype": "Code", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Log Details", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_warnings_section", + "fieldtype": "Section Break", + "label": "Warnings" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "amended_from", - "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Amended From", - "length": 0, - "no_copy": 1, - "options": "Data Import", - "permlevel": 0, - "print_hide": 1, - "print_hide_if_no_value": 0, - "read_only": 1, 
- "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 - }, + "fieldname": "import_warnings", + "fieldtype": "HTML", + "label": "Import Warnings" + }, { - "allow_bulk_edit": 0, - "allow_in_quick_entry": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, - "fieldname": "total_rows", - "fieldtype": "Int", - "hidden": 1, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, - "label": "Total Rows", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 1, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 0, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "depends_on": "reference_doctype", + "fieldname": "download_template", + "fieldtype": "Button", + "label": "Download Template" + }, + { + "default": "1", + "fieldname": "mute_emails", + "fieldtype": "Check", + "label": "Don't Send Emails", + "set_only_once": 1 + }, + { + "default": "0", + "fieldname": "show_failed_logs", + "fieldtype": "Check", + "label": "Show Failed Logs" + }, + { + "depends_on": "eval:!doc.__islocal && !doc.import_file", + "fieldname": "html_5", + "fieldtype": "HTML", + "options": "
    Or
    " + }, + { + "depends_on": "eval:!doc.__islocal && !doc.import_file\n", + "description": "Must be a publicly accessible Google Sheets URL", + "fieldname": "google_sheets_url", + "fieldtype": "Data", + "label": "Import from Google Sheets" + }, + { + "depends_on": "eval:doc.google_sheets_url", + "fieldname": "refresh_google_sheet", + "fieldtype": "Button", + "label": "Refresh Google Sheet" } - ], - "has_web_view": 0, - "hide_heading": 0, - "hide_toolbar": 0, - "idx": 0, - "image_view": 0, - "in_create": 0, - "is_submittable": 1, - "issingle": 0, - "istable": 0, - "max_attachments": 1, - "modified": "2018-08-28 15:05:56.787108", - "modified_by": "Administrator", - "module": "Core", - "name": "Data Import", - "name_case": "", - "owner": "Administrator", + ], + "hide_toolbar": 1, + "links": [], + "modified": "2020-06-18 16:05:54.211034", + "modified_by": "Administrator", + "module": "Core", + "name": "Data Import", + "owner": "Administrator", "permissions": [ { - "amend": 0, - "cancel": 0, - "create": 1, - "delete": 1, - "email": 1, - "export": 0, - "if_owner": 0, - "import": 0, - "permlevel": 0, - "print": 0, - "read": 1, - "report": 0, - "role": "System Manager", - "set_user_permissions": 0, - "share": 1, - "submit": 1, + "create": 1, + "delete": 1, + "email": 1, + "export": 1, + "print": 1, + "read": 1, + "report": 1, + "role": "System Manager", + "share": 1, "write": 1 } - ], - "quick_entry": 0, - "read_only": 0, - "read_only_onload": 0, - "show_name_in_global_search": 0, - "sort_field": "modified", - "sort_order": "DESC", - "title_field": "", - "track_changes": 1, - "track_seen": 1, - "track_views": 0 -} + ], + "sort_field": "modified", + "sort_order": "DESC", + "track_changes": 1 +} \ No newline at end of file diff --git a/frappe/core/doctype/data_import/data_import.py b/frappe/core/doctype/data_import/data_import.py index ecf34d24b0..72de092461 100644 --- a/frappe/core/doctype/data_import/data_import.py +++ b/frappe/core/doctype/data_import/data_import.py @@ 
-1,54 +1,187 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017, Frappe Technologies and contributors +# Copyright (c) 2019, Frappe Technologies and contributors # For license information, please see license.txt from __future__ import unicode_literals -import frappe, os -from frappe import _ -import frappe.modules.import_file +import os +import frappe from frappe.model.document import Document -from frappe.utils.data import format_datetime -from frappe.core.doctype.data_import.importer import upload + +from frappe.core.doctype.data_import.importer import Importer +from frappe.core.doctype.data_import.exporter import Exporter from frappe.utils.background_jobs import enqueue +from frappe.utils.csvutils import validate_google_sheets_url +from frappe import _ class DataImport(Document): - def autoname(self): - if not self.name: - self.name = "Import on " +format_datetime(self.creation) - def validate(self): - if not self.import_file: - self.db_set("total_rows", 0) - if self.import_status == "In Progress": - frappe.throw(_("Can't save the form as data import is in progress.")) + doc_before_save = self.get_doc_before_save() + if ( + not (self.import_file or self.google_sheets_url) + or (doc_before_save and doc_before_save.import_file != self.import_file) + or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url) + ): + self.template_options = "" + self.template_warnings = "" - # validate the template just after the upload - # if there is total_rows in the doc, it means that the template is already validated and error free - if self.import_file and not self.total_rows: - upload(data_import_doc=self, from_data_import="Yes", validate_template=True) + self.validate_import_file() + self.validate_google_sheets_url() + + def validate_import_file(self): + if self.import_file: + # validate template + self.get_importer() + + def validate_google_sheets_url(self): + if not self.google_sheets_url: + return + validate_google_sheets_url(self.google_sheets_url) 
+ + def get_preview_from_template(self, import_file=None, google_sheets_url=None): + if import_file: + self.import_file = import_file + + if google_sheets_url: + self.google_sheets_url = google_sheets_url + + if not (self.import_file or self.google_sheets_url): + return + + i = self.get_importer() + return i.get_data_for_import_preview() + + def start_import(self): + from frappe.core.page.background_jobs.background_jobs import get_info + from frappe.utils.scheduler import is_scheduler_inactive + + if is_scheduler_inactive() and not frappe.flags.in_test: + frappe.throw( + _("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive") + ) + + enqueued_jobs = [d.get("job_name") for d in get_info()] + + if self.name not in enqueued_jobs: + enqueue( + start_import, + queue="default", + timeout=6000, + event="data_import", + job_name=self.name, + data_import=self.name, + now=frappe.conf.developer_mode or frappe.flags.in_test, + ) + return True + + return False + + def export_errored_rows(self): + return self.get_importer().export_errored_rows() + + def get_importer(self): + return Importer(self.reference_doctype, data_import=self) @frappe.whitelist() -def get_importable_doctypes(): - return frappe.cache().hget("can_import", frappe.session.user) +def get_preview_from_template(data_import, import_file=None, google_sheets_url=None): + return frappe.get_doc("Data Import", data_import).get_preview_from_template( + import_file, google_sheets_url + ) + @frappe.whitelist() -def import_data(data_import): - frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False) - frappe.publish_realtime("data_import_progress", {"progress": "0", - "data_import": data_import, "reload": True}, user=frappe.session.user) - - from frappe.core.page.background_jobs.background_jobs import get_info - enqueued_jobs = [d.get("job_name") for d in get_info()] - - if data_import not in enqueued_jobs: - enqueue(upload, queue='default', timeout=6000, 
event='data_import', job_name=data_import, - data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user) +def form_start_import(data_import): + return frappe.get_doc("Data Import", data_import).start_import() -def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, - insert=False, submit=False, pre_process=None): +def start_import(data_import): + """This method runs in background job""" + data_import = frappe.get_doc("Data Import", data_import) + try: + i = Importer(data_import.reference_doctype, data_import=data_import) + i.import_data() + except Exception: + frappe.db.rollback() + data_import.db_set("status", "Error") + frappe.log_error(title=data_import.name) + finally: + frappe.flags.in_import = False + + frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name}) + + +@frappe.whitelist() +def download_template( + doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV" +): + """ + Download template from Exporter + :param doctype: Document Type + :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']} + :param export_records=None: One of 'all', 'by_filter', 'blank_template' + :param export_filters: Filter dict + :param file_type: File type to export into + """ + + export_fields = frappe.parse_json(export_fields) + export_filters = frappe.parse_json(export_filters) + export_data = export_records != "blank_template" + + e = Exporter( + doctype, + export_fields=export_fields, + export_data=export_data, + export_filters=export_filters, + file_type=file_type, + export_page_length=5 if export_records == "5_records" else None, + ) + e.build_response() + + +@frappe.whitelist() +def download_errored_template(data_import_name): + data_import = frappe.get_doc("Data Import", data_import_name) + data_import.export_errored_rows() + + +def import_file( + doctype, file_path, import_type, 
submit_after_import=False, console=False +): + """ + Import documents in from CSV or XLSX using data import. + + :param doctype: DocType to import + :param file_path: Path to .csv, .xls, or .xlsx file to import + :param import_type: One of "Insert" or "Update" + :param submit_after_import: Whether to submit documents after import + :param console: Set to true if this is to be used from command line. Will print errors or progress to stdout. + """ + + data_import = frappe.new_doc("Data Import") + data_import.submit_after_import = submit_after_import + data_import.import_type = ( + "Insert New Records" if import_type.lower() == "insert" else "Update Existing Records" + ) + + i = Importer( + doctype=doctype, file_path=file_path, data_import=data_import, console=console + ) + i.import_data() + + +############## + + +def import_doc( + path, + overwrite=False, + ignore_links=False, + ignore_insert=False, + insert=False, + submit=False, + pre_process=None, +): if os.path.isdir(path): files = [os.path.join(path, f) for f in os.listdir(path)] else: @@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, for f in files: if f.endswith(".json"): frappe.flags.mute_emails = True - frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True) + frappe.modules.import_file.import_file_by_path( + f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True + ) frappe.flags.mute_emails = False frappe.db.commit() elif f.endswith(".csv"): - import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process) + import_file_by_path( + f, + ignore_links=ignore_links, + overwrite=overwrite, + submit=submit, + pre_process=pre_process, + ) frappe.db.commit() -def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True): - from frappe.utils.csvutils import 
read_csv_content - print("Importing " + path) - with open(path, "r") as infile: - upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite, - submit_after_import=submit, pre_process=pre_process) +def import_file_by_path( + path, + ignore_links=False, + overwrite=False, + submit=False, + pre_process=None, + no_email=True, +): + if path.endswith(".csv"): + print() + print("This method is deprecated.") + print('Import CSV files using the command "bench --site sitename data-import"') + print("Or use the method frappe.core.doctype.data_import.data_import.import_file") + print() + raise Exception("Method deprecated") -def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"): +def export_json( + doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc" +): def post_process(out): - del_keys = ('modified_by', 'creation', 'owner', 'idx') + del_keys = ("modified_by", "creation", "owner", "idx") for doc in out: for key in del_keys: if key in doc: @@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b for k, v in doc.items(): if isinstance(v, list): for child in v: - for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'): + for key in del_keys + ("docstatus", "doctype", "modified", "name"): if key in child: del child[key] @@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b elif frappe.db.get_value("DocType", doctype, "issingle"): out.append(frappe.get_doc(doctype).as_dict()) else: - for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by): + for doc in frappe.get_all( + doctype, + fields=["name"], + filters=filters, + or_filters=or_filters, + limit_page_length=0, + order_by=order_by, + ): out.append(frappe.get_doc(doctype, doc.name).as_dict()) post_process(out) dirname = 
os.path.dirname(path) if not os.path.exists(dirname): - path = os.path.join('..', path) + path = os.path.join("..", path) with open(path, "w") as outfile: outfile.write(frappe.as_json(out)) @@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b def export_csv(doctype, path): from frappe.core.doctype.data_export.exporter import export_data + with open(path, "wb") as csvfile: export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True) csvfile.write(frappe.response.result.encode("utf-8")) - - -@frappe.whitelist() -def export_fixture(doctype, app): - if frappe.session.user != "Administrator": - raise frappe.PermissionError - - if not os.path.exists(frappe.get_app_path(app, "fixtures")): - os.mkdir(frappe.get_app_path(app, "fixtures")) - - export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc") diff --git a/frappe/core/doctype/data_import/data_import_list.js b/frappe/core/doctype/data_import/data_import_list.js index dc06f44f59..1dee4319f9 100644 --- a/frappe/core/doctype/data_import/data_import_list.js +++ b/frappe/core/doctype/data_import/data_import_list.js @@ -1,31 +1,40 @@ +let imports_in_progress = []; + frappe.listview_settings['Data Import'] = { - add_fields: ["import_status"], - has_indicator_for_draft: 1, - get_indicator: function(doc) { - - let status = { - 'Successful': [__("Success"), "green", "import_status,=,Successful"], - 'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"], - 'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"], - 'Failed': [__("Failed"), "red", "import_status,=,Failed"], - 'Pending': [__("Pending"), "orange", "import_status,=,"] - } - - if (doc.import_status) { - return status[doc.import_status]; - } - - if (doc.docstatus == 0) { - return status['Pending']; - } - - return status['Pending']; - }, onload(listview) { - listview.page.set_title_sub(` - - 
${__('Try the new Data Import')} - - `); - } + frappe.realtime.on('data_import_progress', data => { + if (!imports_in_progress.includes(data.data_import)) { + imports_in_progress.push(data.data_import); + } + }); + frappe.realtime.on('data_import_refresh', data => { + imports_in_progress = imports_in_progress.filter( + d => d !== data.data_import + ); + listview.refresh(); + }); + }, + get_indicator: function(doc) { + var colors = { + 'Pending': 'orange', + 'Partial Success': 'orange', + 'Success': 'green', + 'In Progress': 'orange', + 'Error': 'red' + }; + let status = doc.status; + if (imports_in_progress.includes(doc.name)) { + status = 'In Progress'; + } + return [__(status), colors[status], 'status,=,' + doc.status]; + }, + formatters: { + import_type(value) { + return { + 'Insert New Records': __('Insert'), + 'Update Existing Records': __('Update') + }[value]; + } + }, + hide_name_column: true }; diff --git a/frappe/core/doctype/data_import/exporter.py b/frappe/core/doctype/data_import/exporter.py new file mode 100644 index 0000000000..3eef6ce016 --- /dev/null +++ b/frappe/core/doctype/data_import/exporter.py @@ -0,0 +1,257 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. 
See license.txt + +import frappe +from frappe.model import ( + display_fieldtypes, + no_value_fields, + table_fields as table_fieldtypes, +) +from frappe.utils.csvutils import build_csv_response +from frappe.utils.xlsxutils import build_xlsx_response + + +class Exporter: + def __init__( + self, + doctype, + export_fields=None, + export_data=False, + export_filters=None, + export_page_length=None, + file_type="CSV", + ): + """ + Exports records of a DocType for use with Importer + :param doctype: Document Type to export + :param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']} + :param export_data=False: Whether to export data as well + :param export_filters=None: The filters (dict or list) which is used to query the records + :param file_type: One of 'Excel' or 'CSV' + """ + self.doctype = doctype + self.meta = frappe.get_meta(doctype) + self.export_fields = export_fields + self.export_filters = export_filters + self.export_page_length = export_page_length + self.file_type = file_type + + # this will contain the csv content + self.csv_array = [] + + # fields that get exported + self.exportable_fields = self.get_all_exportable_fields() + self.fields = self.serialize_exportable_fields() + self.add_header() + + if export_data: + self.data = self.get_data_to_export() + else: + self.data = [] + self.add_data() + + def get_all_exportable_fields(self): + child_table_fields = [ + df.fieldname for df in self.meta.fields if df.fieldtype in table_fieldtypes + ] + + meta = frappe.get_meta(self.doctype) + exportable_fields = frappe._dict({}) + + for key, fieldnames in self.export_fields.items(): + if key == self.doctype: + # parent fields + exportable_fields[key] = self.get_exportable_fields(key, fieldnames) + + elif key in child_table_fields: + # child fields + child_df = meta.get_field(key) + child_doctype = child_df.options + exportable_fields[key] = self.get_exportable_fields(child_doctype, fieldnames) + + 
return exportable_fields + + def serialize_exportable_fields(self): + fields = [] + for key, exportable_fields in self.exportable_fields.items(): + for _df in exportable_fields: + # make a copy of df dict to avoid reference mutation + if isinstance(_df, frappe.core.doctype.docfield.docfield.DocField): + df = _df.as_dict() + else: + df = _df.copy() + + df.is_child_table_field = key != self.doctype + if df.is_child_table_field: + df.child_table_df = self.meta.get_field(key) + fields.append(df) + return fields + + def get_exportable_fields(self, doctype, fieldnames): + meta = frappe.get_meta(doctype) + + def is_exportable(df): + return df and df.fieldtype not in (display_fieldtypes + no_value_fields) + + # add name field + name_field = frappe._dict( + { + "fieldtype": "Data", + "fieldname": "name", + "label": "ID", + "reqd": 1, + "parent": doctype, + } + ) + + fields = [meta.get_field(fieldname) for fieldname in fieldnames] + fields = [df for df in fields if is_exportable(df)] + + if "name" in fieldnames: + fields = [name_field] + fields + + return fields or [] + + def get_data_to_export(self): + frappe.permissions.can_export(self.doctype, raise_exception=True) + data_to_export = [] + + table_fields = [f for f in self.exportable_fields if f != self.doctype] + data = self.get_data_as_docs() + + for doc in data: + rows = [] + rows = self.add_data_row(self.doctype, None, doc, rows, 0) + + if table_fields: + # add child table data + for f in table_fields: + for i, child_row in enumerate(doc[f]): + table_df = self.meta.get_field(f) + child_doctype = table_df.options + rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i) + + data_to_export += rows + + return data_to_export + + def add_data_row(self, doctype, parentfield, doc, rows, row_idx): + if len(rows) < row_idx + 1: + rows.append([""] * len(self.fields)) + + row = rows[row_idx] + + for i, df in enumerate(self.fields): + if df.parent == doctype: + if df.is_child_table_field and 
df.child_table_df.fieldname != parentfield: + continue + row[i] = doc.get(df.fieldname, "") + + return rows + + def get_data_as_docs(self): + def format_column_name(df): + return "`tab{0}`.`{1}`".format(df.parent, df.fieldname) + + filters = self.export_filters + + if self.meta.is_nested_set(): + order_by = "`tab{0}`.`lft` ASC".format(self.doctype) + else: + order_by = "`tab{0}`.`creation` DESC".format(self.doctype) + + parent_fields = [ + format_column_name(df) for df in self.fields if df.parent == self.doctype + ] + parent_data = frappe.db.get_list( + self.doctype, + filters=filters, + fields=["name"] + parent_fields, + limit_page_length=self.export_page_length, + order_by=order_by, + as_list=0, + ) + parent_names = [p.name for p in parent_data] + + child_data = {} + for key in self.exportable_fields: + if key == self.doctype: + continue + child_table_df = self.meta.get_field(key) + child_table_doctype = child_table_df.options + child_fields = ["name", "idx", "parent", "parentfield"] + list( + set( + [format_column_name(df) for df in self.fields if df.parent == child_table_doctype] + ) + ) + data = frappe.db.get_list( + child_table_doctype, + filters={ + "parent": ("in", parent_names), + "parentfield": child_table_df.fieldname, + "parenttype": self.doctype, + }, + fields=child_fields, + order_by="idx asc", + as_list=0, + ) + child_data[key] = data + + return self.merge_data(parent_data, child_data) + + def merge_data(self, parent_data, child_data): + for doc in parent_data: + for table_field, table_rows in child_data.items(): + doc[table_field] = [row for row in table_rows if row.parent == doc.name] + + return parent_data + + def add_header(self): + + header = [] + for df in self.fields: + is_parent = not df.is_child_table_field + if is_parent: + label = df.label + else: + label = "{0} ({1})".format(df.label, df.child_table_df.label) + + if label in header: + # this label is already in the header, + # which means two fields with the same label + # add the 
fieldname to avoid clash + if is_parent: + label = "{0}".format(df.fieldname) + else: + label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname) + header.append(label) + + self.csv_array.append(header) + + def add_data(self): + self.csv_array += self.data + + def get_csv_array(self): + return self.csv_array + + def get_csv_array_for_export(self): + csv_array = self.csv_array + + if not self.data: + # add 2 empty rows + csv_array += [[]] * 2 + + return csv_array + + def build_response(self): + if self.file_type == "CSV": + self.build_csv_response() + elif self.file_type == "Excel": + self.build_xlsx_response() + + def build_csv_response(self): + build_csv_response(self.get_csv_array_for_export(), self.doctype) + + def build_xlsx_response(self): + build_xlsx_response(self.get_csv_array_for_export(), self.doctype) diff --git a/frappe/core/doctype/data_import/exporter_new.py b/frappe/core/doctype/data_import/exporter_new.py deleted file mode 100644 index 85f933be69..0000000000 --- a/frappe/core/doctype/data_import/exporter_new.py +++ /dev/null @@ -1,267 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors -# MIT License. 
See license.txt - -import frappe -from frappe.model import display_fieldtypes, no_value_fields, table_fields -from frappe.utils.csvutils import build_csv_response -from frappe.utils.xlsxutils import build_xlsx_response -from .importer_new import INVALID_VALUES - - -class Exporter: - def __init__( - self, - doctype, - export_fields=None, - export_data=False, - export_filters=None, - export_page_length=None, - file_type="CSV", - ): - """ - Exports records of a DocType for use with Importer - :param doctype: Document Type to export - :param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']} - :param export_data=False: Whether to export data as well - :param export_filters=None: The filters (dict or list) which is used to query the records - :param file_type: One of 'Excel' or 'CSV' - """ - self.doctype = doctype - self.meta = frappe.get_meta(doctype) - self.export_fields = export_fields - self.export_filters = export_filters - self.export_page_length = export_page_length - self.file_type = file_type - - # this will contain the csv content - self.csv_array = [] - - # fields that get exported - # can be All, Mandatory or User Selected Fields - self.fields = self.get_all_exportable_fields() - self.add_header() - - if export_data: - self.data = self.get_data_to_export() - else: - self.data = [] - self.add_data() - - def get_all_exportable_fields(self): - return self.get_exportable_parent_fields() + self.get_exportable_children_fields() - - def get_exportable_parent_fields(self): - parent_fields = self.get_exportable_fields(self.doctype) - - # if autoname is based on field - # then merge ID and the field column title as "ID (Autoname Field)" - autoname = self.meta.autoname - if autoname and autoname.startswith("field:"): - fieldname = autoname[len("field:") :] - autoname_field = self.meta.get_field(fieldname) - if autoname_field: - name_field = parent_fields[0] - name_field.label = "ID 
({})".format(autoname_field.label) - # remove the autoname field as it is a duplicate of ID field - parent_fields = [ - df for df in parent_fields if df.fieldname != autoname_field.fieldname - ] - - return parent_fields - - def get_exportable_children_fields(self): - child_table_fields = [df for df in self.meta.fields if df.fieldtype in table_fields] - if self.export_fields == "Mandatory": - child_table_fields = [df for df in child_table_fields if df.reqd] - - children = [df.options for df in child_table_fields] - children_fields = [] - for child in children: - children_fields += self.get_exportable_fields(child) - - return children_fields - - def get_exportable_fields(self, doctype): - meta = frappe.get_meta(doctype) - - def is_exportable(df): - return df and df.fieldtype not in (display_fieldtypes + no_value_fields) - - # filter out invalid fieldtypes - all_fields = [df for df in meta.fields if is_exportable(df)] - # add name field - name_field = frappe._dict( - { - "fieldtype": "Data", - "fieldname": "name", - "label": "ID", - "reqd": 1, - "parent": doctype, - } - ) - all_fields = [name_field] + all_fields - - if self.export_fields == "Mandatory": - fields = [df for df in all_fields if df.reqd] - - if self.export_fields == "All": - fields = list(all_fields) - - elif isinstance(self.export_fields, dict): - fields_to_export = self.export_fields.get(doctype, []) - fields = [meta.get_field(fieldname) for fieldname in fields_to_export] - fields = [df for df in fields if is_exportable(df)] - if 'name' in fields_to_export: - fields = [name_field] + fields - - return fields or [] - - def get_data_to_export(self): - frappe.permissions.can_export(self.doctype, raise_exception=True) - - def get_column_name(df): - return "`tab{0}`.`{1}`".format(df.parent, df.fieldname) - - fields = [get_column_name(df) for df in self.fields] - filters = self.export_filters - - if self.meta.is_nested_set(): - order_by = "`tab{0}`.`lft` ASC".format(self.doctype) - else: - order_by = 
"`tab{0}`.`creation` DESC".format(self.doctype) - - data = frappe.db.get_list( - self.doctype, - filters=filters, - fields=fields, - limit_page_length=self.export_page_length, - order_by=order_by, - as_list=1, - ) - - data = self.remove_duplicate_values(data) - data = self.remove_row_gaps(data) - data = self.remove_empty_rows(data) - # data = self.remove_values_from_name_column(data) - - return data - - def remove_duplicate_values(self, data): - out = [] - - doctypes = set([df.parent for df in self.fields]) - - def name_exists_in_column_before_row(name, column_index, row_index): - column_values = [row[column_index] for i, row in enumerate(data) if i < row_index] - return name in column_values - - for i, row in enumerate(data): - # first row is fine - if i == 0: - out.append(row) - continue - - row = list(row) - for doctype in doctypes: - name_index = self.get_name_column_index(doctype) - name = row[name_index] - column_indexes = self.get_column_indexes(doctype) - - if name_exists_in_column_before_row(name, name_index, i): - # remove the values from the row - row = [None if i in column_indexes else d for i, d in enumerate(row)] - - out.append(row) - - return out - - def remove_row_gaps(self, data): - doctypes = set([df.parent for df in self.fields if df.parent != self.doctype]) - - def get_nearest_empty_row_index(col_index, row_index): - col_values = [row[col_index] for row in data] - i = row_index - 1 - while not col_values[i]: - i = i - 1 - out = i + 1 - if row_index != out: - return out - - for i, row in enumerate(data): - # if this is the row that contains parent values then skip - if row[0]: - continue - - for doctype in doctypes: - name_index = self.get_name_column_index(doctype) - name = row[name_index] - column_indexes = self.get_column_indexes(doctype) - - if not name: - continue - - row_index = get_nearest_empty_row_index(name_index, i) - if row_index: - for col_index in column_indexes: - data[row_index][col_index] = row[col_index] - row[col_index] = None 
- - return data - - # pylint: disable=R0201 - def remove_empty_rows(self, data): - return [row for row in data if any(v not in INVALID_VALUES for v in row)] - - def remove_values_from_name_column(self, data): - out = [] - name_columns = [i for i, df in enumerate(self.fields) if df.fieldname == "name"] - for row in data: - out.append(["" if i in name_columns else value for i, value in enumerate(row)]) - return out - - def get_name_column_index(self, doctype): - for i, df in enumerate(self.fields): - if df.parent == doctype and df.fieldname == "name": - return i - return -1 - - def get_column_indexes(self, doctype): - return [i for i, df in enumerate(self.fields) if df.parent == doctype] - - def add_header(self): - def get_label(df): - if df.parent == self.doctype: - return df.label - else: - return "{0} ({1})".format(df.label, df.parent) - - header = [get_label(df) for df in self.fields] - self.csv_array.append(header) - - def add_data(self): - self.csv_array += self.data - - def get_csv_array(self): - return self.csv_array - - def get_csv_array_for_export(self): - csv_array = self.csv_array - - if not self.data: - # add 2 empty rows - csv_array += [[]] * 2 - - return csv_array - - def build_response(self): - if self.file_type == 'CSV': - self.build_csv_response() - elif self.file_type == 'Excel': - self.build_xlsx_response() - - def build_csv_response(self): - build_csv_response(self.get_csv_array_for_export(), self.doctype) - - def build_xlsx_response(self): - build_xlsx_response(self.get_csv_array_for_export(), self.doctype) diff --git a/frappe/core/doctype/data_import/fixtures/sample_import_file.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file.csv new file mode 100644 index 0000000000..ef5b96df58 --- /dev/null +++ b/frappe/core/doctype/data_import/fixtures/sample_import_file.csv @@ -0,0 +1,5 @@ +Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table 
Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number +Test ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7 + , , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 ,child title again 2 ,22-09-2021 , , +Test 2 ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , , +Test 3 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71 diff --git a/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv new file mode 100644 index 0000000000..656985b519 --- /dev/null +++ b/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv @@ -0,0 +1,2 @@ +Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number +Test 26 ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7 diff --git a/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv new file mode 100644 index 0000000000..c6bff5caeb --- /dev/null +++ b/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv @@ -0,0 +1,5 @@ +Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 
1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number +Test 5 ,test description ,1 ,2 ,"" , ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7 + , , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 , ,22-09-2021 , , + ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , , +Test 4 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71 diff --git a/frappe/core/doctype/data_import/importer.py b/frappe/core/doctype/data_import/importer.py index b6d410d072..4761652c70 100644 --- a/frappe/core/doctype/data_import/importer.py +++ b/frappe/core/doctype/data_import/importer.py @@ -1,541 +1,1115 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors +# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. 
See license.txt -from __future__ import unicode_literals, print_function - -from six.moves import range -import requests -import frappe, json, os -import frappe.permissions - +from __future__ import unicode_literals +import os +import io +import frappe +import timeit +import json +from datetime import datetime from frappe import _ +from frappe.utils import cint, flt, update_progress_bar, cstr +from frappe.utils.csvutils import read_csv_content, get_csv_content_from_google_sheets +from frappe.utils.xlsxutils import ( + read_xlsx_file_from_attached_file, + read_xls_file_from_attached_file, +) +from frappe.model import no_value_fields, table_fields as table_fieldtypes -from frappe.utils.csvutils import getlink -from frappe.utils.dateutils import parse_date - -from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url -from six import text_type, string_types +INVALID_VALUES = ("", None) +MAX_ROWS_IN_PREVIEW = 10 +INSERT = "Insert New Records" +UPDATE = "Update Existing Records" -@frappe.whitelist() -def get_data_keys(): - return frappe._dict({ - "data_separator": _('Start entering data below this line'), - "main_table": _("Table") + ":", - "parent_table": _("Parent Table") + ":", - "columns": _("Column Name") + ":", - "doctype": _("DocType") + ":" - }) +class Importer: + def __init__( + self, doctype, data_import=None, file_path=None, import_type=None, console=False + ): + self.doctype = doctype + self.console = console + self.data_import = data_import + if not self.data_import: + self.data_import = frappe.get_doc(doctype="Data Import") + if import_type: + self.data_import.import_type = import_type + self.template_options = frappe.parse_json(self.data_import.template_options or "{}") + self.import_type = self.data_import.import_type -@frappe.whitelist() -def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, - update_only = None, ignore_links=False, pre_process=None, 
via_console=False, from_data_import="No", - skip_errors = True, data_import_doc=None, validate_template=False, user=None): - """upload data""" + self.import_file = ImportFile( + doctype, + file_path or data_import.google_sheets_url or data_import.import_file, + self.template_options, + self.import_type, + ) - # for translations - if user: - frappe.cache().hdel("lang", user) - frappe.set_user_lang(user) + def get_data_for_import_preview(self): + return self.import_file.get_data_for_import_preview() - if data_import_doc and isinstance(data_import_doc, string_types): - data_import_doc = frappe.get_doc("Data Import", data_import_doc) - if data_import_doc and from_data_import == "Yes": - no_email = data_import_doc.no_email - ignore_encoding_errors = data_import_doc.ignore_encoding_errors - update_only = data_import_doc.only_update - submit_after_import = data_import_doc.submit_after_import - overwrite = data_import_doc.overwrite - skip_errors = data_import_doc.skip_errors - else: - # extra input params - params = json.loads(frappe.form_dict.get("params") or '{}') - if params.get("submit_after_import"): - submit_after_import = True - if params.get("ignore_encoding_errors"): - ignore_encoding_errors = True - if not params.get("no_email"): - no_email = False - if params.get('update_only'): - update_only = True - if params.get('from_data_import'): - from_data_import = params.get('from_data_import') - if not params.get('skip_errors'): - skip_errors = params.get('skip_errors') + def before_import(self): + # set user lang for translations + frappe.cache().hdel("lang", frappe.session.user) + frappe.set_user_lang(frappe.session.user) - frappe.flags.in_import = True - frappe.flags.mute_emails = no_email + # set flags + frappe.flags.in_import = True + frappe.flags.mute_emails = self.data_import.mute_emails - def get_data_keys_definition(): - return get_data_keys() + self.data_import.db_set("template_warnings", "") - def bad_template(): - frappe.throw(_("Please do not change the 
rows above {0}").format(get_data_keys_definition().data_separator)) + def import_data(self): + self.before_import() - def check_data_length(): - if not data: - frappe.throw(_("No data found in the file. Please reattach the new file with data.")) + # parse docs from rows + payloads = self.import_file.get_payloads_for_import() - def get_start_row(): - for i, row in enumerate(rows): - if row and row[0]==get_data_keys_definition().data_separator: - return i+1 - bad_template() + # dont import if there are non-ignorable warnings + warnings = self.import_file.get_warnings() + warnings = [w for w in warnings if w.get("type") != "info"] - def get_header_row(key): - return get_header_row_and_idx(key)[0] - - def get_header_row_and_idx(key): - for i, row in enumerate(header): - if row and row[0]==key: - return row, i - return [], -1 - - def filter_empty_columns(columns): - empty_cols = list(filter(lambda x: x in ("", None), columns)) - - if empty_cols: - if columns[-1*len(empty_cols):] == empty_cols: - # filter empty columns if they exist at the end - columns = columns[:-1*len(empty_cols)] + if warnings: + if self.console: + self.print_grouped_warnings(warnings) else: - frappe.msgprint(_("Please make sure that there are no empty columns in the file."), - raise_exception=1) - - return columns - - def make_column_map(): - doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype) - if row_idx == -1: # old style + self.data_import.db_set("template_warnings", json.dumps(warnings)) return - dt = None - for i, d in enumerate(doctype_row[1:]): - if d not in ("~", "-"): - if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"): - dt, parentfield = d, None - # xls format truncates the row, so it may not have more columns - if len(doctype_row) > i+2: - parentfield = doctype_row[i+2] - doctypes.append((dt, parentfield)) - column_idx_to_fieldname[(dt, parentfield)] = {} - column_idx_to_fieldtype[(dt, parentfield)] = {} - if dt: - 
column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1] - column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1] - - def get_doc(start_idx): - if doctypes: - doc = {} - attachments = [] - last_error_row_idx = None - for idx in range(start_idx, len(rows)): - last_error_row_idx = idx # pylint: disable=W0612 - if (not doc) or main_doc_empty(rows[idx]): - for dt, parentfield in doctypes: - d = {} - for column_idx in column_idx_to_fieldname[(dt, parentfield)]: - try: - fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx] - fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx] - - if not fieldname or not rows[idx][column_idx]: - continue - - d[fieldname] = rows[idx][column_idx] - if fieldtype in ("Int", "Check"): - d[fieldname] = cint(d[fieldname]) - elif fieldtype in ("Float", "Currency", "Percent"): - d[fieldname] = flt(d[fieldname]) - elif fieldtype == "Date": - if d[fieldname] and isinstance(d[fieldname], string_types): - d[fieldname] = getdate(parse_date(d[fieldname])) - elif fieldtype == "Datetime": - if d[fieldname]: - if " " in d[fieldname]: - _date, _time = d[fieldname].split() - else: - _date, _time = d[fieldname], '00:00:00' - _date = parse_date(d[fieldname]) - d[fieldname] = get_datetime(_date + " " + _time) - else: - d[fieldname] = None - - elif fieldtype in ("Image", "Attach Image", "Attach"): - # added file to attachments list - attachments.append(d[fieldname]) - - elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]: - # as fields can be saved in the number format(long type) in data import template - d[fieldname] = cstr(d[fieldname]) - - except IndexError: - pass - - # scrub quotes from name and modified - if d.get("name") and d["name"].startswith('"'): - d["name"] = d["name"][1:-1] - - if sum([0 if not val else 1 for val in d.values()]): - d['doctype'] = dt - if dt == doctype: - doc.update(d) - else: - if not overwrite and doc.get("name"): - d['parent'] = doc["name"] - 
d['parenttype'] = doctype - d['parentfield'] = parentfield - doc.setdefault(d['parentfield'], []).append(d) - else: - break - - return doc, attachments, last_error_row_idx + # setup import log + if self.data_import.import_log: + import_log = frappe.parse_json(self.data_import.import_log) else: - doc = frappe._dict(zip(columns, rows[start_idx][1:])) - doc['doctype'] = doctype - return doc, [], None + import_log = [] - # used in testing whether a row is empty or parent row or child row - # checked only 3 first columns since first two columns can be blank for example the case of - # importing the item variant where item code and item name will be blank. - def main_doc_empty(row): - if row: - for i in range(3,0,-1): - if len(row) > i and row[i]: - return False - return True + # remove previous failures from import log + import_log = [log for log in import_log if log.get("success")] - def validate_naming(doc): - autoname = frappe.get_meta(doctype).autoname - if autoname: - if autoname[0:5] == 'field': - autoname = autoname[6:] - elif autoname == 'naming_series:': - autoname = 'naming_series' - else: - return True + # get successfully imported rows + imported_rows = [] + for log in import_log: + log = frappe._dict(log) + if log.success: + imported_rows += log.row_indexes - if (autoname not in doc) or (not doc[autoname]): - from frappe.model.base_document import get_controller - if not hasattr(get_controller(doctype), "autoname"): - frappe.throw(_("{0} is a mandatory field").format(autoname)) - return True + # start import + total_payload_count = len(payloads) + batch_size = frappe.conf.data_import_batch_size or 1000 - users = frappe.db.sql_list("select name from tabUser") - def prepare_for_insert(doc): - # don't block data import if user is not set - # migrating from another system - if not doc.owner in users: - doc.owner = frappe.session.user - if not doc.modified_by in users: - doc.modified_by = frappe.session.user + for batch_index, batched_payloads in enumerate( + 
frappe.utils.create_batch(payloads, batch_size) + ): + for i, payload in enumerate(batched_payloads): + doc = payload.doc + row_indexes = [row.row_number for row in payload.rows] + current_index = (i + 1) + (batch_index * batch_size) - def is_valid_url(url): - is_valid = False - if url.startswith("/files") or url.startswith("/private/files"): - url = get_url(url) + if set(row_indexes).intersection(set(imported_rows)): + print("Skipping imported rows", row_indexes) + if total_payload_count > 5: + frappe.publish_realtime( + "data_import_progress", + { + "current": current_index, + "total": total_payload_count, + "skipping": True, + "data_import": self.data_import.name, + }, + ) + continue - try: - r = requests.get(url) - is_valid = True if r.status_code == 200 else False - except Exception: - pass + try: + start = timeit.default_timer() + doc = self.process_doc(doc) + processing_time = timeit.default_timer() - start + eta = self.get_eta(current_index, total_payload_count, processing_time) - return is_valid + if self.console: + update_progress_bar( + "Importing {0} records".format(total_payload_count), + current_index, + total_payload_count, + ) + elif total_payload_count > 5: + frappe.publish_realtime( + "data_import_progress", + { + "current": current_index, + "total": total_payload_count, + "docname": doc.name, + "data_import": self.data_import.name, + "success": True, + "row_indexes": row_indexes, + "eta": eta, + }, + ) - def attach_file_to_doc(doctype, docname, file_url): - # check if attachment is already available - # check if the attachement link is relative or not - if not file_url: - return - if not is_valid_url(file_url): - return + import_log.append( + frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes) + ) + # commit after every successful import + frappe.db.commit() - files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and - attached_to_name='{docname}' and (file_url='{file_url}' or 
thumbnail_url='{file_url}')""".format( - doctype=doctype, - docname=docname, - file_url=file_url - )) - - if files: - # file is already attached - return - - _file = frappe.get_doc({ - "doctype": "File", - "file_url": file_url, - "attached_to_name": docname, - "attached_to_doctype": doctype, - "attached_to_field": 0, - "folder": "Home/Attachments"}) - _file.save() - - - # header - filename, file_extension = ['',''] - if not rows: - _file = frappe.get_doc("File", {"file_url": data_import_doc.import_file}) - fcontent = _file.get_content() - filename, file_extension = _file.get_extension() - - if file_extension == '.xlsx' and from_data_import == 'Yes': - from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file - rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file) - - elif file_extension == '.csv': - from frappe.utils.csvutils import read_csv_content - rows = read_csv_content(fcontent, ignore_encoding_errors) + except Exception: + import_log.append( + frappe._dict( + success=False, + exception=frappe.get_traceback(), + messages=frappe.local.message_log, + row_indexes=row_indexes, + ) + ) + frappe.clear_messages() + # rollback if exception + frappe.db.rollback() + # set status + failures = [log for log in import_log if not log.get("success")] + if len(failures) == total_payload_count: + status = "Pending" + elif len(failures) > 0: + status = "Partial Success" else: - frappe.throw(_("Unsupported File Format")) + status = "Success" - start_row = get_start_row() - header = rows[:start_row] - data = rows[start_row:] - try: - doctype = get_header_row(get_data_keys_definition().main_table)[1] - columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:]) - except: - frappe.throw(_("Cannot change header content")) - doctypes = [] - column_idx_to_fieldname = {} - column_idx_to_fieldtype = {} + if self.console: + self.print_import_log(import_log) + else: + self.data_import.db_set("status", status) + 
self.data_import.db_set("import_log", json.dumps(import_log)) - if skip_errors: - data_rows_with_error = header + self.after_import() - if submit_after_import and not cint(frappe.db.get_value("DocType", - doctype, "is_submittable")): - submit_after_import = False + return import_log - parenttype = get_header_row(get_data_keys_definition().parent_table) - - if len(parenttype) > 1: - parenttype = parenttype[1] - - # check permissions - if not frappe.permissions.can_import(parenttype or doctype): + def after_import(self): + frappe.flags.in_import = False frappe.flags.mute_emails = False - return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True} - # Throw expception in case of the empty data file - check_data_length() - make_column_map() - total = len(data) + def process_doc(self, doc): + if self.import_type == INSERT: + return self.insert_record(doc) + elif self.import_type == UPDATE: + return self.update_record(doc) - if validate_template: - if total: - data_import_doc.total_rows = total - return True + def insert_record(self, doc): + meta = frappe.get_meta(self.doctype) + new_doc = frappe.new_doc(self.doctype) + new_doc.update(doc) - if overwrite==None: - overwrite = params.get('overwrite') + if (meta.autoname or "").lower() != "prompt": + # name can only be set directly if autoname is prompt + new_doc.set("name", None) - # delete child rows (if parenttype) - parentfield = None - if parenttype: - parentfield = get_parent_field(doctype, parenttype) + new_doc.flags.updater_reference = { + "doctype": self.data_import.doctype, + "docname": self.data_import.name, + "label": _("via Data Import"), + } - if overwrite: - delete_child_rows(data, doctype) + new_doc.insert() + if meta.is_submittable and self.data_import.submit_after_import: + new_doc.submit() + return new_doc - import_log = [] - def log(**kwargs): - if via_console: - print((kwargs.get("title") + kwargs.get("message")).encode('utf-8')) - else: - import_log.append(kwargs) + def 
update_record(self, doc): + id_field = get_id_field(self.doctype) + existing_doc = frappe.get_doc(self.doctype, doc.get(id_field.fieldname)) + existing_doc.flags.updater_reference = { + "doctype": self.data_import.doctype, + "docname": self.data_import.name, + "label": _("via Data Import"), + } + existing_doc.update(doc) + existing_doc.save() + return existing_doc - def as_link(doctype, name): - if via_console: - return "{0}: {1}".format(doctype, name) - else: - return getlink(doctype, name) + def get_eta(self, current, total, processing_time): + self.last_eta = getattr(self, "last_eta", 0) + remaining = total - current + eta = processing_time * remaining + if not self.last_eta or eta < self.last_eta: + self.last_eta = eta + return self.last_eta - # publish realtime task update - def publish_progress(achieved, reload=False): - if data_import_doc: - frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)), - "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user) + def export_errored_rows(self): + from frappe.utils.csvutils import build_csv_response + + if not self.data_import: + return + + import_log = frappe.parse_json(self.data_import.import_log or "[]") + failures = [log for log in import_log if not log.get("success")] + row_indexes = [] + for f in failures: + row_indexes.extend(f.get("row_indexes", [])) + + # de duplicate + row_indexes = list(set(row_indexes)) + row_indexes.sort() + + header_row = [col.header_title for col in self.import_file.columns] + rows = [header_row] + rows += [row.data for row in self.import_file.data if row.row_number in row_indexes] + + build_csv_response(rows, self.doctype) + + def print_import_log(self, import_log): + failed_records = [log for log in import_log if not log.success] + successful_records = [log for log in import_log if log.success] + + if successful_records: + print() + print( + "Successfully imported {0} records out of {1}".format( + len(successful_records), 
len(import_log) + ) + ) + + if failed_records: + print("Failed to import {0} records".format(len(failed_records))) + file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now()) + print("Check {0} for errors".format(os.path.join("sites", file_name))) + text = "" + for w in failed_records: + text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", []))) + text += "Messages:\n{0}\n".format("\n".join(w.get("messages", []))) + text += "Traceback:\n{0}\n\n".format(w.get("exception")) + + with open(file_name, "w") as f: + f.write(text) + + def print_grouped_warnings(self, warnings): + warnings_by_row = {} + other_warnings = [] + for w in warnings: + if w.get("row"): + warnings_by_row.setdefault(w.get("row"), []).append(w) + else: + other_warnings.append(w) + + for row_number, warnings in warnings_by_row.items(): + print("Row {0}".format(row_number)) + for w in warnings: + print(w.get("message")) + + for w in other_warnings: + print(w.get("message")) - error_flag = rollback_flag = False +class ImportFile: + def __init__(self, doctype, file, template_options=None, import_type=None): + self.doctype = doctype + self.template_options = template_options or frappe._dict( + column_to_field_map=frappe._dict() + ) + self.column_to_field_map = self.template_options.column_to_field_map + self.import_type = import_type - batch_size = frappe.conf.data_import_batch_size or 1000 + self.file_doc = self.file_path = None + if isinstance(file, frappe.string_types): + if frappe.db.exists("File", {"file_url": file}): + self.file_doc = frappe.get_doc("File", {"file_url": file}) + elif 'docs.google.com/spreadsheets' in file: + self.google_sheets_url = file + elif os.path.exists(file): + self.file_path = file - for batch_start in range(0, total, batch_size): - batch = data[batch_start:batch_start + batch_size] + if not self.file_doc and not self.file_path and not self.google_sheets_url: + frappe.throw(_("Invalid template file for import")) - for i, row in enumerate(batch): - 
# bypass empty rows - if main_doc_empty(row): + self.raw_data = self.get_data_from_template_file() + self.parse_data_from_template() + + def get_data_from_template_file(self): + content = None + extension = None + + if self.file_doc: + parts = self.file_doc.get_extension() + extension = parts[1] + content = self.file_doc.get_content() + extension = extension.lstrip(".") + + elif self.file_path: + content, extension = self.read_file(self.file_path) + + elif self.google_sheets_url: + content = get_csv_content_from_google_sheets(self.google_sheets_url) + extension = 'csv' + + if not content: + frappe.throw(_("Invalid or corrupted content for import")) + + if not extension: + extension = "csv" + + if content: + return self.read_content(content, extension) + + def parse_data_from_template(self): + header = None + data = [] + + for i, row in enumerate(self.raw_data): + if all(v in INVALID_VALUES for v in row): + # empty row continue - row_idx = i + start_row - doc = None - - publish_progress(i) - - try: - doc, attachments, last_error_row_idx = get_doc(row_idx) - validate_naming(doc) - if pre_process: - pre_process(doc) - - original = None - if parentfield: - parent = frappe.get_doc(parenttype, doc["parent"]) - doc = parent.append(parentfield, doc) - parent.save() - else: - if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]): - original = frappe.get_doc(doctype, doc["name"]) - original_name = original.name - original.update(doc) - # preserve original name for case sensitivity - original.name = original_name - original.flags.ignore_links = ignore_links - original.save() - doc = original - else: - if not update_only: - doc = frappe.get_doc(doc) - prepare_for_insert(doc) - doc.flags.ignore_links = ignore_links - doc.insert() - if attachments: - # check file url and create a File document - for file_url in attachments: - attach_file_to_doc(doc.doctype, doc.name, file_url) - if submit_after_import: - doc.submit() - - # log errors - if parentfield: - 
log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)), - "link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"}) - elif submit_after_import: - log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)), - "message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"}) - elif original: - log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)), - "message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) - elif not update_only: - log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)), - "message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) - else: - log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None, - "message": "Document updation ignored", "indicator": "orange"}) - - except Exception as e: - error_flag = True - - # build error message - if frappe.local.message_log: - err_msg = "\n".join(['

    {}

    '.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) - else: - err_msg = '

    {}

    '.format(cstr(e)) - - error_trace = frappe.get_traceback() - if error_trace: - error_log_doc = frappe.log_error(error_trace) - error_link = get_absolute_url("Error Log", error_log_doc.name) - else: - error_link = None - - log(**{ - "row": row_idx + 1, - "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), - "message": err_msg, - "indicator": "red", - "link":error_link - }) - - # data with error to create a new file - # include the errored data in the last row as last_error_row_idx will not be updated for the last row - if skip_errors: - if last_error_row_idx == len(rows)-1: - last_error_row_idx = len(rows) - data_rows_with_error += rows[row_idx:last_error_row_idx] - else: - rollback_flag = True - finally: - frappe.local.message_log = [] - - start_row += batch_size - if rollback_flag: - frappe.db.rollback() - else: - frappe.db.commit() - - frappe.flags.mute_emails = False - frappe.flags.in_import = False - - log_message = {"messages": import_log, "error": error_flag} - if data_import_doc: - data_import_doc.log_details = json.dumps(log_message) - - import_status = None - if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): - import_status = "Partially Successful" - # write the file with the faulty row - file_name = 'error_' + filename + file_extension - if file_extension == '.xlsx': - from frappe.utils.xlsxutils import make_xlsx - xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") - file_data = xlsx_file.getvalue() + if not header: + header = Header(i, row, self.doctype, self.raw_data, self.column_to_field_map) else: - from frappe.utils.csvutils import to_csv - file_data = to_csv(data_rows_with_error) - _file = frappe.get_doc({ - "doctype": "File", - "file_name": file_name, - "attached_to_doctype": "Data Import", - "attached_to_name": data_import_doc.name, - "folder": "Home/Attachments", - "content": file_data}) - _file.save() - data_import_doc.error_file = _file.file_url + row_obj 
= Row(i, row, self.doctype, header, self.import_type) + data.append(row_obj) - elif error_flag: - import_status = "Failed" - else: - import_status = "Successful" + self.header = header + self.columns = self.header.columns + self.data = data - data_import_doc.import_status = import_status - data_import_doc.save() - if data_import_doc.import_status in ["Successful", "Partially Successful"]: - data_import_doc.submit() - publish_progress(100, True) - else: - publish_progress(0, True) - frappe.db.commit() - else: - return log_message + if len(data) < 1: + frappe.throw( + _("Import template should contain a Header and atleast one row."), + title=_("Template Error"), + ) -def get_parent_field(doctype, parenttype): - parentfield = None + def get_data_for_import_preview(self): + """Adds a serial number column as the first column""" - # get parentfield - if parenttype: - for d in frappe.get_meta(parenttype).get_table_fields(): - if d.options==doctype: - parentfield = d.fieldname + columns = [frappe._dict({"header_title": "Sr. 
No", "skip_import": True})] + columns += [col.as_dict() for col in self.columns] + for col in columns: + # only pick useful fields in docfields to minimise the payload + if col.df: + col.df = { + "fieldtype": col.df.fieldtype, + "fieldname": col.df.fieldname, + "label": col.df.label, + "options": col.df.options, + "parent": col.df.parent, + "reqd": col.df.reqd, + "default": col.df.default, + "read_only": col.df.read_only, + } + + data = [[row.row_number] + row.as_list() for row in self.data] + + warnings = self.get_warnings() + + out = frappe._dict() + out.data = data + out.columns = columns + out.warnings = warnings + total_number_of_rows = len(out.data) + if total_number_of_rows > MAX_ROWS_IN_PREVIEW: + out.data = out.data[:MAX_ROWS_IN_PREVIEW] + out.max_rows_exceeded = True + out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW + out.total_number_of_rows = total_number_of_rows + return out + + def get_payloads_for_import(self): + payloads = [] + # make a copy + data = list(self.data) + while data: + doc, rows, data = self.parse_next_row_for_import(data) + payloads.append(frappe._dict(doc=doc, rows=rows)) + return payloads + + def parse_next_row_for_import(self, data): + """ + Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. + Returns the doc, rows, and data without the rows. 
+ """ + doctypes = self.header.doctypes + + # first row is included by default + first_row = data[0] + rows = [first_row] + + # if there are child doctypes, find the subsequent rows + if len(doctypes) > 1: + # subsequent rows either dont have any parent value set + # or have the same value as the parent row + # we include a row if either of conditions match + parent_column_indexes = self.header.get_column_indexes(self.doctype) + parent_row_values = first_row.get_values(parent_column_indexes) + + data_without_first_row = data[1:] + for row in data_without_first_row: + row_values = row.get_values(parent_column_indexes) + # if the row is blank, it's a child row doc + if all([v in INVALID_VALUES for v in row_values]): + rows.append(row) + continue + # if the row has same values as parent row, it's a child row doc + if row_values == parent_row_values: + rows.append(row) + continue + # if any of those conditions dont match, it's the next doc break - if not parentfield: - frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) - raise Exception + parent_doc = None + for row in rows: + for doctype, table_df in doctypes: + if doctype == self.doctype and not parent_doc: + parent_doc = row.parse_doc(doctype) - return parentfield + if doctype != self.doctype and table_df: + child_doc = row.parse_doc(doctype, parent_doc, table_df) + parent_doc[table_df.fieldname] = parent_doc.get(table_df.fieldname, []) + parent_doc[table_df.fieldname].append(child_doc) -def delete_child_rows(rows, doctype): - """delete child rows for all parents""" - for p in list(set([r[1] for r in rows])): - if p: - frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) + doc = parent_doc + # check if there is atleast one row for mandatory table fields + meta = frappe.get_meta(self.doctype) + mandatory_table_fields = [ + df + for df in meta.fields + if df.fieldtype in table_fieldtypes + and df.reqd + and len(doc.get(df.fieldname, [])) == 0 + ] + 
if len(mandatory_table_fields) == 1: + self.warnings.append( + { + "row": first_row.row_number, + "message": _("There should be atleast one row for {0} table").format( + mandatory_table_fields[0].label + ), + } + ) + elif mandatory_table_fields: + fields_string = ", ".join([df.label for df in mandatory_table_fields]) + message = _("There should be atleast one row for the following tables: {0}").format( + fields_string + ) + self.warnings.append({"row": first_row.row_number, "message": message}) + + return doc, rows, data[len(rows) :] + + def get_warnings(self): + warnings = [] + for col in self.header.columns: + warnings += col.warnings + + for row in self.data: + warnings += row.warnings + + return warnings + + ###### + + def read_file(self, file_path): + extn = file_path.split(".")[1] + + file_content = None + with io.open(file_path, mode="rb") as f: + file_content = f.read() + + return file_content, extn + + def read_content(self, content, extension): + error_title = _("Template Error") + if extension not in ("csv", "xlsx", "xls"): + frappe.throw( + _("Import template should be of type .csv, .xlsx or .xls"), title=error_title + ) + + if extension == "csv": + data = read_csv_content(content) + elif extension == "xlsx": + data = read_xlsx_file_from_attached_file(fcontent=content) + elif extension == "xls": + data = read_xls_file_from_attached_file(content) + + return data + + +class Row: + link_values_exist_map = {} + + def __init__(self, index, row, doctype, header, import_type): + self.index = index + self.row_number = index + 1 + self.doctype = doctype + self.data = row + self.header = header + self.import_type = import_type + self.warnings = [] + + len_row = len(self.data) + len_columns = len(self.header.columns) + if len_row != len_columns: + less_than_columns = len_row < len_columns + message = ( + "Row has less values than columns" + if less_than_columns + else "Row has more values than columns" + ) + self.warnings.append( + {"row": self.row_number, 
"message": message,} + ) + + def parse_doc(self, doctype, parent_doc=None, table_df=None): + col_indexes = self.header.get_column_indexes(doctype, table_df) + values = self.get_values(col_indexes) + columns = self.header.get_columns(col_indexes) + doc = self._parse_doc(doctype, columns, values, parent_doc, table_df) + return doc + + def _parse_doc(self, doctype, columns, values, parent_doc=None, table_df=None): + doc = frappe._dict() + if self.import_type == INSERT: + # new_doc returns a dict with default values set + doc = frappe.new_doc( + doctype, + parent_doc=parent_doc, + parentfield=table_df.fieldname if table_df else None, + as_dict=True, + ) + + # remove standard fields and __islocal + for key in frappe.model.default_fields + ("__islocal",): + doc.pop(key, None) + + for col, value in zip(columns, values): + df = col.df + if value in INVALID_VALUES: + value = None + + if value is not None: + value = self.validate_value(value, col) + + if value is not None: + doc[df.fieldname] = self.parse_value(value, col) + + is_table = frappe.get_meta(doctype).istable + is_update = self.import_type == UPDATE + if is_table and is_update and doc.get("name") in INVALID_VALUES: + # for table rows being inserted in update + # create a new doc with defaults set + new_doc = frappe.new_doc(doctype, as_dict=True) + new_doc.update(doc) + doc = new_doc + + self.check_mandatory_fields(doctype, doc, table_df) + return doc + + def validate_value(self, value, col): + df = col.df + if df.fieldtype == "Select": + select_options = df.get_select_options() + if select_options and value not in select_options: + options_string = ", ".join([frappe.bold(d) for d in select_options]) + msg = _("Value must be one of {0}").format(options_string) + self.warnings.append( + { + "row": self.row_number, + "field": df.as_dict(convert_dates_to_str=True), + "message": msg, + } + ) + return + + elif df.fieldtype == "Link": + exists = self.link_exists(value, df) + if not exists: + msg = _("Value {0} missing 
for {1}").format( + frappe.bold(value), frappe.bold(df.options) + ) + self.warnings.append( + { + "row": self.row_number, + "field": df.as_dict(convert_dates_to_str=True), + "message": msg, + } + ) + return + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + if isinstance(value, frappe.string_types): + # value was not parsed as datetime object + self.warnings.append( + { + "row": self.row_number, + "col": col.column_number, + "field": df.as_dict(convert_dates_to_str=True), + "message": _("Value {0} must be in {1} format").format( + frappe.bold(value), frappe.bold(get_user_format(col.date_format)) + ), + } + ) + return + + return value + + def link_exists(self, value, df): + key = df.options + "::" + value + if Row.link_values_exist_map.get(key) is None: + Row.link_values_exist_map[key] = frappe.db.exists(df.options, value) + return Row.link_values_exist_map.get(key) + + def parse_value(self, value, col): + df = col.df + if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: + return value + + value = cstr(value) + + # convert boolean values to 0 or 1 + valid_check_values = ["t", "f", "true", "false", "yes", "no", "y", "n"] + if df.fieldtype == "Check" and value.lower().strip() in valid_check_values: + value = value.lower().strip() + value = 1 if value in ["t", "true", "y", "yes"] else 0 + + if df.fieldtype in ["Int", "Check"]: + value = cint(value) + elif df.fieldtype in ["Float", "Percent", "Currency"]: + value = flt(value) + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + + return value + + def get_date(self, value, column): + date_format = column.date_format + if date_format: + try: + return datetime.strptime(value, date_format) + except ValueError: + # ignore date values that don't match the format + # import will break for these values later + pass + return value + + def check_mandatory_fields(self, doctype, doc, table_df=None): + """If import type is Insert: + Check for mandatory 
fields (except table fields) in doc + if import type is Update: + Check for name field or autoname field in doc + """ + meta = frappe.get_meta(doctype) + if self.import_type == UPDATE: + if meta.istable: + # when updating records with table rows, + # there are two scenarios: + # 1. if row 'name' is provided in the template + # the table row will be updated + # 2. if row 'name' is not provided + # then a new row will be added + # so we don't need to check for mandatory + return + + # for update, only ID (name) field is mandatory + id_field = get_id_field(doctype) + if doc.get(id_field.fieldname) in INVALID_VALUES: + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} is a mandatory field").format(id_field.label), + } + ) + return + + fields = [ + df + for df in meta.fields + if df.fieldtype not in table_fieldtypes + and df.reqd + and doc.get(df.fieldname) in INVALID_VALUES + ] + + if not fields: + return + + def get_field_label(df): + return "{0}{1}".format(df.label, " ({})".format(table_df.label) if table_df else "") + + if len(fields) == 1: + field_label = get_field_label(fields[0]) + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} is a mandatory field").format(frappe.bold(field_label)), + } + ) + else: + fields_string = ", ".join([frappe.bold(get_field_label(df)) for df in fields]) + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} are mandatory fields").format(fields_string), + } + ) + + def get_values(self, indexes): + return [self.data[i] for i in indexes] + + def get(self, index): + return self.data[index] + + def as_list(self): + return self.data + + +class Header(Row): + def __init__(self, index, row, doctype, raw_data, column_to_field_map): + self.index = index + self.row_number = index + 1 + self.data = row + self.doctype = doctype + + self.seen = [] + self.columns = [] + + for j, header in enumerate(row): + column_values = [get_item_at_index(r, j) for r in raw_data] + column = 
Column( + j, header, self.doctype, column_values, column_to_field_map.get(header), self.seen + ) + self.seen.append(header) + self.columns.append(column) + + doctypes = [] + for col in self.columns: + if not col.df: + continue + if col.df.parent == self.doctype: + doctypes.append((col.df.parent, None)) + else: + doctypes.append((col.df.parent, col.df.child_table_df)) + + self.doctypes = sorted( + list(set(doctypes)), key=lambda x: -1 if x[0] == self.doctype else 1 + ) + + def get_column_indexes(self, doctype, tablefield=None): + def is_table_field(df): + if tablefield: + return df.child_table_df.fieldname == tablefield.fieldname + return True + + return [ + col.index + for col in self.columns + if not col.skip_import + and col.df + and col.df.parent == doctype + and is_table_field(col.df) + ] + + def get_columns(self, indexes): + return [self.columns[i] for i in indexes] + + +class Column: + seen = [] + fields_column_map = {} + + def __init__(self, index, header, doctype, column_values, map_to_field=None, seen=[]): + self.index = index + self.column_number = index + 1 + self.doctype = doctype + self.header_title = header + self.column_values = column_values + self.map_to_field = map_to_field + self.seen = seen + + self.date_format = None + self.df = None + self.skip_import = None + self.warnings = [] + + self.meta = frappe.get_meta(doctype) + self.parse() + self.parse_date_format() + + def parse(self): + header_title = self.header_title + column_number = str(self.column_number) + skip_import = False + + if self.map_to_field and self.map_to_field != "Don't Import": + df = get_df_for_column_header(self.doctype, self.map_to_field) + if df: + self.warnings.append( + { + "message": _("Mapping column {0} to field {1}").format( + frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) + ), + "type": "info", + } + ) + else: + self.warnings.append( + { + "message": _("Could not map column {0} to field {1}").format( + column_number, self.map_to_field + ), + 
"type": "info", + } + ) + else: + df = get_df_for_column_header(self.doctype, header_title) + # df = df_by_labels_and_fieldnames.get(header_title) + + if not df: + skip_import = True + else: + skip_import = False + + if header_title in self.seen: + self.warnings.append( + { + "col": column_number, + "message": _("Skipping Duplicate Column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + df = None + skip_import = True + elif self.map_to_field == "Don't Import": + skip_import = True + self.warnings.append( + { + "col": column_number, + "message": _("Skipping column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + elif header_title and not df: + self.warnings.append( + { + "col": column_number, + "message": _("Cannot match column {0} with any field").format( + frappe.bold(header_title) + ), + "type": "info", + } + ) + elif not header_title and not df: + self.warnings.append( + {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} + ) + + self.df = df + self.skip_import = skip_import + + def parse_date_format(self): + if self.df and self.df.fieldtype in ("Date", "Time", "Datetime"): + self.date_format = self.guess_date_format_for_column() + + def guess_date_format_for_column(self): + """ Guesses date format for a column by parsing all the values in the column, + getting the date format and then returning the one which has the maximum frequency + """ + + date_formats = [ + frappe.utils.guess_date_format(d) for d in self.column_values if isinstance(d, str) + ] + date_formats = [d for d in date_formats if d] + if not date_formats: + return + + unique_date_formats = set(date_formats) + max_occurred_date_format = max(unique_date_formats, key=date_formats.count) + + if len(unique_date_formats) > 1: + # fmt: off + message = _("The column {0} has {1} different date formats. Automatically setting {2} as the default format as it is the most common. 
Please change other values in this column to this format.") + # fmt: on + user_date_format = get_user_format(max_occurred_date_format) + self.warnings.append( + { + "col": self.column_number, + "message": message.format( + frappe.bold(self.header_title), + len(unique_date_formats), + frappe.bold(user_date_format), + ), + "type": "info", + } + ) + + return max_occurred_date_format + + def as_dict(self): + d = frappe._dict() + d.index = self.index + d.column_number = self.column_number + d.doctype = self.doctype + d.header_title = self.header_title + d.map_to_field = self.map_to_field + d.date_format = self.date_format + d.df = self.df + d.skip_import = self.skip_import + d.warnings = self.warnings + return d + + +def build_fields_dict_for_column_matching(parent_doctype): + """ + Build a dict with various keys to match with column headers and value as docfield + The keys can be label or fieldname + { + 'Customer': df1, + 'customer': df1, + 'Due Date': df2, + 'due_date': df2, + 'Item Code (Sales Invoice Item)': df3, + 'Sales Invoice Item:item_code': df3, + } + """ + + def get_standard_fields(doctype): + meta = frappe.get_meta(doctype) + if meta.istable: + standard_fields = [ + {"label": "Parent", "fieldname": "parent"}, + {"label": "Parent Type", "fieldname": "parenttype"}, + {"label": "Parent Field", "fieldname": "parentfield"}, + {"label": "Row Index", "fieldname": "idx"}, + ] + else: + standard_fields = [ + {"label": "Owner", "fieldname": "owner"}, + {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, + ] + + out = [] + for df in standard_fields: + df = frappe._dict(df) + df.parent = doctype + out.append(df) + return out + + parent_meta = frappe.get_meta(parent_doctype) + out = {} + + # doctypes and fieldname if it is a child doctype + doctypes = [[parent_doctype, None]] + [ + [df.options, df] for df in parent_meta.get_table_fields() + ] + + for doctype, table_df in doctypes: + # name field + name_by_label = ( + "ID" if doctype == 
parent_doctype else "ID ({0})".format(table_df.label) + ) + name_by_fieldname = ( + "name" if doctype == parent_doctype else "{0}.name".format(table_df.fieldname) + ) + name_df = frappe._dict( + { + "fieldtype": "Data", + "fieldname": "name", + "label": "ID", + "reqd": 1, # self.import_type == UPDATE, + "parent": doctype, + } + ) + + if doctype != parent_doctype: + name_df.is_child_table_field = True + name_df.child_table_df = table_df + + out[name_by_label] = name_df + out[name_by_fieldname] = name_df + + # other fields + fields = get_standard_fields(doctype) + frappe.get_meta(doctype).fields + for df in fields: + fieldtype = df.fieldtype or "Data" + parent = df.parent or parent_doctype + if fieldtype not in no_value_fields: + if parent_doctype == doctype: + # for parent doctypes keys will be + # Label + # label + # Label (label) + if not out.get(df.label): + # if Label is already set, don't set it again + # in case of duplicate column headers + out[df.label] = df + out[df.fieldname] = df + label_with_fieldname = "{0} ({1})".format(df.label, df.fieldname) + out[label_with_fieldname] = df + else: + # in case there are multiple table fields with the same doctype + # for child doctypes keys will be + # Label (Table Field Label) + # table_field.fieldname + table_fields = parent_meta.get( + "fields", {"fieldtype": ["in", table_fieldtypes], "options": parent} + ) + for table_field in table_fields: + by_label = "{0} ({1})".format(df.label, table_field.label) + by_fieldname = "{0}.{1}".format(table_field.fieldname, df.fieldname) + + # create a new df object to avoid mutation problems + if isinstance(df, dict): + new_df = frappe._dict(df.copy()) + else: + new_df = df.as_dict() + + new_df.is_child_table_field = True + new_df.child_table_df = table_field + out[by_label] = new_df + out[by_fieldname] = new_df + + # if autoname is based on field + # add an entry for "ID (Autoname Field)" + autoname_field = get_autoname_field(parent_doctype) + if autoname_field: + out["ID 
({})".format(autoname_field.label)] = autoname_field + # ID field should also map to the autoname field + out["ID"] = autoname_field + out["name"] = autoname_field + + return out + + +def get_df_for_column_header(doctype, header): + def build_fields_dict_for_doctype(): + return build_fields_dict_for_column_matching(doctype) + + df_by_labels_and_fieldname = frappe.cache().hget( + "data_import_column_header_map", doctype, generator=build_fields_dict_for_doctype + ) + return df_by_labels_and_fieldname.get(header) + + +# utilities + + +def get_id_field(doctype): + autoname_field = get_autoname_field(doctype) + if autoname_field: + return autoname_field + return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) + + +def get_autoname_field(doctype): + meta = frappe.get_meta(doctype) + if meta.autoname and meta.autoname.startswith("field:"): + fieldname = meta.autoname[len("field:") :] + return meta.get_field(fieldname) + + +def get_item_at_index(_list, i, default=None): + try: + a = _list[i] + except IndexError: + a = default + return a + + +def get_user_format(date_format): + return ( + date_format.replace("%Y", "yyyy") + .replace("%y", "yy") + .replace("%m", "mm") + .replace("%d", "dd") + ) diff --git a/frappe/core/doctype/data_import/importer_new.py b/frappe/core/doctype/data_import/importer_new.py deleted file mode 100644 index 040e9fabc4..0000000000 --- a/frappe/core/doctype/data_import/importer_new.py +++ /dev/null @@ -1,1044 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors -# MIT License. 
See license.txt - -import io -import os -import json -import timeit -import frappe -from datetime import datetime -from frappe import _ -from frappe.utils import cint, flt, update_progress_bar, cstr, DATETIME_FORMAT -from frappe.utils.csvutils import read_csv_content -from frappe.utils.xlsxutils import ( - read_xlsx_file_from_attached_file, - read_xls_file_from_attached_file, -) -from frappe.model import no_value_fields, table_fields - -INVALID_VALUES = ["", None] -MAX_ROWS_IN_PREVIEW = 10 -INSERT = "Insert New Records" -UPDATE = "Update Existing Records" - -# pylint: disable=R0201 -class Importer: - def __init__( - self, doctype, data_import=None, file_path=None, content=None, console=False - ): - self.doctype = doctype - self.template_options = frappe._dict({"remap_column": {}}) - self.console = console - - if data_import: - self.data_import = data_import - if self.data_import.template_options: - template_options = frappe.parse_json(self.data_import.template_options) - self.template_options.update(template_options) - self.import_type = self.data_import.import_type - else: - self.data_import = None - - self.import_type = self.import_type or INSERT - - self.header_row = None - self.data = None - # used to store date formats guessed from data rows per column - self._guessed_date_formats = {} - # used to store eta during import - self.last_eta = 0 - # used to collect warnings during template parsing - # and show them to user - self.warnings = [] - self.meta = frappe.get_meta(doctype) - self.prepare_content(file_path, content) - self.parse_data_from_template() - - def prepare_content(self, file_path, content): - extension = None - if self.data_import and self.data_import.import_file: - file_doc = frappe.get_doc("File", {"file_url": self.data_import.import_file}) - parts = file_doc.get_extension() - extension = parts[1] - content = file_doc.get_content() - extension = extension.lstrip(".") - - if file_path: - content, extension = self.read_file(file_path) - - if not 
extension: - extension = "csv" - - if content: - self.read_content(content, extension) - - self.validate_template_content() - - def read_file(self, file_path): - extn = file_path.split(".")[1] - - file_content = None - with io.open(file_path, mode="rb") as f: - file_content = f.read() - - return file_content, extn - - def read_content(self, content, extension): - error_title = _("Template Error") - if extension not in ("csv", "xlsx", "xls"): - frappe.throw( - _("Import template should be of type .csv, .xlsx or .xls"), title=error_title - ) - - if extension == "csv": - data = read_csv_content(content) - elif extension == "xlsx": - data = read_xlsx_file_from_attached_file(fcontent=content) - elif extension == "xls": - data = read_xls_file_from_attached_file(content) - - data = self.remove_empty_rows_and_columns(data) - - if len(data) <= 1: - frappe.throw( - _("Import template should contain a Header and atleast one row."), title=error_title - ) - - self.header_row = data[0] - self.data = data[1:] - - def validate_template_content(self): - column_count = len(self.header_row) - if any([len(row) != column_count and len(row) != 0 for row in self.data]): - frappe.throw( - _("Number of columns does not match with data"), title=_("Invalid Template") - ) - - def remove_empty_rows_and_columns(self, raw_data): - self.row_index_map = [] - removed_rows = [] - removed_columns = [] - - # remove empty rows - data_without_empty_rows = [] - for i, row in enumerate(raw_data): - if all(v in INVALID_VALUES for v in row): - # empty row - removed_rows.append(i) - else: - data_without_empty_rows.append(row) - self.row_index_map.append(i) - - # remove empty columns - # a column with a header and no data is a valid column - # a column with no header and no data will be removed - first_row = data_without_empty_rows[0] - for i, column in enumerate(first_row): - column_values = [row[i] for row in data_without_empty_rows] - if all(v in INVALID_VALUES for v in column_values): - # empty column - 
removed_columns.append(i) - - if removed_columns: - data_without_empty_rows_and_columns = [] - # remove empty columns from data - for i, row in enumerate(data_without_empty_rows): - new_row = [v for j, v in enumerate(row) if j not in removed_columns] - data_without_empty_rows_and_columns.append(new_row) - else: - data_without_empty_rows_and_columns = data_without_empty_rows - - return data_without_empty_rows_and_columns - - def get_data_for_import_preview(self): - out = frappe._dict() - out.data = list(self.rows) - out.columns = self.columns - out.warnings = self.warnings - total_number_of_rows = len(out.data) - if total_number_of_rows > MAX_ROWS_IN_PREVIEW: - out.data = out.data[:MAX_ROWS_IN_PREVIEW] - out.max_rows_exceeded = True - out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW - out.total_number_of_rows = total_number_of_rows - return out - - def parse_data_from_template(self): - columns = self.parse_columns_from_header_row() - columns = self.detect_date_formats(columns) - columns, data = self.add_serial_no_column(columns, self.data) - - self.columns = columns - self.rows = data - - def parse_columns_from_header_row(self): - remap_column = self.template_options.remap_column - columns = [] - - df_by_labels_and_fieldnames = self.build_fields_dict_for_column_matching() - - for i, header_title in enumerate(self.header_row): - header_row_index = str(i) - column_number = str(i + 1) - skip_import = False - fieldname = remap_column.get(header_row_index) - - if fieldname and fieldname != "Don't Import": - df = df_by_labels_and_fieldnames.get(fieldname) - self.warnings.append( - { - "col": column_number, - "message": _("Mapping column {0} to field {1}").format( - frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) - ), - "type": "info", - } - ) - else: - df = df_by_labels_and_fieldnames.get(header_title) - - if not df: - skip_import = True - else: - skip_import = False - - if fieldname == "Don't Import": - skip_import = True - self.warnings.append( - { - 
"col": column_number, - "message": _("Skipping column {0}").format(frappe.bold(header_title)), - "type": "info", - } - ) - elif header_title and not df: - self.warnings.append( - { - "col": column_number, - "message": _("Cannot match column {0} with any field").format( - frappe.bold(header_title) - ), - "type": "info", - } - ) - elif not header_title and not df: - self.warnings.append( - {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} - ) - - columns.append( - frappe._dict( - df=df, - skip_import=skip_import, - header_title=header_title, - column_number=column_number, - index=i, - ) - ) - - return columns - - def build_fields_dict_for_column_matching(self): - """ - Build a dict with various keys to match with column headers and value as docfield - The keys can be label or fieldname - { - 'Customer': df1, - 'customer': df1, - 'Due Date': df2, - 'due_date': df2, - 'Item Code (Sales Invoice Item)': df3, - 'Sales Invoice Item:item_code': df3, - } - """ - out = {} - - table_doctypes = [df.options for df in self.meta.get_table_fields()] - doctypes = table_doctypes + [self.doctype] - for doctype in doctypes: - # name field - name_key = "ID" if self.doctype == doctype else "ID ({})".format(doctype) - name_df = frappe._dict( - { - "fieldtype": "Data", - "fieldname": "name", - "label": "ID", - "reqd": self.import_type == UPDATE, - "parent": doctype, - } - ) - out[name_key] = name_df - out["name"] = name_df - - # other fields - meta = frappe.get_meta(doctype) - fields = self.get_standard_fields(doctype) + meta.fields - for df in fields: - fieldtype = df.fieldtype or "Data" - parent = df.parent or self.doctype - if fieldtype not in no_value_fields: - # label as key - label = ( - df.label if self.doctype == doctype else "{0} ({1})".format(df.label, parent) - ) - out[label] = df - # fieldname as key - if self.doctype == doctype: - out[df.fieldname] = df - else: - key = "{0}:{1}".format(doctype, df.fieldname) - out[key] = df - - # if autoname is 
based on field - # add an entry for "ID (Autoname Field)" - autoname_field = self.get_autoname_field(self.doctype) - if autoname_field: - out["ID ({})".format(autoname_field.label)] = autoname_field - # ID field should also map to the autoname field - out["ID"] = autoname_field - out["name"] = autoname_field - - return out - - def get_standard_fields(self, doctype): - meta = frappe.get_meta(doctype) - if meta.istable: - standard_fields = [ - {"label": "Parent", "fieldname": "parent"}, - {"label": "Parent Type", "fieldname": "parenttype"}, - {"label": "Parent Field", "fieldname": "parentfield"}, - {"label": "Row Index", "fieldname": "idx"}, - ] - else: - standard_fields = [ - {"label": "Owner", "fieldname": "owner"}, - {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, - ] - - out = [] - for df in standard_fields: - df = frappe._dict(df) - df.parent = doctype - out.append(df) - return out - - def detect_date_formats(self, columns): - for col in columns: - if col.df and col.df.fieldtype in ["Date", "Time", "Datetime"]: - col.date_format = self.guess_date_format_for_column(col, columns) - return columns - - def add_serial_no_column(self, columns, data): - columns_with_serial_no = [ - frappe._dict({"header_title": "Sr. 
No", "skip_import": True}) - ] + columns - - # update index for each column - for i, col in enumerate(columns_with_serial_no): - col.index = i - - data_with_serial_no = [] - for i, row in enumerate(data): - data_with_serial_no.append([self.row_index_map[i] + 1] + row) - - return columns_with_serial_no, data_with_serial_no - - def parse_value(self, value, df): - if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: - return value - - value = cstr(value) - - # convert boolean values to 0 or 1 - if df.fieldtype == "Check" and value.lower().strip() in [ - "t", - "f", - "true", - "false", - "yes", - "no", - "y", - "n", - ]: - value = value.lower().strip() - value = 1 if value in ["t", "true", "y", "yes"] else 0 - - if df.fieldtype in ["Int", "Check"]: - value = cint(value) - elif df.fieldtype in ["Float", "Percent", "Currency"]: - value = flt(value) - elif df.fieldtype in ["Date", "Datetime"]: - value = self.parse_date_format(value, df) - - return value - - def parse_date_format(self, value, df): - date_format = self.get_date_format_for_df(df) or DATETIME_FORMAT - try: - return datetime.strptime(value, date_format) - except ValueError: - # ignore date values that dont match the format - # import will break for these values later - pass - return value - - def get_date_format_for_df(self, df): - return self._guessed_date_formats.get(df.parent + df.fieldname) - - def guess_date_format_for_column(self, column, columns): - """ Guesses date format for a column by parsing the first 10 values in the column, - getting the date format and then returning the one which has the maximum frequency - """ - PARSE_ROW_COUNT = 10 - - df = column.df - key = df.parent + df.fieldname - - if not self._guessed_date_formats.get(key): - matches = [col for col in columns if col.df == df] - if not matches: - self._guessed_date_formats[key] = None - return - - column = matches[0] - column_index = column.index - - date_values = [ - row[column_index] for row in 
self.data[:PARSE_ROW_COUNT] if row[column_index] - ] - date_formats = [ - guess_date_format(d) if isinstance(d, str) else None for d in date_values - ] - if not date_formats: - return - max_occurred_date_format = max(set(date_formats), key=date_formats.count) - self._guessed_date_formats[key] = max_occurred_date_format - - return self._guessed_date_formats[key] - - def import_data(self): - # set user lang for translations - frappe.cache().hdel("lang", frappe.session.user) - frappe.set_user_lang(frappe.session.user) - - if not self.console: - self.data_import.db_set("template_warnings", "") - - # set flags - frappe.flags.in_import = True - frappe.flags.mute_emails = self.data_import.mute_emails - - # prepare a map for missing link field values - self.prepare_missing_link_field_values() - - # parse docs from rows - payloads = self.get_payloads_for_import() - - # dont import if there are non-ignorable warnings - warnings = [w for w in self.warnings if w.get("type") != "info"] - if warnings: - if self.console: - self.print_grouped_warnings(warnings) - else: - self.data_import.db_set("template_warnings", json.dumps(warnings)) - frappe.publish_realtime( - "data_import_refresh", {"data_import": self.data_import.name} - ) - return - - # setup import log - if self.data_import.import_log: - import_log = frappe.parse_json(self.data_import.import_log) - else: - import_log = [] - - # remove previous failures from import log - import_log = [l for l in import_log if l.get("success") == True] - - # get successfully imported rows - imported_rows = [] - for log in import_log: - log = frappe._dict(log) - if log.success: - imported_rows += log.row_indexes - - # start import - total_payload_count = len(payloads) - batch_size = frappe.conf.data_import_batch_size or 1000 - - for batch_index, batched_payloads in enumerate( - frappe.utils.create_batch(payloads, batch_size) - ): - for i, payload in enumerate(batched_payloads): - doc = payload.doc - row_indexes = [row[0] for row in 
payload.rows] - current_index = (i + 1) + (batch_index * batch_size) - - if set(row_indexes).intersection(set(imported_rows)): - print("Skipping imported rows", row_indexes) - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "skipping": True, - "data_import": self.data_import.name, - }, - ) - continue - - try: - start = timeit.default_timer() - doc = self.process_doc(doc) - processing_time = timeit.default_timer() - start - eta = self.get_eta(current_index, total_payload_count, processing_time) - - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "docname": doc.name, - "data_import": self.data_import.name, - "success": True, - "row_indexes": row_indexes, - "eta": eta, - }, - ) - if self.console: - update_progress_bar( - "Importing {0} records".format(total_payload_count), - current_index, - total_payload_count, - ) - import_log.append( - frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes) - ) - # commit after every successful import - frappe.db.commit() - - except Exception: - import_log.append( - frappe._dict( - success=False, - exception=frappe.get_traceback(), - messages=frappe.local.message_log, - row_indexes=row_indexes, - ) - ) - frappe.clear_messages() - # rollback if exception - frappe.db.rollback() - - # set status - failures = [l for l in import_log if l.get("success") == False] - if len(failures) == total_payload_count: - status = "Pending" - elif len(failures) > 0: - status = "Partial Success" - else: - status = "Success" - - if self.console: - self.print_import_log(import_log) - else: - self.data_import.db_set("status", status) - self.data_import.db_set("import_log", json.dumps(import_log)) - - frappe.flags.in_import = False - frappe.flags.mute_emails = False - frappe.publish_realtime("data_import_refresh", {"data_import": 
self.data_import.name}) - - return import_log - - def get_payloads_for_import(self): - payloads = [] - # make a copy - data = list(self.rows) - while data: - doc, rows, data = self.parse_next_row_for_import(data) - payloads.append(frappe._dict(doc=doc, rows=rows)) - return payloads - - def parse_next_row_for_import(self, data): - """ - Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. - Returns the doc, rows, and data without the rows. - """ - doctypes = set([col.df.parent for col in self.columns if col.df and col.df.parent]) - - # first row is included by default - first_row = data[0] - rows = [first_row] - - # if there are child doctypes, find the subsequent rows - if len(doctypes) > 1: - # subsequent rows either dont have any parent value set - # or have the same value as the parent row - # we include a row if either of conditions match - parent_column_indexes = [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == self.doctype - ] - parent_row_values = [first_row[i] for i in parent_column_indexes] - - data_without_first_row = data[1:] - for row in data_without_first_row: - row_values = [row[i] for i in parent_column_indexes] - # if the row is blank, it's a child row doc - if all([v in INVALID_VALUES for v in row_values]): - rows.append(row) - continue - # if the row has same values as parent row, it's a child row doc - if row_values == parent_row_values: - rows.append(row) - continue - # if any of those conditions dont match, it's the next doc - break - - def get_column_indexes(doctype): - return [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == doctype - ] - - def validate_value(value, df): - if df.fieldtype == "Select": - select_options = df.get_select_options() - if select_options and value not in select_options: - options_string = ", ".join([frappe.bold(d) for d in select_options]) - msg = _("Value must be one of 
{0}").format(options_string) - self.warnings.append( - { - "row": row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return - - elif df.fieldtype == "Link": - d = self.get_missing_link_field_values(df.options) - if value in d.missing_values and not d.one_mandatory: - msg = _("Value {0} missing for {1}").format( - frappe.bold(value), frappe.bold(df.options) - ) - self.warnings.append( - { - "row": row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return value - - return value - - def parse_doc(doctype, docfields, values, row_number): - doc = frappe._dict() - if self.import_type == INSERT: - # new_doc returns a dict with default values set - doc = frappe.new_doc(doctype, as_dict=True) - - # remove standard fields and __islocal - for key in frappe.model.default_fields + ("__islocal",): - doc.pop(key, None) - - for df, value in zip(docfields, values): - if value in INVALID_VALUES: - value = None - - if value is not None: - value = validate_value(value, df) - - if value is not None: - doc[df.fieldname] = self.parse_value(value, df) - - is_table = frappe.get_meta(doctype).istable - is_update = self.import_type == UPDATE - if is_table and is_update and doc.get("name") in INVALID_VALUES: - # for table rows being inserted in update - # create a new doc with defaults set - new_doc = frappe.new_doc(doctype, as_dict=True) - new_doc.update(doc) - doc = new_doc - - check_mandatory_fields(doctype, doc, row_number) - return doc - - def check_mandatory_fields(doctype, doc, row_number): - """If import type is Insert: - Check for mandatory fields (except table fields) in doc - if import type is Update: - Check for name field or autoname field in doc - """ - meta = frappe.get_meta(doctype) - if self.import_type == UPDATE: - if meta.istable: - # when updating records with table rows, - # there are two scenarios: - # 1. if row 'name' is provided in the template - # the table row will be updated - # 2. 
if row 'name' is not provided - # then a new row will be added - # so we dont need to check for mandatory - return - - id_field = self.get_id_field(doctype) - if doc.get(id_field.fieldname) in INVALID_VALUES: - self.warnings.append( - { - "row": row_number, - "message": _("{0} is a mandatory field").format(id_field.label), - } - ) - return - - fields = [ - df - for df in meta.fields - if df.fieldtype not in table_fields - and df.reqd - and doc.get(df.fieldname) in INVALID_VALUES - ] - - if not fields: - return - - if len(fields) == 1: - self.warnings.append( - { - "row": row_number, - "message": _("{0} is a mandatory field").format(fields[0].label), - } - ) - else: - fields_string = ", ".join([df.label for df in fields]) - self.warnings.append( - {"row": row_number, "message": _("{0} are mandatory fields").format(fields_string)} - ) - - parsed_docs = {} - for row in rows: - for doctype in doctypes: - if doctype == self.doctype and parsed_docs.get(doctype): - # if parent doc is already parsed from the first row - # then skip - continue - - row_number = row[0] - column_indexes = get_column_indexes(doctype) - values = [row[i] for i in column_indexes] - - if all(v in INVALID_VALUES for v in values): - # skip values if all of them are empty - continue - - columns = [self.columns[i] for i in column_indexes] - docfields = [col.df for col in columns] - doc = parse_doc(doctype, docfields, values, row_number) - parsed_docs[doctype] = parsed_docs.get(doctype, []) - parsed_docs[doctype].append(doc) - - # build the doc with children - doc = {} - for doctype, docs in parsed_docs.items(): - if doctype == self.doctype: - doc.update(docs[0]) - else: - table_dfs = self.meta.get( - "fields", {"options": doctype, "fieldtype": ["in", table_fields]} - ) - if table_dfs: - table_field = table_dfs[0] - doc[table_field.fieldname] = docs - - # check if there is atleast one row for mandatory table fields - mandatory_table_fields = [ - df - for df in self.meta.fields - if df.fieldtype in 
table_fields and df.reqd and len(doc.get(df.fieldname, [])) == 0 - ] - if len(mandatory_table_fields) == 1: - self.warnings.append( - { - "row": first_row[0], - "message": _("There should be atleast one row for {0} table").format( - mandatory_table_fields[0].label - ), - } - ) - elif mandatory_table_fields: - fields_string = ", ".join([df.label for df in mandatory_table_fields]) - message = _("There should be atleast one row for the following tables: {0}").format( - fields_string - ) - self.warnings.append({"row": first_row[0], "message": message}) - - return doc, rows, data[len(rows) :] - - def process_doc(self, doc): - if self.import_type == INSERT: - return self.insert_record(doc) - elif self.import_type == UPDATE: - return self.update_record(doc) - - def insert_record(self, doc): - self.create_missing_linked_records(doc) - - new_doc = frappe.new_doc(self.doctype) - new_doc.update(doc) - # name shouldn't be set when inserting a new record - new_doc.set("name", None) - new_doc.insert() - if self.meta.is_submittable and self.data_import.submit_after_import: - new_doc.submit() - return new_doc - - def create_missing_linked_records(self, doc): - """ - Finds fields that are of type Link, and creates the corresponding - document automatically if it has only one mandatory field - """ - link_values = [] - - def get_link_fields(doc, doctype): - for fieldname, value in doc.items(): - meta = frappe.get_meta(doctype) - df = meta.get_field(fieldname) - if not df: - continue - if df.fieldtype == "Link" and value not in INVALID_VALUES: - link_values.append([df.options, value]) - elif df.fieldtype in table_fields: - for row in value: - get_link_fields(row, df.options) - - get_link_fields(doc, self.doctype) - - for link_doctype, link_value in link_values: - d = self.missing_link_values.get(link_doctype) - if d and d.one_mandatory and link_value in d.missing_values: - # find the autoname field - autoname_field = self.get_autoname_field(link_doctype) - name_field = 
autoname_field.fieldname if autoname_field else "name" - new_doc = frappe.new_doc(link_doctype) - new_doc.set(name_field, link_value) - new_doc.insert() - d.missing_values.remove(link_value) - - def update_record(self, doc): - id_fieldname = self.get_id_fieldname(self.doctype) - id_value = doc[id_fieldname] - existing_doc = frappe.get_doc(self.doctype, id_value) - existing_doc.flags.updater_reference = { - "doctype": self.data_import.doctype, - "docname": self.data_import.name, - "label": _("via Data Import"), - } - existing_doc.update(doc) - existing_doc.save() - return existing_doc - - def export_errored_rows(self): - from frappe.utils.csvutils import build_csv_response - - if not self.data_import: - return - - import_log = frappe.parse_json(self.data_import.import_log or "[]") - failures = [l for l in import_log if l.get("success") == False] - row_indexes = [] - for f in failures: - row_indexes.extend(f.get("row_indexes", [])) - - # de duplicate - row_indexes = list(set(row_indexes)) - row_indexes.sort() - - header_row = [col.header_title for col in self.columns[1:]] - rows = [header_row] - rows += [row[1:] for row in self.rows if row[0] in row_indexes] - - build_csv_response(rows, self.doctype) - - def get_missing_link_field_values(self, doctype): - return self.missing_link_values.get(doctype, {}) - - def prepare_missing_link_field_values(self): - columns = self.columns - rows = self.rows - link_column_indexes = [ - col.index for col in columns if col.df and col.df.fieldtype == "Link" - ] - - self.missing_link_values = {} - for index in link_column_indexes: - col = columns[index] - column_values = [row[index] for row in rows] - values = set([v for v in column_values if v not in INVALID_VALUES]) - doctype = col.df.options - - missing_values = [value for value in values if not frappe.db.exists(doctype, value)] - if self.missing_link_values.get(doctype): - self.missing_link_values[doctype].missing_values += missing_values - else: - 
self.missing_link_values[doctype] = frappe._dict( - missing_values=missing_values, - one_mandatory=self.has_one_mandatory_field(doctype), - df=col.df, - ) - - def get_eta(self, current, total, processing_time): - remaining = total - current - eta = processing_time * remaining - if not self.last_eta or eta < self.last_eta: - self.last_eta = eta - return self.last_eta - - def has_one_mandatory_field(self, doctype): - meta = frappe.get_meta(doctype) - # get mandatory fields with default not set - mandatory_fields = [df for df in meta.fields if df.reqd and not df.default] - mandatory_fields_count = len(mandatory_fields) - if meta.autoname and meta.autoname.lower() == "prompt": - mandatory_fields_count += 1 - return mandatory_fields_count == 1 - - def get_id_fieldname(self, doctype): - return self.get_id_field(doctype).fieldname - - def get_id_field(self, doctype): - autoname_field = self.get_autoname_field(doctype) - if autoname_field: - return autoname_field - return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) - - def get_autoname_field(self, doctype): - meta = frappe.get_meta(doctype) - if meta.autoname and meta.autoname.startswith("field:"): - fieldname = meta.autoname[len("field:") :] - return meta.get_field(fieldname) - - def print_grouped_warnings(self, warnings): - warnings_by_row = {} - other_warnings = [] - for w in warnings: - if w.get("row"): - warnings_by_row.setdefault(w.get("row"), []).append(w) - else: - other_warnings.append(w) - - for row_number, warnings in warnings_by_row.items(): - print("Row {0}".format(row_number)) - for w in warnings: - print(w.get("message")) - - for w in other_warnings: - print(w.get("message")) - - def print_import_log(self, import_log): - failed_records = [l for l in import_log if not l.success] - successful_records = [l for l in import_log if l.success] - - if successful_records: - print( - "Successfully imported {0} records out of {1}".format( - len(successful_records), len(import_log) - ) - ) - 
- if failed_records: - print("Failed to import {0} records".format(len(failed_records))) - file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now()) - print("Check {0} for errors".format(os.path.join("sites", file_name))) - text = "" - for w in failed_records: - text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", []))) - text += "Messages:\n{0}\n".format("\n".join(w.get("messages", []))) - text += "Traceback:\n{0}\n\n".format(w.get("exception")) - - with open(file_name, "w") as f: - f.write(text) - - -DATE_FORMATS = [ - r"%d-%m-%Y", - r"%m-%d-%Y", - r"%Y-%m-%d", - r"%d-%m-%y", - r"%m-%d-%y", - r"%y-%m-%d", - r"%d/%m/%Y", - r"%m/%d/%Y", - r"%Y/%m/%d", - r"%d/%m/%y", - r"%m/%d/%y", - r"%y/%m/%d", - r"%d.%m.%Y", - r"%m.%d.%Y", - r"%Y.%m.%d", - r"%d.%m.%y", - r"%m.%d.%y", - r"%y.%m.%d", -] - -TIME_FORMATS = [ - r"%H:%M:%S.%f", - r"%H:%M:%S", - r"%H:%M", - r"%I:%M:%S.%f %p", - r"%I:%M:%S %p", - r"%I:%M %p", -] - - -def guess_date_format(date_string): - date_string = date_string.strip() - - _date = None - _time = None - - if " " in date_string: - _date, _time = date_string.split(" ", 1) - else: - _date = date_string - - date_format = None - time_format = None - - for f in DATE_FORMATS: - try: - # if date is parsed without any exception - # capture the date format - datetime.strptime(_date, f) - date_format = f - break - except ValueError: - pass - - if _time: - for f in TIME_FORMATS: - try: - # if time is parsed without any exception - # capture the time format - datetime.strptime(_time, f) - time_format = f - break - except ValueError: - pass - - full_format = date_format - if time_format: - full_format += " " + time_format - return full_format - - -def import_data(doctype, file_path): - i = Importer(doctype, file_path) - i.import_data() diff --git a/frappe/core/doctype/data_import/test_data_import.js b/frappe/core/doctype/data_import/test_data_import.js deleted file mode 100644 index fbce7781b6..0000000000 --- 
a/frappe/core/doctype/data_import/test_data_import.js +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable */ -// rename this file from _test_[name] to test_[name] to activate -// and remove above this line - -QUnit.test("test: Data Import", function (assert) { - let done = assert.async(); - - // number of asserts - assert.expect(1); - - frappe.run_serially([ - // insert a new Data Import - () => frappe.tests.make('Data Import', [ - // values to be set - {key: 'value'} - ]), - () => { - assert.equal(cur_frm.doc.key, 'value'); - }, - () => done() - ]); - -}); diff --git a/frappe/core/doctype/data_import/test_data_import.py b/frappe/core/doctype/data_import/test_data_import.py index 406ea08958..15fd57744a 100644 --- a/frappe/core/doctype/data_import/test_data_import.py +++ b/frappe/core/doctype/data_import/test_data_import.py @@ -1,100 +1,10 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017, Frappe Technologies and Contributors +# Copyright (c) 2020, Frappe Technologies and Contributors # See license.txt from __future__ import unicode_literals -import frappe, unittest -from frappe.core.doctype.data_export import exporter -from frappe.core.doctype.data_import import importer -from frappe.utils.csvutils import read_csv_content +# import frappe +import unittest class TestDataImport(unittest.TestCase): - def test_export(self): - exporter.export_data("User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - - def test_export_with_data(self): - exporter.export_data("User", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - - def test_export_with_all_doctypes(self): - exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], 
"User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - self.assertEqual(content[13][0], "DocType:") - self.assertEqual(content[13][1], "User") - self.assertTrue("Has Role" in content[13]) - - def test_import(self): - if frappe.db.exists("Blog Category", "test-category"): - frappe.delete_doc("Blog Category", "test-category") - - exporter.export_data("Blog Category", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test-category", "Test Cateogry"]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category") - - # export with data - exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - - # overwrite - content[-1][3] = "New Title" - importer.upload(content, overwrite=True) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title") - - def test_import_only_children(self): - user_email = "test_import_userrole@example.com" - if frappe.db.exists("User", user_email): - frappe.delete_doc("User", user_email, force=True) - - frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert() - - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Blogger"]) - importer.upload(content) - - user = frappe.get_doc("User", user_email) - self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"})) - self.assertTrue(user.get("roles")[0].role, "Blogger") - - # overwrite - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Website Manager"]) - 
importer.upload(content, overwrite=True) - - user = frappe.get_doc("User", user_email) - self.assertEqual(len(user.get("roles")), 1) - self.assertTrue(user.get("roles")[0].role, "Website Manager") - - def test_import_with_children(self): #pylint: disable=R0201 - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - exporter.export_data("Event", all_doctypes="Yes", template=True) - content = read_csv_content(frappe.response.result) - - content.append([None] * len(content[-2])) - content[-1][1] = "__Test Event with children" - content[-1][2] = "Private" - content[-1][3] = "2014-01-01 10:00:00.000000" - importer.upload(content) - - frappe.get_doc("Event", {"subject":"__Test Event with children"}) - - def test_excel_import(self): - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - - exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel") - from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file - content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent) - content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name")) \ No newline at end of file + pass diff --git a/frappe/core/doctype/data_import/test_exporter.py b/frappe/core/doctype/data_import/test_exporter.py new file mode 100644 index 0000000000..8415af2e63 --- /dev/null +++ b/frappe/core/doctype/data_import/test_exporter.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +import unittest +import frappe +from frappe.core.doctype.data_import.exporter import Exporter +from frappe.core.doctype.data_import.test_importer import ( + create_doctype_if_not_exists, +) + +doctype_name = 
'DocType for Export' + +class TestExporter(unittest.TestCase): + def setUp(self): + create_doctype_if_not_exists(doctype_name) + + def test_exports_specified_fields(self): + if not frappe.db.exists(doctype_name, "Test"): + doc = frappe.get_doc( + doctype=doctype_name, + title="Test", + description="Test Description", + table_field_1=[ + {"child_title": "Child Title 1", "child_description": "Child Description 1"}, + {"child_title": "Child Title 2", "child_description": "Child Description 2"}, + ], + table_field_2=[ + {"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"}, + ], + table_field_1_again=[ + { + "child_title": "Child Title 1 Again", + "child_description": "Child Description 1 Again", + }, + ], + ).insert() + else: + doc = frappe.get_doc(doctype_name, "Test") + + e = Exporter( + doctype_name, + export_fields={ + doctype_name: ["title", "description", "number", "another_number"], + "table_field_1": ["name", "child_title", "child_description"], + "table_field_2": ["child_2_date", "child_2_number"], + "table_field_1_again": [ + "child_title", + "child_date", + "child_number", + "child_another_number", + ], + }, + export_data=True, + ) + csv_array = e.get_csv_array() + header_row = csv_array[0] + + self.assertEqual( + header_row, + [ + "Title", + "Description", + "Number", + "another_number", + "ID (Table Field 1)", + "Child Title (Table Field 1)", + "Child Description (Table Field 1)", + "Child 2 Date (Table Field 2)", + "Child 2 Number (Table Field 2)", + "Child Title (Table Field 1 Again)", + "Child Date (Table Field 1 Again)", + "Child Number (Table Field 1 Again)", + "table_field_1_again.child_another_number", + ], + ) + + table_field_1_row_1_name = doc.table_field_1[0].name + table_field_1_row_2_name = doc.table_field_1[1].name + # fmt: off + self.assertEqual( + csv_array[1], + ["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0] + ) + 
self.assertEqual( + csv_array[2], + ["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""], + ) + # fmt: on + self.assertEqual(len(csv_array), 3) + + def test_export_csv_response(self): + e = Exporter( + doctype_name, + export_fields={doctype_name: ["title", "description"]}, + export_data=True, + file_type="CSV" + ) + e.build_response() + + self.assertTrue(frappe.response['result']) + self.assertEqual(frappe.response['doctype'], doctype_name) + self.assertEqual(frappe.response['type'], "csv") diff --git a/frappe/core/doctype/data_import/test_exporter_new.py b/frappe/core/doctype/data_import/test_exporter_new.py deleted file mode 100644 index 0d3aedb033..0000000000 --- a/frappe/core/doctype/data_import/test_exporter_new.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and Contributors -# See license.txt -from __future__ import unicode_literals - -import unittest -import frappe -from frappe.core.doctype.data_import.exporter_new import Exporter - - -class TestExporter(unittest.TestCase): - def test_exports_mandatory_fields(self): - e = Exporter('Web Page', export_fields='Mandatory') - csv_array = e.get_csv_array() - header_row = csv_array[0] - self.assertEqual(header_row, ['ID', 'Title']) - - - def test_exports_all_fields(self): - e = Exporter('Web Page', export_fields='All') - csv_array = e.get_csv_array() - header = csv_array[0] - self.assertEqual(len(header), 37) - - - def test_exports_selected_fields(self): - export_fields = { - 'Web Page': ['title', 'route', 'published'] - } - e = Exporter('Web Page', export_fields=export_fields) - csv_array = e.get_csv_array() - header = csv_array[0] - self.assertEqual(header, ['Title', 'Route', 'Published']) - - - def test_exports_data(self): - e = Exporter('ToDo', export_fields='All', export_data=True) - todo_records = frappe.db.count('ToDo') - csv_array = e.get_csv_array() - self.assertEqual(len(csv_array), todo_records + 
1) diff --git a/frappe/core/doctype/data_import/test_importer.py b/frappe/core/doctype/data_import/test_importer.py new file mode 100644 index 0000000000..bdadad7890 --- /dev/null +++ b/frappe/core/doctype/data_import/test_importer.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +import unittest +import frappe +from frappe.utils import getdate + +doctype_name = 'DocType for Import' + +class TestImporter(unittest.TestCase): + def setUp(self): + create_doctype_if_not_exists(doctype_name) + + def test_data_import_from_file(self): + import_file = get_import_file('sample_import_file') + data_import = self.get_importer(doctype_name, import_file) + data_import.start_import() + + doc1 = frappe.get_doc(doctype_name, 'Test') + doc2 = frappe.get_doc(doctype_name, 'Test 2') + doc3 = frappe.get_doc(doctype_name, 'Test 3') + + self.assertEqual(doc1.description, 'test description') + self.assertEqual(doc1.number, 1) + + self.assertEqual(doc1.table_field_1[0].child_title, 'child title') + self.assertEqual(doc1.table_field_1[0].child_description, 'child description') + + self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2') + self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2') + + self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child') + self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30')) + self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5) + + self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again') + self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2') + self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22')) + + self.assertEqual(doc2.description, 'test description 2') + self.assertEqual(doc3.another_number, 5) + + def test_data_import_preview(self): + import_file = 
get_import_file('sample_import_file') + data_import = self.get_importer(doctype_name, import_file) + preview = data_import.get_preview_from_template() + + self.assertEqual(len(preview.data), 4) + self.assertEqual(len(preview.columns), 15) + + def test_data_import_without_mandatory_values(self): + import_file = get_import_file('sample_import_file_without_mandatory') + data_import = self.get_importer(doctype_name, import_file) + data_import.start_import() + data_import.reload() + warnings = frappe.parse_json(data_import.template_warnings) + + self.assertEqual(warnings[0]['row'], 2) + self.assertEqual(warnings[0]['message'], "Child Title (Table Field 1) is a mandatory field") + + self.assertEqual(warnings[1]['row'], 3) + self.assertEqual(warnings[1]['message'], "Child Title (Table Field 1 Again) is a mandatory field") + + self.assertEqual(warnings[2]['row'], 4) + self.assertEqual(warnings[2]['message'], "Title is a mandatory field") + + def test_data_import_update(self): + if not frappe.db.exists(doctype_name, 'Test 26'): + frappe.get_doc( + doctype=doctype_name, + title='Test 26' + ).insert() + + import_file = get_import_file('sample_import_file_for_update') + data_import = self.get_importer(doctype_name, import_file, update=True) + data_import.start_import() + + updated_doc = frappe.get_doc(doctype_name, 'Test 26') + self.assertEqual(updated_doc.description, 'test description') + self.assertEqual(updated_doc.table_field_1[0].child_title, 'child title') + self.assertEqual(updated_doc.table_field_1[0].child_description, 'child description') + self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again') + + def get_importer(self, doctype, import_file, update=False): + data_import = frappe.new_doc('Data Import') + data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records' + data_import.reference_doctype = doctype + data_import.import_file = import_file.file_url + data_import.insert() + + return data_import + +def 
create_doctype_if_not_exists(doctype_name, force=False): + if force: + frappe.delete_doc_if_exists('DocType', doctype_name) + frappe.delete_doc_if_exists('DocType', 'Child 1 of ' + doctype_name) + frappe.delete_doc_if_exists('DocType', 'Child 2 of ' + doctype_name) + + if frappe.db.exists('DocType', doctype_name): + return + + # Child Table 1 + table_1_name = 'Child 1 of ' + doctype_name + frappe.get_doc({ + 'doctype': 'DocType', + 'name': table_1_name, + 'module': 'Custom', + 'custom': 1, + 'istable': 1, + 'fields': [ + {'label': 'Child Title', 'fieldname': 'child_title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Child Description', 'fieldname': 'child_description', 'fieldtype': 'Small Text'}, + {'label': 'Child Date', 'fieldname': 'child_date', 'fieldtype': 'Date'}, + {'label': 'Child Number', 'fieldname': 'child_number', 'fieldtype': 'Int'}, + {'label': 'Child Number', 'fieldname': 'child_another_number', 'fieldtype': 'Int'}, + ] + }).insert() + + # Child Table 2 + table_2_name = 'Child 2 of ' + doctype_name + frappe.get_doc({ + 'doctype': 'DocType', + 'name': table_2_name, + 'module': 'Custom', + 'custom': 1, + 'istable': 1, + 'fields': [ + {'label': 'Child 2 Title', 'fieldname': 'child_2_title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Child 2 Description', 'fieldname': 'child_2_description', 'fieldtype': 'Small Text'}, + {'label': 'Child 2 Date', 'fieldname': 'child_2_date', 'fieldtype': 'Date'}, + {'label': 'Child 2 Number', 'fieldname': 'child_2_number', 'fieldtype': 'Int'}, + {'label': 'Child 2 Number', 'fieldname': 'child_2_another_number', 'fieldtype': 'Int'}, + ] + }).insert() + + # Main Table + frappe.get_doc({ + 'doctype': 'DocType', + 'name': doctype_name, + 'module': 'Custom', + 'custom': 1, + 'autoname': 'field:title', + 'fields': [ + {'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'}, + {'label': 'Date', 'fieldname': 'date', 
'fieldtype': 'Date'}, + {'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'}, + {'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'}, + {'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name}, + {'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name}, + {'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name}, + ], + 'permissions': [ + {'role': 'System Manager'} + ] + }).insert() + + +def get_import_file(csv_file_name, force=False): + file_name = csv_file_name + '.csv' + _file = frappe.db.exists('File', {'file_name': file_name}) + if force and _file: + frappe.delete_doc_if_exists('File', _file) + + if frappe.db.exists('File', {'file_name': file_name}): + f = frappe.get_doc('File', {'file_name': file_name}) + else: + full_path = get_csv_file_path(file_name) + f = frappe.get_doc( + doctype='File', + content=frappe.read_file(full_path), + file_name=file_name, + is_private=1 + ) + f.save(ignore_permissions=True) + + return f + + +def get_csv_file_path(file_name): + return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name) diff --git a/frappe/core/doctype/data_import/test_importer_new.py b/frappe/core/doctype/data_import/test_importer_new.py deleted file mode 100644 index d6349daa55..0000000000 --- a/frappe/core/doctype/data_import/test_importer_new.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and Contributors -# See license.txt -from __future__ import unicode_literals - -import datetime -import unittest -import frappe -from frappe.core.doctype.data_import.importer_new import Importer - -content_empty_rows = '''title,start_date,idx,show_title -,,, -est phasellus sit amet,5/20/2019,52,1 -nibh in,7/29/2019,77,1 -''' - -content_mandatory_missing = '''title,start_date,idx,show_title -,5/20/2019,52,1 
-''' - -content_convert_value = '''title,start_date,idx,show_title -est phasellus sit amet,5/20/2019,52,True -''' - -content_invalid_column = '''title,start_date,idx,show_title,invalid_column -est phasellus sit amet,5/20/2019,52,True,invalid value -''' - - -class TestImporter(unittest.TestCase): - def test_should_skip_empty_rows(self): - i = self.get_importer('Web Page', content=content_empty_rows) - payloads = i.get_payloads_for_import() - row_to_be_imported = [] - for p in payloads: - row_to_be_imported += [row[0] for row in p.rows] - self.assertEqual(len(row_to_be_imported), 2) - - def test_should_throw_if_mandatory_is_missing(self): - i = self.get_importer('Web Page', content=content_mandatory_missing) - i.import_data() - warning = i.warnings[0] - self.assertTrue('Title is a mandatory field' in warning['message']) - - def test_should_convert_value_based_on_fieldtype(self): - i = self.get_importer('Web Page', content=content_convert_value) - payloads = i.get_payloads_for_import() - doc = payloads[0].doc - - self.assertEqual(type(doc['show_title']), int) - self.assertEqual(type(doc['idx']), int) - self.assertEqual(type(doc['start_date']), datetime.datetime) - - def test_should_ignore_invalid_columns(self): - i = self.get_importer('Web Page', content=content_invalid_column) - payloads = i.get_payloads_for_import() - doc = payloads[0].doc - - self.assertTrue('invalid_column' not in doc) - self.assertTrue('title' in doc) - - def test_should_import_valid_template(self): - title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8)) - content_valid_content = '''title,start_date,idx,show_title -{0},5/20/2019,52,1'''.format(title) - i = self.get_importer('Web Page', content=content_valid_content) - import_log = i.import_data() - log = import_log[0] - self.assertTrue(log.success) - doc = frappe.get_doc('Web Page', { 'title': title }) - self.assertEqual(frappe.utils.get_datetime_str(doc.start_date), - frappe.utils.get_datetime_str('2019-05-20')) - - def 
get_importer(self, doctype, content): - data_import = frappe.new_doc('Data Import Beta') - data_import.import_type = 'Insert New Records' - i = Importer(doctype, content=content, data_import=data_import) - return i diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.js b/frappe/core/doctype/data_import_beta/data_import_beta.js deleted file mode 100644 index 527dbd7d0c..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.js +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright (c) 2019, Frappe Technologies and contributors -// For license information, please see license.txt - -frappe.ui.form.on('Data Import Beta', { - setup(frm) { - frappe.realtime.on('data_import_refresh', ({ data_import }) => { - frm.import_in_progress = false; - if (data_import !== frm.doc.name) return; - frappe.model.clear_doc('Data Import Beta', frm.doc.name); - frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => { - frm.refresh(); - }); - }); - frappe.realtime.on('data_import_progress', data => { - frm.import_in_progress = true; - if (data.data_import !== frm.doc.name) { - return; - } - let percent = Math.floor((data.current * 100) / data.total); - let seconds = Math.floor(data.eta); - let minutes = Math.floor(data.eta / 60); - let eta_message = - // prettier-ignore - seconds < 60 - ? __('About {0} seconds remaining', [seconds]) - : minutes === 1 - ? __('About {0} minute remaining', [minutes]) - : __('About {0} minutes remaining', [minutes]); - - let message; - if (data.success) { - let message_args = [data.current, data.total, eta_message]; - message = - frm.doc.import_type === 'Insert New Records' - ? 
__('Importing {0} of {1}, {2}', message_args) - : __('Updating {0} of {1}, {2}', message_args); - } - if (data.skipping) { - message = __('Skipping {0} of {1}, {2}', [ - data.current, - data.total, - eta_message - ]); - } - frm.dashboard.show_progress(__('Import Progress'), percent, message); - frm.page.set_indicator(__('In Progress'), 'orange'); - - // hide progress when complete - if (data.current === data.total) { - setTimeout(() => { - frm.dashboard.hide(); - frm.refresh(); - }, 2000); - } - }); - - frm.set_query('reference_doctype', () => { - return { - filters: { - allow_import: 1 - } - }; - }); - - frm.get_field('import_file').df.options = { - restrictions: { - allowed_file_types: ['.csv', '.xls', '.xlsx'] - } - }; - }, - - refresh(frm) { - frm.page.hide_icon_group(); - frm.trigger('update_indicators'); - frm.trigger('import_file'); - frm.trigger('show_import_log'); - frm.trigger('show_import_warnings'); - frm.trigger('toggle_submit_after_import'); - frm.trigger('show_import_status'); - frm.trigger('show_report_error_button'); - - if (frm.doc.status === 'Partial Success') { - frm.add_custom_button(__('Export Errored Rows'), () => - frm.trigger('export_errored_rows') - ); - } - - if (frm.doc.status.includes('Success')) { - frm.add_custom_button( - __('Go to {0} List', [frm.doc.reference_doctype]), - () => frappe.set_route('List', frm.doc.reference_doctype) - ); - } - - frm.disable_save(); - if (frm.doc.status !== 'Success') { - if (!frm.is_new() && frm.doc.import_file) { - let label = - frm.doc.status === 'Pending' ? 
__('Start Import') : __('Retry'); - frm.page.set_primary_action(label, () => frm.events.start_import(frm)); - } else { - frm.page.set_primary_action(__('Save'), () => frm.save()); - } - } - }, - - update_indicators(frm) { - const indicator = frappe.get_indicator(frm.doc); - if (indicator) { - frm.page.set_indicator(indicator[0], indicator[1]); - } else { - frm.page.clear_indicator(); - } - }, - - show_import_status(frm) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - let successful_records = import_log.filter(log => log.success); - let failed_records = import_log.filter(log => !log.success); - if (successful_records.length === 0) return; - - let message; - if (failed_records.length === 0) { - let message_args = [successful_records.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records.', message_args) - : __('Successfully imported {0} record.', message_args); - } else { - message = - successful_records.length > 1 - ? __('Successfully updated {0} records.', message_args) - : __('Successfully updated {0} record.', message_args); - } - } else { - let message_args = [successful_records.length, import_log.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records out of {1}.', message_args) - : __('Successfully imported {0} record out of {1}.', message_args); - } else { - message = - successful_records.length > 1 - ? 
__('Successfully updated {0} records out of {1}.', message_args) - : __('Successfully updated {0} record out of {1}.', message_args); - } - } - frm.dashboard.set_headline(message); - }, - - show_report_error_button(frm) { - if (frm.doc.status === 'Error') { - frappe.db - .get_list('Error Log', { - filters: { method: frm.doc.name }, - fields: ['method', 'error'], - order_by: 'creation desc', - limit: 1 - }) - .then(result => { - if (result.length > 0) { - frm.add_custom_button('Report Error', () => { - let fake_xhr = { - responseText: JSON.stringify({ - exc: result[0].error - }) - }; - frappe.request.report_error(fake_xhr, {}); - }); - } - }); - } - }, - - start_import(frm) { - frm - .call({ - method: 'form_start_import', - args: { data_import: frm.doc.name }, - btn: frm.page.btn_primary - }) - .then(r => { - if (r.message === true) { - frm.disable_save(); - } - }); - }, - - download_template(frm) { - if ( - frm.data_exporter && - frm.data_exporter.doctype === frm.doc.reference_doctype - ) { - frm.data_exporter.dialog.show(); - set_export_records(); - } else { - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.data_exporter = new frappe.data_import.DataExporter( - frm.doc.reference_doctype - ); - set_export_records(); - }); - } - - function set_export_records() { - if (frm.doc.import_type === 'Insert New Records') { - frm.data_exporter.dialog.set_value('export_records', 'blank_template'); - } else { - frm.data_exporter.dialog.set_value('export_records', 'all'); - } - // Force ID field to be exported when updating existing records - let id_field = frm.data_exporter.dialog.get_field( - frm.doc.reference_doctype - ).options[0]; - if (id_field.value === 'name' && id_field.$checkbox) { - id_field.$checkbox - .find('input') - .prop('disabled', frm.doc.import_type === 'Update Existing Records'); - } - } - }, - - reference_doctype(frm) { - frm.trigger('toggle_submit_after_import'); - }, - - toggle_submit_after_import(frm) { - 
frm.toggle_display('submit_after_import', false); - let doctype = frm.doc.reference_doctype; - if (doctype) { - frappe.model.with_doctype(doctype, () => { - let meta = frappe.get_meta(doctype); - frm.toggle_display('submit_after_import', meta.is_submittable); - }); - } - }, - - import_file(frm) { - frm.toggle_display('section_import_preview', frm.doc.import_file); - if (!frm.doc.import_file) { - frm.get_field('import_preview').$wrapper.empty(); - return; - } - - // load import preview - frm.get_field('import_preview').$wrapper.empty(); - $('') - .html(__('Loading import file...')) - .appendTo(frm.get_field('import_preview').$wrapper); - - frm - .call({ - method: 'get_preview_from_template', - args: { data_import: frm.doc.name }, - error_handlers: { - TimestampMismatchError() { - // ignore this error - } - } - }) - .then(r => { - let preview_data = r.message; - frm.events.show_import_preview(frm, preview_data); - frm.events.show_import_warnings(frm, preview_data); - }); - }, - - show_import_preview(frm, preview_data) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - - if ( - frm.import_preview && - frm.import_preview.doctype === frm.doc.reference_doctype - ) { - frm.import_preview.preview_data = preview_data; - frm.import_preview.import_log = import_log; - frm.import_preview.refresh(); - return; - } - - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.import_preview = new frappe.data_import.ImportPreview({ - wrapper: frm.get_field('import_preview').$wrapper, - doctype: frm.doc.reference_doctype, - preview_data, - import_log, - frm, - events: { - remap_column(changed_map) { - let template_options = JSON.parse(frm.doc.template_options || '{}'); - template_options.remap_column = template_options.remap_column || {}; - Object.assign(template_options.remap_column, changed_map); - frm.set_value('template_options', JSON.stringify(template_options)); - frm.save().then(() => frm.trigger('import_file')); - } - } - }); - }); - }, - - 
export_errored_rows(frm) { - open_url_post( - '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', - { - data_import_name: frm.doc.name - } - ); - }, - - show_import_warnings(frm, preview_data) { - let warnings = JSON.parse(frm.doc.template_warnings || '[]'); - warnings = warnings.concat(preview_data.warnings || []); - - frm.toggle_display('import_warnings_section', warnings.length > 0); - if (warnings.length === 0) { - frm.get_field('import_warnings').$wrapper.html(''); - return; - } - - // group warnings by row - let warnings_by_row = {}; - let other_warnings = []; - for (let warning of warnings) { - if (warning.row) { - warnings_by_row[warning.row] = warnings_by_row[warning.row] || []; - warnings_by_row[warning.row].push(warning); - } else { - other_warnings.push(warning); - } - } - - let html = ''; - html += Object.keys(warnings_by_row) - .map(row_number => { - let message = warnings_by_row[row_number] - .map(w => { - if (w.field) { - let label = - w.field.label + - (w.field.parent !== frm.doc.reference_doctype - ? ` (${w.field.parent})` - : ''); - return `
  • ${label}: ${w.message}
  • `; - } - return `
  • ${w.message}
  • `; - }) - .join(''); - return ` -
    -
    ${__('Row {0}', [row_number])}
    -
      ${message}
    -
    - `; - }) - .join(''); - - html += other_warnings - .map(warning => { - let header = ''; - if (warning.col) { - header = __('Column {0}', [warning.col]); - } - return ` -
    -
    ${header}
    -
    ${warning.message}
    -
    - `; - }) - .join(''); - frm.get_field('import_warnings').$wrapper.html(` -
    -
    ${html}
    -
    - `); - }, - - show_failed_logs(frm) { - frm.trigger('show_import_log'); - }, - - show_import_log(frm) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - let logs = import_log; - frm.toggle_display('import_log', false); - frm.toggle_display('import_log_section', logs.length > 0); - - if (logs.length === 0) { - frm.get_field('import_log_preview').$wrapper.empty(); - return; - } - - let rows = logs - .map(log => { - let html = ''; - if (log.success) { - if (frm.doc.import_type === 'Insert New Records') { - html = __('Successfully imported {0}', [ - `${frappe.utils.get_form_link( - frm.doc.reference_doctype, - log.docname, - true - )}` - ]); - } else { - html = __('Successfully updated {0}', [ - `${frappe.utils.get_form_link( - frm.doc.reference_doctype, - log.docname, - true - )}` - ]); - } - } else { - let messages = log.messages - .map(JSON.parse) - .map(m => { - let title = m.title ? `${m.title}` : ''; - let message = m.message ? `
    ${m.message}
    ` : ''; - return title + message; - }) - .join(''); - let id = frappe.dom.get_unique_id(); - html = `${messages} - -
    -
    -
    ${log.exception}
    -
    -
    `; - } - let indicator_color = log.success ? 'green' : 'red'; - let title = log.success ? __('Success') : __('Failure'); - - if (frm.doc.show_failed_logs && log.success) { - return ''; - } - - return ` - ${log.row_indexes.join(', ')} - -
    ${title}
    - - - ${html} - - `; - }) - .join(''); - - if (!rows && frm.doc.show_failed_logs) { - rows = ` - ${__('No failed logs')} - `; - } - - frm.get_field('import_log_preview').$wrapper.html(` - - - - - - - ${rows} -
    ${__('Row Number')}${__('Status')}${__('Message')}
    - `); - }, - - show_missing_link_values(frm, missing_link_values) { - let can_be_created_automatically = missing_link_values.every( - d => d.has_one_mandatory_field - ); - - let html = missing_link_values - .map(d => { - let doctype = d.doctype; - let values = d.missing_values; - return ` -
    ${doctype}
    -
      ${values.map(v => `
    • ${v}
    • `).join('')}
    - `; - }) - .join(''); - - if (can_be_created_automatically) { - // prettier-ignore - let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?'); - frappe.confirm(message + html, () => { - frm - .call('create_missing_link_values', { - missing_link_values - }) - .then(r => { - let records = r.message; - frappe.msgprint( - __('Created {0} records successfully.', [records.length]) - ); - }); - }); - } else { - frappe.msgprint( - // prettier-ignore - __('The following records needs to be created before we can import your file.') + html - ); - } - } -}); diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.json b/frappe/core/doctype/data_import_beta/data_import_beta.json deleted file mode 100644 index 777af0a071..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.json +++ /dev/null @@ -1,170 +0,0 @@ -{ - "actions": [], - "autoname": "format:{reference_doctype} Import on {creation}", - "beta": 1, - "creation": "2019-08-04 14:16:08.318714", - "doctype": "DocType", - "editable_grid": 1, - "engine": "InnoDB", - "field_order": [ - "reference_doctype", - "import_type", - "download_template", - "import_file", - "column_break_5", - "status", - "submit_after_import", - "mute_emails", - "template_options", - "section_import_preview", - "import_preview", - "import_warnings_section", - "template_warnings", - "import_warnings", - "import_log_section", - "import_log", - "show_failed_logs", - "import_log_preview" - ], - "fields": [ - { - "fieldname": "reference_doctype", - "fieldtype": "Link", - "in_list_view": 1, - "label": "Document Type", - "options": "DocType", - "reqd": 1, - "set_only_once": 1 - }, - { - "fieldname": "import_type", - "fieldtype": "Select", - "in_list_view": 1, - "label": "Import Type", - "options": "\nInsert New Records\nUpdate Existing Records", - "reqd": 1, - "set_only_once": 1 - }, - { - "depends_on": 
"eval:!doc.__islocal", - "fieldname": "import_file", - "fieldtype": "Attach", - "in_list_view": 1, - "label": "Import File" - }, - { - "fieldname": "import_preview", - "fieldtype": "HTML", - "label": "Import Preview" - }, - { - "fieldname": "section_import_preview", - "fieldtype": "Section Break", - "label": "Preview" - }, - { - "fieldname": "column_break_5", - "fieldtype": "Column Break" - }, - { - "fieldname": "template_options", - "fieldtype": "Code", - "hidden": 1, - "label": "Template Options", - "options": "JSON", - "read_only": 1 - }, - { - "fieldname": "import_log", - "fieldtype": "Code", - "label": "Import Log", - "options": "JSON" - }, - { - "fieldname": "import_log_section", - "fieldtype": "Section Break", - "label": "Import Log" - }, - { - "fieldname": "import_log_preview", - "fieldtype": "HTML", - "label": "Import Log Preview" - }, - { - "default": "Pending", - "fieldname": "status", - "fieldtype": "Select", - "hidden": 1, - "label": "Status", - "options": "Pending\nSuccess\nPartial Success\nError", - "read_only": 1 - }, - { - "fieldname": "template_warnings", - "fieldtype": "Code", - "hidden": 1, - "label": "Template Warnings", - "options": "JSON" - }, - { - "default": "0", - "fieldname": "submit_after_import", - "fieldtype": "Check", - "label": "Submit After Import", - "set_only_once": 1 - }, - { - "fieldname": "import_warnings_section", - "fieldtype": "Section Break", - "label": "Warnings" - }, - { - "fieldname": "import_warnings", - "fieldtype": "HTML", - "label": "Import Warnings" - }, - { - "depends_on": "reference_doctype", - "fieldname": "download_template", - "fieldtype": "Button", - "label": "Download Template" - }, - { - "default": "1", - "fieldname": "mute_emails", - "fieldtype": "Check", - "label": "Don't Send Emails", - "set_only_once": 1 - }, - { - "default": "0", - "fieldname": "show_failed_logs", - "fieldtype": "Check", - "label": "Show Failed Logs" - } - ], - "hide_toolbar": 1, - "links": [], - "modified": "2020-02-17 
15:35:04.386098", - "modified_by": "faris@erpnext.com", - "module": "Core", - "name": "Data Import Beta", - "owner": "Administrator", - "permissions": [ - { - "create": 1, - "delete": 1, - "email": 1, - "export": 1, - "print": 1, - "read": 1, - "report": 1, - "role": "System Manager", - "share": 1, - "write": 1 - } - ], - "sort_field": "modified", - "sort_order": "DESC", - "track_changes": 1 -} \ No newline at end of file diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.py b/frappe/core/doctype/data_import_beta/data_import_beta.py deleted file mode 100644 index 8f12bd20ed..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and contributors -# For license information, please see license.txt - -from __future__ import unicode_literals -import frappe -from frappe.model.document import Document -from frappe.core.doctype.data_import.importer_new import Importer -from frappe.core.doctype.data_import.exporter_new import Exporter -from frappe.core.page.background_jobs.background_jobs import get_info -from frappe.utils.background_jobs import enqueue -from frappe import _ - - -class DataImportBeta(Document): - def validate(self): - doc_before_save = self.get_doc_before_save() - if not self.import_file or ( - doc_before_save and doc_before_save.import_file != self.import_file - ): - self.template_options = "" - self.template_warnings = "" - - if self.import_file: - # validate template - self.get_importer() - - def get_preview_from_template(self): - if not self.import_file: - return - - i = self.get_importer() - return i.get_data_for_import_preview() - - def start_import(self): - if frappe.utils.scheduler.is_scheduler_inactive(): - frappe.throw( - _("Scheduler is inactive. 
Cannot import data."), title=_("Scheduler Inactive") - ) - - enqueued_jobs = [d.get("job_name") for d in get_info()] - - if self.name not in enqueued_jobs: - enqueue( - start_import, - queue="default", - timeout=6000, - event="data_import", - job_name=self.name, - data_import=self.name, - now=frappe.conf.developer_mode or frappe.flags.in_test, - ) - return True - - return False - - def export_errored_rows(self): - return self.get_importer().export_errored_rows() - - def get_importer(self): - return Importer(self.reference_doctype, data_import=self) - - -@frappe.whitelist() -def get_preview_from_template(data_import): - return frappe.get_doc("Data Import Beta", data_import).get_preview_from_template() - - -@frappe.whitelist() -def form_start_import(data_import): - return frappe.get_doc("Data Import Beta", data_import).start_import() - - -def start_import(data_import): - """This method runs in background job""" - data_import = frappe.get_doc("Data Import Beta", data_import) - try: - i = Importer(data_import.reference_doctype, data_import=data_import) - i.import_data() - except: - frappe.db.rollback() - data_import.db_set("status", "Error") - frappe.log_error(title=data_import.name) - frappe.db.commit() - frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name}) - - -@frappe.whitelist() -def download_template( - doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV" -): - """ - Download template from Exporter - :param doctype: Document Type - :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']} - :param export_records=None: One of 'all', 'by_filter', 'blank_template' - :param export_filters: Filter dict - :param file_type: File type to export into - """ - - export_fields = frappe.parse_json(export_fields) - export_filters = frappe.parse_json(export_filters) - export_data = export_records != "blank_template" - - e = Exporter( - doctype, - 
export_fields=export_fields, - export_data=export_data, - export_filters=export_filters, - file_type=file_type, - export_page_length=5 if export_records == "5_records" else None, - ) - e.build_response() - - -@frappe.whitelist() -def download_errored_template(data_import_name): - data_import = frappe.get_doc("Data Import Beta", data_import_name) - data_import.export_errored_rows() diff --git a/frappe/core/doctype/data_import_beta/data_import_beta_list.js b/frappe/core/doctype/data_import_beta/data_import_beta_list.js deleted file mode 100644 index 58953d2531..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta_list.js +++ /dev/null @@ -1,40 +0,0 @@ -let imports_in_progress = []; - -frappe.listview_settings['Data Import Beta'] = { - onload(listview) { - frappe.realtime.on('data_import_progress', data => { - if (!imports_in_progress.includes(data.data_import)) { - imports_in_progress.push(data.data_import); - } - }); - frappe.realtime.on('data_import_refresh', data => { - imports_in_progress = imports_in_progress.filter( - d => d !== data.data_import - ); - listview.refresh(); - }); - }, - get_indicator: function(doc) { - var colors = { - 'Pending': 'orange', - 'Partial Success': 'orange', - 'Success': 'green', - 'In Progress': 'orange', - 'Error': 'red' - }; - let status = doc.status; - if (imports_in_progress.includes(doc.name)) { - status = 'In Progress'; - } - return [__(status), colors[status], 'status,=,' + doc.status]; - }, - formatters: { - import_type(value) { - return { - 'Insert New Records': __('Insert'), - 'Update Existing Records': __('Update') - }[value]; - } - }, - hide_name_column: true -}; diff --git a/frappe/core/doctype/data_import_beta/__init__.py b/frappe/core/doctype/data_import_legacy/__init__.py similarity index 100% rename from frappe/core/doctype/data_import_beta/__init__.py rename to frappe/core/doctype/data_import_legacy/__init__.py diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.js 
b/frappe/core/doctype/data_import_legacy/data_import_legacy.js new file mode 100644 index 0000000000..9a301af76e --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.js @@ -0,0 +1,324 @@ +// Copyright (c) 2017, Frappe Technologies and contributors +// For license information, please see license.txt + +frappe.ui.form.on('Data Import Legacy', { + onload: function(frm) { + if (frm.doc.__islocal) { + frm.set_value("action", ""); + } + + frappe.call({ + method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes", + callback: function (r) { + let importable_doctypes = r.message; + frm.set_query("reference_doctype", function () { + return { + "filters": { + "issingle": 0, + "istable": 0, + "name": ['in', importable_doctypes] + } + }; + }); + } + }), + + // should never check public + frm.fields_dict["import_file"].df.is_private = 1; + + frappe.realtime.on("data_import_progress", function(data) { + if (data.data_import === frm.doc.name) { + if (data.reload && data.reload === true) { + frm.reload_doc(); + } + if (data.progress) { + let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar"); + if (progress_bar) { + $(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped"); + $(progress_bar).css("width", data.progress + "%"); + } + } + } + }); + }, + + reference_doctype: function(frm){ + if (frm.doc.reference_doctype) { + frappe.model.with_doctype(frm.doc.reference_doctype); + } + }, + + refresh: function(frm) { + frm.disable_save(); + frm.dashboard.clear_headline(); + if (frm.doc.reference_doctype && !frm.doc.import_file) { + frm.page.set_indicator(__('Attach file'), 'orange'); + } else { + if (frm.doc.import_status) { + const listview_settings = frappe.listview_settings['Data Import Legacy']; + const indicator = listview_settings.get_indicator(frm.doc); + + frm.page.set_indicator(indicator[0], indicator[1]); + + if (frm.doc.import_status === "In Progress") { 
+ frm.dashboard.add_progress("Data Import Progress", "0"); + frm.set_read_only(); + frm.refresh_fields(); + } + } + } + + if (frm.doc.reference_doctype) { + frappe.model.with_doctype(frm.doc.reference_doctype); + } + + if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") { + frm.set_df_property("action", "read_only", 1); + } + + frm.add_custom_button(__("Help"), function() { + frappe.help.show_video("6wiriRKPhmg"); + }); + + if (frm.doc.reference_doctype && frm.doc.docstatus === 0) { + frm.add_custom_button(__("Download template"), function() { + frappe.data_import.download_dialog(frm).show(); + }); + } + + if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows && + frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) { + frm.page.set_primary_action(__("Start Import"), function() { + frappe.call({ + btn: frm.page.btn_primary, + method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data", + args: { + data_import: frm.doc.name + } + }); + }).addClass('btn btn-primary'); + } + + if (frm.doc.log_details) { + frm.events.create_log_table(frm); + } else { + $(frm.fields_dict.import_log.wrapper).empty(); + } + }, + + action: function(frm) { + if(!frm.doc.action) return; + if(!frm.doc.reference_doctype) { + frappe.msgprint(__("Please select document type first.")); + frm.set_value("action", ""); + return; + } + + if(frm.doc.action == "Insert new records") { + frm.doc.insert_new = 1; + } else if (frm.doc.action == "Update records"){ + frm.doc.overwrite = 1; + } + frm.save(); + }, + + only_update: function(frm) { + frm.save(); + }, + + submit_after_import: function(frm) { + frm.save(); + }, + + skip_errors: function(frm) { + frm.save(); + }, + + ignore_encoding_errors: function(frm) { + frm.save(); + }, + + no_email: function(frm) { + frm.save(); + }, + + show_only_errors: function(frm) { + frm.events.create_log_table(frm); + }, + + create_log_table: function(frm) { + let 
msg = JSON.parse(frm.doc.log_details); + var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty(); + $(frappe.render_template("log_details", { + data: msg.messages, + import_status: frm.doc.import_status, + show_only_errors: frm.doc.show_only_errors, + })).appendTo($log_wrapper); + } +}); + +frappe.provide('frappe.data_import'); +frappe.data_import.download_dialog = function(frm) { + var dialog; + const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden; + const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields); + + const get_doctype_checkbox_fields = () => { + return dialog.fields.filter(df => df.fieldname.endsWith('_fields')) + .map(df => dialog.fields_dict[df.fieldname]); + }; + + const doctype_fields = get_fields(frm.doc.reference_doctype) + .map(df => { + let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0; + return { + label: df.label, + reqd: reqd, + danger: reqd, + value: df.fieldname, + checked: 1 + }; + }); + + let fields = [ + { + "label": __("Select Columns"), + "fieldname": "select_columns", + "fieldtype": "Select", + "options": "All\nMandatory\nManually", + "reqd": 1, + "onchange": function() { + const fields = get_doctype_checkbox_fields(); + fields.map(f => f.toggle(true)); + if(this.value == 'Mandatory' || this.value == 'Manually') { + checkbox_toggle(true); + fields.map(multicheck_field => { + multicheck_field.options.map(option => { + if(!option.reqd) return; + $(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`) + .prop('checked', false) + .trigger('click'); + }); + }); + } else if(this.value == 'All'){ + $(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`) + .prop('disabled', true); + } + } + }, + { + "label": __("File Type"), + "fieldname": "file_type", + "fieldtype": "Select", + "options": "Excel\nCSV", + "default": "Excel" + }, + { + "label": __("Download with Data"), + "fieldname": "with_data", + "fieldtype": "Check", + "hidden": !frm.doc.overwrite, + 
"default": 1 + }, + { + "label": __("Select All"), + "fieldname": "select_all", + "fieldtype": "Button", + "depends_on": "eval:doc.select_columns=='Manually'", + click: function() { + checkbox_toggle(); + } + }, + { + "label": __("Unselect All"), + "fieldname": "unselect_all", + "fieldtype": "Button", + "depends_on": "eval:doc.select_columns=='Manually'", + click: function() { + checkbox_toggle(true); + } + }, + { + "label": frm.doc.reference_doctype, + "fieldname": "doctype_fields", + "fieldtype": "MultiCheck", + "options": doctype_fields, + "columns": 2, + "hidden": 1 + } + ]; + + const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype) + .map(df => { + return { + "label": df.options, + "fieldname": df.fieldname + '_fields', + "fieldtype": "MultiCheck", + "options": frappe.meta.get_docfields(df.options) + .filter(filter_fields) + .map(df => ({ + label: df.label, + reqd: df.reqd ? 1 : 0, + value: df.fieldname, + checked: 1, + danger: df.reqd + })), + "columns": 2, + "hidden": 1 + }; + }); + + fields = fields.concat(child_table_fields); + + dialog = new frappe.ui.Dialog({ + title: __('Download Template'), + fields: fields, + primary_action: function(values) { + var data = values; + if (frm.doc.reference_doctype) { + var export_params = () => { + let columns = {}; + if(values.select_columns) { + columns = get_doctype_checkbox_fields().reduce((columns, field) => { + const options = field.get_checked_options(); + columns[field.df.label] = options; + return columns; + }, {}); + } + + return { + doctype: frm.doc.reference_doctype, + parent_doctype: frm.doc.reference_doctype, + select_columns: JSON.stringify(columns), + with_data: frm.doc.overwrite && data.with_data, + all_doctypes: true, + file_type: data.file_type, + template: true + }; + }; + let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data'; + open_url_post(get_template_url, export_params()); + } else { + frappe.msgprint(__("Please select the Document 
Type.")); + } + dialog.hide(); + }, + primary_action_label: __('Download') + }); + + $(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]') + .wrapAll('
    '); + const button_container = $(dialog.body).find('.inline-buttons'); + button_container.addClass('flex'); + $(button_container).find('.frappe-control').map((index, button) => { + $(button).css({"margin-right": "1em"}); + }); + + function checkbox_toggle(checked=false) { + $(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => { + $(element).find(`:checkbox`).prop("checked", checked).trigger('click'); + }); + } + + return dialog; +}; diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.json b/frappe/core/doctype/data_import_legacy/data_import_legacy.json new file mode 100644 index 0000000000..852ccba156 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.json @@ -0,0 +1,218 @@ +{ + "actions": [], + "allow_copy": 1, + "creation": "2020-06-11 16:13:23.813709", + "doctype": "DocType", + "document_type": "Document", + "editable_grid": 1, + "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "action", + "insert_new", + "overwrite", + "only_update", + "section_break_4", + "import_file", + "column_break_4", + "error_file", + "section_break_6", + "skip_errors", + "submit_after_import", + "ignore_encoding_errors", + "no_email", + "import_detail", + "import_status", + "show_only_errors", + "import_log", + "log_details", + "amended_from", + "total_rows", + "amended_from" + ], + "fields": [ + { + "fieldname": "reference_doctype", + "fieldtype": "Link", + "ignore_user_permissions": 1, + "in_list_view": 1, + "label": "Document Type", + "options": "DocType", + "reqd": 1 + }, + { + "fieldname": "action", + "fieldtype": "Select", + "label": "Action", + "options": "Insert new records\nUpdate records", + "reqd": 1 + }, + { + "default": "0", + "depends_on": "eval:!doc.overwrite", + "description": "New data will be inserted.", + "fieldname": "insert_new", + "fieldtype": "Check", + "hidden": 1, + "label": "Insert new records", + "set_only_once": 1 + }, + { + "default": "0", + "depends_on": 
"eval:!doc.insert_new", + "description": "If you are updating/overwriting already created records.", + "fieldname": "overwrite", + "fieldtype": "Check", + "hidden": 1, + "label": "Update records", + "set_only_once": 1 + }, + { + "default": "0", + "depends_on": "overwrite", + "description": "If you don't want to create any new records while updating the older records.", + "fieldname": "only_update", + "fieldtype": "Check", + "label": "Don't create new records" + }, + { + "depends_on": "eval:(!doc.__islocal)", + "fieldname": "section_break_4", + "fieldtype": "Section Break" + }, + { + "fieldname": "import_file", + "fieldtype": "Attach", + "label": "Attach file for Import" + }, + { + "fieldname": "column_break_4", + "fieldtype": "Column Break" + }, + { + "depends_on": "eval: doc.import_status == \"Partially Successful\"", + "description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.", + "fieldname": "error_file", + "fieldtype": "Attach", + "label": "Generated File" + }, + { + "depends_on": "eval:(!doc.__islocal)", + "fieldname": "section_break_6", + "fieldtype": "Section Break" + }, + { + "default": "0", + "description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.", + "fieldname": "skip_errors", + "fieldtype": "Check", + "label": "Skip rows with errors" + }, + { + "default": "0", + "fieldname": "submit_after_import", + "fieldtype": "Check", + "label": "Submit after importing" + }, + { + "default": "0", + "fieldname": "ignore_encoding_errors", + "fieldtype": "Check", + "label": "Ignore encoding errors" + }, + { + "default": "1", + "fieldname": "no_email", + "fieldtype": "Check", + "label": "Do not send Emails" + }, + { + "collapsible": 1, + "collapsible_depends_on": "eval: doc.import_status == \"Failed\"", + "depends_on": "import_status", + "fieldname": "import_detail", + "fieldtype": "Section Break", 
+ "label": "Import Log" + }, + { + "fieldname": "import_status", + "fieldtype": "Select", + "label": "Import Status", + "options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful", + "read_only": 1 + }, + { + "allow_on_submit": 1, + "default": "1", + "fieldname": "show_only_errors", + "fieldtype": "Check", + "label": "Show only errors", + "no_copy": 1, + "print_hide": 1 + }, + { + "allow_on_submit": 1, + "depends_on": "import_status", + "fieldname": "import_log", + "fieldtype": "HTML", + "label": "Import Log" + }, + { + "allow_on_submit": 1, + "fieldname": "log_details", + "fieldtype": "Code", + "hidden": 1, + "label": "Log Details", + "read_only": 1 + }, + { + "fieldname": "amended_from", + "fieldtype": "Link", + "label": "Amended From", + "no_copy": 1, + "options": "Data Import", + "print_hide": 1, + "read_only": 1 + }, + { + "fieldname": "total_rows", + "fieldtype": "Int", + "hidden": 1, + "label": "Total Rows", + "read_only": 1 + }, + { + "fieldname": "amended_from", + "fieldtype": "Link", + "label": "Amended From", + "no_copy": 1, + "options": "Data Import Legacy", + "print_hide": 1, + "read_only": 1 + } + ], + "is_submittable": 1, + "links": [], + "max_attachments": 1, + "modified": "2020-06-11 16:13:23.813709", + "modified_by": "Administrator", + "module": "Core", + "name": "Data Import Legacy", + "owner": "Administrator", + "permissions": [ + { + "create": 1, + "delete": 1, + "email": 1, + "read": 1, + "role": "System Manager", + "share": 1, + "submit": 1, + "write": 1 + } + ], + "sort_field": "modified", + "sort_order": "DESC", + "track_changes": 1, + "track_seen": 1 +} \ No newline at end of file diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.py b/frappe/core/doctype/data_import_legacy/data_import_legacy.py new file mode 100644 index 0000000000..df3a3edd3a --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017, Frappe Technologies 
and contributors +# For license information, please see license.txt + +from __future__ import unicode_literals +import frappe, os +from frappe import _ +import frappe.modules.import_file +from frappe.model.document import Document +from frappe.utils.data import format_datetime +from frappe.core.doctype.data_import_legacy.importer import upload +from frappe.utils.background_jobs import enqueue + + +class DataImportLegacy(Document): + def autoname(self): + if not self.name: + self.name = "Import on " +format_datetime(self.creation) + + def validate(self): + if not self.import_file: + self.db_set("total_rows", 0) + if self.import_status == "In Progress": + frappe.throw(_("Can't save the form as data import is in progress.")) + + # validate the template just after the upload + # if there is total_rows in the doc, it means that the template is already validated and error free + if self.import_file and not self.total_rows: + upload(data_import_doc=self, from_data_import="Yes", validate_template=True) + + +@frappe.whitelist() +def get_importable_doctypes(): + return frappe.cache().hget("can_import", frappe.session.user) + +@frappe.whitelist() +def import_data(data_import): + frappe.db.set_value("Data Import Legacy", data_import, "import_status", "In Progress", update_modified=False) + frappe.publish_realtime("data_import_progress", {"progress": "0", + "data_import": data_import, "reload": True}, user=frappe.session.user) + + from frappe.core.page.background_jobs.background_jobs import get_info + enqueued_jobs = [d.get("job_name") for d in get_info()] + + if data_import not in enqueued_jobs: + enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import, + data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user) + + +def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, + insert=False, submit=False, pre_process=None): + if os.path.isdir(path): + files = [os.path.join(path, f) for f in 
os.listdir(path)] + else: + files = [path] + + for f in files: + if f.endswith(".json"): + frappe.flags.mute_emails = True + frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True) + frappe.flags.mute_emails = False + frappe.db.commit() + elif f.endswith(".csv"): + import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process) + frappe.db.commit() + + +def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True): + from frappe.utils.csvutils import read_csv_content + print("Importing " + path) + with open(path, "r") as infile: + upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite, + submit_after_import=submit, pre_process=pre_process) + + +def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"): + def post_process(out): + del_keys = ('modified_by', 'creation', 'owner', 'idx') + for doc in out: + for key in del_keys: + if key in doc: + del doc[key] + for k, v in doc.items(): + if isinstance(v, list): + for child in v: + for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'): + if key in child: + del child[key] + + out = [] + if name: + out.append(frappe.get_doc(doctype, name).as_dict()) + elif frappe.db.get_value("DocType", doctype, "issingle"): + out.append(frappe.get_doc(doctype).as_dict()) + else: + for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by): + out.append(frappe.get_doc(doctype, doc.name).as_dict()) + post_process(out) + + dirname = os.path.dirname(path) + if not os.path.exists(dirname): + path = os.path.join('..', path) + + with open(path, "w") as outfile: + outfile.write(frappe.as_json(out)) + + +def export_csv(doctype, path): + from frappe.core.doctype.data_export.exporter import 
export_data + with open(path, "wb") as csvfile: + export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True) + csvfile.write(frappe.response.result.encode("utf-8")) + + +@frappe.whitelist() +def export_fixture(doctype, app): + if frappe.session.user != "Administrator": + raise frappe.PermissionError + + if not os.path.exists(frappe.get_app_path(app, "fixtures")): + os.mkdir(frappe.get_app_path(app, "fixtures")) + + export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc") diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js b/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js new file mode 100644 index 0000000000..fcf2391313 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js @@ -0,0 +1,24 @@ +frappe.listview_settings['Data Import Legacy'] = { + add_fields: ["import_status"], + has_indicator_for_draft: 1, + get_indicator: function(doc) { + + let status = { + 'Successful': [__("Success"), "green", "import_status,=,Successful"], + 'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"], + 'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"], + 'Failed': [__("Failed"), "red", "import_status,=,Failed"], + 'Pending': [__("Pending"), "orange", "import_status,=,"] + } + + if (doc.import_status) { + return status[doc.import_status]; + } + + if (doc.docstatus == 0) { + return status['Pending']; + } + + return status['Pending']; + } +}; diff --git a/frappe/core/doctype/data_import_legacy/importer.py b/frappe/core/doctype/data_import_legacy/importer.py new file mode 100644 index 0000000000..5bd0daf32b --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/importer.py @@ -0,0 +1,541 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. 
See license.txt + +from __future__ import unicode_literals, print_function + +from six.moves import range +import requests +import frappe, json +import frappe.permissions + +from frappe import _ + +from frappe.utils.csvutils import getlink +from frappe.utils.dateutils import parse_date + +from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url +from six import string_types + + +@frappe.whitelist() +def get_data_keys(): + return frappe._dict({ + "data_separator": _('Start entering data below this line'), + "main_table": _("Table") + ":", + "parent_table": _("Parent Table") + ":", + "columns": _("Column Name") + ":", + "doctype": _("DocType") + ":" + }) + + + +@frappe.whitelist() +def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None, + update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No", + skip_errors = True, data_import_doc=None, validate_template=False, user=None): + """upload data""" + + # for translations + if user: + frappe.cache().hdel("lang", user) + frappe.set_user_lang(user) + + if data_import_doc and isinstance(data_import_doc, string_types): + data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc) + if data_import_doc and from_data_import == "Yes": + no_email = data_import_doc.no_email + ignore_encoding_errors = data_import_doc.ignore_encoding_errors + update_only = data_import_doc.only_update + submit_after_import = data_import_doc.submit_after_import + overwrite = data_import_doc.overwrite + skip_errors = data_import_doc.skip_errors + else: + # extra input params + params = json.loads(frappe.form_dict.get("params") or '{}') + if params.get("submit_after_import"): + submit_after_import = True + if params.get("ignore_encoding_errors"): + ignore_encoding_errors = True + if not params.get("no_email"): + no_email = False + if params.get('update_only'): + update_only = True + if 
params.get('from_data_import'): + from_data_import = params.get('from_data_import') + if not params.get('skip_errors'): + skip_errors = params.get('skip_errors') + + frappe.flags.in_import = True + frappe.flags.mute_emails = no_email + + def get_data_keys_definition(): + return get_data_keys() + + def bad_template(): + frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator)) + + def check_data_length(): + if not data: + frappe.throw(_("No data found in the file. Please reattach the new file with data.")) + + def get_start_row(): + for i, row in enumerate(rows): + if row and row[0]==get_data_keys_definition().data_separator: + return i+1 + bad_template() + + def get_header_row(key): + return get_header_row_and_idx(key)[0] + + def get_header_row_and_idx(key): + for i, row in enumerate(header): + if row and row[0]==key: + return row, i + return [], -1 + + def filter_empty_columns(columns): + empty_cols = list(filter(lambda x: x in ("", None), columns)) + + if empty_cols: + if columns[-1*len(empty_cols):] == empty_cols: + # filter empty columns if they exist at the end + columns = columns[:-1*len(empty_cols)] + else: + frappe.msgprint(_("Please make sure that there are no empty columns in the file."), + raise_exception=1) + + return columns + + def make_column_map(): + doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype) + if row_idx == -1: # old style + return + + dt = None + for i, d in enumerate(doctype_row[1:]): + if d not in ("~", "-"): + if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"): + dt, parentfield = d, None + # xls format truncates the row, so it may not have more columns + if len(doctype_row) > i+2: + parentfield = doctype_row[i+2] + doctypes.append((dt, parentfield)) + column_idx_to_fieldname[(dt, parentfield)] = {} + column_idx_to_fieldtype[(dt, parentfield)] = {} + if dt: + column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1] + 
column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1] + + def get_doc(start_idx): + if doctypes: + doc = {} + attachments = [] + last_error_row_idx = None + for idx in range(start_idx, len(rows)): + last_error_row_idx = idx # pylint: disable=W0612 + if (not doc) or main_doc_empty(rows[idx]): + for dt, parentfield in doctypes: + d = {} + for column_idx in column_idx_to_fieldname[(dt, parentfield)]: + try: + fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx] + fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx] + + if not fieldname or not rows[idx][column_idx]: + continue + + d[fieldname] = rows[idx][column_idx] + if fieldtype in ("Int", "Check"): + d[fieldname] = cint(d[fieldname]) + elif fieldtype in ("Float", "Currency", "Percent"): + d[fieldname] = flt(d[fieldname]) + elif fieldtype == "Date": + if d[fieldname] and isinstance(d[fieldname], string_types): + d[fieldname] = getdate(parse_date(d[fieldname])) + elif fieldtype == "Datetime": + if d[fieldname]: + if " " in d[fieldname]: + _date, _time = d[fieldname].split() + else: + _date, _time = d[fieldname], '00:00:00' + _date = parse_date(d[fieldname]) + d[fieldname] = get_datetime(_date + " " + _time) + else: + d[fieldname] = None + + elif fieldtype in ("Image", "Attach Image", "Attach"): + # added file to attachments list + attachments.append(d[fieldname]) + + elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]: + # as fields can be saved in the number format(long type) in data import template + d[fieldname] = cstr(d[fieldname]) + + except IndexError: + pass + + # scrub quotes from name and modified + if d.get("name") and d["name"].startswith('"'): + d["name"] = d["name"][1:-1] + + if sum([0 if not val else 1 for val in d.values()]): + d['doctype'] = dt + if dt == doctype: + doc.update(d) + else: + if not overwrite and doc.get("name"): + d['parent'] = doc["name"] + d['parenttype'] = doctype + d['parentfield'] = parentfield + 
doc.setdefault(d['parentfield'], []).append(d) + else: + break + + return doc, attachments, last_error_row_idx + else: + doc = frappe._dict(zip(columns, rows[start_idx][1:])) + doc['doctype'] = doctype + return doc, [], None + + # used in testing whether a row is empty or parent row or child row + # checked only 3 first columns since first two columns can be blank for example the case of + # importing the item variant where item code and item name will be blank. + def main_doc_empty(row): + if row: + for i in range(3,0,-1): + if len(row) > i and row[i]: + return False + return True + + def validate_naming(doc): + autoname = frappe.get_meta(doctype).autoname + if autoname: + if autoname[0:5] == 'field': + autoname = autoname[6:] + elif autoname == 'naming_series:': + autoname = 'naming_series' + else: + return True + + if (autoname not in doc) or (not doc[autoname]): + from frappe.model.base_document import get_controller + if not hasattr(get_controller(doctype), "autoname"): + frappe.throw(_("{0} is a mandatory field").format(autoname)) + return True + + users = frappe.db.sql_list("select name from tabUser") + def prepare_for_insert(doc): + # don't block data import if user is not set + # migrating from another system + if not doc.owner in users: + doc.owner = frappe.session.user + if not doc.modified_by in users: + doc.modified_by = frappe.session.user + + def is_valid_url(url): + is_valid = False + if url.startswith("/files") or url.startswith("/private/files"): + url = get_url(url) + + try: + r = requests.get(url) + is_valid = True if r.status_code == 200 else False + except Exception: + pass + + return is_valid + + def attach_file_to_doc(doctype, docname, file_url): + # check if attachment is already available + # check if the attachement link is relative or not + if not file_url: + return + if not is_valid_url(file_url): + return + + files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and + attached_to_name='{docname}' and 
(file_url='{file_url}' or thumbnail_url='{file_url}')""".format( + doctype=doctype, + docname=docname, + file_url=file_url + )) + + if files: + # file is already attached + return + + _file = frappe.get_doc({ + "doctype": "File", + "file_url": file_url, + "attached_to_name": docname, + "attached_to_doctype": doctype, + "attached_to_field": 0, + "folder": "Home/Attachments"}) + _file.save() + + + # header + filename, file_extension = ['',''] + if not rows: + _file = frappe.get_doc("File", {"file_url": data_import_doc.import_file}) + fcontent = _file.get_content() + filename, file_extension = _file.get_extension() + + if file_extension == '.xlsx' and from_data_import == 'Yes': + from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file + rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file) + + elif file_extension == '.csv': + from frappe.utils.csvutils import read_csv_content + rows = read_csv_content(fcontent, ignore_encoding_errors) + + else: + frappe.throw(_("Unsupported File Format")) + + start_row = get_start_row() + header = rows[:start_row] + data = rows[start_row:] + try: + doctype = get_header_row(get_data_keys_definition().main_table)[1] + columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:]) + except: + frappe.throw(_("Cannot change header content")) + doctypes = [] + column_idx_to_fieldname = {} + column_idx_to_fieldtype = {} + + if skip_errors: + data_rows_with_error = header + + if submit_after_import and not cint(frappe.db.get_value("DocType", + doctype, "is_submittable")): + submit_after_import = False + + parenttype = get_header_row(get_data_keys_definition().parent_table) + + if len(parenttype) > 1: + parenttype = parenttype[1] + + # check permissions + if not frappe.permissions.can_import(parenttype or doctype): + frappe.flags.mute_emails = False + return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True} + + # Throw expception in case of the empty data 
file + check_data_length() + make_column_map() + total = len(data) + + if validate_template: + if total: + data_import_doc.total_rows = total + return True + + if overwrite==None: + overwrite = params.get('overwrite') + + # delete child rows (if parenttype) + parentfield = None + if parenttype: + parentfield = get_parent_field(doctype, parenttype) + + if overwrite: + delete_child_rows(data, doctype) + + import_log = [] + def log(**kwargs): + if via_console: + print((kwargs.get("title") + kwargs.get("message")).encode('utf-8')) + else: + import_log.append(kwargs) + + def as_link(doctype, name): + if via_console: + return "{0}: {1}".format(doctype, name) + else: + return getlink(doctype, name) + + # publish realtime task update + def publish_progress(achieved, reload=False): + if data_import_doc: + frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)), + "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user) + + + error_flag = rollback_flag = False + + batch_size = frappe.conf.data_import_batch_size or 1000 + + for batch_start in range(0, total, batch_size): + batch = data[batch_start:batch_start + batch_size] + + for i, row in enumerate(batch): + # bypass empty rows + if main_doc_empty(row): + continue + + row_idx = i + start_row + doc = None + + publish_progress(i) + + try: + doc, attachments, last_error_row_idx = get_doc(row_idx) + validate_naming(doc) + if pre_process: + pre_process(doc) + + original = None + if parentfield: + parent = frappe.get_doc(parenttype, doc["parent"]) + doc = parent.append(parentfield, doc) + parent.save() + else: + if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]): + original = frappe.get_doc(doctype, doc["name"]) + original_name = original.name + original.update(doc) + # preserve original name for case sensitivity + original.name = original_name + original.flags.ignore_links = ignore_links + original.save() + doc = original + else: + if not 
update_only: + doc = frappe.get_doc(doc) + prepare_for_insert(doc) + doc.flags.ignore_links = ignore_links + doc.insert() + if attachments: + # check file url and create a File document + for file_url in attachments: + attach_file_to_doc(doc.doctype, doc.name, file_url) + if submit_after_import: + doc.submit() + + # log errors + if parentfield: + log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)), + "link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"}) + elif submit_after_import: + log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)), + "message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"}) + elif original: + log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)), + "message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) + elif not update_only: + log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)), + "message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"}) + else: + log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None, + "message": "Document updation ignored", "indicator": "orange"}) + + except Exception as e: + error_flag = True + + # build error message + if frappe.local.message_log: + err_msg = "\n".join(['

    {}

    '.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) + else: + err_msg = '

    {}

    '.format(cstr(e)) + + error_trace = frappe.get_traceback() + if error_trace: + error_log_doc = frappe.log_error(error_trace) + error_link = get_absolute_url("Error Log", error_log_doc.name) + else: + error_link = None + + log(**{ + "row": row_idx + 1, + "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), + "message": err_msg, + "indicator": "red", + "link":error_link + }) + + # data with error to create a new file + # include the errored data in the last row as last_error_row_idx will not be updated for the last row + if skip_errors: + if last_error_row_idx == len(rows)-1: + last_error_row_idx = len(rows) + data_rows_with_error += rows[row_idx:last_error_row_idx] + else: + rollback_flag = True + finally: + frappe.local.message_log = [] + + start_row += batch_size + if rollback_flag: + frappe.db.rollback() + else: + frappe.db.commit() + + frappe.flags.mute_emails = False + frappe.flags.in_import = False + + log_message = {"messages": import_log, "error": error_flag} + if data_import_doc: + data_import_doc.log_details = json.dumps(log_message) + + import_status = None + if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): + import_status = "Partially Successful" + # write the file with the faulty row + file_name = 'error_' + filename + file_extension + if file_extension == '.xlsx': + from frappe.utils.xlsxutils import make_xlsx + xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") + file_data = xlsx_file.getvalue() + else: + from frappe.utils.csvutils import to_csv + file_data = to_csv(data_rows_with_error) + _file = frappe.get_doc({ + "doctype": "File", + "file_name": file_name, + "attached_to_doctype": "Data Import Legacy", + "attached_to_name": data_import_doc.name, + "folder": "Home/Attachments", + "content": file_data}) + _file.save() + data_import_doc.error_file = _file.file_url + + elif error_flag: + import_status = "Failed" + else: + import_status = "Successful" + + 
data_import_doc.import_status = import_status + data_import_doc.save() + if data_import_doc.import_status in ["Successful", "Partially Successful"]: + data_import_doc.submit() + publish_progress(100, True) + else: + publish_progress(0, True) + frappe.db.commit() + else: + return log_message + +def get_parent_field(doctype, parenttype): + parentfield = None + + # get parentfield + if parenttype: + for d in frappe.get_meta(parenttype).get_table_fields(): + if d.options==doctype: + parentfield = d.fieldname + break + + if not parentfield: + frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) + raise Exception + + return parentfield + +def delete_child_rows(rows, doctype): + """delete child rows for all parents""" + for p in list(set([r[1] for r in rows])): + if p: + frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) diff --git a/frappe/core/doctype/data_import/log_details.html b/frappe/core/doctype/data_import_legacy/log_details.html similarity index 100% rename from frappe/core/doctype/data_import/log_details.html rename to frappe/core/doctype/data_import_legacy/log_details.html diff --git a/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py new file mode 100644 index 0000000000..e5b244e6a0 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2020, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +# import frappe +import unittest + +class TestDataImportLegacy(unittest.TestCase): + pass diff --git a/frappe/core/doctype/docfield/docfield.json b/frappe/core/doctype/docfield/docfield.json index 83d3c18453..aab59a5a0a 100644 --- a/frappe/core/doctype/docfield/docfield.json +++ b/frappe/core/doctype/docfield/docfield.json @@ -13,8 +13,8 @@ "fieldname", "precision", "length", - 
"show_days", - "show_seconds", + "hide_days", + "hide_seconds", "reqd", "search_index", "in_list_view", @@ -453,18 +453,18 @@ "fieldtype": "Column Break" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days" + "label": "Hide Days" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds" + "label": "Hide Seconds" }, { "default": "0", @@ -477,7 +477,7 @@ "idx": 1, "istable": 1, "links": [], - "modified": "2020-05-15 09:06:25.224411", + "modified": "2020-02-06 09:06:25.224413", "modified_by": "Administrator", "module": "Core", "name": "DocField", diff --git a/frappe/core/doctype/doctype/doctype.py b/frappe/core/doctype/doctype/doctype.py index 904deb9990..6ca3cccdba 100644 --- a/frappe/core/doctype/doctype/doctype.py +++ b/frappe/core/doctype/doctype/doctype.py @@ -688,6 +688,9 @@ def validate_fields(meta): def check_link_table_options(docname, d): if frappe.flags.in_patch: return + + if frappe.flags.in_fixtures: return + if d.fieldtype in ("Link",) + table_fields: if not d.options: frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError) @@ -908,6 +911,8 @@ def validate_fields(meta): frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True) def check_child_table_option(docfield): + + if frappe.flags.in_fixtures: return if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return doctype = docfield.options diff --git a/frappe/core/doctype/file/file.py b/frappe/core/doctype/file/file.py index a17b3acd02..831d2ab22d 100755 --- a/frappe/core/doctype/file/file.py +++ 
b/frappe/core/doctype/file/file.py @@ -182,11 +182,11 @@ class File(Document): if duplicate_file: duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name) if duplicate_file_doc.exists_on_disk(): - # if it is attached to a document then throw DuplicateEntryError + # if it is attached to a document then throw FileAlreadyAttachedException if self.attached_to_doctype and self.attached_to_name: self.duplicate_entry = duplicate_file.name frappe.throw(_("Same file has already been attached to the record"), - frappe.DuplicateEntryError) + frappe.FileAlreadyAttachedException) # else just use the url, to avoid uploading a duplicate else: self.file_url = duplicate_file.file_url @@ -714,7 +714,12 @@ def remove_all(dt, dn, from_delete=False): try: for fid in frappe.db.sql_list("""select name from `tabFile` where attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)): - remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete) + if from_delete: + # If deleting a doc, directly delete files + frappe.delete_doc("File", fid, ignore_permissions=True) + else: + # Removes file and adds a comment in the document it is attached to + remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete) except Exception as e: if e.args[0]!=1054: raise # (temp till for patched) diff --git a/frappe/core/doctype/session_default_settings/session_default_settings.py b/frappe/core/doctype/session_default_settings/session_default_settings.py index 453ece2890..7b4bd19e9a 100644 --- a/frappe/core/doctype/session_default_settings/session_default_settings.py +++ b/frappe/core/doctype/session_default_settings/session_default_settings.py @@ -28,8 +28,7 @@ def get_session_default_values(): @frappe.whitelist() def set_session_default_values(default_values): - if not frappe.flags.in_test: - default_values = json.loads(default_values) + default_values = frappe.parse_json(default_values) for entry in default_values: try: 
frappe.defaults.set_user_default(entry, default_values.get(entry)) diff --git a/frappe/core/doctype/user/user.py b/frappe/core/doctype/user/user.py index 0c5ebc3ede..7b9266ff64 100644 --- a/frappe/core/doctype/user/user.py +++ b/frappe/core/doctype/user/user.py @@ -4,7 +4,7 @@ from __future__ import unicode_literals, print_function import frappe from frappe.model.document import Document -from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today +from frappe.utils import cint, flt, has_gravatar, format_datetime, now_datetime, get_formatted_email, today from frappe import throw, msgprint, _ from frappe.utils.password import update_password as _update_password from frappe.desk.notifications import clear_notifications @@ -841,11 +841,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters): def get_total_users(): """Returns total no. of system users""" - return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) + return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) FROM `tabUser` WHERE `enabled` = 1 AND `user_type` = 'System User' - AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0] + AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]) def get_system_users(exclude_users=None, limit=None): if not exclude_users: diff --git a/frappe/core/doctype/version/version.py b/frappe/core/doctype/version/version.py index 216cdb1716..7654db4ae5 100644 --- a/frappe/core/doctype/version/version.py +++ b/frappe/core/doctype/version/version.py @@ -21,6 +21,17 @@ class Version(Document): else: return False + def for_insert(self, doc): + updater_reference = doc.flags.updater_reference + data = { + 'creation': doc.creation, + 'updater_reference': updater_reference, + 'created_by': doc.owner + } + self.ref_doctype = doc.doctype + self.docname = doc.name + self.data = frappe.as_json(data) + def get_data(self): return 
json.loads(self.data) diff --git a/frappe/custom/doctype/custom_field/custom_field.json b/frappe/custom/doctype/custom_field/custom_field.json index 77490c8c43..6fa7b29161 100644 --- a/frappe/custom/doctype/custom_field/custom_field.json +++ b/frappe/custom/doctype/custom_field/custom_field.json @@ -16,8 +16,8 @@ "column_break_6", "fieldtype", "precision", - "show_seconds", - "show_days", + "hide_seconds", + "hide_days", "options", "fetch_from", "fetch_if_empty", @@ -383,22 +383,18 @@ "label": "In Preview" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Seconds" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Days" }, { "default": "0", @@ -411,7 +407,7 @@ "icon": "fa fa-glass", "idx": 1, "links": [], - "modified": "2020-05-15 23:43:00.123572", + "modified": "2020-02-06 23:43:00.123575", "modified_by": "Administrator", "module": "Custom", "name": "Custom Field", diff --git a/frappe/custom/doctype/custom_field/custom_field.py b/frappe/custom/doctype/custom_field/custom_field.py index a24777a80a..bc325b654e 100644 --- a/frappe/custom/doctype/custom_field/custom_field.py +++ b/frappe/custom/doctype/custom_field/custom_field.py @@ -31,6 +31,13 @@ class CustomField(Document): # fieldnames should be lowercase self.fieldname = self.fieldname.lower() + def before_insert(self): + meta = frappe.get_meta(self.dt, cached=False) + fieldnames = [df.fieldname for df in meta.get("fields")] + + if self.fieldname in fieldnames: + frappe.throw(_("A field with the name '{}' 
already exists in doctype {}.").format(self.fieldname, self.dt)) + def validate(self): meta = frappe.get_meta(self.dt, cached=False) fieldnames = [df.fieldname for df in meta.get("fields")] @@ -46,9 +53,6 @@ class CustomField(Document): if not self.fieldname: frappe.throw(_("Fieldname not set for Custom Field")) - if self.fieldname in fieldnames: - frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt)) - if self.get('translatable', 0) and not supports_translation(self.fieldtype): self.translatable = 0 @@ -68,6 +72,11 @@ class CustomField(Document): frappe.db.updatedb(self.dt) def on_trash(self): + #check if Admin owned field + if self.owner == 'Administrator' and frappe.session.user != 'Administrator': + frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format( + frappe.bold(self.label))) + # delete property setter entries frappe.db.sql("""\ DELETE FROM `tabProperty Setter` diff --git a/frappe/custom/doctype/customize_form/customize_form.py b/frappe/custom/doctype/customize_form/customize_form.py index 6a54d9c7e6..d4eeba3f93 100644 --- a/frappe/custom/doctype/customize_form/customize_form.py +++ b/frappe/custom/doctype/customize_form/customize_form.py @@ -77,7 +77,9 @@ docfield_properties = { 'allow_bulk_edit': 'Check', 'auto_repeat': 'Link', 'allow_in_quick_entry': 'Check', - 'hide_border': 'Check' + 'hide_border': 'Check', + 'hide_days': 'Check', + 'hide_seconds': 'Check' } allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'), diff --git a/frappe/custom/doctype/customize_form_field/customize_form_field.json b/frappe/custom/doctype/customize_form_field/customize_form_field.json index f422c36e61..267213517c 100644 --- a/frappe/custom/doctype/customize_form_field/customize_form_field.json +++ b/frappe/custom/doctype/customize_form_field/customize_form_field.json @@ -11,8 +11,8 @@ "label", "fieldtype", 
"fieldname", - "show_seconds", - "show_days", + "hide_seconds", + "hide_days", "reqd", "unique", "in_list_view", @@ -393,22 +393,18 @@ "label": "In Preview" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Seconds" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Days" }, { "default": "0", @@ -421,7 +417,7 @@ "idx": 1, "istable": 1, "links": [], - "modified": "2020-05-15 23:45:46.810869", + "modified": "2020-06-02 23:45:46.810868", "modified_by": "Administrator", "module": "Custom", "name": "Customize Form Field", diff --git a/frappe/database/mariadb/framework_mariadb.sql b/frappe/database/mariadb/framework_mariadb.sql index bd93069a3f..af537e0612 100644 --- a/frappe/database/mariadb/framework_mariadb.sql +++ b/frappe/database/mariadb/framework_mariadb.sql @@ -64,6 +64,8 @@ CREATE TABLE `tabDocField` ( `length` int(11) NOT NULL DEFAULT 0, `translatable` int(1) NOT NULL DEFAULT 0, `hide_border` int(1) NOT NULL DEFAULT 0, + `hide_days` int(1) NOT NULL DEFAULT 0, + `hide_seconds` int(1) NOT NULL DEFAULT 0, PRIMARY KEY (`name`), KEY `parent` (`parent`), KEY `label` (`label`), diff --git a/frappe/database/postgres/framework_postgres.sql b/frappe/database/postgres/framework_postgres.sql index 76309e7347..8f77ed6230 100644 --- a/frappe/database/postgres/framework_postgres.sql +++ b/frappe/database/postgres/framework_postgres.sql @@ -64,6 +64,8 @@ CREATE TABLE "tabDocField" ( "length" bigint NOT NULL DEFAULT 0, "translatable" smallint NOT NULL DEFAULT 0, 
"hide_border" smallint NOT NULL DEFAULT 0, + "hide_days" smallint NOT NULL DEFAULT 0, + "hide_seconds" smallint NOT NULL DEFAULT 0, PRIMARY KEY ("name") ) ; diff --git a/frappe/desk/desktop.py b/frappe/desk/desktop.py index 956308568b..142c103c68 100644 --- a/frappe/desk/desktop.py +++ b/frappe/desk/desktop.py @@ -168,7 +168,6 @@ class Workspace: 'subtitle': _(self.onboarding_doc.subtitle), 'success': _(self.onboarding_doc.success_message), 'docs_url': self.onboarding_doc.documentation_url, - 'user_can_dismiss': self.onboarding_doc.user_can_dismiss, 'items': self.get_onboarding_steps() } @handle_not_exist diff --git a/frappe/desk/doctype/calendar_view/calendar_view.json b/frappe/desk/doctype/calendar_view/calendar_view.json index 04839abc9f..ea220c335c 100644 --- a/frappe/desk/doctype/calendar_view/calendar_view.json +++ b/frappe/desk/doctype/calendar_view/calendar_view.json @@ -1,208 +1,81 @@ { - "allow_copy": 0, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, + "actions": [], "autoname": "Prompt", - "beta": 0, "creation": "2017-10-23 13:02:10.295824", - "custom": 0, - "docstatus": 0, "doctype": "DocType", - "document_type": "", "editable_grid": 1, "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "subject_field", + "start_date_field", + "end_date_field", + "column_break_5", + "all_day" + ], "fields": [ { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "reference_doctype", "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Reference Document Type", - "length": 0, - "no_copy": 0, "options": "DocType", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - 
"translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "subject_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Subject Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "start_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "Start Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "end_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "End Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 + }, + { + "fieldname": 
"column_break_5", + "fieldtype": "Column Break" + }, + { + "default": "0", + "fieldname": "all_day", + "fieldtype": "Check", + "label": "All Day" } ], - "has_web_view": 0, - "hide_heading": 0, - "hide_toolbar": 0, - "idx": 0, - "image_view": 0, - "in_create": 0, - "is_submittable": 0, - "issingle": 0, - "istable": 0, - "max_attachments": 0, - "modified": "2019-09-05 14:22:27.664645", + "links": [], + "modified": "2020-06-15 11:24:57.639430", "modified_by": "Administrator", "module": "Desk", "name": "Calendar View", - "name_case": "", "owner": "faris@erpnext.com", "permissions": [ { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, "create": 1, "delete": 1, "email": 1, "export": 1, - "if_owner": 0, - "import": 0, - "permlevel": 0, "print": 1, "read": 1, "report": 1, "role": "System Manager", - "set_user_permissions": 0, "share": 1, - "submit": 0, "write": 1 }, { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, - "create": 0, - "delete": 0, - "email": 0, - "export": 0, - "if_owner": 0, - "import": 0, - "permlevel": 0, - "print": 0, "read": 1, - "report": 0, - "role": "All", - "set_user_permissions": 0, - "share": 0, - "submit": 0, - "write": 0 + "role": "All" } ], - "quick_entry": 0, - "read_only": 0, - "read_only_onload": 0, - "show_name_in_global_search": 0, "sort_field": "modified", - "sort_order": "DESC", - "track_changes": 0, - "track_seen": 0 + "sort_order": "DESC" } \ No newline at end of file diff --git a/frappe/desk/doctype/dashboard_chart/dashboard_chart.py b/frappe/desk/doctype/dashboard_chart/dashboard_chart.py index ab1863ca0b..c6343dd187 100644 --- a/frappe/desk/doctype/dashboard_chart/dashboard_chart.py +++ b/frappe/desk/doctype/dashboard_chart/dashboard_chart.py @@ -26,15 +26,15 @@ def get_permission_query_conditions(user): if "System Manager" in roles: return None - allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read()) - allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in 
get_allowed_reports()]) + allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()] + allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()] return ''' - `tabDashboard Chart`.`document_type` in {allowed_doctypes} - or `tabDashboard Chart`.`report_name` in {allowed_reports} + `tabDashboard Chart`.`document_type` in ({allowed_doctypes}) + or `tabDashboard Chart`.`report_name` in ({allowed_reports}) '''.format( - allowed_doctypes=allowed_doctypes, - allowed_reports=allowed_reports + allowed_doctypes=','.join(allowed_doctypes), + allowed_reports=','.join(allowed_reports) ) diff --git a/frappe/desk/doctype/module_onboarding/module_onboarding.json b/frappe/desk/doctype/module_onboarding/module_onboarding.json index 0667ddf6ad..02a18b9c2d 100644 --- a/frappe/desk/doctype/module_onboarding/module_onboarding.json +++ b/frappe/desk/doctype/module_onboarding/module_onboarding.json @@ -13,7 +13,6 @@ "column_break_4", "success_message", "documentation_url", - "user_can_dismiss", "is_complete", "section_break_6", "steps" @@ -53,13 +52,6 @@ "label": "Success Message", "reqd": 1 }, - { - "default": "1", - "description": "Allow users to dismiss onboarding temporarily for a day", - "fieldname": "user_can_dismiss", - "fieldtype": "Check", - "label": "User Can Dismiss " - }, { "fieldname": "documentation_url", "fieldtype": "Data", @@ -90,7 +82,7 @@ } ], "links": [], - "modified": "2020-05-18 19:42:39.738869", + "modified": "2020-06-08 15:36:04.701049", "modified_by": "Administrator", "module": "Desk", "name": "Module Onboarding", diff --git a/frappe/desk/doctype/notification_log/notification_log.py b/frappe/desk/doctype/notification_log/notification_log.py index 211b3ae5e6..12f2c41274 100644 --- a/frappe/desk/doctype/notification_log/notification_log.py +++ b/frappe/desk/doctype/notification_log/notification_log.py @@ -100,14 +100,16 @@ def send_notification_email(doc): ) def 
get_email_header(doc): - return { + docname = doc.document_name + header_map = { 'Default': _('New Notification'), - 'Mention': _('New Mention'), - 'Assignment': _('New Assignment'), - 'Share': _('New Document Shared'), - 'Energy Point': _('Energy Point Update'), - }[doc.type or 'Default'] + 'Mention': _('New Mention on {0}').format(docname), + 'Assignment': _('Assignment Update on {0}').format(docname), + 'Share': _('New Document Shared {0}').format(docname), + 'Energy Point': _('Energy Point Update on {0}').format(docname), + } + return header_map[doc.type or 'Default'] @frappe.whitelist() def mark_all_as_read(): diff --git a/frappe/desk/doctype/number_card/number_card.py b/frappe/desk/doctype/number_card/number_card.py index 6bb9c7d45c..c4a427c4e0 100644 --- a/frappe/desk/doctype/number_card/number_card.py +++ b/frappe/desk/doctype/number_card/number_card.py @@ -27,12 +27,12 @@ def get_permission_query_conditions(user=None): if "System Manager" in roles: return None - allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read()) + allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()] return ''' - `tabNumber Card`.`document_type` in {allowed_doctypes} + `tabNumber Card`.`document_type` in ({allowed_doctypes}) '''.format( - allowed_doctypes=allowed_doctypes, + allowed_doctypes=','.join(allowed_doctypes) ) def has_permission(doc, ptype, user): diff --git a/frappe/desk/form/assign_to.py b/frappe/desk/form/assign_to.py index a916cbca82..26b2bd2835 100644 --- a/frappe/desk/form/assign_to.py +++ b/frappe/desk/form/assign_to.py @@ -178,7 +178,8 @@ def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE', description_html = "
    {0}
    ".format(description) if description else None if action=='CLOSE': - subject = _('Your assignment on {0} {1} has been removed').format(frappe.bold(doc_type), get_title_html(title)) + subject = _('Your assignment on {0} {1} has been removed by {2}')\ + .format(frappe.bold(doc_type), get_title_html(title), frappe.bold(user_name)) else: user_name = frappe.bold(user_name) document_type = frappe.bold(doc_type) diff --git a/frappe/desk/listview.py b/frappe/desk/listview.py index 1bce14fb2d..1d10a13930 100644 --- a/frappe/desk/listview.py +++ b/frappe/desk/listview.py @@ -35,7 +35,7 @@ def get_group_by_count(doctype, current_filters, field): from `tabToDo`, `tabUser` where - `tabToDo`.status='Open' and + `tabToDo`.status!='Cancelled' and `tabToDo`.owner = `tabUser`.name and `tabUser`.user_type = 'System User' {subquery_condition} diff --git a/frappe/desk/notifications.py b/frappe/desk/notifications.py index 4a1302788b..4b584a2429 100644 --- a/frappe/desk/notifications.py +++ b/frappe/desk/notifications.py @@ -252,7 +252,7 @@ def get_open_count(doctype, name, items=[]): continue filters = get_filters_for(d) - fieldname = links.get("non_standard_fieldnames", {}).get(d, links.fieldname) + fieldname = links.get("non_standard_fieldnames", {}).get(d, links.get('fieldname')) data = {"name": d} if filters: # get the fieldname for the current document diff --git a/frappe/email/doctype/email_account/email_account.py b/frappe/email/doctype/email_account/email_account.py index 082b16c17a..ff09024f69 100755 --- a/frappe/email/doctype/email_account/email_account.py +++ b/frappe/email/doctype/email_account/email_account.py @@ -10,7 +10,7 @@ import socket import time from frappe import _ from frappe.model.document import Document -from frappe.utils import validate_email_address, cint, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days +from frappe.utils import validate_email_address, cint, cstr, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days from 
frappe.utils.user import is_system_user from frappe.utils.jinja import render_template from frappe.email.smtp import SMTPServer @@ -169,19 +169,20 @@ class EmailAccount(Document): try: email_server.connect() except (error_proto, imaplib.IMAP4.error) as e: - message = e.message.lower().replace(" ","") - if in_receive and any(map(lambda t: t in message, ['authenticationfail', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed + e = cstr(e) + message = e.lower().replace(" ","") + if in_receive and any(map(lambda t: t in message, ['authenticationfailed', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed 'loginfailed', 'err[auth]', 'errtemporaryerror'])): #temporary error to deal with godaddy # if called via self.receive and it leads to authentication error, disable incoming # and send email to system manager self.handle_incoming_connect_error( - description=_('Authentication failed while receiving emails from Email Account {0}. Message from server: {1}').format(self.name, e.message) + description=_('Authentication failed while receiving emails from Email Account {0}. 
Message from server: {1}').format(self.name, e) ) return None else: - frappe.throw(e.message) + frappe.throw(e) except socket.error: if in_receive: diff --git a/frappe/email/doctype/notification/notification.py b/frappe/email/doctype/notification/notification.py index 8e53b50fa2..81670756f6 100644 --- a/frappe/email/doctype/notification/notification.py +++ b/frappe/email/doctype/notification/notification.py @@ -119,15 +119,17 @@ def get_context(context): if self.is_standard: self.load_standard_properties(context) + try: + if self.channel == 'Email': + self.send_an_email(doc, context) - if self.channel == 'Email': - self.send_an_email(doc, context) + if self.channel == 'Slack': + self.send_a_slack_msg(doc, context) - if self.channel == 'Slack': - self.send_a_slack_msg(doc, context) - - if self.channel == 'System Notification' or self.send_system_notification: - self.create_system_notification(doc, context) + if self.channel == 'System Notification' or self.send_system_notification: + self.create_system_notification(doc, context) + except: + frappe.log_error(title='Failed to send notification', message=frappe.get_traceback()) if self.set_property_after_alert: allow_update = True diff --git a/frappe/event_streaming/doctype/event_producer/event_producer.py b/frappe/event_streaming/doctype/event_producer/event_producer.py index 62f78ca56a..d4aa0914d8 100644 --- a/frappe/event_streaming/doctype/event_producer/event_producer.py +++ b/frappe/event_streaming/doctype/event_producer/event_producer.py @@ -35,7 +35,8 @@ class EventProducer(Document): self.create_custom_fields() else: # when producer doc is updated it updates the consumer doc, set flag to avoid deadlock - frappe.db.set_value(self.doctype, self.name, 'incoming_change', 0) + self.db_set('incoming_change', 0) + self.reload() def check_url(self): if not validate_url(self.producer_url): diff --git a/frappe/event_streaming/doctype/event_producer/test_event_producer.py 
b/frappe/event_streaming/doctype/event_producer/test_event_producer.py index 90f450bfcc..0d414f5d63 100644 --- a/frappe/event_streaming/doctype/event_producer/test_event_producer.py +++ b/frappe/event_streaming/doctype/event_producer/test_event_producer.py @@ -9,42 +9,20 @@ import json from frappe.frappeclient import FrappeClient from frappe.event_streaming.doctype.event_producer.event_producer import pull_from_node -def create_event_producer(producer_url): - producer = frappe.db.exists('Event Producer', producer_url) - if producer: - event_producer = frappe.get_doc('Event Producer', producer) - else: - event_producer = frappe.new_doc('Event Producer') - event_producer.producer_doctypes = [] - event_producer.producer_url = producer_url - event_producer.append('producer_doctypes', { - 'ref_doctype': 'ToDo', - 'use_same_name': 1 - }) - event_producer.append('producer_doctypes', { - 'ref_doctype': 'Note', - 'use_same_name': 1 - }) - event_producer.user = 'Administrator' - event_producer.save() - event_producer.reload() - +producer_url = 'http://test_site_producer:8000' class TestEventProducer(unittest.TestCase): def setUp(self): - self.producer_url = 'http://test_site_producer:8000' - create_event_producer(self.producer_url) - frappe.db.sql('delete from tabToDo') - frappe.db.sql('delete from tabNote') + create_event_producer(producer_url) def test_insert(self): - producer = self.get_remote_site() + producer = get_remote_site() producer_doc = insert_into_producer(producer, 'test creation 1 sync') self.pull_producer_data() self.assertTrue(frappe.db.exists('ToDo', producer_doc.name)) def test_update(self): - producer = self.get_remote_site() + producer = get_remote_site() producer_doc = insert_into_producer(producer, 'test update 1') producer_doc['description'] = 'test update 2' producer_doc = producer.update(producer_doc) @@ -53,7 +31,7 @@ class TestEventProducer(unittest.TestCase): self.assertEqual(local_doc.description, producer_doc.description) def test_delete(self): 
- producer = self.get_remote_site() + producer = get_remote_site() producer_doc = insert_into_producer(producer, 'test delete sync') self.pull_producer_data() self.assertTrue(frappe.db.exists('ToDo', producer_doc.name)) @@ -62,17 +40,17 @@ class TestEventProducer(unittest.TestCase): self.assertFalse(frappe.db.exists('ToDo', producer_doc.name)) def test_multiple_doctypes_sync(self): - producer = self.get_remote_site() + producer = get_remote_site() #insert todo and note in producer producer_todo = insert_into_producer(producer, 'test multiple doc sync') - producer_note1 = frappe.get_doc(dict(doctype='Note', title='test multiple doc sync 1')) - delete_on_remote_if_exists(producer, 'Note', {'title': producer_note1.title}) - frappe.db.delete('Note', {'title': producer_note1.title}) + producer_note1 = frappe._dict(doctype='Note', title='test multiple doc sync 1') + delete_on_remote_if_exists(producer, 'Note', {'title': producer_note1['title']}) + frappe.db.delete('Note', {'title': producer_note1['title']}) producer_note1 = producer.insert(producer_note1) - producer_note2 = frappe.get_doc(dict(doctype='Note', title='test multiple doc sync 2')) - delete_on_remote_if_exists(producer, 'Note', {'title': producer_note2.title}) - frappe.db.delete('Note', {'title': producer_note2.title}) + producer_note2 = frappe._dict(doctype='Note', title='test multiple doc sync 2') + delete_on_remote_if_exists(producer, 'Note', {'title': producer_note2['title']}) + frappe.db.delete('Note', {'title': producer_note2['title']}) producer_note2 = producer.insert(producer_note2) #update in producer @@ -98,22 +76,19 @@ class TestEventProducer(unittest.TestCase): self.assertFalse(frappe.db.exists('Note', producer_note2.name)) def test_child_table_sync_with_dependencies(self): - producer = self.get_remote_site() - producer_user = frappe.get_doc(dict(doctype='User', email='test_user@sync.com', first_name='Test Sync User')) + producer = get_remote_site() + producer_user = frappe._dict(doctype='User', 
email='test_user@sync.com', send_welcome_email=0, + first_name='Test Sync User', enabled=1, roles=[{'role': 'System Manager'}]) delete_on_remote_if_exists(producer, 'User', {'email': producer_user.email}) frappe.db.delete('User', {'email':producer_user.email}) - producer_user.enabled = 1 - producer_user.append('roles', { - 'role': 'System Manager' - }) producer_user = producer.insert(producer_user) - producer_note = frappe.get_doc(dict(doctype='Note', title='test child table dependency sync')) - producer_note.append('seen_by', { - 'user': producer_user.name - }) + + producer_note = frappe._dict(doctype='Note', title='test child table dependency sync', + seen_by=[{'user': producer_user.name}]) delete_on_remote_if_exists(producer, 'Note', {'title': producer_note.title}) frappe.db.delete('Note', {'title': producer_note.title}) producer_note = producer.insert(producer_note) + self.pull_producer_data() self.assertTrue(frappe.db.exists('User', producer_user.name)) if self.assertTrue(frappe.db.exists('Note', producer_note.name)): @@ -121,24 +96,23 @@ class TestEventProducer(unittest.TestCase): self.assertEqual(len(local_note.seen_by), 1) def test_dynamic_link_dependencies_synced(self): + producer = get_remote_site() #unsubscribe for Note to check whether dependency is fulfilled - event_producer = frappe.get_doc('Event Producer', self.producer_url) + event_producer = frappe.get_doc('Event Producer', producer_url) event_producer.producer_doctypes = [] event_producer.append('producer_doctypes', { 'ref_doctype': 'ToDo', 'use_same_name': 1 }) event_producer.save() - event_producer.reload() - producer = self.get_remote_site() - producer_link_doc = frappe.get_doc(dict(doctype='Note', title='Test Dynamic Link 1')) + producer_link_doc = frappe._dict(doctype='Note', title='Test Dynamic Link 1') delete_on_remote_if_exists(producer, 'Note', {'title': producer_link_doc.title}) frappe.db.delete('Note', {'title': producer_link_doc.title}) producer_link_doc = 
producer.insert(producer_link_doc) - producer_doc = frappe.get_doc(dict(doctype='ToDo', description='Test Dynamic Link 2', assigned_by='Administrator', - reference_type='Note', reference_name=producer_link_doc.name)) + producer_doc = frappe._dict(doctype='ToDo', description='Test Dynamic Link 2', assigned_by='Administrator', + reference_type='Note', reference_name=producer_link_doc.name) producer_doc = producer.insert(producer_doc) self.pull_producer_data() @@ -147,39 +121,42 @@ class TestEventProducer(unittest.TestCase): self.assertTrue(frappe.db.exists('Note', producer_link_doc.name)) self.assertEqual(producer_link_doc.name, frappe.db.get_value('ToDo', producer_doc.name, 'reference_name')) + reset_configuration(producer_url) + def test_naming_configuration(self): #test with use_same_name = 0 - event_producer = frappe.get_doc('Event Producer', self.producer_url) + producer = get_remote_site() + event_producer = frappe.get_doc('Event Producer', producer_url) event_producer.producer_doctypes = [] event_producer.append('producer_doctypes', { 'ref_doctype': 'ToDo', 'use_same_name': 0 }) event_producer.save() - event_producer.reload() - producer = self.get_remote_site() producer_doc = insert_into_producer(producer, 'test different name sync') self.pull_producer_data() - self.assertTrue(frappe.db.exists('ToDo', {'remote_docname': producer_doc.name, 'remote_site_name': self.producer_url})) + self.assertTrue(frappe.db.exists('ToDo', {'remote_docname': producer_doc.name, 'remote_site_name': producer_url})) + + reset_configuration(producer_url) def test_update_log(self): - producer = self.get_remote_site() + producer = get_remote_site() producer_doc = insert_into_producer(producer, 'test update log') update_log_doc = producer.get_value('Event Update Log', 'docname', {'docname': producer_doc.get('name')}) self.assertEqual(update_log_doc.get('docname'), producer_doc.get('name')) def test_event_sync_log(self): - producer = self.get_remote_site() + producer = get_remote_site() 
producer_doc = insert_into_producer(producer, 'test event sync log') self.pull_producer_data() self.assertTrue(frappe.db.exists('Event Sync Log', {'docname': producer_doc.name})) def pull_producer_data(self): - pull_from_node(self.producer_url) + pull_from_node(producer_url) def get_remote_site(self): - producer_doc = frappe.get_doc('Event Producer', self.producer_url) + producer_doc = frappe.get_doc('Event Producer', producer_url) producer_site = FrappeClient( url=producer_doc.producer_url, api_key=producer_doc.api_key, @@ -189,7 +166,8 @@ class TestEventProducer(unittest.TestCase): return producer_site def test_mapping(self): - event_producer = frappe.get_doc('Event Producer', self.producer_url) + producer = get_remote_site() + event_producer = frappe.get_doc('Event Producer', producer_url) event_producer.producer_doctypes = [] mapping = [{ 'local_fieldname': 'description', @@ -202,10 +180,8 @@ class TestEventProducer(unittest.TestCase): 'mapping': get_mapping('ToDo to Note', 'ToDo', 'Note', mapping) }) event_producer.save() - event_producer.reload() - producer = self.get_remote_site() - producer_note = frappe.get_doc(dict(doctype='Note', title='Test Mapping', content='Test Mapping')) + producer_note = frappe._dict(doctype='Note', title='Test Mapping', content='Test Mapping') delete_on_remote_if_exists(producer, 'Note', {'title': producer_note.title}) producer_note = producer.insert(producer_note) self.pull_producer_data() @@ -225,8 +201,11 @@ class TestEventProducer(unittest.TestCase): #check delete self.assertFalse(frappe.db.exists('ToDo', {'description': producer_note.content})) + reset_configuration(producer_url) + def test_inner_mapping(self): - event_producer = frappe.get_doc('Event Producer', self.producer_url) + producer = get_remote_site() + event_producer = frappe.get_doc('Event Producer', producer_url) event_producer.producer_doctypes = [] inner_mapping = [ { @@ -255,10 +234,8 @@ class TestEventProducer(unittest.TestCase): 'mapping': get_mapping('ToDo 
to Note Mapping', 'ToDo', 'Note', mapping) }) event_producer.save() - event_producer.reload() - producer = self.get_remote_site() - producer_note = frappe.get_doc(dict(doctype='Note', title='Inner Mapping Tester', content='Test Inner Mapping')) + producer_note = frappe._dict(doctype='Note', title='Inner Mapping Tester', content='Test Inner Mapping') delete_on_remote_if_exists(producer, 'Note', {'title': producer_note.title}) producer_note = producer.insert(producer_note) self.pull_producer_data() @@ -268,11 +245,13 @@ class TestEventProducer(unittest.TestCase): #check doc inserted self.assertTrue(frappe.db.exists('ToDo', {'description': producer_note.content})) + reset_configuration(producer_url) + def insert_into_producer(producer, description): - #create and insert todo on remote site - todo = frappe.get_doc(dict(doctype='ToDo', description=description, assigned_by='Administrator')) - return producer.insert(todo) + #create and insert todo on remote site + todo = dict(doctype='ToDo', description=description, assigned_by='Administrator') + return producer.insert(todo) def delete_on_remote_if_exists(producer, doctype, filters): remote_doc = producer.get_value(doctype, 'name', filters) @@ -292,4 +271,47 @@ def get_mapping(mapping_name, local, remote, field_map): for entry in field_map: doc.append('field_mapping', entry) doc.save() - return doc.name \ No newline at end of file + return doc.name + + +def create_event_producer(producer_url): + if frappe.db.exists('Event Producer', producer_url): + return + event_producer = frappe.new_doc('Event Producer') + event_producer.producer_doctypes = [] + event_producer.producer_url = producer_url + event_producer.append('producer_doctypes', { + 'ref_doctype': 'ToDo', + 'use_same_name': 1 + }) + event_producer.append('producer_doctypes', { + 'ref_doctype': 'Note', + 'use_same_name': 1 + }) + event_producer.user = 'Administrator' + event_producer.save() + +def reset_configuration(producer_url): + event_producer = 
frappe.get_doc('Event Producer', producer_url) + event_producer.producer_doctypes = [] + event_producer.producer_url = producer_url + event_producer.append('producer_doctypes', { + 'ref_doctype': 'ToDo', + 'use_same_name': 1 + }) + event_producer.append('producer_doctypes', { + 'ref_doctype': 'Note', + 'use_same_name': 1 + }) + event_producer.user = 'Administrator' + event_producer.save() + +def get_remote_site(): + producer_doc = frappe.get_doc('Event Producer', producer_url) + producer_site = FrappeClient( + url=producer_doc.producer_url, + api_key=producer_doc.api_key, + api_secret=producer_doc.get_password('api_secret'), + frappe_authorization_source='Event Consumer' + ) + return producer_site diff --git a/frappe/exceptions.py b/frappe/exceptions.py index 1aac339228..8ebda9c7b8 100644 --- a/frappe/exceptions.py +++ b/frappe/exceptions.py @@ -103,6 +103,7 @@ class InvalidColumnName(ValidationError): pass class IncompatibleApp(ValidationError): pass class InvalidDates(ValidationError): pass class DataTooLongException(ValidationError): pass +class FileAlreadyAttachedException(Exception): pass # OAuth exceptions class InvalidAuthorizationHeader(CSRFTokenError): pass class InvalidAuthorizationPrefix(CSRFTokenError): pass diff --git a/frappe/hooks.py b/frappe/hooks.py index 200280f6de..f5a8701089 100644 --- a/frappe/hooks.py +++ b/frappe/hooks.py @@ -56,6 +56,8 @@ website_route_rules = [ {"from_route": "/profile", "to_route": "me"}, ] +base_template = "templates/base.html" + write_file_keys = ["file_url", "file_name"] notification_config = "frappe.core.notifications.get_notification_config" @@ -270,7 +272,10 @@ setup_wizard_exception = [ ] before_migrate = ['frappe.patches.v11_0.sync_user_permission_doctype_before_migrate.execute'] -after_migrate = ['frappe.website.doctype.website_theme.website_theme.generate_theme_files_if_not_exist'] +after_migrate = [ + 'frappe.website.doctype.website_theme.website_theme.generate_theme_files_if_not_exist', + 
'frappe.modules.full_text_search.build_index_for_all_routes' +] otp_methods = ['OTP App','Email','SMS'] user_privacy_documents = [ diff --git a/frappe/installer.py b/frappe/installer.py index 4fc19b282a..b157ec53e5 100755 --- a/frappe/installer.py +++ b/frappe/installer.py @@ -113,12 +113,12 @@ def remove_from_installed_apps(app_name): installed_apps = frappe.get_installed_apps() if app_name in installed_apps: installed_apps.remove(app_name) - frappe.db.set_global("installed_apps", json.dumps(installed_apps)) + frappe.db.set_value("DefaultValue", {"defkey": "installed_apps"}, "defvalue", json.dumps(installed_apps)) frappe.db.commit() if frappe.flags.in_install: post_install() -def remove_app(app_name, dry_run=False, yes=False): +def remove_app(app_name, dry_run=False, yes=False, no_backup=False): """Delete app and all linked to the app's module with the app.""" if not dry_run and not yes: @@ -126,9 +126,10 @@ def remove_app(app_name, dry_run=False, yes=False): if confirm!="y": return - from frappe.utils.backups import scheduled_backup - print("Backing up...") - scheduled_backup(ignore_files=True) + if not no_backup: + from frappe.utils.backups import scheduled_backup + print("Backing up...") + scheduled_backup(ignore_files=True) drop_doctypes = [] diff --git a/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py b/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py index 4b595b1abf..f177aa6620 100644 --- a/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py +++ b/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py @@ -56,7 +56,8 @@ def take_backup_to_dropbox(retry_count=0, upload_db_backup=True): did_not_upload, error_log = backup_to_dropbox(upload_db_backup) if did_not_upload: raise Exception - send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to") + if cint(frappe.db.get_value("Dropbox Settings", None, "send_email_for_successful_backup")): + send_email(True, "Dropbox", "Dropbox Settings", 
"send_notifications_to") except JobTimeoutException: if retry_count < 2: args = { diff --git a/frappe/integrations/frappe_providers/frappecloud.py b/frappe/integrations/frappe_providers/frappecloud.py index 3e4b584246..16bc09d9bf 100644 --- a/frappe/integrations/frappe_providers/frappecloud.py +++ b/frappe/integrations/frappe_providers/frappecloud.py @@ -1,6 +1,7 @@ # imports - standard imports import getpass import json +import os import re import sys @@ -8,6 +9,7 @@ import sys import click from html2text import html2text import requests +from tenacity import retry, stop_after_attempt, wait_fixed # imports - module imports import frappe @@ -138,6 +140,7 @@ def select_team(session): return team +@retry(stop=stop_after_attempt(5)) def get_new_site_options(): site_options_sc = session.post(options_url) @@ -158,6 +161,7 @@ def is_valid_subdomain(subdomain): print("Subdomain contains invalid characters. Use lowercase characters, numbers and hyphens") +@retry(stop=stop_after_attempt(5)) def is_subdomain_available(subdomain): res = session.post(site_exists_url, {"subdomain": subdomain}) if res.ok: @@ -252,6 +256,17 @@ def get_subdomain(domain): return subdomain +@retry(stop=stop_after_attempt(2), wait=wait_fixed(5)) +def upload_backup_file(file_type, file_path): + return session.post(files_url, data={}, files={ + "file": open(file_path, "rb"), + "is_private": 1, + "folder": "Home", + "method": "press.api.site.upload_backup", + "type": file_type + }) + + @add_line_after def upload_backup(local_site): # take backup @@ -265,14 +280,11 @@ def upload_backup(local_site): ("public", odb.backup_path_files), ("private", odb.backup_path_private_files) ]): - file_upload_response = session.post(files_url, data={}, files={ - "file": open(file_path, "rb"), - "is_private": 1, - "folder": "Home", - "method": "press.api.site.upload_backup", - "type": file_type - }) - print("Uploading files ({}/3)".format(x+1), end="\r") + file_name = file_path.split(os.sep)[-1] + + print("Uploading {} 
file: {} ({}/3)".format(file_type, file_name, x+1)) + file_upload_response = upload_backup_file(file_type, file_path) + if file_upload_response.ok: files_session[file_type] = file_upload_response.json()["message"] else: @@ -362,7 +374,10 @@ def create_session(): if login_sc.ok: print("Authorization Successful! ✅") team = select_team(session) - session.headers.update({"X-Press-Team": team }) + session.headers.update({ + "X-Press-Team": team, + "Connection": "keep-alive" + }) return session else: handle_request_failure(message="Authorization Failed with Error Code {}".format(login_sc.status_code), traceback=False) diff --git a/frappe/model/base_document.py b/frappe/model/base_document.py index 106d21eb51..d7028870f4 100644 --- a/frappe/model/base_document.py +++ b/frappe/model/base_document.py @@ -504,19 +504,7 @@ class BaseDocument(object): for _df in fields_to_fetch: if self.is_new() or self.docstatus != 1 or _df.allow_on_submit: - fetch_from_fieldname = _df.fetch_from.split('.')[-1] - value = values[fetch_from_fieldname] - if _df.fieldtype == 'Small Text' or _df.fieldtype == 'Text' or _df.fieldtype == 'Data': - if fetch_from_fieldname in default_fields: - from frappe.model.meta import get_default_df - fetch_from_df = get_default_df(fetch_from_fieldname) - else: - fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname) - - fetch_from_ft = fetch_from_df.get('fieldtype') - if fetch_from_ft == 'Text Editor' and value: - value = unescape_html(strip_html(value)) - setattr(self, _df.fieldname, value) + self.set_fetch_from_value(doctype, _df, values) notify_link_count(doctype, docname) @@ -531,6 +519,27 @@ class BaseDocument(object): return invalid_links, cancelled_links + def set_fetch_from_value(self, doctype, df, values): + fetch_from_fieldname = df.fetch_from.split('.')[-1] + value = values[fetch_from_fieldname] + if df.fieldtype in ['Small Text', 'Text', 'Data']: + if fetch_from_fieldname in default_fields: + from frappe.model.meta import 
get_default_df + fetch_from_df = get_default_df(fetch_from_fieldname) + else: + fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname) + + if not fetch_from_df: + frappe.throw( + _('Please check the value of "Fetch From" set for field {0}').format(frappe.bold(df.label)), + title = _('Wrong Fetch From value') + ) + + fetch_from_ft = fetch_from_df.get('fieldtype') + if fetch_from_ft == 'Text Editor' and value: + value = unescape_html(strip_html(value)) + setattr(self, df.fieldname, value) + def _validate_selects(self): if frappe.flags.in_import: return diff --git a/frappe/model/document.py b/frappe/model/document.py index 843cb421fe..24450f0cc6 100644 --- a/frappe/model/document.py +++ b/frappe/model/document.py @@ -961,7 +961,8 @@ class Document(BaseDocument): update_global_search(self) - if getattr(self.meta, 'track_changes', False) and self._doc_before_save and not self.flags.ignore_version: + if getattr(self.meta, 'track_changes', False) and not self.flags.ignore_version \ + and not self.doctype == 'Version' and not frappe.flags.in_install: self.save_version() self.run_method('on_change') @@ -1058,8 +1059,13 @@ class Document(BaseDocument): def save_version(self): """Save version info""" + if not self._doc_before_save and frappe.flags.in_patch: return + version = frappe.new_doc('Version') - if version.set_diff(self._doc_before_save, self): + if not self._doc_before_save: + version.for_insert(self) + version.insert(ignore_permissions=True) + elif version.set_diff(self._doc_before_save, self): version.insert(ignore_permissions=True) if not frappe.flags.in_migrate: follow_document(self.doctype, self.name, frappe.session.user) diff --git a/frappe/model/mapper.py b/frappe/model/mapper.py index 3639a947c0..d3014435e0 100644 --- a/frappe/model/mapper.py +++ b/frappe/model/mapper.py @@ -14,6 +14,12 @@ def make_mapped_doc(method, source_name, selected_children=None, args=None): Sets selected_children as flags for the `get_mapped_doc` method. 
Called from `open_mapped_doc` from create_new.js''' + + for hook in frappe.get_hooks("override_whitelisted_methods", {}).get(method, []): + # override using the first hook + method = hook + break + method = frappe.get_attr(method) if method not in frappe.whitelisted: diff --git a/frappe/model/meta.py b/frappe/model/meta.py index 0c5ec75597..1cc3abba5b 100644 --- a/frappe/model/meta.py +++ b/frappe/model/meta.py @@ -483,6 +483,9 @@ class Meta(Document): def get_row_template(self): return self.get_web_template(suffix='_row') + def get_list_template(self): + return self.get_web_template(suffix='_list') + def get_web_template(self, suffix=''): '''Returns the relative path of the row template for this doctype''' module_name = frappe.scrub(self.module) diff --git a/frappe/modules/full_text_search.py b/frappe/modules/full_text_search.py new file mode 100644 index 0000000000..fce9983907 --- /dev/null +++ b/frappe/modules/full_text_search.py @@ -0,0 +1,106 @@ +# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. 
See license.txt + +from __future__ import unicode_literals +import frappe +from whoosh.index import create_in, open_dir +from whoosh.fields import TEXT, ID, Schema +from whoosh.qparser import MultifieldParser, FieldsPlugin, WildcardPlugin +from whoosh.query import Prefix +from bs4 import BeautifulSoup +from frappe.website.render import render_page +from frappe.utils import set_request, cint +from frappe.utils.global_search import get_routes_to_index + + +def build_index_for_all_routes(): + print("Building search index for all web routes...") + routes = get_routes_to_index() + documents = [get_document_to_index(route) for route in routes] + build_index("web_routes", documents) + + +@frappe.whitelist(allow_guest=True) +def web_search(index_name, query, scope=None, limit=20): + limit = cint(limit) + return search(index_name, query, scope, limit) + + +def get_document_to_index(route): + frappe.set_user("Guest") + frappe.local.no_cache = True + + try: + set_request(method="GET", path=route) + content = render_page(route) + soup = BeautifulSoup(content, "html.parser") + page_content = soup.find(class_="page_content") + text_content = page_content.text if page_content else "" + title = soup.title.text.strip() if soup.title else route + + frappe.set_user("Administrator") + + return frappe._dict(title=title, content=text_content, path=route) + except ( + frappe.PermissionError, + frappe.DoesNotExistError, + frappe.ValidationError, + Exception, + ): + pass + + +def build_index(index_name, documents): + schema = Schema( + title=TEXT(stored=True), path=ID(stored=True), content=TEXT(stored=True) + ) + + index_dir = get_index_path(index_name) + frappe.create_folder(index_dir) + + ix = create_in(index_dir, schema) + writer = ix.writer() + + for document in documents: + if document: + writer.add_document( + title=document.title, path=document.path, content=document.content + ) + + writer.commit() + + +def search(index_name, text, scope=None, limit=20): + index_dir = 
get_index_path(index_name) + ix = open_dir(index_dir) + + results = None + out = [] + with ix.searcher() as searcher: + parser = MultifieldParser(["title", "content"], ix.schema) + parser.remove_plugin_class(FieldsPlugin) + parser.remove_plugin_class(WildcardPlugin) + query = parser.parse(text) + + filter_scoped = None + if scope: + filter_scoped = Prefix("path", scope) + results = searcher.search(query, limit=limit, filter=filter_scoped) + + for r in results: + title_highlights = r.highlights("title") + content_highlights = r.highlights("content") + out.append( + frappe._dict( + title=r["title"], + path=r["path"], + title_highlights=title_highlights, + content_highlights=content_highlights, + ) + ) + + return out + + +def get_index_path(index_name): + return frappe.get_site_path("indexes", index_name) diff --git a/frappe/patches.txt b/frappe/patches.txt index fb5bf447b7..a03d31918b 100644 --- a/frappe/patches.txt +++ b/frappe/patches.txt @@ -288,3 +288,5 @@ execute:frappe.delete_doc("DocType", "Onboarding Slide") execute:frappe.delete_doc("DocType", "Onboarding Slide Field") execute:frappe.delete_doc("DocType", "Onboarding Slide Help Link") frappe.patches.v13_0.update_date_filters_in_user_settings +frappe.patches.v13_0.update_duration_options +frappe.patches.v13_0.replace_old_data_import diff --git a/frappe/patches/v13_0/replace_old_data_import.py b/frappe/patches/v13_0/replace_old_data_import.py new file mode 100644 index 0000000000..1c00ae5f34 --- /dev/null +++ b/frappe/patches/v13_0/replace_old_data_import.py @@ -0,0 +1,14 @@ +# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. 
See license.txt + +from __future__ import unicode_literals +import frappe + + +def execute(): + frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy') + frappe.db.commit() + frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`") + frappe.reload_doc("core", "doctype", "data_import") + frappe.get_doc("DocType", "Data Import").on_update() + frappe.delete_doc_if_exists("DocType", "Data Import Beta") diff --git a/frappe/patches/v13_0/update_duration_options.py b/frappe/patches/v13_0/update_duration_options.py new file mode 100644 index 0000000000..60eef8fc93 --- /dev/null +++ b/frappe/patches/v13_0/update_duration_options.py @@ -0,0 +1,28 @@ +# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors +# MIT License. See license.txt + +from __future__ import unicode_literals +import frappe + +def execute(): + frappe.reload_doc('core', 'doctype', 'DocField') + + if frappe.db.has_column('DocField', 'show_days'): + frappe.db.sql(""" + UPDATE + tabDocField + SET + hide_days = 1 WHERE show_days = 0 + """) + frappe.db.sql_ddl('alter table tabDocField drop column show_days') + + if frappe.db.has_column('DocField', 'show_seconds'): + frappe.db.sql(""" + UPDATE + tabDocField + SET + hide_seconds = 1 WHERE show_seconds = 0 + """) + frappe.db.sql_ddl('alter table tabDocField drop column show_seconds') + + frappe.clear_cache(doctype='DocField') \ No newline at end of file diff --git a/frappe/public/css/hljs-night-owl.css b/frappe/public/css/hljs-night-owl.css new file mode 100644 index 0000000000..932ad2e46f --- /dev/null +++ b/frappe/public/css/hljs-night-owl.css @@ -0,0 +1,183 @@ +/* + +Night Owl for highlight.js (c) Carl Baxter + +An adaptation of Sarah Drasner's Night Owl VS Code Theme +https://github.com/sdras/night-owl-vscode-theme + +Copyright (c) 2018 Sarah Drasner + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without 
restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +*/ + +.hljs { + display: block; + overflow-x: auto; + padding: 1rem 1.25rem; + background: #011627; + color: #d6deeb; + border-radius: 0.5rem; + } + + /* General Purpose */ + .hljs-keyword { + color: #c792ea; + font-style: italic; + } + .hljs-built_in { + color: #addb67; + font-style: italic; + } + .hljs-type { + color: #82aaff; + } + .hljs-literal { + color: #ff5874; + } + .hljs-number { + color: #F78C6C; + } + .hljs-regexp { + color: #5ca7e4; + } + .hljs-string { + color: #ecc48d; + } + .hljs-subst { + color: #d3423e; + } + .hljs-symbol { + color: #82aaff; + } + .hljs-class { + color: #ffcb8b; + } + .hljs-function { + color: #82AAFF; + } + .hljs-title { + color: #DCDCAA; + font-style: italic; + } + .hljs-params { + color: #7fdbca; + } + + /* Meta */ + .hljs-comment { + color: #637777; + font-style: italic; + } + .hljs-doctag { + color: #7fdbca; + } + .hljs-meta { + color: #82aaff; + } + .hljs-meta-keyword { + color: #82aaff; + } + .hljs-meta-string { + color: #ecc48d; + } + + /* Tags, attributes, config */ + .hljs-section { + color: #82b1ff; + } + .hljs-tag, + .hljs-name, + .hljs-builtin-name { + color: #7fdbca; + } + 
.hljs-attr { + color: #7fdbca; + } + .hljs-attribute { + color: #80cbc4; + } + .hljs-variable { + color: #addb67; + } + + /* Markup */ + .hljs-bullet { + color: #d9f5dd; + } + .hljs-code { + color: #80CBC4; + } + .hljs-emphasis { + color: #c792ea; + font-style: italic; + } + .hljs-strong { + color: #addb67; + font-weight: bold; + } + .hljs-formula { + color: #c792ea; + } + .hljs-link { + color: #ff869a; + } + .hljs-quote { + color: #697098; + font-style: italic; + } + + /* CSS */ + .hljs-selector-tag { + color: #ff6363; + } + + .hljs-selector-id { + color: #fad430; + } + + .hljs-selector-class { + color: #addb67; + font-style: italic; + } + + .hljs-selector-attr, + .hljs-selector-pseudo { + color: #c792ea; + font-style: italic; + } + + /* Templates */ + .hljs-template-tag { + color: #c792ea; + } + .hljs-template-variable { + color: #addb67; + } + + /* diff */ + .hljs-addition { + color: #addb67ff; + font-style: italic; + } + + .hljs-deletion { + color: #EF535090; + font-style: italic; + } diff --git a/frappe/public/js/frappe/data_import/column_picker_fields.js b/frappe/public/js/frappe/data_import/column_picker_fields.js deleted file mode 100644 index 36cbf3c413..0000000000 --- a/frappe/public/js/frappe/data_import/column_picker_fields.js +++ /dev/null @@ -1,28 +0,0 @@ -export default class ColumnPickerFields extends frappe.views.ReportView { - show() {} - - get_fields_as_options() { - let column_map = this.get_columns_for_picker(); - let doctypes = [this.doctype].concat( - ...frappe.meta.get_table_fields(this.doctype).map(df => df.options) - ); - // flatten array - return [].concat( - ...doctypes.map(doctype => { - return column_map[doctype].map(df => { - let label = df.label; - let value = df.fieldname; - if (this.doctype !== doctype) { - label = `${df.label} (${doctype})`; - value = `${doctype}:${df.fieldname}`; - } - return { - label, - value, - description: value - }; - }); - }) - ); - } -} diff --git a/frappe/public/js/frappe/data_import/data_exporter.js 
b/frappe/public/js/frappe/data_import/data_exporter.js index d0bf794df6..735237189d 100644 --- a/frappe/public/js/frappe/data_import/data_exporter.js +++ b/frappe/public/js/frappe/data_import/data_exporter.js @@ -1,9 +1,9 @@ -import ColumnPickerFields from './column_picker_fields'; frappe.provide('frappe.data_import'); frappe.data_import.DataExporter = class DataExporter { - constructor(doctype) { + constructor(doctype, exporting_for) { this.doctype = doctype; + this.exporting_for = exporting_for; frappe.model.with_doctype(doctype, () => { this.make_dialog(); }); @@ -13,6 +13,36 @@ frappe.data_import.DataExporter = class DataExporter { this.dialog = new frappe.ui.Dialog({ title: __('Export Data'), fields: [ + { + fieldtype: 'Select', + fieldname: 'exporting_for', + label: __('Exporting For'), + options: [ + { + label: __('Insert New Records'), + value: 'Insert New Records' + }, + { + label: __('Update Existing Records'), + value: 'Update Existing Records' + } + ], + change: () => { + let exporting_for = this.dialog.get_value('exporting_for'); + this.dialog.set_value( + 'export_records', + exporting_for === 'Insert New Records' ? 'blank_template' : 'all' + ); + + // Force ID field to be exported when updating existing records + let id_field = this.dialog.get_field(this.doctype).options[0]; + if (id_field.value === 'name' && id_field.$checkbox) { + id_field.$checkbox + .find('input') + .prop('disabled', exporting_for === 'Update Existing Records'); + } + } + }, { fieldtype: 'Select', fieldname: 'export_records', @@ -67,27 +97,32 @@ frappe.data_import.DataExporter = class DataExporter { on_change: () => this.update_primary_action(), options: this.get_multicheck_options(this.doctype) }, - ...frappe.meta.get_table_fields(this.doctype) - .map(df => { - let doctype = df.options; - let label = df.reqd - ? 
__('{0} (1 row mandatory)', [doctype]) - : __(doctype); - return { - label, - fieldname: doctype, - fieldtype: 'MultiCheck', - columns: 2, - on_change: () => this.update_primary_action(), - options: this.get_multicheck_options(doctype) - }; - }) + ...frappe.meta.get_table_fields(this.doctype).map(df => { + let doctype = df.options; + let child_fieldname = df.fieldname; + let label = df.reqd + ? // prettier-ignore + __('{0} ({1}) (1 row mandatory)', [df.label || df.fieldname, doctype]) + : __('{0} ({1})', [df.label || df.fieldname, doctype]); + return { + label, + fieldname: child_fieldname, + fieldtype: 'MultiCheck', + columns: 2, + on_change: () => this.update_primary_action(), + options: this.get_multicheck_options(doctype, child_fieldname) + }; + }) ], primary_action_label: __('Export'), primary_action: values => this.export_records(values), on_page_show: () => this.select_mandatory() }); + if (this.exporting_for) { + this.dialog.set_value('exporting_for', this.exporting_for); + } + this.make_filter_area(); this.make_select_all_buttons(); this.update_record_count_message(); @@ -97,7 +132,7 @@ frappe.data_import.DataExporter = class DataExporter { export_records() { let method = - '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template'; + '/api/method/frappe.core.doctype.data_import.data_import.download_template'; let multicheck_fields = this.dialog.fields .filter(df => df.fieldtype === 'MultiCheck') @@ -165,16 +200,16 @@ frappe.data_import.DataExporter = class DataExporter { } select_mandatory() { - let mandatory_table_doctypes = frappe.meta + let mandatory_table_fields = frappe.meta .get_table_fields(this.doctype) .filter(df => df.reqd) - .map(df => df.options); - mandatory_table_doctypes.push(this.doctype); + .map(df => df.fieldname); + mandatory_table_fields.push(this.doctype); let multicheck_fields = this.dialog.fields .filter(df => df.fieldtype === 'MultiCheck') .map(df => df.fieldname) - .filter(doctype => 
mandatory_table_doctypes.includes(doctype)); + .filter(doctype => mandatory_table_fields.includes(doctype)); let checkboxes = [].concat( ...multicheck_fields.map(fieldname => { @@ -192,8 +227,12 @@ frappe.data_import.DataExporter = class DataExporter { } unselect_all() { + let update_existing_records = + this.dialog.get_value('exporting_for') == 'Update Existing Records'; this.dialog.$wrapper - .find(':checkbox') + .find( + `:checkbox${update_existing_records ? ':not([data-unit=name])' : ''}` + ) .prop('checked', false) .trigger('change'); } @@ -253,11 +292,9 @@ frappe.data_import.DataExporter = class DataExporter { }, {}); } - get_multicheck_options(doctype) { + get_multicheck_options(doctype, child_fieldname = null) { if (!this.column_map) { - this.column_map = new ColumnPickerFields({ - doctype: this.doctype - }).get_columns_for_picker(); + this.column_map = get_columns_for_picker(this.doctype); } let autoname_field = null; @@ -267,7 +304,11 @@ frappe.data_import.DataExporter = class DataExporter { autoname_field = frappe.meta.get_field(doctype, fieldname); } - return this.column_map[doctype] + let fields = child_fieldname + ? 
this.column_map[child_fieldname] + : this.column_map[doctype]; + + return fields .filter(df => { if (autoname_field && df.fieldname === autoname_field.fieldname) { return false; @@ -289,3 +330,52 @@ frappe.data_import.DataExporter = class DataExporter { }); } }; + +export function get_columns_for_picker(doctype) { + let out = {}; + + const exportable_fields = df => { + let keep = true; + if (frappe.model.no_value_type.includes(df.fieldtype)) { + keep = false; + } + if (['lft', 'rgt'].includes(df.fieldname)) { + keep = false; + } + return keep; + }; + + // parent + let doctype_fields = frappe.meta + .get_docfields(doctype) + .filter(exportable_fields); + + out[doctype] = [ + { + label: __('ID'), + fieldname: 'name', + fieldtype: 'Data', + reqd: 1 + } + ].concat(doctype_fields); + + // children + const table_fields = frappe.meta.get_table_fields(doctype); + table_fields.forEach(df => { + const cdt = df.options; + const child_table_fields = frappe.meta + .get_docfields(cdt) + .filter(exportable_fields); + + out[df.fieldname] = [ + { + label: __('ID'), + fieldname: 'name', + fieldtype: 'Data', + reqd: 1 + } + ].concat(child_table_fields); + }); + + return out; +} diff --git a/frappe/public/js/frappe/data_import/import_preview.js b/frappe/public/js/frappe/data_import/import_preview.js index 27d81b75b7..7cf8431456 100644 --- a/frappe/public/js/frappe/data_import/import_preview.js +++ b/frappe/public/js/frappe/data_import/import_preview.js @@ -1,5 +1,5 @@ import DataTable from 'frappe-datatable'; -import ColumnPickerFields from './column_picker_fields'; +import { get_columns_for_picker } from './data_exporter'; frappe.provide('frappe.data_import'); @@ -236,9 +236,7 @@ frappe.data_import.ImportPreview = class ImportPreview { } show_column_mapper() { - let column_picker_fields = new ColumnPickerFields({ - doctype: this.doctype - }); + let column_picker_fields = get_columns_for_picker(this.doctype); let changed = []; let fields = this.preview_data.columns.map((col, i) => { 
let df = col.df; diff --git a/frappe/public/js/frappe/form/controls/button.js b/frappe/public/js/frappe/form/controls/button.js index dca2a82654..ae24651892 100644 --- a/frappe/public/js/frappe/form/controls/button.js +++ b/frappe/public/js/frappe/form/controls/button.js @@ -5,7 +5,8 @@ frappe.ui.form.ControlButton = frappe.ui.form.ControlData.extend({ }, make_input: function() { var me = this; - this.$input = $('
    ` ); this.$wrapper.append(this.$picker); - this.build_numeric_input("days", !this.duration_options.show_days); + this.build_numeric_input("days", this.duration_options.hide_days); this.build_numeric_input("hours", false); this.build_numeric_input("minutes", false); - this.build_numeric_input("seconds", !this.duration_options.show_seconds); + this.build_numeric_input("seconds", this.duration_options.hide_seconds); this.set_duration_picker_value(this.value); this.$picker.hide(); this.bind_events(); @@ -130,10 +130,10 @@ frappe.ui.form.ControlDuration = frappe.ui.form.ControlData.extend({ if (this.inputs) { total_duration.minutes = parseInt(this.inputs.minutes.val()); total_duration.hours = parseInt(this.inputs.hours.val()); - if (this.duration_options.show_days) { + if (!this.duration_options.hide_days) { total_duration.days = parseInt(this.inputs.days.val()); } - if (this.duration_options.show_seconds) { + if (!this.duration_options.hide_seconds) { total_duration.seconds = parseInt(this.inputs.seconds.val()); } } diff --git a/frappe/public/js/frappe/form/controls/link.js b/frappe/public/js/frappe/form/controls/link.js index 9d8241f5a7..603506a056 100644 --- a/frappe/public/js/frappe/form/controls/link.js +++ b/frappe/public/js/frappe/form/controls/link.js @@ -140,6 +140,8 @@ frappe.ui.form.ControlLink = frappe.ui.form.ControlData.extend({ } }); + this.custom_awesomplete_filter && this.custom_awesomplete_filter(this.awesomplete); + this.$input.on("input", frappe.utils.debounce(function(e) { var doctype = me.get_options(); if(!doctype) return; @@ -467,10 +469,10 @@ frappe.ui.form.ControlLink = frappe.ui.form.ControlData.extend({ for(var i=0; i < fl.length; i++) { frappe.model.set_value(df.parent, docname, fl[i], fetch_values[i], df.fieldtype); } - } + }, }); -if(Awesomplete) { +if (Awesomplete) { Awesomplete.prototype.get_item = function(value) { return this._list.find(function(item) { return item.value === value; diff --git 
a/frappe/public/js/frappe/form/controls/table_multiselect.js b/frappe/public/js/frappe/form/controls/table_multiselect.js index a75a947e3f..1d85498b80 100644 --- a/frappe/public/js/frappe/form/controls/table_multiselect.js +++ b/frappe/public/js/frappe/form/controls/table_multiselect.js @@ -11,6 +11,8 @@ frappe.ui.form.ControlTableMultiSelect = frappe.ui.form.ControlLink.extend({ // used as an internal model to store values this.rows = []; + // used as an internal model to filter awesomplete values + this._rows_list = []; this.$input_area.on('click', (e) => { if (e.target === this.$input_area.get(0)) { @@ -61,7 +63,7 @@ frappe.ui.form.ControlTableMultiSelect = frappe.ui.form.ControlLink.extend({ }); } } - + this._rows_list = this.rows.map(row => row[link_field.fieldname]); return this.rows; }, validate(value) { @@ -141,4 +143,15 @@ frappe.ui.form.ControlTableMultiSelect = frappe.ui.form.ControlLink.extend({ } return this._link_field; }, + custom_awesomplete_filter: function(awesomplete) { + let me = this; + + awesomplete.filter = function(item) { + if (in_list(me._rows_list, item.value)) { + return false; + } + + return true; + }; + } }); diff --git a/frappe/public/js/frappe/form/footer/timeline.js b/frappe/public/js/frappe/form/footer/timeline.js index bb44408c2a..7821a04c50 100644 --- a/frappe/public/js/frappe/form/footer/timeline.js +++ b/frappe/public/js/frappe/form/footer/timeline.js @@ -205,16 +205,18 @@ frappe.ui.form.Timeline = class Timeline {
    ').appendTo(me.list); } - // created - me.render_timeline_item({ - content: __("created"), - comment_type: "Created", - communication_type: "Comment", - sender: this.frm.doc.owner, - communication_date: this.frm.doc.creation, - creation: this.frm.doc.creation, - frm: this.frm - }); + // if a created comment is not added, add the default one + if (!timeline.find(comment => comment.comment_type === 'Created')) { + me.render_timeline_item({ + content: __("created"), + comment_type: "Created", + communication_type: "Comment", + sender: this.frm.doc.owner, + communication_date: this.frm.doc.creation, + creation: this.frm.doc.creation, + frm: this.frm + }); + } this.wrapper.find(".is-email").prop("checked", this.last_type==="Email").change(); @@ -564,12 +566,17 @@ frappe.ui.form.Timeline = class Timeline { let updater_reference = data.updater_reference; if (!$.isEmptyObject(updater_reference)) { let label = updater_reference.label || __('via {0}', [updater_reference.doctype]); - updater_reference_link = frappe.utils.get_form_link( - updater_reference.doctype, - updater_reference.docname, - true, - label - ); + let { doctype, docname } = updater_reference; + if (doctype && docname) { + updater_reference_link = frappe.utils.get_form_link( + doctype, + docname, + true, + label + ); + } else { + updater_reference_link = label; + } } // value changed in parent @@ -677,6 +684,15 @@ frappe.ui.form.Timeline = class Timeline { } } }); + + // creation by updater reference + if (data.creation && data.created_by) { + if (updater_reference_link) { + out.push(me.get_version_comment(version, __('created {0}', [updater_reference_link]), 'Created')); + } else { + out.push(me.get_version_comment(version, __('created'), 'Created')); + } + } }); } diff --git a/frappe/public/js/frappe/form/form.js b/frappe/public/js/frappe/form/form.js index 369e4a56d4..ebe94b4cdb 100644 --- a/frappe/public/js/frappe/form/form.js +++ b/frappe/public/js/frappe/form/form.js @@ -340,7 +340,6 @@ 
frappe.ui.form.Form = class FrappeForm { switch_doc(docname) { // record switch if(this.docname != docname && (!this.meta.in_dialog || this.in_form) && !this.meta.istable) { - frappe.utils.scroll_to(0); if (this.print_preview) { this.print_preview.hide(); } @@ -787,15 +786,24 @@ frappe.ui.form.Form = class FrappeForm { frappe.msgprint(__('"amended_from" field must be present to do an amendment.')); return; } - this.validate_form_action("Amend"); - var me = this; - var fn = function(newdoc) { - newdoc.amended_from = me.docname; - if(me.fields_dict && me.fields_dict['amendment_date']) - newdoc.amendment_date = frappe.datetime.obj_to_str(new Date()); - }; - this.copy_doc(fn, 1); - frappe.utils.play_sound("click"); + + frappe.xcall('frappe.client.is_document_amended', { + 'doctype': this.doc.doctype, + 'docname': this.doc.name + }).then(is_amended => { + if (is_amended) { + frappe.throw(__('This document is already amended, you cannot ammend it again')); + } + this.validate_form_action("Amend"); + var me = this; + var fn = function(newdoc) { + newdoc.amended_from = me.docname; + if (me.fields_dict && me.fields_dict['amendment_date']) + newdoc.amendment_date = frappe.datetime.obj_to_str(new Date()); + }; + this.copy_doc(fn, 1); + frappe.utils.play_sound("click"); + }); } validate_form_action(action, resolve) { diff --git a/frappe/public/js/frappe/form/sidebar/form_sidebar.js b/frappe/public/js/frappe/form/sidebar/form_sidebar.js index a145e47149..eab09c1e10 100644 --- a/frappe/public/js/frappe/form/sidebar/form_sidebar.js +++ b/frappe/public/js/frappe/form/sidebar/form_sidebar.js @@ -6,14 +6,14 @@ import './share'; import './review'; import './document_follow'; import './user_image'; -import './form_viewers'; +import './form_sidebar_users'; -frappe.ui.form.Sidebar = Class.extend({ - init: function(opts) { +frappe.ui.form.Sidebar = class { + constructor(opts) { $.extend(this, opts); - }, + } - make: function() { + make () { var sidebar_content = 
frappe.render_template("form_sidebar", {doctype: this.frm.doctype, frm:this.frm}); this.sidebar = $('') @@ -43,9 +43,9 @@ frappe.ui.form.Sidebar = Class.extend({ this.refresh(); - }, + } - bind_events: function() { + bind_events () { var me = this; // scroll to comments @@ -58,7 +58,7 @@ frappe.ui.form.Sidebar = Class.extend({ me.refresh_like(); }); }); - }, + } setup_keyboard_shortcuts() { // add assignment shortcut @@ -66,9 +66,9 @@ frappe.ui.form.Sidebar = Class.extend({ frappe.ui.keys .get_shortcut_group(this.page) .add(assignment_link); - }, + } - refresh: function() { + refresh () { if (this.frm.doc.__islocal) { this.sidebar.toggle(false); } else { @@ -113,9 +113,9 @@ frappe.ui.form.Sidebar = Class.extend({ this.refresh_like(); frappe.ui.form.set_user_image(this.frm); } - }, + } - show_auto_repeat_status: function() { + show_auto_repeat_status() { if (this.frm.meta.allow_auto_repeat && this.frm.doc.auto_repeat) { const me = this; frappe.call({ @@ -135,16 +135,16 @@ frappe.ui.form.Sidebar = Class.extend({ } }); } - }, + } - refresh_comments: function() { + refresh_comments() { $.map(this.frm.timeline.get_communications(), function(c) { return (c.communication_type==="Communication" || (c.communication_type=="Comment" && c.comment_type==="Comment")) ? 
c : null; }); this.comments.find(".n-comments").html(this.frm.get_docinfo().total_comments); - }, + } - make_tags: function() { + make_tags() { if (this.frm.meta.issingle) { this.sidebar.find(".form-tags").toggle(false); return; @@ -157,54 +157,62 @@ frappe.ui.form.Sidebar = Class.extend({ this.frm.tags && this.frm.tags.refresh(user_tags); } }); - }, - make_attachments: function() { + } + + make_attachments() { var me = this; this.frm.attachments = new frappe.ui.form.Attachments({ parent: me.sidebar.find(".form-attachments"), frm: me.frm }); - }, - make_assignments: function() { + } + + make_assignments() { this.frm.assign_to = new frappe.ui.form.AssignTo({ parent: this.sidebar.find(".form-assignments"), frm: this.frm }); - }, - make_shared: function() { + } + + make_shared() { this.frm.shared = new frappe.ui.form.Share({ frm: this.frm, parent: this.sidebar.find(".form-shared") }); - }, - make_viewers: function() { - this.frm.viewers = new frappe.ui.form.Viewers({ + } + + make_viewers() { + this.frm.viewers = new frappe.ui.form.SidebarUsers({ frm: this.frm, - parent: this.sidebar.find(".form-viewers") + $wrapper: this.sidebar, }); - }, - add_user_action: function(label, click) { + } + + add_user_action(label, click) { return $('').html(label).appendTo($('
  • ') .appendTo(this.user_actions.removeClass("hidden"))).on("click", click); - }, - clear_user_actions: function() { + } + + clear_user_actions() { this.user_actions.addClass("hidden") this.user_actions.find(".user-action-row").remove(); - }, + } - make_like: function() { + make_like() { this.like_wrapper = this.sidebar.find(".liked-by"); this.like_icon = this.sidebar.find(".liked-by .octicon-heart"); this.like_count = this.sidebar.find(".liked-by .likes-count"); frappe.ui.setup_like_popover(this.sidebar.find(".liked-by-parent"), ".liked-by"); - }, - make_follow: function(){ + } + + make_follow() { this.frm.follow = new frappe.ui.form.DocumentFollow({ frm: this.frm, parent: this.sidebar.find(".followed-by-section") }); - }, - refresh_like: function() { + } + + refresh_like() { if (!this.like_icon) { return; } @@ -217,21 +225,21 @@ frappe.ui.form.Sidebar = Class.extend({ .attr("data-name", this.frm.doc.name); this.like_count.text(JSON.parse(this.frm.doc._liked_by || "[]").length); - }, + } - refresh_image: function() { - }, + refresh_image() { + } - make_review: function() { + make_review() { if (frappe.boot.energy_points_enabled && !this.frm.is_new()) { this.frm.reviews = new frappe.ui.form.Review({ parent: this.sidebar.find(".form-reviews"), frm: this.frm }); } - }, + } - reload_docinfo: function(callback) { + reload_docinfo(callback) { frappe.call({ method: "frappe.desk.form.load.get_docinfo", args: { @@ -248,4 +256,4 @@ frappe.ui.form.Sidebar = Class.extend({ }); } -}); +}; diff --git a/frappe/public/js/frappe/form/sidebar/form_sidebar_users.js b/frappe/public/js/frappe/form/sidebar/form_sidebar_users.js new file mode 100644 index 0000000000..57e3ed8e3e --- /dev/null +++ b/frappe/public/js/frappe/form/sidebar/form_sidebar_users.js @@ -0,0 +1,91 @@ +frappe.ui.form.SidebarUsers = class { + constructor(opts) { + $.extend(this, opts); + } + + get_users(type) { + let docinfo = this.frm.get_docinfo(); + return docinfo ? 
docinfo[type] || null: null; + } + + refresh(data_updated, type) { + this.parent = type == 'viewers'? this.$wrapper.find('.form-viewers'): this.$wrapper.find('.form-typers'); + this.parent.empty(); + + const users = this.get_users(type); + users && this.show_in_sidebar(users, type, data_updated); + } + + show_in_sidebar(users, type, show_alert) { + let sidebar_users = []; + let new_users = []; + let current_users = []; + + const message = type == 'viewers' ? 'viewing this document': 'composing an email'; + + users.current.forEach(username => { + if (username === frappe.session.user) { + // current user + return; + } + + var user_info = frappe.user_info(username); + sidebar_users.push({ + image: user_info.image, + fullname: user_info.fullname, + abbr: user_info.abbr, + color: user_info.color, + title: __("{0} is currently {1}", [user_info.fullname, message]) + }); + + if (users.new.indexOf(username) !== -1) { + new_users.push(user_info.fullname); + } + + current_users.push(user_info.fullname); + }); + + if (sidebar_users.length) { + this.parent.parent().removeClass('hidden'); + this.parent.append(frappe.render_template('users_in_sidebar', {'users': sidebar_users})); + } else { + this.parent.parent().addClass('hidden'); + } + + // For typers always show the alert + // For viewers show the alert to new user viewing this document + const alert_users = type == 'viewers' ? 
new_users : current_users; + show_alert && this.show_alert(alert_users, message); + } + + show_alert(users, message) { + if (users.length) { + if (users.length===1) { + frappe.show_alert(__('{0} is currently {1}', [users[0], message])); + } else { + frappe.show_alert(__('{0} are currently {1}', [frappe.utils.comma_and(users), message])); + } + + } + } +}; + +frappe.ui.form.set_users = function(data, type) { + const doctype = data.doctype; + const docname = data.docname; + const docinfo = frappe.model.get_docinfo(doctype, docname); + + const past_users = ((docinfo && docinfo[type]) || {}).past || []; + const users = data.users || []; + const new_users = users.filter(user => !past_users.includes(user)); + + frappe.model.set_docinfo(doctype, docname, type, { + past: past_users.concat(new_users), + new: new_users, + current: users + }); + + if (cur_frm && cur_frm.doc && cur_frm.doc.doctype===doctype && cur_frm.doc.name==docname) { + cur_frm.viewers.refresh(true, type); + } +}; \ No newline at end of file diff --git a/frappe/public/js/frappe/form/sidebar/form_viewers.js b/frappe/public/js/frappe/form/sidebar/form_viewers.js deleted file mode 100644 index 72f4984e94..0000000000 --- a/frappe/public/js/frappe/form/sidebar/form_viewers.js +++ /dev/null @@ -1,80 +0,0 @@ - - -frappe.ui.form.Viewers = Class.extend({ - init: function(opts) { - $.extend(this, opts); - }, - get_viewers: function() { - let docinfo = this.frm.get_docinfo(); - if (docinfo) { - return docinfo.viewers || {}; - } else { - return {}; - } - }, - refresh: function(data_updated) { - this.parent.empty(); - - var viewers = this.get_viewers(); - - var users = []; - var new_users = []; - for (var i=0, l=(viewers.current || []).length; i < l; i++) { - var username = viewers.current[i]; - if (username===frappe.session.user) { - // current user - continue; - } - - var user_info = frappe.user_info(username); - users.push({ - image: user_info.image, - fullname: user_info.fullname, - abbr: user_info.abbr, - color: 
user_info.color, - title: __("{0} is currently viewing this document", [user_info.fullname]) - }); - - if (viewers.new.indexOf(username)!==-1) { - new_users.push(user_info.fullname); - } - } - - if (users.length) { - this.parent.parent().removeClass("hidden"); - this.parent.append(frappe.render_template("users_in_sidebar", {"users": users})); - } else { - this.parent.parent().addClass("hidden"); - } - - if (data_updated && new_users.length) { - // new user viewing this document, who wasn't viewing in the past - if (new_users.length===1) { - frappe.show_alert(__("{0} is currently viewing this document", [new_users[0]])); - } else { - frappe.show_alert(__("{0} are currently viewing this document", [frappe.utils.comma_and(new_users)])); - } - - } - } -}); - -frappe.ui.form.set_viewers = function(data) { - var doctype = data.doctype; - var docname = data.docname; - var docinfo = frappe.model.get_docinfo(doctype, docname); - var past_viewers = ((docinfo && docinfo.viewers) || {}).past || []; - var viewers = data.viewers || []; - - var new_viewers = viewers.filter(viewer => !past_viewers.includes(viewer)); - - frappe.model.set_docinfo(doctype, docname, "viewers", { - past: past_viewers.concat(new_viewers), - new: new_viewers, - current: viewers - }); - - if (cur_frm && cur_frm.doc && cur_frm.doc.doctype===doctype && cur_frm.doc.name==docname) { - cur_frm.viewers.refresh(true); - } -} diff --git a/frappe/public/js/frappe/form/templates/form_sidebar.html b/frappe/public/js/frappe/form/templates/form_sidebar.html index c3f2de9c7e..296eb160bd 100644 --- a/frappe/public/js/frappe/form/templates/form_sidebar.html +++ b/frappe/public/js/frappe/form/templates/form_sidebar.html @@ -78,6 +78,10 @@
  • {%= __("Currently Viewing") %}
  • +
    diff --git a/frappe/public/js/frappe/form/toolbar.js b/frappe/public/js/frappe/form/toolbar.js index 528c874935..3ba326f022 100644 --- a/frappe/public/js/frappe/form/toolbar.js +++ b/frappe/public/js/frappe/form/toolbar.js @@ -36,6 +36,11 @@ frappe.ui.form.Toolbar = Class.extend({ this.page.set_title_sub(""); } else { this.page.set_title_sub(this.frm.docname); + this.page.$sub_title_area.css("cursor", "copy"); + this.page.$sub_title_area.on('click', (event) => { + event.stopImmediatePropagation(); + frappe.utils.copy_to_clipboard(this.frm.docname); + }); } } else { var title = this.frm.docname; @@ -374,19 +379,24 @@ frappe.ui.form.Toolbar = Class.extend({ var status = this.get_action_status(); if (status) { - if (status !== this.current_status) { - if (status === 'Amend') { - let doc = this.frm.doc; - frappe.xcall('frappe.client.is_document_amended', { - 'doctype': doc.doctype, - 'docname': doc.name - }).then(is_amended => { - if (is_amended) return; - this.set_page_actions(status); - }); - } else { + // When moving from a page with status amend to another page with status amend + // We need to check if document is already amened specifcally and hide + // or clear the menu actions accordingly + + if (status !== this.current_status || status === 'Amend') { + let doc = this.frm.doc; + frappe.xcall('frappe.client.is_document_amended', { + 'doctype': doc.doctype, + 'docname': doc.name + }).then(is_amended => { + if (is_amended) { + this.page.clear_actions(); + return; + } this.set_page_actions(status); - } + }); + } else { + this.set_page_actions(status); } } else { this.page.clear_actions(); diff --git a/frappe/public/js/frappe/list/list_view.js b/frappe/public/js/frappe/list/list_view.js index c282d43d9b..9e1ba1b9bd 100644 --- a/frappe/public/js/frappe/list/list_view.js +++ b/frappe/public/js/frappe/list/list_view.js @@ -29,6 +29,8 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList { } show() { + this.parent.disable_scroll_to_top = true; + if 
(!this.has_permissions()) { frappe.set_route(''); frappe.msgprint(__(`Not permitted to view ${this.doctype}`)); @@ -241,20 +243,8 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList { } refresh(refresh_header=false) { - this.freeze(true); - // fetch data from server - return frappe.call(this.get_call_args()).then(r => { - // render - this.prepare_data(r); - this.toggle_result_area(); - this.before_render(); + super.refresh().then(() => { this.render_header(refresh_header); - this.render(); - this.after_render(); - this.freeze(false); - if (this.settings.refresh) { - this.settings.refresh(this); - } }); } diff --git a/frappe/public/js/frappe/socketio_client.js b/frappe/public/js/frappe/socketio_client.js index 1411b6289d..fdc3f8619c 100644 --- a/frappe/public/js/frappe/socketio_client.js +++ b/frappe/public/js/frappe/socketio_client.js @@ -89,6 +89,14 @@ frappe.socketio = { frappe.socketio.doc_close(frm.doctype, frm.docname); }); + $(document).on('form-typing', function(e, frm) { + frappe.socketio.form_typing(frm.doctype, frm.docname); + }); + + $(document).on('form-stopped-typing', function(e, frm) { + frappe.socketio.form_stopped_typing(frm.doctype, frm.docname); + }); + window.onbeforeunload = function() { if (!cur_frm || cur_frm.is_new()) { return; @@ -161,8 +169,18 @@ frappe.socketio = { doc_close: function(doctype, docname) { // notify that the user has closed this doc frappe.socketio.socket.emit('doc_close', doctype, docname); - }, + // if the doc is closed the user has also stopped typing + frappe.socketio.socket.emit('doc_typing_stopped', doctype, docname); + }, + form_typing: function(doctype, docname) { + // notifiy that the user is typing on the doc + frappe.socketio.socket.emit('doc_typing', doctype, docname); + }, + form_stopped_typing: function(doctype, docname) { + // notifiy that the user has stopped typing + frappe.socketio.socket.emit('doc_typing_stopped', doctype, docname); + }, setup_listeners: function() { 
frappe.socketio.socket.on('task_status_change', function(data) { frappe.socketio.process_response(data, data.status.toLowerCase()); diff --git a/frappe/public/js/frappe/ui/filters/filters.js b/frappe/public/js/frappe/ui/filters/filters.js index f8f0535b83..a775413d39 100644 --- a/frappe/public/js/frappe/ui/filters/filters.js +++ b/frappe/public/js/frappe/ui/filters/filters.js @@ -202,8 +202,8 @@ frappe.ui.FilterList = Class.extend({ value = {0:"No", 1:"Yes"}[cint(value)]; } else if (field.df.original_type === "Duration") { let duration_options = { - show_days: field.df.show_days, - show_seconds: field.df.show_seconds + hide_days: field.df.hide_days, + hide_seconds: field.df.hide_seconds }; value = frappe.utils.get_formatted_duration(value, duration_options); } diff --git a/frappe/public/js/frappe/ui/messages.js b/frappe/public/js/frappe/ui/messages.js index ab20feeedd..eb5f4f09a2 100644 --- a/frappe/public/js/frappe/ui/messages.js +++ b/frappe/public/js/frappe/ui/messages.js @@ -167,6 +167,11 @@ frappe.msgprint = function(msg, title, is_minimizable) { method: data.primary_action.server_action, args: { args: data.primary_action.args + }, + callback() { + if (data.primary_action.hide_on_success) { + frappe.hide_msgprint(); + } } }); } diff --git a/frappe/public/js/frappe/utils/utils.js b/frappe/public/js/frappe/utils/utils.js index f4dde5804f..38c22c9c9f 100644 --- a/frappe/public/js/frappe/utils/utils.js +++ b/frappe/public/js/frappe/utils/utils.js @@ -856,7 +856,7 @@ Object.assign(frappe.utils, { minutes: Math.floor(secs % 3600 / 60), seconds: Math.floor(secs % 60) }; - if (!duration_options.show_days) { + if (duration_options.hide_days) { total_duration.hours = Math.floor(secs / 3600); total_duration.days = 0; } @@ -882,8 +882,8 @@ Object.assign(frappe.utils, { get_duration_options: function(docfield) { let duration_options = { - show_days: docfield.show_days, - show_seconds: docfield.show_seconds + hide_days: docfield.hide_days, + hide_seconds: 
docfield.hide_seconds }; return duration_options; } diff --git a/frappe/public/js/frappe/views/calendar/calendar.js b/frappe/public/js/frappe/views/calendar/calendar.js index 4b091d502c..e053da0263 100644 --- a/frappe/public/js/frappe/views/calendar/calendar.js +++ b/frappe/public/js/frappe/views/calendar/calendar.js @@ -78,7 +78,8 @@ frappe.views.CalendarView = class CalendarView extends frappe.views.ListView { id: "name", start: doc.start_date_field, end: doc.end_date_field, - title: doc.subject_field + title: doc.subject_field, + allDay: doc.all_day ? 1 : 0 } }); resolve(options); diff --git a/frappe/public/js/frappe/views/communication.js b/frappe/public/js/frappe/views/communication.js index ba290417f5..8dad5d9121 100755 --- a/frappe/public/js/frappe/views/communication.js +++ b/frappe/public/js/frappe/views/communication.js @@ -66,6 +66,10 @@ frappe.views.CommunicationComposer = Class.extend({ }) this.prepare(); this.dialog.show(); + + if (this.frm) { + $(document).trigger('form-typing', [this.frm]); + } }, get_fields: function() { @@ -262,6 +266,10 @@ frappe.views.CommunicationComposer = Class.extend({ subject: me.dialog.get_value("subject"), content: me.dialog.get_value("content"), }); + + if (me.frm) { + $(document).trigger("form-stopped-typing", [me.frm]); + } } this.dialog.on_page_show = function() { diff --git a/frappe/public/js/frappe/views/container.js b/frappe/public/js/frappe/views/container.js index 8e67792079..889063e24b 100644 --- a/frappe/public/js/frappe/views/container.js +++ b/frappe/public/js/frappe/views/container.js @@ -84,7 +84,7 @@ frappe.views.Container = Class.extend({ this.page._route = window.location.hash; $(this.page).trigger('show'); - frappe.utils.scroll_to(0); + !this.page.disable_scroll_to_top && frappe.utils.scroll_to(0); frappe.breadcrumbs.update(); return this.page; diff --git a/frappe/public/js/frappe/views/desktop/desktop.js b/frappe/public/js/frappe/views/desktop/desktop.js index 51add61f07..acc49c79a4 100644 --- 
a/frappe/public/js/frappe/views/desktop/desktop.js +++ b/frappe/public/js/frappe/views/desktop/desktop.js @@ -3,6 +3,7 @@ export default class Desktop { this.wrapper = wrapper; this.pages = {}; this.sidebar_items = {}; + this.mobile_sidebar_items = {}; this.sidebar_categories = [ "Modules", "Domains", @@ -26,14 +27,25 @@ export default class Desktop { } make_container() { - this.container = $(`
    + this.container = $(` +
    -
    +
    +
    +
    + +
    +
    +
    +
    `); this.container.appendTo(this.wrapper); this.sidebar = this.container.find(".desk-sidebar"); this.body = this.container.find(".desk-body"); + this.current_title = this.container.find(".current-title"); + this.mobile_list = this.container.find(".mobile-list"); + this.page_switcher = this.container.find(".page-switcher"); } fetch_desktop_settings() { @@ -73,8 +85,13 @@ export default class Desktop { this.current_page = item.name; } let $item = get_sidebar_item(item); + let $mobile_item = $item.clone(); + $item.appendTo(this.sidebar); this.sidebar_items[item.name] = $item; + + $mobile_item.appendTo(this.mobile_list); + this.mobile_sidebar_items[item.name] = $mobile_item; }; const make_category_title = name => { @@ -84,6 +101,7 @@ export default class Desktop { `` ); $title.appendTo(this.sidebar); + $title.clone().appendTo(this.mobile_list); }; this.sidebar_categories.forEach(category => { @@ -94,6 +112,11 @@ export default class Desktop { }); } }); + if (frappe.is_mobile) { + this.page_switcher.on('click', () => { + this.mobile_list.toggle(); + }); + } } show_page(page) { @@ -103,9 +126,14 @@ export default class Desktop { if (this.sidebar_items && this.sidebar_items[this.current_page]) { this.sidebar_items[this.current_page].removeClass("selected"); + this.mobile_sidebar_items[this.current_page].removeClass("selected"); + this.sidebar_items[page].addClass("selected"); + this.mobile_sidebar_items[page].addClass("selected"); } this.current_page = page; + this.mobile_list.hide(); + this.current_title.empty().append(this.current_page); localStorage.current_desk_page = page; this.pages[page] ? this.pages[page].show() : this.make_page(page); } @@ -113,11 +141,13 @@ export default class Desktop { get_page_to_show() { const default_page = this.desktop_settings ? 
this.desktop_settings["Modules"][0].name - : "Website"; + : frappe.boot.allowed_workspaces[0].name; + let page = frappe.get_route()[1] || localStorage.current_desk_page || default_page; + return page; } @@ -278,7 +308,6 @@ class DesktopPage { steps: this.data.onboarding.items, success: this.data.onboarding.success, docs_url: this.data.onboarding.docs_url, - user_can_dismiss: this.data.onboarding.user_can_dismiss, widget_type: 'onboarding', container: this.page, options: { diff --git a/frappe/public/js/frappe/views/formview.js b/frappe/public/js/frappe/views/formview.js index 033569a03c..7440ab198d 100644 --- a/frappe/public/js/frappe/views/formview.js +++ b/frappe/public/js/frappe/views/formview.js @@ -37,7 +37,13 @@ frappe.views.FormFactory = class FormFactory extends frappe.views.Factory { }); frappe.realtime.on("doc_viewers", function(data) { - frappe.ui.form.set_viewers(data); + // set users that currently viewing the form + frappe.ui.form.set_users(data, 'viewers'); + }); + + frappe.realtime.on("doc_typers", function(data) { + // set users that currently typing on the form + frappe.ui.form.set_users(data, 'typers'); }); } diff --git a/frappe/public/js/frappe/views/gantt/gantt_view.js b/frappe/public/js/frappe/views/gantt/gantt_view.js index df01c203bb..6fb4688a48 100644 --- a/frappe/public/js/frappe/views/gantt/gantt_view.js +++ b/frappe/public/js/frappe/views/gantt/gantt_view.js @@ -79,6 +79,10 @@ frappe.views.GanttView = class GanttView extends frappe.views.ListView { }); } + render_header() { + + } + render_gantt() { const me = this; const gantt_view_mode = this.view_user_settings.gantt_view_mode || 'Day'; @@ -126,8 +130,8 @@ frappe.views.GanttView = class GanttView extends frappe.views.ListView { var item = me.get_item(task.id); var html = - `
    ${task.name}
    -

    ${moment(task._start).format('MMM D')} - ${moment(task._end).format('MMM D')}

    `; + `
    ${task.name}
    +
    ${moment(task._start).format('MMM D')} - ${moment(task._end).format('MMM D')}
    `; // custom html in doctype settings var custom = me.settings.gantt_custom_popup_html; @@ -204,8 +208,8 @@ frappe.views.GanttView = class GanttView extends frappe.views.ListView { get required_libs() { return [ - "assets/frappe/js/lib/frappe-gantt/frappe-gantt.css", - "assets/frappe/js/lib/frappe-gantt/frappe-gantt.min.js" + "assets/frappe/node_modules/frappe-gantt/dist/frappe-gantt.css", + "assets/frappe/node_modules/frappe-gantt/dist/frappe-gantt.min.js" ]; } }; diff --git a/frappe/public/js/frappe/views/reports/print_grid.html b/frappe/public/js/frappe/views/reports/print_grid.html index ea510fa7bd..852c2925e8 100644 --- a/frappe/public/js/frappe/views/reports/print_grid.html +++ b/frappe/public/js/frappe/views/reports/print_grid.html @@ -8,50 +8,51 @@
    {% endif %} - - - - {% for col in columns %} - {% if col.name && col._id !== "_check" %} + + + + {% for col in columns %} + {% if col.name && col._id !== "_check" %} - {% endif %} - {% endfor %} - - - - - {% for row in data %} - - {% for col in columns %} - {% if col.name && col._id !== "_check" %} + {% endif %} + {% endfor %} + + + + + {% for row in data %} + + {% for col in columns %} + {% if col.name && col._id !== "_check" %} - {% var value = col.fieldname ? row[col.fieldname] : row[col.id]; %} + {% var value = col.fieldname ? row[col.fieldname] : row[col.id]; %} - - {% endif %} - {% endfor %} - - {% endfor %} - + + {% endif %} + {% endfor %} + + {% endfor %} +
    {{ __(col.name) }}
    - - {{ - col.formatter - ? col.formatter(row._index, col._index, value, col, row, true) - : col.format - ? col.format(value, row, col, data) - : col.docfield - ? frappe.format(value, col.docfield) - : value - }} - -
    + + {% format_data = row.is_total_row ? data[0] : row %} + {{ + col.formatter + ? col.formatter(row._index, col._index, value, col, format_data, true) + : col.format + ? col.format(value, row, col, format_data) + : col.docfield + ? frappe.format(value, col.docfield) + : value + }} + +
    diff --git a/frappe/public/js/frappe/views/reports/query_report.js b/frappe/public/js/frappe/views/reports/query_report.js index e79e43ae02..f82956adac 100644 --- a/frappe/public/js/frappe/views/reports/query_report.js +++ b/frappe/public/js/frappe/views/reports/query_report.js @@ -261,27 +261,25 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList { } get_report_settings() { - if (frappe.query_reports[this.report_name]) { - this.report_settings = this.get_local_report_settings(); - return this._load_script; - } - - this._load_script = (new Promise(resolve => frappe.call({ - method: 'frappe.desk.query_report.get_script', - args: { report_name: this.report_name }, - callback: resolve - }))).then(r => { - frappe.dom.eval(r.message.script || ''); - return r; - }).then(r => { - return frappe.after_ajax(() => { - this.report_settings = this.get_local_report_settings(); - this.report_settings.html_format = r.message.html_format; - this.report_settings.execution_time = r.message.execution_time || 0; - }); + return new Promise((resolve, reject) => { + if (frappe.query_reports[this.report_name]) { + this.report_settings = frappe.query_reports[this.report_name]; + resolve(); + } else { + frappe.xcall('frappe.desk.query_report.get_script', { + report_name: this.report_name + }).then(settings => { + frappe.dom.eval(settings.script || ''); + frappe.after_ajax(() => { + this.report_settings = this.get_local_report_settings(); + this.report_settings.html_format = settings.html_format; + this.report_settings.execution_time = settings.execution_time || 0; + frappe.query_reports[this.report_name] = this.report_settings; + resolve(); + }); + }).catch(reject); + } }); - - return this._load_script; } get_local_report_settings() { @@ -1158,6 +1156,7 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList { if (this.raw_data.add_total_row) { let totalRow = this.datatable.bodyRenderer.getTotalRow().reduce((row, cell) => { 
row[cell.column.id] = cell.content; + row.is_total_row = true; return row; }, {}); diff --git a/frappe/public/js/frappe/views/reports/report_view.js b/frappe/public/js/frappe/views/reports/report_view.js index 17e61c4f89..5475c302b7 100644 --- a/frappe/public/js/frappe/views/reports/report_view.js +++ b/frappe/public/js/frappe/views/reports/report_view.js @@ -10,6 +10,10 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView { return 'Report'; } + render_header() { + // Override List View Header + } + setup_defaults() { super.setup_defaults(); this.page_title = __('Report:') + ' ' + this.page_title; @@ -182,7 +186,6 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView { if (this.group_by) { this.$charts_wrapper.addClass('hidden'); } else if (this.chart) { - this.$charts_wrapper.removeClass('hidden'); this.refresh_charts(); } @@ -514,7 +517,8 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView { } refresh_charts() { - if (!this.chart) return; + if (!this.chart || !this.chart_args) return; + this.$charts_wrapper.removeClass('hidden'); const { x_axis, y_axes, chart_type } = this.chart_args; this.build_chart_args(x_axis, y_axes, chart_type); this.chart.update(this.chart_args); @@ -1091,8 +1095,7 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView { get_checked_items(only_docnames) { const indexes = this.datatable.rowmanager.getCheckedRows(); - const items = indexes.filter(i => i != undefined) - .map(i => this.data[i]); + const items = indexes.map(i => this.data[i]).filter(i => i != undefined); if (only_docnames) { return items.map(d => d.name); diff --git a/frappe/public/js/frappe/web_form/web_form.js b/frappe/public/js/frappe/web_form/web_form.js index 4dc1a50bc4..8cde4c9ba5 100644 --- a/frappe/public/js/frappe/web_form/web_form.js +++ b/frappe/public/js/frappe/web_form/web_form.js @@ -139,6 +139,16 @@ export default class WebForm extends frappe.ui.FieldGroup { 
this.handle_success(response.message); frappe.web_form.events.trigger('after_save'); this.after_save && this.after_save(); + // args doctype and docname added to link doctype in file manager + frappe.call({ + type: 'POST', + method: "frappe.handler.upload_file", + args: { + file_url: response.message.attachment, + doctype: response.message.doctype, + docname: response.message.name + } + }); } }, always: function() { diff --git a/frappe/public/js/frappe/web_form/webform_script.js b/frappe/public/js/frappe/web_form/webform_script.js index 53d9701774..c3211de99f 100644 --- a/frappe/public/js/frappe/web_form/webform_script.js +++ b/frappe/public/js/frappe/web_form/webform_script.js @@ -95,6 +95,11 @@ frappe.ready(function() { }; df.fields = form_data[df.fieldname]; + $.each(df.fields || [], function(_i, field) { + if (field.fieldtype === "Link") { + field.only_select = true; + } + }); if (df.fieldtype === "Attach") { df.is_private = true; diff --git a/frappe/public/js/frappe/widgets/onboarding_widget.js b/frappe/public/js/frappe/widgets/onboarding_widget.js index 821824a2d2..c2d891f5af 100644 --- a/frappe/public/js/frappe/widgets/onboarding_widget.js +++ b/frappe/public/js/frappe/widgets/onboarding_widget.js @@ -7,12 +7,6 @@ export default class OnboardingWidget extends Widget { } make_body() { - this.body.addClass("grid"); - if (this.steps.length < 5) { - this.body.addClass(`grid-rows-${this.steps.length}`); - } else if (this.steps.length >= 5) { - this.body.addClass("grid-rows-5"); - } this.steps.forEach((step) => { this.add_step(step); }); @@ -57,10 +51,14 @@ export default class OnboardingWidget extends Widget { let actions = { "Watch Video": () => this.show_video(step), "Create Entry": () => { - if (step.show_full_form) { - this.create_entry(step); + if (step.is_complete) { + frappe.set_route(`#List/${step.reference_document}`); } else { - this.show_quick_entry(step); + if (step.show_full_form) { + this.create_entry(step); + } else { + this.show_quick_entry(step); 
+ } } }, "Show Form Tour": () => this.show_form_tour(step), @@ -293,6 +291,10 @@ export default class OnboardingWidget extends Widget { }); }; } else { + frappe.msgprint({ + message: __("You may continue with onboarding"), + title: __("Looks Great") + }); this.mark_complete(step); } }, @@ -440,8 +442,6 @@ export default class OnboardingWidget extends Widget { set_actions() { this.action_area.empty(); - if (!this.user_can_dismiss) return; - const dismiss = $( `
    Dismiss
    ` ); diff --git a/frappe/public/js/frappe/widgets/widget_dialog.js b/frappe/public/js/frappe/widgets/widget_dialog.js index d5cd6d9643..054159116f 100644 --- a/frappe/public/js/frappe/widgets/widget_dialog.js +++ b/frappe/public/js/frappe/widgets/widget_dialog.js @@ -74,7 +74,7 @@ class WidgetDialog { this.filters = []; if (this.values && this.values.stats_filter) { - const filters_json = JSON.parse(this.values.stats_filter); + const filters_json = new Function(`return ${this.values.stats_filter}`)(); this.filters = Object.keys(filters_json).map((filter) => { let val = filters_json[filter]; return [this.values.link_to, filter, val[0], val[1], false]; diff --git a/frappe/public/js/lib/frappe-gantt/frappe-gantt.css b/frappe/public/js/lib/frappe-gantt/frappe-gantt.css deleted file mode 100644 index 4d463b281d..0000000000 --- a/frappe/public/js/lib/frappe-gantt/frappe-gantt.css +++ /dev/null @@ -1,118 +0,0 @@ -.gantt .grid-background { - fill: none; } - -.gantt .grid-header { - fill: #ffffff; - stroke: #e0e0e0; - stroke-width: 1.4; } - -.gantt .grid-row { - fill: #ffffff; } - -.gantt .grid-row:nth-child(even) { - fill: #f5f5f5; } - -.gantt .row-line { - stroke: #ebeff2; } - -.gantt .tick { - stroke: #e0e0e0; - stroke-width: 0.2; } - .gantt .tick.thick { - stroke-width: 0.4; } - -.gantt .today-highlight { - fill: #fcf8e3; - opacity: 0.5; } - -.gantt .arrow { - fill: none; - stroke: #666; - stroke-width: 1.4; } - -.gantt .bar { - fill: #b8c2cc; - stroke: #8D99A6; - stroke-width: 0; - transition: stroke-width .3s ease; - user-select: none; } - -.gantt .bar-progress { - fill: #a3a3ff; } - -.gantt .bar-invalid { - fill: transparent; - stroke: #8D99A6; - stroke-width: 1; - stroke-dasharray: 5; } - .gantt .bar-invalid ~ .bar-label { - fill: #555; } - -.gantt .bar-label { - fill: #fff; - dominant-baseline: central; - text-anchor: middle; - font-size: 12px; - font-weight: lighter; } - .gantt .bar-label.big { - fill: #555; - text-anchor: start; } - -.gantt .handle { - fill: 
#ddd; - cursor: ew-resize; - opacity: 0; - visibility: hidden; - transition: opacity .3s ease; } - -.gantt .bar-wrapper { - cursor: pointer; } - .gantt .bar-wrapper:hover .bar { - fill: #a9b5c1; } - .gantt .bar-wrapper:hover .bar-progress { - fill: #8a8aff; } - .gantt .bar-wrapper:hover .handle { - visibility: visible; - opacity: 1; } - .gantt .bar-wrapper.active .bar { - fill: #a9b5c1; } - .gantt .bar-wrapper.active .bar-progress { - fill: #8a8aff; } - -.gantt .lower-text, .gantt .upper-text { - font-size: 12px; - text-anchor: middle; } - -.gantt .upper-text { - fill: #555; } - -.gantt .lower-text { - fill: #333; } - -.gantt .hide { - display: none; } - -.gantt-container { - position: relative; - overflow: auto; - font-size: 12px; } - .gantt-container .popup-wrapper { - position: absolute; - top: 0; - left: 0; - background: rgba(0, 0, 0, 0.8); - padding: 0; - color: #959da5; - border-radius: 3px; } - .gantt-container .popup-wrapper .title { - border-bottom: 3px solid #a3a3ff; - padding: 10px; } - .gantt-container .popup-wrapper .subtitle { - padding: 10px; - color: #dfe2e5; } - .gantt-container .popup-wrapper .pointer { - position: absolute; - height: 5px; - margin: 0 0 0 -5px; - border: 5px solid transparent; - border-top-color: rgba(0, 0, 0, 0.8); } diff --git a/frappe/public/js/lib/frappe-gantt/frappe-gantt.min.js b/frappe/public/js/lib/frappe-gantt/frappe-gantt.min.js deleted file mode 100644 index 4abbba9f49..0000000000 --- a/frappe/public/js/lib/frappe-gantt/frappe-gantt.min.js +++ /dev/null @@ -1 +0,0 @@ -var Gantt=function(){"use strict";const t=["January","February","March","April","May","June","July","August","September","October","November","December"];var e={parse(t,e="-",s=":"){if(t instanceof Date)return t;if("string"==typeof t){let i,n;const a=t.split(" ");i=a[0].split(e).map(t=>parseInt(t,10)),n=a[1]&&a[1].split(s),i[1]=i[1]-1;let o=i;return n&&n.length&&(o=o.concat(n)),new Date(...o)}},to_string(t,e=!1){if(!(t instanceof Date))throw new 
TypeError("Invalid argument type");const i=this.get_date_values(t).map((t,e)=>(1===e&&(t+=1),s(t+"",2,"0"))),n=`${i[0]}-${i[1]}-${i[2]}`,a=`${i[3]}:${i[4]}:${i[5]}`;return n+(e?" "+a:"")},format(e,i="YYYY-MM-DD HH:mm:ss"){const n=this.get_date_values(e).map(t=>s(t,2,0)),a={YYYY:n[0],MM:s(+n[1]+1,2,0),DD:n[2],HH:n[3],mm:n[4],ss:n[5],D:n[2],MMMM:t[+n[1]],MMM:t[+n[1]]};let o=i;return Object.keys(a).sort((t,e)=>e.length-t.length).forEach(t=>{o=o.replace(t,a[t])}),o},diff(t,e,s="day"){let i,n,a,o,r,h,d;return d=(h=(r=(a=(o=(n=(i=t-e)/1e3)/60)/60)/24)/30)/12,s.endsWith("s")||(s+="s"),Math.floor({milliseconds:i,seconds:n,minutes:o,hours:a,days:r,months:h,years:d}[s])},today(){const t=this.get_date_values(new Date).slice(0,3);return new Date(...t)},now:()=>new Date,add(t,e,s){e=parseInt(e,10);const i=[t.getFullYear()+("year"===s?e:0),t.getMonth()+("month"===s?e:0),t.getDate()+("day"===s?e:0),t.getHours()+("hour"===s?e:0),t.getMinutes()+("minute"===s?e:0),t.getSeconds()+("second"===s?e:0),t.getMilliseconds()+("millisecond"===s?e:0)];return new Date(...i)},start_of(t,e){const s={year:6,month:5,day:4,hour:3,minute:2,second:1,millisecond:0};function i(t){const i=s[e];return s[t]<=i}const n=[t.getFullYear(),i("year")?0:t.getMonth(),i("month")?1:t.getDate(),i("day")?0:t.getHours(),i("hour")?0:t.getMinutes(),i("minute")?0:t.getSeconds(),i("second")?0:t.getMilliseconds()];return new Date(...n)},clone(t){return new Date(...this.get_date_values(t))},get_date_values:t=>[t.getFullYear(),t.getMonth(),t.getDate(),t.getHours(),t.getMinutes(),t.getSeconds(),t.getMilliseconds()],get_days_in_month(t){const e=[31,28,31,30,31,30,31,31,30,31,30,31],s=t.getMonth();if(1!==s)return e[s];const i=t.getFullYear();return i%4==0&&i%100!=0||i%400==0?29:28}};function s(t,e,s){return t+="",e>>=0,s=String(void 0!==s?s:" "),t.length>e?String(t):((e-=t.length)>s.length&&(s+=s.repeat(e/s.length)),s.slice(0,e)+String(t))}function i(t,e){return"string"==typeof t?(e||document).querySelector(t):t||null}function 
n(t,e){const s=document.createElementNS("http://www.w3.org/2000/svg",t);for(let t in e)if("append_to"===t){e.append_to.appendChild(s)}else"innerHTML"===t?s.innerHTML=e.innerHTML:s.setAttribute(t,e[t]);return s}function a(t,e,s,a){const o=function(t,e,s,a,o="0.4s",r="0.1s"){const h=t.querySelector("animate");if(h)return i.attr(h,{attributeName:e,from:s,to:a,dur:o,begin:"click + "+r}),t;const d=n("animate",{attributeName:e,from:s,to:a,dur:o,begin:r,calcMode:"spline",values:s+";"+a,keyTimes:"0; 1",keySplines:(p="ease-out",{ease:".25 .1 .25 1",linear:"0 0 1 1","ease-in":".42 0 1 1","ease-out":"0 0 .58 1","ease-in-out":".42 0 .58 1"}[p])});var p;return t.appendChild(d),t}(t,e,s,a);if(o===t){const t=document.createEvent("HTMLEvents");t.initEvent("click",!0,!0),t.eventName="click",o.dispatchEvent(t)}}i.on=((t,e,s,n)=>{n?i.delegate(t,e,s,n):(n=s,i.bind(t,e,n))}),i.off=((t,e,s)=>{t.removeEventListener(e,s)}),i.bind=((t,e,s)=>{e.split(/\s+/).forEach(function(e){t.addEventListener(e,s)})}),i.delegate=((t,e,s,i)=>{t.addEventListener(e,function(t){const e=t.target.closest(s);e&&(t.delegatedTarget=e,i.call(this,t,e))})}),i.closest=((t,e)=>e?e.matches(t)?e:i.closest(t,e.parentNode):null),i.attr=((t,e,s)=>{if(!s&&"string"==typeof e)return t.getAttribute(e);if("object"!=typeof e)t.setAttribute(e,s);else for(let s in e)i.attr(t,s,e[s])});class 
o{constructor(t,e){this.set_defaults(t,e),this.prepare(),this.draw(),this.bind()}set_defaults(t,e){this.action_completed=!1,this.gantt=t,this.task=e}prepare(){this.prepare_values(),this.prepare_helpers()}prepare_values(){this.invalid=this.task.invalid,this.height=this.gantt.options.bar_height,this.x=this.compute_x(),this.y=this.compute_y(),this.corner_radius=this.gantt.options.bar_corner_radius,this.duration=(e.diff(this.task._end,this.task._start,"hour")+24)/this.gantt.options.step,this.width=this.gantt.options.column_width*this.duration,this.progress_width=this.gantt.options.column_width*this.duration*(this.task.progress/100)||0,this.group=n("g",{class:"bar-wrapper "+(this.task.custom_class||""),"data-id":this.task.id}),this.bar_group=n("g",{class:"bar-group",append_to:this.group}),this.handle_group=n("g",{class:"handle-group",append_to:this.group})}prepare_helpers(){SVGElement.prototype.getX=function(){return+this.getAttribute("x")},SVGElement.prototype.getY=function(){return+this.getAttribute("y")},SVGElement.prototype.getWidth=function(){return+this.getAttribute("width")},SVGElement.prototype.getHeight=function(){return+this.getAttribute("height")},SVGElement.prototype.getEndX=function(){return 
this.getX()+this.getWidth()}}draw(){this.draw_bar(),this.draw_progress_bar(),this.draw_label(),this.draw_resize_handles()}draw_bar(){this.$bar=n("rect",{x:this.x,y:this.y,width:this.width,height:this.height,rx:this.corner_radius,ry:this.corner_radius,class:"bar",append_to:this.bar_group}),a(this.$bar,"width",0,this.width),this.invalid&&this.$bar.classList.add("bar-invalid")}draw_progress_bar(){this.invalid||(this.$bar_progress=n("rect",{x:this.x,y:this.y,width:this.progress_width,height:this.height,rx:this.corner_radius,ry:this.corner_radius,class:"bar-progress",append_to:this.bar_group}),a(this.$bar_progress,"width",0,this.progress_width))}draw_label(){n("text",{x:this.x+this.width/2,y:this.y+this.height/2,innerHTML:this.task.name,class:"bar-label",append_to:this.bar_group}),requestAnimationFrame(()=>this.update_label_position())}draw_resize_handles(){if(this.invalid)return;const t=this.$bar;n("rect",{x:t.getX()+t.getWidth()-9,y:t.getY()+1,width:8,height:this.height-2,rx:this.corner_radius,ry:this.corner_radius,class:"handle right",append_to:this.handle_group}),n("rect",{x:t.getX()+1,y:t.getY()+1,width:8,height:this.height-2,rx:this.corner_radius,ry:this.corner_radius,class:"handle left",append_to:this.handle_group}),this.task.progress&&this.task.progress<100&&(this.$handle_progress=n("polygon",{points:this.get_progress_polygon_points().join(","),class:"handle progress",append_to:this.handle_group}))}get_progress_polygon_points(){const t=this.$bar_progress;return[t.getEndX()-5,t.getY()+t.getHeight(),t.getEndX()+5,t.getY()+t.getHeight(),t.getEndX(),t.getY()+t.getHeight()-8.66]}bind(){this.invalid||this.setup_click_event()}setup_click_event(){i.on(this.group,"click",t=>{this.action_completed||(this.group.classList.contains("active")&&this.gantt.trigger_event("click",[this.task]),this.gantt.unselect_all(),this.group.classList.toggle("active"),this.show_popup())})}show_popup(){if(this.gantt.bar_being_dragged)return;const t=e.format(this.task._start,"MMM D")+" - 
"+e.format(this.task._end,"MMM D");this.gantt.show_popup({target_element:this.$bar,title:this.task.name,subtitle:t})}update_bar_position({x:t=null,width:e=null}){const s=this.$bar;if(t){if(!this.task.dependencies.map(t=>this.gantt.get_bar(t).$bar.getX()).reduce((e,s)=>t>=s,t))return void(e=null);this.update_attr(s,"x",t)}e&&e>=this.gantt.options.column_width&&this.update_attr(s,"width",e),this.update_label_position(),this.update_handle_position(),this.update_progressbar_position(),this.update_arrow_position()}date_changed(){const{new_start_date:t,new_end_date:e}=this.compute_start_end_date();this.task._start=t,this.task._end=e,this.gantt.trigger_event("date_change",[this.task,t,e])}progress_changed(){const t=this.compute_progress();this.task.progress=t,this.gantt.trigger_event("progress_change",[this.task,t])}set_action_completed(){this.action_completed=!0,setTimeout(()=>this.action_completed=!1,1e3)}compute_start_end_date(){const t=this.$bar,s=t.getX()/this.gantt.options.column_width,i=e.add(this.gantt.gantt_start,s*this.gantt.options.step,"hours"),n=t.getWidth()/this.gantt.options.column_width,a=e.add(i,n*this.gantt.options.step,"hours");return e.add(a,-1,"second"),{new_start_date:i,new_end_date:a}}compute_progress(){const t=this.$bar_progress.getWidth()/this.$bar.getWidth()*100;return parseInt(t,10)}compute_x(){let t=e.diff(this.task._start,this.gantt.gantt_start,"hour")/this.gantt.options.step*this.gantt.options.column_width;return this.gantt.view_is("Month")&&(t=e.diff(this.task._start,this.gantt.gantt_start,"day")*this.gantt.options.column_width/30),t}compute_y(){return this.gantt.options.header_height+this.gantt.options.padding+this.task._index*(this.height+this.gantt.options.padding)}get_snap_position(t){let e,s,i=t;return 
s=this.gantt.view_is("Week")?i-(e=t%(this.gantt.options.column_width/7))+(et.getWidth()?(e.classList.add("big"),e.setAttribute("x",t.getX()+t.getWidth()+5)):(e.classList.remove("big"),e.setAttribute("x",t.getX()+t.getWidth()/2))}update_handle_position(){const t=this.$bar;this.handle_group.querySelector(".handle.left").setAttribute("x",t.getX()+1),this.handle_group.querySelector(".handle.right").setAttribute("x",t.getEndX()-9);const e=this.group.querySelector(".handle.progress");e&&e.setAttribute("points",this.get_progress_polygon_points())}update_arrow_position(){this.arrows=this.arrows||[];for(let t of this.arrows)t.update()}}class r{constructor(t,e,s){this.gantt=t,this.from_task=e,this.to_task=s,this.calculate_path(),this.draw()}calculate_path(){let t=this.from_task.$bar.getX()+this.from_task.$bar.getWidth()/2;const e=()=>this.to_task.$bar.getX()this.from_task.$bar.getX()+this.gantt.options.padding;for(;e();)t-=10;const s=this.gantt.options.header_height+this.gantt.options.bar_height+(this.gantt.options.padding+this.gantt.options.bar_height)*this.from_task.task._index+this.gantt.options.padding,i=this.to_task.$bar.getX()-this.gantt.options.padding/2,n=this.gantt.options.header_height+this.gantt.options.bar_height/2+(this.gantt.options.padding+this.gantt.options.bar_height)*this.to_task.task._index+this.gantt.options.padding,a=this.from_task.task._index>this.to_task.task._index,o=this.gantt.options.arrow_curve,r=a?1:0,h=a?-o:o,d=a?n+this.gantt.options.arrow_curve:n-this.gantt.options.arrow_curve;if(this.path=`\n M ${t} ${s}\n V ${d}\n a ${o} ${o} 0 0 ${r} ${o} ${h}\n L ${i} ${n}\n m -5 -5\n l 5 5\n l -5 5`,this.to_task.$bar.getX()
    \n
    \n
    \n ',this.hide(),this.title=this.parent.querySelector(".title"),this.subtitle=this.parent.querySelector(".subtitle"),this.pointer=this.parent.querySelector(".pointer")}show(t){if(!t.target_element)throw new Error("target_element is required to show popup");t.position||(t.position="left");const e=t.target_element;let s;this.title.innerHTML=t.title,this.subtitle.innerHTML=t.subtitle,this.parent.style.width=this.parent.clientWidth+"px",e instanceof HTMLElement?s=e.getBoundingClientRect():e instanceof SVGElement&&(s=t.target_element.getBBox()),"left"===t.position&&(this.parent.style.left=s.x+(s.width+10)+"px",this.parent.style.top=s.y-this.title.clientHeight/2+s.height/2+"px",this.pointer.style.transform="rotateZ(90deg)",this.pointer.style.left="-7px",this.pointer.style.top=this.title.clientHeight/2-this.pointer.getBoundingClientRect().height+2+"px"),this.parent.style.opacity=1}hide(){this.parent.style.opacity=0}}return class{constructor(t,e,s){this.setup_wrapper(t),this.setup_options(s),this.setup_tasks(e),this.change_view_mode(),this.bind_events()}setup_wrapper(t){if("string"==typeof t&&(t=document.querySelector(t)),!(t instanceof HTMLElement))throw new Error("Invalid argument passed for element");this.$container=document.createElement("div"),this.$container.classList.add("gantt-container"),t.appendChild(this.$container),this.$svg=n("svg",{append_to:this.$container,class:"gantt"}),this.popup_wrapper=document.createElement("div"),this.popup_wrapper.classList.add("popup-wrapper"),this.$svg.parentElement.appendChild(this.popup_wrapper)}setup_options(t){this.options=Object.assign({},{header_height:50,column_width:30,step:24,view_modes:["Quarter Day","Half 
Day","Day","Week","Month"],bar_height:20,bar_corner_radius:3,arrow_curve:5,padding:18,view_mode:"Day",date_format:"YYYY-MM-DD",custom_popup_html:null},t)}setup_tasks(t){this.tasks=t.map((t,s)=>{if(t._start=e.parse(t.start),t._end=e.parse(t.end),e.diff(t._end,t._start,"year")>10&&(t.end=null),t._index=s,!t.start&&!t.end){const s=e.today();t._start=s,t._end=e.add(s,2,"day")}if(!t.start&&t.end&&(t._start=e.add(t._end,-2,"day")),t.start&&!t.end&&(t._end=e.add(t._start,2,"day")),t.start&&t.end||(t.invalid=!0),"string"==typeof t.dependencies||!t.dependencies){let e=[];t.dependencies&&(e=t.dependencies.split(",").map(t=>t.trim()).filter(t=>t)),t.dependencies=e}return t.id||(t.id=function(t){return t.name+"_"+Math.random().toString(36).slice(2,12)}(t)),t}),this.setup_dependencies()}setup_dependencies(){this.dependency_map={};for(let t of this.tasks)for(let e of t.dependencies)this.dependency_map[e]=this.dependency_map[e]||[],this.dependency_map[e].push(t.id)}refresh(t){this.setup_tasks(t),this.change_view_mode()}change_view_mode(t=this.options.view_mode){this.update_view_scale(t),this.setup_dates(),this.render(),this.trigger_event("view_change",[t])}update_view_scale(t){this.options.view_mode=t,"Day"===t?(this.options.step=24,this.options.column_width=38):"Half Day"===t?(this.options.step=12,this.options.column_width=38):"Quarter Day"===t?(this.options.step=6,this.options.column_width=38):"Week"===t?(this.options.step=168,this.options.column_width=140):"Month"===t&&(this.options.step=720,this.options.column_width=120)}setup_dates(){this.setup_gantt_dates(),this.setup_date_values()}setup_gantt_dates(){this.gantt_start=this.gantt_end=null;for(let t of this.tasks)(!this.gantt_start||t._startthis.gantt_end)&&(this.gantt_end=t._end);this.view_is(["Quarter Day","Half 
Day"])?(this.gantt_start=e.add(this.gantt_start,-7,"day"),this.gantt_end=e.add(this.gantt_end,7,"day")):this.view_is("Month")?(this.gantt_start=e.start_of(this.gantt_start,"year"),this.gantt_end=e.add(this.gantt_end,1,"year")):(this.gantt_start=e.add(this.gantt_start,-1,"month"),this.gantt_end=e.add(this.gantt_end,1,"month"))}setup_date_values(){this.dates=[];let t=null;for(;null===t||t=1&&a.getDate()<8&&(o+=" thick"),this.view_is("Month")&&(a.getMonth()+1)%3==0&&(o+=" thick"),n("path",{d:`M ${t} ${s} v ${i}`,class:o,append_to:this.layers.grid}),this.view_is("Month")?t+=e.get_days_in_month(a)*this.options.column_width/30:t+=this.options.column_width}}make_grid_highlights(){this.view_is("Day")&&n("rect",{x:e.diff(e.today(),this.gantt_start,"hour")/this.options.step*this.options.column_width,y:0,width:this.options.column_width,height:(this.options.bar_height+this.options.padding)*this.tasks.length+this.options.header_height+this.options.padding/2,class:"today-highlight",append_to:this.layers.grid})}make_dates(){for(let t of this.get_dates_to_draw())if(n("text",{x:t.lower_x,y:t.lower_y,innerHTML:t.lower_text,class:"lower-text",append_to:this.layers.date}),t.upper_text){const e=n("text",{x:t.upper_x,y:t.upper_y,innerHTML:t.upper_text,class:"upper-text",append_to:this.layers.date});e.getBBox().x2>this.layers.grid.getBBox().width&&e.remove()}}get_dates_to_draw(){let t=null;return this.dates.map((e,s)=>{const i=this.get_date_info(e,t,s);return t=e,i})}get_date_info(t,s,i){s||(s=e.add(t,1,"year"));const n={"Quarter Day_lower":e.format(t,"HH"),"Half Day_lower":e.format(t,"HH"),Day_lower:t.getDate()!==s.getDate()?e.format(t,"D"):"",Week_lower:t.getMonth()!==s.getMonth()?e.format(t,"D MMM"):e.format(t,"D"),Month_lower:e.format(t,"MMMM"),"Quarter Day_upper":t.getDate()!==s.getDate()?e.format(t,"D MMM"):"","Half Day_upper":t.getDate()!==s.getDate()?t.getMonth()!==s.getMonth()?e.format(t,"D 
MMM"):e.format(t,"D"):"",Day_upper:t.getMonth()!==s.getMonth()?e.format(t,"MMMM"):"",Week_upper:t.getMonth()!==s.getMonth()?e.format(t,"MMMM"):"",Month_upper:t.getFullYear()!==s.getFullYear()?e.format(t,"YYYY"):""},a={x:i*this.options.column_width,lower_y:this.options.header_height,upper_y:this.options.header_height-25},o={"Quarter Day_lower":4*this.options.column_width/2,"Quarter Day_upper":0,"Half Day_lower":2*this.options.column_width/2,"Half Day_upper":0,Day_lower:this.options.column_width/2,Day_upper:30*this.options.column_width/2,Week_lower:0,Week_upper:4*this.options.column_width/2,Month_lower:this.options.column_width/2,Month_upper:12*this.options.column_width/2};return{upper_text:n[`${this.options.view_mode}_upper`],lower_text:n[`${this.options.view_mode}_lower`],upper_x:a.x+o[`${this.options.view_mode}_upper`],upper_y:a.upper_y,lower_x:a.x+o[`${this.options.view_mode}_lower`],lower_y:a.lower_y}}make_bars(){this.bars=this.tasks.map(t=>{const e=new o(this,t);return this.layers.bar.appendChild(e.group),e})}make_arrows(){this.arrows=[];for(let t of this.tasks){let e=[];e=t.dependencies.map(e=>{const s=this.get_task(e);if(!s)return;const i=new r(this,this.bars[s._index],this.bars[t._index]);return this.layers.arrow.appendChild(i.element),i}).filter(Boolean),this.arrows=this.arrows.concat(e)}}map_arrows_on_bars(){for(let t of this.bars)t.arrows=this.arrows.filter(e=>e.from_task.task.id===t.task.id||e.to_task.task.id===t.task.id)}set_width(){const t=this.$svg.getBoundingClientRect().width,e=this.$svg.querySelector(".grid .grid-row").getAttribute("width");t{this.unselect_all(),this.hide_popup()})}bind_bar_events(){let t=!1,e=0,s=0,n=!1,a=!1,o=null,r=[];this.bar_being_dragged=null,i.on(this.layers.bar,"mousedown",".bar-wrapper, .handle",(h,d)=>{const p=i.closest(".bar-wrapper",d);d.classList.contains("left")?n=!0:d.classList.contains("right")?a=!0:d.classList.contains("bar-wrapper")&&(t=!0),p.classList.add("active"),e=h.offsetX,s=h.offsetY;const 
_=[o=p.getAttribute("data-id"),...this.get_all_dependent_tasks(o)];r=_.map(t=>this.get_bar(t)),this.bar_being_dragged=o,r.forEach(t=>{const e=t.$bar;e.ox=e.getX(),e.oy=e.getY(),e.owidth=e.getWidth(),e.finaldx=0})}),i.on(this.$svg,"mousemove",s=>{if(!(t||n||a))return;const i=s.offsetX-e;s.offsetY,r.forEach(e=>{const s=e.$bar;s.finaldx=this.get_snap_position(i),n?o===e.task.id?e.update_bar_position({x:s.ox+s.finaldx,width:s.owidth-s.finaldx}):e.update_bar_position({x:s.ox+s.finaldx}):a?o===e.task.id&&e.update_bar_position({width:s.owidth+s.finaldx}):t&&e.update_bar_position({x:s.ox+s.finaldx})})}),document.addEventListener("mouseup",e=>{(t||n||a)&&r.forEach(t=>t.group.classList.remove("active")),t=!1,n=!1,a=!1}),i.on(this.$svg,"mouseup",t=>{this.bar_being_dragged=null,r.forEach(t=>{t.$bar.finaldx&&(t.date_changed(),t.set_action_completed())})}),this.bind_bar_progress()}bind_bar_progress(){let t=0,e=0,s=null,n=null,a=null,o=null;i.on(this.$svg,"mousedown",".handle.progress",(r,h)=>{s=!0,t=r.offsetX,e=r.offsetY;const d=i.closest(".bar-wrapper",h).getAttribute("data-id");n=this.get_bar(d),a=n.$bar_progress,o=n.$bar,a.finaldx=0,a.owidth=a.getWidth(),a.min_dx=-a.getWidth(),a.max_dx=o.getWidth()-a.getWidth()}),i.on(this.$svg,"mousemove",e=>{if(!s)return;let o=e.offsetX-t;e.offsetY,o>a.max_dx&&(o=a.max_dx),o{s=!1,a&&a.finaldx&&(n.progress_changed(),n.set_action_completed())})}get_all_dependent_tasks(t){let e=[],s=[t];for(;s.length;){const t=s.reduce((t,e)=>t=t.concat(this.dependency_map[e]),[]);e=e.concat(t),s=t.filter(t=>!s.includes(t))}return e.filter(Boolean)}get_snap_position(t){let e,s,i=t;return s=this.view_is("Week")?i-(e=t%(this.options.column_width/7))+(e{t.classList.remove("active")})}view_is(t){return"string"==typeof t?this.options.view_mode===t:!!Array.isArray(t)&&t.some(t=>this.options.view_mode===t)}get_task(t){return this.tasks.find(e=>e.id===t)}get_bar(t){return this.bars.find(e=>e.task.id===t)}show_popup(t){this.popup||(this.popup=new 
h(this.popup_wrapper)),this.popup.show(t)}hide_popup(){this.popup&&this.popup.hide()}trigger_event(t,e){this.options["on_"+t]&&this.options["on_"+t].apply(null,e)}get_oldest_starting_date(){return this.tasks.map(t=>t._start).reduce((t,e)=>e<=t?e:t)}clear(){this.$svg.innerHTML=""}}}(); diff --git a/frappe/public/less/controls.less b/frappe/public/less/controls.less index 2b03b93f56..f842401515 100644 --- a/frappe/public/less/controls.less +++ b/frappe/public/less/controls.less @@ -34,16 +34,27 @@ overflow-wrap: break-word; } +.frappe-control[data-fieldtype="Data"] .control-input { + position: relative; +} + .link-btn { position: absolute; - top: 3px; + top: 4px; right: 4px; - border-radius: 2px; padding: 3px; display: none; z-index: 3; } +.phone-btn { + position: absolute; + top: 2px; + right: 8px; + padding: 3px; + z-index: 3; +} + .markdown-preview, .html-preview { padding: 12px 15px; min-height: 300px; diff --git a/frappe/public/less/desktop.less b/frappe/public/less/desktop.less index b66a0ad8fc..3e3b59ddf8 100644 --- a/frappe/public/less/desktop.less +++ b/frappe/public/less/desktop.less @@ -3,6 +3,40 @@ .desk-container { margin-top: 20px; + .page-switcher { + border-radius: 5px; + display: none; + border: 1px solid @border-color; + background-color: @panel-bg; + padding: 8px 15px; + justify-content: space-between; + align-items: center; + margin-bottom: 10px; + } + + .mobile-list { + display: none; + border-radius: 5px; + padding: 8px 15px; + border: 1px solid @border-color; + + .sidebar-item { + font-size: 12px; + font-weight: bold; + margin-bottom: 1px; + display: flex; + padding: 10px 15px; + border-radius: 4px; + text-decoration: none; + cursor: pointer; + text-rendering: optimizelegibility; + + &.selected { + background-color: @panel-bg; + } + } + } + .desk-sidebar { width: 20rem; display: block; @@ -103,6 +137,9 @@ .desk-body { padding-left: 15px !important; } + .page-switcher { + display: flex; + } } } @@ -369,13 +406,10 @@ .widget-head { display: flex; 
- .widget-title { - font-size: 20px; - } - .widget-subtitle { - font-size: 16px; margin-top: 5px; + font-size: 14px; + font-weight: 400; color: @text-muted; } @@ -388,48 +422,9 @@ .widget-body { margin-top: 20px; - padding-right: 200px; - - @media (max-width: 970px) { - padding-right: 0; - } - - &.grid { - display: grid; - grid-template-columns: 1fr 1fr; - grid-auto-flow: column; - - &.grid-rows-2 { - grid-template-rows: repeat(3, 1fr); - } - - &.grid-rows-3 { - grid-template-rows: repeat(3, 1fr); - } - - &.grid-rows-4 { - grid-template-rows: repeat(4, 1fr); - } - - &.grid-rows-5 { - grid-template-rows: repeat(5, 1fr); - } - - @media (max-width: 768px) { - grid-template-columns: 1fr; - &.grid-rows-2, - &.grid-rows-3, - &.grid-rows-4, - &.grid-rows-5 { - grid-template-columns: 1fr; - grid-auto-flow: row; - } - } - } .onboarding-step { margin-bottom: 8px; - font-size: 16px; letter-spacing: 0.015em; i { diff --git a/frappe/public/less/form.less b/frappe/public/less/form.less index df0334c14f..cd391c1f10 100644 --- a/frappe/public/less/form.less +++ b/frappe/public/less/form.less @@ -249,6 +249,7 @@ } .progress-message { + font-feature-settings: "tnum" 1; margin-top: 0px; } } @@ -1011,7 +1012,7 @@ body[data-route^="Form/Communication"] textarea[data-fieldname="subject"] { .map-columns .form-section { padding: 0 7px 7px; - border-bottom: none; + border-top: none; .clearfix { display: none; @@ -1021,3 +1022,7 @@ body[data-route^="Form/Communication"] textarea[data-fieldname="subject"] { .map-columns .form-section:first-child { padding-top: 7px; } + +.table-preview { + margin-top: 12px; +} diff --git a/frappe/public/less/gantt.less b/frappe/public/less/gantt.less index 6190dcf4bb..89be32a2f4 100644 --- a/frappe/public/less/gantt.less +++ b/frappe/public/less/gantt.less @@ -18,4 +18,29 @@ .frappe-rtl .gantt { direction: ltr; +} + +.list-paging-area .gantt-view-mode { + margin-left: 15px; + margin-right: 15px; +} + +.gantt-container { + .details-container { + min-width: 
160px; + + .heading { + margin-bottom: 10px; + font-size: 12px; + } + + .avatar-small { + width: 16px; + height: 16px; + } + + .standard-image { + display: block; + } + } } \ No newline at end of file diff --git a/frappe/public/less/list.less b/frappe/public/less/list.less index 639e67e3a7..4e066f86e4 100644 --- a/frappe/public/less/list.less +++ b/frappe/public/less/list.less @@ -471,30 +471,6 @@ input.list-check-all, input.list-row-checkbox { } } -// gantt -.list-paging-area .gantt-view-mode { - margin-left: 15px; - margin-right: 15px; -} - -.gantt { - .details-container { - .heading { - margin-bottom: 10px; - font-size: 12px; - } - - .avatar-small { - width: 16px; - height: 16px; - } - - .standard-image { - display: block; - } - } -} - .inbox-attachment, .inbox-link { margin-right: 7px; } diff --git a/frappe/public/scss/base.scss b/frappe/public/scss/base.scss index 36a1df55ac..0b01a83b02 100644 --- a/frappe/public/scss/base.scss +++ b/frappe/public/scss/base.scss @@ -4,6 +4,7 @@ html { body { -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; font-size: 16px; color: $body-color; } @@ -18,6 +19,7 @@ h1 { font-weight: 800; line-height: 1.25; letter-spacing: -0.025em; + margin-bottom: 1rem; @include media-breakpoint-up(sm) { line-height: 2.5rem; @@ -32,6 +34,7 @@ h1 { h2 { font-size: $font-size-xl; font-weight: bold; + margin-bottom: 0.75rem; @include media-breakpoint-up(sm) { font-size: $font-size-2xl; diff --git a/frappe/public/scss/blog.scss b/frappe/public/scss/blog.scss new file mode 100644 index 0000000000..9918b490c5 --- /dev/null +++ b/frappe/public/scss/blog.scss @@ -0,0 +1,95 @@ +.blog-list { + display: flex; + flex-wrap: wrap; + margin-right: -15px; + margin-left: -15px; + + &.result { + border-bottom: none; + } +} + +.blog-card { + margin-bottom: 2rem; + position: relative; + width: 100%; + + .card-body { + display: flex; + flex-direction: column; + justify-content: space-between; + } + + .card-img-top { + width: 100%; + 
overflow: hidden; + height: 12rem; + + img { + min-height: 12rem; + min-width: 100%; + object-fit: cover; + } + + .default-cover { + height: 100%; + width: 100%; + padding: 1rem; + display: flex; + align-items: center; + justify-content: center; + background: $gray-200; + + font-size: 1.2rem; + font-weight: 500; + color: $gray-600; + } + } + + .blog-card-footer { + display: flex; + align-items: center; + margin-top: 0.5rem; + + .avatar { + margin-right: 0.5rem; + border-radius: 50%; + } + } +} + +.blog-container { + font-size: 1rem; + max-width: 800px; + margin: 0px auto; + + .blog-title { + margin-top: 1rem; + + @include media-breakpoint-up(xl) { + line-height: 1; + font-size: $font-size-4xl; + } + } + + .blog-footer { + display: flex; + justify-content: space-between; + color: $text-muted; + margin-top: 3rem; + } + + .blog-intro { + font-size: 1.125rem; + font-weight: 400; + } + + .blog-content { + margin-bottom: 1rem; + + .blog-header { + margin-bottom: 3rem; + margin-top: 3rem; + } + } +} diff --git a/frappe/public/scss/doc.scss b/frappe/public/scss/doc.scss new file mode 100644 index 0000000000..13a59ba45b --- /dev/null +++ b/frappe/public/scss/doc.scss @@ -0,0 +1,288 @@ +$navbar-height: 7.625rem; +$navbar-height-lg: 4.5rem; + +.doc-layout { + padding-left: 1.5rem; + padding-right: 1.5rem; + padding-top: $navbar-height; + // border-bottom: 1px solid $gray-200; + + @include media-breakpoint-up(lg) { + padding-top: $navbar-height-lg; + } +} + +.sidebar-column { + display: none; + + @include media-breakpoint-up(lg) { + display: block; + } +} + +.doc-container { + max-width: 1280px; + padding-left: 1.5rem; + padding-right: 1.5rem; +} + +.navbar-expand-lg .doc-container { + padding-left: 1.5rem; + padding-right: 1.5rem; +} + +.doc-navbar { + background-color: white; + padding-left: 0; + padding-right: 0; + + .navbar-toggler { + margin-left: 0.75rem; + } + + .web-sidebar { + display: block; + border-top: 1px solid $gray-200; + + @include media-breakpoint-up(lg) { + 
display: none; + } + } + + .navbar-collapse { + height: calc(100vh - #{$navbar-height-lg}); + overflow: auto; + + @include media-breakpoint-up(lg) { + height: auto; + overflow: initial; + } + } + + .navbar-nav { + margin-left: -1rem; + margin-top: 0.75rem; + margin-bottom: 1.5rem; + + @include media-breakpoint-up(lg) { + margin-top: 0; + margin-bottom: 0; + } + } +} + +.doc-search-container { + display: flex; + margin-top: 0.75rem; + + @include media-breakpoint-up(lg) { + margin-top: 0; + } +} + +.doc-search { + position: relative; + width: 100%; + + @include media-breakpoint-up(lg) { + padding-left: 4rem; + padding-right: 4rem; + } + + .search-icon { + position: absolute; + left: 0; + top: 0; + width: 2.5rem; + height: 100%; + display: flex; + justify-content: center; + align-items: center; + } + + svg { + color: $gray-600; + } + + input { + padding-left: 2.5rem; + } + + .dropdown-menu { + .dropdown-item { + padding: 1rem 0.75rem; + } + + .match { + background-color: $primary-light; + color: $primary; + font-weight: 500; + padding: 0 0.125rem; + } + } +} + +.doc-sidebar { + position: sticky; + top: $navbar-height; + padding-bottom: 4rem; + height: 100vh; + overflow: hidden; + + .web-sidebar { + height: 100%; + overflow: auto; + padding-top: 3rem; + padding-bottom: 4rem; + } + + @include media-breakpoint-up(lg) { + top: $navbar-height-lg; + } +} + +.doc-main .page-content-wrapper { + padding: 0 0 2rem 0; + + @include media-breakpoint-up(lg) { + padding: 0rem 4rem 4rem 4rem; + } +} + +.doc-sidebar-logo { + padding-top: 2.5rem; + padding-bottom: 2rem; +} + +.page-toc { + font-size: $font-size-sm; + + h5 { + font-size: $font-size-sm; + margin-bottom: 0.5rem; + color: $gray-500; + } + + > div { + padding-top: 3rem; + padding-bottom: 4rem; + position: sticky; + top: $navbar-height; + + @include media-breakpoint-up(lg) { + top: $navbar-height-lg; + } + } + + ul { + padding-left: 0; + list-style-type: none; + } + + li > ul { + padding-left: 0.5rem; + } + + a { + display: 
block; + padding: 0.25rem 0; + + color: $gray-600; + text-decoration: none; + font-weight: 500; + @include transition(); + + &:hover { + color: $gray-800; + } + } +} + +// typography styles for documentation content +.doc-content .from-markdown { + > :first-child { + margin-top: 3rem; + } + + h1 { + font-size: $font-size-3xl; + font-weight: 500; + } + + h1 + p { + font-size: $font-size-lg; + } + + h2 { + font-size: $font-size-2xl; + font-weight: 400; + } + + h3 { + font-size: $font-size-xl; + font-weight: 500; + } + + h1, + h2, + h3, + h4, + h5, + h6 { + &::before { + height: 6rem; + margin-top: -6rem; + content: ''; + display: block; + visibility: hidden; + } + } + + h4 { + font-size: $font-size-lg; + font-weight: 500; + } + + strong { + font-weight: 600; + } + + table { + border-color: $gray-200; + } + + table thead { + background-color: $light; + } + + .table-bordered, + .table-bordered th, + .table-bordered td { + border-left: none; + border-right: none; + border-color: $gray-200; + } + + .table-bordered thead th, + .table-bordered thead td { + border-bottom-width: 1px; + } +} + +// next links +.btn-next-wrapper { + border-top: 1px solid $gray-200; + margin-top: 2rem; + padding-top: 1rem; + text-align: right; +} + +.doc-content .breadcrumb-container { + padding-left: 0; + padding-right: 0; + margin-top: 3rem; + + .breadcrumb { + margin-bottom: 0; + } +} \ No newline at end of file diff --git a/frappe/public/scss/markdown.scss b/frappe/public/scss/markdown.scss index 595b7f96a3..1cb78dcc62 100644 --- a/frappe/public/scss/markdown.scss +++ b/frappe/public/scss/markdown.scss @@ -1,4 +1,5 @@ .from-markdown { + color: $gray-700; line-height: 1.625; > * + * { @@ -32,12 +33,11 @@ } > blockquote { - padding: 0.75rem 1rem; + padding: 1.25rem 1rem; font-size: $font-size-sm; font-weight: 500; - color: $gray-900; - border-left: 4px solid $yellow; - background-color: lighten($yellow, 42%); + border: 1px solid $gray-200; + border-left: 3px solid $yellow; 
border-top-left-radius: 0.1rem; border-bottom-left-radius: 0.1rem; border-top-right-radius: 0.375rem; @@ -49,11 +49,17 @@ margin-bottom: 0; } + b, strong { + color: $gray-800; + } + + h1, h2, h3, h4, h5, h6 { + color: $gray-900; + } + h1 + p { - max-width: 42rem; margin-top: 0.75rem; font-size: $font-size-base; - color: $gray-900; @include media-breakpoint-up(sm) { margin-top: 1.25rem; @@ -104,6 +110,7 @@ tr > td, tr > th { font-size: $font-size-sm; + padding: 0.5rem; } th:empty { @@ -114,11 +121,17 @@ border: 1px solid $gray-400; border-radius: 0.375rem; } -} -// apply margin on first h1 if container is full width without top margin -main:not(.my-5) .from-markdown { - h1:first-child { - margin-top: 5rem; + .screenshot + em { + text-align: center; + display: block; + margin-top: 0.5rem; + margin-bottom: 2rem; + } + + code:not(.hljs) { + padding: 0 0.25rem; + background: $light; + border-radius: 0.125rem; } } diff --git a/frappe/public/scss/page-builder.scss b/frappe/public/scss/page-builder.scss index a028e34158..f6446a9ba9 100644 --- a/frappe/public/scss/page-builder.scss +++ b/frappe/public/scss/page-builder.scss @@ -1,13 +1,34 @@ +.hero-content { + .btn-primary { + margin-top: 1rem; + margin-right: 0.5rem; + + @include media-breakpoint-up(lg) { + margin-right: 1rem; + } + } + + .btn-primary-light { + margin-top: 1rem; + } +} + .hero-subtitle { @extend .lead; + font-weight: 400; + color: $gray-600; max-width: 42rem; + font-size: 1rem; + + @include media-breakpoint-up(sm) { + font-size: 1.25rem; + } } .section-description { max-width: 56rem; margin-top: 0.5rem; font-size: $font-size-base; - color: $gray-900; @include media-breakpoint-up(lg) { font-size: $font-size-lg; @@ -88,16 +109,14 @@ } .card { - .card-title { - color: $black; - } - - .card-body { - color: $gray-900; - } + @include transition(); &:hover { - border-color: $gray-600; + border-color: $gray-500; + } + + .card-title { + line-height: 1; } &.card-sm { @@ -156,12 +175,20 @@ } .nav-tabs { + flex-wrap: 
nowrap; + overflow-x: auto; + overflow-y: hidden; + // 1 pixel bottom padding so that the 2px active border is visible + padding-bottom: 1px; + .nav-link { - color: $gray-700; + color: $gray-800; font-weight: 500; border: none; padding: 1rem 0.5rem; margin-right: 2rem; + white-space: nowrap; + @include transition(); &:hover { color: $primary; @@ -171,7 +198,7 @@ .nav-link.active, .nav-item.show .nav-link { color: darken($primary, 5%); - background-color: #fff; + background-color: transparent; border-bottom: 2px solid $primary; } } @@ -183,7 +210,7 @@ .section-cta { padding: 3rem 2rem; text-align: center; - background-color: lighten($primary, 42%); + background-color: $primary-light; border-radius: 0.75rem; @include media-breakpoint-up(sm) { @@ -210,7 +237,6 @@ margin: 0 auto; margin-top: 0.5rem; font-size: $font-size-base; - color: $gray-900; @include media-breakpoint-up(md) { font-size: $font-size-lg; } @@ -220,7 +246,50 @@ margin: 0 auto; margin-top: 0.5rem; font-size: $font-size-xs; + } +} + +.section-small-cta { + padding: 1.8rem; + background-color: lighten($primary, 42%); + border-radius: 0.75rem; + display: flex; + flex-direction: column; + text-align: center; + + @include media-breakpoint-up(sm) { + flex-direction: column; + text-align: left; + } + + @include media-breakpoint-up(md) { + flex-direction: row; + justify-content: space-between; + + div { + align-self: center; + } + } + + .title { + max-width: 36rem; + font-size: $font-size-xl; + font-weight: 800; + line-height: 1.25; + @include media-breakpoint-up(md) { + font-size: $font-size-2xl; + } + } + .subtitle { + max-width: 36rem; + font-size: $font-size-base; color: $gray-900; + margin-bottom: 1.2rem; + + @include media-breakpoint-up(md) { + font-size: $font-size-lg; + margin-bottom: 0px; + } } } @@ -266,19 +335,77 @@ margin-right: auto; margin-top: 2rem; max-width: 52rem; - font-size: $font-size-2xl; + font-size: $font-size-lg; font-weight: 500; + + @include media-breakpoint-up(lg) { + font-size: 
$font-size-2xl; + } } .testimonial-by { - font-size: $font-size-lg; + font-size: $font-size-base; margin-top: 2rem; &:before { content: '—' } + + @include media-breakpoint-up(lg) { + font-size: $font-size-lg; + } } .split-section-content { margin-top: 2rem; } + +.section-image-grid { + display: flex; + flex-wrap: wrap; + width: 100%; + + // Offset for padding + margin-right: -2px; + margin-left: -2px; + + .image-container { + overflow: hidden; + border: 2px solid #fff; + border-radius: $border-radius; + + width: 100%; + max-height: 8rem; + + img { + width: 100%; + object-fit: cover; + } + + @include media-breakpoint-up(sm) { + &.wide { + max-width: 75%; + width: 75%; + max-height: 15rem; + height: 15rem; + + img { + width: 100%; + object-fit: cover; + } + } + + &.narrow { + max-width: 25%; + width: 25%; + max-height: 15rem; + height: 15rem; + + img { + height: 100%; + object-fit: cover; + } + } + } + } +} diff --git a/frappe/public/scss/sidebar.scss b/frappe/public/scss/sidebar.scss index 72f64a912e..d3442c2344 100644 --- a/frappe/public/scss/sidebar.scss +++ b/frappe/public/scss/sidebar.scss @@ -6,13 +6,42 @@ .sidebar-item a { display: block; - padding: 0.25rem 0; + padding: 0.25rem 0.5rem; + margin-top: 0.25rem; + border-radius: 0.375rem; font-size: $font-size-sm; - color: $gray-700; + color: $gray-600; text-decoration: none; font-weight: 500; + @include transition(); + + &:hover { + color: $gray-900; + } } .sidebar-item a.active { color: $primary; + background-color: $primary-light; +} + +.sidebar-item-icon { + width: 24px; + height: 24px; + display: inline-block; +} + +.sidebar-group { + margin-bottom: 1rem; + + h6 { + font-size: $font-size-sm; + margin-bottom: 0.75rem; + line-height: 1.5; + } + + > ul { + padding-left: 0.5rem; + margin-bottom: 2rem; + } } diff --git a/frappe/public/scss/variables.scss b/frappe/public/scss/variables.scss index e5f3a47f6f..1339af29a9 100644 --- a/frappe/public/scss/variables.scss +++ b/frappe/public/scss/variables.scss @@ -1,20 
+1,23 @@ -$gray-100: #fafbfc !default; -$gray-150: #f5f7fa !default; -$gray-200: #ebecf1 !default; -$gray-300: #d1d8dd !default; -$gray-400: #ced4da !default; -$gray-500: #adb5bd !default; -$gray-600: #8d99a6 !default; -$gray-700: #495057 !default; -$gray-800: #36414c !default; -$gray-900: #2e3338 !default; -$primary: #2490ef !default; +$gray-50: #F9FAFA !default; +$gray-100: #F4F5F6 !default; +$gray-200: #EEF0F2 !default; +$gray-300: #E2E6E9 !default; +$gray-400: #C8CFD5 !default; +$gray-500: #A6B1B9 !default; +$gray-600: #74808B !default; +$gray-700: #4C5A67 !default; +$gray-800: #313B44 !default; +$gray-900: #192734 !default; $black: #000 !default; +$primary: #2490ef !default; +$primary-light: lighten($primary, 42%) !default; +$light: $gray-50 !default; -$body-color: $gray-800 !default; +$body-color: $gray-700 !default; $text-muted: $gray-600 !default; $border-color: $gray-300 !default; +$headings-color: $gray-900 !default; $font-size-xs: 0.75rem !default; $font-size-sm: 0.875rem !default; @@ -33,20 +36,32 @@ $btn-font-size-lg: 1.125rem !default; $btn-line-height-lg: 1 !default; $btn-border-radius-lg: 0.5rem !default; $btn-border-radius: 0.375rem !default; -$btn-font-size: $font-size-sm; +$btn-font-size: $font-size-sm !default; $btn-padding-x: 1rem !default; $btn-padding-y: 0.5rem !default; $btn-font-weight: 500 !default; $navbar-nav-link-padding-x: 1rem !default; -$navbar-padding-y: 1rem; +$navbar-padding-y: 1rem !default; $card-border-radius: 0.75rem !default; -$card-spacer-y: 1rem !default; +$card-spacer-y: 0.5rem !default; $dropdown-font-size: $font-size-sm !default; $dropdown-border-radius: 0.375rem !default; $dropdown-item-padding-y: 0.5rem !default; $dropdown-item-padding-x: 0.5rem !default; +$grid-breakpoints: ( + xs: 0, + sm: 576px, + md: 768px, + lg: 992px, + xl: 1200px, + 2xl: 1440px +) !default; + @import '~bootstrap/scss/functions'; @import '~bootstrap/scss/variables'; +@import "~bootstrap/scss/mixins"; + +$code-color: $purple; diff --git 
a/frappe/public/scss/website-image.scss b/frappe/public/scss/website-image.scss index 8c32e821fe..d416c05650 100644 --- a/frappe/public/scss/website-image.scss +++ b/frappe/public/scss/website-image.scss @@ -55,6 +55,12 @@ img:after { width: 100%; } +.website-image-extra-small { + @include website-image; + width: 2.5rem; + height: 2.5rem; +} + .website-image-small { @include website-image; width: 5rem; diff --git a/frappe/public/scss/website.scss b/frappe/public/scss/website.scss index 0149ac0d0a..e03c502784 100644 --- a/frappe/public/scss/website.scss +++ b/frappe/public/scss/website.scss @@ -5,8 +5,10 @@ @import 'multilevel-dropdown'; @import 'website-image'; @import 'page-builder'; +@import 'blog'; @import 'markdown'; @import 'sidebar'; +@import 'doc'; .container { padding-left: 1.25rem; @@ -15,26 +17,26 @@ @include media-breakpoint-up(sm) { .container { - padding-left: 1rem; - padding-right: 1rem; - } -} - -@include media-breakpoint-up(md) { - .container { - padding-left: 1rem; - padding-right: 1rem; + padding-left: 0; + padding-right: 0; } } @include media-breakpoint-up(lg) { .container { - padding-left: 1rem; - padding-right: 1rem; + padding-left: 2.5rem; + padding-right: 2.5rem; } } @include media-breakpoint-up(xl) { + .container { + padding-left: 5rem; + padding-right: 5rem; + } +} + +@include media-breakpoint-up(2xl) { .container { padding-left: 1.5rem; padding-right: 1.5rem; @@ -46,7 +48,7 @@ } .navbar-light .navbar-nav .nav-link { - color: $gray-900; + color: $gray-700; font-size: $font-size-sm; font-weight: 500; @@ -108,9 +110,13 @@ color: $light; } +.breadcrumb-container { + margin-top: 1rem; +} + .breadcrumb { - padding-left: 0; - padding-right: 0; + padding: 0; + font-size: $font-size-sm; background-color: white; } @@ -145,11 +151,12 @@ a.card { width: 5rem; height: 2rem; object-fit: contain; + object-position: left; } .footer-link, .footer-child-item a { font-weight: 500; - color: $gray-900; + color: $gray-700; &:hover { color: $primary; @@ -158,8 
+165,9 @@ a.card { } .footer-col-left, .footer-col-right { - padding-top: 1rem; + padding-top: 0.8rem; padding-bottom: 1rem; + line-height: 2; } .footer-col-right { @@ -280,7 +288,6 @@ h5.modal-title { } .btn-primary-light { - $primary-light: lighten($primary, 42%); @include button-variant( $background: $primary-light, $border: $primary-light, @@ -304,3 +311,19 @@ h5.modal-title { .image-loaded { filter: blur(0rem); } + +.embed-container { + position: relative; + padding-bottom: 56.25%; + height: 0; + overflow: hidden; + max-width: 100%; +} + +.embed-container iframe { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; +} \ No newline at end of file diff --git a/frappe/templates/base.html b/frappe/templates/base.html index 5688ce4fc3..0b82b3dac2 100644 --- a/frappe/templates/base.html +++ b/frappe/templates/base.html @@ -10,7 +10,7 @@ {% include "templates/includes/meta_block.html" %} {% endblock %} - {% block title %} {{ title | striptags }} {% endblock %} + {% block title %}{{ title | striptags }}{% endblock %} {% block favicon %} {% endblock %} + + {%- block head -%} {% if head_html is defined -%} {{ head_html or "" }} diff --git a/frappe/templates/doc.html b/frappe/templates/doc.html new file mode 100644 index 0000000000..3e1cc5509a --- /dev/null +++ b/frappe/templates/doc.html @@ -0,0 +1,189 @@ +{% extends "templates/base.html" %} +{%- from "templates/includes/navbar/navbar_items.html" import render_item -%} + +{%- block head_include %} + +{% endblock -%} + +{%- block navbar -%} + +{%- endblock -%} + +{% block content %} + + +{% macro container_attributes() -%} +id="page-{{ name or route | e }}" data-path="{{ pathname | e }}" +{%- if page_or_generator=="Generator" %}source-type="Generator" data-doctype="{{ doctype }}"{%- endif %} +{%- if source_content_type %}source-content-type="{{ source_content_type }}"{%- endif %} +{%- endmacro %} + +
    +
    + +
    +
    + {% block page_container %} +
    +
    + {%- if add_breadcrumbs -%} + {% include "templates/includes/breadcrumbs.html" %} + {%- endif -%} + {%- block page_content -%}{%- endblock -%} +
    +
    + {% endblock %} +
    +
    + {%- if page_toc_html -%} +
    + {% block page_toc %} + {% if page_toc_html %} +
    +
    On this page
    + {{ page_toc_html }} +
    + {% endif %} + {% endblock %} +
    + {%- endif -%} +
    +
    + +{% endblock %} + +{%- block script -%} + +{%- endblock -%} diff --git a/frappe/templates/emails/new_notification.html b/frappe/templates/emails/new_notification.html index fb1fc98901..4eea49a712 100644 --- a/frappe/templates/emails/new_notification.html +++ b/frappe/templates/emails/new_notification.html @@ -8,6 +8,5 @@ {% endif %} - diff --git a/frappe/templates/includes/blog/blog.html b/frappe/templates/includes/blog/blog.html deleted file mode 100644 index 5afaeb6ab8..0000000000 --- a/frappe/templates/includes/blog/blog.html +++ /dev/null @@ -1,19 +0,0 @@ -{% extends "templates/web.html" %} - -{% block title %}{{ blog_title or _("Blog") }}{% endblock %} -{% block header %}

    {{ blog_title or _("Blog") }}

    {% endblock %} -{% block hero %}{% endblock %} - -{% block page_content %} - - -
    -
    - {% include "templates/includes/list/list.html" %} -
    -
    -{% endblock %} - -{% block script %} - -{% endblock %} diff --git a/frappe/templates/includes/blog/blogger.html b/frappe/templates/includes/blog/blogger.html index 68df22786d..ef8f8257e8 100644 --- a/frappe/templates/includes/blog/blogger.html +++ b/frappe/templates/includes/blog/blogger.html @@ -1,7 +1,7 @@ {% from "frappe/templates/includes/macros.html" import square_image_with_fallback %}
    - {{ square_image_with_fallback(src=blogger_info.avatar, size='72px', alt=blogger_info.full_name, class='align-self-start mr-3 rounded') }} + {{ square_image_with_fallback(src=blogger_info.avatar, size='small', alt=blogger_info.full_name, class='align-self-start mr-3 rounded') }}
    {{ blogger_info.full_name }} diff --git a/frappe/templates/includes/breadcrumbs.html b/frappe/templates/includes/breadcrumbs.html index 3fda731372..e281c4b111 100644 --- a/frappe/templates/includes/breadcrumbs.html +++ b/frappe/templates/includes/breadcrumbs.html @@ -1,5 +1,5 @@ -{% if not no_breadcrumbs and parents %} -
    +{%- if not no_breadcrumbs and parents -%} + -{% endif %} +{%- endif -%} diff --git a/frappe/templates/includes/comments/comment.html b/frappe/templates/includes/comments/comment.html index 3fe3d7df58..1deb49bb3e 100644 --- a/frappe/templates/includes/comments/comment.html +++ b/frappe/templates/includes/comments/comment.html @@ -1,7 +1,7 @@ {% from "frappe/templates/includes/macros.html" import square_image_with_fallback %}
    - {{ square_image_with_fallback(src=frappe.get_gravatar(comment.comment_email or comment.sender), size='48px', alt=comment.sender_full_name, class='align-self-start mr-3') }} + {{ square_image_with_fallback(src=frappe.get_gravatar(comment.comment_email or comment.sender), size='extra-small', alt=comment.sender_full_name, class='align-self-start mr-3') }}
    diff --git a/frappe/templates/includes/comments/comments.py b/frappe/templates/includes/comments/comments.py index cf2436da15..b5a366e63f 100644 --- a/frappe/templates/includes/comments/comments.py +++ b/frappe/templates/includes/comments/comments.py @@ -34,7 +34,7 @@ def add_comment(comment, comment_email, comment_by, reference_doctype, reference clear_cache(route) content = (comment.content - + "

    {2}

    ".format(frappe.utils.get_request_site_address(), + + "

    {2}

    ".format(frappe.utils.get_request_site_address(), comment.name, _("View Comment"))) diff --git a/frappe/templates/includes/macros.html b/frappe/templates/includes/macros.html index 3e822b8bf3..767bd59ec9 100644 --- a/frappe/templates/includes/macros.html +++ b/frappe/templates/includes/macros.html @@ -1,18 +1,6 @@ -{% macro square_image_with_fallback(src=None, size=None, alt=None, class="") %} +{% macro square_image_with_fallback(src=None, size='small', alt=None, class="") %} {% if src %} -{{ alt or '' }} + {% else %}
    {% endif %} diff --git a/frappe/templates/includes/navbar/navbar_login.html b/frappe/templates/includes/navbar/navbar_login.html index 4e2c6dc93b..2a58efe039 100644 --- a/frappe/templates/includes/navbar/navbar_login.html +++ b/frappe/templates/includes/navbar/navbar_login.html @@ -1,5 +1,5 @@ -{% if not only_static and not hide_login %} +{% if not only_static %} {% if frappe.session.user != 'Guest' %}
  • + {%- if item.group_title -%} + +
    {{ item.group_title }}
    + {{ render_sidebar_items(item.group_items) }} + + {%- else -%} + + {% if item.type != 'input' %} + {%- set item_route = item.route[1:] if item.route[0] == '/' else item.route -%} + + {{ _(item.title or item.label) }} + + {% else %} +
    + +
    + {% endif %} + + {%- endif -%} +
  • +{% endmacro %} + +{% macro render_sidebar_items(items) %} +{%- if items | len > 0 -%} +
      + {% for item in items -%} + {{ render_sidebar_item(item) }} + {%- endfor %} +
    +{%- endif -%} +{% endmacro %} + +{% macro my_account() %} +{% if frappe.user != 'Guest' %} + +{% endif %} +{% endmacro %} +
    + {% if sidebar_title %} +
  • + {{ sidebar_title }} +
  • + {% endif %}
    diff --git a/frappe/templates/print_formats/standard_macros.html b/frappe/templates/print_formats/standard_macros.html index 07ef577aa7..c330bc7619 100644 --- a/frappe/templates/print_formats/standard_macros.html +++ b/frappe/templates/print_formats/standard_macros.html @@ -35,6 +35,7 @@ {%- set visible_columns = get_visible_columns(doc.get(df.fieldname), table_meta, df) -%}
    + @@ -95,7 +96,7 @@ data-fieldname="{{ df.fieldname }}" data-fieldtype="{{ df.fieldtype }}" {%- macro render_text_field(df, doc) -%} {%- if doc.get(df.fieldname) != None -%}
    - {%- if df.fieldtype in ("Text", "Code", "Long Text") %}{%- endif %} + {%- if df.fieldtype in ("Text", "Code", "Long Text", "Text Editor") %}{%- endif %} {%- if df.fieldtype=="Code" %}
    {{ doc.get(df.fieldname) }}
    {% else -%} diff --git a/frappe/templates/web.html b/frappe/templates/web.html index 2e7aea6b53..e014ef7ace 100644 --- a/frappe/templates/web.html +++ b/frappe/templates/web.html @@ -1,10 +1,6 @@ {% extends base_template_path %} {% block hero %}{% endblock %} -{% macro page_content() %} -{%- block page_content -%}{%- endblock -%} -{% endmacro %} - {% block content %} {% macro main_content() %} @@ -31,7 +27,7 @@
    - {{ page_content() }} + {%- block page_content -%}{%- endblock -%}