diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml
new file mode 100644
index 0000000000..4b1147e79f
--- /dev/null
+++ b/.github/workflows/docker-release.yml
@@ -0,0 +1,14 @@
+name: Trigger Docker build on release
+on:
+ release:
+ types: [released]
+jobs:
+ curl:
+ runs-on: ubuntu-latest
+ container:
+ image: alpine:latest
+ steps:
+ - name: curl
+ run: |
+ apk add curl bash
+ curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests
diff --git a/.travis.yml b/.travis.yml
index 30eb882256..63895675ea 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -25,6 +25,7 @@ cache:
# https://docs.cypress.io/guides/guides/continuous-integration.html#Caching
- ~/.cache
+
matrix:
include:
- name: "Python 3.7 MariaDB"
@@ -46,7 +47,14 @@ matrix:
script: bench --site test_site run-ui-tests frappe --headless
before_install:
- # install wkhtmltopdf
+ # do we really want to run travis?
+ - |
+ python ./.travis/roulette.py
+ if [[ $? != 2 ]];then
+ exit;
+ fi
+
+ # install wkhtmltopdf
- wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
- tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
- sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
diff --git a/.travis/roulette.py b/.travis/roulette.py
new file mode 100644
index 0000000000..4d83137199
--- /dev/null
+++ b/.travis/roulette.py
@@ -0,0 +1,54 @@
+# exit code 2 means "run the full test suite"; any other exit code (e.g. 0) tells CI to stop the build early
+import os
+import re
+import shlex
+import subprocess
+import sys
+
+
+def get_output(command, shell=False):
+ print(command)
+ command = shlex.split(command)
+ return subprocess.check_output(command, shell=shell, encoding="utf8").strip()
+
+def is_py(file):
+ return file.endswith("py")
+
+def is_js(file):
+ return file.endswith("js")
+
+def is_docs(file):
+	regex = re.compile(r'\.(md|png|jpg|jpeg)$|^\.github|LICENSE')
+	return bool(regex.search(file))
+
+
+if __name__ == "__main__":
+ build_type = os.environ.get("TYPE")
+ commit_range = os.environ.get("TRAVIS_COMMIT_RANGE")
+ print("Build Type: {}".format(build_type))
+ print("Commit Range: {}".format(commit_range))
+
+ try:
+ files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
+ except Exception:
+ sys.exit(2)
+
+ if "fatal" not in files_changed:
+ files_list = files_changed.split()
+ only_docs_changed = len(list(filter(is_docs, files_list))) == len(files_list)
+ only_js_changed = len(list(filter(is_js, files_list))) == len(files_list)
+ only_py_changed = len(list(filter(is_py, files_list))) == len(files_list)
+
+ if only_docs_changed:
+ print("Only docs were updated, stopping build process.")
+ sys.exit(0)
+
+ if only_js_changed and build_type == "server":
+ print("Only JavaScript code was updated; Stopping Python build process.")
+ sys.exit(0)
+
+ if only_py_changed and build_type == "ui":
+ print("Only Python code was updated, stopping Cypress build process.")
+ sys.exit(0)
+
+ sys.exit(2)
diff --git a/README.md b/README.md
index 860958087e..7545249610 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
@@ -33,8 +33,8 @@
Full-stack web application framework that uses Python and MariaDB on the server side and a tightly integrated client side library. Built for [ERPNext](https://erpnext.com)
### Table of Contents
-* [Installation](#installation)
-* [Documentation](https://frappe.io/docs)
+* [Installation](https://frappeframework.com/docs/user/en/installation)
+* [Documentation](https://frappeframework.com/docs)
* [License](#license)
### Installation
@@ -49,7 +49,7 @@ Full-stack web application framework that uses Python and MariaDB on the server
### Website
For details and documentation, see the website
-[https://frappe.io](https://frappe.io)
+[https://frappeframework.com](https://frappeframework.com)
### License
This repository has been released under the [MIT License](LICENSE).
diff --git a/cypress.json b/cypress.json
index ae0c45c3ae..97ac41bb61 100644
--- a/cypress.json
+++ b/cypress.json
@@ -2,6 +2,6 @@
"baseUrl": "http://test_site_ui:8000",
"projectId": "92odwv",
"adminPassword": "admin",
- "defaultCommandTimeout": 10000,
+ "defaultCommandTimeout": 20000,
"pageLoadTimeout": 15000
}
diff --git a/cypress/integration/control_duration.js b/cypress/integration/control_duration.js
new file mode 100644
index 0000000000..edad759216
--- /dev/null
+++ b/cypress/integration/control_duration.js
@@ -0,0 +1,45 @@
+context('Control Duration', () => {
+ before(() => {
+ cy.login();
+ cy.visit('/desk#workspace/Website');
+ });
+
+ function get_dialog_with_duration(hide_days=0, hide_seconds=0) {
+ return cy.dialog({
+ title: 'Duration',
+ fields: [{
+ 'fieldname': 'duration',
+ 'fieldtype': 'Duration',
+ 'hide_days': hide_days,
+ 'hide_seconds': hide_seconds
+ }]
+ });
+ }
+
+ it('should set duration', () => {
+ get_dialog_with_duration().as('dialog');
+ cy.get('.frappe-control[data-fieldname=duration] input')
+ .first()
+ .click();
+ cy.get('.duration-input[data-duration=days]')
+ .type(45, {force: true})
+ .blur({force: true});
+ cy.get('.duration-input[data-duration=minutes]')
+ .type(30)
+ .blur({force: true});
+ cy.get('.frappe-control[data-fieldname=duration] input').first().should('have.value', '45d 30m');
+ cy.get('.frappe-control[data-fieldname=duration] input').first().blur();
+ cy.get('.duration-picker').should('not.be.visible');
+ cy.get('@dialog').then(dialog => {
+ let value = dialog.get_value('duration');
+ expect(value).to.equal(3889800);
+ });
+ });
+
+ it('should hide days or seconds according to duration options', () => {
+ get_dialog_with_duration(1, 1).as('dialog');
+ cy.get('.frappe-control[data-fieldname=duration] input').first().click();
+ cy.get('.duration-input[data-duration=days]').should('not.be.visible');
+ cy.get('.duration-input[data-duration=seconds]').should('not.be.visible');
+ });
+});
\ No newline at end of file
diff --git a/cypress/integration/control_link.js b/cypress/integration/control_link.js
index 658a7fe320..0dc7d5b88e 100644
--- a/cypress/integration/control_link.js
+++ b/cypress/integration/control_link.js
@@ -1,7 +1,11 @@
context('Control Link', () => {
- beforeEach(() => {
+ before(() => {
cy.login();
cy.visit('/desk#workspace/Website');
+ });
+
+ beforeEach(() => {
+ cy.visit('/desk#workspace/Website');
cy.create_records({
doctype: 'ToDo',
description: 'this is a test todo for link'
@@ -30,7 +34,7 @@ context('Control Link', () => {
cy.get('.frappe-control[data-fieldname=link] input').focus().as('input');
cy.wait('@search_link');
- cy.get('@input').type('todo for link');
+ cy.get('@input').type('todo for link', { delay: 200 });
cy.wait('@search_link');
cy.get('.frappe-control[data-fieldname=link] ul').should('be.visible');
cy.get('.frappe-control[data-fieldname=link] input').type('{enter}', { delay: 100 });
diff --git a/cypress/integration/form.js b/cypress/integration/form.js
index 23fc57fc57..ef89a18e7d 100644
--- a/cypress/integration/form.js
+++ b/cypress/integration/form.js
@@ -9,6 +9,7 @@ context('Form', () => {
it('create a new form', () => {
cy.visit('/desk#Form/ToDo/New ToDo 1');
cy.fill_field('description', 'this is a test todo', 'Text Editor').blur();
+ cy.wait(300);
cy.get('.page-title').should('contain', 'Not Saved');
cy.server();
cy.route({
diff --git a/cypress/integration/grid_pagination.js b/cypress/integration/grid_pagination.js
index f03384cb93..b383f30bb8 100644
--- a/cypress/integration/grid_pagination.js
+++ b/cypress/integration/grid_pagination.js
@@ -40,12 +40,12 @@ context('Grid Pagination', () => {
cy.get('@table').find('.current-page-number').should('contain', '20');
cy.get('@table').find('.total-page-number').should('contain', '20');
});
- it('deletes all rows', ()=> {
- cy.visit('/desk#Form/Contact/Test Contact');
- cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
- cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
- cy.get('@table').find('button.grid-remove-all-rows').click();
- cy.get('.modal-dialog .btn-primary').contains('Yes').click();
- cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
- });
+ // it('deletes all rows', ()=> {
+ // cy.visit('/desk#Form/Contact/Test Contact');
+ // cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
+ // cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
+ // cy.get('@table').find('button.grid-remove-all-rows').click();
+ // cy.get('.modal-dialog .btn-primary').contains('Yes').click();
+ // cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
+ // });
});
\ No newline at end of file
diff --git a/cypress/integration/relative_filters.js b/cypress/integration/relative_time_filters.js
similarity index 85%
rename from cypress/integration/relative_filters.js
rename to cypress/integration/relative_time_filters.js
index 986c5ce342..ac70c44345 100644
--- a/cypress/integration/relative_filters.js
+++ b/cypress/integration/relative_time_filters.js
@@ -1,7 +1,6 @@
context('Relative Timeframe', () => {
beforeEach(() => {
cy.login();
- cy.visit('/desk#workspace/Website');
});
before(() => {
cy.login();
@@ -10,14 +9,14 @@ context('Relative Timeframe', () => {
frappe.call("frappe.tests.ui_test_helpers.create_todo_records");
});
});
- it('set relative filter for Previous and check list', () => {
+ it('sets relative timespan filter for last week and filters list', () => {
cy.visit('/desk#List/ToDo/List');
cy.get('.list-row:contains("this is fourth todo")').should('exist');
cy.get('.tag-filters-area .btn:contains("Add Filter")').click();
cy.get('.fieldname-select-area').should('exist');
cy.get('.fieldname-select-area input').type("Due Date{enter}", { delay: 100 });
- cy.get('select.condition.form-control').select("Previous");
- cy.get('.filter-field select.input-with-feedback.form-control').select("1 week");
+ cy.get('select.condition.form-control').select("Timespan");
+ cy.get('.filter-field select.input-with-feedback.form-control').select("last week");
cy.server();
cy.route('POST', '/api/method/frappe.desk.reportview.get').as('list_refresh');
cy.get('.filter-box .btn:contains("Apply")').click();
@@ -29,13 +28,13 @@ context('Relative Timeframe', () => {
cy.get('.remove-filter.btn').click();
cy.wait('@save_user_settings');
});
- it('set relative filter for Next and check list', () => {
+ it('sets relative timespan filter for next week and filters list', () => {
cy.visit('/desk#List/ToDo/List');
cy.get('.list-row:contains("this is fourth todo")').should('exist');
cy.get('.tag-filters-area .btn:contains("Add Filter")').click();
cy.get('.fieldname-select-area input').type("Due Date{enter}", { delay: 100 });
- cy.get('select.condition.form-control').select("Next");
- cy.get('.filter-field select.input-with-feedback.form-control').select("1 week");
+ cy.get('select.condition.form-control').select("Timespan");
+ cy.get('.filter-field select.input-with-feedback.form-control').select("next week");
cy.server();
cy.route('POST', '/api/method/frappe.desk.reportview.get').as('list_refresh');
cy.get('.filter-box .btn:contains("Apply")').click();
diff --git a/frappe/__init__.py b/frappe/__init__.py
index f0b6bfe41b..f35409fa48 100644
--- a/frappe/__init__.py
+++ b/frappe/__init__.py
@@ -231,9 +231,8 @@ def get_site_config(sites_path=None, site_path=None):
if os.path.exists(site_config):
config.update(get_file_json(site_config))
elif local.site and not local.flags.new_site:
- print("{0} does not exist".format(local.site))
+ print("Site {0} does not exist".format(local.site))
sys.exit(1)
- #raise IncorrectSitePath, "{0} does not exist".format(site_config)
return _dict(config)
@@ -1146,8 +1145,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
"""Import a file using Data Import."""
- from frappe.core.doctype.data_import import data_import
- data_import.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
+ from frappe.core.doctype.data_import.data_import import import_doc
+ import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""
@@ -1559,10 +1558,10 @@ def get_doctype_app(doctype):
loggers = {}
log_level = None
-def logger(module=None, with_more_info=True):
+def logger(module=None, with_more_info=False):
'''Returns a python logger that uses StreamHandler'''
from frappe.utils.logger import get_logger
- return get_logger(module or 'default', with_more_info=with_more_info)
+ return get_logger(module=module, with_more_info=with_more_info)
def log_error(message=None, title=_("Error")):
'''Log error to Error Log'''
diff --git a/frappe/app.py b/frappe/app.py
index 3bb764149b..57db867882 100644
--- a/frappe/app.py
+++ b/frappe/app.py
@@ -99,6 +99,16 @@ def application(request):
frappe.monitor.stop(response)
frappe.recorder.dump()
+ frappe.logger("frappe.web").info({
+ "site": get_site_name(request.host),
+ "remote_addr": getattr(request, "remote_addr", "NOTFOUND"),
+ "base_url": getattr(request, "base_url", "NOTFOUND"),
+ "full_path": getattr(request, "full_path", "NOTFOUND"),
+ "method": getattr(request, "method", "NOTFOUND"),
+ "scheme": getattr(request, "scheme", "NOTFOUND"),
+ "http_status_code": getattr(response, "status_code", "NOTFOUND")
+ })
+
if response and hasattr(frappe.local, 'rate_limiter'):
response.headers.extend(frappe.local.rate_limiter.headers())
@@ -195,7 +205,6 @@ def handle_exception(e):
frappe.local.login_manager.clear_cookies()
if http_status_code >= 500:
- frappe.logger().error('Request Error', exc_info=True)
make_error_snapshot(e)
if return_as_message:
diff --git a/frappe/automation/doctype/assignment_rule/assignment_rule.py b/frappe/automation/doctype/assignment_rule/assignment_rule.py
index 0a5d85636f..bf45347c4f 100644
--- a/frappe/automation/doctype/assignment_rule/assignment_rule.py
+++ b/frappe/automation/doctype/assignment_rule/assignment_rule.py
@@ -44,7 +44,7 @@ class AssignmentRule(Document):
user = self.get_user()
assign_to.add(dict(
- assign_to = user,
+ assign_to = [user],
doctype = doc.get('doctype'),
name = doc.get('name'),
description = frappe.render_template(self.description, doc),
diff --git a/frappe/boot.py b/frappe/boot.py
index 42b5ca38b7..8862ce3c61 100644
--- a/frappe/boot.py
+++ b/frappe/boot.py
@@ -19,6 +19,7 @@ from frappe.email.inbox import get_email_accounts
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.website.doctype.web_page_view.web_page_view import is_tracking_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import get_energy_points
+from frappe.model.base_document import get_controller
from frappe.social.doctype.post.post import frequently_visited_links
def get_bootinfo():
@@ -84,6 +85,7 @@ def get_bootinfo():
bootinfo.points = get_energy_points(frappe.session.user)
bootinfo.frequently_visited_links = frequently_visited_links()
bootinfo.link_preview_doctypes = get_link_preview_doctypes()
+ bootinfo.additional_filters_config = get_additional_filters_from_hooks()
return bootinfo
@@ -106,6 +108,8 @@ def load_desktop_data(bootinfo):
from frappe.desk.desktop import get_desk_sidebar_items
bootinfo.allowed_modules = get_modules_from_all_apps_for_user()
bootinfo.allowed_workspaces = get_desk_sidebar_items(True)
+ bootinfo.module_page_map = get_controller("Desk Page").get_module_page_map()
+ bootinfo.dashboards = frappe.get_all("Dashboard")
def get_allowed_pages(cache=False):
return get_user_pages_or_reports('Page', cache=cache)
@@ -294,3 +298,11 @@ def get_link_preview_doctypes():
link_preview_doctypes.append(custom.doc_type)
return link_preview_doctypes
+
+def get_additional_filters_from_hooks():
+ filter_config = frappe._dict()
+ filter_hooks = frappe.get_hooks('filters_config')
+ for hook in filter_hooks:
+ filter_config.update(frappe.get_attr(hook)())
+
+ return filter_config
diff --git a/frappe/cache_manager.py b/frappe/cache_manager.py
index 4560680653..92d12289c6 100644
--- a/frappe/cache_manager.py
+++ b/frappe/cache_manager.py
@@ -24,7 +24,7 @@ user_cache_keys = ("bootinfo", "user_recent", "roles", "user_doc", "lang",
"has_role:Page", "has_role:Report")
doctype_cache_keys = ("meta", "form_meta", "table_columns", "last_modified",
- "linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map')
+ "linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map', 'data_import_column_header_map')
def clear_user_cache(user=None):
diff --git a/frappe/commands/__init__.py b/frappe/commands/__init__.py
index 8110f2ec19..b7294fff77 100644
--- a/frappe/commands/__init__.py
+++ b/frappe/commands/__init__.py
@@ -22,7 +22,11 @@ def pass_context(f):
pr = cProfile.Profile()
pr.enable()
- ret = f(frappe._dict(ctx.obj), *args, **kwargs)
+ try:
+ ret = f(frappe._dict(ctx.obj), *args, **kwargs)
+ except frappe.exceptions.SiteNotSpecifiedError as e:
+ click.secho(str(e), fg='yellow')
+ sys.exit(1)
if profile:
pr.disable()
@@ -39,13 +43,14 @@ def pass_context(f):
return click.pass_context(_func)
-def get_site(context):
+def get_site(context, raise_err=True):
try:
site = context.sites[0]
return site
except (IndexError, TypeError):
- print('Please specify --site sitename')
- sys.exit(1)
+ if raise_err:
+ raise frappe.SiteNotSpecifiedError
+ return None
def popen(command, *args, **kwargs):
output = kwargs.get('output', True)
diff --git a/frappe/commands/scheduler.py b/frappe/commands/scheduler.py
index 6f51c81211..bd9c9d2cb0 100755
--- a/frappe/commands/scheduler.py
+++ b/frappe/commands/scheduler.py
@@ -4,6 +4,7 @@ import sys
import frappe
from frappe.utils import cint
from frappe.commands import pass_context, get_site
+from frappe.exceptions import SiteNotSpecifiedError
def _is_scheduler_enabled():
enable_scheduler = False
@@ -30,6 +31,8 @@ def trigger_scheduler_event(context, event):
frappe.utils.scheduler.trigger(site, event, now=True)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('enable-scheduler')
@pass_context
@@ -45,6 +48,8 @@ def enable_scheduler(context):
print("Enabled for", site)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('disable-scheduler')
@pass_context
@@ -60,7 +65,8 @@ def disable_scheduler(context):
print("Disabled for", site)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('scheduler')
@@ -120,7 +126,7 @@ def doctor(context, site=None):
"Get diagnostic info about background workers"
from frappe.utils.doctor import doctor as _doctor
if not site:
- site = get_site(context)
+ site = get_site(context, raise_err=False)
return _doctor(site=site)
@click.command('show-pending-jobs')
diff --git a/frappe/commands/site.py b/frappe/commands/site.py
index 82ed72dd5c..55ac05bd71 100755
--- a/frappe/commands/site.py
+++ b/frappe/commands/site.py
@@ -15,6 +15,7 @@ import frappe
from frappe import _
from frappe.commands import get_site, pass_context
from frappe.commands.scheduler import _is_scheduler_enabled
+from frappe.exceptions import SiteNotSpecifiedError
from frappe.installer import update_site_config
from frappe.utils import get_site_path, touch_file
@@ -43,14 +44,16 @@ def new_site(site, mariadb_root_username=None, mariadb_root_password=None, admin
_new_site(db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=verbose, install_apps=install_app, source_sql=source_sql, force=force,
- no_mariadb_socket=no_mariadb_socket, db_password=db_password, db_type=db_type, db_host=db_host, db_port=db_port)
+ no_mariadb_socket=no_mariadb_socket, db_password=db_password, db_type=db_type, db_host=db_host,
+ db_port=db_port, new_site=True)
if len(frappe.utils.get_sites()) == 1:
use(site)
def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=None,
admin_password=None, verbose=False, install_apps=None, source_sql=None, force=False,
- no_mariadb_socket=False, reinstall=False, db_password=None, db_type=None, db_host=None, db_port=None):
+ no_mariadb_socket=False, reinstall=False, db_password=None, db_type=None, db_host=None,
+ db_port=None, new_site=False):
"""Install a new Frappe site"""
if not force and os.path.exists(site):
@@ -79,7 +82,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
make_site_dirs()
installing = touch_file(get_site_path('locks', 'installing.lock'))
- atexit.register(_new_site_cleanup, site, mariadb_root_username, mariadb_root_password)
install_db(root_login=mariadb_root_username, root_password=mariadb_root_password, db_name=db_name,
admin_password=admin_password, verbose=verbose, source_sql=source_sql, force=force, reinstall=reinstall,
@@ -96,15 +98,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
scheduler_status = "disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled"
print("*** Scheduler is", scheduler_status, "***")
-def _new_site_cleanup(site, mariadb_root_username, mariadb_root_password):
- installing = get_site_path('locks', 'installing.lock')
-
- if installing and os.path.exists(installing):
- if mariadb_root_password:
- _drop_site(site, mariadb_root_username, mariadb_root_password, force=True, no_backup=True)
- shutil.rmtree(site)
-
- frappe.destroy()
@click.command('restore')
@click.argument('sql-file-path')
@@ -122,30 +115,47 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
if not os.path.exists(sql_file_path):
- sql_file_path = '../' + sql_file_path
+ base_path = '..'
+ sql_file_path = os.path.join(base_path, sql_file_path)
if not os.path.exists(sql_file_path):
print('Invalid path {0}'.format(sql_file_path[3:]))
sys.exit(1)
+ elif sql_file_path.startswith(os.sep):
+ base_path = os.sep
+ else:
+ base_path = '.'
+
if sql_file_path.endswith('sql.gz'):
- sql_file_path = extract_sql_gzip(os.path.abspath(sql_file_path))
+ decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path))
+ else:
+ decompressed_file_name = sql_file_path
site = get_site(context)
frappe.init(site=site)
_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
- verbose=context.verbose, install_apps=install_app, source_sql=sql_file_path,
- force=context.force)
+ verbose=context.verbose, install_apps=install_app, source_sql=decompressed_file_name,
+ force=True)
# Extract public and/or private files to the restored site, if user has given the path
if with_public_files:
+ with_public_files = os.path.join(base_path, with_public_files)
public = extract_tar_files(site, with_public_files, 'public')
os.remove(public)
if with_private_files:
+ with_private_files = os.path.join(base_path, with_private_files)
private = extract_tar_files(site, with_private_files, 'private')
os.remove(private)
+ # Removing temporarily created file
+ if decompressed_file_name != sql_file_path:
+ os.remove(decompressed_file_name)
+
+ success_message = "Site {0} has been restored{1}".format(site, " with files" if (with_public_files or with_private_files) else "")
+ click.secho(success_message, fg="green")
+
@click.command('reinstall')
@click.option('--admin-password', help='Administrator Password for reinstalled site')
@click.option('--mariadb-root-username', help='Root username for MariaDB')
@@ -192,6 +202,8 @@ def install_app(context, apps):
_install_app(app, verbose=context.verbose)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('list-apps')
@pass_context
@@ -221,7 +233,8 @@ def add_system_manager(context, email, first_name, last_name, send_welcome_email
frappe.db.commit()
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('disable-user')
@click.argument('email')
@@ -252,6 +265,8 @@ def migrate(context, rebuild_website=False, skip_failing=False):
migrate(context.verbose, rebuild_website=rebuild_website, skip_failing=skip_failing)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
print("Compiling Python Files...")
compileall.compile_dir('../apps', quiet=1, rx=re.compile('.*node_modules.*'))
@@ -263,7 +278,12 @@ def migrate_to(context, frappe_provider):
"Migrates site to the specified provider"
from frappe.integrations.frappe_providers import migrate_to
for site in context.sites:
+ frappe.init(site=site)
+ frappe.connect()
migrate_to(site, frappe_provider)
+ frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('run-patch')
@click.argument('module')
@@ -278,6 +298,8 @@ def run_patch(context, module):
frappe.modules.patch_handler.run_single(module, force=context.force)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('reload-doc')
@click.argument('module')
@@ -294,6 +316,8 @@ def reload_doc(context, module, doctype, docname):
frappe.db.commit()
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('reload-doctype')
@click.argument('doctype')
@@ -308,6 +332,8 @@ def reload_doctype(context, doctype):
frappe.db.commit()
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('add-to-hosts')
@pass_context
@@ -315,6 +341,8 @@ def add_to_hosts(context):
"Add site to hosts"
for site in context.sites:
frappe.commands.popen('echo 127.0.0.1\t{0} | sudo tee -a /etc/hosts'.format(site))
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('use')
@click.argument('site')
@@ -328,7 +356,7 @@ def use(site, sites_path='.'):
sitefile.write(site)
print("Current Site set to {}".format(site))
else:
- print("{} does not exist".format(site))
+ print("Site {} does not exist".format(site))
@click.command('backup')
@click.option('--with-files', default=False, is_flag=True, help="Take backup with files")
@@ -361,6 +389,9 @@ def backup(context, with_files=False, backup_path_db=None, backup_path_files=Non
print("Private files: ", odb.backup_path_private_files)
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
+
sys.exit(exit_code)
@click.command('remove-from-installed-apps')
@@ -376,22 +407,27 @@ def remove_from_installed_apps(context, app):
remove_from_installed_apps(app)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('uninstall-app')
@click.argument('app')
@click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True)
@click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False)
+@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False)
@pass_context
-def uninstall(context, app, dry_run=False, yes=False):
+def uninstall(context, app, dry_run=False, yes=False, no_backup=False):
"Remove app and linked modules from site"
from frappe.installer import remove_app
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
- remove_app(app, dry_run, yes)
+ remove_app(app, dry_run, yes, no_backup)
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('drop-site')
@@ -422,7 +458,7 @@ def _drop_site(site, root_login='root', root_password=None, archived_sites_path=
else:
click.echo("="*80)
click.echo("Error: The operation has stopped because backup of {s}'s database failed.".format(s=site))
- click.echo("Reason: {reason}{sep}".format(reason=err[1], sep="\n"))
+ click.echo("Reason: {reason}{sep}".format(reason=str(err), sep="\n"))
click.echo("Fix the issue and try again.")
click.echo(
"Hint: Use 'bench drop-site {s} --force' to force the removal of {s}".format(sep="\n", tab="\t", s=site)
@@ -483,6 +519,8 @@ def set_admin_password(context, admin_password, logout_all_sessions=False):
admin_password = None
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('set-last-active-for-user')
@click.option('--user', help="Setup last active date for user")
@@ -528,6 +566,8 @@ def publish_realtime(context, event, message, room, user, doctype, docname, afte
frappe.db.commit()
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('browse')
@click.argument('site', required=False)
@@ -555,6 +595,8 @@ def start_recording(context):
for site in context.sites:
frappe.init(site=site)
frappe.recorder.start()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('stop-recording')
@@ -563,6 +605,8 @@ def stop_recording(context):
for site in context.sites:
frappe.init(site=site)
frappe.recorder.stop()
+ if not context.sites:
+ raise SiteNotSpecifiedError
commands = [
diff --git a/frappe/commands/translate.py b/frappe/commands/translate.py
index 5a48e2b409..48a7fd1db7 100644
--- a/frappe/commands/translate.py
+++ b/frappe/commands/translate.py
@@ -1,6 +1,7 @@
from __future__ import unicode_literals, absolute_import, print_function
import click
from frappe.commands import pass_context, get_site
+from frappe.exceptions import SiteNotSpecifiedError
# translation
@click.command('build-message-files')
@@ -15,6 +16,8 @@ def build_message_files(context):
frappe.translate.rebuild_all_translation_files()
finally:
frappe.destroy()
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('new-language') #, help="Create lang-code.csv for given app")
@pass_context
diff --git a/frappe/commands/utils.py b/frappe/commands/utils.py
index 3610393d9a..28b6344b8e 100644
--- a/frappe/commands/utils.py
+++ b/frappe/commands/utils.py
@@ -6,6 +6,7 @@ import json, os, sys, subprocess
from distutils.spawn import find_executable
import frappe
from frappe.commands import pass_context, get_site
+from frappe.exceptions import SiteNotSpecifiedError
from frappe.utils import update_progress_bar, get_bench_path
from frappe.utils.response import json_handler
from coverage import Coverage
@@ -51,7 +52,8 @@ def clear_cache(context):
frappe.website.render.clear_cache()
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('clear-website-cache')
@pass_context
@@ -65,7 +67,8 @@ def clear_website_cache(context):
frappe.website.render.clear_cache()
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('destroy-all-sessions')
@click.option('--reason')
@@ -81,7 +84,8 @@ def destroy_all_sessions(context, reason=None):
frappe.db.commit()
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('show-config')
@pass_context
@@ -117,7 +121,8 @@ def reset_perms(context):
reset_perms(d)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('execute')
@click.argument('method')
@@ -164,6 +169,9 @@ def execute(context, method, args=None, kwargs=None, profile=False):
if ret:
print(json.dumps(ret, default=json_handler))
+ if not context.sites:
+ raise SiteNotSpecifiedError
+
@click.command('add-to-email-queue')
@click.argument('email-path')
@@ -197,7 +205,8 @@ def export_doc(context, doctype, docname):
frappe.modules.export_doc(doctype, docname)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('export-json')
@click.argument('doctype')
@@ -206,15 +215,16 @@ def export_doc(context, doctype, docname):
@pass_context
def export_json(context, doctype, path, name=None):
"Export doclist as json to the given path, use '-' as name for Singles."
- from frappe.core.doctype.data_import import data_import
+ from frappe.core.doctype.data_import.data_import import export_json
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
- data_import.export_json(doctype, path, name=name)
+ export_json(doctype, path, name=name)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('export-csv')
@click.argument('doctype')
@@ -222,15 +232,16 @@ def export_json(context, doctype, path, name=None):
@pass_context
def export_csv(context, doctype, path):
"Export data import template with data for DocType"
- from frappe.core.doctype.data_import import data_import
+ from frappe.core.doctype.data_import.data_import import export_csv
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
- data_import.export_csv(doctype, path)
+ export_csv(doctype, path)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('export-fixtures')
@click.option('--app', default=None, help='Export fixtures of a specific app')
@@ -245,14 +256,15 @@ def export_fixtures(context, app=None):
export_fixtures(app=app)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('import-doc')
@click.argument('path')
@pass_context
def import_doc(context, path, force=False):
"Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported"
- from frappe.core.doctype.data_import import data_import
+ from frappe.core.doctype.data_import.data_import import import_doc
if not os.path.exists(path):
path = os.path.join('..', path)
@@ -264,10 +276,11 @@ def import_doc(context, path, force=False):
try:
frappe.init(site=site)
frappe.connect()
- data_import.import_doc(path, overwrite=context.force)
+ import_doc(path, overwrite=context.force)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('import-csv')
@click.argument('path')
@@ -280,7 +293,7 @@ def import_doc(context, path, force=False):
@pass_context
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
"Import CSV using data import"
- from frappe.core.doctype.data_import import importer
+ from frappe.core.doctype.data_import_legacy import importer
from frappe.utils.csvutils import read_csv_content
site = get_site(context)
@@ -316,20 +329,12 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
"Import documents in bulk from CSV or XLSX using data import"
- from frappe.core.doctype.data_import.importer_new import Importer
+ from frappe.core.doctype.data_import.data_import import import_file
site = get_site(context)
frappe.init(site=site)
frappe.connect()
-
- data_import = frappe.new_doc('Data Import Beta')
- data_import.submit_after_import = submit_after_import
- data_import.mute_emails = mute_emails
- data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records'
-
- i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True)
- i.import_data()
-
+ import_file(doctype, file_path, import_type, submit_after_import, console=True)
frappe.destroy()
@@ -364,6 +369,8 @@ def mariadb(context):
import os
site = get_site(context)
+ if not site:
+ raise SiteNotSpecifiedError
frappe.init(site=site)
# This is assuming you're within the bench instance.
@@ -487,7 +494,17 @@ def run_tests(context, app=None, module=None, doctype=None, test=(),
if coverage:
# Generate coverage report only for app that is being tested
source_path = os.path.join(get_bench_path(), 'apps', app or 'frappe')
- cov = Coverage(source=[source_path], omit=['*.html', '*.js', '*.xml', '*.css', '*/doctype/*/*_dashboard.py', '*/patches/*'])
+ cov = Coverage(source=[source_path], omit=[
+ '*.html',
+ '*.js',
+ '*.xml',
+ '*.css',
+ '*.less',
+ '*.scss',
+ '*.vue',
+ '*/doctype/*/*_dashboard.py',
+ '*/patches/*'
+ ])
cov.start()
ret = frappe.test_runner.main(app, module, doctype, context.verbose, tests=tests,
@@ -577,7 +594,8 @@ def request(context, args=None, path=None):
print(frappe.response)
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('make-app')
@click.argument('destination')
@@ -658,7 +676,8 @@ def rebuild_global_search(context, static_pages=False):
finally:
frappe.destroy()
-
+ if not context.sites:
+ raise SiteNotSpecifiedError
@click.command('auto-deploy')
@click.argument('app')
diff --git a/frappe/contacts/doctype/contact/contact.js b/frappe/contacts/doctype/contact/contact.js
index 5285f8b85c..fae6e6515e 100644
--- a/frappe/contacts/doctype/contact/contact.js
+++ b/frappe/contacts/doctype/contact/contact.js
@@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", {
});
frm.refresh_field("links");
+ let numbers = frm.doc.phone_nos;
+ if (numbers && numbers.length && frappe.phone_call.handler) {
+ frm.add_custom_button(__('Call'), () => {
+ numbers = frm.doc.phone_nos
+ .sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no)
+ .map(d => d.phone);
+ frappe.phone_call.handler(numbers);
+ });
+ }
+
if (frm.doc.links) {
frappe.call({
method: "frappe.contacts.doctype.contact.contact.address_query",
diff --git a/frappe/contacts/doctype/contact/contact.py b/frappe/contacts/doctype/contact/contact.py
index 99068dcf6d..4cf209541c 100644
--- a/frappe/contacts/doctype/contact/contact.py
+++ b/frappe/contacts/doctype/contact/contact.py
@@ -3,7 +3,7 @@
from __future__ import unicode_literals
import frappe
-from frappe.utils import cstr, has_gravatar
+from frappe.utils import cstr, has_gravatar, cint
from frappe import _
from frappe.model.document import Document
from frappe.core.doctype.dynamic_link.dynamic_link import deduplicate_dynamic_links
@@ -133,7 +133,7 @@ def get_default_contact(doctype, name):
dl.parenttype = "Contact"''', (doctype, name))
if out:
- return sorted(out, key = functools.cmp_to_key(lambda x,y: cmp(y[1], x[1])))[0][0]
+ return sorted(out, key = functools.cmp_to_key(lambda x,y: cmp(cint(y[1]), cint(x[1]))))[0][0]
else:
return None
diff --git a/frappe/core/doctype/access_log/test_access_log.py b/frappe/core/doctype/access_log/test_access_log.py
index 312f77c026..9830507423 100644
--- a/frappe/core/doctype/access_log/test_access_log.py
+++ b/frappe/core/doctype/access_log/test_access_log.py
@@ -158,11 +158,7 @@ class TestAccessLog(unittest.TestCase):
request = requests.post(private_file_link, headers=self.header)
last_doc = frappe.get_last_doc('Access Log')
- if request.status_code == 403:
- # if file is not accessible, access log wont be generated
- pass
-
- else:
+ if request.ok:
# check for the access log of downloaded file
self.assertEqual(new_private_file.doctype, last_doc.export_from)
self.assertEqual(new_private_file.name, last_doc.reference_document)
diff --git a/frappe/core/doctype/communication/communication.py b/frappe/core/doctype/communication/communication.py
index abd24fb468..232d485f36 100644
--- a/frappe/core/doctype/communication/communication.py
+++ b/frappe/core/doctype/communication/communication.py
@@ -2,20 +2,21 @@
# MIT License. See license.txt
from __future__ import unicode_literals, absolute_import
+from collections import Counter
import frappe
from frappe import _
from frappe.model.document import Document
-from frappe.utils import validate_email_address, get_fullname, strip_html, cstr
-from frappe.core.doctype.communication.email import (validate_email,
- notify, _notify, update_parent_mins_to_first_response)
+from frappe.utils import validate_email_address, strip_html, cstr, time_diff_in_seconds
+from frappe.core.doctype.communication.email import validate_email, notify, _notify
from frappe.core.utils import get_parent_doc
from frappe.utils.bot import BotReply
from frappe.utils import parse_addr
from frappe.core.doctype.comment.comment import update_comment_in_doc
from email.utils import parseaddr
from six.moves.urllib.parse import unquote
-from collections import Counter
+from frappe.utils.user import is_system_user
from frappe.contacts.doctype.contact.contact import get_contact_name
+from frappe.automation.doctype.assignment_rule.assignment_rule import apply as apply_assignment_rule
exclude_from_linked_with = True
@@ -119,7 +120,7 @@ class Communication(Document):
update_comment_in_doc(self)
if self.comment_type != 'Updated':
- update_parent_mins_to_first_response(self)
+ update_parent_document_on_communication(self)
self.bot_reply()
def on_trash(self):
@@ -258,7 +259,12 @@ class Communication(Document):
# Timeline Links
def set_timeline_links(self):
- contacts = get_contacts([self.sender, self.recipients, self.cc, self.bcc])
+ contacts = []
+ if (self.email_account and frappe.db.get_value("Email Account", self.email_account, "create_contact")) or \
+ frappe.flags.in_test:
+
+ contacts = get_contacts([self.sender, self.recipients, self.cc, self.bcc])
+
for contact_name in contacts:
self.add_link('Contact', contact_name)
@@ -423,3 +429,63 @@ def get_email_without_link(email):
email_host = email.split("@")[1]
return "{0}@{1}".format(email_id, email_host)
+
+def update_parent_document_on_communication(doc):
+ """Update mins_to_first_communication of parent document based on who is replying."""
+
+ parent = get_parent_doc(doc)
+ if not parent:
+ return
+
+ # update parent mins_to_first_communication only if we create the Email communication
+ # ignore in case of only Comment is added
+ if doc.communication_type == "Comment":
+ return
+
+ status_field = parent.meta.get_field("status")
+ if status_field:
+ options = (status_field.options or "").splitlines()
+
+ # if status has a "Replied" option, then update the status for received communication
+ if ("Replied" in options) and doc.sent_or_received == "Received":
+ parent.db_set("status", "Open")
+ parent.run_method("handle_hold_time", "Replied")
+ apply_assignment_rule(parent)
+ else:
+ # update the modified date for document
+ parent.update_modified()
+
+ update_mins_to_first_communication(parent, doc)
+ set_avg_response_time(parent, doc)
+ parent.run_method("notify_communication", doc)
+ parent.notify_update()
+
+def update_mins_to_first_communication(parent, communication):
+ if parent.meta.has_field("mins_to_first_response") and not parent.get("mins_to_first_response"):
+ if is_system_user(communication.sender):
+ first_responded_on = communication.creation
+ if parent.meta.has_field("first_responded_on") and communication.sent_or_received == "Sent":
+ parent.db_set("first_responded_on", first_responded_on)
+ parent.db_set("mins_to_first_response", round(time_diff_in_seconds(first_responded_on, parent.creation) / 60), 2)
+
+def set_avg_response_time(parent, communication):
+ if parent.meta.has_field("avg_response_time") and communication.sent_or_received == "Sent":
+ # avg response time for all the responses
+ communications = frappe.get_list("Communication", filters={
+ "reference_doctype": parent.doctype,
+ "reference_name": parent.name
+ },
+ fields=["sent_or_received", "name", "creation"],
+ order_by="creation"
+ )
+
+ if len(communications):
+ response_times = []
+ for i in range(len(communications)):
+ if communications[i].sent_or_received == "Sent" and communications[i-1].sent_or_received == "Received":
+ response_time = round(time_diff_in_seconds(communications[i].creation, communications[i-1].creation), 2)
+ if response_time > 0:
+ response_times.append(response_time)
+ if response_times:
+ avg_response_time = sum(response_times) / len(response_times)
+ parent.db_set("avg_response_time", avg_response_time)
\ No newline at end of file
diff --git a/frappe/core/doctype/communication/email.py b/frappe/core/doctype/communication/email.py
index 8793c60934..daf64d4b8b 100755
--- a/frappe/core/doctype/communication/email.py
+++ b/frappe/core/doctype/communication/email.py
@@ -9,7 +9,7 @@ import json
from email.utils import formataddr
from frappe.core.utils import get_parent_doc
from frappe.utils import (get_url, get_formatted_email, cint,
- validate_email_address, split_emails, time_diff_in_seconds, parse_addr, get_datetime)
+ validate_email_address, split_emails, parse_addr, get_datetime)
from frappe.email.email_body import get_message_id
import frappe.email.smtp
import time
@@ -172,33 +172,6 @@ def _notify(doc, print_html=None, print_format=None, attachments=None,
print_letterhead=frappe.flags.print_letterhead
)
-def update_parent_mins_to_first_response(doc):
- """Update mins_to_first_communication of parent document based on who is replying."""
-
- parent = get_parent_doc(doc)
- if not parent:
- return
-
- # update parent mins_to_first_communication only if we create the Email communication
- # ignore in case of only Comment is added
- if doc.communication_type == "Comment":
- return
-
- status_field = parent.meta.get_field("status")
- if status_field:
- options = (status_field.options or '').splitlines()
-
- # if status has a "Replied" option, then update the status for received communication
- if ('Replied' in options) and doc.sent_or_received=="Received":
- parent.db_set("status", "Open")
- else:
- # update the modified date for document
- parent.update_modified()
-
- update_mins_to_first_communication(parent, doc)
- parent.run_method('notify_communication', doc)
- parent.notify_update()
-
def get_recipients_cc_and_bcc(doc, recipients, cc, bcc, fetched_from_email_account=False):
doc.all_email_addresses = []
doc.sent_email_addresses = []
@@ -499,15 +472,6 @@ def sendmail(communication_name, print_html=None, print_format=None, attachments
traceback = frappe.log_error("frappe.core.doctype.communication.email.sendmail")
raise
-def update_mins_to_first_communication(parent, communication):
- if parent.meta.has_field('mins_to_first_response') and not parent.get('mins_to_first_response'):
- if frappe.db.get_all('User', filters={'email': communication.sender,
- 'user_type': 'System User', 'enabled': 1}, limit=1):
- first_responded_on = communication.creation
- if parent.meta.has_field('first_responded_on') and communication.sent_or_received == "Sent":
- parent.db_set('first_responded_on', first_responded_on)
- parent.db_set('mins_to_first_response', round(time_diff_in_seconds(first_responded_on, parent.creation) / 60), 2)
-
@frappe.whitelist(allow_guest=True)
def mark_email_as_seen(name=None):
try:
diff --git a/frappe/core/doctype/communication/test_communication.py b/frappe/core/doctype/communication/test_communication.py
index fb859586bb..6df90baaae 100644
--- a/frappe/core/doctype/communication/test_communication.py
+++ b/frappe/core/doctype/communication/test_communication.py
@@ -202,6 +202,8 @@ class TestCommunication(unittest.TestCase):
self.assertIn(("Note", note.name), doc_links)
def create_email_account():
+ frappe.delete_doc_if_exists("Email Account", "_Test Comm Account 1")
+
frappe.flags.mute_emails = False
frappe.flags.sent_mail = None
diff --git a/frappe/core/doctype/data_export/exporter.py b/frappe/core/doctype/data_export/exporter.py
index 6518c59653..e4d2ff2af6 100644
--- a/frappe/core/doctype/data_export/exporter.py
+++ b/frappe/core/doctype/data_export/exporter.py
@@ -9,7 +9,7 @@ import frappe.permissions
import re, csv, os
from frappe.utils.csvutils import UnicodeWriter
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint
-from frappe.core.doctype.data_import.importer import get_data_keys
+from frappe.core.doctype.data_import_legacy.importer import get_data_keys
from six import string_types
from frappe.core.doctype.access_log.access_log import make_access_log
diff --git a/frappe/core/doctype/data_import/README.md b/frappe/core/doctype/data_import/README.md
deleted file mode 100644
index 7bd4ac809b..0000000000
--- a/frappe/core/doctype/data_import/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Bulk import / update of data via file upload in Excel or CSV.
\ No newline at end of file
diff --git a/frappe/core/doctype/data_import/data_import.css b/frappe/core/doctype/data_import/data_import.css
new file mode 100644
index 0000000000..5206540a33
--- /dev/null
+++ b/frappe/core/doctype/data_import/data_import.css
@@ -0,0 +1,3 @@
+.warnings .warning {
+ margin-bottom: 40px;
+}
diff --git a/frappe/core/doctype/data_import/data_import.js b/frappe/core/doctype/data_import/data_import.js
index 9391b262d7..81a7bc9705 100644
--- a/frappe/core/doctype/data_import/data_import.js
+++ b/frappe/core/doctype/data_import/data_import.js
@@ -1,324 +1,522 @@
-// Copyright (c) 2017, Frappe Technologies and contributors
+// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import', {
- onload: function(frm) {
- if (frm.doc.__islocal) {
- frm.set_value("action", "");
- }
-
- frappe.call({
- method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes",
- callback: function (r) {
- let importable_doctypes = r.message;
- frm.set_query("reference_doctype", function () {
- return {
- "filters": {
- "issingle": 0,
- "istable": 0,
- "name": ['in', importable_doctypes]
- }
- };
- });
+ setup(frm) {
+ frappe.realtime.on('data_import_refresh', ({ data_import }) => {
+ frm.import_in_progress = false;
+ if (data_import !== frm.doc.name) return;
+ frappe.model.clear_doc('Data Import', frm.doc.name);
+ frappe.model.with_doc('Data Import', frm.doc.name).then(() => {
+ frm.refresh();
+ });
+ });
+ frappe.realtime.on('data_import_progress', data => {
+ frm.import_in_progress = true;
+ if (data.data_import !== frm.doc.name) {
+ return;
}
- }),
+ let percent = Math.floor((data.current * 100) / data.total);
+ let seconds = Math.floor(data.eta);
+ let minutes = Math.floor(data.eta / 60);
+ let eta_message =
+ // prettier-ignore
+ seconds < 60
+ ? __('About {0} seconds remaining', [seconds])
+ : minutes === 1
+ ? __('About {0} minute remaining', [minutes])
+ : __('About {0} minutes remaining', [minutes]);
- // should never check public
- frm.fields_dict["import_file"].df.is_private = 1;
+ let message;
+ if (data.success) {
+ let message_args = [data.current, data.total, eta_message];
+ message =
+ frm.doc.import_type === 'Insert New Records'
+ ? __('Importing {0} of {1}, {2}', message_args)
+ : __('Updating {0} of {1}, {2}', message_args);
+ }
+ if (data.skipping) {
+ message = __('Skipping {0} of {1}, {2}', [
+ data.current,
+ data.total,
+ eta_message
+ ]);
+ }
+ frm.dashboard.show_progress(__('Import Progress'), percent, message);
+ frm.page.set_indicator(__('In Progress'), 'orange');
- frappe.realtime.on("data_import_progress", function(data) {
- if (data.data_import === frm.doc.name) {
- if (data.reload && data.reload === true) {
- frm.reload_doc();
- }
- if (data.progress) {
- let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
- if (progress_bar) {
- $(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
- $(progress_bar).css("width", data.progress + "%");
- }
- }
+ // hide progress when complete
+ if (data.current === data.total) {
+ setTimeout(() => {
+ frm.dashboard.hide();
+ frm.refresh();
+ }, 2000);
}
});
+
+ frm.set_query('reference_doctype', () => {
+ return {
+ filters: {
+ name: ['in', frappe.boot.user.can_import]
+ }
+ };
+ });
+
+ frm.get_field('import_file').df.options = {
+ restrictions: {
+ allowed_file_types: ['.csv', '.xls', '.xlsx']
+ }
+ };
+
+ frm.has_import_file = () => {
+ return frm.doc.import_file || frm.doc.google_sheets_url;
+ };
},
- reference_doctype: function(frm){
- if (frm.doc.reference_doctype) {
- frappe.model.with_doctype(frm.doc.reference_doctype);
+ refresh(frm) {
+ frm.page.hide_icon_group();
+ frm.trigger('update_indicators');
+ frm.trigger('import_file');
+ frm.trigger('show_import_log');
+ frm.trigger('show_import_warnings');
+ frm.trigger('toggle_submit_after_import');
+ frm.trigger('show_import_status');
+ frm.trigger('show_report_error_button');
+
+ if (frm.doc.status === 'Partial Success') {
+ frm.add_custom_button(__('Export Errored Rows'), () =>
+ frm.trigger('export_errored_rows')
+ );
+ }
+
+ if (frm.doc.status.includes('Success')) {
+ frm.add_custom_button(
+ __('Go to {0} List', [frm.doc.reference_doctype]),
+ () => frappe.set_route('List', frm.doc.reference_doctype)
+ );
}
},
- refresh: function(frm) {
+ onload_post_render(frm) {
+ frm.trigger('update_primary_action');
+ },
+
+ update_primary_action(frm) {
frm.disable_save();
- frm.dashboard.clear_headline();
- if (frm.doc.reference_doctype && !frm.doc.import_file) {
- frm.page.set_indicator(__('Attach file'), 'orange');
- } else {
- if (frm.doc.import_status) {
- const listview_settings = frappe.listview_settings['Data Import'];
- const indicator = listview_settings.get_indicator(frm.doc);
-
- frm.page.set_indicator(indicator[0], indicator[1]);
-
- if (frm.doc.import_status === "In Progress") {
- frm.dashboard.add_progress("Data Import Progress", "0");
- frm.set_read_only();
- frm.refresh_fields();
- }
+ if (frm.doc.status !== 'Success') {
+ if (!frm.is_new() && (frm.has_import_file())) {
+ let label =
+ frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
+ frm.page.set_primary_action(label, () => frm.events.start_import(frm));
+ } else {
+ frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
+ },
- if (frm.doc.reference_doctype) {
- frappe.model.with_doctype(frm.doc.reference_doctype);
+ update_indicators(frm) {
+ const indicator = frappe.get_indicator(frm.doc);
+ if (indicator) {
+ frm.page.set_indicator(indicator[0], indicator[1]);
+ } else {
+ frm.page.clear_indicator();
}
+ },
- if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
- frm.set_df_property("action", "read_only", 1);
+ show_import_status(frm) {
+ let import_log = JSON.parse(frm.doc.import_log || '[]');
+ let successful_records = import_log.filter(log => log.success);
+ let failed_records = import_log.filter(log => !log.success);
+ if (successful_records.length === 0) return;
+
+ let message;
+ if (failed_records.length === 0) {
+ let message_args = [successful_records.length];
+ if (frm.doc.import_type === 'Insert New Records') {
+ message =
+ successful_records.length > 1
+ ? __('Successfully imported {0} records.', message_args)
+ : __('Successfully imported {0} record.', message_args);
+ } else {
+ message =
+ successful_records.length > 1
+ ? __('Successfully updated {0} records.', message_args)
+ : __('Successfully updated {0} record.', message_args);
+ }
+ } else {
+ let message_args = [successful_records.length, import_log.length];
+ if (frm.doc.import_type === 'Insert New Records') {
+ message =
+ successful_records.length > 1
+ ? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
+ : __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
+ } else {
+ message =
+ successful_records.length > 1
+ ? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
+ : __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
+ }
}
+ frm.dashboard.set_headline(message);
+ },
- frm.add_custom_button(__("Help"), function() {
- frappe.help.show_video("6wiriRKPhmg");
- });
+ show_report_error_button(frm) {
+ if (frm.doc.status === 'Error') {
+ frappe.db
+ .get_list('Error Log', {
+ filters: { method: frm.doc.name },
+ fields: ['method', 'error'],
+ order_by: 'creation desc',
+ limit: 1
+ })
+ .then(result => {
+ if (result.length > 0) {
+ frm.add_custom_button('Report Error', () => {
+ let fake_xhr = {
+ responseText: JSON.stringify({
+ exc: result[0].error
+ })
+ };
+ frappe.request.report_error(fake_xhr, {});
+ });
+ }
+ });
+ }
+ },
- if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
- frm.add_custom_button(__("Download template"), function() {
- frappe.data_import.download_dialog(frm).show();
+ start_import(frm) {
+ frm
+ .call({
+ method: 'form_start_import',
+ args: { data_import: frm.doc.name },
+ btn: frm.page.btn_primary
+ })
+ .then(r => {
+ if (r.message === true) {
+ frm.disable_save();
+ }
+ });
+ },
+
+ download_template(frm) {
+ if (
+ frm.data_exporter &&
+ frm.data_exporter.doctype === frm.doc.reference_doctype
+ ) {
+ frm.data_exporter.exporting_for = frm.doc.import_type;
+ frm.data_exporter.dialog.show();
+ } else {
+ frappe.require('/assets/js/data_import_tools.min.js', () => {
+ frm.data_exporter = new frappe.data_import.DataExporter(
+ frm.doc.reference_doctype,
+ frm.doc.import_type
+ );
});
}
+ },
- if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
- frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
- frm.page.set_primary_action(__("Start Import"), function() {
- frappe.call({
- btn: frm.page.btn_primary,
- method: "frappe.core.doctype.data_import.data_import.import_data",
- args: {
- data_import: frm.doc.name
- }
- });
- }).addClass('btn btn-primary');
- }
+ reference_doctype(frm) {
+ frm.trigger('toggle_submit_after_import');
+ },
- if (frm.doc.log_details) {
- frm.events.create_log_table(frm);
- } else {
- $(frm.fields_dict.import_log.wrapper).empty();
+ toggle_submit_after_import(frm) {
+ frm.toggle_display('submit_after_import', false);
+ let doctype = frm.doc.reference_doctype;
+ if (doctype) {
+ frappe.model.with_doctype(doctype, () => {
+ let meta = frappe.get_meta(doctype);
+ frm.toggle_display('submit_after_import', meta.is_submittable);
+ });
}
},
- action: function(frm) {
- if(!frm.doc.action) return;
- if(!frm.doc.reference_doctype) {
- frappe.msgprint(__("Please select document type first."));
- frm.set_value("action", "");
+ google_sheets_url(frm) {
+ if (!frm.is_dirty()) {
+ frm.trigger('import_file');
+ } else {
+ frm.trigger('update_primary_action');
+ }
+ },
+
+ refresh_google_sheet(frm) {
+ frm.trigger('import_file');
+ },
+
+ import_file(frm) {
+ frm.toggle_display('section_import_preview', frm.has_import_file());
+ if (!frm.has_import_file()) {
+ frm.get_field('import_preview').$wrapper.empty();
+ return;
+ } else {
+ frm.trigger('update_primary_action');
+ }
+
+ // load import preview
+ frm.get_field('import_preview').$wrapper.empty();
+		$('<span class="text-muted">')
+ .html(__('Loading import file...'))
+ .appendTo(frm.get_field('import_preview').$wrapper);
+
+ frm
+ .call({
+ method: 'get_preview_from_template',
+ args: {
+ data_import: frm.doc.name,
+ import_file: frm.doc.import_file,
+ google_sheets_url: frm.doc.google_sheets_url
+ },
+ error_handlers: {
+ TimestampMismatchError() {
+ // ignore this error
+ }
+ }
+ })
+ .then(r => {
+ let preview_data = r.message;
+ frm.events.show_import_preview(frm, preview_data);
+ frm.events.show_import_warnings(frm, preview_data);
+ });
+ },
+
+ show_import_preview(frm, preview_data) {
+ let import_log = JSON.parse(frm.doc.import_log || '[]');
+
+ if (
+ frm.import_preview &&
+ frm.import_preview.doctype === frm.doc.reference_doctype
+ ) {
+ frm.import_preview.preview_data = preview_data;
+ frm.import_preview.import_log = import_log;
+ frm.import_preview.refresh();
return;
}
- if(frm.doc.action == "Insert new records") {
- frm.doc.insert_new = 1;
- } else if (frm.doc.action == "Update records"){
- frm.doc.overwrite = 1;
+ frappe.require('/assets/js/data_import_tools.min.js', () => {
+ frm.import_preview = new frappe.data_import.ImportPreview({
+ wrapper: frm.get_field('import_preview').$wrapper,
+ doctype: frm.doc.reference_doctype,
+ preview_data,
+ import_log,
+ frm,
+ events: {
+ remap_column(changed_map) {
+ let template_options = JSON.parse(frm.doc.template_options || '{}');
+ template_options.remap_column = template_options.remap_column || {};
+ Object.assign(template_options.remap_column, changed_map);
+ frm.set_value('template_options', JSON.stringify(template_options));
+ frm.save().then(() => frm.trigger('import_file'));
+ }
+ }
+ });
+ });
+ },
+
+ export_errored_rows(frm) {
+ open_url_post(
+ '/api/method/frappe.core.doctype.data_import.data_import.download_errored_template',
+ {
+ data_import_name: frm.doc.name
+ }
+ );
+ },
+
+ show_import_warnings(frm, preview_data) {
+ let warnings = JSON.parse(frm.doc.template_warnings || '[]');
+ warnings = warnings.concat(preview_data.warnings || []);
+
+ frm.toggle_display('import_warnings_section', warnings.length > 0);
+ if (warnings.length === 0) {
+ frm.get_field('import_warnings').$wrapper.html('');
+ return;
}
- frm.save();
+
+ // group warnings by row
+ let warnings_by_row = {};
+ let other_warnings = [];
+ for (let warning of warnings) {
+ if (warning.row) {
+ warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
+ warnings_by_row[warning.row].push(warning);
+ } else {
+ other_warnings.push(warning);
+ }
+ }
+
+ let html = '';
+ html += Object.keys(warnings_by_row)
+ .map(row_number => {
+ let message = warnings_by_row[row_number]
+ .map(w => {
+ if (w.field) {
+ let label =
+ w.field.label +
+ (w.field.parent !== frm.doc.reference_doctype
+ ? ` (${w.field.parent})`
+ : '');
+ return `${label}: ${w.message}`;
+ }
+ return `${w.message}`;
+ })
+ .join('');
+ return `
+
+
${__('Row {0}', [row_number])}
+
+
+ `;
+ })
+ .join('');
+
+ html += other_warnings
+ .map(warning => {
+ let header = '';
+ if (warning.col) {
+ header = __('Column {0}', [warning.col]);
+ }
+ return `
+
+
${header}
+
${warning.message}
+
+ `;
+ })
+ .join('');
+ frm.get_field('import_warnings').$wrapper.html(`
+
+ `);
},
- only_update: function(frm) {
- frm.save();
+ show_failed_logs(frm) {
+ frm.trigger('show_import_log');
},
- submit_after_import: function(frm) {
- frm.save();
+ show_import_log(frm) {
+ let import_log = JSON.parse(frm.doc.import_log || '[]');
+ let logs = import_log;
+ frm.toggle_display('import_log', false);
+ frm.toggle_display('import_log_section', logs.length > 0);
+
+ if (logs.length === 0) {
+ frm.get_field('import_log_preview').$wrapper.empty();
+ return;
+ }
+
+ let rows = logs
+ .map(log => {
+ let html = '';
+ if (log.success) {
+ if (frm.doc.import_type === 'Insert New Records') {
+ html = __('Successfully imported {0}', [
+ `${frappe.utils.get_form_link(
+ frm.doc.reference_doctype,
+ log.docname,
+ true
+ )}`
+ ]);
+ } else {
+ html = __('Successfully updated {0}', [
+ `${frappe.utils.get_form_link(
+ frm.doc.reference_doctype,
+ log.docname,
+ true
+ )}`
+ ]);
+ }
+ } else {
+ let messages = log.messages
+ .map(JSON.parse)
+ .map(m => {
+						let title = m.title ? `<strong>${m.title}</strong>` : '';
+						let message = m.message ? `<div>${m.message}</div>` : '';
+ return title + message;
+ })
+ .join('');
+ let id = frappe.dom.get_unique_id();
+ html = `${messages}
+
+ `;
+ }
+ let indicator_color = log.success ? 'green' : 'red';
+ let title = log.success ? __('Success') : __('Failure');
+
+ if (frm.doc.show_failed_logs && log.success) {
+ return '';
+ }
+
+ return `
+ | ${log.row_indexes.join(', ')} |
+
+ ${title}
+ |
+
+ ${html}
+ |
+
`;
+ })
+ .join('');
+
+ if (!rows && frm.doc.show_failed_logs) {
+ rows = `|
+ ${__('No failed logs')}
+ |
`;
+ }
+
+ frm.get_field('import_log_preview').$wrapper.html(`
+
+
+ | ${__('Row Number')} |
+ ${__('Status')} |
+ ${__('Message')} |
+
+ ${rows}
+
+ `);
},
- skip_errors: function(frm) {
- frm.save();
- },
+ show_missing_link_values(frm, missing_link_values) {
+ let can_be_created_automatically = missing_link_values.every(
+ d => d.has_one_mandatory_field
+ );
- ignore_encoding_errors: function(frm) {
- frm.save();
- },
+ let html = missing_link_values
+ .map(d => {
+ let doctype = d.doctype;
+ let values = d.missing_values;
+ return `
+ ${doctype}
+ ${values.map(v => `- ${v}
`).join('')}
+ `;
+ })
+ .join('');
- no_email: function(frm) {
- frm.save();
- },
-
- show_only_errors: function(frm) {
- frm.events.create_log_table(frm);
- },
-
- create_log_table: function(frm) {
- let msg = JSON.parse(frm.doc.log_details);
- var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
- $(frappe.render_template("log_details", {
- data: msg.messages,
- import_status: frm.doc.import_status,
- show_only_errors: frm.doc.show_only_errors,
- })).appendTo($log_wrapper);
+ if (can_be_created_automatically) {
+ // prettier-ignore
+ let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
+ frappe.confirm(message + html, () => {
+ frm
+ .call('create_missing_link_values', {
+ missing_link_values
+ })
+ .then(r => {
+ let records = r.message;
+ frappe.msgprint(
+ __('Created {0} records successfully.', [records.length])
+ );
+ });
+ });
+ } else {
+ frappe.msgprint(
+ // prettier-ignore
+ __('The following records needs to be created before we can import your file.') + html
+ );
+ }
}
});
-
-frappe.provide('frappe.data_import');
-frappe.data_import.download_dialog = function(frm) {
- var dialog;
- const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
- const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
-
- const get_doctype_checkbox_fields = () => {
- return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
- .map(df => dialog.fields_dict[df.fieldname]);
- };
-
- const doctype_fields = get_fields(frm.doc.reference_doctype)
- .map(df => {
- let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
- return {
- label: df.label,
- reqd: reqd,
- danger: reqd,
- value: df.fieldname,
- checked: 1
- };
- });
-
- let fields = [
- {
- "label": __("Select Columns"),
- "fieldname": "select_columns",
- "fieldtype": "Select",
- "options": "All\nMandatory\nManually",
- "reqd": 1,
- "onchange": function() {
- const fields = get_doctype_checkbox_fields();
- fields.map(f => f.toggle(true));
- if(this.value == 'Mandatory' || this.value == 'Manually') {
- checkbox_toggle(true);
- fields.map(multicheck_field => {
- multicheck_field.options.map(option => {
- if(!option.reqd) return;
- $(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
- .prop('checked', false)
- .trigger('click');
- });
- });
- } else if(this.value == 'All'){
- $(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
- .prop('disabled', true);
- }
- }
- },
- {
- "label": __("File Type"),
- "fieldname": "file_type",
- "fieldtype": "Select",
- "options": "Excel\nCSV",
- "default": "Excel"
- },
- {
- "label": __("Download with Data"),
- "fieldname": "with_data",
- "fieldtype": "Check",
- "hidden": !frm.doc.overwrite,
- "default": 1
- },
- {
- "label": __("Select All"),
- "fieldname": "select_all",
- "fieldtype": "Button",
- "depends_on": "eval:doc.select_columns=='Manually'",
- click: function() {
- checkbox_toggle();
- }
- },
- {
- "label": __("Unselect All"),
- "fieldname": "unselect_all",
- "fieldtype": "Button",
- "depends_on": "eval:doc.select_columns=='Manually'",
- click: function() {
- checkbox_toggle(true);
- }
- },
- {
- "label": frm.doc.reference_doctype,
- "fieldname": "doctype_fields",
- "fieldtype": "MultiCheck",
- "options": doctype_fields,
- "columns": 2,
- "hidden": 1
- }
- ];
-
- const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
- .map(df => {
- return {
- "label": df.options,
- "fieldname": df.fieldname + '_fields',
- "fieldtype": "MultiCheck",
- "options": frappe.meta.get_docfields(df.options)
- .filter(filter_fields)
- .map(df => ({
- label: df.label,
- reqd: df.reqd ? 1 : 0,
- value: df.fieldname,
- checked: 1,
- danger: df.reqd
- })),
- "columns": 2,
- "hidden": 1
- };
- });
-
- fields = fields.concat(child_table_fields);
-
- dialog = new frappe.ui.Dialog({
- title: __('Download Template'),
- fields: fields,
- primary_action: function(values) {
- var data = values;
- if (frm.doc.reference_doctype) {
- var export_params = () => {
- let columns = {};
- if(values.select_columns) {
- columns = get_doctype_checkbox_fields().reduce((columns, field) => {
- const options = field.get_checked_options();
- columns[field.df.label] = options;
- return columns;
- }, {});
- }
-
- return {
- doctype: frm.doc.reference_doctype,
- parent_doctype: frm.doc.reference_doctype,
- select_columns: JSON.stringify(columns),
- with_data: frm.doc.overwrite && data.with_data,
- all_doctypes: true,
- file_type: data.file_type,
- template: true
- };
- };
- let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
- open_url_post(get_template_url, export_params());
- } else {
- frappe.msgprint(__("Please select the Document Type."));
- }
- dialog.hide();
- },
- primary_action_label: __('Download')
- });
-
- $(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
- .wrapAll('');
- const button_container = $(dialog.body).find('.inline-buttons');
- button_container.addClass('flex');
- $(button_container).find('.frappe-control').map((index, button) => {
- $(button).css({"margin-right": "1em"});
- });
-
- function checkbox_toggle(checked=false) {
- $(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
- $(element).find(`:checkbox`).prop("checked", checked).trigger('click');
- });
- }
-
- return dialog;
-};
diff --git a/frappe/core/doctype/data_import/data_import.json b/frappe/core/doctype/data_import/data_import.json
index 11c8368e00..177252ea22 100644
--- a/frappe/core/doctype/data_import/data_import.json
+++ b/frappe/core/doctype/data_import/data_import.json
@@ -1,767 +1,192 @@
{
- "allow_copy": 1,
- "allow_guest_to_view": 0,
- "allow_import": 0,
- "allow_rename": 0,
- "autoname": "",
- "beta": 0,
- "creation": "2016-12-09 14:27:32.720061",
- "custom": 0,
- "docstatus": 0,
- "doctype": "DocType",
- "document_type": "Document",
- "editable_grid": 1,
- "engine": "InnoDB",
+ "actions": [],
+ "autoname": "format:{reference_doctype} Import on {creation}",
+ "beta": 1,
+ "creation": "2019-08-04 14:16:08.318714",
+ "doctype": "DocType",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "reference_doctype",
+ "import_type",
+ "download_template",
+ "import_file",
+ "html_5",
+ "google_sheets_url",
+ "refresh_google_sheet",
+ "column_break_5",
+ "status",
+ "submit_after_import",
+ "mute_emails",
+ "template_options",
+ "import_warnings_section",
+ "template_warnings",
+ "import_warnings",
+ "section_import_preview",
+ "import_preview",
+ "import_log_section",
+ "import_log",
+ "show_failed_logs",
+ "import_log_preview"
+ ],
"fields": [
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "depends_on": "",
- "fieldname": "reference_doctype",
- "fieldtype": "Link",
- "hidden": 0,
- "ignore_user_permissions": 1,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 1,
- "in_standard_filter": 0,
- "label": "Document Type",
- "length": 0,
- "no_copy": 0,
- "options": "DocType",
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 1,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
- {
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fieldname": "action",
- "fieldtype": "Select",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Action",
- "length": 0,
- "no_copy": 0,
- "options": "Insert new records\nUpdate records",
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
+ "fieldname": "reference_doctype",
+ "fieldtype": "Link",
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType",
"reqd": 1,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "set_only_once": 1
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "0",
- "depends_on": "eval:!doc.overwrite",
- "description": "New data will be inserted.",
- "fieldname": "insert_new",
- "fieldtype": "Check",
- "hidden": 1,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Insert new records",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 1,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "import_type",
+ "fieldtype": "Select",
+ "in_list_view": 1,
+ "label": "Import Type",
+ "options": "\nInsert New Records\nUpdate Existing Records",
+ "reqd": 1,
+ "set_only_once": 1
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "0",
- "depends_on": "eval:!doc.insert_new",
- "description": "If you are updating/overwriting already created records.",
- "fieldname": "overwrite",
- "fieldtype": "Check",
- "hidden": 1,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Update records",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 1,
- "translatable": 0,
- "unique": 0
- },
- {
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "0",
- "depends_on": "overwrite",
- "description": "If you don't want to create any new records while updating the older records.",
- "fieldname": "only_update",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Don't create new records",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
- {
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "collapsible_depends_on": "",
- "columns": 0,
- "depends_on": "eval:(!doc.__islocal)",
- "fieldname": "section_break_4",
- "fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
- {
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "depends_on": "",
+ "depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Attach file for Import",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "in_list_view": 1,
+ "label": "Import File"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fieldname": "column_break_4",
- "fieldtype": "Column Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "fieldname": "import_preview",
+ "fieldtype": "HTML",
+ "label": "Import Preview"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "depends_on": "eval: doc.import_status == \"Partially Successful\"",
- "description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
- "fieldname": "error_file",
- "fieldtype": "Attach",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Generated File",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
- {
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "collapsible_depends_on": "",
- "columns": 0,
- "depends_on": "eval:(!doc.__islocal)",
- "fieldname": "section_break_6",
+ "fieldname": "section_import_preview",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "label": "Preview"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
- "fieldname": "skip_errors",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Skip rows with errors",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "column_break_5",
+ "fieldtype": "Column Break"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "0",
- "depends_on": "",
- "fieldname": "submit_after_import",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Submit after importing",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "template_options",
+ "fieldtype": "Code",
+ "hidden": 1,
+ "label": "Template Options",
+ "options": "JSON",
+ "read_only": 1
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "0",
- "depends_on": "",
- "fieldname": "ignore_encoding_errors",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Ignore encoding errors",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "import_log",
+ "fieldtype": "Code",
+ "label": "Import Log",
+ "options": "JSON"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "1",
- "depends_on": "",
- "fieldname": "no_email",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Do not send Emails",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "import_log_section",
+ "fieldtype": "Section Break",
+ "label": "Import Log"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 1,
- "collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
- "columns": 0,
- "depends_on": "import_status",
- "fieldname": "import_detail",
- "fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Import Log",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "import_log_preview",
+ "fieldtype": "HTML",
+ "label": "Import Log Preview"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "depends_on": "",
- "fieldname": "import_status",
- "fieldtype": "Select",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Import Status",
- "length": 0,
- "no_copy": 0,
- "options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 1,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "default": "Pending",
+ "fieldname": "status",
+ "fieldtype": "Select",
+ "hidden": 1,
+ "label": "Status",
+ "options": "Pending\nSuccess\nPartial Success\nError",
+ "read_only": 1
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 1,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "1",
- "fieldname": "show_only_errors",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Show only errors",
- "length": 0,
- "no_copy": 1,
- "permlevel": 0,
- "precision": "",
- "print_hide": 1,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "template_warnings",
+ "fieldtype": "Code",
+ "hidden": 1,
+ "label": "Template Warnings",
+ "options": "JSON"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 1,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "",
- "depends_on": "import_status",
- "fieldname": "import_log",
- "fieldtype": "HTML",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Import Log",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "default": "0",
+ "fieldname": "submit_after_import",
+ "fieldtype": "Check",
+ "label": "Submit After Import",
+ "set_only_once": 1
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 1,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "depends_on": "",
- "fieldname": "log_details",
- "fieldtype": "Code",
- "hidden": 1,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Log Details",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 1,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "import_warnings_section",
+ "fieldtype": "Section Break",
+ "label": "Warnings"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fieldname": "amended_from",
- "fieldtype": "Link",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Amended From",
- "length": 0,
- "no_copy": 1,
- "options": "Data Import",
- "permlevel": 0,
- "print_hide": 1,
- "print_hide_if_no_value": 0,
- "read_only": 1,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
+ "fieldname": "import_warnings",
+ "fieldtype": "HTML",
+ "label": "Import Warnings"
+ },
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fieldname": "total_rows",
- "fieldtype": "Int",
- "hidden": 1,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Total Rows",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 1,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "depends_on": "reference_doctype",
+ "fieldname": "download_template",
+ "fieldtype": "Button",
+ "label": "Download Template"
+ },
+ {
+ "default": "1",
+ "fieldname": "mute_emails",
+ "fieldtype": "Check",
+ "label": "Don't Send Emails",
+ "set_only_once": 1
+ },
+ {
+ "default": "0",
+ "fieldname": "show_failed_logs",
+ "fieldtype": "Check",
+ "label": "Show Failed Logs"
+ },
+ {
+ "depends_on": "eval:!doc.__islocal && !doc.import_file",
+ "fieldname": "html_5",
+ "fieldtype": "HTML",
+ "options": "Or
"
+ },
+ {
+ "depends_on": "eval:!doc.__islocal && !doc.import_file\n",
+ "description": "Must be a publicly accessible Google Sheets URL",
+ "fieldname": "google_sheets_url",
+ "fieldtype": "Data",
+ "label": "Import from Google Sheets"
+ },
+ {
+ "depends_on": "eval:doc.google_sheets_url",
+ "fieldname": "refresh_google_sheet",
+ "fieldtype": "Button",
+ "label": "Refresh Google Sheet"
}
- ],
- "has_web_view": 0,
- "hide_heading": 0,
- "hide_toolbar": 0,
- "idx": 0,
- "image_view": 0,
- "in_create": 0,
- "is_submittable": 1,
- "issingle": 0,
- "istable": 0,
- "max_attachments": 1,
- "modified": "2018-08-28 15:05:56.787108",
- "modified_by": "Administrator",
- "module": "Core",
- "name": "Data Import",
- "name_case": "",
- "owner": "Administrator",
+ ],
+ "hide_toolbar": 1,
+ "links": [],
+ "modified": "2020-06-18 16:05:54.211034",
+ "modified_by": "Administrator",
+ "module": "Core",
+ "name": "Data Import",
+ "owner": "Administrator",
"permissions": [
{
- "amend": 0,
- "cancel": 0,
- "create": 1,
- "delete": 1,
- "email": 1,
- "export": 0,
- "if_owner": 0,
- "import": 0,
- "permlevel": 0,
- "print": 0,
- "read": 1,
- "report": 0,
- "role": "System Manager",
- "set_user_permissions": 0,
- "share": 1,
- "submit": 1,
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "export": 1,
+ "print": 1,
+ "read": 1,
+ "report": 1,
+ "role": "System Manager",
+ "share": 1,
"write": 1
}
- ],
- "quick_entry": 0,
- "read_only": 0,
- "read_only_onload": 0,
- "show_name_in_global_search": 0,
- "sort_field": "modified",
- "sort_order": "DESC",
- "title_field": "",
- "track_changes": 1,
- "track_seen": 1,
- "track_views": 0
-}
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "track_changes": 1
+}
\ No newline at end of file
diff --git a/frappe/core/doctype/data_import/data_import.py b/frappe/core/doctype/data_import/data_import.py
index ecf34d24b0..72de092461 100644
--- a/frappe/core/doctype/data_import/data_import.py
+++ b/frappe/core/doctype/data_import/data_import.py
@@ -1,54 +1,187 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2017, Frappe Technologies and contributors
+# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
-import frappe, os
-from frappe import _
-import frappe.modules.import_file
+import os
+import frappe
from frappe.model.document import Document
-from frappe.utils.data import format_datetime
-from frappe.core.doctype.data_import.importer import upload
+
+from frappe.core.doctype.data_import.importer import Importer
+from frappe.core.doctype.data_import.exporter import Exporter
from frappe.utils.background_jobs import enqueue
+from frappe.utils.csvutils import validate_google_sheets_url
+from frappe import _
class DataImport(Document):
- def autoname(self):
- if not self.name:
- self.name = "Import on " +format_datetime(self.creation)
-
def validate(self):
- if not self.import_file:
- self.db_set("total_rows", 0)
- if self.import_status == "In Progress":
- frappe.throw(_("Can't save the form as data import is in progress."))
+ doc_before_save = self.get_doc_before_save()
+ if (
+ not (self.import_file or self.google_sheets_url)
+ or (doc_before_save and doc_before_save.import_file != self.import_file)
+ or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url)
+ ):
+ self.template_options = ""
+ self.template_warnings = ""
- # validate the template just after the upload
- # if there is total_rows in the doc, it means that the template is already validated and error free
- if self.import_file and not self.total_rows:
- upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
+ self.validate_import_file()
+ self.validate_google_sheets_url()
+
+ def validate_import_file(self):
+ if self.import_file:
+ # validate template
+ self.get_importer()
+
+ def validate_google_sheets_url(self):
+ if not self.google_sheets_url:
+ return
+ validate_google_sheets_url(self.google_sheets_url)
+
+ def get_preview_from_template(self, import_file=None, google_sheets_url=None):
+ if import_file:
+ self.import_file = import_file
+
+ if google_sheets_url:
+ self.google_sheets_url = google_sheets_url
+
+ if not (self.import_file or self.google_sheets_url):
+ return
+
+ i = self.get_importer()
+ return i.get_data_for_import_preview()
+
+ def start_import(self):
+ from frappe.core.page.background_jobs.background_jobs import get_info
+ from frappe.utils.scheduler import is_scheduler_inactive
+
+ if is_scheduler_inactive() and not frappe.flags.in_test:
+ frappe.throw(
+ _("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
+ )
+
+ enqueued_jobs = [d.get("job_name") for d in get_info()]
+
+ if self.name not in enqueued_jobs:
+ enqueue(
+ start_import,
+ queue="default",
+ timeout=6000,
+ event="data_import",
+ job_name=self.name,
+ data_import=self.name,
+ now=frappe.conf.developer_mode or frappe.flags.in_test,
+ )
+ return True
+
+ return False
+
+ def export_errored_rows(self):
+ return self.get_importer().export_errored_rows()
+
+ def get_importer(self):
+ return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
-def get_importable_doctypes():
- return frappe.cache().hget("can_import", frappe.session.user)
+def get_preview_from_template(data_import, import_file=None, google_sheets_url=None):
+ return frappe.get_doc("Data Import", data_import).get_preview_from_template(
+ import_file, google_sheets_url
+ )
+
@frappe.whitelist()
-def import_data(data_import):
- frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False)
- frappe.publish_realtime("data_import_progress", {"progress": "0",
- "data_import": data_import, "reload": True}, user=frappe.session.user)
-
- from frappe.core.page.background_jobs.background_jobs import get_info
- enqueued_jobs = [d.get("job_name") for d in get_info()]
-
- if data_import not in enqueued_jobs:
- enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
- data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
+def form_start_import(data_import):
+ return frappe.get_doc("Data Import", data_import).start_import()
-def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
- insert=False, submit=False, pre_process=None):
+def start_import(data_import):
+	"""This method runs in a background job"""
+ data_import = frappe.get_doc("Data Import", data_import)
+ try:
+ i = Importer(data_import.reference_doctype, data_import=data_import)
+ i.import_data()
+ except Exception:
+ frappe.db.rollback()
+ data_import.db_set("status", "Error")
+ frappe.log_error(title=data_import.name)
+ finally:
+ frappe.flags.in_import = False
+
+ frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
+
+
+@frappe.whitelist()
+def download_template(
+ doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
+):
+ """
+ Download template from Exporter
+ :param doctype: Document Type
+ :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
+	:param export_records=None: One of 'all', 'by_filter', '5_records', 'blank_template'
+ :param export_filters: Filter dict
+ :param file_type: File type to export into
+ """
+
+ export_fields = frappe.parse_json(export_fields)
+ export_filters = frappe.parse_json(export_filters)
+ export_data = export_records != "blank_template"
+
+ e = Exporter(
+ doctype,
+ export_fields=export_fields,
+ export_data=export_data,
+ export_filters=export_filters,
+ file_type=file_type,
+ export_page_length=5 if export_records == "5_records" else None,
+ )
+ e.build_response()
+
+
+@frappe.whitelist()
+def download_errored_template(data_import_name):
+ data_import = frappe.get_doc("Data Import", data_import_name)
+ data_import.export_errored_rows()
+
+
+def import_file(
+ doctype, file_path, import_type, submit_after_import=False, console=False
+):
+ """
+	Import documents from a CSV or XLSX file using data import.
+
+ :param doctype: DocType to import
+ :param file_path: Path to .csv, .xls, or .xlsx file to import
+ :param import_type: One of "Insert" or "Update"
+ :param submit_after_import: Whether to submit documents after import
+ :param console: Set to true if this is to be used from command line. Will print errors or progress to stdout.
+ """
+
+ data_import = frappe.new_doc("Data Import")
+ data_import.submit_after_import = submit_after_import
+ data_import.import_type = (
+ "Insert New Records" if import_type.lower() == "insert" else "Update Existing Records"
+ )
+
+ i = Importer(
+ doctype=doctype, file_path=file_path, data_import=data_import, console=console
+ )
+ i.import_data()
+
+
+##############
+
+
+def import_doc(
+ path,
+ overwrite=False,
+ ignore_links=False,
+ ignore_insert=False,
+ insert=False,
+ submit=False,
+ pre_process=None,
+):
if os.path.isdir(path):
files = [os.path.join(path, f) for f in os.listdir(path)]
else:
@@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
for f in files:
if f.endswith(".json"):
frappe.flags.mute_emails = True
- frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
+ frappe.modules.import_file.import_file_by_path(
+ f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True
+ )
frappe.flags.mute_emails = False
frappe.db.commit()
elif f.endswith(".csv"):
- import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
+ import_file_by_path(
+ f,
+ ignore_links=ignore_links,
+ overwrite=overwrite,
+ submit=submit,
+ pre_process=pre_process,
+ )
frappe.db.commit()
-def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
- from frappe.utils.csvutils import read_csv_content
- print("Importing " + path)
- with open(path, "r") as infile:
- upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
- submit_after_import=submit, pre_process=pre_process)
+def import_file_by_path(
+ path,
+ ignore_links=False,
+ overwrite=False,
+ submit=False,
+ pre_process=None,
+ no_email=True,
+):
+ if path.endswith(".csv"):
+ print()
+ print("This method is deprecated.")
+ print('Import CSV files using the command "bench --site sitename data-import"')
+ print("Or use the method frappe.core.doctype.data_import.data_import.import_file")
+ print()
+ raise Exception("Method deprecated")
-def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
+def export_json(
+ doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"
+):
def post_process(out):
- del_keys = ('modified_by', 'creation', 'owner', 'idx')
+ del_keys = ("modified_by", "creation", "owner", "idx")
for doc in out:
for key in del_keys:
if key in doc:
@@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
for k, v in doc.items():
if isinstance(v, list):
for child in v:
- for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
+ for key in del_keys + ("docstatus", "doctype", "modified", "name"):
if key in child:
del child[key]
@@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
elif frappe.db.get_value("DocType", doctype, "issingle"):
out.append(frappe.get_doc(doctype).as_dict())
else:
- for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
+ for doc in frappe.get_all(
+ doctype,
+ fields=["name"],
+ filters=filters,
+ or_filters=or_filters,
+ limit_page_length=0,
+ order_by=order_by,
+ ):
out.append(frappe.get_doc(doctype, doc.name).as_dict())
post_process(out)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
- path = os.path.join('..', path)
+ path = os.path.join("..", path)
with open(path, "w") as outfile:
outfile.write(frappe.as_json(out))
@@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
def export_csv(doctype, path):
from frappe.core.doctype.data_export.exporter import export_data
+
with open(path, "wb") as csvfile:
export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
csvfile.write(frappe.response.result.encode("utf-8"))
-
-
-@frappe.whitelist()
-def export_fixture(doctype, app):
- if frappe.session.user != "Administrator":
- raise frappe.PermissionError
-
- if not os.path.exists(frappe.get_app_path(app, "fixtures")):
- os.mkdir(frappe.get_app_path(app, "fixtures"))
-
- export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")
diff --git a/frappe/core/doctype/data_import/data_import_list.js b/frappe/core/doctype/data_import/data_import_list.js
index dc06f44f59..1dee4319f9 100644
--- a/frappe/core/doctype/data_import/data_import_list.js
+++ b/frappe/core/doctype/data_import/data_import_list.js
@@ -1,31 +1,40 @@
+let imports_in_progress = [];
+
frappe.listview_settings['Data Import'] = {
- add_fields: ["import_status"],
- has_indicator_for_draft: 1,
- get_indicator: function(doc) {
-
- let status = {
- 'Successful': [__("Success"), "green", "import_status,=,Successful"],
- 'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
- 'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
- 'Failed': [__("Failed"), "red", "import_status,=,Failed"],
- 'Pending': [__("Pending"), "orange", "import_status,=,"]
- }
-
- if (doc.import_status) {
- return status[doc.import_status];
- }
-
- if (doc.docstatus == 0) {
- return status['Pending'];
- }
-
- return status['Pending'];
- },
onload(listview) {
- listview.page.set_title_sub(`
-
- ${__('Try the new Data Import')}
-
- `);
- }
+ frappe.realtime.on('data_import_progress', data => {
+ if (!imports_in_progress.includes(data.data_import)) {
+ imports_in_progress.push(data.data_import);
+ }
+ });
+ frappe.realtime.on('data_import_refresh', data => {
+ imports_in_progress = imports_in_progress.filter(
+ d => d !== data.data_import
+ );
+ listview.refresh();
+ });
+ },
+ get_indicator: function(doc) {
+ var colors = {
+ 'Pending': 'orange',
+ 'Partial Success': 'orange',
+ 'Success': 'green',
+ 'In Progress': 'orange',
+ 'Error': 'red'
+ };
+ let status = doc.status;
+ if (imports_in_progress.includes(doc.name)) {
+ status = 'In Progress';
+ }
+ return [__(status), colors[status], 'status,=,' + doc.status];
+ },
+ formatters: {
+ import_type(value) {
+ return {
+ 'Insert New Records': __('Insert'),
+ 'Update Existing Records': __('Update')
+ }[value];
+ }
+ },
+ hide_name_column: true
};
diff --git a/frappe/core/doctype/data_import/exporter.py b/frappe/core/doctype/data_import/exporter.py
new file mode 100644
index 0000000000..3eef6ce016
--- /dev/null
+++ b/frappe/core/doctype/data_import/exporter.py
@@ -0,0 +1,257 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+import frappe
+from frappe.model import (
+ display_fieldtypes,
+ no_value_fields,
+ table_fields as table_fieldtypes,
+)
+from frappe.utils.csvutils import build_csv_response
+from frappe.utils.xlsxutils import build_xlsx_response
+
+
+class Exporter:
+ def __init__(
+ self,
+ doctype,
+ export_fields=None,
+ export_data=False,
+ export_filters=None,
+ export_page_length=None,
+ file_type="CSV",
+ ):
+ """
+ Exports records of a DocType for use with Importer
+ :param doctype: Document Type to export
+ :param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
+ :param export_data=False: Whether to export data as well
+ :param export_filters=None: The filters (dict or list) which is used to query the records
+ :param file_type: One of 'Excel' or 'CSV'
+ """
+ self.doctype = doctype
+ self.meta = frappe.get_meta(doctype)
+ self.export_fields = export_fields
+ self.export_filters = export_filters
+ self.export_page_length = export_page_length
+ self.file_type = file_type
+
+ # this will contain the csv content
+ self.csv_array = []
+
+ # fields that get exported
+ self.exportable_fields = self.get_all_exportable_fields()
+ self.fields = self.serialize_exportable_fields()
+ self.add_header()
+
+ if export_data:
+ self.data = self.get_data_to_export()
+ else:
+ self.data = []
+ self.add_data()
+
+ def get_all_exportable_fields(self):
+ child_table_fields = [
+ df.fieldname for df in self.meta.fields if df.fieldtype in table_fieldtypes
+ ]
+
+ meta = frappe.get_meta(self.doctype)
+ exportable_fields = frappe._dict({})
+
+ for key, fieldnames in self.export_fields.items():
+ if key == self.doctype:
+ # parent fields
+ exportable_fields[key] = self.get_exportable_fields(key, fieldnames)
+
+ elif key in child_table_fields:
+ # child fields
+ child_df = meta.get_field(key)
+ child_doctype = child_df.options
+ exportable_fields[key] = self.get_exportable_fields(child_doctype, fieldnames)
+
+ return exportable_fields
+
+ def serialize_exportable_fields(self):
+ fields = []
+ for key, exportable_fields in self.exportable_fields.items():
+ for _df in exportable_fields:
+ # make a copy of df dict to avoid reference mutation
+ if isinstance(_df, frappe.core.doctype.docfield.docfield.DocField):
+ df = _df.as_dict()
+ else:
+ df = _df.copy()
+
+ df.is_child_table_field = key != self.doctype
+ if df.is_child_table_field:
+ df.child_table_df = self.meta.get_field(key)
+ fields.append(df)
+ return fields
+
+ def get_exportable_fields(self, doctype, fieldnames):
+ meta = frappe.get_meta(doctype)
+
+ def is_exportable(df):
+ return df and df.fieldtype not in (display_fieldtypes + no_value_fields)
+
+ # add name field
+ name_field = frappe._dict(
+ {
+ "fieldtype": "Data",
+ "fieldname": "name",
+ "label": "ID",
+ "reqd": 1,
+ "parent": doctype,
+ }
+ )
+
+ fields = [meta.get_field(fieldname) for fieldname in fieldnames]
+ fields = [df for df in fields if is_exportable(df)]
+
+ if "name" in fieldnames:
+ fields = [name_field] + fields
+
+ return fields or []
+
+ def get_data_to_export(self):
+ frappe.permissions.can_export(self.doctype, raise_exception=True)
+ data_to_export = []
+
+ table_fields = [f for f in self.exportable_fields if f != self.doctype]
+ data = self.get_data_as_docs()
+
+ for doc in data:
+ rows = []
+ rows = self.add_data_row(self.doctype, None, doc, rows, 0)
+
+ if table_fields:
+ # add child table data
+ for f in table_fields:
+ for i, child_row in enumerate(doc[f]):
+ table_df = self.meta.get_field(f)
+ child_doctype = table_df.options
+ rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i)
+
+ data_to_export += rows
+
+ return data_to_export
+
+ def add_data_row(self, doctype, parentfield, doc, rows, row_idx):
+ if len(rows) < row_idx + 1:
+ rows.append([""] * len(self.fields))
+
+ row = rows[row_idx]
+
+ for i, df in enumerate(self.fields):
+ if df.parent == doctype:
+ if df.is_child_table_field and df.child_table_df.fieldname != parentfield:
+ continue
+ row[i] = doc.get(df.fieldname, "")
+
+ return rows
+
+ def get_data_as_docs(self):
+ def format_column_name(df):
+ return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)
+
+ filters = self.export_filters
+
+ if self.meta.is_nested_set():
+ order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
+ else:
+ order_by = "`tab{0}`.`creation` DESC".format(self.doctype)
+
+ parent_fields = [
+ format_column_name(df) for df in self.fields if df.parent == self.doctype
+ ]
+ parent_data = frappe.db.get_list(
+ self.doctype,
+ filters=filters,
+ fields=["name"] + parent_fields,
+ limit_page_length=self.export_page_length,
+ order_by=order_by,
+ as_list=0,
+ )
+ parent_names = [p.name for p in parent_data]
+
+ child_data = {}
+ for key in self.exportable_fields:
+ if key == self.doctype:
+ continue
+ child_table_df = self.meta.get_field(key)
+ child_table_doctype = child_table_df.options
+ child_fields = ["name", "idx", "parent", "parentfield"] + list(
+ set(
+ [format_column_name(df) for df in self.fields if df.parent == child_table_doctype]
+ )
+ )
+ data = frappe.db.get_list(
+ child_table_doctype,
+ filters={
+ "parent": ("in", parent_names),
+ "parentfield": child_table_df.fieldname,
+ "parenttype": self.doctype,
+ },
+ fields=child_fields,
+ order_by="idx asc",
+ as_list=0,
+ )
+ child_data[key] = data
+
+ return self.merge_data(parent_data, child_data)
+
+ def merge_data(self, parent_data, child_data):
+ for doc in parent_data:
+ for table_field, table_rows in child_data.items():
+ doc[table_field] = [row for row in table_rows if row.parent == doc.name]
+
+ return parent_data
+
+ def add_header(self):
+
+ header = []
+ for df in self.fields:
+ is_parent = not df.is_child_table_field
+ if is_parent:
+ label = df.label
+ else:
+ label = "{0} ({1})".format(df.label, df.child_table_df.label)
+
+ if label in header:
+ # this label is already in the header,
+ # which means two fields with the same label
+ # add the fieldname to avoid clash
+ if is_parent:
+ label = "{0}".format(df.fieldname)
+ else:
+ label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname)
+ header.append(label)
+
+ self.csv_array.append(header)
+
+ def add_data(self):
+ self.csv_array += self.data
+
+ def get_csv_array(self):
+ return self.csv_array
+
+ def get_csv_array_for_export(self):
+ csv_array = self.csv_array
+
+ if not self.data:
+ # add 2 empty rows
+ csv_array += [[]] * 2
+
+ return csv_array
+
+ def build_response(self):
+ if self.file_type == "CSV":
+ self.build_csv_response()
+ elif self.file_type == "Excel":
+ self.build_xlsx_response()
+
+ def build_csv_response(self):
+ build_csv_response(self.get_csv_array_for_export(), self.doctype)
+
+ def build_xlsx_response(self):
+ build_xlsx_response(self.get_csv_array_for_export(), self.doctype)
diff --git a/frappe/core/doctype/data_import/exporter_new.py b/frappe/core/doctype/data_import/exporter_new.py
deleted file mode 100644
index 85f933be69..0000000000
--- a/frappe/core/doctype/data_import/exporter_new.py
+++ /dev/null
@@ -1,267 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
-# MIT License. See license.txt
-
-import frappe
-from frappe.model import display_fieldtypes, no_value_fields, table_fields
-from frappe.utils.csvutils import build_csv_response
-from frappe.utils.xlsxutils import build_xlsx_response
-from .importer_new import INVALID_VALUES
-
-
-class Exporter:
- def __init__(
- self,
- doctype,
- export_fields=None,
- export_data=False,
- export_filters=None,
- export_page_length=None,
- file_type="CSV",
- ):
- """
- Exports records of a DocType for use with Importer
- :param doctype: Document Type to export
- :param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
- :param export_data=False: Whether to export data as well
- :param export_filters=None: The filters (dict or list) which is used to query the records
- :param file_type: One of 'Excel' or 'CSV'
- """
- self.doctype = doctype
- self.meta = frappe.get_meta(doctype)
- self.export_fields = export_fields
- self.export_filters = export_filters
- self.export_page_length = export_page_length
- self.file_type = file_type
-
- # this will contain the csv content
- self.csv_array = []
-
- # fields that get exported
- # can be All, Mandatory or User Selected Fields
- self.fields = self.get_all_exportable_fields()
- self.add_header()
-
- if export_data:
- self.data = self.get_data_to_export()
- else:
- self.data = []
- self.add_data()
-
- def get_all_exportable_fields(self):
- return self.get_exportable_parent_fields() + self.get_exportable_children_fields()
-
- def get_exportable_parent_fields(self):
- parent_fields = self.get_exportable_fields(self.doctype)
-
- # if autoname is based on field
- # then merge ID and the field column title as "ID (Autoname Field)"
- autoname = self.meta.autoname
- if autoname and autoname.startswith("field:"):
- fieldname = autoname[len("field:") :]
- autoname_field = self.meta.get_field(fieldname)
- if autoname_field:
- name_field = parent_fields[0]
- name_field.label = "ID ({})".format(autoname_field.label)
- # remove the autoname field as it is a duplicate of ID field
- parent_fields = [
- df for df in parent_fields if df.fieldname != autoname_field.fieldname
- ]
-
- return parent_fields
-
- def get_exportable_children_fields(self):
- child_table_fields = [df for df in self.meta.fields if df.fieldtype in table_fields]
- if self.export_fields == "Mandatory":
- child_table_fields = [df for df in child_table_fields if df.reqd]
-
- children = [df.options for df in child_table_fields]
- children_fields = []
- for child in children:
- children_fields += self.get_exportable_fields(child)
-
- return children_fields
-
- def get_exportable_fields(self, doctype):
- meta = frappe.get_meta(doctype)
-
- def is_exportable(df):
- return df and df.fieldtype not in (display_fieldtypes + no_value_fields)
-
- # filter out invalid fieldtypes
- all_fields = [df for df in meta.fields if is_exportable(df)]
- # add name field
- name_field = frappe._dict(
- {
- "fieldtype": "Data",
- "fieldname": "name",
- "label": "ID",
- "reqd": 1,
- "parent": doctype,
- }
- )
- all_fields = [name_field] + all_fields
-
- if self.export_fields == "Mandatory":
- fields = [df for df in all_fields if df.reqd]
-
- if self.export_fields == "All":
- fields = list(all_fields)
-
- elif isinstance(self.export_fields, dict):
- fields_to_export = self.export_fields.get(doctype, [])
- fields = [meta.get_field(fieldname) for fieldname in fields_to_export]
- fields = [df for df in fields if is_exportable(df)]
- if 'name' in fields_to_export:
- fields = [name_field] + fields
-
- return fields or []
-
- def get_data_to_export(self):
- frappe.permissions.can_export(self.doctype, raise_exception=True)
-
- def get_column_name(df):
- return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)
-
- fields = [get_column_name(df) for df in self.fields]
- filters = self.export_filters
-
- if self.meta.is_nested_set():
- order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
- else:
- order_by = "`tab{0}`.`creation` DESC".format(self.doctype)
-
- data = frappe.db.get_list(
- self.doctype,
- filters=filters,
- fields=fields,
- limit_page_length=self.export_page_length,
- order_by=order_by,
- as_list=1,
- )
-
- data = self.remove_duplicate_values(data)
- data = self.remove_row_gaps(data)
- data = self.remove_empty_rows(data)
- # data = self.remove_values_from_name_column(data)
-
- return data
-
- def remove_duplicate_values(self, data):
- out = []
-
- doctypes = set([df.parent for df in self.fields])
-
- def name_exists_in_column_before_row(name, column_index, row_index):
- column_values = [row[column_index] for i, row in enumerate(data) if i < row_index]
- return name in column_values
-
- for i, row in enumerate(data):
- # first row is fine
- if i == 0:
- out.append(row)
- continue
-
- row = list(row)
- for doctype in doctypes:
- name_index = self.get_name_column_index(doctype)
- name = row[name_index]
- column_indexes = self.get_column_indexes(doctype)
-
- if name_exists_in_column_before_row(name, name_index, i):
- # remove the values from the row
- row = [None if i in column_indexes else d for i, d in enumerate(row)]
-
- out.append(row)
-
- return out
-
- def remove_row_gaps(self, data):
- doctypes = set([df.parent for df in self.fields if df.parent != self.doctype])
-
- def get_nearest_empty_row_index(col_index, row_index):
- col_values = [row[col_index] for row in data]
- i = row_index - 1
- while not col_values[i]:
- i = i - 1
- out = i + 1
- if row_index != out:
- return out
-
- for i, row in enumerate(data):
- # if this is the row that contains parent values then skip
- if row[0]:
- continue
-
- for doctype in doctypes:
- name_index = self.get_name_column_index(doctype)
- name = row[name_index]
- column_indexes = self.get_column_indexes(doctype)
-
- if not name:
- continue
-
- row_index = get_nearest_empty_row_index(name_index, i)
- if row_index:
- for col_index in column_indexes:
- data[row_index][col_index] = row[col_index]
- row[col_index] = None
-
- return data
-
- # pylint: disable=R0201
- def remove_empty_rows(self, data):
- return [row for row in data if any(v not in INVALID_VALUES for v in row)]
-
- def remove_values_from_name_column(self, data):
- out = []
- name_columns = [i for i, df in enumerate(self.fields) if df.fieldname == "name"]
- for row in data:
- out.append(["" if i in name_columns else value for i, value in enumerate(row)])
- return out
-
- def get_name_column_index(self, doctype):
- for i, df in enumerate(self.fields):
- if df.parent == doctype and df.fieldname == "name":
- return i
- return -1
-
- def get_column_indexes(self, doctype):
- return [i for i, df in enumerate(self.fields) if df.parent == doctype]
-
- def add_header(self):
- def get_label(df):
- if df.parent == self.doctype:
- return df.label
- else:
- return "{0} ({1})".format(df.label, df.parent)
-
- header = [get_label(df) for df in self.fields]
- self.csv_array.append(header)
-
- def add_data(self):
- self.csv_array += self.data
-
- def get_csv_array(self):
- return self.csv_array
-
- def get_csv_array_for_export(self):
- csv_array = self.csv_array
-
- if not self.data:
- # add 2 empty rows
- csv_array += [[]] * 2
-
- return csv_array
-
- def build_response(self):
- if self.file_type == 'CSV':
- self.build_csv_response()
- elif self.file_type == 'Excel':
- self.build_xlsx_response()
-
- def build_csv_response(self):
- build_csv_response(self.get_csv_array_for_export(), self.doctype)
-
- def build_xlsx_response(self):
- build_xlsx_response(self.get_csv_array_for_export(), self.doctype)
diff --git a/frappe/core/doctype/data_import/fixtures/sample_import_file.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file.csv
new file mode 100644
index 0000000000..ef5b96df58
--- /dev/null
+++ b/frappe/core/doctype/data_import/fixtures/sample_import_file.csv
@@ -0,0 +1,5 @@
+Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
+Test ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
+ , , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 ,child title again 2 ,22-09-2021 , ,
+Test 2 ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
+Test 3 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
diff --git a/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv
new file mode 100644
index 0000000000..656985b519
--- /dev/null
+++ b/frappe/core/doctype/data_import/fixtures/sample_import_file_for_update.csv
@@ -0,0 +1,2 @@
+Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
+Test 26 ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
diff --git a/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv b/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv
new file mode 100644
index 0000000000..c6bff5caeb
--- /dev/null
+++ b/frappe/core/doctype/data_import/fixtures/sample_import_file_without_mandatory.csv
@@ -0,0 +1,5 @@
+Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
+Test 5 ,test description ,1 ,2 ,"" , ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
+ , , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 , ,22-09-2021 , ,
+ ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
+Test 4 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
diff --git a/frappe/core/doctype/data_import/importer.py b/frappe/core/doctype/data_import/importer.py
index b6d410d072..4761652c70 100644
--- a/frappe/core/doctype/data_import/importer.py
+++ b/frappe/core/doctype/data_import/importer.py
@@ -1,541 +1,1115 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
+# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
-from __future__ import unicode_literals, print_function
-
-from six.moves import range
-import requests
-import frappe, json, os
-import frappe.permissions
-
+from __future__ import unicode_literals
+import os
+import io
+import frappe
+import timeit
+import json
+from datetime import datetime
from frappe import _
+from frappe.utils import cint, flt, update_progress_bar, cstr
+from frappe.utils.csvutils import read_csv_content, get_csv_content_from_google_sheets
+from frappe.utils.xlsxutils import (
+ read_xlsx_file_from_attached_file,
+ read_xls_file_from_attached_file,
+)
+from frappe.model import no_value_fields, table_fields as table_fieldtypes
-from frappe.utils.csvutils import getlink
-from frappe.utils.dateutils import parse_date
-
-from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
-from six import text_type, string_types
+INVALID_VALUES = ("", None)
+MAX_ROWS_IN_PREVIEW = 10
+INSERT = "Insert New Records"
+UPDATE = "Update Existing Records"
-@frappe.whitelist()
-def get_data_keys():
- return frappe._dict({
- "data_separator": _('Start entering data below this line'),
- "main_table": _("Table") + ":",
- "parent_table": _("Parent Table") + ":",
- "columns": _("Column Name") + ":",
- "doctype": _("DocType") + ":"
- })
+class Importer:
+ def __init__(
+ self, doctype, data_import=None, file_path=None, import_type=None, console=False
+ ):
+ self.doctype = doctype
+ self.console = console
+ self.data_import = data_import
+ if not self.data_import:
+ self.data_import = frappe.get_doc(doctype="Data Import")
+ if import_type:
+ self.data_import.import_type = import_type
+ self.template_options = frappe.parse_json(self.data_import.template_options or "{}")
+ self.import_type = self.data_import.import_type
-@frappe.whitelist()
-def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
- update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
- skip_errors = True, data_import_doc=None, validate_template=False, user=None):
- """upload data"""
+ self.import_file = ImportFile(
+ doctype,
+ file_path or data_import.google_sheets_url or data_import.import_file,
+ self.template_options,
+ self.import_type,
+ )
- # for translations
- if user:
- frappe.cache().hdel("lang", user)
- frappe.set_user_lang(user)
+ def get_data_for_import_preview(self):
+ return self.import_file.get_data_for_import_preview()
- if data_import_doc and isinstance(data_import_doc, string_types):
- data_import_doc = frappe.get_doc("Data Import", data_import_doc)
- if data_import_doc and from_data_import == "Yes":
- no_email = data_import_doc.no_email
- ignore_encoding_errors = data_import_doc.ignore_encoding_errors
- update_only = data_import_doc.only_update
- submit_after_import = data_import_doc.submit_after_import
- overwrite = data_import_doc.overwrite
- skip_errors = data_import_doc.skip_errors
- else:
- # extra input params
- params = json.loads(frappe.form_dict.get("params") or '{}')
- if params.get("submit_after_import"):
- submit_after_import = True
- if params.get("ignore_encoding_errors"):
- ignore_encoding_errors = True
- if not params.get("no_email"):
- no_email = False
- if params.get('update_only'):
- update_only = True
- if params.get('from_data_import'):
- from_data_import = params.get('from_data_import')
- if not params.get('skip_errors'):
- skip_errors = params.get('skip_errors')
+ def before_import(self):
+ # set user lang for translations
+ frappe.cache().hdel("lang", frappe.session.user)
+ frappe.set_user_lang(frappe.session.user)
- frappe.flags.in_import = True
- frappe.flags.mute_emails = no_email
+ # set flags
+ frappe.flags.in_import = True
+ frappe.flags.mute_emails = self.data_import.mute_emails
- def get_data_keys_definition():
- return get_data_keys()
+ self.data_import.db_set("template_warnings", "")
- def bad_template():
- frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))
+ def import_data(self):
+ self.before_import()
- def check_data_length():
- if not data:
- frappe.throw(_("No data found in the file. Please reattach the new file with data."))
+ # parse docs from rows
+ payloads = self.import_file.get_payloads_for_import()
- def get_start_row():
- for i, row in enumerate(rows):
- if row and row[0]==get_data_keys_definition().data_separator:
- return i+1
- bad_template()
+ # dont import if there are non-ignorable warnings
+ warnings = self.import_file.get_warnings()
+ warnings = [w for w in warnings if w.get("type") != "info"]
- def get_header_row(key):
- return get_header_row_and_idx(key)[0]
-
- def get_header_row_and_idx(key):
- for i, row in enumerate(header):
- if row and row[0]==key:
- return row, i
- return [], -1
-
- def filter_empty_columns(columns):
- empty_cols = list(filter(lambda x: x in ("", None), columns))
-
- if empty_cols:
- if columns[-1*len(empty_cols):] == empty_cols:
- # filter empty columns if they exist at the end
- columns = columns[:-1*len(empty_cols)]
+ if warnings:
+ if self.console:
+ self.print_grouped_warnings(warnings)
else:
- frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
- raise_exception=1)
-
- return columns
-
- def make_column_map():
- doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
- if row_idx == -1: # old style
+ self.data_import.db_set("template_warnings", json.dumps(warnings))
return
- dt = None
- for i, d in enumerate(doctype_row[1:]):
- if d not in ("~", "-"):
- if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
- dt, parentfield = d, None
- # xls format truncates the row, so it may not have more columns
- if len(doctype_row) > i+2:
- parentfield = doctype_row[i+2]
- doctypes.append((dt, parentfield))
- column_idx_to_fieldname[(dt, parentfield)] = {}
- column_idx_to_fieldtype[(dt, parentfield)] = {}
- if dt:
- column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
- column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]
-
- def get_doc(start_idx):
- if doctypes:
- doc = {}
- attachments = []
- last_error_row_idx = None
- for idx in range(start_idx, len(rows)):
- last_error_row_idx = idx # pylint: disable=W0612
- if (not doc) or main_doc_empty(rows[idx]):
- for dt, parentfield in doctypes:
- d = {}
- for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
- try:
- fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
- fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]
-
- if not fieldname or not rows[idx][column_idx]:
- continue
-
- d[fieldname] = rows[idx][column_idx]
- if fieldtype in ("Int", "Check"):
- d[fieldname] = cint(d[fieldname])
- elif fieldtype in ("Float", "Currency", "Percent"):
- d[fieldname] = flt(d[fieldname])
- elif fieldtype == "Date":
- if d[fieldname] and isinstance(d[fieldname], string_types):
- d[fieldname] = getdate(parse_date(d[fieldname]))
- elif fieldtype == "Datetime":
- if d[fieldname]:
- if " " in d[fieldname]:
- _date, _time = d[fieldname].split()
- else:
- _date, _time = d[fieldname], '00:00:00'
- _date = parse_date(d[fieldname])
- d[fieldname] = get_datetime(_date + " " + _time)
- else:
- d[fieldname] = None
-
- elif fieldtype in ("Image", "Attach Image", "Attach"):
- # added file to attachments list
- attachments.append(d[fieldname])
-
- elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
- # as fields can be saved in the number format(long type) in data import template
- d[fieldname] = cstr(d[fieldname])
-
- except IndexError:
- pass
-
- # scrub quotes from name and modified
- if d.get("name") and d["name"].startswith('"'):
- d["name"] = d["name"][1:-1]
-
- if sum([0 if not val else 1 for val in d.values()]):
- d['doctype'] = dt
- if dt == doctype:
- doc.update(d)
- else:
- if not overwrite and doc.get("name"):
- d['parent'] = doc["name"]
- d['parenttype'] = doctype
- d['parentfield'] = parentfield
- doc.setdefault(d['parentfield'], []).append(d)
- else:
- break
-
- return doc, attachments, last_error_row_idx
+ # setup import log
+ if self.data_import.import_log:
+ import_log = frappe.parse_json(self.data_import.import_log)
else:
- doc = frappe._dict(zip(columns, rows[start_idx][1:]))
- doc['doctype'] = doctype
- return doc, [], None
+ import_log = []
- # used in testing whether a row is empty or parent row or child row
- # checked only 3 first columns since first two columns can be blank for example the case of
- # importing the item variant where item code and item name will be blank.
- def main_doc_empty(row):
- if row:
- for i in range(3,0,-1):
- if len(row) > i and row[i]:
- return False
- return True
+ # remove previous failures from import log
+ import_log = [log for log in import_log if log.get("success")]
- def validate_naming(doc):
- autoname = frappe.get_meta(doctype).autoname
- if autoname:
- if autoname[0:5] == 'field':
- autoname = autoname[6:]
- elif autoname == 'naming_series:':
- autoname = 'naming_series'
- else:
- return True
+ # get successfully imported rows
+ imported_rows = []
+ for log in import_log:
+ log = frappe._dict(log)
+ if log.success:
+ imported_rows += log.row_indexes
- if (autoname not in doc) or (not doc[autoname]):
- from frappe.model.base_document import get_controller
- if not hasattr(get_controller(doctype), "autoname"):
- frappe.throw(_("{0} is a mandatory field").format(autoname))
- return True
+ # start import
+ total_payload_count = len(payloads)
+ batch_size = frappe.conf.data_import_batch_size or 1000
- users = frappe.db.sql_list("select name from tabUser")
- def prepare_for_insert(doc):
- # don't block data import if user is not set
- # migrating from another system
- if not doc.owner in users:
- doc.owner = frappe.session.user
- if not doc.modified_by in users:
- doc.modified_by = frappe.session.user
+ for batch_index, batched_payloads in enumerate(
+ frappe.utils.create_batch(payloads, batch_size)
+ ):
+ for i, payload in enumerate(batched_payloads):
+ doc = payload.doc
+ row_indexes = [row.row_number for row in payload.rows]
+ current_index = (i + 1) + (batch_index * batch_size)
- def is_valid_url(url):
- is_valid = False
- if url.startswith("/files") or url.startswith("/private/files"):
- url = get_url(url)
+ if set(row_indexes).intersection(set(imported_rows)):
+ print("Skipping imported rows", row_indexes)
+ if total_payload_count > 5:
+ frappe.publish_realtime(
+ "data_import_progress",
+ {
+ "current": current_index,
+ "total": total_payload_count,
+ "skipping": True,
+ "data_import": self.data_import.name,
+ },
+ )
+ continue
- try:
- r = requests.get(url)
- is_valid = True if r.status_code == 200 else False
- except Exception:
- pass
+ try:
+ start = timeit.default_timer()
+ doc = self.process_doc(doc)
+ processing_time = timeit.default_timer() - start
+ eta = self.get_eta(current_index, total_payload_count, processing_time)
- return is_valid
+ if self.console:
+ update_progress_bar(
+ "Importing {0} records".format(total_payload_count),
+ current_index,
+ total_payload_count,
+ )
+ elif total_payload_count > 5:
+ frappe.publish_realtime(
+ "data_import_progress",
+ {
+ "current": current_index,
+ "total": total_payload_count,
+ "docname": doc.name,
+ "data_import": self.data_import.name,
+ "success": True,
+ "row_indexes": row_indexes,
+ "eta": eta,
+ },
+ )
- def attach_file_to_doc(doctype, docname, file_url):
- # check if attachment is already available
- # check if the attachement link is relative or not
- if not file_url:
- return
- if not is_valid_url(file_url):
- return
+ import_log.append(
+ frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes)
+ )
+ # commit after every successful import
+ frappe.db.commit()
- files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
- attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
- doctype=doctype,
- docname=docname,
- file_url=file_url
- ))
-
- if files:
- # file is already attached
- return
-
- _file = frappe.get_doc({
- "doctype": "File",
- "file_url": file_url,
- "attached_to_name": docname,
- "attached_to_doctype": doctype,
- "attached_to_field": 0,
- "folder": "Home/Attachments"})
- _file.save()
-
-
- # header
- filename, file_extension = ['','']
- if not rows:
- _file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
- fcontent = _file.get_content()
- filename, file_extension = _file.get_extension()
-
- if file_extension == '.xlsx' and from_data_import == 'Yes':
- from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
- rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)
-
- elif file_extension == '.csv':
- from frappe.utils.csvutils import read_csv_content
- rows = read_csv_content(fcontent, ignore_encoding_errors)
+ except Exception:
+ import_log.append(
+ frappe._dict(
+ success=False,
+ exception=frappe.get_traceback(),
+ messages=frappe.local.message_log,
+ row_indexes=row_indexes,
+ )
+ )
+ frappe.clear_messages()
+ # rollback if exception
+ frappe.db.rollback()
+ # set status
+ failures = [log for log in import_log if not log.get("success")]
+ if len(failures) == total_payload_count:
+ status = "Pending"
+ elif len(failures) > 0:
+ status = "Partial Success"
else:
- frappe.throw(_("Unsupported File Format"))
+ status = "Success"
- start_row = get_start_row()
- header = rows[:start_row]
- data = rows[start_row:]
- try:
- doctype = get_header_row(get_data_keys_definition().main_table)[1]
- columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
- except:
- frappe.throw(_("Cannot change header content"))
- doctypes = []
- column_idx_to_fieldname = {}
- column_idx_to_fieldtype = {}
+ if self.console:
+ self.print_import_log(import_log)
+ else:
+ self.data_import.db_set("status", status)
+ self.data_import.db_set("import_log", json.dumps(import_log))
- if skip_errors:
- data_rows_with_error = header
+ self.after_import()
- if submit_after_import and not cint(frappe.db.get_value("DocType",
- doctype, "is_submittable")):
- submit_after_import = False
+ return import_log
- parenttype = get_header_row(get_data_keys_definition().parent_table)
-
- if len(parenttype) > 1:
- parenttype = parenttype[1]
-
- # check permissions
- if not frappe.permissions.can_import(parenttype or doctype):
+ def after_import(self):
+ frappe.flags.in_import = False
frappe.flags.mute_emails = False
- return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}
- # Throw expception in case of the empty data file
- check_data_length()
- make_column_map()
- total = len(data)
+ def process_doc(self, doc):
+ if self.import_type == INSERT:
+ return self.insert_record(doc)
+ elif self.import_type == UPDATE:
+ return self.update_record(doc)
- if validate_template:
- if total:
- data_import_doc.total_rows = total
- return True
+ def insert_record(self, doc):
+ meta = frappe.get_meta(self.doctype)
+ new_doc = frappe.new_doc(self.doctype)
+ new_doc.update(doc)
- if overwrite==None:
- overwrite = params.get('overwrite')
+ if (meta.autoname or "").lower() != "prompt":
+ # name can only be set directly if autoname is prompt
+ new_doc.set("name", None)
- # delete child rows (if parenttype)
- parentfield = None
- if parenttype:
- parentfield = get_parent_field(doctype, parenttype)
+ new_doc.flags.updater_reference = {
+ "doctype": self.data_import.doctype,
+ "docname": self.data_import.name,
+ "label": _("via Data Import"),
+ }
- if overwrite:
- delete_child_rows(data, doctype)
+ new_doc.insert()
+ if meta.is_submittable and self.data_import.submit_after_import:
+ new_doc.submit()
+ return new_doc
- import_log = []
- def log(**kwargs):
- if via_console:
- print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
- else:
- import_log.append(kwargs)
+ def update_record(self, doc):
+ id_field = get_id_field(self.doctype)
+ existing_doc = frappe.get_doc(self.doctype, doc.get(id_field.fieldname))
+ existing_doc.flags.updater_reference = {
+ "doctype": self.data_import.doctype,
+ "docname": self.data_import.name,
+ "label": _("via Data Import"),
+ }
+ existing_doc.update(doc)
+ existing_doc.save()
+ return existing_doc
- def as_link(doctype, name):
- if via_console:
- return "{0}: {1}".format(doctype, name)
- else:
- return getlink(doctype, name)
+ def get_eta(self, current, total, processing_time):
+ self.last_eta = getattr(self, "last_eta", 0)
+ remaining = total - current
+ eta = processing_time * remaining
+ if not self.last_eta or eta < self.last_eta:
+ self.last_eta = eta
+ return self.last_eta
- # publish realtime task update
- def publish_progress(achieved, reload=False):
- if data_import_doc:
- frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
- "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)
+ def export_errored_rows(self):
+ from frappe.utils.csvutils import build_csv_response
+
+ if not self.data_import:
+ return
+
+ import_log = frappe.parse_json(self.data_import.import_log or "[]")
+ failures = [log for log in import_log if not log.get("success")]
+ row_indexes = []
+ for f in failures:
+ row_indexes.extend(f.get("row_indexes", []))
+
+ # de duplicate
+ row_indexes = list(set(row_indexes))
+ row_indexes.sort()
+
+ header_row = [col.header_title for col in self.import_file.columns]
+ rows = [header_row]
+ rows += [row.data for row in self.import_file.data if row.row_number in row_indexes]
+
+ build_csv_response(rows, self.doctype)
+
+ def print_import_log(self, import_log):
+ failed_records = [log for log in import_log if not log.success]
+ successful_records = [log for log in import_log if log.success]
+
+ if successful_records:
+ print()
+ print(
+ "Successfully imported {0} records out of {1}".format(
+ len(successful_records), len(import_log)
+ )
+ )
+
+ if failed_records:
+ print("Failed to import {0} records".format(len(failed_records)))
+ file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now())
+ print("Check {0} for errors".format(os.path.join("sites", file_name)))
+ text = ""
+ for w in failed_records:
+ text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", [])))
+ text += "Messages:\n{0}\n".format("\n".join(w.get("messages", [])))
+ text += "Traceback:\n{0}\n\n".format(w.get("exception"))
+
+ with open(file_name, "w") as f:
+ f.write(text)
+
+ def print_grouped_warnings(self, warnings):
+ warnings_by_row = {}
+ other_warnings = []
+ for w in warnings:
+ if w.get("row"):
+ warnings_by_row.setdefault(w.get("row"), []).append(w)
+ else:
+ other_warnings.append(w)
+
+ for row_number, warnings in warnings_by_row.items():
+ print("Row {0}".format(row_number))
+ for w in warnings:
+ print(w.get("message"))
+
+ for w in other_warnings:
+ print(w.get("message"))
- error_flag = rollback_flag = False
+class ImportFile:
+ def __init__(self, doctype, file, template_options=None, import_type=None):
+ self.doctype = doctype
+ self.template_options = template_options or frappe._dict(
+ column_to_field_map=frappe._dict()
+ )
+ self.column_to_field_map = self.template_options.column_to_field_map
+ self.import_type = import_type
- batch_size = frappe.conf.data_import_batch_size or 1000
+ self.file_doc = self.file_path = None
+ if isinstance(file, frappe.string_types):
+ if frappe.db.exists("File", {"file_url": file}):
+ self.file_doc = frappe.get_doc("File", {"file_url": file})
+ elif 'docs.google.com/spreadsheets' in file:
+ self.google_sheets_url = file
+ elif os.path.exists(file):
+ self.file_path = file
- for batch_start in range(0, total, batch_size):
- batch = data[batch_start:batch_start + batch_size]
+ if not self.file_doc and not self.file_path and not self.google_sheets_url:
+ frappe.throw(_("Invalid template file for import"))
- for i, row in enumerate(batch):
- # bypass empty rows
- if main_doc_empty(row):
+ self.raw_data = self.get_data_from_template_file()
+ self.parse_data_from_template()
+
+ def get_data_from_template_file(self):
+ content = None
+ extension = None
+
+ if self.file_doc:
+ parts = self.file_doc.get_extension()
+ extension = parts[1]
+ content = self.file_doc.get_content()
+ extension = extension.lstrip(".")
+
+ elif self.file_path:
+ content, extension = self.read_file(self.file_path)
+
+ elif self.google_sheets_url:
+ content = get_csv_content_from_google_sheets(self.google_sheets_url)
+ extension = 'csv'
+
+ if not content:
+ frappe.throw(_("Invalid or corrupted content for import"))
+
+ if not extension:
+ extension = "csv"
+
+ if content:
+ return self.read_content(content, extension)
+
+ def parse_data_from_template(self):
+ header = None
+ data = []
+
+ for i, row in enumerate(self.raw_data):
+ if all(v in INVALID_VALUES for v in row):
+ # empty row
continue
- row_idx = i + start_row
- doc = None
-
- publish_progress(i)
-
- try:
- doc, attachments, last_error_row_idx = get_doc(row_idx)
- validate_naming(doc)
- if pre_process:
- pre_process(doc)
-
- original = None
- if parentfield:
- parent = frappe.get_doc(parenttype, doc["parent"])
- doc = parent.append(parentfield, doc)
- parent.save()
- else:
- if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
- original = frappe.get_doc(doctype, doc["name"])
- original_name = original.name
- original.update(doc)
- # preserve original name for case sensitivity
- original.name = original_name
- original.flags.ignore_links = ignore_links
- original.save()
- doc = original
- else:
- if not update_only:
- doc = frappe.get_doc(doc)
- prepare_for_insert(doc)
- doc.flags.ignore_links = ignore_links
- doc.insert()
- if attachments:
- # check file url and create a File document
- for file_url in attachments:
- attach_file_to_doc(doc.doctype, doc.name, file_url)
- if submit_after_import:
- doc.submit()
-
- # log errors
- if parentfield:
- log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
- "link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
- elif submit_after_import:
- log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
- "message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
- elif original:
- log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
- "message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
- elif not update_only:
- log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
- "message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
- else:
- log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
- "message": "Document updation ignored", "indicator": "orange"})
-
- except Exception as e:
- error_flag = True
-
- # build error message
- if frappe.local.message_log:
- err_msg = "\n".join(['{}
'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
- else:
- err_msg = '{}
'.format(cstr(e))
-
- error_trace = frappe.get_traceback()
- if error_trace:
- error_log_doc = frappe.log_error(error_trace)
- error_link = get_absolute_url("Error Log", error_log_doc.name)
- else:
- error_link = None
-
- log(**{
- "row": row_idx + 1,
- "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
- "message": err_msg,
- "indicator": "red",
- "link":error_link
- })
-
- # data with error to create a new file
- # include the errored data in the last row as last_error_row_idx will not be updated for the last row
- if skip_errors:
- if last_error_row_idx == len(rows)-1:
- last_error_row_idx = len(rows)
- data_rows_with_error += rows[row_idx:last_error_row_idx]
- else:
- rollback_flag = True
- finally:
- frappe.local.message_log = []
-
- start_row += batch_size
- if rollback_flag:
- frappe.db.rollback()
- else:
- frappe.db.commit()
-
- frappe.flags.mute_emails = False
- frappe.flags.in_import = False
-
- log_message = {"messages": import_log, "error": error_flag}
- if data_import_doc:
- data_import_doc.log_details = json.dumps(log_message)
-
- import_status = None
- if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
- import_status = "Partially Successful"
- # write the file with the faulty row
- file_name = 'error_' + filename + file_extension
- if file_extension == '.xlsx':
- from frappe.utils.xlsxutils import make_xlsx
- xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
- file_data = xlsx_file.getvalue()
+ if not header:
+ header = Header(i, row, self.doctype, self.raw_data, self.column_to_field_map)
else:
- from frappe.utils.csvutils import to_csv
- file_data = to_csv(data_rows_with_error)
- _file = frappe.get_doc({
- "doctype": "File",
- "file_name": file_name,
- "attached_to_doctype": "Data Import",
- "attached_to_name": data_import_doc.name,
- "folder": "Home/Attachments",
- "content": file_data})
- _file.save()
- data_import_doc.error_file = _file.file_url
+ row_obj = Row(i, row, self.doctype, header, self.import_type)
+ data.append(row_obj)
- elif error_flag:
- import_status = "Failed"
- else:
- import_status = "Successful"
+ self.header = header
+ self.columns = self.header.columns
+ self.data = data
- data_import_doc.import_status = import_status
- data_import_doc.save()
- if data_import_doc.import_status in ["Successful", "Partially Successful"]:
- data_import_doc.submit()
- publish_progress(100, True)
- else:
- publish_progress(0, True)
- frappe.db.commit()
- else:
- return log_message
+ if len(data) < 1:
+ frappe.throw(
+ _("Import template should contain a Header and atleast one row."),
+ title=_("Template Error"),
+ )
-def get_parent_field(doctype, parenttype):
- parentfield = None
+ def get_data_for_import_preview(self):
+ """Adds a serial number column as the first column"""
- # get parentfield
- if parenttype:
- for d in frappe.get_meta(parenttype).get_table_fields():
- if d.options==doctype:
- parentfield = d.fieldname
+ columns = [frappe._dict({"header_title": "Sr. No", "skip_import": True})]
+ columns += [col.as_dict() for col in self.columns]
+ for col in columns:
+ # only pick useful fields in docfields to minimise the payload
+ if col.df:
+ col.df = {
+ "fieldtype": col.df.fieldtype,
+ "fieldname": col.df.fieldname,
+ "label": col.df.label,
+ "options": col.df.options,
+ "parent": col.df.parent,
+ "reqd": col.df.reqd,
+ "default": col.df.default,
+ "read_only": col.df.read_only,
+ }
+
+ data = [[row.row_number] + row.as_list() for row in self.data]
+
+ warnings = self.get_warnings()
+
+ out = frappe._dict()
+ out.data = data
+ out.columns = columns
+ out.warnings = warnings
+ total_number_of_rows = len(out.data)
+ if total_number_of_rows > MAX_ROWS_IN_PREVIEW:
+ out.data = out.data[:MAX_ROWS_IN_PREVIEW]
+ out.max_rows_exceeded = True
+ out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW
+ out.total_number_of_rows = total_number_of_rows
+ return out
+
+ def get_payloads_for_import(self):
+ payloads = []
+ # make a copy
+ data = list(self.data)
+ while data:
+ doc, rows, data = self.parse_next_row_for_import(data)
+ payloads.append(frappe._dict(doc=doc, rows=rows))
+ return payloads
+
+ def parse_next_row_for_import(self, data):
+ """
+ Parses rows that make up a doc. A doc maybe built from a single row or multiple rows.
+ Returns the doc, rows, and data without the rows.
+ """
+ doctypes = self.header.doctypes
+
+ # first row is included by default
+ first_row = data[0]
+ rows = [first_row]
+
+ # if there are child doctypes, find the subsequent rows
+ if len(doctypes) > 1:
+ # subsequent rows either dont have any parent value set
+ # or have the same value as the parent row
+ # we include a row if either of conditions match
+ parent_column_indexes = self.header.get_column_indexes(self.doctype)
+ parent_row_values = first_row.get_values(parent_column_indexes)
+
+ data_without_first_row = data[1:]
+ for row in data_without_first_row:
+ row_values = row.get_values(parent_column_indexes)
+ # if the row is blank, it's a child row doc
+ if all([v in INVALID_VALUES for v in row_values]):
+ rows.append(row)
+ continue
+ # if the row has same values as parent row, it's a child row doc
+ if row_values == parent_row_values:
+ rows.append(row)
+ continue
+ # if any of those conditions dont match, it's the next doc
break
- if not parentfield:
- frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype))
- raise Exception
+ parent_doc = None
+ for row in rows:
+ for doctype, table_df in doctypes:
+ if doctype == self.doctype and not parent_doc:
+ parent_doc = row.parse_doc(doctype)
- return parentfield
+ if doctype != self.doctype and table_df:
+ child_doc = row.parse_doc(doctype, parent_doc, table_df)
+ parent_doc[table_df.fieldname] = parent_doc.get(table_df.fieldname, [])
+ parent_doc[table_df.fieldname].append(child_doc)
-def delete_child_rows(rows, doctype):
- """delete child rows for all parents"""
- for p in list(set([r[1] for r in rows])):
- if p:
- frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p)
+ doc = parent_doc
+ # check if there is atleast one row for mandatory table fields
+ meta = frappe.get_meta(self.doctype)
+ mandatory_table_fields = [
+ df
+ for df in meta.fields
+ if df.fieldtype in table_fieldtypes
+ and df.reqd
+ and len(doc.get(df.fieldname, [])) == 0
+ ]
+ if len(mandatory_table_fields) == 1:
+ self.warnings.append(
+ {
+ "row": first_row.row_number,
+ "message": _("There should be atleast one row for {0} table").format(
+ mandatory_table_fields[0].label
+ ),
+ }
+ )
+ elif mandatory_table_fields:
+ fields_string = ", ".join([df.label for df in mandatory_table_fields])
+ message = _("There should be atleast one row for the following tables: {0}").format(
+ fields_string
+ )
+ self.warnings.append({"row": first_row.row_number, "message": message})
+
+ return doc, rows, data[len(rows) :]
+
+ def get_warnings(self):
+ warnings = []
+ for col in self.header.columns:
+ warnings += col.warnings
+
+ for row in self.data:
+ warnings += row.warnings
+
+ return warnings
+
+ ######
+
+ def read_file(self, file_path):
+ extn = file_path.split(".")[1]
+
+ file_content = None
+ with io.open(file_path, mode="rb") as f:
+ file_content = f.read()
+
+ return file_content, extn
+
+ def read_content(self, content, extension):
+ error_title = _("Template Error")
+ if extension not in ("csv", "xlsx", "xls"):
+ frappe.throw(
+ _("Import template should be of type .csv, .xlsx or .xls"), title=error_title
+ )
+
+ if extension == "csv":
+ data = read_csv_content(content)
+ elif extension == "xlsx":
+ data = read_xlsx_file_from_attached_file(fcontent=content)
+ elif extension == "xls":
+ data = read_xls_file_from_attached_file(content)
+
+ return data
+
+
class Row:
	"""One data row of an import template, parsed against a DocType's fields.

	A Row validates each cell against the column it belongs to (via the
	shared Header), converts raw strings into typed values, and collects
	user-facing warnings in ``self.warnings`` instead of raising.
	"""

	# class-level cache shared by all rows in a request:
	# "<link doctype>::<value>" -> result of frappe.db.exists
	link_values_exist_map = {}

	def __init__(self, index, row, doctype, header, import_type):
		self.index = index
		self.row_number = index + 1
		self.doctype = doctype
		self.data = row
		self.header = header
		self.import_type = import_type
		self.warnings = []

		# a row with a different number of values than the header has
		# columns is suspicious — warn, but still try to import it
		len_row = len(self.data)
		len_columns = len(self.header.columns)
		if len_row != len_columns:
			less_than_columns = len_row < len_columns
			message = (
				"Row has less values than columns"
				if less_than_columns
				else "Row has more values than columns"
			)
			self.warnings.append({"row": self.row_number, "message": message})

	def parse_doc(self, doctype, parent_doc=None, table_df=None):
		"""Build a dict for ``doctype`` from this row's values.

		``parent_doc`` and ``table_df`` are passed when parsing a child
		table row that belongs to a parent document.
		"""
		col_indexes = self.header.get_column_indexes(doctype, table_df)
		values = self.get_values(col_indexes)
		columns = self.header.get_columns(col_indexes)
		doc = self._parse_doc(doctype, columns, values, parent_doc, table_df)
		return doc

	def _parse_doc(self, doctype, columns, values, parent_doc=None, table_df=None):
		doc = frappe._dict()
		if self.import_type == INSERT:
			# new_doc returns a dict with default values set
			doc = frappe.new_doc(
				doctype,
				parent_doc=parent_doc,
				parentfield=table_df.fieldname if table_df else None,
				as_dict=True,
			)

		# remove standard fields and __islocal
		for key in frappe.model.default_fields + ("__islocal",):
			doc.pop(key, None)

		for col, value in zip(columns, values):
			df = col.df
			if value in INVALID_VALUES:
				value = None

			if value is not None:
				value = self.validate_value(value, col)

			if value is not None:
				doc[df.fieldname] = self.parse_value(value, col)

		is_table = frappe.get_meta(doctype).istable
		is_update = self.import_type == UPDATE
		if is_table and is_update and doc.get("name") in INVALID_VALUES:
			# for table rows being inserted in update
			# create a new doc with defaults set
			new_doc = frappe.new_doc(doctype, as_dict=True)
			new_doc.update(doc)
			doc = new_doc

		self.check_mandatory_fields(doctype, doc, table_df)
		return doc

	def validate_value(self, value, col):
		"""Validate ``value`` against the column's docfield.

		Returns the value (parsed to a datetime for date fields), or None
		when it is invalid — in that case a warning is appended.
		"""
		df = col.df
		if df.fieldtype == "Select":
			select_options = df.get_select_options()
			if select_options and value not in select_options:
				options_string = ", ".join([frappe.bold(d) for d in select_options])
				msg = _("Value must be one of {0}").format(options_string)
				self.warnings.append(
					{
						"row": self.row_number,
						"field": df.as_dict(convert_dates_to_str=True),
						"message": msg,
					}
				)
				return

		elif df.fieldtype == "Link":
			exists = self.link_exists(value, df)
			if not exists:
				msg = _("Value {0} missing for {1}").format(
					frappe.bold(value), frappe.bold(df.options)
				)
				self.warnings.append(
					{
						"row": self.row_number,
						"field": df.as_dict(convert_dates_to_str=True),
						"message": msg,
					}
				)
				return
		elif df.fieldtype in ["Date", "Datetime"]:
			value = self.get_date(value, col)
			if isinstance(value, frappe.string_types):
				# value was not parsed as datetime object
				self.warnings.append(
					{
						"row": self.row_number,
						"col": col.column_number,
						"field": df.as_dict(convert_dates_to_str=True),
						# fixed grammar of the user-facing message ("must in" -> "must be in")
						"message": _("Value {0} must be in {1} format").format(
							frappe.bold(value), frappe.bold(get_user_format(col.date_format))
						),
					}
				)
				return

		return value

	def link_exists(self, value, df):
		"""Cached existence check for a Link value.

		NOTE(review): a miss is presumably stored as a falsy value by
		frappe.db.exists and therefore re-checked on every call — confirm.
		"""
		key = df.options + "::" + value
		if Row.link_values_exist_map.get(key) is None:
			Row.link_values_exist_map[key] = frappe.db.exists(df.options, value)
		return Row.link_values_exist_map.get(key)

	def parse_value(self, value, col):
		"""Convert a raw cell value to the python type implied by the docfield."""
		df = col.df
		if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]:
			return value

		value = cstr(value)

		# convert boolean values to 0 or 1
		valid_check_values = ["t", "f", "true", "false", "yes", "no", "y", "n"]
		if df.fieldtype == "Check" and value.lower().strip() in valid_check_values:
			value = value.lower().strip()
			value = 1 if value in ["t", "true", "y", "yes"] else 0

		if df.fieldtype in ["Int", "Check"]:
			value = cint(value)
		elif df.fieldtype in ["Float", "Percent", "Currency"]:
			value = flt(value)
		elif df.fieldtype in ["Date", "Datetime"]:
			value = self.get_date(value, col)

		return value

	def get_date(self, value, column):
		"""Parse ``value`` using the column's guessed date format.

		Returns the original string unchanged when there is no format or
		the value does not match it.
		"""
		date_format = column.date_format
		if date_format:
			try:
				return datetime.strptime(value, date_format)
			except ValueError:
				# ignore date values that dont match the format
				# import will break for these values later
				pass
		return value

	def check_mandatory_fields(self, doctype, doc, table_df=None):
		"""If import type is Insert:
			Check for mandatory fields (except table fields) in doc
		if import type is Update:
			Check for name field or autoname field in doc
		"""
		meta = frappe.get_meta(doctype)
		if self.import_type == UPDATE:
			if meta.istable:
				# when updating records with table rows,
				# there are two scenarios:
				# 1. if row 'name' is provided in the template
				#    the table row will be updated
				# 2. if row 'name' is not provided
				#    then a new row will be added
				# so we dont need to check for mandatory
				return

			# for update, only ID (name) field is mandatory
			id_field = get_id_field(doctype)
			if doc.get(id_field.fieldname) in INVALID_VALUES:
				self.warnings.append(
					{
						"row": self.row_number,
						# fixed: stray debug text ("asdadsf") removed; bold the
						# label for consistency with the insert-mode messages below
						"message": _("{0} is a mandatory field").format(
							frappe.bold(id_field.label)
						),
					}
				)
			return

		fields = [
			df
			for df in meta.fields
			if df.fieldtype not in table_fieldtypes
			and df.reqd
			and doc.get(df.fieldname) in INVALID_VALUES
		]

		if not fields:
			return

		def get_field_label(df):
			# append the child table label when checking a child row
			return "{0}{1}".format(df.label, " ({})".format(table_df.label) if table_df else "")

		if len(fields) == 1:
			field_label = get_field_label(fields[0])
			self.warnings.append(
				{
					"row": self.row_number,
					"message": _("{0} is a mandatory field").format(frappe.bold(field_label)),
				}
			)
		else:
			fields_string = ", ".join([frappe.bold(get_field_label(df)) for df in fields])
			self.warnings.append(
				{
					"row": self.row_number,
					"message": _("{0} are mandatory fields").format(fields_string),
				}
			)

	def get_values(self, indexes):
		"""Return the row values at the given column indexes."""
		return [self.data[i] for i in indexes]

	def get(self, index):
		return self.data[index]

	def as_list(self):
		return self.data
+
+
class Header(Row):
	"""The header row of an import template.

	Builds a Column for every header cell and derives the list of
	(doctype, table_df) pairs the template addresses.
	"""

	def __init__(self, index, row, doctype, raw_data, column_to_field_map):
		self.index = index
		self.row_number = index + 1
		self.data = row
		self.doctype = doctype

		self.seen = []
		self.columns = []

		for position, title in enumerate(row):
			values_in_column = [get_item_at_index(data_row, position) for data_row in raw_data]
			col = Column(
				position,
				title,
				self.doctype,
				values_in_column,
				column_to_field_map.get(title),
				self.seen,
			)
			# record the title only after Column() has run its duplicate check
			self.seen.append(title)
			self.columns.append(col)

		doctype_pairs = []
		for col in self.columns:
			df = col.df
			if not df:
				continue
			if df.parent == self.doctype:
				doctype_pairs.append((df.parent, None))
			else:
				doctype_pairs.append((df.parent, df.child_table_df))

		# parent doctype first, then the child table doctypes
		self.doctypes = sorted(
			set(doctype_pairs), key=lambda pair: -1 if pair[0] == self.doctype else 1
		)

	def get_column_indexes(self, doctype, tablefield=None):
		"""Return indexes of importable columns belonging to ``doctype``
		(restricted to one table field when ``tablefield`` is given)."""

		def matches_table_field(df):
			if not tablefield:
				return True
			return df.child_table_df.fieldname == tablefield.fieldname

		indexes = []
		for col in self.columns:
			if col.skip_import or not col.df:
				continue
			if col.df.parent != doctype:
				continue
			if matches_table_field(col.df):
				indexes.append(col.index)
		return indexes

	def get_columns(self, indexes):
		"""Return the Column objects at the given indexes."""
		return [self.columns[index] for index in indexes]
+
+
class Column:
	"""A single template column: maps a header title to a DocField.

	On construction the column resolves its docfield (``self.df``) from the
	header title or an explicit user mapping, decides whether it should be
	skipped, and guesses a date format for date-like fields. Informational
	warnings are collected in ``self.warnings``.
	"""

	# NOTE(review): class-level attributes shared by all instances;
	# ``seen`` is always shadowed by the instance attribute set in __init__
	seen = []
	fields_column_map = {}

	def __init__(self, index, header, doctype, column_values, map_to_field=None, seen=None):
		# fixed: avoid a mutable default argument (``seen=[]``) — a list
		# shared across calls would wrongly flag columns as duplicates;
		# callers (Header) pass their own list, which we keep by reference
		if seen is None:
			seen = []

		self.index = index
		self.column_number = index + 1
		self.doctype = doctype
		self.header_title = header
		self.column_values = column_values
		self.map_to_field = map_to_field
		self.seen = seen

		self.date_format = None
		self.df = None
		self.skip_import = None
		self.warnings = []

		self.meta = frappe.get_meta(doctype)
		self.parse()
		self.parse_date_format()

	def parse(self):
		"""Resolve ``self.df`` and ``self.skip_import`` for this column."""
		header_title = self.header_title
		column_number = str(self.column_number)
		skip_import = False

		if self.map_to_field and self.map_to_field != "Don't Import":
			# user explicitly mapped this column to a field
			df = get_df_for_column_header(self.doctype, self.map_to_field)
			if df:
				self.warnings.append(
					{
						"message": _("Mapping column {0} to field {1}").format(
							frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label)
						),
						"type": "info",
					}
				)
			else:
				self.warnings.append(
					{
						"message": _("Could not map column {0} to field {1}").format(
							column_number, self.map_to_field
						),
						"type": "info",
					}
				)
		else:
			# match by header title (label or fieldname)
			df = get_df_for_column_header(self.doctype, header_title)

		if not df:
			skip_import = True
		else:
			skip_import = False

		if header_title in self.seen:
			self.warnings.append(
				{
					"col": column_number,
					"message": _("Skipping Duplicate Column {0}").format(frappe.bold(header_title)),
					"type": "info",
				}
			)
			df = None
			skip_import = True
		elif self.map_to_field == "Don't Import":
			skip_import = True
			self.warnings.append(
				{
					"col": column_number,
					"message": _("Skipping column {0}").format(frappe.bold(header_title)),
					"type": "info",
				}
			)
		elif header_title and not df:
			self.warnings.append(
				{
					"col": column_number,
					"message": _("Cannot match column {0} with any field").format(
						frappe.bold(header_title)
					),
					"type": "info",
				}
			)
		elif not header_title and not df:
			self.warnings.append(
				{"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"}
			)

		self.df = df
		self.skip_import = skip_import

	def parse_date_format(self):
		"""Set ``self.date_format`` for date-like fields by inspecting the data."""
		if self.df and self.df.fieldtype in ("Date", "Time", "Datetime"):
			self.date_format = self.guess_date_format_for_column()

	def guess_date_format_for_column(self):
		""" Guesses date format for a column by parsing all the values in the column,
		getting the date format and then returning the one which has the maximum frequency
		"""

		date_formats = [
			frappe.utils.guess_date_format(d) for d in self.column_values if isinstance(d, str)
		]
		date_formats = [d for d in date_formats if d]
		if not date_formats:
			return

		unique_date_formats = set(date_formats)
		max_occurred_date_format = max(unique_date_formats, key=date_formats.count)

		if len(unique_date_formats) > 1:
			# warn when the column mixes formats; we pick the most common one
			# fmt: off
			message = _("The column {0} has {1} different date formats. Automatically setting {2} as the default format as it is the most common. Please change other values in this column to this format.")
			# fmt: on
			user_date_format = get_user_format(max_occurred_date_format)
			self.warnings.append(
				{
					"col": self.column_number,
					"message": message.format(
						frappe.bold(self.header_title),
						len(unique_date_formats),
						frappe.bold(user_date_format),
					),
					"type": "info",
				}
			)

		return max_occurred_date_format

	def as_dict(self):
		"""Serialize the column state for the client-side import preview."""
		d = frappe._dict()
		d.index = self.index
		d.column_number = self.column_number
		d.doctype = self.doctype
		d.header_title = self.header_title
		d.map_to_field = self.map_to_field
		d.date_format = self.date_format
		d.df = self.df
		d.skip_import = self.skip_import
		d.warnings = self.warnings
		return d
+
+
def build_fields_dict_for_column_matching(parent_doctype):
	"""
	Build a dict with various keys to match with column headers and value as docfield
	The keys can be label or fieldname
	{
		'Customer': df1,
		'customer': df1,
		'Due Date': df2,
		'due_date': df2,
		'Item Code (Sales Invoice Item)': df3,
		'Sales Invoice Item:item_code': df3,
	}
	"""

	def get_standard_fields(doctype):
		# standard (non-DocField) importable fields: child tables get
		# parent-linkage fields, other doctypes get owner/docstatus
		meta = frappe.get_meta(doctype)
		if meta.istable:
			standard_fields = [
				{"label": "Parent", "fieldname": "parent"},
				{"label": "Parent Type", "fieldname": "parenttype"},
				{"label": "Parent Field", "fieldname": "parentfield"},
				{"label": "Row Index", "fieldname": "idx"},
			]
		else:
			standard_fields = [
				{"label": "Owner", "fieldname": "owner"},
				{"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"},
			]

		out = []
		for df in standard_fields:
			df = frappe._dict(df)
			df.parent = doctype
			out.append(df)
		return out

	parent_meta = frappe.get_meta(parent_doctype)
	out = {}

	# doctypes and fieldname if it is a child doctype
	doctypes = [[parent_doctype, None]] + [
		[df.options, df] for df in parent_meta.get_table_fields()
	]

	for doctype, table_df in doctypes:
		# name field
		# parent rows match "ID"/"name"; child rows match
		# "ID (<Table Label>)" / "<table_fieldname>.name"
		name_by_label = (
			"ID" if doctype == parent_doctype else "ID ({0})".format(table_df.label)
		)
		name_by_fieldname = (
			"name" if doctype == parent_doctype else "{0}.name".format(table_df.fieldname)
		)
		name_df = frappe._dict(
			{
				"fieldtype": "Data",
				"fieldname": "name",
				"label": "ID",
				"reqd": 1,  # self.import_type == UPDATE,
				"parent": doctype,
			}
		)

		if doctype != parent_doctype:
			name_df.is_child_table_field = True
			name_df.child_table_df = table_df

		out[name_by_label] = name_df
		out[name_by_fieldname] = name_df

		# other fields
		fields = get_standard_fields(doctype) + frappe.get_meta(doctype).fields
		for df in fields:
			fieldtype = df.fieldtype or "Data"
			parent = df.parent or parent_doctype
			if fieldtype not in no_value_fields:
				if parent_doctype == doctype:
					# for parent doctypes keys will be
					# Label
					# label
					# Label (label)
					if not out.get(df.label):
						# if Label is already set, don't set it again
						# in case of duplicate column headers
						out[df.label] = df
					out[df.fieldname] = df
					label_with_fieldname = "{0} ({1})".format(df.label, df.fieldname)
					out[label_with_fieldname] = df
				else:
					# in case there are multiple table fields with the same doctype
					# for child doctypes keys will be
					# Label (Table Field Label)
					# table_field.fieldname
					table_fields = parent_meta.get(
						"fields", {"fieldtype": ["in", table_fieldtypes], "options": parent}
					)
					for table_field in table_fields:
						by_label = "{0} ({1})".format(df.label, table_field.label)
						by_fieldname = "{0}.{1}".format(table_field.fieldname, df.fieldname)

						# create a new df object to avoid mutation problems
						if isinstance(df, dict):
							new_df = frappe._dict(df.copy())
						else:
							new_df = df.as_dict()

						new_df.is_child_table_field = True
						new_df.child_table_df = table_field
						out[by_label] = new_df
						out[by_fieldname] = new_df

	# if autoname is based on field
	# add an entry for "ID (Autoname Field)"
	autoname_field = get_autoname_field(parent_doctype)
	if autoname_field:
		out["ID ({})".format(autoname_field.label)] = autoname_field
		# ID field should also map to the autoname field
		out["ID"] = autoname_field
		out["name"] = autoname_field

	return out
+
+
def get_df_for_column_header(doctype, header):
	"""Resolve a column header (label or fieldname) to a docfield using a
	cached per-doctype field map."""
	field_map = frappe.cache().hget(
		"data_import_column_header_map",
		doctype,
		generator=lambda: build_fields_dict_for_column_matching(doctype),
	)
	return field_map.get(header)
+
+
+# utilities
+
+
def get_id_field(doctype):
	"""Return the field used to identify records of ``doctype``: the
	autoname field when the doctype autonames from a field, otherwise the
	standard "name" field."""
	autoname_df = get_autoname_field(doctype)
	return autoname_df or frappe._dict(
		{"label": "ID", "fieldname": "name", "fieldtype": "Data"}
	)
+
+
def get_autoname_field(doctype):
	"""Return the DocField a doctype autonames from (autoname set to
	"field:<fieldname>"), or None when it does not autoname from a field."""
	meta = frappe.get_meta(doctype)
	autoname = meta.autoname
	if not (autoname and autoname.startswith("field:")):
		return None
	fieldname = autoname.split("field:", 1)[1]
	return meta.get_field(fieldname)
+
+
def get_item_at_index(_list, i, default=None):
	"""Return ``_list[i]``, falling back to ``default`` when the index is
	out of range."""
	try:
		return _list[i]
	except IndexError:
		return default
+
+
def get_user_format(date_format):
	"""Translate a strptime-style format into a user-friendly one
	(e.g. "%d-%m-%Y" -> "dd-mm-yyyy")."""
	# %Y must be replaced before %y so "%Y" is not half-matched
	replacements = (("%Y", "yyyy"), ("%y", "yy"), ("%m", "mm"), ("%d", "dd"))
	user_format = date_format
	for token, user_token in replacements:
		user_format = user_format.replace(token, user_token)
	return user_format
diff --git a/frappe/core/doctype/data_import/importer_new.py b/frappe/core/doctype/data_import/importer_new.py
deleted file mode 100644
index 040e9fabc4..0000000000
--- a/frappe/core/doctype/data_import/importer_new.py
+++ /dev/null
@@ -1,1044 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
-# MIT License. See license.txt
-
-import io
-import os
-import json
-import timeit
-import frappe
-from datetime import datetime
-from frappe import _
-from frappe.utils import cint, flt, update_progress_bar, cstr, DATETIME_FORMAT
-from frappe.utils.csvutils import read_csv_content
-from frappe.utils.xlsxutils import (
- read_xlsx_file_from_attached_file,
- read_xls_file_from_attached_file,
-)
-from frappe.model import no_value_fields, table_fields
-
-INVALID_VALUES = ["", None]
-MAX_ROWS_IN_PREVIEW = 10
-INSERT = "Insert New Records"
-UPDATE = "Update Existing Records"
-
-# pylint: disable=R0201
-class Importer:
- def __init__(
- self, doctype, data_import=None, file_path=None, content=None, console=False
- ):
- self.doctype = doctype
- self.template_options = frappe._dict({"remap_column": {}})
- self.console = console
-
- if data_import:
- self.data_import = data_import
- if self.data_import.template_options:
- template_options = frappe.parse_json(self.data_import.template_options)
- self.template_options.update(template_options)
- self.import_type = self.data_import.import_type
- else:
- self.data_import = None
-
- self.import_type = self.import_type or INSERT
-
- self.header_row = None
- self.data = None
- # used to store date formats guessed from data rows per column
- self._guessed_date_formats = {}
- # used to store eta during import
- self.last_eta = 0
- # used to collect warnings during template parsing
- # and show them to user
- self.warnings = []
- self.meta = frappe.get_meta(doctype)
- self.prepare_content(file_path, content)
- self.parse_data_from_template()
-
- def prepare_content(self, file_path, content):
- extension = None
- if self.data_import and self.data_import.import_file:
- file_doc = frappe.get_doc("File", {"file_url": self.data_import.import_file})
- parts = file_doc.get_extension()
- extension = parts[1]
- content = file_doc.get_content()
- extension = extension.lstrip(".")
-
- if file_path:
- content, extension = self.read_file(file_path)
-
- if not extension:
- extension = "csv"
-
- if content:
- self.read_content(content, extension)
-
- self.validate_template_content()
-
- def read_file(self, file_path):
- extn = file_path.split(".")[1]
-
- file_content = None
- with io.open(file_path, mode="rb") as f:
- file_content = f.read()
-
- return file_content, extn
-
- def read_content(self, content, extension):
- error_title = _("Template Error")
- if extension not in ("csv", "xlsx", "xls"):
- frappe.throw(
- _("Import template should be of type .csv, .xlsx or .xls"), title=error_title
- )
-
- if extension == "csv":
- data = read_csv_content(content)
- elif extension == "xlsx":
- data = read_xlsx_file_from_attached_file(fcontent=content)
- elif extension == "xls":
- data = read_xls_file_from_attached_file(content)
-
- data = self.remove_empty_rows_and_columns(data)
-
- if len(data) <= 1:
- frappe.throw(
- _("Import template should contain a Header and atleast one row."), title=error_title
- )
-
- self.header_row = data[0]
- self.data = data[1:]
-
- def validate_template_content(self):
- column_count = len(self.header_row)
- if any([len(row) != column_count and len(row) != 0 for row in self.data]):
- frappe.throw(
- _("Number of columns does not match with data"), title=_("Invalid Template")
- )
-
- def remove_empty_rows_and_columns(self, raw_data):
- self.row_index_map = []
- removed_rows = []
- removed_columns = []
-
- # remove empty rows
- data_without_empty_rows = []
- for i, row in enumerate(raw_data):
- if all(v in INVALID_VALUES for v in row):
- # empty row
- removed_rows.append(i)
- else:
- data_without_empty_rows.append(row)
- self.row_index_map.append(i)
-
- # remove empty columns
- # a column with a header and no data is a valid column
- # a column with no header and no data will be removed
- first_row = data_without_empty_rows[0]
- for i, column in enumerate(first_row):
- column_values = [row[i] for row in data_without_empty_rows]
- if all(v in INVALID_VALUES for v in column_values):
- # empty column
- removed_columns.append(i)
-
- if removed_columns:
- data_without_empty_rows_and_columns = []
- # remove empty columns from data
- for i, row in enumerate(data_without_empty_rows):
- new_row = [v for j, v in enumerate(row) if j not in removed_columns]
- data_without_empty_rows_and_columns.append(new_row)
- else:
- data_without_empty_rows_and_columns = data_without_empty_rows
-
- return data_without_empty_rows_and_columns
-
- def get_data_for_import_preview(self):
- out = frappe._dict()
- out.data = list(self.rows)
- out.columns = self.columns
- out.warnings = self.warnings
- total_number_of_rows = len(out.data)
- if total_number_of_rows > MAX_ROWS_IN_PREVIEW:
- out.data = out.data[:MAX_ROWS_IN_PREVIEW]
- out.max_rows_exceeded = True
- out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW
- out.total_number_of_rows = total_number_of_rows
- return out
-
- def parse_data_from_template(self):
- columns = self.parse_columns_from_header_row()
- columns = self.detect_date_formats(columns)
- columns, data = self.add_serial_no_column(columns, self.data)
-
- self.columns = columns
- self.rows = data
-
- def parse_columns_from_header_row(self):
- remap_column = self.template_options.remap_column
- columns = []
-
- df_by_labels_and_fieldnames = self.build_fields_dict_for_column_matching()
-
- for i, header_title in enumerate(self.header_row):
- header_row_index = str(i)
- column_number = str(i + 1)
- skip_import = False
- fieldname = remap_column.get(header_row_index)
-
- if fieldname and fieldname != "Don't Import":
- df = df_by_labels_and_fieldnames.get(fieldname)
- self.warnings.append(
- {
- "col": column_number,
- "message": _("Mapping column {0} to field {1}").format(
- frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label)
- ),
- "type": "info",
- }
- )
- else:
- df = df_by_labels_and_fieldnames.get(header_title)
-
- if not df:
- skip_import = True
- else:
- skip_import = False
-
- if fieldname == "Don't Import":
- skip_import = True
- self.warnings.append(
- {
- "col": column_number,
- "message": _("Skipping column {0}").format(frappe.bold(header_title)),
- "type": "info",
- }
- )
- elif header_title and not df:
- self.warnings.append(
- {
- "col": column_number,
- "message": _("Cannot match column {0} with any field").format(
- frappe.bold(header_title)
- ),
- "type": "info",
- }
- )
- elif not header_title and not df:
- self.warnings.append(
- {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"}
- )
-
- columns.append(
- frappe._dict(
- df=df,
- skip_import=skip_import,
- header_title=header_title,
- column_number=column_number,
- index=i,
- )
- )
-
- return columns
-
- def build_fields_dict_for_column_matching(self):
- """
- Build a dict with various keys to match with column headers and value as docfield
- The keys can be label or fieldname
- {
- 'Customer': df1,
- 'customer': df1,
- 'Due Date': df2,
- 'due_date': df2,
- 'Item Code (Sales Invoice Item)': df3,
- 'Sales Invoice Item:item_code': df3,
- }
- """
- out = {}
-
- table_doctypes = [df.options for df in self.meta.get_table_fields()]
- doctypes = table_doctypes + [self.doctype]
- for doctype in doctypes:
- # name field
- name_key = "ID" if self.doctype == doctype else "ID ({})".format(doctype)
- name_df = frappe._dict(
- {
- "fieldtype": "Data",
- "fieldname": "name",
- "label": "ID",
- "reqd": self.import_type == UPDATE,
- "parent": doctype,
- }
- )
- out[name_key] = name_df
- out["name"] = name_df
-
- # other fields
- meta = frappe.get_meta(doctype)
- fields = self.get_standard_fields(doctype) + meta.fields
- for df in fields:
- fieldtype = df.fieldtype or "Data"
- parent = df.parent or self.doctype
- if fieldtype not in no_value_fields:
- # label as key
- label = (
- df.label if self.doctype == doctype else "{0} ({1})".format(df.label, parent)
- )
- out[label] = df
- # fieldname as key
- if self.doctype == doctype:
- out[df.fieldname] = df
- else:
- key = "{0}:{1}".format(doctype, df.fieldname)
- out[key] = df
-
- # if autoname is based on field
- # add an entry for "ID (Autoname Field)"
- autoname_field = self.get_autoname_field(self.doctype)
- if autoname_field:
- out["ID ({})".format(autoname_field.label)] = autoname_field
- # ID field should also map to the autoname field
- out["ID"] = autoname_field
- out["name"] = autoname_field
-
- return out
-
- def get_standard_fields(self, doctype):
- meta = frappe.get_meta(doctype)
- if meta.istable:
- standard_fields = [
- {"label": "Parent", "fieldname": "parent"},
- {"label": "Parent Type", "fieldname": "parenttype"},
- {"label": "Parent Field", "fieldname": "parentfield"},
- {"label": "Row Index", "fieldname": "idx"},
- ]
- else:
- standard_fields = [
- {"label": "Owner", "fieldname": "owner"},
- {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"},
- ]
-
- out = []
- for df in standard_fields:
- df = frappe._dict(df)
- df.parent = doctype
- out.append(df)
- return out
-
- def detect_date_formats(self, columns):
- for col in columns:
- if col.df and col.df.fieldtype in ["Date", "Time", "Datetime"]:
- col.date_format = self.guess_date_format_for_column(col, columns)
- return columns
-
- def add_serial_no_column(self, columns, data):
- columns_with_serial_no = [
- frappe._dict({"header_title": "Sr. No", "skip_import": True})
- ] + columns
-
- # update index for each column
- for i, col in enumerate(columns_with_serial_no):
- col.index = i
-
- data_with_serial_no = []
- for i, row in enumerate(data):
- data_with_serial_no.append([self.row_index_map[i] + 1] + row)
-
- return columns_with_serial_no, data_with_serial_no
-
- def parse_value(self, value, df):
- if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]:
- return value
-
- value = cstr(value)
-
- # convert boolean values to 0 or 1
- if df.fieldtype == "Check" and value.lower().strip() in [
- "t",
- "f",
- "true",
- "false",
- "yes",
- "no",
- "y",
- "n",
- ]:
- value = value.lower().strip()
- value = 1 if value in ["t", "true", "y", "yes"] else 0
-
- if df.fieldtype in ["Int", "Check"]:
- value = cint(value)
- elif df.fieldtype in ["Float", "Percent", "Currency"]:
- value = flt(value)
- elif df.fieldtype in ["Date", "Datetime"]:
- value = self.parse_date_format(value, df)
-
- return value
-
- def parse_date_format(self, value, df):
- date_format = self.get_date_format_for_df(df) or DATETIME_FORMAT
- try:
- return datetime.strptime(value, date_format)
- except ValueError:
- # ignore date values that dont match the format
- # import will break for these values later
- pass
- return value
-
- def get_date_format_for_df(self, df):
- return self._guessed_date_formats.get(df.parent + df.fieldname)
-
- def guess_date_format_for_column(self, column, columns):
- """ Guesses date format for a column by parsing the first 10 values in the column,
- getting the date format and then returning the one which has the maximum frequency
- """
- PARSE_ROW_COUNT = 10
-
- df = column.df
- key = df.parent + df.fieldname
-
- if not self._guessed_date_formats.get(key):
- matches = [col for col in columns if col.df == df]
- if not matches:
- self._guessed_date_formats[key] = None
- return
-
- column = matches[0]
- column_index = column.index
-
- date_values = [
- row[column_index] for row in self.data[:PARSE_ROW_COUNT] if row[column_index]
- ]
- date_formats = [
- guess_date_format(d) if isinstance(d, str) else None for d in date_values
- ]
- if not date_formats:
- return
- max_occurred_date_format = max(set(date_formats), key=date_formats.count)
- self._guessed_date_formats[key] = max_occurred_date_format
-
- return self._guessed_date_formats[key]
-
- def import_data(self):
- # set user lang for translations
- frappe.cache().hdel("lang", frappe.session.user)
- frappe.set_user_lang(frappe.session.user)
-
- if not self.console:
- self.data_import.db_set("template_warnings", "")
-
- # set flags
- frappe.flags.in_import = True
- frappe.flags.mute_emails = self.data_import.mute_emails
-
- # prepare a map for missing link field values
- self.prepare_missing_link_field_values()
-
- # parse docs from rows
- payloads = self.get_payloads_for_import()
-
- # dont import if there are non-ignorable warnings
- warnings = [w for w in self.warnings if w.get("type") != "info"]
- if warnings:
- if self.console:
- self.print_grouped_warnings(warnings)
- else:
- self.data_import.db_set("template_warnings", json.dumps(warnings))
- frappe.publish_realtime(
- "data_import_refresh", {"data_import": self.data_import.name}
- )
- return
-
- # setup import log
- if self.data_import.import_log:
- import_log = frappe.parse_json(self.data_import.import_log)
- else:
- import_log = []
-
- # remove previous failures from import log
- import_log = [l for l in import_log if l.get("success") == True]
-
- # get successfully imported rows
- imported_rows = []
- for log in import_log:
- log = frappe._dict(log)
- if log.success:
- imported_rows += log.row_indexes
-
- # start import
- total_payload_count = len(payloads)
- batch_size = frappe.conf.data_import_batch_size or 1000
-
- for batch_index, batched_payloads in enumerate(
- frappe.utils.create_batch(payloads, batch_size)
- ):
- for i, payload in enumerate(batched_payloads):
- doc = payload.doc
- row_indexes = [row[0] for row in payload.rows]
- current_index = (i + 1) + (batch_index * batch_size)
-
- if set(row_indexes).intersection(set(imported_rows)):
- print("Skipping imported rows", row_indexes)
- if total_payload_count > 5:
- frappe.publish_realtime(
- "data_import_progress",
- {
- "current": current_index,
- "total": total_payload_count,
- "skipping": True,
- "data_import": self.data_import.name,
- },
- )
- continue
-
- try:
- start = timeit.default_timer()
- doc = self.process_doc(doc)
- processing_time = timeit.default_timer() - start
- eta = self.get_eta(current_index, total_payload_count, processing_time)
-
- if total_payload_count > 5:
- frappe.publish_realtime(
- "data_import_progress",
- {
- "current": current_index,
- "total": total_payload_count,
- "docname": doc.name,
- "data_import": self.data_import.name,
- "success": True,
- "row_indexes": row_indexes,
- "eta": eta,
- },
- )
- if self.console:
- update_progress_bar(
- "Importing {0} records".format(total_payload_count),
- current_index,
- total_payload_count,
- )
- import_log.append(
- frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes)
- )
- # commit after every successful import
- frappe.db.commit()
-
- except Exception:
- import_log.append(
- frappe._dict(
- success=False,
- exception=frappe.get_traceback(),
- messages=frappe.local.message_log,
- row_indexes=row_indexes,
- )
- )
- frappe.clear_messages()
- # rollback if exception
- frappe.db.rollback()
-
- # set status
- failures = [l for l in import_log if l.get("success") == False]
- if len(failures) == total_payload_count:
- status = "Pending"
- elif len(failures) > 0:
- status = "Partial Success"
- else:
- status = "Success"
-
- if self.console:
- self.print_import_log(import_log)
- else:
- self.data_import.db_set("status", status)
- self.data_import.db_set("import_log", json.dumps(import_log))
-
- frappe.flags.in_import = False
- frappe.flags.mute_emails = False
- frappe.publish_realtime("data_import_refresh", {"data_import": self.data_import.name})
-
- return import_log
-
- def get_payloads_for_import(self):
- payloads = []
- # make a copy
- data = list(self.rows)
- while data:
- doc, rows, data = self.parse_next_row_for_import(data)
- payloads.append(frappe._dict(doc=doc, rows=rows))
- return payloads
-
- def parse_next_row_for_import(self, data):
- """
- Parses rows that make up a doc. A doc maybe built from a single row or multiple rows.
- Returns the doc, rows, and data without the rows.
- """
- doctypes = set([col.df.parent for col in self.columns if col.df and col.df.parent])
-
- # first row is included by default
- first_row = data[0]
- rows = [first_row]
-
- # if there are child doctypes, find the subsequent rows
- if len(doctypes) > 1:
- # subsequent rows either dont have any parent value set
- # or have the same value as the parent row
- # we include a row if either of conditions match
- parent_column_indexes = [
- col.index
- for col in self.columns
- if not col.skip_import and col.df and col.df.parent == self.doctype
- ]
- parent_row_values = [first_row[i] for i in parent_column_indexes]
-
- data_without_first_row = data[1:]
- for row in data_without_first_row:
- row_values = [row[i] for i in parent_column_indexes]
- # if the row is blank, it's a child row doc
- if all([v in INVALID_VALUES for v in row_values]):
- rows.append(row)
- continue
- # if the row has same values as parent row, it's a child row doc
- if row_values == parent_row_values:
- rows.append(row)
- continue
- # if any of those conditions dont match, it's the next doc
- break
-
- def get_column_indexes(doctype):
- return [
- col.index
- for col in self.columns
- if not col.skip_import and col.df and col.df.parent == doctype
- ]
-
- def validate_value(value, df):
- if df.fieldtype == "Select":
- select_options = df.get_select_options()
- if select_options and value not in select_options:
- options_string = ", ".join([frappe.bold(d) for d in select_options])
- msg = _("Value must be one of {0}").format(options_string)
- self.warnings.append(
- {
- "row": row_number,
- "field": df.as_dict(convert_dates_to_str=True),
- "message": msg,
- }
- )
- return
-
- elif df.fieldtype == "Link":
- d = self.get_missing_link_field_values(df.options)
- if value in d.missing_values and not d.one_mandatory:
- msg = _("Value {0} missing for {1}").format(
- frappe.bold(value), frappe.bold(df.options)
- )
- self.warnings.append(
- {
- "row": row_number,
- "field": df.as_dict(convert_dates_to_str=True),
- "message": msg,
- }
- )
- return value
-
- return value
-
- def parse_doc(doctype, docfields, values, row_number):
- doc = frappe._dict()
- if self.import_type == INSERT:
- # new_doc returns a dict with default values set
- doc = frappe.new_doc(doctype, as_dict=True)
-
- # remove standard fields and __islocal
- for key in frappe.model.default_fields + ("__islocal",):
- doc.pop(key, None)
-
- for df, value in zip(docfields, values):
- if value in INVALID_VALUES:
- value = None
-
- if value is not None:
- value = validate_value(value, df)
-
- if value is not None:
- doc[df.fieldname] = self.parse_value(value, df)
-
- is_table = frappe.get_meta(doctype).istable
- is_update = self.import_type == UPDATE
- if is_table and is_update and doc.get("name") in INVALID_VALUES:
- # for table rows being inserted in update
- # create a new doc with defaults set
- new_doc = frappe.new_doc(doctype, as_dict=True)
- new_doc.update(doc)
- doc = new_doc
-
- check_mandatory_fields(doctype, doc, row_number)
- return doc
-
- def check_mandatory_fields(doctype, doc, row_number):
- """If import type is Insert:
- Check for mandatory fields (except table fields) in doc
- if import type is Update:
- Check for name field or autoname field in doc
- """
- meta = frappe.get_meta(doctype)
- if self.import_type == UPDATE:
- if meta.istable:
- # when updating records with table rows,
- # there are two scenarios:
- # 1. if row 'name' is provided in the template
- # the table row will be updated
- # 2. if row 'name' is not provided
- # then a new row will be added
- # so we dont need to check for mandatory
- return
-
- id_field = self.get_id_field(doctype)
- if doc.get(id_field.fieldname) in INVALID_VALUES:
- self.warnings.append(
- {
- "row": row_number,
- "message": _("{0} is a mandatory field").format(id_field.label),
- }
- )
- return
-
- fields = [
- df
- for df in meta.fields
- if df.fieldtype not in table_fields
- and df.reqd
- and doc.get(df.fieldname) in INVALID_VALUES
- ]
-
- if not fields:
- return
-
- if len(fields) == 1:
- self.warnings.append(
- {
- "row": row_number,
- "message": _("{0} is a mandatory field").format(fields[0].label),
- }
- )
- else:
- fields_string = ", ".join([df.label for df in fields])
- self.warnings.append(
- {"row": row_number, "message": _("{0} are mandatory fields").format(fields_string)}
- )
-
- parsed_docs = {}
- for row in rows:
- for doctype in doctypes:
- if doctype == self.doctype and parsed_docs.get(doctype):
- # if parent doc is already parsed from the first row
- # then skip
- continue
-
- row_number = row[0]
- column_indexes = get_column_indexes(doctype)
- values = [row[i] for i in column_indexes]
-
- if all(v in INVALID_VALUES for v in values):
- # skip values if all of them are empty
- continue
-
- columns = [self.columns[i] for i in column_indexes]
- docfields = [col.df for col in columns]
- doc = parse_doc(doctype, docfields, values, row_number)
- parsed_docs[doctype] = parsed_docs.get(doctype, [])
- parsed_docs[doctype].append(doc)
-
- # build the doc with children
- doc = {}
- for doctype, docs in parsed_docs.items():
- if doctype == self.doctype:
- doc.update(docs[0])
- else:
- table_dfs = self.meta.get(
- "fields", {"options": doctype, "fieldtype": ["in", table_fields]}
- )
- if table_dfs:
- table_field = table_dfs[0]
- doc[table_field.fieldname] = docs
-
- # check if there is atleast one row for mandatory table fields
- mandatory_table_fields = [
- df
- for df in self.meta.fields
- if df.fieldtype in table_fields and df.reqd and len(doc.get(df.fieldname, [])) == 0
- ]
- if len(mandatory_table_fields) == 1:
- self.warnings.append(
- {
- "row": first_row[0],
- "message": _("There should be atleast one row for {0} table").format(
- mandatory_table_fields[0].label
- ),
- }
- )
- elif mandatory_table_fields:
- fields_string = ", ".join([df.label for df in mandatory_table_fields])
- message = _("There should be atleast one row for the following tables: {0}").format(
- fields_string
- )
- self.warnings.append({"row": first_row[0], "message": message})
-
- return doc, rows, data[len(rows) :]
-
- def process_doc(self, doc):
- if self.import_type == INSERT:
- return self.insert_record(doc)
- elif self.import_type == UPDATE:
- return self.update_record(doc)
-
- def insert_record(self, doc):
- self.create_missing_linked_records(doc)
-
- new_doc = frappe.new_doc(self.doctype)
- new_doc.update(doc)
- # name shouldn't be set when inserting a new record
- new_doc.set("name", None)
- new_doc.insert()
- if self.meta.is_submittable and self.data_import.submit_after_import:
- new_doc.submit()
- return new_doc
-
- def create_missing_linked_records(self, doc):
- """
- Finds fields that are of type Link, and creates the corresponding
- document automatically if it has only one mandatory field
- """
- link_values = []
-
- def get_link_fields(doc, doctype):
- for fieldname, value in doc.items():
- meta = frappe.get_meta(doctype)
- df = meta.get_field(fieldname)
- if not df:
- continue
- if df.fieldtype == "Link" and value not in INVALID_VALUES:
- link_values.append([df.options, value])
- elif df.fieldtype in table_fields:
- for row in value:
- get_link_fields(row, df.options)
-
- get_link_fields(doc, self.doctype)
-
- for link_doctype, link_value in link_values:
- d = self.missing_link_values.get(link_doctype)
- if d and d.one_mandatory and link_value in d.missing_values:
- # find the autoname field
- autoname_field = self.get_autoname_field(link_doctype)
- name_field = autoname_field.fieldname if autoname_field else "name"
- new_doc = frappe.new_doc(link_doctype)
- new_doc.set(name_field, link_value)
- new_doc.insert()
- d.missing_values.remove(link_value)
-
- def update_record(self, doc):
- id_fieldname = self.get_id_fieldname(self.doctype)
- id_value = doc[id_fieldname]
- existing_doc = frappe.get_doc(self.doctype, id_value)
- existing_doc.flags.updater_reference = {
- "doctype": self.data_import.doctype,
- "docname": self.data_import.name,
- "label": _("via Data Import"),
- }
- existing_doc.update(doc)
- existing_doc.save()
- return existing_doc
-
- def export_errored_rows(self):
- from frappe.utils.csvutils import build_csv_response
-
- if not self.data_import:
- return
-
- import_log = frappe.parse_json(self.data_import.import_log or "[]")
- failures = [l for l in import_log if l.get("success") == False]
- row_indexes = []
- for f in failures:
- row_indexes.extend(f.get("row_indexes", []))
-
- # de duplicate
- row_indexes = list(set(row_indexes))
- row_indexes.sort()
-
- header_row = [col.header_title for col in self.columns[1:]]
- rows = [header_row]
- rows += [row[1:] for row in self.rows if row[0] in row_indexes]
-
- build_csv_response(rows, self.doctype)
-
- def get_missing_link_field_values(self, doctype):
- return self.missing_link_values.get(doctype, {})
-
- def prepare_missing_link_field_values(self):
- columns = self.columns
- rows = self.rows
- link_column_indexes = [
- col.index for col in columns if col.df and col.df.fieldtype == "Link"
- ]
-
- self.missing_link_values = {}
- for index in link_column_indexes:
- col = columns[index]
- column_values = [row[index] for row in rows]
- values = set([v for v in column_values if v not in INVALID_VALUES])
- doctype = col.df.options
-
- missing_values = [value for value in values if not frappe.db.exists(doctype, value)]
- if self.missing_link_values.get(doctype):
- self.missing_link_values[doctype].missing_values += missing_values
- else:
- self.missing_link_values[doctype] = frappe._dict(
- missing_values=missing_values,
- one_mandatory=self.has_one_mandatory_field(doctype),
- df=col.df,
- )
-
- def get_eta(self, current, total, processing_time):
- remaining = total - current
- eta = processing_time * remaining
- if not self.last_eta or eta < self.last_eta:
- self.last_eta = eta
- return self.last_eta
-
- def has_one_mandatory_field(self, doctype):
- meta = frappe.get_meta(doctype)
- # get mandatory fields with default not set
- mandatory_fields = [df for df in meta.fields if df.reqd and not df.default]
- mandatory_fields_count = len(mandatory_fields)
- if meta.autoname and meta.autoname.lower() == "prompt":
- mandatory_fields_count += 1
- return mandatory_fields_count == 1
-
- def get_id_fieldname(self, doctype):
- return self.get_id_field(doctype).fieldname
-
- def get_id_field(self, doctype):
- autoname_field = self.get_autoname_field(doctype)
- if autoname_field:
- return autoname_field
- return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"})
-
- def get_autoname_field(self, doctype):
- meta = frappe.get_meta(doctype)
- if meta.autoname and meta.autoname.startswith("field:"):
- fieldname = meta.autoname[len("field:") :]
- return meta.get_field(fieldname)
-
- def print_grouped_warnings(self, warnings):
- warnings_by_row = {}
- other_warnings = []
- for w in warnings:
- if w.get("row"):
- warnings_by_row.setdefault(w.get("row"), []).append(w)
- else:
- other_warnings.append(w)
-
- for row_number, warnings in warnings_by_row.items():
- print("Row {0}".format(row_number))
- for w in warnings:
- print(w.get("message"))
-
- for w in other_warnings:
- print(w.get("message"))
-
- def print_import_log(self, import_log):
- failed_records = [l for l in import_log if not l.success]
- successful_records = [l for l in import_log if l.success]
-
- if successful_records:
- print(
- "Successfully imported {0} records out of {1}".format(
- len(successful_records), len(import_log)
- )
- )
-
- if failed_records:
- print("Failed to import {0} records".format(len(failed_records)))
- file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now())
- print("Check {0} for errors".format(os.path.join("sites", file_name)))
- text = ""
- for w in failed_records:
- text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", [])))
- text += "Messages:\n{0}\n".format("\n".join(w.get("messages", [])))
- text += "Traceback:\n{0}\n\n".format(w.get("exception"))
-
- with open(file_name, "w") as f:
- f.write(text)
-
-
-DATE_FORMATS = [
- r"%d-%m-%Y",
- r"%m-%d-%Y",
- r"%Y-%m-%d",
- r"%d-%m-%y",
- r"%m-%d-%y",
- r"%y-%m-%d",
- r"%d/%m/%Y",
- r"%m/%d/%Y",
- r"%Y/%m/%d",
- r"%d/%m/%y",
- r"%m/%d/%y",
- r"%y/%m/%d",
- r"%d.%m.%Y",
- r"%m.%d.%Y",
- r"%Y.%m.%d",
- r"%d.%m.%y",
- r"%m.%d.%y",
- r"%y.%m.%d",
-]
-
-TIME_FORMATS = [
- r"%H:%M:%S.%f",
- r"%H:%M:%S",
- r"%H:%M",
- r"%I:%M:%S.%f %p",
- r"%I:%M:%S %p",
- r"%I:%M %p",
-]
-
-
-def guess_date_format(date_string):
- date_string = date_string.strip()
-
- _date = None
- _time = None
-
- if " " in date_string:
- _date, _time = date_string.split(" ", 1)
- else:
- _date = date_string
-
- date_format = None
- time_format = None
-
- for f in DATE_FORMATS:
- try:
- # if date is parsed without any exception
- # capture the date format
- datetime.strptime(_date, f)
- date_format = f
- break
- except ValueError:
- pass
-
- if _time:
- for f in TIME_FORMATS:
- try:
- # if time is parsed without any exception
- # capture the time format
- datetime.strptime(_time, f)
- time_format = f
- break
- except ValueError:
- pass
-
- full_format = date_format
- if time_format:
- full_format += " " + time_format
- return full_format
-
-
-def import_data(doctype, file_path):
- i = Importer(doctype, file_path)
- i.import_data()
diff --git a/frappe/core/doctype/data_import/test_data_import.js b/frappe/core/doctype/data_import/test_data_import.js
deleted file mode 100644
index fbce7781b6..0000000000
--- a/frappe/core/doctype/data_import/test_data_import.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/* eslint-disable */
-// rename this file from _test_[name] to test_[name] to activate
-// and remove above this line
-
-QUnit.test("test: Data Import", function (assert) {
- let done = assert.async();
-
- // number of asserts
- assert.expect(1);
-
- frappe.run_serially([
- // insert a new Data Import
- () => frappe.tests.make('Data Import', [
- // values to be set
- {key: 'value'}
- ]),
- () => {
- assert.equal(cur_frm.doc.key, 'value');
- },
- () => done()
- ]);
-
-});
diff --git a/frappe/core/doctype/data_import/test_data_import.py b/frappe/core/doctype/data_import/test_data_import.py
index 406ea08958..15fd57744a 100644
--- a/frappe/core/doctype/data_import/test_data_import.py
+++ b/frappe/core/doctype/data_import/test_data_import.py
@@ -1,100 +1,10 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2017, Frappe Technologies and Contributors
+# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
-import frappe, unittest
-from frappe.core.doctype.data_export import exporter
-from frappe.core.doctype.data_import import importer
-from frappe.utils.csvutils import read_csv_content
+# import frappe
+import unittest
class TestDataImport(unittest.TestCase):
- def test_export(self):
- exporter.export_data("User", all_doctypes=True, template=True)
- content = read_csv_content(frappe.response.result)
- self.assertTrue(content[1][1], "User")
-
- def test_export_with_data(self):
- exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
- content = read_csv_content(frappe.response.result)
- self.assertTrue(content[1][1], "User")
- self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
-
- def test_export_with_all_doctypes(self):
- exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True)
- content = read_csv_content(frappe.response.result)
- self.assertTrue(content[1][1], "User")
- self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
- self.assertEqual(content[13][0], "DocType:")
- self.assertEqual(content[13][1], "User")
- self.assertTrue("Has Role" in content[13])
-
- def test_import(self):
- if frappe.db.exists("Blog Category", "test-category"):
- frappe.delete_doc("Blog Category", "test-category")
-
- exporter.export_data("Blog Category", all_doctypes=True, template=True)
- content = read_csv_content(frappe.response.result)
- content.append(["", "test-category", "Test Cateogry"])
- importer.upload(content)
- self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category")
-
- # export with data
- exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True)
- content = read_csv_content(frappe.response.result)
-
- # overwrite
- content[-1][3] = "New Title"
- importer.upload(content, overwrite=True)
- self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title")
-
- def test_import_only_children(self):
- user_email = "test_import_userrole@example.com"
- if frappe.db.exists("User", user_email):
- frappe.delete_doc("User", user_email, force=True)
-
- frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert()
-
- exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
- content = read_csv_content(frappe.response.result)
- content.append(["", "test_import_userrole@example.com", "Blogger"])
- importer.upload(content)
-
- user = frappe.get_doc("User", user_email)
- self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"}))
- self.assertTrue(user.get("roles")[0].role, "Blogger")
-
- # overwrite
- exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
- content = read_csv_content(frappe.response.result)
- content.append(["", "test_import_userrole@example.com", "Website Manager"])
- importer.upload(content, overwrite=True)
-
- user = frappe.get_doc("User", user_email)
- self.assertEqual(len(user.get("roles")), 1)
- self.assertTrue(user.get("roles")[0].role, "Website Manager")
-
- def test_import_with_children(self): #pylint: disable=R0201
- if frappe.db.exists("Event", "EV00001"):
- frappe.delete_doc("Event", "EV00001")
- exporter.export_data("Event", all_doctypes="Yes", template=True)
- content = read_csv_content(frappe.response.result)
-
- content.append([None] * len(content[-2]))
- content[-1][1] = "__Test Event with children"
- content[-1][2] = "Private"
- content[-1][3] = "2014-01-01 10:00:00.000000"
- importer.upload(content)
-
- frappe.get_doc("Event", {"subject":"__Test Event with children"})
-
- def test_excel_import(self):
- if frappe.db.exists("Event", "EV00001"):
- frappe.delete_doc("Event", "EV00001")
-
- exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel")
- from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
- content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent)
- content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0])
- importer.upload(content)
- self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name"))
\ No newline at end of file
+ pass
diff --git a/frappe/core/doctype/data_import/test_exporter.py b/frappe/core/doctype/data_import/test_exporter.py
new file mode 100644
index 0000000000..8415af2e63
--- /dev/null
+++ b/frappe/core/doctype/data_import/test_exporter.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019, Frappe Technologies and Contributors
+# See license.txt
+from __future__ import unicode_literals
+
+import unittest
+import frappe
+from frappe.core.doctype.data_import.exporter import Exporter
+from frappe.core.doctype.data_import.test_importer import (
+ create_doctype_if_not_exists,
+)
+
+doctype_name = 'DocType for Export'
+
+class TestExporter(unittest.TestCase):
+ def setUp(self):
+ create_doctype_if_not_exists(doctype_name)
+
+ def test_exports_specified_fields(self):
+ if not frappe.db.exists(doctype_name, "Test"):
+ doc = frappe.get_doc(
+ doctype=doctype_name,
+ title="Test",
+ description="Test Description",
+ table_field_1=[
+ {"child_title": "Child Title 1", "child_description": "Child Description 1"},
+ {"child_title": "Child Title 2", "child_description": "Child Description 2"},
+ ],
+ table_field_2=[
+ {"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"},
+ ],
+ table_field_1_again=[
+ {
+ "child_title": "Child Title 1 Again",
+ "child_description": "Child Description 1 Again",
+ },
+ ],
+ ).insert()
+ else:
+ doc = frappe.get_doc(doctype_name, "Test")
+
+ e = Exporter(
+ doctype_name,
+ export_fields={
+ doctype_name: ["title", "description", "number", "another_number"],
+ "table_field_1": ["name", "child_title", "child_description"],
+ "table_field_2": ["child_2_date", "child_2_number"],
+ "table_field_1_again": [
+ "child_title",
+ "child_date",
+ "child_number",
+ "child_another_number",
+ ],
+ },
+ export_data=True,
+ )
+ csv_array = e.get_csv_array()
+ header_row = csv_array[0]
+
+ self.assertEqual(
+ header_row,
+ [
+ "Title",
+ "Description",
+ "Number",
+ "another_number",
+ "ID (Table Field 1)",
+ "Child Title (Table Field 1)",
+ "Child Description (Table Field 1)",
+ "Child 2 Date (Table Field 2)",
+ "Child 2 Number (Table Field 2)",
+ "Child Title (Table Field 1 Again)",
+ "Child Date (Table Field 1 Again)",
+ "Child Number (Table Field 1 Again)",
+ "table_field_1_again.child_another_number",
+ ],
+ )
+
+ table_field_1_row_1_name = doc.table_field_1[0].name
+ table_field_1_row_2_name = doc.table_field_1[1].name
+ # fmt: off
+ self.assertEqual(
+ csv_array[1],
+ ["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0]
+ )
+ self.assertEqual(
+ csv_array[2],
+ ["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""],
+ )
+ # fmt: on
+ self.assertEqual(len(csv_array), 3)
+
+ def test_export_csv_response(self):
+ e = Exporter(
+ doctype_name,
+ export_fields={doctype_name: ["title", "description"]},
+ export_data=True,
+ file_type="CSV"
+ )
+ e.build_response()
+
+ self.assertTrue(frappe.response['result'])
+ self.assertEqual(frappe.response['doctype'], doctype_name)
+ self.assertEqual(frappe.response['type'], "csv")
diff --git a/frappe/core/doctype/data_import/test_exporter_new.py b/frappe/core/doctype/data_import/test_exporter_new.py
deleted file mode 100644
index 0d3aedb033..0000000000
--- a/frappe/core/doctype/data_import/test_exporter_new.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2019, Frappe Technologies and Contributors
-# See license.txt
-from __future__ import unicode_literals
-
-import unittest
-import frappe
-from frappe.core.doctype.data_import.exporter_new import Exporter
-
-
-class TestExporter(unittest.TestCase):
- def test_exports_mandatory_fields(self):
- e = Exporter('Web Page', export_fields='Mandatory')
- csv_array = e.get_csv_array()
- header_row = csv_array[0]
- self.assertEqual(header_row, ['ID', 'Title'])
-
-
- def test_exports_all_fields(self):
- e = Exporter('Web Page', export_fields='All')
- csv_array = e.get_csv_array()
- header = csv_array[0]
- self.assertEqual(len(header), 37)
-
-
- def test_exports_selected_fields(self):
- export_fields = {
- 'Web Page': ['title', 'route', 'published']
- }
- e = Exporter('Web Page', export_fields=export_fields)
- csv_array = e.get_csv_array()
- header = csv_array[0]
- self.assertEqual(header, ['Title', 'Route', 'Published'])
-
-
- def test_exports_data(self):
- e = Exporter('ToDo', export_fields='All', export_data=True)
- todo_records = frappe.db.count('ToDo')
- csv_array = e.get_csv_array()
- self.assertEqual(len(csv_array), todo_records + 1)
diff --git a/frappe/core/doctype/data_import/test_importer.py b/frappe/core/doctype/data_import/test_importer.py
new file mode 100644
index 0000000000..bdadad7890
--- /dev/null
+++ b/frappe/core/doctype/data_import/test_importer.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019, Frappe Technologies and Contributors
+# See license.txt
+from __future__ import unicode_literals
+
+import unittest
+import frappe
+from frappe.utils import getdate
+
+doctype_name = 'DocType for Import'
+
+class TestImporter(unittest.TestCase):
+ def setUp(self):
+ create_doctype_if_not_exists(doctype_name)
+
+ def test_data_import_from_file(self):
+ import_file = get_import_file('sample_import_file')
+ data_import = self.get_importer(doctype_name, import_file)
+ data_import.start_import()
+
+ doc1 = frappe.get_doc(doctype_name, 'Test')
+ doc2 = frappe.get_doc(doctype_name, 'Test 2')
+ doc3 = frappe.get_doc(doctype_name, 'Test 3')
+
+ self.assertEqual(doc1.description, 'test description')
+ self.assertEqual(doc1.number, 1)
+
+ self.assertEqual(doc1.table_field_1[0].child_title, 'child title')
+ self.assertEqual(doc1.table_field_1[0].child_description, 'child description')
+
+ self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2')
+ self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2')
+
+ self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child')
+ self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30'))
+ self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5)
+
+ self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again')
+ self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2')
+ self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22'))
+
+ self.assertEqual(doc2.description, 'test description 2')
+ self.assertEqual(doc3.another_number, 5)
+
+ def test_data_import_preview(self):
+ import_file = get_import_file('sample_import_file')
+ data_import = self.get_importer(doctype_name, import_file)
+ preview = data_import.get_preview_from_template()
+
+ self.assertEqual(len(preview.data), 4)
+ self.assertEqual(len(preview.columns), 15)
+
+ def test_data_import_without_mandatory_values(self):
+ import_file = get_import_file('sample_import_file_without_mandatory')
+ data_import = self.get_importer(doctype_name, import_file)
+ data_import.start_import()
+ data_import.reload()
+ warnings = frappe.parse_json(data_import.template_warnings)
+
+ self.assertEqual(warnings[0]['row'], 2)
+ self.assertEqual(warnings[0]['message'], "Child Title (Table Field 1) is a mandatory field")
+
+ self.assertEqual(warnings[1]['row'], 3)
+ self.assertEqual(warnings[1]['message'], "Child Title (Table Field 1 Again) is a mandatory field")
+
+ self.assertEqual(warnings[2]['row'], 4)
+ self.assertEqual(warnings[2]['message'], "Title is a mandatory field")
+
+ def test_data_import_update(self):
+ if not frappe.db.exists(doctype_name, 'Test 26'):
+ frappe.get_doc(
+ doctype=doctype_name,
+ title='Test 26'
+ ).insert()
+
+ import_file = get_import_file('sample_import_file_for_update')
+ data_import = self.get_importer(doctype_name, import_file, update=True)
+ data_import.start_import()
+
+ updated_doc = frappe.get_doc(doctype_name, 'Test 26')
+ self.assertEqual(updated_doc.description, 'test description')
+ self.assertEqual(updated_doc.table_field_1[0].child_title, 'child title')
+ self.assertEqual(updated_doc.table_field_1[0].child_description, 'child description')
+ self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again')
+
+ def get_importer(self, doctype, import_file, update=False):
+ data_import = frappe.new_doc('Data Import')
+ data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records'
+ data_import.reference_doctype = doctype
+ data_import.import_file = import_file.file_url
+ data_import.insert()
+
+ return data_import
+
+def create_doctype_if_not_exists(doctype_name, force=False):
+ if force:
+ frappe.delete_doc_if_exists('DocType', doctype_name)
+ frappe.delete_doc_if_exists('DocType', 'Child 1 of ' + doctype_name)
+ frappe.delete_doc_if_exists('DocType', 'Child 2 of ' + doctype_name)
+
+ if frappe.db.exists('DocType', doctype_name):
+ return
+
+ # Child Table 1
+ table_1_name = 'Child 1 of ' + doctype_name
+ frappe.get_doc({
+ 'doctype': 'DocType',
+ 'name': table_1_name,
+ 'module': 'Custom',
+ 'custom': 1,
+ 'istable': 1,
+ 'fields': [
+ {'label': 'Child Title', 'fieldname': 'child_title', 'reqd': 1, 'fieldtype': 'Data'},
+ {'label': 'Child Description', 'fieldname': 'child_description', 'fieldtype': 'Small Text'},
+ {'label': 'Child Date', 'fieldname': 'child_date', 'fieldtype': 'Date'},
+ {'label': 'Child Number', 'fieldname': 'child_number', 'fieldtype': 'Int'},
+ {'label': 'Child Number', 'fieldname': 'child_another_number', 'fieldtype': 'Int'},
+ ]
+ }).insert()
+
+ # Child Table 2
+ table_2_name = 'Child 2 of ' + doctype_name
+ frappe.get_doc({
+ 'doctype': 'DocType',
+ 'name': table_2_name,
+ 'module': 'Custom',
+ 'custom': 1,
+ 'istable': 1,
+ 'fields': [
+ {'label': 'Child 2 Title', 'fieldname': 'child_2_title', 'reqd': 1, 'fieldtype': 'Data'},
+ {'label': 'Child 2 Description', 'fieldname': 'child_2_description', 'fieldtype': 'Small Text'},
+ {'label': 'Child 2 Date', 'fieldname': 'child_2_date', 'fieldtype': 'Date'},
+ {'label': 'Child 2 Number', 'fieldname': 'child_2_number', 'fieldtype': 'Int'},
+ {'label': 'Child 2 Number', 'fieldname': 'child_2_another_number', 'fieldtype': 'Int'},
+ ]
+ }).insert()
+
+ # Main Table
+ frappe.get_doc({
+ 'doctype': 'DocType',
+ 'name': doctype_name,
+ 'module': 'Custom',
+ 'custom': 1,
+ 'autoname': 'field:title',
+ 'fields': [
+ {'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'},
+ {'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'},
+ {'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'},
+ {'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'},
+ {'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'},
+ {'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name},
+ {'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name},
+ {'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name},
+ ],
+ 'permissions': [
+ {'role': 'System Manager'}
+ ]
+ }).insert()
+
+
+def get_import_file(csv_file_name, force=False):
+ file_name = csv_file_name + '.csv'
+ _file = frappe.db.exists('File', {'file_name': file_name})
+ if force and _file:
+ frappe.delete_doc_if_exists('File', _file)
+
+ if frappe.db.exists('File', {'file_name': file_name}):
+ f = frappe.get_doc('File', {'file_name': file_name})
+ else:
+ full_path = get_csv_file_path(file_name)
+ f = frappe.get_doc(
+ doctype='File',
+ content=frappe.read_file(full_path),
+ file_name=file_name,
+ is_private=1
+ )
+ f.save(ignore_permissions=True)
+
+ return f
+
+
+def get_csv_file_path(file_name):
+ return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name)
diff --git a/frappe/core/doctype/data_import/test_importer_new.py b/frappe/core/doctype/data_import/test_importer_new.py
deleted file mode 100644
index d6349daa55..0000000000
--- a/frappe/core/doctype/data_import/test_importer_new.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2019, Frappe Technologies and Contributors
-# See license.txt
-from __future__ import unicode_literals
-
-import datetime
-import unittest
-import frappe
-from frappe.core.doctype.data_import.importer_new import Importer
-
-content_empty_rows = '''title,start_date,idx,show_title
-,,,
-est phasellus sit amet,5/20/2019,52,1
-nibh in,7/29/2019,77,1
-'''
-
-content_mandatory_missing = '''title,start_date,idx,show_title
-,5/20/2019,52,1
-'''
-
-content_convert_value = '''title,start_date,idx,show_title
-est phasellus sit amet,5/20/2019,52,True
-'''
-
-content_invalid_column = '''title,start_date,idx,show_title,invalid_column
-est phasellus sit amet,5/20/2019,52,True,invalid value
-'''
-
-
-class TestImporter(unittest.TestCase):
- def test_should_skip_empty_rows(self):
- i = self.get_importer('Web Page', content=content_empty_rows)
- payloads = i.get_payloads_for_import()
- row_to_be_imported = []
- for p in payloads:
- row_to_be_imported += [row[0] for row in p.rows]
- self.assertEqual(len(row_to_be_imported), 2)
-
- def test_should_throw_if_mandatory_is_missing(self):
- i = self.get_importer('Web Page', content=content_mandatory_missing)
- i.import_data()
- warning = i.warnings[0]
- self.assertTrue('Title is a mandatory field' in warning['message'])
-
- def test_should_convert_value_based_on_fieldtype(self):
- i = self.get_importer('Web Page', content=content_convert_value)
- payloads = i.get_payloads_for_import()
- doc = payloads[0].doc
-
- self.assertEqual(type(doc['show_title']), int)
- self.assertEqual(type(doc['idx']), int)
- self.assertEqual(type(doc['start_date']), datetime.datetime)
-
- def test_should_ignore_invalid_columns(self):
- i = self.get_importer('Web Page', content=content_invalid_column)
- payloads = i.get_payloads_for_import()
- doc = payloads[0].doc
-
- self.assertTrue('invalid_column' not in doc)
- self.assertTrue('title' in doc)
-
- def test_should_import_valid_template(self):
- title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8))
- content_valid_content = '''title,start_date,idx,show_title
-{0},5/20/2019,52,1'''.format(title)
- i = self.get_importer('Web Page', content=content_valid_content)
- import_log = i.import_data()
- log = import_log[0]
- self.assertTrue(log.success)
- doc = frappe.get_doc('Web Page', { 'title': title })
- self.assertEqual(frappe.utils.get_datetime_str(doc.start_date),
- frappe.utils.get_datetime_str('2019-05-20'))
-
- def get_importer(self, doctype, content):
- data_import = frappe.new_doc('Data Import Beta')
- data_import.import_type = 'Insert New Records'
- i = Importer(doctype, content=content, data_import=data_import)
- return i
diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.js b/frappe/core/doctype/data_import_beta/data_import_beta.js
deleted file mode 100644
index 527dbd7d0c..0000000000
--- a/frappe/core/doctype/data_import_beta/data_import_beta.js
+++ /dev/null
@@ -1,511 +0,0 @@
-// Copyright (c) 2019, Frappe Technologies and contributors
-// For license information, please see license.txt
-
-frappe.ui.form.on('Data Import Beta', {
- setup(frm) {
- frappe.realtime.on('data_import_refresh', ({ data_import }) => {
- frm.import_in_progress = false;
- if (data_import !== frm.doc.name) return;
- frappe.model.clear_doc('Data Import Beta', frm.doc.name);
- frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
- frm.refresh();
- });
- });
- frappe.realtime.on('data_import_progress', data => {
- frm.import_in_progress = true;
- if (data.data_import !== frm.doc.name) {
- return;
- }
- let percent = Math.floor((data.current * 100) / data.total);
- let seconds = Math.floor(data.eta);
- let minutes = Math.floor(data.eta / 60);
- let eta_message =
- // prettier-ignore
- seconds < 60
- ? __('About {0} seconds remaining', [seconds])
- : minutes === 1
- ? __('About {0} minute remaining', [minutes])
- : __('About {0} minutes remaining', [minutes]);
-
- let message;
- if (data.success) {
- let message_args = [data.current, data.total, eta_message];
- message =
- frm.doc.import_type === 'Insert New Records'
- ? __('Importing {0} of {1}, {2}', message_args)
- : __('Updating {0} of {1}, {2}', message_args);
- }
- if (data.skipping) {
- message = __('Skipping {0} of {1}, {2}', [
- data.current,
- data.total,
- eta_message
- ]);
- }
- frm.dashboard.show_progress(__('Import Progress'), percent, message);
- frm.page.set_indicator(__('In Progress'), 'orange');
-
- // hide progress when complete
- if (data.current === data.total) {
- setTimeout(() => {
- frm.dashboard.hide();
- frm.refresh();
- }, 2000);
- }
- });
-
- frm.set_query('reference_doctype', () => {
- return {
- filters: {
- allow_import: 1
- }
- };
- });
-
- frm.get_field('import_file').df.options = {
- restrictions: {
- allowed_file_types: ['.csv', '.xls', '.xlsx']
- }
- };
- },
-
- refresh(frm) {
- frm.page.hide_icon_group();
- frm.trigger('update_indicators');
- frm.trigger('import_file');
- frm.trigger('show_import_log');
- frm.trigger('show_import_warnings');
- frm.trigger('toggle_submit_after_import');
- frm.trigger('show_import_status');
- frm.trigger('show_report_error_button');
-
- if (frm.doc.status === 'Partial Success') {
- frm.add_custom_button(__('Export Errored Rows'), () =>
- frm.trigger('export_errored_rows')
- );
- }
-
- if (frm.doc.status.includes('Success')) {
- frm.add_custom_button(
- __('Go to {0} List', [frm.doc.reference_doctype]),
- () => frappe.set_route('List', frm.doc.reference_doctype)
- );
- }
-
- frm.disable_save();
- if (frm.doc.status !== 'Success') {
- if (!frm.is_new() && frm.doc.import_file) {
- let label =
- frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
- frm.page.set_primary_action(label, () => frm.events.start_import(frm));
- } else {
- frm.page.set_primary_action(__('Save'), () => frm.save());
- }
- }
- },
-
- update_indicators(frm) {
- const indicator = frappe.get_indicator(frm.doc);
- if (indicator) {
- frm.page.set_indicator(indicator[0], indicator[1]);
- } else {
- frm.page.clear_indicator();
- }
- },
-
- show_import_status(frm) {
- let import_log = JSON.parse(frm.doc.import_log || '[]');
- let successful_records = import_log.filter(log => log.success);
- let failed_records = import_log.filter(log => !log.success);
- if (successful_records.length === 0) return;
-
- let message;
- if (failed_records.length === 0) {
- let message_args = [successful_records.length];
- if (frm.doc.import_type === 'Insert New Records') {
- message =
- successful_records.length > 1
- ? __('Successfully imported {0} records.', message_args)
- : __('Successfully imported {0} record.', message_args);
- } else {
- message =
- successful_records.length > 1
- ? __('Successfully updated {0} records.', message_args)
- : __('Successfully updated {0} record.', message_args);
- }
- } else {
- let message_args = [successful_records.length, import_log.length];
- if (frm.doc.import_type === 'Insert New Records') {
- message =
- successful_records.length > 1
- ? __('Successfully imported {0} records out of {1}.', message_args)
- : __('Successfully imported {0} record out of {1}.', message_args);
- } else {
- message =
- successful_records.length > 1
- ? __('Successfully updated {0} records out of {1}.', message_args)
- : __('Successfully updated {0} record out of {1}.', message_args);
- }
- }
- frm.dashboard.set_headline(message);
- },
-
- show_report_error_button(frm) {
- if (frm.doc.status === 'Error') {
- frappe.db
- .get_list('Error Log', {
- filters: { method: frm.doc.name },
- fields: ['method', 'error'],
- order_by: 'creation desc',
- limit: 1
- })
- .then(result => {
- if (result.length > 0) {
- frm.add_custom_button('Report Error', () => {
- let fake_xhr = {
- responseText: JSON.stringify({
- exc: result[0].error
- })
- };
- frappe.request.report_error(fake_xhr, {});
- });
- }
- });
- }
- },
-
- start_import(frm) {
- frm
- .call({
- method: 'form_start_import',
- args: { data_import: frm.doc.name },
- btn: frm.page.btn_primary
- })
- .then(r => {
- if (r.message === true) {
- frm.disable_save();
- }
- });
- },
-
- download_template(frm) {
- if (
- frm.data_exporter &&
- frm.data_exporter.doctype === frm.doc.reference_doctype
- ) {
- frm.data_exporter.dialog.show();
- set_export_records();
- } else {
- frappe.require('/assets/js/data_import_tools.min.js', () => {
- frm.data_exporter = new frappe.data_import.DataExporter(
- frm.doc.reference_doctype
- );
- set_export_records();
- });
- }
-
- function set_export_records() {
- if (frm.doc.import_type === 'Insert New Records') {
- frm.data_exporter.dialog.set_value('export_records', 'blank_template');
- } else {
- frm.data_exporter.dialog.set_value('export_records', 'all');
- }
- // Force ID field to be exported when updating existing records
- let id_field = frm.data_exporter.dialog.get_field(
- frm.doc.reference_doctype
- ).options[0];
- if (id_field.value === 'name' && id_field.$checkbox) {
- id_field.$checkbox
- .find('input')
- .prop('disabled', frm.doc.import_type === 'Update Existing Records');
- }
- }
- },
-
- reference_doctype(frm) {
- frm.trigger('toggle_submit_after_import');
- },
-
- toggle_submit_after_import(frm) {
- frm.toggle_display('submit_after_import', false);
- let doctype = frm.doc.reference_doctype;
- if (doctype) {
- frappe.model.with_doctype(doctype, () => {
- let meta = frappe.get_meta(doctype);
- frm.toggle_display('submit_after_import', meta.is_submittable);
- });
- }
- },
-
- import_file(frm) {
- frm.toggle_display('section_import_preview', frm.doc.import_file);
- if (!frm.doc.import_file) {
- frm.get_field('import_preview').$wrapper.empty();
- return;
- }
-
- // load import preview
- frm.get_field('import_preview').$wrapper.empty();
- $('')
- .html(__('Loading import file...'))
- .appendTo(frm.get_field('import_preview').$wrapper);
-
- frm
- .call({
- method: 'get_preview_from_template',
- args: { data_import: frm.doc.name },
- error_handlers: {
- TimestampMismatchError() {
- // ignore this error
- }
- }
- })
- .then(r => {
- let preview_data = r.message;
- frm.events.show_import_preview(frm, preview_data);
- frm.events.show_import_warnings(frm, preview_data);
- });
- },
-
- show_import_preview(frm, preview_data) {
- let import_log = JSON.parse(frm.doc.import_log || '[]');
-
- if (
- frm.import_preview &&
- frm.import_preview.doctype === frm.doc.reference_doctype
- ) {
- frm.import_preview.preview_data = preview_data;
- frm.import_preview.import_log = import_log;
- frm.import_preview.refresh();
- return;
- }
-
- frappe.require('/assets/js/data_import_tools.min.js', () => {
- frm.import_preview = new frappe.data_import.ImportPreview({
- wrapper: frm.get_field('import_preview').$wrapper,
- doctype: frm.doc.reference_doctype,
- preview_data,
- import_log,
- frm,
- events: {
- remap_column(changed_map) {
- let template_options = JSON.parse(frm.doc.template_options || '{}');
- template_options.remap_column = template_options.remap_column || {};
- Object.assign(template_options.remap_column, changed_map);
- frm.set_value('template_options', JSON.stringify(template_options));
- frm.save().then(() => frm.trigger('import_file'));
- }
- }
- });
- });
- },
-
- export_errored_rows(frm) {
- open_url_post(
- '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template',
- {
- data_import_name: frm.doc.name
- }
- );
- },
-
- show_import_warnings(frm, preview_data) {
- let warnings = JSON.parse(frm.doc.template_warnings || '[]');
- warnings = warnings.concat(preview_data.warnings || []);
-
- frm.toggle_display('import_warnings_section', warnings.length > 0);
- if (warnings.length === 0) {
- frm.get_field('import_warnings').$wrapper.html('');
- return;
- }
-
- // group warnings by row
- let warnings_by_row = {};
- let other_warnings = [];
- for (let warning of warnings) {
- if (warning.row) {
- warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
- warnings_by_row[warning.row].push(warning);
- } else {
- other_warnings.push(warning);
- }
- }
-
- let html = '';
- html += Object.keys(warnings_by_row)
- .map(row_number => {
- let message = warnings_by_row[row_number]
- .map(w => {
- if (w.field) {
- let label =
- w.field.label +
- (w.field.parent !== frm.doc.reference_doctype
- ? ` (${w.field.parent})`
- : '');
- return `${label}: ${w.message}`;
- }
- return `${w.message}`;
- })
- .join('');
- return `
-
-
${__('Row {0}', [row_number])}
-
-
- `;
- })
- .join('');
-
- html += other_warnings
- .map(warning => {
- let header = '';
- if (warning.col) {
- header = __('Column {0}', [warning.col]);
- }
- return `
-
-
${header}
-
${warning.message}
-
- `;
- })
- .join('');
- frm.get_field('import_warnings').$wrapper.html(`
-
- `);
- },
-
- show_failed_logs(frm) {
- frm.trigger('show_import_log');
- },
-
- show_import_log(frm) {
- let import_log = JSON.parse(frm.doc.import_log || '[]');
- let logs = import_log;
- frm.toggle_display('import_log', false);
- frm.toggle_display('import_log_section', logs.length > 0);
-
- if (logs.length === 0) {
- frm.get_field('import_log_preview').$wrapper.empty();
- return;
- }
-
- let rows = logs
- .map(log => {
- let html = '';
- if (log.success) {
- if (frm.doc.import_type === 'Insert New Records') {
- html = __('Successfully imported {0}', [
- `${frappe.utils.get_form_link(
- frm.doc.reference_doctype,
- log.docname,
- true
- )}`
- ]);
- } else {
- html = __('Successfully updated {0}', [
- `${frappe.utils.get_form_link(
- frm.doc.reference_doctype,
- log.docname,
- true
- )}`
- ]);
- }
- } else {
- let messages = log.messages
- .map(JSON.parse)
- .map(m => {
- let title = m.title ? `${m.title}` : '';
- let message = m.message ? `${m.message}
` : '';
- return title + message;
- })
- .join('');
- let id = frappe.dom.get_unique_id();
- html = `${messages}
-
- `;
- }
- let indicator_color = log.success ? 'green' : 'red';
- let title = log.success ? __('Success') : __('Failure');
-
- if (frm.doc.show_failed_logs && log.success) {
- return '';
- }
-
- return `
- | ${log.row_indexes.join(', ')} |
-
- ${title}
- |
-
- ${html}
- |
-
`;
- })
- .join('');
-
- if (!rows && frm.doc.show_failed_logs) {
- rows = `|
- ${__('No failed logs')}
- |
`;
- }
-
- frm.get_field('import_log_preview').$wrapper.html(`
-
-
- | ${__('Row Number')} |
- ${__('Status')} |
- ${__('Message')} |
-
- ${rows}
-
- `);
- },
-
- show_missing_link_values(frm, missing_link_values) {
- let can_be_created_automatically = missing_link_values.every(
- d => d.has_one_mandatory_field
- );
-
- let html = missing_link_values
- .map(d => {
- let doctype = d.doctype;
- let values = d.missing_values;
- return `
- ${doctype}
- ${values.map(v => `- ${v}
`).join('')}
- `;
- })
- .join('');
-
- if (can_be_created_automatically) {
- // prettier-ignore
- let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
- frappe.confirm(message + html, () => {
- frm
- .call('create_missing_link_values', {
- missing_link_values
- })
- .then(r => {
- let records = r.message;
- frappe.msgprint(
- __('Created {0} records successfully.', [records.length])
- );
- });
- });
- } else {
- frappe.msgprint(
- // prettier-ignore
- __('The following records needs to be created before we can import your file.') + html
- );
- }
- }
-});
diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.json b/frappe/core/doctype/data_import_beta/data_import_beta.json
deleted file mode 100644
index 777af0a071..0000000000
--- a/frappe/core/doctype/data_import_beta/data_import_beta.json
+++ /dev/null
@@ -1,170 +0,0 @@
-{
- "actions": [],
- "autoname": "format:{reference_doctype} Import on {creation}",
- "beta": 1,
- "creation": "2019-08-04 14:16:08.318714",
- "doctype": "DocType",
- "editable_grid": 1,
- "engine": "InnoDB",
- "field_order": [
- "reference_doctype",
- "import_type",
- "download_template",
- "import_file",
- "column_break_5",
- "status",
- "submit_after_import",
- "mute_emails",
- "template_options",
- "section_import_preview",
- "import_preview",
- "import_warnings_section",
- "template_warnings",
- "import_warnings",
- "import_log_section",
- "import_log",
- "show_failed_logs",
- "import_log_preview"
- ],
- "fields": [
- {
- "fieldname": "reference_doctype",
- "fieldtype": "Link",
- "in_list_view": 1,
- "label": "Document Type",
- "options": "DocType",
- "reqd": 1,
- "set_only_once": 1
- },
- {
- "fieldname": "import_type",
- "fieldtype": "Select",
- "in_list_view": 1,
- "label": "Import Type",
- "options": "\nInsert New Records\nUpdate Existing Records",
- "reqd": 1,
- "set_only_once": 1
- },
- {
- "depends_on": "eval:!doc.__islocal",
- "fieldname": "import_file",
- "fieldtype": "Attach",
- "in_list_view": 1,
- "label": "Import File"
- },
- {
- "fieldname": "import_preview",
- "fieldtype": "HTML",
- "label": "Import Preview"
- },
- {
- "fieldname": "section_import_preview",
- "fieldtype": "Section Break",
- "label": "Preview"
- },
- {
- "fieldname": "column_break_5",
- "fieldtype": "Column Break"
- },
- {
- "fieldname": "template_options",
- "fieldtype": "Code",
- "hidden": 1,
- "label": "Template Options",
- "options": "JSON",
- "read_only": 1
- },
- {
- "fieldname": "import_log",
- "fieldtype": "Code",
- "label": "Import Log",
- "options": "JSON"
- },
- {
- "fieldname": "import_log_section",
- "fieldtype": "Section Break",
- "label": "Import Log"
- },
- {
- "fieldname": "import_log_preview",
- "fieldtype": "HTML",
- "label": "Import Log Preview"
- },
- {
- "default": "Pending",
- "fieldname": "status",
- "fieldtype": "Select",
- "hidden": 1,
- "label": "Status",
- "options": "Pending\nSuccess\nPartial Success\nError",
- "read_only": 1
- },
- {
- "fieldname": "template_warnings",
- "fieldtype": "Code",
- "hidden": 1,
- "label": "Template Warnings",
- "options": "JSON"
- },
- {
- "default": "0",
- "fieldname": "submit_after_import",
- "fieldtype": "Check",
- "label": "Submit After Import",
- "set_only_once": 1
- },
- {
- "fieldname": "import_warnings_section",
- "fieldtype": "Section Break",
- "label": "Warnings"
- },
- {
- "fieldname": "import_warnings",
- "fieldtype": "HTML",
- "label": "Import Warnings"
- },
- {
- "depends_on": "reference_doctype",
- "fieldname": "download_template",
- "fieldtype": "Button",
- "label": "Download Template"
- },
- {
- "default": "1",
- "fieldname": "mute_emails",
- "fieldtype": "Check",
- "label": "Don't Send Emails",
- "set_only_once": 1
- },
- {
- "default": "0",
- "fieldname": "show_failed_logs",
- "fieldtype": "Check",
- "label": "Show Failed Logs"
- }
- ],
- "hide_toolbar": 1,
- "links": [],
- "modified": "2020-02-17 15:35:04.386098",
- "modified_by": "faris@erpnext.com",
- "module": "Core",
- "name": "Data Import Beta",
- "owner": "Administrator",
- "permissions": [
- {
- "create": 1,
- "delete": 1,
- "email": 1,
- "export": 1,
- "print": 1,
- "read": 1,
- "report": 1,
- "role": "System Manager",
- "share": 1,
- "write": 1
- }
- ],
- "sort_field": "modified",
- "sort_order": "DESC",
- "track_changes": 1
-}
\ No newline at end of file
diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.py b/frappe/core/doctype/data_import_beta/data_import_beta.py
deleted file mode 100644
index 8f12bd20ed..0000000000
--- a/frappe/core/doctype/data_import_beta/data_import_beta.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2019, Frappe Technologies and contributors
-# For license information, please see license.txt
-
-from __future__ import unicode_literals
-import frappe
-from frappe.model.document import Document
-from frappe.core.doctype.data_import.importer_new import Importer
-from frappe.core.doctype.data_import.exporter_new import Exporter
-from frappe.core.page.background_jobs.background_jobs import get_info
-from frappe.utils.background_jobs import enqueue
-from frappe import _
-
-
-class DataImportBeta(Document):
- def validate(self):
- doc_before_save = self.get_doc_before_save()
- if not self.import_file or (
- doc_before_save and doc_before_save.import_file != self.import_file
- ):
- self.template_options = ""
- self.template_warnings = ""
-
- if self.import_file:
- # validate template
- self.get_importer()
-
- def get_preview_from_template(self):
- if not self.import_file:
- return
-
- i = self.get_importer()
- return i.get_data_for_import_preview()
-
- def start_import(self):
- if frappe.utils.scheduler.is_scheduler_inactive():
- frappe.throw(
- _("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
- )
-
- enqueued_jobs = [d.get("job_name") for d in get_info()]
-
- if self.name not in enqueued_jobs:
- enqueue(
- start_import,
- queue="default",
- timeout=6000,
- event="data_import",
- job_name=self.name,
- data_import=self.name,
- now=frappe.conf.developer_mode or frappe.flags.in_test,
- )
- return True
-
- return False
-
- def export_errored_rows(self):
- return self.get_importer().export_errored_rows()
-
- def get_importer(self):
- return Importer(self.reference_doctype, data_import=self)
-
-
-@frappe.whitelist()
-def get_preview_from_template(data_import):
- return frappe.get_doc("Data Import Beta", data_import).get_preview_from_template()
-
-
-@frappe.whitelist()
-def form_start_import(data_import):
- return frappe.get_doc("Data Import Beta", data_import).start_import()
-
-
-def start_import(data_import):
- """This method runs in background job"""
- data_import = frappe.get_doc("Data Import Beta", data_import)
- try:
- i = Importer(data_import.reference_doctype, data_import=data_import)
- i.import_data()
- except:
- frappe.db.rollback()
- data_import.db_set("status", "Error")
- frappe.log_error(title=data_import.name)
- frappe.db.commit()
- frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
-
-
-@frappe.whitelist()
-def download_template(
- doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
-):
- """
- Download template from Exporter
- :param doctype: Document Type
- :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
- :param export_records=None: One of 'all', 'by_filter', 'blank_template'
- :param export_filters: Filter dict
- :param file_type: File type to export into
- """
-
- export_fields = frappe.parse_json(export_fields)
- export_filters = frappe.parse_json(export_filters)
- export_data = export_records != "blank_template"
-
- e = Exporter(
- doctype,
- export_fields=export_fields,
- export_data=export_data,
- export_filters=export_filters,
- file_type=file_type,
- export_page_length=5 if export_records == "5_records" else None,
- )
- e.build_response()
-
-
-@frappe.whitelist()
-def download_errored_template(data_import_name):
- data_import = frappe.get_doc("Data Import Beta", data_import_name)
- data_import.export_errored_rows()
diff --git a/frappe/core/doctype/data_import_beta/data_import_beta_list.js b/frappe/core/doctype/data_import_beta/data_import_beta_list.js
deleted file mode 100644
index 58953d2531..0000000000
--- a/frappe/core/doctype/data_import_beta/data_import_beta_list.js
+++ /dev/null
@@ -1,40 +0,0 @@
-let imports_in_progress = [];
-
-frappe.listview_settings['Data Import Beta'] = {
- onload(listview) {
- frappe.realtime.on('data_import_progress', data => {
- if (!imports_in_progress.includes(data.data_import)) {
- imports_in_progress.push(data.data_import);
- }
- });
- frappe.realtime.on('data_import_refresh', data => {
- imports_in_progress = imports_in_progress.filter(
- d => d !== data.data_import
- );
- listview.refresh();
- });
- },
- get_indicator: function(doc) {
- var colors = {
- 'Pending': 'orange',
- 'Partial Success': 'orange',
- 'Success': 'green',
- 'In Progress': 'orange',
- 'Error': 'red'
- };
- let status = doc.status;
- if (imports_in_progress.includes(doc.name)) {
- status = 'In Progress';
- }
- return [__(status), colors[status], 'status,=,' + doc.status];
- },
- formatters: {
- import_type(value) {
- return {
- 'Insert New Records': __('Insert'),
- 'Update Existing Records': __('Update')
- }[value];
- }
- },
- hide_name_column: true
-};
diff --git a/frappe/core/doctype/data_import_beta/__init__.py b/frappe/core/doctype/data_import_legacy/__init__.py
similarity index 100%
rename from frappe/core/doctype/data_import_beta/__init__.py
rename to frappe/core/doctype/data_import_legacy/__init__.py
diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.js b/frappe/core/doctype/data_import_legacy/data_import_legacy.js
new file mode 100644
index 0000000000..9a301af76e
--- /dev/null
+++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.js
@@ -0,0 +1,324 @@
+// Copyright (c) 2017, Frappe Technologies and contributors
+// For license information, please see license.txt
+
+frappe.ui.form.on('Data Import Legacy', {
+ onload: function(frm) {
+ if (frm.doc.__islocal) {
+ frm.set_value("action", "");
+ }
+
+ frappe.call({
+ method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes",
+ callback: function (r) {
+ let importable_doctypes = r.message;
+ frm.set_query("reference_doctype", function () {
+ return {
+ "filters": {
+ "issingle": 0,
+ "istable": 0,
+ "name": ['in', importable_doctypes]
+ }
+ };
+ });
+ }
+ }),
+
+ // import files must always be stored as private attachments, never public
+ frm.fields_dict["import_file"].df.is_private = 1;
+
+ frappe.realtime.on("data_import_progress", function(data) {
+ if (data.data_import === frm.doc.name) {
+ if (data.reload && data.reload === true) {
+ frm.reload_doc();
+ }
+ if (data.progress) {
+ let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
+ if (progress_bar) {
+ $(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
+ $(progress_bar).css("width", data.progress + "%");
+ }
+ }
+ }
+ });
+ },
+
+ reference_doctype: function(frm){
+ if (frm.doc.reference_doctype) {
+ frappe.model.with_doctype(frm.doc.reference_doctype);
+ }
+ },
+
+ refresh: function(frm) {
+ frm.disable_save();
+ frm.dashboard.clear_headline();
+ if (frm.doc.reference_doctype && !frm.doc.import_file) {
+ frm.page.set_indicator(__('Attach file'), 'orange');
+ } else {
+ if (frm.doc.import_status) {
+ const listview_settings = frappe.listview_settings['Data Import Legacy'];
+ const indicator = listview_settings.get_indicator(frm.doc);
+
+ frm.page.set_indicator(indicator[0], indicator[1]);
+
+ if (frm.doc.import_status === "In Progress") {
+ frm.dashboard.add_progress("Data Import Progress", "0");
+ frm.set_read_only();
+ frm.refresh_fields();
+ }
+ }
+ }
+
+ if (frm.doc.reference_doctype) {
+ frappe.model.with_doctype(frm.doc.reference_doctype);
+ }
+
+ if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
+ frm.set_df_property("action", "read_only", 1);
+ }
+
+ frm.add_custom_button(__("Help"), function() {
+ frappe.help.show_video("6wiriRKPhmg");
+ });
+
+ if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
+ frm.add_custom_button(__("Download template"), function() {
+ frappe.data_import.download_dialog(frm).show();
+ });
+ }
+
+ if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
+ frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
+ frm.page.set_primary_action(__("Start Import"), function() {
+ frappe.call({
+ btn: frm.page.btn_primary,
+ method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data",
+ args: {
+ data_import: frm.doc.name
+ }
+ });
+ }).addClass('btn btn-primary');
+ }
+
+ if (frm.doc.log_details) {
+ frm.events.create_log_table(frm);
+ } else {
+ $(frm.fields_dict.import_log.wrapper).empty();
+ }
+ },
+
+ action: function(frm) {
+ if(!frm.doc.action) return;
+ if(!frm.doc.reference_doctype) {
+ frappe.msgprint(__("Please select document type first."));
+ frm.set_value("action", "");
+ return;
+ }
+
+ if(frm.doc.action == "Insert new records") {
+ frm.doc.insert_new = 1;
+ } else if (frm.doc.action == "Update records"){
+ frm.doc.overwrite = 1;
+ }
+ frm.save();
+ },
+
+ only_update: function(frm) {
+ frm.save();
+ },
+
+ submit_after_import: function(frm) {
+ frm.save();
+ },
+
+ skip_errors: function(frm) {
+ frm.save();
+ },
+
+ ignore_encoding_errors: function(frm) {
+ frm.save();
+ },
+
+ no_email: function(frm) {
+ frm.save();
+ },
+
+ show_only_errors: function(frm) {
+ frm.events.create_log_table(frm);
+ },
+
+ create_log_table: function(frm) {
+ let msg = JSON.parse(frm.doc.log_details);
+ var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
+ $(frappe.render_template("log_details", {
+ data: msg.messages,
+ import_status: frm.doc.import_status,
+ show_only_errors: frm.doc.show_only_errors,
+ })).appendTo($log_wrapper);
+ }
+});
+
+frappe.provide('frappe.data_import');
+frappe.data_import.download_dialog = function(frm) {
+ var dialog;
+ const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
+ const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
+
+ const get_doctype_checkbox_fields = () => {
+ return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
+ .map(df => dialog.fields_dict[df.fieldname]);
+ };
+
+ const doctype_fields = get_fields(frm.doc.reference_doctype)
+ .map(df => {
+ let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
+ return {
+ label: df.label,
+ reqd: reqd,
+ danger: reqd,
+ value: df.fieldname,
+ checked: 1
+ };
+ });
+
+ let fields = [
+ {
+ "label": __("Select Columns"),
+ "fieldname": "select_columns",
+ "fieldtype": "Select",
+ "options": "All\nMandatory\nManually",
+ "reqd": 1,
+ "onchange": function() {
+ const fields = get_doctype_checkbox_fields();
+ fields.map(f => f.toggle(true));
+ if(this.value == 'Mandatory' || this.value == 'Manually') {
+ checkbox_toggle(true);
+ fields.map(multicheck_field => {
+ multicheck_field.options.map(option => {
+ if(!option.reqd) return;
+ $(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
+ .prop('checked', false)
+ .trigger('click');
+ });
+ });
+ } else if(this.value == 'All'){
+ $(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
+ .prop('disabled', true);
+ }
+ }
+ },
+ {
+ "label": __("File Type"),
+ "fieldname": "file_type",
+ "fieldtype": "Select",
+ "options": "Excel\nCSV",
+ "default": "Excel"
+ },
+ {
+ "label": __("Download with Data"),
+ "fieldname": "with_data",
+ "fieldtype": "Check",
+ "hidden": !frm.doc.overwrite,
+ "default": 1
+ },
+ {
+ "label": __("Select All"),
+ "fieldname": "select_all",
+ "fieldtype": "Button",
+ "depends_on": "eval:doc.select_columns=='Manually'",
+ click: function() {
+ checkbox_toggle();
+ }
+ },
+ {
+ "label": __("Unselect All"),
+ "fieldname": "unselect_all",
+ "fieldtype": "Button",
+ "depends_on": "eval:doc.select_columns=='Manually'",
+ click: function() {
+ checkbox_toggle(true);
+ }
+ },
+ {
+ "label": frm.doc.reference_doctype,
+ "fieldname": "doctype_fields",
+ "fieldtype": "MultiCheck",
+ "options": doctype_fields,
+ "columns": 2,
+ "hidden": 1
+ }
+ ];
+
+ const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
+ .map(df => {
+ return {
+ "label": df.options,
+ "fieldname": df.fieldname + '_fields',
+ "fieldtype": "MultiCheck",
+ "options": frappe.meta.get_docfields(df.options)
+ .filter(filter_fields)
+ .map(df => ({
+ label: df.label,
+ reqd: df.reqd ? 1 : 0,
+ value: df.fieldname,
+ checked: 1,
+ danger: df.reqd
+ })),
+ "columns": 2,
+ "hidden": 1
+ };
+ });
+
+ fields = fields.concat(child_table_fields);
+
+ dialog = new frappe.ui.Dialog({
+ title: __('Download Template'),
+ fields: fields,
+ primary_action: function(values) {
+ var data = values;
+ if (frm.doc.reference_doctype) {
+ var export_params = () => {
+ let columns = {};
+ if(values.select_columns) {
+ columns = get_doctype_checkbox_fields().reduce((columns, field) => {
+ const options = field.get_checked_options();
+ columns[field.df.label] = options;
+ return columns;
+ }, {});
+ }
+
+ return {
+ doctype: frm.doc.reference_doctype,
+ parent_doctype: frm.doc.reference_doctype,
+ select_columns: JSON.stringify(columns),
+ with_data: frm.doc.overwrite && data.with_data,
+ all_doctypes: true,
+ file_type: data.file_type,
+ template: true
+ };
+ };
+ let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
+ open_url_post(get_template_url, export_params());
+ } else {
+ frappe.msgprint(__("Please select the Document Type."));
+ }
+ dialog.hide();
+ },
+ primary_action_label: __('Download')
+ });
+
+ $(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
+ .wrapAll('');
+ const button_container = $(dialog.body).find('.inline-buttons');
+ button_container.addClass('flex');
+ $(button_container).find('.frappe-control').map((index, button) => {
+ $(button).css({"margin-right": "1em"});
+ });
+
+ function checkbox_toggle(checked=false) {
+ $(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
+ $(element).find(`:checkbox`).prop("checked", checked).trigger('click');
+ });
+ }
+
+ return dialog;
+};
diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.json b/frappe/core/doctype/data_import_legacy/data_import_legacy.json
new file mode 100644
index 0000000000..852ccba156
--- /dev/null
+++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.json
@@ -0,0 +1,218 @@
+{
+ "actions": [],
+ "allow_copy": 1,
+ "creation": "2020-06-11 16:13:23.813709",
+ "doctype": "DocType",
+ "document_type": "Document",
+ "editable_grid": 1,
+ "engine": "InnoDB",
+ "field_order": [
+ "reference_doctype",
+ "action",
+ "insert_new",
+ "overwrite",
+ "only_update",
+ "section_break_4",
+ "import_file",
+ "column_break_4",
+ "error_file",
+ "section_break_6",
+ "skip_errors",
+ "submit_after_import",
+ "ignore_encoding_errors",
+ "no_email",
+ "import_detail",
+ "import_status",
+ "show_only_errors",
+ "import_log",
+ "log_details",
+  "total_rows",
+  "amended_from"
+ ],
+ "fields": [
+ {
+ "fieldname": "reference_doctype",
+ "fieldtype": "Link",
+ "ignore_user_permissions": 1,
+ "in_list_view": 1,
+ "label": "Document Type",
+ "options": "DocType",
+ "reqd": 1
+ },
+ {
+ "fieldname": "action",
+ "fieldtype": "Select",
+ "label": "Action",
+ "options": "Insert new records\nUpdate records",
+ "reqd": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "eval:!doc.overwrite",
+ "description": "New data will be inserted.",
+ "fieldname": "insert_new",
+ "fieldtype": "Check",
+ "hidden": 1,
+ "label": "Insert new records",
+ "set_only_once": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "eval:!doc.insert_new",
+ "description": "If you are updating/overwriting already created records.",
+ "fieldname": "overwrite",
+ "fieldtype": "Check",
+ "hidden": 1,
+ "label": "Update records",
+ "set_only_once": 1
+ },
+ {
+ "default": "0",
+ "depends_on": "overwrite",
+ "description": "If you don't want to create any new records while updating the older records.",
+ "fieldname": "only_update",
+ "fieldtype": "Check",
+ "label": "Don't create new records"
+ },
+ {
+ "depends_on": "eval:(!doc.__islocal)",
+ "fieldname": "section_break_4",
+ "fieldtype": "Section Break"
+ },
+ {
+ "fieldname": "import_file",
+ "fieldtype": "Attach",
+ "label": "Attach file for Import"
+ },
+ {
+ "fieldname": "column_break_4",
+ "fieldtype": "Column Break"
+ },
+ {
+ "depends_on": "eval: doc.import_status == \"Partially Successful\"",
+ "description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
+ "fieldname": "error_file",
+ "fieldtype": "Attach",
+ "label": "Generated File"
+ },
+ {
+ "depends_on": "eval:(!doc.__islocal)",
+ "fieldname": "section_break_6",
+ "fieldtype": "Section Break"
+ },
+ {
+ "default": "0",
+ "description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
+ "fieldname": "skip_errors",
+ "fieldtype": "Check",
+ "label": "Skip rows with errors"
+ },
+ {
+ "default": "0",
+ "fieldname": "submit_after_import",
+ "fieldtype": "Check",
+ "label": "Submit after importing"
+ },
+ {
+ "default": "0",
+ "fieldname": "ignore_encoding_errors",
+ "fieldtype": "Check",
+ "label": "Ignore encoding errors"
+ },
+ {
+ "default": "1",
+ "fieldname": "no_email",
+ "fieldtype": "Check",
+ "label": "Do not send Emails"
+ },
+ {
+ "collapsible": 1,
+ "collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
+ "depends_on": "import_status",
+ "fieldname": "import_detail",
+ "fieldtype": "Section Break",
+ "label": "Import Log"
+ },
+ {
+ "fieldname": "import_status",
+ "fieldtype": "Select",
+ "label": "Import Status",
+ "options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
+ "read_only": 1
+ },
+ {
+ "allow_on_submit": 1,
+ "default": "1",
+ "fieldname": "show_only_errors",
+ "fieldtype": "Check",
+ "label": "Show only errors",
+ "no_copy": 1,
+ "print_hide": 1
+ },
+ {
+ "allow_on_submit": 1,
+ "depends_on": "import_status",
+ "fieldname": "import_log",
+ "fieldtype": "HTML",
+ "label": "Import Log"
+ },
+ {
+ "allow_on_submit": 1,
+ "fieldname": "log_details",
+ "fieldtype": "Code",
+ "hidden": 1,
+ "label": "Log Details",
+ "read_only": 1
+ },
+ {
+ "fieldname": "total_rows",
+ "fieldtype": "Int",
+ "hidden": 1,
+ "label": "Total Rows",
+ "read_only": 1
+ },
+ {
+ "fieldname": "amended_from",
+ "fieldtype": "Link",
+ "label": "Amended From",
+ "no_copy": 1,
+ "options": "Data Import Legacy",
+ "print_hide": 1,
+ "read_only": 1
+ }
+ ],
+ "is_submittable": 1,
+ "links": [],
+ "max_attachments": 1,
+ "modified": "2020-06-11 16:13:23.813709",
+ "modified_by": "Administrator",
+ "module": "Core",
+ "name": "Data Import Legacy",
+ "owner": "Administrator",
+ "permissions": [
+ {
+ "create": 1,
+ "delete": 1,
+ "email": 1,
+ "read": 1,
+ "role": "System Manager",
+ "share": 1,
+ "submit": 1,
+ "write": 1
+ }
+ ],
+ "sort_field": "modified",
+ "sort_order": "DESC",
+ "track_changes": 1,
+ "track_seen": 1
+}
\ No newline at end of file
diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy.py b/frappe/core/doctype/data_import_legacy/data_import_legacy.py
new file mode 100644
index 0000000000..df3a3edd3a
--- /dev/null
+++ b/frappe/core/doctype/data_import_legacy/data_import_legacy.py
@@ -0,0 +1,123 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017, Frappe Technologies and contributors
+# For license information, please see license.txt
+
+from __future__ import unicode_literals
+import frappe, os
+from frappe import _
+import frappe.modules.import_file
+from frappe.model.document import Document
+from frappe.utils.data import format_datetime
+from frappe.core.doctype.data_import_legacy.importer import upload
+from frappe.utils.background_jobs import enqueue
+
+
+class DataImportLegacy(Document):
+	"""Legacy (spreadsheet-template based) Data Import document.
+
+	The attached template is validated eagerly on save so users see
+	template errors before the background import job runs.
+	"""
+
+	def autoname(self):
+		# Human-readable name derived from the creation timestamp.
+		if not self.name:
+			self.name = "Import on " +format_datetime(self.creation)
+
+	def validate(self):
+		# No file attached yet: reset the cached row count.
+		if not self.import_file:
+			self.db_set("total_rows", 0)
+		if self.import_status == "In Progress":
+			frappe.throw(_("Can't save the form as data import is in progress."))
+
+		# validate the template just after the upload
+		# if there is total_rows in the doc, it means that the template is already validated and error free
+		if self.import_file and not self.total_rows:
+			upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
+
+
+@frappe.whitelist()
+def get_importable_doctypes():
+	"""Return the cached list of DocTypes the current user is allowed to import."""
+	return frappe.cache().hget("can_import", frappe.session.user)
+
+@frappe.whitelist()
+def import_data(data_import):
+	"""Queue the background import job for the given Data Import Legacy name.
+
+	Marks the document "In Progress" immediately (without touching
+	`modified`) and pushes a realtime progress event so the open form
+	reloads; skips enqueueing if a job with the same name is already queued.
+	"""
+	frappe.db.set_value("Data Import Legacy", data_import, "import_status", "In Progress", update_modified=False)
+	frappe.publish_realtime("data_import_progress", {"progress": "0",
+		"data_import": data_import, "reload": True}, user=frappe.session.user)
+
+	from frappe.core.page.background_jobs.background_jobs import get_info
+	enqueued_jobs = [d.get("job_name") for d in get_info()]
+
+	if data_import not in enqueued_jobs:
+		enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
+			data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
+
+
+def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
+ insert=False, submit=False, pre_process=None):
+ if os.path.isdir(path):
+ files = [os.path.join(path, f) for f in os.listdir(path)]
+ else:
+ files = [path]
+
+ for f in files:
+ if f.endswith(".json"):
+ frappe.flags.mute_emails = True
+ frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
+ frappe.flags.mute_emails = False
+ frappe.db.commit()
+ elif f.endswith(".csv"):
+ import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
+ frappe.db.commit()
+
+
+def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
+	"""Import a single CSV file at `path` through the legacy upload() importer."""
+	from frappe.utils.csvutils import read_csv_content
+	print("Importing " + path)
+	with open(path, "r") as infile:
+		upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
+			submit_after_import=submit, pre_process=pre_process)
+
+
+def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
+	"""Dump documents of `doctype` to `path` as JSON, stripping volatile metadata.
+
+	Exports a single doc (when `name` is given), the lone doc of a Single
+	doctype, or all docs matching `filters`/`or_filters`.
+	"""
+	def post_process(out):
+		# strip fields that churn between sites so fixture files diff cleanly
+		del_keys = ('modified_by', 'creation', 'owner', 'idx')
+		for doc in out:
+			for key in del_keys:
+				if key in doc:
+					del doc[key]
+			for k, v in doc.items():
+				if isinstance(v, list):
+					# child rows: additionally drop identity/workflow fields
+					for child in v:
+						for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
+							if key in child:
+								del child[key]
+
+	out = []
+	if name:
+		out.append(frappe.get_doc(doctype, name).as_dict())
+	elif frappe.db.get_value("DocType", doctype, "issingle"):
+		out.append(frappe.get_doc(doctype).as_dict())
+	else:
+		for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
+			out.append(frappe.get_doc(doctype, doc.name).as_dict())
+	post_process(out)
+
+	dirname = os.path.dirname(path)
+	if not os.path.exists(dirname):
+		# target directory missing: retry relative to the parent directory
+		path = os.path.join('..', path)
+
+	with open(path, "w") as outfile:
+		outfile.write(frappe.as_json(out))
+
+
+def export_csv(doctype, path):
+	"""Write a CSV data-export template (including data) for `doctype` to `path`."""
+	from frappe.core.doctype.data_export.exporter import export_data
+	with open(path, "wb") as csvfile:
+		# export_data() places the rendered CSV text in frappe.response.result
+		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
+		csvfile.write(frappe.response.result.encode("utf-8"))
+
+
+@frappe.whitelist()
+def export_fixture(doctype, app):
+ if frappe.session.user != "Administrator":
+ raise frappe.PermissionError
+
+ if not os.path.exists(frappe.get_app_path(app, "fixtures")):
+ os.mkdir(frappe.get_app_path(app, "fixtures"))
+
+ export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")
diff --git a/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js b/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js
new file mode 100644
index 0000000000..fcf2391313
--- /dev/null
+++ b/frappe/core/doctype/data_import_legacy/data_import_legacy_list.js
@@ -0,0 +1,24 @@
+frappe.listview_settings['Data Import Legacy'] = {
+	// Coloured status indicator per import_status in the list view.
+	add_fields: ["import_status"],
+	has_indicator_for_draft: 1,
+	get_indicator: function(doc) {
+
+		// [label, colour, list-filter] per status; empty filter value
+		// for Pending matches docs with no import_status set.
+		let status = {
+			'Successful': [__("Success"), "green", "import_status,=,Successful"],
+			'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
+			'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
+			'Failed': [__("Failed"), "red", "import_status,=,Failed"],
+			'Pending': [__("Pending"), "orange", "import_status,=,"]
+		}
+
+		if (doc.import_status) {
+			return status[doc.import_status];
+		}
+
+		// NOTE(review): both remaining paths return 'Pending' — the
+		// docstatus check below is redundant as written.
+		if (doc.docstatus == 0) {
+			return status['Pending'];
+		}
+
+		return status['Pending'];
+	}
+};
diff --git a/frappe/core/doctype/data_import_legacy/importer.py b/frappe/core/doctype/data_import_legacy/importer.py
new file mode 100644
index 0000000000..5bd0daf32b
--- /dev/null
+++ b/frappe/core/doctype/data_import_legacy/importer.py
@@ -0,0 +1,541 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+from __future__ import unicode_literals, print_function
+
+from six.moves import range
+import requests
+import frappe, json
+import frappe.permissions
+
+from frappe import _
+
+from frappe.utils.csvutils import getlink
+from frappe.utils.dateutils import parse_date
+
+from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
+from six import string_types
+
+
+@frappe.whitelist()
+def get_data_keys():
+	"""Return the translated marker strings used to locate sections in a template header."""
+	return frappe._dict({
+		"data_separator": _('Start entering data below this line'),
+		"main_table": _("Table") + ":",
+		"parent_table": _("Parent Table") + ":",
+		"columns": _("Column Name") + ":",
+		"doctype": _("DocType") + ":"
+	})
+
+
+
+@frappe.whitelist()
+def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
+ update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
+ skip_errors = True, data_import_doc=None, validate_template=False, user=None):
+ """upload data"""
+
+ # for translations
+ if user:
+ frappe.cache().hdel("lang", user)
+ frappe.set_user_lang(user)
+
+ if data_import_doc and isinstance(data_import_doc, string_types):
+ data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc)
+ if data_import_doc and from_data_import == "Yes":
+ no_email = data_import_doc.no_email
+ ignore_encoding_errors = data_import_doc.ignore_encoding_errors
+ update_only = data_import_doc.only_update
+ submit_after_import = data_import_doc.submit_after_import
+ overwrite = data_import_doc.overwrite
+ skip_errors = data_import_doc.skip_errors
+ else:
+ # extra input params
+ params = json.loads(frappe.form_dict.get("params") or '{}')
+ if params.get("submit_after_import"):
+ submit_after_import = True
+ if params.get("ignore_encoding_errors"):
+ ignore_encoding_errors = True
+ if not params.get("no_email"):
+ no_email = False
+ if params.get('update_only'):
+ update_only = True
+ if params.get('from_data_import'):
+ from_data_import = params.get('from_data_import')
+ if not params.get('skip_errors'):
+ skip_errors = params.get('skip_errors')
+
+ frappe.flags.in_import = True
+ frappe.flags.mute_emails = no_email
+
+ def get_data_keys_definition():
+ return get_data_keys()
+
+ def bad_template():
+ frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))
+
+ def check_data_length():
+ if not data:
+ frappe.throw(_("No data found in the file. Please reattach the new file with data."))
+
+ def get_start_row():
+ for i, row in enumerate(rows):
+ if row and row[0]==get_data_keys_definition().data_separator:
+ return i+1
+ bad_template()
+
+ def get_header_row(key):
+ return get_header_row_and_idx(key)[0]
+
+ def get_header_row_and_idx(key):
+ for i, row in enumerate(header):
+ if row and row[0]==key:
+ return row, i
+ return [], -1
+
+ def filter_empty_columns(columns):
+ empty_cols = list(filter(lambda x: x in ("", None), columns))
+
+ if empty_cols:
+ if columns[-1*len(empty_cols):] == empty_cols:
+ # filter empty columns if they exist at the end
+ columns = columns[:-1*len(empty_cols)]
+ else:
+ frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
+ raise_exception=1)
+
+ return columns
+
+ def make_column_map():
+ doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
+ if row_idx == -1: # old style
+ return
+
+ dt = None
+ for i, d in enumerate(doctype_row[1:]):
+ if d not in ("~", "-"):
+ if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
+ dt, parentfield = d, None
+ # xls format truncates the row, so it may not have more columns
+ if len(doctype_row) > i+2:
+ parentfield = doctype_row[i+2]
+ doctypes.append((dt, parentfield))
+ column_idx_to_fieldname[(dt, parentfield)] = {}
+ column_idx_to_fieldtype[(dt, parentfield)] = {}
+ if dt:
+ column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
+ column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]
+
+ def get_doc(start_idx):
+ if doctypes:
+ doc = {}
+ attachments = []
+ last_error_row_idx = None
+ for idx in range(start_idx, len(rows)):
+ last_error_row_idx = idx # pylint: disable=W0612
+ if (not doc) or main_doc_empty(rows[idx]):
+ for dt, parentfield in doctypes:
+ d = {}
+ for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
+ try:
+ fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
+ fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]
+
+ if not fieldname or not rows[idx][column_idx]:
+ continue
+
+ d[fieldname] = rows[idx][column_idx]
+ if fieldtype in ("Int", "Check"):
+ d[fieldname] = cint(d[fieldname])
+ elif fieldtype in ("Float", "Currency", "Percent"):
+ d[fieldname] = flt(d[fieldname])
+ elif fieldtype == "Date":
+ if d[fieldname] and isinstance(d[fieldname], string_types):
+ d[fieldname] = getdate(parse_date(d[fieldname]))
+ elif fieldtype == "Datetime":
+ if d[fieldname]:
+ if " " in d[fieldname]:
+ _date, _time = d[fieldname].split()
+ else:
+ _date, _time = d[fieldname], '00:00:00'
+ _date = parse_date(d[fieldname])
+ d[fieldname] = get_datetime(_date + " " + _time)
+ else:
+ d[fieldname] = None
+
+ elif fieldtype in ("Image", "Attach Image", "Attach"):
+ # added file to attachments list
+ attachments.append(d[fieldname])
+
+ elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
+ # as fields can be saved in the number format(long type) in data import template
+ d[fieldname] = cstr(d[fieldname])
+
+ except IndexError:
+ pass
+
+ # scrub quotes from name and modified
+ if d.get("name") and d["name"].startswith('"'):
+ d["name"] = d["name"][1:-1]
+
+ if sum([0 if not val else 1 for val in d.values()]):
+ d['doctype'] = dt
+ if dt == doctype:
+ doc.update(d)
+ else:
+ if not overwrite and doc.get("name"):
+ d['parent'] = doc["name"]
+ d['parenttype'] = doctype
+ d['parentfield'] = parentfield
+ doc.setdefault(d['parentfield'], []).append(d)
+ else:
+ break
+
+ return doc, attachments, last_error_row_idx
+ else:
+ doc = frappe._dict(zip(columns, rows[start_idx][1:]))
+ doc['doctype'] = doctype
+ return doc, [], None
+
+ # used in testing whether a row is empty or parent row or child row
+ # checked only 3 first columns since first two columns can be blank for example the case of
+ # importing the item variant where item code and item name will be blank.
+ def main_doc_empty(row):
+ if row:
+ for i in range(3,0,-1):
+ if len(row) > i and row[i]:
+ return False
+ return True
+
+ def validate_naming(doc):
+ autoname = frappe.get_meta(doctype).autoname
+ if autoname:
+ if autoname[0:5] == 'field':
+ autoname = autoname[6:]
+ elif autoname == 'naming_series:':
+ autoname = 'naming_series'
+ else:
+ return True
+
+ if (autoname not in doc) or (not doc[autoname]):
+ from frappe.model.base_document import get_controller
+ if not hasattr(get_controller(doctype), "autoname"):
+ frappe.throw(_("{0} is a mandatory field").format(autoname))
+ return True
+
+ users = frappe.db.sql_list("select name from tabUser")
+ def prepare_for_insert(doc):
+ # don't block data import if user is not set
+ # migrating from another system
+ if not doc.owner in users:
+ doc.owner = frappe.session.user
+ if not doc.modified_by in users:
+ doc.modified_by = frappe.session.user
+
+ def is_valid_url(url):
+ is_valid = False
+ if url.startswith("/files") or url.startswith("/private/files"):
+ url = get_url(url)
+
+ try:
+ r = requests.get(url)
+ is_valid = True if r.status_code == 200 else False
+ except Exception:
+ pass
+
+ return is_valid
+
+ def attach_file_to_doc(doctype, docname, file_url):
+ # check if attachment is already available
+ # check if the attachement link is relative or not
+ if not file_url:
+ return
+ if not is_valid_url(file_url):
+ return
+
+ files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
+ attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
+ doctype=doctype,
+ docname=docname,
+ file_url=file_url
+ ))
+
+ if files:
+ # file is already attached
+ return
+
+ _file = frappe.get_doc({
+ "doctype": "File",
+ "file_url": file_url,
+ "attached_to_name": docname,
+ "attached_to_doctype": doctype,
+ "attached_to_field": 0,
+ "folder": "Home/Attachments"})
+ _file.save()
+
+
+ # header
+ filename, file_extension = ['','']
+ if not rows:
+ _file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
+ fcontent = _file.get_content()
+ filename, file_extension = _file.get_extension()
+
+ if file_extension == '.xlsx' and from_data_import == 'Yes':
+ from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
+ rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)
+
+ elif file_extension == '.csv':
+ from frappe.utils.csvutils import read_csv_content
+ rows = read_csv_content(fcontent, ignore_encoding_errors)
+
+ else:
+ frappe.throw(_("Unsupported File Format"))
+
+ start_row = get_start_row()
+ header = rows[:start_row]
+ data = rows[start_row:]
+ try:
+ doctype = get_header_row(get_data_keys_definition().main_table)[1]
+ columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
+ except:
+ frappe.throw(_("Cannot change header content"))
+ doctypes = []
+ column_idx_to_fieldname = {}
+ column_idx_to_fieldtype = {}
+
+ if skip_errors:
+ data_rows_with_error = header
+
+ if submit_after_import and not cint(frappe.db.get_value("DocType",
+ doctype, "is_submittable")):
+ submit_after_import = False
+
+ parenttype = get_header_row(get_data_keys_definition().parent_table)
+
+ if len(parenttype) > 1:
+ parenttype = parenttype[1]
+
+ # check permissions
+ if not frappe.permissions.can_import(parenttype or doctype):
+ frappe.flags.mute_emails = False
+ return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}
+
+	# Throw an exception in case the data file is empty
+ check_data_length()
+ make_column_map()
+ total = len(data)
+
+ if validate_template:
+ if total:
+ data_import_doc.total_rows = total
+ return True
+
+ if overwrite==None:
+ overwrite = params.get('overwrite')
+
+ # delete child rows (if parenttype)
+ parentfield = None
+ if parenttype:
+ parentfield = get_parent_field(doctype, parenttype)
+
+ if overwrite:
+ delete_child_rows(data, doctype)
+
+ import_log = []
+ def log(**kwargs):
+ if via_console:
+ print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
+ else:
+ import_log.append(kwargs)
+
+ def as_link(doctype, name):
+ if via_console:
+ return "{0}: {1}".format(doctype, name)
+ else:
+ return getlink(doctype, name)
+
+ # publish realtime task update
+ def publish_progress(achieved, reload=False):
+ if data_import_doc:
+ frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
+ "data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)
+
+
+ error_flag = rollback_flag = False
+
+ batch_size = frappe.conf.data_import_batch_size or 1000
+
+ for batch_start in range(0, total, batch_size):
+ batch = data[batch_start:batch_start + batch_size]
+
+ for i, row in enumerate(batch):
+ # bypass empty rows
+ if main_doc_empty(row):
+ continue
+
+ row_idx = i + start_row
+ doc = None
+
+ publish_progress(i)
+
+ try:
+ doc, attachments, last_error_row_idx = get_doc(row_idx)
+ validate_naming(doc)
+ if pre_process:
+ pre_process(doc)
+
+ original = None
+ if parentfield:
+ parent = frappe.get_doc(parenttype, doc["parent"])
+ doc = parent.append(parentfield, doc)
+ parent.save()
+ else:
+ if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
+ original = frappe.get_doc(doctype, doc["name"])
+ original_name = original.name
+ original.update(doc)
+ # preserve original name for case sensitivity
+ original.name = original_name
+ original.flags.ignore_links = ignore_links
+ original.save()
+ doc = original
+ else:
+ if not update_only:
+ doc = frappe.get_doc(doc)
+ prepare_for_insert(doc)
+ doc.flags.ignore_links = ignore_links
+ doc.insert()
+ if attachments:
+ # check file url and create a File document
+ for file_url in attachments:
+ attach_file_to_doc(doc.doctype, doc.name, file_url)
+ if submit_after_import:
+ doc.submit()
+
+ # log errors
+ if parentfield:
+ log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
+ "link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
+ elif submit_after_import:
+ log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
+ "message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
+ elif original:
+ log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
+ "message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
+ elif not update_only:
+ log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
+ "message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
+ else:
+ log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
+ "message": "Document updation ignored", "indicator": "orange"})
+
+ except Exception as e:
+ error_flag = True
+
+ # build error message
+ if frappe.local.message_log:
+					err_msg = "\n".join(['{}<br>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
+ else:
+					err_msg = '{}<br>'.format(cstr(e))
+
+ error_trace = frappe.get_traceback()
+ if error_trace:
+ error_log_doc = frappe.log_error(error_trace)
+ error_link = get_absolute_url("Error Log", error_log_doc.name)
+ else:
+ error_link = None
+
+ log(**{
+ "row": row_idx + 1,
+ "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
+ "message": err_msg,
+ "indicator": "red",
+ "link":error_link
+ })
+
+ # data with error to create a new file
+ # include the errored data in the last row as last_error_row_idx will not be updated for the last row
+ if skip_errors:
+ if last_error_row_idx == len(rows)-1:
+ last_error_row_idx = len(rows)
+ data_rows_with_error += rows[row_idx:last_error_row_idx]
+ else:
+ rollback_flag = True
+ finally:
+ frappe.local.message_log = []
+
+ start_row += batch_size
+ if rollback_flag:
+ frappe.db.rollback()
+ else:
+ frappe.db.commit()
+
+ frappe.flags.mute_emails = False
+ frappe.flags.in_import = False
+
+ log_message = {"messages": import_log, "error": error_flag}
+ if data_import_doc:
+ data_import_doc.log_details = json.dumps(log_message)
+
+ import_status = None
+ if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
+ import_status = "Partially Successful"
+ # write the file with the faulty row
+ file_name = 'error_' + filename + file_extension
+ if file_extension == '.xlsx':
+ from frappe.utils.xlsxutils import make_xlsx
+ xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
+ file_data = xlsx_file.getvalue()
+ else:
+ from frappe.utils.csvutils import to_csv
+ file_data = to_csv(data_rows_with_error)
+ _file = frappe.get_doc({
+ "doctype": "File",
+ "file_name": file_name,
+ "attached_to_doctype": "Data Import Legacy",
+ "attached_to_name": data_import_doc.name,
+ "folder": "Home/Attachments",
+ "content": file_data})
+ _file.save()
+ data_import_doc.error_file = _file.file_url
+
+ elif error_flag:
+ import_status = "Failed"
+ else:
+ import_status = "Successful"
+
+ data_import_doc.import_status = import_status
+ data_import_doc.save()
+ if data_import_doc.import_status in ["Successful", "Partially Successful"]:
+ data_import_doc.submit()
+ publish_progress(100, True)
+ else:
+ publish_progress(0, True)
+ frappe.db.commit()
+ else:
+ return log_message
+
+def get_parent_field(doctype, parenttype):
+ parentfield = None
+
+ # get parentfield
+ if parenttype:
+ for d in frappe.get_meta(parenttype).get_table_fields():
+ if d.options==doctype:
+ parentfield = d.fieldname
+ break
+
+ if not parentfield:
+ frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype))
+ raise Exception
+
+ return parentfield
+
+def delete_child_rows(rows, doctype):
+	"""delete child rows for all parents"""
+	# rows are raw template rows; column 1 holds the parent document name.
+	# NOTE(review): `doctype` is interpolated into the table name — assumed to
+	# be a validated DocType name upstream; the parent value is parameterized.
+	for p in list(set([r[1] for r in rows])):
+		if p:
+			frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p)
diff --git a/frappe/core/doctype/data_import/log_details.html b/frappe/core/doctype/data_import_legacy/log_details.html
similarity index 100%
rename from frappe/core/doctype/data_import/log_details.html
rename to frappe/core/doctype/data_import_legacy/log_details.html
diff --git a/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py
new file mode 100644
index 0000000000..e5b244e6a0
--- /dev/null
+++ b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py
@@ -0,0 +1,10 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2020, Frappe Technologies and Contributors
+# See license.txt
+from __future__ import unicode_literals
+
+# import frappe
+import unittest
+
+class TestDataImportLegacy(unittest.TestCase):
+	# Placeholder: no unit tests for the legacy importer yet.
+	pass
diff --git a/frappe/core/doctype/docfield/docfield.json b/frappe/core/doctype/docfield/docfield.json
index 8e7516cd0a..aab59a5a0a 100644
--- a/frappe/core/doctype/docfield/docfield.json
+++ b/frappe/core/doctype/docfield/docfield.json
@@ -13,6 +13,8 @@
"fieldname",
"precision",
"length",
+ "hide_days",
+ "hide_seconds",
"reqd",
"search_index",
"in_list_view",
@@ -87,7 +89,7 @@
"label": "Type",
"oldfieldname": "fieldtype",
"oldfieldtype": "Select",
- "options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRead Only\nRating\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature",
+ "options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRead Only\nRating\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature",
"reqd": 1,
"search_index": 1
},
@@ -450,6 +452,20 @@
"fieldname": "column_break_38",
"fieldtype": "Column Break"
},
+ {
+ "default": "0",
+ "depends_on": "eval:doc.fieldtype=='Duration'",
+ "fieldname": "hide_days",
+ "fieldtype": "Check",
+ "label": "Hide Days"
+ },
+ {
+ "default": "0",
+ "depends_on": "eval:doc.fieldtype=='Duration'",
+ "fieldname": "hide_seconds",
+ "fieldtype": "Check",
+ "label": "Hide Seconds"
+ },
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Section Break'",
@@ -461,7 +477,7 @@
"idx": 1,
"istable": 1,
"links": [],
- "modified": "2020-04-27 11:38:21.223185",
+ "modified": "2020-06-11 09:06:25.224413",
"modified_by": "Administrator",
"module": "Core",
"name": "DocField",
diff --git a/frappe/core/doctype/doctype/doctype.py b/frappe/core/doctype/doctype/doctype.py
index 904deb9990..7f84555b79 100644
--- a/frappe/core/doctype/doctype/doctype.py
+++ b/frappe/core/doctype/doctype/doctype.py
@@ -406,9 +406,13 @@ class DocType(Document):
with open(fname, 'r') as f:
code = f.read()
with open(fname, 'w') as f:
- file_content = code.replace(old, new) # replace str with full str (js controllers)
- file_content = file_content.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
- file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
+ if fname.endswith('.js'):
+ file_content = code.replace(old, new) # replace str with full str (js controllers)
+
+ elif fname.endswith('.py'):
+ file_content = code.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
+ file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
+
f.write(file_content)
# updating json file with new name
@@ -688,6 +692,9 @@ def validate_fields(meta):
def check_link_table_options(docname, d):
if frappe.flags.in_patch: return
+
+ if frappe.flags.in_fixtures: return
+
if d.fieldtype in ("Link",) + table_fields:
if not d.options:
frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError)
@@ -908,6 +915,8 @@ def validate_fields(meta):
frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True)
def check_child_table_option(docfield):
+
+ if frappe.flags.in_fixtures: return
if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return
doctype = docfield.options
diff --git a/frappe/core/doctype/file/file.py b/frappe/core/doctype/file/file.py
index b35abfa861..831d2ab22d 100755
--- a/frappe/core/doctype/file/file.py
+++ b/frappe/core/doctype/file/file.py
@@ -48,6 +48,8 @@ class File(Document):
def before_insert(self):
frappe.local.rollback_observers.append(self)
self.set_folder_name()
+ if self.file_name:
+ self.file_name = re.sub(r'/', '', self.file_name)
self.content = self.get("content", None)
self.decode = self.get("decode", False)
if self.content:
@@ -180,11 +182,11 @@ class File(Document):
if duplicate_file:
duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name)
if duplicate_file_doc.exists_on_disk():
- # if it is attached to a document then throw DuplicateEntryError
+ # if it is attached to a document then throw FileAlreadyAttachedException
if self.attached_to_doctype and self.attached_to_name:
self.duplicate_entry = duplicate_file.name
frappe.throw(_("Same file has already been attached to the record"),
- frappe.DuplicateEntryError)
+ frappe.FileAlreadyAttachedException)
# else just use the url, to avoid uploading a duplicate
else:
self.file_url = duplicate_file.file_url
@@ -192,6 +194,8 @@ class File(Document):
def set_file_name(self):
if not self.file_name and self.file_url:
self.file_name = self.file_url.split('/')[-1]
+ else:
+ self.file_name = re.sub(r'/', '', self.file_name)
def generate_content_hash(self):
if self.content_hash or not self.file_url or self.file_url.startswith('http'):
@@ -405,6 +409,12 @@ class File(Document):
frappe.throw(_("URL must start with 'http://' or 'https://'"))
return
+ if not self.file_url.startswith(("http://", "https://")):
+ # local file
+ root_files_path = get_files_path(is_private=self.is_private)
+ if not os.path.commonpath([root_files_path]) == os.path.commonpath([root_files_path, self.get_full_path()]):
+ # reject file_url values whose resolved path escapes /files or /private/files (path traversal)
+ frappe.throw(_("{0} is not a valid file url").format(self.file_url))
self.file_url = unquote(self.file_url)
self.file_size = frappe.form_dict.file_size or self.file_size
@@ -704,7 +714,12 @@ def remove_all(dt, dn, from_delete=False):
try:
for fid in frappe.db.sql_list("""select name from `tabFile` where
attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)):
- remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
+ if from_delete:
+ # If the parent doc is being deleted, delete the File doc directly (no comment is added to the parent)
+ frappe.delete_doc("File", fid, ignore_permissions=True)
+ else:
+ # Removes file and adds a comment in the document it is attached to
+ remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
except Exception as e:
if e.args[0]!=1054: raise # (temp till for patched)
diff --git a/frappe/core/doctype/installed_applications/installed_applications.py b/frappe/core/doctype/installed_applications/installed_applications.py
index aa0401f368..4e6eadf07e 100644
--- a/frappe/core/doctype/installed_applications/installed_applications.py
+++ b/frappe/core/doctype/installed_applications/installed_applications.py
@@ -12,7 +12,7 @@ class InstalledApplications(Document):
for app in frappe.utils.get_installed_apps_info():
self.append("installed_applications", {
"app_name": app.get("app_name"),
- "app_version": app.get("version"),
- "git_branch": app.get("branch")
+ "app_version": app.get("version") or "UNVERSIONED",
+ "git_branch": app.get("branch") or "UNVERSIONED"
})
self.save()
\ No newline at end of file
diff --git a/frappe/core/doctype/scheduled_job_type/scheduled_job_type.py b/frappe/core/doctype/scheduled_job_type/scheduled_job_type.py
index c179054550..765ae5fe93 100644
--- a/frappe/core/doctype/scheduled_job_type/scheduled_job_type.py
+++ b/frappe/core/doctype/scheduled_job_type/scheduled_job_type.py
@@ -84,7 +84,7 @@ class ScheduledJobType(Document):
def log_status(self, status):
# log file
- frappe.logger(__name__).info('Scheduled Job {0}: {1} for {2}'.format(status, self.method, frappe.local.site))
+ frappe.logger("scheduler").info('Scheduled Job {0}: {1} for {2}'.format(status, self.method, frappe.local.site))
self.update_scheduler_log(status)
def update_scheduler_log(self, status):
diff --git a/frappe/core/doctype/session_default_settings/session_default_settings.py b/frappe/core/doctype/session_default_settings/session_default_settings.py
index 453ece2890..7b4bd19e9a 100644
--- a/frappe/core/doctype/session_default_settings/session_default_settings.py
+++ b/frappe/core/doctype/session_default_settings/session_default_settings.py
@@ -28,8 +28,7 @@ def get_session_default_values():
@frappe.whitelist()
def set_session_default_values(default_values):
- if not frappe.flags.in_test:
- default_values = json.loads(default_values)
+ default_values = frappe.parse_json(default_values)
for entry in default_values:
try:
frappe.defaults.set_user_default(entry, default_values.get(entry))
diff --git a/frappe/core/doctype/user/user.py b/frappe/core/doctype/user/user.py
index 0c5ebc3ede..7b9266ff64 100644
--- a/frappe/core/doctype/user/user.py
+++ b/frappe/core/doctype/user/user.py
@@ -4,7 +4,7 @@
from __future__ import unicode_literals, print_function
import frappe
from frappe.model.document import Document
-from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
+from frappe.utils import cint, flt, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
from frappe import throw, msgprint, _
from frappe.utils.password import update_password as _update_password
from frappe.desk.notifications import clear_notifications
@@ -841,11 +841,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters):
def get_total_users():
"""Returns total no. of system users"""
- return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
+ return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
FROM `tabUser`
WHERE `enabled` = 1
AND `user_type` = 'System User'
- AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]
+ AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0])
def get_system_users(exclude_users=None, limit=None):
if not exclude_users:
diff --git a/frappe/core/doctype/version/version.py b/frappe/core/doctype/version/version.py
index 216cdb1716..7654db4ae5 100644
--- a/frappe/core/doctype/version/version.py
+++ b/frappe/core/doctype/version/version.py
@@ -21,6 +21,17 @@ class Version(Document):
else:
return False
+ def for_insert(self, doc):
+ updater_reference = doc.flags.updater_reference
+ data = {
+ 'creation': doc.creation,
+ 'updater_reference': updater_reference,
+ 'created_by': doc.owner
+ }
+ self.ref_doctype = doc.doctype
+ self.docname = doc.name
+ self.data = frappe.as_json(data)
+
def get_data(self):
return json.loads(self.data)
diff --git a/frappe/core/page/dashboard/dashboard.css b/frappe/core/page/dashboard/dashboard.css
index e69de29bb2..b319cc1ed2 100644
--- a/frappe/core/page/dashboard/dashboard.css
+++ b/frappe/core/page/dashboard/dashboard.css
@@ -0,0 +1,5 @@
+.restricted-button {
+ cursor: default;
+ position: relative;
+ right: -5px;
+}
\ No newline at end of file
diff --git a/frappe/core/page/dashboard/dashboard.js b/frappe/core/page/dashboard/dashboard.js
index 0d1337351e..f17bc1e0b5 100644
--- a/frappe/core/page/dashboard/dashboard.js
+++ b/frappe/core/page/dashboard/dashboard.js
@@ -26,6 +26,13 @@ class Dashboard {
`).appendTo(this.wrapper.find(".page-content").empty());
this.container = this.wrapper.find(".dashboard-graph");
this.page = wrapper.page;
+
+ this.page.set_title_sub(
+ $(`