Merge branch 'develop' into currency_precison_fix

This commit is contained in:
Suraj Shetty 2020-06-24 08:56:05 +05:30 committed by GitHub
commit bc728dc857
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
334 changed files with 11698 additions and 7156 deletions

14
.github/workflows/docker-release.yml vendored Normal file
View file

@ -0,0 +1,14 @@
# GitHub Actions workflow: when a GitHub release is published, trigger a
# Travis CI build of the frappe_docker repository (master branch) via the
# Travis API v3 "requests" endpoint.
name: Trigger Docker build on release
on:
  release:
    types: [released]  # fires only for published releases (not drafts/pre-releases)
jobs:
  curl:
    runs-on: ubuntu-latest
    container:
      image: alpine:latest  # minimal image; curl/bash are installed in-step
    steps:
      - name: curl
        run: |
          apk add curl bash
          # TRAVIS_CI_TOKEN must be configured as a repository secret.
          curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests

View file

@ -25,6 +25,7 @@ cache:
# https://docs.cypress.io/guides/guides/continuous-integration.html#Caching
- ~/.cache
matrix:
include:
- name: "Python 3.7 MariaDB"
@ -46,7 +47,14 @@ matrix:
script: bench --site test_site run-ui-tests frappe --headless
before_install:
# install wkhtmltopdf
# do we really want to run travis?
- |
python ./.travis/roulette.py
if [[ $? != 2 ]];then
exit;
fi
# install wkhtmltopdf
- wget -O /tmp/wkhtmltox.tar.xz https://github.com/frappe/wkhtmltopdf/raw/master/wkhtmltox-0.12.3_linux-generic-amd64.tar.xz
- tar -xf /tmp/wkhtmltox.tar.xz -C /tmp
- sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf

54
.travis/roulette.py Normal file
View file

@ -0,0 +1,54 @@
# if the script ends with exit code 0, then no tests are run further, else all tests are run
import os
import re
import shlex
import subprocess
import sys
def get_output(command, shell=True):
	"""Run *command* and return its stripped stdout as text.

	``command`` is a single string. When ``shell`` is falsy it is tokenised
	with ``shlex.split`` and executed directly; when ``shell`` is truthy the
	string is handed to the shell unchanged.

	Fix: the original always split the string, which breaks ``shell=True`` —
	``subprocess`` with a list *and* ``shell=True`` runs only the first token.

	Raises ``subprocess.CalledProcessError`` on a non-zero exit status.
	"""
	print(command)
	if not shell:
		# Direct exec path: subprocess needs an argv list here.
		command = shlex.split(command)
	return subprocess.check_output(command, shell=shell, encoding="utf8").strip()
def is_py(file):
	"""Return True if *file* is a Python source file.

	Fix: check for the full ".py" extension — the original ``endswith("py")``
	also matched unrelated names such as "happy".
	"""
	return file.endswith(".py")
def is_js(file):
	"""Return True if *file* is a JavaScript source file.

	Fix: check for the full ".js" extension — the original ``endswith("js")``
	also matched unrelated names such as "nodejs".
	"""
	return file.endswith(".js")
def is_docs(file):
	"""Return True if *file* is documentation/meta content (no tests needed).

	Matches markdown/image extensions, anything under the ".github"
	directory, and LICENSE.

	Fix: raw string with an escaped dot — the original pattern ``'^.github'``
	let ``.`` match any first character, so e.g. "xgithub/..." also matched.
	"""
	regex = re.compile(r'\.(md|png|jpg|jpeg)$|^\.github|LICENSE')
	return bool(regex.search(file))
if __name__ == "__main__":
build_type = os.environ.get("TYPE")
commit_range = os.environ.get("TRAVIS_COMMIT_RANGE")
print("Build Type: {}".format(build_type))
print("Commit Range: {}".format(commit_range))
try:
files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
except Exception:
sys.exit(2)
if "fatal" not in files_changed:
files_list = files_changed.split()
only_docs_changed = len(list(filter(is_docs, files_list))) == len(files_list)
only_js_changed = len(list(filter(is_js, files_list))) == len(files_list)
only_py_changed = len(list(filter(is_py, files_list))) == len(files_list)
if only_docs_changed:
print("Only docs were updated, stopping build process.")
sys.exit(0)
if only_js_changed and build_type == "server":
print("Only JavaScript code was updated; Stopping Python build process.")
sys.exit(0)
if only_py_changed and build_type == "ui":
print("Only Python code was updated, stopping Cypress build process.")
sys.exit(0)
sys.exit(2)

View file

@ -1,7 +1,7 @@
<div align="center">
<img src=".github/frappe-framework-logo.png" height="150">
<h1>
<a href="https://frappe.io">
<a href="https://frappeframework.com">
frappe
</a>
</h1>
@ -33,8 +33,8 @@
Full-stack web application framework that uses Python and MariaDB on the server side and a tightly integrated client side library. Built for [ERPNext](https://erpnext.com)
### Table of Contents
* [Installation](#installation)
* [Documentation](https://frappe.io/docs)
* [Installation](https://frappeframework.com/docs/user/en/installation)
* [Documentation](https://frappeframework.com/docs)
* [License](#license)
### Installation
@ -49,7 +49,7 @@ Full-stack web application framework that uses Python and MariaDB on the server
### Website
For details and documentation, see the website
[https://frappe.io](https://frappe.io)
[https://frappeframework.com](https://frappeframework.com)
### License
This repository has been released under the [MIT License](LICENSE).

View file

@ -2,6 +2,6 @@
"baseUrl": "http://test_site_ui:8000",
"projectId": "92odwv",
"adminPassword": "admin",
"defaultCommandTimeout": 10000,
"defaultCommandTimeout": 20000,
"pageLoadTimeout": 15000
}

View file

@ -0,0 +1,45 @@
// Cypress UI tests for the "Duration" form control.
context('Control Duration', () => {
	before(() => {
		cy.login();
		cy.visit('/desk#workspace/Website');
	});

	// Opens a test dialog containing a single Duration field.
	// hide_days / hide_seconds (0/1 flags) map onto the field's options.
	function get_dialog_with_duration(hide_days=0, hide_seconds=0) {
		return cy.dialog({
			title: 'Duration',
			fields: [{
				'fieldname': 'duration',
				'fieldtype': 'Duration',
				'hide_days': hide_days,
				'hide_seconds': hide_seconds
			}]
		});
	}

	it('should set duration', () => {
		get_dialog_with_duration().as('dialog');

		// Focus the control to open the duration picker.
		cy.get('.frappe-control[data-fieldname=duration] input')
			.first()
			.click();
		// force: true — the picker inputs may be animating/partially covered.
		cy.get('.duration-input[data-duration=days]')
			.type(45, {force: true})
			.blur({force: true});
		cy.get('.duration-input[data-duration=minutes]')
			.type(30)
			.blur({force: true});

		// Display value is the human-readable form; blurring closes the picker.
		cy.get('.frappe-control[data-fieldname=duration] input').first().should('have.value', '45d 30m');
		cy.get('.frappe-control[data-fieldname=duration] input').first().blur();
		cy.get('.duration-picker').should('not.be.visible');

		cy.get('@dialog').then(dialog => {
			let value = dialog.get_value('duration');
			// Stored value is total seconds: 45*86400 + 30*60 = 3889800.
			expect(value).to.equal(3889800);
		});
	});

	it('should hide days or seconds according to duration options', () => {
		get_dialog_with_duration(1, 1).as('dialog');
		cy.get('.frappe-control[data-fieldname=duration] input').first().click();
		// With both flags set, the days and seconds inputs must stay hidden.
		cy.get('.duration-input[data-duration=days]').should('not.be.visible');
		cy.get('.duration-input[data-duration=seconds]').should('not.be.visible');
	});
});

View file

@ -1,7 +1,11 @@
context('Control Link', () => {
beforeEach(() => {
before(() => {
cy.login();
cy.visit('/desk#workspace/Website');
});
beforeEach(() => {
cy.visit('/desk#workspace/Website');
cy.create_records({
doctype: 'ToDo',
description: 'this is a test todo for link'
@ -30,7 +34,7 @@ context('Control Link', () => {
cy.get('.frappe-control[data-fieldname=link] input').focus().as('input');
cy.wait('@search_link');
cy.get('@input').type('todo for link');
cy.get('@input').type('todo for link', { delay: 200 });
cy.wait('@search_link');
cy.get('.frappe-control[data-fieldname=link] ul').should('be.visible');
cy.get('.frappe-control[data-fieldname=link] input').type('{enter}', { delay: 100 });

View file

@ -9,6 +9,7 @@ context('Form', () => {
it('create a new form', () => {
cy.visit('/desk#Form/ToDo/New ToDo 1');
cy.fill_field('description', 'this is a test todo', 'Text Editor').blur();
cy.wait(300);
cy.get('.page-title').should('contain', 'Not Saved');
cy.server();
cy.route({

View file

@ -40,12 +40,12 @@ context('Grid Pagination', () => {
cy.get('@table').find('.current-page-number').should('contain', '20');
cy.get('@table').find('.total-page-number').should('contain', '20');
});
it('deletes all rows', ()=> {
cy.visit('/desk#Form/Contact/Test Contact');
cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
cy.get('@table').find('button.grid-remove-all-rows').click();
cy.get('.modal-dialog .btn-primary').contains('Yes').click();
cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
});
// it('deletes all rows', ()=> {
// cy.visit('/desk#Form/Contact/Test Contact');
// cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
// cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
// cy.get('@table').find('button.grid-remove-all-rows').click();
// cy.get('.modal-dialog .btn-primary').contains('Yes').click();
// cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
// });
});

View file

@ -1,7 +1,6 @@
context('Relative Timeframe', () => {
beforeEach(() => {
cy.login();
cy.visit('/desk#workspace/Website');
});
before(() => {
cy.login();
@ -10,14 +9,14 @@ context('Relative Timeframe', () => {
frappe.call("frappe.tests.ui_test_helpers.create_todo_records");
});
});
it('set relative filter for Previous and check list', () => {
it('sets relative timespan filter for last week and filters list', () => {
cy.visit('/desk#List/ToDo/List');
cy.get('.list-row:contains("this is fourth todo")').should('exist');
cy.get('.tag-filters-area .btn:contains("Add Filter")').click();
cy.get('.fieldname-select-area').should('exist');
cy.get('.fieldname-select-area input').type("Due Date{enter}", { delay: 100 });
cy.get('select.condition.form-control').select("Previous");
cy.get('.filter-field select.input-with-feedback.form-control').select("1 week");
cy.get('select.condition.form-control').select("Timespan");
cy.get('.filter-field select.input-with-feedback.form-control').select("last week");
cy.server();
cy.route('POST', '/api/method/frappe.desk.reportview.get').as('list_refresh');
cy.get('.filter-box .btn:contains("Apply")').click();
@ -29,13 +28,13 @@ context('Relative Timeframe', () => {
cy.get('.remove-filter.btn').click();
cy.wait('@save_user_settings');
});
it('set relative filter for Next and check list', () => {
it('sets relative timespan filter for next week and filters list', () => {
cy.visit('/desk#List/ToDo/List');
cy.get('.list-row:contains("this is fourth todo")').should('exist');
cy.get('.tag-filters-area .btn:contains("Add Filter")').click();
cy.get('.fieldname-select-area input').type("Due Date{enter}", { delay: 100 });
cy.get('select.condition.form-control').select("Next");
cy.get('.filter-field select.input-with-feedback.form-control').select("1 week");
cy.get('select.condition.form-control').select("Timespan");
cy.get('.filter-field select.input-with-feedback.form-control').select("next week");
cy.server();
cy.route('POST', '/api/method/frappe.desk.reportview.get').as('list_refresh');
cy.get('.filter-box .btn:contains("Apply")').click();

View file

@ -231,9 +231,8 @@ def get_site_config(sites_path=None, site_path=None):
if os.path.exists(site_config):
config.update(get_file_json(site_config))
elif local.site and not local.flags.new_site:
print("{0} does not exist".format(local.site))
print("Site {0} does not exist".format(local.site))
sys.exit(1)
#raise IncorrectSitePath, "{0} does not exist".format(site_config)
return _dict(config)
@ -1146,8 +1145,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
"""Import a file using Data Import."""
from frappe.core.doctype.data_import import data_import
data_import.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
from frappe.core.doctype.data_import.data_import import import_doc
import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""
@ -1559,10 +1558,10 @@ def get_doctype_app(doctype):
loggers = {}
log_level = None
def logger(module=None, with_more_info=True):
def logger(module=None, with_more_info=False):
'''Returns a python logger that uses StreamHandler'''
from frappe.utils.logger import get_logger
return get_logger(module or 'default', with_more_info=with_more_info)
return get_logger(module=module, with_more_info=with_more_info)
def log_error(message=None, title=_("Error")):
'''Log error to Error Log'''

View file

@ -99,6 +99,16 @@ def application(request):
frappe.monitor.stop(response)
frappe.recorder.dump()
frappe.logger("frappe.web").info({
"site": get_site_name(request.host),
"remote_addr": getattr(request, "remote_addr", "NOTFOUND"),
"base_url": getattr(request, "base_url", "NOTFOUND"),
"full_path": getattr(request, "full_path", "NOTFOUND"),
"method": getattr(request, "method", "NOTFOUND"),
"scheme": getattr(request, "scheme", "NOTFOUND"),
"http_status_code": getattr(response, "status_code", "NOTFOUND")
})
if response and hasattr(frappe.local, 'rate_limiter'):
response.headers.extend(frappe.local.rate_limiter.headers())
@ -195,7 +205,6 @@ def handle_exception(e):
frappe.local.login_manager.clear_cookies()
if http_status_code >= 500:
frappe.logger().error('Request Error', exc_info=True)
make_error_snapshot(e)
if return_as_message:

View file

@ -44,7 +44,7 @@ class AssignmentRule(Document):
user = self.get_user()
assign_to.add(dict(
assign_to = user,
assign_to = [user],
doctype = doc.get('doctype'),
name = doc.get('name'),
description = frappe.render_template(self.description, doc),

View file

@ -19,6 +19,7 @@ from frappe.email.inbox import get_email_accounts
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.website.doctype.web_page_view.web_page_view import is_tracking_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import get_energy_points
from frappe.model.base_document import get_controller
from frappe.social.doctype.post.post import frequently_visited_links
def get_bootinfo():
@ -84,6 +85,7 @@ def get_bootinfo():
bootinfo.points = get_energy_points(frappe.session.user)
bootinfo.frequently_visited_links = frequently_visited_links()
bootinfo.link_preview_doctypes = get_link_preview_doctypes()
bootinfo.additional_filters_config = get_additional_filters_from_hooks()
return bootinfo
@ -106,6 +108,8 @@ def load_desktop_data(bootinfo):
from frappe.desk.desktop import get_desk_sidebar_items
bootinfo.allowed_modules = get_modules_from_all_apps_for_user()
bootinfo.allowed_workspaces = get_desk_sidebar_items(True)
bootinfo.module_page_map = get_controller("Desk Page").get_module_page_map()
bootinfo.dashboards = frappe.get_all("Dashboard")
def get_allowed_pages(cache=False):
return get_user_pages_or_reports('Page', cache=cache)
@ -294,3 +298,11 @@ def get_link_preview_doctypes():
link_preview_doctypes.append(custom.doc_type)
return link_preview_doctypes
def get_additional_filters_from_hooks():
filter_config = frappe._dict()
filter_hooks = frappe.get_hooks('filters_config')
for hook in filter_hooks:
filter_config.update(frappe.get_attr(hook)())
return filter_config

View file

@ -24,7 +24,7 @@ user_cache_keys = ("bootinfo", "user_recent", "roles", "user_doc", "lang",
"has_role:Page", "has_role:Report")
doctype_cache_keys = ("meta", "form_meta", "table_columns", "last_modified",
"linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map')
"linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map', 'data_import_column_header_map')
def clear_user_cache(user=None):

View file

@ -22,7 +22,11 @@ def pass_context(f):
pr = cProfile.Profile()
pr.enable()
ret = f(frappe._dict(ctx.obj), *args, **kwargs)
try:
ret = f(frappe._dict(ctx.obj), *args, **kwargs)
except frappe.exceptions.SiteNotSpecifiedError as e:
click.secho(str(e), fg='yellow')
sys.exit(1)
if profile:
pr.disable()
@ -39,13 +43,14 @@ def pass_context(f):
return click.pass_context(_func)
def get_site(context):
def get_site(context, raise_err=True):
try:
site = context.sites[0]
return site
except (IndexError, TypeError):
print('Please specify --site sitename')
sys.exit(1)
if raise_err:
raise frappe.SiteNotSpecifiedError
return None
def popen(command, *args, **kwargs):
output = kwargs.get('output', True)

View file

@ -4,6 +4,7 @@ import sys
import frappe
from frappe.utils import cint
from frappe.commands import pass_context, get_site
from frappe.exceptions import SiteNotSpecifiedError
def _is_scheduler_enabled():
enable_scheduler = False
@ -30,6 +31,8 @@ def trigger_scheduler_event(context, event):
frappe.utils.scheduler.trigger(site, event, now=True)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('enable-scheduler')
@pass_context
@ -45,6 +48,8 @@ def enable_scheduler(context):
print("Enabled for", site)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('disable-scheduler')
@pass_context
@ -60,7 +65,8 @@ def disable_scheduler(context):
print("Disabled for", site)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('scheduler')
@ -120,7 +126,7 @@ def doctor(context, site=None):
"Get diagnostic info about background workers"
from frappe.utils.doctor import doctor as _doctor
if not site:
site = get_site(context)
site = get_site(context, raise_err=False)
return _doctor(site=site)
@click.command('show-pending-jobs')

View file

@ -15,6 +15,7 @@ import frappe
from frappe import _
from frappe.commands import get_site, pass_context
from frappe.commands.scheduler import _is_scheduler_enabled
from frappe.exceptions import SiteNotSpecifiedError
from frappe.installer import update_site_config
from frappe.utils import get_site_path, touch_file
@ -43,14 +44,16 @@ def new_site(site, mariadb_root_username=None, mariadb_root_password=None, admin
_new_site(db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=verbose, install_apps=install_app, source_sql=source_sql, force=force,
no_mariadb_socket=no_mariadb_socket, db_password=db_password, db_type=db_type, db_host=db_host, db_port=db_port)
no_mariadb_socket=no_mariadb_socket, db_password=db_password, db_type=db_type, db_host=db_host,
db_port=db_port, new_site=True)
if len(frappe.utils.get_sites()) == 1:
use(site)
def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=None,
admin_password=None, verbose=False, install_apps=None, source_sql=None, force=False,
no_mariadb_socket=False, reinstall=False, db_password=None, db_type=None, db_host=None, db_port=None):
no_mariadb_socket=False, reinstall=False, db_password=None, db_type=None, db_host=None,
db_port=None, new_site=False):
"""Install a new Frappe site"""
if not force and os.path.exists(site):
@ -79,7 +82,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
make_site_dirs()
installing = touch_file(get_site_path('locks', 'installing.lock'))
atexit.register(_new_site_cleanup, site, mariadb_root_username, mariadb_root_password)
install_db(root_login=mariadb_root_username, root_password=mariadb_root_password, db_name=db_name,
admin_password=admin_password, verbose=verbose, source_sql=source_sql, force=force, reinstall=reinstall,
@ -96,15 +98,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
scheduler_status = "disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled"
print("*** Scheduler is", scheduler_status, "***")
def _new_site_cleanup(site, mariadb_root_username, mariadb_root_password):
installing = get_site_path('locks', 'installing.lock')
if installing and os.path.exists(installing):
if mariadb_root_password:
_drop_site(site, mariadb_root_username, mariadb_root_password, force=True, no_backup=True)
shutil.rmtree(site)
frappe.destroy()
@click.command('restore')
@click.argument('sql-file-path')
@ -122,30 +115,47 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
if not os.path.exists(sql_file_path):
sql_file_path = '../' + sql_file_path
base_path = '..'
sql_file_path = os.path.join(base_path, sql_file_path)
if not os.path.exists(sql_file_path):
print('Invalid path {0}'.format(sql_file_path[3:]))
sys.exit(1)
elif sql_file_path.startswith(os.sep):
base_path = os.sep
else:
base_path = '.'
if sql_file_path.endswith('sql.gz'):
sql_file_path = extract_sql_gzip(os.path.abspath(sql_file_path))
decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path))
else:
decompressed_file_name = sql_file_path
site = get_site(context)
frappe.init(site=site)
_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=context.verbose, install_apps=install_app, source_sql=sql_file_path,
force=context.force)
verbose=context.verbose, install_apps=install_app, source_sql=decompressed_file_name,
force=True)
# Extract public and/or private files to the restored site, if user has given the path
if with_public_files:
with_public_files = os.path.join(base_path, with_public_files)
public = extract_tar_files(site, with_public_files, 'public')
os.remove(public)
if with_private_files:
with_private_files = os.path.join(base_path, with_private_files)
private = extract_tar_files(site, with_private_files, 'private')
os.remove(private)
# Removing temporarily created file
if decompressed_file_name != sql_file_path:
os.remove(decompressed_file_name)
success_message = "Site {0} has been restored{1}".format(site, " with files" if (with_public_files or with_private_files) else "")
click.secho(success_message, fg="green")
@click.command('reinstall')
@click.option('--admin-password', help='Administrator Password for reinstalled site')
@click.option('--mariadb-root-username', help='Root username for MariaDB')
@ -192,6 +202,8 @@ def install_app(context, apps):
_install_app(app, verbose=context.verbose)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('list-apps')
@pass_context
@ -221,7 +233,8 @@ def add_system_manager(context, email, first_name, last_name, send_welcome_email
frappe.db.commit()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('disable-user')
@click.argument('email')
@ -252,6 +265,8 @@ def migrate(context, rebuild_website=False, skip_failing=False):
migrate(context.verbose, rebuild_website=rebuild_website, skip_failing=skip_failing)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
print("Compiling Python Files...")
compileall.compile_dir('../apps', quiet=1, rx=re.compile('.*node_modules.*'))
@ -263,7 +278,12 @@ def migrate_to(context, frappe_provider):
"Migrates site to the specified provider"
from frappe.integrations.frappe_providers import migrate_to
for site in context.sites:
frappe.init(site=site)
frappe.connect()
migrate_to(site, frappe_provider)
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('run-patch')
@click.argument('module')
@ -278,6 +298,8 @@ def run_patch(context, module):
frappe.modules.patch_handler.run_single(module, force=context.force)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('reload-doc')
@click.argument('module')
@ -294,6 +316,8 @@ def reload_doc(context, module, doctype, docname):
frappe.db.commit()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('reload-doctype')
@click.argument('doctype')
@ -308,6 +332,8 @@ def reload_doctype(context, doctype):
frappe.db.commit()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('add-to-hosts')
@pass_context
@ -315,6 +341,8 @@ def add_to_hosts(context):
"Add site to hosts"
for site in context.sites:
frappe.commands.popen('echo 127.0.0.1\t{0} | sudo tee -a /etc/hosts'.format(site))
if not context.sites:
raise SiteNotSpecifiedError
@click.command('use')
@click.argument('site')
@ -328,7 +356,7 @@ def use(site, sites_path='.'):
sitefile.write(site)
print("Current Site set to {}".format(site))
else:
print("{} does not exist".format(site))
print("Site {} does not exist".format(site))
@click.command('backup')
@click.option('--with-files', default=False, is_flag=True, help="Take backup with files")
@ -361,6 +389,9 @@ def backup(context, with_files=False, backup_path_db=None, backup_path_files=Non
print("Private files: ", odb.backup_path_private_files)
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
sys.exit(exit_code)
@click.command('remove-from-installed-apps')
@ -376,22 +407,27 @@ def remove_from_installed_apps(context, app):
remove_from_installed_apps(app)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('uninstall-app')
@click.argument('app')
@click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True)
@click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False)
@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False)
@pass_context
def uninstall(context, app, dry_run=False, yes=False):
def uninstall(context, app, dry_run=False, yes=False, no_backup=False):
"Remove app and linked modules from site"
from frappe.installer import remove_app
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
remove_app(app, dry_run, yes)
remove_app(app, dry_run, yes, no_backup)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('drop-site')
@ -422,7 +458,7 @@ def _drop_site(site, root_login='root', root_password=None, archived_sites_path=
else:
click.echo("="*80)
click.echo("Error: The operation has stopped because backup of {s}'s database failed.".format(s=site))
click.echo("Reason: {reason}{sep}".format(reason=err[1], sep="\n"))
click.echo("Reason: {reason}{sep}".format(reason=str(err), sep="\n"))
click.echo("Fix the issue and try again.")
click.echo(
"Hint: Use 'bench drop-site {s} --force' to force the removal of {s}".format(sep="\n", tab="\t", s=site)
@ -483,6 +519,8 @@ def set_admin_password(context, admin_password, logout_all_sessions=False):
admin_password = None
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('set-last-active-for-user')
@click.option('--user', help="Setup last active date for user")
@ -528,6 +566,8 @@ def publish_realtime(context, event, message, room, user, doctype, docname, afte
frappe.db.commit()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('browse')
@click.argument('site', required=False)
@ -555,6 +595,8 @@ def start_recording(context):
for site in context.sites:
frappe.init(site=site)
frappe.recorder.start()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('stop-recording')
@ -563,6 +605,8 @@ def stop_recording(context):
for site in context.sites:
frappe.init(site=site)
frappe.recorder.stop()
if not context.sites:
raise SiteNotSpecifiedError
commands = [

View file

@ -1,6 +1,7 @@
from __future__ import unicode_literals, absolute_import, print_function
import click
from frappe.commands import pass_context, get_site
from frappe.exceptions import SiteNotSpecifiedError
# translation
@click.command('build-message-files')
@ -15,6 +16,8 @@ def build_message_files(context):
frappe.translate.rebuild_all_translation_files()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('new-language') #, help="Create lang-code.csv for given app")
@pass_context

View file

@ -6,6 +6,7 @@ import json, os, sys, subprocess
from distutils.spawn import find_executable
import frappe
from frappe.commands import pass_context, get_site
from frappe.exceptions import SiteNotSpecifiedError
from frappe.utils import update_progress_bar, get_bench_path
from frappe.utils.response import json_handler
from coverage import Coverage
@ -51,7 +52,8 @@ def clear_cache(context):
frappe.website.render.clear_cache()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('clear-website-cache')
@pass_context
@ -65,7 +67,8 @@ def clear_website_cache(context):
frappe.website.render.clear_cache()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('destroy-all-sessions')
@click.option('--reason')
@ -81,7 +84,8 @@ def destroy_all_sessions(context, reason=None):
frappe.db.commit()
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('show-config')
@pass_context
@ -117,7 +121,8 @@ def reset_perms(context):
reset_perms(d)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('execute')
@click.argument('method')
@ -164,6 +169,9 @@ def execute(context, method, args=None, kwargs=None, profile=False):
if ret:
print(json.dumps(ret, default=json_handler))
if not context.sites:
raise SiteNotSpecifiedError
@click.command('add-to-email-queue')
@click.argument('email-path')
@ -197,7 +205,8 @@ def export_doc(context, doctype, docname):
frappe.modules.export_doc(doctype, docname)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('export-json')
@click.argument('doctype')
@ -206,15 +215,16 @@ def export_doc(context, doctype, docname):
@pass_context
def export_json(context, doctype, path, name=None):
"Export doclist as json to the given path, use '-' as name for Singles."
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_json
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_json(doctype, path, name=name)
export_json(doctype, path, name=name)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('export-csv')
@click.argument('doctype')
@ -222,15 +232,16 @@ def export_json(context, doctype, path, name=None):
@pass_context
def export_csv(context, doctype, path):
"Export data import template with data for DocType"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_csv
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_csv(doctype, path)
export_csv(doctype, path)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('export-fixtures')
@click.option('--app', default=None, help='Export fixtures of a specific app')
@ -245,14 +256,15 @@ def export_fixtures(context, app=None):
export_fixtures(app=app)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('import-doc')
@click.argument('path')
@pass_context
def import_doc(context, path, force=False):
"Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import import_doc
if not os.path.exists(path):
path = os.path.join('..', path)
@ -264,10 +276,11 @@ def import_doc(context, path, force=False):
try:
frappe.init(site=site)
frappe.connect()
data_import.import_doc(path, overwrite=context.force)
import_doc(path, overwrite=context.force)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('import-csv')
@click.argument('path')
@ -280,7 +293,7 @@ def import_doc(context, path, force=False):
@pass_context
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
"Import CSV using data import"
from frappe.core.doctype.data_import import importer
from frappe.core.doctype.data_import_legacy import importer
from frappe.utils.csvutils import read_csv_content
site = get_site(context)
@ -316,20 +329,12 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
"Import documents in bulk from CSV or XLSX using data import"
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.data_import import import_file
site = get_site(context)
frappe.init(site=site)
frappe.connect()
data_import = frappe.new_doc('Data Import Beta')
data_import.submit_after_import = submit_after_import
data_import.mute_emails = mute_emails
data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records'
i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True)
i.import_data()
import_file(doctype, file_path, import_type, submit_after_import, console=True)
frappe.destroy()
@ -364,6 +369,8 @@ def mariadb(context):
import os
site = get_site(context)
if not site:
raise SiteNotSpecifiedError
frappe.init(site=site)
# This is assuming you're within the bench instance.
@ -487,7 +494,17 @@ def run_tests(context, app=None, module=None, doctype=None, test=(),
if coverage:
# Generate coverage report only for app that is being tested
source_path = os.path.join(get_bench_path(), 'apps', app or 'frappe')
cov = Coverage(source=[source_path], omit=['*.html', '*.js', '*.xml', '*.css', '*/doctype/*/*_dashboard.py', '*/patches/*'])
cov = Coverage(source=[source_path], omit=[
'*.html',
'*.js',
'*.xml',
'*.css',
'*.less',
'*.scss',
'*.vue',
'*/doctype/*/*_dashboard.py',
'*/patches/*'
])
cov.start()
ret = frappe.test_runner.main(app, module, doctype, context.verbose, tests=tests,
@ -577,7 +594,8 @@ def request(context, args=None, path=None):
print(frappe.response)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('make-app')
@click.argument('destination')
@ -658,7 +676,8 @@ def rebuild_global_search(context, static_pages=False):
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@click.command('auto-deploy')
@click.argument('app')

View file

@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", {
});
frm.refresh_field("links");
let numbers = frm.doc.phone_nos;
if (numbers && numbers.length && frappe.phone_call.handler) {
frm.add_custom_button(__('Call'), () => {
numbers = frm.doc.phone_nos
.sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no)
.map(d => d.phone);
frappe.phone_call.handler(numbers);
});
}
if (frm.doc.links) {
frappe.call({
method: "frappe.contacts.doctype.contact.contact.address_query",

View file

@ -3,7 +3,7 @@
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, has_gravatar
from frappe.utils import cstr, has_gravatar, cint
from frappe import _
from frappe.model.document import Document
from frappe.core.doctype.dynamic_link.dynamic_link import deduplicate_dynamic_links
@ -133,7 +133,7 @@ def get_default_contact(doctype, name):
dl.parenttype = "Contact"''', (doctype, name))
if out:
return sorted(out, key = functools.cmp_to_key(lambda x,y: cmp(y[1], x[1])))[0][0]
return sorted(out, key = functools.cmp_to_key(lambda x,y: cmp(cint(y[1]), cint(x[1]))))[0][0]
else:
return None

View file

@ -158,11 +158,7 @@ class TestAccessLog(unittest.TestCase):
request = requests.post(private_file_link, headers=self.header)
last_doc = frappe.get_last_doc('Access Log')
if request.status_code == 403:
# if file is not accessible, access log wont be generated
pass
else:
if request.ok:
# check for the access log of downloaded file
self.assertEqual(new_private_file.doctype, last_doc.export_from)
self.assertEqual(new_private_file.name, last_doc.reference_document)

View file

@ -2,20 +2,21 @@
# MIT License. See license.txt
from __future__ import unicode_literals, absolute_import
from collections import Counter
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import validate_email_address, get_fullname, strip_html, cstr
from frappe.core.doctype.communication.email import (validate_email,
notify, _notify, update_parent_mins_to_first_response)
from frappe.utils import validate_email_address, strip_html, cstr, time_diff_in_seconds
from frappe.core.doctype.communication.email import validate_email, notify, _notify
from frappe.core.utils import get_parent_doc
from frappe.utils.bot import BotReply
from frappe.utils import parse_addr
from frappe.core.doctype.comment.comment import update_comment_in_doc
from email.utils import parseaddr
from six.moves.urllib.parse import unquote
from collections import Counter
from frappe.utils.user import is_system_user
from frappe.contacts.doctype.contact.contact import get_contact_name
from frappe.automation.doctype.assignment_rule.assignment_rule import apply as apply_assignment_rule
exclude_from_linked_with = True
@ -119,7 +120,7 @@ class Communication(Document):
update_comment_in_doc(self)
if self.comment_type != 'Updated':
update_parent_mins_to_first_response(self)
update_parent_document_on_communication(self)
self.bot_reply()
def on_trash(self):
@ -258,7 +259,12 @@ class Communication(Document):
# Timeline Links
def set_timeline_links(self):
contacts = get_contacts([self.sender, self.recipients, self.cc, self.bcc])
contacts = []
if (self.email_account and frappe.db.get_value("Email Account", self.email_account, "create_contact")) or \
frappe.flags.in_test:
contacts = get_contacts([self.sender, self.recipients, self.cc, self.bcc])
for contact_name in contacts:
self.add_link('Contact', contact_name)
@ -423,3 +429,63 @@ def get_email_without_link(email):
email_host = email.split("@")[1]
return "{0}@{1}".format(email_id, email_host)
def update_parent_document_on_communication(doc):
	"""Propagate a newly created communication to its parent document.

	Re-opens the parent when a reply comes in on a "Replied"-style workflow,
	refreshes response-time metrics, and notifies listeners. Plain Comments
	are ignored — only real email communications count.
	"""
	parent = get_parent_doc(doc)
	# Nothing to do without a parent, or when only a Comment was added.
	if not parent or doc.communication_type == "Comment":
		return

	status_df = parent.meta.get_field("status")
	if status_df:
		status_options = (status_df.options or "").splitlines()
		is_received_reply = doc.sent_or_received == "Received" and "Replied" in status_options
		if is_received_reply:
			# The parent's status field has a "Replied" option: an incoming
			# reply re-opens the document and re-applies assignment rules.
			parent.db_set("status", "Open")
			parent.run_method("handle_hold_time", "Replied")
			apply_assignment_rule(parent)
		else:
			# Otherwise just bump the parent's modified timestamp.
			parent.update_modified()

	update_mins_to_first_communication(parent, doc)
	set_avg_response_time(parent, doc)
	parent.run_method("notify_communication", doc)
	parent.notify_update()
def update_mins_to_first_communication(parent, communication):
	"""Record how long the parent document waited for its first response.

	Sets ``mins_to_first_response`` (and ``first_responded_on`` where the
	field exists) on the parent. Only the first reply counts — nothing is
	done once ``mins_to_first_response`` is already set — and only replies
	from system users are considered.

	Args:
		parent: the document the communication belongs to.
		communication: the Communication doc that was just created.
	"""
	if parent.meta.has_field("mins_to_first_response") and not parent.get("mins_to_first_response"):
		if is_system_user(communication.sender):
			first_responded_on = communication.creation
			if parent.meta.has_field("first_responded_on") and communication.sent_or_received == "Sent":
				parent.db_set("first_responded_on", first_responded_on)
			# BUGFIX: the precision argument belongs to round(), not db_set().
			# The original closed round() before the ", 2", so 2 was passed as
			# db_set's third positional argument (update_modified) and the
			# value was silently truncated to a whole number of minutes.
			parent.db_set(
				"mins_to_first_response",
				round(time_diff_in_seconds(first_responded_on, parent.creation) / 60, 2),
			)
def set_avg_response_time(parent, communication):
	"""Compute and store the average response time on the parent document.

	A "response" is a Sent communication that immediately follows a Received
	one (in creation order). The mean of those positive gaps, in seconds,
	is written to the parent's ``avg_response_time`` field if it has one.

	Args:
		parent: the document the communication belongs to.
		communication: the Communication doc that was just created; metrics
			are only recalculated when it is an outgoing ("Sent") message.
	"""
	if parent.meta.has_field("avg_response_time") and communication.sent_or_received == "Sent":
		# All communications on this document, oldest first.
		communications = frappe.get_list("Communication", filters={
				"reference_doctype": parent.doctype,
				"reference_name": parent.name
			},
			fields=["sent_or_received", "name", "creation"],
			order_by="creation"
		)
		if communications:
			response_times = []
			# BUGFIX: start the loop at 1. The original ranged from 0, so
			# communications[i-1] wrapped around (negative indexing) to the
			# *last* entry, pairing the oldest message with the newest —
			# usually a negative, discarded gap, but a wrong positive one
			# whenever the ordering allowed it.
			for i in range(1, len(communications)):
				if communications[i].sent_or_received == "Sent" and communications[i - 1].sent_or_received == "Received":
					response_time = round(
						time_diff_in_seconds(communications[i].creation, communications[i - 1].creation), 2)
					if response_time > 0:
						response_times.append(response_time)
			if response_times:
				avg_response_time = sum(response_times) / len(response_times)
				parent.db_set("avg_response_time", avg_response_time)

View file

@ -9,7 +9,7 @@ import json
from email.utils import formataddr
from frappe.core.utils import get_parent_doc
from frappe.utils import (get_url, get_formatted_email, cint,
validate_email_address, split_emails, time_diff_in_seconds, parse_addr, get_datetime)
validate_email_address, split_emails, parse_addr, get_datetime)
from frappe.email.email_body import get_message_id
import frappe.email.smtp
import time
@ -172,33 +172,6 @@ def _notify(doc, print_html=None, print_format=None, attachments=None,
print_letterhead=frappe.flags.print_letterhead
)
def update_parent_mins_to_first_response(doc):
"""Update mins_to_first_communication of parent document based on who is replying."""
parent = get_parent_doc(doc)
if not parent:
return
# update parent mins_to_first_communication only if we create the Email communication
# ignore in case of only Comment is added
if doc.communication_type == "Comment":
return
status_field = parent.meta.get_field("status")
if status_field:
options = (status_field.options or '').splitlines()
# if status has a "Replied" option, then update the status for received communication
if ('Replied' in options) and doc.sent_or_received=="Received":
parent.db_set("status", "Open")
else:
# update the modified date for document
parent.update_modified()
update_mins_to_first_communication(parent, doc)
parent.run_method('notify_communication', doc)
parent.notify_update()
def get_recipients_cc_and_bcc(doc, recipients, cc, bcc, fetched_from_email_account=False):
doc.all_email_addresses = []
doc.sent_email_addresses = []
@ -499,15 +472,6 @@ def sendmail(communication_name, print_html=None, print_format=None, attachments
traceback = frappe.log_error("frappe.core.doctype.communication.email.sendmail")
raise
def update_mins_to_first_communication(parent, communication):
if parent.meta.has_field('mins_to_first_response') and not parent.get('mins_to_first_response'):
if frappe.db.get_all('User', filters={'email': communication.sender,
'user_type': 'System User', 'enabled': 1}, limit=1):
first_responded_on = communication.creation
if parent.meta.has_field('first_responded_on') and communication.sent_or_received == "Sent":
parent.db_set('first_responded_on', first_responded_on)
parent.db_set('mins_to_first_response', round(time_diff_in_seconds(first_responded_on, parent.creation) / 60), 2)
@frappe.whitelist(allow_guest=True)
def mark_email_as_seen(name=None):
try:

View file

@ -202,6 +202,8 @@ class TestCommunication(unittest.TestCase):
self.assertIn(("Note", note.name), doc_links)
def create_email_account():
frappe.delete_doc_if_exists("Email Account", "_Test Comm Account 1")
frappe.flags.mute_emails = False
frappe.flags.sent_mail = None

View file

@ -9,7 +9,7 @@ import frappe.permissions
import re, csv, os
from frappe.utils.csvutils import UnicodeWriter
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint
from frappe.core.doctype.data_import.importer import get_data_keys
from frappe.core.doctype.data_import_legacy.importer import get_data_keys
from six import string_types
from frappe.core.doctype.access_log.access_log import make_access_log

View file

@ -1 +0,0 @@
Bulk import / update of data via file upload in Excel or CSV.

View file

@ -0,0 +1,3 @@
.warnings .warning {
margin-bottom: 40px;
}

View file

@ -1,324 +1,522 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import', {
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
frappe.call({
method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
setup(frm) {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import', frm.doc.name);
frappe.model.with_doc('Data Import', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
}),
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
frm.set_query('reference_doctype', () => {
return {
filters: {
name: ['in', frappe.boot.user.can_import]
}
};
});
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
frm.has_import_file = () => {
return frm.doc.import_file || frm.doc.google_sheets_url;
};
},
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
},
refresh: function(frm) {
onload_post_render(frm) {
frm.trigger('update_primary_action');
},
update_primary_action(frm) {
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && (frm.has_import_file())) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
},
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
});
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
download_template(frm) {
if (
frm.data_exporter &&
frm.data_exporter.doctype === frm.doc.reference_doctype
) {
frm.data_exporter.exporting_for = frm.doc.import_type;
frm.data_exporter.dialog.show();
} else {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype,
frm.doc.import_type
);
});
}
},
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import.data_import.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
} else {
$(frm.fields_dict.import_log.wrapper).empty();
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
google_sheets_url(frm) {
if (!frm.is_dirty()) {
frm.trigger('import_file');
} else {
frm.trigger('update_primary_action');
}
},
refresh_google_sheet(frm) {
frm.trigger('import_file');
},
import_file(frm) {
frm.toggle_display('section_import_preview', frm.has_import_file());
if (!frm.has_import_file()) {
frm.get_field('import_preview').$wrapper.empty();
return;
} else {
frm.trigger('update_primary_action');
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: {
data_import: frm.doc.name,
import_file: frm.doc.import_file,
google_sheets_url: frm.doc.google_sheets_url
},
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.remap_column = template_options.remap_column || {};
Object.assign(template_options.remap_column, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import.data_import.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
frm.save();
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="warning" data-row="${row_number}">
<h5 class="text-uppercase">${__('Row {0}', [row_number])}</h5>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="warning" data-col="${warning.col}">
<h5 class="text-uppercase">${header}</h5>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-10 warnings">${html}</div>
</div>
`);
},
only_update: function(frm) {
frm.save();
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
submit_after_import: function(frm) {
frm.save();
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
skip_errors: function(frm) {
frm.save();
},
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
ignore_encoding_errors: function(frm) {
frm.save();
},
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});
frappe.provide('frappe.data_import');
frappe.data_import.download_dialog = function(frm) {
var dialog;
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
const get_doctype_checkbox_fields = () => {
return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
.map(df => dialog.fields_dict[df.fieldname]);
};
const doctype_fields = get_fields(frm.doc.reference_doctype)
.map(df => {
let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
return {
label: df.label,
reqd: reqd,
danger: reqd,
value: df.fieldname,
checked: 1
};
});
let fields = [
{
"label": __("Select Columns"),
"fieldname": "select_columns",
"fieldtype": "Select",
"options": "All\nMandatory\nManually",
"reqd": 1,
"onchange": function() {
const fields = get_doctype_checkbox_fields();
fields.map(f => f.toggle(true));
if(this.value == 'Mandatory' || this.value == 'Manually') {
checkbox_toggle(true);
fields.map(multicheck_field => {
multicheck_field.options.map(option => {
if(!option.reqd) return;
$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
.prop('checked', false)
.trigger('click');
});
});
} else if(this.value == 'All'){
$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
.prop('disabled', true);
}
}
},
{
"label": __("File Type"),
"fieldname": "file_type",
"fieldtype": "Select",
"options": "Excel\nCSV",
"default": "Excel"
},
{
"label": __("Download with Data"),
"fieldname": "with_data",
"fieldtype": "Check",
"hidden": !frm.doc.overwrite,
"default": 1
},
{
"label": __("Select All"),
"fieldname": "select_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle();
}
},
{
"label": __("Unselect All"),
"fieldname": "unselect_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle(true);
}
},
{
"label": frm.doc.reference_doctype,
"fieldname": "doctype_fields",
"fieldtype": "MultiCheck",
"options": doctype_fields,
"columns": 2,
"hidden": 1
}
];
const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
.map(df => {
return {
"label": df.options,
"fieldname": df.fieldname + '_fields',
"fieldtype": "MultiCheck",
"options": frappe.meta.get_docfields(df.options)
.filter(filter_fields)
.map(df => ({
label: df.label,
reqd: df.reqd ? 1 : 0,
value: df.fieldname,
checked: 1,
danger: df.reqd
})),
"columns": 2,
"hidden": 1
};
});
fields = fields.concat(child_table_fields);
dialog = new frappe.ui.Dialog({
title: __('Download Template'),
fields: fields,
primary_action: function(values) {
var data = values;
if (frm.doc.reference_doctype) {
var export_params = () => {
let columns = {};
if(values.select_columns) {
columns = get_doctype_checkbox_fields().reduce((columns, field) => {
const options = field.get_checked_options();
columns[field.df.label] = options;
return columns;
}, {});
}
return {
doctype: frm.doc.reference_doctype,
parent_doctype: frm.doc.reference_doctype,
select_columns: JSON.stringify(columns),
with_data: frm.doc.overwrite && data.with_data,
all_doctypes: true,
file_type: data.file_type,
template: true
};
};
let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
open_url_post(get_template_url, export_params());
} else {
frappe.msgprint(__("Please select the Document Type."));
}
dialog.hide();
},
primary_action_label: __('Download')
});
$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
.wrapAll('<div class="inline-buttons" />');
const button_container = $(dialog.body).find('.inline-buttons');
button_container.addClass('flex');
$(button_container).find('.frappe-control').map((index, button) => {
$(button).css({"margin-right": "1em"});
});
function checkbox_toggle(checked=false) {
$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
});
}
return dialog;
};

View file

@ -1,767 +1,192 @@
{
"allow_copy": 1,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"autoname": "",
"beta": 0,
"creation": "2016-12-09 14:27:32.720061",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"html_5",
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 1,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "action",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Action",
"length": 0,
"no_copy": 0,
"options": "Insert new records\nUpdate records",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Insert new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Update records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Don't create new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Attach file for Import",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"in_list_view": 1,
"label": "Import File"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "column_break_4",
"fieldtype": "Column Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Generated File",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"label": "Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Skip rows with errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Submit after importing",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Ignore encoding errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"depends_on": "",
"fieldname": "no_email",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Do not send Emails",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"columns": 0,
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "import_status",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Status",
"length": 0,
"no_copy": 0,
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Show only errors",
"length": 0,
"no_copy": 1,
"permlevel": 0,
"precision": "",
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "",
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Log Details",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "amended_from",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Amended From",
"length": 0,
"no_copy": 1,
"options": "Data Import",
"permlevel": 0,
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "total_rows",
"fieldtype": "Int",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Total Rows",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file",
"fieldname": "html_5",
"fieldtype": "HTML",
"options": "<h5 class=\"text-muted uppercase\">Or</h5>"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file\n",
"description": "Must be a publicly accessible Google Sheets URL",
"fieldname": "google_sheets_url",
"fieldtype": "Data",
"label": "Import from Google Sheets"
},
{
"depends_on": "eval:doc.google_sheets_url",
"fieldname": "refresh_google_sheet",
"fieldtype": "Button",
"label": "Refresh Google Sheet"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 1,
"issingle": 0,
"istable": 0,
"max_attachments": 1,
"modified": "2018-08-28 15:05:56.787108",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"name_case": "",
"owner": "Administrator",
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-06-18 16:05:54.211034",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 1,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "",
"track_changes": 1,
"track_seen": 1,
"track_views": 0
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,54 +1,187 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
import os
import frappe
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import.importer import upload
from frappe.core.doctype.data_import.importer import Importer
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.utils.background_jobs import enqueue
from frappe.utils.csvutils import validate_google_sheets_url
from frappe import _
class DataImport(Document):
def autoname(self):
    # Give the document a readable default name from its creation timestamp
    # when no explicit name was provided.
    if not self.name:
        self.name = "Import on " +format_datetime(self.creation)
def validate(self):
if not self.import_file:
self.db_set("total_rows", 0)
if self.import_status == "In Progress":
frappe.throw(_("Can't save the form as data import is in progress."))
doc_before_save = self.get_doc_before_save()
if (
not (self.import_file or self.google_sheets_url)
or (doc_before_save and doc_before_save.import_file != self.import_file)
or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url)
):
self.template_options = ""
self.template_warnings = ""
# validate the template just after the upload
# if there is total_rows in the doc, it means that the template is already validated and error free
if self.import_file and not self.total_rows:
upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
self.validate_import_file()
self.validate_google_sheets_url()
def validate_import_file(self):
    """Validate the attached template when an import file is present."""
    if self.import_file:
        # constructing the Importer validates the template as a side effect
        self.get_importer()
def validate_google_sheets_url(self):
    """Validate the google_sheets_url field, if one is set."""
    if not self.google_sheets_url:
        return
    # The bare name resolves to the module-level validator imported at the
    # top of the file, not to this method (same name, different scope).
    validate_google_sheets_url(self.google_sheets_url)
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
    """Build preview data for the import source.

    Optionally sets the import file and/or Google Sheets URL on the
    document first. Returns None when neither source is available.
    """
    if import_file:
        self.import_file = import_file
    if google_sheets_url:
        self.google_sheets_url = google_sheets_url

    if self.import_file or self.google_sheets_url:
        return self.get_importer().get_data_for_import_preview()
def start_import(self):
    """Enqueue a background job to run this document's import.

    Returns True if a new job was enqueued, False if a job with this
    document's name is already in the queue. Throws when the scheduler
    is inactive (except in tests).
    """
    from frappe.core.page.background_jobs.background_jobs import get_info
    from frappe.utils.scheduler import is_scheduler_inactive

    if is_scheduler_inactive() and not frappe.flags.in_test:
        frappe.throw(
            _("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
        )

    # names of currently queued jobs, used to avoid enqueueing twice
    enqueued_jobs = [d.get("job_name") for d in get_info()]

    if self.name not in enqueued_jobs:
        enqueue(
            # bare name resolves to the module-level worker function,
            # not this method
            start_import,
            queue="default",
            timeout=6000,
            event="data_import",
            job_name=self.name,
            data_import=self.name,
            # run synchronously in developer mode and in tests
            now=frappe.conf.developer_mode or frappe.flags.in_test,
        )
        return True
    return False
def export_errored_rows(self):
    """Export only the rows that failed during import (delegates to Importer)."""
    return self.get_importer().export_errored_rows()
def get_importer(self):
    """Return an Importer bound to this document's reference doctype."""
    return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_importable_doctypes():
    # Doctypes the session user is allowed to import, read from the
    # per-user "can_import" cache.
    # NOTE(review): the cache is populated elsewhere — if it was never
    # filled for this user, this presumably returns None; confirm callers
    # handle that.
    return frappe.cache().hget("can_import", frappe.session.user)
def get_preview_from_template(data_import, import_file=None, google_sheets_url=None):
    """Load the named Data Import document and build its template preview."""
    doc = frappe.get_doc("Data Import", data_import)
    return doc.get_preview_from_template(import_file, google_sheets_url)
@frappe.whitelist()
def import_data(data_import):
    # NOTE(review): appears to be the pre-rewrite entry point — it uses the
    # old `import_status` field and the old `upload` worker; confirm it is
    # still reachable before changing.
    frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False)
    # push an initial 0% progress event so the open form can show progress
    frappe.publish_realtime("data_import_progress", {"progress": "0",
        "data_import": data_import, "reload": True}, user=frappe.session.user)
    from frappe.core.page.background_jobs.background_jobs import get_info

    # avoid double-enqueueing a job for the same document
    enqueued_jobs = [d.get("job_name") for d in get_info()]
    if data_import not in enqueued_jobs:
        enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
            data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
def form_start_import(data_import):
    """Load the named Data Import document and start its import job."""
    doc = frappe.get_doc("Data Import", data_import)
    return doc.start_import()
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
insert=False, submit=False, pre_process=None):
def start_import(data_import):
    """Background-job worker: run the import for *data_import* (a doc name).

    On any failure the DB transaction is rolled back, the document status
    is set to "Error" and the traceback is logged. A realtime refresh
    event is always published at the end so open forms reload.
    """
    data_import = frappe.get_doc("Data Import", data_import)
    try:
        i = Importer(data_import.reference_doctype, data_import=data_import)
        i.import_data()
    except Exception:
        frappe.db.rollback()
        data_import.db_set("status", "Error")
        # log under the document name so failures are easy to find
        frappe.log_error(title=data_import.name)
    finally:
        frappe.flags.in_import = False
        frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
    doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
    """
    Build and stream an import template via Exporter.

    :param doctype: Document Type
    :param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
    :param export_records=None: One of 'all', 'by_filter', '5_records', 'blank_template';
        'blank_template' exports no record data, '5_records' limits the export to five records
    :param export_filters: Filter dict
    :param file_type: File type to export into ('CSV' or 'Excel')
    """
    export_fields = frappe.parse_json(export_fields)
    export_filters = frappe.parse_json(export_filters)
    # any value other than 'blank_template' means record data is included
    export_data = export_records != "blank_template"

    e = Exporter(
        doctype,
        export_fields=export_fields,
        export_data=export_data,
        export_filters=export_filters,
        file_type=file_type,
        # '5_records' caps the query at five rows; otherwise export everything
        export_page_length=5 if export_records == "5_records" else None,
    )
    e.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
    """Export only the failed rows of the given Data Import (delegates to the document)."""
    frappe.get_doc("Data Import", data_import_name).export_errored_rows()
def import_file(
    doctype, file_path, import_type, submit_after_import=False, console=False
):
    """
    Import documents from a CSV or XLSX file using Data Import.

    :param doctype: DocType to import
    :param file_path: Path to .csv, .xls, or .xlsx file to import
    :param import_type: One of "Insert" or "Update"
    :param submit_after_import: Whether to submit documents after import
    :param console: Set to true when used from the command line; errors and
        progress are then printed to stdout.
    """
    if import_type.lower() == "insert":
        resolved_import_type = "Insert New Records"
    else:
        resolved_import_type = "Update Existing Records"

    data_import = frappe.new_doc("Data Import")
    data_import.submit_after_import = submit_after_import
    data_import.import_type = resolved_import_type

    importer = Importer(
        doctype=doctype, file_path=file_path, data_import=data_import, console=console
    )
    importer.import_data()
##############
def import_doc(
path,
overwrite=False,
ignore_links=False,
ignore_insert=False,
insert=False,
submit=False,
pre_process=None,
):
if os.path.isdir(path):
files = [os.path.join(path, f) for f in os.listdir(path)]
else:
@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
for f in files:
if f.endswith(".json"):
frappe.flags.mute_emails = True
frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
frappe.modules.import_file.import_file_by_path(
f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True
)
frappe.flags.mute_emails = False
frappe.db.commit()
elif f.endswith(".csv"):
import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
import_file_by_path(
f,
ignore_links=ignore_links,
overwrite=overwrite,
submit=submit,
pre_process=pre_process,
)
frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
    # NOTE(review): legacy CSV path — reads the whole file and hands its rows
    # to the old `upload` worker. A newer same-named implementation exists
    # later in this file; confirm which definition is actually live.
    from frappe.utils.csvutils import read_csv_content
    print("Importing " + path)
    with open(path, "r") as infile:
        upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
            submit_after_import=submit, pre_process=pre_process)
def import_file_by_path(
    path,
    ignore_links=False,
    overwrite=False,
    submit=False,
    pre_process=None,
    no_email=True,
):
    """Deprecated CSV-import shim.

    For a .csv path, prints a deprecation notice pointing at the
    replacements and raises; does nothing for any other path. The extra
    parameters are kept only for signature compatibility with old callers.
    """
    if not path.endswith(".csv"):
        return

    notice = [
        "",
        "This method is deprecated.",
        'Import CSV files using the command "bench --site sitename data-import"',
        "Or use the method frappe.core.doctype.data_import.data_import.import_file",
        "",
    ]
    for line in notice:
        print(line)
    raise Exception("Method deprecated")
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
def export_json(
doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"
):
def post_process(out):
del_keys = ('modified_by', 'creation', 'owner', 'idx')
del_keys = ("modified_by", "creation", "owner", "idx")
for doc in out:
for key in del_keys:
if key in doc:
@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
for k, v in doc.items():
if isinstance(v, list):
for child in v:
for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
for key in del_keys + ("docstatus", "doctype", "modified", "name"):
if key in child:
del child[key]
@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
elif frappe.db.get_value("DocType", doctype, "issingle"):
out.append(frappe.get_doc(doctype).as_dict())
else:
for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
for doc in frappe.get_all(
doctype,
fields=["name"],
filters=filters,
or_filters=or_filters,
limit_page_length=0,
order_by=order_by,
):
out.append(frappe.get_doc(doctype, doc.name).as_dict())
post_process(out)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
path = os.path.join('..', path)
path = os.path.join("..", path)
with open(path, "w") as outfile:
outfile.write(frappe.as_json(out))
@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
def export_csv(doctype, path):
    """Write a full CSV export (template headers plus data) of *doctype* to *path*."""
    from frappe.core.doctype.data_export.exporter import export_data

    with open(path, "wb") as csvfile:
        # export_data leaves the generated CSV text on frappe.response.result,
        # which is read back on the next line
        export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
        csvfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
    """Export *doctype* records as a fixture JSON file under *app*/fixtures.

    Only the Administrator may export fixtures; anyone else gets a
    PermissionError. The fixtures directory is created if missing.
    """
    if frappe.session.user != "Administrator":
        raise frappe.PermissionError

    # makedirs(..., exist_ok=True) is idempotent and avoids the
    # check-then-create race of the previous exists() + mkdir() pair
    os.makedirs(frappe.get_app_path(app, "fixtures"), exist_ok=True)

    export_json(
        doctype,
        frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"),
        order_by="name asc",
    )

View file

@ -1,31 +1,40 @@
let imports_in_progress = [];
frappe.listview_settings['Data Import'] = {
add_fields: ["import_status"],
has_indicator_for_draft: 1,
get_indicator: function(doc) {
let status = {
'Successful': [__("Success"), "green", "import_status,=,Successful"],
'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
'Failed': [__("Failed"), "red", "import_status,=,Failed"],
'Pending': [__("Pending"), "orange", "import_status,=,"]
}
if (doc.import_status) {
return status[doc.import_status];
}
if (doc.docstatus == 0) {
return status['Pending'];
}
return status['Pending'];
},
onload(listview) {
listview.page.set_title_sub(`
<span class="indicator blue">
<a class="text-muted" href="#List/Data Import Beta">${__('Try the new Data Import')}</a>
</span>
`);
}
frappe.realtime.on('data_import_progress', data => {
if (!imports_in_progress.includes(data.data_import)) {
imports_in_progress.push(data.data_import);
}
});
frappe.realtime.on('data_import_refresh', data => {
imports_in_progress = imports_in_progress.filter(
d => d !== data.data_import
);
listview.refresh();
});
},
get_indicator: function(doc) {
var colors = {
'Pending': 'orange',
'Partial Success': 'orange',
'Success': 'green',
'In Progress': 'orange',
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {
import_type(value) {
return {
'Insert New Records': __('Insert'),
'Update Existing Records': __('Update')
}[value];
}
},
hide_name_column: true
};

View file

@ -0,0 +1,257 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import (
display_fieldtypes,
no_value_fields,
table_fields as table_fieldtypes,
)
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
class Exporter:
	"""Build a CSV/Excel export of DocType records in the column layout
	expected by the Data Import importer (child-table rows continue on
	extra lines below their parent row)."""

	def __init__(
		self,
		doctype,
		export_fields=None,
		export_data=False,
		export_filters=None,
		export_page_length=None,
		file_type="CSV",
	):
		"""
		Exports records of a DocType for use with Importer
		:param doctype: Document Type to export
		:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
		:param export_data=False: Whether to export data as well
		:param export_filters=None: The filters (dict or list) which is used to query the records
		:param export_page_length=None: Maximum number of parent records to export (None = no limit)
		:param file_type: One of 'Excel' or 'CSV'
		"""
		self.doctype = doctype
		self.meta = frappe.get_meta(doctype)
		self.export_fields = export_fields
		self.export_filters = export_filters
		self.export_page_length = export_page_length
		self.file_type = file_type

		# this will contain the csv content
		self.csv_array = []

		# fields that get exported
		self.exportable_fields = self.get_all_exportable_fields()
		self.fields = self.serialize_exportable_fields()

		self.add_header()

		if export_data:
			self.data = self.get_data_to_export()
		else:
			self.data = []
		self.add_data()

	def get_all_exportable_fields(self):
		"""Return a dict mapping the parent doctype name and child table
		fieldnames to their selected docfields.

		NOTE(review): this iterates self.export_fields.items(), so despite the
		__init__ docstring it only supports the dict form here — confirm that
		callers never pass 'All'/'Mandatory' to this class.
		"""
		child_table_fields = [
			df.fieldname for df in self.meta.fields if df.fieldtype in table_fieldtypes
		]

		meta = frappe.get_meta(self.doctype)
		exportable_fields = frappe._dict({})

		for key, fieldnames in self.export_fields.items():
			if key == self.doctype:
				# parent fields
				exportable_fields[key] = self.get_exportable_fields(key, fieldnames)
			elif key in child_table_fields:
				# child fields
				child_df = meta.get_field(key)
				child_doctype = child_df.options
				exportable_fields[key] = self.get_exportable_fields(child_doctype, fieldnames)

		return exportable_fields

	def serialize_exportable_fields(self):
		"""Flatten self.exportable_fields into one ordered list of docfield
		dicts. Child-table fields are tagged with ``is_child_table_field`` and
		``child_table_df`` so columns can later be matched back to rows."""
		fields = []
		for key, exportable_fields in self.exportable_fields.items():
			for _df in exportable_fields:
				# make a copy of df dict to avoid reference mutation
				if isinstance(_df, frappe.core.doctype.docfield.docfield.DocField):
					df = _df.as_dict()
				else:
					df = _df.copy()
				df.is_child_table_field = key != self.doctype
				if df.is_child_table_field:
					df.child_table_df = self.meta.get_field(key)
				fields.append(df)
		return fields

	def get_exportable_fields(self, doctype, fieldnames):
		"""Resolve ``fieldnames`` to docfields of ``doctype``, dropping
		display-only and valueless fieldtypes. A synthetic "name" (ID) column
		is prepended when 'name' was requested."""
		meta = frappe.get_meta(doctype)

		def is_exportable(df):
			return df and df.fieldtype not in (display_fieldtypes + no_value_fields)

		# add name field
		name_field = frappe._dict(
			{
				"fieldtype": "Data",
				"fieldname": "name",
				"label": "ID",
				"reqd": 1,
				"parent": doctype,
			}
		)

		fields = [meta.get_field(fieldname) for fieldname in fieldnames]
		fields = [df for df in fields if is_exportable(df)]

		if "name" in fieldnames:
			fields = [name_field] + fields

		return fields or []

	def get_data_to_export(self):
		"""Fetch the matching records (with their child rows) and serialize
		them into flat csv rows. Raises if the session user lacks export
		permission on the doctype."""
		frappe.permissions.can_export(self.doctype, raise_exception=True)

		data_to_export = []
		table_fields = [f for f in self.exportable_fields if f != self.doctype]
		data = self.get_data_as_docs()

		for doc in data:
			rows = []
			rows = self.add_data_row(self.doctype, None, doc, rows, 0)

			if table_fields:
				# add child table data
				for f in table_fields:
					for i, child_row in enumerate(doc[f]):
						table_df = self.meta.get_field(f)
						child_doctype = table_df.options
						rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i)

			data_to_export += rows
		return data_to_export

	def add_data_row(self, doctype, parentfield, doc, rows, row_idx):
		"""Write ``doc``'s values into ``rows[row_idx]`` (appending blank rows
		as needed), filling only the columns that belong to ``doctype`` under
		``parentfield``. Returns the mutated ``rows`` list."""
		if len(rows) < row_idx + 1:
			rows.append([""] * len(self.fields))
		row = rows[row_idx]

		for i, df in enumerate(self.fields):
			if df.parent == doctype:
				# the same child doctype may appear under several table
				# fields; only fill the column bound to this parentfield
				if df.is_child_table_field and df.child_table_df.fieldname != parentfield:
					continue
				row[i] = doc.get(df.fieldname, "")

		return rows

	def get_data_as_docs(self):
		"""Query parent records plus all exported child-table rows in bulk and
		merge them into doc-like dicts keyed by table fieldname."""
		def format_column_name(df):
			return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)

		filters = self.export_filters

		if self.meta.is_nested_set():
			# preserve tree order for nested-set doctypes
			order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
		else:
			order_by = "`tab{0}`.`creation` DESC".format(self.doctype)

		parent_fields = [
			format_column_name(df) for df in self.fields if df.parent == self.doctype
		]
		parent_data = frappe.db.get_list(
			self.doctype,
			filters=filters,
			fields=["name"] + parent_fields,
			limit_page_length=self.export_page_length,
			order_by=order_by,
			as_list=0,
		)
		parent_names = [p.name for p in parent_data]

		child_data = {}
		for key in self.exportable_fields:
			if key == self.doctype:
				continue
			child_table_df = self.meta.get_field(key)
			child_table_doctype = child_table_df.options
			# idx/parent/parentfield are needed for ordering and merging even
			# if they were not selected for export
			child_fields = ["name", "idx", "parent", "parentfield"] + list(
				set(
					[format_column_name(df) for df in self.fields if df.parent == child_table_doctype]
				)
			)
			data = frappe.db.get_list(
				child_table_doctype,
				filters={
					"parent": ("in", parent_names),
					"parentfield": child_table_df.fieldname,
					"parenttype": self.doctype,
				},
				fields=child_fields,
				order_by="idx asc",
				as_list=0,
			)
			child_data[key] = data

		return self.merge_data(parent_data, child_data)

	def merge_data(self, parent_data, child_data):
		"""Attach each parent's child rows (matched on row.parent == doc.name)
		under the corresponding table fieldname."""
		for doc in parent_data:
			for table_field, table_rows in child_data.items():
				doc[table_field] = [row for row in table_rows if row.parent == doc.name]
		return parent_data

	def add_header(self):
		"""Append the header row. Child columns are labelled
		"Label (Child Table Label)"; duplicate labels fall back to
		fieldname-based labels so every column stays distinguishable."""
		header = []
		for df in self.fields:
			is_parent = not df.is_child_table_field
			if is_parent:
				label = df.label
			else:
				label = "{0} ({1})".format(df.label, df.child_table_df.label)

			if label in header:
				# this label is already in the header,
				# which means two fields with the same label
				# add the fieldname to avoid clash
				if is_parent:
					label = "{0}".format(df.fieldname)
				else:
					label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname)

			header.append(label)

		self.csv_array.append(header)

	def add_data(self):
		# append exported rows below the header
		self.csv_array += self.data

	def get_csv_array(self):
		"""Header row plus data rows as a list of lists."""
		return self.csv_array

	def get_csv_array_for_export(self):
		"""csv_array padded with 2 empty rows when there is no data (keeps
		the downloaded template shape consistent)."""
		csv_array = self.csv_array

		if not self.data:
			# add 2 empty rows
			csv_array += [[]] * 2

		return csv_array

	def build_response(self):
		"""Write the export into frappe's HTTP response in the requested
		file type."""
		if self.file_type == "CSV":
			self.build_csv_response()
		elif self.file_type == "Excel":
			self.build_xlsx_response()

	def build_csv_response(self):
		build_csv_response(self.get_csv_array_for_export(), self.doctype)

	def build_xlsx_response(self):
		build_xlsx_response(self.get_csv_array_for_export(), self.doctype)

View file

@ -1,267 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import display_fieldtypes, no_value_fields, table_fields
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
from .importer_new import INVALID_VALUES
class Exporter:
	"""Legacy Data Import Beta exporter: queries records as flat lists and
	post-processes them (dedupe, gap-compaction) into CSV/Excel output."""

	def __init__(
		self,
		doctype,
		export_fields=None,
		export_data=False,
		export_filters=None,
		export_page_length=None,
		file_type="CSV",
	):
		"""
		Exports records of a DocType for use with Importer
		:param doctype: Document Type to export
		:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
		:param export_data=False: Whether to export data as well
		:param export_filters=None: The filters (dict or list) which is used to query the records
		:param export_page_length=None: Maximum number of records to export (None = no limit)
		:param file_type: One of 'Excel' or 'CSV'
		"""
		self.doctype = doctype
		self.meta = frappe.get_meta(doctype)
		self.export_fields = export_fields
		self.export_filters = export_filters
		self.export_page_length = export_page_length
		self.file_type = file_type

		# this will contain the csv content
		self.csv_array = []

		# fields that get exported
		# can be All, Mandatory or User Selected Fields
		self.fields = self.get_all_exportable_fields()
		self.add_header()

		if export_data:
			self.data = self.get_data_to_export()
		else:
			self.data = []
		self.add_data()

	def get_all_exportable_fields(self):
		"""Parent fields first, then the fields of every child table."""
		return self.get_exportable_parent_fields() + self.get_exportable_children_fields()

	def get_exportable_parent_fields(self):
		"""Exportable fields of the parent doctype. When autoname is
		field-based, the ID column is relabelled and the duplicate autoname
		field is dropped."""
		parent_fields = self.get_exportable_fields(self.doctype)

		# if autoname is based on field
		# then merge ID and the field column title as "ID (Autoname Field)"
		autoname = self.meta.autoname
		if autoname and autoname.startswith("field:"):
			fieldname = autoname[len("field:") :]
			autoname_field = self.meta.get_field(fieldname)
			if autoname_field:
				name_field = parent_fields[0]
				name_field.label = "ID ({})".format(autoname_field.label)
				# remove the autoname field as it is a duplicate of ID field
				parent_fields = [
					df for df in parent_fields if df.fieldname != autoname_field.fieldname
				]

		return parent_fields

	def get_exportable_children_fields(self):
		"""Exportable fields of every child table; for 'Mandatory' exports
		only required child tables are included."""
		child_table_fields = [df for df in self.meta.fields if df.fieldtype in table_fields]
		if self.export_fields == "Mandatory":
			child_table_fields = [df for df in child_table_fields if df.reqd]
		children = [df.options for df in child_table_fields]
		children_fields = []
		for child in children:
			children_fields += self.get_exportable_fields(child)
		return children_fields

	def get_exportable_fields(self, doctype):
		"""Exportable docfields of ``doctype`` selected per self.export_fields.

		NOTE(review): if export_fields is none of 'Mandatory', 'All' or a
		dict, ``fields`` is never assigned and the final return raises
		UnboundLocalError — confirm callers always pass one of the three.
		"""
		meta = frappe.get_meta(doctype)

		def is_exportable(df):
			return df and df.fieldtype not in (display_fieldtypes + no_value_fields)

		# filter out invalid fieldtypes
		all_fields = [df for df in meta.fields if is_exportable(df)]

		# add name field
		name_field = frappe._dict(
			{
				"fieldtype": "Data",
				"fieldname": "name",
				"label": "ID",
				"reqd": 1,
				"parent": doctype,
			}
		)
		all_fields = [name_field] + all_fields

		if self.export_fields == "Mandatory":
			fields = [df for df in all_fields if df.reqd]
		if self.export_fields == "All":
			fields = list(all_fields)
		elif isinstance(self.export_fields, dict):
			fields_to_export = self.export_fields.get(doctype, [])
			fields = [meta.get_field(fieldname) for fieldname in fields_to_export]
			fields = [df for df in fields if is_exportable(df)]
			if 'name' in fields_to_export:
				fields = [name_field] + fields

		return fields or []

	def get_data_to_export(self):
		"""Query records as flat value lists and post-process them (dedupe
		repeated values, close row gaps, drop empty rows). Raises if the user
		lacks export permission."""
		frappe.permissions.can_export(self.doctype, raise_exception=True)

		def get_column_name(df):
			return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)

		fields = [get_column_name(df) for df in self.fields]
		filters = self.export_filters

		if self.meta.is_nested_set():
			order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
		else:
			order_by = "`tab{0}`.`creation` DESC".format(self.doctype)

		data = frappe.db.get_list(
			self.doctype,
			filters=filters,
			fields=fields,
			limit_page_length=self.export_page_length,
			order_by=order_by,
			as_list=1,
		)

		data = self.remove_duplicate_values(data)
		data = self.remove_row_gaps(data)
		data = self.remove_empty_rows(data)
		# data = self.remove_values_from_name_column(data)

		return data

	def remove_duplicate_values(self, data):
		"""Blank out a doctype's columns in rows whose ID value already
		appeared in an earlier row, so repeated values are exported once."""
		out = []
		doctypes = set([df.parent for df in self.fields])

		def name_exists_in_column_before_row(name, column_index, row_index):
			column_values = [row[column_index] for i, row in enumerate(data) if i < row_index]
			return name in column_values

		for i, row in enumerate(data):
			# first row is fine
			if i == 0:
				out.append(row)
				continue

			row = list(row)
			for doctype in doctypes:
				name_index = self.get_name_column_index(doctype)
				name = row[name_index]
				column_indexes = self.get_column_indexes(doctype)

				if name_exists_in_column_before_row(name, name_index, i):
					# remove the values from the row
					row = [None if i in column_indexes else d for i, d in enumerate(row)]
			out.append(row)

		return out

	def remove_row_gaps(self, data):
		"""Move child values up into the nearest row that has an empty slot
		for their doctype, compacting the exported rows."""
		doctypes = set([df.parent for df in self.fields if df.parent != self.doctype])

		def get_nearest_empty_row_index(col_index, row_index):
			# walk upwards while the column is empty; implicitly returns None
			# when the row is already in the right place
			col_values = [row[col_index] for row in data]
			i = row_index - 1
			while not col_values[i]:
				i = i - 1
			out = i + 1
			if row_index != out:
				return out

		for i, row in enumerate(data):
			# if this is the row that contains parent values then skip
			if row[0]:
				continue

			for doctype in doctypes:
				name_index = self.get_name_column_index(doctype)
				name = row[name_index]
				column_indexes = self.get_column_indexes(doctype)
				if not name:
					continue
				row_index = get_nearest_empty_row_index(name_index, i)
				if row_index:
					for col_index in column_indexes:
						data[row_index][col_index] = row[col_index]
						row[col_index] = None

		return data

	# pylint: disable=R0201
	def remove_empty_rows(self, data):
		"""Drop rows whose every cell is an empty/invalid value."""
		return [row for row in data if any(v not in INVALID_VALUES for v in row)]

	def remove_values_from_name_column(self, data):
		"""(Currently unused — see the commented call above.) Blank out every
		ID column in the exported rows."""
		out = []
		name_columns = [i for i, df in enumerate(self.fields) if df.fieldname == "name"]
		for row in data:
			out.append(["" if i in name_columns else value for i, value in enumerate(row)])
		return out

	def get_name_column_index(self, doctype):
		"""Index of the doctype's ID column within self.fields, or -1."""
		for i, df in enumerate(self.fields):
			if df.parent == doctype and df.fieldname == "name":
				return i
		return -1

	def get_column_indexes(self, doctype):
		"""All column indexes in self.fields that belong to ``doctype``."""
		return [i for i, df in enumerate(self.fields) if df.parent == doctype]

	def add_header(self):
		"""Append the header row; child columns are labelled
		"Label (Parent Doctype)"."""
		def get_label(df):
			if df.parent == self.doctype:
				return df.label
			else:
				return "{0} ({1})".format(df.label, df.parent)

		header = [get_label(df) for df in self.fields]
		self.csv_array.append(header)

	def add_data(self):
		# append the exported rows below the header
		self.csv_array += self.data

	def get_csv_array(self):
		"""Header row plus data rows as a list of lists."""
		return self.csv_array

	def get_csv_array_for_export(self):
		"""csv_array padded with 2 empty rows when there is no data."""
		csv_array = self.csv_array

		if not self.data:
			# add 2 empty rows
			csv_array += [[]] * 2

		return csv_array

	def build_response(self):
		"""Write the export into frappe's HTTP response in the requested
		file type."""
		if self.file_type == 'CSV':
			self.build_csv_response()
		elif self.file_type == 'Excel':
			self.build_xlsx_response()

	def build_csv_response(self):
		build_csv_response(self.get_csv_array_for_export(), self.doctype)

	def build_xlsx_response(self):
		build_xlsx_response(self.get_csv_array_for_export(), self.doctype)

View file

@ -0,0 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 ,child title again 2 ,22-09-2021 , ,
Test 2 ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 3 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Can't render this file because it contains an unexpected character in line 2 and column 54.

View file

@ -0,0 +1,2 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test 26 ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
Can't render this file because it contains an unexpected character in line 2 and column 56.

View file

@ -0,0 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test 5 ,test description ,1 ,2 ,"" , ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 , ,22-09-2021 , ,
,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 4 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Can't render this file because it contains an unexpected character in line 2 and column 55.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,23 +0,0 @@
/* eslint-disable */
// rename this file from _test_[name] to test_[name] to activate
// and remove above this line
// Smoke test: create a Data Import document and check that a field value
// set at creation is reflected on the current form.
QUnit.test("test: Data Import", function (assert) {
	let done = assert.async();
	// number of asserts
	assert.expect(1);

	frappe.run_serially([
		// insert a new Data Import
		() => frappe.tests.make('Data Import', [
			// values to be set
			{key: 'value'}
		]),
		() => {
			// the value set above should round-trip onto the form's doc
			assert.equal(cur_frm.doc.key, 'value');
		},
		() => done()
	]);
});

View file

@ -1,100 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and Contributors
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe, unittest
from frappe.core.doctype.data_export import exporter
from frappe.core.doctype.data_import import importer
from frappe.utils.csvutils import read_csv_content
# import frappe
import unittest
class TestDataImport(unittest.TestCase):
	"""Legacy export/import round-trip tests for the old exporter/importer.

	NOTE(review): several assertions use assertTrue(a, b) where b is only the
	failure *message* — they pass for any truthy a. assertEqual was probably
	intended; confirm before trusting these tests as equality checks.
	"""

	def test_export(self):
		# exporting a template writes CSV into frappe.response.result
		exporter.export_data("User", all_doctypes=True, template=True)
		content = read_csv_content(frappe.response.result)
		self.assertTrue(content[1][1], "User")

	def test_export_with_data(self):
		exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
		content = read_csv_content(frappe.response.result)
		self.assertTrue(content[1][1], "User")
		# the Administrator record must appear among the exported rows
		self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])

	def test_export_with_all_doctypes(self):
		exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True)
		content = read_csv_content(frappe.response.result)
		self.assertTrue(content[1][1], "User")
		self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
		# the template's doctype section header sits at a fixed row
		self.assertEqual(content[13][0], "DocType:")
		self.assertEqual(content[13][1], "User")
		self.assertTrue("Has Role" in content[13])

	def test_import(self):
		# start from a clean slate
		if frappe.db.exists("Blog Category", "test-category"):
			frappe.delete_doc("Blog Category", "test-category")

		exporter.export_data("Blog Category", all_doctypes=True, template=True)
		content = read_csv_content(frappe.response.result)
		# NOTE(review): "Test Cateogry" looks like a typo of "Test Category";
		# it is only fixture data here so behaviour is unaffected
		content.append(["", "test-category", "Test Cateogry"])
		importer.upload(content)
		self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category")

		# export with data
		exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True)
		content = read_csv_content(frappe.response.result)

		# overwrite
		content[-1][3] = "New Title"
		importer.upload(content, overwrite=True)
		self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title")

	def test_import_only_children(self):
		user_email = "test_import_userrole@example.com"
		if frappe.db.exists("User", user_email):
			frappe.delete_doc("User", user_email, force=True)
		frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert()

		# importing only the child table ("Has Role") appends to the parent
		exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
		content = read_csv_content(frappe.response.result)
		content.append(["", "test_import_userrole@example.com", "Blogger"])

		importer.upload(content)
		user = frappe.get_doc("User", user_email)
		self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"}))
		self.assertTrue(user.get("roles")[0].role, "Blogger")

		# overwrite: the existing child rows are replaced, not appended to
		exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
		content = read_csv_content(frappe.response.result)
		content.append(["", "test_import_userrole@example.com", "Website Manager"])

		importer.upload(content, overwrite=True)
		user = frappe.get_doc("User", user_email)
		self.assertEqual(len(user.get("roles")), 1)
		self.assertTrue(user.get("roles")[0].role, "Website Manager")

	def test_import_with_children(self): #pylint: disable=R0201
		if frappe.db.exists("Event", "EV00001"):
			frappe.delete_doc("Event", "EV00001")
		exporter.export_data("Event", all_doctypes="Yes", template=True)
		content = read_csv_content(frappe.response.result)
		# append one blank-padded row describing a new Event
		content.append([None] * len(content[-2]))
		content[-1][1] = "__Test Event with children"
		content[-1][2] = "Private"
		content[-1][3] = "2014-01-01 10:00:00.000000"

		importer.upload(content)
		# raises DoesNotExistError if the Event was not created
		frappe.get_doc("Event", {"subject":"__Test Event with children"})

	def test_excel_import(self):
		if frappe.db.exists("Event", "EV00001"):
			frappe.delete_doc("Event", "EV00001")
		exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel")
		from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
		# Excel exports land in frappe.response.filecontent instead of .result
		content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent)
		content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0])
		importer.upload(content)
		self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name"))
		pass

View file

@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.core.doctype.data_import.test_importer import (
create_doctype_if_not_exists,
)
doctype_name = 'DocType for Export'
class TestExporter(unittest.TestCase):
	"""Tests for frappe.core.doctype.data_import.exporter.Exporter."""

	def setUp(self):
		# ensure the custom test doctype (with its child tables) exists
		create_doctype_if_not_exists(doctype_name)

	def test_exports_specified_fields(self):
		"""Export selected parent + child fields and verify the header labels
		and the row layout (extra child rows continue on following lines)."""
		if not frappe.db.exists(doctype_name, "Test"):
			doc = frappe.get_doc(
				doctype=doctype_name,
				title="Test",
				description="Test Description",
				table_field_1=[
					{"child_title": "Child Title 1", "child_description": "Child Description 1"},
					{"child_title": "Child Title 2", "child_description": "Child Description 2"},
				],
				table_field_2=[
					{"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"},
				],
				table_field_1_again=[
					{
						"child_title": "Child Title 1 Again",
						"child_description": "Child Description 1 Again",
					},
				],
			).insert()
		else:
			doc = frappe.get_doc(doctype_name, "Test")

		e = Exporter(
			doctype_name,
			export_fields={
				doctype_name: ["title", "description", "number", "another_number"],
				"table_field_1": ["name", "child_title", "child_description"],
				"table_field_2": ["child_2_date", "child_2_number"],
				"table_field_1_again": [
					"child_title",
					"child_date",
					"child_number",
					"child_another_number",
				],
			},
			export_data=True,
		)
		csv_array = e.get_csv_array()
		header_row = csv_array[0]

		# duplicate labels are disambiguated: by child-table label first,
		# then by "tablefieldname.fieldname" when the label still clashes
		self.assertEqual(
			header_row,
			[
				"Title",
				"Description",
				"Number",
				"another_number",
				"ID (Table Field 1)",
				"Child Title (Table Field 1)",
				"Child Description (Table Field 1)",
				"Child 2 Date (Table Field 2)",
				"Child 2 Number (Table Field 2)",
				"Child Title (Table Field 1 Again)",
				"Child Date (Table Field 1 Again)",
				"Child Number (Table Field 1 Again)",
				"table_field_1_again.child_another_number",
			],
		)

		table_field_1_row_1_name = doc.table_field_1[0].name
		table_field_1_row_2_name = doc.table_field_1[1].name
		# fmt: off
		# first row carries parent values + the first child row of each table
		self.assertEqual(
			csv_array[1],
			["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0]
		)
		# second row carries only the remaining child rows
		self.assertEqual(
			csv_array[2],
			["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""],
		)
		# fmt: on
		self.assertEqual(len(csv_array), 3)

	def test_export_csv_response(self):
		"""build_response() should populate frappe.response with CSV data."""
		e = Exporter(
			doctype_name,
			export_fields={doctype_name: ["title", "description"]},
			export_data=True,
			file_type="CSV"
		)
		e.build_response()

		self.assertTrue(frappe.response['result'])
		self.assertEqual(frappe.response['doctype'], doctype_name)
		self.assertEqual(frappe.response['type'], "csv")

View file

@ -1,40 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter_new import Exporter
class TestExporter(unittest.TestCase):
	"""Tests for the field-selection and data-export behaviour of the legacy
	Exporter (exporter_new)."""

	def test_exports_mandatory_fields(self):
		# 'Mandatory' exports only required fields (plus the ID column)
		exporter = Exporter('Web Page', export_fields='Mandatory')
		header = exporter.get_csv_array()[0]
		self.assertEqual(header, ['ID', 'Title'])

	def test_exports_all_fields(self):
		# 'All' exports every exportable field of the doctype
		exporter = Exporter('Web Page', export_fields='All')
		header = exporter.get_csv_array()[0]
		self.assertEqual(len(header), 37)

	def test_exports_selected_fields(self):
		# an explicit {doctype: [fieldnames]} mapping exports just those columns
		selection = {
			'Web Page': ['title', 'route', 'published']
		}
		exporter = Exporter('Web Page', export_fields=selection)
		header = exporter.get_csv_array()[0]
		self.assertEqual(header, ['Title', 'Route', 'Published'])

	def test_exports_data(self):
		# with export_data=True, one row per record follows the header row
		exporter = Exporter('ToDo', export_fields='All', export_data=True)
		expected_rows = frappe.db.count('ToDo')
		self.assertEqual(len(exporter.get_csv_array()), expected_rows + 1)

View file

@ -0,0 +1,183 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import getdate
doctype_name = 'DocType for Import'
class TestImporter(unittest.TestCase):
	"""End-to-end tests for the Data Import doctype: import from CSV fixture
	files, template preview, mandatory-field warnings and record updates."""

	def setUp(self):
		# ensure the custom test doctype (with child tables) exists
		create_doctype_if_not_exists(doctype_name)

	def test_data_import_from_file(self):
		"""Importing the sample CSV creates parent docs with all child rows."""
		import_file = get_import_file('sample_import_file')
		data_import = self.get_importer(doctype_name, import_file)
		data_import.start_import()

		doc1 = frappe.get_doc(doctype_name, 'Test')
		doc2 = frappe.get_doc(doctype_name, 'Test 2')
		doc3 = frappe.get_doc(doctype_name, 'Test 3')

		self.assertEqual(doc1.description, 'test description')
		self.assertEqual(doc1.number, 1)
		self.assertEqual(doc1.table_field_1[0].child_title, 'child title')
		self.assertEqual(doc1.table_field_1[0].child_description, 'child description')
		self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2')
		self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2')
		self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child')
		self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30'))
		self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5)
		self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again')
		self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2')
		self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22'))
		self.assertEqual(doc2.description, 'test description 2')
		self.assertEqual(doc3.another_number, 5)

	def test_data_import_preview(self):
		"""The preview parses the template into rows and columns."""
		import_file = get_import_file('sample_import_file')
		data_import = self.get_importer(doctype_name, import_file)
		preview = data_import.get_preview_from_template()

		self.assertEqual(len(preview.data), 4)
		self.assertEqual(len(preview.columns), 15)

	def test_data_import_without_mandatory_values(self):
		"""Rows missing mandatory values produce per-row template warnings."""
		import_file = get_import_file('sample_import_file_without_mandatory')
		data_import = self.get_importer(doctype_name, import_file)
		data_import.start_import()
		data_import.reload()

		# warnings are persisted on the document as JSON
		warnings = frappe.parse_json(data_import.template_warnings)
		self.assertEqual(warnings[0]['row'], 2)
		self.assertEqual(warnings[0]['message'], "<b>Child Title (Table Field 1)</b> is a mandatory field")
		self.assertEqual(warnings[1]['row'], 3)
		self.assertEqual(warnings[1]['message'], "<b>Child Title (Table Field 1 Again)</b> is a mandatory field")
		self.assertEqual(warnings[2]['row'], 4)
		self.assertEqual(warnings[2]['message'], "<b>Title</b> is a mandatory field")

	def test_data_import_update(self):
		"""'Update Existing Records' overwrites fields of an existing doc."""
		if not frappe.db.exists(doctype_name, 'Test 26'):
			frappe.get_doc(
				doctype=doctype_name,
				title='Test 26'
			).insert()

		import_file = get_import_file('sample_import_file_for_update')
		data_import = self.get_importer(doctype_name, import_file, update=True)
		data_import.start_import()

		updated_doc = frappe.get_doc(doctype_name, 'Test 26')
		self.assertEqual(updated_doc.description, 'test description')
		self.assertEqual(updated_doc.table_field_1[0].child_title, 'child title')
		self.assertEqual(updated_doc.table_field_1[0].child_description, 'child description')
		self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again')

	def get_importer(self, doctype, import_file, update=False):
		"""Helper: create a saved Data Import document for the given file."""
		data_import = frappe.new_doc('Data Import')
		data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records'
		data_import.reference_doctype = doctype
		data_import.import_file = import_file.file_url
		data_import.insert()
		return data_import
def create_doctype_if_not_exists(doctype_name, force=False):
	"""Create the custom test doctype (plus its two child-table doctypes)
	used by the importer/exporter tests.

	:param doctype_name: Name of the parent doctype to create
	:param force: If True, drop and recreate the doctypes even if they exist
	"""
	if force:
		frappe.delete_doc_if_exists('DocType', doctype_name)
		frappe.delete_doc_if_exists('DocType', 'Child 1 of ' + doctype_name)
		frappe.delete_doc_if_exists('DocType', 'Child 2 of ' + doctype_name)

	if frappe.db.exists('DocType', doctype_name):
		return

	# Child Table 1
	# note: child_number and child_another_number deliberately share the
	# label "Child Number" to exercise header de-duplication in the exporter
	table_1_name = 'Child 1 of ' + doctype_name
	frappe.get_doc({
		'doctype': 'DocType',
		'name': table_1_name,
		'module': 'Custom',
		'custom': 1,
		'istable': 1,
		'fields': [
			{'label': 'Child Title', 'fieldname': 'child_title', 'reqd': 1, 'fieldtype': 'Data'},
			{'label': 'Child Description', 'fieldname': 'child_description', 'fieldtype': 'Small Text'},
			{'label': 'Child Date', 'fieldname': 'child_date', 'fieldtype': 'Date'},
			{'label': 'Child Number', 'fieldname': 'child_number', 'fieldtype': 'Int'},
			{'label': 'Child Number', 'fieldname': 'child_another_number', 'fieldtype': 'Int'},
		]
	}).insert()

	# Child Table 2
	table_2_name = 'Child 2 of ' + doctype_name
	frappe.get_doc({
		'doctype': 'DocType',
		'name': table_2_name,
		'module': 'Custom',
		'custom': 1,
		'istable': 1,
		'fields': [
			{'label': 'Child 2 Title', 'fieldname': 'child_2_title', 'reqd': 1, 'fieldtype': 'Data'},
			{'label': 'Child 2 Description', 'fieldname': 'child_2_description', 'fieldtype': 'Small Text'},
			{'label': 'Child 2 Date', 'fieldname': 'child_2_date', 'fieldtype': 'Date'},
			{'label': 'Child 2 Number', 'fieldname': 'child_2_number', 'fieldtype': 'Int'},
			{'label': 'Child 2 Number', 'fieldname': 'child_2_another_number', 'fieldtype': 'Int'},
		]
	}).insert()

	# Main Table
	# table_field_1 and table_field_1_again point at the same child doctype
	# to test exporting the same child table under two table fields
	frappe.get_doc({
		'doctype': 'DocType',
		'name': doctype_name,
		'module': 'Custom',
		'custom': 1,
		'autoname': 'field:title',
		'fields': [
			{'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'},
			{'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'},
			{'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'},
			{'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'},
			{'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'},
			{'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name},
			{'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name},
			{'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name},
		],
		'permissions': [
			{'role': 'System Manager'}
		]
	}).insert()
def get_import_file(csv_file_name, force=False):
	"""Return a private File document wrapping the named fixture CSV,
	creating it from the fixtures directory on first use.

	:param csv_file_name: Fixture name without the '.csv' extension
	:param force: If True, delete any existing File first so content is re-read
	"""
	file_name = csv_file_name + '.csv'
	_file = frappe.db.exists('File', {'file_name': file_name})
	if force and _file:
		frappe.delete_doc_if_exists('File', _file)

	if frappe.db.exists('File', {'file_name': file_name}):
		f = frappe.get_doc('File', {'file_name': file_name})
	else:
		full_path = get_csv_file_path(file_name)
		f = frappe.get_doc(
			doctype='File',
			content=frappe.read_file(full_path),
			file_name=file_name,
			is_private=1
		)
		# ignore_permissions: fixtures must be creatable regardless of user
		f.save(ignore_permissions=True)

	return f
def get_csv_file_path(file_name):
	"""Absolute path of a fixture CSV inside the data_import doctype folder."""
	return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name)

View file

@ -1,78 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import datetime
import unittest
import frappe
from frappe.core.doctype.data_import.importer_new import Importer
# CSV fixtures used by TestImporter below; each imports Web Page records
# with the columns: title, start_date, idx, show_title.

# contains a completely blank row, which the importer should skip
content_empty_rows = '''title,start_date,idx,show_title
,,,
est phasellus sit amet,5/20/2019,52,1
nibh in,7/29/2019,77,1
'''

# first data row is missing the mandatory title
content_mandatory_missing = '''title,start_date,idx,show_title
,5/20/2019,52,1
'''

# string values that must be coerced per fieldtype (int / datetime / check)
content_convert_value = '''title,start_date,idx,show_title
est phasellus sit amet,5/20/2019,52,True
'''

# includes a column that does not exist on the doctype
content_invalid_column = '''title,start_date,idx,show_title,invalid_column
est phasellus sit amet,5/20/2019,52,True,invalid value
'''
class TestImporter(unittest.TestCase):
	"""Legacy tests for the importer_new.Importer CSV parsing/import logic."""

	def test_should_skip_empty_rows(self):
		# blank CSV rows must not produce import payloads
		i = self.get_importer('Web Page', content=content_empty_rows)
		payloads = i.get_payloads_for_import()
		row_to_be_imported = []
		for p in payloads:
			row_to_be_imported += [row[0] for row in p.rows]
		self.assertEqual(len(row_to_be_imported), 2)

	def test_should_throw_if_mandatory_is_missing(self):
		# a missing mandatory value surfaces as an import warning
		i = self.get_importer('Web Page', content=content_mandatory_missing)
		i.import_data()
		warning = i.warnings[0]
		self.assertTrue('Title is a mandatory field' in warning['message'])

	def test_should_convert_value_based_on_fieldtype(self):
		# CSV strings are coerced to the docfield's Python type
		i = self.get_importer('Web Page', content=content_convert_value)
		payloads = i.get_payloads_for_import()
		doc = payloads[0].doc
		self.assertEqual(type(doc['show_title']), int)
		self.assertEqual(type(doc['idx']), int)
		self.assertEqual(type(doc['start_date']), datetime.datetime)

	def test_should_ignore_invalid_columns(self):
		# columns that do not exist on the doctype are silently dropped
		i = self.get_importer('Web Page', content=content_invalid_column)
		payloads = i.get_payloads_for_import()
		doc = payloads[0].doc
		self.assertTrue('invalid_column' not in doc)
		self.assertTrue('title' in doc)

	def test_should_import_valid_template(self):
		# random suffix so repeated test runs do not collide on title
		title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8))
		content_valid_content = '''title,start_date,idx,show_title
{0},5/20/2019,52,1'''.format(title)
		i = self.get_importer('Web Page', content=content_valid_content)
		import_log = i.import_data()
		log = import_log[0]
		self.assertTrue(log.success)
		doc = frappe.get_doc('Web Page', { 'title': title })
		self.assertEqual(frappe.utils.get_datetime_str(doc.start_date),
			frappe.utils.get_datetime_str('2019-05-20'))

	def get_importer(self, doctype, content):
		"""Helper: an Importer wrapping the CSV content and a fresh,
		unsaved Data Import Beta document."""
		data_import = frappe.new_doc('Data Import Beta')
		data_import.import_type = 'Insert New Records'
		i = Importer(doctype, content=content, data_import=data_import)
		return i

View file

@ -1,511 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
// Form script for the beta Data Import tool: wires realtime progress events,
// renders the template preview/warnings and the per-row import log.
frappe.ui.form.on('Data Import Beta', {
// One-time setup: realtime listeners, doctype query filter, file restrictions.
setup(frm) {
// Background job finished (or was refreshed): reload this document.
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import Beta', frm.doc.name);
frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
frm.refresh();
});
});
// Live progress bar + ETA while the background job imports rows.
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
// Only doctypes flagged as importable can be selected.
frm.set_query('reference_doctype', () => {
return {
filters: {
allow_import: 1
}
};
});
// Restrict the attach control to spreadsheet-like files.
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
},
// Re-render all dashboard sections and pick the primary action.
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
frm.disable_save();
// Primary action: Save until a file is attached, then Start Import/Retry.
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && frm.doc.import_file) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
// Sync the page indicator with the document's status.
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
// Headline summarising how many records were imported/updated.
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}.', message_args)
: __('Successfully imported {0} record out of {1}.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}.', message_args)
: __('Successfully updated {0} record out of {1}.', message_args);
}
}
frm.dashboard.set_headline(message);
},
// If the import errored, offer a button that reports the stored traceback.
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
// Wrap the logged traceback so the standard error dialog can show it.
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
// Kick off the server-side background import.
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
// Open the template exporter dialog (reusing a cached instance per doctype).
download_template(frm) {
if (
frm.data_exporter &&
frm.data_exporter.doctype === frm.doc.reference_doctype
) {
frm.data_exporter.dialog.show();
set_export_records();
} else {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype
);
set_export_records();
});
}
function set_export_records() {
if (frm.doc.import_type === 'Insert New Records') {
frm.data_exporter.dialog.set_value('export_records', 'blank_template');
} else {
frm.data_exporter.dialog.set_value('export_records', 'all');
}
// Force ID field to be exported when updating existing records
let id_field = frm.data_exporter.dialog.get_field(
frm.doc.reference_doctype
).options[0];
if (id_field.value === 'name' && id_field.$checkbox) {
id_field.$checkbox
.find('input')
.prop('disabled', frm.doc.import_type === 'Update Existing Records');
}
}
},
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
// Show the "Submit After Import" checkbox only for submittable doctypes.
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
// Fetch and render the preview of the attached template file.
import_file(frm) {
frm.toggle_display('section_import_preview', frm.doc.import_file);
if (!frm.doc.import_file) {
frm.get_field('import_preview').$wrapper.empty();
return;
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: { data_import: frm.doc.name },
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
// Render (or refresh) the ImportPreview widget with current preview data.
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
// Persist a column remap chosen in the preview, then re-render.
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.remap_column = template_options.remap_column || {};
Object.assign(template_options.remap_column, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
// Download a template containing only the rows that failed to import.
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
// Render template + preview warnings, grouped per row where possible.
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="alert border" data-row="${row_number}">
<div class="uppercase">${__('Row {0}', [row_number])}</div>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="alert border" data-col="${warning.col}">
<div class="uppercase">${header}</div>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-6 warnings text-muted">${html}</div>
</div>
`);
},
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
// Render the per-row result table; optionally filtered to failures only.
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
// Failure: show the messages plus a collapsible traceback.
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
// Offer to auto-create missing linked records when each has one mandatory field.
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});

View file

@ -1,170 +0,0 @@
{
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"section_import_preview",
"import_preview",
"import_warnings_section",
"template_warnings",
"import_warnings",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File"
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-02-17 15:35:04.386098",
"modified_by": "faris@erpnext.com",
"module": "Core",
"name": "Data Import Beta",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,119 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.exporter_new import Exporter
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.background_jobs import enqueue
from frappe import _
class DataImportBeta(Document):
	"""Controller for the beta Data Import tool."""

	def validate(self):
		# Reset cached template metadata when the file is removed or replaced.
		previous = self.get_doc_before_save()
		file_changed = previous and previous.import_file != self.import_file
		if not self.import_file or file_changed:
			self.template_options = ""
			self.template_warnings = ""

		if self.import_file:
			# Instantiating the importer validates the attached template.
			self.get_importer()

	def get_preview_from_template(self):
		# Nothing to preview until a file is attached.
		if not self.import_file:
			return
		return self.get_importer().get_data_for_import_preview()

	def start_import(self):
		"""Enqueue the import as a background job; return True if it was enqueued."""
		if frappe.utils.scheduler.is_scheduler_inactive():
			frappe.throw(
				_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
			)

		already_queued = any(d.get("job_name") == self.name for d in get_info())
		if already_queued:
			return False

		enqueue(
			start_import,
			queue="default",
			timeout=6000,
			event="data_import",
			job_name=self.name,
			data_import=self.name,
			# Run inline during tests / developer mode for immediate feedback.
			now=frappe.conf.developer_mode or frappe.flags.in_test,
		)
		return True

	def export_errored_rows(self):
		return self.get_importer().export_errored_rows()

	def get_importer(self):
		return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_preview_from_template(data_import):
	"""Return preview data for the template attached to *data_import*."""
	doc = frappe.get_doc("Data Import Beta", data_import)
	return doc.get_preview_from_template()
@frappe.whitelist()
def form_start_import(data_import):
	"""Whitelisted entry point for the form's Start Import button."""
	doc = frappe.get_doc("Data Import Beta", data_import)
	return doc.start_import()
def start_import(data_import):
	"""Run the import for *data_import* (name of a Data Import Beta doc).

	Executes inside a background job. On failure the transaction is rolled
	back, the document status is set to Error and the traceback is logged;
	a realtime event then tells the open form to refresh either way.
	"""
	data_import = frappe.get_doc("Data Import Beta", data_import)
	try:
		i = Importer(data_import.reference_doctype, data_import=data_import)
		i.import_data()
	except Exception:
		# Was a bare `except:`, which also swallows SystemExit and
		# KeyboardInterrupt; Exception is broad enough for a job boundary.
		frappe.db.rollback()
		data_import.db_set("status", "Error")
		frappe.log_error(title=data_import.name)
	frappe.db.commit()
	frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
	doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
	"""Build and stream an import template via Exporter.

	:param doctype: Document Type to export
	:param export_fields: fields to export as dict, e.g.
		{'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
	:param export_records: one of 'all', 'by_filter', 'blank_template', '5_records'
	:param export_filters: filter dict applied when exporting data
	:param file_type: file type to export into
	"""
	export_fields = frappe.parse_json(export_fields)
	export_filters = frappe.parse_json(export_filters)

	Exporter(
		doctype,
		export_fields=export_fields,
		# A blank template carries no data rows.
		export_data=export_records != "blank_template",
		export_filters=export_filters,
		file_type=file_type,
		export_page_length=5 if export_records == "5_records" else None,
	).build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
	"""Stream a template containing only the rows that failed to import."""
	frappe.get_doc("Data Import Beta", data_import_name).export_errored_rows()

View file

@ -1,40 +0,0 @@
// Names of Data Import Beta documents with an import currently running.
let imports_in_progress = [];

frappe.listview_settings['Data Import Beta'] = {
	onload(listview) {
		// Remember imports that report progress so their indicator can show
		// "In Progress" until the matching refresh event clears them.
		frappe.realtime.on('data_import_progress', data => {
			if (imports_in_progress.indexOf(data.data_import) === -1) {
				imports_in_progress.push(data.data_import);
			}
		});
		frappe.realtime.on('data_import_refresh', data => {
			imports_in_progress = imports_in_progress.filter(
				name => name !== data.data_import
			);
			listview.refresh();
		});
	},
	get_indicator: function(doc) {
		const colors = {
			'Pending': 'orange',
			'Partial Success': 'orange',
			'Success': 'green',
			'In Progress': 'orange',
			'Error': 'red'
		};
		const status = imports_in_progress.includes(doc.name)
			? 'In Progress'
			: doc.status;
		return [__(status), colors[status], 'status,=,' + doc.status];
	},
	formatters: {
		import_type(value) {
			const labels = {
				'Insert New Records': __('Insert'),
				'Update Existing Records': __('Update')
			};
			return labels[value];
		}
	},
	hide_name_column: true
};

View file

@ -0,0 +1,324 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
// Form script for the legacy Data Import tool: doctype selection, realtime
// progress bar, start-import action and the rendered import log.
frappe.ui.form.on('Data Import Legacy', {
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
// Limit reference_doctype to doctypes the current user may import.
frappe.call({
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
}
// NOTE(review): this trailing `),` chains the next statement via the comma
// operator — likely meant to be `);`. Behavior is unchanged either way.
}),
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
// Drive the dashboard progress bar from background-job events.
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
}
});
},
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
}
},
// Re-render indicator, buttons, primary action and the log section.
refresh: function(frm) {
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import Legacy'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
}
}
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
}
// Once chosen, the action can no longer be changed.
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
}
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
});
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
});
}
// Start Import is offered only for validated, not-yet-running imports.
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
} else {
$(frm.fields_dict.import_log.wrapper).empty();
}
},
// Map the selected action onto the hidden insert_new/overwrite flags.
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
}
frm.save();
},
// The following checkbox handlers persist the toggle immediately.
only_update: function(frm) {
frm.save();
},
submit_after_import: function(frm) {
frm.save();
},
skip_errors: function(frm) {
frm.save();
},
ignore_encoding_errors: function(frm) {
frm.save();
},
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
// Render the stored log_details JSON through the log_details template.
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
}
});
frappe.provide('frappe.data_import');

// Build the "Download Template" dialog for the legacy importer: lets the
// user pick columns (per parent and child doctype), file type and whether
// to include existing data, then posts to the export_data endpoint.
frappe.data_import.download_dialog = function(frm) {
var dialog;
// Only plain value fields that are visible can be exported.
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
// All MultiCheck controls in the dialog (parent + child doctypes).
const get_doctype_checkbox_fields = () => {
return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
.map(df => dialog.fields_dict[df.fieldname]);
};
// Checkbox options for the parent doctype; mandatory fields are flagged.
const doctype_fields = get_fields(frm.doc.reference_doctype)
.map(df => {
let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
return {
label: df.label,
reqd: reqd,
danger: reqd,
value: df.fieldname,
checked: 1
};
});
let fields = [
{
"label": __("Select Columns"),
"fieldname": "select_columns",
"fieldtype": "Select",
"options": "All\nMandatory\nManually",
"reqd": 1,
// Reset the checkboxes to match the chosen selection mode.
"onchange": function() {
const fields = get_doctype_checkbox_fields();
fields.map(f => f.toggle(true));
if(this.value == 'Mandatory' || this.value == 'Manually') {
checkbox_toggle(true);
// Re-check only the mandatory options.
fields.map(multicheck_field => {
multicheck_field.options.map(option => {
if(!option.reqd) return;
$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
.prop('checked', false)
.trigger('click');
});
});
} else if(this.value == 'All'){
$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
.prop('disabled', true);
}
}
},
{
"label": __("File Type"),
"fieldname": "file_type",
"fieldtype": "Select",
"options": "Excel\nCSV",
"default": "Excel"
},
{
"label": __("Download with Data"),
"fieldname": "with_data",
"fieldtype": "Check",
"hidden": !frm.doc.overwrite,
"default": 1
},
{
"label": __("Select All"),
"fieldname": "select_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle();
}
},
{
"label": __("Unselect All"),
"fieldname": "unselect_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle(true);
}
},
{
"label": frm.doc.reference_doctype,
"fieldname": "doctype_fields",
"fieldtype": "MultiCheck",
"options": doctype_fields,
"columns": 2,
"hidden": 1
}
];
// One MultiCheck per child table of the reference doctype.
const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
.map(df => {
return {
"label": df.options,
"fieldname": df.fieldname + '_fields',
"fieldtype": "MultiCheck",
"options": frappe.meta.get_docfields(df.options)
.filter(filter_fields)
.map(df => ({
label: df.label,
reqd: df.reqd ? 1 : 0,
value: df.fieldname,
checked: 1,
danger: df.reqd
})),
"columns": 2,
"hidden": 1
};
});
fields = fields.concat(child_table_fields);
dialog = new frappe.ui.Dialog({
title: __('Download Template'),
fields: fields,
// Collect the checked columns and POST to the export endpoint.
primary_action: function(values) {
var data = values;
if (frm.doc.reference_doctype) {
var export_params = () => {
let columns = {};
if(values.select_columns) {
columns = get_doctype_checkbox_fields().reduce((columns, field) => {
const options = field.get_checked_options();
columns[field.df.label] = options;
return columns;
}, {});
}
return {
doctype: frm.doc.reference_doctype,
parent_doctype: frm.doc.reference_doctype,
select_columns: JSON.stringify(columns),
with_data: frm.doc.overwrite && data.with_data,
all_doctypes: true,
file_type: data.file_type,
template: true
};
};
let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
open_url_post(get_template_url, export_params());
} else {
frappe.msgprint(__("Please select the Document Type."));
}
dialog.hide();
},
primary_action_label: __('Download')
});
// Lay the Select All / Unselect All buttons out side by side.
$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
.wrapAll('<div class="inline-buttons" />');
const button_container = $(dialog.body).find('.inline-buttons');
button_container.addClass('flex');
$(button_container).find('.frappe-control').map((index, button) => {
$(button).css({"margin-right": "1em"});
});
// Check or uncheck every checkbox in every MultiCheck control.
function checkbox_toggle(checked=false) {
$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
});
}
return dialog;
};

View file

@ -0,0 +1,218 @@
{
"actions": [],
"allow_copy": 1,
"creation": "2020-06-11 16:13:23.813709",
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"action",
"insert_new",
"overwrite",
"only_update",
"section_break_4",
"import_file",
"column_break_4",
"error_file",
"section_break_6",
"skip_errors",
"submit_after_import",
"ignore_encoding_errors",
"no_email",
"import_detail",
"import_status",
"show_only_errors",
"import_log",
"log_details",
"total_rows",
"amended_from"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"ignore_user_permissions": 1,
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1
},
{
"fieldname": "action",
"fieldtype": "Select",
"label": "Action",
"options": "Insert new records\nUpdate records",
"reqd": 1
},
{
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"label": "Insert new records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"label": "Update records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"label": "Don't create new records"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break"
},
{
"fieldname": "import_file",
"fieldtype": "Attach",
"label": "Attach file for Import"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"label": "Generated File"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldtype": "Section Break"
},
{
"default": "0",
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"label": "Skip rows with errors"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit after importing"
},
{
"default": "0",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"label": "Ignore encoding errors"
},
{
"default": "1",
"fieldname": "no_email",
"fieldtype": "Check",
"label": "Do not send Emails"
},
{
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_status",
"fieldtype": "Select",
"label": "Import Status",
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"read_only": 1
},
{
"allow_on_submit": 1,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"label": "Show only errors",
"no_copy": 1,
"print_hide": 1
},
{
"allow_on_submit": 1,
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"label": "Import Log"
},
{
"allow_on_submit": 1,
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"label": "Log Details",
"read_only": 1
},
{
"fieldname": "total_rows",
"fieldtype": "Int",
"hidden": 1,
"label": "Total Rows",
"read_only": 1
},
{
"fieldname": "amended_from",
"fieldtype": "Link",
"label": "Amended From",
"no_copy": 1,
"options": "Data Import Legacy",
"print_hide": 1,
"read_only": 1
}
],
"is_submittable": 1,
"links": [],
"max_attachments": 1,
"modified": "2020-06-11 16:13:23.813709",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Legacy",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"submit": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 1
}

View file

@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import_legacy.importer import upload
from frappe.utils.background_jobs import enqueue
class DataImportLegacy(Document):
	"""Legacy Data Import document.

	Tracks one CSV/XLSX import run and validates the uploaded template
	as soon as a file is attached.
	"""

	def autoname(self):
		# Fall back to a timestamp-based name when none was supplied.
		if not self.name:
			self.name = "Import on " +format_datetime(self.creation)

	def validate(self):
		# No file attached yet: reset the cached row count directly in the DB.
		if not self.import_file:
			self.db_set("total_rows", 0)
		# Block edits while a background import is still running.
		if self.import_status == "In Progress":
			frappe.throw(_("Can't save the form as data import is in progress."))

		# validate the template just after the upload
		# if there is total_rows in the doc, it means that the template is already validated and error free
		if self.import_file and not self.total_rows:
			upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
@frappe.whitelist()
def get_importable_doctypes():
	"""Return the cached list of DocTypes the current session user may import."""
	cache = frappe.cache()
	return cache.hget("can_import", frappe.session.user)
@frappe.whitelist()
def import_data(data_import):
	"""Mark the given Data Import Legacy document as running and enqueue
	a background job that performs the actual upload."""
	frappe.db.set_value("Data Import Legacy", data_import, "import_status",
		"In Progress", update_modified=False)
	frappe.publish_realtime("data_import_progress",
		{"progress": "0", "data_import": data_import, "reload": True},
		user=frappe.session.user)

	from frappe.core.page.background_jobs.background_jobs import get_info

	# Do not enqueue the same import twice.
	queued = {job.get("job_name") for job in get_info()}
	if data_import not in queued:
		enqueue(upload, queue='default', timeout=6000, event='data_import',
			job_name=data_import, data_import_doc=data_import,
			from_data_import="Yes", user=frappe.session.user)
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
	insert=False, submit=False, pre_process=None):
	"""Import a file, or every file in a directory, into the database.

	``.json`` files go through the module-file importer (emails muted for
	the duration); ``.csv`` files go through the legacy row importer.
	Each file is committed individually.
	"""
	if os.path.isdir(path):
		targets = [os.path.join(path, entry) for entry in os.listdir(path)]
	else:
		targets = [path]

	for target in targets:
		if target.endswith(".json"):
			frappe.flags.mute_emails = True
			frappe.modules.import_file.import_file_by_path(target, data_import=True,
				force=True, pre_process=pre_process, reset_permissions=True)
			frappe.flags.mute_emails = False
			frappe.db.commit()
		elif target.endswith(".csv"):
			import_file_by_path(target, ignore_links=ignore_links, overwrite=overwrite,
				submit=submit, pre_process=pre_process)
			frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
	"""Read a CSV file at *path* and feed its rows to the legacy uploader."""
	from frappe.utils.csvutils import read_csv_content

	print("Importing " + path)
	with open(path, "r") as infile:
		parsed_rows = read_csv_content(infile.read())
		upload(rows=parsed_rows, ignore_links=ignore_links, no_email=no_email,
			overwrite=overwrite, submit_after_import=submit, pre_process=pre_process)
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
	"""Dump documents of *doctype* to *path* as JSON, stripping volatile keys
	(owner, timestamps, idx, child identity) so the output is fixture-friendly."""

	def post_process(out):
		# Keys that must not travel with exported data.
		del_keys = ('modified_by', 'creation', 'owner', 'idx')
		for doc in out:
			for key in del_keys:
				if key in doc:
					del doc[key]

			for k, v in doc.items():
				if isinstance(v, list):
					# Child rows additionally drop identity/state keys.
					for child in v:
						for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
							if key in child:
								del child[key]

	if name:
		docs = [frappe.get_doc(doctype, name).as_dict()]
	elif frappe.db.get_value("DocType", doctype, "issingle"):
		docs = [frappe.get_doc(doctype).as_dict()]
	else:
		rows = frappe.get_all(doctype, fields=["name"], filters=filters,
			or_filters=or_filters, limit_page_length=0, order_by=order_by)
		docs = [frappe.get_doc(doctype, row.name).as_dict() for row in rows]
	post_process(docs)

	# If the target directory does not exist, retry relative to the parent.
	dirname = os.path.dirname(path)
	if not os.path.exists(dirname):
		path = os.path.join('..', path)

	with open(path, "w") as outfile:
		outfile.write(frappe.as_json(docs))
def export_csv(doctype, path):
	"""Write a full data-export CSV template (with data) for *doctype* to *path*."""
	from frappe.core.doctype.data_export.exporter import export_data

	with open(path, "wb") as csvfile:
		# export_data leaves its CSV payload on frappe.response.result.
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		payload = frappe.response.result
		csvfile.write(payload.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
	"""Export *doctype* as a JSON fixture into *app*'s ``fixtures`` folder.

	Restricted to the Administrator user.
	"""
	if frappe.session.user != "Administrator":
		raise frappe.PermissionError

	fixtures_dir = frappe.get_app_path(app, "fixtures")
	if not os.path.exists(fixtures_dir):
		os.mkdir(fixtures_dir)

	target = frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json")
	export_json(doctype, target, order_by="name asc")

View file

@ -0,0 +1,24 @@
frappe.listview_settings['Data Import Legacy'] = {
	add_fields: ["import_status"],
	has_indicator_for_draft: 1,
	// Map import_status to a list-view indicator [label, colour, filter].
	get_indicator: function(doc) {
		let status = {
			'Successful': [__("Success"), "green", "import_status,=,Successful"],
			'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
			'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
			'Failed': [__("Failed"), "red", "import_status,=,Failed"],
			'Pending': [__("Pending"), "orange", "import_status,=,"]
		};
		// The original docstatus check was dead code: both the draft branch
		// and the fallthrough returned "Pending". Also fall back to "Pending"
		// for an unrecognised status instead of returning undefined.
		return status[doc.import_status] || status['Pending'];
	}
};

View file

@ -0,0 +1,541 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
from six.moves import range
import requests
import frappe, json
import frappe.permissions
from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
from six import string_types
@frappe.whitelist()
def get_data_keys():
	"""Return the marker labels used to locate sections of the legacy template."""
	keys = {
		"data_separator": _('Start entering data below this line'),
		"main_table": _("Table") + ":",
		"parent_table": _("Parent Table") + ":",
		"columns": _("Column Name") + ":",
		"doctype": _("DocType") + ":",
	}
	return frappe._dict(keys)
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
	update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
	skip_errors = True, data_import_doc=None, validate_template=False, user=None):
	"""upload data

	Legacy row-based importer. Reads a CSV/XLSX template (either the *rows*
	argument or the file attached to *data_import_doc*), maps its header to
	fieldnames/fieldtypes, and inserts/updates documents in batches.
	Returns True when *validate_template* is set, a ``{"messages", "error"}``
	dict when run without a Data Import doc, otherwise updates the doc.
	"""
	# for translations
	if user:
		frappe.cache().hdel("lang", user)
		frappe.set_user_lang(user)

	if data_import_doc and isinstance(data_import_doc, string_types):
		data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc)
	if data_import_doc and from_data_import == "Yes":
		# Options come from the Data Import Legacy document itself.
		no_email = data_import_doc.no_email
		ignore_encoding_errors = data_import_doc.ignore_encoding_errors
		update_only = data_import_doc.only_update
		submit_after_import = data_import_doc.submit_after_import
		overwrite = data_import_doc.overwrite
		skip_errors = data_import_doc.skip_errors
	else:
		# extra input params
		params = json.loads(frappe.form_dict.get("params") or '{}')
		if params.get("submit_after_import"):
			submit_after_import = True
		if params.get("ignore_encoding_errors"):
			ignore_encoding_errors = True
		if not params.get("no_email"):
			no_email = False
		if params.get('update_only'):
			update_only = True
		if params.get('from_data_import'):
			from_data_import = params.get('from_data_import')
		if not params.get('skip_errors'):
			skip_errors = params.get('skip_errors')

	frappe.flags.in_import = True
	frappe.flags.mute_emails = no_email

	def get_data_keys_definition():
		# Section-marker labels (data separator, table/column headers).
		return get_data_keys()

	def bad_template():
		frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

	def check_data_length():
		if not data:
			frappe.throw(_("No data found in the file. Please reattach the new file with data."))

	def get_start_row():
		# First row index after the data-separator marker; throws on a
		# tampered template.
		for i, row in enumerate(rows):
			if row and row[0]==get_data_keys_definition().data_separator:
				return i+1
		bad_template()

	def get_header_row(key):
		return get_header_row_and_idx(key)[0]

	def get_header_row_and_idx(key):
		# Locate a header row by its first-column marker; ([], -1) if absent.
		for i, row in enumerate(header):
			if row and row[0]==key:
				return row, i
		return [], -1

	def filter_empty_columns(columns):
		empty_cols = list(filter(lambda x: x in ("", None), columns))

		if empty_cols:
			if columns[-1*len(empty_cols):] == empty_cols:
				# filter empty columns if they exist at the end
				columns = columns[:-1*len(empty_cols)]
			else:
				frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
					raise_exception=1)

		return columns

	def make_column_map():
		# Build (doctype, parentfield) -> {column index: fieldname/fieldtype}
		# maps from the "DocType:" header row and the two rows below it.
		doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
		if row_idx == -1: # old style
			return

		dt = None
		for i, d in enumerate(doctype_row[1:]):
			if d not in ("~", "-"):
				if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
					dt, parentfield = d, None
					# xls format truncates the row, so it may not have more columns
					if len(doctype_row) > i+2:
						parentfield = doctype_row[i+2]
					doctypes.append((dt, parentfield))
					column_idx_to_fieldname[(dt, parentfield)] = {}
					column_idx_to_fieldtype[(dt, parentfield)] = {}
				if dt:
					column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
					column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

	def get_doc(start_idx):
		# Assemble one parent doc (plus child rows and attachment URLs)
		# starting at row *start_idx*; returns (doc, attachments, last row idx).
		if doctypes:
			doc = {}
			attachments = []
			last_error_row_idx = None
			for idx in range(start_idx, len(rows)):
				last_error_row_idx = idx # pylint: disable=W0612
				if (not doc) or main_doc_empty(rows[idx]):
					for dt, parentfield in doctypes:
						d = {}
						for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
							try:
								fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
								fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

								if not fieldname or not rows[idx][column_idx]:
									continue

								d[fieldname] = rows[idx][column_idx]
								# Coerce cell values by declared fieldtype.
								if fieldtype in ("Int", "Check"):
									d[fieldname] = cint(d[fieldname])
								elif fieldtype in ("Float", "Currency", "Percent"):
									d[fieldname] = flt(d[fieldname])
								elif fieldtype == "Date":
									if d[fieldname] and isinstance(d[fieldname], string_types):
										d[fieldname] = getdate(parse_date(d[fieldname]))
								elif fieldtype == "Datetime":
									if d[fieldname]:
										if " " in d[fieldname]:
											_date, _time = d[fieldname].split()
										else:
											_date, _time = d[fieldname], '00:00:00'
										_date = parse_date(d[fieldname])
										d[fieldname] = get_datetime(_date + " " + _time)
									else:
										d[fieldname] = None
								elif fieldtype in ("Image", "Attach Image", "Attach"):
									# added file to attachments list
									attachments.append(d[fieldname])
								elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
									# as fields can be saved in the number format(long type) in data import template
									d[fieldname] = cstr(d[fieldname])
							except IndexError:
								pass

						# scrub quotes from name and modified
						if d.get("name") and d["name"].startswith('"'):
							d["name"] = d["name"][1:-1]

						if sum([0 if not val else 1 for val in d.values()]):
							d['doctype'] = dt
							if dt == doctype:
								doc.update(d)
							else:
								if not overwrite and doc.get("name"):
									d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
				else:
					break

			return doc, attachments, last_error_row_idx
		else:
			# Old-style single-table template: zip header columns with values.
			doc = frappe._dict(zip(columns, rows[start_idx][1:]))
			doc['doctype'] = doctype
			return doc, [], None

	# used in testing whether a row is empty or parent row or child row
	# checked only 3 first columns since first two columns can be blank for example the case of
	# importing the item variant where item code and item name will be blank.
	def main_doc_empty(row):
		if row:
			for i in range(3,0,-1):
				if len(row) > i and row[i]:
					return False
		return True

	def validate_naming(doc):
		# For field- or series-based autoname, require that value in the row
		# unless the controller defines its own autoname hook.
		autoname = frappe.get_meta(doctype).autoname
		if autoname:
			if autoname[0:5] == 'field':
				autoname = autoname[6:]
			elif autoname == 'naming_series:':
				autoname = 'naming_series'
			else:
				return True

			if (autoname not in doc) or (not doc[autoname]):
				from frappe.model.base_document import get_controller
				if not hasattr(get_controller(doctype), "autoname"):
					frappe.throw(_("{0} is a mandatory field").format(autoname))
		return True

	users = frappe.db.sql_list("select name from tabUser")
	def prepare_for_insert(doc):
		# don't block data import if user is not set
		# migrating from another system
		if not doc.owner in users:
			doc.owner = frappe.session.user
		if not doc.modified_by in users:
			doc.modified_by = frappe.session.user

	def is_valid_url(url):
		# Site-relative file URLs are expanded to absolute before the check.
		is_valid = False
		if url.startswith("/files") or url.startswith("/private/files"):
			url = get_url(url)
		try:
			r = requests.get(url)
			is_valid = True if r.status_code == 200 else False
		except Exception:
			pass
		return is_valid

	def attach_file_to_doc(doctype, docname, file_url):
		# check if attachment is already available
		# check if the attachement link is relative or not
		if not file_url:
			return
		if not is_valid_url(file_url):
			return

		files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
			attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
				doctype=doctype,
				docname=docname,
				file_url=file_url
			))

		if files:
			# file is already attached
			return

		_file = frappe.get_doc({
			"doctype": "File",
			"file_url": file_url,
			"attached_to_name": docname,
			"attached_to_doctype": doctype,
			"attached_to_field": 0,
			"folder": "Home/Attachments"})
		_file.save()

	# header
	# When rows were not passed in directly, read them from the file
	# attached to the Data Import document (xlsx or csv).
	filename, file_extension = ['','']
	if not rows:
		_file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
		fcontent = _file.get_content()
		filename, file_extension = _file.get_extension()
		if file_extension == '.xlsx' and from_data_import == 'Yes':
			from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
			rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)
		elif file_extension == '.csv':
			from frappe.utils.csvutils import read_csv_content
			rows = read_csv_content(fcontent, ignore_encoding_errors)
		else:
			frappe.throw(_("Unsupported File Format"))

	# Split the sheet into the header section and the data section.
	start_row = get_start_row()
	header = rows[:start_row]
	data = rows[start_row:]
	try:
		doctype = get_header_row(get_data_keys_definition().main_table)[1]
		columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
	except:
		frappe.throw(_("Cannot change header content"))
	doctypes = []
	column_idx_to_fieldname = {}
	column_idx_to_fieldtype = {}

	if skip_errors:
		# Error rows are collected under a copy of the header so they can be
		# re-exported as a fixable template.
		data_rows_with_error = header

	if submit_after_import and not cint(frappe.db.get_value("DocType",
			doctype, "is_submittable")):
		submit_after_import = False

	parenttype = get_header_row(get_data_keys_definition().parent_table)

	if len(parenttype) > 1:
		parenttype = parenttype[1]

	# check permissions
	if not frappe.permissions.can_import(parenttype or doctype):
		frappe.flags.mute_emails = False
		return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

	# Throw expception in case of the empty data file
	check_data_length()
	make_column_map()
	total = len(data)

	if validate_template:
		# Template-only validation: record the row count and stop here.
		if total:
			data_import_doc.total_rows = total
		return True

	if overwrite==None:
		# NOTE(review): `params` is only bound in the non-data_import branch
		# above — if overwrite is None on the data_import path this would
		# raise NameError. TODO confirm overwrite is always 0/1 there.
		overwrite = params.get('overwrite')

	# delete child rows (if parenttype)
	parentfield = None
	if parenttype:
		parentfield = get_parent_field(doctype, parenttype)

		if overwrite:
			delete_child_rows(data, doctype)

	import_log = []
	def log(**kwargs):
		if via_console:
			print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
		else:
			import_log.append(kwargs)

	def as_link(doctype, name):
		if via_console:
			return "{0}: {1}".format(doctype, name)
		else:
			return getlink(doctype, name)

	# publish realtime task update
	def publish_progress(achieved, reload=False):
		if data_import_doc:
			frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
				"data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)

	error_flag = rollback_flag = False

	batch_size = frappe.conf.data_import_batch_size or 1000

	# Process data in batches; each batch is committed (or rolled back when
	# skip_errors is off and a row failed).
	for batch_start in range(0, total, batch_size):
		batch = data[batch_start:batch_start + batch_size]

		for i, row in enumerate(batch):
			# bypass empty rows
			if main_doc_empty(row):
				continue

			row_idx = i + start_row
			doc = None

			publish_progress(i)

			try:
				doc, attachments, last_error_row_idx = get_doc(row_idx)
				validate_naming(doc)
				if pre_process:
					pre_process(doc)

				original = None
				if parentfield:
					# Child-table import: append the row to its parent doc.
					parent = frappe.get_doc(parenttype, doc["parent"])
					doc = parent.append(parentfield, doc)
					parent.save()
				else:
					if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
						original = frappe.get_doc(doctype, doc["name"])
						original_name = original.name
						original.update(doc)
						# preserve original name for case sensitivity
						original.name = original_name
						original.flags.ignore_links = ignore_links
						original.save()
						doc = original
					else:
						if not update_only:
							doc = frappe.get_doc(doc)
							prepare_for_insert(doc)
							doc.flags.ignore_links = ignore_links
							doc.insert()
					if attachments:
						# check file url and create a File document
						for file_url in attachments:
							attach_file_to_doc(doc.doctype, doc.name, file_url)
				if submit_after_import:
					doc.submit()

				# log errors
				if parentfield:
					log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
						"link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
				elif submit_after_import:
					log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
				elif original:
					log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				elif not update_only:
					log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				else:
					log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
						"message": "Document updation ignored", "indicator": "orange"})

			except Exception as e:
				error_flag = True

				# build error message
				if frappe.local.message_log:
					err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
				else:
					err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

				error_trace = frappe.get_traceback()
				if error_trace:
					error_log_doc = frappe.log_error(error_trace)
					error_link = get_absolute_url("Error Log", error_log_doc.name)
				else:
					error_link = None

				log(**{
					"row": row_idx + 1,
					"title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
					"message": err_msg,
					"indicator": "red",
					"link":error_link
				})

				# data with error to create a new file
				# include the errored data in the last row as last_error_row_idx will not be updated for the last row
				if skip_errors:
					if last_error_row_idx == len(rows)-1:
						last_error_row_idx = len(rows)
					data_rows_with_error += rows[row_idx:last_error_row_idx]
				else:
					rollback_flag = True
			finally:
				frappe.local.message_log = []

		start_row += batch_size
		if rollback_flag:
			frappe.db.rollback()
		else:
			frappe.db.commit()

	frappe.flags.mute_emails = False
	frappe.flags.in_import = False

	log_message = {"messages": import_log, "error": error_flag}
	if data_import_doc:
		data_import_doc.log_details = json.dumps(log_message)

		import_status = None
		if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
			import_status = "Partially Successful"
			# write the file with the faulty row
			file_name = 'error_' + filename + file_extension
			if file_extension == '.xlsx':
				from frappe.utils.xlsxutils import make_xlsx
				xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
				file_data = xlsx_file.getvalue()
			else:
				from frappe.utils.csvutils import to_csv
				file_data = to_csv(data_rows_with_error)
			_file = frappe.get_doc({
				"doctype": "File",
				"file_name": file_name,
				"attached_to_doctype": "Data Import Legacy",
				"attached_to_name": data_import_doc.name,
				"folder": "Home/Attachments",
				"content": file_data})
			_file.save()
			data_import_doc.error_file = _file.file_url
		elif error_flag:
			import_status = "Failed"
		else:
			import_status = "Successful"

		data_import_doc.import_status = import_status
		data_import_doc.save()
		if data_import_doc.import_status in ["Successful", "Partially Successful"]:
			data_import_doc.submit()
			publish_progress(100, True)
		else:
			publish_progress(0, True)
		frappe.db.commit()
	else:
		return log_message
def get_parent_field(doctype, parenttype):
	"""Return the fieldname of the child-table field on *parenttype* whose
	options point at *doctype*.

	Raises ``Exception`` (after a msgprint) when no such table field exists.
	"""
	parentfield = None

	# get parentfield
	if parenttype:
		for d in frappe.get_meta(parenttype).get_table_fields():
			if d.options==doctype:
				parentfield = d.fieldname
				break

		if not parentfield:
			# Fixed format string: was "{0} for {0} ({1})" with three args,
			# which repeated "parentfield" and never showed the doctype.
			frappe.msgprint(_("Did not find {0} for {1} ({2})").format("parentfield", parenttype, doctype))
			raise Exception

	return parentfield
def delete_child_rows(rows, doctype):
	"""delete child rows for all parents"""
	parents = {row[1] for row in rows}
	for parent in parents:
		if parent:
			frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), parent)

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestDataImportLegacy(unittest.TestCase):
pass

View file

@ -13,6 +13,8 @@
"fieldname",
"precision",
"length",
"hide_days",
"hide_seconds",
"reqd",
"search_index",
"in_list_view",
@ -87,7 +89,7 @@
"label": "Type",
"oldfieldname": "fieldtype",
"oldfieldtype": "Select",
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRead Only\nRating\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature",
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRead Only\nRating\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature",
"reqd": 1,
"search_index": 1
},
@ -450,6 +452,20 @@
"fieldname": "column_break_38",
"fieldtype": "Column Break"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Hide Days"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Hide Seconds"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Section Break'",
@ -461,7 +477,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2020-04-27 11:38:21.223185",
"modified": "2020-02-06 09:06:25.224413",
"modified_by": "Administrator",
"module": "Core",
"name": "DocField",

View file

@ -406,9 +406,13 @@ class DocType(Document):
with open(fname, 'r') as f:
code = f.read()
with open(fname, 'w') as f:
file_content = code.replace(old, new) # replace str with full str (js controllers)
file_content = file_content.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
if fname.endswith('.js'):
file_content = code.replace(old, new) # replace str with full str (js controllers)
elif fname.endswith('.py'):
file_content = code.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
f.write(file_content)
# updating json file with new name
@ -688,6 +692,9 @@ def validate_fields(meta):
def check_link_table_options(docname, d):
if frappe.flags.in_patch: return
if frappe.flags.in_fixtures: return
if d.fieldtype in ("Link",) + table_fields:
if not d.options:
frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError)
@ -908,6 +915,8 @@ def validate_fields(meta):
frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True)
def check_child_table_option(docfield):
if frappe.flags.in_fixtures: return
if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return
doctype = docfield.options

View file

@ -48,6 +48,8 @@ class File(Document):
def before_insert(self):
frappe.local.rollback_observers.append(self)
self.set_folder_name()
if self.file_name:
self.file_name = re.sub(r'/', '', self.file_name)
self.content = self.get("content", None)
self.decode = self.get("decode", False)
if self.content:
@ -180,11 +182,11 @@ class File(Document):
if duplicate_file:
duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name)
if duplicate_file_doc.exists_on_disk():
# if it is attached to a document then throw DuplicateEntryError
# if it is attached to a document then throw FileAlreadyAttachedException
if self.attached_to_doctype and self.attached_to_name:
self.duplicate_entry = duplicate_file.name
frappe.throw(_("Same file has already been attached to the record"),
frappe.DuplicateEntryError)
frappe.FileAlreadyAttachedException)
# else just use the url, to avoid uploading a duplicate
else:
self.file_url = duplicate_file.file_url
@ -192,6 +194,8 @@ class File(Document):
def set_file_name(self):
if not self.file_name and self.file_url:
self.file_name = self.file_url.split('/')[-1]
else:
self.file_name = re.sub(r'/', '', self.file_name)
def generate_content_hash(self):
if self.content_hash or not self.file_url or self.file_url.startswith('http'):
@ -405,6 +409,12 @@ class File(Document):
frappe.throw(_("URL must start with 'http://' or 'https://'"))
return
if not self.file_url.startswith(("http://", "https://")):
# local file
root_files_path = get_files_path(is_private=self.is_private)
if not os.path.commonpath([root_files_path]) == os.path.commonpath([root_files_path, self.get_full_path()]):
# basically the file url is skewed to not point to /files/ or /private/files
frappe.throw(_("{0} is not a valid file url").format(self.file_url))
self.file_url = unquote(self.file_url)
self.file_size = frappe.form_dict.file_size or self.file_size
@ -704,7 +714,12 @@ def remove_all(dt, dn, from_delete=False):
try:
for fid in frappe.db.sql_list("""select name from `tabFile` where
attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)):
remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
if from_delete:
# If deleting a doc, directly delete files
frappe.delete_doc("File", fid, ignore_permissions=True)
else:
# Removes file and adds a comment in the document it is attached to
remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
except Exception as e:
if e.args[0]!=1054: raise # (temp till for patched)

View file

@ -12,7 +12,7 @@ class InstalledApplications(Document):
for app in frappe.utils.get_installed_apps_info():
self.append("installed_applications", {
"app_name": app.get("app_name"),
"app_version": app.get("version"),
"git_branch": app.get("branch")
"app_version": app.get("version") or "UNVERSIONED",
"git_branch": app.get("branch") or "UNVERSIONED"
})
self.save()

View file

@ -84,7 +84,7 @@ class ScheduledJobType(Document):
def log_status(self, status):
# log file
frappe.logger(__name__).info('Scheduled Job {0}: {1} for {2}'.format(status, self.method, frappe.local.site))
frappe.logger("scheduler").info('Scheduled Job {0}: {1} for {2}'.format(status, self.method, frappe.local.site))
self.update_scheduler_log(status)
def update_scheduler_log(self, status):

View file

@ -28,8 +28,7 @@ def get_session_default_values():
@frappe.whitelist()
def set_session_default_values(default_values):
if not frappe.flags.in_test:
default_values = json.loads(default_values)
default_values = frappe.parse_json(default_values)
for entry in default_values:
try:
frappe.defaults.set_user_default(entry, default_values.get(entry))

View file

@ -4,7 +4,7 @@
from __future__ import unicode_literals, print_function
import frappe
from frappe.model.document import Document
from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
from frappe.utils import cint, flt, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
from frappe import throw, msgprint, _
from frappe.utils.password import update_password as _update_password
from frappe.desk.notifications import clear_notifications
@ -841,11 +841,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters):
def get_total_users():
"""Returns total no. of system users"""
return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
FROM `tabUser`
WHERE `enabled` = 1
AND `user_type` = 'System User'
AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]
AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0])
def get_system_users(exclude_users=None, limit=None):
if not exclude_users:

View file

@ -21,6 +21,17 @@ class Version(Document):
else:
return False
def for_insert(self, doc):
updater_reference = doc.flags.updater_reference
data = {
'creation': doc.creation,
'updater_reference': updater_reference,
'created_by': doc.owner
}
self.ref_doctype = doc.doctype
self.docname = doc.name
self.data = frappe.as_json(data)
def get_data(self):
return json.loads(self.data)

View file

@ -0,0 +1,5 @@
.restricted-button {
cursor: default;
position: relative;
right: -5px;
}

View file

@ -26,6 +26,13 @@ class Dashboard {
</div>`).appendTo(this.wrapper.find(".page-content").empty());
this.container = this.wrapper.find(".dashboard-graph");
this.page = wrapper.page;
this.page.set_title_sub(
$(`<button class="restricted-button">
<span class="octicon octicon-lock"></span>
<span>${__('Restricted')}</span>
</button>`)
);
}
show() {

View file

@ -16,6 +16,8 @@
"column_break_6",
"fieldtype",
"precision",
"hide_seconds",
"hide_days",
"options",
"fetch_from",
"fetch_if_empty",
@ -56,368 +58,382 @@
],
"fields": [
{
"bold": 1,
"fieldname": "dt",
"fieldtype": "Link",
"in_filter": 1,
"in_list_view": 1,
"label": "Document",
"oldfieldname": "dt",
"oldfieldtype": "Link",
"options": "DocType",
"reqd": 1,
"search_index": 1
"bold": 1,
"fieldname": "dt",
"fieldtype": "Link",
"in_filter": 1,
"in_list_view": 1,
"label": "Document",
"oldfieldname": "dt",
"oldfieldtype": "Link",
"options": "DocType",
"reqd": 1,
"search_index": 1
},
{
"bold": 1,
"fieldname": "label",
"fieldtype": "Data",
"in_filter": 1,
"label": "Label",
"no_copy": 1,
"oldfieldname": "label",
"oldfieldtype": "Data"
"bold": 1,
"fieldname": "label",
"fieldtype": "Data",
"in_filter": 1,
"label": "Label",
"no_copy": 1,
"oldfieldname": "label",
"oldfieldtype": "Data"
},
{
"fieldname": "label_help",
"fieldtype": "HTML",
"label": "Label Help",
"oldfieldtype": "HTML"
"fieldname": "label_help",
"fieldtype": "HTML",
"label": "Label Help",
"oldfieldtype": "HTML"
},
{
"fieldname": "fieldname",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Fieldname",
"no_copy": 1,
"oldfieldname": "fieldname",
"oldfieldtype": "Data",
"read_only": 1
"fieldname": "fieldname",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Fieldname",
"no_copy": 1,
"oldfieldname": "fieldname",
"oldfieldtype": "Data",
"read_only": 1
},
{
"description": "Select the label after which you want to insert new field.",
"fieldname": "insert_after",
"fieldtype": "Select",
"label": "Insert After",
"no_copy": 1,
"oldfieldname": "insert_after",
"oldfieldtype": "Select"
"description": "Select the label after which you want to insert new field.",
"fieldname": "insert_after",
"fieldtype": "Select",
"label": "Insert After",
"no_copy": 1,
"oldfieldname": "insert_after",
"oldfieldtype": "Select"
},
{
"fieldname": "column_break_6",
"fieldtype": "Column Break"
"fieldname": "column_break_6",
"fieldtype": "Column Break"
},
{
"bold": 1,
"default": "Data",
"fieldname": "fieldtype",
"fieldtype": "Select",
"in_filter": 1,
"in_list_view": 1,
"label": "Field Type",
"oldfieldname": "fieldtype",
"oldfieldtype": "Select",
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDynamic Link\nFloat\nGeolocation\nHTML\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRating\nRead Only\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature",
"reqd": 1
"bold": 1,
"default": "Data",
"fieldname": "fieldtype",
"fieldtype": "Select",
"in_filter": 1,
"in_list_view": 1,
"label": "Field Type",
"oldfieldname": "fieldtype",
"oldfieldtype": "Select",
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nGeolocation\nHTML\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRating\nRead Only\nSection Break\nSelect\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime\nSignature",
"reqd": 1
},
{
"depends_on": "eval:in_list([\"Float\", \"Currency\", \"Percent\"], doc.fieldtype)",
"description": "Set non-standard precision for a Float or Currency field",
"fieldname": "precision",
"fieldtype": "Select",
"label": "Precision",
"options": "\n0\n1\n2\n3\n4\n5\n6\n7\n8\n9"
"depends_on": "eval:in_list([\"Float\", \"Currency\", \"Percent\"], doc.fieldtype)",
"description": "Set non-standard precision for a Float or Currency field",
"fieldname": "precision",
"fieldtype": "Select",
"label": "Precision",
"options": "\n0\n1\n2\n3\n4\n5\n6\n7\n8\n9"
},
{
"fieldname": "options",
"fieldtype": "Small Text",
"in_list_view": 1,
"label": "Options",
"oldfieldname": "options",
"oldfieldtype": "Text"
"fieldname": "options",
"fieldtype": "Small Text",
"in_list_view": 1,
"label": "Options",
"oldfieldname": "options",
"oldfieldtype": "Text"
},
{
"fieldname": "fetch_from",
"fieldtype": "Small Text",
"label": "Fetch From"
"fieldname": "fetch_from",
"fieldtype": "Small Text",
"label": "Fetch From"
},
{
"default": "0",
"description": "If checked, this field will be not overwritten based on Fetch From if a value already exists.",
"fieldname": "fetch_if_empty",
"fieldtype": "Check",
"label": "Fetch If Empty"
"default": "0",
"description": "If checked, this field will be not overwritten based on Fetch From if a value already exists.",
"fieldname": "fetch_if_empty",
"fieldtype": "Check",
"label": "Fetch If Empty"
},
{
"fieldname": "options_help",
"fieldtype": "HTML",
"label": "Options Help",
"oldfieldtype": "HTML"
"fieldname": "options_help",
"fieldtype": "HTML",
"label": "Options Help",
"oldfieldtype": "HTML"
},
{
"fieldname": "section_break_11",
"fieldtype": "Section Break"
"fieldname": "section_break_11",
"fieldtype": "Section Break"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible",
"fieldtype": "Check",
"label": "Collapsible"
"default": "0",
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible",
"fieldtype": "Check",
"label": "Collapsible"
},
{
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible_depends_on",
"fieldtype": "Code",
"label": "Collapsible Depends On"
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible_depends_on",
"fieldtype": "Code",
"label": "Collapsible Depends On"
},
{
"fieldname": "default",
"fieldtype": "Text",
"label": "Default Value",
"oldfieldname": "default",
"oldfieldtype": "Text"
"fieldname": "default",
"fieldtype": "Text",
"label": "Default Value",
"oldfieldname": "default",
"oldfieldtype": "Text"
},
{
"fieldname": "depends_on",
"fieldtype": "Code",
"label": "Depends On",
"length": 255
"fieldname": "depends_on",
"fieldtype": "Code",
"label": "Depends On",
"length": 255
},
{
"fieldname": "description",
"fieldtype": "Text",
"label": "Field Description",
"oldfieldname": "description",
"oldfieldtype": "Text",
"print_width": "300px",
"width": "300px"
"fieldname": "description",
"fieldtype": "Text",
"label": "Field Description",
"oldfieldname": "description",
"oldfieldtype": "Text",
"print_width": "300px",
"width": "300px"
},
{
"default": "0",
"fieldname": "permlevel",
"fieldtype": "Int",
"label": "Permission Level",
"oldfieldname": "permlevel",
"oldfieldtype": "Int"
"default": "0",
"fieldname": "permlevel",
"fieldtype": "Int",
"label": "Permission Level",
"oldfieldname": "permlevel",
"oldfieldtype": "Int"
},
{
"fieldname": "width",
"fieldtype": "Data",
"label": "Width",
"oldfieldname": "width",
"oldfieldtype": "Data"
"fieldname": "width",
"fieldtype": "Data",
"label": "Width",
"oldfieldname": "width",
"oldfieldtype": "Data"
},
{
"description": "Number of columns for a field in a List View or a Grid (Total Columns should be less than 11)",
"fieldname": "columns",
"fieldtype": "Int",
"label": "Columns"
"description": "Number of columns for a field in a List View or a Grid (Total Columns should be less than 11)",
"fieldname": "columns",
"fieldtype": "Int",
"label": "Columns"
},
{
"fieldname": "properties",
"fieldtype": "Column Break",
"oldfieldtype": "Column Break",
"print_width": "50%",
"width": "50%"
"fieldname": "properties",
"fieldtype": "Column Break",
"oldfieldtype": "Column Break",
"print_width": "50%",
"width": "50%"
},
{
"default": "0",
"fieldname": "reqd",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Is Mandatory Field",
"oldfieldname": "reqd",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "reqd",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Is Mandatory Field",
"oldfieldname": "reqd",
"oldfieldtype": "Check"
},
{
"default": "0",
"fieldname": "unique",
"fieldtype": "Check",
"label": "Unique"
"default": "0",
"fieldname": "unique",
"fieldtype": "Check",
"label": "Unique"
},
{
"default": "0",
"fieldname": "read_only",
"fieldtype": "Check",
"label": "Read Only"
"default": "0",
"fieldname": "read_only",
"fieldtype": "Check",
"label": "Read Only"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype===\"Link\"",
"fieldname": "ignore_user_permissions",
"fieldtype": "Check",
"label": "Ignore User Permissions"
"default": "0",
"depends_on": "eval:doc.fieldtype===\"Link\"",
"fieldname": "ignore_user_permissions",
"fieldtype": "Check",
"label": "Ignore User Permissions"
},
{
"default": "0",
"fieldname": "hidden",
"fieldtype": "Check",
"label": "Hidden"
"default": "0",
"fieldname": "hidden",
"fieldtype": "Check",
"label": "Hidden"
},
{
"default": "0",
"fieldname": "print_hide",
"fieldtype": "Check",
"label": "Print Hide",
"oldfieldname": "print_hide",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "print_hide",
"fieldtype": "Check",
"label": "Print Hide",
"oldfieldname": "print_hide",
"oldfieldtype": "Check"
},
{
"default": "0",
"depends_on": "eval:[\"Int\", \"Float\", \"Currency\", \"Percent\"].indexOf(doc.fieldtype)!==-1",
"fieldname": "print_hide_if_no_value",
"fieldtype": "Check",
"label": "Print Hide If No Value"
"default": "0",
"depends_on": "eval:[\"Int\", \"Float\", \"Currency\", \"Percent\"].indexOf(doc.fieldtype)!==-1",
"fieldname": "print_hide_if_no_value",
"fieldtype": "Check",
"label": "Print Hide If No Value"
},
{
"fieldname": "print_width",
"fieldtype": "Data",
"hidden": 1,
"label": "Print Width",
"no_copy": 1,
"print_hide": 1
"fieldname": "print_width",
"fieldtype": "Data",
"hidden": 1,
"label": "Print Width",
"no_copy": 1,
"print_hide": 1
},
{
"default": "0",
"fieldname": "no_copy",
"fieldtype": "Check",
"label": "No Copy",
"oldfieldname": "no_copy",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "no_copy",
"fieldtype": "Check",
"label": "No Copy",
"oldfieldname": "no_copy",
"oldfieldtype": "Check"
},
{
"default": "0",
"fieldname": "allow_on_submit",
"fieldtype": "Check",
"label": "Allow on Submit",
"oldfieldname": "allow_on_submit",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "allow_on_submit",
"fieldtype": "Check",
"label": "Allow on Submit",
"oldfieldname": "allow_on_submit",
"oldfieldtype": "Check"
},
{
"default": "0",
"fieldname": "in_list_view",
"fieldtype": "Check",
"label": "In List View"
"default": "0",
"fieldname": "in_list_view",
"fieldtype": "Check",
"label": "In List View"
},
{
"default": "0",
"fieldname": "in_standard_filter",
"fieldtype": "Check",
"label": "In Standard Filter"
"default": "0",
"fieldname": "in_standard_filter",
"fieldtype": "Check",
"label": "In Standard Filter"
},
{
"default": "0",
"depends_on": "eval:([\"Data\", \"Select\", \"Table\", \"Text\", \"Text Editor\", \"Link\", \"Small Text\", \"Long Text\", \"Read Only\", \"Heading\", \"Dynamic Link\"].indexOf(doc.fieldtype) !== -1)",
"fieldname": "in_global_search",
"fieldtype": "Check",
"label": "In Global Search"
"default": "0",
"depends_on": "eval:([\"Data\", \"Select\", \"Table\", \"Text\", \"Text Editor\", \"Link\", \"Small Text\", \"Long Text\", \"Read Only\", \"Heading\", \"Dynamic Link\"].indexOf(doc.fieldtype) !== -1)",
"fieldname": "in_global_search",
"fieldtype": "Check",
"label": "In Global Search"
},
{
"default": "0",
"fieldname": "bold",
"fieldtype": "Check",
"label": "Bold"
"default": "0",
"fieldname": "bold",
"fieldtype": "Check",
"label": "Bold"
},
{
"default": "0",
"fieldname": "report_hide",
"fieldtype": "Check",
"label": "Report Hide",
"oldfieldname": "report_hide",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "report_hide",
"fieldtype": "Check",
"label": "Report Hide",
"oldfieldname": "report_hide",
"oldfieldtype": "Check"
},
{
"default": "0",
"fieldname": "search_index",
"fieldtype": "Check",
"hidden": 1,
"label": "Index",
"no_copy": 1,
"print_hide": 1
"default": "0",
"fieldname": "search_index",
"fieldtype": "Check",
"hidden": 1,
"label": "Index",
"no_copy": 1,
"print_hide": 1
},
{
"default": "0",
"description": "Don't HTML Encode HTML tags like &lt;script&gt; or just characters like &lt; or &gt;, as they could be intentionally used in this field",
"fieldname": "ignore_xss_filter",
"fieldtype": "Check",
"label": "Ignore XSS Filter"
"default": "0",
"description": "Don't HTML Encode HTML tags like &lt;script&gt; or just characters like &lt; or &gt;, as they could be intentionally used in this field",
"fieldname": "ignore_xss_filter",
"fieldtype": "Check",
"label": "Ignore XSS Filter"
},
{
"default": "1",
"depends_on": "eval:['Data', 'Select', 'Text', 'Small Text', 'Text Editor'].includes(doc.fieldtype)",
"fieldname": "translatable",
"fieldtype": "Check",
"label": "Translatable"
"default": "1",
"depends_on": "eval:['Data', 'Select', 'Text', 'Small Text', 'Text Editor'].includes(doc.fieldtype)",
"fieldname": "translatable",
"fieldtype": "Check",
"label": "Translatable"
},
{
"depends_on": "eval:in_list(['Data', 'Link', 'Dynamic Link', 'Password', 'Select', 'Read Only', 'Attach', 'Attach Image', 'Int'], doc.fieldtype)",
"fieldname": "length",
"fieldtype": "Int",
"label": "Length"
"depends_on": "eval:in_list(['Data', 'Link', 'Dynamic Link', 'Password', 'Select', 'Read Only', 'Attach', 'Attach Image', 'Int'], doc.fieldtype)",
"fieldname": "length",
"fieldtype": "Int",
"label": "Length"
},
{
"fieldname": "mandatory_depends_on",
"fieldtype": "Code",
"label": "Mandatory Depends On",
"length": 255
"fieldname": "mandatory_depends_on",
"fieldtype": "Code",
"label": "Mandatory Depends On",
"length": 255
},
{
"fieldname": "read_only_depends_on",
"fieldtype": "Code",
"label": "Read Only Depends On",
"length": 255
"fieldname": "read_only_depends_on",
"fieldtype": "Code",
"label": "Read Only Depends On",
"length": 255
},
{
"default": "0",
"fieldname": "allow_in_quick_entry",
"fieldtype": "Check",
"label": "Allow in Quick Entry"
"default": "0",
"fieldname": "allow_in_quick_entry",
"fieldtype": "Check",
"label": "Allow in Quick Entry"
},
{
"default": "0",
"fieldname": "in_preview",
"fieldtype": "Check",
"label": "In Preview"
"default": "0",
"fieldname": "in_preview",
"fieldtype": "Check",
"label": "In Preview"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Section Break'",
"fieldname": "hide_border",
"fieldtype": "Check",
"label": "Hide Border"
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Hide Seconds"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Hide Days"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Section Break'",
"fieldname": "hide_border",
"fieldtype": "Check",
"label": "Hide Border"
}
],
"icon": "fa fa-glass",
"idx": 1,
"links": [],
"modified": "2020-04-27 11:40:48.325481",
"modified": "2020-02-06 23:43:00.123575",
"modified_by": "Administrator",
"module": "Custom",
"name": "Custom Field",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"write": 1
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"write": 1
},
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"search_fields": "dt,label,fieldtype,options",

View file

@ -31,6 +31,13 @@ class CustomField(Document):
# fieldnames should be lowercase
self.fieldname = self.fieldname.lower()
def before_insert(self):
meta = frappe.get_meta(self.dt, cached=False)
fieldnames = [df.fieldname for df in meta.get("fields")]
if self.fieldname in fieldnames:
frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt))
def validate(self):
meta = frappe.get_meta(self.dt, cached=False)
fieldnames = [df.fieldname for df in meta.get("fields")]
@ -65,6 +72,11 @@ class CustomField(Document):
frappe.db.updatedb(self.dt)
def on_trash(self):
#check if Admin owned field
if self.owner == 'Administrator' and frappe.session.user != 'Administrator':
frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format(
frappe.bold(self.label)))
# delete property setter entries
frappe.db.sql("""\
DELETE FROM `tabProperty Setter`

View file

@ -77,7 +77,9 @@ docfield_properties = {
'allow_bulk_edit': 'Check',
'auto_repeat': 'Link',
'allow_in_quick_entry': 'Check',
'hide_border': 'Check'
'hide_border': 'Check',
'hide_days': 'Check',
'hide_seconds': 'Check'
}
allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'),

View file

@ -11,6 +11,8 @@
"label",
"fieldtype",
"fieldname",
"hide_seconds",
"hide_days",
"reqd",
"unique",
"in_list_view",
@ -58,350 +60,364 @@
],
"fields": [
{
"fieldname": "label_and_type",
"fieldtype": "Section Break",
"label": "Label and Type"
"fieldname": "label_and_type",
"fieldtype": "Section Break",
"label": "Label and Type"
},
{
"fieldname": "label",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Label",
"oldfieldname": "label",
"oldfieldtype": "Data",
"search_index": 1
"fieldname": "label",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Label",
"oldfieldname": "label",
"oldfieldtype": "Data",
"search_index": 1
},
{
"default": "Data",
"fieldname": "fieldtype",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Type",
"oldfieldname": "fieldtype",
"oldfieldtype": "Select",
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRating\nRead Only\nSection Break\nSelect\nSignature\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime",
"reqd": 1,
"search_index": 1
"default": "Data",
"fieldname": "fieldtype",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Type",
"oldfieldname": "fieldtype",
"oldfieldtype": "Select",
"options": "Attach\nAttach Image\nBarcode\nButton\nCheck\nCode\nColor\nColumn Break\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nFold\nGeolocation\nHeading\nHTML\nHTML Editor\nImage\nInt\nLink\nLong Text\nMarkdown Editor\nPassword\nPercent\nRating\nRead Only\nSection Break\nSelect\nSignature\nSmall Text\nTable\nTable MultiSelect\nText\nText Editor\nTime",
"reqd": 1,
"search_index": 1
},
{
"fieldname": "fieldname",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Name",
"oldfieldname": "fieldname",
"oldfieldtype": "Data",
"read_only": 1,
"search_index": 1
"fieldname": "fieldname",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Name",
"oldfieldname": "fieldname",
"oldfieldtype": "Data",
"read_only": 1,
"search_index": 1
},
{
"default": "0",
"depends_on": "eval:!in_list([\"Section Break\", \"Column Break\", \"Button\", \"HTML\"], doc.fieldtype)",
"fieldname": "reqd",
"fieldtype": "Check",
"label": "Mandatory",
"oldfieldname": "reqd",
"oldfieldtype": "Check",
"print_width": "50px",
"width": "50px"
"default": "0",
"depends_on": "eval:!in_list([\"Section Break\", \"Column Break\", \"Button\", \"HTML\"], doc.fieldtype)",
"fieldname": "reqd",
"fieldtype": "Check",
"label": "Mandatory",
"oldfieldname": "reqd",
"oldfieldtype": "Check",
"print_width": "50px",
"width": "50px"
},
{
"default": "0",
"fieldname": "unique",
"fieldtype": "Check",
"label": "Unique"
"default": "0",
"fieldname": "unique",
"fieldtype": "Check",
"label": "Unique"
},
{
"default": "0",
"fieldname": "in_list_view",
"fieldtype": "Check",
"label": "In List View"
"default": "0",
"fieldname": "in_list_view",
"fieldtype": "Check",
"label": "In List View"
},
{
"default": "0",
"fieldname": "in_standard_filter",
"fieldtype": "Check",
"label": "In Standard Filter"
"default": "0",
"fieldname": "in_standard_filter",
"fieldtype": "Check",
"label": "In Standard Filter"
},
{
"default": "0",
"depends_on": "eval:([\"Data\", \"Select\", \"Table\", \"Text\", \"Text Editor\", \"Link\", \"Small Text\", \"Long Text\", \"Read Only\", \"Heading\", \"Dynamic Link\"].indexOf(doc.fieldtype) !== -1)",
"fieldname": "in_global_search",
"fieldtype": "Check",
"label": "In Global Search"
"default": "0",
"depends_on": "eval:([\"Data\", \"Select\", \"Table\", \"Text\", \"Text Editor\", \"Link\", \"Small Text\", \"Long Text\", \"Read Only\", \"Heading\", \"Dynamic Link\"].indexOf(doc.fieldtype) !== -1)",
"fieldname": "in_global_search",
"fieldtype": "Check",
"label": "In Global Search"
},
{
"default": "0",
"fieldname": "bold",
"fieldtype": "Check",
"label": "Bold"
"default": "0",
"fieldname": "bold",
"fieldtype": "Check",
"label": "Bold"
},
{
"default": "1",
"depends_on": "eval:['Data', 'Select', 'Text', 'Small Text', 'Text Editor'].includes(doc.fieldtype)",
"fieldname": "translatable",
"fieldtype": "Check",
"label": "Translatable"
"default": "1",
"depends_on": "eval:['Data', 'Select', 'Text', 'Small Text', 'Text Editor'].includes(doc.fieldtype)",
"fieldname": "translatable",
"fieldtype": "Check",
"label": "Translatable"
},
{
"fieldname": "column_break_7",
"fieldtype": "Column Break"
"fieldname": "column_break_7",
"fieldtype": "Column Break"
},
{
"depends_on": "eval:in_list([\"Float\", \"Currency\", \"Percent\"], doc.fieldtype)",
"description": "Set non-standard precision for a Float or Currency field",
"fieldname": "precision",
"fieldtype": "Select",
"label": "Precision",
"options": "\n0\n1\n2\n3\n4\n5\n6\n7\n8\n9"
"depends_on": "eval:in_list([\"Float\", \"Currency\", \"Percent\"], doc.fieldtype)",
"description": "Set non-standard precision for a Float or Currency field",
"fieldname": "precision",
"fieldtype": "Select",
"label": "Precision",
"options": "\n0\n1\n2\n3\n4\n5\n6\n7\n8\n9"
},
{
"depends_on": "eval:in_list(['Data', 'Link', 'Dynamic Link', 'Password', 'Select', 'Read Only', 'Attach', 'Attach Image'], doc.fieldtype)",
"fieldname": "length",
"fieldtype": "Int",
"label": "Length"
"depends_on": "eval:in_list(['Data', 'Link', 'Dynamic Link', 'Password', 'Select', 'Read Only', 'Attach', 'Attach Image'], doc.fieldtype)",
"fieldname": "length",
"fieldtype": "Int",
"label": "Length"
},
{
"description": "For Links, enter the DocType as range.\nFor Select, enter list of Options, each on a new line.",
"fieldname": "options",
"fieldtype": "Small Text",
"in_list_view": 1,
"label": "Options",
"oldfieldname": "options",
"oldfieldtype": "Text"
"description": "For Links, enter the DocType as range.\nFor Select, enter list of Options, each on a new line.",
"fieldname": "options",
"fieldtype": "Small Text",
"in_list_view": 1,
"label": "Options",
"oldfieldname": "options",
"oldfieldtype": "Text"
},
{
"fieldname": "fetch_from",
"fieldtype": "Small Text",
"label": "Fetch From"
"fieldname": "fetch_from",
"fieldtype": "Small Text",
"label": "Fetch From"
},
{
"default": "0",
"description": "If checked, this field will be not overwritten based on Fetch From if a value already exists.",
"fieldname": "fetch_if_empty",
"fieldtype": "Check",
"label": "Fetch If Empty"
"default": "0",
"description": "If checked, this field will be not overwritten based on Fetch From if a value already exists.",
"fieldname": "fetch_if_empty",
"fieldtype": "Check",
"label": "Fetch If Empty"
},
{
"fieldname": "permissions",
"fieldtype": "Section Break",
"label": "Permissions"
"fieldname": "permissions",
"fieldtype": "Section Break",
"label": "Permissions"
},
{
"description": "This field will appear only if the fieldname defined here has value OR the rules are true (examples): \nmyfield\neval:doc.myfield=='My Value'\neval:doc.age&gt;18",
"fieldname": "depends_on",
"fieldtype": "Code",
"label": "Depends On",
"oldfieldname": "depends_on",
"oldfieldtype": "Data",
"options": "JS"
"description": "This field will appear only if the fieldname defined here has value OR the rules are true (examples): \nmyfield\neval:doc.myfield=='My Value'\neval:doc.age&gt;18",
"fieldname": "depends_on",
"fieldtype": "Code",
"label": "Depends On",
"oldfieldname": "depends_on",
"oldfieldtype": "Data",
"options": "JS"
},
{
"default": "0",
"fieldname": "permlevel",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Perm Level",
"oldfieldname": "permlevel",
"oldfieldtype": "Int"
"default": "0",
"fieldname": "permlevel",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Perm Level",
"oldfieldname": "permlevel",
"oldfieldtype": "Int"
},
{
"default": "0",
"fieldname": "hidden",
"fieldtype": "Check",
"label": "Hidden",
"oldfieldname": "hidden",
"oldfieldtype": "Check",
"print_width": "50px",
"width": "50px"
"default": "0",
"fieldname": "hidden",
"fieldtype": "Check",
"label": "Hidden",
"oldfieldname": "hidden",
"oldfieldtype": "Check",
"print_width": "50px",
"width": "50px"
},
{
"default": "0",
"fieldname": "read_only",
"fieldtype": "Check",
"label": "Read Only"
"default": "0",
"fieldname": "read_only",
"fieldtype": "Check",
"label": "Read Only"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible",
"fieldtype": "Check",
"label": "Collapsible"
"default": "0",
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible",
"fieldtype": "Check",
"label": "Collapsible"
},
{
"default": "0",
"depends_on": "eval: doc.fieldtype == \"Table\"",
"fieldname": "allow_bulk_edit",
"fieldtype": "Check",
"label": "Allow Bulk Edit"
"default": "0",
"depends_on": "eval: doc.fieldtype == \"Table\"",
"fieldname": "allow_bulk_edit",
"fieldtype": "Check",
"label": "Allow Bulk Edit"
},
{
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible_depends_on",
"fieldtype": "Code",
"label": "Collapsible Depends On",
"options": "JS"
"depends_on": "eval:doc.fieldtype==\"Section Break\"",
"fieldname": "collapsible_depends_on",
"fieldtype": "Code",
"label": "Collapsible Depends On",
"options": "JS"
},
{
"fieldname": "column_break_14",
"fieldtype": "Column Break"
"fieldname": "column_break_14",
"fieldtype": "Column Break"
},
{
"default": "0",
"fieldname": "ignore_user_permissions",
"fieldtype": "Check",
"label": "Ignore User Permissions"
"default": "0",
"fieldname": "ignore_user_permissions",
"fieldtype": "Check",
"label": "Ignore User Permissions"
},
{
"default": "0",
"fieldname": "allow_on_submit",
"fieldtype": "Check",
"label": "Allow on Submit",
"oldfieldname": "allow_on_submit",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "allow_on_submit",
"fieldtype": "Check",
"label": "Allow on Submit",
"oldfieldname": "allow_on_submit",
"oldfieldtype": "Check"
},
{
"default": "0",
"fieldname": "report_hide",
"fieldtype": "Check",
"label": "Report Hide",
"oldfieldname": "report_hide",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "report_hide",
"fieldtype": "Check",
"label": "Report Hide",
"oldfieldname": "report_hide",
"oldfieldtype": "Check"
},
{
"default": "0",
"depends_on": "eval:(doc.fieldtype == 'Link')",
"fieldname": "remember_last_selected_value",
"fieldtype": "Check",
"label": "Remember Last Selected Value"
"default": "0",
"depends_on": "eval:(doc.fieldtype == 'Link')",
"fieldname": "remember_last_selected_value",
"fieldtype": "Check",
"label": "Remember Last Selected Value"
},
{
"fieldname": "display",
"fieldtype": "Section Break",
"label": "Display"
"fieldname": "display",
"fieldtype": "Section Break",
"label": "Display"
},
{
"fieldname": "default",
"fieldtype": "Text",
"label": "Default",
"oldfieldname": "default",
"oldfieldtype": "Text"
"fieldname": "default",
"fieldtype": "Text",
"label": "Default",
"oldfieldname": "default",
"oldfieldtype": "Text"
},
{
"default": "0",
"fieldname": "in_filter",
"fieldtype": "Check",
"label": "In Filter",
"oldfieldname": "in_filter",
"oldfieldtype": "Check",
"print_width": "50px",
"width": "50px"
"default": "0",
"fieldname": "in_filter",
"fieldtype": "Check",
"label": "In Filter",
"oldfieldname": "in_filter",
"oldfieldtype": "Check",
"print_width": "50px",
"width": "50px"
},
{
"fieldname": "column_break_21",
"fieldtype": "Column Break"
"fieldname": "column_break_21",
"fieldtype": "Column Break"
},
{
"fieldname": "description",
"fieldtype": "Text",
"label": "Description",
"oldfieldname": "description",
"oldfieldtype": "Text",
"print_width": "300px",
"width": "300px"
"fieldname": "description",
"fieldtype": "Text",
"label": "Description",
"oldfieldname": "description",
"oldfieldtype": "Text",
"print_width": "300px",
"width": "300px"
},
{
"default": "0",
"fieldname": "print_hide",
"fieldtype": "Check",
"label": "Print Hide",
"oldfieldname": "print_hide",
"oldfieldtype": "Check"
"default": "0",
"fieldname": "print_hide",
"fieldtype": "Check",
"label": "Print Hide",
"oldfieldname": "print_hide",
"oldfieldtype": "Check"
},
{
"default": "0",
"depends_on": "eval:[\"Int\", \"Float\", \"Currency\", \"Percent\"].indexOf(doc.fieldtype)!==-1",
"fieldname": "print_hide_if_no_value",
"fieldtype": "Check",
"label": "Print Hide If No Value"
"default": "0",
"depends_on": "eval:[\"Int\", \"Float\", \"Currency\", \"Percent\"].indexOf(doc.fieldtype)!==-1",
"fieldname": "print_hide_if_no_value",
"fieldtype": "Check",
"label": "Print Hide If No Value"
},
{
"description": "Print Width of the field, if the field is a column in a table",
"fieldname": "print_width",
"fieldtype": "Data",
"label": "Print Width",
"print_width": "50px",
"width": "50px"
"description": "Print Width of the field, if the field is a column in a table",
"fieldname": "print_width",
"fieldtype": "Data",
"label": "Print Width",
"print_width": "50px",
"width": "50px"
},
{
"depends_on": "eval:cur_frm.doc.istable",
"description": "Number of columns for a field in a Grid (Total Columns in a grid should be less than 11)",
"fieldname": "columns",
"fieldtype": "Int",
"label": "Columns"
"depends_on": "eval:cur_frm.doc.istable",
"description": "Number of columns for a field in a Grid (Total Columns in a grid should be less than 11)",
"fieldname": "columns",
"fieldtype": "Int",
"label": "Columns"
},
{
"fieldname": "width",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Width",
"oldfieldname": "width",
"oldfieldtype": "Data",
"print_width": "50px",
"width": "50px"
"fieldname": "width",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Width",
"oldfieldname": "width",
"oldfieldtype": "Data",
"print_width": "50px",
"width": "50px"
},
{
"default": "0",
"fieldname": "is_custom_field",
"fieldtype": "Check",
"hidden": 1,
"label": "Is Custom Field",
"read_only": 1
"default": "0",
"fieldname": "is_custom_field",
"fieldtype": "Check",
"hidden": 1,
"label": "Is Custom Field",
"read_only": 1
},
{
"default": "0",
"fieldname": "allow_in_quick_entry",
"fieldtype": "Check",
"label": "Allow in Quick Entry"
"default": "0",
"fieldname": "allow_in_quick_entry",
"fieldtype": "Check",
"label": "Allow in Quick Entry"
},
{
"fieldname": "property_depends_on_section",
"fieldtype": "Section Break",
"label": "Property Depends On"
"fieldname": "property_depends_on_section",
"fieldtype": "Section Break",
"label": "Property Depends On"
},
{
"fieldname": "mandatory_depends_on",
"fieldtype": "Code",
"label": "Mandatory Depends On",
"options": "JS"
"fieldname": "mandatory_depends_on",
"fieldtype": "Code",
"label": "Mandatory Depends On",
"options": "JS"
},
{
"fieldname": "column_break_33",
"fieldtype": "Column Break"
"fieldname": "column_break_33",
"fieldtype": "Column Break"
},
{
"fieldname": "read_only_depends_on",
"fieldtype": "Code",
"label": "Read Only Depends On",
"options": "JS"
"fieldname": "read_only_depends_on",
"fieldtype": "Code",
"label": "Read Only Depends On",
"options": "JS"
},
{
"default": "0",
"fieldname": "in_preview",
"fieldtype": "Check",
"label": "In Preview"
"default": "0",
"fieldname": "in_preview",
"fieldtype": "Check",
"label": "In Preview"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Section Break'",
"fieldname": "hide_border",
"fieldtype": "Check",
"label": "Hide Border"
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Hide Seconds"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Hide Days"
},
{
"default": "0",
"depends_on": "eval:doc.fieldtype=='Section Break'",
"fieldname": "hide_border",
"fieldtype": "Check",
"label": "Hide Border"
}
],
"idx": 1,
"istable": 1,
"links": [],
"modified": "2020-04-27 11:39:26.389300",
"modified": "2020-06-02 23:45:46.810868",
"modified_by": "Administrator",
"module": "Custom",
"name": "Customize Form Field",

View file

@ -55,7 +55,8 @@ class MariaDBDatabase(Database):
'Signature': ('longtext', ''),
'Color': ('varchar', self.VARCHAR_LEN),
'Barcode': ('longtext', ''),
'Geolocation': ('longtext', '')
'Geolocation': ('longtext', ''),
'Duration': ('decimal', '18,6')
}
def get_connection(self):

View file

@ -64,6 +64,8 @@ CREATE TABLE `tabDocField` (
`length` int(11) NOT NULL DEFAULT 0,
`translatable` int(1) NOT NULL DEFAULT 0,
`hide_border` int(1) NOT NULL DEFAULT 0,
`hide_days` int(1) NOT NULL DEFAULT 0,
`hide_seconds` int(1) NOT NULL DEFAULT 0,
PRIMARY KEY (`name`),
KEY `parent` (`parent`),
KEY `label` (`label`),

View file

@ -60,7 +60,8 @@ class PostgresDatabase(Database):
'Signature': ('text', ''),
'Color': ('varchar', self.VARCHAR_LEN),
'Barcode': ('text', ''),
'Geolocation': ('text', '')
'Geolocation': ('text', ''),
'Duration': ('decimal', '18,6')
}
def get_connection(self):

View file

@ -64,6 +64,8 @@ CREATE TABLE "tabDocField" (
"length" bigint NOT NULL DEFAULT 0,
"translatable" smallint NOT NULL DEFAULT 0,
"hide_border" smallint NOT NULL DEFAULT 0,
"hide_days" smallint NOT NULL DEFAULT 0,
"hide_seconds" smallint NOT NULL DEFAULT 0,
PRIMARY KEY ("name")
) ;

View file

@ -8,12 +8,26 @@ from json import loads, dumps
from frappe import _, DoesNotExistError, ValidationError, _dict
from frappe.boot import get_allowed_pages, get_allowed_reports
from six import string_types
from functools import wraps
from frappe.cache_manager import (
build_domain_restriced_doctype_cache,
build_domain_restriced_page_cache,
build_table_count_cache
)
def handle_not_exist(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
try:
return fn(*args, **kwargs)
except DoesNotExistError:
if frappe.message_log:
frappe.message_log.pop()
return []
return wrapper
class Workspace:
def __init__(self, page_name):
self.page_name = page_name
@ -154,20 +168,22 @@ class Workspace:
'subtitle': _(self.onboarding_doc.subtitle),
'success': _(self.onboarding_doc.success_message),
'docs_url': self.onboarding_doc.documentation_url,
'user_can_dismiss': self.onboarding_doc.user_can_dismiss,
'items': self.get_onboarding_steps()
}
@handle_not_exist
def get_cards(self):
cards = self.doc.cards + get_custom_reports_and_doctypes(self.doc.module)
cards = self.doc.cards
if not self.doc.hide_custom:
cards = cards + get_custom_reports_and_doctypes(self.doc.module)
if len(self.extended_cards):
cards = cards + self.extended_cards
default_country = frappe.db.get_default("country")
def _doctype_contains_a_record(name):
exists = self.table_counts.get(name, None)
if exists is None:
if not frappe.db.get_value('DocType', name, 'issingle', cache=True):
if not exists:
if not frappe.db.get_value('DocType', name, 'issingle'):
exists = frappe.db.count(name)
else:
exists = True
@ -224,6 +240,7 @@ class Workspace:
return new_data
@handle_not_exist
def get_charts(self):
all_charts = []
if frappe.has_permission("Dashboard Chart", throw=False):
@ -239,6 +256,7 @@ class Workspace:
return all_charts
@handle_not_exist
def get_shortcuts(self):
def _in_active_domains(item):
@ -269,6 +287,7 @@ class Workspace:
return items
@handle_not_exist
def get_onboarding_steps(self):
steps = []
for doc in self.onboarding_doc.get_steps():
@ -293,21 +312,15 @@ def get_desktop_page(page):
Returns:
dict: dictionary of cards, charts and shortcuts to be displayed on website
"""
try:
wspace = Workspace(page)
wspace.build_workspace()
return {
'charts': wspace.charts,
'shortcuts': wspace.shortcuts,
'cards': wspace.cards,
'onboarding': wspace.onboarding,
'allow_customization': not wspace.doc.disable_user_customization
}
except DoesNotExistError:
if frappe.message_log:
frappe.message_log.pop()
return None
wspace = Workspace(page)
wspace.build_workspace()
return {
'charts': wspace.charts,
'shortcuts': wspace.shortcuts,
'cards': wspace.cards,
'onboarding': wspace.onboarding,
'allow_customization': not wspace.doc.disable_user_customization
}
@frappe.whitelist()
def get_desk_sidebar_items(flatten=False):

View file

@ -1,208 +1,81 @@
{
"allow_copy": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"actions": [],
"autoname": "Prompt",
"beta": 0,
"creation": "2017-10-23 13:02:10.295824",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"subject_field",
"start_date_field",
"end_date_field",
"column_break_5",
"all_day"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Reference Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "subject_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Subject Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "start_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Start Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "end_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "End Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"default": "0",
"fieldname": "all_day",
"fieldtype": "Check",
"label": "All Day"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-09-05 14:22:27.664645",
"links": [],
"modified": "2020-06-15 11:24:57.639430",
"modified_by": "Administrator",
"module": "Desk",
"name": "Calendar View",
"name_case": "",
"owner": "faris@erpnext.com",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
},
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 0,
"delete": 0,
"email": 0,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "All",
"set_user_permissions": 0,
"share": 0,
"submit": 0,
"write": 0
"role": "All"
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 0,
"track_seen": 0
"sort_order": "DESC"
}

View file

@ -251,6 +251,7 @@ frappe.ui.form.on('Dashboard Chart', {
render_filters_table: function(frm) {
frm.set_df_property("filters_section", "hidden", 0);
let is_document_type = frm.doc.chart_type!== 'Report' && frm.doc.chart_type!=='Custom';
let is_dynamic_filter = f => ['Date', 'DateRange'].includes(f.fieldtype) && f.default;
let wrapper = $(frm.get_field('filters_json').wrapper).empty();
let table = $(`<table class="table table-bordered" style="cursor:pointer; margin:0px;">
@ -268,6 +269,18 @@ frappe.ui.form.on('Dashboard Chart', {
let filters = JSON.parse(frm.doc.filters_json || '[]');
var filters_set = false;
// Set dynamic filters for reports
if (frm.doc.chart_type == 'Report') {
let set_filters = false;
frm.chart_filters.forEach(f => {
if (is_dynamic_filter(f)) {
filters[f.fieldname] = f.default;
set_filters = true;
}
});
set_filters && frm.set_value('filters_json', JSON.stringify(filters));
}
let fields;
if (is_document_type) {
fields = [
@ -292,6 +305,7 @@ frappe.ui.form.on('Dashboard Chart', {
}
} else if (frm.chart_filters.length) {
fields = frm.chart_filters.filter(f => f.fieldname);
fields.map( f => {
if (filters[f.fieldname]) {
let condition = '=';
@ -318,7 +332,7 @@ frappe.ui.form.on('Dashboard Chart', {
let dialog = new frappe.ui.Dialog({
title: __('Set Filters'),
fields: fields,
fields: fields.filter(f => !is_dynamic_filter(f)),
primary_action: function() {
let values = this.get_values();
if (values) {
@ -351,8 +365,15 @@ frappe.ui.form.on('Dashboard Chart', {
}
dialog.show();
//Set query report object so that it can be used while fetching filter values in the report
frappe.query_report = new frappe.views.QueryReport({'filters': dialog.fields_list});
if (frm.doc.chart_type == 'Report') {
//Set query report object so that it can be used while fetching filter values in the report
frappe.query_report = new frappe.views.QueryReport({'filters': dialog.fields_list});
frappe.query_reports[frm.doc.report_name]
&& frappe.query_reports[frm.doc.report_name].onload
&& frappe.query_reports[frm.doc.report_name].onload(frappe.query_report);
}
dialog.set_values(filters);
});
},

View file

@ -26,15 +26,15 @@ def get_permission_query_conditions(user):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()])
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()]
return '''
`tabDashboard Chart`.`document_type` in {allowed_doctypes}
or `tabDashboard Chart`.`report_name` in {allowed_reports}
`tabDashboard Chart`.`document_type` in ({allowed_doctypes})
or `tabDashboard Chart`.`report_name` in ({allowed_reports})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_reports=allowed_reports
allowed_doctypes=','.join(allowed_doctypes),
allowed_reports=','.join(allowed_reports)
)
@ -110,11 +110,11 @@ def create_dashboard_chart(args):
doc.insert(ignore_permissions=True)
return doc
@frappe.whitelist()
def create_report_chart(args):
create_dashboard_chart(args)
doc = create_dashboard_chart(args)
args = frappe.parse_json(args)
args.chart_name = doc.chart_name
if args.dashboard:
add_chart_to_dashboard(json.dumps(args))
@ -137,7 +137,6 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date):
to_date = datetime.datetime.now()
doctype = chart.document_type
unit_function = get_unit_function(doctype, chart.based_on, timegrain)
datefield = chart.based_on
aggregate_function = get_aggregate_function(chart.chart_type)
value_field = chart.value_based_on or '1'
@ -147,26 +146,21 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date):
filters.append([doctype, datefield, '>=', from_date, False])
filters.append([doctype, datefield, '<=', to_date, False])
data = frappe.db.get_all(
data = frappe.db.get_list(
doctype,
fields = [
'extract(year from `tab{doctype}`.{datefield}) as _year'.format(doctype=doctype, datefield=datefield),
'{} as _unit'.format(unit_function),
'{} as _unit'.format(datefield),
'{aggregate_function}({value_field})'.format(aggregate_function=aggregate_function, value_field=value_field),
],
filters = filters,
group_by = '_year, _unit',
order_by = '_year asc, _unit asc',
group_by = '_unit',
order_by = '_unit asc',
as_list = True,
ignore_ifnull = True
)
result = get_result(data, timegrain, from_date, to_date)
# result given as year, unit -> convert it to end of period of that unit
result = convert_to_dates(data, timegrain)
# add missing data points for periods where there was no result
result = add_missing_values(result, timegrain, timespan, from_date, to_date)
chart_config = {
"labels": [formatdate(r[0].strftime('%Y-%m-%d')) for r in result],
"datasets": [{
@ -220,7 +214,7 @@ def get_group_by_chart_config(chart, filters):
group_by_field = chart.group_by_based_on
doctype = chart.document_type
data = frappe.db.get_all(
data = frappe.db.get_list(
doctype,
fields = [
'{} as name'.format(group_by_field),
@ -261,75 +255,22 @@ def get_aggregate_function(chart_type):
}[chart_type]
def convert_to_dates(data, timegrain):
""" Converts individual dates within data to the end of period """
result = []
for d in data:
if d[2] != 0:
if timegrain == 'Daily':
result.append([add_to_date('{:d}-01-01'.format(int(d[0])), days = d[1] - 1), d[2]])
elif timegrain == 'Weekly':
result.append([add_to_date(add_to_date('{:d}-01-01'.format(int(d[0])), weeks = d[1] + 1), days = -1), d[2]])
elif timegrain == 'Monthly':
result.append([add_to_date(add_to_date('{:d}-01-01'.format(int(d[0])), months=d[1]), days = -1), d[2]])
elif timegrain == 'Quarterly':
result.append([add_to_date(add_to_date('{:d}-01-01'.format(int(d[0])), months=d[1] * 3), days = -1), d[2]])
elif timegrain == 'Yearly':
result.append([add_to_date(add_to_date('{:d}-01-01'.format(int(d[0])), months=12), days = -1), d[2]])
result[-1][0] = getdate(result[-1][0])
return result
def get_unit_function(doctype, datefield, timegrain):
unit_function = ''
if timegrain=='Daily':
if frappe.db.db_type == 'mariadb':
unit_function = 'dayofyear(`tab{doctype}`.{datefield})'.format(
doctype=doctype, datefield=datefield)
else:
unit_function = 'extract(doy from `tab{doctype}`.{datefield})'.format(
doctype=doctype, datefield=datefield)
else:
unit_function = 'extract({unit} from `tab{doctype}`.{datefield})'.format(
unit = timegrain[:-2].lower(), doctype=doctype, datefield=datefield)
return unit_function
def add_missing_values(data, timegrain, timespan, from_date, to_date):
# add missing intervals
def get_result(data, timegrain, from_date, to_date):
start_date = getdate(from_date)
end_date = getdate(to_date)
result = []
if timespan != 'All Time':
first_expected_date = get_period_ending(from_date, timegrain)
# fill out data before the first data point
first_data_point_date = data[0][0] if data else getdate(add_to_date(to_date, days=1))
while first_data_point_date > first_expected_date:
result.append([first_expected_date, 0.0])
first_expected_date = get_next_expected_date(first_expected_date, timegrain)
while start_date <= end_date:
next_date = get_next_expected_date(start_date, timegrain)
result.append([next_date, 0.0])
start_date = next_date
# fill data points and missing points
for i, d in enumerate(data):
result.append(d)
next_expected_date = get_next_expected_date(d[0], timegrain)
if i < len(data)-1:
next_date = data[i+1][0]
else:
# already reached at end of data, see if we need any more dates
next_date = getdate(nowdate())
# if next data point is earler than the expected date
# need to fill out missing data points
while next_date > next_expected_date:
# fill missing value
result.append([next_expected_date, 0.0])
next_expected_date = get_next_expected_date(next_expected_date, timegrain)
# add date for the last period (if missing)
if result and get_period_ending(to_date, timegrain) > result[-1][0]:
result.append([get_period_ending(to_date, timegrain), 0.0])
data_index = 0
if data:
for i, d in enumerate(result):
while data_index < len(data) and getdate(data[data_index][0]) <= d[0]:
d[1] += data[data_index][1]
data_index += 1
return result
@ -358,17 +299,12 @@ def get_period_ending(date, timegrain):
return getdate(date)
def get_week_ending(date):
# fun fact: week ends on the day before 1st Jan of the year.
# for 2019 it is Monday
# week starts on monday
from datetime import timedelta
start = date - timedelta(days = date.weekday())
end = start + timedelta(days=6)
week_of_the_year = int(date.strftime('%U'))
if week_of_the_year == 52:
date = add_to_date(date, years=1)
# first day of next week
date = add_to_date('{}-01-01'.format(date.year), weeks = (week_of_the_year%52) + 1)
# last day of this week
return add_to_date(date, days=-1)
return end
def get_month_ending(date):
month_of_the_year = int(date.strftime('%m'))

View file

@ -17,10 +17,9 @@ class TestDashboardChart(unittest.TestCase):
self.assertEqual(get_period_ending('2019-04-10', 'Daily'),
getdate('2019-04-10'))
# fun fact: week ends on the day before 1st Jan of the year.
# for 2019 it is Monday
# week starts on monday
self.assertEqual(get_period_ending('2019-04-10', 'Weekly'),
getdate('2019-04-15'))
getdate('2019-04-14'))
self.assertEqual(get_period_ending('2019-04-10', 'Monthly'),
getdate('2019-04-30'))
@ -133,6 +132,34 @@ class TestDashboardChart(unittest.TestCase):
frappe.db.rollback()
def test_weekly_dashboard_chart(self):
insert_test_records()
if frappe.db.exists('Dashboard Chart', 'Test Weekly Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Weekly Dashboard Chart')
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Weekly Dashboard Chart',
chart_type = 'Sum',
document_type = 'Communication',
based_on = 'communication_date',
value_based_on = 'rating',
timespan = 'Select Date Range',
time_interval = 'Weekly',
from_date = datetime(2018, 12, 30),
to_date = datetime(2019, 1, 15),
filters_json = '[]',
timeseries = 1
)).insert()
result = get(chart_name ='Test Weekly Dashboard Chart', refresh = 1)
self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 0.0])
self.assertEqual(result.get('labels'), [formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')])
frappe.db.rollback()
def test_group_by_chart_type(self):
if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
@ -155,17 +182,16 @@ class TestDashboardChart(unittest.TestCase):
frappe.db.rollback()
def test_dashboard_with_single_doctype(self):
if frappe.db.exists('Dashboard Chart', 'Test Single DocType In Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Single DocType In Dashboard Chart')
def insert_test_records():
create_new_communication(datetime(2019, 1, 10), 100)
create_new_communication(datetime(2019, 1, 6), 200)
create_new_communication(datetime(2019, 1, 8), 300)
chart_doc = frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Single DocType In Dashboard Chart',
chart_type = 'Count',
document_type = 'System Settings',
group_by_based_on = 'Created On',
filters_json = '{}',
))
self.assertRaises(frappe.ValidationError, chart_doc.insert)
def create_new_communication(date, rating):
communication = {
'doctype': 'Communication',
'subject': 'Test Communication',
'rating': rating,
'communication_date': date
}
frappe.get_doc(communication).insert()

View file

@ -21,6 +21,7 @@
"disable_user_customization",
"pin_to_top",
"pin_to_bottom",
"hide_custom",
"section_break_2",
"charts_label",
"charts",
@ -189,10 +190,17 @@
"fieldtype": "Link",
"label": "Onboarding",
"options": "Module Onboarding"
},
{
"default": "0",
"description": "Checking this will hide custom doctypes and reports cards in Links section",
"fieldname": "hide_custom",
"fieldtype": "Check",
"label": "Hide Custom DocTypes and Reports"
}
],
"links": [],
"modified": "2020-05-13 19:01:42.041524",
"modified": "2020-05-18 19:17:27.206646",
"modified_by": "Administrator",
"module": "Desk",
"name": "Desk Page",

View file

@ -20,6 +20,17 @@ class DeskPage(Document):
if frappe.conf.developer_mode and self.is_standard:
export_to_files(record_list=[['Desk Page', self.name]], record_module=self.module)
@staticmethod
def get_module_page_map():
filters = {
'extends_another_page': 0,
'for_user': '',
}
pages = frappe.get_all("Desk Page", fields=["name", "module"], filters=filters, as_list=1)
return { page[1]: page[0] for page in pages }
def disable_saving_as_standard():
return frappe.flags.in_install or \
frappe.flags.in_patch or \

View file

@ -71,7 +71,7 @@ class TestEvent(unittest.TestCase):
ev = frappe.get_doc(self.test_records[0]).insert()
add({
"assign_to": "test@example.com",
"assign_to": ["test@example.com"],
"doctype": "Event",
"name": ev.name,
"description": "Test Assignment"
@ -83,7 +83,7 @@ class TestEvent(unittest.TestCase):
# add another one
add({
"assign_to": self.test_user,
"assign_to": [self.test_user],
"doctype": "Event",
"name": ev.name,
"description": "Test Assignment"
@ -93,10 +93,10 @@ class TestEvent(unittest.TestCase):
self.assertEqual(set(json.loads(ev._assign)), set(["test@example.com", self.test_user]))
# close an assignment
# Remove an assignment
todo = frappe.get_doc("ToDo", {"reference_type": ev.doctype, "reference_name": ev.name,
"owner": self.test_user})
todo.status = "Closed"
todo.status = "Cancelled"
todo.save()
ev = frappe.get_doc("Event", ev.name)

View file

@ -1,8 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('List View Setting', {
// refresh: function(frm) {
// }
});

View file

@ -1,160 +0,0 @@
{
"allow_copy": 0,
"allow_events_in_timeline": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"autoname": "Prompt",
"beta": 0,
"creation": "2019-03-06 13:29:21.101860",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "disable_count",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Disable Count",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "disable_sidebar_stats",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Disable Sidebar Stats",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "disable_auto_refresh",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Disable Auto Refresh",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-03-06 13:40:59.533586",
"modified_by": "Administrator",
"module": "Desk",
"name": "List View Setting",
"name_case": "",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 0,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 0,
"track_views": 0
}

View file

@ -0,0 +1,8 @@
// Copyright (c) 2020, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('List View Settings', {
// refresh: function(frm) {
// }
});

View file

@ -0,0 +1,76 @@
{
"actions": [],
"autoname": "Prompt",
"creation": "2019-10-23 15:00:48.392374",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"disable_count",
"disable_sidebar_stats",
"disable_auto_refresh",
"total_fields",
"fields_html",
"fields"
],
"fields": [
{
"default": "0",
"fieldname": "disable_count",
"fieldtype": "Check",
"label": "Disable Count"
},
{
"default": "0",
"fieldname": "disable_sidebar_stats",
"fieldtype": "Check",
"label": "Disable Sidebar Stats"
},
{
"default": "0",
"fieldname": "disable_auto_refresh",
"fieldtype": "Check",
"label": "Disable Auto Refresh"
},
{
"fieldname": "total_fields",
"fieldtype": "Select",
"label": "Maximum Number of Fields",
"options": "\n4\n5\n6\n7\n8\n9\n10"
},
{
"fieldname": "fields_html",
"fieldtype": "HTML",
"label": "Fields"
},
{
"fieldname": "fields",
"fieldtype": "Code",
"hidden": 1,
"label": "Fields",
"read_only": 1
}
],
"links": [],
"modified": "2020-05-12 18:27:15.568199",
"modified_by": "Administrator",
"module": "Desk",
"name": "List View Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -0,0 +1,79 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class ListViewSettings(Document):
def on_update(self):
frappe.clear_document_cache(self.doctype, self.name)
@frappe.whitelist()
def save_listview_settings(doctype, listview_settings, removed_listview_fields):
listview_settings = frappe.parse_json(listview_settings)
removed_listview_fields = frappe.parse_json(removed_listview_fields)
if frappe.get_all("List View Settings", filters={"name": doctype}):
doc = frappe.get_doc("List View Settings", doctype)
doc.update(listview_settings)
doc.save()
else:
doc = frappe.new_doc("List View Settings")
doc.name = doctype
doc.update(listview_settings)
doc.insert()
set_listview_fields(doctype, listview_settings.get("fields"), removed_listview_fields)
return {
"meta": frappe.get_meta(doctype, False),
"listview_settings": doc
}
def set_listview_fields(doctype, listview_fields, removed_listview_fields):
meta = frappe.get_meta(doctype)
listview_fields = [f.get("fieldname") for f in frappe.parse_json(listview_fields) if f.get("fieldname")]
for field in removed_listview_fields:
set_in_list_view_property(doctype, meta.get_field(field), "0")
for field in listview_fields:
set_in_list_view_property(doctype, meta.get_field(field), "1")
def set_in_list_view_property(doctype, field, value):
if not field or field.fieldname == "status_field":
return
property_setter = frappe.db.get_value("Property Setter", {"doc_type": doctype, "field_name": field.fieldname, "property": "in_list_view"})
if property_setter:
doc = frappe.get_doc("Property Setter", property_setter)
doc.value = value
doc.save()
else:
frappe.make_property_setter({
"doctype": doctype,
"doctype_or_field": "DocField",
"fieldname": field.fieldname,
"property": "in_list_view",
"value": value,
"property_type": "Check"
}, ignore_validate=True)
@frappe.whitelist()
def get_default_listview_fields(doctype):
meta = frappe.get_meta(doctype)
path = frappe.get_module_path(frappe.scrub(meta.module), "doctype", frappe.scrub(meta.name), frappe.scrub(meta.name) + ".json")
doctype_json = frappe.get_file_json(path)
fields = [f.get("fieldname") for f in doctype_json.get("fields") if f.get("in_list_view")]
if meta.title_field:
if not meta.title_field.strip() in fields:
fields.append(meta.title_field.strip())
return fields

View file

@ -3,7 +3,8 @@
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestListViewSetting(unittest.TestCase):
class TestListViewSettings(unittest.TestCase):
pass

View file

@ -13,7 +13,6 @@
"column_break_4",
"success_message",
"documentation_url",
"user_can_dismiss",
"is_complete",
"section_break_6",
"steps"
@ -53,13 +52,6 @@
"label": "Success Message",
"reqd": 1
},
{
"default": "1",
"description": "Allow users to dismiss onboarding temporarily for a day",
"fieldname": "user_can_dismiss",
"fieldtype": "Check",
"label": "User Can Dismiss "
},
{
"fieldname": "documentation_url",
"fieldtype": "Data",
@ -90,7 +82,7 @@
}
],
"links": [],
"modified": "2020-05-01 19:37:21.492405",
"modified": "2020-06-08 15:36:04.701049",
"modified_by": "Administrator",
"module": "Desk",
"name": "Module Onboarding",
@ -118,6 +110,7 @@
"share": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1

View file

@ -3,10 +3,43 @@
frappe.ui.form.on('Notification Log', {
refresh: function(frm) {
let dt = frm.doc.document_type;
let dn = frm.doc.document_name;
frm.fields_dict.document_name.$input_wrapper
.find('.control-value')
.wrapInner(`<a href='#Form/${dt}/${dn}'></a>`);
if (frm.doc.attached_file) {
frm.trigger('set_attachment');
} else {
frm.get_field('attachment_link').$wrapper.empty();
}
},
open_reference_document: function(frm) {
const dt = frm.doc.document_type;
const dn = frm.doc.document_name;
frappe.set_route('Form', dt, dn);
},
set_attachment: function(frm) {
const attachment = JSON.parse(frm.doc.attached_file);
const $wrapper = frm.get_field('attachment_link').$wrapper;
$wrapper.html(`
<div class="attached-file text-medium">
<div class="ellipsis">
<i class="fa fa-paperclip"></i>
<a class="attached-file-link">${attachment.name}.pdf</a>
</div>
</div>
`);
$wrapper.find(".attached-file-link").click(() => {
const w = window.open(
frappe.urllib.get_full_url(`/api/method/frappe.utils.print_format.download_pdf?
doctype=${encodeURIComponent(attachment.doctype)}
&name=${encodeURIComponent(attachment.name)}
&format=${encodeURIComponent(attachment.print_format)}
&lang=${encodeURIComponent(attachment.lang)}`)
);
if (!w) {
frappe.msgprint(__("Please enable pop-ups"));
}
});
}
});

View file

@ -1,4 +1,5 @@
{
"actions": [],
"creation": "2019-08-26 13:37:34.165254",
"doctype": "DocType",
"editable_grid": 1,
@ -8,10 +9,12 @@
"for_user",
"type",
"email_content",
"column_break_4",
"document_type",
"read",
"document_name",
"attached_file",
"attachment_link",
"open_reference_document",
"from_user"
],
"fields": [
@ -20,57 +23,65 @@
"fieldtype": "Text",
"in_list_view": 1,
"label": "Subject",
"read_only": 1
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "for_user",
"fieldtype": "Link",
"hidden": 1,
"label": "For User",
"options": "User",
"read_only": 1
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "type",
"fieldtype": "Select",
"hidden": 1,
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Type",
"options": "Mention\nEnergy Point\nAssignment\nShare",
"read_only": 1,
"search_index": 1
"options": "Mention\nEnergy Point\nAssignment\nShare\nAlert",
"search_index": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "email_content",
"fieldtype": "Text",
"label": "Email Content",
"read_only": 1
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
"fieldtype": "Text Editor",
"label": "Message",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "document_type",
"fieldtype": "Link",
"hidden": 1,
"label": "Document Type",
"options": "DocType",
"read_only": 1,
"search_index": 1
"search_index": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "document_name",
"fieldtype": "Data",
"label": "Document Name",
"read_only": 1,
"search_index": 1
"hidden": 1,
"label": "Document Link",
"search_index": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "from_user",
"fieldtype": "Link",
"hidden": 1,
"label": "From User",
"options": "User",
"read_only": 1,
"search_index": 1
"search_index": 1,
"show_days": 1,
"show_seconds": 1
},
{
"default": "0",
@ -78,26 +89,51 @@
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 1,
"label": "Read"
"label": "Read",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "open_reference_document",
"fieldtype": "Button",
"label": "Open Reference Document",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "attached_file",
"fieldtype": "Code",
"hidden": 1,
"label": "Attached File",
"options": "JSON",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "attachment_link",
"fieldtype": "HTML",
"label": "Attachment Link",
"show_days": 1,
"show_seconds": 1
}
],
"hide_toolbar": 1,
"in_create": 1,
"modified": "2019-11-12 15:22:35.283678",
"links": [],
"modified": "2020-05-31 22:31:12.886950",
"modified_by": "umair@erpnext.com",
"module": "Desk",
"name": "Notification Log",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "All",
"share": 1,
"write": 1
"share": 1
}
],
"sort_field": "modified",

View file

@ -48,6 +48,7 @@ def enqueue_create_notification(users, doc):
if isinstance(users, frappe.string_types):
users = [user.strip() for user in users.split(',') if user.strip()]
users = list(set(users))
frappe.enqueue(
'frappe.desk.doctype.notification_log.notification_log.make_notification_logs',
@ -58,6 +59,7 @@ def enqueue_create_notification(users, doc):
def make_notification_logs(doc, users):
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
for user in users:
if frappe.db.exists('User', user):
if is_notifications_enabled(user):
@ -68,7 +70,7 @@ def make_notification_logs(doc, users):
_doc.update(doc)
_doc.for_user = user
_doc.subject = _doc.subject.replace('<div>', '').replace('</div>', '')
if _doc.for_user != _doc.from_user or doc.type == 'Energy Point':
if _doc.for_user != _doc.from_user or doc.type == 'Energy Point' or doc.type == 'Alert':
_doc.insert(ignore_permissions=True)
def send_notification_email(doc):
@ -98,14 +100,16 @@ def send_notification_email(doc):
)
def get_email_header(doc):
return {
docname = doc.document_name
header_map = {
'Default': _('New Notification'),
'Mention': _('New Mention'),
'Assignment': _('New Assignment'),
'Share': _('New Document Shared'),
'Energy Point': _('Energy Point Update'),
}[doc.type or 'Default']
'Mention': _('New Mention on {0}').format(docname),
'Assignment': _('Assignment Update on {0}').format(docname),
'Share': _('New Document Shared {0}').format(docname),
'Energy Point': _('Energy Point Update on {0}').format(docname),
}
return header_map[doc.type or 'Default']
@frappe.whitelist()
def mark_all_as_read():

View file

@ -13,7 +13,7 @@ class TestNotificationLog(unittest.TestCase):
user = get_user()
assign_task({
"assign_to": user,
"assign_to": [user],
"doctype": 'ToDo',
"name": todo.name,
"description": todo.description

View file

@ -1,4 +1,5 @@
{
"actions": [],
"autoname": "Prompt",
"creation": "2019-09-11 22:15:44.851526",
"doctype": "DocType",
@ -21,52 +22,68 @@
"default": "1",
"fieldname": "enabled",
"fieldtype": "Check",
"label": "Enabled"
"label": "Enabled",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "subscribed_documents",
"fieldtype": "Table MultiSelect",
"label": "Subscribed Documents",
"options": "Notification Subscribed Document"
"options": "Notification Subscribed Document",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "column_break_3",
"fieldtype": "Section Break",
"label": "Email Settings"
"label": "Email Settings",
"show_days": 1,
"show_seconds": 1
},
{
"default": "1",
"fieldname": "enable_email_notifications",
"fieldtype": "Check",
"label": "Enable Email Notifications"
"label": "Enable Email Notifications",
"show_days": 1,
"show_seconds": 1
},
{
"default": "1",
"depends_on": "enable_email_notifications",
"fieldname": "enable_email_mention",
"fieldtype": "Check",
"label": "Mentions"
"label": "Mentions",
"show_days": 1,
"show_seconds": 1
},
{
"default": "1",
"depends_on": "enable_email_notifications",
"fieldname": "enable_email_assignment",
"fieldtype": "Check",
"label": "Assignments"
"label": "Assignments",
"show_days": 1,
"show_seconds": 1
},
{
"default": "1",
"depends_on": "enable_email_notifications",
"fieldname": "enable_email_energy_point",
"fieldtype": "Check",
"label": "Energy Points"
"label": "Energy Points",
"show_days": 1,
"show_seconds": 1
},
{
"default": "1",
"depends_on": "enable_email_notifications",
"fieldname": "enable_email_share",
"fieldtype": "Check",
"label": "Document Share"
"label": "Document Share",
"show_days": 1,
"show_seconds": 1
},
{
"default": "__user",
@ -75,18 +92,23 @@
"hidden": 1,
"label": "User",
"options": "User",
"read_only": 1
"read_only": 1,
"show_days": 1,
"show_seconds": 1
},
{
"default": "0",
"fieldname": "seen",
"fieldtype": "Check",
"hidden": 1,
"label": "Seen"
"label": "Seen",
"show_days": 1,
"show_seconds": 1
}
],
"in_create": 1,
"modified": "2019-11-19 12:57:59.356786",
"links": [],
"modified": "2020-05-31 22:16:40.798019",
"modified_by": "Administrator",
"module": "Desk",
"name": "Notification Settings",

View file

@ -28,6 +28,9 @@ def is_email_notifications_enabled_for_type(user, notification_type):
if not is_email_notifications_enabled(user):
return False
if notification_type == 'Alert':
return False
fieldname = 'enable_email_' + frappe.scrub(notification_type)
enabled = frappe.db.get_value('Notification Settings', user, fieldname)
if enabled is None:

View file

@ -27,12 +27,12 @@ def get_permission_query_conditions(user=None):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
return '''
`tabNumber Card`.`document_type` in {allowed_doctypes}
`tabNumber Card`.`document_type` in ({allowed_doctypes})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_doctypes=','.join(allowed_doctypes)
)
def has_permission(doc, ptype, user):
@ -70,7 +70,7 @@ def get_result(doc, to_date=None):
if to_date:
filters.append([doc.document_type, 'creation', '<', to_date, False])
res = frappe.db.get_all(doc.document_type, fields=fields, filters=filters)
res = frappe.db.get_list(doc.document_type, fields=fields, filters=filters)
number = res[0]['result'] if res else 0
return cint(number)

View file

@ -184,7 +184,7 @@
}
],
"links": [],
"modified": "2020-05-14 15:10:05.627706",
"modified": "2020-05-18 19:42:30.435604",
"modified_by": "Administrator",
"module": "Desk",
"name": "Onboarding Step",
@ -213,6 +213,7 @@
}
],
"quick_entry": 1,
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1

Some files were not shown because too many files have changed in this diff Show more