Merge branch 'develop' into paytm-integration

This commit is contained in:
Mangesh-Khairnar 2020-07-21 00:13:59 +05:30 committed by GitHub
commit 48e6f8999d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
296 changed files with 9152 additions and 8622 deletions

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.9 KiB

4
.github/frappe-framework-logo.svg vendored Normal file
View file

@ -0,0 +1,4 @@
<svg viewBox="0 0 1082 125" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M1 56.284v67.508h26.904V79.368l47.902-.04V56.248L1 56.284zM82.994 1H1.034v23.043h81.96V1z" fill="#3C88F7"/>
<path d="M216.735 75.269V53.233h-52.897V22.505h61.849V.469H138v122.509h25.838v-47.71h52.897zM285.477 59.096c2.645 0 5.29.404 7.731 1.415l3.052-26.281c-2.441-.607-4.883-1.01-7.934-1.01-11.8.201-19.125 6.67-25.635 22.439.61-3.639.814-7.48.814-11.321v-9.3h-23.193v87.94h25.227V72.64c5.697-12.13 15.056-13.544 19.938-13.544zM380.301 125c4.476 0 9.359-1.011 13.428-4.043l-2.442-17.184c-.813.404-1.627.607-2.645.607-2.644 0-4.272-2.426-4.272-6.672v-32.75c0-23.652-16.276-31.739-39.266-31.739-23.193 0-38.655 10.715-41.3 30.728l22.583 1.213c1.22-8.49 7.12-12.533 17.09-12.533 9.358 0 15.869 4.65 15.869 13.949v5.458h-16.683c-26.245 0-41.097 8.895-41.097 27.898 0 18.397 14.648 25.068 31.128 25.068 15.462 0 24.007-6.671 28.89-15.364C363.211 121.563 372.163 125 380.301 125zm-20.752-39.22c0 14.759-10.579 21.228-19.735 21.228-7.324 0-12.41-3.033-12.41-9.704 0-8.086 6.917-11.523 18.107-11.523h14.038zM449.522 55.255c9.562 0 15.055 5.862 15.055 19.003v48.72h25.228V69.001c3.458-8.895 9.358-13.746 16.683-13.746 10.172 0 15.055 5.862 15.055 19.003v48.72h25.228v-54.38c0-24.462-11.8-35.379-32.145-35.379-11.8 0-22.38 5.054-29.297 15.97-4.476-11.118-13.021-15.97-26.856-15.97-13.02 0-22.379 5.66-28.076 17.184.204-2.022.204-3.841.204-5.863V35.04h-23.193v87.939h25.227V69.406c3.459-9.097 8.952-14.151 16.887-14.151zM606.579 125c22.583 0 39.062-8.491 44.148-29.111l-24.21-2.224c-2.442 7.48-8.749 11.725-18.311 11.725-12.614 0-19.531-7.277-20.548-21.429l64.087-.202c.203-2.224.203-4.245.203-6.469 0-28.505-15.462-44.07-43.131-44.07-29.297 0-47.201 18.8-47.201 46.496 0 28.909 17.09 45.284 44.963 45.284zm1.831-72.98c10.783 0 17.09 6.065 17.903 17.184h-37.841c2.441-11.928 9.765-17.184 19.938-17.184zM751.095 122.978h34.18l24.414-87.94H782.63l-15.056 63.479h-1.831l-17.089-63.478h-28.89l-17.497 63.478h-2.035l-15.055-63.478h-27.669l24.617 87.939h34.383l16.48-61.456h1.831l16.276 61.456zM814.935 79.918c0 28.303 16.276 45.082 45.98 45.082 31.535 0 48.828-17.992 48.828-46.093 
0-27.898-16.073-45.688-45.573-45.688-31.738 0-49.235 18.599-49.235 46.7zm26.652-.606c0-16.173 8.545-26.079 21.566-26.079 12.614 0 19.938 9.906 19.938 26.079 0 15.768-8.341 25.876-20.955 25.876-12.818 0-20.549-10.108-20.549-25.876zM971.128 59.096c2.645 0 5.289.404 7.731 1.415l3.052-26.281c-2.442-.607-4.883-1.01-7.935-1.01-11.8.201-19.124 6.67-25.635 22.439.611-3.639.814-7.48.814-11.321v-9.3h-23.193v87.94h25.228V72.64c5.696-12.13 15.055-13.544 19.938-13.544zM1070.4 125c3.46 0 7.33-.404 10.79-1.819l-1.83-21.227c-.82.202-1.63.404-3.26.404-4.27 0-7.12-2.224-11.39-7.682l-16.48-21.833L1082 35.039h-27.67l-34.18 36.793h-.61V1.5h-25.227v121.478h25.227v-20.62l10.78-12.534 14.45 19.61c8.95 12.938 15.87 15.566 25.63 15.566z" fill="#000"/>
</svg>

After

Width:  |  Height:  |  Size: 2.8 KiB

13
.github/semantic.yml vendored Normal file
View file

@ -0,0 +1,13 @@
# Always validate the PR title AND all the commits
titleAndCommits: true
# Allow use of Merge commits (eg on github: "Merge branch 'master' into feature/ride-unicorns")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowMergeCommits: true
# Allow use of Revert commits (eg on github: "Revert "feat: ride unicorns"")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowRevertCommits: true
# For allowed PR types: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json
# Tool Reference: https://github.com/zeke/semantic-pull-requests

14
.github/workflows/docker-release.yml vendored Normal file
View file

@ -0,0 +1,14 @@
name: Trigger Docker build on release
on:
release:
types: [released]
jobs:
curl:
runs-on: ubuntu-latest
container:
image: alpine:latest
steps:
- name: curl
run: |
apk add curl bash
curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests

View file

@ -4,8 +4,7 @@ pull_request_rules:
- status-success=Sider
- status-success=Semantic Pull Request
- status-success=Travis CI - Pull Request
- status-success=security/snyk - package.json (frappe)
- status-success=security/snyk - requirements.txt (frappe)
- status-success=security/snyk (frappe)
- label!=don't-merge
- label!=squash
- "#approved-reviews-by>=1"
@ -17,8 +16,7 @@ pull_request_rules:
- status-success=Sider
- status-success=Semantic Pull Request
- status-success=Travis CI - Pull Request
- status-success=security/snyk - package.json (frappe)
- status-success=security/snyk - requirements.txt (frappe)
- status-success=security/snyk (frappe)
- label!=don't-merge
- label=squash
- "#approved-reviews-by>=1"

View file

@ -47,27 +47,11 @@ matrix:
script: bench --site test_site run-ui-tests frappe --headless
before_install:
# do we really want to run travis? check which files are changed and if git doesnt face any fatal errors
# do we really want to run travis?
- |
FILES_CHANGED=$( git diff --name-only $TRAVIS_COMMIT_RANGE 2>&1 )
if [[ $FILES_CHANGED != *"fatal"* ]]; then
ONLY_DOCS_CHANGES=$( echo $FILES_CHANGED | grep -qvE '\.(md|png|jpg|jpeg)$|^.github|LICENSE' ; echo $? )
ONLY_JS_CHANGES=$( echo $FILES_CHANGED | grep -qvE '\.js$' ; echo $? )
ONLY_PY_CHANGES=$( echo $FILES_CHANGED | grep -qvE '\.py$' ; echo $? )
if [[ $ONLY_DOCS_CHANGES == "1" ]]; then
echo "Only docs were updated, stopping build process.";
exit;
fi
if [[ $ONLY_JS_CHANGES == "1" && $TYPE == "server" ]]; then
echo "Only JavaScript code was updated; Stopping Python build process.";
exit;
fi
if [[ $ONLY_PY_CHANGES == "1" && $TYPE == "ui" ]]; then
echo "Only Python code was updated, stopping Cypress build process.";
exit;
fi
python ./.travis/roulette.py
if [[ $? != 2 ]];then
exit;
fi
# install wkhtmltopdf

54
.travis/roulette.py Normal file
View file

@ -0,0 +1,54 @@
# if the script ends with exit code 0, then no tests are run further, else all tests are run
import os
import re
import shlex
import subprocess
import sys
def get_output(command, shell=True):
print(command)
command = shlex.split(command)
return subprocess.check_output(command, shell=shell, encoding="utf8").strip()
def is_py(file):
return file.endswith("py")
def is_js(file):
return file.endswith("js")
def is_docs(file):
regex = re.compile('\.(md|png|jpg|jpeg)$|^.github|LICENSE')
return bool(regex.search(file))
if __name__ == "__main__":
build_type = os.environ.get("TYPE")
commit_range = os.environ.get("TRAVIS_COMMIT_RANGE")
print("Build Type: {}".format(build_type))
print("Commit Range: {}".format(commit_range))
try:
files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
except Exception:
sys.exit(2)
if "fatal" not in files_changed:
files_list = files_changed.split()
only_docs_changed = len(list(filter(is_docs, files_list))) == len(files_list)
only_js_changed = len(list(filter(is_js, files_list))) == len(files_list)
only_py_changed = len(list(filter(is_py, files_list))) == len(files_list)
if only_docs_changed:
print("Only docs were updated, stopping build process.")
sys.exit(0)
if only_js_changed and build_type == "server":
print("Only JavaScript code was updated; Stopping Python build process.")
sys.exit(0)
if only_py_changed and build_type == "ui":
print("Only Python code was updated, stopping Cypress build process.")
sys.exit(0)
sys.exit(2)

View file

@ -1,12 +1,12 @@
<div align="center">
<img src=".github/frappe-framework-logo.png" height="150">
<h1>
<a href="https://frappe.io">
frappe
<br>
<a href="https://frappeframework.com">
<img src=".github/frappe-framework-logo.svg" height="50">
</a>
</h1>
<h3>
a web framework with <a href="https://www.youtube.com/watch?v=LOjk3m0wTwg">"batteries included"
a web framework with <a href="https://www.youtube.com/watch?v=LOjk3m0wTwg">"batteries included"</a>
</h3>
<h5>
it's pronounced - <em>fra-pay</em>
@ -33,8 +33,8 @@
Full-stack web application framework that uses Python and MariaDB on the server side and a tightly integrated client side library. Built for [ERPNext](https://erpnext.com)
### Table of Contents
* [Installation](#installation)
* [Documentation](https://frappe.io/docs)
* [Installation](https://frappeframework.com/docs/user/en/installation)
* [Documentation](https://frappeframework.com/docs)
* [License](#license)
### Installation
@ -49,7 +49,7 @@ Full-stack web application framework that uses Python and MariaDB on the server
### Website
For details and documentation, see the website
[https://frappe.io](https://frappe.io)
[https://frappeframework.com](https://frappeframework.com)
### License
This repository has been released under the [MIT License](LICENSE).

View file

@ -4,14 +4,14 @@ context('Control Duration', () => {
cy.visit('/desk#workspace/Website');
});
function get_dialog_with_duration(show_days=1, show_seconds=1) {
function get_dialog_with_duration(hide_days=0, hide_seconds=0) {
return cy.dialog({
title: 'Duration',
fields: [{
'fieldname': 'duration',
'fieldtype': 'Duration',
'show_seconds': show_days,
'show_days': show_seconds
'hide_days': hide_days,
'hide_seconds': hide_seconds
}]
});
}
@ -37,7 +37,7 @@ context('Control Duration', () => {
});
it('should hide days or seconds according to duration options', () => {
get_dialog_with_duration(0, 0).as('dialog');
get_dialog_with_duration(1, 1).as('dialog');
cy.get('.frappe-control[data-fieldname=duration] input').first().click();
cy.get('.duration-input[data-duration=days]').should('not.be.visible');
cy.get('.duration-input[data-duration=seconds]').should('not.be.visible');

View file

@ -40,12 +40,12 @@ context('Grid Pagination', () => {
cy.get('@table').find('.current-page-number').should('contain', '20');
cy.get('@table').find('.total-page-number').should('contain', '20');
});
it('deletes all rows', ()=> {
cy.visit('/desk#Form/Contact/Test Contact');
cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
cy.get('@table').find('button.grid-remove-all-rows').click();
cy.get('.modal-dialog .btn-primary').contains('Yes').click();
cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
});
// it('deletes all rows', ()=> {
// cy.visit('/desk#Form/Contact/Test Contact');
// cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
// cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
// cy.get('@table').find('button.grid-remove-all-rows').click();
// cy.get('.modal-dialog .btn-primary').contains('Yes').click();
// cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
// });
});

View file

@ -490,7 +490,8 @@ def sendmail(recipients=[], sender="", subject="No Subject", message="No Message
message = content or message
if as_markdown:
message = frappe.utils.md_to_html(message)
from frappe.utils import md_to_html
message = md_to_html(message)
if not delayed:
now = True
@ -1145,8 +1146,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
"""Import a file using Data Import."""
from frappe.core.doctype.data_import import data_import
data_import.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
from frappe.core.doctype.data_import.data_import import import_doc
import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""

View file

@ -159,14 +159,14 @@ def validate_auth():
authorization_type = authorization_header[0].lower()
if len(authorization_header) == 1:
frappe.throw(_('Invalid Authorization headers, add a token with a prefix from one of the following: {0}.'.format(VALID_AUTH_PREFIX_STRING)), frappe.InvalidAuthorizationHeader)
frappe.throw(_('Invalid Authorization headers, add a token with a prefix from one of the following: {0}.').format(VALID_AUTH_PREFIX_STRING), frappe.InvalidAuthorizationHeader)
if authorization_type == "bearer":
validate_oauth(authorization_header)
elif authorization_type in VALID_AUTH_PREFIX_TYPES:
validate_auth_via_api_keys(authorization_header)
else:
frappe.throw(_('Invalid Authorization Type {0}, must be one of {1}.'.format(authorization_type, VALID_AUTH_PREFIX_STRING)), frappe.InvalidAuthorizationPrefix)
frappe.throw(_('Invalid Authorization Type {0}, must be one of {1}.').format(authorization_type, VALID_AUTH_PREFIX_STRING), frappe.InvalidAuthorizationPrefix)
def validate_oauth(authorization_header):
@ -245,5 +245,6 @@ def validate_api_key_secret(api_key, api_secret, frappe_authorization_source=Non
)
else:
user = frappe.db.get_value(doctype, doc, 'user')
frappe.set_user(user)
if frappe.local.login_manager.user in ('', 'Guest'):
frappe.set_user(user)
frappe.local.form_dict = form_dict

View file

@ -99,7 +99,7 @@ def application(request):
frappe.monitor.stop(response)
frappe.recorder.dump()
frappe.logger("web").info({
frappe.logger("frappe.web").info({
"site": get_site_name(request.host),
"remote_addr": getattr(request, "remote_addr", "NOTFOUND"),
"base_url": getattr(request, "base_url", "NOTFOUND"),

View file

@ -333,12 +333,20 @@ class CookieManager:
# sid expires in 3 days
expires = datetime.datetime.now() + datetime.timedelta(days=3)
if frappe.session.sid:
self.cookies["sid"] = {"value": frappe.session.sid, "expires": expires}
self.set_cookie("sid", frappe.session.sid, expires=expires, httponly=True)
if frappe.session.session_country:
self.cookies["country"] = {"value": frappe.session.get("session_country")}
self.set_cookie("country", frappe.session.session_country)
def set_cookie(self, key, value, expires=None):
self.cookies[key] = {"value": value, "expires": expires}
def set_cookie(self, key, value, expires=None, secure=False, httponly=False, samesite="Lax"):
if not secure:
secure = frappe.local.request.scheme == "https"
self.cookies[key] = {
"value": value,
"expires": expires,
"secure": secure,
"httponly": httponly,
"samesite": samesite
}
def delete_cookie(self, to_delete):
if not isinstance(to_delete, (list, tuple)):
@ -349,7 +357,10 @@ class CookieManager:
def flush_cookies(self, response):
for key, opts in self.cookies.items():
response.set_cookie(key, quote((opts.get("value") or "").encode('utf-8')),
expires=opts.get("expires"))
expires=opts.get("expires"),
secure=opts.get("secure"),
httponly=opts.get("httponly"),
samesite=opts.get("samesite"))
# expires yesterday!
expires = datetime.datetime.now() + datetime.timedelta(days=-1)

View file

@ -21,7 +21,7 @@ class AssignmentRule(Document):
def on_update(self): # pylint: disable=no-self-use
frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name)
def after_rename(self): # pylint: disable=no-self-use
def after_rename(self, old, new, merge): # pylint: disable=no-self-use
frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name)
def apply_unassign(self, doc, assignments):

View file

@ -146,7 +146,7 @@ class AutoRepeat(Document):
def make_new_document(self):
reference_doc = frappe.get_doc(self.reference_doctype, self.reference_document)
new_doc = frappe.copy_doc(reference_doc, ignore_no_copy = False)
new_doc = frappe.copy_doc(reference_doc)
self.update_doc(new_doc, reference_doc)
new_doc.insert(ignore_permissions = True)
@ -372,7 +372,8 @@ def make_auto_repeat(doctype, docname, frequency = 'Daily', start_date = None, e
doc.save()
return doc
#method for reference_doctype filter
# method for reference_doctype filter
@frappe.whitelist()
def get_auto_repeat_doctypes(doctype, txt, searchfield, start, page_len, filters):
res = frappe.db.get_all('Property Setter', {
'property': 'allow_auto_repeat',

View file

@ -107,7 +107,7 @@ def load_desktop_data(bootinfo):
from frappe.config import get_modules_from_all_apps_for_user
from frappe.desk.desktop import get_desk_sidebar_items
bootinfo.allowed_modules = get_modules_from_all_apps_for_user()
bootinfo.allowed_workspaces = get_desk_sidebar_items(True)
bootinfo.allowed_workspaces = get_desk_sidebar_items(flatten=True, cache=False)
bootinfo.module_page_map = get_controller("Desk Page").get_module_page_map()
bootinfo.dashboards = frappe.get_all("Dashboard")

View file

@ -21,10 +21,10 @@ global_cache_keys = ("app_hooks", "installed_apps",
user_cache_keys = ("bootinfo", "user_recent", "roles", "user_doc", "lang",
"defaults", "user_permissions", "home_page", "linked_with",
"desktop_icons", 'portal_menu_items', 'user_perm_can_read',
"has_role:Page", "has_role:Report")
"has_role:Page", "has_role:Report", "desk_sidebar_items")
doctype_cache_keys = ("meta", "form_meta", "table_columns", "last_modified",
"linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map')
"linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map', 'data_import_column_header_map')
def clear_user_cache(user=None):

View file

@ -43,12 +43,14 @@ def pass_context(f):
return click.pass_context(_func)
def get_site(context):
def get_site(context, raise_err=True):
try:
site = context.sites[0]
return site
except (IndexError, TypeError):
raise frappe.SiteNotSpecifiedError
if raise_err:
raise frappe.SiteNotSpecifiedError
return None
def popen(command, *args, **kwargs):
output = kwargs.get('output', True)

View file

@ -126,7 +126,7 @@ def doctor(context, site=None):
"Get diagnostic info about background workers"
from frappe.utils.doctor import doctor as _doctor
if not site:
site = get_site(context)
site = get_site(context, raise_err=False)
return _doctor(site=site)
@click.command('show-pending-jobs')

View file

@ -108,12 +108,14 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
@click.option('--install-app', multiple=True, help='Install app after installation')
@click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file')
@click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file')
@click.option('--force', is_flag=True, default=False, help='Use a bit of force to get the job done')
@pass_context
def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None):
"Restore site database from an sql file"
from frappe.installer import extract_sql_gzip, extract_tar_files
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
from frappe.installer import extract_sql_gzip, extract_tar_files, is_downgrade
force = context.force or force
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
if not os.path.exists(sql_file_path):
base_path = '..'
sql_file_path = os.path.join(base_path, sql_file_path)
@ -125,7 +127,6 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
else:
base_path = '.'
if sql_file_path.endswith('sql.gz'):
decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path))
else:
@ -133,10 +134,16 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
site = get_site(context)
frappe.init(site=site)
# dont allow downgrading to older versions of frappe without force
if not force and is_downgrade(decompressed_file_name, verbose=True):
warn_message = "This is not recommended and may lead to unexpected behaviour. Do you want to continue anyway?"
click.confirm(warn_message, abort=True)
_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=context.verbose, install_apps=install_app, source_sql=decompressed_file_name,
force=True)
force=True, db_type=frappe.conf.db_type)
# Extract public and/or private files to the restored site, if user has given the path
if with_public_files:
@ -194,16 +201,31 @@ def _reinstall(site, admin_password=None, mariadb_root_username=None, mariadb_ro
def install_app(context, apps):
"Install a new app to site, supports multiple apps"
from frappe.installer import install_app as _install_app
exit_code = 0
if not context.sites:
raise SiteNotSpecifiedError
for site in context.sites:
frappe.init(site=site)
frappe.connect()
try:
for app in apps:
for app in apps:
try:
_install_app(app, verbose=context.verbose)
finally:
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
except frappe.IncompatibleApp as err:
err_msg = ":\n{}".format(err) if str(err) else ""
print("App {} is Incompatible with Site {}{}".format(app, site, err_msg))
exit_code = 1
except Exception as err:
err_msg = ":\n{}".format(err if str(err) else frappe.get_traceback())
print("An error occurred while installing {}{}".format(app, err_msg))
exit_code = 1
frappe.destroy()
sys.exit(exit_code)
@click.command('list-apps')
@pass_context
@ -414,15 +436,17 @@ def remove_from_installed_apps(context, app):
@click.argument('app')
@click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True)
@click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False)
@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False)
@click.option('--force', help='Force remove app from site', is_flag=True, default=False)
@pass_context
def uninstall(context, app, dry_run=False, yes=False):
def uninstall(context, app, dry_run, yes, no_backup, force):
"Remove app and linked modules from site"
from frappe.installer import remove_app
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
remove_app(app, dry_run, yes)
remove_app(app_name=app, dry_run=dry_run, yes=yes, no_backup=no_backup, force=force)
finally:
frappe.destroy()
if not context.sites:
@ -607,6 +631,29 @@ def stop_recording(context):
if not context.sites:
raise SiteNotSpecifiedError
@click.command('ngrok')
@pass_context
def start_ngrok(context):
from pyngrok import ngrok
site = get_site(context)
frappe.init(site=site)
port = frappe.conf.http_port or frappe.conf.webserver_port
public_url = ngrok.connect(port=port, options={
'host_header': site
})
print(f'Public URL: {public_url}')
print('Inspect logs at http://localhost:4040')
ngrok_process = ngrok.get_ngrok_process()
try:
# Block until CTRL-C or some other terminating event
ngrok_process.proc.wait()
except KeyboardInterrupt:
print("Shutting down server...")
frappe.destroy()
ngrok.kill()
commands = [
add_system_manager,
@ -632,5 +679,6 @@ commands = [
browse,
start_recording,
stop_recording,
add_to_hosts
add_to_hosts,
start_ngrok
]

View file

@ -215,12 +215,12 @@ def export_doc(context, doctype, docname):
@pass_context
def export_json(context, doctype, path, name=None):
"Export doclist as json to the given path, use '-' as name for Singles."
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_json
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_json(doctype, path, name=name)
export_json(doctype, path, name=name)
finally:
frappe.destroy()
if not context.sites:
@ -232,12 +232,12 @@ def export_json(context, doctype, path, name=None):
@pass_context
def export_csv(context, doctype, path):
"Export data import template with data for DocType"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_csv
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_csv(doctype, path)
export_csv(doctype, path)
finally:
frappe.destroy()
if not context.sites:
@ -264,7 +264,7 @@ def export_fixtures(context, app=None):
@pass_context
def import_doc(context, path, force=False):
"Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import import_doc
if not os.path.exists(path):
path = os.path.join('..', path)
@ -276,7 +276,7 @@ def import_doc(context, path, force=False):
try:
frappe.init(site=site)
frappe.connect()
data_import.import_doc(path, overwrite=context.force)
import_doc(path, overwrite=context.force)
finally:
frappe.destroy()
if not context.sites:
@ -293,7 +293,7 @@ def import_doc(context, path, force=False):
@pass_context
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
"Import CSV using data import"
from frappe.core.doctype.data_import import importer
from frappe.core.doctype.data_import_legacy import importer
from frappe.utils.csvutils import read_csv_content
site = get_site(context)
@ -329,20 +329,12 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
"Import documents in bulk from CSV or XLSX using data import"
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.data_import import import_file
site = get_site(context)
frappe.init(site=site)
frappe.connect()
data_import = frappe.new_doc('Data Import Beta')
data_import.submit_after_import = submit_after_import
data_import.mute_emails = mute_emails
data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records'
i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True)
i.import_data()
import_file(doctype, file_path, import_type, submit_after_import, console=True)
frappe.destroy()

View file

@ -16,6 +16,13 @@ def get_data():
"description": _("Language, Date and Time settings"),
"hide_count": True
},
{
"type": "doctype",
"name": "Global Defaults",
"label": _("Global Defaults"),
"description": _("Company, Fiscal Year and Currency defaults"),
"hide_count": True
},
{
"type": "doctype",
"name": "Error Log",

View file

@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", {
});
frm.refresh_field("links");
let numbers = frm.doc.phone_nos;
if (numbers && numbers.length && frappe.phone_call.handler) {
frm.add_custom_button(__('Call'), () => {
numbers = frm.doc.phone_nos
.sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no)
.map(d => d.phone);
frappe.phone_call.handler(numbers);
});
}
if (frm.doc.links) {
frappe.call({
method: "frappe.contacts.doctype.contact.contact.address_query",

View file

@ -18,7 +18,7 @@
{
"hidden": 0,
"label": "Core",
"links": "[\n {\n \"description\": \"Language, Date and Time settings\",\n \"hide_count\": true,\n \"label\": \"System Settings\",\n \"name\": \"System Settings\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Log of error on automated events (scheduler).\",\n \"label\": \"Error Log\",\n \"name\": \"Error Log\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Log of error during requests.\",\n \"label\": \"Error Snapshot\",\n \"name\": \"Error Snapshot\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Enable / Disable Domains\",\n \"hide_count\": true,\n \"label\": \"Domain Settings\",\n \"name\": \"Domain Settings\",\n \"type\": \"doctype\"\n }\n]"
"links": "[\n {\n \"description\": \"Language, Date and Time settings\",\n \"hide_count\": true,\n \"label\": \"System Settings\",\n \"name\": \"System Settings\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Company, Fiscal Year and Currency defaults\",\n \"hide_count\": true,\n \"label\": \"Global Defaults\",\n \"name\": \"Global Defaults\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Log of error on automated events (scheduler).\",\n \"label\": \"Error Log\",\n \"name\": \"Error Log\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Log of error during requests.\",\n \"label\": \"Error Snapshot\",\n \"name\": \"Error Snapshot\",\n \"type\": \"doctype\"\n },\n {\n \"description\": \"Enable / Disable Domains\",\n \"hide_count\": true,\n \"label\": \"Domain Settings\",\n \"name\": \"Domain Settings\",\n \"type\": \"doctype\"\n }\n]"
},
{
"hidden": 0,
@ -39,10 +39,11 @@
"docstatus": 0,
"doctype": "Desk Page",
"extends_another_page": 0,
"hide_custom": 0,
"idx": 0,
"is_standard": 1,
"label": "Settings",
"modified": "2020-04-01 11:24:40.636747",
"modified": "2020-07-14 10:09:09.520557",
"modified_by": "Administrator",
"module": "Core",
"name": "Settings",

View file

@ -444,24 +444,48 @@ def update_parent_document_on_communication(doc):
status_field = parent.meta.get_field("status")
if status_field:
options = (status_field.options or '').splitlines()
options = (status_field.options or "").splitlines()
# if status has a "Replied" option, then update the status for received communication
if ('Replied' in options) and doc.sent_or_received=="Received":
if ("Replied" in options) and doc.sent_or_received == "Received":
parent.db_set("status", "Open")
parent.run_method("handle_hold_time", "Replied")
apply_assignment_rule(parent)
else:
# update the modified date for document
parent.update_modified()
update_mins_to_first_communication(parent, doc)
parent.run_method('notify_communication', doc)
set_avg_response_time(parent, doc)
parent.run_method("notify_communication", doc)
parent.notify_update()
def update_mins_to_first_communication(parent, communication):
if parent.meta.has_field('mins_to_first_response') and not parent.get('mins_to_first_response'):
if parent.meta.has_field("mins_to_first_response") and not parent.get("mins_to_first_response"):
if is_system_user(communication.sender):
first_responded_on = communication.creation
if parent.meta.has_field('first_responded_on') and communication.sent_or_received == "Sent":
parent.db_set('first_responded_on', first_responded_on)
parent.db_set('mins_to_first_response', round(time_diff_in_seconds(first_responded_on, parent.creation) / 60), 2)
if parent.meta.has_field("first_responded_on") and communication.sent_or_received == "Sent":
parent.db_set("first_responded_on", first_responded_on)
parent.db_set("mins_to_first_response", round(time_diff_in_seconds(first_responded_on, parent.creation) / 60), 2)
def set_avg_response_time(parent, communication):
if parent.meta.has_field("avg_response_time") and communication.sent_or_received == "Sent":
# avg response time for all the responses
communications = frappe.get_list("Communication", filters={
"reference_doctype": parent.doctype,
"reference_name": parent.name
},
fields=["sent_or_received", "name", "creation"],
order_by="creation"
)
if len(communications):
response_times = []
for i in range(len(communications)):
if communications[i].sent_or_received == "Sent" and communications[i-1].sent_or_received == "Received":
response_time = round(time_diff_in_seconds(communications[i].creation, communications[i-1].creation), 2)
if response_time > 0:
response_times.append(response_time)
if response_times:
avg_response_time = sum(response_times) / len(response_times)
parent.db_set("avg_response_time", avg_response_time)

View file

@ -221,7 +221,7 @@ def prepare_to_notify(doc, print_html=None, print_format=None, attachments=None)
:param print_html: Send given value as HTML attachment.
:param print_format: Attach print format of parent document."""
view_link = frappe.utils.cint(frappe.db.get_value("Print Settings", "Print Settings", "attach_view_link"))
view_link = frappe.utils.cint(frappe.db.get_value("System Settings", "System Settings", "attach_view_link"))
if print_format and view_link:
doc.content += get_attach_link(doc, print_format)
@ -236,7 +236,7 @@ def prepare_to_notify(doc, print_html=None, print_format=None, attachments=None)
if doc.sender:
# combine for sending to get the format 'Jane <jane@example.com>'
doc.sender = formataddr([doc.sender_full_name, doc.sender])
doc.sender = get_formatted_email(doc.sender_full_name, mail=doc.sender)
doc.attachments = []

View file

@ -9,7 +9,7 @@ import frappe.permissions
import re, csv, os
from frappe.utils.csvutils import UnicodeWriter
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint
from frappe.core.doctype.data_import.importer import get_data_keys
from frappe.core.doctype.data_import_legacy.importer import get_data_keys
from six import string_types
from frappe.core.doctype.access_log.access_log import make_access_log

View file

@ -1 +0,0 @@
Bulk import / update of data via file upload in Excel or CSV.

View file

@ -0,0 +1,3 @@
.warnings .warning {
margin-bottom: 40px;
}

View file

@ -1,324 +1,518 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import', {
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
frappe.call({
method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
setup(frm) {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import', frm.doc.name);
frappe.model.with_doc('Data Import', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
}),
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
frm.set_query('reference_doctype', () => {
return {
filters: {
name: ['in', frappe.boot.user.can_import]
}
};
});
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
frm.has_import_file = () => {
return frm.doc.import_file || frm.doc.google_sheets_url;
};
},
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
},
refresh: function(frm) {
onload_post_render(frm) {
frm.trigger('update_primary_action');
},
update_primary_action(frm) {
if (frm.is_dirty()) {
frm.enable_save();
return;
}
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && (frm.has_import_file())) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
},
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
download_template(frm) {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype,
frm.doc.import_type
);
});
},
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import.data_import.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
google_sheets_url(frm) {
if (!frm.is_dirty()) {
frm.trigger('import_file');
} else {
$(frm.fields_dict.import_log.wrapper).empty();
frm.trigger('update_primary_action');
}
},
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
refresh_google_sheet(frm) {
frm.trigger('import_file');
},
import_file(frm) {
frm.toggle_display('section_import_preview', frm.has_import_file());
if (!frm.has_import_file()) {
frm.get_field('import_preview').$wrapper.empty();
return;
} else {
frm.trigger('update_primary_action');
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: {
data_import: frm.doc.name,
import_file: frm.doc.import_file,
google_sheets_url: frm.doc.google_sheets_url
},
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.column_to_field_map = template_options.column_to_field_map || {};
Object.assign(template_options.column_to_field_map, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import.data_import.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
frm.save();
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="warning" data-row="${row_number}">
<h5 class="text-uppercase">${__('Row {0}', [row_number])}</h5>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="warning" data-col="${warning.col}">
<h5 class="text-uppercase">${header}</h5>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-10 warnings">${html}</div>
</div>
`);
},
only_update: function(frm) {
frm.save();
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
submit_after_import: function(frm) {
frm.save();
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}" style="margin-top: 15px;">
${__('Show Traceback')}
</button>
<div class="collapse" id="${id}" style="margin-top: 15px;">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
skip_errors: function(frm) {
frm.save();
},
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
ignore_encoding_errors: function(frm) {
frm.save();
},
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});
frappe.provide('frappe.data_import');
frappe.data_import.download_dialog = function(frm) {
var dialog;
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
const get_doctype_checkbox_fields = () => {
return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
.map(df => dialog.fields_dict[df.fieldname]);
};
const doctype_fields = get_fields(frm.doc.reference_doctype)
.map(df => {
let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
return {
label: df.label,
reqd: reqd,
danger: reqd,
value: df.fieldname,
checked: 1
};
});
let fields = [
{
"label": __("Select Columns"),
"fieldname": "select_columns",
"fieldtype": "Select",
"options": "All\nMandatory\nManually",
"reqd": 1,
"onchange": function() {
const fields = get_doctype_checkbox_fields();
fields.map(f => f.toggle(true));
if(this.value == 'Mandatory' || this.value == 'Manually') {
checkbox_toggle(true);
fields.map(multicheck_field => {
multicheck_field.options.map(option => {
if(!option.reqd) return;
$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
.prop('checked', false)
.trigger('click');
});
});
} else if(this.value == 'All'){
$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
.prop('disabled', true);
}
}
},
{
"label": __("File Type"),
"fieldname": "file_type",
"fieldtype": "Select",
"options": "Excel\nCSV",
"default": "Excel"
},
{
"label": __("Download with Data"),
"fieldname": "with_data",
"fieldtype": "Check",
"hidden": !frm.doc.overwrite,
"default": 1
},
{
"label": __("Select All"),
"fieldname": "select_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle();
}
},
{
"label": __("Unselect All"),
"fieldname": "unselect_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle(true);
}
},
{
"label": frm.doc.reference_doctype,
"fieldname": "doctype_fields",
"fieldtype": "MultiCheck",
"options": doctype_fields,
"columns": 2,
"hidden": 1
}
];
const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
.map(df => {
return {
"label": df.options,
"fieldname": df.fieldname + '_fields',
"fieldtype": "MultiCheck",
"options": frappe.meta.get_docfields(df.options)
.filter(filter_fields)
.map(df => ({
label: df.label,
reqd: df.reqd ? 1 : 0,
value: df.fieldname,
checked: 1,
danger: df.reqd
})),
"columns": 2,
"hidden": 1
};
});
fields = fields.concat(child_table_fields);
dialog = new frappe.ui.Dialog({
title: __('Download Template'),
fields: fields,
primary_action: function(values) {
var data = values;
if (frm.doc.reference_doctype) {
var export_params = () => {
let columns = {};
if(values.select_columns) {
columns = get_doctype_checkbox_fields().reduce((columns, field) => {
const options = field.get_checked_options();
columns[field.df.label] = options;
return columns;
}, {});
}
return {
doctype: frm.doc.reference_doctype,
parent_doctype: frm.doc.reference_doctype,
select_columns: JSON.stringify(columns),
with_data: frm.doc.overwrite && data.with_data,
all_doctypes: true,
file_type: data.file_type,
template: true
};
};
let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
open_url_post(get_template_url, export_params());
} else {
frappe.msgprint(__("Please select the Document Type."));
}
dialog.hide();
},
primary_action_label: __('Download')
});
$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
.wrapAll('<div class="inline-buttons" />');
const button_container = $(dialog.body).find('.inline-buttons');
button_container.addClass('flex');
$(button_container).find('.frappe-control').map((index, button) => {
$(button).css({"margin-right": "1em"});
});
function checkbox_toggle(checked=false) {
$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
});
}
return dialog;
};

View file

@ -1,767 +1,192 @@
{
"allow_copy": 1,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"autoname": "",
"beta": 0,
"creation": "2016-12-09 14:27:32.720061",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"html_5",
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 1,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "action",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Action",
"length": 0,
"no_copy": 0,
"options": "Insert new records\nUpdate records",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Insert new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Update records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Don't create new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Attach file for Import",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"in_list_view": 1,
"label": "Import File"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "column_break_4",
"fieldtype": "Column Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Generated File",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"label": "Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Skip rows with errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Submit after importing",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Ignore encoding errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"depends_on": "",
"fieldname": "no_email",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Do not send Emails",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"columns": 0,
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "import_status",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Status",
"length": 0,
"no_copy": 0,
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Show only errors",
"length": 0,
"no_copy": 1,
"permlevel": 0,
"precision": "",
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "",
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Log Details",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Import File Errors and Warnings"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "amended_from",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Amended From",
"length": 0,
"no_copy": 1,
"options": "Data Import",
"permlevel": 0,
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "total_rows",
"fieldtype": "Int",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Total Rows",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"depends_on": "eval:!doc.__islocal",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file",
"fieldname": "html_5",
"fieldtype": "HTML",
"options": "<h5 class=\"text-muted uppercase\">Or</h5>"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file\n",
"description": "Must be a publicly accessible Google Sheets URL",
"fieldname": "google_sheets_url",
"fieldtype": "Data",
"label": "Import from Google Sheets"
},
{
"depends_on": "eval:doc.google_sheets_url && !doc.__unsaved",
"fieldname": "refresh_google_sheet",
"fieldtype": "Button",
"label": "Refresh Google Sheet"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 1,
"issingle": 0,
"istable": 0,
"max_attachments": 1,
"modified": "2018-08-28 15:05:56.787108",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"name_case": "",
"owner": "Administrator",
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-06-24 14:33:03.173876",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 1,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "",
"track_changes": 1,
"track_seen": 1,
"track_views": 0
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,54 +1,187 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
import os
import frappe
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import.importer import upload
from frappe.core.doctype.data_import.importer import Importer
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.utils.background_jobs import enqueue
from frappe.utils.csvutils import validate_google_sheets_url
from frappe import _
class DataImport(Document):
def autoname(self):
if not self.name:
self.name = "Import on " +format_datetime(self.creation)
def validate(self):
if not self.import_file:
self.db_set("total_rows", 0)
if self.import_status == "In Progress":
frappe.throw(_("Can't save the form as data import is in progress."))
doc_before_save = self.get_doc_before_save()
if (
not (self.import_file or self.google_sheets_url)
or (doc_before_save and doc_before_save.import_file != self.import_file)
or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url)
):
self.template_options = ""
self.template_warnings = ""
# validate the template just after the upload
# if there is total_rows in the doc, it means that the template is already validated and error free
if self.import_file and not self.total_rows:
upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
self.validate_import_file()
self.validate_google_sheets_url()
def validate_import_file(self):
if self.import_file:
# validate template
self.get_importer()
def validate_google_sheets_url(self):
if not self.google_sheets_url:
return
validate_google_sheets_url(self.google_sheets_url)
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
if import_file:
self.import_file = import_file
if google_sheets_url:
self.google_sheets_url = google_sheets_url
if not (self.import_file or self.google_sheets_url):
return
i = self.get_importer()
return i.get_data_for_import_preview()
def start_import(self):
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.scheduler import is_scheduler_inactive
if is_scheduler_inactive() and not frappe.flags.in_test:
frappe.throw(
_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
)
enqueued_jobs = [d.get("job_name") for d in get_info()]
if self.name not in enqueued_jobs:
enqueue(
start_import,
queue="default",
timeout=6000,
event="data_import",
job_name=self.name,
data_import=self.name,
now=frappe.conf.developer_mode or frappe.flags.in_test,
)
return True
return False
def export_errored_rows(self):
return self.get_importer().export_errored_rows()
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_importable_doctypes():
return frappe.cache().hget("can_import", frappe.session.user)
def get_preview_from_template(data_import, import_file=None, google_sheets_url=None):
return frappe.get_doc("Data Import", data_import).get_preview_from_template(
import_file, google_sheets_url
)
@frappe.whitelist()
def import_data(data_import):
frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False)
frappe.publish_realtime("data_import_progress", {"progress": "0",
"data_import": data_import, "reload": True}, user=frappe.session.user)
from frappe.core.page.background_jobs.background_jobs import get_info
enqueued_jobs = [d.get("job_name") for d in get_info()]
if data_import not in enqueued_jobs:
enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
def form_start_import(data_import):
return frappe.get_doc("Data Import", data_import).start_import()
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
insert=False, submit=False, pre_process=None):
def start_import(data_import):
"""This method runs in background job"""
data_import = frappe.get_doc("Data Import", data_import)
try:
i = Importer(data_import.reference_doctype, data_import=data_import)
i.import_data()
except Exception:
frappe.db.rollback()
data_import.db_set("status", "Error")
frappe.log_error(title=data_import.name)
finally:
frappe.flags.in_import = False
frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
"""
Download template from Exporter
:param doctype: Document Type
:param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
:param export_records=None: One of 'all', 'by_filter', 'blank_template'
:param export_filters: Filter dict
:param file_type: File type to export into
"""
export_fields = frappe.parse_json(export_fields)
export_filters = frappe.parse_json(export_filters)
export_data = export_records != "blank_template"
e = Exporter(
doctype,
export_fields=export_fields,
export_data=export_data,
export_filters=export_filters,
file_type=file_type,
export_page_length=5 if export_records == "5_records" else None,
)
e.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
data_import = frappe.get_doc("Data Import", data_import_name)
data_import.export_errored_rows()
def import_file(
doctype, file_path, import_type, submit_after_import=False, console=False
):
"""
Import documents in from CSV or XLSX using data import.
:param doctype: DocType to import
:param file_path: Path to .csv, .xls, or .xlsx file to import
:param import_type: One of "Insert" or "Update"
:param submit_after_import: Whether to submit documents after import
:param console: Set to true if this is to be used from command line. Will print errors or progress to stdout.
"""
data_import = frappe.new_doc("Data Import")
data_import.submit_after_import = submit_after_import
data_import.import_type = (
"Insert New Records" if import_type.lower() == "insert" else "Update Existing Records"
)
i = Importer(
doctype=doctype, file_path=file_path, data_import=data_import, console=console
)
i.import_data()
##############
def import_doc(
path,
overwrite=False,
ignore_links=False,
ignore_insert=False,
insert=False,
submit=False,
pre_process=None,
):
if os.path.isdir(path):
files = [os.path.join(path, f) for f in os.listdir(path)]
else:
@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
for f in files:
if f.endswith(".json"):
frappe.flags.mute_emails = True
frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
frappe.modules.import_file.import_file_by_path(
f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True
)
frappe.flags.mute_emails = False
frappe.db.commit()
elif f.endswith(".csv"):
import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
import_file_by_path(
f,
ignore_links=ignore_links,
overwrite=overwrite,
submit=submit,
pre_process=pre_process,
)
frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
from frappe.utils.csvutils import read_csv_content
print("Importing " + path)
with open(path, "r") as infile:
upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
submit_after_import=submit, pre_process=pre_process)
def import_file_by_path(
path,
ignore_links=False,
overwrite=False,
submit=False,
pre_process=None,
no_email=True,
):
if path.endswith(".csv"):
print()
print("This method is deprecated.")
print('Import CSV files using the command "bench --site sitename data-import"')
print("Or use the method frappe.core.doctype.data_import.data_import.import_file")
print()
raise Exception("Method deprecated")
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
def export_json(
doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"
):
def post_process(out):
del_keys = ('modified_by', 'creation', 'owner', 'idx')
del_keys = ("modified_by", "creation", "owner", "idx")
for doc in out:
for key in del_keys:
if key in doc:
@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
for k, v in doc.items():
if isinstance(v, list):
for child in v:
for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
for key in del_keys + ("docstatus", "doctype", "modified", "name"):
if key in child:
del child[key]
@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
elif frappe.db.get_value("DocType", doctype, "issingle"):
out.append(frappe.get_doc(doctype).as_dict())
else:
for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
for doc in frappe.get_all(
doctype,
fields=["name"],
filters=filters,
or_filters=or_filters,
limit_page_length=0,
order_by=order_by,
):
out.append(frappe.get_doc(doctype, doc.name).as_dict())
post_process(out)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
path = os.path.join('..', path)
path = os.path.join("..", path)
with open(path, "w") as outfile:
outfile.write(frappe.as_json(out))
@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
def export_csv(doctype, path):
from frappe.core.doctype.data_export.exporter import export_data
with open(path, "wb") as csvfile:
export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
csvfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
if frappe.session.user != "Administrator":
raise frappe.PermissionError
if not os.path.exists(frappe.get_app_path(app, "fixtures")):
os.mkdir(frappe.get_app_path(app, "fixtures"))
export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")

View file

@ -1,31 +1,40 @@
let imports_in_progress = [];
frappe.listview_settings['Data Import'] = {
add_fields: ["import_status"],
has_indicator_for_draft: 1,
get_indicator: function(doc) {
let status = {
'Successful': [__("Success"), "green", "import_status,=,Successful"],
'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
'Failed': [__("Failed"), "red", "import_status,=,Failed"],
'Pending': [__("Pending"), "orange", "import_status,=,"]
}
if (doc.import_status) {
return status[doc.import_status];
}
if (doc.docstatus == 0) {
return status['Pending'];
}
return status['Pending'];
},
onload(listview) {
listview.page.set_title_sub(`
<span class="indicator blue">
<a class="text-muted" href="#List/Data Import Beta">${__('Try the new Data Import')}</a>
</span>
`);
}
frappe.realtime.on('data_import_progress', data => {
if (!imports_in_progress.includes(data.data_import)) {
imports_in_progress.push(data.data_import);
}
});
frappe.realtime.on('data_import_refresh', data => {
imports_in_progress = imports_in_progress.filter(
d => d !== data.data_import
);
listview.refresh();
});
},
get_indicator: function(doc) {
var colors = {
'Pending': 'orange',
'Partial Success': 'orange',
'Success': 'green',
'In Progress': 'orange',
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {
import_type(value) {
return {
'Insert New Records': __('Insert'),
'Update Existing Records': __('Update')
}[value];
}
},
hide_name_column: true
};

View file

@ -0,0 +1,257 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import (
display_fieldtypes,
no_value_fields,
table_fields as table_fieldtypes,
)
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
class Exporter:
def __init__(
self,
doctype,
export_fields=None,
export_data=False,
export_filters=None,
export_page_length=None,
file_type="CSV",
):
"""
Exports records of a DocType for use with Importer
:param doctype: Document Type to export
:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
:param export_data=False: Whether to export data as well
:param export_filters=None: The filters (dict or list) which is used to query the records
:param file_type: One of 'Excel' or 'CSV'
"""
self.doctype = doctype
self.meta = frappe.get_meta(doctype)
self.export_fields = export_fields
self.export_filters = export_filters
self.export_page_length = export_page_length
self.file_type = file_type
# this will contain the csv content
self.csv_array = []
# fields that get exported
self.exportable_fields = self.get_all_exportable_fields()
self.fields = self.serialize_exportable_fields()
self.add_header()
if export_data:
self.data = self.get_data_to_export()
else:
self.data = []
self.add_data()
def get_all_exportable_fields(self):
child_table_fields = [
df.fieldname for df in self.meta.fields if df.fieldtype in table_fieldtypes
]
meta = frappe.get_meta(self.doctype)
exportable_fields = frappe._dict({})
for key, fieldnames in self.export_fields.items():
if key == self.doctype:
# parent fields
exportable_fields[key] = self.get_exportable_fields(key, fieldnames)
elif key in child_table_fields:
# child fields
child_df = meta.get_field(key)
child_doctype = child_df.options
exportable_fields[key] = self.get_exportable_fields(child_doctype, fieldnames)
return exportable_fields
def serialize_exportable_fields(self):
fields = []
for key, exportable_fields in self.exportable_fields.items():
for _df in exportable_fields:
# make a copy of df dict to avoid reference mutation
if isinstance(_df, frappe.core.doctype.docfield.docfield.DocField):
df = _df.as_dict()
else:
df = _df.copy()
df.is_child_table_field = key != self.doctype
if df.is_child_table_field:
df.child_table_df = self.meta.get_field(key)
fields.append(df)
return fields
def get_exportable_fields(self, doctype, fieldnames):
meta = frappe.get_meta(doctype)
def is_exportable(df):
return df and df.fieldtype not in (display_fieldtypes + no_value_fields)
# add name field
name_field = frappe._dict(
{
"fieldtype": "Data",
"fieldname": "name",
"label": "ID",
"reqd": 1,
"parent": doctype,
}
)
fields = [meta.get_field(fieldname) for fieldname in fieldnames]
fields = [df for df in fields if is_exportable(df)]
if "name" in fieldnames:
fields = [name_field] + fields
return fields or []
def get_data_to_export(self):
frappe.permissions.can_export(self.doctype, raise_exception=True)
data_to_export = []
table_fields = [f for f in self.exportable_fields if f != self.doctype]
data = self.get_data_as_docs()
for doc in data:
rows = []
rows = self.add_data_row(self.doctype, None, doc, rows, 0)
if table_fields:
# add child table data
for f in table_fields:
for i, child_row in enumerate(doc[f]):
table_df = self.meta.get_field(f)
child_doctype = table_df.options
rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i)
data_to_export += rows
return data_to_export
def add_data_row(self, doctype, parentfield, doc, rows, row_idx):
if len(rows) < row_idx + 1:
rows.append([""] * len(self.fields))
row = rows[row_idx]
for i, df in enumerate(self.fields):
if df.parent == doctype:
if df.is_child_table_field and df.child_table_df.fieldname != parentfield:
continue
row[i] = doc.get(df.fieldname, "")
return rows
def get_data_as_docs(self):
def format_column_name(df):
return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)
filters = self.export_filters
if self.meta.is_nested_set():
order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
else:
order_by = "`tab{0}`.`creation` DESC".format(self.doctype)
parent_fields = [
format_column_name(df) for df in self.fields if df.parent == self.doctype
]
parent_data = frappe.db.get_list(
self.doctype,
filters=filters,
fields=["name"] + parent_fields,
limit_page_length=self.export_page_length,
order_by=order_by,
as_list=0,
)
parent_names = [p.name for p in parent_data]
child_data = {}
for key in self.exportable_fields:
if key == self.doctype:
continue
child_table_df = self.meta.get_field(key)
child_table_doctype = child_table_df.options
child_fields = ["name", "idx", "parent", "parentfield"] + list(
set(
[format_column_name(df) for df in self.fields if df.parent == child_table_doctype]
)
)
data = frappe.db.get_list(
child_table_doctype,
filters={
"parent": ("in", parent_names),
"parentfield": child_table_df.fieldname,
"parenttype": self.doctype,
},
fields=child_fields,
order_by="idx asc",
as_list=0,
)
child_data[key] = data
return self.merge_data(parent_data, child_data)
def merge_data(self, parent_data, child_data):
for doc in parent_data:
for table_field, table_rows in child_data.items():
doc[table_field] = [row for row in table_rows if row.parent == doc.name]
return parent_data
def add_header(self):
header = []
for df in self.fields:
is_parent = not df.is_child_table_field
if is_parent:
label = df.label
else:
label = "{0} ({1})".format(df.label, df.child_table_df.label)
if label in header:
# this label is already in the header,
# which means two fields with the same label
# add the fieldname to avoid clash
if is_parent:
label = "{0}".format(df.fieldname)
else:
label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname)
header.append(label)
self.csv_array.append(header)
def add_data(self):
self.csv_array += self.data
def get_csv_array(self):
return self.csv_array
def get_csv_array_for_export(self):
csv_array = self.csv_array
if not self.data:
# add 2 empty rows
csv_array += [[]] * 2
return csv_array
def build_response(self):
if self.file_type == "CSV":
self.build_csv_response()
elif self.file_type == "Excel":
self.build_xlsx_response()
def build_csv_response(self):
build_csv_response(self.get_csv_array_for_export(), self.doctype)
def build_xlsx_response(self):
build_xlsx_response(self.get_csv_array_for_export(), self.doctype)

View file

@ -1,267 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import display_fieldtypes, no_value_fields, table_fields
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
from .importer_new import INVALID_VALUES
class Exporter:
def __init__(
self,
doctype,
export_fields=None,
export_data=False,
export_filters=None,
export_page_length=None,
file_type="CSV",
):
"""
Exports records of a DocType for use with Importer
:param doctype: Document Type to export
:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
:param export_data=False: Whether to export data as well
:param export_filters=None: The filters (dict or list) which is used to query the records
:param file_type: One of 'Excel' or 'CSV'
"""
self.doctype = doctype
self.meta = frappe.get_meta(doctype)
self.export_fields = export_fields
self.export_filters = export_filters
self.export_page_length = export_page_length
self.file_type = file_type
# this will contain the csv content
self.csv_array = []
# fields that get exported
# can be All, Mandatory or User Selected Fields
self.fields = self.get_all_exportable_fields()
self.add_header()
if export_data:
self.data = self.get_data_to_export()
else:
self.data = []
self.add_data()
def get_all_exportable_fields(self):
return self.get_exportable_parent_fields() + self.get_exportable_children_fields()
def get_exportable_parent_fields(self):
parent_fields = self.get_exportable_fields(self.doctype)
# if autoname is based on field
# then merge ID and the field column title as "ID (Autoname Field)"
autoname = self.meta.autoname
if autoname and autoname.startswith("field:"):
fieldname = autoname[len("field:") :]
autoname_field = self.meta.get_field(fieldname)
if autoname_field:
name_field = parent_fields[0]
name_field.label = "ID ({})".format(autoname_field.label)
# remove the autoname field as it is a duplicate of ID field
parent_fields = [
df for df in parent_fields if df.fieldname != autoname_field.fieldname
]
return parent_fields
def get_exportable_children_fields(self):
child_table_fields = [df for df in self.meta.fields if df.fieldtype in table_fields]
if self.export_fields == "Mandatory":
child_table_fields = [df for df in child_table_fields if df.reqd]
children = [df.options for df in child_table_fields]
children_fields = []
for child in children:
children_fields += self.get_exportable_fields(child)
return children_fields
def get_exportable_fields(self, doctype):
meta = frappe.get_meta(doctype)
def is_exportable(df):
return df and df.fieldtype not in (display_fieldtypes + no_value_fields)
# filter out invalid fieldtypes
all_fields = [df for df in meta.fields if is_exportable(df)]
# add name field
name_field = frappe._dict(
{
"fieldtype": "Data",
"fieldname": "name",
"label": "ID",
"reqd": 1,
"parent": doctype,
}
)
all_fields = [name_field] + all_fields
if self.export_fields == "Mandatory":
fields = [df for df in all_fields if df.reqd]
if self.export_fields == "All":
fields = list(all_fields)
elif isinstance(self.export_fields, dict):
fields_to_export = self.export_fields.get(doctype, [])
fields = [meta.get_field(fieldname) for fieldname in fields_to_export]
fields = [df for df in fields if is_exportable(df)]
if 'name' in fields_to_export:
fields = [name_field] + fields
return fields or []
def get_data_to_export(self):
frappe.permissions.can_export(self.doctype, raise_exception=True)
def get_column_name(df):
return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)
fields = [get_column_name(df) for df in self.fields]
filters = self.export_filters
if self.meta.is_nested_set():
order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
else:
order_by = "`tab{0}`.`creation` DESC".format(self.doctype)
data = frappe.db.get_list(
self.doctype,
filters=filters,
fields=fields,
limit_page_length=self.export_page_length,
order_by=order_by,
as_list=1,
)
data = self.remove_duplicate_values(data)
data = self.remove_row_gaps(data)
data = self.remove_empty_rows(data)
# data = self.remove_values_from_name_column(data)
return data
def remove_duplicate_values(self, data):
out = []
doctypes = set([df.parent for df in self.fields])
def name_exists_in_column_before_row(name, column_index, row_index):
column_values = [row[column_index] for i, row in enumerate(data) if i < row_index]
return name in column_values
for i, row in enumerate(data):
# first row is fine
if i == 0:
out.append(row)
continue
row = list(row)
for doctype in doctypes:
name_index = self.get_name_column_index(doctype)
name = row[name_index]
column_indexes = self.get_column_indexes(doctype)
if name_exists_in_column_before_row(name, name_index, i):
# remove the values from the row
row = [None if i in column_indexes else d for i, d in enumerate(row)]
out.append(row)
return out
def remove_row_gaps(self, data):
doctypes = set([df.parent for df in self.fields if df.parent != self.doctype])
def get_nearest_empty_row_index(col_index, row_index):
col_values = [row[col_index] for row in data]
i = row_index - 1
while not col_values[i]:
i = i - 1
out = i + 1
if row_index != out:
return out
for i, row in enumerate(data):
# if this is the row that contains parent values then skip
if row[0]:
continue
for doctype in doctypes:
name_index = self.get_name_column_index(doctype)
name = row[name_index]
column_indexes = self.get_column_indexes(doctype)
if not name:
continue
row_index = get_nearest_empty_row_index(name_index, i)
if row_index:
for col_index in column_indexes:
data[row_index][col_index] = row[col_index]
row[col_index] = None
return data
# pylint: disable=R0201
def remove_empty_rows(self, data):
    """Drop rows in which every cell is an empty/invalid value."""
    def has_content(row):
        return any(value not in INVALID_VALUES for value in row)

    return [row for row in data if has_content(row)]
def remove_values_from_name_column(self, data):
    """Return rows with every "name" column's value replaced by ''.

    The positions of "name" columns are derived from self.fields.
    """
    name_columns = {
        index for index, df in enumerate(self.fields) if df.fieldname == "name"
    }
    return [
        ["" if index in name_columns else value for index, value in enumerate(row)]
        for row in data
    ]
def get_name_column_index(self, doctype):
    """Return the column index of ``doctype``'s "name" field, or -1 if absent."""
    matches = (
        index
        for index, df in enumerate(self.fields)
        if df.parent == doctype and df.fieldname == "name"
    )
    return next(matches, -1)
def get_column_indexes(self, doctype):
    """Return positions of every exported field belonging to ``doctype``."""
    indexes = []
    for position, df in enumerate(self.fields):
        if df.parent == doctype:
            indexes.append(position)
    return indexes
def add_header(self):
    """Append the header row: one label per exported field.

    Fields of the main doctype use their plain label; child-table
    fields are suffixed with the child doctype name in parentheses.
    """
    header = []
    for df in self.fields:
        if df.parent == self.doctype:
            header.append(df.label)
        else:
            header.append("{0} ({1})".format(df.label, df.parent))
    self.csv_array.append(header)
def add_data(self):
    """Append all exported data rows after the header row."""
    self.csv_array.extend(self.data)
def get_csv_array(self):
    """Return the accumulated rows (header first, then data)."""
    return self.csv_array
def get_csv_array_for_export(self):
    """Return the rows to be written into the downloaded file.

    For a blank template (no data) two empty rows are appended so the
    user has space to start entering values. Note this extends
    self.csv_array in place (same object is returned), matching the
    original behaviour.

    :return: list of rows
    """
    csv_array = self.csv_array

    if not self.data:
        # add 2 empty rows — as two DISTINCT lists. The previous
        # `[[]] * 2` appended the same list object twice, so mutating
        # one "empty" row would silently mutate the other.
        csv_array += [[], []]

    return csv_array
def build_response(self):
    """Send the export to the client in the requested file format.

    Dispatches on self.file_type ('CSV' or 'Excel'); any other value
    is a no-op, matching the original if/elif chain.
    """
    handler_name = {
        'CSV': 'build_csv_response',
        'Excel': 'build_xlsx_response',
    }.get(self.file_type)
    if handler_name:
        getattr(self, handler_name)()
def build_csv_response(self):
    # Delegate to the module-level build_csv_response helper (imported
    # elsewhere in this file) with the export rows and the doctype name.
    build_csv_response(self.get_csv_array_for_export(), self.doctype)
def build_xlsx_response(self):
    # Delegate to the module-level build_xlsx_response helper (imported
    # elsewhere in this file) with the export rows and the doctype name.
    build_xlsx_response(self.get_csv_array_for_export(), self.doctype)

View file

@ -0,0 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 ,child title again 2 ,22-09-2021 , ,
Test 2 ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 3 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Can't render this file because it contains an unexpected character in line 2 and column 54.

View file

@ -0,0 +1,2 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test 26 ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
Can't render this file because it contains an unexpected character in line 2 and column 56.

View file

@ -0,0 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test 5 ,test description ,1 ,2 ,"" , ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 , ,22-09-2021 , ,
,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 4 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Can't render this file because it contains an unexpected character in line 2 and column 55.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,23 +0,0 @@
/* eslint-disable */
// Disabled QUnit UI test template for the Data Import form.
// rename this file from _test_[name] to test_[name] to activate
// and remove above this line
QUnit.test("test: Data Import", function (assert) {
	let done = assert.async();

	// number of asserts
	assert.expect(1);

	frappe.run_serially([
		// insert a new Data Import
		() => frappe.tests.make('Data Import', [
			// values to be set
			{key: 'value'}
		]),
		() => {
			// placeholder check — replace 'key' with a real field
			assert.equal(cur_frm.doc.key, 'value');
		},
		() => done()
	]);
});

View file

@ -1,100 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and Contributors
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe, unittest
from frappe.core.doctype.data_export import exporter
from frappe.core.doctype.data_import import importer
from frappe.utils.csvutils import read_csv_content
# import frappe
import unittest
class TestDataImport(unittest.TestCase):
    """Tests for the legacy CSV data exporter and importer.

    Fix: several assertions used ``assertTrue(value, "expected")``.
    assertTrue's second argument is only a failure MESSAGE, so those
    checks merely tested truthiness; they are replaced with assertEqual
    to compare against the intended expected value.
    """

    def test_export(self):
        # A blank template should carry the doctype name in row 1.
        exporter.export_data("User", all_doctypes=True, template=True)
        content = read_csv_content(frappe.response.result)
        self.assertEqual(content[1][1], "User")

    def test_export_with_data(self):
        # Exporting with data should include the Administrator record.
        exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
        content = read_csv_content(frappe.response.result)
        self.assertEqual(content[1][1], "User")
        self.assertTrue('"Administrator"' in [c[1] for c in content if len(c) > 1])

    def test_export_with_all_doctypes(self):
        # all_doctypes also exports child tables such as "Has Role".
        exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True)
        content = read_csv_content(frappe.response.result)
        self.assertEqual(content[1][1], "User")
        self.assertTrue('"Administrator"' in [c[1] for c in content if len(c) > 1])
        self.assertEqual(content[13][0], "DocType:")
        self.assertEqual(content[13][1], "User")
        self.assertTrue("Has Role" in content[13])

    def test_import(self):
        # start from a clean slate
        if frappe.db.exists("Blog Category", "test-category"):
            frappe.delete_doc("Blog Category", "test-category")

        exporter.export_data("Blog Category", all_doctypes=True, template=True)
        content = read_csv_content(frappe.response.result)
        # fixed typo ("Test Cateogry") so the assertEqual below matches
        # the title actually inserted
        content.append(["", "test-category", "Test Category"])
        importer.upload(content)
        self.assertEqual(
            frappe.db.get_value("Blog Category", "test-category", "title"),
            "Test Category",
        )

        # export with data
        exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True)
        content = read_csv_content(frappe.response.result)

        # overwrite (column 3 holds the title)
        content[-1][3] = "New Title"
        importer.upload(content, overwrite=True)
        self.assertEqual(
            frappe.db.get_value("Blog Category", "test-category", "title"),
            "New Title",
        )

    def test_import_only_children(self):
        # Import rows into a child table (Has Role) of an existing User.
        user_email = "test_import_userrole@example.com"
        if frappe.db.exists("User", user_email):
            frappe.delete_doc("User", user_email, force=True)

        frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert()

        exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
        content = read_csv_content(frappe.response.result)
        content.append(["", "test_import_userrole@example.com", "Blogger"])

        importer.upload(content)
        user = frappe.get_doc("User", user_email)

        self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"}))
        self.assertEqual(user.get("roles")[0].role, "Blogger")

        # overwrite should replace, not append, the child rows
        exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
        content = read_csv_content(frappe.response.result)
        content.append(["", "test_import_userrole@example.com", "Website Manager"])

        importer.upload(content, overwrite=True)
        user = frappe.get_doc("User", user_email)

        self.assertEqual(len(user.get("roles")), 1)
        self.assertEqual(user.get("roles")[0].role, "Website Manager")

    def test_import_with_children(self):  # pylint: disable=R0201
        # Import a parent (Event) row appended to an exported template.
        if frappe.db.exists("Event", "EV00001"):
            frappe.delete_doc("Event", "EV00001")
        exporter.export_data("Event", all_doctypes="Yes", template=True)
        content = read_csv_content(frappe.response.result)
        content.append([None] * len(content[-2]))
        content[-1][1] = "__Test Event with children"
        content[-1][2] = "Private"
        content[-1][3] = "2014-01-01 10:00:00.000000"
        importer.upload(content)

        # raises if the document was not created
        frappe.get_doc("Event", {"subject": "__Test Event with children"})

    def test_excel_import(self):
        # Round-trip through the Excel template format.
        if frappe.db.exists("Event", "EV00001"):
            frappe.delete_doc("Event", "EV00001")
        exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel")

        from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
        content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent)
        content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0])
        importer.upload(content)
        self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name"))

View file

@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.core.doctype.data_import.test_importer import (
create_doctype_if_not_exists,
)
doctype_name = 'DocType for Export'
class TestExporter(unittest.TestCase):
    """Tests for the new Exporter (frappe.core.doctype.data_import.exporter)."""

    def setUp(self):
        # fixture DocType with two child tables (one attached twice)
        create_doctype_if_not_exists(doctype_name)

    def test_exports_specified_fields(self):
        """Export only the requested parent/child fields; verify header + rows."""
        if not frappe.db.exists(doctype_name, "Test"):
            doc = frappe.get_doc(
                doctype=doctype_name,
                title="Test",
                description="Test Description",
                table_field_1=[
                    {"child_title": "Child Title 1", "child_description": "Child Description 1"},
                    {"child_title": "Child Title 2", "child_description": "Child Description 2"},
                ],
                table_field_2=[
                    {"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"},
                ],
                table_field_1_again=[
                    {
                        "child_title": "Child Title 1 Again",
                        "child_description": "Child Description 1 Again",
                    },
                ],
            ).insert()
        else:
            doc = frappe.get_doc(doctype_name, "Test")

        e = Exporter(
            doctype_name,
            export_fields={
                doctype_name: ["title", "description", "number", "another_number"],
                "table_field_1": ["name", "child_title", "child_description"],
                "table_field_2": ["child_2_date", "child_2_number"],
                "table_field_1_again": [
                    "child_title",
                    "child_date",
                    "child_number",
                    "child_another_number",
                ],
            },
            export_data=True,
        )
        csv_array = e.get_csv_array()
        header_row = csv_array[0]

        # Parent fields use their bare label; child fields are suffixed
        # with the table label in parens. The expected entries
        # "another_number" and "table_field_1_again.child_another_number"
        # suggest a fieldname fallback for duplicate labels —
        # NOTE(review): confirm against the Exporter implementation.
        self.assertEqual(
            header_row,
            [
                "Title",
                "Description",
                "Number",
                "another_number",
                "ID (Table Field 1)",
                "Child Title (Table Field 1)",
                "Child Description (Table Field 1)",
                "Child 2 Date (Table Field 2)",
                "Child 2 Number (Table Field 2)",
                "Child Title (Table Field 1 Again)",
                "Child Date (Table Field 1 Again)",
                "Child Number (Table Field 1 Again)",
                "table_field_1_again.child_another_number",
            ],
        )

        table_field_1_row_1_name = doc.table_field_1[0].name
        table_field_1_row_2_name = doc.table_field_1[1].name
        # fmt: off
        # row 1 carries the parent values and the first child rows
        self.assertEqual(
            csv_array[1],
            ["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0]
        )
        # row 2 is a continuation row: parent columns blanked
        self.assertEqual(
            csv_array[2],
            ["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""],
        )
        # fmt: on
        self.assertEqual(len(csv_array), 3)

    def test_export_csv_response(self):
        """build_response() should populate frappe.response for a CSV download."""
        e = Exporter(
            doctype_name,
            export_fields={doctype_name: ["title", "description"]},
            export_data=True,
            file_type="CSV"
        )
        e.build_response()

        self.assertTrue(frappe.response['result'])
        self.assertEqual(frappe.response['doctype'], doctype_name)
        self.assertEqual(frappe.response['type'], "csv")

View file

@ -1,40 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter_new import Exporter
class TestExporter(unittest.TestCase):
def test_exports_mandatory_fields(self):
e = Exporter('Web Page', export_fields='Mandatory')
csv_array = e.get_csv_array()
header_row = csv_array[0]
self.assertEqual(header_row, ['ID', 'Title'])
def test_exports_all_fields(self):
e = Exporter('Web Page', export_fields='All')
csv_array = e.get_csv_array()
header = csv_array[0]
self.assertEqual(len(header), 37)
def test_exports_selected_fields(self):
export_fields = {
'Web Page': ['title', 'route', 'published']
}
e = Exporter('Web Page', export_fields=export_fields)
csv_array = e.get_csv_array()
header = csv_array[0]
self.assertEqual(header, ['Title', 'Route', 'Published'])
def test_exports_data(self):
e = Exporter('ToDo', export_fields='All', export_data=True)
todo_records = frappe.db.count('ToDo')
csv_array = e.get_csv_array()
self.assertEqual(len(csv_array), todo_records + 1)

View file

@ -0,0 +1,183 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import getdate
doctype_name = 'DocType for Import'
class TestImporter(unittest.TestCase):
    """Tests for the new Data Import flow driven by CSV fixture files."""

    def setUp(self):
        # fixture DocType (with two custom child DocTypes) created once
        create_doctype_if_not_exists(doctype_name)

    def test_data_import_from_file(self):
        """Import the sample CSV and spot-check parent and child values."""
        import_file = get_import_file('sample_import_file')
        data_import = self.get_importer(doctype_name, import_file)
        data_import.start_import()

        doc1 = frappe.get_doc(doctype_name, 'Test')
        doc2 = frappe.get_doc(doctype_name, 'Test 2')
        doc3 = frappe.get_doc(doctype_name, 'Test 3')

        self.assertEqual(doc1.description, 'test description')
        self.assertEqual(doc1.number, 1)
        self.assertEqual(doc1.table_field_1[0].child_title, 'child title')
        self.assertEqual(doc1.table_field_1[0].child_description, 'child description')
        self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2')
        self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2')
        self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child')
        # the sample CSV uses dd-mm-yyyy dates (e.g. 30-10-2019)
        self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30'))
        self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5)
        self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again')
        self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2')
        self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22'))
        self.assertEqual(doc2.description, 'test description 2')
        self.assertEqual(doc3.another_number, 5)

    def test_data_import_preview(self):
        """Preview of the sample file should expose 4 data rows, 15 columns."""
        import_file = get_import_file('sample_import_file')
        data_import = self.get_importer(doctype_name, import_file)
        preview = data_import.get_preview_from_template()

        self.assertEqual(len(preview.data), 4)
        self.assertEqual(len(preview.columns), 15)

    def test_data_import_without_mandatory_values(self):
        """Missing mandatory values should surface as per-row template warnings."""
        import_file = get_import_file('sample_import_file_without_mandatory')
        data_import = self.get_importer(doctype_name, import_file)
        data_import.start_import()
        data_import.reload()

        warnings = frappe.parse_json(data_import.template_warnings)
        self.assertEqual(warnings[0]['row'], 2)
        self.assertEqual(warnings[0]['message'], "<b>Child Title (Table Field 1)</b> is a mandatory field")
        self.assertEqual(warnings[1]['row'], 3)
        self.assertEqual(warnings[1]['message'], "<b>Child Title (Table Field 1 Again)</b> is a mandatory field")
        self.assertEqual(warnings[2]['row'], 4)
        self.assertEqual(warnings[2]['message'], "<b>Title</b> is a mandatory field")

    def test_data_import_update(self):
        """'Update Existing Records' should patch an existing document."""
        if not frappe.db.exists(doctype_name, 'Test 26'):
            frappe.get_doc(
                doctype=doctype_name,
                title='Test 26'
            ).insert()

        import_file = get_import_file('sample_import_file_for_update')
        data_import = self.get_importer(doctype_name, import_file, update=True)
        data_import.start_import()

        updated_doc = frappe.get_doc(doctype_name, 'Test 26')
        self.assertEqual(updated_doc.description, 'test description')
        self.assertEqual(updated_doc.table_field_1[0].child_title, 'child title')
        self.assertEqual(updated_doc.table_field_1[0].child_description, 'child description')
        self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again')

    def get_importer(self, doctype, import_file, update=False):
        # Helper: create and save a Data Import doc bound to import_file.
        data_import = frappe.new_doc('Data Import')
        data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records'
        data_import.reference_doctype = doctype
        data_import.import_file = import_file.file_url
        data_import.insert()
        return data_import
def create_doctype_if_not_exists(doctype_name, force=False):
    """Create the fixture DocType plus its two custom child DocTypes.

    :param doctype_name: name of the parent DocType to create
    :param force: if True, drop any existing fixture DocTypes first
    """
    if force:
        frappe.delete_doc_if_exists('DocType', doctype_name)
        frappe.delete_doc_if_exists('DocType', 'Child 1 of ' + doctype_name)
        frappe.delete_doc_if_exists('DocType', 'Child 2 of ' + doctype_name)

    if frappe.db.exists('DocType', doctype_name):
        return

    # Child Table 1
    table_1_name = 'Child 1 of ' + doctype_name
    frappe.get_doc({
        'doctype': 'DocType',
        'name': table_1_name,
        'module': 'Custom',
        'custom': 1,
        'istable': 1,
        'fields': [
            {'label': 'Child Title', 'fieldname': 'child_title', 'reqd': 1, 'fieldtype': 'Data'},
            {'label': 'Child Description', 'fieldname': 'child_description', 'fieldtype': 'Small Text'},
            {'label': 'Child Date', 'fieldname': 'child_date', 'fieldtype': 'Date'},
            {'label': 'Child Number', 'fieldname': 'child_number', 'fieldtype': 'Int'},
            # NOTE(review): duplicate label 'Child Number' appears deliberate —
            # the exporter tests expect a fieldname fallback for duplicate
            # labels; confirm before "fixing" these labels.
            {'label': 'Child Number', 'fieldname': 'child_another_number', 'fieldtype': 'Int'},
        ]
    }).insert()

    # Child Table 2
    table_2_name = 'Child 2 of ' + doctype_name
    frappe.get_doc({
        'doctype': 'DocType',
        'name': table_2_name,
        'module': 'Custom',
        'custom': 1,
        'istable': 1,
        'fields': [
            {'label': 'Child 2 Title', 'fieldname': 'child_2_title', 'reqd': 1, 'fieldtype': 'Data'},
            {'label': 'Child 2 Description', 'fieldname': 'child_2_description', 'fieldtype': 'Small Text'},
            {'label': 'Child 2 Date', 'fieldname': 'child_2_date', 'fieldtype': 'Date'},
            {'label': 'Child 2 Number', 'fieldname': 'child_2_number', 'fieldtype': 'Int'},
            # duplicate label, same rationale as above
            {'label': 'Child 2 Number', 'fieldname': 'child_2_another_number', 'fieldtype': 'Int'},
        ]
    }).insert()

    # Main Table: named by its title, with both child tables attached
    # (table 1 attached twice to exercise repeated child tables).
    frappe.get_doc({
        'doctype': 'DocType',
        'name': doctype_name,
        'module': 'Custom',
        'custom': 1,
        'autoname': 'field:title',
        'fields': [
            {'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'},
            {'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'},
            {'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'},
            {'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'},
            # duplicate label, same rationale as above
            {'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'},
            {'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name},
            {'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name},
            {'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name},
        ],
        'permissions': [
            {'role': 'System Manager'}
        ]
    }).insert()
def get_import_file(csv_file_name, force=False):
    """Return a private File document wrapping the named fixture CSV.

    :param csv_file_name: fixture file name without the .csv extension
    :param force: if True, delete any previously attached copy first
    """
    file_name = csv_file_name + '.csv'

    existing = frappe.db.exists('File', {'file_name': file_name})
    if force and existing:
        frappe.delete_doc_if_exists('File', existing)

    if frappe.db.exists('File', {'file_name': file_name}):
        return frappe.get_doc('File', {'file_name': file_name})

    # not attached yet: read the fixture from disk and save it as a File
    full_path = get_csv_file_path(file_name)
    file_doc = frappe.get_doc(
        doctype='File',
        content=frappe.read_file(full_path),
        file_name=file_name,
        is_private=1
    )
    file_doc.save(ignore_permissions=True)
    return file_doc
def get_csv_file_path(file_name):
    # Absolute path of a fixture CSV shipped with the data_import doctype.
    return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name)

View file

@ -1,78 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import datetime
import unittest
import frappe
from frappe.core.doctype.data_import.importer_new import Importer
# --- Inline CSV fixtures for the importer tests below ---

# template containing a fully empty row (should be skipped on import)
content_empty_rows = '''title,start_date,idx,show_title
,,,
est phasellus sit amet,5/20/2019,52,1
nibh in,7/29/2019,77,1
'''

# row missing the mandatory "title" value
content_mandatory_missing = '''title,start_date,idx,show_title
,5/20/2019,52,1
'''

# values needing type conversion (bool/int/date) on import
content_convert_value = '''title,start_date,idx,show_title
est phasellus sit amet,5/20/2019,52,True
'''

# contains a column that does not exist on the target doctype
content_invalid_column = '''title,start_date,idx,show_title,invalid_column
est phasellus sit amet,5/20/2019,52,True,invalid value
'''
class TestImporter(unittest.TestCase):
    """Tests for the importer_new.Importer payload pipeline."""

    def test_should_skip_empty_rows(self):
        # the all-empty CSV row must not produce a payload row
        i = self.get_importer('Web Page', content=content_empty_rows)
        payloads = i.get_payloads_for_import()
        row_to_be_imported = []
        for p in payloads:
            row_to_be_imported += [row[0] for row in p.rows]
        self.assertEqual(len(row_to_be_imported), 2)

    def test_should_throw_if_mandatory_is_missing(self):
        # a missing mandatory value surfaces as a warning, not an exception
        i = self.get_importer('Web Page', content=content_mandatory_missing)
        i.import_data()
        warning = i.warnings[0]
        self.assertTrue('Title is a mandatory field' in warning['message'])

    def test_should_convert_value_based_on_fieldtype(self):
        # CSV strings are coerced to the target fieldtypes
        i = self.get_importer('Web Page', content=content_convert_value)
        payloads = i.get_payloads_for_import()
        doc = payloads[0].doc
        self.assertEqual(type(doc['show_title']), int)
        self.assertEqual(type(doc['idx']), int)
        self.assertEqual(type(doc['start_date']), datetime.datetime)

    def test_should_ignore_invalid_columns(self):
        # unknown columns are dropped; known columns are kept
        i = self.get_importer('Web Page', content=content_invalid_column)
        payloads = i.get_payloads_for_import()
        doc = payloads[0].doc
        self.assertTrue('invalid_column' not in doc)
        self.assertTrue('title' in doc)

    def test_should_import_valid_template(self):
        # randomised title avoids collisions across test runs
        title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8))
        content_valid_content = '''title,start_date,idx,show_title
{0},5/20/2019,52,1'''.format(title)
        i = self.get_importer('Web Page', content=content_valid_content)
        import_log = i.import_data()
        log = import_log[0]
        self.assertTrue(log.success)
        doc = frappe.get_doc('Web Page', { 'title': title })
        self.assertEqual(frappe.utils.get_datetime_str(doc.start_date),
            frappe.utils.get_datetime_str('2019-05-20'))

    def get_importer(self, doctype, content):
        # Importer bound to a fresh, unsaved Data Import Beta document.
        data_import = frappe.new_doc('Data Import Beta')
        data_import.import_type = 'Insert New Records'
        i = Importer(doctype, content=content, data_import=data_import)
        return i

View file

@ -1,511 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Beta', {
setup(frm) {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import Beta', frm.doc.name);
frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
frm.set_query('reference_doctype', () => {
return {
filters: {
allow_import: 1
}
};
});
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
},
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
frm.disable_save();
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && frm.doc.import_file) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}.', message_args)
: __('Successfully imported {0} record out of {1}.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}.', message_args)
: __('Successfully updated {0} record out of {1}.', message_args);
}
}
frm.dashboard.set_headline(message);
},
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
download_template(frm) {
if (
frm.data_exporter &&
frm.data_exporter.doctype === frm.doc.reference_doctype
) {
frm.data_exporter.dialog.show();
set_export_records();
} else {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype
);
set_export_records();
});
}
function set_export_records() {
if (frm.doc.import_type === 'Insert New Records') {
frm.data_exporter.dialog.set_value('export_records', 'blank_template');
} else {
frm.data_exporter.dialog.set_value('export_records', 'all');
}
// Force ID field to be exported when updating existing records
let id_field = frm.data_exporter.dialog.get_field(
frm.doc.reference_doctype
).options[0];
if (id_field.value === 'name' && id_field.$checkbox) {
id_field.$checkbox
.find('input')
.prop('disabled', frm.doc.import_type === 'Update Existing Records');
}
}
},
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
import_file(frm) {
frm.toggle_display('section_import_preview', frm.doc.import_file);
if (!frm.doc.import_file) {
frm.get_field('import_preview').$wrapper.empty();
return;
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: { data_import: frm.doc.name },
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.remap_column = template_options.remap_column || {};
Object.assign(template_options.remap_column, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="alert border" data-row="${row_number}">
<div class="uppercase">${__('Row {0}', [row_number])}</div>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="alert border" data-col="${warning.col}">
<div class="uppercase">${header}</div>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-6 warnings text-muted">${html}</div>
</div>
`);
},
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
// Inform the user about link values that don't exist yet in their target
// doctypes. If every affected doctype has exactly one mandatory field,
// the missing records can be created automatically after confirmation;
// otherwise the user is only shown the list.
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});

View file

@ -1,170 +0,0 @@
{
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"section_import_preview",
"import_preview",
"import_warnings_section",
"template_warnings",
"import_warnings",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File"
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-02-17 15:35:04.386098",
"modified_by": "faris@erpnext.com",
"module": "Core",
"name": "Data Import Beta",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,119 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.exporter_new import Exporter
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.background_jobs import enqueue
from frappe import _
class DataImportBeta(Document):
	def validate(self):
		# If the import file was removed or replaced, previously computed
		# template options/warnings no longer apply — reset them.
		doc_before_save = self.get_doc_before_save()
		if not self.import_file or (
			doc_before_save and doc_before_save.import_file != self.import_file
		):
			self.template_options = ""
			self.template_warnings = ""

		if self.import_file:
			# validate template
			self.get_importer()

	def get_preview_from_template(self):
		"""Return preview data parsed from the attached file, or None when no file."""
		if not self.import_file:
			return

		i = self.get_importer()
		return i.get_data_for_import_preview()

	def start_import(self):
		"""Enqueue the background import job.

		Returns True when a new job was enqueued, False when a job with this
		document's name is already queued. Throws when the scheduler is
		inactive (the job would never run).
		"""
		if frappe.utils.scheduler.is_scheduler_inactive():
			frappe.throw(
				_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
			)

		enqueued_jobs = [d.get("job_name") for d in get_info()]

		if self.name not in enqueued_jobs:
			enqueue(
				start_import,
				queue="default",
				timeout=6000,
				event="data_import",
				job_name=self.name,
				data_import=self.name,
				# run inline in developer mode / tests instead of via a worker
				now=frappe.conf.developer_mode or frappe.flags.in_test,
			)
			return True

		return False

	def export_errored_rows(self):
		# Delegates to the Importer, which builds a file of the failed rows.
		return self.get_importer().export_errored_rows()

	def get_importer(self):
		# Constructing the Importer also validates the attached template.
		return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_preview_from_template(data_import):
	"""Whitelisted wrapper: return preview data for the named Data Import Beta doc."""
	return frappe.get_doc("Data Import Beta", data_import).get_preview_from_template()
@frappe.whitelist()
def form_start_import(data_import):
	"""Whitelisted wrapper: enqueue the import for the named Data Import Beta doc."""
	return frappe.get_doc("Data Import Beta", data_import).start_import()
def start_import(data_import):
	"""This method runs in background job.

	Loads the Data Import Beta document and runs the import. On failure the
	transaction is rolled back, the document status is set to Error and the
	traceback is logged. A realtime event is published either way so any
	open forms refresh.
	"""
	data_import = frappe.get_doc("Data Import Beta", data_import)
	try:
		i = Importer(data_import.reference_doctype, data_import=data_import)
		i.import_data()
	except Exception:
		# was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt
		# propagate instead of being recorded as an import error
		frappe.db.rollback()
		data_import.db_set("status", "Error")
		frappe.log_error(title=data_import.name)
		# persist the Error status written above
		frappe.db.commit()

	frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
	doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
	"""
	Download template from Exporter
		:param doctype: Document Type
		:param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
		:param export_records=None: One of 'all', 'by_filter', 'blank_template'
		:param export_filters: Filter dict
		:param file_type: File type to export into
	"""
	export_fields = frappe.parse_json(export_fields)
	export_filters = frappe.parse_json(export_filters)
	# a blank template carries no data rows
	export_data = export_records != "blank_template"
	e = Exporter(
		doctype,
		export_fields=export_fields,
		export_data=export_data,
		export_filters=export_filters,
		file_type=file_type,
		# "5_records" exports a small sample; None means no row limit
		export_page_length=5 if export_records == "5_records" else None,
	)
	e.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
	"""Download a template containing only the rows that failed to import."""
	data_import = frappe.get_doc("Data Import Beta", data_import_name)
	data_import.export_errored_rows()

View file

@ -1,40 +0,0 @@
// Names of Data Import docs currently running, tracked via realtime events
// so the list can show a transient "In Progress" indicator that is not
// persisted on the document itself.
let imports_in_progress = [];
frappe.listview_settings['Data Import Beta'] = {
onload(listview) {
// progress events mark a document as in-progress
frappe.realtime.on('data_import_progress', data => {
if (!imports_in_progress.includes(data.data_import)) {
imports_in_progress.push(data.data_import);
}
});
// refresh event means the background job finished (success or error)
frappe.realtime.on('data_import_refresh', data => {
imports_in_progress = imports_in_progress.filter(
d => d !== data.data_import
);
listview.refresh();
});
},
get_indicator: function(doc) {
var colors = {
'Pending': 'orange',
'Partial Success': 'orange',
'Success': 'green',
'In Progress': 'orange',
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
// note: the list filter still uses the persisted doc.status, not the
// transient "In Progress" label
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {
import_type(value) {
// shorten the long option labels for display in the list view
return {
'Insert New Records': __('Insert'),
'Update Existing Records': __('Update')
}[value];
}
},
hide_name_column: true
};

View file

@ -0,0 +1,324 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Legacy', {
// Set up doctype filters, force private attachments and subscribe to
// realtime progress events for this document.
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
// restrict reference_doctype to doctypes the user may import
frappe.call({
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
}
// NOTE(review): the trailing `,` below makes this a comma expression with
// the next statement; it parses, but was presumably meant to be `;` — confirm
}),
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
// update the dashboard progress bar as the background job reports progress
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
}
});
},
// preload meta for the selected doctype
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
}
},
// Rebuild the page chrome: indicator, custom buttons, primary action
// and the rendered import log.
refresh: function(frm) {
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import Legacy'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
}
}
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
}
// action cannot be changed once chosen
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
}
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
});
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
});
}
// allow starting only when a validated file is attached and no import
// has run yet (or the last one failed)
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
} else {
$(frm.fields_dict.import_log.wrapper).empty();
}
},
// map the selected action to the hidden insert_new/overwrite flags
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
}
frm.save();
},
// checkbox handlers below persist the change immediately
only_update: function(frm) {
frm.save();
},
submit_after_import: function(frm) {
frm.save();
},
skip_errors: function(frm) {
frm.save();
},
ignore_encoding_errors: function(frm) {
frm.save();
},
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
// render the stored log_details JSON via the "log_details" template
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
}
});
frappe.provide('frappe.data_import');
// Build the "Download Template" dialog: lets the user pick columns of the
// reference doctype (and its child tables), the file type, and whether to
// include data, then POSTs to the data export endpoint.
frappe.data_import.download_dialog = function(frm) {
var dialog;
// only plain value fields that are visible are offered as columns
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
// all MultiCheck controls in the dialog (fieldnames end with "_fields")
const get_doctype_checkbox_fields = () => {
return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
.map(df => dialog.fields_dict[df.fieldname]);
};
// checkbox options for the parent doctype; mandatory fields (and
// naming_series) are marked reqd/danger
const doctype_fields = get_fields(frm.doc.reference_doctype)
.map(df => {
let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
return {
label: df.label,
reqd: reqd,
danger: reqd,
value: df.fieldname,
checked: 1
};
});
let fields = [
{
"label": __("Select Columns"),
"fieldname": "select_columns",
"fieldtype": "Select",
"options": "All\nMandatory\nManually",
"reqd": 1,
// toggle the per-field checkboxes according to the chosen mode
"onchange": function() {
const fields = get_doctype_checkbox_fields();
fields.map(f => f.toggle(true));
if(this.value == 'Mandatory' || this.value == 'Manually') {
checkbox_toggle(true);
fields.map(multicheck_field => {
multicheck_field.options.map(option => {
if(!option.reqd) return;
$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
.prop('checked', false)
.trigger('click');
});
});
} else if(this.value == 'All'){
$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
.prop('disabled', true);
}
}
},
{
"label": __("File Type"),
"fieldname": "file_type",
"fieldtype": "Select",
"options": "Excel\nCSV",
"default": "Excel"
},
{
"label": __("Download with Data"),
"fieldname": "with_data",
"fieldtype": "Check",
"hidden": !frm.doc.overwrite,
"default": 1
},
{
"label": __("Select All"),
"fieldname": "select_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle();
}
},
{
"label": __("Unselect All"),
"fieldname": "unselect_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle(true);
}
},
{
"label": frm.doc.reference_doctype,
"fieldname": "doctype_fields",
"fieldtype": "MultiCheck",
"options": doctype_fields,
"columns": 2,
"hidden": 1
}
];
// one MultiCheck per child table of the reference doctype
const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
.map(df => {
return {
"label": df.options,
"fieldname": df.fieldname + '_fields',
"fieldtype": "MultiCheck",
"options": frappe.meta.get_docfields(df.options)
.filter(filter_fields)
.map(df => ({
label: df.label,
reqd: df.reqd ? 1 : 0,
value: df.fieldname,
checked: 1,
danger: df.reqd
})),
"columns": 2,
"hidden": 1
};
});
fields = fields.concat(child_table_fields);
dialog = new frappe.ui.Dialog({
title: __('Download Template'),
fields: fields,
// collect the checked columns and POST to the export endpoint
primary_action: function(values) {
var data = values;
if (frm.doc.reference_doctype) {
var export_params = () => {
let columns = {};
if(values.select_columns) {
columns = get_doctype_checkbox_fields().reduce((columns, field) => {
const options = field.get_checked_options();
columns[field.df.label] = options;
return columns;
}, {});
}
return {
doctype: frm.doc.reference_doctype,
parent_doctype: frm.doc.reference_doctype,
select_columns: JSON.stringify(columns),
with_data: frm.doc.overwrite && data.with_data,
all_doctypes: true,
file_type: data.file_type,
template: true
};
};
let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
open_url_post(get_template_url, export_params());
} else {
frappe.msgprint(__("Please select the Document Type."));
}
dialog.hide();
},
primary_action_label: __('Download')
});
// lay the two buttons out side by side
$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
.wrapAll('<div class="inline-buttons" />');
const button_container = $(dialog.body).find('.inline-buttons');
button_container.addClass('flex');
$(button_container).find('.frappe-control').map((index, button) => {
$(button).css({"margin-right": "1em"});
});
// (un)check every MultiCheck checkbox in the dialog
function checkbox_toggle(checked=false) {
$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
});
}
return dialog;
};

View file

@ -0,0 +1,218 @@
{
"actions": [],
"allow_copy": 1,
"creation": "2020-06-11 16:13:23.813709",
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"action",
"insert_new",
"overwrite",
"only_update",
"section_break_4",
"import_file",
"column_break_4",
"error_file",
"section_break_6",
"skip_errors",
"submit_after_import",
"ignore_encoding_errors",
"no_email",
"import_detail",
"import_status",
"show_only_errors",
"import_log",
"log_details",
"total_rows",
"amended_from"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"ignore_user_permissions": 1,
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1
},
{
"fieldname": "action",
"fieldtype": "Select",
"label": "Action",
"options": "Insert new records\nUpdate records",
"reqd": 1
},
{
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"label": "Insert new records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"label": "Update records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"label": "Don't create new records"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break"
},
{
"fieldname": "import_file",
"fieldtype": "Attach",
"label": "Attach file for Import"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"label": "Generated File"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldtype": "Section Break"
},
{
"default": "0",
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"label": "Skip rows with errors"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit after importing"
},
{
"default": "0",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"label": "Ignore encoding errors"
},
{
"default": "1",
"fieldname": "no_email",
"fieldtype": "Check",
"label": "Do not send Emails"
},
{
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_status",
"fieldtype": "Select",
"label": "Import Status",
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"read_only": 1
},
{
"allow_on_submit": 1,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"label": "Show only errors",
"no_copy": 1,
"print_hide": 1
},
{
"allow_on_submit": 1,
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"label": "Import Log"
},
{
"allow_on_submit": 1,
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"label": "Log Details",
"read_only": 1
},
{
"fieldname": "total_rows",
"fieldtype": "Int",
"hidden": 1,
"label": "Total Rows",
"read_only": 1
},
{
"fieldname": "amended_from",
"fieldtype": "Link",
"label": "Amended From",
"no_copy": 1,
"options": "Data Import Legacy",
"print_hide": 1,
"read_only": 1
}
],
"is_submittable": 1,
"links": [],
"max_attachments": 1,
"modified": "2020-06-11 16:13:23.813709",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Legacy",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"submit": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 1
}

View file

@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import_legacy.importer import upload
from frappe.utils.background_jobs import enqueue
class DataImportLegacy(Document):
	def autoname(self):
		# Fall back to a timestamp-based name when none was given.
		if not self.name:
			self.name = "Import on " +format_datetime(self.creation)

	def validate(self):
		if not self.import_file:
			self.db_set("total_rows", 0)
		if self.import_status == "In Progress":
			frappe.throw(_("Can't save the form as data import is in progress."))

		# validate the template just after the upload
		# if there is total_rows in the doc, it means that the template is already validated and error free
		if self.import_file and not self.total_rows:
			upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
@frappe.whitelist()
def get_importable_doctypes():
	"""Return the doctypes the current user can import (from the permission cache)."""
	return frappe.cache().hget("can_import", frappe.session.user)
@frappe.whitelist()
def import_data(data_import):
	"""Kick off a background import for the given Data Import Legacy doc name."""
	# mark as In Progress without touching `modified`
	frappe.db.set_value("Data Import Legacy", data_import, "import_status", "In Progress", update_modified=False)
	# reset the progress bar on any open forms
	frappe.publish_realtime("data_import_progress", {"progress": "0",
		"data_import": data_import, "reload": True}, user=frappe.session.user)

	from frappe.core.page.background_jobs.background_jobs import get_info
	enqueued_jobs = [d.get("job_name") for d in get_info()]

	# avoid double-enqueueing the same import
	if data_import not in enqueued_jobs:
		enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
			data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
		insert=False, submit=False, pre_process=None):
	"""Import a file or every file in a directory.

	.json files go through the module import machinery (emails muted for the
	duration); .csv files go through the legacy upload-based importer.
	"""
	if os.path.isdir(path):
		files = [os.path.join(path, f) for f in os.listdir(path)]
	else:
		files = [path]

	for f in files:
		if f.endswith(".json"):
			frappe.flags.mute_emails = True
			frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
			frappe.flags.mute_emails = False
			frappe.db.commit()
		elif f.endswith(".csv"):
			import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
			frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
	"""Read a CSV file from `path` and feed its rows to the legacy upload importer."""
	from frappe.utils.csvutils import read_csv_content
	print("Importing " + path)
	with open(path, "r") as infile:
		upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
			submit_after_import=submit, pre_process=pre_process)
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
	"""Export records of `doctype` as JSON to `path` (used for fixtures)."""

	def post_process(out):
		# strip keys that vary across sites so exported fixtures diff cleanly
		del_keys = ('modified_by', 'creation', 'owner', 'idx')
		for doc in out:
			for key in del_keys:
				if key in doc:
					del doc[key]
			for k, v in doc.items():
				if isinstance(v, list):
					# child rows additionally lose identity/status keys
					for child in v:
						for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
							if key in child:
								del child[key]

	out = []
	if name:
		# a single named record
		out.append(frappe.get_doc(doctype, name).as_dict())
	elif frappe.db.get_value("DocType", doctype, "issingle"):
		# single doctypes have exactly one document
		out.append(frappe.get_doc(doctype).as_dict())
	else:
		for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
			out.append(frappe.get_doc(doctype, doc.name).as_dict())
	post_process(out)

	# if the target directory doesn't exist, retry one level up
	dirname = os.path.dirname(path)
	if not os.path.exists(dirname):
		path = os.path.join('..', path)

	with open(path, "w") as outfile:
		outfile.write(frappe.as_json(out))
def export_csv(doctype, path):
	"""Write a CSV template (with data) for `doctype` to `path`."""
	from frappe.core.doctype.data_export.exporter import export_data
	with open(path, "wb") as csvfile:
		# export_data puts the generated CSV into frappe.response.result
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		csvfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
	"""Export all records of `doctype` as a JSON fixture into `app`/fixtures (Administrator only)."""
	if frappe.session.user != "Administrator":
		raise frappe.PermissionError
	if not os.path.exists(frappe.get_app_path(app, "fixtures")):
		os.mkdir(frappe.get_app_path(app, "fixtures"))
	export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")

View file

@ -0,0 +1,24 @@
frappe.listview_settings['Data Import Legacy'] = {
	add_fields: ["import_status"],
	has_indicator_for_draft: 1,
	get_indicator: function(doc) {
		// Map each import_status to its [label, colour, list-filter] triple.
		const indicators = {
			'Successful': [__("Success"), "green", "import_status,=,Successful"],
			'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
			'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
			'Failed': [__("Failed"), "red", "import_status,=,Failed"],
			'Pending': [__("Pending"), "orange", "import_status,=,"]
		};
		// Documents without a status yet (drafts included) show as Pending.
		return doc.import_status ? indicators[doc.import_status] : indicators['Pending'];
	}
};

View file

@ -0,0 +1,541 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
from six.moves import range
import requests
import frappe, json
import frappe.permissions
from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
from six import string_types
@frappe.whitelist()
def get_data_keys():
	"""Return the marker strings that delimit sections of the legacy import
	template (translated to the user's language)."""
	return frappe._dict({
		"data_separator": _('Start entering data below this line'),
		"main_table": _("Table") + ":",
		"parent_table": _("Parent Table") + ":",
		"columns": _("Column Name") + ":",
		"doctype": _("DocType") + ":"
	})
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
skip_errors = True, data_import_doc=None, validate_template=False, user=None):
"""upload data"""
# for translations
if user:
frappe.cache().hdel("lang", user)
frappe.set_user_lang(user)
if data_import_doc and isinstance(data_import_doc, string_types):
data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc)
if data_import_doc and from_data_import == "Yes":
no_email = data_import_doc.no_email
ignore_encoding_errors = data_import_doc.ignore_encoding_errors
update_only = data_import_doc.only_update
submit_after_import = data_import_doc.submit_after_import
overwrite = data_import_doc.overwrite
skip_errors = data_import_doc.skip_errors
else:
# extra input params
params = json.loads(frappe.form_dict.get("params") or '{}')
if params.get("submit_after_import"):
submit_after_import = True
if params.get("ignore_encoding_errors"):
ignore_encoding_errors = True
if not params.get("no_email"):
no_email = False
if params.get('update_only'):
update_only = True
if params.get('from_data_import'):
from_data_import = params.get('from_data_import')
if not params.get('skip_errors'):
skip_errors = params.get('skip_errors')
frappe.flags.in_import = True
frappe.flags.mute_emails = no_email
def get_data_keys_definition():
return get_data_keys()
def bad_template():
frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))
def check_data_length():
if not data:
frappe.throw(_("No data found in the file. Please reattach the new file with data."))
def get_start_row():
for i, row in enumerate(rows):
if row and row[0]==get_data_keys_definition().data_separator:
return i+1
bad_template()
def get_header_row(key):
return get_header_row_and_idx(key)[0]
def get_header_row_and_idx(key):
for i, row in enumerate(header):
if row and row[0]==key:
return row, i
return [], -1
def filter_empty_columns(columns):
empty_cols = list(filter(lambda x: x in ("", None), columns))
if empty_cols:
if columns[-1*len(empty_cols):] == empty_cols:
# filter empty columns if they exist at the end
columns = columns[:-1*len(empty_cols)]
else:
frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
raise_exception=1)
return columns
def make_column_map():
doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
if row_idx == -1: # old style
return
dt = None
for i, d in enumerate(doctype_row[1:]):
if d not in ("~", "-"):
if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
dt, parentfield = d, None
# xls format truncates the row, so it may not have more columns
if len(doctype_row) > i+2:
parentfield = doctype_row[i+2]
doctypes.append((dt, parentfield))
column_idx_to_fieldname[(dt, parentfield)] = {}
column_idx_to_fieldtype[(dt, parentfield)] = {}
if dt:
column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]
def get_doc(start_idx):
	# Assemble one parent document (plus its child rows) starting at
	# rows[start_idx]. Returns (doc_dict, attachment_urls, last_row_idx_read).
	if doctypes:
		# new-style template: multiple doctype sections per row
		doc = {}
		attachments = []
		last_error_row_idx = None
		for idx in range(start_idx, len(rows)):
			last_error_row_idx = idx # pylint: disable=W0612
			# keep consuming rows while they are child-only rows (main
			# columns empty) or this is the first row of the document
			if (not doc) or main_doc_empty(rows[idx]):
				for dt, parentfield in doctypes:
					d = {}
					for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
						try:
							fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
							fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]
							# skip unmapped columns and empty cells
							if not fieldname or not rows[idx][column_idx]:
								continue
							d[fieldname] = rows[idx][column_idx]
							# coerce cell text to the target fieldtype
							if fieldtype in ("Int", "Check"):
								d[fieldname] = cint(d[fieldname])
							elif fieldtype in ("Float", "Currency", "Percent"):
								d[fieldname] = flt(d[fieldname])
							elif fieldtype == "Date":
								if d[fieldname] and isinstance(d[fieldname], string_types):
									d[fieldname] = getdate(parse_date(d[fieldname]))
							elif fieldtype == "Datetime":
								if d[fieldname]:
									if " " in d[fieldname]:
										_date, _time = d[fieldname].split()
									else:
										_date, _time = d[fieldname], '00:00:00'
									# NOTE(review): parse_date is fed the full value
									# (date + time), not the split _date — verify this
									# is intended for values containing a time part
									_date = parse_date(d[fieldname])
									d[fieldname] = get_datetime(_date + " " + _time)
								else:
									d[fieldname] = None
							elif fieldtype in ("Image", "Attach Image", "Attach"):
								# added file to attachments list
								attachments.append(d[fieldname])
							elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
								# as fields can be saved in the number format(long type) in data import template
								d[fieldname] = cstr(d[fieldname])
						except IndexError:
							# xls rows may be truncated short of this column
							pass
					# scrub quotes from name and modified
					if d.get("name") and d["name"].startswith('"'):
						d["name"] = d["name"][1:-1]
					# only merge sections that carried at least one value
					if sum([0 if not val else 1 for val in d.values()]):
						d['doctype'] = dt
						if dt == doctype:
							doc.update(d)
						else:
							# child table row: link it to the parent being built
							if not overwrite and doc.get("name"):
								d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
			else:
				# next parent document begins here
				break
		return doc, attachments, last_error_row_idx
	else:
		# old-style template: one flat row maps straight onto `columns`
		doc = frappe._dict(zip(columns, rows[start_idx][1:]))
		doc['doctype'] = doctype
		return doc, [], None
# used in testing whether a row is empty or parent row or child row
# checked only 3 first columns since first two columns can be blank for example the case of
# importing the item variant where item code and item name will be blank.
def main_doc_empty(row):
	"""Return True when `row` carries no parent-document data.

	Only cells 1-3 are inspected: cell 0 (and sometimes 1-2) may be
	legitimately blank, e.g. item variants imported without item code
	and item name.
	"""
	if not row:
		return True
	return not any(row[i] for i in (3, 2, 1) if len(row) > i)
def validate_naming(doc):
	# Ensure the field that drives document naming is present in the row;
	# throws when it is missing and no controller autoname() can supply it.
	autoname = frappe.get_meta(doctype).autoname
	if autoname:
		if autoname[0:5] == 'field':
			# "field:<fieldname>" -> that field supplies the name
			autoname = autoname[6:]
		elif autoname == 'naming_series:':
			autoname = 'naming_series'
		else:
			# other naming schemes need nothing from the imported row
			return True
		if (autoname not in doc) or (not doc[autoname]):
			from frappe.model.base_document import get_controller
			# a controller-defined autoname() can still generate the name
			if not hasattr(get_controller(doctype), "autoname"):
				frappe.throw(_("{0} is a mandatory field").format(autoname))
	return True
users = frappe.db.sql_list("select name from tabUser")
def prepare_for_insert(doc):
	"""Normalise audit fields before insert.

	Rows migrated from another system may reference users that do not
	exist here; fall back to the importing session's user so the insert
	is not blocked.
	"""
	fallback = frappe.session.user
	if doc.owner not in users:
		doc.owner = fallback
	if doc.modified_by not in users:
		doc.modified_by = fallback
def is_valid_url(url):
	# Returns True when `url` answers an HTTP GET with status 200.
	# Site-relative file paths are resolved to absolute URLs first.
	is_valid = False
	if url.startswith("/files") or url.startswith("/private/files"):
		url = get_url(url)
	try:
		# NOTE(review): a full GET downloads the body just to validate, and
		# no timeout is set — a hung server stalls the whole import
		r = requests.get(url)
		is_valid = True if r.status_code == 200 else False
	except Exception:
		# network failure means "not valid", never an import crash
		pass
	return is_valid
def attach_file_to_doc(doctype, docname, file_url):
	"""Create a File attachment for (doctype, docname) pointing at file_url.

	Silently skips when file_url is empty, unreachable, or already
	attached to this document.
	"""
	# check if attachment is already available
	# check if the attachment link is relative or not
	if not file_url:
		return
	if not is_valid_url(file_url):
		return
	# parameterized query: file_url (and docname) come from imported data
	# and must not be interpolated into the SQL string
	files = frappe.db.sql("""select name from `tabFile`
		where attached_to_doctype=%(doctype)s and attached_to_name=%(docname)s
		and (file_url=%(file_url)s or thumbnail_url=%(file_url)s)""", {
			"doctype": doctype,
			"docname": docname,
			"file_url": file_url
		})
	if files:
		# file is already attached
		return
	_file = frappe.get_doc({
		"doctype": "File",
		"file_url": file_url,
		"attached_to_name": docname,
		"attached_to_doctype": doctype,
		"attached_to_field": 0,
		"folder": "Home/Attachments"})
	_file.save()
# header
filename, file_extension = ['','']
if not rows:
_file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
fcontent = _file.get_content()
filename, file_extension = _file.get_extension()
if file_extension == '.xlsx' and from_data_import == 'Yes':
from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)
elif file_extension == '.csv':
from frappe.utils.csvutils import read_csv_content
rows = read_csv_content(fcontent, ignore_encoding_errors)
else:
frappe.throw(_("Unsupported File Format"))
start_row = get_start_row()
header = rows[:start_row]
data = rows[start_row:]
try:
doctype = get_header_row(get_data_keys_definition().main_table)[1]
columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
except:
frappe.throw(_("Cannot change header content"))
doctypes = []
column_idx_to_fieldname = {}
column_idx_to_fieldtype = {}
if skip_errors:
data_rows_with_error = header
if submit_after_import and not cint(frappe.db.get_value("DocType",
doctype, "is_submittable")):
submit_after_import = False
parenttype = get_header_row(get_data_keys_definition().parent_table)
if len(parenttype) > 1:
parenttype = parenttype[1]
# check permissions
if not frappe.permissions.can_import(parenttype or doctype):
frappe.flags.mute_emails = False
return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}
# Throw expception in case of the empty data file
check_data_length()
make_column_map()
total = len(data)
if validate_template:
if total:
data_import_doc.total_rows = total
return True
if overwrite==None:
overwrite = params.get('overwrite')
# delete child rows (if parenttype)
parentfield = None
if parenttype:
parentfield = get_parent_field(doctype, parenttype)
if overwrite:
delete_child_rows(data, doctype)
import_log = []
def log(**kwargs):
	# Record one per-row import message: echo to stdout when run from the
	# console, otherwise accumulate for the response / Data Import doc.
	if via_console:
		# .encode for py2 console safety with non-ascii titles/messages
		print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
	else:
		import_log.append(kwargs)
def as_link(doctype, name):
	"""Render a document reference: plain "DocType: name" text on the
	console, an HTML link when running as a web request."""
	return "{0}: {1}".format(doctype, name) if via_console else getlink(doctype, name)
# publish realtime task update
def publish_progress(achieved, reload=False):
	# Push a percentage progress event to the initiating user's browser;
	# no-op unless this run is driven by a Data Import document.
	if data_import_doc:
		frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
			"data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)
error_flag = rollback_flag = False
batch_size = frappe.conf.data_import_batch_size or 1000
for batch_start in range(0, total, batch_size):
batch = data[batch_start:batch_start + batch_size]
for i, row in enumerate(batch):
# bypass empty rows
if main_doc_empty(row):
continue
row_idx = i + start_row
doc = None
publish_progress(i)
try:
doc, attachments, last_error_row_idx = get_doc(row_idx)
validate_naming(doc)
if pre_process:
pre_process(doc)
original = None
if parentfield:
parent = frappe.get_doc(parenttype, doc["parent"])
doc = parent.append(parentfield, doc)
parent.save()
else:
if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
original = frappe.get_doc(doctype, doc["name"])
original_name = original.name
original.update(doc)
# preserve original name for case sensitivity
original.name = original_name
original.flags.ignore_links = ignore_links
original.save()
doc = original
else:
if not update_only:
doc = frappe.get_doc(doc)
prepare_for_insert(doc)
doc.flags.ignore_links = ignore_links
doc.insert()
if attachments:
# check file url and create a File document
for file_url in attachments:
attach_file_to_doc(doc.doctype, doc.name, file_url)
if submit_after_import:
doc.submit()
# log errors
if parentfield:
log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
"link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
elif submit_after_import:
log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
"message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
elif original:
log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
"message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
elif not update_only:
log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
"message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
else:
log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
"message": "Document updation ignored", "indicator": "orange"})
except Exception as e:
error_flag = True
# build error message
if frappe.local.message_log:
err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
else:
err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))
error_trace = frappe.get_traceback()
if error_trace:
error_log_doc = frappe.log_error(error_trace)
error_link = get_absolute_url("Error Log", error_log_doc.name)
else:
error_link = None
log(**{
"row": row_idx + 1,
"title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
"message": err_msg,
"indicator": "red",
"link":error_link
})
# data with error to create a new file
# include the errored data in the last row as last_error_row_idx will not be updated for the last row
if skip_errors:
if last_error_row_idx == len(rows)-1:
last_error_row_idx = len(rows)
data_rows_with_error += rows[row_idx:last_error_row_idx]
else:
rollback_flag = True
finally:
frappe.local.message_log = []
start_row += batch_size
if rollback_flag:
frappe.db.rollback()
else:
frappe.db.commit()
frappe.flags.mute_emails = False
frappe.flags.in_import = False
log_message = {"messages": import_log, "error": error_flag}
if data_import_doc:
data_import_doc.log_details = json.dumps(log_message)
import_status = None
if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
import_status = "Partially Successful"
# write the file with the faulty row
file_name = 'error_' + filename + file_extension
if file_extension == '.xlsx':
from frappe.utils.xlsxutils import make_xlsx
xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
file_data = xlsx_file.getvalue()
else:
from frappe.utils.csvutils import to_csv
file_data = to_csv(data_rows_with_error)
_file = frappe.get_doc({
"doctype": "File",
"file_name": file_name,
"attached_to_doctype": "Data Import Legacy",
"attached_to_name": data_import_doc.name,
"folder": "Home/Attachments",
"content": file_data})
_file.save()
data_import_doc.error_file = _file.file_url
elif error_flag:
import_status = "Failed"
else:
import_status = "Successful"
data_import_doc.import_status = import_status
data_import_doc.save()
if data_import_doc.import_status in ["Successful", "Partially Successful"]:
data_import_doc.submit()
publish_progress(100, True)
else:
publish_progress(0, True)
frappe.db.commit()
else:
return log_message
def get_parent_field(doctype, parenttype):
	"""Return the fieldname of the Table field on `parenttype` whose
	options point at `doctype`.

	Shows a message and raises Exception when `parenttype` has no child
	table of `doctype` — child rows cannot be imported without one.
	"""
	parentfield = None
	# get parentfield
	if parenttype:
		for d in frappe.get_meta(parenttype).get_table_fields():
			if d.options==doctype:
				parentfield = d.fieldname
				break
		if not parentfield:
			# fixed format string: was "{0} for {0} ({1})" with three args,
			# which repeated "parentfield" and dropped the doctype from
			# the message shown to the user
			frappe.msgprint(_("Did not find {0} for {1} ({2})").format("parentfield", parenttype, doctype))
			raise Exception
	return parentfield
def delete_child_rows(rows, doctype):
	"""delete child rows for all parents"""
	# rows are raw template data rows; column 1 holds the parent name.
	# The table name is interpolated because SQL identifiers cannot be
	# bound as parameters — doctype comes from the validated template
	# header, not free user text; the parent value IS bound as %s.
	# NOTE(review): rows shorter than 2 cells would raise IndexError on
	# r[1] — confirm upstream guarantees row length here.
	for p in list(set([r[1] for r in rows])):
		if p:
			frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p)

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestDataImportLegacy(unittest.TestCase):
pass

View file

@ -13,8 +13,8 @@
"fieldname",
"precision",
"length",
"show_days",
"show_seconds",
"hide_days",
"hide_seconds",
"reqd",
"search_index",
"in_list_view",
@ -453,18 +453,18 @@
"fieldtype": "Column Break"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_days",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Show Days"
"label": "Hide Days"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_seconds",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Show Seconds"
"label": "Hide Seconds"
},
{
"default": "0",
@ -477,7 +477,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2020-05-15 09:06:25.224411",
"modified": "2020-02-06 09:06:25.224413",
"modified_by": "Administrator",
"module": "Core",
"name": "DocField",

View file

@ -406,9 +406,13 @@ class DocType(Document):
with open(fname, 'r') as f:
code = f.read()
with open(fname, 'w') as f:
file_content = code.replace(old, new) # replace str with full str (js controllers)
file_content = file_content.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
if fname.endswith('.js'):
file_content = code.replace(old, new) # replace str with full str (js controllers)
elif fname.endswith('.py'):
file_content = code.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
f.write(file_content)
# updating json file with new name
@ -688,6 +692,9 @@ def validate_fields(meta):
def check_link_table_options(docname, d):
if frappe.flags.in_patch: return
if frappe.flags.in_fixtures: return
if d.fieldtype in ("Link",) + table_fields:
if not d.options:
frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError)
@ -908,6 +915,8 @@ def validate_fields(meta):
frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True)
def check_child_table_option(docfield):
if frappe.flags.in_fixtures: return
if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return
doctype = docfield.options

View file

@ -1,4 +1,5 @@
{
"actions": [],
"allow_import": 1,
"creation": "2012-12-12 11:19:22",
"doctype": "DocType",
@ -63,7 +64,8 @@
"fieldname": "is_home_folder",
"fieldtype": "Check",
"hidden": 1,
"label": "Is Home Folder"
"label": "Is Home Folder",
"search_index": 1
},
{
"default": "0",
@ -172,7 +174,8 @@
],
"icon": "fa fa-file",
"idx": 1,
"modified": "2019-08-30 19:46:20.796453",
"links": [],
"modified": "2020-06-28 12:21:30.772386",
"modified_by": "Administrator",
"module": "Core",
"name": "File",

View file

@ -100,26 +100,26 @@ class File(Document):
self.validate_file()
self.generate_content_hash()
self.validate_url()
if frappe.db.exists('File', {'name': self.name, 'is_folder': 0}):
old_file_url = self.file_url
if not self.is_folder and (self.is_private != self.db_get('is_private')):
private_files = frappe.get_site_path('private', 'files')
public_files = frappe.get_site_path('public', 'files')
file_name = self.file_url.split('/')[-1]
if not self.is_private:
shutil.move(os.path.join(private_files, self.file_name),
os.path.join(public_files, self.file_name))
shutil.move(os.path.join(private_files, file_name),
os.path.join(public_files, file_name))
self.file_url = "/files/{0}".format(self.file_name)
self.file_url = "/files/{0}".format(file_name)
else:
shutil.move(os.path.join(public_files, self.file_name),
os.path.join(private_files, self.file_name))
shutil.move(os.path.join(public_files, file_name),
os.path.join(private_files, file_name))
self.file_url = "/private/files/{0}".format(self.file_name)
self.file_url = "/private/files/{0}".format(file_name)
update_existing_file_docs(self)
# update documents image url with new file url
if self.attached_to_doctype and self.attached_to_name:
@ -135,6 +135,8 @@ class File(Document):
frappe.db.set_value(self.attached_to_doctype, self.attached_to_name,
self.attached_to_field, self.file_url)
self.validate_url()
if self.file_url and (self.is_private != self.file_url.startswith('/private')):
frappe.throw(_('Invalid file URL. Please contact System Administrator.'))
@ -182,13 +184,7 @@ class File(Document):
if duplicate_file:
duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name)
if duplicate_file_doc.exists_on_disk():
# if it is attached to a document then throw DuplicateEntryError
if self.attached_to_doctype and self.attached_to_name:
self.duplicate_entry = duplicate_file.name
frappe.throw(_("Same file has already been attached to the record"),
frappe.DuplicateEntryError)
# else just use the url, to avoid uploading a duplicate
else:
# just use the url, to avoid uploading a duplicate
self.file_url = duplicate_file.file_url
def set_file_name(self):
@ -714,7 +710,12 @@ def remove_all(dt, dn, from_delete=False):
try:
for fid in frappe.db.sql_list("""select name from `tabFile` where
attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)):
remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
if from_delete:
# If deleting a doc, directly delete files
frappe.delete_doc("File", fid, ignore_permissions=True)
else:
# Removes file and adds a comment in the document it is attached to
remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
except Exception as e:
if e.args[0]!=1054: raise # (temp till for patched)
@ -904,3 +905,20 @@ def get_files_in_folder(folder):
{ 'folder': folder },
['name', 'file_name', 'file_url', 'is_folder', 'modified']
)
def update_existing_file_docs(doc):
# Update is private and file url of all file docs that point to the same file
frappe.db.sql("""
UPDATE `tabFile`
SET
file_url = %(file_url)s,
is_private = %(is_private)s
WHERE
content_hash = %(content_hash)s
and name != %(file_name)s
""", dict(
file_url=doc.file_url,
is_private=doc.is_private,
content_hash=doc.content_hash,
file_name=doc.name
))

View file

@ -294,4 +294,37 @@ class TestFile(unittest.TestCase):
folder = frappe.get_doc("File", "Home/Test Folder 1/Test Folder 3")
self.assertRaises(frappe.ValidationError, folder.delete)
def test_same_file_url_update(self):
attached_to_doctype1, attached_to_docname1 = make_test_doc()
attached_to_doctype2, attached_to_docname2 = make_test_doc()
file1 = frappe.get_doc({
"doctype": "File",
"file_name": 'file1.txt',
"attached_to_doctype": attached_to_doctype1,
"attached_to_name": attached_to_docname1,
"is_private": 1,
"content": test_content1}).insert()
file2 = frappe.get_doc({
"doctype": "File",
"file_name": 'file2.txt',
"attached_to_doctype": attached_to_doctype2,
"attached_to_name": attached_to_docname2,
"is_private": 1,
"content": test_content1}).insert()
self.assertEqual(file1.is_private, file2.is_private, 1)
self.assertEqual(file1.file_url, file2.file_url)
self.assertTrue(os.path.exists(file1.get_full_path()))
file1.is_private = 0
file1.save()
file2 = frappe.get_doc('File', file2.name)
self.assertEqual(file1.is_private, file2.is_private, 0)
self.assertEqual(file1.file_url, file2.file_url)
self.assertTrue(os.path.exists(file2.get_full_path()))

View file

@ -12,7 +12,7 @@ class InstalledApplications(Document):
for app in frappe.utils.get_installed_apps_info():
self.append("installed_applications", {
"app_name": app.get("app_name"),
"app_version": app.get("version"),
"git_branch": app.get("branch")
"app_version": app.get("version") or "UNVERSIONED",
"git_branch": app.get("branch") or "UNVERSIONED"
})
self.save()

View file

@ -42,6 +42,10 @@ class ModuleDef(Document):
def on_trash(self):
"""Delete module name from modules.txt"""
if frappe.flags.in_uninstall:
return
modules = None
if frappe.local.module_app.get(frappe.scrub(self.name)):
with open(frappe.get_app_path(self.app_name, "modules.txt"), "r") as f:

View file

@ -1,6 +1,6 @@
frappe.ui.form.on('Report', {
refresh: function(frm) {
if(!frappe.boot.developer_mode && frappe.session.user !== 'Administrator') {
if (frm.doc.is_standard && !frappe.boot.developer_mode) {
// make the document read-only
frm.set_read_only();
}

View file

@ -22,16 +22,28 @@ class Role(Document):
frappe.db.sql("delete from `tabHas Role` where role = %s", self.name)
frappe.clear_cache()
def on_update(self):
'''update system user desk access if this has changed in this update'''
if frappe.flags.in_install: return
if self.has_value_changed('desk_access'):
for user_name in get_users(self.name):
user = frappe.get_doc('User', user_name)
user_type = user.user_type
user.set_system_user()
if user_type != user.user_type:
user.save()
# Get email addresses of all users that have been assigned this role
def get_emails_from_role(role):
emails = []
users = frappe.get_list("Has Role", filters={"role": role, "parenttype": "User"},
fields=["parent"])
for user in users:
user_email, enabled = frappe.db.get_value("User", user.parent, ["email", "enabled"])
for user in get_users(role):
user_email, enabled = frappe.db.get_value("User", user, ["email", "enabled"])
if enabled and user_email not in ["admin@example.com", "guest@example.com"]:
emails.append(user_email)
return emails
return emails
def get_users(role):
return [d.parent for d in frappe.get_all("Has Role", filters={"role": role, "parenttype": "User"},
fields=["parent"])]

View file

@ -23,3 +23,28 @@ class TestUser(unittest.TestCase):
frappe.get_doc("User", "test@example.com").add_roles("_Test Role 3")
self.assertTrue("_Test Role 3" in frappe.get_roles("test@example.com"))
def test_change_desk_access(self):
'''if we change desk acecss from role, remove from user'''
frappe.delete_doc_if_exists('User', 'test-user-for-desk-access@example.com')
frappe.delete_doc_if_exists('Role', 'desk-access-test')
user = frappe.get_doc(dict(
doctype='User',
email='test-user-for-desk-access@example.com',
first_name='test')).insert()
role = frappe.get_doc(dict(
doctype = 'Role',
role_name = 'desk-access-test',
desk_access = 0
)).insert()
user.add_roles(role.name)
user.save()
self.assertTrue(user.user_type=='Website User')
role.desk_access = 1
role.save()
user.reload()
self.assertTrue(user.user_type=='System User')
role.desk_access = 0
role.save()
user.reload()
self.assertTrue(user.user_type=='Website User')

View file

@ -42,7 +42,7 @@ class ServerScript(Document):
@frappe.whitelist()
def setup_scheduler_events(script_name, frequency):
method = frappe.scrub(script_name) + '_' + frequency.lower()
method = frappe.scrub('{0}-{1}'.format(script_name, frequency))
scheduled_script = frappe.db.get_value('Scheduled Job Type',
dict(method=method))

View file

@ -28,8 +28,7 @@ def get_session_default_values():
@frappe.whitelist()
def set_session_default_values(default_values):
if not frappe.flags.in_test:
default_values = json.loads(default_values)
default_values = frappe.parse_json(default_values)
for entry in default_values:
try:
frappe.defaults.set_user_default(entry, default_values.get(entry))

View file

@ -59,6 +59,7 @@
"column_break_18",
"disable_standard_email_footer",
"hide_footer_in_auto_email_reports",
"attach_view_link",
"chat",
"enable_chat",
"use_socketio_to_upload_file"
@ -422,12 +423,18 @@
"fieldname": "enable_onboarding",
"fieldtype": "Check",
"label": "Enable Onboarding"
},
{
"default": "1",
"fieldname": "attach_view_link",
"fieldtype": "Check",
"label": "Send document Web View link in email"
}
],
"icon": "fa fa-cog",
"issingle": 1,
"links": [],
"modified": "2020-05-01 19:21:15.496065",
"modified": "2020-07-02 16:13:00.166382",
"modified_by": "Administrator",
"module": "Core",
"name": "System Settings",

View file

@ -4,7 +4,7 @@
from __future__ import unicode_literals, print_function
import frappe
from frappe.model.document import Document
from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
from frappe.utils import cint, flt, has_gravatar, escape_html, format_datetime, now_datetime, get_formatted_email, today
from frappe import throw, msgprint, _
from frappe.utils.password import update_password as _update_password
from frappe.desk.notifications import clear_notifications
@ -770,7 +770,7 @@ def sign_up(email, full_name, redirect_to):
user = frappe.get_doc({
"doctype":"User",
"email": email,
"first_name": full_name,
"first_name": escape_html(full_name),
"enabled": 1,
"new_password": random_string(10),
"user_type": "Website User"
@ -811,6 +811,7 @@ def reset_password(user):
frappe.clear_messages()
return 'not found'
@frappe.whitelist()
def user_query(doctype, txt, searchfield, start, page_len, filters):
from frappe.desk.reportview import get_match_cond
@ -841,11 +842,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters):
def get_total_users():
"""Returns total no. of system users"""
return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
FROM `tabUser`
WHERE `enabled` = 1
AND `user_type` = 'System User'
AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]
AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0])
def get_system_users(exclude_users=None, limit=None):
if not exclude_users:

View file

@ -21,6 +21,17 @@ class Version(Document):
else:
return False
def for_insert(self, doc):
updater_reference = doc.flags.updater_reference
data = {
'creation': doc.creation,
'updater_reference': updater_reference,
'created_by': doc.owner
}
self.ref_doctype = doc.doctype
self.docname = doc.name
self.data = frappe.as_json(data)
def get_data(self):
return json.loads(self.data)

View file

@ -172,19 +172,26 @@ class Dashboard {
set_dropdown() {
this.page.clear_menu();
this.page.add_menu_item('Edit...', () => {
this.page.add_menu_item(__('Edit'), () => {
frappe.set_route('Form', 'Dashboard', frappe.dashboard.dashboard_name);
}, 1);
});
this.page.add_menu_item('New...', () => {
this.page.add_menu_item(__('New'), () => {
frappe.new_doc('Dashboard');
}, 1);
});
frappe.db.get_list("Dashboard").then(dashboards => {
this.page.add_menu_item(__('Refresh All'), () => {
this.chart_group &&
this.chart_group.widgets_list.forEach(chart => chart.refresh());
this.number_card_group &&
this.number_card_group.widgets_list.forEach(card => card.render_card());
});
frappe.db.get_list('Dashboard').then(dashboards => {
dashboards.map(dashboard => {
let name = dashboard.name;
if(name != this.dashboard_name){
this.page.add_menu_item(name, () => frappe.set_route("dashboard", name));
this.page.add_menu_item(name, () => frappe.set_route("dashboard", name), 1);
}
});
});

View file

@ -5,23 +5,23 @@ from __future__ import unicode_literals
import frappe
from frappe import _, throw
import frappe.utils.user
from frappe.permissions import check_admin_or_system_manager
from frappe.permissions import check_admin_or_system_manager, rights
from frappe.model import data_fieldtypes
def execute(filters=None):
user, doctype, show_permissions = filters.get("user"), filters.get("doctype"), filters.get("show_permissions")
if not validate(user, doctype): return [], []
columns, fields = get_columns_and_fields(doctype)
data = frappe.get_list(doctype, fields=fields, as_list=True, user=user)
if show_permissions:
columns = columns + ["Read", "Write", "Create", "Delete", "Submit", "Cancel", "Amend", "Print", "Email",
"Report", "Import", "Export", "Share"]
columns = columns + [frappe.unscrub(right) + ':Check:80' for right in rights]
data = list(data)
for i,item in enumerate(data):
temp = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, item[0]), False,user)
data[i] = item+(temp.get("read"),temp.get("write"),temp.get("create"),temp.get("delete"),temp.get("submit"),temp.get("cancel"),temp.get("amend"),temp.get("print"),temp.get("email"),temp.get("report"),temp.get("import"),temp.get("export"),temp.get("share"),)
for i, doc in enumerate(data):
permission = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, doc[0]), user)
data[i] = doc + tuple(permission.get(right) for right in rights)
return columns, data

View file

@ -16,8 +16,8 @@
"column_break_6",
"fieldtype",
"precision",
"show_seconds",
"show_days",
"hide_seconds",
"hide_days",
"options",
"fetch_from",
"fetch_if_empty",
@ -383,22 +383,18 @@
"label": "In Preview"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_seconds",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Show Seconds",
"show_days": 1,
"show_seconds": 1
"label": "Hide Seconds"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_days",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Show Days",
"show_days": 1,
"show_seconds": 1
"label": "Hide Days"
},
{
"default": "0",
@ -411,7 +407,7 @@
"icon": "fa fa-glass",
"idx": 1,
"links": [],
"modified": "2020-05-15 23:43:00.123572",
"modified": "2020-02-06 23:43:00.123575",
"modified_by": "Administrator",
"module": "Custom",
"name": "Custom Field",

View file

@ -31,6 +31,13 @@ class CustomField(Document):
# fieldnames should be lowercase
self.fieldname = self.fieldname.lower()
def before_insert(self):
meta = frappe.get_meta(self.dt, cached=False)
fieldnames = [df.fieldname for df in meta.get("fields")]
if self.fieldname in fieldnames:
frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt))
def validate(self):
meta = frappe.get_meta(self.dt, cached=False)
fieldnames = [df.fieldname for df in meta.get("fields")]
@ -46,9 +53,6 @@ class CustomField(Document):
if not self.fieldname:
frappe.throw(_("Fieldname not set for Custom Field"))
if self.fieldname in fieldnames:
frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt))
if self.get('translatable', 0) and not supports_translation(self.fieldtype):
self.translatable = 0
@ -68,6 +72,11 @@ class CustomField(Document):
frappe.db.updatedb(self.dt)
def on_trash(self):
#check if Admin owned field
if self.owner == 'Administrator' and frappe.session.user != 'Administrator':
frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format(
frappe.bold(self.label)))
# delete property setter entries
frappe.db.sql("""\
DELETE FROM `tabProperty Setter`

View file

@ -77,7 +77,9 @@ docfield_properties = {
'allow_bulk_edit': 'Check',
'auto_repeat': 'Link',
'allow_in_quick_entry': 'Check',
'hide_border': 'Check'
'hide_border': 'Check',
'hide_days': 'Check',
'hide_seconds': 'Check'
}
allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'),

View file

@ -11,8 +11,8 @@
"label",
"fieldtype",
"fieldname",
"show_seconds",
"show_days",
"hide_seconds",
"hide_days",
"reqd",
"unique",
"in_list_view",
@ -393,22 +393,18 @@
"label": "In Preview"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_seconds",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Show Seconds",
"show_days": 1,
"show_seconds": 1
"label": "Hide Seconds"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_days",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Show Days",
"show_days": 1,
"show_seconds": 1
"label": "Hide Days"
},
{
"default": "0",
@ -421,7 +417,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2020-05-15 23:45:46.810869",
"modified": "2020-06-02 23:45:46.810868",
"modified_by": "Administrator",
"module": "Custom",
"name": "Customize Form Field",

View file

@ -49,7 +49,7 @@ class DbManager:
host = self.get_current_host()
if frappe.conf.get('rds_db', 0) == 1:
self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE ON `%s`.* TO '%s'@'%s';" % (target, user, host))
self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES ON `%s`.* TO '%s'@'%s';" % (target, user, host))
else:
self.db.sql("GRANT ALL PRIVILEGES ON `%s`.* TO '%s'@'%s';" % (target, user, host))

View file

@ -64,6 +64,8 @@ CREATE TABLE `tabDocField` (
`length` int(11) NOT NULL DEFAULT 0,
`translatable` int(1) NOT NULL DEFAULT 0,
`hide_border` int(1) NOT NULL DEFAULT 0,
`hide_days` int(1) NOT NULL DEFAULT 0,
`hide_seconds` int(1) NOT NULL DEFAULT 0,
PRIMARY KEY (`name`),
KEY `parent` (`parent`),
KEY `label` (`label`),

View file

@ -82,5 +82,7 @@ class MariaDBTable(DBTable):
fieldname = str(e).split("'")[-2]
frappe.throw(_("{0} field cannot be set as unique in {1}, as there are non-unique existing values").format(
fieldname, self.table_name))
elif e.args[0]==1067:
frappe.throw(str(e.args[1]))
else:
raise e

View file

@ -64,6 +64,8 @@ CREATE TABLE "tabDocField" (
"length" bigint NOT NULL DEFAULT 0,
"translatable" smallint NOT NULL DEFAULT 0,
"hide_border" smallint NOT NULL DEFAULT 0,
"hide_days" smallint NOT NULL DEFAULT 0,
"hide_seconds" smallint NOT NULL DEFAULT 0,
PRIMARY KEY ("name")
) ;

View file

@ -1,7 +1,7 @@
import frappe, subprocess, os
from six.moves import input
def setup_database(force, source_sql, verbose):
def setup_database(force, source_sql=None, verbose=False):
root_conn = get_root_connection()
root_conn.commit()
root_conn.sql("DROP DATABASE IF EXISTS `{0}`".format(frappe.conf.db_name))
@ -16,10 +16,12 @@ def setup_database(force, source_sql, verbose):
subprocess_env = os.environ.copy()
subprocess_env['PGPASSWORD'] = str(frappe.conf.db_password)
# bootstrap db
if not source_sql:
source_sql = os.path.join(os.path.dirname(__file__), 'framework_postgres.sql')
subprocess.check_output([
'psql', frappe.conf.db_name, '-h', frappe.conf.db_host or 'localhost', '-U',
frappe.conf.db_name, '-f',
os.path.join(os.path.dirname(__file__), 'framework_postgres.sql')
frappe.conf.db_name, '-f', source_sql
], env=subprocess_env)
frappe.connect()

View file

@ -29,31 +29,56 @@ def handle_not_exist(fn):
class Workspace:
def __init__(self, page_name):
def __init__(self, page_name, minimal=False):
self.page_name = page_name
self.extended_cards = []
self.extended_charts = []
self.extended_shortcuts = []
self.user = frappe.get_user()
self.allowed_modules = self.get_cached_value('user_allowed_modules', self.get_allowed_modules)
self.allowed_modules = self.get_cached('user_allowed_modules', self.get_allowed_modules)
self.doc = self.get_page_for_user()
if self.doc.module not in self.allowed_modules:
raise frappe.PermissionError
self.can_read = self.get_cached_value('user_perm_can_read', self.get_can_read_items)
self.can_read = self.get_cached('user_perm_can_read', self.get_can_read_items)
self.allowed_pages = get_allowed_pages(cache=True)
self.allowed_reports = get_allowed_reports(cache=True)
self.onboarding_doc = self.get_onboarding_doc()
self.onboarding = None
self.table_counts = get_table_with_counts()
if not minimal:
self.onboarding_doc = self.get_onboarding_doc()
self.onboarding = None
self.table_counts = get_table_with_counts()
self.restricted_doctypes = frappe.cache().get_value("domain_restricted_doctypes") or build_domain_restriced_doctype_cache()
self.restricted_pages = frappe.cache().get_value("domain_restricted_pages") or build_domain_restriced_page_cache()
def get_cached_value(self, cache_key, fallback_fn):
def is_page_allowed(self):
cards = self.doc.cards + get_custom_reports_and_doctypes(self.doc.module) + self.extended_cards
shortcuts = self.doc.shortcuts + self.extended_shortcuts
for section in cards:
links = loads(section.links) if isinstance(section.links, string_types) else section.links
for item in links:
if self.is_item_allowed(item.get('name'), item.get('type')):
return True
def _in_active_domains(item):
if not item.restrict_to_domain:
return True
else:
return item.restrict_to_domain in frappe.get_active_domains()
for item in shortcuts:
if self.is_item_allowed(item.link_to, item.type) and _in_active_domains(item):
return True
return False
def get_cached(self, cache_key, fallback_fn):
_cache = frappe.cache()
value = _cache.get_value(cache_key, user=frappe.session.user)
@ -83,12 +108,12 @@ class Workspace:
'extends': self.page_name,
'for_user': frappe.session.user
}
pages = frappe.get_list("Desk Page", filters=filters)
pages = frappe.get_all("Desk Page", filters=filters, limit=1)
if pages:
return frappe.get_doc("Desk Page", pages[0])
return frappe.get_cached_doc("Desk Page", pages[0])
self.get_pages_to_extend()
return frappe.get_doc("Desk Page", self.page_name)
return frappe.get_cached_doc("Desk Page", self.page_name)
def get_onboarding_doc(self):
# Check if onboarding is enabled
@ -123,7 +148,7 @@ class Workspace:
'module': ['in', self.allowed_modules]
})
pages = [frappe.get_doc("Desk Page", page['name']) for page in pages]
pages = [frappe.get_cached_doc("Desk Page", page['name']) for page in pages]
for page in pages:
self.extended_cards = self.extended_cards + page.cards
@ -168,9 +193,9 @@ class Workspace:
'subtitle': _(self.onboarding_doc.subtitle),
'success': _(self.onboarding_doc.success_message),
'docs_url': self.onboarding_doc.documentation_url,
'user_can_dismiss': self.onboarding_doc.user_can_dismiss,
'items': self.get_onboarding_steps()
}
@handle_not_exist
def get_cards(self):
cards = self.doc.cards
@ -324,25 +349,44 @@ def get_desktop_page(page):
}
@frappe.whitelist()
def get_desk_sidebar_items(flatten=False):
def get_desk_sidebar_items(flatten=False, cache=True):
"""Get list of sidebar items for desk
"""
# don't get domain restricted pages
blocked_modules = frappe.get_doc('User', frappe.session.user).get_blocked_modules()
pages = []
_cache = frappe.cache()
if cache:
pages = _cache.get_value("desk_sidebar_items", user=frappe.session.user)
if not pages or not cache:
# don't get domain restricted pages
blocked_modules = frappe.get_doc('User', frappe.session.user).get_blocked_modules()
filters = {
'restrict_to_domain': ['in', frappe.get_active_domains()],
'extends_another_page': 0,
'for_user': '',
'module': ['not in', blocked_modules]
}
filters = {
'restrict_to_domain': ['in', frappe.get_active_domains()],
'extends_another_page': 0,
'for_user': '',
'module': ['not in', blocked_modules]
}
if not frappe.local.conf.developer_mode:
filters['developer_mode_only'] = '0'
if not frappe.local.conf.developer_mode:
filters['developer_mode_only'] = '0'
# pages sorted based on pinned to top and then by name
order_by = "pin_to_top desc, pin_to_bottom asc, name asc"
all_pages = frappe.get_all("Desk Page", fields=["name", "category"], filters=filters, order_by=order_by, ignore_permissions=True)
pages = []
# Filter Page based on Permission
for page in all_pages:
try:
wspace = Workspace(page.get('name'), True)
if wspace.is_page_allowed():
pages.append(page)
except frappe.PermissionError:
pass
_cache.set_value("desk_sidebar_items", pages, frappe.session.user)
# pages sorted based on pinned to top and then by name
order_by = "pin_to_top desc, pin_to_bottom asc, name asc"
pages = frappe.get_all("Desk Page", fields=["name", "category"], filters=filters, order_by=order_by, ignore_permissions=True)
if flatten:
return pages
@ -376,7 +420,7 @@ def get_custom_reports_and_doctypes(module):
]
def get_custom_doctype_list(module):
doctypes = frappe.get_list("DocType", fields=["name"], filters={"custom": 1, "istable": 0, "module": module}, order_by="name", ignore_permissions=True)
doctypes = frappe.get_all("DocType", fields=["name"], filters={"custom": 1, "istable": 0, "module": module}, order_by="name")
out = []
for d in doctypes:
@ -391,9 +435,9 @@ def get_custom_doctype_list(module):
def get_custom_report_list(module):
"""Returns list on new style reports for modules."""
reports = frappe.get_list("Report", fields=["name", "ref_doctype", "report_type"], filters=
reports = frappe.get_all("Report", fields=["name", "ref_doctype", "report_type"], filters=
{"is_standard": "No", "disabled": 0, "module": module},
order_by="name", ignore_permissions=True)
order_by="name")
out = []
for r in reports:

View file

@ -1,208 +1,81 @@
{
"allow_copy": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"actions": [],
"autoname": "Prompt",
"beta": 0,
"creation": "2017-10-23 13:02:10.295824",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"subject_field",
"start_date_field",
"end_date_field",
"column_break_5",
"all_day"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Reference Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "subject_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Subject Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "start_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Start Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "end_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "End Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"default": "0",
"fieldname": "all_day",
"fieldtype": "Check",
"label": "All Day"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-09-05 14:22:27.664645",
"links": [],
"modified": "2020-06-15 11:24:57.639430",
"modified_by": "Administrator",
"module": "Desk",
"name": "Calendar View",
"name_case": "",
"owner": "faris@erpnext.com",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
},
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 0,
"delete": 0,
"email": 0,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "All",
"set_user_permissions": 0,
"share": 0,
"submit": 0,
"write": 0
"role": "All"
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 0,
"track_seen": 0
"sort_order": "DESC"
}

View file

@ -5,10 +5,14 @@ frappe.ui.form.on('Dashboard', {
refresh: function(frm) {
frm.add_custom_button(__("Show Dashboard"), () => frappe.set_route('dashboard', frm.doc.name));
if (!frappe.boot.developer_mode) {
frm.disable_form();
}
frm.set_query("chart", "charts", function() {
return {
filters: {
is_public: 1
is_public: 1,
}
};
});
@ -16,7 +20,7 @@ frappe.ui.form.on('Dashboard', {
frm.set_query("card", "cards", function() {
return {
filters: {
is_public: 1
is_public: 1,
}
};
});

View file

@ -1,5 +1,6 @@
{
"actions": [],
"allow_rename": 1,
"autoname": "field:dashboard_name",
"creation": "2019-01-10 12:54:40.938705",
"doctype": "DocType",
@ -8,6 +9,8 @@
"field_order": [
"dashboard_name",
"is_default",
"is_standard",
"module",
"charts",
"chart_options",
"cards"
@ -35,21 +38,35 @@
"reqd": 1
},
{
"description": "Set Default Options for all charts on this Dashboard (Ex: \"colors\": [\"#d1d8dd\", \"#ff5858\"])",
"fieldname": "chart_options",
"fieldtype": "Code",
"label": "Chart Options",
"options": "JSON"
"description": "Set Default Options for all charts on this Dashboard (Ex: \"colors\": [\"#d1d8dd\", \"#ff5858\"])",
"fieldname": "chart_options",
"fieldtype": "Code",
"label": "Chart Options",
"options": "JSON"
},
{
"fieldname": "cards",
"fieldtype": "Table",
"label": "Cards",
"options": "Number Card Link"
},
{
"default": "0",
"fieldname": "is_standard",
"fieldtype": "Check",
"label": "Is Standard"
},
{
"depends_on": "eval: doc.is_standard",
"fieldname": "module",
"fieldtype": "Link",
"label": "Module",
"mandatory_depends_on": "eval: doc.is_standard",
"options": "Module Def"
}
],
"links": [],
"modified": "2020-04-29 13:26:37.362482",
"modified": "2020-07-10 17:48:19.468813",
"modified_by": "Administrator",
"module": "Desk",
"name": "Dashboard",

View file

@ -4,6 +4,7 @@
from __future__ import unicode_literals
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
import frappe
from frappe import _
import json
@ -15,7 +16,23 @@ class Dashboard(Document):
frappe.db.sql('''update
tabDashboard set is_default = 0 where name != %s''', self.name)
if frappe.conf.developer_mode and self.is_standard:
export_to_files(record_list=[['Dashboard', self.name, self.module + ' Dashboard']], record_module=self.module)
def validate(self):
if not frappe.conf.developer_mode and self.is_standard:
frappe.throw('Cannot edit Standard Dashboards')
if self.is_standard:
non_standard_docs_map = {
'Dashboard Chart': get_non_standard_charts_in_dashboard(self),
'Number Card': get_non_standard_cards_in_dashboard(self)
}
if non_standard_docs_map['Dashboard Chart'] or non_standard_docs_map['Number Card']:
message = get_non_standard_warning_message(non_standard_docs_map)
frappe.throw(message, title=_("Standard Not Set"), is_minimizable=True)
self.validate_custom_options()
def validate_custom_options(self):
@ -48,3 +65,29 @@ def get_permitted_cards(dashboard_name):
if frappe.has_permission('Number Card', doc=card.card):
permitted_cards.append(card)
return permitted_cards
def get_non_standard_charts_in_dashboard(dashboard):
non_standard_charts = [doc.name for doc in frappe.get_list('Dashboard Chart', {'is_standard': 0})]
return [chart_link.chart for chart_link in dashboard.charts if chart_link.chart in non_standard_charts]
def get_non_standard_cards_in_dashboard(dashboard):
non_standard_cards = [doc.name for doc in frappe.get_list('Number Card', {'is_standard': 0})]
return [card_link.card for card_link in dashboard.cards if card_link.card in non_standard_cards]
def get_non_standard_warning_message(non_standard_docs_map):
message = _('''Please set the following documents in this Dashboard as standard first.''')
def get_html(docs, doctype):
html = '<p>{}</p>'.format(frappe.bold(doctype))
for doc in docs:
html += '<div><a href="#Form/{doctype}/{doc}">{doc}</a></div>'.format(doctype=doctype, doc=doc)
html += '<br>'
return html
html = message + '<br>'
for doctype in non_standard_docs_map:
if non_standard_docs_map[doctype]:
html += get_html(non_standard_docs_map[doctype], doctype)
return html

View file

@ -9,9 +9,24 @@ frappe.ui.form.on('Dashboard Chart', {
frm.add_fetch('source', 'timeseries', 'timeseries');
},
before_save: function(frm) {
let dynamic_filters = JSON.parse(frm.doc.dynamic_filters_json || 'null');
let static_filters = JSON.parse(frm.doc.filters_json || 'null');
static_filters =
frappe.dashboard_utils.remove_common_static_filter_values(static_filters, dynamic_filters);
frm.set_value('filters_json', JSON.stringify(static_filters));
frm.trigger('show_filters');
},
refresh: function(frm) {
frm.chart_filters = null;
if (!frappe.boot.developer_mode && frm.doc.is_standard) {
frm.set_df_property('chart_options_section', 'hidden', 1);
frm.disable_form();
}
frm.add_custom_button('Add Chart to Dashboard', () => {
const d = new frappe.ui.Dialog({
title: __('Add to Dashboard'),
@ -49,6 +64,8 @@ frappe.ui.form.on('Dashboard Chart', {
});
frm.set_df_property("filters_section", "hidden", 1);
frm.set_df_property("dynamic_filters_section", "hidden", 1);
frm.trigger('set_time_series');
frm.set_query('document_type', function() {
return {
@ -66,6 +83,15 @@ frappe.ui.form.on('Dashboard Chart', {
if (!frappe.boot.developer_mode) {
frm.set_df_property("custom_options", "hidden", 1);
}
},
is_standard: function(frm) {
if (frappe.boot.developer_mode && frm.doc.is_standard) {
frm.trigger('render_dynamic_filters_table');
} else {
frm.set_df_property("dynamic_filters_section", "hidden", 1);
}
},
source: function(frm) {
@ -111,6 +137,7 @@ frappe.ui.form.on('Dashboard Chart', {
frm.set_value('based_on', '');
frm.set_value('value_based_on', '');
frm.set_value('filters_json', '[]');
frm.set_value('dynamic_filters_json', '[]');
frm.trigger('update_options');
},
@ -119,6 +146,7 @@ frappe.ui.form.on('Dashboard Chart', {
frm.set_value('y_axis', []);
frm.set_df_property('x_field', 'options', []);
frm.set_value('filters_json', '{}');
frm.set_value('dynamic_filters_json', '{}');
frm.trigger('set_chart_report_filters');
},
@ -146,7 +174,10 @@ frappe.ui.form.on('Dashboard Chart', {
},
set_chart_field_options: function(frm) {
let filters = frm.doc.filters_json.length > 2? JSON.parse(frm.doc.filters_json): null;
let filters = frm.doc.filters_json.length > 2 ? JSON.parse(frm.doc.filters_json) : null;
if (frm.doc.dynamic_filters_json.length > 2) {
filters = {...filters, ...JSON.parse(frm.doc.dynamic_filters_json)};
}
frappe.xcall(
'frappe.desk.query_report.run',
{
@ -165,7 +196,7 @@ frappe.ui.form.on('Dashboard Chart', {
if (!frm.doc.is_custom) {
if (data.result.length) {
frm.field_options = frappe.report_utils.get_possible_chart_options(data.columns, data);
frm.field_options = frappe.report_utils.get_field_options_from_report(data.columns, data);
frm.set_df_property('x_field', 'options', frm.field_options.non_numeric_fields);
if (!frm.field_options.numeric_fields.length) {
frappe.msgprint(__(`Report has no numeric fields, please change the Report Name`));
@ -240,11 +271,14 @@ frappe.ui.form.on('Dashboard Chart', {
show_filters: function(frm) {
frm.chart_filters = [];
frappe.dashboard_utils.get_filters_for_chart_type(frm.doc).then(filters => {
if (filters) {
frm.chart_filters = filters;
}
if (filters) {
frm.chart_filters = filters;
}
frm.trigger('render_filters_table');
frm.trigger('render_filters_table');
if (frappe.boot.developer_mode && frm.doc.is_standard) {
frm.trigger('render_dynamic_filters_table');
}
});
},
@ -257,8 +291,8 @@ frappe.ui.form.on('Dashboard Chart', {
let table = $(`<table class="table table-bordered" style="cursor:pointer; margin:0px;">
<thead>
<tr>
<th style="width: 33%">${__('Filter')}</th>
<th style="width: 33%">${__('Condition')}</th>
<th style="width: 20%">${__('Filter')}</th>
<th style="width: 20%">${__('Condition')}</th>
<th>${__('Value')}</th>
</tr>
</thead>
@ -378,4 +412,102 @@ frappe.ui.form.on('Dashboard Chart', {
});
},
render_dynamic_filters_table(frm) {
frm.set_df_property("dynamic_filters_section", "hidden", 0);
let is_document_type = frm.doc.chart_type !== 'Report'
&& frm.doc.chart_type !== 'Custom';
let wrapper = $(frm.get_field('dynamic_filters_json').wrapper).empty();
frm.dynamic_filter_table = $(`<table class="table table-bordered" style="cursor:pointer; margin:0px;">
<thead>
<tr>
<th style="width: 20%">${__('Filter')}</th>
<th style="width: 20%">${__('Condition')}</th>
<th>${__('Value')}</th>
</tr>
</thead>
<tbody></tbody>
</table>`).appendTo(wrapper);
frm.dynamic_filters = frm.doc.dynamic_filters_json && frm.doc.dynamic_filters_json.length > 2
? JSON.parse(frm.doc.dynamic_filters_json)
: null;
frm.trigger('set_dynamic_filters_in_table');
let filters = JSON.parse(frm.doc.filters_json || '[]');
let fields = frappe.dashboard_utils.get_fields_for_dynamic_filter_dialog(
is_document_type, filters, frm.dynamic_filters
);
frm.dynamic_filter_table.on('click', () => {
let dialog = new frappe.ui.Dialog({
title: __('Set Dynamic Filters'),
fields: fields,
primary_action: () => {
let values = dialog.get_values();
dialog.hide();
let dynamic_filters = [];
for (let key of Object.keys(values)) {
if (is_document_type) {
let [doctype, fieldname] = key.split(':');
dynamic_filters.push([doctype, fieldname, '=', values[key]]);
}
}
if (is_document_type) {
frm.set_value('dynamic_filters_json', JSON.stringify(dynamic_filters));
} else {
frm.set_value('dynamic_filters_json', JSON.stringify(values));
}
frm.trigger('set_dynamic_filters_in_table');
},
primary_action_label: "Set"
});
dialog.show();
dialog.set_values(frm.dynamic_filters);
});
},
set_dynamic_filters_in_table: function(frm) {
frm.dynamic_filters = frm.doc.dynamic_filters_json && frm.doc.dynamic_filters_json.length > 2
? JSON.parse(frm.doc.dynamic_filters_json)
: null;
if (!frm.dynamic_filters) {
const filter_row = $(`<tr><td colspan="3" class="text-muted text-center">
${__("Click to Set Dynamic Filters")}</td></tr>`);
frm.dynamic_filter_table.find('tbody').html(filter_row);
} else {
let filter_rows = '';
if ($.isArray(frm.dynamic_filters)) {
frm.dynamic_filters.forEach(filter => {
filter_rows +=
`<tr>
<td>${filter[1]}</td>
<td>${filter[2] || ""}</td>
<td>${filter[3]}</td>
</tr>`;
});
} else {
let condition = '=';
for (let [key, val] of Object.entries(frm.dynamic_filters)) {
filter_rows +=
`<tr>
<td>${key}</td>
<td>${condition}</td>
<td>${val || ""}</td>
</tr>`
;
}
}
frm.dynamic_filter_table.find('tbody').html(filter_rows);
}
}
});

View file

@ -7,6 +7,8 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"is_standard",
"module",
"chart_name",
"chart_type",
"report_name",
@ -32,10 +34,12 @@
"type",
"filters_section",
"filters_json",
"dynamic_filters_section",
"dynamic_filters_json",
"chart_options_section",
"color",
"column_break_2",
"custom_options",
"column_break_2",
"color",
"section_break_10",
"last_synced_on"
],
@ -67,7 +71,8 @@
"fieldname": "document_type",
"fieldtype": "Link",
"label": "Document Type",
"options": "DocType"
"options": "DocType",
"set_only_once": 1
},
{
"depends_on": "eval: doc.timeseries && ['Count', 'Sum', 'Average'].includes(doc.chart_type)",
@ -200,7 +205,8 @@
"fieldname": "report_name",
"fieldtype": "Link",
"label": "Report Name",
"options": "Report"
"options": "Report",
"set_only_once": 1
},
{
"default": "0",
@ -235,10 +241,43 @@
"fieldname": "heatmap_year",
"fieldtype": "Select",
"label": "Year"
},
{
"default": "0",
"fieldname": "is_standard",
"fieldtype": "Check",
"label": "Is Standard",
"show_days": 1,
"show_seconds": 1
},
{
"depends_on": "eval: doc.is_standard",
"fieldname": "module",
"fieldtype": "Link",
"label": "Module",
"mandatory_depends_on": "eval: doc.is_standard",
"options": "Module Def",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "dynamic_filters_json",
"fieldtype": "Code",
"label": "Dynamic Filters JSON",
"options": "JSON",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "dynamic_filters_section",
"fieldtype": "Section Break",
"label": "Dynamic Filters",
"show_days": 1,
"show_seconds": 1
}
],
"links": [],
"modified": "2020-05-16 15:03:02.455395",
"modified": "2020-07-10 16:09:47.102062",
"modified_by": "Administrator",
"module": "Desk",
"name": "Dashboard Chart",

View file

@ -8,10 +8,12 @@ from frappe import _
import datetime
import json
from frappe.utils.dashboard import cache_source, get_from_date_from_timespan
from frappe.utils import nowdate, add_to_date, getdate, get_last_day, formatdate, get_datetime, cint
from frappe.utils import nowdate, add_to_date, getdate, get_last_day, formatdate,\
get_datetime, cint, now_datetime
from frappe.model.naming import append_number_if_name_exists
from frappe.boot import get_allowed_reports
from frappe.model.document import Document
from frappe.modules.export_file import export_to_files
def get_permission_query_conditions(user):
@ -26,15 +28,15 @@ def get_permission_query_conditions(user):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()])
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()]
return '''
`tabDashboard Chart`.`document_type` in {allowed_doctypes}
or `tabDashboard Chart`.`report_name` in {allowed_reports}
`tabDashboard Chart`.`document_type` in ({allowed_doctypes})
or `tabDashboard Chart`.`report_name` in ({allowed_reports})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_reports=allowed_reports
allowed_doctypes=','.join(allowed_doctypes),
allowed_reports=','.join(allowed_reports)
)
@ -79,7 +81,9 @@ def get(chart_name = None, chart = None, no_cache = None, filters = None, from_d
to_date = get_datetime(chart.to_date)
timegrain = time_interval or chart.time_interval
filters = frappe.parse_json(filters) or frappe.parse_json(chart.filters_json) or []
filters = frappe.parse_json(filters) or frappe.parse_json(chart.filters_json)
if not filters:
filters = []
# don't include cancelled documents
filters.append([chart.document_type, 'docstatus', '<', 2, False])
@ -134,7 +138,7 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date):
if not from_date:
from_date = get_from_date_from_timespan(to_date, timespan)
if not to_date:
to_date = datetime.datetime.now()
to_date = now_datetime()
doctype = chart.document_type
datefield = chart.based_on
@ -258,9 +262,10 @@ def get_aggregate_function(chart_type):
def get_result(data, timegrain, from_date, to_date):
start_date = getdate(from_date)
end_date = getdate(to_date)
result = []
while start_date <= end_date:
result = [[start_date, 0.0]]
while start_date < end_date:
next_date = get_next_expected_date(start_date, timegrain)
result.append([next_date, 0.0])
start_date = next_date
@ -276,11 +281,8 @@ def get_result(data, timegrain, from_date, to_date):
def get_next_expected_date(date, timegrain):
next_date = None
if timegrain=='Daily':
next_date = add_to_date(date, days=1)
else:
# given date is always assumed to be the period ending date
next_date = get_period_ending(add_to_date(date, days=1), timegrain)
# given date is always assumed to be the period ending date
next_date = get_period_ending(add_to_date(date, days=1), timegrain)
return getdate(next_date)
def get_period_ending(date, timegrain):
@ -348,8 +350,13 @@ class DashboardChart(Document):
def on_update(self):
frappe.cache().delete_key('chart-data:{}'.format(self.name))
if frappe.conf.developer_mode and self.is_standard:
export_to_files(record_list=[['Dashboard Chart', self.name]], record_module=self.module)
def validate(self):
if not frappe.conf.developer_mode and self.is_standard:
frappe.throw('Cannot edit Standard charts')
if self.chart_type != 'Custom' and self.chart_type != 'Report':
self.check_required_field()
self.check_document_type()

View file

@ -4,13 +4,12 @@
from __future__ import unicode_literals
import unittest, frappe
from frappe.utils import getdate, formatdate
from frappe.utils import getdate, formatdate, get_last_day
from frappe.desk.doctype.dashboard_chart.dashboard_chart import (get,
get_period_ending)
from datetime import datetime
from dateutil.relativedelta import relativedelta
import calendar
class TestDashboardChart(unittest.TestCase):
def test_period_ending(self):
@ -35,9 +34,6 @@ class TestDashboardChart(unittest.TestCase):
self.assertEqual(get_period_ending('2019-10-01', 'Quarterly'),
getdate('2019-12-31'))
self.assertEqual(get_period_ending('2019-10-01', 'Yearly'),
getdate('2019-12-31'))
def test_dashboard_chart(self):
if frappe.db.exists('Dashboard Chart', 'Test Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Dashboard Chart')
@ -50,22 +46,24 @@ class TestDashboardChart(unittest.TestCase):
based_on = 'creation',
timespan = 'Last Year',
time_interval = 'Monthly',
filters_json = '[]',
filters_json = '{}',
timeseries = 1
)).insert()
cur_date = datetime.now() - relativedelta(years=1)
result = get(chart_name ='Test Dashboard Chart', refresh = 1)
for idx in range(13):
month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1]))
result = get(chart_name='Test Dashboard Chart', refresh=1)
self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d')))
if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')):
cur_date += relativedelta(months=1)
for idx in range(1, 13):
month = get_last_day(cur_date)
month = formatdate(month.strftime('%Y-%m-%d'))
self.assertEqual(result.get('labels')[idx], month)
cur_date += relativedelta(months=1)
# self.assertEqual(result.get('datasets')[0].get('values')[:-1],
# [44, 28, 8, 11, 2, 6, 18, 6, 4, 5, 15, 13])
frappe.db.rollback()
def test_empty_dashboard_chart(self):
@ -88,9 +86,14 @@ class TestDashboardChart(unittest.TestCase):
cur_date = datetime.now() - relativedelta(years=1)
result = get(chart_name ='Test Empty Dashboard Chart', refresh = 1)
for idx in range(13):
month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1]))
result = get(chart_name ='Test Empty Dashboard Chart', refresh=1)
self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d')))
if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')):
cur_date += relativedelta(months=1)
for idx in range(1, 13):
month = get_last_day(cur_date)
month = formatdate(month.strftime('%Y-%m-%d'))
self.assertEqual(result.get('labels')[idx], month)
cur_date += relativedelta(months=1)
@ -121,8 +124,13 @@ class TestDashboardChart(unittest.TestCase):
cur_date = datetime.now() - relativedelta(years=1)
result = get(chart_name ='Test Empty Dashboard Chart 2', refresh = 1)
for idx in range(13):
month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1]))
self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d')))
if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')):
cur_date += relativedelta(months=1)
for idx in range(1, 13):
month = get_last_day(cur_date)
month = formatdate(month.strftime('%Y-%m-%d'))
self.assertEqual(result.get('labels')[idx], month)
cur_date += relativedelta(months=1)
@ -132,6 +140,60 @@ class TestDashboardChart(unittest.TestCase):
frappe.db.rollback()
def test_group_by_chart_type(self):
if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Group By Dashboard Chart',
chart_type = 'Group By',
document_type = 'ToDo',
group_by_based_on = 'status',
filters_json = '[]',
)).insert()
result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1)
todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]})
self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count)
frappe.db.rollback()
def test_daily_dashboard_chart(self):
insert_test_records()
if frappe.db.exists('Dashboard Chart', 'Test Daily Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Daily Dashboard Chart')
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Daily Dashboard Chart',
chart_type = 'Sum',
document_type = 'Communication',
based_on = 'communication_date',
value_based_on = 'rating',
timespan = 'Select Date Range',
time_interval = 'Daily',
from_date = datetime(2019, 1, 6),
to_date = datetime(2019, 1, 11),
filters_json = '[]',
timeseries = 1
)).insert()
result = get(chart_name ='Test Daily Dashboard Chart', refresh = 1)
self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 300.0, 0.0, 100.0, 0.0])
self.assertEqual(
result.get('labels'),
[formatdate('2019-01-06'), formatdate('2019-01-07'), formatdate('2019-01-08'),\
formatdate('2019-01-09'), formatdate('2019-01-10'), formatdate('2019-01-11')]
)
frappe.db.rollback()
def test_weekly_dashboard_chart(self):
insert_test_records()
@ -155,37 +217,18 @@ class TestDashboardChart(unittest.TestCase):
result = get(chart_name ='Test Weekly Dashboard Chart', refresh = 1)
self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 0.0])
self.assertEqual(result.get('labels'), [formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')])
frappe.db.rollback()
def test_group_by_chart_type(self):
if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Group By Dashboard Chart',
chart_type = 'Group By',
document_type = 'ToDo',
group_by_based_on = 'status',
filters_json = '[]',
)).insert()
result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1)
todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]})
self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count)
self.assertEqual(result.get('datasets')[0].get('values'), [50.0, 300.0, 800.0, 0.0])
self.assertEqual(result.get('labels'), [formatdate('2018-12-30'), formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')])
frappe.db.rollback()
def insert_test_records():
create_new_communication(datetime(2019, 1, 10), 100)
create_new_communication(datetime(2018, 12, 30), 50)
create_new_communication(datetime(2019, 1, 4), 100)
create_new_communication(datetime(2019, 1, 6), 200)
create_new_communication(datetime(2019, 1, 7), 400)
create_new_communication(datetime(2019, 1, 8), 300)
create_new_communication(datetime(2019, 1, 10), 100)
def create_new_communication(date, rating):
communication = {

View file

@ -1,162 +1,69 @@
{
"allow_copy": 0,
"allow_events_in_timeline": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"actions": [],
"autoname": "field:source_name",
"beta": 0,
"creation": "2019-02-06 07:55:29.579840",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"source_name",
"module",
"timeseries"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fetch_if_empty": 0,
"fieldname": "source_name",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Source Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fetch_if_empty": 0,
"fieldname": "module",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Module",
"length": 0,
"no_copy": 0,
"options": "Module Def",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fetch_if_empty": 0,
"default": "0",
"fieldname": "timeseries",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Timeseries",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"label": "Timeseries"
}
],
"has_web_view": 0,
"hide_toolbar": 0,
"idx": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-04-09 14:20:51.548207",
"links": [],
"modified": "2020-06-26 18:00:37.421491",
"modified_by": "Administrator",
"module": "Desk",
"name": "Dashboard Chart Source",
"name_case": "",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1
},
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"submit": 0,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "",
"track_changes": 1,
"track_seen": 0,
"track_views": 0
"track_changes": 1
}

View file

@ -18,10 +18,6 @@ def get_config(name):
return f.read()
class DashboardChartSource(Document):
def validate(self):
if frappe.session.user != "Administrator":
frappe.throw(_("Only Administrator is allowed to create Dashboard Chart Sources"))
def on_update(self):
export_to_files(record_list=[[self.doctype, self.name]],
record_module=self.module, create_init=True)

View file

@ -13,7 +13,6 @@
"column_break_4",
"success_message",
"documentation_url",
"user_can_dismiss",
"is_complete",
"section_break_6",
"steps"
@ -53,13 +52,6 @@
"label": "Success Message",
"reqd": 1
},
{
"default": "1",
"description": "Allow users to dismiss onboarding temporarily for a day",
"fieldname": "user_can_dismiss",
"fieldtype": "Check",
"label": "User Can Dismiss "
},
{
"fieldname": "documentation_url",
"fieldtype": "Data",
@ -90,7 +82,7 @@
}
],
"links": [],
"modified": "2020-05-18 19:42:39.738869",
"modified": "2020-06-08 15:36:04.701049",
"modified_by": "Administrator",
"module": "Desk",
"name": "Module Onboarding",

View file

@ -69,7 +69,6 @@ def make_notification_logs(doc, users):
_doc = frappe.new_doc('Notification Log')
_doc.update(doc)
_doc.for_user = user
_doc.subject = _doc.subject.replace('<div>', '').replace('</div>', '')
if _doc.for_user != _doc.from_user or doc.type == 'Energy Point' or doc.type == 'Alert':
_doc.insert(ignore_permissions=True)
@ -100,14 +99,16 @@ def send_notification_email(doc):
)
def get_email_header(doc):
return {
docname = doc.document_name
header_map = {
'Default': _('New Notification'),
'Mention': _('New Mention'),
'Assignment': _('New Assignment'),
'Share': _('New Document Shared'),
'Energy Point': _('Energy Point Update'),
}[doc.type or 'Default']
'Mention': _('New Mention on {0}').format(docname),
'Assignment': _('Assignment Update on {0}').format(docname),
'Share': _('New Document Shared {0}').format(docname),
'Energy Point': _('Energy Point Update on {0}').format(docname),
}
return header_map[doc.type or 'Default']
@frappe.whitelist()
def mark_all_as_read():

View file

@ -3,8 +3,153 @@
frappe.ui.form.on('Number Card', {
refresh: function(frm) {
if (!frappe.boot.developer_mode && frm.doc.is_standard) {
frm.disable_form();
}
frm.set_df_property("filters_section", "hidden", 1);
frm.set_df_property("dynamic_filters_section", "hidden", 1);
frm.trigger('set_options');
if (!frm.doc.type) {
frm.set_value('type', 'Document Type');
}
if (frm.doc.type == 'Report' && frm.doc.report_name) {
frm.trigger('set_report_filters');
}
if (frm.doc.type == 'Custom') {
if (!frappe.boot.developer_mode) {
frm.disable_form();
}
frm.filters = eval(frm.doc.filters_config);
frm.trigger('set_filters_description');
frm.trigger('set_method_description');
frm.trigger('render_filters_table');
}
frm.trigger('create_add_to_dashboard_button');
},
create_add_to_dashboard_button: function(frm) {
frm.add_custom_button('Add Card to Dashboard', () => {
const d = new frappe.ui.Dialog({
title: __('Add to Dashboard'),
fields: [
{
label: __('Select Dashboard'),
fieldtype: 'Link',
fieldname: 'dashboard',
options: 'Dashboard',
}
],
primary_action: (values) => {
values.name = frm.doc.name;
frappe.xcall(
'frappe.desk.doctype.number_card.number_card.add_card_to_dashboard',
{
args: values
}
).then(()=> {
let dashboard_route_html =
`<a href = "#dashboard/${values.dashboard}">${values.dashboard}</a>`;
let message =
__(`Number Card ${values.name} add to Dashboard ` + dashboard_route_html);
frappe.msgprint(message);
});
d.hide();
}
});
if (!frm.doc.name) {
frappe.msgprint(__('Please create Card first'));
} else {
d.show();
}
});
},
before_save: function(frm) {
let dynamic_filters = JSON.parse(frm.doc.dynamic_filters_json || 'null');
let static_filters = JSON.parse(frm.doc.filters_json || 'null');
static_filters =
frappe.dashboard_utils.remove_common_static_filter_values(static_filters, dynamic_filters);
frm.set_value('filters_json', JSON.stringify(static_filters));
frm.trigger('render_filters_table');
frm.trigger('render_dynamic_filters_table');
},
is_standard: function(frm) {
frm.trigger('render_dynamic_filters_table');
frm.set_df_property("dynamic_filters_section", "hidden", 1);
},
set_filters_description: function(frm) {
if (frm.doc.type == 'Custom') {
frm.fields_dict.filters_config.set_description(`
Set the filters here. For example:
<pre class="small text-muted">
<code>
[{
fieldname: "company",
label: __("Company"),
fieldtype: "Link",
options: "Company",
default: frappe.defaults.get_user_default("Company"),
reqd: 1
},
{
fieldname: "account",
label: __("Account"),
fieldtype: "Link",
options: "Account",
reqd: 1
}]
</code></pre>`);
}
},
set_method_description: function(frm) {
if (frm.doc.type == 'Custom') {
frm.fields_dict.method.set_description(`
Set the path to a whitelisted function that will return the number on the card in the format:
<pre class="small text-muted">
<code>
{
"value": value,
"fieldtype": "Currency"
}
</code></pre>`);
}
},
type: function(frm) {
frm.trigger('set_filters_description');
if (frm.doc.type == 'Report') {
frm.set_query('report_name', () => {
return {
filters: {
'report_type': ['!=', 'Report Builder']
}
};
});
}
},
report_name: function(frm) {
frm.set_value('filters_json', '{}');
frm.set_value('dynamic_filters_json', '{}');
frm.set_df_property('report_field', 'options', []);
frm.trigger('set_report_filters');
},
filters_config: function(frm) {
frm.filters = eval(frm.doc.filters_config);
const filter_values = frappe.report_utils.get_filter_values(frm.filters);
frm.set_value('filters_json', JSON.stringify(filter_values));
frm.trigger('render_filters_table');
},
@ -17,11 +162,16 @@ frappe.ui.form.on('Number Card', {
};
});
frm.set_value('filters_json', '[]');
frm.set_value('dynamic_filters_json', '[]');
frm.set_value('aggregate_function_based_on', '');
frm.trigger('set_options');
},
set_options: function(frm) {
if (frm.doc.type !== 'Document Type') {
return;
}
let aggregate_based_on_fields = [];
const doctype = frm.doc.document_type;
@ -40,80 +190,275 @@ frappe.ui.form.on('Number Card', {
frm.set_df_property('aggregate_function_based_on', 'options', aggregate_based_on_fields);
});
frm.trigger('render_filters_table');
frm.trigger('render_dynamic_filters_table');
}
},
set_report_filters: function(frm) {
const report_name = frm.doc.report_name;
if (report_name) {
frappe.report_utils.get_report_filters(report_name).then(filters => {
if (filters) {
frm.filters = filters;
const filter_values = frappe.report_utils.get_filter_values(filters);
if (frm.doc.filters_json.length <= 2) {
frm.set_value('filters_json', JSON.stringify(filter_values));
}
}
frm.trigger('render_filters_table');
frm.trigger('set_report_field_options');
frm.trigger('render_dynamic_filters_table');
});
}
},
set_report_field_options: function(frm) {
let filters = frm.doc.filters_json.length > 2 ? JSON.parse(frm.doc.filters_json) : null;
if (frm.doc.dynamic_filters_json.length > 2) {
filters = {...filters, ...JSON.parse(frm.doc.dynamic_filters_json)};
}
frappe.xcall(
'frappe.desk.query_report.run',
{
report_name: frm.doc.report_name,
filters: filters,
ignore_prepared_report: 1
}
).then(data => {
if (data.result.length) {
frm.field_options = frappe.report_utils.get_field_options_from_report(data.columns, data);
frm.set_df_property('report_field', 'options', frm.field_options.numeric_fields);
if (!frm.field_options.numeric_fields.length) {
frappe.msgprint(__(`Report has no numeric fields, please change the Report Name`));
}
} else {
frappe.msgprint(__('Report has no data, please modify the filters or change the Report Name'));
}
});
},
render_filters_table: function(frm) {
frm.set_df_property("filters_section", "hidden", 0);
let is_document_type = frm.doc.type == 'Document Type';
let is_dynamic_filter = f => ['Date', 'DateRange'].includes(f.fieldtype) && f.default;
let wrapper = $(frm.get_field('filters_json').wrapper).empty();
frm.filter_table = $(`<table class="table table-bordered" style="cursor:pointer; margin:0px;">
let table = $(`<table class="table table-bordered" style="cursor:pointer; margin:0px;">
<thead>
<tr>
<th style="width: 33%">${__('Filter')}</th>
<th style="width: 33%">${__('Condition')}</th>
<th style="width: 20%">${__('Filter')}</th>
<th style="width: 20%">${__('Condition')}</th>
<th>${__('Value')}</th>
</tr>
</thead>
<tbody></tbody>
</table>`).appendTo(wrapper);
$(`<p class="text-muted small">${__("Click table to edit")}</p>`).appendTo(wrapper);
let filters = JSON.parse(frm.doc.filters_json || '[]');
let filters_set = false;
// Set dynamic filters for reports
if (frm.doc.type == 'Report') {
let set_filters = false;
frm.filters.forEach(f => {
if (is_dynamic_filter(f)) {
filters[f.fieldname] = f.default;
set_filters = true;
}
});
set_filters && frm.set_value('filters_json', JSON.stringify(filters));
}
let fields;
if (is_document_type) {
fields = [
{
fieldtype: 'HTML',
fieldname: 'filter_area',
}
];
if (filters.length) {
filters.forEach(filter => {
const filter_row =
$(`<tr>
<td>${filter[1]}</td>
<td>${filter[2] || ""}</td>
<td>${filter[3]}</td>
</tr>`);
table.find('tbody').append(filter_row);
});
filters_set = true;
}
} else if (frm.filters.length) {
fields = frm.filters.filter(f => f.fieldname);
fields.map(f => {
if (filters[f.fieldname]) {
let condition = '=';
const filter_row =
$(`<tr>
<td>${f.label}</td>
<td>${condition}</td>
<td>${filters[f.fieldname] || ""}</td>
</tr>`);
table.find('tbody').append(filter_row);
if (!filters_set) filters_set = true;
}
});
}
if (!filters_set) {
const filter_row = $(`<tr><td colspan="3" class="text-muted text-center">
${__("Click to Set Filters")}</td></tr>`);
table.find('tbody').append(filter_row);
}
table.on('click', () => {
let dialog = new frappe.ui.Dialog({
title: __('Set Filters'),
fields: fields.filter(f => !is_dynamic_filter(f)),
primary_action: function() {
let values = this.get_values();
if (values) {
this.hide();
if (is_document_type) {
let filters = frm.filter_group.get_filters();
frm.set_value('filters_json', JSON.stringify(filters));
} else {
frm.set_value('filters_json', JSON.stringify(values));
}
frm.trigger('render_filters_table');
}
},
primary_action_label: "Set"
});
if (is_document_type) {
frm.filter_group = new frappe.ui.FilterGroup({
parent: dialog.get_field('filter_area').$wrapper,
doctype: frm.doc.document_type,
on_change: () => {},
});
filters && frm.filter_group.add_filters_to_filter_group(filters);
}
dialog.show();
if (frm.doc.type == 'Report') {
//Set query report object so that it can be used while fetching filter values in the report
frappe.query_report = new frappe.views.QueryReport({'filters': dialog.fields_list});
frappe.query_reports[frm.doc.report_name]
&& frappe.query_reports[frm.doc.report_name].onload
&& frappe.query_reports[frm.doc.report_name].onload(frappe.query_report);
}
dialog.set_values(filters);
});
},
render_dynamic_filters_table(frm) {
if (!frappe.boot.developer_mode || !frm.doc.is_standard || frm.doc.type == 'Custom') {
return;
}
frm.set_df_property("dynamic_filters_section", "hidden", 0);
let is_document_type = frm.doc.type == 'Document Type';
let wrapper = $(frm.get_field('dynamic_filters_json').wrapper).empty();
frm.dynamic_filter_table = $(`<table class="table table-bordered" style="cursor:pointer; margin:0px;">
<thead>
<tr>
<th style="width: 20%">${__('Filter')}</th>
<th style="width: 20%">${__('Condition')}</th>
<th>${__('Value')}</th>
</tr>
</thead>
<tbody></tbody>
</table>`).appendTo(wrapper);
frm.filters = JSON.parse(frm.doc.filters_json || '[]');
frm.dynamic_filters = frm.doc.dynamic_filters_json && frm.doc.dynamic_filters_json.length > 2
? JSON.parse(frm.doc.dynamic_filters_json)
: null;
frm.trigger('set_filters_in_table');
frm.trigger('set_dynamic_filters_in_table');
frm.filter_table.on('click', () => {
let filters = JSON.parse(frm.doc.filters_json || '[]');
let fields = frappe.dashboard_utils.get_fields_for_dynamic_filter_dialog(
is_document_type, filters, frm.dynamic_filters
);
frm.dynamic_filter_table.on('click', () => {
let dialog = new frappe.ui.Dialog({
title: __('Set Filters'),
fields: [{
fieldtype: 'HTML',
fieldname: 'filter_area',
}],
primary_action: function() {
let values = this.get_values();
if (values) {
this.hide();
frm.filters = frm.filter_group.get_filters();
frm.set_value('filters_json', JSON.stringify(frm.filters));
frm.trigger('set_filters_in_table');
title: __('Set Dynamic Filters'),
fields: fields,
primary_action: () => {
let values = dialog.get_values();
dialog.hide();
let dynamic_filters = [];
for (let key of Object.keys(values)) {
if (is_document_type) {
let [doctype, fieldname] = key.split(':');
dynamic_filters.push([doctype, fieldname, '=', values[key]]);
}
}
if (is_document_type) {
frm.set_value('dynamic_filters_json', JSON.stringify(dynamic_filters));
} else {
frm.set_value('dynamic_filters_json', JSON.stringify(values));
}
frm.trigger('set_dynamic_filters_in_table');
},
primary_action_label: "Set"
});
frappe.dashboards.filters_dialog = dialog;
frm.filter_group = new frappe.ui.FilterGroup({
parent: dialog.get_field('filter_area').$wrapper,
doctype: frm.doc.document_type,
on_change: () => {},
});
frm.filter_group.add_filters_to_filter_group(frm.filters);
dialog.show();
dialog.set_values(frm.filters);
dialog.set_values(frm.dynamic_filters);
});
},
set_filters_in_table: function(frm) {
if (!frm.filters.length) {
set_dynamic_filters_in_table: function(frm) {
frm.dynamic_filters = frm.doc.dynamic_filters_json && frm.doc.dynamic_filters_json.length > 2
? JSON.parse(frm.doc.dynamic_filters_json)
: null;
if (!frm.dynamic_filters) {
const filter_row = $(`<tr><td colspan="3" class="text-muted text-center">
${__("Click to Set Filters")}</td></tr>`);
frm.filter_table.find('tbody').html(filter_row);
${__("Click to Set Dynamic Filters")}</td></tr>`);
frm.dynamic_filter_table.find('tbody').html(filter_row);
} else {
let filter_rows = '';
frm.filters.forEach(filter => {
filter_rows +=
`<tr>
<td>${filter[1]}</td>
<td>${filter[2] || ""}</td>
<td>${filter[3]}</td>
</tr>`;
if ($.isArray(frm.dynamic_filters)) {
frm.dynamic_filters.forEach(filter => {
filter_rows +=
`<tr>
<td>${filter[1]}</td>
<td>${filter[2] || ""}</td>
<td>${filter[3]}</td>
</tr>`;
});
} else {
let condition = '=';
for (let [key, val] of Object.entries(frm.dynamic_filters)) {
filter_rows +=
`<tr>
<td>${key}</td>
<td>${condition}</td>
<td>${val || ""}</td>
</tr>`
;
}
}
});
frm.filter_table.find('tbody').html(filter_rows);
frm.dynamic_filter_table.find('tbody').html(filter_rows);
}
}
});

View file

@ -1,39 +1,53 @@
{
"actions": [],
"allow_rename": 1,
"creation": "2020-04-15 18:06:39.444683",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"is_standard",
"module",
"label",
"type",
"report_name",
"method",
"function",
"aggregate_function_based_on",
"column_break_2",
"document_type",
"report_field",
"report_function",
"is_public",
"custom_configuration_section",
"filters_config",
"stats_section",
"show_percentage_stats",
"stats_time_interval",
"filters_section",
"filters_json",
"dynamic_filters_section",
"dynamic_filters_json",
"section_break_16",
"color"
],
"fields": [
{
"depends_on": "eval: doc.type == 'Document Type'",
"fieldname": "document_type",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1
"mandatory_depends_on": "eval: doc.type == 'Document Type'",
"options": "DocType"
},
{
"depends_on": "eval: doc.document_type",
"depends_on": "eval: doc.type == 'Document Type'",
"fieldname": "function",
"fieldtype": "Select",
"label": "Function",
"options": "Count\nSum\nAverage\nMinimum\nMaximum",
"reqd": 1
"mandatory_depends_on": "eval: doc.type == 'Document Type'",
"options": "Count\nSum\nAverage\nMinimum\nMaximum"
},
{
"depends_on": "eval: doc.function !== 'Count'",
@ -92,13 +106,91 @@
"options": "Daily\nWeekly\nMonthly\nYearly"
},
{
"depends_on": "eval: doc.type == 'Document Type'",
"fieldname": "stats_section",
"fieldtype": "Section Break",
"label": "Stats"
},
{
"default": "0",
"fieldname": "is_standard",
"fieldtype": "Check",
"label": "Is Standard"
},
{
"depends_on": "eval: doc.is_standard",
"fieldname": "module",
"fieldtype": "Link",
"label": "Module",
"mandatory_depends_on": "eval: doc.is_standard",
"options": "Module Def"
},
{
"fieldname": "dynamic_filters_json",
"fieldtype": "Code",
"label": "Dynamic Filters JSON",
"options": "JSON"
},
{
"fieldname": "section_break_16",
"fieldtype": "Section Break"
},
{
"fieldname": "dynamic_filters_section",
"fieldtype": "Section Break",
"label": "Dynamic Filters Section"
},
{
"fieldname": "type",
"fieldtype": "Select",
"label": "Type",
"options": "Document Type\nReport\nCustom"
},
{
"depends_on": "eval: doc.type == 'Report'",
"fieldname": "report_name",
"fieldtype": "Link",
"label": "Report Name",
"mandatory_depends_on": "eval: doc.type == 'Report'",
"options": "Report"
},
{
"depends_on": "eval: doc.type == 'Report'",
"fieldname": "report_field",
"fieldtype": "Select",
"label": "Field",
"mandatory_depends_on": "eval: doc.type == 'Report'"
},
{
"depends_on": "eval: doc.type == 'Custom'",
"fieldname": "method",
"fieldtype": "Data",
"label": "Method",
"mandatory_depends_on": "eval: doc.type == 'Custom'"
},
{
"depends_on": "eval: doc.type == 'Custom'",
"fieldname": "custom_configuration_section",
"fieldtype": "Section Break",
"label": "Custom Configuration"
},
{
"fieldname": "filters_config",
"fieldtype": "Code",
"label": "Filters Configuration",
"options": "JSON"
},
{
"depends_on": "eval: doc.type == 'Report'",
"fieldname": "report_function",
"fieldtype": "Select",
"label": "Function",
"mandatory_depends_on": "eval: doc.type == 'Report'",
"options": "Sum\nAverage\nMinimum\nMaximum"
}
],
"links": [],
"modified": "2020-05-06 19:47:57.753574",
"modified": "2020-07-18 17:08:22.882538",
"modified_by": "Administrator",
"module": "Desk",
"name": "Number Card",

Some files were not shown because too many files have changed in this diff Show more