Merge branch 'develop' into fix-text-editor-issues

This commit is contained in:
Suraj Shetty 2020-06-22 17:10:51 +05:30 committed by GitHub
commit 19e38502c1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
188 changed files with 6831 additions and 5307 deletions

14
.github/workflows/docker-release.yml vendored Normal file
View file

@ -0,0 +1,14 @@
# Kick off the downstream frappe_docker Travis build whenever a GitHub
# release is published for this repository.
name: Trigger Docker build on release

on:
  release:
    types: [released]

jobs:
  curl:
    runs-on: ubuntu-latest
    container:
      image: alpine:latest
    steps:
      # Alpine ships without curl/bash; install them, then poke the
      # Travis API to rebuild frappe_docker's master branch.
      - name: curl
        run: |
          apk add curl bash
          curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests

View file

@ -47,23 +47,11 @@ matrix:
script: bench --site test_site run-ui-tests frappe --headless
before_install:
# do we really want to run travis?
# do we really want to run travis?
- |
ONLY_DOCS_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.(md|png|jpg|jpeg)$|^.github|LICENSE' ; echo $?)
ONLY_JS_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.js$' ; echo $?)
ONLY_PY_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.py$' ; echo $?)
if [[ $ONLY_DOCS_CHANGES == "1" ]]; then
echo "Only docs were updated, stopping build process.";
exit;
fi
if [[ $ONLY_JS_CHANGES == "1" && $TYPE == "server" ]]; then
echo "Only JavaScript code was updated; Stopping Python build process.";
exit;
fi
if [[ $ONLY_PY_CHANGES == "1" && $TYPE == "ui" ]]; then
echo "Only Python code was updated, stopping Cypress build process.";
exit;
python ./.travis/roulette.py
if [[ $? != 2 ]];then
exit;
fi
# install wkhtmltopdf

54
.travis/roulette.py Normal file
View file

@ -0,0 +1,54 @@
# if the script ends with exit code 0, then no tests are run further, else all tests are run
import os
import re
import shlex
import subprocess
import sys

# Raw string: the original non-raw literal relied on Python tolerating the
# invalid "\." escape (DeprecationWarning).  "^\." also anchors on a literal
# dot now -- the old "^.github" matched ANY leading character + "github".
DOCS_REGEX = re.compile(r'\.(md|png|jpg|jpeg)$|^\.github|LICENSE')


def get_output(command, shell=True):
	"""Run *command*, echo it, and return its stripped stdout.

	Bug fix: the command was unconditionally shlex.split() even with
	shell=True; on POSIX, check_output(list, shell=True) executes only the
	first element, silently dropping the rest of the command line.
	"""
	print(command)
	if not shell:
		command = shlex.split(command)
	return subprocess.check_output(command, shell=shell, encoding="utf8").strip()


def is_py(file):
	# ".py" with the dot, so names like "numpy" no longer match.
	return file.endswith(".py")


def is_js(file):
	# ".js" with the dot, so e.g. "emberjs" no longer matches.
	return file.endswith(".js")


def is_docs(file):
	"""True for documentation-only paths (markdown, images, .github config, LICENSE)."""
	return bool(DOCS_REGEX.search(file))


def main():
	build_type = os.environ.get("TYPE")
	commit_range = os.environ.get("TRAVIS_COMMIT_RANGE")
	print("Build Type: {}".format(build_type))
	print("Commit Range: {}".format(commit_range))

	try:
		files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
	except Exception:
		# Can't determine what changed -- run every test suite.
		sys.exit(2)

	if "fatal" not in files_changed:
		files_list = files_changed.split()
		# Guard: an empty diff previously made every "only_*" check
		# vacuously true and skipped the entire build.
		if files_list:
			if all(is_docs(f) for f in files_list):
				print("Only docs were updated, stopping build process.")
				sys.exit(0)
			if all(is_js(f) for f in files_list) and build_type == "server":
				print("Only JavaScript code was updated; Stopping Python build process.")
				sys.exit(0)
			if all(is_py(f) for f in files_list) and build_type == "ui":
				print("Only Python code was updated, stopping Cypress build process.")
				sys.exit(0)
	sys.exit(2)


if __name__ == "__main__":
	main()

View file

@ -1,7 +1,7 @@
<div align="center">
<img src=".github/frappe-framework-logo.png" height="150">
<h1>
<a href="https://frappe.io">
<a href="https://frappeframework.com">
frappe
</a>
</h1>
@ -33,8 +33,8 @@
Full-stack web application framework that uses Python and MariaDB on the server side and a tightly integrated client side library. Built for [ERPNext](https://erpnext.com)
### Table of Contents
* [Installation](#installation)
* [Documentation](https://frappe.io/docs)
* [Installation](https://frappeframework.com/docs/user/en/installation)
* [Documentation](https://frappeframework.com/docs)
* [License](#license)
### Installation
@ -49,7 +49,7 @@ Full-stack web application framework that uses Python and MariaDB on the server
### Website
For details and documentation, see the website
[https://frappe.io](https://frappe.io)
[https://frappeframework.com](https://frappeframework.com)
### License
This repository has been released under the [MIT License](LICENSE).

View file

@ -4,14 +4,14 @@ context('Control Duration', () => {
cy.visit('/desk#workspace/Website');
});
function get_dialog_with_duration(show_days=1, show_seconds=1) {
function get_dialog_with_duration(hide_days=0, hide_seconds=0) {
return cy.dialog({
title: 'Duration',
fields: [{
'fieldname': 'duration',
'fieldtype': 'Duration',
'show_seconds': show_days,
'show_days': show_seconds
'hide_days': hide_days,
'hide_seconds': hide_seconds
}]
});
}
@ -37,7 +37,7 @@ context('Control Duration', () => {
});
it('should hide days or seconds according to duration options', () => {
get_dialog_with_duration(0, 0).as('dialog');
get_dialog_with_duration(1, 1).as('dialog');
cy.get('.frappe-control[data-fieldname=duration] input').first().click();
cy.get('.duration-input[data-duration=days]').should('not.be.visible');
cy.get('.duration-input[data-duration=seconds]').should('not.be.visible');

View file

@ -9,6 +9,7 @@ context('Form', () => {
it('create a new form', () => {
cy.visit('/desk#Form/ToDo/New ToDo 1');
cy.fill_field('description', 'this is a test todo', 'Text Editor').blur();
cy.wait(300);
cy.get('.page-title').should('contain', 'Not Saved');
cy.server();
cy.route({

View file

@ -40,12 +40,12 @@ context('Grid Pagination', () => {
cy.get('@table').find('.current-page-number').should('contain', '20');
cy.get('@table').find('.total-page-number').should('contain', '20');
});
it('deletes all rows', ()=> {
cy.visit('/desk#Form/Contact/Test Contact');
cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
cy.get('@table').find('button.grid-remove-all-rows').click();
cy.get('.modal-dialog .btn-primary').contains('Yes').click();
cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
});
// it('deletes all rows', ()=> {
// cy.visit('/desk#Form/Contact/Test Contact');
// cy.get('.frappe-control[data-fieldname="phone_nos"]').as('table');
// cy.get('@table').find('.grid-heading-row .grid-row-check').click({force: true});
// cy.get('@table').find('button.grid-remove-all-rows').click();
// cy.get('.modal-dialog .btn-primary').contains('Yes').click();
// cy.get('@table').find('.grid-body .grid-row').should('have.length', 0);
// });
});

View file

@ -1145,8 +1145,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
	"""Import a file using Data Import."""
	# Aliased import so the helper does not shadow this wrapper's own name.
	from frappe.core.doctype.data_import.data_import import import_doc as _import_doc
	_import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""

View file

@ -99,7 +99,7 @@ def application(request):
frappe.monitor.stop(response)
frappe.recorder.dump()
frappe.logger("web").info({
frappe.logger("frappe.web").info({
"site": get_site_name(request.host),
"remote_addr": getattr(request, "remote_addr", "NOTFOUND"),
"base_url": getattr(request, "base_url", "NOTFOUND"),

View file

@ -24,7 +24,7 @@ user_cache_keys = ("bootinfo", "user_recent", "roles", "user_doc", "lang",
"has_role:Page", "has_role:Report")
doctype_cache_keys = ("meta", "form_meta", "table_columns", "last_modified",
"linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map')
"linked_doctypes", 'notifications', 'workflow' ,'energy_point_rule_map', 'data_import_column_header_map')
def clear_user_cache(user=None):

View file

@ -43,12 +43,14 @@ def pass_context(f):
return click.pass_context(_func)
def get_site(context, raise_err=True):
	"""Return the first site configured on the CLI *context*.

	When no site is available (``context.sites`` empty or not indexable):
	raises ``frappe.SiteNotSpecifiedError`` by default, or returns ``None``
	when *raise_err* is False.
	"""
	try:
		return context.sites[0]
	except (IndexError, TypeError):
		if raise_err:
			raise frappe.SiteNotSpecifiedError
		return None
def popen(command, *args, **kwargs):
output = kwargs.get('output', True)

View file

@ -126,7 +126,7 @@ def doctor(context, site=None):
"Get diagnostic info about background workers"
from frappe.utils.doctor import doctor as _doctor
if not site:
site = get_site(context)
site = get_site(context, raise_err=False)
return _doctor(site=site)
@click.command('show-pending-jobs')

View file

@ -83,10 +83,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
installing = touch_file(get_site_path('locks', 'installing.lock'))
if new_site:
# run cleanup only if new-site is called
atexit.register(_new_site_cleanup, site, mariadb_root_username, mariadb_root_password)
install_db(root_login=mariadb_root_username, root_password=mariadb_root_password, db_name=db_name,
admin_password=admin_password, verbose=verbose, source_sql=source_sql, force=force, reinstall=reinstall,
db_password=db_password, db_type=db_type, db_host=db_host, db_port=db_port, no_mariadb_socket=no_mariadb_socket)
@ -102,18 +98,6 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
scheduler_status = "disabled" if frappe.utils.scheduler.is_scheduler_disabled() else "enabled"
print("*** Scheduler is", scheduler_status, "***")
def _new_site_cleanup(site, mariadb_root_username, mariadb_root_password):
try:
installing = get_site_path('locks', 'installing.lock')
except AttributeError:
installing = os.path.join(site, 'locks', 'installing.lock')
if installing and os.path.exists(installing):
if mariadb_root_password:
_drop_site(site, mariadb_root_username, mariadb_root_password, force=True, no_backup=True)
shutil.rmtree(site)
frappe.destroy()
@click.command('restore')
@click.argument('sql-file-path')
@ -430,15 +414,16 @@ def remove_from_installed_apps(context, app):
@click.argument('app')
@click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True)
@click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False)
@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False)
@pass_context
def uninstall(context, app, dry_run=False, yes=False):
def uninstall(context, app, dry_run=False, yes=False, no_backup=False):
"Remove app and linked modules from site"
from frappe.installer import remove_app
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
remove_app(app, dry_run, yes)
remove_app(app, dry_run, yes, no_backup)
finally:
frappe.destroy()
if not context.sites:

View file

@ -215,12 +215,12 @@ def export_doc(context, doctype, docname):
@pass_context
def export_json(context, doctype, path, name=None):
"Export doclist as json to the given path, use '-' as name for Singles."
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_json
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_json(doctype, path, name=name)
export_json(doctype, path, name=name)
finally:
frappe.destroy()
if not context.sites:
@ -232,12 +232,12 @@ def export_json(context, doctype, path, name=None):
@pass_context
def export_csv(context, doctype, path):
"Export data import template with data for DocType"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_csv
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_csv(doctype, path)
export_csv(doctype, path)
finally:
frappe.destroy()
if not context.sites:
@ -264,7 +264,7 @@ def export_fixtures(context, app=None):
@pass_context
def import_doc(context, path, force=False):
"Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import import_doc
if not os.path.exists(path):
path = os.path.join('..', path)
@ -276,7 +276,7 @@ def import_doc(context, path, force=False):
try:
frappe.init(site=site)
frappe.connect()
data_import.import_doc(path, overwrite=context.force)
import_doc(path, overwrite=context.force)
finally:
frappe.destroy()
if not context.sites:
@ -293,7 +293,7 @@ def import_doc(context, path, force=False):
@pass_context
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
"Import CSV using data import"
from frappe.core.doctype.data_import import importer
from frappe.core.doctype.data_import_legacy import importer
from frappe.utils.csvutils import read_csv_content
site = get_site(context)
@ -329,20 +329,12 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
"Import documents in bulk from CSV or XLSX using data import"
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.data_import import import_file
site = get_site(context)
frappe.init(site=site)
frappe.connect()
data_import = frappe.new_doc('Data Import Beta')
data_import.submit_after_import = submit_after_import
data_import.mute_emails = mute_emails
data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records'
i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True)
i.import_data()
import_file(doctype, file_path, import_type, submit_after_import, console=True)
frappe.destroy()
@ -502,7 +494,17 @@ def run_tests(context, app=None, module=None, doctype=None, test=(),
if coverage:
# Generate coverage report only for app that is being tested
source_path = os.path.join(get_bench_path(), 'apps', app or 'frappe')
cov = Coverage(source=[source_path], omit=['*.html', '*.js', '*.xml', '*.css', '*/doctype/*/*_dashboard.py', '*/patches/*'])
cov = Coverage(source=[source_path], omit=[
'*.html',
'*.js',
'*.xml',
'*.css',
'*.less',
'*.scss',
'*.vue',
'*/doctype/*/*_dashboard.py',
'*/patches/*'
])
cov.start()
ret = frappe.test_runner.main(app, module, doctype, context.verbose, tests=tests,

View file

@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", {
});
frm.refresh_field("links");
let numbers = frm.doc.phone_nos;
if (numbers && numbers.length && frappe.phone_call.handler) {
frm.add_custom_button(__('Call'), () => {
numbers = frm.doc.phone_nos
.sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no)
.map(d => d.phone);
frappe.phone_call.handler(numbers);
});
}
if (frm.doc.links) {
frappe.call({
method: "frappe.contacts.doctype.contact.contact.address_query",

View file

@ -444,24 +444,48 @@ def update_parent_document_on_communication(doc):
status_field = parent.meta.get_field("status")
if status_field:
options = (status_field.options or '').splitlines()
options = (status_field.options or "").splitlines()
# if status has a "Replied" option, then update the status for received communication
if ('Replied' in options) and doc.sent_or_received=="Received":
if ("Replied" in options) and doc.sent_or_received == "Received":
parent.db_set("status", "Open")
parent.run_method("handle_hold_time", "Replied")
apply_assignment_rule(parent)
else:
# update the modified date for document
parent.update_modified()
update_mins_to_first_communication(parent, doc)
parent.run_method('notify_communication', doc)
set_avg_response_time(parent, doc)
parent.run_method("notify_communication", doc)
parent.notify_update()
def update_mins_to_first_communication(parent, communication):
	"""Record, in minutes, how long the first system-user response took.

	Runs only once per document: skipped when ``mins_to_first_response`` is
	already set or the parent doctype has no such field.  Also stamps
	``first_responded_on`` for Sent communications when that field exists.
	"""
	if parent.meta.has_field("mins_to_first_response") and not parent.get("mins_to_first_response"):
		if is_system_user(communication.sender):
			first_responded_on = communication.creation
			if parent.meta.has_field("first_responded_on") and communication.sent_or_received == "Sent":
				parent.db_set("first_responded_on", first_responded_on)
			# Bug fix: the closing paren was misplaced -- ``round(x / 60), 2``
			# passed 2 as db_set's third positional argument instead of
			# rounding the minutes value to 2 decimal places.
			parent.db_set("mins_to_first_response",
				round(time_diff_in_seconds(first_responded_on, parent.creation) / 60, 2))
def set_avg_response_time(parent, communication):
	"""Store the average response time (seconds) on the parent document.

	A "response" is a Sent communication that immediately follows a Received
	one (communications ordered by creation); the mean of all positive
	Received->Sent gaps is written to ``avg_response_time``.  Only triggered
	by Sent communications, and only when the parent has the field.
	"""
	if parent.meta.has_field("avg_response_time") and communication.sent_or_received == "Sent":
		communications = frappe.get_list("Communication",
			filters={
				"reference_doctype": parent.doctype,
				"reference_name": parent.name
			},
			fields=["sent_or_received", "name", "creation"],
			order_by="creation"
		)
		response_times = []
		# Bug fix: the original iterated range(len(communications)) and read
		# communications[i - 1], so at i == 0 the negative index wrapped
		# around and compared against the LAST entry.  Pair consecutive
		# entries instead (zip is naturally empty for 0 or 1 records).
		for prev, curr in zip(communications, communications[1:]):
			if curr.sent_or_received == "Sent" and prev.sent_or_received == "Received":
				response_time = round(time_diff_in_seconds(curr.creation, prev.creation), 2)
				if response_time > 0:
					response_times.append(response_time)
		if response_times:
			parent.db_set("avg_response_time", sum(response_times) / len(response_times))

View file

@ -9,7 +9,7 @@ import frappe.permissions
import re, csv, os
from frappe.utils.csvutils import UnicodeWriter
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint
from frappe.core.doctype.data_import.importer import get_data_keys
from frappe.core.doctype.data_import_legacy.importer import get_data_keys
from six import string_types
from frappe.core.doctype.access_log.access_log import make_access_log

View file

@ -1 +0,0 @@
Bulk import / update of data via file upload in Excel or CSV.

View file

@ -0,0 +1,3 @@
/* Vertical gap between stacked warning blocks (presumably the Data Import
   template-warnings list rendered into .warnings — confirm against the
   form's markup). */
.warnings .warning {
margin-bottom: 40px;
}

View file

@ -1,324 +1,522 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import', {
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
frappe.call({
method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
setup(frm) {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import', frm.doc.name);
frappe.model.with_doc('Data Import', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
}),
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
frm.set_query('reference_doctype', () => {
return {
filters: {
name: ['in', frappe.boot.user.can_import]
}
};
});
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
frm.has_import_file = () => {
return frm.doc.import_file || frm.doc.google_sheets_url;
};
},
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
},
refresh: function(frm) {
onload_post_render(frm) {
frm.trigger('update_primary_action');
},
update_primary_action(frm) {
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && (frm.has_import_file())) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
},
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
});
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
download_template(frm) {
if (
frm.data_exporter &&
frm.data_exporter.doctype === frm.doc.reference_doctype
) {
frm.data_exporter.exporting_for = frm.doc.import_type;
frm.data_exporter.dialog.show();
} else {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype,
frm.doc.import_type
);
});
}
},
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import.data_import.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
} else {
$(frm.fields_dict.import_log.wrapper).empty();
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
google_sheets_url(frm) {
if (!frm.is_dirty()) {
frm.trigger('import_file');
} else {
frm.trigger('update_primary_action');
}
},
refresh_google_sheet(frm) {
frm.trigger('import_file');
},
import_file(frm) {
frm.toggle_display('section_import_preview', frm.has_import_file());
if (!frm.has_import_file()) {
frm.get_field('import_preview').$wrapper.empty();
return;
} else {
frm.trigger('update_primary_action');
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: {
data_import: frm.doc.name,
import_file: frm.doc.import_file,
google_sheets_url: frm.doc.google_sheets_url
},
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.remap_column = template_options.remap_column || {};
Object.assign(template_options.remap_column, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import.data_import.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
frm.save();
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="warning" data-row="${row_number}">
<h5 class="text-uppercase">${__('Row {0}', [row_number])}</h5>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="warning" data-col="${warning.col}">
<h5 class="text-uppercase">${header}</h5>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-10 warnings">${html}</div>
</div>
`);
},
only_update: function(frm) {
frm.save();
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
submit_after_import: function(frm) {
frm.save();
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
skip_errors: function(frm) {
frm.save();
},
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
ignore_encoding_errors: function(frm) {
frm.save();
},
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});
frappe.provide('frappe.data_import');

// Builds and returns the "Download Template" dialog for a Data Import form.
// The user picks which columns to export (All / Mandatory / Manually), a file
// type, and optionally existing data; Download POSTs to the data export
// endpoint, which streams the template file back.
frappe.data_import.download_dialog = function(frm) {
	var dialog;

	// Keep only exportable value fields (skip hidden and layout/table fields).
	const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
	const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);

	// All MultiCheck controls in the dialog (main doctype + child tables);
	// their fieldnames end with '_fields' by construction below.
	const get_doctype_checkbox_fields = () => {
		return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
			.map(df => dialog.fields_dict[df.fieldname]);
	};

	// Checkbox options for the main doctype. Mandatory fields (and
	// naming_series) are flagged reqd/danger so they stay selected.
	const doctype_fields = get_fields(frm.doc.reference_doctype)
		.map(df => {
			let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
			return {
				label: df.label,
				reqd: reqd,
				danger: reqd,
				value: df.fieldname,
				checked: 1
			};
		});

	let fields = [
		{
			"label": __("Select Columns"),
			"fieldname": "select_columns",
			"fieldtype": "Select",
			"options": "All\nMandatory\nManually",
			"reqd": 1,
			"onchange": function() {
				const fields = get_doctype_checkbox_fields();
				fields.map(f => f.toggle(true));
				if(this.value == 'Mandatory' || this.value == 'Manually') {
					// Start from an empty selection, then tick only the
					// required options by simulating a click on each.
					checkbox_toggle(true);
					fields.map(multicheck_field => {
						multicheck_field.options.map(option => {
							if(!option.reqd) return;
							$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
								.prop('checked', false)
								.trigger('click');
						});
					});
				} else if(this.value == 'All'){
					// Everything is exported, so lock the checkboxes.
					$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
						.prop('disabled', true);
				}
			}
		},
		{
			"label": __("File Type"),
			"fieldname": "file_type",
			"fieldtype": "Select",
			"options": "Excel\nCSV",
			"default": "Excel"
		},
		{
			// Only meaningful when updating existing records (overwrite).
			"label": __("Download with Data"),
			"fieldname": "with_data",
			"fieldtype": "Check",
			"hidden": !frm.doc.overwrite,
			"default": 1
		},
		{
			"label": __("Select All"),
			"fieldname": "select_all",
			"fieldtype": "Button",
			"depends_on": "eval:doc.select_columns=='Manually'",
			click: function() {
				checkbox_toggle();
			}
		},
		{
			"label": __("Unselect All"),
			"fieldname": "unselect_all",
			"fieldtype": "Button",
			"depends_on": "eval:doc.select_columns=='Manually'",
			click: function() {
				checkbox_toggle(true);
			}
		},
		{
			"label": frm.doc.reference_doctype,
			"fieldname": "doctype_fields",
			"fieldtype": "MultiCheck",
			"options": doctype_fields,
			"columns": 2,
			"hidden": 1
		}
	];

	// One MultiCheck per child table of the reference doctype.
	const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
		.map(df => {
			return {
				"label": df.options,
				"fieldname": df.fieldname + '_fields',
				"fieldtype": "MultiCheck",
				"options": frappe.meta.get_docfields(df.options)
					.filter(filter_fields)
					.map(df => ({
						label: df.label,
						reqd: df.reqd ? 1 : 0,
						value: df.fieldname,
						checked: 1,
						danger: df.reqd
					})),
				"columns": 2,
				"hidden": 1
			};
		});

	fields = fields.concat(child_table_fields);

	dialog = new frappe.ui.Dialog({
		title: __('Download Template'),
		fields: fields,
		primary_action: function(values) {
			var data = values;
			if (frm.doc.reference_doctype) {
				// Assemble the export request from the current checkbox state.
				var export_params = () => {
					let columns = {};
					if(values.select_columns) {
						// {doctype label: [checked fieldnames]}
						columns = get_doctype_checkbox_fields().reduce((columns, field) => {
							const options = field.get_checked_options();
							columns[field.df.label] = options;
							return columns;
						}, {});
					}
					return {
						doctype: frm.doc.reference_doctype,
						parent_doctype: frm.doc.reference_doctype,
						select_columns: JSON.stringify(columns),
						with_data: frm.doc.overwrite && data.with_data,
						all_doctypes: true,
						file_type: data.file_type,
						template: true
					};
				};
				// POST (not GET) so the browser downloads the generated file.
				let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
				open_url_post(get_template_url, export_params());
			} else {
				frappe.msgprint(__("Please select the Document Type."));
			}
			dialog.hide();
		},
		primary_action_label: __('Download')
	});

	// Lay the Select All / Unselect All buttons out side by side.
	$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
		.wrapAll('<div class="inline-buttons" />');
	const button_container = $(dialog.body).find('.inline-buttons');
	button_container.addClass('flex');
	$(button_container).find('.frappe-control').map((index, button) => {
		$(button).css({"margin-right": "1em"});
	});

	// Set every checkbox in every MultiCheck to `checked`, firing click
	// handlers so dependent UI state stays consistent.
	function checkbox_toggle(checked=false) {
		$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
			$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
		});
	}

	return dialog;
};

View file

@ -1,767 +1,192 @@
{
"allow_copy": 1,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"autoname": "",
"beta": 0,
"creation": "2016-12-09 14:27:32.720061",
"custom": 0,
"docstatus": 0,
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"html_5",
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 1,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "action",
"fieldname": "import_type",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Action",
"length": 0,
"no_copy": 0,
"options": "Insert new records\nUpdate records",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Insert new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Update records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Don't create new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Attach file for Import",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"in_list_view": 1,
"label": "Import File"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "column_break_4",
"fieldtype": "Column Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Generated File",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Skip rows with errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Submit after importing",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Ignore encoding errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"depends_on": "",
"fieldname": "no_email",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Do not send Emails",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"columns": 0,
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "import_status",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Status",
"length": 0,
"no_copy": 0,
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Show only errors",
"length": 0,
"no_copy": 1,
"permlevel": 0,
"precision": "",
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "",
"depends_on": "import_status",
"fieldname": "import_log",
"fieldname": "import_preview",
"fieldtype": "HTML",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"label": "Import Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "log_details",
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Log Details",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "amended_from",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Amended From",
"length": 0,
"no_copy": 1,
"options": "Data Import",
"permlevel": 0,
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "total_rows",
"fieldtype": "Int",
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Total Rows",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file",
"fieldname": "html_5",
"fieldtype": "HTML",
"options": "<h5 class=\"text-muted uppercase\">Or</h5>"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file\n",
"description": "Must be a publicly accessible Google Sheets URL",
"fieldname": "google_sheets_url",
"fieldtype": "Data",
"label": "Import from Google Sheets"
},
{
"depends_on": "eval:doc.google_sheets_url",
"fieldname": "refresh_google_sheet",
"fieldtype": "Button",
"label": "Refresh Google Sheet"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 1,
"issingle": 0,
"istable": 0,
"max_attachments": 1,
"modified": "2018-08-28 15:05:56.787108",
"hide_toolbar": 1,
"links": [],
"modified": "2020-06-18 16:05:54.211034",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"name_case": "",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"export": 1,
"print": 1,
"read": 1,
"report": 0,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 1,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "",
"track_changes": 1,
"track_seen": 1,
"track_views": 0
"track_changes": 1
}

View file

@ -1,54 +1,187 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
import os
import frappe
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import.importer import upload
from frappe.core.doctype.data_import.importer import Importer
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.utils.background_jobs import enqueue
from frappe.utils.csvutils import validate_google_sheets_url
from frappe import _
class DataImport(Document):
def autoname(self):
if not self.name:
self.name = "Import on " +format_datetime(self.creation)
def validate(self):
if not self.import_file:
self.db_set("total_rows", 0)
if self.import_status == "In Progress":
frappe.throw(_("Can't save the form as data import is in progress."))
doc_before_save = self.get_doc_before_save()
if (
not (self.import_file or self.google_sheets_url)
or (doc_before_save and doc_before_save.import_file != self.import_file)
or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url)
):
self.template_options = ""
self.template_warnings = ""
# validate the template just after the upload
# if there is total_rows in the doc, it means that the template is already validated and error free
if self.import_file and not self.total_rows:
upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
self.validate_import_file()
self.validate_google_sheets_url()
def validate_import_file(self):
if self.import_file:
# validate template
self.get_importer()
def validate_google_sheets_url(self):
if not self.google_sheets_url:
return
validate_google_sheets_url(self.google_sheets_url)
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
if import_file:
self.import_file = import_file
if google_sheets_url:
self.google_sheets_url = google_sheets_url
if not (self.import_file or self.google_sheets_url):
return
i = self.get_importer()
return i.get_data_for_import_preview()
def start_import(self):
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.scheduler import is_scheduler_inactive
if is_scheduler_inactive() and not frappe.flags.in_test:
frappe.throw(
_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
)
enqueued_jobs = [d.get("job_name") for d in get_info()]
if self.name not in enqueued_jobs:
enqueue(
start_import,
queue="default",
timeout=6000,
event="data_import",
job_name=self.name,
data_import=self.name,
now=frappe.conf.developer_mode or frappe.flags.in_test,
)
return True
return False
def export_errored_rows(self):
return self.get_importer().export_errored_rows()
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_importable_doctypes():
	# DocTypes the current user is allowed to import, as cached by the
	# permission engine under the "can_import" key.
	return frappe.cache().hget("can_import", frappe.session.user)
def get_preview_from_template(data_import, import_file=None, google_sheets_url=None):
	# Thin wrapper: delegate preview generation to the named Data Import
	# document (optionally pointing it at a new file / sheet URL first).
	return frappe.get_doc("Data Import", data_import).get_preview_from_template(
		import_file, google_sheets_url
	)
@frappe.whitelist()
def import_data(data_import):
frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False)
frappe.publish_realtime("data_import_progress", {"progress": "0",
"data_import": data_import, "reload": True}, user=frappe.session.user)
from frappe.core.page.background_jobs.background_jobs import get_info
enqueued_jobs = [d.get("job_name") for d in get_info()]
if data_import not in enqueued_jobs:
enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
def form_start_import(data_import):
	# Called from the form UI; enqueues the import as a background job and
	# returns True if it was enqueued, False if one is already queued.
	return frappe.get_doc("Data Import", data_import).start_import()
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
insert=False, submit=False, pre_process=None):
def start_import(data_import):
	"""This method runs in background job"""
	data_import = frappe.get_doc("Data Import", data_import)
	try:
		i = Importer(data_import.reference_doctype, data_import=data_import)
		i.import_data()
	except Exception:
		# Roll back partial writes, mark the document as errored and keep
		# the traceback in the Error Log for inspection.
		frappe.db.rollback()
		data_import.db_set("status", "Error")
		frappe.log_error(title=data_import.name)
	finally:
		frappe.flags.in_import = False

	# Tell any open form views to refresh and show the final status.
	frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
	doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
	"""
	Download template from Exporter
		:param doctype: Document Type
		:param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
		:param export_records=None: One of 'all', 'by_filter', 'blank_template'
		:param export_filters: Filter dict
		:param file_type: File type to export into
	"""

	export_fields = frappe.parse_json(export_fields)
	export_filters = frappe.parse_json(export_filters)
	# A blank template carries headers only; any other mode includes data.
	export_data = export_records != "blank_template"

	e = Exporter(
		doctype,
		export_fields=export_fields,
		export_data=export_data,
		export_filters=export_filters,
		file_type=file_type,
		# "5_records" limits the export to a 5-row sample; None exports all.
		export_page_length=5 if export_records == "5_records" else None,
	)
	# Writes the generated file into the HTTP response.
	e.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
	# Stream back a template containing only the rows that failed to import,
	# so the user can fix and re-import just those.
	data_import = frappe.get_doc("Data Import", data_import_name)
	data_import.export_errored_rows()
def import_file(
	doctype, file_path, import_type, submit_after_import=False, console=False
):
	"""
	Import documents in from CSV or XLSX using data import.

	:param doctype: DocType to import
	:param file_path: Path to .csv, .xls, or .xlsx file to import
	:param import_type: One of "Insert" or "Update"
	:param submit_after_import: Whether to submit documents after import
	:param console: Set to true if this is to be used from command line. Will print errors or progress to stdout.
	"""
	data_import = frappe.new_doc("Data Import")
	data_import.submit_after_import = submit_after_import
	# Map the short CLI spelling to the canonical import_type option value.
	data_import.import_type = (
		"Insert New Records" if import_type.lower() == "insert" else "Update Existing Records"
	)

	i = Importer(
		doctype=doctype, file_path=file_path, data_import=data_import, console=console
	)
	i.import_data()
##############
def import_doc(
path,
overwrite=False,
ignore_links=False,
ignore_insert=False,
insert=False,
submit=False,
pre_process=None,
):
if os.path.isdir(path):
files = [os.path.join(path, f) for f in os.listdir(path)]
else:
@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
for f in files:
if f.endswith(".json"):
frappe.flags.mute_emails = True
frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
frappe.modules.import_file.import_file_by_path(
f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True
)
frappe.flags.mute_emails = False
frappe.db.commit()
elif f.endswith(".csv"):
import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
import_file_by_path(
f,
ignore_links=ignore_links,
overwrite=overwrite,
submit=submit,
pre_process=pre_process,
)
frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
from frappe.utils.csvutils import read_csv_content
print("Importing " + path)
with open(path, "r") as infile:
upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
submit_after_import=submit, pre_process=pre_process)
def import_file_by_path(
	path,
	ignore_links=False,
	overwrite=False,
	submit=False,
	pre_process=None,
	no_email=True,
):
	"""Deprecated CSV import entry point.

	Kept only so old callers fail loudly when handed a .csv file; all other
	paths are silently ignored (legacy parameters are accepted but unused).

	:raises Exception: always, when ``path`` ends with ``.csv``
	"""
	# Guard clause: only CSV files trigger the deprecation failure.
	if not path.endswith(".csv"):
		return

	print()
	print("This method is deprecated.")
	print('Import CSV files using the command "bench --site sitename data-import"')
	print("Or use the method frappe.core.doctype.data_import.data_import.import_file")
	print()
	raise Exception("Method deprecated")
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
def export_json(
doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"
):
def post_process(out):
del_keys = ('modified_by', 'creation', 'owner', 'idx')
del_keys = ("modified_by", "creation", "owner", "idx")
for doc in out:
for key in del_keys:
if key in doc:
@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
for k, v in doc.items():
if isinstance(v, list):
for child in v:
for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
for key in del_keys + ("docstatus", "doctype", "modified", "name"):
if key in child:
del child[key]
@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
elif frappe.db.get_value("DocType", doctype, "issingle"):
out.append(frappe.get_doc(doctype).as_dict())
else:
for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
for doc in frappe.get_all(
doctype,
fields=["name"],
filters=filters,
or_filters=or_filters,
limit_page_length=0,
order_by=order_by,
):
out.append(frappe.get_doc(doctype, doc.name).as_dict())
post_process(out)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
path = os.path.join('..', path)
path = os.path.join("..", path)
with open(path, "w") as outfile:
outfile.write(frappe.as_json(out))
@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
def export_csv(doctype, path):
	"""Export the data-export template for `doctype` (including data) and
	write it as UTF-8 encoded CSV to the file at `path`."""
	from frappe.core.doctype.data_export.exporter import export_data

	with open(path, "wb") as outfile:
		# export_data places the CSV text in frappe.response.result
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		outfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
	"""Export all records of `doctype` as a JSON fixture file into `app`'s
	``fixtures`` directory (named after the scrubbed doctype).

	:param doctype: DocType whose records should be exported
	:param app: App whose fixtures directory receives the file
	:raises frappe.PermissionError: if the session user is not Administrator
	"""
	if frappe.session.user != "Administrator":
		raise frappe.PermissionError

	# makedirs(exist_ok=True) avoids the check-then-create race of
	# os.path.exists + os.mkdir and also creates missing parents.
	os.makedirs(frappe.get_app_path(app, "fixtures"), exist_ok=True)

	export_json(
		doctype,
		frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"),
		order_by="name asc",
	)

View file

@ -1,31 +1,40 @@
let imports_in_progress = [];
frappe.listview_settings['Data Import'] = {
add_fields: ["import_status"],
has_indicator_for_draft: 1,
get_indicator: function(doc) {
let status = {
'Successful': [__("Success"), "green", "import_status,=,Successful"],
'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
'Failed': [__("Failed"), "red", "import_status,=,Failed"],
'Pending': [__("Pending"), "orange", "import_status,=,"]
}
if (doc.import_status) {
return status[doc.import_status];
}
if (doc.docstatus == 0) {
return status['Pending'];
}
return status['Pending'];
},
onload(listview) {
listview.page.set_title_sub(`
<span class="indicator blue">
<a class="text-muted" href="#List/Data Import Beta">${__('Try the new Data Import')}</a>
</span>
`);
}
frappe.realtime.on('data_import_progress', data => {
if (!imports_in_progress.includes(data.data_import)) {
imports_in_progress.push(data.data_import);
}
});
frappe.realtime.on('data_import_refresh', data => {
imports_in_progress = imports_in_progress.filter(
d => d !== data.data_import
);
listview.refresh();
});
},
get_indicator: function(doc) {
var colors = {
'Pending': 'orange',
'Partial Success': 'orange',
'Success': 'green',
'In Progress': 'orange',
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {
import_type(value) {
return {
'Insert New Records': __('Insert'),
'Update Existing Records': __('Update')
}[value];
}
},
hide_name_column: true
};

View file

@ -0,0 +1,257 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import (
display_fieldtypes,
no_value_fields,
table_fields as table_fieldtypes,
)
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
class Exporter:
	"""Builds an importable CSV/XLSX template for a DocType — header row plus
	(optionally) existing records, with child-table rows laid out on
	continuation rows — in the format expected by the Data Import Importer."""

	def __init__(
		self,
		doctype,
		export_fields=None,
		export_data=False,
		export_filters=None,
		export_page_length=None,
		file_type="CSV",
	):
		"""
		Exports records of a DocType for use with Importer
			:param doctype: Document Type to export
			:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
			:param export_data=False: Whether to export data as well
			:param export_filters=None: The filters (dict or list) which is used to query the records
			:param export_page_length=None: Limit on the number of parent records fetched (None = no limit)
			:param file_type: One of 'Excel' or 'CSV'
		"""
		self.doctype = doctype
		self.meta = frappe.get_meta(doctype)
		self.export_fields = export_fields
		self.export_filters = export_filters
		self.export_page_length = export_page_length
		self.file_type = file_type

		# this will contain the csv content
		self.csv_array = []

		# fields that get exported
		self.exportable_fields = self.get_all_exportable_fields()
		self.fields = self.serialize_exportable_fields()
		self.add_header()

		if export_data:
			self.data = self.get_data_to_export()
		else:
			self.data = []
		self.add_data()

	def get_all_exportable_fields(self):
		# Build a dict keyed by the parent doctype (for parent fields) or by the
		# child table's fieldname (for child fields), each mapping to the list of
		# exportable DocFields selected in self.export_fields.
		# NOTE(review): assumes self.export_fields is a dict here — the 'All'/'Mandatory'
		# string forms mentioned in the __init__ docstring would fail on .items(); confirm callers.
		child_table_fields = [
			df.fieldname for df in self.meta.fields if df.fieldtype in table_fieldtypes
		]
		meta = frappe.get_meta(self.doctype)
		exportable_fields = frappe._dict({})

		for key, fieldnames in self.export_fields.items():
			if key == self.doctype:
				# parent fields
				exportable_fields[key] = self.get_exportable_fields(key, fieldnames)
			elif key in child_table_fields:
				# child fields
				child_df = meta.get_field(key)
				child_doctype = child_df.options
				exportable_fields[key] = self.get_exportable_fields(child_doctype, fieldnames)

		return exportable_fields

	def serialize_exportable_fields(self):
		# Flatten self.exportable_fields into one ordered list of df dicts;
		# child fields are tagged with is_child_table_field / child_table_df so
		# headers and data rows can tell them apart from parent fields.
		fields = []
		for key, exportable_fields in self.exportable_fields.items():
			for _df in exportable_fields:
				# make a copy of df dict to avoid reference mutation
				if isinstance(_df, frappe.core.doctype.docfield.docfield.DocField):
					df = _df.as_dict()
				else:
					df = _df.copy()
				df.is_child_table_field = key != self.doctype
				if df.is_child_table_field:
					df.child_table_df = self.meta.get_field(key)
				fields.append(df)
		return fields

	def get_exportable_fields(self, doctype, fieldnames):
		# Resolve fieldnames to DocFields on `doctype`, dropping display-only and
		# no-value fieldtypes; a synthetic "name" (ID) field is prepended on request.
		meta = frappe.get_meta(doctype)

		def is_exportable(df):
			return df and df.fieldtype not in (display_fieldtypes + no_value_fields)

		# add name field
		name_field = frappe._dict(
			{
				"fieldtype": "Data",
				"fieldname": "name",
				"label": "ID",
				"reqd": 1,
				"parent": doctype,
			}
		)

		fields = [meta.get_field(fieldname) for fieldname in fieldnames]
		fields = [df for df in fields if is_exportable(df)]

		if "name" in fieldnames:
			fields = [name_field] + fields

		return fields or []

	def get_data_to_export(self):
		# Turn the fetched docs into flat spreadsheet rows: one row per parent,
		# with extra continuation rows when a child table has more rows than one.
		frappe.permissions.can_export(self.doctype, raise_exception=True)

		data_to_export = []
		table_fields = [f for f in self.exportable_fields if f != self.doctype]
		data = self.get_data_as_docs()

		for doc in data:
			rows = []
			rows = self.add_data_row(self.doctype, None, doc, rows, 0)

			if table_fields:
				# add child table data
				for f in table_fields:
					for i, child_row in enumerate(doc[f]):
						table_df = self.meta.get_field(f)
						child_doctype = table_df.options
						rows = self.add_data_row(child_doctype, child_row.parentfield, child_row, rows, i)

			data_to_export += rows
		return data_to_export

	def add_data_row(self, doctype, parentfield, doc, rows, row_idx):
		# Ensure a row exists at row_idx, then fill in only the columns that
		# belong to `doctype` (and, for child rows, to this parentfield).
		if len(rows) < row_idx + 1:
			rows.append([""] * len(self.fields))
		row = rows[row_idx]

		for i, df in enumerate(self.fields):
			if df.parent == doctype:
				if df.is_child_table_field and df.child_table_df.fieldname != parentfield:
					continue
				row[i] = doc.get(df.fieldname, "")
		return rows

	def get_data_as_docs(self):
		# Fetch parent records (plus "name") and all matching child-table rows,
		# then merge children onto their parents. Nested-set doctypes are ordered
		# by lft so the tree structure survives a round-trip import.
		def format_column_name(df):
			return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)

		filters = self.export_filters

		if self.meta.is_nested_set():
			order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
		else:
			order_by = "`tab{0}`.`creation` DESC".format(self.doctype)

		parent_fields = [
			format_column_name(df) for df in self.fields if df.parent == self.doctype
		]
		parent_data = frappe.db.get_list(
			self.doctype,
			filters=filters,
			fields=["name"] + parent_fields,
			limit_page_length=self.export_page_length,
			order_by=order_by,
			as_list=0,
		)
		parent_names = [p.name for p in parent_data]

		child_data = {}
		for key in self.exportable_fields:
			if key == self.doctype:
				continue
			child_table_df = self.meta.get_field(key)
			child_table_doctype = child_table_df.options
			# set() dedupes columns; name/idx/parent/parentfield are always needed
			# to order and re-attach child rows.
			child_fields = ["name", "idx", "parent", "parentfield"] + list(
				set(
					[format_column_name(df) for df in self.fields if df.parent == child_table_doctype]
				)
			)
			data = frappe.db.get_list(
				child_table_doctype,
				filters={
					"parent": ("in", parent_names),
					"parentfield": child_table_df.fieldname,
					"parenttype": self.doctype,
				},
				fields=child_fields,
				order_by="idx asc",
				as_list=0,
			)
			child_data[key] = data

		return self.merge_data(parent_data, child_data)

	def merge_data(self, parent_data, child_data):
		# Attach each child-row list to its parent doc under the table fieldname.
		for doc in parent_data:
			for table_field, table_rows in child_data.items():
				doc[table_field] = [row for row in table_rows if row.parent == doc.name]
		return parent_data

	def add_header(self):
		# Header labels: parent fields use the bare label; child fields get the
		# child table's label in parentheses. Duplicate labels fall back to
		# fieldname-based forms so every column header is unique.
		header = []
		for df in self.fields:
			is_parent = not df.is_child_table_field
			if is_parent:
				label = df.label
			else:
				label = "{0} ({1})".format(df.label, df.child_table_df.label)

			if label in header:
				# this label is already in the header,
				# which means two fields with the same label
				# add the fieldname to avoid clash
				if is_parent:
					label = "{0}".format(df.fieldname)
				else:
					label = "{0}.{1}".format(df.child_table_df.fieldname, df.fieldname)

			header.append(label)
		self.csv_array.append(header)

	def add_data(self):
		# Append the data rows below the header.
		self.csv_array += self.data

	def get_csv_array(self):
		return self.csv_array

	def get_csv_array_for_export(self):
		csv_array = self.csv_array
		if not self.data:
			# add 2 empty rows
			csv_array += [[]] * 2
		return csv_array

	def build_response(self):
		# Dispatch to the response builder matching the requested file type.
		if self.file_type == "CSV":
			self.build_csv_response()
		elif self.file_type == "Excel":
			self.build_xlsx_response()

	def build_csv_response(self):
		build_csv_response(self.get_csv_array_for_export(), self.doctype)

	def build_xlsx_response(self):
		build_xlsx_response(self.get_csv_array_for_export(), self.doctype)

View file

@ -1,267 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import display_fieldtypes, no_value_fields, table_fields
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
from .importer_new import INVALID_VALUES
class Exporter:
def __init__(
self,
doctype,
export_fields=None,
export_data=False,
export_filters=None,
export_page_length=None,
file_type="CSV",
):
"""
Exports records of a DocType for use with Importer
:param doctype: Document Type to export
:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
:param export_data=False: Whether to export data as well
:param export_filters=None: The filters (dict or list) which is used to query the records
:param file_type: One of 'Excel' or 'CSV'
"""
self.doctype = doctype
self.meta = frappe.get_meta(doctype)
self.export_fields = export_fields
self.export_filters = export_filters
self.export_page_length = export_page_length
self.file_type = file_type
# this will contain the csv content
self.csv_array = []
# fields that get exported
# can be All, Mandatory or User Selected Fields
self.fields = self.get_all_exportable_fields()
self.add_header()
if export_data:
self.data = self.get_data_to_export()
else:
self.data = []
self.add_data()
def get_all_exportable_fields(self):
return self.get_exportable_parent_fields() + self.get_exportable_children_fields()
def get_exportable_parent_fields(self):
parent_fields = self.get_exportable_fields(self.doctype)
# if autoname is based on field
# then merge ID and the field column title as "ID (Autoname Field)"
autoname = self.meta.autoname
if autoname and autoname.startswith("field:"):
fieldname = autoname[len("field:") :]
autoname_field = self.meta.get_field(fieldname)
if autoname_field:
name_field = parent_fields[0]
name_field.label = "ID ({})".format(autoname_field.label)
# remove the autoname field as it is a duplicate of ID field
parent_fields = [
df for df in parent_fields if df.fieldname != autoname_field.fieldname
]
return parent_fields
def get_exportable_children_fields(self):
child_table_fields = [df for df in self.meta.fields if df.fieldtype in table_fields]
if self.export_fields == "Mandatory":
child_table_fields = [df for df in child_table_fields if df.reqd]
children = [df.options for df in child_table_fields]
children_fields = []
for child in children:
children_fields += self.get_exportable_fields(child)
return children_fields
def get_exportable_fields(self, doctype):
meta = frappe.get_meta(doctype)
def is_exportable(df):
return df and df.fieldtype not in (display_fieldtypes + no_value_fields)
# filter out invalid fieldtypes
all_fields = [df for df in meta.fields if is_exportable(df)]
# add name field
name_field = frappe._dict(
{
"fieldtype": "Data",
"fieldname": "name",
"label": "ID",
"reqd": 1,
"parent": doctype,
}
)
all_fields = [name_field] + all_fields
if self.export_fields == "Mandatory":
fields = [df for df in all_fields if df.reqd]
if self.export_fields == "All":
fields = list(all_fields)
elif isinstance(self.export_fields, dict):
fields_to_export = self.export_fields.get(doctype, [])
fields = [meta.get_field(fieldname) for fieldname in fields_to_export]
fields = [df for df in fields if is_exportable(df)]
if 'name' in fields_to_export:
fields = [name_field] + fields
return fields or []
def get_data_to_export(self):
frappe.permissions.can_export(self.doctype, raise_exception=True)
def get_column_name(df):
return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)
fields = [get_column_name(df) for df in self.fields]
filters = self.export_filters
if self.meta.is_nested_set():
order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
else:
order_by = "`tab{0}`.`creation` DESC".format(self.doctype)
data = frappe.db.get_list(
self.doctype,
filters=filters,
fields=fields,
limit_page_length=self.export_page_length,
order_by=order_by,
as_list=1,
)
data = self.remove_duplicate_values(data)
data = self.remove_row_gaps(data)
data = self.remove_empty_rows(data)
# data = self.remove_values_from_name_column(data)
return data
def remove_duplicate_values(self, data):
out = []
doctypes = set([df.parent for df in self.fields])
def name_exists_in_column_before_row(name, column_index, row_index):
column_values = [row[column_index] for i, row in enumerate(data) if i < row_index]
return name in column_values
for i, row in enumerate(data):
# first row is fine
if i == 0:
out.append(row)
continue
row = list(row)
for doctype in doctypes:
name_index = self.get_name_column_index(doctype)
name = row[name_index]
column_indexes = self.get_column_indexes(doctype)
if name_exists_in_column_before_row(name, name_index, i):
# remove the values from the row
row = [None if i in column_indexes else d for i, d in enumerate(row)]
out.append(row)
return out
def remove_row_gaps(self, data):
doctypes = set([df.parent for df in self.fields if df.parent != self.doctype])
def get_nearest_empty_row_index(col_index, row_index):
col_values = [row[col_index] for row in data]
i = row_index - 1
while not col_values[i]:
i = i - 1
out = i + 1
if row_index != out:
return out
for i, row in enumerate(data):
# if this is the row that contains parent values then skip
if row[0]:
continue
for doctype in doctypes:
name_index = self.get_name_column_index(doctype)
name = row[name_index]
column_indexes = self.get_column_indexes(doctype)
if not name:
continue
row_index = get_nearest_empty_row_index(name_index, i)
if row_index:
for col_index in column_indexes:
data[row_index][col_index] = row[col_index]
row[col_index] = None
return data
# pylint: disable=R0201
def remove_empty_rows(self, data):
return [row for row in data if any(v not in INVALID_VALUES for v in row)]
def remove_values_from_name_column(self, data):
out = []
name_columns = [i for i, df in enumerate(self.fields) if df.fieldname == "name"]
for row in data:
out.append(["" if i in name_columns else value for i, value in enumerate(row)])
return out
def get_name_column_index(self, doctype):
for i, df in enumerate(self.fields):
if df.parent == doctype and df.fieldname == "name":
return i
return -1
def get_column_indexes(self, doctype):
return [i for i, df in enumerate(self.fields) if df.parent == doctype]
def add_header(self):
def get_label(df):
if df.parent == self.doctype:
return df.label
else:
return "{0} ({1})".format(df.label, df.parent)
header = [get_label(df) for df in self.fields]
self.csv_array.append(header)
def add_data(self):
self.csv_array += self.data
def get_csv_array(self):
return self.csv_array
def get_csv_array_for_export(self):
csv_array = self.csv_array
if not self.data:
# add 2 empty rows
csv_array += [[]] * 2
return csv_array
def build_response(self):
if self.file_type == 'CSV':
self.build_csv_response()
elif self.file_type == 'Excel':
self.build_xlsx_response()
def build_csv_response(self):
build_csv_response(self.get_csv_array_for_export(), self.doctype)
def build_xlsx_response(self):
build_xlsx_response(self.get_csv_array_for_export(), self.doctype)

View file

@ -0,0 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 ,child title again 2 ,22-09-2021 , ,
Test 2 ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 3 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Can't render this file because it contains an unexpected character in line 2 and column 54.

View file

@ -0,0 +1,2 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test 26 ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
Can't render this file because it contains an unexpected character in line 2 and column 56.

View file

@ -0,0 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test 5 ,test description ,1 ,2 ,"" , ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 , ,22-09-2021 , ,
,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 4 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Can't render this file because it contains an unexpected character in line 2 and column 55.

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,23 +0,0 @@
/* eslint-disable */
// rename this file from _test_[name] to test_[name] to activate
// and remove above this line
QUnit.test("test: Data Import", function (assert) {
let done = assert.async();
// number of asserts
assert.expect(1);
frappe.run_serially([
// insert a new Data Import
() => frappe.tests.make('Data Import', [
// values to be set
{key: 'value'}
]),
() => {
assert.equal(cur_frm.doc.key, 'value');
},
() => done()
]);
});

View file

@ -1,100 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and Contributors
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe, unittest
from frappe.core.doctype.data_export import exporter
from frappe.core.doctype.data_import import importer
from frappe.utils.csvutils import read_csv_content
# import frappe
import unittest
class TestDataImport(unittest.TestCase):
def test_export(self):
exporter.export_data("User", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
self.assertTrue(content[1][1], "User")
def test_export_with_data(self):
exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
content = read_csv_content(frappe.response.result)
self.assertTrue(content[1][1], "User")
self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
def test_export_with_all_doctypes(self):
exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True)
content = read_csv_content(frappe.response.result)
self.assertTrue(content[1][1], "User")
self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
self.assertEqual(content[13][0], "DocType:")
self.assertEqual(content[13][1], "User")
self.assertTrue("Has Role" in content[13])
def test_import(self):
if frappe.db.exists("Blog Category", "test-category"):
frappe.delete_doc("Blog Category", "test-category")
exporter.export_data("Blog Category", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
content.append(["", "test-category", "Test Cateogry"])
importer.upload(content)
self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category")
# export with data
exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True)
content = read_csv_content(frappe.response.result)
# overwrite
content[-1][3] = "New Title"
importer.upload(content, overwrite=True)
self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title")
def test_import_only_children(self):
user_email = "test_import_userrole@example.com"
if frappe.db.exists("User", user_email):
frappe.delete_doc("User", user_email, force=True)
frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert()
exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
content.append(["", "test_import_userrole@example.com", "Blogger"])
importer.upload(content)
user = frappe.get_doc("User", user_email)
self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"}))
self.assertTrue(user.get("roles")[0].role, "Blogger")
# overwrite
exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
content.append(["", "test_import_userrole@example.com", "Website Manager"])
importer.upload(content, overwrite=True)
user = frappe.get_doc("User", user_email)
self.assertEqual(len(user.get("roles")), 1)
self.assertTrue(user.get("roles")[0].role, "Website Manager")
def test_import_with_children(self): #pylint: disable=R0201
if frappe.db.exists("Event", "EV00001"):
frappe.delete_doc("Event", "EV00001")
exporter.export_data("Event", all_doctypes="Yes", template=True)
content = read_csv_content(frappe.response.result)
content.append([None] * len(content[-2]))
content[-1][1] = "__Test Event with children"
content[-1][2] = "Private"
content[-1][3] = "2014-01-01 10:00:00.000000"
importer.upload(content)
frappe.get_doc("Event", {"subject":"__Test Event with children"})
def test_excel_import(self):
if frappe.db.exists("Event", "EV00001"):
frappe.delete_doc("Event", "EV00001")
exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel")
from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent)
content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0])
importer.upload(content)
self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name"))
pass

View file

@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.core.doctype.data_import.test_importer import (
create_doctype_if_not_exists,
)
doctype_name = 'DocType for Export'
class TestExporter(unittest.TestCase):
	"""Integration tests for data_import.exporter.Exporter against a
	throwaway custom DocType with three child tables."""

	def setUp(self):
		# Create the custom "DocType for Export" (and its child tables) if missing.
		create_doctype_if_not_exists(doctype_name)

	def test_exports_specified_fields(self):
		# Exporting an explicit field map should yield a deduplicated header row
		# plus one row per parent doc, with a continuation row for the second
		# table_field_1 child.
		if not frappe.db.exists(doctype_name, "Test"):
			doc = frappe.get_doc(
				doctype=doctype_name,
				title="Test",
				description="Test Description",
				table_field_1=[
					{"child_title": "Child Title 1", "child_description": "Child Description 1"},
					{"child_title": "Child Title 2", "child_description": "Child Description 2"},
				],
				table_field_2=[
					{"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"},
				],
				table_field_1_again=[
					{
						"child_title": "Child Title 1 Again",
						"child_description": "Child Description 1 Again",
					},
				],
			).insert()
		else:
			doc = frappe.get_doc(doctype_name, "Test")

		e = Exporter(
			doctype_name,
			export_fields={
				doctype_name: ["title", "description", "number", "another_number"],
				"table_field_1": ["name", "child_title", "child_description"],
				"table_field_2": ["child_2_date", "child_2_number"],
				"table_field_1_again": [
					"child_title",
					"child_date",
					"child_number",
					"child_another_number",
				],
			},
			export_data=True,
		)
		csv_array = e.get_csv_array()
		header_row = csv_array[0]

		# Duplicate labels fall back to fieldname-qualified headers
		# (e.g. "table_field_1_again.child_another_number").
		self.assertEqual(
			header_row,
			[
				"Title",
				"Description",
				"Number",
				"another_number",
				"ID (Table Field 1)",
				"Child Title (Table Field 1)",
				"Child Description (Table Field 1)",
				"Child 2 Date (Table Field 2)",
				"Child 2 Number (Table Field 2)",
				"Child Title (Table Field 1 Again)",
				"Child Date (Table Field 1 Again)",
				"Child Number (Table Field 1 Again)",
				"table_field_1_again.child_another_number",
			],
		)

		table_field_1_row_1_name = doc.table_field_1[0].name
		table_field_1_row_2_name = doc.table_field_1[1].name
		# fmt: off
		self.assertEqual(
			csv_array[1],
			["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0]
		)
		self.assertEqual(
			csv_array[2],
			["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""],
		)
		# fmt: on
		self.assertEqual(len(csv_array), 3)

	def test_export_csv_response(self):
		# build_response with file_type="CSV" should populate frappe.response
		# with CSV content for this doctype.
		e = Exporter(
			doctype_name,
			export_fields={doctype_name: ["title", "description"]},
			export_data=True,
			file_type="CSV"
		)
		e.build_response()

		self.assertTrue(frappe.response['result'])
		self.assertEqual(frappe.response['doctype'], doctype_name)
		self.assertEqual(frappe.response['type'], "csv")

View file

@ -1,40 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter_new import Exporter
class TestExporter(unittest.TestCase):
def test_exports_mandatory_fields(self):
e = Exporter('Web Page', export_fields='Mandatory')
csv_array = e.get_csv_array()
header_row = csv_array[0]
self.assertEqual(header_row, ['ID', 'Title'])
def test_exports_all_fields(self):
e = Exporter('Web Page', export_fields='All')
csv_array = e.get_csv_array()
header = csv_array[0]
self.assertEqual(len(header), 37)
def test_exports_selected_fields(self):
export_fields = {
'Web Page': ['title', 'route', 'published']
}
e = Exporter('Web Page', export_fields=export_fields)
csv_array = e.get_csv_array()
header = csv_array[0]
self.assertEqual(header, ['Title', 'Route', 'Published'])
def test_exports_data(self):
e = Exporter('ToDo', export_fields='All', export_data=True)
todo_records = frappe.db.count('ToDo')
csv_array = e.get_csv_array()
self.assertEqual(len(csv_array), todo_records + 1)

View file

@ -0,0 +1,183 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import getdate
doctype_name = 'DocType for Import'
class TestImporter(unittest.TestCase):
def setUp(self):
create_doctype_if_not_exists(doctype_name)
def test_data_import_from_file(self):
import_file = get_import_file('sample_import_file')
data_import = self.get_importer(doctype_name, import_file)
data_import.start_import()
doc1 = frappe.get_doc(doctype_name, 'Test')
doc2 = frappe.get_doc(doctype_name, 'Test 2')
doc3 = frappe.get_doc(doctype_name, 'Test 3')
self.assertEqual(doc1.description, 'test description')
self.assertEqual(doc1.number, 1)
self.assertEqual(doc1.table_field_1[0].child_title, 'child title')
self.assertEqual(doc1.table_field_1[0].child_description, 'child description')
self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2')
self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2')
self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child')
self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30'))
self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5)
self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again')
self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2')
self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22'))
self.assertEqual(doc2.description, 'test description 2')
self.assertEqual(doc3.another_number, 5)
def test_data_import_preview(self):
import_file = get_import_file('sample_import_file')
data_import = self.get_importer(doctype_name, import_file)
preview = data_import.get_preview_from_template()
self.assertEqual(len(preview.data), 4)
self.assertEqual(len(preview.columns), 15)
def test_data_import_without_mandatory_values(self):
import_file = get_import_file('sample_import_file_without_mandatory')
data_import = self.get_importer(doctype_name, import_file)
data_import.start_import()
data_import.reload()
warnings = frappe.parse_json(data_import.template_warnings)
self.assertEqual(warnings[0]['row'], 2)
self.assertEqual(warnings[0]['message'], "<b>Child Title (Table Field 1)</b> is a mandatory field")
self.assertEqual(warnings[1]['row'], 3)
self.assertEqual(warnings[1]['message'], "<b>Child Title (Table Field 1 Again)</b> is a mandatory field")
self.assertEqual(warnings[2]['row'], 4)
self.assertEqual(warnings[2]['message'], "<b>Title</b> is a mandatory field")
def test_data_import_update(self):
	"""'Update Existing Records' should overwrite fields of an existing doc."""
	# make sure the target document exists before updating it
	if not frappe.db.exists(doctype_name, 'Test 26'):
		frappe.get_doc(doctype=doctype_name, title='Test 26').insert()
	import_file = get_import_file('sample_import_file_for_update')
	importer = self.get_importer(doctype_name, import_file, update=True)
	importer.start_import()
	updated = frappe.get_doc(doctype_name, 'Test 26')
	self.assertEqual(updated.description, 'test description')
	self.assertEqual(updated.table_field_1[0].child_title, 'child title')
	self.assertEqual(updated.table_field_1[0].child_description, 'child description')
	self.assertEqual(updated.table_field_1_again[0].child_title, 'child title again')
def get_importer(self, doctype, import_file, update=False):
	"""Create and insert a Data Import document for the given file.

	`update=True` configures the import to update existing records
	instead of inserting new ones.
	"""
	import_type = 'Update Existing Records' if update else 'Insert New Records'
	data_import = frappe.new_doc('Data Import')
	data_import.import_type = import_type
	data_import.reference_doctype = doctype
	data_import.import_file = import_file.file_url
	data_import.insert()
	return data_import
def create_doctype_if_not_exists(doctype_name, force=False):
	"""Create the main test DocType and its two child tables (idempotent).

	With `force=True`, any existing copies are deleted first so the
	definitions are rebuilt from scratch.
	"""
	child_doctypes = ('Child 1 of ' + doctype_name, 'Child 2 of ' + doctype_name)

	if force:
		# delete the parent first, then the child tables
		for name in (doctype_name,) + child_doctypes:
			frappe.delete_doc_if_exists('DocType', name)

	if frappe.db.exists('DocType', doctype_name):
		return

	def make_child_table(name, label_prefix, fieldname_prefix):
		# Both child tables share the same five-field layout, differing
		# only in their label/fieldname prefixes.
		frappe.get_doc({
			'doctype': 'DocType',
			'name': name,
			'module': 'Custom',
			'custom': 1,
			'istable': 1,
			'fields': [
				{'label': label_prefix + ' Title', 'fieldname': fieldname_prefix + '_title', 'reqd': 1, 'fieldtype': 'Data'},
				{'label': label_prefix + ' Description', 'fieldname': fieldname_prefix + '_description', 'fieldtype': 'Small Text'},
				{'label': label_prefix + ' Date', 'fieldname': fieldname_prefix + '_date', 'fieldtype': 'Date'},
				{'label': label_prefix + ' Number', 'fieldname': fieldname_prefix + '_number', 'fieldtype': 'Int'},
				# NOTE(review): the label intentionally repeats 'Number' for the
				# second Int field, matching the original fixture definition
				{'label': label_prefix + ' Number', 'fieldname': fieldname_prefix + '_another_number', 'fieldtype': 'Int'},
			]
		}).insert()

	table_1_name, table_2_name = child_doctypes
	make_child_table(table_1_name, 'Child', 'child')
	make_child_table(table_2_name, 'Child 2', 'child_2')

	# Main Table
	frappe.get_doc({
		'doctype': 'DocType',
		'name': doctype_name,
		'module': 'Custom',
		'custom': 1,
		'autoname': 'field:title',
		'fields': [
			{'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'},
			{'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'},
			{'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'},
			{'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'},
			# NOTE(review): duplicated 'Number' label, as in the original
			{'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'},
			{'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name},
			{'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name},
			{'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name},
		],
		'permissions': [
			{'role': 'System Manager'}
		]
	}).insert()
def get_import_file(csv_file_name, force=False):
	"""Return a private File document for the given fixture CSV.

	Reuses an existing File of the same name unless `force=True`,
	in which case it is deleted and recreated from the fixture on disk.
	"""
	file_name = csv_file_name + '.csv'
	existing = frappe.db.exists('File', {'file_name': file_name})
	if force and existing:
		frappe.delete_doc_if_exists('File', existing)
		existing = None

	if existing:
		return frappe.get_doc('File', {'file_name': file_name})

	# create the File from the bundled fixture
	full_path = get_csv_file_path(file_name)
	file_doc = frappe.get_doc(
		doctype='File',
		content=frappe.read_file(full_path),
		file_name=file_name,
		is_private=1
	)
	file_doc.save(ignore_permissions=True)
	return file_doc
def get_csv_file_path(file_name):
	"""Absolute path of a fixture CSV bundled with the data_import module."""
	fixtures_dir = ('frappe', 'core', 'doctype', 'data_import', 'fixtures')
	return frappe.get_app_path(*fixtures_dir, file_name)

View file

@ -1,78 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import datetime
import unittest
import frappe
from frappe.core.doctype.data_import.importer_new import Importer
# CSV fixture: contains one completely empty row, which the importer must skip
content_empty_rows = '''title,start_date,idx,show_title
,,,
est phasellus sit amet,5/20/2019,52,1
nibh in,7/29/2019,77,1
'''

# CSV fixture: the only data row is missing the mandatory "title" value
content_mandatory_missing = '''title,start_date,idx,show_title
,5/20/2019,52,1
'''

# CSV fixture: values that require fieldtype conversion (bool/date/int)
content_convert_value = '''title,start_date,idx,show_title
est phasellus sit amet,5/20/2019,52,True
'''

# CSV fixture: has a column that does not exist on the target doctype
content_invalid_column = '''title,start_date,idx,show_title,invalid_column
est phasellus sit amet,5/20/2019,52,True,invalid value
'''
class TestImporter(unittest.TestCase):
	"""Unit tests for the beta Importer operating on raw CSV content."""

	def test_should_skip_empty_rows(self):
		importer = self.get_importer('Web Page', content=content_empty_rows)
		# flatten the row indexes across all payloads
		rows_to_import = [
			row[0]
			for payload in importer.get_payloads_for_import()
			for row in payload.rows
		]
		# the fixture has 3 rows, one of which is entirely empty
		self.assertEqual(len(rows_to_import), 2)

	def test_should_throw_if_mandatory_is_missing(self):
		importer = self.get_importer('Web Page', content=content_mandatory_missing)
		importer.import_data()
		first_warning = importer.warnings[0]
		self.assertIn('Title is a mandatory field', first_warning['message'])

	def test_should_convert_value_based_on_fieldtype(self):
		importer = self.get_importer('Web Page', content=content_convert_value)
		doc = importer.get_payloads_for_import()[0].doc
		# exact type checks (not isinstance) to mirror the importer's coercion
		self.assertEqual(type(doc['show_title']), int)
		self.assertEqual(type(doc['idx']), int)
		self.assertEqual(type(doc['start_date']), datetime.datetime)

	def test_should_ignore_invalid_columns(self):
		importer = self.get_importer('Web Page', content=content_invalid_column)
		doc = importer.get_payloads_for_import()[0].doc
		self.assertNotIn('invalid_column', doc)
		self.assertIn('title', doc)

	def test_should_import_valid_template(self):
		# randomised title so repeated runs don't collide on unique names
		title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8))
		content_valid_content = '''title,start_date,idx,show_title
{0},5/20/2019,52,1'''.format(title)
		importer = self.get_importer('Web Page', content=content_valid_content)
		first_log = importer.import_data()[0]
		self.assertTrue(first_log.success)
		doc = frappe.get_doc('Web Page', { 'title': title })
		self.assertEqual(
			frappe.utils.get_datetime_str(doc.start_date),
			frappe.utils.get_datetime_str('2019-05-20'),
		)

	def get_importer(self, doctype, content):
		"""Build an Importer wired to an unsaved Data Import Beta doc."""
		data_import = frappe.new_doc('Data Import Beta')
		data_import.import_type = 'Insert New Records'
		return Importer(doctype, content=content, data_import=data_import)

View file

@ -1,511 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Beta', {
	setup(frm) {
		// Server pushes this event when a queued import finishes;
		// reload the document so status/log fields are fresh.
		frappe.realtime.on('data_import_refresh', ({ data_import }) => {
			frm.import_in_progress = false;
			if (data_import !== frm.doc.name) return;
			frappe.model.clear_doc('Data Import Beta', frm.doc.name);
			frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
				frm.refresh();
			});
		});
		// Live progress stream: data carries {data_import, current, total,
		// eta, success|skipping}; rendered as a dashboard progress bar.
		frappe.realtime.on('data_import_progress', data => {
			frm.import_in_progress = true;
			if (data.data_import !== frm.doc.name) {
				return;
			}
			let percent = Math.floor((data.current * 100) / data.total);
			let seconds = Math.floor(data.eta);
			let minutes = Math.floor(data.eta / 60);
			let eta_message =
				// prettier-ignore
				seconds < 60
					? __('About {0} seconds remaining', [seconds])
					: minutes === 1
						? __('About {0} minute remaining', [minutes])
						: __('About {0} minutes remaining', [minutes]);
			let message;
			if (data.success) {
				let message_args = [data.current, data.total, eta_message];
				message =
					frm.doc.import_type === 'Insert New Records'
						? __('Importing {0} of {1}, {2}', message_args)
						: __('Updating {0} of {1}, {2}', message_args);
			}
			if (data.skipping) {
				message = __('Skipping {0} of {1}, {2}', [
					data.current,
					data.total,
					eta_message
				]);
			}
			frm.dashboard.show_progress(__('Import Progress'), percent, message);
			frm.page.set_indicator(__('In Progress'), 'orange');
			// hide progress when complete
			if (data.current === data.total) {
				setTimeout(() => {
					frm.dashboard.hide();
					frm.refresh();
				}, 2000);
			}
		});
		// only doctypes flagged as importable can be selected
		frm.set_query('reference_doctype', () => {
			return {
				filters: {
					allow_import: 1
				}
			};
		});
		// restrict the upload control to spreadsheet formats
		frm.get_field('import_file').df.options = {
			restrictions: {
				allowed_file_types: ['.csv', '.xls', '.xlsx']
			}
		};
	},
	refresh(frm) {
		// Re-render all derived UI (indicators, preview, logs, warnings)
		// every time the form refreshes.
		frm.page.hide_icon_group();
		frm.trigger('update_indicators');
		frm.trigger('import_file');
		frm.trigger('show_import_log');
		frm.trigger('show_import_warnings');
		frm.trigger('toggle_submit_after_import');
		frm.trigger('show_import_status');
		frm.trigger('show_report_error_button');
		// offer re-export of only the failed rows after a partial import
		if (frm.doc.status === 'Partial Success') {
			frm.add_custom_button(__('Export Errored Rows'), () =>
				frm.trigger('export_errored_rows')
			);
		}
		// matches both 'Success' and 'Partial Success'
		if (frm.doc.status.includes('Success')) {
			frm.add_custom_button(
				__('Go to {0} List', [frm.doc.reference_doctype]),
				() => frappe.set_route('List', frm.doc.reference_doctype)
			);
		}
		frm.disable_save();
		// primary action: Start Import / Retry when a file is attached,
		// otherwise plain Save
		if (frm.doc.status !== 'Success') {
			if (!frm.is_new() && frm.doc.import_file) {
				let label =
					frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
				frm.page.set_primary_action(label, () => frm.events.start_import(frm));
			} else {
				frm.page.set_primary_action(__('Save'), () => frm.save());
			}
		}
	},
	update_indicators(frm) {
		// status pill in the page header, derived from the doc's status
		const indicator = frappe.get_indicator(frm.doc);
		if (indicator) {
			frm.page.set_indicator(indicator[0], indicator[1]);
		} else {
			frm.page.clear_indicator();
		}
	},
	show_import_status(frm) {
		// Headline summarising how many records were imported/updated,
		// with singular/plural and partial-success variants.
		let import_log = JSON.parse(frm.doc.import_log || '[]');
		let successful_records = import_log.filter(log => log.success);
		let failed_records = import_log.filter(log => !log.success);
		if (successful_records.length === 0) return;
		let message;
		if (failed_records.length === 0) {
			let message_args = [successful_records.length];
			if (frm.doc.import_type === 'Insert New Records') {
				message =
					successful_records.length > 1
						? __('Successfully imported {0} records.', message_args)
						: __('Successfully imported {0} record.', message_args);
			} else {
				message =
					successful_records.length > 1
						? __('Successfully updated {0} records.', message_args)
						: __('Successfully updated {0} record.', message_args);
			}
		} else {
			let message_args = [successful_records.length, import_log.length];
			if (frm.doc.import_type === 'Insert New Records') {
				message =
					successful_records.length > 1
						? __('Successfully imported {0} records out of {1}.', message_args)
						: __('Successfully imported {0} record out of {1}.', message_args);
			} else {
				message =
					successful_records.length > 1
						? __('Successfully updated {0} records out of {1}.', message_args)
						: __('Successfully updated {0} record out of {1}.', message_args);
			}
		}
		frm.dashboard.set_headline(message);
	},
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
	start_import(frm) {
		// Ask the server to enqueue the import; r.message === true means a
		// background job was actually queued.
		frm
			.call({
				method: 'form_start_import',
				args: { data_import: frm.doc.name },
				btn: frm.page.btn_primary
			})
			.then(r => {
				if (r.message === true) {
					frm.disable_save();
				}
			});
	},
	download_template(frm) {
		// Reuse the cached DataExporter dialog when it targets the same
		// doctype; otherwise lazy-load the tools bundle and build one.
		if (
			frm.data_exporter &&
			frm.data_exporter.doctype === frm.doc.reference_doctype
		) {
			frm.data_exporter.dialog.show();
			set_export_records();
		} else {
			frappe.require('/assets/js/data_import_tools.min.js', () => {
				frm.data_exporter = new frappe.data_import.DataExporter(
					frm.doc.reference_doctype
				);
				set_export_records();
			});
		}
		function set_export_records() {
			// blank template for inserts, full data dump for updates
			if (frm.doc.import_type === 'Insert New Records') {
				frm.data_exporter.dialog.set_value('export_records', 'blank_template');
			} else {
				frm.data_exporter.dialog.set_value('export_records', 'all');
			}
			// Force ID field to be exported when updating existing records
			let id_field = frm.data_exporter.dialog.get_field(
				frm.doc.reference_doctype
			).options[0];
			if (id_field.value === 'name' && id_field.$checkbox) {
				id_field.$checkbox
					.find('input')
					.prop('disabled', frm.doc.import_type === 'Update Existing Records');
			}
		}
	},
	reference_doctype(frm) {
		frm.trigger('toggle_submit_after_import');
	},
	toggle_submit_after_import(frm) {
		// only submittable doctypes get the "Submit After Import" checkbox;
		// hide first, then show once the meta is loaded
		frm.toggle_display('submit_after_import', false);
		let doctype = frm.doc.reference_doctype;
		if (doctype) {
			frappe.model.with_doctype(doctype, () => {
				let meta = frappe.get_meta(doctype);
				frm.toggle_display('submit_after_import', meta.is_submittable);
			});
		}
	},
	import_file(frm) {
		// show the preview section only when a file is attached
		frm.toggle_display('section_import_preview', frm.doc.import_file);
		if (!frm.doc.import_file) {
			frm.get_field('import_preview').$wrapper.empty();
			return;
		}
		// load import preview
		frm.get_field('import_preview').$wrapper.empty();
		$('<span class="text-muted">')
			.html(__('Loading import file...'))
			.appendTo(frm.get_field('import_preview').$wrapper);
		frm
			.call({
				method: 'get_preview_from_template',
				args: { data_import: frm.doc.name },
				error_handlers: {
					TimestampMismatchError() {
						// ignore this error
					}
				}
			})
			.then(r => {
				let preview_data = r.message;
				frm.events.show_import_preview(frm, preview_data);
				frm.events.show_import_warnings(frm, preview_data);
			});
	},
	show_import_preview(frm, preview_data) {
		let import_log = JSON.parse(frm.doc.import_log || '[]');
		// reuse the existing preview widget when it matches the doctype
		if (
			frm.import_preview &&
			frm.import_preview.doctype === frm.doc.reference_doctype
		) {
			frm.import_preview.preview_data = preview_data;
			frm.import_preview.import_log = import_log;
			frm.import_preview.refresh();
			return;
		}
		frappe.require('/assets/js/data_import_tools.min.js', () => {
			frm.import_preview = new frappe.data_import.ImportPreview({
				wrapper: frm.get_field('import_preview').$wrapper,
				doctype: frm.doc.reference_doctype,
				preview_data,
				import_log,
				frm,
				events: {
					// user remapped a column in the preview: persist the new
					// mapping into template_options and re-render the preview
					remap_column(changed_map) {
						let template_options = JSON.parse(frm.doc.template_options || '{}');
						template_options.remap_column = template_options.remap_column || {};
						Object.assign(template_options.remap_column, changed_map);
						frm.set_value('template_options', JSON.stringify(template_options));
						frm.save().then(() => frm.trigger('import_file'));
					}
				}
			});
		});
	},
	export_errored_rows(frm) {
		// server streams back a template containing only the failed rows
		open_url_post(
			'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template',
			{
				data_import_name: frm.doc.name
			}
		);
	},
	show_import_warnings(frm, preview_data) {
		// combine warnings stored on the doc with those from the live preview
		let warnings = JSON.parse(frm.doc.template_warnings || '[]');
		warnings = warnings.concat(preview_data.warnings || []);
		frm.toggle_display('import_warnings_section', warnings.length > 0);
		if (warnings.length === 0) {
			frm.get_field('import_warnings').$wrapper.html('');
			return;
		}
		// group warnings by row
		let warnings_by_row = {};
		let other_warnings = [];
		for (let warning of warnings) {
			if (warning.row) {
				warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
				warnings_by_row[warning.row].push(warning);
			} else {
				other_warnings.push(warning);
			}
		}
		// row-level warnings first, one alert box per row
		let html = '';
		html += Object.keys(warnings_by_row)
			.map(row_number => {
				let message = warnings_by_row[row_number]
					.map(w => {
						if (w.field) {
							// qualify child-table fields with their parent doctype
							let label =
								w.field.label +
								(w.field.parent !== frm.doc.reference_doctype
									? ` (${w.field.parent})`
									: '');
							return `<li>${label}: ${w.message}</li>`;
						}
						return `<li>${w.message}</li>`;
					})
					.join('');
				return `
					<div class="alert border" data-row="${row_number}">
						<div class="uppercase">${__('Row {0}', [row_number])}</div>
						<div class="body"><ul>${message}</ul></div>
					</div>
				`;
			})
			.join('');
		// then column-level / general warnings
		html += other_warnings
			.map(warning => {
				let header = '';
				if (warning.col) {
					header = __('Column {0}', [warning.col]);
				}
				return `
					<div class="alert border" data-col="${warning.col}">
						<div class="uppercase">${header}</div>
						<div class="body">${warning.message}</div>
					</div>
				`;
			})
			.join('');
		frm.get_field('import_warnings').$wrapper.html(`
			<div class="row">
				<div class="col-sm-6 warnings text-muted">${html}</div>
			</div>
		`);
	},
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
	show_missing_link_values(frm, missing_link_values) {
		// missing_link_values: [{doctype, missing_values, has_one_mandatory_field}]
		// Offer automatic creation only when every linked doctype can be
		// created from a single mandatory field.
		let can_be_created_automatically = missing_link_values.every(
			d => d.has_one_mandatory_field
		);
		let html = missing_link_values
			.map(d => {
				let doctype = d.doctype;
				let values = d.missing_values;
				return `
					<h5>${doctype}</h5>
					<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
				`;
			})
			.join('');
		if (can_be_created_automatically) {
			// prettier-ignore
			let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
			frappe.confirm(message + html, () => {
				frm
					.call('create_missing_link_values', {
						missing_link_values
					})
					.then(r => {
						let records = r.message;
						frappe.msgprint(
							__('Created {0} records successfully.', [records.length])
						);
					});
			});
		} else {
			frappe.msgprint(
				// prettier-ignore
				__('The following records needs to be created before we can import your file.') + html
			);
		}
	}
});

View file

@ -1,170 +0,0 @@
{
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"section_import_preview",
"import_preview",
"import_warnings_section",
"template_warnings",
"import_warnings",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File"
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-02-17 15:35:04.386098",
"modified_by": "faris@erpnext.com",
"module": "Core",
"name": "Data Import Beta",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,119 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.exporter_new import Exporter
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.background_jobs import enqueue
from frappe import _
class DataImportBeta(Document):
	"""Controller for the beta Data Import document."""

	def validate(self):
		# reset stored template options/warnings whenever the attached
		# file is removed or replaced
		doc_before_save = self.get_doc_before_save()
		import_file_changed = (
			doc_before_save and doc_before_save.import_file != self.import_file
		)
		if not self.import_file or import_file_changed:
			self.template_options = ""
			self.template_warnings = ""

		if self.import_file:
			# instantiating the Importer validates the template
			self.get_importer()

	def get_preview_from_template(self):
		if not self.import_file:
			return
		return self.get_importer().get_data_for_import_preview()

	def start_import(self):
		"""Enqueue the import as a background job; returns True if enqueued."""
		if frappe.utils.scheduler.is_scheduler_inactive():
			frappe.throw(
				_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
			)

		enqueued_jobs = {d.get("job_name") for d in get_info()}
		if self.name in enqueued_jobs:
			# a job for this document is already queued
			return False

		enqueue(
			start_import,
			queue="default",
			timeout=6000,
			event="data_import",
			job_name=self.name,
			data_import=self.name,
			now=frappe.conf.developer_mode or frappe.flags.in_test,
		)
		return True

	def export_errored_rows(self):
		return self.get_importer().export_errored_rows()

	def get_importer(self):
		return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_preview_from_template(data_import):
	"""Whitelisted wrapper: preview data for the named Data Import Beta doc."""
	doc = frappe.get_doc("Data Import Beta", data_import)
	return doc.get_preview_from_template()
@frappe.whitelist()
def form_start_import(data_import):
	"""Whitelisted wrapper: enqueue the import for the named doc."""
	doc = frappe.get_doc("Data Import Beta", data_import)
	return doc.start_import()
def start_import(data_import):
	"""This method runs in background job"""
	data_import = frappe.get_doc("Data Import Beta", data_import)
	try:
		i = Importer(data_import.reference_doctype, data_import=data_import)
		i.import_data()
	except Exception:
		# narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
		# are not swallowed; roll back partial work, mark the doc failed,
		# and record the traceback in the Error Log
		frappe.db.rollback()
		data_import.db_set("status", "Error")
		frappe.log_error(title=data_import.name)
	# commit whatever state we ended in and tell the client to reload
	frappe.db.commit()
	frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
	doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
	"""
	Download template from Exporter
	:param doctype: Document Type
	:param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
	:param export_records=None: One of 'all', 'by_filter', 'blank_template'
	:param export_filters: Filter dict
	:param file_type: File type to export into
	"""
	# 'blank_template' means export the header only, no data rows
	exporter = Exporter(
		doctype,
		export_fields=frappe.parse_json(export_fields),
		export_data=export_records != "blank_template",
		export_filters=frappe.parse_json(export_filters),
		file_type=file_type,
		export_page_length=5 if export_records == "5_records" else None,
	)
	exporter.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
	"""Stream a template containing only the rows that failed to import."""
	frappe.get_doc("Data Import Beta", data_import_name).export_errored_rows()

View file

@ -1,40 +0,0 @@
// Names of Data Import docs with a background job currently running;
// maintained from realtime events so the list can show "In Progress".
let imports_in_progress = [];
frappe.listview_settings['Data Import Beta'] = {
	onload(listview) {
		frappe.realtime.on('data_import_progress', data => {
			if (!imports_in_progress.includes(data.data_import)) {
				imports_in_progress.push(data.data_import);
			}
		});
		// import finished: drop it from the in-progress set and refresh
		frappe.realtime.on('data_import_refresh', data => {
			imports_in_progress = imports_in_progress.filter(
				d => d !== data.data_import
			);
			listview.refresh();
		});
	},
	get_indicator: function(doc) {
		var colors = {
			'Pending': 'orange',
			'Partial Success': 'orange',
			'Success': 'green',
			'In Progress': 'orange',
			'Error': 'red'
		};
		let status = doc.status;
		if (imports_in_progress.includes(doc.name)) {
			status = 'In Progress';
		}
		// NOTE(review): the filter expression uses doc.status, not the
		// derived in-progress status — presumably intentional; verify
		return [__(status), colors[status], 'status,=,' + doc.status];
	},
	formatters: {
		import_type(value) {
			// shorten the long select values for the list view column
			return {
				'Insert New Records': __('Insert'),
				'Update Existing Records': __('Update')
			}[value];
		}
	},
	hide_name_column: true
};

View file

@ -0,0 +1,324 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Legacy', {
	onload: function(frm) {
		if (frm.doc.__islocal) {
			frm.set_value("action", "");
		}
		// Limit "Document Type" to doctypes the server reports as importable.
		// NOTE(review): the trailing `}),` below chains the next statement
		// with the comma operator — it works, but looks accidental.
		frappe.call({
			method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes",
			callback: function (r) {
				let importable_doctypes = r.message;
				frm.set_query("reference_doctype", function () {
					return {
						"filters": {
							"issingle": 0,
							"istable": 0,
							"name": ['in', importable_doctypes]
						}
					};
				});
			}
		}),
		// should never check public
		frm.fields_dict["import_file"].df.is_private = 1;
		// update the dashboard progress bar from server-side progress events
		frappe.realtime.on("data_import_progress", function(data) {
			if (data.data_import === frm.doc.name) {
				if (data.reload && data.reload === true) {
					frm.reload_doc();
				}
				if (data.progress) {
					let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
					if (progress_bar) {
						$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
						$(progress_bar).css("width", data.progress + "%");
					}
				}
			}
		});
	},
	reference_doctype: function(frm){
		// preload meta so field info is available for the download dialog
		if (frm.doc.reference_doctype) {
			frappe.model.with_doctype(frm.doc.reference_doctype);
		}
	},
	refresh: function(frm) {
		frm.disable_save();
		frm.dashboard.clear_headline();
		if (frm.doc.reference_doctype && !frm.doc.import_file) {
			frm.page.set_indicator(__('Attach file'), 'orange');
		} else {
			if (frm.doc.import_status) {
				// mirror the list view's status indicator on the form page
				const listview_settings = frappe.listview_settings['Data Import Legacy'];
				const indicator = listview_settings.get_indicator(frm.doc);
				frm.page.set_indicator(indicator[0], indicator[1]);
				if (frm.doc.import_status === "In Progress") {
					frm.dashboard.add_progress("Data Import Progress", "0");
					frm.set_read_only();
					frm.refresh_fields();
				}
			}
		}
		if (frm.doc.reference_doctype) {
			frappe.model.with_doctype(frm.doc.reference_doctype);
		}
		// once an action is chosen it cannot be changed
		if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
			frm.set_df_property("action", "read_only", 1);
		}
		frm.add_custom_button(__("Help"), function() {
			frappe.help.show_video("6wiriRKPhmg");
		});
		if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
			frm.add_custom_button(__("Download template"), function() {
				frappe.data_import.download_dialog(frm).show();
			});
		}
		// allow starting only when a parsed file is attached and no import
		// is pending or in progress
		if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
			frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
			frm.page.set_primary_action(__("Start Import"), function() {
				frappe.call({
					btn: frm.page.btn_primary,
					method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data",
					args: {
						data_import: frm.doc.name
					}
				});
			}).addClass('btn btn-primary');
		}
		if (frm.doc.log_details) {
			frm.events.create_log_table(frm);
		} else {
			$(frm.fields_dict.import_log.wrapper).empty();
		}
	},
	action: function(frm) {
		// translate the chosen action into the insert_new/overwrite flags
		if(!frm.doc.action) return;
		if(!frm.doc.reference_doctype) {
			frappe.msgprint(__("Please select document type first."));
			frm.set_value("action", "");
			return;
		}
		// NOTE(review): flags are set directly on frm.doc (not via
		// frm.set_value) and then persisted by frm.save()
		if(frm.doc.action == "Insert new records") {
			frm.doc.insert_new = 1;
		} else if (frm.doc.action == "Update records"){
			frm.doc.overwrite = 1;
		}
		frm.save();
	},
	// the following option checkboxes are persisted immediately on change
	only_update: function(frm) {
		frm.save();
	},
	submit_after_import: function(frm) {
		frm.save();
	},
	skip_errors: function(frm) {
		frm.save();
	},
	ignore_encoding_errors: function(frm) {
		frm.save();
	},
	no_email: function(frm) {
		frm.save();
	},
	show_only_errors: function(frm) {
		frm.events.create_log_table(frm);
	},
	create_log_table: function(frm) {
		// render the server-built log messages via the log_details template
		let msg = JSON.parse(frm.doc.log_details);
		var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
		$(frappe.render_template("log_details", {
			data: msg.messages,
			import_status: frm.doc.import_status,
			show_only_errors: frm.doc.show_only_errors,
		})).appendTo($log_wrapper);
	}
});
frappe.provide('frappe.data_import');
// Build (and return) the "Download Template" dialog for the legacy importer.
// Lets the user pick columns per doctype/child table, choose the file type,
// and optionally include existing data, then POSTs to the export endpoint.
frappe.data_import.download_dialog = function(frm) {
	var dialog;
	// only simple, visible value fields can be exported
	const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
	const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
	// all MultiCheck controls in the dialog (main doctype + child tables)
	const get_doctype_checkbox_fields = () => {
		return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
			.map(df => dialog.fields_dict[df.fieldname]);
	};
	const doctype_fields = get_fields(frm.doc.reference_doctype)
		.map(df => {
			// mandatory fields (and naming_series) are required and flagged
			let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
			return {
				label: df.label,
				reqd: reqd,
				danger: reqd,
				value: df.fieldname,
				checked: 1
			};
		});
	let fields = [
		{
			"label": __("Select Columns"),
			"fieldname": "select_columns",
			"fieldtype": "Select",
			"options": "All\nMandatory\nManually",
			"reqd": 1,
			"onchange": function() {
				const fields = get_doctype_checkbox_fields();
				fields.map(f => f.toggle(true));
				if(this.value == 'Mandatory' || this.value == 'Manually') {
					// start from a clean slate, then tick mandatory fields
					checkbox_toggle(true);
					fields.map(multicheck_field => {
						multicheck_field.options.map(option => {
							if(!option.reqd) return;
							$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
								.prop('checked', false)
								.trigger('click');
						});
					});
				} else if(this.value == 'All'){
					// everything is exported; lock the checkboxes
					$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
						.prop('disabled', true);
				}
			}
		},
		{
			"label": __("File Type"),
			"fieldname": "file_type",
			"fieldtype": "Select",
			"options": "Excel\nCSV",
			"default": "Excel"
		},
		{
			// only meaningful when updating (overwrite) existing records
			"label": __("Download with Data"),
			"fieldname": "with_data",
			"fieldtype": "Check",
			"hidden": !frm.doc.overwrite,
			"default": 1
		},
		{
			"label": __("Select All"),
			"fieldname": "select_all",
			"fieldtype": "Button",
			"depends_on": "eval:doc.select_columns=='Manually'",
			click: function() {
				checkbox_toggle();
			}
		},
		{
			"label": __("Unselect All"),
			"fieldname": "unselect_all",
			"fieldtype": "Button",
			"depends_on": "eval:doc.select_columns=='Manually'",
			click: function() {
				checkbox_toggle(true);
			}
		},
		{
			"label": frm.doc.reference_doctype,
			"fieldname": "doctype_fields",
			"fieldtype": "MultiCheck",
			"options": doctype_fields,
			"columns": 2,
			"hidden": 1
		}
	];
	// one MultiCheck per child table of the reference doctype
	const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
		.map(df => {
			return {
				"label": df.options,
				"fieldname": df.fieldname + '_fields',
				"fieldtype": "MultiCheck",
				"options": frappe.meta.get_docfields(df.options)
					.filter(filter_fields)
					.map(df => ({
						label: df.label,
						reqd: df.reqd ? 1 : 0,
						value: df.fieldname,
						checked: 1,
						danger: df.reqd
					})),
				"columns": 2,
				"hidden": 1
			};
		});
	fields = fields.concat(child_table_fields);
	dialog = new frappe.ui.Dialog({
		title: __('Download Template'),
		fields: fields,
		primary_action: function(values) {
			var data = values;
			if (frm.doc.reference_doctype) {
				// assemble the query parameters for the export endpoint
				var export_params = () => {
					let columns = {};
					if(values.select_columns) {
						columns = get_doctype_checkbox_fields().reduce((columns, field) => {
							const options = field.get_checked_options();
							columns[field.df.label] = options;
							return columns;
						}, {});
					}
					return {
						doctype: frm.doc.reference_doctype,
						parent_doctype: frm.doc.reference_doctype,
						select_columns: JSON.stringify(columns),
						with_data: frm.doc.overwrite && data.with_data,
						all_doctypes: true,
						file_type: data.file_type,
						template: true
					};
				};
				let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
				open_url_post(get_template_url, export_params());
			} else {
				frappe.msgprint(__("Please select the Document Type."));
			}
			dialog.hide();
		},
		primary_action_label: __('Download')
	});
	// lay the Select All / Unselect All buttons out side by side
	$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
		.wrapAll('<div class="inline-buttons" />');
	const button_container = $(dialog.body).find('.inline-buttons');
	button_container.addClass('flex');
	$(button_container).find('.frappe-control').map((index, button) => {
		$(button).css({"margin-right": "1em"});
	});
	// check/uncheck every checkbox across all MultiCheck controls
	function checkbox_toggle(checked=false) {
		$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
			$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
		});
	}
	return dialog;
};

View file

@ -0,0 +1,218 @@
{
"actions": [],
"allow_copy": 1,
"creation": "2020-06-11 16:13:23.813709",
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"action",
"insert_new",
"overwrite",
"only_update",
"section_break_4",
"import_file",
"column_break_4",
"error_file",
"section_break_6",
"skip_errors",
"submit_after_import",
"ignore_encoding_errors",
"no_email",
"import_detail",
"import_status",
"show_only_errors",
"import_log",
"log_details",
"amended_from",
"total_rows",
"amended_from"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"ignore_user_permissions": 1,
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1
},
{
"fieldname": "action",
"fieldtype": "Select",
"label": "Action",
"options": "Insert new records\nUpdate records",
"reqd": 1
},
{
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"label": "Insert new records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"label": "Update records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"label": "Don't create new records"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break"
},
{
"fieldname": "import_file",
"fieldtype": "Attach",
"label": "Attach file for Import"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"label": "Generated File"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldtype": "Section Break"
},
{
"default": "0",
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"label": "Skip rows with errors"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit after importing"
},
{
"default": "0",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"label": "Ignore encoding errors"
},
{
"default": "1",
"fieldname": "no_email",
"fieldtype": "Check",
"label": "Do not send Emails"
},
{
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_status",
"fieldtype": "Select",
"label": "Import Status",
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"read_only": 1
},
{
"allow_on_submit": 1,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"label": "Show only errors",
"no_copy": 1,
"print_hide": 1
},
{
"allow_on_submit": 1,
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"label": "Import Log"
},
{
"allow_on_submit": 1,
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"label": "Log Details",
"read_only": 1
},
{
"fieldname": "total_rows",
"fieldtype": "Int",
"hidden": 1,
"label": "Total Rows",
"read_only": 1
},
{
"fieldname": "amended_from",
"fieldtype": "Link",
"label": "Amended From",
"no_copy": 1,
"options": "Data Import Legacy",
"print_hide": 1,
"read_only": 1
}
],
"is_submittable": 1,
"links": [],
"max_attachments": 1,
"modified": "2020-06-11 16:13:23.813709",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Legacy",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"submit": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 1
}

View file

@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import_legacy.importer import upload
from frappe.utils.background_jobs import enqueue
class DataImportLegacy(Document):
	def autoname(self):
		"""Name new imports after their creation timestamp."""
		if not self.name:
			self.name = "Import on " + format_datetime(self.creation)

	def validate(self):
		"""Reset the recorded row count when no file is attached, refuse
		saves while an import is running, and validate a newly attached
		template file."""
		if not self.import_file:
			self.db_set("total_rows", 0)

		if self.import_status == "In Progress":
			frappe.throw(_("Can't save the form as data import is in progress."))

		# A template that already passed validation carries total_rows, so
		# only run the (potentially slow) template validation when a file is
		# attached and no row count has been recorded yet.
		needs_validation = self.import_file and not self.total_rows
		if needs_validation:
			upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
@frappe.whitelist()
def get_importable_doctypes():
	"""Return the cached list of doctypes the current user may import."""
	current_user = frappe.session.user
	return frappe.cache().hget("can_import", current_user)
@frappe.whitelist()
def import_data(data_import):
	"""Flag the Data Import Legacy record as running and enqueue the
	background upload job, skipping the enqueue if an identical job is
	already queued."""
	frappe.db.set_value("Data Import Legacy", data_import, "import_status",
		"In Progress", update_modified=False)

	# Tell open clients the import has started (progress 0, force reload).
	progress_payload = {"progress": "0", "data_import": data_import, "reload": True}
	frappe.publish_realtime("data_import_progress", progress_payload,
		user=frappe.session.user)

	from frappe.core.page.background_jobs.background_jobs import get_info
	enqueued_jobs = [job.get("job_name") for job in get_info()]

	# Do not enqueue the same import twice.
	if data_import not in enqueued_jobs:
		enqueue(upload, queue='default', timeout=6000, event='data_import',
			job_name=data_import, data_import_doc=data_import,
			from_data_import="Yes", user=frappe.session.user)
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
	insert=False, submit=False, pre_process=None):
	"""Import a single file, or every file in a directory.

	``.json`` files are imported through ``frappe.modules.import_file`` with
	emails muted; ``.csv`` files go through :func:`import_file_by_path`.
	Commits to the database after each file. Files with other extensions are
	silently skipped.
	"""
	if os.path.isdir(path):
		files = [os.path.join(path, f) for f in os.listdir(path)]
	else:
		files = [path]

	for f in files:
		if f.endswith(".json"):
			frappe.flags.mute_emails = True
			try:
				frappe.modules.import_file.import_file_by_path(f, data_import=True,
					force=True, pre_process=pre_process, reset_permissions=True)
			finally:
				# Bug fix: previously a failed import left mute_emails stuck
				# at True for the rest of the process; always restore it.
				frappe.flags.mute_emails = False
			frappe.db.commit()
		elif f.endswith(".csv"):
			import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite,
				submit=submit, pre_process=pre_process)
			frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
	"""Read the CSV file at ``path`` and import its rows via :func:`upload`."""
	from frappe.utils.csvutils import read_csv_content

	print("Importing " + path)

	with open(path, "r") as infile:
		content = infile.read()

	upload(rows=read_csv_content(content), ignore_links=ignore_links,
		no_email=no_email, overwrite=overwrite, submit_after_import=submit,
		pre_process=pre_process)
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
	"""Dump documents of ``doctype`` (one by ``name``, a single doctype, or a
	filtered set) to ``path`` as fixture-style JSON."""
	def post_process(out):
		# Strip volatile metadata in place so the exported fixtures stay
		# stable across re-exports.
		del_keys = ('modified_by', 'creation', 'owner', 'idx')
		for doc in out:
			for key in del_keys:
				if key in doc:
					del doc[key]
			for k, v in doc.items():
				if isinstance(v, list):
					# Child rows additionally lose identity/status fields.
					for child in v:
						for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
							if key in child:
								del child[key]

	if name:
		out = [frappe.get_doc(doctype, name).as_dict()]
	elif frappe.db.get_value("DocType", doctype, "issingle"):
		out = [frappe.get_doc(doctype).as_dict()]
	else:
		records = frappe.get_all(doctype, fields=["name"], filters=filters,
			or_filters=or_filters, limit_page_length=0, order_by=order_by)
		out = [frappe.get_doc(doctype, record.name).as_dict() for record in records]

	post_process(out)

	# If the target directory does not exist, retry one level up
	# (supports being invoked from a bench subdirectory).
	dirname = os.path.dirname(path)
	if not os.path.exists(dirname):
		path = os.path.join('..', path)

	with open(path, "w") as outfile:
		outfile.write(frappe.as_json(out))
def export_csv(doctype, path):
	"""Write a data-export template (including data) for ``doctype`` to ``path``."""
	from frappe.core.doctype.data_export.exporter import export_data

	with open(path, "wb") as csvfile:
		# export_data renders the template into frappe.response.result.
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		csvfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
	"""Export ``doctype`` as a JSON fixture into ``app``'s fixtures folder.

	Restricted to the Administrator user.
	"""
	if frappe.session.user != "Administrator":
		raise frappe.PermissionError

	fixtures_dir = frappe.get_app_path(app, "fixtures")
	if not os.path.exists(fixtures_dir):
		os.mkdir(fixtures_dir)

	target_path = frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json")
	export_json(doctype, target_path, order_by="name asc")

View file

@ -0,0 +1,24 @@
frappe.listview_settings['Data Import Legacy'] = {
	add_fields: ["import_status"],
	has_indicator_for_draft: 1,
	get_indicator: function(doc) {
		// [label, color, list-filter] tuple per import status.
		const status = {
			'Successful': [__("Success"), "green", "import_status,=,Successful"],
			'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
			'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
			'Failed': [__("Failed"), "red", "import_status,=,Failed"],
			'Pending': [__("Pending"), "orange", "import_status,=,"]
		};
		if (doc.import_status) {
			return status[doc.import_status];
		}
		// No status yet (draft/unsaved): show as pending. The original had a
		// redundant `docstatus == 0` branch returning this same value.
		return status['Pending'];
	}
};

View file

@ -0,0 +1,541 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
from six.moves import range
import requests
import frappe, json
import frappe.permissions
from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
from six import string_types
@frappe.whitelist()
def get_data_keys():
	"""Marker strings that delimit the sections of the legacy import
	template header (translated for the current user)."""
	keys = {
		"data_separator": _('Start entering data below this line'),
		"main_table": _("Table") + ":",
		"parent_table": _("Parent Table") + ":",
		"columns": _("Column Name") + ":",
		"doctype": _("DocType") + ":",
	}
	return frappe._dict(keys)
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
	update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
	skip_errors = True, data_import_doc=None, validate_template=False, user=None):
	"""Import rows from a legacy data-import template.

	Rows come either from the ``rows`` argument or from the file attached to
	``data_import_doc`` (CSV or, when driven by the Data Import form, XLSX).
	Documents are built from the template's column map and inserted/updated
	per row, with per-row results logged. When ``validate_template`` is true,
	only the header is validated and the row count is recorded on the doc.
	Returns ``{"messages": [...], "error": bool}`` when not driven by a
	Data Import Legacy document; otherwise results are saved on that doc.
	"""
	# for translations
	if user:
		frappe.cache().hdel("lang", user)
		frappe.set_user_lang(user)

	if data_import_doc and isinstance(data_import_doc, string_types):
		data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc)
	if data_import_doc and from_data_import == "Yes":
		# Options come from the Data Import Legacy document itself.
		no_email = data_import_doc.no_email
		ignore_encoding_errors = data_import_doc.ignore_encoding_errors
		update_only = data_import_doc.only_update
		submit_after_import = data_import_doc.submit_after_import
		overwrite = data_import_doc.overwrite
		skip_errors = data_import_doc.skip_errors
	else:
		# extra input params (from the HTTP form when called as an endpoint)
		params = json.loads(frappe.form_dict.get("params") or '{}')
		if params.get("submit_after_import"):
			submit_after_import = True
		if params.get("ignore_encoding_errors"):
			ignore_encoding_errors = True
		if not params.get("no_email"):
			no_email = False
		if params.get('update_only'):
			update_only = True
		if params.get('from_data_import'):
			from_data_import = params.get('from_data_import')
		if not params.get('skip_errors'):
			skip_errors = params.get('skip_errors')

	frappe.flags.in_import = True
	frappe.flags.mute_emails = no_email

	def get_data_keys_definition():
		# Marker strings used to locate template header sections.
		return get_data_keys()

	def bad_template():
		frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

	def check_data_length():
		if not data:
			frappe.throw(_("No data found in the file. Please reattach the new file with data."))

	def get_start_row():
		# Index of the first data row: the row after the data-separator marker.
		for i, row in enumerate(rows):
			if row and row[0]==get_data_keys_definition().data_separator:
				return i+1
		bad_template()

	def get_header_row(key):
		return get_header_row_and_idx(key)[0]

	def get_header_row_and_idx(key):
		# Returns (row, index) for the header row whose first cell equals
		# ``key``; ([], -1) when the key is absent.
		for i, row in enumerate(header):
			if row and row[0]==key:
				return row, i
		return [], -1

	def filter_empty_columns(columns):
		# Trailing empty column names are dropped; empty names anywhere else
		# in the header are a template error.
		empty_cols = list(filter(lambda x: x in ("", None), columns))

		if empty_cols:
			if columns[-1*len(empty_cols):] == empty_cols:
				# filter empty columns if they exist at the end
				columns = columns[:-1*len(empty_cols)]
			else:
				frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
					raise_exception=1)
		return columns

	def make_column_map():
		# Populate doctypes / column_idx_to_fieldname / column_idx_to_fieldtype
		# from the "DocType:" header row. Fieldnames are read 2 rows below the
		# doctype row and fieldtypes 4 rows below it.
		doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
		if row_idx == -1: # old style
			return

		dt = None
		for i, d in enumerate(doctype_row[1:]):
			if d not in ("~", "-"):
				# A new doctype section starts where the previous cell is a
				# separator (or the "DocType:" label itself).
				if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
					dt, parentfield = d, None
					# xls format truncates the row, so it may not have more columns
					if len(doctype_row) > i+2:
						parentfield = doctype_row[i+2]
					doctypes.append((dt, parentfield))
					column_idx_to_fieldname[(dt, parentfield)] = {}
					column_idx_to_fieldtype[(dt, parentfield)] = {}
				if dt:
					column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
					column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

	def get_doc(start_idx):
		# Build one document dict starting at ``start_idx``, consuming any
		# following child-table rows (rows whose main columns are empty).
		# Returns (doc, attachments, last_row_consumed).
		if doctypes:
			doc = {}
			attachments = []
			last_error_row_idx = None
			for idx in range(start_idx, len(rows)):
				last_error_row_idx = idx # pylint: disable=W0612
				if (not doc) or main_doc_empty(rows[idx]):
					for dt, parentfield in doctypes:
						d = {}
						for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
							try:
								fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
								fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

								if not fieldname or not rows[idx][column_idx]:
									continue

								d[fieldname] = rows[idx][column_idx]
								# Coerce cell values per the template's fieldtype row.
								if fieldtype in ("Int", "Check"):
									d[fieldname] = cint(d[fieldname])
								elif fieldtype in ("Float", "Currency", "Percent"):
									d[fieldname] = flt(d[fieldname])
								elif fieldtype == "Date":
									if d[fieldname] and isinstance(d[fieldname], string_types):
										d[fieldname] = getdate(parse_date(d[fieldname]))
								elif fieldtype == "Datetime":
									if d[fieldname]:
										if " " in d[fieldname]:
											_date, _time = d[fieldname].split()
										else:
											_date, _time = d[fieldname], '00:00:00'
										_date = parse_date(d[fieldname])
										d[fieldname] = get_datetime(_date + " " + _time)
									else:
										d[fieldname] = None

								elif fieldtype in ("Image", "Attach Image", "Attach"):
									# added file to attachments list
									attachments.append(d[fieldname])

								elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
									# as fields can be saved in the number format(long type) in data import template
									d[fieldname] = cstr(d[fieldname])

							except IndexError:
								# Short (truncated) rows simply lack this column.
								pass

						# scrub quotes from name and modified
						if d.get("name") and d["name"].startswith('"'):
							d["name"] = d["name"][1:-1]

						if sum([0 if not val else 1 for val in d.values()]):
							d['doctype'] = dt
							if dt == doctype:
								doc.update(d)
							else:
								if not overwrite and doc.get("name"):
									d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
				else:
					# Next main-document row reached — stop consuming.
					break

			return doc, attachments, last_error_row_idx
		else:
			# Old-style template: single flat row mapped by column headers.
			doc = frappe._dict(zip(columns, rows[start_idx][1:]))
			doc['doctype'] = doctype
			return doc, [], None

	# used in testing whether a row is empty or parent row or child row
	# checked only 3 first columns since first two columns can be blank for example the case of
	# importing the item variant where item code and item name will be blank.
	def main_doc_empty(row):
		if row:
			for i in range(3,0,-1):
				if len(row) > i and row[i]:
					return False
		return True

	def validate_naming(doc):
		# For field- or series-based autonames, require the naming field in the
		# row unless the controller defines its own autoname().
		autoname = frappe.get_meta(doctype).autoname
		if autoname:
			if autoname[0:5] == 'field':
				autoname = autoname[6:]
			elif autoname == 'naming_series:':
				autoname = 'naming_series'
			else:
				return True

			if (autoname not in doc) or (not doc[autoname]):
				from frappe.model.base_document import get_controller
				if not hasattr(get_controller(doctype), "autoname"):
					frappe.throw(_("{0} is a mandatory field").format(autoname))
		return True

	users = frappe.db.sql_list("select name from tabUser")
	def prepare_for_insert(doc):
		# don't block data import if user is not set
		# migrating from another system
		if not doc.owner in users:
			doc.owner = frappe.session.user
		if not doc.modified_by in users:
			doc.modified_by = frappe.session.user

	def is_valid_url(url):
		# NOTE(review): fetches the URL synchronously per attachment; slow for
		# large imports with many attachment links.
		is_valid = False
		if url.startswith("/files") or url.startswith("/private/files"):
			url = get_url(url)

		try:
			r = requests.get(url)
			is_valid = True if r.status_code == 200 else False
		except Exception:
			pass

		return is_valid

	def attach_file_to_doc(doctype, docname, file_url):
		# check if attachment is already available
		# check if the attachement link is relative or not
		if not file_url:
			return

		if not is_valid_url(file_url):
			return

		# NOTE(review): file_url is interpolated into SQL without escaping;
		# values come from the uploaded template — verify trust assumptions.
		files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
			attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
				doctype=doctype,
				docname=docname,
				file_url=file_url
			))

		if files:
			# file is already attached
			return

		_file = frappe.get_doc({
			"doctype": "File",
			"file_url": file_url,
			"attached_to_name": docname,
			"attached_to_doctype": doctype,
			"attached_to_field": 0,
			"folder": "Home/Attachments"})
		_file.save()

	# header
	filename, file_extension = ['','']
	if not rows:
		# No rows passed in — read them from the attached import file.
		_file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
		fcontent = _file.get_content()
		filename, file_extension = _file.get_extension()
		if file_extension == '.xlsx' and from_data_import == 'Yes':
			from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
			rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)

		elif file_extension == '.csv':
			from frappe.utils.csvutils import read_csv_content
			rows = read_csv_content(fcontent, ignore_encoding_errors)

		else:
			frappe.throw(_("Unsupported File Format"))

	start_row = get_start_row()
	header = rows[:start_row]
	data = rows[start_row:]
	try:
		doctype = get_header_row(get_data_keys_definition().main_table)[1]
		columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
	except:
		frappe.throw(_("Cannot change header content"))
	doctypes = []
	column_idx_to_fieldname = {}
	column_idx_to_fieldtype = {}

	if skip_errors:
		# Start the error file with the template header so it can be
		# re-imported directly.
		data_rows_with_error = header

	if submit_after_import and not cint(frappe.db.get_value("DocType",
			doctype, "is_submittable")):
		submit_after_import = False

	parenttype = get_header_row(get_data_keys_definition().parent_table)

	if len(parenttype) > 1:
		parenttype = parenttype[1]

	# check permissions
	if not frappe.permissions.can_import(parenttype or doctype):
		frappe.flags.mute_emails = False
		return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

	# Throw expception in case of the empty data file
	check_data_length()
	make_column_map()
	total = len(data)

	if validate_template:
		# Template-only validation: record row count and stop.
		if total:
			data_import_doc.total_rows = total
		return True

	# NOTE(review): ``params`` is only defined on the non-data_import_doc
	# path above; this relies on overwrite being non-None when driven by a
	# Data Import Legacy document — confirm.
	if overwrite==None:
		overwrite = params.get('overwrite')

	# delete child rows (if parenttype)
	parentfield = None
	if parenttype:
		parentfield = get_parent_field(doctype, parenttype)

		if overwrite:
			delete_child_rows(data, doctype)

	import_log = []
	def log(**kwargs):
		# Console runs print immediately; web runs collect for the UI log.
		if via_console:
			print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
		else:
			import_log.append(kwargs)

	def as_link(doctype, name):
		if via_console:
			return "{0}: {1}".format(doctype, name)
		else:
			return getlink(doctype, name)

	# publish realtime task update
	def publish_progress(achieved, reload=False):
		if data_import_doc:
			frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
				"data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)

	error_flag = rollback_flag = False

	batch_size = frappe.conf.data_import_batch_size or 1000

	for batch_start in range(0, total, batch_size):
		batch = data[batch_start:batch_start + batch_size]

		for i, row in enumerate(batch):
			# bypass empty rows
			if main_doc_empty(row):
				continue

			# start_row is advanced by batch_size after each batch, so this is
			# the absolute index of the row within ``rows``.
			row_idx = i + start_row
			doc = None

			publish_progress(i)

			try:
				doc, attachments, last_error_row_idx = get_doc(row_idx)
				validate_naming(doc)
				if pre_process:
					pre_process(doc)

				original = None
				if parentfield:
					# Importing child rows: append to the existing parent.
					parent = frappe.get_doc(parenttype, doc["parent"])
					doc = parent.append(parentfield, doc)
					parent.save()
				else:
					if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
						original = frappe.get_doc(doctype, doc["name"])
						original_name = original.name
						original.update(doc)
						# preserve original name for case sensitivity
						original.name = original_name
						original.flags.ignore_links = ignore_links
						original.save()
						doc = original
					else:
						if not update_only:
							doc = frappe.get_doc(doc)
							prepare_for_insert(doc)
							doc.flags.ignore_links = ignore_links
							doc.insert()
					if attachments:
						# check file url and create a File document
						for file_url in attachments:
							attach_file_to_doc(doc.doctype, doc.name, file_url)
					if submit_after_import:
						doc.submit()

				# log errors
				if parentfield:
					log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
						"link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
				elif submit_after_import:
					log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
				elif original:
					log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				elif not update_only:
					log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				else:
					log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
						"message": "Document updation ignored", "indicator": "orange"})

			except Exception as e:
				error_flag = True

				# build error message
				if frappe.local.message_log:
					err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
				else:
					err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

				error_trace = frappe.get_traceback()
				if error_trace:
					error_log_doc = frappe.log_error(error_trace)
					error_link = get_absolute_url("Error Log", error_log_doc.name)
				else:
					error_link = None

				log(**{
					"row": row_idx + 1,
					"title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
					"message": err_msg,
					"indicator": "red",
					"link":error_link
				})

				# data with error to create a new file
				# include the errored data in the last row as last_error_row_idx will not be updated for the last row
				if skip_errors:
					if last_error_row_idx == len(rows)-1:
						last_error_row_idx = len(rows)
					data_rows_with_error += rows[row_idx:last_error_row_idx]
				else:
					# Without skip_errors a single failure rolls everything back.
					rollback_flag = True
			finally:
				frappe.local.message_log = []

		start_row += batch_size

	if rollback_flag:
		frappe.db.rollback()
	else:
		frappe.db.commit()

	frappe.flags.mute_emails = False
	frappe.flags.in_import = False

	log_message = {"messages": import_log, "error": error_flag}
	if data_import_doc:
		data_import_doc.log_details = json.dumps(log_message)

		import_status = None
		# NOTE(review): data_rows_with_error includes the header rows, so this
		# length comparison with ``data`` is approximate — confirm intent.
		if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
			import_status = "Partially Successful"
			# write the file with the faulty row
			file_name = 'error_' + filename + file_extension
			if file_extension == '.xlsx':
				from frappe.utils.xlsxutils import make_xlsx
				xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
				file_data = xlsx_file.getvalue()
			else:
				from frappe.utils.csvutils import to_csv
				file_data = to_csv(data_rows_with_error)
			_file = frappe.get_doc({
				"doctype": "File",
				"file_name": file_name,
				"attached_to_doctype": "Data Import Legacy",
				"attached_to_name": data_import_doc.name,
				"folder": "Home/Attachments",
				"content": file_data})
			_file.save()
			data_import_doc.error_file = _file.file_url
		elif error_flag:
			import_status = "Failed"
		else:
			import_status = "Successful"

		data_import_doc.import_status = import_status
		data_import_doc.save()
		if data_import_doc.import_status in ["Successful", "Partially Successful"]:
			data_import_doc.submit()
			publish_progress(100, True)
		else:
			publish_progress(0, True)
		frappe.db.commit()
	else:
		return log_message
def get_parent_field(doctype, parenttype):
	"""Return the fieldname of the table field on ``parenttype`` whose
	options point to ``doctype``.

	Shows a message and raises when ``parenttype`` has no such child-table
	field; returns None when ``parenttype`` is falsy.
	"""
	parentfield = None

	# get parentfield
	if parenttype:
		for d in frappe.get_meta(parenttype).get_table_fields():
			if d.options == doctype:
				parentfield = d.fieldname
				break

		if not parentfield:
			# Bug fix: the format string repeated {0}, so the message printed
			# "parentfield" twice and never showed the parenttype.
			frappe.msgprint(_("Did not find {0} for {1} ({2})").format("parentfield", parenttype, doctype))
			raise Exception

	return parentfield
def delete_child_rows(rows, doctype):
	"""delete child rows for all parents"""
	# Each data row carries its parent name in column 1; dedupe before deleting.
	parents = {row[1] for row in rows}
	for parent in parents:
		if parent:
			# doctype forms the table name; the parent value is parameterized.
			frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), parent)

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestDataImportLegacy(unittest.TestCase):
	# Placeholder test case for the Data Import Legacy doctype;
	# no unit tests exist for the legacy importer yet.
	pass

View file

@ -13,8 +13,8 @@
"fieldname",
"precision",
"length",
"show_days",
"show_seconds",
"hide_days",
"hide_seconds",
"reqd",
"search_index",
"in_list_view",
@ -453,18 +453,18 @@
"fieldtype": "Column Break"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_days",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Show Days"
"label": "Hide Days"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_seconds",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Show Seconds"
"label": "Hide Seconds"
},
{
"default": "0",
@ -477,7 +477,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2020-05-15 09:06:25.224411",
"modified": "2020-02-06 09:06:25.224413",
"modified_by": "Administrator",
"module": "Core",
"name": "DocField",

View file

@ -688,6 +688,9 @@ def validate_fields(meta):
def check_link_table_options(docname, d):
if frappe.flags.in_patch: return
if frappe.flags.in_fixtures: return
if d.fieldtype in ("Link",) + table_fields:
if not d.options:
frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError)
@ -908,6 +911,8 @@ def validate_fields(meta):
frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True)
def check_child_table_option(docfield):
if frappe.flags.in_fixtures: return
if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return
doctype = docfield.options

View file

@ -182,11 +182,11 @@ class File(Document):
if duplicate_file:
duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name)
if duplicate_file_doc.exists_on_disk():
# if it is attached to a document then throw DuplicateEntryError
# if it is attached to a document then throw FileAlreadyAttachedException
if self.attached_to_doctype and self.attached_to_name:
self.duplicate_entry = duplicate_file.name
frappe.throw(_("Same file has already been attached to the record"),
frappe.DuplicateEntryError)
frappe.FileAlreadyAttachedException)
# else just use the url, to avoid uploading a duplicate
else:
self.file_url = duplicate_file.file_url
@ -714,7 +714,12 @@ def remove_all(dt, dn, from_delete=False):
try:
for fid in frappe.db.sql_list("""select name from `tabFile` where
attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)):
remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
if from_delete:
# If deleting a doc, directly delete files
frappe.delete_doc("File", fid, ignore_permissions=True)
else:
# Removes file and adds a comment in the document it is attached to
remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete)
except Exception as e:
if e.args[0]!=1054: raise # (temp till for patched)

View file

@ -28,8 +28,7 @@ def get_session_default_values():
@frappe.whitelist()
def set_session_default_values(default_values):
if not frappe.flags.in_test:
default_values = json.loads(default_values)
default_values = frappe.parse_json(default_values)
for entry in default_values:
try:
frappe.defaults.set_user_default(entry, default_values.get(entry))

View file

@ -4,7 +4,7 @@
from __future__ import unicode_literals, print_function
import frappe
from frappe.model.document import Document
from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
from frappe.utils import cint, flt, has_gravatar, format_datetime, now_datetime, get_formatted_email, today
from frappe import throw, msgprint, _
from frappe.utils.password import update_password as _update_password
from frappe.desk.notifications import clear_notifications
@ -841,11 +841,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters):
def get_total_users():
"""Returns total no. of system users"""
return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`)
FROM `tabUser`
WHERE `enabled` = 1
AND `user_type` = 'System User'
AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]
AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0])
def get_system_users(exclude_users=None, limit=None):
if not exclude_users:

View file

@ -21,6 +21,17 @@ class Version(Document):
else:
return False
def for_insert(self, doc):
updater_reference = doc.flags.updater_reference
data = {
'creation': doc.creation,
'updater_reference': updater_reference,
'created_by': doc.owner
}
self.ref_doctype = doc.doctype
self.docname = doc.name
self.data = frappe.as_json(data)
def get_data(self):
return json.loads(self.data)

View file

@ -16,8 +16,8 @@
"column_break_6",
"fieldtype",
"precision",
"show_seconds",
"show_days",
"hide_seconds",
"hide_days",
"options",
"fetch_from",
"fetch_if_empty",
@ -383,22 +383,18 @@
"label": "In Preview"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_seconds",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Show Seconds",
"show_days": 1,
"show_seconds": 1
"label": "Hide Seconds"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_days",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Show Days",
"show_days": 1,
"show_seconds": 1
"label": "Hide Days"
},
{
"default": "0",
@ -411,7 +407,7 @@
"icon": "fa fa-glass",
"idx": 1,
"links": [],
"modified": "2020-05-15 23:43:00.123572",
"modified": "2020-02-06 23:43:00.123575",
"modified_by": "Administrator",
"module": "Custom",
"name": "Custom Field",

View file

@ -31,6 +31,13 @@ class CustomField(Document):
# fieldnames should be lowercase
self.fieldname = self.fieldname.lower()
def before_insert(self):
meta = frappe.get_meta(self.dt, cached=False)
fieldnames = [df.fieldname for df in meta.get("fields")]
if self.fieldname in fieldnames:
frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt))
def validate(self):
meta = frappe.get_meta(self.dt, cached=False)
fieldnames = [df.fieldname for df in meta.get("fields")]
@ -46,9 +53,6 @@ class CustomField(Document):
if not self.fieldname:
frappe.throw(_("Fieldname not set for Custom Field"))
if self.fieldname in fieldnames:
frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt))
if self.get('translatable', 0) and not supports_translation(self.fieldtype):
self.translatable = 0
@ -68,6 +72,11 @@ class CustomField(Document):
frappe.db.updatedb(self.dt)
def on_trash(self):
#check if Admin owned field
if self.owner == 'Administrator' and frappe.session.user != 'Administrator':
frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format(
frappe.bold(self.label)))
# delete property setter entries
frappe.db.sql("""\
DELETE FROM `tabProperty Setter`

View file

@ -77,7 +77,9 @@ docfield_properties = {
'allow_bulk_edit': 'Check',
'auto_repeat': 'Link',
'allow_in_quick_entry': 'Check',
'hide_border': 'Check'
'hide_border': 'Check',
'hide_days': 'Check',
'hide_seconds': 'Check'
}
allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'),

View file

@ -11,8 +11,8 @@
"label",
"fieldtype",
"fieldname",
"show_seconds",
"show_days",
"hide_seconds",
"hide_days",
"reqd",
"unique",
"in_list_view",
@ -393,22 +393,18 @@
"label": "In Preview"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_seconds",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_seconds",
"fieldtype": "Check",
"label": "Show Seconds",
"show_days": 1,
"show_seconds": 1
"label": "Hide Seconds"
},
{
"default": "1",
"depends_on": "eval:doc.fieldtype === \"Duration\";",
"fieldname": "show_days",
"default": "0",
"depends_on": "eval:doc.fieldtype=='Duration'",
"fieldname": "hide_days",
"fieldtype": "Check",
"label": "Show Days",
"show_days": 1,
"show_seconds": 1
"label": "Hide Days"
},
{
"default": "0",
@ -421,7 +417,7 @@
"idx": 1,
"istable": 1,
"links": [],
"modified": "2020-05-15 23:45:46.810869",
"modified": "2020-06-02 23:45:46.810868",
"modified_by": "Administrator",
"module": "Custom",
"name": "Customize Form Field",

View file

@ -64,6 +64,8 @@ CREATE TABLE `tabDocField` (
`length` int(11) NOT NULL DEFAULT 0,
`translatable` int(1) NOT NULL DEFAULT 0,
`hide_border` int(1) NOT NULL DEFAULT 0,
`hide_days` int(1) NOT NULL DEFAULT 0,
`hide_seconds` int(1) NOT NULL DEFAULT 0,
PRIMARY KEY (`name`),
KEY `parent` (`parent`),
KEY `label` (`label`),

View file

@ -64,6 +64,8 @@ CREATE TABLE "tabDocField" (
"length" bigint NOT NULL DEFAULT 0,
"translatable" smallint NOT NULL DEFAULT 0,
"hide_border" smallint NOT NULL DEFAULT 0,
"hide_days" smallint NOT NULL DEFAULT 0,
"hide_seconds" smallint NOT NULL DEFAULT 0,
PRIMARY KEY ("name")
) ;

View file

@ -168,7 +168,6 @@ class Workspace:
'subtitle': _(self.onboarding_doc.subtitle),
'success': _(self.onboarding_doc.success_message),
'docs_url': self.onboarding_doc.documentation_url,
'user_can_dismiss': self.onboarding_doc.user_can_dismiss,
'items': self.get_onboarding_steps()
}
@handle_not_exist

View file

@ -1,208 +1,81 @@
{
"allow_copy": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"actions": [],
"autoname": "Prompt",
"beta": 0,
"creation": "2017-10-23 13:02:10.295824",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"subject_field",
"start_date_field",
"end_date_field",
"column_break_5",
"all_day"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Reference Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "subject_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Subject Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "start_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Start Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "end_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "End Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"default": "0",
"fieldname": "all_day",
"fieldtype": "Check",
"label": "All Day"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-09-05 14:22:27.664645",
"links": [],
"modified": "2020-06-15 11:24:57.639430",
"modified_by": "Administrator",
"module": "Desk",
"name": "Calendar View",
"name_case": "",
"owner": "faris@erpnext.com",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
},
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 0,
"delete": 0,
"email": 0,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "All",
"set_user_permissions": 0,
"share": 0,
"submit": 0,
"write": 0
"role": "All"
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 0,
"track_seen": 0
"sort_order": "DESC"
}

View file

@ -26,15 +26,15 @@ def get_permission_query_conditions(user):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()])
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()]
return '''
`tabDashboard Chart`.`document_type` in {allowed_doctypes}
or `tabDashboard Chart`.`report_name` in {allowed_reports}
`tabDashboard Chart`.`document_type` in ({allowed_doctypes})
or `tabDashboard Chart`.`report_name` in ({allowed_reports})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_reports=allowed_reports
allowed_doctypes=','.join(allowed_doctypes),
allowed_reports=','.join(allowed_reports)
)

View file

@ -13,7 +13,6 @@
"column_break_4",
"success_message",
"documentation_url",
"user_can_dismiss",
"is_complete",
"section_break_6",
"steps"
@ -53,13 +52,6 @@
"label": "Success Message",
"reqd": 1
},
{
"default": "1",
"description": "Allow users to dismiss onboarding temporarily for a day",
"fieldname": "user_can_dismiss",
"fieldtype": "Check",
"label": "User Can Dismiss "
},
{
"fieldname": "documentation_url",
"fieldtype": "Data",
@ -90,7 +82,7 @@
}
],
"links": [],
"modified": "2020-05-18 19:42:39.738869",
"modified": "2020-06-08 15:36:04.701049",
"modified_by": "Administrator",
"module": "Desk",
"name": "Module Onboarding",

View file

@ -100,14 +100,16 @@ def send_notification_email(doc):
)
def get_email_header(doc):
return {
docname = doc.document_name
header_map = {
'Default': _('New Notification'),
'Mention': _('New Mention'),
'Assignment': _('New Assignment'),
'Share': _('New Document Shared'),
'Energy Point': _('Energy Point Update'),
}[doc.type or 'Default']
'Mention': _('New Mention on {0}').format(docname),
'Assignment': _('Assignment Update on {0}').format(docname),
'Share': _('New Document Shared {0}').format(docname),
'Energy Point': _('Energy Point Update on {0}').format(docname),
}
return header_map[doc.type or 'Default']
@frappe.whitelist()
def mark_all_as_read():

View file

@ -27,12 +27,12 @@ def get_permission_query_conditions(user=None):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
return '''
`tabNumber Card`.`document_type` in {allowed_doctypes}
`tabNumber Card`.`document_type` in ({allowed_doctypes})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_doctypes=','.join(allowed_doctypes)
)
def has_permission(doc, ptype, user):

View file

@ -178,7 +178,8 @@ def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE',
description_html = "<div>{0}</div>".format(description) if description else None
if action=='CLOSE':
subject = _('Your assignment on {0} {1} has been removed').format(frappe.bold(doc_type), get_title_html(title))
subject = _('Your assignment on {0} {1} has been removed by {2}')\
.format(frappe.bold(doc_type), get_title_html(title), frappe.bold(user_name))
else:
user_name = frappe.bold(user_name)
document_type = frappe.bold(doc_type)

View file

@ -35,7 +35,7 @@ def get_group_by_count(doctype, current_filters, field):
from
`tabToDo`, `tabUser`
where
`tabToDo`.status='Open' and
`tabToDo`.status!='Cancelled' and
`tabToDo`.owner = `tabUser`.name and
`tabUser`.user_type = 'System User'
{subquery_condition}

View file

@ -252,7 +252,7 @@ def get_open_count(doctype, name, items=[]):
continue
filters = get_filters_for(d)
fieldname = links.get("non_standard_fieldnames", {}).get(d, links.fieldname)
fieldname = links.get("non_standard_fieldnames", {}).get(d, links.get('fieldname'))
data = {"name": d}
if filters:
# get the fieldname for the current document

View file

@ -10,7 +10,7 @@ import socket
import time
from frappe import _
from frappe.model.document import Document
from frappe.utils import validate_email_address, cint, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days
from frappe.utils import validate_email_address, cint, cstr, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days
from frappe.utils.user import is_system_user
from frappe.utils.jinja import render_template
from frappe.email.smtp import SMTPServer
@ -169,19 +169,20 @@ class EmailAccount(Document):
try:
email_server.connect()
except (error_proto, imaplib.IMAP4.error) as e:
message = e.message.lower().replace(" ","")
if in_receive and any(map(lambda t: t in message, ['authenticationfail', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed
e = cstr(e)
message = e.lower().replace(" ","")
if in_receive and any(map(lambda t: t in message, ['authenticationfailed', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed
'loginfailed', 'err[auth]', 'errtemporaryerror'])): #temporary error to deal with godaddy
# if called via self.receive and it leads to authentication error, disable incoming
# and send email to system manager
self.handle_incoming_connect_error(
description=_('Authentication failed while receiving emails from Email Account {0}. Message from server: {1}').format(self.name, e.message)
description=_('Authentication failed while receiving emails from Email Account {0}. Message from server: {1}').format(self.name, e)
)
return None
else:
frappe.throw(e.message)
frappe.throw(e)
except socket.error:
if in_receive:

View file

@ -119,15 +119,17 @@ def get_context(context):
if self.is_standard:
self.load_standard_properties(context)
try:
if self.channel == 'Email':
self.send_an_email(doc, context)
if self.channel == 'Email':
self.send_an_email(doc, context)
if self.channel == 'Slack':
self.send_a_slack_msg(doc, context)
if self.channel == 'Slack':
self.send_a_slack_msg(doc, context)
if self.channel == 'System Notification' or self.send_system_notification:
self.create_system_notification(doc, context)
if self.channel == 'System Notification' or self.send_system_notification:
self.create_system_notification(doc, context)
except:
frappe.log_error(title='Failed to send notification', message=frappe.get_traceback())
if self.set_property_after_alert:
allow_update = True

View file

@ -35,7 +35,8 @@ class EventProducer(Document):
self.create_custom_fields()
else:
# when producer doc is updated it updates the consumer doc, set flag to avoid deadlock
frappe.db.set_value(self.doctype, self.name, 'incoming_change', 0)
self.db_set('incoming_change', 0)
self.reload()
def check_url(self):
if not validate_url(self.producer_url):

View file

@ -9,42 +9,20 @@ import json
from frappe.frappeclient import FrappeClient
from frappe.event_streaming.doctype.event_producer.event_producer import pull_from_node
def create_event_producer(producer_url):
producer = frappe.db.exists('Event Producer', producer_url)
if producer:
event_producer = frappe.get_doc('Event Producer', producer)
else:
event_producer = frappe.new_doc('Event Producer')
event_producer.producer_doctypes = []
event_producer.producer_url = producer_url
event_producer.append('producer_doctypes', {
'ref_doctype': 'ToDo',
'use_same_name': 1
})
event_producer.append('producer_doctypes', {
'ref_doctype': 'Note',
'use_same_name': 1
})
event_producer.user = 'Administrator'
event_producer.save()
event_producer.reload()
producer_url = 'http://test_site_producer:8000'
class TestEventProducer(unittest.TestCase):
def setUp(self):
self.producer_url = 'http://test_site_producer:8000'
create_event_producer(self.producer_url)
frappe.db.sql('delete from tabToDo')
frappe.db.sql('delete from tabNote')
create_event_producer(producer_url)
def test_insert(self):
producer = self.get_remote_site()
producer = get_remote_site()
producer_doc = insert_into_producer(producer, 'test creation 1 sync')
self.pull_producer_data()
self.assertTrue(frappe.db.exists('ToDo', producer_doc.name))
def test_update(self):
producer = self.get_remote_site()
producer = get_remote_site()
producer_doc = insert_into_producer(producer, 'test update 1')
producer_doc['description'] = 'test update 2'
producer_doc = producer.update(producer_doc)
@ -53,7 +31,7 @@ class TestEventProducer(unittest.TestCase):
self.assertEqual(local_doc.description, producer_doc.description)
def test_delete(self):
producer = self.get_remote_site()
producer = get_remote_site()
producer_doc = insert_into_producer(producer, 'test delete sync')
self.pull_producer_data()
self.assertTrue(frappe.db.exists('ToDo', producer_doc.name))
@ -62,17 +40,17 @@ class TestEventProducer(unittest.TestCase):
self.assertFalse(frappe.db.exists('ToDo', producer_doc.name))
def test_multiple_doctypes_sync(self):
producer = self.get_remote_site()
producer = get_remote_site()
#insert todo and note in producer
producer_todo = insert_into_producer(producer, 'test multiple doc sync')
producer_note1 = frappe.get_doc(dict(doctype='Note', title='test multiple doc sync 1'))
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note1.title})
frappe.db.delete('Note', {'title': producer_note1.title})
producer_note1 = frappe._dict(doctype='Note', title='test multiple doc sync 1')
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note1['title']})
frappe.db.delete('Note', {'title': producer_note1['title']})
producer_note1 = producer.insert(producer_note1)
producer_note2 = frappe.get_doc(dict(doctype='Note', title='test multiple doc sync 2'))
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note2.title})
frappe.db.delete('Note', {'title': producer_note2.title})
producer_note2 = frappe._dict(doctype='Note', title='test multiple doc sync 2')
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note2['title']})
frappe.db.delete('Note', {'title': producer_note2['title']})
producer_note2 = producer.insert(producer_note2)
#update in producer
@ -98,22 +76,19 @@ class TestEventProducer(unittest.TestCase):
self.assertFalse(frappe.db.exists('Note', producer_note2.name))
def test_child_table_sync_with_dependencies(self):
producer = self.get_remote_site()
producer_user = frappe.get_doc(dict(doctype='User', email='test_user@sync.com', first_name='Test Sync User'))
producer = get_remote_site()
producer_user = frappe._dict(doctype='User', email='test_user@sync.com', send_welcome_email=0,
first_name='Test Sync User', enabled=1, roles=[{'role': 'System Manager'}])
delete_on_remote_if_exists(producer, 'User', {'email': producer_user.email})
frappe.db.delete('User', {'email':producer_user.email})
producer_user.enabled = 1
producer_user.append('roles', {
'role': 'System Manager'
})
producer_user = producer.insert(producer_user)
producer_note = frappe.get_doc(dict(doctype='Note', title='test child table dependency sync'))
producer_note.append('seen_by', {
'user': producer_user.name
})
producer_note = frappe._dict(doctype='Note', title='test child table dependency sync',
seen_by=[{'user': producer_user.name}])
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note.title})
frappe.db.delete('Note', {'title': producer_note.title})
producer_note = producer.insert(producer_note)
self.pull_producer_data()
self.assertTrue(frappe.db.exists('User', producer_user.name))
if self.assertTrue(frappe.db.exists('Note', producer_note.name)):
@ -121,24 +96,23 @@ class TestEventProducer(unittest.TestCase):
self.assertEqual(len(local_note.seen_by), 1)
def test_dynamic_link_dependencies_synced(self):
producer = get_remote_site()
#unsubscribe for Note to check whether dependency is fulfilled
event_producer = frappe.get_doc('Event Producer', self.producer_url)
event_producer = frappe.get_doc('Event Producer', producer_url)
event_producer.producer_doctypes = []
event_producer.append('producer_doctypes', {
'ref_doctype': 'ToDo',
'use_same_name': 1
})
event_producer.save()
event_producer.reload()
producer = self.get_remote_site()
producer_link_doc = frappe.get_doc(dict(doctype='Note', title='Test Dynamic Link 1'))
producer_link_doc = frappe._dict(doctype='Note', title='Test Dynamic Link 1')
delete_on_remote_if_exists(producer, 'Note', {'title': producer_link_doc.title})
frappe.db.delete('Note', {'title': producer_link_doc.title})
producer_link_doc = producer.insert(producer_link_doc)
producer_doc = frappe.get_doc(dict(doctype='ToDo', description='Test Dynamic Link 2', assigned_by='Administrator',
reference_type='Note', reference_name=producer_link_doc.name))
producer_doc = frappe._dict(doctype='ToDo', description='Test Dynamic Link 2', assigned_by='Administrator',
reference_type='Note', reference_name=producer_link_doc.name)
producer_doc = producer.insert(producer_doc)
self.pull_producer_data()
@ -147,39 +121,42 @@ class TestEventProducer(unittest.TestCase):
self.assertTrue(frappe.db.exists('Note', producer_link_doc.name))
self.assertEqual(producer_link_doc.name, frappe.db.get_value('ToDo', producer_doc.name, 'reference_name'))
reset_configuration(producer_url)
def test_naming_configuration(self):
#test with use_same_name = 0
event_producer = frappe.get_doc('Event Producer', self.producer_url)
producer = get_remote_site()
event_producer = frappe.get_doc('Event Producer', producer_url)
event_producer.producer_doctypes = []
event_producer.append('producer_doctypes', {
'ref_doctype': 'ToDo',
'use_same_name': 0
})
event_producer.save()
event_producer.reload()
producer = self.get_remote_site()
producer_doc = insert_into_producer(producer, 'test different name sync')
self.pull_producer_data()
self.assertTrue(frappe.db.exists('ToDo', {'remote_docname': producer_doc.name, 'remote_site_name': self.producer_url}))
self.assertTrue(frappe.db.exists('ToDo', {'remote_docname': producer_doc.name, 'remote_site_name': producer_url}))
reset_configuration(producer_url)
def test_update_log(self):
producer = self.get_remote_site()
producer = get_remote_site()
producer_doc = insert_into_producer(producer, 'test update log')
update_log_doc = producer.get_value('Event Update Log', 'docname', {'docname': producer_doc.get('name')})
self.assertEqual(update_log_doc.get('docname'), producer_doc.get('name'))
def test_event_sync_log(self):
producer = self.get_remote_site()
producer = get_remote_site()
producer_doc = insert_into_producer(producer, 'test event sync log')
self.pull_producer_data()
self.assertTrue(frappe.db.exists('Event Sync Log', {'docname': producer_doc.name}))
def pull_producer_data(self):
pull_from_node(self.producer_url)
pull_from_node(producer_url)
def get_remote_site(self):
producer_doc = frappe.get_doc('Event Producer', self.producer_url)
producer_doc = frappe.get_doc('Event Producer', producer_url)
producer_site = FrappeClient(
url=producer_doc.producer_url,
api_key=producer_doc.api_key,
@ -189,7 +166,8 @@ class TestEventProducer(unittest.TestCase):
return producer_site
def test_mapping(self):
event_producer = frappe.get_doc('Event Producer', self.producer_url)
producer = get_remote_site()
event_producer = frappe.get_doc('Event Producer', producer_url)
event_producer.producer_doctypes = []
mapping = [{
'local_fieldname': 'description',
@ -202,10 +180,8 @@ class TestEventProducer(unittest.TestCase):
'mapping': get_mapping('ToDo to Note', 'ToDo', 'Note', mapping)
})
event_producer.save()
event_producer.reload()
producer = self.get_remote_site()
producer_note = frappe.get_doc(dict(doctype='Note', title='Test Mapping', content='Test Mapping'))
producer_note = frappe._dict(doctype='Note', title='Test Mapping', content='Test Mapping')
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note.title})
producer_note = producer.insert(producer_note)
self.pull_producer_data()
@ -225,8 +201,11 @@ class TestEventProducer(unittest.TestCase):
#check delete
self.assertFalse(frappe.db.exists('ToDo', {'description': producer_note.content}))
reset_configuration(producer_url)
def test_inner_mapping(self):
event_producer = frappe.get_doc('Event Producer', self.producer_url)
producer = get_remote_site()
event_producer = frappe.get_doc('Event Producer', producer_url)
event_producer.producer_doctypes = []
inner_mapping = [
{
@ -255,10 +234,8 @@ class TestEventProducer(unittest.TestCase):
'mapping': get_mapping('ToDo to Note Mapping', 'ToDo', 'Note', mapping)
})
event_producer.save()
event_producer.reload()
producer = self.get_remote_site()
producer_note = frappe.get_doc(dict(doctype='Note', title='Inner Mapping Tester', content='Test Inner Mapping'))
producer_note = frappe._dict(doctype='Note', title='Inner Mapping Tester', content='Test Inner Mapping')
delete_on_remote_if_exists(producer, 'Note', {'title': producer_note.title})
producer_note = producer.insert(producer_note)
self.pull_producer_data()
@ -268,11 +245,13 @@ class TestEventProducer(unittest.TestCase):
#check doc inserted
self.assertTrue(frappe.db.exists('ToDo', {'description': producer_note.content}))
reset_configuration(producer_url)
def insert_into_producer(producer, description):
#create and insert todo on remote site
todo = frappe.get_doc(dict(doctype='ToDo', description=description, assigned_by='Administrator'))
return producer.insert(todo)
#create and insert todo on remote site
todo = dict(doctype='ToDo', description=description, assigned_by='Administrator')
return producer.insert(todo)
def delete_on_remote_if_exists(producer, doctype, filters):
remote_doc = producer.get_value(doctype, 'name', filters)
@ -293,3 +272,46 @@ def get_mapping(mapping_name, local, remote, field_map):
doc.append('field_mapping', entry)
doc.save()
return doc.name
def create_event_producer(producer_url):
if frappe.db.exists('Event Producer', producer_url):
return
event_producer = frappe.new_doc('Event Producer')
event_producer.producer_doctypes = []
event_producer.producer_url = producer_url
event_producer.append('producer_doctypes', {
'ref_doctype': 'ToDo',
'use_same_name': 1
})
event_producer.append('producer_doctypes', {
'ref_doctype': 'Note',
'use_same_name': 1
})
event_producer.user = 'Administrator'
event_producer.save()
def reset_configuration(producer_url):
event_producer = frappe.get_doc('Event Producer', producer_url)
event_producer.producer_doctypes = []
event_producer.producer_url = producer_url
event_producer.append('producer_doctypes', {
'ref_doctype': 'ToDo',
'use_same_name': 1
})
event_producer.append('producer_doctypes', {
'ref_doctype': 'Note',
'use_same_name': 1
})
event_producer.user = 'Administrator'
event_producer.save()
def get_remote_site():
producer_doc = frappe.get_doc('Event Producer', producer_url)
producer_site = FrappeClient(
url=producer_doc.producer_url,
api_key=producer_doc.api_key,
api_secret=producer_doc.get_password('api_secret'),
frappe_authorization_source='Event Consumer'
)
return producer_site

View file

@ -103,6 +103,7 @@ class InvalidColumnName(ValidationError): pass
class IncompatibleApp(ValidationError): pass
class InvalidDates(ValidationError): pass
class DataTooLongException(ValidationError): pass
class FileAlreadyAttachedException(Exception): pass
# OAuth exceptions
class InvalidAuthorizationHeader(CSRFTokenError): pass
class InvalidAuthorizationPrefix(CSRFTokenError): pass

View file

@ -56,6 +56,8 @@ website_route_rules = [
{"from_route": "/profile", "to_route": "me"},
]
base_template = "templates/base.html"
write_file_keys = ["file_url", "file_name"]
notification_config = "frappe.core.notifications.get_notification_config"
@ -270,7 +272,10 @@ setup_wizard_exception = [
]
before_migrate = ['frappe.patches.v11_0.sync_user_permission_doctype_before_migrate.execute']
after_migrate = ['frappe.website.doctype.website_theme.website_theme.generate_theme_files_if_not_exist']
after_migrate = [
'frappe.website.doctype.website_theme.website_theme.generate_theme_files_if_not_exist',
'frappe.modules.full_text_search.build_index_for_all_routes'
]
otp_methods = ['OTP App','Email','SMS']
user_privacy_documents = [

View file

@ -113,12 +113,12 @@ def remove_from_installed_apps(app_name):
installed_apps = frappe.get_installed_apps()
if app_name in installed_apps:
installed_apps.remove(app_name)
frappe.db.set_global("installed_apps", json.dumps(installed_apps))
frappe.db.set_value("DefaultValue", {"defkey": "installed_apps"}, "defvalue", json.dumps(installed_apps))
frappe.db.commit()
if frappe.flags.in_install:
post_install()
def remove_app(app_name, dry_run=False, yes=False):
def remove_app(app_name, dry_run=False, yes=False, no_backup=False):
"""Delete app and all linked to the app's module with the app."""
if not dry_run and not yes:
@ -126,9 +126,10 @@ def remove_app(app_name, dry_run=False, yes=False):
if confirm!="y":
return
from frappe.utils.backups import scheduled_backup
print("Backing up...")
scheduled_backup(ignore_files=True)
if not no_backup:
from frappe.utils.backups import scheduled_backup
print("Backing up...")
scheduled_backup(ignore_files=True)
drop_doctypes = []

View file

@ -56,7 +56,8 @@ def take_backup_to_dropbox(retry_count=0, upload_db_backup=True):
did_not_upload, error_log = backup_to_dropbox(upload_db_backup)
if did_not_upload: raise Exception
send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
if cint(frappe.db.get_value("Dropbox Settings", None, "send_email_for_successful_backup")):
send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
except JobTimeoutException:
if retry_count < 2:
args = {

View file

@ -1,6 +1,7 @@
# imports - standard imports
import getpass
import json
import os
import re
import sys
@ -8,6 +9,7 @@ import sys
import click
from html2text import html2text
import requests
from tenacity import retry, stop_after_attempt, wait_fixed
# imports - module imports
import frappe
@ -138,6 +140,7 @@ def select_team(session):
return team
@retry(stop=stop_after_attempt(5))
def get_new_site_options():
site_options_sc = session.post(options_url)
@ -158,6 +161,7 @@ def is_valid_subdomain(subdomain):
print("Subdomain contains invalid characters. Use lowercase characters, numbers and hyphens")
@retry(stop=stop_after_attempt(5))
def is_subdomain_available(subdomain):
res = session.post(site_exists_url, {"subdomain": subdomain})
if res.ok:
@ -252,6 +256,17 @@ def get_subdomain(domain):
return subdomain
@retry(stop=stop_after_attempt(2), wait=wait_fixed(5))
def upload_backup_file(file_type, file_path):
return session.post(files_url, data={}, files={
"file": open(file_path, "rb"),
"is_private": 1,
"folder": "Home",
"method": "press.api.site.upload_backup",
"type": file_type
})
@add_line_after
def upload_backup(local_site):
# take backup
@ -265,14 +280,11 @@ def upload_backup(local_site):
("public", odb.backup_path_files),
("private", odb.backup_path_private_files)
]):
file_upload_response = session.post(files_url, data={}, files={
"file": open(file_path, "rb"),
"is_private": 1,
"folder": "Home",
"method": "press.api.site.upload_backup",
"type": file_type
})
print("Uploading files ({}/3)".format(x+1), end="\r")
file_name = file_path.split(os.sep)[-1]
print("Uploading {} file: {} ({}/3)".format(file_type, file_name, x+1))
file_upload_response = upload_backup_file(file_type, file_path)
if file_upload_response.ok:
files_session[file_type] = file_upload_response.json()["message"]
else:
@ -362,7 +374,10 @@ def create_session():
if login_sc.ok:
print("Authorization Successful! ✅")
team = select_team(session)
session.headers.update({"X-Press-Team": team })
session.headers.update({
"X-Press-Team": team,
"Connection": "keep-alive"
})
return session
else:
handle_request_failure(message="Authorization Failed with Error Code {}".format(login_sc.status_code), traceback=False)

View file

@ -504,19 +504,7 @@ class BaseDocument(object):
for _df in fields_to_fetch:
if self.is_new() or self.docstatus != 1 or _df.allow_on_submit:
fetch_from_fieldname = _df.fetch_from.split('.')[-1]
value = values[fetch_from_fieldname]
if _df.fieldtype == 'Small Text' or _df.fieldtype == 'Text' or _df.fieldtype == 'Data':
if fetch_from_fieldname in default_fields:
from frappe.model.meta import get_default_df
fetch_from_df = get_default_df(fetch_from_fieldname)
else:
fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname)
fetch_from_ft = fetch_from_df.get('fieldtype')
if fetch_from_ft == 'Text Editor' and value:
value = unescape_html(strip_html(value))
setattr(self, _df.fieldname, value)
self.set_fetch_from_value(doctype, _df, values)
notify_link_count(doctype, docname)
@ -531,6 +519,27 @@ class BaseDocument(object):
return invalid_links, cancelled_links
def set_fetch_from_value(self, doctype, df, values):
fetch_from_fieldname = df.fetch_from.split('.')[-1]
value = values[fetch_from_fieldname]
if df.fieldtype in ['Small Text', 'Text', 'Data']:
if fetch_from_fieldname in default_fields:
from frappe.model.meta import get_default_df
fetch_from_df = get_default_df(fetch_from_fieldname)
else:
fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname)
if not fetch_from_df:
frappe.throw(
_('Please check the value of "Fetch From" set for field {0}').format(frappe.bold(df.label)),
title = _('Wrong Fetch From value')
)
fetch_from_ft = fetch_from_df.get('fieldtype')
if fetch_from_ft == 'Text Editor' and value:
value = unescape_html(strip_html(value))
setattr(self, df.fieldname, value)
def _validate_selects(self):
if frappe.flags.in_import:
return

View file

@ -961,7 +961,8 @@ class Document(BaseDocument):
update_global_search(self)
if getattr(self.meta, 'track_changes', False) and self._doc_before_save and not self.flags.ignore_version:
if getattr(self.meta, 'track_changes', False) and not self.flags.ignore_version \
and not self.doctype == 'Version' and not frappe.flags.in_install:
self.save_version()
self.run_method('on_change')
@ -1058,8 +1059,13 @@ class Document(BaseDocument):
def save_version(self):
"""Save version info"""
if not self._doc_before_save and frappe.flags.in_patch: return
version = frappe.new_doc('Version')
if version.set_diff(self._doc_before_save, self):
if not self._doc_before_save:
version.for_insert(self)
version.insert(ignore_permissions=True)
elif version.set_diff(self._doc_before_save, self):
version.insert(ignore_permissions=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)

View file

@ -14,6 +14,12 @@ def make_mapped_doc(method, source_name, selected_children=None, args=None):
Sets selected_children as flags for the `get_mapped_doc` method.
Called from `open_mapped_doc` from create_new.js'''
for hook in frappe.get_hooks("override_whitelisted_methods", {}).get(method, []):
# override using the first hook
method = hook
break
method = frappe.get_attr(method)
if method not in frappe.whitelisted:

View file

@ -483,6 +483,9 @@ class Meta(Document):
def get_row_template(self):
return self.get_web_template(suffix='_row')
def get_list_template(self):
return self.get_web_template(suffix='_list')
def get_web_template(self, suffix=''):
'''Returns the relative path of the row template for this doctype'''
module_name = frappe.scrub(self.module)

View file

@ -0,0 +1,106 @@
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from whoosh.index import create_in, open_dir
from whoosh.fields import TEXT, ID, Schema
from whoosh.qparser import MultifieldParser, FieldsPlugin, WildcardPlugin
from whoosh.query import Prefix
from bs4 import BeautifulSoup
from frappe.website.render import render_page
from frappe.utils import set_request, cint
from frappe.utils.global_search import get_routes_to_index
def build_index_for_all_routes():
	"""Build the ``web_routes`` full-text index from every indexable web route."""
	print("Building search index for all web routes...")
	documents = []
	for route in get_routes_to_index():
		documents.append(get_document_to_index(route))
	build_index("web_routes", documents)
@frappe.whitelist(allow_guest=True)
def web_search(index_name, query, scope=None, limit=20):
	"""Guest-accessible wrapper around :func:`search` (coerces ``limit`` to int)."""
	return search(index_name, query, scope, cint(limit))
def get_document_to_index(route):
	"""Render ``route`` as the Guest user and extract its indexable text.

	Returns a ``frappe._dict`` with ``title``, ``content`` and ``path`` keys,
	or ``None`` when the page cannot be rendered (permission errors, missing
	routes, template failures — indexing is best-effort).
	"""
	frappe.set_user("Guest")
	frappe.local.no_cache = True

	try:
		set_request(method="GET", path=route)
		content = render_page(route)
		soup = BeautifulSoup(content, "html.parser")
		page_content = soup.find(class_="page_content")
		text_content = page_content.text if page_content else ""
		title = soup.title.text.strip() if soup.title else route
		return frappe._dict(title=title, content=text_content, path=route)
	except Exception:
		# original code listed PermissionError/DoesNotExistError/ValidationError
		# alongside bare Exception, which made them redundant — a failed page
		# is simply skipped
		return None
	finally:
		# bug fix: the user was only restored on success, leaving the
		# session as Guest after a failed render
		frappe.set_user("Administrator")
def build_index(index_name, documents):
	"""(Re)create the whoosh index ``index_name`` and add every non-empty document."""
	schema = Schema(
		title=TEXT(stored=True), path=ID(stored=True), content=TEXT(stored=True)
	)
	index_dir = get_index_path(index_name)
	frappe.create_folder(index_dir)

	ix = create_in(index_dir, schema)
	writer = ix.writer()
	# documents may contain None entries for routes that failed to render
	for doc in filter(None, documents):
		writer.add_document(title=doc.title, path=doc.path, content=doc.content)
	writer.commit()
def search(index_name, text, scope=None, limit=20):
	"""Query ``index_name`` for ``text`` and return matches with highlights.

	``scope``, when given, restricts matches to paths under that prefix.
	"""
	ix = open_dir(get_index_path(index_name))

	matches = []
	with ix.searcher() as searcher:
		parser = MultifieldParser(["title", "content"], ix.schema)
		parser.remove_plugin_class(FieldsPlugin)
		parser.remove_plugin_class(WildcardPlugin)
		query = parser.parse(text)

		path_filter = Prefix("path", scope) if scope else None

		for hit in searcher.search(query, limit=limit, filter=path_filter):
			matches.append(
				frappe._dict(
					title=hit["title"],
					path=hit["path"],
					title_highlights=hit.highlights("title"),
					content_highlights=hit.highlights("content"),
				)
			)

	return matches
def get_index_path(index_name):
	# indexes live under the site directory: sites/<site>/indexes/<index_name>
	return frappe.get_site_path("indexes", index_name)

View file

@ -288,3 +288,5 @@ execute:frappe.delete_doc("DocType", "Onboarding Slide")
execute:frappe.delete_doc("DocType", "Onboarding Slide Field")
execute:frappe.delete_doc("DocType", "Onboarding Slide Help Link")
frappe.patches.v13_0.update_date_filters_in_user_settings
frappe.patches.v13_0.update_duration_options
frappe.patches.v13_0.replace_old_data_import

View file

@ -0,0 +1,14 @@
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
	"""Patch: promote the beta data import to 'Data Import', keeping the old one as 'Data Import Legacy'."""
	# keep the legacy records reachable under a new doctype name
	frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
	# commit the rename before dropping the (now orphaned) old table
	frappe.db.commit()
	frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
	frappe.reload_doc("core", "doctype", "data_import")
	frappe.get_doc("DocType", "Data Import").on_update()
	frappe.delete_doc_if_exists("DocType", "Data Import Beta")

View file

@ -0,0 +1,28 @@
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
	"""Patch: migrate DocField's show_days/show_seconds flags to hide_days/hide_seconds.

	The flag semantics were inverted (show -> hide): rows with the old flag
	unset get the new flag set, then the old columns are dropped.
	"""
	frappe.reload_doc('core', 'doctype', 'DocField')

	if frappe.db.has_column('DocField', 'show_days'):
		frappe.db.sql("""
			UPDATE
				tabDocField
			SET
				hide_days = 1 WHERE show_days = 0
		""")
		frappe.db.sql_ddl('alter table tabDocField drop column show_days')

	if frappe.db.has_column('DocField', 'show_seconds'):
		frappe.db.sql("""
			UPDATE
				tabDocField
			SET
				hide_seconds = 1 WHERE show_seconds = 0
		""")
		frappe.db.sql_ddl('alter table tabDocField drop column show_seconds')

	frappe.clear_cache(doctype='DocField')

View file

@ -0,0 +1,183 @@
/*
Night Owl for highlight.js (c) Carl Baxter <carl@cbax.tech>
An adaptation of Sarah Drasner's Night Owl VS Code Theme
https://github.com/sdras/night-owl-vscode-theme
Copyright (c) 2018 Sarah Drasner
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
/* Base code block container */
.hljs {
display: block;
overflow-x: auto;
padding: 1rem 1.25rem;
background: #011627;
color: #d6deeb;
border-radius: 0.5rem;
}
/* General Purpose */
.hljs-keyword {
color: #c792ea;
font-style: italic;
}
.hljs-built_in {
color: #addb67;
font-style: italic;
}
.hljs-type {
color: #82aaff;
}
.hljs-literal {
color: #ff5874;
}
.hljs-number {
color: #F78C6C;
}
.hljs-regexp {
color: #5ca7e4;
}
.hljs-string {
color: #ecc48d;
}
.hljs-subst {
color: #d3423e;
}
.hljs-symbol {
color: #82aaff;
}
.hljs-class {
color: #ffcb8b;
}
.hljs-function {
color: #82AAFF;
}
.hljs-title {
color: #DCDCAA;
font-style: italic;
}
.hljs-params {
color: #7fdbca;
}
/* Meta */
.hljs-comment {
color: #637777;
font-style: italic;
}
.hljs-doctag {
color: #7fdbca;
}
.hljs-meta {
color: #82aaff;
}
.hljs-meta-keyword {
color: #82aaff;
}
.hljs-meta-string {
color: #ecc48d;
}
/* Tags, attributes, config */
.hljs-section {
color: #82b1ff;
}
.hljs-tag,
.hljs-name,
.hljs-builtin-name {
color: #7fdbca;
}
.hljs-attr {
color: #7fdbca;
}
.hljs-attribute {
color: #80cbc4;
}
.hljs-variable {
color: #addb67;
}
/* Markup */
.hljs-bullet {
color: #d9f5dd;
}
.hljs-code {
color: #80CBC4;
}
.hljs-emphasis {
color: #c792ea;
font-style: italic;
}
.hljs-strong {
color: #addb67;
font-weight: bold;
}
.hljs-formula {
color: #c792ea;
}
.hljs-link {
color: #ff869a;
}
.hljs-quote {
color: #697098;
font-style: italic;
}
/* CSS */
.hljs-selector-tag {
color: #ff6363;
}
.hljs-selector-id {
color: #fad430;
}
.hljs-selector-class {
color: #addb67;
font-style: italic;
}
.hljs-selector-attr,
.hljs-selector-pseudo {
color: #c792ea;
font-style: italic;
}
/* Templates */
.hljs-template-tag {
color: #c792ea;
}
.hljs-template-variable {
color: #addb67;
}
/* diff — 8-digit hex colors carry alpha (CSS Color Module Level 4) */
.hljs-addition {
color: #addb67ff;
font-style: italic;
}
.hljs-deletion {
color: #EF535090;
font-style: italic;
}

View file

@ -1,28 +0,0 @@
export default class ColumnPickerFields extends frappe.views.ReportView {
	// No UI: this subclass is only used to compute column options.
	show() {}

	// Returns one flat list of {label, value, description} options covering
	// the parent doctype and every child-table doctype.
	get_fields_as_options() {
		const column_map = this.get_columns_for_picker();
		const doctypes = [
			this.doctype,
			...frappe.meta.get_table_fields(this.doctype).map(df => df.options)
		];
		return doctypes.flatMap(doctype =>
			column_map[doctype].map(df => {
				const is_child = this.doctype !== doctype;
				const label = is_child ? `${df.label} (${doctype})` : df.label;
				const value = is_child ? `${doctype}:${df.fieldname}` : df.fieldname;
				return { label, value, description: value };
			})
		);
	}
}

View file

@ -1,9 +1,9 @@
import ColumnPickerFields from './column_picker_fields';
frappe.provide('frappe.data_import');
frappe.data_import.DataExporter = class DataExporter {
constructor(doctype) {
constructor(doctype, exporting_for) {
this.doctype = doctype;
this.exporting_for = exporting_for;
frappe.model.with_doctype(doctype, () => {
this.make_dialog();
});
@ -13,6 +13,36 @@ frappe.data_import.DataExporter = class DataExporter {
this.dialog = new frappe.ui.Dialog({
title: __('Export Data'),
fields: [
{
fieldtype: 'Select',
fieldname: 'exporting_for',
label: __('Exporting For'),
options: [
{
label: __('Insert New Records'),
value: 'Insert New Records'
},
{
label: __('Update Existing Records'),
value: 'Update Existing Records'
}
],
change: () => {
let exporting_for = this.dialog.get_value('exporting_for');
this.dialog.set_value(
'export_records',
exporting_for === 'Insert New Records' ? 'blank_template' : 'all'
);
// Force ID field to be exported when updating existing records
let id_field = this.dialog.get_field(this.doctype).options[0];
if (id_field.value === 'name' && id_field.$checkbox) {
id_field.$checkbox
.find('input')
.prop('disabled', exporting_for === 'Update Existing Records');
}
}
},
{
fieldtype: 'Select',
fieldname: 'export_records',
@ -67,27 +97,32 @@ frappe.data_import.DataExporter = class DataExporter {
on_change: () => this.update_primary_action(),
options: this.get_multicheck_options(this.doctype)
},
...frappe.meta.get_table_fields(this.doctype)
.map(df => {
let doctype = df.options;
let label = df.reqd
? __('{0} (1 row mandatory)', [doctype])
: __(doctype);
return {
label,
fieldname: doctype,
fieldtype: 'MultiCheck',
columns: 2,
on_change: () => this.update_primary_action(),
options: this.get_multicheck_options(doctype)
};
})
...frappe.meta.get_table_fields(this.doctype).map(df => {
let doctype = df.options;
let child_fieldname = df.fieldname;
let label = df.reqd
? // prettier-ignore
__('{0} ({1}) (1 row mandatory)', [df.label || df.fieldname, doctype])
: __('{0} ({1})', [df.label || df.fieldname, doctype]);
return {
label,
fieldname: child_fieldname,
fieldtype: 'MultiCheck',
columns: 2,
on_change: () => this.update_primary_action(),
options: this.get_multicheck_options(doctype, child_fieldname)
};
})
],
primary_action_label: __('Export'),
primary_action: values => this.export_records(values),
on_page_show: () => this.select_mandatory()
});
if (this.exporting_for) {
this.dialog.set_value('exporting_for', this.exporting_for);
}
this.make_filter_area();
this.make_select_all_buttons();
this.update_record_count_message();
@ -97,7 +132,7 @@ frappe.data_import.DataExporter = class DataExporter {
export_records() {
let method =
'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template';
'/api/method/frappe.core.doctype.data_import.data_import.download_template';
let multicheck_fields = this.dialog.fields
.filter(df => df.fieldtype === 'MultiCheck')
@ -165,16 +200,16 @@ frappe.data_import.DataExporter = class DataExporter {
}
select_mandatory() {
let mandatory_table_doctypes = frappe.meta
let mandatory_table_fields = frappe.meta
.get_table_fields(this.doctype)
.filter(df => df.reqd)
.map(df => df.options);
mandatory_table_doctypes.push(this.doctype);
.map(df => df.fieldname);
mandatory_table_fields.push(this.doctype);
let multicheck_fields = this.dialog.fields
.filter(df => df.fieldtype === 'MultiCheck')
.map(df => df.fieldname)
.filter(doctype => mandatory_table_doctypes.includes(doctype));
.filter(doctype => mandatory_table_fields.includes(doctype));
let checkboxes = [].concat(
...multicheck_fields.map(fieldname => {
@ -192,8 +227,12 @@ frappe.data_import.DataExporter = class DataExporter {
}
unselect_all() {
let update_existing_records =
this.dialog.get_value('exporting_for') == 'Update Existing Records';
this.dialog.$wrapper
.find(':checkbox')
.find(
`:checkbox${update_existing_records ? ':not([data-unit=name])' : ''}`
)
.prop('checked', false)
.trigger('change');
}
@ -253,11 +292,9 @@ frappe.data_import.DataExporter = class DataExporter {
}, {});
}
get_multicheck_options(doctype) {
get_multicheck_options(doctype, child_fieldname = null) {
if (!this.column_map) {
this.column_map = new ColumnPickerFields({
doctype: this.doctype
}).get_columns_for_picker();
this.column_map = get_columns_for_picker(this.doctype);
}
let autoname_field = null;
@ -267,7 +304,11 @@ frappe.data_import.DataExporter = class DataExporter {
autoname_field = frappe.meta.get_field(doctype, fieldname);
}
return this.column_map[doctype]
let fields = child_fieldname
? this.column_map[child_fieldname]
: this.column_map[doctype];
return fields
.filter(df => {
if (autoname_field && df.fieldname === autoname_field.fieldname) {
return false;
@ -289,3 +330,52 @@ frappe.data_import.DataExporter = class DataExporter {
});
}
};
export function get_columns_for_picker(doctype) {
	// A field is exportable unless it is a no-value (layout/meta) field
	// or one of the nested-set helper columns.
	const is_exportable = df =>
		!frappe.model.no_value_type.includes(df.fieldtype) &&
		!['lft', 'rgt'].includes(df.fieldname);

	// Every column list starts with a mandatory ID column; build a fresh
	// object each time so entries never share state.
	const id_column = () => ({
		label: __('ID'),
		fieldname: 'name',
		fieldtype: 'Data',
		reqd: 1
	});

	const out = {};

	// parent doctype columns, keyed by doctype name
	out[doctype] = [
		id_column(),
		...frappe.meta.get_docfields(doctype).filter(is_exportable)
	];

	// child table columns, keyed by the table fieldname on the parent
	for (const df of frappe.meta.get_table_fields(doctype)) {
		out[df.fieldname] = [
			id_column(),
			...frappe.meta.get_docfields(df.options).filter(is_exportable)
		];
	}

	return out;
}

View file

@ -1,5 +1,5 @@
import DataTable from 'frappe-datatable';
import ColumnPickerFields from './column_picker_fields';
import { get_columns_for_picker } from './data_exporter';
frappe.provide('frappe.data_import');
@ -236,9 +236,7 @@ frappe.data_import.ImportPreview = class ImportPreview {
}
show_column_mapper() {
let column_picker_fields = new ColumnPickerFields({
doctype: this.doctype
});
let column_picker_fields = get_columns_for_picker(this.doctype);
let changed = [];
let fields = this.preview_data.columns.map((col, i) => {
let df = col.df;

View file

@ -5,7 +5,8 @@ frappe.ui.form.ControlButton = frappe.ui.form.ControlData.extend({
},
make_input: function() {
var me = this;
this.$input = $('<button class="btn btn-default btn-xs">')
const btn_type = this.df.primary ? 'btn-primary': 'btn-default';
this.$input = $(`<button class="btn btn-xs ${btn_type}">`)
.prependTo(me.input_area)
.on("click", function() {
me.onclick();
@ -16,16 +17,15 @@ frappe.ui.form.ControlButton = frappe.ui.form.ControlData.extend({
this.toggle_label(false);
},
onclick: function() {
if(this.frm && this.frm.doc) {
if(this.frm.script_manager.has_handlers(this.df.fieldname, this.doctype)) {
if (this.frm && this.frm.doc) {
if (this.frm.script_manager.has_handlers(this.df.fieldname, this.doctype)) {
this.frm.script_manager.trigger(this.df.fieldname, this.doctype, this.docname);
} else {
if (this.df.options) {
this.run_server_script();
}
}
}
else if(this.df.click) {
} else if (this.df.click) {
this.df.click();
}
},

View file

@ -1,3 +1,5 @@
frappe.provide('frappe.phone_call');
frappe.ui.form.ControlData = frappe.ui.form.ControlInput.extend({
html_element: "input",
input_type: "text",
@ -21,10 +23,27 @@ frappe.ui.form.ControlData = frappe.ui.form.ControlInput.extend({
this.bind_change_event();
this.bind_focusout();
this.setup_autoname_check();
if (this.df.options == 'Phone') {
this.setup_phone();
}
// somehow this event does not bubble up to document
// after v7, if you can debug, remove this
},
setup_phone() {
if (frappe.phone_call.handler) {
this.$wrapper.find('.control-input')
.append(`
<span class="phone-btn">
<a class="btn-open no-decoration" title="${__('Make a call')}">
<i class="fa fa-phone"></i></a>
</span>
`)
.find('.phone-btn')
.click(() => {
frappe.phone_call.handler(this.get_value(), this.frm);
});
}
},
setup_autoname_check: function() {
if (!this.df.parent) return;
this.meta = frappe.get_meta(this.df.parent);

View file

@ -13,10 +13,10 @@ frappe.ui.form.ControlDuration = frappe.ui.form.ControlData.extend({
</div>`
);
this.$wrapper.append(this.$picker);
this.build_numeric_input("days", !this.duration_options.show_days);
this.build_numeric_input("days", this.duration_options.hide_days);
this.build_numeric_input("hours", false);
this.build_numeric_input("minutes", false);
this.build_numeric_input("seconds", !this.duration_options.show_seconds);
this.build_numeric_input("seconds", this.duration_options.hide_seconds);
this.set_duration_picker_value(this.value);
this.$picker.hide();
this.bind_events();
@ -130,10 +130,10 @@ frappe.ui.form.ControlDuration = frappe.ui.form.ControlData.extend({
if (this.inputs) {
total_duration.minutes = parseInt(this.inputs.minutes.val());
total_duration.hours = parseInt(this.inputs.hours.val());
if (this.duration_options.show_days) {
if (!this.duration_options.hide_days) {
total_duration.days = parseInt(this.inputs.days.val());
}
if (this.duration_options.show_seconds) {
if (!this.duration_options.hide_seconds) {
total_duration.seconds = parseInt(this.inputs.seconds.val());
}
}

View file

@ -140,6 +140,8 @@ frappe.ui.form.ControlLink = frappe.ui.form.ControlData.extend({
}
});
this.custom_awesomplete_filter && this.custom_awesomplete_filter(this.awesomplete);
this.$input.on("input", frappe.utils.debounce(function(e) {
var doctype = me.get_options();
if(!doctype) return;
@ -467,10 +469,10 @@ frappe.ui.form.ControlLink = frappe.ui.form.ControlData.extend({
for(var i=0; i < fl.length; i++) {
frappe.model.set_value(df.parent, docname, fl[i], fetch_values[i], df.fieldtype);
}
}
},
});
if(Awesomplete) {
if (Awesomplete) {
Awesomplete.prototype.get_item = function(value) {
return this._list.find(function(item) {
return item.value === value;

View file

@ -11,6 +11,8 @@ frappe.ui.form.ControlTableMultiSelect = frappe.ui.form.ControlLink.extend({
// used as an internal model to store values
this.rows = [];
// used as an internal model to filter awesomplete values
this._rows_list = [];
this.$input_area.on('click', (e) => {
if (e.target === this.$input_area.get(0)) {
@ -61,7 +63,7 @@ frappe.ui.form.ControlTableMultiSelect = frappe.ui.form.ControlLink.extend({
});
}
}
this._rows_list = this.rows.map(row => row[link_field.fieldname]);
return this.rows;
},
validate(value) {
@ -141,4 +143,15 @@ frappe.ui.form.ControlTableMultiSelect = frappe.ui.form.ControlLink.extend({
}
return this._link_field;
},
custom_awesomplete_filter: function(awesomplete) {
let me = this;
awesomplete.filter = function(item) {
if (in_list(me._rows_list, item.value)) {
return false;
}
return true;
};
}
});

View file

@ -205,16 +205,18 @@ frappe.ui.form.Timeline = class Timeline {
</div>').appendTo(me.list);
}
// created
me.render_timeline_item({
content: __("created"),
comment_type: "Created",
communication_type: "Comment",
sender: this.frm.doc.owner,
communication_date: this.frm.doc.creation,
creation: this.frm.doc.creation,
frm: this.frm
});
// if a created comment is not added, add the default one
if (!timeline.find(comment => comment.comment_type === 'Created')) {
me.render_timeline_item({
content: __("created"),
comment_type: "Created",
communication_type: "Comment",
sender: this.frm.doc.owner,
communication_date: this.frm.doc.creation,
creation: this.frm.doc.creation,
frm: this.frm
});
}
this.wrapper.find(".is-email").prop("checked", this.last_type==="Email").change();
@ -564,12 +566,17 @@ frappe.ui.form.Timeline = class Timeline {
let updater_reference = data.updater_reference;
if (!$.isEmptyObject(updater_reference)) {
let label = updater_reference.label || __('via {0}', [updater_reference.doctype]);
updater_reference_link = frappe.utils.get_form_link(
updater_reference.doctype,
updater_reference.docname,
true,
label
);
let { doctype, docname } = updater_reference;
if (doctype && docname) {
updater_reference_link = frappe.utils.get_form_link(
doctype,
docname,
true,
label
);
} else {
updater_reference_link = label;
}
}
// value changed in parent
@ -677,6 +684,15 @@ frappe.ui.form.Timeline = class Timeline {
}
}
});
// creation by updater reference
if (data.creation && data.created_by) {
if (updater_reference_link) {
out.push(me.get_version_comment(version, __('created {0}', [updater_reference_link]), 'Created'));
} else {
out.push(me.get_version_comment(version, __('created'), 'Created'));
}
}
});
}

View file

@ -340,7 +340,6 @@ frappe.ui.form.Form = class FrappeForm {
switch_doc(docname) {
// record switch
if(this.docname != docname && (!this.meta.in_dialog || this.in_form) && !this.meta.istable) {
frappe.utils.scroll_to(0);
if (this.print_preview) {
this.print_preview.hide();
}
@ -787,15 +786,24 @@ frappe.ui.form.Form = class FrappeForm {
frappe.msgprint(__('"amended_from" field must be present to do an amendment.'));
return;
}
this.validate_form_action("Amend");
var me = this;
var fn = function(newdoc) {
newdoc.amended_from = me.docname;
if(me.fields_dict && me.fields_dict['amendment_date'])
newdoc.amendment_date = frappe.datetime.obj_to_str(new Date());
};
this.copy_doc(fn, 1);
frappe.utils.play_sound("click");
frappe.xcall('frappe.client.is_document_amended', {
'doctype': this.doc.doctype,
'docname': this.doc.name
}).then(is_amended => {
if (is_amended) {
frappe.throw(__('This document is already amended, you cannot ammend it again'));
}
this.validate_form_action("Amend");
var me = this;
var fn = function(newdoc) {
newdoc.amended_from = me.docname;
if (me.fields_dict && me.fields_dict['amendment_date'])
newdoc.amendment_date = frappe.datetime.obj_to_str(new Date());
};
this.copy_doc(fn, 1);
frappe.utils.play_sound("click");
});
}
validate_form_action(action, resolve) {

View file

@ -6,14 +6,14 @@ import './share';
import './review';
import './document_follow';
import './user_image';
import './form_viewers';
import './form_sidebar_users';
frappe.ui.form.Sidebar = Class.extend({
init: function(opts) {
frappe.ui.form.Sidebar = class {
constructor(opts) {
$.extend(this, opts);
},
}
make: function() {
make () {
var sidebar_content = frappe.render_template("form_sidebar", {doctype: this.frm.doctype, frm:this.frm});
this.sidebar = $('<div class="form-sidebar overlay-sidebar hidden-xs hidden-sm"></div>')
@ -43,9 +43,9 @@ frappe.ui.form.Sidebar = Class.extend({
this.refresh();
},
}
bind_events: function() {
bind_events () {
var me = this;
// scroll to comments
@ -58,7 +58,7 @@ frappe.ui.form.Sidebar = Class.extend({
me.refresh_like();
});
});
},
}
setup_keyboard_shortcuts() {
// add assignment shortcut
@ -66,9 +66,9 @@ frappe.ui.form.Sidebar = Class.extend({
frappe.ui.keys
.get_shortcut_group(this.page)
.add(assignment_link);
},
}
refresh: function() {
refresh () {
if (this.frm.doc.__islocal) {
this.sidebar.toggle(false);
} else {
@ -113,9 +113,9 @@ frappe.ui.form.Sidebar = Class.extend({
this.refresh_like();
frappe.ui.form.set_user_image(this.frm);
}
},
}
show_auto_repeat_status: function() {
show_auto_repeat_status() {
if (this.frm.meta.allow_auto_repeat && this.frm.doc.auto_repeat) {
const me = this;
frappe.call({
@ -135,16 +135,16 @@ frappe.ui.form.Sidebar = Class.extend({
}
});
}
},
}
refresh_comments: function() {
refresh_comments() {
$.map(this.frm.timeline.get_communications(), function(c) {
return (c.communication_type==="Communication" || (c.communication_type=="Comment" && c.comment_type==="Comment")) ? c : null;
});
this.comments.find(".n-comments").html(this.frm.get_docinfo().total_comments);
},
}
make_tags: function() {
make_tags() {
if (this.frm.meta.issingle) {
this.sidebar.find(".form-tags").toggle(false);
return;
@ -157,54 +157,62 @@ frappe.ui.form.Sidebar = Class.extend({
this.frm.tags && this.frm.tags.refresh(user_tags);
}
});
},
make_attachments: function() {
}
make_attachments() {
var me = this;
this.frm.attachments = new frappe.ui.form.Attachments({
parent: me.sidebar.find(".form-attachments"),
frm: me.frm
});
},
make_assignments: function() {
}
make_assignments() {
this.frm.assign_to = new frappe.ui.form.AssignTo({
parent: this.sidebar.find(".form-assignments"),
frm: this.frm
});
},
make_shared: function() {
}
make_shared() {
this.frm.shared = new frappe.ui.form.Share({
frm: this.frm,
parent: this.sidebar.find(".form-shared")
});
},
make_viewers: function() {
this.frm.viewers = new frappe.ui.form.Viewers({
}
make_viewers() {
this.frm.viewers = new frappe.ui.form.SidebarUsers({
frm: this.frm,
parent: this.sidebar.find(".form-viewers")
$wrapper: this.sidebar,
});
},
add_user_action: function(label, click) {
}
add_user_action(label, click) {
return $('<a>').html(label).appendTo($('<li class="user-action-row">')
.appendTo(this.user_actions.removeClass("hidden"))).on("click", click);
},
clear_user_actions: function() {
}
clear_user_actions() {
this.user_actions.addClass("hidden")
this.user_actions.find(".user-action-row").remove();
},
}
make_like: function() {
make_like() {
this.like_wrapper = this.sidebar.find(".liked-by");
this.like_icon = this.sidebar.find(".liked-by .octicon-heart");
this.like_count = this.sidebar.find(".liked-by .likes-count");
frappe.ui.setup_like_popover(this.sidebar.find(".liked-by-parent"), ".liked-by");
},
make_follow: function(){
}
make_follow() {
this.frm.follow = new frappe.ui.form.DocumentFollow({
frm: this.frm,
parent: this.sidebar.find(".followed-by-section")
});
},
refresh_like: function() {
}
refresh_like() {
if (!this.like_icon) {
return;
}
@ -217,21 +225,21 @@ frappe.ui.form.Sidebar = Class.extend({
.attr("data-name", this.frm.doc.name);
this.like_count.text(JSON.parse(this.frm.doc._liked_by || "[]").length);
},
}
refresh_image: function() {
},
refresh_image() {
}
make_review: function() {
make_review() {
if (frappe.boot.energy_points_enabled && !this.frm.is_new()) {
this.frm.reviews = new frappe.ui.form.Review({
parent: this.sidebar.find(".form-reviews"),
frm: this.frm
});
}
},
}
reload_docinfo: function(callback) {
reload_docinfo(callback) {
frappe.call({
method: "frappe.desk.form.load.get_docinfo",
args: {
@ -248,4 +256,4 @@ frappe.ui.form.Sidebar = Class.extend({
});
}
});
};

View file

@ -0,0 +1,91 @@
// Renders "currently viewing / currently typing" avatars in the form sidebar.
frappe.ui.form.SidebarUsers = class {
	constructor(opts) {
		$.extend(this, opts);
	}

	// docinfo bucket for `type` ('viewers' or 'typers'), or null when absent
	get_users(type) {
		const docinfo = this.frm.get_docinfo();
		return docinfo ? docinfo[type] || null : null;
	}

	refresh(data_updated, type) {
		this.parent = type == 'viewers' ? this.$wrapper.find('.form-viewers') : this.$wrapper.find('.form-typers');
		this.parent.empty();
		const users = this.get_users(type);
		users && this.show_in_sidebar(users, type, data_updated);
	}

	show_in_sidebar(users, type, show_alert) {
		const sidebar_users = [];
		const new_users = [];
		const current_users = [];
		const message = type == 'viewers' ? 'viewing this document' : 'composing an email';

		for (const username of users.current) {
			if (username === frappe.session.user) {
				// skip the logged-in user's own session
				continue;
			}
			const user_info = frappe.user_info(username);
			sidebar_users.push({
				image: user_info.image,
				fullname: user_info.fullname,
				abbr: user_info.abbr,
				color: user_info.color,
				title: __("{0} is currently {1}", [user_info.fullname, message])
			});
			if (users.new.indexOf(username) !== -1) {
				new_users.push(user_info.fullname);
			}
			current_users.push(user_info.fullname);
		}

		if (sidebar_users.length) {
			this.parent.parent().removeClass('hidden');
			this.parent.append(frappe.render_template('users_in_sidebar', {'users': sidebar_users}));
		} else {
			this.parent.parent().addClass('hidden');
		}

		// For typers always show the alert
		// For viewers show the alert to new user viewing this document
		const alert_users = type == 'viewers' ? new_users : current_users;
		show_alert && this.show_alert(alert_users, message);
	}

	show_alert(users, message) {
		if (!users.length) return;
		if (users.length === 1) {
			frappe.show_alert(__('{0} is currently {1}', [users[0], message]));
		} else {
			frappe.show_alert(__('{0} are currently {1}', [frappe.utils.comma_and(users), message]));
		}
	}
};
// Updates docinfo's viewer/typer lists from a realtime event payload and
// refreshes the sidebar when the currently open form is affected.
frappe.ui.form.set_users = function(data, type) {
	const { doctype, docname } = data;
	const docinfo = frappe.model.get_docinfo(doctype, docname);

	const past_users = ((docinfo && docinfo[type]) || {}).past || [];
	const users = data.users || [];
	// users we have never seen on this document before
	const new_users = users.filter(user => !past_users.includes(user));

	frappe.model.set_docinfo(doctype, docname, type, {
		past: past_users.concat(new_users),
		new: new_users,
		current: users
	});

	if (cur_frm && cur_frm.doc && cur_frm.doc.doctype === doctype && cur_frm.doc.name == docname) {
		cur_frm.viewers.refresh(true, type);
	}
};

View file

@ -1,80 +0,0 @@
frappe.ui.form.Viewers = Class.extend({
	// Sidebar widget that shows which other users are viewing the open
	// document and alerts when a new viewer arrives.
	init: function(opts) {
		// Copy configuration (frm, parent, ...) onto the instance.
		$.extend(this, opts);
	},
	get_viewers: function() {
		// Viewer record ({past, new, current}) from docinfo, or {}.
		const docinfo = this.frm.get_docinfo();
		return (docinfo && docinfo.viewers) || {};
	},
	refresh: function(data_updated) {
		this.parent.empty();

		const viewers = this.get_viewers();
		const users = [];
		const new_users = [];

		(viewers.current || []).forEach(function(username) {
			// Never show the logged-in user their own presence.
			if (username === frappe.session.user) {
				return;
			}
			const user_info = frappe.user_info(username);
			users.push({
				image: user_info.image,
				fullname: user_info.fullname,
				abbr: user_info.abbr,
				color: user_info.color,
				title: __("{0} is currently viewing this document", [user_info.fullname])
			});
			if (viewers.new.indexOf(username) !== -1) {
				new_users.push(user_info.fullname);
			}
		});

		if (users.length) {
			this.parent.parent().removeClass("hidden");
			this.parent.append(frappe.render_template("users_in_sidebar", {"users": users}));
		} else {
			this.parent.parent().addClass("hidden");
		}

		// Alert only about users who were not viewing the document before.
		if (data_updated && new_users.length) {
			if (new_users.length === 1) {
				frappe.show_alert(__("{0} is currently viewing this document", [new_users[0]]));
			} else {
				frappe.show_alert(__("{0} are currently viewing this document", [frappe.utils.comma_and(new_users)]));
			}
		}
	}
});
frappe.ui.form.set_viewers = function(data) {
	// Record the current viewer list in docinfo and refresh the sidebar
	// widget if this document is the one open in the form.
	const doctype = data.doctype;
	const docname = data.docname;
	const docinfo = frappe.model.get_docinfo(doctype, docname);

	const past_viewers = ((docinfo && docinfo.viewers) || {}).past || [];
	const viewers = data.viewers || [];
	// Viewers not seen on this document before this update.
	const new_viewers = viewers.filter(function(viewer) {
		return !past_viewers.includes(viewer);
	});

	frappe.model.set_docinfo(doctype, docname, "viewers", {
		past: past_viewers.concat(new_viewers),
		new: new_viewers,
		current: viewers
	});

	if (cur_frm && cur_frm.doc && cur_frm.doc.doctype === doctype && cur_frm.doc.name == docname) {
		cur_frm.viewers.refresh(true);
	}
}

Some files were not shown because too many files have changed in this diff Show more