Merge branch 'develop' of https://github.com/frappe/frappe into rebrand-ui

This commit is contained in:
Suraj Shetty 2020-07-06 13:19:59 +05:30
commit 5e8422ae42
173 changed files with 5363 additions and 5252 deletions

13
.github/semantic.yml vendored Normal file
View file

@@ -0,0 +1,13 @@
# Always validate the PR title AND all the commits
titleAndCommits: true
# Allow use of Merge commits (eg on github: "Merge branch 'master' into feature/ride-unicorns")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowMergeCommits: true
# Allow use of Revert commits (eg on github: "Revert "feat: ride unicorns"")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowRevertCommits: true
# For allowed PR types: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json
# Tool Reference: https://github.com/zeke/semantic-pull-requests

14
.github/workflows/docker-release.yml vendored Normal file
View file

@@ -0,0 +1,14 @@
name: Trigger Docker build on release
on:
release:
types: [released]
jobs:
curl:
runs-on: ubuntu-latest
container:
image: alpine:latest
steps:
- name: curl
run: |
apk add curl bash
curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests

View file

@@ -4,8 +4,7 @@ pull_request_rules:
- status-success=Sider
- status-success=Semantic Pull Request
- status-success=Travis CI - Pull Request
- status-success=security/snyk - package.json (frappe)
- status-success=security/snyk - requirements.txt (frappe)
- status-success=security/snyk (frappe)
- label!=don't-merge
- label!=squash
- "#approved-reviews-by>=1"
@@ -17,8 +16,7 @@ pull_request_rules:
- status-success=Sider
- status-success=Semantic Pull Request
- status-success=Travis CI - Pull Request
- status-success=security/snyk - package.json (frappe)
- status-success=security/snyk - requirements.txt (frappe)
- status-success=security/snyk (frappe)
- label!=don't-merge
- label=squash
- "#approved-reviews-by>=1"

View file

@@ -27,8 +27,11 @@ if __name__ == "__main__":
commit_range = os.environ.get("TRAVIS_COMMIT_RANGE")
print("Build Type: {}".format(build_type))
print("Commit Range: {}".format(commit_range))
files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
try:
files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
except Exception:
sys.exit(2)
if "fatal" not in files_changed:
files_list = files_changed.split()

View file

@@ -490,7 +490,8 @@ def sendmail(recipients=[], sender="", subject="No Subject", message="No Message
message = content or message
if as_markdown:
message = frappe.utils.md_to_html(message)
from frappe.utils import md_to_html
message = md_to_html(message)
if not delayed:
now = True
@@ -1145,8 +1146,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
"""Import a file using Data Import."""
from frappe.core.doctype.data_import import data_import
data_import.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
from frappe.core.doctype.data_import.data_import import import_doc
import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""

View file

@@ -159,14 +159,14 @@ def validate_auth():
authorization_type = authorization_header[0].lower()
if len(authorization_header) == 1:
frappe.throw(_('Invalid Authorization headers, add a token with a prefix from one of the following: {0}.'.format(VALID_AUTH_PREFIX_STRING)), frappe.InvalidAuthorizationHeader)
frappe.throw(_('Invalid Authorization headers, add a token with a prefix from one of the following: {0}.').format(VALID_AUTH_PREFIX_STRING), frappe.InvalidAuthorizationHeader)
if authorization_type == "bearer":
validate_oauth(authorization_header)
elif authorization_type in VALID_AUTH_PREFIX_TYPES:
validate_auth_via_api_keys(authorization_header)
else:
frappe.throw(_('Invalid Authorization Type {0}, must be one of {1}.'.format(authorization_type, VALID_AUTH_PREFIX_STRING)), frappe.InvalidAuthorizationPrefix)
frappe.throw(_('Invalid Authorization Type {0}, must be one of {1}.').format(authorization_type, VALID_AUTH_PREFIX_STRING), frappe.InvalidAuthorizationPrefix)
def validate_oauth(authorization_header):
@@ -245,5 +245,6 @@ def validate_api_key_secret(api_key, api_secret, frappe_authorization_source=Non
)
else:
user = frappe.db.get_value(doctype, doc, 'user')
frappe.set_user(user)
if frappe.local.login_manager.user in ('', 'Guest'):
frappe.set_user(user)
frappe.local.form_dict = form_dict

View file

@@ -99,7 +99,7 @@ def application(request):
frappe.monitor.stop(response)
frappe.recorder.dump()
frappe.logger("web").info({
frappe.logger("frappe.web").info({
"site": get_site_name(request.host),
"remote_addr": getattr(request, "remote_addr", "NOTFOUND"),
"base_url": getattr(request, "base_url", "NOTFOUND"),

View file

@@ -21,7 +21,7 @@ class AssignmentRule(Document):
def on_update(self): # pylint: disable=no-self-use
frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name)
def after_rename(self): # pylint: disable=no-self-use
def after_rename(self, old, new, merge): # pylint: disable=no-self-use
frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name)
def apply_unassign(self, doc, assignments):

View file

@@ -146,7 +146,7 @@ class AutoRepeat(Document):
def make_new_document(self):
reference_doc = frappe.get_doc(self.reference_doctype, self.reference_document)
new_doc = frappe.copy_doc(reference_doc, ignore_no_copy = False)
new_doc = frappe.copy_doc(reference_doc)
self.update_doc(new_doc, reference_doc)
new_doc.insert(ignore_permissions = True)
@@ -372,7 +372,8 @@ def make_auto_repeat(doctype, docname, frequency = 'Daily', start_date = None, e
doc.save()
return doc
#method for reference_doctype filter
# method for reference_doctype filter
@frappe.whitelist()
def get_auto_repeat_doctypes(doctype, txt, searchfield, start, page_len, filters):
res = frappe.db.get_all('Property Setter', {
'property': 'allow_auto_repeat',

View file

@@ -108,12 +108,14 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
@click.option('--install-app', multiple=True, help='Install app after installation')
@click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file')
@click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file')
@click.option('--force', is_flag=True, default=False, help='Use a bit of force to get the job done')
@pass_context
def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None):
"Restore site database from an sql file"
from frappe.installer import extract_sql_gzip, extract_tar_files
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
from frappe.installer import extract_sql_gzip, extract_tar_files, is_downgrade
force = context.force or force
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
if not os.path.exists(sql_file_path):
base_path = '..'
sql_file_path = os.path.join(base_path, sql_file_path)
@@ -125,7 +127,6 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
else:
base_path = '.'
if sql_file_path.endswith('sql.gz'):
decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path))
else:
@@ -133,10 +134,16 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
site = get_site(context)
frappe.init(site=site)
# dont allow downgrading to older versions of frappe without force
if not force and is_downgrade(decompressed_file_name, verbose=True):
warn_message = "This is not recommended and may lead to unexpected behaviour. Do you want to continue anyway?"
click.confirm(warn_message, abort=True)
_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username,
mariadb_root_password=mariadb_root_password, admin_password=admin_password,
verbose=context.verbose, install_apps=install_app, source_sql=decompressed_file_name,
force=True)
force=True, db_type=frappe.conf.db_type)
# Extract public and/or private files to the restored site, if user has given the path
if with_public_files:
@@ -414,15 +421,16 @@ def remove_from_installed_apps(context, app):
@click.argument('app')
@click.option('--yes', '-y', help='To bypass confirmation prompt for uninstalling the app', is_flag=True, default=False, multiple=True)
@click.option('--dry-run', help='List all doctypes that will be deleted', is_flag=True, default=False)
@click.option('--no-backup', help='Do not backup the site', is_flag=True, default=False)
@pass_context
def uninstall(context, app, dry_run=False, yes=False):
def uninstall(context, app, dry_run=False, yes=False, no_backup=False):
"Remove app and linked modules from site"
from frappe.installer import remove_app
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
remove_app(app, dry_run, yes)
remove_app(app, dry_run, yes, no_backup)
finally:
frappe.destroy()
if not context.sites:

View file

@@ -215,12 +215,12 @@ def export_doc(context, doctype, docname):
@pass_context
def export_json(context, doctype, path, name=None):
"Export doclist as json to the given path, use '-' as name for Singles."
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_json
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_json(doctype, path, name=name)
export_json(doctype, path, name=name)
finally:
frappe.destroy()
if not context.sites:
@@ -232,12 +232,12 @@ def export_json(context, doctype, path, name=None):
@pass_context
def export_csv(context, doctype, path):
"Export data import template with data for DocType"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import export_csv
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_csv(doctype, path)
export_csv(doctype, path)
finally:
frappe.destroy()
if not context.sites:
@@ -264,7 +264,7 @@ def export_fixtures(context, app=None):
@pass_context
def import_doc(context, path, force=False):
"Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported"
from frappe.core.doctype.data_import import data_import
from frappe.core.doctype.data_import.data_import import import_doc
if not os.path.exists(path):
path = os.path.join('..', path)
@@ -276,7 +276,7 @@ def import_doc(context, path, force=False):
try:
frappe.init(site=site)
frappe.connect()
data_import.import_doc(path, overwrite=context.force)
import_doc(path, overwrite=context.force)
finally:
frappe.destroy()
if not context.sites:
@@ -293,7 +293,7 @@ def import_doc(context, path, force=False):
@pass_context
def import_csv(context, path, only_insert=False, submit_after_import=False, ignore_encoding_errors=False, no_email=True):
"Import CSV using data import"
from frappe.core.doctype.data_import import importer
from frappe.core.doctype.data_import_legacy import importer
from frappe.utils.csvutils import read_csv_content
site = get_site(context)
@@ -329,20 +329,12 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
"Import documents in bulk from CSV or XLSX using data import"
from frappe.core.doctype.data_import_beta.importer import Importer
from frappe.core.doctype.data_import.data_import import import_file
site = get_site(context)
frappe.init(site=site)
frappe.connect()
data_import = frappe.new_doc('Data Import Beta')
data_import.submit_after_import = submit_after_import
data_import.mute_emails = mute_emails
data_import.import_type = 'Insert New Records' if import_type.lower() == 'insert' else 'Update Existing Records'
i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True)
i.import_data()
import_file(doctype, file_path, import_type, submit_after_import, console=True)
frappe.destroy()

View file

@@ -42,6 +42,16 @@ frappe.ui.form.on("Contact", {
});
frm.refresh_field("links");
let numbers = frm.doc.phone_nos;
if (numbers && numbers.length && frappe.phone_call.handler) {
frm.add_custom_button(__('Call'), () => {
numbers = frm.doc.phone_nos
.sort((prev, next) => next.is_primary_mobile_no - prev.is_primary_mobile_no)
.map(d => d.phone);
frappe.phone_call.handler(numbers);
});
}
if (frm.doc.links) {
frappe.call({
method: "frappe.contacts.doctype.contact.contact.address_query",

View file

@@ -9,7 +9,7 @@ import frappe.permissions
import re, csv, os
from frappe.utils.csvutils import UnicodeWriter
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint
from frappe.core.doctype.data_import.importer import get_data_keys
from frappe.core.doctype.data_import_legacy.importer import get_data_keys
from six import string_types
from frappe.core.doctype.access_log.access_log import make_access_log

View file

@@ -1 +0,0 @@
Bulk import / update of data via file upload in Excel or CSV.

View file

@@ -1,324 +1,518 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import', {
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
frappe.call({
method: "frappe.core.doctype.data_import.data_import.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
setup(frm) {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import', frm.doc.name);
frappe.model.with_doc('Data Import', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
}),
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
frm.set_query('reference_doctype', () => {
return {
filters: {
name: ['in', frappe.boot.user.can_import]
}
};
});
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
frm.has_import_file = () => {
return frm.doc.import_file || frm.doc.google_sheets_url;
};
},
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
},
refresh: function(frm) {
onload_post_render(frm) {
frm.trigger('update_primary_action');
},
update_primary_action(frm) {
if (frm.is_dirty()) {
frm.enable_save();
return;
}
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && (frm.has_import_file())) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
},
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
download_template(frm) {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype,
frm.doc.import_type
);
});
},
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import.data_import.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
google_sheets_url(frm) {
if (!frm.is_dirty()) {
frm.trigger('import_file');
} else {
$(frm.fields_dict.import_log.wrapper).empty();
frm.trigger('update_primary_action');
}
},
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
refresh_google_sheet(frm) {
frm.trigger('import_file');
},
import_file(frm) {
frm.toggle_display('section_import_preview', frm.has_import_file());
if (!frm.has_import_file()) {
frm.get_field('import_preview').$wrapper.empty();
return;
} else {
frm.trigger('update_primary_action');
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: {
data_import: frm.doc.name,
import_file: frm.doc.import_file,
google_sheets_url: frm.doc.google_sheets_url
},
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.column_to_field_map = template_options.column_to_field_map || {};
Object.assign(template_options.column_to_field_map, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import.data_import.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
frm.save();
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="warning" data-row="${row_number}">
<h5 class="text-uppercase">${__('Row {0}', [row_number])}</h5>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="warning" data-col="${warning.col}">
<h5 class="text-uppercase">${header}</h5>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-10 warnings">${html}</div>
</div>
`);
},
only_update: function(frm) {
frm.save();
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
submit_after_import: function(frm) {
frm.save();
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}" style="margin-top: 15px;">
${__('Show Traceback')}
</button>
<div class="collapse" id="${id}" style="margin-top: 15px;">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
skip_errors: function(frm) {
frm.save();
},
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
ignore_encoding_errors: function(frm) {
frm.save();
},
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});
frappe.provide('frappe.data_import');
frappe.data_import.download_dialog = function(frm) {
var dialog;
const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);
const get_doctype_checkbox_fields = () => {
return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
.map(df => dialog.fields_dict[df.fieldname]);
};
const doctype_fields = get_fields(frm.doc.reference_doctype)
.map(df => {
let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
return {
label: df.label,
reqd: reqd,
danger: reqd,
value: df.fieldname,
checked: 1
};
});
let fields = [
{
"label": __("Select Columns"),
"fieldname": "select_columns",
"fieldtype": "Select",
"options": "All\nMandatory\nManually",
"reqd": 1,
"onchange": function() {
const fields = get_doctype_checkbox_fields();
fields.map(f => f.toggle(true));
if(this.value == 'Mandatory' || this.value == 'Manually') {
checkbox_toggle(true);
fields.map(multicheck_field => {
multicheck_field.options.map(option => {
if(!option.reqd) return;
$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
.prop('checked', false)
.trigger('click');
});
});
} else if(this.value == 'All'){
$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
.prop('disabled', true);
}
}
},
{
"label": __("File Type"),
"fieldname": "file_type",
"fieldtype": "Select",
"options": "Excel\nCSV",
"default": "Excel"
},
{
"label": __("Download with Data"),
"fieldname": "with_data",
"fieldtype": "Check",
"hidden": !frm.doc.overwrite,
"default": 1
},
{
"label": __("Select All"),
"fieldname": "select_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle();
}
},
{
"label": __("Unselect All"),
"fieldname": "unselect_all",
"fieldtype": "Button",
"depends_on": "eval:doc.select_columns=='Manually'",
click: function() {
checkbox_toggle(true);
}
},
{
"label": frm.doc.reference_doctype,
"fieldname": "doctype_fields",
"fieldtype": "MultiCheck",
"options": doctype_fields,
"columns": 2,
"hidden": 1
}
];
const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
.map(df => {
return {
"label": df.options,
"fieldname": df.fieldname + '_fields',
"fieldtype": "MultiCheck",
"options": frappe.meta.get_docfields(df.options)
.filter(filter_fields)
.map(df => ({
label: df.label,
reqd: df.reqd ? 1 : 0,
value: df.fieldname,
checked: 1,
danger: df.reqd
})),
"columns": 2,
"hidden": 1
};
});
fields = fields.concat(child_table_fields);
dialog = new frappe.ui.Dialog({
title: __('Download Template'),
fields: fields,
primary_action: function(values) {
var data = values;
if (frm.doc.reference_doctype) {
var export_params = () => {
let columns = {};
if(values.select_columns) {
columns = get_doctype_checkbox_fields().reduce((columns, field) => {
const options = field.get_checked_options();
columns[field.df.label] = options;
return columns;
}, {});
}
return {
doctype: frm.doc.reference_doctype,
parent_doctype: frm.doc.reference_doctype,
select_columns: JSON.stringify(columns),
with_data: frm.doc.overwrite && data.with_data,
all_doctypes: true,
file_type: data.file_type,
template: true
};
};
let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
open_url_post(get_template_url, export_params());
} else {
frappe.msgprint(__("Please select the Document Type."));
}
dialog.hide();
},
primary_action_label: __('Download')
});
$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
.wrapAll('<div class="inline-buttons" />');
const button_container = $(dialog.body).find('.inline-buttons');
button_container.addClass('flex');
$(button_container).find('.frappe-control').map((index, button) => {
$(button).css({"margin-right": "1em"});
});
function checkbox_toggle(checked=false) {
$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
});
}
return dialog;
};

View file

@ -1,767 +1,192 @@
{
"allow_copy": 1,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"autoname": "",
"beta": 0,
"creation": "2016-12-09 14:27:32.720061",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"html_5",
"google_sheets_url",
"refresh_google_sheet",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 1,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "action",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Action",
"length": 0,
"no_copy": 0,
"options": "Insert new records\nUpdate records",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Insert new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Update records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 1,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Don't create new records",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Attach file for Import",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"in_list_view": 1,
"label": "Import File"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "column_break_4",
"fieldtype": "Column Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Generated File",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"collapsible_depends_on": "",
"columns": 0,
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"label": "Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Skip rows with errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Submit after importing",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "0",
"depends_on": "",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Ignore encoding errors",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"depends_on": "",
"fieldname": "no_email",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Do not send Emails",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"columns": 0,
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "import_status",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Status",
"length": 0,
"no_copy": 0,
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Show only errors",
"length": 0,
"no_copy": 1,
"permlevel": 0,
"precision": "",
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "",
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Import Log",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 1,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "",
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Log Details",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Import File Errors and Warnings"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "amended_from",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Amended From",
"length": 0,
"no_copy": 1,
"options": "Data Import",
"permlevel": 0,
"print_hide": 1,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
},
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "total_rows",
"fieldtype": "Int",
"hidden": 1,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Total Rows",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"depends_on": "eval:!doc.__islocal",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file",
"fieldname": "html_5",
"fieldtype": "HTML",
"options": "<h5 class=\"text-muted uppercase\">Or</h5>"
},
{
"depends_on": "eval:!doc.__islocal && !doc.import_file\n",
"description": "Must be a publicly accessible Google Sheets URL",
"fieldname": "google_sheets_url",
"fieldtype": "Data",
"label": "Import from Google Sheets"
},
{
"depends_on": "eval:doc.google_sheets_url && !doc.__unsaved",
"fieldname": "refresh_google_sheet",
"fieldtype": "Button",
"label": "Refresh Google Sheet"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 1,
"issingle": 0,
"istable": 0,
"max_attachments": 1,
"modified": "2018-08-28 15:05:56.787108",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"name_case": "",
"owner": "Administrator",
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-06-24 14:33:03.173876",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 1,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "",
"track_changes": 1,
"track_seen": 1,
"track_views": 0
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,54 +1,187 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
import os
import frappe
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import.importer import upload
from frappe.core.doctype.data_import.importer import Importer
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.utils.background_jobs import enqueue
from frappe.utils.csvutils import validate_google_sheets_url
from frappe import _
class DataImport(Document):
def autoname(self):
if not self.name:
self.name = "Import on " +format_datetime(self.creation)
def validate(self):
if not self.import_file:
self.db_set("total_rows", 0)
if self.import_status == "In Progress":
frappe.throw(_("Can't save the form as data import is in progress."))
doc_before_save = self.get_doc_before_save()
if (
not (self.import_file or self.google_sheets_url)
or (doc_before_save and doc_before_save.import_file != self.import_file)
or (doc_before_save and doc_before_save.google_sheets_url != self.google_sheets_url)
):
self.template_options = ""
self.template_warnings = ""
# validate the template just after the upload
# if there is total_rows in the doc, it means that the template is already validated and error free
if self.import_file and not self.total_rows:
upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
self.validate_import_file()
self.validate_google_sheets_url()
def validate_import_file(self):
if self.import_file:
# validate template
self.get_importer()
def validate_google_sheets_url(self):
if not self.google_sheets_url:
return
validate_google_sheets_url(self.google_sheets_url)
def get_preview_from_template(self, import_file=None, google_sheets_url=None):
if import_file:
self.import_file = import_file
if google_sheets_url:
self.google_sheets_url = google_sheets_url
if not (self.import_file or self.google_sheets_url):
return
i = self.get_importer()
return i.get_data_for_import_preview()
def start_import(self):
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.scheduler import is_scheduler_inactive
if is_scheduler_inactive() and not frappe.flags.in_test:
frappe.throw(
_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
)
enqueued_jobs = [d.get("job_name") for d in get_info()]
if self.name not in enqueued_jobs:
enqueue(
start_import,
queue="default",
timeout=6000,
event="data_import",
job_name=self.name,
data_import=self.name,
now=frappe.conf.developer_mode or frappe.flags.in_test,
)
return True
return False
def export_errored_rows(self):
return self.get_importer().export_errored_rows()
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_importable_doctypes():
return frappe.cache().hget("can_import", frappe.session.user)
def get_preview_from_template(data_import, import_file=None, google_sheets_url=None):
return frappe.get_doc("Data Import", data_import).get_preview_from_template(
import_file, google_sheets_url
)
@frappe.whitelist()
def import_data(data_import):
frappe.db.set_value("Data Import", data_import, "import_status", "In Progress", update_modified=False)
frappe.publish_realtime("data_import_progress", {"progress": "0",
"data_import": data_import, "reload": True}, user=frappe.session.user)
from frappe.core.page.background_jobs.background_jobs import get_info
enqueued_jobs = [d.get("job_name") for d in get_info()]
if data_import not in enqueued_jobs:
enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
def form_start_import(data_import):
return frappe.get_doc("Data Import", data_import).start_import()
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
insert=False, submit=False, pre_process=None):
def start_import(data_import):
"""This method runs in background job"""
data_import = frappe.get_doc("Data Import", data_import)
try:
i = Importer(data_import.reference_doctype, data_import=data_import)
i.import_data()
except Exception:
frappe.db.rollback()
data_import.db_set("status", "Error")
frappe.log_error(title=data_import.name)
finally:
frappe.flags.in_import = False
frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
"""
Download template from Exporter
:param doctype: Document Type
:param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
:param export_records=None: One of 'all', 'by_filter', 'blank_template'
:param export_filters: Filter dict
:param file_type: File type to export into
"""
export_fields = frappe.parse_json(export_fields)
export_filters = frappe.parse_json(export_filters)
export_data = export_records != "blank_template"
e = Exporter(
doctype,
export_fields=export_fields,
export_data=export_data,
export_filters=export_filters,
file_type=file_type,
export_page_length=5 if export_records == "5_records" else None,
)
e.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
data_import = frappe.get_doc("Data Import", data_import_name)
data_import.export_errored_rows()
def import_file(
doctype, file_path, import_type, submit_after_import=False, console=False
):
"""
Import documents in from CSV or XLSX using data import.
:param doctype: DocType to import
:param file_path: Path to .csv, .xls, or .xlsx file to import
:param import_type: One of "Insert" or "Update"
:param submit_after_import: Whether to submit documents after import
:param console: Set to true if this is to be used from command line. Will print errors or progress to stdout.
"""
data_import = frappe.new_doc("Data Import")
data_import.submit_after_import = submit_after_import
data_import.import_type = (
"Insert New Records" if import_type.lower() == "insert" else "Update Existing Records"
)
i = Importer(
doctype=doctype, file_path=file_path, data_import=data_import, console=console
)
i.import_data()
##############
def import_doc(
path,
overwrite=False,
ignore_links=False,
ignore_insert=False,
insert=False,
submit=False,
pre_process=None,
):
if os.path.isdir(path):
files = [os.path.join(path, f) for f in os.listdir(path)]
else:
@ -57,25 +190,44 @@ def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
for f in files:
if f.endswith(".json"):
frappe.flags.mute_emails = True
frappe.modules.import_file.import_file_by_path(f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True)
frappe.modules.import_file.import_file_by_path(
f, data_import=True, force=True, pre_process=pre_process, reset_permissions=True
)
frappe.flags.mute_emails = False
frappe.db.commit()
elif f.endswith(".csv"):
import_file_by_path(f, ignore_links=ignore_links, overwrite=overwrite, submit=submit, pre_process=pre_process)
import_file_by_path(
f,
ignore_links=ignore_links,
overwrite=overwrite,
submit=submit,
pre_process=pre_process,
)
frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
from frappe.utils.csvutils import read_csv_content
print("Importing " + path)
with open(path, "r") as infile:
upload(rows = read_csv_content(infile.read()), ignore_links=ignore_links, no_email=no_email, overwrite=overwrite,
submit_after_import=submit, pre_process=pre_process)
def import_file_by_path(
path,
ignore_links=False,
overwrite=False,
submit=False,
pre_process=None,
no_email=True,
):
if path.endswith(".csv"):
print()
print("This method is deprecated.")
print('Import CSV files using the command "bench --site sitename data-import"')
print("Or use the method frappe.core.doctype.data_import.data_import.import_file")
print()
raise Exception("Method deprecated")
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
def export_json(
doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"
):
def post_process(out):
del_keys = ('modified_by', 'creation', 'owner', 'idx')
del_keys = ("modified_by", "creation", "owner", "idx")
for doc in out:
for key in del_keys:
if key in doc:
@ -83,7 +235,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
for k, v in doc.items():
if isinstance(v, list):
for child in v:
for key in del_keys + ('docstatus', 'doctype', 'modified', 'name'):
for key in del_keys + ("docstatus", "doctype", "modified", "name"):
if key in child:
del child[key]
@ -93,13 +245,20 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
elif frappe.db.get_value("DocType", doctype, "issingle"):
out.append(frappe.get_doc(doctype).as_dict())
else:
for doc in frappe.get_all(doctype, fields=["name"], filters=filters, or_filters=or_filters, limit_page_length=0, order_by=order_by):
for doc in frappe.get_all(
doctype,
fields=["name"],
filters=filters,
or_filters=or_filters,
limit_page_length=0,
order_by=order_by,
):
out.append(frappe.get_doc(doctype, doc.name).as_dict())
post_process(out)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
path = os.path.join('..', path)
path = os.path.join("..", path)
with open(path, "w") as outfile:
outfile.write(frappe.as_json(out))
@ -107,17 +266,7 @@ def export_json(doctype, path, filters=None, or_filters=None, name=None, order_b
def export_csv(doctype, path):
from frappe.core.doctype.data_export.exporter import export_data
with open(path, "wb") as csvfile:
export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
csvfile.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
if frappe.session.user != "Administrator":
raise frappe.PermissionError
if not os.path.exists(frappe.get_app_path(app, "fixtures")):
os.mkdir(frappe.get_app_path(app, "fixtures"))
export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")

View file

@ -1,31 +1,40 @@
let imports_in_progress = [];
frappe.listview_settings['Data Import'] = {
add_fields: ["import_status"],
has_indicator_for_draft: 1,
get_indicator: function(doc) {
let status = {
'Successful': [__("Success"), "green", "import_status,=,Successful"],
'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
'Failed': [__("Failed"), "red", "import_status,=,Failed"],
'Pending': [__("Pending"), "orange", "import_status,=,"]
}
if (doc.import_status) {
return status[doc.import_status];
}
if (doc.docstatus == 0) {
return status['Pending'];
}
return status['Pending'];
},
onload(listview) {
listview.page.set_title_sub(`
<span class="indicator blue">
<a class="text-muted" href="#List/Data Import Beta">${__('Try the new Data Import')}</a>
</span>
`);
}
frappe.realtime.on('data_import_progress', data => {
if (!imports_in_progress.includes(data.data_import)) {
imports_in_progress.push(data.data_import);
}
});
frappe.realtime.on('data_import_refresh', data => {
imports_in_progress = imports_in_progress.filter(
d => d !== data.data_import
);
listview.refresh();
});
},
get_indicator: function(doc) {
var colors = {
'Pending': 'orange',
'Partial Success': 'orange',
'Success': 'green',
'In Progress': 'orange',
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {
import_type(value) {
return {
'Insert New Records': __('Insert'),
'Update Existing Records': __('Update')
}[value];
}
},
hide_name_column: true
};

View file

Can't render this file because it contains an unexpected character in line 2 and column 54.

View file

Can't render this file because it contains an unexpected character in line 2 and column 56.

File diff suppressed because it is too large Load diff

View file

@ -1,23 +0,0 @@
/* eslint-disable */
// rename this file from _test_[name] to test_[name] to activate
// and remove above this line
QUnit.test("test: Data Import", function (assert) {
let done = assert.async();
// number of asserts
assert.expect(1);
frappe.run_serially([
// insert a new Data Import
() => frappe.tests.make('Data Import', [
// values to be set
{key: 'value'}
]),
() => {
assert.equal(cur_frm.doc.key, 'value');
},
() => done()
]);
});

View file

@ -1,100 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and Contributors
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe, unittest
from frappe.core.doctype.data_export import exporter
from frappe.core.doctype.data_import import importer
from frappe.utils.csvutils import read_csv_content
# import frappe
import unittest
class TestDataImport(unittest.TestCase):
def test_export(self):
exporter.export_data("User", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
self.assertTrue(content[1][1], "User")
def test_export_with_data(self):
exporter.export_data("User", all_doctypes=True, template=True, with_data=True)
content = read_csv_content(frappe.response.result)
self.assertTrue(content[1][1], "User")
self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
def test_export_with_all_doctypes(self):
exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True)
content = read_csv_content(frappe.response.result)
self.assertTrue(content[1][1], "User")
self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
self.assertEqual(content[13][0], "DocType:")
self.assertEqual(content[13][1], "User")
self.assertTrue("Has Role" in content[13])
def test_import(self):
if frappe.db.exists("Blog Category", "test-category"):
frappe.delete_doc("Blog Category", "test-category")
exporter.export_data("Blog Category", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
content.append(["", "test-category", "Test Cateogry"])
importer.upload(content)
self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category")
# export with data
exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True)
content = read_csv_content(frappe.response.result)
# overwrite
content[-1][3] = "New Title"
importer.upload(content, overwrite=True)
self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title")
def test_import_only_children(self):
user_email = "test_import_userrole@example.com"
if frappe.db.exists("User", user_email):
frappe.delete_doc("User", user_email, force=True)
frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert()
exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
content.append(["", "test_import_userrole@example.com", "Blogger"])
importer.upload(content)
user = frappe.get_doc("User", user_email)
self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"}))
self.assertTrue(user.get("roles")[0].role, "Blogger")
# overwrite
exporter.export_data("Has Role", "User", all_doctypes=True, template=True)
content = read_csv_content(frappe.response.result)
content.append(["", "test_import_userrole@example.com", "Website Manager"])
importer.upload(content, overwrite=True)
user = frappe.get_doc("User", user_email)
self.assertEqual(len(user.get("roles")), 1)
self.assertTrue(user.get("roles")[0].role, "Website Manager")
def test_import_with_children(self): #pylint: disable=R0201
if frappe.db.exists("Event", "EV00001"):
frappe.delete_doc("Event", "EV00001")
exporter.export_data("Event", all_doctypes="Yes", template=True)
content = read_csv_content(frappe.response.result)
content.append([None] * len(content[-2]))
content[-1][1] = "__Test Event with children"
content[-1][2] = "Private"
content[-1][3] = "2014-01-01 10:00:00.000000"
importer.upload(content)
frappe.get_doc("Event", {"subject":"__Test Event with children"})
def test_excel_import(self):
if frappe.db.exists("Event", "EV00001"):
frappe.delete_doc("Event", "EV00001")
exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel")
from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent)
content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0])
importer.upload(content)
self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name"))
pass

View file

@ -5,8 +5,8 @@ from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import_beta.exporter import Exporter
from frappe.core.doctype.data_import_beta.test_importer import (
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.core.doctype.data_import.test_importer import (
create_doctype_if_not_exists,
)

View file

@ -84,7 +84,7 @@ class TestImporter(unittest.TestCase):
self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again')
def get_importer(self, doctype, import_file, update=False):
data_import = frappe.new_doc('Data Import Beta')
data_import = frappe.new_doc('Data Import')
data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records'
data_import.reference_doctype = doctype
data_import.import_file = import_file.file_url
@ -180,4 +180,4 @@ def get_import_file(csv_file_name, force=False):
def get_csv_file_path(file_name):
return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import_beta', 'fixtures', file_name)
return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name)

View file

@ -1,494 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Beta', {
setup(frm) {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
frm.import_in_progress = false;
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import Beta', frm.doc.name);
frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
frm.import_in_progress = true;
if (data.data_import !== frm.doc.name) {
return;
}
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
// prettier-ignore
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
let message;
if (data.success) {
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping {0} of {1}, {2}', [
data.current,
data.total,
eta_message
]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(__('In Progress'), 'orange');
// hide progress when complete
if (data.current === data.total) {
setTimeout(() => {
frm.dashboard.hide();
frm.refresh();
}, 2000);
}
});
frm.set_query('reference_doctype', () => {
return {
filters: {
name: ['in', frappe.boot.user.can_import]
}
};
});
frm.get_field('import_file').df.options = {
restrictions: {
allowed_file_types: ['.csv', '.xls', '.xlsx']
}
};
},
refresh(frm) {
frm.page.hide_icon_group();
frm.trigger('update_indicators');
frm.trigger('import_file');
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
frm.trigger('show_report_error_button');
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'), () =>
frm.trigger('export_errored_rows')
);
}
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(
__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype)
);
}
frm.disable_save();
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && frm.doc.import_file) {
let label =
frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
} else {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
},
update_indicators(frm) {
const indicator = frappe.get_indicator(frm.doc);
if (indicator) {
frm.page.set_indicator(indicator[0], indicator[1]);
} else {
frm.page.clear_indicator();
}
},
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully imported {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args)
: __('Successfully updated {0} record out of {1}. Click on Export Errored Rows, fix the errors and import again.', message_args);
}
}
frm.dashboard.set_headline(message);
},
show_report_error_button(frm) {
if (frm.doc.status === 'Error') {
frappe.db
.get_list('Error Log', {
filters: { method: frm.doc.name },
fields: ['method', 'error'],
order_by: 'creation desc',
limit: 1
})
.then(result => {
if (result.length > 0) {
frm.add_custom_button('Report Error', () => {
let fake_xhr = {
responseText: JSON.stringify({
exc: result[0].error
})
};
frappe.request.report_error(fake_xhr, {});
});
}
});
}
},
start_import(frm) {
frm
.call({
method: 'form_start_import',
args: { data_import: frm.doc.name },
btn: frm.page.btn_primary
})
.then(r => {
if (r.message === true) {
frm.disable_save();
}
});
},
download_template(frm) {
if (
frm.data_exporter &&
frm.data_exporter.doctype === frm.doc.reference_doctype
) {
frm.data_exporter.exporting_for = frm.doc.import_type;
frm.data_exporter.dialog.show();
} else {
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.data_exporter = new frappe.data_import.DataExporter(
frm.doc.reference_doctype,
frm.doc.import_type
);
});
}
},
reference_doctype(frm) {
frm.trigger('toggle_submit_after_import');
},
toggle_submit_after_import(frm) {
frm.toggle_display('submit_after_import', false);
let doctype = frm.doc.reference_doctype;
if (doctype) {
frappe.model.with_doctype(doctype, () => {
let meta = frappe.get_meta(doctype);
frm.toggle_display('submit_after_import', meta.is_submittable);
});
}
},
import_file(frm) {
frm.toggle_display('section_import_preview', frm.doc.import_file);
if (!frm.doc.import_file) {
frm.get_field('import_preview').$wrapper.empty();
return;
}
// load import preview
frm.get_field('import_preview').$wrapper.empty();
$('<span class="text-muted">')
.html(__('Loading import file...'))
.appendTo(frm.get_field('import_preview').$wrapper);
frm
.call({
method: 'get_preview_from_template',
args: { data_import: frm.doc.name, import_file: frm.doc.import_file },
error_handlers: {
TimestampMismatchError() {
// ignore this error
}
}
})
.then(r => {
let preview_data = r.message;
frm.events.show_import_preview(frm, preview_data);
frm.events.show_import_warnings(frm, preview_data);
});
},
show_import_preview(frm, preview_data) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
if (
frm.import_preview &&
frm.import_preview.doctype === frm.doc.reference_doctype
) {
frm.import_preview.preview_data = preview_data;
frm.import_preview.import_log = import_log;
frm.import_preview.refresh();
return;
}
frappe.require('/assets/js/data_import_tools.min.js', () => {
frm.import_preview = new frappe.data_import.ImportPreview({
wrapper: frm.get_field('import_preview').$wrapper,
doctype: frm.doc.reference_doctype,
preview_data,
import_log,
frm,
events: {
remap_column(changed_map) {
let template_options = JSON.parse(frm.doc.template_options || '{}');
template_options.remap_column = template_options.remap_column || {};
Object.assign(template_options.remap_column, changed_map);
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
}
}
});
});
},
export_errored_rows(frm) {
open_url_post(
'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template',
{
data_import_name: frm.doc.name
}
);
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
frm.toggle_display('import_warnings_section', warnings.length > 0);
if (warnings.length === 0) {
frm.get_field('import_warnings').$wrapper.html('');
return;
}
// group warnings by row
let warnings_by_row = {};
let other_warnings = [];
for (let warning of warnings) {
if (warning.row) {
warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
warnings_by_row[warning.row].push(warning);
} else {
other_warnings.push(warning);
}
}
let html = '';
html += Object.keys(warnings_by_row)
.map(row_number => {
let message = warnings_by_row[row_number]
.map(w => {
if (w.field) {
let label =
w.field.label +
(w.field.parent !== frm.doc.reference_doctype
? ` (${w.field.parent})`
: '');
return `<li>${label}: ${w.message}</li>`;
}
return `<li>${w.message}</li>`;
})
.join('');
return `
<div class="warning" data-row="${row_number}">
<h5 class="text-uppercase">${__('Row {0}', [row_number])}</h5>
<div class="body"><ul>${message}</ul></div>
</div>
`;
})
.join('');
html += other_warnings
.map(warning => {
let header = '';
if (warning.col) {
header = __('Column {0}', [warning.col]);
}
return `
<div class="warning" data-col="${warning.col}">
<h5 class="text-uppercase">${header}</h5>
<div class="body">${warning.message}</div>
</div>
`;
})
.join('');
frm.get_field('import_warnings').$wrapper.html(`
<div class="row">
<div class="col-sm-10 warnings">${html}</div>
</div>
`);
},
show_failed_logs(frm) {
frm.trigger('show_import_log');
},
show_import_log(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let logs = import_log;
frm.toggle_display('import_log', false);
frm.toggle_display('import_log_section', logs.length > 0);
if (logs.length === 0) {
frm.get_field('import_log_preview').$wrapper.empty();
return;
}
let rows = logs
.map(log => {
let html = '';
if (log.success) {
if (frm.doc.import_type === 'Insert New Records') {
html = __('Successfully imported {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
} else {
html = __('Successfully updated {0}', [
`<span class="underline">${frappe.utils.get_form_link(
frm.doc.reference_doctype,
log.docname,
true
)}<span>`
]);
}
} else {
let messages = log.messages
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
<div class="well">
<pre>${log.exception}</pre>
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
if (frm.doc.show_failed_logs && log.success) {
return '';
}
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
if (!rows && frm.doc.show_failed_logs) {
rows = `<tr><td class="text-center text-muted" colspan=3>
${__('No failed logs')}
</td></tr>`;
}
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>
`);
},
show_missing_link_values(frm, missing_link_values) {
let can_be_created_automatically = missing_link_values.every(
d => d.has_one_mandatory_field
);
let html = missing_link_values
.map(d => {
let doctype = d.doctype;
let values = d.missing_values;
return `
<h5>${doctype}</h5>
<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
`;
})
.join('');
if (can_be_created_automatically) {
// prettier-ignore
let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
frappe.confirm(message + html, () => {
frm
.call('create_missing_link_values', {
missing_link_values
})
.then(r => {
let records = r.message;
frappe.msgprint(
__('Created {0} records successfully.', [records.length])
);
});
});
} else {
frappe.msgprint(
// prettier-ignore
__('The following records needs to be created before we can import your file.') + html
);
}
}
});

View file

@ -1,206 +0,0 @@
{
"actions": [],
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"column_break_5",
"status",
"submit_after_import",
"mute_emails",
"template_options",
"import_warnings_section",
"template_warnings",
"import_warnings",
"section_import_preview",
"import_preview",
"import_log_section",
"import_log",
"show_failed_logs",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1,
"show_days": 1,
"show_seconds": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview",
"show_days": 1,
"show_seconds": 1
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success\nError",
"read_only": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON",
"show_days": 1,
"show_seconds": 1
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import",
"set_only_once": 1,
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings",
"show_days": 1,
"show_seconds": 1
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings",
"show_days": 1,
"show_seconds": 1
},
{
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template",
"show_days": 1,
"show_seconds": 1
},
{
"default": "1",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails",
"set_only_once": 1,
"show_days": 1,
"show_seconds": 1
},
{
"default": "0",
"fieldname": "show_failed_logs",
"fieldtype": "Check",
"label": "Show Failed Logs",
"show_days": 1,
"show_seconds": 1
}
],
"hide_toolbar": 1,
"links": [],
"modified": "2020-05-28 22:11:38.266208",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Beta",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -1,125 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.core.doctype.data_import_beta.importer import Importer
from frappe.core.doctype.data_import_beta.exporter import Exporter
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.background_jobs import enqueue
from frappe import _
class DataImportBeta(Document):
def validate(self):
doc_before_save = self.get_doc_before_save()
if not self.import_file or (
doc_before_save and doc_before_save.import_file != self.import_file
):
self.template_options = ""
self.template_warnings = ""
if self.import_file:
# validate template
self.get_importer()
def get_preview_from_template(self, import_file=None):
if import_file:
self.import_file = import_file
if not self.import_file:
return
i = self.get_importer()
return i.get_data_for_import_preview()
def start_import(self):
if frappe.utils.scheduler.is_scheduler_inactive() and not frappe.flags.in_test:
frappe.throw(
_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
)
enqueued_jobs = [d.get("job_name") for d in get_info()]
if self.name not in enqueued_jobs:
enqueue(
start_import,
queue="default",
timeout=6000,
event="data_import",
job_name=self.name,
data_import=self.name,
now=frappe.conf.developer_mode or frappe.flags.in_test,
)
return True
return False
def export_errored_rows(self):
return self.get_importer().export_errored_rows()
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
@frappe.whitelist()
def get_preview_from_template(data_import, import_file):
return frappe.get_doc("Data Import Beta", data_import).get_preview_from_template(import_file)
@frappe.whitelist()
def form_start_import(data_import):
return frappe.get_doc("Data Import Beta", data_import).start_import()
def start_import(data_import):
"""This method runs in background job"""
data_import = frappe.get_doc("Data Import Beta", data_import)
try:
i = Importer(data_import.reference_doctype, data_import=data_import)
i.import_data()
except:
frappe.db.rollback()
data_import.db_set("status", "Error")
frappe.log_error(title=data_import.name)
finally:
frappe.flags.in_import = False
frappe.publish_realtime("data_import_refresh", {"data_import": data_import.name})
@frappe.whitelist()
def download_template(
doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
"""
Download template from Exporter
:param doctype: Document Type
:param export_fields=None: Fields to export as dict {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
:param export_records=None: One of 'all', 'by_filter', 'blank_template'
:param export_filters: Filter dict
:param file_type: File type to export into
"""
export_fields = frappe.parse_json(export_fields)
export_filters = frappe.parse_json(export_filters)
export_data = export_records != "blank_template"
e = Exporter(
doctype,
export_fields=export_fields,
export_data=export_data,
export_filters=export_filters,
file_type=file_type,
export_page_length=5 if export_records == "5_records" else None,
)
e.build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
data_import = frappe.get_doc("Data Import Beta", data_import_name)
data_import.export_errored_rows()

View file

@ -1,40 +0,0 @@
let imports_in_progress = [];
frappe.listview_settings['Data Import Beta'] = {
onload(listview) {
frappe.realtime.on('data_import_progress', data => {
if (!imports_in_progress.includes(data.data_import)) {
imports_in_progress.push(data.data_import);
}
});
frappe.realtime.on('data_import_refresh', data => {
imports_in_progress = imports_in_progress.filter(
d => d !== data.data_import
);
listview.refresh();
});
},
get_indicator: function(doc) {
var colors = {
'Pending': 'orange',
'Partial Success': 'orange',
'Success': 'green',
'In Progress': 'orange',
'Error': 'red'
};
let status = doc.status;
if (imports_in_progress.includes(doc.name)) {
status = 'In Progress';
}
return [__(status), colors[status], 'status,=,' + doc.status];
},
formatters: {
import_type(value) {
return {
'Insert New Records': __('Insert'),
'Update Existing Records': __('Update')
}[value];
}
},
hide_name_column: true
};

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,324 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Legacy', {
onload: function(frm) {
if (frm.doc.__islocal) {
frm.set_value("action", "");
}
frappe.call({
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.get_importable_doctypes",
callback: function (r) {
let importable_doctypes = r.message;
frm.set_query("reference_doctype", function () {
return {
"filters": {
"issingle": 0,
"istable": 0,
"name": ['in', importable_doctypes]
}
};
});
}
}),
// should never check public
frm.fields_dict["import_file"].df.is_private = 1;
frappe.realtime.on("data_import_progress", function(data) {
if (data.data_import === frm.doc.name) {
if (data.reload && data.reload === true) {
frm.reload_doc();
}
if (data.progress) {
let progress_bar = $(frm.dashboard.progress_area).find(".progress-bar");
if (progress_bar) {
$(progress_bar).removeClass("progress-bar-danger").addClass("progress-bar-success progress-bar-striped");
$(progress_bar).css("width", data.progress + "%");
}
}
}
});
},
reference_doctype: function(frm){
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
}
},
refresh: function(frm) {
frm.disable_save();
frm.dashboard.clear_headline();
if (frm.doc.reference_doctype && !frm.doc.import_file) {
frm.page.set_indicator(__('Attach file'), 'orange');
} else {
if (frm.doc.import_status) {
const listview_settings = frappe.listview_settings['Data Import Legacy'];
const indicator = listview_settings.get_indicator(frm.doc);
frm.page.set_indicator(indicator[0], indicator[1]);
if (frm.doc.import_status === "In Progress") {
frm.dashboard.add_progress("Data Import Progress", "0");
frm.set_read_only();
frm.refresh_fields();
}
}
}
if (frm.doc.reference_doctype) {
frappe.model.with_doctype(frm.doc.reference_doctype);
}
if(frm.doc.action == "Insert new records" || frm.doc.action == "Update records") {
frm.set_df_property("action", "read_only", 1);
}
frm.add_custom_button(__("Help"), function() {
frappe.help.show_video("6wiriRKPhmg");
});
if (frm.doc.reference_doctype && frm.doc.docstatus === 0) {
frm.add_custom_button(__("Download template"), function() {
frappe.data_import.download_dialog(frm).show();
});
}
if (frm.doc.reference_doctype && frm.doc.import_file && frm.doc.total_rows &&
frm.doc.docstatus === 0 && (!frm.doc.import_status || frm.doc.import_status == "Failed")) {
frm.page.set_primary_action(__("Start Import"), function() {
frappe.call({
btn: frm.page.btn_primary,
method: "frappe.core.doctype.data_import_legacy.data_import_legacy.import_data",
args: {
data_import: frm.doc.name
}
});
}).addClass('btn btn-primary');
}
if (frm.doc.log_details) {
frm.events.create_log_table(frm);
} else {
$(frm.fields_dict.import_log.wrapper).empty();
}
},
action: function(frm) {
if(!frm.doc.action) return;
if(!frm.doc.reference_doctype) {
frappe.msgprint(__("Please select document type first."));
frm.set_value("action", "");
return;
}
if(frm.doc.action == "Insert new records") {
frm.doc.insert_new = 1;
} else if (frm.doc.action == "Update records"){
frm.doc.overwrite = 1;
}
frm.save();
},
only_update: function(frm) {
frm.save();
},
submit_after_import: function(frm) {
frm.save();
},
skip_errors: function(frm) {
frm.save();
},
ignore_encoding_errors: function(frm) {
frm.save();
},
no_email: function(frm) {
frm.save();
},
show_only_errors: function(frm) {
frm.events.create_log_table(frm);
},
create_log_table: function(frm) {
let msg = JSON.parse(frm.doc.log_details);
var $log_wrapper = $(frm.fields_dict.import_log.wrapper).empty();
$(frappe.render_template("log_details", {
data: msg.messages,
import_status: frm.doc.import_status,
show_only_errors: frm.doc.show_only_errors,
})).appendTo($log_wrapper);
}
});
frappe.provide('frappe.data_import');

// Build (and return) the "Download Template" dialog for the legacy Data Import
// form. The dialog lets the user pick which columns of the reference doctype
// (and its child tables) are included in the exported Excel/CSV template, then
// POSTs to the data_export exporter.
frappe.data_import.download_dialog = function(frm) {
	var dialog;

	// Only plain value fields that are visible can be exported as columns.
	const filter_fields = df => frappe.model.is_value_type(df) && !df.hidden;
	const get_fields = dt => frappe.meta.get_docfields(dt).filter(filter_fields);

	// All MultiCheck controls in the dialog use the `*_fields` naming
	// convention (main doctype + one per child table).
	const get_doctype_checkbox_fields = () => {
		return dialog.fields.filter(df => df.fieldname.endsWith('_fields'))
			.map(df => dialog.fields_dict[df.fieldname]);
	};

	// Checkbox options for the main doctype; mandatory fields (and
	// naming_series) are flagged so they render highlighted and can be
	// force-selected in "Mandatory" mode.
	const doctype_fields = get_fields(frm.doc.reference_doctype)
		.map(df => {
			let reqd = (df.reqd || df.fieldname == 'naming_series') ? 1 : 0;
			return {
				label: df.label,
				reqd: reqd,
				danger: reqd,
				value: df.fieldname,
				checked: 1
			};
		});

	let fields = [
		{
			"label": __("Select Columns"),
			"fieldname": "select_columns",
			"fieldtype": "Select",
			"options": "All\nMandatory\nManually",
			"reqd": 1,
			"onchange": function() {
				const fields = get_doctype_checkbox_fields();
				// show every MultiCheck section before adjusting its state
				fields.map(f => f.toggle(true));
				if(this.value == 'Mandatory' || this.value == 'Manually') {
					// clear everything, then re-check only required options
					// (.trigger('click') flips the state set by .prop())
					checkbox_toggle(true);
					fields.map(multicheck_field => {
						multicheck_field.options.map(option => {
							if(!option.reqd) return;
							$(multicheck_field.$wrapper).find(`:checkbox[data-unit="${option.value}"]`)
								.prop('checked', false)
								.trigger('click');
						});
					});
				} else if(this.value == 'All'){
					// all options stay checked; lock the checkboxes
					$(dialog.body).find(`[data-fieldtype="MultiCheck"] :checkbox`)
						.prop('disabled', true);
				}
			}
		},
		{
			"label": __("File Type"),
			"fieldname": "file_type",
			"fieldtype": "Select",
			"options": "Excel\nCSV",
			"default": "Excel"
		},
		{
			// only meaningful when updating existing records
			"label": __("Download with Data"),
			"fieldname": "with_data",
			"fieldtype": "Check",
			"hidden": !frm.doc.overwrite,
			"default": 1
		},
		{
			"label": __("Select All"),
			"fieldname": "select_all",
			"fieldtype": "Button",
			"depends_on": "eval:doc.select_columns=='Manually'",
			click: function() {
				checkbox_toggle();
			}
		},
		{
			"label": __("Unselect All"),
			"fieldname": "unselect_all",
			"fieldtype": "Button",
			"depends_on": "eval:doc.select_columns=='Manually'",
			click: function() {
				checkbox_toggle(true);
			}
		},
		{
			"label": frm.doc.reference_doctype,
			"fieldname": "doctype_fields",
			"fieldtype": "MultiCheck",
			"options": doctype_fields,
			"columns": 2,
			"hidden": 1
		}
	];

	// One MultiCheck section per child table of the reference doctype.
	const child_table_fields = frappe.meta.get_table_fields(frm.doc.reference_doctype)
		.map(df => {
			return {
				"label": df.options,
				"fieldname": df.fieldname + '_fields',
				"fieldtype": "MultiCheck",
				"options": frappe.meta.get_docfields(df.options)
					.filter(filter_fields)
					.map(df => ({
						label: df.label,
						reqd: df.reqd ? 1 : 0,
						value: df.fieldname,
						checked: 1,
						danger: df.reqd
					})),
				"columns": 2,
				"hidden": 1
			};
		});
	fields = fields.concat(child_table_fields);

	dialog = new frappe.ui.Dialog({
		title: __('Download Template'),
		fields: fields,
		primary_action: function(values) {
			var data = values;
			if (frm.doc.reference_doctype) {
				// Collect the checked options per MultiCheck section into
				// {section label: [fieldnames]} for the exporter.
				var export_params = () => {
					let columns = {};
					if(values.select_columns) {
						columns = get_doctype_checkbox_fields().reduce((columns, field) => {
							const options = field.get_checked_options();
							columns[field.df.label] = options;
							return columns;
						}, {});
					}
					return {
						doctype: frm.doc.reference_doctype,
						parent_doctype: frm.doc.reference_doctype,
						select_columns: JSON.stringify(columns),
						with_data: frm.doc.overwrite && data.with_data,
						all_doctypes: true,
						file_type: data.file_type,
						template: true
					};
				};
				let get_template_url = '/api/method/frappe.core.doctype.data_export.exporter.export_data';
				// form POST so the browser downloads the generated file
				open_url_post(get_template_url, export_params());
			} else {
				frappe.msgprint(__("Please select the Document Type."));
			}
			dialog.hide();
		},
		primary_action_label: __('Download')
	});

	// lay the two buttons out side by side
	$(dialog.body).find('div[data-fieldname="select_all"], div[data-fieldname="unselect_all"]')
		.wrapAll('<div class="inline-buttons" />');
	const button_container = $(dialog.body).find('.inline-buttons');
	button_container.addClass('flex');
	$(button_container).find('.frappe-control').map((index, button) => {
		$(button).css({"margin-right": "1em"});
	});

	// Set every MultiCheck checkbox to `checked` — .prop() sets the target
	// state and .trigger('click') flips it, so pass `true` to UNcheck all
	// and `false` (default) to check all.
	function checkbox_toggle(checked=false) {
		$(dialog.body).find('[data-fieldtype="MultiCheck"]').map((index, element) => {
			$(element).find(`:checkbox`).prop("checked", checked).trigger('click');
		});
	}

	return dialog;
};

View file

@ -0,0 +1,218 @@
{
"actions": [],
"allow_copy": 1,
"creation": "2020-06-11 16:13:23.813709",
"doctype": "DocType",
"document_type": "Document",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"action",
"insert_new",
"overwrite",
"only_update",
"section_break_4",
"import_file",
"column_break_4",
"error_file",
"section_break_6",
"skip_errors",
"submit_after_import",
"ignore_encoding_errors",
"no_email",
"import_detail",
"import_status",
"show_only_errors",
"import_log",
"log_details",
  "total_rows",
  "amended_from"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"ignore_user_permissions": 1,
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1
},
{
"fieldname": "action",
"fieldtype": "Select",
"label": "Action",
"options": "Insert new records\nUpdate records",
"reqd": 1
},
{
"default": "0",
"depends_on": "eval:!doc.overwrite",
"description": "New data will be inserted.",
"fieldname": "insert_new",
"fieldtype": "Check",
"hidden": 1,
"label": "Insert new records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "eval:!doc.insert_new",
"description": "If you are updating/overwriting already created records.",
"fieldname": "overwrite",
"fieldtype": "Check",
"hidden": 1,
"label": "Update records",
"set_only_once": 1
},
{
"default": "0",
"depends_on": "overwrite",
"description": "If you don't want to create any new records while updating the older records.",
"fieldname": "only_update",
"fieldtype": "Check",
"label": "Don't create new records"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_4",
"fieldtype": "Section Break"
},
{
"fieldname": "import_file",
"fieldtype": "Attach",
"label": "Attach file for Import"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "eval: doc.import_status == \"Partially Successful\"",
"description": "This is the template file generated with only the rows having some error. You should use this file for correction and import.",
"fieldname": "error_file",
"fieldtype": "Attach",
"label": "Generated File"
},
{
"depends_on": "eval:(!doc.__islocal)",
"fieldname": "section_break_6",
"fieldtype": "Section Break"
},
{
"default": "0",
"description": "If this is checked, rows with valid data will be imported and invalid rows will be dumped into a new file for you to import later.",
"fieldname": "skip_errors",
"fieldtype": "Check",
"label": "Skip rows with errors"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit after importing"
},
{
"default": "0",
"fieldname": "ignore_encoding_errors",
"fieldtype": "Check",
"label": "Ignore encoding errors"
},
{
"default": "1",
"fieldname": "no_email",
"fieldtype": "Check",
"label": "Do not send Emails"
},
{
"collapsible": 1,
"collapsible_depends_on": "eval: doc.import_status == \"Failed\"",
"depends_on": "import_status",
"fieldname": "import_detail",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_status",
"fieldtype": "Select",
"label": "Import Status",
"options": "\nSuccessful\nFailed\nIn Progress\nPartially Successful",
"read_only": 1
},
{
"allow_on_submit": 1,
"default": "1",
"fieldname": "show_only_errors",
"fieldtype": "Check",
"label": "Show only errors",
"no_copy": 1,
"print_hide": 1
},
{
"allow_on_submit": 1,
"depends_on": "import_status",
"fieldname": "import_log",
"fieldtype": "HTML",
"label": "Import Log"
},
{
"allow_on_submit": 1,
"fieldname": "log_details",
"fieldtype": "Code",
"hidden": 1,
"label": "Log Details",
"read_only": 1
},
  {
   "fieldname": "total_rows",
   "fieldtype": "Int",
   "hidden": 1,
   "label": "Total Rows",
   "read_only": 1
  },
  {
   "fieldname": "amended_from",
   "fieldtype": "Link",
   "label": "Amended From",
   "no_copy": 1,
   "options": "Data Import Legacy",
   "print_hide": 1,
   "read_only": 1
  }
],
"is_submittable": 1,
"links": [],
"max_attachments": 1,
"modified": "2020-06-11 16:13:23.813709",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Legacy",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"submit": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 1
}

View file

@ -0,0 +1,123 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, os
from frappe import _
import frappe.modules.import_file
from frappe.model.document import Document
from frappe.utils.data import format_datetime
from frappe.core.doctype.data_import_legacy.importer import upload
from frappe.utils.background_jobs import enqueue
class DataImportLegacy(Document):
	"""Controller for the legacy Data Import doctype.

	Holds the uploaded template file and import options; the actual row
	processing is done by ``importer.upload`` (queued from ``import_data``).
	"""

	def autoname(self):
		# Fall back to a timestamp-based name when none was supplied.
		if not self.name:
			self.name = "Import on " +format_datetime(self.creation)

	def validate(self):
		if not self.import_file:
			# no file attached yet — reset the cached row count directly in DB
			self.db_set("total_rows", 0)
		if self.import_status == "In Progress":
			frappe.throw(_("Can't save the form as data import is in progress."))

		# validate the template just after the upload
		# if there is total_rows in the doc, it means that the template is already validated and error free
		if self.import_file and not self.total_rows:
			upload(data_import_doc=self, from_data_import="Yes", validate_template=True)
@frappe.whitelist()
def get_importable_doctypes():
	"""Return the cached list of doctypes the session user may import."""
	importable = frappe.cache().hget("can_import", frappe.session.user)
	return importable
@frappe.whitelist()
def import_data(data_import):
	"""Queue a background import job for the named Data Import Legacy document.

	Marks the document "In Progress" immediately and notifies the client via
	realtime so the form reloads, then enqueues ``upload`` unless a job with
	the same name is already queued.
	"""
	# update_modified=False: a status flip should not bump the doc's timestamp
	frappe.db.set_value("Data Import Legacy", data_import, "import_status", "In Progress", update_modified=False)
	frappe.publish_realtime("data_import_progress", {"progress": "0",
		"data_import": data_import, "reload": True}, user=frappe.session.user)

	# local import to avoid a circular dependency at module load time
	from frappe.core.page.background_jobs.background_jobs import get_info
	enqueued_jobs = [d.get("job_name") for d in get_info()]

	# guard against double-enqueue: job_name doubles as a dedup key
	if data_import not in enqueued_jobs:
		enqueue(upload, queue='default', timeout=6000, event='data_import', job_name=data_import,
			data_import_doc=data_import, from_data_import="Yes", user=frappe.session.user)
def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False,
	insert=False, submit=False, pre_process=None):
	"""Import a single ``.json``/``.csv`` file, or every such file in a directory."""
	if os.path.isdir(path):
		file_paths = [os.path.join(path, entry) for entry in os.listdir(path)]
	else:
		file_paths = [path]

	for file_path in file_paths:
		if file_path.endswith(".json"):
			# JSON docs go through the module import machinery with emails muted
			frappe.flags.mute_emails = True
			frappe.modules.import_file.import_file_by_path(
				file_path, data_import=True, force=True,
				pre_process=pre_process, reset_permissions=True)
			frappe.flags.mute_emails = False
			frappe.db.commit()
		elif file_path.endswith(".csv"):
			# CSV rows go through the legacy row importer
			import_file_by_path(
				file_path, ignore_links=ignore_links, overwrite=overwrite,
				submit=submit, pre_process=pre_process)
			frappe.db.commit()
def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False, pre_process=None, no_email=True):
	"""Read a CSV file at ``path`` and feed its rows to the legacy ``upload``."""
	from frappe.utils.csvutils import read_csv_content
	print("Importing " + path)
	with open(path, "r") as csv_file:
		content = csv_file.read()
	upload(
		rows=read_csv_content(content),
		ignore_links=ignore_links,
		no_email=no_email,
		overwrite=overwrite,
		submit_after_import=submit,
		pre_process=pre_process)
def export_json(doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"):
	"""Serialize documents of ``doctype`` (optionally filtered, or a single
	``name``) to a JSON file at ``path``, stripping volatile metadata."""

	def post_process(out):
		# Remove fields that churn between exports so fixtures diff cleanly.
		del_keys = ('modified_by', 'creation', 'owner', 'idx')
		child_del_keys = del_keys + ('docstatus', 'doctype', 'modified', 'name')
		for doc in out:
			for key in del_keys:
				doc.pop(key, None)
			for value in doc.values():
				if isinstance(value, list):
					for child in value:
						for key in child_del_keys:
							child.pop(key, None)

	out = []
	if name:
		out.append(frappe.get_doc(doctype, name).as_dict())
	elif frappe.db.get_value("DocType", doctype, "issingle"):
		out.append(frappe.get_doc(doctype).as_dict())
	else:
		names = frappe.get_all(doctype, fields=["name"], filters=filters,
			or_filters=or_filters, limit_page_length=0, order_by=order_by)
		out = [frappe.get_doc(doctype, d.name).as_dict() for d in names]
	post_process(out)

	dirname = os.path.dirname(path)
	if not os.path.exists(dirname):
		# target folder missing — assume we are one level deep and retry
		path = os.path.join('..', path)

	with open(path, "w") as outfile:
		outfile.write(frappe.as_json(out))
def export_csv(doctype, path):
	"""Write a data-filled import template for ``doctype`` to ``path``."""
	from frappe.core.doctype.data_export.exporter import export_data
	with open(path, "wb") as template_file:
		# export_data leaves its output in frappe.response.result
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		template_file.write(frappe.response.result.encode("utf-8"))
@frappe.whitelist()
def export_fixture(doctype, app):
	"""Export ``doctype`` as a JSON fixture into ``app``'s fixtures folder.

	Restricted to the Administrator account.
	"""
	if frappe.session.user != "Administrator":
		raise frappe.PermissionError

	fixtures_dir = frappe.get_app_path(app, "fixtures")
	if not os.path.exists(fixtures_dir):
		os.mkdir(fixtures_dir)

	export_json(doctype, frappe.get_app_path(app, "fixtures", frappe.scrub(doctype) + ".json"), order_by="name asc")

View file

@ -0,0 +1,24 @@
frappe.listview_settings['Data Import Legacy'] = {
	add_fields: ["import_status"],
	has_indicator_for_draft: 1,
	// Map import_status to a list indicator [label, colour, filter].
	get_indicator: function(doc) {
		let status = {
			'Successful': [__("Success"), "green", "import_status,=,Successful"],
			'Partially Successful': [__("Partial Success"), "blue", "import_status,=,Partially Successful"],
			'In Progress': [__("In Progress"), "orange", "import_status,=,In Progress"],
			'Failed': [__("Failed"), "red", "import_status,=,Failed"],
			'Pending': [__("Pending"), "orange", "import_status,=,"]
		};
		// Fall back to "Pending" when no import has run yet. (The original
		// code had a redundant docstatus branch that returned the same value
		// on both paths.)
		return status[doc.import_status] || status['Pending'];
	}
};

View file

@ -0,0 +1,541 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
from six.moves import range
import requests
import frappe, json
import frappe.permissions
from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
from six import string_types
@frappe.whitelist()
def get_data_keys():
	"""Return the translated marker strings used in the legacy template header."""
	keys = {
		"data_separator": _('Start entering data below this line'),
		"main_table": _("Table") + ":",
		"parent_table": _("Parent Table") + ":",
		"columns": _("Column Name") + ":",
		"doctype": _("DocType") + ":",
	}
	return frappe._dict(keys)
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, no_email=True, overwrite=None,
	update_only = None, ignore_links=False, pre_process=None, via_console=False, from_data_import="No",
	skip_errors = True, data_import_doc=None, validate_template=False, user=None):
	"""Import tabular data through the legacy template format.

	Rows may be passed directly (``rows``) or read from the file attached to
	``data_import_doc`` (.xlsx or .csv). The first rows up to the data
	separator are the template header, which defines the doctype(s), column
	fieldnames and fieldtypes. When ``validate_template`` is true, only the
	header is checked and the row count recorded — nothing is imported.

	Returns ``{"messages": [...], "error": bool}`` when run without a
	``data_import_doc``; otherwise the outcome is written back onto the doc
	(import_status, log_details, error_file).
	"""

	# for translations
	if user:
		frappe.cache().hdel("lang", user)
		frappe.set_user_lang(user)

	if data_import_doc and isinstance(data_import_doc, string_types):
		data_import_doc = frappe.get_doc("Data Import Legacy", data_import_doc)
	if data_import_doc and from_data_import == "Yes":
		# options come from the Data Import Legacy document itself
		no_email = data_import_doc.no_email
		ignore_encoding_errors = data_import_doc.ignore_encoding_errors
		update_only = data_import_doc.only_update
		submit_after_import = data_import_doc.submit_after_import
		overwrite = data_import_doc.overwrite
		skip_errors = data_import_doc.skip_errors
	else:
		# extra input params (direct HTTP upload path)
		params = json.loads(frappe.form_dict.get("params") or '{}')
		if params.get("submit_after_import"):
			submit_after_import = True
		if params.get("ignore_encoding_errors"):
			ignore_encoding_errors = True
		if not params.get("no_email"):
			no_email = False
		if params.get('update_only'):
			update_only = True
		if params.get('from_data_import'):
			from_data_import = params.get('from_data_import')
		if not params.get('skip_errors'):
			skip_errors = params.get('skip_errors')

	frappe.flags.in_import = True
	frappe.flags.mute_emails = no_email

	def get_data_keys_definition():
		# marker strings ("Table:", "Column Name:", ...) used in the header
		return get_data_keys()

	def bad_template():
		frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))

	def check_data_length():
		if not data:
			frappe.throw(_("No data found in the file. Please reattach the new file with data."))

	def get_start_row():
		# index of the first data row (the row after the data separator)
		for i, row in enumerate(rows):
			if row and row[0]==get_data_keys_definition().data_separator:
				return i+1
		bad_template()

	def get_header_row(key):
		return get_header_row_and_idx(key)[0]

	def get_header_row_and_idx(key):
		# find the header row whose first cell equals `key`; ([], -1) if absent
		for i, row in enumerate(header):
			if row and row[0]==key:
				return row, i
		return [], -1

	def filter_empty_columns(columns):
		empty_cols = list(filter(lambda x: x in ("", None), columns))

		if empty_cols:
			if columns[-1*len(empty_cols):] == empty_cols:
				# filter empty columns if they exist at the end
				columns = columns[:-1*len(empty_cols)]
			else:
				# empty columns in the middle make column mapping ambiguous
				frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
					raise_exception=1)

		return columns

	def make_column_map():
		# Populate `doctypes`, `column_idx_to_fieldname` and
		# `column_idx_to_fieldtype` from the header's DocType row plus the
		# fieldname row (2 below it) and fieldtype row (4 below it).
		doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
		if row_idx == -1: # old style
			return

		dt = None
		for i, d in enumerate(doctype_row[1:]):
			if d not in ("~", "-"):
				# a new doctype section starts where the previous cell is a
				# filler/marker cell
				if d and doctype_row[i] in (None, '' ,'~', '-', _("DocType") + ":"):
					dt, parentfield = d, None
					# xls format truncates the row, so it may not have more columns
					if len(doctype_row) > i+2:
						parentfield = doctype_row[i+2]
					doctypes.append((dt, parentfield))
					column_idx_to_fieldname[(dt, parentfield)] = {}
					column_idx_to_fieldtype[(dt, parentfield)] = {}
				if dt:
					column_idx_to_fieldname[(dt, parentfield)][i+1] = rows[row_idx + 2][i+1]
					column_idx_to_fieldtype[(dt, parentfield)][i+1] = rows[row_idx + 4][i+1]

	def get_doc(start_idx):
		# Assemble one document (with child rows and attachments) starting at
		# row `start_idx`; child rows continue until the next non-empty main
		# row. Returns (doc, attachments, last_error_row_idx).
		if doctypes:
			doc = {}
			attachments = []
			last_error_row_idx = None
			for idx in range(start_idx, len(rows)):
				last_error_row_idx = idx # pylint: disable=W0612
				if (not doc) or main_doc_empty(rows[idx]):
					for dt, parentfield in doctypes:
						d = {}
						for column_idx in column_idx_to_fieldname[(dt, parentfield)]:
							try:
								fieldname = column_idx_to_fieldname[(dt, parentfield)][column_idx]
								fieldtype = column_idx_to_fieldtype[(dt, parentfield)][column_idx]

								if not fieldname or not rows[idx][column_idx]:
									continue

								d[fieldname] = rows[idx][column_idx]
								# coerce cell values per the declared fieldtype
								if fieldtype in ("Int", "Check"):
									d[fieldname] = cint(d[fieldname])
								elif fieldtype in ("Float", "Currency", "Percent"):
									d[fieldname] = flt(d[fieldname])
								elif fieldtype == "Date":
									if d[fieldname] and isinstance(d[fieldname], string_types):
										d[fieldname] = getdate(parse_date(d[fieldname]))
								elif fieldtype == "Datetime":
									if d[fieldname]:
										if " " in d[fieldname]:
											_date, _time = d[fieldname].split()
										else:
											_date, _time = d[fieldname], '00:00:00'
										_date = parse_date(d[fieldname])
										d[fieldname] = get_datetime(_date + " " + _time)
									else:
										d[fieldname] = None

								elif fieldtype in ("Image", "Attach Image", "Attach"):
									# added file to attachments list
									attachments.append(d[fieldname])

								elif fieldtype in ("Link", "Dynamic Link", "Data") and d[fieldname]:
									# as fields can be saved in the number format(long type) in data import template
									d[fieldname] = cstr(d[fieldname])

							except IndexError:
								# short row — treat missing cells as blank
								pass

						# scrub quotes from name and modified
						if d.get("name") and d["name"].startswith('"'):
							d["name"] = d["name"][1:-1]

						if sum([0 if not val else 1 for val in d.values()]):
							d['doctype'] = dt
							if dt == doctype:
								doc.update(d)
							else:
								# child-table row: attach under its parentfield
								if not overwrite and doc.get("name"):
									d['parent'] = doc["name"]
								d['parenttype'] = doctype
								d['parentfield'] = parentfield
								doc.setdefault(d['parentfield'], []).append(d)
				else:
					# next main document starts here
					break

			return doc, attachments, last_error_row_idx
		else:
			# old-style single-table template: zip columns with the row
			doc = frappe._dict(zip(columns, rows[start_idx][1:]))
			doc['doctype'] = doctype
			return doc, [], None

	# used in testing whether a row is empty or parent row or child row
	# checked only 3 first columns since first two columns can be blank for example the case of
	# importing the item variant where item code and item name will be blank.
	def main_doc_empty(row):
		if row:
			for i in range(3,0,-1):
				if len(row) > i and row[i]:
					return False
		return True

	def validate_naming(doc):
		# when the doctype names by a field / naming series, that value must
		# be present in the imported row (unless a custom autoname hook exists)
		autoname = frappe.get_meta(doctype).autoname
		if autoname:
			if autoname[0:5] == 'field':
				autoname = autoname[6:]
			elif autoname == 'naming_series:':
				autoname = 'naming_series'
			else:
				return True

			if (autoname not in doc) or (not doc[autoname]):
				from frappe.model.base_document import get_controller
				if not hasattr(get_controller(doctype), "autoname"):
					frappe.throw(_("{0} is a mandatory field").format(autoname))
		return True

	users = frappe.db.sql_list("select name from tabUser")
	def prepare_for_insert(doc):
		# don't block data import if user is not set
		# migrating from another system
		if not doc.owner in users:
			doc.owner = frappe.session.user
		if not doc.modified_by in users:
			doc.modified_by = frappe.session.user

	def is_valid_url(url):
		# NOTE(review): issues a live GET per attachment — slow for many rows
		is_valid = False
		if url.startswith("/files") or url.startswith("/private/files"):
			url = get_url(url)

		try:
			r = requests.get(url)
			is_valid = True if r.status_code == 200 else False
		except Exception:
			pass

		return is_valid

	def attach_file_to_doc(doctype, docname, file_url):
		# check if attachment is already available
		# check if the attachement link is relative or not
		if not file_url:
			return
		if not is_valid_url(file_url):
			return

		files = frappe.db.sql("""Select name from `tabFile` where attached_to_doctype='{doctype}' and
			attached_to_name='{docname}' and (file_url='{file_url}' or thumbnail_url='{file_url}')""".format(
				doctype=doctype,
				docname=docname,
				file_url=file_url
			))

		if files:
			# file is already attached
			return

		_file = frappe.get_doc({
			"doctype": "File",
			"file_url": file_url,
			"attached_to_name": docname,
			"attached_to_doctype": doctype,
			"attached_to_field": 0,
			"folder": "Home/Attachments"})
		_file.save()

	# header
	filename, file_extension = ['','']
	if not rows:
		# rows not supplied — read them from the attached import file
		_file = frappe.get_doc("File", {"file_url": data_import_doc.import_file})
		fcontent = _file.get_content()
		filename, file_extension = _file.get_extension()
		if file_extension == '.xlsx' and from_data_import == 'Yes':
			from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file
			rows = read_xlsx_file_from_attached_file(file_url=data_import_doc.import_file)

		elif file_extension == '.csv':
			from frappe.utils.csvutils import read_csv_content
			rows = read_csv_content(fcontent, ignore_encoding_errors)

		else:
			frappe.throw(_("Unsupported File Format"))

	# split the sheet into template header and data rows
	start_row = get_start_row()
	header = rows[:start_row]
	data = rows[start_row:]
	try:
		doctype = get_header_row(get_data_keys_definition().main_table)[1]
		columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
	except:
		frappe.throw(_("Cannot change header content"))
	doctypes = []
	column_idx_to_fieldname = {}
	column_idx_to_fieldtype = {}

	if skip_errors:
		# NOTE(review): aliases `header` — the later `+=` extends this same
		# list in place (intended: the error file must start with the header)
		data_rows_with_error = header

	if submit_after_import and not cint(frappe.db.get_value("DocType",
			doctype, "is_submittable")):
		submit_after_import = False

	parenttype = get_header_row(get_data_keys_definition().parent_table)

	if len(parenttype) > 1:
		parenttype = parenttype[1]

	# check permissions
	if not frappe.permissions.can_import(parenttype or doctype):
		frappe.flags.mute_emails = False
		return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}

	# Throw expception in case of the empty data file
	check_data_length()
	make_column_map()
	total = len(data)

	if validate_template:
		# template-only pass: record the row count and stop before importing
		if total:
			data_import_doc.total_rows = total
		return True

	if overwrite==None:
		# only reachable on the HTTP path, where `params` was parsed above
		overwrite = params.get('overwrite')

	# delete child rows (if parenttype)
	parentfield = None
	if parenttype:
		parentfield = get_parent_field(doctype, parenttype)

		if overwrite:
			delete_child_rows(data, doctype)

	import_log = []
	def log(**kwargs):
		if via_console:
			print((kwargs.get("title") + kwargs.get("message")).encode('utf-8'))
		else:
			import_log.append(kwargs)

	def as_link(doctype, name):
		if via_console:
			return "{0}: {1}".format(doctype, name)
		else:
			return getlink(doctype, name)

	# publish realtime task update
	def publish_progress(achieved, reload=False):
		if data_import_doc:
			frappe.publish_realtime("data_import_progress", {"progress": str(int(100.0*achieved/total)),
				"data_import": data_import_doc.name, "reload": reload}, user=frappe.session.user)

	error_flag = rollback_flag = False

	batch_size = frappe.conf.data_import_batch_size or 1000

	for batch_start in range(0, total, batch_size):
		batch = data[batch_start:batch_start + batch_size]

		for i, row in enumerate(batch):
			# bypass empty rows
			if main_doc_empty(row):
				continue

			row_idx = i + start_row
			doc = None

			publish_progress(i)

			try:
				doc, attachments, last_error_row_idx = get_doc(row_idx)
				validate_naming(doc)
				if pre_process:
					pre_process(doc)

				original = None
				if parentfield:
					# importing child rows under an existing parent document
					parent = frappe.get_doc(parenttype, doc["parent"])
					doc = parent.append(parentfield, doc)
					parent.save()
				else:
					if overwrite and doc.get("name") and frappe.db.exists(doctype, doc["name"]):
						original = frappe.get_doc(doctype, doc["name"])
						original_name = original.name
						original.update(doc)
						# preserve original name for case sensitivity
						original.name = original_name
						original.flags.ignore_links = ignore_links
						original.save()
						doc = original
					else:
						if not update_only:
							doc = frappe.get_doc(doc)
							prepare_for_insert(doc)
							doc.flags.ignore_links = ignore_links
							doc.insert()
					if attachments:
						# check file url and create a File document
						for file_url in attachments:
							attach_file_to_doc(doc.doctype, doc.name, file_url)
					if submit_after_import:
						doc.submit()

				# log the outcome of this row
				if parentfield:
					log(**{"row": doc.idx, "title": 'Inserted row for "%s"' % (as_link(parenttype, doc.parent)),
						"link": get_absolute_url(parenttype, doc.parent), "message": 'Document successfully saved', "indicator": "green"})
				elif submit_after_import:
					log(**{"row": row_idx + 1, "title":'Submitted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully submitted", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "blue"})
				elif original:
					log(**{"row": row_idx + 1,"title":'Updated row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully updated", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				elif not update_only:
					log(**{"row": row_idx + 1, "title":'Inserted row for "%s"' % (as_link(doc.doctype, doc.name)),
						"message": "Document successfully saved", "link": get_absolute_url(doc.doctype, doc.name), "indicator": "green"})
				else:
					log(**{"row": row_idx + 1, "title":'Ignored row for %s' % (row[1]), "link": None,
						"message": "Document updation ignored", "indicator": "orange"})

			except Exception as e:
				error_flag = True

				# build error message
				if frappe.local.message_log:
					err_msg = "\n".join(['<p class="border-bottom small">{}</p>'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log])
				else:
					err_msg = '<p class="border-bottom small">{}</p>'.format(cstr(e))

				error_trace = frappe.get_traceback()
				if error_trace:
					error_log_doc = frappe.log_error(error_trace)
					error_link = get_absolute_url("Error Log", error_log_doc.name)
				else:
					error_link = None
				log(**{
					"row": row_idx + 1,
					"title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""),
					"message": err_msg,
					"indicator": "red",
					"link":error_link
				})

				# data with error to create a new file
				# include the errored data in the last row as last_error_row_idx will not be updated for the last row
				if skip_errors:
					if last_error_row_idx == len(rows)-1:
						last_error_row_idx = len(rows)
					data_rows_with_error += rows[row_idx:last_error_row_idx]
				else:
					rollback_flag = True
			finally:
				# message_log must be cleared so the next row's errors are isolated
				frappe.local.message_log = []

		start_row += batch_size
		# commit (or roll back) once per batch
		if rollback_flag:
			frappe.db.rollback()
		else:
			frappe.db.commit()

	frappe.flags.mute_emails = False
	frappe.flags.in_import = False

	log_message = {"messages": import_log, "error": error_flag}
	if data_import_doc:
		data_import_doc.log_details = json.dumps(log_message)

		import_status = None
		if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error):
			import_status = "Partially Successful"
			# write the file with the faulty row
			file_name = 'error_' + filename + file_extension
			if file_extension == '.xlsx':
				from frappe.utils.xlsxutils import make_xlsx
				xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template")
				file_data = xlsx_file.getvalue()
			else:
				from frappe.utils.csvutils import to_csv
				file_data = to_csv(data_rows_with_error)
			_file = frappe.get_doc({
				"doctype": "File",
				"file_name": file_name,
				"attached_to_doctype": "Data Import Legacy",
				"attached_to_name": data_import_doc.name,
				"folder": "Home/Attachments",
				"content": file_data})
			_file.save()
			data_import_doc.error_file = _file.file_url
		elif error_flag:
			import_status = "Failed"
		else:
			import_status = "Successful"

		data_import_doc.import_status = import_status
		data_import_doc.save()
		if data_import_doc.import_status in ["Successful", "Partially Successful"]:
			data_import_doc.submit()
			publish_progress(100, True)
		else:
			publish_progress(0, True)
		frappe.db.commit()
	else:
		return log_message
def get_parent_field(doctype, parenttype):
	"""Return the fieldname of the child table on ``parenttype`` whose options
	point at ``doctype``.

	Raises ``Exception`` (after showing a message) when ``parenttype`` has no
	such child table.
	"""
	parentfield = None

	# get parentfield
	if parenttype:
		for d in frappe.get_meta(parenttype).get_table_fields():
			if d.options==doctype:
				parentfield = d.fieldname
				break

		if not parentfield:
			# BUGFIX: the original format string repeated {0}
			# ("Did not find {0} for {0} ({1})"), so the child doctype
			# never appeared in the message despite being passed in.
			frappe.msgprint(_("Did not find {0} for {1} ({2})").format("parentfield", parenttype, doctype))
			raise Exception

	return parentfield
def delete_child_rows(rows, doctype):
	"""delete child rows for all parents"""
	parents = {row[1] for row in rows}
	for parent in parents:
		if parent:
			frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), parent)

View file

@ -0,0 +1,10 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestDataImportLegacy(unittest.TestCase):
	# Placeholder suite: no isolated unit tests exist for the legacy importer
	# yet; its behaviour is exercised through the data import form.
	pass

View file

@ -406,9 +406,13 @@ class DocType(Document):
with open(fname, 'r') as f:
code = f.read()
with open(fname, 'w') as f:
file_content = code.replace(old, new) # replace str with full str (js controllers)
file_content = file_content.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
if fname.endswith('.js'):
file_content = code.replace(old, new) # replace str with full str (js controllers)
elif fname.endswith('.py'):
file_content = code.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports)
file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers)
f.write(file_content)
# updating json file with new name

View file

@ -12,7 +12,7 @@ class InstalledApplications(Document):
for app in frappe.utils.get_installed_apps_info():
self.append("installed_applications", {
"app_name": app.get("app_name"),
"app_version": app.get("version"),
"git_branch": app.get("branch")
"app_version": app.get("version") or "UNVERSIONED",
"git_branch": app.get("branch") or "UNVERSIONED"
})
self.save()

View file

@ -22,16 +22,28 @@ class Role(Document):
frappe.db.sql("delete from `tabHas Role` where role = %s", self.name)
frappe.clear_cache()
def on_update(self):
'''update system user desk access if this has changed in this update'''
if frappe.flags.in_install: return
if self.has_value_changed('desk_access'):
for user_name in get_users(self.name):
user = frappe.get_doc('User', user_name)
user_type = user.user_type
user.set_system_user()
if user_type != user.user_type:
user.save()
# Get email addresses of all users that have been assigned this role
def get_emails_from_role(role):
	"""Return the email addresses of enabled users holding ``role``,
	excluding the built-in admin/guest accounts.

	FIX: the block previously contained merge residue — the superseded
	`frappe.get_list`-based lookup and its loop header sat alongside the
	newer `get_users`-based loop, and a second unreachable ``return emails``
	was left behind. Only the new code path is kept.
	"""
	emails = []

	for user in get_users(role):
		user_email, enabled = frappe.db.get_value("User", user, ["email", "enabled"])
		if enabled and user_email not in ["admin@example.com", "guest@example.com"]:
			emails.append(user_email)

	return emails
def get_users(role):
	"""Return the names of all User documents assigned ``role``."""
	assignments = frappe.get_all("Has Role",
		filters={"role": role, "parenttype": "User"},
		fields=["parent"])
	return [d.parent for d in assignments]

View file

@ -23,3 +23,28 @@ class TestUser(unittest.TestCase):
frappe.get_doc("User", "test@example.com").add_roles("_Test Role 3")
self.assertTrue("_Test Role 3" in frappe.get_roles("test@example.com"))
	def test_change_desk_access(self):
		'''if we change desk access from role, remove from user'''
		# start from a clean slate
		frappe.delete_doc_if_exists('User', 'test-user-for-desk-access@example.com')
		frappe.delete_doc_if_exists('Role', 'desk-access-test')
		user = frappe.get_doc(dict(
			doctype='User',
			email='test-user-for-desk-access@example.com',
			first_name='test')).insert()
		role = frappe.get_doc(dict(
			doctype = 'Role',
			role_name = 'desk-access-test',
			desk_access = 0
		)).insert()

		user.add_roles(role.name)
		user.save()
		# a role without desk access leaves the user a Website User
		self.assertTrue(user.user_type=='Website User')

		# granting desk access on the role promotes the user
		role.desk_access = 1
		role.save()
		user.reload()
		self.assertTrue(user.user_type=='System User')

		# revoking desk access demotes the user again
		role.desk_access = 0
		role.save()
		user.reload()
		self.assertTrue(user.user_type=='Website User')

View file

@ -42,7 +42,7 @@ class ServerScript(Document):
@frappe.whitelist()
def setup_scheduler_events(script_name, frequency):
method = frappe.scrub(script_name) + '_' + frequency.lower()
method = frappe.scrub('{0}-{1}'.format(script_name, frequency))
scheduled_script = frappe.db.get_value('Scheduled Job Type',
dict(method=method))

View file

@ -811,6 +811,7 @@ def reset_password(user):
frappe.clear_messages()
return 'not found'
@frappe.whitelist()
def user_query(doctype, txt, searchfield, start, page_len, filters):
from frappe.desk.reportview import get_match_cond

View file

@ -5,23 +5,23 @@ from __future__ import unicode_literals
import frappe
from frappe import _, throw
import frappe.utils.user
from frappe.permissions import check_admin_or_system_manager
from frappe.permissions import check_admin_or_system_manager, rights
from frappe.model import data_fieldtypes
def execute(filters=None):
	"""Report entry point: list documents of the filtered doctype that the
	filtered user can read.

	When show_permissions is set, one ``Check`` column per permission right
	is appended and filled from ``get_doc_permissions`` for each row.
	Returns (columns, data) as expected by the report framework.
	"""
	user, doctype, show_permissions = filters.get("user"), filters.get("doctype"), filters.get("show_permissions")

	if not validate(user, doctype):
		return [], []

	columns, fields = get_columns_and_fields(doctype)
	data = frappe.get_list(doctype, fields=fields, as_list=True, user=user)

	if show_permissions:
		# one 80px Check column per right (read, write, create, ...)
		columns = columns + [frappe.unscrub(right) + ':Check:80' for right in rights]
		data = list(data)
		for i, doc in enumerate(data):
			# doc[0] is the document name (first field of the row tuple)
			permission = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, doc[0]), user)
			data[i] = doc + tuple(permission.get(right) for right in rights)

	return columns, data

View file

@ -72,6 +72,11 @@ class CustomField(Document):
frappe.db.updatedb(self.dt)
def on_trash(self):
#check if Admin owned field
if self.owner == 'Administrator' and frappe.session.user != 'Administrator':
frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format(
frappe.bold(self.label)))
# delete property setter entries
frappe.db.sql("""\
DELETE FROM `tabProperty Setter`

View file

@ -49,7 +49,7 @@ class DbManager:
host = self.get_current_host()
if frappe.conf.get('rds_db', 0) == 1:
self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE ON `%s`.* TO '%s'@'%s';" % (target, user, host))
self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES ON `%s`.* TO '%s'@'%s';" % (target, user, host))
else:
self.db.sql("GRANT ALL PRIVILEGES ON `%s`.* TO '%s'@'%s';" % (target, user, host))

View file

@ -1,7 +1,7 @@
import frappe, subprocess, os
from six.moves import input
def setup_database(force, source_sql, verbose):
def setup_database(force, source_sql=None, verbose=False):
root_conn = get_root_connection()
root_conn.commit()
root_conn.sql("DROP DATABASE IF EXISTS `{0}`".format(frappe.conf.db_name))
@ -16,10 +16,12 @@ def setup_database(force, source_sql, verbose):
subprocess_env = os.environ.copy()
subprocess_env['PGPASSWORD'] = str(frappe.conf.db_password)
# bootstrap db
if not source_sql:
source_sql = os.path.join(os.path.dirname(__file__), 'framework_postgres.sql')
subprocess.check_output([
'psql', frappe.conf.db_name, '-h', frappe.conf.db_host or 'localhost', '-U',
frappe.conf.db_name, '-f',
os.path.join(os.path.dirname(__file__), 'framework_postgres.sql')
frappe.conf.db_name, '-f', source_sql
], env=subprocess_env)
frappe.connect()

View file

@ -168,7 +168,6 @@ class Workspace:
'subtitle': _(self.onboarding_doc.subtitle),
'success': _(self.onboarding_doc.success_message),
'docs_url': self.onboarding_doc.documentation_url,
'user_can_dismiss': self.onboarding_doc.user_can_dismiss,
'items': self.get_onboarding_steps()
}
@handle_not_exist

View file

@ -1,208 +1,81 @@
{
"allow_copy": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"actions": [],
"autoname": "Prompt",
"beta": 0,
"creation": "2017-10-23 13:02:10.295824",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"subject_field",
"start_date_field",
"end_date_field",
"column_break_5",
"all_day"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "reference_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Reference Document Type",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "subject_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Subject Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "start_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Start Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "end_date_field",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "End Date Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"default": "0",
"fieldname": "all_day",
"fieldtype": "Check",
"label": "All Day"
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-09-05 14:22:27.664645",
"links": [],
"modified": "2020-06-15 11:24:57.639430",
"modified_by": "Administrator",
"module": "Desk",
"name": "Calendar View",
"name_case": "",
"owner": "faris@erpnext.com",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
},
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 0,
"delete": 0,
"email": 0,
"export": 0,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "All",
"set_user_permissions": 0,
"share": 0,
"submit": 0,
"write": 0
"role": "All"
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 0,
"track_seen": 0
"sort_order": "DESC"
}

View file

@ -8,7 +8,8 @@ from frappe import _
import datetime
import json
from frappe.utils.dashboard import cache_source, get_from_date_from_timespan
from frappe.utils import nowdate, add_to_date, getdate, get_last_day, formatdate, get_datetime, cint
from frappe.utils import nowdate, add_to_date, getdate, get_last_day, formatdate,\
get_datetime, cint, now_datetime
from frappe.model.naming import append_number_if_name_exists
from frappe.boot import get_allowed_reports
from frappe.model.document import Document
@ -26,15 +27,15 @@ def get_permission_query_conditions(user):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()])
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()]
return '''
`tabDashboard Chart`.`document_type` in {allowed_doctypes}
or `tabDashboard Chart`.`report_name` in {allowed_reports}
`tabDashboard Chart`.`document_type` in ({allowed_doctypes})
or `tabDashboard Chart`.`report_name` in ({allowed_reports})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_reports=allowed_reports
allowed_doctypes=','.join(allowed_doctypes),
allowed_reports=','.join(allowed_reports)
)
@ -134,7 +135,7 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date):
if not from_date:
from_date = get_from_date_from_timespan(to_date, timespan)
if not to_date:
to_date = datetime.datetime.now()
to_date = now_datetime()
doctype = chart.document_type
datefield = chart.based_on
@ -258,9 +259,10 @@ def get_aggregate_function(chart_type):
def get_result(data, timegrain, from_date, to_date):
start_date = getdate(from_date)
end_date = getdate(to_date)
result = []
while start_date <= end_date:
result = [[start_date, 0.0]]
while start_date < end_date:
next_date = get_next_expected_date(start_date, timegrain)
result.append([next_date, 0.0])
start_date = next_date
@ -276,11 +278,8 @@ def get_result(data, timegrain, from_date, to_date):
def get_next_expected_date(date, timegrain):
next_date = None
if timegrain=='Daily':
next_date = add_to_date(date, days=1)
else:
# given date is always assumed to be the period ending date
next_date = get_period_ending(add_to_date(date, days=1), timegrain)
# given date is always assumed to be the period ending date
next_date = get_period_ending(add_to_date(date, days=1), timegrain)
return getdate(next_date)
def get_period_ending(date, timegrain):

View file

@ -4,13 +4,12 @@
from __future__ import unicode_literals
import unittest, frappe
from frappe.utils import getdate, formatdate
from frappe.utils import getdate, formatdate, get_last_day
from frappe.desk.doctype.dashboard_chart.dashboard_chart import (get,
get_period_ending)
from datetime import datetime
from dateutil.relativedelta import relativedelta
import calendar
class TestDashboardChart(unittest.TestCase):
def test_period_ending(self):
@ -35,9 +34,6 @@ class TestDashboardChart(unittest.TestCase):
self.assertEqual(get_period_ending('2019-10-01', 'Quarterly'),
getdate('2019-12-31'))
self.assertEqual(get_period_ending('2019-10-01', 'Yearly'),
getdate('2019-12-31'))
def test_dashboard_chart(self):
if frappe.db.exists('Dashboard Chart', 'Test Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Dashboard Chart')
@ -50,22 +46,24 @@ class TestDashboardChart(unittest.TestCase):
based_on = 'creation',
timespan = 'Last Year',
time_interval = 'Monthly',
filters_json = '[]',
filters_json = '{}',
timeseries = 1
)).insert()
cur_date = datetime.now() - relativedelta(years=1)
result = get(chart_name ='Test Dashboard Chart', refresh = 1)
for idx in range(13):
month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1]))
result = get(chart_name='Test Dashboard Chart', refresh=1)
self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d')))
if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')):
cur_date += relativedelta(months=1)
for idx in range(1, 13):
month = get_last_day(cur_date)
month = formatdate(month.strftime('%Y-%m-%d'))
self.assertEqual(result.get('labels')[idx], month)
cur_date += relativedelta(months=1)
# self.assertEqual(result.get('datasets')[0].get('values')[:-1],
# [44, 28, 8, 11, 2, 6, 18, 6, 4, 5, 15, 13])
frappe.db.rollback()
def test_empty_dashboard_chart(self):
@ -88,9 +86,14 @@ class TestDashboardChart(unittest.TestCase):
cur_date = datetime.now() - relativedelta(years=1)
result = get(chart_name ='Test Empty Dashboard Chart', refresh = 1)
for idx in range(13):
month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1]))
result = get(chart_name ='Test Empty Dashboard Chart', refresh=1)
self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d')))
if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')):
cur_date += relativedelta(months=1)
for idx in range(1, 13):
month = get_last_day(cur_date)
month = formatdate(month.strftime('%Y-%m-%d'))
self.assertEqual(result.get('labels')[idx], month)
cur_date += relativedelta(months=1)
@ -121,8 +124,13 @@ class TestDashboardChart(unittest.TestCase):
cur_date = datetime.now() - relativedelta(years=1)
result = get(chart_name ='Test Empty Dashboard Chart 2', refresh = 1)
for idx in range(13):
month = datetime(int(cur_date.year), int(cur_date.strftime('%m')), int(calendar.monthrange(cur_date.year, cur_date.month)[1]))
self.assertEqual(result.get('labels')[0], formatdate(cur_date.strftime('%Y-%m-%d')))
if formatdate(cur_date.strftime('%Y-%m-%d')) == formatdate(get_last_day(cur_date).strftime('%Y-%m-%d')):
cur_date += relativedelta(months=1)
for idx in range(1, 13):
month = get_last_day(cur_date)
month = formatdate(month.strftime('%Y-%m-%d'))
self.assertEqual(result.get('labels')[idx], month)
cur_date += relativedelta(months=1)
@ -132,6 +140,60 @@ class TestDashboardChart(unittest.TestCase):
frappe.db.rollback()
def test_group_by_chart_type(self):
if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Group By Dashboard Chart',
chart_type = 'Group By',
document_type = 'ToDo',
group_by_based_on = 'status',
filters_json = '[]',
)).insert()
result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1)
todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]})
self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count)
frappe.db.rollback()
def test_daily_dashboard_chart(self):
insert_test_records()
if frappe.db.exists('Dashboard Chart', 'Test Daily Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Daily Dashboard Chart')
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Daily Dashboard Chart',
chart_type = 'Sum',
document_type = 'Communication',
based_on = 'communication_date',
value_based_on = 'rating',
timespan = 'Select Date Range',
time_interval = 'Daily',
from_date = datetime(2019, 1, 6),
to_date = datetime(2019, 1, 11),
filters_json = '[]',
timeseries = 1
)).insert()
result = get(chart_name ='Test Daily Dashboard Chart', refresh = 1)
self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 300.0, 0.0, 100.0, 0.0])
self.assertEqual(
result.get('labels'),
[formatdate('2019-01-06'), formatdate('2019-01-07'), formatdate('2019-01-08'),\
formatdate('2019-01-09'), formatdate('2019-01-10'), formatdate('2019-01-11')]
)
frappe.db.rollback()
def test_weekly_dashboard_chart(self):
insert_test_records()
@ -155,37 +217,18 @@ class TestDashboardChart(unittest.TestCase):
result = get(chart_name ='Test Weekly Dashboard Chart', refresh = 1)
self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 0.0])
self.assertEqual(result.get('labels'), [formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')])
frappe.db.rollback()
def test_group_by_chart_type(self):
if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
frappe.get_doc(dict(
doctype = 'Dashboard Chart',
chart_name = 'Test Group By Dashboard Chart',
chart_type = 'Group By',
document_type = 'ToDo',
group_by_based_on = 'status',
filters_json = '[]',
)).insert()
result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1)
todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]})
self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count)
self.assertEqual(result.get('datasets')[0].get('values'), [50.0, 300.0, 800.0, 0.0])
self.assertEqual(result.get('labels'), [formatdate('2018-12-30'), formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')])
frappe.db.rollback()
def insert_test_records():
	# One Communication per date; the daily/weekly chart tests assert the
	# exact bucket sums of these (date, rating) pairs, so adding or
	# duplicating a record breaks the expected 50/300/800 weekly totals.
	create_new_communication(datetime(2018, 12, 30), 50)
	create_new_communication(datetime(2019, 1, 4), 100)
	create_new_communication(datetime(2019, 1, 6), 200)
	create_new_communication(datetime(2019, 1, 7), 400)
	create_new_communication(datetime(2019, 1, 8), 300)
	create_new_communication(datetime(2019, 1, 10), 100)
def create_new_communication(date, rating):
communication = {

View file

@ -1,162 +1,69 @@
{
"allow_copy": 0,
"allow_events_in_timeline": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 0,
"actions": [],
"autoname": "field:source_name",
"beta": 0,
"creation": "2019-02-06 07:55:29.579840",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"source_name",
"module",
"timeseries"
],
"fields": [
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fetch_if_empty": 0,
"fieldname": "source_name",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Source Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fetch_if_empty": 0,
"fieldname": "module",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Module",
"length": 0,
"no_copy": 0,
"options": "Module Def",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"reqd": 1
},
{
"allow_bulk_edit": 0,
"allow_in_quick_entry": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fetch_if_empty": 0,
"default": "0",
"fieldname": "timeseries",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Timeseries",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"translatable": 0,
"unique": 0
"label": "Timeseries"
}
],
"has_web_view": 0,
"hide_toolbar": 0,
"idx": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2019-04-09 14:20:51.548207",
"links": [],
"modified": "2020-06-26 18:00:37.421491",
"modified_by": "Administrator",
"module": "Desk",
"name": "Dashboard Chart Source",
"name_case": "",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1
},
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"share": 1,
"submit": 0,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "",
"track_changes": 1,
"track_seen": 0,
"track_views": 0
"track_changes": 1
}

View file

@ -18,10 +18,6 @@ def get_config(name):
return f.read()
class DashboardChartSource(Document):
def validate(self):
if frappe.session.user != "Administrator":
frappe.throw(_("Only Administrator is allowed to create Dashboard Chart Sources"))
def on_update(self):
export_to_files(record_list=[[self.doctype, self.name]],
record_module=self.module, create_init=True)

View file

@ -13,7 +13,6 @@
"column_break_4",
"success_message",
"documentation_url",
"user_can_dismiss",
"is_complete",
"section_break_6",
"steps"
@ -53,13 +52,6 @@
"label": "Success Message",
"reqd": 1
},
{
"default": "1",
"description": "Allow users to dismiss onboarding temporarily for a day",
"fieldname": "user_can_dismiss",
"fieldtype": "Check",
"label": "User Can Dismiss "
},
{
"fieldname": "documentation_url",
"fieldtype": "Data",
@ -90,7 +82,7 @@
}
],
"links": [],
"modified": "2020-05-18 19:42:39.738869",
"modified": "2020-06-08 15:36:04.701049",
"modified_by": "Administrator",
"module": "Desk",
"name": "Module Onboarding",

View file

@ -100,14 +100,16 @@ def send_notification_email(doc):
)
def get_email_header(doc):
	"""Return the email subject header for a notification log entry.

	The header includes the related document name for context; an
	empty/unknown notification type falls back to 'Default'.
	"""
	docname = doc.document_name
	header_map = {
		'Default': _('New Notification'),
		'Mention': _('New Mention on {0}').format(docname),
		'Assignment': _('Assignment Update on {0}').format(docname),
		'Share': _('New Document Shared {0}').format(docname),
		'Energy Point': _('Energy Point Update on {0}').format(docname),
	}
	# doc.type may be empty/None -> use the generic header
	return header_map[doc.type or 'Default']
@frappe.whitelist()
def mark_all_as_read():

View file

@ -27,12 +27,12 @@ def get_permission_query_conditions(user=None):
if "System Manager" in roles:
return None
allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read())
allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()]
return '''
`tabNumber Card`.`document_type` in {allowed_doctypes}
`tabNumber Card`.`document_type` in ({allowed_doctypes})
'''.format(
allowed_doctypes=allowed_doctypes,
allowed_doctypes=','.join(allowed_doctypes)
)
def has_permission(doc, ptype, user):

View file

@ -178,7 +178,8 @@ def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE',
description_html = "<div>{0}</div>".format(description) if description else None
if action=='CLOSE':
subject = _('Your assignment on {0} {1} has been removed').format(frappe.bold(doc_type), get_title_html(title))
subject = _('Your assignment on {0} {1} has been removed by {2}')\
.format(frappe.bold(doc_type), get_title_html(title), frappe.bold(user_name))
else:
user_name = frappe.bold(user_name)
document_type = frappe.bold(doc_type)

View file

@ -13,7 +13,7 @@ from frappe.modules import load_doctype_module
@frappe.whitelist()
def get_submitted_linked_docs(doctype, name, docs=None, linked=None):
def get_submitted_linked_docs(doctype, name, docs=None, visited=None):
"""
Get all nested submitted linked doctype linkinfo
@ -31,26 +31,27 @@ def get_submitted_linked_docs(doctype, name, docs=None, linked=None):
if not docs:
docs = []
if not linked:
linked = {}
if not visited:
visited = {}
if doctype not in visited:
visited[doctype] = []
if name in visited[doctype]:
return
linkinfo = get_linked_doctypes(doctype)
linked_docs = get_linked_docs(doctype, name, linkinfo)
link_count = 0
visited[doctype].append(name)
for link_doctype, link_names in linked_docs.items():
if link_doctype not in linked:
linked[link_doctype] = []
for link in link_names:
if link['name'] == name:
continue
if linked and name in linked[link_doctype]:
continue
linked[link_doctype].append(link['name'])
docinfo = link.update({"doctype": link_doctype})
validated_doc = validate_linked_doc(docinfo)
@ -58,16 +59,15 @@ def get_submitted_linked_docs(doctype, name, docs=None, linked=None):
continue
link_count += 1
if link.name in [doc.get("name") for doc in docs]:
continue
links = get_submitted_linked_docs(link_doctype, link.name, docs, linked)
docs.append({
"doctype": link_doctype,
"name": link.name,
"docstatus": link.docstatus,
"link_count": links.get("count")
})
links = get_submitted_linked_docs(link_doctype, link.name, docs, visited)
if links:
docs.append({
"doctype": link_doctype,
"name": link.name,
"docstatus": link.docstatus,
"link_count": links.get("count")
})
# sort linked documents by ascending number of links
docs.sort(key=lambda doc: doc.get("link_count"))

View file

@ -100,6 +100,7 @@ def get_docinfo(doc=None, doctype=None, name=None):
"shared": frappe.share.get_users(doc.doctype, doc.name),
"views": get_view_logs(doc.doctype, doc.name),
"energy_point_logs": get_point_logs(doc.doctype, doc.name),
"additional_timeline_content": get_additional_timeline_content(doc.doctype, doc.name),
"milestones": get_milestones(doc.doctype, doc.name),
"is_document_followed": is_document_followed(doc.doctype, doc.name, frappe.session.user),
"tags": get_tags(doc.doctype, doc.name),
@ -277,3 +278,14 @@ def get_document_email(doctype, name):
def get_automatic_email_link():
return frappe.db.get_value("Email Account", {"enable_incoming": 1, "enable_automatic_linking": 1}, "email_id")
def get_additional_timeline_content(doctype, docname):
	"""Collect extra timeline entries contributed through the
	'additional_timeline_content' hook: methods registered for all
	doctypes ('*') plus those registered for this specific doctype."""
	hooks = frappe.get_hooks().get('additional_timeline_content', {})
	hook_methods = hooks.get('*', []) + hooks.get(doctype, [])

	contents = []
	for hook_method in hook_methods:
		# each hook returns a list of timeline items (or None)
		contents.extend(frappe.get_attr(hook_method)(doctype, docname) or [])
	return contents

View file

@ -14,13 +14,16 @@ def get_leaderboards():
return leaderboards
@frappe.whitelist()
def get_energy_point_leaderboard(from_date, company = None, field = None, limit = None):
def get_energy_point_leaderboard(date_range, company = None, field = None, limit = None):
filters = [
['type', '!=', 'Review'],
]
if date_range:
date_range = frappe.parse_json(date_range)
filters.append(['creation', 'between', [date_range[0], date_range[1]]])
energy_point_users = frappe.db.get_all('Energy Point Log',
fields = ['user as name', 'sum(points) as value'],
filters = [
['type', '!=', 'Review'],
['creation', '>', from_date]
],
filters = filters,
group_by = 'user',
order_by = 'value desc'
)

View file

@ -35,7 +35,7 @@ def get_group_by_count(doctype, current_filters, field):
from
`tabToDo`, `tabUser`
where
`tabToDo`.status='Open' and
`tabToDo`.status!='Cancelled' and
`tabToDo`.owner = `tabUser`.name and
`tabUser`.user_type = 'System User'
{subquery_condition}

View file

@ -30,7 +30,7 @@ def get_context(context):
get_size(os.path.join(path, _file))) for _file in files if _file.endswith('sql.gz')]
files.sort(key=lambda x: x[1], reverse=True)
return {"files": files}
return {"files": files[:backup_limit]}
def get_scheduled_backup_limit():
backup_limit = frappe.db.get_singles_value('System Settings', 'backup_limit')
@ -89,4 +89,4 @@ def backup_files_and_notify_user(user_email=None):
def get_downloadable_links(backup_files):
for key in ['backup_path_files', 'backup_path_private_files']:
path = backup_files[key]
backup_files[key] = get_url('/'.join(path.split('/')[-2:]))
backup_files[key] = get_url('/'.join(path.split('/')[-2:]))

View file

@ -49,7 +49,7 @@ class Leaderboard {
this.timespans = [
"This Week", "This Month", "This Quarter", "This Year",
"Last Week", "Last Month", "Last Quarter", "Last Year",
"All Time", "Select From Date"
"All Time", "Select Date Range"
];
// for saving current selected filters
@ -113,7 +113,7 @@ class Leaderboard {
return {"label": __(d), value: d };
})
);
this.create_from_date_field();
this.create_date_range_field();
this.type_select = this.page.add_select(__("Field"),
this.options.selected_filter.map(d => {
@ -123,12 +123,12 @@ class Leaderboard {
this.timespan_select.on("change", (e) => {
this.options.selected_timespan = e.currentTarget.value;
if (this.options.selected_timespan === 'Select From Date') {
this.from_date_field.show();
if (this.options.selected_timespan === 'Select Date Range') {
this.date_range_field.show();
} else {
this.from_date_field.hide();
this.make_request();
this.date_range_field.hide();
}
this.make_request();
});
this.type_select.on("change", (e) => {
@ -137,21 +137,21 @@ class Leaderboard {
});
}
create_from_date_field() {
create_date_range_field() {
let timespan_field = $(this.parent).find(`.frappe-control[data-original-title='Timespan']`);
this.from_date_field = $(`<div class="from-date-field"></div>`).insertAfter(timespan_field).hide();
this.date_range_field = $(`<div class="from-date-field"></div>`).insertAfter(timespan_field).hide();
let date_field = frappe.ui.form.make_control({
df: {
fieldtype: 'Date',
fieldname: 'selected_from_date',
placeholder: frappe.datetime.month_start(),
default: frappe.datetime.month_start(),
fieldtype: 'DateRange',
fieldname: 'selected_date_range',
placeholder: "Date Range",
default: [frappe.datetime.month_start(), frappe.datetime.now_date()],
input_class: 'input-sm',
reqd: 1,
change: () => {
this.selected_from_date = date_field.get_value();
if (this.selected_from_date) this.make_request();
this.selected_date_range = date_field.get_value();
if (this.selected_date_range) this.make_request();
}
},
parent: $(this.parent).find('.from-date-field'),
@ -225,7 +225,7 @@ class Leaderboard {
frappe.call(
this.leaderboard_config[this.options.selected_doctype].method,
{
'from_date': this.get_from_date(),
'date_range': this.get_date_range(),
'company': this.options.selected_company,
'field': this.options.selected_filter_item,
'limit': this.leaderboard_limit,
@ -375,23 +375,22 @@ class Leaderboard {
</li>`);
}
get_from_date() {
get_date_range() {
let timespan = this.options.selected_timespan.toLowerCase();
let current_date = frappe.datetime.now_date();
let get_from_date = {
"this week": frappe.datetime.week_start(),
"this month": frappe.datetime.month_start(),
"this quarter": frappe.datetime.quarter_start(),
"this year": frappe.datetime.year_start(),
"last week": frappe.datetime.add_days(current_date, -7),
"last month": frappe.datetime.add_months(current_date, -1),
"last quarter": frappe.datetime.add_months(current_date, -3),
"last year": frappe.datetime.add_months(current_date, -12),
"all time": "",
"select from date": this.selected_from_date || frappe.datetime.month_start()
let date_range_map = {
"this week": [frappe.datetime.week_start(), current_date],
"this month": [frappe.datetime.month_start(), current_date],
"this quarter": [frappe.datetime.quarter_start(), current_date],
"this year": [frappe.datetime.year_start(), current_date],
"last week": [frappe.datetime.add_days(current_date, -7), current_date],
"last month": [frappe.datetime.add_months(current_date, -1), current_date],
"last quarter": [frappe.datetime.add_months(current_date, -3), current_date],
"last year": [frappe.datetime.add_months(current_date, -12), current_date],
"all time": null,
"select date range": this.selected_date_range || [frappe.datetime.month_start(), current_date]
}
return get_from_date[timespan];
return date_range_map[timespan];
}
}

View file

@ -6,6 +6,7 @@ from __future__ import unicode_literals
import frappe, json
from frappe.utils import cstr, unique, cint
from frappe.permissions import has_permission
from frappe.handler import is_whitelisted
from frappe import _
from six import string_types
import re
@ -74,8 +75,17 @@ def search_widget(doctype, txt, query=None, searchfield=None, start=0,
if query and query.split()[0].lower()!="select":
# by method
frappe.response["values"] = frappe.call(query, doctype, txt,
searchfield, start, page_length, filters, as_dict=as_dict)
try:
is_whitelisted(frappe.get_attr(query))
frappe.response["values"] = frappe.call(query, doctype, txt,
searchfield, start, page_length, filters, as_dict=as_dict)
except Exception as e:
if frappe.local.conf.developer_mode:
raise e
else:
frappe.respond_as_web_page(title='Invalid Method', html='Method not found',
indicator_color='red', http_status_code=404)
return
elif not query and doctype in standard_queries:
# from standard queries
search_widget(doctype, txt, standard_queries[doctype][0],
@ -157,7 +167,7 @@ def search_widget(doctype, txt, query=None, searchfield=None, start=0,
strict=False)
if doctype in UNTRANSLATED_DOCTYPES:
values = tuple([v for v in list(values) if re.search(txt+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)])
values = tuple([v for v in list(values) if re.search(re.escape(txt)+".*", (_(v.name) if as_dict else _(v[0])), re.IGNORECASE)])
# remove _relevance from results
if as_dict:

View file

@ -10,7 +10,7 @@ import socket
import time
from frappe import _
from frappe.model.document import Document
from frappe.utils import validate_email_address, cint, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days
from frappe.utils import validate_email_address, cint, cstr, get_datetime, DATE_FORMAT, strip, comma_or, sanitize_html, add_days
from frappe.utils.user import is_system_user
from frappe.utils.jinja import render_template
from frappe.email.smtp import SMTPServer
@ -169,19 +169,20 @@ class EmailAccount(Document):
try:
email_server.connect()
except (error_proto, imaplib.IMAP4.error) as e:
message = e.message.lower().replace(" ","")
if in_receive and any(map(lambda t: t in message, ['authenticationfail', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed
e = cstr(e)
message = e.lower().replace(" ","")
if in_receive and any(map(lambda t: t in message, ['authenticationfailed', 'loginviayourwebbrowser', #abbreviated to work with both failure and failed
'loginfailed', 'err[auth]', 'errtemporaryerror'])): #temporary error to deal with godaddy
# if called via self.receive and it leads to authentication error, disable incoming
# and send email to system manager
self.handle_incoming_connect_error(
description=_('Authentication failed while receiving emails from Email Account {0}. Message from server: {1}').format(self.name, e.message)
description=_('Authentication failed while receiving emails from Email Account {0}. Message from server: {1}').format(self.name, e)
)
return None
else:
frappe.throw(e.message)
frappe.throw(e)
except socket.error:
if in_receive:
@ -273,6 +274,8 @@ class EmailAccount(Document):
for idx, msg in enumerate(incoming_mails):
uid = None if not uid_list else uid_list[idx]
self.flags.notify = True
try:
args = {
"uid": uid,
@ -293,7 +296,11 @@ class EmailAccount(Document):
else:
frappe.db.commit()
if communication:
if communication and self.flags.notify:
# If email already exists in the system
# then do not send notifications for the same email.
attachments = []
if hasattr(communication, '_attachments'):
@ -362,6 +369,9 @@ class EmailAccount(Document):
name = names[0].get("name")
# email is already available update communication uid instead
frappe.db.set_value("Communication", name, "uid", uid, update_modified=False)
self.flags.notify = False
return frappe.get_doc("Communication", name)
if email.content_type == 'text/html':
@ -468,26 +478,38 @@ class EmailAccount(Document):
if self.append_to and self.sender_field:
if self.subject_field:
# try and match by subject and sender
# if sent by same sender with same subject,
# append it to old coversation
subject = frappe.as_unicode(strip(re.sub(r"(^\s*(fw|fwd|wg)[^:]*:|\s*(re|aw)[^:]*:\s*)*",
"", email.subject, 0, flags=re.IGNORECASE)))
if '#' in email.subject:
# try and match if ID is found
# document ID is appended to subject
# example "Re: Your email (#OPP-2020-2334343)"
parent_id = email.subject.rsplit('#', 1)[-1].strip(' ()')
if parent_id:
parent = frappe.db.get_all(self.append_to, filters = dict(name = parent_id),
fields = 'name')
parent = frappe.db.get_all(self.append_to, filters={
self.sender_field: email.from_email,
self.subject_field: ("like", "%{0}%".format(subject)),
"creation": (">", (get_datetime() - relativedelta(days=60)).strftime(DATE_FORMAT))
}, fields="name")
if not parent:
# try and match by subject and sender
# if sent by same sender with same subject,
# append it to old coversation
subject = frappe.as_unicode(strip(re.sub(r"(^\s*(fw|fwd|wg)[^:]*:|\s*(re|aw)[^:]*:\s*)*",
"", email.subject, 0, flags=re.IGNORECASE)))
parent = frappe.db.get_all(self.append_to, filters={
self.sender_field: email.from_email,
self.subject_field: ("like", "%{0}%".format(subject)),
"creation": (">", (get_datetime() - relativedelta(days=60)).strftime(DATE_FORMAT))
}, fields = "name", limit = 1)
# match only subject field
# when the from_email is of a user in the system
# and subject is atleast 10 chars long
if not parent and len(subject) > 10 and is_system_user(email.from_email):
# match only subject field
# when the from_email is of a user in the system
# and subject is atleast 10 chars long
parent = frappe.db.get_all(self.append_to, filters={
self.subject_field: ("like", "%{0}%".format(subject)),
"creation": (">", (get_datetime() - relativedelta(days=60)).strftime(DATE_FORMAT))
}, fields="name")
}, fields = "name", limit = 1)
if parent:
parent = frappe._dict(doctype=self.append_to, name=parent[0].name)

View file

@ -191,7 +191,7 @@ def subscribe(email, email_group=_('Website')):
<p><a href="{2}">{3}</a></p>
""".format(*messages)
frappe.sendmail(email, subject=getattr('email_template', 'subject', '') or _("Confirm Your Email"), content=content)
frappe.sendmail(email, subject=getattr('email_template', 'subject', '') or _("Confirm Your Email"), content=content, now=True)
@frappe.whitelist(allow_guest=True)
def confirm_subscription(email, email_group=_('Website')):

View file

@ -48,15 +48,11 @@
"default": "1",
"fieldname": "enabled",
"fieldtype": "Check",
"label": "Enabled",
"show_days": 1,
"show_seconds": 1
"label": "Enabled"
},
{
"fieldname": "column_break_2",
"fieldtype": "Column Break",
"show_days": 1,
"show_seconds": 1
"fieldtype": "Column Break"
},
{
"default": "Email",
@ -65,9 +61,7 @@
"fieldtype": "Select",
"label": "Channel",
"options": "Email\nSlack\nSystem Notification",
"reqd": 1,
"show_days": 1,
"show_seconds": 1
"reqd": 1
},
{
"depends_on": "eval:doc.channel=='Slack'",
@ -75,16 +69,12 @@
"fieldtype": "Link",
"label": "Slack Channel",
"mandatory_depends_on": "eval:doc.channel=='Slack'",
"options": "Slack Webhook URL",
"show_days": 1,
"show_seconds": 1
"options": "Slack Webhook URL"
},
{
"fieldname": "filters",
"fieldtype": "Section Break",
"label": "Filters",
"show_days": 1,
"show_seconds": 1
"label": "Filters"
},
{
"description": "To add dynamic subject, use jinja tags like\n\n<div><pre><code>{{ doc.name }} Delivered</code></pre></div>",
@ -93,9 +83,7 @@
"ignore_xss_filter": 1,
"in_list_view": 1,
"label": "Subject",
"reqd": 1,
"show_days": 1,
"show_seconds": 1
"reqd": 1
},
{
"fieldname": "document_type",
@ -105,17 +93,13 @@
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"search_index": 1,
"show_days": 1,
"show_seconds": 1
"search_index": 1
},
{
"default": "0",
"fieldname": "is_standard",
"fieldtype": "Check",
"label": "Is Standard",
"show_days": 1,
"show_seconds": 1
"label": "Is Standard"
},
{
"depends_on": "is_standard",
@ -123,86 +107,67 @@
"fieldtype": "Link",
"in_standard_filter": 1,
"label": "Module",
"options": "Module Def",
"show_days": 1,
"show_seconds": 1
"options": "Module Def"
},
{
"fieldname": "col_break_1",
"fieldtype": "Column Break",
"show_days": 1,
"show_seconds": 1
"fieldtype": "Column Break"
},
{
"depends_on": "eval: doc.document_type",
"fieldname": "event",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Send Alert On",
"options": "\nNew\nSave\nSubmit\nCancel\nDays After\nDays Before\nValue Change\nMethod\nCustom",
"reqd": 1,
"search_index": 1,
"show_days": 1,
"show_seconds": 1
"search_index": 1
},
{
"depends_on": "eval:doc.event=='Method'",
"description": "Trigger on valid methods like \"before_insert\", \"after_update\", etc (will depend on the DocType selected)",
"fieldname": "method",
"fieldtype": "Data",
"label": "Trigger Method",
"show_days": 1,
"show_seconds": 1
"label": "Trigger Method"
},
{
"depends_on": "eval:doc.event==\"Days After\" || doc.event==\"Days Before\"",
"depends_on": "eval:doc.document_type && (doc.event==\"Days After\" || doc.event==\"Days Before\")",
"description": "Send alert if date matches this field's value",
"fieldname": "date_changed",
"fieldtype": "Select",
"label": "Reference Date",
"show_days": 1,
"show_seconds": 1
"label": "Reference Date"
},
{
"default": "0",
"depends_on": "eval:doc.event==\"Days After\" || doc.event==\"Days Before\"",
"depends_on": "eval:doc.document_type && (doc.event==\"Days After\" || doc.event==\"Days Before\")",
"description": "Send days before or after the reference date",
"fieldname": "days_in_advance",
"fieldtype": "Int",
"label": "Days Before or After",
"show_days": 1,
"show_seconds": 1
"label": "Days Before or After"
},
{
"depends_on": "eval:doc.event==\"Value Change\"",
"depends_on": "eval:doc.document_type && doc.event==\"Value Change\"",
"description": "Send alert if this field's value changes",
"fieldname": "value_changed",
"fieldtype": "Select",
"label": "Value Changed",
"show_days": 1,
"show_seconds": 1
"label": "Value Changed"
},
{
"fieldname": "sender",
"fieldtype": "Link",
"label": "Sender",
"options": "Email Account",
"show_days": 1,
"show_seconds": 1
"options": "Email Account"
},
{
"fieldname": "sender_email",
"fieldtype": "Data",
"label": "Sender Email",
"options": "Email",
"read_only": 1,
"show_days": 1,
"show_seconds": 1
"read_only": 1
},
{
"fieldname": "section_break_9",
"fieldtype": "Section Break",
"show_days": 1,
"show_seconds": 1
"fieldtype": "Section Break"
},
{
"description": "Optional: The alert will be sent if this expression is true",
@ -210,128 +175,96 @@
"fieldtype": "Code",
"ignore_xss_filter": 1,
"in_list_view": 1,
"label": "Condition",
"show_days": 1,
"show_seconds": 1
"label": "Condition"
},
{
"fieldname": "column_break_6",
"fieldtype": "Column Break",
"show_days": 1,
"show_seconds": 1
"fieldtype": "Column Break"
},
{
"fieldname": "html_7",
"fieldtype": "HTML",
"options": "<p><strong>Condition Examples:</strong></p>\n<pre>doc.status==\"Open\"<br>doc.due_date==nowdate()<br>doc.total &gt; 40000\n</pre>\n",
"show_days": 1,
"show_seconds": 1
"options": "<p><strong>Condition Examples:</strong></p>\n<pre>doc.status==\"Open\"<br>doc.due_date==nowdate()<br>doc.total &gt; 40000\n</pre>\n"
},
{
"collapsible": 1,
"fieldname": "property_section",
"fieldtype": "Section Break",
"label": "Set Property After Alert",
"show_days": 1,
"show_seconds": 1
"label": "Set Property After Alert"
},
{
"fieldname": "set_property_after_alert",
"fieldtype": "Select",
"label": "Set Property After Alert",
"show_days": 1,
"show_seconds": 1
"label": "Set Property After Alert"
},
{
"fieldname": "property_value",
"fieldtype": "Data",
"label": "Value To Be Set",
"show_days": 1,
"show_seconds": 1
"label": "Value To Be Set"
},
{
"depends_on": "eval:doc.channel!=='Slack'",
"fieldname": "column_break_5",
"fieldtype": "Section Break",
"label": "Recipients",
"show_days": 1,
"show_seconds": 1
"label": "Recipients"
},
{
"fieldname": "recipients",
"fieldtype": "Table",
"label": "Recipients",
"mandatory_depends_on": "eval:doc.channel!=='Slack'",
"options": "Notification Recipient",
"show_days": 1,
"show_seconds": 1
"options": "Notification Recipient"
},
{
"fieldname": "message_sb",
"fieldtype": "Section Break",
"label": "Message",
"show_days": 1,
"show_seconds": 1
"label": "Message"
},
{
"default": "Add your message here",
"fieldname": "message",
"fieldtype": "Code",
"ignore_xss_filter": 1,
"label": "Message",
"show_days": 1,
"show_seconds": 1
"label": "Message"
},
{
"depends_on": "eval:doc.channel=='Email'",
"fieldname": "message_examples",
"fieldtype": "HTML",
"label": "Message Examples",
"options": "<h5>Message Example</h5>\n\n<pre>&lt;h3&gt;Order Overdue&lt;/h3&gt;\n\n&lt;p&gt;Transaction {{ doc.name }} has exceeded Due Date. Please take necessary action.&lt;/p&gt;\n\n&lt;!-- show last comment --&gt;\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n&lt;h4&gt;Details&lt;/h4&gt;\n\n&lt;ul&gt;\n&lt;li&gt;Customer: {{ doc.customer }}\n&lt;li&gt;Amount: {{ doc.grand_total }}\n&lt;/ul&gt;\n</pre>",
"show_days": 1,
"show_seconds": 1
"options": "<h5>Message Example</h5>\n\n<pre>&lt;h3&gt;Order Overdue&lt;/h3&gt;\n\n&lt;p&gt;Transaction {{ doc.name }} has exceeded Due Date. Please take necessary action.&lt;/p&gt;\n\n&lt;!-- show last comment --&gt;\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n&lt;h4&gt;Details&lt;/h4&gt;\n\n&lt;ul&gt;\n&lt;li&gt;Customer: {{ doc.customer }}\n&lt;li&gt;Amount: {{ doc.grand_total }}\n&lt;/ul&gt;\n</pre>"
},
{
"depends_on": "eval:doc.channel=='Slack'",
"fieldname": "slack_message_examples",
"fieldtype": "HTML",
"label": "Message Examples",
"options": "<h5>Message Example</h5>\n\n<pre>*Order Overdue*\n\nTransaction {{ doc.name }} has exceeded Due Date. Please take necessary action.\n\n<!-- show last comment -->\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n*Details*\n\n\u2022 Customer: {{ doc.customer }}\n\u2022 Amount: {{ doc.grand_total }}\n</pre>",
"show_days": 1,
"show_seconds": 1
"options": "<h5>Message Example</h5>\n\n<pre>*Order Overdue*\n\nTransaction {{ doc.name }} has exceeded Due Date. Please take necessary action.\n\n<!-- show last comment -->\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n*Details*\n\n\u2022 Customer: {{ doc.customer }}\n\u2022 Amount: {{ doc.grand_total }}\n</pre>"
},
{
"fieldname": "view_properties",
"fieldtype": "Button",
"label": "View Properties (via Customize Form)",
"show_days": 1,
"show_seconds": 1
"label": "View Properties (via Customize Form)"
},
{
"collapsible": 1,
"collapsible_depends_on": "attach_print",
"fieldname": "column_break_25",
"fieldtype": "Section Break",
"label": "Print Settings",
"show_days": 1,
"show_seconds": 1
"label": "Print Settings"
},
{
"default": "0",
"fieldname": "attach_print",
"fieldtype": "Check",
"label": "Attach Print",
"show_days": 1,
"show_seconds": 1
"label": "Attach Print"
},
{
"depends_on": "attach_print",
"fieldname": "print_format",
"fieldtype": "Link",
"label": "Print Format",
"options": "Print Format",
"show_days": 1,
"show_seconds": 1
"options": "Print Format"
},
{
"default": "0",
@ -339,14 +272,12 @@
"description": "If enabled, the notification will show up in the notifications dropdown on the top right corner of the navigation bar.",
"fieldname": "send_system_notification",
"fieldtype": "Check",
"label": "Send System Notification",
"show_days": 1,
"show_seconds": 1
"label": "Send System Notification"
}
],
"icon": "fa fa-envelope",
"links": [],
"modified": "2020-05-29 16:03:10.914526",
"modified": "2020-06-23 14:01:25.462544",
"modified_by": "Administrator",
"module": "Email",
"name": "Notification",

View file

@ -347,7 +347,7 @@ def flush(from_test=False):
if not smtpserver:
smtpserver = SMTPServer()
smtpserver_dict[email.sender] = smtpserver
if from_test:
send_one(email.name, smtpserver, auto_commit)
else:
@ -390,12 +390,12 @@ def send_one(email, smtpserver=None, auto_commit=True, now=False):
where
name=%s
for update''', email, as_dict=True)
if len(email):
email = email[0]
else:
return
recipients_list = frappe.db.sql('''select name, recipient, status from
`tabEmail Queue Recipient` where parent=%s''', email.name, as_dict=1)
@ -417,6 +417,8 @@ def send_one(email, smtpserver=None, auto_commit=True, now=False):
if email.communication:
frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)
email_sent_to_any_recipient = None
try:
message = None

View file

@ -12,7 +12,7 @@ source_link = "https://github.com/frappe/frappe"
app_license = "MIT"
app_logo_url = '/assets/frappe/images/frappe-framework-logo.png'
develop_version = '12.x.x-develop'
develop_version = '13.x.x-develop'
app_email = "info@frappe.io"
@ -273,7 +273,6 @@ setup_wizard_exception = [
before_migrate = ['frappe.patches.v11_0.sync_user_permission_doctype_before_migrate.execute']
after_migrate = [
'frappe.website.doctype.website_theme.website_theme.generate_theme_files_if_not_exist',
'frappe.modules.full_text_search.build_index_for_all_routes'
]

View file

@ -113,23 +113,25 @@ def remove_from_installed_apps(app_name):
installed_apps = frappe.get_installed_apps()
if app_name in installed_apps:
installed_apps.remove(app_name)
frappe.db.set_global("installed_apps", json.dumps(installed_apps))
frappe.db.set_value("DefaultValue", {"defkey": "installed_apps"}, "defvalue", json.dumps(installed_apps))
frappe.db.commit()
if frappe.flags.in_install:
post_install()
def remove_app(app_name, dry_run=False, yes=False):
"""Delete app and all linked to the app's module with the app."""
def remove_app(app_name, dry_run=False, yes=False, no_backup=False):
"""Remove app and all linked to the app's module with the app from a site."""
if not dry_run and not yes:
confirm = input("All doctypes (including custom), modules related to this app will be deleted. Are you sure you want to continue (y/n) ? ")
if confirm!="y":
return
from frappe.utils.backups import scheduled_backup
print("Backing up...")
scheduled_backup(ignore_files=True)
if not no_backup:
from frappe.utils.backups import scheduled_backup
print("Backing up...")
scheduled_backup(ignore_files=True)
frappe.flags.in_uninstall = True
drop_doctypes = []
# remove modules, doctypes, roles
@ -164,6 +166,8 @@ def remove_app(app_name, dry_run=False, yes=False):
for doctype in set(drop_doctypes):
frappe.db.sql("drop table `tab{0}`".format(doctype))
frappe.flags.in_uninstall = False
def post_install(rebuild_website=False):
if rebuild_website:
render.clear_cache()
@ -299,12 +303,15 @@ def remove_missing_apps():
def extract_sql_gzip(sql_gz_path):
try:
# kdvf - keep, decompress, verbose, force
subprocess.check_call(['gzip', '-kdvf', sql_gz_path])
# dvf - decompress, verbose, force
original_file = sql_gz_path
decompressed_file = original_file.rstrip(".gz")
cmd = 'gzip -dvf < {0} > {1}'.format(original_file, decompressed_file)
subprocess.check_call(cmd, shell=True)
except:
raise
return sql_gz_path[:-3]
return decompressed_file
def extract_tar_files(site_name, file_path, folder_name):
# Need to do frappe.init to maintain the site locals
@ -326,3 +333,34 @@ def extract_tar_files(site_name, file_path, folder_name):
frappe.destroy()
return tar_path
def is_downgrade(sql_file_path, verbose=False):
	"""Check whether restoring the given DB backup on this bench is a downgrade.

	Scans the SQL dump for the ``tabInstalled Application`` INSERT statement,
	reads the Frappe version recorded in the backup and compares it against the
	currently installed ``frappe.__version__``.

	Args:
		sql_file_path (str): path to the (decompressed) .sql backup file.
		verbose (bool): if True, print a message when a downgrade is detected.

	Returns:
		bool: True if the backup was taken on a newer Frappe than the current
			one, False otherwise (including when a version fails to parse).
			Implicitly returns None when no installed-apps row is found.
	"""
	from semantic_version import Version
	head = "INSERT INTO `tabInstalled Application` VALUES"
	with open(sql_file_path) as f:
		for line in f:
			if head in line:
				# 'line' (str) format: ('2056588823','2020-05-11 18:21:31.488367',...,'frappe','v10.1.71-74 (3c50d5e) (v10.x.x)','v10.x.x'),('855c640b8e',...,'your_custom_app','0.0.1','master')
				# NOTE: split on the marker instead of lstrip(head) — str.lstrip
				# treats its argument as a *set of characters*, not a prefix, and
				# could silently eat leading characters of the values tuple.
				line = line.strip().split(head, 1)[-1].rstrip(";").strip()
				# 'all_apps' (list) format: [('frappe', '12.x.x-develop ()', 'develop'), ('your_custom_app', '0.0.1', 'master')]
				all_apps = [x[-3:] for x in frappe.safe_eval(line)]
				for app in all_apps:
					app_name = app[0]
					app_version = app[1].split(" ")[0]
					if app_name == "frappe":
						try:
							current_version = Version(frappe.__version__)
							# strip a leading "v" (e.g. "v10.1.71") before parsing
							backup_version = Version(app_version[1:] if app_version[0] == "v" else app_version)
						except ValueError:
							return False
						downgrade = backup_version > current_version
						if verbose and downgrade:
							print("Your site will be downgraded from Frappe {0} to {1}".format(current_version, backup_version))
						return downgrade

View file

@ -97,10 +97,12 @@ def backup_to_dropbox(upload_db_backup=True):
if frappe.flags.create_new_backup:
backup = new_backup(ignore_files=True)
filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
site_config = os.path.join(get_backups_path(), os.path.basename(backup.site_config_backup_path))
else:
filename = get_latest_backup_file()
filename, site_config = get_latest_backup_file()
upload_file_to_dropbox(filename, "/database", dropbox_client)
upload_file_to_dropbox(site_config, "/database", dropbox_client)
# delete older databases
if dropbox_settings['no_of_backups']:

View file

@ -12,6 +12,7 @@ from frappe import _
from frappe.model.document import Document
from frappe.utils import get_request_site_address
from googleapiclient.errors import HttpError
from frappe.utils.password import set_encrypted_password
from frappe.utils import add_days, get_datetime, get_weekdays, now_datetime, add_to_date, get_time_zone
from dateutil import parser
from datetime import datetime, timedelta
@ -198,7 +199,7 @@ def check_google_calendar(account, google_calendar):
except HttpError as err:
frappe.throw(_("Google Calendar - Could not create Calendar for {0}, error code {1}.").format(account.name, err.resp.status))
def sync_events_from_google_calendar(g_calendar, method=None, page_length=10):
def sync_events_from_google_calendar(g_calendar, method=None):
"""
Syncs Events from Google Calendar in Framework Calendar.
Google Calendar returns nextSyncToken when all the events in Google Calendar are fetched.
@ -210,23 +211,32 @@ def sync_events_from_google_calendar(g_calendar, method=None, page_length=10):
if not account.pull_from_google_calendar:
return
sync_token = account.get_password(fieldname="next_sync_token", raise_exception=False) or None
events = frappe._dict()
results = []
while True:
try:
# API Response listed at EOF
sync_token = account.get_password(fieldname="next_sync_token", raise_exception=False) or None
events = google_calendar.events().list(calendarId=account.google_calendar_id, maxResults=page_length,
singleEvents=False, showDeleted=True, syncToken=sync_token).execute()
events = google_calendar.events().list(calendarId=account.google_calendar_id, maxResults=2000,
pageToken=events.get("nextPageToken"), singleEvents=False, showDeleted=True, syncToken=sync_token).execute()
except HttpError as err:
frappe.throw(_("Google Calendar - Could not fetch event from Google Calendar, error code {0}.").format(err.resp.status))
msg = _("Google Calendar - Could not fetch event from Google Calendar, error code {0}.").format(err.resp.status)
if err.resp.status == 410:
set_encrypted_password("Google Calendar", account.name, "", "next_sync_token")
frappe.db.commit()
msg += ' ' + _('Sync token was invalid and has been resetted, Retry syncing.')
frappe.msgprint(msg, title='Invalid Sync Token', indicator='blue')
else:
frappe.throw(msg)
for event in events.get("items", []):
results.append(event)
if not events.get("nextPageToken"):
if events.get("nextSyncToken"):
frappe.db.set_value("Google Calendar", account.name, "next_sync_token", events.get("nextSyncToken"))
frappe.db.commit()
account.next_sync_token = events.get("nextSyncToken")
account.save()
break
for idx, event in enumerate(results):

View file

@ -190,12 +190,16 @@ def upload_system_backup_to_google_drive():
set_progress(1, "Backing up Data.")
backup = new_backup()
fileurl_backup = os.path.basename(backup.backup_path_db)
fileurl_site_config = os.path.basename(backup.site_config_backup_path)
fileurl_public_files = os.path.basename(backup.backup_path_files)
fileurl_private_files = os.path.basename(backup.backup_path_private_files)
else:
fileurl_backup, fileurl_public_files, fileurl_private_files = get_latest_backup_file(with_files=True)
fileurl_backup, fileurl_site_config, fileurl_public_files, fileurl_private_files = get_latest_backup_file(with_files=True)
for fileurl in [fileurl_backup, fileurl_site_config, fileurl_public_files, fileurl_private_files]:
if not fileurl:
continue
for fileurl in [fileurl_backup, fileurl_public_files, fileurl_private_files]:
file_metadata = {
"name": fileurl,
"parents": [account.backup_folder_id]
@ -218,11 +222,13 @@ def upload_system_backup_to_google_drive():
return _("Google Drive Backup Successful.")
def daily_backup():
if frappe.db.get_single_value("Google Drive", "frequency") == "Daily":
drive_settings = frappe.db.get_singles_dict('Google Drive')
if drive_settings.enable and drive_settings.frequency == "Daily":
upload_system_backup_to_google_drive()
def weekly_backup():
if frappe.db.get_single_value("Google Drive", "frequency") == "Weekly":
drive_settings = frappe.db.get_singles_dict('Google Drive')
if drive_settings.enable and drive_settings.frequency == "Weekly":
upload_system_backup_to_google_drive()
def get_absolute_path(filename):

View file

@ -19,6 +19,9 @@ from botocore.exceptions import ClientError
class S3BackupSettings(Document):
def validate(self):
if not self.enabled:
return
if not self.endpoint_url:
self.endpoint_url = 'https://s3.amazonaws.com'
conn = boto3.client(
@ -115,19 +118,21 @@ def backup_to_s3():
backup = new_backup(ignore_files=False, backup_path_db=None,
backup_path_files=None, backup_path_private_files=None, force=True)
db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
site_config = os.path.join(get_backups_path(), os.path.basename(backup.site_config_backup_path))
if backup_files:
files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
else:
if backup_files:
db_filename, files_filename, private_files = get_latest_backup_file(with_files=backup_files)
db_filename, site_config, files_filename, private_files = get_latest_backup_file(with_files=backup_files)
else:
db_filename = get_latest_backup_file()
db_filename, site_config = get_latest_backup_file()
folder = os.path.basename(db_filename)[:15] + '/'
# for adding datetime to folder name
upload_file_to_s3(db_filename, folder, conn, bucket)
upload_file_to_s3(site_config, folder, conn, bucket)
if backup_files:
upload_file_to_s3(private_files, folder, conn, bucket)
upload_file_to_s3(files_filename, folder, conn, bucket)

View file

@ -1,6 +1,7 @@
# imports - standard imports
import getpass
import json
import os
import re
import sys
@ -8,6 +9,7 @@ import sys
import click
from html2text import html2text
import requests
from tenacity import retry, stop_after_attempt, wait_fixed
# imports - module imports
import frappe
@ -138,6 +140,7 @@ def select_team(session):
return team
@retry(stop=stop_after_attempt(5))
def get_new_site_options():
site_options_sc = session.post(options_url)
@ -158,6 +161,7 @@ def is_valid_subdomain(subdomain):
print("Subdomain contains invalid characters. Use lowercase characters, numbers and hyphens")
@retry(stop=stop_after_attempt(5))
def is_subdomain_available(subdomain):
res = session.post(site_exists_url, {"subdomain": subdomain})
if res.ok:
@ -252,6 +256,17 @@ def get_subdomain(domain):
return subdomain
@retry(stop=stop_after_attempt(2), wait=wait_fixed(5))
def upload_backup_file(file_type, file_path):
	"""Upload a single backup artifact to the remote press site.

	Retried up to 2 times with a 5 second wait between attempts (tenacity).

	Args:
		file_type (str): backup artifact type, e.g. "database", "public" or "private".
		file_path (str): local path of the file to upload.

	Returns:
		requests.Response: response of the upload request.
	"""
	# open via a context manager so the file handle is closed after the
	# request instead of being leaked (the original left it open, once per
	# retry attempt)
	with open(file_path, "rb") as backup_file:
		return session.post(files_url, data={}, files={
			"file": backup_file,
			"is_private": 1,
			"folder": "Home",
			"method": "press.api.site.upload_backup",
			"type": file_type
		})
@add_line_after
def upload_backup(local_site):
# take backup
@ -265,14 +280,11 @@ def upload_backup(local_site):
("public", odb.backup_path_files),
("private", odb.backup_path_private_files)
]):
file_upload_response = session.post(files_url, data={}, files={
"file": open(file_path, "rb"),
"is_private": 1,
"folder": "Home",
"method": "press.api.site.upload_backup",
"type": file_type
})
print("Uploading files ({}/3)".format(x+1), end="\r")
file_name = file_path.split(os.sep)[-1]
print("Uploading {} file: {} ({}/3)".format(file_type, file_name, x+1))
file_upload_response = upload_backup_file(file_type, file_path)
if file_upload_response.ok:
files_session[file_type] = file_upload_response.json()["message"]
else:
@ -362,7 +374,10 @@ def create_session():
if login_sc.ok:
print("Authorization Successful! ✅")
team = select_team(session)
session.headers.update({"X-Press-Team": team })
session.headers.update({
"X-Press-Team": team,
"Connection": "keep-alive"
})
return session
else:
handle_request_failure(message="Authorization Failed with Error Code {}".format(login_sc.status_code), traceback=False)

View file

@ -47,16 +47,17 @@ def get_latest_backup_file(with_files=False):
def get_latest(file_ext):
file_list = glob.glob(os.path.join(get_backups_path(), file_ext))
return max(file_list, key=os.path.getctime)
return max(file_list, key=os.path.getctime) if file_list else None
latest_file = get_latest('*.sql.gz')
latest_site_config = get_latest('*.json')
if with_files:
latest_public_file_bak = get_latest('*-files.tar')
latest_private_file_bak = get_latest('*-private-files.tar')
return latest_file, latest_public_file_bak, latest_private_file_bak
return latest_file, latest_site_config, latest_public_file_bak, latest_private_file_bak
return latest_file
return latest_file, latest_site_config
def get_file_size(file_path, unit):
@ -76,7 +77,7 @@ def get_file_size(file_path, unit):
def validate_file_size():
frappe.flags.create_new_backup = True
latest_file = get_latest_backup_file()
latest_file, site_config = get_latest_backup_file()
file_size = get_file_size(latest_file, unit='GB')
if file_size > 1:

View file

@ -504,19 +504,7 @@ class BaseDocument(object):
for _df in fields_to_fetch:
if self.is_new() or self.docstatus != 1 or _df.allow_on_submit:
fetch_from_fieldname = _df.fetch_from.split('.')[-1]
value = values[fetch_from_fieldname]
if _df.fieldtype == 'Small Text' or _df.fieldtype == 'Text' or _df.fieldtype == 'Data':
if fetch_from_fieldname in default_fields:
from frappe.model.meta import get_default_df
fetch_from_df = get_default_df(fetch_from_fieldname)
else:
fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname)
fetch_from_ft = fetch_from_df.get('fieldtype')
if fetch_from_ft == 'Text Editor' and value:
value = unescape_html(strip_html(value))
setattr(self, _df.fieldname, value)
self.set_fetch_from_value(doctype, _df, values)
notify_link_count(doctype, docname)
@ -531,6 +519,27 @@ class BaseDocument(object):
return invalid_links, cancelled_links
def set_fetch_from_value(self, doctype, df, values):
fetch_from_fieldname = df.fetch_from.split('.')[-1]
value = values[fetch_from_fieldname]
if df.fieldtype in ['Small Text', 'Text', 'Data']:
if fetch_from_fieldname in default_fields:
from frappe.model.meta import get_default_df
fetch_from_df = get_default_df(fetch_from_fieldname)
else:
fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname)
if not fetch_from_df:
frappe.throw(
_('Please check the value of "Fetch From" set for field {0}').format(frappe.bold(df.label)),
title = _('Wrong Fetch From value')
)
fetch_from_ft = fetch_from_df.get('fieldtype')
if fetch_from_ft == 'Text Editor' and value:
value = unescape_html(strip_html(value))
setattr(self, df.fieldname, value)
def _validate_selects(self):
if frappe.flags.in_import:
return

View file

@ -77,7 +77,7 @@ def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reloa
delete_from_table(doctype, name, ignore_doctypes, None)
if not (for_reload or frappe.flags.in_migrate or frappe.flags.in_install or frappe.flags.in_test):
if not (for_reload or frappe.flags.in_migrate or frappe.flags.in_install or frappe.flags.in_uninstall or frappe.flags.in_test):
try:
delete_controllers(name, doc.module)
except (FileNotFoundError, OSError, KeyError):

View file

@ -396,6 +396,11 @@ class Document(BaseDocument):
def get_doc_before_save(self):
return getattr(self, '_doc_before_save', None)
def has_value_changed(self, fieldname):
'''Returns true if value is changed before and after saving'''
previous = self.get_doc_before_save()
return previous.get(fieldname)!=self.get(fieldname) if previous else True
def set_new_name(self, force=False, set_name=None, set_child_names=True):
"""Calls `frappe.naming.set_new_name` for parent and child docs."""
if self.flags.name_set and not force:
@ -825,7 +830,7 @@ class Document(BaseDocument):
def run_notifications(self, method):
"""Run notifications for this method"""
if frappe.flags.in_import or frappe.flags.in_patch or frappe.flags.in_install:
if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
@ -1059,6 +1064,8 @@ class Document(BaseDocument):
def save_version(self):
"""Save version info"""
if not self._doc_before_save and frappe.flags.in_patch: return
version = frappe.new_doc('Version')
if not self._doc_before_save:
version.for_insert(self)

View file

@ -19,6 +19,7 @@ execute:frappe.reload_doc('core', 'doctype', 'module_def') #2017-09-22
execute:frappe.reload_doc('core', 'doctype', 'version') #2017-04-01
execute:frappe.reload_doc('email', 'doctype', 'document_follow')
execute:frappe.reload_doc('core', 'doctype', 'communication_link') #2019-10-02
execute:frappe.reload_doc('core', 'doctype', 'has_role')
execute:frappe.reload_doc('core', 'doctype', 'communication') #2019-10-02
frappe.patches.v11_0.replicate_old_user_permissions
frappe.patches.v11_0.reload_and_rename_view_log #2019-01-03
@ -289,3 +290,4 @@ execute:frappe.delete_doc("DocType", "Onboarding Slide Field")
execute:frappe.delete_doc("DocType", "Onboarding Slide Help Link")
frappe.patches.v13_0.update_date_filters_in_user_settings
frappe.patches.v13_0.update_duration_options
frappe.patches.v13_0.replace_old_data_import # 2020-06-24

View file

@ -2,7 +2,7 @@ from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.exists('DocType', 'View log'):
if frappe.db.table_exists('View log'):
# for mac users direct renaming would not work since mysql for mac saves table name in lower case
# so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589 ,

View file

@ -0,0 +1,16 @@
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
if not frappe.db.exists("DocType", "Data Import Beta"):
return
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import Legacy`")
frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.rename_doc('DocType', 'Data Import Beta', 'Data Import')

View file

@ -110,4 +110,9 @@ ul.tree-children {
}
.section-header {
direction: ltr;
}
.ql-editor {
direction: rtl;
text-align: right;
}

View file

@ -13,36 +13,6 @@ frappe.data_import.DataExporter = class DataExporter {
this.dialog = new frappe.ui.Dialog({
title: __('Export Data'),
fields: [
{
fieldtype: 'Select',
fieldname: 'exporting_for',
label: __('Exporting For'),
options: [
{
label: __('Insert New Records'),
value: 'Insert New Records'
},
{
label: __('Update Existing Records'),
value: 'Update Existing Records'
}
],
change: () => {
let exporting_for = this.dialog.get_value('exporting_for');
this.dialog.set_value(
'export_records',
exporting_for === 'Insert New Records' ? 'blank_template' : 'all'
);
// Force ID field to be exported when updating existing records
let id_field = this.dialog.get_field(this.doctype).options[0];
if (id_field.value === 'name' && id_field.$checkbox) {
id_field.$checkbox
.find('input')
.prop('disabled', exporting_for === 'Update Existing Records');
}
}
},
{
fieldtype: 'Select',
fieldname: 'export_records',
@ -65,7 +35,7 @@ frappe.data_import.DataExporter = class DataExporter {
value: 'blank_template'
}
],
default: 'blank_template',
default: this.exporting_for === 'Insert New Records' ? 'blank_template' : 'all',
change: () => {
this.update_record_count_message();
}
@ -119,10 +89,6 @@ frappe.data_import.DataExporter = class DataExporter {
on_page_show: () => this.select_mandatory()
});
if (this.exporting_for) {
this.dialog.set_value('exporting_for', this.exporting_for);
}
this.make_filter_area();
this.make_select_all_buttons();
this.update_record_count_message();
@ -132,7 +98,7 @@ frappe.data_import.DataExporter = class DataExporter {
export_records() {
let method =
'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template';
'/api/method/frappe.core.doctype.data_import.data_import.download_template';
let multicheck_fields = this.dialog.fields
.filter(df => df.fieldtype === 'MultiCheck')
@ -172,15 +138,17 @@ frappe.data_import.DataExporter = class DataExporter {
}
make_select_all_buttons() {
let for_insert = this.exporting_for === 'Insert New Records';
let section_title = for_insert ? __('Select Fields To Insert') : __('Select Fields To Update');
let $select_all_buttons = $(`
<div>
<h6 class="form-section-heading uppercase">${__('Select fields to export')}</h6>
<h6 class="form-section-heading uppercase">${section_title}</h6>
<button class="btn btn-default btn-xs" data-action="select_all">
${__('Select All')}
</button>
<button class="btn btn-default btn-xs" data-action="select_mandatory">
${for_insert ? `<button class="btn btn-default btn-xs" data-action="select_mandatory">
${__('Select Mandatory')}
</button>
</button>`: ''}
<button class="btn btn-default btn-xs" data-action="unselect_all">
${__('Unselect All')}
</button>
@ -285,11 +253,9 @@ frappe.data_import.DataExporter = class DataExporter {
}
get_filters() {
return this.filter_group.get_filters().reduce((acc, filter) => {
return Object.assign(acc, {
[filter[1]]: [filter[2], filter[3]]
});
}, {});
return this.filter_group.get_filters().map(filter => {
return filter.slice(0, 4);
});
}
get_multicheck_options(doctype, child_fieldname = null) {
@ -308,6 +274,9 @@ frappe.data_import.DataExporter = class DataExporter {
? this.column_map[child_fieldname]
: this.column_map[doctype];
let is_field_mandatory = df => (df.fieldname === 'name' && !child_fieldname)
|| (df.reqd && this.exporting_for == 'Insert New Records');
return fields
.filter(df => {
if (autoname_field && df.fieldname === autoname_field.fieldname) {
@ -323,7 +292,7 @@ frappe.data_import.DataExporter = class DataExporter {
return {
label,
value: df.fieldname,
danger: df.reqd,
danger: is_field_mandatory(df),
checked: false,
description: `${df.fieldname} ${df.reqd ? __('(Mandatory)') : ''}`
};

View file

@ -245,11 +245,12 @@ frappe.data_import.ImportPreview = class ImportPreview {
let fieldname;
if (!df) {
fieldname = null;
} else if (col.map_to_field) {
fieldname = col.map_to_field;
} else if (col.is_child_table_field) {
fieldname = `${col.child_table_df.fieldname}.${df.fieldname}`;
} else {
fieldname =
df.parent === this.doctype
? df.fieldname
: `${df.parent}:${df.fieldname}`;
fieldname = df.fieldname;
}
return [
{
@ -272,7 +273,7 @@ frappe.data_import.ImportPreview = class ImportPreview {
label: __("Don't Import"),
value: "Don't Import"
}
].concat(column_picker_fields.get_fields_as_options()),
].concat(get_fields_as_options(this.doctype, column_picker_fields)),
default: fieldname || "Don't Import",
change() {
changed.push(i);
@ -328,3 +329,29 @@ frappe.data_import.ImportPreview = class ImportPreview {
});
}
};
function get_fields_as_options(doctype, column_map) {
let keys = [doctype];
frappe.meta.get_table_fields(doctype).forEach(df => {
keys.push(df.fieldname);
});
// flatten array
return [].concat(
...keys.map(key => {
return column_map[key].map(df => {
let label = df.label;
let value = df.fieldname;
if (doctype !== key) {
let table_field = frappe.meta.get_docfield(doctype, key);
label = `${df.label} (${table_field.label})`;
value = `${table_field.fieldname}.${df.fieldname}`;
}
return {
label,
value,
description: value
};
});
})
);
}

View file

@ -91,12 +91,26 @@ frappe.db = {
});
},
count: function(doctype, args={}) {
return new Promise(resolve => {
frappe.call({
method: 'frappe.client.get_count',
type: 'GET',
args: Object.assign(args, { doctype })
}).then(r => resolve(r.message));
let filters = args.filters || {};
const with_child_table_filter = Array.isArray(filters) && filters.some(filter => {
return filter[0] !== doctype;
});
const fields = [
// cannot break this line as it adds extra \n's and \t's which breaks the query
`count(${with_child_table_filter ? 'distinct': ''} ${frappe.model.get_full_column_name('name', doctype)}) AS total_count`
];
return frappe.call({
type: 'GET',
method: 'frappe.desk.reportview.get',
args: {
doctype,
filters,
fields,
}
}).then(r => {
return r.message.values[0][0];
});
},
get_link_options(doctype, txt = '', filters={}) {

View file

@ -5,7 +5,8 @@ frappe.ui.form.ControlButton = frappe.ui.form.ControlData.extend({
},
make_input: function() {
var me = this;
this.$input = $('<button class="btn btn-default btn-xs">')
const btn_type = this.df.primary ? 'btn-primary': 'btn-default';
this.$input = $(`<button class="btn btn-xs ${btn_type}">`)
.prependTo(me.input_area)
.on("click", function() {
me.onclick();
@ -16,16 +17,15 @@ frappe.ui.form.ControlButton = frappe.ui.form.ControlData.extend({
this.toggle_label(false);
},
onclick: function() {
if(this.frm && this.frm.doc) {
if(this.frm.script_manager.has_handlers(this.df.fieldname, this.doctype)) {
if (this.frm && this.frm.doc) {
if (this.frm.script_manager.has_handlers(this.df.fieldname, this.doctype)) {
this.frm.script_manager.trigger(this.df.fieldname, this.doctype, this.docname);
} else {
if (this.df.options) {
this.run_server_script();
}
}
}
else if(this.df.click) {
} else if (this.df.click) {
this.df.click();
}
},

View file

@ -39,7 +39,7 @@ frappe.ui.form.ControlCode = frappe.ui.form.ControlText.extend({
toggle_label() {
const button_label = this.expanded ? __('Collapse') : __('Expand');
this.$expand_button.text(button_label);
this.$expand_button && this.$expand_button.text(button_label);
},
set_language() {

View file

@ -1,3 +1,5 @@
frappe.provide('frappe.phone_call');
frappe.ui.form.ControlData = frappe.ui.form.ControlInput.extend({
html_element: "input",
input_type: "text",
@ -21,10 +23,27 @@ frappe.ui.form.ControlData = frappe.ui.form.ControlInput.extend({
this.bind_change_event();
this.bind_focusout();
this.setup_autoname_check();
if (this.df.options == 'Phone') {
this.setup_phone();
}
// somehow this event does not bubble up to document
// after v7, if you can debug, remove this
},
setup_phone() {
if (frappe.phone_call.handler) {
this.$wrapper.find('.control-input')
.append(`
<span class="phone-btn">
<a class="btn-open no-decoration" title="${__('Make a call')}">
<i class="fa fa-phone"></i></a>
</span>
`)
.find('.phone-btn')
.click(() => {
frappe.phone_call.handler(this.get_value(), this.frm);
});
}
},
setup_autoname_check: function() {
if (!this.df.parent) return;
this.meta = frappe.get_meta(this.df.parent);

View file

@ -1,10 +1,5 @@
import Quill from 'quill';
// replace <p> tag with <div>
const Block = Quill.import('blots/block');
Block.tagName = 'DIV';
Quill.register(Block, true);
const CodeBlockContainer = Quill.import('formats/code-block-container');
CodeBlockContainer.tagName = 'PRE';
Quill.register(CodeBlockContainer, true);
@ -17,7 +12,8 @@ Table.create = (value) => {
node.classList.add('table');
node.classList.add('table-bordered');
return node;
}
};
Quill.register(Table, true);
// link without href
@ -28,7 +24,7 @@ class MyLink extends Link {
let node = super.create(value);
value = this.sanitize(value);
node.setAttribute('href', value);
if(value.startsWith('/') || value.indexOf(window.location.host)) {
if (value.startsWith('/') || value.indexOf(window.location.host)) {
// no href if internal link
node.removeAttribute('target');
}
@ -73,7 +69,7 @@ Quill.register(CustomColor, true);
frappe.ui.form.ControlTextEditor = frappe.ui.form.ControlCode.extend({
make_wrapper() {
this._super();
this.$wrapper.find(".like-disabled-input").addClass('text-editor-print');
this.$wrapper.find(".like-disabled-input").addClass('ql-editor');
},
make_input() {
@ -203,91 +199,9 @@ frappe.ui.form.ControlTextEditor = frappe.ui.form.ControlCode.extend({
get_input_value() {
let value = this.quill ? this.quill.root.innerHTML : '';
// quill keeps ol as a common container for both type of lists
// and uses css for appearances, this is not semantic
// so we convert ol to ul if it is unordered
const $value = $(`<div>${value}</div>`);
$value.find('ol li[data-list=bullet]:first-child').each((i, li) => {
let $li = $(li);
let $parent = $li.parent();
let $children = $parent.children();
let $ul = $('<ul>').append($children);
$parent.replaceWith($ul);
});
value = this.convertLists($value.html());
return value;
},
// hack
// https://github.com/quilljs/quill/issues/979
convertLists(richtext) {
const tempEl = window.document.createElement('div');
tempEl.setAttribute('style', 'display: none;');
tempEl.innerHTML = richtext;
const startLi = '::startli::';
const endLi = '::endli::';
['ul','ol'].forEach((type) => {
const startTag = `::start${type}::`;
const endTag = `::end${type}::`;
// Grab each list, and work on it in turn
Array.from(tempEl.querySelectorAll(type)).forEach((outerListEl) => {
const listChildren = Array.from(outerListEl.children).filter((el) => el.tagName === 'LI');
let lastLiLevel = 0;
let currentLiLevel = 0;
let difference = 0;
// Now work through each li in this list
for (let i = 0; i < listChildren.length; i++) {
const currentLi = listChildren[i];
lastLiLevel = currentLiLevel;
currentLiLevel = this.getListLevel(currentLi);
difference = currentLiLevel - lastLiLevel;
// we only need to add tags if the level is changing
if (difference > 0) {
currentLi.before((startLi + startTag).repeat(difference));
} else if (difference < 0) {
currentLi.before((endTag + endLi).repeat(-difference));
}
if (i === listChildren.length - 1) {
// last li, account for the fact that it might not be at level 0
currentLi.after((endTag + endLi).repeat(currentLiLevel));
}
}
});
});
// Get the content in the element and replace the temporary tags with new ones
let newContent = tempEl.innerHTML;
newContent = newContent.replace(/::startul::/g, '<ul>');
newContent = newContent.replace(/::endul::/g, '</ul>');
newContent = newContent.replace(/::startol::/g, '<ol>');
newContent = newContent.replace(/::endol::/g, '</ol>');
newContent = newContent.replace(/::startli::/g, '<li>');
newContent = newContent.replace(/::endli::/g, '</li>');
// remove quill classes
newContent = newContent.replace(/data-list=.bullet./g, '');
newContent = newContent.replace(/class=.ql-indent-../g, '');
// ul/ol should not be inside another li
newContent = newContent.replace(/<\/li><li><ul>/g, '<ul>');
newContent = newContent.replace(/<\/li><li><ol>/g, '<ol>');
tempEl.remove();
return newContent;
},
getListLevel(el) {
const className = el.className || '0';
return +className.replace(/[^\d]/g, '');
},
set_focus() {
this.quill.focus();
}

View file

@ -120,9 +120,11 @@ frappe.ui.form.Timeline = class Timeline {
display_automatic_link_email() {
let docinfo = this.frm.get_docinfo();
if (docinfo.document_email){
if (docinfo.document_email) {
let link = __("Send an email to {0} to link it here", [`<b><a class="timeline-email-import-link copy-to-clipboard">${docinfo.document_email}</a></b>`]);
$('.timeline-email-import').html(link);
const email_link = $('.timeline-email-import');
email_link.removeClass('hide');
email_link.html(link);
}
}
@ -180,12 +182,15 @@ frappe.ui.form.Timeline = class Timeline {
// append energy point logs
timeline = timeline.concat(this.get_energy_point_logs());
// custom contents
timeline = timeline.concat(this.get_additional_timeline_content());
// append milestones
timeline = timeline.concat(this.get_milestones());
// sort
timeline
.filter(a => a.content)
.filter(a => a.content || a.template)
.sort((b, c) => me.compare_dates(b, c))
.forEach(d => {
d.frm = me.frm;
@ -407,7 +412,10 @@ frappe.ui.form.Timeline = class Timeline {
c.original_content = c.content;
c.content = frappe.utils.toggle_blockquote(c.content);
}
if(!frappe.utils.is_html(c.content)) {
if (c.template) {
c.content_html = frappe.render_template(c.template, c.template_data);
} else if (!frappe.utils.is_html(c.content)) {
c.content_html = frappe.markdown(__(c.content));
} else {
c.content_html = c.content;
@ -529,6 +537,10 @@ frappe.ui.form.Timeline = class Timeline {
return energy_point_logs;
}
get_additional_timeline_content() {
return this.frm.get_docinfo().additional_timeline_content || [];
}
get_milestones() {
let milestones = this.frm.get_docinfo().milestones;
milestones.map(log => {

View file

@ -340,7 +340,6 @@ frappe.ui.form.Form = class FrappeForm {
switch_doc(docname) {
// record switch
if(this.docname != docname && (!this.meta.in_dialog || this.in_form) && !this.meta.istable) {
frappe.utils.scroll_to(0);
if (this.print_preview) {
this.print_preview.hide();
}

View file

@ -70,7 +70,10 @@ frappe.form.formatters = {
if ( decimals.length < 3 || decimals.length < precision ) {
const fraction = frappe.model.get_value(":Currency", currency, "fraction_units") || 100; // if not set, minimum 2.
precision = cstr(fraction).length - 1;
if (decimals.length < cstr(fraction).length) {
precision = cstr(fraction).length - 1;
}
}
}

View file

@ -101,19 +101,25 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
columns[1] = [];
columns[2] = [];
Object.keys(this.setters).forEach((setter, index) => {
let df_prop = frappe.meta.docfield_map[this.doctype][setter];
// Index + 1 to start filling from index 1
// Since Search is a standrd field already pushed
columns[(index + 1) % 3].push({
fieldtype: df_prop.fieldtype,
label: df_prop.label,
fieldname: setter,
options: df_prop.options,
default: this.setters[setter]
if ($.isArray(this.setters)) {
this.setters.forEach((setter, index) => {
columns[(index + 1) % 3].push(setter);
});
});
} else {
Object.keys(this.setters).forEach((setter, index) => {
let df_prop = frappe.meta.docfield_map[this.doctype][setter];
// Index + 1 to start filling from index 1
// Since Search is a standrd field already pushed
columns[(index + 1) % 3].push({
fieldtype: df_prop.fieldtype,
label: df_prop.label,
fieldname: setter,
options: df_prop.options,
default: this.setters[setter]
});
});
}
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/seal
if (Object.seal) {
@ -217,7 +223,13 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
let contents = ``;
let columns = ["name"];
columns = columns.concat(Object.keys(this.setters));
if ($.isArray(this.setters)) {
for (let df of this.setters) {
columns.push(df.fieldname);
}
} else {
columns = columns.concat(Object.keys(this.setters));
}
columns.forEach(function (column) {
contents += `<div class="list-item__content ellipsis">
@ -290,16 +302,24 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
let filters = this.get_query ? this.get_query().filters : {} || {};
let filter_fields = [];
Object.keys(this.setters).forEach(function (setter) {
var value = me.dialog.fields_dict[setter].get_value();
if (me.dialog.fields_dict[setter].df.fieldtype == "Data" && value) {
filters[setter] = ["like", "%" + value + "%"];
} else {
filters[setter] = value || undefined;
me.args[setter] = filters[setter];
filter_fields.push(setter);
if ($.isArray(this.setters)) {
for (let df of this.setters) {
filters[df.fieldname] = me.dialog.fields_dict[df.fieldname].get_value() || undefined;
me.args[df.fieldname] = filters[df.fieldname];
filter_fields.push(df.fieldname);
}
});
} else {
Object.keys(this.setters).forEach(function (setter) {
var value = me.dialog.fields_dict[setter].get_value();
if (me.dialog.fields_dict[setter].df.fieldtype == "Data" && value) {
filters[setter] = ["like", "%" + value + "%"];
} else {
filters[setter] = value || undefined;
me.args[setter] = filters[setter];
filter_fields.push(setter);
}
});
}
let filter_group = this.get_custom_filters();
Object.assign(filters, filter_group);

View file

@ -6,14 +6,14 @@ import './share';
import './review';
import './document_follow';
import './user_image';
import './form_viewers';
import './form_sidebar_users';
frappe.ui.form.Sidebar = Class.extend({
init: function(opts) {
frappe.ui.form.Sidebar = class {
constructor(opts) {
$.extend(this, opts);
},
}
make: function() {
make () {
var sidebar_content = frappe.render_template("form_sidebar", {doctype: this.frm.doctype, frm:this.frm});
this.sidebar = $('<div class="form-sidebar overlay-sidebar hidden-xs hidden-sm"></div>')
@ -43,9 +43,9 @@ frappe.ui.form.Sidebar = Class.extend({
this.refresh();
},
}
bind_events: function() {
bind_events () {
var me = this;
// scroll to comments
@ -58,7 +58,7 @@ frappe.ui.form.Sidebar = Class.extend({
me.refresh_like();
});
});
},
}
setup_keyboard_shortcuts() {
// add assignment shortcut
@ -66,9 +66,9 @@ frappe.ui.form.Sidebar = Class.extend({
frappe.ui.keys
.get_shortcut_group(this.page)
.add(assignment_link);
},
}
refresh: function() {
refresh () {
if (this.frm.doc.__islocal) {
this.sidebar.toggle(false);
} else {
@ -113,9 +113,9 @@ frappe.ui.form.Sidebar = Class.extend({
this.refresh_like();
frappe.ui.form.set_user_image(this.frm);
}
},
}
show_auto_repeat_status: function() {
show_auto_repeat_status() {
if (this.frm.meta.allow_auto_repeat && this.frm.doc.auto_repeat) {
const me = this;
frappe.call({
@ -135,16 +135,16 @@ frappe.ui.form.Sidebar = Class.extend({
}
});
}
},
}
refresh_comments: function() {
refresh_comments() {
$.map(this.frm.timeline.get_communications(), function(c) {
return (c.communication_type==="Communication" || (c.communication_type=="Comment" && c.comment_type==="Comment")) ? c : null;
});
this.comments.find(".n-comments").html(this.frm.get_docinfo().total_comments);
},
}
make_tags: function() {
make_tags() {
if (this.frm.meta.issingle) {
this.sidebar.find(".form-tags").toggle(false);
return;
@ -157,54 +157,62 @@ frappe.ui.form.Sidebar = Class.extend({
this.frm.tags && this.frm.tags.refresh(user_tags);
}
});
},
make_attachments: function() {
}
make_attachments() {
var me = this;
this.frm.attachments = new frappe.ui.form.Attachments({
parent: me.sidebar.find(".form-attachments"),
frm: me.frm
});
},
make_assignments: function() {
}
make_assignments() {
this.frm.assign_to = new frappe.ui.form.AssignTo({
parent: this.sidebar.find(".form-assignments"),
frm: this.frm
});
},
make_shared: function() {
}
make_shared() {
this.frm.shared = new frappe.ui.form.Share({
frm: this.frm,
parent: this.sidebar.find(".form-shared")
});
},
make_viewers: function() {
this.frm.viewers = new frappe.ui.form.Viewers({
}
make_viewers() {
this.frm.viewers = new frappe.ui.form.SidebarUsers({
frm: this.frm,
parent: this.sidebar.find(".form-viewers")
$wrapper: this.sidebar,
});
},
add_user_action: function(label, click) {
}
add_user_action(label, click) {
return $('<a>').html(label).appendTo($('<li class="user-action-row">')
.appendTo(this.user_actions.removeClass("hidden"))).on("click", click);
},
clear_user_actions: function() {
}
clear_user_actions() {
this.user_actions.addClass("hidden")
this.user_actions.find(".user-action-row").remove();
},
}
make_like: function() {
make_like() {
this.like_wrapper = this.sidebar.find(".liked-by");
this.like_icon = this.sidebar.find(".liked-by .octicon-heart");
this.like_count = this.sidebar.find(".liked-by .likes-count");
frappe.ui.setup_like_popover(this.sidebar.find(".liked-by-parent"), ".liked-by");
},
make_follow: function(){
}
make_follow() {
this.frm.follow = new frappe.ui.form.DocumentFollow({
frm: this.frm,
parent: this.sidebar.find(".followed-by-section")
});
},
refresh_like: function() {
}
refresh_like() {
if (!this.like_icon) {
return;
}
@ -217,21 +225,21 @@ frappe.ui.form.Sidebar = Class.extend({
.attr("data-name", this.frm.doc.name);
this.like_count.text(JSON.parse(this.frm.doc._liked_by || "[]").length);
},
}
refresh_image: function() {
},
refresh_image() {
}
make_review: function() {
make_review() {
if (frappe.boot.energy_points_enabled && !this.frm.is_new()) {
this.frm.reviews = new frappe.ui.form.Review({
parent: this.sidebar.find(".form-reviews"),
frm: this.frm
});
}
},
}
reload_docinfo: function(callback) {
reload_docinfo(callback) {
frappe.call({
method: "frappe.desk.form.load.get_docinfo",
args: {
@ -248,4 +256,4 @@ frappe.ui.form.Sidebar = Class.extend({
});
}
});
};

View file

@ -0,0 +1,91 @@
frappe.ui.form.SidebarUsers = class {
constructor(opts) {
$.extend(this, opts);
}
get_users(type) {
let docinfo = this.frm.get_docinfo();
return docinfo ? docinfo[type] || null: null;
}
refresh(data_updated, type) {
this.parent = type == 'viewers'? this.$wrapper.find('.form-viewers'): this.$wrapper.find('.form-typers');
this.parent.empty();
const users = this.get_users(type);
users && this.show_in_sidebar(users, type, data_updated);
}
show_in_sidebar(users, type, show_alert) {
let sidebar_users = [];
let new_users = [];
let current_users = [];
const message = type == 'viewers' ? 'viewing this document': 'composing an email';
users.current.forEach(username => {
if (username === frappe.session.user) {
// current user
return;
}
var user_info = frappe.user_info(username);
sidebar_users.push({
image: user_info.image,
fullname: user_info.fullname,
abbr: user_info.abbr,
color: user_info.color,
title: __("{0} is currently {1}", [user_info.fullname, message])
});
if (users.new.indexOf(username) !== -1) {
new_users.push(user_info.fullname);
}
current_users.push(user_info.fullname);
});
if (sidebar_users.length) {
this.parent.parent().removeClass('hidden');
this.parent.append(frappe.render_template('users_in_sidebar', {'users': sidebar_users}));
} else {
this.parent.parent().addClass('hidden');
}
// For typers always show the alert
// For viewers show the alert to new user viewing this document
const alert_users = type == 'viewers' ? new_users : current_users;
show_alert && this.show_alert(alert_users, message);
}
show_alert(users, message) {
if (users.length) {
if (users.length===1) {
frappe.show_alert(__('{0} is currently {1}', [users[0], message]));
} else {
frappe.show_alert(__('{0} are currently {1}', [frappe.utils.comma_and(users), message]));
}
}
}
};
frappe.ui.form.set_users = function(data, type) {
const doctype = data.doctype;
const docname = data.docname;
const docinfo = frappe.model.get_docinfo(doctype, docname);
const past_users = ((docinfo && docinfo[type]) || {}).past || [];
const users = data.users || [];
const new_users = users.filter(user => !past_users.includes(user));
frappe.model.set_docinfo(doctype, docname, type, {
past: past_users.concat(new_users),
new: new_users,
current: users
});
if (cur_frm && cur_frm.doc && cur_frm.doc.doctype===doctype && cur_frm.doc.name==docname) {
cur_frm.viewers.refresh(true, type);
}
};

Some files were not shown because too many files have changed in this diff Show more