feat: New Data Import (#8310)

This commit is contained in:
Faris Ansari 2019-09-30 23:54:54 +05:30 committed by GitHub
commit 63cb27b924
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
39 changed files with 2912 additions and 108 deletions

View file

@@ -293,6 +293,33 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
frappe.destroy()
@click.command('data-import')
@click.option('--file', 'file_path', type=click.Path(), required=True, help="Path to import file (.csv, .xlsx)")
@click.option('--doctype', type=str, required=True)
@click.option('--type', 'import_type', type=click.Choice(['Insert', 'Update'], case_sensitive=False), default='Insert', help="Insert New Records or Update Existing Records")
@click.option('--submit-after-import', default=False, is_flag=True, help='Submit document after importing it')
@click.option('--mute-emails/--no-mute-emails', default=True, help='Mute emails during import (default: muted)')
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
	"""Import documents in bulk from CSV or XLSX using data import.

	Fix: --mute-emails was declared with is_flag=True and default=True, which
	made it impossible to un-mute emails from the CLI; it is now a paired
	boolean option (--mute-emails / --no-mute-emails), backward compatible
	with the old flag spelling.
	"""
	from frappe.core.doctype.data_import.importer_new import Importer
	site = get_site(context)

	frappe.init(site=site)
	frappe.connect()

	data_import = frappe.new_doc('Data Import Beta')
	data_import.submit_after_import = submit_after_import
	data_import.mute_emails = mute_emails
	# click supplies 'Insert' by default, but guard against a direct call
	# with import_type=None so .lower() cannot crash
	import_type = (import_type or 'Insert').lower()
	data_import.import_type = 'Insert New Records' if import_type == 'insert' else 'Update Existing Records'

	i = Importer(doctype=doctype, file_path=file_path, data_import=data_import, console=True)
	i.import_data()

	frappe.destroy()
@click.command('bulk-rename')
@click.argument('doctype')
@click.argument('path')
@@ -715,6 +742,7 @@ commands = [
export_json,
get_version,
import_csv,
data_import,
import_doc,
make_app,
mysql,

View file

@@ -0,0 +1,267 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import frappe
from frappe.model import display_fieldtypes, no_value_fields, table_fields
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
from .importer_new import INVALID_VALUES
class Exporter:
	def __init__(
		self,
		doctype,
		export_fields=None,
		export_data=False,
		export_filters=None,
		export_page_length=None,
		file_type="CSV",
	):
		"""
		Exports records of a DocType for use with Importer
		:param doctype: Document Type to export
		:param export_fields=None: One of 'All', 'Mandatory' or {'DocType': ['field1', 'field2'], 'Child DocType': ['childfield1']}
		:param export_data=False: Whether to export data as well
		:param export_filters=None: The filters (dict or list) which is used to query the records
		:param export_page_length=None: Maximum number of records fetched by the query (None fetches all)
		:param file_type: One of 'Excel' or 'CSV'
		"""
		self.doctype = doctype
		self.meta = frappe.get_meta(doctype)
		self.export_fields = export_fields
		self.export_filters = export_filters
		self.export_page_length = export_page_length
		self.file_type = file_type

		# this will contain the csv content
		self.csv_array = []

		# fields that get exported
		# can be All, Mandatory or User Selected Fields
		self.fields = self.get_all_exportable_fields()
		self.add_header()

		if export_data:
			self.data = self.get_data_to_export()
		else:
			self.data = []
		self.add_data()
def get_all_exportable_fields(self):
return self.get_exportable_parent_fields() + self.get_exportable_children_fields()
def get_exportable_parent_fields(self):
parent_fields = self.get_exportable_fields(self.doctype)
# if autoname is based on field
# then merge ID and the field column title as "ID (Autoname Field)"
autoname = self.meta.autoname
if autoname and autoname.startswith("field:"):
fieldname = autoname[len("field:") :]
autoname_field = self.meta.get_field(fieldname)
if autoname_field:
name_field = parent_fields[0]
name_field.label = "ID ({})".format(autoname_field.label)
# remove the autoname field as it is a duplicate of ID field
parent_fields = [
df for df in parent_fields if df.fieldname != autoname_field.fieldname
]
return parent_fields
def get_exportable_children_fields(self):
	"""Collect exportable fields from every child table of the doctype."""
	fields = []
	for df in self.meta.fields:
		if df.fieldtype in table_fields:
			fields.extend(self.get_exportable_fields(df.options))
	return fields
def get_exportable_fields(self, doctype):
	"""Docfields of *doctype* eligible for export, filtered by self.export_fields.

	Filtering modes: "Mandatory" keeps only required fields, "All" keeps
	everything, and a dict whitelists fieldnames per doctype. A synthetic
	"ID" (name) field is always prepended when any field survives.
	"""
	def is_exportable(df):
		# layout/display-only fields carry no data
		return (
			df.fieldtype not in display_fieldtypes
			and df.fieldtype not in no_value_fields
		)

	meta = frappe.get_meta(doctype)
	fields = [df for df in meta.fields if is_exportable(df)]

	if self.export_fields == "Mandatory":
		fields = [df for df in fields if df.reqd]
	if self.export_fields == "All":
		fields = list(fields)
	elif isinstance(self.export_fields, dict):
		whitelist = self.export_fields.get(doctype, [])
		fields = [df for df in fields if df.fieldname in whitelist]

	name_field = frappe._dict(
		{
			"fieldtype": "Data",
			"fieldname": "name",
			"label": "ID",
			"reqd": 1,
			"parent": doctype,
		}
	)
	return [name_field] + fields if fields else []
def get_data_to_export(self):
	"""Query the database and return export rows (lists) for self.fields.

	Requires export permission on the doctype. Rows are post-processed so
	child-table values line up under their parent row without duplicated
	parent values or gaps.
	"""
	frappe.permissions.can_export(self.doctype, raise_exception=True)

	def get_column_name(df):
		# fully qualified column name for the query
		return "`tab{0}`.`{1}`".format(df.parent, df.fieldname)

	fields = [get_column_name(df) for df in self.fields]
	filters = self.export_filters

	# nested sets keep tree order (lft); others export newest first
	if self.meta.is_nested_set():
		order_by = "`tab{0}`.`lft` ASC".format(self.doctype)
	else:
		order_by = "`tab{0}`.`creation` DESC".format(self.doctype)

	data = frappe.db.get_list(
		self.doctype,
		filters=filters,
		fields=fields,
		limit_page_length=self.export_page_length,
		order_by=order_by,
		as_list=1,
	)
	data = self.remove_duplicate_values(data)
	data = self.remove_row_gaps(data)
	data = self.remove_empty_rows(data)
	# data = self.remove_values_from_name_column(data)
	return data
def remove_duplicate_values(self, data):
out = []
doctypes = set([df.parent for df in self.fields])
def name_exists_in_column_before_row(name, column_index, row_index):
column_values = [row[column_index] for i, row in enumerate(data) if i < row_index]
return name in column_values
for i, row in enumerate(data):
# first row is fine
if i == 0:
out.append(row)
continue
row = list(row)
for doctype in doctypes:
name_index = self.get_name_column_index(doctype)
name = row[name_index]
column_indexes = self.get_column_indexes(doctype)
if name_exists_in_column_before_row(name, name_index, i):
# remove the values from the row
row = [None if i in column_indexes else d for i, d in enumerate(row)]
out.append(row)
return out
def remove_row_gaps(self, data):
	"""Compact child-table values upward so they sit directly under their parent row.

	NOTE(review): get_nearest_empty_row_index returns None when the row is
	already in place, and `if row_index:` would also skip a returned index 0 —
	confirm index 0 can never be a move target before changing this.
	"""
	doctypes = set([df.parent for df in self.fields if df.parent != self.doctype])

	def get_nearest_empty_row_index(col_index, row_index):
		# walk upward from row_index until a non-empty cell is found in the
		# column; the row just below it is the nearest empty slot
		col_values = [row[col_index] for row in data]
		i = row_index - 1
		while not col_values[i]:
			i = i - 1
		out = i + 1
		if row_index != out:
			return out

	for i, row in enumerate(data):
		# if this is the row that contains parent values then skip
		if row[0]:
			continue

		for doctype in doctypes:
			name_index = self.get_name_column_index(doctype)
			name = row[name_index]
			column_indexes = self.get_column_indexes(doctype)
			if not name:
				continue
			row_index = get_nearest_empty_row_index(name_index, i)
			if row_index:
				# move this doctype's values up into the empty slot
				for col_index in column_indexes:
					data[row_index][col_index] = row[col_index]
					row[col_index] = None

	return data
# pylint: disable=R0201
def remove_empty_rows(self, data):
	"""Drop rows in which every cell is an empty/invalid value."""
	non_empty = []
	for row in data:
		if any(cell not in INVALID_VALUES for cell in row):
			non_empty.append(row)
	return non_empty
def remove_values_from_name_column(self, data):
out = []
name_columns = [i for i, df in enumerate(self.fields) if df.fieldname == "name"]
for row in data:
out.append(["" if i in name_columns else value for i, value in enumerate(row)])
return out
def get_name_column_index(self, doctype):
for i, df in enumerate(self.fields):
if df.parent == doctype and df.fieldname == "name":
return i
return -1
def get_column_indexes(self, doctype):
return [i for i, df in enumerate(self.fields) if df.parent == doctype]
def add_header(self):
def get_label(df):
if df.parent == self.doctype:
return df.label
else:
return "{0} ({1})".format(df.label, df.parent)
header = [get_label(df) for df in self.fields]
self.csv_array.append(header)
def add_data(self):
self.csv_array += self.data
def get_csv_array(self):
return self.csv_array
def get_csv_array_for_export(self):
csv_array = self.csv_array
if not self.data:
# add 2 empty rows
csv_array += [[]] * 2
return csv_array
def build_response(self):
if self.file_type == 'CSV':
self.build_csv_response()
elif self.file_type == 'Excel':
self.build_xlsx_response()
def build_csv_response(self):
	"""Send the export as a CSV download."""
	rows = self.get_csv_array_for_export()
	build_csv_response(rows, self.doctype)
def build_xlsx_response(self):
	"""Send the export as an Excel download."""
	rows = self.get_csv_array_for_export()
	build_xlsx_response(rows, self.doctype)

View file

@@ -0,0 +1,951 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
import io
import os
import json
import timeit
import frappe
from datetime import datetime
from frappe import _
from frappe.utils import cint, flt, update_progress_bar
from frappe.utils.csvutils import read_csv_content
from frappe.utils.xlsxutils import (
read_xlsx_file_from_attached_file,
read_xls_file_from_attached_file,
)
from frappe.model import no_value_fields, table_fields
# cell values treated as "empty" throughout parsing and import
INVALID_VALUES = ["", None]
# number of rows shown in the UI preview before truncation
MAX_ROWS_IN_PREVIEW = 10
# pylint: disable=R0201
class Importer:
	def __init__(
		self, doctype, data_import=None, file_path=None, content=None, console=False
	):
		"""Parse an import file (CSV/XLSX/XLS) for *doctype*.

		:param data_import: "Data Import Beta" document driving the import (optional)
		:param file_path: path to read the file from (used when no attached file)
		:param content: raw file content (assumed CSV when no extension is known)
		:param console: True when run from the command line (prints progress/warnings)
		"""
		self.doctype = doctype
		self.template_options = frappe._dict({"remap_column": {}})
		self.console = console

		if data_import:
			self.data_import = data_import
			if self.data_import.template_options:
				template_options = frappe.parse_json(self.data_import.template_options)
				self.template_options.update(template_options)
		else:
			self.data_import = None

		self.header_row = None
		self.data = None
		# used to store date formats guessed from data rows per column
		self._guessed_date_formats = {}
		# used to store eta during import
		self.last_eta = 0
		# used to collect warnings during template parsing
		# and show them to user
		self.warnings = []

		self.meta = frappe.get_meta(doctype)
		self.prepare_content(file_path, content)
		self.parse_data_from_template()
def prepare_content(self, file_path, content):
	"""Load file content (attached file, path, or raw content) and parse it.

	Fix: the extension was taken with split(".")[1], which returns the wrong
	part for multi-dot filenames (e.g. "report.2019.xlsx" -> "2019"); use the
	last dot-separated part instead.
	"""
	extension = None
	if self.data_import and self.data_import.import_file:
		file_doc = frappe.get_doc("File", {"file_url": self.data_import.import_file})
		content = file_doc.get_content()
		extension = file_doc.file_name.split(".")[-1]
	if file_path:
		content, extension = self.read_file(file_path)
	if not extension:
		# raw content with no filename: assume CSV
		extension = "csv"
	if content:
		self.read_content(content, extension)

	self.validate_template_content()
	self.remove_empty_rows_and_columns()
def read_file(self, file_path):
extn = file_path.split(".")[1]
file_content = None
with io.open(file_path, mode="rb") as f:
file_content = f.read()
return file_content, extn
def read_content(self, content, extension):
	"""Parse raw file *content* into self.header_row and self.data.

	Fix: an unrecognized extension previously fell through all branches and
	crashed with NameError on `data`; raise a clear error instead.
	"""
	if extension == "csv":
		data = read_csv_content(content)
	elif extension == "xlsx":
		data = read_xlsx_file_from_attached_file(fcontent=content)
	elif extension == "xls":
		data = read_xls_file_from_attached_file(content)
	else:
		frappe.throw(_("Import template must be of type .csv, .xlsx or .xls"))

	self.header_row = data[0]
	self.data = data[1:]
def validate_template_content(self):
column_count = len(self.header_row)
if any([len(row) != column_count and len(row) != 0 for row in self.data]):
frappe.throw(
_("Number of columns does not match with data"), title=_("Invalid Template")
)
def remove_empty_rows_and_columns(self):
	"""Drop fully-empty rows and header-less empty columns from the parsed file.

	Builds self.row_index_map so a cleaned row can be traced back to its
	original position in the file (used for the Sr. No column and error
	reporting).
	"""
	self.row_index_map = []
	removed_rows = []
	removed_columns = []

	# remove empty rows
	data = []
	for i, row in enumerate(self.data):
		if all(v in INVALID_VALUES for v in row):
			# empty row
			removed_rows.append(i)
		else:
			data.append(row)
			self.row_index_map.append(i)

	# remove empty columns
	# a column with a header and no data is a valid column
	# a column with no header and no data will be removed
	header_row = []
	for i, column in enumerate(self.header_row):
		column_values = [row[i] for row in data]
		values = [column] + column_values
		if all(v in INVALID_VALUES for v in values):
			# empty column
			removed_columns.append(i)
		else:
			header_row.append(column)

	data_without_empty_columns = []
	# remove empty columns from data
	for i, row in enumerate(data):
		new_row = [v for j, v in enumerate(row) if j not in removed_columns]
		data_without_empty_columns.append(new_row)

	self.data = data_without_empty_columns
	self.header_row = header_row
def get_data_for_import_preview(self):
	"""Rows, columns and warnings for the UI preview, capped at MAX_ROWS_IN_PREVIEW."""
	out = frappe._dict()
	out.columns = self.columns
	out.warnings = self.warnings

	preview_rows = list(self.rows)
	if len(preview_rows) > MAX_ROWS_IN_PREVIEW:
		preview_rows = preview_rows[:MAX_ROWS_IN_PREVIEW]
		out.max_rows_exceeded = True
		out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW
	out.data = preview_rows

	return out
def parse_data_from_template(self):
columns = self.parse_columns_from_header_row()
columns, data = self.add_serial_no_column(columns, self.data)
self.columns = columns
self.rows = data
def parse_columns_from_header_row(self):
	"""Match each header cell to a docfield and build the column descriptors.

	Columns may be remapped via template_options.remap_column (keyed by the
	stringified column index); a mapping of "Don't Import" explicitly skips
	the column. Unmatched or untitled columns are skipped with info warnings.
	"""
	remap_column = self.template_options.remap_column
	columns = []
	df_by_labels_and_fieldnames = self.build_fields_dict_for_column_matching()
	for i, header_title in enumerate(self.header_row):
		header_row_index = str(i)
		column_number = str(i + 1)
		skip_import = False
		fieldname = remap_column.get(header_row_index)

		if fieldname and fieldname != "Don't Import":
			# user explicitly remapped this column to a field
			df = df_by_labels_and_fieldnames.get(fieldname)
			self.warnings.append(
				{
					"col": column_number,
					"message": _("Mapping column {0} to field {1}").format(
						frappe.bold(header_title or "<i>Untitled Column</i>"), frappe.bold(df.label)
					),
					"type": "info",
				}
			)
		else:
			# default: match by label / fieldname key
			df = df_by_labels_and_fieldnames.get(header_title)

		if not df:
			skip_import = True
		else:
			skip_import = False

		if fieldname == "Don't Import":
			skip_import = True
			self.warnings.append(
				{
					"col": column_number,
					"message": _("Skipping column {0}").format(frappe.bold(header_title)),
					"type": "info",
				}
			)
		elif header_title and not df:
			self.warnings.append(
				{
					"col": column_number,
					"message": _("Cannot match column {0} with any field").format(
						frappe.bold(header_title)
					),
					"type": "info",
				}
			)
		elif not header_title and not df:
			self.warnings.append(
				{"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"}
			)

		columns.append(
			frappe._dict(
				df=df,
				skip_import=skip_import,
				header_title=header_title,
				column_number=column_number,
				index=i,
			)
		)

	return columns
def build_fields_dict_for_column_matching(self):
	"""
	Build a dict with various keys to match with column headers and value as docfield
	The keys can be label or fieldname
	{
		'Customer': df1,
		'customer': df1,
		'Due Date': df2,
		'due_date': df2,
		'Item Code (Sales Invoice Item)': df3,
		'Sales Invoice Item:item_code': df3,
	}
	"""
	out = {}
	table_doctypes = [df.options for df in self.meta.get_table_fields()]
	doctypes = table_doctypes + [self.doctype]
	for doctype in doctypes:
		# synthetic name (ID) field; mandatory only when updating existing records
		name_key = "ID" if self.doctype == doctype else "ID ({})".format(doctype)
		name_df = frappe._dict(
			{
				"fieldtype": "Data",
				"fieldname": "name",
				"label": "ID",
				"reqd": self.data_import.import_type == "Update Existing Records",
				"parent": doctype,
			}
		)
		out[name_key] = name_df
		out["name"] = name_df

		# other fields
		meta = frappe.get_meta(doctype)
		fields = self.get_standard_fields(doctype) + meta.fields
		for df in fields:
			fieldtype = df.fieldtype or "Data"
			parent = df.parent or self.doctype
			if fieldtype not in no_value_fields:
				# label as key
				label = (
					df.label if self.doctype == doctype else "{0} ({1})".format(df.label, parent)
				)
				out[label] = df
				# fieldname as key
				if self.doctype == doctype:
					out[df.fieldname] = df
				else:
					key = "{0}:{1}".format(doctype, df.fieldname)
					out[key] = df

	# if autoname is based on field
	# add an entry for "ID (Autoname Field)"
	autoname_field = self.get_autoname_field(self.doctype)
	if autoname_field:
		out["ID ({})".format(autoname_field.label)] = autoname_field
		# ID field should also map to the autoname field
		out["ID"] = autoname_field
		out["name"] = autoname_field

	return out
def get_standard_fields(self, doctype):
	"""Standard (non-docfield) columns importable for *doctype*.

	Child tables get parent-linkage fields; parent doctypes get owner and
	docstatus. Each entry is returned as a frappe._dict with parent set.
	"""
	meta = frappe.get_meta(doctype)
	if meta.istable:
		standard_fields = [
			{"label": "Parent", "fieldname": "parent"},
			{"label": "Parent Type", "fieldname": "parenttype"},
			{"label": "Parent Field", "fieldname": "parentfield"},
			{"label": "Row Index", "fieldname": "idx"},
		]
	else:
		standard_fields = [
			{"label": "Owner", "fieldname": "owner"},
			{"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"},
		]

	return [frappe._dict(dict(df, parent=doctype)) for df in standard_fields]
def add_serial_no_column(self, columns, data):
	"""Prepend a "Sr. No" column (original 1-based file row number) to columns and data."""
	serial_col = frappe._dict({"header_title": "Sr. No", "skip_import": True})
	new_columns = [serial_col] + columns

	# re-number column indexes after the shift
	for index, col in enumerate(new_columns):
		col.index = index

	new_data = [
		[self.row_index_map[i] + 1] + row for i, row in enumerate(data)
	]
	return new_columns, new_data
def parse_value(self, value, df):
# convert boolean values to 0 or 1
if df.fieldtype == "Check" and value.lower().strip() in ["t", "f", "true", "false"]:
value = value.lower().strip()
value = 1 if value in ["t", "true"] else 0
if df.fieldtype in ["Int", "Check"]:
value = cint(value)
elif df.fieldtype in ["Float", "Percent", "Currency"]:
value = flt(value)
elif df.fieldtype in ["Date", "Datetime"]:
value = self.parse_date_format(value, df)
return value
def parse_date_format(self, value, df):
date_format = self.guess_date_format_for_column(df.fieldname)
if date_format:
return datetime.strptime(value, date_format)
return value
def guess_date_format_for_column(self, fieldname):
""" Guesses date format for a column by parsing the first 10 values in the column,
getting the date format and then returning the one which has the maximum frequency
"""
PARSE_ROW_COUNT = 10
if not self._guessed_date_formats.get(fieldname):
column_index = -1
for i, field in enumerate(self.header_row):
if self.meta.has_field(field) and field == fieldname:
column_index = i
break
if column_index == -1:
self._guessed_date_formats[fieldname] = None
date_values = [
row[column_index] for row in self.data[:PARSE_ROW_COUNT] if row[column_index]
]
date_formats = [guess_date_format(d) for d in date_values]
if not date_formats:
return
max_occurred_date_format = max(set(date_formats), key=date_formats.count)
self._guessed_date_formats[fieldname] = max_occurred_date_format
return self._guessed_date_formats[fieldname]
def import_data(self):
	"""Run the import: one insert/update per payload, with batching and logging.

	Skips rows already imported successfully in a previous run, commits after
	every successful doc and rolls back a failed one, publishes realtime
	progress for larger imports, and returns the import log.
	"""
	# set user lang for translations
	frappe.cache().hdel("lang", frappe.session.user)
	frappe.set_user_lang(frappe.session.user)

	if not self.console:
		self.data_import.db_set("template_warnings", "")

	# set flags
	frappe.flags.in_import = True
	frappe.flags.mute_emails = self.data_import.mute_emails

	# prepare a map for missing link field values
	self.prepare_missing_link_field_values()

	# parse docs from rows
	payloads = self.get_payloads_for_import()

	# dont import if there are non-ignorable warnings
	warnings = [w for w in self.warnings if w.get("type") != "info"]
	if warnings:
		if self.console:
			self.print_grouped_warnings(warnings)
		else:
			self.data_import.db_set("template_warnings", json.dumps(warnings))
			frappe.publish_realtime(
				"data_import_refresh", {"data_import": self.data_import.name}
			)
		return

	# setup import log
	if self.data_import.import_log:
		import_log = frappe.parse_json(self.data_import.import_log)
	else:
		import_log = []

	# remove previous failures from import log
	import_log = [l for l in import_log if l.get("success") == True]

	# get successfully imported rows
	imported_rows = []
	for log in import_log:
		log = frappe._dict(log)
		if log.success:
			imported_rows += log.row_indexes

	# start import
	total_payload_count = len(payloads)
	batch_size = frappe.conf.data_import_batch_size or 1000

	for batch_index, batched_payloads in enumerate(
		frappe.utils.create_batch(payloads, batch_size)
	):
		for i, payload in enumerate(batched_payloads):
			doc = payload.doc
			row_indexes = [row[0] for row in payload.rows]
			current_index = (i + 1) + (batch_index * batch_size)

			# rows already imported in a previous (partially failed) run
			if set(row_indexes).intersection(set(imported_rows)):
				print("Skipping imported rows", row_indexes)
				if total_payload_count > 5:
					frappe.publish_realtime(
						"data_import_progress",
						{
							"current": current_index,
							"total": total_payload_count,
							"skipping": True,
							"data_import": self.data_import.name,
						},
					)
				continue

			try:
				start = timeit.default_timer()
				doc = self.process_doc(doc)
				processing_time = timeit.default_timer() - start
				eta = self.get_eta(current_index, total_payload_count, processing_time)

				if total_payload_count > 5:
					frappe.publish_realtime(
						"data_import_progress",
						{
							"current": current_index,
							"total": total_payload_count,
							"docname": doc.name,
							"data_import": self.data_import.name,
							"success": True,
							"row_indexes": row_indexes,
							"eta": eta,
						},
					)
				if self.console:
					update_progress_bar(
						"Importing {0} records".format(total_payload_count),
						current_index,
						total_payload_count,
					)
				import_log.append(
					frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes)
				)
				# commit after every successful import
				frappe.db.commit()
			except Exception:
				import_log.append(
					frappe._dict(
						success=False,
						exception=frappe.get_traceback(),
						messages=frappe.local.message_log,
						row_indexes=row_indexes,
					)
				)
				frappe.clear_messages()
				# rollback if exception
				frappe.db.rollback()

	# set status
	failures = [l for l in import_log if l.get("success") == False]
	if len(failures) == total_payload_count:
		status = "Pending"
	elif len(failures) > 0:
		status = "Partial Success"
	else:
		status = "Success"

	if self.console:
		self.print_import_log(import_log)
	else:
		self.data_import.db_set("status", status)
		self.data_import.db_set("import_log", json.dumps(import_log))

	frappe.flags.in_import = False
	frappe.flags.mute_emails = False
	frappe.publish_realtime("data_import_refresh", {"data_import": self.data_import.name})

	return import_log
def get_payloads_for_import(self):
	"""Split self.rows into per-document payloads of (doc, rows)."""
	payloads = []
	remaining = list(self.rows)  # work on a copy
	while remaining:
		doc, doc_rows, remaining = self.parse_next_row_for_import(remaining)
		payloads.append(frappe._dict(doc=doc, rows=doc_rows))
	return payloads
def parse_next_row_for_import(self, data):
	"""
	Parses rows that make up a doc. A doc maybe built from a single row or multiple rows.
	Returns the doc, rows, and data without the rows.
	"""
	doctypes = set([col.df.parent for col in self.columns if col.df and col.df.parent])

	# first row is included by default
	first_row = data[0]
	rows = [first_row]

	# if there are child doctypes, find the subsequent rows
	if len(doctypes) > 1:
		# subsequent rows either dont have any parent value set
		# or have the same value as the parent row
		# we include a row if either of conditions match
		parent_column_indexes = [
			col.index
			for col in self.columns
			if not col.skip_import and col.df and col.df.parent == self.doctype
		]
		parent_row_values = [first_row[i] for i in parent_column_indexes]

		data_without_first_row = data[1:]
		for row in data_without_first_row:
			row_values = [row[i] for i in parent_column_indexes]
			# if the row is blank, it's a child row doc
			if all([v in INVALID_VALUES for v in row_values]):
				rows.append(row)
				continue
			# if the row has same values as parent row, it's a child row doc
			if row_values == parent_row_values:
				rows.append(row)
				continue
			# if any of those conditions dont match, it's the next doc
			break

	def get_column_indexes(doctype):
		# positions of non-skipped columns belonging to *doctype*
		return [
			col.index
			for col in self.columns
			if not col.skip_import and col.df and col.df.parent == doctype
		]

	def validate_value(value, df):
		# NOTE(review): the Select branch returns False for an invalid option,
		# and parse_doc's `if value:` then drops it — but that also drops any
		# legitimate falsy value (0, empty string already filtered); confirm
		# 0 is never a valid Select/Link cell value here.
		if df.fieldtype == "Select":
			select_options = df.get_select_options()
			if select_options and value not in select_options:
				options_string = ", ".join([frappe.bold(d) for d in select_options])
				msg = _("Value must be one of {0}").format(options_string)
				self.warnings.append(
					{
						"row": row_number,
						"field": df.as_dict(convert_dates_to_str=True),
						"message": msg,
					}
				)
				return False
		elif df.fieldtype == "Link":
			d = self.get_missing_link_field_values(df.options)
			# warn only when the linked doc is missing and cannot be auto-created
			if value in d.missing_values and not d.one_mandatory:
				msg = _("Value {0} missing for {1}").format(
					frappe.bold(value), frappe.bold(df.options)
				)
				self.warnings.append(
					{
						"row": row_number,
						"field": df.as_dict(convert_dates_to_str=True),
						"message": msg,
					}
				)
				return value
		return value

	def parse_doc(doctype, docfields, values, row_number):
		# new_doc returns a dict with default values set
		doc = frappe.new_doc(doctype, as_dict=True)
		# remove standard fields and __islocal
		for key in frappe.model.default_fields + ("__islocal",):
			doc.pop(key, None)
		for df, value in zip(docfields, values):
			if value in INVALID_VALUES:
				value = None
			value = validate_value(value, df)
			if value:
				doc[df.fieldname] = self.parse_value(value, df)
		check_mandatory_fields(doctype, doc, row_number)
		return doc

	def check_mandatory_fields(doctype, doc, row_number):
		# check if mandatory fields are set (except table fields)
		meta = frappe.get_meta(doctype)
		fields = [
			df
			for df in meta.fields
			if df.fieldtype not in table_fields
			and df.reqd
			and doc.get(df.fieldname) in INVALID_VALUES
		]
		if not fields:
			return
		if len(fields) == 1:
			self.warnings.append(
				{
					"row": row_number,
					"message": _("{0} is a mandatory field").format(fields[0].label),
				}
			)
		else:
			fields_string = ", ".join([df.label for df in fields])
			self.warnings.append(
				{"row": row_number, "message": _("{0} are mandatory fields").format(fields_string)}
			)

	parsed_docs = {}
	for row in rows:
		for doctype in doctypes:
			if doctype == self.doctype and parsed_docs.get(doctype):
				# if parent doc is already parsed from the first row
				# then skip
				continue
			row_number = row[0]
			column_indexes = get_column_indexes(doctype)
			values = [row[i] for i in column_indexes]
			if all(v in INVALID_VALUES for v in values):
				# skip values if all of them are empty
				continue
			columns = [self.columns[i] for i in column_indexes]
			docfields = [col.df for col in columns]
			doc = parse_doc(doctype, docfields, values, row_number)
			parsed_docs[doctype] = parsed_docs.get(doctype, [])
			parsed_docs[doctype].append(doc)

	# build the doc with children
	doc = {}
	for doctype, docs in parsed_docs.items():
		if doctype == self.doctype:
			doc.update(docs[0])
		else:
			table_dfs = self.meta.get(
				"fields", {"options": doctype, "fieldtype": ["in", table_fields]}
			)
			if table_dfs:
				table_field = table_dfs[0]
				doc[table_field.fieldname] = docs

	# check if there is atleast one row for mandatory table fields
	mandatory_table_fields = [
		df
		for df in self.meta.fields
		if df.fieldtype in table_fields and df.reqd and len(doc.get(df.fieldname, [])) == 0
	]
	if len(mandatory_table_fields) == 1:
		self.warnings.append(
			{
				"row": first_row[0],
				"message": _("There should be atleast one row for {0} table").format(
					mandatory_table_fields[0].label
				),
			}
		)
	elif mandatory_table_fields:
		fields_string = ", ".join([df.label for df in mandatory_table_fields])
		self.warnings.append(
			{
				"row": first_row[0],
				"message": _("There should be atleast one row for the following tables: {0}").format(fields_string),
			}
		)

	return doc, rows, data[len(rows) :]
def process_doc(self, doc):
import_type = self.data_import.import_type
if import_type == "Insert New Records":
return self.insert_record(doc)
elif import_type == "Update Existing Records":
return self.update_record(doc)
def insert_record(self, doc):
	"""Insert *doc* as a new record, auto-creating missing linked records first."""
	self.create_missing_linked_records(doc)

	new_doc = frappe.new_doc(self.doctype)
	new_doc.update(doc)
	# name shouldn't be set when inserting a new record
	new_doc.set("name", None)
	new_doc.insert()

	if self.data_import.submit_after_import and self.meta.is_submittable:
		new_doc.submit()
	return new_doc
def create_missing_linked_records(self, doc):
	"""
	Finds fields that are of type Link, and creates the corresponding
	document automatically if it has only one mandatory field
	"""
	link_values = []

	def get_link_fields(doc, doctype):
		# collect (linked doctype, value) pairs, recursing into child tables
		for fieldname, value in doc.items():
			meta = frappe.get_meta(doctype)
			df = meta.get_field(fieldname)
			if not df:
				continue
			if df.fieldtype == "Link" and value not in INVALID_VALUES:
				link_values.append([df.options, value])
			elif df.fieldtype in table_fields:
				for row in value:
					get_link_fields(row, df.options)

	get_link_fields(doc, self.doctype)

	for link_doctype, link_value in link_values:
		d = self.missing_link_values.get(link_doctype)
		# only auto-create when the linked doctype needs exactly one value
		if d and d.one_mandatory and link_value in d.missing_values:
			# find the autoname field
			autoname_field = self.get_autoname_field(link_doctype)
			name_field = autoname_field.fieldname if autoname_field else "name"
			new_doc = frappe.new_doc(link_doctype)
			new_doc.set(name_field, link_value)
			new_doc.insert()
			d.missing_values.remove(link_value)
def update_record(self, doc):
	"""Update an existing record identified by the template's ID column."""
	id_value = doc[self.get_id_fieldname()]
	existing_doc = frappe.get_doc(self.doctype, id_value)
	existing_doc.flags.via_data_import = self.data_import.name
	existing_doc.update(doc)
	existing_doc.save()
	return existing_doc
def export_errored_rows(self):
	"""Build a CSV download containing only the rows that failed to import."""
	from frappe.utils.csvutils import build_csv_response

	if not self.data_import:
		return

	import_log = frappe.parse_json(self.data_import.import_log or "[]")
	failed_row_indexes = set()
	for entry in import_log:
		if entry.get("success") == False:
			failed_row_indexes.update(entry.get("row_indexes", []))
	row_indexes = sorted(failed_row_indexes)

	# drop the leading "Sr. No" column from both header and rows
	rows = [[col.header_title for col in self.columns[1:]]]
	rows += [row[1:] for row in self.rows if row[0] in row_indexes]

	build_csv_response(rows, self.doctype)
def get_missing_link_field_values(self, doctype):
return self.missing_link_values.get(doctype, {})
def prepare_missing_link_field_values(self):
	"""Scan Link columns and record values that don't exist in the database.

	Populates self.missing_link_values as
	{doctype: frappe._dict(missing_values=[...], one_mandatory=bool, df=df)};
	used later to auto-create simple linked records or warn about them.
	"""
	columns = self.columns
	rows = self.rows

	link_column_indexes = [
		col.index for col in columns if col.df and col.df.fieldtype == "Link"
	]
	self.missing_link_values = {}
	for index in link_column_indexes:
		col = columns[index]
		column_values = [row[index] for row in rows]
		values = set([v for v in column_values if v not in INVALID_VALUES])
		doctype = col.df.options
		missing_values = [value for value in values if not frappe.db.exists(doctype, value)]
		# merge when several columns link to the same doctype
		if self.missing_link_values.get(doctype):
			self.missing_link_values[doctype].missing_values += missing_values
		else:
			self.missing_link_values[doctype] = frappe._dict(
				missing_values=missing_values,
				one_mandatory=self.has_one_mandatory_field(doctype),
				df=col.df,
			)
def get_id_fieldname(self):
autoname_field = self.get_autoname_field(self.doctype)
if autoname_field:
return autoname_field.fieldname
return "name"
def get_eta(self, current, total, processing_time):
remaining = total - current
eta = processing_time * remaining
if not self.last_eta or eta < self.last_eta:
self.last_eta = eta
return self.last_eta
def has_one_mandatory_field(self, doctype):
	"""True when creating a *doctype* record needs exactly one user-supplied value."""
	meta = frappe.get_meta(doctype)
	# mandatory fields without a default must be supplied by the user
	count = len([df for df in meta.fields if df.reqd and not df.default])
	if meta.autoname and meta.autoname.lower() == "prompt":
		# prompt-autoname means the name itself is one more required input
		count += 1
	return count == 1
def get_autoname_field(self, doctype):
	"""Docfield used for field-based autonaming ("field:<fieldname>"), else None."""
	meta = frappe.get_meta(doctype)
	autoname = meta.autoname or ""
	if autoname.startswith("field:"):
		return meta.get_field(autoname[len("field:"):])
def print_grouped_warnings(self, warnings):
	"""Print warnings to the console, grouped by row number."""
	by_row = {}
	ungrouped = []
	for warning in warnings:
		row = warning.get("row")
		if row:
			by_row.setdefault(row, []).append(warning)
		else:
			ungrouped.append(warning)

	for row_number, row_warnings in by_row.items():
		print("Row {0}".format(row_number))
		for warning in row_warnings:
			print(warning.get("message"))

	for warning in ungrouped:
		print(warning.get("message"))
def print_import_log(self, import_log):
	"""Print a console summary of the import and dump failures to a file.

	Successful entries are summarised on stdout; failed entries (row
	indexes, messages and traceback) are written to a timestamped text
	file in the current working directory.
	"""
	successes = [entry for entry in import_log if entry.success]
	failures = [entry for entry in import_log if not entry.success]

	if successes:
		print(
			"Successfully imported {0} records out of {1}".format(
				len(successes), len(import_log)
			)
		)

	if failures:
		print("Failed to import {0} records".format(len(failures)))
		file_name = '{0}_import_on_{1}.txt'.format(self.doctype, frappe.utils.now())
		print('Check {0} for errors'.format(os.path.join('sites', file_name)))
		sections = []
		for failure in failures:
			sections.append("Row Indexes: {0}\n".format(str(failure.get('row_indexes', []))))
			sections.append("Messages:\n{0}\n".format('\n'.join(failure.get('messages', []))))
			sections.append("Traceback:\n{0}\n\n".format(failure.get('exception')))
		with open(file_name, 'w') as f:
			f.write("".join(sections))
# Candidate date formats tried in order by guess_date_format(). For each
# separator family (-, /, .), day-first is tried before month-first and
# year-first, and 4-digit years before 2-digit years.
DATE_FORMATS = [
	r"%d-%m-%Y",
	r"%m-%d-%Y",
	r"%Y-%m-%d",
	r"%d-%m-%y",
	r"%m-%d-%y",
	r"%y-%m-%d",
	r"%d/%m/%Y",
	r"%m/%d/%Y",
	r"%Y/%m/%d",
	r"%d/%m/%y",
	r"%m/%d/%y",
	r"%y/%m/%d",
	r"%d.%m.%Y",
	r"%m.%d.%Y",
	r"%Y.%m.%d",
	r"%d.%m.%y",
	r"%m.%d.%y",
	r"%y.%m.%d",
]

# Candidate time formats: 24-hour (with optional fractional seconds) before
# 12-hour AM/PM variants.
TIME_FORMATS = [
	r"%H:%M:%S.%f",
	r"%H:%M:%S",
	r"%H:%M",
	r"%I:%M:%S.%f %p",
	r"%I:%M:%S %p",
	r"%I:%M %p",
]


def guess_date_format(date_string):
	"""Guess the strptime format of date_string.

	Splits an optional time component off the first space, tries each
	format in DATE_FORMATS against the date part and (when present) each
	format in TIME_FORMATS against the remainder.

	:param date_string: e.g. "5/20/2019" or "2019-05-20 10:30:00"
	:return: the matching format string (date, or "date time"), or None
		when the date part matches no known format.
	"""
	date_string = date_string.strip()

	if " " in date_string:
		_date, _time = date_string.split(" ", 1)
	else:
		_date, _time = date_string, None

	date_format = None
	for fmt in DATE_FORMATS:
		try:
			# if the date parses without exception, capture the format
			datetime.strptime(_date, fmt)
			date_format = fmt
			break
		except ValueError:
			continue

	if not date_format:
		# Bug fix: previously, an unrecognized date part combined with a
		# recognizable time part raised TypeError (None + str). Treat an
		# unrecognized date as "format unknown".
		return None

	if _time:
		for fmt in TIME_FORMATS:
			try:
				# if the time parses without exception, capture the format
				datetime.strptime(_time, fmt)
				return date_format + " " + fmt
			except ValueError:
				continue

	return date_format
def import_data(doctype, file_path):
	"""Convenience wrapper: import the file at file_path into doctype."""
	Importer(doctype, file_path).import_data()

View file

@ -0,0 +1,40 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import.exporter_new import Exporter
class TestExporter(unittest.TestCase):
	def test_exports_mandatory_fields(self):
		"""A mandatory-fields template of Web Page has only ID and Title."""
		exporter = Exporter('Web Page', export_fields='Mandatory')
		header_row = exporter.get_csv_array()[0]
		self.assertEqual(header_row, ['ID', 'Title'])

	def test_exports_all_fields(self):
		"""Exporting all fields of Web Page yields 23 columns."""
		exporter = Exporter('Web Page', export_fields='All')
		header_row = exporter.get_csv_array()[0]
		self.assertEqual(len(header_row), 23)

	def test_exports_selected_fields(self):
		"""Only the requested fields (plus ID) appear, using their labels."""
		exporter = Exporter(
			'Web Page', export_fields={'Web Page': ['title', 'route', 'published']}
		)
		header_row = exporter.get_csv_array()[0]
		self.assertEqual(header_row, ['ID', 'Title', 'Route', 'Published'])

	def test_exports_data(self):
		"""Exported data has one row per ToDo record plus the header row."""
		exporter = Exporter('ToDo', export_fields='All', export_data=True)
		todo_count = frappe.db.count('ToDo')
		self.assertEqual(len(exporter.get_csv_array()), todo_count + 1)

View file

@ -0,0 +1,78 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
import datetime
import unittest
import frappe
from frappe.core.doctype.data_import.importer_new import Importer
# CSV fixtures for TestImporter: a header row followed by data rows.

# Includes a completely empty data row, which the importer should skip.
content_empty_rows = '''title,start_date,idx,show_title
,,,
est phasellus sit amet,5/20/2019,52,1
nibh in,7/29/2019,77,1
'''

# The only data row is missing the mandatory "title" value.
content_mandatory_missing = '''title,start_date,idx,show_title
,5/20/2019,52,1
'''

# Raw strings ("True", a date, an int) that must be coerced to field types.
content_convert_value = '''title,start_date,idx,show_title
est phasellus sit amet,5/20/2019,52,True
'''

# Contains a column that does not exist on the Web Page doctype.
content_invalid_column = '''title,start_date,idx,show_title,invalid_column
est phasellus sit amet,5/20/2019,52,True,invalid value
'''
class TestImporter(unittest.TestCase):
	def test_should_skip_empty_rows(self):
		"""Rows with no values at all are dropped from the import payloads."""
		importer = self.get_importer('Web Page', content=content_empty_rows)
		rows_to_import = []
		for payload in importer.get_payloads_for_import():
			rows_to_import.extend(row[0] for row in payload.rows)
		self.assertEqual(len(rows_to_import), 2)

	def test_should_throw_if_mandatory_is_missing(self):
		"""A missing mandatory value surfaces as an import warning."""
		importer = self.get_importer('Web Page', content=content_mandatory_missing)
		importer.import_data()
		self.assertTrue('Title is a mandatory field' in importer.warnings[0]['message'])

	def test_should_convert_value_based_on_fieldtype(self):
		"""Raw CSV strings are coerced to the field's Python type."""
		importer = self.get_importer('Web Page', content=content_convert_value)
		doc = importer.get_payloads_for_import()[0].doc
		self.assertEqual(type(doc['show_title']), int)
		self.assertEqual(type(doc['idx']), int)
		self.assertEqual(type(doc['start_date']), datetime.datetime)

	def test_should_ignore_invalid_columns(self):
		"""Columns that do not belong to the doctype are not imported."""
		importer = self.get_importer('Web Page', content=content_invalid_column)
		doc = importer.get_payloads_for_import()[0].doc
		self.assertTrue('invalid_column' not in doc)
		self.assertTrue('title' in doc)

	def test_should_import_valid_template(self):
		"""A valid file imports successfully and the date value round-trips."""
		title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8))
		content_valid_content = '''title,start_date,idx,show_title
{0},5/20/2019,52,1'''.format(title)
		importer = self.get_importer('Web Page', content=content_valid_content)
		log = importer.import_data()[0]
		self.assertTrue(log.success)
		doc = frappe.get_doc('Web Page', { 'title': title })
		self.assertEqual(frappe.utils.get_datetime_str(doc.start_date),
			frappe.utils.get_datetime_str('2019-05-20'))

	def get_importer(self, doctype, content):
		"""Build an Importer over raw content for an Insert-type Data Import."""
		data_import = frappe.new_doc('Data Import Beta')
		data_import.import_type = 'Insert New Records'
		return Importer(doctype, content=content, data_import=data_import)

View file

@ -0,0 +1,403 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Import Beta', {
	setup(frm) {
		// reload the form when the background import job updates this document
		frappe.realtime.on('data_import_refresh', ({ data_import }) => {
			if (data_import !== frm.doc.name) return;
			frappe.model.clear_doc('Data Import Beta', frm.doc.name);
			frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
				frm.refresh();
			});
		});

		// live progress bar driven by the import background job
		frappe.realtime.on('data_import_progress', data => {
			if (data.data_import !== frm.doc.name) {
				return;
			}
			let percent = Math.floor((data.current * 100) / data.total);
			let seconds = Math.floor(data.eta);
			let minutes = Math.floor(data.eta / 60);
			let eta_message =
				seconds < 60
					? __('About {0} seconds remaining', [seconds])
					: minutes === 1
						? __('About {0} minute remaining', [minutes])
						: __('About {0} minutes remaining', [minutes]);

			let message;
			if (data.success) {
				let message_args = [data.current, data.total, eta_message];
				message =
					frm.doc.import_type === 'Insert New Records'
						? __('Importing {0} of {1}, {2}', message_args)
						: __('Updating {0} of {1}, {2}', message_args);
			}
			if (data.skipping) {
				message = __('Skipping {0} of {1}, {2}', [data.current, data.total, eta_message]);
			}
			frm.dashboard.show_progress(__('Import Progress'), percent, message);
			frm.page.set_indicator(__('In Progress'), 'orange');

			// hide progress when complete
			if (data.current === data.total) {
				setTimeout(() => {
					frm.dashboard.hide();
					frm.refresh();
				}, 2000);
			}
		});

		// only doctypes with import enabled can be selected
		frm.set_query('reference_doctype', () => {
			return {
				filters: {
					allow_import: 1
				}
			};
		});

		frm.get_field('import_file').df.options = {
			restrictions: {
				allowed_file_types: ['.csv', '.xls', '.xlsx']
			}
		};
	},

	refresh(frm) {
		frm.page.hide_icon_group();
		frm.trigger('import_file');
		frm.trigger('show_import_log');
		frm.trigger('show_import_warnings');
		frm.trigger('toggle_submit_after_import');
		frm.trigger('show_import_status');

		if (frm.doc.status === 'Partial Success') {
			frm.add_custom_button(__('Export Errored Rows'),
				() => frm.trigger('export_errored_rows'));
		}

		// matches both 'Success' and 'Partial Success'
		if (frm.doc.status.includes('Success')) {
			frm.add_custom_button(__('Go to {0} List', [frm.doc.reference_doctype]),
				() => frappe.set_route('List', frm.doc.reference_doctype));
		}

		if (frm.doc.status !== 'Success') {
			if (!frm.is_new() && frm.doc.import_file) {
				let label = frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
				frm.page.set_primary_action(label, () => frm.events.start_import(frm));
			} else {
				frm.page.set_primary_action(__('Save'), () => frm.save());
			}
		}
	},

	// dashboard headline summarising how many records imported/updated
	show_import_status(frm) {
		let import_log = JSON.parse(frm.doc.import_log || '[]');
		let successful_records = import_log.filter(log => log.success);
		let failed_records = import_log.filter(log => !log.success);
		if (successful_records.length === 0) return;

		let message;
		if (failed_records.length === 0) {
			let message_args = [successful_records.length];
			if (frm.doc.import_type === 'Insert New Records') {
				message =
					successful_records.length > 1
						? __('Successfully imported {0} records.', message_args)
						: __('Successfully imported {0} record.', message_args);
			} else {
				message =
					successful_records.length > 1
						? __('Successfully updated {0} records.', message_args)
						: __('Successfully updated {0} record.', message_args);
			}
		} else {
			let message_args = [successful_records.length, import_log.length];
			if (frm.doc.import_type === 'Insert New Records') {
				message =
					successful_records.length > 1
						? __('Successfully imported {0} records out of {1}.', message_args)
						: __('Successfully imported {0} record out of {1}.', message_args);
			} else {
				message =
					successful_records.length > 1
						? __('Successfully updated {0} records out of {1}.', message_args)
						: __('Successfully updated {0} record out of {1}.', message_args);
			}
		}
		frm.dashboard.set_headline(message);
	},

	// calls the server-side start_import method on this document
	start_import(frm) {
		frm.call({
			doc: frm.doc,
			method: 'start_import',
			btn: frm.page.btn_primary
		});
	},

	download_template(frm) {
		// lazy-load the export dialog and reuse it while the doctype is unchanged
		if (frm.data_exporter && frm.data_exporter.doctype === frm.doc.reference_doctype) {
			frm.data_exporter.dialog.show();
			set_export_records();
		} else {
			frappe.require('/assets/js/data_import_tools.min.js', () => {
				frm.data_exporter = new frappe.data_import.DataExporter(frm.doc.reference_doctype);
				set_export_records();
			});
		}

		// blank template for inserts, existing data for updates
		function set_export_records() {
			if (frm.doc.import_type === 'Insert New Records') {
				frm.data_exporter.dialog.set_value('export_records', 'blank_template');
			} else {
				frm.data_exporter.dialog.set_value('export_records', 'all');
			}
		}
	},

	reference_doctype(frm) {
		frm.trigger('toggle_submit_after_import');
	},

	// "Submit After Import" is only meaningful for submittable doctypes
	toggle_submit_after_import(frm) {
		frm.toggle_display('submit_after_import', false);
		let doctype = frm.doc.reference_doctype;
		if (doctype) {
			frappe.model.with_doctype(doctype, () => {
				let meta = frappe.get_meta(doctype);
				frm.toggle_display('submit_after_import', meta.is_submittable);
			});
		}
	},

	import_file(frm) {
		frm.toggle_display('section_import_preview', frm.doc.import_file);
		if (!frm.doc.import_file) {
			frm.get_field('import_preview').$wrapper.empty();
			return;
		}

		// load import preview
		frm.get_field('import_preview').$wrapper.empty();
		$('<span class="text-muted">')
			.html(__('Loading import file...'))
			.appendTo(frm.get_field('import_preview').$wrapper);

		frm
			.call({
				doc: frm.doc,
				method: 'get_preview_from_template',
				error_handlers: {
					TimestampMismatchError() {
						// ignore this error
					}
				}
			})
			.then(r => {
				let preview_data = r.message;
				frm.events.show_import_preview(frm, preview_data);
				frm.events.show_import_warnings(frm, preview_data);
			});
	},

	show_import_preview(frm, preview_data) {
		let import_log = JSON.parse(frm.doc.import_log || '[]');

		// reuse the existing preview instance when the doctype hasn't changed
		if (frm.import_preview && frm.import_preview.doctype === frm.doc.reference_doctype) {
			frm.import_preview.preview_data = preview_data;
			frm.import_preview.import_log = import_log;
			frm.import_preview.refresh();
			return;
		}

		frappe.require('/assets/js/data_import_tools.min.js', () => {
			frm.import_preview = new frappe.data_import.ImportPreview({
				wrapper: frm.get_field('import_preview').$wrapper,
				doctype: frm.doc.reference_doctype,
				preview_data,
				import_log,
				frm,
				events: {
					// persist the user's column remapping, then reload the preview
					remap_column(changed_map) {
						let template_options = JSON.parse(frm.doc.template_options || '{}');
						template_options.remap_column = template_options.remap_column || {};
						Object.assign(template_options.remap_column, changed_map);
						frm.set_value('template_options', JSON.stringify(template_options));
						frm.save().then(() => frm.trigger('import_file'));
					},
				}
			});
		});
	},

	export_errored_rows(frm) {
		// NOTE(review): confirm this dotted path matches the server module
		// location — the importer code in this change lives under
		// frappe.core.doctype.data_import
		open_url_post('/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', {
			data_import_name: frm.doc.name
		});
	},

	show_import_warnings(frm, preview_data) {
		// combine stored template warnings with fresh preview warnings
		let warnings = JSON.parse(frm.doc.template_warnings || '[]');
		warnings = warnings.concat(preview_data.warnings || []);

		frm.toggle_display('import_warnings_section', warnings.length > 0);
		if (warnings.length === 0) {
			frm.get_field('import_warnings').$wrapper.html('');
			return;
		}

		// group warnings by row
		let warnings_by_row = {};
		let other_warnings = [];
		for (let warning of warnings) {
			if (warning.row) {
				warnings_by_row[warning.row] = warnings_by_row[warning.row] || [];
				warnings_by_row[warning.row].push(warning);
			} else {
				other_warnings.push(warning);
			}
		}

		let html = '';
		html += Object.keys(warnings_by_row).map(row_number => {
			let message = warnings_by_row[row_number]
				.map(w => {
					if (w.field) {
						return `<li>${w.field.label}: ${w.message}</li>`;
					}
					return `<li>${w.message}</li>`;
				})
				.join('');
			return `
				<div class="alert border" data-row="${row_number}">
					<div class="uppercase">${__('Row {0}', [row_number])}</div>
					<div class="body"><ul>${message}</ul></div>
				</div>
			`;
		}).join('');

		html += other_warnings
			.map(warning => {
				let header = '';
				if (warning.col) {
					header = __('Column {0}', [warning.col]);
				}
				return `
					<div class="alert border" data-col="${warning.col}">
						<div class="uppercase">${header}</div>
						<div class="body">${warning.message}</div>
					</div>
				`;
			})
			.join('');
		frm.get_field('import_warnings').$wrapper.html(`
			<div class="row">
				<div class="col-sm-6 warnings text-muted">${html}</div>
			</div>
		`);
	},

	// render the stored import log as a row-by-row result table
	show_import_log(frm) {
		let import_log = JSON.parse(frm.doc.import_log || '[]');
		let logs = import_log;
		frm.toggle_display('import_log', false);
		frm.toggle_display('import_log_section', logs.length > 0);

		if (logs.length === 0) {
			frm.get_field('import_log_preview').$wrapper.empty();
			return;
		}

		let rows = logs
			.map(log => {
				let html;
				if (log.success) {
					html = __('Successfully imported {0}', [
						`<span class="underline">${frappe.utils.get_form_link(
							frm.doc.reference_doctype,
							log.docname,
							true
						)}<span>`
					]);
				} else {
					// failed entries carry JSON-encoded messages and a traceback
					let messages = log.messages
						.map(JSON.parse)
						.map(m => {
							let title = m.title ? `<strong>${m.title}</strong>` : '';
							let message = m.message ? `<div>${m.message}</div>` : '';
							return title + message;
						})
						.join('');
					let id = frappe.dom.get_unique_id();
					html = `${messages}
						<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
							${__('Show Traceback')}
						</button>
						<div class="collapse margin-top" id="${id}">
							<div class="well">
								<pre>${log.exception}</pre>
							</div>
						</div>`;
				}
				let indicator_color = log.success ? 'green' : 'red';
				let title = log.success ? __('Success') : __('Failure');

				return `<tr>
					<td>${log.row_indexes.join(', ')}</td>
					<td>
						<div class="indicator ${indicator_color}">${title}</div>
					</td>
					<td>
						${html}
					</td>
				</tr>`;
			})
			.join('');

		frm.get_field('import_log_preview').$wrapper.html(`
			<table class="table table-bordered">
				<tr class="text-muted">
					<th width="10%">${__('Row Number')}</th>
					<th width="10%">${__('Status')}</th>
					<th width="80%">${__('Message')}</th>
				</tr>
				${rows}
			</table>
		`);
	},

	show_missing_link_values(frm, missing_link_values) {
		// NOTE(review): the server builds these dicts with the key
		// `one_mandatory` (see Importer.missing_link_values) — confirm that
		// `has_one_mandatory_field` is the intended key here
		let can_be_created_automatically = missing_link_values.every(
			d => d.has_one_mandatory_field
		);

		let html = missing_link_values
			.map(d => {
				let doctype = d.doctype;
				let values = d.missing_values;
				return `
					<h5>${doctype}</h5>
					<ul>${values.map(v => `<li>${v}</li>`).join('')}</ul>
				`;
			})
			.join('');

		if (can_be_created_automatically) {
			let message = __('There are some linked records which needs to be created before we can import your file. Do you want to create the following missing records automatically?');
			frappe.confirm(message + html, () => {
				frm
					.call('create_missing_link_values', {
						missing_link_values
					})
					.then(r => {
						let records = r.message;
						frappe.msgprint(
							__('Created {0} records successfully.', [records.length])
						);
					});
			});
		} else {
			frappe.msgprint(
				__('The following records needs to be created before we can import your file.') + html
			);
		}
	}
});

View file

@ -0,0 +1,167 @@
{
"autoname": "format:{reference_doctype} Import on {creation}",
"beta": 1,
"creation": "2019-08-04 14:16:08.318714",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"import_type",
"download_template",
"import_file",
"column_break_5",
"status",
"section_break_7",
"submit_after_import",
"mute_emails",
"template_options",
"section_import_preview",
"import_preview",
"import_warnings_section",
"template_warnings",
"import_warnings",
"import_log_section",
"import_log",
"import_log_preview"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Document Type",
"options": "DocType",
"reqd": 1,
"set_only_once": 1
},
{
"fieldname": "import_type",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Import Type",
"options": "\nInsert New Records\nUpdate Existing Records",
"reqd": 1,
"set_only_once": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "import_file",
"fieldtype": "Attach",
"in_list_view": 1,
"label": "Import File"
},
{
"fieldname": "import_preview",
"fieldtype": "HTML",
"label": "Import Preview"
},
{
"fieldname": "section_import_preview",
"fieldtype": "Section Break",
"label": "Preview"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"collapsible": 1,
"depends_on": "eval:!doc.__islocal",
"fieldname": "section_break_7",
"fieldtype": "Section Break",
"label": "Import Options"
},
{
"fieldname": "template_options",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Options",
"options": "JSON",
"read_only": 1
},
{
"fieldname": "import_log",
"fieldtype": "Code",
"label": "Import Log",
"options": "JSON"
},
{
"fieldname": "import_log_section",
"fieldtype": "Section Break",
"label": "Import Log"
},
{
"fieldname": "import_log_preview",
"fieldtype": "HTML",
"label": "Import Log Preview"
},
{
"default": "Pending",
"fieldname": "status",
"fieldtype": "Select",
"hidden": 1,
"label": "Status",
"options": "Pending\nSuccess\nPartial Success",
"read_only": 1
},
{
"fieldname": "template_warnings",
"fieldtype": "Code",
"hidden": 1,
"label": "Template Warnings",
"options": "JSON"
},
{
"default": "0",
"fieldname": "submit_after_import",
"fieldtype": "Check",
"label": "Submit After Import"
},
{
"fieldname": "import_warnings_section",
"fieldtype": "Section Break",
"label": "Warnings"
},
{
"fieldname": "import_warnings",
"fieldtype": "HTML",
"label": "Import Warnings"
},
{
"depends_on": "reference_doctype",
"fieldname": "download_template",
"fieldtype": "Button",
"label": "Download Template"
},
{
"default": "0",
"fieldname": "mute_emails",
"fieldtype": "Check",
"label": "Don't Send Emails"
}
],
"hide_toolbar": 1,
"modified": "2019-09-28 13:54:35.061730",
"modified_by": "Administrator",
"module": "Core",
"name": "Data Import Beta",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1
}

View file

@ -0,0 +1,99 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.core.doctype.data_import.importer_new import Importer
from frappe.core.doctype.data_import.exporter_new import Exporter
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.background_jobs import enqueue
from frappe import _
class DataImportBeta(Document):
	def validate(self):
		"""Reset stored template options/warnings when the import file changes,
		then validate the new template by constructing an Importer."""
		doc_before_save = self.get_doc_before_save()
		file_changed = (
			doc_before_save and doc_before_save.import_file != self.import_file
		)
		if not self.import_file or file_changed:
			self.template_options = ""
			self.template_warnings = ""

		if self.import_file:
			# constructing the importer validates the template
			self.get_importer()

	def get_preview_from_template(self):
		"""Return preview data parsed from the attached import file."""
		if not self.import_file:
			return
		return self.get_importer().get_data_for_import_preview()

	def start_import(self):
		"""Enqueue the import as a background job (at most one per document)."""
		if frappe.utils.scheduler.is_scheduler_inactive():
			frappe.throw(
				_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
			)

		already_queued = [d.get("job_name") for d in get_info()]
		if self.name not in already_queued:
			enqueue(
				start_import,
				queue="default",
				timeout=6000,
				event="data_import",
				job_name=self.name,
				data_import=self.name,
				now=frappe.conf.developer_mode or frappe.flags.in_test,
			)

	def export_errored_rows(self):
		"""Stream a file containing only the rows that failed to import."""
		return self.get_importer().export_errored_rows()

	def get_importer(self):
		"""Build an Importer bound to this Data Import document."""
		return Importer(self.reference_doctype, data_import=self)
def start_import(data_import):
	"""This method runs in background job"""
	doc = frappe.get_doc("Data Import Beta", data_import)
	importer = Importer(doc.reference_doctype, data_import=doc)
	return importer.import_data()
@frappe.whitelist()
def download_template(
	doctype, export_fields=None, export_records=None, export_filters=None, file_type="CSV"
):
	"""Build and stream an import template for doctype via Exporter.

	:param doctype: Document Type
	:param export_fields: Fields to export as dict,
		e.g. {'Sales Invoice': ['name', 'customer'], 'Sales Invoice Item': ['item_code']}
	:param export_records: One of 'all', 'by_filter', '5_records', 'blank_template'
	:param export_filters: Filter dict
	:param file_type: File type to export into ('CSV' or 'Excel')
	"""
	export_fields = frappe.parse_json(export_fields)
	export_filters = frappe.parse_json(export_filters)
	# a blank template carries headers only, every other mode exports data
	template_only = export_records == "blank_template"
	Exporter(
		doctype,
		export_fields=export_fields,
		export_data=not template_only,
		export_filters=export_filters,
		file_type=file_type,
		export_page_length=5 if export_records == "5_records" else None,
	).build_response()
@frappe.whitelist()
def download_errored_template(data_import_name):
	"""Stream the rows that failed during the given Data Import."""
	frappe.get_doc("Data Import Beta", data_import_name).export_errored_rows()

View file

@ -0,0 +1,19 @@
frappe.listview_settings['Data Import Beta'] = {
	// map each status to a list-view indicator colour
	get_indicator(doc) {
		const status_colors = {
			'Pending': 'orange',
			'Partial Success': 'orange',
			'Success': 'green'
		};
		return [__(doc.status), status_colors[doc.status], 'status,=,' + doc.status];
	},
	formatters: {
		// show a short label instead of the full import type text
		import_type(value) {
			const short_labels = {
				'Insert New Records': __('Insert'),
				'Update Existing Records': __('Update')
			};
			return short_labels[value];
		}
	},
	hide_name_column: true
};

View file

@ -26,3 +26,8 @@ class DocField(Document):
}, 'options')
return link_doctype
def get_select_options(self):
	"""Return the non-empty options of a Select field as a list.

	Returns None for non-Select fieldtypes.
	"""
	if self.fieldtype != 'Select':
		return None
	return [option for option in (self.options or '').split('\n') if option]

View file

@ -43,7 +43,9 @@ def get_diff(old, new, for_child=False):
if not new:
return None
out = frappe._dict(changed = [], added = [], removed = [], row_changed = [])
# capture data import if set
data_import = new.flags.via_data_import
out = frappe._dict(changed = [], added = [], removed = [], row_changed = [], data_import=data_import)
for df in new.meta.fields:
if df.fieldtype in no_value_fields and df.fieldtype not in table_fields:
continue
@ -91,4 +93,4 @@ def get_diff(old, new, for_child=False):
return None
def on_doctype_update():
frappe.db.add_index("Version", ["ref_doctype", "docname"])
frappe.db.add_index("Version", ["ref_doctype", "docname"])

View file

@ -233,8 +233,8 @@ class BaseDocument(object):
if isinstance(d[fieldname], list) and df.fieldtype not in table_fields:
frappe.throw(_('Value for {0} cannot be a list').format(_(df.label)))
if convert_dates_to_str and isinstance(d[fieldname], (datetime.datetime, datetime.time, datetime.timedelta)):
d[fieldname] = str(d[fieldname])
if convert_dates_to_str and isinstance(d[fieldname], (datetime.datetime, datetime.time, datetime.timedelta)):
d[fieldname] = str(d[fieldname])
if d[fieldname] == None and ignore_nulls:
del d[fieldname]

View file

@ -325,5 +325,8 @@
],
"js/barcode_scanner.min.js": [
"public/js/frappe/barcode_scanner/quagga.js"
],
"js/data_import_tools.min.js": [
"public/js/frappe/data_import/index.js"
]
}

View file

@ -0,0 +1,28 @@
export default class ColumnPickerFields extends frappe.views.ReportView {
	// No-op override: this view is only used for its column metadata,
	// it is never rendered.
	show() {}

	// Flatten the pickable columns of the doctype and its child tables
	// into [{ label, value, description }] options for a select control.
	get_fields_as_options() {
		let column_map = this.get_columns_for_picker();
		let doctypes = [this.doctype].concat(
			...frappe.meta.get_table_fields(this.doctype).map(df => df.options)
		);
		// flatten array
		return [].concat(
			...doctypes.map(doctype => {
				return column_map[doctype].map(df => {
					let label = df.label;
					let value = df.fieldname;
					if (this.doctype !== doctype) {
						// child table fields are qualified as "Doctype:fieldname"
						label = `${df.label} (${doctype})`;
						value = `${doctype}:${df.fieldname}`;
					}
					return {
						label,
						value,
						description: value
					};
				});
			})
		);
	}
}

View file

@ -0,0 +1,291 @@
import ColumnPickerFields from './column_picker_fields';
frappe.provide('frappe.data_import');
frappe.data_import.DataExporter = class DataExporter {
	// Dialog for exporting records or a blank template for a doctype,
	// used by Data Import to produce import templates.
	constructor(doctype) {
		this.doctype = doctype;
		// wait for doctype meta before building the dialog
		frappe.model.with_doctype(doctype, () => {
			this.make_dialog();
		});
	}

	make_dialog() {
		this.dialog = new frappe.ui.Dialog({
			title: __('Export Data'),
			fields: [
				{
					fieldtype: 'Select',
					fieldname: 'export_records',
					label: __('Export Type'),
					options: [
						{
							label: __('All Records'),
							value: 'all'
						},
						{
							label: __('Filtered Records'),
							value: 'by_filter'
						},
						{
							label: __('5 Records'),
							value: '5_records'
						},
						{
							label: __('Blank Template'),
							value: 'blank_template'
						}
					],
					default: 'blank_template',
					change: () => {
						this.update_record_count_message();
					}
				},
				{
					fieldtype: 'HTML',
					fieldname: 'filter_area',
					depends_on: doc => doc.export_records === 'by_filter'
				},
				{
					fieldtype: 'Select',
					fieldname: 'file_type',
					label: __('File Type'),
					options: ['Excel', 'CSV'],
					default: 'CSV'
				},
				{
					fieldtype: 'Section Break'
				},
				{
					fieldtype: 'HTML',
					fieldname: 'select_all_buttons'
				},
				{
					label: __(this.doctype),
					fieldname: this.doctype,
					fieldtype: 'MultiCheck',
					columns: 2,
					on_change: () => this.update_primary_action(),
					options: this.get_multicheck_options(this.doctype)
				},
				// one MultiCheck field per child table of the doctype
				...frappe.meta.get_table_fields(this.doctype)
					.map(df => {
						let doctype = df.options;
						let label = df.reqd
							? __('{0} (1 row mandatory)', [doctype])
							: __(doctype);
						return {
							label,
							fieldname: doctype,
							fieldtype: 'MultiCheck',
							columns: 2,
							on_change: () => this.update_primary_action(),
							options: this.get_multicheck_options(doctype)
						};
					})
			],
			primary_action_label: __('Export'),
			primary_action: values => this.export_records(values),
			on_page_show: () => this.select_mandatory()
		});

		this.make_filter_area();
		this.make_select_all_buttons();
		this.update_record_count_message();

		this.dialog.show();
	}

	// POST to the template download endpoint with the selected fields
	export_records() {
		// NOTE(review): confirm this dotted path matches the server module
		// location of download_template
		let method =
			'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template';

		let multicheck_fields = this.dialog.fields
			.filter(df => df.fieldtype === 'MultiCheck')
			.map(df => df.fieldname);

		let values = this.dialog.get_values();

		// keep only the {doctype: [fieldnames]} entries from the dialog values
		let doctype_field_map = Object.assign({}, values);
		for (let key in doctype_field_map) {
			if (!multicheck_fields.includes(key)) {
				delete doctype_field_map[key];
			}
		}

		let filters = null;
		if (values.export_records === 'by_filter') {
			filters = this.get_filters();
		}

		open_url_post(method, {
			doctype: this.doctype,
			file_type: values.file_type,
			export_records: values.export_records,
			export_fields: doctype_field_map,
			export_filters: filters
		});
	}

	make_filter_area() {
		this.filter_group = new frappe.ui.FilterGroup({
			parent: this.dialog.get_field('filter_area').$wrapper,
			doctype: this.doctype,
			on_change: () => {
				this.update_record_count_message();
			}
		});
	}

	make_select_all_buttons() {
		let $select_all_buttons = $(`
			<div>
				<h6 class="form-section-heading uppercase">${__('Select fields to export')}</h6>
				<button class="btn btn-default btn-xs" data-action="select_all">
					${__('Select All')}
				</button>
				<button class="btn btn-default btn-xs" data-action="select_mandatory">
					${__('Select Mandatory')}
				</button>
				<button class="btn btn-default btn-xs" data-action="unselect_all">
					${__('Unselect All')}
				</button>
			</div>
		`);
		// routes the data-action buttons above to methods on this object
		frappe.utils.bind_actions_with_object($select_all_buttons, this);

		this.dialog
			.get_field('select_all_buttons')
			.$wrapper.html($select_all_buttons);
	}

	select_all() {
		this.dialog.$wrapper
			.find(':checkbox')
			.prop('checked', true)
			.trigger('change');
	}

	// check only the mandatory fields of the doctype and of child tables
	// that require at least one row
	select_mandatory() {
		let mandatory_table_doctypes = frappe.meta
			.get_table_fields(this.doctype)
			.filter(df => df.reqd)
			.map(df => df.options);
		mandatory_table_doctypes.push(this.doctype);

		let multicheck_fields = this.dialog.fields
			.filter(df => df.fieldtype === 'MultiCheck')
			.map(df => df.fieldname)
			.filter(doctype => mandatory_table_doctypes.includes(doctype));

		// options flagged `danger` are the mandatory ones (see get_multicheck_options)
		let checkboxes = [].concat(
			...multicheck_fields.map(fieldname => {
				let field = this.dialog.get_field(fieldname);
				return field.options
					.filter(option => option.danger)
					.map(option => option.$checkbox.find('input').get(0));
			})
		);
		this.unselect_all();
		$(checkboxes)
			.prop('checked', true)
			.trigger('change');
	}

	unselect_all() {
		this.dialog.$wrapper
			.find(':checkbox')
			.prop('checked', false)
			.trigger('change');
	}

	// show how many records the currently selected export type will produce
	update_record_count_message() {
		let export_records = this.dialog.get_value('export_records');
		let count_method = {
			all: () => frappe.db.count(this.doctype),
			by_filter: () =>
				frappe.db.count(this.doctype, {
					filters: this.get_filters()
				}),
			blank_template: () => Promise.resolve(0),
			'5_records': () => Promise.resolve(5)
		};

		count_method[export_records]().then(value => {
			let message = '';
			value = parseInt(value, 10);
			if (value === 0) {
				message = __('No records will be exported');
			} else if (value === 1) {
				message = __('1 record will be exported');
			} else {
				message = __('{0} records will be exported', [value]);
			}
			this.dialog.set_df_property('export_records', 'description', message);

			this.update_primary_action(value);
		});
	}

	update_primary_action(no_of_records) {
		let $primary_action = this.dialog.get_primary_btn();
		if (no_of_records != null) {
			// reflect the record count in the export button label
			let label = '';
			if (no_of_records === 0) {
				label = __('Export');
			} else if (no_of_records === 1) {
				label = __('Export 1 record');
			} else {
				label = __('Export {0} records', [no_of_records]);
			}
			$primary_action.html(label);
		} else {
			// disable export when no parent fields are selected
			let parent_fields = this.dialog.get_value(this.doctype);
			$primary_action.prop('disabled', parent_fields.length === 0);
		}
	}

	// FilterGroup filters -> { fieldname: [operator, value] }
	get_filters() {
		return this.filter_group.get_filters().reduce((acc, filter) => {
			return Object.assign(acc, {
				[filter[1]]: [filter[2], filter[3]]
			});
		}, {});
	}

	get_multicheck_options(doctype) {
		// build the column map once and reuse it for every doctype
		if (!this.column_map) {
			this.column_map = new ColumnPickerFields({
				doctype: this.doctype
			}).get_columns_for_picker();
		}

		// when the doctype autonames by "field:", hide that field and fold
		// its label into the "name" option instead
		let autoname_field = null;
		let meta = frappe.get_meta(doctype);
		if (meta.autoname && meta.autoname.startsWith('field:')) {
			let fieldname = meta.autoname.slice('field:'.length);
			autoname_field = frappe.meta.get_field(doctype, fieldname);
		}

		return this.column_map[doctype]
			.filter(df => {
				if (autoname_field && df.fieldname === autoname_field.fieldname) {
					return false;
				}
				return true;
			})
			.map(df => {
				let label = __(df.label);
				if (autoname_field && df.fieldname === 'name') {
					label = label + ` (${__(autoname_field.label)})`;
				}
				return {
					label,
					value: df.fieldname,
					danger: df.reqd,
					checked: false,
					description: `${df.fieldname} ${df.reqd ? __('(Mandatory)') : ''}`
				};
			});
	}
};

View file

@ -0,0 +1,303 @@
import DataTable from 'frappe-datatable';
import ColumnPickerFields from './column_picker_fields';
frappe.provide('frappe.data_import');
frappe.data_import.ImportPreview = class ImportPreview {
	// Renders a datatable preview of the parsed import file inside `wrapper`.
	constructor({ wrapper, doctype, preview_data, frm, import_log, events = {} }) {
		this.wrapper = wrapper;
		this.doctype = doctype;
		this.preview_data = preview_data;
		this.events = events;
		this.import_log = import_log;
		this.frm = frm;

		// wait for doctype meta before building columns
		frappe.model.with_doctype(doctype, () => {
			this.refresh();
		});
	}
	// Rebuild the whole preview from this.preview_data.
	refresh() {
		this.data = this.preview_data.data;
		this.make_wrapper();
		this.prepare_columns();
		this.prepare_data();
		this.render_datatable();
		this.setup_styles();
		this.add_actions();
	}
	// Build the static markup (actions bar, table area, message area).
	make_wrapper() {
		this.wrapper.html(`
			<div>
				<div class="row">
					<div class="col-sm-12">
						<div class="table-actions margin-bottom">
						</div>
						<div class="table-preview border"></div>
						<div class="table-message"></div>
					</div>
				</div>
			</div>
		`);
		// routes data-action clicks inside the wrapper to methods on this object
		frappe.utils.bind_actions_with_object(this.wrapper, this);

		this.$table_preview = this.wrapper.find('.table-preview');
	}
	// Map server-provided column descriptors to datatable column configs:
	// a fixed serial-number column, red skip_import columns, and green
	// mapped columns.
	prepare_columns() {
		this.columns = this.preview_data.columns.map((col, i) => {
			let df = col.df;
			let column_width = 120;
			if (col.header_title === 'Sr. No') {
				return {
					id: 'srno',
					name: 'Sr. No',
					content: 'Sr. No',
					editable: false,
					focusable: false,
					align: 'left',
					width: 60
				};
			}
			if (col.skip_import) {
				let show_warnings_button = `<button class="btn btn-xs" data-action="show_column_warning" data-col="${i}">
					<i class="octicon octicon-stop"></i></button>`;
				if (!col.df) {
					// increase column width for unidentified columns
					column_width += 50;
				}
				let column_title = `<span class="indicator red">
					${col.header_title || `<i>${__('Untitled Column')}</i>`}
					${!col.df ? show_warnings_button : ''}
				</span>`;
				return {
					id: frappe.utils.get_random(6),
					// NOTE(review): df can be undefined here when the column is
					// unidentified and header_title is empty — confirm upstream
					// always provides one of the two
					name: col.header_title || df.label,
					content: column_title,
					skip_import: true,
					editable: false,
					focusable: false,
					align: 'left',
					width: column_width,
					format: value => `<div class="text-muted">${value}</div>`
				};
			}
			return {
				id: df.fieldname,
				name: col.header_title,
				content: `<span class="indicator green">${col.header_title || df.label}</span>`,
				df: df,
				editable: false,
				align: 'left',
				width: column_width
			};
		});
	}
prepare_data() {
this.data = this.data.map(row => {
return row.map(cell => {
if (cell == null) {
return '';
}
return cell;
});
});
}
render_datatable() {
if (this.datatable) {
this.datatable.destroy();
}
this.datatable = new DataTable(this.$table_preview.get(0), {
data: this.data,
columns: this.columns,
layout: this.columns.length < 10 ? 'fluid' : 'fixed',
cellHeight: 35,
serialNoColumn: false,
checkboxColumn: false,
noDataMessage: __('No Data'),
disableReorderColumn: true
});
let { max_rows_exceeded, max_rows_in_preview } = this.preview_data;
if (max_rows_exceeded) {
this.wrapper.find('.table-message').html(`
<div class="text-muted margin-top text-medium">
${__('Showing only first {0} rows in preview', [max_rows_in_preview])}
</div>
`);
}
if (this.data.length === 0) {
this.datatable.style.setStyle('.dt-scrollable', {
height: 'auto'
});
}
this.datatable.style.setStyle('.dt-dropdown', {
display: 'none'
});
}
setup_styles() {
// import success checkbox
this.datatable.style.setStyle(`svg.import-success`, {
width: '16px',
fill: frappe.ui.color.get_color_shade('green', 'dark')
});
// make successfully imported rows readonly
let row_classes = this.datatable
.getRows()
.filter(row => this.is_row_imported(row))
.map(row => row.meta.rowIndex)
.map(i => `.dt-row-${i} .dt-cell`)
.join(',');
this.datatable.style.setStyle(row_classes, {
pointerEvents: 'none',
backgroundColor: frappe.ui.color.get_color_shade('white', 'light'),
color: frappe.ui.color.get_color_shade('black', 'extra-light'),
});
}
add_actions() {
let actions = [
{
label: __('Map Columns'),
handler: 'show_column_mapper',
condition: this.frm.doc.status !== 'Success'
},
{
label: __('Export Errored Rows'),
handler: 'export_errored_rows',
condition: this.import_log.filter(log => !log.success).length > 0
},
{
label: __('Show Warnings'),
handler: 'show_warnings',
condition: this.preview_data.warnings.length > 0
}
];
let html = actions.filter(action => action.condition).map(action => {
return `<button class="btn btn-sm btn-default" data-action="${action.handler}">
${action.label}
</button>
`;
});
this.wrapper.find('.table-actions').html(html);
}
export_errored_rows() {
this.frm.trigger('export_errored_rows');
}
show_warnings() {
this.frm.scroll_to_field('import_warnings');
}
show_column_warning(_, $target) {
let $warning = this.frm
.get_field('import_warnings').$wrapper
.find(`[data-col=${$target.data('col')}]`);
frappe.utils.scroll_to($warning, true, 30);
}
show_column_mapper() {
let column_picker_fields = new ColumnPickerFields({
doctype: this.doctype
});
let changed = [];
let fields = this.preview_data.columns.map((col, i) => {
let df = col.df;
if (col.header_title === 'Sr. No') return [];
let fieldname;
if (!df) {
fieldname = null;
} else {
fieldname = df.parent === this.doctype
? df.fieldname
: `${df.parent}:${df.fieldname}`;
}
return [
{
label: '',
fieldtype: 'Data',
default: col.header_title,
fieldname: `Column ${i}`,
read_only: 1
},
{
fieldtype: 'Column Break'
},
{
fieldtype: 'Autocomplete',
fieldname: i,
label: '',
max_items: Infinity,
options: [
{
label: __("Don't Import"),
value: "Don't Import"
}
].concat(column_picker_fields.get_fields_as_options()),
default: fieldname || "Don't Import",
change() {
changed.push(i);
}
},
{
fieldtype: 'Section Break'
}
];
});
// flatten the array
fields = fields.reduce((acc, curr) => [...acc, ...curr]);
let file_name = (this.frm.doc.import_file || '').split('/').pop();
fields = [
{
fieldtype: 'HTML',
fieldname: 'heading',
options: `
<div class="margin-top text-muted">
${__('Map columns from {0} to fields in {1}', [file_name.bold(), this.doctype.bold()])}
</div>
`
},
{
fieldtype: 'Section Break'
}
].concat(fields);
let dialog = new frappe.ui.Dialog({
title: __('Map Columns'),
fields,
primary_action: (values) => {
let changed_map = {};
changed.map(i => {
let header_row_index = i - 1;
changed_map[header_row_index] = values[i];
});
if (changed.length > 0) {
this.events.remap_column(changed_map);
}
dialog.hide();
}
});
dialog.$body.addClass('map-columns');
dialog.show();
}
is_row_imported(row) {
let serial_no = row[0].content;
return this.import_log.find(log => {
return log.success && log.row_indexes.includes(serial_no);
});
}
};

View file

@ -0,0 +1,2 @@
import './import_preview';
import './data_exporter';

View file

@ -13,7 +13,10 @@ frappe.ui.form.ControlAttach = frappe.ui.form.ControlData.extend({
<i class="fa fa-paperclip"></i>
<a class="attached-file-link" target="_blank"></a>
</div>
<a class="btn btn-xs btn-default clear-file">${__('Clear')}</a>
<div>
<a class="btn btn-xs btn-default" data-action="reload_attachment">${__('Reload File')}</a>
<a class="btn btn-xs btn-default" data-action="clear_attachment">${__('Clear')}</a>
</div>
</div>`)
.prependTo(me.input_area)
.toggle(false);
@ -21,13 +24,14 @@ frappe.ui.form.ControlAttach = frappe.ui.form.ControlData.extend({
this.set_input_attributes();
this.has_input = true;
this.$value.find(".clear-file").on("click", function() {
me.clear_attachment();
});
frappe.utils.bind_actions_with_object(this.$value, this);
this.toggle_reload_button();
},
clear_attachment: function() {
var me = this;
if(this.frm) {
me.parse_validate_and_set_in_model(null);
me.refresh();
me.frm.attachments.remove_attachment_by_filename(me.value, function() {
me.parse_validate_and_set_in_model(null);
me.refresh();
@ -41,15 +45,21 @@ frappe.ui.form.ControlAttach = frappe.ui.form.ControlData.extend({
this.refresh();
}
},
reload_attachment() {
if (this.file_uploader) {
this.file_uploader.uploader.upload_files();
}
},
on_attach_click() {
this.set_upload_options();
new frappe.ui.FileUploader(this.upload_options);
this.file_uploader = new frappe.ui.FileUploader(this.upload_options);
},
set_upload_options() {
let options = {
allow_multiple: false,
on_success: file => {
this.on_upload_complete(file);
this.toggle_reload_button();
}
};
@ -95,4 +105,9 @@ frappe.ui.form.ControlAttach = frappe.ui.form.ControlData.extend({
}
this.set_value(attachment.file_url);
},
toggle_reload_button() {
this.$value.find('[data-action="reload_attachment"]')
.toggle(this.file_uploader && this.file_uploader.uploader.files.length > 0);
}
});

View file

@ -14,7 +14,7 @@ frappe.ui.form.ControlAutocomplete = frappe.ui.form.ControlData.extend({
options = options.split('\n');
}
if (typeof options[0] === 'string') {
options = options.map(o => ({label: o, value: o}));
options = options.map(o => ({ label: o, value: o }));
}
this._data = options;
}
@ -24,12 +24,12 @@ frappe.ui.form.ControlAutocomplete = frappe.ui.form.ControlData.extend({
var me = this;
return {
minChars: 0,
maxItems: 99,
maxItems: this.df.max_items || 99,
autoFirst: true,
list: this.get_data(),
data: function(item) {
if(!(item instanceof Object)) {
var d = {"value": item};
if (!(item instanceof Object)) {
var d = { value: item };
item = d;
}
@ -38,9 +38,13 @@ frappe.ui.form.ControlAutocomplete = frappe.ui.form.ControlData.extend({
value: item.value
};
},
filter: function(item, input) {
let hay = item.label + item.value;
return Awesomplete.FILTER_CONTAINS(hay, input);
},
item: function(item) {
var d = this.get_item(item.value);
if(!d) {
if (!d) {
d = item;
}
@ -48,9 +52,9 @@ frappe.ui.form.ControlAutocomplete = frappe.ui.form.ControlData.extend({
d.label = d.value;
}
var _label = (me.translate_values) ? __(d.label) : d.label;
var html = "<strong>" + _label + "</strong>";
if(d.description && d.value!==d.description) {
var _label = me.translate_values ? __(d.label) : d.label;
var html = '<strong>' + _label + '</strong>';
if (d.description) {
html += '<br><span class="small">' + __(d.description) + '</span>';
}
@ -67,13 +71,21 @@ frappe.ui.form.ControlAutocomplete = frappe.ui.form.ControlData.extend({
},
setup_awesomplete() {
this.awesomplete = new Awesomplete(this.input, this.get_awesomplete_settings());
this.awesomplete = new Awesomplete(
this.input,
this.get_awesomplete_settings()
);
$(this.input_area).find('.awesomplete ul').css('min-width', '100%');
$(this.input_area)
.find('.awesomplete ul')
.css('min-width', '100%');
this.$input.on('input', frappe.utils.debounce(() => {
this.awesomplete.list = this.get_data();
}, 500));
this.$input.on(
'input',
frappe.utils.debounce(() => {
this.awesomplete.list = this.get_data();
}, 500)
);
this.$input.on('focus', () => {
if (!this.$input.val()) {
@ -87,6 +99,15 @@ frappe.ui.form.ControlAutocomplete = frappe.ui.form.ControlData.extend({
});
},
validate(value) {
let valid_values = this.awesomplete._list.map(d => d.value);
if (valid_values.includes(value)) {
return value;
} else {
return '';
}
},
get_data() {
	// suggestion list for the autocomplete; empty until options are set
	return this._data || [];
},

View file

@ -70,6 +70,7 @@ frappe.ui.form.ControlMultiCheck = frappe.ui.form.Control.extend({
if (option.danger) {
checkbox.find('.label-area').addClass('text-danger');
}
option.$checkbox = checkbox;
});
if(this.df.select_all) {
this.setup_select_all();
@ -138,7 +139,7 @@ frappe.ui.form.ControlMultiCheck = frappe.ui.form.Control.extend({
const column_size = this.get_column_size();
return $(`
<div class="checkbox unit-checkbox col-sm-${column_size}">
<label>
<label title="${option.description || ''}">
<input type="checkbox" data-unit="${option.value}">
</input>
<span class="label-area small" data-unit="${option.value}">${__(option.label)}</span>

View file

@ -92,11 +92,14 @@ frappe.ui.form.Dashboard = Class.extend({
show_progress: function(title, percent, message) {
this._progress_map = this._progress_map || {};
if (!this._progress_map[title]) {
const progress_chart = this.add_progress(title, percent, message);
let progress_chart = this._progress_map[title];
// create a new progress chart if it doesnt exist
// or the previous one got detached from the DOM
if (!progress_chart || progress_chart.parent().length == 0) {
progress_chart = this.add_progress(title, percent, message);
this._progress_map[title] = progress_chart;
}
let progress_chart = this._progress_map[title];
if (!$.isArray(percent)) {
percent = this.format_percent(title, percent);
}
@ -123,11 +126,7 @@ frappe.ui.form.Dashboard = Class.extend({
format_percent: function(title, percent) {
var width = cint(percent) < 1 ? 1 : cint(percent);
var progress_class = "";
if(width < 10)
progress_class = "progress-bar-danger";
if(width > 99.9)
progress_class = "progress-bar-success";
var progress_class = "progress-bar-success";
return [{
title: title,

View file

@ -571,15 +571,28 @@ frappe.ui.form.Timeline = class Timeline {
return;
}
let data_import_link = frappe.utils.get_form_link(
'Data Import Beta',
data.data_import,
true,
__('via Data Import')
);
// value changed in parent
if (data.changed && data.changed.length) {
const parts = [];
data.changed.every(function (p) {
if (p[0] === 'docstatus') {
if (p[2] == 1) {
out.push(me.get_version_comment(version, __('submitted this document')));
} else if (p[2] == 2) {
out.push(me.get_version_comment(version, __('cancelled this document')));
var parts = [];
data.changed.every(function(p) {
if (p[0]==='docstatus') {
if (p[2]==1) {
let message = data.data_import
? __('submitted this document {0}', [data_import_link])
: __('submitted this document');
out.push(me.get_version_comment(version, message));
} else if (p[2]==2) {
let message = data.data_import
? __('cancelled this document {0}', [data_import_link])
: __('cancelled this document');
out.push(me.get_version_comment(version, message));
}
} else {
p = p.map(frappe.utils.escape_html);
@ -598,13 +611,19 @@ frappe.ui.form.Timeline = class Timeline {
}
return parts.length < 3;
});
if (parts.length) {
out.push(me.get_version_comment(version, __('changed value of {0}', [parts.join(', ')])));
if(parts.length) {
let message;
if (data.data_import) {
message = __("changed value of {0} {1}", [parts.join(', ').bold(), data_import_link]);
} else {
message = __("changed value of {0}", [parts.join(', ').bold()]);
}
out.push(me.get_version_comment(version, message));
}
}
// value changed in table field
if(data.row_changed && data.row_changed.length) {
if (data.row_changed && data.row_changed.length) {
var parts = [], count = 0;
data.row_changed.every(function(row) {
row[3].every(function(p) {
@ -631,8 +650,13 @@ frappe.ui.form.Timeline = class Timeline {
return parts.length < 3;
});
if(parts.length) {
out.push(me.get_version_comment(version, __("changed values for {0}",
[parts.join(', ')])));
let message;
if (data.data_import) {
message = __("changed values for {0} {1}", [parts.join(', '), data_import_link]);
} else {
message = __("changed values for {0}", [parts.join(', ')]);
}
out.push(me.get_version_comment(version, message));
}
}

View file

@ -1391,6 +1391,28 @@ frappe.ui.form.Form = class FrappeForm {
}
return sum;
}
scroll_to_field(fieldname) {
let field = this.get_field(fieldname);
if (!field) return;
let $el = field.$wrapper;
// uncollapse section
if (field.section.is_collapsed()) {
field.section.collapse(false);
}
// scroll to input
frappe.utils.scroll_to($el);
// highlight input
$el.addClass('has-error');
setTimeout(() => {
$el.removeClass('has-error');
$el.find('input, select, textarea').focus();
}, 1000);
}
};
frappe.validated = 0;

View file

@ -216,7 +216,7 @@ frappe.ui.form.Sidebar = Class.extend({
callback: (r) => {
// docinfo will be synced
if(callback) callback(r.docinfo);
this.frm.timeline.refresh();
this.frm.timeline && this.frm.timeline.refresh();
this.frm.assign_to.refresh();
this.frm.attachments.refresh();
}

View file

@ -411,25 +411,7 @@ frappe.ui.form.Toolbar = Class.extend({
primary_action_label: __('Go'),
primary_action: ({ fieldname }) => {
dialog.hide();
let field = this.frm.get_field(fieldname);
if (!field) return;
let $el = field.$wrapper;
// uncollapse section
if (field.section.is_collapsed()) {
field.section.collapse(false);
}
// scroll to input
frappe.utils.scroll_to($el);
// highlight input
$el.addClass('has-error');
setTimeout(() => {
$el.removeClass('has-error');
$el.find('input, select, textarea').focus();
}, 1000);
this.frm.scroll_to_field(fieldname);
}
});

View file

@ -525,13 +525,8 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList {
const fieldname = df.fieldname;
const value = doc[fieldname] || '';
// listview_setting formatter
const formatters = this.settings.formatters;
const format = () => {
if (formatters && formatters[fieldname]) {
return formatters[fieldname](value, df, doc);
} else if (df.fieldtype === 'Code') {
if (df.fieldtype === 'Code') {
return value;
} else if (df.fieldtype === 'Percent') {
return `<div class="progress level" style="margin: 0px;">
@ -547,7 +542,13 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList {
const field_html = () => {
let html;
const _value = typeof value === 'string' ? frappe.utils.escape_html(value) : value;
let _value;
// listview_setting formatter
if (this.settings.formatters && this.settings.formatters[fieldname]) {
_value = this.settings.formatters[fieldname](value, df, doc);
} else {
_value = typeof value === 'string' ? frappe.utils.escape_html(value) : value;
}
if (df.fieldtype === 'Image') {
html = df.options ?

View file

@ -98,6 +98,7 @@ frappe.call = function(opts) {
freeze: opts.freeze,
freeze_message: opts.freeze_message,
headers: opts.headers || {},
error_handlers: opts.error_handlers || {},
// show_spinner: !opts.no_spinner,
async: opts.async,
url,
@ -324,9 +325,12 @@ frappe.request.cleanup = function(opts, r) {
return;
}
// global error handlers
// error handlers
let global_handlers = frappe.request.error_handlers[r.exc_type] || [];
let request_handler = opts.error_handlers ? opts.error_handlers[r.exc_type] : null;
let handlers = [].concat(global_handlers, request_handler).filter(Boolean);
if (r.exc_type) {
let handlers = frappe.request.error_handlers[r.exc_type] || [];
handlers.forEach(handler => {
handler(r);
});
@ -334,9 +338,8 @@ frappe.request.cleanup = function(opts, r) {
// show messages
if(r._server_messages && !opts.silent) {
let handlers = frappe.request.error_handlers[r.exc_type] || [];
// dont show server messages if their handlers exist
if (!handlers.length) {
// show server messages if no handlers exist
if (handlers.length === 0) {
r._server_messages = JSON.parse(r._server_messages);
frappe.hide_msgprint();
frappe.msgprint(r._server_messages);

View file

@ -1,6 +1,6 @@
<div class="filter-box">
<div class="list_filter row">
<div class="fieldname-select-area col-sm-4 form-group ui-front"></div>
<div class="fieldname-select-area col-sm-4 ui-front form-group"></div>
<div class="col-sm-2 form-group">
<select class="condition form-control">
{% for condition in conditions %}
@ -8,17 +8,16 @@
{% endfor %}
</select>
</div>
<div class="col-sm-6 col-xs-12">
<div class="filter-field pull-left" style="width: calc(100% - 70px)"></div>
<div class="filter-actions pull-left">
<a class="set-filter-and-run btn btn-sm btn-primary pull-left">
<i class=" fa fa-check visible-xs"></i>
<span class="hidden-xs">{%= __("Apply") %}</span></a>
<a class="small grey remove-filter pull-left">
<i class="octicon octicon-trashcan visible-xs"></i>
<span class="hidden-xs">{%= __("Remove") %}</span></a>
<div class="col-sm-4 form-group">
<div class="filter-field"></div>
</div>
<div class="col-sm-2">
<div class="filter-actions">
<a class="set-filter-and-run btn btn-sm btn-primary">
<span>{%= __("Apply") %}</span></a>
<a class="small grey remove-filter">
<span>{%= __("Remove") %}</span></a>
</div>
<div class="clearfix"></div>
</div>
</div>
</div>

View file

@ -725,6 +725,19 @@ Object.assign(frappe.utils, {
},
is_rtl() {
return ["ar", "he", "fa"].includes(frappe.boot.lang);
},
bind_actions_with_object($el, object) {
// remove previously bound event
$($el).off('click.class_actions');
// attach new event
$($el).on('click.class_actions', '[data-action]', e => {
let $target = $(e.currentTarget);
let action = $target.data('action');
let method = object[action];
method ? object[action](e, $target) : null;
});
return $el;
}
});

View file

@ -772,21 +772,20 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView {
get_columns_for_picker() {
let out = {};
const standard_fields_filter = df =>
!in_list(frappe.model.no_value_type, df.fieldtype) && !df.report_hide;
const standard_fields_filter = df => !in_list(frappe.model.no_value_type, df.fieldtype);
let doctype_fields = frappe.meta.get_docfields(this.doctype).filter(standard_fields_filter);
doctype_fields = [{
label: __('ID'),
fieldname: 'name',
fieldtype: 'Data'
fieldtype: 'Data',
reqd: 1
}].concat(doctype_fields, frappe.model.std_fields);
out[this.doctype] = doctype_fields;
const table_fields = frappe.meta.get_table_fields(this.doctype)
.filter(df => !df.hidden);
const table_fields = frappe.meta.get_table_fields(this.doctype);
table_fields.forEach(df => {
const cdt = df.options;

View file

@ -31,9 +31,17 @@
float: none;
}
.filter-box {
.frappe-list .filter-box {
border-bottom: 1px solid @border-color;
padding: 10px 15px 3px;
padding: 10px 15px;
}
.filter-box {
.form-group {
@media (min-width: @screen-xs) {
margin-bottom: 0;
}
}
.remove-filter {
margin-top: 6px;
@ -41,11 +49,9 @@
}
.filter-field {
padding-right: 15px;
width: calc(100% - 36px);
.frappe-control {
position: relative;
margin-bottom: 0;
}
}
}
@ -56,8 +62,6 @@
padding-right: 0px;
}
.filter-field {
width: 65% !important;
.frappe-control {
position: relative;
}

View file

@ -984,3 +984,11 @@ body[data-route^="Form/Communication"] textarea[data-fieldname="subject"] {
.followed-by-label{
margin-top: 30px;
}
.map-columns .form-section {
padding: 0 7px 7px;
}
.map-columns .form-section:first-child {
padding-top: 7px;
}

View file

@ -250,10 +250,11 @@ def _add_test(app, path, filename, verbose, test_suite=None, ui_tests=False):
if os.path.basename(os.path.dirname(path))=="doctype":
txt_file = os.path.join(path, filename[5:].replace(".py", ".json"))
with open(txt_file, 'r') as f:
doc = json.loads(f.read())
doctype = doc["name"]
make_test_records(doctype, verbose)
if os.path.exists(txt_file):
with open(txt_file, 'r') as f:
doc = json.loads(f.read())
doctype = doc["name"]
make_test_records(doctype, verbose)
test_suite.addTest(unittest.TestLoader().loadTestsFromModule(module))
@ -417,4 +418,4 @@ def get_test_record_log():
else:
frappe.flags.test_record_log = []
return frappe.flags.test_record_log
return frappe.flags.test_record_log

View file

@ -670,3 +670,11 @@ def get_safe_filters(filters):
pass
return filters
def create_batch(iterable, batch_size):
	"""Yield successive slices of ``iterable``, each at most ``batch_size`` long.

	Args:
		iterable: a sized, sliceable sequence (list, tuple, string, ...);
			despite the name, a plain generator will not work because
			``len()`` and slicing are used
		batch_size (int): maximum number of items per batch

	Yields:
		Slices of ``iterable``; every batch has ``batch_size`` items except
		possibly the last, which may be shorter.
	"""
	# slicing already clamps to the end of the sequence, so no min() is needed
	for start in range(0, len(iterable), batch_size):
		yield iterable[start:start + batch_size]

View file

@ -5,6 +5,7 @@ from __future__ import unicode_literals
import frappe
import openpyxl
import xlrd
import re
from openpyxl.styles import Font
from openpyxl import load_workbook
@ -95,3 +96,19 @@ def read_xlsx_file_from_attached_file(file_url=None, fcontent=None, filepath=Non
tmp_list.append(cell.value)
rows.append(tmp_list)
return rows
def read_xls_file_from_attached_file(content):
	"""Parse the first sheet of a legacy .xls file into a list of row-value lists.

	Args:
		content (bytes): raw contents of the .xls file

	Returns:
		list: one list of cell values per row of the first sheet
	"""
	workbook = xlrd.open_workbook(file_contents=content)
	first_sheet = workbook.sheets()[0]
	return [first_sheet.row_values(row_index) for row_index in range(first_sheet.nrows)]
def build_xlsx_response(data, filename):
	"""Prepare frappe.response to serve ``data`` as a downloadable .xlsx file.

	Args:
		data: rows to write into the workbook (passed through to make_xlsx)
		filename (str): download file name, without the .xlsx extension
	"""
	xlsx_content = make_xlsx(data, filename).getvalue()
	# write out response as a xlsx type
	frappe.response['filename'] = filename + '.xlsx'
	frappe.response['filecontent'] = xlsx_content
	frappe.response['type'] = 'binary'

View file

@ -25,7 +25,7 @@
"express": "^4.16.2",
"fast-deep-equal": "^2.0.1",
"frappe-charts": "^1.3.0",
"frappe-datatable": "^1.13.5",
"frappe-datatable": "^1.14.0",
"frappe-gantt": "^0.1.0",
"fuse.js": "^3.2.0",
"highlight.js": "^9.12.0",

View file

@ -64,3 +64,4 @@ sqlparse==0.2.4
Pygments==2.2.0
frontmatter
PyYAML==3.13
xlrd

View file

@ -1770,10 +1770,10 @@ frappe-charts@^1.3.0:
resolved "https://registry.yarnpkg.com/frappe-charts/-/frappe-charts-1.3.0.tgz#9ed033fa64833906bba16554187fa2f8a3a54ef6"
integrity sha512-hdLv4fOIVgIL5eV9KYlsQaEpxkcJvuEVVDJewJL8PG0ySPy5EEiG5KZGL2uj7YegVWbtsqJ4Oq/74mjgQoMdag==
frappe-datatable@^1.13.5:
version "1.13.5"
resolved "https://registry.yarnpkg.com/frappe-datatable/-/frappe-datatable-1.13.5.tgz#6f507fe7a84c22b1eab6b08e7b6fccbcdf7bb936"
integrity sha512-k3Y8ScfxSD6Kj3Ch98kY2EWBnHUm0oPuPZonkslq4w5689iUhduy/ZynmLgOYDVjXXajBZG3oh5ycnx1gCwY5Q==
frappe-datatable@^1.14.0:
version "1.14.0"
resolved "https://registry.yarnpkg.com/frappe-datatable/-/frappe-datatable-1.14.0.tgz#8e5a0f61764fd634ae01f6767ce055b04ec5c3e1"
integrity sha512-rxePE/UpYFnWzAFIpiLrVGFHxh+fIbpDI98gAZfraZOgO4Dz6qDcJMaeSKDosQ1Zq6imt15KyKoaePXNpsCVfg==
dependencies:
hyperlist "^1.0.0-beta"
lodash "^4.17.5"