- Parse template and build self.rows and self.columns
- Store header_row data in columns along with df and skip_import
- Use self.columns and self.rows without passing them explicitly
- Remove the ability to edit rows
- Show only first 10 rows as preview
- Build doc with default values set
- Show Dashboard progress when coming back from another view
- Better ETA message inspired by Apple
- Action buttons "Export Errored Rows" and "Go to DocType List"
- Import status "Imported x out of y records"
- Success / Failure column in import log
This commit is contained in:
Faris Ansari 2019-09-29 19:16:12 +05:30
parent 4116e17bb3
commit 779084991e
5 changed files with 264 additions and 251 deletions

View file

@ -26,9 +26,7 @@ MAX_ROWS_IN_PREVIEW = 10
class Importer:
def __init__(self, doctype, data_import=None, file_path=None, content=None):
self.doctype = doctype
self.template_options = frappe._dict(
{"remap_column": {}}
)
self.template_options = frappe._dict({"remap_column": {}})
if data_import:
self.data_import = data_import
@ -42,9 +40,14 @@ class Importer:
self.data = None
# used to store date formats guessed from data rows per column
self._guessed_date_formats = {}
# used to store eta during import
self.last_eta = 0
# used to collect warnings during template parsing
# and show them to user
self.warnings = []
self.meta = frappe.get_meta(doctype)
self.prepare_content(file_path, content)
self.parse_data_from_template()
def prepare_content(self, file_path, content):
if self.data_import:
@ -128,91 +131,89 @@ class Importer:
self.header_row = header_row
def get_data_for_import_preview(self):
out = self.get_parsed_data_from_template()
# prepare fields
fields = []
for df in out.fields:
header_title = df.header_title
skip_import = df.skip_import
if isinstance(df, DocField):
field = df.as_dict()
else:
field = df
field.update({"header_title": header_title, "skip_import": skip_import})
fields.append(field)
out.fields = fields
out = frappe._dict()
out.data = list(self.rows)
out.columns = self.columns
out.warnings = self.warnings
if len(out.data) > MAX_ROWS_IN_PREVIEW:
out.data = out.data[:MAX_ROWS_IN_PREVIEW]
out.max_rows_exceeded = True
out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW
return out
def get_parsed_data_from_template(self):
fields, fields_warnings = self.parse_fields_from_header_row()
formats, formats_warnings = self.parse_formats_from_first_10_rows()
fields, data = self.add_serial_no_column(fields, self.data)
def parse_data_from_template(self):
columns = self.parse_columns_from_header_row()
columns, data = self.add_serial_no_column(columns, self.data)
warnings = fields_warnings + formats_warnings
self.columns = columns
self.rows = data
return frappe._dict(
header_row=self.header_row, fields=fields, data=data, warnings=warnings
)
def parse_fields_from_header_row(self):
def parse_columns_from_header_row(self):
remap_column = self.template_options.remap_column
fields = []
warnings = []
columns = []
df_by_labels_and_fieldnames = self.build_fields_dict_for_column_matching()
for i, header_title in enumerate(self.header_row):
header_row_index = str(i)
column_number = str(i + 1)
skip_import = False
fieldname = remap_column.get(header_row_index)
if fieldname and fieldname != "Don't Import":
df = df_by_labels_and_fieldnames.get(fieldname)
warnings.append(
self.warnings.append(
{
"col": column_number,
"message": _("Mapping column {0} to field {1}").format(
frappe.bold(header_title or "<i>Untitled Column</i>"), frappe.bold(df.label)
),
"type": "info",
}
)
else:
df = df_by_labels_and_fieldnames.get(header_title)
if not df:
field = frappe._dict(header_title=header_title, skip_import=True)
skip_import = True
else:
field = df
field.header_title = header_title
field.skip_import = False
skip_import = False
if fieldname == "Don't Import":
field.skip_import = True
warnings.append(
skip_import = True
self.warnings.append(
{
"col": column_number,
"message": _("Skipping column {0}").format(frappe.bold(header_title)),
"type": "info",
}
)
elif header_title and not df:
warnings.append(
self.warnings.append(
{
"col": column_number,
"message": _("Cannot match column {0} with any field").format(
frappe.bold(header_title)
),
"type": "info",
}
)
elif not header_title and not df:
warnings.append({"col": column_number, "message": _("Skipping Untitled Column")})
fields.append(field)
self.warnings.append(
{"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"}
)
return fields, warnings
columns.append(
frappe._dict(
df=df,
skip_import=skip_import,
header_title=header_title,
column_number=column_number,
index=i,
)
)
return columns
def build_fields_dict_for_column_matching(self):
"""
@ -301,30 +302,20 @@ class Importer:
out.append(df)
return out
def parse_formats_from_first_10_rows(self):
"""
Returns a list of column descriptors for columns that might need parsing.
For e.g if it is a Date column return the Date format
[
[['Data']],
[['Date', '%m/%d/%y']],
[['Currency', '#,###.##']],
...
]
"""
formats = []
return formats, []
def add_serial_no_column(self, columns, data):
columns_with_serial_no = [
frappe._dict({"header_title": "Sr. No", "skip_import": True})
] + columns
def add_serial_no_column(self, fields, data):
fields_with_serial_no = [
frappe._dict({"label": "Sr. No", "skip_import": True, "parent": None})
] + fields
# update index for each column
for i, col in enumerate(columns_with_serial_no):
col.index = i
data_with_serial_no = []
for i, row in enumerate(data):
data_with_serial_no.append([self.row_index_map[i] + 1] + row)
return fields_with_serial_no, data_with_serial_no
return columns_with_serial_no, data_with_serial_no
def parse_value(self, value, df):
# convert boolean values to 0 or 1
@ -385,24 +376,19 @@ class Importer:
frappe.flags.in_import = True
frappe.flags.mute_emails = self.data_import.mute_emails
out = self.get_parsed_data_from_template()
fields = out["fields"]
data = out["data"]
warnings = []
# prepare a map for missing link field values
self.prepare_missing_link_field_values(fields, data)
self.prepare_missing_link_field_values()
# parse import data
payloads = self.get_payloads_for_import(fields, data)
# collect warnings
for payload in payloads:
warnings += payload.warnings
# parse docs from rows
payloads = self.get_payloads_for_import()
# dont import if there are non-ignorable warnings
warnings = [w for w in self.warnings if w.get("type") != "info"]
if warnings:
self.data_import.db_set("template_warnings", json.dumps(warnings))
frappe.publish_realtime("data_import_refresh")
frappe.publish_realtime(
"data_import_refresh", {"data_import": self.data_import.name}
)
return
# setup import log
@ -422,7 +408,7 @@ class Importer:
imported_rows += log.row_indexes
# start import
print("Importing {0} rows...".format(len(data)))
print("Importing {0} rows...".format(len(self.rows)))
total_payload_count = len(payloads)
batch_size = frappe.conf.data_import_batch_size or 1000
@ -440,7 +426,12 @@ class Importer:
if total_payload_count > 5:
frappe.publish_realtime(
"data_import_progress",
{"current": current_index, "total": total_payload_count, "skipping": True},
{
"current": current_index,
"total": total_payload_count,
"skipping": True,
"data_import": self.data_import.name,
},
)
continue
@ -458,6 +449,7 @@ class Importer:
"current": current_index,
"total": total_payload_count,
"docname": doc.name,
"data_import": self.data_import.name,
"success": True,
"row_indexes": row_indexes,
"eta": eta,
@ -496,24 +488,25 @@ class Importer:
frappe.flags.in_import = False
frappe.flags.mute_emails = False
frappe.publish_realtime("data_import_refresh")
frappe.publish_realtime("data_import_refresh", {"data_import": self.data_import.name})
def get_payloads_for_import(self, fields, data):
def get_payloads_for_import(self):
payloads = []
# make a copy
data = list(self.rows)
while data:
doc, rows, data, warnings = self.parse_next_row_for_import(fields, data)
payloads.append(frappe._dict(doc=doc, rows=rows, warnings=warnings))
doc, rows, data = self.parse_next_row_for_import(data)
payloads.append(frappe._dict(doc=doc, rows=rows))
return payloads
def parse_next_row_for_import(self, fields, data):
def parse_next_row_for_import(self, data):
"""
Parses rows that make up a doc. A doc maybe built from a single row or multiple rows.
Returns the doc, rows, data without the rows and warnings.
Returns the doc, rows, and data without the rows.
"""
doc = {}
warnings = []
mandatory_fields = []
doctypes = set([df.parent for df in fields if df.parent])
doctypes = set([col.df.parent for col in self.columns if col.df and col.df.parent])
# first row is included by default
first_row = data[0]
@ -524,7 +517,7 @@ class Importer:
# subsequent rows either dont have any parent value set
# or have the same value as the parent
# we include a row if either of conditions match
parent_column_index = self.get_first_parent_column_index(fields)
parent_column_index = self.get_first_parent_column_index()
parent_value = first_row[parent_column_index]
data_without_first_row = data[1:]
for d in data_without_first_row:
@ -537,16 +530,26 @@ class Importer:
rows.append(d)
def get_column_indexes(doctype):
return [i for i, df in enumerate(fields) if df.parent == doctype]
return [
col.index
for col in self.columns
if not col.skip_import and col.df and col.df.parent == doctype
]
def validate_value(value, df):
if df.fieldtype == "Select" and value not in df.get_select_options():
options_string = ", ".join([frappe.bold(d) for d in df.get_select_options()])
msg = _("Value must be one of {0}").format(options_string)
warnings.append(
{"row": row_number, "field": df.as_dict(convert_dates_to_str=True), "message": msg}
)
return False
if df.fieldtype == "Select":
select_options = df.get_select_options()
if select_options and value not in select_options:
options_string = ", ".join([frappe.bold(d) for d in select_options])
msg = _("Value must be one of {0}").format(options_string)
self.warnings.append(
{
"row": row_number,
"field": df.as_dict(convert_dates_to_str=True),
"message": msg,
}
)
return False
elif df.fieldtype == "Link":
d = self.get_missing_link_field_values(df.options)
@ -554,7 +557,7 @@ class Importer:
msg = _("Value {0} missing for {1}").format(
frappe.bold(value), frappe.bold(df.options)
)
warnings.append(
self.warnings.append(
{
"row": row_number,
"field": df.as_dict(convert_dates_to_str=True),
@ -566,19 +569,21 @@ class Importer:
return value
def parse_doc(doctype, docfields, values, row_number):
doc = {}
for index, (df, value) in enumerate(zip(docfields, values)):
if df.get("skip_import", False):
continue
# new_doc returns a dict with default values set
doc = frappe.new_doc(doctype, as_dict=True)
# remove standard fields and __islocal
for key in frappe.model.default_fields + ('__islocal',):
doc.pop(key, None)
for index, (df, value) in enumerate(zip(docfields, values)):
if value in INVALID_VALUES:
value = None
if validate_value(value, df):
value = validate_value(value, df)
if value:
doc[df.fieldname] = self.parse_value(value, df)
check_mandatory_fields(doctype, doc, row_number)
return doc
def check_mandatory_fields(doctype, doc, row_number):
@ -591,7 +596,7 @@ class Importer:
return
if len(fields) == 1:
warnings.append(
self.warnings.append(
{
"row": row_number,
"message": _("{0} is a mandatory field").format(fields[0].label),
@ -599,7 +604,7 @@ class Importer:
)
else:
fields_string = ", ".join([df.label for df in fields])
warnings.append(
self.warnings.append(
{"row": row_number, "message": _("{0} are mandatory fields").format(fields_string)}
)
@ -619,7 +624,8 @@ class Importer:
# skip values if all of them are empty
continue
docfields = [fields[i] for i in column_indexes]
columns = [self.columns[i] for i in column_indexes]
docfields = [col.df for col in columns]
doc = parse_doc(doctype, docfields, values, row_number)
parsed_docs[doctype] = parsed_docs.get(doctype, [])
parsed_docs[doctype].append(doc)
@ -635,17 +641,17 @@ class Importer:
table_field = table_dfs[0]
doc[table_field.fieldname] = docs
return doc, rows, data[len(rows) :], warnings
return doc, rows, data[len(rows) :]
def get_first_parent_column_index(self, fields):
def get_first_parent_column_index(self):
"""
Returns the first column's index which must be one of the parent columns
"""
# find a parent column
parent_column_index = -1
for i, df in enumerate(fields):
if not df.get("skip_import", False) and df.parent == self.doctype:
parent_column_index = i
for col in self.columns:
if not col.skip_import and col.df and col.df.parent == self.doctype:
parent_column_index = col.index
break
return parent_column_index
@ -659,9 +665,11 @@ class Importer:
def insert_record(self, doc):
self.create_missing_linked_records(doc)
new_doc = frappe.new_doc(self.doctype)
new_doc.update(doc)
# name shouldn't be set when inserting a new record
doc.update({"doctype": self.doctype, "name": None})
new_doc = frappe.get_doc(doc)
new_doc.set("name", None)
new_doc.insert()
if self.meta.is_submittable and self.data_import.submit_after_import:
new_doc.submit()
@ -673,15 +681,17 @@ class Importer:
document automatically if it has only one mandatory field
"""
link_values = []
def get_link_fields(doc, doctype):
for fieldname, value in doc.items():
meta = frappe.get_meta(doctype)
df = meta.get_field(fieldname)
if df.fieldtype == 'Link':
if df.fieldtype == "Link":
link_values.append([df.options, value])
elif df.fieldtype in table_fields:
for row in value:
get_link_fields(row, df.options)
get_link_fields(doc, self.doctype)
for link_doctype, link_value in link_values:
@ -701,7 +711,7 @@ class Importer:
def update_record(self, doc):
id_fieldname = self.get_id_fieldname()
id_value = doc[id_fieldname]
existing_doc = frappe.get_doc(self.doctype, {id_fieldname: id_value})
existing_doc = frappe.get_doc(self.doctype, id_value)
existing_doc.flags.via_data_import = self.data_import.name
existing_doc.update(doc)
existing_doc.save()
@ -723,43 +733,37 @@ class Importer:
row_indexes = list(set(row_indexes))
row_indexes.sort()
out = self.get_parsed_data_from_template()
header_row = out["header_row"]
data = out["data"]
header_row = [col.header_title for col in self.columns[1:]]
rows = [header_row]
rows += [row[1:] for row in data if row[0] in row_indexes]
rows += [row[1:] for row in self.rows if row[0] in row_indexes]
build_csv_response(rows, self.doctype)
def get_missing_link_field_values(self, doctype):
return self.missing_link_values.get(doctype, {})
def prepare_missing_link_field_values(self, fields, data):
link_column_indexes = [i for i, df in enumerate(fields) if df.fieldtype == "Link"]
def has_one_mandatory_field(doctype):
meta = frappe.get_meta(doctype)
# get mandatory fields with default not set
mandatory_fields = [df for df in meta.fields if df.reqd and not df.default]
mandatory_fields_count = len(mandatory_fields)
if meta.autoname and meta.autoname.lower() == "prompt":
mandatory_fields_count += 1
return mandatory_fields_count == 1
def prepare_missing_link_field_values(self):
columns = self.columns
rows = self.rows
link_column_indexes = [
col.index for col in columns if col.df and col.df.fieldtype == "Link"
]
self.missing_link_values = {}
for index in link_column_indexes:
df = fields[index]
column_values = [row[index] for row in data]
col = columns[index]
column_values = [row[index] for row in rows]
values = set([v for v in column_values if v not in INVALID_VALUES])
doctype = df.options
doctype = col.df.options
missing_values = [value for value in values if not frappe.db.exists(doctype, value)]
if self.missing_link_values.get(doctype):
self.missing_link_values[doctype].missing_values += missing_values
else:
self.missing_link_values[doctype] = frappe._dict(
missing_values=missing_values, one_mandatory=has_one_mandatory_field(doctype), df=df
missing_values=missing_values,
one_mandatory=self.has_one_mandatory_field(doctype),
df=col.df,
)
def get_id_fieldname(self):
@ -778,6 +782,15 @@ class Importer:
self.last_eta = eta
return self.last_eta
def has_one_mandatory_field(self, doctype):
meta = frappe.get_meta(doctype)
# get mandatory fields with default not set
mandatory_fields = [df for df in meta.fields if df.reqd and not df.default]
mandatory_fields_count = len(mandatory_fields)
if meta.autoname and meta.autoname.lower() == "prompt":
mandatory_fields_count += 1
return mandatory_fields_count == 1
DATE_FORMATS = [
r"%d-%m-%Y",

View file

@ -3,31 +3,40 @@
frappe.ui.form.on('Data Import Beta', {
setup(frm) {
frappe.realtime.on('data_import_refresh', () => {
frappe.realtime.on('data_import_refresh', ({ data_import }) => {
if (data_import !== frm.doc.name) return;
frappe.model.clear_doc('Data Import Beta', frm.doc.name);
frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => {
frm.refresh();
});
});
frappe.realtime.on('data_import_progress', data => {
if (data.data_import !== frm.doc.name) {
return;
}
let percent = Math.floor((data.current * 100) / data.total);
let seconds = Math.floor(data.eta);
let minutes = Math.floor(data.eta / 60);
let eta_message =
data.eta < 60
? __('ETA {0} seconds', [Math.floor(data.eta)])
: __('ETA {0} minutes', [Math.floor(data.eta / 60)]);
seconds < 60
? __('About {0} seconds remaining', [seconds])
: minutes === 1
? __('About {0} minute remaining', [minutes])
: __('About {0} minutes remaining', [minutes]);
let message;
if (data.success) {
let message_args = [data.docname, data.current, data.total];
let message_args = [data.current, data.total, eta_message];
message =
frm.doc.import_type === 'Insert New Records'
? __('Importing {0} ({1} of {2})', message_args)
: __('Updating {0} ({1} of {2})', message_args);
? __('Importing {0} of {1}, {2}', message_args)
: __('Updating {0} of {1}, {2}', message_args);
}
if (data.skipping) {
message = __('Skipping ({1} of {2})', [data.current, data.total]);
message = __('Skipping {0} of {1}, {2}', [data.current, data.total, eta_message]);
}
frm.dashboard.show_progress(__('Import Progress'), percent, message);
frm.page.set_indicator(eta_message, 'orange');
frm.page.set_indicator(__('In Progress'), 'orange');
// hide progress when complete
if (data.current === data.total) {
@ -59,14 +68,19 @@ frappe.ui.form.on('Data Import Beta', {
frm.trigger('show_import_log');
frm.trigger('show_import_warnings');
frm.trigger('toggle_submit_after_import');
frm.trigger('show_import_status');
if (frm.doc.import_log && frm.doc.import_log !== '[]') {
frm.disable_save();
if (frm.doc.status === 'Partial Success') {
frm.add_custom_button(__('Export Errored Rows'),
() => frm.trigger('export_errored_rows'));
}
if (frm.doc.status === 'Success') {
frm.events.show_success_message(frm);
} else {
if (frm.doc.status.includes('Success')) {
frm.add_custom_button(__('Go to {0} List', [frm.doc.reference_doctype]),
() => frappe.set_route('List', frm.doc.reference_doctype));
}
if (frm.doc.status !== 'Success') {
if (!frm.is_new() && frm.doc.import_file) {
let label = frm.doc.status === 'Pending' ? __('Start Import') : __('Retry');
frm.page.set_primary_action(label, () => frm.events.start_import(frm));
@ -74,30 +88,40 @@ frappe.ui.form.on('Data Import Beta', {
frm.page.set_primary_action(__('Save'), () => frm.save());
}
}
frm.page.set_indicator(
__(frm.doc.status),
frm.doc.status === 'Success' ? 'green' : 'grey'
);
},
show_success_message(frm) {
show_import_status(frm) {
let import_log = JSON.parse(frm.doc.import_log || '[]');
let successful_records = import_log.filter(log => log.success);
let link = `<a href="#List/${frm.doc.reference_doctype}">
${__('{0} List', [frm.doc.reference_doctype])}
</a>`;
let message_args = [successful_records.length, link];
let failed_records = import_log.filter(log => !log.success);
if (successful_records.length === 0) return;
let message;
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records. Go to {1}', message_args)
: __('Successfully imported {0} record. Go to {1}', message_args);
if (failed_records.length === 0) {
let message_args = [successful_records.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records.', message_args)
: __('Successfully imported {0} record.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records.', message_args)
: __('Successfully updated {0} record.', message_args);
}
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records. Go to {1}', message_args)
: __('Successfully updated {0} record. Go to {1}', message_args);
let message_args = [successful_records.length, import_log.length];
if (frm.doc.import_type === 'Insert New Records') {
message =
successful_records.length > 1
? __('Successfully imported {0} records out of {1}.', message_args)
: __('Successfully imported {0} record out of {1}.', message_args);
} else {
message =
successful_records.length > 1
? __('Successfully updated {0} records out of {1}.', message_args)
: __('Successfully updated {0} record out of {1}.', message_args);
}
}
frm.dashboard.set_headline(message);
},
@ -196,21 +220,17 @@ frappe.ui.form.on('Data Import Beta', {
frm.set_value('template_options', JSON.stringify(template_options));
frm.save().then(() => frm.trigger('import_file'));
},
export_errored_rows() {
open_url_post('/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', {
data_import_name: frm.doc.name
});
},
show_warnings() {
frm.scroll_to_field('import_warnings');
}
}
});
});
},
export_errored_rows(frm) {
open_url_post('/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', {
data_import_name: frm.doc.name
});
},
show_import_warnings(frm, preview_data) {
let warnings = JSON.parse(frm.doc.template_warnings || '[]');
warnings = warnings.concat(preview_data.warnings || []);
@ -299,13 +319,13 @@ frappe.ui.form.on('Data Import Beta', {
.map(JSON.parse)
.map(m => {
let title = m.title ? `<strong>${m.title}</strong>` : '';
let message = m.message ? `<p>${m.message}</p>` : '';
let message = m.message ? `<div>${m.message}</div>` : '';
return title + message;
})
.join('');
let id = frappe.dom.get_unique_id();
html = `${messages}
<button class="btn btn-default btn-xs" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
<button class="btn btn-default btn-xs margin-top" type="button" data-toggle="collapse" data-target="#${id}" aria-expanded="false" aria-controls="${id}">
${__('Show Traceback')}
</button>
<div class="collapse margin-top" id="${id}">
@ -314,9 +334,16 @@ frappe.ui.form.on('Data Import Beta', {
</div>
</div>`;
}
let indicator_color = log.success ? 'green' : 'red';
let title = log.success ? __('Success') : __('Failure');
return `<tr>
<td>${log.row_indexes.join(', ')}</td>
<td>${html}</td>
<td>
<div class="indicator ${indicator_color}">${title}</div>
</td>
<td>
${html}
</td>
</tr>`;
})
.join('');
@ -324,8 +351,9 @@ frappe.ui.form.on('Data Import Beta', {
frm.get_field('import_log_preview').$wrapper.html(`
<table class="table table-bordered">
<tr class="text-muted">
<th width="30%">${__('Row Number')}</th>
<th width="70%">${__('Message')}</th>
<th width="10%">${__('Row Number')}</th>
<th width="10%">${__('Status')}</th>
<th width="80%">${__('Message')}</th>
</tr>
${rows}
</table>

View file

@ -34,7 +34,9 @@ class DataImportBeta(Document):
def start_import(self):
if frappe.utils.scheduler.is_scheduler_inactive():
frappe.throw(_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive"))
frappe.throw(
_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
)
enqueued_jobs = [d.get("job_name") for d in get_info()]
@ -46,37 +48,15 @@ class DataImportBeta(Document):
event="data_import",
job_name=self.name,
data_import=self.name,
now=True
now=True,
)
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
def create_missing_link_values(self, missing_link_values):
docs = []
for d in missing_link_values:
d = frappe._dict(d)
if not d.has_one_mandatory_field:
continue
doctype = d.doctype
values = d.missing_values
meta = frappe.get_meta(doctype)
# find the autoname field
if meta.autoname and meta.autoname.startswith("field:"):
autoname_field = meta.autoname[len("field:") :]
else:
autoname_field = "name"
for value in values:
new_doc = frappe.new_doc(doctype)
new_doc.set(autoname_field, value)
docs.append(new_doc.insert())
return docs
def export_errored_rows(self):
return self.get_importer().export_errored_rows()
def get_importer(self):
return Importer(self.reference_doctype, data_import=self)
def start_import(data_import):
"""This method runs in background job"""

View file

@ -27,8 +27,6 @@ frappe.data_import.ImportPreview = class ImportPreview {
}
refresh() {
this.header_row = this.preview_data.header_row;
this.fields = this.preview_data.fields;
this.data = this.preview_data.data;
this.make_wrapper();
this.prepare_columns();
@ -57,62 +55,52 @@ frappe.data_import.ImportPreview = class ImportPreview {
}
prepare_columns() {
this.columns = this.fields.map((df, i) => {
this.columns = this.preview_data.columns.map((col, i) => {
let df = col.df;
let column_width = 120;
let header_row_index = i - 1;
if (df.skip_import) {
let is_sr = df.label === 'Sr. No';
if (col.header_title === 'Sr. No') {
return {
id: 'srno',
name: 'Sr. No',
content: 'Sr. No',
editable: false,
focusable: false,
align: 'left',
width: 60
}
}
if (col.skip_import) {
let show_warnings_button = `<button class="btn btn-xs" data-action="show_column_warning" data-col="${i}">
<i class="octicon octicon-stop"></i></button>`;
if (!df.parent) {
if (!col.df) {
// increase column width for unidentified columns
column_width += 50
}
let column_title = is_sr
? df.label
: `<span class="indicator red">
${df.header_title || `<i>${__('Untitled Column')}</i>`}
${!df.parent ? show_warnings_button : ''}
</span>`;
let column_title = `<span class="indicator red">
${col.header_title || `<i>${__('Untitled Column')}</i>`}
${!col.df ? show_warnings_button : ''}
</span>`;
return {
id: frappe.utils.get_random(6),
name: df.label,
name: col.header_title || df.label,
content: column_title,
skip_import: true,
editable: false,
focusable: false,
align: 'left',
header_row_index,
width: is_sr ? 60 : column_width,
format: (value, row, column, data) => {
let html = `<div class="text-muted">${value}</div>`;
if (is_sr && this.is_row_imported(row)) {
html = `
<div class="flex justify-between">${SVG_ICONS['checkbox-circle-line'] +
html}</div>
`;
}
return html;
}
width: column_width,
format: value => `<div class="text-muted">${value}</div>`
};
}
let column_title = df.label;
if (this.doctype !== df.parent) {
column_title = `${df.label} (${df.parent})`;
}
let meta = frappe.get_meta(this.doctype);
if (meta.autoname === `field:${df.fieldname}`) {
column_title = `ID (${df.label})`;
}
return {
id: df.fieldname,
name: column_title,
content: `<span class="indicator green">${df.header_title || df.label}</span>`,
name: col.header_title,
content: `<span class="indicator green">${col.header_title || df.label}</span>`,
df: df,
editable: false,
align: 'left',
header_row_index,
width: column_width
};
});
@ -215,11 +203,11 @@ frappe.data_import.ImportPreview = class ImportPreview {
}
export_errored_rows() {
this.events.export_errored_rows();
this.frm.trigger('export_errored_rows');
}
show_warnings() {
this.events.show_warnings();
this.frm.scroll_to_field('import_warnings');
}
show_column_warning(_, $target) {
@ -234,11 +222,12 @@ frappe.data_import.ImportPreview = class ImportPreview {
doctype: this.doctype
});
let changed = [];
let fields = this.fields.map((df, i) => {
if (df.label === 'Sr. No') return [];
let fields = this.preview_data.columns.map((col, i) => {
let df = col.df;
if (col.header_title === 'Sr. No') return [];
let fieldname;
if (df.skip_import) {
if (!df) {
fieldname = null;
} else {
fieldname = df.parent === this.doctype
@ -249,7 +238,7 @@ frappe.data_import.ImportPreview = class ImportPreview {
{
label: '',
fieldtype: 'Data',
default: df.header_title,
default: col.header_title,
fieldname: `Column ${i}`,
read_only: 1
},

View file

@ -92,11 +92,14 @@ frappe.ui.form.Dashboard = Class.extend({
show_progress: function(title, percent, message) {
this._progress_map = this._progress_map || {};
if (!this._progress_map[title]) {
const progress_chart = this.add_progress(title, percent, message);
let progress_chart = this._progress_map[title];
// create a new progress chart if it doesnt exist
// or the previous one got detached from the DOM
if (!progress_chart || progress_chart.parent().length == 0) {
progress_chart = this.add_progress(title, percent, message);
this._progress_map[title] = progress_chart;
}
let progress_chart = this._progress_map[title];
if (!$.isArray(percent)) {
percent = this.format_percent(title, percent);
}