fix: Add method to import data from file

- Update import paths to use data_import module
- Data Import Beta -> Data Import
This commit is contained in:
Faris Ansari 2020-06-11 17:41:13 +05:30
parent 8102172ea7
commit 5a1ce409b6
10 changed files with 213 additions and 41 deletions

View file

@ -1145,8 +1145,8 @@ def make_property_setter(args, ignore_validate=False, validate_fields_for_doctyp
def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
	"""Import a file using Data Import.

	Diff artifact resolved: the legacy `data_import_legacy` call and the new
	`data_import` call were both present; only the new implementation is kept.

	:param path: Path to the file to import
	:param ignore_links: Don't validate link fields
	:param ignore_insert: Skip insert-only records
	:param insert: Force insert of records
	"""
	# Alias the import to avoid shadowing confusion with this function's own name.
	from frappe.core.doctype.data_import.data_import import import_doc as _import_doc

	_import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc, ignore_no_copy=True):
""" No_copy fields also get copied."""

View file

@ -215,12 +215,12 @@ def export_doc(context, doctype, docname):
@pass_context
def export_json(context, doctype, path, name=None):
"Export doclist as json to the given path, use '-' as name for Singles."
from frappe.core.doctype.data_import_legacy import data_import_legacy as data_import
from frappe.core.doctype.data_import.data_import import export_json
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_json(doctype, path, name=name)
export_json(doctype, path, name=name)
finally:
frappe.destroy()
if not context.sites:
@ -232,12 +232,12 @@ def export_json(context, doctype, path, name=None):
@pass_context
def export_csv(context, doctype, path):
"Export data import template with data for DocType"
from frappe.core.doctype.data_import_legacy import data_import_legacy as data_import
from frappe.core.doctype.data_import.data_import import export_csv
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
data_import.export_csv(doctype, path)
export_csv(doctype, path)
finally:
frappe.destroy()
if not context.sites:
@ -264,7 +264,7 @@ def export_fixtures(context, app=None):
@pass_context
def import_doc(context, path, force=False):
"Import (insert/update) doclist. If the argument is a directory, all files ending with .json are imported"
from frappe.core.doctype.data_import_legacy import data_import_legacy as data_import
from frappe.core.doctype.data_import.data_import import import_doc
if not os.path.exists(path):
path = os.path.join('..', path)
@ -276,7 +276,7 @@ def import_doc(context, path, force=False):
try:
frappe.init(site=site)
frappe.connect()
data_import.import_doc(path, overwrite=context.force)
import_doc(path, overwrite=context.force)
finally:
frappe.destroy()
if not context.sites:
@ -329,20 +329,12 @@ def import_csv(context, path, only_insert=False, submit_after_import=False, igno
@pass_context
def data_import(context, file_path, doctype, import_type=None, submit_after_import=False, mute_emails=True):
	"Import documents in bulk from CSV or XLSX using data import"
	# Diff artifact resolved: the old Importer-based body and the new
	# import_file call were both present; only the new call is kept.
	from frappe.core.doctype.data_import.data_import import import_file

	site = get_site(context)
	frappe.init(site=site)
	frappe.connect()
	# NOTE(review): mute_emails is accepted but no longer forwarded to the
	# importer — confirm this is intended by the new import_file API.
	import_file(doctype, file_path, import_type, submit_after_import, console=True)
	frappe.destroy()

View file

@ -11,7 +11,7 @@ import os
import frappe
from frappe.core.doctype.access_log.access_log import make_access_log
from frappe.utils import cstr, get_site_url
from frappe.core.doctype.data_import_legacy.data_import_legacy import export_csv
from frappe.core.doctype.data_import.data_import import export_csv
from frappe.core.doctype.user.user import generate_keys
# imports - third party imports

View file

@ -3,12 +3,12 @@
# For license information, please see license.txt
from __future__ import unicode_literals
import os
import frappe
from frappe.model.document import Document
from frappe.core.doctype.data_import.importer import Importer
from frappe.core.doctype.data_import.importer import Importer, ImportFile
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.core.page.background_jobs.background_jobs import get_info
from frappe.utils.background_jobs import enqueue
from frappe import _
@ -42,6 +42,8 @@ class DataImport(Document):
_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive")
)
from frappe.core.page.background_jobs.background_jobs import get_info
enqueued_jobs = [d.get("job_name") for d in get_info()]
if self.name not in enqueued_jobs:
@ -123,3 +125,130 @@ def download_template(
def download_errored_template(data_import_name):
	"""Export the rows that failed during the given Data Import run."""
	frappe.get_doc("Data Import", data_import_name).export_errored_rows()
def import_file(
	doctype, file_path, import_type, submit_after_import=False, console=False
):
	"""
	Import documents from CSV or XLSX using data import.

	:param doctype: DocType to import
	:param file_path: Path to .csv, .xls, or .xlsx file to import
	:param import_type: One of "Insert" or "Update"
	:param submit_after_import: Whether to submit documents after import
	:param console: Set to true if this is to be used from command line. Will print errors or progress to stdout.
	"""
	update = import_type.lower() != "insert"

	data_import = frappe.new_doc("Data Import")
	data_import.submit_after_import = submit_after_import
	data_import.import_type = (
		"Update Existing Records" if update else "Insert New Records"
	)

	Importer(
		doctype=doctype, file_path=file_path, data_import=data_import, console=console
	).import_data()
##############
def import_doc(
	path,
	overwrite=False,
	ignore_links=False,
	ignore_insert=False,
	insert=False,
	submit=False,
	pre_process=None,
):
	"""Import every .json and .csv file found at *path*.

	*path* may be a single file or a directory. JSON files go through
	frappe.modules.import_file (emails muted for the duration); CSV files go
	through the deprecated import_file_by_path. Other extensions are skipped.

	NOTE(review): ignore_insert and insert are accepted but unused here —
	presumably kept for backward compatibility with the legacy signature.
	"""
	if os.path.isdir(path):
		candidates = [os.path.join(path, entry) for entry in os.listdir(path)]
	else:
		candidates = [path]

	for file_path in candidates:
		if file_path.endswith(".json"):
			frappe.flags.mute_emails = True
			frappe.modules.import_file.import_file_by_path(
				file_path, data_import=True, force=True, pre_process=pre_process, reset_permissions=True
			)
			frappe.flags.mute_emails = False
			frappe.db.commit()
		elif file_path.endswith(".csv"):
			import_file_by_path(
				file_path,
				ignore_links=ignore_links,
				overwrite=overwrite,
				submit=submit,
				pre_process=pre_process,
			)
			frappe.db.commit()
def import_file_by_path(
	path,
	ignore_links=False,
	overwrite=False,
	submit=False,
	pre_process=None,
	no_email=True,
):
	"""Deprecated CSV import entry point.

	Kept only to point callers at the new command/API; raises for .csv input
	and silently does nothing for anything else.
	"""
	if not path.endswith(".csv"):
		return

	notice = (
		"",
		"This method is deprecated.",
		'Import CSV files using the command "bench --site sitename data-import"',
		"Or use the method frappe.core.doctype.data_import.data_import.import_file",
		"",
	)
	for line in notice:
		print(line)
	raise Exception("Method deprecated")
def export_json(
	doctype, path, filters=None, or_filters=None, name=None, order_by="creation asc"
):
	"""Serialize documents of *doctype* as JSON to *path*.

	Exports a single named doc, the Single doc, or all docs matching the
	filters. Volatile metadata is stripped so exported fixtures diff cleanly.
	"""

	def scrub(docs):
		parent_keys = ("modified_by", "creation", "owner", "idx")
		child_keys = parent_keys + ("docstatus", "doctype", "modified", "name")
		for doc in docs:
			for key in parent_keys:
				if key in doc:
					del doc[key]
			for value in doc.values():
				if isinstance(value, list):
					for child in value:
						for key in child_keys:
							if key in child:
								del child[key]

	if name:
		docs = [frappe.get_doc(doctype, name).as_dict()]
	elif frappe.db.get_value("DocType", doctype, "issingle"):
		docs = [frappe.get_doc(doctype).as_dict()]
	else:
		records = frappe.get_all(
			doctype,
			fields=["name"],
			filters=filters,
			or_filters=or_filters,
			limit_page_length=0,
			order_by=order_by,
		)
		docs = [frappe.get_doc(doctype, record.name).as_dict() for record in records]

	scrub(docs)

	# Fall back to a path relative to the parent dir when the target dir is missing.
	if not os.path.exists(os.path.dirname(path)):
		path = os.path.join("..", path)
	with open(path, "w") as outfile:
		outfile.write(frappe.as_json(docs))
def export_csv(doctype, path):
	"""Write a data-import template (including existing data) for *doctype* to *path*."""
	from frappe.core.doctype.data_export.exporter import export_data

	with open(path, "wb") as outfile:
		# export_data places its CSV output in frappe.response.result.
		export_data(doctype=doctype, all_doctypes=True, template=True, with_data=True)
		outfile.write(frappe.response.result.encode("utf-8"))

View file

@ -24,13 +24,15 @@ UPDATE = "Update Existing Records"
class Importer:
def __init__(self, doctype, data_import=None, import_type=None, console=False):
def __init__(
self, doctype, data_import=None, file_path=None, import_type=None, console=False
):
self.doctype = doctype
self.console = console
self.data_import = data_import
if not self.data_import:
self.data_import = frappe.get_doc(doctype="Data Import Beta")
self.data_import = frappe.get_doc(doctype="Data Import")
if import_type:
self.data_import.import_type = import_type
@ -38,7 +40,10 @@ class Importer:
self.import_type = self.data_import.import_type
self.import_file = ImportFile(
doctype, data_import.import_file, self.template_options, self.import_type
doctype,
file_path or data_import.import_file,
self.template_options,
self.import_type,
)
def get_data_for_import_preview(self):
@ -250,6 +255,48 @@ class Importer:
build_csv_response(rows, self.doctype)
def print_import_log(self, import_log):
	"""Print an import summary to stdout and dump failure details to a text file."""
	failures = [entry for entry in import_log if not entry.success]
	successes = [entry for entry in import_log if entry.success]

	if successes:
		print()
		print(
			"Successfully imported {0} records out of {1}".format(
				len(successes), len(import_log)
			)
		)

	if failures:
		print("Failed to import {0} records".format(len(failures)))
		file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now())
		print("Check {0} for errors".format(os.path.join("sites", file_name)))
		report_parts = []
		for failure in failures:
			report_parts.append("Row Indexes: {0}\n".format(str(failure.get("row_indexes", []))))
			report_parts.append("Messages:\n{0}\n".format("\n".join(failure.get("messages", []))))
			report_parts.append("Traceback:\n{0}\n\n".format(failure.get("exception")))
		with open(file_name, "w") as report:
			report.write("".join(report_parts))
def print_grouped_warnings(self, warnings):
	"""Print warnings to stdout, grouping row-bound warnings under their row number."""
	by_row = {}
	ungrouped = []
	for warning in warnings:
		row = warning.get("row")
		if row:
			by_row.setdefault(row, []).append(warning)
		else:
			ungrouped.append(warning)

	for row_number, row_warnings in by_row.items():
		print("Row {0}".format(row_number))
		for warning in row_warnings:
			print(warning.get("message"))

	for warning in ungrouped:
		print(warning.get("message"))
class ImportFile:
def __init__(self, doctype, file, template_options=None, import_type=None):
@ -329,14 +376,14 @@ class ImportFile:
# only pick useful fields in docfields to minimise the payload
if col.df:
col.df = {
'fieldtype': col.df.fieldtype,
'fieldname': col.df.fieldname,
'label': col.df.label,
'options': col.df.options,
'parent': col.df.parent,
'reqd': col.df.reqd,
'default': col.df.default,
'read_only': col.df.read_only
"fieldtype": col.df.fieldtype,
"fieldname": col.df.fieldname,
"label": col.df.label,
"options": col.df.options,
"parent": col.df.parent,
"reqd": col.df.reqd,
"default": col.df.default,
"read_only": col.df.read_only,
}
data = [[row.row_number] + row.as_list() for row in self.data]
@ -741,14 +788,16 @@ class Header(Row):
return [
col.index
for col in self.columns
if not col.skip_import and col.df and col.df.parent == doctype and is_table_field(col.df)
if not col.skip_import
and col.df
and col.df.parent == doctype
and is_table_field(col.df)
]
def get_columns(self, indexes):
return [self.columns[i] for i in indexes]
class Column:
seen = []
fields_column_map = {}
@ -1028,12 +1077,14 @@ def get_df_for_column_header(doctype, header):
# utilities
def get_id_field(doctype):
	"""Return the docfield that identifies records of *doctype*.

	Prefers the autoname field when one is configured; otherwise falls back
	to the standard "name" column.
	"""
	return get_autoname_field(doctype) or frappe._dict(
		{"label": "ID", "fieldname": "name", "fieldtype": "Data"}
	)
def get_autoname_field(doctype):
meta = frappe.get_meta(doctype)
if meta.autoname and meta.autoname.startswith("field:"):

View file

@ -5,8 +5,8 @@ from __future__ import unicode_literals
import unittest
import frappe
from frappe.core.doctype.data_import_beta.exporter import Exporter
from frappe.core.doctype.data_import_beta.test_importer import (
from frappe.core.doctype.data_import.exporter import Exporter
from frappe.core.doctype.data_import.test_importer import (
create_doctype_if_not_exists,
)

View file

@ -84,7 +84,7 @@ class TestImporter(unittest.TestCase):
self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again')
def get_importer(self, doctype, import_file, update=False):
data_import = frappe.new_doc('Data Import Beta')
data_import = frappe.new_doc('Data Import')
data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records'
data_import.reference_doctype = doctype
data_import.import_file = import_file.file_url
@ -180,4 +180,4 @@ def get_import_file(csv_file_name, force=False):
def get_csv_file_path(file_name):
return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import_beta', 'fixtures', file_name)
return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name)

View file

@ -132,7 +132,7 @@ frappe.data_import.DataExporter = class DataExporter {
export_records() {
let method =
'/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template';
'/api/method/frappe.core.doctype.data_import.data_import.download_template';
let multicheck_fields = this.dialog.fields
.filter(df => df.fieldtype === 'MultiCheck')

View file

@ -4,7 +4,7 @@ from __future__ import unicode_literals
import frappe
import frappe.defaults
from frappe.core.doctype.data_import_legacy.data_import_legacy import export_csv
from frappe.core.doctype.data_import.data_import import export_csv
import unittest
import os

View file

@ -4,7 +4,7 @@
from __future__ import unicode_literals, print_function
import frappe, os
from frappe.core.doctype.data_import_legacy.data_import_legacy import import_doc, export_json
from frappe.core.doctype.data_import.data_import import import_doc, export_json
def sync_fixtures(app=None):
"""Import, overwrite fixtures from `[app]/fixtures`"""