From ba8bfbe37afa1d9a951e2071d4c7ae85de626cc8 Mon Sep 17 00:00:00 2001
From: Rushabh Mehta
Date: Wed, 2 Apr 2014 15:59:58 +0530
Subject: [PATCH] frappe/frappe#478 fixed data import tool:
---
frappe/__init__.py | 4 +-
frappe/cli.py | 6 +-
.../page/data_import_tool/data_import_tool.js | 79 +--
.../page/data_import_tool/data_import_tool.py | 497 +-----------------
frappe/core/page/data_import_tool/exporter.py | 194 +++++++
frappe/core/page/data_import_tool/importer.py | 248 +++++++++
frappe/tests/test_assign.py | 22 +
frappe/tests/test_data_import.py | 58 ++
frappe/utils/datautils.py | 4 +-
frappe/utils/fixtures.py | 4 +-
10 files changed, 595 insertions(+), 521 deletions(-)
create mode 100644 frappe/core/page/data_import_tool/exporter.py
create mode 100644 frappe/core/page/data_import_tool/importer.py
create mode 100644 frappe/tests/test_assign.py
create mode 100644 frappe/tests/test_data_import.py
diff --git a/frappe/__init__.py b/frappe/__init__.py
index 80efc83987..1a83854f42 100644
--- a/frappe/__init__.py
+++ b/frappe/__init__.py
@@ -496,9 +496,9 @@ def get_application_home_page(user='Guest'):
else:
return db.get_value("Control Panel", None, "home_page")
-def import_doclist(path, ignore_links=False, ignore_insert=False, insert=False):
+def import_doc(path, ignore_links=False, ignore_insert=False, insert=False):
from frappe.core.page.data_import_tool import data_import_tool
- data_import_tool.import_doclist(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
+ data_import_tool.import_doc(path, ignore_links=ignore_links, ignore_insert=ignore_insert, insert=insert)
def copy_doc(doc):
import copy
diff --git a/frappe/cli.py b/frappe/cli.py
index aaf9ae48ec..6632e946b7 100755
--- a/frappe/cli.py
+++ b/frappe/cli.py
@@ -241,7 +241,7 @@ def setup_utilities(parser):
help="""Dump DocType as csv""")
parser.add_argument("--export_fixtures", default=False, action="store_true",
help="""Export fixtures""")
- parser.add_argument("--import_doclist", nargs=1, metavar="PATH",
+ parser.add_argument("--import_doc", nargs=1, metavar="PATH",
help="""Import (insert/update) a doc. If the argument is a directory, all files ending with .json are imported""")
def setup_translation(parser):
@@ -561,10 +561,10 @@ def export_fixtures():
frappe.destroy()
@cmd
-def import_doclist(path, force=False):
+def import_doc(path, force=False):
from frappe.core.page.data_import_tool import data_import_tool
frappe.connect()
- data_import_tool.import_doclist(path, overwrite=force)
+ data_import_tool.import_doc(path, overwrite=force)
frappe.destroy()
# translation
diff --git a/frappe/core/page/data_import_tool/data_import_tool.js b/frappe/core/page/data_import_tool/data_import_tool.js
index 3be83eebad..3b627a862b 100644
--- a/frappe/core/page/data_import_tool/data_import_tool.js
+++ b/frappe/core/page/data_import_tool/data_import_tool.js
@@ -1,7 +1,7 @@
frappe.pages['data-import-tool'].onload = function(wrapper) {
wrapper.app_page = frappe.ui.make_app_page({
parent: wrapper,
- title: "Data Import Tool",
+ title: __("Data Import / Export Tool"),
icon: "data-import-tool"
});
@@ -15,13 +15,19 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
$(wrapper).find('.layout-main-section').append('1. Download Template \
\
Download a template for importing a table.
\
-
\
- \
- \
- \
- Download with data \
-
\
-
\
+
\
+
\
+
\
+ \
+
\
+ Download with data \
+ \
+
Export all rows in CSV fields for re-upload. This is ideal for bulk-editing.
\
+
\
+
\
+
\
\
\
2. Import Data \
@@ -29,24 +35,24 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
\
\
\
- ');
-
- $(wrapper).find('.layout-side-section').append('Help \
- Importing non-English data:
\
- While uploading non English files ensure that the encoding is UTF-8.
\
- Microsoft Excel Users:\
-
\
- In Excel, save the file in CSV (Comma Delimited) format \
- Open this saved file in Notepad \
- Click on File -> Save As \
- File Name: <your filename>.csv \
- Save as type: Text Documents (*.txt) \
- Encoding: UTF-8\
- \
- Click on Save \
- \
-
')
-
+ \
+
Help \
+
Importing non-English data:
\
+
While uploading non English files ensure that the encoding is UTF-8.
\
+
Microsoft Excel Users:\
+
\
+ In Excel, save the file in CSV (Comma Delimited) format \
+ Open this saved file in Notepad \
+ Click on File -> Save As \
+ File Name: <your filename>.csv \
+ Save as type: Text Documents (*.txt) \
+ Encoding: UTF-8\
+ \
+ Click on Save \
+ \
+ \
+
');
+
$select = $(wrapper).find('[name="dit-doctype"]');
frappe.messages.waiting($(wrapper).find(".dit-progress-area").toggle(false),
@@ -102,7 +108,7 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
+ '&with_data=%(with_data)s'
+ '&all_doctypes=%(all_doctypes)s',
{
- cmd: 'frappe.core.page.data_import_tool.data_import_tool.get_template',
+ cmd: 'frappe.core.page.data_import_tool.exporter.get_template',
doctype: doctype,
parent_doctype: parent_doctype,
with_data: with_data ? 'Yes' : 'No',
@@ -116,7 +122,7 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
$select.change(function() {
var val = $(this).val()
if(val!='Select...') {
- $('#dit-download').empty();
+ $('#dit-download').empty().removeClass("hide");
frappe.model.with_doctype(val, function() {
validate_download_with_data(val);
@@ -126,7 +132,7 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
method: 'frappe.core.page.data_import_tool.data_import_tool.get_doctype_options',
args: {doctype: val},
callback: function(r) {
- $('Select Template: ').appendTo('#dit-download');
+ $(' Download ').appendTo('#dit-download');
var with_data = $('[name="dit-with-data"]:checked').length ? 'Yes' : 'No';
// download link
$.each(r.message, function(i, v) {
@@ -191,7 +197,7 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
frappe.upload.make({
parent: $('#dit-upload-area'),
args: {
- method: 'frappe.core.page.data_import_tool.data_import_tool.upload'
+ method: 'frappe.core.page.data_import_tool.importer.upload'
},
onerror: onerror,
callback: function(fid, filename, r) {
@@ -211,21 +217,18 @@ frappe.pages['data-import-tool'].onload = function(wrapper) {
var $submit_btn = $('#dit-upload-area button.btn-upload')
.html(' ' + frappe._("Upload and Import"));
- $(' \
- Overwrite \
- If you are uploading a child table (for example Item Price), the all the entries of that table will be deleted (for that parent record) and new entries will be made.
')
+ $(' Overwrite \
+ If you are uploading a child table (for example Item Price), then all the entries of that table will be deleted (for that parent record) and new entries will be made.
')
.insertBefore($submit_btn);
// add submit option
- $(' \
- Submit \
- If you are inserting new records (overwrite not checked) \
+ $(' Submit \
+
If you are inserting new records (overwrite not checked) \
and if you have submit permission, the record will be submitted.
')
.insertBefore($submit_btn);
// add ignore option
- $(' \
- Ignore Encoding Errors ')
+ $(' Ignore Encoding Errors ')
.insertBefore($submit_btn);
// rename button
diff --git a/frappe/core/page/data_import_tool/data_import_tool.py b/frappe/core/page/data_import_tool/data_import_tool.py
index 41dc128b5c..e49e327c51 100644
--- a/frappe/core/page/data_import_tool/data_import_tool.py
+++ b/frappe/core/page/data_import_tool/data_import_tool.py
@@ -4,11 +4,8 @@
from __future__ import unicode_literals
import frappe, json, os
-import frappe.permissions
-from frappe.utils import cstr
-from frappe.utils.datautils import UnicodeWriter, check_record, import_doc, getlink, cint, flt
-from frappe import _
-import frappe.permissions
+from frappe.utils import cstr, cint, flt
+from frappe.utils.datautils import check_record, import_doc
data_keys = frappe._dict({
"data_separator": 'Start entering data below this line',
@@ -31,464 +28,17 @@ def get_doctype_options():
doctype = frappe.form_dict['doctype']
return [doctype] + [d.options for d in frappe.get_meta(doctype).get_table_fields()]
-@frappe.whitelist()
-def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No"):
- import frappe.permissions
- all_doctypes = all_doctypes=="Yes"
- if not parent_doctype:
- parent_doctype = doctype
-
- column_start_end = {}
-
- if all_doctypes:
- doctype_parentfield = {}
- child_doctypes = []
- for d in frappe.get_meta(doctype).get_table_fields():
- child_doctypes.append(d[0])
- doctype_parentfield[d[0]] = d[1]
-
- def add_main_header():
- w.writerow(['Data Import Template'])
- w.writerow([data_keys.main_table, doctype])
-
- if parent_doctype != doctype:
- w.writerow([data_keys.parent_table, parent_doctype])
- else:
- w.writerow([''])
- w.writerow([''])
- w.writerow(['Notes:'])
- w.writerow(['Please do not change the template headings.'])
- w.writerow(['First data column must be blank.'])
- w.writerow(['If you are uploading new records, leave the "name" (ID) column blank.'])
- w.writerow(['If you are uploading new records, "Naming Series" becomes mandatory, if present.'])
- w.writerow(['Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.'])
- w.writerow(['For updating, you can update only selective columns.'])
- w.writerow(['You can only upload upto 5000 records in one go. (may be less in some cases)'])
- if key == "parent":
- w.writerow(['"Parent" signifies the parent table in which this row must be added'])
- w.writerow(['If you are updating, please select "Overwrite" else existing rows will not be deleted.'])
-
- def build_field_columns(dt):
- meta = frappe.get_meta(dt)
-
- tablecolumns = filter(None,
- [meta.get_field(f[0]) for f in frappe.db.sql('desc `tab%s`' % dt)])
-
- tablecolumns.sort(lambda a, b: a.idx - b.idx)
-
- if dt==doctype:
- column_start_end[dt] = frappe._dict({"start": 0})
- else:
- column_start_end[dt] = frappe._dict({"start": len(columns)})
-
- append_field_column(frappe._dict({
- "fieldname": "name",
- "label": "ID",
- "fieldtype": "Data",
- "reqd": 1,
- "idx": 0,
- "info": "Leave blank for new records"
- }), True)
-
- for docfield in tablecolumns:
- append_field_column(docfield, True)
-
- # all non mandatory fields
- for docfield in tablecolumns:
- append_field_column(docfield, False)
-
- # append DocType name
- tablerow[column_start_end[dt].start + 1] = dt
- if dt!=doctype:
- tablerow[column_start_end[dt].start + 2] = doctype_parentfield[dt]
-
- column_start_end[dt].end = len(columns) + 1
-
- def append_field_column(docfield, mandatory):
- if docfield and ((mandatory and docfield.reqd) or not (mandatory or docfield.reqd)) \
- and (docfield.fieldname not in ('parenttype', 'trash_reason')) and not docfield.hidden:
- tablerow.append("")
- fieldrow.append(docfield.fieldname)
- labelrow.append(docfield.label)
- mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
- typerow.append(docfield.fieldtype)
- inforow.append(getinforow(docfield))
- columns.append(docfield.fieldname)
-
- def append_empty_field_column():
- tablerow.append("~")
- fieldrow.append("~")
- labelrow.append("")
- mandatoryrow.append("")
- typerow.append("")
- inforow.append("")
- columns.append("")
-
- def getinforow(docfield):
- """make info comment for options, links etc."""
- if docfield.fieldtype == 'Select':
- if not docfield.options:
- return ''
- elif docfield.options.startswith('link:'):
- return 'Valid %s' % docfield.options[5:]
- else:
- return 'One of: %s' % ', '.join(filter(None, docfield.options.split('\n')))
- elif docfield.fieldtype == 'Link':
- return 'Valid %s' % docfield.options
- elif docfield.fieldtype in ('Int'):
- return 'Integer'
- elif docfield.fieldtype == "Check":
- return "0 or 1"
- elif docfield.info:
- return docfield.info
- else:
- return ''
-
- def add_field_headings():
- w.writerow(tablerow)
- w.writerow(labelrow)
- w.writerow(fieldrow)
- w.writerow(mandatoryrow)
- w.writerow(typerow)
- w.writerow(inforow)
- w.writerow([data_keys.data_separator])
-
- def add_data():
- def add_data_row(row_group, dt, doc, rowidx):
- d = doc.copy()
- if all_doctypes:
- d.name = '"'+ d.name+'"'
-
- if len(row_group) < rowidx + 1:
- row_group.append([""] * (len(columns) + 1))
- row = row_group[rowidx]
- for i, c in enumerate(columns[column_start_end[dt].start:column_start_end[dt].end]):
- row[column_start_end[dt].start + i + 1] = d.get(c, "")
-
- if with_data=='Yes':
- frappe.permissions.can_export(parent_doctype, raise_exception=True)
-
- # get permitted data only
- data = frappe.get_list(doctype, fields=["*"], limit_page_length=None)
- for doc in data:
- # add main table
- row_group = []
-
- add_data_row(row_group, doctype, doc, 0)
-
- if all_doctypes:
- # add child tables
- for child_doctype in child_doctypes:
- for ci, child in enumerate(frappe.db.sql("""select * from `tab%s`
- where parent=%s order by idx""" % (child_doctype, "%s"), doc.name, as_dict=1)):
- add_data_row(row_group, child_doctype, child, ci)
-
- for row in row_group:
- w.writerow(row)
-
- w = UnicodeWriter()
- key = 'parent' if parent_doctype != doctype else 'name'
-
- add_main_header()
-
- w.writerow([''])
- tablerow = [data_keys.doctype, ""]
- labelrow = ["Column Labels:", "ID"]
- fieldrow = [data_keys.columns, key]
- mandatoryrow = ['Mandatory:', 'Yes']
- typerow = ['Type:', 'Data (text)']
- inforow = ['Info:', '']
- columns = [key]
-
- build_field_columns(doctype)
- if all_doctypes:
- for d in child_doctypes:
- append_empty_field_column()
- build_field_columns(d)
-
- add_field_headings()
- add_data()
-
- # write out response as a type csv
- frappe.response['result'] = cstr(w.getvalue())
- frappe.response['type'] = 'csv'
- frappe.response['doctype'] = doctype
-
-@frappe.whitelist()
-def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, overwrite=False, ignore_links=False):
- """upload data"""
- frappe.flags.mute_emails = True
- # extra input params
- params = json.loads(frappe.form_dict.get("params") or '{}')
-
- if params.get("_submit"):
- submit_after_import = True
- if params.get("ignore_encoding_errors"):
- ignore_encoding_errors = True
-
- from frappe.utils.datautils import read_csv_content_from_uploaded_file
-
- def bad_template():
- frappe.msgprint("Please do not change the rows above '%s'" % data_keys.data_separator,
- raise_exception=1)
-
- def check_data_length():
- max_rows = 5000
- if not data:
- frappe.msgprint("No data found", raise_exception=True)
- elif len(data) > max_rows:
- frappe.msgprint("Please upload only upto %d %ss at a time" % \
- (max_rows, doctype), raise_exception=True)
-
- def get_start_row():
- for i, row in enumerate(rows):
- if row and row[0]==data_keys.data_separator:
- return i+1
- bad_template()
-
- def get_header_row(key):
- return get_header_row_and_idx(key)[0]
-
- def get_header_row_and_idx(key):
- for i, row in enumerate(header):
- if row and row[0]==key:
- return row, i
- return [], -1
-
- def filter_empty_columns(columns):
- empty_cols = filter(lambda x: x in ("", None), columns)
-
- if empty_cols:
- if columns[-1*len(empty_cols):] == empty_cols:
- # filter empty columns if they exist at the end
- columns = columns[:-1*len(empty_cols)]
- else:
- frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
- raise_exception=1)
-
- return columns
-
- def make_column_map():
- doctype_row, row_idx = get_header_row_and_idx(data_keys.doctype)
- if row_idx == -1: # old style
- return
-
- dt = None
- for i, d in enumerate(doctype_row[1:]):
- if d not in ("~", "-"):
- if d: # value in doctype_row
- if doctype_row[i]==dt:
- # prev column is doctype (in case of parentfield)
- doctype_parentfield[dt] = doctype_row[i+1]
- else:
- dt = d
- doctypes.append(d)
- column_idx_to_fieldname[dt] = {}
- column_idx_to_fieldtype[dt] = {}
- if dt:
- column_idx_to_fieldname[dt][i+1] = rows[row_idx + 2][i+1]
- column_idx_to_fieldtype[dt][i+1] = rows[row_idx + 4][i+1]
-
- def get_doclist(start_idx):
- if doctypes:
- doclist = []
- for idx in xrange(start_idx, len(rows)):
- if (not len(doclist)) or main_doc_empty(rows[idx]):
- for dt in doctypes:
- d = {}
- for column_idx in column_idx_to_fieldname[dt]:
- try:
- fieldname = column_idx_to_fieldname[dt][column_idx]
- fieldtype = column_idx_to_fieldtype[dt][column_idx]
-
- d[fieldname] = rows[idx][column_idx]
- if fieldtype in ("Int", "Check"):
- d[fieldname] = cint(d[fieldname])
- elif fieldtype in ("Float", "Currency"):
- d[fieldname] = flt(d[fieldname])
- except IndexError, e:
- pass
-
- # scrub quotes from name and modified
- if d.get("name") and d["name"].startswith('"'):
- d["name"] = d["name"][1:-1]
-
- if sum([0 if not val else 1 for val in d.values()]):
- d['doctype'] = dt
- if dt != doctype:
- if not overwrite:
- d['parent'] = doclist[0]["name"]
- d['parenttype'] = doctype
- d['parentfield'] = doctype_parentfield[dt]
- doclist.append(d)
- else:
- break
-
- return doclist
- else:
- d = frappe._dict(zip(columns, rows[start_idx][1:]))
- d['doctype'] = doctype
- return [d]
-
- def main_doc_empty(row):
- return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))
-
- # header
- if not rows:
- rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
- start_row = get_start_row()
- header = rows[:start_row]
- data = rows[start_row:]
- doctype = get_header_row(data_keys.main_table)[1]
- columns = filter_empty_columns(get_header_row(data_keys.columns)[1:])
- doctypes = []
- doctype_parentfield = {}
- column_idx_to_fieldname = {}
- column_idx_to_fieldtype = {}
-
- if submit_after_import and not cint(frappe.db.get_value("DocType",
- doctype, "is_submittable")):
- submit_after_import = False
-
- parenttype = get_header_row(data_keys.parent_table)
-
- if len(parenttype) > 1:
- parenttype = parenttype[1]
- parentfield = get_parent_field(doctype, parenttype)
-
- # check permissions
- if not frappe.permissions.can_import(parenttype or doctype):
- frappe.flags.mute_emails = False
- return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}
-
- # allow limit rows to be uploaded
- check_data_length()
- make_column_map()
-
- frappe.db.begin()
- if not overwrite:
- overwrite = params.get('overwrite')
-
- # delete child rows (if parenttype)
- if parenttype and overwrite:
- delete_child_rows(data, doctype)
-
- ret = []
- error = False
- parent_list = []
- for i, row in enumerate(data):
- # bypass empty rows
- if main_doc_empty(row):
- continue
-
- row_idx = i + start_row
- bean = None
-
- doclist = get_doclist(row_idx)
- try:
- frappe.local.message_log = []
- if len(doclist) > 1:
- for d in doclist:
- # ignoring parent check as it will be automatically added
- check_record(d)
-
- if overwrite and frappe.db.exists(doctype, doclist[0]["name"]):
- bean = frappe.get_doc(doctype, doclist[0]["name"])
- bean.ignore_links = ignore_links
- bean.doclist.update(doclist)
- bean.save()
- ret.append('Updated row (#%d) %s' % (row_idx + 1, getlink(bean.doctype, bean.name)))
- else:
- bean = frappe.get_doc(doclist)
- bean.ignore_links = ignore_links
- bean.insert()
- ret.append('Inserted row (#%d) %s' % (row_idx + 1, getlink(bean.doctype, bean.name)))
- if submit_after_import:
- bean.submit()
- ret.append('Submitted row (#%d) %s' % (row_idx + 1, getlink(bean.doctype, bean.name)))
- else:
- check_record(doclist[0])
-
- if parenttype:
- # child doc
- doc = frappe.get_doc(doctype)
- doc.update(doclist[0])
- if parenttype:
- doc.parenttype = parenttype
- doc.parentfield = parentfield
- doc.save()
- ret.append('Inserted row for %s at #%s' % (getlink(parenttype,
- doc.parent), unicode(doc.idx)))
- parent_list.append(doc.parent)
- else:
- ret.append(import_doc(doclist[0], doctype, overwrite, row_idx, submit_after_import, ignore_links))
-
- except Exception, e:
- error = True
- if bean:
- frappe.errprint(bean.doclist)
- err_msg = frappe.local.message_log and " ".join(frappe.local.message_log) or cstr(e)
- ret.append('Error for row (#%d) %s : %s' % (row_idx + 1,
- len(row)>1 and row[1] or "", err_msg))
- frappe.errprint(frappe.get_traceback())
-
- ret, error = validate_parent(parent_list, parenttype, ret, error)
-
- if error:
- frappe.db.rollback()
- else:
- frappe.db.commit()
-
- frappe.flags.mute_emails = False
-
- return {"messages": ret, "error": error}
-
-def validate_parent(parent_list, parenttype, ret, error):
- if parent_list:
- parent_list = list(set(parent_list))
- for p in parent_list:
- try:
- obj = frappe.get_doc(parenttype, p)
- obj.run_method("validate")
- obj.run_method("on_update")
- except Exception, e:
- error = True
- ret.append('Validation Error for %s %s: %s' % (parenttype, p, cstr(e)))
- frappe.errprint(frappe.get_traceback())
-
- return ret, error
-
-def get_parent_field(doctype, parenttype):
- parentfield = None
-
- # get parentfield
- if parenttype:
- for d in frappe.get_meta(parenttype).get_table_fields():
- if d.options==doctype:
- parentfield = d.fieldname
- break
-
- if not parentfield:
- frappe.msgprint("Did not find parentfield for %s (%s)" % \
- (parenttype, doctype))
- raise Exception
-
- return parentfield
-
-def delete_child_rows(rows, doctype):
- """delete child rows for all parents"""
- for p in list(set([r[1] for r in rows])):
- frappe.db.sql("""delete from `tab%s` where parent=%s""" % (doctype, '%s'), p)
-
import csv
-def import_file_by_path(path, ignore_links=False, overwrite=False):
+def import_file_by_path(path, ignore_links=False, overwrite=False, submit=False):
from frappe.utils.datautils import read_csv_content
+ from frappe.core.page.data_import_tool.importer import upload
print "Importing " + path
with open(path, "r") as infile:
- upload(rows = read_csv_content(infile), ignore_links=ignore_links, overwrite=overwrite)
+ upload(rows = read_csv_content(infile), ignore_links=ignore_links, overwrite=overwrite, submit_after_import=submit)
-def export_csv(doctype, path):
+def export_csv(doctype, path):
+ from frappe.core.page.data_import_tool.exporter import get_template
with open(path, "w") as csvfile:
get_template(doctype=doctype, all_doctypes="Yes", with_data="Yes")
csvfile.write(frappe.response.result.encode("utf-8"))
@@ -498,13 +48,12 @@ def export_json(doctype, name, path):
if not name or name=="-":
name = doctype
with open(path, "w") as outfile:
- doclist = frappe.get_doc(doctype, name).as_dict()
- for d in doclist:
- if d.get("parent"):
- del d["parent"]
- del d["name"]
- d["__islocal"] = 1
- outfile.write(json.dumps([doclist], default=json_handler, indent=1, sort_keys=True))
+ doc = frappe.get_doc(doctype, name).as_dict()
+ for d in doc.get_all_children():
+ d.set("parent", None)
+ d.set("name", None)
+ d.set("__islocal", 1)
+ outfile.write(json.dumps([doc], default=json_handler, indent=1, sort_keys=True))
@frappe.whitelist()
def export_fixture(doctype, name, app):
@@ -517,25 +66,25 @@ def export_fixture(doctype, name, app):
export_json(doctype, name, frappe.get_app_path(app, "fixtures", frappe.scrub(name) + ".json"))
-def import_doclist(path, overwrite=False, ignore_links=False, ignore_insert=False, insert=False):
+def import_doc(path, overwrite=False, ignore_links=False, ignore_insert=False, insert=False, submit=False):
if os.path.isdir(path):
files = [os.path.join(path, f) for f in os.listdir(path)]
else:
files = [path]
- def _import_doclist(d):
- b = frappe.get_doc(d)
- b.ignore_links = ignore_links
+ def _import_doc(d):
+ doc = frappe.get_doc(d)
+ doc.ignore_links = ignore_links
if insert:
- b.set("__islocal", True)
+ doc.set("__islocal", True)
try:
- b.insert_or_update()
+ doc.save()
except NameError:
if ignore_insert:
pass
else:
raise
- print "Imported: " + b.doctype + " / " + b.name
+ print "Imported: " + doc.doctype + " / " + doc.name
for f in files:
if f.endswith(".json"):
@@ -543,10 +92,10 @@ def import_doclist(path, overwrite=False, ignore_links=False, ignore_insert=Fals
data = json.loads(infile.read())
if isinstance(data, list):
for doc in data:
- _import_doclist(doc)
+ _import_doc(doc)
else:
- _import_doclist(data)
+ _import_doc(data)
frappe.db.commit()
if f.endswith(".csv"):
- import_file_by_path(f, ignore_links=True, overwrite=overwrite)
+ import_file_by_path(f, ignore_links=True, overwrite=overwrite, submit=submit)
frappe.db.commit()
diff --git a/frappe/core/page/data_import_tool/exporter.py b/frappe/core/page/data_import_tool/exporter.py
new file mode 100644
index 0000000000..fc32aa05f1
--- /dev/null
+++ b/frappe/core/page/data_import_tool/exporter.py
@@ -0,0 +1,194 @@
+# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+from __future__ import unicode_literals
+
+import frappe, json, os
+import frappe.permissions
+from frappe.utils.datautils import UnicodeWriter
+from frappe.utils import cstr, cint, flt
+
+from frappe.core.page.data_import_tool.data_import_tool import data_keys
+
+@frappe.whitelist()
+def get_template(doctype=None, parent_doctype=None, all_doctypes="No", with_data="No"):
+ all_doctypes = all_doctypes=="Yes"
+ if not parent_doctype:
+ parent_doctype = doctype
+
+ column_start_end = {}
+
+ if all_doctypes:
+ doctype_parentfield = {}
+ child_doctypes = []
+ for df in frappe.get_meta(doctype).get_table_fields():
+ child_doctypes.append(df.options)
+ doctype_parentfield[df.options] = df.fieldname
+
+ def add_main_header():
+ w.writerow(['Data Import Template'])
+ w.writerow([data_keys.main_table, doctype])
+
+ if parent_doctype != doctype:
+ w.writerow([data_keys.parent_table, parent_doctype])
+ else:
+ w.writerow([''])
+
+ w.writerow([''])
+ w.writerow(['Notes:'])
+ w.writerow(['Please do not change the template headings.'])
+ w.writerow(['First data column must be blank.'])
+ w.writerow(['If you are uploading new records, leave the "name" (ID) column blank.'])
+ w.writerow(['If you are uploading new records, "Naming Series" becomes mandatory, if present.'])
+ w.writerow(['Only mandatory fields are necessary for new records. You can delete non-mandatory columns if you wish.'])
+ w.writerow(['For updating, you can update only selective columns.'])
+ w.writerow(['You can only upload upto 5000 records in one go. (may be less in some cases)'])
+ if key == "parent":
+ w.writerow(['"Parent" signifies the parent table in which this row must be added'])
+ w.writerow(['If you are updating, please select "Overwrite" else existing rows will not be deleted.'])
+
+ def build_field_columns(dt):
+ meta = frappe.get_meta(dt)
+
+ tablecolumns = filter(None,
+ [(meta.get_field(f[0]) or None) for f in frappe.db.sql('desc `tab%s`' % dt)])
+
+ tablecolumns.sort(lambda a, b: a.idx - b.idx)
+
+ if dt==doctype:
+ column_start_end[dt] = frappe._dict({"start": 0})
+ else:
+ column_start_end[dt] = frappe._dict({"start": len(columns)})
+
+ append_field_column(frappe._dict({
+ "fieldname": "name",
+ "label": "ID",
+ "fieldtype": "Data",
+ "reqd": 1,
+ "idx": 0,
+ "info": "Leave blank for new records"
+ }), True)
+
+ for docfield in tablecolumns:
+ append_field_column(docfield, True)
+
+ # all non mandatory fields
+ for docfield in tablecolumns:
+ append_field_column(docfield, False)
+
+ # append DocType name
+ tablerow[column_start_end[dt].start + 1] = dt
+ if dt!=doctype:
+ tablerow[column_start_end[dt].start + 2] = doctype_parentfield[dt]
+
+ column_start_end[dt].end = len(columns) + 1
+
+ def append_field_column(docfield, mandatory):
+ if docfield and ((mandatory and docfield.reqd) or not (mandatory or docfield.reqd)) \
+ and (docfield.fieldname not in ('parenttype', 'trash_reason')) and not docfield.hidden:
+ tablerow.append("")
+ fieldrow.append(docfield.fieldname)
+ labelrow.append(docfield.label)
+ mandatoryrow.append(docfield.reqd and 'Yes' or 'No')
+ typerow.append(docfield.fieldtype)
+ inforow.append(getinforow(docfield))
+ columns.append(docfield.fieldname)
+
+ def append_empty_field_column():
+ tablerow.append("~")
+ fieldrow.append("~")
+ labelrow.append("")
+ mandatoryrow.append("")
+ typerow.append("")
+ inforow.append("")
+ columns.append("")
+
+ def getinforow(docfield):
+ """make info comment for options, links etc."""
+ if docfield.fieldtype == 'Select':
+ if not docfield.options:
+ return ''
+ elif docfield.options.startswith('link:'):
+ return 'Valid %s' % docfield.options[5:]
+ else:
+ return 'One of: %s' % ', '.join(filter(None, docfield.options.split('\n')))
+ elif docfield.fieldtype == 'Link':
+ return 'Valid %s' % docfield.options
+ elif docfield.fieldtype == 'Int':
+ return 'Integer'
+ elif docfield.fieldtype == "Check":
+ return "0 or 1"
+ elif hasattr(docfield, "info"):
+ return docfield.info
+ else:
+ return ''
+
+ def add_field_headings():
+ w.writerow(tablerow)
+ w.writerow(labelrow)
+ w.writerow(fieldrow)
+ w.writerow(mandatoryrow)
+ w.writerow(typerow)
+ w.writerow(inforow)
+ w.writerow([data_keys.data_separator])
+
+ def add_data():
+ def add_data_row(row_group, dt, doc, rowidx):
+ d = doc.copy()
+ if all_doctypes:
+ d.name = '"'+ d.name+'"'
+
+ if len(row_group) < rowidx + 1:
+ row_group.append([""] * (len(columns) + 1))
+ row = row_group[rowidx]
+ for i, c in enumerate(columns[column_start_end[dt].start:column_start_end[dt].end]):
+ row[column_start_end[dt].start + i + 1] = d.get(c, "")
+
+ if with_data=='Yes':
+ frappe.permissions.can_export(parent_doctype, raise_exception=True)
+
+ # get permitted data only
+ data = frappe.get_list(doctype, fields=["*"], limit_page_length=None)
+ for doc in data:
+ # add main table
+ row_group = []
+
+ add_data_row(row_group, doctype, doc, 0)
+
+ if all_doctypes:
+ # add child tables
+ for child_doctype in child_doctypes:
+ for ci, child in enumerate(frappe.db.sql("""select * from `tab%s`
+ where parent=%s order by idx""" % (child_doctype, "%s"), doc.name, as_dict=1)):
+ add_data_row(row_group, child_doctype, child, ci)
+
+ for row in row_group:
+ w.writerow(row)
+
+ w = UnicodeWriter()
+ key = 'parent' if parent_doctype != doctype else 'name'
+
+ add_main_header()
+
+ w.writerow([''])
+ tablerow = [data_keys.doctype, ""]
+ labelrow = ["Column Labels:", "ID"]
+ fieldrow = [data_keys.columns, key]
+ mandatoryrow = ['Mandatory:', 'Yes']
+ typerow = ['Type:', 'Data (text)']
+ inforow = ['Info:', '']
+ columns = [key]
+
+ build_field_columns(doctype)
+ if all_doctypes:
+ for d in child_doctypes:
+ append_empty_field_column()
+ build_field_columns(d)
+
+ add_field_headings()
+ add_data()
+
+ # write out response as a type csv
+ frappe.response['result'] = cstr(w.getvalue())
+ frappe.response['type'] = 'csv'
+ frappe.response['doctype'] = doctype
diff --git a/frappe/core/page/data_import_tool/importer.py b/frappe/core/page/data_import_tool/importer.py
new file mode 100644
index 0000000000..a3f3cbb098
--- /dev/null
+++ b/frappe/core/page/data_import_tool/importer.py
@@ -0,0 +1,248 @@
+# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+from __future__ import unicode_literals
+
+import frappe, json, os
+import frappe.permissions
+
+from frappe.utils.datautils import check_record, import_doc, getlink
+from frappe.utils import cint, cstr, flt
+from frappe.core.page.data_import_tool.data_import_tool import data_keys
+
+
+@frappe.whitelist()
+def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, overwrite=None, ignore_links=False):
+ """upload data"""
+ frappe.flags.mute_emails = True
+ # extra input params
+ params = json.loads(frappe.form_dict.get("params") or '{}')
+
+ if params.get("_submit"):
+ submit_after_import = True
+ if params.get("ignore_encoding_errors"):
+ ignore_encoding_errors = True
+
+ from frappe.utils.datautils import read_csv_content_from_uploaded_file
+
+ def bad_template():
+ frappe.msgprint("Please do not change the rows above '%s'" % data_keys.data_separator,
+ raise_exception=1)
+
+ def check_data_length():
+ max_rows = 5000
+ if not data:
+ frappe.msgprint("No data found", raise_exception=True)
+ elif len(data) > max_rows:
+ frappe.msgprint("Please upload only upto %d %ss at a time" % \
+ (max_rows, doctype), raise_exception=True)
+
+ def get_start_row():
+ for i, row in enumerate(rows):
+ if row and row[0]==data_keys.data_separator:
+ return i+1
+ bad_template()
+
+ def get_header_row(key):
+ return get_header_row_and_idx(key)[0]
+
+ def get_header_row_and_idx(key):
+ for i, row in enumerate(header):
+ if row and row[0]==key:
+ return row, i
+ return [], -1
+
+ def filter_empty_columns(columns):
+ empty_cols = filter(lambda x: x in ("", None), columns)
+
+ if empty_cols:
+ if columns[-1*len(empty_cols):] == empty_cols:
+ # filter empty columns if they exist at the end
+ columns = columns[:-1*len(empty_cols)]
+ else:
+				frappe.msgprint(frappe._("Please make sure that there are no empty columns in the file."),
+ raise_exception=1)
+
+ return columns
+
+ def make_column_map():
+ doctype_row, row_idx = get_header_row_and_idx(data_keys.doctype)
+ if row_idx == -1: # old style
+ return
+
+ dt = None
+ for i, d in enumerate(doctype_row[1:]):
+ if d not in ("~", "-"):
+ if d: # value in doctype_row
+ if doctype_row[i]==dt:
+ # prev column is doctype (in case of parentfield)
+ doctype_parentfield[dt] = doctype_row[i+1]
+ else:
+ dt = d
+ doctypes.append(d)
+ column_idx_to_fieldname[dt] = {}
+ column_idx_to_fieldtype[dt] = {}
+ if dt:
+ column_idx_to_fieldname[dt][i+1] = rows[row_idx + 2][i+1]
+ column_idx_to_fieldtype[dt][i+1] = rows[row_idx + 4][i+1]
+
+ def get_doc(start_idx):
+ if doctypes:
+ doc = {}
+ for idx in xrange(start_idx, len(rows)):
+ if (not doc) or main_doc_empty(rows[idx]):
+ for dt in doctypes:
+ d = {}
+ for column_idx in column_idx_to_fieldname[dt]:
+ try:
+ fieldname = column_idx_to_fieldname[dt][column_idx]
+ fieldtype = column_idx_to_fieldtype[dt][column_idx]
+
+ d[fieldname] = rows[idx][column_idx]
+ if fieldtype in ("Int", "Check"):
+ d[fieldname] = cint(d[fieldname])
+ elif fieldtype in ("Float", "Currency"):
+ d[fieldname] = flt(d[fieldname])
+ except IndexError, e:
+ pass
+
+ # scrub quotes from name and modified
+ if d.get("name") and d["name"].startswith('"'):
+ d["name"] = d["name"][1:-1]
+
+ if sum([0 if not val else 1 for val in d.values()]):
+ d['doctype'] = dt
+ if dt == doctype:
+ doc.update(d)
+ else:
+ if not overwrite:
+ d['parent'] = doc["name"]
+ d['parenttype'] = doctype
+ d['parentfield'] = doctype_parentfield[dt]
+ doc.setdefault(d['parentfield'], []).append(d)
+ else:
+ break
+
+ return doc
+ else:
+ d = frappe._dict(zip(columns, rows[start_idx][1:]))
+ d['doctype'] = doctype
+ return [d]
+
+ def main_doc_empty(row):
+ return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))
+
+ # header
+ if not rows:
+ rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
+ start_row = get_start_row()
+ header = rows[:start_row]
+ data = rows[start_row:]
+ doctype = get_header_row(data_keys.main_table)[1]
+ columns = filter_empty_columns(get_header_row(data_keys.columns)[1:])
+ doctypes = []
+ doctype_parentfield = {}
+ column_idx_to_fieldname = {}
+ column_idx_to_fieldtype = {}
+
+ if submit_after_import and not cint(frappe.db.get_value("DocType",
+ doctype, "is_submittable")):
+ submit_after_import = False
+
+ parenttype = get_header_row(data_keys.parent_table)
+
+ if len(parenttype) > 1:
+ parenttype = parenttype[1]
+ parentfield = get_parent_field(doctype, parenttype)
+
+ # check permissions
+ if not frappe.permissions.can_import(parenttype or doctype):
+ frappe.flags.mute_emails = False
+		return {"messages": [frappe._("Not allowed to Import") + ": " + frappe._(doctype)], "error": True}
+
+ # allow limit rows to be uploaded
+ check_data_length()
+ make_column_map()
+
+ frappe.db.begin()
+ if overwrite==None:
+ overwrite = params.get('overwrite')
+
+ # delete child rows (if parenttype)
+ if parenttype and overwrite:
+ delete_child_rows(data, doctype)
+
+ ret = []
+ error = False
+ parent_list = []
+ for i, row in enumerate(data):
+ # bypass empty rows
+ if main_doc_empty(row):
+ continue
+
+ row_idx = i + start_row
+ bean = None
+
+ doc = get_doc(row_idx)
+ try:
+ frappe.local.message_log = []
+ if doc.get("parentfield"):
+				parent = frappe.get_doc(doc["parenttype"], doc["parent"])
+ parent.append(doc)
+ parent.save()
+ ret.append('Inserted row for %s at #%s' % (getlink(parenttype,
+ doc.parent), unicode(doc.idx)))
+
+ else:
+ if overwrite and frappe.db.exists(doctype, doc["name"]):
+ original = frappe.get_doc(doctype, doc["name"])
+ original.update(doc)
+ original.save()
+ ret.append('Updated row (#%d) %s' % (row_idx + 1, getlink(original.doctype, original.name)))
+ else:
+ doc = frappe.get_doc(doc)
+ doc.ignore_links = ignore_links
+ doc.insert()
+ ret.append('Inserted row (#%d) %s' % (row_idx + 1, getlink(doc.doctype, doc.name)))
+ if submit_after_import:
+ doc.submit()
+ ret.append('Submitted row (#%d) %s' % (row_idx + 1, getlink(doc.doctype, doc.name)))
+ except Exception, e:
+ error = True
+ if bean:
+ frappe.errprint(bean.doclist)
+ err_msg = frappe.local.message_log and " ".join(frappe.local.message_log) or cstr(e)
+ ret.append('Error for row (#%d) %s : %s' % (row_idx + 1,
+ len(row)>1 and row[1] or "", err_msg))
+ frappe.errprint(frappe.get_traceback())
+
+ if error:
+ frappe.db.rollback()
+ else:
+ frappe.db.commit()
+
+ frappe.flags.mute_emails = False
+
+ return {"messages": ret, "error": error}
+
+def get_parent_field(doctype, parenttype):
+ parentfield = None
+
+ # get parentfield
+ if parenttype:
+ for d in frappe.get_meta(parenttype).get_table_fields():
+ if d.options==doctype:
+ parentfield = d.fieldname
+ break
+
+ if not parentfield:
+ frappe.msgprint("Did not find parentfield for %s (%s)" % \
+ (parenttype, doctype))
+ raise Exception
+
+ return parentfield
+
+def delete_child_rows(rows, doctype):
+ """delete child rows for all parents"""
+ for p in list(set([r[1] for r in rows])):
+ frappe.db.sql("""delete from `tab%s` where parent=%s""" % (doctype, '%s'), p)
diff --git a/frappe/tests/test_assign.py b/frappe/tests/test_assign.py
new file mode 100644
index 0000000000..789e135668
--- /dev/null
+++ b/frappe/tests/test_assign.py
@@ -0,0 +1,22 @@
+# Copyright (c) 2014, Web Notes Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+import frappe, unittest
+import frappe.widgets.form.assign_to
+
+class TestAssign(unittest.TestCase):
+ def test_assign(self):
+ todo = frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
+ if not frappe.db.exists("User", "test@example.com"):
+			frappe.get_doc({"doctype":"User", "email":"test@example.com", "first_name":"Test"}).insert()
+
+ added = frappe.widgets.form.assign_to.add({
+ "assign_to": "test@example.com",
+ "doctype": todo.doctype,
+ "name": todo.name,
+ "description": todo.description,
+ })
+ self.assertTrue("test@example.com" in added)
+
+ removed = frappe.widgets.form.assign_to.remove(todo.doctype, todo.name, "test@example.com")
+ self.assertTrue("test@example.com" not in removed)
diff --git a/frappe/tests/test_data_import.py b/frappe/tests/test_data_import.py
new file mode 100644
index 0000000000..140cd891ab
--- /dev/null
+++ b/frappe/tests/test_data_import.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2014, Web Notes Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+import frappe, unittest
+from frappe.core.page.data_import_tool import exporter
+from frappe.core.page.data_import_tool import importer
+from frappe.utils.datautils import read_csv_content
+
+class TestDataImport(unittest.TestCase):
+ def test_export(self):
+ exporter.get_template("User", all_doctypes="No", with_data="No")
+ content = read_csv_content(frappe.response.result)
+		self.assertEquals(content[1][1], "User")
+
+ def test_export_with_data(self):
+ exporter.get_template("User", all_doctypes="No", with_data="Yes")
+ content = read_csv_content(frappe.response.result)
+		self.assertEquals(content[1][1], "User")
+ self.assertTrue("Administrator" in [c[1] for c in content if len(c)>1])
+
+ def test_export_with_all_doctypes(self):
+ exporter.get_template("User", all_doctypes="Yes", with_data="Yes")
+ content = read_csv_content(frappe.response.result)
+		self.assertEquals(content[1][1], "User")
+ self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1])
+ self.assertEquals(content[13][0], "DocType:")
+ self.assertEquals(content[13][1], "User")
+ self.assertTrue("UserRole" in content[13])
+
+ def test_import(self):
+ exporter.get_template("Blog Category", all_doctypes="No", with_data="No")
+ content = read_csv_content(frappe.response.result)
+		content.append(["", "", "test-category", "Test Category"])
+ importer.upload(content)
+		self.assertEquals(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category")
+
+ # export with data
+ exporter.get_template("Blog Category", all_doctypes="No", with_data="Yes")
+ content = read_csv_content(frappe.response.result)
+
+ # overwrite
+ content[-1][3] = "New Title"
+ importer.upload(content, overwrite=True)
+		self.assertEquals(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title")
+
+ def test_import_with_children(self):
+ exporter.get_template("Event", all_doctypes="Yes", with_data="No")
+ content = read_csv_content(frappe.response.result)
+ content.append([""] * len(content[-2]))
+ content[-1][2] = "__Test Event"
+ content[-1][3] = "Private"
+		content[-1][4] = "2014-01-01 10:00:00.000000"
+ content[-1][content[15].index("person")] = "Administrator"
+ importer.upload(content)
+
+ ev = frappe.get_doc("Event", {"subject":"__Test Event"})
+ self.assertTrue("Administrator" in [d.person for d in ev.event_individuals])
+
\ No newline at end of file
diff --git a/frappe/utils/datautils.py b/frappe/utils/datautils.py
index 469d7fbc24..88761f7319 100644
--- a/frappe/utils/datautils.py
+++ b/frappe/utils/datautils.py
@@ -35,7 +35,7 @@ def read_csv_content_from_attached_file(doc):
def read_csv_content(fcontent, ignore_encoding=False):
rows = []
- if isinstance(fcontent, basestring):
+ if not isinstance(fcontent, unicode):
decoded = False
for encoding in ["utf-8", "windows-1250", "windows-1252"]:
try:
@@ -49,7 +49,7 @@ def read_csv_content(fcontent, ignore_encoding=False):
frappe.msgprint(frappe._("Unknown file encoding. Tried utf-8, windows-1250, windows-1252."),
raise_exception=True)
- fcontent = fcontent.encode("utf-8").splitlines(True)
+ fcontent = fcontent.encode("utf-8").splitlines(True)
try:
reader = csv.reader(fcontent)
diff --git a/frappe/utils/fixtures.py b/frappe/utils/fixtures.py
index 816e3eccdb..9c544704e3 100644
--- a/frappe/utils/fixtures.py
+++ b/frappe/utils/fixtures.py
@@ -4,14 +4,14 @@
from __future__ import unicode_literals
import frappe, os
-from frappe.core.page.data_import_tool.data_import_tool import import_doclist, export_fixture, export_csv
+from frappe.core.page.data_import_tool.data_import_tool import import_doc, export_fixture, export_csv
def sync_fixtures():
for app in frappe.get_installed_apps():
if os.path.exists(frappe.get_app_path(app, "fixtures")):
for fname in os.listdir(frappe.get_app_path(app, "fixtures")):
if fname.endswith(".json") or fname.endswith(".csv"):
- import_doclist(frappe.get_app_path(app, "fixtures", fname), ignore_links=True, overwrite=True)
+ import_doc(frappe.get_app_path(app, "fixtures", fname), ignore_links=True, overwrite=True)
def export_fixtures():