diff --git a/js/legacy/utils/datetime.js b/js/legacy/utils/datetime.js
index 42333bd97c..65238fd366 100644
--- a/js/legacy/utils/datetime.js
+++ b/js/legacy/utils/datetime.js
@@ -41,6 +41,7 @@ function int_to_str(i, len) {
wn.datetime = {
str_to_obj: function(d) {
+ if(typeof d=="object") return d;
if(!d) return new Date();
var tm = [null, null];
if(d.search(' ')!=-1) {
@@ -57,6 +58,7 @@ wn.datetime = {
},
obj_to_str: function(d) {
+ if(typeof d=='string') return d;
return d.getFullYear() + '-' + int_to_str(d.getMonth()+1,2) + '-' + int_to_str(d.getDate(),2);
},
@@ -76,9 +78,9 @@ wn.datetime = {
},
add_days: function(d, days) {
- dt = dateutil.str_to_obj(d)
- dt.setTime(dt.getTime()+(days*24*60*60*1000));
- return dateutil.obj_to_str(dt);
+ var dt = dateutil.str_to_obj(d);
+ var new_dt = new Date(dt.getTime()+(days*24*60*60*1000));
+ return dateutil.obj_to_str(new_dt);
},
add_months: function(d, months) {
diff --git a/js/legacy/utils/dom.js b/js/legacy/utils/dom.js
index dbc1767c6f..5e38849116 100644
--- a/js/legacy/utils/dom.js
+++ b/js/legacy/utils/dom.js
@@ -374,7 +374,7 @@ wn.urllib = {
// returns the base url with http + domain + path (-index.cgi or # or ?)
get_base_url: function() {
- var url= window.location.href.split('#')[0].split('?')[0].split('index.cgi')[0];
+ var url= window.location.href.split('#')[0].split('?')[0].split('app.html')[0];
if(url.substr(url.length-1, 1)=='/') url = url.substr(0, url.length-1)
return url
},
diff --git a/py/core/doctype/doctype_mapper/doctype_mapper.py b/py/core/doctype/doctype_mapper/doctype_mapper.py
index acca86f58f..bcabd6db15 100644
--- a/py/core/doctype/doctype_mapper/doctype_mapper.py
+++ b/py/core/doctype/doctype_mapper/doctype_mapper.py
@@ -73,8 +73,10 @@ class DocType:
if not doclist:
doclist.append(to_doc)
- tbl_list = sql("select from_table, to_table, from_field, to_field, match_id, validation_logic \
- from `tabTable Mapper Detail` where parent ='%s' order by match_id" % self.doc.name, as_dict=1)
+ tbl_list = sql("""\
+ select from_table, to_table, from_field, to_field, match_id, validation_logic
+ from `tabTable Mapper Detail` where parent ="%s" order by match_id""" \
+ % self.doc.name, as_dict=1)
for t in tbl_list:
if [t['from_table'], t['to_table']] in eval(from_to_list):
@@ -114,11 +116,14 @@ class DocType:
"""
docnames = ()
if t['from_table'] == self.doc.from_doctype:
- docnames = sql("select name from `tab%s` where name = '%s' and %s" % (from_dt, from_dn, t['validation_logic']))
+ docnames = sql("""select name from `tab%s` where name = "%s" and %s""" \
+ % (from_dt, from_dn, t['validation_logic']))
if not docnames:
msgprint("Validation failed in doctype mapper. Please contact Administrator.", raise_exception=1)
else:
- docnames = sql("select name from `tab%s` where parent='%s' and parenttype = '%s' and %s order by idx" \
+ docnames = sql("""\
+ select name from `tab%s`
+ where parent="%s" and parenttype = "%s" and %s order by idx""" \
% (t['from_table'], from_dn, self.doc.from_doctype, t['validation_logic']))
return docnames
@@ -129,7 +134,7 @@ class DocType:
return [[f[0], f[1], f[2]] for f in sql("""
select from_field, to_field, map
from `tabField Mapper Detail`
- where parent = '%s' and match_id = %s
+ where parent = "%s" and match_id = %s
""" % (self.doc.name, t['match_id']))]
@@ -142,11 +147,11 @@ class DocType:
exception_flds = copy.copy(default_fields)
exception_flds += [f[1] for f in flds]
- from_flds = [d.fieldname for d in get(t['from_table']) \
+ from_flds = [d.fieldname for d in get(t['from_table'], 0) \
if cint(d.no_copy) == 0 and d.docstatus != 2 and d.fieldname \
and d.fieldtype not in ('Table', 'Section Break', 'Column Break', 'HTML')]
- to_flds = [d.fieldname for d in get(t['to_table']) \
+ to_flds = [d.fieldname for d in get(t['to_table'], 0) \
if cint(d.no_copy) == 0 and d.docstatus != 2 and d.fieldname \
and d.fieldtype not in ('Table', 'Section Break', 'Column Break', 'HTML')]
@@ -200,8 +205,8 @@ class DocType:
"""
flds = {}
for t in getlist(self.doclist, 'table_mapper_details'):
- from_flds = [cstr(d.fieldname) for d in get(t.from_table)]
- to_flds = [cstr(d.fieldname) for d in get(t.to_table)]
+ from_flds = [cstr(d.fieldname) for d in get(t.from_table, 0)]
+ to_flds = [cstr(d.fieldname) for d in get(t.to_table, 0)]
flds[cstr(t.match_id)] = [cstr(t.from_table), from_flds, cstr(t.to_table), to_flds]
for d in getlist(self.doclist, 'field_mapper_details'):
@@ -235,10 +240,10 @@ class DocType:
def get_label_and_type(self, from_dt, to_dt):
"""get label, fieldtype"""
from_flds, to_flds = {}, {}
- for d in get(from_dt):
+ for d in get(from_dt, 0):
from_flds[d.fieldname] = {'label': d.label, 'fieldtype': d.fieldtype}
- for d in get(to_dt):
+ for d in get(to_dt, 0):
to_flds[d.fieldname] = {'label': d.label, 'fieldtype': d.fieldtype}
return from_flds, to_flds
@@ -262,15 +267,21 @@ class DocType:
cur_val = '%.2f' % flt(cur_val)
if cl['op'] == '=' and to_flds[cl['to_fld']]['fieldtype'] in ['Currency', 'Float']:
- consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' - %s <= 0.5" \
- % (cl['from_fld'], t.from_table, child_obj.fields[t.reference_key], flt(cur_val), cl['from_fld']))
+ consistent = sql("""\
+ select name, %s from `tab%s`
+ where name = "%s" and "%s" - %s <= 0.5""" \
+ % (cl['from_fld'], t.from_table, child_obj.fields[t.reference_key],
+ flt(cur_val), cl['from_fld']))
else:
- consistent = sql("select name, %s from `tab%s` where name = '%s' and '%s' %s ifnull(%s, '')" \
+ consistent = sql("""\
+ select name, %s from `tab%s`
+ where name = "%s" and "%s" %s ifnull(%s, '')""" \
% (cl['from_fld'], t.from_table, child_obj.fields[t.reference_key], \
- to_flds[cl['to_fld']]['fieldtype'] in ('Currency', 'Float', 'Int') and flt(cur_val) or cstr(cur_val), cl['op'], cl['from_fld']))
+ to_flds[cl['to_fld']]['fieldtype'] in ('Currency', 'Float', 'Int') \
+ and flt(cur_val) or cstr(cur_val), cl['op'], cl['from_fld']))
if not self.ref_doc:
- det = sql("select name, parent from `tab%s` where name = '%s'" % (t.from_table, child_obj.fields[t.reference_key]))
+ det = sql("""select name, parent from `tab%s` where name = \"%s\"""" % (t.from_table, child_obj.fields[t.reference_key]))
self.ref_doc = det[0][1] and det[0][1] or det[0][0]
if not consistent:
@@ -285,7 +296,8 @@ class DocType:
def check_ref_docstatus(self):
if self.ref_doc:
- det = sql("select name, docstatus from `tab%s` where name = '%s'" % (self.doc.from_doctype, self.ref_doc))
+ det = sql("""select name, docstatus from `tab%s` where name = \"%s\"""" \
+ % (self.doc.from_doctype, self.ref_doc))
if not det:
msgprint("%s: %s does not exists in the system" % (self.doc.from_doctype, self.ref_doc), raise_exception=1)
elif self.doc.ref_doc_submitted and det[0][1] != 1:
diff --git a/py/core/doctype/letter_head/letter_head.js b/py/core/doctype/letter_head/letter_head.js
index f1f8e62d8a..acf93002df 100644
--- a/py/core/doctype/letter_head/letter_head.js
+++ b/py/core/doctype/letter_head/letter_head.js
@@ -34,7 +34,7 @@ cur_frm.cscript['set_from_image'] = function(doc, dt, dn) {
return;
}
- var file_name = doc.file_list.split(',')[0]
+ var file_name = doc.file_list.split(',')[1]
if(!in_list(['gif','jpg','jpeg','png'], file_name.split('.')[1].toLowerCase())) {
msgprint("Please upload a web friendly (GIF, JPG or PNG) image file for the letter head");
diff --git a/py/core/page/data_import_tool/data_import_tool.js b/py/core/page/data_import_tool/data_import_tool.js
index 61800d04c2..83231591a5 100644
--- a/py/core/page/data_import_tool/data_import_tool.js
+++ b/py/core/page/data_import_tool/data_import_tool.js
@@ -121,6 +121,11 @@ wn.pages['data-import-tool'].onload = function(wrapper) {
$(' Overwrite
')
.insertBefore('#dit-upload-area form input[type="submit"]')
+ // add ignore option
+ $(' Ignore Encoding Errors
')
+ .insertBefore('#dit-upload-area form input[type="submit"]')
+
+
// add overwrite option
$('Date Format:
')
.insertBefore('#dit-upload-area form input[type="submit"]')
diff --git a/py/core/page/data_import_tool/data_import_tool.py b/py/core/page/data_import_tool/data_import_tool.py
index d0d5d0fad1..6063b600f8 100644
--- a/py/core/page/data_import_tool/data_import_tool.py
+++ b/py/core/page/data_import_tool/data_import_tool.py
@@ -132,13 +132,16 @@ def upload():
for row in csvrows:
newrow = []
for val in row:
- try:
- newrow.append(unicode(val.strip(), 'utf-8'))
- except UnicodeDecodeError, e:
- raise Exception, """Some character(s) in row #%s, column #%s are
- not readable by utf-8. Ignoring them. If you are importing a non
- english language, please make sure your file is saved in the 'utf-8'
- encoding.""" % (csvrows.index(row)+1, row.index(val)+1)
+ if webnotes.form_dict.get('ignore_encoding_errors'):
+ newrow.append(unicode(val.strip(), 'utf-8', errors='ignore'))
+ else:
+ try:
+ newrow.append(unicode(val.strip(), 'utf-8'))
+ except UnicodeDecodeError, e:
+ raise Exception, """Some character(s) in row #%s, column #%s are
+ not readable by utf-8. If you are importing a non
+ English language, please make sure your file is saved in the 'utf-8'
+ encoding, or enable 'Ignore Encoding Errors' to skip them.""" % (csvrows.index(row)+1, row.index(val)+1)
rows.append(newrow)
diff --git a/py/core/page/login_page/login_page.html b/py/core/page/login_page/login_page.html
index 32790f5d29..eeaa229fd1 100644
--- a/py/core/page/login_page/login_page.html
+++ b/py/core/page/login_page/login_page.html
@@ -1,6 +1,7 @@
Forgot Password