diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml
new file mode 100644
index 0000000000..4b1147e79f
--- /dev/null
+++ b/.github/workflows/docker-release.yml
@@ -0,0 +1,14 @@
+name: Trigger Docker build on release
+on:
+  release:
+    types: [released]
+jobs:
+  curl:
+    runs-on: ubuntu-latest
+    container:
+      image: alpine:latest
+    steps:
+      - name: curl
+        run: |
+          apk add curl bash
+          curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests
diff --git a/.travis.yml b/.travis.yml
index 9fab56188b..63895675ea 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -47,23 +47,11 @@ matrix:
     script: bench --site test_site run-ui-tests frappe --headless
 
 before_install:
-  # do we really want to run travis? 
+  # do we really want to run travis?
   - |
-    ONLY_DOCS_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.(md|png|jpg|jpeg)$|^.github|LICENSE' ; echo $?)
-    ONLY_JS_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.js$' ; echo $?)
-    ONLY_PY_CHANGES=$(git diff --name-only $TRAVIS_COMMIT_RANGE | grep -qvE '\.py$' ; echo $?)
-
-    if [[ $ONLY_DOCS_CHANGES == "1" ]]; then
-      echo "Only docs were updated, stopping build process.";
-      exit;
-    fi
-    if [[ $ONLY_JS_CHANGES == "1" && $TYPE == "server" ]]; then
-      echo "Only JavaScript code was updated; Stopping Python build process.";
-      exit;
-    fi
-    if [[ $ONLY_PY_CHANGES == "1" && $TYPE == "ui" ]]; then
-      echo "Only Python code was updated, stopping Cypress build process.";
-      exit;
+    python ./.travis/roulette.py
+    if [[ $? != 2 ]]; then
+      exit;
     fi
 
 # install wkhtmltopdf
diff --git a/.travis/roulette.py b/.travis/roulette.py
new file mode 100644
index 0000000000..4d83137199
--- /dev/null
+++ b/.travis/roulette.py
@@ -0,0 +1,54 @@
+# exits with code 2 if the full test suite should run; any other exit code stops the build early
+import os
+import re
+import shlex
+import subprocess
+import sys
+
+
+def get_output(command, shell=False):
+    print(command)
+    command = shlex.split(command)
+    return subprocess.check_output(command, shell=shell, encoding="utf8").strip()
+
+def is_py(file):
+    return file.endswith("py")
+
+def is_js(file):
+    return file.endswith("js")
+
+def is_docs(file):
+    regex = re.compile(r'\.(md|png|jpg|jpeg)$|^\.github|LICENSE')
+    return bool(regex.search(file))
+
+
+if __name__ == "__main__":
+    build_type = os.environ.get("TYPE")
+    commit_range = os.environ.get("TRAVIS_COMMIT_RANGE")
+    print("Build Type: {}".format(build_type))
+    print("Commit Range: {}".format(commit_range))
+
+    try:
+        files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False)
+    except Exception:
+        sys.exit(2)
+
+    if "fatal" not in files_changed:
+        files_list = files_changed.split()
+        only_docs_changed = len(list(filter(is_docs, files_list))) == len(files_list)
+        only_js_changed = len(list(filter(is_js, files_list))) == len(files_list)
+        only_py_changed = len(list(filter(is_py, files_list))) == len(files_list)
+
+        if only_docs_changed:
+            print("Only docs were updated, stopping build process.")
+            sys.exit(0)
+
+        if only_js_changed and build_type == "server":
+            print("Only JavaScript code was updated; Stopping Python build process.")
+            sys.exit(0)
+
+        if only_py_changed and build_type == "ui":
+            print("Only Python code was updated, stopping Cypress build process.")
+            sys.exit(0)
+
+    sys.exit(2)
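
The gate above works purely on exit status: roulette.py exits 2 when the full suite should run and 0 when the remaining build steps can be skipped, and the .travis.yml step treats any status other than 2 as "stop here". A minimal sketch of the same contract, with the environment values invented for illustration:

    import os
    import subprocess

    # pretend-Travis environment; real builds get these from the CI runner
    env = {**os.environ, "TYPE": "server", "TRAVIS_COMMIT_RANGE": "abc1234...def5678"}

    result = subprocess.run(["python", "./.travis/roulette.py"], env=env)
    if result.returncode != 2:
        # mirror the .travis.yml check: anything other than 2 means "skip the rest"
        raise SystemExit(0)
    print("exit code 2: run the full test suite")

diff --git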
a/README.md b/README.md index 860958087e..7545249610 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@
[truncated in the source: the README.md hunk body and the new import-log HTML template (a table with ${__('Row Number')}, ${__('Status')} and ${__('Message')} columns that renders ${log.exception})]
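
The parser that replaces the block below groups spreadsheet rows into doc payloads: a row starts a new parent doc, and the rows after it are folded in while their parent columns are blank or repeat the parent row's values (see parse_next_row_for_import further down). A toy illustration with invented headers and values:

    # columns: Title | Child Title (one parent doctype, one child table)
    rows = [
        ["Invoice A", "Item 1"],
        ["",          "Item 2"],  # blank parent column -> child row of Invoice A
        ["Invoice A", "Item 3"],  # same parent values  -> also folded into Invoice A
        ["Invoice B", "Item 4"],  # new parent value    -> starts the next payload
    ]
    # parse_next_row_for_import would return the Invoice A doc with three child
    # rows, the three rows that built it, and leave the last row for the next call.

The removed importer.py error handler resumes here:

{}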
'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) - else: - err_msg = '{}
'.format(cstr(e)) - - error_trace = frappe.get_traceback() - if error_trace: - error_log_doc = frappe.log_error(error_trace) - error_link = get_absolute_url("Error Log", error_log_doc.name) - else: - error_link = None - - log(**{ - "row": row_idx + 1, - "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), - "message": err_msg, - "indicator": "red", - "link":error_link - }) - - # data with error to create a new file - # include the errored data in the last row as last_error_row_idx will not be updated for the last row - if skip_errors: - if last_error_row_idx == len(rows)-1: - last_error_row_idx = len(rows) - data_rows_with_error += rows[row_idx:last_error_row_idx] - else: - rollback_flag = True - finally: - frappe.local.message_log = [] - - start_row += batch_size - if rollback_flag: - frappe.db.rollback() - else: - frappe.db.commit() - - frappe.flags.mute_emails = False - frappe.flags.in_import = False - - log_message = {"messages": import_log, "error": error_flag} - if data_import_doc: - data_import_doc.log_details = json.dumps(log_message) - - import_status = None - if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): - import_status = "Partially Successful" - # write the file with the faulty row - file_name = 'error_' + filename + file_extension - if file_extension == '.xlsx': - from frappe.utils.xlsxutils import make_xlsx - xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") - file_data = xlsx_file.getvalue() + if not header: + header = Header(i, row, self.doctype, self.raw_data, self.column_to_field_map) else: - from frappe.utils.csvutils import to_csv - file_data = to_csv(data_rows_with_error) - _file = frappe.get_doc({ - "doctype": "File", - "file_name": file_name, - "attached_to_doctype": "Data Import", - "attached_to_name": data_import_doc.name, - "folder": "Home/Attachments", - "content": file_data}) - _file.save() - data_import_doc.error_file = _file.file_url + row_obj = Row(i, row, self.doctype, header, self.import_type) + data.append(row_obj) - elif error_flag: - import_status = "Failed" - else: - import_status = "Successful" + self.header = header + self.columns = self.header.columns + self.data = data - data_import_doc.import_status = import_status - data_import_doc.save() - if data_import_doc.import_status in ["Successful", "Partially Successful"]: - data_import_doc.submit() - publish_progress(100, True) - else: - publish_progress(0, True) - frappe.db.commit() - else: - return log_message + if len(data) < 1: + frappe.throw( + _("Import template should contain a Header and atleast one row."), + title=_("Template Error"), + ) -def get_parent_field(doctype, parenttype): - parentfield = None + def get_data_for_import_preview(self): + """Adds a serial number column as the first column""" - # get parentfield - if parenttype: - for d in frappe.get_meta(parenttype).get_table_fields(): - if d.options==doctype: - parentfield = d.fieldname + columns = [frappe._dict({"header_title": "Sr. 
No", "skip_import": True})] + columns += [col.as_dict() for col in self.columns] + for col in columns: + # only pick useful fields in docfields to minimise the payload + if col.df: + col.df = { + "fieldtype": col.df.fieldtype, + "fieldname": col.df.fieldname, + "label": col.df.label, + "options": col.df.options, + "parent": col.df.parent, + "reqd": col.df.reqd, + "default": col.df.default, + "read_only": col.df.read_only, + } + + data = [[row.row_number] + row.as_list() for row in self.data] + + warnings = self.get_warnings() + + out = frappe._dict() + out.data = data + out.columns = columns + out.warnings = warnings + total_number_of_rows = len(out.data) + if total_number_of_rows > MAX_ROWS_IN_PREVIEW: + out.data = out.data[:MAX_ROWS_IN_PREVIEW] + out.max_rows_exceeded = True + out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW + out.total_number_of_rows = total_number_of_rows + return out + + def get_payloads_for_import(self): + payloads = [] + # make a copy + data = list(self.data) + while data: + doc, rows, data = self.parse_next_row_for_import(data) + payloads.append(frappe._dict(doc=doc, rows=rows)) + return payloads + + def parse_next_row_for_import(self, data): + """ + Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. + Returns the doc, rows, and data without the rows. + """ + doctypes = self.header.doctypes + + # first row is included by default + first_row = data[0] + rows = [first_row] + + # if there are child doctypes, find the subsequent rows + if len(doctypes) > 1: + # subsequent rows either dont have any parent value set + # or have the same value as the parent row + # we include a row if either of conditions match + parent_column_indexes = self.header.get_column_indexes(self.doctype) + parent_row_values = first_row.get_values(parent_column_indexes) + + data_without_first_row = data[1:] + for row in data_without_first_row: + row_values = row.get_values(parent_column_indexes) + # if the row is blank, it's a child row doc + if all([v in INVALID_VALUES for v in row_values]): + rows.append(row) + continue + # if the row has same values as parent row, it's a child row doc + if row_values == parent_row_values: + rows.append(row) + continue + # if any of those conditions dont match, it's the next doc break - if not parentfield: - frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) - raise Exception + parent_doc = None + for row in rows: + for doctype, table_df in doctypes: + if doctype == self.doctype and not parent_doc: + parent_doc = row.parse_doc(doctype) - return parentfield + if doctype != self.doctype and table_df: + child_doc = row.parse_doc(doctype, parent_doc, table_df) + parent_doc[table_df.fieldname] = parent_doc.get(table_df.fieldname, []) + parent_doc[table_df.fieldname].append(child_doc) -def delete_child_rows(rows, doctype): - """delete child rows for all parents""" - for p in list(set([r[1] for r in rows])): - if p: - frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) + doc = parent_doc + # check if there is atleast one row for mandatory table fields + meta = frappe.get_meta(self.doctype) + mandatory_table_fields = [ + df + for df in meta.fields + if df.fieldtype in table_fieldtypes + and df.reqd + and len(doc.get(df.fieldname, [])) == 0 + ] + if len(mandatory_table_fields) == 1: + self.warnings.append( + { + "row": first_row.row_number, + "message": _("There should be atleast one row for {0} table").format( + mandatory_table_fields[0].label + ), + } + ) + elif 
mandatory_table_fields: + fields_string = ", ".join([df.label for df in mandatory_table_fields]) + message = _("There should be atleast one row for the following tables: {0}").format( + fields_string + ) + self.warnings.append({"row": first_row.row_number, "message": message}) + + return doc, rows, data[len(rows) :] + + def get_warnings(self): + warnings = [] + for col in self.header.columns: + warnings += col.warnings + + for row in self.data: + warnings += row.warnings + + return warnings + + ###### + + def read_file(self, file_path): + extn = file_path.split(".")[1] + + file_content = None + with io.open(file_path, mode="rb") as f: + file_content = f.read() + + return file_content, extn + + def read_content(self, content, extension): + error_title = _("Template Error") + if extension not in ("csv", "xlsx", "xls"): + frappe.throw( + _("Import template should be of type .csv, .xlsx or .xls"), title=error_title + ) + + if extension == "csv": + data = read_csv_content(content) + elif extension == "xlsx": + data = read_xlsx_file_from_attached_file(fcontent=content) + elif extension == "xls": + data = read_xls_file_from_attached_file(content) + + return data + + +class Row: + link_values_exist_map = {} + + def __init__(self, index, row, doctype, header, import_type): + self.index = index + self.row_number = index + 1 + self.doctype = doctype + self.data = row + self.header = header + self.import_type = import_type + self.warnings = [] + + len_row = len(self.data) + len_columns = len(self.header.columns) + if len_row != len_columns: + less_than_columns = len_row < len_columns + message = ( + "Row has less values than columns" + if less_than_columns + else "Row has more values than columns" + ) + self.warnings.append( + {"row": self.row_number, "message": message,} + ) + + def parse_doc(self, doctype, parent_doc=None, table_df=None): + col_indexes = self.header.get_column_indexes(doctype, table_df) + values = self.get_values(col_indexes) + columns = self.header.get_columns(col_indexes) + doc = self._parse_doc(doctype, columns, values, parent_doc, table_df) + return doc + + def _parse_doc(self, doctype, columns, values, parent_doc=None, table_df=None): + doc = frappe._dict() + if self.import_type == INSERT: + # new_doc returns a dict with default values set + doc = frappe.new_doc( + doctype, + parent_doc=parent_doc, + parentfield=table_df.fieldname if table_df else None, + as_dict=True, + ) + + # remove standard fields and __islocal + for key in frappe.model.default_fields + ("__islocal",): + doc.pop(key, None) + + for col, value in zip(columns, values): + df = col.df + if value in INVALID_VALUES: + value = None + + if value is not None: + value = self.validate_value(value, col) + + if value is not None: + doc[df.fieldname] = self.parse_value(value, col) + + is_table = frappe.get_meta(doctype).istable + is_update = self.import_type == UPDATE + if is_table and is_update and doc.get("name") in INVALID_VALUES: + # for table rows being inserted in update + # create a new doc with defaults set + new_doc = frappe.new_doc(doctype, as_dict=True) + new_doc.update(doc) + doc = new_doc + + self.check_mandatory_fields(doctype, doc, table_df) + return doc + + def validate_value(self, value, col): + df = col.df + if df.fieldtype == "Select": + select_options = df.get_select_options() + if select_options and value not in select_options: + options_string = ", ".join([frappe.bold(d) for d in select_options]) + msg = _("Value must be one of {0}").format(options_string) + self.warnings.append( + { + "row": 
self.row_number, + "field": df.as_dict(convert_dates_to_str=True), + "message": msg, + } + ) + return + + elif df.fieldtype == "Link": + exists = self.link_exists(value, df) + if not exists: + msg = _("Value {0} missing for {1}").format( + frappe.bold(value), frappe.bold(df.options) + ) + self.warnings.append( + { + "row": self.row_number, + "field": df.as_dict(convert_dates_to_str=True), + "message": msg, + } + ) + return + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + if isinstance(value, frappe.string_types): + # value was not parsed as datetime object + self.warnings.append( + { + "row": self.row_number, + "col": col.column_number, + "field": df.as_dict(convert_dates_to_str=True), + "message": _("Value {0} must in {1} format").format( + frappe.bold(value), frappe.bold(get_user_format(col.date_format)) + ), + } + ) + return + + return value + + def link_exists(self, value, df): + key = df.options + "::" + value + if Row.link_values_exist_map.get(key) is None: + Row.link_values_exist_map[key] = frappe.db.exists(df.options, value) + return Row.link_values_exist_map.get(key) + + def parse_value(self, value, col): + df = col.df + if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: + return value + + value = cstr(value) + + # convert boolean values to 0 or 1 + valid_check_values = ["t", "f", "true", "false", "yes", "no", "y", "n"] + if df.fieldtype == "Check" and value.lower().strip() in valid_check_values: + value = value.lower().strip() + value = 1 if value in ["t", "true", "y", "yes"] else 0 + + if df.fieldtype in ["Int", "Check"]: + value = cint(value) + elif df.fieldtype in ["Float", "Percent", "Currency"]: + value = flt(value) + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + + return value + + def get_date(self, value, column): + date_format = column.date_format + if date_format: + try: + return datetime.strptime(value, date_format) + except ValueError: + # ignore date values that dont match the format + # import will break for these values later + pass + return value + + def check_mandatory_fields(self, doctype, doc, table_df=None): + """If import type is Insert: + Check for mandatory fields (except table fields) in doc + if import type is Update: + Check for name field or autoname field in doc + """ + meta = frappe.get_meta(doctype) + if self.import_type == UPDATE: + if meta.istable: + # when updating records with table rows, + # there are two scenarios: + # 1. if row 'name' is provided in the template + # the table row will be updated + # 2. 
if row 'name' is not provided
+                #    then a new row will be added
+                # so we don't need to check for mandatory fields
+                return
+
+            # for update, only ID (name) field is mandatory
+            id_field = get_id_field(doctype)
+            if doc.get(id_field.fieldname) in INVALID_VALUES:
+                self.warnings.append(
+                    {
+                        "row": self.row_number,
+                        "message": _("{0} is a mandatory field").format(id_field.label),
+                    }
+                )
+            return
+
+        fields = [
+            df
+            for df in meta.fields
+            if df.fieldtype not in table_fieldtypes
+            and df.reqd
+            and doc.get(df.fieldname) in INVALID_VALUES
+        ]
+
+        if not fields:
+            return
+
+        def get_field_label(df):
+            return "{0}{1}".format(df.label, " ({})".format(table_df.label) if table_df else "")
+
+        if len(fields) == 1:
+            field_label = get_field_label(fields[0])
+            self.warnings.append(
+                {
+                    "row": self.row_number,
+                    "message": _("{0} is a mandatory field").format(frappe.bold(field_label)),
+                }
+            )
+        else:
+            fields_string = ", ".join([frappe.bold(get_field_label(df)) for df in fields])
+            self.warnings.append(
+                {
+                    "row": self.row_number,
+                    "message": _("{0} are mandatory fields").format(fields_string),
+                }
+            )
+
+    def get_values(self, indexes):
+        return [self.data[i] for i in indexes]
+
+    def get(self, index):
+        return self.data[index]
+
+    def as_list(self):
+        return self.data
+
+
+class Header(Row):
+    def __init__(self, index, row, doctype, raw_data, column_to_field_map):
+        self.index = index
+        self.row_number = index + 1
+        self.data = row
+        self.doctype = doctype
+
+        self.seen = []
+        self.columns = []
+
+        for j, header in enumerate(row):
+            column_values = [get_item_at_index(r, j) for r in raw_data]
+            column = Column(
+                j, header, self.doctype, column_values, column_to_field_map.get(header), self.seen
+            )
+            self.seen.append(header)
+            self.columns.append(column)
+
+        doctypes = []
+        for col in self.columns:
+            if not col.df:
+                continue
+            if col.df.parent == self.doctype:
+                doctypes.append((col.df.parent, None))
+            else:
+                doctypes.append((col.df.parent, col.df.child_table_df))
+
+        self.doctypes = sorted(
+            list(set(doctypes)), key=lambda x: -1 if x[0] == self.doctype else 1
+        )
+
+    def get_column_indexes(self, doctype, tablefield=None):
+        def is_table_field(df):
+            if tablefield:
+                return df.child_table_df.fieldname == tablefield.fieldname
+            return True
+
+        return [
+            col.index
+            for col in self.columns
+            if not col.skip_import
+            and col.df
+            and col.df.parent == doctype
+            and is_table_field(col.df)
+        ]
+
+    def get_columns(self, indexes):
+        return [self.columns[i] for i in indexes]
+
+
+class Column:
+    seen = []
+    fields_column_map = {}
+
+    def __init__(self, index, header, doctype, column_values, map_to_field=None, seen=[]):
+        self.index = index
+        self.column_number = index + 1
+        self.doctype = doctype
+        self.header_title = header
+        self.column_values = column_values
+        self.map_to_field = map_to_field
+        self.seen = seen
+
+        self.date_format = None
+        self.df = None
+        self.skip_import = None
+        self.warnings = []
+
+        self.meta = frappe.get_meta(doctype)
+        self.parse()
+        self.parse_date_format()
+
+    def parse(self):
+        header_title = self.header_title
+        column_number = str(self.column_number)
+        skip_import = False
+
+        if self.map_to_field and self.map_to_field != "Don't Import":
+            df = get_df_for_column_header(self.doctype, self.map_to_field)
+            if df:
+                self.warnings.append(
+                    {
+                        "message": _("Mapping column {0} to field {1}").format(
+                            frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label)
+                        ),
+                        "type": "info",
+                    }
+                )
+            else:
+                self.warnings.append(
+                    {
+                        "message": _("Could not map 
column {0} to field {1}").format( + column_number, self.map_to_field + ), + "type": "info", + } + ) + else: + df = get_df_for_column_header(self.doctype, header_title) + # df = df_by_labels_and_fieldnames.get(header_title) + + if not df: + skip_import = True + else: + skip_import = False + + if header_title in self.seen: + self.warnings.append( + { + "col": column_number, + "message": _("Skipping Duplicate Column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + df = None + skip_import = True + elif self.map_to_field == "Don't Import": + skip_import = True + self.warnings.append( + { + "col": column_number, + "message": _("Skipping column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + elif header_title and not df: + self.warnings.append( + { + "col": column_number, + "message": _("Cannot match column {0} with any field").format( + frappe.bold(header_title) + ), + "type": "info", + } + ) + elif not header_title and not df: + self.warnings.append( + {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} + ) + + self.df = df + self.skip_import = skip_import + + def parse_date_format(self): + if self.df and self.df.fieldtype in ("Date", "Time", "Datetime"): + self.date_format = self.guess_date_format_for_column() + + def guess_date_format_for_column(self): + """ Guesses date format for a column by parsing all the values in the column, + getting the date format and then returning the one which has the maximum frequency + """ + + date_formats = [ + frappe.utils.guess_date_format(d) for d in self.column_values if isinstance(d, str) + ] + date_formats = [d for d in date_formats if d] + if not date_formats: + return + + unique_date_formats = set(date_formats) + max_occurred_date_format = max(unique_date_formats, key=date_formats.count) + + if len(unique_date_formats) > 1: + # fmt: off + message = _("The column {0} has {1} different date formats. Automatically setting {2} as the default format as it is the most common. 
Please change other values in this column to this format.") + # fmt: on + user_date_format = get_user_format(max_occurred_date_format) + self.warnings.append( + { + "col": self.column_number, + "message": message.format( + frappe.bold(self.header_title), + len(unique_date_formats), + frappe.bold(user_date_format), + ), + "type": "info", + } + ) + + return max_occurred_date_format + + def as_dict(self): + d = frappe._dict() + d.index = self.index + d.column_number = self.column_number + d.doctype = self.doctype + d.header_title = self.header_title + d.map_to_field = self.map_to_field + d.date_format = self.date_format + d.df = self.df + d.skip_import = self.skip_import + d.warnings = self.warnings + return d + + +def build_fields_dict_for_column_matching(parent_doctype): + """ + Build a dict with various keys to match with column headers and value as docfield + The keys can be label or fieldname + { + 'Customer': df1, + 'customer': df1, + 'Due Date': df2, + 'due_date': df2, + 'Item Code (Sales Invoice Item)': df3, + 'Sales Invoice Item:item_code': df3, + } + """ + + def get_standard_fields(doctype): + meta = frappe.get_meta(doctype) + if meta.istable: + standard_fields = [ + {"label": "Parent", "fieldname": "parent"}, + {"label": "Parent Type", "fieldname": "parenttype"}, + {"label": "Parent Field", "fieldname": "parentfield"}, + {"label": "Row Index", "fieldname": "idx"}, + ] + else: + standard_fields = [ + {"label": "Owner", "fieldname": "owner"}, + {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, + ] + + out = [] + for df in standard_fields: + df = frappe._dict(df) + df.parent = doctype + out.append(df) + return out + + parent_meta = frappe.get_meta(parent_doctype) + out = {} + + # doctypes and fieldname if it is a child doctype + doctypes = [[parent_doctype, None]] + [ + [df.options, df] for df in parent_meta.get_table_fields() + ] + + for doctype, table_df in doctypes: + # name field + name_by_label = ( + "ID" if doctype == parent_doctype else "ID ({0})".format(table_df.label) + ) + name_by_fieldname = ( + "name" if doctype == parent_doctype else "{0}.name".format(table_df.fieldname) + ) + name_df = frappe._dict( + { + "fieldtype": "Data", + "fieldname": "name", + "label": "ID", + "reqd": 1, # self.import_type == UPDATE, + "parent": doctype, + } + ) + + if doctype != parent_doctype: + name_df.is_child_table_field = True + name_df.child_table_df = table_df + + out[name_by_label] = name_df + out[name_by_fieldname] = name_df + + # other fields + fields = get_standard_fields(doctype) + frappe.get_meta(doctype).fields + for df in fields: + fieldtype = df.fieldtype or "Data" + parent = df.parent or parent_doctype + if fieldtype not in no_value_fields: + if parent_doctype == doctype: + # for parent doctypes keys will be + # Label + # label + # Label (label) + if not out.get(df.label): + # if Label is already set, don't set it again + # in case of duplicate column headers + out[df.label] = df + out[df.fieldname] = df + label_with_fieldname = "{0} ({1})".format(df.label, df.fieldname) + out[label_with_fieldname] = df + else: + # in case there are multiple table fields with the same doctype + # for child doctypes keys will be + # Label (Table Field Label) + # table_field.fieldname + table_fields = parent_meta.get( + "fields", {"fieldtype": ["in", table_fieldtypes], "options": parent} + ) + for table_field in table_fields: + by_label = "{0} ({1})".format(df.label, table_field.label) + by_fieldname = "{0}.{1}".format(table_field.fieldname, df.fieldname) + + # create a 
new df object to avoid mutation problems + if isinstance(df, dict): + new_df = frappe._dict(df.copy()) + else: + new_df = df.as_dict() + + new_df.is_child_table_field = True + new_df.child_table_df = table_field + out[by_label] = new_df + out[by_fieldname] = new_df + + # if autoname is based on field + # add an entry for "ID (Autoname Field)" + autoname_field = get_autoname_field(parent_doctype) + if autoname_field: + out["ID ({})".format(autoname_field.label)] = autoname_field + # ID field should also map to the autoname field + out["ID"] = autoname_field + out["name"] = autoname_field + + return out + + +def get_df_for_column_header(doctype, header): + def build_fields_dict_for_doctype(): + return build_fields_dict_for_column_matching(doctype) + + df_by_labels_and_fieldname = frappe.cache().hget( + "data_import_column_header_map", doctype, generator=build_fields_dict_for_doctype + ) + return df_by_labels_and_fieldname.get(header) + + +# utilities + + +def get_id_field(doctype): + autoname_field = get_autoname_field(doctype) + if autoname_field: + return autoname_field + return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) + + +def get_autoname_field(doctype): + meta = frappe.get_meta(doctype) + if meta.autoname and meta.autoname.startswith("field:"): + fieldname = meta.autoname[len("field:") :] + return meta.get_field(fieldname) + + +def get_item_at_index(_list, i, default=None): + try: + a = _list[i] + except IndexError: + a = default + return a + + +def get_user_format(date_format): + return ( + date_format.replace("%Y", "yyyy") + .replace("%y", "yy") + .replace("%m", "mm") + .replace("%d", "dd") + ) diff --git a/frappe/core/doctype/data_import/importer_new.py b/frappe/core/doctype/data_import/importer_new.py deleted file mode 100644 index 040e9fabc4..0000000000 --- a/frappe/core/doctype/data_import/importer_new.py +++ /dev/null @@ -1,1044 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors -# MIT License. 
See license.txt - -import io -import os -import json -import timeit -import frappe -from datetime import datetime -from frappe import _ -from frappe.utils import cint, flt, update_progress_bar, cstr, DATETIME_FORMAT -from frappe.utils.csvutils import read_csv_content -from frappe.utils.xlsxutils import ( - read_xlsx_file_from_attached_file, - read_xls_file_from_attached_file, -) -from frappe.model import no_value_fields, table_fields - -INVALID_VALUES = ["", None] -MAX_ROWS_IN_PREVIEW = 10 -INSERT = "Insert New Records" -UPDATE = "Update Existing Records" - -# pylint: disable=R0201 -class Importer: - def __init__( - self, doctype, data_import=None, file_path=None, content=None, console=False - ): - self.doctype = doctype - self.template_options = frappe._dict({"remap_column": {}}) - self.console = console - - if data_import: - self.data_import = data_import - if self.data_import.template_options: - template_options = frappe.parse_json(self.data_import.template_options) - self.template_options.update(template_options) - self.import_type = self.data_import.import_type - else: - self.data_import = None - - self.import_type = self.import_type or INSERT - - self.header_row = None - self.data = None - # used to store date formats guessed from data rows per column - self._guessed_date_formats = {} - # used to store eta during import - self.last_eta = 0 - # used to collect warnings during template parsing - # and show them to user - self.warnings = [] - self.meta = frappe.get_meta(doctype) - self.prepare_content(file_path, content) - self.parse_data_from_template() - - def prepare_content(self, file_path, content): - extension = None - if self.data_import and self.data_import.import_file: - file_doc = frappe.get_doc("File", {"file_url": self.data_import.import_file}) - parts = file_doc.get_extension() - extension = parts[1] - content = file_doc.get_content() - extension = extension.lstrip(".") - - if file_path: - content, extension = self.read_file(file_path) - - if not extension: - extension = "csv" - - if content: - self.read_content(content, extension) - - self.validate_template_content() - - def read_file(self, file_path): - extn = file_path.split(".")[1] - - file_content = None - with io.open(file_path, mode="rb") as f: - file_content = f.read() - - return file_content, extn - - def read_content(self, content, extension): - error_title = _("Template Error") - if extension not in ("csv", "xlsx", "xls"): - frappe.throw( - _("Import template should be of type .csv, .xlsx or .xls"), title=error_title - ) - - if extension == "csv": - data = read_csv_content(content) - elif extension == "xlsx": - data = read_xlsx_file_from_attached_file(fcontent=content) - elif extension == "xls": - data = read_xls_file_from_attached_file(content) - - data = self.remove_empty_rows_and_columns(data) - - if len(data) <= 1: - frappe.throw( - _("Import template should contain a Header and atleast one row."), title=error_title - ) - - self.header_row = data[0] - self.data = data[1:] - - def validate_template_content(self): - column_count = len(self.header_row) - if any([len(row) != column_count and len(row) != 0 for row in self.data]): - frappe.throw( - _("Number of columns does not match with data"), title=_("Invalid Template") - ) - - def remove_empty_rows_and_columns(self, raw_data): - self.row_index_map = [] - removed_rows = [] - removed_columns = [] - - # remove empty rows - data_without_empty_rows = [] - for i, row in enumerate(raw_data): - if all(v in INVALID_VALUES for v in row): - # empty row - 
removed_rows.append(i) - else: - data_without_empty_rows.append(row) - self.row_index_map.append(i) - - # remove empty columns - # a column with a header and no data is a valid column - # a column with no header and no data will be removed - first_row = data_without_empty_rows[0] - for i, column in enumerate(first_row): - column_values = [row[i] for row in data_without_empty_rows] - if all(v in INVALID_VALUES for v in column_values): - # empty column - removed_columns.append(i) - - if removed_columns: - data_without_empty_rows_and_columns = [] - # remove empty columns from data - for i, row in enumerate(data_without_empty_rows): - new_row = [v for j, v in enumerate(row) if j not in removed_columns] - data_without_empty_rows_and_columns.append(new_row) - else: - data_without_empty_rows_and_columns = data_without_empty_rows - - return data_without_empty_rows_and_columns - - def get_data_for_import_preview(self): - out = frappe._dict() - out.data = list(self.rows) - out.columns = self.columns - out.warnings = self.warnings - total_number_of_rows = len(out.data) - if total_number_of_rows > MAX_ROWS_IN_PREVIEW: - out.data = out.data[:MAX_ROWS_IN_PREVIEW] - out.max_rows_exceeded = True - out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW - out.total_number_of_rows = total_number_of_rows - return out - - def parse_data_from_template(self): - columns = self.parse_columns_from_header_row() - columns = self.detect_date_formats(columns) - columns, data = self.add_serial_no_column(columns, self.data) - - self.columns = columns - self.rows = data - - def parse_columns_from_header_row(self): - remap_column = self.template_options.remap_column - columns = [] - - df_by_labels_and_fieldnames = self.build_fields_dict_for_column_matching() - - for i, header_title in enumerate(self.header_row): - header_row_index = str(i) - column_number = str(i + 1) - skip_import = False - fieldname = remap_column.get(header_row_index) - - if fieldname and fieldname != "Don't Import": - df = df_by_labels_and_fieldnames.get(fieldname) - self.warnings.append( - { - "col": column_number, - "message": _("Mapping column {0} to field {1}").format( - frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) - ), - "type": "info", - } - ) - else: - df = df_by_labels_and_fieldnames.get(header_title) - - if not df: - skip_import = True - else: - skip_import = False - - if fieldname == "Don't Import": - skip_import = True - self.warnings.append( - { - "col": column_number, - "message": _("Skipping column {0}").format(frappe.bold(header_title)), - "type": "info", - } - ) - elif header_title and not df: - self.warnings.append( - { - "col": column_number, - "message": _("Cannot match column {0} with any field").format( - frappe.bold(header_title) - ), - "type": "info", - } - ) - elif not header_title and not df: - self.warnings.append( - {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} - ) - - columns.append( - frappe._dict( - df=df, - skip_import=skip_import, - header_title=header_title, - column_number=column_number, - index=i, - ) - ) - - return columns - - def build_fields_dict_for_column_matching(self): - """ - Build a dict with various keys to match with column headers and value as docfield - The keys can be label or fieldname - { - 'Customer': df1, - 'customer': df1, - 'Due Date': df2, - 'due_date': df2, - 'Item Code (Sales Invoice Item)': df3, - 'Sales Invoice Item:item_code': df3, - } - """ - out = {} - - table_doctypes = [df.options for df in self.meta.get_table_fields()] - doctypes = 
table_doctypes + [self.doctype] - for doctype in doctypes: - # name field - name_key = "ID" if self.doctype == doctype else "ID ({})".format(doctype) - name_df = frappe._dict( - { - "fieldtype": "Data", - "fieldname": "name", - "label": "ID", - "reqd": self.import_type == UPDATE, - "parent": doctype, - } - ) - out[name_key] = name_df - out["name"] = name_df - - # other fields - meta = frappe.get_meta(doctype) - fields = self.get_standard_fields(doctype) + meta.fields - for df in fields: - fieldtype = df.fieldtype or "Data" - parent = df.parent or self.doctype - if fieldtype not in no_value_fields: - # label as key - label = ( - df.label if self.doctype == doctype else "{0} ({1})".format(df.label, parent) - ) - out[label] = df - # fieldname as key - if self.doctype == doctype: - out[df.fieldname] = df - else: - key = "{0}:{1}".format(doctype, df.fieldname) - out[key] = df - - # if autoname is based on field - # add an entry for "ID (Autoname Field)" - autoname_field = self.get_autoname_field(self.doctype) - if autoname_field: - out["ID ({})".format(autoname_field.label)] = autoname_field - # ID field should also map to the autoname field - out["ID"] = autoname_field - out["name"] = autoname_field - - return out - - def get_standard_fields(self, doctype): - meta = frappe.get_meta(doctype) - if meta.istable: - standard_fields = [ - {"label": "Parent", "fieldname": "parent"}, - {"label": "Parent Type", "fieldname": "parenttype"}, - {"label": "Parent Field", "fieldname": "parentfield"}, - {"label": "Row Index", "fieldname": "idx"}, - ] - else: - standard_fields = [ - {"label": "Owner", "fieldname": "owner"}, - {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, - ] - - out = [] - for df in standard_fields: - df = frappe._dict(df) - df.parent = doctype - out.append(df) - return out - - def detect_date_formats(self, columns): - for col in columns: - if col.df and col.df.fieldtype in ["Date", "Time", "Datetime"]: - col.date_format = self.guess_date_format_for_column(col, columns) - return columns - - def add_serial_no_column(self, columns, data): - columns_with_serial_no = [ - frappe._dict({"header_title": "Sr. 
No", "skip_import": True}) - ] + columns - - # update index for each column - for i, col in enumerate(columns_with_serial_no): - col.index = i - - data_with_serial_no = [] - for i, row in enumerate(data): - data_with_serial_no.append([self.row_index_map[i] + 1] + row) - - return columns_with_serial_no, data_with_serial_no - - def parse_value(self, value, df): - if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: - return value - - value = cstr(value) - - # convert boolean values to 0 or 1 - if df.fieldtype == "Check" and value.lower().strip() in [ - "t", - "f", - "true", - "false", - "yes", - "no", - "y", - "n", - ]: - value = value.lower().strip() - value = 1 if value in ["t", "true", "y", "yes"] else 0 - - if df.fieldtype in ["Int", "Check"]: - value = cint(value) - elif df.fieldtype in ["Float", "Percent", "Currency"]: - value = flt(value) - elif df.fieldtype in ["Date", "Datetime"]: - value = self.parse_date_format(value, df) - - return value - - def parse_date_format(self, value, df): - date_format = self.get_date_format_for_df(df) or DATETIME_FORMAT - try: - return datetime.strptime(value, date_format) - except ValueError: - # ignore date values that dont match the format - # import will break for these values later - pass - return value - - def get_date_format_for_df(self, df): - return self._guessed_date_formats.get(df.parent + df.fieldname) - - def guess_date_format_for_column(self, column, columns): - """ Guesses date format for a column by parsing the first 10 values in the column, - getting the date format and then returning the one which has the maximum frequency - """ - PARSE_ROW_COUNT = 10 - - df = column.df - key = df.parent + df.fieldname - - if not self._guessed_date_formats.get(key): - matches = [col for col in columns if col.df == df] - if not matches: - self._guessed_date_formats[key] = None - return - - column = matches[0] - column_index = column.index - - date_values = [ - row[column_index] for row in self.data[:PARSE_ROW_COUNT] if row[column_index] - ] - date_formats = [ - guess_date_format(d) if isinstance(d, str) else None for d in date_values - ] - if not date_formats: - return - max_occurred_date_format = max(set(date_formats), key=date_formats.count) - self._guessed_date_formats[key] = max_occurred_date_format - - return self._guessed_date_formats[key] - - def import_data(self): - # set user lang for translations - frappe.cache().hdel("lang", frappe.session.user) - frappe.set_user_lang(frappe.session.user) - - if not self.console: - self.data_import.db_set("template_warnings", "") - - # set flags - frappe.flags.in_import = True - frappe.flags.mute_emails = self.data_import.mute_emails - - # prepare a map for missing link field values - self.prepare_missing_link_field_values() - - # parse docs from rows - payloads = self.get_payloads_for_import() - - # dont import if there are non-ignorable warnings - warnings = [w for w in self.warnings if w.get("type") != "info"] - if warnings: - if self.console: - self.print_grouped_warnings(warnings) - else: - self.data_import.db_set("template_warnings", json.dumps(warnings)) - frappe.publish_realtime( - "data_import_refresh", {"data_import": self.data_import.name} - ) - return - - # setup import log - if self.data_import.import_log: - import_log = frappe.parse_json(self.data_import.import_log) - else: - import_log = [] - - # remove previous failures from import log - import_log = [l for l in import_log if l.get("success") == True] - - # get successfully imported rows - imported_rows = [] - for log in 
import_log: - log = frappe._dict(log) - if log.success: - imported_rows += log.row_indexes - - # start import - total_payload_count = len(payloads) - batch_size = frappe.conf.data_import_batch_size or 1000 - - for batch_index, batched_payloads in enumerate( - frappe.utils.create_batch(payloads, batch_size) - ): - for i, payload in enumerate(batched_payloads): - doc = payload.doc - row_indexes = [row[0] for row in payload.rows] - current_index = (i + 1) + (batch_index * batch_size) - - if set(row_indexes).intersection(set(imported_rows)): - print("Skipping imported rows", row_indexes) - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "skipping": True, - "data_import": self.data_import.name, - }, - ) - continue - - try: - start = timeit.default_timer() - doc = self.process_doc(doc) - processing_time = timeit.default_timer() - start - eta = self.get_eta(current_index, total_payload_count, processing_time) - - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "docname": doc.name, - "data_import": self.data_import.name, - "success": True, - "row_indexes": row_indexes, - "eta": eta, - }, - ) - if self.console: - update_progress_bar( - "Importing {0} records".format(total_payload_count), - current_index, - total_payload_count, - ) - import_log.append( - frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes) - ) - # commit after every successful import - frappe.db.commit() - - except Exception: - import_log.append( - frappe._dict( - success=False, - exception=frappe.get_traceback(), - messages=frappe.local.message_log, - row_indexes=row_indexes, - ) - ) - frappe.clear_messages() - # rollback if exception - frappe.db.rollback() - - # set status - failures = [l for l in import_log if l.get("success") == False] - if len(failures) == total_payload_count: - status = "Pending" - elif len(failures) > 0: - status = "Partial Success" - else: - status = "Success" - - if self.console: - self.print_import_log(import_log) - else: - self.data_import.db_set("status", status) - self.data_import.db_set("import_log", json.dumps(import_log)) - - frappe.flags.in_import = False - frappe.flags.mute_emails = False - frappe.publish_realtime("data_import_refresh", {"data_import": self.data_import.name}) - - return import_log - - def get_payloads_for_import(self): - payloads = [] - # make a copy - data = list(self.rows) - while data: - doc, rows, data = self.parse_next_row_for_import(data) - payloads.append(frappe._dict(doc=doc, rows=rows)) - return payloads - - def parse_next_row_for_import(self, data): - """ - Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. - Returns the doc, rows, and data without the rows. 
- """ - doctypes = set([col.df.parent for col in self.columns if col.df and col.df.parent]) - - # first row is included by default - first_row = data[0] - rows = [first_row] - - # if there are child doctypes, find the subsequent rows - if len(doctypes) > 1: - # subsequent rows either dont have any parent value set - # or have the same value as the parent row - # we include a row if either of conditions match - parent_column_indexes = [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == self.doctype - ] - parent_row_values = [first_row[i] for i in parent_column_indexes] - - data_without_first_row = data[1:] - for row in data_without_first_row: - row_values = [row[i] for i in parent_column_indexes] - # if the row is blank, it's a child row doc - if all([v in INVALID_VALUES for v in row_values]): - rows.append(row) - continue - # if the row has same values as parent row, it's a child row doc - if row_values == parent_row_values: - rows.append(row) - continue - # if any of those conditions dont match, it's the next doc - break - - def get_column_indexes(doctype): - return [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == doctype - ] - - def validate_value(value, df): - if df.fieldtype == "Select": - select_options = df.get_select_options() - if select_options and value not in select_options: - options_string = ", ".join([frappe.bold(d) for d in select_options]) - msg = _("Value must be one of {0}").format(options_string) - self.warnings.append( - { - "row": row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return - - elif df.fieldtype == "Link": - d = self.get_missing_link_field_values(df.options) - if value in d.missing_values and not d.one_mandatory: - msg = _("Value {0} missing for {1}").format( - frappe.bold(value), frappe.bold(df.options) - ) - self.warnings.append( - { - "row": row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return value - - return value - - def parse_doc(doctype, docfields, values, row_number): - doc = frappe._dict() - if self.import_type == INSERT: - # new_doc returns a dict with default values set - doc = frappe.new_doc(doctype, as_dict=True) - - # remove standard fields and __islocal - for key in frappe.model.default_fields + ("__islocal",): - doc.pop(key, None) - - for df, value in zip(docfields, values): - if value in INVALID_VALUES: - value = None - - if value is not None: - value = validate_value(value, df) - - if value is not None: - doc[df.fieldname] = self.parse_value(value, df) - - is_table = frappe.get_meta(doctype).istable - is_update = self.import_type == UPDATE - if is_table and is_update and doc.get("name") in INVALID_VALUES: - # for table rows being inserted in update - # create a new doc with defaults set - new_doc = frappe.new_doc(doctype, as_dict=True) - new_doc.update(doc) - doc = new_doc - - check_mandatory_fields(doctype, doc, row_number) - return doc - - def check_mandatory_fields(doctype, doc, row_number): - """If import type is Insert: - Check for mandatory fields (except table fields) in doc - if import type is Update: - Check for name field or autoname field in doc - """ - meta = frappe.get_meta(doctype) - if self.import_type == UPDATE: - if meta.istable: - # when updating records with table rows, - # there are two scenarios: - # 1. if row 'name' is provided in the template - # the table row will be updated - # 2. 
if row 'name' is not provided - # then a new row will be added - # so we dont need to check for mandatory - return - - id_field = self.get_id_field(doctype) - if doc.get(id_field.fieldname) in INVALID_VALUES: - self.warnings.append( - { - "row": row_number, - "message": _("{0} is a mandatory field").format(id_field.label), - } - ) - return - - fields = [ - df - for df in meta.fields - if df.fieldtype not in table_fields - and df.reqd - and doc.get(df.fieldname) in INVALID_VALUES - ] - - if not fields: - return - - if len(fields) == 1: - self.warnings.append( - { - "row": row_number, - "message": _("{0} is a mandatory field").format(fields[0].label), - } - ) - else: - fields_string = ", ".join([df.label for df in fields]) - self.warnings.append( - {"row": row_number, "message": _("{0} are mandatory fields").format(fields_string)} - ) - - parsed_docs = {} - for row in rows: - for doctype in doctypes: - if doctype == self.doctype and parsed_docs.get(doctype): - # if parent doc is already parsed from the first row - # then skip - continue - - row_number = row[0] - column_indexes = get_column_indexes(doctype) - values = [row[i] for i in column_indexes] - - if all(v in INVALID_VALUES for v in values): - # skip values if all of them are empty - continue - - columns = [self.columns[i] for i in column_indexes] - docfields = [col.df for col in columns] - doc = parse_doc(doctype, docfields, values, row_number) - parsed_docs[doctype] = parsed_docs.get(doctype, []) - parsed_docs[doctype].append(doc) - - # build the doc with children - doc = {} - for doctype, docs in parsed_docs.items(): - if doctype == self.doctype: - doc.update(docs[0]) - else: - table_dfs = self.meta.get( - "fields", {"options": doctype, "fieldtype": ["in", table_fields]} - ) - if table_dfs: - table_field = table_dfs[0] - doc[table_field.fieldname] = docs - - # check if there is atleast one row for mandatory table fields - mandatory_table_fields = [ - df - for df in self.meta.fields - if df.fieldtype in table_fields and df.reqd and len(doc.get(df.fieldname, [])) == 0 - ] - if len(mandatory_table_fields) == 1: - self.warnings.append( - { - "row": first_row[0], - "message": _("There should be atleast one row for {0} table").format( - mandatory_table_fields[0].label - ), - } - ) - elif mandatory_table_fields: - fields_string = ", ".join([df.label for df in mandatory_table_fields]) - message = _("There should be atleast one row for the following tables: {0}").format( - fields_string - ) - self.warnings.append({"row": first_row[0], "message": message}) - - return doc, rows, data[len(rows) :] - - def process_doc(self, doc): - if self.import_type == INSERT: - return self.insert_record(doc) - elif self.import_type == UPDATE: - return self.update_record(doc) - - def insert_record(self, doc): - self.create_missing_linked_records(doc) - - new_doc = frappe.new_doc(self.doctype) - new_doc.update(doc) - # name shouldn't be set when inserting a new record - new_doc.set("name", None) - new_doc.insert() - if self.meta.is_submittable and self.data_import.submit_after_import: - new_doc.submit() - return new_doc - - def create_missing_linked_records(self, doc): - """ - Finds fields that are of type Link, and creates the corresponding - document automatically if it has only one mandatory field - """ - link_values = [] - - def get_link_fields(doc, doctype): - for fieldname, value in doc.items(): - meta = frappe.get_meta(doctype) - df = meta.get_field(fieldname) - if not df: - continue - if df.fieldtype == "Link" and value not in INVALID_VALUES: - 
link_values.append([df.options, value]) - elif df.fieldtype in table_fields: - for row in value: - get_link_fields(row, df.options) - - get_link_fields(doc, self.doctype) - - for link_doctype, link_value in link_values: - d = self.missing_link_values.get(link_doctype) - if d and d.one_mandatory and link_value in d.missing_values: - # find the autoname field - autoname_field = self.get_autoname_field(link_doctype) - name_field = autoname_field.fieldname if autoname_field else "name" - new_doc = frappe.new_doc(link_doctype) - new_doc.set(name_field, link_value) - new_doc.insert() - d.missing_values.remove(link_value) - - def update_record(self, doc): - id_fieldname = self.get_id_fieldname(self.doctype) - id_value = doc[id_fieldname] - existing_doc = frappe.get_doc(self.doctype, id_value) - existing_doc.flags.updater_reference = { - "doctype": self.data_import.doctype, - "docname": self.data_import.name, - "label": _("via Data Import"), - } - existing_doc.update(doc) - existing_doc.save() - return existing_doc - - def export_errored_rows(self): - from frappe.utils.csvutils import build_csv_response - - if not self.data_import: - return - - import_log = frappe.parse_json(self.data_import.import_log or "[]") - failures = [l for l in import_log if l.get("success") == False] - row_indexes = [] - for f in failures: - row_indexes.extend(f.get("row_indexes", [])) - - # de duplicate - row_indexes = list(set(row_indexes)) - row_indexes.sort() - - header_row = [col.header_title for col in self.columns[1:]] - rows = [header_row] - rows += [row[1:] for row in self.rows if row[0] in row_indexes] - - build_csv_response(rows, self.doctype) - - def get_missing_link_field_values(self, doctype): - return self.missing_link_values.get(doctype, {}) - - def prepare_missing_link_field_values(self): - columns = self.columns - rows = self.rows - link_column_indexes = [ - col.index for col in columns if col.df and col.df.fieldtype == "Link" - ] - - self.missing_link_values = {} - for index in link_column_indexes: - col = columns[index] - column_values = [row[index] for row in rows] - values = set([v for v in column_values if v not in INVALID_VALUES]) - doctype = col.df.options - - missing_values = [value for value in values if not frappe.db.exists(doctype, value)] - if self.missing_link_values.get(doctype): - self.missing_link_values[doctype].missing_values += missing_values - else: - self.missing_link_values[doctype] = frappe._dict( - missing_values=missing_values, - one_mandatory=self.has_one_mandatory_field(doctype), - df=col.df, - ) - - def get_eta(self, current, total, processing_time): - remaining = total - current - eta = processing_time * remaining - if not self.last_eta or eta < self.last_eta: - self.last_eta = eta - return self.last_eta - - def has_one_mandatory_field(self, doctype): - meta = frappe.get_meta(doctype) - # get mandatory fields with default not set - mandatory_fields = [df for df in meta.fields if df.reqd and not df.default] - mandatory_fields_count = len(mandatory_fields) - if meta.autoname and meta.autoname.lower() == "prompt": - mandatory_fields_count += 1 - return mandatory_fields_count == 1 - - def get_id_fieldname(self, doctype): - return self.get_id_field(doctype).fieldname - - def get_id_field(self, doctype): - autoname_field = self.get_autoname_field(doctype) - if autoname_field: - return autoname_field - return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) - - def get_autoname_field(self, doctype): - meta = frappe.get_meta(doctype) - if meta.autoname and 
meta.autoname.startswith("field:"): - fieldname = meta.autoname[len("field:") :] - return meta.get_field(fieldname) - - def print_grouped_warnings(self, warnings): - warnings_by_row = {} - other_warnings = [] - for w in warnings: - if w.get("row"): - warnings_by_row.setdefault(w.get("row"), []).append(w) - else: - other_warnings.append(w) - - for row_number, warnings in warnings_by_row.items(): - print("Row {0}".format(row_number)) - for w in warnings: - print(w.get("message")) - - for w in other_warnings: - print(w.get("message")) - - def print_import_log(self, import_log): - failed_records = [l for l in import_log if not l.success] - successful_records = [l for l in import_log if l.success] - - if successful_records: - print( - "Successfully imported {0} records out of {1}".format( - len(successful_records), len(import_log) - ) - ) - - if failed_records: - print("Failed to import {0} records".format(len(failed_records))) - file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now()) - print("Check {0} for errors".format(os.path.join("sites", file_name))) - text = "" - for w in failed_records: - text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", []))) - text += "Messages:\n{0}\n".format("\n".join(w.get("messages", []))) - text += "Traceback:\n{0}\n\n".format(w.get("exception")) - - with open(file_name, "w") as f: - f.write(text) - - -DATE_FORMATS = [ - r"%d-%m-%Y", - r"%m-%d-%Y", - r"%Y-%m-%d", - r"%d-%m-%y", - r"%m-%d-%y", - r"%y-%m-%d", - r"%d/%m/%Y", - r"%m/%d/%Y", - r"%Y/%m/%d", - r"%d/%m/%y", - r"%m/%d/%y", - r"%y/%m/%d", - r"%d.%m.%Y", - r"%m.%d.%Y", - r"%Y.%m.%d", - r"%d.%m.%y", - r"%m.%d.%y", - r"%y.%m.%d", -] - -TIME_FORMATS = [ - r"%H:%M:%S.%f", - r"%H:%M:%S", - r"%H:%M", - r"%I:%M:%S.%f %p", - r"%I:%M:%S %p", - r"%I:%M %p", -] - - -def guess_date_format(date_string): - date_string = date_string.strip() - - _date = None - _time = None - - if " " in date_string: - _date, _time = date_string.split(" ", 1) - else: - _date = date_string - - date_format = None - time_format = None - - for f in DATE_FORMATS: - try: - # if date is parsed without any exception - # capture the date format - datetime.strptime(_date, f) - date_format = f - break - except ValueError: - pass - - if _time: - for f in TIME_FORMATS: - try: - # if time is parsed without any exception - # capture the time format - datetime.strptime(_time, f) - time_format = f - break - except ValueError: - pass - - full_format = date_format - if time_format: - full_format += " " + time_format - return full_format - - -def import_data(doctype, file_path): - i = Importer(doctype, file_path) - i.import_data() diff --git a/frappe/core/doctype/data_import/test_data_import.js b/frappe/core/doctype/data_import/test_data_import.js deleted file mode 100644 index fbce7781b6..0000000000 --- a/frappe/core/doctype/data_import/test_data_import.js +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable */ -// rename this file from _test_[name] to test_[name] to activate -// and remove above this line - -QUnit.test("test: Data Import", function (assert) { - let done = assert.async(); - - // number of asserts - assert.expect(1); - - frappe.run_serially([ - // insert a new Data Import - () => frappe.tests.make('Data Import', [ - // values to be set - {key: 'value'} - ]), - () => { - assert.equal(cur_frm.doc.key, 'value'); - }, - () => done() - ]); - -}); diff --git a/frappe/core/doctype/data_import/test_data_import.py b/frappe/core/doctype/data_import/test_data_import.py index 406ea08958..15fd57744a 100644 --- 
a/frappe/core/doctype/data_import/test_data_import.py +++ b/frappe/core/doctype/data_import/test_data_import.py @@ -1,100 +1,10 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017, Frappe Technologies and Contributors +# Copyright (c) 2020, Frappe Technologies and Contributors # See license.txt from __future__ import unicode_literals -import frappe, unittest -from frappe.core.doctype.data_export import exporter -from frappe.core.doctype.data_import import importer -from frappe.utils.csvutils import read_csv_content +# import frappe +import unittest class TestDataImport(unittest.TestCase): - def test_export(self): - exporter.export_data("User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - - def test_export_with_data(self): - exporter.export_data("User", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - - def test_export_with_all_doctypes(self): - exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - self.assertEqual(content[13][0], "DocType:") - self.assertEqual(content[13][1], "User") - self.assertTrue("Has Role" in content[13]) - - def test_import(self): - if frappe.db.exists("Blog Category", "test-category"): - frappe.delete_doc("Blog Category", "test-category") - - exporter.export_data("Blog Category", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test-category", "Test Cateogry"]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category") - - # export with data - exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - - # overwrite - content[-1][3] = "New Title" - importer.upload(content, overwrite=True) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title") - - def test_import_only_children(self): - user_email = "test_import_userrole@example.com" - if frappe.db.exists("User", user_email): - frappe.delete_doc("User", user_email, force=True) - - frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert() - - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Blogger"]) - importer.upload(content) - - user = frappe.get_doc("User", user_email) - self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"})) - self.assertTrue(user.get("roles")[0].role, "Blogger") - - # overwrite - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Website Manager"]) - importer.upload(content, overwrite=True) - - user = frappe.get_doc("User", user_email) - self.assertEqual(len(user.get("roles")), 1) - self.assertTrue(user.get("roles")[0].role, "Website Manager") - - def test_import_with_children(self): #pylint: disable=R0201 - if 
frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - exporter.export_data("Event", all_doctypes="Yes", template=True) - content = read_csv_content(frappe.response.result) - - content.append([None] * len(content[-2])) - content[-1][1] = "__Test Event with children" - content[-1][2] = "Private" - content[-1][3] = "2014-01-01 10:00:00.000000" - importer.upload(content) - - frappe.get_doc("Event", {"subject":"__Test Event with children"}) - - def test_excel_import(self): - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - - exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel") - from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file - content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent) - content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name")) \ No newline at end of file + pass diff --git a/frappe/core/doctype/data_import/test_exporter.py b/frappe/core/doctype/data_import/test_exporter.py new file mode 100644 index 0000000000..8415af2e63 --- /dev/null +++ b/frappe/core/doctype/data_import/test_exporter.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +import unittest +import frappe +from frappe.core.doctype.data_import.exporter import Exporter +from frappe.core.doctype.data_import.test_importer import ( + create_doctype_if_not_exists, +) + +doctype_name = 'DocType for Export' + +class TestExporter(unittest.TestCase): + def setUp(self): + create_doctype_if_not_exists(doctype_name) + + def test_exports_specified_fields(self): + if not frappe.db.exists(doctype_name, "Test"): + doc = frappe.get_doc( + doctype=doctype_name, + title="Test", + description="Test Description", + table_field_1=[ + {"child_title": "Child Title 1", "child_description": "Child Description 1"}, + {"child_title": "Child Title 2", "child_description": "Child Description 2"}, + ], + table_field_2=[ + {"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"}, + ], + table_field_1_again=[ + { + "child_title": "Child Title 1 Again", + "child_description": "Child Description 1 Again", + }, + ], + ).insert() + else: + doc = frappe.get_doc(doctype_name, "Test") + + e = Exporter( + doctype_name, + export_fields={ + doctype_name: ["title", "description", "number", "another_number"], + "table_field_1": ["name", "child_title", "child_description"], + "table_field_2": ["child_2_date", "child_2_number"], + "table_field_1_again": [ + "child_title", + "child_date", + "child_number", + "child_another_number", + ], + }, + export_data=True, + ) + csv_array = e.get_csv_array() + header_row = csv_array[0] + + self.assertEqual( + header_row, + [ + "Title", + "Description", + "Number", + "another_number", + "ID (Table Field 1)", + "Child Title (Table Field 1)", + "Child Description (Table Field 1)", + "Child 2 Date (Table Field 2)", + "Child 2 Number (Table Field 2)", + "Child Title (Table Field 1 Again)", + "Child Date (Table Field 1 Again)", + "Child Number (Table Field 1 Again)", + "table_field_1_again.child_another_number", + ], + ) + + table_field_1_row_1_name = doc.table_field_1[0].name + table_field_1_row_2_name = doc.table_field_1[1].name + # fmt: off + 
self.assertEqual( + csv_array[1], + ["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0] + ) + self.assertEqual( + csv_array[2], + ["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""], + ) + # fmt: on + self.assertEqual(len(csv_array), 3) + + def test_export_csv_response(self): + e = Exporter( + doctype_name, + export_fields={doctype_name: ["title", "description"]}, + export_data=True, + file_type="CSV" + ) + e.build_response() + + self.assertTrue(frappe.response['result']) + self.assertEqual(frappe.response['doctype'], doctype_name) + self.assertEqual(frappe.response['type'], "csv") diff --git a/frappe/core/doctype/data_import/test_exporter_new.py b/frappe/core/doctype/data_import/test_exporter_new.py deleted file mode 100644 index 0d3aedb033..0000000000 --- a/frappe/core/doctype/data_import/test_exporter_new.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and Contributors -# See license.txt -from __future__ import unicode_literals - -import unittest -import frappe -from frappe.core.doctype.data_import.exporter_new import Exporter - - -class TestExporter(unittest.TestCase): - def test_exports_mandatory_fields(self): - e = Exporter('Web Page', export_fields='Mandatory') - csv_array = e.get_csv_array() - header_row = csv_array[0] - self.assertEqual(header_row, ['ID', 'Title']) - - - def test_exports_all_fields(self): - e = Exporter('Web Page', export_fields='All') - csv_array = e.get_csv_array() - header = csv_array[0] - self.assertEqual(len(header), 37) - - - def test_exports_selected_fields(self): - export_fields = { - 'Web Page': ['title', 'route', 'published'] - } - e = Exporter('Web Page', export_fields=export_fields) - csv_array = e.get_csv_array() - header = csv_array[0] - self.assertEqual(header, ['Title', 'Route', 'Published']) - - - def test_exports_data(self): - e = Exporter('ToDo', export_fields='All', export_data=True) - todo_records = frappe.db.count('ToDo') - csv_array = e.get_csv_array() - self.assertEqual(len(csv_array), todo_records + 1) diff --git a/frappe/core/doctype/data_import/test_importer.py b/frappe/core/doctype/data_import/test_importer.py new file mode 100644 index 0000000000..bdadad7890 --- /dev/null +++ b/frappe/core/doctype/data_import/test_importer.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +import unittest +import frappe +from frappe.utils import getdate + +doctype_name = 'DocType for Import' + +class TestImporter(unittest.TestCase): + def setUp(self): + create_doctype_if_not_exists(doctype_name) + + def test_data_import_from_file(self): + import_file = get_import_file('sample_import_file') + data_import = self.get_importer(doctype_name, import_file) + data_import.start_import() + + doc1 = frappe.get_doc(doctype_name, 'Test') + doc2 = frappe.get_doc(doctype_name, 'Test 2') + doc3 = frappe.get_doc(doctype_name, 'Test 3') + + self.assertEqual(doc1.description, 'test description') + self.assertEqual(doc1.number, 1) + + self.assertEqual(doc1.table_field_1[0].child_title, 'child title') + self.assertEqual(doc1.table_field_1[0].child_description, 'child description') + + self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2') + self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2') + + 
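The expected `csv_array` rows above also document the template convention for child tables: the first row carries the parent values, and each additional child row follows with the parent columns left blank. Shaped as data, and trimmed to a few columns for brevity, the fixture doc exports roughly as:

```python
# illustrative only; column set trimmed from the full header row above
SAMPLE_ROWS = [
	["Title", "Description", "Child Title (Table Field 1)", "Child Description (Table Field 1)"],
	["Test", "Test Description", "Child Title 1", "Child Description 1"],
	["", "", "Child Title 2", "Child Description 2"],
]
```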
self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child') + self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30')) + self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5) + + self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again') + self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2') + self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22')) + + self.assertEqual(doc2.description, 'test description 2') + self.assertEqual(doc3.another_number, 5) + + def test_data_import_preview(self): + import_file = get_import_file('sample_import_file') + data_import = self.get_importer(doctype_name, import_file) + preview = data_import.get_preview_from_template() + + self.assertEqual(len(preview.data), 4) + self.assertEqual(len(preview.columns), 15) + + def test_data_import_without_mandatory_values(self): + import_file = get_import_file('sample_import_file_without_mandatory') + data_import = self.get_importer(doctype_name, import_file) + data_import.start_import() + data_import.reload() + warnings = frappe.parse_json(data_import.template_warnings) + + self.assertEqual(warnings[0]['row'], 2) + self.assertEqual(warnings[0]['message'], "Child Title (Table Field 1) is a mandatory field") + + self.assertEqual(warnings[1]['row'], 3) + self.assertEqual(warnings[1]['message'], "Child Title (Table Field 1 Again) is a mandatory field") + + self.assertEqual(warnings[2]['row'], 4) + self.assertEqual(warnings[2]['message'], "Title is a mandatory field") + + def test_data_import_update(self): + if not frappe.db.exists(doctype_name, 'Test 26'): + frappe.get_doc( + doctype=doctype_name, + title='Test 26' + ).insert() + + import_file = get_import_file('sample_import_file_for_update') + data_import = self.get_importer(doctype_name, import_file, update=True) + data_import.start_import() + + updated_doc = frappe.get_doc(doctype_name, 'Test 26') + self.assertEqual(updated_doc.description, 'test description') + self.assertEqual(updated_doc.table_field_1[0].child_title, 'child title') + self.assertEqual(updated_doc.table_field_1[0].child_description, 'child description') + self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again') + + def get_importer(self, doctype, import_file, update=False): + data_import = frappe.new_doc('Data Import') + data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records' + data_import.reference_doctype = doctype + data_import.import_file = import_file.file_url + data_import.insert() + + return data_import + +def create_doctype_if_not_exists(doctype_name, force=False): + if force: + frappe.delete_doc_if_exists('DocType', doctype_name) + frappe.delete_doc_if_exists('DocType', 'Child 1 of ' + doctype_name) + frappe.delete_doc_if_exists('DocType', 'Child 2 of ' + doctype_name) + + if frappe.db.exists('DocType', doctype_name): + return + + # Child Table 1 + table_1_name = 'Child 1 of ' + doctype_name + frappe.get_doc({ + 'doctype': 'DocType', + 'name': table_1_name, + 'module': 'Custom', + 'custom': 1, + 'istable': 1, + 'fields': [ + {'label': 'Child Title', 'fieldname': 'child_title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Child Description', 'fieldname': 'child_description', 'fieldtype': 'Small Text'}, + {'label': 'Child Date', 'fieldname': 'child_date', 'fieldtype': 'Date'}, + {'label': 'Child Number', 'fieldname': 'child_number', 'fieldtype': 'Int'}, + {'label': 'Child Number', 'fieldname': 'child_another_number', 
'fieldtype': 'Int'}, + ] + }).insert() + + # Child Table 2 + table_2_name = 'Child 2 of ' + doctype_name + frappe.get_doc({ + 'doctype': 'DocType', + 'name': table_2_name, + 'module': 'Custom', + 'custom': 1, + 'istable': 1, + 'fields': [ + {'label': 'Child 2 Title', 'fieldname': 'child_2_title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Child 2 Description', 'fieldname': 'child_2_description', 'fieldtype': 'Small Text'}, + {'label': 'Child 2 Date', 'fieldname': 'child_2_date', 'fieldtype': 'Date'}, + {'label': 'Child 2 Number', 'fieldname': 'child_2_number', 'fieldtype': 'Int'}, + {'label': 'Child 2 Number', 'fieldname': 'child_2_another_number', 'fieldtype': 'Int'}, + ] + }).insert() + + # Main Table + frappe.get_doc({ + 'doctype': 'DocType', + 'name': doctype_name, + 'module': 'Custom', + 'custom': 1, + 'autoname': 'field:title', + 'fields': [ + {'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'}, + {'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'}, + {'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'}, + {'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'}, + {'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name}, + {'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name}, + {'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name}, + ], + 'permissions': [ + {'role': 'System Manager'} + ] + }).insert() + + +def get_import_file(csv_file_name, force=False): + file_name = csv_file_name + '.csv' + _file = frappe.db.exists('File', {'file_name': file_name}) + if force and _file: + frappe.delete_doc_if_exists('File', _file) + + if frappe.db.exists('File', {'file_name': file_name}): + f = frappe.get_doc('File', {'file_name': file_name}) + else: + full_path = get_csv_file_path(file_name) + f = frappe.get_doc( + doctype='File', + content=frappe.read_file(full_path), + file_name=file_name, + is_private=1 + ) + f.save(ignore_permissions=True) + + return f + + +def get_csv_file_path(file_name): + return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name) diff --git a/frappe/core/doctype/data_import/test_importer_new.py b/frappe/core/doctype/data_import/test_importer_new.py deleted file mode 100644 index d6349daa55..0000000000 --- a/frappe/core/doctype/data_import/test_importer_new.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and Contributors -# See license.txt -from __future__ import unicode_literals - -import datetime -import unittest -import frappe -from frappe.core.doctype.data_import.importer_new import Importer - -content_empty_rows = '''title,start_date,idx,show_title -,,, -est phasellus sit amet,5/20/2019,52,1 -nibh in,7/29/2019,77,1 -''' - -content_mandatory_missing = '''title,start_date,idx,show_title -,5/20/2019,52,1 -''' - -content_convert_value = '''title,start_date,idx,show_title -est phasellus sit amet,5/20/2019,52,True -''' - -content_invalid_column = '''title,start_date,idx,show_title,invalid_column -est phasellus sit amet,5/20/2019,52,True,invalid value -''' - - -class TestImporter(unittest.TestCase): - def test_should_skip_empty_rows(self): - i = self.get_importer('Web Page', content=content_empty_rows) - payloads = i.get_payloads_for_import() - row_to_be_imported = [] - for p in payloads: - 
row_to_be_imported += [row[0] for row in p.rows] - self.assertEqual(len(row_to_be_imported), 2) - - def test_should_throw_if_mandatory_is_missing(self): - i = self.get_importer('Web Page', content=content_mandatory_missing) - i.import_data() - warning = i.warnings[0] - self.assertTrue('Title is a mandatory field' in warning['message']) - - def test_should_convert_value_based_on_fieldtype(self): - i = self.get_importer('Web Page', content=content_convert_value) - payloads = i.get_payloads_for_import() - doc = payloads[0].doc - - self.assertEqual(type(doc['show_title']), int) - self.assertEqual(type(doc['idx']), int) - self.assertEqual(type(doc['start_date']), datetime.datetime) - - def test_should_ignore_invalid_columns(self): - i = self.get_importer('Web Page', content=content_invalid_column) - payloads = i.get_payloads_for_import() - doc = payloads[0].doc - - self.assertTrue('invalid_column' not in doc) - self.assertTrue('title' in doc) - - def test_should_import_valid_template(self): - title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8)) - content_valid_content = '''title,start_date,idx,show_title -{0},5/20/2019,52,1'''.format(title) - i = self.get_importer('Web Page', content=content_valid_content) - import_log = i.import_data() - log = import_log[0] - self.assertTrue(log.success) - doc = frappe.get_doc('Web Page', { 'title': title }) - self.assertEqual(frappe.utils.get_datetime_str(doc.start_date), - frappe.utils.get_datetime_str('2019-05-20')) - - def get_importer(self, doctype, content): - data_import = frappe.new_doc('Data Import Beta') - data_import.import_type = 'Insert New Records' - i = Importer(doctype, content=content, data_import=data_import) - return i diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.js b/frappe/core/doctype/data_import_beta/data_import_beta.js deleted file mode 100644 index 527dbd7d0c..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.js +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright (c) 2019, Frappe Technologies and contributors -// For license information, please see license.txt - -frappe.ui.form.on('Data Import Beta', { - setup(frm) { - frappe.realtime.on('data_import_refresh', ({ data_import }) => { - frm.import_in_progress = false; - if (data_import !== frm.doc.name) return; - frappe.model.clear_doc('Data Import Beta', frm.doc.name); - frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => { - frm.refresh(); - }); - }); - frappe.realtime.on('data_import_progress', data => { - frm.import_in_progress = true; - if (data.data_import !== frm.doc.name) { - return; - } - let percent = Math.floor((data.current * 100) / data.total); - let seconds = Math.floor(data.eta); - let minutes = Math.floor(data.eta / 60); - let eta_message = - // prettier-ignore - seconds < 60 - ? __('About {0} seconds remaining', [seconds]) - : minutes === 1 - ? __('About {0} minute remaining', [minutes]) - : __('About {0} minutes remaining', [minutes]); - - let message; - if (data.success) { - let message_args = [data.current, data.total, eta_message]; - message = - frm.doc.import_type === 'Insert New Records' - ? 
__('Importing {0} of {1}, {2}', message_args) - : __('Updating {0} of {1}, {2}', message_args); - } - if (data.skipping) { - message = __('Skipping {0} of {1}, {2}', [ - data.current, - data.total, - eta_message - ]); - } - frm.dashboard.show_progress(__('Import Progress'), percent, message); - frm.page.set_indicator(__('In Progress'), 'orange'); - - // hide progress when complete - if (data.current === data.total) { - setTimeout(() => { - frm.dashboard.hide(); - frm.refresh(); - }, 2000); - } - }); - - frm.set_query('reference_doctype', () => { - return { - filters: { - allow_import: 1 - } - }; - }); - - frm.get_field('import_file').df.options = { - restrictions: { - allowed_file_types: ['.csv', '.xls', '.xlsx'] - } - }; - }, - - refresh(frm) { - frm.page.hide_icon_group(); - frm.trigger('update_indicators'); - frm.trigger('import_file'); - frm.trigger('show_import_log'); - frm.trigger('show_import_warnings'); - frm.trigger('toggle_submit_after_import'); - frm.trigger('show_import_status'); - frm.trigger('show_report_error_button'); - - if (frm.doc.status === 'Partial Success') { - frm.add_custom_button(__('Export Errored Rows'), () => - frm.trigger('export_errored_rows') - ); - } - - if (frm.doc.status.includes('Success')) { - frm.add_custom_button( - __('Go to {0} List', [frm.doc.reference_doctype]), - () => frappe.set_route('List', frm.doc.reference_doctype) - ); - } - - frm.disable_save(); - if (frm.doc.status !== 'Success') { - if (!frm.is_new() && frm.doc.import_file) { - let label = - frm.doc.status === 'Pending' ? __('Start Import') : __('Retry'); - frm.page.set_primary_action(label, () => frm.events.start_import(frm)); - } else { - frm.page.set_primary_action(__('Save'), () => frm.save()); - } - } - }, - - update_indicators(frm) { - const indicator = frappe.get_indicator(frm.doc); - if (indicator) { - frm.page.set_indicator(indicator[0], indicator[1]); - } else { - frm.page.clear_indicator(); - } - }, - - show_import_status(frm) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - let successful_records = import_log.filter(log => log.success); - let failed_records = import_log.filter(log => !log.success); - if (successful_records.length === 0) return; - - let message; - if (failed_records.length === 0) { - let message_args = [successful_records.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records.', message_args) - : __('Successfully imported {0} record.', message_args); - } else { - message = - successful_records.length > 1 - ? __('Successfully updated {0} records.', message_args) - : __('Successfully updated {0} record.', message_args); - } - } else { - let message_args = [successful_records.length, import_log.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records out of {1}.', message_args) - : __('Successfully imported {0} record out of {1}.', message_args); - } else { - message = - successful_records.length > 1 - ? 
__('Successfully updated {0} records out of {1}.', message_args) - : __('Successfully updated {0} record out of {1}.', message_args); - } - } - frm.dashboard.set_headline(message); - }, - - show_report_error_button(frm) { - if (frm.doc.status === 'Error') { - frappe.db - .get_list('Error Log', { - filters: { method: frm.doc.name }, - fields: ['method', 'error'], - order_by: 'creation desc', - limit: 1 - }) - .then(result => { - if (result.length > 0) { - frm.add_custom_button('Report Error', () => { - let fake_xhr = { - responseText: JSON.stringify({ - exc: result[0].error - }) - }; - frappe.request.report_error(fake_xhr, {}); - }); - } - }); - } - }, - - start_import(frm) { - frm - .call({ - method: 'form_start_import', - args: { data_import: frm.doc.name }, - btn: frm.page.btn_primary - }) - .then(r => { - if (r.message === true) { - frm.disable_save(); - } - }); - }, - - download_template(frm) { - if ( - frm.data_exporter && - frm.data_exporter.doctype === frm.doc.reference_doctype - ) { - frm.data_exporter.dialog.show(); - set_export_records(); - } else { - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.data_exporter = new frappe.data_import.DataExporter( - frm.doc.reference_doctype - ); - set_export_records(); - }); - } - - function set_export_records() { - if (frm.doc.import_type === 'Insert New Records') { - frm.data_exporter.dialog.set_value('export_records', 'blank_template'); - } else { - frm.data_exporter.dialog.set_value('export_records', 'all'); - } - // Force ID field to be exported when updating existing records - let id_field = frm.data_exporter.dialog.get_field( - frm.doc.reference_doctype - ).options[0]; - if (id_field.value === 'name' && id_field.$checkbox) { - id_field.$checkbox - .find('input') - .prop('disabled', frm.doc.import_type === 'Update Existing Records'); - } - } - }, - - reference_doctype(frm) { - frm.trigger('toggle_submit_after_import'); - }, - - toggle_submit_after_import(frm) { - frm.toggle_display('submit_after_import', false); - let doctype = frm.doc.reference_doctype; - if (doctype) { - frappe.model.with_doctype(doctype, () => { - let meta = frappe.get_meta(doctype); - frm.toggle_display('submit_after_import', meta.is_submittable); - }); - } - }, - - import_file(frm) { - frm.toggle_display('section_import_preview', frm.doc.import_file); - if (!frm.doc.import_file) { - frm.get_field('import_preview').$wrapper.empty(); - return; - } - - // load import preview - frm.get_field('import_preview').$wrapper.empty(); - $('') - .html(__('Loading import file...')) - .appendTo(frm.get_field('import_preview').$wrapper); - - frm - .call({ - method: 'get_preview_from_template', - args: { data_import: frm.doc.name }, - error_handlers: { - TimestampMismatchError() { - // ignore this error - } - } - }) - .then(r => { - let preview_data = r.message; - frm.events.show_import_preview(frm, preview_data); - frm.events.show_import_warnings(frm, preview_data); - }); - }, - - show_import_preview(frm, preview_data) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - - if ( - frm.import_preview && - frm.import_preview.doctype === frm.doc.reference_doctype - ) { - frm.import_preview.preview_data = preview_data; - frm.import_preview.import_log = import_log; - frm.import_preview.refresh(); - return; - } - - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.import_preview = new frappe.data_import.ImportPreview({ - wrapper: frm.get_field('import_preview').$wrapper, - doctype: frm.doc.reference_doctype, - preview_data, - import_log, - 
frm, - events: { - remap_column(changed_map) { - let template_options = JSON.parse(frm.doc.template_options || '{}'); - template_options.remap_column = template_options.remap_column || {}; - Object.assign(template_options.remap_column, changed_map); - frm.set_value('template_options', JSON.stringify(template_options)); - frm.save().then(() => frm.trigger('import_file')); - } - } - }); - }); - }, - - export_errored_rows(frm) { - open_url_post( - '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', - { - data_import_name: frm.doc.name - } - ); - }, - - show_import_warnings(frm, preview_data) { - let warnings = JSON.parse(frm.doc.template_warnings || '[]'); - warnings = warnings.concat(preview_data.warnings || []); - - frm.toggle_display('import_warnings_section', warnings.length > 0); - if (warnings.length === 0) { - frm.get_field('import_warnings').$wrapper.html(''); - return; - } - - // group warnings by row - let warnings_by_row = {}; - let other_warnings = []; - for (let warning of warnings) { - if (warning.row) { - warnings_by_row[warning.row] = warnings_by_row[warning.row] || []; - warnings_by_row[warning.row].push(warning); - } else { - other_warnings.push(warning); - } - } - - let html = ''; - html += Object.keys(warnings_by_row) - .map(row_number => { - let message = warnings_by_row[row_number] - .map(w => { - if (w.field) { - let label = - w.field.label + - (w.field.parent !== frm.doc.reference_doctype - ? ` (${w.field.parent})` - : ''); - return `${log.exception}
- | ${__('Row Number')} | -${__('Status')} | -${__('Message')} | -
{}
'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) + else: + err_msg = '{}
'.format(cstr(e)) + + error_trace = frappe.get_traceback() + if error_trace: + error_log_doc = frappe.log_error(error_trace) + error_link = get_absolute_url("Error Log", error_log_doc.name) + else: + error_link = None + + log(**{ + "row": row_idx + 1, + "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), + "message": err_msg, + "indicator": "red", + "link":error_link + }) + + # data with error to create a new file + # include the errored data in the last row as last_error_row_idx will not be updated for the last row + if skip_errors: + if last_error_row_idx == len(rows)-1: + last_error_row_idx = len(rows) + data_rows_with_error += rows[row_idx:last_error_row_idx] + else: + rollback_flag = True + finally: + frappe.local.message_log = [] + + start_row += batch_size + if rollback_flag: + frappe.db.rollback() + else: + frappe.db.commit() + + frappe.flags.mute_emails = False + frappe.flags.in_import = False + + log_message = {"messages": import_log, "error": error_flag} + if data_import_doc: + data_import_doc.log_details = json.dumps(log_message) + + import_status = None + if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): + import_status = "Partially Successful" + # write the file with the faulty row + file_name = 'error_' + filename + file_extension + if file_extension == '.xlsx': + from frappe.utils.xlsxutils import make_xlsx + xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") + file_data = xlsx_file.getvalue() + else: + from frappe.utils.csvutils import to_csv + file_data = to_csv(data_rows_with_error) + _file = frappe.get_doc({ + "doctype": "File", + "file_name": file_name, + "attached_to_doctype": "Data Import Legacy", + "attached_to_name": data_import_doc.name, + "folder": "Home/Attachments", + "content": file_data}) + _file.save() + data_import_doc.error_file = _file.file_url + + elif error_flag: + import_status = "Failed" + else: + import_status = "Successful" + + data_import_doc.import_status = import_status + data_import_doc.save() + if data_import_doc.import_status in ["Successful", "Partially Successful"]: + data_import_doc.submit() + publish_progress(100, True) + else: + publish_progress(0, True) + frappe.db.commit() + else: + return log_message + +def get_parent_field(doctype, parenttype): + parentfield = None + + # get parentfield + if parenttype: + for d in frappe.get_meta(parenttype).get_table_fields(): + if d.options==doctype: + parentfield = d.fieldname + break + + if not parentfield: + frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) + raise Exception + + return parentfield + +def delete_child_rows(rows, doctype): + """delete child rows for all parents""" + for p in list(set([r[1] for r in rows])): + if p: + frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) diff --git a/frappe/core/doctype/data_import/log_details.html b/frappe/core/doctype/data_import_legacy/log_details.html similarity index 100% rename from frappe/core/doctype/data_import/log_details.html rename to frappe/core/doctype/data_import_legacy/log_details.html diff --git a/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py new file mode 100644 index 0000000000..e5b244e6a0 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2020, Frappe Technologies and Contributors +# See license.txt 
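The legacy importer above walks the rows in fixed-size batches, committing after a clean batch and rolling back a batch that contained a failure when errors are not skipped; skipped failures are collected for the error file instead. A stripped-down sketch of that control flow, with the per-row helper name assumed:

```python
import frappe

def import_in_batches(rows, import_row, batch_size=100, skip_errors=True):
	# `import_row` stands in for the per-row insert/update logic above
	failed_rows = []
	for start in range(0, len(rows), batch_size):
		rollback_flag = False
		for row in rows[start:start + batch_size]:
			try:
				import_row(row)
			except Exception:
				if skip_errors:
					failed_rows.append(row)  # written to the error file later
				else:
					rollback_flag = True
					break
		# a failed batch is rolled back only when errors are not skipped
		if rollback_flag:
			frappe.db.rollback()
		else:
			frappe.db.commit()
	return failed_rows
```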
+from __future__ import unicode_literals + +# import frappe +import unittest + +class TestDataImportLegacy(unittest.TestCase): + pass diff --git a/frappe/core/doctype/docfield/docfield.json b/frappe/core/doctype/docfield/docfield.json index 83d3c18453..aab59a5a0a 100644 --- a/frappe/core/doctype/docfield/docfield.json +++ b/frappe/core/doctype/docfield/docfield.json @@ -13,8 +13,8 @@ "fieldname", "precision", "length", - "show_days", - "show_seconds", + "hide_days", + "hide_seconds", "reqd", "search_index", "in_list_view", @@ -453,18 +453,18 @@ "fieldtype": "Column Break" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days" + "label": "Hide Days" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds" + "label": "Hide Seconds" }, { "default": "0", @@ -477,7 +477,7 @@ "idx": 1, "istable": 1, "links": [], - "modified": "2020-05-15 09:06:25.224411", + "modified": "2020-02-06 09:06:25.224413", "modified_by": "Administrator", "module": "Core", "name": "DocField", diff --git a/frappe/core/doctype/doctype/doctype.py b/frappe/core/doctype/doctype/doctype.py index 904deb9990..6ca3cccdba 100644 --- a/frappe/core/doctype/doctype/doctype.py +++ b/frappe/core/doctype/doctype/doctype.py @@ -688,6 +688,9 @@ def validate_fields(meta): def check_link_table_options(docname, d): if frappe.flags.in_patch: return + + if frappe.flags.in_fixtures: return + if d.fieldtype in ("Link",) + table_fields: if not d.options: frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError) @@ -908,6 +911,8 @@ def validate_fields(meta): frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True) def check_child_table_option(docfield): + + if frappe.flags.in_fixtures: return if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return doctype = docfield.options diff --git a/frappe/core/doctype/file/file.py b/frappe/core/doctype/file/file.py index a17b3acd02..831d2ab22d 100755 --- a/frappe/core/doctype/file/file.py +++ b/frappe/core/doctype/file/file.py @@ -182,11 +182,11 @@ class File(Document): if duplicate_file: duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name) if duplicate_file_doc.exists_on_disk(): - # if it is attached to a document then throw DuplicateEntryError + # if it is attached to a document then throw FileAlreadyAttachedException if self.attached_to_doctype and self.attached_to_name: self.duplicate_entry = duplicate_file.name frappe.throw(_("Same file has already been attached to the record"), - frappe.DuplicateEntryError) + frappe.FileAlreadyAttachedException) # else just use the url, to avoid uploading a duplicate else: self.file_url = duplicate_file.file_url @@ -714,7 +714,12 @@ def remove_all(dt, dn, from_delete=False): try: for fid in frappe.db.sql_list("""select name from `tabFile` where attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)): - remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete) + if from_delete: + # If deleting a doc, directly delete files + frappe.delete_doc("File", fid, ignore_permissions=True) + else: + # Removes file and 
adds a comment in the document it is attached to + remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete) except Exception as e: if e.args[0]!=1054: raise # (temp till for patched) diff --git a/frappe/core/doctype/session_default_settings/session_default_settings.py b/frappe/core/doctype/session_default_settings/session_default_settings.py index 453ece2890..7b4bd19e9a 100644 --- a/frappe/core/doctype/session_default_settings/session_default_settings.py +++ b/frappe/core/doctype/session_default_settings/session_default_settings.py @@ -28,8 +28,7 @@ def get_session_default_values(): @frappe.whitelist() def set_session_default_values(default_values): - if not frappe.flags.in_test: - default_values = json.loads(default_values) + default_values = frappe.parse_json(default_values) for entry in default_values: try: frappe.defaults.set_user_default(entry, default_values.get(entry)) diff --git a/frappe/core/doctype/user/user.py b/frappe/core/doctype/user/user.py index 0c5ebc3ede..7b9266ff64 100644 --- a/frappe/core/doctype/user/user.py +++ b/frappe/core/doctype/user/user.py @@ -4,7 +4,7 @@ from __future__ import unicode_literals, print_function import frappe from frappe.model.document import Document -from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today +from frappe.utils import cint, flt, has_gravatar, format_datetime, now_datetime, get_formatted_email, today from frappe import throw, msgprint, _ from frappe.utils.password import update_password as _update_password from frappe.desk.notifications import clear_notifications @@ -841,11 +841,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters): def get_total_users(): """Returns total no. of system users""" - return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) + return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) FROM `tabUser` WHERE `enabled` = 1 AND `user_type` = 'System User' - AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0] + AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]) def get_system_users(exclude_users=None, limit=None): if not exclude_users: diff --git a/frappe/core/doctype/version/version.py b/frappe/core/doctype/version/version.py index 216cdb1716..7654db4ae5 100644 --- a/frappe/core/doctype/version/version.py +++ b/frappe/core/doctype/version/version.py @@ -21,6 +21,17 @@ class Version(Document): else: return False + def for_insert(self, doc): + updater_reference = doc.flags.updater_reference + data = { + 'creation': doc.creation, + 'updater_reference': updater_reference, + 'created_by': doc.owner + } + self.ref_doctype = doc.doctype + self.docname = doc.name + self.data = frappe.as_json(data) + def get_data(self): return json.loads(self.data) diff --git a/frappe/custom/doctype/custom_field/custom_field.json b/frappe/custom/doctype/custom_field/custom_field.json index 77490c8c43..6fa7b29161 100644 --- a/frappe/custom/doctype/custom_field/custom_field.json +++ b/frappe/custom/doctype/custom_field/custom_field.json @@ -16,8 +16,8 @@ "column_break_6", "fieldtype", "precision", - "show_seconds", - "show_days", + "hide_seconds", + "hide_days", "options", "fetch_from", "fetch_if_empty", @@ -383,22 +383,18 @@ "label": "In Preview" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": 
"hide_seconds", "fieldtype": "Check", - "label": "Show Seconds", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Seconds" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Days" }, { "default": "0", @@ -411,7 +407,7 @@ "icon": "fa fa-glass", "idx": 1, "links": [], - "modified": "2020-05-15 23:43:00.123572", + "modified": "2020-02-06 23:43:00.123575", "modified_by": "Administrator", "module": "Custom", "name": "Custom Field", diff --git a/frappe/custom/doctype/custom_field/custom_field.py b/frappe/custom/doctype/custom_field/custom_field.py index a24777a80a..bc325b654e 100644 --- a/frappe/custom/doctype/custom_field/custom_field.py +++ b/frappe/custom/doctype/custom_field/custom_field.py @@ -31,6 +31,13 @@ class CustomField(Document): # fieldnames should be lowercase self.fieldname = self.fieldname.lower() + def before_insert(self): + meta = frappe.get_meta(self.dt, cached=False) + fieldnames = [df.fieldname for df in meta.get("fields")] + + if self.fieldname in fieldnames: + frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt)) + def validate(self): meta = frappe.get_meta(self.dt, cached=False) fieldnames = [df.fieldname for df in meta.get("fields")] @@ -46,9 +53,6 @@ class CustomField(Document): if not self.fieldname: frappe.throw(_("Fieldname not set for Custom Field")) - if self.fieldname in fieldnames: - frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt)) - if self.get('translatable', 0) and not supports_translation(self.fieldtype): self.translatable = 0 @@ -68,6 +72,11 @@ class CustomField(Document): frappe.db.updatedb(self.dt) def on_trash(self): + #check if Admin owned field + if self.owner == 'Administrator' and frappe.session.user != 'Administrator': + frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format( + frappe.bold(self.label))) + # delete property setter entries frappe.db.sql("""\ DELETE FROM `tabProperty Setter` diff --git a/frappe/custom/doctype/customize_form/customize_form.py b/frappe/custom/doctype/customize_form/customize_form.py index 6a54d9c7e6..d4eeba3f93 100644 --- a/frappe/custom/doctype/customize_form/customize_form.py +++ b/frappe/custom/doctype/customize_form/customize_form.py @@ -77,7 +77,9 @@ docfield_properties = { 'allow_bulk_edit': 'Check', 'auto_repeat': 'Link', 'allow_in_quick_entry': 'Check', - 'hide_border': 'Check' + 'hide_border': 'Check', + 'hide_days': 'Check', + 'hide_seconds': 'Check' } allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'), diff --git a/frappe/custom/doctype/customize_form_field/customize_form_field.json b/frappe/custom/doctype/customize_form_field/customize_form_field.json index f422c36e61..267213517c 100644 --- a/frappe/custom/doctype/customize_form_field/customize_form_field.json +++ b/frappe/custom/doctype/customize_form_field/customize_form_field.json @@ -11,8 +11,8 @@ "label", "fieldtype", "fieldname", - "show_seconds", - "show_days", + "hide_seconds", + "hide_days", "reqd", "unique", "in_list_view", @@ -393,22 +393,18 @@ "label": "In Preview" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", 
+ "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Seconds" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Days" }, { "default": "0", @@ -421,7 +417,7 @@ "idx": 1, "istable": 1, "links": [], - "modified": "2020-05-15 23:45:46.810869", + "modified": "2020-06-02 23:45:46.810868", "modified_by": "Administrator", "module": "Custom", "name": "Customize Form Field", diff --git a/frappe/database/mariadb/framework_mariadb.sql b/frappe/database/mariadb/framework_mariadb.sql index bd93069a3f..af537e0612 100644 --- a/frappe/database/mariadb/framework_mariadb.sql +++ b/frappe/database/mariadb/framework_mariadb.sql @@ -64,6 +64,8 @@ CREATE TABLE `tabDocField` ( `length` int(11) NOT NULL DEFAULT 0, `translatable` int(1) NOT NULL DEFAULT 0, `hide_border` int(1) NOT NULL DEFAULT 0, + `hide_days` int(1) NOT NULL DEFAULT 0, + `hide_seconds` int(1) NOT NULL DEFAULT 0, PRIMARY KEY (`name`), KEY `parent` (`parent`), KEY `label` (`label`), diff --git a/frappe/database/postgres/framework_postgres.sql b/frappe/database/postgres/framework_postgres.sql index 76309e7347..8f77ed6230 100644 --- a/frappe/database/postgres/framework_postgres.sql +++ b/frappe/database/postgres/framework_postgres.sql @@ -64,6 +64,8 @@ CREATE TABLE "tabDocField" ( "length" bigint NOT NULL DEFAULT 0, "translatable" smallint NOT NULL DEFAULT 0, "hide_border" smallint NOT NULL DEFAULT 0, + "hide_days" smallint NOT NULL DEFAULT 0, + "hide_seconds" smallint NOT NULL DEFAULT 0, PRIMARY KEY ("name") ) ; diff --git a/frappe/desk/desktop.py b/frappe/desk/desktop.py index 956308568b..142c103c68 100644 --- a/frappe/desk/desktop.py +++ b/frappe/desk/desktop.py @@ -168,7 +168,6 @@ class Workspace: 'subtitle': _(self.onboarding_doc.subtitle), 'success': _(self.onboarding_doc.success_message), 'docs_url': self.onboarding_doc.documentation_url, - 'user_can_dismiss': self.onboarding_doc.user_can_dismiss, 'items': self.get_onboarding_steps() } @handle_not_exist diff --git a/frappe/desk/doctype/calendar_view/calendar_view.json b/frappe/desk/doctype/calendar_view/calendar_view.json index 04839abc9f..ea220c335c 100644 --- a/frappe/desk/doctype/calendar_view/calendar_view.json +++ b/frappe/desk/doctype/calendar_view/calendar_view.json @@ -1,208 +1,81 @@ { - "allow_copy": 0, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, + "actions": [], "autoname": "Prompt", - "beta": 0, "creation": "2017-10-23 13:02:10.295824", - "custom": 0, - "docstatus": 0, "doctype": "DocType", - "document_type": "", "editable_grid": 1, "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "subject_field", + "start_date_field", + "end_date_field", + "column_break_5", + "all_day" + ], "fields": [ { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "reference_doctype", "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Reference Document Type", - "length": 0, - "no_copy": 0, "options": "DocType", - "permlevel": 0, - "precision": "", - "print_hide": 0, - 
"print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "subject_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Subject Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "start_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "Start Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "end_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "End Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 + }, + { + "fieldname": "column_break_5", + "fieldtype": "Column Break" + }, + { + "default": "0", + "fieldname": "all_day", + "fieldtype": "Check", + "label": "All Day" } ], - "has_web_view": 0, - "hide_heading": 0, - "hide_toolbar": 0, - "idx": 0, - "image_view": 0, - "in_create": 0, - "is_submittable": 0, - "issingle": 0, - "istable": 0, - "max_attachments": 0, - "modified": "2019-09-05 14:22:27.664645", + "links": [], + "modified": "2020-06-15 11:24:57.639430", "modified_by": "Administrator", "module": "Desk", "name": "Calendar View", - "name_case": "", "owner": "faris@erpnext.com", "permissions": [ { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, "create": 1, "delete": 1, "email": 1, "export": 1, - "if_owner": 0, - "import": 0, - "permlevel": 0, "print": 1, "read": 1, "report": 1, "role": "System Manager", - "set_user_permissions": 0, "share": 1, - "submit": 0, "write": 1 }, { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, - "create": 0, - "delete": 0, - "email": 0, - "export": 0, - "if_owner": 0, - "import": 0, - "permlevel": 0, - "print": 0, "read": 1, - "report": 0, - "role": "All", - "set_user_permissions": 0, - "share": 0, - "submit": 0, - "write": 0 + "role": "All" } ], - "quick_entry": 0, - "read_only": 0, - "read_only_onload": 0, - "show_name_in_global_search": 0, "sort_field": "modified", - "sort_order": "DESC", - "track_changes": 0, - "track_seen": 0 + 
"sort_order": "DESC" } \ No newline at end of file diff --git a/frappe/desk/doctype/dashboard_chart/dashboard_chart.py b/frappe/desk/doctype/dashboard_chart/dashboard_chart.py index ab1863ca0b..c6343dd187 100644 --- a/frappe/desk/doctype/dashboard_chart/dashboard_chart.py +++ b/frappe/desk/doctype/dashboard_chart/dashboard_chart.py @@ -26,15 +26,15 @@ def get_permission_query_conditions(user): if "System Manager" in roles: return None - allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read()) - allowed_reports = tuple([key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()]) + allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()] + allowed_reports = ['"%s"' % key if type(key) == str else key.encode('UTF8') for key in get_allowed_reports()] return ''' - `tabDashboard Chart`.`document_type` in {allowed_doctypes} - or `tabDashboard Chart`.`report_name` in {allowed_reports} + `tabDashboard Chart`.`document_type` in ({allowed_doctypes}) + or `tabDashboard Chart`.`report_name` in ({allowed_reports}) '''.format( - allowed_doctypes=allowed_doctypes, - allowed_reports=allowed_reports + allowed_doctypes=','.join(allowed_doctypes), + allowed_reports=','.join(allowed_reports) ) diff --git a/frappe/desk/doctype/module_onboarding/module_onboarding.json b/frappe/desk/doctype/module_onboarding/module_onboarding.json index 0667ddf6ad..02a18b9c2d 100644 --- a/frappe/desk/doctype/module_onboarding/module_onboarding.json +++ b/frappe/desk/doctype/module_onboarding/module_onboarding.json @@ -13,7 +13,6 @@ "column_break_4", "success_message", "documentation_url", - "user_can_dismiss", "is_complete", "section_break_6", "steps" @@ -53,13 +52,6 @@ "label": "Success Message", "reqd": 1 }, - { - "default": "1", - "description": "Allow users to dismiss onboarding temporarily for a day", - "fieldname": "user_can_dismiss", - "fieldtype": "Check", - "label": "User Can Dismiss " - }, { "fieldname": "documentation_url", "fieldtype": "Data", @@ -90,7 +82,7 @@ } ], "links": [], - "modified": "2020-05-18 19:42:39.738869", + "modified": "2020-06-08 15:36:04.701049", "modified_by": "Administrator", "module": "Desk", "name": "Module Onboarding", diff --git a/frappe/desk/doctype/notification_log/notification_log.py b/frappe/desk/doctype/notification_log/notification_log.py index 211b3ae5e6..12f2c41274 100644 --- a/frappe/desk/doctype/notification_log/notification_log.py +++ b/frappe/desk/doctype/notification_log/notification_log.py @@ -100,14 +100,16 @@ def send_notification_email(doc): ) def get_email_header(doc): - return { + docname = doc.document_name + header_map = { 'Default': _('New Notification'), - 'Mention': _('New Mention'), - 'Assignment': _('New Assignment'), - 'Share': _('New Document Shared'), - 'Energy Point': _('Energy Point Update'), - }[doc.type or 'Default'] + 'Mention': _('New Mention on {0}').format(docname), + 'Assignment': _('Assignment Update on {0}').format(docname), + 'Share': _('New Document Shared {0}').format(docname), + 'Energy Point': _('Energy Point Update on {0}').format(docname), + } + return header_map[doc.type or 'Default'] @frappe.whitelist() def mark_all_as_read(): diff --git a/frappe/desk/doctype/number_card/number_card.py b/frappe/desk/doctype/number_card/number_card.py index 6bb9c7d45c..c4a427c4e0 100644 --- a/frappe/desk/doctype/number_card/number_card.py +++ b/frappe/desk/doctype/number_card/number_card.py @@ -27,12 +27,12 @@ def get_permission_query_conditions(user=None): if "System Manager" in roles: 
return None - allowed_doctypes = tuple(frappe.permissions.get_doctypes_with_read()) + allowed_doctypes = ['"%s"' % doctype for doctype in frappe.permissions.get_doctypes_with_read()] return ''' - `tabNumber Card`.`document_type` in {allowed_doctypes} + `tabNumber Card`.`document_type` in ({allowed_doctypes}) '''.format( - allowed_doctypes=allowed_doctypes, + allowed_doctypes=','.join(allowed_doctypes) ) def has_permission(doc, ptype, user): diff --git a/frappe/desk/form/assign_to.py b/frappe/desk/form/assign_to.py index a916cbca82..26b2bd2835 100644 --- a/frappe/desk/form/assign_to.py +++ b/frappe/desk/form/assign_to.py @@ -178,7 +178,8 @@ def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE', description_html = "${moment(task._start).format('MMM D')} - ${moment(task._end).format('MMM D')}
`; + `| {{ __(col.name) }} | - {% endif %} - {% endfor %} -
| - - {{ - col.formatter - ? col.formatter(row._index, col._index, value, col, row, true) - : col.format - ? col.format(value, row, col, data) - : col.docfield - ? frappe.format(value, col.docfield) - : value - }} - - | - {% endif %} - {% endfor %} -+ + {% format_data = row.is_total_row ? data[0] : row %} + {{ + col.formatter + ? col.formatter(row._index, col._index, value, col, format_data, true) + : col.format + ? col.format(value, row, col, format_data) + : col.docfield + ? frappe.format(value, col.docfield) + : value + }} + + | + {% endif %} + {% endfor %} + + {% endfor %} +
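The template change above routes total rows through the first data row when calling a column formatter, since a synthetic total row carries no docfield context of its own. The equivalent selection, sketched in Python:

```python
def formatter_context(row, data):
	# mirrors `row.is_total_row ? data[0] : row` in the template above
	return data[0] if row.get("is_total_row") else row
```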
{2}
".format(frappe.utils.get_request_site_address(), + + "{2}
".format(frappe.utils.get_request_site_address(), comment.name, _("View Comment"))) diff --git a/frappe/templates/includes/macros.html b/frappe/templates/includes/macros.html index 3e822b8bf3..767bd59ec9 100644 --- a/frappe/templates/includes/macros.html +++ b/frappe/templates/includes/macros.html @@ -1,18 +1,6 @@ -{% macro square_image_with_fallback(src=None, size=None, alt=None, class="") %} +{% macro square_image_with_fallback(src=None, size='small', alt=None, class="") %} {% if src %} -{{ item.group_title }}
+ {{ render_sidebar_items(item.group_items) }} + + {%- else -%} + + {% if item.type != 'input' %} + {%- set item_route = item.route[1:] if item.route[0] == '/' else item.route -%} + + {{ _(item.title or item.label) }} + + {% else %} + + {% endif %} + + {%- endif -%} ++ {% for item in items -%} + {{ render_sidebar_item(item) }} + {%- endfor %} +
+{%- endif -%} +{% endmacro %} + +{% macro my_account() %} +{% if frappe.user != 'Guest' %} ++-
+ {{ _("My Account") }}
+
+
+{% endif %} +{% endmacro %} +- {% if sidebar_title %} --
- {{ sidebar_title }}
-
- {% endif %}
- {% for item in sidebar_items -%}
- -
- {% if item.type != 'input' %}
- {%- set item_route = item.route[1:] if item.route[0] == '/' else item.route -%}
-
- {{ _(item.title or item.label) }}
-
- {% else %}
-
- {% endif %}
-
- {%- endfor %}
- {% if frappe.user != 'Guest' %}
- -
- {{ _("My Account") }}
-
- {% endif %}
-
+ {{ render_sidebar_items(sidebar_items) }} + {{ my_account() }}{{ doc.get(df.fieldname) }}{% else -%} diff --git a/frappe/templates/web.html b/frappe/templates/web.html index 2e7aea6b53..e014ef7ace 100644 --- a/frappe/templates/web.html +++ b/frappe/templates/web.html @@ -1,10 +1,6 @@ {% extends base_template_path %} {% block hero %}{% endblock %} -{% macro page_content() %} -{%- block page_content -%}{%- endblock -%} -{% endmacro %} - {% block content %} {% macro main_content() %} @@ -31,7 +27,7 @@
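The sidebar template is reduced to two macro calls because `render_sidebar_items` and `render_sidebar_item` recurse into each other: a group item renders its title and then hands `group_items` back to `render_sidebar_items`, so nested groups work to any depth. The same shape in Python, with the item keys assumed from the template:

```python
def render_sidebar_items(items, depth=0):
	lines = []
	for item in items:
		if item.get("group_title"):
			# a group renders its title, then recurses into its children
			lines.append("  " * depth + item["group_title"])
			lines.extend(render_sidebar_items(item.get("group_items", []), depth + 1))
		else:
			lines.append("  " * depth + (item.get("title") or item.get("label", "")))
	return lines

sidebar = [{"group_title": "Docs", "group_items": [{"title": "Getting Started"}]}]
print("\n".join(render_sidebar_items(sidebar)))  # Docs, then an indented Getting Started
```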