diff --git a/.github/frappe-framework-logo.png b/.github/frappe-framework-logo.png deleted file mode 100644 index 5049078a46..0000000000 Binary files a/.github/frappe-framework-logo.png and /dev/null differ diff --git a/.github/frappe-framework-logo.svg b/.github/frappe-framework-logo.svg new file mode 100644 index 0000000000..ba04ebf264 --- /dev/null +++ b/.github/frappe-framework-logo.svg @@ -0,0 +1,4 @@ + diff --git a/.github/semantic.yml b/.github/semantic.yml new file mode 100644 index 0000000000..e1e53bc1a4 --- /dev/null +++ b/.github/semantic.yml @@ -0,0 +1,13 @@ +# Always validate the PR title AND all the commits +titleAndCommits: true + +# Allow use of Merge commits (eg on github: "Merge branch 'master' into feature/ride-unicorns") +# this is only relevant when using commitsOnly: true (or titleAndCommits: true) +allowMergeCommits: true + +# Allow use of Revert commits (eg on github: "Revert "feat: ride unicorns"") +# this is only relevant when using commitsOnly: true (or titleAndCommits: true) +allowRevertCommits: true + +# For allowed PR types: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json +# Tool Reference: https://github.com/zeke/semantic-pull-requests diff --git a/.github/workflows/docker-release.yml b/.github/workflows/docker-release.yml new file mode 100644 index 0000000000..4b1147e79f --- /dev/null +++ b/.github/workflows/docker-release.yml @@ -0,0 +1,14 @@ +name: Trigger Docker build on release +on: + release: + types: [released] +jobs: + curl: + runs-on: ubuntu-latest + container: + image: alpine:latest + steps: + - name: curl + run: | + apk add curl bash + curl -s -X POST -H "Content-Type: application/json" -H "Accept: application/json" -H "Travis-API-Version: 3" -H "Authorization: token ${{ secrets.TRAVIS_CI_TOKEN }}" -d '{"request":{"branch":"master"}}' https://api.travis-ci.com/repo/frappe%2Ffrappe_docker/requests diff --git a/.mergify.yml b/.mergify.yml index b145834cc4..582bbc2ee5 100644 --- a/.mergify.yml 
+++ b/.mergify.yml @@ -4,8 +4,7 @@ pull_request_rules: - status-success=Sider - status-success=Semantic Pull Request - status-success=Travis CI - Pull Request - - status-success=security/snyk - package.json (frappe) - - status-success=security/snyk - requirements.txt (frappe) + - status-success=security/snyk (frappe) - label!=don't-merge - label!=squash - "#approved-reviews-by>=1" @@ -17,8 +16,7 @@ pull_request_rules: - status-success=Sider - status-success=Semantic Pull Request - status-success=Travis CI - Pull Request - - status-success=security/snyk - package.json (frappe) - - status-success=security/snyk - requirements.txt (frappe) + - status-success=security/snyk (frappe) - label!=don't-merge - label=squash - "#approved-reviews-by>=1" diff --git a/.travis.yml b/.travis.yml index a1568c9118..63895675ea 100644 --- a/.travis.yml +++ b/.travis.yml @@ -47,27 +47,11 @@ matrix: script: bench --site test_site run-ui-tests frappe --headless before_install: - # do we really want to run travis? check which files are changed and if git doesnt face any fatal errors + # do we really want to run travis? - | - FILES_CHANGED=$( git diff --name-only $TRAVIS_COMMIT_RANGE 2>&1 ) - - if [[ $FILES_CHANGED != *"fatal"* ]]; then - ONLY_DOCS_CHANGES=$( echo $FILES_CHANGED | grep -qvE '\.(md|png|jpg|jpeg)$|^.github|LICENSE' ; echo $? ) - ONLY_JS_CHANGES=$( echo $FILES_CHANGED | grep -qvE '\.js$' ; echo $? ) - ONLY_PY_CHANGES=$( echo $FILES_CHANGED | grep -qvE '\.py$' ; echo $? ) - - if [[ $ONLY_DOCS_CHANGES == "1" ]]; then - echo "Only docs were updated, stopping build process."; - exit; - fi - if [[ $ONLY_JS_CHANGES == "1" && $TYPE == "server" ]]; then - echo "Only JavaScript code was updated; Stopping Python build process."; - exit; - fi - if [[ $ONLY_PY_CHANGES == "1" && $TYPE == "ui" ]]; then - echo "Only Python code was updated, stopping Cypress build process."; - exit; - fi + python ./.travis/roulette.py + if [[ $? 
!= 2 ]];then + exit; fi # install wkhtmltopdf diff --git a/.travis/roulette.py b/.travis/roulette.py new file mode 100644 index 0000000000..4d83137199 --- /dev/null +++ b/.travis/roulette.py @@ -0,0 +1,54 @@ +# if the script ends with exit code 0, then no tests are run further, else all tests are run +import os +import re +import shlex +import subprocess +import sys + + +def get_output(command, shell=True): + print(command) + command = shlex.split(command) + return subprocess.check_output(command, shell=shell, encoding="utf8").strip() + +def is_py(file): + return file.endswith("py") + +def is_js(file): + return file.endswith("js") + +def is_docs(file): + regex = re.compile('\.(md|png|jpg|jpeg)$|^.github|LICENSE') + return bool(regex.search(file)) + + +if __name__ == "__main__": + build_type = os.environ.get("TYPE") + commit_range = os.environ.get("TRAVIS_COMMIT_RANGE") + print("Build Type: {}".format(build_type)) + print("Commit Range: {}".format(commit_range)) + + try: + files_changed = get_output("git diff --name-only {}".format(commit_range), shell=False) + except Exception: + sys.exit(2) + + if "fatal" not in files_changed: + files_list = files_changed.split() + only_docs_changed = len(list(filter(is_docs, files_list))) == len(files_list) + only_js_changed = len(list(filter(is_js, files_list))) == len(files_list) + only_py_changed = len(list(filter(is_py, files_list))) == len(files_list) + + if only_docs_changed: + print("Only docs were updated, stopping build process.") + sys.exit(0) + + if only_js_changed and build_type == "server": + print("Only JavaScript code was updated; Stopping Python build process.") + sys.exit(0) + + if only_py_changed and build_type == "ui": + print("Only Python code was updated, stopping Cypress build process.") + sys.exit(0) + + sys.exit(2) diff --git a/README.md b/README.md index 860958087e..1f59376f48 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,12 @@
${log.exception}
+ | ${__('Row Number')} | +${__('Status')} | +${__('Message')} | +
|---|
{}
'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) - else: - err_msg = '{}
'.format(cstr(e)) - - error_trace = frappe.get_traceback() - if error_trace: - error_log_doc = frappe.log_error(error_trace) - error_link = get_absolute_url("Error Log", error_log_doc.name) - else: - error_link = None - - log(**{ - "row": row_idx + 1, - "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), - "message": err_msg, - "indicator": "red", - "link":error_link - }) - - # data with error to create a new file - # include the errored data in the last row as last_error_row_idx will not be updated for the last row - if skip_errors: - if last_error_row_idx == len(rows)-1: - last_error_row_idx = len(rows) - data_rows_with_error += rows[row_idx:last_error_row_idx] - else: - rollback_flag = True - finally: - frappe.local.message_log = [] - - start_row += batch_size - if rollback_flag: - frappe.db.rollback() - else: - frappe.db.commit() - - frappe.flags.mute_emails = False - frappe.flags.in_import = False - - log_message = {"messages": import_log, "error": error_flag} - if data_import_doc: - data_import_doc.log_details = json.dumps(log_message) - - import_status = None - if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): - import_status = "Partially Successful" - # write the file with the faulty row - file_name = 'error_' + filename + file_extension - if file_extension == '.xlsx': - from frappe.utils.xlsxutils import make_xlsx - xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") - file_data = xlsx_file.getvalue() + if not header: + header = Header(i, row, self.doctype, self.raw_data, self.column_to_field_map) else: - from frappe.utils.csvutils import to_csv - file_data = to_csv(data_rows_with_error) - _file = frappe.get_doc({ - "doctype": "File", - "file_name": file_name, - "attached_to_doctype": "Data Import", - "attached_to_name": data_import_doc.name, - "folder": "Home/Attachments", - "content": file_data}) - _file.save() - data_import_doc.error_file = _file.file_url + row_obj = 
Row(i, row, self.doctype, header, self.import_type) + data.append(row_obj) - elif error_flag: - import_status = "Failed" - else: - import_status = "Successful" + self.header = header + self.columns = self.header.columns + self.data = data - data_import_doc.import_status = import_status - data_import_doc.save() - if data_import_doc.import_status in ["Successful", "Partially Successful"]: - data_import_doc.submit() - publish_progress(100, True) - else: - publish_progress(0, True) - frappe.db.commit() - else: - return log_message + if len(data) < 1: + frappe.throw( + _("Import template should contain a Header and atleast one row."), + title=_("Template Error"), + ) -def get_parent_field(doctype, parenttype): - parentfield = None + def get_data_for_import_preview(self): + """Adds a serial number column as the first column""" - # get parentfield - if parenttype: - for d in frappe.get_meta(parenttype).get_table_fields(): - if d.options==doctype: - parentfield = d.fieldname + columns = [frappe._dict({"header_title": "Sr. 
No", "skip_import": True})] + columns += [col.as_dict() for col in self.columns] + for col in columns: + # only pick useful fields in docfields to minimise the payload + if col.df: + col.df = { + "fieldtype": col.df.fieldtype, + "fieldname": col.df.fieldname, + "label": col.df.label, + "options": col.df.options, + "parent": col.df.parent, + "reqd": col.df.reqd, + "default": col.df.default, + "read_only": col.df.read_only, + } + + data = [[row.row_number] + row.as_list() for row in self.data] + + warnings = self.get_warnings() + + out = frappe._dict() + out.data = data + out.columns = columns + out.warnings = warnings + total_number_of_rows = len(out.data) + if total_number_of_rows > MAX_ROWS_IN_PREVIEW: + out.data = out.data[:MAX_ROWS_IN_PREVIEW] + out.max_rows_exceeded = True + out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW + out.total_number_of_rows = total_number_of_rows + return out + + def get_payloads_for_import(self): + payloads = [] + # make a copy + data = list(self.data) + while data: + doc, rows, data = self.parse_next_row_for_import(data) + payloads.append(frappe._dict(doc=doc, rows=rows)) + return payloads + + def parse_next_row_for_import(self, data): + """ + Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. + Returns the doc, rows, and data without the rows. 
+ """ + doctypes = self.header.doctypes + + # first row is included by default + first_row = data[0] + rows = [first_row] + + # if there are child doctypes, find the subsequent rows + if len(doctypes) > 1: + # subsequent rows that have blank values in parent columns + # are considered as child rows + parent_column_indexes = self.header.get_column_indexes(self.doctype) + parent_row_values = first_row.get_values(parent_column_indexes) + + data_without_first_row = data[1:] + for row in data_without_first_row: + row_values = row.get_values(parent_column_indexes) + # if the row is blank, it's a child row doc + if all([v in INVALID_VALUES for v in row_values]): + rows.append(row) + continue + # if we encounter a row which has values in parent columns, + # then it is the next doc break - if not parentfield: - frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) - raise Exception + parent_doc = None + for row in rows: + for doctype, table_df in doctypes: + if doctype == self.doctype and not parent_doc: + parent_doc = row.parse_doc(doctype) - return parentfield + if doctype != self.doctype and table_df: + child_doc = row.parse_doc(doctype, parent_doc, table_df) + parent_doc[table_df.fieldname] = parent_doc.get(table_df.fieldname, []) + parent_doc[table_df.fieldname].append(child_doc) -def delete_child_rows(rows, doctype): - """delete child rows for all parents""" - for p in list(set([r[1] for r in rows])): - if p: - frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) + doc = parent_doc + + if self.import_type == INSERT: + # check if there is atleast one row for mandatory table fields + meta = frappe.get_meta(self.doctype) + mandatory_table_fields = [ + df + for df in meta.fields + if df.fieldtype in table_fieldtypes + and df.reqd + and len(doc.get(df.fieldname, [])) == 0 + ] + if len(mandatory_table_fields) == 1: + self.warnings.append( + { + "row": first_row.row_number, + "message": _("There should be 
atleast one row for {0} table").format( + frappe.bold(mandatory_table_fields[0].label) + ), + } + ) + elif mandatory_table_fields: + fields_string = ", ".join([df.label for df in mandatory_table_fields]) + message = _("There should be atleast one row for the following tables: {0}").format( + fields_string + ) + self.warnings.append({"row": first_row.row_number, "message": message}) + + return doc, rows, data[len(rows) :] + + def get_warnings(self): + warnings = [] + + # ImportFile warnings + warnings += self.warnings + + # Column warnings + for col in self.header.columns: + warnings += col.warnings + + # Row warnings + for row in self.data: + warnings += row.warnings + + return warnings + + ###### + + def read_file(self, file_path): + extn = file_path.split(".")[1] + + file_content = None + with io.open(file_path, mode="rb") as f: + file_content = f.read() + + return file_content, extn + + def read_content(self, content, extension): + error_title = _("Template Error") + if extension not in ("csv", "xlsx", "xls"): + frappe.throw( + _("Import template should be of type .csv, .xlsx or .xls"), title=error_title + ) + + if extension == "csv": + data = read_csv_content(content) + elif extension == "xlsx": + data = read_xlsx_file_from_attached_file(fcontent=content) + elif extension == "xls": + data = read_xls_file_from_attached_file(content) + + return data + + +class Row: + link_values_exist_map = {} + + def __init__(self, index, row, doctype, header, import_type): + self.index = index + self.row_number = index + 1 + self.doctype = doctype + self.data = row + self.header = header + self.import_type = import_type + self.warnings = [] + + len_row = len(self.data) + len_columns = len(self.header.columns) + if len_row != len_columns: + less_than_columns = len_row < len_columns + message = ( + "Row has less values than columns" + if less_than_columns + else "Row has more values than columns" + ) + self.warnings.append( + {"row": self.row_number, "message": message,} + ) + + 
def parse_doc(self, doctype, parent_doc=None, table_df=None): + col_indexes = self.header.get_column_indexes(doctype, table_df) + values = self.get_values(col_indexes) + columns = self.header.get_columns(col_indexes) + doc = self._parse_doc(doctype, columns, values, parent_doc, table_df) + return doc + + def _parse_doc(self, doctype, columns, values, parent_doc=None, table_df=None): + doc = frappe._dict() + if self.import_type == INSERT: + # new_doc returns a dict with default values set + doc = frappe.new_doc( + doctype, + parent_doc=parent_doc, + parentfield=table_df.fieldname if table_df else None, + as_dict=True, + ) + + # remove standard fields and __islocal + for key in frappe.model.default_fields + ("__islocal",): + doc.pop(key, None) + + for col, value in zip(columns, values): + df = col.df + if value in INVALID_VALUES: + value = None + + if value is not None: + value = self.validate_value(value, col) + + if value is not None: + doc[df.fieldname] = self.parse_value(value, col) + + is_table = frappe.get_meta(doctype).istable + is_update = self.import_type == UPDATE + if is_table and is_update and doc.get("name") in INVALID_VALUES: + # for table rows being inserted in update + # create a new doc with defaults set + new_doc = frappe.new_doc(doctype, as_dict=True) + new_doc.update(doc) + doc = new_doc + + self.check_mandatory_fields(doctype, doc, table_df) + return doc + + def validate_value(self, value, col): + df = col.df + if df.fieldtype == "Select": + select_options = [d for d in (df.options or '').split('\n') if d] + if select_options and value not in select_options: + options_string = ", ".join([frappe.bold(d) for d in select_options]) + msg = _("Value must be one of {0}").format(options_string) + self.warnings.append( + { + "row": self.row_number, + "field": df_as_json(df), + "message": msg, + } + ) + return + + elif df.fieldtype == "Link": + exists = self.link_exists(value, df) + if not exists: + msg = _("Value {0} missing for {1}").format( + 
frappe.bold(value), frappe.bold(df.options) + ) + self.warnings.append( + { + "row": self.row_number, + "field": df_as_json(df), + "message": msg, + } + ) + return + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + if isinstance(value, frappe.string_types): + # value was not parsed as datetime object + self.warnings.append( + { + "row": self.row_number, + "col": col.column_number, + "field": df_as_json(df), + "message": _("Value {0} must in {1} format").format( + frappe.bold(value), frappe.bold(get_user_format(col.date_format)) + ), + } + ) + return + + return value + + def link_exists(self, value, df): + key = df.options + "::" + cstr(value) + if Row.link_values_exist_map.get(key) is None: + Row.link_values_exist_map[key] = frappe.db.exists(df.options, value) + return Row.link_values_exist_map.get(key) + + def parse_value(self, value, col): + df = col.df + if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: + return value + + value = cstr(value) + + # convert boolean values to 0 or 1 + valid_check_values = ["t", "f", "true", "false", "yes", "no", "y", "n"] + if df.fieldtype == "Check" and value.lower().strip() in valid_check_values: + value = value.lower().strip() + value = 1 if value in ["t", "true", "y", "yes"] else 0 + + if df.fieldtype in ["Int", "Check"]: + value = cint(value) + elif df.fieldtype in ["Float", "Percent", "Currency"]: + value = flt(value) + elif df.fieldtype in ["Date", "Datetime"]: + value = self.get_date(value, col) + + return value + + def get_date(self, value, column): + if isinstance(value, datetime): + return value + + date_format = column.date_format + if date_format: + try: + return datetime.strptime(value, date_format) + except ValueError: + # ignore date values that dont match the format + # import will break for these values later + pass + return value + + def check_mandatory_fields(self, doctype, doc, table_df=None): + """If import type is Insert: + Check for mandatory fields 
(except table fields) in doc + if import type is Update: + Check for name field or autoname field in doc + """ + meta = frappe.get_meta(doctype) + if self.import_type == UPDATE: + if meta.istable: + # when updating records with table rows, + # there are two scenarios: + # 1. if row 'name' is provided in the template + # the table row will be updated + # 2. if row 'name' is not provided + # then a new row will be added + # so we dont need to check for mandatory + return + + # for update, only ID (name) field is mandatory + id_field = get_id_field(doctype) + if doc.get(id_field.fieldname) in INVALID_VALUES: + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} is a mandatory field asdadsf").format(id_field.label), + } + ) + return + + fields = [ + df + for df in meta.fields + if df.fieldtype not in table_fieldtypes + and df.reqd + and doc.get(df.fieldname) in INVALID_VALUES + ] + + if not fields: + return + + def get_field_label(df): + return "{0}{1}".format(df.label, " ({})".format(table_df.label) if table_df else "") + + if len(fields) == 1: + field_label = get_field_label(fields[0]) + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} is a mandatory field").format(frappe.bold(field_label)), + } + ) + else: + fields_string = ", ".join([frappe.bold(get_field_label(df)) for df in fields]) + self.warnings.append( + { + "row": self.row_number, + "message": _("{0} are mandatory fields").format(fields_string), + } + ) + + def get_values(self, indexes): + return [self.data[i] for i in indexes] + + def get(self, index): + return self.data[index] + + def as_list(self): + return self.data + + +class Header(Row): + def __init__(self, index, row, doctype, raw_data, column_to_field_map=None): + self.index = index + self.row_number = index + 1 + self.data = row + self.doctype = doctype + column_to_field_map = column_to_field_map or frappe._dict() + + self.seen = [] + self.columns = [] + + for j, header in enumerate(row): + column_values = 
[get_item_at_index(r, j) for r in raw_data] + map_to_field = column_to_field_map.get(str(j)) + column = Column( + j, header, self.doctype, column_values, map_to_field, self.seen + ) + self.seen.append(header) + self.columns.append(column) + + doctypes = [] + for col in self.columns: + if not col.df: + continue + if col.df.parent == self.doctype: + doctypes.append((col.df.parent, None)) + else: + doctypes.append((col.df.parent, col.df.child_table_df)) + + self.doctypes = sorted( + list(set(doctypes)), key=lambda x: -1 if x[0] == self.doctype else 1 + ) + + def get_column_indexes(self, doctype, tablefield=None): + def is_table_field(df): + if tablefield: + return df.child_table_df.fieldname == tablefield.fieldname + return True + + return [ + col.index + for col in self.columns + if not col.skip_import + and col.df + and col.df.parent == doctype + and is_table_field(col.df) + ] + + def get_columns(self, indexes): + return [self.columns[i] for i in indexes] + + +class Column: + seen = [] + fields_column_map = {} + + def __init__(self, index, header, doctype, column_values, map_to_field=None, seen=[]): + self.index = index + self.column_number = index + 1 + self.doctype = doctype + self.header_title = header + self.column_values = column_values + self.map_to_field = map_to_field + self.seen = seen + + self.date_format = None + self.df = None + self.skip_import = None + self.warnings = [] + + self.meta = frappe.get_meta(doctype) + self.parse() + self.validate_values() + + def parse(self): + header_title = self.header_title + column_number = str(self.column_number) + skip_import = False + + if self.map_to_field and self.map_to_field != "Don't Import": + df = get_df_for_column_header(self.doctype, self.map_to_field) + if df: + self.warnings.append( + { + "message": _("Mapping column {0} to field {1}").format( + frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) + ), + "type": "info", + } + ) + else: + self.warnings.append( + { + "message": _("Could not 
map column {0} to field {1}").format( + column_number, self.map_to_field + ), + "type": "info", + } + ) + else: + df = get_df_for_column_header(self.doctype, header_title) + # df = df_by_labels_and_fieldnames.get(header_title) + + if not df: + skip_import = True + else: + skip_import = False + + if header_title in self.seen: + self.warnings.append( + { + "col": column_number, + "message": _("Skipping Duplicate Column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + df = None + skip_import = True + elif self.map_to_field == "Don't Import": + skip_import = True + self.warnings.append( + { + "col": column_number, + "message": _("Skipping column {0}").format(frappe.bold(header_title)), + "type": "info", + } + ) + elif header_title and not df: + self.warnings.append( + { + "col": column_number, + "message": _("Cannot match column {0} with any field").format( + frappe.bold(header_title) + ), + "type": "info", + } + ) + elif not header_title and not df: + self.warnings.append( + {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} + ) + + self.df = df + self.skip_import = skip_import + + def guess_date_format_for_column(self): + """ Guesses date format for a column by parsing all the values in the column, + getting the date format and then returning the one which has the maximum frequency + """ + + date_formats = [ + frappe.utils.guess_date_format(d) for d in self.column_values if isinstance(d, str) + ] + date_formats = [d for d in date_formats if d] + if not date_formats: + return + + unique_date_formats = set(date_formats) + max_occurred_date_format = max(unique_date_formats, key=date_formats.count) + + if len(unique_date_formats) > 1: + # fmt: off + message = _("The column {0} has {1} different date formats. Automatically setting {2} as the default format as it is the most common. 
Please change other values in this column to this format.") + # fmt: on + user_date_format = get_user_format(max_occurred_date_format) + self.warnings.append( + { + "col": self.column_number, + "message": message.format( + frappe.bold(self.header_title), + len(unique_date_formats), + frappe.bold(user_date_format), + ), + "type": "info", + } + ) + + return max_occurred_date_format + + def validate_values(self): + if not self.df: + return + + if self.df.fieldtype == 'Link': + # find all values that dont exist + values = list(set([cstr(v) for v in self.column_values[1:] if v])) + exists = [d.name for d in frappe.db.get_all(self.df.options, filters={'name': ('in', values)})] + not_exists = list(set(values) - set(exists)) + if not_exists: + missing_values = ', '.join(not_exists) + self.warnings.append({ + 'col': self.column_number, + 'message': "The following values do not exist for {}: {}".format(self.df.options, missing_values), + 'type': 'warning' + }) + elif self.df.fieldtype in ("Date", "Time", "Datetime"): + # guess date format + self.date_format = self.guess_date_format_for_column() + if not self.date_format: + self.date_format = '%Y-%m-%d' + self.warnings.append({ + 'col': self.column_number, + 'message': _("Date format could not determined from the values in this column. 
Defaulting to yyyy-mm-dd."), + 'type': 'info' + }) + + def as_dict(self): + d = frappe._dict() + d.index = self.index + d.column_number = self.column_number + d.doctype = self.doctype + d.header_title = self.header_title + d.map_to_field = self.map_to_field + d.date_format = self.date_format + d.df = self.df + if hasattr(self.df, 'is_child_table_field'): + d.is_child_table_field = self.df.is_child_table_field + d.child_table_df = self.df.child_table_df + d.skip_import = self.skip_import + d.warnings = self.warnings + return d + + +def build_fields_dict_for_column_matching(parent_doctype): + """ + Build a dict with various keys to match with column headers and value as docfield + The keys can be label or fieldname + { + 'Customer': df1, + 'customer': df1, + 'Due Date': df2, + 'due_date': df2, + 'Item Code (Sales Invoice Item)': df3, + 'Sales Invoice Item:item_code': df3, + } + """ + + def get_standard_fields(doctype): + meta = frappe.get_meta(doctype) + if meta.istable: + standard_fields = [ + {"label": "Parent", "fieldname": "parent"}, + {"label": "Parent Type", "fieldname": "parenttype"}, + {"label": "Parent Field", "fieldname": "parentfield"}, + {"label": "Row Index", "fieldname": "idx"}, + ] + else: + standard_fields = [ + {"label": "Owner", "fieldname": "owner"}, + {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, + ] + + out = [] + for df in standard_fields: + df = frappe._dict(df) + df.parent = doctype + out.append(df) + return out + + parent_meta = frappe.get_meta(parent_doctype) + out = {} + + # doctypes and fieldname if it is a child doctype + doctypes = [[parent_doctype, None]] + [ + [df.options, df] for df in parent_meta.get_table_fields() + ] + + for doctype, table_df in doctypes: + # name field + name_by_label = ( + "ID" if doctype == parent_doctype else "ID ({0})".format(table_df.label) + ) + name_by_fieldname = ( + "name" if doctype == parent_doctype else "{0}.name".format(table_df.fieldname) + ) + name_df = frappe._dict( + 
{ + "fieldtype": "Data", + "fieldname": "name", + "label": "ID", + "reqd": 1, # self.import_type == UPDATE, + "parent": doctype, + } + ) + + if doctype != parent_doctype: + name_df.is_child_table_field = True + name_df.child_table_df = table_df + + out[name_by_label] = name_df + out[name_by_fieldname] = name_df + + # other fields + fields = get_standard_fields(doctype) + frappe.get_meta(doctype).fields + for df in fields: + label = (df.label or '').strip() + fieldtype = df.fieldtype or "Data" + parent = df.parent or parent_doctype + if fieldtype not in no_value_fields: + if parent_doctype == doctype: + # for parent doctypes keys will be + # Label + # label + # Label (label) + if not out.get(label): + # if Label is already set, don't set it again + # in case of duplicate column headers + out[label] = df + out[df.fieldname] = df + label_with_fieldname = "{0} ({1})".format(label, df.fieldname) + out[label_with_fieldname] = df + else: + # in case there are multiple table fields with the same doctype + # for child doctypes keys will be + # Label (Table Field Label) + # table_field.fieldname + table_fields = parent_meta.get( + "fields", {"fieldtype": ["in", table_fieldtypes], "options": parent} + ) + for table_field in table_fields: + by_label = "{0} ({1})".format(label, table_field.label) + by_fieldname = "{0}.{1}".format(table_field.fieldname, df.fieldname) + + # create a new df object to avoid mutation problems + if isinstance(df, dict): + new_df = frappe._dict(df.copy()) + else: + new_df = df.as_dict() + + new_df.is_child_table_field = True + new_df.child_table_df = table_field + out[by_label] = new_df + out[by_fieldname] = new_df + + # if autoname is based on field + # add an entry for "ID (Autoname Field)" + autoname_field = get_autoname_field(parent_doctype) + if autoname_field: + out["ID ({})".format(autoname_field.label)] = autoname_field + # ID field should also map to the autoname field + out["ID"] = autoname_field + out["name"] = autoname_field + + return out 
+ + +def get_df_for_column_header(doctype, header): + def build_fields_dict_for_doctype(): + return build_fields_dict_for_column_matching(doctype) + + df_by_labels_and_fieldname = frappe.cache().hget( + "data_import_column_header_map", doctype, generator=build_fields_dict_for_doctype + ) + return df_by_labels_and_fieldname.get(header) + + +# utilities + + +def get_id_field(doctype): + autoname_field = get_autoname_field(doctype) + if autoname_field: + return autoname_field + return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) + + +def get_autoname_field(doctype): + meta = frappe.get_meta(doctype) + if meta.autoname and meta.autoname.startswith("field:"): + fieldname = meta.autoname[len("field:") :] + return meta.get_field(fieldname) + + +def get_item_at_index(_list, i, default=None): + try: + a = _list[i] + except IndexError: + a = default + return a + + +def get_user_format(date_format): + return ( + date_format.replace("%Y", "yyyy") + .replace("%y", "yy") + .replace("%m", "mm") + .replace("%d", "dd") + ) + +def df_as_json(df): + return { + 'fieldname': df.fieldname, + 'fieldtype': df.fieldtype, + 'label': df.label, + 'options': df.options, + 'parent': df.parent, + 'default': df.default + } diff --git a/frappe/core/doctype/data_import/importer_new.py b/frappe/core/doctype/data_import/importer_new.py deleted file mode 100644 index 040e9fabc4..0000000000 --- a/frappe/core/doctype/data_import/importer_new.py +++ /dev/null @@ -1,1044 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors -# MIT License. 
See license.txt - -import io -import os -import json -import timeit -import frappe -from datetime import datetime -from frappe import _ -from frappe.utils import cint, flt, update_progress_bar, cstr, DATETIME_FORMAT -from frappe.utils.csvutils import read_csv_content -from frappe.utils.xlsxutils import ( - read_xlsx_file_from_attached_file, - read_xls_file_from_attached_file, -) -from frappe.model import no_value_fields, table_fields - -INVALID_VALUES = ["", None] -MAX_ROWS_IN_PREVIEW = 10 -INSERT = "Insert New Records" -UPDATE = "Update Existing Records" - -# pylint: disable=R0201 -class Importer: - def __init__( - self, doctype, data_import=None, file_path=None, content=None, console=False - ): - self.doctype = doctype - self.template_options = frappe._dict({"remap_column": {}}) - self.console = console - - if data_import: - self.data_import = data_import - if self.data_import.template_options: - template_options = frappe.parse_json(self.data_import.template_options) - self.template_options.update(template_options) - self.import_type = self.data_import.import_type - else: - self.data_import = None - - self.import_type = self.import_type or INSERT - - self.header_row = None - self.data = None - # used to store date formats guessed from data rows per column - self._guessed_date_formats = {} - # used to store eta during import - self.last_eta = 0 - # used to collect warnings during template parsing - # and show them to user - self.warnings = [] - self.meta = frappe.get_meta(doctype) - self.prepare_content(file_path, content) - self.parse_data_from_template() - - def prepare_content(self, file_path, content): - extension = None - if self.data_import and self.data_import.import_file: - file_doc = frappe.get_doc("File", {"file_url": self.data_import.import_file}) - parts = file_doc.get_extension() - extension = parts[1] - content = file_doc.get_content() - extension = extension.lstrip(".") - - if file_path: - content, extension = self.read_file(file_path) - - if not 
extension: - extension = "csv" - - if content: - self.read_content(content, extension) - - self.validate_template_content() - - def read_file(self, file_path): - extn = file_path.split(".")[1] - - file_content = None - with io.open(file_path, mode="rb") as f: - file_content = f.read() - - return file_content, extn - - def read_content(self, content, extension): - error_title = _("Template Error") - if extension not in ("csv", "xlsx", "xls"): - frappe.throw( - _("Import template should be of type .csv, .xlsx or .xls"), title=error_title - ) - - if extension == "csv": - data = read_csv_content(content) - elif extension == "xlsx": - data = read_xlsx_file_from_attached_file(fcontent=content) - elif extension == "xls": - data = read_xls_file_from_attached_file(content) - - data = self.remove_empty_rows_and_columns(data) - - if len(data) <= 1: - frappe.throw( - _("Import template should contain a Header and atleast one row."), title=error_title - ) - - self.header_row = data[0] - self.data = data[1:] - - def validate_template_content(self): - column_count = len(self.header_row) - if any([len(row) != column_count and len(row) != 0 for row in self.data]): - frappe.throw( - _("Number of columns does not match with data"), title=_("Invalid Template") - ) - - def remove_empty_rows_and_columns(self, raw_data): - self.row_index_map = [] - removed_rows = [] - removed_columns = [] - - # remove empty rows - data_without_empty_rows = [] - for i, row in enumerate(raw_data): - if all(v in INVALID_VALUES for v in row): - # empty row - removed_rows.append(i) - else: - data_without_empty_rows.append(row) - self.row_index_map.append(i) - - # remove empty columns - # a column with a header and no data is a valid column - # a column with no header and no data will be removed - first_row = data_without_empty_rows[0] - for i, column in enumerate(first_row): - column_values = [row[i] for row in data_without_empty_rows] - if all(v in INVALID_VALUES for v in column_values): - # empty column - 
removed_columns.append(i) - - if removed_columns: - data_without_empty_rows_and_columns = [] - # remove empty columns from data - for i, row in enumerate(data_without_empty_rows): - new_row = [v for j, v in enumerate(row) if j not in removed_columns] - data_without_empty_rows_and_columns.append(new_row) - else: - data_without_empty_rows_and_columns = data_without_empty_rows - - return data_without_empty_rows_and_columns - - def get_data_for_import_preview(self): - out = frappe._dict() - out.data = list(self.rows) - out.columns = self.columns - out.warnings = self.warnings - total_number_of_rows = len(out.data) - if total_number_of_rows > MAX_ROWS_IN_PREVIEW: - out.data = out.data[:MAX_ROWS_IN_PREVIEW] - out.max_rows_exceeded = True - out.max_rows_in_preview = MAX_ROWS_IN_PREVIEW - out.total_number_of_rows = total_number_of_rows - return out - - def parse_data_from_template(self): - columns = self.parse_columns_from_header_row() - columns = self.detect_date_formats(columns) - columns, data = self.add_serial_no_column(columns, self.data) - - self.columns = columns - self.rows = data - - def parse_columns_from_header_row(self): - remap_column = self.template_options.remap_column - columns = [] - - df_by_labels_and_fieldnames = self.build_fields_dict_for_column_matching() - - for i, header_title in enumerate(self.header_row): - header_row_index = str(i) - column_number = str(i + 1) - skip_import = False - fieldname = remap_column.get(header_row_index) - - if fieldname and fieldname != "Don't Import": - df = df_by_labels_and_fieldnames.get(fieldname) - self.warnings.append( - { - "col": column_number, - "message": _("Mapping column {0} to field {1}").format( - frappe.bold(header_title or "Untitled Column"), frappe.bold(df.label) - ), - "type": "info", - } - ) - else: - df = df_by_labels_and_fieldnames.get(header_title) - - if not df: - skip_import = True - else: - skip_import = False - - if fieldname == "Don't Import": - skip_import = True - self.warnings.append( - { - 
"col": column_number, - "message": _("Skipping column {0}").format(frappe.bold(header_title)), - "type": "info", - } - ) - elif header_title and not df: - self.warnings.append( - { - "col": column_number, - "message": _("Cannot match column {0} with any field").format( - frappe.bold(header_title) - ), - "type": "info", - } - ) - elif not header_title and not df: - self.warnings.append( - {"col": column_number, "message": _("Skipping Untitled Column"), "type": "info"} - ) - - columns.append( - frappe._dict( - df=df, - skip_import=skip_import, - header_title=header_title, - column_number=column_number, - index=i, - ) - ) - - return columns - - def build_fields_dict_for_column_matching(self): - """ - Build a dict with various keys to match with column headers and value as docfield - The keys can be label or fieldname - { - 'Customer': df1, - 'customer': df1, - 'Due Date': df2, - 'due_date': df2, - 'Item Code (Sales Invoice Item)': df3, - 'Sales Invoice Item:item_code': df3, - } - """ - out = {} - - table_doctypes = [df.options for df in self.meta.get_table_fields()] - doctypes = table_doctypes + [self.doctype] - for doctype in doctypes: - # name field - name_key = "ID" if self.doctype == doctype else "ID ({})".format(doctype) - name_df = frappe._dict( - { - "fieldtype": "Data", - "fieldname": "name", - "label": "ID", - "reqd": self.import_type == UPDATE, - "parent": doctype, - } - ) - out[name_key] = name_df - out["name"] = name_df - - # other fields - meta = frappe.get_meta(doctype) - fields = self.get_standard_fields(doctype) + meta.fields - for df in fields: - fieldtype = df.fieldtype or "Data" - parent = df.parent or self.doctype - if fieldtype not in no_value_fields: - # label as key - label = ( - df.label if self.doctype == doctype else "{0} ({1})".format(df.label, parent) - ) - out[label] = df - # fieldname as key - if self.doctype == doctype: - out[df.fieldname] = df - else: - key = "{0}:{1}".format(doctype, df.fieldname) - out[key] = df - - # if autoname is 
based on field - # add an entry for "ID (Autoname Field)" - autoname_field = self.get_autoname_field(self.doctype) - if autoname_field: - out["ID ({})".format(autoname_field.label)] = autoname_field - # ID field should also map to the autoname field - out["ID"] = autoname_field - out["name"] = autoname_field - - return out - - def get_standard_fields(self, doctype): - meta = frappe.get_meta(doctype) - if meta.istable: - standard_fields = [ - {"label": "Parent", "fieldname": "parent"}, - {"label": "Parent Type", "fieldname": "parenttype"}, - {"label": "Parent Field", "fieldname": "parentfield"}, - {"label": "Row Index", "fieldname": "idx"}, - ] - else: - standard_fields = [ - {"label": "Owner", "fieldname": "owner"}, - {"label": "Document Status", "fieldname": "docstatus", "fieldtype": "Int"}, - ] - - out = [] - for df in standard_fields: - df = frappe._dict(df) - df.parent = doctype - out.append(df) - return out - - def detect_date_formats(self, columns): - for col in columns: - if col.df and col.df.fieldtype in ["Date", "Time", "Datetime"]: - col.date_format = self.guess_date_format_for_column(col, columns) - return columns - - def add_serial_no_column(self, columns, data): - columns_with_serial_no = [ - frappe._dict({"header_title": "Sr. 
No", "skip_import": True}) - ] + columns - - # update index for each column - for i, col in enumerate(columns_with_serial_no): - col.index = i - - data_with_serial_no = [] - for i, row in enumerate(data): - data_with_serial_no.append([self.row_index_map[i] + 1] + row) - - return columns_with_serial_no, data_with_serial_no - - def parse_value(self, value, df): - if isinstance(value, datetime) and df.fieldtype in ["Date", "Datetime"]: - return value - - value = cstr(value) - - # convert boolean values to 0 or 1 - if df.fieldtype == "Check" and value.lower().strip() in [ - "t", - "f", - "true", - "false", - "yes", - "no", - "y", - "n", - ]: - value = value.lower().strip() - value = 1 if value in ["t", "true", "y", "yes"] else 0 - - if df.fieldtype in ["Int", "Check"]: - value = cint(value) - elif df.fieldtype in ["Float", "Percent", "Currency"]: - value = flt(value) - elif df.fieldtype in ["Date", "Datetime"]: - value = self.parse_date_format(value, df) - - return value - - def parse_date_format(self, value, df): - date_format = self.get_date_format_for_df(df) or DATETIME_FORMAT - try: - return datetime.strptime(value, date_format) - except ValueError: - # ignore date values that dont match the format - # import will break for these values later - pass - return value - - def get_date_format_for_df(self, df): - return self._guessed_date_formats.get(df.parent + df.fieldname) - - def guess_date_format_for_column(self, column, columns): - """ Guesses date format for a column by parsing the first 10 values in the column, - getting the date format and then returning the one which has the maximum frequency - """ - PARSE_ROW_COUNT = 10 - - df = column.df - key = df.parent + df.fieldname - - if not self._guessed_date_formats.get(key): - matches = [col for col in columns if col.df == df] - if not matches: - self._guessed_date_formats[key] = None - return - - column = matches[0] - column_index = column.index - - date_values = [ - row[column_index] for row in 
self.data[:PARSE_ROW_COUNT] if row[column_index] - ] - date_formats = [ - guess_date_format(d) if isinstance(d, str) else None for d in date_values - ] - if not date_formats: - return - max_occurred_date_format = max(set(date_formats), key=date_formats.count) - self._guessed_date_formats[key] = max_occurred_date_format - - return self._guessed_date_formats[key] - - def import_data(self): - # set user lang for translations - frappe.cache().hdel("lang", frappe.session.user) - frappe.set_user_lang(frappe.session.user) - - if not self.console: - self.data_import.db_set("template_warnings", "") - - # set flags - frappe.flags.in_import = True - frappe.flags.mute_emails = self.data_import.mute_emails - - # prepare a map for missing link field values - self.prepare_missing_link_field_values() - - # parse docs from rows - payloads = self.get_payloads_for_import() - - # dont import if there are non-ignorable warnings - warnings = [w for w in self.warnings if w.get("type") != "info"] - if warnings: - if self.console: - self.print_grouped_warnings(warnings) - else: - self.data_import.db_set("template_warnings", json.dumps(warnings)) - frappe.publish_realtime( - "data_import_refresh", {"data_import": self.data_import.name} - ) - return - - # setup import log - if self.data_import.import_log: - import_log = frappe.parse_json(self.data_import.import_log) - else: - import_log = [] - - # remove previous failures from import log - import_log = [l for l in import_log if l.get("success") == True] - - # get successfully imported rows - imported_rows = [] - for log in import_log: - log = frappe._dict(log) - if log.success: - imported_rows += log.row_indexes - - # start import - total_payload_count = len(payloads) - batch_size = frappe.conf.data_import_batch_size or 1000 - - for batch_index, batched_payloads in enumerate( - frappe.utils.create_batch(payloads, batch_size) - ): - for i, payload in enumerate(batched_payloads): - doc = payload.doc - row_indexes = [row[0] for row in 
payload.rows] - current_index = (i + 1) + (batch_index * batch_size) - - if set(row_indexes).intersection(set(imported_rows)): - print("Skipping imported rows", row_indexes) - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "skipping": True, - "data_import": self.data_import.name, - }, - ) - continue - - try: - start = timeit.default_timer() - doc = self.process_doc(doc) - processing_time = timeit.default_timer() - start - eta = self.get_eta(current_index, total_payload_count, processing_time) - - if total_payload_count > 5: - frappe.publish_realtime( - "data_import_progress", - { - "current": current_index, - "total": total_payload_count, - "docname": doc.name, - "data_import": self.data_import.name, - "success": True, - "row_indexes": row_indexes, - "eta": eta, - }, - ) - if self.console: - update_progress_bar( - "Importing {0} records".format(total_payload_count), - current_index, - total_payload_count, - ) - import_log.append( - frappe._dict(success=True, docname=doc.name, row_indexes=row_indexes) - ) - # commit after every successful import - frappe.db.commit() - - except Exception: - import_log.append( - frappe._dict( - success=False, - exception=frappe.get_traceback(), - messages=frappe.local.message_log, - row_indexes=row_indexes, - ) - ) - frappe.clear_messages() - # rollback if exception - frappe.db.rollback() - - # set status - failures = [l for l in import_log if l.get("success") == False] - if len(failures) == total_payload_count: - status = "Pending" - elif len(failures) > 0: - status = "Partial Success" - else: - status = "Success" - - if self.console: - self.print_import_log(import_log) - else: - self.data_import.db_set("status", status) - self.data_import.db_set("import_log", json.dumps(import_log)) - - frappe.flags.in_import = False - frappe.flags.mute_emails = False - frappe.publish_realtime("data_import_refresh", {"data_import": 
self.data_import.name}) - - return import_log - - def get_payloads_for_import(self): - payloads = [] - # make a copy - data = list(self.rows) - while data: - doc, rows, data = self.parse_next_row_for_import(data) - payloads.append(frappe._dict(doc=doc, rows=rows)) - return payloads - - def parse_next_row_for_import(self, data): - """ - Parses rows that make up a doc. A doc maybe built from a single row or multiple rows. - Returns the doc, rows, and data without the rows. - """ - doctypes = set([col.df.parent for col in self.columns if col.df and col.df.parent]) - - # first row is included by default - first_row = data[0] - rows = [first_row] - - # if there are child doctypes, find the subsequent rows - if len(doctypes) > 1: - # subsequent rows either dont have any parent value set - # or have the same value as the parent row - # we include a row if either of conditions match - parent_column_indexes = [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == self.doctype - ] - parent_row_values = [first_row[i] for i in parent_column_indexes] - - data_without_first_row = data[1:] - for row in data_without_first_row: - row_values = [row[i] for i in parent_column_indexes] - # if the row is blank, it's a child row doc - if all([v in INVALID_VALUES for v in row_values]): - rows.append(row) - continue - # if the row has same values as parent row, it's a child row doc - if row_values == parent_row_values: - rows.append(row) - continue - # if any of those conditions dont match, it's the next doc - break - - def get_column_indexes(doctype): - return [ - col.index - for col in self.columns - if not col.skip_import and col.df and col.df.parent == doctype - ] - - def validate_value(value, df): - if df.fieldtype == "Select": - select_options = df.get_select_options() - if select_options and value not in select_options: - options_string = ", ".join([frappe.bold(d) for d in select_options]) - msg = _("Value must be one of 
{0}").format(options_string) - self.warnings.append( - { - "row": row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return - - elif df.fieldtype == "Link": - d = self.get_missing_link_field_values(df.options) - if value in d.missing_values and not d.one_mandatory: - msg = _("Value {0} missing for {1}").format( - frappe.bold(value), frappe.bold(df.options) - ) - self.warnings.append( - { - "row": row_number, - "field": df.as_dict(convert_dates_to_str=True), - "message": msg, - } - ) - return value - - return value - - def parse_doc(doctype, docfields, values, row_number): - doc = frappe._dict() - if self.import_type == INSERT: - # new_doc returns a dict with default values set - doc = frappe.new_doc(doctype, as_dict=True) - - # remove standard fields and __islocal - for key in frappe.model.default_fields + ("__islocal",): - doc.pop(key, None) - - for df, value in zip(docfields, values): - if value in INVALID_VALUES: - value = None - - if value is not None: - value = validate_value(value, df) - - if value is not None: - doc[df.fieldname] = self.parse_value(value, df) - - is_table = frappe.get_meta(doctype).istable - is_update = self.import_type == UPDATE - if is_table and is_update and doc.get("name") in INVALID_VALUES: - # for table rows being inserted in update - # create a new doc with defaults set - new_doc = frappe.new_doc(doctype, as_dict=True) - new_doc.update(doc) - doc = new_doc - - check_mandatory_fields(doctype, doc, row_number) - return doc - - def check_mandatory_fields(doctype, doc, row_number): - """If import type is Insert: - Check for mandatory fields (except table fields) in doc - if import type is Update: - Check for name field or autoname field in doc - """ - meta = frappe.get_meta(doctype) - if self.import_type == UPDATE: - if meta.istable: - # when updating records with table rows, - # there are two scenarios: - # 1. if row 'name' is provided in the template - # the table row will be updated - # 2. 
if row 'name' is not provided - # then a new row will be added - # so we dont need to check for mandatory - return - - id_field = self.get_id_field(doctype) - if doc.get(id_field.fieldname) in INVALID_VALUES: - self.warnings.append( - { - "row": row_number, - "message": _("{0} is a mandatory field").format(id_field.label), - } - ) - return - - fields = [ - df - for df in meta.fields - if df.fieldtype not in table_fields - and df.reqd - and doc.get(df.fieldname) in INVALID_VALUES - ] - - if not fields: - return - - if len(fields) == 1: - self.warnings.append( - { - "row": row_number, - "message": _("{0} is a mandatory field").format(fields[0].label), - } - ) - else: - fields_string = ", ".join([df.label for df in fields]) - self.warnings.append( - {"row": row_number, "message": _("{0} are mandatory fields").format(fields_string)} - ) - - parsed_docs = {} - for row in rows: - for doctype in doctypes: - if doctype == self.doctype and parsed_docs.get(doctype): - # if parent doc is already parsed from the first row - # then skip - continue - - row_number = row[0] - column_indexes = get_column_indexes(doctype) - values = [row[i] for i in column_indexes] - - if all(v in INVALID_VALUES for v in values): - # skip values if all of them are empty - continue - - columns = [self.columns[i] for i in column_indexes] - docfields = [col.df for col in columns] - doc = parse_doc(doctype, docfields, values, row_number) - parsed_docs[doctype] = parsed_docs.get(doctype, []) - parsed_docs[doctype].append(doc) - - # build the doc with children - doc = {} - for doctype, docs in parsed_docs.items(): - if doctype == self.doctype: - doc.update(docs[0]) - else: - table_dfs = self.meta.get( - "fields", {"options": doctype, "fieldtype": ["in", table_fields]} - ) - if table_dfs: - table_field = table_dfs[0] - doc[table_field.fieldname] = docs - - # check if there is atleast one row for mandatory table fields - mandatory_table_fields = [ - df - for df in self.meta.fields - if df.fieldtype in 
table_fields and df.reqd and len(doc.get(df.fieldname, [])) == 0 - ] - if len(mandatory_table_fields) == 1: - self.warnings.append( - { - "row": first_row[0], - "message": _("There should be atleast one row for {0} table").format( - mandatory_table_fields[0].label - ), - } - ) - elif mandatory_table_fields: - fields_string = ", ".join([df.label for df in mandatory_table_fields]) - message = _("There should be atleast one row for the following tables: {0}").format( - fields_string - ) - self.warnings.append({"row": first_row[0], "message": message}) - - return doc, rows, data[len(rows) :] - - def process_doc(self, doc): - if self.import_type == INSERT: - return self.insert_record(doc) - elif self.import_type == UPDATE: - return self.update_record(doc) - - def insert_record(self, doc): - self.create_missing_linked_records(doc) - - new_doc = frappe.new_doc(self.doctype) - new_doc.update(doc) - # name shouldn't be set when inserting a new record - new_doc.set("name", None) - new_doc.insert() - if self.meta.is_submittable and self.data_import.submit_after_import: - new_doc.submit() - return new_doc - - def create_missing_linked_records(self, doc): - """ - Finds fields that are of type Link, and creates the corresponding - document automatically if it has only one mandatory field - """ - link_values = [] - - def get_link_fields(doc, doctype): - for fieldname, value in doc.items(): - meta = frappe.get_meta(doctype) - df = meta.get_field(fieldname) - if not df: - continue - if df.fieldtype == "Link" and value not in INVALID_VALUES: - link_values.append([df.options, value]) - elif df.fieldtype in table_fields: - for row in value: - get_link_fields(row, df.options) - - get_link_fields(doc, self.doctype) - - for link_doctype, link_value in link_values: - d = self.missing_link_values.get(link_doctype) - if d and d.one_mandatory and link_value in d.missing_values: - # find the autoname field - autoname_field = self.get_autoname_field(link_doctype) - name_field = 
autoname_field.fieldname if autoname_field else "name" - new_doc = frappe.new_doc(link_doctype) - new_doc.set(name_field, link_value) - new_doc.insert() - d.missing_values.remove(link_value) - - def update_record(self, doc): - id_fieldname = self.get_id_fieldname(self.doctype) - id_value = doc[id_fieldname] - existing_doc = frappe.get_doc(self.doctype, id_value) - existing_doc.flags.updater_reference = { - "doctype": self.data_import.doctype, - "docname": self.data_import.name, - "label": _("via Data Import"), - } - existing_doc.update(doc) - existing_doc.save() - return existing_doc - - def export_errored_rows(self): - from frappe.utils.csvutils import build_csv_response - - if not self.data_import: - return - - import_log = frappe.parse_json(self.data_import.import_log or "[]") - failures = [l for l in import_log if l.get("success") == False] - row_indexes = [] - for f in failures: - row_indexes.extend(f.get("row_indexes", [])) - - # de duplicate - row_indexes = list(set(row_indexes)) - row_indexes.sort() - - header_row = [col.header_title for col in self.columns[1:]] - rows = [header_row] - rows += [row[1:] for row in self.rows if row[0] in row_indexes] - - build_csv_response(rows, self.doctype) - - def get_missing_link_field_values(self, doctype): - return self.missing_link_values.get(doctype, {}) - - def prepare_missing_link_field_values(self): - columns = self.columns - rows = self.rows - link_column_indexes = [ - col.index for col in columns if col.df and col.df.fieldtype == "Link" - ] - - self.missing_link_values = {} - for index in link_column_indexes: - col = columns[index] - column_values = [row[index] for row in rows] - values = set([v for v in column_values if v not in INVALID_VALUES]) - doctype = col.df.options - - missing_values = [value for value in values if not frappe.db.exists(doctype, value)] - if self.missing_link_values.get(doctype): - self.missing_link_values[doctype].missing_values += missing_values - else: - 
self.missing_link_values[doctype] = frappe._dict( - missing_values=missing_values, - one_mandatory=self.has_one_mandatory_field(doctype), - df=col.df, - ) - - def get_eta(self, current, total, processing_time): - remaining = total - current - eta = processing_time * remaining - if not self.last_eta or eta < self.last_eta: - self.last_eta = eta - return self.last_eta - - def has_one_mandatory_field(self, doctype): - meta = frappe.get_meta(doctype) - # get mandatory fields with default not set - mandatory_fields = [df for df in meta.fields if df.reqd and not df.default] - mandatory_fields_count = len(mandatory_fields) - if meta.autoname and meta.autoname.lower() == "prompt": - mandatory_fields_count += 1 - return mandatory_fields_count == 1 - - def get_id_fieldname(self, doctype): - return self.get_id_field(doctype).fieldname - - def get_id_field(self, doctype): - autoname_field = self.get_autoname_field(doctype) - if autoname_field: - return autoname_field - return frappe._dict({"label": "ID", "fieldname": "name", "fieldtype": "Data"}) - - def get_autoname_field(self, doctype): - meta = frappe.get_meta(doctype) - if meta.autoname and meta.autoname.startswith("field:"): - fieldname = meta.autoname[len("field:") :] - return meta.get_field(fieldname) - - def print_grouped_warnings(self, warnings): - warnings_by_row = {} - other_warnings = [] - for w in warnings: - if w.get("row"): - warnings_by_row.setdefault(w.get("row"), []).append(w) - else: - other_warnings.append(w) - - for row_number, warnings in warnings_by_row.items(): - print("Row {0}".format(row_number)) - for w in warnings: - print(w.get("message")) - - for w in other_warnings: - print(w.get("message")) - - def print_import_log(self, import_log): - failed_records = [l for l in import_log if not l.success] - successful_records = [l for l in import_log if l.success] - - if successful_records: - print( - "Successfully imported {0} records out of {1}".format( - len(successful_records), len(import_log) - ) - ) - 
- if failed_records: - print("Failed to import {0} records".format(len(failed_records))) - file_name = "{0}_import_on_{1}.txt".format(self.doctype, frappe.utils.now()) - print("Check {0} for errors".format(os.path.join("sites", file_name))) - text = "" - for w in failed_records: - text += "Row Indexes: {0}\n".format(str(w.get("row_indexes", []))) - text += "Messages:\n{0}\n".format("\n".join(w.get("messages", []))) - text += "Traceback:\n{0}\n\n".format(w.get("exception")) - - with open(file_name, "w") as f: - f.write(text) - - -DATE_FORMATS = [ - r"%d-%m-%Y", - r"%m-%d-%Y", - r"%Y-%m-%d", - r"%d-%m-%y", - r"%m-%d-%y", - r"%y-%m-%d", - r"%d/%m/%Y", - r"%m/%d/%Y", - r"%Y/%m/%d", - r"%d/%m/%y", - r"%m/%d/%y", - r"%y/%m/%d", - r"%d.%m.%Y", - r"%m.%d.%Y", - r"%Y.%m.%d", - r"%d.%m.%y", - r"%m.%d.%y", - r"%y.%m.%d", -] - -TIME_FORMATS = [ - r"%H:%M:%S.%f", - r"%H:%M:%S", - r"%H:%M", - r"%I:%M:%S.%f %p", - r"%I:%M:%S %p", - r"%I:%M %p", -] - - -def guess_date_format(date_string): - date_string = date_string.strip() - - _date = None - _time = None - - if " " in date_string: - _date, _time = date_string.split(" ", 1) - else: - _date = date_string - - date_format = None - time_format = None - - for f in DATE_FORMATS: - try: - # if date is parsed without any exception - # capture the date format - datetime.strptime(_date, f) - date_format = f - break - except ValueError: - pass - - if _time: - for f in TIME_FORMATS: - try: - # if time is parsed without any exception - # capture the time format - datetime.strptime(_time, f) - time_format = f - break - except ValueError: - pass - - full_format = date_format - if time_format: - full_format += " " + time_format - return full_format - - -def import_data(doctype, file_path): - i = Importer(doctype, file_path) - i.import_data() diff --git a/frappe/core/doctype/data_import/test_data_import.js b/frappe/core/doctype/data_import/test_data_import.js deleted file mode 100644 index fbce7781b6..0000000000 --- 
a/frappe/core/doctype/data_import/test_data_import.js +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable */ -// rename this file from _test_[name] to test_[name] to activate -// and remove above this line - -QUnit.test("test: Data Import", function (assert) { - let done = assert.async(); - - // number of asserts - assert.expect(1); - - frappe.run_serially([ - // insert a new Data Import - () => frappe.tests.make('Data Import', [ - // values to be set - {key: 'value'} - ]), - () => { - assert.equal(cur_frm.doc.key, 'value'); - }, - () => done() - ]); - -}); diff --git a/frappe/core/doctype/data_import/test_data_import.py b/frappe/core/doctype/data_import/test_data_import.py index 406ea08958..15fd57744a 100644 --- a/frappe/core/doctype/data_import/test_data_import.py +++ b/frappe/core/doctype/data_import/test_data_import.py @@ -1,100 +1,10 @@ # -*- coding: utf-8 -*- -# Copyright (c) 2017, Frappe Technologies and Contributors +# Copyright (c) 2020, Frappe Technologies and Contributors # See license.txt from __future__ import unicode_literals -import frappe, unittest -from frappe.core.doctype.data_export import exporter -from frappe.core.doctype.data_import import importer -from frappe.utils.csvutils import read_csv_content +# import frappe +import unittest class TestDataImport(unittest.TestCase): - def test_export(self): - exporter.export_data("User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - - def test_export_with_data(self): - exporter.export_data("User", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], "User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - - def test_export_with_all_doctypes(self): - exporter.export_data("User", all_doctypes="Yes", template=True, with_data=True) - content = read_csv_content(frappe.response.result) - self.assertTrue(content[1][1], 
"User") - self.assertTrue('"Administrator"' in [c[1] for c in content if len(c)>1]) - self.assertEqual(content[13][0], "DocType:") - self.assertEqual(content[13][1], "User") - self.assertTrue("Has Role" in content[13]) - - def test_import(self): - if frappe.db.exists("Blog Category", "test-category"): - frappe.delete_doc("Blog Category", "test-category") - - exporter.export_data("Blog Category", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test-category", "Test Cateogry"]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "Test Category") - - # export with data - exporter.export_data("Blog Category", all_doctypes=True, template=True, with_data=True) - content = read_csv_content(frappe.response.result) - - # overwrite - content[-1][3] = "New Title" - importer.upload(content, overwrite=True) - self.assertTrue(frappe.db.get_value("Blog Category", "test-category", "title"), "New Title") - - def test_import_only_children(self): - user_email = "test_import_userrole@example.com" - if frappe.db.exists("User", user_email): - frappe.delete_doc("User", user_email, force=True) - - frappe.get_doc({"doctype": "User", "email": user_email, "first_name": "Test Import UserRole"}).insert() - - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Blogger"]) - importer.upload(content) - - user = frappe.get_doc("User", user_email) - self.assertTrue(frappe.db.get_value("Has Role", filters={"role": "Blogger", "parent": user_email, "parenttype": "User"})) - self.assertTrue(user.get("roles")[0].role, "Blogger") - - # overwrite - exporter.export_data("Has Role", "User", all_doctypes=True, template=True) - content = read_csv_content(frappe.response.result) - content.append(["", "test_import_userrole@example.com", "Website Manager"]) - 
importer.upload(content, overwrite=True) - - user = frappe.get_doc("User", user_email) - self.assertEqual(len(user.get("roles")), 1) - self.assertTrue(user.get("roles")[0].role, "Website Manager") - - def test_import_with_children(self): #pylint: disable=R0201 - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - exporter.export_data("Event", all_doctypes="Yes", template=True) - content = read_csv_content(frappe.response.result) - - content.append([None] * len(content[-2])) - content[-1][1] = "__Test Event with children" - content[-1][2] = "Private" - content[-1][3] = "2014-01-01 10:00:00.000000" - importer.upload(content) - - frappe.get_doc("Event", {"subject":"__Test Event with children"}) - - def test_excel_import(self): - if frappe.db.exists("Event", "EV00001"): - frappe.delete_doc("Event", "EV00001") - - exporter.export_data("Event", all_doctypes=True, template=True, file_type="Excel") - from frappe.utils.xlsxutils import read_xlsx_file_from_attached_file - content = read_xlsx_file_from_attached_file(fcontent=frappe.response.filecontent) - content.append(["", "_test", "Private", "05-11-2017 13:51:48", "Event", "blue", "0", "0", "", "Open", "", 0, "", 0, "", "", "1", 0, "", "", 0, 0, 0, 0, 0, 0, 0]) - importer.upload(content) - self.assertTrue(frappe.db.get_value("Event", {"subject": "_test"}, "name")) \ No newline at end of file + pass diff --git a/frappe/core/doctype/data_import/test_exporter.py b/frappe/core/doctype/data_import/test_exporter.py new file mode 100644 index 0000000000..8415af2e63 --- /dev/null +++ b/frappe/core/doctype/data_import/test_exporter.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +import unittest +import frappe +from frappe.core.doctype.data_import.exporter import Exporter +from frappe.core.doctype.data_import.test_importer import ( + create_doctype_if_not_exists, +) + +doctype_name = 
'DocType for Export' + +class TestExporter(unittest.TestCase): + def setUp(self): + create_doctype_if_not_exists(doctype_name) + + def test_exports_specified_fields(self): + if not frappe.db.exists(doctype_name, "Test"): + doc = frappe.get_doc( + doctype=doctype_name, + title="Test", + description="Test Description", + table_field_1=[ + {"child_title": "Child Title 1", "child_description": "Child Description 1"}, + {"child_title": "Child Title 2", "child_description": "Child Description 2"}, + ], + table_field_2=[ + {"child_2_title": "Child Title 1", "child_2_description": "Child Description 1"}, + ], + table_field_1_again=[ + { + "child_title": "Child Title 1 Again", + "child_description": "Child Description 1 Again", + }, + ], + ).insert() + else: + doc = frappe.get_doc(doctype_name, "Test") + + e = Exporter( + doctype_name, + export_fields={ + doctype_name: ["title", "description", "number", "another_number"], + "table_field_1": ["name", "child_title", "child_description"], + "table_field_2": ["child_2_date", "child_2_number"], + "table_field_1_again": [ + "child_title", + "child_date", + "child_number", + "child_another_number", + ], + }, + export_data=True, + ) + csv_array = e.get_csv_array() + header_row = csv_array[0] + + self.assertEqual( + header_row, + [ + "Title", + "Description", + "Number", + "another_number", + "ID (Table Field 1)", + "Child Title (Table Field 1)", + "Child Description (Table Field 1)", + "Child 2 Date (Table Field 2)", + "Child 2 Number (Table Field 2)", + "Child Title (Table Field 1 Again)", + "Child Date (Table Field 1 Again)", + "Child Number (Table Field 1 Again)", + "table_field_1_again.child_another_number", + ], + ) + + table_field_1_row_1_name = doc.table_field_1[0].name + table_field_1_row_2_name = doc.table_field_1[1].name + # fmt: off + self.assertEqual( + csv_array[1], + ["Test", "Test Description", 0, 0, table_field_1_row_1_name, "Child Title 1", "Child Description 1", None, 0, "Child Title 1 Again", None, 0, 0] + ) + 
self.assertEqual( + csv_array[2], + ["", "", "", "", table_field_1_row_2_name, "Child Title 2", "Child Description 2", "", "", "", "", "", ""], + ) + # fmt: on + self.assertEqual(len(csv_array), 3) + + def test_export_csv_response(self): + e = Exporter( + doctype_name, + export_fields={doctype_name: ["title", "description"]}, + export_data=True, + file_type="CSV" + ) + e.build_response() + + self.assertTrue(frappe.response['result']) + self.assertEqual(frappe.response['doctype'], doctype_name) + self.assertEqual(frappe.response['type'], "csv") diff --git a/frappe/core/doctype/data_import/test_exporter_new.py b/frappe/core/doctype/data_import/test_exporter_new.py deleted file mode 100644 index 0d3aedb033..0000000000 --- a/frappe/core/doctype/data_import/test_exporter_new.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and Contributors -# See license.txt -from __future__ import unicode_literals - -import unittest -import frappe -from frappe.core.doctype.data_import.exporter_new import Exporter - - -class TestExporter(unittest.TestCase): - def test_exports_mandatory_fields(self): - e = Exporter('Web Page', export_fields='Mandatory') - csv_array = e.get_csv_array() - header_row = csv_array[0] - self.assertEqual(header_row, ['ID', 'Title']) - - - def test_exports_all_fields(self): - e = Exporter('Web Page', export_fields='All') - csv_array = e.get_csv_array() - header = csv_array[0] - self.assertEqual(len(header), 37) - - - def test_exports_selected_fields(self): - export_fields = { - 'Web Page': ['title', 'route', 'published'] - } - e = Exporter('Web Page', export_fields=export_fields) - csv_array = e.get_csv_array() - header = csv_array[0] - self.assertEqual(header, ['Title', 'Route', 'Published']) - - - def test_exports_data(self): - e = Exporter('ToDo', export_fields='All', export_data=True) - todo_records = frappe.db.count('ToDo') - csv_array = e.get_csv_array() - self.assertEqual(len(csv_array), todo_records + 
1) diff --git a/frappe/core/doctype/data_import/test_importer.py b/frappe/core/doctype/data_import/test_importer.py new file mode 100644 index 0000000000..bdadad7890 --- /dev/null +++ b/frappe/core/doctype/data_import/test_importer.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2019, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +import unittest +import frappe +from frappe.utils import getdate + +doctype_name = 'DocType for Import' + +class TestImporter(unittest.TestCase): + def setUp(self): + create_doctype_if_not_exists(doctype_name) + + def test_data_import_from_file(self): + import_file = get_import_file('sample_import_file') + data_import = self.get_importer(doctype_name, import_file) + data_import.start_import() + + doc1 = frappe.get_doc(doctype_name, 'Test') + doc2 = frappe.get_doc(doctype_name, 'Test 2') + doc3 = frappe.get_doc(doctype_name, 'Test 3') + + self.assertEqual(doc1.description, 'test description') + self.assertEqual(doc1.number, 1) + + self.assertEqual(doc1.table_field_1[0].child_title, 'child title') + self.assertEqual(doc1.table_field_1[0].child_description, 'child description') + + self.assertEqual(doc1.table_field_1[1].child_title, 'child title 2') + self.assertEqual(doc1.table_field_1[1].child_description, 'child description 2') + + self.assertEqual(doc1.table_field_2[1].child_2_title, 'title child') + self.assertEqual(doc1.table_field_2[1].child_2_date, getdate('2019-10-30')) + self.assertEqual(doc1.table_field_2[1].child_2_another_number, 5) + + self.assertEqual(doc1.table_field_1_again[0].child_title, 'child title again') + self.assertEqual(doc1.table_field_1_again[1].child_title, 'child title again 2') + self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22')) + + self.assertEqual(doc2.description, 'test description 2') + self.assertEqual(doc3.another_number, 5) + + def test_data_import_preview(self): + import_file = 
get_import_file('sample_import_file') + data_import = self.get_importer(doctype_name, import_file) + preview = data_import.get_preview_from_template() + + self.assertEqual(len(preview.data), 4) + self.assertEqual(len(preview.columns), 15) + + def test_data_import_without_mandatory_values(self): + import_file = get_import_file('sample_import_file_without_mandatory') + data_import = self.get_importer(doctype_name, import_file) + data_import.start_import() + data_import.reload() + warnings = frappe.parse_json(data_import.template_warnings) + + self.assertEqual(warnings[0]['row'], 2) + self.assertEqual(warnings[0]['message'], "Child Title (Table Field 1) is a mandatory field") + + self.assertEqual(warnings[1]['row'], 3) + self.assertEqual(warnings[1]['message'], "Child Title (Table Field 1 Again) is a mandatory field") + + self.assertEqual(warnings[2]['row'], 4) + self.assertEqual(warnings[2]['message'], "Title is a mandatory field") + + def test_data_import_update(self): + if not frappe.db.exists(doctype_name, 'Test 26'): + frappe.get_doc( + doctype=doctype_name, + title='Test 26' + ).insert() + + import_file = get_import_file('sample_import_file_for_update') + data_import = self.get_importer(doctype_name, import_file, update=True) + data_import.start_import() + + updated_doc = frappe.get_doc(doctype_name, 'Test 26') + self.assertEqual(updated_doc.description, 'test description') + self.assertEqual(updated_doc.table_field_1[0].child_title, 'child title') + self.assertEqual(updated_doc.table_field_1[0].child_description, 'child description') + self.assertEqual(updated_doc.table_field_1_again[0].child_title, 'child title again') + + def get_importer(self, doctype, import_file, update=False): + data_import = frappe.new_doc('Data Import') + data_import.import_type = 'Insert New Records' if not update else 'Update Existing Records' + data_import.reference_doctype = doctype + data_import.import_file = import_file.file_url + data_import.insert() + + return data_import + +def 
create_doctype_if_not_exists(doctype_name, force=False): + if force: + frappe.delete_doc_if_exists('DocType', doctype_name) + frappe.delete_doc_if_exists('DocType', 'Child 1 of ' + doctype_name) + frappe.delete_doc_if_exists('DocType', 'Child 2 of ' + doctype_name) + + if frappe.db.exists('DocType', doctype_name): + return + + # Child Table 1 + table_1_name = 'Child 1 of ' + doctype_name + frappe.get_doc({ + 'doctype': 'DocType', + 'name': table_1_name, + 'module': 'Custom', + 'custom': 1, + 'istable': 1, + 'fields': [ + {'label': 'Child Title', 'fieldname': 'child_title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Child Description', 'fieldname': 'child_description', 'fieldtype': 'Small Text'}, + {'label': 'Child Date', 'fieldname': 'child_date', 'fieldtype': 'Date'}, + {'label': 'Child Number', 'fieldname': 'child_number', 'fieldtype': 'Int'}, + {'label': 'Child Number', 'fieldname': 'child_another_number', 'fieldtype': 'Int'}, + ] + }).insert() + + # Child Table 2 + table_2_name = 'Child 2 of ' + doctype_name + frappe.get_doc({ + 'doctype': 'DocType', + 'name': table_2_name, + 'module': 'Custom', + 'custom': 1, + 'istable': 1, + 'fields': [ + {'label': 'Child 2 Title', 'fieldname': 'child_2_title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Child 2 Description', 'fieldname': 'child_2_description', 'fieldtype': 'Small Text'}, + {'label': 'Child 2 Date', 'fieldname': 'child_2_date', 'fieldtype': 'Date'}, + {'label': 'Child 2 Number', 'fieldname': 'child_2_number', 'fieldtype': 'Int'}, + {'label': 'Child 2 Number', 'fieldname': 'child_2_another_number', 'fieldtype': 'Int'}, + ] + }).insert() + + # Main Table + frappe.get_doc({ + 'doctype': 'DocType', + 'name': doctype_name, + 'module': 'Custom', + 'custom': 1, + 'autoname': 'field:title', + 'fields': [ + {'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'}, + {'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'}, + {'label': 'Date', 'fieldname': 'date', 
'fieldtype': 'Date'}, + {'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'}, + {'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'}, + {'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name}, + {'label': 'Table Field 2', 'fieldname': 'table_field_2', 'fieldtype': 'Table', 'options': table_2_name}, + {'label': 'Table Field 1 Again', 'fieldname': 'table_field_1_again', 'fieldtype': 'Table', 'options': table_1_name}, + ], + 'permissions': [ + {'role': 'System Manager'} + ] + }).insert() + + +def get_import_file(csv_file_name, force=False): + file_name = csv_file_name + '.csv' + _file = frappe.db.exists('File', {'file_name': file_name}) + if force and _file: + frappe.delete_doc_if_exists('File', _file) + + if frappe.db.exists('File', {'file_name': file_name}): + f = frappe.get_doc('File', {'file_name': file_name}) + else: + full_path = get_csv_file_path(file_name) + f = frappe.get_doc( + doctype='File', + content=frappe.read_file(full_path), + file_name=file_name, + is_private=1 + ) + f.save(ignore_permissions=True) + + return f + + +def get_csv_file_path(file_name): + return frappe.get_app_path('frappe', 'core', 'doctype', 'data_import', 'fixtures', file_name) diff --git a/frappe/core/doctype/data_import/test_importer_new.py b/frappe/core/doctype/data_import/test_importer_new.py deleted file mode 100644 index d6349daa55..0000000000 --- a/frappe/core/doctype/data_import/test_importer_new.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2019, Frappe Technologies and Contributors -# See license.txt -from __future__ import unicode_literals - -import datetime -import unittest -import frappe -from frappe.core.doctype.data_import.importer_new import Importer - -content_empty_rows = '''title,start_date,idx,show_title -,,, -est phasellus sit amet,5/20/2019,52,1 -nibh in,7/29/2019,77,1 -''' - -content_mandatory_missing = '''title,start_date,idx,show_title -,5/20/2019,52,1 
-''' - -content_convert_value = '''title,start_date,idx,show_title -est phasellus sit amet,5/20/2019,52,True -''' - -content_invalid_column = '''title,start_date,idx,show_title,invalid_column -est phasellus sit amet,5/20/2019,52,True,invalid value -''' - - -class TestImporter(unittest.TestCase): - def test_should_skip_empty_rows(self): - i = self.get_importer('Web Page', content=content_empty_rows) - payloads = i.get_payloads_for_import() - row_to_be_imported = [] - for p in payloads: - row_to_be_imported += [row[0] for row in p.rows] - self.assertEqual(len(row_to_be_imported), 2) - - def test_should_throw_if_mandatory_is_missing(self): - i = self.get_importer('Web Page', content=content_mandatory_missing) - i.import_data() - warning = i.warnings[0] - self.assertTrue('Title is a mandatory field' in warning['message']) - - def test_should_convert_value_based_on_fieldtype(self): - i = self.get_importer('Web Page', content=content_convert_value) - payloads = i.get_payloads_for_import() - doc = payloads[0].doc - - self.assertEqual(type(doc['show_title']), int) - self.assertEqual(type(doc['idx']), int) - self.assertEqual(type(doc['start_date']), datetime.datetime) - - def test_should_ignore_invalid_columns(self): - i = self.get_importer('Web Page', content=content_invalid_column) - payloads = i.get_payloads_for_import() - doc = payloads[0].doc - - self.assertTrue('invalid_column' not in doc) - self.assertTrue('title' in doc) - - def test_should_import_valid_template(self): - title = 'est phasellus sit amet {0}'.format(frappe.utils.random_string(8)) - content_valid_content = '''title,start_date,idx,show_title -{0},5/20/2019,52,1'''.format(title) - i = self.get_importer('Web Page', content=content_valid_content) - import_log = i.import_data() - log = import_log[0] - self.assertTrue(log.success) - doc = frappe.get_doc('Web Page', { 'title': title }) - self.assertEqual(frappe.utils.get_datetime_str(doc.start_date), - frappe.utils.get_datetime_str('2019-05-20')) - - def 
get_importer(self, doctype, content): - data_import = frappe.new_doc('Data Import Beta') - data_import.import_type = 'Insert New Records' - i = Importer(doctype, content=content, data_import=data_import) - return i diff --git a/frappe/core/doctype/data_import_beta/data_import_beta.js b/frappe/core/doctype/data_import_beta/data_import_beta.js deleted file mode 100644 index 527dbd7d0c..0000000000 --- a/frappe/core/doctype/data_import_beta/data_import_beta.js +++ /dev/null @@ -1,511 +0,0 @@ -// Copyright (c) 2019, Frappe Technologies and contributors -// For license information, please see license.txt - -frappe.ui.form.on('Data Import Beta', { - setup(frm) { - frappe.realtime.on('data_import_refresh', ({ data_import }) => { - frm.import_in_progress = false; - if (data_import !== frm.doc.name) return; - frappe.model.clear_doc('Data Import Beta', frm.doc.name); - frappe.model.with_doc('Data Import Beta', frm.doc.name).then(() => { - frm.refresh(); - }); - }); - frappe.realtime.on('data_import_progress', data => { - frm.import_in_progress = true; - if (data.data_import !== frm.doc.name) { - return; - } - let percent = Math.floor((data.current * 100) / data.total); - let seconds = Math.floor(data.eta); - let minutes = Math.floor(data.eta / 60); - let eta_message = - // prettier-ignore - seconds < 60 - ? __('About {0} seconds remaining', [seconds]) - : minutes === 1 - ? __('About {0} minute remaining', [minutes]) - : __('About {0} minutes remaining', [minutes]); - - let message; - if (data.success) { - let message_args = [data.current, data.total, eta_message]; - message = - frm.doc.import_type === 'Insert New Records' - ? 
__('Importing {0} of {1}, {2}', message_args) - : __('Updating {0} of {1}, {2}', message_args); - } - if (data.skipping) { - message = __('Skipping {0} of {1}, {2}', [ - data.current, - data.total, - eta_message - ]); - } - frm.dashboard.show_progress(__('Import Progress'), percent, message); - frm.page.set_indicator(__('In Progress'), 'orange'); - - // hide progress when complete - if (data.current === data.total) { - setTimeout(() => { - frm.dashboard.hide(); - frm.refresh(); - }, 2000); - } - }); - - frm.set_query('reference_doctype', () => { - return { - filters: { - allow_import: 1 - } - }; - }); - - frm.get_field('import_file').df.options = { - restrictions: { - allowed_file_types: ['.csv', '.xls', '.xlsx'] - } - }; - }, - - refresh(frm) { - frm.page.hide_icon_group(); - frm.trigger('update_indicators'); - frm.trigger('import_file'); - frm.trigger('show_import_log'); - frm.trigger('show_import_warnings'); - frm.trigger('toggle_submit_after_import'); - frm.trigger('show_import_status'); - frm.trigger('show_report_error_button'); - - if (frm.doc.status === 'Partial Success') { - frm.add_custom_button(__('Export Errored Rows'), () => - frm.trigger('export_errored_rows') - ); - } - - if (frm.doc.status.includes('Success')) { - frm.add_custom_button( - __('Go to {0} List', [frm.doc.reference_doctype]), - () => frappe.set_route('List', frm.doc.reference_doctype) - ); - } - - frm.disable_save(); - if (frm.doc.status !== 'Success') { - if (!frm.is_new() && frm.doc.import_file) { - let label = - frm.doc.status === 'Pending' ? 
__('Start Import') : __('Retry'); - frm.page.set_primary_action(label, () => frm.events.start_import(frm)); - } else { - frm.page.set_primary_action(__('Save'), () => frm.save()); - } - } - }, - - update_indicators(frm) { - const indicator = frappe.get_indicator(frm.doc); - if (indicator) { - frm.page.set_indicator(indicator[0], indicator[1]); - } else { - frm.page.clear_indicator(); - } - }, - - show_import_status(frm) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - let successful_records = import_log.filter(log => log.success); - let failed_records = import_log.filter(log => !log.success); - if (successful_records.length === 0) return; - - let message; - if (failed_records.length === 0) { - let message_args = [successful_records.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records.', message_args) - : __('Successfully imported {0} record.', message_args); - } else { - message = - successful_records.length > 1 - ? __('Successfully updated {0} records.', message_args) - : __('Successfully updated {0} record.', message_args); - } - } else { - let message_args = [successful_records.length, import_log.length]; - if (frm.doc.import_type === 'Insert New Records') { - message = - successful_records.length > 1 - ? __('Successfully imported {0} records out of {1}.', message_args) - : __('Successfully imported {0} record out of {1}.', message_args); - } else { - message = - successful_records.length > 1 - ? 
__('Successfully updated {0} records out of {1}.', message_args) - : __('Successfully updated {0} record out of {1}.', message_args); - } - } - frm.dashboard.set_headline(message); - }, - - show_report_error_button(frm) { - if (frm.doc.status === 'Error') { - frappe.db - .get_list('Error Log', { - filters: { method: frm.doc.name }, - fields: ['method', 'error'], - order_by: 'creation desc', - limit: 1 - }) - .then(result => { - if (result.length > 0) { - frm.add_custom_button('Report Error', () => { - let fake_xhr = { - responseText: JSON.stringify({ - exc: result[0].error - }) - }; - frappe.request.report_error(fake_xhr, {}); - }); - } - }); - } - }, - - start_import(frm) { - frm - .call({ - method: 'form_start_import', - args: { data_import: frm.doc.name }, - btn: frm.page.btn_primary - }) - .then(r => { - if (r.message === true) { - frm.disable_save(); - } - }); - }, - - download_template(frm) { - if ( - frm.data_exporter && - frm.data_exporter.doctype === frm.doc.reference_doctype - ) { - frm.data_exporter.dialog.show(); - set_export_records(); - } else { - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.data_exporter = new frappe.data_import.DataExporter( - frm.doc.reference_doctype - ); - set_export_records(); - }); - } - - function set_export_records() { - if (frm.doc.import_type === 'Insert New Records') { - frm.data_exporter.dialog.set_value('export_records', 'blank_template'); - } else { - frm.data_exporter.dialog.set_value('export_records', 'all'); - } - // Force ID field to be exported when updating existing records - let id_field = frm.data_exporter.dialog.get_field( - frm.doc.reference_doctype - ).options[0]; - if (id_field.value === 'name' && id_field.$checkbox) { - id_field.$checkbox - .find('input') - .prop('disabled', frm.doc.import_type === 'Update Existing Records'); - } - } - }, - - reference_doctype(frm) { - frm.trigger('toggle_submit_after_import'); - }, - - toggle_submit_after_import(frm) { - 
frm.toggle_display('submit_after_import', false); - let doctype = frm.doc.reference_doctype; - if (doctype) { - frappe.model.with_doctype(doctype, () => { - let meta = frappe.get_meta(doctype); - frm.toggle_display('submit_after_import', meta.is_submittable); - }); - } - }, - - import_file(frm) { - frm.toggle_display('section_import_preview', frm.doc.import_file); - if (!frm.doc.import_file) { - frm.get_field('import_preview').$wrapper.empty(); - return; - } - - // load import preview - frm.get_field('import_preview').$wrapper.empty(); - $('') - .html(__('Loading import file...')) - .appendTo(frm.get_field('import_preview').$wrapper); - - frm - .call({ - method: 'get_preview_from_template', - args: { data_import: frm.doc.name }, - error_handlers: { - TimestampMismatchError() { - // ignore this error - } - } - }) - .then(r => { - let preview_data = r.message; - frm.events.show_import_preview(frm, preview_data); - frm.events.show_import_warnings(frm, preview_data); - }); - }, - - show_import_preview(frm, preview_data) { - let import_log = JSON.parse(frm.doc.import_log || '[]'); - - if ( - frm.import_preview && - frm.import_preview.doctype === frm.doc.reference_doctype - ) { - frm.import_preview.preview_data = preview_data; - frm.import_preview.import_log = import_log; - frm.import_preview.refresh(); - return; - } - - frappe.require('/assets/js/data_import_tools.min.js', () => { - frm.import_preview = new frappe.data_import.ImportPreview({ - wrapper: frm.get_field('import_preview').$wrapper, - doctype: frm.doc.reference_doctype, - preview_data, - import_log, - frm, - events: { - remap_column(changed_map) { - let template_options = JSON.parse(frm.doc.template_options || '{}'); - template_options.remap_column = template_options.remap_column || {}; - Object.assign(template_options.remap_column, changed_map); - frm.set_value('template_options', JSON.stringify(template_options)); - frm.save().then(() => frm.trigger('import_file')); - } - } - }); - }); - }, - - 
export_errored_rows(frm) { - open_url_post( - '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_errored_template', - { - data_import_name: frm.doc.name - } - ); - }, - - show_import_warnings(frm, preview_data) { - let warnings = JSON.parse(frm.doc.template_warnings || '[]'); - warnings = warnings.concat(preview_data.warnings || []); - - frm.toggle_display('import_warnings_section', warnings.length > 0); - if (warnings.length === 0) { - frm.get_field('import_warnings').$wrapper.html(''); - return; - } - - // group warnings by row - let warnings_by_row = {}; - let other_warnings = []; - for (let warning of warnings) { - if (warning.row) { - warnings_by_row[warning.row] = warnings_by_row[warning.row] || []; - warnings_by_row[warning.row].push(warning); - } else { - other_warnings.push(warning); - } - } - - let html = ''; - html += Object.keys(warnings_by_row) - .map(row_number => { - let message = warnings_by_row[row_number] - .map(w => { - if (w.field) { - let label = - w.field.label + - (w.field.parent !== frm.doc.reference_doctype - ? ` (${w.field.parent})` - : ''); - return `${log.exception}
- | ${__('Row Number')} | -${__('Status')} | -${__('Message')} | -
|---|
{}
'.format(json.loads(msg).get('message')) for msg in frappe.local.message_log]) + else: + err_msg = '{}
'.format(cstr(e)) + + error_trace = frappe.get_traceback() + if error_trace: + error_log_doc = frappe.log_error(error_trace) + error_link = get_absolute_url("Error Log", error_log_doc.name) + else: + error_link = None + + log(**{ + "row": row_idx + 1, + "title": 'Error for row %s' % (len(row)>1 and frappe.safe_decode(row[1]) or ""), + "message": err_msg, + "indicator": "red", + "link":error_link + }) + + # data with error to create a new file + # include the errored data in the last row as last_error_row_idx will not be updated for the last row + if skip_errors: + if last_error_row_idx == len(rows)-1: + last_error_row_idx = len(rows) + data_rows_with_error += rows[row_idx:last_error_row_idx] + else: + rollback_flag = True + finally: + frappe.local.message_log = [] + + start_row += batch_size + if rollback_flag: + frappe.db.rollback() + else: + frappe.db.commit() + + frappe.flags.mute_emails = False + frappe.flags.in_import = False + + log_message = {"messages": import_log, "error": error_flag} + if data_import_doc: + data_import_doc.log_details = json.dumps(log_message) + + import_status = None + if error_flag and data_import_doc.skip_errors and len(data) != len(data_rows_with_error): + import_status = "Partially Successful" + # write the file with the faulty row + file_name = 'error_' + filename + file_extension + if file_extension == '.xlsx': + from frappe.utils.xlsxutils import make_xlsx + xlsx_file = make_xlsx(data_rows_with_error, "Data Import Template") + file_data = xlsx_file.getvalue() + else: + from frappe.utils.csvutils import to_csv + file_data = to_csv(data_rows_with_error) + _file = frappe.get_doc({ + "doctype": "File", + "file_name": file_name, + "attached_to_doctype": "Data Import Legacy", + "attached_to_name": data_import_doc.name, + "folder": "Home/Attachments", + "content": file_data}) + _file.save() + data_import_doc.error_file = _file.file_url + + elif error_flag: + import_status = "Failed" + else: + import_status = "Successful" + + 
data_import_doc.import_status = import_status + data_import_doc.save() + if data_import_doc.import_status in ["Successful", "Partially Successful"]: + data_import_doc.submit() + publish_progress(100, True) + else: + publish_progress(0, True) + frappe.db.commit() + else: + return log_message + +def get_parent_field(doctype, parenttype): + parentfield = None + + # get parentfield + if parenttype: + for d in frappe.get_meta(parenttype).get_table_fields(): + if d.options==doctype: + parentfield = d.fieldname + break + + if not parentfield: + frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype)) + raise Exception + + return parentfield + +def delete_child_rows(rows, doctype): + """delete child rows for all parents""" + for p in list(set([r[1] for r in rows])): + if p: + frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p) diff --git a/frappe/core/doctype/data_import/log_details.html b/frappe/core/doctype/data_import_legacy/log_details.html similarity index 100% rename from frappe/core/doctype/data_import/log_details.html rename to frappe/core/doctype/data_import_legacy/log_details.html diff --git a/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py new file mode 100644 index 0000000000..e5b244e6a0 --- /dev/null +++ b/frappe/core/doctype/data_import_legacy/test_data_import_legacy.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2020, Frappe Technologies and Contributors +# See license.txt +from __future__ import unicode_literals + +# import frappe +import unittest + +class TestDataImportLegacy(unittest.TestCase): + pass diff --git a/frappe/core/doctype/docfield/docfield.json b/frappe/core/doctype/docfield/docfield.json index 83d3c18453..aab59a5a0a 100644 --- a/frappe/core/doctype/docfield/docfield.json +++ b/frappe/core/doctype/docfield/docfield.json @@ -13,8 +13,8 @@ "fieldname", "precision", "length", - 
"show_days", - "show_seconds", + "hide_days", + "hide_seconds", "reqd", "search_index", "in_list_view", @@ -453,18 +453,18 @@ "fieldtype": "Column Break" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days" + "label": "Hide Days" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds" + "label": "Hide Seconds" }, { "default": "0", @@ -477,7 +477,7 @@ "idx": 1, "istable": 1, "links": [], - "modified": "2020-05-15 09:06:25.224411", + "modified": "2020-02-06 09:06:25.224413", "modified_by": "Administrator", "module": "Core", "name": "DocField", diff --git a/frappe/core/doctype/doctype/doctype.py b/frappe/core/doctype/doctype/doctype.py index 904deb9990..7f84555b79 100644 --- a/frappe/core/doctype/doctype/doctype.py +++ b/frappe/core/doctype/doctype/doctype.py @@ -406,9 +406,13 @@ class DocType(Document): with open(fname, 'r') as f: code = f.read() with open(fname, 'w') as f: - file_content = code.replace(old, new) # replace str with full str (js controllers) - file_content = file_content.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports) - file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers) + if fname.endswith('.js'): + file_content = code.replace(old, new) # replace str with full str (js controllers) + + elif fname.endswith('.py'): + file_content = code.replace(frappe.scrub(old), frappe.scrub(new)) # replace str with _ (py imports) + file_content = file_content.replace(old.replace(' ', ''), new.replace(' ', '')) # replace str (py controllers) + f.write(file_content) # updating json file with 
new name @@ -688,6 +692,9 @@ def validate_fields(meta): def check_link_table_options(docname, d): if frappe.flags.in_patch: return + + if frappe.flags.in_fixtures: return + if d.fieldtype in ("Link",) + table_fields: if not d.options: frappe.throw(_("{0}: Options required for Link or Table type field {1} in row {2}").format(docname, d.label, d.idx), DoctypeLinkError) @@ -908,6 +915,8 @@ def validate_fields(meta): frappe.msgprint(text_str + df_options_str, title="Invalid Data Field", raise_exception=True) def check_child_table_option(docfield): + + if frappe.flags.in_fixtures: return if docfield.fieldtype not in ['Table MultiSelect', 'Table']: return doctype = docfield.options diff --git a/frappe/core/doctype/file/file.json b/frappe/core/doctype/file/file.json index d9ab504db7..3008e27aa0 100644 --- a/frappe/core/doctype/file/file.json +++ b/frappe/core/doctype/file/file.json @@ -1,4 +1,5 @@ { + "actions": [], "allow_import": 1, "creation": "2012-12-12 11:19:22", "doctype": "DocType", @@ -63,7 +64,8 @@ "fieldname": "is_home_folder", "fieldtype": "Check", "hidden": 1, - "label": "Is Home Folder" + "label": "Is Home Folder", + "search_index": 1 }, { "default": "0", @@ -172,7 +174,8 @@ ], "icon": "fa fa-file", "idx": 1, - "modified": "2019-08-30 19:46:20.796453", + "links": [], + "modified": "2020-06-28 12:21:30.772386", "modified_by": "Administrator", "module": "Core", "name": "File", diff --git a/frappe/core/doctype/file/file.py b/frappe/core/doctype/file/file.py index a17b3acd02..1748c60020 100755 --- a/frappe/core/doctype/file/file.py +++ b/frappe/core/doctype/file/file.py @@ -100,26 +100,26 @@ class File(Document): self.validate_file() self.generate_content_hash() - self.validate_url() - if frappe.db.exists('File', {'name': self.name, 'is_folder': 0}): old_file_url = self.file_url if not self.is_folder and (self.is_private != self.db_get('is_private')): private_files = frappe.get_site_path('private', 'files') public_files = frappe.get_site_path('public', 'files') 
+ file_name = self.file_url.split('/')[-1] if not self.is_private: - shutil.move(os.path.join(private_files, self.file_name), - os.path.join(public_files, self.file_name)) + shutil.move(os.path.join(private_files, file_name), + os.path.join(public_files, file_name)) - self.file_url = "/files/{0}".format(self.file_name) + self.file_url = "/files/{0}".format(file_name) else: - shutil.move(os.path.join(public_files, self.file_name), - os.path.join(private_files, self.file_name)) + shutil.move(os.path.join(public_files, file_name), + os.path.join(private_files, file_name)) - self.file_url = "/private/files/{0}".format(self.file_name) + self.file_url = "/private/files/{0}".format(file_name) + update_existing_file_docs(self) # update documents image url with new file url if self.attached_to_doctype and self.attached_to_name: @@ -135,6 +135,8 @@ class File(Document): frappe.db.set_value(self.attached_to_doctype, self.attached_to_name, self.attached_to_field, self.file_url) + self.validate_url() + if self.file_url and (self.is_private != self.file_url.startswith('/private')): frappe.throw(_('Invalid file URL. 
Please contact System Administrator.')) @@ -182,13 +184,7 @@ class File(Document): if duplicate_file: duplicate_file_doc = frappe.get_cached_doc('File', duplicate_file.name) if duplicate_file_doc.exists_on_disk(): - # if it is attached to a document then throw DuplicateEntryError - if self.attached_to_doctype and self.attached_to_name: - self.duplicate_entry = duplicate_file.name - frappe.throw(_("Same file has already been attached to the record"), - frappe.DuplicateEntryError) - # else just use the url, to avoid uploading a duplicate - else: + # just use the url, to avoid uploading a duplicate self.file_url = duplicate_file.file_url def set_file_name(self): @@ -714,7 +710,12 @@ def remove_all(dt, dn, from_delete=False): try: for fid in frappe.db.sql_list("""select name from `tabFile` where attached_to_doctype=%s and attached_to_name=%s""", (dt, dn)): - remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete) + if from_delete: + # If deleting a doc, directly delete files + frappe.delete_doc("File", fid, ignore_permissions=True) + else: + # Removes file and adds a comment in the document it is attached to + remove_file(fid=fid, attached_to_doctype=dt, attached_to_name=dn, from_delete=from_delete) except Exception as e: if e.args[0]!=1054: raise # (temp till for patched) @@ -904,3 +905,20 @@ def get_files_in_folder(folder): { 'folder': folder }, ['name', 'file_name', 'file_url', 'is_folder', 'modified'] ) + +def update_existing_file_docs(doc): + # Update is private and file url of all file docs that point to the same file + frappe.db.sql(""" + UPDATE `tabFile` + SET + file_url = %(file_url)s, + is_private = %(is_private)s + WHERE + content_hash = %(content_hash)s + and name != %(file_name)s + """, dict( + file_url=doc.file_url, + is_private=doc.is_private, + content_hash=doc.content_hash, + file_name=doc.name + )) diff --git a/frappe/core/doctype/file/test_file.py b/frappe/core/doctype/file/test_file.py index cc9628ed5b..ec4f97bf67 
100644 --- a/frappe/core/doctype/file/test_file.py +++ b/frappe/core/doctype/file/test_file.py @@ -294,4 +294,37 @@ class TestFile(unittest.TestCase): folder = frappe.get_doc("File", "Home/Test Folder 1/Test Folder 3") self.assertRaises(frappe.ValidationError, folder.delete) + def test_same_file_url_update(self): + attached_to_doctype1, attached_to_docname1 = make_test_doc() + attached_to_doctype2, attached_to_docname2 = make_test_doc() + + file1 = frappe.get_doc({ + "doctype": "File", + "file_name": 'file1.txt', + "attached_to_doctype": attached_to_doctype1, + "attached_to_name": attached_to_docname1, + "is_private": 1, + "content": test_content1}).insert() + + file2 = frappe.get_doc({ + "doctype": "File", + "file_name": 'file2.txt', + "attached_to_doctype": attached_to_doctype2, + "attached_to_name": attached_to_docname2, + "is_private": 1, + "content": test_content1}).insert() + + self.assertEqual(file1.is_private, file2.is_private, 1) + self.assertEqual(file1.file_url, file2.file_url) + self.assertTrue(os.path.exists(file1.get_full_path())) + + file1.is_private = 0 + file1.save() + + file2 = frappe.get_doc('File', file2.name) + + self.assertEqual(file1.is_private, file2.is_private, 0) + self.assertEqual(file1.file_url, file2.file_url) + self.assertTrue(os.path.exists(file2.get_full_path())) + diff --git a/frappe/core/doctype/installed_applications/installed_applications.py b/frappe/core/doctype/installed_applications/installed_applications.py index aa0401f368..4e6eadf07e 100644 --- a/frappe/core/doctype/installed_applications/installed_applications.py +++ b/frappe/core/doctype/installed_applications/installed_applications.py @@ -12,7 +12,7 @@ class InstalledApplications(Document): for app in frappe.utils.get_installed_apps_info(): self.append("installed_applications", { "app_name": app.get("app_name"), - "app_version": app.get("version"), - "git_branch": app.get("branch") + "app_version": app.get("version") or "UNVERSIONED", + "git_branch": app.get("branch") or 
"UNVERSIONED" }) self.save() \ No newline at end of file diff --git a/frappe/core/doctype/module_def/module_def.py b/frappe/core/doctype/module_def/module_def.py index 0c0e7c4f45..755cb86dbe 100644 --- a/frappe/core/doctype/module_def/module_def.py +++ b/frappe/core/doctype/module_def/module_def.py @@ -42,6 +42,10 @@ class ModuleDef(Document): def on_trash(self): """Delete module name from modules.txt""" + + if frappe.flags.in_uninstall: + return + modules = None if frappe.local.module_app.get(frappe.scrub(self.name)): with open(frappe.get_app_path(self.app_name, "modules.txt"), "r") as f: diff --git a/frappe/core/doctype/report/report.js b/frappe/core/doctype/report/report.js index 818c5951e6..b4c30f5bbd 100644 --- a/frappe/core/doctype/report/report.js +++ b/frappe/core/doctype/report/report.js @@ -1,6 +1,6 @@ frappe.ui.form.on('Report', { refresh: function(frm) { - if(!frappe.boot.developer_mode && frappe.session.user !== 'Administrator') { + if (frm.doc.is_standard && !frappe.boot.developer_mode) { // make the document read-only frm.set_read_only(); } diff --git a/frappe/core/doctype/role/role.py b/frappe/core/doctype/role/role.py index 7ce2537da3..657340ec24 100644 --- a/frappe/core/doctype/role/role.py +++ b/frappe/core/doctype/role/role.py @@ -22,16 +22,28 @@ class Role(Document): frappe.db.sql("delete from `tabHas Role` where role = %s", self.name) frappe.clear_cache() + def on_update(self): + '''update system user desk access if this has changed in this update''' + if frappe.flags.in_install: return + if self.has_value_changed('desk_access'): + for user_name in get_users(self.name): + user = frappe.get_doc('User', user_name) + user_type = user.user_type + user.set_system_user() + if user_type != user.user_type: + user.save() + # Get email addresses of all users that have been assigned this role def get_emails_from_role(role): emails = [] - users = frappe.get_list("Has Role", filters={"role": role, "parenttype": "User"}, - fields=["parent"]) - - for user in 
users: - user_email, enabled = frappe.db.get_value("User", user.parent, ["email", "enabled"]) + for user in get_users(role): + user_email, enabled = frappe.db.get_value("User", user, ["email", "enabled"]) if enabled and user_email not in ["admin@example.com", "guest@example.com"]: emails.append(user_email) - return emails \ No newline at end of file + return emails + +def get_users(role): + return [d.parent for d in frappe.get_all("Has Role", filters={"role": role, "parenttype": "User"}, + fields=["parent"])] diff --git a/frappe/core/doctype/role/test_role.py b/frappe/core/doctype/role/test_role.py index 31efb5d4e8..6459a72c98 100644 --- a/frappe/core/doctype/role/test_role.py +++ b/frappe/core/doctype/role/test_role.py @@ -23,3 +23,28 @@ class TestUser(unittest.TestCase): frappe.get_doc("User", "test@example.com").add_roles("_Test Role 3") self.assertTrue("_Test Role 3" in frappe.get_roles("test@example.com")) + + def test_change_desk_access(self): + '''if we change desk acecss from role, remove from user''' + frappe.delete_doc_if_exists('User', 'test-user-for-desk-access@example.com') + frappe.delete_doc_if_exists('Role', 'desk-access-test') + user = frappe.get_doc(dict( + doctype='User', + email='test-user-for-desk-access@example.com', + first_name='test')).insert() + role = frappe.get_doc(dict( + doctype = 'Role', + role_name = 'desk-access-test', + desk_access = 0 + )).insert() + user.add_roles(role.name) + user.save() + self.assertTrue(user.user_type=='Website User') + role.desk_access = 1 + role.save() + user.reload() + self.assertTrue(user.user_type=='System User') + role.desk_access = 0 + role.save() + user.reload() + self.assertTrue(user.user_type=='Website User') diff --git a/frappe/core/doctype/server_script/server_script.py b/frappe/core/doctype/server_script/server_script.py index 9522b77b4b..539ae8eb01 100644 --- a/frappe/core/doctype/server_script/server_script.py +++ b/frappe/core/doctype/server_script/server_script.py @@ -42,7 +42,7 @@ class 
ServerScript(Document): @frappe.whitelist() def setup_scheduler_events(script_name, frequency): - method = frappe.scrub(script_name) + '_' + frequency.lower() + method = frappe.scrub('{0}-{1}'.format(script_name, frequency)) scheduled_script = frappe.db.get_value('Scheduled Job Type', dict(method=method)) diff --git a/frappe/core/doctype/session_default_settings/session_default_settings.py b/frappe/core/doctype/session_default_settings/session_default_settings.py index 453ece2890..7b4bd19e9a 100644 --- a/frappe/core/doctype/session_default_settings/session_default_settings.py +++ b/frappe/core/doctype/session_default_settings/session_default_settings.py @@ -28,8 +28,7 @@ def get_session_default_values(): @frappe.whitelist() def set_session_default_values(default_values): - if not frappe.flags.in_test: - default_values = json.loads(default_values) + default_values = frappe.parse_json(default_values) for entry in default_values: try: frappe.defaults.set_user_default(entry, default_values.get(entry)) diff --git a/frappe/core/doctype/system_settings/system_settings.json b/frappe/core/doctype/system_settings/system_settings.json index 1d0cda95a4..b2cb67dbc9 100644 --- a/frappe/core/doctype/system_settings/system_settings.json +++ b/frappe/core/doctype/system_settings/system_settings.json @@ -59,6 +59,7 @@ "column_break_18", "disable_standard_email_footer", "hide_footer_in_auto_email_reports", + "attach_view_link", "chat", "enable_chat", "use_socketio_to_upload_file" @@ -422,12 +423,18 @@ "fieldname": "enable_onboarding", "fieldtype": "Check", "label": "Enable Onboarding" + }, + { + "default": "1", + "fieldname": "attach_view_link", + "fieldtype": "Check", + "label": "Send document Web View link in email" } ], "icon": "fa fa-cog", "issingle": 1, "links": [], - "modified": "2020-05-01 19:21:15.496065", + "modified": "2020-07-02 16:13:00.166382", "modified_by": "Administrator", "module": "Core", "name": "System Settings", diff --git a/frappe/core/doctype/user/user.py 
b/frappe/core/doctype/user/user.py index 0c5ebc3ede..64bff32189 100644 --- a/frappe/core/doctype/user/user.py +++ b/frappe/core/doctype/user/user.py @@ -4,7 +4,7 @@ from __future__ import unicode_literals, print_function import frappe from frappe.model.document import Document -from frappe.utils import cint, has_gravatar, format_datetime, now_datetime, get_formatted_email, today +from frappe.utils import cint, flt, has_gravatar, escape_html, format_datetime, now_datetime, get_formatted_email, today from frappe import throw, msgprint, _ from frappe.utils.password import update_password as _update_password from frappe.desk.notifications import clear_notifications @@ -770,7 +770,7 @@ def sign_up(email, full_name, redirect_to): user = frappe.get_doc({ "doctype":"User", "email": email, - "first_name": full_name, + "first_name": escape_html(full_name), "enabled": 1, "new_password": random_string(10), "user_type": "Website User" @@ -811,6 +811,7 @@ def reset_password(user): frappe.clear_messages() return 'not found' +@frappe.whitelist() def user_query(doctype, txt, searchfield, start, page_len, filters): from frappe.desk.reportview import get_match_cond @@ -841,11 +842,11 @@ def user_query(doctype, txt, searchfield, start, page_len, filters): def get_total_users(): """Returns total no. 
of system users""" - return frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) + return flt(frappe.db.sql('''SELECT SUM(`simultaneous_sessions`) FROM `tabUser` WHERE `enabled` = 1 AND `user_type` = 'System User' - AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0] + AND `name` NOT IN ({})'''.format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS)[0][0]) def get_system_users(exclude_users=None, limit=None): if not exclude_users: diff --git a/frappe/core/doctype/version/version.py b/frappe/core/doctype/version/version.py index 216cdb1716..7654db4ae5 100644 --- a/frappe/core/doctype/version/version.py +++ b/frappe/core/doctype/version/version.py @@ -21,6 +21,17 @@ class Version(Document): else: return False + def for_insert(self, doc): + updater_reference = doc.flags.updater_reference + data = { + 'creation': doc.creation, + 'updater_reference': updater_reference, + 'created_by': doc.owner + } + self.ref_doctype = doc.doctype + self.docname = doc.name + self.data = frappe.as_json(data) + def get_data(self): return json.loads(self.data) diff --git a/frappe/core/page/dashboard/dashboard.js b/frappe/core/page/dashboard/dashboard.js index f17bc1e0b5..cee230265f 100644 --- a/frappe/core/page/dashboard/dashboard.js +++ b/frappe/core/page/dashboard/dashboard.js @@ -172,19 +172,26 @@ class Dashboard { set_dropdown() { this.page.clear_menu(); - this.page.add_menu_item('Edit...', () => { + this.page.add_menu_item(__('Edit'), () => { frappe.set_route('Form', 'Dashboard', frappe.dashboard.dashboard_name); - }, 1); + }); - this.page.add_menu_item('New...', () => { + this.page.add_menu_item(__('New'), () => { frappe.new_doc('Dashboard'); - }, 1); + }); - frappe.db.get_list("Dashboard").then(dashboards => { + this.page.add_menu_item(__('Refresh All'), () => { + this.chart_group && + this.chart_group.widgets_list.forEach(chart => chart.refresh()); + this.number_card_group && + this.number_card_group.widgets_list.forEach(card => 
card.render_card()); + }); + + frappe.db.get_list('Dashboard').then(dashboards => { dashboards.map(dashboard => { let name = dashboard.name; if(name != this.dashboard_name){ - this.page.add_menu_item(name, () => frappe.set_route("dashboard", name)); + this.page.add_menu_item(name, () => frappe.set_route("dashboard", name), 1); } }); }); diff --git a/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py b/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py index 95a04360be..8b2d1e01fa 100644 --- a/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py +++ b/frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py @@ -5,23 +5,23 @@ from __future__ import unicode_literals import frappe from frappe import _, throw import frappe.utils.user -from frappe.permissions import check_admin_or_system_manager +from frappe.permissions import check_admin_or_system_manager, rights from frappe.model import data_fieldtypes def execute(filters=None): user, doctype, show_permissions = filters.get("user"), filters.get("doctype"), filters.get("show_permissions") + if not validate(user, doctype): return [], [] columns, fields = get_columns_and_fields(doctype) data = frappe.get_list(doctype, fields=fields, as_list=True, user=user) if show_permissions: - columns = columns + ["Read", "Write", "Create", "Delete", "Submit", "Cancel", "Amend", "Print", "Email", - "Report", "Import", "Export", "Share"] + columns = columns + [frappe.unscrub(right) + ':Check:80' for right in rights] data = list(data) - for i,item in enumerate(data): - temp = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, item[0]), False,user) - data[i] = item+(temp.get("read"),temp.get("write"),temp.get("create"),temp.get("delete"),temp.get("submit"),temp.get("cancel"),temp.get("amend"),temp.get("print"),temp.get("email"),temp.get("report"),temp.get("import"),temp.get("export"),temp.get("share"),) + for i, doc in 
enumerate(data): + permission = frappe.permissions.get_doc_permissions(frappe.get_doc(doctype, doc[0]), user) + data[i] = doc + tuple(permission.get(right) for right in rights) return columns, data diff --git a/frappe/custom/doctype/custom_field/custom_field.json b/frappe/custom/doctype/custom_field/custom_field.json index 77490c8c43..6fa7b29161 100644 --- a/frappe/custom/doctype/custom_field/custom_field.json +++ b/frappe/custom/doctype/custom_field/custom_field.json @@ -16,8 +16,8 @@ "column_break_6", "fieldtype", "precision", - "show_seconds", - "show_days", + "hide_seconds", + "hide_days", "options", "fetch_from", "fetch_if_empty", @@ -383,22 +383,18 @@ "label": "In Preview" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Seconds" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Days" }, { "default": "0", @@ -411,7 +407,7 @@ "icon": "fa fa-glass", "idx": 1, "links": [], - "modified": "2020-05-15 23:43:00.123572", + "modified": "2020-02-06 23:43:00.123575", "modified_by": "Administrator", "module": "Custom", "name": "Custom Field", diff --git a/frappe/custom/doctype/custom_field/custom_field.py b/frappe/custom/doctype/custom_field/custom_field.py index a24777a80a..bc325b654e 100644 --- a/frappe/custom/doctype/custom_field/custom_field.py +++ b/frappe/custom/doctype/custom_field/custom_field.py @@ -31,6 +31,13 @@ class CustomField(Document): # fieldnames should be lowercase self.fieldname = self.fieldname.lower() + def before_insert(self): + meta = 
frappe.get_meta(self.dt, cached=False) + fieldnames = [df.fieldname for df in meta.get("fields")] + + if self.fieldname in fieldnames: + frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt)) + def validate(self): meta = frappe.get_meta(self.dt, cached=False) fieldnames = [df.fieldname for df in meta.get("fields")] @@ -46,9 +53,6 @@ class CustomField(Document): if not self.fieldname: frappe.throw(_("Fieldname not set for Custom Field")) - if self.fieldname in fieldnames: - frappe.throw(_("A field with the name '{}' already exists in doctype {}.").format(self.fieldname, self.dt)) - if self.get('translatable', 0) and not supports_translation(self.fieldtype): self.translatable = 0 @@ -68,6 +72,11 @@ class CustomField(Document): frappe.db.updatedb(self.dt) def on_trash(self): + #check if Admin owned field + if self.owner == 'Administrator' and frappe.session.user != 'Administrator': + frappe.throw(_("Custom Field {0} is created by the Administrator and can only be deleted through the Administrator account.").format( + frappe.bold(self.label))) + # delete property setter entries frappe.db.sql("""\ DELETE FROM `tabProperty Setter` diff --git a/frappe/custom/doctype/customize_form/customize_form.py b/frappe/custom/doctype/customize_form/customize_form.py index 6a54d9c7e6..d4eeba3f93 100644 --- a/frappe/custom/doctype/customize_form/customize_form.py +++ b/frappe/custom/doctype/customize_form/customize_form.py @@ -77,7 +77,9 @@ docfield_properties = { 'allow_bulk_edit': 'Check', 'auto_repeat': 'Link', 'allow_in_quick_entry': 'Check', - 'hide_border': 'Check' + 'hide_border': 'Check', + 'hide_days': 'Check', + 'hide_seconds': 'Check' } allowed_fieldtype_change = (('Currency', 'Float', 'Percent'), ('Small Text', 'Data'), diff --git a/frappe/custom/doctype/customize_form_field/customize_form_field.json b/frappe/custom/doctype/customize_form_field/customize_form_field.json index f422c36e61..267213517c 100644 --- 
a/frappe/custom/doctype/customize_form_field/customize_form_field.json +++ b/frappe/custom/doctype/customize_form_field/customize_form_field.json @@ -11,8 +11,8 @@ "label", "fieldtype", "fieldname", - "show_seconds", - "show_days", + "hide_seconds", + "hide_days", "reqd", "unique", "in_list_view", @@ -393,22 +393,18 @@ "label": "In Preview" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_seconds", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_seconds", "fieldtype": "Check", - "label": "Show Seconds", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Seconds" }, { - "default": "1", - "depends_on": "eval:doc.fieldtype === \"Duration\";", - "fieldname": "show_days", + "default": "0", + "depends_on": "eval:doc.fieldtype=='Duration'", + "fieldname": "hide_days", "fieldtype": "Check", - "label": "Show Days", - "show_days": 1, - "show_seconds": 1 + "label": "Hide Days" }, { "default": "0", @@ -421,7 +417,7 @@ "idx": 1, "istable": 1, "links": [], - "modified": "2020-05-15 23:45:46.810869", + "modified": "2020-06-02 23:45:46.810868", "modified_by": "Administrator", "module": "Custom", "name": "Customize Form Field", diff --git a/frappe/database/db_manager.py b/frappe/database/db_manager.py index 80236b2dc2..3345fce735 100644 --- a/frappe/database/db_manager.py +++ b/frappe/database/db_manager.py @@ -49,7 +49,7 @@ class DbManager: host = self.get_current_host() if frappe.conf.get('rds_db', 0) == 1: - self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE ON `%s`.* TO '%s'@'%s';" % (target, user, host)) + self.db.sql("GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES ON `%s`.* TO '%s'@'%s';" % (target, user, host)) else: 
self.db.sql("GRANT ALL PRIVILEGES ON `%s`.* TO '%s'@'%s';" % (target, user, host)) diff --git a/frappe/database/mariadb/framework_mariadb.sql b/frappe/database/mariadb/framework_mariadb.sql index bd93069a3f..af537e0612 100644 --- a/frappe/database/mariadb/framework_mariadb.sql +++ b/frappe/database/mariadb/framework_mariadb.sql @@ -64,6 +64,8 @@ CREATE TABLE `tabDocField` ( `length` int(11) NOT NULL DEFAULT 0, `translatable` int(1) NOT NULL DEFAULT 0, `hide_border` int(1) NOT NULL DEFAULT 0, + `hide_days` int(1) NOT NULL DEFAULT 0, + `hide_seconds` int(1) NOT NULL DEFAULT 0, PRIMARY KEY (`name`), KEY `parent` (`parent`), KEY `label` (`label`), diff --git a/frappe/database/mariadb/schema.py b/frappe/database/mariadb/schema.py index e806e8e415..4bbecd2a2e 100644 --- a/frappe/database/mariadb/schema.py +++ b/frappe/database/mariadb/schema.py @@ -82,5 +82,7 @@ class MariaDBTable(DBTable): fieldname = str(e).split("'")[-2] frappe.throw(_("{0} field cannot be set as unique in {1}, as there are non-unique existing values").format( fieldname, self.table_name)) + elif e.args[0]==1067: + frappe.throw(str(e.args[1])) else: raise e diff --git a/frappe/database/postgres/framework_postgres.sql b/frappe/database/postgres/framework_postgres.sql index 76309e7347..8f77ed6230 100644 --- a/frappe/database/postgres/framework_postgres.sql +++ b/frappe/database/postgres/framework_postgres.sql @@ -64,6 +64,8 @@ CREATE TABLE "tabDocField" ( "length" bigint NOT NULL DEFAULT 0, "translatable" smallint NOT NULL DEFAULT 0, "hide_border" smallint NOT NULL DEFAULT 0, + "hide_days" smallint NOT NULL DEFAULT 0, + "hide_seconds" smallint NOT NULL DEFAULT 0, PRIMARY KEY ("name") ) ; diff --git a/frappe/database/postgres/setup_db.py b/frappe/database/postgres/setup_db.py index 01a97178f9..1dc1ea4c97 100644 --- a/frappe/database/postgres/setup_db.py +++ b/frappe/database/postgres/setup_db.py @@ -1,7 +1,7 @@ import frappe, subprocess, os from six.moves import input -def setup_database(force, 
source_sql, verbose): +def setup_database(force, source_sql=None, verbose=False): root_conn = get_root_connection() root_conn.commit() root_conn.sql("DROP DATABASE IF EXISTS `{0}`".format(frappe.conf.db_name)) @@ -16,10 +16,12 @@ def setup_database(force, source_sql, verbose): subprocess_env = os.environ.copy() subprocess_env['PGPASSWORD'] = str(frappe.conf.db_password) # bootstrap db + if not source_sql: + source_sql = os.path.join(os.path.dirname(__file__), 'framework_postgres.sql') + subprocess.check_output([ 'psql', frappe.conf.db_name, '-h', frappe.conf.db_host or 'localhost', '-U', - frappe.conf.db_name, '-f', - os.path.join(os.path.dirname(__file__), 'framework_postgres.sql') + frappe.conf.db_name, '-f', source_sql ], env=subprocess_env) frappe.connect() diff --git a/frappe/desk/desktop.py b/frappe/desk/desktop.py index 956308568b..68b57a93d4 100644 --- a/frappe/desk/desktop.py +++ b/frappe/desk/desktop.py @@ -29,31 +29,56 @@ def handle_not_exist(fn): class Workspace: - def __init__(self, page_name): + def __init__(self, page_name, minimal=False): self.page_name = page_name self.extended_cards = [] self.extended_charts = [] self.extended_shortcuts = [] self.user = frappe.get_user() - self.allowed_modules = self.get_cached_value('user_allowed_modules', self.get_allowed_modules) + self.allowed_modules = self.get_cached('user_allowed_modules', self.get_allowed_modules) + self.doc = self.get_page_for_user() if self.doc.module not in self.allowed_modules: raise frappe.PermissionError - self.can_read = self.get_cached_value('user_perm_can_read', self.get_can_read_items) + self.can_read = self.get_cached('user_perm_can_read', self.get_can_read_items) self.allowed_pages = get_allowed_pages(cache=True) self.allowed_reports = get_allowed_reports(cache=True) - self.onboarding_doc = self.get_onboarding_doc() - self.onboarding = None - - self.table_counts = get_table_with_counts() + + if not minimal: + self.onboarding_doc = self.get_onboarding_doc() + self.onboarding = 
None + + self.table_counts = get_table_with_counts() self.restricted_doctypes = frappe.cache().get_value("domain_restricted_doctypes") or build_domain_restriced_doctype_cache() self.restricted_pages = frappe.cache().get_value("domain_restricted_pages") or build_domain_restriced_page_cache() - def get_cached_value(self, cache_key, fallback_fn): + def is_page_allowed(self): + cards = self.doc.cards + get_custom_reports_and_doctypes(self.doc.module) + self.extended_cards + shortcuts = self.doc.shortcuts + self.extended_shortcuts + + for section in cards: + links = loads(section.links) if isinstance(section.links, string_types) else section.links + for item in links: + if self.is_item_allowed(item.get('name'), item.get('type')): + return True + + def _in_active_domains(item): + if not item.restrict_to_domain: + return True + else: + return item.restrict_to_domain in frappe.get_active_domains() + + for item in shortcuts: + if self.is_item_allowed(item.link_to, item.type) and _in_active_domains(item): + return True + + return False + + def get_cached(self, cache_key, fallback_fn): _cache = frappe.cache() value = _cache.get_value(cache_key, user=frappe.session.user) @@ -83,12 +108,12 @@ class Workspace: 'extends': self.page_name, 'for_user': frappe.session.user } - pages = frappe.get_list("Desk Page", filters=filters) + pages = frappe.get_all("Desk Page", filters=filters, limit=1) if pages: - return frappe.get_doc("Desk Page", pages[0]) + return frappe.get_cached_doc("Desk Page", pages[0]) self.get_pages_to_extend() - return frappe.get_doc("Desk Page", self.page_name) + return frappe.get_cached_doc("Desk Page", self.page_name) def get_onboarding_doc(self): # Check if onboarding is enabled @@ -123,7 +148,7 @@ class Workspace: 'module': ['in', self.allowed_modules] }) - pages = [frappe.get_doc("Desk Page", page['name']) for page in pages] + pages = [frappe.get_cached_doc("Desk Page", page['name']) for page in pages] for page in pages: self.extended_cards = 
self.extended_cards + page.cards @@ -168,9 +193,9 @@ class Workspace: 'subtitle': _(self.onboarding_doc.subtitle), 'success': _(self.onboarding_doc.success_message), 'docs_url': self.onboarding_doc.documentation_url, - 'user_can_dismiss': self.onboarding_doc.user_can_dismiss, 'items': self.get_onboarding_steps() } + @handle_not_exist def get_cards(self): cards = self.doc.cards @@ -324,25 +349,44 @@ def get_desktop_page(page): } @frappe.whitelist() -def get_desk_sidebar_items(flatten=False): +def get_desk_sidebar_items(flatten=False, cache=True): """Get list of sidebar items for desk """ - # don't get domain restricted pages - blocked_modules = frappe.get_doc('User', frappe.session.user).get_blocked_modules() + pages = [] + _cache = frappe.cache() + if cache: + pages = _cache.get_value("desk_sidebar_items", user=frappe.session.user) + + if not pages or not cache: + # don't get domain restricted pages + blocked_modules = frappe.get_doc('User', frappe.session.user).get_blocked_modules() - filters = { - 'restrict_to_domain': ['in', frappe.get_active_domains()], - 'extends_another_page': 0, - 'for_user': '', - 'module': ['not in', blocked_modules] - } + filters = { + 'restrict_to_domain': ['in', frappe.get_active_domains()], + 'extends_another_page': 0, + 'for_user': '', + 'module': ['not in', blocked_modules] + } - if not frappe.local.conf.developer_mode: - filters['developer_mode_only'] = '0' + if not frappe.local.conf.developer_mode: + filters['developer_mode_only'] = '0' + + # pages sorted based on pinned to top and then by name + order_by = "pin_to_top desc, pin_to_bottom asc, name asc" + all_pages = frappe.get_all("Desk Page", fields=["name", "category"], filters=filters, order_by=order_by, ignore_permissions=True) + pages = [] + + # Filter Page based on Permission + for page in all_pages: + try: + wspace = Workspace(page.get('name'), True) + if wspace.is_page_allowed(): + pages.append(page) + except frappe.PermissionError: + pass + + 
_cache.set_value("desk_sidebar_items", pages, frappe.session.user) - # pages sorted based on pinned to top and then by name - order_by = "pin_to_top desc, pin_to_bottom asc, name asc" - pages = frappe.get_all("Desk Page", fields=["name", "category"], filters=filters, order_by=order_by, ignore_permissions=True) if flatten: return pages @@ -376,7 +420,7 @@ def get_custom_reports_and_doctypes(module): ] def get_custom_doctype_list(module): - doctypes = frappe.get_list("DocType", fields=["name"], filters={"custom": 1, "istable": 0, "module": module}, order_by="name", ignore_permissions=True) + doctypes = frappe.get_all("DocType", fields=["name"], filters={"custom": 1, "istable": 0, "module": module}, order_by="name") out = [] for d in doctypes: @@ -391,9 +435,9 @@ def get_custom_doctype_list(module): def get_custom_report_list(module): """Returns list on new style reports for modules.""" - reports = frappe.get_list("Report", fields=["name", "ref_doctype", "report_type"], filters= + reports = frappe.get_all("Report", fields=["name", "ref_doctype", "report_type"], filters= {"is_standard": "No", "disabled": 0, "module": module}, - order_by="name", ignore_permissions=True) + order_by="name") out = [] for r in reports: diff --git a/frappe/desk/doctype/calendar_view/calendar_view.json b/frappe/desk/doctype/calendar_view/calendar_view.json index 04839abc9f..ea220c335c 100644 --- a/frappe/desk/doctype/calendar_view/calendar_view.json +++ b/frappe/desk/doctype/calendar_view/calendar_view.json @@ -1,208 +1,81 @@ { - "allow_copy": 0, - "allow_guest_to_view": 0, - "allow_import": 0, - "allow_rename": 0, + "actions": [], "autoname": "Prompt", - "beta": 0, "creation": "2017-10-23 13:02:10.295824", - "custom": 0, - "docstatus": 0, "doctype": "DocType", - "document_type": "", "editable_grid": 1, "engine": "InnoDB", + "field_order": [ + "reference_doctype", + "subject_field", + "start_date_field", + "end_date_field", + "column_break_5", + "all_day" + ], "fields": [ { - 
"allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "reference_doctype", "fieldtype": "Link", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Reference Document Type", - "length": 0, - "no_copy": 0, "options": "DocType", - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "subject_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, "in_list_view": 1, - "in_standard_filter": 0, "label": "Subject Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - "allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "start_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "Start Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 }, { - "allow_bulk_edit": 0, - 
"allow_on_submit": 0, - "bold": 0, - "collapsible": 0, - "columns": 0, "fieldname": "end_date_field", "fieldtype": "Select", - "hidden": 0, - "ignore_user_permissions": 0, - "ignore_xss_filter": 0, - "in_filter": 0, - "in_global_search": 0, - "in_list_view": 0, - "in_standard_filter": 0, "label": "End Date Field", - "length": 0, - "no_copy": 0, - "permlevel": 0, - "precision": "", - "print_hide": 0, - "print_hide_if_no_value": 0, - "read_only": 0, - "remember_last_selected_value": 0, - "report_hide": 0, - "reqd": 1, - "search_index": 0, - "set_only_once": 0, - "translatable": 0, - "unique": 0 + "reqd": 1 + }, + { + "fieldname": "column_break_5", + "fieldtype": "Column Break" + }, + { + "default": "0", + "fieldname": "all_day", + "fieldtype": "Check", + "label": "All Day" } ], - "has_web_view": 0, - "hide_heading": 0, - "hide_toolbar": 0, - "idx": 0, - "image_view": 0, - "in_create": 0, - "is_submittable": 0, - "issingle": 0, - "istable": 0, - "max_attachments": 0, - "modified": "2019-09-05 14:22:27.664645", + "links": [], + "modified": "2020-06-15 11:24:57.639430", "modified_by": "Administrator", "module": "Desk", "name": "Calendar View", - "name_case": "", "owner": "faris@erpnext.com", "permissions": [ { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, "create": 1, "delete": 1, "email": 1, "export": 1, - "if_owner": 0, - "import": 0, - "permlevel": 0, "print": 1, "read": 1, "report": 1, "role": "System Manager", - "set_user_permissions": 0, "share": 1, - "submit": 0, "write": 1 }, { - "amend": 0, - "apply_user_permissions": 0, - "cancel": 0, - "create": 0, - "delete": 0, - "email": 0, - "export": 0, - "if_owner": 0, - "import": 0, - "permlevel": 0, - "print": 0, "read": 1, - "report": 0, - "role": "All", - "set_user_permissions": 0, - "share": 0, - "submit": 0, - "write": 0 + "role": "All" } ], - "quick_entry": 0, - "read_only": 0, - "read_only_onload": 0, - "show_name_in_global_search": 0, "sort_field": "modified", - "sort_order": "DESC", - 
"track_changes": 0, - "track_seen": 0 + "sort_order": "DESC" } \ No newline at end of file diff --git a/frappe/desk/doctype/dashboard/dashboard.js b/frappe/desk/doctype/dashboard/dashboard.js index 609e943995..237b549433 100644 --- a/frappe/desk/doctype/dashboard/dashboard.js +++ b/frappe/desk/doctype/dashboard/dashboard.js @@ -5,10 +5,14 @@ frappe.ui.form.on('Dashboard', { refresh: function(frm) { frm.add_custom_button(__("Show Dashboard"), () => frappe.set_route('dashboard', frm.doc.name)); + if (!frappe.boot.developer_mode) { + frm.disable_form(); + } + frm.set_query("chart", "charts", function() { return { filters: { - is_public: 1 + is_public: 1, } }; }); @@ -16,7 +20,7 @@ frappe.ui.form.on('Dashboard', { frm.set_query("card", "cards", function() { return { filters: { - is_public: 1 + is_public: 1, } }; }); diff --git a/frappe/desk/doctype/dashboard/dashboard.json b/frappe/desk/doctype/dashboard/dashboard.json index c0e2bddcf8..c7128823fe 100644 --- a/frappe/desk/doctype/dashboard/dashboard.json +++ b/frappe/desk/doctype/dashboard/dashboard.json @@ -1,5 +1,6 @@ { "actions": [], + "allow_rename": 1, "autoname": "field:dashboard_name", "creation": "2019-01-10 12:54:40.938705", "doctype": "DocType", @@ -8,6 +9,8 @@ "field_order": [ "dashboard_name", "is_default", + "is_standard", + "module", "charts", "chart_options", "cards" @@ -35,21 +38,35 @@ "reqd": 1 }, { - "description": "Set Default Options for all charts on this Dashboard (Ex: \"colors\": [\"#d1d8dd\", \"#ff5858\"])", - "fieldname": "chart_options", - "fieldtype": "Code", - "label": "Chart Options", - "options": "JSON" + "description": "Set Default Options for all charts on this Dashboard (Ex: \"colors\": [\"#d1d8dd\", \"#ff5858\"])", + "fieldname": "chart_options", + "fieldtype": "Code", + "label": "Chart Options", + "options": "JSON" }, { "fieldname": "cards", "fieldtype": "Table", "label": "Cards", "options": "Number Card Link" + }, + { + "default": "0", + "fieldname": "is_standard", + "fieldtype": 
"Check", + "label": "Is Standard" + }, + { + "depends_on": "eval: doc.is_standard", + "fieldname": "module", + "fieldtype": "Link", + "label": "Module", + "mandatory_depends_on": "eval: doc.is_standard", + "options": "Module Def" } ], "links": [], - "modified": "2020-04-29 13:26:37.362482", + "modified": "2020-07-10 17:48:19.468813", "modified_by": "Administrator", "module": "Desk", "name": "Dashboard", diff --git a/frappe/desk/doctype/dashboard/dashboard.py b/frappe/desk/doctype/dashboard/dashboard.py index af0c48d9c6..b12bcfe27d 100644 --- a/frappe/desk/doctype/dashboard/dashboard.py +++ b/frappe/desk/doctype/dashboard/dashboard.py @@ -4,6 +4,7 @@ from __future__ import unicode_literals from frappe.model.document import Document +from frappe.modules.export_file import export_to_files import frappe from frappe import _ import json @@ -15,7 +16,23 @@ class Dashboard(Document): frappe.db.sql('''update tabDashboard set is_default = 0 where name != %s''', self.name) + if frappe.conf.developer_mode and self.is_standard: + export_to_files(record_list=[['Dashboard', self.name, self.module + ' Dashboard']], record_module=self.module) + def validate(self): + if not frappe.conf.developer_mode and self.is_standard: + frappe.throw('Cannot edit Standard Dashboards') + + if self.is_standard: + non_standard_docs_map = { + 'Dashboard Chart': get_non_standard_charts_in_dashboard(self), + 'Number Card': get_non_standard_cards_in_dashboard(self) + } + + if non_standard_docs_map['Dashboard Chart'] or non_standard_docs_map['Number Card']: + message = get_non_standard_warning_message(non_standard_docs_map) + frappe.throw(message, title=_("Standard Not Set"), is_minimizable=True) + self.validate_custom_options() def validate_custom_options(self): @@ -48,3 +65,29 @@ def get_permitted_cards(dashboard_name): if frappe.has_permission('Number Card', doc=card.card): permitted_cards.append(card) return permitted_cards + +def get_non_standard_charts_in_dashboard(dashboard): + 
non_standard_charts = [doc.name for doc in frappe.get_list('Dashboard Chart', {'is_standard': 0})] + return [chart_link.chart for chart_link in dashboard.charts if chart_link.chart in non_standard_charts] + +def get_non_standard_cards_in_dashboard(dashboard): + non_standard_cards = [doc.name for doc in frappe.get_list('Number Card', {'is_standard': 0})] + return [card_link.card for card_link in dashboard.cards if card_link.card in non_standard_cards] + +def get_non_standard_warning_message(non_standard_docs_map): + message = _('''Please set the following documents in this Dashboard as standard first.''') + + def get_html(docs, doctype): + html = '{}
'.format(frappe.bold(doctype)) + for doc in docs: + html += ''.format(doctype=doctype, doc=doc) + html += '| ${__('Filter')} | -${__('Condition')} | +${__('Filter')} | +${__('Condition')} | ${__('Value')} |
|---|---|---|---|---|
| ${__('Filter')} | +${__('Condition')} | +${__('Value')} | +
|---|
+
+[{
+ fieldname: "company",
+ label: __("Company"),
+ fieldtype: "Link",
+ options: "Company",
+ default: frappe.defaults.get_user_default("Company"),
+ reqd: 1
+},
+{
+ fieldname: "account",
+ label: __("Account"),
+ fieldtype: "Link",
+ options: "Account",
+ reqd: 1
+}]
+`);
+ }
+ },
+
+ set_method_description: function(frm) {
+ if (frm.doc.type == 'Custom') {
+ frm.fields_dict.method.set_description(`
+ Set the path to a whitelisted function that will return the number on the card in the format:
+
+
+{
+ "value": value,
+ "fieldtype": "Currency"
+}
+`);
+ }
+ },
+
+ type: function(frm) {
+ frm.trigger('set_filters_description');
+ if (frm.doc.type == 'Report') {
+ frm.set_query('report_name', () => {
+ return {
+ filters: {
+ 'report_type': ['!=', 'Report Builder']
+ }
+ };
+ });
+ }
+
+ },
+
+ report_name: function(frm) {
+ frm.set_value('filters_json', '{}');
+ frm.set_value('dynamic_filters_json', '{}');
+ frm.set_df_property('report_field', 'options', []);
+ frm.trigger('set_report_filters');
+ },
+
+ filters_config: function(frm) {
+ frm.filters = eval(frm.doc.filters_config);
+ const filter_values = frappe.report_utils.get_filter_values(frm.filters);
+ frm.set_value('filters_json', JSON.stringify(filter_values));
frm.trigger('render_filters_table');
},
@@ -17,11 +162,16 @@ frappe.ui.form.on('Number Card', {
};
});
frm.set_value('filters_json', '[]');
+ frm.set_value('dynamic_filters_json', '[]');
frm.set_value('aggregate_function_based_on', '');
frm.trigger('set_options');
},
set_options: function(frm) {
+ if (frm.doc.type !== 'Document Type') {
+ return;
+ }
+
let aggregate_based_on_fields = [];
const doctype = frm.doc.document_type;
@@ -40,80 +190,275 @@ frappe.ui.form.on('Number Card', {
frm.set_df_property('aggregate_function_based_on', 'options', aggregate_based_on_fields);
});
+ frm.trigger('render_filters_table');
+ frm.trigger('render_dynamic_filters_table');
}
},
+ set_report_filters: function(frm) {
+ const report_name = frm.doc.report_name;
+ if (report_name) {
+ frappe.report_utils.get_report_filters(report_name).then(filters => {
+ if (filters) {
+ frm.filters = filters;
+ const filter_values = frappe.report_utils.get_filter_values(filters);
+ if (frm.doc.filters_json.length <= 2) {
+ frm.set_value('filters_json', JSON.stringify(filter_values));
+ }
+ }
+ frm.trigger('render_filters_table');
+ frm.trigger('set_report_field_options');
+ frm.trigger('render_dynamic_filters_table');
+ });
+ }
+ },
+
+ set_report_field_options: function(frm) {
+ let filters = frm.doc.filters_json.length > 2 ? JSON.parse(frm.doc.filters_json) : null;
+ if (frm.doc.dynamic_filters_json.length > 2) {
+ filters = {...filters, ...JSON.parse(frm.doc.dynamic_filters_json)};
+ }
+ frappe.xcall(
+ 'frappe.desk.query_report.run',
+ {
+ report_name: frm.doc.report_name,
+ filters: filters,
+ ignore_prepared_report: 1
+ }
+ ).then(data => {
+ if (data.result.length) {
+ frm.field_options = frappe.report_utils.get_field_options_from_report(data.columns, data);
+ frm.set_df_property('report_field', 'options', frm.field_options.numeric_fields);
+ if (!frm.field_options.numeric_fields.length) {
+ frappe.msgprint(__(`Report has no numeric fields, please change the Report Name`));
+ }
+ } else {
+ frappe.msgprint(__('Report has no data, please modify the filters or change the Report Name'));
+ }
+ });
+ },
+
render_filters_table: function(frm) {
frm.set_df_property("filters_section", "hidden", 0);
+ let is_document_type = frm.doc.type == 'Document Type';
+ let is_dynamic_filter = f => ['Date', 'DateRange'].includes(f.fieldtype) && f.default;
let wrapper = $(frm.get_field('filters_json').wrapper).empty();
- frm.filter_table = $(`| ${__('Filter')} | -${__('Condition')} | +${__('Filter')} | +${__('Condition')} | +${__('Value')} | +
|---|
${__("Click table to edit")}
`).appendTo(wrapper); + + let filters = JSON.parse(frm.doc.filters_json || '[]'); + let filters_set = false; + + // Set dynamic filters for reports + if (frm.doc.type == 'Report') { + let set_filters = false; + frm.filters.forEach(f => { + if (is_dynamic_filter(f)) { + filters[f.fieldname] = f.default; + set_filters = true; + } + }); + set_filters && frm.set_value('filters_json', JSON.stringify(filters)); + } + + let fields; + if (is_document_type) { + fields = [ + { + fieldtype: 'HTML', + fieldname: 'filter_area', + } + ]; + + if (filters.length) { + filters.forEach(filter => { + const filter_row = + $(`| ${__('Filter')} | +${__('Condition')} | ${__('Value')} |
|---|
{{ doc.name }} DeliveredCondition Examples:
\ndoc.status==\"Open\"\n", - "show_days": 1, - "show_seconds": 1 + "options": "
doc.due_date==nowdate()
doc.total > 40000\n
Condition Examples:
\ndoc.status==\"Open\"\n" }, { "collapsible": 1, "fieldname": "property_section", "fieldtype": "Section Break", - "label": "Set Property After Alert", - "show_days": 1, - "show_seconds": 1 + "label": "Set Property After Alert" }, { "fieldname": "set_property_after_alert", "fieldtype": "Select", - "label": "Set Property After Alert", - "show_days": 1, - "show_seconds": 1 + "label": "Set Property After Alert" }, { "fieldname": "property_value", "fieldtype": "Data", - "label": "Value To Be Set", - "show_days": 1, - "show_seconds": 1 + "label": "Value To Be Set" }, { "depends_on": "eval:doc.channel!=='Slack'", "fieldname": "column_break_5", "fieldtype": "Section Break", - "label": "Recipients", - "show_days": 1, - "show_seconds": 1 + "label": "Recipients" }, { "fieldname": "recipients", "fieldtype": "Table", "label": "Recipients", "mandatory_depends_on": "eval:doc.channel!=='Slack'", - "options": "Notification Recipient", - "show_days": 1, - "show_seconds": 1 + "options": "Notification Recipient" }, { "fieldname": "message_sb", "fieldtype": "Section Break", - "label": "Message", - "show_days": 1, - "show_seconds": 1 + "label": "Message" }, { "default": "Add your message here", "fieldname": "message", "fieldtype": "Code", "ignore_xss_filter": 1, - "label": "Message", - "show_days": 1, - "show_seconds": 1 + "label": "Message" }, { "depends_on": "eval:doc.channel=='Email'", "fieldname": "message_examples", "fieldtype": "HTML", "label": "Message Examples", - "options": "
doc.due_date==nowdate()
doc.total > 40000\n
<h3>Order Overdue</h3>\n\n<p>Transaction {{ doc.name }} has exceeded Due Date. Please take necessary action.</p>\n\n<!-- show last comment -->\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n<h4>Details</h4>\n\n<ul>\n<li>Customer: {{ doc.customer }}\n<li>Amount: {{ doc.grand_total }}\n</ul>\n",
- "show_days": 1,
- "show_seconds": 1
+ "options": "<h3>Order Overdue</h3>\n\n<p>Transaction {{ doc.name }} has exceeded Due Date. Please take necessary action.</p>\n\n<!-- show last comment -->\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n<h4>Details</h4>\n\n<ul>\n<li>Customer: {{ doc.customer }}\n<li>Amount: {{ doc.grand_total }}\n</ul>\n"
},
{
"depends_on": "eval:doc.channel=='Slack'",
"fieldname": "slack_message_examples",
"fieldtype": "HTML",
"label": "Message Examples",
- "options": "*Order Overdue*\n\nTransaction {{ doc.name }} has exceeded Due Date. Please take necessary action.\n\n\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n*Details*\n\n\u2022 Customer: {{ doc.customer }}\n\u2022 Amount: {{ doc.grand_total }}\n",
- "show_days": 1,
- "show_seconds": 1
+ "options": "*Order Overdue*\n\nTransaction {{ doc.name }} has exceeded Due Date. Please take necessary action.\n\n\n{% if comments %}\nLast comment: {{ comments[-1].comment }} by {{ comments[-1].by }}\n{% endif %}\n\n*Details*\n\n\u2022 Customer: {{ doc.customer }}\n\u2022 Amount: {{ doc.grand_total }}\n"
},
{
"fieldname": "view_properties",
"fieldtype": "Button",
- "label": "View Properties (via Customize Form)",
- "show_days": 1,
- "show_seconds": 1
+ "label": "View Properties (via Customize Form)"
},
{
"collapsible": 1,
"collapsible_depends_on": "attach_print",
"fieldname": "column_break_25",
"fieldtype": "Section Break",
- "label": "Print Settings",
- "show_days": 1,
- "show_seconds": 1
+ "label": "Print Settings"
},
{
"default": "0",
"fieldname": "attach_print",
"fieldtype": "Check",
- "label": "Attach Print",
- "show_days": 1,
- "show_seconds": 1
+ "label": "Attach Print"
},
{
"depends_on": "attach_print",
"fieldname": "print_format",
"fieldtype": "Link",
"label": "Print Format",
- "options": "Print Format",
- "show_days": 1,
- "show_seconds": 1
+ "options": "Print Format"
},
{
"default": "0",
@@ -339,14 +272,12 @@
"description": "If enabled, the notification will show up in the notifications dropdown on the top right corner of the navigation bar.",
"fieldname": "send_system_notification",
"fieldtype": "Check",
- "label": "Send System Notification",
- "show_days": 1,
- "show_seconds": 1
+ "label": "Send System Notification"
}
],
"icon": "fa fa-envelope",
"links": [],
- "modified": "2020-05-29 16:03:10.914526",
+ "modified": "2020-06-23 14:01:25.462544",
"modified_by": "Administrator",
"module": "Email",
"name": "Notification",
diff --git a/frappe/email/doctype/notification/notification.py b/frappe/email/doctype/notification/notification.py
index 8e53b50fa2..81670756f6 100644
--- a/frappe/email/doctype/notification/notification.py
+++ b/frappe/email/doctype/notification/notification.py
@@ -119,15 +119,17 @@ def get_context(context):
if self.is_standard:
self.load_standard_properties(context)
+ try:
+ if self.channel == 'Email':
+ self.send_an_email(doc, context)
- if self.channel == 'Email':
- self.send_an_email(doc, context)
+ if self.channel == 'Slack':
+ self.send_a_slack_msg(doc, context)
- if self.channel == 'Slack':
- self.send_a_slack_msg(doc, context)
-
- if self.channel == 'System Notification' or self.send_system_notification:
- self.create_system_notification(doc, context)
+ if self.channel == 'System Notification' or self.send_system_notification:
+ self.create_system_notification(doc, context)
+ except:
+ frappe.log_error(title='Failed to send notification', message=frappe.get_traceback())
if self.set_property_after_alert:
allow_update = True
diff --git a/frappe/email/email_body.py b/frappe/email/email_body.py
index 8340d81917..d545190c47 100755
--- a/frappe/email/email_body.py
+++ b/frappe/email/email_body.py
@@ -11,6 +11,7 @@ import email.utils
from six import iteritems, text_type, string_types
from email.mime.multipart import MIMEMultipart
from email.header import Header
+from email import policy
def get_email(recipients, sender='', msg='', subject='[No Subject]',
@@ -68,8 +69,8 @@ class EMail:
self.subject = subject
self.expose_recipients = expose_recipients
- self.msg_root = MIMEMultipart('mixed')
- self.msg_alternative = MIMEMultipart('alternative')
+ self.msg_root = MIMEMultipart('mixed', policy=policy.SMTPUTF8)
+ self.msg_alternative = MIMEMultipart('alternative', policy=policy.SMTPUTF8)
self.msg_root.attach(self.msg_alternative)
self.cc = cc or []
self.bcc = bcc or []
@@ -100,7 +101,7 @@ class EMail:
Attach message in the text portion of multipart/alternative
"""
from email.mime.text import MIMEText
- part = MIMEText(message, 'plain', 'utf-8')
+ part = MIMEText(message, 'plain', 'utf-8', policy=policy.SMTPUTF8)
self.msg_alternative.attach(part)
def set_part_html(self, message, inline_images):
@@ -113,9 +114,9 @@ class EMail:
message, _inline_images = replace_filename_with_cid(message)
# prepare parts
- msg_related = MIMEMultipart('related')
+ msg_related = MIMEMultipart('related', policy=policy.SMTPUTF8)
- html_part = MIMEText(message, 'html', 'utf-8')
+ html_part = MIMEText(message, 'html', 'utf-8', policy=policy.SMTPUTF8)
msg_related.attach(html_part)
for image in _inline_images:
@@ -124,7 +125,7 @@ class EMail:
self.msg_alternative.attach(msg_related)
else:
- self.msg_alternative.attach(MIMEText(message, 'html', 'utf-8'))
+ self.msg_alternative.attach(MIMEText(message, 'html', 'utf-8', policy=policy.SMTPUTF8))
def set_html_as_text(self, html):
"""Set plain text from HTML"""
@@ -135,7 +136,7 @@ class EMail:
from email.mime.text import MIMEText
maintype, subtype = mime_type.split('/')
- part = MIMEText(message, _subtype = subtype)
+ part = MIMEText(message, _subtype = subtype, policy=policy.SMTPUTF8)
if as_attachment:
part.add_header('Content-Disposition', 'attachment', filename=filename)
@@ -222,7 +223,8 @@ class EMail:
# reset headers as values may be changed.
for key, val in iteritems(headers):
- self.set_header(key, val)
+ if val:
+ self.set_header(key, val)
# call hook to enable apps to modify msg_root before sending
for hook in frappe.get_hooks("make_email_body_message"):
@@ -238,7 +240,7 @@ class EMail:
"""validate, build message and convert to string"""
self.validate()
self.make()
- return self.msg_root.as_string()
+ return self.msg_root.as_string(policy=policy.SMTPUTF8)
def get_formatted_html(subject, message, footer=None, print_html=None,
email_account=None, header=None, unsubscribe_link=None, sender=None):
diff --git a/frappe/email/queue.py b/frappe/email/queue.py
index ce512de276..8bffc108b9 100755
--- a/frappe/email/queue.py
+++ b/frappe/email/queue.py
@@ -347,7 +347,7 @@ def flush(from_test=False):
if not smtpserver:
smtpserver = SMTPServer()
smtpserver_dict[email.sender] = smtpserver
-
+
if from_test:
send_one(email.name, smtpserver, auto_commit)
else:
@@ -390,12 +390,12 @@ def send_one(email, smtpserver=None, auto_commit=True, now=False):
where
name=%s
for update''', email, as_dict=True)
-
+
if len(email):
email = email[0]
else:
return
-
+
recipients_list = frappe.db.sql('''select name, recipient, status from
`tabEmail Queue Recipient` where parent=%s''', email.name, as_dict=1)
@@ -417,6 +417,8 @@ def send_one(email, smtpserver=None, auto_commit=True, now=False):
if email.communication:
frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit)
+ email_sent_to_any_recipient = None
+
try:
message = None
diff --git a/frappe/email/test_email_body.py b/frappe/email/test_email_body.py
index 43c4bb8333..705a853bc6 100644
--- a/frappe/email/test_email_body.py
+++ b/frappe/email/test_email_body.py
@@ -39,7 +39,7 @@ This is the text version of this email
subject='Test Subject',
content=email_html,
text_content=email_text
- ).as_string()
+ ).as_string().replace("\r\n", "\n")
def test_prepare_message_returns_already_encoded_string(self):
@@ -153,7 +153,7 @@ w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
subject='Test Subject',
content=email_html,
header=['Email Title', 'green']
- ).as_string()
+ ).as_string().replace("\r\n", "\n")
self.assertTrue(''' current_version
+
+ if verbose and downgrade:
+ print("Your site will be downgraded from Frappe {0} to {1}".format(current_version, backup_version))
+
+ return downgrade
diff --git a/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py b/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py
index 4b595b1abf..864720174f 100644
--- a/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py
+++ b/frappe/integrations/doctype/dropbox_settings/dropbox_settings.py
@@ -56,7 +56,8 @@ def take_backup_to_dropbox(retry_count=0, upload_db_backup=True):
did_not_upload, error_log = backup_to_dropbox(upload_db_backup)
if did_not_upload: raise Exception
- send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
+ if cint(frappe.db.get_value("Dropbox Settings", None, "send_email_for_successful_backup")):
+ send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
except JobTimeoutException:
if retry_count < 2:
args = {
@@ -96,10 +97,12 @@ def backup_to_dropbox(upload_db_backup=True):
if frappe.flags.create_new_backup:
backup = new_backup(ignore_files=True)
filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
+ site_config = os.path.join(get_backups_path(), os.path.basename(backup.site_config_backup_path))
else:
- filename = get_latest_backup_file()
+ filename, site_config = get_latest_backup_file()
upload_file_to_dropbox(filename, "/database", dropbox_client)
+ upload_file_to_dropbox(site_config, "/database", dropbox_client)
# delete older databases
if dropbox_settings['no_of_backups']:
diff --git a/frappe/integrations/doctype/google_calendar/google_calendar.py b/frappe/integrations/doctype/google_calendar/google_calendar.py
index fa2eea6ce1..4a9acd9e84 100644
--- a/frappe/integrations/doctype/google_calendar/google_calendar.py
+++ b/frappe/integrations/doctype/google_calendar/google_calendar.py
@@ -12,6 +12,7 @@ from frappe import _
from frappe.model.document import Document
from frappe.utils import get_request_site_address
from googleapiclient.errors import HttpError
+from frappe.utils.password import set_encrypted_password
from frappe.utils import add_days, get_datetime, get_weekdays, now_datetime, add_to_date, get_time_zone
from dateutil import parser
from datetime import datetime, timedelta
@@ -198,7 +199,7 @@ def check_google_calendar(account, google_calendar):
except HttpError as err:
frappe.throw(_("Google Calendar - Could not create Calendar for {0}, error code {1}.").format(account.name, err.resp.status))
-def sync_events_from_google_calendar(g_calendar, method=None, page_length=10):
+def sync_events_from_google_calendar(g_calendar, method=None):
"""
Syncs Events from Google Calendar in Framework Calendar.
Google Calendar returns nextSyncToken when all the events in Google Calendar are fetched.
@@ -210,23 +211,32 @@ def sync_events_from_google_calendar(g_calendar, method=None, page_length=10):
if not account.pull_from_google_calendar:
return
+ sync_token = account.get_password(fieldname="next_sync_token", raise_exception=False) or None
+ events = frappe._dict()
results = []
while True:
try:
# API Response listed at EOF
- sync_token = account.get_password(fieldname="next_sync_token", raise_exception=False) or None
- events = google_calendar.events().list(calendarId=account.google_calendar_id, maxResults=page_length,
- singleEvents=False, showDeleted=True, syncToken=sync_token).execute()
+ events = google_calendar.events().list(calendarId=account.google_calendar_id, maxResults=2000,
+ pageToken=events.get("nextPageToken"), singleEvents=False, showDeleted=True, syncToken=sync_token).execute()
except HttpError as err:
- frappe.throw(_("Google Calendar - Could not fetch event from Google Calendar, error code {0}.").format(err.resp.status))
+ msg = _("Google Calendar - Could not fetch event from Google Calendar, error code {0}.").format(err.resp.status)
+
+ if err.resp.status == 410:
+ set_encrypted_password("Google Calendar", account.name, "", "next_sync_token")
+ frappe.db.commit()
+ msg += ' ' + _('Sync token was invalid and has been reset. Retry syncing.')
+ frappe.msgprint(msg, title='Invalid Sync Token', indicator='blue')
+ else:
+ frappe.throw(msg)
for event in events.get("items", []):
results.append(event)
if not events.get("nextPageToken"):
if events.get("nextSyncToken"):
- frappe.db.set_value("Google Calendar", account.name, "next_sync_token", events.get("nextSyncToken"))
- frappe.db.commit()
+ account.next_sync_token = events.get("nextSyncToken")
+ account.save()
break
for idx, event in enumerate(results):
diff --git a/frappe/integrations/doctype/google_drive/google_drive.py b/frappe/integrations/doctype/google_drive/google_drive.py
index 60ee173bbf..c110694dff 100644
--- a/frappe/integrations/doctype/google_drive/google_drive.py
+++ b/frappe/integrations/doctype/google_drive/google_drive.py
@@ -189,13 +189,17 @@ def upload_system_backup_to_google_drive():
if frappe.flags.create_new_backup:
set_progress(1, "Backing up Data.")
backup = new_backup()
- fileurl_backup = os.path.basename(backup.backup_path_db)
- fileurl_public_files = os.path.basename(backup.backup_path_files)
- fileurl_private_files = os.path.basename(backup.backup_path_private_files)
+ fileurl_backup = backup.backup_path_db
+ fileurl_site_config = backup.site_config_backup_path
+ fileurl_public_files = backup.backup_path_files
+ fileurl_private_files = backup.backup_path_private_files
else:
- fileurl_backup, fileurl_public_files, fileurl_private_files = get_latest_backup_file(with_files=True)
+ fileurl_backup, fileurl_site_config, fileurl_public_files, fileurl_private_files = get_latest_backup_file(with_files=True)
+
+ for fileurl in [fileurl_backup, fileurl_site_config, fileurl_public_files, fileurl_private_files]:
+ if not fileurl:
+ continue
- for fileurl in [fileurl_backup, fileurl_public_files, fileurl_private_files]:
file_metadata = {
"name": fileurl,
"parents": [account.backup_folder_id]
@@ -204,7 +208,7 @@ def upload_system_backup_to_google_drive():
try:
media = MediaFileUpload(get_absolute_path(filename=fileurl), mimetype="application/gzip", resumable=True)
except IOError as e:
- frappe.throw(_("Google Drive - Could not locate locate - {0}").format(e))
+ frappe.throw(_("Google Drive - Could not locate - {0}").format(e))
try:
set_progress(2, "Uploading backup to Google Drive.")
@@ -218,15 +222,17 @@ def upload_system_backup_to_google_drive():
return _("Google Drive Backup Successful.")
def daily_backup():
- if frappe.db.get_single_value("Google Drive", "frequency") == "Daily":
+ drive_settings = frappe.db.get_singles_dict('Google Drive')
+ if drive_settings.enable and drive_settings.frequency == "Daily":
upload_system_backup_to_google_drive()
def weekly_backup():
- if frappe.db.get_single_value("Google Drive", "frequency") == "Weekly":
+ drive_settings = frappe.db.get_singles_dict('Google Drive')
+ if drive_settings.enable and drive_settings.frequency == "Weekly":
upload_system_backup_to_google_drive()
def get_absolute_path(filename):
- file_path = os.path.join(get_backups_path()[2:], filename)
+ file_path = os.path.join(get_backups_path()[2:], os.path.basename(filename))
return "{0}/sites/{1}".format(get_bench_path(), file_path)
def set_progress(progress, message):
diff --git a/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py b/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py
index 21232992f4..c8b007ba7b 100755
--- a/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py
+++ b/frappe/integrations/doctype/s3_backup_settings/s3_backup_settings.py
@@ -19,6 +19,9 @@ from botocore.exceptions import ClientError
class S3BackupSettings(Document):
def validate(self):
+ if not self.enabled:
+ return
+
if not self.endpoint_url:
self.endpoint_url = 'https://s3.amazonaws.com'
conn = boto3.client(
@@ -115,19 +118,21 @@ def backup_to_s3():
backup = new_backup(ignore_files=False, backup_path_db=None,
backup_path_files=None, backup_path_private_files=None, force=True)
db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
+ site_config = os.path.join(get_backups_path(), os.path.basename(backup.site_config_backup_path))
if backup_files:
files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
else:
if backup_files:
- db_filename, files_filename, private_files = get_latest_backup_file(with_files=backup_files)
+ db_filename, site_config, files_filename, private_files = get_latest_backup_file(with_files=backup_files)
else:
- db_filename = get_latest_backup_file()
+ db_filename, site_config = get_latest_backup_file()
folder = os.path.basename(db_filename)[:15] + '/'
# for adding datetime to folder name
upload_file_to_s3(db_filename, folder, conn, bucket)
+ upload_file_to_s3(site_config, folder, conn, bucket)
if backup_files:
upload_file_to_s3(private_files, folder, conn, bucket)
upload_file_to_s3(files_filename, folder, conn, bucket)
diff --git a/frappe/integrations/frappe_providers/__init__.py b/frappe/integrations/frappe_providers/__init__.py
index 887e191e16..161937a936 100644
--- a/frappe/integrations/frappe_providers/__init__.py
+++ b/frappe/integrations/frappe_providers/__init__.py
@@ -7,7 +7,7 @@ from frappe.integrations.frappe_providers.frappecloud import frappecloud_migrato
def migrate_to(local_site, frappe_provider):
if frappe_provider in ("frappe.cloud", "frappecloud.com"):
- return frappecloud_migrator(local_site, frappe_provider)
+ return frappecloud_migrator(local_site)
else:
print("{} is not supported yet".format(frappe_provider))
sys.exit(1)
diff --git a/frappe/integrations/frappe_providers/frappecloud.py b/frappe/integrations/frappe_providers/frappecloud.py
index 3e4b584246..e09f09a44b 100644
--- a/frappe/integrations/frappe_providers/frappecloud.py
+++ b/frappe/integrations/frappe_providers/frappecloud.py
@@ -1,397 +1,29 @@
-# imports - standard imports
-import getpass
-import json
-import re
-import sys
-
-# imports - third party imports
import click
-from html2text import html2text
import requests
+from html2text import html2text
-# imports - module imports
import frappe
-import frappe.utils.backups
-from frappe.utils import get_installed_apps_info
-from frappe.utils.commands import render_table, add_line_after, add_line_before
-# TODO: check upgrade compatibility
-
-
-def render_actions_table():
- actions_table = [["#", "Action"]]
- actions = []
-
- for n, action in enumerate(migrator_actions):
- actions_table.append([n+1, action["title"]])
- actions.append(action["fn"])
-
- render_table(actions_table)
- return actions
-
-
-def render_site_table(sites_info):
- sites_table = [["#", "Site Name", "Status"]]
- available_sites = []
-
- for n, site_data in enumerate(sites_info):
- name, status = site_data["name"], site_data["status"]
- if status in ("Active", "Broken"):
- sites_table.append([n + 1, name, status])
- available_sites.append(name)
-
- render_table(sites_table)
- return available_sites
-
-
-def render_teams_table(teams):
- teams_table = [["#", "Team"]]
-
- for n, team in enumerate(teams):
- teams_table.append([n+1, team])
-
- render_table(teams_table)
-
-
-def render_plan_table(plans_list):
- plans_table = [["Plan", "CPU Time"]]
- visible_headers = ["name", "cpu_time_per_day"]
-
- for plan in plans_list:
- plan, cpu_time = [plan[header] for header in visible_headers]
- plans_table.append([plan, "{} hour{}/day".format(cpu_time, "" if cpu_time < 2 else "s")])
-
- render_table(plans_table)
-
-
-def render_group_table(app_groups):
- # title row
- app_groups_table = [["#", "App Group", "Apps"]]
-
- # all rows
- for idx, app_group in enumerate(app_groups):
- apps_list = ", ".join(["{}:{}".format(app["scrubbed"], app["branch"]) for app in app_group["apps"]])
- row = [idx + 1, app_group["name"], apps_list]
- app_groups_table.append(row)
-
- render_table(app_groups_table)
-
-
-def handle_request_failure(request=None, message=None, traceback=True, exit_code=1):
- message = message or "Request failed with error code {}".format(request.status_code)
- response = html2text(request.text) if traceback else ""
-
- print("{0}{1}".format(message, "\n" + response))
- sys.exit(exit_code)
-
-
-@add_line_after
-def select_primary_action():
- actions = render_actions_table()
- idx = click.prompt("What do you want to do?", type=click.IntRange(1, len(actions))) - 1
-
- return actions[idx]
-
-
-@add_line_after
-def select_site():
- get_all_sites_request = session.post(all_site_url, headers={
- "accept": "application/json",
- "accept-encoding": "gzip, deflate, br",
- "content-type": "application/json; charset=utf-8"
- })
-
- if get_all_sites_request.ok:
- all_sites = get_all_sites_request.json()["message"]
- available_sites = render_site_table(all_sites)
-
- while True:
- selected_site = click.prompt("Name of the site you want to restore to", type=str).strip()
- if selected_site in available_sites:
- return selected_site
- else:
- print("Site {} does not exist. Try again ❌".format(selected_site))
- else:
- print("Couldn't retrive sites list...Try again later")
- sys.exit(1)
-
-
-@add_line_before
-def select_team(session):
- # get team options
- account_details_sc = session.post(account_details_url)
- if account_details_sc.ok:
- account_details = account_details_sc.json()["message"]
- available_teams = account_details["teams"]
-
- # ask if they want to select, go ahead with if only one exists
- if len(available_teams) == 1:
- team = available_teams[0]
- else:
- render_teams_table(available_teams)
- idx = click.prompt("Select Team", type=click.IntRange(1, len(available_teams))) - 1
- team = available_teams[idx]
-
- print("Team '{}' set for current session".format(team))
-
- return team
-
-
-def get_new_site_options():
- site_options_sc = session.post(options_url)
-
- if site_options_sc.ok:
- site_options = site_options_sc.json()["message"]
- return site_options
- else:
- print("Couldn't retrive New site information: {}".format(site_options_sc.status_code))
-
-
-def is_valid_subdomain(subdomain):
- if len(subdomain) < 5:
- print("Subdomain too short. Use 5 or more characters")
- return False
- matched = re.match("^[a-z0-9][a-z0-9-]*[a-z0-9]$", subdomain)
- if matched:
- return True
- print("Subdomain contains invalid characters. Use lowercase characters, numbers and hyphens")
-
-
-def is_subdomain_available(subdomain):
- res = session.post(site_exists_url, {"subdomain": subdomain})
- if res.ok:
- available = not res.json()["message"]
- if not available:
- print("Subdomain already exists! Try another one")
-
- return available
-
-
-@add_line_after
-def choose_plan(plans_list):
- print("{} plans available".format(len(plans_list)))
- available_plans = [plan["name"] for plan in plans_list]
- render_plan_table(plans_list)
-
- while True:
- input_plan = click.prompt("Select Plan").strip()
- if input_plan in available_plans:
- print("{} Plan selected ✅".format(input_plan))
- return input_plan
- else:
- print("Invalid Selection ❌")
-
-
-@add_line_after
-def check_app_compat(available_group):
- is_compat = True
- incompatible_apps, filtered_apps, branch_msgs = [], [], []
- existing_group = [(app["app_name"], app["branch"]) for app in get_installed_apps_info()]
- print("Checking availability of existing app group")
-
- for (app, branch) in existing_group:
- info = [ (a["name"], a["branch"]) for a in available_group["apps"] if a["scrubbed"] == app ]
- if info:
- app_title, available_branch = info[0]
-
- if branch != available_branch:
- print("⚠️ App {}:{} => {}".format(app, branch, available_branch))
- branch_msgs.append([app, branch, available_branch])
- filtered_apps.append(app_title)
- is_compat = False
-
- else:
- print("✅ App {}:{}".format(app, branch))
- filtered_apps.append(app_title)
-
- else:
- incompatible_apps.append(app)
- print("❌ App {}:{}".format(app, branch))
- is_compat = False
-
- start_msg = "\nSelecting this group will "
- incompatible_apps = ("\n\nDrop the following apps:\n" + "\n".join(incompatible_apps)) if incompatible_apps else ""
- branch_change = ("\n\nUpgrade the following apps:\n" + "\n".join(["{}: {} => {}".format(*x) for x in branch_msgs])) if branch_msgs else ""
- changes = (incompatible_apps + branch_change) or "be perfect for you :)"
- warning_message = start_msg + changes
- print(warning_message)
-
- return is_compat, filtered_apps
-
-
-@add_line_after
-def filter_apps(app_groups):
- render_group_table(app_groups)
-
- while True:
- app_group_index = click.prompt("Select App Group Number", type=int) - 1
- try:
- if app_group_index == -1:
- raise IndexError
- selected_group = app_groups[app_group_index]
- except IndexError:
- print("Invalid Selection ❌")
- continue
-
- is_compat, filtered_apps = check_app_compat(selected_group)
-
- if is_compat or click.confirm("Continue anyway?"):
- print("App Group {} selected! ✅".format(selected_group["name"]))
- break
-
- return selected_group["name"], filtered_apps
-
-
-@add_line_after
-def get_subdomain(domain):
- while True:
- subdomain = click.prompt("Enter subdomain").strip()
- if is_valid_subdomain(subdomain) and is_subdomain_available(subdomain):
- print("Site Domain: {}.{}".format(subdomain, domain))
- return subdomain
-
-
-@add_line_after
-def upload_backup(local_site):
- # take backup
- files_session = {}
- print("Taking backup for site {}".format(local_site))
- odb = frappe.utils.backups.new_backup(ignore_files=False, force=True)
-
- # upload files
- for x, (file_type, file_path) in enumerate([
- ("database", odb.backup_path_db),
- ("public", odb.backup_path_files),
- ("private", odb.backup_path_private_files)
- ]):
- file_upload_response = session.post(files_url, data={}, files={
- "file": open(file_path, "rb"),
- "is_private": 1,
- "folder": "Home",
- "method": "press.api.site.upload_backup",
- "type": file_type
- })
- print("Uploading files ({}/3)".format(x+1), end="\r")
- if file_upload_response.ok:
- files_session[file_type] = file_upload_response.json()["message"]
- else:
- print("Upload failed for: {}".format(file_path))
-
- files_uploaded = { k: v["file_url"] for k, v in files_session.items() }
- print("Uploaded backup files! ✅")
-
- return files_uploaded
-
-
-def new_site(local_site):
- # get new site options
- site_options = get_new_site_options()
-
- # set preferences from site options
- subdomain = get_subdomain(site_options["domain"])
- plan = choose_plan(site_options["plans"])
-
- app_groups = site_options["groups"]
- selected_group, filtered_apps = filter_apps(app_groups)
- files_uploaded = upload_backup(local_site)
-
- # push to frappe_cloud
- payload = json.dumps({
- "site": {
- "apps": filtered_apps,
- "files": files_uploaded,
- "group": selected_group,
- "name": subdomain,
- "plan": plan
- }
- })
-
- session.headers.update({"Content-Type": "application/json; charset=utf-8"})
- site_creation_request = session.post(upload_url, payload)
-
- if site_creation_request.ok:
- site_url = site_creation_request.json()["message"]
- print("Your site {} is being migrated ✨".format(local_site))
- print("View your site dashboard at {}/dashboard/#/sites/{}".format(remote_site, site_url))
- print("Your site URL: {}".format(site_url))
- else:
- handle_request_failure(site_creation_request)
-
-
-def restore_site(local_site):
- # get list of existing sites they can restore
- selected_site = select_site()
-
- # TODO: check if they can restore it
-
- click.confirm("This is an irreversible action. Are you sure you want to continue?", abort=True)
-
- # backup site
- files_uploaded = upload_backup(local_site)
-
- # push to frappe_cloud
- payload = json.dumps({
- "name": selected_site,
- "files": files_uploaded
- })
- headers = {"Content-Type": "application/json; charset=utf-8"}
- site_restore_request = session.post(restore_site_url, payload, headers=headers)
-
- if site_restore_request.ok:
- print("Your site {0} is being restored on {1} ✨".format(local_site, selected_site))
- print("View your site dashboard at {}/dashboard/#/sites/{}".format(remote_site, selected_site))
- print("Your site URL: {}".format(selected_site))
- else:
- handle_request_failure(site_restore_request)
-
-
-@add_line_after
-def create_session():
- print("Frappe Cloud credentials @ {}".format(remote_site))
-
- # take user input from STDIN
- username = click.prompt("Username").strip()
- password = getpass.unix_getpass()
-
- auth_credentials = {"usr": username, "pwd": password}
-
- session = requests.Session()
- login_sc = session.post(login_url, auth_credentials)
-
- if login_sc.ok:
- print("Authorization Successful! ✅")
- team = select_team(session)
- session.headers.update({"X-Press-Team": team })
- return session
- else:
- handle_request_failure(message="Authorization Failed with Error Code {}".format(login_sc.status_code), traceback=False)
-
-
-def frappecloud_migrator(local_site, frappecloud_site):
- global login_url, upload_url, files_url, options_url, site_exists_url, restore_site_url, account_details_url, all_site_url
- global session, migrator_actions, remote_site
-
+def frappecloud_migrator(local_site):
+ print("Retrieving Site Migrator...")
remote_site = frappe.conf.frappecloud_url or "frappecloud.com"
+ request_url = "https://{}/api/method/press.api.script".format(remote_site)
+ request = requests.get(request_url)
- login_url = "https://{}/api/method/login".format(remote_site)
- upload_url = "https://{}/api/method/press.api.site.new".format(remote_site)
- files_url = "https://{}/api/method/upload_file".format(remote_site)
- options_url = "https://{}/api/method/press.api.site.options_for_new".format(remote_site)
- site_exists_url = "https://{}/api/method/press.api.site.exists".format(remote_site)
- account_details_url = "https://{}/api/method/press.api.account.get".format(remote_site)
- all_site_url = "https://{}/api/method/press.api.site.all".format(remote_site)
- restore_site_url = "https://{}/api/method/press.api.site.restore".format(remote_site)
+ if request.status_code / 100 != 2:
+ print("Request exited with Status Code: {}\nPayload: {}".format(request.status_code, html2text(request.text)))
+ click.secho("Some errors occurred while recovering the migration script. Please contact us @ Frappe Cloud if this issue persists", fg="yellow")
+ return
- migrator_actions = [
- { "title": "Create a new site", "fn": new_site },
- { "title": "Restore to an existing site", "fn": restore_site }
- ]
+ script_contents = request.json()["message"]
- # get credentials + auth user + start session
- session = create_session()
+ import tempfile
+ import os
+ import sys
- # available actions defined in migrator_actions
- primary_action = select_primary_action()
-
- primary_action(local_site)
+ py = sys.executable
+ script = tempfile.NamedTemporaryFile(mode="w")
+ script.write(script_contents)
+ print("Site Migrator stored at {}".format(script.name))
+ os.execv(py, [py, script.name, local_site])
diff --git a/frappe/integrations/offsite_backup_utils.py b/frappe/integrations/offsite_backup_utils.py
index 7e80cb68c4..9de176b2d0 100644
--- a/frappe/integrations/offsite_backup_utils.py
+++ b/frappe/integrations/offsite_backup_utils.py
@@ -47,16 +47,17 @@ def get_latest_backup_file(with_files=False):
def get_latest(file_ext):
file_list = glob.glob(os.path.join(get_backups_path(), file_ext))
- return max(file_list, key=os.path.getctime)
+ return max(file_list, key=os.path.getctime) if file_list else None
latest_file = get_latest('*.sql.gz')
+ latest_site_config = get_latest('*.json')
if with_files:
latest_public_file_bak = get_latest('*-files.tar')
latest_private_file_bak = get_latest('*-private-files.tar')
- return latest_file, latest_public_file_bak, latest_private_file_bak
+ return latest_file, latest_site_config, latest_public_file_bak, latest_private_file_bak
- return latest_file
+ return latest_file, latest_site_config
def get_file_size(file_path, unit):
@@ -76,7 +77,7 @@ def get_file_size(file_path, unit):
def validate_file_size():
frappe.flags.create_new_backup = True
- latest_file = get_latest_backup_file()
+ latest_file, site_config = get_latest_backup_file()
file_size = get_file_size(latest_file, unit='GB')
if file_size > 1:
diff --git a/frappe/model/base_document.py b/frappe/model/base_document.py
index 106d21eb51..7d56736cdc 100644
--- a/frappe/model/base_document.py
+++ b/frappe/model/base_document.py
@@ -334,7 +334,7 @@ class BaseDocument(object):
self.db_insert()
return
- frappe.msgprint(_("Duplicate name {0} {1}").format(self.doctype, self.name))
+ frappe.msgprint(_("{0} {1} already exists").format(self.doctype, frappe.bold(self.name)), title=_("Duplicate Name"), indicator="red")
raise frappe.DuplicateEntryError(self.doctype, self.name, e)
elif frappe.db.is_unique_key_violation(e):
@@ -504,19 +504,7 @@ class BaseDocument(object):
for _df in fields_to_fetch:
if self.is_new() or self.docstatus != 1 or _df.allow_on_submit:
- fetch_from_fieldname = _df.fetch_from.split('.')[-1]
- value = values[fetch_from_fieldname]
- if _df.fieldtype == 'Small Text' or _df.fieldtype == 'Text' or _df.fieldtype == 'Data':
- if fetch_from_fieldname in default_fields:
- from frappe.model.meta import get_default_df
- fetch_from_df = get_default_df(fetch_from_fieldname)
- else:
- fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname)
-
- fetch_from_ft = fetch_from_df.get('fieldtype')
- if fetch_from_ft == 'Text Editor' and value:
- value = unescape_html(strip_html(value))
- setattr(self, _df.fieldname, value)
+ self.set_fetch_from_value(doctype, _df, values)
notify_link_count(doctype, docname)
@@ -531,6 +519,27 @@ class BaseDocument(object):
return invalid_links, cancelled_links
+ def set_fetch_from_value(self, doctype, df, values):
+ fetch_from_fieldname = df.fetch_from.split('.')[-1]
+ value = values[fetch_from_fieldname]
+ if df.fieldtype in ['Small Text', 'Text', 'Data']:
+ if fetch_from_fieldname in default_fields:
+ from frappe.model.meta import get_default_df
+ fetch_from_df = get_default_df(fetch_from_fieldname)
+ else:
+ fetch_from_df = frappe.get_meta(doctype).get_field(fetch_from_fieldname)
+
+ if not fetch_from_df:
+ frappe.throw(
+ _('Please check the value of "Fetch From" set for field {0}').format(frappe.bold(df.label)),
+ title = _('Wrong Fetch From value')
+ )
+
+ fetch_from_ft = fetch_from_df.get('fieldtype')
+ if fetch_from_ft == 'Text Editor' and value:
+ value = unescape_html(strip_html(value))
+ setattr(self, df.fieldname, value)
+
def _validate_selects(self):
if frappe.flags.in_import:
return
@@ -693,16 +702,13 @@ class BaseDocument(object):
df = self.meta.get_field(fieldname)
sanitized_value = value
- if df and df.get("fieldtype") in ("Data", "Code", "Small Text", "Text") and df.get("options")=="Email":
- sanitized_value = sanitize_email(value)
+ if df and (df.get("ignore_xss_filter")
+ or (df.get("fieldtype")=="Code" and df.get("options")!="Email")
+ or df.get("fieldtype") in ("Attach", "Attach Image", "Barcode")
- elif df and (df.get("ignore_xss_filter")
- or (df.get("fieldtype")=="Code" and df.get("options")!="Email")
- or df.get("fieldtype") in ("Attach", "Attach Image", "Barcode")
-
- # cancelled and submit but not update after submit should be ignored
- or self.docstatus==2
- or (self.docstatus==1 and not df.get("allow_on_submit"))):
+ # cancelled and submit but not update after submit should be ignored
+ or self.docstatus==2
+ or (self.docstatus==1 and not df.get("allow_on_submit"))):
continue
else:
diff --git a/frappe/model/create_new.py b/frappe/model/create_new.py
index 2142d544fe..fcf648e718 100644
--- a/frappe/model/create_new.py
+++ b/frappe/model/create_new.py
@@ -45,7 +45,9 @@ def make_new_doc(doctype):
doc = doc.get_valid_dict(sanitize=False)
doc["doctype"] = doctype
doc["__islocal"] = 1
- doc["__unsaved"] = 1
+
+ if not frappe.model.meta.is_single(doctype):
+ doc["__unsaved"] = 1
return doc
diff --git a/frappe/model/db_query.py b/frappe/model/db_query.py
index 19517aa4a1..ac87b1d907 100644
--- a/frappe/model/db_query.py
+++ b/frappe/model/db_query.py
@@ -203,7 +203,7 @@ class DatabaseQuery(object):
def sanitize_fields(self):
'''
regex : ^.*[,();].*
- purpose : The regex will look for malicious patterns like `,`, '(', ')', ';' in each
+ purpose : The regex will look for malicious patterns like `,`, '(', ')', '@', ';' in each
field which may leads to sql injection.
example :
field = "`DocType`.`issingle`, version()"
@@ -211,11 +211,11 @@ class DatabaseQuery(object):
the system will filter out this field.
'''
- sub_query_regex = re.compile("^.*[,();].*")
- blacklisted_keywords = ['select', 'create', 'insert', 'delete', 'drop', 'update', 'case']
+ sub_query_regex = re.compile("^.*[,();@].*")
+ blacklisted_keywords = ['select', 'create', 'insert', 'delete', 'drop', 'update', 'case', 'show']
blacklisted_functions = ['concat', 'concat_ws', 'if', 'ifnull', 'nullif', 'coalesce',
'connection_id', 'current_user', 'database', 'last_insert_id', 'session_user',
- 'system_user', 'user', 'version']
+ 'system_user', 'user', 'version', 'global']
def _raise_exception():
frappe.throw(_('Use of sub-query or function is restricted'), frappe.DataError)
@@ -238,6 +238,10 @@ class DatabaseQuery(object):
if any("{0}(".format(keyword) in field.lower() for keyword in blacklisted_functions):
_raise_exception()
+ if '@' in field.lower():
+ # prevent access to global variables
+ _raise_exception()
+
if re.compile(r"[0-9a-zA-Z]+\s*'").match(field):
_raise_exception()
@@ -854,4 +858,4 @@ def get_date_range(operator, value):
timespan = period_map[operator] + ' ' + timespan_map[value] if operator != 'timespan' else value
- return get_timespan_date_range(timespan)
\ No newline at end of file
+ return get_timespan_date_range(timespan)
diff --git a/frappe/model/delete_doc.py b/frappe/model/delete_doc.py
index c0d2c4eef9..98dbce1d8f 100644
--- a/frappe/model/delete_doc.py
+++ b/frappe/model/delete_doc.py
@@ -77,7 +77,7 @@ def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reloa
delete_from_table(doctype, name, ignore_doctypes, None)
- if not (for_reload or frappe.flags.in_migrate or frappe.flags.in_install or frappe.flags.in_test):
+ if not (for_reload or frappe.flags.in_migrate or frappe.flags.in_install or frappe.flags.in_uninstall or frappe.flags.in_test):
try:
delete_controllers(name, doc.module)
except (FileNotFoundError, OSError, KeyError):
diff --git a/frappe/model/document.py b/frappe/model/document.py
index 843cb421fe..69a781d6d1 100644
--- a/frappe/model/document.py
+++ b/frappe/model/document.py
@@ -396,11 +396,23 @@ class Document(BaseDocument):
def get_doc_before_save(self):
return getattr(self, '_doc_before_save', None)
+ def has_value_changed(self, fieldname):
+ '''Returns true if value is changed before and after saving'''
+ previous = self.get_doc_before_save()
+ return previous.get(fieldname)!=self.get(fieldname) if previous else True
+
def set_new_name(self, force=False, set_name=None, set_child_names=True):
"""Calls `frappe.naming.set_new_name` for parent and child docs."""
+
if self.flags.name_set and not force:
return
+ # If autoname has set as Prompt (name)
+ if self.get("__newname"):
+ self.name = self.get("__newname")
+ self.flags.name_set = True
+ return
+
if set_name:
self.name = set_name
else:
@@ -825,7 +837,7 @@ class Document(BaseDocument):
def run_notifications(self, method):
"""Run notifications for this method"""
- if frappe.flags.in_import or frappe.flags.in_patch or frappe.flags.in_install:
+ if (frappe.flags.in_import and frappe.flags.mute_emails) or frappe.flags.in_patch or frappe.flags.in_install:
return
if self.flags.notifications_executed==None:
@@ -961,7 +973,8 @@ class Document(BaseDocument):
update_global_search(self)
- if getattr(self.meta, 'track_changes', False) and self._doc_before_save and not self.flags.ignore_version:
+ if getattr(self.meta, 'track_changes', False) and not self.flags.ignore_version \
+ and not self.doctype == 'Version' and not frappe.flags.in_install:
self.save_version()
self.run_method('on_change')
@@ -1058,8 +1071,13 @@ class Document(BaseDocument):
def save_version(self):
"""Save version info"""
+ if not self._doc_before_save and frappe.flags.in_patch: return
+
version = frappe.new_doc('Version')
- if version.set_diff(self._doc_before_save, self):
+ if not self._doc_before_save:
+ version.for_insert(self)
+ version.insert(ignore_permissions=True)
+ elif version.set_diff(self._doc_before_save, self):
version.insert(ignore_permissions=True)
if not frappe.flags.in_migrate:
follow_document(self.doctype, self.name, frappe.session.user)
diff --git a/frappe/model/mapper.py b/frappe/model/mapper.py
index 3639a947c0..d3014435e0 100644
--- a/frappe/model/mapper.py
+++ b/frappe/model/mapper.py
@@ -14,6 +14,12 @@ def make_mapped_doc(method, source_name, selected_children=None, args=None):
Sets selected_children as flags for the `get_mapped_doc` method.
Called from `open_mapped_doc` from create_new.js'''
+
+ for hook in frappe.get_hooks("override_whitelisted_methods", {}).get(method, []):
+ # override using the first hook
+ method = hook
+ break
+
method = frappe.get_attr(method)
if method not in frappe.whitelisted:
diff --git a/frappe/model/meta.py b/frappe/model/meta.py
index 0c5ec75597..1cc3abba5b 100644
--- a/frappe/model/meta.py
+++ b/frappe/model/meta.py
@@ -483,6 +483,9 @@ class Meta(Document):
def get_row_template(self):
return self.get_web_template(suffix='_row')
+ def get_list_template(self):
+ return self.get_web_template(suffix='_list')
+
def get_web_template(self, suffix=''):
'''Returns the relative path of the row template for this doctype'''
module_name = frappe.scrub(self.module)
diff --git a/frappe/modules/export_file.py b/frappe/modules/export_file.py
index b904132530..4b22c82105 100644
--- a/frappe/modules/export_file.py
+++ b/frappe/modules/export_file.py
@@ -12,16 +12,17 @@ def export_doc(doc):
def export_to_files(record_list=None, record_module=None, verbose=0, create_init=None):
"""
- Export record_list to files. record_list is a list of lists ([doctype],[docname] ) ,
+ Export record_list to files. record_list is a list of lists ([doctype, docname, folder name],) ,
"""
if frappe.flags.in_import:
return
if record_list:
for record in record_list:
- write_document_file(frappe.get_doc(record[0], record[1]), record_module, create_init=create_init)
+ folder_name = record[2] if len(record) == 3 else None
+ write_document_file(frappe.get_doc(record[0], record[1]), record_module, create_init=create_init, folder_name=folder_name)
-def write_document_file(doc, record_module=None, create_init=True):
+def write_document_file(doc, record_module=None, create_init=True, folder_name=None):
newdoc = doc.as_dict(no_nulls=True)
doc.run_method("before_export", newdoc)
@@ -35,7 +36,10 @@ def write_document_file(doc, record_module=None, create_init=True):
module = record_module or get_module_name(doc)
# create folder
- folder = create_folder(module, doc.doctype, doc.name, create_init)
+ if folder_name:
+ folder = create_folder(module, folder_name, doc.name, create_init)
+ else:
+ folder = create_folder(module, doc.doctype, doc.name, create_init)
# write the data file
fname = scrub(doc.name)
diff --git a/frappe/patches.txt b/frappe/patches.txt
index fb5bf447b7..f8c767f5a3 100644
--- a/frappe/patches.txt
+++ b/frappe/patches.txt
@@ -19,6 +19,7 @@ execute:frappe.reload_doc('core', 'doctype', 'module_def') #2017-09-22
execute:frappe.reload_doc('core', 'doctype', 'version') #2017-04-01
execute:frappe.reload_doc('email', 'doctype', 'document_follow')
execute:frappe.reload_doc('core', 'doctype', 'communication_link') #2019-10-02
+execute:frappe.reload_doc('core', 'doctype', 'has_role')
execute:frappe.reload_doc('core', 'doctype', 'communication') #2019-10-02
frappe.patches.v11_0.replicate_old_user_permissions
frappe.patches.v11_0.reload_and_rename_view_log #2019-01-03
@@ -263,6 +264,7 @@ frappe.patches.v11_0.make_all_prepared_report_attachments_private #2019-11-26
frappe.patches.v12_0.setup_email_linking
frappe.patches.v12_0.fix_home_settings_for_all_users
frappe.patches.v12_0.change_existing_dashboard_chart_filters
+frappe.patches.v12_0.set_correct_assign_value_in_docs #2020-07-13
execute:frappe.delete_doc("Test Runner")
execute:frappe.delete_doc_if_exists('DocType', 'Google Maps Settings')
execute:frappe.db.set_default('desktop:home_page', 'workspace')
@@ -271,7 +273,9 @@ execute:frappe.delete_doc_if_exists('DocType', 'GSuite Templates')
execute:frappe.delete_doc_if_exists('DocType', 'GCalendar Account')
execute:frappe.delete_doc_if_exists('DocType', 'GCalendar Settings')
frappe.patches.v12_0.remove_parent_and_parenttype_from_print_formats
+frappe.patches.v12_0.remove_example_email_thread_notify
execute:from frappe.desk.page.setup_wizard.install_fixtures import update_genders;update_genders()
+frappe.patches.v12_0.set_correct_url_in_files
frappe.patches.v13_0.website_theme_custom_scss
frappe.patches.v13_0.set_existing_dashboard_charts_as_public
frappe.patches.v13_0.set_path_for_homepage_in_web_page_view
@@ -288,3 +292,6 @@ execute:frappe.delete_doc("DocType", "Onboarding Slide")
execute:frappe.delete_doc("DocType", "Onboarding Slide Field")
execute:frappe.delete_doc("DocType", "Onboarding Slide Help Link")
frappe.patches.v13_0.update_date_filters_in_user_settings
+frappe.patches.v13_0.update_duration_options
+frappe.patches.v13_0.replace_old_data_import # 2020-06-24
+frappe.patches.v13_0.create_custom_dashboards_cards_and_charts
diff --git a/frappe/patches/v11_0/reload_and_rename_view_log.py b/frappe/patches/v11_0/reload_and_rename_view_log.py
index 611de79a3c..12c71b746f 100644
--- a/frappe/patches/v11_0/reload_and_rename_view_log.py
+++ b/frappe/patches/v11_0/reload_and_rename_view_log.py
@@ -2,7 +2,7 @@ from __future__ import unicode_literals
import frappe
def execute():
- if frappe.db.exists('DocType', 'View log'):
+ if frappe.db.table_exists('View log'):
# for mac users direct renaming would not work since mysql for mac saves table name in lower case
# so while renaming `tabView log` to `tabView Log` we get "Table 'tabView Log' already exists" error
# more info https://stackoverflow.com/a/44753093/5955589 ,
diff --git a/frappe/patches/v12_0/remove_example_email_thread_notify.py b/frappe/patches/v12_0/remove_example_email_thread_notify.py
new file mode 100644
index 0000000000..94959b6077
--- /dev/null
+++ b/frappe/patches/v12_0/remove_example_email_thread_notify.py
@@ -0,0 +1,8 @@
+import frappe
+
+
+def execute():
+ # remove all example.com email user accounts from notifications
+ frappe.db.sql("""UPDATE `tabUser`
+ SET thread_notify=0, send_me_a_copy=0
+ WHERE email like '%@example.com'""")
diff --git a/frappe/patches/v12_0/set_correct_assign_value_in_docs.py b/frappe/patches/v12_0/set_correct_assign_value_in_docs.py
new file mode 100644
index 0000000000..65a635c170
--- /dev/null
+++ b/frappe/patches/v12_0/set_correct_assign_value_in_docs.py
@@ -0,0 +1,32 @@
+import frappe
+
+def execute():
+ frappe.reload_doc('desk', 'doctype', 'todo')
+
+ query = '''
+ SELECT
+ name, reference_type, reference_name, {} as assignees
+ FROM
+ `tabToDo`
+ WHERE
+ COALESCE(reference_type, '') != '' AND
+ COALESCE(reference_name, '') != '' AND
+ status != 'Cancelled'
+ GROUP BY
+ reference_type, reference_name
+ '''
+
+ assignments = frappe.db.multisql({
+ 'mariadb': query.format('GROUP_CONCAT(DISTINCT `owner`)'),
+ 'postgres': query.format('STRING_AGG(DISTINCT "owner", ",")')
+ }, as_dict=True)
+
+ for doc in assignments:
+ assignments = doc.assignees.split(',')
+ frappe.db.set_value(
+ doc.reference_type,
+ doc.reference_name,
+ '_assign',
+ frappe.as_json(assignments),
+ update_modified=False
+ )
diff --git a/frappe/patches/v12_0/set_correct_url_in_files.py b/frappe/patches/v12_0/set_correct_url_in_files.py
new file mode 100644
index 0000000000..4f820c1b24
--- /dev/null
+++ b/frappe/patches/v12_0/set_correct_url_in_files.py
@@ -0,0 +1,39 @@
+import frappe
+import os
+
+def execute():
+ files = frappe.get_all('File',
+ fields = ['name', 'file_name', 'file_url'],
+ filters = {
+ 'is_folder': 0,
+ 'file_url': ['!=', ''],
+ })
+
+ private_file_path = frappe.get_site_path('private', 'files')
+ public_file_path = frappe.get_site_path('public', 'files')
+
+ for file in files:
+ file_path = file.file_url
+ file_name = file_path.split('/')[-1]
+
+ if not file_path.startswith(('/private/', '/files/')):
+ continue
+
+ file_is_private = file_path.startswith('/private/files/')
+ full_path = frappe.utils.get_files_path(file_name, is_private=file_is_private)
+
+ if not os.path.exists(full_path):
+ if file_is_private:
+ public_file_url = os.path.join(public_file_path, file_name)
+ if os.path.exists(public_file_url):
+ frappe.db.set_value('File', file.name, {
+ 'file_url': '/files/{0}'.format(file_name),
+ 'is_private': 0
+ })
+ else:
+ private_file_url = os.path.join(private_file_path, file_name)
+ if os.path.exists(private_file_url):
+ frappe.db.set_value('File', file.name, {
+ 'file_url': '/private/files/{0}'.format(file_name),
+ 'is_private': 1
+ })
diff --git a/frappe/patches/v13_0/create_custom_dashboards_cards_and_charts.py b/frappe/patches/v13_0/create_custom_dashboards_cards_and_charts.py
new file mode 100644
index 0000000000..9a075a22cc
--- /dev/null
+++ b/frappe/patches/v13_0/create_custom_dashboards_cards_and_charts.py
@@ -0,0 +1,45 @@
+import frappe
+from frappe.model.naming import append_number_if_name_exists
+from frappe.utils.dashboard import get_dashboards_with_link
+
+def execute():
+ if not frappe.db.table_exists('Dashboard Chart')\
+ or not frappe.db.table_exists('Number Card')\
+ or not frappe.db.table_exists('Dashboard'):
+ return
+
+ frappe.reload_doc('desk', 'doctype', 'dashboard_chart')
+ frappe.reload_doc('desk', 'doctype', 'number_card')
+ frappe.reload_doc('desk', 'doctype', 'dashboard')
+
+ modified_charts = get_modified_docs('Dashboard Chart')
+ modified_cards = get_modified_docs('Number Card')
+ modified_dashboards = [doc.name for doc in get_modified_docs('Dashboard')]
+
+ for chart in modified_charts:
+ modified_dashboards += get_dashboards_with_link(chart.name, 'Dashboard Chart')
+ rename_modified_doc(chart.name, 'Dashboard Chart')
+
+ for card in modified_cards:
+ modified_dashboards += get_dashboards_with_link(card.name, 'Number Card')
+ rename_modified_doc(card.name, 'Number Card')
+
+ modified_dashboards = list(set(modified_dashboards))
+
+ for dashboard in modified_dashboards:
+ rename_modified_doc(dashboard, 'Dashboard')
+
+def get_modified_docs(doctype):
+ return frappe.get_all(doctype,
+ filters = {
+ 'owner': 'Administrator',
+ 'modified_by': ['!=', 'Administrator']
+ })
+
+def rename_modified_doc(docname, doctype):
+ new_name = docname + ' Custom'
+ try:
+ frappe.rename_doc(doctype, docname, new_name)
+ except frappe.ValidationError:
+ new_name = append_number_if_name_exists(doctype, new_name)
+ frappe.rename_doc(doctype, docname, new_name)
diff --git a/frappe/patches/v13_0/replace_old_data_import.py b/frappe/patches/v13_0/replace_old_data_import.py
new file mode 100644
index 0000000000..920ee7b553
--- /dev/null
+++ b/frappe/patches/v13_0/replace_old_data_import.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+from __future__ import unicode_literals
+import frappe
+
+
+def execute():
+ if not frappe.db.table_exists("Data Import"): return
+
+ meta = frappe.get_meta("Data Import")
+ # if Data Import is the new one, return early
+ if meta.fields[1].fieldname == "import_type":
+ return
+
+ frappe.db.sql("DROP TABLE IF EXISTS `tabData Import Legacy`")
+ frappe.rename_doc("DocType", "Data Import", "Data Import Legacy")
+ frappe.db.commit()
+ frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
+ frappe.rename_doc("DocType", "Data Import Beta", "Data Import")
diff --git a/frappe/patches/v13_0/update_duration_options.py b/frappe/patches/v13_0/update_duration_options.py
new file mode 100644
index 0000000000..60eef8fc93
--- /dev/null
+++ b/frappe/patches/v13_0/update_duration_options.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
+# MIT License. See license.txt
+
+from __future__ import unicode_literals
+import frappe
+
+def execute():
+ frappe.reload_doc('core', 'doctype', 'DocField')
+
+ if frappe.db.has_column('DocField', 'show_days'):
+ frappe.db.sql("""
+ UPDATE
+ tabDocField
+ SET
+ hide_days = 1 WHERE show_days = 0
+ """)
+ frappe.db.sql_ddl('alter table tabDocField drop column show_days')
+
+ if frappe.db.has_column('DocField', 'show_seconds'):
+ frappe.db.sql("""
+ UPDATE
+ tabDocField
+ SET
+ hide_seconds = 1 WHERE show_seconds = 0
+ """)
+ frappe.db.sql_ddl('alter table tabDocField drop column show_seconds')
+
+ frappe.clear_cache(doctype='DocField')
\ No newline at end of file
diff --git a/frappe/printing/doctype/print_settings/print_settings.json b/frappe/printing/doctype/print_settings/print_settings.json
index 397d9dda5d..f93ad0ee5a 100644
--- a/frappe/printing/doctype/print_settings/print_settings.json
+++ b/frappe/printing/doctype/print_settings/print_settings.json
@@ -1,932 +1,203 @@
{
- "allow_copy": 0,
- "allow_events_in_timeline": 0,
- "allow_guest_to_view": 0,
- "allow_import": 0,
- "allow_rename": 0,
- "beta": 0,
+ "actions": [],
"creation": "2014-07-17 06:54:20.782907",
- "custom": 0,
- "docstatus": 0,
"doctype": "DocType",
"document_type": "System",
- "editable_grid": 0,
+ "engine": "InnoDB",
+ "field_order": [
+ "pdf_settings",
+ "send_print_as_pdf",
+ "repeat_header_footer",
+ "column_break_4",
+ "pdf_page_size",
+ "view_link_in_email",
+ "with_letterhead",
+ "allow_print_for_draft",
+ "add_draft_heading",
+ "column_break_10",
+ "allow_page_break_inside_tables",
+ "allow_print_for_cancelled",
+ "server_printer",
+ "enable_print_server",
+ "server_ip",
+ "printer_name",
+ "port",
+ "raw_printing_section",
+ "enable_raw_printing",
+ "print_style_section",
+ "print_style",
+ "print_style_preview",
+ "section_break_8",
+ "font",
+ "font_size"
+ ],
"fields": [
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "pdf_settings",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "PDF Settings",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "PDF Settings"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "1",
"description": "Send Email Print Attachments as PDF (Recommended)",
- "fetch_if_empty": 0,
"fieldname": "send_print_as_pdf",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Send Print as PDF",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Send Print as PDF"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "1",
- "fetch_if_empty": 0,
"fieldname": "repeat_header_footer",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Repeat Header and Footer in PDF",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Repeat Header and Footer in PDF"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "column_break_4",
- "fieldtype": "Column Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "fieldtype": "Column Break"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "A4",
- "fetch_if_empty": 0,
"fieldname": "pdf_page_size",
"fieldtype": "Select",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
"label": "PDF Page Size",
- "length": 0,
- "no_copy": 0,
- "options": "A4\nLetter",
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "options": "A4\nLetter"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "view_link_in_email",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Page Settings",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Page Settings"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "1",
- "description": "",
- "fetch_if_empty": 0,
"fieldname": "with_letterhead",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Print with letterhead",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Print with letterhead"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "1",
- "description": "",
- "fetch_if_empty": 0,
"fieldname": "allow_print_for_draft",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Allow Print for Draft",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Allow Print for Draft"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "default": "1",
- "description": "",
- "fetch_if_empty": 0,
- "fieldname": "attach_view_link",
- "fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Send document web view link in email",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
- },
- {
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "column_break_10",
- "fieldtype": "Column Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "fieldtype": "Column Break"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "1",
- "fetch_if_empty": 0,
"fieldname": "add_draft_heading",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Always add \"Draft\" Heading for printing draft documents",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Always add \"Draft\" Heading for printing draft documents"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
+ "default": "0",
"fieldname": "allow_page_break_inside_tables",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Allow page break inside tables",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Allow page break inside tables"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "description": "",
- "fetch_if_empty": 0,
+ "default": "0",
"fieldname": "allow_print_for_cancelled",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Allow Print for Cancelled",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Allow Print for Cancelled"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "depends_on": "",
- "fetch_if_empty": 0,
"fieldname": "server_printer",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Print Server",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Print Server"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
+ "default": "0",
"fieldname": "enable_print_server",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Enable Print Server",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Enable Print Server"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "localhost",
"depends_on": "enable_print_server",
- "fetch_if_empty": 0,
"fieldname": "server_ip",
"fieldtype": "Data",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Server IP",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Server IP"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"depends_on": "enable_print_server",
- "fetch_if_empty": 0,
"fieldname": "printer_name",
"fieldtype": "Select",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Printer Name",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Printer Name"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "631",
"depends_on": "enable_print_server",
- "fetch_if_empty": 0,
"fieldname": "port",
"fieldtype": "Int",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Port",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Port"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "raw_printing_section",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Raw Printing",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Raw Printing"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
+ "default": "0",
"fieldname": "enable_raw_printing",
"fieldtype": "Check",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Enable Raw Printing",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Enable Raw Printing"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "print_style_section",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Print Style",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Print Style"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "Modern",
- "fetch_if_empty": 0,
"fieldname": "print_style",
"fieldtype": "Link",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
"in_list_view": 1,
- "in_standard_filter": 0,
"label": "Print Style",
- "length": 0,
- "no_copy": 0,
- "options": "Print Style",
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "options": "Print Style"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "print_style_preview",
"fieldtype": "HTML",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Print Style Preview",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Print Style Preview"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
- "fetch_if_empty": 0,
"fieldname": "section_break_8",
"fieldtype": "Section Break",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Fonts",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Fonts"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"default": "Default",
- "fetch_if_empty": 0,
"fieldname": "font",
"fieldtype": "Select",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
"label": "Font",
- "length": 0,
- "no_copy": 0,
- "options": "Default\nArial\nHelvetica\nVerdana\nMonospace",
- "permlevel": 0,
- "precision": "",
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "options": "Default\nArial\nHelvetica\nVerdana\nMonospace"
},
{
- "allow_bulk_edit": 0,
- "allow_in_quick_entry": 0,
- "allow_on_submit": 0,
- "bold": 0,
- "collapsible": 0,
- "columns": 0,
"description": "In points. Default is 9.",
- "fetch_if_empty": 0,
"fieldname": "font_size",
"fieldtype": "Float",
- "hidden": 0,
- "ignore_user_permissions": 0,
- "ignore_xss_filter": 0,
- "in_filter": 0,
- "in_global_search": 0,
- "in_list_view": 0,
- "in_standard_filter": 0,
- "label": "Font Size",
- "length": 0,
- "no_copy": 0,
- "permlevel": 0,
- "print_hide": 0,
- "print_hide_if_no_value": 0,
- "read_only": 0,
- "remember_last_selected_value": 0,
- "report_hide": 0,
- "reqd": 0,
- "search_index": 0,
- "set_only_once": 0,
- "translatable": 0,
- "unique": 0
+ "label": "Font Size"
}
],
- "has_web_view": 0,
- "hide_toolbar": 0,
"icon": "fa fa-cog",
- "idx": 0,
- "in_create": 0,
- "is_submittable": 0,
"issingle": 1,
- "istable": 0,
- "max_attachments": 0,
- "menu_index": 0,
- "modified": "2019-04-10 14:12:31.081187",
+ "links": [],
+ "modified": "2020-07-02 16:14:47.470668",
"modified_by": "Administrator",
"module": "Printing",
"name": "Print Settings",
- "name_case": "",
"owner": "Administrator",
"permissions": [
{
- "amend": 0,
- "cancel": 0,
"create": 1,
- "delete": 0,
- "email": 0,
- "export": 0,
- "if_owner": 0,
- "import": 0,
- "permlevel": 0,
- "print": 0,
"read": 1,
- "report": 0,
"role": "System Manager",
- "set_user_permissions": 0,
"share": 1,
- "submit": 0,
"write": 1
}
],
"quick_entry": 1,
- "read_only": 0,
- "show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
- "track_changes": 1,
- "track_seen": 0,
- "track_views": 0
+ "track_changes": 1
}
\ No newline at end of file
diff --git a/frappe/public/css/desk-rtl.css b/frappe/public/css/desk-rtl.css
index 31321be17d..a38f6864ff 100644
--- a/frappe/public/css/desk-rtl.css
+++ b/frappe/public/css/desk-rtl.css
@@ -110,4 +110,9 @@ ul.tree-children {
}
.section-header {
direction: ltr;
+}
+
+.ql-editor {
+ direction: rtl;
+ text-align: right;
}
\ No newline at end of file
diff --git a/frappe/public/js/frappe/data_import/column_picker_fields.js b/frappe/public/js/frappe/data_import/column_picker_fields.js
deleted file mode 100644
index 36cbf3c413..0000000000
--- a/frappe/public/js/frappe/data_import/column_picker_fields.js
+++ /dev/null
@@ -1,28 +0,0 @@
-export default class ColumnPickerFields extends frappe.views.ReportView {
- show() {}
-
- get_fields_as_options() {
- let column_map = this.get_columns_for_picker();
- let doctypes = [this.doctype].concat(
- ...frappe.meta.get_table_fields(this.doctype).map(df => df.options)
- );
- // flatten array
- return [].concat(
- ...doctypes.map(doctype => {
- return column_map[doctype].map(df => {
- let label = df.label;
- let value = df.fieldname;
- if (this.doctype !== doctype) {
- label = `${df.label} (${doctype})`;
- value = `${doctype}:${df.fieldname}`;
- }
- return {
- label,
- value,
- description: value
- };
- });
- })
- );
- }
-}
diff --git a/frappe/public/js/frappe/data_import/data_exporter.js b/frappe/public/js/frappe/data_import/data_exporter.js
index d0bf794df6..f6af338235 100644
--- a/frappe/public/js/frappe/data_import/data_exporter.js
+++ b/frappe/public/js/frappe/data_import/data_exporter.js
@@ -1,9 +1,9 @@
-import ColumnPickerFields from './column_picker_fields';
frappe.provide('frappe.data_import');
frappe.data_import.DataExporter = class DataExporter {
- constructor(doctype) {
+ constructor(doctype, exporting_for) {
this.doctype = doctype;
+ this.exporting_for = exporting_for;
frappe.model.with_doctype(doctype, () => {
this.make_dialog();
});
@@ -35,7 +35,7 @@ frappe.data_import.DataExporter = class DataExporter {
value: 'blank_template'
}
],
- default: 'blank_template',
+ default: this.exporting_for === 'Insert New Records' ? 'blank_template' : 'all',
change: () => {
this.update_record_count_message();
}
@@ -67,21 +67,22 @@ frappe.data_import.DataExporter = class DataExporter {
on_change: () => this.update_primary_action(),
options: this.get_multicheck_options(this.doctype)
},
- ...frappe.meta.get_table_fields(this.doctype)
- .map(df => {
- let doctype = df.options;
- let label = df.reqd
- ? __('{0} (1 row mandatory)', [doctype])
- : __(doctype);
- return {
- label,
- fieldname: doctype,
- fieldtype: 'MultiCheck',
- columns: 2,
- on_change: () => this.update_primary_action(),
- options: this.get_multicheck_options(doctype)
- };
- })
+ ...frappe.meta.get_table_fields(this.doctype).map(df => {
+ let doctype = df.options;
+ let child_fieldname = df.fieldname;
+ let label = df.reqd
+ ? // prettier-ignore
+ __('{0} ({1}) (1 row mandatory)', [df.label || df.fieldname, doctype])
+ : __('{0} ({1})', [df.label || df.fieldname, doctype]);
+ return {
+ label,
+ fieldname: child_fieldname,
+ fieldtype: 'MultiCheck',
+ columns: 2,
+ on_change: () => this.update_primary_action(),
+ options: this.get_multicheck_options(doctype, child_fieldname)
+ };
+ })
],
primary_action_label: __('Export'),
primary_action: values => this.export_records(values),
@@ -97,7 +98,7 @@ frappe.data_import.DataExporter = class DataExporter {
export_records() {
let method =
- '/api/method/frappe.core.doctype.data_import_beta.data_import_beta.download_template';
+ '/api/method/frappe.core.doctype.data_import.data_import.download_template';
let multicheck_fields = this.dialog.fields
.filter(df => df.fieldtype === 'MultiCheck')
@@ -137,15 +138,17 @@ frappe.data_import.DataExporter = class DataExporter {
}
make_select_all_buttons() {
+ let for_insert = this.exporting_for === 'Insert New Records';
+ let section_title = for_insert ? __('Select Fields To Insert') : __('Select Fields To Update');
let $select_all_buttons = $(`
tag with
Set dynamic filter values in JavaScript for the required fields here. +
+Ex:
+ frappe.defaults.get_user_default("Company")
+
${moment(task._start).format('MMM D')} - ${moment(task._end).format('MMM D')}
`; + `| {{ __(col.name) }} | - {% endif %} - {% endfor %} -
|---|
| - - {{ - col.formatter - ? col.formatter(row._index, col._index, value, col, row, true) - : col.format - ? col.format(value, row, col, data) - : col.docfield - ? frappe.format(value, col.docfield) - : value - }} - - | - {% endif %} - {% endfor %} -+ + {% format_data = row.is_total_row ? data[0] : row %} + {{ + col.formatter + ? col.formatter(row._index, col._index, value, col, format_data, true) + : col.format + ? col.format(value, row, col, format_data) + : col.docfield + ? frappe.format(value, col.docfield) + : value + }} + + | + {% endif %} + {% endfor %} + + {% endfor %} +