Merge branch 'develop' into grid-upload
This commit is contained in:
commit
73d1c249cc
104 changed files with 45508 additions and 6931 deletions
48
.github/helper/documentation.py
vendored
Normal file
48
.github/helper/documentation.py
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
import sys
|
||||
import requests
|
||||
from urllib.parse import urlparse
|
||||
|
||||
|
||||
# Documentation repositories under the `frappe` GitHub org; a PR body
# linking into any of these counts as a documentation link.
docs_repos = [
    "frappe_docs",
    "erpnext_documentation",
    "erpnext_com",
    "frappe_io",
]
|
||||
|
||||
|
||||
def uri_validator(x):
    """Return True when *x* parses as a complete URL.

    A URL is considered complete only when the scheme, network
    location (host) and path components are all non-empty.
    """
    parsed = urlparse(x)
    required = (parsed.scheme, parsed.netloc, parsed.path)
    return all(required)
|
||||
|
||||
def docs_link_exists(body):
    """Return True if *body* contains a GitHub link into one of the
    frappe documentation repositories (see ``docs_repos``).

    The body is scanned word by word; only words that start with
    ``http`` and parse as full URLs are considered.
    """
    for line in body.splitlines():
        for word in line.split():
            if word.startswith('http') and uri_validator(word):
                parsed_url = urlparse(word)
                if parsed_url.netloc == "github.com":
                    # Path looks like "/<org>/<repo>/...".  Guard the
                    # segment count: the original tuple unpacking
                    # (`_, org, repo, _type, ref = path.split('/')`)
                    # raised ValueError for bare repo links or deep
                    # file links that don't have exactly 5 segments.
                    parts = parsed_url.path.split('/')
                    if len(parts) >= 3:
                        org, repo = parts[1], parts[2]
                        if org == "frappe" and repo in docs_repos:
                            return True
    return False
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # The workflow passes the PR number as the only CLI argument.
    pr = sys.argv[1]
    response = requests.get(
        "https://api.github.com/repos/frappe/frappe/pulls/{}".format(pr),
        timeout=30,  # don't hang the CI job on a stalled connection
    )

    if response.ok:
        payload = response.json()
        # GitHub returns JSON null (-> None) for an empty PR title/body,
        # so `.get(key, "")` alone does NOT protect the .lower() calls:
        # the key exists with value None.  Coalesce explicitly.
        title = (payload.get("title") or "").lower()
        head_sha = (payload.get("head") or {}).get("sha")
        body = (payload.get("body") or "").lower()

        # Only feature PRs require docs; "no-docs" in the body opts out.
        if title.startswith("feat") and head_sha and "no-docs" not in body:
            if docs_link_exists(body):
                print("Documentation Link Found. You're Awesome! 🎉")
            else:
                print("Documentation Link Not Found! ⚠️")
                sys.exit(1)
        else:
            print("Skipping documentation checks... 🏃")
|
||||
24
.github/workflows/docs-checker.yml
vendored
Normal file
24
.github/workflows/docs-checker.yml
vendored
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
# CI check: feature PRs must link to documentation.
# Runs .github/helper/documentation.py against the PR payload.
name: 'Documentation Required'
on:
  pull_request:
    # "edited" is included so adding a docs link (or "no-docs")
    # to the PR description re-triggers the check.
    types: [ opened, synchronize, reopened, edited ]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - name: 'Setup Environment'
        uses: actions/setup-python@v2
        with:
          python-version: 3.6

      - name: 'Clone repo'
        uses: actions/checkout@v2

      - name: Validate Docs
        env:
          PR_NUMBER: ${{ github.event.number }}
        run: |
          pip install requests --quiet
          python $GITHUB_WORKSPACE/.github/helper/documentation.py $PR_NUMBER
|
||||
2
.github/workflows/translation_linter.yml
vendored
2
.github/workflows/translation_linter.yml
vendored
|
|
@ -19,4 +19,4 @@ jobs:
|
|||
run: |
|
||||
git fetch origin $GITHUB_BASE_REF:$GITHUB_BASE_REF -q
|
||||
files=$(git diff --name-only --diff-filter=d $GITHUB_BASE_REF)
|
||||
python $GITHUB_WORKSPACE/.github/frappe_linter/translation.py $files
|
||||
python $GITHUB_WORKSPACE/.github/helper/translation.py $files
|
||||
|
|
|
|||
|
|
@ -1,10 +1,5 @@
|
|||
# imports - standard imports
|
||||
import atexit
|
||||
import compileall
|
||||
import hashlib
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
# imports - third party imports
|
||||
|
|
@ -13,9 +8,7 @@ import click
|
|||
# imports - module imports
|
||||
import frappe
|
||||
from frappe.commands import get_site, pass_context
|
||||
from frappe.commands.scheduler import _is_scheduler_enabled
|
||||
from frappe.exceptions import SiteNotSpecifiedError
|
||||
from frappe.installer import update_site_config
|
||||
from frappe.utils import get_site_path, touch_file
|
||||
|
||||
|
||||
|
|
@ -64,8 +57,10 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
|
|||
sys.exit(1)
|
||||
|
||||
if not db_name:
|
||||
import hashlib
|
||||
db_name = '_' + hashlib.sha1(site.encode()).hexdigest()[:16]
|
||||
|
||||
from frappe.commands.scheduler import _is_scheduler_enabled
|
||||
from frappe.installer import install_db, make_site_dirs
|
||||
from frappe.installer import install_app as _install_app
|
||||
import frappe.utils.scheduler
|
||||
|
|
@ -73,6 +68,7 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
|
|||
frappe.init(site=site)
|
||||
|
||||
try:
|
||||
|
||||
# enable scheduler post install?
|
||||
enable_scheduler = _is_scheduler_enabled()
|
||||
except Exception:
|
||||
|
|
@ -107,11 +103,11 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
|
|||
@click.option('--install-app', multiple=True, help='Install app after installation')
|
||||
@click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file')
|
||||
@click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file')
|
||||
@click.option('--force', is_flag=True, default=False, help='Use a bit of force to get the job done')
|
||||
@click.option('--force', is_flag=True, default=False, help='Ignore the site downgrade warning, if applicable')
|
||||
@pass_context
|
||||
def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None):
|
||||
"Restore site database from an sql file"
|
||||
from frappe.installer import extract_sql_gzip, extract_tar_files, is_downgrade
|
||||
from frappe.installer import extract_sql_gzip, extract_files, is_downgrade
|
||||
force = context.force or force
|
||||
|
||||
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
|
||||
|
|
@ -147,12 +143,12 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
|
|||
# Extract public and/or private files to the restored site, if user has given the path
|
||||
if with_public_files:
|
||||
with_public_files = os.path.join(base_path, with_public_files)
|
||||
public = extract_tar_files(site, with_public_files, 'public')
|
||||
public = extract_files(site, with_public_files, 'public')
|
||||
os.remove(public)
|
||||
|
||||
if with_private_files:
|
||||
with_private_files = os.path.join(base_path, with_private_files)
|
||||
private = extract_tar_files(site, with_private_files, 'private')
|
||||
private = extract_files(site, with_private_files, 'private')
|
||||
os.remove(private)
|
||||
|
||||
# Removing temporarily created file
|
||||
|
|
@ -271,12 +267,13 @@ def disable_user(context, email):
|
|||
|
||||
|
||||
@click.command('migrate')
|
||||
@click.option('--rebuild-website', help="Rebuild webpages after migration")
|
||||
@click.option('--skip-failing', is_flag=True, help="Skip patches that fail to run")
|
||||
@click.option('--skip-search-index', is_flag=True, help="Skip search indexing for web documents")
|
||||
@pass_context
|
||||
def migrate(context, rebuild_website=False, skip_failing=False, skip_search_index=False):
|
||||
def migrate(context, skip_failing=False, skip_search_index=False):
|
||||
"Run patches, sync schema and rebuild files/translations"
|
||||
import compileall
|
||||
import re
|
||||
from frappe.migrate import migrate
|
||||
|
||||
for site in context.sites:
|
||||
|
|
@ -286,7 +283,6 @@ def migrate(context, rebuild_website=False, skip_failing=False, skip_search_inde
|
|||
try:
|
||||
migrate(
|
||||
context.verbose,
|
||||
rebuild_website=rebuild_website,
|
||||
skip_failing=skip_failing,
|
||||
skip_search_index=skip_search_index
|
||||
)
|
||||
|
|
@ -387,35 +383,34 @@ def use(site, sites_path='.'):
|
|||
|
||||
@click.command('backup')
|
||||
@click.option('--with-files', default=False, is_flag=True, help="Take backup with files")
|
||||
@click.option('--verbose', default=False, is_flag=True)
|
||||
@click.option('--backup-path', default=None, help="Set path for saving all the files in this operation")
|
||||
@click.option('--backup-path-db', default=None, help="Set path for saving database file")
|
||||
@click.option('--backup-path-files', default=None, help="Set path for saving public file")
|
||||
@click.option('--backup-path-private-files', default=None, help="Set path for saving private file")
|
||||
@click.option('--backup-path-conf', default=None, help="Set path for saving config file")
|
||||
@click.option('--verbose', default=False, is_flag=True, help="Add verbosity")
|
||||
@click.option('--compress', default=False, is_flag=True, help="Compress private and public files")
|
||||
@pass_context
|
||||
def backup(context, with_files=False, backup_path_db=None, backup_path_files=None,
|
||||
backup_path_private_files=None, quiet=False, verbose=False):
|
||||
def backup(context, with_files=False, backup_path=None, backup_path_db=None, backup_path_files=None,
|
||||
backup_path_private_files=None, backup_path_conf=None, verbose=False, compress=False):
|
||||
"Backup"
|
||||
from frappe.utils.backups import scheduled_backup
|
||||
verbose = verbose or context.verbose
|
||||
exit_code = 0
|
||||
|
||||
for site in context.sites:
|
||||
try:
|
||||
frappe.init(site=site)
|
||||
frappe.connect()
|
||||
odb = scheduled_backup(ignore_files=not with_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, force=True, verbose=verbose)
|
||||
except Exception as e:
|
||||
if verbose:
|
||||
print("Backup failed for {0}. Database or site_config.json may be corrupted".format(site))
|
||||
odb = scheduled_backup(ignore_files=not with_files, backup_path=backup_path, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, backup_path_conf=backup_path_conf, force=True, verbose=verbose, compress=compress)
|
||||
except Exception:
|
||||
click.secho("Backup failed for Site {0}. Database or site_config.json may be corrupted".format(site), fg="red")
|
||||
exit_code = 1
|
||||
continue
|
||||
|
||||
if verbose:
|
||||
from frappe.utils import now
|
||||
summary_title = "Backup Summary at {0}".format(now())
|
||||
print(summary_title + "\n" + "-" * len(summary_title))
|
||||
print("Database backup:", odb.backup_path_db)
|
||||
if with_files:
|
||||
print("Public files: ", odb.backup_path_files)
|
||||
print("Private files: ", odb.backup_path_private_files)
|
||||
|
||||
odb.print_summary()
|
||||
click.secho("Backup for Site {0} has been successfully completed{1}".format(site, " with files" if with_files else ""), fg="green")
|
||||
frappe.destroy()
|
||||
|
||||
if not context.sites:
|
||||
raise SiteNotSpecifiedError
|
||||
|
||||
|
|
|
|||
|
|
@ -4,11 +4,19 @@
|
|||
from __future__ import unicode_literals
|
||||
|
||||
import frappe
|
||||
import os, json
|
||||
import os
|
||||
import json
|
||||
|
||||
from frappe import _
|
||||
from frappe.modules import scrub, get_module_path
|
||||
from frappe.utils import flt, cint, get_html_format, get_url_to_form, gzip_decompress, format_duration
|
||||
from frappe.utils import (
|
||||
flt,
|
||||
cint,
|
||||
get_html_format,
|
||||
get_url_to_form,
|
||||
gzip_decompress,
|
||||
format_duration,
|
||||
)
|
||||
from frappe.model.utils import render_include
|
||||
from frappe.translate import send_translations
|
||||
import frappe.desk.reportview
|
||||
|
|
@ -17,11 +25,12 @@ from six import string_types, iteritems
|
|||
from datetime import timedelta
|
||||
from frappe.core.utils import ljust_list
|
||||
|
||||
|
||||
def get_report_doc(report_name):
|
||||
doc = frappe.get_doc("Report", report_name)
|
||||
doc.custom_columns = []
|
||||
|
||||
if doc.report_type == 'Custom Report':
|
||||
if doc.report_type == "Custom Report":
|
||||
custom_report_doc = doc
|
||||
reference_report = custom_report_doc.reference_report
|
||||
doc = frappe.get_doc("Report", reference_report)
|
||||
|
|
@ -30,11 +39,18 @@ def get_report_doc(report_name):
|
|||
doc.is_custom_report = True
|
||||
|
||||
if not doc.is_permitted():
|
||||
frappe.throw(_("You don't have access to Report: {0}").format(report_name), frappe.PermissionError)
|
||||
frappe.throw(
|
||||
_("You don't have access to Report: {0}").format(report_name),
|
||||
frappe.PermissionError,
|
||||
)
|
||||
|
||||
if not frappe.has_permission(doc.ref_doctype, "report"):
|
||||
frappe.throw(_("You don't have permission to get a report on: {0}").format(doc.ref_doctype),
|
||||
frappe.PermissionError)
|
||||
frappe.throw(
|
||||
_("You don't have permission to get a report on: {0}").format(
|
||||
doc.ref_doctype
|
||||
),
|
||||
frappe.PermissionError,
|
||||
)
|
||||
|
||||
if doc.disabled:
|
||||
frappe.throw(_("Report {0} is disabled").format(report_name))
|
||||
|
|
@ -54,11 +70,10 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
|
|||
if report.report_type == "Query Report":
|
||||
res = report.execute_query_report(filters)
|
||||
|
||||
elif report.report_type == 'Script Report':
|
||||
elif report.report_type == "Script Report":
|
||||
res = report.execute_script_report(filters)
|
||||
|
||||
columns, result, message, chart, report_summary, skip_total_row = \
|
||||
ljust_list(res, 6)
|
||||
columns, result, message, chart, report_summary, skip_total_row = ljust_list(res, 6)
|
||||
|
||||
if report.custom_columns:
|
||||
# Original query columns, needed to reorder data as per custom columns
|
||||
|
|
@ -74,7 +89,7 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
|
|||
result = add_data_to_custom_columns(custom_columns, result)
|
||||
|
||||
for custom_column in custom_columns:
|
||||
columns.insert(custom_column['insert_after_index'] + 1, custom_column)
|
||||
columns.insert(custom_column["insert_after_index"] + 1, custom_column)
|
||||
|
||||
if result:
|
||||
result = get_filtered_data(report.ref_doctype, columns, result, user)
|
||||
|
|
@ -90,17 +105,19 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
|
|||
"report_summary": report_summary,
|
||||
"skip_total_row": skip_total_row or 0,
|
||||
"status": None,
|
||||
"execution_time": frappe.cache().hget('report_execution_time', report.name) or 0
|
||||
"execution_time": frappe.cache().hget("report_execution_time", report.name)
|
||||
or 0,
|
||||
}
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
def background_enqueue_run(report_name, filters=None, user=None):
|
||||
"""run reports in background"""
|
||||
if not user:
|
||||
user = frappe.session.user
|
||||
report = get_report_doc(report_name)
|
||||
track_instance = \
|
||||
frappe.get_doc({
|
||||
track_instance = frappe.get_doc(
|
||||
{
|
||||
"doctype": "Prepared Report",
|
||||
"report_name": report_name,
|
||||
# This looks like an insanity but, without this it'd be very hard to find Prepared Reports matching given condition
|
||||
|
|
@ -110,21 +127,24 @@ def background_enqueue_run(report_name, filters=None, user=None):
|
|||
"report_type": report.report_type,
|
||||
"query": report.query,
|
||||
"module": report.module,
|
||||
})
|
||||
}
|
||||
)
|
||||
track_instance.insert(ignore_permissions=True)
|
||||
frappe.db.commit()
|
||||
track_instance.enqueue_report()
|
||||
|
||||
return {
|
||||
"name": track_instance.name,
|
||||
"redirect_url": get_url_to_form("Prepared Report", track_instance.name)
|
||||
"redirect_url": get_url_to_form("Prepared Report", track_instance.name),
|
||||
}
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
def get_script(report_name):
|
||||
report = get_report_doc(report_name)
|
||||
module = report.module or frappe.db.get_value("DocType", report.ref_doctype, "module")
|
||||
module = report.module or frappe.db.get_value(
|
||||
"DocType", report.ref_doctype, "module"
|
||||
)
|
||||
module_path = get_module_path(module)
|
||||
report_folder = os.path.join(module_path, "report", scrub(report.name))
|
||||
script_path = os.path.join(report_folder, scrub(report.name) + ".js")
|
||||
|
|
@ -150,24 +170,38 @@ def get_script(report_name):
|
|||
return {
|
||||
"script": render_include(script),
|
||||
"html_format": html_format,
|
||||
"execution_time": frappe.cache().hget('report_execution_time', report_name) or 0
|
||||
"execution_time": frappe.cache().hget("report_execution_time", report_name)
|
||||
or 0,
|
||||
}
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
@frappe.read_only()
|
||||
def run(report_name, filters=None, user=None, ignore_prepared_report=False, custom_columns=None):
|
||||
def run(
|
||||
report_name,
|
||||
filters=None,
|
||||
user=None,
|
||||
ignore_prepared_report=False,
|
||||
custom_columns=None,
|
||||
):
|
||||
|
||||
report = get_report_doc(report_name)
|
||||
if not user:
|
||||
user = frappe.session.user
|
||||
if not frappe.has_permission(report.ref_doctype, "report"):
|
||||
frappe.msgprint(_("Must have report permission to access this report."),
|
||||
raise_exception=True)
|
||||
frappe.msgprint(
|
||||
_("Must have report permission to access this report."),
|
||||
raise_exception=True,
|
||||
)
|
||||
|
||||
result = None
|
||||
|
||||
if report.prepared_report and not report.disable_prepared_report and not ignore_prepared_report:
|
||||
if (
|
||||
report.prepared_report
|
||||
and not report.disable_prepared_report
|
||||
and not ignore_prepared_report
|
||||
and not custom_columns
|
||||
):
|
||||
if filters:
|
||||
if isinstance(filters, string_types):
|
||||
filters = json.loads(filters)
|
||||
|
|
@ -180,10 +214,13 @@ def run(report_name, filters=None, user=None, ignore_prepared_report=False, cust
|
|||
else:
|
||||
result = generate_report_result(report, filters, user, custom_columns)
|
||||
|
||||
result["add_total_row"] = report.add_total_row and not result.get('skip_total_row', False)
|
||||
result["add_total_row"] = report.add_total_row and not result.get(
|
||||
"skip_total_row", False
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def add_data_to_custom_columns(columns, result):
|
||||
custom_fields_data = get_data_for_custom_report(columns)
|
||||
|
||||
|
|
@ -195,25 +232,28 @@ def add_data_to_custom_columns(columns, result):
|
|||
|
||||
if isinstance(row, list):
|
||||
for idx, column in enumerate(columns):
|
||||
if column.get('link_field'):
|
||||
row_obj[column['fieldname']] = None
|
||||
if column.get("link_field"):
|
||||
row_obj[column["fieldname"]] = None
|
||||
row.insert(idx, None)
|
||||
else:
|
||||
row_obj[column['fieldname']] = row[idx]
|
||||
row_obj[column["fieldname"]] = row[idx]
|
||||
data.append(row_obj)
|
||||
else:
|
||||
data.append(row)
|
||||
|
||||
for row in data:
|
||||
for column in columns:
|
||||
if column.get('link_field'):
|
||||
fieldname = column['fieldname']
|
||||
key = (column['doctype'], fieldname)
|
||||
link_field = column['link_field']
|
||||
row[fieldname] = custom_fields_data.get(key, {}).get(row.get(link_field))
|
||||
if column.get("link_field"):
|
||||
fieldname = column["fieldname"]
|
||||
key = (column["doctype"], fieldname)
|
||||
link_field = column["link_field"]
|
||||
row[fieldname] = custom_fields_data.get(key, {}).get(
|
||||
row.get(link_field)
|
||||
)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def reorder_data_for_custom_columns(custom_columns, columns, result):
|
||||
if not result:
|
||||
return []
|
||||
|
|
@ -228,6 +268,7 @@ def reorder_data_for_custom_columns(custom_columns, columns, result):
|
|||
# columns do not need to be reordered if result is a list of dicts
|
||||
return result
|
||||
|
||||
|
||||
def get_columns_from_list(columns, target_columns, result):
|
||||
reordered_result = []
|
||||
|
||||
|
|
@ -244,6 +285,7 @@ def get_columns_from_list(columns, target_columns, result):
|
|||
|
||||
return reordered_result
|
||||
|
||||
|
||||
def get_prepared_report_result(report, filters, dn="", user=None):
|
||||
latest_report_data = {}
|
||||
doc = None
|
||||
|
|
@ -252,14 +294,15 @@ def get_prepared_report_result(report, filters, dn="", user=None):
|
|||
doc = frappe.get_doc("Prepared Report", dn)
|
||||
else:
|
||||
# Only look for completed prepared reports with given filters.
|
||||
doc_list = frappe.get_all("Prepared Report",
|
||||
doc_list = frappe.get_all(
|
||||
"Prepared Report",
|
||||
filters={
|
||||
"status": "Completed",
|
||||
"filters": json.dumps(filters),
|
||||
"owner": user,
|
||||
"report_name": report.get('custom_report') or report.get('report_name')
|
||||
"report_name": report.get("custom_report") or report.get("report_name"),
|
||||
},
|
||||
order_by = 'creation desc'
|
||||
order_by="creation desc",
|
||||
)
|
||||
|
||||
if doc_list:
|
||||
|
|
@ -269,11 +312,15 @@ def get_prepared_report_result(report, filters, dn="", user=None):
|
|||
if doc:
|
||||
try:
|
||||
# Prepared Report data is stored in a GZip compressed JSON file
|
||||
attached_file_name = frappe.db.get_value("File", {"attached_to_doctype": doc.doctype, "attached_to_name":doc.name}, "name")
|
||||
attached_file = frappe.get_doc('File', attached_file_name)
|
||||
attached_file_name = frappe.db.get_value(
|
||||
"File",
|
||||
{"attached_to_doctype": doc.doctype, "attached_to_name": doc.name},
|
||||
"name",
|
||||
)
|
||||
attached_file = frappe.get_doc("File", attached_file_name)
|
||||
compressed_content = attached_file.get_content()
|
||||
uncompressed_content = gzip_decompress(compressed_content)
|
||||
data = json.loads(uncompressed_content)
|
||||
data = json.loads(uncompressed_content.decode("utf-8"))
|
||||
if data:
|
||||
columns = json.loads(doc.columns) if doc.columns else data[0]
|
||||
|
||||
|
|
@ -281,23 +328,18 @@ def get_prepared_report_result(report, filters, dn="", user=None):
|
|||
if isinstance(column, dict) and column.get("label"):
|
||||
column["label"] = _(column["label"])
|
||||
|
||||
latest_report_data = {
|
||||
"columns": columns,
|
||||
"result": data
|
||||
}
|
||||
latest_report_data = {"columns": columns, "result": data}
|
||||
except Exception:
|
||||
frappe.log_error(frappe.get_traceback())
|
||||
frappe.delete_doc("Prepared Report", doc.name)
|
||||
frappe.db.commit()
|
||||
doc = None
|
||||
|
||||
latest_report_data.update({
|
||||
"prepared_report": True,
|
||||
"doc": doc
|
||||
})
|
||||
latest_report_data.update({"prepared_report": True, "doc": doc})
|
||||
|
||||
return latest_report_data
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
def export_query():
|
||||
"""export from query reports"""
|
||||
|
|
@ -313,8 +355,8 @@ def export_query():
|
|||
if isinstance(data.get("report_name"), string_types):
|
||||
report_name = data["report_name"]
|
||||
frappe.permissions.can_export(
|
||||
frappe.get_cached_value('Report', report_name, 'ref_doctype'),
|
||||
raise_exception=True
|
||||
frappe.get_cached_value("Report", report_name, "ref_doctype"),
|
||||
raise_exception=True,
|
||||
)
|
||||
if isinstance(data.get("file_format_type"), string_types):
|
||||
file_format_type = data["file_format_type"]
|
||||
|
|
@ -331,20 +373,26 @@ def export_query():
|
|||
data = run(report_name, filters, custom_columns=custom_columns)
|
||||
data = frappe._dict(data)
|
||||
if not data.columns:
|
||||
frappe.respond_as_web_page(_("No data to export"),
|
||||
_("You can try changing the filters of your report."))
|
||||
frappe.respond_as_web_page(
|
||||
_("No data to export"),
|
||||
_("You can try changing the filters of your report."),
|
||||
)
|
||||
return
|
||||
|
||||
columns = get_columns_dict(data.columns)
|
||||
|
||||
from frappe.utils.xlsxutils import make_xlsx
|
||||
data['result'] = handle_duration_fieldtype_values(data.get('result'), data.get('columns'))
|
||||
|
||||
data["result"] = handle_duration_fieldtype_values(
|
||||
data.get("result"), data.get("columns")
|
||||
)
|
||||
xlsx_data = build_xlsx_data(columns, data, visible_idx, include_indentation)
|
||||
xlsx_file = make_xlsx(xlsx_data, "Query Report")
|
||||
|
||||
frappe.response['filename'] = report_name + '.xlsx'
|
||||
frappe.response['filecontent'] = xlsx_file.getvalue()
|
||||
frappe.response['type'] = 'binary'
|
||||
frappe.response["filename"] = report_name + ".xlsx"
|
||||
frappe.response["filecontent"] = xlsx_file.getvalue()
|
||||
frappe.response["type"] = "binary"
|
||||
|
||||
|
||||
def handle_duration_fieldtype_values(result, columns):
|
||||
for i, col in enumerate(columns):
|
||||
|
|
@ -370,6 +418,7 @@ def handle_duration_fieldtype_values(result, columns):
|
|||
|
||||
return result
|
||||
|
||||
|
||||
def build_xlsx_data(columns, data, visible_idx, include_indentation):
|
||||
result = [[]]
|
||||
|
||||
|
|
@ -386,13 +435,13 @@ def build_xlsx_data(columns, data, visible_idx, include_indentation):
|
|||
|
||||
if isinstance(row, dict) and row:
|
||||
for idx in range(len(data.columns)):
|
||||
# check if column is not hidden
|
||||
# check if column is not hidden
|
||||
if not columns[idx].get("hidden"):
|
||||
label = columns[idx]["label"]
|
||||
fieldname = columns[idx]["fieldname"]
|
||||
cell_value = row.get(fieldname, row.get(label, ""))
|
||||
if cint(include_indentation) and 'indent' in row and idx == 0:
|
||||
cell_value = (' ' * cint(row['indent'])) + cell_value
|
||||
if cint(include_indentation) and "indent" in row and idx == 0:
|
||||
cell_value = (" " * cint(row["indent"])) + cell_value
|
||||
row_data.append(cell_value)
|
||||
else:
|
||||
row_data = row
|
||||
|
|
@ -401,8 +450,9 @@ def build_xlsx_data(columns, data, visible_idx, include_indentation):
|
|||
|
||||
return result
|
||||
|
||||
def add_total_row(result, columns, meta = None):
|
||||
total_row = [""]*len(columns)
|
||||
|
||||
def add_total_row(result, columns, meta=None):
|
||||
total_row = [""] * len(columns)
|
||||
has_percent = []
|
||||
for i, col in enumerate(columns):
|
||||
fieldtype, options, fieldname = None, None, None
|
||||
|
|
@ -428,10 +478,13 @@ def add_total_row(result, columns, meta = None):
|
|||
options = col.get("options")
|
||||
|
||||
for row in result:
|
||||
if i >= len(row): continue
|
||||
if i >= len(row):
|
||||
continue
|
||||
|
||||
cell = row.get(fieldname) if isinstance(row, dict) else row[i]
|
||||
if fieldtype in ["Currency", "Int", "Float", "Percent", "Duration"] and flt(cell):
|
||||
if fieldtype in ["Currency", "Int", "Float", "Percent", "Duration"] and flt(
|
||||
cell
|
||||
):
|
||||
total_row[i] = flt(total_row[i]) + flt(cell)
|
||||
|
||||
if fieldtype == "Percent" and i not in has_percent:
|
||||
|
|
@ -439,12 +492,15 @@ def add_total_row(result, columns, meta = None):
|
|||
|
||||
if fieldtype == "Time" and cell:
|
||||
if not total_row[i]:
|
||||
total_row[i]=timedelta(hours=0,minutes=0,seconds=0)
|
||||
total_row[i] = total_row[i] + cell
|
||||
total_row[i] = timedelta(hours=0, minutes=0, seconds=0)
|
||||
total_row[i] = total_row[i] + cell
|
||||
|
||||
|
||||
if fieldtype=="Link" and options == "Currency":
|
||||
total_row[i] = result[0].get(fieldname) if isinstance(result[0], dict) else result[0][i]
|
||||
if fieldtype == "Link" and options == "Currency":
|
||||
total_row[i] = (
|
||||
result[0].get(fieldname)
|
||||
if isinstance(result[0], dict)
|
||||
else result[0][i]
|
||||
)
|
||||
|
||||
for i in has_percent:
|
||||
total_row[i] = flt(total_row[i]) / len(result)
|
||||
|
|
@ -463,35 +519,44 @@ def add_total_row(result, columns, meta = None):
|
|||
result.append(total_row)
|
||||
return result
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
def get_data_for_custom_field(doctype, field):
|
||||
|
||||
if not frappe.has_permission(doctype, "read"):
|
||||
frappe.throw(_("Not Permitted"), frappe.PermissionError)
|
||||
|
||||
value_map = frappe._dict(frappe.get_all(doctype,
|
||||
fields=["name", field],
|
||||
as_list=1))
|
||||
value_map = frappe._dict(frappe.get_all(doctype, fields=["name", field], as_list=1))
|
||||
|
||||
return value_map
|
||||
|
||||
|
||||
def get_data_for_custom_report(columns):
|
||||
doc_field_value_map = {}
|
||||
|
||||
for column in columns:
|
||||
if column.get('link_field'):
|
||||
fieldname = column.get('fieldname')
|
||||
doctype = column.get('doctype')
|
||||
doc_field_value_map[(doctype, fieldname)] = get_data_for_custom_field(doctype, fieldname)
|
||||
if column.get("link_field"):
|
||||
fieldname = column.get("fieldname")
|
||||
doctype = column.get("doctype")
|
||||
doc_field_value_map[(doctype, fieldname)] = get_data_for_custom_field(
|
||||
doctype, fieldname
|
||||
)
|
||||
|
||||
return doc_field_value_map
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
def save_report(reference_report, report_name, columns):
|
||||
report_doc = get_report_doc(reference_report)
|
||||
|
||||
docname = frappe.db.exists("Report",
|
||||
{'report_name': report_name, 'is_standard': 'No', 'report_type': 'Custom Report'})
|
||||
docname = frappe.db.exists(
|
||||
"Report",
|
||||
{
|
||||
"report_name": report_name,
|
||||
"is_standard": "No",
|
||||
"report_type": "Custom Report",
|
||||
},
|
||||
)
|
||||
if docname:
|
||||
report = frappe.get_doc("Report", docname)
|
||||
report.update({"json": columns})
|
||||
|
|
@ -500,15 +565,17 @@ def save_report(reference_report, report_name, columns):
|
|||
|
||||
return docname
|
||||
else:
|
||||
new_report = frappe.get_doc({
|
||||
'doctype': 'Report',
|
||||
'report_name': report_name,
|
||||
'json': columns,
|
||||
'ref_doctype': report_doc.ref_doctype,
|
||||
'is_standard': 'No',
|
||||
'report_type': 'Custom Report',
|
||||
'reference_report': reference_report
|
||||
}).insert(ignore_permissions = True)
|
||||
new_report = frappe.get_doc(
|
||||
{
|
||||
"doctype": "Report",
|
||||
"report_name": report_name,
|
||||
"json": columns,
|
||||
"ref_doctype": report_doc.ref_doctype,
|
||||
"is_standard": "No",
|
||||
"report_type": "Custom Report",
|
||||
"reference_report": reference_report,
|
||||
}
|
||||
).insert(ignore_permissions=True)
|
||||
frappe.msgprint(_("{0} saved successfully").format(new_report.name))
|
||||
return new_report.name
|
||||
|
||||
|
|
@ -526,10 +593,22 @@ def get_filtered_data(ref_doctype, columns, data, user):
|
|||
if match_filters_per_doctype:
|
||||
for row in data:
|
||||
# Why linked_doctypes.get(ref_doctype)? because if column is empty, linked_doctypes[ref_doctype] is removed
|
||||
if linked_doctypes.get(ref_doctype) and shared and row[linked_doctypes[ref_doctype]] in shared:
|
||||
if (
|
||||
linked_doctypes.get(ref_doctype)
|
||||
and shared
|
||||
and row[linked_doctypes[ref_doctype]] in shared
|
||||
):
|
||||
result.append(row)
|
||||
|
||||
elif has_match(row, linked_doctypes, match_filters_per_doctype, ref_doctype, if_owner, columns_dict, user):
|
||||
elif has_match(
|
||||
row,
|
||||
linked_doctypes,
|
||||
match_filters_per_doctype,
|
||||
ref_doctype,
|
||||
if_owner,
|
||||
columns_dict,
|
||||
user,
|
||||
):
|
||||
result.append(row)
|
||||
else:
|
||||
result = list(data)
|
||||
|
|
@ -537,17 +616,25 @@ def get_filtered_data(ref_doctype, columns, data, user):
|
|||
return result
|
||||
|
||||
|
||||
def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner, columns_dict, user):
|
||||
def has_match(
|
||||
row,
|
||||
linked_doctypes,
|
||||
doctype_match_filters,
|
||||
ref_doctype,
|
||||
if_owner,
|
||||
columns_dict,
|
||||
user,
|
||||
):
|
||||
"""Returns True if after evaluating permissions for each linked doctype
|
||||
- There is an owner match for the ref_doctype
|
||||
- `and` There is a user permission match for all linked doctypes
|
||||
- There is an owner match for the ref_doctype
|
||||
- `and` There is a user permission match for all linked doctypes
|
||||
|
||||
Returns True if the row is empty
|
||||
Returns True if the row is empty
|
||||
|
||||
Note:
|
||||
Each doctype could have multiple conflicting user permission doctypes.
|
||||
Hence even if one of the sets allows a match, it is true.
|
||||
This behavior is equivalent to the trickling of user permissions of linked doctypes to the ref doctype.
|
||||
Note:
|
||||
Each doctype could have multiple conflicting user permission doctypes.
|
||||
Hence even if one of the sets allows a match, it is true.
|
||||
This behavior is equivalent to the trickling of user permissions of linked doctypes to the ref doctype.
|
||||
"""
|
||||
resultant_match = True
|
||||
|
||||
|
|
@ -558,20 +645,22 @@ def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner
|
|||
for doctype, filter_list in doctype_match_filters.items():
|
||||
matched_for_doctype = False
|
||||
|
||||
if doctype==ref_doctype and if_owner:
|
||||
if doctype == ref_doctype and if_owner:
|
||||
idx = linked_doctypes.get("User")
|
||||
if (idx is not None
|
||||
and row[idx]==user
|
||||
and columns_dict[idx]==columns_dict.get("owner")):
|
||||
# owner match is true
|
||||
matched_for_doctype = True
|
||||
if (
|
||||
idx is not None
|
||||
and row[idx] == user
|
||||
and columns_dict[idx] == columns_dict.get("owner")
|
||||
):
|
||||
# owner match is true
|
||||
matched_for_doctype = True
|
||||
|
||||
if not matched_for_doctype:
|
||||
for match_filters in filter_list:
|
||||
match = True
|
||||
for dt, idx in linked_doctypes.items():
|
||||
# case handled above
|
||||
if dt=="User" and columns_dict[idx]==columns_dict.get("owner"):
|
||||
if dt == "User" and columns_dict[idx] == columns_dict.get("owner"):
|
||||
continue
|
||||
|
||||
cell_value = None
|
||||
|
|
@ -580,7 +669,11 @@ def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner
|
|||
elif isinstance(row, (list, tuple)):
|
||||
cell_value = row[idx]
|
||||
|
||||
if dt in match_filters and cell_value not in match_filters.get(dt) and frappe.db.exists(dt, cell_value):
|
||||
if (
|
||||
dt in match_filters
|
||||
and cell_value not in match_filters.get(dt)
|
||||
and frappe.db.exists(dt, cell_value)
|
||||
):
|
||||
match = False
|
||||
break
|
||||
|
||||
|
|
@ -599,6 +692,7 @@ def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner
|
|||
|
||||
return resultant_match
|
||||
|
||||
|
||||
def get_linked_doctypes(columns, data):
|
||||
linked_doctypes = {}
|
||||
|
||||
|
|
@ -606,7 +700,7 @@ def get_linked_doctypes(columns, data):
|
|||
|
||||
for idx, col in enumerate(columns):
|
||||
df = columns_dict[idx]
|
||||
if df.get("fieldtype")=="Link":
|
||||
if df.get("fieldtype") == "Link":
|
||||
if data and isinstance(data[0], (list, tuple)):
|
||||
linked_doctypes[df["options"]] = idx
|
||||
else:
|
||||
|
|
@ -635,10 +729,11 @@ def get_linked_doctypes(columns, data):
|
|||
|
||||
return linked_doctypes
|
||||
|
||||
|
||||
def get_columns_dict(columns):
|
||||
"""Returns a dict with column docfield values as dict
|
||||
The keys for the dict are both idx and fieldname,
|
||||
so either index or fieldname can be used to search for a column's docfield properties
|
||||
The keys for the dict are both idx and fieldname,
|
||||
so either index or fieldname can be used to search for a column's docfield properties
|
||||
"""
|
||||
columns_dict = frappe._dict()
|
||||
for idx, col in enumerate(columns):
|
||||
|
|
@ -648,6 +743,7 @@ def get_columns_dict(columns):
|
|||
|
||||
return columns_dict
|
||||
|
||||
|
||||
def get_column_as_dict(col):
|
||||
col_dict = frappe._dict()
|
||||
|
||||
|
|
@ -671,6 +767,7 @@ def get_column_as_dict(col):
|
|||
|
||||
return col_dict
|
||||
|
||||
|
||||
def get_user_match_filters(doctypes, user):
|
||||
match_filters = {}
|
||||
|
||||
|
|
|
|||
|
|
@ -155,7 +155,12 @@ def get_context(context):
|
|||
allow_update = False
|
||||
try:
|
||||
if allow_update and not doc.flags.in_notification_update:
|
||||
doc.set(self.set_property_after_alert, self.property_value)
|
||||
fieldname = self.set_property_after_alert
|
||||
value = self.property_value
|
||||
if doc.meta.get_field(fieldname).fieldtype in frappe.model.numeric_fieldtypes:
|
||||
value = frappe.utils.cint(value)
|
||||
|
||||
doc.set(fieldname, value)
|
||||
doc.flags.updater_reference = {
|
||||
'doctype': self.doctype,
|
||||
'docname': self.name,
|
||||
|
|
@ -177,7 +182,7 @@ def get_context(context):
|
|||
recipients, cc, bcc = self.get_list_of_recipients(doc, context)
|
||||
|
||||
users = recipients + cc + bcc
|
||||
|
||||
|
||||
if not users:
|
||||
return
|
||||
|
||||
|
|
|
|||
|
|
@ -345,8 +345,7 @@ def extract_sql_gzip(sql_gz_path):
|
|||
|
||||
return decompressed_file
|
||||
|
||||
|
||||
def extract_tar_files(site_name, file_path, folder_name):
|
||||
def extract_files(site_name, file_path, folder_name):
|
||||
import subprocess
|
||||
import shutil
|
||||
|
||||
|
|
@ -362,7 +361,10 @@ def extract_tar_files(site_name, file_path, folder_name):
|
|||
tar_path = os.path.join(abs_site_path, tar_name)
|
||||
|
||||
try:
|
||||
subprocess.check_output(['tar', 'xvf', tar_path, '--strip', '2'], cwd=abs_site_path)
|
||||
if file_path.endswith(".tar"):
|
||||
subprocess.check_output(['tar', 'xvf', tar_path, '--strip', '2'], cwd=abs_site_path)
|
||||
elif file_path.endswith(".tgz"):
|
||||
subprocess.check_output(['tar', 'zxvf', tar_path, '--strip', '2'], cwd=abs_site_path)
|
||||
except:
|
||||
raise
|
||||
finally:
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ def send_email(success, service_name, doctype, email_field, error_status=None):
|
|||
return
|
||||
|
||||
if success:
|
||||
if not frappe.db.get_value(doctype, None, "send_email_for_successful_backup"):
|
||||
if not frappe.db.get_single_value(doctype, "send_email_for_successful_backup"):
|
||||
return
|
||||
|
||||
subject = "Backup Upload Successful"
|
||||
|
|
@ -28,7 +28,6 @@ def send_email(success, service_name, doctype, email_field, error_status=None):
|
|||
<p>Hi there, this is just to inform you that your backup was successfully uploaded to your {0} bucket. So relax!</p>""".format(
|
||||
service_name
|
||||
)
|
||||
|
||||
else:
|
||||
subject = "[Warning] Backup Upload Failed"
|
||||
message = """
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ from frappe.core.doctype.scheduled_job_type.scheduled_job_type import sync_jobs
|
|||
from frappe.search.website_search import build_index_for_all_routes
|
||||
|
||||
|
||||
def migrate(verbose=True, rebuild_website=False, skip_failing=False, skip_search_index=False):
|
||||
def migrate(verbose=True, skip_failing=False, skip_search_index=False):
|
||||
'''Migrate all apps to the current version, will:
|
||||
- run before migrate hooks
|
||||
- run patches
|
||||
|
|
|
|||
|
|
@ -347,7 +347,7 @@ class BaseDocument(object):
|
|||
if self.meta.autoname=="hash":
|
||||
# hash collision? try again
|
||||
frappe.flags.retry_count = (frappe.flags.retry_count or 0) + 1
|
||||
if frappe.flags.retry_count > 5:
|
||||
if frappe.flags.retry_count > 5 and not frappe.flags.in_test:
|
||||
raise
|
||||
self.name = None
|
||||
self.db_insert()
|
||||
|
|
|
|||
|
|
@ -58,7 +58,10 @@ class DatabaseQuery(object):
|
|||
if fields:
|
||||
self.fields = fields
|
||||
else:
|
||||
self.fields = ["`tab{0}`.`name`".format(self.doctype)]
|
||||
if pluck:
|
||||
self.fields = ["`tab{0}`.`{1}`".format(self.doctype, pluck)]
|
||||
else:
|
||||
self.fields = ["`tab{0}`.`name`".format(self.doctype)]
|
||||
|
||||
if start: limit_start = start
|
||||
if page_length: limit_page_length = page_length
|
||||
|
|
@ -169,10 +172,10 @@ class DatabaseQuery(object):
|
|||
fields = []
|
||||
|
||||
for field in self.fields:
|
||||
if (field.strip().startswith(("`", "*")) or "(" in field):
|
||||
if field.strip().startswith(("`", "*", '"', "'")) or "(" in field:
|
||||
fields.append(field)
|
||||
elif "as" in field.lower().split(" "):
|
||||
col, _, new = field.split()
|
||||
col, _, new = field.split()[-3:]
|
||||
fields.append("`{0}` as {1}".format(col, new))
|
||||
else:
|
||||
fields.append("`{0}`".format(field))
|
||||
|
|
|
|||
|
|
@ -312,4 +312,4 @@ frappe.patches.v13_0.enable_custom_script
|
|||
frappe.patches.v13_0.update_newsletter_content_type
|
||||
execute:frappe.db.set_value('Website Settings', 'Website Settings', {'navbar_template': 'Standard Navbar', 'footer_template': 'Standard Footer'})
|
||||
frappe.patches.v13_0.delete_event_producer_and_consumer_keys
|
||||
frappe.patches.v13_0.web_template_set_module
|
||||
frappe.patches.v13_0.web_template_set_module #2020-10-05
|
||||
|
|
|
|||
|
|
@ -6,7 +6,8 @@ import frappe
|
|||
|
||||
def execute():
|
||||
"""Set default module for standard Web Template, if none."""
|
||||
frappe.reload_doc('website', 'doctype', 'Web Template')
|
||||
frappe.reload_doctype('Web Template')
|
||||
frappe.reload_doctype('Web Template Field')
|
||||
standard_templates = frappe.get_list('Web Template', {'standard': 1})
|
||||
for template in standard_templates:
|
||||
doc = frappe.get_doc('Web Template', template.name)
|
||||
|
|
|
|||
|
|
@ -70,9 +70,9 @@ export default class Desktop {
|
|||
}
|
||||
|
||||
make_sidebar() {
|
||||
const get_sidebar_item = function(item) {
|
||||
const get_sidebar_item = function (item) {
|
||||
return $(`<a href="${"desk#workspace/" + item.name}"
|
||||
class="sidebar-item
|
||||
class="sidebar-item
|
||||
${item.selected ? " selected" : ""}
|
||||
${item.hidden ? "hidden" : ""}
|
||||
">
|
||||
|
|
@ -87,7 +87,7 @@ export default class Desktop {
|
|||
}
|
||||
let $item = get_sidebar_item(item);
|
||||
let $mobile_item = $item.clone();
|
||||
|
||||
|
||||
$item.appendTo(this.sidebar);
|
||||
this.sidebar_items[item.name] = $item;
|
||||
|
||||
|
|
@ -128,7 +128,7 @@ export default class Desktop {
|
|||
if (this.sidebar_items && this.sidebar_items[this.current_page]) {
|
||||
this.sidebar_items[this.current_page].removeClass("selected");
|
||||
this.mobile_sidebar_items[this.current_page].removeClass("selected");
|
||||
|
||||
|
||||
this.sidebar_items[page].addClass("selected");
|
||||
this.mobile_sidebar_items[page].addClass("selected");
|
||||
}
|
||||
|
|
@ -282,6 +282,7 @@ class DesktopPage {
|
|||
}
|
||||
|
||||
save_customization() {
|
||||
frappe.dom.freeze();
|
||||
const config = {};
|
||||
|
||||
if (this.sections.charts) config.charts = this.sections.charts.get_widget_config();
|
||||
|
|
@ -292,14 +293,15 @@ class DesktopPage {
|
|||
page: this.page_name,
|
||||
config: config
|
||||
}).then(res => {
|
||||
frappe.dom.unfreeze();
|
||||
if (res.message) {
|
||||
frappe.msgprint({ message: __("Customizations Saved Successfully"), title: __("Success")});
|
||||
frappe.msgprint({ message: __("Customizations Saved Successfully"), title: __("Success") });
|
||||
this.reload();
|
||||
} else {
|
||||
frappe.throw({message: __("Something went wrong while saving customizations"), title: __("Failed")});
|
||||
frappe.throw({ message: __("Something went wrong while saving customizations"), title: __("Failed") });
|
||||
this.reload();
|
||||
}
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
make_onboarding() {
|
||||
|
|
@ -324,7 +326,7 @@ class DesktopPage {
|
|||
|
||||
make_charts() {
|
||||
return frappe.dashboard_utils.get_dashboard_settings().then(settings => {
|
||||
let chart_config = settings.chart_config ? JSON.parse(settings.chart_config): {};
|
||||
let chart_config = settings.chart_config ? JSON.parse(settings.chart_config) : {};
|
||||
if (this.data.charts.items) {
|
||||
this.data.charts.items.map(chart => {
|
||||
chart.chart_settings = chart_config[chart.chart_name] || {};
|
||||
|
|
|
|||
|
|
@ -466,7 +466,7 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList {
|
|||
df.onchange = () => {
|
||||
this.refresh_filters_dependency();
|
||||
|
||||
let current_filters = this.get_filter_value();
|
||||
let current_filters = this.get_filter_values();
|
||||
if (this.previous_filters
|
||||
&& (JSON.stringify(this.previous_filters) === JSON.stringify(current_filters))) {
|
||||
// filter values have not changed
|
||||
|
|
@ -1484,7 +1484,7 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList {
|
|||
insert_after_index: insert_after_index,
|
||||
link_field: this.doctype_field_map[values.doctype],
|
||||
doctype: values.doctype,
|
||||
options: df.fieldtype === "Link" ? df.options : undefined,
|
||||
options: df.options,
|
||||
width: 100
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -651,6 +651,9 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView {
|
|||
}
|
||||
|
||||
set_fields() {
|
||||
// default fields
|
||||
['name', 'docstatus'].map((f) => this._add_field(f));
|
||||
|
||||
if (this.report_name && this.report_doc.json.fields) {
|
||||
let fields = this.report_doc.json.fields.slice();
|
||||
fields.forEach(f => this._add_field(f[0], f[1]));
|
||||
|
|
@ -667,12 +670,11 @@ frappe.views.ReportView = class ReportView extends frappe.views.ListView {
|
|||
|
||||
set_default_fields() {
|
||||
// get fields from meta
|
||||
this.fields = [];
|
||||
this.fields = this.fields || [];
|
||||
const add_field = f => this._add_field(f);
|
||||
|
||||
// default fields
|
||||
[
|
||||
'name', 'docstatus',
|
||||
this.meta.title_field,
|
||||
this.meta.image_field
|
||||
].map(add_field);
|
||||
|
|
|
|||
|
|
@ -52,7 +52,7 @@
|
|||
|
||||
body.no-list-sidebar {
|
||||
[data-page-route^="List/"] {
|
||||
@media (min-width: @screen-md) {
|
||||
@media (min-width: @screen-sm) {
|
||||
.layout-side-section {
|
||||
display: none;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -19,6 +19,25 @@ $text-muted: $gray-600 !default;
|
|||
$border-color: $gray-300 !default;
|
||||
$headings-color: $gray-900 !default;
|
||||
|
||||
$font-sizes: (
|
||||
"xs": 0.75rem,
|
||||
"sm": 0.875rem,
|
||||
"base": 1rem,
|
||||
"lg": 1.125rem,
|
||||
"xl": 1.25rem,
|
||||
"2xl": 1.5rem,
|
||||
"3xl": 1.875rem,
|
||||
"4xl": 2.25rem,
|
||||
"5xl": 3rem,
|
||||
"6xl": 4rem
|
||||
);
|
||||
|
||||
@each $size, $value in $font-sizes {
|
||||
.font-size-#{$size} {
|
||||
font-size: $value;
|
||||
}
|
||||
}
|
||||
|
||||
$font-size-xs: 0.75rem !default;
|
||||
$font-size-sm: 0.875rem !default;
|
||||
$font-size-base: 1rem !default;
|
||||
|
|
@ -60,12 +79,12 @@ $input-border-radius: 0.375rem;
|
|||
$custom-control-indicator-bg: white;
|
||||
|
||||
$grid-breakpoints: (
|
||||
xs: 0,
|
||||
sm: 576px,
|
||||
md: 768px,
|
||||
lg: 992px,
|
||||
xl: 1200px,
|
||||
2xl: 1440px
|
||||
xs: 0,
|
||||
sm: 576px,
|
||||
md: 768px,
|
||||
lg: 992px,
|
||||
xl: 1200px,
|
||||
2xl: 1440px
|
||||
) !default;
|
||||
|
||||
$spacers: (
|
||||
|
|
@ -93,11 +112,11 @@ $spacers: (
|
|||
48: 12rem,
|
||||
52: 13rem,
|
||||
56: 14rem,
|
||||
64: 16rem,
|
||||
64: 16rem
|
||||
);
|
||||
|
||||
@import '~bootstrap/scss/functions';
|
||||
@import '~bootstrap/scss/variables';
|
||||
@import "~bootstrap/scss/functions";
|
||||
@import "~bootstrap/scss/variables";
|
||||
@import "~bootstrap/scss/mixins";
|
||||
|
||||
$code-color: $purple;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
{
|
||||
"actions": [],
|
||||
"creation": "2018-06-21 14:58:55.913619",
|
||||
"doctype": "DocType",
|
||||
"editable_grid": 1,
|
||||
|
|
@ -109,8 +110,9 @@
|
|||
"label": "Seen"
|
||||
}
|
||||
],
|
||||
"in_create": 1,
|
||||
"modified": "2019-08-21 15:51:05.288886",
|
||||
"index_web_pages_for_search": 1,
|
||||
"links": [],
|
||||
"modified": "2020-10-06 17:25:40.477044",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Social",
|
||||
"name": "Energy Point Log",
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
{% macro footer_link(item) %}
|
||||
<a href="{{ item.url | abs_url }}" class="footer-link">
|
||||
<a href="{{ item.url | abs_url }}" {{ item.target }} class="footer-link">
|
||||
{%- if item.icon -%}
|
||||
<img src="{{ item.icon }}" alt="{{ item.label }}">
|
||||
{%- else -%}
|
||||
|
|
|
|||
|
|
@ -106,21 +106,25 @@ login.reset_sections = function(hide) {
|
|||
login.login = function() {
|
||||
login.reset_sections();
|
||||
$(".for-login").toggle(true);
|
||||
$("#login_email").focus();
|
||||
}
|
||||
|
||||
login.steptwo = function() {
|
||||
login.reset_sections();
|
||||
$(".for-login").toggle(true);
|
||||
$("#login_email").focus();
|
||||
}
|
||||
|
||||
login.forgot = function() {
|
||||
login.reset_sections();
|
||||
$(".for-forgot").toggle(true);
|
||||
$("#forgot_email").focus();
|
||||
}
|
||||
|
||||
login.signup = function() {
|
||||
login.reset_sections();
|
||||
$(".for-signup").toggle(true);
|
||||
$("#signup_fullname").focus();
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,12 +1,15 @@
|
|||
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
|
||||
|
||||
# imports - standard imports
|
||||
import os
|
||||
import shlex
|
||||
import subprocess
|
||||
import unittest
|
||||
from glob import glob
|
||||
|
||||
# imports - module imports
|
||||
import frappe
|
||||
from frappe.utils.backups import fetch_latest_backups
|
||||
|
||||
|
||||
def clean(value):
|
||||
|
|
@ -15,9 +18,14 @@ def clean(value):
|
|||
return value
|
||||
|
||||
|
||||
class BaseTestCommands:
|
||||
def execute(self, command):
|
||||
command = command.format(**{"site": frappe.local.site})
|
||||
class BaseTestCommands(unittest.TestCase):
|
||||
def execute(self, command, kwargs=None):
|
||||
site = {"site": frappe.local.site}
|
||||
if kwargs:
|
||||
kwargs.update(site)
|
||||
else:
|
||||
kwargs = site
|
||||
command = command.replace("\n", " ").format(**kwargs)
|
||||
command = shlex.split(command)
|
||||
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
self.stdout = clean(self._proc.stdout)
|
||||
|
|
@ -25,7 +33,7 @@ class BaseTestCommands:
|
|||
self.returncode = clean(self._proc.returncode)
|
||||
|
||||
|
||||
class TestCommands(BaseTestCommands, unittest.TestCase):
|
||||
class TestCommands(BaseTestCommands):
|
||||
def test_execute(self):
|
||||
# test 1: execute a command expecting a numeric output
|
||||
self.execute("bench --site {site} execute frappe.db.get_database_size")
|
||||
|
|
@ -44,3 +52,70 @@ class TestCommands(BaseTestCommands, unittest.TestCase):
|
|||
self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""")
|
||||
self.assertEquals(self.returncode, 0)
|
||||
self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
|
||||
|
||||
def test_backup(self):
|
||||
home = os.path.expanduser("~")
|
||||
site_backup_path = frappe.utils.get_site_path("private", "backups")
|
||||
|
||||
# test 1: take a backup
|
||||
before_backup = fetch_latest_backups()
|
||||
self.execute("bench --site {site} backup")
|
||||
after_backup = fetch_latest_backups()
|
||||
|
||||
self.assertEquals(self.returncode, 0)
|
||||
self.assertIn("successfully completed", self.stdout)
|
||||
self.assertNotEqual(before_backup["database"], after_backup["database"])
|
||||
|
||||
# test 2: take a backup with --with-files
|
||||
before_backup = after_backup.copy()
|
||||
self.execute("bench --site {site} backup --with-files")
|
||||
after_backup = fetch_latest_backups()
|
||||
|
||||
self.assertEquals(self.returncode, 0)
|
||||
self.assertIn("successfully completed", self.stdout)
|
||||
self.assertIn("with files", self.stdout)
|
||||
self.assertNotEqual(before_backup, after_backup)
|
||||
self.assertIsNotNone(after_backup["public"])
|
||||
self.assertIsNotNone(after_backup["private"])
|
||||
|
||||
# test 3: take a backup with --backup-path
|
||||
backup_path = os.path.join(home, "backups")
|
||||
self.execute("bench --site {site} backup --backup-path {backup_path}", {"backup_path": backup_path})
|
||||
|
||||
self.assertEquals(self.returncode, 0)
|
||||
self.assertTrue(os.path.exists(backup_path))
|
||||
self.assertGreaterEqual(len(os.listdir(backup_path)), 2)
|
||||
|
||||
# test 4: take a backup with --backup-path-db, --backup-path-files, --backup-path-private-files, --backup-path-conf
|
||||
kwargs = {
|
||||
key: os.path.join(home, key, value)
|
||||
for key, value in {
|
||||
"db_path": "database.sql.gz",
|
||||
"files_path": "public.tar",
|
||||
"private_path": "private.tar",
|
||||
"conf_path": "config.json"
|
||||
}.items()
|
||||
}
|
||||
|
||||
self.execute("""bench
|
||||
--site {site} backup --with-files
|
||||
--backup-path-db {db_path}
|
||||
--backup-path-files {files_path}
|
||||
--backup-path-private-files {private_path}
|
||||
--backup-path-conf {conf_path}""", kwargs)
|
||||
|
||||
self.assertEquals(self.returncode, 0)
|
||||
for path in kwargs.values():
|
||||
self.assertTrue(os.path.exists(path))
|
||||
|
||||
# test 5: take a backup with --compress
|
||||
self.execute("bench --site {site} backup --with-files --compress")
|
||||
|
||||
self.assertEquals(self.returncode, 0)
|
||||
|
||||
compressed_files = glob(site_backup_path + "/*.tgz")
|
||||
self.assertGreater(len(compressed_files), 0)
|
||||
|
||||
# test 6: take a backup with --verbose
|
||||
self.execute("bench --site {site} backup --verbose")
|
||||
self.assertEquals(self.returncode, 0)
|
||||
|
|
|
|||
|
|
@ -133,6 +133,8 @@ class TestDB(unittest.TestCase):
|
|||
self.assertEqual(list(frappe.get_all("ToDo", fields=[random_field], limit=1)[0])[0], random_field)
|
||||
self.assertEqual(list(frappe.get_all("ToDo", fields=["{0} as total".format(random_field)], limit=1)[0])[0], "total")
|
||||
|
||||
# Testing read for distinct keyword - Check if result contains total field
|
||||
self.assertEqual(list(frappe.get_all("ToDo", fields=["distinct {0} as total".format(random_field)], limit=1)[0])[0], "total")
|
||||
|
||||
# Testing update
|
||||
frappe.db.set_value(test_doctype, random_doc, random_field, random_value)
|
||||
|
|
|
|||
|
|
@ -347,6 +347,14 @@ class TestReportview(unittest.TestCase):
|
|||
limit=50,
|
||||
)
|
||||
|
||||
def test_pluck_name(self):
|
||||
names = DatabaseQuery("DocType").execute(filters={"name": "DocType"}, pluck="name")
|
||||
self.assertEqual(names, ["DocType"])
|
||||
|
||||
def test_pluck_any_field(self):
|
||||
owners = DatabaseQuery("DocType").execute(filters={"name": "DocType"}, pluck="owner")
|
||||
self.assertEqual(owners, ["Administrator"])
|
||||
|
||||
def create_event(subject="_Test Event", starts_on=None):
|
||||
""" create a test event """
|
||||
|
||||
|
|
|
|||
|
|
@ -620,7 +620,7 @@ def get_untranslated(lang, untranslated_file, get_all=False):
|
|||
|
||||
if get_all:
|
||||
print(str(len(messages)) + " messages")
|
||||
with open(untranslated_file, "w") as f:
|
||||
with open(untranslated_file, "wb") as f:
|
||||
for m in messages:
|
||||
# replace \n with ||| so that internal linebreaks don't get split
|
||||
f.write((escape_newlines(m[1]) + os.linesep).encode("utf-8"))
|
||||
|
|
@ -633,10 +633,10 @@ def get_untranslated(lang, untranslated_file, get_all=False):
|
|||
|
||||
if untranslated:
|
||||
print(str(len(untranslated)) + " missing translations of " + str(len(messages)))
|
||||
with open(untranslated_file, "w") as f:
|
||||
with open(untranslated_file, "wb") as f:
|
||||
for m in untranslated:
|
||||
# replace \n with ||| so that internal linebreaks don't get split
|
||||
f.write(cstr(frappe.safe_encode(escape_newlines(m) + os.linesep)))
|
||||
f.write((escape_newlines(m) + os.linesep).encode("utf-8"))
|
||||
else:
|
||||
print("all translated!")
|
||||
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,3 +1,2 @@
|
|||
Comment Type,Tipo de Comentario,
|
||||
Communication,Comunicacion,
|
||||
Components,Componentes,
|
||||
|
|
|
|||
|
|
|
@ -2,3 +2,5 @@ Refreshing...,Actualizando...,
|
|||
Clear Filters,Limpiar Filtros,
|
||||
No Events Today,Sin eventos hoy,
|
||||
Today's Events,Eventos para hoy,
|
||||
Disabled,Deshabilitado,
|
||||
Your Shortcuts,Tus accesos,
|
||||
|
|
|
|||
|
|
|
@ -13,6 +13,8 @@ Maintenance User,Mantenimiento por el Usuario,
|
|||
Message Examples,Ejemplos de Mensajes,
|
||||
Middle Name (Optional),Segundo Nombre ( Opcional),
|
||||
Next,Próximo,
|
||||
No address added yet.,No se ha añadido ninguna dirección todavía.,
|
||||
No contacts added yet.,No se han añadido contactos todavía,
|
||||
Replied,Respondio,
|
||||
Report,Informe,
|
||||
Report Builder,Generador de informes,
|
||||
|
|
@ -79,7 +81,6 @@ Default Address Template cannot be deleted,Plantilla de la Direcciones Predeterm
|
|||
Default Inbox,Bandeja de entrada por defecto,
|
||||
Define workflows for forms.,Definir los flujos de trabajo para las formas .,
|
||||
Defines actions on states and the next step and allowed roles.,Define las acciones de los estados y el siguiente paso y funciones permitidas.,
|
||||
"Description for listing page, in plain text, only a couple of lines. (max 140 characters)","Descripción de la página perfil, en texto plano, sólo un par de líneas. (máx. 140 caracteres)",
|
||||
"Different ""States"" this document can exist in. Like ""Open"", ""Pending Approval"" etc.","Este documento puede estar en Diferentes ""Estados"". Como ""Abierto"", ""Pendiente de Aprobación"", etc.",
|
||||
Disable Customer Signup link in Login page,Desactivar enlace de registro del cliente en la página de entrada,
|
||||
Disable Report,Desactivar Informe,
|
||||
|
|
@ -89,6 +90,7 @@ DocType can not be merged,El DocType no se puede fusionar,
|
|||
DocType is a Table / Form in the application.,El DocType es una tabla / formulario en la aplicación.,
|
||||
DocType on which this Workflow is applicable.,El DocType en el presente del flujo de trabajo es aplicable.,
|
||||
DocType or Field,DocType o campo,
|
||||
Document Types,Tipos de Documento,
|
||||
Download with Data,Descarga de datos,
|
||||
Drag elements from the sidebar to add. Drag them back to trash.,Arrastre los elementos de la barra lateral para agregar. Arrastre de nuevo a la papelera de reciclaje.,
|
||||
Dropbox Access Key,Clave de Acceso de Dropbox,
|
||||
|
|
@ -97,7 +99,6 @@ Edit Custom HTML,Edición de HTML personalizado,
|
|||
Edit HTML,Edición de HTML,
|
||||
Edit Heading,Editar Rubro,
|
||||
Email Account Name,Correo electrónico Nombre de cuenta,
|
||||
Email By Document Field,Email Por Campo Documento,
|
||||
Email Settings,Configuración del correo electrónico,
|
||||
Email Signature,Firma Email,
|
||||
Embed image slideshows in website pages.,Presentacion de imágenes incrustadas en páginas web .,
|
||||
|
|
@ -107,7 +108,6 @@ Enter Form Type,Introduzca Tipo de Formulario,
|
|||
"Enter static url parameters here (Eg. sender=ERPNext, username=ERPNext, password=1234 etc.)","Introduzca los parámetros de URL estáticas aquí (Ej. sender = ERPNext , nombre de usuario = ERPNext , contraseña = 1234 etc )",
|
||||
Enter url parameter for receiver nos,Introduzca el parámetro url para el receptor no,
|
||||
Field Description,Descripción del Campo,
|
||||
Field {0} is not selectable.,El campo {0} no se puede seleccionar .,
|
||||
Fieldname which will be the DocType for this link field.,Nombre de campo el cual será el DocType para enlazar el campo.,
|
||||
File Size,Tamaño del archivo,
|
||||
Float,flotador,
|
||||
|
|
@ -197,7 +197,6 @@ Remove all customizations?,Eliminar todas las personalizaciones ?,
|
|||
Repeat On,Repetir OK,
|
||||
Repeat Till,Repita Hasta,
|
||||
Repeat this Event,Repita este Evento,
|
||||
Report Builder reports are managed directly by the report builder. Nothing to do.,Informes del Generador de informes son enviadas por el generador de informes . No hay nada que hacer.,
|
||||
Report Hide,Ocultar Informe,
|
||||
Report Manager,Administrador de informes,
|
||||
Report Name,Nombre del informe,
|
||||
|
|
@ -218,9 +217,6 @@ Row #{0}:,Fila # {0}:,
|
|||
Row {0}: Not allowed to enable Allow on Submit for standard fields,Fila {0}: No se permite habilitar Permitir en Enviar para campos estándar,
|
||||
Rules defining transition of state in the workflow.,Reglas que definen la transición de estado del flujo de trabajo .,
|
||||
"Rules for how states are transitions, like next state and which role is allowed to change state etc.","Reglas para transición entre estados, como el siguiente estado y qué función permite cambiar de estado , etc",
|
||||
Same file has already been attached to the record,El mismo archivo ya se ha adjuntado al registro,
|
||||
Scheduled to send to {0},Programado para enviar a {0},
|
||||
Scheduled to send to {0} recipients,Programado para enviar a {0} destinatarios,
|
||||
Script,Guión,
|
||||
Script Report,Informe de secuencias de comandos,
|
||||
Script to attach to all web pages.,Guión para unir a todas las páginas web.,
|
||||
|
|
@ -318,7 +314,6 @@ This role update User Permissions for a user,Este función actualiza los Permiso
|
|||
Time Zones,Husos horarios,
|
||||
Title Prefix,Prefijo del Título,
|
||||
Title field must be a valid fieldname,Campo Título debe ser un nombre de campo válido,
|
||||
"To format columns, give column labels in the query.","Para dar formato a columnas, dar títulos de las columnas en la consulta.",
|
||||
Total Subscribers,Los suscriptores totales,
|
||||
Unable to load: {0},No se puede cargar : {0},
|
||||
Unread Notification Sent,Notificación No leído Enviado,
|
||||
|
|
@ -337,8 +332,6 @@ Website Theme Image Link,Sitio web Imagen por tema Enlace,
|
|||
"When you Amend a document after Cancel and save it, it will get a new number that is a version of the old number.","Cuando se modifique un documento después de Cancelar y guardarlo , se obtendrá un nuevo número que es una versión del antiguo número.",
|
||||
Workflow Action,Acciones de los flujos de trabajo,
|
||||
Workflow State,Estados de los flujos de trabajo,
|
||||
Write a Python file in the same folder where this is saved and return column and result.,Escriba un archivo de Python en la misma carpeta donde esta se guarda y devuelve la columna y el resultado.,
|
||||
Write a SELECT query. Note result is not paged (all data is sent in one go).,Escriba una consulta SELECT. Nota resultado no se pagina ( todos los datos se envían en una sola vez ) .,
|
||||
You are not allowed to delete a standard Website Theme,No se le permite eliminar un tema Sitio web estándar,
|
||||
You can add dynamic properties from the document by using Jinja templating.,Puede añadir propiedades dinámicas del documento mediante el uso de plantillas Jinja.,
|
||||
"You can change Submitted documents by cancelling them and then, amending them.","Puede cambiar los documentos Enviados cancelándolos y luego, haciendo los cambios pertinentes.",
|
||||
|
|
@ -434,7 +427,6 @@ Reference Doctype,Referencia DocType,
|
|||
Select Doctype,Seleccione tipo de documento,
|
||||
clear,claro,
|
||||
font,Fuente,
|
||||
left,Izquierda,
|
||||
list,Vista de árbol,
|
||||
remove,Quitar,
|
||||
search,Búsqueda,
|
||||
|
|
|
|||
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -48,7 +48,10 @@ Maintenance Manager,Gerente de Manutenção,
|
|||
Maintenance User,Usuário da Manutenção,
|
||||
Medium,Média,
|
||||
More Information,Mais Informações,
|
||||
New Contact,Novo Contato,
|
||||
Next,Próximo,
|
||||
No address added yet.,Nenhum endereço adicionado ainda.,
|
||||
No contacts added yet.,Nenhum contato adicionado ainda.,
|
||||
Not active,Inativo,
|
||||
Owner,Proprietário,
|
||||
Page Missing or Moved,Página ausente ou mudou,
|
||||
|
|
@ -56,12 +59,14 @@ Payment Gateway,Gateway de pagamento,
|
|||
Pincode,CEP,
|
||||
Please enable pop-ups,Por favor habilite os pop-ups,
|
||||
Please select Company,"Por favor, selecione Empresa",
|
||||
Please select {0},Por favor selecione {0},
|
||||
Portal Settings,Configurações do Portal,
|
||||
Primary,Primário,
|
||||
Print Settings,Configurações de Impressão,
|
||||
Purchase Manager,Gerente de Compras,
|
||||
Purchase Master Manager,Gerente de Cadastros de Compras,
|
||||
Purchase User,Usuário de Compras,
|
||||
Range,Alcance,
|
||||
Rating,Rating,
|
||||
Reference Owner,Proprietário da Referência,
|
||||
Refresh Token,Token de Atualização,
|
||||
|
|
@ -75,6 +80,7 @@ Scheduled,Agendado,
|
|||
Select,Selecionar,
|
||||
Select DocType,Selecione o DocType,
|
||||
Service,Manutenção do Veículo,
|
||||
Set as Default,Definir como padrão,
|
||||
Shipping,Expedição,
|
||||
Slideshow,Apresentação de Slides,
|
||||
Source,Origem,
|
||||
|
|
@ -130,7 +136,6 @@ Add all roles,Adicionar todos os papéis,
|
|||
Add custom forms.,Adicionar formulários personalizados.,
|
||||
Add custom javascript to forms.,Adicionar javascript personalizado aos formulários.,
|
||||
Add fields to forms.,Adicionar campos nos formulários.,
|
||||
Add your own Tag Categories,Adicione suas próprias categorias de tags,
|
||||
Add your own translations,Adicione suas próprias traduções,
|
||||
"Added HTML in the <head> section of the web page, primarily used for website verification and SEO","HTML adicionado na seção <head> da página web, principalmente utilizadas para a verificação do site e SEO",
|
||||
Adding System Manager to this User as there must be atleast one System Manager,Adicionando este usuário como System Manager pois deve haver pelo menos um System Manager,
|
||||
|
|
@ -159,7 +164,6 @@ Allow user to login only after this hour (0-24),Permitir que o usuário faça o
|
|||
Allow user to login only before this hour (0-24),Permitir que o usuário faça o login somente antes deste horário (0-24),
|
||||
"Allowing DocType, DocType. Be careful!","Permitindo DocType, DocType. Tenha cuidado!",
|
||||
Already Registered,Já está registrado,
|
||||
Already in user's To Do list,Já está na lista de tarefas do usuário,
|
||||
"Always add ""Draft"" Heading for printing draft documents","Sempre adicionar ""Rascunho"" no cabeçalho para impressão de documentos não enviados",
|
||||
Amend,Corrigir,
|
||||
Amending,Correção,
|
||||
|
|
@ -303,7 +307,6 @@ Currently Viewing,Atualmente Exibindo,
|
|||
Custom CSS,CSS Personalizado,
|
||||
Custom HTML Help,Ajuda HTML Personalizado,
|
||||
Custom Menu Items,Itens de Menu Personalizado,
|
||||
Custom Tags,Tags Personalizadas,
|
||||
"Customize Label, Print Hide, Default etc.","Personalizar Etiquetas, Cabeçalhos, Padrões, etc.",
|
||||
"Customized Formats for Printing, Email","Formatos personalizados para impressão, Email",
|
||||
Customized HTML Templates for printing transactions.,Modelos HTML customizadas para operações de impressão.,
|
||||
|
|
@ -338,7 +341,6 @@ Deleted DocType,DocType Excluído,
|
|||
Deleted Document,Documento Excluído,
|
||||
Deleted Name,Nome Excluído,
|
||||
Depends On,Depende de,
|
||||
"Description for listing page, in plain text, only a couple of lines. (max 140 characters)","Descrição da página perfil, em texto simples, apenas um par de linhas. (Máximo 140 caracteres)",
|
||||
Desk,Mesa,
|
||||
Desktop Icon,Ícone da área de trabalho,
|
||||
Did not add,Não adicionado,
|
||||
|
|
@ -360,6 +362,7 @@ DocType on which this Workflow is applicable.,DocType em que este fluxo de traba
|
|||
Doctype required,Doctype obrigatório,
|
||||
Document Share Report,Relatório de Documentos Compartilhados,
|
||||
Document States,Estados do Documento,
|
||||
Document Types,Tipos de documento,
|
||||
Documents assigned to you and by you.,Documentos atribuídos a você e por você.,
|
||||
Domain Settings,Configurações de Domínio,
|
||||
"Don't HTML Encode HTML tags like <script> or just characters like < or >, as they could be intentionally used in this field","Não etiquetas HTML Encode HTML como <script> ou apenas caracteres como <ou>, uma vez que poderia ser usado intencionalmente neste campo",
|
||||
|
|
@ -373,15 +376,12 @@ Dropbox Access Key,Dropbox Chave de Acesso,
|
|||
Dropbox Access Secret,Segredo de Acesso Dropbox,
|
||||
Dropbox Setup,Configuração do Dropbox,
|
||||
Dropbox access is approved!,O acesso ao Dropbox está aprovado!,
|
||||
Duplicate name {0} {1},Nome duplicado {0} e {1},
|
||||
Dynamic Link,Link dinâmico,
|
||||
Edit Filter,Edit Filter,
|
||||
Editable Grid,Grid Editável,
|
||||
Editing Row,Editando Linha,
|
||||
Eg. smsgateway.com/api/send_sms.cgi,Por exemplo: smsgateway.com/api/send_sms.cgi,
|
||||
Email Addresses,Endereço de Email,
|
||||
Email By Document Field,Email por Campo do Documento,
|
||||
Email By Role,Email por Função,
|
||||
Email Domain,Domínio de Email,
|
||||
"Email Domain not configured for this account, Create one?","Domínio de email não configurado para esta conta, criar um?",
|
||||
Email Flag Queue,Flag Fila de Email,
|
||||
|
|
@ -427,7 +427,6 @@ Feedback Request,Solicitação de Feedback,
|
|||
Field Description,Descrição do Campo,
|
||||
Field Type,Tipo de Campo,
|
||||
"Field that represents the Workflow State of the transaction (if field is not present, a new hidden Custom Field will be created)","Campo que representa o status da transação no fluxo de trabalho (se o campo não estiver presente, um novo campo oculto personalizado será criado)",
|
||||
Field {0} is not selectable.,O campo {0} não é selecionável.,
|
||||
Fieldname not set for Custom Field,Fieldname não definida para campo personalizado,
|
||||
Fieldname which will be the DocType for this link field.,Nome do campo que será o DocType para este campo link.,
|
||||
Fieldname {0} cannot have special characters like {1},Fieldname {0} não pode ter caracteres especiais como {1},
|
||||
|
|
@ -620,7 +619,6 @@ Login with LDAP,Logar com LDAP,
|
|||
Logout,Sair,
|
||||
Long Text,Texto Longo,
|
||||
"Make ""name"" searchable in Global Search","Tornar ""nome"" pesquisável na Busca Global",
|
||||
Make Default,Tornar padrão,
|
||||
Make use of longer keyboard patterns,Use uma combinação de letras e números mais longa,
|
||||
Mandatory Information missing:,Informações obrigatórias ausente:,
|
||||
Mandatory fields required in {0},Os campos obrigatórios exigidos no {0},
|
||||
|
|
@ -651,7 +649,6 @@ Multiple root nodes not allowed.,"Vários nós raiz, não é permitido .",
|
|||
Must have report permission to access this report.,Deve ter permissão para acessar relatório deste relatório.,
|
||||
Must specify a Query to run,Deve especificar uma consulta para executar,
|
||||
Mute Sounds,Desativar sons,
|
||||
My Settings,Minhas Configurações,
|
||||
Name Case,Caso Nome,
|
||||
Name cannot contain special characters like {0},Nome não pode conter caracteres especiais como {0},
|
||||
Name not set via prompt,Nome não definido através Prompt,
|
||||
|
|
@ -663,7 +660,6 @@ Nested set error. Please contact the Administrator.,Erro conjunto aninhado . Ent
|
|||
New Email Account,Nova conta de email,
|
||||
New Folder,Nova pasta,
|
||||
New Kanban Board,Novo Painel Kanban,
|
||||
New Mention,Nova Menção,
|
||||
New Message from Website Contact Page,Nova Mensagem da Página de Contato do Site,
|
||||
New Password,Nova senha,
|
||||
New Password Required.,É necessário uma nova senha.,
|
||||
|
|
@ -848,7 +844,6 @@ Repeat On,Repetir em,
|
|||
Repeat Till,Repita até que,
|
||||
Repeat on Day,Repetir no dia,
|
||||
Repeat this Event,Repita este evento,
|
||||
Report Builder reports are managed directly by the report builder. Nothing to do.,Os relatórios são gerenciados diretamente pelo sistema. Nada a fazer.,
|
||||
Report cannot be set for Single types,Relatório não pode ser ajustada para os modelos únicos,
|
||||
Report of all document shares,Relatório de todas as ações de documentos,
|
||||
Report was not saved (there were errors),O Relatório não foi salvo (houve erros),
|
||||
|
|
@ -911,7 +906,6 @@ Select a group node first.,Selecione um nó de grupo primeiro.,
|
|||
Select the label after which you want to insert new field.,Selecione a etiqueta após a qual você deseja inserir um novo campo.,
|
||||
"Select your Country, Time Zone and Currency","Escolha o seu País, Fuso Horário e Moeda",
|
||||
Send Alert On,Enviar Alerta,
|
||||
Send Attachements,Enviar Anexos,
|
||||
Send Email Print Attachments as PDF (Recommended),Enviar anexos de email em PDF (Recomendado),
|
||||
Send Notifications To,Enviar Notificações para,
|
||||
Send Print as PDF,Enviar impressão como PDF,
|
||||
|
|
@ -919,7 +913,6 @@ Send Read Receipt,Enviar Confirmação de Leitura,
|
|||
Send Unsubscribe Link,Enviar link para cancelar inscrição,
|
||||
Send Welcome Email,Enviar email de boas-vindas,
|
||||
Send an email reminder in the morning,Enviar um email lembrete na parte da manhã,
|
||||
Send document web view link in email,Enviar link no email para visualizar o documento online,
|
||||
Send enquiries to this email address,Envie perguntas para este endereço de email,
|
||||
Send me a copy,Envie-me uma Cópia,
|
||||
Sent Read Receipt,Enviar Confirmação de Leitura,
|
||||
|
|
@ -959,7 +952,6 @@ Snapshot View,Ver Snapshot,
|
|||
Sorry! I could not find what you were looking for.,Desculpe! Não foi possível encontrar o que você procurou.,
|
||||
Sorry! Sharing with Website User is prohibited.,Desculpe! Não é permitido compartilhar com o site do usuário.,
|
||||
Sorry! User should have complete access to their own record.,Desculpe! O usuário deve ter acesso completo ao seu próprio registro.,
|
||||
Sorry! You cannot delete auto-generated comments,Desculpe! Você não pode excluir comentários gerados automaticamente,
|
||||
Sort Field,Ordenar por campo,
|
||||
Source Text,Texto Original,
|
||||
Standard Print Format cannot be updated,Formato de impressão padrão não pode ser atualizado,
|
||||
|
|
@ -1028,7 +1020,6 @@ Title Prefix,Prefixo do Título,
|
|||
To Do,Atribuições,
|
||||
ToDo,Lista de Atribuições,
|
||||
Toggle Charts,Alternar Gráficos,
|
||||
Top Bar,Barra superior,
|
||||
Top Bar Item,Item da barra superior,
|
||||
Top Bar Items,Itens da barra superior,
|
||||
Track Changes,Rastrear Alterações,
|
||||
|
|
@ -1036,7 +1027,6 @@ Track Seen,Marcar como visto,
|
|||
Transitions,Transições,
|
||||
Translated Text,Texto Traduzido,
|
||||
Tree,Árvore,
|
||||
Tree view not available for {0},Visualização em Árvore não disponível para {0},
|
||||
"Trigger on valid methods like ""before_insert"", ""after_update"", etc (will depend on the DocType selected)","Gatilho em métodos válidos como ""before_insert"", ""after_update"", etc. (dependerá do DocType selecionado)",
|
||||
Unable to find attachment {0},Incapaz de encontrar o anexo {0},
|
||||
Unable to load: {0},Não é possível carregar: {0},
|
||||
|
|
@ -1115,9 +1105,6 @@ Workflow Name,Nome do Fluxo de Trabalho,
|
|||
Workflow State Field,Campo do Status do Fluxo de Trabalho,
|
||||
Workflow Transition,Transição do Fluxo de Trabalho,
|
||||
Workflow state represents the current state of a document.,O Status do Fluxo de Trabalho representa o status atual de um documento.,
|
||||
Write a Python file in the same folder where this is saved and return column and result.,Gravar um arquivo Python na mesma pasta onde este é guardado e coluna de retorno e resultado.,
|
||||
Write a SELECT query. Note result is not paged (all data is sent in one go).,Escreva uma consulta SELECT. Resultado nota não é paginada (todos os dados são enviados de uma só vez).,
|
||||
Writers Introduction,Introdução dos Escritores,
|
||||
X Axis Field,Campo do eixo X,
|
||||
Yahoo Mail,Email do Yahoo,
|
||||
You are not allowed to send emails related to this document,Você não tem permissão para enviar emails relacionados a este documento,
|
||||
|
|
@ -1141,7 +1128,6 @@ Your Name,Seu Nome,
|
|||
Your information has been submitted,Suas informações foram enviadas,
|
||||
Your organization name and address for the email footer.,Nome da empresa e endereço para o rodapé do email.,
|
||||
"Your query has been received. We will reply back shortly. If you have any additional information, please reply to this mail.","Sua consulta foi recebida. Nós responderemos de volta em breve. Se você tiver qualquer informação adicional, por favor responda a este email.",
|
||||
[Label]:[Field Type]/[Options]:[Width],[Label]: [Tipo do Campo] / [Opções]: [Largura],
|
||||
align-center,Centralizar,
|
||||
align-justify,Justificar,
|
||||
align-left,Alinhar à esquerda,
|
||||
|
|
@ -1231,14 +1217,12 @@ zoom-out,diminuir zoom,
|
|||
{0} Tree,Árvore de {0},
|
||||
{0} already unsubscribed,{0} já teve sua inscrição removida,
|
||||
{0} already unsubscribed for {1} {2},{0} já teve sua inscrição cancelada para {1} {2},
|
||||
{0} are currently viewing this document,{0} está vendo atualmente este documento,
|
||||
{0} cannot be set for Single types,{0} não pode ser ajustada para os modelos únicos,
|
||||
{0} does not exist in row {1},{0} não existe em linha {1},
|
||||
"{0} field cannot be set as unique in {1}, as there are non-unique existing values","{0} campo não pode ser definido como único em {1}, pois há valores duplicados existentes",
|
||||
{0} has been successfully added to the Email Group.,"{0} foi adicionado com sucesso ao grupo de emails,",
|
||||
{0} has left the conversation in {1} {2},{0} deixou a conversa em {1} {2},
|
||||
{0} in row {1} cannot have both URL and child items,{0} na linha {1} não pode ter URL e Itens vinculados ao mesmo tempo,
|
||||
{0} is currently viewing this document,{0} está atualmente visualizando este documento,
|
||||
{0} is now default print format for {1} doctype,{0} agora é o formato de impressão padrão para o doctype {1},
|
||||
{0} is saved,{0} foi salvo,
|
||||
{0} logged in,{0} logado(s),
|
||||
|
|
@ -1282,7 +1266,6 @@ From Date,A Partir da Data,
|
|||
Keyboard Shortcuts,Atalhos do Teclado,
|
||||
Mark all as Read,Marcar todas como lidas,
|
||||
Modules,módulos,
|
||||
My Profile,Meu Perfil,
|
||||
Naming Series,Código dos Documentos,
|
||||
No Events Today,Nenhum evento hoje,
|
||||
Notification Settings,Configurações de Notificação,
|
||||
|
|
@ -1297,7 +1280,6 @@ Address Line 1,Endereço,
|
|||
Browse,Procurar,
|
||||
Close,fechar,
|
||||
Compact Item Print,Imprimir item no formato compacto,
|
||||
Date Range,Intervalo entre datas,
|
||||
Delete,Excluir,
|
||||
Download Template,Baixar Modelo,
|
||||
Email,Enviar e-mail,
|
||||
|
|
@ -1366,17 +1348,13 @@ download,Baixar,
|
|||
email inbox,Caixa de Entrada,
|
||||
file,arquivo,
|
||||
font,fonte,
|
||||
in,em,
|
||||
left,Saiu,
|
||||
like,Parecido,
|
||||
lock,trancar,
|
||||
logged in,Logado,
|
||||
not like,Não Parecido,
|
||||
off,fora,
|
||||
one of,Um dos,
|
||||
purple,roxo,
|
||||
random,randômico,
|
||||
right,À direita,
|
||||
stop,Parar,
|
||||
tag,Tag,
|
||||
tags,Tags,
|
||||
|
|
@ -1385,3 +1363,4 @@ trash,lixo,
|
|||
user,Usuário,
|
||||
Verified By,Verificado por,
|
||||
Lead Conversion Time,Tempo de Conversão do Lead,
|
||||
Global Defaults,Padrões Globais,
|
||||
|
|
|
|||
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,3 +1,4 @@
|
|||
Account,Račun,
|
||||
Accounts Manager,Menadžer računa,
|
||||
Accounts User,Računi korisnik,
|
||||
Active,Aktivan,
|
||||
|
|
@ -45,6 +46,10 @@ Medium,Srednji,
|
|||
Middle Name (Optional),Srednje ime (opciono),
|
||||
More Information,Više informacija,
|
||||
Move,Kretanje,
|
||||
New Address,Nova adresa,
|
||||
New Contact,Novi kontakt,
|
||||
No address added yet.,Adresa još nije dodata.,
|
||||
No contacts added yet.,Još uvijek nema dodatih kontakata,
|
||||
Not active,Nije aktivna,
|
||||
Notes,Bilješke,
|
||||
Online,Na mreži,
|
||||
|
|
@ -81,7 +86,6 @@ Week,Nedelja,
|
|||
Weekly,Nedeljni,
|
||||
1 comment,1 komentar,
|
||||
; not allowed in condition,; није дозвољенa у услову,
|
||||
About,O Nama,
|
||||
About Us Settings,Podešavanja o nama,
|
||||
Add Attachment,Dodaj prilog,
|
||||
Add Column,Dodaj kolonu,
|
||||
|
|
@ -93,7 +97,6 @@ Add a New Role,Dodaj novu rolu,
|
|||
Add a column,Dodaj kolonu,
|
||||
Add a comment,Dodaj komentar,
|
||||
Add all roles,Dodaj sve role,
|
||||
Add to To Do,Dodaj u To Do,
|
||||
Addresses And Contacts,Adrese i kontakti,
|
||||
Advanced Search,Napredna pretraga,
|
||||
Allocated To,Dodijeljeno je,
|
||||
|
|
@ -208,7 +211,6 @@ Menu,Meni,
|
|||
Merge with existing,Spoji sa postojećim,
|
||||
Missing Fields,Polja koja nisu unešena,
|
||||
Most Used,Najviše korišćeno,
|
||||
My Settings,Moja podešavanja,
|
||||
Naming Series mandatory,Vrsta dokumenta je obavezna,
|
||||
New Email,Novi email,
|
||||
New Folder,Novi folder,
|
||||
|
|
@ -255,12 +257,10 @@ Reset Password Key,Resetuj ključ lozinke,
|
|||
Restore or permanently delete a document.,Vrati ili trajno obriši dokument.,
|
||||
Role Permissions,Prava pristupa rolama,
|
||||
Roles,Role,
|
||||
Same file has already been attached to the record,Isti fajl je već dodijeljen nekom zapisu,
|
||||
Save As,Sačuvaj kao,
|
||||
Save Filter,Sačuvaj filter,
|
||||
Saving,Čuvanje,
|
||||
Script or Query reports,Skripte ili query izvještaji,
|
||||
Search Term,Pretraga,
|
||||
Search or type a command,Pretražite ili otkucajte komandu,
|
||||
Security Settings,Bezbjedonosna podešavanja,
|
||||
Select File Type,Odaberite tip datoteke,
|
||||
|
|
@ -344,7 +344,6 @@ show,Prikaži,
|
|||
{0} added,Dodao je {0},
|
||||
{0} comments,{0} komentara,
|
||||
{0} days ago,prije {0} dana,
|
||||
{0} is currently viewing this document,{0} trenutno gleda ovaj dokumenat,
|
||||
{0} is saved,{0} je sačuvan,
|
||||
{0} months ago,Prije {0} mjeseci,
|
||||
{0} or {1},{0} ili {1},
|
||||
|
|
@ -371,7 +370,6 @@ Country,Država,
|
|||
Currency,Valuta,
|
||||
Customize,Prilagodite,
|
||||
Date,Datum,
|
||||
Date Range,Opseg datuma,
|
||||
Delete,Obriši,
|
||||
Description,Opis,
|
||||
Disabled,Neaktivni,
|
||||
|
|
@ -445,7 +443,6 @@ list,Lista,
|
|||
message,Poruka,
|
||||
move,Kretanje,
|
||||
new,Novi,
|
||||
not like,Nije kao,
|
||||
print,Štampaj,
|
||||
refresh,Osvježi,
|
||||
remove,Ukloni,
|
||||
|
|
|
|||
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -706,3 +706,18 @@ def get_html_for_route(route):
|
|||
response = render.render()
|
||||
html = frappe.safe_decode(response.get_data())
|
||||
return html
|
||||
|
||||
def get_file_size(path, format=False):
	"""Return the size of the file at *path*.

	If *format* is falsy, the raw size in bytes is returned as an int.
	If truthy, the size is rendered as a human-readable string using
	binary (1024-based) unit prefixes, e.g. ``"2.0KiB"``.
	"""
	size = os.path.getsize(path)

	if not format:
		return size

	suffix = 'B'

	# Scale down through the binary prefixes until the value fits under 1024.
	for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
		if abs(size) < 1024:
			return "{0:3.1f}{1}{2}".format(size, unit, suffix)
		size /= 1024

	# Anything larger than zebibytes is reported in yobibytes.
	return "{0:.1f}{1}{2}".format(size, 'Yi', suffix)
|
||||
|
|
|
|||
|
|
@ -1,22 +1,24 @@
|
|||
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
|
||||
# MIT License. See license.txt
|
||||
|
||||
"""This module handles the On Demand Backup utility"""
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
# imports - standard imports
|
||||
import json
|
||||
import os
|
||||
from calendar import timegm
|
||||
from datetime import datetime
|
||||
from glob import glob
|
||||
|
||||
# imports - third party imports
|
||||
import click
|
||||
|
||||
# imports - module imports
|
||||
import frappe
|
||||
from frappe import _, conf
|
||||
from frappe.utils import cstr, get_url, now_datetime
|
||||
from frappe.utils import get_url, now, now_datetime, get_file_size
|
||||
|
||||
# backup variable for backwards compatibility
|
||||
verbose = False
|
||||
compress = False
|
||||
_verbose = verbose
|
||||
|
||||
|
||||
|
|
@ -27,16 +29,18 @@ class BackupGenerator:
|
|||
To initialize, specify (db_name, user, password, db_file_name=None, db_host="localhost")
|
||||
If specifying db_file_name, also append ".sql.gz"
|
||||
"""
|
||||
def __init__(self, db_name, user, password, backup_path_db=None, backup_path_files=None,
|
||||
backup_path_private_files=None, db_host="localhost", db_port=None, verbose=False,
|
||||
db_type='mariadb', backup_path_conf=None):
|
||||
def __init__(self, db_name, user, password, backup_path=None, backup_path_db=None,
|
||||
backup_path_files=None, backup_path_private_files=None, db_host="localhost", db_port=None,
|
||||
verbose=False, db_type='mariadb', backup_path_conf=None, compress_files=False):
|
||||
global _verbose
|
||||
self.compress_files = compress_files or compress
|
||||
self.db_host = db_host
|
||||
self.db_port = db_port
|
||||
self.db_name = db_name
|
||||
self.db_type = db_type
|
||||
self.user = user
|
||||
self.password = password
|
||||
self.backup_path = backup_path
|
||||
self.backup_path_conf = backup_path_conf
|
||||
self.backup_path_db = backup_path_db
|
||||
self.backup_path_files = backup_path_files
|
||||
|
|
@ -57,21 +61,24 @@ class BackupGenerator:
|
|||
_verbose = verbose
|
||||
|
||||
def setup_backup_directory(self):
|
||||
specified = self.backup_path_db or self.backup_path_files or self.backup_path_private_files
|
||||
specified = self.backup_path or self.backup_path_db or self.backup_path_files or self.backup_path_private_files or self.backup_path_conf
|
||||
|
||||
if not specified:
|
||||
backups_folder = get_backup_path()
|
||||
if not os.path.exists(backups_folder):
|
||||
os.makedirs(backups_folder)
|
||||
os.makedirs(backups_folder, exist_ok=True)
|
||||
else:
|
||||
for file_path in [self.backup_path_files, self.backup_path_db, self.backup_path_private_files]:
|
||||
dir = os.path.dirname(file_path)
|
||||
os.makedirs(dir, exist_ok=True)
|
||||
if self.backup_path:
|
||||
os.makedirs(self.backup_path, exist_ok=True)
|
||||
|
||||
for file_path in set([self.backup_path_files, self.backup_path_db, self.backup_path_private_files, self.backup_path_conf]):
|
||||
if file_path:
|
||||
dir = os.path.dirname(file_path)
|
||||
os.makedirs(dir, exist_ok=True)
|
||||
|
||||
@property
|
||||
def site_config_backup_path(self):
|
||||
# For backwards compatibility
|
||||
import click
|
||||
click.secho("BackupGenerator.site_config_backup_path has been deprecated in favour of BackupGenerator.backup_path_conf", fg="yellow")
|
||||
return getattr(self, "backup_path_conf", None)
|
||||
|
||||
|
|
@ -89,14 +96,14 @@ class BackupGenerator:
|
|||
|
||||
self.todays_date = now_datetime().strftime('%Y%m%d_%H%M%S')
|
||||
|
||||
if not (self.backup_path_files and self.backup_path_db and self.backup_path_private_files):
|
||||
if not (self.backup_path_conf and self.backup_path_db and self.backup_path_files and self.backup_path_private_files):
|
||||
self.set_backup_file_name()
|
||||
|
||||
if not (last_db and last_file and last_private_file and site_config_backup_path):
|
||||
self.take_dump()
|
||||
self.copy_site_config()
|
||||
if not ignore_files:
|
||||
self.zip_files()
|
||||
self.backup_files()
|
||||
|
||||
else:
|
||||
self.backup_path_files = last_file
|
||||
|
|
@ -108,9 +115,11 @@ class BackupGenerator:
|
|||
#Generate a random name using today's date and a 8 digit random number
|
||||
for_conf = self.todays_date + "-" + self.site_slug + "-site_config_backup.json"
|
||||
for_db = self.todays_date + "-" + self.site_slug + "-database.sql.gz"
|
||||
for_public_files = self.todays_date + "-" + self.site_slug + "-files.tar"
|
||||
for_private_files = self.todays_date + "-" + self.site_slug + "-private-files.tar"
|
||||
backup_path = get_backup_path()
|
||||
ext = "tgz" if self.compress_files else "tar"
|
||||
|
||||
for_public_files = self.todays_date + "-" + self.site_slug + "-files." + ext
|
||||
for_private_files = self.todays_date + "-" + self.site_slug + "-private-files." + ext
|
||||
backup_path = self.backup_path or get_backup_path()
|
||||
|
||||
if not self.backup_path_conf:
|
||||
self.backup_path_conf = os.path.join(backup_path, for_conf)
|
||||
|
|
@ -165,15 +174,58 @@ class BackupGenerator:
|
|||
)
|
||||
|
||||
def zip_files(self):
|
||||
# For backwards compatibility - pre v13
|
||||
click.secho("BackupGenerator.zip_files has been deprecated in favour of BackupGenerator.backup_files", fg="yellow")
|
||||
return self.backup_files()
|
||||
|
||||
def get_summary(self):
|
||||
summary = {
|
||||
"config": {
|
||||
"path": self.backup_path_conf,
|
||||
"size": get_file_size(self.backup_path_conf, format=True)
|
||||
},
|
||||
"database": {
|
||||
"path": self.backup_path_db,
|
||||
"size": get_file_size(self.backup_path_db, format=True)
|
||||
}
|
||||
}
|
||||
|
||||
if os.path.exists(self.backup_path_files) and os.path.exists(self.backup_path_private_files):
|
||||
summary.update({
|
||||
"public": {
|
||||
"path": self.backup_path_files,
|
||||
"size": get_file_size(self.backup_path_files, format=True)
|
||||
},
|
||||
"private": {
|
||||
"path": self.backup_path_private_files,
|
||||
"size": get_file_size(self.backup_path_private_files, format=True)
|
||||
}
|
||||
})
|
||||
|
||||
return summary
|
||||
|
||||
def print_summary(self):
|
||||
backup_summary = self.get_summary()
|
||||
print("Backup Summary for {0} at {1}".format(frappe.local.site, now()))
|
||||
|
||||
for _type, info in backup_summary.items():
|
||||
print("{0:8}: {1:85} {2}".format(_type.title(), info["path"], info["size"]))
|
||||
|
||||
def backup_files(self):
|
||||
import subprocess
|
||||
|
||||
for folder in ("public", "private"):
|
||||
files_path = frappe.get_site_path(folder, "files")
|
||||
backup_path = self.backup_path_files if folder=="public" else self.backup_path_private_files
|
||||
|
||||
cmd_string = """tar -cf %s %s""" % (backup_path, files_path)
|
||||
err, out = frappe.utils.execute_in_shell(cmd_string)
|
||||
if self.compress_files:
|
||||
cmd_string = "tar cf - {1} | gzip > {0}"
|
||||
else:
|
||||
cmd_string = "tar -cf {0} {1}"
|
||||
output = subprocess.check_output(cmd_string.format(backup_path, files_path), shell=True)
|
||||
|
||||
if self.verbose:
|
||||
print('Backed up files', os.path.abspath(backup_path))
|
||||
if self.verbose and output:
|
||||
print(output.decode("utf8"))
|
||||
|
||||
def copy_site_config(self):
|
||||
site_config_backup_path = self.backup_path_conf
|
||||
|
|
@ -275,23 +327,27 @@ def fetch_latest_backups():
|
|||
}
|
||||
|
||||
|
||||
def scheduled_backup(older_than=6, ignore_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, force=False, verbose=False):
|
||||
def scheduled_backup(older_than=6, ignore_files=False, backup_path=None, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, backup_path_conf=None, force=False, verbose=False, compress=False):
|
||||
"""this function is called from scheduler
|
||||
deletes backups older than 7 days
|
||||
takes backup"""
|
||||
odb = new_backup(older_than, ignore_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, force=force, verbose=verbose)
|
||||
odb = new_backup(older_than, ignore_files, backup_path=backup_path, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, backup_path_conf=backup_path_conf, force=force, verbose=verbose, compress=compress)
|
||||
return odb
|
||||
|
||||
def new_backup(older_than=6, ignore_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, force=False, verbose=False):
|
||||
def new_backup(older_than=6, ignore_files=False, backup_path=None, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, backup_path_conf=None, force=False, verbose=False, compress=False):
|
||||
delete_temp_backups(older_than = frappe.conf.keep_backups_for_hours or 24)
|
||||
odb = BackupGenerator(frappe.conf.db_name, frappe.conf.db_name,\
|
||||
frappe.conf.db_password,
|
||||
backup_path_db=backup_path_db, backup_path_files=backup_path_files,
|
||||
backup_path=backup_path,
|
||||
backup_path_db=backup_path_db,
|
||||
backup_path_files=backup_path_files,
|
||||
backup_path_private_files=backup_path_private_files,
|
||||
backup_path_conf=backup_path_conf,
|
||||
db_host = frappe.db.host,
|
||||
db_port = frappe.db.port,
|
||||
db_type = frappe.conf.db_type,
|
||||
verbose=verbose)
|
||||
verbose=verbose,
|
||||
compress_files=compress)
|
||||
odb.get_backup(older_than, ignore_files, force=force)
|
||||
return odb
|
||||
|
||||
|
|
@ -336,9 +392,9 @@ def get_backup_path():
|
|||
backup_path = frappe.utils.get_site_path(conf.get("backup_path", "private/backups"))
|
||||
return backup_path
|
||||
|
||||
def backup(with_files=False, backup_path_db=None, backup_path_files=None, quiet=False):
|
||||
def backup(with_files=False, backup_path_db=None, backup_path_files=None, backup_path_private_files=None, backup_path_conf=None, quiet=False):
|
||||
"Backup"
|
||||
odb = scheduled_backup(ignore_files=not with_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, force=True)
|
||||
odb = scheduled_backup(ignore_files=not with_files, backup_path_db=backup_path_db, backup_path_files=backup_path_files, backup_path_private_files=backup_path_private_files, backup_path_conf=backup_path_conf, force=True)
|
||||
return {
|
||||
"backup_path_db": odb.backup_path_db,
|
||||
"backup_path_files": odb.backup_path_files,
|
||||
|
|
|
|||
|
|
@ -124,6 +124,13 @@ def build_context(context):
|
|||
if context.title_prefix and context.title and not context.title.startswith(context.title_prefix):
|
||||
context.title = '{0} - {1}'.format(context.title_prefix, context.title)
|
||||
|
||||
# apply context from hooks
|
||||
update_website_context = frappe.get_hooks('update_website_context')
|
||||
for method in update_website_context:
|
||||
values = frappe.get_attr(method)(context)
|
||||
if values:
|
||||
context.update(values)
|
||||
|
||||
return context
|
||||
|
||||
def load_sidebar(context, sidebar_json_path):
|
||||
|
|
@ -225,33 +232,45 @@ def add_sidebar_data(context):
|
|||
def add_metatags(context):
|
||||
tags = frappe._dict(context.get("metatags") or {})
|
||||
|
||||
if tags:
|
||||
if "og:type" not in tags:
|
||||
tags["og:type"] = "article"
|
||||
if "og:type" not in tags:
|
||||
tags["og:type"] = "article"
|
||||
|
||||
name = tags.get('name') or tags.get('title')
|
||||
if name:
|
||||
tags["og:title"] = tags["twitter:title"] = name
|
||||
if "title" not in tags and context.title:
|
||||
tags["title"] = context.title
|
||||
|
||||
description = tags.get("description") or context.description
|
||||
if description:
|
||||
tags['description'] = tags["og:description"] = tags["twitter:description"] = description
|
||||
title = tags.get("name") or tags.get("title")
|
||||
if title:
|
||||
tags["og:title"] = tags["twitter:title"] = title
|
||||
tags["twitter:card"] = "summary"
|
||||
|
||||
image = tags.get('image', context.image or None)
|
||||
if image:
|
||||
tags["og:image"] = tags["twitter:image"] = tags["image"] = frappe.utils.get_url(image)
|
||||
tags['twitter:card'] = "summary_large_image"
|
||||
if "description" not in tags and context.description:
|
||||
tags["description"] = context.description
|
||||
|
||||
if context.author or tags.get('author'):
|
||||
tags['author'] = context.author or tags.get('author')
|
||||
description = tags.get("description")
|
||||
if description:
|
||||
tags["og:description"] = tags["twitter:description"] = description
|
||||
|
||||
tags['og:url'] = tags['url'] = frappe.utils.get_url(context.path)
|
||||
if "image" not in tags and context.image:
|
||||
tags["image"] = context.image
|
||||
|
||||
if context.published_on:
|
||||
tags['datePublished'] = context.published_on
|
||||
image = tags.get("image")
|
||||
if image:
|
||||
tags["og:image"] = tags["twitter:image"] = tags["image"] = frappe.utils.get_url(image)
|
||||
tags['twitter:card'] = "summary_large_image"
|
||||
|
||||
if "author" not in tags and context.author:
|
||||
tags["author"] = context.author
|
||||
|
||||
tags['language'] = frappe.local.lang or 'en'
|
||||
tags["og:url"] = tags["url"] = frappe.utils.get_url(context.path)
|
||||
|
||||
if "published_on" not in tags and context.published_on:
|
||||
tags["published_on"] = context.published_on
|
||||
|
||||
if "published_on" in tags:
|
||||
tags["datePublished"] = tags["published_on"]
|
||||
del tags["published_on"]
|
||||
|
||||
tags["language"] = frappe.local.lang or "en"
|
||||
|
||||
# Get meta tags from Website Route meta
|
||||
# they can override the defaults set above
|
||||
|
|
|
|||
|
|
@ -81,7 +81,9 @@ class WebTemplate(Document):
|
|||
|
||||
return template
|
||||
|
||||
def render(self, values="{}"):
|
||||
def render(self, values=None):
|
||||
if not values:
|
||||
values = {}
|
||||
values = frappe.parse_json(values)
|
||||
values.update({"values": values})
|
||||
template = self.get_template(self.standard)
|
||||
|
|
|
|||
|
|
@ -136,9 +136,6 @@ def get_website_settings(context=None):
|
|||
|
||||
context.encoded_title = quote(encode(context.title or ""), str(""))
|
||||
|
||||
for update_website_context in hooks.update_website_context or []:
|
||||
frappe.get_attr(update_website_context)(context)
|
||||
|
||||
context.web_include_js = hooks.web_include_js or []
|
||||
|
||||
context.web_include_css = hooks.web_include_css or []
|
||||
|
|
|
|||
|
|
@ -6,5 +6,27 @@ from __future__ import unicode_literals
|
|||
import frappe
|
||||
from frappe.model.document import Document
|
||||
|
||||
|
||||
class WebsiteSidebar(Document):
|
||||
pass
|
||||
def get_items(self):
|
||||
items = frappe.get_all(
|
||||
"Website Sidebar Item",
|
||||
filters={'parent': self.name},
|
||||
fields=["title", "route", "group"],
|
||||
order_by="idx asc",
|
||||
)
|
||||
|
||||
items_by_group = {}
|
||||
items_without_group = []
|
||||
for item in items:
|
||||
if item.group:
|
||||
items_by_group.setdefault(item.group, []).append(item)
|
||||
else:
|
||||
items_without_group.append(item)
|
||||
|
||||
out = []
|
||||
for group, items in items_by_group.items():
|
||||
out.append({"group_title": group, "group_items": items})
|
||||
|
||||
out += items_without_group
|
||||
return out
|
||||
|
|
|
|||
|
|
@ -11,7 +11,6 @@ from os.path import join as join_path, exists as path_exists, abspath, splitext
|
|||
class WebsiteTheme(Document):
|
||||
def validate(self):
|
||||
self.validate_if_customizable()
|
||||
self.render_theme()
|
||||
self.generate_bootstrap_theme()
|
||||
|
||||
def on_update(self):
|
||||
|
|
@ -37,9 +36,6 @@ class WebsiteTheme(Document):
|
|||
if self.is_standard_and_not_valid_user():
|
||||
frappe.throw(_("Please Duplicate this Website Theme to customize."))
|
||||
|
||||
def render_theme(self):
|
||||
self.theme_scss = frappe.render_template('frappe/website/doctype/website_theme/website_theme_template.scss', self.as_dict())
|
||||
|
||||
def export_doc(self):
|
||||
"""Export to standard folder `[module]/website_theme/[name]/[name].json`."""
|
||||
from frappe.modules.export_file import export_to_files
|
||||
|
|
@ -55,6 +51,8 @@ class WebsiteTheme(Document):
|
|||
def generate_bootstrap_theme(self):
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
self.theme_scss = frappe.render_template('frappe/website/doctype/website_theme/website_theme_template.scss', self.as_dict())
|
||||
|
||||
# create theme file in site public files folder
|
||||
folder_path = abspath(frappe.utils.get_files_path('website_theme', is_private=False))
|
||||
# create folder if not exist
|
||||
|
|
@ -101,6 +99,8 @@ class WebsiteTheme(Document):
|
|||
self.generate_bootstrap_theme()
|
||||
|
||||
def set_as_default(self):
|
||||
self.generate_bootstrap_theme()
|
||||
self.save()
|
||||
website_settings = frappe.get_doc('Website Settings')
|
||||
website_settings.website_theme = self.name
|
||||
website_settings.ignore_validate = True
|
||||
|
|
@ -172,12 +172,15 @@ def get_scss_paths():
|
|||
|
||||
def after_migrate():
|
||||
"""
|
||||
Regenerate CSS files after migration.
|
||||
Regenerate Active Theme CSS file after migration.
|
||||
|
||||
Necessary to reflect possible changes in the imported SCSS files. Called at
|
||||
the end of every `bench migrate`.
|
||||
"""
|
||||
website_theme_list = frappe.get_list('Website Theme')
|
||||
for website_theme in website_theme_list:
|
||||
website_theme_doc = frappe.get_doc('Website Theme', website_theme.name)
|
||||
website_theme_doc.validate()
|
||||
website_theme = frappe.db.get_single_value('Website Settings', 'website_theme')
|
||||
if website_theme == 'Standard':
|
||||
return
|
||||
|
||||
doc = frappe.get_doc('Website Theme', website_theme)
|
||||
doc.generate_bootstrap_theme()
|
||||
doc.save()
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue