Merge branch 'develop' into fix-kanban-columns

This commit is contained in:
mergify[bot] 2025-06-26 04:50:08 +00:00 committed by GitHub
commit d2d7533266
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
148 changed files with 130360 additions and 92094 deletions

View file

@ -58,3 +58,6 @@ e9bbe03354079cfcef65a77b0c33f57b047a7c93
# ruff update
84ef6ec677c8657c3243ac456a1ef794bfb34a50
# replace `frappe.flags.in_test` with `frappe.in_test`
653c80b8483cc41aef25cd7d66b9b6bb188bf5f8

View file

@ -11,6 +11,7 @@ WEBSITE_REPOS = [
DOCUMENTATION_DOMAINS = [
"docs.erpnext.com",
"frappeframework.com",
"docs.frappe.io",
]

View file

@ -472,6 +472,11 @@ async function write_assets_json(metafile) {
}
async function update_assets_json_in_cache() {
// Redis won't be present during docker image build
if (process.env.FRAPPE_DOCKER_BUILD) {
return;
}
// update assets_json cache in redis, so that it can be read directly by python
let client = get_redis_subscriber("redis_cache");
// handle error event to avoid printing stack traces
@ -523,7 +528,7 @@ function run_build_command_for_apps(apps) {
log(
`\nInstalling dependencies for ${chalk.bold(app)} (because node_modules not found)`
);
execSync("yarn install", { encoding: "utf8", stdio: "inherit" });
execSync("yarn install --frozen-lockfile", { encoding: "utf8", stdio: "inherit" });
}
log("\nRunning build command for", chalk.bold(app));

View file

@ -83,6 +83,9 @@ cache: Optional["RedisWrapper"] = None
client_cache: Optional["ClientCache"] = None
STANDARD_USERS = ("Guest", "Administrator")
# this global may be subsequently changed by frappe.tests.utils.toggle_test_mode()
in_test = False
_dev_server = int(sbool(os.environ.get("DEV_SERVER", False)))
if _dev_server:
@ -219,7 +222,7 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force: bool =
"in_install_db": False,
"in_install_app": False,
"in_import": False,
"in_test": False,
"in_test": in_test,
"mute_messages": False,
"ignore_links": False,
"mute_emails": False,
@ -263,7 +266,7 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force: bool =
local.form_dict = _dict()
local.preload_assets = {"style": [], "script": [], "icons": []}
local.session = _dict()
local.dev_server = _dev_server
local.dev_server = _dev_server # only for backwards compatibility
local.qb = get_query_builder(local.conf.db_type)
if not cache or not client_cache:
setup_redis_cache_connection()
@ -640,7 +643,7 @@ def whitelist(allow_guest=False, xss_safe=False, methods=None):
global whitelisted, guest_methods, xss_safe_methods, allowed_http_methods_for_whitelisted_func
# validate argument types only if request is present
in_request_or_test = lambda: getattr(local, "request", None) or local.flags.in_test # noqa: E731
in_request_or_test = lambda: getattr(local, "request", None) or in_test # noqa: E731
# get function from the unbound / bound method
# this is needed because functions can be compared, but not methods
@ -740,7 +743,7 @@ def only_for(roles: list[str] | tuple[str] | str, message=False):
:param roles: Permitted role(s)
"""
if local.flags.in_test or local.session.user == "Administrator":
if in_test or local.session.user == "Administrator":
return
if isinstance(roles, str):
@ -767,7 +770,7 @@ def get_domain_data(module):
else:
return _dict()
except ImportError:
if local.flags.in_test:
if in_test:
return _dict()
else:
raise
@ -1595,7 +1598,7 @@ def copy_doc(doc: "Document", ignore_no_copy: bool = True) -> "Document":
fields_to_clear = ["name", "owner", "creation", "modified", "modified_by"]
if not local.flags.in_test:
if not in_test:
fields_to_clear.append("docstatus")
if isinstance(doc, BaseDocument) or hasattr(doc, "as_dict"):

View file

@ -15,7 +15,7 @@ from werkzeug.routing import Rule
import frappe
import frappe.client
from frappe import _, get_newargs, is_whitelisted
from frappe import _, cint, cstr, get_newargs, is_whitelisted
from frappe.core.doctype.server_script.server_script_utils import get_server_script_map
from frappe.handler import is_valid_http_method, run_server_script, upload_file
@ -65,17 +65,99 @@ def read_doc(doctype: str, name: str):
doc = frappe.get_doc(doctype, name)
doc.check_permission("read")
doc.apply_fieldlevel_read_permissions()
return doc
_doc = doc.as_dict()
for key in _doc:
df = doc.meta.get_field(key)
if df and df.fieldtype == "Link" and isinstance(_doc.get(key), int):
_doc[key] = cstr(_doc.get(key))
return _doc
def document_list(doctype: str):
if frappe.form_dict.get("fields"):
frappe.form_dict["fields"] = json.loads(frappe.form_dict["fields"])
def document_list(doctype: str) -> list[dict[str, Any]]:
"""
GET /api/v2/document/<doctype>?fields=[...],filters={...},...
# set limit of records for frappe.get_list
frappe.form_dict.limit_page_length = frappe.form_dict.limit or 20
# evaluate frappe.get_list
return frappe.call(frappe.client.get_list, doctype, **frappe.form_dict)
REST API endpoint for fetching doctype records
Args:
doctype: DocType name
Query Parameters (accessible via frappe.form_dict):
fields: JSON string of field names to fetch
filters: JSON string of filters to apply
order_by: Order by field
start: Starting offset for pagination (default: 0)
limit: Maximum number of records to fetch (default: 20)
group_by: Group by field
as_dict: Return results as dictionary (default: True)
Response:
frappe.response["data"]: List of document records as dicts
frappe.response["has_next_page"]: Indicates if more pages are available
Controller Customization:
Doctype controllers can customize queries by implementing a static get_list(query) method
that receives a QueryBuilder object and returns a modified QueryBuilder.
Example:
class Project(Document):
@staticmethod
def get_list(query):
Project = frappe.qb.DocType("Project")
if user_has_role("Project Owner"):
query = query.where(Project.owner == frappe.session.user)
else:
query = query.where(Project.is_private == 0)
return query
"""
from frappe.model.base_document import get_controller
args = frappe.form_dict
fields: list | None = frappe.parse_json(args.get("fields", None))
filters: dict | None = frappe.parse_json(args.get("filters", None))
order_by: str | None = args.get("order_by", None)
start: int = cint(args.get("start", 0))
limit: int = cint(args.get("limit", 20))
group_by: str | None = args.get("group_by", None)
debug: bool = args.get("debug", False)
as_dict: bool = args.get("as_dict", True)
query = frappe.qb.get_query(
table=doctype,
fields=fields,
filters=filters,
order_by=order_by,
offset=start,
limit=limit + 1, # Fetch one extra to check if there's a next page
group_by=group_by,
ignore_permissions=False,
)
# Check if the doctype controller has a static get_list method
controller = get_controller(doctype)
if hasattr(controller, "get_list"):
try:
return_value = controller.get_list(query)
if return_value is not None:
# Validate that the returned value has a run method (is a QueryBuilder-like object)
if not hasattr(return_value, "run"):
frappe.throw(
_(
"Custom get_list method for {0} must return a QueryBuilder object or None, got {1}"
).format(doctype, type(return_value).__name__)
)
query = return_value
except Exception as e:
frappe.throw(_("Error in {0}.get_list: {1}").format(doctype, str(e)))
data = query.run(as_dict=as_dict, debug=debug)
frappe.response["has_next_page"] = len(data) > limit
return data[:limit]
def count(doctype: str) -> int:
@ -91,7 +173,7 @@ def create_doc(doctype: str):
data.pop("doctype", None)
if (name := data.get("name")) and isinstance(name, str):
frappe.flags.api_name_set = True
return frappe.new_doc(doctype, **data).insert()
return frappe.new_doc(doctype, **data).insert().as_dict()
def copy_doc(doctype: str, name: str, ignore_no_copy: bool = True):
@ -118,7 +200,7 @@ def update_doc(doctype: str, name: str):
if doc.get("parenttype"):
frappe.get_doc(doc.parenttype, doc.parent).save()
return doc
return doc.as_dict()
def delete_doc(doctype: str, name: str):
@ -144,7 +226,9 @@ def execute_doc_method(doctype: str, name: str, method: str | None = None):
doc.is_whitelisted(method)
doc.check_permission(PERMISSION_MAP[frappe.request.method])
return doc.run_method(method, **frappe.form_dict)
result = doc.run_method(method, **frappe.form_dict)
frappe.response.docs.append(doc.as_dict())
return result
def run_doc_method(method: str, document: dict[str, Any] | str, kwargs=None):

View file

@ -92,8 +92,6 @@ def application(request: Request):
response = None
try:
rollback = True
init_request(request)
validate_auth()
@ -127,23 +125,19 @@ def application(request: Request):
else:
raise NotFound
except HTTPException as e:
return e
except Exception as e:
response = handle_exception(e)
response = e.get_response(request.environ) if isinstance(e, HTTPException) else handle_exception(e)
if db := getattr(frappe.local, "db", None):
db.rollback(chain=True)
else:
rollback = sync_database(rollback)
sync_database()
finally:
# Important note:
# this function *must* always return a response, hence any exception thrown outside of
# try..catch block like this finally block needs to be handled appropriately.
if rollback and request.method in UNSAFE_HTTP_METHODS and frappe.db:
frappe.db.rollback()
try:
run_after_request_hooks(request, response)
except Exception:
@ -177,14 +171,13 @@ def init_request(request):
# site does not exist
raise NotFound
frappe.connect(set_admin_as_user=False)
if frappe.local.conf.maintenance_mode:
frappe.connect()
if frappe.local.conf.allow_reads_during_maintenance:
setup_read_only_mode()
else:
raise frappe.SessionStopped("Session Stopped")
else:
frappe.connect(set_admin_as_user=False)
if request.path.startswith("/api/method/upload_file"):
from frappe.core.api.file import get_max_file_size
@ -397,21 +390,21 @@ def handle_exception(e):
return response
def sync_database(rollback: bool) -> bool:
def sync_database():
db = getattr(frappe.local, "db", None)
if not db:
# db isn't initialized, can't commit or rollback
return
# if HTTP method would change server state, commit if necessary
if frappe.db and (frappe.local.flags.commit or frappe.local.request.method in UNSAFE_HTTP_METHODS):
frappe.db.commit()
rollback = False
elif frappe.db:
frappe.db.rollback()
rollback = False
if frappe.local.request.method in UNSAFE_HTTP_METHODS or frappe.local.flags.commit:
db.commit(chain=True)
else:
db.rollback(chain=True)
# update session
if session := getattr(frappe.local, "session_obj", None):
if session.update():
rollback = False
return rollback
session.update()
# Always initialize sentry SDK if the DSN is sent

View file

@ -86,7 +86,7 @@ class AutoRepeat(Document):
validate_template(self.message or "")
def before_insert(self):
if not frappe.flags.in_test:
if not frappe.in_test:
start_date = getdate(self.start_date)
today_date = getdate(today())
if start_date <= today_date:
@ -112,7 +112,7 @@ class AutoRepeat(Document):
frappe.db.set_value(self.reference_doctype, self.reference_document, "auto_repeat", "")
def validate_reference_doctype(self):
if frappe.flags.in_test or frappe.flags.in_patch:
if frappe.in_test or frappe.flags.in_patch:
return
if not frappe.get_meta(self.reference_doctype).allow_auto_repeat:
frappe.throw(
@ -229,7 +229,7 @@ class AutoRepeat(Document):
self.disable_auto_repeat()
if self.reference_document and not frappe.flags.in_test:
if self.reference_document and not frappe.in_test:
self.notify_error_to_user(error_log)
def make_new_document(self):

View file

@ -145,7 +145,8 @@ def remove_apps_with_incomplete_dependencies(bootinfo):
remove_apps.add(app)
for app in remove_apps:
bootinfo.setup_wizard_not_required_apps.remove(app)
if app in bootinfo.setup_wizard_not_required_apps:
bootinfo.setup_wizard_not_required_apps.remove(app)
def get_letter_heads():

View file

@ -52,7 +52,7 @@ def build_missing_files():
folder = os.path.join(sites_path, "assets", "frappe", "dist", type)
current_asset_files.extend(os.listdir(folder))
development = frappe.local.conf.developer_mode or frappe.local.dev_server
development = frappe.local.conf.developer_mode or frappe._dev_server
build_mode = "development" if development else "production"
assets_json = frappe.read_file("assets/assets.json")

View file

@ -183,6 +183,7 @@ def main(
def run_tests_in_light_mode(test_params):
from frappe.testing.loader import FrappeTestLoader
from frappe.testing.result import FrappeTestResult
from frappe.tests.utils import toggle_test_mode
# init environment
frappe.init(test_params.site)
@ -196,6 +197,7 @@ def run_tests_in_light_mode(test_params):
frappe.utils.scheduler.disable_scheduler()
frappe.clear_cache()
toggle_test_mode(True)
suite = FrappeTestLoader().discover_tests(test_params)
result = unittest.TextTestRunner(failfast=test_params.failfast, resultclass=FrappeTestResult).run(suite)
if not result.wasSuccessful():
@ -370,6 +372,7 @@ def run_tests(
)
@click.option("--use-orchestrator", is_flag=True, help="Use orchestrator to run parallel tests")
@click.option("--dry-run", is_flag=True, default=False, help="Dont actually run tests")
@click.option("--lightmode", is_flag=True, default=False, help="Skips all before test setup")
@pass_context
def run_parallel_tests(
context: CliCtxObj,
@ -379,6 +382,7 @@ def run_parallel_tests(
with_coverage=False,
use_orchestrator=False,
dry_run=False,
lightmode=False,
):
from traceback_with_variables import activate_by_import
@ -399,6 +403,7 @@ def run_parallel_tests(
build_number=build_number,
total_builds=total_builds,
dry_run=dry_run,
lightmode=lightmode,
)
mode = "Orchestrator" if use_orchestrator else "Parallel"
banner = f"""

View file

@ -77,7 +77,7 @@ def build(
skip_frappe = False
# don't minify in developer_mode for faster builds
development = frappe.local.conf.developer_mode or frappe.local.dev_server
development = frappe.local.conf.developer_mode or frappe._dev_server
mode = "development" if development else "production"
if production:
mode = "production"

View file

@ -54,13 +54,10 @@ def make_access_log(
page=None,
columns=None,
):
user = frappe.session.user
in_request = frappe.request and frappe.request.method == "GET"
access_log = frappe.get_doc(
{
"doctype": "Access Log",
"user": user,
"user": frappe.session.user,
"export_from": doctype,
"reference_document": document,
"file_type": file_type,
@ -72,18 +69,11 @@ def make_access_log(
}
)
if frappe.flags.read_only:
if not frappe.in_test:
access_log.deferred_insert()
return
else:
access_log.db_insert()
# `frappe.db.commit` added because insert doesnt `commit` when called in GET requests like `printview`
# dont commit in test mode. It must be tempting to put this block along with the in_request in the
# whitelisted method...yeah, don't do it. That part would be executed possibly on a read only DB conn
if not frappe.flags.in_test or in_request:
frappe.db.commit()
# only for backward compatibility
_make_access_log = make_access_log

View file

@ -490,8 +490,8 @@ def get_permission_query_conditions_for_communication(user):
return """`tabCommunication`.communication_medium!='Email'"""
email_accounts = ['"{}"'.format(account.get("email_account")) for account in accounts]
return """`tabCommunication`.email_account in ({email_accounts}) or `tabCommunication`.recipients LIKE '%{user}%' or `tabCommunication`.sender LIKE '%{user}%' or `tabCommunication`.cc LIKE '%{user}%' or `tabCommunication`.bcc LIKE '%{user}%'""".format(
email_accounts=",".join(email_accounts), user=user
return """`tabCommunication`.email_account in ({email_accounts})""".format(
email_accounts=",".join(email_accounts)
)
@ -579,7 +579,7 @@ def parse_email(email_strings):
if not document_parts or len(document_parts) != 2:
continue
doctype = unquote_plus(document_parts[0])
doctype = frappe.unscrub(unquote_plus(document_parts[0]))
docname = unquote_plus(document_parts[1])
yield doctype, docname

View file

@ -101,7 +101,7 @@ class DataImport(Document):
def start_import(self):
from frappe.utils.scheduler import is_scheduler_inactive
run_now = frappe.flags.in_test or frappe.conf.developer_mode
run_now = frappe.in_test or frappe.conf.developer_mode
if is_scheduler_inactive() and not run_now:
frappe.throw(_("Scheduler is inactive. Cannot import data."), title=_("Scheduler Inactive"))

View file

@ -288,6 +288,7 @@
},
{
"default": "0",
"depends_on": "eval:!doc.issingle",
"fieldname": "allow_import",
"fieldtype": "Check",
"label": "Allow Import (via Data Import Tool)"
@ -784,7 +785,7 @@
"link_fieldname": "document_type"
}
],
"modified": "2025-05-21 21:58:59.947374",
"modified": "2025-06-24 07:46:34.380662",
"modified_by": "Administrator",
"module": "Core",
"name": "DocType",

View file

@ -322,7 +322,7 @@ class DocType(Document):
def check_developer_mode(self):
"""Throw exception if not developer mode or via patch"""
if frappe.flags.in_patch or frappe.flags.in_test:
if frappe.flags.in_patch or frappe.in_test:
return
if not frappe.conf.get("developer_mode") and not self.custom:
@ -594,7 +594,7 @@ class DocType(Document):
global_search_fields_after_update.append("name")
if set(global_search_fields_before_update) != set(global_search_fields_after_update):
now = (not frappe.request) or frappe.flags.in_test or frappe.flags.in_install
now = (not frappe.request) or frappe.in_test or frappe.flags.in_install
frappe.enqueue("frappe.utils.global_search.rebuild_for_doctype", now=now, doctype=self.name)
def set_base_class_for_controller(self):

View file

@ -242,7 +242,7 @@ def create_json_gz_file(data, dt, dn, report_name):
frappe.scrub(report_name), frappe.utils.data.format_datetime(frappe.utils.now(), "Y-m-d-H-M")
)
encoded_content = frappe.safe_encode(frappe.as_json(data, indent=None, separators=(",", ":")))
compressed_content = gzip.compress(encoded_content)
compressed_content = gzip.compress(encoded_content, compresslevel=5)
# Call save() file function to upload and attach the file
_file = frappe.get_doc(

View file

@ -29,7 +29,7 @@ class RoleProfile(Document):
self.clear_cache()
self.queue_action(
"update_all_users",
now=frappe.flags.in_test or frappe.flags.in_install,
now=frappe.in_test or frappe.flags.in_install,
enqueue_after_commit=True,
queue="long",
)

View file

@ -112,6 +112,16 @@ class RQJob(Document):
except InvalidJobOperation:
frappe.msgprint(_("Job is not running."), title=_("Invalid Operation"))
@check_permissions
def cancel(self):
if self.status == "queued":
self.job.cancel()
else:
frappe.msgprint(
_("Job is in {0} state and can't be cancelled").format(self.status),
title=_("Invalid Operation"),
)
@staticmethod
def get_count(filters=None) -> int:
return len(RQJob.get_matching_job_ids(filters))

View file

@ -26,6 +26,12 @@ def wait_for_completion(job: Job):
class TestRQJob(IntegrationTestCase):
BG_JOB = "frappe.core.doctype.rq_job.test_rq_job.test_func"
def setUp(self) -> None:
# Cleanup all pending jobs
for job in frappe.get_all("RQ Job", {"status": "queued"}):
frappe.get_doc("RQ Job", job.name).cancel()
return super().setUp()
def check_status(self, job: Job, status, wait=True):
if wait:
wait_for_completion(job)

View file

@ -4,6 +4,7 @@
import hashlib
import json
from datetime import datetime, timedelta
from functools import lru_cache
import click
from croniter import CroniterBadCronError, croniter
@ -14,6 +15,8 @@ from frappe.model.document import Document
from frappe.utils import get_datetime, now_datetime
from frappe.utils.background_jobs import enqueue, is_job_enqueued
parse_cron = lru_cache(croniter) # Cache parsed cron-expressions
class ScheduledJobType(Document):
# begin: auto-generated types
@ -132,10 +135,10 @@ class ScheduledJobType(Document):
# A dynamic fallback like current time might miss the scheduler interval and job will never start.
last_execution = get_datetime(self.last_execution or self.creation)
next_execution = croniter(self.cron_format, last_execution).get_next(datetime)
next_execution = parse_cron(self.cron_format).get_next(datetime, start_time=last_execution)
if self.frequency in ("Hourly Maintenance", "Daily Maintenance"):
next_execution += timedelta(minutes=maintenance_offset)
return croniter(self.cron_format, last_execution).get_next(datetime)
return parse_cron(self.cron_format).get_next(datetime, start_time=last_execution)
def execute(self):
if frappe.job:

View file

@ -25,6 +25,7 @@ from frappe.model.delete_doc import delete_doc
from frappe.tests import IntegrationTestCase
from frappe.tests.classes.context_managers import change_settings
from frappe.tests.test_api import FrappeAPITestCase
from frappe.tests.utils import toggle_test_mode
from frappe.utils import get_url
user_module = frappe.core.doctype.user.user
@ -212,13 +213,15 @@ class TestUser(IntegrationTestCase):
# test password strength while saving user with new password
user = frappe.get_doc("User", "test@example.com")
frappe.flags.in_test = False
user.new_password = "password"
self.assertRaises(frappe.exceptions.ValidationError, user.save)
user.reload()
user.new_password = "Eastern_43A1W"
user.save()
frappe.flags.in_test = True
toggle_test_mode(False)
try:
user.new_password = "password"
self.assertRaises(frappe.exceptions.ValidationError, user.save)
user.reload()
user.new_password = "Eastern_43A1W"
user.save()
finally:
toggle_test_mode(True)
def test_comment_mentions(self):
comment = """

View file

@ -173,7 +173,7 @@ class User(Document):
self.__new_password = self.new_password
self.new_password = ""
if not frappe.flags.in_test:
if not frappe.in_test:
self.password_strength_test()
if self.name not in STANDARD_USERS:
@ -269,7 +269,7 @@ class User(Document):
self.share_with_self()
clear_notifications(user=self.name)
frappe.clear_cache(user=self.name)
now = frappe.flags.in_test or frappe.flags.in_install
now = frappe.in_test or frappe.flags.in_install
self.send_password_notification(self.__new_password)
frappe.enqueue(
"frappe.core.doctype.user.user.create_contact",

View file

@ -105,7 +105,7 @@ def get_user_permissions(user=None):
out = {}
def add_doc_to_perm(perm, doc_name, is_default):
def add_doc_to_perm(perm, doc_name, is_default, hide_descendants):
# group rules for each type
# for example if allow is "Customer", then build all allowed customers
# in a list
@ -114,7 +114,12 @@ def get_user_permissions(user=None):
out[perm.allow].append(
frappe._dict(
{"doc": doc_name, "applicable_for": perm.get("applicable_for"), "is_default": is_default}
{
"doc": doc_name,
"applicable_for": perm.get("applicable_for"),
"is_default": is_default,
"hide_descendants": hide_descendants,
}
)
)
@ -125,12 +130,12 @@ def get_user_permissions(user=None):
filters=dict(user=user),
):
meta = frappe.get_meta(perm.allow)
add_doc_to_perm(perm, perm.for_value, perm.is_default)
add_doc_to_perm(perm, perm.for_value, perm.is_default, perm.hide_descendants)
if meta.is_nested_set() and not perm.hide_descendants:
decendants = frappe.db.get_descendants(perm.allow, perm.for_value)
for doc in decendants:
add_doc_to_perm(perm, doc, False)
add_doc_to_perm(perm, doc, False, False)
out = frappe._dict(out)
frappe.cache.hset("user_permissions", user, out)

View file

@ -75,7 +75,7 @@ def get_db(socket=None, host=None, user=None, password=None, port=None, cur_db_n
import frappe.database.sqlite.database
return frappe.database.sqlite.database.SQLiteDatabase(cur_db_name=cur_db_name)
elif conf.use_mysqlclient:
elif conf.get("use_mysqlclient", 1):
import frappe.database.mariadb.mysqlclient
return frappe.database.mariadb.mysqlclient.MariaDBDatabase(

View file

@ -1147,7 +1147,7 @@ class Database:
mode = "READ ONLY" if read_only else ""
self.sql(f"START TRANSACTION {mode}")
def commit(self):
def commit(self, *, chain=False):
"""Commit current transaction. Calls SQL `COMMIT`."""
if self._disable_transaction_control:
warnings.warn(message=TRANSACTION_DISABLED_MSG, stacklevel=2)
@ -1158,12 +1158,15 @@ class Database:
self.before_commit.run()
self.sql("commit")
self.begin() # explicitly start a new transaction
if chain:
self.sql("commit and chain")
else:
self.sql("commit")
self.begin()
self.after_commit.run()
def rollback(self, *, save_point=None):
def rollback(self, *, save_point=None, chain=False):
"""`ROLLBACK` current transaction. Optionally rollback to a known save_point."""
if save_point:
self.sql(f"rollback to savepoint {save_point}")
@ -1173,8 +1176,11 @@ class Database:
self.before_rollback.run()
self.sql("rollback")
self.begin()
if chain:
self.sql("rollback and chain")
else:
self.sql("rollback")
self.begin()
self.after_rollback.run()
else:

File diff suppressed because it is too large Load diff

View file

@ -306,7 +306,7 @@ def read_multi_pdf(output) -> bytes:
@deprecated("frappe.gzip_compress", "unknown", "v17", "Use py3 methods directly (this was compat for py2).")
def gzip_compress(data, compresslevel=9):
def gzip_compress(data, compresslevel=5):
"""Compress data in one shot and return the compressed string.
Optional argument is the compression level, in range of 0-9.
"""

View file

@ -297,7 +297,7 @@
"icon": "fa fa-calendar",
"idx": 1,
"links": [],
"modified": "2025-04-10 13:08:32.540745",
"modified": "2025-06-17 15:31:01.945146",
"modified_by": "Administrator",
"module": "Desk",
"name": "Event",
@ -310,7 +310,7 @@
"print": 1,
"read": 1,
"report": 1,
"role": "All",
"role": "Desk User",
"share": 1,
"write": 1
},
@ -326,6 +326,15 @@
"role": "System Manager",
"share": 1,
"write": 1
},
{
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "All",
"share": 1
}
],
"read_only": 1,

View file

@ -94,8 +94,8 @@ def enqueue_create_notification(users: list[str] | str, doc: dict):
"frappe.desk.doctype.notification_log.notification_log.make_notification_logs",
doc=doc,
users=users,
now=frappe.flags.in_test,
enqueue_after_commit=not frappe.flags.in_test,
now=frappe.in_test,
enqueue_after_commit=not frappe.in_test,
)
@ -141,7 +141,7 @@ def send_notification_email(doc: NotificationLog):
template="new_notification",
args=args,
header=[header, "orange"],
now=frappe.flags.in_test,
now=frappe.in_test,
)

View file

@ -49,7 +49,7 @@ class SystemConsole(Document):
frappe.db.commit()
@frappe.whitelist()
@frappe.whitelist(methods=["POST"])
def execute_code(doc):
console = frappe.get_doc(json.loads(doc))
console.run()

View file

@ -57,7 +57,7 @@ def health_check(step: str):
try:
return func(*args, **kwargs)
except Exception as e:
if frappe.flags.in_test:
if frappe.in_test:
raise
frappe.log(frappe.get_traceback())
# nosemgrep

View file

@ -112,9 +112,7 @@ class DocTags:
tl = unique(filter(lambda x: x, tl))
tags = ",".join(tl)
try:
frappe.db.sql(
"update `tab{}` set _user_tags={} where name={}".format(self.dt, "%s", "%s"), (tags, dn)
)
frappe.db.set_value(self.dt, dn, "_user_tags", tags, update_modified=False)
doc = frappe.get_lazy_doc(self.dt, dn)
update_tags(doc, tags)
except Exception as e:

View file

@ -235,7 +235,7 @@ def disable_saving_as_public():
frappe.flags.in_install
or frappe.flags.in_uninstall
or frappe.flags.in_patch
or frappe.flags.in_test
or frappe.in_test
or frappe.flags.in_fixtures
or frappe.flags.in_migrate
)

View file

@ -59,7 +59,7 @@ def getdoc(doctype, name):
@frappe.whitelist()
def getdoctype(doctype, with_parent=False, cached_timestamp=None):
def getdoctype(doctype, with_parent=False):
"""load doctype"""
docs = []
@ -75,9 +75,6 @@ def getdoctype(doctype, with_parent=False, cached_timestamp=None):
frappe.response["user_settings"] = get_user_settings(parent_dt or doctype)
if cached_timestamp and docs[0].modified == cached_timestamp:
return "use_cache"
frappe.response.docs.extend(docs)

View file

@ -16,9 +16,6 @@ ASSET_KEYS = (
"__css",
"__list_js",
"__calendar_js",
"__map_js",
"__linked_with",
"__messages",
"__print_formats",
"__workflow_docs",
"__form_grid_templates",
@ -60,9 +57,6 @@ class FormMeta(Meta):
if self.get("__assets_loaded", False):
return
self.add_search_fields()
self.add_linked_document_type()
if not self.istable:
self.add_code()
self.add_custom_script()
@ -77,15 +71,10 @@ class FormMeta(Meta):
def as_dict(self, no_nulls=False):
d = super().as_dict(no_nulls=no_nulls)
__dict = self.__dict__
for k in ASSET_KEYS:
d[k] = self.get(k)
# d['fields'] = d.get('fields', [])
for i, df in enumerate(d.get("fields") or []):
for k in ("search_fields", "is_custom_field", "linked_document_type"):
df[k] = self.get("fields")[i].get(k)
d[k] = __dict.get(k)
return d
@ -186,19 +175,6 @@ class FormMeta(Meta):
self.set("__custom_js", form_script)
self.set("__custom_list_js", list_script)
def add_search_fields(self):
"""add search fields found in the doctypes indicated by link fields' options"""
for df in self.get("fields", {"fieldtype": "Link", "options": ["!=", "[Select]"]}):
if df.options:
try:
search_fields = frappe.get_meta(df.options).search_fields
except frappe.DoesNotExistError:
self._show_missing_doctype_msg(df)
if search_fields:
search_fields = search_fields.split(",")
df.search_fields = [sf.strip() for sf in search_fields]
def _show_missing_doctype_msg(self, df):
# A link field is referring to non-existing doctype, this usually happens when
# customizations are removed or some custom app is removed but hasn't cleaned
@ -217,14 +193,6 @@ class FormMeta(Meta):
frappe.throw(msg, title=_("Missing DocType"))
def add_linked_document_type(self):
for df in self.get("fields", {"fieldtype": "Link"}):
if df.options:
try:
df.linked_document_type = frappe.get_meta(df.options).document_type
except frappe.DoesNotExistError:
self._show_missing_doctype_msg(df)
def load_print_formats(self):
print_formats = frappe.db.sql(
"""select * FROM `tabPrint Format`

View file

@ -12,7 +12,7 @@ from frappe.utils.scheduler import is_scheduler_inactive
from frappe.utils.telemetry import capture_doc
@frappe.whitelist()
@frappe.whitelist(methods=["POST", "PUT"])
def savedocs(doc, action):
"""save / submit / update doclist"""
doc = frappe.get_doc(json.loads(doc))
@ -51,7 +51,7 @@ def savedocs(doc, action):
frappe.msgprint(frappe._(status_message), indicator="green", alert=True)
@frappe.whitelist()
@frappe.whitelist(methods=["POST", "PUT"])
def cancel(doctype=None, name=None, workflow_state_fieldname=None, workflow_state=None):
"""cancel a doclist"""
doc = frappe.get_doc(doctype, name)
@ -64,7 +64,7 @@ def cancel(doctype=None, name=None, workflow_state_fieldname=None, workflow_stat
frappe.msgprint(frappe._("Cancelled"), indicator="red", alert=True)
@frappe.whitelist()
@frappe.whitelist(methods=["POST", "PUT"])
def discard(doctype: str, name: str | int):
"""discard a draft document"""
doc = frappe.get_doc(doctype, name)
@ -79,7 +79,7 @@ def send_updated_docs(doc):
from .load import get_docinfo
get_docinfo(doc)
doc.apply_fieldlevel_read_permissions()
d = doc.as_dict()
if hasattr(doc, "localname"):
d["localname"] = doc.localname

View file

@ -244,7 +244,7 @@ def update_system_settings(args): # nosemgrep
"date_format": frappe.db.get_value("Country", args.get("country"), "date_format"),
"time_format": frappe.db.get_value("Country", args.get("country"), "time_format"),
"number_format": number_format,
"enable_scheduler": 1 if not frappe.flags.in_test else 0,
"enable_scheduler": 1 if not frappe.in_test else 0,
"backup_limit": 3, # Default for downloadable backups
"enable_telemetry": cint(args.get("enable_telemetry")),
}

View file

@ -14,8 +14,9 @@ from frappe.desk.reportview import clean_params, parse_json
from frappe.model.utils import render_include
from frappe.modules import get_module_path, scrub
from frappe.monitor import add_data_to_monitor
from frappe.permissions import get_role_permissions, has_permission
from frappe.permissions import get_role_permissions, get_roles, has_permission
from frappe.utils import cint, cstr, flt, format_duration, get_html_format, sbool
from frappe.utils.caching import request_cache
def get_report_doc(report_name):
@ -706,6 +707,9 @@ def has_match(
match = False
break
if match:
match = has_unrestricted_read_access(doctype=ref_doctype, user=frappe.session.user)
# each doctype could have multiple conflicting user permission doctypes, hence using OR
# so that even if one of the sets allows a match, it is true
matched_for_doctype = matched_for_doctype or match
@ -722,6 +726,32 @@ def has_match(
return resultant_match
@request_cache
def has_unrestricted_read_access(doctype, user):
	"""Check whether any of the user's roles grants a plain read on `doctype`:
	permlevel 0, read enabled, and not limited by the if_owner restriction.

	Both standard (DocPerm) and customized (Custom DocPerm) permission rules
	are consulted; a matching row in either table grants access. Memoized per
	request via @request_cache.
	"""
	filters = {
		"parent": doctype,
		"role": ["in", get_roles(user)],
		"permlevel": 0,
		"read": 1,
		"if_owner": 0,
	}

	standard = frappe.db.exists("DocPerm", filters)
	custom = frappe.db.exists("Custom DocPerm", filters)

	# either a standard or a customized rule is sufficient
	return bool(standard or custom)
def get_linked_doctypes(columns, data):
linked_doctypes = {}

View file

@ -315,7 +315,7 @@ def compress(data, args=None):
return {"keys": keys, "values": values, "user_info": user_info}
@frappe.whitelist()
@frappe.whitelist(methods=["POST", "PUT"])
def save_report(name, doctype, report_settings):
"""Save reports of type Report Builder from Report View"""
@ -345,7 +345,7 @@ def save_report(name, doctype, report_settings):
return report.name
@frappe.whitelist()
@frappe.whitelist(methods=["POST", "DELETE"])
def delete_report(name):
"""Delete reports of type Report Builder from Report View"""
@ -555,7 +555,7 @@ def parse_field(field: str) -> tuple[str | None, str]:
return None, key.strip("`")
@frappe.whitelist()
@frappe.whitelist(methods=["POST", "DELETE"])
def delete_items():
"""delete selected items"""
import json

View file

@ -159,7 +159,7 @@ class EmailAccount(Document):
if self.enable_incoming and self.use_imap and len(self.imap_folder) <= 0:
frappe.throw(_("You need to set one IMAP folder for {0}").format(frappe.bold(self.email_id)))
if frappe.local.flags.in_patch or frappe.local.flags.in_test:
if frappe.local.flags.in_patch or frappe.in_test:
return
use_oauth = self.auth_method == "OAuth"
@ -363,9 +363,7 @@ class EmailAccount(Document):
@property
def _password(self):
raise_exception = not (
self.auth_method == "OAuth" or self.no_smtp_authentication or frappe.flags.in_test
)
raise_exception = not (self.auth_method == "OAuth" or self.no_smtp_authentication or frappe.in_test)
return self.get_password(raise_exception=raise_exception)
@property
@ -565,7 +563,7 @@ class EmailAccount(Document):
self.set_failed_attempts_count(self.get_failed_attempts_count() + 1)
def _disable_broken_incoming_account(self, description):
if frappe.flags.in_test:
if frappe.in_test:
return
self.db_set("enable_incoming", 0)

View file

@ -81,7 +81,7 @@ class EmailDomain(Document):
def validate(self):
"""Validate POP3/IMAP and SMTP connections."""
if frappe.local.flags.in_patch or frappe.local.flags.in_test or frappe.local.flags.in_install:
if frappe.local.flags.in_patch or frappe.in_test or frappe.local.flags.in_install:
return
self.validate_incoming_server_conn()

View file

@ -182,7 +182,7 @@ class EmailQueue(Document):
message = ctx.build_message(recipient.recipient)
if method := get_hook_method("override_email_send"):
method(self, self.sender, recipient.recipient, message)
elif not frappe.flags.in_test or frappe.flags.testing_email:
elif not frappe.in_test or frappe.flags.testing_email:
if ctx.email_account_doc.service == "Frappe Mail":
is_newsletter = self.reference_doctype == "Newsletter"
ctx.frappe_mail_client.send_raw(
@ -200,7 +200,7 @@ class EmailQueue(Document):
ctx.update_recipient_status_to_sent(recipient)
if frappe.flags.in_test and not frappe.flags.testing_email:
if frappe.in_test and not frappe.flags.testing_email:
frappe.flags.sent_mail = message
return
@ -773,7 +773,7 @@ class QueueBuilder:
job_name=frappe.utils.get_job_name(
"send_bulk_emails_for", self.reference_doctype, self.reference_name
),
now=frappe.flags.in_test or send_now,
now=frappe.in_test or send_now,
queue="long",
)

View file

@ -210,7 +210,7 @@ class Newsletter(WebsiteGenerator):
args["message"] = self.get_message(medium="email")
is_auto_commit_set = bool(frappe.db.auto_commit_on_many_writes)
frappe.db.auto_commit_on_many_writes = not frappe.flags.in_test
frappe.db.auto_commit_on_many_writes = not frappe.in_test
frappe.sendmail(
subject=self.subject,
@ -421,7 +421,7 @@ def send_scheduled_email():
frappe.db.set_value("Newsletter", newsletter_name, "email_sent", 0)
newsletter.log_error("Failed to send newsletter")
if not frappe.flags.in_test:
if not frappe.in_test:
frappe.db.commit()
frappe.flags.is_scheduler_running = False

View file

@ -322,7 +322,7 @@ def get_context(context):
"frappe.email.doctype.notification.notification.evaluate_alert",
doc=doc,
alert=self,
now=frappe.flags.in_test,
now=frappe.in_test,
enqueue_after_commit=enqueue_after_commit,
)

View file

@ -95,7 +95,7 @@ def get_unsubcribed_url(reference_doctype, reference_name, email, unsubscribe_me
@frappe.whitelist(allow_guest=True)
def unsubscribe(doctype, name, email):
# unsubsribe from comments and communications
if not frappe.flags.in_test and not verify_request():
if not frappe.in_test and not verify_request():
return
try:

View file

@ -219,6 +219,9 @@ class EmailServer:
uidnext = int(self.parse_imap_response("UIDNEXT", message[0]) or "1")
frappe.db.set_value("Email Account", self.settings.email_account, "uidnext", uidnext)
if uid_validity is None:
frappe.flags.initial_sync = True
if not uid_validity or uid_validity != current_uid_validity:
# uidvalidity changed & all email uids are reindexed by server
frappe.db.set_value(
@ -277,8 +280,9 @@ class EmailServer:
except imaplib.IMAP4.abort:
if self.retry_count < self.retry_limit:
self.connect()
self.get_messages(folder)
self.retry_count += 1
self.get_messages(folder)
except Exception as e:
if self.has_login_limit_exceeded(e):
raise LoginLimitExceeded(e) from e
@ -632,7 +636,7 @@ class InboundMail(Email):
def process(self):
"""Create communication record from email."""
if self.is_sender_same_as_receiver() and not self.is_reply():
if frappe.flags.in_test:
if frappe.in_test:
print("WARN: Cannot pull email. Sender same as recipient inbox")
raise SentEmailInInboxError

View file

@ -109,7 +109,7 @@ class SMTPServer:
frappe.request.after_response.add(self.quit)
elif frappe.job:
frappe.job.after_job.add(self.quit)
elif not frappe.flags.in_test:
elif not frappe.in_test:
# Console?
import atexit

View file

@ -248,31 +248,21 @@ scheduler_events = {
],
"daily_long": [],
"daily_maintenance": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_daily",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_daily",
"frappe.integrations.doctype.google_drive.google_drive.daily_backup",
"frappe.email.doctype.auto_email_report.auto_email_report.send_daily",
"frappe.desk.notifications.clear_notifications",
"frappe.sessions.clear_expired_sessions",
"frappe.website.doctype.personal_data_deletion_request.personal_data_deletion_request.remove_unverified_record",
"frappe.integrations.doctype.google_contacts.google_contacts.sync",
"frappe.automation.doctype.auto_repeat.auto_repeat.make_auto_repeat_entry",
"frappe.core.doctype.log_settings.log_settings.run_log_clean_up",
],
"weekly_long": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_weekly",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_weekly",
"frappe.desk.form.document_follow.send_weekly_updates",
"frappe.utils.change_log.check_for_update",
"frappe.integrations.doctype.google_drive.google_drive.weekly_backup",
"frappe.desk.doctype.changelog_feed.changelog_feed.fetch_changelog_feed",
],
"monthly": [
"frappe.email.doctype.auto_email_report.auto_email_report.send_monthly",
],
"monthly_long": [
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_monthly"
],
}
sounds = [

View file

@ -1,47 +0,0 @@
// Copyright (c) 2016, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on("Dropbox Settings", {
	refresh: function (frm) {
		// Hide the key fields when keys come from site config instead of the form.
		frm.toggle_display(
			["app_access_key", "app_secret_key"],
			!frm.doc.__onload?.dropbox_setup_via_site_config
		);
		frm.events.take_backup(frm);
	},

	// True when an app key/secret pair is available (form fields or site config).
	are_keys_present: function (frm) {
		return (
			(frm.doc.app_access_key && frm.doc.app_secret_key) ||
			frm.doc.__onload?.dropbox_setup_via_site_config
		);
	},

	// Button handler: open the Dropbox OAuth authorization page in a new window.
	allow_dropbox_access: function (frm) {
		if (!frm.events.are_keys_present(frm)) {
			frappe.msgprint(__("App Access Key and/or Secret Key are not present."));
			return;
		}
		frappe.call({
			method: "frappe.integrations.doctype.dropbox_settings.dropbox_settings.get_dropbox_authorize_url",
			freeze: true,
			callback: function (r) {
				if (!r.exc) {
					window.open(r.message.auth_url);
				}
			},
		});
	},

	// Show a "Take Backup Now" button once Dropbox is enabled and authorized.
	take_backup: function (frm) {
		if (frm.doc.enabled && (frm.doc.dropbox_refresh_token || frm.doc.dropbox_access_token)) {
			frm.add_custom_button(__("Take Backup Now"), function () {
				frappe.call({
					method: "frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup",
					freeze: true,
				});
			});
		}
	},
});

View file

@ -1,126 +0,0 @@
{
"actions": [],
"creation": "2016-09-21 10:12:57.399174",
"doctype": "DocType",
"document_type": "System",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"enabled",
"send_notifications_to",
"send_email_for_successful_backup",
"backup_frequency",
"limit_no_of_backups",
"no_of_backups",
"file_backup",
"app_access_key",
"app_secret_key",
"allow_dropbox_access",
"dropbox_refresh_token",
"dropbox_access_token"
],
"fields": [
{
"default": "0",
"fieldname": "enabled",
"fieldtype": "Check",
"label": "Enabled"
},
{
"fieldname": "send_notifications_to",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Send Notifications To",
"reqd": 1
},
{
"default": "1",
"description": "Note: By default emails for failed backups are sent.",
"fieldname": "send_email_for_successful_backup",
"fieldtype": "Check",
"label": "Send Email for Successful Backup"
},
{
"fieldname": "backup_frequency",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Backup Frequency",
"options": "\nDaily\nWeekly",
"reqd": 1
},
{
"default": "0",
"fieldname": "limit_no_of_backups",
"fieldtype": "Check",
"label": "Limit Number of DB Backups"
},
{
"default": "5",
"depends_on": "eval:doc.limit_no_of_backups",
"fieldname": "no_of_backups",
"fieldtype": "Int",
"label": "Number of DB Backups"
},
{
"default": "1",
"fieldname": "file_backup",
"fieldtype": "Check",
"label": "File Backup"
},
{
"fieldname": "app_access_key",
"fieldtype": "Data",
"label": "App Access Key"
},
{
"fieldname": "app_secret_key",
"fieldtype": "Password",
"label": "App Secret Key"
},
{
"fieldname": "allow_dropbox_access",
"fieldtype": "Button",
"label": "Allow Dropbox Access"
},
{
"fieldname": "dropbox_refresh_token",
"fieldtype": "Password",
"hidden": 1,
"label": "Dropbox Refresh Token",
"no_copy": 1,
"read_only": 1
},
{
"fieldname": "dropbox_access_token",
"fieldtype": "Password",
"hidden": 1,
"label": "Dropbox Access Token"
}
],
"in_create": 1,
"issingle": 1,
"links": [],
"modified": "2024-03-23 16:03:23.176690",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Dropbox Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,378 +0,0 @@
# Copyright (c) 2015, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
from urllib.parse import parse_qs, urlparse
import dropbox
from rq.timeouts import JobTimeoutException
import frappe
from frappe import _
from frappe.integrations.offsite_backup_utils import (
get_chunk_site,
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import cint, encode, get_backups_path, get_files_path, get_request_site_address
from frappe.utils.background_jobs import enqueue
from frappe.utils.backups import new_backup
ignore_list = [".DS_Store"]
class DropboxSettings(Document):
	"""Single doctype storing Dropbox backup configuration and OAuth tokens."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		app_access_key: DF.Data | None
		app_secret_key: DF.Password | None
		backup_frequency: DF.Literal["", "Daily", "Weekly"]
		dropbox_access_token: DF.Password | None
		dropbox_refresh_token: DF.Password | None
		enabled: DF.Check
		file_backup: DF.Check
		limit_no_of_backups: DF.Check
		no_of_backups: DF.Int
		send_email_for_successful_backup: DF.Check
	# end: auto-generated types

	def onload(self):
		# Tell the form that keys come from site config so the UI can hide the key fields.
		if not self.app_access_key and frappe.conf.dropbox_access_key:
			self.set_onload("dropbox_setup_via_site_config", 1)

	def validate(self):
		# A backup limit of zero or less would delete every remote backup.
		if self.enabled and self.limit_no_of_backups and self.no_of_backups < 1:
			frappe.throw(_("Number of DB backups cannot be less than 1"))
@frappe.whitelist()
def take_backup():
	"""Queue a long background job that uploads a backup to Dropbox."""
	job_path = "frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup_to_dropbox"
	enqueue(job_path, queue="long", timeout=1500)
	frappe.msgprint(_("Queued for backup. It may take a few minutes to an hour."))
def take_backups_daily():
	"""Scheduler hook: run the Dropbox backup when the configured frequency is Daily."""
	take_backups_if("Daily")


def take_backups_weekly():
	"""Scheduler hook: run the Dropbox backup when the configured frequency is Weekly."""
	take_backups_if("Weekly")


def take_backups_if(freq):
	"""Run the Dropbox backup only if the configured frequency equals `freq`."""
	configured = frappe.db.get_single_value("Dropbox Settings", "backup_frequency")
	if configured == freq:
		take_backup_to_dropbox()
def take_backup_to_dropbox(retry_count=0, upload_db_backup=True):
	"""Run the Dropbox backup and email the configured recipients on success/failure.

	Args:
		retry_count: number of retries already attempted after a worker timeout.
		upload_db_backup: whether the database dump still needs uploading
			(False on retry, assuming it was uploaded before the timeout).
	"""
	did_not_upload, error_log = [], []
	try:
		if cint(frappe.db.get_single_value("Dropbox Settings", "enabled")):
			validate_file_size()

			did_not_upload, error_log = backup_to_dropbox(upload_db_backup)
			if did_not_upload:
				# jump to the generic failure handler below, which emails the error log
				raise Exception

			if cint(frappe.db.get_single_value("Dropbox Settings", "send_email_for_successful_backup")):
				send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
	except JobTimeoutException:
		# Worker hit its time limit: re-enqueue with up to 2 retries.
		if retry_count < 2:
			args = {
				"retry_count": retry_count + 1,
				"upload_db_backup": False,  # considering till worker timeout db backup is uploaded
			}
			enqueue(
				"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup_to_dropbox",
				queue="long",
				timeout=1500,
				**args,
			)
	except Exception:
		# error_log may be a traceback string or a list parallel to did_not_upload;
		# normalize into a single message and notify the recipients by email.
		if isinstance(error_log, str):
			error_message = error_log + "\n" + frappe.get_traceback()
		else:
			file_and_error = [" - ".join(f) for f in zip(did_not_upload, error_log, strict=False)]
			error_message = "\n".join(file_and_error) + "\n" + frappe.get_traceback()

		send_email(False, "Dropbox", "Dropbox Settings", "send_notifications_to", error_message)
def backup_to_dropbox(upload_db_backup=True):
	"""Upload the database dump (and optionally site files) to Dropbox.

	Returns:
		tuple: (did_not_upload, error_log) — file paths that failed to upload
		and the deduplicated tracebacks collected while uploading them.
	"""
	# upload database
	dropbox_settings = get_dropbox_settings()
	dropbox_client = get_dropbox_client(dropbox_settings)

	if upload_db_backup:
		if frappe.flags.create_new_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
			site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		else:
			# reuse the most recent on-disk backup instead of dumping again
			filename, site_config = get_latest_backup_file()

		upload_file_to_dropbox(filename, "/database", dropbox_client)
		upload_file_to_dropbox(site_config, "/database", dropbox_client)

		# delete older databases
		if dropbox_settings["no_of_backups"]:
			delete_older_backups(dropbox_client, "/database", dropbox_settings["no_of_backups"])

	# upload files to files folder
	did_not_upload = []
	error_log = []

	if dropbox_settings["file_backup"]:
		upload_from_folder(get_files_path(), 0, "/files", dropbox_client, did_not_upload, error_log)
		upload_from_folder(
			get_files_path(is_private=1), 1, "/private/files", dropbox_client, did_not_upload, error_log
		)

	return did_not_upload, list(set(error_log))
def upload_from_folder(path, is_private, dropbox_folder, dropbox_client, did_not_upload, error_log):
	"""Upload File documents stored under `path` that are not yet on Dropbox.

	Mutates `did_not_upload` and `error_log` in place with failed local paths
	and their tracebacks. Files already present remotely with a matching name
	and size are only flagged as uploaded, not re-sent.
	"""
	if not os.path.exists(path):
		return

	if is_fresh_upload():
		# first-ever sync: fetch the remote listing so pre-existing files can
		# be matched by name + size instead of being uploaded again
		response = get_uploaded_files_meta(dropbox_folder, dropbox_client)
	else:
		response = frappe._dict({"entries": []})

	path = str(path)

	for f in frappe.get_all(
		"File",
		filters={"is_folder": 0, "is_private": is_private, "uploaded_to_dropbox": 0},
		fields=["file_url", "name", "file_name"],
	):
		if not f.file_url:
			continue
		filename = f.file_url.rsplit("/", 1)[-1]
		filepath = os.path.join(path, filename)

		if filename in ignore_list:
			continue

		found = False
		for file_metadata in response.entries:
			try:
				# same basename and same byte size -> treat as already uploaded
				if os.path.basename(filepath) == file_metadata.name and os.stat(
					encode(filepath)
				).st_size == int(file_metadata.size):
					found = True
					update_file_dropbox_status(f.name)
					break
			except Exception:
				error_log.append(frappe.get_traceback())

		if not found:
			try:
				upload_file_to_dropbox(filepath, dropbox_folder, dropbox_client)
				update_file_dropbox_status(f.name)
			except Exception:
				did_not_upload.append(filepath)
				error_log.append(frappe.get_traceback())
def upload_file_to_dropbox(filename, folder, dropbox_client):
	"""Upload a local file to `folder` on Dropbox.

	Small files are sent in a single request; larger files use an upload
	session with chunks (size chosen by get_chunk_site) to reduce the number
	of session-append calls.

	Silently returns if the file does not exist. A Dropbox UploadError is
	logged instead of raised so one bad file does not abort the whole backup;
	any other ApiError is re-raised.
	"""
	if not os.path.exists(filename):
		return

	create_folder_if_not_exists(folder, dropbox_client)
	file_size = os.path.getsize(encode(filename))
	chunk_size = get_chunk_site(file_size)
	mode = dropbox.files.WriteMode.overwrite
	path = f"{folder}/{os.path.basename(filename)}"

	try:
		# `with` guarantees the descriptor is closed even when the upload
		# raises (the previous version opened the file and never closed it).
		with open(encode(filename), "rb") as f:
			if file_size <= chunk_size:
				dropbox_client.files_upload(f.read(), path, mode)
			else:
				session_start = dropbox_client.files_upload_session_start(f.read(chunk_size))
				cursor = dropbox.files.UploadSessionCursor(
					session_id=session_start.session_id, offset=f.tell()
				)
				commit = dropbox.files.CommitInfo(path=path, mode=mode)

				while f.tell() < file_size:
					if (file_size - f.tell()) <= chunk_size:
						# last chunk: finish the session and commit the file
						dropbox_client.files_upload_session_finish(f.read(chunk_size), cursor, commit)
					else:
						dropbox_client.files_upload_session_append(
							f.read(chunk_size), cursor.session_id, cursor.offset
						)
					cursor.offset = f.tell()
	except dropbox.exceptions.ApiError as e:
		if isinstance(e.error, dropbox.files.UploadError):
			error = f"File Path: {path}\n"
			error += frappe.get_traceback()
			frappe.log_error(error)
		else:
			raise
def create_folder_if_not_exists(folder, dropbox_client):
	"""Ensure `folder` exists on Dropbox, creating it when the metadata lookup fails."""
	try:
		dropbox_client.files_get_metadata(folder)
	except dropbox.exceptions.ApiError as e:
		if not isinstance(e.error, dropbox.files.GetMetadataError):
			raise
		# metadata lookup failed because the folder is missing -> create it
		dropbox_client.files_create_folder(folder)
def update_file_dropbox_status(file_name):
	"""Mark a File document as uploaded to Dropbox without touching `modified`."""
	frappe.db.set_value("File", file_name, "uploaded_to_dropbox", 1, update_modified=False)


def is_fresh_upload():
	"""Return True when no File has been flagged as uploaded to Dropbox yet."""
	already_uploaded = frappe.db.get_value("File", {"uploaded_to_dropbox": 1}, "name")
	return not already_uploaded


def get_uploaded_files_meta(dropbox_folder, dropbox_client):
	"""List entries of a Dropbox folder; a missing folder yields an empty listing."""
	try:
		return dropbox_client.files_list_folder(dropbox_folder)
	except dropbox.exceptions.ApiError as e:
		# folder not found -> behave as if it were empty
		if isinstance(e.error, dropbox.files.ListFolderError):
			return frappe._dict({"entries": []})
		raise
def get_dropbox_client(dropbox_settings):
	"""Build an authenticated Dropbox client and persist a refreshed access token.

	NOTE(review): relies on the SDK's private `_oauth2_access_token` attribute
	to detect that a token refresh happened — may break on SDK upgrades.
	"""
	dropbox_client = dropbox.Dropbox(
		oauth2_access_token=dropbox_settings["access_token"],
		oauth2_refresh_token=dropbox_settings["refresh_token"],
		app_key=dropbox_settings["app_key"],
		app_secret=dropbox_settings["app_secret"],
		timeout=None,
	)

	# checking if the access token has expired
	dropbox_client.files_list_folder("")

	if dropbox_settings["access_token"] != dropbox_client._oauth2_access_token:
		# the SDK refreshed the token during the call above; store the new one
		set_dropbox_token(dropbox_client._oauth2_access_token)

	return dropbox_client
def get_dropbox_settings(redirect_uri=False):
	"""Collect Dropbox app credentials and backup options into a dict.

	App key/secret come from the Dropbox Settings doctype, falling back to
	site config (`dropbox_access_key` / `dropbox_secret_key`). Raises when
	no key/secret pair is available from either source.

	Args:
		redirect_uri: when True, include the OAuth callback URL as well.
	"""
	# NOTE: access token is kept for legacy dropbox apps
	settings = frappe.get_doc("Dropbox Settings")
	app_details = {
		"app_key": settings.app_access_key or frappe.conf.dropbox_access_key,
		"app_secret": settings.get_password(fieldname="app_secret_key", raise_exception=False)
		if settings.app_secret_key
		else frappe.conf.dropbox_secret_key,
		"refresh_token": settings.get_password("dropbox_refresh_token", raise_exception=False),
		"access_token": settings.get_password("dropbox_access_token", raise_exception=False),
		"file_backup": settings.file_backup,
		"no_of_backups": settings.no_of_backups if settings.limit_no_of_backups else None,
	}

	if redirect_uri:
		app_details.update(
			{
				"redirect_uri": get_request_site_address(True)
				+ "/api/method/frappe.integrations.doctype.dropbox_settings.dropbox_settings.dropbox_auth_finish"
			}
		)

	if not (app_details["app_key"] and app_details["app_secret"]):
		raise Exception(_("Please set Dropbox access keys in site config or doctype"))

	return app_details
def delete_older_backups(dropbox_client, folder_path, to_keep):
	"""Keep only the newest `to_keep` SQL backups in `folder_path` on Dropbox.

	Files are ordered by `client_modified` (newest first); everything beyond
	the first `to_keep` entries is deleted remotely.
	"""
	res = dropbox_client.files_list_folder(path=folder_path)
	files = [f for f in res.entries if isinstance(f, dropbox.files.FileMetadata) and "sql" in f.name]

	if len(files) <= to_keep:
		return

	files.sort(key=lambda item: item.client_modified, reverse=True)
	for f in files[to_keep:]:
		# Dropbox API paths always use "/" — os.path.join would emit
		# backslashes on Windows and produce an invalid remote path.
		dropbox_client.files_delete(f"{folder_path}/{f.name}")
@frappe.whitelist()
def get_dropbox_authorize_url():
	"""Build the Dropbox OAuth2 authorization URL (offline access, for a refresh token)."""
	settings = get_dropbox_settings(redirect_uri=True)
	flow = dropbox.DropboxOAuth2Flow(
		consumer_key=settings["app_key"],
		redirect_uri=settings["redirect_uri"],
		session={},
		csrf_token_session_key="dropbox-auth-csrf-token",
		consumer_secret=settings["app_secret"],
		token_access_type="offline",
	)
	auth_url = flow.start()
	# also expose the parsed query args for the caller's convenience
	return {"auth_url": auth_url, "args": parse_qs(urlparse(auth_url).query)}
@frappe.whitelist()
def dropbox_auth_finish():
	"""OAuth callback: exchange the authorization code for tokens and redirect back.

	Expects `state` and `code` in the request form dict; renders an error page
	when either is missing, otherwise stores the tokens and redirects to the
	Dropbox Settings form.
	"""
	app_details = get_dropbox_settings(redirect_uri=True)
	callback = frappe.form_dict
	close = '<p class="text-muted">' + _("Please close this window") + "</p>"

	if not callback.state or not callback.code:
		frappe.respond_as_web_page(
			_("Dropbox Setup"),
			_("Illegal Access Token. Please try again") + close,
			indicator_color="red",
			http_status_code=frappe.AuthenticationError.http_status_code,
		)
		return

	dropbox_oauth_flow = dropbox.DropboxOAuth2Flow(
		consumer_key=app_details["app_key"],
		redirect_uri=app_details["redirect_uri"],
		# replay the CSRF token so the flow can validate `state`
		session={"dropbox-auth-csrf-token": callback.state},
		csrf_token_session_key="dropbox-auth-csrf-token",
		consumer_secret=app_details["app_secret"],
	)

	token = dropbox_oauth_flow.finish({"state": callback.state, "code": callback.code})

	set_dropbox_token(token.access_token, token.refresh_token)

	frappe.local.response["type"] = "redirect"
	frappe.local.response["location"] = "/app/dropbox-settings"
def set_dropbox_token(access_token, refresh_token=None):
	"""Persist Dropbox OAuth tokens on the single Dropbox Settings document.

	Saves through the document API (not db.set_value) so that Password fields
	are encrypted properly; commits immediately.
	"""
	settings = frappe.get_single("Dropbox Settings")
	settings.dropbox_access_token = access_token
	if refresh_token:
		settings.dropbox_refresh_token = refresh_token
	settings.save()
	frappe.db.commit()

View file

@ -1,8 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
# import frappe
from frappe.tests import IntegrationTestCase
class TestDropboxSettings(IntegrationTestCase):
	# Placeholder suite: no Dropbox-specific integration tests have been written yet.
	pass

View file

@ -1,71 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on("Google Drive", {
	refresh: function (frm) {
		// Google Drive backups depend on Google Settings being enabled first.
		if (!frm.doc.enable) {
			frm.dashboard.set_headline(
				__("To use Google Drive, enable {0}.", [
					`<a href='/app/google-settings'>${__("Google Settings")}</a>`,
				])
			);
		}

		// Live progress bar driven by realtime events from the upload job.
		frappe.realtime.on("upload_to_google_drive", (data) => {
			if (data.progress) {
				const progress_title = __("Uploading to Google Drive");
				frm.dashboard.show_progress(
					progress_title,
					(data.progress / data.total) * 100,
					data.message
				);
				if (data.progress === data.total) {
					frm.dashboard.hide_progress(progress_title);
				}
			}
		});

		// Manual backup button, available once OAuth has completed.
		if (frm.doc.enable && frm.doc.refresh_token) {
			let sync_button = frm.add_custom_button(__("Take Backup"), function () {
				frappe.show_alert({
					indicator: "green",
					message: __("Backing up to Google Drive."),
				});
				frappe
					.call({
						method: "frappe.integrations.doctype.google_drive.google_drive.take_backup",
						btn: sync_button,
					})
					.then((r) => {
						frappe.msgprint(r.message);
					});
			});
		}

		// Prompt for authorization when configured but not yet authorized.
		if (frm.doc.enable && frm.doc.backup_folder_name && !frm.doc.refresh_token) {
			frm.dashboard.set_headline(
				__(
					"Click on <b>Authorize Google Drive Access</b> to authorize Google Drive Access."
				)
			);
		}

		if (frm.doc.enable && frm.doc.refresh_token && frm.doc.authorization_code) {
			frm.page.set_indicator("Authorized", "green");
		}
	},

	// Button handler: start (or restart) the Google OAuth flow.
	authorize_google_drive_access: function (frm) {
		frappe.call({
			method: "frappe.integrations.doctype.google_drive.google_drive.authorize_access",
			args: {
				reauthorize: frm.doc.authorization_code ? 1 : 0,
			},
			callback: function (r) {
				if (!r.exc) {
					frm.save();
					window.open(r.message.url);
				}
			},
		});
	},
});

View file

@ -1,126 +0,0 @@
{
"actions": [],
"creation": "2019-08-13 17:24:05.470876",
"doctype": "DocType",
"engine": "InnoDB",
"field_order": [
"enable",
"google_drive_section",
"backup_folder_name",
"frequency",
"email",
"send_email_for_successful_backup",
"file_backup",
"authorize_google_drive_access",
"column_break_5",
"backup_folder_id",
"last_backup_on",
"refresh_token",
"authorization_code"
],
"fields": [
{
"default": "0",
"fieldname": "enable",
"fieldtype": "Check",
"label": "Enable"
},
{
"fieldname": "backup_folder_name",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Backup Folder Name",
"reqd": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "authorize_google_drive_access",
"fieldtype": "Button",
"label": "Authorize Google Drive Access"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "backup_folder_id",
"fieldtype": "Data",
"label": "Backup Folder ID",
"read_only": 1
},
{
"fieldname": "frequency",
"fieldtype": "Select",
"label": "Frequency",
"options": "\nDaily\nWeekly",
"reqd": 1
},
{
"fieldname": "refresh_token",
"fieldtype": "Data",
"hidden": 1,
"label": "Refresh Token"
},
{
"fieldname": "authorization_code",
"fieldtype": "Data",
"hidden": 1,
"label": "Authorization Code"
},
{
"fieldname": "last_backup_on",
"fieldtype": "Datetime",
"label": "Last Backup On",
"read_only": 1
},
{
"default": "0",
"description": "Note: By default emails for failed backups are sent.",
"fieldname": "send_email_for_successful_backup",
"fieldtype": "Check",
"label": "Send Email for Successful backup"
},
{
"default": "0",
"fieldname": "file_backup",
"fieldtype": "Check",
"label": "File Backup"
},
{
"depends_on": "enable",
"fieldname": "google_drive_section",
"fieldtype": "Section Break",
"label": "Google Drive"
},
{
"fieldname": "email",
"fieldtype": "Data",
"label": "Send Notification To",
"options": "Email",
"reqd": 1
}
],
"issingle": 1,
"links": [],
"modified": "2024-03-23 16:03:26.999110",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Google Drive",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,229 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
from urllib.parse import quote
from apiclient.http import MediaFileUpload
from googleapiclient.errors import HttpError
import frappe
from frappe import _
from frappe.integrations.google_oauth import GoogleOAuth
from frappe.integrations.offsite_backup_utils import (
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import get_backups_path, get_bench_path
from frappe.utils.background_jobs import enqueue
from frappe.utils.backups import new_backup
class GoogleDrive(Document):
	"""Single doctype holding Google Drive backup configuration and OAuth state."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		authorization_code: DF.Data | None
		backup_folder_id: DF.Data | None
		backup_folder_name: DF.Data
		email: DF.Data
		enable: DF.Check
		file_backup: DF.Check
		frequency: DF.Literal["", "Daily", "Weekly"]
		last_backup_on: DF.Datetime | None
		refresh_token: DF.Data | None
		send_email_for_successful_backup: DF.Check
	# end: auto-generated types

	def validate(self):
		# Renaming the backup folder invalidates the cached remote folder id.
		doc_before_save = self.get_doc_before_save()
		if doc_before_save and doc_before_save.backup_folder_name != self.backup_folder_name:
			self.backup_folder_id = ""

	def get_access_token(self):
		"""Exchange the stored refresh token for a fresh access token.

		Raises:
			frappe.ValidationError: when authorization has not happened yet.
		"""
		if not self.refresh_token:
			button_label = frappe.bold(_("Allow Google Drive Access"))
			raise frappe.ValidationError(_("Click on {0} to generate Refresh Token.").format(button_label))

		oauth_obj = GoogleOAuth("drive")
		r = oauth_obj.refresh_access_token(
			self.get_password(fieldname="refresh_token", raise_exception=False)
		)

		return r.get("access_token")
@frappe.whitelist(methods=["POST"])
def authorize_access(reauthorize=False, code=None):
	"""
	If no Authorization code get it from Google and then request for Refresh Token.
	Google Contact Name is set to flags to set_value after Authorization Code is obtained.
	"""
	# prefer an explicitly passed code over the one stored on the single doctype
	oauth_code = frappe.db.get_single_value("Google Drive", "authorization_code") if not code else code
	oauth_obj = GoogleOAuth("drive")

	if not oauth_code or reauthorize:
		if reauthorize:
			# force a clean remote-folder lookup after re-authorization
			frappe.db.set_single_value("Google Drive", "backup_folder_id", "")
		return oauth_obj.get_authentication_url(
			{
				"redirect": f"/app/Form/{quote('Google Drive')}",
			},
		)

	r = oauth_obj.authorize(oauth_code)
	frappe.db.set_single_value(
		"Google Drive",
		{"authorization_code": oauth_code, "refresh_token": r.get("refresh_token")},
	)
def get_google_drive_object():
	"""Return an object of Google Drive."""
	account = frappe.get_doc("Google Drive")
	oauth_obj = GoogleOAuth("drive")

	google_drive = oauth_obj.get_google_service_object(
		account.get_access_token(),
		# NOTE(review): "indexing_refresh_token" is not among the declared fields
		# of the Google Drive doctype (only "refresh_token" is) — this lookup
		# likely always yields None; confirm the intended field name.
		account.get_password(fieldname="indexing_refresh_token", raise_exception=False),
	)

	return google_drive, account
def check_for_folder_in_google_drive():
	"""Checks if folder exists in Google Drive else create it."""

	def _create_folder_in_google_drive(google_drive, account):
		# Create the backup folder remotely and cache its id on the doctype.
		file_metadata = {
			"name": account.backup_folder_name,
			"mimeType": "application/vnd.google-apps.folder",
		}

		try:
			folder = google_drive.files().create(body=file_metadata, fields="id").execute()
			frappe.db.set_single_value("Google Drive", "backup_folder_id", folder.get("id"))
			frappe.db.commit()
		except HttpError as e:
			frappe.throw(
				_("Google Drive - Could not create folder in Google Drive - Error Code {0}").format(e)
			)

	google_drive, account = get_google_drive_object()

	if account.backup_folder_id:
		# folder id already resolved and cached — nothing to do
		return

	backup_folder_exists = False

	try:
		# list all folders and search for one matching the configured name
		google_drive_folders = (
			google_drive.files().list(q="mimeType='application/vnd.google-apps.folder'").execute()
		)
	except HttpError as e:
		frappe.throw(_("Google Drive - Could not find folder in Google Drive - Error Code {0}").format(e))

	for f in google_drive_folders.get("files"):
		if f.get("name") == account.backup_folder_name:
			frappe.db.set_single_value("Google Drive", "backup_folder_id", f.get("id"))
			frappe.db.commit()
			backup_folder_exists = True
			break

	if not backup_folder_exists:
		_create_folder_in_google_drive(google_drive, account)
@frappe.whitelist()
def take_backup():
	"""Queue a long background job that uploads a system backup to Google Drive."""
	job_path = "frappe.integrations.doctype.google_drive.google_drive.upload_system_backup_to_google_drive"
	enqueue(job_path, queue="long", timeout=1500)
	frappe.msgprint(_("Queued for backup. It may take a few minutes to an hour."))
def upload_system_backup_to_google_drive():
	"""
	Upload system backup to Google Drive
	"""
	# Get Google Drive Object
	google_drive, account = get_google_drive_object()

	# Check if folder exists in Google Drive
	check_for_folder_in_google_drive()
	# backup_folder_id may have just been written by the check above; reload it
	account.load_from_db()

	validate_file_size()

	if frappe.flags.create_new_backup:
		set_progress(1, _("Backing up Data."))
		backup = new_backup()
		file_urls = []
		file_urls.append(backup.backup_path_db)
		file_urls.append(backup.backup_path_conf)

		if account.file_backup:
			file_urls.append(backup.backup_path_files)
			file_urls.append(backup.backup_path_private_files)
	else:
		# reuse the most recent backup on disk instead of dumping again
		file_urls = get_latest_backup_file(with_files=account.file_backup)

	for fileurl in file_urls:
		if not fileurl:
			continue

		file_metadata = {"name": os.path.basename(fileurl), "parents": [account.backup_folder_id]}

		try:
			media = MediaFileUpload(
				get_absolute_path(filename=fileurl), mimetype="application/gzip", resumable=True
			)
		except OSError as e:
			frappe.throw(_("Google Drive - Could not locate - {0}").format(e))

		try:
			set_progress(2, _("Uploading backup to Google Drive."))
			google_drive.files().create(body=file_metadata, media_body=media, fields="id").execute()
		except HttpError as e:
			# best-effort: report the failure by email and continue with the next file
			send_email(False, "Google Drive", "Google Drive", "email", error_status=e)

	set_progress(3, _("Uploading successful."))
	frappe.db.set_single_value("Google Drive", "last_backup_on", frappe.utils.now_datetime())
	send_email(True, "Google Drive", "Google Drive", "email")
	return _("Google Drive Backup Successful.")
def daily_backup():
	"""Scheduler hook: upload a backup to Google Drive when frequency is Daily."""
	settings = frappe.db.get_singles_dict("Google Drive", cast=True)
	if settings.enable and settings.frequency == "Daily":
		upload_system_backup_to_google_drive()
def weekly_backup():
	"""Scheduler hook: upload a backup to Google Drive when frequency is Weekly."""
	settings = frappe.db.get_singles_dict("Google Drive", cast=True)
	if settings.enable and settings.frequency == "Weekly":
		upload_system_backup_to_google_drive()
def get_absolute_path(filename):
	"""Resolve a backup file name to its absolute path under the bench's sites directory."""
	basename = os.path.basename(filename)
	# get_backups_path() starts with "./"; strip it before joining — TODO confirm against helper.
	relative_path = os.path.join(get_backups_path()[2:], basename)
	return f"{get_bench_path()}/sites/{relative_path}"
def set_progress(progress, message):
	"""Publish a realtime progress update (out of 3 steps) to the current user."""
	payload = {"progress": progress, "total": 3, "message": message}
	frappe.publish_realtime("upload_to_google_drive", payload, user=frappe.session.user)

View file

@ -1,8 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
# import frappe
from frappe.tests import IntegrationTestCase
class TestGoogleDrive(IntegrationTestCase):
	# Placeholder: no integration tests for the Google Drive doctype yet.
	pass

View file

@ -1,26 +0,0 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on("S3 Backup Settings", {
	refresh: function (frm) {
		// Rebuild custom buttons on each refresh so stale handlers are dropped.
		frm.clear_custom_buttons();
		frm.events.take_backup(frm);
	},
	take_backup: function (frm) {
		// Only offer a manual backup once S3 credentials have been entered.
		if (frm.doc.access_key_id && frm.doc.secret_access_key) {
			frm.add_custom_button(__("Take Backup Now"), function () {
				frm.dashboard.set_headline_alert("S3 Backup Started!");
				frappe.call({
					method: "frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_s3",
					callback: function (r) {
						// r.exc is set when the server-side call raised an exception.
						if (!r.exc) {
							frappe.msgprint(__("S3 Backup complete!"));
							frm.dashboard.clear_headline();
						}
					},
				});
			}).addClass("btn-primary");
		}
	},
});

View file

@ -1,163 +0,0 @@
{
"actions": [],
"creation": "2017-09-04 20:57:20.129205",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"enabled",
"api_access_section",
"access_key_id",
"column_break_4",
"secret_access_key",
"notification_section",
"notify_email",
"column_break_8",
"send_email_for_successful_backup",
"s3_bucket_details_section",
"bucket",
"endpoint_url",
"column_break_13",
"backup_path",
"backup_details_section",
"frequency",
"backup_files"
],
"fields": [
{
"default": "0",
"fieldname": "enabled",
"fieldtype": "Check",
"label": "Enable Automatic Backup"
},
{
"fieldname": "notify_email",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Send Notifications To",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"default": "1",
"description": "By default, emails are only sent for failed backups.",
"fieldname": "send_email_for_successful_backup",
"fieldtype": "Check",
"label": "Send Email for Successful Backup"
},
{
"fieldname": "frequency",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Backup Frequency",
"mandatory_depends_on": "enabled",
"options": "Daily\nWeekly\nMonthly\nNone",
"reqd": 1
},
{
"fieldname": "access_key_id",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Access Key ID",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"fieldname": "secret_access_key",
"fieldtype": "Password",
"in_list_view": 1,
"label": "Access Key Secret",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"default": "https://s3.amazonaws.com",
"description": "Only change this if you want to use other S3 compatible object storage backends.",
"fieldname": "endpoint_url",
"fieldtype": "Data",
"label": "Endpoint URL"
},
{
"fieldname": "bucket",
"fieldtype": "Data",
"label": "Bucket Name",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"depends_on": "enabled",
"fieldname": "api_access_section",
"fieldtype": "Section Break",
"label": "API Access"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "notification_section",
"fieldtype": "Section Break",
"label": "Notification"
},
{
"fieldname": "column_break_8",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "s3_bucket_details_section",
"fieldtype": "Section Break",
"label": "S3 Bucket Details"
},
{
"fieldname": "column_break_13",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "backup_details_section",
"fieldtype": "Section Break",
"label": "Backup Details"
},
{
"default": "1",
"description": "Backup public and private files along with the database.",
"fieldname": "backup_files",
"fieldtype": "Check",
"label": "Backup Files"
},
{
"description": "If it's empty, it will backup to the root of the bucket.",
"fieldname": "backup_path",
"fieldtype": "Data",
"label": "Backup Path"
}
],
"hide_toolbar": 1,
"issingle": 1,
"links": [],
"modified": "2025-03-15 12:17:49.167012",
"modified_by": "Administrator",
"module": "Integrations",
"name": "S3 Backup Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"quick_entry": 1,
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,196 +0,0 @@
# Copyright (c) 2017, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
import os.path
import boto3
from botocore.exceptions import ClientError
from rq.timeouts import JobTimeoutException
import frappe
from frappe import _
from frappe.integrations.offsite_backup_utils import (
generate_files_backup,
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import cint
from frappe.utils.background_jobs import enqueue
class S3BackupSettings(Document):
	"""Single doctype holding configuration for offsite backups to S3-compatible storage."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		access_key_id: DF.Data
		backup_files: DF.Check
		backup_path: DF.Data | None
		bucket: DF.Data
		enabled: DF.Check
		endpoint_url: DF.Data | None
		frequency: DF.Literal["Daily", "Weekly", "Monthly", "None"]
		notify_email: DF.Data
		secret_access_key: DF.Password
		send_email_for_successful_backup: DF.Check
	# end: auto-generated types

	def validate(self):
		# Nothing to verify unless automatic backups are switched on.
		if not self.enabled:
			return

		if not self.endpoint_url:
			self.endpoint_url = "https://s3.amazonaws.com"

		# Normalize the backup path so uploaded keys always land inside the folder.
		if self.backup_path and self.backup_path[-1] != "/":
			self.backup_path += "/"

		conn = boto3.client(
			"s3",
			aws_access_key_id=self.access_key_id,
			aws_secret_access_key=self.get_password("secret_access_key"),
			endpoint_url=self.endpoint_url,
		)

		try:
			# head_bucket returns 200 OK if the bucket exists and we have access to it.
			# Requires ListBucket permission
			conn.head_bucket(Bucket=self.bucket)
		except ClientError as e:
			error_code = e.response["Error"]["Code"]
			bucket_name = frappe.bold(self.bucket)
			if error_code == "403":
				msg = _("Do not have permission to access bucket {0}.").format(bucket_name)
			elif error_code == "404":
				msg = _("Bucket {0} not found.").format(bucket_name)
			else:
				msg = e.args[0]

			frappe.throw(msg)
@frappe.whitelist()
def take_backup():
	"""Queue a long-running background job that uploads a system backup to S3."""
	enqueue(
		"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_s3",
		timeout=1500,
		queue="long",
	)
	frappe.msgprint(_("Queued for backup. It may take a few minutes to an hour."))
def take_backups_daily():
	# Scheduler hook: run the S3 backup when frequency is set to Daily.
	take_backups_if("Daily")


def take_backups_weekly():
	# Scheduler hook: run the S3 backup when frequency is set to Weekly.
	take_backups_if("Weekly")


def take_backups_monthly():
	# Scheduler hook: run the S3 backup when frequency is set to Monthly.
	take_backups_if("Monthly")
def take_backups_if(freq):
	"""Trigger an S3 backup when backups are enabled and configured for ``freq``."""
	enabled = cint(frappe.db.get_single_value("S3 Backup Settings", "enabled"))
	if enabled and frappe.db.get_single_value("S3 Backup Settings", "frequency") == freq:
		take_backups_s3()
@frappe.whitelist()
def take_backups_s3(retry_count=0):
	"""Take a site backup and upload it to S3, retrying up to twice on job timeouts.

	Sends a success or failure notification email in every terminal case.
	"""
	try:
		validate_file_size()
		backup_to_s3()
		send_email(True, "Amazon S3", "S3 Backup Settings", "notify_email")
	except JobTimeoutException:
		if retry_count < 2:
			# Re-enqueue with an incremented retry counter instead of failing outright.
			args = {"retry_count": retry_count + 1}
			enqueue(
				"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_s3",
				queue="long",
				timeout=1500,
				**args,
			)
		else:
			notify()
	except Exception:
		notify()
def notify():
	"""Email the configured recipients that the S3 backup failed, including the traceback."""
	send_email(False, "Amazon S3", "S3 Backup Settings", "notify_email", frappe.get_traceback())
def backup_to_s3():
	"""Create (or reuse) a site backup and upload its artifacts to the configured S3 bucket."""
	from frappe.utils import get_backups_path
	from frappe.utils.backups import new_backup

	doc = frappe.get_single("S3 Backup Settings")
	bucket = doc.bucket
	path = doc.backup_path or ""
	backup_files = cint(doc.backup_files)

	conn = boto3.client(
		"s3",
		aws_access_key_id=doc.access_key_id,
		aws_secret_access_key=doc.get_password("secret_access_key"),
		endpoint_url=doc.endpoint_url or "https://s3.amazonaws.com",
	)

	if frappe.flags.create_new_backup:
		# Take a brand new backup; validate_file_size() clears this flag when the
		# latest DB dump is large enough that reusing it is preferred.
		backup = new_backup(
			ignore_files=False,
			backup_path_db=None,
			backup_path_files=None,
			backup_path_private_files=None,
			force=True,
		)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		if backup_files:
			files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
			private_files = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_private_files)
			)
	else:
		if backup_files:
			db_filename, site_config, files_filename, private_files = get_latest_backup_file(
				with_files=backup_files
			)

			# The latest backup may lack file archives; generate them on demand
			# and re-read the paths.
			if not files_filename or not private_files:
				generate_files_backup()
				db_filename, site_config, files_filename, private_files = get_latest_backup_file(
					with_files=backup_files
				)
		else:
			db_filename, site_config = get_latest_backup_file()

	folder = path + os.path.basename(db_filename)[:15] + "/"
	# for adding datetime to folder name

	upload_file_to_s3(db_filename, folder, conn, bucket)
	upload_file_to_s3(site_config, folder, conn, bucket)

	if backup_files:
		if private_files:
			upload_file_to_s3(private_files, folder, conn, bucket)
		if files_filename:
			upload_file_to_s3(files_filename, folder, conn, bucket)
def upload_file_to_s3(filename, folder, conn, bucket):
	"""Upload a single local file into ``folder`` of the given S3 bucket."""
	destination = os.path.join(folder, os.path.basename(filename))
	print("Uploading file:", filename)
	conn.upload_file(filename, bucket, destination)  # Requires PutObject permission

View file

@ -1,7 +0,0 @@
# Copyright (c) 2017, Frappe Technologies and Contributors
# License: MIT. See LICENSE
from frappe.tests import IntegrationTestCase
class TestS3BackupSettings(IntegrationTestCase):
	# Placeholder: no integration tests for S3 Backup Settings yet.
	pass

View file

@ -115,6 +115,6 @@ def flush_webhook_execution_queue():
"frappe.integrations.doctype.webhook.webhook.enqueue_webhook",
doc=instance.doc,
webhook=instance.webhook,
now=frappe.flags.in_test,
now=frappe.in_test,
queue=instance.webhook.background_jobs_queue or "default",
)

View file

@ -16,13 +16,11 @@ _SCOPES = {
}
_SERVICES = {
"contacts": ("people", "v1"),
"drive": ("drive", "v3"),
"indexing": ("indexing", "v3"),
}
_DOMAIN_CALLBACK_METHODS = {
"mail": "frappe.email.oauth.authorize_google_access",
"contacts": "frappe.integrations.doctype.google_contacts.google_contacts.authorize_access",
"drive": "frappe.integrations.doctype.google_drive.google_drive.authorize_access",
"indexing": "frappe.website.doctype.website_settings.google_indexing.authorize_access",
}
@ -34,7 +32,7 @@ class GoogleAuthenticationError(Exception):
class GoogleOAuth:
OAUTH_URL = "https://oauth2.googleapis.com/token"
def __init__(self, domain: str, validate: bool = True):
def __init__(self, domain: str, validate: bool = True, config=None):
self.google_settings = frappe.get_single("Google Settings")
self.domain = domain.lower()
self.scopes = (
@ -43,6 +41,10 @@ class GoogleOAuth:
else _SCOPES[self.domain]
)
if config:
_DOMAIN_CALLBACK_METHODS[self.domain] = config["domain_callback_url"]
_SERVICES[self.domain] = config["service_version"]
if validate:
self.validate_google_settings()

View file

@ -1,118 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
import glob
import os
import frappe
from frappe.utils import cint, split_emails
def send_email(success, service_name, doctype, email_field, error_status=None):
	"""Email the configured recipients about the outcome of an offsite backup upload.

	Args:
		success: True for a successful upload, False for a failure.
		service_name: Human-readable service name for the email body (e.g. "Amazon S3").
		doctype: Settings doctype holding the recipient field.
		email_field: Fieldname on ``doctype`` containing the recipient address(es).
		error_status: Error details included in the failure email.
	"""
	recipients = get_recipients(doctype, email_field)
	if not recipients:
		frappe.log_error(
			f"No Email Recipient found for {service_name}",
			f"{service_name}: Failed to send backup status email",
		)
		return

	if success:
		# Success emails are opt-in via the settings doctype.
		if not frappe.db.get_single_value(doctype, "send_email_for_successful_backup"):
			return

		subject = "Backup Upload Successful"
		message = """
<h3>Backup Uploaded Successfully!</h3>
<p>Hi there, this is just to inform you that your backup was successfully uploaded to your {} bucket. So relax!</p>""".format(
			service_name
		)
	else:
		subject = "[Warning] Backup Upload Failed"
		message = f"""
<h3>Backup Upload Failed!</h3>
<p>Oops, your automated backup to {service_name} failed.</p>
<p>Error message: {error_status}</p>
<p>Please contact your system manager for more information.</p>"""

	frappe.sendmail(recipients=recipients, subject=subject, message=message)
def get_recipients(doctype, email_field):
	"""Return the list of notification email addresses stored on the settings doctype."""
	stored_value = frappe.db.get_value(doctype, None, email_field)
	return split_emails(stored_value)
def get_latest_backup_file(with_files=False):
	"""Return paths of the most recent on-disk backup set.

	Returns ``(database, config)`` or, when ``with_files`` is True,
	``(database, config, public, private)``. Entries may be None when a
	matching artifact does not exist.
	"""
	from frappe.utils.backups import BackupGenerator

	odb = BackupGenerator(
		frappe.conf.db_name,
		frappe.conf.db_user,
		frappe.conf.db_password,
		db_socket=frappe.conf.db_socket,
		db_host=frappe.conf.db_host,
		db_port=frappe.conf.db_port,
		db_type=frappe.conf.db_type,
	)
	# 24 * 30 — presumably hours, i.e. backups younger than 30 days; TODO confirm unit.
	database, public, private, config = odb.get_recent_backup(older_than=24 * 30)

	if with_files:
		return database, config, public, private

	return database, config
def get_file_size(file_path, unit="MB"):
	"""Return the size of ``file_path`` converted to ``unit``.

	``unit`` is one of "KB", "MB", "GB", "TB"; conversion uses decimal
	steps of 1000 and the result is a float.
	"""
	divisions = {"KB": 1, "MB": 2, "GB": 3, "TB": 4}[unit]
	size = os.path.getsize(file_path)
	for _ in range(divisions):
		size = size / 1000.0
	return size
def get_chunk_site(file_size):
	"""Return the upload chunk size in *bytes* for a backup of ``file_size`` bytes.

	Larger backups get bigger chunks so resumable uploads need fewer requests.
	(The historical docstring said "megabytes", but callers receive bytes;
	the name "site" is a legacy typo for "size" kept for API compatibility.)
	"""
	MB = 1024 * 1024
	# Despite the old variable name (file_size_in_gb), this value is in MiB,
	# so the thresholds below correspond to ~0.5 GiB / 1 GiB / 3 GiB / 5 GiB.
	size_in_mib = int(file_size / 1024 / 1024)
	if size_in_mib > 5000:
		return 200 * MB
	elif size_in_mib >= 3000:
		return 150 * MB
	elif size_in_mib >= 1000:
		return 100 * MB
	elif size_in_mib >= 500:
		return 50 * MB
	else:
		return 15 * MB
def validate_file_size():
	"""Decide whether a fresh backup should be created.

	Sets ``frappe.flags.create_new_backup`` to False when the latest database
	backup is larger than 1 GB, so the existing backup is reused instead.
	"""
	frappe.flags.create_new_backup = True
	latest_file, _site_config = get_latest_backup_file()
	size_in_gb = get_file_size(latest_file, unit="GB") if latest_file else 0
	if size_in_gb > 1:
		frappe.flags.create_new_backup = False
def generate_files_backup():
	"""Create a fresh archive of public and private site files (no database dump)."""
	from frappe.utils.backups import BackupGenerator

	backup = BackupGenerator(
		frappe.conf.db_name,
		frappe.conf.db_user,
		frappe.conf.db_password,
		db_socket=frappe.conf.db_socket,
		db_host=frappe.conf.db_host,
		db_port=frappe.conf.db_port,
		db_type=frappe.conf.db_type,
	)
	backup.set_backup_file_name()
	backup.zip_files()

View file

@ -12,47 +12,6 @@
"is_hidden": 0,
"label": "Integrations",
"links": [
{
"hidden": 0,
"is_query_report": 0,
"label": "Backup",
"link_count": 0,
"onboard": 0,
"type": "Card Break"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "Dropbox Settings",
"link_count": 0,
"link_to": "Dropbox Settings",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "S3 Backup Settings",
"link_count": 0,
"link_to": "S3 Backup Settings",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "Google Drive",
"link_count": 0,
"link_to": "Google Drive",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"hidden": 0,
"is_query_report": 0,

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

31406
frappe/locale/cs.po Normal file

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

31648
frappe/locale/sr.po Normal file

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -977,7 +977,7 @@ class BaseDocument:
self.set(df.fieldname, cstr(self.get(df.fieldname)).strip())
value = self.get(df.fieldname)
if value not in options and not (frappe.flags.in_test and value.startswith("_T-")):
if value not in options and not (frappe.in_test and value.startswith("_T-")):
# show an elaborate message
prefix = _("Row #{0}:").format(self.idx) if self.get("parentfield") else ""
label = _(self.meta.get_label(df.fieldname))

View file

@ -153,7 +153,7 @@ def delete_doc(
"frappe.model.delete_doc.delete_dynamic_links",
doctype=doc.doctype,
name=doc.name,
now=frappe.flags.in_test,
now=frappe.in_test,
enqueue_after_commit=True,
)

View file

@ -499,7 +499,7 @@ class Document(BaseDocument):
if ignore_permissions is not None:
self.flags.ignore_permissions = ignore_permissions
self.flags.ignore_version = frappe.flags.in_test if ignore_version is None else ignore_version
self.flags.ignore_version = frappe.in_test if ignore_version is None else ignore_version
if self.get("__islocal") or not self.get("name"):
return self.insert()
@ -1201,7 +1201,9 @@ class Document(BaseDocument):
self.docstatus = DocStatus.CANCELLED
return self.save()
def _rename(self, name: str, merge: bool = False, force: bool = False, validate_rename: bool = True):
def _rename(
self, name: str | int, merge: bool = False, force: bool = False, validate_rename: bool = True
):
"""Rename the document. Triggers frappe.rename_doc, then reloads."""
from frappe.model.rename_doc import rename_doc
@ -1238,7 +1240,7 @@ class Document(BaseDocument):
self.run_method("on_discard")
@frappe.whitelist()
def rename(self, name: str, merge=False, force=False, validate_rename=True):
def rename(self, name: str | int, merge=False, force=False, validate_rename=True):
"""Rename the document to `name`. This transforms the current object."""
return self._rename(name=name, merge=merge, force=force, validate_rename=validate_rename)

View file

@ -32,7 +32,7 @@ def get_dynamic_link_map(for_delete=False):
Note: Will not map single doctypes
"""
if getattr(frappe.local, "dynamic_link_map", None) is None or frappe.flags.in_test:
if getattr(frappe.local, "dynamic_link_map", None) is None or frappe.in_test:
# Build from scratch
dynamic_link_map = {}
for df in get_dynamic_links():

View file

@ -45,6 +45,9 @@ from frappe.utils import cached_property, cast, cint, cstr
from frappe.utils.caching import site_cache
from frappe.utils.data import add_to_date, get_datetime
ListOrTuple = list | tuple
SerializableTypes = str | int | float | datetime
DEFAULT_FIELD_LABELS = {
"name": _lt("ID"),
"creation": _lt("Created On"),
@ -176,31 +179,7 @@ class Meta(Document):
self.check_if_large_table()
def as_dict(self, no_nulls=False):
def serialize(doc):
if isinstance(doc, dict):
return doc.copy()
out = {}
for key, value in doc.__dict__.items():
if isinstance(value, list | tuple):
if not value or not isinstance(value[0], BaseDocument):
# non standard list object, skip
continue
value = [serialize(d) for d in value]
if (not no_nulls and value is None) or isinstance(
value, str | int | float | datetime | list | tuple
):
out[key] = value
# set empty lists for unset table fields
for fieldname in TABLE_DOCTYPES_FOR_DOCTYPE.keys():
if out.get(fieldname) is None:
out[fieldname] = []
return out
return serialize(self)
return _serialize(self, no_nulls=no_nulls)
def get_link_fields(self):
return self.get("fields", {"fieldtype": "Link", "options": ["!=", "[Select]"]})
@ -977,6 +956,41 @@ def _update_field_order_based_on_insert_after(field_order, insert_after_map):
field_order.extend(fields)
CACHE_PROPERTIES = frozenset(
(
"_fields",
"_table_fields",
"_table_doctypes",
*(prop for prop, value in vars(Meta).items() if isinstance(value, cached_property)),
)
)
def _serialize(doc, no_nulls=False, *, is_child=False):
out = {}
for key, value in doc.__dict__.items():
if not is_child:
if key in CACHE_PROPERTIES:
continue
if isinstance(value, ListOrTuple):
if value and isinstance(value[0], BaseDocument):
out[key] = [_serialize(d, no_nulls=no_nulls, is_child=True) for d in value]
continue
if (not no_nulls and value is None) or isinstance(value, SerializableTypes):
out[key] = value
if not is_child:
# set empty lists for unset table fields
for fieldname in TABLE_DOCTYPES_FOR_DOCTYPE:
if out.get(fieldname) is None:
out[fieldname] = []
return out
if typing.TYPE_CHECKING:
# This is DX hack to add all fields from DocType to meta for autocompletions.
# Meta is technically doctype + special fields on meta.

View file

@ -90,7 +90,7 @@ class TracedValue:
"""
if value in self.forbidden_values:
if frappe.flags.in_test:
if frappe.in_test:
frappe.throw(f"{self.field_name} cannot be set to {value}", AssertionError)
else:
frappe.throw(f"{self.field_name} cannot be set to {value}")
@ -99,7 +99,7 @@ class TracedValue:
try:
self.custom_validation(obj, value)
except Exception as e:
if frappe.flags.in_test:
if frappe.in_test:
frappe.throw(str(e), AssertionError)
else:
frappe.throw(str(e))

View file

@ -12,7 +12,7 @@ import click
import requests
import frappe
from frappe.tests.utils import make_test_records
from frappe.tests.utils import make_test_records, toggle_test_mode
from .testing.environment import _decorate_all_methods_and_functions_with_type_checker
from .testing.result import TestResult
@ -29,12 +29,13 @@ TEST_WEIGHT_OVERRIDES = {
class ParallelTestRunner:
def __init__(self, app, site, build_number=1, total_builds=1, dry_run=False):
def __init__(self, app, site, build_number=1, total_builds=1, dry_run=False, lightmode=False):
self.app = app
self.site = site
self.build_number = frappe.utils.cint(build_number) or 1
self.total_builds = frappe.utils.cint(total_builds)
self.dry_run = dry_run
self.lightmode = lightmode
self.test_file_list = []
self.total_test_weight = 0
self.test_result = None
@ -53,11 +54,12 @@ class ParallelTestRunner:
if self.dry_run:
return
frappe.flags.in_test = True
toggle_test_mode(True)
frappe.clear_cache()
frappe.utils.scheduler.disable_scheduler()
_decorate_all_methods_and_functions_with_type_checker()
self.before_test_setup()
if not self.lightmode:
_decorate_all_methods_and_functions_with_type_checker()
self.before_test_setup()
def before_test_setup(self):
start_time = time.monotonic()
@ -103,9 +105,12 @@ class ParallelTestRunner:
frappe.set_user("Administrator")
path, filename = file_info
module = self.get_module(path, filename)
from frappe.deprecation_dumpster import compat_preload_test_records_upfront
compat_preload_test_records_upfront([(module, path, filename)])
if not self.lightmode:
from frappe.deprecation_dumpster import compat_preload_test_records_upfront
compat_preload_test_records_upfront([(module, path, filename)])
test_suite = unittest.TestSuite()
module_test_cases = unittest.TestLoader().loadTestsFromModule(module)
test_suite.addTest(module_test_cases)

View file

@ -1,5 +1,5 @@
[pre_model_sync]
frappe.patches.v16_0.enable_setup_complete
frappe.patches.v16_0.enable_setup_complete #25-06-2025 re-run-patch
frappe.patches.v15_0.remove_implicit_primary_key
frappe.patches.v12_0.remove_deprecated_fields_from_doctype #3
execute:frappe.utils.global_search.setup_global_search_table()
@ -246,4 +246,4 @@ frappe.patches.v16_0.move_role_desk_settings_to_user
frappe.printing.doctype.print_format.patches.sets_wkhtmltopdf_as_default_for_pdf_generator_field
frappe.patches.v14_0.fix_user_settings_collation
execute:frappe.call("frappe.core.doctype.system_settings.system_settings.sync_system_settings")
frappe.patches.v16_0.social_eps_deprecation_warning
frappe.patches.v16_0.add_module_deprecation_warning

Some files were not shown because too many files have changed in this diff Show more