fix: separate backup options into app

This commit is contained in:
sokumon 2025-04-15 23:27:59 +05:30
parent 7d8e70e1ad
commit 422995cd45
20 changed files with 1 additions and 1444 deletions

View file

@ -248,31 +248,21 @@ scheduler_events = {
],
"daily_long": [],
"daily_maintenance": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_daily",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_daily",
"frappe.integrations.doctype.google_drive.google_drive.daily_backup",
"frappe.email.doctype.auto_email_report.auto_email_report.send_daily",
"frappe.desk.notifications.clear_notifications",
"frappe.sessions.clear_expired_sessions",
"frappe.website.doctype.personal_data_deletion_request.personal_data_deletion_request.remove_unverified_record",
"frappe.integrations.doctype.google_contacts.google_contacts.sync",
"frappe.automation.doctype.auto_repeat.auto_repeat.make_auto_repeat_entry",
"frappe.core.doctype.log_settings.log_settings.run_log_clean_up",
],
"weekly_long": [
"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backups_weekly",
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_weekly",
"frappe.desk.form.document_follow.send_weekly_updates",
"frappe.utils.change_log.check_for_update",
"frappe.integrations.doctype.google_drive.google_drive.weekly_backup",
"frappe.desk.doctype.changelog_feed.changelog_feed.fetch_changelog_feed",
],
"monthly": [
"frappe.email.doctype.auto_email_report.auto_email_report.send_monthly",
],
"monthly_long": [
"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_monthly"
],
}
sounds = [

View file

@ -1,47 +0,0 @@
// Copyright (c) 2016, Frappe Technologies and contributors
// For license information, please see license.txt

frappe.ui.form.on("Dropbox Settings", {
	refresh: function (frm) {
		// Hide the app key fields when the keys come from site config
		// (flag set server-side in DropboxSettings.onload).
		frm.toggle_display(
			["app_access_key", "app_secret_key"],
			!frm.doc.__onload?.dropbox_setup_via_site_config
		);
		frm.events.take_backup(frm);
	},

	// True when keys are set on the doc or provided via site config.
	are_keys_present: function (frm) {
		return (
			(frm.doc.app_access_key && frm.doc.app_secret_key) ||
			frm.doc.__onload?.dropbox_setup_via_site_config
		);
	},

	// Button handler: open the Dropbox OAuth authorization page.
	allow_dropbox_access: function (frm) {
		if (!frm.events.are_keys_present(frm)) {
			frappe.msgprint(__("App Access Key and/or Secret Key are not present."));
			return;
		}
		frappe.call({
			method: "frappe.integrations.doctype.dropbox_settings.dropbox_settings.get_dropbox_authorize_url",
			freeze: true,
			callback: function (r) {
				if (!r.exc) {
					window.open(r.message.auth_url);
				}
			},
		});
	},

	// Show "Take Backup Now" only when enabled and a token is available.
	take_backup: function (frm) {
		if (frm.doc.enabled && (frm.doc.dropbox_refresh_token || frm.doc.dropbox_access_token)) {
			frm.add_custom_button(__("Take Backup Now"), function () {
				frappe.call({
					method: "frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup",
					freeze: true,
				});
			});
		}
	},
});

View file

@ -1,126 +0,0 @@
{
"actions": [],
"creation": "2016-09-21 10:12:57.399174",
"doctype": "DocType",
"document_type": "System",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"enabled",
"send_notifications_to",
"send_email_for_successful_backup",
"backup_frequency",
"limit_no_of_backups",
"no_of_backups",
"file_backup",
"app_access_key",
"app_secret_key",
"allow_dropbox_access",
"dropbox_refresh_token",
"dropbox_access_token"
],
"fields": [
{
"default": "0",
"fieldname": "enabled",
"fieldtype": "Check",
"label": "Enabled"
},
{
"fieldname": "send_notifications_to",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Send Notifications To",
"reqd": 1
},
{
"default": "1",
"description": "Note: By default emails for failed backups are sent.",
"fieldname": "send_email_for_successful_backup",
"fieldtype": "Check",
"label": "Send Email for Successful Backup"
},
{
"fieldname": "backup_frequency",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Backup Frequency",
"options": "\nDaily\nWeekly",
"reqd": 1
},
{
"default": "0",
"fieldname": "limit_no_of_backups",
"fieldtype": "Check",
"label": "Limit Number of DB Backups"
},
{
"default": "5",
"depends_on": "eval:doc.limit_no_of_backups",
"fieldname": "no_of_backups",
"fieldtype": "Int",
"label": "Number of DB Backups"
},
{
"default": "1",
"fieldname": "file_backup",
"fieldtype": "Check",
"label": "File Backup"
},
{
"fieldname": "app_access_key",
"fieldtype": "Data",
"label": "App Access Key"
},
{
"fieldname": "app_secret_key",
"fieldtype": "Password",
"label": "App Secret Key"
},
{
"fieldname": "allow_dropbox_access",
"fieldtype": "Button",
"label": "Allow Dropbox Access"
},
{
"fieldname": "dropbox_refresh_token",
"fieldtype": "Password",
"hidden": 1,
"label": "Dropbox Refresh Token",
"no_copy": 1,
"read_only": 1
},
{
"fieldname": "dropbox_access_token",
"fieldtype": "Password",
"hidden": 1,
"label": "Dropbox Access Token"
}
],
"in_create": 1,
"issingle": 1,
"links": [],
"modified": "2024-03-23 16:03:23.176690",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Dropbox Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,378 +0,0 @@
# Copyright (c) 2015, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
from urllib.parse import parse_qs, urlparse
import dropbox
from rq.timeouts import JobTimeoutException
import frappe
from frappe import _
from frappe.integrations.offsite_backup_utils import (
get_chunk_site,
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import cint, encode, get_backups_path, get_files_path, get_request_site_address
from frappe.utils.background_jobs import enqueue
from frappe.utils.backups import new_backup
# file names that must never be uploaded to Dropbox
ignore_list = [".DS_Store"]


class DropboxSettings(Document):
	"""Single DocType holding Dropbox backup configuration and OAuth tokens."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		app_access_key: DF.Data | None
		app_secret_key: DF.Password | None
		backup_frequency: DF.Literal["", "Daily", "Weekly"]
		dropbox_access_token: DF.Password | None
		dropbox_refresh_token: DF.Password | None
		enabled: DF.Check
		file_backup: DF.Check
		limit_no_of_backups: DF.Check
		no_of_backups: DF.Int
		send_email_for_successful_backup: DF.Check
		send_notifications_to: DF.Data
	# end: auto-generated types

	def onload(self):
		# Tell the form the keys come from site config so it can hide the
		# app_access_key/app_secret_key fields (see dropbox_settings.js).
		if not self.app_access_key and frappe.conf.dropbox_access_key:
			self.set_onload("dropbox_setup_via_site_config", 1)

	def validate(self):
		# A backup limit below 1 would delete every remote backup.
		if self.enabled and self.limit_no_of_backups and self.no_of_backups < 1:
			frappe.throw(_("Number of DB backups cannot be less than 1"))
@frappe.whitelist()
def take_backup():
	"""Enqueue longjob for taking backup to dropbox"""
	enqueue(
		"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup_to_dropbox",
		queue="long",
		timeout=1500,
	)
	frappe.msgprint(_("Queued for backup. It may take a few minutes to an hour."))


def take_backups_daily():
	# Scheduler hook: run the Dropbox backup when frequency is Daily.
	take_backups_if("Daily")


def take_backups_weekly():
	# Scheduler hook: run the Dropbox backup when frequency is Weekly.
	take_backups_if("Weekly")


def take_backups_if(freq):
	# Run the backup only when the configured frequency matches `freq`.
	if frappe.db.get_single_value("Dropbox Settings", "backup_frequency") == freq:
		take_backup_to_dropbox()
def take_backup_to_dropbox(retry_count=0, upload_db_backup=True):
	"""Upload the site backup to Dropbox, retrying on worker timeouts.

	On JobTimeoutException the job re-enqueues itself (at most two retries),
	skipping the DB upload on retry. On any other failure the collected
	errors are emailed to the configured notification address.

	:param retry_count: number of retries already attempted
	:param upload_db_backup: upload the database dump (False on retry)
	"""
	did_not_upload, error_log = [], []
	try:
		if cint(frappe.db.get_single_value("Dropbox Settings", "enabled")):
			validate_file_size()
			did_not_upload, error_log = backup_to_dropbox(upload_db_backup)
			if did_not_upload:
				# force the generic Exception handler below to send the error mail
				raise Exception
			if cint(frappe.db.get_single_value("Dropbox Settings", "send_email_for_successful_backup")):
				send_email(True, "Dropbox", "Dropbox Settings", "send_notifications_to")
	except JobTimeoutException:
		if retry_count < 2:
			args = {
				"retry_count": retry_count + 1,
				"upload_db_backup": False,  # considering till worker timeout db backup is uploaded
			}
			enqueue(
				"frappe.integrations.doctype.dropbox_settings.dropbox_settings.take_backup_to_dropbox",
				queue="long",
				timeout=1500,
				**args,
			)
	except Exception:
		if isinstance(error_log, str):
			error_message = error_log + "\n" + frappe.get_traceback()
		else:
			file_and_error = [" - ".join(f) for f in zip(did_not_upload, error_log, strict=False)]
			error_message = "\n".join(file_and_error) + "\n" + frappe.get_traceback()
		send_email(False, "Dropbox", "Dropbox Settings", "send_notifications_to", error_message)
def backup_to_dropbox(upload_db_backup=True):
	"""Upload the DB backup (and optionally site files) to Dropbox.

	Return (did_not_upload, error_log): file paths that failed to upload and
	the de-duplicated tracebacks collected while uploading.
	"""
	# upload database
	dropbox_settings = get_dropbox_settings()
	dropbox_client = get_dropbox_client(dropbox_settings)

	if upload_db_backup:
		if frappe.flags.create_new_backup:
			backup = new_backup(ignore_files=True)
			filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
			site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		else:
			filename, site_config = get_latest_backup_file()

		upload_file_to_dropbox(filename, "/database", dropbox_client)
		upload_file_to_dropbox(site_config, "/database", dropbox_client)

		# delete older databases
		if dropbox_settings["no_of_backups"]:
			delete_older_backups(dropbox_client, "/database", dropbox_settings["no_of_backups"])

	# upload files to files folder
	did_not_upload = []
	error_log = []

	if dropbox_settings["file_backup"]:
		upload_from_folder(get_files_path(), 0, "/files", dropbox_client, did_not_upload, error_log)
		upload_from_folder(
			get_files_path(is_private=1), 1, "/private/files", dropbox_client, did_not_upload, error_log
		)

	return did_not_upload, list(set(error_log))
def upload_from_folder(path, is_private, dropbox_folder, dropbox_client, did_not_upload, error_log):
	"""Upload File records under `path` that are not yet marked as uploaded.

	Files already present remotely with a matching size are only marked as
	uploaded. Failed paths are appended to `did_not_upload` and tracebacks
	to `error_log` (both lists are mutated in place).
	"""
	if not os.path.exists(path):
		return

	if is_fresh_upload():
		# First ever sync: fetch the remote listing so files already present
		# with the same size can be skipped.
		response = get_uploaded_files_meta(dropbox_folder, dropbox_client)
	else:
		response = frappe._dict({"entries": []})

	path = str(path)

	for f in frappe.get_all(
		"File",
		filters={"is_folder": 0, "is_private": is_private, "uploaded_to_dropbox": 0},
		fields=["file_url", "name", "file_name"],
	):
		if not f.file_url:
			continue
		filename = f.file_url.rsplit("/", 1)[-1]
		filepath = os.path.join(path, filename)

		if filename in ignore_list:
			continue

		found = False
		for file_metadata in response.entries:
			try:
				# Same basename and same on-disk size => already uploaded.
				if os.path.basename(filepath) == file_metadata.name and os.stat(
					encode(filepath)
				).st_size == int(file_metadata.size):
					found = True
					update_file_dropbox_status(f.name)
					break
			except Exception:
				error_log.append(frappe.get_traceback())

		if not found:
			try:
				upload_file_to_dropbox(filepath, dropbox_folder, dropbox_client)
				update_file_dropbox_status(f.name)
			except Exception:
				did_not_upload.append(filepath)
				error_log.append(frappe.get_traceback())
def upload_file_to_dropbox(filename, folder, dropbox_client):
	"""Upload a single local file into `folder` on Dropbox.

	Files larger than the chunk size (see ``get_chunk_site``) are sent via an
	upload session in chunks to reduce session-append calls; smaller files go
	up in a single request. Dropbox UploadError API failures are logged and
	swallowed; other API errors are re-raised.

	:param filename: absolute path of the local file (missing file is a no-op)
	:param folder: destination Dropbox folder, e.g. "/database"
	:param dropbox_client: authorized ``dropbox.Dropbox`` client
	"""
	if not os.path.exists(filename):
		return

	create_folder_if_not_exists(folder, dropbox_client)
	file_size = os.path.getsize(encode(filename))
	chunk_size = get_chunk_site(file_size)
	mode = dropbox.files.WriteMode.overwrite
	path = f"{folder}/{os.path.basename(filename)}"

	try:
		# FIX: the previous implementation opened the file without ever
		# closing it, leaking a file handle on every call (and on every
		# exception). A context manager guarantees the handle is released.
		with open(encode(filename), "rb") as f:
			if file_size <= chunk_size:
				dropbox_client.files_upload(f.read(), path, mode)
			else:
				upload_session_start_result = dropbox_client.files_upload_session_start(
					f.read(chunk_size)
				)
				cursor = dropbox.files.UploadSessionCursor(
					session_id=upload_session_start_result.session_id, offset=f.tell()
				)
				commit = dropbox.files.CommitInfo(path=path, mode=mode)

				while f.tell() < file_size:
					if (file_size - f.tell()) <= chunk_size:
						# final chunk closes the session and commits the file
						dropbox_client.files_upload_session_finish(f.read(chunk_size), cursor, commit)
					else:
						dropbox_client.files_upload_session_append(
							f.read(chunk_size), cursor.session_id, cursor.offset
						)
						cursor.offset = f.tell()
	except dropbox.exceptions.ApiError as e:
		if isinstance(e.error, dropbox.files.UploadError):
			error = f"File Path: {path}\n"
			error += frappe.get_traceback()
			frappe.log_error(error)
		else:
			raise
def create_folder_if_not_exists(folder, dropbox_client):
	"""Create `folder` on Dropbox unless it already exists."""
	try:
		dropbox_client.files_get_metadata(folder)
	except dropbox.exceptions.ApiError as e:
		# folder not found
		if isinstance(e.error, dropbox.files.GetMetadataError):
			dropbox_client.files_create_folder(folder)
		else:
			raise


def update_file_dropbox_status(file_name):
	# Mark the File record as uploaded without touching its modified timestamp.
	frappe.db.set_value("File", file_name, "uploaded_to_dropbox", 1, update_modified=False)


def is_fresh_upload():
	# True when no File record has ever been uploaded to Dropbox from this site.
	file_name = frappe.db.get_value("File", {"uploaded_to_dropbox": 1}, "name")
	return not file_name


def get_uploaded_files_meta(dropbox_folder, dropbox_client):
	"""List the folder's contents; return an empty listing if it is missing."""
	try:
		return dropbox_client.files_list_folder(dropbox_folder)
	except dropbox.exceptions.ApiError as e:
		# folder not found
		if isinstance(e.error, dropbox.files.ListFolderError):
			return frappe._dict({"entries": []})
		raise
def get_dropbox_client(dropbox_settings):
	"""Build an authorized Dropbox client, persisting any refreshed token.

	The SDK may mint a new access token from the refresh token; when that
	happens the new token is written back to Dropbox Settings.
	"""
	dropbox_client = dropbox.Dropbox(
		oauth2_access_token=dropbox_settings["access_token"],
		oauth2_refresh_token=dropbox_settings["refresh_token"],
		app_key=dropbox_settings["app_key"],
		app_secret=dropbox_settings["app_secret"],
		timeout=None,
	)

	# checking if the access token has expired
	dropbox_client.files_list_folder("")

	if dropbox_settings["access_token"] != dropbox_client._oauth2_access_token:
		# the SDK refreshed the token behind the scenes — store the new one
		set_dropbox_token(dropbox_client._oauth2_access_token)

	return dropbox_client
def get_dropbox_settings(redirect_uri=False):
	"""Collect Dropbox app keys and tokens from the doctype or site config.

	:param redirect_uri: also include the OAuth callback URL for this site
	:raises Exception: when neither source provides both app key and secret
	"""
	# NOTE: access token is kept for legacy dropbox apps
	settings = frappe.get_doc("Dropbox Settings")
	app_details = {
		"app_key": settings.app_access_key or frappe.conf.dropbox_access_key,
		"app_secret": settings.get_password(fieldname="app_secret_key", raise_exception=False)
		if settings.app_secret_key
		else frappe.conf.dropbox_secret_key,
		"refresh_token": settings.get_password("dropbox_refresh_token", raise_exception=False),
		"access_token": settings.get_password("dropbox_access_token", raise_exception=False),
		"file_backup": settings.file_backup,
		"no_of_backups": settings.no_of_backups if settings.limit_no_of_backups else None,
	}

	if redirect_uri:
		app_details.update(
			{
				"redirect_uri": get_request_site_address(True)
				+ "/api/method/frappe.integrations.doctype.dropbox_settings.dropbox_settings.dropbox_auth_finish"
			}
		)

	if not (app_details["app_key"] and app_details["app_secret"]):
		raise Exception(_("Please set Dropbox access keys in site config or doctype"))

	return app_details
def delete_older_backups(dropbox_client, folder_path, to_keep):
	"""Keep only the `to_keep` newest SQL backup files in `folder_path`.

	Backups are the entries whose name contains "sql"; the newest by
	``client_modified`` are retained and the rest deleted.
	"""
	res = dropbox_client.files_list_folder(path=folder_path)
	files = [f for f in res.entries if isinstance(f, dropbox.files.FileMetadata) and "sql" in f.name]

	if len(files) <= to_keep:
		return

	files.sort(key=lambda item: item.client_modified, reverse=True)
	for f in files[to_keep:]:
		# NOTE(review): os.path.join would use "\" on Windows, but Dropbox
		# paths are always "/"-separated — confirm this only runs on POSIX.
		dropbox_client.files_delete(os.path.join(folder_path, f.name))
@frappe.whitelist()
def get_dropbox_authorize_url():
	"""Start the Dropbox OAuth flow; return the auth URL and its query args."""
	app_details = get_dropbox_settings(redirect_uri=True)
	dropbox_oauth_flow = dropbox.DropboxOAuth2Flow(
		consumer_key=app_details["app_key"],
		redirect_uri=app_details["redirect_uri"],
		session={},
		csrf_token_session_key="dropbox-auth-csrf-token",
		consumer_secret=app_details["app_secret"],
		# "offline" asks Dropbox to issue a refresh token as well
		token_access_type="offline",
	)
	auth_url = dropbox_oauth_flow.start()
	return {"auth_url": auth_url, "args": parse_qs(urlparse(auth_url).query)}
@frappe.whitelist()
def dropbox_auth_finish():
	"""OAuth redirect endpoint: exchange the auth code for tokens and store them."""
	app_details = get_dropbox_settings(redirect_uri=True)
	callback = frappe.form_dict
	close = '<p class="text-muted">' + _("Please close this window") + "</p>"

	if not callback.state or not callback.code:
		frappe.respond_as_web_page(
			_("Dropbox Setup"),
			_("Illegal Access Token. Please try again") + close,
			indicator_color="red",
			http_status_code=frappe.AuthenticationError.http_status_code,
		)
		return

	dropbox_oauth_flow = dropbox.DropboxOAuth2Flow(
		consumer_key=app_details["app_key"],
		redirect_uri=app_details["redirect_uri"],
		# replay the CSRF token from the callback so finish() can validate it
		session={"dropbox-auth-csrf-token": callback.state},
		csrf_token_session_key="dropbox-auth-csrf-token",
		consumer_secret=app_details["app_secret"],
	)
	token = dropbox_oauth_flow.finish({"state": callback.state, "code": callback.code})
	set_dropbox_token(token.access_token, token.refresh_token)

	frappe.local.response["type"] = "redirect"
	frappe.local.response["location"] = "/app/dropbox-settings"
def set_dropbox_token(access_token, refresh_token=None):
	"""Persist the OAuth tokens on the Dropbox Settings single doctype."""
	# NOTE: used doc object instead of db.set_value so that password field is set properly
	dropbox_settings = frappe.get_single("Dropbox Settings")
	dropbox_settings.dropbox_access_token = access_token
	if refresh_token:
		dropbox_settings.dropbox_refresh_token = refresh_token
	dropbox_settings.save()
	frappe.db.commit()

View file

@ -1,8 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
# import frappe
from frappe.tests import IntegrationTestCase
class TestDropboxSettings(IntegrationTestCase):
	"""Placeholder test case for the Dropbox Settings doctype."""

	pass

View file

@ -1,71 +0,0 @@
// Copyright (c) 2019, Frappe Technologies and contributors
// For license information, please see license.txt

frappe.ui.form.on("Google Drive", {
	refresh: function (frm) {
		// Google Drive backup requires Google Settings to be enabled first.
		if (!frm.doc.enable) {
			frm.dashboard.set_headline(
				__("To use Google Drive, enable {0}.", [
					`<a href='/app/google-settings'>${__("Google Settings")}</a>`,
				])
			);
		}

		// Live progress bar fed by set_progress() on the server.
		frappe.realtime.on("upload_to_google_drive", (data) => {
			if (data.progress) {
				const progress_title = __("Uploading to Google Drive");
				frm.dashboard.show_progress(
					progress_title,
					(data.progress / data.total) * 100,
					data.message
				);
				if (data.progress === data.total) {
					frm.dashboard.hide_progress(progress_title);
				}
			}
		});

		// Manual backup button, available once authorized.
		if (frm.doc.enable && frm.doc.refresh_token) {
			let sync_button = frm.add_custom_button(__("Take Backup"), function () {
				frappe.show_alert({
					indicator: "green",
					message: __("Backing up to Google Drive."),
				});
				frappe
					.call({
						method: "frappe.integrations.doctype.google_drive.google_drive.take_backup",
						btn: sync_button,
					})
					.then((r) => {
						frappe.msgprint(r.message);
					});
			});
		}

		// Enabled but not yet authorized — prompt the user.
		if (frm.doc.enable && frm.doc.backup_folder_name && !frm.doc.refresh_token) {
			frm.dashboard.set_headline(
				__(
					"Click on <b>Authorize Google Drive Access</b> to authorize Google Drive Access."
				)
			);
		}

		if (frm.doc.enable && frm.doc.refresh_token && frm.doc.authorization_code) {
			frm.page.set_indicator("Authorized", "green");
		}
	},

	// Button handler: start (or redo) the OAuth flow on the server.
	authorize_google_drive_access: function (frm) {
		frappe.call({
			method: "frappe.integrations.doctype.google_drive.google_drive.authorize_access",
			args: {
				reauthorize: frm.doc.authorization_code ? 1 : 0,
			},
			callback: function (r) {
				if (!r.exc) {
					frm.save();
					window.open(r.message.url);
				}
			},
		});
	},
});

View file

@ -1,126 +0,0 @@
{
"actions": [],
"creation": "2019-08-13 17:24:05.470876",
"doctype": "DocType",
"engine": "InnoDB",
"field_order": [
"enable",
"google_drive_section",
"backup_folder_name",
"frequency",
"email",
"send_email_for_successful_backup",
"file_backup",
"authorize_google_drive_access",
"column_break_5",
"backup_folder_id",
"last_backup_on",
"refresh_token",
"authorization_code"
],
"fields": [
{
"default": "0",
"fieldname": "enable",
"fieldtype": "Check",
"label": "Enable"
},
{
"fieldname": "backup_folder_name",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Backup Folder Name",
"reqd": 1
},
{
"depends_on": "eval:!doc.__islocal",
"fieldname": "authorize_google_drive_access",
"fieldtype": "Button",
"label": "Authorize Google Drive Access"
},
{
"fieldname": "column_break_5",
"fieldtype": "Column Break"
},
{
"fieldname": "backup_folder_id",
"fieldtype": "Data",
"label": "Backup Folder ID",
"read_only": 1
},
{
"fieldname": "frequency",
"fieldtype": "Select",
"label": "Frequency",
"options": "\nDaily\nWeekly",
"reqd": 1
},
{
"fieldname": "refresh_token",
"fieldtype": "Data",
"hidden": 1,
"label": "Refresh Token"
},
{
"fieldname": "authorization_code",
"fieldtype": "Data",
"hidden": 1,
"label": "Authorization Code"
},
{
"fieldname": "last_backup_on",
"fieldtype": "Datetime",
"label": "Last Backup On",
"read_only": 1
},
{
"default": "0",
"description": "Note: By default emails for failed backups are sent.",
"fieldname": "send_email_for_successful_backup",
"fieldtype": "Check",
"label": "Send Email for Successful Backup"
},
{
"default": "0",
"fieldname": "file_backup",
"fieldtype": "Check",
"label": "File Backup"
},
{
"depends_on": "enable",
"fieldname": "google_drive_section",
"fieldtype": "Section Break",
"label": "Google Drive"
},
{
"fieldname": "email",
"fieldtype": "Data",
"label": "Send Notification To",
"options": "Email",
"reqd": 1
}
],
"issingle": 1,
"links": [],
"modified": "2024-03-23 16:03:26.999110",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Google Drive",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,229 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
from urllib.parse import quote
from apiclient.http import MediaFileUpload
from googleapiclient.errors import HttpError
import frappe
from frappe import _
from frappe.integrations.google_oauth import GoogleOAuth
from frappe.integrations.offsite_backup_utils import (
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import get_backups_path, get_bench_path
from frappe.utils.background_jobs import enqueue
from frappe.utils.backups import new_backup
class GoogleDrive(Document):
	"""Single DocType holding Google Drive backup settings and OAuth state."""

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		authorization_code: DF.Data | None
		backup_folder_id: DF.Data | None
		backup_folder_name: DF.Data
		email: DF.Data
		enable: DF.Check
		file_backup: DF.Check
		frequency: DF.Literal["", "Daily", "Weekly"]
		last_backup_on: DF.Datetime | None
		refresh_token: DF.Data | None
		send_email_for_successful_backup: DF.Check
	# end: auto-generated types

	def validate(self):
		# Changing the folder name invalidates the cached folder id; it is
		# looked up (or re-created) on the next backup.
		doc_before_save = self.get_doc_before_save()
		if doc_before_save and doc_before_save.backup_folder_name != self.backup_folder_name:
			self.backup_folder_id = ""

	def get_access_token(self):
		"""Exchange the stored refresh token for a fresh access token.

		:raises frappe.ValidationError: when authorization was never completed
		"""
		if not self.refresh_token:
			button_label = frappe.bold(_("Allow Google Drive Access"))
			raise frappe.ValidationError(_("Click on {0} to generate Refresh Token.").format(button_label))

		oauth_obj = GoogleOAuth("drive")
		r = oauth_obj.refresh_access_token(
			self.get_password(fieldname="refresh_token", raise_exception=False)
		)

		return r.get("access_token")
@frappe.whitelist(methods=["POST"])
def authorize_access(reauthorize=False, code=None):
	"""
	If no Authorization code get it from Google and then request for Refresh Token.
	Google Contact Name is set to flags to set_value after Authorization Code is obtained.
	"""
	oauth_code = frappe.db.get_single_value("Google Drive", "authorization_code") if not code else code
	oauth_obj = GoogleOAuth("drive")

	if not oauth_code or reauthorize:
		if reauthorize:
			# force a fresh folder lookup after re-authorization
			frappe.db.set_single_value("Google Drive", "backup_folder_id", "")
		return oauth_obj.get_authentication_url(
			{
				"redirect": f"/app/Form/{quote('Google Drive')}",
			},
		)

	r = oauth_obj.authorize(oauth_code)
	frappe.db.set_single_value(
		"Google Drive",
		{"authorization_code": oauth_code, "refresh_token": r.get("refresh_token")},
	)
def get_google_drive_object():
	"""Return an object of Google Drive."""
	account = frappe.get_doc("Google Drive")
	oauth_obj = GoogleOAuth("drive")

	google_drive = oauth_obj.get_google_service_object(
		account.get_access_token(),
		# NOTE(review): "indexing_refresh_token" is not a field on the Google
		# Drive doctype; with raise_exception=False this presumably yields
		# None — confirm whether "refresh_token" was intended.
		account.get_password(fieldname="indexing_refresh_token", raise_exception=False),
	)

	return google_drive, account
def check_for_folder_in_google_drive():
	"""Checks if folder exists in Google Drive else create it."""

	def _create_folder_in_google_drive(google_drive, account):
		# Create the backup folder and cache its id on the doctype.
		file_metadata = {
			"name": account.backup_folder_name,
			"mimeType": "application/vnd.google-apps.folder",
		}

		try:
			folder = google_drive.files().create(body=file_metadata, fields="id").execute()
			frappe.db.set_single_value("Google Drive", "backup_folder_id", folder.get("id"))
			frappe.db.commit()
		except HttpError as e:
			frappe.throw(
				_("Google Drive - Could not create folder in Google Drive - Error Code {0}").format(e)
			)

	google_drive, account = get_google_drive_object()

	if account.backup_folder_id:
		# folder id already cached — nothing to do
		return

	backup_folder_exists = False

	try:
		google_drive_folders = (
			google_drive.files().list(q="mimeType='application/vnd.google-apps.folder'").execute()
		)
	except HttpError as e:
		frappe.throw(_("Google Drive - Could not find folder in Google Drive - Error Code {0}").format(e))

	# Reuse an existing folder with the configured name when one is found.
	for f in google_drive_folders.get("files"):
		if f.get("name") == account.backup_folder_name:
			frappe.db.set_single_value("Google Drive", "backup_folder_id", f.get("id"))
			frappe.db.commit()
			backup_folder_exists = True
			break

	if not backup_folder_exists:
		_create_folder_in_google_drive(google_drive, account)
@frappe.whitelist()
def take_backup():
	"""Enqueue longjob for taking backup to Google Drive"""
	enqueue(
		"frappe.integrations.doctype.google_drive.google_drive.upload_system_backup_to_google_drive",
		queue="long",
		timeout=1500,
	)
	frappe.msgprint(_("Queued for backup. It may take a few minutes to an hour."))
def upload_system_backup_to_google_drive():
	"""
	Upload system backup to Google Drive
	"""
	# Get Google Drive Object
	google_drive, account = get_google_drive_object()

	# Check if folder exists in Google Drive
	check_for_folder_in_google_drive()
	# reload to pick up the backup_folder_id possibly set above
	account.load_from_db()

	validate_file_size()

	if frappe.flags.create_new_backup:
		set_progress(1, _("Backing up Data."))
		backup = new_backup()
		file_urls = []
		file_urls.append(backup.backup_path_db)
		file_urls.append(backup.backup_path_conf)
		if account.file_backup:
			file_urls.append(backup.backup_path_files)
			file_urls.append(backup.backup_path_private_files)
	else:
		file_urls = get_latest_backup_file(with_files=account.file_backup)

	for fileurl in file_urls:
		if not fileurl:
			continue

		file_metadata = {"name": os.path.basename(fileurl), "parents": [account.backup_folder_id]}

		try:
			media = MediaFileUpload(
				get_absolute_path(filename=fileurl), mimetype="application/gzip", resumable=True
			)
		except OSError as e:
			frappe.throw(_("Google Drive - Could not locate - {0}").format(e))

		try:
			set_progress(2, _("Uploading backup to Google Drive."))
			google_drive.files().create(body=file_metadata, media_body=media, fields="id").execute()
		except HttpError as e:
			# best-effort: report the failed file by email and keep going
			send_email(False, "Google Drive", "Google Drive", "email", error_status=e)

	set_progress(3, _("Uploading successful."))
	frappe.db.set_single_value("Google Drive", "last_backup_on", frappe.utils.now_datetime())
	send_email(True, "Google Drive", "Google Drive", "email")
	return _("Google Drive Backup Successful.")
def daily_backup():
	# Scheduler hook: upload only when enabled and frequency is Daily.
	drive_settings = frappe.db.get_singles_dict("Google Drive", cast=True)
	if drive_settings.enable and drive_settings.frequency == "Daily":
		upload_system_backup_to_google_drive()


def weekly_backup():
	# Scheduler hook: upload only when enabled and frequency is Weekly.
	drive_settings = frappe.db.get_singles_dict("Google Drive", cast=True)
	if drive_settings.enable and drive_settings.frequency == "Weekly":
		upload_system_backup_to_google_drive()
def get_absolute_path(filename):
	# get_backups_path() starts with "./": strip that prefix ([2:]) and build
	# the absolute path under the bench's sites directory.
	file_path = os.path.join(get_backups_path()[2:], os.path.basename(filename))
	return f"{get_bench_path()}/sites/{file_path}"


def set_progress(progress, message):
	# Push upload progress (out of 3 steps) to the current user's browser.
	frappe.publish_realtime(
		"upload_to_google_drive",
		dict(progress=progress, total=3, message=message),
		user=frappe.session.user,
	)

View file

@ -1,8 +0,0 @@
# Copyright (c) 2019, Frappe Technologies and Contributors
# License: MIT. See LICENSE
# import frappe
from frappe.tests import IntegrationTestCase
class TestGoogleDrive(IntegrationTestCase):
	"""Placeholder test case for the Google Drive doctype."""

	pass

View file

@ -1,26 +0,0 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt

frappe.ui.form.on("S3 Backup Settings", {
	refresh: function (frm) {
		// Rebuild custom buttons on every refresh so stale handlers are dropped.
		frm.clear_custom_buttons();
		frm.events.take_backup(frm);
	},

	// Show "Take Backup Now" only when API credentials are filled in.
	take_backup: function (frm) {
		if (frm.doc.access_key_id && frm.doc.secret_access_key) {
			frm.add_custom_button(__("Take Backup Now"), function () {
				frm.dashboard.set_headline_alert("S3 Backup Started!");
				frappe.call({
					method: "frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_s3",
					callback: function (r) {
						if (!r.exc) {
							frappe.msgprint(__("S3 Backup complete!"));
							frm.dashboard.clear_headline();
						}
					},
				});
			}).addClass("btn-primary");
		}
	},
});

View file

@ -1,163 +0,0 @@
{
"actions": [],
"creation": "2017-09-04 20:57:20.129205",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"enabled",
"api_access_section",
"access_key_id",
"column_break_4",
"secret_access_key",
"notification_section",
"notify_email",
"column_break_8",
"send_email_for_successful_backup",
"s3_bucket_details_section",
"bucket",
"endpoint_url",
"column_break_13",
"backup_path",
"backup_details_section",
"frequency",
"backup_files"
],
"fields": [
{
"default": "0",
"fieldname": "enabled",
"fieldtype": "Check",
"label": "Enable Automatic Backup"
},
{
"fieldname": "notify_email",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Send Notifications To",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"default": "1",
"description": "By default, emails are only sent for failed backups.",
"fieldname": "send_email_for_successful_backup",
"fieldtype": "Check",
"label": "Send Email for Successful Backup"
},
{
"fieldname": "frequency",
"fieldtype": "Select",
"in_list_view": 1,
"label": "Backup Frequency",
"mandatory_depends_on": "enabled",
"options": "Daily\nWeekly\nMonthly\nNone",
"reqd": 1
},
{
"fieldname": "access_key_id",
"fieldtype": "Data",
"in_list_view": 1,
"label": "Access Key ID",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"fieldname": "secret_access_key",
"fieldtype": "Password",
"in_list_view": 1,
"label": "Access Key Secret",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"default": "https://s3.amazonaws.com",
"description": "Only change this if you want to use other S3 compatible object storage backends.",
"fieldname": "endpoint_url",
"fieldtype": "Data",
"label": "Endpoint URL"
},
{
"fieldname": "bucket",
"fieldtype": "Data",
"label": "Bucket Name",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"depends_on": "enabled",
"fieldname": "api_access_section",
"fieldtype": "Section Break",
"label": "API Access"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "notification_section",
"fieldtype": "Section Break",
"label": "Notification"
},
{
"fieldname": "column_break_8",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "s3_bucket_details_section",
"fieldtype": "Section Break",
"label": "S3 Bucket Details"
},
{
"fieldname": "column_break_13",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "backup_details_section",
"fieldtype": "Section Break",
"label": "Backup Details"
},
{
"default": "1",
"description": "Backup public and private files along with the database.",
"fieldname": "backup_files",
"fieldtype": "Check",
"label": "Backup Files"
},
{
"description": "If it's empty, it will backup to the root of the bucket.",
"fieldname": "backup_path",
"fieldtype": "Data",
"label": "Backup Path"
}
],
"hide_toolbar": 1,
"issingle": 1,
"links": [],
"modified": "2025-03-15 12:17:49.167012",
"modified_by": "Administrator",
"module": "Integrations",
"name": "S3 Backup Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"quick_entry": 1,
"row_format": "Dynamic",
"sort_field": "creation",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -1,196 +0,0 @@
# Copyright (c) 2017, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
import os.path
import boto3
from botocore.exceptions import ClientError
from rq.timeouts import JobTimeoutException
import frappe
from frappe import _
from frappe.integrations.offsite_backup_utils import (
generate_files_backup,
get_latest_backup_file,
send_email,
validate_file_size,
)
from frappe.model.document import Document
from frappe.utils import cint
from frappe.utils.background_jobs import enqueue
class S3BackupSettings(Document):
	# Singleton doctype holding the S3 (or S3-compatible) offsite-backup configuration.

	# begin: auto-generated types
	# This code is auto-generated. Do not modify anything in this block.

	from typing import TYPE_CHECKING

	if TYPE_CHECKING:
		from frappe.types import DF

		access_key_id: DF.Data
		backup_files: DF.Check
		backup_path: DF.Data | None
		bucket: DF.Data
		enabled: DF.Check
		endpoint_url: DF.Data | None
		frequency: DF.Literal["Daily", "Weekly", "Monthly", "None"]
		notify_email: DF.Data
		secret_access_key: DF.Password
		send_email_for_successful_backup: DF.Check

	# end: auto-generated types

	def validate(self):
		"""Normalize settings and verify bucket access; no-op when disabled."""
		if not self.enabled:
			return

		# Default to AWS's public endpoint; other values support S3-compatible stores.
		if not self.endpoint_url:
			self.endpoint_url = "https://s3.amazonaws.com"

		# Ensure the key prefix ends with "/" so uploads land inside the folder.
		if self.backup_path and self.backup_path[-1] != "/":
			self.backup_path += "/"

		conn = boto3.client(
			"s3",
			aws_access_key_id=self.access_key_id,
			aws_secret_access_key=self.get_password("secret_access_key"),
			endpoint_url=self.endpoint_url,
		)

		try:
			# Head_bucket returns a 200 OK if the bucket exists and have access to it.
			# Requires ListBucket permission
			conn.head_bucket(Bucket=self.bucket)
		except ClientError as e:
			# boto3 reports the HTTP status as a string code here.
			error_code = e.response["Error"]["Code"]
			bucket_name = frappe.bold(self.bucket)
			if error_code == "403":
				msg = _("Do not have permission to access bucket {0}.").format(bucket_name)
			elif error_code == "404":
				msg = _("Bucket {0} not found.").format(bucket_name)
			else:
				msg = e.args[0]

			frappe.throw(msg)
@frappe.whitelist()
def take_backup():
	"""Enqueue longjob for taking backup to s3"""
	job_method = "frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_s3"
	enqueue(job_method, queue="long", timeout=1500)
	frappe.msgprint(_("Queued for backup. It may take a few minutes to an hour."))
def take_backups_daily():
	"""Scheduler hook: run the S3 backup if the configured frequency is Daily."""
	take_backups_if("Daily")


def take_backups_weekly():
	"""Scheduler hook: run the S3 backup if the configured frequency is Weekly."""
	take_backups_if("Weekly")


def take_backups_monthly():
	"""Scheduler hook: run the S3 backup if the configured frequency is Monthly."""
	take_backups_if("Monthly")
def take_backups_if(freq):
	"""Run the S3 backup when the integration is enabled and set to *freq*."""
	# Short-circuit keeps the original evaluation order: the frequency is only
	# read from the database when the integration is enabled.
	if cint(frappe.db.get_single_value("S3 Backup Settings", "enabled")) and (
		frappe.db.get_single_value("S3 Backup Settings", "frequency") == freq
	):
		take_backups_s3()
@frappe.whitelist()
def take_backups_s3(retry_count=0):
	"""Take a backup and upload it to S3, retrying up to twice on job timeout.

	Args:
		retry_count: number of attempts already made. Defaults to 0; coerced
			with ``cint`` because whitelisted methods may receive it as a
			string over HTTP, which would make ``retry_count < 2`` raise
			``TypeError``.

	On success an email notification is sent; on timeout the job is
	re-enqueued (max 3 attempts total); any other failure notifies by email.
	"""
	retry_count = cint(retry_count)
	try:
		validate_file_size()
		backup_to_s3()
		send_email(True, "Amazon S3", "S3 Backup Settings", "notify_email")
	except JobTimeoutException:
		if retry_count < 2:
			args = {"retry_count": retry_count + 1}
			enqueue(
				"frappe.integrations.doctype.s3_backup_settings.s3_backup_settings.take_backups_s3",
				queue="long",
				timeout=1500,
				**args,
			)
		else:
			# Out of retries — report the failure instead of re-enqueueing.
			notify()
	except Exception:
		notify()
def notify():
	"""Email the configured recipient that the S3 backup failed, including the traceback."""
	send_email(False, "Amazon S3", "S3 Backup Settings", "notify_email", frappe.get_traceback())
def backup_to_s3():
	"""Create (or reuse) a site backup and upload its files to the configured S3 bucket.

	Reads all connection/backup options from the "S3 Backup Settings" single.
	When ``frappe.flags.create_new_backup`` is set, a fresh backup is taken;
	otherwise the latest existing backup files are used (generating a files
	backup on demand if needed).
	"""
	from frappe.utils import get_backups_path
	from frappe.utils.backups import new_backup

	doc = frappe.get_single("S3 Backup Settings")
	bucket = doc.bucket
	path = doc.backup_path or ""
	backup_files = cint(doc.backup_files)

	conn = boto3.client(
		"s3",
		aws_access_key_id=doc.access_key_id,
		aws_secret_access_key=doc.get_password("secret_access_key"),
		endpoint_url=doc.endpoint_url or "https://s3.amazonaws.com",
	)

	if frappe.flags.create_new_backup:
		# Fresh backup: ignore_files=False so public/private file archives are produced too.
		backup = new_backup(
			ignore_files=False,
			backup_path_db=None,
			backup_path_files=None,
			backup_path_private_files=None,
			force=True,
		)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		site_config = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_conf))
		if backup_files:
			files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
			private_files = os.path.join(
				get_backups_path(), os.path.basename(backup.backup_path_private_files)
			)
	else:
		if backup_files:
			db_filename, site_config, files_filename, private_files = get_latest_backup_file(
				with_files=backup_files
			)

			# Latest backup may be database-only; generate the files archives on demand.
			if not files_filename or not private_files:
				generate_files_backup()
				db_filename, site_config, files_filename, private_files = get_latest_backup_file(
					with_files=backup_files
				)

		else:
			db_filename, site_config = get_latest_backup_file()

	# NOTE(review): assumes the first 15 chars of the db backup filename are its
	# timestamp prefix — confirm against frappe.utils.backups naming.
	folder = path + os.path.basename(db_filename)[:15] + "/"
	# for adding datetime to folder name

	upload_file_to_s3(db_filename, folder, conn, bucket)
	upload_file_to_s3(site_config, folder, conn, bucket)

	if backup_files:
		if private_files:
			upload_file_to_s3(private_files, folder, conn, bucket)
		if files_filename:
			upload_file_to_s3(files_filename, folder, conn, bucket)
def upload_file_to_s3(filename, folder, conn, bucket):
	"""Upload one local file into *folder* of the given S3 *bucket*.

	Args:
		filename: local path of the file to upload.
		folder: destination key prefix inside the bucket.
		conn: boto3 S3 client (anything exposing ``upload_file``).
		bucket: target bucket name.
	"""
	import posixpath

	# S3 object keys always use "/" separators; posixpath.join keeps the key
	# correct even on Windows, where os.path.join could emit backslashes.
	destpath = posixpath.join(folder, os.path.basename(filename))
	print("Uploading file:", filename)
	conn.upload_file(filename, bucket, destpath)  # Requires PutObject permission

View file

@ -1,7 +0,0 @@
# Copyright (c) 2017, Frappe Technologies and Contributors
# License: MIT. See LICENSE
from frappe.tests import IntegrationTestCase
class TestS3BackupSettings(IntegrationTestCase):
	# Placeholder test case: no integration tests are defined for
	# S3 Backup Settings yet.
	pass

View file

@ -22,7 +22,6 @@ _SERVICES = {
# Maps a service-domain key to the dotted path of its authorization-callback
# method. NOTE(review): keys look like Google service domains (mail, contacts,
# drive, indexing) — confirm against the OAuth flow that dispatches on them.
_DOMAIN_CALLBACK_METHODS = {
	"mail": "frappe.email.oauth.authorize_google_access",
	"contacts": "frappe.integrations.doctype.google_contacts.google_contacts.authorize_access",
	"drive": "frappe.integrations.doctype.google_drive.google_drive.authorize_access",
	"indexing": "frappe.website.doctype.website_settings.google_indexing.authorize_access",
}

View file

@ -12,47 +12,6 @@
"is_hidden": 0,
"label": "Integrations",
"links": [
{
"hidden": 0,
"is_query_report": 0,
"label": "Backup",
"link_count": 0,
"onboard": 0,
"type": "Card Break"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "Dropbox Settings",
"link_count": 0,
"link_to": "Dropbox Settings",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "S3 Backup Settings",
"link_count": 0,
"link_to": "S3 Backup Settings",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"dependencies": "",
"hidden": 0,
"is_query_report": 0,
"label": "Google Drive",
"link_count": 0,
"link_to": "Google Drive",
"link_type": "DocType",
"onboard": 0,
"type": "Link"
},
{
"hidden": 0,
"is_query_report": 0,

View file

@ -810,7 +810,7 @@ def migrate_translations(source_app, target_app):
"""Migrate target-app-specific translations from source-app to target-app"""
strings_in_source_app = [m[1] for m in frappe.translate.get_messages_for_app(source_app)]
strings_in_target_app = [m[1] for m in frappe.translate.get_messages_for_app(target_app)]
print(strings_in_source_app)
strings_in_target_app_but_not_in_source_app = list(
set(strings_in_target_app) - set(strings_in_source_app)
)

View file

@ -83,8 +83,6 @@ dependencies = [
"markdownify~=0.14.1",
# integration dependencies
"boto3~=1.34.143",
"dropbox~=11.36.2",
"google-api-python-client~=2.2.0",
"google-auth-oauthlib~=0.4.4",
"google-auth~=1.29.0",
@ -250,13 +248,9 @@ disable_error_code = [
# External libraries without types
[[tool.mypy.overrides]]
module = [
"apiclient.http",
"bleach_allowlist",
"boto3",
"botocore.exceptions",
"cssutils",
"cups",
"dropbox",
"email_reply_parser",
"filetype",
"google",