fix: make backup files optional and ux fixes

Signed-off-by: Chinmay D. Pai <chinmaydpai@gmail.com>
Chinmay D. Pai 2020-04-13 20:55:19 +05:30
parent 5baa07db57
commit ea798af266
GPG key ID: 75507BE256F40CED
2 changed files with 83 additions and 13 deletions

@@ -1,18 +1,28 @@
{
"actions": [],
"creation": "2017-09-04 20:57:20.129205",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"enabled",
"notify_email",
"send_email_for_successful_backup",
"frequency",
"api_access_section",
"access_key_id",
"column_break_4",
"secret_access_key",
"region",
"endpoint_url",
"notification_section",
"notify_email",
"column_break_8",
"send_email_for_successful_backup",
"s3_bucket_details_section",
"bucket",
"endpoint_url",
"column_break_13",
"region",
"backup_details_section",
"frequency",
"backup_files",
"column_break_18",
"backup_limit"
],
"fields": [
@@ -27,6 +37,7 @@
"fieldtype": "Data",
"in_list_view": 1,
"label": "Send Notifications To",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
@@ -41,6 +52,7 @@
"fieldtype": "Select",
"in_list_view": 1,
"label": "Backup Frequency",
"mandatory_depends_on": "enabled",
"options": "Daily\nWeekly\nMonthly\nNone",
"reqd": 1
},
@@ -49,13 +61,15 @@
"fieldtype": "Data",
"in_list_view": 1,
"label": "Access Key ID",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"fieldname": "secret_access_key",
"fieldtype": "Password",
"in_list_view": 1,
"label": "Secret Access Key",
"label": "Access Key Secret",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
@@ -74,19 +88,70 @@
{
"fieldname": "bucket",
"fieldtype": "Data",
"label": "Bucket",
"label": "Bucket Name",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"description": "Set to 0 for no limit on the number of backups taken",
"fieldname": "backup_limit",
"fieldtype": "Int",
"label": "Backup Limit",
"mandatory_depends_on": "enabled",
"reqd": 1
},
{
"depends_on": "enabled",
"fieldname": "api_access_section",
"fieldtype": "Section Break",
"label": "API Access"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "notification_section",
"fieldtype": "Section Break",
"label": "Notification"
},
{
"fieldname": "column_break_8",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "s3_bucket_details_section",
"fieldtype": "Section Break",
"label": "S3 Bucket Details"
},
{
"fieldname": "column_break_13",
"fieldtype": "Column Break"
},
{
"depends_on": "enabled",
"fieldname": "backup_details_section",
"fieldtype": "Section Break",
"label": "Backup Details"
},
{
"default": "0",
"description": "Backup public and private files along with the database.",
"fieldname": "backup_files",
"fieldtype": "Check",
"label": "Backup Files"
},
{
"fieldname": "column_break_18",
"fieldtype": "Column Break"
}
],
"hide_toolbar": 1,
"issingle": 1,
"modified": "2019-08-22 16:26:04.774571",
"links": [],
"modified": "2020-04-13 20:50:12.956162",
"modified_by": "Administrator",
"module": "Integrations",
"name": "S3 Backup Settings",

@@ -114,17 +114,22 @@ def backup_to_s3():
		backup = new_backup(ignore_files=False, backup_path_db=None,
			backup_path_files=None, backup_path_private_files=None, force=True)
		db_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_db))
		files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
		private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
		if doc.backup_files:
			files_filename = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_files))
			private_files = os.path.join(get_backups_path(), os.path.basename(backup.backup_path_private_files))
	else:
		db_filename, files_filename, private_files = get_latest_backup_file(with_files=True)
		if doc.backup_files:
			db_filename, files_filename, private_files = get_latest_backup_file(with_files=doc.backup_files)
		else:
			db_filename = get_latest_backup_file()

	folder = os.path.basename(db_filename)[:15] + '/'
	# for adding datetime to folder name
	upload_file_to_s3(db_filename, folder, conn, bucket)
	upload_file_to_s3(private_files, folder, conn, bucket)
	upload_file_to_s3(files_filename, folder, conn, bucket)
	if doc.backup_files:
		upload_file_to_s3(private_files, folder, conn, bucket)
		upload_file_to_s3(files_filename, folder, conn, bucket)

	delete_old_backups(doc.backup_limit, bucket)
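
Taken together, the hunks above make the file portion of the upload conditional: with Backup Files unchecked only the database dump is resolved and pushed to the bucket, otherwise the public and private file archives are uploaded as well. A rough consolidated sketch of that flow; the helper names are the ones visible in the diff, while the import path and the wrapper function are assumptions for illustration, not the module's actual code:

import os

# Assumed import path, following Frappe's usual doctype module layout.
from frappe.integrations.doctype.s3_backup_settings.s3_backup_settings import (
	get_latest_backup_file, upload_file_to_s3, delete_old_backups)

def upload_latest_backup(doc, conn, bucket):
	# Resolve which backup artifacts to upload, based on the new checkbox.
	if doc.backup_files:
		db_filename, files_filename, private_files = get_latest_backup_file(with_files=True)
	else:
		db_filename = get_latest_backup_file()

	# The destination folder is taken from the timestamp prefix of the backup file.
	folder = os.path.basename(db_filename)[:15] + '/'

	upload_file_to_s3(db_filename, folder, conn, bucket)
	if doc.backup_files:
		# File archives are only uploaded when explicitly requested.
		upload_file_to_s3(private_files, folder, conn, bucket)
		upload_file_to_s3(files_filename, folder, conn, bucket)

	delete_old_backups(doc.backup_limit, bucket)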