Merge branch 'develop' into notification_while_import
commit 5712a22a0e
15 changed files with 274 additions and 88 deletions
@@ -21,7 +21,7 @@ class AssignmentRule(Document):
 	def on_update(self): # pylint: disable=no-self-use
 		frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name)
 
-	def after_rename(self): # pylint: disable=no-self-use
+	def after_rename(self, old, new, merge): # pylint: disable=no-self-use
 		frappe.cache_manager.clear_doctype_map('Assignment Rule', self.name)
 
 	def apply_unassign(self, doc, assignments):

@@ -108,12 +108,14 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
 @click.option('--install-app', multiple=True, help='Install app after installation')
 @click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file')
 @click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file')
 @click.option('--force', is_flag=True, default=False, help='Use a bit of force to get the job done')
 @pass_context
 def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None):
 	"Restore site database from an sql file"
-	from frappe.installer import extract_sql_gzip, extract_tar_files
-	# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
+	from frappe.installer import extract_sql_gzip, extract_tar_files, is_downgrade
+	force = context.force or force
+
+	# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
 	if not os.path.exists(sql_file_path):
 		base_path = '..'
 		sql_file_path = os.path.join(base_path, sql_file_path)

@@ -125,7 +127,6 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
 	else:
 		base_path = '.'
 
-
 	if sql_file_path.endswith('sql.gz'):
 		decompressed_file_name = extract_sql_gzip(os.path.abspath(sql_file_path))
 	else:

@@ -133,10 +134,16 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
 
 	site = get_site(context)
 	frappe.init(site=site)
+
+	# dont allow downgrading to older versions of frappe without force
+	if not force and is_downgrade(decompressed_file_name, verbose=True):
+		warn_message = "This is not recommended and may lead to unexpected behaviour. Do you want to continue anyway?"
+		click.confirm(warn_message, abort=True)
+
 	_new_site(frappe.conf.db_name, site, mariadb_root_username=mariadb_root_username,
 		mariadb_root_password=mariadb_root_password, admin_password=admin_password,
 		verbose=context.verbose, install_apps=install_app, source_sql=decompressed_file_name,
-		force=True)
+		force=True, db_type=frappe.conf.db_type)
 
 	# Extract public and/or private files to the restored site, if user has given the path
 	if with_public_files:

@@ -22,16 +22,28 @@ class Role(Document):
 		frappe.db.sql("delete from `tabHas Role` where role = %s", self.name)
 		frappe.clear_cache()
 
+	def on_update(self):
+		'''update system user desk access if this has changed in this update'''
+		if frappe.flags.in_install: return
+		if self.has_value_changed('desk_access'):
+			for user_name in get_users(self.name):
+				user = frappe.get_doc('User', user_name)
+				user_type = user.user_type
+				user.set_system_user()
+				if user_type != user.user_type:
+					user.save()
+
 # Get email addresses of all users that have been assigned this role
 def get_emails_from_role(role):
 	emails = []
 
-	users = frappe.get_list("Has Role", filters={"role": role, "parenttype": "User"},
-		fields=["parent"])
-
-	for user in users:
-		user_email, enabled = frappe.db.get_value("User", user.parent, ["email", "enabled"])
+	for user in get_users(role):
+		user_email, enabled = frappe.db.get_value("User", user, ["email", "enabled"])
 		if enabled and user_email not in ["admin@example.com", "guest@example.com"]:
 			emails.append(user_email)
 
-	return emails
+	return emails
+
+def get_users(role):
+	return [d.parent for d in frappe.get_all("Has Role", filters={"role": role, "parenttype": "User"},
+		fields=["parent"])]

@@ -23,3 +23,28 @@ class TestUser(unittest.TestCase):
 
 		frappe.get_doc("User", "test@example.com").add_roles("_Test Role 3")
 		self.assertTrue("_Test Role 3" in frappe.get_roles("test@example.com"))
+
+	def test_change_desk_access(self):
+		'''if we change desk acecss from role, remove from user'''
+		frappe.delete_doc_if_exists('User', 'test-user-for-desk-access@example.com')
+		frappe.delete_doc_if_exists('Role', 'desk-access-test')
+		user = frappe.get_doc(dict(
+			doctype='User',
+			email='test-user-for-desk-access@example.com',
+			first_name='test')).insert()
+		role = frappe.get_doc(dict(
+			doctype = 'Role',
+			role_name = 'desk-access-test',
+			desk_access = 0
+		)).insert()
+		user.add_roles(role.name)
+		user.save()
+		self.assertTrue(user.user_type=='Website User')
+		role.desk_access = 1
+		role.save()
+		user.reload()
+		self.assertTrue(user.user_type=='System User')
+		role.desk_access = 0
+		role.save()
+		user.reload()
+		self.assertTrue(user.user_type=='Website User')

@@ -1,7 +1,7 @@
 import frappe, subprocess, os
 from six.moves import input
 
-def setup_database(force, source_sql, verbose):
+def setup_database(force, source_sql=None, verbose=False):
 	root_conn = get_root_connection()
 	root_conn.commit()
 	root_conn.sql("DROP DATABASE IF EXISTS `{0}`".format(frappe.conf.db_name))

@@ -16,10 +16,12 @@ def setup_database(force, source_sql, verbose):
 	subprocess_env = os.environ.copy()
 	subprocess_env['PGPASSWORD'] = str(frappe.conf.db_password)
+	# bootstrap db
+	if not source_sql:
+		source_sql = os.path.join(os.path.dirname(__file__), 'framework_postgres.sql')
 
 	subprocess.check_output([
 		'psql', frappe.conf.db_name, '-h', frappe.conf.db_host or 'localhost', '-U',
-		frappe.conf.db_name, '-f',
-		os.path.join(os.path.dirname(__file__), 'framework_postgres.sql')
+		frappe.conf.db_name, '-f', source_sql
 	], env=subprocess_env)
 
 	frappe.connect()

@@ -259,7 +259,10 @@ def get_aggregate_function(chart_type):
 def get_result(data, timegrain, from_date, to_date):
 	start_date = getdate(from_date)
 	end_date = getdate(to_date)
 
 	result = []
+	if timegrain == 'Daily':
+		result.append([start_date, 0.0])
+
 	while start_date < end_date:
 		next_date = get_next_expected_date(start_date, timegrain)

@@ -35,9 +35,6 @@ class TestDashboardChart(unittest.TestCase):
 		self.assertEqual(get_period_ending('2019-10-01', 'Quarterly'),
 			getdate('2019-12-31'))
 
-		self.assertEqual(get_period_ending('2019-10-01', 'Yearly'),
-			getdate('2019-12-31'))
-
 	def test_dashboard_chart(self):
 		if frappe.db.exists('Dashboard Chart', 'Test Dashboard Chart'):
 			frappe.delete_doc('Dashboard Chart', 'Test Dashboard Chart')

@@ -50,7 +47,7 @@ class TestDashboardChart(unittest.TestCase):
 			based_on = 'creation',
 			timespan = 'Last Year',
 			time_interval = 'Monthly',
-			filters_json = '[]',
+			filters_json = '{}',
 			timeseries = 1
 		)).insert()
 

@@ -82,7 +79,7 @@ class TestDashboardChart(unittest.TestCase):
 			based_on = 'creation',
 			timespan = 'Last Year',
 			time_interval = 'Monthly',
-			filters_json = '[]',
+			filters_json = '{}',
 			timeseries = 1
 		)).insert()
 

@@ -114,7 +111,7 @@ class TestDashboardChart(unittest.TestCase):
 			based_on = 'creation',
 			timespan = 'Last Year',
 			time_interval = 'Monthly',
-			filters_json = '[]',
+			filters_json = '{}',
 			timeseries = 1
 		)).insert()
 

@@ -132,6 +129,60 @@ class TestDashboardChart(unittest.TestCase):
 
 		frappe.db.rollback()
 
+	def test_group_by_chart_type(self):
+		if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
+			frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
+
+		frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
+
+		frappe.get_doc(dict(
+			doctype = 'Dashboard Chart',
+			chart_name = 'Test Group By Dashboard Chart',
+			chart_type = 'Group By',
+			document_type = 'ToDo',
+			group_by_based_on = 'status',
+			filters_json = '{}',
+		)).insert()
+
+		result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1)
+		todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]})
+
+		self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count)
+
+		frappe.db.rollback()
+
+	def test_daily_dashboard_chart(self):
+		insert_test_records()
+
+		if frappe.db.exists('Dashboard Chart', 'Test Daily Dashboard Chart'):
+			frappe.delete_doc('Dashboard Chart', 'Test Daily Dashboard Chart')
+
+		frappe.get_doc(dict(
+			doctype = 'Dashboard Chart',
+			chart_name = 'Test Daily Dashboard Chart',
+			chart_type = 'Sum',
+			document_type = 'Communication',
+			based_on = 'communication_date',
+			value_based_on = 'rating',
+			timespan = 'Select Date Range',
+			time_interval = 'Daily',
+			from_date = datetime(2019, 1, 6),
+			to_date = datetime(2019, 1, 11),
+			filters_json = '{}',
+			timeseries = 1
+		)).insert()
+
+		result = get(chart_name ='Test Daily Dashboard Chart', refresh = 1)
+
+		self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 300.0, 0.0, 100.0, 0.0])
+		self.assertEqual(
+			result.get('labels'),
+			[formatdate('2019-01-06'), formatdate('2019-01-07'), formatdate('2019-01-08'),\
+			formatdate('2019-01-09'), formatdate('2019-01-10'), formatdate('2019-01-11')]
+		)
+
+		frappe.db.rollback()
+
 	def test_weekly_dashboard_chart(self):
 		insert_test_records()
 

@@ -149,42 +200,21 @@ class TestDashboardChart(unittest.TestCase):
 			time_interval = 'Weekly',
 			from_date = datetime(2018, 12, 30),
 			to_date = datetime(2019, 1, 15),
-			filters_json = '[]',
+			filters_json = '{}',
 			timeseries = 1
 		)).insert()
 
 		result = get(chart_name ='Test Weekly Dashboard Chart', refresh = 1)
 
-		self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 400.0, 0.0])
+		self.assertEqual(result.get('datasets')[0].get('values'), [200.0, 800.0, 0.0])
 		self.assertEqual(result.get('labels'), [formatdate('2019-01-06'), formatdate('2019-01-13'), formatdate('2019-01-20')])
 
 		frappe.db.rollback()
 
-	def test_group_by_chart_type(self):
-		if frappe.db.exists('Dashboard Chart', 'Test Group By Dashboard Chart'):
-			frappe.delete_doc('Dashboard Chart', 'Test Group By Dashboard Chart')
-
-		frappe.get_doc({"doctype":"ToDo", "description": "test"}).insert()
-
-		frappe.get_doc(dict(
-			doctype = 'Dashboard Chart',
-			chart_name = 'Test Group By Dashboard Chart',
-			chart_type = 'Group By',
-			document_type = 'ToDo',
-			group_by_based_on = 'status',
-			filters_json = '[]',
-		)).insert()
-
-		result = get(chart_name ='Test Group By Dashboard Chart', refresh = 1)
-		todo_status_count = frappe.db.count('ToDo', {'status': result.get('labels')[0]})
-
-		self.assertEqual(result.get('datasets')[0].get('values')[0], todo_status_count)
-
-		frappe.db.rollback()
-
 def insert_test_records():
 	create_new_communication(datetime(2019, 1, 10), 100)
 	create_new_communication(datetime(2019, 1, 6), 200)
+	create_new_communication(datetime(2019, 1, 7), 400)
 	create_new_communication(datetime(2019, 1, 8), 300)
 
 def create_new_communication(date, rating):

@@ -13,7 +13,7 @@ from frappe.modules import load_doctype_module
 
 
 @frappe.whitelist()
-def get_submitted_linked_docs(doctype, name, docs=None, linked=None):
+def get_submitted_linked_docs(doctype, name, docs=None, linked=None, visited=None):
 	"""
 	Get all nested submitted linked doctype linkinfo
 

@@ -34,10 +34,18 @@ def get_submitted_linked_docs(doctype, name, docs=None, linked=None):
 	if not linked:
 		linked = {}
 
+	if not visited:
+		visited = []
+
+	if name in visited:
+		return
+
 	linkinfo = get_linked_doctypes(doctype)
 	linked_docs = get_linked_docs(doctype, name, linkinfo)
 
 	link_count = 0
+	visited.append(name)
+
 	for link_doctype, link_names in linked_docs.items():
 		if link_doctype not in linked:
 			linked[link_doctype] = []

@@ -61,13 +69,14 @@ def get_submitted_linked_docs(doctype, name, docs=None, linked=None):
 			if link.name in [doc.get("name") for doc in docs]:
 				continue
 
-			links = get_submitted_linked_docs(link_doctype, link.name, docs, linked)
-			docs.append({
-				"doctype": link_doctype,
-				"name": link.name,
-				"docstatus": link.docstatus,
-				"link_count": links.get("count")
-			})
+			links = get_submitted_linked_docs(link_doctype, link.name, docs, linked, visited)
+			if links:
+				docs.append({
+					"doctype": link_doctype,
+					"name": link.name,
+					"docstatus": link.docstatus,
+					"link_count": links.get("count")
+				})
 
 	# sort linked documents by ascending number of links
 	docs.sort(key=lambda doc: doc.get("link_count"))

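
Note on the hunks above: the new visited list is a cycle guard for get_submitted_linked_docs. Two submitted documents that link to each other would previously recurse into one another without terminating; recording each visited name and returning early breaks the loop. A minimal sketch of the same idea outside Frappe (the graph structure and document names below are invented for illustration):

def collect_links(graph, name, visited=None):
	# graph: dict mapping a document name to the names it links to (hypothetical example data)
	if visited is None:
		visited = []
	if name in visited:
		# already expanded once -- stop instead of recursing forever
		return []
	visited.append(name)

	collected = []
	for linked_name in graph.get(name, []):
		collected.append(linked_name)
		collected.extend(collect_links(graph, linked_name, visited))
	return collected

# Two documents that reference each other no longer cause infinite recursion:
print(collect_links({"SO-0001": ["DN-0001"], "DN-0001": ["SO-0001"]}, "SO-0001"))
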
@@ -333,3 +333,34 @@ def extract_tar_files(site_name, file_path, folder_name):
 	frappe.destroy()
 
 	return tar_path
+
+def is_downgrade(sql_file_path, verbose=False):
+	"""checks if input db backup will get downgraded on current bench"""
+	from semantic_version import Version
+	head = "INSERT INTO `tabInstalled Application` VALUES"
+
+	with open(sql_file_path) as f:
+		for line in f:
+			if head in line:
+				# 'line' (str) format: ('2056588823','2020-05-11 18:21:31.488367','2020-06-12 11:49:31.079506','Administrator','Administrator',0,'Installed Applications','installed_applications','Installed Applications',1,'frappe','v10.1.71-74 (3c50d5e) (v10.x.x)','v10.x.x'),('855c640b8e','2020-05-11 18:21:31.488367','2020-06-12 11:49:31.079506','Administrator','Administrator',0,'Installed Applications','installed_applications','Installed Applications',2,'your_custom_app','0.0.1','master')
+				line = line.strip().lstrip(head).rstrip(";").strip()
+				# 'all_apps' (list) format: [('frappe', '12.x.x-develop ()', 'develop'), ('your_custom_app', '0.0.1', 'master')]
+				all_apps = [ x[-3:] for x in frappe.safe_eval(line) ]
+
+				for app in all_apps:
+					app_name = app[0]
+					app_version = app[1].split(" ")[0]
+
+					if app_name == "frappe":
+						try:
+							current_version = Version(frappe.__version__)
+							backup_version = Version(app_version[1:] if app_version[0] == "v" else app_version)
+						except ValueError:
+							return False
+
+						downgrade = backup_version > current_version
+
+						if verbose and downgrade:
+							print("Your site will be downgraded from Frappe {0} to {1}".format(current_version, backup_version))
+
+						return downgrade

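
Note on is_downgrade() above: it scans the backup's `tabInstalled Application` insert statement for the recorded frappe version and compares it against the running frappe.__version__ with the semantic_version package; the leading "v" is stripped because semantic_version.Version() rejects it. A short hedged sketch of the comparison it performs (the version strings are examples only, not taken from this commit):

from semantic_version import Version

current_version = Version("12.0.4")       # e.g. frappe.__version__ on the bench doing the restore
backup_version = Version("13.0.0-dev")    # version parsed out of the SQL backup, "v" prefix removed

# Restoring a backup taken on a newer Frappe onto an older bench counts as a downgrade,
# so the restore command warns unless --force is passed.
print(backup_version > current_version)   # True
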
@@ -222,11 +222,13 @@ def upload_system_backup_to_google_drive():
 	return _("Google Drive Backup Successful.")
 
 def daily_backup():
-	if frappe.db.get_single_value("Google Drive", "frequency") == "Daily":
+	drive_settings = frappe.db.get_singles_dict('Google Drive')
+	if drive_settings.enable and drive_settings.frequency == "Daily":
 		upload_system_backup_to_google_drive()
 
 def weekly_backup():
-	if frappe.db.get_single_value("Google Drive", "frequency") == "Weekly":
+	drive_settings = frappe.db.get_singles_dict('Google Drive')
+	if drive_settings.enable and drive_settings.frequency == "Weekly":
 		upload_system_backup_to_google_drive()
 
 def get_absolute_path(filename):

@@ -396,6 +396,11 @@ class Document(BaseDocument):
 	def get_doc_before_save(self):
 		return getattr(self, '_doc_before_save', None)
 
+	def has_value_changed(self, fieldname):
+		'''Returns true if value is changed before and after saving'''
+		previous = self.get_doc_before_save()
+		return previous.get(fieldname)!=self.get(fieldname) if previous else True
+
 	def set_new_name(self, force=False, set_name=None, set_child_names=True):
 		"""Calls `frappe.naming.set_new_name` for parent and child docs."""
 		if self.flags.name_set and not force:

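
Note on the new Document.has_value_changed() helper above: it compares the copy captured in _doc_before_save with the current in-memory value, and deliberately reports True when no previous copy exists (for example on insert). A hedged usage sketch in a controller hook, mirroring how Role.on_update() uses it earlier in this diff (the DocType and fieldname here are illustrative, not part of this commit):

from frappe.model.document import Document

class ExampleController(Document):
	def on_update(self):
		# Only react when the field actually changed in this save;
		# on a fresh insert there is no _doc_before_save, so this is also True.
		if self.has_value_changed('status'):
			self.notify_update()
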
@@ -101,19 +101,25 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
 		columns[1] = [];
 		columns[2] = [];
 
-		Object.keys(this.setters).forEach((setter, index) => {
-			let df_prop = frappe.meta.docfield_map[this.doctype][setter];
-
-			// Index + 1 to start filling from index 1
-			// Since Search is a standrd field already pushed
-			columns[(index + 1) % 3].push({
-				fieldtype: df_prop.fieldtype,
-				label: df_prop.label,
-				fieldname: setter,
-				options: df_prop.options,
-				default: this.setters[setter]
+		if ($.isArray(this.setters)) {
+			this.setters.forEach((setter, index) => {
+				columns[(index + 1) % 3].push(setter);
 			});
-		});
+		} else {
+			Object.keys(this.setters).forEach((setter, index) => {
+				let df_prop = frappe.meta.docfield_map[this.doctype][setter];
+
+				// Index + 1 to start filling from index 1
+				// Since Search is a standrd field already pushed
+				columns[(index + 1) % 3].push({
+					fieldtype: df_prop.fieldtype,
+					label: df_prop.label,
+					fieldname: setter,
+					options: df_prop.options,
+					default: this.setters[setter]
+				});
+			});
+		}
 
 		// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/seal
 		if (Object.seal) {

@@ -217,7 +223,13 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
 		let contents = ``;
 		let columns = ["name"];
 
-		columns = columns.concat(Object.keys(this.setters));
+		if ($.isArray(this.setters)) {
+			for (let df of this.setters) {
+				columns.push(df.fieldname);
+			}
+		} else {
+			columns = columns.concat(Object.keys(this.setters));
+		}
 
 		columns.forEach(function (column) {
 			contents += `<div class="list-item__content ellipsis">

@@ -290,16 +302,24 @@ frappe.ui.form.MultiSelectDialog = class MultiSelectDialog {
 		let filters = this.get_query ? this.get_query().filters : {} || {};
 		let filter_fields = [];
 
-		Object.keys(this.setters).forEach(function (setter) {
-			var value = me.dialog.fields_dict[setter].get_value();
-			if (me.dialog.fields_dict[setter].df.fieldtype == "Data" && value) {
-				filters[setter] = ["like", "%" + value + "%"];
-			} else {
-				filters[setter] = value || undefined;
-				me.args[setter] = filters[setter];
-				filter_fields.push(setter);
+		if ($.isArray(this.setters)) {
+			for (let df of this.setters) {
+				filters[df.fieldname] = me.dialog.fields_dict[df.fieldname].get_value() || undefined;
+				me.args[df.fieldname] = filters[df.fieldname];
+				filter_fields.push(df.fieldname);
 			}
-		});
+		} else {
+			Object.keys(this.setters).forEach(function (setter) {
+				var value = me.dialog.fields_dict[setter].get_value();
+				if (me.dialog.fields_dict[setter].df.fieldtype == "Data" && value) {
+					filters[setter] = ["like", "%" + value + "%"];
+				} else {
+					filters[setter] = value || undefined;
+					me.args[setter] = filters[setter];
+					filter_fields.push(setter);
+				}
+			});
+		}
 
 		let filter_group = this.get_custom_filters();
 		Object.assign(filters, filter_group);

@@ -66,6 +66,13 @@ class TestDocument(unittest.TestCase):
 
 		self.assertEqual(frappe.db.get_value(d.doctype, d.name, "subject"), "subject changed")
 
+	def test_value_changed(self):
+		d = self.test_insert()
+		d.subject = "subject changed again"
+		d.save()
+		self.assertTrue(d.has_value_changed('subject'))
+		self.assertFalse(d.has_value_changed('event_type'))
+
 	def test_mandatory(self):
 		# TODO: recheck if it is OK to force delete
 		frappe.delete_doc_if_exists("User", "test_mandatory@example.com", 1)

@@ -26,17 +26,24 @@ class BackupGenerator:
 		If specifying db_file_name, also append ".sql.gz"
 	"""
 	def __init__(self, db_name, user, password, backup_path_db=None, backup_path_files=None,
-		backup_path_private_files=None, db_host="localhost", db_port=3306, verbose=False):
+		backup_path_private_files=None, db_host="localhost", db_port=None, verbose=False,
+		db_type='mariadb'):
 		global _verbose
 		self.db_host = db_host
-		self.db_port = db_port or 3306
+		self.db_port = db_port
 		self.db_name = db_name
+		self.db_type = db_type
 		self.user = user
 		self.password = password
 		self.backup_path_files = backup_path_files
 		self.backup_path_db = backup_path_db
 		self.backup_path_private_files = backup_path_private_files
+
+		if not self.db_port and self.db_type == 'mariadb':
+			self.db_port = 3306
+		elif not self.db_port and self.db_type == 'postgres':
+			self.db_port = 5432
 
 		site = frappe.local.site or frappe.generate_hash(length=8)
 		self.site_slug = site.replace('.', '_')
 

@@ -141,6 +148,17 @@ class BackupGenerator:
 			for item in self.__dict__.copy().items())
 
 		cmd_string = """mysqldump --single-transaction --quick --lock-tables=false -u %(user)s -p%(password)s %(db_name)s -h %(db_host)s -P %(db_port)s | gzip > %(backup_path_db)s """ % args
+
+		if self.db_type == 'postgres':
+			cmd_string = "pg_dump postgres://{user}:{password}@{db_host}:{db_port}/{db_name} | gzip > {backup_path_db}".format(
+				user=args.get('user'),
+				password=args.get('password'),
+				db_host=args.get('db_host'),
+				db_port=args.get('db_port'),
+				db_name=args.get('db_name'),
+				backup_path_db=args.get('backup_path_db')
+			)
+
 		err, out = frappe.utils.execute_in_shell(cmd_string)
 
 	def send_email(self):

@@ -181,7 +199,8 @@ def get_backup():
 	"""
 	delete_temp_backups()
 	odb = BackupGenerator(frappe.conf.db_name, frappe.conf.db_name,\
-		frappe.conf.db_password, db_host = frappe.db.host)
+		frappe.conf.db_password, db_host = frappe.db.host,\
+		db_type=frappe.conf.db_type, db_port=frappe.conf.db_port)
 	odb.get_backup()
 	recipient_list = odb.send_email()
 	frappe.msgprint(_("Download link for your backup will be emailed on the following email address: {0}").format(', '.join(recipient_list)))

@@ -201,6 +220,7 @@ def new_backup(older_than=6, ignore_files=False, backup_path_db=None, backup_pat
 		backup_path_private_files=backup_path_private_files,
 		db_host = frappe.db.host,
 		db_port = frappe.db.port,
+		db_type = frappe.conf.db_type,
 		verbose=verbose)
 	odb.get_backup(older_than, ignore_files, force=force)
 	return odb

@@ -258,25 +278,38 @@ def backup(with_files=False, backup_path_db=None, backup_path_files=None, quiet=
 
 if __name__ == "__main__":
 	"""
-		is_file_old db_name user password db_host
-		get_backup db_name user password db_host
+		is_file_old db_name user password db_host db_type db_port
+		get_backup db_name user password db_host db_type db_port
 	"""
 	import sys
 	cmd = sys.argv[1]
+
+	db_type = 'mariadb'
+	try:
+		db_type = sys.argv[6]
+	except IndexError:
+		pass
+
+	db_port = 3306
+	try:
+		db_port = int(sys.argv[7])
+	except IndexError:
+		pass
+
 	if cmd == "is_file_old":
-		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost")
+		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port)
 		is_file_old(odb.db_file_name)
 
 	if cmd == "get_backup":
-		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost")
+		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port)
 		odb.get_backup()
 
 	if cmd == "take_dump":
-		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost")
+		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port)
 		odb.take_dump()
 
 	if cmd == "send_email":
-		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost")
+		odb = BackupGenerator(sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5] or "localhost", db_type=db_type, db_port=db_port)
 		odb.send_email("abc.sql.gz")
 
 	if cmd == "delete_temp_backups":

@@ -15,10 +15,10 @@ Faker==2.0.4
 future==0.18.2
 GitPython==2.1.15
 gitdb2==2.0.6;python_version<'3.4'
-google-api-python-client==1.7.11
+google-api-python-client==1.9.3
 google-auth-httplib2==0.0.3
 google-auth-oauthlib==0.4.1
-google-auth==1.17.1
+google-auth==1.18.0
 googlemaps==3.1.1
 gunicorn==19.10.0
 html2text==2016.9.19

@@ -29,7 +29,7 @@ ldap3==2.7
 markdown2==2.3.9
 maxminddb-geolite2==2018.703
 ndg-httpsclient==0.5.1
-num2words==0.5.5
+num2words==0.5.10
 oauthlib==3.1.0
 openpyxl==2.6.4
 passlib==1.7.2