Merge branch 'develop' of github.com:frappe/frappe into mariadb-client-refactor

This commit is contained in:
Gavin D'souza 2022-06-15 16:40:29 +05:30
commit b4578dc632
318 changed files with 4877 additions and 5803 deletions

View file

@ -17,7 +17,7 @@ if [ "$TYPE" == "server" ]; then
fi
if [ "$DB" == "mariadb" ];then
sudo apt update && sudo apt install mariadb-client-10.3
sudo apt install mariadb-client-10.3
mysql --host 127.0.0.1 --port 3306 -u root -e "SET GLOBAL character_set_server = 'utf8mb4'";
mysql --host 127.0.0.1 --port 3306 -u root -e "SET GLOBAL collation_server = 'utf8mb4_unicode_ci'";

View file

@ -16,10 +16,4 @@ sudo mv /tmp/wkhtmltox/bin/wkhtmltopdf /usr/local/bin/wkhtmltopdf
sudo chmod o+x /usr/local/bin/wkhtmltopdf
# install cups
sudo apt-get install libcups2-dev
# install redis
sudo apt-get install redis-server
# install redis
sudo apt-get install libmariadb-dev
sudo apt update && sudo apt install libcups2-dev libmariadb-dev redis-server

View file

@ -5,8 +5,10 @@ import shlex
import subprocess
import sys
import urllib.request
from functools import cache
@cache
def fetch_pr_data(pr_number, repo, endpoint):
api_url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}"
@ -26,7 +28,16 @@ def get_output(command, shell=True):
return subprocess.check_output(command, shell=shell, encoding="utf8").strip()
def has_skip_ci_label(pr_number, repo="frappe/frappe"):
return any([label["name"] for label in fetch_pr_data(pr_number, repo, "")["labels"] if label["name"] == "Skip CI"])
return has_label(pr_number, "Skip CI", repo)
def has_run_server_tests_label(pr_number, repo="frappe/frappe"):
	"""Return True when the PR is labelled to force the server test suite to run."""
	server_tests_label = "Run Server Tests"
	return has_label(pr_number, server_tests_label, repo)
def has_run_ui_tests_label(pr_number, repo="frappe/frappe"):
	"""Return True when the PR is labelled to force the UI (Cypress) test suite to run."""
	ui_tests_label = "Run UI Tests"
	return has_label(pr_number, ui_tests_label, repo)
def has_label(pr_number, label, repo="frappe/frappe"):
	"""Return True if the given PR carries a label named *label*.

	:param pr_number: pull request number
	:param label: label name to look for (e.g. "Skip CI")
	:param repo: "owner/name" repository slug

	Bug fix: the previous comprehension reused ``label`` as its loop
	variable, shadowing the parameter — the filter then compared each
	label dict's name against the dict itself (str == dict), so the
	function could never return True.
	"""
	pr_labels = fetch_pr_data(pr_number, repo, "")["labels"]
	return any(entry["name"] == label for entry in pr_labels)
def is_py(file):
	"""Return True when *file*'s path ends with "py" (i.e. a Python source file)."""
	suffix = "py"
	return file[-len(suffix):] == suffix
@ -77,11 +88,11 @@ if __name__ == "__main__":
print("Only docs were updated, stopping build process.")
sys.exit(0)
elif only_frontend_code_changed and build_type == "server":
elif only_frontend_code_changed and build_type == "server" and not has_run_server_tests_label(pr_number, repo):
print("Only Frontend code was updated; Stopping Python build process.")
sys.exit(0)
elif build_type == "ui" and only_py_changed:
elif build_type == "ui" and only_py_changed and not has_run_ui_tests_label(pr_number, repo):
print("Only Python code was updated, stopping Cypress build process.")
sys.exit(0)

View file

@ -2,8 +2,13 @@ name: 'Trigger Docker build on release'
on:
release:
types: [released]
permissions:
contents: read
jobs:
curl:
permissions:
contents: none
name: 'Trigger Docker build on release'
runs-on: ubuntu-latest
container:

View file

@ -3,6 +3,9 @@ on:
pull_request:
types: [ opened, synchronize, reopened, edited ]
permissions:
contents: read
jobs:
docs-required:
name: 'Documentation Required'

View file

@ -7,6 +7,9 @@ concurrency:
group: patch-mariadb-develop-${{ github.event.number }}
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest
@ -56,7 +59,7 @@ jobs:
uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/*requirements.txt', '**/pyproject.toml', '**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-
@ -121,7 +124,7 @@ jobs:
git fetch --depth 1 upstream $branch_name:$branch_name
git checkout -q -f $branch_name
pip install -q -r requirements.txt
bench setup requirements --python
bench --site test_site migrate
done

View file

@ -2,7 +2,10 @@ name: Generate Semantic Release
on:
push:
branches:
- version-13
- version-14-beta
permissions:
contents: read
jobs:
release:
name: Release

View file

@ -11,6 +11,9 @@ concurrency:
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest
@ -67,7 +70,7 @@ jobs:
uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/*requirements.txt', '**/pyproject.toml', '**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-

View file

@ -10,6 +10,9 @@ concurrency:
group: server-postgres-develop-${{ github.event.number }}
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest
@ -70,7 +73,7 @@ jobs:
uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/*requirements.txt', '**/pyproject.toml', '**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-

View file

@ -10,6 +10,9 @@ concurrency:
group: ui-develop-${{ github.event.number }}
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
runs-on: ubuntu-latest
@ -66,7 +69,7 @@ jobs:
uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/*requirements.txt', '**/pyproject.toml', '**/setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
${{ runner.os }}-

View file

@ -7,6 +7,7 @@ pull_request_rules:
- author!=gavindsouza
- author!=deepeshgarg007
- author!=ankush
- author!=mergify[bot]
- or:
- base=version-13
- base=version-12

View file

@ -1,11 +1,8 @@
{
"branches": ["version-13"],
"branches": ["develop", {"name": "version-14-beta", "channel": "beta", "prerelease": true}],
"plugins": [
"@semantic-release/commit-analyzer", {
"preset": "angular",
"releaseRules": [
{"breaking": true, "release": false}
]
"preset": "angular"
},
"@semantic-release/release-notes-generator",
[

View file

@ -12,7 +12,7 @@ data_import* @netchampfaris
core/ @surajshetty3416
database @gavindsouza
model @gavindsouza
requirements.txt @gavindsouza
pyproject.toml @gavindsouza
query_builder/ @gavindsouza
commands/ @gavindsouza
workspace @shariquerik

View file

@ -26,7 +26,7 @@ context('Awesome Bar', () => {
cy.get('.title-text').should('contain', 'To Do');
cy.findByPlaceholderText('Name')
cy.findByPlaceholderText('ID')
.should('have.value', '%test%');
});

View file

@ -34,6 +34,12 @@ context('Data Control', () => {
});
});
});
it('check custom formatters', () => {
cy.visit(`/app/doctype/User`);
cy.get('[data-fieldname="fields"] .grid-row[data-idx="2"] [data-fieldname="fieldtype"] .static-area').should('have.text', '🔵 Section Break');
});
it('Verifying data control by inputting different patterns for "Name" field', () => {
cy.new_form('Test Data Control');
@ -54,7 +60,7 @@ context('Data Control', () => {
//Checking if the border color of the field changes to red
cy.get('.frappe-control[data-fieldname="name1"]').should('have.class', 'has-error');
cy.findByRole('button', {name: 'Save'}).click();
cy.save();
//Checking for the error message
cy.get('.modal-title').should('have.text', 'Message');
@ -64,7 +70,7 @@ context('Data Control', () => {
cy.get_field('name1', 'Data').clear({force: true});
cy.fill_field('name1', 'Komal{}/!', 'Data');
cy.get('.frappe-control[data-fieldname="name1"]').should('have.class', 'has-error');
cy.findByRole('button', {name: 'Save'}).click();
cy.save();
cy.get('.modal-title').should('have.text', 'Message');
cy.get('.msgprint').should('have.text', 'Komal{}/! is not a valid Name');
cy.hide_dialog();
@ -76,14 +82,14 @@ context('Data Control', () => {
cy.get_field('email', 'Data').clear({force: true});
cy.fill_field('email', 'komal', 'Data');
cy.get('.frappe-control[data-fieldname="email"]').should('have.class', 'has-error');
cy.findByRole('button', {name: 'Save'}).click();
cy.save();
cy.get('.modal-title').should('have.text', 'Message');
cy.get('.msgprint').should('have.text', 'komal is not a valid Email Address');
cy.hide_dialog();
cy.get_field('email', 'Data').clear({force: true});
cy.fill_field('email', 'komal@test', 'Data');
cy.get('.frappe-control[data-fieldname="email"]').should('have.class', 'has-error');
cy.findByRole('button', {name: 'Save'}).click();
cy.save();
cy.get('.modal-title').should('have.text', 'Message');
cy.get('.msgprint').should('have.text', 'komal@test is not a valid Email Address');
cy.hide_dialog();
@ -125,4 +131,4 @@ context('Data Control', () => {
cy.get('.actions-btn-group > .dropdown-menu [data-label="Delete"]').click();
cy.click_modal_primary_button('Yes');
});
});
});

View file

@ -4,6 +4,7 @@ const test_button_names = [
"Porcupine Tree (the GOAT)",
"AC / DC",
`Electronic Dance "music"`,
"l'imperatrice",
];
const add_button = (label, group = "TestGroup") => {

View file

@ -1,5 +1,6 @@
context('Customize Form', () => {
before(() => {
cy.login();
cy.visit('/app/customize-form');
});
it('Changing to naming rule should update autoname', () => {
@ -19,4 +20,4 @@ context('Customize Form', () => {
cy.get_field("autoname", "Data").should("have.value", value);
});
});
});
});

View file

@ -7,7 +7,7 @@ context('Timeline Email', () => {
it('Adding new ToDo', () => {
cy.click_listview_primary_button('Add ToDo');
cy.get('.custom-actions:visible > .btn').contains("Edit in full page").click({delay: 500});
cy.get('.custom-actions:visible > .btn').contains("Edit Full Form").click({delay: 500});
cy.fill_field("description", "Test ToDo", "Text Editor");
cy.wait(500);
cy.get('.primary-action').contains('Save').click({force: true});

View file

@ -27,6 +27,7 @@ import "cypress-real-events/support";
//
// -- This is will overwrite an existing command --
// Cypress.Commands.overwrite("visit", (originalFn, url, options) => { ... });
Cypress.Commands.add('login', (email, password) => {
if (!email) {
email = 'Administrator';
@ -265,9 +266,14 @@ Cypress.Commands.add('get_open_dialog', () => {
return cy.get('.modal:visible').last();
});
Cypress.Commands.add('save', () => {
cy.intercept('/api').as('api');
cy.get(`button[data-label="Save"]:visible`).click({scrollBehavior: false, force: true});
cy.wait('@api');
});
Cypress.Commands.add('hide_dialog', () => {
cy.wait(300);
cy.get_open_dialog().find('.btn-modal-close').click();
cy.get_open_dialog().focus().find('.btn-modal-close').click();
cy.get('.modal:visible').should('not.exist');
});

View file

@ -1,4 +1,4 @@
coverage==5.5
Faker~=8.1.0
Faker~=13.12.1
pyngrok~=5.0.5
unittest-xml-reporting~=3.0.4

View file

@ -10,18 +10,20 @@ be used to build database driven apps.
Read the documentation: https://frappeframework.com/docs
"""
import functools
import importlib
import inspect
import json
import os
import sys
import re
import warnings
from typing import TYPE_CHECKING, Dict, List, Optional, Union
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
import click
from werkzeug.local import Local, release_local
from frappe.query_builder import get_query_builder, patch_query_aggregation, patch_query_execute
from frappe.utils.caching import request_cache
from frappe.utils.data import cstr, sbool
# Local application imports
@ -44,6 +46,11 @@ STANDARD_USERS = ("Guest", "Administrator")
DISABLE_DATABASE_POOLING = None
_dev_server = int(sbool(os.environ.get("DEV_SERVER", False)))
_qb_patched = {}
re._MAXCACHE = (
50 # reduced from default 512 given we are already maintaining this on parent worker
)
if _dev_server:
warnings.simplefilter("always", DeprecationWarning)
@ -236,8 +243,10 @@ def init(site, sites_path=None, new_site=False):
local.qb = get_query_builder(local.conf.db_type or "mariadb")
setup_module_map()
patch_query_execute()
patch_query_aggregation()
if not _qb_patched.get(local.conf.db_type):
patch_query_execute()
patch_query_aggregation()
local.initialised = True
@ -410,16 +419,22 @@ def msgprint(
:param is_minimizable: [optional] Allow users to minimize the modal
:param wide: [optional] Show wide modal
"""
import inspect
import sys
from frappe.utils import strip_html_tags
msg = safe_decode(msg)
out = _dict(message=msg)
@functools.lru_cache(maxsize=1024)
def _strip_html_tags(message):
return strip_html_tags(message)
def _raise_exception():
if raise_exception:
if flags.rollback_on_exception:
db.rollback()
import inspect
if inspect.isclass(raise_exception) and issubclass(raise_exception, Exception):
raise raise_exception(msg)
@ -436,8 +451,11 @@ def msgprint(
if as_list and type(msg) in (list, tuple):
out.as_list = 1
if sys.stdin.isatty():
msg = _strip_html_tags(out.message)
if flags.print_messages and out.message:
print(f"Message: {strip_html_tags(out.message)}")
print(f"Message: {_strip_html_tags(out.message)}")
out.title = title or _("Message", context="Default title of the message dialog")
@ -835,6 +853,7 @@ def clear_cache(user=None, doctype=None):
:param user: If user is given, only user cache is cleared.
:param doctype: If doctype is given, only DocType cache is cleared."""
import frappe.cache_manager
import frappe.utils.caching
if doctype:
frappe.cache_manager.clear_doctype_cache(doctype)
@ -854,7 +873,14 @@ def clear_cache(user=None, doctype=None):
for fn in get_hooks("clear_cache"):
get_attr(fn)()
frappe.utils.caching._SITE_CACHE.clear()
local.role_permissions = {}
if hasattr(local, "request_cache"):
local.request_cache.clear()
if hasattr(local, "system_settings"):
del local.system_settings
if hasattr(local, "website_settings"):
del local.website_settings
def only_has_select_perm(doctype, user=None, ignore_permissions=False):
@ -1024,7 +1050,7 @@ def get_cached_doc(*args, **kwargs):
return doc
if key := can_cache_doc(args):
# local cache
# local cache - has "ready" `Document` objects
if doc := local.document_cache.get(key):
return _respond(doc)
@ -1032,9 +1058,22 @@ def get_cached_doc(*args, **kwargs):
if doc := cache().hget("document_cache", key):
return _respond(doc, True)
# database
# Not found in local/redis, fetch from DB
doc = get_doc(*args, **kwargs)
# Store in cache
if not key:
key = get_document_cache_key(doc.doctype, doc.name)
local.document_cache[key] = doc
# Avoid setting in local.cache since we're already using local.document_cache above
# Try pickling the doc object as-is first, else fallback to doc.as_dict()
try:
cache().hset("document_cache", key, doc, cache_locally=False)
except Exception:
cache().hset("document_cache", key, doc.as_dict(), cache_locally=False)
return doc
@ -1065,6 +1104,10 @@ def clear_document_cache(doctype, name):
if key in local.document_cache:
del local.document_cache[key]
cache().hdel("document_cache", key)
if doctype == "System Settings" and hasattr(local, "system_settings"):
delattr(local, "system_settings")
if doctype == "Website Settings" and hasattr(local, "website_settings"):
delattr(local, "website_settings")
def get_cached_value(doctype, name, fieldname="name", as_dict=False):
@ -1105,10 +1148,13 @@ def get_doc(*args, **kwargs):
doc = frappe.model.document.get_doc(*args, **kwargs)
# set in cache
# Replace cache
if key := can_cache_doc(args):
local.document_cache[key] = doc
cache().hset("document_cache", key, doc.as_dict())
if key in local.document_cache:
local.document_cache[key] = doc
if cache().hexists("document_cache", key):
cache().hset("document_cache", key, doc.as_dict())
return doc
@ -1163,7 +1209,7 @@ def delete_doc(
:param delete_permanently: Do not create a Deleted Document for the document."""
import frappe.model.delete_doc
frappe.model.delete_doc.delete_doc(
return frappe.model.delete_doc.delete_doc(
doctype,
name,
force,
@ -1259,8 +1305,10 @@ def get_module_path(module, *joins):
:param module: Module name.
:param *joins: Join additional path elements using `os.path.join`."""
module = scrub(module)
return get_pymodule_path(local.module_app[module] + "." + module, *joins)
from frappe.modules.utils import get_module_app
app = get_module_app(module)
return get_pymodule_path(app + "." + scrub(module), *joins)
def get_app_path(app_name, *joins):
@ -1312,6 +1360,7 @@ def get_all_apps(with_internal_apps=True, sites_path=None):
return apps
@request_cache
def get_installed_apps(sort=False, frappe_last=False):
"""Get list of installed apps in current site."""
if getattr(flags, "in_install_db", True):
@ -1353,47 +1402,49 @@ def get_doc_hooks():
return local.doc_events_hooks
def get_hooks(hook=None, default=None, app_name=None):
@request_cache
def _load_app_hooks(app_name: Optional[str] = None):
hooks = {}
apps = [app_name] if app_name else get_installed_apps(sort=True)
for app in apps:
try:
app_hooks = get_module(f"{app}.hooks")
except ImportError:
if local.flags.in_install_app:
# if app is not installed while restoring
# ignore it
pass
print(f'Could not find app "{app}"')
if not request:
raise SystemExit
raise
for key in dir(app_hooks):
if not key.startswith("_"):
append_hook(hooks, key, getattr(app_hooks, key))
return hooks
def get_hooks(
hook: str = None, default: Optional[Any] = "_KEEP_DEFAULT_LIST", app_name: str = None
) -> _dict:
"""Get hooks via `app/hooks.py`
:param hook: Name of the hook. Will gather all hooks for this name and return as a list.
:param default: Default if no hook found.
:param app_name: Filter by app."""
def load_app_hooks(app_name=None):
hooks = {}
for app in [app_name] if app_name else get_installed_apps(sort=True):
app = "frappe" if app == "webnotes" else app
try:
app_hooks = get_module(app + ".hooks")
except ImportError:
if local.flags.in_install_app:
# if app is not installed while restoring
# ignore it
pass
print('Could not find app "{0}"'.format(app_name))
if not request:
sys.exit(1)
raise
for key in dir(app_hooks):
if not key.startswith("_"):
append_hook(hooks, key, getattr(app_hooks, key))
return hooks
no_cache = conf.developer_mode or False
if app_name:
hooks = _dict(load_app_hooks(app_name))
hooks = _dict(_load_app_hooks(app_name))
else:
if no_cache:
hooks = _dict(load_app_hooks())
if conf.developer_mode:
hooks = _dict(_load_app_hooks())
else:
hooks = _dict(cache().get_value("app_hooks", load_app_hooks))
hooks = _dict(cache().get_value("app_hooks", _load_app_hooks))
if hook:
return hooks.get(hook) or (default if default is not None else [])
else:
return hooks
return hooks.get(hook, ([] if default == "_KEEP_DEFAULT_LIST" else default))
return hooks
def append_hook(target, key, value):
@ -1501,19 +1552,35 @@ def call(fn, *args, **kwargs):
return fn(*args, **newargs)
def get_newargs(fn, kwargs):
def get_newargs(fn: Callable, kwargs: Dict[str, Any]) -> Dict[str, Any]:
"""Remove any kwargs that are not supported by the function.
Example:
>>> def fn(a=1, b=2): pass
>>> get_newargs(fn, {"a": 2, "c": 1})
{"a": 2}
"""
# if function has any **kwargs parameter that capture arbitrary keyword arguments
# Ref: https://docs.python.org/3/library/inspect.html#inspect.Parameter.kind
varkw_exist = False
if hasattr(fn, "fnargs"):
fnargs = fn.fnargs
else:
signature = inspect.signature(fn)
fnargs = list(signature.parameters)
varkw = "kwargs" in fnargs
if varkw:
fnargs.pop(-1)
for param_name, parameter in signature.parameters.items():
if parameter.kind == inspect.Parameter.VAR_KEYWORD:
varkw_exist = True
fnargs.remove(param_name)
break
newargs = {}
for a in kwargs:
if (a in fnargs) or varkw:
if (a in fnargs) or varkw_exist:
newargs[a] = kwargs.get(a)
newargs.pop("ignore_permissions", None)
@ -1809,18 +1876,21 @@ def get_value(*args, **kwargs):
return db.get_value(*args, **kwargs)
def as_json(obj: Union[Dict, List], indent=1) -> str:
def as_json(obj: Union[Dict, List], indent=1, separators=None) -> str:
from frappe.utils.response import json_handler
if separators is None:
separators = (",", ": ")
try:
return json.dumps(
obj, indent=indent, sort_keys=True, default=json_handler, separators=(",", ": ")
obj, indent=indent, sort_keys=True, default=json_handler, separators=separators
)
except TypeError:
# this would break in case the keys are not all os "str" type - as defined in the JSON
# adding this to ensure keys are sorted (expected behaviour)
sorted_obj = dict(sorted(obj.items(), key=lambda kv: str(kv[0])))
return json.dumps(sorted_obj, indent=indent, default=json_handler, separators=(",", ": "))
return json.dumps(sorted_obj, indent=indent, default=json_handler, separators=separators)
def are_emails_muted():
@ -2158,8 +2228,18 @@ def safe_eval(code, eval_globals=None, eval_locals=None):
return eval(code, eval_globals, eval_locals)
def get_website_settings(key):
if not hasattr(local, "website_settings"):
local.website_settings = db.get_singles_dict("Website Settings", cast=True)
return local.website_settings[key]
def get_system_settings(key):
return db.get_single_value("System Settings", key, cache=True)
if not hasattr(local, "system_settings"):
local.system_settings = db.get_singles_dict("System Settings", cast=True)
return local.system_settings[key]
def get_active_domains():

View file

@ -30,6 +30,8 @@ local_manager = LocalManager([frappe.local])
_site = None
_sites_path = os.environ.get("SITES_PATH", ".")
SAFE_HTTP_METHODS = ("GET", "HEAD", "OPTIONS")
UNSAFE_HTTP_METHODS = ("POST", "PUT", "DELETE", "PATCH")
class RequestContext(object):
@ -292,7 +294,10 @@ def handle_exception(e):
def after_request(rollback):
if (frappe.local.request.method in ("POST", "PUT") or frappe.local.flags.commit) and frappe.db:
# if HTTP method would change server state, commit if necessary
if frappe.db and (
frappe.local.flags.commit or frappe.local.request.method in UNSAFE_HTTP_METHODS
):
if frappe.db.transaction_writes:
frappe.db.commit()
rollback = False

View file

@ -165,7 +165,7 @@ class LoginManager:
self.set_user_info()
def get_user_info(self):
self.info = frappe.db.get_value(
self.info = frappe.get_cached_value(
"User", self.user, ["user_type", "first_name", "last_name", "user_image"], as_dict=1
)
@ -412,10 +412,16 @@ def clear_cookies():
def validate_ip_address(user):
"""check if IP Address is valid"""
user = (
frappe.get_cached_doc("User", user) if not frappe.flags.in_test else frappe.get_doc("User", user)
from frappe.core.doctype.user.user import get_restricted_ip_list
# Only fetch required fields - for perf
user_fields = ["restrict_ip", "bypass_restrict_ip_check_if_2fa_enabled"]
user_info = (
frappe.get_cached_value("User", user, user_fields, as_dict=True)
if not frappe.flags.in_test
else frappe.db.get_value("User", user, user_fields, as_dict=True)
)
ip_list = user.get_restricted_ip_list()
ip_list = get_restricted_ip_list(user_info)
if not ip_list:
return
@ -430,7 +436,7 @@ def validate_ip_address(user):
# check if two factor auth is enabled
if system_settings.enable_two_factor_auth and not bypass_restrict_ip_check:
# check if bypass restrict ip is enabled for login user
bypass_restrict_ip_check = user.bypass_restrict_ip_check_if_2fa_enabled
bypass_restrict_ip_check = user_info.bypass_restrict_ip_check_if_2fa_enabled
for ip in ip_list:
if frappe.local.request_ip.startswith(ip) or bypass_restrict_ip_check:

View file

@ -298,8 +298,6 @@ def apply(doc=None, method=None, doctype=None, name=None):
if reopened:
break
# print(f"Rule:{assignment_rule}\nDoc: {doc}\nReOpened: {reopened}")
assignment_rule.close_assignments(doc)

View file

@ -15,7 +15,7 @@ from frappe.geo.country_info import get_all
from frappe.model.base_document import get_controller
from frappe.query_builder import DocType
from frappe.query_builder.functions import Count
from frappe.query_builder.terms import subqry
from frappe.query_builder.terms import SubQuery
from frappe.social.doctype.energy_point_log.energy_point_log import get_energy_points
from frappe.social.doctype.energy_point_settings.energy_point_settings import (
is_energy_point_enabled,
@ -211,7 +211,7 @@ def get_user_pages_or_reports(parent, cache=False):
if parent == "Report":
has_role[p.name].update({"ref_doctype": p.ref_doctype})
no_of_roles = (
no_of_roles = SubQuery(
frappe.qb.from_(hasRole).select(Count("*")).where(hasRole.parent == parentTable.name)
)
@ -221,7 +221,7 @@ def get_user_pages_or_reports(parent, cache=False):
pages_with_no_roles = (
frappe.qb.from_(parentTable)
.select(parentTable.name, parentTable.modified, *columns)
.where(subqry(no_of_roles) == 0)
.where(no_of_roles == 0)
).run(as_dict=True)
for p in pages_with_no_roles:
@ -327,7 +327,7 @@ def get_unseen_notes():
(note.notify_on_every_login == 1)
& (note.expire_notification_on > frappe.utils.now())
& (
subqry(frappe.qb.from_(nsb).select(nsb.user).where(nsb.parent == note.name)).notin(
SubQuery(frappe.qb.from_(nsb).select(nsb.user).where(nsb.parent == note.name)).notin(
[frappe.session.user]
)
)

View file

@ -20,6 +20,8 @@ import frappe
timestamps = {}
app_paths = None
sites_path = os.path.abspath(os.getcwd())
WHITESPACE_PATTERN = re.compile(r"\s+")
HTML_COMMENT_PATTERN = re.compile(r"(<!--.*?-->)")
class AssetsNotDownloadedError(Exception):
@ -406,10 +408,10 @@ def link_assets_dir(source, target, hard_link=False):
def scrub_html_template(content):
"""Returns HTML content with removed whitespace and comments"""
# remove whitespace to a single space
content = re.sub(r"\s+", " ", content)
content = WHITESPACE_PATTERN.sub(" ", content)
# strip comments
content = re.sub(r"(<!--.*?-->)", "", content)
content = HTML_COMMENT_PATTERN.sub("", content)
return content.replace("'", "'")

View file

@ -100,7 +100,7 @@ def get_value(doctype, fieldname, filters=None, as_dict=True, debug=False, paren
if frappe.is_table(doctype):
check_parent_permission(parent, doctype)
if not frappe.has_permission(doctype):
if not frappe.has_permission(doctype, parent_doctype=parent):
frappe.throw(_("No permission for {0}").format(doctype), frappe.PermissionError)
filters = get_safe_filters(filters)
@ -385,7 +385,7 @@ def attach_file(
is_private=None,
docfield=None,
):
"""Attach a file to Document (POST)
"""Attach a file to Document
:param filename: filename e.g. test-file.txt
:param filedata: base64 encode filedata which must be urlencoded
@ -396,17 +396,10 @@ def attach_file(
:param is_private: Attach file as private file (1 or 0)
:param docfield: file to attach to (optional)"""
request_method = frappe.local.request.environ.get("REQUEST_METHOD")
if request_method.upper() != "POST":
frappe.throw(_("Invalid Request"))
doc = frappe.get_doc(doctype, docname)
doc.check_permission()
if not doc.has_permission():
frappe.throw(_("Not permitted"), frappe.PermissionError)
_file = frappe.get_doc(
file = frappe.get_doc(
{
"doctype": "File",
"file_name": filename,
@ -418,14 +411,13 @@ def attach_file(
"content": filedata,
"decode": decode_base64,
}
)
_file.save()
).save()
if docfield and doctype:
doc.set(docfield, _file.file_url)
doc.set(docfield, file.file_url)
doc.save()
return _file.as_dict()
return file
@frappe.whitelist()

View file

@ -9,6 +9,7 @@ import click
# imports - module imports
import frappe
from frappe.commands import get_site, pass_context
from frappe.core.doctype.log_settings.log_settings import LOG_DOCTYPES
from frappe.exceptions import SiteNotSpecifiedError
@ -143,10 +144,6 @@ def restore(
)
from frappe.utils.backups import Backup
if not os.path.exists(sql_file_path):
print("Invalid path", sql_file_path)
sys.exit(1)
_backup = Backup(sql_file_path)
site = get_site(context)
@ -1092,6 +1089,51 @@ def build_search_index(context):
frappe.destroy()
@click.command("clear-log-table")
@click.option("--doctype", default="text", type=click.Choice(LOG_DOCTYPES), help="Log DocType")
@click.option("--days", type=int, help="Keep records for days")
@click.option("--no-backup", is_flag=True, default=False, help="Do not backup the table")
@pass_context
def clear_log_table(context, doctype, days, no_backup):
"""If any logtype table grows too large then clearing it with DELETE query
is not feasible in reasonable time. This command copies recent data to new
table and replaces current table with new smaller table.
ref: https://mariadb.com/kb/en/big-deletes/#deleting-more-than-half-a-table
"""
from frappe.core.doctype.log_settings.log_settings import clear_log_table as clear_logs
from frappe.utils.backups import scheduled_backup
if not context.sites:
raise SiteNotSpecifiedError
if doctype not in LOG_DOCTYPES:
raise frappe.ValidationError(f"Unsupported logging DocType: {doctype}")
for site in context.sites:
frappe.init(site=site)
frappe.connect()
if not no_backup:
scheduled_backup(
ignore_conf=False,
include_doctypes=doctype,
ignore_files=True,
force=True,
)
click.echo(f"Backed up {doctype}")
try:
click.echo(f"Copying {doctype} records from last {days} days to temporary table.")
clear_logs(doctype, days=days)
except Exception as e:
click.echo(f"Log cleanup for {doctype} failed:\n{e}")
sys.exit(1)
else:
click.secho(f"Cleared {doctype} records older than {days} days", fg="green")
@click.command("trim-database")
@click.option("--dry-run", is_flag=True, default=False, help="Show what would be deleted")
@click.option(
@ -1264,4 +1306,5 @@ commands = [
partial_restore,
trim_tables,
trim_database,
clear_log_table,
]

View file

@ -730,6 +730,7 @@ def transform_database(context, table, engine, row_format, failfast):
@click.command("run-tests")
@click.option("--app", help="For App")
@click.option("--doctype", help="For DocType")
@click.option("--module-def", help="For all Doctypes in Module Def")
@click.option("--case", help="Select particular TestCase")
@click.option(
"--doctype-list-path",
@ -754,6 +755,7 @@ def run_tests(
app=None,
module=None,
doctype=None,
module_def=None,
test=(),
profile=False,
coverage=False,
@ -790,6 +792,7 @@ def run_tests(
app,
module,
doctype,
module_def,
context.verbose,
tests=tests,
force=context.force,

View file

@ -3,6 +3,7 @@
import functools
import re
from typing import Dict, List
import frappe
from frappe import _
@ -169,29 +170,34 @@ def delete_contact_and_address(doctype, docname):
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def filter_dynamic_link_doctypes(doctype, txt, searchfield, start, page_len, filters):
if not txt:
txt = ""
def filter_dynamic_link_doctypes(txt: str, filters: Dict) -> List[List[str]]:
from frappe.permissions import get_doctypes_with_read
doctypes = frappe.db.get_all(
"DocField", filters=filters, fields=["parent"], distinct=True, as_list=True
txt = txt or ""
filters = filters or {}
TXT_PATTERN = re.compile(f"{txt}.*")
_doctypes_from_df = frappe.get_all(
"DocField",
filters=filters,
pluck="parent",
distinct=True,
order_by=None,
)
doctypes_from_df = {d for d in _doctypes_from_df if TXT_PATTERN.search(_(d), re.IGNORECASE)}
doctypes = tuple(d for d in doctypes if re.search(txt + ".*", _(d[0]), re.IGNORECASE))
filters.update({"dt": ("not in", doctypes_from_df)})
_doctypes_from_cdf = frappe.get_all(
"Custom Field", filters=filters, pluck="dt", distinct=True, order_by=None
)
doctypes_from_cdf = {d for d in _doctypes_from_cdf if TXT_PATTERN.search(_(d), re.IGNORECASE)}
filters.update({"dt": ("not in", [d[0] for d in doctypes])})
all_doctypes = doctypes_from_df.union(doctypes_from_cdf)
allowed_doctypes = set(get_doctypes_with_read())
_doctypes = frappe.db.get_all("Custom Field", filters=filters, fields=["dt"], as_list=True)
valid_doctypes = sorted(all_doctypes.intersection(allowed_doctypes))
_doctypes = tuple([d for d in _doctypes if re.search(txt + ".*", _(d[0]), re.IGNORECASE)])
all_doctypes = [d[0] for d in doctypes + _doctypes]
allowed_doctypes = frappe.permissions.get_doctypes_with_read()
valid_doctypes = sorted(set(all_doctypes).intersection(set(allowed_doctypes)))
valid_doctypes = [[doctype] for doctype in valid_doctypes]
return valid_doctypes
return [[doctype] for doctype in valid_doctypes]
def set_link_title(doc):

122
frappe/core/api/file.py Normal file
View file

@ -0,0 +1,122 @@
import json
from typing import Dict, List
import frappe
from frappe.core.doctype.file.file import File, setup_folder_path
from frappe.utils import cint, cstr
@frappe.whitelist()
def unzip_file(name: str):
	"""Extract the zip archive held by the File document *name*.

	A File record is created for each extracted entry; returns the
	result of ``File.unzip()``.
	"""
	archive: File = frappe.get_doc("File", name)
	return archive.unzip()
@frappe.whitelist()
def get_attached_images(doctype: str, names: List[str]) -> frappe._dict:
	"""Map each docname to the list of file URLs attached to it in the form.

	Returns e.g. ``{name: ['image.jpg', 'image.png']}``.
	"""
	# `names` may arrive JSON-encoded from the client.
	if isinstance(names, str):
		names = json.loads(names)

	rows = frappe.db.get_list(
		"File",
		filters={
			"attached_to_doctype": doctype,
			"attached_to_name": ("in", names),
			"is_folder": 0,
		},
		fields=["file_url", "attached_to_name as docname"],
	)

	out = frappe._dict()
	for row in rows:
		# group file URLs per attached document
		out.setdefault(row.docname, []).append(row.file_url)
	return out
@frappe.whitelist()
def get_files_in_folder(folder: str, start: int = 0, page_length: int = 20) -> Dict:
	"""List the files inside *folder*, paginated.

	Returns ``{"files": [...], "has_more": bool}``. The "Home/Attachments"
	folder is pinned to the top of the "Home" listing.
	"""
	start, page_length = cint(start), cint(page_length)
	fields = ["name", "file_name", "file_url", "is_folder", "modified"]

	attachment_folder = frappe.db.get_value("File", "Home/Attachments", fields, as_dict=1)

	# fetch one extra row so we can tell whether another page exists
	files = frappe.get_list(
		"File",
		{"folder": folder},
		fields,
		start=start,
		page_length=page_length + 1,
	)

	if folder == "Home" and attachment_folder not in files:
		files.insert(0, attachment_folder)

	return {"files": files[:page_length], "has_more": len(files) > page_length}
@frappe.whitelist()
def get_files_by_search_text(text: str) -> List[Dict]:
	"""Find up to 20 non-folder File records matching *text* by name or URL."""
	if not text:
		return []

	pattern = "%" + cstr(text).lower() + "%"
	return frappe.get_list(
		"File",
		fields=["name", "file_name", "file_url", "is_folder", "modified"],
		filters={"is_folder": False},
		or_filters={
			"file_name": ("like", pattern),
			"file_url": pattern,
			"name": ("like", pattern),
		},
		order_by="modified desc",
		limit=20,
	)
@frappe.whitelist(allow_guest=True)
def get_max_file_size() -> int:
	"""Return the configured upload size limit in bytes (default 10 MiB)."""
	default_limit = 10485760  # 10 MiB
	return cint(frappe.conf.get("max_file_size")) or default_limit
@frappe.whitelist()
def create_new_folder(file_name: str, folder: str) -> File:
	"""Create a sub-folder named *file_name* under *folder* and return it."""
	new_folder = frappe.new_doc("File")
	new_folder.update(
		{
			"file_name": file_name,
			"is_folder": 1,
			"folder": folder,
		}
	)
	new_folder.insert(ignore_if_duplicate=True)
	return new_folder
@frappe.whitelist()
def move_file(file_list: List[File], new_parent: str, old_parent: str) -> None:
	"""Re-parent the given files under *new_parent*.

	``file_list`` may arrive JSON-encoded from the client. Saving both
	parent folders afterwards recalculates their cached sizes.
	"""
	if isinstance(file_list, str):
		file_list = json.loads(file_list)

	for entry in file_list:
		setup_folder_path(entry.get("name"), new_parent)

	# recalculate sizes
	for parent in (old_parent, new_parent):
		frappe.get_doc("File", parent).save()
@frappe.whitelist()
def zip_files(files: str):
	"""Stream a zip archive of the given File names as a download."""
	frappe.response.update(
		{
			"filename": "files.zip",
			"filecontent": File.zip_files(frappe.parse_json(files)),
			"type": "download",
		}
	)

View file

@ -36,6 +36,7 @@
"fieldname": "user",
"fieldtype": "Link",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "User ",
"options": "User",
"read_only": 1
@ -51,6 +52,7 @@
"fieldname": "reference_document",
"fieldtype": "Data",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Reference Document",
"read_only": 1
},
@ -129,7 +131,7 @@
}
],
"links": [],
"modified": "2022-05-03 09:34:19.337551",
"modified": "2022-06-13 05:59:26.866004",
"modified_by": "Administrator",
"module": "Core",
"name": "Access Log",

View file

@ -25,6 +25,13 @@ class ActivityLog(Document):
if self.reference_doctype and self.reference_name:
self.status = "Linked"
@staticmethod
def clear_old_logs(days=None):
if not days:
days = 90
doctype = DocType("Activity Log")
frappe.db.delete(doctype, filters=(doctype.modified < (Now() - Interval(days=days))))
def on_doctype_update():
"""Add indexes in `tabActivity Log`"""
@ -43,12 +50,3 @@ def add_authentication_log(subject, user, operation="Login", status="Success"):
"operation": operation,
}
).insert(ignore_permissions=True, ignore_links=True)
def clear_activity_logs(days=None):
"""clear 90 day old authentication logs or configured in log settings"""
if not days:
days = 90
doctype = DocType("Activity Log")
frappe.db.delete(doctype, filters=(doctype.creation < (Now() - Interval(days=days))))

View file

@ -4,5 +4,10 @@ frappe.listview_settings['Activity Log'] = {
return [__(doc.status), "green"];
else if(doc.operation == "Login" && doc.status == "Failed")
return [__(doc.status), "red"];
}
};
},
onload: function(listview) {
frappe.require("logtypes.bundle.js", () => {
frappe.utils.logtypes.show_log_retention_message(cur_list.doctype);
})
},
};

View file

@ -12,6 +12,7 @@ from frappe.utils import (
cint,
get_datetime,
get_formatted_email,
get_string_between,
list_to_str,
split_emails,
validate_email_address,
@ -21,14 +22,6 @@ if TYPE_CHECKING:
from frappe.core.doctype.communication.communication import Communication
OUTGOING_EMAIL_ACCOUNT_MISSING = _(
"""
Unable to send mail because of a missing email account.
Please setup default Email Account from Setup > Email > Email Account
"""
)
@frappe.whitelist()
def make(
doctype=None,
@ -152,7 +145,7 @@ def _make(
"reference_doctype": doctype,
"reference_name": name,
"email_template": email_template,
"message_id": get_message_id().strip(" <>"),
"message_id": get_string_between("<", get_message_id(), ">"),
"read_receipt": read_receipt,
"has_attachment": 1 if attachments else 0,
"communication_type": communication_type,
@ -169,7 +162,12 @@ def _make(
if cint(send_email):
if not comm.get_outgoing_email_account():
frappe.throw(msg=OUTGOING_EMAIL_ACCOUNT_MISSING, exc=frappe.OutgoingEmailError)
frappe.throw(
_(
"Unable to send mail because of a missing email account. Please setup default Email Account from Setup > Email > Email Account"
),
exc=frappe.OutgoingEmailError,
)
comm.send_email(
print_html=print_html,

View file

@ -152,7 +152,7 @@ class CommunicationEmailMixin:
"doctype": self.reference_doctype,
"name": self.reference_name,
"print_format": print_format,
"key": get_parent_doc(self).get_signature(),
"key": get_parent_doc(self).get_document_share_key(),
}
)

View file

@ -4,6 +4,7 @@
import io
import json
import os
import re
import timeit
from datetime import date, datetime
@ -22,6 +23,7 @@ INVALID_VALUES = ("", None)
MAX_ROWS_IN_PREVIEW = 10
INSERT = "Insert New Records"
UPDATE = "Update Existing Records"
DURATION_PATTERN = re.compile(r"^(?:(\d+d)?((^|\s)\d+h)?((^|\s)\d+m)?((^|\s)\d+s)?)$")
class Importer:
@ -725,10 +727,7 @@ class Row:
)
return
elif df.fieldtype == "Duration":
import re
is_valid_duration = re.match(r"^(?:(\d+d)?((^|\s)\d+h)?((^|\s)\d+m)?((^|\s)\d+s)?)$", value)
if not is_valid_duration:
if not DURATION_PATTERN.match(value):
self.warnings.append(
{
"row": self.row_number,

View file

@ -1,256 +1,81 @@
{
"allow_copy": 0,
"allow_import": 0,
"allow_rename": 0,
"beta": 0,
"creation": "2016-12-29 12:59:48.638970",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"actions": [],
"creation": "2016-12-29 12:59:48.638970",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"deleted_name",
"deleted_doctype",
"column_break_3",
"restored",
"new_name",
"section_break_6",
"data"
],
"fields": [
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "deleted_name",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Deleted Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
"fieldname": "deleted_name",
"fieldtype": "Data",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Deleted Name",
"read_only": 1
},
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "deleted_doctype",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Deleted DocType",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
"fieldname": "deleted_doctype",
"fieldtype": "Data",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Deleted DocType",
"read_only": 1
},
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "column_break_3",
"fieldtype": "Column Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
"fieldname": "column_break_3",
"fieldtype": "Column Break"
},
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "restored",
"fieldtype": "Check",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Restored",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
"default": "0",
"fieldname": "restored",
"fieldtype": "Check",
"in_list_view": 1,
"label": "Restored",
"read_only": 1
},
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "new_name",
"fieldtype": "Read Only",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "New Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
"fieldname": "new_name",
"fieldtype": "Read Only",
"label": "New Name"
},
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "section_break_6",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
"fieldname": "section_break_6",
"fieldtype": "Section Break"
},
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "data",
"fieldtype": "Code",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Data",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
"fieldname": "data",
"fieldtype": "Code",
"label": "Data",
"read_only": 1
}
],
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 1,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2016-12-29 14:39:45.724494",
"modified_by": "Administrator",
"module": "Core",
"name": "Deleted Document",
"name_case": "",
"owner": "Administrator",
],
"in_create": 1,
"links": [],
"modified": "2022-06-13 05:50:58.314908",
"modified_by": "Administrator",
"module": "Core",
"name": "Deleted Document",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 0,
"delete": 1,
"email": 0,
"export": 1,
"if_owner": 0,
"import": 0,
"is_custom": 0,
"permlevel": 0,
"print": 0,
"read": 1,
"report": 0,
"role": "System Manager",
"set_user_permissions": 0,
"share": 0,
"submit": 0,
"write": 0
"delete": 1,
"export": 1,
"read": 1,
"role": "System Manager"
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "deleted_name",
"track_changes": 1,
"track_seen": 0
],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "deleted_name",
"track_changes": 1
}

View file

@ -8,6 +8,7 @@ import os
# imports - standard imports
import re
import shutil
from typing import TYPE_CHECKING, Union
# imports - module imports
import frappe
@ -35,6 +36,15 @@ from frappe.query_builder.functions import Concat
from frappe.utils import cint
from frappe.website.utils import clear_cache
if TYPE_CHECKING:
from frappe.custom.doctype.customize_form.customize_form import CustomizeForm
DEPENDS_ON_PATTERN = re.compile(r'[\w\.:_]+\s*={1}\s*[\w\.@\'"]+')
ILLEGAL_FIELDNAME_PATTERN = re.compile("""['",./%@()<>{}]""")
WHITESPACE_PADDING_PATTERN = re.compile(r"^[ \t\n\r]+|[ \t\n\r]+$", flags=re.ASCII)
START_WITH_LETTERS_PATTERN = re.compile(r"^(?![\W])[^\d_\s][\w -]+$", flags=re.ASCII)
FIELD_PATTERN = re.compile("{(.*?)}", flags=re.UNICODE)
class InvalidFieldNameError(frappe.ValidationError):
pass
@ -357,8 +367,7 @@ class DocType(Document):
else:
if d.fieldname in restricted:
frappe.throw(_("Fieldname {0} is restricted").format(d.fieldname), InvalidFieldNameError)
d.fieldname = re.sub("""['",./%@()<>{}]""", "", d.fieldname)
d.fieldname = ILLEGAL_FIELDNAME_PATTERN.sub("", d.fieldname)
# fieldnames should be lowercase
d.fieldname = d.fieldname.lower()
@ -842,15 +851,13 @@ class DocType(Document):
_("Doctype name is limited to {0} characters ({1})").format(max_length, name), frappe.NameError
)
flags = {"flags": re.ASCII}
# a DocType name should not start or end with an empty space
if re.search(r"^[ \t\n\r]+|[ \t\n\r]+$", name, **flags):
if WHITESPACE_PADDING_PATTERN.search(name):
frappe.throw(_("DocType's name should not start or end with whitespace"), frappe.NameError)
# a DocType's name should not start with a number or underscore
# and should only contain letters, numbers, underscore, and hyphen
if not re.match(r"^(?![\W])[^\d_\s][\w -]+$", name, **flags):
if not START_WITH_LETTERS_PATTERN.match(name):
frappe.throw(
_(
"A DocType's name should start with a letter and can only "
@ -913,11 +920,11 @@ def validate_series(dt, autoname=None, name=None):
frappe.throw(_("Series {0} already used in {1}").format(prefix, used_in[0][0]))
def validate_autoincrement_autoname(dt: DocType) -> bool:
def validate_autoincrement_autoname(dt: Union[DocType, "CustomizeForm"]) -> bool:
"""Checks if can doctype can change to/from autoincrement autoname"""
def get_autoname_before_save(dt: DocType) -> str:
if dt.name == "Customize Form":
def get_autoname_before_save(dt: Union[DocType, "CustomizeForm"]) -> str:
if dt.doctype == "Customize Form":
property_value = frappe.db.get_value(
"Property Setter", {"doc_type": dt.doc_type, "property": "autoname"}, "value"
)
@ -940,10 +947,10 @@ def validate_autoincrement_autoname(dt: DocType) -> bool:
or (not is_autoname_autoincrement and autoname_before_save == "autoincrement")
):
if frappe.get_meta(dt.name).issingle:
if dt.name == "Customize Form":
frappe.throw(_("Cannot change to/from autoincrement autoname in Customize Form"))
if dt.doctype == "Customize Form":
frappe.throw(_("Cannot change to/from autoincrement autoname in Customize Form"))
if frappe.get_meta(dt.name).issingle:
return False
if not frappe.get_all(dt.name, limit=1):
@ -1254,7 +1261,7 @@ def validate_fields(meta):
if not pattern:
return
for fieldname in re.findall("{(.*?)}", pattern, re.UNICODE):
for fieldname in FIELD_PATTERN.findall(pattern):
if fieldname.startswith("{"):
# edge case when double curlies are used for escape
continue
@ -1336,9 +1343,7 @@ def validate_fields(meta):
]
for field in depends_on_fields:
depends_on = docfield.get(field, None)
if (
depends_on and ("=" in depends_on) and re.match(r'[\w\.:_]+\s*={1}\s*[\w\.@\'"]+', depends_on)
):
if depends_on and ("=" in depends_on) and DEPENDS_ON_PATTERN.match(depends_on):
frappe.throw(_("Invalid {0} condition").format(frappe.unscrub(field)), frappe.ValidationError)
def check_table_multiselect_option(docfield):

View file

@ -1,10 +1,14 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import random
import string
import unittest
from typing import Dict, List, Optional
from unittest.mock import patch
import frappe
from frappe.cache_manager import clear_doctype_cache
from frappe.core.doctype.doctype.doctype import (
CannotIndexedError,
DoctypeLinkError,
@ -15,8 +19,8 @@ from frappe.core.doctype.doctype.doctype import (
WrongOptionsDoctypeLinkError,
validate_links_table_fieldnames,
)
# test_records = frappe.get_test_records('DocType')
from frappe.custom.doctype.custom_field.custom_field import create_custom_fields
from frappe.desk.form.load import getdoc
class TestDocType(unittest.TestCase):
@ -628,10 +632,55 @@ class TestDocType(unittest.TestCase):
self.assertEqual(test_json.test_json_field["hello"], "world")
@patch.dict(frappe.conf, {"developer_mode": 1})
def test_delete_doctype_with_customization(self):
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
custom_field = "customfield"
doctype = new_doctype(custom=0).insert().name
# Create property setter and custom field
field = "some_fieldname"
make_property_setter(doctype, field, "default", "DELETETHIS", "Data")
create_custom_fields({doctype: [{"fieldname": custom_field, "fieldtype": "Data"}]})
# Create 1 record
original_doc = frappe.get_doc(doctype=doctype, custom_field_name="wat").insert()
self.assertEqual(original_doc.some_fieldname, "DELETETHIS")
# delete doctype
frappe.delete_doc("DocType", doctype)
clear_doctype_cache(doctype)
# "restore" doctype by inserting doctype with same schema again
new_doctype(doctype, custom=0).insert()
# Ensure basically same doctype getting "restored"
restored_doc = frappe.get_last_doc(doctype)
verify_fields = ["doctype", field, custom_field]
for f in verify_fields:
self.assertEqual(original_doc.get(f), restored_doc.get(f))
# Check form load of restored doctype
getdoc(doctype, restored_doc.name)
# ensure meta - property setter
self.assertEqual(frappe.get_meta(doctype).get_field(field).default, "DELETETHIS")
frappe.delete_doc("DocType", doctype)
def new_doctype(
name, unique: bool = False, depends_on: str = "", fields: Optional[List[Dict]] = None, **kwargs
name: Optional[str] = None,
unique: bool = False,
depends_on: str = "",
fields: Optional[List[Dict]] = None,
**kwargs,
):
if not name:
# Test prefix is required to avoid coverage
name = "Test " + "".join(random.sample(string.ascii_lowercase, 10))
doc = frappe.get_doc(
{
"doctype": "DocType",

View file

@ -0,0 +1,73 @@
// Copyright (c) 2022, Frappe Technologies and contributors
// For license information, please see license.txt
// Client-side controller for the single "Document Naming Settings" doctype.
// All state changes go through whitelisted server methods on the doc, so the
// regular Save button is disabled.
frappe.ui.form.on("Document Naming Settings", {
	refresh: function(frm) {
		frm.trigger("setup_transaction_autocomplete");
		// saving the single doc directly is not meaningful; updates happen via buttons
		frm.disable_save();
	},

	setup_transaction_autocomplete: function(frm) {
		// populate both autocomplete fields from the server in one call
		frappe.call({
			method: "get_transactions_and_prefixes",
			doc: frm.doc,
			callback: function(r) {
				frm.fields_dict.transaction_type.set_data(r.message.transactions);
				frm.fields_dict.prefix.set_data(r.message.prefixes);
			},
		});
	},

	transaction_type: function(frm) {
		frm.set_value("user_must_always_select", 0);
		frappe.call({
			method: "get_options",
			doc: frm.doc,
			callback: function(r) {
				frm.set_value("naming_series_options", r.message);
				// a leading blank option means the user is forced to pick a series
				if (r.message && r.message.split("\n")[0] == "")
					frm.set_value("user_must_always_select", 1);
			},
		});
	},

	prefix: function(frm) {
		// server method sets current_value on the doc; just re-render the field
		frappe.call({
			method: "get_current",
			doc: frm.doc,
			callback: function(r) {
				frm.refresh_field("current_value");
			},
		});
	},

	update: function(frm) {
		frappe.call({
			method: "update_series",
			doc: frm.doc,
			freeze: true,
			freeze_msg: __("Updating naming series options"),
			callback: function(r) {
				// refresh autocomplete data and options after the series list changes
				frm.trigger("setup_transaction_autocomplete");
				frm.trigger("transaction_type");
			},
		});
	},

	try_naming_series(frm) {
		frappe.call({
			method: "preview_series",
			doc: frm.doc,
			callback: function(r) {
				if (!r.exc) {
					frm.set_value("series_preview", r.message);
				} else {
					frm.set_value(
						"series_preview",
						__("Failed to generate preview of series")
					);
				}
			},
		});
	},
});

View file

@ -0,0 +1,133 @@
{
"actions": [],
"creation": "2022-05-30 07:24:07.736646",
"description": "Configure various aspects of how document naming works like naming series, current counter.",
"doctype": "DocType",
"engine": "InnoDB",
"field_order": [
"naming_series_tab",
"setup_series",
"transaction_type",
"naming_series_options",
"user_must_always_select",
"update",
"column_break_9",
"try_naming_series",
"series_preview",
"help_html",
"update_series",
"prefix",
"current_value",
"update_series_start"
],
"fields": [
{
"collapsible": 1,
"description": "Set Naming Series options on your transactions.",
"fieldname": "setup_series",
"fieldtype": "Section Break",
"label": "Setup Series for transactions"
},
{
"depends_on": "transaction_type",
"fieldname": "help_html",
"fieldtype": "HTML",
"label": "Help HTML",
"options": "<div class=\"well\">\n Edit list of Series in the box. Rules:\n <ul>\n <li>Each Series Prefix on a new line.</li>\n <li>Allowed special characters are \"/\" and \"-\"</li>\n <li>\n Optionally, set the number of digits in the series using dot (.)\n followed by hashes (#). For example, \".####\" means that the series\n will have four digits. Default is five digits.\n </li>\n <li>\n You can also use variables in the series name by putting them\n between (.) dots\n <br>\n Supported Variables:\n <ul>\n <li><code>.YYYY.</code> - Year in 4 digits</li>\n <li><code>.YY.</code> - Year in 2 digits</li>\n <li><code>.MM.</code> - Month</li>\n <li><code>.DD.</code> - Day of month</li>\n <li><code>.WW.</code> - Week of the year</li>\n <li><code>.FY.</code> - Fiscal Year</li>\n <li>\n <code>.{fieldname}.</code> - fieldname on the document e.g.\n <code>branch</code>\n </li>\n </ul>\n </li>\n </ul>\n Examples:\n <ul>\n <li>INV-</li>\n <li>INV-10-</li>\n <li>INVK-</li>\n <li>INV-.YYYY.-.{branch}.-.MM.-.####</li>\n </ul>\n</div>\n<br>\n"
},
{
"default": "0",
"depends_on": "transaction_type",
"description": "Check this if you want to force the user to select a series before saving. There will be no default if you check this.",
"fieldname": "user_must_always_select",
"fieldtype": "Check",
"label": "User must always select"
},
{
"depends_on": "transaction_type",
"fieldname": "update",
"fieldtype": "Button",
"label": "Update"
},
{
"collapsible": 1,
"description": "Change the starting / current sequence number of an existing series. <br>\n\nWarning: Incorrectly updating counters can prevent documents from getting created. ",
"fieldname": "update_series",
"fieldtype": "Section Break",
"label": "Update Series Counter"
},
{
"fieldname": "prefix",
"fieldtype": "Autocomplete",
"label": "Prefix"
},
{
"description": "This is the number of the last created transaction with this prefix",
"fieldname": "current_value",
"fieldtype": "Int",
"label": "Current Value"
},
{
"fieldname": "update_series_start",
"fieldtype": "Button",
"label": "Update Series Number",
"options": "update_series_start"
},
{
"depends_on": "transaction_type",
"fieldname": "naming_series_options",
"fieldtype": "Text",
"label": "Series List for this Transaction"
},
{
"depends_on": "transaction_type",
"description": "Generate 3 previews of names generated by any valid series.",
"fieldname": "try_naming_series",
"fieldtype": "Data",
"label": "Try a naming Series"
},
{
"fieldname": "transaction_type",
"fieldtype": "Autocomplete",
"label": "Select Transaction"
},
{
"fieldname": "column_break_9",
"fieldtype": "Column Break"
},
{
"fieldname": "naming_series_tab",
"fieldtype": "Tab Break",
"label": "Naming Series"
},
{
"fieldname": "series_preview",
"fieldtype": "Text",
"label": "Preview of generated names",
"read_only": 1
}
],
"hide_toolbar": 1,
"icon": "fa fa-sort-by-order",
"issingle": 1,
"links": [],
"modified": "2022-05-30 23:51:36.136535",
"modified_by": "Administrator",
"module": "Core",
"name": "Document Naming Settings",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"email": 1,
"print": 1,
"read": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View file

@ -0,0 +1,218 @@
# Copyright (c) 2022, Frappe Technologies and contributors
# For license information, please see license.txt
from typing import List, Set
import frappe
from frappe import _
from frappe.core.doctype.doctype.doctype import validate_series
from frappe.model.document import Document
from frappe.model.naming import NamingSeries
from frappe.permissions import get_doctypes_with_read
from frappe.utils import cint
class NamingSeriesNotSetError(frappe.ValidationError):
	# NOTE(review): not raised in this module; presumably raised by callers
	# when a required naming series is missing — confirm against usages.
	pass
class DocumentNamingSettings(Document):
	"""Single doctype for managing naming series.

	Lets System Managers configure which naming series options a transaction
	offers, preview generated names, and read/update the current counter of
	an existing series prefix.
	"""

	@frappe.whitelist()
	def get_transactions_and_prefixes(self):
		"""Return doctypes that use a naming series and all known prefixes."""
		transactions = self._get_transactions()
		prefixes = self._get_prefixes(transactions)

		return {"transactions": transactions, "prefixes": prefixes}

	def _get_transactions(self) -> List[str]:
		"""Doctypes with a `naming_series` field (standard or custom) readable by the user."""
		readable_doctypes = set(get_doctypes_with_read())

		standard = frappe.get_all("DocField", {"fieldname": "naming_series"}, "parent", pluck="parent")
		custom = frappe.get_all("Custom Field", {"fieldname": "naming_series"}, "dt", pluck="dt")

		return sorted(readable_doctypes.intersection(standard + custom))

	def _get_prefixes(self, doctypes) -> List[str]:
		"""Get all prefixes for naming series.

		- For all templates the prefix is evaluated considering today's date
		- All existing prefixes in DB are shared as-is.
		"""
		series_templates = set()
		for d in doctypes:
			try:
				options = frappe.get_meta(d).get_naming_series_options()
				series_templates.update(options)
			except frappe.DoesNotExistError:
				frappe.msgprint(_("Unable to find DocType {0}").format(d))
				continue

		# pick up hard-coded `autoname` series on doctypes outside the list above
		custom_templates = frappe.get_all(
			"DocType",
			fields=["autoname"],
			filters={
				"name": ("not in", doctypes),
				"autoname": ("like", "%.#%"),
				"module": ("not in", ["Core"]),
			},
		)
		if custom_templates:
			series_templates.update([d.autoname.rsplit(".", 1)[0] for d in custom_templates])

		return self._evaluate_and_clean_templates(series_templates)

	def _evaluate_and_clean_templates(self, series_templates: Set[str]) -> List[str]:
		"""Evaluate date-style templates to concrete prefixes; merge with prefixes already in DB."""
		evaluated_prefixes = set()

		series = frappe.qb.DocType("Series")
		prefixes_from_db = frappe.qb.from_(series).select(series.name).run(pluck=True)
		evaluated_prefixes.update(prefixes_from_db)

		for series_template in series_templates:
			prefix = NamingSeries(series_template).get_prefix()
			if "{" in prefix:
				# fieldnames can't be evaluated, rely on data in DB instead
				continue
			evaluated_prefixes.add(prefix)

		return sorted(evaluated_prefixes)

	def get_options_list(self, options: str) -> List[str]:
		"""Split a newline-separated options string into non-empty, stripped entries."""
		return [op.strip() for op in options.split("\n") if op.strip()]

	@frappe.whitelist()
	def update_series(self):
		"""Update the naming series option list for the selected transaction."""
		self.validate_set_series()
		self.check_duplicate()
		self.set_series_options_in_meta(self.transaction_type, self.naming_series_options)

		frappe.msgprint(
			_("Series Updated for {}").format(self.transaction_type), alert=True, indicator="green"
		)

	def validate_set_series(self):
		if self.transaction_type and not self.naming_series_options:
			frappe.throw(_("Please set the series to be used."))

	def set_series_options_in_meta(self, doctype: str, options: str) -> None:
		"""Persist the series options and default via property setters on *doctype*."""
		options = self.get_options_list(options)

		# validate names
		for series in options:
			self.validate_series_name(series)

		# a leading blank option forces the user to pick a series explicitly
		if options and self.user_must_always_select:
			options = [""] + options

		default = options[0] if options else ""
		option_string = "\n".join(options)

		self.update_naming_series_property_setter(doctype, "options", option_string)
		self.update_naming_series_property_setter(doctype, "default", default)

		self.naming_series_options = option_string

		frappe.clear_cache(doctype=doctype)

	def update_naming_series_property_setter(self, doctype, property, value):
		from frappe.custom.doctype.property_setter.property_setter import make_property_setter

		make_property_setter(doctype, "naming_series", property, value, "Text")

	def check_duplicate(self):
		"""Throw if any new series (ignoring trailing '#') is already used by another doctype."""

		def stripped_series(s: str) -> str:
			return s.strip().rstrip("#")

		standard = frappe.get_all("DocField", {"fieldname": "naming_series"}, "parent", pluck="parent")
		custom = frappe.get_all("Custom Field", {"fieldname": "naming_series"}, "dt", pluck="dt")

		all_doctypes_with_naming_series = set(standard + custom)
		all_doctypes_with_naming_series.remove(self.transaction_type)

		existing_series = {}
		for doctype in all_doctypes_with_naming_series:
			for series in frappe.get_meta(doctype).get_naming_series_options():
				existing_series[stripped_series(series)] = doctype

		dt = frappe.get_doc("DocType", self.transaction_type)

		for series in self.get_options_list(self.naming_series_options):
			key = stripped_series(series)
			if key in existing_series:
				# BUGFIX: look up with the stripped key — the raw series may carry
				# trailing '#' characters and would previously raise KeyError here.
				frappe.throw(_("Series {0} already used in {1}").format(series, existing_series[key]))
			validate_series(dt, series)

	def validate_series_name(self, series):
		NamingSeries(series).validate()

	@frappe.whitelist()
	def get_options(self, doctype=None):
		"""Return the newline-separated series options of *doctype* (or the selected transaction)."""
		doctype = doctype or self.transaction_type
		if not doctype:
			return
		# fetch the meta field once instead of repeating the lookup
		naming_series_field = frappe.get_meta(doctype).get_field("naming_series")
		if naming_series_field:
			return naming_series_field.options

	@frappe.whitelist()
	def get_current(self):
		"""get series current"""
		if self.prefix:
			self.current_value = NamingSeries(self.prefix).get_current_value()
		return self.current_value

	@frappe.whitelist()
	def update_series_start(self):
		"""Set the counter of the selected prefix to `current_value`, with a version log."""
		frappe.only_for("System Manager")

		if not self.prefix:
			frappe.throw(_("Please select prefix first"))

		naming_series = NamingSeries(self.prefix)
		previous_value = naming_series.get_current_value()
		naming_series.update_counter(self.current_value)

		self.create_version_log_for_change(
			naming_series.get_prefix(), previous_value, self.current_value
		)

		frappe.msgprint(
			_("Series counter for {} updated to {} successfully").format(self.prefix, self.current_value),
			alert=True,
			indicator="green",
		)

	def create_version_log_for_change(self, series, old, new):
		version = frappe.new_doc("Version")
		version.ref_doctype = "Series"
		version.docname = series
		version.data = frappe.as_json({"changed": [["current", old, new]]})
		version.flags.ignore_links = True  # series is not a "real" doctype
		version.flags.ignore_permissions = True
		version.insert()

	@frappe.whitelist()
	def preview_series(self) -> str:
		"""Preview what the naming series will generate."""
		series = self.try_naming_series
		if not series:
			return ""

		try:
			doc = self._fetch_last_doc_if_available()
			return "\n".join(NamingSeries(series).get_preview(doc=doc))
		except Exception as e:
			if frappe.message_log:
				frappe.message_log.pop()
			return _("Failed to generate names from the series") + f"\n{str(e)}"

	def _fetch_last_doc_if_available(self):
		"""Fetch last doc for evaluating naming series with fields."""
		try:
			return frappe.get_last_doc(self.transaction_type)
		except Exception:
			return None

View file

@ -0,0 +1,65 @@
# Copyright (c) 2022, Frappe Technologies and Contributors
# See license.txt
import frappe
from frappe.core.doctype.document_naming_settings.document_naming_settings import (
DocumentNamingSettings,
)
from frappe.model.naming import NamingSeries, get_default_naming_series
from frappe.tests.utils import FrappeTestCase
from frappe.utils import cint
class TestNamingSeries(FrappeTestCase):
	"""Tests for the Document Naming Settings single doctype."""

	def setUp(self):
		self.dns: DocumentNamingSettings = frappe.get_doc("Document Naming Settings")

	def tearDown(self):
		frappe.db.rollback()

	def get_valid_serieses(self):
		hardcoded = ["SINV-", "SI-.{field}.", "SI-#.###", ""]
		return hardcoded + self.dns.get_transactions_and_prefixes()["prefixes"]

	def test_naming_preview(self):
		self.dns.transaction_type = "Webhook"

		self.dns.try_naming_series = "AXBZ.####"
		previews = self.dns.preview_series().split("\n")
		self.assertEqual(["AXBZ0001", "AXBZ0002", "AXBZ0003"], previews)

		# a field-based template should still produce a preview without raising
		self.dns.try_naming_series = "AXBZ-.{currency}.-"
		self.dns.preview_series().split("\n")

	def test_get_transactions(self):
		naming_info = self.dns.get_transactions_and_prefixes()
		self.assertIn("Webhook", naming_info["transactions"])

		webhook_options = frappe.get_meta("Webhook").get_field("naming_series").options
		for option in webhook_options.split("\n"):
			self.assertIn(NamingSeries(option).get_prefix(), naming_info["prefixes"])

	def test_default_naming_series(self):
		self.assertIn("HOOK", get_default_naming_series("Webhook"))
		self.assertIsNone(get_default_naming_series("DocType"))

	def test_updates_naming_options(self):
		self.dns.transaction_type = "Webhook"
		new_series = "KOOHBEW.###"
		self.dns.naming_series_options = self.dns.get_options() + "\n" + new_series
		self.dns.update_series()
		self.assertIn(new_series, frappe.get_meta("Webhook").get_naming_series_options())

	def test_update_series_counter(self):
		for series in self.get_valid_serieses():
			if not series:
				continue
			self.dns.prefix = series
			current = cint(self.dns.get_current())

			self.dns.current_value = current + 1
			self.dns.update_series_start()

			self.assertEqual(self.dns.get_current(), current + 1, f"Incorrect update for {series}")

View file

@ -0,0 +1,8 @@
// Copyright (c) 2021, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Document Share Key', {
	// No client-side behavior needed: keys are generated and expired
	// server-side; this form is read-only.
	// refresh: function(frm) {
	// }
});

View file

@ -0,0 +1,73 @@
{
"actions": [],
"allow_rename": 1,
"autoname": "hash",
"creation": "2022-01-14 13:40:49.487646",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"reference_doctype",
"reference_docname",
"key",
"expires_on"
],
"fields": [
{
"fieldname": "reference_doctype",
"fieldtype": "Link",
"label": "Reference Document Type",
"options": "DocType",
"read_only": 1,
"search_index": 1
},
{
"fieldname": "reference_docname",
"fieldtype": "Dynamic Link",
"label": "Reference Document Name",
"options": "reference_doctype",
"read_only": 1,
"search_index": 1
},
{
"fieldname": "key",
"fieldtype": "Data",
"label": "Key",
"read_only": 1
},
{
"fieldname": "expires_on",
"fieldtype": "Date",
"in_list_view": 1,
"label": "Expires On",
"read_only": 1
}
],
"in_create": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2022-01-14 13:57:28.050678",
"modified_by": "Administrator",
"module": "Core",
"name": "Document Share Key",
"naming_rule": "Expression",
"owner": "Administrator",
"permissions": [
{
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"share": 1,
"write": 1
}
],
"read_only": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View file

@ -0,0 +1,20 @@
# Copyright (c) 2021, Frappe Technologies and contributors
# For license information, please see license.txt
from random import randrange
import frappe
from frappe.model.document import Document
class DocumentShareKey(Document):
	"""Random key that grants time-limited web access to a shared document."""

	def before_insert(self):
		# random key length (25-34) makes brute-force guessing harder
		self.key = frappe.generate_hash(length=randrange(25, 35))
		if not self.expires_on and not self.flags.no_expiry:
			# default expiry: system setting, falling back to 90 days from today
			self.expires_on = frappe.utils.add_days(
				None, days=frappe.get_system_settings("document_share_key_expiry") or 90
			)
def is_expired(expires_on):
	"""Return whether a share-key expiry date has passed.

	A falsy ``expires_on`` (no expiry configured) is returned as-is,
	preserving the truthiness short-circuit of the boolean expression.
	"""
	if not expires_on:
		return expires_on
	return expires_on < frappe.utils.getdate()

View file

@ -0,0 +1,9 @@
# Copyright (c) 2021, Frappe Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestDocumentShareKey(unittest.TestCase):
	# Placeholder: key behavior is exercised indirectly via document
	# sharing / web view tests.
	pass

View file

@ -1,8 +1,17 @@
// Copyright (c) 2016, Frappe Technologies and contributors
// Copyright (c) 2022, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Error Log', {
frappe.ui.form.on("Error Log", {
refresh: function(frm) {
frm.disable_save();
}
if (frm.doc.reference_doctype && frm.doc.reference_name) {
frm.add_custom_button(__("Show Related Errors"), function() {
frappe.set_route("List", "Error Log", {
reference_doctype: frm.doc.reference_doctype,
reference_name: frm.doc.reference_name,
});
});
}
},
});

View file

@ -6,10 +6,12 @@
"engine": "MyISAM",
"field_order": [
"seen",
"method",
"error",
"reference_doctype",
"reference_name"
"column_break_3",
"reference_name",
"section_break_5",
"method",
"error"
],
"fields": [
{
@ -47,12 +49,21 @@
"fieldtype": "Data",
"label": "Reference Name",
"read_only": 1
},
{
"fieldname": "column_break_3",
"fieldtype": "Column Break"
},
{
"fieldname": "section_break_5",
"fieldtype": "Section Break"
}
],
"icon": "fa fa-warning-sign",
"idx": 1,
"in_create": 1,
"links": [],
"modified": "2022-05-19 05:32:16.026684",
"modified": "2022-06-13 06:34:05.158606",
"modified_by": "Administrator",
"module": "Core",
"name": "Error Log",
@ -70,7 +81,6 @@
"write": 1
}
],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "ASC",
"states": [],

View file

@ -4,6 +4,8 @@
import frappe
from frappe.model.document import Document
from frappe.query_builder import Interval
from frappe.query_builder.functions import Now
class ErrorLog(Document):
@ -12,13 +14,10 @@ class ErrorLog(Document):
self.db_set("seen", 1, update_modified=0)
frappe.db.commit()
def set_old_logs_as_seen():
# set logs as seen
frappe.db.sql(
"""UPDATE `tabError Log` SET `seen`=1
WHERE `seen`=0 AND `creation` < (NOW() - INTERVAL '7' DAY)"""
)
@staticmethod
def clear_old_logs(days=30):
table = frappe.qb.DocType("Error Log")
frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))
@frappe.whitelist()

View file

@ -1,7 +1,7 @@
frappe.listview_settings['Error Log'] = {
frappe.listview_settings["Error Log"] = {
add_fields: ["seen"],
get_indicator: function(doc) {
if(cint(doc.seen)) {
if (cint(doc.seen)) {
return [__("Seen"), "green", "seen,=,1"];
} else {
return [__("Not Seen"), "red", "seen,=,0"];
@ -11,11 +11,15 @@ frappe.listview_settings['Error Log'] = {
onload: function(listview) {
listview.page.add_menu_item(__("Clear Error Logs"), function() {
frappe.call({
method:'frappe.core.doctype.error_log.error_log.clear_error_logs',
method: "frappe.core.doctype.error_log.error_log.clear_error_logs",
callback: function() {
listview.refresh();
}
},
});
});
}
frappe.require("logtypes.bundle.js", () => {
frappe.utils.logtypes.show_log_retention_message(cur_list.doctype);
})
},
};

View file

@ -4,6 +4,8 @@
import frappe
from frappe.model.document import Document
from frappe.query_builder import Interval
from frappe.query_builder.functions import Now
class ErrorSnapshot(Document):
@ -32,3 +34,8 @@ class ErrorSnapshot(Document):
frappe.db.set_value("Error Snapshot", parent["name"], "relapses", parent["relapses"] + 1)
if parent["seen"]:
frappe.db.set_value("Error Snapshot", parent["name"], "seen", False)
@staticmethod
def clear_old_logs(days=30):
table = frappe.qb.DocType("Error Snapshot")
frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))

View file

@ -10,5 +10,10 @@ frappe.listview_settings["Error Snapshot"] = {
} else {
return [__("First Level"), !doc.seen ? "red" : "green", "parent_error_snapshot,=,"];
}
}
},
onload: function(listview) {
frappe.require("logtypes.bundle.js", () => {
frappe.utils.logtypes.show_log_retention_message(cur_list.doctype);
})
},
}

View file

@ -0,0 +1,2 @@
from .exceptions import *
from .utils import *

View file

@ -0,0 +1,12 @@
import frappe
class MaxFileSizeReachedError(frappe.ValidationError):
	"""Raised when an uploaded file exceeds the configured size limit."""

	pass


class FolderNotEmpty(frappe.ValidationError):
	"""Raised when deleting a File folder that still contains children."""

	pass
from frappe.exceptions import *

View file

@ -9,19 +9,9 @@ frappe.ui.form.on("File", "refresh", function(frm) {
}, "fa fa-download");
}
var wrapper = frm.get_field("preview_html").$wrapper;
var is_viewable = frappe.utils.is_image_file(frm.doc.file_url);
frm.toggle_display("preview", is_viewable);
frm.toggle_display("preview_html", is_viewable);
if(is_viewable){
wrapper.html('<div class="img_preview">\
<img class="img-responsive" src="'+frm.doc.file_url+'"></img>\
</div>');
} else {
wrapper.empty();
}
frm.get_field("preview_html").$wrapper.html(`<div class="img_preview">
<img class="img-responsive" src="${frm.doc.file_url}" onerror="cur_frm.toggle_display('preview', false)" />
</div>`);
var is_raster_image = (/\.(gif|jpg|jpeg|tiff|png)$/i).test(frm.doc.file_url);
var is_optimizable = !frm.doc.is_folder && is_raster_image && frm.doc.file_size > 0;
@ -38,7 +28,7 @@ frappe.ui.form.on("File", "refresh", function(frm) {
if(frm.doc.file_name && frm.doc.file_name.split('.').splice(-1)[0]==='zip') {
frm.add_custom_button(__('Unzip'), function() {
frappe.call({
method: "frappe.core.doctype.file.file.unzip_file",
method: "frappe.core.api.file.unzip_file",
args: {
name: frm.doc.name,
},

File diff suppressed because it is too large Load diff

View file

@ -4,18 +4,25 @@ import base64
import json
import os
import unittest
from contextlib import contextmanager
from typing import TYPE_CHECKING
import frappe
from frappe import _
from frappe.core.doctype.file.file import (
File,
from frappe.core.api.file import (
create_new_folder,
get_attached_images,
get_files_in_folder,
move_file,
unzip_file,
)
from frappe.exceptions import ValidationError
from frappe.tests.utils import FrappeTestCase
from frappe.utils import get_files_path
if TYPE_CHECKING:
from frappe.core.doctype.file.file import File
test_content1 = "Hello"
test_content2 = "Hello World"
@ -28,7 +35,25 @@ def make_test_doc():
return d.doctype, d.name
class TestSimpleFile(unittest.TestCase):
@contextmanager
def make_test_image_file():
	"""Yield a freshly inserted sample JPEG File doc; always deletes it on exit."""
	file_path = frappe.get_app_path("frappe", "tests/data/sample_image_for_optimization.jpg")
	with open(file_path, "rb") as f:
		file_content = f.read()

	test_file = frappe.get_doc(
		{"doctype": "File", "file_name": "sample_image_for_optimization.jpg", "content": file_content}
	).insert()
	# remove those flags
	_test_file: "File" = frappe.get_doc("File", test_file.name)

	try:
		yield _test_file
	finally:
		_test_file.delete()
class TestSimpleFile(FrappeTestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content = test_content1
@ -50,11 +75,11 @@ class TestSimpleFile(unittest.TestCase):
self.assertEqual(content, self.test_content)
class TestBase64File(unittest.TestCase):
class TestBase64File(FrappeTestCase):
def setUp(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content = base64.b64encode(test_content1.encode("utf-8"))
_file = frappe.get_doc(
_file: "File" = frappe.get_doc(
{
"doctype": "File",
"file_name": "test_base64.txt",
@ -73,7 +98,7 @@ class TestBase64File(unittest.TestCase):
self.assertEqual(content, test_content1)
class TestSameFileName(unittest.TestCase):
class TestSameFileName(FrappeTestCase):
def test_saved_content(self):
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
self.test_content1 = test_content1
@ -133,7 +158,7 @@ class TestSameFileName(unittest.TestCase):
self.assertEqual(_file.get_content(), test_content2)
class TestSameContent(unittest.TestCase):
class TestSameContent(FrappeTestCase):
def setUp(self):
self.attached_to_doctype1, self.attached_to_docname1 = make_test_doc()
self.attached_to_doctype2, self.attached_to_docname2 = make_test_doc()
@ -201,7 +226,7 @@ class TestSameContent(unittest.TestCase):
frappe.clear_cache(doctype="ToDo")
class TestFile(unittest.TestCase):
class TestFile(FrappeTestCase):
def setUp(self):
frappe.set_user("Administrator")
self.delete_test_data()
@ -327,7 +352,7 @@ class TestFile(unittest.TestCase):
_file.save()
folder = frappe.get_doc("File", "Home/Test Folder 1/Test Folder 3")
self.assertRaises(frappe.ValidationError, folder.delete)
self.assertRaises(ValidationError, folder.delete)
def test_same_file_url_update(self):
attached_to_doctype1, attached_to_docname1 = make_test_doc()
@ -373,38 +398,35 @@ class TestFile(unittest.TestCase):
{
"doctype": "File",
"file_name": "parent_dir.txt",
"attached_to_doctype": "",
"attached_to_name": "",
"is_private": 1,
"content": test_content1,
}
).insert()
file1.file_url = "/private/files/../test.txt"
self.assertRaises(frappe.exceptions.ValidationError, file1.save)
self.assertRaises(ValidationError, file1.save)
# No validation to see if file exists
file1.reload()
file1.file_url = "/private/files/parent_dir2.txt"
file1.save()
self.assertRaises(OSError, file1.save)
def test_file_url_validation(self):
test_file = frappe.get_doc(
{"doctype": "File", "file_name": "logo", "file_url": "https://frappe.io/files/frappe.png"}
)
test_file: "File" = frappe.new_doc("File")
test_file.update({"file_name": "logo", "file_url": "https://frappe.io/files/frappe.png"})
self.assertIsNone(test_file.validate())
# bad path
test_file.file_url = "/usr/bin/man"
self.assertRaisesRegex(
frappe.exceptions.ValidationError, "URL must start with http:// or https://", test_file.validate
ValidationError, f"Cannot access file path {test_file.file_url}", test_file.validate
)
test_file.file_url = None
test_file.file_name = "/usr/bin/man"
self.assertRaisesRegex(
frappe.exceptions.ValidationError, "There is some problem with the file url", test_file.validate
ValidationError, "There is some problem with the file url", test_file.validate
)
test_file.file_url = None
@ -413,11 +435,11 @@ class TestFile(unittest.TestCase):
test_file.file_url = None
test_file.file_name = "/private/files/_file"
self.assertRaisesRegex(IOError, "does not exist", test_file.validate)
self.assertRaisesRegex(ValidationError, "File name cannot have", test_file.validate)
def test_make_thumbnail(self):
# test web image
test_file: File = frappe.get_doc(
test_file: "File" = frappe.get_doc(
{
"doctype": "File",
"file_name": "logo",
@ -486,37 +508,36 @@ class TestFile(unittest.TestCase):
"file_url": frappe.utils.get_url("/_test/assets/image.jpg"),
}
).insert(ignore_permissions=True)
self.assertRaisesRegex(frappe.exceptions.ValidationError, "not a zip file", test_file.unzip)
self.assertRaisesRegex(ValidationError, "not a zip file", test_file.unzip)
class TestAttachment(unittest.TestCase):
test_doctype = "Test For Attachment"
def setUp(self):
if frappe.db.exists("DocType", self.test_doctype):
return
@classmethod
def setUpClass(cls):
frappe.get_doc(
doctype="DocType",
name=self.test_doctype,
name=cls.test_doctype,
module="Custom",
custom=1,
fields=[
{"label": "Title", "fieldname": "title", "fieldtype": "Data"},
{"label": "Attachment", "fieldname": "attachment", "fieldtype": "Attach"},
],
).insert()
).insert(ignore_if_duplicate=True)
def tearDown(self):
frappe.delete_doc("DocType", self.test_doctype)
@classmethod
def tearDownClass(cls):
frappe.db.rollback()
frappe.delete_doc("DocType", cls.test_doctype)
def test_file_attachment_on_update(self):
doc = frappe.get_doc(doctype=self.test_doctype, title="test for attachment on update").insert()
file = frappe.get_doc(
{"doctype": "File", "file_name": "test_attach.txt", "content": "Test Content"}
)
file.save()
).save()
doc.attachment = file.file_url
doc.save()
@ -535,9 +556,11 @@ class TestAttachment(unittest.TestCase):
self.assertTrue(exists)
class TestAttachmentsAccess(unittest.TestCase):
def test_attachments_access(self):
class TestAttachmentsAccess(FrappeTestCase):
def setUp(self) -> None:
frappe.db.delete("File", {"is_folder": 0})
def test_attachments_access(self):
frappe.set_user("test4@example.com")
self.attached_to_doctype, self.attached_to_docname = make_test_doc()
@ -600,11 +623,12 @@ class TestAttachmentsAccess(unittest.TestCase):
self.assertIn("test_user.txt", system_manager_attachments_files)
self.assertIn("test_user.txt", user_attachments_files)
def tearDown(self) -> None:
frappe.set_user("Administrator")
frappe.db.rollback()
class TestFileUtils(unittest.TestCase):
class TestFileUtils(FrappeTestCase):
def test_extract_images_from_doc(self):
# with filename in data URI
todo = frappe.get_doc(
@ -628,30 +652,22 @@ class TestFileUtils(unittest.TestCase):
self.assertIn(f'<img src="{frappe.get_doc("File", filename).file_url}', todo.description)
def test_create_new_folder(self):
from frappe.core.doctype.file.file import create_new_folder
folder = create_new_folder("test_folder", "Home")
self.assertTrue(folder.is_folder)
class TestFileOptimization(unittest.TestCase):
class TestFileOptimization(FrappeTestCase):
def test_optimize_file(self):
file_path = frappe.get_app_path("frappe", "tests/data/sample_image_for_optimization.jpg")
with open(file_path, "rb") as f:
file_content = f.read()
test_file = frappe.get_doc(
{"doctype": "File", "file_name": "sample_image_for_optimization.jpg", "content": file_content}
).insert()
original_size = test_file.file_size
original_content_hash = test_file.content_hash
with make_test_image_file() as test_file:
original_size = test_file.file_size
original_content_hash = test_file.content_hash
test_file.optimize_file()
optimized_size = test_file.file_size
updated_content_hash = test_file.content_hash
test_file.optimize_file()
optimized_size = test_file.file_size
updated_content_hash = test_file.content_hash
self.assertLess(optimized_size, original_size)
self.assertNotEqual(original_content_hash, updated_content_hash)
test_file.delete()
self.assertLess(optimized_size, original_size)
self.assertNotEqual(original_content_hash, updated_content_hash)
def test_optimize_svg(self):
file_path = frappe.get_app_path("frappe", "tests/data/sample_svg.svg")
@ -675,17 +691,11 @@ class TestFileOptimization(unittest.TestCase):
self.assertRaises(TypeError, test_folder.optimize_file)
def test_revert_optimized_file_on_rollback(self):
file_path = frappe.get_app_path("frappe", "tests/data/sample_image_for_optimization.jpg")
with open(file_path, "rb") as f:
file_content = f.read()
test_file = frappe.get_doc(
{"doctype": "File", "file_name": "sample_image_for_optimization.jpg", "content": file_content}
).insert()
image_path = test_file.get_full_path()
size_before_optimization = os.stat(image_path).st_size
with make_test_image_file() as test_file:
image_path = test_file.get_full_path()
size_before_optimization = os.stat(image_path).st_size
test_file.optimize_file()
frappe.db.rollback()
size_after_rollback = os.stat(image_path).st_size
test_file.optimize_file()
frappe.db.rollback()
size_after_rollback = os.stat(image_path).st_size
self.assertEqual(size_before_optimization, size_after_rollback)
test_file.delete()
self.assertEqual(size_before_optimization, size_after_rollback)

View file

@ -0,0 +1,340 @@
import hashlib
import imghdr
import mimetypes
import os
import re
from io import BytesIO
from typing import TYPE_CHECKING, Optional, Tuple, Union
from urllib.parse import unquote
import requests
import requests.exceptions
from PIL import Image
import frappe
from frappe import _, safe_decode
from frappe.utils import cstr, encode, get_files_path, random_string, strip
from frappe.utils.file_manager import safe_b64decode
from frappe.utils.image import optimize_image
if TYPE_CHECKING:
from PIL.ImageFile import ImageFile
from requests.models import Response
from frappe.model.document import Document
from .file import File
def make_home_folder() -> None:
	"""Create the root "Home" folder and its "Attachments" subfolder.

	Idempotent: insert(ignore_if_duplicate=True) is a no-op when they exist.
	"""
	home = frappe.get_doc(
		{"doctype": "File", "is_folder": 1, "is_home_folder": 1, "file_name": _("Home")}
	).insert(ignore_if_duplicate=True)

	frappe.get_doc(
		{
			"doctype": "File",
			"folder": home.name,
			"is_folder": 1,
			"is_attachments_folder": 1,
			"file_name": _("Attachments"),
		}
	).insert(ignore_if_duplicate=True)
def setup_folder_path(filename: str, new_parent: str) -> None:
	"""Move File *filename* under folder *new_parent*.

	Folders are additionally renamed so their document name reflects the
	new parent path.
	"""
	file: "File" = frappe.get_doc("File", filename)
	file.folder = new_parent
	file.save()

	if file.is_folder:
		# local import — presumably avoids a circular import; confirm
		from frappe.model.rename_doc import rename_doc

		rename_doc("File", file.name, file.get_name_based_on_parent_folder(), ignore_permissions=True)
def get_extension(
	filename,
	extn: Optional[str] = None,
	content: Optional[bytes] = None,
	response: Optional["Response"] = None,
) -> str:
	"""Detect a file extension from, in priority order: an HTTP response's
	Content-Type header, the given extension hint, or the raw image bytes.

	Args:
		filename: file name without extension.
		extn: candidate extension; query-string junk ("jpg?v=1") is stripped.
		content: raw file bytes, used to sniff image headers as a last resort.
		response: HTTP response whose Content-Type, when present, wins outright.

	Returns:
		The extension without a leading dot, or None when undetectable.
	"""
	mimetype = None

	if response:
		content_type = response.headers.get("Content-Type")

		if content_type:
			_extn = mimetypes.guess_extension(content_type)

			if _extn:
				return _extn[1:]

	if extn:
		# remove '?' char and parameters from extn if present
		if "?" in extn:
			extn = extn.split("?", 1)[0]

		mimetype = mimetypes.guess_type(filename + "." + extn)[0]

	# Sniff the bytes only when we actually have them. The previous
	# `A or B and C` precedence entered this branch with content=None
	# whenever the mimetype was unknown, making imghdr.what() try to open
	# `filename` from disk (typically raising FileNotFoundError).
	if (mimetype is None or not mimetype.startswith("image/")) and content:
		# detect file extension by reading image header properties;
		# imported lazily because imghdr was removed from the stdlib in 3.13
		import imghdr

		extn = imghdr.what(filename + "." + (extn or ""), h=content)

	return extn
def get_local_image(file_url: str) -> Tuple["ImageFile", str, str]:
	"""Open an image stored on this site and return (image, filename, extension).

	Args:
		file_url: site-relative URL, e.g. "/private/files/x.jpg" or "/files/x.jpg".

	Raises:
		via frappe.throw when PIL cannot read the file.
	"""
	if file_url.startswith("/private"):
		file_url_path = (file_url.lstrip("/"),)
	else:
		# public files live under the site's "public" directory
		file_url_path = ("public", file_url.lstrip("/"))

	file_path = frappe.get_site_path(*file_url_path)

	try:
		image = Image.open(file_path)
	except IOError:
		frappe.throw(_("Unable to read file format for {0}").format(file_url))

	content = None

	try:
		filename, extn = file_url.rsplit(".", 1)
	except ValueError:
		# no extn
		# NOTE(review): opened in text mode although get_extension sniffs
		# image headers from bytes — confirm intended for extension-less files
		with open(file_path, "r") as f:
			content = f.read()

		filename = file_url
		extn = None

	extn = get_extension(filename, extn, content)

	return image, filename, extn
def get_web_image(file_url: str) -> Tuple["ImageFile", str, str]:
	"""Download an image from *file_url* and return (image, filename, extension).

	On HTTP failure a user-facing message is shown and the HTTPError re-raised.
	"""
	# download
	file_url = frappe.utils.get_url(file_url)
	r = requests.get(file_url, stream=True)
	try:
		r.raise_for_status()
	except requests.exceptions.HTTPError as e:
		if "404" in e.args[0]:
			frappe.msgprint(_("File '{0}' not found").format(file_url))
		else:
			frappe.msgprint(_("Unable to read file format for {0}").format(file_url))
		raise

	try:
		image = Image.open(BytesIO(r.content))
	except Exception as e:
		frappe.msgprint(_("Image link '{0}' is not valid").format(file_url), raise_exception=e)

	try:
		# take the last URL segment and split off its extension
		filename, extn = file_url.rsplit("/", 1)[1].rsplit(".", 1)
	except ValueError:
		# the case when the file url doesn't have filename or extension
		# but is fetched due to a query string. example: https://encrypted-tbn3.gstatic.com/images?q=something
		filename = get_random_filename()
		extn = None

	extn = get_extension(filename, extn, r.content)
	filename = "/files/" + strip(unquote(filename))

	return image, filename, extn
def delete_file(path: str) -> None:
	"""Delete file from `public folder`"""
	if path:
		# refuse any path containing a traversal segment
		if ".." in path.split("/"):
			frappe.throw(
				_("It is risky to delete this file: {0}. Please contact your System Manager.").format(path)
			)

		# only the basename is used; the directory is derived from whether
		# the original path pointed at public "files" or private storage
		parts = os.path.split(path.strip("/"))
		if parts[0] == "files":
			path = frappe.utils.get_site_path("public", "files", parts[-1])
		else:
			path = frappe.utils.get_site_path("private", "files", parts[-1])

		path = encode(path)
		if os.path.exists(path):
			os.remove(path)
def remove_file_by_url(file_url: str, doctype: str = None, name: str = None) -> "Document":
	"""Delete the File document matching *file_url*.

	When *doctype* and *name* are given, the lookup is scoped to that
	attachment; otherwise any File with this URL matches. Returns the
	result of remove_file, or None when no match was found.
	"""
	if doctype and name:
		fid = frappe.db.get_value(
			"File", {"file_url": file_url, "attached_to_doctype": doctype, "attached_to_name": name}
		)
	else:
		fid = frappe.db.get_value("File", {"file_url": file_url})

	if fid:
		# local import — presumably avoids a circular import; confirm
		from frappe.utils.file_manager import remove_file

		return remove_file(fid=fid)
def get_content_hash(content: Union[bytes, str]) -> str:
	"""Return the MD5 hex digest of *content*; str input is UTF-8 encoded first."""
	data = content.encode() if isinstance(content, str) else content
	return hashlib.md5(data).hexdigest()  # nosec
def generate_file_name(name: str, suffix: Optional[str] = None, is_private: bool = False) -> str:
	"""Generate conflict-free file name. Suffix will be ignored if name available. If the
	provided suffix doesn't result in an available path, a random suffix will be picked.
	"""

	def path_exists(name, is_private):
		# check against the actual on-disk files path for this site
		return os.path.exists(encode(get_files_path(name, is_private=is_private)))

	if not path_exists(name, is_private):
		return name

	candidate_path = get_file_name(name, suffix)

	if path_exists(candidate_path, is_private):
		# suffixed candidate is also taken: recurse with a fresh random suffix
		return generate_file_name(name, is_private=is_private)
	return candidate_path
def get_file_name(fname: str, optional_suffix: Optional[str] = None) -> str:
	"""Return *fname* with a suffix inserted before its extension.

	When *optional_suffix* is falsy, a random 6-character hash is used.
	"""
	# coerce to unicode before splitting
	stem, extension = os.path.splitext(cstr(fname))
	chosen_suffix = optional_suffix or frappe.generate_hash(length=6)
	return f"{stem}{chosen_suffix}{extension}"
def extract_images_from_doc(doc: "Document", fieldname: str):
	"""Replace inline data-URI images in *doc.fieldname* with stored File URLs."""
	content = doc.get(fieldname)
	content = extract_images_from_html(doc, content)
	# extract_images_from_html sets this flag when at least one image was saved
	if frappe.flags.has_dataurl:
		doc.set(fieldname, content)
def extract_images_from_html(doc: "Document", content: str, is_private: bool = False):
	"""Save every inline `<img src="data:...">` in *content* as a File document
	and rewrite each tag to reference the stored file's URL.

	Side effect: sets frappe.flags.has_dataurl when at least one image was
	extracted. Returns the (possibly rewritten) content.
	"""
	frappe.flags.has_dataurl = False

	def _save_file(match):
		# match.group(1) is the full data URI: "data:<mime>[;params],<payload>"
		data = match.group(1).split("data:")[1]
		headers, content = data.split(",")
		mtype = headers.split(";")[0]

		if isinstance(content, str):
			content = content.encode("utf-8")
		if b"," in content:
			content = content.split(b",")[1]

		content = safe_b64decode(content)

		content = optimize_image(content, mtype)

		if "filename=" in headers:
			filename = headers.split("filename=")[-1]
			filename = safe_decode(filename).split(";")[0]
		else:
			filename = get_random_filename(content_type=mtype)

		# child-table rows attach the file to their parent document
		if doc.meta.istable:
			doctype = doc.parenttype
			name = doc.parent
		else:
			doctype = doc.doctype
			name = doc.name

		_file = frappe.get_doc(
			{
				"doctype": "File",
				"file_name": filename,
				"attached_to_doctype": doctype,
				"attached_to_name": name,
				"content": content,
				"decode": False,
				"is_private": is_private,
			}
		)
		_file.save(ignore_permissions=True)
		file_url = _file.file_url
		frappe.flags.has_dataurl = True

		# the regex consumed only up to src="..."; the tag's remaining
		# attributes follow this replacement unchanged
		return f'<img src="{file_url}"'

	if content and isinstance(content, str):
		content = re.sub(r'<img[^>]*src\s*=\s*["\'](?=data:)(.*?)["\']', _save_file, content)

	return content
def get_random_filename(content_type: str = None) -> str:
	"""Return a random 7-character filename, appending an extension guessed
	from *content_type* when one can be determined."""
	extension = mimetypes.guess_extension(content_type) if content_type else None
	return random_string(7) + (extension or "")
def update_existing_file_docs(doc: "File") -> None:
	# Update is private and file url of all file docs that point to the same file
	# (matched by content_hash) so duplicates stay consistent after a
	# move or privacy change.
	file_doctype = frappe.qb.DocType("File")

	(
		frappe.qb.update(file_doctype)
		.set(file_doctype.file_url, doc.file_url)
		.set(file_doctype.is_private, doc.is_private)
		.where(file_doctype.content_hash == doc.content_hash)
		.where(file_doctype.name != doc.name)
	).run()
def attach_files_to_document(doc: "File", event) -> None:
	"""Runs on on_update hook of all documents.

	Goes through every Attach and Attach Image field and attaches
	the file url to the document if it is not already attached.

	Args:
		doc: document being updated (any doctype; hook target).
		event: hook event name (unused; required by the hook signature).
	"""

	attach_fields = doc.meta.get("fields", {"fieldtype": ["in", ["Attach", "Attach Image"]]})

	for df in attach_fields:
		# this method runs in on_update hook of all documents
		# we dont want the update to fail if file cannot be attached for some reason
		value = doc.get(df.fieldname)
		if not (value or "").startswith(("/files", "/private/files")):
			# bug fix: was `return`, which aborted the whole loop and skipped
			# every remaining attach field as soon as one field was empty or
			# held an external URL
			continue

		if frappe.db.exists(
			"File",
			{
				"file_url": value,
				"attached_to_name": doc.name,
				"attached_to_doctype": doc.doctype,
				"attached_to_field": df.fieldname,
			},
		):
			# already attached — move on to the next attach field
			continue

		file: "File" = frappe.get_doc(
			doctype="File",
			file_url=value,
			attached_to_name=doc.name,
			attached_to_doctype=doc.doctype,
			attached_to_field=df.fieldname,
			folder="Home/Attachments",
		)
		try:
			file.insert()
		except Exception:
			doc.log_error("Error Attaching File")
def decode_file_content(content: bytes) -> bytes:
	"""Base64-decode *content*, tolerating a data-URI style "header,payload" prefix."""
	if isinstance(content, str):
		content = content.encode("utf-8")
	if b"," in content:
		# strip the data-URI header, e.g. b"data:image/png;base64,"
		content = content.split(b",")[1]
	return safe_b64decode(content)

View file

@ -1,8 +1,16 @@
// Copyright (c) 2020, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Log Settings', {
// refresh: function(frm) {
// }
frappe.ui.form.on("Log Settings", {
refresh: (frm) => {
frm.set_query("ref_doctype", "logs_to_clear", () => {
const added_doctypes = frm.doc.logs_to_clear.map((r) => r.ref_doctype);
return {
query: "frappe.core.doctype.log_settings.log_settings.get_log_doctypes",
filters: [
["name", "not in", added_doctypes],
],
};
});
},
});

View file

@ -5,61 +5,20 @@
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"error_log_notification_section",
"users_to_notify",
"log_cleanup_section",
"clear_error_log_after",
"clear_activity_log_after",
"column_break_4",
"clear_email_queue_after"
"logs_to_clear"
],
"fields": [
{
"fieldname": "log_cleanup_section",
"fieldtype": "Section Break",
"label": "Log Cleanup"
},
{
"fieldname": "column_break_4",
"fieldtype": "Column Break"
},
{
"fieldname": "error_log_notification_section",
"fieldtype": "Section Break",
"label": "Error Log Notification"
},
{
"fieldname": "users_to_notify",
"fieldtype": "Table MultiSelect",
"label": "Users To Notify",
"options": "Log Setting User"
},
{
"default": "90",
"description": "In Days",
"fieldname": "clear_error_log_after",
"fieldtype": "Int",
"label": "Clear Error log After"
},
{
"default": "90",
"description": "In Days",
"fieldname": "clear_activity_log_after",
"fieldtype": "Int",
"label": "Clear Activity Log After"
},
{
"default": "30",
"description": "In Days",
"fieldname": "clear_email_queue_after",
"fieldtype": "Int",
"label": "Clear Email Queue After"
"fieldname": "logs_to_clear",
"fieldtype": "Table",
"label": "Logs to Clear",
"options": "Logs To Clear"
}
],
"index_web_pages_for_search": 1,
"issingle": 1,
"links": [],
"modified": "2020-10-13 12:18:48.649038",
"modified": "2022-06-11 02:17:30.803721",
"modified_by": "Administrator",
"module": "Core",
"name": "Log Settings",
@ -79,5 +38,6 @@
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}
}

View file

@ -2,44 +2,119 @@
# Copyright (c) 2020, Frappe Technologies and contributors
# License: MIT. See LICENSE
from typing import Protocol, runtime_checkable
import frappe
from frappe import _
from frappe.model.base_document import get_controller
from frappe.model.document import Document
from frappe.query_builder import DocType, Interval
from frappe.query_builder.functions import Now
from frappe.utils import cint
from frappe.utils.caching import site_cache
# Default retention (days) per log doctype; used to seed the
# "Logs to Clear" child table with sensible defaults.
DEFAULT_LOGTYPES_RETENTION = {
	"Error Log": 30,
	"Activity Log": 90,
	"Email Queue": 30,
	"Error Snapshot": 30,
	"Scheduled Job Log": 90,
}
@runtime_checkable
class LogType(Protocol):
	"""Interface requirement for doctypes that can be cleared using log settings."""

	# @runtime_checkable makes issubclass() checks against this Protocol valid
	@staticmethod
	def clear_old_logs(days: int) -> None:
		...
@site_cache
def _supports_log_clearing(doctype: str) -> bool:
	"""Return True when *doctype*'s controller implements the LogType protocol."""
	try:
		controller = get_controller(doctype)
		return issubclass(controller, LogType)
	except Exception:
		# unknown or broken doctype — treat as unsupported instead of failing
		return False
class LogSettings(Document):
def validate(self):
self.validate_supported_doctypes()
self.validate_duplicates()
self.add_default_logtypes()
def validate_supported_doctypes(self):
for entry in self.logs_to_clear:
if _supports_log_clearing(entry.ref_doctype):
continue
msg = _("{} does not support automated log clearing.").format(frappe.bold(entry.ref_doctype))
if frappe.conf.developer_mode:
msg += "<br>" + _("Implement `clear_old_logs` method to enable auto error clearing.")
frappe.throw(msg, title=_("DocType not supported by Log Settings."))
def validate_duplicates(self):
seen = set()
for entry in self.logs_to_clear:
if entry.ref_doctype in seen:
frappe.throw(
_("{} appears more than once in configured log doctypes.").format(entry.ref_doctype)
)
seen.add(entry.ref_doctype)
def add_default_logtypes(self):
existing_logtypes = {d.ref_doctype for d in self.logs_to_clear}
added_logtypes = set()
for logtype, retention in DEFAULT_LOGTYPES_RETENTION.items():
if logtype not in existing_logtypes and _supports_log_clearing(logtype):
self.append("logs_to_clear", {"ref_doctype": logtype, "days": cint(retention)})
added_logtypes.add(logtype)
if added_logtypes:
frappe.msgprint(
_("Added default log doctypes: {}").format(",".join(added_logtypes)), alert=True
)
def clear_logs(self):
self.clear_email_queue()
self.clear_error_logs()
self.clear_activity_logs()
"""
Log settings can clear any log type that's registered to it and provides a method to delete old logs.
def clear_error_logs(self):
table = DocType("Error Log")
frappe.db.delete(
table, filters=(table.creation < (Now() - Interval(days=self.clear_error_log_after)))
)
Check `LogDoctype` above for interface that doctypes need to implement.
"""
def clear_activity_logs(self):
from frappe.core.doctype.activity_log.activity_log import clear_activity_logs
for entry in self.logs_to_clear:
controller: LogType = get_controller(entry.ref_doctype)
func = controller.clear_old_logs
clear_activity_logs(days=self.clear_activity_log_after)
# Only pass what the method can handle, this is considering any
# future addition that might happen to the required interface.
kwargs = frappe.get_newargs(func, {"days": entry.days})
func(**kwargs)
frappe.db.commit()
def clear_email_queue(self):
from frappe.email.queue import clear_outbox
def register_doctype(self, doctype: str, days=30):
existing_logtypes = {d.ref_doctype for d in self.logs_to_clear}
clear_outbox(days=self.clear_email_queue_after)
if doctype not in existing_logtypes and _supports_log_clearing(doctype):
self.append("logs_to_clear", {"ref_doctype": doctype, "days": cint(days)})
else:
for entry in self.logs_to_clear:
if entry.ref_doctype == doctype:
entry.days = days
break
def run_log_clean_up():
	"""Scheduled entry point: register default log types, persist the
	settings, then purge logs past their retention period."""
	log_settings = frappe.get_doc("Log Settings")
	log_settings.add_default_logtypes()
	log_settings.save()

	log_settings.clear_logs()
@frappe.whitelist()
def has_unseen_error_log(user):
def _get_response(show_alert=True):
def has_unseen_error_log():
if frappe.get_all("Error Log", filters={"seen": 0}, limit=1):
return {
"show_alert": True,
"message": _("You have unseen {0}").format(
@ -47,13 +122,67 @@ def has_unseen_error_log(user):
),
}
if frappe.get_all("Error Log", filters={"seen": 0}, limit=1):
log_settings = frappe.get_cached_doc("Log Settings")
if log_settings.users_to_notify:
if user in [u.user for u in log_settings.users_to_notify]:
return _get_response()
else:
return _get_response(show_alert=False)
else:
return _get_response()
@frappe.whitelist()
@frappe.validate_and_sanitize_search_inputs
def get_log_doctypes(doctype, txt, searchfield, start, page_len, filters):
	"""Link-field search query: list log-capable doctypes matching `txt`.

	`filters` is expected to be a list of filter triples (or falsy); extra
	conditions are appended to whatever the caller passed.
	"""
	filters = filters or []  # was `{}`: dict has no .extend(), crashed when filters was empty

	filters.extend(
		[
			["istable", "=", 0],
			["issingle", "=", 0],
			# `%%` inside an f-string is two literal percent signs, producing a
			# doubled LIKE wildcard; a single `%` on each side is the intent.
			["name", "like", f"%{txt}%"],
		]
	)
	doctypes = frappe.get_list("DocType", filters=filters, pluck="name")

	# Keep only doctypes that actually implement log clearing.
	supported_doctypes = [(d,) for d in doctypes if _supports_log_clearing(d)]

	# `[start:page_len]` returned a shrinking window once start > 0; slice a
	# proper page instead.
	return supported_doctypes[start : start + page_len]
# Doctypes whose tables may be swapped out wholesale by clear_log_table().
# Only pure log tables belong here — the swap discards rows older than the
# retention window without a row-by-row DELETE.
LOG_DOCTYPES = [
	"Scheduled Job Log",
	"Activity Log",
	"Route History",
	"Email Queue",
	"Email Queue Recipient",
	"Error Snapshot",
	"Error Log",
]
def clear_log_table(doctype, days=90):
	"""If any logtype table grows too large then clearing it with DELETE query
	is not feasible in reasonable time. This command copies recent data to new
	table and replaces current table with new smaller table.

	ref: https://mariadb.com/kb/en/big-deletes/#deleting-more-than-half-a-table
	"""
	from frappe.utils import get_table_name

	# Guard: only known log tables may be swapped out (see LOG_DOCTYPES).
	if doctype not in LOG_DOCTYPES:
		raise frappe.ValidationError(f"Unsupported logging DocType: {doctype}")

	original = get_table_name(doctype)
	temporary = f"{original} temp_table"
	backup = f"{original} backup_table"

	try:
		# Clone the table definition into an empty table.
		frappe.db.sql_ddl(f"CREATE TABLE `{temporary}` LIKE `{original}`")

		# Copy all recent data to new table
		frappe.db.sql(
			f"""INSERT INTO `{temporary}`
				SELECT * FROM `{original}`
				WHERE `{original}`.`modified` > NOW() - INTERVAL '{days}' DAY"""
		)
		# Swap: the old table becomes the backup, the trimmed copy takes its name.
		frappe.db.sql_ddl(f"RENAME TABLE `{original}` TO `{backup}`, `{temporary}` TO `{original}`")
	except Exception:
		# Leave the original table untouched and discard the partial copy.
		frappe.db.rollback()
		frappe.db.sql_ddl(f"DROP TABLE IF EXISTS `{temporary}`")
		raise
	else:
		# Swap succeeded — the backup of the old (oversized) table can go.
		frappe.db.sql_ddl(f"DROP TABLE `{backup}`")

View file

@ -4,7 +4,7 @@
from datetime import datetime
import frappe
from frappe.core.doctype.log_settings.log_settings import run_log_clean_up
from frappe.core.doctype.log_settings.log_settings import _supports_log_clearing, run_log_clean_up
from frappe.tests.utils import FrappeTestCase
from frappe.utils import add_to_date, now_datetime
@ -56,6 +56,23 @@ class TestLogSettings(FrappeTestCase):
self.assertEqual(error_log_count, 0)
self.assertEqual(email_queue_count, 0)
def test_logtype_identification(self):
	"""Doctypes that implement log clearing are detected; ordinary ones are not."""
	recognised = (
		"Error Log",
		"Activity Log",
		"Email Queue",
		"Route History",
		"Error Snapshot",
		"Scheduled Job Log",
	)
	for lt in recognised:
		self.assertTrue(_supports_log_clearing(lt), f"{lt} should be recognized as log type")

	for dt in ("DocType", "User", "Non Existing dt"):
		self.assertFalse(_supports_log_clearing(dt), f"{dt} shouldn't be recognized as log type")
def setup_test_logs(past: datetime) -> None:
activity_log = frappe.get_doc(

View file

@ -0,0 +1,43 @@
{
"actions": [],
"autoname": "hash",
"creation": "2022-06-11 02:02:39.472511",
"doctype": "DocType",
"editable_grid": 1,
"engine": "InnoDB",
"field_order": [
"ref_doctype",
"days"
],
"fields": [
{
"fieldname": "ref_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Log DocType",
"options": "DocType",
"reqd": 1
},
{
"default": "30",
"fieldname": "days",
"fieldtype": "Int",
"in_list_view": 1,
"label": "Clear Logs After (days)",
"non_negative": 1,
"reqd": 1
}
],
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2022-06-13 02:51:36.857786",
"modified_by": "Administrator",
"module": "Core",
"name": "Logs To Clear",
"owner": "Administrator",
"permissions": [],
"sort_field": "modified",
"sort_order": "DESC",
"states": []
}

View file

@ -0,0 +1,9 @@
# Copyright (c) 2022, Frappe Technologies and contributors
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class LogsToClear(Document):
	"""Child table row of Log Settings: a log doctype and its retention in days."""

	pass

View file

@ -55,11 +55,11 @@ class PackageImport(Document):
for module in os.listdir(package_path):
module_path = os.path.join(package_path, module)
if os.path.isdir(module_path):
get_doc_files(files, module_path)
files = get_doc_files(files, module_path)
# import files
for file in files:
import_file_by_path(file, force=self.force, ignore_version=True, for_sync=True)
import_file_by_path(file, force=self.force, ignore_version=True)
log.append("Imported {}".format(file))
self.log = "\n".join(log)

View file

@ -3,6 +3,6 @@
frappe.ui.form.on('Patch Log', {
refresh: function(frm) {
frm.disable_save();
}
});

View file

@ -1,87 +1,44 @@
{
"allow_copy": 0,
"allow_import": 0,
"allow_rename": 0,
"autoname": "PATCHLOG.#####",
"beta": 0,
"creation": "2013-01-17 11:36:45",
"custom": 0,
"description": "List of patches executed",
"docstatus": 0,
"doctype": "DocType",
"document_type": "System",
"editable_grid": 0,
"actions": [],
"autoname": "PATCHLOG.#####",
"creation": "2013-01-17 11:36:45",
"description": "List of patches executed",
"doctype": "DocType",
"document_type": "System",
"engine": "InnoDB",
"field_order": [
"patch"
],
"fields": [
{
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "patch",
"fieldtype": "Code",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Patch",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
"fieldname": "patch",
"fieldtype": "Code",
"label": "Patch",
"read_only": 1
}
],
"hide_heading": 0,
"hide_toolbar": 0,
"icon": "fa fa-cog",
"idx": 1,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2016-12-29 14:40:35.048570",
"modified_by": "Administrator",
"module": "Core",
"name": "Patch Log",
"owner": "Administrator",
],
"icon": "fa fa-cog",
"idx": 1,
"links": [],
"modified": "2022-06-13 05:34:37.845368",
"modified_by": "Administrator",
"module": "Core",
"name": "Patch Log",
"naming_rule": "Expression (old style)",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 0,
"delete": 0,
"email": 1,
"export": 0,
"if_owner": 0,
"import": 0,
"is_custom": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator",
"set_user_permissions": 0,
"share": 0,
"submit": 0,
"write": 0
"email": 1,
"print": 1,
"read": 1,
"report": 1,
"role": "Administrator"
}
],
"quick_entry": 1,
"read_only": 0,
"read_only_onload": 0,
"track_changes": 1,
"track_seen": 0
],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"title_field": "patch",
"track_changes": 1
}

View file

@ -23,15 +23,14 @@
{
"fieldname": "report_name",
"fieldtype": "Data",
"hidden": 1,
"label": "Report Name",
"read_only": 1
},
{
"fieldname": "ref_report_doctype",
"fieldtype": "Link",
"hidden": 1,
"label": "Ref Report DocType",
"in_standard_filter": 1,
"label": "Report Type",
"options": "Report",
"read_only": 1
},
@ -41,6 +40,7 @@
"fieldtype": "Select",
"hidden": 1,
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Status",
"options": "Error\nQueued\nCompleted",
"read_only": 1
@ -103,10 +103,11 @@
],
"in_create": 1,
"links": [],
"modified": "2020-03-05 10:52:56.598365",
"modified": "2022-06-13 06:20:34.496412",
"modified_by": "Administrator",
"module": "Core",
"name": "Prepared Report",
"naming_rule": "Expression (old style)",
"owner": "Administrator",
"permissions": [
{
@ -131,9 +132,9 @@
"share": 1
}
],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC",
"title_field": "report_name",
"states": [],
"title_field": "ref_report_doctype",
"track_changes": 1
}

View file

@ -89,7 +89,9 @@ class Report(Document):
]
custom_roles = get_custom_allowed_roles("report", self.name)
allowed.extend(custom_roles)
if custom_roles:
allowed = custom_roles
if not allowed:
return True

View file

@ -186,6 +186,38 @@ class TestReport(FrappeTestCase):
self.assertNotEqual(report.is_permitted(), True)
frappe.set_user("Administrator")
def test_report_custom_permissions(self):
frappe.set_user("test@example.com")
frappe.db.delete("Custom Role", {"report": "Test Custom Role Report"})
frappe.db.commit() # nosemgrep
if not frappe.db.exists("Report", "Test Custom Role Report"):
report = frappe.get_doc(
{
"doctype": "Report",
"ref_doctype": "User",
"report_name": "Test Custom Role Report",
"report_type": "Query Report",
"is_standard": "No",
"roles": [{"role": "_Test Role"}, {"role": "System Manager"}],
}
).insert(ignore_permissions=True)
else:
report = frappe.get_doc("Report", "Test Custom Role Report")
self.assertEqual(report.is_permitted(), True)
frappe.get_doc(
{
"doctype": "Custom Role",
"report": "Test Custom Role Report",
"roles": [{"role": "_Test Role 2"}],
"ref_doctype": "User",
}
).insert(ignore_permissions=True)
self.assertNotEqual(report.is_permitted(), True)
frappe.set_user("Administrator")
# test for the `_format` method if report data doesn't have sort_by parameter
def test_format_method(self):
if frappe.db.exists("Report", "User Activity Report Without Sort"):

View file

@ -38,7 +38,7 @@
}
],
"links": [],
"modified": "2021-10-25 00:00:00.000000",
"modified": "2022-06-13 05:41:21.090972",
"modified_by": "Administrator",
"module": "Core",
"name": "Scheduled Job Log",
@ -59,5 +59,7 @@
],
"quick_entry": 1,
"sort_field": "modified",
"sort_order": "DESC"
}
"sort_order": "DESC",
"states": [],
"title_field": "scheduled_job_type"
}

View file

@ -2,9 +2,14 @@
# Copyright (c) 2019, Frappe Technologies and contributors
# License: MIT. See LICENSE
# import frappe
import frappe
from frappe.model.document import Document
from frappe.query_builder import Interval
from frappe.query_builder.functions import Now
class ScheduledJobLog(Document):
pass
@staticmethod
def clear_old_logs(days=90):
table = frappe.qb.DocType("Scheduled Job Log")
frappe.db.delete(table, filters=(table.modified < (Now() - Interval(days=days))))

View file

@ -0,0 +1,7 @@
// List view hook for Scheduled Job Log: show the configured retention
// period so users know old rows are purged automatically.
frappe.listview_settings["Scheduled Job Log"] = {
	onload: function(listview) {
		// The logtypes bundle provides the shared retention-message helper.
		frappe.require("logtypes.bundle.js", () => {
			frappe.utils.logtypes.show_log_retention_message(cur_list.doctype);
		})
	},
};

View file

@ -185,9 +185,12 @@ def insert_single_event(frequency: str, event: str, cron_format: str = None):
if not frappe.db.exists(
"Scheduled Job Type", {"method": event, "frequency": frequency, **cron_expr}
):
savepoint = "scheduled_job_type_creation"
try:
frappe.db.savepoint(savepoint)
doc.insert()
except frappe.DuplicateEntryError:
frappe.db.rollback(save_point=savepoint)
doc.delete()
doc.insert()

View file

@ -25,6 +25,7 @@
"fieldname": "script_type",
"fieldtype": "Select",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Script Type",
"options": "DocType Event\nScheduler Event\nPermission Query\nAPI",
"reqd": 1
@ -41,6 +42,7 @@
"fieldname": "reference_doctype",
"fieldtype": "Link",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Reference Document Type",
"options": "DocType"
},
@ -109,7 +111,7 @@
"link_fieldname": "server_script"
}
],
"modified": "2022-04-27 11:42:52.032963",
"modified": "2022-06-13 06:04:20.937969",
"modified_by": "Administrator",
"module": "Core",
"name": "Server Script",

View file

@ -1,6 +1,6 @@
{
"actions": [],
"creation": "2014-04-17 16:53:52.640856",
"creation": "2022-01-06 03:18:16.326761",
"doctype": "DocType",
"document_type": "System",
"engine": "InnoDB",
@ -34,12 +34,14 @@
"security",
"session_expiry",
"session_expiry_mobile",
"document_share_key_expiry",
"column_break_13",
"deny_multiple_sessions",
"allow_login_using_mobile_number",
"allow_login_using_user_name",
"allow_error_traceback",
"strip_exif_metadata_from_uploaded_images",
"allow_older_web_view_links",
"password_settings",
"logout_on_password_reset",
"force_user_to_reset_password",
@ -482,6 +484,19 @@
"options": "Sunday\nMonday\nTuesday\nWednesday\nThursday\nFriday\nSaturday"
},
{
"default": "30",
"description": "Number of days after which the document Web View link shared on email will be expired",
"fieldname": "document_share_key_expiry",
"fieldtype": "Int",
"label": "Document Share Key Expiry (in Days)"
},
{
"default": "0",
"fieldname": "allow_older_web_view_links",
"fieldtype": "Check",
"label": "Allow Older Web View Links (Insecure)"
},
{
"fieldname": "column_break_64",
"fieldtype": "Column Break"
},

View file

@ -59,6 +59,10 @@ frappe.ui.form.on('User', {
onload: function(frm) {
frm.can_edit_roles = has_access_to_edit_user();
if (frm.is_new() && frm.roles_editor) {
frm.roles_editor.reset();
}
if (frm.can_edit_roles && !frm.is_new() && in_list(['System User', 'Website User'], frm.doc.user_type)) {
if (!frm.roles_editor) {
const role_area = $('<div class="role-editor">')
@ -194,14 +198,14 @@ frappe.ui.form.on('User', {
}
}
}
if (frm.doc.user_emails){
var found =0;
for (var i = 0;i<frm.doc.user_emails.length;i++){
if (frm.doc.email==frm.doc.user_emails[i].email_id){
if (frm.doc.user_emails && frappe.model.can_create("Email Account")) {
var found = 0;
for (var i = 0; i < frm.doc.user_emails.length; i++) {
if (frm.doc.email == frm.doc.user_emails[i].email_id) {
found = 1;
}
}
if (!found){
if (!found) {
frm.add_custom_button(__("Create User Email"), function() {
frm.events.create_user_email(frm);
});

View file

@ -722,7 +722,7 @@
"link_fieldname": "user"
}
],
"modified": "2022-03-09 01:47:56.745069",
"modified": "2022-05-25 01:00:51.345319",
"modified_by": "Administrator",
"module": "Core",
"name": "User",
@ -747,6 +747,10 @@
"read": 1,
"role": "System Manager",
"write": 1
},
{
"role": "All",
"select": 1
}
],
"quick_entry": 1,

View file

@ -163,6 +163,9 @@ class User(Document):
toggle_notifications(self.name, enable=cint(self.enabled))
def add_system_manager_role(self):
if self.is_system_manager_disabled():
return
# if adding system manager, do nothing
if not cint(self.enabled) or (
"System Manager" in [user_role.role for user_role in self.get("roles")]
@ -189,6 +192,9 @@ class User(Document):
],
)
def is_system_manager_disabled(self):
return frappe.db.get_value("Role", {"name": "System Manager"}, ["disabled"])
def email_new_password(self, new_password=None):
if new_password and not self.flags.in_insert:
_update_password(user=self.name, pwd=new_password, logout_all_sessions=self.logout_all_sessions)
@ -372,6 +378,9 @@ class User(Document):
)
def a_system_manager_should_exist(self):
if self.is_system_manager_disabled():
return
if not self.get_other_system_managers():
throw(_("There should remain at least one System Manager"))
@ -424,6 +433,9 @@ class User(Document):
frappe.cache().delete_key("enabled_users")
# delete user permissions
frappe.db.delete("User Permission", {"user": self.name})
def before_rename(self, old_name, new_name, merge=False):
frappe.clear_cache(user=old_name)
self.validate_rename(old_name, new_name)
@ -586,10 +598,7 @@ class User(Document):
self.append("social_logins", social_logins)
def get_restricted_ip_list(self):
if not self.restrict_ip:
return
return [i.strip() for i in self.restrict_ip.split(",")]
return get_restricted_ip_list(self)
@classmethod
def find_by_credentials(cls, user_name: str, password: str, validate_password: bool = True):
@ -1156,6 +1165,13 @@ def create_contact(user, ignore_links=False, ignore_mandatory=False):
contact.save(ignore_permissions=True)
def get_restricted_ip_list(user):
if not user.restrict_ip:
return
return [i.strip() for i in user.restrict_ip.split(",")]
@frappe.whitelist()
def generate_keys(user):
"""

View file

@ -3,13 +3,8 @@
frappe.ui.form.on('User Type', {
refresh: function(frm) {
frm.toggle_display('is_standard', frappe.boot.developer_mode);
frm.set_df_property('is_standard', 'read_only', !frappe.boot.developer_mode);
const fields = ['role', 'apply_user_permission_on', 'user_id_field',
'user_doctypes', 'user_type_modules'];
frm.toggle_display(fields, !frm.doc.is_standard);
if (frm.is_new() && !frappe.boot.developer_mode)
frm.set_value('is_standard', 1);
frm.set_query('document_type', 'user_doctypes', function() {
return {

View file

@ -22,9 +22,11 @@
"fields": [
{
"default": "0",
"depends_on": "eval: frappe.boot.developer_mode",
"fieldname": "is_standard",
"fieldtype": "Check",
"label": "Is Standard"
"label": "Is Standard",
"read_only_depends_on": "eval: !frappe.boot.developer_mode"
},
{
"depends_on": "eval: !doc.is_standard",
@ -33,21 +35,21 @@
"label": "Document Types and Permissions"
},
{
"depends_on": "eval: !doc.is_standard",
"fieldname": "user_doctypes",
"fieldtype": "Table",
"label": "Document Types",
"mandatory_depends_on": "eval: !doc.is_standard",
"options": "User Document Type",
"read_only": 1
"options": "User Document Type"
},
{
"depends_on": "eval: !doc.is_standard",
"fieldname": "role",
"fieldtype": "Link",
"in_list_view": 1,
"label": "Role",
"mandatory_depends_on": "eval: !doc.is_standard",
"options": "Role",
"read_only": 1
"options": "Role"
},
{
"fieldname": "select_doctypes",
@ -62,13 +64,13 @@
"fieldtype": "Column Break"
},
{
"depends_on": "eval: !doc.is_standard",
"description": "Can only list down the document types which has been linked to the User document type.",
"fieldname": "apply_user_permission_on",
"fieldtype": "Link",
"label": "Apply User Permission On",
"mandatory_depends_on": "eval: !doc.is_standard",
"options": "DocType",
"read_only": 1
"options": "DocType"
},
{
"depends_on": "eval: !doc.is_standard",
@ -81,8 +83,7 @@
"fieldname": "user_id_field",
"fieldtype": "Select",
"label": "User Id Field",
"mandatory_depends_on": "eval: !doc.is_standard",
"read_only": 1
"mandatory_depends_on": "eval: !doc.is_standard"
},
{
"depends_on": "eval: !doc.is_standard",
@ -93,6 +94,7 @@
{
"fieldname": "user_type_modules",
"fieldtype": "Table",
"label": "User Type Module",
"no_copy": 1,
"options": "User Type Module",
"print_hide": 1,
@ -107,10 +109,11 @@
],
"index_web_pages_for_search": 1,
"links": [],
"modified": "2021-03-12 16:25:18.639050",
"modified": "2022-06-09 14:00:36.820306",
"modified_by": "Administrator",
"module": "Core",
"name": "User Type",
"naming_rule": "Set by user",
"owner": "Administrator",
"permissions": [
{
@ -137,5 +140,6 @@
],
"sort_field": "modified",
"sort_order": "DESC",
"states": [],
"track_changes": 1
}

View file

@ -32,6 +32,19 @@ class TestVersion(unittest.TestCase):
self.assertEqual(get_old_values(diff)[1], "01-01-2014 00:00:00")
self.assertEqual(get_new_values(diff)[1], "07-20-2017 00:00:00")
def test_no_version_on_new_doc(self):
from frappe.desk.form.load import get_versions
t = frappe.get_doc(doctype="ToDo", description="something")
t.save(ignore_version=False)
self.assertFalse(get_versions(t))
t = frappe.get_doc(t.doctype, t.name)
t.description = "changed"
t.save(ignore_version=False)
self.assertTrue(get_versions(t))
def get_fieldnames(change_array):
return [d[0] for d in change_array]

View file

@ -2,6 +2,7 @@
# License: MIT. See LICENSE
import json
from typing import Optional
import frappe
from frappe.model import no_value_fields, table_fields
@ -9,19 +10,30 @@ from frappe.model.document import Document
class Version(Document):
def set_diff(self, old, new):
def update_version_info(self, old: Optional[Document], new: Document) -> bool:
"""Update changed info and return true if change contains useful data."""
if not old:
# Check if doc has some information about creation source like data import
return self.for_insert(new)
else:
return self.set_diff(old, new)
def set_diff(self, old: Document, new: Document) -> bool:
"""Set the data property with the diff of the docs if present"""
diff = get_diff(old, new)
if diff:
self.ref_doctype = new.doctype
self.docname = new.name
self.data = frappe.as_json(diff)
self.data = frappe.as_json(diff, indent=None, separators=(",", ":"))
return True
else:
return False
def for_insert(self, doc):
def for_insert(self, doc: Document) -> bool:
updater_reference = doc.flags.updater_reference
if not updater_reference:
return False
data = {
"creation": doc.creation,
"updater_reference": updater_reference,
@ -29,7 +41,8 @@ class Version(Document):
}
self.ref_doctype = doc.doctype
self.docname = doc.name
self.data = frappe.as_json(data)
self.data = frappe.as_json(data, indent=None, separators=(",", ":"))
return True
def get_data(self):
return json.loads(self.data)

View file

@ -67,7 +67,8 @@
"fieldtype": "Link",
"in_filter": 1,
"in_list_view": 1,
"label": "Document",
"in_standard_filter": 1,
"label": "DocType",
"oldfieldname": "dt",
"oldfieldtype": "Link",
"options": "DocType",
@ -94,6 +95,7 @@
"fieldname": "fieldname",
"fieldtype": "Data",
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Fieldname",
"no_copy": 1,
"oldfieldname": "fieldname",
@ -439,7 +441,7 @@
"idx": 1,
"index_web_pages_for_search": 1,
"links": [],
"modified": "2022-04-14 09:46:58.849765",
"modified": "2022-06-13 06:39:03.319667",
"modified_by": "Administrator",
"module": "Custom",
"name": "Custom Field",

View file

@ -161,6 +161,7 @@ def create_custom_field(doctype, df, ignore_validate=False, is_system_generated=
custom_field.update(df)
custom_field.flags.ignore_validate = ignore_validate
custom_field.insert()
return custom_field
def create_custom_fields(custom_fields, ignore_validate=False, update=True):

View file

@ -1,24 +0,0 @@
from abc import ABCMeta, abstractmethod
from frappe.utils.password import get_decrypted_password
class BaseConnection(metaclass=ABCMeta):
	"""Abstract interface every Data Migration connector must implement.

	NOTE(review): subclasses appear to be expected to set ``self.connector``
	(the Data Migration Connector doc) in ``__init__`` before ``get_password``
	is called — confirm against concrete connectors.
	"""

	@abstractmethod
	def get(self, remote_objectname, fields=None, filters=None, start=0, page_length=10):
		"""Fetch a page of remote records."""
		pass

	@abstractmethod
	def insert(self, doctype, doc):
		"""Create `doc` of type `doctype` on the remote system."""
		pass

	@abstractmethod
	def update(self, doctype, doc, migration_id):
		"""Update the remote record identified by `migration_id`."""
		pass

	@abstractmethod
	def delete(self, doctype, migration_id):
		"""Delete the remote record identified by `migration_id`."""
		pass

	def get_password(self):
		# Decrypts the stored password of the linked connector document.
		return get_decrypted_password("Data Migration Connector", self.connector.name)

View file

@ -1,32 +0,0 @@
import frappe
from frappe.frappeclient import FrappeClient
from .base import BaseConnection
class FrappeConnection(BaseConnection):
	"""Connector that talks to a remote Frappe site via FrappeClient."""

	def __init__(self, connector):
		self.connector = connector
		self.connection = FrappeClient(
			self.connector.hostname, self.connector.username, self.get_password()
		)
		self.name_field = "name"

	def insert(self, doctype, doc):
		# The remote API expects the doctype embedded in the payload.
		record = frappe._dict(doc)
		record.doctype = doctype
		return self.connection.insert(record)

	def update(self, doctype, doc, migration_id):
		# Address the remote record by its migration id (remote name).
		record = frappe._dict(doc)
		record.doctype = doctype
		record.name = migration_id
		return self.connection.update(record)

	def delete(self, doctype, migration_id):
		return self.connection.delete(doctype, migration_id)

	def get(self, doctype, fields='"*"', filters=None, start=0, page_length=20):
		return self.connection.get_list(
			doctype, fields=fields, filters=filters, limit_start=start, limit_page_length=page_length
		)

View file

@ -1,47 +0,0 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Migration Connector', {
onload(frm) {
if(frappe.boot.developer_mode) {
frm.add_custom_button(__('New Connection'), () => frm.events.new_connection(frm));
}
},
new_connection(frm) {
const d = new frappe.ui.Dialog({
title: __('New Connection'),
fields: [
{ label: __('Module'), fieldtype: 'Link', options: 'Module Def', reqd: 1 },
{ label: __('Connection Name'), fieldtype: 'Data', description: 'For e.g: Shopify Connection', reqd: 1 },
],
primary_action_label: __('Create'),
primary_action: (values) => {
let { module, connection_name } = values;
frm.events.create_new_connection(module, connection_name)
.then(r => {
if (r.message) {
const connector_name = connection_name
.replace('connection', 'Connector')
.replace('Connection', 'Connector')
.trim();
frm.set_value('connector_name', connector_name);
frm.set_value('connector_type', 'Custom');
frm.set_value('python_module', r.message);
frm.save();
frappe.show_alert(__("New module created {0}", [r.message]));
d.hide();
}
});
}
});
d.show();
},
create_new_connection(module, connection_name) {
return frappe.call('frappe.data_migration.doctype.data_migration_connector.data_migration_connector.create_new_connection', {
module, connection_name
});
}
});

View file

@ -1,307 +0,0 @@
{
"allow_copy": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 1,
"autoname": "field:connector_name",
"beta": 1,
"creation": "2017-08-11 05:03:27.091416",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"fields": [
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "connector_name",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Connector Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "eval:!doc.is_custom",
"fieldname": "connector_type",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Connector Type",
"length": 0,
"no_copy": 0,
"options": "\nFrappe\nCustom",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"depends_on": "eval:doc.connector_type == 'Custom'",
"fieldname": "python_module",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Python Module",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "authentication_credentials",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Authentication Credentials",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "",
"fieldname": "hostname",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Hostname",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "database_name",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Database Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "username",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Username",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "password",
"fieldtype": "Password",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Password",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2017-12-01 13:38:55.992499",
"modified_by": "Administrator",
"module": "Data Migration",
"name": "Data Migration Connector",
"name_case": "",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
}
],
"quick_entry": 0,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 0
}

View file

@ -1,107 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# License: MIT. See LICENSE
import os
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.modules.export_file import create_init_py
from .connectors.base import BaseConnection
from .connectors.frappe_connection import FrappeConnection
class DataMigrationConnector(Document):
	"""Stores connection details for a remote system and hands out a live
	connection object for data migration runs."""

	def validate(self):
		"""Require either a custom python module or a connector type, and
		fail early if the module path cannot be resolved to a class."""
		if not (self.python_module or self.connector_type):
			frappe.throw(_("Enter python module or select connector type"))

		if self.python_module:
			try:
				get_connection_class(self.python_module)
			except Exception:
				# Was a bare `except:`, which would also swallow
				# KeyboardInterrupt/SystemExit; catch Exception and surface
				# a user-facing error instead. `_` is already imported at
				# module level, so `frappe._` was redundant.
				frappe.throw(_("Invalid module path"))

	def get_connection(self):
		"""Return a BaseConnection implementation for this connector.

		Custom connectors are resolved from `python_module`; otherwise the
		built-in Frappe-to-Frappe connection is used.
		"""
		if self.python_module:
			_class = get_connection_class(self.python_module)
			return _class(self)
		else:
			self.connection = FrappeConnection(self)
			return self.connection
@frappe.whitelist()
def create_new_connection(module, connection_name):
	"""Scaffold a custom connector: create a `connectors` package inside the
	given module's folder and write a boilerplate connection class into it.

	Returns the dotted python module path of the new file, or None when
	developer mode is disabled.
	"""
	if not frappe.conf.get("developer_mode"):
		frappe.msgprint(_("Please enable developer mode to create new connection"))
		return

	# create folder
	module_path = frappe.get_module_path(module)
	connectors_folder = os.path.join(module_path, "connectors")
	frappe.create_folder(connectors_folder)

	# create init py so the folder is importable as a package
	create_init_py(module_path, "connectors", "")

	connection_class = connection_name.replace(" ", "")
	file_name = frappe.scrub(connection_name) + ".py"
	file_path = os.path.join(module_path, "connectors", file_name)

	# create boilerplate file
	with open(file_path, "w") as f:
		f.write(connection_boilerplate.format(connection_class=connection_class))

	# get python module string from file_path, relative to the apps folder;
	# the trailing [:-3] strips the ".py" extension
	app_name = frappe.db.get_value("Module Def", module, "app_name")
	python_module = os.path.relpath(file_path, "../apps/{0}".format(app_name)).replace(
		os.path.sep, "."
	)[:-3]

	return python_module
def get_connection_class(python_module):
	"""Resolve *python_module* (dotted path) to its connection class.

	The expected class name is derived from the final path component
	(e.g. ``my_app.connectors.shopify_connection`` -> ``ShopifyConnection``).
	Raises ``ImportError`` when the attribute is missing or is not a
	``BaseConnection`` subclass.
	"""
	filename = python_module.rsplit(".", 1)[-1]
	classname = frappe.unscrub(filename).replace(" ", "")
	module = frappe.get_module(python_module)

	connection_class = getattr(module, classname, None)
	if connection_class is None or not issubclass(connection_class, BaseConnection):
		raise ImportError(filename)

	return connection_class
connection_boilerplate = """from frappe.data_migration.doctype.data_migration_connector.connectors.base import BaseConnection
class {connection_class}(BaseConnection):
def __init__(self, connector):
# self.connector = connector
# self.connection = YourModule(self.connector.username, self.get_password())
# self.name_field = 'id'
pass
def get(self, remote_objectname, fields=None, filters=None, start=0, page_length=10):
pass
def insert(self, doctype, doc):
pass
def update(self, doctype, doc, migration_id):
pass
def delete(self, doctype, migration_id):
pass
"""

View file

@ -1,8 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and Contributors
# License: MIT. See LICENSE
import unittest
class TestDataMigrationConnector(unittest.TestCase):
	"""Placeholder test case for the Data Migration Connector doctype."""

View file

@ -1,8 +0,0 @@
// Copyright (c) 2017, Frappe Technologies and contributors
// For license information, please see license.txt
frappe.ui.form.on('Data Migration Mapping', {
	// Placeholder refresh handler — no custom behavior on form refresh yet.
	refresh: function() {
	}
});

View file

@ -1,456 +0,0 @@
{
"allow_copy": 0,
"allow_guest_to_view": 0,
"allow_import": 0,
"allow_rename": 1,
"autoname": "field:mapping_name",
"beta": 1,
"creation": "2017-08-11 05:11:49.975801",
"custom": 0,
"docstatus": 0,
"doctype": "DocType",
"document_type": "",
"editable_grid": 1,
"engine": "InnoDB",
"fields": [
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "mapping_name",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Mapping Name",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "remote_objectname",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Remote Objectname",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "remote_primary_key",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Remote Primary Key",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "local_doctype",
"fieldtype": "Link",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 1,
"in_standard_filter": 0,
"label": "Local DocType",
"length": 0,
"no_copy": 0,
"options": "DocType",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "local_primary_key",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Local Primary Key",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "column_break_5",
"fieldtype": "Column Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "mapping_type",
"fieldtype": "Select",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Mapping Type",
"length": 0,
"no_copy": 0,
"options": "Push\nPull\nSync",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"default": "10",
"fieldname": "page_length",
"fieldtype": "Int",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Page Length",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "migration_id_field",
"fieldtype": "Data",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Migration ID Field",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 1,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "mapping",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Mapping",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "fields",
"fieldtype": "Table",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Field Maps",
"length": 0,
"no_copy": 0,
"options": "Data Migration Mapping Detail",
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 1,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 1,
"columns": 0,
"fieldname": "condition_detail",
"fieldtype": "Section Break",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Condition Detail",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
},
{
"allow_bulk_edit": 0,
"allow_on_submit": 0,
"bold": 0,
"collapsible": 0,
"columns": 0,
"fieldname": "condition",
"fieldtype": "Code",
"hidden": 0,
"ignore_user_permissions": 0,
"ignore_xss_filter": 0,
"in_filter": 0,
"in_global_search": 0,
"in_list_view": 0,
"in_standard_filter": 0,
"label": "Condition",
"length": 0,
"no_copy": 0,
"permlevel": 0,
"precision": "",
"print_hide": 0,
"print_hide_if_no_value": 0,
"read_only": 0,
"remember_last_selected_value": 0,
"report_hide": 0,
"reqd": 0,
"search_index": 0,
"set_only_once": 0,
"unique": 0
}
],
"has_web_view": 0,
"hide_heading": 0,
"hide_toolbar": 0,
"idx": 0,
"image_view": 0,
"in_create": 0,
"is_submittable": 0,
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2017-09-27 18:06:43.275207",
"modified_by": "Administrator",
"module": "Data Migration",
"name": "Data Migration Mapping",
"name_case": "",
"owner": "Administrator",
"permissions": [
{
"amend": 0,
"apply_user_permissions": 0,
"cancel": 0,
"create": 1,
"delete": 1,
"email": 1,
"export": 1,
"if_owner": 0,
"import": 0,
"permlevel": 0,
"print": 1,
"read": 1,
"report": 1,
"role": "System Manager",
"set_user_permissions": 0,
"share": 1,
"submit": 0,
"write": 1
}
],
"quick_entry": 1,
"read_only": 0,
"read_only_onload": 0,
"show_name_in_global_search": 0,
"sort_field": "modified",
"sort_order": "DESC",
"track_changes": 1,
"track_seen": 0
}

Some files were not shown because too many files have changed in this diff Show more