Merge branch 'develop' of github.com:frappe/frappe into skip-backup-tables

This commit is contained in:
Gavin D'souza 2020-10-07 14:02:05 +05:30
commit 5a0948092c
157 changed files with 46362 additions and 7347 deletions

48
.github/helper/documentation.py vendored Normal file
View file

@ -0,0 +1,48 @@
import sys
import requests
from urllib.parse import urlparse
# Repositories under the "frappe" GitHub org that count as documentation repos.
docs_repos = [
	"frappe_docs",
	"erpnext_documentation",
	"erpnext_com",
	"frappe_io",
]


def uri_validator(x):
	"""Return True if *x* parses as a URL with a scheme, host and path."""
	result = urlparse(x)
	return all([result.scheme, result.netloc, result.path])


def docs_link_exists(body):
	"""Return True if *body* contains a link into a frappe documentation repo.

	Scans every whitespace-separated word of *body* for a URL of the form
	``https://github.com/frappe/<docs-repo>/<type>/<ref>`` and returns True
	on the first match; returns None (falsy) when no such link is found.
	"""
	for line in body.splitlines():
		for word in line.split():
			if word.startswith("http") and uri_validator(word):
				parsed_url = urlparse(word)
				if parsed_url.netloc == "github.com":
					# Expect a path shaped like /<org>/<repo>/<type>/<ref>.
					# URLs with a different number of segments (e.g. a bare
					# profile link http://github.com/frappe) previously raised
					# ValueError on unpacking — skip them instead of crashing.
					parts = parsed_url.path.split("/")
					if len(parts) != 5:
						continue
					_, org, repo, _type, ref = parts
					if org == "frappe" and repo in docs_repos:
						return True
if __name__ == "__main__":
	# Usage: python documentation.py <pr_number>
	pr = sys.argv[1]
	response = requests.get("https://api.github.com/repos/frappe/frappe/pulls/{}".format(pr))

	if response.ok:
		payload = response.json()
		# A PR without a description has "body": null in the API payload, and
		# .get("body", "") then returns None (the key exists) — coalesce with
		# `or ""` before calling .lower() to avoid an AttributeError. Same
		# defensive handling for title and head.
		title = (payload.get("title") or "").lower()
		head_sha = (payload.get("head") or {}).get("sha")
		body = (payload.get("body") or "").lower()

		# Only "feat" PRs require documentation; authors can opt out by
		# putting "no-docs" anywhere in the PR description.
		if title.startswith("feat") and head_sha and "no-docs" not in body:
			if docs_link_exists(body):
				print("Documentation Link Found. You're Awesome! 🎉")
			else:
				print("Documentation Link Not Found! ⚠️")
				sys.exit(1)
		else:
			print("Skipping documentation checks... 🏃")

24
.github/workflows/docs-checker.yml vendored Normal file
View file

@ -0,0 +1,24 @@
# Fails "feat" PRs that neither link a frappe documentation repo nor
# carry a "no-docs" marker in their description (see helper/documentation.py).
name: 'Documentation Required'

on:
  pull_request:
    types: [ opened, synchronize, reopened, edited ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: 'Setup Environment'
        uses: actions/setup-python@v2
        with:
          python-version: 3.6

      - name: 'Clone repo'
        uses: actions/checkout@v2

      - name: Validate Docs
        env:
          PR_NUMBER: ${{ github.event.number }}
        run: |
          pip install requests --quiet
          python $GITHUB_WORKSPACE/.github/helper/documentation.py $PR_NUMBER

View file

@ -19,4 +19,4 @@ jobs:
run: |
git fetch origin $GITHUB_BASE_REF:$GITHUB_BASE_REF -q
files=$(git diff --name-only --diff-filter=d $GITHUB_BASE_REF)
python $GITHUB_WORKSPACE/.github/frappe_linter/translation.py $files
python $GITHUB_WORKSPACE/.github/helper/translation.py $files

View file

@ -10,7 +10,7 @@ from six import iteritems, binary_type, text_type, string_types, PY2
from werkzeug.local import Local, release_local
import os, sys, importlib, inspect, json
from past.builtins import cmp
import click
from faker import Faker
# public
@ -226,12 +226,20 @@ def get_site_config(sites_path=None, site_path=None):
if sites_path:
common_site_config = os.path.join(sites_path, "common_site_config.json")
if os.path.exists(common_site_config):
config.update(get_file_json(common_site_config))
try:
config.update(get_file_json(common_site_config))
except Exception as error:
click.secho("common_site_config.json is invalid", fg="red")
print(error)
if site_path:
site_config = os.path.join(site_path, "site_config.json")
if os.path.exists(site_config):
config.update(get_file_json(site_config))
try:
config.update(get_file_json(site_config))
except Exception as error:
click.secho("{0}/site_config.json is invalid".format(local.site), fg="red")
print(error)
elif local.site and not local.flags.new_site:
raise IncorrectSitePath("{0} does not exist".format(local.site))

View file

@ -193,7 +193,8 @@ def handle_exception(e):
else:
traceback = "<pre>" + sanitize_html(frappe.get_traceback()) + "</pre>"
if frappe.local.flags.disable_traceback:
# disable traceback in production if flag is set
if frappe.local.flags.disable_traceback and not frappe.local.dev_server:
traceback = ""
frappe.respond_as_web_page("Server Error",

View file

@ -146,7 +146,7 @@ class AutoRepeat(Document):
def make_new_document(self):
reference_doc = frappe.get_doc(self.reference_doctype, self.reference_document)
new_doc = frappe.copy_doc(reference_doc)
new_doc = frappe.copy_doc(reference_doc, ignore_no_copy = False)
self.update_doc(new_doc, reference_doc)
new_doc.insert(ignore_permissions = True)

View file

@ -4,7 +4,6 @@
from __future__ import unicode_literals
import frappe, json
import frappe.defaults
from frappe.model.document import Document
from frappe.desk.notifications import (delete_notification_count_for,
clear_notifications)

View file

@ -62,11 +62,11 @@
"label": "URLs"
}
],
"modified": "2019-11-07 13:21:19.395927",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Chat",
"name": "Chat Message",
"owner": "arjun@gmail.com",
"owner": "Administrator",
"permissions": [
{
"create": 1,

View file

@ -1,10 +1,5 @@
# imports - standard imports
import atexit
import compileall
import hashlib
import os
import re
import shutil
import sys
# imports - third party imports
@ -13,9 +8,7 @@ import click
# imports - module imports
import frappe
from frappe.commands import get_site, pass_context
from frappe.commands.scheduler import _is_scheduler_enabled
from frappe.exceptions import SiteNotSpecifiedError
from frappe.installer import update_site_config
from frappe.utils import get_site_path, touch_file
@ -64,8 +57,10 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
sys.exit(1)
if not db_name:
import hashlib
db_name = '_' + hashlib.sha1(site.encode()).hexdigest()[:16]
from frappe.commands.scheduler import _is_scheduler_enabled
from frappe.installer import install_db, make_site_dirs
from frappe.installer import install_app as _install_app
import frappe.utils.scheduler
@ -73,6 +68,7 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
frappe.init(site=site)
try:
# enable scheduler post install?
enable_scheduler = _is_scheduler_enabled()
except Exception:
@ -107,11 +103,11 @@ def _new_site(db_name, site, mariadb_root_username=None, mariadb_root_password=N
@click.option('--install-app', multiple=True, help='Install app after installation')
@click.option('--with-public-files', help='Restores the public files of the site, given path to its tar file')
@click.option('--with-private-files', help='Restores the private files of the site, given path to its tar file')
@click.option('--force', is_flag=True, default=False, help='Use a bit of force to get the job done')
@click.option('--force', is_flag=True, default=False, help='Ignore the site downgrade warning, if applicable')
@pass_context
def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_password=None, db_name=None, verbose=None, install_app=None, admin_password=None, force=None, with_public_files=None, with_private_files=None):
"Restore site database from an sql file"
from frappe.installer import extract_sql_gzip, extract_tar_files, is_downgrade
from frappe.installer import extract_sql_gzip, extract_files, is_downgrade
force = context.force or force
# Extract the gzip file if user has passed *.sql.gz file instead of *.sql file
@ -147,12 +143,12 @@ def restore(context, sql_file_path, mariadb_root_username=None, mariadb_root_pas
# Extract public and/or private files to the restored site, if user has given the path
if with_public_files:
with_public_files = os.path.join(base_path, with_public_files)
public = extract_tar_files(site, with_public_files, 'public')
public = extract_files(site, with_public_files, 'public')
os.remove(public)
if with_private_files:
with_private_files = os.path.join(base_path, with_private_files)
private = extract_tar_files(site, with_private_files, 'private')
private = extract_files(site, with_private_files, 'private')
os.remove(private)
# Removing temporarily created file
@ -271,12 +267,13 @@ def disable_user(context, email):
@click.command('migrate')
@click.option('--rebuild-website', help="Rebuild webpages after migration")
@click.option('--skip-failing', is_flag=True, help="Skip patches that fail to run")
@click.option('--skip-search-index', is_flag=True, help="Skip search indexing for web documents")
@pass_context
def migrate(context, rebuild_website=False, skip_failing=False, skip_search_index=False):
def migrate(context, skip_failing=False, skip_search_index=False):
"Run patches, sync schema and rebuild files/translations"
import compileall
import re
from frappe.migrate import migrate
for site in context.sites:
@ -286,7 +283,6 @@ def migrate(context, rebuild_website=False, skip_failing=False, skip_search_inde
try:
migrate(
context.verbose,
rebuild_website=rebuild_website,
skip_failing=skip_failing,
skip_search_index=skip_search_index
)
@ -387,49 +383,52 @@ def use(site, sites_path='.'):
@click.command('backup')
@click.option('--with-files', default=False, is_flag=True, help="Take backup with files")
@click.option('--ignore-backup-conf', default=False, is_flag=True, help="Ignore excludes/includes set in config")
@click.option('--include', default="", type=str, help="Specify the DocTypes to backup seperated by commas")
@click.option('--exclude', default="", type=str, help="Specify the DocTypes to not backup seperated by commas")
@click.option('--verbose', default=False, is_flag=True)
@click.option('--backup-path', default=None, help="Set path for saving all the files in this operation")
@click.option('--backup-path-db', default=None, help="Set path for saving database file")
@click.option('--backup-path-files', default=None, help="Set path for saving public file")
@click.option('--backup-path-private-files', default=None, help="Set path for saving private file")
@click.option('--backup-path-conf', default=None, help="Set path for saving config file")
@click.option('--ignore-backup-conf', default=False, is_flag=True, help="Ignore excludes/includes set in config")
@click.option('--verbose', default=False, is_flag=True, help="Add verbosity")
@click.option('--compress', default=False, is_flag=True, help="Compress private and public files")
@pass_context
def backup(context, with_files=False, backup_path_db=None, backup_path_files=None,
backup_path_private_files=None, quiet=False, verbose=False, ignore_backup_conf=False, include="", exclude=""):
def backup(context, with_files=False, backup_path=None, backup_path_db=None, backup_path_files=None,
backup_path_private_files=None, backup_path_conf=None, ignore_backup_conf=False, verbose=False,
compress=False, include="", exclude=""):
"Backup"
from frappe.utils.backups import scheduled_backup
verbose = verbose or context.verbose
exit_code = 0
for site in context.sites:
try:
frappe.init(site=site)
frappe.connect()
odb = scheduled_backup(
ignore_files=not with_files,
backup_path=backup_path,
backup_path_db=backup_path_db,
backup_path_files=backup_path_files,
backup_path_private_files=backup_path_private_files,
backup_path_conf=backup_path_conf,
ignore_conf=ignore_backup_conf,
include_doctypes=include,
exclude_doctypes=exclude,
compress=compress,
verbose=verbose,
force=True,
force=True
)
except Exception as e:
if verbose:
print("Backup failed for {0}. Database or site_config.json may be corrupted".format(site))
print(e)
except Exception:
click.secho("Backup failed for Site {0}. Database or site_config.json may be corrupted".format(site), fg="red")
exit_code = 1
continue
if verbose:
from frappe.utils import now
summary_title = "Backup Summary at {0}".format(now())
print(summary_title + "\n" + "-" * len(summary_title))
print("Database backup:", odb.backup_path_db)
if with_files:
print("Public files: ", odb.backup_path_files)
print("Private files: ", odb.backup_path_private_files)
odb.print_summary()
click.secho("Backup for Site {0} has been successfully completed{1}".format(site, " with files" if with_files else ""), fg="green")
frappe.destroy()
if not context.sites:
raise SiteNotSpecifiedError
@ -498,13 +497,14 @@ def _drop_site(site, root_login='root', root_password=None, archived_sites_path=
if force:
pass
else:
click.echo("="*80)
click.echo("Error: The operation has stopped because backup of {s}'s database failed.".format(s=site))
click.echo("Reason: {reason}{sep}".format(reason=str(err), sep="\n"))
click.echo("Fix the issue and try again.")
click.echo(
"Hint: Use 'bench drop-site {s} --force' to force the removal of {s}".format(sep="\n", tab="\t", s=site)
)
messages = [
"=" * 80,
"Error: The operation has stopped because backup of {0}'s database failed.".format(site),
"Reason: {0}\n".format(str(err)),
"Fix the issue and try again.",
"Hint: Use 'bench drop-site {0} --force' to force the removal of {0}".format(site)
]
click.echo("\n".join(messages))
sys.exit(1)
drop_user_and_database(frappe.conf.db_name, root_login, root_password)

View file

@ -8,7 +8,7 @@ from frappe import _
import frappe.permissions
import re, csv, os
from frappe.utils.csvutils import UnicodeWriter
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint
from frappe.utils import cstr, formatdate, format_datetime, parse_json, cint, format_duration
from frappe.core.doctype.data_import_legacy.importer import get_data_keys
from six import string_types
from frappe.core.doctype.access_log.access_log import make_access_log
@ -330,6 +330,8 @@ class DataExporter:
value = formatdate(value)
elif fieldtype == "Datetime":
value = format_datetime(value)
elif fieldtype == "Duration":
value = format_duration(value, df.hide_days)
row[_column_start_end.start + i + 1] = value

View file

@ -8,6 +8,7 @@ from frappe.model import (
no_value_fields,
table_fields as table_fieldtypes,
)
from frappe.utils import flt, format_duration
from frappe.utils.csvutils import build_csv_response
from frappe.utils.xlsxutils import build_xlsx_response
@ -146,8 +147,13 @@ class Exporter:
if df.parent == doctype:
if df.is_child_table_field and df.child_table_df.fieldname != parentfield:
continue
row[i] = doc.get(df.fieldname, "")
value = doc.get(df.fieldname, None)
if df.fieldtype == "Duration":
value = flt(value or 0)
value = format_duration(value, df.hide_days)
row[i] = value
return rows
def get_data_as_docs(self):

View file

@ -1,5 +1,5 @@
Title ,Description ,Number ,another_number ,ID (Table Field 1) ,Child Title (Table Field 1) ,Child Description (Table Field 1) ,Child 2 Title (Table Field 2) ,Child 2 Date (Table Field 2) ,Child 2 Number (Table Field 2) ,Child Title (Table Field 1 Again) ,Child Date (Table Field 1 Again) ,Child Number (Table Field 1 Again) ,table_field_1_again.child_another_number
Test ,test description ,1 ,2 ,"" ,child title ,child description ,child title ,14-08-2019 ,4 ,child title again ,22-09-2020 ,5 , 7
, , , , ,child title 2 ,child description 2 ,title child ,30-10-2019 ,5 ,child title again 2 ,22-09-2021 , ,
Test 2 ,test description 2 ,1 ,2 , ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 3 ,test description 3 ,4 ,5 ,"" ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019 ,6 ,child title again asdf ,22-09-2022 ,9 , 71
Title ,Description ,Number ,Duration,another_number ,ID (Table Field 1),Child Title (Table Field 1),Child Description (Table Field 1),Child 2 Title (Table Field 2),Child 2 Date (Table Field 2),Child 2 Number (Table Field 2),Child Title (Table Field 1 Again),Child Date (Table Field 1 Again),Child Number (Table Field 1 Again),table_field_1_again.child_another_number
Test ,test description ,1,3h,2, ,child title ,child description ,child title ,14-08-2019,4,child title again ,22-09-2020,5,7
, , ,, , ,child title 2,child description 2,title child ,30-10-2019,5,child title again 2,22-09-2021, ,
Test 2,test description 2,1,4d 3h,2, ,child mandatory title , ,title child man , , ,child mandatory again , , ,
Test 3,test description 3,4,5d 5h 45m,5, ,child title asdf ,child description asdf ,child title asdf adsf ,15-08-2019,6,child title again asdf ,22-09-2022,9,71
Can't render this file because it contains an unexpected character in line 2 and column 54.

View file

@ -9,7 +9,7 @@ import timeit
import json
from datetime import datetime, date
from frappe import _
from frappe.utils import cint, flt, update_progress_bar, cstr
from frappe.utils import cint, flt, update_progress_bar, cstr, duration_to_seconds
from frappe.utils.csvutils import read_csv_content, get_csv_content_from_google_sheets
from frappe.utils.xlsxutils import (
read_xlsx_file_from_attached_file,
@ -664,6 +664,20 @@ class Row:
}
)
return
elif df.fieldtype == "Duration":
import re
is_valid_duration = re.match("^(?:(\d+d)?((^|\s)\d+h)?((^|\s)\d+m)?((^|\s)\d+s)?)$", value)
if not is_valid_duration:
self.warnings.append(
{
"row": self.row_number,
"col": col.column_number,
"field": df_as_json(df),
"message": _("Value {0} must be in the valid duration format: d h m s").format(
frappe.bold(value)
)
}
)
return value
@ -692,6 +706,8 @@ class Row:
value = flt(value)
elif df.fieldtype in ["Date", "Datetime"]:
value = self.get_date(value, col)
elif df.fieldtype == "Duration":
value = duration_to_seconds(value)
return value

View file

@ -5,7 +5,7 @@ from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import getdate
from frappe.utils import getdate, format_duration
doctype_name = 'DocType for Import'
@ -24,6 +24,7 @@ class TestImporter(unittest.TestCase):
self.assertEqual(doc1.description, 'test description')
self.assertEqual(doc1.number, 1)
self.assertEqual(format_duration(doc1.duration), '3h')
self.assertEqual(doc1.table_field_1[0].child_title, 'child title')
self.assertEqual(doc1.table_field_1[0].child_description, 'child description')
@ -40,7 +41,10 @@ class TestImporter(unittest.TestCase):
self.assertEqual(doc1.table_field_1_again[1].child_date, getdate('2021-09-22'))
self.assertEqual(doc2.description, 'test description 2')
self.assertEqual(format_duration(doc2.duration), '4d 3h')
self.assertEqual(doc3.another_number, 5)
self.assertEqual(format_duration(doc3.duration), '5d 5h 45m')
def test_data_import_preview(self):
import_file = get_import_file('sample_import_file')
@ -48,7 +52,7 @@ class TestImporter(unittest.TestCase):
preview = data_import.get_preview_from_template()
self.assertEqual(len(preview.data), 4)
self.assertEqual(len(preview.columns), 15)
self.assertEqual(len(preview.columns), 16)
def test_data_import_without_mandatory_values(self):
import_file = get_import_file('sample_import_file_without_mandatory')
@ -146,6 +150,7 @@ def create_doctype_if_not_exists(doctype_name, force=False):
{'label': 'Title', 'fieldname': 'title', 'reqd': 1, 'fieldtype': 'Data'},
{'label': 'Description', 'fieldname': 'description', 'fieldtype': 'Small Text'},
{'label': 'Date', 'fieldname': 'date', 'fieldtype': 'Date'},
{'label': 'Duration', 'fieldname': 'duration', 'fieldtype': 'Duration'},
{'label': 'Number', 'fieldname': 'number', 'fieldtype': 'Int'},
{'label': 'Number', 'fieldname': 'another_number', 'fieldtype': 'Int'},
{'label': 'Table Field 1', 'fieldname': 'table_field_1', 'fieldtype': 'Table', 'options': table_1_name},

View file

@ -15,7 +15,7 @@ from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url
from frappe.utils import cint, cstr, flt, getdate, get_datetime, get_url, get_absolute_url, duration_to_seconds
from six import string_types
@ -164,7 +164,8 @@ def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False,
d[fieldname] = get_datetime(_date + " " + _time)
else:
d[fieldname] = None
elif fieldtype == "Duration":
d[fieldname] = duration_to_seconds(cstr(d[fieldname]))
elif fieldtype in ("Image", "Attach Image", "Attach"):
# added file to attachments list
attachments.append(d[fieldname])

View file

@ -490,7 +490,7 @@
"collapsible_depends_on": "links",
"fieldname": "links_section",
"fieldtype": "Section Break",
"label": "Links Section"
"label": "Linked Documents"
},
{
"fieldname": "links",
@ -609,7 +609,7 @@
"link_fieldname": "reference_doctype"
}
],
"modified": "2020-08-06 12:59:32.369095",
"modified": "2020-09-24 13:13:58.227153",
"modified_by": "Administrator",
"module": "Core",
"name": "DocType",

View file

@ -99,6 +99,10 @@ class DocType(Document):
if self.default_print_format and not self.custom:
frappe.throw(_('Standard DocType cannot have default print format, use Customize Form'))
if frappe.conf.get('developer_mode'):
self.owner = 'Administrator'
self.modified_by = 'Administrator'
def set_default_in_list_view(self):
'''Set default in-list-view for first 4 mandatory fields'''
if not [d.fieldname for d in self.fields if d.in_list_view]:
@ -766,7 +770,7 @@ def validate_fields(meta):
if not d.get("__islocal") and frappe.db.has_column(d.parent, d.fieldname):
has_non_unique_values = frappe.db.sql("""select `{fieldname}`, count(*)
from `tab{doctype}` where ifnull({fieldname}, '') != ''
from `tab{doctype}` where ifnull(`{fieldname}`, '') != ''
group by `{fieldname}` having count(*) > 1 limit 1""".format(
doctype=d.parent, fieldname=d.fieldname))

View file

@ -17,11 +17,11 @@
"unique": 1
}
],
"modified": "2019-06-30 13:24:13.732202",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Core",
"name": "Domain",
"owner": "makarand@erpnext.com",
"owner": "Administrator",
"permissions": [
{
"create": 1,

View file

@ -54,12 +54,12 @@
"issingle": 0,
"istable": 1,
"max_attachments": 0,
"modified": "2017-05-04 11:05:54.750351",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Core",
"name": "Has Domain",
"name_case": "",
"owner": "makarand@erpnext.com",
"owner": "Administrator",
"permissions": [],
"quick_entry": 1,
"read_only": 0,

View file

@ -31,7 +31,7 @@
"fieldtype": "Select",
"in_list_view": 1,
"label": "Fieldtype",
"options": "Check\nCurrency\nData\nDate\nDatetime\nDynamic Link\nFloat\nFold\nInt\nLink\nSelect\nTime",
"options": "Check\nCurrency\nData\nDate\nDatetime\nDuration\nDynamic Link\nFloat\nFold\nInt\nLink\nSelect\nTime",
"reqd": 1
},
{
@ -48,7 +48,7 @@
"index_web_pages_for_search": 1,
"istable": 1,
"links": [],
"modified": "2020-08-17 14:32:17.174796",
"modified": "2020-09-03 10:52:03.895817",
"modified_by": "Administrator",
"module": "Core",
"name": "Report Column",

View file

@ -186,8 +186,8 @@
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2018-07-28 15:49:54.019073",
"modified_by": "cave@aperture.com",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Data Migration",
"name": "Data Migration Plan",
"name_case": "",

View file

@ -800,12 +800,12 @@
"issingle": 0,
"istable": 0,
"max_attachments": 0,
"modified": "2018-07-30 07:02:26.980372",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Data Migration",
"name": "Data Migration Run",
"name_case": "",
"owner": "faris@erpnext.com",
"owner": "Administrator",
"permissions": [
{
"amend": 0,

View file

@ -53,11 +53,11 @@
}
],
"links": [],
"modified": "2020-06-15 11:24:57.639430",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Desk",
"name": "Calendar View",
"owner": "faris@erpnext.com",
"owner": "Administrator",
"permissions": [
{
"create": 1,

View file

@ -120,8 +120,8 @@
"hide_toolbar": 1,
"in_create": 1,
"links": [],
"modified": "2020-05-31 22:31:12.886950",
"modified_by": "umair@erpnext.com",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Desk",
"name": "Notification Log",
"owner": "Administrator",

View file

@ -4,25 +4,33 @@
from __future__ import unicode_literals
import frappe
import os, json
import os
import json
from frappe import _
from frappe.modules import scrub, get_module_path
from frappe.utils import flt, cint, get_html_format, get_url_to_form
from frappe.utils import (
flt,
cint,
get_html_format,
get_url_to_form,
gzip_decompress,
format_duration,
)
from frappe.model.utils import render_include
from frappe.translate import send_translations
import frappe.desk.reportview
from frappe.permissions import get_role_permissions
from six import string_types, iteritems
from datetime import timedelta
from frappe.utils import gzip_decompress
from frappe.core.utils import ljust_list
def get_report_doc(report_name):
doc = frappe.get_doc("Report", report_name)
doc.custom_columns = []
if doc.report_type == 'Custom Report':
if doc.report_type == "Custom Report":
custom_report_doc = doc
reference_report = custom_report_doc.reference_report
doc = frappe.get_doc("Report", reference_report)
@ -31,11 +39,18 @@ def get_report_doc(report_name):
doc.is_custom_report = True
if not doc.is_permitted():
frappe.throw(_("You don't have access to Report: {0}").format(report_name), frappe.PermissionError)
frappe.throw(
_("You don't have access to Report: {0}").format(report_name),
frappe.PermissionError,
)
if not frappe.has_permission(doc.ref_doctype, "report"):
frappe.throw(_("You don't have permission to get a report on: {0}").format(doc.ref_doctype),
frappe.PermissionError)
frappe.throw(
_("You don't have permission to get a report on: {0}").format(
doc.ref_doctype
),
frappe.PermissionError,
)
if doc.disabled:
frappe.throw(_("Report {0} is disabled").format(report_name))
@ -55,11 +70,10 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
if report.report_type == "Query Report":
res = report.execute_query_report(filters)
elif report.report_type == 'Script Report':
elif report.report_type == "Script Report":
res = report.execute_script_report(filters)
columns, result, message, chart, report_summary, skip_total_row = \
ljust_list(res, 6)
columns, result, message, chart, report_summary, skip_total_row = ljust_list(res, 6)
if report.custom_columns:
# Original query columns, needed to reorder data as per custom columns
@ -67,7 +81,7 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
# Reordered columns
columns = json.loads(report.custom_columns)
result = reorder_data_for_custom_columns(columns, query_columns, result, report.report_type)
result = reorder_data_for_custom_columns(columns, query_columns, result)
result = add_data_to_custom_columns(columns, result)
@ -75,7 +89,7 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
result = add_data_to_custom_columns(custom_columns, result)
for custom_column in custom_columns:
columns.insert(custom_column['insert_after_index'] + 1, custom_column)
columns.insert(custom_column["insert_after_index"] + 1, custom_column)
if result:
result = get_filtered_data(report.ref_doctype, columns, result, user)
@ -91,17 +105,19 @@ def generate_report_result(report, filters=None, user=None, custom_columns=None)
"report_summary": report_summary,
"skip_total_row": skip_total_row or 0,
"status": None,
"execution_time": frappe.cache().hget('report_execution_time', report.name) or 0
"execution_time": frappe.cache().hget("report_execution_time", report.name)
or 0,
}
@frappe.whitelist()
def background_enqueue_run(report_name, filters=None, user=None):
"""run reports in background"""
if not user:
user = frappe.session.user
report = get_report_doc(report_name)
track_instance = \
frappe.get_doc({
track_instance = frappe.get_doc(
{
"doctype": "Prepared Report",
"report_name": report_name,
# This looks like an insanity but, without this it'd be very hard to find Prepared Reports matching given condition
@ -111,21 +127,24 @@ def background_enqueue_run(report_name, filters=None, user=None):
"report_type": report.report_type,
"query": report.query,
"module": report.module,
})
}
)
track_instance.insert(ignore_permissions=True)
frappe.db.commit()
track_instance.enqueue_report()
return {
"name": track_instance.name,
"redirect_url": get_url_to_form("Prepared Report", track_instance.name)
"redirect_url": get_url_to_form("Prepared Report", track_instance.name),
}
@frappe.whitelist()
def get_script(report_name):
report = get_report_doc(report_name)
module = report.module or frappe.db.get_value("DocType", report.ref_doctype, "module")
module = report.module or frappe.db.get_value(
"DocType", report.ref_doctype, "module"
)
module_path = get_module_path(module)
report_folder = os.path.join(module_path, "report", scrub(report.name))
script_path = os.path.join(report_folder, scrub(report.name) + ".js")
@ -151,24 +170,38 @@ def get_script(report_name):
return {
"script": render_include(script),
"html_format": html_format,
"execution_time": frappe.cache().hget('report_execution_time', report_name) or 0
"execution_time": frappe.cache().hget("report_execution_time", report_name)
or 0,
}
@frappe.whitelist()
@frappe.read_only()
def run(report_name, filters=None, user=None, ignore_prepared_report=False, custom_columns=None):
def run(
report_name,
filters=None,
user=None,
ignore_prepared_report=False,
custom_columns=None,
):
report = get_report_doc(report_name)
if not user:
user = frappe.session.user
if not frappe.has_permission(report.ref_doctype, "report"):
frappe.msgprint(_("Must have report permission to access this report."),
raise_exception=True)
frappe.msgprint(
_("Must have report permission to access this report."),
raise_exception=True,
)
result = None
if report.prepared_report and not report.disable_prepared_report and not ignore_prepared_report:
if (
report.prepared_report
and not report.disable_prepared_report
and not ignore_prepared_report
and not custom_columns
):
if filters:
if isinstance(filters, string_types):
filters = json.loads(filters)
@ -181,10 +214,13 @@ def run(report_name, filters=None, user=None, ignore_prepared_report=False, cust
else:
result = generate_report_result(report, filters, user, custom_columns)
result["add_total_row"] = report.add_total_row and not result.get('skip_total_row', False)
result["add_total_row"] = report.add_total_row and not result.get(
"skip_total_row", False
)
return result
def add_data_to_custom_columns(columns, result):
custom_fields_data = get_data_for_custom_report(columns)
@ -196,44 +232,42 @@ def add_data_to_custom_columns(columns, result):
if isinstance(row, list):
for idx, column in enumerate(columns):
if column.get('link_field'):
row_obj[column['fieldname']] = None
if column.get("link_field"):
row_obj[column["fieldname"]] = None
row.insert(idx, None)
else:
row_obj[column['fieldname']] = row[idx]
row_obj[column["fieldname"]] = row[idx]
data.append(row_obj)
else:
data.append(row)
for row in data:
for column in columns:
if column.get('link_field'):
fieldname = column['fieldname']
key = (column['doctype'], fieldname)
link_field = column['link_field']
row[fieldname] = custom_fields_data.get(key, {}).get(row.get(link_field))
if column.get("link_field"):
fieldname = column["fieldname"]
key = (column["doctype"], fieldname)
link_field = column["link_field"]
row[fieldname] = custom_fields_data.get(key, {}).get(
row.get(link_field)
)
return data
def reorder_data_for_custom_columns(custom_columns, columns, result, report_type):
def reorder_data_for_custom_columns(custom_columns, columns, result):
if not result:
return []
if report_type == 'Query Report':
# Assume list result for query reports
# Query report columns exclusively use Label
custom_column_labels = [col["label"] for col in custom_columns]
original_column_labels = [col.split(":")[0] for col in columns]
return get_columns_from_list(custom_column_labels, original_column_labels, result)
custom_column_names = [col["fieldname"] for col in custom_columns]
columns = [get_column_as_dict(col) for col in columns]
if isinstance(result[0], list) or isinstance(result[0], tuple):
# If the result is a list of lists
original_column_names = [col["fieldname"] for col in columns]
custom_column_names = [col["label"] for col in custom_columns]
original_column_names = [col["label"] for col in columns]
return get_columns_from_list(custom_column_names, original_column_names, result)
else:
# If the result is a list of dicts
return get_columns_from_dict(custom_column_names, result)
# columns do not need to be reordered if result is a list of dicts
return result
def get_columns_from_list(columns, target_columns, result):
reordered_result = []
@ -251,20 +285,6 @@ def get_columns_from_list(columns, target_columns, result):
return reordered_result
def get_columns_from_dict(columns, result):
reordered_result = []
for res in result:
r = {}
for col_name in columns:
try:
r[col_name] = res[col_name]
except KeyError:
pass
reordered_result.append(r)
return reordered_result
def get_prepared_report_result(report, filters, dn="", user=None):
latest_report_data = {}
@ -274,14 +294,15 @@ def get_prepared_report_result(report, filters, dn="", user=None):
doc = frappe.get_doc("Prepared Report", dn)
else:
# Only look for completed prepared reports with given filters.
doc_list = frappe.get_all("Prepared Report",
doc_list = frappe.get_all(
"Prepared Report",
filters={
"status": "Completed",
"filters": json.dumps(filters),
"owner": user,
"report_name": report.get('custom_report') or report.get('report_name')
"report_name": report.get("custom_report") or report.get("report_name"),
},
order_by = 'creation desc'
order_by="creation desc",
)
if doc_list:
@ -291,11 +312,15 @@ def get_prepared_report_result(report, filters, dn="", user=None):
if doc:
try:
# Prepared Report data is stored in a GZip compressed JSON file
attached_file_name = frappe.db.get_value("File", {"attached_to_doctype": doc.doctype, "attached_to_name":doc.name}, "name")
attached_file = frappe.get_doc('File', attached_file_name)
attached_file_name = frappe.db.get_value(
"File",
{"attached_to_doctype": doc.doctype, "attached_to_name": doc.name},
"name",
)
attached_file = frappe.get_doc("File", attached_file_name)
compressed_content = attached_file.get_content()
uncompressed_content = gzip_decompress(compressed_content)
data = json.loads(uncompressed_content)
data = json.loads(uncompressed_content.decode("utf-8"))
if data:
columns = json.loads(doc.columns) if doc.columns else data[0]
@ -303,23 +328,18 @@ def get_prepared_report_result(report, filters, dn="", user=None):
if isinstance(column, dict) and column.get("label"):
column["label"] = _(column["label"])
latest_report_data = {
"columns": columns,
"result": data
}
latest_report_data = {"columns": columns, "result": data}
except Exception:
frappe.log_error(frappe.get_traceback())
frappe.delete_doc("Prepared Report", doc.name)
frappe.db.commit()
doc = None
latest_report_data.update({
"prepared_report": True,
"doc": doc
})
latest_report_data.update({"prepared_report": True, "doc": doc})
return latest_report_data
@frappe.whitelist()
def export_query():
"""export from query reports"""
@ -335,8 +355,8 @@ def export_query():
if isinstance(data.get("report_name"), string_types):
report_name = data["report_name"]
frappe.permissions.can_export(
frappe.get_cached_value('Report', report_name, 'ref_doctype'),
raise_exception=True
frappe.get_cached_value("Report", report_name, "ref_doctype"),
raise_exception=True,
)
if isinstance(data.get("file_format_type"), string_types):
file_format_type = data["file_format_type"]
@ -353,19 +373,50 @@ def export_query():
data = run(report_name, filters, custom_columns=custom_columns)
data = frappe._dict(data)
if not data.columns:
frappe.respond_as_web_page(_("No data to export"),
_("You can try changing the filters of your report."))
frappe.respond_as_web_page(
_("No data to export"),
_("You can try changing the filters of your report."),
)
return
columns = get_columns_dict(data.columns)
from frappe.utils.xlsxutils import make_xlsx
data["result"] = handle_duration_fieldtype_values(
data.get("result"), data.get("columns")
)
xlsx_data = build_xlsx_data(columns, data, visible_idx, include_indentation)
xlsx_file = make_xlsx(xlsx_data, "Query Report")
frappe.response['filename'] = report_name + '.xlsx'
frappe.response['filecontent'] = xlsx_file.getvalue()
frappe.response['type'] = 'binary'
frappe.response["filename"] = report_name + ".xlsx"
frappe.response["filecontent"] = xlsx_file.getvalue()
frappe.response["type"] = "binary"
def handle_duration_fieldtype_values(result, columns):
for i, col in enumerate(columns):
fieldtype = None
if isinstance(col, string_types):
col = col.split(":")
if len(col) > 1:
if col[1]:
fieldtype = col[1]
if "/" in fieldtype:
fieldtype, options = fieldtype.split("/")
else:
fieldtype = "Data"
else:
fieldtype = col.get("fieldtype")
if fieldtype == "Duration":
for entry in range(0, len(result)):
val_in_seconds = result[entry][i]
if val_in_seconds:
duration_val = format_duration(val_in_seconds)
result[entry][i] = duration_val
return result
def build_xlsx_data(columns, data, visible_idx, include_indentation):
@ -384,13 +435,13 @@ def build_xlsx_data(columns, data, visible_idx, include_indentation):
if isinstance(row, dict) and row:
for idx in range(len(data.columns)):
# check if column is not hidden
# check if column is not hidden
if not columns[idx].get("hidden"):
label = columns[idx]["label"]
fieldname = columns[idx]["fieldname"]
cell_value = row.get(fieldname, row.get(label, ""))
if cint(include_indentation) and 'indent' in row and idx == 0:
cell_value = (' ' * cint(row['indent'])) + cell_value
if cint(include_indentation) and "indent" in row and idx == 0:
cell_value = (" " * cint(row["indent"])) + cell_value
row_data.append(cell_value)
else:
row_data = row
@ -399,8 +450,9 @@ def build_xlsx_data(columns, data, visible_idx, include_indentation):
return result
def add_total_row(result, columns, meta = None):
total_row = [""]*len(columns)
def add_total_row(result, columns, meta=None):
total_row = [""] * len(columns)
has_percent = []
for i, col in enumerate(columns):
fieldtype, options, fieldname = None, None, None
@ -426,10 +478,13 @@ def add_total_row(result, columns, meta = None):
options = col.get("options")
for row in result:
if i >= len(row): continue
if i >= len(row):
continue
cell = row.get(fieldname) if isinstance(row, dict) else row[i]
if fieldtype in ["Currency", "Int", "Float", "Percent"] and flt(cell):
if fieldtype in ["Currency", "Int", "Float", "Percent", "Duration"] and flt(
cell
):
total_row[i] = flt(total_row[i]) + flt(cell)
if fieldtype == "Percent" and i not in has_percent:
@ -437,12 +492,15 @@ def add_total_row(result, columns, meta = None):
if fieldtype == "Time" and cell:
if not total_row[i]:
total_row[i]=timedelta(hours=0,minutes=0,seconds=0)
total_row[i] = total_row[i] + cell
total_row[i] = timedelta(hours=0, minutes=0, seconds=0)
total_row[i] = total_row[i] + cell
if fieldtype=="Link" and options == "Currency":
total_row[i] = result[0].get(fieldname) if isinstance(result[0], dict) else result[0][i]
if fieldtype == "Link" and options == "Currency":
total_row[i] = (
result[0].get(fieldname)
if isinstance(result[0], dict)
else result[0][i]
)
for i in has_percent:
total_row[i] = flt(total_row[i]) / len(result)
@ -461,35 +519,44 @@ def add_total_row(result, columns, meta = None):
result.append(total_row)
return result
@frappe.whitelist()
def get_data_for_custom_field(doctype, field):
if not frappe.has_permission(doctype, "read"):
frappe.throw(_("Not Permitted"), frappe.PermissionError)
value_map = frappe._dict(frappe.get_all(doctype,
fields=["name", field],
as_list=1))
value_map = frappe._dict(frappe.get_all(doctype, fields=["name", field], as_list=1))
return value_map
def get_data_for_custom_report(columns):
doc_field_value_map = {}
for column in columns:
if column.get('link_field'):
fieldname = column.get('fieldname')
doctype = column.get('doctype')
doc_field_value_map[(doctype, fieldname)] = get_data_for_custom_field(doctype, fieldname)
if column.get("link_field"):
fieldname = column.get("fieldname")
doctype = column.get("doctype")
doc_field_value_map[(doctype, fieldname)] = get_data_for_custom_field(
doctype, fieldname
)
return doc_field_value_map
@frappe.whitelist()
def save_report(reference_report, report_name, columns):
report_doc = get_report_doc(reference_report)
docname = frappe.db.exists("Report",
{'report_name': report_name, 'is_standard': 'No', 'report_type': 'Custom Report'})
docname = frappe.db.exists(
"Report",
{
"report_name": report_name,
"is_standard": "No",
"report_type": "Custom Report",
},
)
if docname:
report = frappe.get_doc("Report", docname)
report.update({"json": columns})
@ -498,15 +565,17 @@ def save_report(reference_report, report_name, columns):
return docname
else:
new_report = frappe.get_doc({
'doctype': 'Report',
'report_name': report_name,
'json': columns,
'ref_doctype': report_doc.ref_doctype,
'is_standard': 'No',
'report_type': 'Custom Report',
'reference_report': reference_report
}).insert(ignore_permissions = True)
new_report = frappe.get_doc(
{
"doctype": "Report",
"report_name": report_name,
"json": columns,
"ref_doctype": report_doc.ref_doctype,
"is_standard": "No",
"report_type": "Custom Report",
"reference_report": reference_report,
}
).insert(ignore_permissions=True)
frappe.msgprint(_("{0} saved successfully").format(new_report.name))
return new_report.name
@ -524,10 +593,22 @@ def get_filtered_data(ref_doctype, columns, data, user):
if match_filters_per_doctype:
for row in data:
# Why linked_doctypes.get(ref_doctype)? because if column is empty, linked_doctypes[ref_doctype] is removed
if linked_doctypes.get(ref_doctype) and shared and row[linked_doctypes[ref_doctype]] in shared:
if (
linked_doctypes.get(ref_doctype)
and shared
and row[linked_doctypes[ref_doctype]] in shared
):
result.append(row)
elif has_match(row, linked_doctypes, match_filters_per_doctype, ref_doctype, if_owner, columns_dict, user):
elif has_match(
row,
linked_doctypes,
match_filters_per_doctype,
ref_doctype,
if_owner,
columns_dict,
user,
):
result.append(row)
else:
result = list(data)
@ -535,17 +616,25 @@ def get_filtered_data(ref_doctype, columns, data, user):
return result
def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner, columns_dict, user):
def has_match(
row,
linked_doctypes,
doctype_match_filters,
ref_doctype,
if_owner,
columns_dict,
user,
):
"""Returns True if after evaluating permissions for each linked doctype
- There is an owner match for the ref_doctype
- `and` There is a user permission match for all linked doctypes
- There is an owner match for the ref_doctype
- `and` There is a user permission match for all linked doctypes
Returns True if the row is empty
Returns True if the row is empty
Note:
Each doctype could have multiple conflicting user permission doctypes.
Hence even if one of the sets allows a match, it is true.
This behavior is equivalent to the trickling of user permissions of linked doctypes to the ref doctype.
Note:
Each doctype could have multiple conflicting user permission doctypes.
Hence even if one of the sets allows a match, it is true.
This behavior is equivalent to the trickling of user permissions of linked doctypes to the ref doctype.
"""
resultant_match = True
@ -556,20 +645,22 @@ def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner
for doctype, filter_list in doctype_match_filters.items():
matched_for_doctype = False
if doctype==ref_doctype and if_owner:
if doctype == ref_doctype and if_owner:
idx = linked_doctypes.get("User")
if (idx is not None
and row[idx]==user
and columns_dict[idx]==columns_dict.get("owner")):
# owner match is true
matched_for_doctype = True
if (
idx is not None
and row[idx] == user
and columns_dict[idx] == columns_dict.get("owner")
):
# owner match is true
matched_for_doctype = True
if not matched_for_doctype:
for match_filters in filter_list:
match = True
for dt, idx in linked_doctypes.items():
# case handled above
if dt=="User" and columns_dict[idx]==columns_dict.get("owner"):
if dt == "User" and columns_dict[idx] == columns_dict.get("owner"):
continue
cell_value = None
@ -578,7 +669,11 @@ def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner
elif isinstance(row, (list, tuple)):
cell_value = row[idx]
if dt in match_filters and cell_value not in match_filters.get(dt) and frappe.db.exists(dt, cell_value):
if (
dt in match_filters
and cell_value not in match_filters.get(dt)
and frappe.db.exists(dt, cell_value)
):
match = False
break
@ -597,6 +692,7 @@ def has_match(row, linked_doctypes, doctype_match_filters, ref_doctype, if_owner
return resultant_match
def get_linked_doctypes(columns, data):
linked_doctypes = {}
@ -604,7 +700,7 @@ def get_linked_doctypes(columns, data):
for idx, col in enumerate(columns):
df = columns_dict[idx]
if df.get("fieldtype")=="Link":
if df.get("fieldtype") == "Link":
if data and isinstance(data[0], (list, tuple)):
linked_doctypes[df["options"]] = idx
else:
@ -633,38 +729,45 @@ def get_linked_doctypes(columns, data):
return linked_doctypes
def get_columns_dict(columns):
"""Returns a dict with column docfield values as dict
The keys for the dict are both idx and fieldname,
so either index or fieldname can be used to search for a column's docfield properties
The keys for the dict are both idx and fieldname,
so either index or fieldname can be used to search for a column's docfield properties
"""
columns_dict = frappe._dict()
for idx, col in enumerate(columns):
col_dict = frappe._dict()
# string
if isinstance(col, string_types):
col = col.split(":")
if len(col) > 1:
if "/" in col[1]:
col_dict["fieldtype"], col_dict["options"] = col[1].split("/")
else:
col_dict["fieldtype"] = col[1]
col_dict["label"] = col[0]
col_dict["fieldname"] = frappe.scrub(col[0])
# dict
else:
col_dict.update(col)
if "fieldname" not in col_dict:
col_dict["fieldname"] = frappe.scrub(col_dict["label"])
col_dict = get_column_as_dict(col)
columns_dict[idx] = col_dict
columns_dict[col_dict["fieldname"]] = col_dict
return columns_dict
def get_column_as_dict(col):
col_dict = frappe._dict()
# string
if isinstance(col, string_types):
col = col.split(":")
if len(col) > 1:
if "/" in col[1]:
col_dict["fieldtype"], col_dict["options"] = col[1].split("/")
else:
col_dict["fieldtype"] = col[1]
col_dict["label"] = col[0]
col_dict["fieldname"] = frappe.scrub(col[0])
# dict
else:
col_dict.update(col)
if "fieldname" not in col_dict:
col_dict["fieldname"] = frappe.scrub(col_dict["label"])
return col_dict
def get_user_match_filters(doctypes, user):
match_filters = {}

View file

@ -11,7 +11,7 @@ from frappe.model.db_query import DatabaseQuery
from frappe import _
from six import string_types, StringIO
from frappe.core.doctype.access_log.access_log import make_access_log
from frappe.utils import cstr
from frappe.utils import cstr, format_duration
@frappe.whitelist()
@ -167,6 +167,8 @@ def export_query():
for i, row in enumerate(ret):
data.append([i+1] + list(row))
data = handle_duration_fieldtype_values(doctype, data, db_query.fields)
if file_format_type == "CSV":
# convert to csv
@ -236,6 +238,29 @@ def get_labels(fields, doctype):
return labels
def handle_duration_fieldtype_values(doctype, data, fields):
for field in fields:
key = field.split(" as ")[0]
if key.startswith(('count(', 'sum(', 'avg(')): continue
if "." in key:
parenttype, fieldname = key.split(".")[0][4:-1], key.split(".")[1].strip("`")
else:
parenttype = doctype
fieldname = field.strip("`")
df = frappe.get_meta(parenttype).get_field(fieldname)
if df and df.fieldtype == 'Duration':
index = fields.index(field) + 1
for i in range(1, len(data)):
val_in_seconds = data[i][index]
if val_in_seconds:
duration_val = format_duration(val_in_seconds, df.hide_days)
data[i][index] = duration_val
return data
@frappe.whitelist()
def delete_items():
"""delete selected items"""

View file

@ -4,12 +4,13 @@
frappe.ui.form.on("Email Queue", {
refresh: function(frm) {
if (["Not Sent","Partially Sent"].indexOf(frm.doc.status)!=-1) {
frm.add_custom_button("Send Now", function() {
let button = frm.add_custom_button("Send Now", function() {
frappe.call({
method: 'frappe.email.doctype.email_queue.email_queue.send_now',
args: {
name: frm.doc.name
},
btn: button,
callback: function() {
frm.reload_doc();
}
@ -18,12 +19,13 @@ frappe.ui.form.on("Email Queue", {
}
if (["Error","Partially Errored"].indexOf(frm.doc.status)!=-1) {
frm.add_custom_button("Retry Sending", function() {
let button = frm.add_custom_button("Retry Sending", function() {
frm.call({
method: "retry_sending",
args: {
name: frm.doc.name
},
btn: button,
callback: function(r) {
if (!r.exc) {
frm.set_value("status", "Not Sent");

View file

@ -155,7 +155,12 @@ def get_context(context):
allow_update = False
try:
if allow_update and not doc.flags.in_notification_update:
doc.set(self.set_property_after_alert, self.property_value)
fieldname = self.set_property_after_alert
value = self.property_value
if doc.meta.get_field(fieldname).fieldtype in frappe.model.numeric_fieldtypes:
value = frappe.utils.cint(value)
doc.set(fieldname, value)
doc.flags.updater_reference = {
'doctype': self.doctype,
'docname': self.name,
@ -177,7 +182,7 @@ def get_context(context):
recipients, cc, bcc = self.get_list_of_recipients(doc, context)
users = recipients + cc + bcc
if not users:
return

View file

@ -282,6 +282,7 @@ setup_wizard_exception = [
]
before_migrate = ['frappe.patches.v11_0.sync_user_permission_doctype_before_migrate.execute']
after_migrate = ['frappe.website.doctype.website_theme.website_theme.after_migrate']
otp_methods = ['OTP App','Email','SMS']
user_privacy_documents = [

View file

@ -345,8 +345,7 @@ def extract_sql_gzip(sql_gz_path):
return decompressed_file
def extract_tar_files(site_name, file_path, folder_name):
def extract_files(site_name, file_path, folder_name):
import subprocess
import shutil
@ -362,7 +361,10 @@ def extract_tar_files(site_name, file_path, folder_name):
tar_path = os.path.join(abs_site_path, tar_name)
try:
subprocess.check_output(['tar', 'xvf', tar_path, '--strip', '2'], cwd=abs_site_path)
if file_path.endswith(".tar"):
subprocess.check_output(['tar', 'xvf', tar_path, '--strip', '2'], cwd=abs_site_path)
elif file_path.endswith(".tgz"):
subprocess.check_output(['tar', 'zxvf', tar_path, '--strip', '2'], cwd=abs_site_path)
except:
raise
finally:

View file

@ -97,8 +97,8 @@
"label": "Push to Google Contacts"
}
],
"modified": "2019-09-13 15:53:19.569924",
"modified_by": "himanshu@erpnext.com",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Google Contacts",
"owner": "Administrator",

View file

@ -100,8 +100,8 @@
}
],
"issingle": 1,
"modified": "2019-08-21 17:33:28.516614",
"modified_by": "qwe@qwe.com",
"modified": "2020-09-18 17:26:09.703215",
"modified_by": "Administrator",
"module": "Integrations",
"name": "Google Drive",
"owner": "Administrator",

View file

@ -19,7 +19,7 @@ def send_email(success, service_name, doctype, email_field, error_status=None):
return
if success:
if not frappe.db.get_value(doctype, None, "send_email_for_successful_backup"):
if not frappe.db.get_single_value(doctype, "send_email_for_successful_backup"):
return
subject = "Backup Upload Successful"
@ -28,7 +28,6 @@ def send_email(success, service_name, doctype, email_field, error_status=None):
<p>Hi there, this is just to inform you that your backup was successfully uploaded to your {0} bucket. So relax!</p>""".format(
service_name
)
else:
subject = "[Warning] Backup Upload Failed"
message = """

View file

@ -22,7 +22,7 @@ from frappe.core.doctype.scheduled_job_type.scheduled_job_type import sync_jobs
from frappe.search.website_search import build_index_for_all_routes
def migrate(verbose=True, rebuild_website=False, skip_failing=False, skip_search_index=False):
def migrate(verbose=True, skip_failing=False, skip_search_index=False):
'''Migrate all apps to the current version, will:
- run before migrate hooks
- run patches

View file

@ -26,18 +26,16 @@ max_positive_value = {
DOCTYPES_FOR_DOCTYPE = ('DocType', 'DocField', 'DocPerm', 'DocType Action', 'DocType Link')
_classes = {}
def get_controller(doctype):
"""Returns the **class** object of the given DocType.
For `custom` type, returns `frappe.model.document.Document`.
:param doctype: DocType name as string."""
from frappe.model.document import Document
from frappe.utils.nestedset import NestedSet
global _classes
if not doctype in _classes:
def _get_controller():
from frappe.model.document import Document
from frappe.utils.nestedset import NestedSet
module_name, custom = frappe.db.get_value("DocType", doctype, ("module", "custom"), cache=True) \
or ["Core", False]
@ -48,8 +46,17 @@ def get_controller(doctype):
is_tree = False
_class = NestedSet if is_tree else Document
else:
module = load_doctype_module(doctype, module_name)
classname = doctype.replace(" ", "").replace("-", "")
class_overrides = frappe.get_hooks('override_doctype_class')
if class_overrides and class_overrides.get(doctype):
import_path = frappe.get_hooks('override_doctype_class').get(doctype)[-1]
module_path, classname = import_path.rsplit('.', 1)
module = frappe.get_module(module_path)
if not hasattr(module, classname):
raise ImportError('{0}: {1} does not exist in module {2}'.format(doctype, classname, module_path))
else:
module = load_doctype_module(doctype, module_name)
classname = doctype.replace(" ", "").replace("-", "")
if hasattr(module, classname):
_class = getattr(module, classname)
if issubclass(_class, BaseDocument):
@ -58,9 +65,13 @@ def get_controller(doctype):
raise ImportError(doctype)
else:
raise ImportError(doctype)
_classes[doctype] = _class
return _class
return _classes[doctype]
if frappe.local.dev_server:
return _get_controller()
key = '{}:doctype_classes'.format(frappe.local.site)
return frappe.cache().hget(key, doctype, generator=_get_controller, shared=True)
class BaseDocument(object):
ignore_in_getter = ("doctype", "_meta", "meta", "_table_fields", "_valid_columns")
@ -336,7 +347,7 @@ class BaseDocument(object):
if self.meta.autoname=="hash":
# hash collision? try again
frappe.flags.retry_count = (frappe.flags.retry_count or 0) + 1
if frappe.flags.retry_count > 5:
if frappe.flags.retry_count > 5 and not frappe.flags.in_test:
raise
self.name = None
self.db_insert()

View file

@ -37,7 +37,8 @@ class DatabaseQuery(object):
ignore_permissions=False, user=None, with_comment_count=False,
join='left join', distinct=False, start=None, page_length=None, limit=None,
ignore_ifnull=False, save_user_settings=False, save_user_settings_fields=False,
update=None, add_total_row=None, user_settings=None, reference_doctype=None, return_query=False, strict=True):
update=None, add_total_row=None, user_settings=None, reference_doctype=None,
return_query=False, strict=True, pluck=None):
if not ignore_permissions and not frappe.has_permission(self.doctype, "read", user=user):
frappe.flags.error_message = _('Insufficient Permission for {0}').format(frappe.bold(self.doctype))
raise frappe.PermissionError(self.doctype)
@ -57,7 +58,10 @@ class DatabaseQuery(object):
if fields:
self.fields = fields
else:
self.fields = ["`tab{0}`.`name`".format(self.doctype)]
if pluck:
self.fields = ["`tab{0}`.`{1}`".format(self.doctype, pluck)]
else:
self.fields = ["`tab{0}`.`name`".format(self.doctype)]
if start: limit_start = start
if page_length: limit_page_length = page_length
@ -104,6 +108,9 @@ class DatabaseQuery(object):
self.save_user_settings_fields = save_user_settings_fields
self.update_user_settings()
if pluck:
return [d[pluck] for d in result]
return result
def build_and_run(self):
@ -162,7 +169,18 @@ class DatabaseQuery(object):
self.set_field_tables()
args.fields = ', '.join(self.fields)
fields = []
for field in self.fields:
if (field.strip().startswith(("`", "*")) or "(" in field):
fields.append(field)
elif "as" in field.lower().split(" "):
col, _, new = field.split()
fields.append("`{0}` as {1}".format(col, new))
else:
fields.append("`{0}`".format(field))
args.fields = ", ".join(fields)
self.set_order_by(args)

View file

@ -13,7 +13,7 @@ from oauthlib.oauth2.rfc6749.endpoints.token import TokenEndpoint
from oauthlib.oauth2.rfc6749.endpoints.resource import ResourceEndpoint
from oauthlib.oauth2.rfc6749.endpoints.revocation import RevocationEndpoint
from oauthlib.common import Request
from six.moves.urllib.parse import parse_qs, urlparse, unquote
from six.moves.urllib.parse import unquote
def get_url_delimiter(separator_character=" "):
return separator_character
@ -94,19 +94,13 @@ class OAuthWebRequestValidator(RequestValidator):
def validate_scopes(self, client_id, scopes, client, request, *args, **kwargs):
# Is the client allowed to access the requested scopes?
client_scopes = frappe.db.get_value("OAuth Client", client_id, 'scopes').split(get_url_delimiter())
are_scopes_valid = True
for scp in scopes:
are_scopes_valid = are_scopes_valid and True if scp in client_scopes else False
return are_scopes_valid
allowed_scopes = get_client_scopes(client_id)
return all(scope in allowed_scopes for scope in scopes)
def get_default_scopes(self, client_id, request, *args, **kwargs):
# Scopes a client will authorize for if none are supplied in the
# authorization request.
scopes = frappe.db.get_value("OAuth Client", client_id, 'scopes').split(get_url_delimiter())
scopes = get_client_scopes(client_id)
request.scopes = scopes #Apparently this is possible.
return scopes
@ -440,3 +434,8 @@ def delete_oauth2_data():
frappe.delete_doc("OAuth Bearer Token", token["name"])
if commit_code or commit_token:
frappe.db.commit()
def get_client_scopes(client_id):
scopes_string = frappe.db.get_value("OAuth Client", client_id, "scopes")
return scopes_string.split()

View file

@ -312,3 +312,4 @@ frappe.patches.v13_0.enable_custom_script
frappe.patches.v13_0.update_newsletter_content_type
execute:frappe.db.set_value('Website Settings', 'Website Settings', {'navbar_template': 'Standard Navbar', 'footer_template': 'Standard Footer'})
frappe.patches.v13_0.delete_event_producer_and_consumer_keys
frappe.patches.v13_0.web_template_set_module #2020-10-05

View file

@ -0,0 +1,16 @@
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
"""Set default module for standard Web Template, if none."""
frappe.reload_doctype('Web Template')
frappe.reload_doctype('Web Template Field')
standard_templates = frappe.get_list('Web Template', {'standard': 1})
for template in standard_templates:
doc = frappe.get_doc('Web Template', template.name)
if not doc.module:
doc.module = 'Website'
doc.save()

View file

@ -52,7 +52,7 @@ frappe.Application = Class.extend({
this.set_favicon();
this.setup_analytics();
this.set_fullwidth_if_enabled();
this.add_browser_class();
this.setup_energy_point_listeners();
frappe.ui.keys.setup();
@ -511,6 +511,16 @@ frappe.Application = Class.extend({
}
},
add_browser_class() {
let browsers = ['Chrome', 'Firefox', 'Safari'];
for (let browser of browsers) {
if (navigator.userAgent.includes(browser)) {
$('html').addClass(browser.toLowerCase());
return;
}
}
},
set_fullwidth_if_enabled() {
frappe.ui.toolbar.set_fullwidth_if_enabled();
},

View file

@ -31,7 +31,7 @@ $.extend(frappe.model, {
{fieldname:'docstatus', fieldtype:'Int', label:__('Document Status')},
],
numeric_fieldtypes: ["Int", "Float", "Currency", "Percent"],
numeric_fieldtypes: ["Int", "Float", "Currency", "Percent", "Duration"],
std_fields_table: [
{fieldname:'parent', fieldtype:'Data', label:__('Parent')},

View file

@ -824,8 +824,14 @@ Object.assign(frappe.utils, {
};
},
get_formatted_duration(value, duration_options) {
get_formatted_duration(value, duration_options=null) {
let duration = '';
if (!duration_options) {
duration_options = {
hide_days: 0,
hide_seconds: 0
};
}
if (value) {
let total_duration = frappe.utils.seconds_to_duration(value, duration_options);

View file

@ -466,7 +466,7 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList {
df.onchange = () => {
this.refresh_filters_dependency();
let current_filters = this.get_filter_value();
let current_filters = this.get_filter_values();
if (this.previous_filters
&& (JSON.stringify(this.previous_filters) === JSON.stringify(current_filters))) {
// filter values have not changed
@ -1327,6 +1327,9 @@ frappe.views.QueryReport = class QueryReport extends frappe.views.BaseList {
return row
.slice(standard_column_count)
.map((cell, i) => {
if (cell.column.fieldtype === "Duration") {
cell.content = frappe.utils.get_formatted_duration(cell.content);
}
if (include_indentation && i===0) {
cell.content = ' '.repeat(row.meta.indent) + (cell.content || '');
}

View file

@ -139,16 +139,6 @@ export default class WebForm extends frappe.ui.FieldGroup {
this.handle_success(response.message);
frappe.web_form.events.trigger('after_save');
this.after_save && this.after_save();
// args doctype and docname added to link doctype in file manager
frappe.call({
type: 'POST',
method: "frappe.handler.upload_file",
args: {
file_url: response.message.attachment,
doctype: response.message.doctype,
docname: response.message.name
}
});
}
},
always: function() {

View file

@ -847,9 +847,13 @@ input[type="checkbox"] {
// Firefox doesn't support
// pseudo elements on checkbox
@supports (-moz-appearance: none) or (-ms-ime-align:auto) {
html.firefox, html.safari {
input[type="checkbox"] {
height: @checkbox-height !important
height: @checkbox-height !important;
&:before {
visibility: hidden;
}
}
}
@ -1188,4 +1192,4 @@ body.no-sidebar {
&:not(:last-child) {
margin-bottom: 1em;
}
}
}

View file

@ -98,7 +98,7 @@
text-align: right;
}
.grid-row .grid-row-check {
html.chrome .grid-row .grid-row-check {
margin-top: 12px;
}

View file

@ -19,6 +19,25 @@ $text-muted: $gray-600 !default;
$border-color: $gray-300 !default;
$headings-color: $gray-900 !default;
$font-sizes: (
"xs": 0.75rem,
"sm": 0.875rem,
"base": 1rem,
"lg": 1.125rem,
"xl": 1.25rem,
"2xl": 1.5rem,
"3xl": 1.875rem,
"4xl": 2.25rem,
"5xl": 3rem,
"6xl": 4rem
);
@each $size, $value in $font-sizes {
.font-size-#{$size} {
font-size: $value;
}
}
$font-size-xs: 0.75rem !default;
$font-size-sm: 0.875rem !default;
$font-size-base: 1rem !default;
@ -60,12 +79,12 @@ $input-border-radius: 0.375rem;
$custom-control-indicator-bg: white;
$grid-breakpoints: (
xs: 0,
sm: 576px,
md: 768px,
lg: 992px,
xl: 1200px,
2xl: 1440px
xs: 0,
sm: 576px,
md: 768px,
lg: 992px,
xl: 1200px,
2xl: 1440px
) !default;
$spacers: (
@ -93,11 +112,11 @@ $spacers: (
48: 12rem,
52: 13rem,
56: 14rem,
64: 16rem,
64: 16rem
);
@import '~bootstrap/scss/functions';
@import '~bootstrap/scss/variables';
@import "~bootstrap/scss/functions";
@import "~bootstrap/scss/variables";
@import "~bootstrap/scss/mixins";
$code-color: $purple;

View file

@ -244,3 +244,9 @@ h5.modal-title {
white-space: nowrap;
text-overflow: ellipsis;
}
.about-section {
padding-top: 1rem;
}
.about-footer {
padding-top: 1rem;
}

View file

@ -1,4 +1,5 @@
{
"actions": [],
"creation": "2018-06-21 14:58:55.913619",
"doctype": "DocType",
"editable_grid": 1,
@ -109,8 +110,9 @@
"label": "Seen"
}
],
"in_create": 1,
"modified": "2019-08-21 15:51:05.288886",
"index_web_pages_for_search": 1,
"links": [],
"modified": "2020-10-06 17:25:40.477044",
"modified_by": "Administrator",
"module": "Social",
"name": "Energy Point Log",

View file

@ -1,5 +1,5 @@
{% macro footer_link(item) %}
<a href="{{ item.url | abs_url }}" class="footer-link">
<a href="{{ item.url | abs_url }}" {{ item.target }} class="footer-link">
{%- if item.icon -%}
<img src="{{ item.icon }}" alt="{{ item.label }}">
{%- else -%}

View file

@ -106,21 +106,25 @@ login.reset_sections = function(hide) {
login.login = function() {
login.reset_sections();
$(".for-login").toggle(true);
$("#login_email").focus();
}
login.steptwo = function() {
login.reset_sections();
$(".for-login").toggle(true);
$("#login_email").focus();
}
login.forgot = function() {
login.reset_sections();
$(".for-forgot").toggle(true);
$("#forgot_email").focus();
}
login.signup = function() {
login.reset_sections();
$(".for-signup").toggle(true);
$("#signup_fullname").focus();
}

View file

@ -7,19 +7,19 @@
web_block.css_class
]) -%}
{%- if web_template.type == 'Section' -%}
{%- if web_template_type == 'Section' -%}
{%- if not web_block.hide_block -%}
<section class="section {{ classes }}" data-section-idx="{{ web_block.idx | e }}"
data-section-template="{{ web_block.web_template | e }}">
{%- if web_block.add_container -%}
<div class="container">
{%- endif -%}
{{ web_template.render(web_block.web_template_values) }}
{{ web_template_html }}
{%- if web_block.add_container -%}
</div>
{%- endif -%}
</section>
{%- endif -%}
{%- else -%}
{{ web_template.render(web_block.web_template_values) }}
{{ web_template_html }}
{%- endif -%}

View file

@ -1,12 +1,15 @@
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# imports - standard imports
import os
import shlex
import subprocess
import unittest
from glob import glob
# imports - module imports
import frappe
from frappe.utils.backups import fetch_latest_backups
def clean(value):
@ -15,9 +18,14 @@ def clean(value):
return value
class BaseTestCommands:
def execute(self, command):
command = command.format(**{"site": frappe.local.site})
class BaseTestCommands(unittest.TestCase):
def execute(self, command, kwargs=None):
site = {"site": frappe.local.site}
if kwargs:
kwargs.update(site)
else:
kwargs = site
command = command.replace("\n", " ").format(**kwargs)
command = shlex.split(command)
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.stdout = clean(self._proc.stdout)
@ -25,7 +33,7 @@ class BaseTestCommands:
self.returncode = clean(self._proc.returncode)
class TestCommands(BaseTestCommands, unittest.TestCase):
class TestCommands(BaseTestCommands):
def test_execute(self):
# test 1: execute a command expecting a numeric output
self.execute("bench --site {site} execute frappe.db.get_database_size")
@ -44,3 +52,70 @@ class TestCommands(BaseTestCommands, unittest.TestCase):
self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""")
self.assertEquals(self.returncode, 0)
self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
def test_backup(self):
	"""End-to-end checks for `bench backup` and its flags.

	Runs the real CLI via self.execute and inspects exit code, stdout and
	the files produced on disk. Uses `assertEqual` (the `assertEquals`
	alias is deprecated since Python 3.2).
	"""
	home = os.path.expanduser("~")
	site_backup_path = frappe.utils.get_site_path("private", "backups")

	# test 1: take a backup
	before_backup = fetch_latest_backups()
	self.execute("bench --site {site} backup")
	after_backup = fetch_latest_backups()

	self.assertEqual(self.returncode, 0)
	self.assertIn("successfully completed", self.stdout)
	# a fresh database dump must have been created
	self.assertNotEqual(before_backup["database"], after_backup["database"])

	# test 2: take a backup with --with-files
	before_backup = after_backup.copy()
	self.execute("bench --site {site} backup --with-files")
	after_backup = fetch_latest_backups()

	self.assertEqual(self.returncode, 0)
	self.assertIn("successfully completed", self.stdout)
	self.assertIn("with files", self.stdout)
	self.assertNotEqual(before_backup, after_backup)
	self.assertIsNotNone(after_backup["public"])
	self.assertIsNotNone(after_backup["private"])

	# test 3: take a backup with --backup-path
	backup_path = os.path.join(home, "backups")
	self.execute("bench --site {site} backup --backup-path {backup_path}", {"backup_path": backup_path})

	self.assertEqual(self.returncode, 0)
	self.assertTrue(os.path.exists(backup_path))
	# at minimum the database dump and the site config should be there
	self.assertGreaterEqual(len(os.listdir(backup_path)), 2)

	# test 4: take a backup with --backup-path-db, --backup-path-files,
	# --backup-path-private-files, --backup-path-conf
	kwargs = {
		key: os.path.join(home, key, value)
		for key, value in {
			"db_path": "database.sql.gz",
			"files_path": "public.tar",
			"private_path": "private.tar",
			"conf_path": "config.json"
		}.items()
	}

	self.execute("""bench
		--site {site} backup --with-files
		--backup-path-db {db_path}
		--backup-path-files {files_path}
		--backup-path-private-files {private_path}
		--backup-path-conf {conf_path}""", kwargs)

	self.assertEqual(self.returncode, 0)
	# every explicitly requested artifact path must exist
	for path in kwargs.values():
		self.assertTrue(os.path.exists(path))

	# test 5: take a backup with --compress
	self.execute("bench --site {site} backup --with-files --compress")
	self.assertEqual(self.returncode, 0)
	compressed_files = glob(site_backup_path + "/*.tgz")
	self.assertGreater(len(compressed_files), 0)

	# test 6: take a backup with --verbose
	self.execute("bench --site {site} backup --verbose")
	self.assertEqual(self.returncode, 0)

View file

@ -4,9 +4,15 @@
# MIT License. See license.txt
from __future__ import unicode_literals
import unittest
from random import choice
import frappe
from frappe.custom.doctype.custom_field.custom_field import create_custom_field
from frappe.utils import random_string
from frappe.utils.testutils import clear_custom_fields
class TestDB(unittest.TestCase):
def test_get_value(self):
@ -80,3 +86,59 @@ class TestDB(unittest.TestCase):
self.assertIn('tabCustom Field', frappe.flags.touched_tables)
frappe.flags.in_migrate = False
frappe.flags.touched_tables.clear()
def test_db_keywords_as_fields(self):
	"""Tests if DB keywords work as docfield names. If they're wrapped with grave accents.

	Fix: the read assertions previously hard-coded "ToDo" instead of using
	the `test_doctype` variable that the rest of the test uses — made
	consistent so changing the target doctype changes it everywhere.
	"""
	# Using random.choices, picked out a list of 40 keywords for testing
	all_keywords = {
		"mariadb": ["CHARACTER", "DELAYED", "LINES", "EXISTS", "YEAR_MONTH", "LOCALTIME", "BOTH", "MEDIUMINT",
			"LEFT", "BINARY", "DEFAULT", "KILL", "WRITE", "SQL_SMALL_RESULT", "CURRENT_TIME", "CROSS", "INHERITS",
			"SELECT", "TABLE", "ALTER", "CURRENT_TIMESTAMP", "XOR", "CASE", "ALL", "WHERE", "INT", "TO", "SOME",
			"DAY_MINUTE", "ERRORS", "OPTIMIZE", "REPLACE", "HIGH_PRIORITY", "VARBINARY", "HELP", "IS",
			"CHAR", "DESCRIBE", "KEY"],
		"postgres": ["WORK", "LANCOMPILER", "REAL", "HAVING", "REPEATABLE", "DATA", "USING", "BIT", "DEALLOCATE",
			"SERIALIZABLE", "CURSOR", "INHERITS", "ARRAY", "TRUE", "IGNORE", "PARAMETER_MODE", "ROW", "CHECKPOINT",
			"SHOW", "BY", "SIZE", "SCALE", "UNENCRYPTED", "WITH", "AND", "CONVERT", "FIRST", "SCOPE", "WRITE", "INTERVAL",
			"CHARACTER_SET_SCHEMA", "ADD", "SCROLL", "NULL", "WHEN", "TRANSACTION_ACTIVE",
			"INT", "FORTRAN", "STABLE"]
	}

	created_docs = []

	# pick the keyword list matching the active database backend
	fields = all_keywords[frappe.conf.db_type]
	test_doctype = "ToDo"

	def add_custom_field(field):
		# each keyword becomes a Data custom field named after it (lowercased)
		create_custom_field(test_doctype, {
			"fieldname": field.lower(),
			"label": field.title(),
			"fieldtype": 'Data',
		})

	# Create custom fields for test_doctype
	for field in fields:
		add_custom_field(field)

	# Create documents under that doctype and query them via ORM
	for _ in range(10):
		docfields = { key.lower(): random_string(10) for key in fields }
		doc = frappe.get_doc({"doctype": test_doctype, "description": random_string(20), **docfields})
		doc.insert()
		created_docs.append(doc.name)

	random_field = choice(fields).lower()
	random_doc = choice(created_docs)
	random_value = random_string(20)

	# Testing read
	self.assertEqual(list(frappe.get_all(test_doctype, fields=[random_field], limit=1)[0])[0], random_field)
	self.assertEqual(list(frappe.get_all(test_doctype, fields=["{0} as total".format(random_field)], limit=1)[0])[0], "total")

	# Testing update
	frappe.db.set_value(test_doctype, random_doc, random_field, random_value)
	self.assertEqual(frappe.db.get_value(test_doctype, random_doc, random_field), random_value)

	# Cleanup - delete records and remove custom fields
	for doc in created_docs:
		frappe.delete_doc(test_doctype, doc)
	clear_custom_fields(test_doctype)

View file

@ -347,6 +347,14 @@ class TestReportview(unittest.TestCase):
limit=50,
)
def test_pluck_name(self):
	"""pluck="name" should yield a flat list of names rather than row dicts."""
	plucked = DatabaseQuery("DocType").execute(filters={"name": "DocType"}, pluck="name")
	self.assertEqual(plucked, ["DocType"])
def test_pluck_any_field(self):
	"""pluck works for arbitrary columns, not just "name"."""
	plucked = DatabaseQuery("DocType").execute(filters={"name": "DocType"}, pluck="owner")
	self.assertEqual(plucked, ["Administrator"])
def create_event(subject="_Test Event", starts_on=None):
""" create a test event """

View file

@ -4,6 +4,7 @@
from __future__ import unicode_literals
import unittest
import frappe
from frappe.desk.doctype.todo.todo import ToDo
class TestHooks(unittest.TestCase):
def test_hooks(self):
@ -14,3 +15,23 @@ class TestHooks(unittest.TestCase):
self.assertTrue(isinstance(hooks.get("doc_events").get("*"), dict))
self.assertTrue("frappe.desk.notifications.clear_doctype_notifications" in
hooks.get("doc_events").get("*").get("on_update"))
def test_override_doctype_class(self):
	"""The `override_doctype_class` hook should make frappe.get_doc return the subclass.

	Fix: restore the patched `frappe.get_hooks` in a `finally` block —
	previously a failing assertion would leave the mock installed and
	leak into every subsequent test.
	"""
	# mock get_hooks so that ToDo resolves to CustomToDo
	original = frappe.get_hooks

	def get_hooks(hook=None, default=None, app_name=None):
		if hook == 'override_doctype_class':
			return {
				'ToDo': ['frappe.tests.test_hooks.CustomToDo']
			}
		return original(hook, default, app_name)

	frappe.get_hooks = get_hooks
	try:
		todo = frappe.get_doc(doctype='ToDo', description='asdf')
		self.assertTrue(isinstance(todo, CustomToDo))
	finally:
		# restore even on failure so other tests see the real hooks
		frappe.get_hooks = original
class CustomToDo(ToDo):
	# Override target used by test_override_doctype_class; intentionally empty —
	# only the class identity matters for the isinstance() check.
	pass

View file

@ -4,21 +4,27 @@ from __future__ import unicode_literals
import unittest, frappe, requests, time
from frappe.test_runner import make_test_records
from six.moves.urllib.parse import urlparse, parse_qs
from six.moves.urllib.parse import urlparse, parse_qs, urljoin
from urllib.parse import urlencode, quote
class TestOAuth20(unittest.TestCase):
def setUp(self):
make_test_records("OAuth Client")
make_test_records("User")
self.client_id = frappe.get_all("OAuth Client", fields=["*"])[0].get("client_id")
self.form_header = {"content-type": "application/x-www-form-urlencoded"}
self.scope = "all openid"
self.redirect_uri = "http://localhost"
# Set Frappe server URL reqired for id_token generation
try:
frappe_login_key = frappe.get_doc("Social Login Key", "frappe")
except frappe.DoesNotExistError:
frappe_login_key = frappe.new_doc("Social Login Key")
frappe_login_key.get_social_login_provider("Frappe", initialize=True)
frappe_login_key.base_url = "http://localhost:8000"
frappe_login_key.base_url = frappe.utils.get_url()
frappe_login_key.enable_social_login = 0
frappe_login_key.save()
frappe.db.commit()
@ -34,38 +40,39 @@ class TestOAuth20(unittest.TestCase):
frappe.db.commit()
session = requests.Session()
# Login
session.post(
frappe.get_site_config().host_name + "/api/method/login",
data={"usr":"test@example.com","pwd":"Eastern_43A1W"}
)
login(session)
redirect_destination = None
# Go to Authorize url
try:
session.get(
frappe.get_site_config().host_name + "/api/method/frappe.integrations.oauth2.authorize?client_id=" +
self.client_id +
"&scope=all%20openid&response_type=code&redirect_uri=http%3A%2F%2Flocalhost"
get_full_url("/api/method/frappe.integrations.oauth2.authorize"),
params=encode_params({
"client_id": self.client_id,
"scope": self.scope,
"response_type": "code",
"redirect_uri": self.redirect_uri
})
)
except requests.exceptions.ConnectionError as ex:
redirect_destination = ex.request.url
# Get authorization code from redirected URL
auth_code = urlparse(redirect_destination).query.split("=")[1]
payload = "grant_type=authorization_code&code="
payload += auth_code
payload += "&redirect_uri=http%3A%2F%2Flocalhost&client_id="
payload += self.client_id
headers = {'content-type':'application/x-www-form-urlencoded'}
query = parse_qs(urlparse(redirect_destination).query)
auth_code = query.get("code")[0]
# Request for bearer token
token_response = requests.post( frappe.get_site_config().host_name +
"/api/method/frappe.integrations.oauth2.get_token", data=payload, headers=headers)
token_response = requests.post(
get_full_url("/api/method/frappe.integrations.oauth2.get_token"),
headers=self.form_header,
data=encode_params({
"grant_type": "authorization_code",
"code": auth_code,
"redirect_uri": self.redirect_uri,
"client_id": self.client_id
})
)
# Parse bearer token json
bearer_token = token_response.json()
@ -86,45 +93,49 @@ class TestOAuth20(unittest.TestCase):
frappe.db.commit()
session = requests.Session()
# Login
session.post(
frappe.get_site_config().host_name + "/api/method/login",
data={"usr":"test@example.com","pwd":"Eastern_43A1W"}
)
login(session)
redirect_destination = None
# Go to Authorize url
try:
session.get(
frappe.get_site_config().host_name + "/api/method/frappe.integrations.oauth2.authorize?client_id=" +
self.client_id +
"&scope=all%20openid&response_type=code&redirect_uri=http%3A%2F%2Flocalhost"
get_full_url("/api/method/frappe.integrations.oauth2.authorize"),
params=encode_params({
"client_id": self.client_id,
"scope": self.scope,
"response_type": "code",
"redirect_uri": self.redirect_uri
})
)
except requests.exceptions.ConnectionError as ex:
redirect_destination = ex.request.url
# Get authorization code from redirected URL
auth_code = urlparse(redirect_destination).query.split("=")[1]
payload = "grant_type=authorization_code&code="
payload += auth_code
payload += "&redirect_uri=http%3A%2F%2Flocalhost&client_id="
payload += self.client_id
headers = {'content-type':'application/x-www-form-urlencoded'}
query = parse_qs(urlparse(redirect_destination).query)
auth_code = query.get("code")[0]
# Request for bearer token
token_response = requests.post( frappe.get_site_config().host_name +
"/api/method/frappe.integrations.oauth2.get_token", data=payload, headers=headers)
token_response = requests.post(
get_full_url("/api/method/frappe.integrations.oauth2.get_token"),
headers=self.form_header,
data=encode_params({
"grant_type": "authorization_code",
"code": auth_code,
"redirect_uri": self.redirect_uri,
"client_id": self.client_id
})
)
# Parse bearer token json
bearer_token = token_response.json()
# Revoke Token
revoke_token_response = requests.post(frappe.get_site_config().host_name + "/api/method/frappe.integrations.oauth2.revoke_token",
data="token=" + bearer_token.get("access_token"), headers=headers)
revoke_token_response = requests.post(
get_full_url("/api/method/frappe.integrations.oauth2.revoke_token"),
headers=self.form_header,
data={"token": bearer_token.get("access_token")}
)
self.assertTrue(revoke_token_response.status_code == 200)
@ -138,18 +149,18 @@ class TestOAuth20(unittest.TestCase):
client.save()
frappe.db.commit()
# Set payload
payload = "grant_type=password"
payload += "&username=test@example.com"
payload += "&password=Eastern_43A1W"
payload += "&client_id=" + self.client_id
payload += "&scope=openid%20all"
headers = {'content-type':'application/x-www-form-urlencoded'}
# Request for bearer token
token_response = requests.post( frappe.get_site_config().host_name +
"/api/method/frappe.integrations.oauth2.get_token", data=payload, headers=headers)
token_response = requests.post(
get_full_url("/api/method/frappe.integrations.oauth2.get_token"),
headers=self.form_header,
data=encode_params({
"grant_type": "password",
"username": "test@example.com",
"password": "Eastern_43A1W",
"client_id": self.client_id,
"scope": self.scope
})
)
# Parse bearer token json
bearer_token = token_response.json()
@ -158,7 +169,6 @@ class TestOAuth20(unittest.TestCase):
self.assertTrue(check_valid_openid_response(bearer_token.get("access_token")))
def test_login_using_implicit_token(self):
oauth_client = frappe.get_doc("OAuth Client", self.client_id)
oauth_client.grant_type = "Implicit"
oauth_client.response_type = "Token"
@ -166,43 +176,69 @@ class TestOAuth20(unittest.TestCase):
frappe.db.commit()
session = requests.Session()
# Login
session.post(
frappe.get_site_config().host_name + "/api/method/login",
data={"usr":"test@example.com","pwd":"Eastern_43A1W"}
)
login(session)
redirect_destination = None
# Go to Authorize url
try:
session.get(
frappe.get_site_config().host_name + "/api/method/frappe.integrations.oauth2.authorize?client_id=" +
self.client_id +
"&scope=all%20openid&response_type=token&redirect_uri=http%3A%2F%2Flocalhost"
get_full_url("/api/method/frappe.integrations.oauth2.authorize"),
params=encode_params({
"client_id": self.client_id,
"scope": self.scope,
"response_type": "token",
"redirect_uri": self.redirect_uri
})
)
except requests.exceptions.ConnectionError as ex:
redirect_destination = ex.request.url
response_url = dict(parse_qs(urlparse(redirect_destination).fragment))
response_dict = parse_qs(urlparse(redirect_destination).fragment)
self.assertTrue(response_dict.get("access_token"))
self.assertTrue(response_dict.get("expires_in"))
self.assertTrue(response_dict.get("scope"))
self.assertTrue(response_dict.get("token_type"))
self.assertTrue(check_valid_openid_response(response_dict.get("access_token")[0]))
self.assertTrue(response_url.get("access_token"))
self.assertTrue(response_url.get("expires_in"))
self.assertTrue(response_url.get("scope"))
self.assertTrue(response_url.get("token_type"))
self.assertTrue(check_valid_openid_response(response_url.get("access_token")[0]))
def check_valid_openid_response(access_token=None):
# Returns True for valid response
"""Return True for valid response."""
# Use token in header
headers = {}
if access_token:
headers["Authorization"] = 'Bearer ' + access_token
headers["Authorization"] = "Bearer " + access_token
# check openid for email test@example.com
openid_response = requests.get(frappe.get_site_config().host_name +
"/api/method/frappe.integrations.oauth2.openid_profile", headers=headers)
openid_response = requests.get(
get_full_url("/api/method/frappe.integrations.oauth2.openid_profile"),
headers=headers
)
return True if openid_response.status_code == 200 else False
return openid_response.status_code == 200
def login(session):
	"""Authenticate the standard test user on the given requests session."""
	credentials = {
		"usr": "test@example.com",
		"pwd": "Eastern_43A1W"
	}
	session.post(get_full_url("/api/method/login"), data=credentials)
def get_full_url(endpoint):
	"""Turn '/endpoint' into 'http://127.0.0.1:8000/endpoint'."""
	base_url = frappe.utils.get_url()
	return urljoin(base_url, endpoint)
def encode_params(params):
	"""
	Encode a dict of params into a query string.

	Uses `urllib.parse.quote` as the quoting function so whitespace becomes
	`%20` rather than `+` — oauthlib does not accept `+` as a space.
	"""
	query_string = urlencode(params, quote_via=quote)
	return query_string

View file

@ -620,7 +620,7 @@ def get_untranslated(lang, untranslated_file, get_all=False):
if get_all:
print(str(len(messages)) + " messages")
with open(untranslated_file, "w") as f:
with open(untranslated_file, "wb") as f:
for m in messages:
# replace \n with ||| so that internal linebreaks don't get split
f.write((escape_newlines(m[1]) + os.linesep).encode("utf-8"))
@ -633,10 +633,10 @@ def get_untranslated(lang, untranslated_file, get_all=False):
if untranslated:
print(str(len(untranslated)) + " missing translations of " + str(len(messages)))
with open(untranslated_file, "w") as f:
with open(untranslated_file, "wb") as f:
for m in untranslated:
# replace \n with ||| so that internal linebreaks don't get split
f.write(cstr(frappe.safe_encode(escape_newlines(m) + os.linesep)))
f.write((escape_newlines(m) + os.linesep).encode("utf-8"))
else:
print("all translated!")

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,2 @@
Comment Type,Tipo de Comentario,
Communication,Comunicacion,
Components,Componentes,

1 Comment Type Tipo de Comentario
2 Communication Comunicacion
Components Componentes

View file

@ -2,3 +2,5 @@ Refreshing...,Actualizando...,
Clear Filters,Limpiar Filtros,
No Events Today,Sin eventos hoy,
Today's Events,Eventos para hoy,
Disabled,Deshabilitado,
Your Shortcuts,Tus accesos,

1 Refreshing... Actualizando...
2 Clear Filters Limpiar Filtros
3 No Events Today Sin eventos hoy
4 Today's Events Eventos para hoy
5 Disabled Deshabilitado
6 Your Shortcuts Tus accesos

View file

@ -13,6 +13,8 @@ Maintenance User,Mantenimiento por el Usuario,
Message Examples,Ejemplos de Mensajes,
Middle Name (Optional),Segundo Nombre ( Opcional),
Next,Próximo,
No address added yet.,No se ha añadido ninguna dirección todavía.,
No contacts added yet.,No se han añadido contactos todavía.,
Replied,Respondió,
Report,Informe,
Report Builder,Generador de informes,
@ -79,7 +81,6 @@ Default Address Template cannot be deleted,Plantilla de la Direcciones Predeterm
Default Inbox,Bandeja de entrada por defecto,
Define workflows for forms.,Definir los flujos de trabajo para las formas .,
Defines actions on states and the next step and allowed roles.,Define las acciones de los estados y el siguiente paso y funciones permitidas.,
"Description for listing page, in plain text, only a couple of lines. (max 140 characters)","Descripción de la página perfil, en texto plano, sólo un par de líneas. (máx. 140 caracteres)",
"Different ""States"" this document can exist in. Like ""Open"", ""Pending Approval"" etc.","Este documento puede estar en Diferentes ""Estados"". Como ""Abierto"", ""Pendiente de Aprobación"", etc.",
Disable Customer Signup link in Login page,Desactivar enlace de registro del cliente en la página de entrada,
Disable Report,Desactivar Informe,
@ -89,6 +90,7 @@ DocType can not be merged,El DocType no se puede fusionar,
DocType is a Table / Form in the application.,El DocType es una tabla / formulario en la aplicación.,
DocType on which this Workflow is applicable.,El DocType en el presente del flujo de trabajo es aplicable.,
DocType or Field,DocType o campo,
Document Types,Tipos de Documento,
Download with Data,Descarga de datos,
Drag elements from the sidebar to add. Drag them back to trash.,Arrastre los elementos de la barra lateral para agregar. Arrastre de nuevo a la papelera de reciclaje.,
Dropbox Access Key,Clave de Acceso de Dropbox,
@ -97,7 +99,6 @@ Edit Custom HTML,Edición de HTML personalizado,
Edit HTML,Edición de HTML,
Edit Heading,Editar Rubro,
Email Account Name,Correo electrónico Nombre de cuenta,
Email By Document Field,Email Por Campo Documento,
Email Settings,Configuración del correo electrónico,
Email Signature,Firma Email,
Embed image slideshows in website pages.,Presentacion de imágenes incrustadas en páginas web .,
@ -107,7 +108,6 @@ Enter Form Type,Introduzca Tipo de Formulario,
"Enter static url parameters here (Eg. sender=ERPNext, username=ERPNext, password=1234 etc.)","Introduzca los parámetros de URL estáticas aquí (Ej. sender = ERPNext , nombre de usuario = ERPNext , contraseña = 1234 etc )",
Enter url parameter for receiver nos,Introduzca el parámetro url para el receptor no,
Field Description,Descripción del Campo,
Field {0} is not selectable.,El campo {0} no se puede seleccionar .,
Fieldname which will be the DocType for this link field.,Nombre de campo el cual será el DocType para enlazar el campo.,
File Size,Tamaño del archivo,
Float,flotador,
@ -197,7 +197,6 @@ Remove all customizations?,Eliminar todas las personalizaciones ?,
Repeat On,Repetir OK,
Repeat Till,Repita Hasta,
Repeat this Event,Repita este Evento,
Report Builder reports are managed directly by the report builder. Nothing to do.,Informes del Generador de informes son enviadas por el generador de informes . No hay nada que hacer.,
Report Hide,Ocultar Informe,
Report Manager,Administrador de informes,
Report Name,Nombre del informe,
@ -218,9 +217,6 @@ Row #{0}:,Fila # {0}:,
Row {0}: Not allowed to enable Allow on Submit for standard fields,Fila {0}: No se permite habilitar Permitir en Enviar para campos estándar,
Rules defining transition of state in the workflow.,Reglas que definen la transición de estado del flujo de trabajo .,
"Rules for how states are transitions, like next state and which role is allowed to change state etc.","Reglas para transición entre estados, como el siguiente estado y qué función permite cambiar de estado , etc",
Same file has already been attached to the record,El mismo archivo ya se ha adjuntado al registro,
Scheduled to send to {0},Programado para enviar a {0},
Scheduled to send to {0} recipients,Programado para enviar a {0} destinatarios,
Script,Guión,
Script Report,Informe de secuencias de comandos,
Script to attach to all web pages.,Guión para unir a todas las páginas web.,
@ -318,7 +314,6 @@ This role update User Permissions for a user,Este función actualiza los Permiso
Time Zones,Husos horarios,
Title Prefix,Prefijo del Título,
Title field must be a valid fieldname,Campo Título debe ser un nombre de campo válido,
"To format columns, give column labels in the query.","Para dar formato a columnas, dar títulos de las columnas en la consulta.",
Total Subscribers,Los suscriptores totales,
Unable to load: {0},No se puede cargar : {0},
Unread Notification Sent,Notificación No leído Enviado,
@ -337,8 +332,6 @@ Website Theme Image Link,Sitio web Imagen por tema Enlace,
"When you Amend a document after Cancel and save it, it will get a new number that is a version of the old number.","Cuando se modifique un documento después de Cancelar y guardarlo , se obtendrá un nuevo número que es una versión del antiguo número.",
Workflow Action,Acciones de los flujos de trabajo,
Workflow State,Estados de los flujos de trabajo,
Write a Python file in the same folder where this is saved and return column and result.,Escriba un archivo de Python en la misma carpeta donde esta se guarda y devuelve la columna y el resultado.,
Write a SELECT query. Note result is not paged (all data is sent in one go).,Escriba una consulta SELECT. Nota resultado no se pagina ( todos los datos se envían en una sola vez ) .,
You are not allowed to delete a standard Website Theme,No se le permite eliminar un tema Sitio web estándar,
You can add dynamic properties from the document by using Jinja templating.,Puede añadir propiedades dinámicas del documento mediante el uso de plantillas Jinja.,
"You can change Submitted documents by cancelling them and then, amending them.","Puede cambiar los documentos Enviados cancelándolos y luego, haciendo los cambios pertinentes.",
@ -434,7 +427,6 @@ Reference Doctype,Referencia DocType,
Select Doctype,Seleccione tipo de documento,
clear,claro,
font,Fuente,
left,Izquierda,
list,Vista de árbol,
remove,Quitar,
search,Búsqueda,

1 Add Añadir
13 Message Examples Ejemplos de Mensajes
14 Middle Name (Optional) Segundo Nombre ( Opcional)
15 Next Próximo
16 No address added yet. No se ha añadido ninguna dirección todavía.
17 No contacts added yet. No se han añadido contactos todavía
18 Replied Respondió
19 Report Informe
20 Report Builder Generador de informes
81 Default Inbox Bandeja de entrada por defecto
82 Define workflows for forms. Definir los flujos de trabajo para las formas .
83 Defines actions on states and the next step and allowed roles. Define las acciones de los estados y el siguiente paso y funciones permitidas.
Description for listing page, in plain text, only a couple of lines. (max 140 characters) Descripción de la página perfil, en texto plano, sólo un par de líneas. (máx. 140 caracteres)
84 Different "States" this document can exist in. Like "Open", "Pending Approval" etc. Este documento puede estar en Diferentes "Estados". Como "Abierto", "Pendiente de Aprobación", etc.
85 Disable Customer Signup link in Login page Desactivar enlace de registro del cliente en la página de entrada
86 Disable Report Desactivar Informe
90 DocType is a Table / Form in the application. El DocType es una tabla / formulario en la aplicación.
91 DocType on which this Workflow is applicable. El DocType en el presente del flujo de trabajo es aplicable.
92 DocType or Field DocType o campo
93 Document Types Tipos de Documento
94 Download with Data Descarga de datos
95 Drag elements from the sidebar to add. Drag them back to trash. Arrastre los elementos de la barra lateral para agregar. Arrastre de nuevo a la papelera de reciclaje.
96 Dropbox Access Key Clave de Acceso de Dropbox
99 Edit HTML Edición de HTML
100 Edit Heading Editar Rubro
101 Email Account Name Correo electrónico Nombre de cuenta
Email By Document Field Email Por Campo Documento
102 Email Settings Configuración del correo electrónico
103 Email Signature Firma Email
104 Embed image slideshows in website pages. Presentacion de imágenes incrustadas en páginas web .
108 Enter static url parameters here (Eg. sender=ERPNext, username=ERPNext, password=1234 etc.) Introduzca los parámetros de URL estáticas aquí (Ej. sender = ERPNext , nombre de usuario = ERPNext , contraseña = 1234 etc )
109 Enter url parameter for receiver nos Introduzca el parámetro url para el receptor no
110 Field Description Descripción del Campo
Field {0} is not selectable. El campo {0} no se puede seleccionar .
111 Fieldname which will be the DocType for this link field. Nombre de campo el cual será el DocType para enlazar el campo.
112 File Size Tamaño del archivo
113 Float Flotante
197 Repeat On Repetir el
198 Repeat Till Repita Hasta
199 Repeat this Event Repita este Evento
Report Builder reports are managed directly by the report builder. Nothing to do. Informes del Generador de informes son enviadas por el generador de informes . No hay nada que hacer.
200 Report Hide Ocultar Informe
201 Report Manager Administrador de informes
202 Report Name Nombre del informe
217 Row {0}: Not allowed to enable Allow on Submit for standard fields Fila {0}: No se permite habilitar Permitir en Enviar para campos estándar
218 Rules defining transition of state in the workflow. Reglas que definen la transición de estado del flujo de trabajo .
219 Rules for how states are transitions, like next state and which role is allowed to change state etc. Reglas para transición entre estados, como el siguiente estado y qué función permite cambiar de estado , etc
Same file has already been attached to the record El mismo archivo ya se ha adjuntado al registro
Scheduled to send to {0} Programado para enviar a {0}
Scheduled to send to {0} recipients Programado para enviar a {0} destinatarios
220 Script Guión
221 Script Report Informe de secuencias de comandos
222 Script to attach to all web pages. Guión para unir a todas las páginas web.
314 Time Zones Husos horarios
315 Title Prefix Prefijo del Título
316 Title field must be a valid fieldname Campo Título debe ser un nombre de campo válido
To format columns, give column labels in the query. Para dar formato a columnas, dar títulos de las columnas en la consulta.
317 Total Subscribers Los suscriptores totales
318 Unable to load: {0} No se puede cargar: {0}
319 Unread Notification Sent Notificación no leída enviada
332 When you Amend a document after Cancel and save it, it will get a new number that is a version of the old number. Cuando se modifique un documento después de Cancelar y guardarlo, se obtendrá un nuevo número que es una versión del antiguo número.
333 Workflow Action Acciones de los flujos de trabajo
334 Workflow State Estados de los flujos de trabajo
Write a Python file in the same folder where this is saved and return column and result. Escriba un archivo de Python en la misma carpeta donde esta se guarda y devuelve la columna y el resultado.
Write a SELECT query. Note result is not paged (all data is sent in one go). Escriba una consulta SELECT. Nota resultado no se pagina ( todos los datos se envían en una sola vez ) .
335 You are not allowed to delete a standard Website Theme No se le permite eliminar un tema Sitio web estándar
336 You can add dynamic properties from the document by using Jinja templating. Puede añadir propiedades dinámicas del documento mediante el uso de plantillas Jinja.
337 You can change Submitted documents by cancelling them and then, amending them. Puede cambiar los documentos Enviados cancelándolos y luego, haciendo los cambios pertinentes.
427 Select Doctype Seleccione tipo de documento
428 clear claro
429 font Fuente
left Izquierda
430 list Vista de árbol
431 remove Quitar
432 search Búsqueda

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

Some files were not shown because too many files have changed in this diff Show more