Merge branch 'develop' into get-docs
commit 0d8ddb5958
168 changed files with 31713 additions and 26191 deletions

.github/actions/setup/action.yml (vendored): 188 changed lines
@@ -11,17 +11,11 @@ inputs:
default: '24'
build-assets:
required: false
description: 'Wether to build assets'
description: 'Whether to build assets'
default: true
enable-coverage:
required: false
default: false
enable-watch:
required: false
default: false
enable-schedule:
required: false
default: false
disable-web:
required: false
default: false

@@ -47,7 +41,7 @@ runs:
- name: Clone
uses: actions/checkout@v6
with:
path: apps/${{ github.event.repository.name }}
path: frappe-src

- name: Setup Python
uses: actions/setup-python@v6

@@ -57,25 +51,19 @@ runs:
- shell: bash -e {0}
run: |
# Check for valid Python & Merge Conflicts
python -m compileall -q -f "${GITHUB_WORKSPACE}/apps/${{ github.event.repository.name }}"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}/apps/${{ github.event.repository.name }}"
python -m compileall -q -f "${GITHUB_WORKSPACE}/frappe-src"
if grep -lr --exclude-dir=node_modules "^<<<<<<< " "${GITHUB_WORKSPACE}/frappe-src"
then echo "Found merge conflicts"
exit 1
fi

- name: Checkout Frappe
uses: actions/checkout@v6
with:
repository: ${{ env.FRAPPE_GH_ORG || github.repository_owner }}/frappe
ref: ${{ github.event.client_payload.frappe_sha || github.base_ref || github.ref_name }}
path: apps/frappe
if: github.event.repository.name != 'frappe'

- uses: actions/setup-node@v6
with:
node-version: ${{ inputs.node-version }}
check-latest: true

- uses: astral-sh/setup-uv@v6

- name: Cache pip
uses: actions/cache@v4
with:

@@ -118,107 +106,92 @@ runs:
echo -e "\033[33mInstall System Dependencies: $((end_time - start_time)) seconds\033[0m"

- shell: bash -e {0}
env:
DB: ${{ inputs.db }}
run: |
# Init Bench & test_site
# Init Bench
start_time=$(date +%s)
mkdir ${GITHUB_WORKSPACE}/{sites,config,logs,config/pids,sites/test_site}
python -m venv ${GITHUB_WORKSPACE}/env
source ${GITHUB_WORKSPACE}/env/bin/activate
pip install --quiet --upgrade pip
pip cache remove mysqlclient
uv tool install frappe-bench
bench init ${GITHUB_WORKSPACE} \
--ignore-exist \
--frappe-path "${GITHUB_WORKSPACE}/frappe-src" \
--skip-assets \
--no-backups \
--python "$(which python)"

pip install --quiet frappe-bench
# bench init sets origin to the local checkout path, fix it to point at GitHub
git -C apps/frappe remote set-url upstream "https://github.com/${{ github.repository }}"

python <<EOF
from bench.config.common_site_config import setup_config
from bench.config.redis import generate_config
from bench.config.procfile import setup_procfile
# Trim Procfile for CI
sed -i '/^watch:/d' Procfile
sed -i '/^schedule:/d' Procfile
if [ "${{ inputs.disable-web }}" == "true" ]; then
sed -i '/^web:/d' Procfile
elif [ "${{ inputs.enable-coverage }}" == "true" ]; then
sed -i 's|^web: bench serve|web: bench serve --with-coverage|' Procfile
fi
if [ "${{ inputs.disable-socketio }}" == "true" ]; then
sed -i '/^socketio:/d' Procfile
fi

bench_path = "${{ github.workspace }}"
is_true = lambda str: True if str == "true" else False
is_not_true = lambda str: True if str != "true" else False

setup_config(bench_path)
generate_config(bench_path)
setup_procfile(
bench_path,
skip_redis=False,
skip_web=is_true("${{ inputs.disable-web }}"),
skip_watch=is_not_true("${{ inputs.enable-watch }}"),
skip_socketio=is_true("${{ inputs.disable-socketio }}"),
skip_schedule=is_not_true("${{ inputs.enable-schedule }}"),
with_coverage=is_true("${{ inputs.enable-coverage }}"),
)
EOF
end_time=$(date +%s)
echo -e "\033[33mInit Bench: $((end_time - start_time)) seconds\033[0m"
cat ${GITHUB_WORKSPACE}/Procfile | awk '{print "\033[0;34m" $0 "\033[0m"}'
# Attempt to copy the configuration file
if cp "${GITHUB_WORKSPACE}/apps/${{ github.event.repository.name }}/.github/helper/db/$DB.json" ${GITHUB_WORKSPACE}/sites/test_site/site_config.json; then
echo "Successfully copied ${DB}.json to site_config.json."
else
echo "Error: The configuration file ${GITHUB_WORKSPACE}/apps/${{ github.event.repository.name }}/.github/helper/db/$DB.json does not exist."
echo "Please ensure that the database JSON file is correctly named and located in the helper/db directory."
exit 1 # Exit with a non-zero status to indicate failure
fi

if [ "$DB" == "mariadb" ]; then
mariadb --host 127.0.0.1 --port 3306 -u root -p${{ inputs.db-root-password }} -e "SET GLOBAL character_set_server = 'utf8mb4'";
mariadb --host 127.0.0.1 --port 3306 -u root -p${{ inputs.db-root-password }} -e "SET GLOBAL collation_server = 'utf8mb4_unicode_ci'";

mariadb --host 127.0.0.1 --port 3306 -u root -p${{ inputs.db-root-password }} -e "CREATE DATABASE test_frappe";
mariadb --host 127.0.0.1 --port 3306 -u root -p${{ inputs.db-root-password }} -e "CREATE USER 'test_frappe'@'localhost' IDENTIFIED BY 'test_frappe'";
mariadb --host 127.0.0.1 --port 3306 -u root -p${{ inputs.db-root-password }} -e "GRANT ALL PRIVILEGES ON \`test_frappe\`.* TO 'test_frappe'@'localhost'";

mariadb --host 127.0.0.1 --port 3306 -u root -p${{ inputs.db-root-password }} -e "FLUSH PRIVILEGES";
fi

if [ "$DB" == "postgres" ]; then
export PGPASSWORD='travis'
psql -h 127.0.0.1 -p 5432 -c "CREATE DATABASE test_frappe" -U postgres
psql -h 127.0.0.1 -p 5432 -c "CREATE USER test_frappe WITH PASSWORD 'test_frappe'" -U postgres
psql -h 127.0.0.1 -p 5432 -U postgres -c "GRANT ALL PRIVILEGES ON DATABASE test_frappe TO test_frappe;"
unset PGPASSWORD
fi

- shell: bash -e {0}
run: |
# Install App(s)
step_start_time=$(date +%s)
source ${GITHUB_WORKSPACE}/env/bin/activate
# Install dev/test dependencies
start_time=$(date +%s)
bench setup requirements --dev
if [ "${{ inputs.build-assets }}" == "true" ]; then
bench setup requirements --node
fi

for app in ${GITHUB_WORKSPACE}/apps/*/; do
app_name="$(basename $app)"
if [ -f "${app}setup.py" ] || [ -f "${app}pyproject.toml" ]; then
start_time=$(date +%s)
echo -e "\033[36mInstalling python app from ${app}\033[0m"
pip install --upgrade -e "${app}[dev,test]"
end_time=$(date +%s)
echo -e "\033[36mTime taken to Install python ${app}: $((end_time - start_time)) seconds\033[0m"
fi
if [ "${{ inputs.build-assets }}" == "true" ] && [ -f "${app}package.json" ]; then
start_time=$(date +%s)
echo -e "\033[36mInstalling js app dependencies from ${app}\033[0m"
pushd "$app"
yarn --check-files
popd
end_time=$(date +%s)
echo -e "\033[36mTime taken to Install js ${app}: $((end_time - start_time)) seconds\033[0m"
fi
echo "$app_name" >> sites/apps.txt
echo -e "\033[32mAdded $app_name to $PWD/sites/apps.txt\033[0m"
done
step_end_time=$(date +%s)
echo -e "\033[33mInstall App(s): $((step_end_time - step_start_time)) seconds\033[0m"
end_time=$(date +%s)
echo -e "\033[33mInstall dev/test deps: $((end_time - start_time)) seconds\033[0m"

- shell: bash -e {0}
env:
TYPE: server
DB: ${{ inputs.db }}
run: |
# Create Site
start_time=$(date +%s)
if [ "$DB" == "postgres" ]; then
DB_ROOT_USER="postgres"
DB_ROOT_PWD="travis"
else
DB_ROOT_USER="root"
DB_ROOT_PWD="${{ inputs.db-root-password }}"
fi

bench set-config -g root_login "$DB_ROOT_USER"
bench set-config -g root_password "$DB_ROOT_PWD"
bench set-config -g admin_password admin

bench new-site test_site \
--db-type "$DB" \
--db-host 127.0.0.1 \
--db-name test_frappe \
--db-password test_frappe \
--verbose
bench --site test_site set-config allow_tests 1 --parse
bench --site test_site set-config server_script_enabled 1 --parse
bench --site test_site set-config host_name "http://test_site:8000"
bench --site test_site set-config auto_email_id "test@example.com"
bench --site test_site set-config mail_server localhost
bench --site test_site set-config mail_port 2525 --parse
bench --site test_site set-config mail_login "test@example.com"
bench --site test_site set-config mail_password test
bench --site test_site set-config disable_mail_smtp_authentication 1 --parse

if [ "$DB" == "mariadb" ]; then
bench --site test_site set-config monitor 1 --parse
bench --site test_site set-config use_mysqlclient 1 --parse
fi

end_time=$(date +%s)
echo -e "\033[33mCreate Site: $((end_time - start_time)) seconds\033[0m"

- shell: bash -e {0}
run: |
# Start Bench
source ${GITHUB_WORKSPACE}/env/bin/activate
bench start &> ${GITHUB_WORKSPACE}/bench_start.log &

- shell: bash -e {0}

@@ -226,12 +199,7 @@ runs:
run: |
# Build Assets
start_time=$(date +%s)

source ${GITHUB_WORKSPACE}/env/bin/activate
CI=Yes bench build --force --production &
build_pid=$!
bench --site test_site reinstall --yes
wait $build_pid
CI=Yes bench build --force --production

end_time=$(date +%s)
echo -e "\033[33mBuild Assets and reinstall site: $((end_time - start_time)) seconds\033[0m"
echo -e "\033[33mBuild Assets: $((end_time - start_time)) seconds\033[0m"

.github/helper/db/mariadb.json (vendored): 20 changed lines (file deleted)

@@ -1,20 +0,0 @@
{
"db_host": "127.0.0.1",
"db_port": 3306,
"db_name": "test_frappe",
"db_password": "test_frappe",
"allow_tests": true,
"db_type": "mariadb",
"auto_email_id": "test@example.com",
"mail_server": "localhost",
"mail_port": 2525,
"mail_login": "test@example.com",
"mail_password": "test",
"admin_password": "admin",
"root_login": "root",
"root_password": "db_root",
"host_name": "http://test_site:8000",
"use_mysqlclient": 1,
"monitor": 1,
"server_script_enabled": true
}

.github/helper/db/postgres.json (vendored): 18 changed lines (file deleted)

@@ -1,18 +0,0 @@
{
"db_host": "127.0.0.1",
"db_port": 5432,
"db_name": "test_frappe",
"db_password": "test_frappe",
"db_type": "postgres",
"allow_tests": true,
"auto_email_id": "test@example.com",
"mail_server": "localhost",
"mail_port": 2525,
"mail_login": "test@example.com",
"mail_password": "test",
"admin_password": "admin",
"root_login": "postgres",
"root_password": "travis",
"host_name": "http://test_site:8000",
"server_script_enabled": true
}

.github/helper/db/sqlite.json (vendored): 13 changed lines (file deleted)

@@ -1,13 +0,0 @@
{
"db_name": "test_frappe",
"db_type": "sqlite",
"allow_tests": true,
"auto_email_id": "test@example.com",
"mail_server": "localhost",
"mail_port": 2525,
"mail_login": "test@example.com",
"mail_password": "test",
"admin_password": "admin",
"host_name": "http://test_site:8000",
"server_script_enabled": true
}

.github/helper/po_pr_review.py (vendored, new file): 534 changed lines

@@ -0,0 +1,534 @@
"""Generate a review-friendly summary for large translation PRs.

This helper runs in GitHub Actions for bot-authored `.po` pull requests.
It compares the trusted base checkout against the PR head translation files,
groups similarly sized file diffs, and renders a markdown comment with the
high-signal translation changes that are hard to inspect in GitHub's UI.
"""

import argparse
import html
import io
import json
import os
import time
import urllib.parse
import urllib.request
from collections import Counter
from dataclasses import dataclass
from pathlib import Path
from typing import Any
from urllib.error import HTTPError

from babel.messages.pofile import read_po

COMMENT_MARKER = "<!-- po-translation-review -->"
SIMILARITY_TOLERANCE = 0.02
REVIEW_HIDDEN_PO_FILES = {"eo.po"}


@dataclass(frozen=True)
class TranslationEntry:
"""Normalized representation of a gettext entry used for diffing."""

context: str
msgid: str
msgid_plural: str | None
translation: tuple[str, ...]

@property
def key(self) -> tuple[str, str, str]:
return (self.context, self.msgid, self.msgid_plural or "")


def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Build a PR review comment for .po file changes in a GitHub pull request."
)
parser.add_argument("--repo", default=os.environ.get("GITHUB_REPOSITORY"))
parser.add_argument("--pr", type=int, default=os.environ.get("PR_NUMBER"))
parser.add_argument("--head-sha", default=os.environ.get("PR_HEAD_SHA"))
parser.add_argument("--output", default="po-pr-review-comment.md")
return parser.parse_args()


def request_url(url: str, *, accept: str, allow_missing: bool = False) -> bytes | None:
"""Fetch bytes from GitHub with auth, retries, and optional 404 handling."""

headers = {
"Accept": accept,
"X-GitHub-Api-Version": "2022-11-28",
"User-Agent": "frappe-po-review-helper",
}
token = os.environ.get("GITHUB_TOKEN")
if token:
headers["Authorization"] = f"Bearer {token}"

retries = 0
while True:
try:
request = urllib.request.Request(url, headers=headers)
with urllib.request.urlopen(request) as response:
return response.read()
except HTTPError as exc:
if exc.code == 404 and allow_missing:
return None

if exc.code in {403, 429, 500, 502, 503, 504} and retries < 5:
retries += 1
time.sleep(retries)
continue

raise


def request_json(url: str) -> Any:
response = request_url(url, accept="application/vnd.github+json")
if response is None:
return None
return json.loads(response.decode("utf-8"))


def fetch_pr_files(repo: str, pr_number: int) -> list[dict[str, Any]]:
"""Return the full changed-file list for a PR, following GitHub pagination."""

files: list[dict[str, Any]] = []
page = 1

while True:
url = f"https://api.github.com/repos/{repo}/pulls/{pr_number}/files?per_page=100&page={page}"
page_files = request_json(url) or []
if not page_files:
break

files.extend(page_files)
if len(page_files) < 100:
break

page += 1

return files


def read_local_file(path: str | None) -> str | None:
"""Read a file from the trusted base checkout while preventing path traversal."""

if not path:
return None

repo_root = Path.cwd().resolve()
file_path = (repo_root / path).resolve()
try:
file_path.relative_to(repo_root)
except ValueError as exc:
raise ValueError(f"Unexpected repository path: {path}") from exc

if not file_path.exists():
return None

return file_path.read_text(encoding="utf-8")


def fetch_file_content(repo: str, path: str | None, ref: str | None) -> str | None:
"""Fetch the raw content for a repository file at a specific git ref."""

if not path or not ref:
return None

quoted_path = urllib.parse.quote(path, safe="/")
quoted_ref = urllib.parse.quote(ref, safe="")
url = f"https://api.github.com/repos/{repo}/contents/{quoted_path}?ref={quoted_ref}"
response = request_url(url, accept="application/vnd.github.raw", allow_missing=True)
if response is None:
return None
return response.decode("utf-8")


def is_po_file(change: dict[str, Any]) -> bool:
current_path = change.get("filename", "")
previous_path = change.get("previous_filename", "")
return current_path.endswith(".po") or previous_path.endswith(".po")


def base_path_for_file(change: dict[str, Any]) -> str | None:
if change.get("status") == "renamed":
return change.get("previous_filename") or change.get("filename")
return change.get("filename")


def head_path_for_file(change: dict[str, Any]) -> str | None:
if change.get("status") == "removed":
return None
return change.get("filename")


def normalize_translation(value: Any) -> tuple[str, ...]:
if value is None:
return ("",)
if isinstance(value, (tuple, list)):
return tuple("" if part is None else str(part) for part in value)
return (str(value),)


def is_translation_empty(translation: tuple[str, ...]) -> bool:
"""Return whether every translated value in the entry is empty or whitespace."""

return not any(part.strip() for part in translation)


def normalize_message(message: Any) -> TranslationEntry:
if isinstance(message.id, tuple):
msgid, msgid_plural = message.id
else:
msgid, msgid_plural = message.id, None

return TranslationEntry(
context=message.context or "",
msgid=str(msgid),
msgid_plural=None if msgid_plural is None else str(msgid_plural),
translation=normalize_translation(message.string),
)


def load_translation_entries(
content: str | None,
) -> tuple[str | None, dict[tuple[str, str, str], TranslationEntry]]:
"""Parse `.po` content into normalized entries keyed for translation diffing.

The gettext header entry is skipped, and both singular and plural messages are
flattened into `TranslationEntry` objects so they can be compared uniformly.
"""

if not content:
return None, {}

catalog = read_po(io.StringIO(content))
language = str(catalog.locale) if catalog.locale else None
entries: dict[tuple[str, str, str], TranslationEntry] = {}

for message in catalog:
if not message.id:
continue

entry = normalize_message(message)
entries[entry.key] = entry

return language, entries


def compare_entries(
base_entries: dict[tuple[str, str, str], TranslationEntry],
head_entries: dict[tuple[str, str, str], TranslationEntry],
) -> list[dict[str, TranslationEntry | str | None]]:
"""Return only the translations that are new or changed in the PR head.

Removed entries are not included here because reviewers primarily need to
inspect what was introduced or modified in the new translation state. Brand
new entries with empty `msgstr` values are also skipped to avoid noisy review
tables for untranslated strings.
"""

changes: list[dict[str, TranslationEntry | str | None]] = []

for key in sorted(head_entries, key=lambda item: (item[0].lower(), item[1].lower(), item[2].lower())):
head_entry = head_entries[key]
base_entry = base_entries.get(key)

if base_entry is None:
if is_translation_empty(head_entry.translation):
continue
changes.append({"status": "added", "before": None, "after": head_entry})
continue

if base_entry.translation != head_entry.translation:
changes.append({"status": "changed", "before": base_entry, "after": head_entry})

return changes


def within_tolerance(value: int, reference: float, tolerance: float = SIMILARITY_TOLERANCE) -> bool:
if reference == 0:
return value == 0

allowed_delta = max(1, round(reference * tolerance))
return abs(value - reference) <= allowed_delta


def cluster_similar_change_sizes(changes: list[dict[str, Any]]) -> list[dict[str, Any]]:
"""Group files whose added and removed line counts are within the tolerance.

This helps spot bulk-generated translation updates where many locale files were
changed in nearly the same way.
"""

clusters: list[dict[str, Any]] = []

sorted_changes = sorted(
changes,
key=lambda item: (-item.get("additions", 0), -item.get("deletions", 0), item.get("filename", "")),
)

for change in sorted_changes:
additions = change.get("additions", 0)
deletions = change.get("deletions", 0)

for cluster in clusters:
if within_tolerance(additions, cluster["avg_additions"]) and within_tolerance(
deletions, cluster["avg_deletions"]
):
cluster["files"].append(change)
cluster["avg_additions"] = sum(file["additions"] for file in cluster["files"]) / len(
cluster["files"]
)
cluster["avg_deletions"] = sum(file["deletions"] for file in cluster["files"]) / len(
cluster["files"]
)
break
else:
clusters.append(
{
"files": [change],
"avg_additions": float(additions),
"avg_deletions": float(deletions),
}
)

return sorted(
[cluster for cluster in clusters if len(cluster["files"]) > 1],
key=lambda cluster: (-len(cluster["files"]), -cluster["avg_additions"], -cluster["avg_deletions"]),
)


def format_translation(translation: tuple[str, ...]) -> str:
if len(translation) == 1:
return translation[0]

return "\n".join(f"[{index}] {value or '(empty)'}" for index, value in enumerate(translation))


def escape_table_cell(value: str) -> str:
if not value:
return "<em>empty</em>"

return html.escape(value).replace("|", "&#124;").replace("\n", "<br>")


def render_msgid(entry: TranslationEntry) -> str:
parts = [entry.msgid]
if entry.msgid_plural:
parts.append(f"[plural] {entry.msgid_plural}")
return "\n".join(parts)


def should_hide_report_from_review(report: dict[str, Any]) -> bool:
"""Return whether a file should be omitted from reviewer-facing language details."""

return Path(str(report["path"])).name in REVIEW_HIDDEN_PO_FILES


def build_language_section(report: dict[str, Any]) -> list[str]:
"""Render one language's added or changed translations as a markdown table."""

lines = [
f"### `{report['language']}` (`{report['path']}`)",
"",
"| Status | Msgid | Previous | Current |",
"| --- | --- | --- | --- |",
]

for change in report["changes"]:
before = change["before"]
after = change["after"]
after = after if isinstance(after, TranslationEntry) else None
before = before if isinstance(before, TranslationEntry) else None

if after is None:
continue

lines.append(
"| "
+ " | ".join(
[
str(change["status"]),
escape_table_cell(render_msgid(after)),
escape_table_cell("" if before is None else format_translation(before.translation)),
escape_table_cell(format_translation(after.translation)),
]
)
+ " |"
)

lines.append("")
return lines


def build_comment(
po_files: list[dict[str, Any]],
language_reports: list[dict[str, Any]],
similar_groups: list[dict[str, Any]],
parse_errors: list[dict[str, str]],
) -> str:
"""Build the final PR comment with stats, grouped diffs, and translation tables.

The result is intentionally compact at the top and expandable below so large
translation PRs stay reviewable even when GitHub cannot render the raw diff.
"""

status_counts = Counter(change.get("status", "modified") for change in po_files)
total_files = len(po_files)
added_files = status_counts["added"]
removed_files = status_counts["removed"]
reviewable_language_reports = [
report for report in language_reports if not should_hide_report_from_review(report)
]

grouped_files_count = sum(len(group["files"]) for group in similar_groups)
translation_change_count = sum(
len(report["changes"]) for report in reviewable_language_reports if report["changes"]
)
changed_languages_count = sum(1 for report in reviewable_language_reports if report["changes"])
removed_reports = [report for report in reviewable_language_reports if report["status"] == "removed"]
metadata_only_reports = [
report
for report in reviewable_language_reports
if not report["changes"] and report["status"] != "removed"
]

lines = [
COMMENT_MARKER,
"Here is a summary of the `.po` file changes:",
"",
f"- Changed files: `{total_files}`",
f"- Added files: `{added_files}`",
f"- Removed files: `{removed_files}`",
f"- Files in similar change-size groups within 2% tolerance: `{grouped_files_count}`",
f"- Added or changed translations detected: `{translation_change_count}` across `{changed_languages_count}` file(s)",
]

if parse_errors:
lines.append(f"- Files that could not be parsed: `{len(parse_errors)}`")

lines.extend(["", "### Similar Change-Size Groups", ""])

if similar_groups:
for group in similar_groups:
representative_additions = round(group["avg_additions"])
representative_deletions = round(group["avg_deletions"])
file_names = ", ".join(f"`{Path(file['filename']).name}`" for file in group["files"])
lines.append(
f"- Around `+{representative_additions} / -{representative_deletions}` lines: "
f"`{len(group['files'])}` files ({file_names})"
)
else:
lines.append("- No repeated change-size groups were found within the 2% tolerance.")

lines.extend(
[
"",
"<details>",
f"<summary>Added or changed translations by language ({translation_change_count} entries across {changed_languages_count} file(s))</summary>",
"",
]
)

if translation_change_count:
for report in reviewable_language_reports:
if not report["changes"]:
continue
lines.extend(build_language_section(report))
else:
lines.extend(
[
"No added or changed translations were detected. The `.po` changes appear to be metadata, comment, or source reference updates only.",
"",
]
)

if metadata_only_reports:
lines.extend(["### Metadata-Only File Changes", ""])
for report in metadata_only_reports:
lines.append(f"- `{report['language']}` (`{report['path']}`)")
lines.append("")

if removed_reports:
lines.extend(["### Removed Translation Files", ""])
for report in removed_reports:
lines.append(f"- `{report['language']}` (`{report['path']}`)")
lines.append("")

if parse_errors:
lines.extend(["### Parse Errors", ""])
for error in parse_errors:
lines.append(f"- `{error['path']}`: {html.escape(error['error'])}")
lines.append("")

lines.append("</details>")
lines.append("")

return "\n".join(lines)


def build_language_report(
repo: str,
change: dict[str, Any],
head_sha: str,
) -> tuple[dict[str, Any] | None, dict[str, str] | None]:
"""Compare one changed `.po` file between the base checkout and PR head blob.

The base side is read from the trusted local checkout, while the head side is
fetched by SHA from GitHub so the workflow does not have to execute PR code.
"""

base_path = base_path_for_file(change)
head_path = head_path_for_file(change)
base_po_path = base_path if (base_path or "").endswith(".po") else None
head_po_path = head_path if (head_path or "").endswith(".po") else None
display_path = head_path or base_path or change.get("filename")

try:
base_content = read_local_file(base_po_path)
head_content = fetch_file_content(repo, head_po_path, head_sha)

base_language, base_entries = load_translation_entries(base_content)
head_language, head_entries = load_translation_entries(head_content)
language = head_language or base_language or Path(display_path).stem

return (
{
"language": language,
"path": display_path,
"status": change.get("status"),
"changes": compare_entries(base_entries, head_entries),
},
None,
)
except Exception as exc:
return None, {"path": display_path, "error": str(exc)}


def main() -> None:
"""Generate the comment body for the current PR and write it to disk."""

args = parse_args()
if not args.repo or not args.pr or not args.head_sha:
raise SystemExit("Missing required pull request context.")

all_files = fetch_pr_files(args.repo, args.pr)
po_files = [change for change in all_files if is_po_file(change)]
language_reports: list[dict[str, Any]] = []
parse_errors: list[dict[str, str]] = []

for change in po_files:
report, error = build_language_report(args.repo, change, args.head_sha)
if report:
language_reports.append(report)
if error:
parse_errors.append(error)

language_reports.sort(key=lambda report: (str(report["language"]).lower(), str(report["path"]).lower()))
comment = build_comment(po_files, language_reports, cluster_similar_change_sizes(po_files), parse_errors)
Path(args.output).write_text(comment, encoding="utf-8")


if __name__ == "__main__":
main()
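Note (not part of the PR): a minimal sketch of how the 2% grouping tolerance above behaves, reusing within_tolerance exactly as defined in this helper; the line counts are illustrative.

# round(512 * 0.02) == 10, so a second locale file with +505 lines is within
# tolerance of one with +512 lines, while +450 would start a new cluster.
SIMILARITY_TOLERANCE = 0.02

def within_tolerance(value, reference, tolerance=SIMILARITY_TOLERANCE):
    if reference == 0:
        return value == 0
    allowed_delta = max(1, round(reference * tolerance))
    return abs(value - reference) <= allowed_delta

print(within_tolerance(505, 512.0))  # True: |505 - 512| = 7 <= 10
print(within_tolerance(450, 512.0))  # False: 62 > 10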

.github/workflows/_base-migration.yml (vendored): 78 changed lines

@@ -68,92 +68,36 @@ jobs:
env:
DB_ARTIFACT_URL: ${{ inputs.db-artifact-url }}
run: |
source ${GITHUB_WORKSPACE}/env/bin/activate
wget "$DB_ARTIFACT_URL"
bench --site test_site --force restore ${GITHUB_WORKSPACE}/$(basename "$DB_ARTIFACT_URL")

function update_to_version() {
version="$1"
python_version="${2:-${{ inputs.python-version }}}"

if [ -z "$version" ]; then
base_ref="${{ github.base_ref || github.ref_name }}"
head_ref="${{ github.sha }}"
ref="${{ github.sha }}"
else
base_ref="version-$version-hotfix"
head_ref="version-$version-hotfix"
ref="version-$version-hotfix"
fi

source ${GITHUB_WORKSPACE}/env/bin/activate
echo "Updating to version ${version:-$head_ref}"
echo "Updating to $ref"

# Fetch and checkout branches
for app in ${GITHUB_WORKSPACE}/apps/*/; do
app_name=$(basename "$app")
echo "Processing app: $app_name"
git -C apps/frappe fetch --depth 1 upstream "$ref":"$ref"
git -C apps/frappe checkout --quiet --force "$ref"

if [[ "$app_name" == "${{ github.event.repository.name }}" ]]; then
git -C "$app" fetch --depth 1 origin $head_ref:$head_ref
if git -C "$app" checkout --quiet --force $head_ref; then
echo "Checked out $head_ref successfully at $app"
else
echo "Failed to checkout $ref at $app" >&2
return 1
fi
else
git -C "$app" fetch --depth 1 origin $base_ref:$base_ref
if git -C "$app" checkout --quiet --force $base_ref; then
echo "Checked out $base_ref successfully at $app"
else
echo "Failed to checkout $base_ref at $app" >&2
return 1
fi
fi
done

# Resetup env and install apps
if pgrep honcho > /dev/null; then
echo "Stopping honcho process..."
pgrep honcho | xargs kill
sleep 10
fi

echo "Setting up environment..."

# Last python version in the array is the "default", so the 2nd parameter here is optional
if rm -rf ${GITHUB_WORKSPACE}/env && python"$2" -m venv ${GITHUB_WORKSPACE}/env; then
source ${GITHUB_WORKSPACE}/env/bin/activate
pip install --quiet --upgrade pip
pip install --quiet frappe-bench
echo "Environment setup completed."
else
echo "Environment setup failed." >&2
return 1
fi
bench migrate-env "python$python_version"

echo "Installing apps..."
for app in ${GITHUB_WORKSPACE}/apps/*/; do
if pip install --upgrade -e "$app"; then
echo "Installed $app successfully."
else
echo "Failed to install $app." >&2
return 1
fi
done

echo "Starting bench..."
bench setup requirements
bench start &>> ${GITHUB_WORKSPACE}/bench_start.log &

echo "Running migrations on test_site..."
if bench --site test_site migrate; then
echo "Migration completed successfully."
else
echo "Migration failed." >&2
return 1
fi

echo "Update to version ${version:-$base_ref} completed."
bench --site test_site migrate
}

# Save this script into a file for later use.
declare -f update_to_version > "$RUNNER_TEMP/migrate"

- name: Update to v15

@@ -161,7 +105,7 @@ jobs:
source $RUNNER_TEMP/migrate
update_to_version 15 3.13
exit $?

- name: Update to v16
run: |
source $RUNNER_TEMP/migrate

.github/workflows/_base-server-tests.yml (vendored): 1 changed line

@@ -108,7 +108,6 @@ jobs:
- name: Run Tests
run: |
source ${GITHUB_WORKSPACE}/env/bin/activate
bench --site test_site \
run-parallel-tests \
--app "${{ github.event.repository.name }}" \

.github/workflows/_base-ui-tests.yml (vendored): 14 changed lines

@@ -94,7 +94,6 @@ jobs:
- name: Site Setup
run: |
source ${GITHUB_WORKSPACE}/env/bin/activate
bench --site test_site execute frappe.utils.install.complete_setup_wizard
bench --site test_site execute frappe.tests.ui_test_helpers.create_test_user

@@ -105,16 +104,17 @@ jobs:
- name: Run Tests
id: ui-tests
run: |
source ${GITHUB_WORKSPACE}/env/bin/activate
bench --site test_site \
run-ui-tests ${{ github.event.repository.name }} \
--with-coverage \
--headless \
--parallel \
--browser ${{ env.BROWSER_PATH }} \
--ci-build-id $GITHUB_RUN_ID-$GITHUB_RUN_ATTEMPT
--ci-build-id $GITHUB_RUN_ID-$GITHUB_RUN_ATTEMPT \
--group ui-shard-${{ matrix.index }}
env:
CYPRESS_RECORD_KEY: 4a48f41c-11b3-425b-aa88-c58048fa69eb
SPLIT: ${{ inputs.parallel-runs }}
SPLIT_INDEX: ${{ strategy.job-index }}

- name: Stop server and wait for coverage file
if: inputs.enable-coverage

@@ -131,7 +131,7 @@ jobs:
path: ./apps/${{ github.event.repository.name }}/.cypress-coverage/clover.xml
- name: Compress Cypress Videos
if: always() && steps.ui-tests.outcome == 'failure'
run: |
run: |
if find ./cypressVideos -mindepth 1 | read; then
zip -r cypress_recordings.zip ./cypressVideos
fi

@@ -142,7 +142,7 @@ jobs:
name: Cypress CI Video Recordings
path: ./cypress_recordings.zip

- name: Upload coverage data
uses: actions/upload-artifact@v7

@@ -150,7 +150,7 @@ jobs:
with:
name: coverage-py-${{ matrix.index }}
path: ./sites/*-coverage*.xml

- name: Setup tmate session
uses: mxschmitt/action-tmate@v3
if: ${{ failure() && contains( github.event.pull_request.labels.*.name, 'debug-gha') }}

.github/workflows/linters.yml (vendored): 2 changed lines

@@ -95,7 +95,7 @@ jobs:
run: |
pip install pip-audit
cd ${GITHUB_WORKSPACE}
pip-audit --desc on --ignore-vuln PYSEC-2023-312 .
pip-audit --desc on --ignore-vuln PYSEC-2023-312 --ignore-vuln CVE-2026-4539 .

precommit:
name: 'Pre-Commit'

.github/workflows/review-po-prs.yml (vendored, new file): 80 changed lines

@@ -0,0 +1,80 @@
name: Review translation PRs

on:
pull_request_target:
types: [opened, reopened, synchronize, ready_for_review]
branches:
- develop
paths:
- "**/*.po"

concurrency:
group: po-review-${{ github.event.pull_request.number }}
cancel-in-progress: true

jobs:
review-po-pr:
if: ${{ github.event.pull_request.user.login == 'frappe-pr-bot' }}
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
contents: read
issues: write
pull-requests: write

steps:
- name: Checkout
uses: actions/checkout@v6
with:
ref: ${{ github.event.pull_request.base.sha }}
fetch-depth: 1
persist-credentials: false

- name: Setup Python
uses: actions/setup-python@v6
with:
python-version: "3.14"

- name: Install helper dependencies
run: python -m pip install babel

- name: Build PO review comment
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
run: python .github/helper/po_pr_review.py --output po-pr-review-comment.md

- name: Create or update PR comment
uses: actions/github-script@v8
with:
script: |
const fs = require("fs");
const marker = "<!-- po-translation-review -->";
const body = fs.readFileSync("po-pr-review-comment.md", "utf8");
const { owner, repo } = context.repo;
const issue_number = context.issue.number;
const comments = await github.paginate(github.rest.issues.listComments, {
owner,
repo,
issue_number,
per_page: 100
});

const existingComment = comments.find((comment) => comment.body.includes(marker));

if (existingComment) {
await github.rest.issues.updateComment({
owner,
repo,
comment_id: existingComment.id,
body
});
} else {
await github.rest.issues.createComment({
owner,
repo,
issue_number,
body
});
}

@@ -1,6 +1,7 @@
const { defineConfig } = require("cypress");
const fs = require("fs");
const path = require("path");
const cypressSplit = require("cypress-split");

module.exports = defineConfig({
projectId: "92odwv",

@@ -20,6 +21,11 @@ module.exports = defineConfig({
// We've imported your old cypress plugins here.
// You may want to clean this up later by importing these.
setupNodeEvents(on, config) {
// Splitting tests only works when Cypress Cloud is not orchestrating parallel runs.
if (process.env.CYPRESS_CLOUD_PARALLEL !== "1") {
cypressSplit(on, config);
}

// Delete videos for specs without failing or retried tests
// https://docs.cypress.io/guides/guides/screenshots-and-videos#Delete-videos-for-specs-without-failing-or-retried-tests
on("after:spec", (spec, results) => {

@@ -97,7 +97,8 @@ def get_values_for_link_and_dynamic_link_fields(doc_dict):
doctype = field.options if field.fieldtype == "Link" else doc_dict.get(field.options)

link_doc = frappe.get_doc(doctype, doc_fieldvalue)
link_doc = frappe.get_doc(doctype, doc_fieldvalue, check_permission="read")
link_doc.apply_fieldlevel_read_permissions()
doc_dict.update({field.fieldname: link_doc})

@@ -26,6 +26,7 @@ from frappe.query_builder import DocType
from frappe.query_builder.functions import Count
from frappe.query_builder.terms import ParameterizedValueWrapper, SubQuery
from frappe.utils import add_user_info, cstr, get_system_timezone
from frappe.utils.caching import redis_cache
from frappe.utils.change_log import get_versions
from frappe.utils.frappecloud import on_frappecloud
from frappe.website.doctype.web_page_view.web_page_view import is_tracking_enabled

@@ -60,14 +61,13 @@ def get_bootinfo():
bootinfo.desktop_icons = get_desktop_icons(bootinfo=bootinfo)
bootinfo.letter_heads = get_letter_heads()
bootinfo.active_domains = frappe.get_active_domains()
bootinfo.all_domains = [d.get("name") for d in frappe.get_all("Domain")]
bootinfo.all_domains = frappe.get_all("Domain", pluck="name")
add_layouts(bootinfo)

bootinfo.module_app = frappe.local.module_app
bootinfo.single_types = [d.name for d in frappe.get_all("DocType", {"issingle": 1})]
bootinfo.nested_set_doctypes = [
d.parent for d in frappe.get_all("DocField", {"fieldname": "lft"}, ["parent"])
]
bootinfo.single_types = frappe.get_all("DocType", {"issingle": 1}, pluck="name")
bootinfo.nested_set_doctypes = frappe.get_all("DocField", {"fieldname": "lft"}, pluck="parent")
bootinfo.tree_view_doctypes = get_tree_view_doctypes()
add_home_page(bootinfo, doclist)
bootinfo.page_info = get_allowed_pages()
load_translations(bootinfo)

@@ -217,7 +217,7 @@ def load_desktop_data(bootinfo):
app_logo_url=app_info.get("logo")
or frappe.get_hooks("app_logo_url", app_name=app_name)
or frappe.get_hooks("app_logo_url", app_name="frappe"),
modules=[m.name for m in frappe.get_all("Module Def", dict(app_name=app_name))],
modules=frappe.get_all("Module Def", dict(app_name=app_name), pluck="name"),
workspaces=workspaces,
)
)

@@ -342,10 +342,10 @@ def get_user_pages_or_reports(parent, cache=False):

def load_translations(bootinfo):
from frappe.translate import get_messages_for_boot
from frappe.translate import get_translation_version

bootinfo["lang"] = frappe.lang
bootinfo["__messages"] = get_messages_for_boot()
bootinfo["translations_version"] = get_translation_version()


def get_user_info():

@@ -409,7 +409,7 @@ def get_success_action():
def get_link_preview_doctypes():
from frappe.utils import cint

link_preview_doctypes = [d.name for d in frappe.get_all("DocType", {"show_preview_popup": 1})]
link_preview_doctypes = frappe.get_all("DocType", {"show_preview_popup": 1}, pluck="name")
customizations = frappe.get_all(
"Property Setter", fields=["doc_type", "value"], filters={"property": "show_preview_popup"}
)

@@ -522,6 +522,11 @@ def get_marketplace_apps():
return apps


@redis_cache
def get_tree_view_doctypes():
return frappe.get_all("DocType", {"default_view": "Tree"}, pluck="name")


def add_subscription_conf():
try:
return frappe.conf.subscription

@@ -585,6 +590,7 @@ def get_sidebar_items(allowed_workspaces):
"filters": item.filters,
"route_options": item.route_options,
"tab": item.navigate_to_tab,
"open_in_new_tab": item.open_in_new_tab,
}
if item.link_type == "Report" and item.link_to and frappe.db.exists("Report", item.link_to):
report_type, ref_doctype = frappe.db.get_value(

@@ -114,7 +114,7 @@ def get(
doc.check_permission()
doc.apply_fieldlevel_read_permissions()

return doc.as_dict()
return doc.as_dict(no_nulls=True)


@frappe.whitelist()

@@ -159,11 +159,14 @@ def main(
discover_all_tests(apps, runner)

results = []
global unittest_runner
for app, category, suite in runner.iterRun():
click.secho(
f"\nRunning {suite.countTestCases()} {category} tests for {app}", fg="cyan", bold=True
)
results.append([app, category, runner.run(suite)])
main_runner = unittest_runner if junit_xml_output and unittest_runner else runner
res = main_runner.run(suite)
results.append([app, category, res])

success = all(r.wasSuccessful() for _, _, r in results)
if not success:

@@ -447,7 +450,7 @@ def run_ui_tests(
context: CliCtxObj,
app,
headless=False,
parallel=True,
parallel=False,
with_coverage=False,
browser="chrome",
ci_build_id=None,

@@ -480,6 +483,7 @@ def run_ui_tests(
real_events_plugin_path = f"{node_bin}/../cypress-real-events"
testing_library_path = f"{node_bin}/../@testing-library"
coverage_plugin_path = f"{node_bin}/../@cypress/code-coverage"
cypress_split_path = f"{node_bin}/../cypress-split"

# check if cypress in path...if not, install it.
if not (

@@ -488,6 +492,7 @@ def run_ui_tests(
and os.path.exists(real_events_plugin_path)
and os.path.exists(testing_library_path)
and os.path.exists(coverage_plugin_path)
and os.path.exists(cypress_split_path)
):
# install cypress & dependent plugins
click.secho("Installing Cypress...", fg="yellow")

@@ -499,6 +504,7 @@ def run_ui_tests(
"@testing-library/cypress@^10",
"@testing-library/dom@8.17.1",
"@cypress/code-coverage@^3",
"cypress-split@^1.0.0",
]
)

@@ -516,7 +522,10 @@ def run_ui_tests(
run_or_open = f"run --browser {browser}" if headless else "open"
if headless and spec:
run_or_open += f" --spec {spec}"
formatted_command = f"{site_env} {password_env} {coverage_env} {cypress_path} {run_or_open}"
parallel_env = "CYPRESS_CLOUD_PARALLEL=1" if parallel else "CYPRESS_CLOUD_PARALLEL=0"
formatted_command = (
f"{site_env} {password_env} {coverage_env} {parallel_env} {cypress_path} {run_or_open}"
)

if os.environ.get("CYPRESS_RECORD_KEY"):
formatted_command += " --record"

@@ -20,6 +20,22 @@ class CommunicationEmailMixin:
parent_doc = get_parent_doc(self)
return parent_doc.owner if parent_doc else None

def get_notification_recipient(self):
"""Get notification recipient of the communication docs parent.

Calls `get_notification_email` on the parent if available; otherwise returns the owner.
This uses `run_method` so hooks can customize recipients per app/site.
"""
parent_doc = get_parent_doc(self)
if not parent_doc:
return None

notification_email = parent_doc.run_method("get_notification_email")
if notification_email:
return notification_email

return parent_doc.owner

def get_all_email_addresses(self, exclude_displayname=False):
"""Get all Email addresses mentioned in the doc along with display name."""
return (
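Note (not part of the PR): a hypothetical parent controller that the new get_notification_recipient() would pick up via run_method; the doctype and field names here are assumptions for illustration only.

import frappe
from frappe.model.document import Document

class SupportTicket(Document):  # hypothetical doctype
    def get_notification_email(self):
        # If this returns a truthy address, the Communication CC uses it;
        # otherwise get_notification_recipient() falls back to self.owner.
        return self.contact_email or None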

@@ -60,7 +76,7 @@ class CommunicationEmailMixin:
"""Build cc list to send an email.

* if email copy is requested by sender, then add sender to CC.
* If this doc is created through inbound mail, then add doc owner to cc list
* If this doc is created through inbound mail, then add the notification recipient to CC
* remove all the thread_notify disabled users.
* Remove standard users from email list
"""

@@ -77,9 +93,9 @@ class CommunicationEmailMixin:
cc.append(sender)

if is_inbound_mail_communcation:
# inform parent document owner incase communication is created through inbound mail
if doc_owner := self.get_owner():
cc.append(doc_owner)
# inform the configured notification recipient in case communication is created inbound
if notification_recipient := self.get_notification_recipient():
cc.append(notification_recipient)
cc = set(cc) - {self.sender_mailid}
assignees = set(self.get_assignees()) - {self.sender_mailid}
# Check and remove If user disabled notifications for incoming emails on assigned document.

@@ -13,6 +13,7 @@ from frappe.core.doctype.version.version import get_diff
from frappe.model import no_value_fields
from frappe.utils import cint, cstr, duration_to_seconds, flt, update_progress_bar
from frappe.utils.csvutils import get_csv_content_from_google_sheets, read_csv_content
from frappe.utils.data import escape_html
from frappe.utils.xlsxutils import (
read_xls_file_from_attached_file,
read_xlsx_file_from_attached_file,

@@ -727,7 +728,9 @@ class Row:
elif df.fieldtype == "Link":
exists = self.link_exists(value, df)
if not exists:
msg = _("Value {0} missing for {1}").format(frappe.bold(value), frappe.bold(df.options))
msg = _("Value {0} missing for {1}").format(
frappe.bold(escape_html(cstr(value))), frappe.bold(df.options)
)
self.warnings.append(
{
"row": self.row_number,

@@ -746,7 +749,8 @@ class Row:
"col": col.column_number,
"field": df_as_json(df),
"message": _("Value {0} must in {1} format").format(
frappe.bold(value), frappe.bold(get_user_format(col.date_format))
frappe.bold(escape_html(cstr(value))),
frappe.bold(get_user_format(col.date_format)),
),
}
)

@@ -761,7 +765,8 @@ class Row:
"col": col.column_number,
"field": df_as_json(df),
"message": _("Value {0} must in {1} format").format(
frappe.bold(value), frappe.bold(get_user_format(col.date_format))
frappe.bold(escape_html(cstr(value))),
frappe.bold(get_user_format(col.date_format)),
),
}
)

@@ -774,7 +779,7 @@ class Row:
"col": col.column_number,
"field": df_as_json(df),
"message": _("Value {0} must be in the valid duration format: d h m s").format(
frappe.bold(value)
frappe.bold(escape_html(cstr(value)))
),
}
)

@@ -1045,7 +1050,7 @@ class Column:
]
not_exists = list(set(values) - set(exists))
if not_exists:
missing_values = ", ".join(not_exists)
missing_values = ", ".join(escape_html(v) for v in not_exists)
message = _("The following values do not exist for {0}: {1}")
self.warnings.append(
{

@@ -1088,7 +1093,7 @@ class Column:
invalid = values - set(options)
if invalid:
valid_values = ", ".join(frappe.bold(o) for o in options)
invalid_values = ", ".join(frappe.bold(i) for i in invalid)
invalid_values = ", ".join(frappe.bold(escape_html(i)) for i in invalid)
message = _("The following values are invalid: {0}. Values must be one of {1}")
self.warnings.append(
{
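Note (not part of the PR): why the import warnings now wrap user-supplied values in escape_html. A cell value can contain markup that would otherwise land unescaped in the rendered warning; a rough illustration, assuming escape_html behaves like a standard HTML escaper.

from frappe.utils.data import escape_html

value = "<img src=x onerror=alert(1)>"
# The escaped form renders as inert text in the warning instead of as markup,
# e.g. something like: &lt;img src=x onerror=alert(1)&gt;
print(escape_html(value))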

@@ -93,8 +93,9 @@ class PackageRelease(Document):

def export_package_files(self, package):
# write readme
with open(frappe.get_site_path("packages", package.package_name, "README.md"), "w") as readme:
readme.write(package.readme)
if package.readme:
with open(frappe.get_site_path("packages", package.package_name, "README.md"), "w") as readme:
readme.write(package.readme)

# write license
if package.license:

@@ -106,6 +106,13 @@ class Report(Document):

delete_custom_role("report", self.name)

def clear_cache(self):
self.update_report_cache()
return super().clear_cache()

def update_report_cache(self):
frappe.cache.delete_key("bootinfo")

def delete_report_folder(self):
from frappe.modules.export_file import delete_folder

@@ -406,3 +406,32 @@ result = [
self.assertEqual(result[-1][0], "Total")
self.assertEqual(result[-1][1], 200)
self.assertEqual(result[-1][2], 150.50)

def test_report_cache_invalidation(self):
import frappe.sessions
from frappe.utils import set_request

frappe.set_user("test@example.com")
set_request(method="GET", path="/app")

try:
frappe.sessions.get()

report_name = _save_report(
"Test Cache Invalidation Report",
"User",
json.dumps([{"fieldname": "email", "fieldtype": "Data", "label": "Email"}]),
)

cached_bootinfo = frappe.sessions.get()
self.assertIn(report_name, cached_bootinfo["user"]["all_reports"])

doc = frappe.get_doc("Report", report_name)
delete_report(doc.name)

cached_bootinfo = frappe.sessions.get()
self.assertNotIn(report_name, cached_bootinfo["user"]["all_reports"])

finally:
frappe.local.request = None
frappe.set_user("Administrator")

@@ -17,13 +17,78 @@ frappe.ui.form.on("Role", {

frm.set_df_property("is_custom", "read_only", frappe.session.user !== "Administrator");

frm.add_custom_button("Role Permissions Manager", function () {
frappe.route_options = { role: frm.doc.name };
frappe.set_route("permission-manager");
});
frm.add_custom_button("Show Users", function () {
frappe.route_options = { role: frm.doc.name };
frappe.set_route("List", "User", "Report");
});
frm.add_custom_button(
__("Role Permissions Manager"),
function () {
frappe.route_options = { role: frm.doc.name };
frappe.set_route("permission-manager");
},
__("View")
);

frm.add_custom_button(
__("Show Users"),
function () {
frappe.route_options = { role: frm.doc.name };
frappe.set_route("List", "User", "Report");
},
__("View")
);

if (frappe.user.has_role("System Manager")) {
frm.add_custom_button(
__("Replicate Role"),
function () {
replicate_role(frm);
},
__("Action")
);
}
},
});

function replicate_role(frm) {
const dialog = new frappe.ui.Dialog({
title: __("Replicate Role"),
fields: [
{
label: __("New Role Name"),
fieldname: "new_role_name",
fieldtype: "Data",
default: frm.doc.name,
reqd: 1,
},
],
freeze: true,
freeze_message: __("Replicating Role..."),
primary_action_label: __("Replicate"),
primary_action: function (values) {
dialog.hide();
frappe.call({
method: "replicate_role",
doc: frm.doc,
args: {
cur_role: frm.doc.name,
new_role: values.new_role_name,
},
callback: function (r) {
if (r.message) {
frappe.set_route("Form", "Role", r.message);
frappe.show_alert({
message: __("New role created successfully."),
indicator: "green",
});
} else if (r.exc) {
JSON.parse(r.exc).forEach((err) => {
frappe.show_alert({
message: __(err),
indicator: "red",
});
});
}
},
});
},
});
dialog.show();
}
@ -2,6 +2,7 @@
|
|||
# License: MIT. See LICENSE
|
||||
|
||||
import frappe
|
||||
from frappe.core.page.permission_manager.permission_manager import get_permissions
|
||||
from frappe.model.document import Document
|
||||
from frappe.website.path_resolver import validate_path
|
||||
from frappe.website.router import clear_routing_cache
|
||||
|
|
@ -86,6 +87,32 @@ class Role(Document):
|
|||
if user_type != user.user_type:
|
||||
user.save()
|
||||
|
||||
@frappe.whitelist()
|
||||
def replicate_role(self, cur_role: str, new_role: str) -> str:
|
||||
frappe.only_for("System Manager")
|
||||
|
||||
if frappe.db.get_value("Role", new_role, "name"):
|
||||
return frappe.errprint(f"Role {new_role} already exist.")
|
||||
|
||||
new_role = frappe.get_doc({"doctype": "Role", "role_name": new_role}).insert().name
|
||||
|
||||
perms = get_permissions(role=cur_role)
|
||||
for perm in perms:
|
||||
perm.update(
|
||||
{
|
||||
"name": None,
|
||||
"creation": None,
|
||||
"modified": None,
|
||||
"modified_by": None,
|
||||
"owner": None,
|
||||
"linked_doctypes": None,
|
||||
"role": new_role,
|
||||
}
|
||||
)
|
||||
frappe.get_doc({"doctype": "Custom DocPerm", **perm}).insert()
|
||||
|
||||
return new_role
|
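For reference, a minimal usage sketch of the new whitelisted method above, as it could be called from a bench console by a System Manager. The role names are illustrative, not part of this change.

# Hedged sketch: replicate an existing role's Custom DocPerms onto a new role.
# "Sales User" and "Sales User Copy" are illustrative names.
import frappe

role = frappe.get_doc("Role", "Sales User")
new_name = role.replicate_role(cur_role=role.name, new_role="Sales User Copy")
# new_name is the name of the freshly created Role carrying the copied permissions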
||||
|
||||
|
||||
def get_info_based_on_role(role, field="email", ignore_permissions=False):
|
||||
"""Get information of all users that have been assigned this role"""
|
||||
|
|
|
|||
|
|
@ -1,18 +0,0 @@
|
|||
// Copyright (c) 2024, Frappe Technologies and contributors
|
||||
// For license information, please see license.txt
|
||||
|
||||
frappe.ui.form.on("Role Replication", {
|
||||
refresh(frm) {
|
||||
frm.disable_save();
|
||||
frm.page.set_primary_action(__("Replicate"), ($btn) => {
|
||||
$btn.text(__("Replicating..."));
|
||||
frappe.run_serially([
|
||||
() => frappe.dom.freeze("Replicating..."),
|
||||
() => frm.call("replicate_role"),
|
||||
() => frappe.dom.unfreeze(),
|
||||
() => frappe.msgprint(__("Replication completed.")),
|
||||
() => $btn.text(__("Replicate")),
|
||||
]);
|
||||
});
|
||||
},
|
||||
});
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
{
|
||||
"actions": [],
|
||||
"creation": "2024-06-24 18:25:23.163914",
|
||||
"doctype": "DocType",
|
||||
"engine": "InnoDB",
|
||||
"field_order": [
|
||||
"existing_role",
|
||||
"column_break_ydyj",
|
||||
"new_role"
|
||||
],
|
||||
"fields": [
|
||||
{
|
||||
"fieldname": "existing_role",
|
||||
"fieldtype": "Link",
|
||||
"label": "Existing Role",
|
||||
"options": "Role"
|
||||
},
|
||||
{
|
||||
"fieldname": "column_break_ydyj",
|
||||
"fieldtype": "Column Break"
|
||||
},
|
||||
{
|
||||
"description": "Input existing role name if you would like to extend it with access of another role.",
|
||||
"fieldname": "new_role",
|
||||
"fieldtype": "Data",
|
||||
"label": "New Role"
|
||||
}
|
||||
],
|
||||
"index_web_pages_for_search": 1,
|
||||
"issingle": 1,
|
||||
"links": [],
|
||||
"modified": "2024-06-24 19:26:54.279801",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Core",
|
||||
"name": "Role Replication",
|
||||
"owner": "Administrator",
|
||||
"permissions": [
|
||||
{
|
||||
"create": 1,
|
||||
"delete": 1,
|
||||
"email": 1,
|
||||
"print": 1,
|
||||
"read": 1,
|
||||
"role": "System Manager",
|
||||
"share": 1,
|
||||
"write": 1
|
||||
}
|
||||
],
|
||||
"sort_field": "creation",
|
||||
"sort_order": "DESC",
|
||||
"states": []
|
||||
}
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
# Copyright (c) 2024, Frappe Technologies and contributors
|
||||
# For license information, please see license.txt
|
||||
|
||||
import frappe
|
||||
from frappe.core.page.permission_manager.permission_manager import get_permissions
|
||||
from frappe.model.document import Document
|
||||
from frappe.permissions import setup_custom_perms
|
||||
|
||||
|
||||
class RoleReplication(Document):
|
||||
# begin: auto-generated types
|
||||
# This code is auto-generated. Do not modify anything in this block.
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from frappe.types import DF
|
||||
|
||||
existing_role: DF.Link | None
|
||||
new_role: DF.Data | None
|
||||
# end: auto-generated types
|
||||
|
||||
@frappe.whitelist()
|
||||
def replicate_role(self):
|
||||
frappe.only_for("System Manager")
|
||||
|
||||
new_role = frappe.db.get_value("Role", self.new_role, "name")
|
||||
if not new_role:
|
||||
new_role = frappe.get_doc({"doctype": "Role", "role_name": self.new_role}).insert().name
|
||||
|
||||
perms = get_permissions(role=self.existing_role)
|
||||
|
||||
doctypes_with_custom_perms_setup = set()
|
||||
for perm in perms:
|
||||
doctype = perm.get("parent")
|
||||
if doctype and doctype not in doctypes_with_custom_perms_setup:
|
||||
# if no Custom DocPerm exists for the doctype, move standard permissions to Custom DocPerm
|
||||
# before creating first Custom DocPerm for the new role
|
||||
setup_custom_perms(doctype)
|
||||
doctypes_with_custom_perms_setup.add(doctype)
|
||||
|
||||
# Create Custom DocPerm for the new role
|
||||
frappe.get_doc(
|
||||
{
|
||||
"doctype": "Custom DocPerm",
|
||||
**perm,
|
||||
"name": None,
|
||||
"creation": None,
|
||||
"modified": None,
|
||||
"modified_by": None,
|
||||
"owner": None,
|
||||
"linked_doctypes": None,
|
||||
"role": new_role,
|
||||
}
|
||||
).insert()
|
||||
|
|
@ -1,98 +0,0 @@
|
|||
# Copyright (c) 2024, Frappe Technologies and Contributors
|
||||
# See license.txt
|
||||
|
||||
import frappe
|
||||
from frappe.permissions import get_all_perms
|
||||
from frappe.tests import IntegrationTestCase
|
||||
|
||||
|
||||
class TestRoleReplication(IntegrationTestCase):
|
||||
def setUp(self):
|
||||
# Create a test role with permissions
|
||||
self.test_role_name = "_Test Role For Replication"
|
||||
self.new_role_name = "_Test Replicated Role"
|
||||
|
||||
# Clean up any existing test roles and permissions
|
||||
self._cleanup_test_data()
|
||||
|
||||
# Create the test role
|
||||
self.test_role = frappe.get_doc({"doctype": "Role", "role_name": self.test_role_name}).insert()
|
||||
|
||||
# Add a DocPerm permission (simulating standard permission)
|
||||
# We use a doctype that doesn't have Custom DocPerm to simulate the bug scenario
|
||||
self.test_doctype = "User"
|
||||
|
||||
# First ensure no Custom DocPerm exists for this doctype
|
||||
frappe.db.delete("Custom DocPerm", {"parent": self.test_doctype})
|
||||
|
||||
# Add DocPerm for the test role
|
||||
self.test_perm = frappe.get_doc(
|
||||
{
|
||||
"doctype": "DocPerm",
|
||||
"parent": self.test_doctype,
|
||||
"parenttype": "DocType",
|
||||
"parentfield": "permissions",
|
||||
"role": self.test_role_name,
|
||||
"permlevel": 0,
|
||||
"read": 1,
|
||||
"write": 1,
|
||||
"create": 0,
|
||||
}
|
||||
).insert()
|
||||
|
||||
def _cleanup_test_data(self):
|
||||
"""Clean up test roles and permissions."""
|
||||
for role_name in [self.test_role_name, self.new_role_name]:
|
||||
frappe.db.delete("Custom DocPerm", {"role": role_name})
|
||||
frappe.db.delete("DocPerm", {"role": role_name})
|
||||
if frappe.db.exists("Role", role_name):
|
||||
frappe.delete_doc("Role", role_name, force=True)
|
||||
|
||||
def tearDown(self):
|
||||
self._cleanup_test_data()
|
||||
|
||||
def test_replicate_role_preserves_original_permissions(self):
|
||||
"""
|
||||
Test that replicating a role does not erase the original role's permissions.
|
||||
This is a regression test for https://github.com/frappe/frappe/issues/34605
|
||||
"""
|
||||
# Get original permissions count before replication using get_all_perms
|
||||
# (this is what the Role Permissions Manager UI uses)
|
||||
original_perms_before = get_all_perms(self.test_role_name)
|
||||
self.assertTrue(
|
||||
len(original_perms_before) > 0, "Test role should have permissions before replication"
|
||||
)
|
||||
|
||||
# Perform role replication
|
||||
role_replication = frappe.get_doc(
|
||||
{
|
||||
"doctype": "Role Replication",
|
||||
"existing_role": self.test_role_name,
|
||||
"new_role": self.new_role_name,
|
||||
}
|
||||
)
|
||||
role_replication.replicate_role()
|
||||
|
||||
# Verify new role was created
|
||||
self.assertTrue(frappe.db.exists("Role", self.new_role_name), "New role should be created")
|
||||
|
||||
# Verify new role has permissions
|
||||
new_role_perms = get_all_perms(self.new_role_name)
|
||||
self.assertTrue(len(new_role_perms) > 0, "New role should have permissions after replication")
|
||||
|
||||
# Verify original role still has its permissions visible via get_all_perms
|
||||
original_perms_after = get_all_perms(self.test_role_name)
|
||||
self.assertEqual(
|
||||
len(original_perms_before),
|
||||
len(original_perms_after),
|
||||
"Original role should retain all its permissions after replication",
|
||||
)
|
||||
|
||||
# Verify the original role now has Custom DocPerm entries
|
||||
original_custom_perms = frappe.get_all(
|
||||
"Custom DocPerm", filters={"role": self.test_role_name}, fields=["parent", "read", "write"]
|
||||
)
|
||||
self.assertTrue(
|
||||
len(original_custom_perms) > 0,
|
||||
"Original role should have Custom DocPerm entries after replication to preserve visibility",
|
||||
)
|
||||
|
|
@ -168,9 +168,9 @@ def queue_submission(doc: Document, action: str, alert: bool = True):
|
|||
"Submission Queue", {"ref_doctype": doc.doctype, "ref_docname": doc.name, "status": "Queued"}
|
||||
):
|
||||
frappe.msgprint(
|
||||
_(
|
||||
"This document has already been queued for submission. You can track the progress over {0}."
|
||||
).format(f"<a href='/desk/submission-queue/{existing_queue}'><b>here</b></a>"),
|
||||
_("This document has already been queued for {0}. You can track the progress over {1}.").format(
|
||||
action, f"<a href='/desk/submission-queue/{existing_queue}'><b>here</b></a>"
|
||||
),
|
||||
indicator="orange",
|
||||
alert=True,
|
||||
)
|
||||
|
|
@ -183,8 +183,8 @@ def queue_submission(doc: Document, action: str, alert: bool = True):
|
|||
|
||||
if alert:
|
||||
frappe.msgprint(
|
||||
_("Queued for Submission. You can track the progress over {0}.").format(
|
||||
f"<a href='/desk/submission-queue/{queue.name}'><b>here</b></a>"
|
||||
_("Queued for {0}. You can track the progress over {1}.").format(
|
||||
action, f"<a href='/desk/submission-queue/{queue.name}'><b>here</b></a>"
|
||||
),
|
||||
indicator="green",
|
||||
alert=True,
|
||||
|
|
|
|||
|
|
@ -51,3 +51,71 @@ class TestSubmissionQueue(IntegrationTestCase):
|
|||
job = self.queue.fetch_job(submission_queue.job_id)
|
||||
# Test completion
|
||||
self.check_status(job, status="finished")
|
||||
|
||||
def test_cancel_operation(self):
|
||||
from frappe.core.doctype.doctype.test_doctype import new_doctype
|
||||
from frappe.core.doctype.submission_queue.submission_queue import queue_submission
|
||||
|
||||
if not frappe.db.table_exists("Test Submission Queue", cached=False):
|
||||
doc = new_doctype("Test Submission Queue", is_submittable=True, queue_in_background=True)
|
||||
doc.insert()
|
||||
|
||||
d = frappe.new_doc("Test Submission Queue")
|
||||
d.update({"some_fieldname": "Random"})
|
||||
d.insert()
|
||||
d.submit()
|
||||
frappe.db.commit()
|
||||
|
||||
self.assertEqual(d.docstatus, 1)
|
||||
|
||||
queue_submission(d, "Cancel")
|
||||
frappe.db.commit()
|
||||
|
||||
time.sleep(4)
|
||||
submission_queue = frappe.get_last_doc("Submission Queue")
|
||||
|
||||
job = self.queue.fetch_job(submission_queue.job_id)
|
||||
self.check_status(job, status="finished")
|
||||
|
||||
d.reload()
|
||||
self.assertEqual(d.docstatus, 2)
|
||||
|
||||
def test_cancel_on_cancelled_doc(self):
|
||||
from frappe.core.doctype.doctype.test_doctype import new_doctype
|
||||
from frappe.core.doctype.submission_queue.submission_queue import queue_submission
|
||||
|
||||
if not frappe.db.table_exists("Test Submission Queue", cached=False):
|
||||
doc = new_doctype("Test Submission Queue", is_submittable=True, queue_in_background=True)
|
||||
doc.insert()
|
||||
|
||||
d = frappe.new_doc("Test Submission Queue")
|
||||
d.update({"some_fieldname": "Random"})
|
||||
d.insert()
|
||||
d.submit()
|
||||
frappe.db.commit()
|
||||
|
||||
existing = frappe.get_doc(
|
||||
{
|
||||
"doctype": "Submission Queue",
|
||||
"ref_doctype": d.doctype,
|
||||
"ref_docname": d.name,
|
||||
"status": "Queued",
|
||||
}
|
||||
)
|
||||
existing.insert(d, "Cancel")
|
||||
frappe.db.commit()
|
||||
|
||||
initial_count = frappe.db.count(
|
||||
"Submission Queue", {"ref_doctype": d.doctype, "ref_docname": d.name, "status": "Queued"}
|
||||
)
|
||||
|
||||
queue_submission(d, "Cancel")
|
||||
|
||||
final_count = frappe.db.count(
|
||||
"Submission Queue", {"ref_doctype": d.doctype, "ref_docname": d.name, "status": "Queued"}
|
||||
)
|
||||
|
||||
self.assertEqual(initial_count, final_count)
|
||||
|
||||
existing.delete(ignore_permissions=True)
|
||||
frappe.db.commit()
|
||||
|
|
|
|||
|
|
@ -785,11 +785,10 @@
|
|||
"label": "Only allow System Managers to upload public files"
|
||||
}
|
||||
],
|
||||
"hide_toolbar": 1,
|
||||
"icon": "fa fa-cog",
|
||||
"issingle": 1,
|
||||
"links": [],
|
||||
"modified": "2026-02-24 14:27:04.763075",
|
||||
"modified": "2026-03-28 23:46:03.614749",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Core",
|
||||
"name": "System Settings",
|
||||
|
|
|
|||
|
|
@ -16,16 +16,20 @@ class TestTranslation(IntegrationTestCase):
|
|||
clear_cache()
|
||||
|
||||
def test_doctype(self):
|
||||
translation_data = get_translation_data()
|
||||
for lang, (source_string, new_translation) in translation_data.items():
|
||||
doctype = "Translation"
|
||||
meta = frappe.get_meta(doctype)
|
||||
source_string = meta.get_label("translated_text")
|
||||
|
||||
for lang in ["de", "bs", "zh", "hr", "en", "sv"]:
|
||||
frappe.local.lang = lang
|
||||
original_translation = _(source_string)
|
||||
original_translation = _(source_string, context=doctype)
|
||||
new_translation = f"{original_translation} Customized"
|
||||
|
||||
docname = create_translation(lang, source_string, new_translation)
|
||||
self.assertEqual(_(source_string), new_translation)
|
||||
docname = create_translation(lang, source_string, new_translation, context=doctype)
|
||||
self.assertEqual(_(source_string, context=doctype), new_translation)
|
||||
|
||||
frappe.delete_doc("Translation", docname)
|
||||
self.assertEqual(_(source_string), original_translation)
|
||||
frappe.delete_doc(doctype, docname)
|
||||
self.assertEqual(_(source_string, context=doctype), original_translation)
|
||||
|
||||
def test_parent_language(self):
|
||||
data = {
|
||||
|
|
@ -60,37 +64,54 @@ class TestTranslation(IntegrationTestCase):
|
|||
source = "User"
|
||||
self.assertNotEqual(_(source, lang="de"), _(source, lang="es"))
|
||||
|
||||
def test_html_content_data_translation(self):
|
||||
# ruff: noqa: RUF001
|
||||
def test_html_content_translation(self):
|
||||
source = """
|
||||
<span style="color: rgb(51, 51, 51); font-family: "Amazon Ember", Arial, sans-serif; font-size:
|
||||
small;">MacBook Air lasts up to an incredible 12 hours between charges. So from your morning coffee to
|
||||
your evening commute, you can work unplugged. When it’s time to kick back and relax,
|
||||
you can get up to 12 hours of iTunes movie playback. And with up to 30 days of standby time,
|
||||
you can go away for weeks and pick up where you left off.Whatever the task,
|
||||
fifth-generation Intel Core i5 and i7 processors with Intel HD Graphics 6000 are up to it.</span><br>
|
||||
"""
|
||||
|
||||
To add dynamic subject, use jinja tags like
|
||||
<div><pre><code>{{ doc.name }} Billed</code></pre></div>
|
||||
""".strip()
|
||||
target = """
|
||||
MacBook Air dura hasta 12 horas increíbles entre cargas. Por lo tanto,
|
||||
desde el café de la mañana hasta el viaje nocturno, puede trabajar desconectado.
|
||||
Cuando es hora de descansar y relajarse, puede obtener hasta 12 horas de reproducción de películas de iTunes.
|
||||
Y con hasta 30 días de tiempo de espera, puede irse por semanas y continuar donde lo dejó. Sea cual sea la tarea,
|
||||
los procesadores Intel Core i5 e i7 de quinta generación con Intel HD Graphics 6000 son capaces de hacerlo.
|
||||
"""
|
||||
Um einen dynamischen Betreff hinzuzufügen, verwenden Sie Jinja-Tags wie
|
||||
<div><pre><code>{{ doc.name }} Abgerechnet</code></pre></div>
|
||||
""".strip()
|
||||
|
||||
create_translation("es", source, target)
|
||||
frappe.local.lang = "de"
|
||||
|
||||
source = """
|
||||
<span style="font-family: "Amazon Ember", Arial, sans-serif; font-size:
|
||||
small; color: rgb(51, 51, 51);">MacBook Air lasts up to an incredible 12 hours between charges. So from your morning coffee to
|
||||
your evening commute, you can work unplugged. When it’s time to kick back and relax,
|
||||
you can get up to 12 hours of iTunes movie playback. And with up to 30 days of standby time,
|
||||
you can go away for weeks and pick up where you left off.Whatever the task,
|
||||
fifth-generation Intel Core i5 and i7 processors with Intel HD Graphics 6000 are up to it.</span><br>
|
||||
"""
|
||||
self.assertEqual(_(source), source)
|
||||
|
||||
self.assertTrue(_(source), target)
|
||||
create_translation("de", source, target)
|
||||
|
||||
self.assertEqual(_(source), target)
|
||||
|
||||
def test_translated_html_is_sanitized(self):
|
||||
source = "Translation with HTML"
|
||||
target = """
|
||||
<span style="color:red" onclick="alert('xss')">Hallo</span>
|
||||
<script>alert("xss")</script>
|
||||
<iframe src="https://example.com"></iframe>
|
||||
<div>Ok</div>
|
||||
""".strip()
|
||||
|
||||
docname = create_translation("de", source, target)
|
||||
translated_text = frappe.db.get_value("Translation", docname, "translated_text")
|
||||
|
||||
self.assertIn('<span style="color:red">Hallo</span>', translated_text)
|
||||
self.assertIn("<div>Ok</div>", translated_text)
|
||||
self.assertNotIn("onclick", translated_text)
|
||||
self.assertNotIn("<script", translated_text)
|
||||
self.assertNotIn('alert("xss")', translated_text)
|
||||
self.assertNotIn("<iframe", translated_text)
|
||||
self.assertNotIn("example.com", translated_text)
|
||||
|
||||
frappe.local.lang = "de"
|
||||
self.assertEqual(_(source), translated_text)
|
||||
|
||||
def test_plain_text_translation_with_angle_brackets_is_unchanged(self):
|
||||
source = "Comparison"
|
||||
target = "1 < 2 and 3 > 2"
|
||||
|
||||
docname = create_translation("de", source, target)
|
||||
|
||||
self.assertEqual(frappe.db.get_value("Translation", docname, "translated_text"), target)
|
||||
|
||||
def test_html_message_translations(self):
|
||||
"""Test fallback for messages w/ HTML Tags"""
|
||||
|
|
@ -100,27 +121,12 @@ class TestTranslation(IntegrationTestCase):
|
|||
self.assertEqual(_(message, lang="zh"), translated_message)
|
||||
|
||||
|
||||
def get_translation_data():
|
||||
html_source_data = """<font color="#848484" face="arial, tahoma, verdana, sans-serif">
|
||||
<span style="font-size: 11px; line-height: 16.9px;">Test Data</span></font>"""
|
||||
html_translated_data = """<font color="#848484" face="arial, tahoma, verdana, sans-serif">
|
||||
<span style="font-size: 11px; line-height: 16.9px;"> testituloksia </span></font>"""
|
||||
|
||||
return {
|
||||
"hr": ["Test data", "Testdaten"],
|
||||
"ms": ["Test Data", "ujian Data"],
|
||||
"et": ["Test Data", "testandmed"],
|
||||
"es": ["Test Data", "datos de prueba"],
|
||||
"en": ["Quotation", "Tax Invoice"],
|
||||
"fi": [html_source_data, html_translated_data],
|
||||
}
|
||||
|
||||
|
||||
def create_translation(lang, source_string, new_translation) -> str:
|
||||
def create_translation(lang, source_string, new_translation, context=None) -> str:
|
||||
doc = frappe.new_doc("Translation")
|
||||
doc.language = lang
|
||||
doc.source_text = source_string
|
||||
doc.translated_text = new_translation
|
||||
doc.context = context
|
||||
doc.save()
|
||||
|
||||
return doc.name
|
||||
|
|
|
|||
|
|
@ -1,12 +1,10 @@
|
|||
# Copyright (c) 2015, Frappe Technologies and contributors
|
||||
# License: MIT. See LICENSE
|
||||
|
||||
import json
|
||||
|
||||
import frappe
|
||||
from frappe.model.document import Document
|
||||
from frappe.translate import MERGED_TRANSLATION_KEY, USER_TRANSLATION_KEY
|
||||
from frappe.utils import is_html, strip_html_tags
|
||||
from frappe.translate import MERGED_TRANSLATION_KEY, USER_TRANSLATION_KEY, change_translation_version
|
||||
from frappe.utils import sanitize_html
|
||||
|
||||
|
||||
class Translation(Document):
|
||||
|
|
@ -28,11 +26,7 @@ class Translation(Document):
|
|||
# end: auto-generated types
|
||||
|
||||
def validate(self):
|
||||
if is_html(self.source_text):
|
||||
self.remove_html_from_source()
|
||||
|
||||
def remove_html_from_source(self):
|
||||
self.source_text = strip_html_tags(self.source_text).strip()
|
||||
self.translated_text = sanitize_html(self.translated_text)
|
||||
|
||||
def on_update(self):
|
||||
clear_user_translation_cache(self.language)
|
||||
|
|
@ -46,3 +40,4 @@ class Translation(Document):
|
|||
def clear_user_translation_cache(lang):
|
||||
frappe.cache.hdel(USER_TRANSLATION_KEY, lang)
|
||||
frappe.cache.hdel(MERGED_TRANSLATION_KEY, lang)
|
||||
change_translation_version()
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ frappe.ui.form.on("User", {
|
|||
frm.set_query("default_workspace", () => {
|
||||
return {
|
||||
filters: {
|
||||
for_user: ["in", [null, frappe.session.user]],
|
||||
for_user: ["in", ["", frappe.session.user]],
|
||||
title: ["!=", "Welcome Workspace"],
|
||||
},
|
||||
};
|
||||
|
|
|
|||
|
|
@ -48,6 +48,7 @@
|
|||
"print_width",
|
||||
"alignment",
|
||||
"no_copy",
|
||||
"set_only_once",
|
||||
"allow_on_submit",
|
||||
"in_list_view",
|
||||
"in_standard_filter",
|
||||
|
|
@ -484,6 +485,12 @@
|
|||
"fieldtype": "Select",
|
||||
"label": "Button Color",
|
||||
"options": "\nDefault\nPrimary\nInfo\nSuccess\nWarning\nDanger"
|
||||
},
|
||||
{
|
||||
"default": "0",
|
||||
"fieldname": "set_only_once",
|
||||
"fieldtype": "Check",
|
||||
"label": "Set only once"
|
||||
}
|
||||
],
|
||||
"grid_page_length": 50,
|
||||
|
|
@ -491,7 +498,7 @@
|
|||
"idx": 1,
|
||||
"index_web_pages_for_search": 1,
|
||||
"links": [],
|
||||
"modified": "2025-11-12 01:14:24.753774",
|
||||
"modified": "2026-03-22 10:35:32.555267",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Custom",
|
||||
"name": "Custom Field",
|
||||
|
|
|
|||
|
|
@ -22,9 +22,9 @@ class CustomField(Document):
|
|||
if TYPE_CHECKING:
|
||||
from frappe.types import DF
|
||||
|
||||
alignment: DF.Literal["", "Left", "Center", "Right"]
|
||||
allow_in_quick_entry: DF.Check
|
||||
allow_on_submit: DF.Check
|
||||
alignment: DF.Literal["", "Left", "Center", "Right"]
|
||||
bold: DF.Check
|
||||
button_color: DF.Literal["", "Default", "Primary", "Info", "Success", "Warning", "Danger"]
|
||||
collapsible: DF.Check
|
||||
|
|
@ -114,6 +114,7 @@ class CustomField(Document):
|
|||
report_hide: DF.Check
|
||||
reqd: DF.Check
|
||||
search_index: DF.Check
|
||||
set_only_once: DF.Check
|
||||
show_dashboard: DF.Check
|
||||
sort_options: DF.Check
|
||||
translatable: DF.Check
|
||||
|
|
|
|||
|
|
@ -810,6 +810,7 @@ docfield_properties = {
|
|||
"placeholder": "Data",
|
||||
"button_color": "Select",
|
||||
"mask": "Check",
|
||||
"set_only_once": "Check",
|
||||
}
|
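For context, a minimal sketch of creating a Custom Field that uses the newly exposed property above. The target doctype and fieldname are illustrative.

# Hedged sketch: a Custom Field whose value can be set once and not edited afterwards.
import frappe

frappe.get_doc(
	{
		"doctype": "Custom Field",
		"dt": "ToDo",
		"fieldname": "external_id",
		"label": "External ID",
		"fieldtype": "Data",
		"set_only_once": 1,  # newly exposed docfield property
	}
).insert()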
||||
|
||||
doctype_link_properties = {
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@
|
|||
"in_preview",
|
||||
"bold",
|
||||
"no_copy",
|
||||
"set_only_once",
|
||||
"allow_in_quick_entry",
|
||||
"translatable",
|
||||
"mask",
|
||||
|
|
@ -509,6 +510,12 @@
|
|||
"fieldname": "mask",
|
||||
"fieldtype": "Check",
|
||||
"label": "Mask"
|
||||
},
|
||||
{
|
||||
"default": "0",
|
||||
"fieldname": "set_only_once",
|
||||
"fieldtype": "Check",
|
||||
"label": "Set only once"
|
||||
}
|
||||
],
|
||||
"grid_page_length": 50,
|
||||
|
|
@ -516,7 +523,7 @@
|
|||
"index_web_pages_for_search": 1,
|
||||
"istable": 1,
|
||||
"links": [],
|
||||
"modified": "2025-12-23 14:17:10.458916",
|
||||
"modified": "2026-03-22 10:36:12.968197",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Custom",
|
||||
"name": "Customize Form Field",
|
||||
|
|
|
|||
|
|
@ -13,10 +13,10 @@ class CustomizeFormField(Document):
|
|||
if TYPE_CHECKING:
|
||||
from frappe.types import DF
|
||||
|
||||
alignment: DF.Literal["", "Left", "Center", "Right"]
|
||||
allow_bulk_edit: DF.Check
|
||||
allow_in_quick_entry: DF.Check
|
||||
allow_on_submit: DF.Check
|
||||
alignment: DF.Literal["", "Left", "Center", "Right"]
|
||||
bold: DF.Check
|
||||
button_color: DF.Literal["", "Default", "Primary", "Info", "Success", "Warning", "Danger"]
|
||||
collapsible: DF.Check
|
||||
|
|
@ -109,6 +109,7 @@ class CustomizeFormField(Document):
|
|||
remember_last_selected_value: DF.Check
|
||||
report_hide: DF.Check
|
||||
reqd: DF.Check
|
||||
set_only_once: DF.Check
|
||||
show_dashboard: DF.Check
|
||||
sort_options: DF.Check
|
||||
translatable: DF.Check
|
||||
|
|
|
|||
|
|
@ -48,6 +48,10 @@ def func_in(key: Field, value: list | tuple) -> frappe.qb:
|
|||
"""
|
||||
if isinstance(value, str):
|
||||
value = value.split(",")
|
||||
|
||||
value = ["" if v is None else v for v in value]
|
||||
if "" in value:
|
||||
return Coalesce(key, "").isin(value)
|
||||
return key.isin(value)
|
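In practice, the change above means an "in" filter whose value list contains an empty entry now also matches rows where the column is NULL, because the column is wrapped in COALESCE. A hedged sketch with an illustrative doctype and field:

import frappe

# Matches ToDo records whose priority is "Low", an empty string, or NULL;
# None entries are normalised to "" before COALESCE(priority, '') IN (...) is built.
todos = frappe.get_all("ToDo", filters={"priority": ["in", ["Low", "", None]]})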
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -84,7 +84,7 @@ def _apply_date_field_filter_conversion(value, operator: str, doctype: str, fiel
|
|||
elif isinstance(value, datetime.datetime):
|
||||
return value.date()
|
||||
|
||||
except AttributeError, TypeError, KeyError:
|
||||
except (AttributeError, TypeError, KeyError):
|
||||
pass
|
||||
|
||||
return value
|
||||
|
|
@ -136,11 +136,7 @@ WORDS_PATTERN = re.compile(r"\w+")
|
|||
COMMA_PATTERN = re.compile(r",\s*(?![^()]*\))")
|
||||
|
||||
# Pattern for validating simple field names (alphanumeric + underscore)
|
||||
SIMPLE_FIELD_PATTERN = re.compile(r"^\w+$", flags=re.ASCII)
|
||||
|
||||
# Pattern for validating SQL identifiers (aliases, field names in functions)
|
||||
# More restrictive: must start with letter or underscore
|
||||
IDENTIFIER_PATTERN = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*$", flags=re.ASCII)
|
||||
SIMPLE_FIELD_PATTERN = re.compile(r"^\w+$")
|
||||
|
||||
# Pattern for detecting SQL function calls: identifier followed by opening parenthesis
|
||||
FUNCTION_CALL_PATTERN = re.compile(r"^\s*[a-zA-Z_][a-zA-Z0-9_]*\s*\(", flags=re.ASCII)
|
||||
|
|
@ -157,7 +153,7 @@ FUNCTION_CALL_PATTERN = re.compile(r"^\s*[a-zA-Z_][a-zA-Z0-9_]*\s*\(", flags=re.
|
|||
# - ... as 'Child:field'
|
||||
ALLOWED_FIELD_PATTERN = re.compile(
|
||||
r"^(?:(`[\w\s-]+`|\w+)\.)?(`\w+`|\w+)(?:\s+as\s+(?:`[\w\s-]+`|'[\w\s:-]+'|\w+))?$",
|
||||
flags=re.ASCII | re.IGNORECASE,
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
|
||||
# Regex to parse field names:
|
||||
|
|
@ -676,7 +672,7 @@ class Engine:
|
|||
else:
|
||||
try:
|
||||
fallback_value = int(fallback_sql)
|
||||
except ValueError, TypeError:
|
||||
except (ValueError, TypeError):
|
||||
fallback_value = fallback_sql
|
||||
|
||||
return operator_fn(_field, ValueWrapper(fallback_value))
|
||||
|
|
@ -705,7 +701,7 @@ class Engine:
|
|||
else:
|
||||
try:
|
||||
fallback_value = int(fallback_sql)
|
||||
except ValueError, TypeError:
|
||||
except (ValueError, TypeError):
|
||||
fallback_value = fallback_sql
|
||||
|
||||
if fallback_value == _value:
|
||||
|
|
@ -2424,14 +2420,15 @@ class SQLFunctionParser:
|
|||
).format(arg),
|
||||
frappe.ValidationError,
|
||||
)
|
||||
elif self._is_valid_field_name(arg):
|
||||
self._check_function_field_permission(arg)
|
||||
return self.engine.table[arg]
|
||||
|
||||
# Check if it's a numeric string like "1" (for COUNT(1), etc.)
|
||||
elif arg.isdigit():
|
||||
return int(arg)
|
||||
|
||||
elif self._is_valid_field_name(arg):
|
||||
self._check_function_field_permission(arg)
|
||||
return self.engine.table[arg]
|
||||
|
||||
else:
|
||||
frappe.throw(
|
||||
_(
|
||||
|
|
@ -2443,7 +2440,7 @@ class SQLFunctionParser:
|
|||
def _is_valid_field_name(self, name: str) -> bool:
|
||||
"""Check if a string is a valid field name."""
|
||||
# Field names should only contain alphanumeric characters and underscores
|
||||
return IDENTIFIER_PATTERN.match(name) is not None
|
||||
return SIMPLE_FIELD_PATTERN.match(name) is not None
|
||||
|
||||
def _validate_alias(self, alias: str):
|
||||
"""Validate alias name for SQL injection."""
|
||||
|
|
@ -2456,7 +2453,7 @@ class SQLFunctionParser:
|
|||
|
||||
# Alias should be a simple identifier
|
||||
# Note: pypika wraps aliases in backticks, so anything without backticks is safe
|
||||
if not IDENTIFIER_PATTERN.match(alias):
|
||||
if not SIMPLE_FIELD_PATTERN.match(alias):
|
||||
frappe.throw(
|
||||
_("Invalid alias format: {0}. Alias must be a simple identifier.").format(alias),
|
||||
frappe.ValidationError,
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from frappe import _
|
|||
from frappe.utils import cint, cstr, flt
|
||||
from frappe.utils.defaults import get_not_null_defaults
|
||||
|
||||
# This matches anything that isn't [a-zA-Z0-9_]
|
||||
# This matches anything that isn't Unicode Word Characters, Numbers and Underscore.
|
||||
SPECIAL_CHAR_PATTERN = re.compile(r"[\W]", flags=re.UNICODE)
|
||||
|
||||
VARCHAR_CAST_PATTERN = re.compile(r"varchar\(([\d]+)\)")
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@
|
|||
},
|
||||
{
|
||||
"bold": 1,
|
||||
"description": "SQL Conditions. Example: status=\"Open\"",
|
||||
"description": "SQL Conditions. Example: {\"status\" : \"open\", \"priority\" : \"medium\"}",
|
||||
"fieldname": "condition",
|
||||
"fieldtype": "Small Text",
|
||||
"label": "Condition"
|
||||
|
|
@ -52,7 +52,7 @@
|
|||
],
|
||||
"issingle": 1,
|
||||
"links": [],
|
||||
"modified": "2024-03-23 16:01:29.575802",
|
||||
"modified": "2026-04-01 12:18:08.821282",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Desk",
|
||||
"name": "Bulk Update",
|
||||
|
|
@ -70,8 +70,9 @@
|
|||
}
|
||||
],
|
||||
"quick_entry": 1,
|
||||
"row_format": "Dynamic",
|
||||
"sort_field": "creation",
|
||||
"sort_order": "DESC",
|
||||
"states": [],
|
||||
"track_changes": 1
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -31,17 +31,18 @@ class BulkUpdate(Document):
|
|||
def bulk_update(self):
|
||||
self.check_permission("write")
|
||||
limit = self.limit if self.limit and cint(self.limit) < 500 else 500
|
||||
|
||||
condition = ""
|
||||
query_args = {"doctype": self.document_type, "limit": limit, "pluck": "name"}
|
||||
if self.condition:
|
||||
if ";" in self.condition:
|
||||
frappe.throw(_("; not allowed in condition"))
|
||||
try:
|
||||
filters = frappe.parse_json(self.condition)
|
||||
if isinstance(filters, dict):
|
||||
if "or_filters" in filters:
|
||||
query_args["or_filters"] = filters.pop("or_filters")
|
||||
query_args["filters"] = filters
|
||||
except Exception as e:
|
||||
frappe.throw(_("The Bulk Update could not happen due to <b>{0}</b>").format(str(e)))
|
||||
|
||||
condition = f" where {self.condition}"
|
||||
|
||||
docnames = frappe.db.sql_list(
|
||||
f"""select name from `tab{self.document_type}`{condition} limit {limit} offset 0"""
|
||||
)
|
||||
docnames = frappe.get_all(**query_args)
|
||||
return submit_cancel_or_update_docs(
|
||||
self.document_type, docnames, "update", {self.field: self.update_value}
|
||||
)
|
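A minimal sketch of the condition format the rewritten method now expects: a JSON object of filters, optionally carrying an "or_filters" key, handed to frappe.get_all. Doctype and values below are illustrative.

import json

import frappe

bulk_update = frappe.get_doc(
	{
		"doctype": "Bulk Update",
		"document_type": "ToDo",
		"field": "status",
		"update_value": "Closed",
		# plain filters plus an optional "or_filters" key
		"condition": json.dumps(
			{"status": "Open", "or_filters": [["priority", "=", "Low"], ["priority", "=", "Medium"]]}
		),
		"limit": 100,
	}
)
bulk_update.bulk_update()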
||||
|
|
|
|||
|
|
@ -103,3 +103,45 @@ class TestBulkUpdate(IntegrationTestCase):
|
|||
docnames_bg = frappe.get_all(self.doctype, {"docstatus": 0}, limit=20, pluck="name")
|
||||
submit_cancel_or_update_docs(self.doctype, docnames_bg, action="update", data=update_data)
|
||||
self.wait_for_assertion(lambda: check_child_field(docnames_bg, "_Test Child Updated"))
|
||||
|
||||
def test_bulk_update_conditions(self):
|
||||
"""Test the whitelisted bulk update method"""
|
||||
todo_names = []
|
||||
for i in range(5):
|
||||
doc = frappe.get_doc(
|
||||
{
|
||||
"doctype": "ToDo",
|
||||
"description": f"Bulk Update Status Test {i}",
|
||||
"status": "Open" if i < 3 else "Closed",
|
||||
}
|
||||
).insert()
|
||||
todo_names.append(doc.name)
|
||||
|
||||
try:
|
||||
condition_json = frappe.as_json({"status": "Open", "name": ["in", todo_names]})
|
||||
|
||||
bulk_upd = frappe.get_doc(
|
||||
{
|
||||
"doctype": "Bulk Update",
|
||||
"document_type": "ToDo",
|
||||
"field": "status",
|
||||
"update_value": "Closed",
|
||||
"condition": condition_json,
|
||||
"limit": 5,
|
||||
}
|
||||
)
|
||||
|
||||
bulk_upd.bulk_update()
|
||||
|
||||
updated_docs = frappe.get_all("ToDo", filters={"name": ["in", todo_names]}, fields=["status"])
|
||||
|
||||
for doc in updated_docs:
|
||||
self.assertEqual(doc.status, "Closed")
|
||||
|
||||
remaining_open_count = frappe.db.count("ToDo", {"name": ["in", todo_names], "status": "Open"})
|
||||
self.assertEqual(remaining_open_count, 0)
|
||||
|
||||
finally:
|
||||
for name in todo_names:
|
||||
frappe.delete_doc("ToDo", name)
|
||||
frappe.db.commit()
|
||||
|
|
|
|||
|
|
@ -76,18 +76,6 @@ class Workspace(Document):
|
|||
|
||||
if self.public and not is_workspace_manager() and not disable_saving_as_public():
|
||||
frappe.throw(_("You need to be Workspace Manager to edit this document"))
|
||||
|
||||
if (
|
||||
not self.public
|
||||
and self.for_user
|
||||
and self.for_user != frappe.session.user
|
||||
and not is_workspace_manager()
|
||||
):
|
||||
frappe.throw(
|
||||
_("You are not allowed to edit this workspace"),
|
||||
frappe.PermissionError,
|
||||
)
|
||||
|
||||
if self.has_value_changed("title"):
|
||||
validate_route_conflict(self.doctype, self.title)
|
||||
else:
|
||||
|
|
@ -112,6 +100,14 @@ class Workspace(Document):
|
|||
|
||||
self.app = get_module_app(self.module)
|
||||
|
||||
def before_rename(self, old_name, new_name, merge=False):
|
||||
if self.public and not is_workspace_manager() and not disable_saving_as_public():
|
||||
frappe.throw(
|
||||
_("You need to be {0} to rename this document").format(frappe.bold("Workspace Manager")),
|
||||
frappe.PermissionError,
|
||||
title=_("Permission Error"),
|
||||
)
|
||||
|
||||
def clear_cache(self):
|
||||
super().clear_cache()
|
||||
if self.for_user:
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@
|
|||
"child",
|
||||
"navigate_to_tab",
|
||||
"url",
|
||||
"open_in_new_tab",
|
||||
"display_section",
|
||||
"collapsible_column",
|
||||
"collapsible",
|
||||
|
|
@ -168,13 +169,20 @@
|
|||
"fieldname": "filter_area",
|
||||
"fieldtype": "HTML",
|
||||
"label": "Filter Area"
|
||||
},
|
||||
{
|
||||
"default": "1",
|
||||
"depends_on": "eval:doc.link_type === \"URL\";",
|
||||
"fieldname": "open_in_new_tab",
|
||||
"fieldtype": "Check",
|
||||
"label": "Open in New Tab"
|
||||
}
|
||||
],
|
||||
"grid_page_length": 50,
|
||||
"index_web_pages_for_search": 1,
|
||||
"istable": 1,
|
||||
"links": [],
|
||||
"modified": "2026-01-12 15:35:56.930873",
|
||||
"modified": "2026-03-15 02:26:37.285903",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Desk",
|
||||
"name": "Workspace Sidebar Item",
|
||||
|
|
|
|||
|
|
@ -24,6 +24,7 @@ class WorkspaceSidebarItem(Document):
|
|||
link_to: DF.DynamicLink | None
|
||||
link_type: DF.Literal["DocType", "Page", "Report", "Workspace", "Dashboard", "URL"]
|
||||
navigate_to_tab: DF.Autocomplete | None
|
||||
open_in_new_tab: DF.Check
|
||||
parent: DF.Data
|
||||
parentfield: DF.Data
|
||||
parenttype: DF.Data
|
||||
|
|
|
|||
|
|
@ -64,6 +64,11 @@ def cancel(
|
|||
|
||||
if workflow_state_fieldname and workflow_state:
|
||||
doc.set(workflow_state_fieldname, workflow_state)
|
||||
|
||||
if doc.meta.queue_in_background and not is_scheduler_inactive():
|
||||
queue_submission(doc, "Cancel")
|
||||
return
|
||||
|
||||
doc.cancel()
|
||||
send_updated_docs(doc)
|
||||
frappe.msgprint(frappe._("Cancelled"), indicator="red", alert=True)
|
||||
|
|
|
|||
|
|
@ -26,9 +26,9 @@
|
|||
<div class="flex" style="gap:16px; align-items: center;">
|
||||
<div class="desktop-notifications">
|
||||
<div class="dropdown dropdown-notifications">
|
||||
<button class="btn-reset nav-link text-muted" data-toggle="dropdown" >
|
||||
<button class="btn-reset nav-link text-muted" data-toggle="dropdown" aria-label="{{ _("Notifications") }}" aria-haspopup="true">
|
||||
<svg
|
||||
class="icon icon-md"
|
||||
class="icon icon-md" aria-hidden="true"
|
||||
>
|
||||
<use href="#icon-bell"></use>
|
||||
</svg>
|
||||
|
|
@ -50,8 +50,8 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="desktop-avatar">
|
||||
</div>
|
||||
<button class="desktop-avatar btn-reset" aria-label="{{ _('User Menu') }}">
|
||||
</button>
|
||||
</div>
|
||||
|
||||
</header>
|
||||
|
|
|
|||
|
|
@ -41,18 +41,30 @@ frappe.pages["setup-wizard"].on_page_load = function (wrapper) {
|
|||
freeze: true,
|
||||
callback: function (r) {
|
||||
frappe.setup.data.lang = r.message;
|
||||
frappe.call({
|
||||
method: "frappe.desk.page.setup_wizard.setup_wizard.load_user_details",
|
||||
freeze: true,
|
||||
callback: function (r) {
|
||||
frappe.setup.data.full_name = r.message.full_name;
|
||||
frappe.setup.data.email = r.message.email;
|
||||
|
||||
frappe.setup.run_event("before_load");
|
||||
var wizard_settings = {
|
||||
parent: wrapper,
|
||||
slides: frappe.setup.slides,
|
||||
slide_class: frappe.setup.SetupWizardSlide,
|
||||
unidirectional: 1,
|
||||
done_state: 1,
|
||||
};
|
||||
frappe.wizard = new frappe.setup.SetupWizard(wizard_settings);
|
||||
frappe.setup.run_event("after_load");
|
||||
frappe.wizard.show_slide(cint(frappe.get_route()[1]));
|
||||
if (r.message.full_name) {
|
||||
frappe.setup.data.first_name = r.message.full_name.split(" ")[0];
|
||||
}
|
||||
|
||||
frappe.setup.run_event("before_load");
|
||||
var wizard_settings = {
|
||||
parent: wrapper,
|
||||
slides: frappe.setup.slides,
|
||||
slide_class: frappe.setup.SetupWizardSlide,
|
||||
unidirectional: 1,
|
||||
done_state: 1,
|
||||
};
|
||||
frappe.wizard = new frappe.setup.SetupWizard(wizard_settings);
|
||||
frappe.setup.run_event("after_load");
|
||||
frappe.wizard.show_slide(cint(frappe.get_route()[1]));
|
||||
},
|
||||
});
|
||||
},
|
||||
});
|
||||
});
|
||||
|
|
@ -388,7 +400,7 @@ frappe.setup.slides_settings = [
|
|||
{
|
||||
// Welcome (language) slide
|
||||
name: "welcome",
|
||||
title: __("Welcome"),
|
||||
title: () => __("Welcome") + " " + (frappe.setup.data.first_name || ""),
|
||||
|
||||
fields: [
|
||||
{
|
||||
|
|
@ -427,13 +439,6 @@ frappe.setup.slides_settings = [
|
|||
default: cint(frappe.telemetry.can_enable()),
|
||||
depends_on: "eval:frappe.telemetry.can_enable()",
|
||||
},
|
||||
{
|
||||
fieldname: "allow_recording_first_session",
|
||||
label: __("Allow recording my first session to improve user experience"),
|
||||
fieldtype: "Check",
|
||||
default: 0,
|
||||
depends_on: "eval:frappe.telemetry.can_enable()",
|
||||
},
|
||||
],
|
||||
|
||||
onload: function (slide) {
|
||||
|
|
@ -518,22 +523,19 @@ frappe.setup.slides_settings = [
|
|||
slide.form.fields_dict.email.df.read_only = 1;
|
||||
slide.form.fields_dict.email.refresh();
|
||||
} else {
|
||||
slide.form.fields_dict.email.df.reqd = 1;
|
||||
slide.form.fields_dict.email.refresh();
|
||||
if (!frappe.boot.is_fc_site) slide.form.fields_dict.password.df.reqd = 1;
|
||||
slide.form.fields_dict.password.refresh();
|
||||
|
||||
frappe.setup.utils.load_user_details(slide, this.setup_fields);
|
||||
}
|
||||
},
|
||||
|
||||
setup_fields: function (slide) {
|
||||
if (frappe.setup.data.full_name) {
|
||||
slide.form.fields_dict.full_name.set_input(frappe.setup.data.full_name);
|
||||
}
|
||||
if (frappe.setup.data.email) {
|
||||
let email = frappe.setup.data.email;
|
||||
slide.form.fields_dict.email.set_input(email);
|
||||
if (frappe.setup.data.full_name) {
|
||||
slide.form.fields_dict.full_name.set_input(frappe.setup.data.full_name);
|
||||
slide.form.fields_dict.full_name.df.read_only = 1;
|
||||
slide.form.fields_dict.full_name.refresh();
|
||||
}
|
||||
if (frappe.setup.data.email) {
|
||||
slide.form.fields_dict.email.set_input(frappe.setup.data.email);
|
||||
slide.form.fields_dict.email.df.read_only = 1;
|
||||
}
|
||||
slide.form.fields_dict.email.df.reqd = 1;
|
||||
slide.form.fields_dict.email.refresh();
|
||||
}
|
||||
},
|
||||
},
|
||||
|
|
|
|||
|
|
@ -186,6 +186,13 @@ def run_setup_success(args): # nosemgrep
|
|||
for hook in frappe.get_hooks("setup_wizard_success"):
|
||||
frappe.get_attr(hook)(args)
|
||||
install_fixtures.install()
|
||||
if not frappe.conf.developer_mode:
|
||||
login_as_first_user(args)
|
||||
|
||||
|
||||
def login_as_first_user(args):
|
||||
if args.get("email") and hasattr(frappe.local, "login_manager"):
|
||||
frappe.local.login_manager.login_as(args.get("email"))
|
||||
|
||||
|
||||
def get_stages_hooks(args): # nosemgrep
|
||||
|
|
|
|||
|
|
@ -393,7 +393,7 @@ def get_names_for_mentions(search_term: str):
|
|||
continue
|
||||
|
||||
mention_data["link"] = frappe.utils.get_url_to_form(
|
||||
"User Group" if mention_data.get("is_group") else "User Profile", mention_data["id"]
|
||||
"User Group" if mention_data.get("is_group") else "User", mention_data["id"]
|
||||
)
|
||||
|
||||
filtered_mentions.append(mention_data)
|
||||
|
|
|
|||
|
|
@ -492,7 +492,7 @@ class EmailAccount(Document):
|
|||
|
||||
@classmethod
|
||||
def create_dummy(cls):
|
||||
return cls.from_record({"sender": "notifications@example.com"})
|
||||
return cls.from_record({"name": "Notifications", "email_id": "notifications@example.com"})
|
||||
|
||||
@classmethod
|
||||
@cache_email_account("outgoing_email_account")
|
||||
|
|
|
|||
|
|
@ -188,16 +188,18 @@ class EmailQueue(Document):
|
|||
if ctx.smtp_server.session.has_extn("SIZE"):
|
||||
if max_size := ctx.smtp_server.session.esmtp_features.get("size"):
|
||||
max_size = int(max_size)
|
||||
msg_size = len(msg)
|
||||
|
||||
if msg_size > max_size:
|
||||
msg_size_mb = msg_size / (1024 * 1024)
|
||||
max_size_mb = max_size / (1024 * 1024)
|
||||
frappe.throw(
|
||||
_(
|
||||
"Email size {0:.2f} MB exceeds the maximum allowed size of {1:.2f} MB"
|
||||
).format(msg_size_mb, max_size_mb)
|
||||
)
|
||||
if max_size > 0:
|
||||
msg_size = len(msg)
|
||||
|
||||
if msg_size > max_size:
|
||||
msg_size_mb = msg_size / (1024 * 1024)
|
||||
max_size_mb = max_size / (1024 * 1024)
|
||||
frappe.throw(
|
||||
_(
|
||||
"Email size {0:.2f} MB exceeds the maximum allowed size of {1:.2f} MB"
|
||||
).format(msg_size_mb, max_size_mb)
|
||||
)
|
||||
|
||||
return msg
|
||||
|
||||
|
|
|
|||
|
|
@ -210,7 +210,18 @@ class EMail:
|
|||
|
||||
if has_inline_images:
|
||||
# process inline images
|
||||
message, _inline_images = replace_filename_with_cid(message)
|
||||
provided_images = {}
|
||||
if inline_images:
|
||||
for img in inline_images:
|
||||
if img.get("filename") and img.get("filecontent"):
|
||||
# index by full path and basename for flexible matching
|
||||
provided_images[img["filename"]] = img["filecontent"]
|
||||
basename = img["filename"].rsplit("/", 1)[-1]
|
||||
if basename not in provided_images:
|
||||
provided_images[basename] = img["filecontent"]
|
||||
|
||||
# process inline images while preferring provided_images over disk reads
|
||||
message, _inline_images = replace_filename_with_cid(message, provided_images)
|
||||
|
||||
# prepare parts
|
||||
msg_related = MIMEMultipart("related", policy=policy.SMTP)
|
||||
|
|
@ -571,11 +582,22 @@ def get_footer(email_account, footer=None):
|
|||
return footer
|
||||
|
||||
|
||||
def replace_filename_with_cid(message):
|
||||
def replace_filename_with_cid(message, provided_images=None):
|
||||
"""Replaces <img embed="assets/frappe/images/filename.jpg" ...> with
|
||||
<img src="cid:content_id" ...> and return the modified message and
|
||||
a list of inline_images with {filename, filecontent, content_id}
|
||||
|
||||
Args:
|
||||
message: The HTML message to process
|
||||
provided_images: A dictionary of images to use instead of reading from disk
|
||||
Example:
|
||||
{
|
||||
"assets/frappe/images/filename.jpg": filecontent,
|
||||
"filename.jpg": filecontent,
|
||||
}
|
||||
"""
|
||||
if provided_images is None:
|
||||
provided_images = {}
|
||||
|
||||
inline_images = []
|
||||
|
||||
|
|
@ -590,7 +612,11 @@ def replace_filename_with_cid(message):
|
|||
img_path_escaped = frappe.utils.html_utils.unescape_html(img_path)
|
||||
filename = img_path_escaped.rsplit("/")[-1]
|
||||
|
||||
filecontent = get_filecontent_from_path(img_path_escaped)
|
||||
# check if the image is provided in provided_images (by checking full path and basename)
|
||||
filecontent = provided_images.get(img_path_escaped) or provided_images.get(filename)
|
||||
if not filecontent:
|
||||
filecontent = get_filecontent_from_path(img_path_escaped)
|
||||
|
||||
if not filecontent:
|
||||
message = re.sub(f"""embed=['"]{re.escape(img_path)}['"]""", "", message)
|
||||
continue
|
||||
|
|
|
|||
|
|
@ -137,6 +137,43 @@ w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
|||
""".format(inline_images[0].get("content_id"))
|
||||
self.assertEqual(message, processed_message)
|
||||
|
||||
def test_sendmail_inline_images_parameter_respected(self):
|
||||
"""
|
||||
Test that inline_images parameter works through sendmail.
|
||||
Earlier this was ignored and the image was read from disk instead of using the provided content.
|
||||
The way to check this is essentially checking if the image is embedded with cid:
|
||||
<img src="cid:content_id" ...> -> Correct behavior
|
||||
If the image is not embedded with cid: -> Incorrect behavior
|
||||
"""
|
||||
|
||||
test_image_content = b"FAKE_PNG_BINARY_CONTENT_FOR_TESTING"
|
||||
|
||||
html_content = '<div><img embed="files/nonexistent_test_image.png" alt="Logo"></div>'
|
||||
|
||||
inline_images = [
|
||||
{
|
||||
"filename": "files/nonexistent_test_image.png",
|
||||
"filecontent": test_image_content,
|
||||
}
|
||||
]
|
||||
|
||||
# use QueueBuilder to send the email (sendmail uses this internally)
|
||||
from frappe.email.doctype.email_queue.email_queue import QueueBuilder
|
||||
|
||||
builder = QueueBuilder(
|
||||
recipients=["test@example.com"],
|
||||
sender="me@example.com",
|
||||
subject="Test Inline Images",
|
||||
message=html_content,
|
||||
inline_images=inline_images,
|
||||
)
|
||||
|
||||
mail = builder.prepare_email_content()
|
||||
email_string = mail.as_string()
|
||||
|
||||
self.assertIn("cid:", email_string)
|
||||
self.assertNotIn('embed="files/nonexistent_test_image.png"', email_string)
|
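A hedged sketch of the behaviour the test above locks in, using the public sendmail API its docstring refers to. The recipient, file name and bytes are illustrative; the image content is taken from memory instead of being read from disk.

import frappe

png_bytes = b"..."  # raw image content already held in memory
frappe.sendmail(
	recipients=["someone@example.com"],
	subject="Monthly Report",
	message='<p>Chart attached inline:</p><img embed="files/chart.png">',
	# matched against the embed path (full path or basename) before any disk lookup
	inline_images=[{"filename": "files/chart.png", "filecontent": png_bytes}],
)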
||||
|
||||
def test_inline_styling(self):
|
||||
html = """
|
||||
<h3>Hi John</h3>
|
||||
|
|
|
|||
|
|
@ -59,7 +59,7 @@ no,Norsk,0
|
|||
pl,Polski,0
|
||||
ps,پښتو,0
|
||||
pt,Português,0
|
||||
pt-BR,Português Brasileiro,0
|
||||
pt-BR,Português Brasileiro,1
|
||||
ro,Română,0
|
||||
ru,Русский,0
|
||||
rw,Kinyarwanda,0
|
||||
|
|
|
|||
|
|
|
@ -165,7 +165,7 @@ def upload_file():
|
|||
file = files["file"]
|
||||
filename = file.filename
|
||||
|
||||
if frappe.form_dict.chunk_index:
|
||||
if frappe.form_dict.get("chunk_index") is not None:
|
||||
current_chunk = int(frappe.form_dict.chunk_index)
|
||||
total_chunks = int(frappe.form_dict.total_chunk_count)
|
||||
offset = int(frappe.form_dict.chunk_byte_offset)
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ def get_base_url():
|
|||
|
||||
|
||||
def get_site_login_url():
|
||||
return f"{get_base_url()}/dashboard/site-login"
|
||||
return f"{get_base_url()}/dashboard/login"
|
||||
|
||||
|
||||
def get_site_name():
|
||||
|
|
|
|||
frappe/locale/ar.po (1851): File diff suppressed because it is too large
frappe/locale/bs.po (1861): File diff suppressed because it is too large
frappe/locale/cs.po (1839): File diff suppressed because it is too large
frappe/locale/da.po (1839): File diff suppressed because it is too large
frappe/locale/de.po (2396): File diff suppressed because it is too large
frappe/locale/eo.po (1853): File diff suppressed because it is too large
frappe/locale/es.po (2045): File diff suppressed because it is too large
frappe/locale/fa.po (1845): File diff suppressed because it is too large
frappe/locale/fr.po (1839): File diff suppressed because it is too large
frappe/locale/hr.po (1957): File diff suppressed because it is too large
frappe/locale/hu.po (1851): File diff suppressed because it is too large
frappe/locale/id.po (1839): File diff suppressed because it is too large
frappe/locale/it.po (1843): File diff suppressed because it is too large
(file name not shown): File diff suppressed because it is too large
frappe/locale/my.po (1839): File diff suppressed because it is too large
frappe/locale/nb.po (1849): File diff suppressed because it is too large
frappe/locale/nl.po (1843): File diff suppressed because it is too large
frappe/locale/pl.po (1839): File diff suppressed because it is too large
frappe/locale/pt.po (1839): File diff suppressed because it is too large
(file name not shown): File diff suppressed because it is too large
frappe/locale/ru.po (1851): File diff suppressed because it is too large
frappe/locale/sl.po (1839): File diff suppressed because it is too large
frappe/locale/sr.po (1849): File diff suppressed because it is too large
(file name not shown): File diff suppressed because it is too large
frappe/locale/sv.po (1875): File diff suppressed because it is too large
frappe/locale/th.po (1845): File diff suppressed because it is too large
frappe/locale/tr.po (1845): File diff suppressed because it is too large
frappe/locale/vi.po (2199): File diff suppressed because it is too large
frappe/locale/zh.po (1849): File diff suppressed because it is too large
|
|
@ -888,7 +888,11 @@ from {tables}
|
|||
if value is None:
|
||||
values = f.value or ""
|
||||
if isinstance(values, str):
|
||||
values = values.split(",")
|
||||
try:
|
||||
parsed = json.loads(values)
|
||||
values = parsed if isinstance(parsed, list) else [parsed]
|
||||
except ValueError:
|
||||
values = values.split(",")
|
||||
|
||||
fallback = "''"
|
||||
value = [frappe.db.escape((cstr(v) or "").strip(), percent=False) for v in values]
|
||||
|
|
|
|||
|
|
@ -1949,7 +1949,7 @@ class Document(BaseDocument):
|
|||
_("Table {0} cannot be empty").format(label), raise_exception or frappe.EmptyTableError
|
||||
)
|
||||
|
||||
def round_floats_in(self, doc, fieldnames=None):
|
||||
def round_floats_in(self, doc, fieldnames=None, do_not_round_fields=None):
|
||||
"""Round floats for all `Currency`, `Float`, `Percent` fields for the given doc.
|
||||
|
||||
:param doc: Document whose numeric properties are to be rounded.
|
||||
|
|
@ -1963,6 +1963,9 @@ class Document(BaseDocument):
|
|||
# PERF: flt internally has to resolve this if we don't specify it.
|
||||
rounding_method = frappe.get_system_settings("rounding_method")
|
||||
for fieldname in fieldnames:
|
||||
if do_not_round_fields and fieldname in do_not_round_fields:
|
||||
continue
|
||||
|
||||
doc.set(
|
||||
fieldname,
|
||||
flt(
|
||||
|
|
|
|||
|
|
@ -672,7 +672,7 @@ class Meta(Document):
|
|||
|
||||
@cached_property
|
||||
def high_permlevel_fields(self):
|
||||
return [df for df in self.fields if df.permlevel > 0]
|
||||
return [df for df in self.fields if (df.permlevel or 0) > 0]
|
||||
|
||||
def get_permitted_fieldnames(
|
||||
self,
|
||||
|
|
|
|||
|
|
@ -57,6 +57,7 @@ class InvalidIncludePath(frappe.ValidationError):
|
|||
|
||||
def render_include(content):
|
||||
"""render {% raw %}{% include "app/path/filename" %}{% endraw %} in js file"""
|
||||
import os
|
||||
|
||||
content = cstr(content)
|
||||
|
||||
|
|
@ -69,7 +70,13 @@ def render_include(content):
|
|||
|
||||
for path in paths:
|
||||
app, app_path = path.split("/", 1)
|
||||
with open(frappe.get_app_path(app, app_path), encoding="utf-8") as f:
|
||||
|
||||
resolved_path = os.path.realpath(frappe.get_app_path(app, app_path))
|
||||
app_root = os.path.realpath(frappe.get_app_path(app))
|
||||
if not resolved_path.startswith(app_root + os.sep):
|
||||
frappe.throw(frappe._("Security Error: The Path provided is not safe."))
|
||||
|
||||
with open(resolved_path, encoding="utf-8") as f:
|
||||
include = f.read()
|
||||
if path.endswith(".html"):
|
||||
include = html_to_js_template(path, include)
|
||||
|
|
|
|||
|
|
@ -218,6 +218,10 @@ def apply_workflow(doc: Document | str | dict, action: str):
|
|||
elif doc.docstatus.is_submitted() and new_docstatus.is_submitted():
|
||||
doc.save()
|
||||
elif doc.docstatus.is_submitted() and new_docstatus.is_cancelled():
|
||||
if doc.meta.queue_in_background and not is_scheduler_inactive():
|
||||
queue_submission(doc, "Cancel")
|
||||
return
|
||||
|
||||
doc.cancel()
|
||||
else:
|
||||
frappe.throw(_("Illegal Document Status for {0}").format(next_state.state))
|
||||
|
|
|
|||
|
|
@ -72,7 +72,6 @@ frappe.ui.form.on("Print Format", {
|
|||
},
|
||||
print_format_for: function (frm) {
|
||||
if (frm.doc.print_format_for === "Report") {
|
||||
frm.set_value("standard", "No");
|
||||
frm.set_value("custom_format", 1);
|
||||
}
|
||||
},
|
||||
|
|
|
|||
|
|
@ -81,7 +81,6 @@
|
|||
"oldfieldname": "standard",
|
||||
"oldfieldtype": "Select",
|
||||
"options": "No\nYes",
|
||||
"read_only_depends_on": "eval:doc.print_format_for === \"Report\";",
|
||||
"reqd": 1,
|
||||
"search_index": 1
|
||||
},
|
||||
|
|
@ -294,7 +293,7 @@
|
|||
"icon": "fa fa-print",
|
||||
"idx": 1,
|
||||
"links": [],
|
||||
"modified": "2026-02-11 13:17:55.662780",
|
||||
"modified": "2026-03-26 16:27:02.559100",
|
||||
"modified_by": "Administrator",
|
||||
"module": "Printing",
|
||||
"name": "Print Format",
|
||||
|
|
|
|||
|
|
@ -63,7 +63,6 @@ class PrintFormat(Document):
|
|||
def before_save(self):
|
||||
if self.print_format_for == "Report":
|
||||
self.custom_format = 1
|
||||
self.standard = "No"
|
||||
|
||||
def get_html(self, docname, letterhead=None):
|
||||
return get_html(self.doc_type, docname, self.name, letterhead)
|
||||
|
|
|
|||
|
|
@ -14,19 +14,6 @@
|
|||
</head>
|
||||
<body>
|
||||
<div class="print-format-gutter">
|
||||
{% if print_settings.repeat_header_footer %}
|
||||
<div id="footer-html" class="visible-pdf">
|
||||
{% if print_settings.letter_head && print_settings.letter_head.footer %}
|
||||
<div class="letter-head-footer">
|
||||
{{ print_settings.letter_head.footer }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<p class="text-center small page-number visible-pdf">
|
||||
{{ __("Page {0} of {1}", [`<span class="page"></span>`, `<span class="topage"></span>`]) }}
|
||||
</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="print-format {% if landscape %}landscape{% endif %}"
|
||||
{% if columns.length > 20 %}
|
||||
{% if can_use_smaller_font %}
|
||||
|
|
@ -40,6 +27,18 @@
|
|||
</div>
|
||||
{% endif %}
|
||||
{{ content }}
|
||||
{% if print_settings.repeat_header_footer %}
|
||||
<div id="footer-html" class="visible-pdf">
|
||||
{% if print_settings.letter_head && print_settings.letter_head.footer %}
|
||||
<div class="letter-head-footer">
|
||||
{{ print_settings.letter_head.footer }}
|
||||
</div>
|
||||
{% endif %}
|
||||
<p class="text-center small page-number visible-pdf">
|
||||
{{ __("Page {0} of {1}", [`<span class="page"></span>`, `<span class="topage"></span>`]) }}
|
||||
</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
|
|
|
|||
|
|
@ -61,11 +61,14 @@ let docfield_df = computed(() => {
|
|||
df.options = ["", "Email", "Name", "Phone", "URL", "Barcode", "IBAN"];
|
||||
}
|
||||
|
||||
if (store.form.selected_field.fieldtype === "Select") {
|
||||
df.description = __("Enter list of Options, each on a new line.");
|
||||
} else {
|
||||
df.description = "";
|
||||
}
|
||||
const FIELD_DESCRIPTIONS = {
|
||||
Select: __("Enter list of Options, each on a new line."),
|
||||
Currency: __(
|
||||
"Enter the fieldname of the currency field or a cached value (e.g. Company:company:default_currency)."
|
||||
),
|
||||
};
|
||||
const fieldtype = store.form.selected_field?.fieldtype;
|
||||
df.description = FIELD_DESCRIPTIONS[fieldtype] || "";
|
||||
}
|
||||
|
||||
// show link_filters docfield only when link field is selected
|
||||
|
|
|
|||
|
|
@ -30,7 +30,9 @@ frappe.Application = class Application {
|
|||
this.startup();
|
||||
}
|
||||
|
||||
startup() {
|
||||
async startup() {
|
||||
// Wait for translations to be loaded before rendering any UI
|
||||
if (frappe._translations_loaded) await frappe._translations_loaded;
|
||||
frappe.realtime.init();
|
||||
frappe.model.init();
|
||||
|
||||
|
|
@ -287,6 +289,7 @@ frappe.Application = class Application {
|
|||
} else {
|
||||
this.set_as_guest();
|
||||
}
|
||||
frappe.ui.toolbar.fetch_session_defaults();
|
||||
}
|
||||
|
||||
setup_workspaces() {
|
||||
|
|
|
|||
|
|
@ -571,7 +571,7 @@ function return_as_dataurl() {
|
|||
async function upload_file(file, i) {
|
||||
currently_uploading.value = i;
|
||||
|
||||
const CHUNK_SIZE = frappe.boot.file_chunk_size;
|
||||
const CHUNK_SIZE = frappe.boot.file_chunk_size || 25 * 1024 * 1024;
|
||||
|
||||
const use_chunks = file.file_obj && file.file_obj.size > CHUNK_SIZE;
|
||||
const total_chunks = use_chunks ? Math.ceil(file.file_obj.size / CHUNK_SIZE) : 1;
|
||||
|
|
@ -685,7 +685,6 @@ async function upload_file(file, i) {
|
|||
xhr.setRequestHeader("X-Frappe-CSRF-Token", frappe.csrf_token);
|
||||
|
||||
let form_data = new FormData();
|
||||
|
||||
if (chunk_blob) {
|
||||
form_data.append("file", chunk_blob, file.name);
|
||||
}
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.