Merge remote-tracking branch 'upstream/develop' into l10n_develop

This commit is contained in:
barredterra 2024-12-12 00:50:46 +01:00
commit 8acd5568c4
106 changed files with 2359 additions and 757 deletions

View file

@ -57,4 +57,4 @@ de9ac897482013f5464a05f3c171da0072619c3a
e9bbe03354079cfcef65a77b0c33f57b047a7c93
# ruff update
84ef6ec677c8657c3243ac456a1ef794bfb34a5
84ef6ec677c8657c3243ac456a1ef794bfb34a50

View file

@ -103,7 +103,7 @@ jobs:
if pgrep honcho > /dev/null; then
echo "Stopping honcho process..."
pgrep honcho | xargs kill
sleep 3
sleep 5
fi
echo "Setting up environment..."

View file

@ -19,11 +19,12 @@ jobs:
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const ref = context.payload.pull_request ? context.payload.pull_request.head.sha : context.sha;
const { data: pyprojectContent } = await github.rest.repos.getContent({
owner: context.repo.owner,
repo: context.repo.repo,
path: 'pyproject.toml',
ref: context.payload.pull_request.head.sha
ref: ref
});
const content = Buffer.from(pyprojectContent.content, 'base64').toString();
const toml = require('toml');
@ -38,14 +39,22 @@ jobs:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const { mypyFiles } = ${{ steps.get-pyproject.outputs.result }};
const { data: changedFiles } = await github.rest.pulls.listFiles({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.payload.pull_request.number
});
const changedMypyFiles = changedFiles
.filter(file => mypyFiles.includes(file.filename))
.map(file => file.filename);
let changedMypyFiles = [];
if (context.payload.pull_request) {
const { data: changedFiles } = await github.rest.pulls.listFiles({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: context.payload.pull_request.number
});
changedMypyFiles = changedFiles
.filter(file => mypyFiles.includes(file.filename))
.map(file => file.filename);
} else {
// If not a pull request, assume all mypy files are changed
changedMypyFiles = mypyFiles;
}
return changedMypyFiles.length > 0;
- name: Set up Python

View file

@ -51,9 +51,11 @@ from frappe.query_builder.utils import (
from frappe.utils.caching import request_cache
from frappe.utils.data import cint, cstr, sbool
from .bench_interface import Bench
# Local application imports
from .exceptions import *
from .types.frappedict import _dict
from .types import Filters, FilterSignature, FilterTuple, _dict
from .utils.jinja import (
get_email_from_template,
get_jenv,
@ -81,6 +83,7 @@ if TYPE_CHECKING: # pragma: no cover
controllers: dict[str, "Document"] = {}
local = Local()
bench = Bench()
cache: Optional["RedisWrapper"] = None
STANDARD_USERS = ("Guest", "Administrator")
@ -245,6 +248,7 @@ def init(site: str, sites_path: str = ".", new_site: bool = False, force: bool =
local.test_objects = defaultdict(list)
local.site = site
local.site_name = site # implicitly scopes bench
local.sites_path = sites_path
local.site_path = os.path.join(sites_path, site)
local.all_apps = None
@ -1383,7 +1387,13 @@ def get_doc(*args: Any, **kwargs: Any) -> "Document":
return doc
def get_last_doc(doctype, filters=None, order_by="creation desc", *, for_update=False):
def get_last_doc(
doctype,
filters: FilterSignature | None = None,
order_by="creation desc",
*,
for_update=False,
):
"""Get last created document of this type."""
d = get_all(doctype, filters=filters, limit_page_length=1, order_by=order_by, pluck="name")
if d:
@ -2380,7 +2390,9 @@ loggers: dict[str, "Logger"] = {}
log_level: int | None = None
def logger(module=None, with_more_info=False, allow_site=True, filter=None, max_size=100_000, file_count=20):
def logger(
module=None, with_more_info=False, allow_site=True, filter=None, max_size=100_000, file_count=20
) -> "Logger":
"""Return a python logger that uses StreamHandler."""
from frappe.utils.logger import get_logger

480
frappe/bench_interface.py Normal file
View file

@ -0,0 +1,480 @@
"""
This module provides a consolidated API for interacting with the Frappe bench environment.
It includes classes and utilities to manage and access various components of the bench,
such as applications, sites, logs, and configuration.
### Key Classes
- `Bench`: The central class representing the Frappe bench environment.
- `Apps`: Manages applications within the bench.
- `Sites`: Manages sites within the bench, including site configuration and scoping to a single site.
- `Logs`: Handles log files for the bench.
- `Run`: Manages the run/config directory of the bench.
### Usage
This module is designed to be used as the **only** interface to the Frappe bench, allowing for
well-specified and central interaction with its components.
### Example
```python
from bench_interface import Bench
# Initialize the bench
bench = Bench()
# Access applications
apps = bench.apps
for app in apps:
print(app.name)
# Access sites
sites = bench.sites
for site in sites:
print(site.name)
# Scope to a specific site
scoped_site = bench.sites.scope("site_name")
print(scoped_site.name)
```
### Notes
- This module uses environment variables and default paths to locate the bench and its components.
- It ensures thread-safety and security by storing site names in thread-local storage.
- Mangled attributes ensure that no accidental API surface is exposed.
"""
import json
import os
from collections.abc import Iterator
from pathlib import Path
from typing import Any, Final, cast
from typing_extensions import override
from frappe.config import ConfigHandler, ConfigType
__all__ = [
"Bench",
"BenchNotScopedError",
"BenchSiteNotLoadedError",
]
# used to implement legacy code paths to keep the main path clean
# NOTE: assigned in Bench.__init__ so legacy helpers (e.g. _is_app_installed)
# can reach the most recently constructed bench without threading it through.
_current_bench: "Bench"


class BenchNotScopedError(NotImplementedError):
	"""Raised when an operation requires a single scoped site but none is scoped."""

	pass


class BenchSiteNotLoadedError(ValueError):
	"""Raised when a requested site is not present in the loaded sites mapping."""

	pass
class _Serializable:
	"""Mixin providing a ``__json__`` hook that serializes the instance attributes."""

	def __json__(self) -> dict[str, Any]:  # type: ignore[no-any-explicit]
		# Shallow copy of the instance namespace; subclasses override to filter keys.
		return dict(self.__dict__)  # type: ignore[no-any-expr]
class _PathResolvable:
	"""Mixin resolving ``path`` from (in order): explicit argument, an environment
	variable, or a well-known directory name under ``parent_path``.

	Subclasses declare the name-mangled class attributes ``__path_name`` and
	``__env_variable`` with their own values.
	"""

	path: Path
	# NOTE: mangled per subclass, e.g. Apps defines _Apps__path_name = "apps"
	__path_name: str
	__env_variable: str

	def __init__(self, path: str | Path | None = None, parent_path: Path | None = None) -> None:
		if path is None:
			# getattr with the *subclass*-mangled name; plain self.__env_variable
			# would mangle to _PathResolvable__env_variable and miss the subclass value
			path = os.environ.get(cast(str, getattr(self, f"_{self.__class__.__name__}__env_variable")))
		if path is None and parent_path:
			path = parent_path.joinpath(
				cast(str, getattr(self, f"_{self.__class__.__name__}__path_name"))
			)
		if path is None:
			raise ValueError(f"Unable to determine path for {self.__class__.__name__}")
		self.path = Path(path).resolve()

	# os.PathLike
	def __fspath__(self) -> str:
		return str(self.path)
class _FrappeComponent:
	"""Base for apps and modules; truthiness means "valid, registered component"."""

	name: str
	path: Path
	python_path: Path
	app: "App"

	def __bool__(self) -> bool:
		# a component counts if it carries the .frappe marker file, or (legacy)
		# is listed in sites/apps.txt (apps) or the app's modules.txt (modules)
		return (
			self._is_frappe_component()
			or (isinstance(self, App) and self._is_app_installed())
			or (isinstance(self, Module) and self._is_module_registered())
		)

	def _is_frappe_component(self) -> bool:
		# modern marker: an (empty) .frappe file inside the python package
		return self.python_path.is_dir() and self.python_path.joinpath(".frappe").exists()

	def _is_app_installed(self) -> bool:
		# legacy path: substring membership in the current bench's sites/apps.txt;
		# emits a deprecation warning steering users toward the .frappe marker
		global _current_bench
		res = self.name in _current_bench.sites.path.joinpath("apps.txt").read_text()
		if res:
			from frappe.deprecation_dumpster import deprecation_warning

			deprecation_warning(
				"2024-10-18",
				"yet unknown",
				f"Instead of adding {self.name} to sites/apps.txt, drop an empty file at {self.python_path}/.frappe",
			)
			return res
		return False

	def _is_module_registered(self) -> bool:
		# legacy path: title-cased name membership in the app's modules.txt
		res = self.name.title() in self.app.python_path.joinpath("modules.txt").read_text()
		if res:
			from frappe.deprecation_dumpster import deprecation_warning

			deprecation_warning(
				"2024-10-18",
				"yet unknown",
				f"Instead of adding {self.name.title()} to {self.app.python_path}/modules.txt, drop an empty file at {self.python_path}/.frappe",
			)
			return res
		return False

	# os.PathLike
	def __fspath__(self) -> str:
		return str(self.path)
class Module(_FrappeComponent, _Serializable):
	"""A module directory inside an app's python package."""

	def __init__(self, *, name: str, path: Path, app: "App") -> None:
		self.name = name
		self.path = path
		# a module's python package is its own directory
		self.python_path = self.path
		self.app = app

	@override
	def __repr__(self) -> str:
		return (
			super().__repr__()
			+ ("\n * Path: " + str(self.path))
			+ ("\n * Python Path: " + str(self.python_path))
		)

	@override
	def __json__(self) -> dict[str, Any]:  # type: ignore[no-any-explicit,no-any-decorated]
		excluded = (
			"app",  # prevent circular deps
		)
		return {k: v for k, v in self.__dict__.items() if k not in excluded}  # type: ignore[no-any-expr]
class App(_FrappeComponent, _Serializable):
	"""A single application checked out under the bench's apps directory."""

	# lazily-built cache of this app's modules (mangled: _App__modules)
	__modules: dict[str, Module]

	def __init__(self, *, name: str, path: Path):
		self.name = name
		self.path = path
		# python package lives in a same-named subdirectory of the app checkout
		self.python_path = self.path.joinpath(self.name)

	def __iter__(self) -> Iterator[Module]:
		return iter(self.modules.values())

	def __len__(self) -> int:
		return len(self.modules)

	def __getitem__(self, key: str) -> Module:
		return self.modules[key]

	@override
	def __repr__(self) -> str:
		return (
			super().__repr__()
			+ ("\n * Path: " + str(self.path))
			+ ("\n * Python Path: " + str(self.python_path))
			+ ("\n * Modules:\n\t" + ("\n\t".join([str(module) for module in self.modules]) or "n/a"))
		)

	@override
	def __str__(self) -> str:
		return self.name

	@override
	def __json__(self) -> dict[str, Module]:
		return self.modules

	@property
	def modules(self) -> dict[str, Module]:
		"""Lazily discover and cache the app's modules (truthy Module directories)."""
		# BUGFIX: hasattr must test the *mangled* attribute name. The literal
		# "__modules" is never set on the instance (assignments below mangle to
		# _App__modules), so the old check was always False and the module scan
		# re-ran on every access, defeating the cache.
		if not hasattr(self, "_App__modules"):
			self.__modules = {
				d.name: module
				for d in self.python_path.iterdir()
				if d.is_dir() and (module := Module(name=d.name, path=d, app=self))
			}
		return self.__modules
class Apps(_PathResolvable, _Serializable):
	"""The bench's ``apps`` directory: a lazy mapping of app name -> App."""

	# lazily-built cache of apps (mangled: _Apps__apps)
	__apps: dict[str, App]
	__path_name = "apps"
	__env_variable = "FRAPPE_APPS_PATH"

	def __init__(self, path: str | Path | None = None, parent_path: Path | None = None) -> None:
		super().__init__(path, parent_path)

	def __iter__(self) -> Iterator[App]:
		return iter(self.apps.values())

	def __len__(self) -> int:
		return len(self.apps)

	def __getitem__(self, key: str) -> App:
		return self.apps[key]

	@override
	def __repr__(self) -> str:
		return (
			super().__repr__()
			+ ("\n * Apps Path: " + str(self.path))
			+ ("\n * Loaded Apps:\n\t" + ("\n\t".join([str(app) for app in self.apps]) or "n/a"))
		)

	@override
	def __json__(self) -> dict[str, App]:
		return self.apps

	@property
	def apps(self) -> dict[str, App]:
		"""Lazily discover and cache the installed apps (truthy App directories)."""
		# BUGFIX: test the mangled name "_Apps__apps"; the literal "__apps" is
		# never an instance attribute, so the old check rebuilt the dict on
		# every access (Sites._sites already uses the mangled form correctly).
		if not hasattr(self, "_Apps__apps"):
			self.__apps = {
				d.name: app for d in self.path.iterdir() if d.is_dir() and (app := App(name=d.name, path=d))
			}
		return self.__apps
class Logs(_PathResolvable, _Serializable):
	"""The bench's ``logs`` directory."""

	__path_name = "logs"
	__env_variable = "FRAPPE_LOGS_PATH"

	def __init__(self, *, path: str | Path | None = None, parent_path: Path | None = None):
		super().__init__(path, parent_path)

	def get_log_file(self, log_type: str) -> Path:
		"""Return the ``<log_type>.log`` file path inside the logs directory."""
		return self.path / f"{log_type}.log"
class Run(_PathResolvable, _Serializable):
	"""The bench's run directory (historically named ``config`` on disk)."""

	# config is the legacy naming of this folder; a misnomer
	__path_name = "config"
	__env_variable = "FRAPPE_RUN_PATH"
class Site(ConfigHandler, _Serializable):
	"""A single site; its config is the bench-wide config overlaid with site_config.json."""

	name: str
	path: Path
	bench: "Bench"
	# cached merged view of common_site_config + site_config (site wins)
	_combined_config: ConfigType

	def __init__(self, *, bench: "Bench", name: str, path: str | Path):
		self.name = name
		self.path = Path(path)
		self.bench: Bench = bench
		# the ConfigHandler part is rooted at this site's own site_config.json
		ConfigHandler.__init__(self, self.path.joinpath("site_config.json"))

	@property
	@override
	def config(self) -> ConfigType:
		# rebuild when either this site's config or the bench-wide config is stale
		if not hasattr(self, "_combined_config") or self._config_stale or self.bench.sites._config_stale:
			site_config = super().config.copy()
			config = self.bench.sites.config.copy()
			# site-level values override bench-wide (common_site_config) values
			config.update(site_config)
			self._combined_config = ConfigType(**config)
		return self._combined_config

	# os.PathLike
	def __fspath__(self) -> str:
		return str(self.path)

	@override
	def __repr__(self) -> str:
		return super().__repr__() + ("\n * Site Path: " + str(self.path)) + ("\n * Site Name: " + self.name)

	@override
	def __eq__(self, o: Any) -> bool:  # type: ignore[no-any-explicit,no-any-decorated]
		# a Site compares equal to anything whose str() matches its name
		return self.name == str(o)  # type: ignore[no-any-expr]

	@override
	def __hash__(self) -> int:
		return hash(self.name)

	@override
	def __str__(self) -> str:
		return self.name

	@override
	def __json__(self) -> dict[str, Any]:  # type: ignore[no-any-explicit,no-any-decorated]
		excluded = (
			"bench",  # prevent circular deps
			"_ConfigHandler__config",  # holds file contents
			"_config_stale",  # never stale once accessed
			"_config",
		)
		naming = {"_combined_config": "config"}
		self.config  # ensure config is loaded
		return {naming.get(k, k): v for k, v in self.__dict__.items() if k not in excluded}  # type: ignore[no-any-expr]
class Sites(_PathResolvable, ConfigHandler, _Serializable):
	"""The bench's ``sites`` directory: site discovery, scoping and common config."""

	SCOPE_ALL_SITES: Final = "__scope-all-sites__"
	# lazily-built cache of sites (mangled: _Sites__sites)
	__sites: dict[str, Site]
	__path_name = "sites"
	__env_variable = "FRAPPE_SITES_PATH"
	# legacy environment variable shim (resolved once at class-definition time)
	if legacy_env_path := os.environ.get("SITES_PATH"):
		os.environ["FRAPPE_SITES_PATH"] = legacy_env_path

	def __init__(self, *, bench: "Bench", path: str | Path | None = None, parent_path: Path | None = None):
		_PathResolvable.__init__(self, path, parent_path)
		# the ConfigHandler part is rooted at the bench-wide common_site_config.json
		ConfigHandler.__init__(self, self.path.joinpath("common_site_config.json"))
		self.bench = bench
		# security & thread-safety: site_name is stored in thread-local storage
		self.site_name = os.environ.get("FRAPPE_SITE") or cast(str | None, self.config.get("default_site"))

	@property
	def site_name(self) -> str | None:
		import frappe

		return cast(str | None, getattr(frappe.local, "site_name", None))

	@site_name.setter
	def site_name(self, value):
		# once scoped to a concrete site, widening back to all sites is forbidden
		if self.site_name is not None and value == self.SCOPE_ALL_SITES:
			raise BenchNotScopedError("Cannot scope back to SCOPE_ALL_SITES when already scoped")
		import frappe

		frappe.local.site_name = value
		# invalidate the site cache; it is keyed on the current scope
		if hasattr(self, "_Sites__sites"):
			del self.__sites

	def add_site(self, site_name: str) -> None:
		"""Create the standard directory skeleton for a new site and register it."""
		site_path = self.path.joinpath(site_name)
		for dir_path in [
			site_path.joinpath("public", "files"),
			site_path.joinpath("private", "backups"),
			site_path.joinpath("private", "files"),
			site_path.joinpath("locks"),
			site_path.joinpath("logs"),
		]:
			dir_path.mkdir(parents=True, exist_ok=True)
		# BUGFIX: go through the _sites property so the lazy cache is
		# initialized first; touching self.__sites directly raised
		# AttributeError when no site had been loaded yet.
		self._sites[site_name] = Site(bench=self.bench, name=site_name, path=site_path)

	def remove_site(self, site_name: str) -> None:
		"""Drop a site from the loaded sites mapping (does not delete files)."""
		# BUGFIX: use the _sites property for the membership test so the lazy
		# cache exists before we index into it.
		if site_name in self._sites:
			del self.__sites[site_name]
		# site_path = self.path.joinpath(site_name)
		# Note: This doesn't actually delete the site directory, just removes it from the sites dict
		# Actual deletion should be handled separately with proper safeguards

	@property
	def scoped(self) -> bool:
		"""True when scoped to exactly one concrete site."""
		return bool(self.site_name) and self.site_name != self.SCOPE_ALL_SITES

	@property
	def site(self) -> Site:
		"""The single scoped site; raises BenchNotScopedError when not scoped."""
		# security & thread-safety: site_name is stored in thread-local storage
		if not self.site_name or self.site_name == self.SCOPE_ALL_SITES:
			raise BenchNotScopedError("Sites was not scoped to a single site, yet.")
		return self[self.site_name]

	@property
	def _sites(self) -> dict[str, Site]:
		"""Lazily build the name -> Site mapping for the current scope."""
		if not hasattr(self, "_Sites__sites"):
			self.__sites = {}
			# security & thread-safety: site_name is stored in thread-local storage
			if self.site_name == self.SCOPE_ALL_SITES:
				for path in self.path.iterdir():
					if path.is_dir() and path.joinpath("site_config.json").exists():
						self.__sites[path.name] = Site(bench=self.bench, name=path.name, path=path)
			elif self.site_name:
				# init scoped site even if it doesn't exist for use during site creation
				# all file access must remain lazy and only happen after completed setup
				self.__sites[self.site_name] = Site(
					bench=self.bench, name=self.site_name, path=(self.path / self.site_name)
				)
		return self.__sites

	def __iter__(self) -> Iterator[Site]:
		# security & thread-safety: site_name is stored in thread-local storage
		if self.site_name == self.SCOPE_ALL_SITES:
			return iter(self._sites.values())
		elif self.site_name:
			return iter([self[self.site_name]])
		raise BenchNotScopedError("Sites was not scoped, yet.")

	def __len__(self) -> int:
		return len(self._sites)

	def __getitem__(self, key: str) -> Site:
		try:
			return self._sites[key]
		except KeyError:
			raise BenchSiteNotLoadedError(f"Site '{key}' was not found")

	@override
	def __repr__(self) -> str:
		return (
			super().__repr__()
			+ ("\n * Sites Path: " + str(self.path))
			+ ("\n * Scoped Site: " + (self.site_name or "n/a"))
			+ ("\n * Loaded Sites:\n\t" + ("\n\t".join([str(site) for site in self]) or "n/a"))
		)

	@override
	def __json__(self) -> dict[str, Any]:  # type: ignore[no-any-explicit,no-any-decorated]
		excluded = (
			"bench",  # prevent circular deps
			"_ConfigHandler__config",  # holds file contents
			"_config_stale",  # never stale once accessed
		)
		naming = {"_config": "config", "_Sites__sites": "_sites"}
		self._sites  # ensure sites are loaded
		return {naming.get(k, k): v for k, v in self.__dict__.items() if k not in excluded}  # type: ignore[no-any-expr]
class Bench(_PathResolvable, _Serializable):
	"""The Frappe bench environment: apps, sites, logs and the run directory."""

	__env_variable = "FRAPPE_BENCH_PATH"
	# legacy environment variable shims (resolved once at class-definition time)
	if legacy_env_path := os.environ.get("FRAPPE_BENCH_ROOT"):
		os.environ["FRAPPE_BENCH_PATH"] = legacy_env_path
	if not os.environ.get("FRAPPE_BENCH_PATH") and (legacy_env_path_sites := os.environ.get("SITES_PATH")):
		os.environ["FRAPPE_BENCH_PATH"] = str(Path(legacy_env_path_sites).resolve().parent)
	if not os.environ.get("FRAPPE_BENCH_PATH"):
		# bench folder in standard layout
		os.environ["FRAPPE_BENCH_PATH"] = str(Path(__file__).resolve().parents[3])

	def __init__(self, path: str | Path | None = None):
		super().__init__(path)
		self.logs = Logs(parent_path=self.path)
		self.run = Run(parent_path=self.path)
		self.sites = Sites(bench=self, parent_path=self.path)
		# publish this bench *before* constructing Apps: App truthiness checks
		# read _current_bench.sites (see _FrappeComponent._is_app_installed)
		global _current_bench
		_current_bench = self
		self.apps = Apps(parent_path=self.path)

	@property
	def site(self) -> Site:
		"""The single scoped site (delegates to Sites.site)."""
		return self.sites.site

	@property
	def scoped(self) -> bool:
		"""True when the bench is scoped to exactly one site."""
		return self.sites.scoped

	@override
	def __repr__(self) -> str:
		return (
			super().__repr__()
			+ ("\n * Bench Path: " + str(self.path))
			+ ("\n\n" + str(self.apps))
			+ ("\n\n" + str(self.sites))
		)

246
frappe/config.py Normal file
View file

@ -0,0 +1,246 @@
"""
This module provides a comprehensive configuration management system for the Frappe framework.
It includes classes and functions to register, load, store, and update configuration options
from various sources such as files and environment variables.
### Key Classes
- `ConfigType`: A dictionary subclass that provides attribute-style access to configuration options
and warns when accessing unregistered configuration options.
- `ConfigRegistry`: A registry for configuration options with their documentation and default values.
- `ConfigHandler`: Handles loading, storing, and updating configuration values from files and environment.
Supports hot reloading of configuration upon changes.
### Configuration Registry
The `ConfigRegistry` class allows registering configuration options with their documentation and default values.
This registry is used to manage and document all available configuration options.
### Configuration Handling
The `ConfigHandler` class is responsible for loading configuration from files, updating it from environment variables,
and applying additional configuration from external modules. It also supports hot reloading of the configuration.
### Example Usage
```python
from frappe.config import register
# Register a new configuration option
register("new_option", "Documentation for the new option", "default_value")
```
### Notes
- Configuration options can be registered with default values that can be either static or dynamic (callable).
- Environment variables are used to override configuration values.
- Additional configuration can be applied from external modules using the `extra_config` option.
- The module uses file locking to ensure safe updates to the configuration file.
### Global Configuration
The module includes global default configurations for common Frappe settings such as Redis URLs, database connections,
and more. These can be overridden using environment variables or by updating the configuration file.
"""
import importlib
import json
import os
import pprint
import re
import traceback
import warnings
from collections.abc import Callable
from pathlib import Path
from typing import Any, TypeAlias, TypedDict, cast
from filelock import FileLock, Timeout
from typing_extensions import NotRequired, override
class FrappeUnregisteredConfigOptionWarning(Warning):
	"""Warning emitted when an unregistered configuration option is accessed."""

	pass
# JSON-compatible configuration value: scalars plus nested lists/dicts
ConfigValue: TypeAlias = str | bool | int | float | list["ConfigValue"] | dict[str, "ConfigValue"]
# dynamic default: computed from the current config at access time
ConfigCallable: TypeAlias = Callable[["ConfigType"], ConfigValue | None]
class ConfigType(dict[str, ConfigValue]):
	"""A dictionary subclass that provides attribute-style access to configuration options.

	Warns when accessing unregistered configuration options. Missing keys fall
	back to the registered default (static or callable) and finally to None.
	"""

	@override
	def __repr__(self) -> str:
		return pprint.pformat(dict(self), indent=2, width=80, sort_dicts=False)

	def __getattr__(self, name: str) -> ConfigValue | None:
		if name not in registry.options:
			# filter out noise in ipython console
			if not name.startswith("_ipython") and name != "_repr_mimebundle_":
				warnings.warn(
					f"Accessing unregistered configuration option: {name}",
					FrappeUnregisteredConfigOptionWarning,
					stacklevel=2,
				)
		if name in self:
			return self[name]
		# BUGFIX: compare the registered default against None instead of relying
		# on truthiness, so falsy defaults (0, False, "") are honoured rather
		# than silently collapsing to None.
		if (option := registry.options.get(name)) is not None:
			default = option.get("default")
			if default is not None:
				# callable defaults are computed from the current config
				return default(self) if callable(default) else default
		return None
class ConfigRegistryOption(TypedDict):
	"""Registry entry: documentation plus an optional (possibly callable) default."""

	docstring: str
	default: NotRequired[ConfigCallable | ConfigValue | None]
class ConfigRegistry:
	"""Registry for configuration options with their documentation and default values."""

	def __init__(self):
		self.options: dict[str, ConfigRegistryOption] = {}

	def register(self, option: str, docstring: str, default: ConfigCallable | ConfigValue | None):
		self.options[option] = {"docstring": docstring, "default": default}

	@override
	def __repr__(self) -> str:
		"""Render the registry as a readable three-column table."""
		if not self.options:
			return "ConfigRegistry(No options registered)"
		# Column widths follow the longest option name / formatted default.
		opt_width = max(map(len, self.options))
		def_width = max(len(self._format_default(o["default"])) for o in self.options.values())
		lines = [
			f"{'Option':<{opt_width}} | {'Default':<{def_width}} | Description",
			f"{'-' * opt_width}-+-{'-' * def_width}-+{'-' * 20}",
		]
		for name, details in self.options.items():
			one_line_doc = details["docstring"].replace("\n", " ")  # keep each row on one line
			lines.append(
				f"{name:<{opt_width}} | {self._format_default(details['default']):<{def_width}} | {one_line_doc}"
			)
		return "ConfigRegistry:\n" + "\n".join(lines)

	def _format_default(self, default: ConfigCallable | ConfigValue | None) -> str:
		"""Human-readable default: callables are shown as <dynamic>."""
		return "<dynamic>" if callable(default) else str(default)
# module-level singleton registry consulted by ConfigType attribute lookups
registry = ConfigRegistry()


def register(option: str, docstring: str, default: ConfigCallable | ConfigValue | None):
	"""Register a new configuration option with documentation and default value.

	Args:
	        option: Name of the configuration option
	        docstring: Documentation describing the option
	        default: Default value for the option (a callable receives the
	                current ConfigType and computes the value at access time)
	"""
	registry.register(option, docstring, default)
# Global default config
register("redis_queue", "Redis URL for queue management", "redis://127.0.0.1:11311")
register("redis_cache", "Redis URL for caching", "redis://127.0.0.1:13311")
register("db_type", "Database type (mariadb or postgres)", "mariadb")
register("db_host", "Database host address", "127.0.0.1")
# dynamic default: the port follows the configured database type
register("db_port", "Database port number", lambda c: 5432 if c.db_type == "postgres" else 3306)
# NOTE(review): db_user and db_name default to each other — if *neither* is set,
# resolving either one recurses indefinitely; confirm at least one is always present
register("db_user", "Database user name", lambda c: c.db_name)
register("db_name", "Database name", lambda c: c.db_user)
register("db_socket", "Unix socket file path for database connection (optional)", None)
class ConfigHandler:
	"""Handles loading, storing and updating configuration values from files and environment.

	Supports hot reloading of configuration upon tainting.
	"""

	# raw JSON file contents (mangled: _ConfigHandler__config)
	__config: dict[str, ConfigValue | None]
	# resolved view: file contents + environment overrides + extra_config hooks
	_config: ConfigType
	_config_stale: bool

	def __init__(self, config_path: str | Path):
		self.config_path = Path(config_path)
		self._config_stale = True

	def taint(self):
		"Mark configuration as stale to trigger reload"
		self._config_stale = True

	@property
	def config(self) -> ConfigType:
		"Get current configuration, reloading if stale"
		if not hasattr(self, "_config") or self._config_stale:
			if self.config_path.exists():
				self.__config = json.loads(self.config_path.read_bytes())
			elif not hasattr(self, "_ConfigHandler__config"):
				# BUGFIX: default to an empty mapping when the config file does
				# not exist yet (e.g. during site creation); previously the
				# next line raised AttributeError on first access.
				self.__config = {}
			self._config = ConfigType(**self.__config)
			self._update_from_env()
			self._apply_extra_config()
			# TODO: enable in-memory caching only once we have identified a mechanism to hot-reload on external config changes
			# self._config_stale = False
		return self._config

	def update_config(self, updates: dict[str, ConfigValue | None]):
		"""Update configuration with new values and save to config file.

		Args:
		        updates: Dictionary of configuration updates to apply

		Raises:
		        Timeout: If unable to acquire file lock for saving
		"""
		if not hasattr(self, "_ConfigHandler__config"):
			# BUGFIX: allow updating before the file has ever been read
			self.__config = {}
		self.__config.update(updates)
		try:
			# file lock guards against concurrent writers of the same config file
			with FileLock(f"{self.config_path}.lock", timeout=5):
				from frappe.utils.response import json_handler

				self.config_path.write_text(
					json.dumps(
						self.__config,
						indent=2,
						default=json_handler,  # type: ignore[no-any-expr]
						sort_keys=True,
					)
				)
		except Timeout as e:
			from frappe.utils.error import log_error

			log_error(f"Filelock: Failed to acquire {self.config_path}.lock")  # type: ignore[no-untyped-call]
			raise e
		# force a reload so the resolved view picks up the new values
		self._config_stale = True

	def _update_from_env(self):
		"Update config values from environment variables"
		assert isinstance(self._config, ConfigType)  # will never be None by now
		for key in self._config.keys():
			# Convert camelCase or kebab-case to SNAKE_CASE
			env_key = re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", key)
			env_key = env_key.replace("-", "_")
			env_key = f"FRAPPE_{env_key.upper()}"
			if env_value := os.environ.get(env_key):
				self._config[key] = env_value

	def _apply_extra_config(self):
		"Apply additional configuration from external modules"
		# TODO: maybe motion to deprecate https://github.com/frappe/frappe/pull/24706#issuecomment-2471209484
		assert isinstance(self._config, ConfigType)  # will never be None by now
		if extra_config := cast(str | list[str], self._config.get("extra_config")):
			if isinstance(extra_config, str):
				extra_config = [extra_config]
			for hook in extra_config:
				try:
					# each hook is a dotted "module.method" path returning a dict
					module, method = hook.rsplit(".", 1)
					self._config.update(getattr(importlib.import_module(module), method)())  # type: ignore[no-any-expr]
				except Exception:
					# best-effort: a broken hook must not break config loading
					print(f"Config hook {hook} failed")
					traceback.print_exc()

View file

@ -0,0 +1,39 @@
[
{
"doctype": "Contact",
"salutation": "Mr",
"first_name": "_Test Contact For _Test Customer",
"is_primary_contact": 1,
"status": "Open",
"email_ids": [
{
"email_id": "test_contact@example.com",
"is_primary": 1
}
],
"phone_nos": [
{
"phone": "+91 0000000000",
"is_primary_phone": 1
}
]
},
{
"doctype": "Contact",
"first_name": "_Test Contact For _Test Supplier",
"is_primary_contact": 1,
"status": "Open",
"email_ids": [
{
"email_id": "test_contact@example.com",
"is_primary": 1
}
],
"phone_nos": [
{
"phone": "+91 0000000001",
"is_primary_phone": 1
}
]
}
]

View file

@ -1,27 +0,0 @@
[[Contact]]
salutation = "Mr"
first_name = "_Test Contact For _Test Customer"
is_primary_contact = 1
status = "Open"
[[Contact.email_ids]]
email_id = "test_contact@example.com"
is_primary = 1
[[Contact.phone_nos]]
phone = "+91 0000000000"
is_primary_phone = 1
[[Contact]]
first_name = "_Test Contact For _Test Supplier"
is_primary_contact = 1
status = "Open"
[[Contact.email_ids]]
email_id = "test_contact@example.com"
is_primary = 1
[[Contact.phone_nos]]
phone = "+91 0000000001"
is_primary_phone = 1

View file

@ -0,0 +1,8 @@
[
{
"salutation": "Mr"
},
{
"salutation": "Mrs"
}
]

View file

@ -1,6 +0,0 @@
[[Salutation]]
salutation = "Mr"
[[Salutation]]
salutation = "Mrs"

View file

@ -0,0 +1,10 @@
[
{
"doctype": "Communication",
"name": "_Test Communication 1",
"subject": "Test Subject",
"sent_or_received": "Received",
"parenttype": "User",
"parent": "Administrator"
}
]

View file

@ -1,7 +0,0 @@
[[Communication]]
name = "_Test Communication 1"
subject = "Test Subject"
sent_or_received = "Received"
parenttype = "User"
parent = "Administrator"

View file

@ -110,11 +110,21 @@ class Exporter:
return fields or []
def get_data_to_export(self):
frappe.permissions.can_export(self.doctype, raise_exception=True)
table_fields = [f for f in self.exportable_fields if f != self.doctype]
data = self.get_data_as_docs()
if not frappe.permissions.can_export(self.doctype):
if frappe.permissions.can_export(self.doctype, is_owner=True):
for doc in data:
if doc.get("owner") != frappe.session.user:
raise frappe.PermissionError(
_("You are not allowed to export {} doctype").format(self.doctype)
)
else:
raise frappe.PermissionError(
_("You are not allowed to export {} doctype").format(self.doctype)
)
for doc in data:
rows = []
rows = self.add_data_row(self.doctype, None, doc, rows, 0)
@ -163,7 +173,7 @@ class Exporter:
parent_data = frappe.db.get_list(
self.doctype,
filters=filters,
fields=["name", *parent_fields],
fields=["name", "owner", *parent_fields],
limit_page_length=self.export_page_length,
order_by=order_by,
as_list=0,

View file

@ -0,0 +1 @@
[]

View file

@ -1085,6 +1085,12 @@ def validate_series(dt, autoname=None, name=None):
df.unique = 1
break
if autoname and autoname.startswith("format:"):
from frappe.model.naming import BRACED_PARAMS_HASH_PATTERN
if len(BRACED_PARAMS_HASH_PATTERN.findall(autoname)) > 1:
frappe.throw(_("Only one set of {#} pattern is allowed in the format string"))
if (
autoname
and (not autoname.startswith("field:"))

View file

@ -42,8 +42,7 @@
"in_standard_filter": 1,
"label": "Reference DocType",
"options": "DocType",
"read_only": 1,
"search_index": 1
"read_only": 1
},
{
"fieldname": "reference_name",
@ -51,7 +50,8 @@
"in_list_view": 1,
"in_standard_filter": 1,
"label": "Reference Name",
"read_only": 1
"read_only": 1,
"search_index": 1
},
{
"fieldname": "column_break_3",
@ -72,7 +72,7 @@
"idx": 1,
"in_create": 1,
"links": [],
"modified": "2024-06-05 05:34:35.048489",
"modified": "2024-12-09 14:22:44.819718",
"modified_by": "Administrator",
"module": "Core",
"name": "Error Log",
@ -94,4 +94,4 @@
"sort_order": "DESC",
"states": [],
"title_field": "method"
}
}

View file

@ -0,0 +1 @@
[]

View file

@ -0,0 +1,10 @@
[
{
"doctype": "Report",
"name": "_Test Report 1",
"report_name": "_Test Report 1",
"report_type": "Query Report",
"is_standard": "No",
"ref_doctype": "Event"
}
]

View file

@ -1,7 +0,0 @@
[[Report]]
name = "_Test Report 1"
report_name = "_Test Report 1"
report_type = "Query Report"
is_standard = "No"
ref_doctype = "Event"

View file

@ -0,0 +1,22 @@
[
{
"doctype": "Role",
"role_name": "_Test Role",
"desk_access": 1
},
{
"doctype": "Role",
"role_name": "_Test Role 2",
"desk_access": 1
},
{
"doctype": "Role",
"role_name": "_Test Role 3",
"desk_access": 1
},
{
"doctype": "Role",
"role_name": "_Test Role 4",
"desk_access": 0
}
]

View file

@ -1,16 +0,0 @@
[[Role]]
role_name = "_Test Role"
desk_access = 1
[[Role]]
role_name = "_Test Role 2"
desk_access = 1
[[Role]]
role_name = "_Test Role 3"
desk_access = 1
[[Role]]
role_name = "_Test Role 4"
desk_access = 0

View file

@ -4,7 +4,6 @@
import datetime
from contextlib import suppress
import pytz
from rq import Worker
import frappe
@ -104,6 +103,7 @@ def serialize_worker(worker: Worker) -> frappe._dict:
def compute_utilization(worker: Worker) -> float:
with suppress(Exception):
total_time = (
datetime.datetime.now(pytz.UTC) - worker.birth_date.replace(tzinfo=pytz.UTC)
datetime.datetime.now(datetime.timezone.utc)
- worker.birth_date.replace(tzinfo=datetime.timezone.utc)
).total_seconds()
return worker.total_working_time / total_time * 100

View file

@ -0,0 +1,95 @@
[
{
"doctype": "User",
"email": "test@example.com",
"enabled": 1,
"first_name": "_Test",
"new_password": "Eastern_43A1W",
"roles": [
{
"doctype": "Has Role",
"parentfield": "roles",
"role": "_Test Role"
},
{
"doctype": "Has Role",
"parentfield": "roles",
"role": "System Manager"
}
]
},
{
"doctype": "User",
"email": "test1@example.com",
"first_name": "_Test1",
"new_password": "Eastern_43A1W"
},
{
"doctype": "User",
"email": "test2@example.com",
"first_name": "_Test2",
"new_password": "Eastern_43A1W",
"enabled": 1
},
{
"doctype": "User",
"email": "test3@example.com",
"first_name": "_Test3",
"new_password": "Eastern_43A1W",
"enabled": 1
},
{
"doctype": "User",
"email": "test4@example.com",
"first_name": "_Test4",
"new_password": "Eastern_43A1W",
"enabled": 1
},
{
"doctype": "User",
"email": "test'5@example.com",
"first_name": "_Test'5",
"new_password": "Eastern_43A1W",
"enabled": 1
},
{
"doctype": "User",
"email": "testperm@example.com",
"first_name": "_Test Perm",
"new_password": "Eastern_43A1W",
"enabled": 1
},
{
"doctype": "User",
"email": "testdelete@example.com",
"enabled": 1,
"first_name": "_Test",
"new_password": "Eastern_43A1W",
"roles": [
{
"doctype": "Has Role",
"parentfield": "roles",
"role": "_Test Role 2"
},
{
"doctype": "Has Role",
"parentfield": "roles",
"role": "System Manager"
}
]
},
{
"doctype": "User",
"email": "testpassword@example.com",
"enabled": 1,
"first_name": "_Test",
"new_password": "Eastern_43A1W",
"roles": [
{
"doctype": "Has Role",
"parentfield": "roles",
"role": "System Manager"
}
]
}
]

View file

@ -1,78 +0,0 @@
[[User]]
email = "test@example.com"
enabled = 1
first_name = "_Test"
new_password = "Eastern_43A1W"
[[User.roles]]
doctype = "Has Role"
parentfield = "roles"
role = "_Test Role"
[[User.roles]]
doctype = "Has Role"
parentfield = "roles"
role = "System Manager"
[[User]]
email = "test1@example.com"
first_name = "_Test1"
new_password = "Eastern_43A1W"
[[User]]
email = "test2@example.com"
first_name = "_Test2"
new_password = "Eastern_43A1W"
enabled = 1
[[User]]
email = "test3@example.com"
first_name = "_Test3"
new_password = "Eastern_43A1W"
enabled = 1
[[User]]
email = "test4@example.com"
first_name = "_Test4"
new_password = "Eastern_43A1W"
enabled = 1
[[User]]
email = "test'5@example.com"
first_name = "_Test'5"
new_password = "Eastern_43A1W"
enabled = 1
[[User]]
email = "testperm@example.com"
first_name = "_Test Perm"
new_password = "Eastern_43A1W"
enabled = 1
[[User]]
email = "testdelete@example.com"
enabled = 1
first_name = "_Test"
new_password = "Eastern_43A1W"
[[User.roles]]
doctype = "Has Role"
parentfield = "roles"
role = "_Test Role 2"
[[User.roles]]
doctype = "Has Role"
parentfield = "roles"
role = "System Manager"
[[User]]
email = "testpassword@example.com"
enabled = 1
first_name = "_Test"
new_password = "Eastern_43A1W"
[[User.roles]]
doctype = "Has Role"
parentfield = "roles"
role = "System Manager"

View file

@ -808,9 +808,9 @@ class User(Document):
@frappe.whitelist()
def get_timezones():
import pytz
import zoneinfo
return {"timezones": pytz.all_timezones}
return {"timezones": zoneinfo.available_timezones()}
@frappe.whitelist()

View file

@ -0,0 +1 @@
[]

View file

@ -0,0 +1 @@
[]

View file

@ -0,0 +1,10 @@
[
{
"doc_type": "User",
"doctype_or_field": "DocField",
"field_name": "location",
"property": "in_list_view",
"property_type": "Check",
"value": "1"
}
]

View file

@ -1,8 +0,0 @@
[["Property Setter"]]
doc_type = "User"
doctype_or_field = "DocField"
field_name = "location"
property = "in_list_view"
property_type = "Check"
value = "1"

View file

@ -31,6 +31,7 @@ from frappe.database.utils import (
)
from frappe.exceptions import DoesNotExistError, ImplicitCommitError
from frappe.monitor import get_trace_id
from frappe.query_builder import Case
from frappe.query_builder.functions import Count
from frappe.utils import CallbackManager, cint, get_datetime, get_table_name, getdate, now, sbool
from frappe.utils import cast as cast_fieldtype
@ -956,6 +957,139 @@ class Database:
if dt in self.value_cache:
del self.value_cache[dt]
def bulk_update(
self,
doctype: str,
doc_updates: dict,
*,
chunk_size: int = 100,
modified: str | None = None,
modified_by: str | None = None,
update_modified: bool = True,
debug: bool = False,
):
"""
:param doctype: DocType to update
:param doc_updates: Dictionary of key (docname) and values to update
:param chunk_size: Number of documents to update in a single transaction
:param modified: Use this as the `modified` timestamp.
:param modified_by: Set this user as `modified_by`.
:param update_modified: default True. Update `modified` and `modified_by` fields
:param debug: Print the query in the developer / js console.
doc_updates should be in the following format:
```py
{
"docname1": {
"field1": "value1",
"field2": "value2",
...
},
"docname2": {
"field1": "value1",
"field2": "value2",
...
},
}
```
Note:
- Bigger chunk sizes could be less performant. Use appropriate chunk size based on the number of fields to update.
"""
if not doc_updates:
return
modified_dict = None
if update_modified:
modified_dict = self._get_update_dict(
{}, None, modified=modified, modified_by=modified_by, update_modified=update_modified
)
total_docs = len(doc_updates)
iterator = iter(doc_updates.items())
for __ in range(0, total_docs, chunk_size):
doc_chunk = dict(itertools.islice(iterator, chunk_size))
self._build_and_run_bulk_update_query(doctype, doc_chunk, modified_dict, debug)
@staticmethod
def _build_and_run_bulk_update_query(
doctype: str, doc_updates: dict, modified_dict: dict | None = None, debug: bool = False
):
"""
:param doctype: DocType to update
:param doc_updates: Dictionary of key (docname) and values to update
:param debug: Print the query in the developer / js console.
---
doc_updates should be in the following format:
```py
{
"docname1": {
"field1": "value1",
"field2": "value2",
...
},
"docname2": {
"field1": "value1",
"field2": "value2",
...
},
}
```
---
Query will be built as:
```sql
UPDATE `tabItem`
SET `status` = CASE
WHEN `name` = 'Item-1' THEN 'Close'
WHEN `name` = 'Item-2' THEN 'Open'
WHEN `name` = 'Item-3' THEN 'Close'
WHEN `name` = 'Item-4' THEN 'Cancelled'
ELSE `status`
end,
`description` = CASE
WHEN `name` = 'Item-1' THEN 'This is the first task'
WHEN `name` = 'Item-2' THEN 'This is the second task'
WHEN `name` = 'Item-3' THEN 'This is the third task'
WHEN `name` = 'Item-4' THEN 'This is the fourth task'
ELSE `description`
end
WHERE `name` IN ( 'Item-1', 'Item-2', 'Item-3', 'Item-4' )
```
"""
if not doc_updates:
return
dt = frappe.qb.DocType(doctype)
update_query = frappe.qb.update(dt)
conditions = {}
docnames = list(doc_updates.keys())
for docname, row in doc_updates.items():
for field, value in row.items():
# CASE
if field not in conditions:
conditions[field] = Case()
# WHEN
conditions[field].when(dt.name == docname, value)
for field in conditions:
# ELSE
update_query = update_query.set(dt[field], conditions[field].else_(dt[field]))
if modified_dict:
for column, value in modified_dict.items():
update_query = update_query.set(dt[column], value)
update_query.where(dt.name.isin(docnames)).run(debug=debug)
def set_global(self, key, val, user="__global"):
"""Save a global key value. Global values will be automatically set if they match fieldname."""
self.set_default(key, val, user)

View file

@ -20,6 +20,7 @@ import inspect
import os
import re
import sys
import typing
import warnings
from importlib.metadata import version
@ -86,7 +87,7 @@ def __get_deprecation_class(graduation: str | None = None, class_name: str | Non
try:
return getattr(current_module, class_name)
except AttributeError:
return PendingDeprecationWarning
return PendingFrappeDeprecationWarning
# Parse PYTHONWARNINGS environment variable
@ -200,6 +201,44 @@ def deprecation_warning(marked: str, graduation: str, msg: str):
### Party starts here
if typing.TYPE_CHECKING:
from werkzeug.local import Local
def get_local_with_deprecations() -> "Local":
from werkzeug.local import Local
class DeprecatedLocalAttribute:
def __init__(self, name, warning):
self.name = name
self.warning = warning
def __get__(self, obj, type=None):
self.warning()
return obj.__getattr__(self.name)
def __set__(self, obj, value):
return obj.__setattr__(self.name, value)
def __delete__(self, obj):
return obj.__delattr__(self.name)
class LocalWithDeprecations(Local):
"""Can deprecate local attributes."""
# sites_path = DeprecatedLocalAttribute(
# "sites_path",
# lambda: deprecation_warning(
# "2024-12-06",
# "v17",
# "'local.sites_path' will be deprecated: use 'frappe.bench.sites.path instead'",
# ),
# )
return LocalWithDeprecations()
def _old_deprecated(func):
return deprecated(
"frappe.deprecations.deprecated",
@ -814,15 +853,19 @@ def get_tests_CompatFrappeTestCase():
@contextmanager
def freeze_time(self, time_to_freeze, is_utc=False, *args, **kwargs):
import pytz
from zoneinfo import ZoneInfo
from freezegun import freeze_time
from frappe.utils.data import convert_utc_to_timezone, get_datetime, get_system_timezone
if not is_utc:
# Freeze time expects UTC or tzaware objects. We have neither, so convert to UTC.
timezone = pytz.timezone(get_system_timezone())
time_to_freeze = timezone.localize(get_datetime(time_to_freeze)).astimezone(pytz.utc)
time_to_freeze = (
get_datetime(time_to_freeze)
.replace(tzinfo=ZoneInfo(get_system_timezone()))
.astimezone(ZoneInfo("UTC"))
)
with freeze_time(time_to_freeze, *args, **kwargs):
yield
@ -1001,3 +1044,17 @@ def get_number_format_info(format: str) -> tuple[str, str, int]:
from frappe.utils.number_format import NUMBER_FORMAT_MAP
return NUMBER_FORMAT_MAP.get(format) or (".", ",", 2)
@deprecated(
"modules.txt",
"2024-11-12",
"yet unknown",
"""It has been added for compatibility in addition to the new .frappe sentinel file inside the module. This is for your info: you don't have to do anything.
""",
)
def boilerplate_modules_txt(dest, app_name, app_title):
import frappe
with open(os.path.join(dest, app_name, app_name, "modules.txt"), "w") as f:
f.write(frappe.as_unicode(app_title))

View file

@ -125,7 +125,7 @@ def get(
filters = []
# don't include cancelled documents
filters.append([chart.document_type, "docstatus", "<", 2, False])
filters.append([chart.document_type, "docstatus", "<", 2])
if chart.chart_type == "Group By":
chart_config = get_group_by_chart_config(chart, filters)
@ -196,8 +196,8 @@ def get_chart_config(chart, filters, timespan, timegrain, from_date, to_date):
from_date = from_date.strftime("%Y-%m-%d")
to_date = to_date
filters.append([doctype, datefield, ">=", from_date, False])
filters.append([doctype, datefield, "<=", to_date, False])
filters.append([doctype, datefield, ">=", from_date])
filters.append([doctype, datefield, "<=", to_date])
data = frappe.get_list(
doctype,
@ -231,8 +231,8 @@ def get_heatmap_chart_config(chart, filters, heatmap_year):
year_start_date = datetime.date(year, 1, 1).strftime("%Y-%m-%d")
next_year_start_date = datetime.date(year + 1, 1, 1).strftime("%Y-%m-%d")
filters.append([doctype, datefield, ">", f"{year_start_date}", False])
filters.append([doctype, datefield, "<", f"{next_year_start_date}", False])
filters.append([doctype, datefield, ">", f"{year_start_date}"])
filters.append([doctype, datefield, "<", f"{next_year_start_date}"])
if frappe.db.db_type == "mariadb":
timestamp_field = f"unix_timestamp({datefield})"

View file

@ -0,0 +1,23 @@
[
{
"doctype": "Event",
"subject":"_Test Event 1",
"starts_on": "2014-01-01",
"event_type": "Public",
"creation": "2014-01-01"
},
{
"doctype": "Event",
"subject":"_Test Event 2",
"starts_on": "2014-01-01",
"event_type": "Private",
"creation": "2014-01-01"
},
{
"doctype": "Event",
"subject": "_Test Event 3",
"starts_on": "2014-02-01",
"event_type": "Private",
"creation": "2014-02-01"
}
]

View file

@ -1,18 +0,0 @@
[[Event]]
subject = "_Test Event 1"
starts_on = "2014-01-01"
event_type = "Public"
creation = "2014-01-01"
[[Event]]
subject = "_Test Event 2"
starts_on = "2014-01-01"
event_type = "Private"
creation = "2014-01-01"
[[Event]]
subject = "_Test Event 3"
starts_on = "2014-02-01"
event_type = "Private"
creation = "2014-02-01"

View file

@ -0,0 +1,7 @@
[
{
"doctype": "Note",
"name": "_Test Note 1",
"title": "Test Note Title"
}
]

View file

@ -1,4 +0,0 @@
[[Note]]
name = "_Test Note 1"
title = "Test Note Title"

View file

@ -71,9 +71,23 @@ def get_group_by_count(doctype: str, current_filters: str, field: str) -> list[d
group_by=f"`tab{doctype}`.{field}",
fields=["count(*) as count", f"`{field}` as name"],
order_by="count desc",
limit=50,
)
if field == "owner":
owner_idx = None
for idx, item in enumerate(data):
if item.name == frappe.session.user:
owner_idx = idx
break
if owner_idx:
data = [data.pop(owner_idx)] + data[0:49]
else:
data = data[0:50]
else:
data = data[0:50]
# Add in title if it's a link field and `show_title_field_in_link` is set
if (field_meta := meta.get_field(field)) and field_meta.fieldtype == "Link":
link_meta = frappe.get_meta(field_meta.options)

View file

@ -357,14 +357,16 @@ def export_query():
form_params["limit_page_length"] = None
form_params["as_list"] = True
doctype = form_params.pop("doctype")
if isinstance(form_params["fields"], list):
form_params["fields"].append("owner")
elif isinstance(form_params["fields"], tuple):
form_params["fields"] = form_params["fields"] + ("owner",)
file_format_type = form_params.pop("file_format_type")
title = form_params.pop("title", doctype)
csv_params = pop_csv_params(form_params)
add_totals_row = 1 if form_params.pop("add_totals_row", None) == "1" else None
translate_values = 1 if form_params.pop("translate_values", None) == "1" else None
frappe.permissions.can_export(doctype, raise_exception=True)
if selection := form_params.pop("selected_items", None):
form_params["filters"] = {"name": ("in", json.loads(selection))}
@ -378,6 +380,16 @@ def export_query():
db_query = DatabaseQuery(doctype)
ret = db_query.execute(**form_params)
if not frappe.permissions.can_export(doctype):
if frappe.permissions.can_export(doctype, is_owner=True):
for row in ret:
if row[-1] != frappe.session.user:
raise frappe.PermissionError(
_("You are not allowed to export {} doctype").format(doctype)
)
else:
raise frappe.PermissionError(_("You are not allowed to export {} doctype").format(doctype))
if add_totals_row:
ret = append_totals_row(ret)

View file

@ -51,6 +51,7 @@
"imap_folder",
"section_break_12",
"append_emails_to_sent_folder",
"sent_folder_name",
"append_to",
"create_contact",
"enable_automatic_linking",
@ -691,12 +692,19 @@
"fieldname": "backend_app_flow",
"fieldtype": "Check",
"label": "Authenticate as Service Principal"
},
{
"depends_on": "eval:!doc.domain && doc.enable_outgoing && doc.enable_incoming && doc.use_imap",
"fetch_from": "domain.sent_folder_name",
"fieldname": "sent_folder_name",
"fieldtype": "Data",
"label": "Sent Folder Name"
}
],
"icon": "fa fa-inbox",
"index_web_pages_for_search": 1,
"links": [],
"modified": "2024-11-11 10:12:06.667888",
"modified": "2024-12-04 23:30:37.622353",
"modified_by": "Administrator",
"module": "Email",
"name": "Email Account",

View file

@ -101,6 +101,7 @@ class EmailAccount(Document):
password: DF.Password | None
send_notification_to: DF.SmallText | None
send_unsubscribe_message: DF.Check
sent_folder_name: DF.Data | None
service: DF.Literal[
"", "Frappe Mail", "GMail", "Sendgrid", "SparkPost", "Yahoo Mail", "Outlook.com", "Yandex.Mail"
]
@ -774,7 +775,10 @@ class EmailAccount(Document):
try:
email_server = self.get_incoming_server(in_receive=True)
message = safe_encode(message)
email_server.imap.append("Sent", "\\Seen", imaplib.Time2Internaldate(time.time()), message)
sent_folder_name = self.sent_folder_name or "Sent"
email_server.imap.append(
sent_folder_name, "\\Seen", imaplib.Time2Internaldate(time.time()), message
)
except Exception:
self.log_error("Unable to add to Sent folder")

View file

@ -0,0 +1,29 @@
[
{
"is_default": 1,
"is_global": 1,
"doctype": "Email Account",
"domain":"example.com",
"email_account_name": "_Test Email Account 1",
"enable_outgoing": 1,
"smtp_server": "test.example.com",
"email_id": "test@example.com",
"password": "password",
"add_signature": 1,
"signature": "\nBest Wishes\nTest Signature",
"enable_auto_reply": 1,
"auto_reply_message": "",
"enable_incoming": 1,
"notify_if_unreplied": 1,
"unreplied_for_mins": 20,
"send_notification_to": "test_unreplied@example.com",
"pop3_server": "pop.test.example.com",
"append_to": "ToDo",
"imap_folder": [{"folder_name": "INBOX", "append_to": "ToDo"}, {"folder_name": "Test Folder", "append_to": "Communication"}],
"track_email_status": 1
},
{
"doctype": "ToDo",
"description":"test doctype"
}
]

View file

@ -1,32 +0,0 @@
[["Email Account"]]
is_default = 1
is_global = 1
domain = "example.com"
email_account_name = "_Test Email Account 1"
enable_outgoing = 1
smtp_server = "test.example.com"
email_id = "test@example.com"
password = "password"
add_signature = 1
signature = "\nBest Wishes\nTest Signature"
enable_auto_reply = 1
auto_reply_message = ""
enable_incoming = 1
notify_if_unreplied = 1
unreplied_for_mins = 20
send_notification_to = "test_unreplied@example.com"
pop3_server = "pop.test.example.com"
append_to = "ToDo"
track_email_status = 1
[["Email Account".imap_folder]]
folder_name = "INBOX"
append_to = "ToDo"
[["Email Account".imap_folder]]
folder_name = "Test Folder"
append_to = "Communication"
[[ToDo]]
description = "test doctype"

View file

@ -24,7 +24,8 @@
"validate_ssl_certificate_for_outgoing",
"column_break_18",
"smtp_port",
"append_emails_to_sent_folder"
"append_emails_to_sent_folder",
"sent_folder_name"
],
"fields": [
{
@ -141,6 +142,14 @@
"fieldname": "validate_ssl_certificate_for_outgoing",
"fieldtype": "Check",
"label": "Validate SSL Certificate"
},
{
"default": "Sent",
"depends_on": "eval: doc.append_emails_to_sent_folder",
"description": "Some mailboxes require a different Sent Folder Name e.g. \"INBOX.Sent\"",
"fieldname": "sent_folder_name",
"fieldtype": "Data",
"label": "Sent Folder Name"
}
],
"icon": "icon-inbox",
@ -150,7 +159,7 @@
"link_fieldname": "domain"
}
],
"modified": "2024-03-23 16:03:23.836849",
"modified": "2024-12-04 23:26:20.993971",
"modified_by": "Administrator",
"module": "Email",
"name": "Email Domain",

View file

@ -66,6 +66,7 @@ class EmailDomain(Document):
domain_name: DF.Data
email_server: DF.Data
incoming_port: DF.Data | None
sent_folder_name: DF.Data | None
smtp_port: DF.Data | None
smtp_server: DF.Data
use_imap: DF.Check

View file

@ -0,0 +1,32 @@
[
{
"doctype": "Email Domain",
"domain_name": "test.com",
"email_id": "_test@test.com",
"email_server": "imap.test.com",
"use_imap": "imap.test.com",
"use_ssl": 1,
"use_tls": 1,
"incoming_port": "993",
"attachment_limit": "1",
"smtp_server": "smtp.test.com",
"smtp_port": "587",
"password": "password"
},
{
"doctype": "Email Account",
"name": "_Test Email Account 1",
"enable_incoming": 1,
"email_id": "_test@test.com",
"domain": "test.com",
"email_server": "imap.test.com",
"use_imap": 1,
"use_ssl": 0,
"use_tls": 1,
"incoming_port": "143",
"attachment_limit": "1",
"smtp_server": "smtp.test.com",
"smtp_port": "587",
"password": "password"
}
]

View file

@ -1,28 +0,0 @@
[["Email Domain"]]
domain_name = "test.com"
email_id = "_test@test.com"
email_server = "imap.test.com"
use_imap = "imap.test.com"
use_ssl = 1
use_tls = 1
incoming_port = "993"
attachment_limit = "1"
smtp_server = "smtp.test.com"
smtp_port = "587"
password = "password"
[["Email Account"]]
name = "_Test Email Account 1"
enable_incoming = 1
email_id = "_test@test.com"
domain = "test.com"
email_server = "imap.test.com"
use_imap = 1
use_ssl = 0
use_tls = 1
incoming_port = "143"
attachment_limit = "1"
smtp_server = "smtp.test.com"
smtp_port = "587"
password = "password"

View file

@ -0,0 +1,6 @@
[
{
"doctype": "Email Group",
"title": "_Test Email Group"
}
]

View file

@ -1,3 +0,0 @@
[["Email Group"]]
title = "_Test Email Group"

View file

@ -0,0 +1,77 @@
[
{
"doctype": "Notification",
"subject":"_Test Notification 1",
"document_type": "Communication",
"event": "New",
"attach_print": 0,
"message": "New communication {{ doc.content }} created",
"condition": "doc.communication_type=='Communication' and doc.sender_full_name=='__test_notification_sender__'",
"recipients": [
{ "receiver_by_document_field": "owner" }
]
},
{
"doctype": "Notification",
"subject":"_Test Notification 2",
"document_type": "Communication",
"event": "Save",
"attach_print": 0,
"sender_full_name": "__testsender__",
"message": "New communication {{ doc.content }} saved",
"condition": "doc.communication_type=='Communication' and doc.sender_full_name=='__test_notification_sender__'",
"recipients": [
{ "receiver_by_document_field": "owner" }
],
"set_property_after_alert": "subject",
"property_value": "__testing__"
},
{
"doctype": "Notification",
"subject":"_Test Notification 3",
"document_type": "Event",
"event": "Save",
"attach_print": 0,
"condition": "doc.event_type=='Public'",
"message": "A new public event {{ doc.subject }} on {{ doc.starts_on }} is created",
"recipients": [
{ "receiver_by_document_field": "owner" }
]
},
{
"doctype": "Notification",
"subject":"_Test Notification 4",
"document_type": "Event",
"event": "Value Change",
"attach_print": 0,
"value_changed": "description",
"message": "Description changed",
"recipients": [
{ "receiver_by_document_field": "owner" }
]
},
{
"doctype": "Notification",
"subject":"_Test Notification 5",
"document_type": "Event",
"event": "Days Before",
"attach_print": 0,
"date_changed": "starts_on",
"days_in_advance": 2,
"message": "Description changed",
"recipients": [
{ "receiver_by_document_field": "owner" }
]
},
{
"doctype": "Notification",
"subject":"_Test Notification 6",
"document_type": "User",
"event": "New",
"attach_print": 0,
"message": "New user {{ doc.name }} created",
"recipients": [
{ "receiver_by_document_field": "owner", "cc": "{{ doc.email }}" }
]
}
]

View file

@ -1,70 +0,0 @@
[[Notification]]
subject = "_Test Notification 1"
document_type = "Communication"
event = "New"
attach_print = 0
message = "New communication {{ doc.content }} created"
condition = "doc.communication_type=='Communication' and doc.sender_full_name=='__test_notification_sender__'"
[[Notification.recipients]]
receiver_by_document_field = "owner"
[[Notification]]
subject = "_Test Notification 2"
document_type = "Communication"
event = "Save"
attach_print = 0
sender_full_name = "__testsender__"
message = "New communication {{ doc.content }} saved"
condition = "doc.communication_type=='Communication' and doc.sender_full_name=='__test_notification_sender__'"
set_property_after_alert = "subject"
property_value = "__testing__"
[[Notification.recipients]]
receiver_by_document_field = "owner"
[[Notification]]
subject = "_Test Notification 3"
document_type = "Event"
event = "Save"
attach_print = 0
condition = "doc.event_type=='Public'"
message = "A new public event {{ doc.subject }} on {{ doc.starts_on }} is created"
[[Notification.recipients]]
receiver_by_document_field = "owner"
[[Notification]]
subject = "_Test Notification 4"
document_type = "Event"
event = "Value Change"
attach_print = 0
value_changed = "description"
message = "Description changed"
[[Notification.recipients]]
receiver_by_document_field = "owner"
[[Notification]]
subject = "_Test Notification 5"
document_type = "Event"
event = "Days Before"
attach_print = 0
date_changed = "starts_on"
days_in_advance = 2
message = "Description changed"
[[Notification.recipients]]
receiver_by_document_field = "owner"
[[Notification]]
subject = "_Test Notification 6"
document_type = "User"
event = "New"
attach_print = 0
message = "New user {{ doc.name }} created"
[[Notification.recipients]]
receiver_by_document_field = "owner"
cc = "{{ doc.email }}"

View file

@ -1,8 +1,7 @@
from datetime import datetime
from typing import Any
from urllib.parse import urljoin
import pytz
from zoneinfo import ZoneInfo
import frappe
from frappe import _
@ -122,12 +121,11 @@ class FrappeMail:
def add_or_update_tzinfo(date_time: datetime | str, timezone: str | None = None) -> str:
"""Adds or updates timezone to the datetime."""
date_time = get_datetime(date_time)
target_tz = pytz.timezone(timezone or get_system_timezone())
target_tz = ZoneInfo(timezone or get_system_timezone())
if date_time.tzinfo is None:
date_time = target_tz.localize(date_time)
date_time = date_time.replace(tzinfo=target_tz)
else:
date_time = date_time.astimezone(target_tz)

View file

@ -4,6 +4,11 @@
# BEWARE don't put anything in this file except exceptions
from werkzeug.exceptions import NotFound
from .bench_interface import (
BenchNotScopedError,
BenchSiteNotLoadedError,
)
class SiteNotSpecifiedError(Exception):
def __init__(self, *args, **kwargs):

View file

@ -0,0 +1,6 @@
[
{
"country_name": "_Test Country",
"doctype": "Country"
}
]

View file

@ -1,3 +0,0 @@
[[Country]]
country_name = "_Test Country"

View file

@ -0,0 +1 @@
[]

View file

@ -0,0 +1,13 @@
[
{
"doctype": "Connected App",
"provider_name": "frappe",
"client_id": "test_client_id",
"client_secret": "test_client_secret",
"scopes": [
{
"scope": "all"
}
]
}
]

View file

@ -1,8 +0,0 @@
[["Connected App"]]
provider_name = "frappe"
client_id = "test_client_id"
client_secret = "test_client_secret"
[["Connected App".scopes]]
scope = "all"

View file

@ -0,0 +1,18 @@
[
{
"doctype": "Token Cache",
"user": "test@example.com",
"access_token": "test-access-token",
"refresh_token": "test-refresh-token",
"token_type": "Bearer",
"expires_in": 1000,
"scopes": [
{
"scope": "all"
},
{
"scope": "openid"
}
]
}
]

View file

@ -1,13 +0,0 @@
[["Token Cache"]]
user = "test@example.com"
access_token = "test-access-token"
refresh_token = "test-refresh-token"
token_type = "Bearer"
expires_in = 1000
[["Token Cache".scopes]]
scope = "all"
[["Token Cache".scopes]]
scope = "openid"

View file

@ -2,13 +2,12 @@
# License: MIT. See LICENSE
import datetime
import pytz
from zoneinfo import ZoneInfo
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import cint, cstr, get_system_timezone
from frappe.utils import cint, cstr, get_datetime, get_system_timezone
class TokenCache(Document):
@ -73,11 +72,10 @@ class TokenCache(Document):
return self
def get_expires_in(self):
system_timezone = pytz.timezone(get_system_timezone())
modified = frappe.utils.get_datetime(self.modified)
modified = system_timezone.localize(modified)
expiry_utc = modified.astimezone(pytz.utc) + datetime.timedelta(seconds=self.expires_in)
now_utc = datetime.datetime.now(pytz.utc)
system_timezone = ZoneInfo(get_system_timezone())
modified: datetime.datetime = get_datetime(self.modified).replace(tzinfo=system_timezone)
expiry_utc = modified.astimezone(datetime.timezone.utc) + datetime.timedelta(seconds=self.expires_in)
now_utc = datetime.datetime.now(datetime.timezone.utc)
return cint((expiry_utc - now_utc).total_seconds())
def is_expired(self):

View file

@ -7,7 +7,7 @@ import datetime
import json
import re
from collections import Counter
from collections.abc import Sequence
from collections.abc import Mapping, Sequence
import frappe
import frappe.defaults
@ -21,6 +21,7 @@ from frappe.model.meta import get_table_columns
from frappe.model.utils import is_virtual_doctype
from frappe.model.utils.user_settings import get_user_settings, update_user_settings
from frappe.query_builder.utils import Column
from frappe.types import Filters, FilterSignature, FilterTuple
from frappe.utils import (
cint,
cstr,
@ -28,7 +29,6 @@ from frappe.utils import (
get_filter,
get_time,
get_timespan_date_range,
make_filter_tuple,
)
from frappe.utils.data import DateTimeLikeObject, get_datetime, getdate, sbool
@ -79,8 +79,8 @@ class DatabaseQuery:
def execute(
self,
fields=None,
filters=None,
or_filters=None,
filters: FilterSignature | str | None = None,
or_filters: FilterSignature | None = None,
docstatus=None,
group_by=None,
order_by=DefaultOrderBy,
@ -137,8 +137,18 @@ class DatabaseQuery:
if as_list and not isinstance(self.fields, (Sequence | str)) and len(self.fields) > 1:
frappe.throw(_("Fields must be a list or tuple when as_list is enabled"))
self.filters = filters or []
self.or_filters = or_filters or []
self.filters: Filters
self.or_filters: Filters
for k, _filters in {
"filters": filters or Filters(),
"or_filters": or_filters or Filters(),
}.items():
if isinstance(_filters, str):
_filters = json.loads(_filters)
if not isinstance(_filters, Filters):
_filters = Filters(_filters, doctype=self.doctype)
setattr(self, k, _filters)
self.docstatus = docstatus or []
self.group_by = group_by
self.order_by = order_by
@ -362,16 +372,6 @@ class DatabaseQuery:
field = f"{field} as {alias}"
self.fields[self.fields.index(original_field)] = field
for filter_name in ["filters", "or_filters"]:
filters = getattr(self, filter_name)
if isinstance(filters, str):
filters = json.loads(filters)
if isinstance(filters, dict):
fdict = filters
filters = [make_filter_tuple(self.doctype, key, value) for key, value in fdict.items()]
setattr(self, filter_name, filters)
def sanitize_fields(self):
"""
regex : ^.*[,();].*
@ -554,27 +554,11 @@ class DatabaseQuery:
def set_optional_columns(self):
"""Removes optional columns like `_user_tags`, `_comments` etc. if not in table"""
# remove from fields
to_remove = []
for fld in self.fields:
to_remove.extend(fld for f in optional_fields if f in fld and f not in self.columns)
for fld in to_remove:
del self.fields[self.fields.index(fld)]
# remove from filters
to_remove = []
for each in self.filters:
if isinstance(each, str):
each = [each]
to_remove.extend(
each for element in each if element in optional_fields and element not in self.columns
)
for each in to_remove:
if isinstance(self.filters, dict):
del self.filters[each]
else:
self.filters.remove(each)
self.fields[:] = [f for f in self.fields if f not in optional_fields or f in self.columns]
self.filters[:] = [
f for f in self.filters if f.fieldname not in optional_fields or f.fieldname in self.columns
]
def build_conditions(self):
self.conditions = []
@ -588,19 +572,13 @@ class DatabaseQuery:
if match_conditions:
self.conditions.append(f"({match_conditions})")
def build_filter_conditions(self, filters, conditions: list, ignore_permissions=None):
def build_filter_conditions(self, filters: Filters, conditions: list, ignore_permissions=None):
"""build conditions from user filters"""
if ignore_permissions is not None:
self.flags.ignore_permissions = ignore_permissions
if isinstance(filters, dict):
filters = [filters]
for f in filters:
if isinstance(f, str):
conditions.append(f)
else:
conditions.append(self.prepare_filter_condition(f))
conditions.append(self.prepare_filter_condition(f))
def remove_field(self, idx: int):
if self.as_list:
@ -701,7 +679,7 @@ class DatabaseQuery:
self.fields[i + j : i + j + 1] = permitted_fields
j = j + len(permitted_fields) - 1
def prepare_filter_condition(self, f):
def prepare_filter_condition(self, ft: FilterTuple) -> str:
"""Return a filter condition in the format:
ifnull(`tabDocType`.`fieldname`, fallback) operator "value"
@ -712,7 +690,7 @@ class DatabaseQuery:
from frappe.boot import get_additional_filters_from_hooks
additional_filters_config = get_additional_filters_from_hooks()
f = get_filter(self.doctype, f, additional_filters_config)
f: FilterTuple = get_filter(self.doctype, ft, additional_filters_config)
tname = "`tab" + f.doctype + "`"
if tname not in self.tables:

View file

@ -1725,8 +1725,16 @@ class Document(BaseDocument, DocRef):
return
if date_diff(to_date, from_date) < 0:
table_row = ""
if self.meta.istable:
table_row = _("{0} row #{1}: ").format(
_(frappe.unscrub(self.parentfield)),
self.idx,
)
frappe.throw(
_("{0} must be after {1}").format(
table_row
+ _("{0} must be after {1}").format(
frappe.bold(_(self.meta.get_label(to_date_field))),
frappe.bold(_(self.meta.get_label(from_date_field))),
),

View file

@ -23,7 +23,8 @@ if TYPE_CHECKING:
NAMING_SERIES_PATTERN = re.compile(r"^[\w\- \/.#{}]+$", re.UNICODE)
BRACED_PARAMS_PATTERN = re.compile(r"(\{[\w | #]+\})")
BRACED_PARAMS_WORD_PATTERN = re.compile(r"(\{[\w]+\})")
BRACED_PARAMS_HASH_PATTERN = re.compile(r"(\{[#]+\})")
# Types that can be using in naming series fields
@ -314,6 +315,7 @@ def parse_naming_series(
doctype=None,
doc: Optional["Document"] = None,
number_generator: Callable[[str, int], str] | None = None,
key: str | None = None,
) -> str:
"""Parse the naming series and get next name.
@ -341,7 +343,10 @@ def parse_naming_series(
if e.startswith("#"):
if not series_set:
digits = len(e)
part = number_generator(name, digits)
if key:
part = number_generator(key, digits)
else:
part = number_generator(name, digits)
series_set = True
elif e == "YY":
part = today.strftime("%y")
@ -575,11 +580,19 @@ def _format_autoname(autoname: str, doc):
first_colon_index = autoname.find(":")
autoname_value = autoname[first_colon_index + 1 :]
def get_param_value_for_match(match):
def get_param_value_for_word_match(match):
param = match.group()
return parse_naming_series([param[1:-1]], doc=doc)
# Replace braced params with their parsed value
name = BRACED_PARAMS_PATTERN.sub(get_param_value_for_match, autoname_value)
def get_param_value_for_hash_match(patterned_string: str):
def get_param_value(match):
param = match.group()
key = patterned_string[: patterned_string.find(param)]
return name
return parse_naming_series([param[1:-1]], doc=doc, key=key)
return get_param_value
# Replace braced params with their parsed value
autoname_value = BRACED_PARAMS_WORD_PATTERN.sub(get_param_value_for_word_match, autoname_value)
return BRACED_PARAMS_HASH_PATTERN.sub(get_param_value_for_hash_match(autoname_value), autoname_value)

View file

@ -7,7 +7,6 @@ import os
import traceback
import uuid
import pytz
import rq
import frappe
@ -52,7 +51,7 @@ class Monitor:
self.data = frappe._dict(
{
"site": frappe.local.site,
"timestamp": datetime.datetime.now(pytz.UTC),
"timestamp": datetime.datetime.now(datetime.timezone.utc),
"transaction_type": transaction_type,
"uuid": str(uuid.uuid4()),
}
@ -85,7 +84,7 @@ class Monitor:
if job := rq.get_current_job():
self.data.uuid = job.id
waitdiff = self.data.timestamp - job.enqueued_at.replace(tzinfo=pytz.UTC)
waitdiff = self.data.timestamp - job.enqueued_at.replace(tzinfo=datetime.timezone.utc)
self.data.job.wait = int(waitdiff.total_seconds() * 1000000)
def add_custom_data(self, **kwargs):
@ -94,7 +93,7 @@ class Monitor:
def dump(self, response=None):
try:
timediff = datetime.datetime.now(pytz.UTC) - self.data.timestamp
timediff = datetime.datetime.now(datetime.timezone.utc) - self.data.timestamp
# Obtain duration in microseconds
self.data.duration = int(timediff.total_seconds() * 1000000)

View file

@ -6,7 +6,6 @@ from http import cookies
from urllib.parse import unquote, urljoin, urlparse
import jwt
import pytz
from oauthlib.openid import RequestValidator
import frappe

View file

@ -242,3 +242,4 @@ execute:frappe.db.set_single_value("Workspace Settings", "workspace_setup_comple
frappe.patches.v16_0.add_app_launcher_in_navbar_settings
frappe.desk.doctype.workspace.patches.update_app
frappe.patches.v16_0.move_role_desk_settings_to_user
frappe.patches.v16_0.update_expression_series

View file

@ -0,0 +1,63 @@
import frappe
from frappe.model.naming import (
BRACED_PARAMS_WORD_PATTERN,
determine_consecutive_week_number,
has_custom_parser,
)
from frappe.query_builder import DocType
def execute():
	"""Backfill `tabSeries` rows for doctypes named by a ``format:`` expression.

	For every DocType whose ``naming_rule`` is "expression" and whose autoname
	contains a ``{#...}`` hash part, reconstruct the concrete series prefix for
	each existing document and make sure a counter row for that prefix exists
	in the Series table, seeded from the legacy counter (stored under the
	empty series name).
	"""
	Series = DocType("Series")
	doctypes = frappe.get_all("DocType", filters={"naming_rule": "expression"}, fields=["name", "autoname"])
	uniq_exprs = set()

	def get_param_value_for_word_match(doc):
		# Return a regex-substitution callback that renders one {token} of the
		# naming expression using values from *doc* (mirrors the token handling
		# in frappe.model.naming.parse_naming_series).
		def get_param_value(match):
			e = match.group()[1:-1]
			creation = doc.creation
			# sentinel distinguishes "field missing" from "field is None"
			_sentinel = object()
			part = ""
			if e == "YY":
				part = creation.strftime("%y")
			elif e == "MM":
				part = creation.strftime("%m")
			elif e == "DD":
				part = creation.strftime("%d")
			elif e == "YYYY":
				part = creation.strftime("%Y")
			elif e == "WW":
				part = determine_consecutive_week_number(creation)
			elif e == "timestamp":
				part = str(creation)
			elif doc and (e.startswith("{") or doc.get(e, _sentinel) is not _sentinel):
				e = e.replace("{", "").replace("}", "")
				part = doc.get(e)
			elif method := has_custom_parser(e):
				part = frappe.get_attr(method[0])(doc, e)
			else:
				# unknown token: keep it verbatim in the key
				part = e

			return part

		return get_param_value

	for doctype in doctypes:
		if "#" in doctype.autoname:
			for doc in frappe.get_all(doctype.name):
				_doc = frappe.get_doc(doctype.name, doc.name)
				# strip the leading "format:" (7 chars) and everything from the
				# hash part onwards; only the static/rendered prefix is the key
				expr = doctype.autoname[7 : doctype.autoname.find("{#")]
				key = BRACED_PARAMS_WORD_PATTERN.sub(get_param_value_for_word_match(_doc), expr)
				uniq_exprs.add(key)

	current = (frappe.qb.from_(Series).select("*").where(Series.name == "")).run(as_dict=True)
	# The legacy counter lived under the empty series name; default to 0 when
	# the row is absent or its value is NULL so `current + 1` cannot raise.
	current = (current[0].get("current") if current else 0) or 0

	for uniq_expr in uniq_exprs:
		expr_exists = (frappe.qb.from_(Series).select("*").where(Series.name == uniq_expr)).run(
			as_dict=True
		)
		if not expr_exists:
			(frappe.qb.into(Series).columns("name", "current").insert(uniq_expr, current + 1)).run()

View file

@ -593,11 +593,11 @@ def can_import(doctype, raise_exception=False):
return True
def can_export(doctype, raise_exception=False):
def can_export(doctype, raise_exception=False, is_owner=False):
if "System Manager" in frappe.get_roles():
return True
else:
role_permissions = frappe.permissions.get_role_permissions(doctype)
role_permissions = frappe.permissions.get_role_permissions(doctype, is_owner=is_owner)
has_access = role_permissions.get("export") or role_permissions.get("if_owner").get("export")
if not has_access and raise_exception:
raise frappe.PermissionError(_("You are not allowed to export {} doctype").format(doctype))

View file

@ -0,0 +1,9 @@
[
{
"doctype": "Print Format",
"name": "_Test Print Format 1",
"module": "Core",
"doc_type": "User",
"html": ""
}
]

View file

@ -1,6 +0,0 @@
[["Print Format"]]
name = "_Test Print Format 1"
module = "Core"
doc_type = "User"
html = ""

View file

@ -730,13 +730,12 @@ Tip: use lucide.svg in /icons for all downloaded icons
<path d="M14.2271 6.59087C15.3567 6.59087 16.2725 5.6751 16.2725 4.54544C16.2725 3.41577 15.3567 2.5 14.2271 2.5C13.0974 2.5 12.1816 3.41577 12.1816 4.54544C12.1816 5.6751 13.0974 6.59087 14.2271 6.59087Z" stroke-miterlimit="10" stroke-linecap="round" stroke-linejoin="round"/>
</symbol>
<symbol viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" id="icon-reply">
<path d="M8.55263 15.3307L1.78113 9.49965L8.55263 4.04483V7.47368V7.97829L9.05722 7.97366C13.1526 7.93609 16.6082 10.8388 17.3522 14.7145C15.3383 12.5107 12.327 11.4474 9.05263 11.4474H8.55263V11.9474V15.3307Z" stroke-miterlimit="10" stroke-linecap="round" />
<symbol xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" id="icon-reply">
<polyline points="9 17 4 12 9 7" /> <path d="M20 18v-2a4 4 0 0 0-4-4H4" />
</symbol>
<symbol viewBox="0 0 20 20" xmlns="http://www.w3.org/2000/svg" id="icon-reply-all">
<path d="M10.5589 14.8213L4.693 9.77014L10.5589 5.04483V7.97057V8.47518L11.0635 8.47055C14.6022 8.43809 17.598 10.8976 18.3297 14.213C16.5316 12.3421 13.9036 11.4411 11.0589 11.4411H10.5589V11.9411V14.8213Z" stroke-miterlimit="10" />
<path d="M8.14703 15.9117L1 9.75733L8.14703 4" stroke-miterlimit="10" />
<symbol xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" id="icon-reply-all">
<polyline points="7 17 2 12 7 7" /> <polyline points="12 17 7 12 12 7" /> <path d="M22 18v-2a4 4 0 0 0-4-4H7" />
</symbol>
<symbol viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg" id="icon-printer">

Before

Width:  |  Height:  |  Size: 87 KiB

After

Width:  |  Height:  |  Size: 87 KiB

View file

@ -74,6 +74,9 @@ export default class Grid {
<div class="grid-empty text-center text-extra-muted">
${__("No rows")}
</div>
<div class="grid-scroll-bar">
<div class="grid-scroll-bar-rows"></div>
</div>
</div>
</div>
</div>
@ -969,7 +972,6 @@ export default class Grid {
}
total_colsize += df.colsize;
if (total_colsize > 11) return false;
this.visible_columns.push([df, df.colsize]);
}
}

View file

@ -16,7 +16,7 @@ export default class GridRow {
let render_row = true;
this.wrapper = $('<div class="grid-row"></div>');
this.row = $('<div class="data-row row"></div>')
this.row = $('<div class="data-row row m-0"></div>')
.appendTo(this.wrapper)
.on("click", function (e) {
if (
@ -362,7 +362,7 @@ export default class GridRow {
if (this.configure_columns && this.frm) {
this.configure_columns_button = $(`
<div class="col grid-static-col d-flex justify-content-center" style="cursor: pointer;">
<div class="col grid-static-col pointer">
<a>${frappe.utils.icon("setting-gear", "sm", "", "filter: opacity(0.5)")}</a>
</div>
`)
@ -401,7 +401,6 @@ export default class GridRow {
});
this.grid_settings_dialog.set_primary_action(__("Update"), () => {
this.validate_columns_width();
this.columns = {};
this.update_user_settings_for_grid();
this.grid_settings_dialog.hide();
@ -465,6 +464,8 @@ export default class GridRow {
sort_options: false,
},
],
secondary_action_label: __("Select All"),
secondary_action: () => this.select_all_columns(docfields),
});
d.set_primary_action(__("Add"), () => {
@ -489,6 +490,17 @@ export default class GridRow {
d.show();
}
// Check every still-unchecked column checkbox in the "add column" dialog,
// firing `change` on each so the dialog's selection state stays in sync.
select_all_columns(docfields) {
	docfields.forEach((docfield) => {
		if (docfield.checked) {
			// already selected — nothing to do
			return;
		}
		$(`.checkbox.unit-checkbox input[type="checkbox"][data-unit="${docfield.value}"]`)
			.prop("checked", true)
			.trigger("change");
	});
}
prepare_columns_for_dialog(selected_fields) {
let fields = [];
@ -623,20 +635,6 @@ export default class GridRow {
});
}
validate_columns_width() {
let total_column_width = 0.0;
this.selected_columns_for_grid.forEach((row) => {
if (row.columns && row.columns > 0) {
total_column_width += cint(row.columns);
}
});
if (total_column_width && total_column_width > 10) {
frappe.throw(__("The total column width cannot be more than 10."));
}
}
remove_selected_column() {
$(this.fields_html_wrapper)
.find(".remove-field")
@ -685,6 +683,8 @@ export default class GridRow {
? this.grid.user_defined_columns
: this.docfields;
let total_colsize = 0;
this.grid.visible_columns.forEach((col, ci) => {
// to get update df for the row
let df = fields.find((field) => field?.fieldname === col[0].fieldname);
@ -693,6 +693,7 @@ export default class GridRow {
let colsize = col[1];
total_colsize += colsize;
let txt = this.doc
? frappe.format(this.doc[df.fieldname], df, null, this.doc)
: __(df.label, null, df.parent);
@ -723,6 +724,23 @@ export default class GridRow {
}
});
let current_grid = $(
`div[data-fieldname="${this.grid.df.fieldname}"] .form-grid-container`
);
if (total_colsize > 10) {
current_grid.addClass("column-limit-reached");
} else if (current_grid.hasClass("column-limit-reached")) {
if (Number($(current_grid).children(".form-grid").css("left")) != 0) {
$(current_grid).children(".form-grid").css("left", 0);
$(current_grid).children().find(".grid-scroll-bar").css({
width: "auto",
"margin-left": "0px",
});
$(current_grid).children().find(".grid-scroll-bar-rows").css("width", "auto");
}
current_grid.removeClass("column-limit-reached");
}
if (this.show_search) {
// last empty column
$(`<div class="col grid-static-col search"></div>`).appendTo(this.row);
@ -873,16 +891,8 @@ export default class GridRow {
let grid;
let grid_container;
let inital_position_x = 0;
let start_x = 0;
let start_y = 0;
let input_in_focus = false;
let vertical = false;
let horizontal = false;
// prevent random layout shifts caused by widgets and on click position elements inside view (UX).
function on_input_focus(el) {
input_in_focus = true;
@ -940,59 +950,6 @@ export default class GridRow {
.attr("data-fieldtype", df.fieldtype)
.data("df", df)
.appendTo(this.row)
// initialize grid for horizontal scroll on mobile devices.
.on("touchstart", function (event) {
grid_container = $(event.currentTarget).closest(".form-grid-container")[0];
grid = $(event.currentTarget).closest(".form-grid")[0];
grid.style.position != "relative" && $(grid).css("position", "relative");
!grid.style.left && $(grid).css("left", 0);
start_x = event.touches[0].clientX;
start_y = event.touches[0].clientY;
inital_position_x = -parseFloat(grid.style.left || 0) + start_x;
})
// calculate X and Y movement based on touch events.
.on("touchmove", function (event) {
if (input_in_focus) return;
let moved_x;
let moved_y;
if (!horizontal && !vertical) {
moved_x = Math.abs(start_x - event.touches[0].clientX);
moved_y = Math.abs(start_y - event.touches[0].clientY);
}
if (!vertical && moved_x > 16) {
horizontal = true;
} else if (!horizontal && moved_y > 16) {
vertical = true;
}
if (horizontal) {
event.preventDefault();
let grid_start = inital_position_x - event.touches[0].clientX;
let grid_end = grid.clientWidth - grid_container.clientWidth + 2;
if (frappe.utils.is_rtl()) {
grid_start = -grid_start;
}
if (grid_start < 0) {
grid_start = 0;
} else if (grid_start > grid_end) {
grid_start = grid_end;
}
grid.style.left = `${frappe.utils.is_rtl() ? "" : "-"}${grid_start}px`;
}
})
.on("touchend", function () {
vertical = false;
horizontal = false;
})
.on("click", function (event) {
if (frappe.ui.form.editable_row !== me) {
var out = me.toggle_editable_row();
@ -1025,6 +982,30 @@ export default class GridRow {
$col.field_area = $('<div class="field-area"></div>').appendTo($col).toggle(false);
$col.static_area = $('<div class="static-area ellipsis"></div>').appendTo($col).html(txt);
$(document).ready(function () {
let $scrollBar = $(".grid-scroll-bar");
let form_grid = $(".form-grid");
let grid_container = $(".form-grid-container");
let grid_scroll_bar_rows = $(".grid-scroll-bar-rows");
// Make sure the grid container is scrollable
$scrollBar.on("scroll", function (event) {
grid_container = $(event.currentTarget).closest(".form-grid-container");
form_grid = $(event.currentTarget).closest(".form-grid");
grid_scroll_bar_rows = $(event.currentTarget).closest(".grid-scroll-bar-rows");
var scroll_left = $(this).scrollLeft();
// Sync the form grid's left position with the scroll bar
form_grid.css("position", "relative");
form_grid.css("left", -scroll_left + "px");
$(this).css("margin-left", scroll_left + "px");
});
$scrollBar.css("width", grid_container.width());
grid_scroll_bar_rows.css("width", form_grid[0].scrollWidth);
});
// set title attribute to see full label for columns in the heading row
if (!this.doc) {
$col.attr("title", txt);

View file

@ -71,9 +71,11 @@ export default class GridRowForm {
<span class="text-medium"> ${__("Shortcuts")}: </span>
<kbd>${__("Ctrl + Up")}</kbd> . <kbd>${__("Ctrl + Down")}</kbd> . <kbd>${__("ESC")}</kbd>
</div>
<button class="btn btn-secondary btn-sm pull-right grid-append-row">
${__("Insert Below")}
</button>
<span class="row-actions">
<button class="btn btn-secondary btn-sm pull-right grid-append-row">
${__("Insert Below")}
</button>
</span>
</div>
</div>`;

View file

@ -916,7 +916,7 @@ class FilterArea {
$(`<div class="filter-selector">
<div class="btn-group">
<button class="btn btn-default btn-sm filter-button">
<span class="filter-icon">
<span class="filter-icon button-icon">
${frappe.utils.icon("es-line-filter")}
</span>
<span class="button-label hidden-xs">
@ -924,7 +924,7 @@ class FilterArea {
<span>
</button>
<button class="btn btn-default btn-sm filter-x-button" title="${__("Clear all filters")}">
<span class="filter-icon">
<span class="filter-icon button-icon">
${frappe.utils.icon("es-small-close")}
</span>
</button>

View file

@ -229,7 +229,7 @@ frappe.ui.GroupBy = class {
this.page.wrapper.find(".sort-selector").before(
$(`<div class="group-by-selector">
<button class="btn btn-default btn-sm group-by-button ellipsis">
<span class="group-by-icon">
<span class="group-by-icon button-icon">
${frappe.utils.icon("es-line-folder-alt")}
</span>
<span class="button-label hidden-xs">

View file

@ -100,7 +100,7 @@ export default class ShortcutWidget extends Widget {
const label = get_label();
let color = this.color && count ? this.color.toLowerCase() : "gray";
$(
`<div class="indicator-pill no-indicator-dot ellipsis ${color}">${label}</div>`
`<div class="indicator-pill no-indicator-dot ellipsis ${color}">${__(label)}</div>`
).appendTo(this.action_area);
$(frappe.utils.icon("es-line-arrow-up-right", "xs", "", "", "ml-2")).appendTo(

View file

@ -196,6 +196,8 @@
.col:last-child {
border-right: none;
min-width: 0;
display: flex;
justify-content: center;
}
.col {
@ -216,8 +218,6 @@
}
.btn-open-row {
display: flex;
justify-content: center;
line-height: unset;
div {
margin-left: var(--margin-xs);
@ -230,6 +230,7 @@
--input-disabled-bg: var(--gray-50);
.grid-static-col {
padding: 0px !important;
height: 32px;
}
.frappe-control[data-fieldtype="Select"].form-group .select-icon {
@ -448,6 +449,7 @@
overflow: visible;
margin: 0px calc(-1 * var(--margin-md));
padding: var(--padding-sm) var(--padding-md);
max-width: 865px;
}
.grid-form-heading {
@ -573,10 +575,107 @@
margin-bottom: 4px;
}
.form-grid-container:has(.grid-row.grid-row-open) {
overflow-x: clip;
white-space: normal;
overflow-y: unset;
.form-grid {
left: 0px !important;
}
}
.data-row.row {
flex-wrap: nowrap;
}
.sortable-handle span {
width: 31px;
display: block;
}
.form-grid-container {
overflow-x: clip;
}
.data-row.row {
flex-wrap: nowrap;
}
.column-limit-reached {
background-color: var(--subtle-accent);
border-top-left-radius: var(--border-radius-md);
border-top-right-radius: var(--border-radius-md);
.form-grid {
display: grid;
grid-auto-rows: min-content;
.grid-static-col.col-xs-1 {
flex: 0 0 60px;
max-width: 60px;
}
.grid-static-col.col-xs-2 {
flex: 0 0 90px;
max-width: 90px;
}
.grid-static-col.col-xs-3 {
flex: 0 0 120px;
max-width: 120px;
}
.grid-static-col.col-xs-4 {
flex: 0 0 150px;
max-width: 150px;
}
.grid-static-col.col-xs-5 {
flex: 0 0 200px;
max-width: 200px;
}
.grid-static-col.col-xs-6 {
flex: 0 0 250px;
max-width: 250px;
}
.grid-static-col.col-xs-7 {
flex: 0 0 300px;
max-width: 300px;
}
.grid-static-col.col-xs-8 {
flex: 0 0 350px;
max-width: 350px;
}
.grid-static-col.col-xs-9 {
flex: 0 0 400px;
max-width: 400px;
}
.grid-static-col.col-xs-10 {
flex: 0 0 450px;
max-width: 450px;
}
.grid-static-col.col-xs-11 {
flex: 0 0 500px;
max-width: 500px;
}
.grid-static-col.col-xs-12 {
flex: 0 0 550px;
max-width: 550px;
}
.grid-row > .row .col:last-child,
.grid-row > .dialog-assignment-row .col:last-child {
min-width: 40px;
}
}
}
.grid-scroll-bar {
overflow-x: auto;
height: 12px;
position: relative;
scrollbar-width: auto;
scrollbar-color: auto;
}
.grid-scroll-bar::-webkit-scrollbar {
height: 11px !important;
}
.grid-scroll-bar-rows {
height: 100%;
}
@media (max-width: map-get($grid-breakpoints, "md")) {
.form-grid-container {
overflow-x: clip;
.form-grid {
min-width: 738px;
}
@ -589,16 +688,6 @@
}
}
@media (min-width: map-get($grid-breakpoints, "md")) {
.form-grid-container {
overflow-x: unset !important;
.form-grid {
position: unset !important;
}
}
}
@media (max-width: map-get($grid-breakpoints, "sm")) {
.form-in-grid .form-section .form-column {
padding-left: 0 !important;

View file

@ -102,6 +102,14 @@
.group-by-button {
margin: 5px;
}
.btn {
.button-label,
.button-icon {
display: inline-flex;
vertical-align: middle;
}
}
.group-by-icon.active {
--icon-stroke: var(--primary);
}

View file

@ -5,7 +5,6 @@ import datetime
from collections.abc import Callable
from functools import wraps
import pytz
from werkzeug.wrappers import Response
import frappe
@ -35,7 +34,7 @@ class RateLimiter:
self.limit = int(limit * 1000000)
self.window = window
self.start = datetime.datetime.now(pytz.UTC)
self.start = datetime.datetime.now(datetime.timezone.utc)
timestamp = int(frappe.utils.now_datetime().timestamp())
self.window_number, self.spent = divmod(timestamp, self.window)
@ -80,7 +79,7 @@ class RateLimiter:
def record_request_end(self):
if self.end is not None:
return
self.end = datetime.datetime.now(pytz.UTC)
self.end = datetime.datetime.now(datetime.timezone.utc)
self.duration = int((self.end - self.start).total_seconds() * 1000000)
def respond(self):

View file

@ -1,4 +1,4 @@
<div style="text-align: justify;">
<div>
<div style="width: 600px; margin: 10px auto;">
{{ message }}
</div>

View file

@ -2,7 +2,6 @@ import logging
from collections.abc import Callable
from contextlib import contextmanager
from functools import wraps
from inspect import isfunction, ismethod
from typing import TYPE_CHECKING, Any
import frappe
@ -28,15 +27,18 @@ logger = logging.Logger(__file__)
@contextmanager
def freeze_time(time_to_freeze: Any, is_utc: bool = False, *args: Any, **kwargs: Any) -> None:
"""Temporarily: freeze time with freezegun."""
import pytz
from datetime import UTC
from zoneinfo import ZoneInfo
from freezegun import freeze_time as freezegun_freeze_time
from frappe.utils.data import get_datetime, get_system_timezone
if not is_utc:
# Freeze time expects UTC or tzaware objects. We have neither, so convert to UTC.
timezone = pytz.timezone(get_system_timezone())
time_to_freeze = timezone.localize(get_datetime(time_to_freeze)).astimezone(pytz.utc)
time_to_freeze = (
get_datetime(time_to_freeze).replace(tzinfo=ZoneInfo(get_system_timezone())).astimezone(UTC)
)
with freezegun_freeze_time(time_to_freeze, *args, **kwargs):
yield

View file

@ -550,6 +550,60 @@ class TestDB(IntegrationTestCase):
frappe.db.delete("ToDo", {"description": test_body})
def test_bulk_update(self):
	"""Integration test for frappe.db.bulk_update.

	Covers: (1) homogeneous updates (same field set for every record),
	(2) heterogeneous updates (different field sets per record), and
	(3) that a later partial update leaves untouched fields intact.
	NOTE(review): hits the live ToDo table; steps are order-dependent.
	"""
	test_body = f"test_bulk_update - {random_string(10)}"
	frappe.db.bulk_insert(
		"ToDo",
		["name", "description"],
		[[f"ToDo Test Bulk Update {i}", test_body] for i in range(20)],
		ignore_duplicates=True,
	)

	record_names = frappe.get_all("ToDo", filters={"description": test_body}, pluck="name")
	new_descriptions = {name: f"{test_body} - updated - {random_string(10)}" for name in record_names}

	# update with same fields to update
	frappe.db.bulk_update(
		"ToDo", {name: {"description": new_descriptions[name]} for name in record_names}
	)
	# check if all records were updated
	updated_records = dict(
		frappe.get_all(
			"ToDo", filters={"name": ("in", record_names)}, fields=["name", "description"], as_list=True
		)
	)
	self.assertDictEqual(new_descriptions, updated_records)

	# update with different fields to update
	updates = {
		record_names[0]: {"priority": "High", "status": "Closed"},
		record_names[1]: {"status": "Closed"},
	}
	frappe.db.bulk_update("ToDo", updates)
	priority, status = frappe.db.get_value("ToDo", record_names[0], ["priority", "status"])
	self.assertEqual(priority, "High")
	self.assertEqual(status, "Closed")

	# further updates with different fields to update
	updates = {record_names[0]: {"status": "Open"}, record_names[1]: {"priority": "Low"}}
	frappe.db.bulk_update("ToDo", updates)
	priority, status = frappe.db.get_value("ToDo", record_names[0], ["priority", "status"])
	self.assertEqual(priority, "High")  # should stay the same
	self.assertEqual(status, "Open")
	priority, status = frappe.db.get_value("ToDo", record_names[1], ["priority", "status"])
	self.assertEqual(priority, "Low")
	self.assertEqual(status, "Closed")  # should stay the same

	# cleanup
	frappe.db.delete("ToDo", {"name": ("in", record_names)})
def test_count(self):
frappe.db.delete("Note")

View file

@ -1082,8 +1082,12 @@ class TestDBQuery(IntegrationTestCase):
class VirtualDocType:
@staticmethod
def get_list(args=None, limit_page_length=0, doctype=None):
from frappe.types.filter import FilterTuple
# Backward compatibility
self.assertEqual(args["filters"], [["Virtual DocType", "name", "=", "test"]])
self.assertEqual(
args["filters"], [FilterTuple(doctype="Virtual DocType", fieldname="name", value="test")]
)
self.assertEqual(limit_page_length, 1)
self.assertEqual(doctype, "Virtual DocType")

View file

@ -103,7 +103,8 @@ class TestNaming(IntegrationTestCase):
doc.some_fieldname = description
doc.insert()
series = getseries("", 2)
series = getseries(f"TODO-{now_datetime().strftime('%m')}-{description}-", 2)
series = int(series) - 1
self.assertEqual(doc.name, f"TODO-{now_datetime().strftime('%m')}-{description}-{series:02}")
@ -117,7 +118,7 @@ class TestNaming(IntegrationTestCase):
doc.field = field
doc.insert()
series = getseries("", 2)
series = getseries(f"TODO-{field}-", 2)
series = int(series) - 1
self.assertEqual(doc.name, f"TODO-{field}-{series:02}")
@ -138,15 +139,13 @@ class TestNaming(IntegrationTestCase):
todo.description = description
todo.insert()
series = getseries("", 2)
week = determine_consecutive_week_number(now_datetime())
series = getseries(f"TODO-{week}-", 2)
series = str(int(series) - 1)
if len(series) < 2:
series = "0" + series
week = determine_consecutive_week_number(now_datetime())
self.assertEqual(todo.name, f"TODO-{week}-{series}")
def test_revert_series(self):

View file

@ -5,14 +5,13 @@ import io
import json
import os
import sys
from datetime import date, datetime, time, timedelta
from datetime import date, datetime, time, timedelta, timezone
from decimal import ROUND_HALF_UP, Decimal, localcontext
from enum import Enum
from io import StringIO
from mimetypes import guess_type
from unittest.mock import patch
import pytz
from hypothesis import given
from hypothesis import strategies as st
from PIL import Image
@ -736,9 +735,9 @@ class TestResponse(IntegrationTestCase):
minute=23,
second=23,
microsecond=23,
tzinfo=pytz.utc,
tzinfo=timezone.utc,
),
time(hour=23, minute=23, second=23, microsecond=23, tzinfo=pytz.utc),
time(hour=23, minute=23, second=23, microsecond=23, tzinfo=timezone.utc),
timedelta(days=10, hours=12, minutes=120, seconds=10),
],
"float": [

View file

@ -1,2 +1,3 @@
from .docref import DocRef
from .filter import Filters, FilterSignature, FilterTuple
from .frappedict import _dict

288
frappe/types/filter.py Normal file
View file

@ -0,0 +1,288 @@
import textwrap
from collections import defaultdict
from collections.abc import Generator, Iterable, Mapping, Sequence
from datetime import date, datetime
from itertools import groupby
from operator import attrgetter
from typing import Any, NamedTuple, TypeAlias, TypeGuard, TypeVar, cast
from pypika import Column
from typing_extensions import Self, override
from .docref import DocRef
Doct: TypeAlias = str
Fld: TypeAlias = str
Op: TypeAlias = str
DateTime: TypeAlias = datetime | date
_Value: TypeAlias = str | int | float | None | DateTime | Column
_InputValue: TypeAlias = _Value | DocRef | bool
Value: TypeAlias = _Value | Sequence[_Value]
InputValue: TypeAlias = _InputValue | Sequence[_InputValue]
FilterTupleSpec: TypeAlias = (
tuple[Fld, InputValue] | tuple[Fld, Op, InputValue] | tuple[Doct, Fld, Op, InputValue]
)
FilterMappingSpec: TypeAlias = Mapping[Fld, _InputValue | tuple[Op, InputValue]]
class Sentinel:
	"""Falsy marker type used to distinguish "argument not passed" from an
	explicit ``None`` (``None`` is a legal filter value)."""

	def __bool__(self) -> bool:
		# always falsy, like an unset value
		return False

	@override
	def __str__(self) -> str:
		return "UNSPECIFIED"


# Module-wide singleton; always compare by identity (see is_unspecified).
UNSPECIFIED = Sentinel()

T = TypeVar("T")


def is_unspecified(value: T | Sentinel) -> TypeGuard[Sentinel]:
	"""Return True iff *value* is the UNSPECIFIED sentinel (identity check)."""
	return value is UNSPECIFIED
class _FilterTuple(NamedTuple):
	"""Raw storage shape of one filter condition:
	(doctype, fieldname, operator, value)."""

	doctype: Doct
	fieldname: Fld
	operator: Op
	value: Value
def _type_narrow(v: _InputValue) -> _Value:
	"""Coerce one accepted input value into its canonical stored form.

	bools become ints, DocRef is dereferenced via ``__value__()``, plain
	values pass through unchanged; anything else raises ValueError.
	"""
	# bool must be tested before _Value: bool is a subclass of int,
	# which is part of the _Value union.
	if isinstance(v, bool):
		return int(v)
	if isinstance(v, _Value):
		return v
	if isinstance(v, DocRef):  # type: ignore[redundant-expr]
		return v.__value__()
	raise ValueError(
		f"Value must be one of types: {', '.join(str(t.__name__) for t in _InputValue.__args__)}; found {type(v)}"
	)
class FilterTuple(_FilterTuple):
	"""A named tuple representing a filter condition.

	Construction accepts either a positional sequence spec (2-, 3- or
	4-tuple) or explicit keyword arguments; values are normalized via
	_type_narrow before storage.
	"""

	def __new__(
		cls,
		s: FilterTupleSpec | None = None,
		/,
		*,
		doctype: Doct | Sentinel = UNSPECIFIED,
		fieldname: Fld | Sentinel = UNSPECIFIED,
		operator: Op = "=",
		value: InputValue | Sentinel = UNSPECIFIED,
	) -> Self:
		"""
		Create a new FilterTuple instance.

		Args:
			s: A sequence representing the filter tuple.
			doctype: The document type.
			fieldname: The field name.
			operator: The comparison operator.
			value: The value to compare against.

		Returns:
			A new FilterTuple instance.

		Raises:
			ValueError: on any malformed spec; the original error is chained
				as the cause and a usage summary is included in the message.
		"""
		try:
			# positional sequence spec: unpack by length into the keyword slots
			if isinstance(s, Sequence):
				if len(s) == 2:
					fieldname, value = s
				elif len(s) == 3:
					fieldname, operator, value = s
				elif len(s) == 4:  # type: ignore[redundant-expr]
					doctype, fieldname, operator, value = s
				elif len(s) == 5:  # type: ignore[unreachable]
					# legacy 5-element form: trailing element was always a no-op
					from frappe.deprecation_dumpster import deprecation_warning

					deprecation_warning(
						"2024-12-05",
						"v16",
						f"List type filters now should have 2, 3 or 4 elements: got 5 (Input: {s!r}). Hint: you probably need to remove the last filter element, a no-op from history.",
					)
					doctype, fieldname, operator, value, _noop = s
				else:
					raise ValueError(f"Invalid sequence length: {len(s)}. Expected 2, 3, or 4 elements.")

			# required components: doctype and fieldname must be set by now
			# (directly or via the sequence spec); value may be None but not unset
			if is_unspecified(doctype) or doctype is None:
				raise ValueError("doctype is required")
			if is_unspecified(fieldname) or fieldname is None:
				raise ValueError("fieldname is required")
			if is_unspecified(value):
				raise ValueError("value is required; can be None")

			# soundness
			if operator in ("in", "not in") and isinstance(value, str):
				# comma-separated string form of an (not) in filter
				value = value.split(",")

			# normalize scalars directly, sequences element-wise
			_value: Value
			if isinstance(value, _InputValue):
				_value = _type_narrow(value)
			else:
				_value = tuple(_type_narrow(v) for v in value)

			return super().__new__(
				cls,
				doctype=doctype,
				fieldname=fieldname,
				operator=operator,
				value=_value,
			)
		except Exception as e:
			# re-raise every construction failure as ValueError with full context
			error_context = (
				f"Error creating FilterTuple:\n"
				f"Input: {s}, doctype={doctype}, fieldname={fieldname}, operator={operator}, value={value}\n"
				f"Error: {e!s}\n"
				f"Usage: FilterTuple( (fieldname, value), doctype=dt )\n"
				f"       FilterTuple( (fieldname, operator, value), doctype=dt )\n"
				f"       FilterTuple( (doctype, fieldname, operator, value) )\n"
				f"       FilterTuple( doctype=doctype, fieldname=fieldname, operator=operator, value=value )"
			)
			raise ValueError(error_context) from e

	@override
	def __str__(self) -> str:
		# human-readable form, e.g. <ToDo>.status = 'Open'
		value_repr = f"'{self.value}'" if isinstance(self.value, str) else repr(self.value)
		return f"<{self.doctype}>.{self.fieldname} {self.operator} {value_repr}"
class Filters(list[FilterTuple]):
	"""A sequence of filter tuples representing multiple filter conditions.

	Accepts heterogeneous specs (FilterTuple instances, tuple specs,
	mapping specs, or a sequence thereof) and normalizes every entry to a
	FilterTuple. On construction, '=' conditions on the same field are
	collapsed into a single 'in' condition (see optimize()).
	"""

	def __init__(
		self,
		/,
		*s: FilterTuple
		| FilterTupleSpec
		| FilterMappingSpec
		| Sequence[FilterTuple | FilterTupleSpec | FilterMappingSpec],
		doctype: Doct | Sentinel = UNSPECIFIED,
	) -> None:
		"""
		Create a new Filters instance.

		Args:
			s: A sequence of FilterTuple or FilterTupleSpec, or a FilterMappingSpec.
			doctype: The document type for the filters.

		Raises:
			ValueError: on any malformed spec; the cause is chained and a
				usage summary is included in the message.
		"""
		super().__init__()
		try:
			# only one argument
			if len(s) == 1:
				# and that is an empty sequence
				if len(s[0]) == 0:
					return
				# compat: unpack if first argument was Sequence of Sequences
				if (
					not isinstance(s[0], FilterTuple)
					and isinstance(s[0], Sequence)
					and not isinstance(s[0][0], str)  # it's a FilterTupleSpec
					and isinstance(s[0][0], Sequence | Mapping)
				):
					self.extend(
						cast(Iterable[FilterTuple | FilterTupleSpec | FilterMappingSpec], s[0]), doctype
					)
				else:
					self.extend(cast(Iterable[FilterTuple | FilterTupleSpec | FilterMappingSpec], s), doctype)
			else:
				self.extend(cast(Iterable[FilterTuple | FilterTupleSpec | FilterMappingSpec], s), doctype)
		except Exception as e:
			error_lines = str(e).split("\n")
			indented_error = error_lines[0] + "\n" + textwrap.indent("\n".join(error_lines[1:]), " ")
			error_context = (
				f"\nError creating Filters:\n"
				f"Input: {s}, doctype={doctype}\n"
				f"Usage: Filters( FilterTuple(...), ... )\n"
				f"       Filters( (fieldname, value), ... doctype=dt )\n"
				f"       Filters( (fieldname, operator, value), ... doctype=dt )\n"
				f"       Filters( (doctype, fieldname, operator, value), ... )\n"
				f"       Filters( {{'fieldname': value, ...}}, ... doctype=dt )\n"
				f"       Filters( {{'fieldname': (operator, value), ...}}, ... doctype=dt )\n\n"
				f"Cause:\n{indented_error}"
			)
			raise ValueError(error_context) from e

		if self:  # only optimize non-empty; avoid infinite recursion (optimize() builds a Filters())
			self.optimize()

	@override
	def extend(
		self,
		values: Iterable[FilterTuple | FilterTupleSpec | FilterMappingSpec],
		doctype: Doct | Sentinel = UNSPECIFIED,
	) -> None:
		"""Append every item of *values*, normalizing each (see append)."""
		for item in values:
			self.append(item, doctype)

	@override
	def append(
		self, value: FilterTuple | FilterTupleSpec | FilterMappingSpec, doctype: Doct | Sentinel = UNSPECIFIED
	) -> None:
		"""Normalize *value* to one or more FilterTuples and append them.

		Raises:
			ValueError: a mapping spec was given without a doctype.
			TypeError: the spec type is not supported.
		"""
		if isinstance(value, FilterTuple):
			super().append(value)
		elif isinstance(value, Mapping):
			if is_unspecified(doctype) or doctype is None:
				raise ValueError("When initiated with a mapping, doctype keyword argument is required")
			self._init_from_mapping(value, doctype)
		elif isinstance(value, Sequence):  # type: ignore[redundant-expr]
			super().append(FilterTuple(value, doctype=doctype))
		else:
			raise TypeError(f"Expected FilterTuple, Mapping or Sequence, got {type(value).__name__}")

	def _init_from_mapping(self, s: FilterMappingSpec, doctype: Doct) -> None:
		# {"field": value} or {"field": (operator, value)} forms
		for k, v in s.items():
			if isinstance(v, _InputValue):
				self.append(FilterTuple(doctype=doctype, fieldname=k, value=v))
			elif isinstance(v, Sequence):  # type: ignore[redundant-expr]
				self.append(FilterTuple(doctype=doctype, fieldname=k, operator=v[0], value=v[1]))
			else:
				raise ValueError(f"Invalid value for key '{k}': expected value or (operator, value[s]) tuple")

	def optimize(self) -> None:
		"""Optimize the filters by grouping '=' operators into 'in' operators where possible."""

		def group_key(f: FilterTuple) -> tuple[str, str, bool]:
			# groupby requires sorted input; last component marks collatable '='
			return (f.doctype, f.fieldname, f.operator == "=")

		optimized = Filters()
		for (doctype, fieldname, collatable), filters in groupby(sorted(self, key=group_key), key=group_key):
			if not collatable:
				optimized.extend(filters)
			else:
				# `filters` is a one-shot group iterator: consume it immediately
				def _values() -> Generator[_Value, None, None]:
					for f in filters:
						# f.value is already narrowed to Val when we optimize over fully initialized FilterTuple
						yield cast(_Value, f.value)  # = operator only is allowed to have _Value

				values = tuple(_values())
				_op = "in" if len(values) > 1 else "="
				optimized.append(
					FilterTuple(
						doctype=doctype,
						fieldname=fieldname,
						operator=_op,
						value=values if _op == "in" else values[0],
					)
				)
		self[:] = optimized

	@override
	def __str__(self) -> str:
		if not self:
			return "Filters()"
		# one condition per line, indented (avoid shadowing builtin `filter`)
		filters_str = "\n".join(f"  {flt}" for flt in self)
		return f"Filters(\n{filters_str}\n)"
FilterSignature: TypeAlias = Filters | FilterTuple | FilterMappingSpec | FilterTupleSpec

2
frappe/utils/bench_helper.py Normal file → Executable file
View file

@ -1,3 +1,5 @@
#!/bin/env python3
import importlib
import json
import linecache

View file

@ -166,8 +166,14 @@ def _create_app_boilerplate(dest, hooks, no_git=False):
with open(os.path.join(dest, hooks.app_name, "license.txt"), "w") as f:
f.write(frappe.as_unicode(license_body))
with open(os.path.join(dest, hooks.app_name, hooks.app_name, "modules.txt"), "w") as f:
f.write(frappe.as_unicode(hooks.app_title))
with open(
os.path.join(dest, hooks.app_name, hooks.app_name, frappe.scrub(hooks.app_title), ".frappe"), "w"
) as f:
f.write("")
from frappe.deprecation_dumpster import boilerplate_modules_txt
boilerplate_modules_txt(dest, hooks.app_name, hooks.app_title)
# These values could contain quotes and can break string declarations
# So escaping them before setting variables in setup.py and hooks.py

View file

@ -7,8 +7,6 @@ from collections import defaultdict
from collections.abc import Callable
from functools import wraps
import pytz
import frappe
_SITE_CACHE = defaultdict(lambda: defaultdict(dict))
@ -115,7 +113,9 @@ def site_cache(ttl: int | None = None, maxsize: int | None = None) -> Callable:
if ttl is not None and not callable(ttl):
func.ttl = ttl
func.expiration = datetime.datetime.now(pytz.UTC) + datetime.timedelta(seconds=func.ttl)
func.expiration = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
seconds=func.ttl
)
if maxsize is not None and not callable(maxsize):
func.maxsize = maxsize
@ -125,9 +125,11 @@ def site_cache(ttl: int | None = None, maxsize: int | None = None) -> Callable:
if getattr(frappe.local, "initialised", None):
func_call_key = json.dumps((args, kwargs))
if hasattr(func, "ttl") and datetime.datetime.now(pytz.UTC) >= func.expiration:
if hasattr(func, "ttl") and datetime.datetime.now(datetime.timezone.utc) >= func.expiration:
func.clear_cache()
func.expiration = datetime.datetime.now(pytz.UTC) + datetime.timedelta(seconds=func.ttl)
func.expiration = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
seconds=func.ttl
)
if hasattr(func, "maxsize") and len(_SITE_CACHE[func_key][frappe.local.site]) >= func.maxsize:
_SITE_CACHE[func_key][frappe.local.site].pop(

View file

@ -15,8 +15,8 @@ from code import compile_command
from enum import Enum
from typing import Any, Literal, Optional, TypeVar
from urllib.parse import parse_qsl, quote, urlencode, urljoin, urlparse, urlunparse
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
import pytz
from click import secho
from dateutil import parser
from dateutil.parser import ParserError
@ -25,6 +25,7 @@ from dateutil.relativedelta import relativedelta
import frappe
from frappe.desk.utils import slug
from frappe.locale import get_date_format, get_first_day_of_the_week, get_number_format, get_time_format
from frappe.types.filter import Filters, FilterSignature, FilterTuple
from frappe.utils.deprecations import deprecated
from frappe.utils.number_format import NUMBER_FORMAT_MAP, NumberFormat
@ -52,6 +53,8 @@ TimespanOptions = Literal[
if typing.TYPE_CHECKING:
from collections.abc import Mapping
from PIL.ImageFile import ImageFile as PILImageFile
T = TypeVar("T")
@ -347,8 +350,7 @@ def time_diff_in_hours(string_ed_date: DateTimeLikeObject, string_st_date: DateT
def now_datetime() -> datetime.datetime:
"""Return the current datetime in system timezone."""
dt = convert_utc_to_system_timezone(datetime.datetime.now(pytz.UTC))
return dt.replace(tzinfo=None)
return datetime.datetime.now(ZoneInfo(get_system_timezone())).replace(tzinfo=None)
def get_timestamp(date: Optional["DateTimeLikeObject"] = None) -> float:
@ -369,19 +371,18 @@ def get_system_timezone() -> str:
def convert_utc_to_timezone(utc_timestamp: datetime.datetime, time_zone: str) -> datetime.datetime:
from pytz import UnknownTimeZoneError, timezone
if utc_timestamp.tzinfo is None:
utc_timestamp = timezone("UTC").localize(utc_timestamp)
utc_timestamp = utc_timestamp.replace(tzinfo=ZoneInfo("UTC"))
try:
return utc_timestamp.astimezone(timezone(time_zone))
except UnknownTimeZoneError:
return utc_timestamp.astimezone(ZoneInfo(time_zone))
except ZoneInfoNotFoundError:
return utc_timestamp
def get_datetime_in_timezone(time_zone: str) -> datetime.datetime:
"""Return the current datetime in the given timezone (e.g. 'Asia/Kolkata')."""
utc_timestamp = datetime.datetime.now(pytz.UTC)
utc_timestamp = datetime.datetime.now(datetime.timezone.utc)
return convert_utc_to_timezone(utc_timestamp, time_zone)
@ -1984,19 +1985,14 @@ operator_map = {
}
def evaluate_filters(doc, filters: dict | list | tuple):
def evaluate_filters(doc: "Mapping", filters: FilterSignature):
"""Return True if doc matches filters."""
if isinstance(filters, dict):
for key, value in filters.items():
f = get_filter(None, {key: value})
if not compare(doc.get(f.fieldname), f.operator, f.value, f.fieldtype):
return False
elif isinstance(filters, list | tuple):
for d in filters:
f = get_filter(None, d)
if not compare(doc.get(f.fieldname), f.operator, f.value, f.fieldtype):
return False
if not isinstance(filters, Filters):
filters = Filters(filters, doctype=doc.get("doctype"))
for d in filters:
f = get_filter(None, d)
if not compare(doc.get(f.fieldname), f.operator, f.value, f.fieldtype):
return False
return True
@ -2011,7 +2007,7 @@ def compare(val1: Any, condition: str, val2: Any, fieldtype: str | None = None):
return False
def get_filter(doctype: str, f: dict | list | tuple, filters_config=None) -> "frappe._dict":
def get_filter(doctype: str, filters: FilterSignature, filters_config=None) -> "frappe._dict":
"""Return a `_dict` like:
{
@ -2025,30 +2021,18 @@ def get_filter(doctype: str, f: dict | list | tuple, filters_config=None) -> "fr
from frappe.database.utils import NestedSetHierarchy
from frappe.model import child_table_fields, default_fields, optional_fields
if isinstance(f, dict):
key, value = next(iter(f.items()))
f = make_filter_tuple(doctype, key, value)
ft: FilterTuple
if isinstance(filters, FilterTuple):
ft = filters
elif not isinstance(filters, Filters):
ft = Filters(filters, doctype=doctype)[0]
else:
ft = filters[0]
if not isinstance(f, list | tuple):
frappe.throw(frappe._("Filter must be a tuple or list (in a list)"))
if len(f) == 3:
f = (doctype, f[0], f[1], f[2])
elif len(f) > 4:
f = f[0:4]
elif len(f) != 4:
frappe.throw(
frappe._("Filter must have 4 values (doctype, fieldname, operator, value): {0}").format(str(f))
)
f = frappe._dict(doctype=f[0], fieldname=f[1], operator=f[2], value=f[3])
f = frappe._dict(doctype=ft[0], fieldname=ft[1], operator=ft[2], value=ft[3])
sanitize_column(f.fieldname)
if not f.operator:
# if operator is missing
f.operator = "="
valid_operators = (
"=",
"!=",

View file

@ -23,6 +23,8 @@ from frappe.utils import cstr, scrub_urls
from frappe.utils.caching import redis_cache
from frappe.utils.jinja_globals import bundled_asset, is_rtl
cssutils.log.setLog(frappe.logger("cssutils"))
PDF_CONTENT_ERRORS = [
"ContentNotFoundError",
"ContentOperationNotPermittedError",

View file

@ -14,7 +14,6 @@ import random
import time
from typing import NoReturn
import pytz
import setproctitle
from croniter import CroniterBadCronError
from filelock import FileLock, Timeout
@ -72,7 +71,7 @@ def sleep_duration(tick):
# This makes scheduler aligned with real clock,
# so event scheduled at 12:00 happen at 12:00 and not 12:00:35.
minutes = tick // 60
now = datetime.datetime.now(pytz.UTC)
now = datetime.datetime.now(datetime.timezone.utc)
left_minutes = minutes - now.minute % minutes
next_execution = now.replace(second=0) + datetime.timedelta(minutes=left_minutes)

View file

@ -0,0 +1,17 @@
[
{
"doctype": "Blog Category",
"parent_website_route": "blog",
"title": "_Test Blog Category"
},
{
"doctype": "Blog Category",
"parent_website_route": "blog",
"title": "_Test Blog Category 1"
},
{
"doctype": "Blog Category",
"parent_website_route": "blog",
"title": "_Test Blog Category 2"
}
]

View file

@ -1,12 +0,0 @@
[["Blog Category"]]
parent_website_route = "blog"
title = "_Test Blog Category"
[["Blog Category"]]
parent_website_route = "blog"
title = "_Test Blog Category 1"
[["Blog Category"]]
parent_website_route = "blog"
title = "_Test Blog Category 2"

View file

@ -0,0 +1,38 @@
[
{
"blog_category": "-test-blog-category",
"blog_intro": "Test Blog Intro",
"blogger": "_Test Blogger",
"content": "Test Blog Content",
"doctype": "Blog Post",
"title": "_Test Blog Post",
"published": 1
},
{
"blog_category": "-test-blog-category-1",
"blog_intro": "Test Blog Intro",
"blogger": "_Test Blogger",
"content": "Test Blog Content",
"doctype": "Blog Post",
"title": "_Test Blog Post 1",
"published": 1
},
{
"blog_category": "-test-blog-category-1",
"blog_intro": "Test Blog Intro",
"blogger": "_Test Blogger 1",
"content": "Test Blog Content",
"doctype": "Blog Post",
"title": "_Test Blog Post 2",
"published": 0
},
{
"blog_category": "-test-blog-category-1",
"blog_intro": "Test Blog Intro",
"blogger": "_Test Blogger 2",
"content": "Test Blog Content",
"doctype": "Blog Post",
"title": "_Test Blog Post 3",
"published": 0
}
]

View file

@ -1,32 +0,0 @@
[["Blog Post"]]
blog_category = "-test-blog-category"
blog_intro = "Test Blog Intro"
blogger = "_Test Blogger"
content = "Test Blog Content"
title = "_Test Blog Post"
published = 1
[["Blog Post"]]
blog_category = "-test-blog-category-1"
blog_intro = "Test Blog Intro"
blogger = "_Test Blogger"
content = "Test Blog Content"
title = "_Test Blog Post 1"
published = 1
[["Blog Post"]]
blog_category = "-test-blog-category-1"
blog_intro = "Test Blog Intro"
blogger = "_Test Blogger 1"
content = "Test Blog Content"
title = "_Test Blog Post 2"
published = 0
[["Blog Post"]]
blog_category = "-test-blog-category-1"
blog_intro = "Test Blog Intro"
blogger = "_Test Blogger 2"
content = "Test Blog Content"
title = "_Test Blog Post 3"
published = 0

View file

@ -0,0 +1,17 @@
[
{
"doctype": "Blogger",
"full_name": "_Test Blogger",
"short_name": "_Test Blogger"
},
{
"doctype": "Blogger",
"full_name": "_Test Blogger 1",
"short_name": "_Test Blogger 1"
},
{
"doctype": "Blogger",
"full_name": "_Test Blogger 2",
"short_name": "_Test Blogger 2"
}
]

View file

@ -1,12 +0,0 @@
[[Blogger]]
full_name = "_Test Blogger"
short_name = "_Test Blogger"
[[Blogger]]
full_name = "_Test Blogger 1"
short_name = "_Test Blogger 1"
[[Blogger]]
full_name = "_Test Blogger 2"
short_name = "_Test Blogger 2"

Some files were not shown because too many files have changed in this diff Show more