diff --git a/frappe/__init__.py b/frappe/__init__.py index f309a1e5e5..7ed40024f9 100644 --- a/frappe/__init__.py +++ b/frappe/__init__.py @@ -69,9 +69,10 @@ if TYPE_CHECKING: # pragma: no cover from frappe.database.mariadb.database import MariaDBDatabase as PyMariaDBDatabase from frappe.database.mariadb.mysqlclient import MariaDBDatabase from frappe.database.postgres.database import PostgresDatabase + from frappe.database.sqlite.database import SQLiteDatabase from frappe.email.doctype.email_queue.email_queue import EmailQueue from frappe.model.document import Document - from frappe.query_builder.builder import MariaDB, Postgres + from frappe.query_builder.builder import MariaDB, Postgres, SQLite from frappe.types.lazytranslatedstring import _LazyTranslate from frappe.utils.redis_wrapper import ClientCache, RedisWrapper @@ -161,8 +162,8 @@ ResponseDict: TypeAlias = _dict[str, Any] # type: ignore[no-any-explicit] FlagsDict: TypeAlias = _dict[str, Any] # type: ignore[no-any-explicit] FormDict: TypeAlias = _dict[str, str] -db: LocalProxy[Union["PyMariaDBDatabase", "MariaDBDatabase", "PostgresDatabase"]] = local("db") -qb: LocalProxy[Union["MariaDB", "Postgres"]] = local("qb") +db: LocalProxy[Union["PyMariaDBDatabase", "MariaDBDatabase", "PostgresDatabase", "SQLiteDatabase"]] = local("db") +qb: LocalProxy[Union["MariaDB", "Postgres", "SQLite"]] = local("qb") conf: LocalProxy[ConfType] = local("conf") form_dict: LocalProxy[FormDict] = local("form_dict") form = form_dict @@ -182,7 +183,7 @@ lang: LocalProxy[str] = local("lang") if TYPE_CHECKING: # pragma: no cover # trick because some type checkers fail to follow "RedisWrapper", etc (written as string literal) # trough a generic wrapper; seems to be a bug - db: PyMariaDBDatabase | MariaDBDatabase | PostgresDatabase + db: PyMariaDBDatabase | MariaDBDatabase | PostgresDatabase | SQLiteDatabase - qb: MariaDB | Postgres + qb: MariaDB | Postgres | SQLite conf: ConfType form_dict: FormDict diff --git a/frappe/cache_manager.py b/frappe/cache_manager.py index
b602b9e36c..4e648b9fb6 100644 --- a/frappe/cache_manager.py +++ b/frappe/cache_manager.py @@ -203,13 +203,24 @@ def build_table_count_cache(): ): return - table_name = frappe.qb.Field("table_name").as_("name") - table_rows = frappe.qb.Field("table_rows").as_("count") - information_schema = frappe.qb.Schema("information_schema") + if frappe.db.db_type != "sqlite": + table_name = frappe.qb.Field("table_name").as_("name") + table_rows = frappe.qb.Field("table_rows").as_("count") + information_schema = frappe.qb.Schema("information_schema") - data = (frappe.qb.from_(information_schema.tables).select(table_name, table_rows)).run(as_dict=True) - counts = {d.get("name").replace("tab", "", 1): d.get("count", None) for d in data} - frappe.cache.set_value("information_schema:counts", counts) + data = (frappe.qb.from_(information_schema.tables).select(table_name, table_rows)).run(as_dict=True) + counts = {d.get("name").replace("tab", "", 1): d.get("count", None) for d in data} + frappe.cache.set_value("information_schema:counts", counts) + else: + counts = {} + name = frappe.qb.Field("name") + type = frappe.qb.Field("type") + sqlite_master = frappe.qb.Schema("sqlite_master") + data = frappe.qb.from_(sqlite_master).select(name).where(type == "table").run(as_dict=True) + for table in data: + count = frappe.db.sql(f"SELECT COUNT(*) FROM `{table.name}`")[0][0] + counts[table.name.replace("tab", "", 1)] = count + frappe.cache.set_value("information_schema:counts", counts) return counts diff --git a/frappe/commands/utils.py b/frappe/commands/utils.py index ff5d435646..ea17a029e8 100644 --- a/frappe/commands/utils.py +++ b/frappe/commands/utils.py @@ -524,12 +524,27 @@ def postgres(context: CliCtxObj, extra_args): _enter_console(extra_args=extra_args) +@click.command("sqlite", context_settings=EXTRA_ARGS_CTX) +@click.argument("extra_args", nargs=-1) +@pass_context +def sqlite(context: CliCtxObj, extra_args): + """ + Enter into sqlite console for a given site. 
+ """ + site = get_site(context) + frappe.init(site) + frappe.conf.db_type = "sqlite" + _enter_console(extra_args=extra_args) + + def _enter_console(extra_args=None): from frappe.database import get_command from frappe.utils import get_site_path if frappe.conf.db_type == "mariadb": os.environ["MYSQL_HISTFILE"] = os.path.abspath(get_site_path("logs", "mariadb_console.log")) + elif frappe.conf.db_type == "sqlite": + os.environ["SQLITE_HISTORY"] = os.path.abspath(get_site_path("logs", "sqlite_console.log")) else: os.environ["PSQL_HISTORY"] = os.path.abspath(get_site_path("logs", "postgresql_console.log")) @@ -1033,6 +1048,7 @@ commands = [ make_app, create_patch, mariadb, + sqlite, postgres, request, reset_perms, diff --git a/frappe/core/report/database_storage_usage_by_tables/database_storage_usage_by_tables.py b/frappe/core/report/database_storage_usage_by_tables/database_storage_usage_by_tables.py index 73052ad170..53e0c1ee8c 100644 --- a/frappe/core/report/database_storage_usage_by_tables/database_storage_usage_by_tables.py +++ b/frappe/core/report/database_storage_usage_by_tables/database_storage_usage_by_tables.py @@ -34,6 +34,27 @@ def execute(filters=None): WHERE table_schema = 'public' ORDER BY 2 DESC; """, + "sqlite": """ + WITH RECURSIVE + page_size AS ( + SELECT CAST(page_size AS FLOAT) as size FROM PRAGMA_page_size() + ) + SELECT + m.name as 'table', + ROUND(CAST((SELECT SUM(pgsize) FROM dbstat WHERE name = m.name) * page_size.size / (1024.0 * 1024.0 * 1024.0) AS FLOAT), 2) as 'data_size_mb', + ROUND(CAST((SELECT SUM(pgsize) FROM dbstat WHERE name IN ( + SELECT name FROM sqlite_master + WHERE type = 'index' AND tbl_name = m.name + )) * page_size.size / (1024.0 * 1024.0 * 1024.0) AS FLOAT), 2) as 'index_size_mb', + ROUND(CAST((SELECT SUM(pgsize) FROM dbstat WHERE name = m.name OR name IN ( + SELECT name FROM sqlite_master + WHERE type = 'index' AND tbl_name = m.name + )) * page_size.size / (1024.0 * 1024.0 * 1024.0) AS FLOAT), 2) as 'total_size_mb' + FROM 
sqlite_master m + CROSS JOIN page_size + WHERE m.type = 'table' + AND m.name NOT LIKE 'sqlite_%' + ORDER BY total_size_mb DESC;""", }, as_dict=1, ) diff --git a/frappe/database/__init__.py b/frappe/database/__init__.py index 9f114a09e1..a68fdb78b5 100644 --- a/frappe/database/__init__.py +++ b/frappe/database/__init__.py @@ -3,6 +3,7 @@ # Database Module # -------------------- +from pathlib import Path from shutil import which from frappe.database.database import savepoint @@ -133,7 +134,10 @@ def get_command( elif frappe.conf.db_type == "sqlite": bin, bin_name = which("sqlite3"), "sqlite3" - command = [] + db_path = Path(frappe.get_site_path()) / "db" / f"{db_name}.db" + command = [db_path.as_posix()] + if dump: + command.append(".dump") else: if dump: diff --git a/frappe/database/database.py b/frappe/database/database.py index 765ea3ca77..41f5f91e75 100644 --- a/frappe/database/database.py +++ b/frappe/database/database.py @@ -8,7 +8,7 @@ import re import string import traceback import warnings -from collections.abc import Hashable, Iterable, Sequence +from collections.abc import Iterable, Sequence from contextlib import contextmanager, suppress from time import time from typing import TYPE_CHECKING, Any diff --git a/frappe/database/db_manager.py b/frappe/database/db_manager.py index 2b13c46ddf..1d9e3f93b8 100644 --- a/frappe/database/db_manager.py +++ b/frappe/database/db_manager.py @@ -3,7 +3,7 @@ from frappe import _ class DbManager: - def __init__(self, db): + def __init__(self, db: frappe.database.database.Database | None = None): """ Pass root_conn here for access to all databases. 
""" diff --git a/frappe/database/schema.py b/frappe/database/schema.py index 14cbd44e21..5adc416e3e 100644 --- a/frappe/database/schema.py +++ b/frappe/database/schema.py @@ -275,9 +275,9 @@ class DbColumn: self.table.change_type.append(self) # unique - if (self.unique and not current_def["unique"]) and column_type not in ("text", "longtext"): + if (self.unique and not current_def.get("unique")) and column_type not in ("text", "longtext"): self.table.add_unique.append(self) - elif (current_def["unique"] and not self.unique) and column_type not in ("text", "longtext"): + elif (current_def.get("unique") and not self.unique) and column_type not in ("text", "longtext"): self.table.drop_unique.append(self) # default @@ -289,21 +289,21 @@ class DbColumn: self.table.set_default.append(self) # nullability - if self.not_nullable is not None and (self.not_nullable != current_def["not_nullable"]): + if self.not_nullable is not None and (self.not_nullable != current_def.get("not_nullable")): self.table.change_nullability.append(self) # index should be applied or dropped irrespective of type change - if (current_def["index"] and not self.set_index) and column_type not in ("text", "longtext"): + if (current_def.get("index") and not self.set_index) and column_type not in ("text", "longtext"): self.table.drop_index.append(self) - elif (not current_def["index"] and self.set_index) and column_type not in ("text", "longtext"): + elif (not current_def.get("index") and self.set_index) and column_type not in ("text", "longtext"): self.table.add_index.append(self) def default_changed(self, current_def): if "decimal" in current_def["type"]: return self.default_changed_for_decimal(current_def) else: - cur_default = current_def["default"] + cur_default = current_def.get("default") new_default = self.default if cur_default == "NULL" or cur_default is None: cur_default = None diff --git a/frappe/database/sqlite/database.py b/frappe/database/sqlite/database.py index 3df7f31b23..23035275ec 
100644 --- a/frappe/database/sqlite/database.py +++ b/frappe/database/sqlite/database.py @@ -1,11 +1,12 @@ import re import sqlite3 -from contextlib import contextmanager +import warnings +from pathlib import Path import frappe -from frappe.database.database import Database +from frappe.database.database import TRANSACTION_DISABLED_MSG, Database, ImplicitCommitError, is_query_type from frappe.database.sqlite.schema import SQLiteTable -from frappe.utils import UnicodeWithAttrs, cstr, get_datetime, get_table_name +from frappe.utils import get_table_name _PARAM_COMP = re.compile(r"%\([\w]*\)s") @@ -89,23 +90,20 @@ class SQLiteDatabase(SQLiteExceptionUtil, Database): MAX_ROW_SIZE_LIMIT = None def get_connection(self): - conn = self._get_connection() + conn = self.create_connection() conn.isolation_level = None return conn - def _get_connection(self): - """Return SQLite connection object.""" - return self.create_connection() - def create_connection(self): - return sqlite3.connect(self.get_connection_settings()) + db_path = self.get_db_path() + return sqlite3.connect(db_path) + + def get_db_path(self): + return Path(frappe.get_site_path()) / "db" / f"{self.cur_db_name}.db" def set_execution_timeout(self, seconds: int): self.sql(f"PRAGMA busy_timeout = {int(seconds) * 1000}") - def get_connection_settings(self) -> str: - return self.cur_db_name - def setup_type_map(self): self.db_type = "sqlite" self.type_map = { @@ -245,7 +243,7 @@ class SQLiteDatabase(SQLiteExceptionUtil, Database): return None def has_index(self, table_name, index_name): - return self.sql(f"PRAGMA index_list(`{table_name}`)") + return self.sql(f"SELECT * FROM pragma_index_list(`{table_name}`) WHERE name = '{index_name}'") def get_column_index(self, table_name: str, fieldname: str, unique: bool = False) -> frappe._dict | None: """Check if column exists for a specific fields in specified order.""" @@ -254,18 +252,32 @@ class SQLiteDatabase(SQLiteExceptionUtil, Database): index_info = self.sql(f"PRAGMA 
index_info(`{index['name']}`)", as_dict=True) if index_info and index_info[0]["name"] == fieldname: return index - return None def add_index(self, doctype: str, fields: list, index_name: str | None = None): """Creates an index with given fields if not already created.""" + + # We can't specify the length of the index in SQLite + fields = [re.sub(r"\(.*?\)", "", field) for field in fields] + index_name = index_name or self.get_index_name(fields) table_name = get_table_name(doctype) - if not self.has_index(table_name, index_name): - self.commit() - self.sql(f"CREATE INDEX `{index_name}` ON `{table_name}` ({', '.join(fields)})") + self.commit() + self.sql(f"CREATE INDEX IF NOT EXISTS `{index_name}` ON `{table_name}` ({', '.join(fields)})") def add_unique(self, doctype, fields, constraint_name=None): - raise NotImplementedError("SQLite does not support adding unique constraints directly.") + """Creates unique constraint on fields.""" + if isinstance(fields, str): + fields = [fields] + if not constraint_name: + constraint_name = f"unique_{'_'.join(fields)}" + table_name = get_table_name(doctype) + + columns = ", ".join(fields) + sql_create_unique = ( + f"CREATE UNIQUE INDEX IF NOT EXISTS `{constraint_name}` ON `{table_name}` ({columns})" + ) + self.commit() # commit before creating index + self.sql(sql_create_unique) def updatedb(self, doctype, meta=None): """Syncs a `DocType` to the table.""" @@ -307,7 +319,8 @@ class SQLiteDatabase(SQLiteExceptionUtil, Database): query = query % {x: f"'{y}'" for x, y in values.items()} except TypeError: pass - return self._cursor.execute(query, values) + + return self._cursor.execute(query, values or ()) def sql(self, *args, **kwargs): if args: @@ -318,6 +331,90 @@ class SQLiteDatabase(SQLiteExceptionUtil, Database): elif kwargs.get("query"): kwargs["query"] = modify_query(kwargs.get("query")) + return super().sql(*args, **kwargs) + + def begin(self, *, read_only=False): + read_only = read_only or frappe.flags.read_only + # mode = 
"READ ONLY" if read_only else "" + # TODO: support read_only + self.sql("BEGIN") + + def commit(self): + """Commit current transaction. Calls SQL `COMMIT`.""" + if not self._conn: + self.connect() + + if self._disable_transaction_control: + warnings.warn(message=TRANSACTION_DISABLED_MSG, stacklevel=2) + return + + self.before_rollback.reset() + self.after_rollback.reset() + + self.before_commit.run() + + self._conn.commit() + self.transaction_writes = 0 + self.begin() # explicitly start a new transaction + + self.after_commit.run() + + def rollback(self, *, save_point=None): + """`ROLLBACK` current transaction. Optionally rollback to a known save_point.""" + if not self._conn: + self.connect() + if save_point: + self.sql(f"rollback to savepoint {save_point}") + elif not self._disable_transaction_control: + self.before_commit.reset() + self.after_commit.reset() + + self.before_rollback.run() + + self._conn.rollback() + self.begin() + + self.after_rollback.run() + else: + warnings.warn(message=TRANSACTION_DISABLED_MSG, stacklevel=2) + + def get_db_table_columns(self, table) -> list[str]: + """Return list of column names from given table.""" + key = f"table_columns::{table}" + columns = frappe.client_cache.get_value(key) + if columns is None: + columns = self.sql(f"PRAGMA table_info(`{table}`)", as_dict=True) + columns = [col["name"] for col in columns] + + if columns: + frappe.cache.set_value(key, columns) + + return columns + + def check_implicit_commit(self, query: str): + if ( + self.transaction_writes + and query + and is_query_type( + query, + ("start", "alter", "drop", "create", "truncate", "vacuum", "attach", "detach"), + ) + ): + raise ImplicitCommitError("This statement can cause implicit commit", query) + + def estimate_count(self, doctype: str): + """Get estimated count of total rows in a table.""" + from frappe.utils.data import cint + + table = get_table_name(doctype) + try: + if count := self.sql(f"SELECT COUNT(*) FROM `{table}`"): + return 
cint(count[0][0]) + except sqlite3.OperationalError as e: + if not self.is_table_missing(e): + raise + return 0 + def modify_query(query): """ diff --git a/frappe/database/sqlite/schema.py b/frappe/database/sqlite/schema.py index 524503c55b..b3c40301c0 100644 --- a/frappe/database/sqlite/schema.py +++ b/frappe/database/sqlite/schema.py @@ -1,5 +1,3 @@ -from pymysql.constants.ER import DUP_ENTRY - import frappe from frappe import _ from frappe.database.schema import DBTable @@ -8,149 +6,136 @@ from frappe.utils.defaults import get_not_null_defaults class SQLiteTable(DBTable): def create(self): + # First prepare the basic table creation without indexes additional_definitions = [] - engine = self.meta.get("engine") or "InnoDB" - varchar_len = frappe.db.VARCHAR_LEN - name_column = f"name varchar({varchar_len}) primary key" + name_column = "name TEXT PRIMARY KEY" # columns column_defs = self.get_column_definitions() if column_defs: additional_definitions += column_defs - # index - index_defs = self.get_index_definitions() - if index_defs: - additional_definitions += index_defs + index_defs = [] # Store index definitions separately # child table columns if self.meta.get("istable", default=0): - additional_definitions += [ - f"parent varchar({varchar_len})", - f"parentfield varchar({varchar_len})", - f"parenttype varchar({varchar_len})", - "index parent(parent)", - ] + additional_definitions.extend(["parent TEXT", "parentfield TEXT", "parenttype TEXT"]) + index_defs.append(f"CREATE INDEX `{self.table_name}_parent_idx` ON `{self.table_name}`(parent)") else: # parent types - additional_definitions.append("index creation(creation)") + index_defs.append( + f"CREATE INDEX `{self.table_name}_creation_idx` ON `{self.table_name}`(creation)" + ) if self.meta.sort_field == "modified": - # Support old doctype default by indexing it, also 2nd popular choice. 
- additional_definitions.append("index modified(modified)") + index_defs.append( + f"CREATE INDEX `{self.table_name}_modified_idx` ON `{self.table_name}`(modified)" + ) # creating sequence(s) if not self.meta.issingle and self.meta.autoname == "autoincrement": - frappe.db.create_sequence(self.doctype, check_not_exists=True) - - # NOTE: not used nextval func as default as the ability to restore - # database with sequences has bugs in mariadb and gives a scary error. - # issue link: https://jira.mariadb.org/browse/MDEV-20070 - name_column = "name bigint primary key" - + name_column = "name INTEGER PRIMARY KEY AUTOINCREMENT" elif not self.meta.issingle and self.meta.autoname == "UUID": - name_column = "name uuid primary key" + name_column = "name TEXT PRIMARY KEY" additional_definitions = ",\n".join(additional_definitions) # create table - query = f"""create table `{self.table_name}` ( + create_table_query = f"""CREATE TABLE `{self.table_name}` ( {name_column}, - creation datetime(6), - modified datetime(6), - modified_by varchar({varchar_len}), - owner varchar({varchar_len}), - docstatus tinyint not null default '0', - idx int not null default '0', - {additional_definitions}) - ENGINE={engine} - ROW_FORMAT=DYNAMIC - CHARACTER SET=utf8mb4 - COLLATE=utf8mb4_unicode_ci""" + creation DATETIME, + modified DATETIME, + modified_by TEXT, + owner TEXT, + docstatus INTEGER NOT NULL DEFAULT 0, + idx INTEGER NOT NULL DEFAULT 0, + {additional_definitions})""" - frappe.db.sql_ddl(query) + # Execute table creation + frappe.db.sql_ddl(create_table_query) + + # Create indexes separately + for index_query in index_defs: + frappe.db.sql_ddl(index_query) def alter(self): for col in self.columns.values(): col.build_for_alter_table(self.current_columns.get(col.fieldname.lower())) - add_column_query = [f"ADD COLUMN `{col.fieldname}` {col.get_definition()}" for col in self.add_column] + for col in self.add_column: + frappe.db.sql_ddl( + f"ALTER TABLE `{self.table_name}` ADD COLUMN 
`{col.fieldname}` {col.get_definition()}" + ) + + if not ( + self.change_type + or self.set_default + or self.change_nullability + or self.add_index + or self.add_unique + or self.drop_index + or self.drop_unique + ): + return + + # Get current table column definitions + existing_columns = [] + for column in frappe.db.sql(f"PRAGMA table_info(`{self.table_name}`)", as_dict=1): + existing_columns.append(f"`{column.name}` {column.type}") + + columns = existing_columns.copy() + + # Modify existing columns columns_to_modify = set(self.change_type + self.set_default + self.change_nullability) - modify_column_query = [ - f"MODIFY `{col.fieldname}` {col.get_definition(for_modification=True)}" - for col in columns_to_modify - ] - if alter_pk := self.alter_primary_key(): - modify_column_query.append(alter_pk) + for col in columns_to_modify: + # Replace the old column definition with the new one + for i, column in enumerate(columns): + if column.startswith(f"`{col.fieldname}`"): + columns[i] = f"`{col.fieldname}` {col.get_definition(for_modification=True)}" + break - modify_column_query.extend( - [f"ADD UNIQUE INDEX IF NOT EXISTS {col.fieldname} (`{col.fieldname}`)" for col in self.add_unique] - ) - add_index_query = [ - f"ADD INDEX `{col.fieldname}_index`(`{col.fieldname}`)" - for col in self.add_index - if not frappe.db.get_column_index(self.table_name, col.fieldname, unique=False) - ] + # Create new table + temp_table = f"{self.table_name}_new" + create_table = f"CREATE TABLE `{temp_table}` (\n{','.join(columns)}\n)" + frappe.db.sql_ddl(create_table) + # Copy data + existing_columns = [col.split()[0] for col in existing_columns] + column_list = ", ".join(existing_columns) + frappe.db.sql_ddl(f"INSERT INTO `{temp_table}` SELECT {column_list} FROM `{self.table_name}`") + + # Drop old table + frappe.db.sql_ddl(f"DROP TABLE `{self.table_name}`") + + # Rename new table + frappe.db.sql_ddl(f"ALTER TABLE `{temp_table}` RENAME TO `{self.table_name}`") + + # Recreate indexes + 
index_queries = [] + if self.add_unique: + index_queries.extend( + f"CREATE UNIQUE INDEX `{col.fieldname}` ON `{self.table_name}` (`{col.fieldname}`)" + for col in self.add_unique + ) + if self.add_index: + index_queries.extend( + f"CREATE INDEX `{col.fieldname}_index` ON `{self.table_name}` (`{col.fieldname}`)" + for col in self.add_index + if not frappe.db.get_column_index(self.table_name, col.fieldname, unique=False) + ) if self.meta.sort_field == "modified" and not frappe.db.get_column_index( self.table_name, "modified", unique=False ): - add_index_query.append("ADD INDEX `modified`(`modified`)") + index_queries.append(f"CREATE INDEX `modified` ON `{self.table_name}` (`modified`)") - drop_index_query = [] - - for col in {*self.drop_index, *self.drop_unique}: - if col.fieldname == "name": - continue - - current_column = self.current_columns.get(col.fieldname.lower()) - unique_constraint_changed = current_column.unique != col.unique - if unique_constraint_changed and not col.unique: - if unique_index := frappe.db.get_column_index(self.table_name, col.fieldname, unique=True): - drop_index_query.append(f"DROP INDEX `{unique_index.Key_name}`") - - index_constraint_changed = current_column.index != col.set_index - if index_constraint_changed and not col.set_index: - if index_record := frappe.db.get_column_index(self.table_name, col.fieldname, unique=False): - drop_index_query.append(f"DROP INDEX `{index_record.Key_name}`") - - for col in self.change_nullability: - if col.not_nullable: - try: - table = frappe.qb.DocType(self.doctype) - frappe.qb.update(table).set( - col.fieldname, col.default or get_not_null_defaults(col.fieldtype) - ).where(table[col.fieldname].isnull()).run() - except Exception: - print(f"Failed to update data in {self.table_name} for {col.fieldname}") - raise - try: - for query_parts in [add_column_query, modify_column_query, add_index_query, drop_index_query]: - if query_parts: - query_body = ", ".join(query_parts) - query = f"ALTER TABLE 
`{self.table_name}` {query_body}" - # nosemgrep - frappe.db.sql_ddl(query) - - except Exception as e: - if query := locals().get("query"): # this weirdness is to avoid potentially unbounded vars - print(f"Failed to alter schema using query: {query}") - - if e.args[0] == DUP_ENTRY: - fieldname = str(e).split("'")[-2] - frappe.throw( - _( - "{0} field cannot be set as unique in {1}, as there are non-unique existing values" - ).format(fieldname, self.table_name) - ) - - raise + for query in index_queries: + frappe.db.sql_ddl(query) def alter_primary_key(self) -> str | None: - # If there are no values in table allow migrating to UUID from varchar + # If there are no values in table allow migrating to UUID from TEXT autoname = self.meta.autoname - if autoname == "UUID" and frappe.db.get_column_type(self.doctype, "name") != "uuid": + if autoname == "UUID" and frappe.db.get_column_type(self.doctype, "name") != "TEXT": if not frappe.db.get_value(self.doctype, {}, order_by=None): - return "modify name uuid" + return "ALTER COLUMN name TEXT" else: frappe.throw( _("Primary key of doctype {0} can not be changed as there are existing values.").format( @@ -158,6 +143,6 @@ class SQLiteTable(DBTable): ) ) - # Reverting from UUID to VARCHAR - if autoname != "UUID" and frappe.db.get_column_type(self.doctype, "name") == "uuid": - return f"modify name varchar({frappe.db.VARCHAR_LEN})" + # Reverting from UUID to TEXT + if autoname != "UUID" and frappe.db.get_column_type(self.doctype, "name") == "TEXT": + return "ALTER COLUMN name TEXT" diff --git a/frappe/database/sqlite/setup_db.py b/frappe/database/sqlite/setup_db.py index a93c9676aa..6cd66d46cb 100644 --- a/frappe/database/sqlite/setup_db.py +++ b/frappe/database/sqlite/setup_db.py @@ -21,8 +21,6 @@ def setup_database(force, verbose): def bootstrap_database(verbose, source_sql=None): import sys - frappe.connect() - import_db_from_sql(source_sql, verbose) frappe.connect() @@ -32,7 +30,7 @@ def bootstrap_database(verbose, 
source_sql=None): secho( "Table 'tabDefaultValue' missing in the restored site. " "This happens when the backup fails to restore. Please check that the file is valid\n" - "Do go through the above output to check the exact error message from MariaDB", + "Do go through the above output to check the exact error message", fg="red", ) sys.exit(1) @@ -44,9 +42,7 @@ def import_db_from_sql(source_sql=None, verbose=False): db_name = frappe.conf.db_name if not source_sql: source_sql = os.path.join(os.path.dirname(__file__), "framework_sqlite.sql") - DbManager(frappe.local.db).restore_database( - verbose, db_name, source_sql, frappe.conf.db_user, frappe.conf.db_password - ) + DbManager().restore_database(verbose, db_name, source_sql, frappe.conf.db_user, frappe.conf.db_password) if verbose: print("Imported from database {}".format(source_sql)) diff --git a/frappe/desk/doctype/system_console/system_console.py b/frappe/desk/doctype/system_console/system_console.py index 8ddd1f6539..2334f3809e 100644 --- a/frappe/desk/doctype/system_console/system_console.py +++ b/frappe/desk/doctype/system_console/system_console.py @@ -61,6 +61,9 @@ def show_processlist(): def _show_processlist(): + if frappe.db.db_type == "sqlite": + return [] + return frappe.db.multisql( { "postgres": """ diff --git a/frappe/desk/doctype/system_health_report/system_health_report.py b/frappe/desk/doctype/system_health_report/system_health_report.py index 9596ebec5c..e206676889 100644 --- a/frappe/desk/doctype/system_health_report/system_health_report.py +++ b/frappe/desk/doctype/system_health_report/system_health_report.py @@ -231,10 +231,25 @@ class SystemHealthReport(Document): LIMIT 5 """ + sqlite_query = """ + SELECT scheduled_job_type, + AVG(CASE WHEN status != 'Complete' THEN 1 ELSE 0 END) * 100 AS failure_rate + FROM `tabScheduled Job Log` + WHERE + creation > %(lower_threshold)s + AND modified > %(lower_threshold)s + AND creation < %(upper_threshold)s + GROUP BY scheduled_job_type + HAVING failure_rate 
> 0 + ORDER BY failure_rate DESC + LIMIT 5 + """ + failing_jobs = frappe.db.multisql( { "mariadb": mariadb_query, "postgres": postgres_query, + "sqlite": sqlite_query, }, {"lower_threshold": lower_threshold, "upper_threshold": upper_threshold}, as_dict=True, diff --git a/frappe/desk/form/load.py b/frappe/desk/form/load.py index 065d286ddb..83f1d5c0bf 100644 --- a/frappe/desk/form/load.py +++ b/frappe/desk/form/load.py @@ -319,15 +319,32 @@ def get_communication_data( {conditions} """ - return frappe.db.sql( - """ + sqlite_query = f""" + SELECT * FROM ( + SELECT * FROM ({part1}) + UNION ALL + SELECT * FROM ({part2}) + ) AS combined + {group_by or ""} + ORDER BY communication_date DESC + LIMIT %(limit)s + OFFSET %(start)s""" + + query = f""" SELECT * FROM (({part1}) UNION ({part2})) AS combined - {group_by} + {group_by or ""} ORDER BY communication_date DESC LIMIT %(limit)s OFFSET %(start)s - """.format(part1=part1, part2=part2, group_by=(group_by or "")), + """ + + return frappe.db.multisql( + { + "sqlite": sqlite_query, + "postgres": query, + "mariadb": query, + }, dict( doctype=doctype, name=name, diff --git a/frappe/geo/doctype/country/country.py b/frappe/geo/doctype/country/country.py index bec0e1e175..4ada13f16f 100644 --- a/frappe/geo/doctype/country/country.py +++ b/frappe/geo/doctype/country/country.py @@ -83,7 +83,7 @@ def get_countries_and_currencies(): symbol=country.currency_symbol, fraction_units=country.currency_fraction_units, smallest_currency_fraction_value=country.smallest_currency_fraction_value, - number_format=country.number_format, + number_format=frappe.db.escape(country.number_format)[1:-1], ) ) diff --git a/frappe/model/db_query.py b/frappe/model/db_query.py index b0a884ca7a..2aa951ee9f 100644 --- a/frappe/model/db_query.py +++ b/frappe/model/db_query.py @@ -1180,8 +1180,7 @@ def cast_name(column: str) -> str: Example: input - "ifnull(`tabBlog Post`.`name`, '')=''" output - "ifnull(cast(`tabBlog Post`.`name` as varchar), '')=''" """ - - if 
frappe.db.db_type == "mariadb": + if frappe.db.db_type != "postgres": return column kwargs = {"string": column} diff --git a/frappe/model/document.py b/frappe/model/document.py index a194751e22..8a34141479 100644 --- a/frappe/model/document.py +++ b/frappe/model/document.py @@ -232,11 +232,14 @@ class Document(BaseDocument, DocRef): else: if not is_doctype and isinstance(self.name, str): + for_update = "" + if self.flags.for_update and frappe.db.db_type != "sqlite": + for_update = "FOR UPDATE" # Fast path - use raw SQL to avoid QB/ORM overheads. d = frappe.db.sql( "SELECT * FROM {table_name} WHERE `name` = %s {for_update}".format( table_name=get_table_name(self.doctype, wrap_in_backticks=True), - for_update="FOR UPDATE" if self.flags.for_update else "", + for_update=for_update, ), (self.name), as_dict=True, @@ -289,6 +292,9 @@ class Document(BaseDocument, DocRef): for_update=self.flags.for_update, ) else: + for_update = "" + if self.flags.for_update and frappe.db.db_type != "sqlite": + for_update = "FOR UPDATE" # Fast pass for all other doctypes - using raw SQL children = frappe.db.sql( """SELECT * FROM {table_name} @@ -297,7 +303,7 @@ class Document(BaseDocument, DocRef): AND `parentfield`= %(parentfield)s ORDER BY `idx` ASC {for_update}""".format( table_name=get_table_name(child_doctype, wrap_in_backticks=True), - for_update="FOR UPDATE" if self.flags.for_update else "", + for_update=for_update, ), {"parent": self.name, "parenttype": self.doctype, "parentfield": fieldname}, as_dict=True, diff --git a/frappe/model/utils/user_settings.py b/frappe/model/utils/user_settings.py index a3546b174c..4dad61c9b0 100644 --- a/frappe/model/utils/user_settings.py +++ b/frappe/model/utils/user_settings.py @@ -57,6 +57,8 @@ def sync_user_settings(): "postgres": """INSERT INTO `__UserSettings` (`user`, `doctype`, `data`) VALUES (%s, %s, %s) ON CONFLICT ("user", "doctype") DO UPDATE SET `data`=%s""", + "sqlite": """INSERT OR REPLACE INTO `__UserSettings` (`user`, `doctype`, 
`data`) + VALUES (%s, %s, %s)""", }, (user, doctype, data, data), as_dict=1, diff --git a/frappe/query_builder/builder.py b/frappe/query_builder/builder.py index ea161d3180..0b4a2b48bb 100644 --- a/frappe/query_builder/builder.py +++ b/frappe/query_builder/builder.py @@ -6,7 +6,7 @@ from pypika.dialects import MySQLQueryBuilder, PostgreSQLQueryBuilder, SQLLiteQu from pypika.queries import QueryBuilder, Schema, Table from pypika.terms import Function -from frappe.query_builder.terms import ParameterizedValueWrapper +from frappe.query_builder.terms import ParameterizedValueWrapper, SQLiteParameterizedValueWrapper from frappe.utils import get_table_name @@ -100,11 +100,13 @@ class Postgres(Base, PostgreSQLQuery): class SQLite(Base, SQLLiteQuery): + Field = terms.Field + _BuilderClasss = SQLLiteQueryBuilder @classmethod def _builder(cls, *args, **kwargs) -> "SQLLiteQueryBuilder": - return super()._builder(*args, wrapper_cls=ParameterizedValueWrapper, **kwargs) + return super()._builder(*args, wrapper_cls=SQLiteParameterizedValueWrapper, **kwargs) @classmethod def from_(cls, table, *args, **kwargs): diff --git a/frappe/query_builder/terms.py b/frappe/query_builder/terms.py index cdae2a449d..a042d40778 100644 --- a/frappe/query_builder/terms.py +++ b/frappe/query_builder/terms.py @@ -1,6 +1,7 @@ from datetime import datetime, time, timedelta from typing import Any +from pypika.dialects import SQLLiteValueWrapper from pypika.queries import QueryBuilder from pypika.terms import Criterion, Function, ValueWrapper from pypika.utils import format_alias_sql @@ -71,6 +72,10 @@ class ParameterizedValueWrapper(ValueWrapper): return format_alias_sql(sql, self.alias, quote_char=quote_char, **kwargs) +class SQLiteParameterizedValueWrapper(ParameterizedValueWrapper, SQLLiteValueWrapper): + pass + + class ParameterizedFunction(Function): """ Class to monkey patch pypika.terms.Functions diff --git a/frappe/utils/backups.py b/frappe/utils/backups.py index 3fbde4735f..ef06ab9e3d 100644 
--- a/frappe/utils/backups.py +++ b/frappe/utils/backups.py @@ -431,6 +431,12 @@ class BackupGenerator: elif self.backup_excludes: extra.extend([f"--ignore-table={self.db_name}.{table}" for table in self.backup_excludes]) + elif self.db_type == "sqlite": + if self.backup_includes: + extra.extend([f'"{table}"' for table in self.backup_includes]) + elif self.backup_excludes: + click.secho("Excluding tables is not supported for SQLite", fg="yellow") + elif self.db_type == "postgres": if self.backup_includes: extra.extend([f'--table=public."{table}"' for table in self.backup_includes]) diff --git a/frappe/utils/global_search.py b/frappe/utils/global_search.py index 45871c14be..623e85df05 100644 --- a/frappe/utils/global_search.py +++ b/frappe/utils/global_search.py @@ -226,6 +226,9 @@ def insert_values_for_multiple_docs(all_contents): (doctype, name, content, published, title, route) VALUES {} ON CONFLICT("name", "doctype") DO NOTHING""".format(", ".join(batch_values)), + "sqlite": """INSERT OR IGNORE INTO `__global_search` + (doctype, name, content, published, title, route) + VALUES {} """.format(", ".join(batch_values)), } ) @@ -447,6 +450,10 @@ def sync_value(value: dict): `published`=%(published)s, `title`=%(title)s, `route`=%(route)s + """, + "sqlite": """INSERT OR REPLACE INTO `__global_search` + (`doctype`, `name`, `content`, `published`, `title`, `route`) + VALUES (%(doctype)s, %(name)s, %(content)s, %(published)s, %(title)s, %(route)s) """, }, value, diff --git a/frappe/utils/make_random.py b/frappe/utils/make_random.py index 89addead2b..6d9750ee71 100644 --- a/frappe/utils/make_random.py +++ b/frappe/utils/make_random.py @@ -44,6 +44,8 @@ def get_random(doctype: str, filters: dict | None = None, doc: bool = False): "mariadb": f"""select name from `tab{doctype}` {condition} order by RAND() limit 1 offset 0""", "postgres": f"""select name from `tab{doctype}` {condition} + order by RANDOM() limit 1 offset 0""", + "sqlite": f"""select name from `tab{doctype}` 
{condition} order by RANDOM() limit 1 offset 0""", } )