feat: init sqlite

Signed-off-by: Akhil Narang <me@akhilnarang.dev>
This commit is contained in:
Akhil Narang 2025-01-30 13:13:04 +05:30
parent 886adfbc43
commit 0b5e245507
No known key found for this signature in database
GPG key ID: 9DCC61E211BF645F
15 changed files with 964 additions and 69 deletions

View file

@ -304,9 +304,11 @@ def connect(site: str | None = None, db_name: str | None = None, set_admin_as_us
db_name_ = conf.db_name or db_name db_name_ = conf.db_name or db_name
db_password = conf.db_password db_password = conf.db_password
assert db_user, "site must be fully initialized, db_user missing"
assert db_name_, "site must be fully initialized, db_name missing" assert db_name_, "site must be fully initialized, db_name missing"
assert db_password, "site must be fully initialized, db_password missing"
if frappe.conf.db_type in ("mariadb", "postgres"):
assert db_user, "site must be fully initialized, db_user missing"
assert db_password, "site must be fully initialized, db_password missing"
local.db = get_db( local.db = get_db(
socket=conf.db_socket, socket=conf.db_socket,

View file

@ -22,8 +22,8 @@ from frappe.utils.bench_helper import CliCtxObj
@click.option( @click.option(
"--db-type", "--db-type",
default="mariadb", default="mariadb",
type=click.Choice(["mariadb", "postgres"]), type=click.Choice(["mariadb", "postgres", "sqlite"]),
help='Optional "postgres" or "mariadb". Default is "mariadb"', help='Optional "sqlite", "postgres" or "mariadb". Default is "mariadb"',
) )
@click.option("--db-host", help="Database Host") @click.option("--db-host", help="Database Host")
@click.option("--db-port", type=int, help="Database Port") @click.option("--db-port", type=int, help="Database Port")

View file

@ -78,21 +78,23 @@ def _get_site_config(sites_path: str, site_path: str) -> _dict[str, Any]:
os.environ.get("FRAPPE_REDIS_CACHE") or config.get("redis_cache") or "redis://127.0.0.1:13311" os.environ.get("FRAPPE_REDIS_CACHE") or config.get("redis_cache") or "redis://127.0.0.1:13311"
) )
config["db_type"] = os.environ.get("FRAPPE_DB_TYPE") or config.get("db_type") or "mariadb" config["db_type"] = os.environ.get("FRAPPE_DB_TYPE") or config.get("db_type") or "mariadb"
config["db_socket"] = os.environ.get("FRAPPE_DB_SOCKET") or config.get("db_socket")
config["db_host"] = os.environ.get("FRAPPE_DB_HOST") or config.get("db_host") or "127.0.0.1"
config["db_port"] = int(
os.environ.get("FRAPPE_DB_PORT") or config.get("db_port") or db_default_ports(config["db_type"])
)
# Set the user as database name if not set in config if config["db_type"] in ("mariadb", "postgres"):
config["db_user"] = os.environ.get("FRAPPE_DB_USER") or config.get("db_user") or config.get("db_name") config["db_socket"] = os.environ.get("FRAPPE_DB_SOCKET") or config.get("db_socket")
config["db_host"] = os.environ.get("FRAPPE_DB_HOST") or config.get("db_host") or "127.0.0.1"
config["db_port"] = int(
os.environ.get("FRAPPE_DB_PORT") or config.get("db_port") or db_default_ports(config["db_type"])
)
# Set the user as database name if not set in config
config["db_user"] = os.environ.get("FRAPPE_DB_USER") or config.get("db_user") or config.get("db_name")
# read password
config["db_password"] = os.environ.get("FRAPPE_DB_PASSWORD") or config.get("db_password")
# vice versa for dbname if not defined # vice versa for dbname if not defined
config["db_name"] = os.environ.get("FRAPPE_DB_NAME") or config.get("db_name") or config["db_user"] config["db_name"] = os.environ.get("FRAPPE_DB_NAME") or config.get("db_name") or config["db_user"]
# read password
config["db_password"] = os.environ.get("FRAPPE_DB_PASSWORD") or config.get("db_password")
# Allow externally extending the config with hooks # Allow externally extending the config with hooks
if extra_config := config.get("extra_config"): if extra_config := config.get("extra_config"):
if isinstance(extra_config, str): if isinstance(extra_config, str):

View file

@ -11,40 +11,52 @@ from frappe.database.database import savepoint
def setup_database(force, verbose=None, mariadb_user_host_login_scope=None): def setup_database(force, verbose=None, mariadb_user_host_login_scope=None):
import frappe import frappe
if frappe.conf.db_type == "postgres": if frappe.conf.db_type == "mariadb":
import frappe.database.postgres.setup_db
return frappe.database.postgres.setup_db.setup_database()
else:
import frappe.database.mariadb.setup_db import frappe.database.mariadb.setup_db
return frappe.database.mariadb.setup_db.setup_database(force, verbose, mariadb_user_host_login_scope) return frappe.database.mariadb.setup_db.setup_database(force, verbose, mariadb_user_host_login_scope)
elif frappe.conf.db_type == "sqlite":
import frappe.database.sqlite.setup_db
return frappe.database.sqlite.setup_db.setup_database(force, verbose)
else:
import frappe.database.postgres.setup_db
return frappe.database.postgres.setup_db.setup_database()
def bootstrap_database(verbose=None, source_sql=None): def bootstrap_database(verbose=None, source_sql=None):
import frappe import frappe
if frappe.conf.db_type == "postgres": if frappe.conf.db_type == "mariadb":
import frappe.database.postgres.setup_db
return frappe.database.postgres.setup_db.bootstrap_database(verbose, source_sql)
else:
import frappe.database.mariadb.setup_db import frappe.database.mariadb.setup_db
return frappe.database.mariadb.setup_db.bootstrap_database(verbose, source_sql) return frappe.database.mariadb.setup_db.bootstrap_database(verbose, source_sql)
elif frappe.conf.db_type == "sqlite":
import frappe.database.sqlite.setup_db
return frappe.database.sqlite.setup_db.bootstrap_database(verbose, source_sql)
else:
import frappe.database.postgres.setup_db
return frappe.database.postgres.setup_db.bootstrap_database(verbose, source_sql)
def drop_user_and_database(db_name, db_user): def drop_user_and_database(db_name, db_user):
import frappe import frappe
if frappe.conf.db_type == "postgres": if frappe.conf.db_type == "mariadb":
import frappe.database.postgres.setup_db
return frappe.database.postgres.setup_db.drop_user_and_database(db_name, db_user)
else:
import frappe.database.mariadb.setup_db import frappe.database.mariadb.setup_db
return frappe.database.mariadb.setup_db.drop_user_and_database(db_name, db_user) return frappe.database.mariadb.setup_db.drop_user_and_database(db_name, db_user)
elif frappe.conf.db_type == "sqlite":
import frappe.database.sqlite.setup_db
return frappe.database.sqlite.setup_db.drop_database(db_name)
else:
import frappe.database.postgres.setup_db
return frappe.database.postgres.setup_db.drop_user_and_database(db_name, db_user)
def get_db(socket=None, host=None, user=None, password=None, port=None, cur_db_name=None): def get_db(socket=None, host=None, user=None, password=None, port=None, cur_db_name=None):
@ -58,6 +70,10 @@ def get_db(socket=None, host=None, user=None, password=None, port=None, cur_db_n
return frappe.database.postgres.database.PostgresDatabase( return frappe.database.postgres.database.PostgresDatabase(
socket, host, user, password, port, cur_db_name socket, host, user, password, port, cur_db_name
) )
elif conf.db_type == "sqlite":
import frappe.database.sqlite.database
return frappe.database.sqlite.database.SQLiteDatabase(cur_db_name=cur_db_name)
elif conf.use_mysqlclient: elif conf.use_mysqlclient:
import frappe.database.mariadb.mysqlclient import frappe.database.mariadb.mysqlclient
@ -77,27 +93,7 @@ def get_command(
): ):
import frappe import frappe
if frappe.conf.db_type == "postgres": if frappe.conf.db_type == "mariadb":
if dump:
bin, bin_name = which("pg_dump"), "pg_dump"
else:
bin, bin_name = which("psql"), "psql"
if socket and password:
conn_string = f"postgresql://{user}:{password}@/{db_name}?host={socket}"
elif socket:
conn_string = f"postgresql://{user}@/{db_name}?host={socket}"
elif password:
conn_string = f"postgresql://{user}:{password}@{host}:{port}/{db_name}"
else:
conn_string = f"postgresql://{user}@{host}:{port}/{db_name}"
command = [conn_string]
if extra:
command.extend(extra)
else:
if dump: if dump:
bin, bin_name = which("mariadb-dump") or which("mysqldump"), "mariadb-dump" bin, bin_name = which("mariadb-dump") or which("mysqldump"), "mariadb-dump"
else: else:
@ -135,4 +131,28 @@ def get_command(
if extra: if extra:
command.extend(extra) command.extend(extra)
elif frappe.conf.db_type == "sqlite":
bin, bin_name = which("sqlite3"), "sqlite3"
command = []
else:
if dump:
bin, bin_name = which("pg_dump"), "pg_dump"
else:
bin, bin_name = which("psql"), "psql"
if socket and password:
conn_string = f"postgresql://{user}:{password}@/{db_name}?host={socket}"
elif socket:
conn_string = f"postgresql://{user}@/{db_name}?host={socket}"
elif password:
conn_string = f"postgresql://{user}:{password}@{host}:{port}/{db_name}"
else:
conn_string = f"postgresql://{user}@{host}:{port}/{db_name}"
command = [conn_string]
if extra:
command.extend(extra)
return bin, command, bin_name return bin, command, bin_name

View file

@ -13,7 +13,7 @@ from contextlib import contextmanager, suppress
from time import time from time import time
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any
from pypika.dialects import MySQLQueryBuilder, PostgreSQLQueryBuilder from pypika.dialects import MySQLQueryBuilder, PostgreSQLQueryBuilder, SQLLiteQueryBuilder
import frappe import frappe
import frappe.defaults import frappe.defaults
@ -44,6 +44,8 @@ if TYPE_CHECKING:
from psycopg2 import cursor as PostgresCursor from psycopg2 import cursor as PostgresCursor
from pymysql.connections import Connection as MariadbConnection from pymysql.connections import Connection as MariadbConnection
from pymysql.cursors import Cursor as MariadbCursor from pymysql.cursors import Cursor as MariadbCursor
from sqlite3 import Connection as SQLiteConnection
from sqlite3 import Cursor as SQLiteCursor
IFNULL_PATTERN = re.compile(r"ifnull\(", flags=re.IGNORECASE) IFNULL_PATTERN = re.compile(r"ifnull\(", flags=re.IGNORECASE)
INDEX_PATTERN = re.compile(r"\s*\([^)]+\)\s*") INDEX_PATTERN = re.compile(r"\s*\([^)]+\)\s*")
@ -128,8 +130,8 @@ class Database:
def connect(self): def connect(self):
"""Connects to a database as set in `site_config.json`.""" """Connects to a database as set in `site_config.json`."""
self._conn: MySQLdbConnection | MariadbConnection | PostgresConnection = self.get_connection() self._conn: MySQLdbConnection | MariadbConnection | PostgresConnection | SQLiteConnection = self.get_connection()
self._cursor: MySQLdbCursor | MariadbCursor | PostgresCursor = self._conn.cursor() self._cursor: MySQLdbCursor | MariadbCursor | PostgresCursor | SQLiteCursor = self._conn.cursor()
try: try:
if execution_timeout := get_query_execution_timeout(): if execution_timeout := get_query_execution_timeout():
@ -208,7 +210,7 @@ class Database:
{"name": "a%", "owner":"test@example.com"}) {"name": "a%", "owner":"test@example.com"})
""" """
if isinstance(query, MySQLQueryBuilder | PostgreSQLQueryBuilder): if isinstance(query, MySQLQueryBuilder | PostgreSQLQueryBuilder | SQLLiteQueryBuilder):
frappe.log("Use run method to execute SQL queries generated by Query Builder") frappe.log("Use run method to execute SQL queries generated by Query Builder")
debug = debug or getattr(self, "debug", False) debug = debug or getattr(self, "debug", False)
@ -1457,7 +1459,7 @@ class Database:
if ignore_duplicates: if ignore_duplicates:
# Pypika does not have same api for ignoring duplicates # Pypika does not have same api for ignoring duplicates
if frappe.conf.db_type == "mariadb": if frappe.conf.db_type in ("mariadb", "sqlite"):
query = query.ignore() query = query.ignore()
elif frappe.conf.db_type == "postgres": elif frappe.conf.db_type == "postgres":
query = query.on_conflict().do_nothing() query = query.on_conflict().do_nothing()

View file

@ -56,6 +56,7 @@ class Engine:
self.is_mariadb = db_type == "mariadb" self.is_mariadb = db_type == "mariadb"
self.is_postgres = db_type == "postgres" self.is_postgres = db_type == "postgres"
self.is_sqlite = db_type == "sqlite"
self.validate_filters = validate_filters self.validate_filters = validate_filters
if isinstance(table, Table): if isinstance(table, Table):

View file

View file

@ -0,0 +1,303 @@
import re
import sqlite3
from contextlib import contextmanager
import frappe
from frappe.database.database import Database
from frappe.database.sqlite.schema import SQLiteTable
from frappe.utils import UnicodeWithAttrs, cstr, get_datetime, get_table_name
_PARAM_COMP = re.compile(r"%\([\w]*\)s")
class SQLiteExceptionUtil:
	"""Classify ``sqlite3`` exceptions for Frappe's database layer.

	SQLite surfaces most failures as ``OperationalError`` with a descriptive
	message, so nearly every predicate here keys off the message text rather
	than a distinct exception class.
	"""

	# Exception aliases expected by the generic Database machinery.
	ProgrammingError = sqlite3.ProgrammingError
	TableMissingError = sqlite3.OperationalError
	OperationalError = sqlite3.OperationalError
	InternalError = sqlite3.InternalError
	SQLError = sqlite3.OperationalError
	DataError = sqlite3.DataError

	@staticmethod
	def _message_contains(e: sqlite3.Error, fragment: str) -> bool:
		# Shared implementation for all message-substring predicates.
		return fragment in str(e)

	@staticmethod
	def is_deadlocked(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "database is locked")

	@staticmethod
	def is_timedout(e: sqlite3.Error) -> bool:
		# SQLite reports lock waits and timeouts with the same message.
		return SQLiteExceptionUtil._message_contains(e, "database is locked")

	@staticmethod
	def is_read_only_mode_error(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "attempt to write a readonly database")

	@staticmethod
	def is_table_missing(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "no such table")

	@staticmethod
	def is_missing_column(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "no such column")

	@staticmethod
	def is_duplicate_fieldname(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "duplicate column name")

	@staticmethod
	def is_duplicate_entry(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "UNIQUE constraint failed")

	@staticmethod
	def is_access_denied(e: sqlite3.Error) -> bool:
		# NOTE(review): "access denied" is MySQL wording; it is unclear which
		# SQLite error this is meant to match — confirm against callers.
		return SQLiteExceptionUtil._message_contains(e, "access denied")

	@staticmethod
	def cant_drop_field_or_key(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "cannot drop")

	@staticmethod
	def is_syntax_error(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "syntax error")

	@staticmethod
	def is_statement_timeout(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "statement timeout")

	@staticmethod
	def is_data_too_long(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "string or blob too big")

	@staticmethod
	def is_db_table_size_limit(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "too many columns")

	@staticmethod
	def is_primary_key_violation(e: sqlite3.Error) -> bool:
		# SQLite reports primary-key and unique-key violations identically.
		return SQLiteExceptionUtil._message_contains(e, "UNIQUE constraint failed")

	@staticmethod
	def is_unique_key_violation(e: sqlite3.Error) -> bool:
		return SQLiteExceptionUtil._message_contains(e, "UNIQUE constraint failed")

	@staticmethod
	def is_interface_error(e: sqlite3.Error):
		# The only predicate that is type-based rather than message-based.
		return isinstance(e, sqlite3.InterfaceError)
class SQLiteDatabase(SQLiteExceptionUtil, Database):
	"""Frappe ``Database`` implementation backed by the stdlib ``sqlite3`` module.

	For SQLite the database "name" (``cur_db_name``) doubles as the file to
	connect to — presumably a path resolved against the working directory;
	confirm against site setup.
	"""

	REGEX_CHARACTER = "regexp"
	# SQLite is file-based: there is no server port and no enforced row-size cap.
	default_port = None
	MAX_ROW_SIZE_LIMIT = None

	def get_connection(self):
		"""Return a new connection in autocommit mode."""
		conn = self._get_connection()
		# isolation_level=None disables sqlite3's implicit transaction
		# management so transaction control stays explicit.
		conn.isolation_level = None
		return conn

	def _get_connection(self):
		"""Return SQLite connection object."""
		return self.create_connection()

	def create_connection(self):
		# Fixed: removed a leftover debug print of the connection settings and
		# the redundant second get_connection_settings() call.
		return sqlite3.connect(self.get_connection_settings())

	def set_execution_timeout(self, seconds: int):
		"""Set how long to wait on a locked database (milliseconds).

		SQLite has no per-statement timeout; ``busy_timeout`` is the closest
		equivalent.
		"""
		self.sql(f"PRAGMA busy_timeout = {int(seconds) * 1000}")

	def get_connection_settings(self) -> str:
		# The only connection "setting" for SQLite is the database file name.
		return self.cur_db_name

	def setup_type_map(self):
		"""Map Frappe fieldtypes onto SQLite storage classes.

		SQLite has no native date/time types, so temporal fields are stored
		as TEXT; no length component is needed for any type (hence ``None``).
		"""
		self.db_type = "sqlite"
		self.type_map = {
			"Currency": ("REAL", None),
			"Int": ("INTEGER", None),
			"Long Int": ("INTEGER", None),
			"Float": ("REAL", None),
			"Percent": ("REAL", None),
			"Check": ("INTEGER", None),
			"Small Text": ("TEXT", None),
			"Long Text": ("TEXT", None),
			"Code": ("TEXT", None),
			"Text Editor": ("TEXT", None),
			"Markdown Editor": ("TEXT", None),
			"HTML Editor": ("TEXT", None),
			"Date": ("TEXT", None),
			"Datetime": ("TEXT", None),
			"Time": ("TEXT", None),
			"Text": ("TEXT", None),
			"Data": ("TEXT", None),
			"Link": ("TEXT", None),
			"Dynamic Link": ("TEXT", None),
			"Password": ("TEXT", None),
			"Select": ("TEXT", None),
			"Rating": ("REAL", None),
			"Read Only": ("TEXT", None),
			"Attach": ("TEXT", None),
			"Attach Image": ("TEXT", None),
			"Signature": ("TEXT", None),
			"Color": ("TEXT", None),
			"Barcode": ("TEXT", None),
			"Geolocation": ("TEXT", None),
			"Duration": ("REAL", None),
			"Icon": ("TEXT", None),
			"Phone": ("TEXT", None),
			"Autocomplete": ("TEXT", None),
			"JSON": ("TEXT", None),
		}

	def get_database_size(self):
		"""Return database size in MB."""
		import os

		# assumes self.db_name is the path of the sqlite file — TODO confirm
		return os.path.getsize(self.db_name) / (1024 * 1024)

	def log_query(self, query, values, debug, explain):
		# Record and forward the last executed query for debug/explain output.
		self.last_query = query
		self._log_query(self.last_query, debug, explain, query)
		return self.last_query

	def _clean_up(self):
		# No per-transaction cleanup is needed for SQLite.
		pass

	@staticmethod
	def escape(s, percent=True):
		"""Escape quotes and percent in given string."""
		# Single quotes are escaped by doubling, per SQL string-literal rules.
		s = s.replace("'", "''")
		if percent:
			s = s.replace("%", "%%")
		return "'" + s + "'"

	@staticmethod
	def is_type_number(code):
		# Fixed: sqlite3 exposes no type-code constants (sqlite3.NUMERIC etc.
		# do not exist and raised AttributeError). Column types are reported
		# as declared type-name strings (e.g. by PRAGMA table_info), so
		# compare against those.
		return code in ("NUMERIC", "INTEGER", "REAL")

	@staticmethod
	def is_type_datetime(code):
		# Fixed: sqlite3.TEXT does not exist. Per setup_type_map, date/time
		# values are stored with a declared type of TEXT.
		return code == "TEXT"

	def rename_table(self, old_name: str, new_name: str) -> list | tuple:
		"""Rename a doctype table, translating doctype names to table names."""
		old_name = get_table_name(old_name)
		new_name = get_table_name(new_name)
		return self.sql(f"ALTER TABLE `{old_name}` RENAME TO `{new_name}`")

	def describe(self, doctype: str) -> list | tuple:
		"""Return column metadata for the doctype's table."""
		table_name = get_table_name(doctype)
		return self.sql(f"PRAGMA table_info(`{table_name}`)")

	def change_column_type(
		self, doctype: str, column: str, type: str, nullable: bool = False
	) -> list | tuple:
		raise NotImplementedError("SQLite does not support altering column types directly.")

	def rename_column(self, doctype: str, old_column_name, new_column_name):
		raise NotImplementedError("SQLite does not support renaming columns directly.")

	def create_auth_table(self):
		"""Create the password store used by frappe.utils.password."""
		self.sql_ddl(
			"""CREATE TABLE IF NOT EXISTS `__Auth` (
				`doctype` TEXT NOT NULL,
				`name` TEXT NOT NULL,
				`fieldname` TEXT NOT NULL,
				`password` TEXT NOT NULL,
				`encrypted` INTEGER NOT NULL DEFAULT 0,
				PRIMARY KEY (`doctype`, `name`, `fieldname`)
			)"""
		)

	def create_global_search_table(self):
		"""Create the full-text search table (FTS5 virtual table).

		Virtual tables have no IF NOT EXISTS shortcut here, so existence is
		checked via get_tables() first.
		"""
		if "__global_search" not in self.get_tables():
			self.sql(
				"""CREATE VIRTUAL TABLE __global_search USING FTS5(
				doctype,
				name,
				title,
				content,
				route,
				published
			)"""
			)

	def create_user_settings_table(self):
		"""Create the per-user, per-doctype settings store."""
		self.sql_ddl(
			"""CREATE TABLE IF NOT EXISTS __UserSettings (
			`user` TEXT NOT NULL,
			`doctype` TEXT NOT NULL,
			`data` TEXT,
			UNIQUE(user, doctype)
		)"""
		)

	@staticmethod
	def get_on_duplicate_update():
		return "ON CONFLICT DO UPDATE SET "

	def get_table_columns_description(self, table_name):
		"""Return list of columns with descriptions."""
		return self.sql(f"PRAGMA table_info(`{table_name}`)", as_dict=1)

	def get_column_type(self, doctype, column):
		"""Return column type from database."""
		table_name = get_table_name(doctype)
		result = self.sql(f"PRAGMA table_info(`{table_name}`)", as_dict=1)
		for row in result:
			if row["name"] == column:
				return row["type"]
		return None

	def has_index(self, table_name, index_name):
		"""Return True if an index named *index_name* exists on *table_name*.

		Fixed: the previous implementation returned the raw index list and
		never consulted *index_name*, so it was truthy whenever the table had
		any index at all — which made add_index() skip creation incorrectly.
		"""
		indexes = self.sql(f"PRAGMA index_list(`{table_name}`)", as_dict=True)
		return any(index["name"] == index_name for index in indexes)

	def get_column_index(self, table_name: str, fieldname: str, unique: bool = False) -> frappe._dict | None:
		"""Check if column exists for a specific fields in specified order."""
		indexes = self.sql(f"PRAGMA index_list(`{table_name}`)", as_dict=True)
		for index in indexes:
			# NOTE: only the first column of each index is compared; composite
			# indexes are matched by their leading column.
			index_info = self.sql(f"PRAGMA index_info(`{index['name']}`)", as_dict=True)
			if index_info and index_info[0]["name"] == fieldname:
				return index
		return None

	def add_index(self, doctype: str, fields: list, index_name: str | None = None):
		"""Creates an index with given fields if not already created."""
		index_name = index_name or self.get_index_name(fields)
		table_name = get_table_name(doctype)
		if not self.has_index(table_name, index_name):
			# Commit first: CREATE INDEX is DDL and should not ride along in
			# an open data transaction.
			self.commit()
			self.sql(f"CREATE INDEX `{index_name}` ON `{table_name}` ({', '.join(fields)})")

	def add_unique(self, doctype, fields, constraint_name=None):
		raise NotImplementedError("SQLite does not support adding unique constraints directly.")

	def updatedb(self, doctype, meta=None):
		"""Syncs a `DocType` to the table."""
		res = self.sql("SELECT issingle FROM `tabDocType` WHERE name=%s", (doctype,))
		if not res:
			raise Exception(f"Wrong doctype {doctype} in updatedb")
		# Single doctypes live in tabSingles and have no table of their own.
		if not res[0][0]:
			db_table = SQLiteTable(doctype, meta)
			db_table.validate()
			db_table.sync()
			self.commit()

	def get_database_list(self):
		# One file == one database; there is nothing else to enumerate.
		return [self.db_name]

	def get_tables(self, cached=True):
		"""Return list of tables."""
		to_query = not cached
		if cached:
			tables = frappe.cache.get_value("db_tables")
			to_query = not tables
		if to_query:
			tables = self.sql("SELECT name FROM sqlite_master WHERE type='table';", pluck=True)
			frappe.cache.set_value("db_tables", tables)
		return tables

	def get_row_size(self, doctype: str) -> int:
		"""Get estimated max row size of any table in bytes."""
		raise NotImplementedError("SQLite does not support getting row size directly.")

View file

@ -0,0 +1,320 @@
-- Core schema required to bootstrap the framework on SQLite.
-- To be called from install.py.
-- NOTE(review): "WNFramework" in the original header is the framework's
-- legacy name. Backtick identifier quoting is MySQL style but is accepted
-- by SQLite for compatibility, so reserved column names like `default`,
-- `unique`, `create` and `delete` below still parse.
--
-- Table structure for table `tabDocField`
--
DROP TABLE IF EXISTS `tabDocField`;
CREATE TABLE `tabDocField` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`parent` TEXT DEFAULT NULL,
`parentfield` TEXT DEFAULT NULL,
`parenttype` TEXT DEFAULT NULL,
`idx` INTEGER NOT NULL DEFAULT 0,
`fieldname` TEXT DEFAULT NULL,
`label` TEXT DEFAULT NULL,
`oldfieldname` TEXT DEFAULT NULL,
`fieldtype` TEXT DEFAULT NULL,
`oldfieldtype` TEXT DEFAULT NULL,
`options` TEXT,
`search_index` INTEGER NOT NULL DEFAULT 0,
`show_dashboard` INTEGER NOT NULL DEFAULT 0,
`hidden` INTEGER NOT NULL DEFAULT 0,
`set_only_once` INTEGER NOT NULL DEFAULT 0,
`allow_in_quick_entry` INTEGER NOT NULL DEFAULT 0,
`print_hide` INTEGER NOT NULL DEFAULT 0,
`report_hide` INTEGER NOT NULL DEFAULT 0,
`reqd` INTEGER NOT NULL DEFAULT 0,
`bold` INTEGER NOT NULL DEFAULT 0,
`in_global_search` INTEGER NOT NULL DEFAULT 0,
`collapsible` INTEGER NOT NULL DEFAULT 0,
`unique` INTEGER NOT NULL DEFAULT 0,
`no_copy` INTEGER NOT NULL DEFAULT 0,
`allow_on_submit` INTEGER NOT NULL DEFAULT 0,
`show_preview_popup` INTEGER NOT NULL DEFAULT 0,
`trigger` TEXT DEFAULT NULL,
`collapsible_depends_on` TEXT,
`mandatory_depends_on` TEXT,
`read_only_depends_on` TEXT,
`depends_on` TEXT,
`permlevel` INTEGER NOT NULL DEFAULT 0,
`ignore_user_permissions` INTEGER NOT NULL DEFAULT 0,
`width` TEXT DEFAULT NULL,
`print_width` TEXT DEFAULT NULL,
`columns` INTEGER NOT NULL DEFAULT 0,
`default` TEXT,
`description` TEXT,
`in_list_view` INTEGER NOT NULL DEFAULT 0,
`fetch_if_empty` INTEGER NOT NULL DEFAULT 0,
`in_filter` INTEGER NOT NULL DEFAULT 0,
`remember_last_selected_value` INTEGER NOT NULL DEFAULT 0,
`ignore_xss_filter` INTEGER NOT NULL DEFAULT 0,
`print_hide_if_no_value` INTEGER NOT NULL DEFAULT 0,
`allow_bulk_edit` INTEGER NOT NULL DEFAULT 0,
`in_standard_filter` INTEGER NOT NULL DEFAULT 0,
`in_preview` INTEGER NOT NULL DEFAULT 0,
`read_only` INTEGER NOT NULL DEFAULT 0,
`precision` TEXT DEFAULT NULL,
`max_height` TEXT DEFAULT NULL,
`length` INTEGER NOT NULL DEFAULT 0,
`translatable` INTEGER NOT NULL DEFAULT 0,
`hide_border` INTEGER NOT NULL DEFAULT 0,
`hide_days` INTEGER NOT NULL DEFAULT 0,
`hide_seconds` INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (`name`)
);
--
-- Table structure for table `tabDocPerm`
--
DROP TABLE IF EXISTS `tabDocPerm`;
CREATE TABLE `tabDocPerm` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`parent` TEXT DEFAULT NULL,
`parentfield` TEXT DEFAULT NULL,
`parenttype` TEXT DEFAULT NULL,
`idx` INTEGER NOT NULL DEFAULT 0,
`permlevel` INTEGER DEFAULT 0,
`role` TEXT DEFAULT NULL,
`match` TEXT DEFAULT NULL,
`read` INTEGER NOT NULL DEFAULT 1,
`write` INTEGER NOT NULL DEFAULT 1,
`create` INTEGER NOT NULL DEFAULT 1,
`submit` INTEGER NOT NULL DEFAULT 0,
`cancel` INTEGER NOT NULL DEFAULT 0,
`delete` INTEGER NOT NULL DEFAULT 1,
`amend` INTEGER NOT NULL DEFAULT 0,
`report` INTEGER NOT NULL DEFAULT 1,
`export` INTEGER NOT NULL DEFAULT 1,
`import` INTEGER NOT NULL DEFAULT 0,
`share` INTEGER NOT NULL DEFAULT 1,
`print` INTEGER NOT NULL DEFAULT 1,
`email` INTEGER NOT NULL DEFAULT 1,
PRIMARY KEY (`name`)
);
--
-- Table structure for table `tabDocType Action`
--
DROP TABLE IF EXISTS `tabDocType Action`;
CREATE TABLE `tabDocType Action` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`parent` TEXT DEFAULT NULL,
`parentfield` TEXT DEFAULT NULL,
`parenttype` TEXT DEFAULT NULL,
`idx` INTEGER NOT NULL DEFAULT 0,
`label` TEXT DEFAULT NULL,
`group` TEXT DEFAULT NULL,
`action_type` TEXT DEFAULT NULL,
`action` TEXT DEFAULT NULL,
PRIMARY KEY (`name`)
);
--
-- Table structure for table `tabDocType Link`
--
DROP TABLE IF EXISTS `tabDocType Link`;
CREATE TABLE `tabDocType Link` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`parent` TEXT DEFAULT NULL,
`parentfield` TEXT DEFAULT NULL,
`parenttype` TEXT DEFAULT NULL,
`idx` INTEGER NOT NULL DEFAULT 0,
`group` TEXT DEFAULT NULL,
`link_doctype` TEXT DEFAULT NULL,
`link_fieldname` TEXT DEFAULT NULL,
PRIMARY KEY (`name`)
);
--
-- Table structure for table `tabDocType`
--
-- NOTE(review): the `engine` column default 'InnoDB' is DocType metadata
-- carried over from the MariaDB schema; it is only a stored value here and
-- has no effect on SQLite storage.
--
DROP TABLE IF EXISTS `tabDocType`;
CREATE TABLE `tabDocType` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`idx` INTEGER NOT NULL DEFAULT 0,
`search_fields` TEXT DEFAULT NULL,
`issingle` INTEGER NOT NULL DEFAULT 0,
`is_virtual` INTEGER NOT NULL DEFAULT 0,
`is_tree` INTEGER NOT NULL DEFAULT 0,
`istable` INTEGER NOT NULL DEFAULT 0,
`editable_grid` INTEGER NOT NULL DEFAULT 1,
`track_changes` INTEGER NOT NULL DEFAULT 0,
`module` TEXT DEFAULT NULL,
`restrict_to_domain` TEXT DEFAULT NULL,
`app` TEXT DEFAULT NULL,
`autoname` TEXT DEFAULT NULL,
`naming_rule` TEXT DEFAULT NULL,
`title_field` TEXT DEFAULT NULL,
`image_field` TEXT DEFAULT NULL,
`timeline_field` TEXT DEFAULT NULL,
`sort_field` TEXT DEFAULT NULL,
`sort_order` TEXT DEFAULT NULL,
`description` TEXT,
`colour` TEXT DEFAULT NULL,
`read_only` INTEGER NOT NULL DEFAULT 0,
`in_create` INTEGER NOT NULL DEFAULT 0,
`menu_index` INTEGER DEFAULT NULL,
`parent_node` TEXT DEFAULT NULL,
`smallicon` TEXT DEFAULT NULL,
`allow_copy` INTEGER NOT NULL DEFAULT 0,
`allow_rename` INTEGER NOT NULL DEFAULT 0,
`allow_import` INTEGER NOT NULL DEFAULT 0,
`hide_toolbar` INTEGER NOT NULL DEFAULT 0,
`track_seen` INTEGER NOT NULL DEFAULT 0,
`max_attachments` INTEGER NOT NULL DEFAULT 0,
`print_outline` TEXT DEFAULT NULL,
`document_type` TEXT DEFAULT NULL,
`icon` TEXT DEFAULT NULL,
`color` TEXT DEFAULT NULL,
`tag_fields` TEXT DEFAULT NULL,
`subject` TEXT DEFAULT NULL,
`_last_update` TEXT DEFAULT NULL,
`engine` TEXT DEFAULT 'InnoDB',
`default_print_format` TEXT DEFAULT NULL,
`is_submittable` INTEGER NOT NULL DEFAULT 0,
`show_name_in_global_search` INTEGER NOT NULL DEFAULT 0,
`_user_tags` TEXT DEFAULT NULL,
`custom` INTEGER NOT NULL DEFAULT 0,
`beta` INTEGER NOT NULL DEFAULT 0,
`has_web_view` INTEGER NOT NULL DEFAULT 0,
`allow_guest_to_view` INTEGER NOT NULL DEFAULT 0,
`route` TEXT DEFAULT NULL,
`is_published_field` TEXT DEFAULT NULL,
`website_search_field` TEXT DEFAULT NULL,
`email_append_to` INTEGER NOT NULL DEFAULT 0,
`subject_field` TEXT DEFAULT NULL,
`sender_field` TEXT DEFAULT NULL,
`show_title_field_in_link` INTEGER NOT NULL DEFAULT 0,
`migration_hash` TEXT DEFAULT NULL,
`translated_doctype` INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (`name`)
);
--
-- Table structure for table `tabSeries` (naming-series counters)
--
DROP TABLE IF EXISTS `tabSeries`;
CREATE TABLE `tabSeries` (
`name` TEXT NOT NULL,
`current` INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY(`name`)
);
--
-- Table structure for table `tabSessions`
--
DROP TABLE IF EXISTS `tabSessions`;
CREATE TABLE `tabSessions` (
`user` TEXT DEFAULT NULL,
`sid` TEXT DEFAULT NULL,
`sessiondata` TEXT,
`ipaddress` TEXT DEFAULT NULL,
`lastupdate` TEXT DEFAULT NULL,
`status` TEXT DEFAULT NULL,
PRIMARY KEY (`sid`)
);
--
-- Table structure for table `tabSingles` (key/value store for single DocTypes)
--
DROP TABLE IF EXISTS `tabSingles`;
CREATE TABLE `tabSingles` (
`doctype` TEXT DEFAULT NULL,
`field` TEXT DEFAULT NULL,
`value` TEXT,
PRIMARY KEY (`doctype`, `field`)
);
--
-- Table structure for table `__Auth` (password store)
--
DROP TABLE IF EXISTS `__Auth`;
CREATE TABLE `__Auth` (
`doctype` TEXT NOT NULL,
`name` TEXT NOT NULL,
`fieldname` TEXT NOT NULL,
`password` TEXT NOT NULL,
`encrypted` INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (`doctype`, `name`, `fieldname`)
);
--
-- Table structure for table `tabFile`
--
DROP TABLE IF EXISTS `tabFile`;
CREATE TABLE `tabFile` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`parent` TEXT DEFAULT NULL,
`parentfield` TEXT DEFAULT NULL,
`parenttype` TEXT DEFAULT NULL,
`idx` INTEGER NOT NULL DEFAULT 0,
`file_name` TEXT DEFAULT NULL,
`file_url` TEXT DEFAULT NULL,
`module` TEXT DEFAULT NULL,
`attached_to_name` TEXT DEFAULT NULL,
`file_size` INTEGER NOT NULL DEFAULT 0,
`attached_to_doctype` TEXT DEFAULT NULL,
PRIMARY KEY (`name`)
);
--
-- Table structure for table `tabDefaultValue`
--
DROP TABLE IF EXISTS `tabDefaultValue`;
CREATE TABLE `tabDefaultValue` (
`name` TEXT NOT NULL,
`creation` TEXT DEFAULT NULL,
`modified` TEXT DEFAULT NULL,
`modified_by` TEXT DEFAULT NULL,
`owner` TEXT DEFAULT NULL,
`docstatus` INTEGER NOT NULL DEFAULT 0,
`parent` TEXT DEFAULT NULL,
`parentfield` TEXT DEFAULT NULL,
`parenttype` TEXT DEFAULT NULL,
`idx` INTEGER NOT NULL DEFAULT 0,
`defvalue` TEXT,
`defkey` TEXT DEFAULT NULL,
PRIMARY KEY (`name`)
);

View file

@ -0,0 +1,163 @@
from pymysql.constants.ER import DUP_ENTRY
import frappe
from frappe import _
from frappe.database.schema import DBTable
from frappe.utils.defaults import get_not_null_defaults
class SQLiteTable(DBTable):
def create(self):
	"""Create the database table for this DocType.

	NOTE(review): this body appears to be copied from the MariaDB schema
	builder largely unchanged. Several constructs below are MariaDB/MySQL
	syntax and look invalid for SQLite — confirm before relying on this:
	inline ``index …(…)`` entries inside the column list, and the trailing
	``ENGINE`` / ``ROW_FORMAT`` / ``CHARACTER SET`` / ``COLLATE`` clauses.
	SQLite indexes are normally created with separate CREATE INDEX
	statements.
	"""
	additional_definitions = []
	# `engine` is MariaDB metadata; it has no meaning for SQLite — TODO confirm
	engine = self.meta.get("engine") or "InnoDB"
	varchar_len = frappe.db.VARCHAR_LEN
	name_column = f"name varchar({varchar_len}) primary key"

	# columns
	column_defs = self.get_column_definitions()
	if column_defs:
		additional_definitions += column_defs

	# index
	index_defs = self.get_index_definitions()
	if index_defs:
		additional_definitions += index_defs

	# child table columns
	if self.meta.get("istable", default=0):
		additional_definitions += [
			f"parent varchar({varchar_len})",
			f"parentfield varchar({varchar_len})",
			f"parenttype varchar({varchar_len})",
			# NOTE(review): inline index definition — MySQL syntax, see above.
			"index parent(parent)",
		]
	else:
		# parent types
		additional_definitions.append("index creation(creation)")
		if self.meta.sort_field == "modified":
			# Support old doctype default by indexing it, also 2nd popular choice.
			additional_definitions.append("index modified(modified)")

	# creating sequence(s)
	if not self.meta.issingle and self.meta.autoname == "autoincrement":
		frappe.db.create_sequence(self.doctype, check_not_exists=True)

		# NOTE: not used nextval func as default as the ability to restore
		# database with sequences has bugs in mariadb and gives a scary error.
		# issue link: https://jira.mariadb.org/browse/MDEV-20070
		name_column = "name bigint primary key"
	elif not self.meta.issingle and self.meta.autoname == "UUID":
		name_column = "name uuid primary key"

	additional_definitions = ",\n".join(additional_definitions)

	# create table
	query = f"""create table `{self.table_name}` (
		{name_column},
		creation datetime(6),
		modified datetime(6),
		modified_by varchar({varchar_len}),
		owner varchar({varchar_len}),
		docstatus tinyint not null default '0',
		idx int not null default '0',
		{additional_definitions})
		ENGINE={engine}
		ROW_FORMAT=DYNAMIC
		CHARACTER SET=utf8mb4
		COLLATE=utf8mb4_unicode_ci"""

	frappe.db.sql_ddl(query)
def alter(self):
for col in self.columns.values():
col.build_for_alter_table(self.current_columns.get(col.fieldname.lower()))
add_column_query = [f"ADD COLUMN `{col.fieldname}` {col.get_definition()}" for col in self.add_column]
columns_to_modify = set(self.change_type + self.set_default + self.change_nullability)
modify_column_query = [
f"MODIFY `{col.fieldname}` {col.get_definition(for_modification=True)}"
for col in columns_to_modify
]
if alter_pk := self.alter_primary_key():
modify_column_query.append(alter_pk)
modify_column_query.extend(
[f"ADD UNIQUE INDEX IF NOT EXISTS {col.fieldname} (`{col.fieldname}`)" for col in self.add_unique]
)
add_index_query = [
f"ADD INDEX `{col.fieldname}_index`(`{col.fieldname}`)"
for col in self.add_index
if not frappe.db.get_column_index(self.table_name, col.fieldname, unique=False)
]
if self.meta.sort_field == "modified" and not frappe.db.get_column_index(
self.table_name, "modified", unique=False
):
add_index_query.append("ADD INDEX `modified`(`modified`)")
drop_index_query = []
for col in {*self.drop_index, *self.drop_unique}:
if col.fieldname == "name":
continue
current_column = self.current_columns.get(col.fieldname.lower())
unique_constraint_changed = current_column.unique != col.unique
if unique_constraint_changed and not col.unique:
if unique_index := frappe.db.get_column_index(self.table_name, col.fieldname, unique=True):
drop_index_query.append(f"DROP INDEX `{unique_index.Key_name}`")
index_constraint_changed = current_column.index != col.set_index
if index_constraint_changed and not col.set_index:
if index_record := frappe.db.get_column_index(self.table_name, col.fieldname, unique=False):
drop_index_query.append(f"DROP INDEX `{index_record.Key_name}`")
for col in self.change_nullability:
if col.not_nullable:
try:
table = frappe.qb.DocType(self.doctype)
frappe.qb.update(table).set(
col.fieldname, col.default or get_not_null_defaults(col.fieldtype)
).where(table[col.fieldname].isnull()).run()
except Exception:
print(f"Failed to update data in {self.table_name} for {col.fieldname}")
raise
try:
for query_parts in [add_column_query, modify_column_query, add_index_query, drop_index_query]:
if query_parts:
query_body = ", ".join(query_parts)
query = f"ALTER TABLE `{self.table_name}` {query_body}"
# nosemgrep
frappe.db.sql_ddl(query)
except Exception as e:
if query := locals().get("query"): # this weirdness is to avoid potentially unbounded vars
print(f"Failed to alter schema using query: {query}")
if e.args[0] == DUP_ENTRY:
fieldname = str(e).split("'")[-2]
frappe.throw(
_(
"{0} field cannot be set as unique in {1}, as there are non-unique existing values"
).format(fieldname, self.table_name)
)
raise
def alter_primary_key(self) -> str | None:
# If there are no values in table allow migrating to UUID from varchar
autoname = self.meta.autoname
if autoname == "UUID" and frappe.db.get_column_type(self.doctype, "name") != "uuid":
if not frappe.db.get_value(self.doctype, {}, order_by=None):
return "modify name uuid"
else:
frappe.throw(
_("Primary key of doctype {0} can not be changed as there are existing values.").format(
self.doctype
)
)
# Reverting from UUID to VARCHAR
if autoname != "UUID" and frappe.db.get_column_type(self.doctype, "name") == "uuid":
return f"modify name varchar({frappe.db.VARCHAR_LEN})"

View file

@ -0,0 +1,63 @@
import os
from pathlib import Path
import click
import frappe
from frappe.database.db_manager import DbManager
def get_sqlite_version() -> str:
	"""Return the version string of the underlying SQLite library."""
	result = frappe.db.sql("select sqlite_version()")
	return result[0][0]
def setup_database(force, verbose):
	"""Prepare the database for a new site.

	SQLite needs no server-side database or user creation, so this only
	opens (and immediately closes) a root connection. ``force`` and
	``verbose`` are unused here, kept for interface parity with the other
	database backends.
	"""
	frappe.local.session = frappe._dict({"user": "Administrator"})
	get_root_connection().close()
def bootstrap_database(verbose, source_sql=None):
	"""Load the framework schema into a freshly created SQLite site.

	Imports *source_sql* (or the bundled framework dump) and then verifies
	the restore by checking for the core ``tabDefaultValue`` table,
	exiting the process with an error message if it is missing.
	"""
	import sys

	frappe.connect()
	import_db_from_sql(source_sql, verbose)

	# reconnect so the connection/table cache reflects the imported schema
	frappe.connect()
	if "tabDefaultValue" not in frappe.db.get_tables(cached=False):
		# click is imported at module level; message fixed to say SQLite,
		# since that is the engine this module restores into
		click.secho(
			"Table 'tabDefaultValue' missing in the restored site. "
			"This happens when the backup fails to restore. Please check that the file is valid\n"
			"Do go through the above output to check the exact error message from SQLite",
			fg="red",
		)
		sys.exit(1)
def import_db_from_sql(source_sql=None, verbose=False):
	"""Restore *source_sql* into the site database.

	Falls back to the framework schema dump shipped alongside this module
	when no SQL file is given.
	"""
	if verbose:
		print("Starting database import...")

	if not source_sql:
		source_sql = os.path.join(os.path.dirname(__file__), "framework_sqlite.sql")

	DbManager(frappe.local.db).restore_database(
		verbose, frappe.conf.db_name, source_sql, frappe.conf.db_user, frappe.conf.db_password
	)

	if verbose:
		print("Imported from database {}".format(source_sql))
def drop_database(db_name: str):
	"""Remove the SQLite database file of a site; a missing file is a no-op."""
	db_file = Path(db_name)
	if db_file.exists():
		db_file.unlink()
def get_root_connection():
	"""Open a connection to the site database and stash it on local flags."""
	flags = frappe.local.flags
	flags.root_connection = frappe.database.get_db(cur_db_name=frappe.conf.db_name)
	return flags.root_connection

View file

@ -6,11 +6,11 @@ import string
from functools import cached_property, wraps from functools import cached_property, wraps
import frappe import frappe
from frappe.query_builder.builder import MariaDB, Postgres from frappe.query_builder.builder import MariaDB, Postgres, SQLite
from frappe.query_builder.functions import Function from frappe.query_builder.functions import Function
from frappe.types import DocRef from frappe.types import DocRef
Query = str | MariaDB | Postgres Query = str | MariaDB | Postgres | SQLite
QueryValues = tuple | list | dict | None QueryValues = tuple | list | dict | None
FilterValue = DocRef | str | int | bool FilterValue = DocRef | str | int | bool

View file

@ -583,16 +583,20 @@ def make_site_config(
if db_type: if db_type:
site_config["db_type"] = db_type site_config["db_type"] = db_type
if db_socket: if db_type == "sqlite":
site_config["db_socket"] = db_socket site_config["db_name"] = db_name
if db_host: else:
site_config["db_host"] = db_host if db_socket:
site_config["db_socket"] = db_socket
if db_port: if db_host:
site_config["db_port"] = db_port site_config["db_host"] = db_host
site_config["db_user"] = db_user or db_name if db_port:
site_config["db_port"] = db_port
site_config["db_user"] = db_user or db_name
with open(site_file, "w") as f: with open(site_file, "w") as f:
f.write(json.dumps(site_config, indent=1, sort_keys=True)) f.write(json.dumps(site_config, indent=1, sort_keys=True))

View file

@ -1,8 +1,8 @@
import types import types
import typing import typing
from pypika import MySQLQuery, Order, PostgreSQLQuery, terms from pypika import MySQLQuery, Order, PostgreSQLQuery, SQLLiteQuery, terms
from pypika.dialects import MySQLQueryBuilder, PostgreSQLQueryBuilder from pypika.dialects import MySQLQueryBuilder, PostgreSQLQueryBuilder, SQLLiteQueryBuilder
from pypika.queries import QueryBuilder, Schema, Table from pypika.queries import QueryBuilder, Schema, Table
from pypika.terms import Function from pypika.terms import Function
@ -97,3 +97,17 @@ class Postgres(Base, PostgreSQLQuery):
table = cls.DocType(table) table = cls.DocType(table)
return super().from_(table, *args, **kwargs) return super().from_(table, *args, **kwargs)
class SQLite(Base, SQLLiteQuery):
_BuilderClasss = SQLLiteQueryBuilder
@classmethod
def _builder(cls, *args, **kwargs) -> "SQLLiteQueryBuilder":
return super()._builder(*args, wrapper_cls=ParameterizedValueWrapper, **kwargs)
@classmethod
def from_(cls, table, *args, **kwargs):
if isinstance(table, str):
table = cls.DocType(table)
return super().from_(table, *args, **kwargs)

View file

@ -10,7 +10,7 @@ from pypika.terms import PseudoColumn
import frappe import frappe
from frappe.query_builder.terms import NamedParameterWrapper from frappe.query_builder.terms import NamedParameterWrapper
from .builder import Base, MariaDB, Postgres from .builder import Base, MariaDB, Postgres, SQLite
class PseudoColumnMapper(PseudoColumn): class PseudoColumnMapper(PseudoColumn):
@ -26,6 +26,7 @@ class PseudoColumnMapper(PseudoColumn):
class db_type_is(Enum): class db_type_is(Enum):
MARIADB = "mariadb" MARIADB = "mariadb"
POSTGRES = "postgres" POSTGRES = "postgres"
SQLITE = "sqlite"
class ImportMapper: class ImportMapper:
@ -42,14 +43,14 @@ class BuilderIdentificationFailed(Exception):
super().__init__("Couldn't guess builder") super().__init__("Couldn't guess builder")
def get_query_builder(type_of_db: str) -> Postgres | MariaDB: def get_query_builder(type_of_db: str) -> Postgres | MariaDB | SQLite:
"""Return the query builder object. """Return the query builder object.
Args: Args:
type_of_db: string value of the db used type_of_db: string value of the db used
""" """
db = db_type_is(type_of_db) db = db_type_is(type_of_db)
picks = {db_type_is.MARIADB: MariaDB, db_type_is.POSTGRES: Postgres} picks = {db_type_is.MARIADB: MariaDB, db_type_is.POSTGRES: Postgres, db_type_is.SQLITE: SQLite}
return picks[db] return picks[db]