Merge pull request #37888 from safwansamsudeen/chunking-support
feat: add chunking support
This commit is contained in:
commit
cbef975127
6 changed files with 132 additions and 97 deletions
|
|
@ -151,9 +151,10 @@ def get_letter_heads():
|
|||
|
||||
|
||||
def load_conf_settings(bootinfo):
	"""Copy whitelisted site-config values onto the bootinfo payload sent to the client."""
	# Local import — presumably avoids a circular import at module load time; TODO confirm.
	from frappe.core.api.file import get_file_chunk_size, get_max_file_size

	bootinfo.max_file_size = get_max_file_size()
	# Chunk size the client-side uploader uses to split large files.
	bootinfo.file_chunk_size = get_file_chunk_size()
	for key in ("developer_mode", "socketio_port", "file_watcher_port"):
		if key in frappe.conf:
			bootinfo[key] = frappe.conf.get(key)
|
||||
|
|
|
|||
|
|
@ -90,6 +90,10 @@ def get_max_file_size() -> int:
|
|||
)
|
||||
|
||||
|
||||
def get_file_chunk_size() -> int:
	"""Return the upload chunk size in bytes.

	Reads the ``file_chunk_size`` site-config value; falls back to 25 MiB
	when the key is missing, zero, or not an integer.
	"""
	configured = cint(frappe.conf.get("file_chunk_size"))
	default_size = 25 * 1024 * 1024  # 25 MiB
	return configured or default_size
|
||||
|
||||
|
||||
@frappe.whitelist()
|
||||
def create_new_folder(file_name: str, folder: str) -> File:
|
||||
"""create new folder under current parent folder"""
|
||||
|
|
|
|||
|
|
@ -751,7 +751,7 @@ class File(Document):
|
|||
return self.save_file_on_filesystem()
|
||||
|
||||
def save_file_on_filesystem(self):
|
||||
safe_file_name = re.sub(r"[/\\%?#]", "_", self.file_name)
|
||||
safe_file_name = get_safe_file_name(self.file_name)
|
||||
if self.is_private:
|
||||
self.file_url = f"/private/files/{safe_file_name}"
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -476,3 +476,7 @@ def find_file_by_url(path: str, name: str | None = None) -> "File" | None:
|
|||
file: File = frappe.get_doc(doctype="File", **file_data)
|
||||
if file.is_downloadable():
|
||||
return file
|
||||
|
||||
|
||||
def get_safe_file_name(file_name: str) -> str:
	"""Return *file_name* with path/URL-significant characters replaced.

	Each occurrence of ``/``, ``\\``, ``%``, ``?`` or ``#`` becomes an
	underscore so the name is safe to embed in a file URL.
	"""
	unsafe_chars = "/\\%?#"
	table = str.maketrans(unsafe_chars, "_" * len(unsafe_chars))
	return file_name.translate(table)
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
|
||||
import os
|
||||
from mimetypes import guess_type
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from werkzeug.wrappers import Response
|
||||
|
|
@ -11,11 +12,11 @@ import frappe
|
|||
import frappe.sessions
|
||||
import frappe.utils
|
||||
from frappe import _, is_whitelisted, ping
|
||||
from frappe.core.doctype.file.utils import find_file_by_url
|
||||
from frappe.core.doctype.file.utils import find_file_by_url, get_safe_file_name
|
||||
from frappe.core.doctype.server_script.server_script_utils import get_server_script_map
|
||||
from frappe.monitor import add_data_to_monitor
|
||||
from frappe.permissions import check_doctype_permission
|
||||
from frappe.utils import cint
|
||||
from frappe.utils import cint, get_files_path
|
||||
from frappe.utils.csvutils import build_csv_response
|
||||
from frappe.utils.deprecations import deprecated
|
||||
from frappe.utils.image import optimize_image
|
||||
|
|
@ -162,9 +163,27 @@ def upload_file():
|
|||
|
||||
if "file" in files:
|
||||
file = files["file"]
|
||||
content = file.stream.read()
|
||||
filename = file.filename
|
||||
|
||||
total_file_size = frappe.form_dict.total_file_size
|
||||
if frappe.form_dict.chunk_index:
|
||||
current_chunk = int(frappe.form_dict.chunk_index)
|
||||
total_chunks = int(frappe.form_dict.total_chunk_count)
|
||||
offset = int(frappe.form_dict.chunk_byte_offset)
|
||||
else:
|
||||
offset = 0
|
||||
current_chunk = 0
|
||||
total_chunks = 1
|
||||
|
||||
temp_path = Path(get_files_path(".temp-" + get_safe_file_name(filename), is_private=is_private))
|
||||
with temp_path.open("ab") as f:
|
||||
f.seek(offset)
|
||||
f.write(file.stream.read())
|
||||
if not f.tell() >= int(total_file_size) or current_chunk != total_chunks - 1:
|
||||
return
|
||||
|
||||
content = temp_path.read_bytes()
|
||||
temp_path.unlink()
|
||||
content_type = guess_type(filename)[0]
|
||||
if optimize and content_type and content_type.startswith("image/"):
|
||||
args = {"content": content, "content_type": content_type}
|
||||
|
|
|
|||
|
|
@ -568,57 +568,66 @@ function return_as_dataurl() {
|
|||
close_dialog.value = true;
|
||||
return Promise.all(promises);
|
||||
}
|
||||
function upload_file(file, i) {
|
||||
async function upload_file(file, i) {
|
||||
currently_uploading.value = i;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let xhr = new XMLHttpRequest();
|
||||
xhr.upload.addEventListener("loadstart", (e) => {
|
||||
file.uploading = true;
|
||||
});
|
||||
xhr.upload.addEventListener("progress", (e) => {
|
||||
if (e.lengthComputable) {
|
||||
file.progress = e.loaded;
|
||||
file.total = e.total;
|
||||
}
|
||||
});
|
||||
xhr.upload.addEventListener("load", (e) => {
|
||||
file.uploading = false;
|
||||
});
|
||||
xhr.addEventListener("error", (e) => {
|
||||
file.failed = true;
|
||||
reject();
|
||||
});
|
||||
xhr.onreadystatechange = () => {
|
||||
if (xhr.readyState == XMLHttpRequest.DONE) {
|
||||
const CHUNK_SIZE = frappe.boot.file_chunk_size;
|
||||
|
||||
const use_chunks = file.file_obj && file.file_obj.size > CHUNK_SIZE;
|
||||
const total_chunks = use_chunks ? Math.ceil(file.file_obj.size / CHUNK_SIZE) : 1;
|
||||
|
||||
const send_chunk = (chunk_blob, chunk_index, chunk_byte_offset) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
let xhr = new XMLHttpRequest();
|
||||
|
||||
xhr.upload.addEventListener("loadstart", () => {
|
||||
file.uploading = true;
|
||||
});
|
||||
xhr.upload.addEventListener("progress", (e) => {
|
||||
if (e.lengthComputable) {
|
||||
file.progress = chunk_byte_offset + e.loaded;
|
||||
file.total = file.file_obj?.size || e.total;
|
||||
}
|
||||
});
|
||||
xhr.upload.addEventListener("load", () => {
|
||||
if (chunk_index === total_chunks - 1) {
|
||||
file.uploading = false;
|
||||
}
|
||||
});
|
||||
xhr.addEventListener("error", () => {
|
||||
file.failed = true;
|
||||
reject();
|
||||
});
|
||||
xhr.onreadystatechange = () => {
|
||||
if (xhr.readyState !== XMLHttpRequest.DONE) return;
|
||||
|
||||
if (xhr.status === 200) {
|
||||
resolve();
|
||||
file.request_succeeded = true;
|
||||
let r = null;
|
||||
let file_doc = null;
|
||||
try {
|
||||
r = JSON.parse(xhr.responseText);
|
||||
if (r.message.doctype === "File") {
|
||||
file_doc = r.message;
|
||||
// Only the last chunk returns a meaningful response
|
||||
if (chunk_index === total_chunks - 1) {
|
||||
file.request_succeeded = true;
|
||||
let r = null;
|
||||
let file_doc = null;
|
||||
try {
|
||||
r = JSON.parse(xhr.responseText);
|
||||
if (r.message?.doctype === "File") {
|
||||
file_doc = r.message;
|
||||
}
|
||||
} catch (e) {
|
||||
r = xhr.responseText;
|
||||
}
|
||||
} catch (e) {
|
||||
r = xhr.responseText;
|
||||
}
|
||||
|
||||
file.doc = file_doc;
|
||||
|
||||
if (props.on_success) {
|
||||
props.on_success(file_doc, r);
|
||||
}
|
||||
|
||||
if (
|
||||
i == files.value.length - 1 &&
|
||||
files.value.every((file) => file.request_succeeded)
|
||||
) {
|
||||
close_dialog.value = true;
|
||||
}
|
||||
if (show_web_link.value && file.file_url) {
|
||||
close_dialog.value = true;
|
||||
file.doc = file_doc;
|
||||
if (props.on_success) {
|
||||
props.on_success(file_doc, r);
|
||||
}
|
||||
if (
|
||||
(i == files.value.length - 1 &&
|
||||
files.value.every((f) => f.request_succeeded)) ||
|
||||
(show_web_link.value && file.file_url)
|
||||
) {
|
||||
close_dialog.value = true;
|
||||
}
|
||||
}
|
||||
} else if (xhr.status === 403) {
|
||||
reject();
|
||||
|
|
@ -669,60 +678,58 @@ function upload_file(file, i) {
|
|||
}
|
||||
frappe.request.cleanup({}, error);
|
||||
}
|
||||
};
|
||||
|
||||
xhr.open("POST", "/api/method/upload_file", true);
|
||||
xhr.setRequestHeader("Accept", "application/json");
|
||||
xhr.setRequestHeader("X-Frappe-CSRF-Token", frappe.csrf_token);
|
||||
|
||||
let form_data = new FormData();
|
||||
|
||||
if (chunk_blob) {
|
||||
form_data.append("file", chunk_blob, file.name);
|
||||
}
|
||||
};
|
||||
xhr.open("POST", "/api/method/upload_file", true);
|
||||
xhr.setRequestHeader("Accept", "application/json");
|
||||
xhr.setRequestHeader("X-Frappe-CSRF-Token", frappe.csrf_token);
|
||||
|
||||
let form_data = new FormData();
|
||||
if (file.file_obj) {
|
||||
form_data.append("file", file.file_obj, file.name);
|
||||
}
|
||||
form_data.append("is_private", +file.private);
|
||||
form_data.append("folder", props.folder);
|
||||
form_data.append("is_private", +file.private);
|
||||
form_data.append("folder", props.folder);
|
||||
form_data.append("total_file_size", file.file_obj?.size ?? 0);
|
||||
|
||||
if (file.file_url) {
|
||||
form_data.append("file_url", file.file_url);
|
||||
}
|
||||
if (file.file_size) {
|
||||
form_data.append("file_size", file.file_size);
|
||||
}
|
||||
if (file.file_name) {
|
||||
form_data.append("file_name", file.file_name);
|
||||
}
|
||||
if (file.library_file_name) {
|
||||
form_data.append("library_file_name", file.library_file_name);
|
||||
}
|
||||
if (use_chunks) {
|
||||
form_data.append("chunk_index", chunk_index);
|
||||
form_data.append("total_chunk_count", total_chunks);
|
||||
form_data.append("chunk_byte_offset", chunk_byte_offset);
|
||||
}
|
||||
|
||||
if (props.doctype) {
|
||||
form_data.append("doctype", props.doctype);
|
||||
}
|
||||
if (file.file_url) form_data.append("file_url", file.file_url);
|
||||
if (file.file_size) form_data.append("file_size", file.file_size);
|
||||
if (file.file_name) form_data.append("file_name", file.file_name);
|
||||
if (file.library_file_name)
|
||||
form_data.append("library_file_name", file.library_file_name);
|
||||
if (props.doctype) form_data.append("doctype", props.doctype);
|
||||
if (props.docname) form_data.append("docname", props.docname);
|
||||
if (props.fieldname) form_data.append("fieldname", props.fieldname);
|
||||
if (props.method) form_data.append("method", props.method);
|
||||
if (file.optimize) form_data.append("optimize", true);
|
||||
if (props.attach_doc_image) {
|
||||
form_data.append("max_width", 200);
|
||||
form_data.append("max_height", 200);
|
||||
}
|
||||
|
||||
if (props.docname) {
|
||||
form_data.append("docname", props.docname);
|
||||
}
|
||||
xhr.send(form_data);
|
||||
});
|
||||
};
|
||||
|
||||
if (props.fieldname) {
|
||||
form_data.append("fieldname", props.fieldname);
|
||||
}
|
||||
|
||||
if (props.method) {
|
||||
form_data.append("method", props.method);
|
||||
}
|
||||
|
||||
if (file.optimize) {
|
||||
form_data.append("optimize", true);
|
||||
}
|
||||
|
||||
if (props.attach_doc_image) {
|
||||
form_data.append("max_width", 200);
|
||||
form_data.append("max_height", 200);
|
||||
}
|
||||
|
||||
xhr.send(form_data);
|
||||
});
|
||||
// Slice and send chunks sequentially
|
||||
let chunk_byte_offset = 0;
|
||||
for (let chunk_index = 0; chunk_index < total_chunks; chunk_index++) {
|
||||
const chunk_blob = file.file_obj
|
||||
? file.file_obj.slice(chunk_byte_offset, chunk_byte_offset + CHUNK_SIZE)
|
||||
: null;
|
||||
await send_chunk(chunk_blob, chunk_index, chunk_byte_offset);
|
||||
chunk_byte_offset += CHUNK_SIZE;
|
||||
}
|
||||
}
|
||||
|
||||
function parse_error_response(response_text) {
|
||||
let error_message = "";
|
||||
let server_messages = [];
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue