From fabcd4e0bf858369484adc184bf6184f6c67a61d Mon Sep 17 00:00:00 2001
From: Matias Palma
Date: Mon, 20 Apr 2026 22:31:49 -0400
Subject: [PATCH] improvements: cap read_file at 10 MiB to avoid
 multi-hundred-MB string round-trips
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

read_file returned the entire file body as a String with no size gate.
Opening a 200 MB log pipes ~500+ MB through: disk read → owned String
allocation → serde_json encode → IPC → JS string decode → Monaco buffer,
which is enough to OOM the renderer on low-memory machines and causes a
visible freeze on anything bigger.

Fix: stat the file first and reject up front when the length exceeds
10 MiB (READ_FILE_MAX_BYTES). 10 MiB is well above any source file the
Monaco editor, AI chat context window, or git-explorer diff viewer
actually handle well, and every in-tree caller reads small text
(package.json, skill/index MD, workspace files opened from the tree), so
legitimate usage is unchanged. Consumers that genuinely need large files
(future streaming viewer, log tailer) are expected to use a separate
chunked Tauri command rather than lifting this cap.

The error message includes the actual file size and the limit so the
frontend can surface an actionable message to the user.
---
 apps/desktop/src-tauri/src/lib.rs | 25 +++++++++++++++++++++++++
 1 file changed, 25 insertions(+)

diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs
index fc2e4e5b..f84992fe 100644
--- a/apps/desktop/src-tauri/src/lib.rs
+++ b/apps/desktop/src-tauri/src/lib.rs
@@ -164,8 +164,33 @@ fn read_directory(path: String) -> Result<Vec<FileEntry>, String> {
     Ok(entries)
 }
 
+/// Upper bound for `read_file`. The Monaco editor, the AI-chat context window
+/// and the IPC serialization path all struggle with multi-hundred-MB strings,
+/// so anything above 10 MiB is rejected with a clear error instead of being
+/// buffered into a `String` and marshaled across the Tauri bridge. Consumers
+/// that genuinely need large files (future streaming viewer, log tailer) are
+/// expected to use a separate chunked command rather than lifting this cap.
+const READ_FILE_MAX_BYTES: u64 = 10 * 1024 * 1024;
+
 #[tauri::command]
 fn read_file(path: String) -> Result<String, String> {
+    let metadata = fs::metadata(&path).map_err(|e| {
+        let err = format!("Failed to stat file {}: {}", path, e);
+        error!("{}", err);
+        err
+    })?;
+
+    if metadata.len() > READ_FILE_MAX_BYTES {
+        let err = format!(
+            "File {} is {} bytes, which exceeds the {}-byte read_file limit; use a streaming reader for files this large",
+            path,
+            metadata.len(),
+            READ_FILE_MAX_BYTES
+        );
+        error!("{}", err);
+        return Err(err);
+    }
+
     fs::read_to_string(&path).map_err(|e| {
         let err = format!("Failed to read file {}: {}", path, e);
         error!("{}", err);