@@ -593,6 +627,10 @@
onExport={() => {
exportHandler();
}}
+ onCreateSub={() => {
+ createSubFolderParentId = folderId;
+ showCreateSubFolderModal = true;
+ }}
>
diff --git a/src/lib/components/workspace/Models/ModelEditor.svelte b/src/lib/components/workspace/Models/ModelEditor.svelte
index 1966311bef..6f2c573acf 100644
--- a/src/lib/components/workspace/Models/ModelEditor.svelte
+++ b/src/lib/components/workspace/Models/ModelEditor.svelte
@@ -230,7 +230,11 @@
}
info.params.system = system.trim() === '' ? null : system;
- info.params.stop = params.stop ? params.stop.split(',').filter((s) => s.trim()) : null;
+ info.params.stop = params.stop
+ ? (typeof params.stop === 'string' ? params.stop.split(',') : params.stop).filter((s) =>
+ s.trim()
+ )
+ : null;
Object.keys(info.params).forEach((key) => {
if (info.params[key] === '' || info.params[key] === null) {
delete info.params[key];
diff --git a/src/lib/components/workspace/Skills/SkillEditor.svelte b/src/lib/components/workspace/Skills/SkillEditor.svelte
index 62927b142b..7537ec45e7 100644
--- a/src/lib/components/workspace/Skills/SkillEditor.svelte
+++ b/src/lib/components/workspace/Skills/SkillEditor.svelte
@@ -252,15 +252,15 @@
{#if !disabled}
{/if}
diff --git a/src/lib/components/workspace/common/ValvesModal.svelte b/src/lib/components/workspace/common/ValvesModal.svelte
index 577ce2eb04..16f3af4b6b 100644
--- a/src/lib/components/workspace/common/ValvesModal.svelte
+++ b/src/lib/components/workspace/common/ValvesModal.svelte
@@ -184,7 +184,7 @@
+
{/if}
diff --git a/src/lib/i18n/locales/en-US/translation.json b/src/lib/i18n/locales/en-US/translation.json
index 2f81884b91..96ce2b4717 100644
--- a/src/lib/i18n/locales/en-US/translation.json
+++ b/src/lib/i18n/locales/en-US/translation.json
@@ -192,6 +192,7 @@
"Are you sure you want to delete this channel?": "",
"Are you sure you want to delete this message?": "",
"Are you sure you want to delete this version? Child versions will be relinked to this version's parent.": "",
+ "Are you sure you want to delete this?": "",
"Are you sure you want to unarchive all archived chats?": "",
"Arena Models": "",
"Artifacts": "",
@@ -296,6 +297,7 @@
"Charge Amount Control": "",
"Chart new frontiers": "",
"Chat": "",
+ "Chat archived.": "",
"Chat Background Image": "",
"Chat Bubble UI": "",
"Chat Completions": "",
@@ -521,6 +523,7 @@
"Delete": "",
"Delete {{count}} Logs Successfully_one": "",
"Delete {{count}} Logs Successfully_other": "",
+ "Delete {{name}}": "",
"Delete a model": "",
"Delete All": "",
"Delete All Chats": "",
@@ -615,6 +618,7 @@
"Downloading stats...": "",
"Draw": "",
"Drop any files here to upload": "",
+ "Drop files here": "",
"Drop files here to upload": "",
"DuckDuckGo": "",
"e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.": "",
@@ -878,6 +882,7 @@
"Fade Effect for Streaming Text": "",
"Failed to add file.": "",
"Failed to add members": "",
+ "Failed to archive chat.": "",
"Failed to attach file": "",
"Failed to clear status": "",
"Failed to connect to {{URL}} OpenAPI tool server": "",
@@ -892,9 +897,11 @@
"Failed to generate title": "",
"Failed to import models": "",
"Failed to load chat preview": "",
+ "Failed to load DOCX file. Please try downloading it instead.": "",
"Failed to load Excel/CSV file. Please try downloading it instead.": "",
"Failed to load file content.": "",
"Failed to load Interface settings": "",
+ "Failed to load PPTX file. Please try downloading it instead.": "",
"Failed to move chat": "",
"Failed to process URL: {{url}}": "",
"Failed to read clipboard contents": "",
@@ -950,6 +957,7 @@
"Focus Chat Input": "",
"Folder": "",
"Folder Background Image": "",
+ "Folder created successfully": "",
"Folder deleted successfully": "",
"Folder Max File Count": "",
"Folder name": "",
@@ -1375,11 +1383,13 @@
"No file selected": "",
"No files found": "",
"No files in this knowledge base.": "",
+ "No files yet. Upload files or run Python code to create them.": "",
"No functions found": "",
"No groups found": "",
"No history available": "",
"No HTML, CSS, or JavaScript content found.": "",
"No inference engine with management support found": "",
+ "No kernel": "",
"No knowledge bases found.": "",
"No knowledge found": "",
"No Log": "",
@@ -1397,6 +1407,7 @@
"No results": "",
"No results found": "",
"No search query generated": "",
+ "No servers detected": "",
"No skills found": "",
"No source available": "",
"No sources found": "",
@@ -1563,6 +1574,7 @@
"Please use a private key in PKCS#1 format. You can convert it using a format-conversion tool.": "",
"Please wait until all files are uploaded.": "",
"Port": "",
+ "Ports": "",
"Positive attitude": "",
"Prefer not to say": "",
"Prefix ID": "",
@@ -1600,6 +1612,7 @@
"Pull \"{{searchValue}}\" from Ollama.com": "",
"Pull a model from Ollama.com": "",
"Pull Model": "",
+ "Pyodide file browser": "",
"QRCode": "",
"Query Generation Prompt": "",
"Querying": "",
@@ -1681,6 +1694,7 @@
"Response splitting": "",
"Response Watermark": "",
"Responses": "",
+ "Restart": "",
"Result": "",
"RESULT": "",
"Retrieval": "",
@@ -1693,6 +1707,7 @@
"Role": "",
"RTL": "",
"Run": "",
+ "Run All": "",
"Running": "",
"Running...": "",
"Runs embedding tasks concurrently to speed up processing. Turn off if rate limits become an issue.": "",
@@ -1891,6 +1906,7 @@
"Start a new conversation": "",
"Start of the channel": "",
"Start Tag": "",
+ "Starting kernel...": "",
"Status": "",
"Status cleared successfully": "",
"Status updated successfully": "",
@@ -2260,6 +2276,8 @@
"You're now logged in.": "",
"Your Account": "",
"Your account status is currently pending activation.": "",
+ "Your browser does not support the audio tag.": "",
+ "Your browser does not support the video tag.": "",
"Your entire contribution will go directly to the plugin developer; Open WebUI does not take any percentage. However, the chosen funding platform might have its own fees.": "",
"Your message text or inputs": "",
"Your usage stats have been successfully synced.": "",
diff --git a/src/lib/i18n/locales/zh-CN/translation.json b/src/lib/i18n/locales/zh-CN/translation.json
index 90c0fc8baa..901097411c 100644
--- a/src/lib/i18n/locales/zh-CN/translation.json
+++ b/src/lib/i18n/locales/zh-CN/translation.json
@@ -192,6 +192,7 @@
"Are you sure you want to delete this channel?": "您确认要删除此频道吗?",
"Are you sure you want to delete this message?": "您确认要删除此消息吗?",
"Are you sure you want to delete this version? Child versions will be relinked to this version's parent.": "您确认要删除此版本吗?其子版本将重新链接到该版本的上一级。",
+ "Are you sure you want to delete this?": "",
"Are you sure you want to unarchive all archived chats?": "您确认要取消所有已归档的对话吗?",
"Arena Models": "启用竞技场匿名评价模型",
"Artifacts": "产物",
@@ -296,6 +297,7 @@
"Charge Amount Control": "充值金额控制",
"Chart new frontiers": "开辟前沿",
"Chat": "对话",
+ "Chat archived.": "对话已归档。",
"Chat Background Image": "对话背景图片",
"Chat Bubble UI": "以聊天气泡的形式显示对话内容",
"Chat Completions": "Chat Completions",
@@ -520,6 +522,7 @@
"Defaults": "默认值",
"Delete": "删除",
"Delete {{count}} Logs Successfully_other": "成功删除 {{count}} 条日志",
+ "Delete {{name}}": "",
"Delete a model": "删除模型",
"Delete All": "全部删除",
"Delete All Chats": "删除所有对话记录",
@@ -614,6 +617,7 @@
"Downloading stats...": "正在下载统计数据...",
"Draw": "平局",
"Drop any files here to upload": "拖拽文件至此上传",
+ "Drop files here": "",
"Drop files here to upload": "将文件拖到此处即可上传",
"DuckDuckGo": "DuckDuckGo",
"e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.": "例如:“30s”,“10m”。有效的时间单位包括:“s”(秒), “m”(分), “h”(时)",
@@ -877,6 +881,7 @@
"Fade Effect for Streaming Text": "流式输出内容时启用动态渐显效果",
"Failed to add file.": "添加文件失败",
"Failed to add members": "添加成员失败",
+ "Failed to archive chat.": "对话归档失败。",
"Failed to attach file": "文件上传失败",
"Failed to clear status": "清除状态失败",
"Failed to connect to {{URL}} OpenAPI tool server": "连接到 {{URL}} OpenAPI 工具服务器失败",
@@ -891,9 +896,11 @@
"Failed to generate title": "生成标题失败",
"Failed to import models": "导入模型配置失败",
"Failed to load chat preview": "对话预览加载失败",
+ "Failed to load DOCX file. Please try downloading it instead.": "无法加载 DOCX 文件,请尝试下载后查看。",
"Failed to load Excel/CSV file. Please try downloading it instead.": "加载 Excel/CSV 文件失败,请尝试直接下载文件。",
"Failed to load file content.": "文件内容加载失败",
"Failed to load Interface settings": "“界面设置”数据加载失败",
+ "Failed to load PPTX file. Please try downloading it instead.": "无法加载 PPTX 文件,请尝试下载后查看。",
"Failed to move chat": "移动对话失败",
"Failed to process URL: {{url}}": "处理链接失败: {{url}}",
"Failed to read clipboard contents": "读取剪贴板内容失败",
@@ -949,6 +956,7 @@
"Focus Chat Input": "聚焦对话框",
"Folder": "分组",
"Folder Background Image": "分组背景图",
+ "Folder created successfully": "",
"Folder deleted successfully": "分组删除成功",
"Folder Max File Count": "分组最大文件数量",
"Folder name": "文件夹名称",
@@ -1329,7 +1337,7 @@
"More options": "更多选项",
"More Options": "更多选项",
"Move": "移动",
- "Moved {{name}}": "",
+ "Moved {{name}}": "移动“{{name}}”成功",
"My Terminal": "我的终端",
"Name": "名称",
"Name and ID are required, please fill them out": "名称和 ID 是必填项,请填写。",
@@ -1374,11 +1382,13 @@
"No file selected": "未选中文件",
"No files found": "未找到文件",
"No files in this knowledge base.": "此知识库中没有文件。",
+ "No files yet. Upload files or run Python code to create them.": "",
"No functions found": "未找到函数",
"No groups found": "暂无权限组",
"No history available": "暂无历史记录",
"No HTML, CSS, or JavaScript content found.": "未找到 HTML、CSS 或 JavaScript 内容。",
"No inference engine with management support found": "未找到支持管理的推理引擎",
+ "No kernel": "未找到内核",
"No knowledge bases found.": "未找到知识库",
"No knowledge found": "未找到知识",
"No Log": "暂无日志",
@@ -1396,6 +1406,7 @@
"No results": "未找到结果",
"No results found": "未找到结果",
"No search query generated": "未生成搜索查询",
+ "No servers detected": "未检测到任何服务器",
"No skills found": "没有找到技能",
"No source available": "没有可用引用来源",
"No sources found": "未找到任何引用来源",
@@ -1562,6 +1573,7 @@
"Please use a private key in PKCS#1 format. You can convert it using a format-conversion tool.": "请使用 PKCS1 格式的密钥,新生成的密钥需要通过工具转换",
"Please wait until all files are uploaded.": "请等待所有文件上传完毕。",
"Port": "端口",
+ "Ports": "端口",
"Positive attitude": "态度积极",
"Prefer not to say": "暂不透露",
"Prefix ID": "模型 ID 前缀",
@@ -1599,6 +1611,7 @@
"Pull \"{{searchValue}}\" from Ollama.com": "从 Ollama.com 下载 “{{searchValue}}”",
"Pull a model from Ollama.com": "从 Ollama.com 下载模型",
"Pull Model": "下载模型",
+ "Pyodide file browser": "",
"QRCode": "二维码",
"Query Generation Prompt": "查询生成提示词",
"Querying": "查询中",
@@ -1680,6 +1693,7 @@
"Response splitting": "拆分回答",
"Response Watermark": "复制时添加水印",
"Responses": "Responses",
+ "Restart": "重启",
"Result": "结果",
"RESULT": "结果",
"Retrieval": "检索",
@@ -1691,6 +1705,7 @@
"Role": "角色",
"RTL": "从右至左",
"Run": "运行",
+ "Run All": "运行全部",
"Running": "运行中",
"Running...": "运行中...",
"Runs embedding tasks concurrently to speed up processing. Turn off if rate limits become an issue.": "并行运行嵌入任务以加快处理速度。如果遇到限速问题,请关闭此选项。",
@@ -1889,6 +1904,7 @@
"Start a new conversation": "开始新对话",
"Start of the channel": "频道起点",
"Start Tag": "起始标签",
+ "Starting kernel...": "正在启动内核...",
"Status": "状态",
"Status cleared successfully": "状态已清除",
"Status updated successfully": "状态已更新",
@@ -2258,6 +2274,8 @@
"You're now logged in.": "已登录。",
"Your Account": "您的账号",
"Your account status is currently pending activation.": "您的账号当前状态为待激活",
+ "Your browser does not support the audio tag.": "您的浏览器不支持播放音频。",
+ "Your browser does not support the video tag.": "您的浏览器不支持播放视频。",
"Your entire contribution will go directly to the plugin developer; Open WebUI does not take any percentage. However, the chosen funding platform might have its own fees.": "您的全部捐款将直接给到插件开发者,Open WebUI 不会收取任何分成。但众筹平台可能会有服务费。",
"Your message text or inputs": "您的消息文本或输入",
"Your usage stats have been successfully synced.": "已成功同步您的使用统计数据。",
diff --git a/src/lib/stores/index.ts b/src/lib/stores/index.ts
index cab868d1b9..8365d3823c 100644
--- a/src/lib/stores/index.ts
+++ b/src/lib/stores/index.ts
@@ -72,6 +72,9 @@ export const functions = writable(null);
export const toolServers = writable([]);
export const terminalServers = writable([]);
+// Persistent Pyodide worker for code interpreter FS
+export const pyodideWorker: Writable<Worker | null> = writable(null);
+
export const banners: Writable<Banner[]> = writable([]);
export const settings: Writable<Settings> = writable({});
diff --git a/src/lib/utils/codeHighlight.ts b/src/lib/utils/codeHighlight.ts
new file mode 100644
index 0000000000..23861bc77b
--- /dev/null
+++ b/src/lib/utils/codeHighlight.ts
@@ -0,0 +1,188 @@
+/**
+ * Map file extensions to Shiki language identifiers.
+ * Only extensions whose Shiki lang id differs from the extension itself need explicit entries.
+ */
+const EXT_OVERRIDE: Record<string, string> = {
+ py: 'python',
+ js: 'javascript',
+ ts: 'typescript',
+ jsx: 'jsx',
+ tsx: 'tsx',
+ rb: 'ruby',
+ rs: 'rust',
+ kt: 'kotlin',
+ cs: 'csharp',
+ fs: 'fsharp',
+ sh: 'bash',
+ bash: 'bash',
+ zsh: 'bash',
+ yml: 'yaml',
+ md: 'markdown',
+ mdx: 'mdx',
+ dockerfile: 'dockerfile',
+ tf: 'terraform',
+ hcl: 'hcl',
+ ex: 'elixir',
+ exs: 'elixir',
+ erl: 'erlang',
+ hs: 'haskell',
+ ml: 'ocaml',
+ mli: 'ocaml',
+ pl: 'perl',
+ pm: 'perl',
+ r: 'r',
+ m: 'objective-c',
+ mm: 'objective-cpp',
+ h: 'c',
+ hpp: 'cpp',
+ cc: 'cpp',
+ cxx: 'cpp',
+ proto: 'proto',
+ nim: 'nim',
+ zig: 'zig',
+ v: 'v',
+ svelte: 'svelte',
+ vue: 'vue',
+ astro: 'astro',
+ prisma: 'prisma',
+ graphql: 'graphql',
+ gql: 'graphql',
+ jsonc: 'jsonc',
+ jsonl: 'jsonl'
+};
+
+// Common extensions that exactly match their Shiki language ID.
+// This replaces the runtime `bundledLanguages` import from shiki, which
+// pulled ~5-10MB of JavaScript into the initial page load just so
+// isCodeFile() could check extension support.
+const KNOWN_LANG_IDS = new Set([
+ 'ada',
+ 'awk',
+ 'bat',
+ 'c',
+ 'cmake',
+ 'clojure',
+ 'cpp',
+ 'crystal',
+ 'css',
+ 'd',
+ 'dart',
+ 'diff',
+ 'elixir',
+ 'elm',
+ 'erlang',
+ 'fish',
+ 'gleam',
+ 'glsl',
+ 'go',
+ 'groovy',
+ 'haml',
+ 'haskell',
+ 'hlsl',
+ 'html',
+ 'ini',
+ 'java',
+ 'javascript',
+ 'json',
+ 'json5',
+ 'jsonc',
+ 'jsx',
+ 'julia',
+ 'kotlin',
+ 'latex',
+ 'less',
+ 'lisp',
+ 'log',
+ 'lua',
+ 'make',
+ 'markdown',
+ 'matlab',
+ 'mdx',
+ 'mojo',
+ 'nim',
+ 'nix',
+ 'nushell',
+ 'ocaml',
+ 'pascal',
+ 'perl',
+ 'php',
+ 'postcss',
+ 'powershell',
+ 'prisma',
+ 'prolog',
+ 'proto',
+ 'pug',
+ 'python',
+ 'r',
+ 'ruby',
+ 'rust',
+ 'sass',
+ 'scala',
+ 'scheme',
+ 'scss',
+ 'solidity',
+ 'sql',
+ 'svelte',
+ 'swift',
+ 'tcl',
+ 'terraform',
+ 'tex',
+ 'toml',
+ 'tsx',
+ 'typescript',
+ 'typst',
+ 'v',
+ 'vb',
+ 'verilog',
+ 'vhdl',
+ 'vue',
+ 'wasm',
+ 'wgsl',
+ 'xml',
+ 'yaml',
+ 'zig'
+]);
+
+/**
+ * Resolve a file extension to a Shiki language id, or null if not supported.
+ */
+export function extToLang(ext: string): string | null {
+ const lower = ext.toLowerCase();
+ // explicit override first
+ if (EXT_OVERRIDE[lower]) return EXT_OVERRIDE[lower];
+ // if the extension itself is a known language id (e.g. 'go', 'rust', 'sql', 'toml', ...)
+ if (KNOWN_LANG_IDS.has(lower)) return lower;
+ return null;
+}
+
+/**
+ * Returns true if the given file path has a code-file extension that Shiki can highlight.
+ */
+export function isCodeFile(path: string | null): boolean {
+ if (!path) return false;
+ const ext = path.split('.').pop()?.toLowerCase() ?? '';
+ return extToLang(ext) !== null;
+}
+
+/**
+ * Highlight code using Shiki with dual light/dark themes via CSS variables.
+ * Returns an HTML string. Throws on failure.
+ *
+ * Shiki is loaded on demand (dynamic import) to avoid pulling ~5-10MB of
+ * JavaScript into the initial page bundle. Since this function is already
+ * async, callers are completely unaffected by the change.
+ */
+export async function highlightCode(code: string, filePath: string): Promise<string> {
+ const ext = filePath.split('.').pop()?.toLowerCase() ?? '';
+ const lang = extToLang(ext) ?? 'text';
+
+ const { codeToHtml } = await import('shiki');
+ return await codeToHtml(code, {
+ lang,
+ themes: {
+ light: 'github-light',
+ dark: 'github-dark'
+ },
+ defaultColor: 'light'
+ });
+}
diff --git a/src/lib/utils/excelToTable.ts b/src/lib/utils/excelToTable.ts
new file mode 100644
index 0000000000..3f7f8004a2
--- /dev/null
+++ b/src/lib/utils/excelToTable.ts
@@ -0,0 +1,88 @@
+/**
+ * Shared Excel → HTML table renderer.
+ *
+ * Converts a worksheet to a styled HTML table with:
+ * - Column letter headers (A, B, C…)
+ * - Row numbers
+ * - Proper empty cell handling
+ * - Sanitized output
+ */
+
+import type { WorkSheet } from 'xlsx';
+
+/** Convert column index (0-based) to Excel-style letter (A, B, …, Z, AA, AB, …) */
+const colLetter = (i: number): string => {
+ let s = '';
+ let n = i;
+ while (n >= 0) {
+ s = String.fromCharCode(65 + (n % 26)) + s;
+ n = Math.floor(n / 26) - 1;
+ }
+ return s;
+};
+
+/** Escape HTML entities */
+const esc = (v: unknown): string => {
+  if (v === null || v === undefined || v === '') return '&nbsp;';
+  return String(v).replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
+};
+
+export interface ExcelTableResult {
+ html: string;
+ rowCount: number;
+ colCount: number;
+}
+
+/**
+ * Render a worksheet as an HTML table string.
+ * Uses sheet_to_json with header:1 for a raw 2D array.
+ */
+export async function excelToTable(worksheet: WorkSheet): Promise<ExcelTableResult> {
+ const XLSX = await import('xlsx');
+ const rows: unknown[][] = XLSX.utils.sheet_to_json(worksheet, { header: 1, defval: '' });
+
+ if (rows.length === 0) {
+ return {
+ html: '',
+ rowCount: 0,
+ colCount: 0
+ };
+ }
+
+ // Determine column count from the widest row
+ const colCount = rows.reduce((max, row) => Math.max(max, row.length), 0);
+ const rowCount = rows.length;
+
+  const parts: string[] = [];
+  parts.push('<table>');
+
+  // Column letter header row
+  parts.push('<thead><tr>');
+  parts.push('<th></th>'); // corner cell
+  for (let c = 0; c < colCount; c++) {
+    parts.push(`<th>${colLetter(c)}</th>`);
+  }
+  parts.push('</tr></thead>');
+
+  // Data rows
+  parts.push('<tbody>');
+  for (let r = 0; r < rowCount; r++) {
+    const row = rows[r];
+    parts.push('<tr>');
+    parts.push(`<td>${r + 1}</td>`);
+    for (let c = 0; c < colCount; c++) {
+      const val = c < row.length ? row[c] : '';
+      const isNum = typeof val === 'number';
+      parts.push(`<td${isNum ? ' style="text-align:right"' : ''}>${esc(val)}</td>`);
+    }
+    parts.push('</tr>');
+  }
+  parts.push('</tbody>');
+  parts.push('</table>');
+
+ const DOMPurify = (await import('dompurify')).default;
+ return {
+ html: DOMPurify.sanitize(parts.join('')),
+ rowCount,
+ colCount
+ };
+}
diff --git a/src/lib/utils/index.ts b/src/lib/utils/index.ts
index 3c42370063..61ecb75c8b 100644
--- a/src/lib/utils/index.ts
+++ b/src/lib/utils/index.ts
@@ -275,11 +275,50 @@ export const canvasPixelTest = () => {
return true;
};
+let resizeImageWarmupDone = false;
+/**
+ * Draws an image to a canvas at the given dimensions and returns a data URL.
+ * On mobile, the first export uses toBlob (avoids black image on Android); later exports use toDataURL.
+ */
+async function resizeImageToDataURL(
+ img: HTMLImageElement,
+ width: number,
+ height: number,
+ mimeType = 'image/jpeg'
+): Promise {
+ const canvas = document.createElement('canvas');
+ canvas.width = width;
+ canvas.height = height;
+ canvas.getContext('2d')?.drawImage(img, 0, 0, width, height);
+
+ const toDataURL = () => canvas.toDataURL(mimeType);
+
+ if (
+ !resizeImageWarmupDone &&
+ canvas.toBlob &&
+ /android|iphone|ipad|ipod/i.test(navigator?.userAgent)
+ ) {
+ resizeImageWarmupDone = true;
+ return new Promise((resolve) => {
+ canvas.toBlob((blob) => {
+ if (!blob) {
+ resolve(toDataURL());
+ return;
+ }
+ const reader = new FileReader();
+ reader.onload = () => resolve(String(reader.result));
+ reader.onerror = () => resolve(toDataURL());
+ reader.readAsDataURL(blob);
+ }, mimeType);
+ });
+ }
+ return Promise.resolve(toDataURL());
+}
+
export const compressImage = async (imageUrl, maxWidth, maxHeight) => {
return new Promise((resolve, reject) => {
const img = new Image();
- img.onload = () => {
- const canvas = document.createElement('canvas');
+ img.onload = async () => {
let width = img.width;
let height = img.height;
@@ -322,16 +361,8 @@ export const compressImage = async (imageUrl, maxWidth, maxHeight) => {
height = maxHeight;
}
- canvas.width = width;
- canvas.height = height;
-
- const context = canvas.getContext('2d');
- context.drawImage(img, 0, 0, width, height);
-
- // Get compressed image URL
- const mimeType = imageUrl.match(/^data:([^;]+);/)?.[1];
- const compressedUrl = canvas.toDataURL(mimeType);
- resolve(compressedUrl);
+ const mimeType = imageUrl.match(/^data:([^;]+);/)?.[1] ?? 'image/jpeg';
+ resolve(await resizeImageToDataURL(img, width, height, mimeType));
};
img.onerror = (error) => reject(error);
img.src = imageUrl;
@@ -956,6 +987,16 @@ export const extractSentencesForAudio = (text: string) => {
};
export const getMessageContentParts = (content: string, splitOn: string = 'punctuation') => {
+	// Strip <details> blocks directly on the full string before any
+	// code-block-aware processing. removeAllDetails (which callers use)
+	// applies the regex via replaceOutsideCode, which splits on triple-
+	// backtick code fences first. If a <details> block contains code
+	// fences (e.g. reasoning with code examples), the opening <details>
+	// and closing </details> tags land in separate segments and the
+	// regex fails, leaking thinking content into TTS. Applying the strip
+	// here on the full string catches those cases. (Fixes #22197)
+	content = content.replace(/<details[^>]*>[\s\S]*?<\/details>/gi, '');
+
const messageContentParts: string[] = [];
switch (splitOn) {
@@ -1171,19 +1212,19 @@ export const getWeekday = () => {
};
export const createMessagesList = (history, messageId) => {
- if (messageId === null) {
- return [];
- }
+ const list = [];
+ let currentId = messageId;
- const message = history.messages[messageId];
- if (message === undefined) {
- return [];
- }
- if (message?.parentId) {
- return [...createMessagesList(history, message.parentId), message];
- } else {
- return [message];
+ while (currentId !== null && currentId !== undefined) {
+ const message = history.messages[currentId];
+ if (message === undefined) {
+ break;
+ }
+ list.push(message);
+ currentId = message.parentId;
}
+
+ return list.reverse();
};
export const formatFileSize = (size) => {
@@ -1638,6 +1679,10 @@ export const renderVegaVisualization = async (spec: string, i18n?: any) => {
};
export const getCodeBlockContents = (content: string): object => {
+	// Strip thinking/reasoning and other <details> blocks before extracting code
+	// to prevent code inside <details> from being treated as artifacts
+ content = removeAllDetails(content);
+
const codeBlockContents = content.match(/```[\s\S]*?```/g);
let codeBlocks = [];
diff --git a/src/lib/utils/marked/katex-extension.ts b/src/lib/utils/marked/katex-extension.ts
index dd755066ce..13860fab30 100644
--- a/src/lib/utils/marked/katex-extension.ts
+++ b/src/lib/utils/marked/katex-extension.ts
@@ -13,6 +13,12 @@ const ALLOWED_SURROUNDING_CHARS =
'\\s。,、、;;„“‘’“”()「」『』[]《》【】‹›«»…⋯::?!~⇒?!-\\/:-@\\[-`{-~\\p{Script=Han}\\p{Script=Hiragana}\\p{Script=Katakana}\\p{Script=Hangul}';
// Modified to fit more formats in different languages. Originally: '\\s?。,、;!-\\/:-@\\[-`{-~\\p{Script=Han}\\p{Script=Hiragana}\\p{Script=Katakana}\\p{Script=Hangul}';
+// Pre-compile the surrounding character regex once at module load time.
+// This regex uses Unicode property escapes (\p{Script=Han}, etc.) which are
+// extremely expensive to compile - doing so on every call caused ~87% of
+// markdown rendering time to be spent in KaTeX regex compilation.
+const ALLOWED_SURROUNDING_CHARS_REGEX = new RegExp(`[${ALLOWED_SURROUNDING_CHARS}]`, 'u');
+
// const DELIMITER_LIST = [
// { left: '$$', right: '$$', display: false },
// { left: '$', right: '$', display: false },
@@ -67,48 +73,31 @@ export default function (options = {}) {
}
function katexStart(src, displayMode: boolean) {
- const ruleReg = displayMode ? blockRule : inlineRule;
+ for (let i = 0; i < src.length; i++) {
+ const ch = src.charCodeAt(i);
- let indexSrc = src;
-
- while (indexSrc) {
- let index = -1;
- let startIndex = -1;
- let startDelimiter = '';
- let endDelimiter = '';
- for (const delimiter of DELIMITER_LIST) {
- if (delimiter.display !== displayMode) {
+ if (ch === 36 /* $ */) {
+ // Display mode requires $$, skip single $ for display
+ if (displayMode && src.charAt(i + 1) !== '$') {
continue;
}
-
- startIndex = indexSrc.indexOf(delimiter.left);
- if (startIndex === -1) {
- continue;
+ if (i === 0 || ALLOWED_SURROUNDING_CHARS_REGEX.test(src.charAt(i - 1))) {
+ return i;
}
-
- index = startIndex;
- startDelimiter = delimiter.left;
- endDelimiter = delimiter.right;
- }
-
- if (index === -1) {
- return;
- }
-
- // Check if the delimiter is preceded by a special character.
- // If it does, then it's potentially a math formula.
- const f =
- index === 0 ||
- indexSrc.charAt(index - 1).match(new RegExp(`[${ALLOWED_SURROUNDING_CHARS}]`, 'u'));
- if (f) {
- const possibleKatex = indexSrc.substring(index);
-
- if (possibleKatex.match(ruleReg)) {
- return index;
+ } else if (ch === 92 /* \ */) {
+ const next = src.charAt(i + 1);
+ // Only consider \ if followed by a valid math delimiter start
+ if (displayMode) {
+ // Display: \[ or \begin{equation}
+ if (next !== '[' && next !== 'b') continue;
+ } else {
+ // Inline: \( or \ce{ or \pu{
+ if (next !== '(' && next !== 'c' && next !== 'p') continue;
+ }
+ if (i === 0 || ALLOWED_SURROUNDING_CHARS_REGEX.test(src.charAt(i - 1))) {
+ return i;
}
}
-
- indexSrc = indexSrc.substring(index + startDelimiter.length).replace(endDelimiter, '');
}
}
diff --git a/src/lib/utils/pptxToHtml.ts b/src/lib/utils/pptxToHtml.ts
new file mode 100644
index 0000000000..045ac70001
--- /dev/null
+++ b/src/lib/utils/pptxToHtml.ts
@@ -0,0 +1,257 @@
+/**
+ * Lightweight PPTX → Image renderer.
+ *
+ * Extracts text and images from each slide and renders them
+ * directly to canvas, returning PNG data URLs.
+ *
+ * Uses jszip (dynamically imported) and the browser Canvas 2D API.
+ * No theme resolution, charts, SmartArt, or animations — preview only.
+ */
+
+const EMU_PER_PX = 9525;
+const emuToPx = (emu: number) => Math.round(emu / EMU_PER_PX);
+
+const parseEmu = (val: string | null | undefined): number => (val ? parseInt(val, 10) || 0 : 0);
+
+/** Load a data URI into an Image element and wait for it. */
+const loadImage = (src: string): Promise<HTMLImageElement> =>
+ new Promise((resolve, reject) => {
+ const img = new Image();
+ img.onload = () => resolve(img);
+ img.onerror = () => reject(new Error('Failed to load image'));
+ img.src = src;
+ });
+
+/**
+ * Convert PPTX ArrayBuffer → array of PNG data URL strings, one per slide.
+ */
+export async function pptxToImages(
+ buffer: ArrayBuffer
+): Promise<{ images: string[]; width: number; height: number }> {
+ const JSZip = (await import('jszip')).default;
+ const zip = await JSZip.loadAsync(buffer);
+
+ // ── Read slide dimensions from presentation.xml ──────────────────
+ let slideW = 960;
+ let slideH = 540;
+ const presXml = zip.file('ppt/presentation.xml');
+ if (presXml) {
+ const presText = await presXml.async('text');
+ const presDoc = new DOMParser().parseFromString(presText, 'application/xml');
+ const sldSz = presDoc.getElementsByTagName('p:sldSz')[0];
+ if (sldSz) {
+ slideW = emuToPx(parseEmu(sldSz.getAttribute('cx')));
+ slideH = emuToPx(parseEmu(sldSz.getAttribute('cy')));
+ }
+ }
+
+ // ── Collect media files (images) as base64 data URIs ─────────────
+  const media: Record<string, string> = {};
+ const mediaFiles = Object.keys(zip.files).filter((f) => f.startsWith('ppt/media/'));
+ await Promise.all(
+ mediaFiles.map(async (path) => {
+ const file = zip.file(path);
+ if (!file) return;
+ const base64 = await file.async('base64');
+ const ext = path.split('.').pop()?.toLowerCase() ?? '';
+ const mime =
+ ext === 'png'
+ ? 'image/png'
+ : ext === 'gif'
+ ? 'image/gif'
+ : ext === 'svg'
+ ? 'image/svg+xml'
+ : ext === 'emf' || ext === 'wmf'
+ ? 'image/x-emf'
+ : 'image/jpeg';
+ media[path] = `data:${mime};base64,${base64}`;
+ })
+ );
+
+ // ── Discover slide files ─────────────────────────────────────────
+ const slideFiles = Object.keys(zip.files)
+ .filter((f) => /^ppt\/slides\/slide\d+\.xml$/.test(f))
+ .sort((a, b) => {
+ const na = parseInt(a.match(/slide(\d+)/)?.[1] ?? '0');
+ const nb = parseInt(b.match(/slide(\d+)/)?.[1] ?? '0');
+ return na - nb;
+ });
+
+ const images: string[] = [];
+
+ for (const slidePath of slideFiles) {
+ const slideText = await zip.file(slidePath)!.async('text');
+ const slideDoc = new DOMParser().parseFromString(slideText, 'application/xml');
+
+ // Load relationship file for this slide to resolve image references
+ const slideNum = slidePath.match(/slide(\d+)/)?.[1];
+ const relsPath = `ppt/slides/_rels/slide${slideNum}.xml.rels`;
+ const rels: Record = {};
+ const relsFile = zip.file(relsPath);
+ if (relsFile) {
+ const relsText = await relsFile.async('text');
+ const relsDoc = new DOMParser().parseFromString(relsText, 'application/xml');
+ const relEls = relsDoc.getElementsByTagName('Relationship');
+ for (let i = 0; i < relEls.length; i++) {
+ const rel = relEls[i];
+ const id = rel.getAttribute('Id') ?? '';
+ const target = rel.getAttribute('Target') ?? '';
+ if (target.startsWith('../')) {
+ rels[id] = 'ppt/' + target.replace('../', '');
+ } else {
+ rels[id] = target;
+ }
+ }
+ }
+
+ // ── Create canvas and render slide ───────────────────────────
+ const canvas = document.createElement('canvas');
+ canvas.width = slideW;
+ canvas.height = slideH;
+ const ctx = canvas.getContext('2d')!;
+
+ // White background
+ ctx.fillStyle = '#ffffff';
+ ctx.fillRect(0, 0, slideW, slideH);
+
+ const spTree = slideDoc.getElementsByTagName('p:spTree')[0];
+ if (!spTree) {
+ images.push(canvas.toDataURL('image/png'));
+ continue;
+ }
+
+ const shapes = [
+ ...Array.from(spTree.getElementsByTagName('p:sp')),
+ ...Array.from(spTree.getElementsByTagName('p:pic'))
+ ];
+
+ for (const shape of shapes) {
+ const xfrm =
+ shape.getElementsByTagName('a:xfrm')[0] ?? shape.getElementsByTagName('p:xfrm')[0];
+ if (!xfrm) continue;
+
+ const off = xfrm.getElementsByTagName('a:off')[0];
+ const ext = xfrm.getElementsByTagName('a:ext')[0];
+ if (!off || !ext) continue;
+
+ const x = emuToPx(parseEmu(off.getAttribute('x')));
+ const y = emuToPx(parseEmu(off.getAttribute('y')));
+ const w = emuToPx(parseEmu(ext.getAttribute('cx')));
+ const h = emuToPx(parseEmu(ext.getAttribute('cy')));
+
+ if (w === 0 && h === 0) continue;
+
+ // ── Picture ──────────────────────────────────────────────
+ const blipFill = shape.getElementsByTagName('p:blipFill')[0];
+ if (blipFill) {
+ const blip = blipFill.getElementsByTagName('a:blip')[0];
+ if (blip) {
+ const rEmbed = blip.getAttribute('r:embed') ?? '';
+ const mediaPath = rels[rEmbed];
+ const dataUri = mediaPath ? media[mediaPath] : '';
+ if (dataUri && !dataUri.includes('image/x-emf')) {
+ try {
+ const img = await loadImage(dataUri);
+ ctx.drawImage(img, x, y, w, h);
+ } catch {
+ // Skip images that fail to load
+ }
+ }
+ }
+ continue;
+ }
+
+ // ── Text shape ───────────────────────────────────────────
+ const txBody = shape.getElementsByTagName('p:txBody')[0];
+ if (!txBody) continue;
+
+ ctx.save();
+ ctx.rect(x, y, w, h);
+ ctx.clip();
+
+ const paragraphs = txBody.getElementsByTagName('a:p');
+ let cursorY = y;
+ const defaultFontSize = 12;
+
+ for (let pi = 0; pi < paragraphs.length; pi++) {
+ const para = paragraphs[pi];
+ const runs = para.getElementsByTagName('a:r');
+
+ if (runs.length === 0) {
+ cursorY += defaultFontSize * 1.5;
+ continue;
+ }
+
+ // Calculate max font size in this paragraph for line height
+ let maxFontPt = defaultFontSize;
+ for (let ri = 0; ri < runs.length; ri++) {
+ const rPr = runs[ri].getElementsByTagName('a:rPr')[0];
+ if (rPr) {
+ const sz = rPr.getAttribute('sz');
+ if (sz) {
+ const pt = parseInt(sz, 10) / 100;
+ if (pt > maxFontPt) maxFontPt = pt;
+ }
+ }
+ }
+
+ const lineHeight = maxFontPt * 1.4;
+ cursorY += maxFontPt; // baseline offset
+
+ let cursorX = x + 4; // small left padding
+
+ for (let ri = 0; ri < runs.length; ri++) {
+ const run = runs[ri];
+ const rPr = run.getElementsByTagName('a:rPr')[0];
+ const text = run.getElementsByTagName('a:t')[0]?.textContent ?? '';
+ if (!text) continue;
+
+ let fontPt = defaultFontSize;
+ let bold = false;
+ let italic = false;
+ let color = '#000000';
+
+ if (rPr) {
+ if (rPr.getAttribute('b') === '1') bold = true;
+ if (rPr.getAttribute('i') === '1') italic = true;
+ const sz = rPr.getAttribute('sz');
+ if (sz) fontPt = parseInt(sz, 10) / 100;
+ const solidFill = rPr.getElementsByTagName('a:solidFill')[0];
+ if (solidFill) {
+ const srgb = solidFill.getElementsByTagName('a:srgbClr')[0];
+ if (srgb) {
+ const val = srgb.getAttribute('val');
+ if (val) color = `#${val}`;
+ }
+ }
+ }
+
+ ctx.font = `${italic ? 'italic ' : ''}${bold ? 'bold ' : ''}${fontPt}pt Calibri, Arial, sans-serif`;
+ ctx.fillStyle = color;
+ ctx.textBaseline = 'alphabetic';
+
+ // Simple word-wrap within the shape bounds
+ const words = text.split(/(\s+)/);
+ for (const word of words) {
+ const metrics = ctx.measureText(word);
+ if (cursorX + metrics.width > x + w && cursorX > x + 4) {
+ cursorX = x + 4;
+ cursorY += lineHeight;
+ }
+ if (cursorY > y + h) break;
+ ctx.fillText(word, cursorX, cursorY);
+ cursorX += metrics.width;
+ }
+ }
+
+ cursorY += lineHeight * 0.4; // paragraph spacing
+ }
+
+ ctx.restore();
+ }
+
+ images.push(canvas.toDataURL('image/png'));
+ }
+
+ return { images, width: slideW, height: slideH };
+}
diff --git a/src/lib/workers/pyodide.worker.ts b/src/lib/workers/pyodide.worker.ts
index 221effca5e..7ef3916731 100644
--- a/src/lib/workers/pyodide.worker.ts
+++ b/src/lib/workers/pyodide.worker.ts
@@ -13,6 +13,12 @@ declare global {
}
}
+// ---------------------------------------------------------------------------
+// Pyodide bootstrap
+// ---------------------------------------------------------------------------
+
+let pyodideReady: Promise<void> | null = null;
+
async function loadPyodideAndPackages(packages: string[] = []) {
self.stdout = null;
self.stderr = null;
@@ -40,41 +46,148 @@ async function loadPyodideAndPackages(packages: string[] = []) {
packages: ['micropip']
});
- const mountDir = '/mnt';
- self.pyodide.FS.mkdirTree(mountDir);
- // self.pyodide.FS.mount(self.pyodide.FS.filesystems.IDBFS, {}, mountDir);
-
- // // Load persisted files from IndexedDB (Initial Sync)
- // await new Promise((resolve, reject) => {
- // self.pyodide.FS.syncfs(true, (err) => {
- // if (err) {
- // console.error('Error syncing from IndexedDB:', err);
- // reject(err);
- // } else {
- // console.log('Successfully loaded from IndexedDB.');
- // resolve();
- // }
- // });
- // });
+ // Create the upload directory and mount IDBFS for persistence
+ const uploadDir = '/mnt/uploads';
+ self.pyodide.FS.mkdirTree(uploadDir);
+ self.pyodide.FS.mount(self.pyodide.FS.filesystems.IDBFS, {}, '/mnt');
- const micropip = self.pyodide.pyimport('micropip');
+ // Load persisted files from IndexedDB
+ await new Promise<void>((resolve) => {
+ (self.pyodide.FS as any).syncfs(true, (err: Error | null) => {
+ if (err) {
+ console.error('Error syncing from IndexedDB:', err);
+ }
+ // Always resolve — missing data is fine on first run
+ resolve();
+ });
+ });
- // await micropip.set_index_urls('https://pypi.org/pypi/{package_name}/json');
+ // Ensure /mnt/uploads still exists after sync (first-time init)
+ try {
+ self.pyodide.FS.stat(uploadDir);
+ } catch {
+ self.pyodide.FS.mkdirTree(uploadDir);
+ }
+
+ const micropip = self.pyodide.pyimport('micropip');
await micropip.install(packages);
}
-self.onmessage = async (event) => {
- const { id, code, ...context } = event.data;
+/**
+ * Ensure Pyodide is loaded. On the first call, loads and installs packages.
+ * Subsequent calls reuse the already-loaded instance (persistent worker).
+ */
+async function ensurePyodide(packages: string[] = []) {
+ if (!pyodideReady) {
+ pyodideReady = loadPyodideAndPackages(packages);
+ }
+ await pyodideReady;
+
+ // Install any additional packages not loaded on init
+ if (packages.length > 0 && self.pyodide) {
+ const micropip = self.pyodide.pyimport('micropip');
+ await micropip.install(packages);
+ }
+}
+
+/**
+ * Persist the in-memory FS to IndexedDB (fire-and-forget with logging).
+ */
+function persistFS() {
+ if (!self.pyodide) return;
+ (self.pyodide.FS as any).syncfs(false, (err: Error | null) => {
+ if (err) {
+ console.error('Error syncing to IndexedDB:', err);
+ } else {
+ console.log('Successfully synced to IndexedDB.');
+ }
+ });
+}
- console.log(event.data);
+// ---------------------------------------------------------------------------
+// FS operations
+// ---------------------------------------------------------------------------
- // The worker copies the context in its own "memory" (an object mapping name to values)
- for (const key of Object.keys(context)) {
- self[key] = context[key];
+function fsUploadFiles(files: { name: string; data: ArrayBuffer }[], dir = '/mnt/uploads') {
+ try {
+ self.pyodide.FS.stat(dir);
+ } catch {
+ self.pyodide.FS.mkdirTree(dir);
}
- // make sure loading is done
- await loadPyodideAndPackages(self.packages);
+ for (const file of files) {
+ self.pyodide.FS.writeFile(`${dir}/${file.name}`, new Uint8Array(file.data));
+ }
+}
+
+function fsList(path: string) {
+ const entries: { name: string; type: 'file' | 'directory'; size: number }[] = [];
+ try {
+ const items = self.pyodide.FS.readdir(path).filter((n: string) => n !== '.' && n !== '..');
+ for (const name of items) {
+ try {
+ const stat = self.pyodide.FS.stat(`${path}/${name}`);
+ const isDir = self.pyodide.FS.isDir(stat.mode);
+ entries.push({
+ name,
+ type: isDir ? 'directory' : 'file',
+ size: isDir ? 0 : stat.size
+ });
+ } catch {
+ // skip inaccessible entries
+ }
+ }
+ } catch {
+ // directory doesn't exist
+ }
+ return entries;
+}
+
+function fsRead(path: string): ArrayBuffer {
+ const data: Uint8Array = (self.pyodide.FS as any).readFile(path) as Uint8Array;
+ return data.buffer as ArrayBuffer;
+}
+
+function fsDelete(path: string) {
+ try {
+ const stat = self.pyodide.FS.stat(path);
+ if (self.pyodide.FS.isDir(stat.mode)) {
+ // Recursively delete directory contents
+ const items = self.pyodide.FS.readdir(path).filter((n: string) => n !== '.' && n !== '..');
+ for (const item of items) {
+ fsDelete(`${path}/${item}`);
+ }
+ self.pyodide.FS.rmdir(path);
+ } else {
+ self.pyodide.FS.unlink(path);
+ }
+ } catch {
+ // already gone
+ }
+}
+
+function fsMkdir(path: string) {
+ self.pyodide.FS.mkdirTree(path);
+}
+
+// ---------------------------------------------------------------------------
+// Code execution
+// ---------------------------------------------------------------------------
+
+async function executeCode(
+ id: string,
+ code: string,
+ files?: { name: string; data: ArrayBuffer }[]
+) {
+ self.stdout = null;
+ self.stderr = null;
+ self.result = null;
+
+ // Upload any accompanying files before execution
+ if (files && files.length > 0) {
+ fsUploadFiles(files);
+ persistFS();
+ }
try {
// check if matplotlib is imported in the code
@@ -113,25 +226,94 @@ matplotlib.pyplot.show = show`);
console.log('Python result:', self.result);
- // Persist any changes to IndexedDB
- // await new Promise((resolve, reject) => {
- // self.pyodide.FS.syncfs(false, (err) => {
- // if (err) {
- // console.error('Error syncing to IndexedDB:', err);
- // reject(err);
- // } else {
- // console.log('Successfully synced to IndexedDB.');
- // resolve();
- // }
- // });
- // });
- } catch (error) {
- self.stderr = error.toString();
+ // Persist any files the code may have written
+ persistFS();
+ } catch (error: unknown) {
+ self.stderr = error instanceof Error ? error.message : String(error);
}
self.postMessage({ id, result: self.result, stdout: self.stdout, stderr: self.stderr });
+}
+
+// ---------------------------------------------------------------------------
+// Message handler
+// ---------------------------------------------------------------------------
+
+self.onmessage = async (event) => {
+ const data = event.data;
+ const { id, type } = data;
+
+ // Backward compatibility: messages without a `type` field are execute requests
+ if (!type || type === 'execute') {
+ const { code, files, ...context } = data;
+
+ // Copy context keys (packages, etc.) into worker scope
+ for (const key of Object.keys(context)) {
+ if (key !== 'id' && key !== 'type') {
+ self[key] = context[key];
+ }
+ }
+
+ await ensurePyodide(self.packages);
+ await executeCode(id, code, files);
+ return;
+ }
+
+ // FS operations require Pyodide to be loaded
+ await ensurePyodide();
+
+ switch (type) {
+ case 'fs:upload': {
+ const { files, dir } = data;
+ fsUploadFiles(files, dir);
+ persistFS();
+ self.postMessage({ id, type: 'fs:upload', success: true });
+ break;
+ }
+
+ case 'fs:list': {
+ const entries = fsList(data.path);
+ self.postMessage({ id, type: 'fs:list', entries });
+ break;
+ }
+
+ case 'fs:read': {
+ try {
+ const buffer = fsRead(data.path);
+ self.postMessage({ id, type: 'fs:read', data: buffer }, { transfer: [buffer] });
+ } catch (err: unknown) {
+ self.postMessage({
+ id,
+ type: 'fs:read',
+ error: err instanceof Error ? err.message : String(err)
+ });
+ }
+ break;
+ }
+
+ case 'fs:delete': {
+ fsDelete(data.path);
+ persistFS();
+ self.postMessage({ id, type: 'fs:delete', success: true });
+ break;
+ }
+
+ case 'fs:mkdir': {
+ fsMkdir(data.path);
+ persistFS();
+ self.postMessage({ id, type: 'fs:mkdir', success: true });
+ break;
+ }
+
+ default:
+ console.warn('Unknown message type:', type);
+ }
};
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
function processResult(result: any): any {
// Catch and always return JSON-safe string representations
try {
@@ -167,9 +349,9 @@ function processResult(result: any): any {
}
// Stringify anything that's left (e.g., Proxy objects that cannot be directly processed)
return JSON.stringify(result);
- } catch (err) {
+ } catch (err: unknown) {
// In case something unexpected happens, we return a stringified fallback
- return `[processResult error]: ${err.message || err.toString()}`;
+ return `[processResult error]: ${err instanceof Error ? err.message : String(err)}`;
}
}
diff --git a/src/routes/+layout.svelte b/src/routes/+layout.svelte
index 42768ca99d..36fd7b22d5 100644
--- a/src/routes/+layout.svelte
+++ b/src/routes/+layout.svelte
@@ -34,8 +34,10 @@
terminalServers,
showControls,
showFileNavPath,
- showFileNavDir
+ showFileNavDir,
+ pyodideWorker
} from '$lib/stores';
+ import { getFileContentById } from '$lib/apis/files';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { beforeNavigate } from '$app/navigation';
@@ -184,7 +186,20 @@
});
};
- const executePythonAsWorker = async (id, code, cb) => {
+ /**
+ * Get or create the persistent Pyodide worker.
+ * The worker persists across executions so the virtual FS (IDBFS) is preserved.
+ */
+ const getOrCreateWorker = () => {
+ let worker = $pyodideWorker;
+ if (!worker) {
+ worker = new PyodideWorker();
+ pyodideWorker.set(worker);
+ }
+ return worker;
+ };
+
+ const executePythonAsWorker = async (id, code, cb, files = []) => {
let result = null;
let stdout = null;
let stderr = null;
@@ -206,19 +221,44 @@
/\bimport\s+pytz\b|\bfrom\s+pytz\b/.test(code) ? 'pytz' : null
].filter(Boolean);
- const pyodideWorker = new PyodideWorker();
+ const worker = getOrCreateWorker();
+
+ // Fetch file content from the server and prepare for the worker
+ let filePayloads = [];
+ if (files && files.length > 0) {
+ for (const file of files) {
+ try {
+ const fileId = file?.id;
+ const fileName = file?.filename || file?.name || 'file';
+ if (fileId) {
+ const content = await getFileContentById(fileId);
+ if (content) {
+ filePayloads.push({ name: fileName, data: content });
+ }
+ }
+ } catch (e) {
+ console.error('Failed to fetch file for Pyodide:', e);
+ }
+ }
+ }
- pyodideWorker.postMessage({
+ worker.postMessage({
+ type: 'execute',
id: id,
code: code,
- packages: packages
+ packages: packages,
+ files: filePayloads.length > 0 ? filePayloads : undefined
});
- setTimeout(() => {
+ // Timeout for this specific execution (not the worker itself)
+ let timeoutId = setTimeout(() => {
if (executing) {
executing = false;
stderr = 'Execution Time Limit Exceeded';
- pyodideWorker.terminate();
+
+ // Terminate and recreate the worker on timeout
+ worker.terminate();
+ pyodideWorker.set(null);
if (cb) {
cb(
@@ -237,11 +277,18 @@
}
}, 60000);
- pyodideWorker.onmessage = (event) => {
- console.log('pyodideWorker.onmessage', event);
- const { id, ...data } = event.data;
+ // Use addEventListener so multiple concurrent executions don't clobber each other
+ const onMessage = (event) => {
+ const { id: eventId, ...data } = event.data;
+ // Only handle responses for this execution ID
+ if (eventId !== id) return;
+ // Ignore FS responses (they use a type field)
+ if (data.type && data.type.startsWith('fs:')) return;
- console.log(id, data);
+ console.log('pyodideWorker.onmessage', event);
+ clearTimeout(timeoutId);
+ worker.removeEventListener('message', onMessage);
+ worker.removeEventListener('error', onError);
data['stdout'] && (stdout = data['stdout']);
data['stderr'] && (stderr = data['stderr']);
@@ -265,8 +312,11 @@
executing = false;
};
- pyodideWorker.onerror = (event) => {
+ const onError = (event) => {
console.log('pyodideWorker.onerror', event);
+ clearTimeout(timeoutId);
+ worker.removeEventListener('message', onMessage);
+ worker.removeEventListener('error', onError);
if (cb) {
cb(
@@ -284,6 +334,9 @@
}
executing = false;
};
+
+ worker.addEventListener('message', onMessage);
+ worker.addEventListener('error', onError);
};
const resolveToolServer = (serverUrl) => {
@@ -423,7 +476,7 @@
} else if (data?.session_id === $socket.id) {
if (type === 'execute:python') {
console.log('execute:python', data);
- executePythonAsWorker(data.id, data.code, cb);
+ executePythonAsWorker(data.id, data.code, cb, data.files || []);
} else if (type === 'execute:tool') {
console.log('execute:tool', data);
executeTool(data, cb);
diff --git a/static/sql.js/sql-wasm.wasm b/static/sql.js/sql-wasm.wasm
new file mode 100755
index 0000000000..b32b66473d
Binary files /dev/null and b/static/sql.js/sql-wasm.wasm differ