From a59434c2e9235f4c80e26bc40f846139e6a589f2 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:09:34 +0700 Subject: [PATCH 01/19] Extract desktop state storage helpers --- src/composables/desktopStateStorage.ts | 448 +++++++++++++++++++++++ src/composables/useDesktopState.ts | 480 ++----------------------- tests.md | 28 ++ 3 files changed, 511 insertions(+), 445 deletions(-) create mode 100644 src/composables/desktopStateStorage.ts diff --git a/src/composables/desktopStateStorage.ts b/src/composables/desktopStateStorage.ts new file mode 100644 index 000000000..c52e2e30c --- /dev/null +++ b/src/composables/desktopStateStorage.ts @@ -0,0 +1,448 @@ +import type { CollaborationModeKind, UiThreadTokenUsage } from '../types/codex' +import { toProjectName } from '../pathUtils.js' + +const READ_STATE_STORAGE_KEY = 'codex-web-local.thread-read-state.v1' +const UNREAD_CUTOFF_STORAGE_KEY = 'codex-web-local.thread-unread-cutoff.v1' +const THREAD_TOKEN_USAGE_STORAGE_KEY = 'codex-web-local.thread-token-usage.v1' +const THREAD_TERMINAL_OPEN_STORAGE_KEY = 'codex-web-local.thread-terminal-open.v1' +const SELECTED_THREAD_STORAGE_KEY = 'codex-web-local.selected-thread-id.v1' +const SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY = 'codex-web-local.selected-model-by-context.v1' +const LEGACY_SELECTED_MODEL_STORAGE_KEY = 'codex-web-local.selected-model-id.v1' +const PROJECT_ORDER_STORAGE_KEY = 'codex-web-local.project-order.v1' +const PROJECT_DISPLAY_NAME_STORAGE_KEY = 'codex-web-local.project-display-name.v1' +const COLLABORATION_MODE_STORAGE_KEY = 'codex-web-local.collaboration-mode-by-context.v1' +const LEGACY_COLLABORATION_MODE_STORAGE_KEY = 'codex-web-local.collaboration-mode.v1' +export const NEW_THREAD_COLLABORATION_MODE_CONTEXT = '__new-thread__' +const NEW_THREAD_PROVIDER_MODEL_CONTEXT_PREFIX = '__new-thread-provider__::' + +export function loadReadStateMap(): Record { + if (typeof window === 'undefined') return {} + + try { + const raw = 
window.localStorage.getItem(READ_STATE_STORAGE_KEY) + if (!raw) return {} + + const parsed = JSON.parse(raw) as unknown + if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} + return parsed as Record + } catch { + return {} + } +} + +export function saveReadStateMap(state: Record): void { + if (typeof window === 'undefined') return + window.localStorage.setItem(READ_STATE_STORAGE_KEY, JSON.stringify(state)) +} + +export function loadUnreadCutoffIso(): string { + if (typeof window === 'undefined') return '' + + const existing = window.localStorage.getItem(UNREAD_CUTOFF_STORAGE_KEY) + if (existing) return existing + + const initialCutoff = new Date().toISOString() + window.localStorage.setItem(UNREAD_CUTOFF_STORAGE_KEY, initialCutoff) + return initialCutoff +} + +export function saveUnreadCutoffIso(cutoffIso: string): void { + if (typeof window === 'undefined') return + window.localStorage.setItem(UNREAD_CUTOFF_STORAGE_KEY, cutoffIso) +} + +export function isThreadUpdatedAfterCutoff(updatedAtIso: string, cutoffIso: string): boolean { + if (!updatedAtIso || !cutoffIso) return false + const updatedAtMs = new Date(updatedAtIso).getTime() + const cutoffMs = new Date(cutoffIso).getTime() + if (!Number.isFinite(updatedAtMs) || !Number.isFinite(cutoffMs)) return false + return updatedAtMs > cutoffMs +} + +export function isThreadUnreadByLastRead( + updatedAtIso: string, + threadReadStateIso: string | undefined, + unreadCutoffIso: string, +): boolean { + const effectiveLastReadIso = threadReadStateIso ?? unreadCutoffIso + return isThreadUpdatedAfterCutoff(updatedAtIso, effectiveLastReadIso) +} + +export function normalizeCollaborationMode(value: unknown): CollaborationModeKind { + return value === 'plan' ? 'plan' : 'default' +} + +export function normalizeStoredModelId(value: unknown): string { + return typeof value === 'string' ? 
value.trim() : '' +} + +export function createStringKeyedRecord(): Record { + return Object.create(null) as Record +} + +export function cloneStringKeyedRecord(record: Record): Record { + const next = createStringKeyedRecord() + for (const [key, value] of Object.entries(record)) { + next[key] = value + } + return next +} + +export function omitStringKeyedRecordKey(record: Record, key: string): Record { + if (!(key in record)) return record + const next = createStringKeyedRecord() + for (const [entryKey, value] of Object.entries(record)) { + if (entryKey !== key) { + next[entryKey] = value + } + } + return next +} + +export function pruneThreadContextStateMap( + stateMap: Record, + threadIds: Set, +): Record { + let changed = false + const next = createStringKeyedRecord() + for (const [contextId, value] of Object.entries(stateMap)) { + if ( + contextId === NEW_THREAD_COLLABORATION_MODE_CONTEXT + || contextId.startsWith(NEW_THREAD_PROVIDER_MODEL_CONTEXT_PREFIX) + || threadIds.has(contextId) + ) { + next[contextId] = value + continue + } + changed = true + } + return changed ? 
next : stateMap +} + +export function normalizeProviderContextId(providerId: string): string { + const normalized = providerId.trim().toLowerCase() + return normalized || 'codex' +} + +export function isNewThreadContextId(contextId: string): boolean { + return contextId === NEW_THREAD_COLLABORATION_MODE_CONTEXT +} + +export function toProviderModelContextId(providerId: string): string { + const normalizedProviderId = normalizeProviderContextId(providerId) + if (!normalizedProviderId) return '' + return `${NEW_THREAD_PROVIDER_MODEL_CONTEXT_PREFIX}${normalizedProviderId}` +} + +export function toThreadContextId(threadId: string): string { + const normalizedThreadId = threadId.trim() + return normalizedThreadId || NEW_THREAD_COLLABORATION_MODE_CONTEXT +} + +export function loadSelectedModelMap(): Record { + if (typeof window === 'undefined') return createStringKeyedRecord() + + try { + const raw = window.localStorage.getItem(SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY) + if (raw) { + const parsed = JSON.parse(raw) as unknown + if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return createStringKeyedRecord() + + const next = createStringKeyedRecord() + for (const [contextId, value] of Object.entries(parsed as Record)) { + if (typeof contextId !== 'string' || contextId.length === 0) continue + const normalizedModelId = normalizeStoredModelId(value) + if (normalizedModelId) { + next[contextId] = normalizedModelId + } + } + return next + } + } catch { + // Fall back to the legacy global preference below. 
+ } + + const legacyModelId = normalizeStoredModelId(window.localStorage.getItem(LEGACY_SELECTED_MODEL_STORAGE_KEY)) + const next = createStringKeyedRecord() + if (legacyModelId) { + next[NEW_THREAD_COLLABORATION_MODE_CONTEXT] = legacyModelId + } + return next +} + +export function readSelectedModel( + state: Record, + threadId: string, +): string { + const contextId = toThreadContextId(threadId) + const contextModelId = normalizeStoredModelId(state[contextId]) + if (contextModelId) return contextModelId + return normalizeStoredModelId(state[NEW_THREAD_COLLABORATION_MODE_CONTEXT]) +} + +export function saveSelectedModelMap(state: Record): void { + if (typeof window === 'undefined') return + try { + if (Object.keys(state).length === 0) { + window.localStorage.removeItem(SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY) + } else { + window.localStorage.setItem(SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY, JSON.stringify(state)) + } + window.localStorage.removeItem(LEGACY_SELECTED_MODEL_STORAGE_KEY) + } catch { + // Keep in-memory selection working even if localStorage writes fail. + } +} + +export function loadSelectedCollaborationModeMap(): Record { + if (typeof window === 'undefined') return createStringKeyedRecord() + + try { + const raw = window.localStorage.getItem(COLLABORATION_MODE_STORAGE_KEY) + if (raw) { + const parsed = JSON.parse(raw) as unknown + if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) { + return createStringKeyedRecord() + } + + const next = createStringKeyedRecord() + for (const [contextId, value] of Object.entries(parsed as Record)) { + if (typeof contextId !== 'string' || contextId.length === 0) continue + const normalizedMode = normalizeCollaborationMode(value) + if (normalizedMode === 'plan') { + next[contextId] = normalizedMode + } + } + return next + } + } catch { + // Fall back to the legacy global preference below. 
+ } + + return createStringKeyedRecord() +} + +export function readSelectedCollaborationMode( + state: Record, + threadId: string, +): CollaborationModeKind { + const contextId = toThreadContextId(threadId) + return normalizeCollaborationMode(state[contextId]) +} + +export function writeSelectedCollaborationModeForContext( + state: Record, + threadId: string, + mode: CollaborationModeKind, +): Record { + const contextId = toThreadContextId(threadId) + if (isNewThreadContextId(contextId)) { + return omitStringKeyedRecordKey(state, contextId) + } + if (mode === 'plan') { + const next = cloneStringKeyedRecord(state) + next[contextId] = 'plan' + return next + } + return omitStringKeyedRecordKey(state, contextId) +} + +export function saveSelectedCollaborationModeMap(state: Record): void { + if (typeof window === 'undefined') return + try { + if (Object.keys(state).length === 0) { + window.localStorage.removeItem(COLLABORATION_MODE_STORAGE_KEY) + } else { + window.localStorage.setItem(COLLABORATION_MODE_STORAGE_KEY, JSON.stringify(state)) + } + window.localStorage.removeItem(LEGACY_COLLABORATION_MODE_STORAGE_KEY) + } catch { + // Keep in-memory mode selection working even if localStorage writes fail. 
+ } +} + +export function clamp(value: number, minValue: number, maxValue: number): number { + return Math.min(Math.max(value, minValue), maxValue) +} + +export function normalizeStoredTokenCount(value: unknown): number | null { + if (typeof value === 'number' && Number.isFinite(value)) { + return Math.max(0, Math.trunc(value)) + } + + if (typeof value === 'string' && value.trim().length > 0) { + const parsed = Number(value) + if (Number.isFinite(parsed)) { + return Math.max(0, Math.trunc(parsed)) + } + } + + return null +} + +export function normalizeTokenUsageBreakdown(value: unknown): UiThreadTokenUsage['last'] | null { + if (!value || typeof value !== 'object' || Array.isArray(value)) return null + + const record = value as Record + return { + totalTokens: normalizeStoredTokenCount(record.totalTokens) ?? 0, + inputTokens: normalizeStoredTokenCount(record.inputTokens) ?? 0, + cachedInputTokens: normalizeStoredTokenCount(record.cachedInputTokens) ?? 0, + outputTokens: normalizeStoredTokenCount(record.outputTokens) ?? 0, + reasoningOutputTokens: normalizeStoredTokenCount(record.reasoningOutputTokens) ?? 0, + } +} + +export function normalizeThreadTokenUsage(value: unknown): UiThreadTokenUsage | null { + if (!value || typeof value !== 'object' || Array.isArray(value)) return null + + const record = value as Record + const total = normalizeTokenUsageBreakdown(record.total) + const last = normalizeTokenUsageBreakdown(record.last) + if (!total || !last) return null + + const modelContextWindow = normalizeStoredTokenCount(record.modelContextWindow) + const currentContextTokens = last.totalTokens + const remainingContextTokens = typeof modelContextWindow === 'number' + ? Math.max(modelContextWindow - currentContextTokens, 0) + : null + const remainingContextPercent = typeof modelContextWindow === 'number' && modelContextWindow > 0 + ? clamp(Math.round((remainingContextTokens ?? 
0) / modelContextWindow * 100), 0, 100) + : null + + return { + total, + last, + modelContextWindow, + currentContextTokens, + remainingContextTokens, + remainingContextPercent, + } +} + +export function loadThreadTokenUsageMap(): Record { + if (typeof window === 'undefined') return {} + + try { + const raw = window.localStorage.getItem(THREAD_TOKEN_USAGE_STORAGE_KEY) + if (!raw) return {} + + const parsed = JSON.parse(raw) as unknown + if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} + + const normalizedMap: Record = {} + for (const [threadId, usage] of Object.entries(parsed as Record)) { + if (!threadId) continue + const normalizedUsage = normalizeThreadTokenUsage(usage) + if (normalizedUsage) { + normalizedMap[threadId] = normalizedUsage + } + } + return normalizedMap + } catch { + return {} + } +} + +export function saveThreadTokenUsageMap(state: Record): void { + if (typeof window === 'undefined') return + window.localStorage.setItem(THREAD_TOKEN_USAGE_STORAGE_KEY, JSON.stringify(state)) +} + +export function loadThreadTerminalOpenMap(): Record { + if (typeof window === 'undefined') return {} + + try { + const raw = window.localStorage.getItem(THREAD_TERMINAL_OPEN_STORAGE_KEY) + if (!raw) return {} + + const parsed = JSON.parse(raw) as unknown + if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} + + const normalizedMap: Record = {} + for (const [threadId, isOpen] of Object.entries(parsed as Record)) { + if (threadId && typeof isOpen === 'boolean') { + normalizedMap[threadId] = isOpen + } + } + return normalizedMap + } catch { + return {} + } +} + +export function saveThreadTerminalOpenMap(state: Record): void { + if (typeof window === 'undefined') return + window.localStorage.setItem(THREAD_TERMINAL_OPEN_STORAGE_KEY, JSON.stringify(state)) +} + +export function loadSelectedThreadId(): string { + if (typeof window === 'undefined') return '' + const raw = 
window.localStorage.getItem(SELECTED_THREAD_STORAGE_KEY) + return raw ?? '' +} + +export function saveSelectedThreadId(threadId: string): void { + if (typeof window === 'undefined') return + if (!threadId) { + window.localStorage.removeItem(SELECTED_THREAD_STORAGE_KEY) + return + } + window.localStorage.setItem(SELECTED_THREAD_STORAGE_KEY, threadId) +} + +export function loadProjectOrder(): string[] { + if (typeof window === 'undefined') return [] + + try { + const raw = window.localStorage.getItem(PROJECT_ORDER_STORAGE_KEY) + if (!raw) return [] + + const parsed = JSON.parse(raw) as unknown + if (!Array.isArray(parsed)) return [] + const order: string[] = [] + for (const item of parsed) { + if (typeof item !== 'string' || item.length === 0) continue + const normalizedItem = toProjectName(item) + if (normalizedItem.length > 0 && !order.includes(normalizedItem)) { + order.push(normalizedItem) + } + } + return order + } catch { + return [] + } +} + +export function saveProjectOrder(order: string[]): void { + if (typeof window === 'undefined') return + window.localStorage.setItem(PROJECT_ORDER_STORAGE_KEY, JSON.stringify(order)) +} + +export function loadProjectDisplayNames(): Record { + if (typeof window === 'undefined') return {} + + try { + const raw = window.localStorage.getItem(PROJECT_DISPLAY_NAME_STORAGE_KEY) + if (!raw) return {} + + const parsed = JSON.parse(raw) as unknown + if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} + + const displayNames: Record = {} + for (const [projectName, displayName] of Object.entries(parsed as Record)) { + const normalizedProjectName = typeof projectName === 'string' ? 
toProjectName(projectName) : '' + if (normalizedProjectName.length > 0 && typeof displayName === 'string') { + displayNames[normalizedProjectName] = displayName + } + } + return displayNames + } catch { + return {} + } +} + +export function saveProjectDisplayNames(displayNames: Record): void { + if (typeof window === 'undefined') return + window.localStorage.setItem(PROJECT_DISPLAY_NAME_STORAGE_KEY, JSON.stringify(displayNames)) +} diff --git a/src/composables/useDesktopState.ts b/src/composables/useDesktopState.ts index 0ac873d3d..785a8d841 100644 --- a/src/composables/useDesktopState.ts +++ b/src/composables/useDesktopState.ts @@ -59,6 +59,41 @@ import type { UiThread, } from '../types/codex' import { getPathParent, isProjectlessChatPath, normalizePathForUi, toProjectName } from '../pathUtils.js' +import { + cloneStringKeyedRecord, + clamp, + createStringKeyedRecord, + isThreadUnreadByLastRead, + loadProjectDisplayNames, + loadProjectOrder, + loadReadStateMap, + loadSelectedCollaborationModeMap, + loadSelectedModelMap, + loadSelectedThreadId, + loadThreadTerminalOpenMap, + loadThreadTokenUsageMap, + loadUnreadCutoffIso, + NEW_THREAD_COLLABORATION_MODE_CONTEXT, + normalizeProviderContextId, + normalizeStoredModelId, + omitStringKeyedRecordKey, + pruneThreadContextStateMap, + readSelectedCollaborationMode, + readSelectedModel, + saveProjectDisplayNames, + saveProjectOrder, + saveReadStateMap, + saveSelectedCollaborationModeMap, + saveSelectedModelMap, + saveSelectedThreadId, + saveThreadTerminalOpenMap, + saveThreadTokenUsageMap, + saveUnreadCutoffIso, + toProviderModelContextId, + toThreadContextId, + writeSelectedCollaborationModeForContext, +} from './desktopStateStorage' +export { isThreadUnreadByLastRead } from './desktopStateStorage' function flattenThreads(groups: UiProjectGroup[]): UiThread[] { return groups.flatMap((group) => group.threads) @@ -70,19 +105,6 @@ export function findAdjacentThreadId(threads: UiThread[], threadId: string): str return 
threads[targetIndex + 1]?.id ?? threads[targetIndex - 1]?.id ?? '' } -const READ_STATE_STORAGE_KEY = 'codex-web-local.thread-read-state.v1' -const UNREAD_CUTOFF_STORAGE_KEY = 'codex-web-local.thread-unread-cutoff.v1' -const THREAD_TOKEN_USAGE_STORAGE_KEY = 'codex-web-local.thread-token-usage.v1' -const THREAD_TERMINAL_OPEN_STORAGE_KEY = 'codex-web-local.thread-terminal-open.v1' -const SELECTED_THREAD_STORAGE_KEY = 'codex-web-local.selected-thread-id.v1' -const SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY = 'codex-web-local.selected-model-by-context.v1' -const LEGACY_SELECTED_MODEL_STORAGE_KEY = 'codex-web-local.selected-model-id.v1' -const PROJECT_ORDER_STORAGE_KEY = 'codex-web-local.project-order.v1' -const PROJECT_DISPLAY_NAME_STORAGE_KEY = 'codex-web-local.project-display-name.v1' -const COLLABORATION_MODE_STORAGE_KEY = 'codex-web-local.collaboration-mode-by-context.v1' -const LEGACY_COLLABORATION_MODE_STORAGE_KEY = 'codex-web-local.collaboration-mode.v1' -const NEW_THREAD_COLLABORATION_MODE_CONTEXT = '__new-thread__' -const NEW_THREAD_PROVIDER_MODEL_CONTEXT_PREFIX = '__new-thread-provider__::' const EVENT_SYNC_DEBOUNCE_MS = 220 const BACKGROUND_THREAD_PAGINATION_DELAY_MS = 10_000 const RATE_LIMIT_REFRESH_DEBOUNCE_MS = 500 @@ -92,438 +114,6 @@ const REASONING_EFFORT_OPTIONS: ReasoningEffort[] = ['none', 'minimal', 'low', ' const GLOBAL_SERVER_REQUEST_SCOPE = '__global__' const MODEL_FALLBACK_ID = 'gpt-5.4-mini' -function loadReadStateMap(): Record { - if (typeof window === 'undefined') return {} - - try { - const raw = window.localStorage.getItem(READ_STATE_STORAGE_KEY) - if (!raw) return {} - - const parsed = JSON.parse(raw) as unknown - if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} - return parsed as Record - } catch { - return {} - } -} - -function saveReadStateMap(state: Record): void { - if (typeof window === 'undefined') return - window.localStorage.setItem(READ_STATE_STORAGE_KEY, JSON.stringify(state)) -} - -function 
loadUnreadCutoffIso(): string { - if (typeof window === 'undefined') return '' - - const existing = window.localStorage.getItem(UNREAD_CUTOFF_STORAGE_KEY) - if (existing) return existing - - const initialCutoff = new Date().toISOString() - window.localStorage.setItem(UNREAD_CUTOFF_STORAGE_KEY, initialCutoff) - return initialCutoff -} - -function saveUnreadCutoffIso(cutoffIso: string): void { - if (typeof window === 'undefined') return - window.localStorage.setItem(UNREAD_CUTOFF_STORAGE_KEY, cutoffIso) -} - -function isThreadUpdatedAfterCutoff(updatedAtIso: string, cutoffIso: string): boolean { - if (!updatedAtIso || !cutoffIso) return false - const updatedAtMs = new Date(updatedAtIso).getTime() - const cutoffMs = new Date(cutoffIso).getTime() - if (!Number.isFinite(updatedAtMs) || !Number.isFinite(cutoffMs)) return false - return updatedAtMs > cutoffMs -} - -export function isThreadUnreadByLastRead( - updatedAtIso: string, - threadReadStateIso: string | undefined, - unreadCutoffIso: string, -): boolean { - const effectiveLastReadIso = threadReadStateIso ?? unreadCutoffIso - return isThreadUpdatedAfterCutoff(updatedAtIso, effectiveLastReadIso) -} - -function normalizeCollaborationMode(value: unknown): CollaborationModeKind { - return value === 'plan' ? 'plan' : 'default' -} - -function normalizeStoredModelId(value: unknown): string { - return typeof value === 'string' ? 
value.trim() : '' -} - -function createStringKeyedRecord(): Record { - return Object.create(null) as Record -} - -function cloneStringKeyedRecord(record: Record): Record { - const next = createStringKeyedRecord() - for (const [key, value] of Object.entries(record)) { - next[key] = value - } - return next -} - -function omitStringKeyedRecordKey(record: Record, key: string): Record { - if (!(key in record)) return record - const next = createStringKeyedRecord() - for (const [entryKey, value] of Object.entries(record)) { - if (entryKey !== key) { - next[entryKey] = value - } - } - return next -} - -function pruneThreadContextStateMap( - stateMap: Record, - threadIds: Set, -): Record { - let changed = false - const next = createStringKeyedRecord() - for (const [contextId, value] of Object.entries(stateMap)) { - if ( - contextId === NEW_THREAD_COLLABORATION_MODE_CONTEXT - || contextId.startsWith(NEW_THREAD_PROVIDER_MODEL_CONTEXT_PREFIX) - || threadIds.has(contextId) - ) { - next[contextId] = value - continue - } - changed = true - } - return changed ? 
next : stateMap -} - -function normalizeProviderContextId(providerId: string): string { - const normalized = providerId.trim().toLowerCase() - return normalized || 'codex' -} - -function isNewThreadContextId(contextId: string): boolean { - return contextId === NEW_THREAD_COLLABORATION_MODE_CONTEXT -} - -function toProviderModelContextId(providerId: string): string { - const normalizedProviderId = normalizeProviderContextId(providerId) - if (!normalizedProviderId) return '' - return `${NEW_THREAD_PROVIDER_MODEL_CONTEXT_PREFIX}${normalizedProviderId}` -} - -function toThreadContextId(threadId: string): string { - const normalizedThreadId = threadId.trim() - return normalizedThreadId || NEW_THREAD_COLLABORATION_MODE_CONTEXT -} - -function loadSelectedModelMap(): Record { - if (typeof window === 'undefined') return createStringKeyedRecord() - - try { - const raw = window.localStorage.getItem(SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY) - if (raw) { - const parsed = JSON.parse(raw) as unknown - if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return createStringKeyedRecord() - - const next = createStringKeyedRecord() - for (const [contextId, value] of Object.entries(parsed as Record)) { - if (typeof contextId !== 'string' || contextId.length === 0) continue - const normalizedModelId = normalizeStoredModelId(value) - if (normalizedModelId) { - next[contextId] = normalizedModelId - } - } - return next - } - } catch { - // Fall back to the legacy global preference below. 
- } - - const legacyModelId = normalizeStoredModelId(window.localStorage.getItem(LEGACY_SELECTED_MODEL_STORAGE_KEY)) - const next = createStringKeyedRecord() - if (legacyModelId) { - next[NEW_THREAD_COLLABORATION_MODE_CONTEXT] = legacyModelId - } - return next -} - -function readSelectedModel( - state: Record, - threadId: string, -): string { - const contextId = toThreadContextId(threadId) - const contextModelId = normalizeStoredModelId(state[contextId]) - if (contextModelId) return contextModelId - return normalizeStoredModelId(state[NEW_THREAD_COLLABORATION_MODE_CONTEXT]) -} - -function saveSelectedModelMap(state: Record): void { - if (typeof window === 'undefined') return - try { - if (Object.keys(state).length === 0) { - window.localStorage.removeItem(SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY) - } else { - window.localStorage.setItem(SELECTED_MODEL_BY_CONTEXT_STORAGE_KEY, JSON.stringify(state)) - } - window.localStorage.removeItem(LEGACY_SELECTED_MODEL_STORAGE_KEY) - } catch { - // Keep in-memory selection working even if localStorage writes fail. - } -} - -function loadSelectedCollaborationModeMap(): Record { - if (typeof window === 'undefined') return createStringKeyedRecord() - - try { - const raw = window.localStorage.getItem(COLLABORATION_MODE_STORAGE_KEY) - if (raw) { - const parsed = JSON.parse(raw) as unknown - if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) { - return createStringKeyedRecord() - } - - const next = createStringKeyedRecord() - for (const [contextId, value] of Object.entries(parsed as Record)) { - if (typeof contextId !== 'string' || contextId.length === 0) continue - const normalizedMode = normalizeCollaborationMode(value) - if (normalizedMode === 'plan') { - next[contextId] = normalizedMode - } - } - return next - } - } catch { - // Fall back to the legacy global preference below. 
- } - - return createStringKeyedRecord() -} - -function readSelectedCollaborationMode( - state: Record, - threadId: string, -): CollaborationModeKind { - const contextId = toThreadContextId(threadId) - return normalizeCollaborationMode(state[contextId]) -} - -function writeSelectedCollaborationModeForContext( - state: Record, - threadId: string, - mode: CollaborationModeKind, -): Record { - const contextId = toThreadContextId(threadId) - if (isNewThreadContextId(contextId)) { - return omitStringKeyedRecordKey(state, contextId) - } - if (mode === 'plan') { - const next = cloneStringKeyedRecord(state) - next[contextId] = 'plan' - return next - } - return omitStringKeyedRecordKey(state, contextId) -} - -function saveSelectedCollaborationModeMap(state: Record): void { - if (typeof window === 'undefined') return - try { - if (Object.keys(state).length === 0) { - window.localStorage.removeItem(COLLABORATION_MODE_STORAGE_KEY) - } else { - window.localStorage.setItem(COLLABORATION_MODE_STORAGE_KEY, JSON.stringify(state)) - } - window.localStorage.removeItem(LEGACY_COLLABORATION_MODE_STORAGE_KEY) - } catch { - // Keep in-memory mode selection working even if localStorage writes fail. - } -} - -function clamp(value: number, minValue: number, maxValue: number): number { - return Math.min(Math.max(value, minValue), maxValue) -} - -function normalizeStoredTokenCount(value: unknown): number | null { - if (typeof value === 'number' && Number.isFinite(value)) { - return Math.max(0, Math.trunc(value)) - } - - if (typeof value === 'string' && value.trim().length > 0) { - const parsed = Number(value) - if (Number.isFinite(parsed)) { - return Math.max(0, Math.trunc(parsed)) - } - } - - return null -} - -function normalizeTokenUsageBreakdown(value: unknown): UiThreadTokenUsage['last'] | null { - if (!value || typeof value !== 'object' || Array.isArray(value)) return null - - const record = value as Record - return { - totalTokens: normalizeStoredTokenCount(record.totalTokens) ?? 
0, - inputTokens: normalizeStoredTokenCount(record.inputTokens) ?? 0, - cachedInputTokens: normalizeStoredTokenCount(record.cachedInputTokens) ?? 0, - outputTokens: normalizeStoredTokenCount(record.outputTokens) ?? 0, - reasoningOutputTokens: normalizeStoredTokenCount(record.reasoningOutputTokens) ?? 0, - } -} - -function normalizeThreadTokenUsage(value: unknown): UiThreadTokenUsage | null { - if (!value || typeof value !== 'object' || Array.isArray(value)) return null - - const record = value as Record - const total = normalizeTokenUsageBreakdown(record.total) - const last = normalizeTokenUsageBreakdown(record.last) - if (!total || !last) return null - - const modelContextWindow = normalizeStoredTokenCount(record.modelContextWindow) - const currentContextTokens = last.totalTokens - const remainingContextTokens = typeof modelContextWindow === 'number' - ? Math.max(modelContextWindow - currentContextTokens, 0) - : null - const remainingContextPercent = typeof modelContextWindow === 'number' && modelContextWindow > 0 - ? clamp(Math.round((remainingContextTokens ?? 
0) / modelContextWindow * 100), 0, 100) - : null - - return { - total, - last, - modelContextWindow, - currentContextTokens, - remainingContextTokens, - remainingContextPercent, - } -} - -function loadThreadTokenUsageMap(): Record { - if (typeof window === 'undefined') return {} - - try { - const raw = window.localStorage.getItem(THREAD_TOKEN_USAGE_STORAGE_KEY) - if (!raw) return {} - - const parsed = JSON.parse(raw) as unknown - if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} - - const normalizedMap: Record = {} - for (const [threadId, usage] of Object.entries(parsed as Record)) { - if (!threadId) continue - const normalizedUsage = normalizeThreadTokenUsage(usage) - if (normalizedUsage) { - normalizedMap[threadId] = normalizedUsage - } - } - return normalizedMap - } catch { - return {} - } -} - -function saveThreadTokenUsageMap(state: Record): void { - if (typeof window === 'undefined') return - window.localStorage.setItem(THREAD_TOKEN_USAGE_STORAGE_KEY, JSON.stringify(state)) -} - -function loadThreadTerminalOpenMap(): Record { - if (typeof window === 'undefined') return {} - - try { - const raw = window.localStorage.getItem(THREAD_TERMINAL_OPEN_STORAGE_KEY) - if (!raw) return {} - - const parsed = JSON.parse(raw) as unknown - if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} - - const normalizedMap: Record = {} - for (const [threadId, isOpen] of Object.entries(parsed as Record)) { - if (threadId && typeof isOpen === 'boolean') { - normalizedMap[threadId] = isOpen - } - } - return normalizedMap - } catch { - return {} - } -} - -function saveThreadTerminalOpenMap(state: Record): void { - if (typeof window === 'undefined') return - window.localStorage.setItem(THREAD_TERMINAL_OPEN_STORAGE_KEY, JSON.stringify(state)) -} - -function loadSelectedThreadId(): string { - if (typeof window === 'undefined') return '' - const raw = window.localStorage.getItem(SELECTED_THREAD_STORAGE_KEY) - return raw ?? 
'' -} - -function saveSelectedThreadId(threadId: string): void { - if (typeof window === 'undefined') return - if (!threadId) { - window.localStorage.removeItem(SELECTED_THREAD_STORAGE_KEY) - return - } - window.localStorage.setItem(SELECTED_THREAD_STORAGE_KEY, threadId) -} - -function loadProjectOrder(): string[] { - if (typeof window === 'undefined') return [] - - try { - const raw = window.localStorage.getItem(PROJECT_ORDER_STORAGE_KEY) - if (!raw) return [] - - const parsed = JSON.parse(raw) as unknown - if (!Array.isArray(parsed)) return [] - const order: string[] = [] - for (const item of parsed) { - if (typeof item !== 'string' || item.length === 0) continue - const normalizedItem = toProjectName(item) - if (normalizedItem.length > 0 && !order.includes(normalizedItem)) { - order.push(normalizedItem) - } - } - return order - } catch { - return [] - } -} - -function saveProjectOrder(order: string[]): void { - if (typeof window === 'undefined') return - window.localStorage.setItem(PROJECT_ORDER_STORAGE_KEY, JSON.stringify(order)) -} - -function loadProjectDisplayNames(): Record { - if (typeof window === 'undefined') return {} - - try { - const raw = window.localStorage.getItem(PROJECT_DISPLAY_NAME_STORAGE_KEY) - if (!raw) return {} - - const parsed = JSON.parse(raw) as unknown - if (!parsed || typeof parsed !== 'object' || Array.isArray(parsed)) return {} - - const displayNames: Record = {} - for (const [projectName, displayName] of Object.entries(parsed as Record)) { - const normalizedProjectName = typeof projectName === 'string' ? 
toProjectName(projectName) : '' - if (normalizedProjectName.length > 0 && typeof displayName === 'string') { - displayNames[normalizedProjectName] = displayName - } - } - return displayNames - } catch { - return {} - } -} - -function saveProjectDisplayNames(displayNames: Record): void { - if (typeof window === 'undefined') return - window.localStorage.setItem(PROJECT_DISPLAY_NAME_STORAGE_KEY, JSON.stringify(displayNames)) -} - function mergeProjectOrder(previousOrder: string[], incomingGroups: UiProjectGroup[]): string[] { const nextOrder: string[] = [] diff --git a/tests.md b/tests.md index 7c59f5356..82b70e35c 100644 --- a/tests.md +++ b/tests.md @@ -303,6 +303,34 @@ Rollback/cleanup: --- +### Desktop state storage helper split + +#### Feature/Change Name +Desktop state localStorage and context-key helper extraction. + +#### Prerequisites/Setup +1. Dependencies installed with `pnpm install` +2. Dev server available if doing a manual browser pass (`pnpm run dev --host 127.0.0.1 --port 4173`) +3. Light theme and dark theme both available from the appearance switcher + +#### Steps +1. Run `pnpm run test:unit -- src/composables/useDesktopState.test.ts`. +2. Run `pnpm run build:frontend`. +3. In light theme, open the app and switch between at least two existing threads. +4. Change the selected model or collaboration mode for a thread, refresh the page, and confirm the selection persists. +5. Switch to dark theme and repeat steps 3-4. + +#### Expected Results +- Desktop-state unit tests pass. +- Frontend typecheck/build passes. +- Selected thread, per-thread model, collaboration mode, unread state, project order, token usage, and terminal-open persistence continue to work through the extracted helper module. +- The behavior is theme-independent and works in light theme and dark theme. + +#### Rollback/Cleanup +- None. 
+ +--- + ### Composio logged-out connector preview #### Feature/Change Name From e26e2a425308ec6c9f9aa3604477bbfe60a8815c Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:11:07 +0700 Subject: [PATCH 02/19] Extract thread inline payload helpers --- src/server/codexAppServerBridge.ts | 579 +---------------------------- src/server/threadInlinePayloads.ts | 578 ++++++++++++++++++++++++++++ tests.md | 27 ++ 3 files changed, 622 insertions(+), 562 deletions(-) create mode 100644 src/server/threadInlinePayloads.ts diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index 0dde66bba..e6519e788 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -41,6 +41,8 @@ import { } from '../commandResolution.js' import type { CollaborationModeKind, ReasoningEffort } from '../types/codex.js' import { isAbsoluteLikePath } from '../pathUtils.js' +import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' +export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' type JsonRpcCall = { jsonrpc: '2.0' @@ -234,206 +236,6 @@ const DEFAULT_API_PERF_BODY_MB_THRESHOLD = 1 const MB_DIVISOR = 1024 * 1024 const COMPOSIO_USER_DATA_PATH = join(homedir(), '.composio', 'user_data.json') -type SessionRecoveredFileChange = { - path: string - operation: 'add' | 'delete' | 'update' - movedToPath: string | null - diff: string - addedLineCount: number - removedLineCount: number -} - -type SessionRecoveredTurnFileChanges = { - turnId: string - turnIndex: number - fileChanges: SessionRecoveredFileChange[] -} - -type SessionRecoveredSkillInput = { - name: string - path: string -} - -type SessionSkillInputCacheEntry = { - size: number - mtimeMs: number - skillsByTurnId: Map -} - -const SESSION_SKILL_INPUT_CACHE_LIMIT = 64 -const sessionSkillInputCache = new Map() - -function parseSessionSkillText(value: string): 
SessionRecoveredSkillInput | null { - const trimmed = value.trim() - if (!trimmed.startsWith('')) return null - const name = trimmed.match(/\s*([\s\S]*?)\s*<\/name>/u)?.[1]?.trim() ?? '' - const path = trimmed.match(/\s*([\s\S]*?)\s*<\/path>/u)?.[1]?.trim() ?? '' - if (!name || !path) return null - return { name, path } -} - -function buildSessionSkillInputsByTurn(sessionLogRaw: string): Map { - let currentTurnId = '' - const skillsByTurnId = new Map() - - for (const line of sessionLogRaw.split('\n')) { - if (!line.trim()) continue - let row: Record | null = null - try { - row = JSON.parse(line) as Record - } catch { - continue - } - - if (row.type === 'turn_context') { - const payloadRecord = asRecord(row.payload) - currentTurnId = readNonEmptyString(payloadRecord?.turn_id) || currentTurnId - continue - } - if (row.type === 'event_msg') { - const payloadRecord = asRecord(row.payload) - if (payloadRecord?.type === 'task_started') { - currentTurnId = readNonEmptyString(payloadRecord.turn_id) || currentTurnId - } - continue - } - - if (row.type !== 'response_item' || !currentTurnId) continue - const payloadRecord = asRecord(row.payload) - if (payloadRecord?.type !== 'message' || payloadRecord.role !== 'user') continue - const content = Array.isArray(payloadRecord.content) ? payloadRecord.content : [] - - for (const contentItem of content) { - const contentRecord = asRecord(contentItem) - if (contentRecord?.type !== 'input_text' || typeof contentRecord.text !== 'string') continue - const skill = parseSessionSkillText(contentRecord.text) - if (!skill) continue - const existing = skillsByTurnId.get(currentTurnId) ?? 
[] - if (!existing.some((item) => item.path === skill.path)) { - existing.push(skill) - skillsByTurnId.set(currentTurnId, existing) - } - } - } - - return skillsByTurnId -} - -async function readCachedSessionSkillInputsByTurn(sessionPath: string): Promise> { - const sessionStat = await stat(sessionPath) - const cached = sessionSkillInputCache.get(sessionPath) - if (cached && cached.size === sessionStat.size && cached.mtimeMs === sessionStat.mtimeMs) { - return cached.skillsByTurnId - } - - const sessionLogRaw = await readFile(sessionPath, 'utf8') - const skillsByTurnId = buildSessionSkillInputsByTurn(sessionLogRaw) - sessionSkillInputCache.set(sessionPath, { - size: sessionStat.size, - mtimeMs: sessionStat.mtimeMs, - skillsByTurnId, - }) - if (sessionSkillInputCache.size > SESSION_SKILL_INPUT_CACHE_LIMIT) { - const oldestKey = sessionSkillInputCache.keys().next().value - if (oldestKey) sessionSkillInputCache.delete(oldestKey) - } - return skillsByTurnId -} - -function mergeSessionSkillInputsIntoTurnsFromMap( - turns: unknown[], - skillsByTurnId: Map, -): unknown[] { - const turnIds = new Set() - for (const turn of turns) { - const turnRecord = asRecord(turn) - const turnId = readNonEmptyString(turnRecord?.id) - if (turnId) turnIds.add(turnId) - } - if (turnIds.size === 0) return turns - - if (skillsByTurnId.size === 0) return turns - - let changed = false - const nextTurns = turns.map((turn) => { - const turnRecord = asRecord(turn) - const turnId = readNonEmptyString(turnRecord?.id) - const skills = turnId ? skillsByTurnId.get(turnId) : undefined - const items = Array.isArray(turnRecord?.items) ? 
turnRecord.items : null - if (!turnRecord || !skills || skills.length === 0 || !items) return turn - - let targetUserMessageIndex = -1 - for (let index = items.length - 1; index >= 0; index -= 1) { - const itemRecord = asRecord(items[index]) - if (itemRecord?.type === 'userMessage' && Array.isArray(itemRecord.content)) { - targetUserMessageIndex = index - break - } - } - if (targetUserMessageIndex < 0) return turn - - let addedToMessage = false - const nextItems = items.map((item, index) => { - const itemRecord = asRecord(item) - const content = Array.isArray(itemRecord?.content) ? itemRecord.content : null - if (index !== targetUserMessageIndex || itemRecord?.type !== 'userMessage' || !content) return item - - const existingSkillPaths = new Set( - content.flatMap((contentItem) => { - const contentRecord = asRecord(contentItem) - const path = typeof contentRecord?.path === 'string' ? contentRecord.path.trim() : '' - return contentRecord?.type === 'skill' && path ? [path] : [] - }), - ) - const missingSkills = skills.filter((skill) => !existingSkillPaths.has(skill.path)) - if (missingSkills.length === 0) return item - - addedToMessage = true - changed = true - return { - ...itemRecord, - content: [ - ...content, - ...missingSkills.map((skill) => ({ type: 'skill', name: skill.name, path: skill.path })), - ], - } - }) - - return addedToMessage ? { ...turnRecord, items: nextItems } : turn - }) - - return changed ? nextTurns : turns -} - -export function mergeSessionSkillInputsIntoTurns(turns: unknown[], sessionLogRaw: string): unknown[] { - return mergeSessionSkillInputsIntoTurnsFromMap(turns, buildSessionSkillInputsByTurn(sessionLogRaw)) -} - -async function mergeSessionSkillInputsIntoThreadResult(result: unknown): Promise { - const record = asRecord(result) - const thread = asRecord(record?.thread) - const turns = Array.isArray(thread?.turns) ? 
thread.turns : null - const sessionPath = readNonEmptyString(thread?.path) - if (!record || !thread || !turns || turns.length === 0 || !sessionPath || !isAbsolute(sessionPath)) { - return result - } - - try { - const skillsByTurnId = await readCachedSessionSkillInputsByTurn(sessionPath) - const mergedTurns = mergeSessionSkillInputsIntoTurnsFromMap(turns, skillsByTurnId) - if (mergedTurns === turns) return result - return { - ...record, - thread: { - ...thread, - turns: mergedTurns, - }, - } - } catch { - return result - } -} - function readEnvValueFromFile(filePath: string, key: string): string | null { try { const content = readFileSync(filePath, 'utf8') @@ -519,368 +321,6 @@ function asRecord(value: unknown): Record | null { : null } -function isInlineDataUrl(value: string): boolean { - return /^data:/iu.test(value.trim()) -} - -function inferImageMimeTypeFromBytes(bytes: Uint8Array): string | null { - if ( - bytes.length >= 8 && - bytes[0] === 0x89 && - bytes[1] === 0x50 && - bytes[2] === 0x4e && - bytes[3] === 0x47 && - bytes[4] === 0x0d && - bytes[5] === 0x0a && - bytes[6] === 0x1a && - bytes[7] === 0x0a - ) { - return 'image/png' - } - if (bytes.length >= 3 && bytes[0] === 0xff && bytes[1] === 0xd8 && bytes[2] === 0xff) { - return 'image/jpeg' - } - if ( - bytes.length >= 12 && - bytes[0] === 0x52 && - bytes[1] === 0x49 && - bytes[2] === 0x46 && - bytes[3] === 0x46 && - bytes[8] === 0x57 && - bytes[9] === 0x45 && - bytes[10] === 0x42 && - bytes[11] === 0x50 - ) { - return 'image/webp' - } - if ( - bytes.length >= 6 && - bytes[0] === 0x47 && - bytes[1] === 0x49 && - bytes[2] === 0x46 && - bytes[3] === 0x38 && - (bytes[4] === 0x37 || bytes[4] === 0x39) && - bytes[5] === 0x61 - ) { - return 'image/gif' - } - return null -} - -function inferImageMimeTypeFromBase64(value: string): string | null { - const compact = value.trim().replace(/\s+/gu, '') - if (compact.length < 32 || !/^[A-Za-z0-9+/]+={0,2}$/u.test(compact)) return null - try { - return 
inferImageMimeTypeFromBytes(Buffer.from(compact.slice(0, 64), 'base64')) - } catch { - return null - } -} - -function normalizeBase64ImageDataUrl(value: string, mimeType: string): string | null { - const trimmed = value.trim() - if (!trimmed) return null - if (isInlineDataUrl(trimmed)) { - return /^data:image\//iu.test(trimmed) ? trimmed : null - } - const compact = trimmed.replace(/\s+/gu, '') - const inferredMimeType = inferImageMimeTypeFromBase64(compact) - if (!inferredMimeType) return null - const normalizedMimeType = mimeType.trim().toLowerCase() - const finalMimeType = normalizedMimeType.startsWith('image/') && normalizedMimeType !== 'image/*' - ? normalizedMimeType - : inferredMimeType - return `data:${finalMimeType};base64,${compact}` -} - -function extensionFromMimeType(mimeType: string): string { - const normalized = mimeType.trim().toLowerCase() - if (normalized === 'image/png') return '.png' - if (normalized === 'image/jpeg') return '.jpg' - if (normalized === 'image/webp') return '.webp' - if (normalized === 'image/gif') return '.gif' - if (normalized === 'image/svg+xml') return '.svg' - if (normalized === 'application/pdf') return '.pdf' - return '' -} - -function asNonEmptyString(value: unknown): string | null { - if (typeof value !== 'string') return null - const trimmed = value.trim() - return trimmed.length > 0 ? trimmed : null -} - -function toAttachmentLinkTarget(block: Record, fallback: string): string { - const candidate = asNonEmptyString(block.path) - ?? asNonEmptyString(block.file_path) - ?? asNonEmptyString(block.filename) - ?? asNonEmptyString(block.file_id) - ?? 
fallback - if (candidate.startsWith('file://')) return candidate - if (candidate.startsWith('/')) return `file://${candidate}` - return `attachment://${candidate}` -} - -async function persistInlineDataUrlToLocalFile(dataUrl: string, baseName: string): Promise { - const trimmed = dataUrl.trim() - const match = /^data:([^;,]*)(;base64)?,(.*)$/isu.exec(trimmed) - if (!match) return null - const mimeType = (match[1] ?? '').trim().toLowerCase() - const encodedPayload = match[3] ?? '' - let bytes: Buffer - try { - bytes = match[2] - ? Buffer.from(encodedPayload, 'base64') - : Buffer.from(decodeURIComponent(encodedPayload), 'utf8') - } catch { - return null - } - if (bytes.length === 0) return null - - const hash = createHash('sha1').update(bytes).digest('hex') - const ext = extensionFromMimeType(mimeType) - const mediaDir = join(tmpdir(), 'codex-web-inline-media') - await mkdir(mediaDir, { recursive: true }) - const fileName = `${baseName}-${hash}${ext}` - const filePath = join(mediaDir, fileName) - try { - await stat(filePath) - } catch { - await writeFile(filePath, bytes) - } - return filePath -} - -function toLocalImageProxyUrl(path: string): string { - return `/codex-local-image?path=${encodeURIComponent(path)}` -} - -const INLINE_IMAGE_FIELD_NAMES = new Set([ - 'b64_json', - 'image', - 'image_url', - 'images', - 'result', - 'url', -]) - -type InlinePayloadSanitizeContext = { - turnId: string - itemId: string - blockIndex: number - fieldName?: string -} - -function isPotentialInlineImageField(fieldName: string | undefined): boolean { - return typeof fieldName === 'string' && INLINE_IMAGE_FIELD_NAMES.has(fieldName) -} - -async function sanitizeInlineImageString( - value: string, - context: InlinePayloadSanitizeContext, -): Promise<{ value: string; changed: boolean }> { - if (!isPotentialInlineImageField(context.fieldName)) { - return { value, changed: false } - } - - const dataUrl = normalizeBase64ImageDataUrl(value, 'image/*') - if (!dataUrl) return { value, 
changed: false } - - const localUrl = await persistInlineDataUrlToLocalFile( - dataUrl, - `inline-image-${context.turnId}-${context.itemId}-${context.fieldName}-${String(context.blockIndex)}`, - ) - if (!localUrl) return { value, changed: false } - - return { value: toLocalImageProxyUrl(localUrl), changed: true } -} - -async function sanitizeInlineUserContentBlock( - block: unknown, - context: InlinePayloadSanitizeContext, -): Promise { - const record = asRecord(block) - if (!record) return block - - const type = asNonEmptyString(record.type) ?? '' - const imageUrl = asNonEmptyString(record.url) ?? asNonEmptyString(record.image_url) - if (imageUrl && isInlineDataUrl(imageUrl)) { - const localUrl = await persistInlineDataUrlToLocalFile(imageUrl, `inline-image-${context.turnId}-${context.itemId}-${String(context.blockIndex)}`) - if (localUrl) { - const nextRecord = { ...record } - if (typeof record.url === 'string') { - nextRecord.url = toLocalImageProxyUrl(localUrl) - } - if (typeof record.image_url === 'string') { - nextRecord.image_url = toLocalImageProxyUrl(localUrl) - } - return { - ...nextRecord, - type: 'image', - } - } - const target = toAttachmentLinkTarget(record, `inline-image/${context.turnId}/${context.itemId}/${String(context.blockIndex)}`) - return { - type: 'text', - text: `Image attachment: ${target}`, - } - } - - if (type === 'imageGeneration' || type === 'image_generation') { - const rawResult = asNonEmptyString(record.result) - ?? asNonEmptyString(record.b64_json) - ?? asNonEmptyString(record.image) - const mimeType = asNonEmptyString(record.mime_type) - ?? asNonEmptyString(record.mimeType) - ?? 'image/png' - const dataUrl = rawResult ? 
normalizeBase64ImageDataUrl(rawResult, mimeType) : null - if (dataUrl) { - const localUrl = await persistInlineDataUrlToLocalFile(dataUrl, `generated-image-${context.turnId}-${context.itemId}`) - if (localUrl) { - return { - ...record, - type: 'imageView', - path: localUrl, - } - } - } - } - - const inlineFileData = asNonEmptyString(record.file_data) - ?? asNonEmptyString(record.data) - ?? asNonEmptyString(record.base64) - if ((type.includes('file') || type === 'input_file' || type === 'file') && inlineFileData) { - const mimeType = asNonEmptyString(record.mime_type) ?? 'application/octet-stream' - const fileDataUrl = `data:${mimeType};base64,${inlineFileData}` - const localUrl = await persistInlineDataUrlToLocalFile(fileDataUrl, `inline-file-${context.turnId}-${context.itemId}-${String(context.blockIndex)}`) - if (localUrl) { - return { - type: 'text', - text: `File attachment: ${localUrl}`, - } - } - const target = toAttachmentLinkTarget(record, `inline-file/${context.turnId}/${context.itemId}/${String(context.blockIndex)}`) - return { - type: 'text', - text: `File attachment: ${target}`, - } - } - - return block -} - -async function sanitizeInlinePayloadDeep( - value: unknown, - context: InlinePayloadSanitizeContext, -): Promise<{ value: unknown; changed: boolean }> { - const maybeBlock = await sanitizeInlineUserContentBlock(value, context) - if (maybeBlock !== value) { - return { value: maybeBlock, changed: true } - } - - if (typeof value === 'string') { - return sanitizeInlineImageString(value, context) - } - - if (Array.isArray(value)) { - let changed = false - const nextArray: unknown[] = [] - for (let index = 0; index < value.length; index += 1) { - const nested = await sanitizeInlinePayloadDeep(value[index], { - turnId: context.turnId, - itemId: context.itemId, - blockIndex: index, - fieldName: context.fieldName, - }) - if (nested.changed) changed = true - nextArray.push(nested.value) - } - return changed ? 
{ value: nextArray, changed: true } : { value, changed: false } - } - - const record = asRecord(value) - if (!record) return { value, changed: false } - - let changed = false - const nextRecord: Record = {} - for (const [key, nestedValue] of Object.entries(record)) { - const nested = await sanitizeInlinePayloadDeep(nestedValue, { - turnId: context.turnId, - itemId: context.itemId, - blockIndex: context.blockIndex, - fieldName: key, - }) - if (nested.changed) changed = true - nextRecord[key] = nested.value - } - - return changed ? { value: nextRecord, changed: true } : { value, changed: false } -} - -export async function sanitizeThreadTurnsInlinePayloads(method: string, result: unknown): Promise { - if (!THREAD_METHODS_WITH_TURNS.has(method)) return result - - const record = asRecord(result) - const thread = asRecord(record?.thread) - const turns = Array.isArray(thread?.turns) ? thread.turns : null - if (!record || !thread || !turns || turns.length === 0) return result - - let changed = false - const nextTurns: unknown[] = [] - for (let turnIndex = 0; turnIndex < turns.length; turnIndex += 1) { - const turn = turns[turnIndex] - const turnRecord = asRecord(turn) - const turnId = asNonEmptyString(turnRecord?.id) ?? 'turn' - const items = Array.isArray(turnRecord?.items) ? turnRecord.items : null - if (!turnRecord || !items) { - nextTurns.push(turn) - continue - } - - let itemChanged = false - const nextItems: unknown[] = [] - for (let itemIndex = 0; itemIndex < items.length; itemIndex += 1) { - const item = items[itemIndex] - const itemRecord = asRecord(item) - const itemId = asNonEmptyString(itemRecord?.id) ?? 
'item' - if (!itemRecord) { - nextItems.push(item) - continue - } - const sanitizedItem = await sanitizeInlinePayloadDeep(item, { - turnId, - itemId, - blockIndex: itemIndex + turnIndex, - }) - if (!sanitizedItem.changed) { - nextItems.push(item) - continue - } - itemChanged = true - nextItems.push(sanitizedItem.value) - } - - if (!itemChanged) { - nextTurns.push(turn) - continue - } - changed = true - nextTurns.push({ - ...turnRecord, - items: nextItems, - }) - } - - if (!changed) return result - return { - ...record, - thread: { - ...thread, - turns: nextTurns, - }, - } -} - function trimThreadTurnsInRpcResult(method: string, result: unknown): unknown { if (!THREAD_METHODS_WITH_TURNS.has(method)) return result @@ -1859,6 +1299,21 @@ async function installComposioCli(): Promise { } } +type SessionRecoveredFileChange = { + path: string + operation: 'add' | 'delete' | 'update' + movedToPath: string | null + diff: string + addedLineCount: number + removedLineCount: number +} + +type SessionRecoveredTurnFileChanges = { + turnId: string + turnIndex: number + fileChanges: SessionRecoveredFileChange[] +} + function countRecoveredContentLines(value: string): number { if (!value) return 0 const normalized = value.replace(/\r\n/g, '\n') diff --git a/src/server/threadInlinePayloads.ts b/src/server/threadInlinePayloads.ts new file mode 100644 index 000000000..adc168918 --- /dev/null +++ b/src/server/threadInlinePayloads.ts @@ -0,0 +1,578 @@ +import { createHash } from 'node:crypto' +import { mkdir, readFile, stat, writeFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { isAbsolute, join } from 'node:path' + +const THREAD_METHODS_WITH_TURNS = new Set(['thread/read', 'thread/resume', 'thread/fork', 'thread/rollback']) + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? 
(value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? value : '' +} + +type SessionRecoveredFileChange = { + path: string + operation: 'add' | 'delete' | 'update' + movedToPath: string | null + diff: string + addedLineCount: number + removedLineCount: number +} + +type SessionRecoveredTurnFileChanges = { + turnId: string + turnIndex: number + fileChanges: SessionRecoveredFileChange[] +} + +type SessionRecoveredSkillInput = { + name: string + path: string +} + +type SessionSkillInputCacheEntry = { + size: number + mtimeMs: number + skillsByTurnId: Map +} + +const SESSION_SKILL_INPUT_CACHE_LIMIT = 64 +const sessionSkillInputCache = new Map() + +function parseSessionSkillText(value: string): SessionRecoveredSkillInput | null { + const trimmed = value.trim() + if (!trimmed.startsWith('')) return null + const name = trimmed.match(/\s*([\s\S]*?)\s*<\/name>/u)?.[1]?.trim() ?? '' + const path = trimmed.match(/\s*([\s\S]*?)\s*<\/path>/u)?.[1]?.trim() ?? 
'' + if (!name || !path) return null + return { name, path } +} + +function buildSessionSkillInputsByTurn(sessionLogRaw: string): Map { + let currentTurnId = '' + const skillsByTurnId = new Map() + + for (const line of sessionLogRaw.split('\n')) { + if (!line.trim()) continue + let row: Record | null = null + try { + row = JSON.parse(line) as Record + } catch { + continue + } + + if (row.type === 'turn_context') { + const payloadRecord = asRecord(row.payload) + currentTurnId = readNonEmptyString(payloadRecord?.turn_id) || currentTurnId + continue + } + if (row.type === 'event_msg') { + const payloadRecord = asRecord(row.payload) + if (payloadRecord?.type === 'task_started') { + currentTurnId = readNonEmptyString(payloadRecord.turn_id) || currentTurnId + } + continue + } + + if (row.type !== 'response_item' || !currentTurnId) continue + const payloadRecord = asRecord(row.payload) + if (payloadRecord?.type !== 'message' || payloadRecord.role !== 'user') continue + const content = Array.isArray(payloadRecord.content) ? payloadRecord.content : [] + + for (const contentItem of content) { + const contentRecord = asRecord(contentItem) + if (contentRecord?.type !== 'input_text' || typeof contentRecord.text !== 'string') continue + const skill = parseSessionSkillText(contentRecord.text) + if (!skill) continue + const existing = skillsByTurnId.get(currentTurnId) ?? 
[] + if (!existing.some((item) => item.path === skill.path)) { + existing.push(skill) + skillsByTurnId.set(currentTurnId, existing) + } + } + } + + return skillsByTurnId +} + +async function readCachedSessionSkillInputsByTurn(sessionPath: string): Promise> { + const sessionStat = await stat(sessionPath) + const cached = sessionSkillInputCache.get(sessionPath) + if (cached && cached.size === sessionStat.size && cached.mtimeMs === sessionStat.mtimeMs) { + return cached.skillsByTurnId + } + + const sessionLogRaw = await readFile(sessionPath, 'utf8') + const skillsByTurnId = buildSessionSkillInputsByTurn(sessionLogRaw) + sessionSkillInputCache.set(sessionPath, { + size: sessionStat.size, + mtimeMs: sessionStat.mtimeMs, + skillsByTurnId, + }) + if (sessionSkillInputCache.size > SESSION_SKILL_INPUT_CACHE_LIMIT) { + const oldestKey = sessionSkillInputCache.keys().next().value + if (oldestKey) sessionSkillInputCache.delete(oldestKey) + } + return skillsByTurnId +} + +function mergeSessionSkillInputsIntoTurnsFromMap( + turns: unknown[], + skillsByTurnId: Map, +): unknown[] { + const turnIds = new Set() + for (const turn of turns) { + const turnRecord = asRecord(turn) + const turnId = readNonEmptyString(turnRecord?.id) + if (turnId) turnIds.add(turnId) + } + if (turnIds.size === 0) return turns + + if (skillsByTurnId.size === 0) return turns + + let changed = false + const nextTurns = turns.map((turn) => { + const turnRecord = asRecord(turn) + const turnId = readNonEmptyString(turnRecord?.id) + const skills = turnId ? skillsByTurnId.get(turnId) : undefined + const items = Array.isArray(turnRecord?.items) ? 
turnRecord.items : null + if (!turnRecord || !skills || skills.length === 0 || !items) return turn + + let targetUserMessageIndex = -1 + for (let index = items.length - 1; index >= 0; index -= 1) { + const itemRecord = asRecord(items[index]) + if (itemRecord?.type === 'userMessage' && Array.isArray(itemRecord.content)) { + targetUserMessageIndex = index + break + } + } + if (targetUserMessageIndex < 0) return turn + + let addedToMessage = false + const nextItems = items.map((item, index) => { + const itemRecord = asRecord(item) + const content = Array.isArray(itemRecord?.content) ? itemRecord.content : null + if (index !== targetUserMessageIndex || itemRecord?.type !== 'userMessage' || !content) return item + + const existingSkillPaths = new Set( + content.flatMap((contentItem) => { + const contentRecord = asRecord(contentItem) + const path = typeof contentRecord?.path === 'string' ? contentRecord.path.trim() : '' + return contentRecord?.type === 'skill' && path ? [path] : [] + }), + ) + const missingSkills = skills.filter((skill) => !existingSkillPaths.has(skill.path)) + if (missingSkills.length === 0) return item + + addedToMessage = true + changed = true + return { + ...itemRecord, + content: [ + ...content, + ...missingSkills.map((skill) => ({ type: 'skill', name: skill.name, path: skill.path })), + ], + } + }) + + return addedToMessage ? { ...turnRecord, items: nextItems } : turn + }) + + return changed ? nextTurns : turns +} + +export function mergeSessionSkillInputsIntoTurns(turns: unknown[], sessionLogRaw: string): unknown[] { + return mergeSessionSkillInputsIntoTurnsFromMap(turns, buildSessionSkillInputsByTurn(sessionLogRaw)) +} + +export async function mergeSessionSkillInputsIntoThreadResult(result: unknown): Promise { + const record = asRecord(result) + const thread = asRecord(record?.thread) + const turns = Array.isArray(thread?.turns) ? 
thread.turns : null + const sessionPath = readNonEmptyString(thread?.path) + if (!record || !thread || !turns || turns.length === 0 || !sessionPath || !isAbsolute(sessionPath)) { + return result + } + + try { + const skillsByTurnId = await readCachedSessionSkillInputsByTurn(sessionPath) + const mergedTurns = mergeSessionSkillInputsIntoTurnsFromMap(turns, skillsByTurnId) + if (mergedTurns === turns) return result + return { + ...record, + thread: { + ...thread, + turns: mergedTurns, + }, + } + } catch { + return result + } +} + +function isInlineDataUrl(value: string): boolean { + return /^data:/iu.test(value.trim()) +} + +function inferImageMimeTypeFromBytes(bytes: Uint8Array): string | null { + if ( + bytes.length >= 8 && + bytes[0] === 0x89 && + bytes[1] === 0x50 && + bytes[2] === 0x4e && + bytes[3] === 0x47 && + bytes[4] === 0x0d && + bytes[5] === 0x0a && + bytes[6] === 0x1a && + bytes[7] === 0x0a + ) { + return 'image/png' + } + if (bytes.length >= 3 && bytes[0] === 0xff && bytes[1] === 0xd8 && bytes[2] === 0xff) { + return 'image/jpeg' + } + if ( + bytes.length >= 12 && + bytes[0] === 0x52 && + bytes[1] === 0x49 && + bytes[2] === 0x46 && + bytes[3] === 0x46 && + bytes[8] === 0x57 && + bytes[9] === 0x45 && + bytes[10] === 0x42 && + bytes[11] === 0x50 + ) { + return 'image/webp' + } + if ( + bytes.length >= 6 && + bytes[0] === 0x47 && + bytes[1] === 0x49 && + bytes[2] === 0x46 && + bytes[3] === 0x38 && + (bytes[4] === 0x37 || bytes[4] === 0x39) && + bytes[5] === 0x61 + ) { + return 'image/gif' + } + return null +} + +function inferImageMimeTypeFromBase64(value: string): string | null { + const compact = value.trim().replace(/\s+/gu, '') + if (compact.length < 32 || !/^[A-Za-z0-9+/]+={0,2}$/u.test(compact)) return null + try { + return inferImageMimeTypeFromBytes(Buffer.from(compact.slice(0, 64), 'base64')) + } catch { + return null + } +} + +function normalizeBase64ImageDataUrl(value: string, mimeType: string): string | null { + const trimmed = value.trim() + if 
(!trimmed) return null + if (isInlineDataUrl(trimmed)) { + return /^data:image\//iu.test(trimmed) ? trimmed : null + } + const compact = trimmed.replace(/\s+/gu, '') + const inferredMimeType = inferImageMimeTypeFromBase64(compact) + if (!inferredMimeType) return null + const normalizedMimeType = mimeType.trim().toLowerCase() + const finalMimeType = normalizedMimeType.startsWith('image/') && normalizedMimeType !== 'image/*' + ? normalizedMimeType + : inferredMimeType + return `data:${finalMimeType};base64,${compact}` +} + +function extensionFromMimeType(mimeType: string): string { + const normalized = mimeType.trim().toLowerCase() + if (normalized === 'image/png') return '.png' + if (normalized === 'image/jpeg') return '.jpg' + if (normalized === 'image/webp') return '.webp' + if (normalized === 'image/gif') return '.gif' + if (normalized === 'image/svg+xml') return '.svg' + if (normalized === 'application/pdf') return '.pdf' + return '' +} + +function asNonEmptyString(value: unknown): string | null { + if (typeof value !== 'string') return null + const trimmed = value.trim() + return trimmed.length > 0 ? trimmed : null +} + +function toAttachmentLinkTarget(block: Record, fallback: string): string { + const candidate = asNonEmptyString(block.path) + ?? asNonEmptyString(block.file_path) + ?? asNonEmptyString(block.filename) + ?? asNonEmptyString(block.file_id) + ?? fallback + if (candidate.startsWith('file://')) return candidate + if (candidate.startsWith('/')) return `file://${candidate}` + return `attachment://${candidate}` +} + +async function persistInlineDataUrlToLocalFile(dataUrl: string, baseName: string): Promise { + const trimmed = dataUrl.trim() + const match = /^data:([^;,]*)(;base64)?,(.*)$/isu.exec(trimmed) + if (!match) return null + const mimeType = (match[1] ?? '').trim().toLowerCase() + const encodedPayload = match[3] ?? '' + let bytes: Buffer + try { + bytes = match[2] + ? 
Buffer.from(encodedPayload, 'base64') + : Buffer.from(decodeURIComponent(encodedPayload), 'utf8') + } catch { + return null + } + if (bytes.length === 0) return null + + const hash = createHash('sha1').update(bytes).digest('hex') + const ext = extensionFromMimeType(mimeType) + const mediaDir = join(tmpdir(), 'codex-web-inline-media') + await mkdir(mediaDir, { recursive: true }) + const fileName = `${baseName}-${hash}${ext}` + const filePath = join(mediaDir, fileName) + try { + await stat(filePath) + } catch { + await writeFile(filePath, bytes) + } + return filePath +} + +function toLocalImageProxyUrl(path: string): string { + return `/codex-local-image?path=${encodeURIComponent(path)}` +} + +const INLINE_IMAGE_FIELD_NAMES = new Set([ + 'b64_json', + 'image', + 'image_url', + 'images', + 'result', + 'url', +]) + +type InlinePayloadSanitizeContext = { + turnId: string + itemId: string + blockIndex: number + fieldName?: string +} + +function isPotentialInlineImageField(fieldName: string | undefined): boolean { + return typeof fieldName === 'string' && INLINE_IMAGE_FIELD_NAMES.has(fieldName) +} + +async function sanitizeInlineImageString( + value: string, + context: InlinePayloadSanitizeContext, +): Promise<{ value: string; changed: boolean }> { + if (!isPotentialInlineImageField(context.fieldName)) { + return { value, changed: false } + } + + const dataUrl = normalizeBase64ImageDataUrl(value, 'image/*') + if (!dataUrl) return { value, changed: false } + + const localUrl = await persistInlineDataUrlToLocalFile( + dataUrl, + `inline-image-${context.turnId}-${context.itemId}-${context.fieldName}-${String(context.blockIndex)}`, + ) + if (!localUrl) return { value, changed: false } + + return { value: toLocalImageProxyUrl(localUrl), changed: true } +} + +async function sanitizeInlineUserContentBlock( + block: unknown, + context: InlinePayloadSanitizeContext, +): Promise { + const record = asRecord(block) + if (!record) return block + + const type = 
asNonEmptyString(record.type) ?? '' + const imageUrl = asNonEmptyString(record.url) ?? asNonEmptyString(record.image_url) + if (imageUrl && isInlineDataUrl(imageUrl)) { + const localUrl = await persistInlineDataUrlToLocalFile(imageUrl, `inline-image-${context.turnId}-${context.itemId}-${String(context.blockIndex)}`) + if (localUrl) { + const nextRecord = { ...record } + if (typeof record.url === 'string') { + nextRecord.url = toLocalImageProxyUrl(localUrl) + } + if (typeof record.image_url === 'string') { + nextRecord.image_url = toLocalImageProxyUrl(localUrl) + } + return { + ...nextRecord, + type: 'image', + } + } + const target = toAttachmentLinkTarget(record, `inline-image/${context.turnId}/${context.itemId}/${String(context.blockIndex)}`) + return { + type: 'text', + text: `Image attachment: ${target}`, + } + } + + if (type === 'imageGeneration' || type === 'image_generation') { + const rawResult = asNonEmptyString(record.result) + ?? asNonEmptyString(record.b64_json) + ?? asNonEmptyString(record.image) + const mimeType = asNonEmptyString(record.mime_type) + ?? asNonEmptyString(record.mimeType) + ?? 'image/png' + const dataUrl = rawResult ? normalizeBase64ImageDataUrl(rawResult, mimeType) : null + if (dataUrl) { + const localUrl = await persistInlineDataUrlToLocalFile(dataUrl, `generated-image-${context.turnId}-${context.itemId}`) + if (localUrl) { + return { + ...record, + type: 'imageView', + path: localUrl, + } + } + } + } + + const inlineFileData = asNonEmptyString(record.file_data) + ?? asNonEmptyString(record.data) + ?? asNonEmptyString(record.base64) + if ((type.includes('file') || type === 'input_file' || type === 'file') && inlineFileData) { + const mimeType = asNonEmptyString(record.mime_type) ?? 
'application/octet-stream' + const fileDataUrl = `data:${mimeType};base64,${inlineFileData}` + const localUrl = await persistInlineDataUrlToLocalFile(fileDataUrl, `inline-file-${context.turnId}-${context.itemId}-${String(context.blockIndex)}`) + if (localUrl) { + return { + type: 'text', + text: `File attachment: ${localUrl}`, + } + } + const target = toAttachmentLinkTarget(record, `inline-file/${context.turnId}/${context.itemId}/${String(context.blockIndex)}`) + return { + type: 'text', + text: `File attachment: ${target}`, + } + } + + return block +} + +async function sanitizeInlinePayloadDeep( + value: unknown, + context: InlinePayloadSanitizeContext, +): Promise<{ value: unknown; changed: boolean }> { + const maybeBlock = await sanitizeInlineUserContentBlock(value, context) + if (maybeBlock !== value) { + return { value: maybeBlock, changed: true } + } + + if (typeof value === 'string') { + return sanitizeInlineImageString(value, context) + } + + if (Array.isArray(value)) { + let changed = false + const nextArray: unknown[] = [] + for (let index = 0; index < value.length; index += 1) { + const nested = await sanitizeInlinePayloadDeep(value[index], { + turnId: context.turnId, + itemId: context.itemId, + blockIndex: index, + fieldName: context.fieldName, + }) + if (nested.changed) changed = true + nextArray.push(nested.value) + } + return changed ? { value: nextArray, changed: true } : { value, changed: false } + } + + const record = asRecord(value) + if (!record) return { value, changed: false } + + let changed = false + const nextRecord: Record = {} + for (const [key, nestedValue] of Object.entries(record)) { + const nested = await sanitizeInlinePayloadDeep(nestedValue, { + turnId: context.turnId, + itemId: context.itemId, + blockIndex: context.blockIndex, + fieldName: key, + }) + if (nested.changed) changed = true + nextRecord[key] = nested.value + } + + return changed ? 
{ value: nextRecord, changed: true } : { value, changed: false } +} + +export async function sanitizeThreadTurnsInlinePayloads(method: string, result: unknown): Promise { + if (!THREAD_METHODS_WITH_TURNS.has(method)) return result + + const record = asRecord(result) + const thread = asRecord(record?.thread) + const turns = Array.isArray(thread?.turns) ? thread.turns : null + if (!record || !thread || !turns || turns.length === 0) return result + + let changed = false + const nextTurns: unknown[] = [] + for (let turnIndex = 0; turnIndex < turns.length; turnIndex += 1) { + const turn = turns[turnIndex] + const turnRecord = asRecord(turn) + const turnId = asNonEmptyString(turnRecord?.id) ?? 'turn' + const items = Array.isArray(turnRecord?.items) ? turnRecord.items : null + if (!turnRecord || !items) { + nextTurns.push(turn) + continue + } + + let itemChanged = false + const nextItems: unknown[] = [] + for (let itemIndex = 0; itemIndex < items.length; itemIndex += 1) { + const item = items[itemIndex] + const itemRecord = asRecord(item) + const itemId = asNonEmptyString(itemRecord?.id) ?? 'item' + if (!itemRecord) { + nextItems.push(item) + continue + } + const sanitizedItem = await sanitizeInlinePayloadDeep(item, { + turnId, + itemId, + blockIndex: itemIndex + turnIndex, + }) + if (!sanitizedItem.changed) { + nextItems.push(item) + continue + } + itemChanged = true + nextItems.push(sanitizedItem.value) + } + + if (!itemChanged) { + nextTurns.push(turn) + continue + } + changed = true + nextTurns.push({ + ...turnRecord, + items: nextItems, + }) + } + + if (!changed) return result + return { + ...record, + thread: { + ...thread, + turns: nextTurns, + }, + } +} diff --git a/tests.md b/tests.md index 82b70e35c..f3a007f01 100644 --- a/tests.md +++ b/tests.md @@ -303,6 +303,33 @@ Rollback/cleanup: --- +### Server inline payload helper split + +#### Feature/Change Name +Codex bridge inline payload and session-skill recovery extraction. + +#### Prerequisites/Setup +1. 
Dependencies installed with `pnpm install` +2. Existing Codex session fixtures covered by the server bridge unit tests + +#### Steps +1. Run `pnpm run test:unit -- src/server/codexAppServerBridge.inlinePayload.test.ts src/server/codexAppServerBridge.archive.test.ts`. +2. Run `pnpm run build:cli`. +3. Manually open a thread that contains generated/inline image or file payload content. +4. Confirm inline media still renders through local proxy URLs instead of huge inline payloads. +5. Open a thread whose user message used skills and confirm skill chips remain associated with the correct turn. + +#### Expected Results +- Inline payload sanitization tests pass. +- Archive recovery tests continue to pass through the bridge re-export. +- CLI build succeeds with the extracted server module. +- Session skill inputs and local inline-media proxying behave the same after extraction. + +#### Rollback/Cleanup +- Remove any temporary files created under the system temp `codex-web-inline-media` directory if manual media testing creates unwanted artifacts. 
+ +--- + ### Desktop state storage helper split #### Feature/Change Name From 251037dc45af493dcc7f41d1536ca56bb1cb9d22 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:14:32 +0700 Subject: [PATCH 03/19] Extract thread conversation file change helpers --- src/components/content/ThreadConversation.vue | 265 +----------------- .../content/threadConversationFileChanges.ts | 252 +++++++++++++++++ tests.md | 28 ++ 3 files changed, 294 insertions(+), 251 deletions(-) create mode 100644 src/components/content/threadConversationFileChanges.ts diff --git a/src/components/content/ThreadConversation.vue b/src/components/content/ThreadConversation.vue index ca265d246..322122047 100644 --- a/src/components/content/ThreadConversation.vue +++ b/src/components/content/ThreadConversation.vue @@ -871,6 +871,20 @@ import { computed, nextTick, onBeforeUnmount, onMounted, ref, watch } from 'vue' import type { UiFileChange, UiLiveOverlay, UiMessage, UiPlanStep, UiServerRequest } from '../../types/codex' import { useMobile } from '../../composables/useMobile' +import { + aggregateFileChanges, + buildDiffViewerLines, + fileChangeDeltaParts, + fileChangeKey, + fileChangeOperationLabel, + fileChangeOperationTone, + fileChangeSummaryLabel, + fileChangeSummaryStatusParts, + formatFileChangeDelta, + formatFileChangeCountLabel, + type DiffViewerLine, + type TurnFileChangeSummary, +} from './threadConversationFileChanges' import IconTablerArrowUp from '../icons/IconTablerArrowUp.vue' import IconTablerCopy from '../icons/IconTablerCopy.vue' @@ -1159,10 +1173,6 @@ function isFileChangeSummaryExpanded(message: UiMessage): boolean { return expandedFileChangeSummaryIds.value.has(message.id) } -function fileChangeKey(change: UiFileChange): string { - return `${change.path}\u0000${change.movedToPath ?? 
''}` -} - function openDiffViewer(summary: TurnFileChangeSummary | null, change: UiFileChange): void { if (!summary) return activeDiffViewerSummary.value = summary @@ -1401,20 +1411,6 @@ type McpElicitationField = { options: McpElicitationFieldOption[] defaultValue: string | number | boolean | string[] } -type TurnFileChangeSummary = { - changes: UiFileChange[] - sourceMessageIds: string[] - source: 'assistant' | 'metadata' -} -type DiffViewerLineKind = 'meta' | 'hunk' | 'add' | 'remove' | 'context' -type DiffViewerLine = { - key: string - kind: DiffViewerLineKind - oldLine: number | null - newLine: number | null - text: string -} - function isFilePath(value: string): boolean { if (!value || /[\r\n]/u.test(value)) return false if (value.endsWith('/') || value.endsWith('\\')) return false @@ -1737,47 +1733,6 @@ function showForkResponseButton(message: UiMessage): boolean { return typeof forkableTurnIndexByAnchorId.value[message.id] === 'number' } -function mergeFileChangeDiff(first: string, second: string): string { - if (!first) return second - if (!second || first === second) return first - return `${first}\n${second}`.trim() -} - -function mergeFileChangeEntry(first: UiFileChange, second: UiFileChange): UiFileChange { - const operation = first.operation === 'add' || second.operation === 'add' - ? 'add' - : first.operation === 'delete' || second.operation === 'delete' - ? 'delete' - : 'update' - return { - path: second.path || first.path, - operation, - movedToPath: second.movedToPath ?? first.movedToPath ?? null, - diff: mergeFileChangeDiff(first.diff, second.diff), - addedLineCount: first.addedLineCount + second.addedLineCount, - removedLineCount: first.removedLineCount + second.removedLineCount, - } -} - -function compareFileChanges(first: UiFileChange, second: UiFileChange): number { - const firstRank = first.operation === 'add' ? 0 : first.operation === 'update' ? 1 : 2 - const secondRank = second.operation === 'add' ? 0 : second.operation === 'update' ? 
1 : 2 - if (firstRank !== secondRank) return firstRank - secondRank - const firstPath = `${first.path}\u0000${first.movedToPath ?? ''}` - const secondPath = `${second.path}\u0000${second.movedToPath ?? ''}` - return firstPath.localeCompare(secondPath) -} - -function aggregateFileChanges(changes: UiFileChange[]): UiFileChange[] { - const byPath = new Map() - for (const change of changes) { - const key = `${change.path}\u0000${change.movedToPath ?? ''}` - const previous = byPath.get(key) - byPath.set(key, previous ? mergeFileChangeEntry(previous, change) : { ...change }) - } - return Array.from(byPath.values()).sort(compareFileChanges) -} - const anchoredFileChangeSummaryByAnchorId = computed>(() => { const assistantAnchorIdByTurnKey = new Map() const assistantSummaryByAnchorId = new Map() @@ -1878,96 +1833,6 @@ function readStandaloneFileChangeSummary(message: UiMessage): TurnFileChangeSumm return standaloneFileChangeSummaryByMessageId.value[message.id] ?? null } -function fileChangeOperationLabel(change: UiFileChange): string { - if (change.operation === 'update' && change.movedToPath) { - return change.addedLineCount > 0 || change.removedLineCount > 0 ? 
'Moved + edited' : 'Moved' - } - if (change.operation === 'add') return 'Added' - if (change.operation === 'delete') return 'Deleted' - return 'Edited' -} - -function fileChangeOperationTone(change: UiFileChange): 'add' | 'delete' | 'update' | 'move' { - if (change.operation === 'update' && change.movedToPath) return 'move' - return change.operation -} - -function formatFileChangeDelta(change: UiFileChange): string { - const parts: string[] = [] - if (change.addedLineCount > 0) parts.push(`+${change.addedLineCount}`) - if (change.removedLineCount > 0) parts.push(`-${change.removedLineCount}`) - return parts.join(' ') -} - -type FileChangeDeltaTone = 'add' | 'remove' | 'neutral' - -type FileChangeDeltaPart = { - tone: FileChangeDeltaTone - label: string -} - -function buildFileChangeDeltaParts(addedCount: number, removedCount: number, fallbackLabel = ''): FileChangeDeltaPart[] { - const parts: FileChangeDeltaPart[] = [] - if (addedCount > 0) parts.push({ tone: 'add', label: `+${addedCount}` }) - if (removedCount > 0) parts.push({ tone: 'remove', label: `-${removedCount}` }) - if (parts.length > 0) return parts - return fallbackLabel ? [{ tone: 'neutral', label: fallbackLabel }] : [] -} - -function fileChangeDeltaParts(change: UiFileChange): FileChangeDeltaPart[] { - return buildFileChangeDeltaParts(change.addedLineCount, change.removedLineCount) -} - -function formatFileChangeCountLabel(count: number): string { - return count === 1 ? 
'1 file changed' : `${count} files changed` -} - -function summarizeFileChangeKinds(summary: TurnFileChangeSummary | null): string { - if (!summary || summary.changes.length === 0) return '' - let added = 0 - let deleted = 0 - let edited = 0 - let moved = 0 - - for (const change of summary.changes) { - if (change.operation === 'add') { - added += 1 - continue - } - if (change.operation === 'delete') { - deleted += 1 - continue - } - if (change.movedToPath) { - moved += 1 - continue - } - edited += 1 - } - - const parts: string[] = [] - if (edited > 0) parts.push(`${edited} edited`) - if (added > 0) parts.push(`${added} added`) - if (deleted > 0) parts.push(`${deleted} deleted`) - if (moved > 0) parts.push(`${moved} moved`) - return parts.join(', ') -} - -function fileChangeSummaryLabel(summary: TurnFileChangeSummary | null): string { - if (!summary || summary.changes.length === 0) return 'Modified files' - const countLabel = formatFileChangeCountLabel(summary.changes.length) - const kindSummary = summarizeFileChangeKinds(summary) - return kindSummary ? `${countLabel} · ${kindSummary}` : countLabel -} - -function fileChangeSummaryStatusParts(summary: TurnFileChangeSummary | null): FileChangeDeltaPart[] { - if (!summary || summary.changes.length === 0) return [] - const totalAdded = summary.changes.reduce((sum, change) => sum + change.addedLineCount, 0) - const totalRemoved = summary.changes.reduce((sum, change) => sum + change.removedLineCount, 0) - const fallbackLabel = summary.changes.some((change) => change.movedToPath) ? 'Moved' : 'Ready' - return buildFileChangeDeltaParts(totalAdded, totalRemoved, fallbackLabel) -} - function displayFileChangePath(pathValue: string): string { const resolved = resolveRelativePath(pathValue, props.cwd) const normalizedCwd = normalizePathDots(normalizePathSeparators(props.cwd.trim())) @@ -2003,108 +1868,6 @@ function inferDiffViewerLanguage(change: UiFileChange): string { return CODE_LANGUAGE_ALIASES[extension] ?? extension ?? 
'' } -function hasStructuredUnifiedDiff(change: UiFileChange): boolean { - return change.operation === 'update' && /^diff --git |^@@ |^--- |^\+\+\+ |^[ +-]|^\*\*\* (Move to:|End of File)/mu.test(change.diff) -} - -function buildSyntheticDiffLines(change: UiFileChange): DiffViewerLine[] { - const normalized = change.diff.replace(/\r\n/g, '\n') - const lines = normalized.length > 0 ? normalized.split('\n') : [] - if (lines.length > 0 && lines[lines.length - 1] === '') { - lines.pop() - } - return lines.map((line, index) => ({ - key: `${fileChangeKey(change)}:synthetic:${index}`, - kind: change.operation === 'delete' ? 'remove' : 'add', - oldLine: change.operation === 'delete' ? index + 1 : null, - newLine: change.operation === 'delete' ? null : index + 1, - text: line, - })) -} - -function buildUnifiedDiffLines(change: UiFileChange): DiffViewerLine[] { - const normalized = change.diff.replace(/\r\n/g, '\n') - const lines = normalized.length > 0 ? normalized.split('\n') : [] - if (lines.length > 0 && lines[lines.length - 1] === '') { - lines.pop() - } - - const output: DiffViewerLine[] = [] - let oldLine = 0 - let newLine = 0 - - for (const [index, line] of lines.entries()) { - const hunkMatch = line.match(/^@@\s+-(\d+)(?:,\d+)?\s+\+(\d+)(?:,\d+)?\s+@@/u) - if (hunkMatch) { - oldLine = Number(hunkMatch[1]) - newLine = Number(hunkMatch[2]) - output.push({ - key: `${fileChangeKey(change)}:hunk:${index}`, - kind: 'hunk', - oldLine: null, - newLine: null, - text: line, - }) - continue - } - - if (line.startsWith('+') && !line.startsWith('+++')) { - output.push({ - key: `${fileChangeKey(change)}:add:${index}`, - kind: 'add', - oldLine: null, - newLine, - text: line.slice(1), - }) - newLine += 1 - continue - } - - if (line.startsWith('-') && !line.startsWith('---')) { - output.push({ - key: `${fileChangeKey(change)}:remove:${index}`, - kind: 'remove', - oldLine, - newLine: null, - text: line.slice(1), - }) - oldLine += 1 - continue - } - - if (line.startsWith(' ')) { - 
output.push({ - key: `${fileChangeKey(change)}:context:${index}`, - kind: 'context', - oldLine, - newLine, - text: line.slice(1), - }) - oldLine += 1 - newLine += 1 - continue - } - - output.push({ - key: `${fileChangeKey(change)}:meta:${index}`, - kind: 'meta', - oldLine: null, - newLine: null, - text: line, - }) - } - - return output -} - -function buildDiffViewerLines(change: UiFileChange | null): DiffViewerLine[] { - if (!change || !change.diff.trim()) return [] - if (hasStructuredUnifiedDiff(change)) { - return buildUnifiedDiffLines(change) - } - return buildSyntheticDiffLines(change) -} - const activeDiffViewerLines = computed(() => buildDiffViewerLines(activeDiffViewerChange.value)) function hasDiffViewerContent(change: UiFileChange | null): boolean { diff --git a/src/components/content/threadConversationFileChanges.ts b/src/components/content/threadConversationFileChanges.ts new file mode 100644 index 000000000..f0041d345 --- /dev/null +++ b/src/components/content/threadConversationFileChanges.ts @@ -0,0 +1,252 @@ +import type { UiFileChange } from '../../types/codex' + +export type TurnFileChangeSummary = { + changes: UiFileChange[] + sourceMessageIds: string[] + source: 'assistant' | 'metadata' +} +export type DiffViewerLineKind = 'meta' | 'hunk' | 'add' | 'remove' | 'context' +export type DiffViewerLine = { + key: string + kind: DiffViewerLineKind + oldLine: number | null + newLine: number | null + text: string +} + +export function fileChangeKey(change: UiFileChange): string { + return `${change.path}\u0000${change.movedToPath ?? ''}` +} + +export function mergeFileChangeDiff(first: string, second: string): string { + if (!first) return second + if (!second || first === second) return first + return `${first}\n${second}`.trim() +} + +export function mergeFileChangeEntry(first: UiFileChange, second: UiFileChange): UiFileChange { + const operation = first.operation === 'add' || second.operation === 'add' + ? 
'add' + : first.operation === 'delete' || second.operation === 'delete' + ? 'delete' + : 'update' + return { + path: second.path || first.path, + operation, + movedToPath: second.movedToPath ?? first.movedToPath ?? null, + diff: mergeFileChangeDiff(first.diff, second.diff), + addedLineCount: first.addedLineCount + second.addedLineCount, + removedLineCount: first.removedLineCount + second.removedLineCount, + } +} + +export function compareFileChanges(first: UiFileChange, second: UiFileChange): number { + const firstRank = first.operation === 'add' ? 0 : first.operation === 'update' ? 1 : 2 + const secondRank = second.operation === 'add' ? 0 : second.operation === 'update' ? 1 : 2 + if (firstRank !== secondRank) return firstRank - secondRank + const firstPath = `${first.path}\u0000${first.movedToPath ?? ''}` + const secondPath = `${second.path}\u0000${second.movedToPath ?? ''}` + return firstPath.localeCompare(secondPath) +} + +export function aggregateFileChanges(changes: UiFileChange[]): UiFileChange[] { + const byPath = new Map() + for (const change of changes) { + const key = `${change.path}\u0000${change.movedToPath ?? ''}` + const previous = byPath.get(key) + byPath.set(key, previous ? mergeFileChangeEntry(previous, change) : { ...change }) + } + return Array.from(byPath.values()).sort(compareFileChanges) +} + +export function fileChangeOperationLabel(change: UiFileChange): string { + if (change.operation === 'update' && change.movedToPath) { + return change.addedLineCount > 0 || change.removedLineCount > 0 ? 
'Moved + edited' : 'Moved' + } + if (change.operation === 'add') return 'Added' + if (change.operation === 'delete') return 'Deleted' + return 'Edited' +} + +export function fileChangeOperationTone(change: UiFileChange): 'add' | 'delete' | 'update' | 'move' { + if (change.operation === 'update' && change.movedToPath) return 'move' + return change.operation +} + +export function formatFileChangeDelta(change: UiFileChange): string { + const parts: string[] = [] + if (change.addedLineCount > 0) parts.push(`+${change.addedLineCount}`) + if (change.removedLineCount > 0) parts.push(`-${change.removedLineCount}`) + return parts.join(' ') +} + +export type FileChangeDeltaTone = 'add' | 'remove' | 'neutral' + +export type FileChangeDeltaPart = { + tone: FileChangeDeltaTone + label: string +} + +export function buildFileChangeDeltaParts(addedCount: number, removedCount: number, fallbackLabel = ''): FileChangeDeltaPart[] { + const parts: FileChangeDeltaPart[] = [] + if (addedCount > 0) parts.push({ tone: 'add', label: `+${addedCount}` }) + if (removedCount > 0) parts.push({ tone: 'remove', label: `-${removedCount}` }) + if (parts.length > 0) return parts + return fallbackLabel ? [{ tone: 'neutral', label: fallbackLabel }] : [] +} + +export function fileChangeDeltaParts(change: UiFileChange): FileChangeDeltaPart[] { + return buildFileChangeDeltaParts(change.addedLineCount, change.removedLineCount) +} + +export function formatFileChangeCountLabel(count: number): string { + return count === 1 ? 
'1 file changed' : `${count} files changed` +} + +export function summarizeFileChangeKinds(summary: TurnFileChangeSummary | null): string { + if (!summary || summary.changes.length === 0) return '' + let added = 0 + let deleted = 0 + let edited = 0 + let moved = 0 + + for (const change of summary.changes) { + if (change.operation === 'add') { + added += 1 + continue + } + if (change.operation === 'delete') { + deleted += 1 + continue + } + if (change.movedToPath) { + moved += 1 + continue + } + edited += 1 + } + + const parts: string[] = [] + if (edited > 0) parts.push(`${edited} edited`) + if (added > 0) parts.push(`${added} added`) + if (deleted > 0) parts.push(`${deleted} deleted`) + if (moved > 0) parts.push(`${moved} moved`) + return parts.join(', ') +} + +export function fileChangeSummaryLabel(summary: TurnFileChangeSummary | null): string { + if (!summary || summary.changes.length === 0) return 'Modified files' + const countLabel = formatFileChangeCountLabel(summary.changes.length) + const kindSummary = summarizeFileChangeKinds(summary) + return kindSummary ? `${countLabel} · ${kindSummary}` : countLabel +} + +export function fileChangeSummaryStatusParts(summary: TurnFileChangeSummary | null): FileChangeDeltaPart[] { + if (!summary || summary.changes.length === 0) return [] + const totalAdded = summary.changes.reduce((sum, change) => sum + change.addedLineCount, 0) + const totalRemoved = summary.changes.reduce((sum, change) => sum + change.removedLineCount, 0) + const fallbackLabel = summary.changes.some((change) => change.movedToPath) ? 
'Moved' : 'Ready' + return buildFileChangeDeltaParts(totalAdded, totalRemoved, fallbackLabel) +} + +export function hasStructuredUnifiedDiff(change: UiFileChange): boolean { + return change.operation === 'update' && /^diff --git |^@@ |^--- |^\+\+\+ |^[ +-]|^\*\*\* (Move to:|End of File)/mu.test(change.diff) +} + +export function buildSyntheticDiffLines(change: UiFileChange): DiffViewerLine[] { + const normalized = change.diff.replace(/\r\n/g, '\n') + const lines = normalized.length > 0 ? normalized.split('\n') : [] + if (lines.length > 0 && lines[lines.length - 1] === '') { + lines.pop() + } + return lines.map((line, index) => ({ + key: `${fileChangeKey(change)}:synthetic:${index}`, + kind: change.operation === 'delete' ? 'remove' : 'add', + oldLine: change.operation === 'delete' ? index + 1 : null, + newLine: change.operation === 'delete' ? null : index + 1, + text: line, + })) +} + +export function buildUnifiedDiffLines(change: UiFileChange): DiffViewerLine[] { + const normalized = change.diff.replace(/\r\n/g, '\n') + const lines = normalized.length > 0 ? 
normalized.split('\n') : [] + if (lines.length > 0 && lines[lines.length - 1] === '') { + lines.pop() + } + + const output: DiffViewerLine[] = [] + let oldLine = 0 + let newLine = 0 + + for (const [index, line] of lines.entries()) { + const hunkMatch = line.match(/^@@\s+-(\d+)(?:,\d+)?\s+\+(\d+)(?:,\d+)?\s+@@/u) + if (hunkMatch) { + oldLine = Number(hunkMatch[1]) + newLine = Number(hunkMatch[2]) + output.push({ + key: `${fileChangeKey(change)}:hunk:${index}`, + kind: 'hunk', + oldLine: null, + newLine: null, + text: line, + }) + continue + } + + if (line.startsWith('+') && !line.startsWith('+++')) { + output.push({ + key: `${fileChangeKey(change)}:add:${index}`, + kind: 'add', + oldLine: null, + newLine, + text: line.slice(1), + }) + newLine += 1 + continue + } + + if (line.startsWith('-') && !line.startsWith('---')) { + output.push({ + key: `${fileChangeKey(change)}:remove:${index}`, + kind: 'remove', + oldLine, + newLine: null, + text: line.slice(1), + }) + oldLine += 1 + continue + } + + if (line.startsWith(' ')) { + output.push({ + key: `${fileChangeKey(change)}:context:${index}`, + kind: 'context', + oldLine, + newLine, + text: line.slice(1), + }) + oldLine += 1 + newLine += 1 + continue + } + + output.push({ + key: `${fileChangeKey(change)}:meta:${index}`, + kind: 'meta', + oldLine: null, + newLine: null, + text: line, + }) + } + + return output +} + +export function buildDiffViewerLines(change: UiFileChange | null): DiffViewerLine[] { + if (!change || !change.diff.trim()) return [] + if (hasStructuredUnifiedDiff(change)) { + return buildUnifiedDiffLines(change) + } + return buildSyntheticDiffLines(change) +} diff --git a/tests.md b/tests.md index f3a007f01..f1890c653 100644 --- a/tests.md +++ b/tests.md @@ -303,6 +303,34 @@ Rollback/cleanup: --- +### Thread conversation file-change helper split + +#### Feature/Change Name +Thread conversation file-change summary and diff-line helper extraction. + +#### Prerequisites/Setup +1. 
Dependencies installed with `pnpm install` +2. Dev server available if doing a manual browser pass (`pnpm run dev --host 127.0.0.1 --port 4173`) +3. A thread with modified-file summaries or recovered apply-patch file changes +4. Light theme and dark theme both available from the appearance switcher + +#### Steps +1. Run `pnpm run build:frontend`. +2. In light theme, open a thread with a modified-files summary. +3. Open the file-change summary and confirm file labels, operation badges, and line deltas are correct. +4. Open the diff viewer and confirm added, removed, hunk, and context lines render with stable line numbers. +5. Switch to dark theme and repeat steps 2-4. + +#### Expected Results +- Frontend typecheck/build passes. +- File-change summaries are still grouped by assistant turn and standalone metadata message as before. +- Diff viewer selection, labels, deltas, and line rendering remain stable in light theme and dark theme. + +#### Rollback/Cleanup +- None. + +--- + ### Server inline payload helper split #### Feature/Change Name From 76c0452fbcca878ac0a2359fe2cd929cea46ca10 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:15:39 +0700 Subject: [PATCH 04/19] Extract app markdown export formatter --- src/App.vue | 77 ++-------------------------------------- src/appExportMarkdown.ts | 73 +++++++++++++++++++++++++++++++++++++ tests.md | 27 ++++++++++++++ 3 files changed, 103 insertions(+), 74 deletions(-) create mode 100644 src/appExportMarkdown.ts diff --git a/src/App.vue b/src/App.vue index 77a0b2638..1d0038e58 100644 --- a/src/App.vue +++ b/src/App.vue @@ -1124,6 +1124,7 @@ import type { ReasoningEffort, SpeedMode, UiAccountEntry, UiRateLimitWindow, UiS import type { ComposerDraftPayload, ThreadComposerExposed } from './components/content/ThreadComposer.vue' import type { GitCommitOption, LocalDirectoryEntry, TelegramStatus, ThreadTerminalQuickCommand, WorktreeBranchOption } from './api/codexGateway' import { getFreeModeStatus, setFreeMode, 
setFreeModeCustomKey, setCustomProvider } from './api/codexGateway' +import { buildExportFileName, buildThreadMarkdown } from './appExportMarkdown' import { getPathLeafName, getPathParent, isProjectlessChatPath, normalizePathForUi } from './pathUtils.js' const ThreadConversation = defineAsyncComponent(() => import('./components/content/ThreadConversation.vue')) @@ -3669,8 +3670,8 @@ function onImplementPlan(payload: { turnId: string }): void { function onExportChat(): void { if (isHomeRoute.value || isSkillsRoute.value || isAutomationsRoute.value || typeof document === 'undefined') return if (!selectedThread.value || filteredMessages.value.length === 0) return - const markdown = buildThreadMarkdown() - const fileName = buildExportFileName() + const markdown = buildThreadMarkdown(selectedThread.value, filteredMessages.value) + const fileName = buildExportFileName(selectedThread.value) const blob = new Blob([markdown], { type: 'text/markdown;charset=utf-8' }) const objectUrl = URL.createObjectURL(blob) const link = document.createElement('a') @@ -3682,78 +3683,6 @@ function onExportChat(): void { window.setTimeout(() => URL.revokeObjectURL(objectUrl), 0) } -function buildThreadMarkdown(): string { - const lines: string[] = [] - const threadTitle = selectedThread.value?.title?.trim() || 'Untitled thread' - lines.push(`# ${escapeMarkdownText(threadTitle)}`) - lines.push('') - lines.push(`- Exported: ${new Date().toISOString()}`) - lines.push(`- Thread ID: ${selectedThread.value?.id ?? ''}`) - lines.push('') - lines.push('---') - lines.push('') - - for (const message of filteredMessages.value) { - const roleLabel = message.role ? 
message.role.toUpperCase() : 'MESSAGE' - lines.push(`## ${roleLabel}`) - lines.push('') - - const normalizedText = message.text.trim() - if (normalizedText) { - lines.push(normalizedText) - lines.push('') - } - - if (message.commandExecution) { - lines.push('```text') - lines.push(`command: ${message.commandExecution.command}`) - lines.push(`status: ${message.commandExecution.status}`) - if (message.commandExecution.cwd) { - lines.push(`cwd: ${message.commandExecution.cwd}`) - } - if (message.commandExecution.exitCode !== null) { - lines.push(`exitCode: ${message.commandExecution.exitCode}`) - } - lines.push(message.commandExecution.aggregatedOutput || '(no output)') - lines.push('```') - lines.push('') - } - - if (message.fileAttachments && message.fileAttachments.length > 0) { - lines.push('Attachments:') - for (const attachment of message.fileAttachments) { - lines.push(`- ${attachment.path}`) - } - lines.push('') - } - - if (message.images && message.images.length > 0) { - lines.push('Images:') - for (const imageUrl of message.images) { - lines.push(`- ${imageUrl}`) - } - lines.push('') - } - } - - return `${lines.join('\n').trimEnd()}\n` -} - -function buildExportFileName(): string { - const threadTitle = selectedThread.value?.title?.trim() || 'chat' - const sanitized = threadTitle - .toLowerCase() - .replace(/[^a-z0-9]+/g, '-') - .replace(/^-+|-+$/g, '') - const base = sanitized || 'chat' - const stamp = new Date().toISOString().replace(/[:.]/g, '-') - return `${base}-${stamp}.md` -} - -function escapeMarkdownText(value: string): string { - return value.replace(/([\\`*_{}\[\]()#+\-.!])/g, '\\$1') -} - function loadBoolPref(key: string, fallback: boolean): boolean { if (typeof window === 'undefined') return fallback const v = window.localStorage.getItem(key) diff --git a/src/appExportMarkdown.ts b/src/appExportMarkdown.ts new file mode 100644 index 000000000..6dff78385 --- /dev/null +++ b/src/appExportMarkdown.ts @@ -0,0 +1,73 @@ +import type { UiMessage, 
UiThread } from './types/codex' + +export function buildThreadMarkdown(thread: UiThread | null, messages: UiMessage[]): string { + const lines: string[] = [] + const threadTitle = thread?.title?.trim() || 'Untitled thread' + lines.push(`# ${escapeMarkdownText(threadTitle)}`) + lines.push('') + lines.push(`- Exported: ${new Date().toISOString()}`) + lines.push(`- Thread ID: ${thread?.id ?? ''}`) + lines.push('') + lines.push('---') + lines.push('') + + for (const message of messages) { + const roleLabel = message.role ? message.role.toUpperCase() : 'MESSAGE' + lines.push(`## ${roleLabel}`) + lines.push('') + + const normalizedText = message.text.trim() + if (normalizedText) { + lines.push(normalizedText) + lines.push('') + } + + if (message.commandExecution) { + lines.push('```text') + lines.push(`command: ${message.commandExecution.command}`) + lines.push(`status: ${message.commandExecution.status}`) + if (message.commandExecution.cwd) { + lines.push(`cwd: ${message.commandExecution.cwd}`) + } + if (message.commandExecution.exitCode !== null) { + lines.push(`exitCode: ${message.commandExecution.exitCode}`) + } + lines.push(message.commandExecution.aggregatedOutput || '(no output)') + lines.push('```') + lines.push('') + } + + if (message.fileAttachments && message.fileAttachments.length > 0) { + lines.push('Attachments:') + for (const attachment of message.fileAttachments) { + lines.push(`- ${attachment.path}`) + } + lines.push('') + } + + if (message.images && message.images.length > 0) { + lines.push('Images:') + for (const imageUrl of message.images) { + lines.push(`- ${imageUrl}`) + } + lines.push('') + } + } + + return `${lines.join('\n').trimEnd()}\n` +} + +export function buildExportFileName(thread: UiThread | null): string { + const threadTitle = thread?.title?.trim() || 'chat' + const sanitized = threadTitle + .toLowerCase() + .replace(/[^a-z0-9]+/g, '-') + .replace(/^-+|-+$/g, '') + const base = sanitized || 'chat' + const stamp = new 
Date().toISOString().replace(/[:.]/g, '-') + return `${base}-${stamp}.md` +} + +export function escapeMarkdownText(value: string): string { + return value.replace(/([\\`*_{}\[\]()#+\-.!])/g, '\\$1') +} diff --git a/tests.md b/tests.md index f1890c653..4154e6e3b 100644 --- a/tests.md +++ b/tests.md @@ -303,6 +303,33 @@ Rollback/cleanup: --- +### App chat export formatter split + +#### Feature/Change Name +Root app markdown export formatter extraction. + +#### Prerequisites/Setup +1. Dependencies installed with `pnpm install` +2. Dev server available if doing a manual browser pass (`pnpm run dev --host 127.0.0.1 --port 4173`) +3. A thread with user/assistant messages, command output, attachments, or images +4. Light theme and dark theme both available from the appearance switcher + +#### Steps +1. Run `pnpm run build:frontend`. +2. In light theme, open a populated thread and trigger chat export. +3. Open the downloaded markdown file and confirm the title, exported timestamp, thread ID, message roles, text, command output, attachments, and images are present. +4. Switch to dark theme and repeat steps 2-3. + +#### Expected Results +- Frontend typecheck/build passes. +- Exported markdown content and filename format match the pre-extraction behavior. +- Export behavior is unchanged in light theme and dark theme. + +#### Rollback/Cleanup +- Delete downloaded markdown export files if they are no longer needed. 
+ +--- + ### Thread conversation file-change helper split #### Feature/Change Name From 051752e4d171228a321f71aac602c3c7f86fef72 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:19:58 +0700 Subject: [PATCH 05/19] Extract Composio bridge support --- src/server/codexAppServerBridge.ts | 502 +------------------------- src/server/composioRoutesSupport.ts | 522 ++++++++++++++++++++++++++++ 2 files changed, 531 insertions(+), 493 deletions(-) create mode 100644 src/server/composioRoutesSupport.ts diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index e6519e788..a7bbd4da9 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -41,6 +41,15 @@ import { } from '../commandResolution.js' import type { CollaborationModeKind, ReasoningEffort } from '../types/codex.js' import { isAbsoluteLikePath } from '../pathUtils.js' +import { + installComposioCli, + listComposioConnectors, + parseComposioLimit, + readComposioConnectorDetail, + readComposioStatus, + startComposioLink, + startComposioLogin, +} from './composioRoutesSupport.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -128,98 +137,6 @@ type ProviderModelsResponse = { source: 'provider' } -type ComposioUserData = { - apiKey: string - baseUrl: string - webUrl: string - orgId: string - testUserId: string -} - -type ComposioStatusResponse = { - available: boolean - authenticated: boolean - cliVersion: string - email: string - defaultOrgName: string - defaultOrgId: string - webUrl: string - baseUrl: string - testUserId: string -} - -type ComposioConnectionSummary = { - id: string - wordId: string - alias: string - status: string - authScheme: string - createdAt: string - updatedAt: string - isComposioManaged: boolean - isDisabled: boolean -} - -type 
ComposioConnectorSummary = { - slug: string - name: string - description: string - logoUrl: string - latestVersion: string - toolsCount: number - triggersCount: number - isNoAuth: boolean - enabled: boolean - authModes: string[] - activeCount: number - totalConnections: number - connectionStatuses: string[] -} - -type ComposioToolSummary = { - slug: string - name: string - description: string -} - -type ComposioConnectorDetail = { - connector: ComposioConnectorSummary - connections: ComposioConnectionSummary[] - tools: ComposioToolSummary[] - dashboardUrl: string -} - -type ComposioLinkResult = { - status: string - message: string - connectedAccountId: string - redirectUrl: string - toolkit: string - projectType: string -} - -type ComposioLoginResult = { - status: string - message: string - loginUrl: string - cliKey: string - expiresAt: string -} - -type ComposioInstallResult = { - ok: boolean - command: string - output: string -} - -type ComposioConnectorPage = { - data: ComposioConnectorSummary[] - nextCursor: string | null - total: number -} - -const COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX = 1000 - const PROVIDER_MODELS_FETCH_TIMEOUT_MS = 5_000 const THREAD_RESPONSE_TURN_LIMIT = 10 @@ -234,8 +151,6 @@ const API_PERF_BODY_MB_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_BODY_MB_THRESHOLD' const DEFAULT_API_PERF_MS_THRESHOLD = 300 const DEFAULT_API_PERF_BODY_MB_THRESHOLD = 1 const MB_DIVISOR = 1024 * 1024 -const COMPOSIO_USER_DATA_PATH = join(homedir(), '.composio', 'user_data.json') - function readEnvValueFromFile(filePath: string, key: string): string | null { try { const content = readFileSync(filePath, 'utf8') @@ -900,405 +815,6 @@ function quoteShellTokenIfNeeded(value: string): string { return /^[A-Za-z0-9_./:@-]+$/.test(value) ? value : `'${value.replace(/'/g, `'\\''`)}'` } -function readBoolean(value: unknown): boolean { - return value === true -} - -function readNumber(value: unknown): number { - return typeof value === 'number' && Number.isFinite(value) ? 
value : 0 -} - -type ComposioCliInvocation = { command: string; args: string[]; displayCommand: string } - -function buildComposioInvocation(args: string[]): ComposioCliInvocation | null { - const overrideCommand = process.env.CODEXUI_COMPOSIO_COMMAND?.trim() - if (overrideCommand) { - const invocation = getSpawnInvocation(overrideCommand, args) - return { - command: invocation.command, - args: invocation.args, - displayCommand: `${overrideCommand} ${args.map(quoteShellTokenIfNeeded).join(' ')}`.trim(), - } - } - return buildInstalledComposioInvocation(args) -} - -function buildInstalledComposioInvocation(args: string[]): ComposioCliInvocation | null { - const candidates = [ - join(homedir(), '.composio', 'composio'), - 'composio', - ] - for (const candidate of candidates) { - if ((candidate.includes('/') || candidate.includes('\\')) && !existsSync(candidate)) continue - const invocation = getSpawnInvocation(candidate, args) - return { - command: invocation.command, - args: invocation.args, - displayCommand: `${candidate} ${args.map(quoteShellTokenIfNeeded).join(' ')}`.trim(), - } - } - return null -} - -function probeComposioInvocation(invocation: ComposioCliInvocation): { available: boolean; cliVersion: string; output: string } { - const probe = spawnSync(invocation.command, invocation.args, { - encoding: 'utf8', - env: process.env, - windowsHide: true, - }) - const output = `${probe.stdout ?? ''}${probe.stderr ?? ''}`.trim() - return { - available: !probe.error && probe.status === 0, - cliVersion: probe.status === 0 ? (probe.stdout ?? 
'').trim() : '', - output, - } -} - -function resolveComposioInvocation(args: string[]): ComposioCliInvocation | null { - const invocation = buildComposioInvocation(args) - const versionInvocation = buildComposioInvocation(['--version']) - if (invocation && versionInvocation && probeComposioInvocation(versionInvocation).available) return invocation - return null -} - -function parseComposioJson(stdout: string, fallback: string): T { - const trimmed = stdout.trim() - if (!trimmed) { - throw new Error(fallback) - } - return JSON.parse(trimmed) as T -} - -async function runComposioJson(args: string[], fallback: string): Promise { - const invocation = resolveComposioInvocation(args) - if (!invocation) { - throw new Error('Composio CLI is not installed') - } - const child = spawn(invocation.command, invocation.args, { - env: process.env, - stdio: ['ignore', 'pipe', 'pipe'], - windowsHide: true, - }) - - let stdout = '' - let stderr = '' - - child.stdout.setEncoding('utf8') - child.stderr.setEncoding('utf8') - child.stdout.on('data', (chunk) => { stdout += chunk }) - child.stderr.on('data', (chunk) => { stderr += chunk }) - - const exitCode = await new Promise((resolveExit, reject) => { - child.once('error', reject) - child.once('close', (code) => resolveExit(code ?? 
0)) - }) - - if (exitCode !== 0) { - throw new Error(stderr.trim() || stdout.trim() || fallback) - } - - try { - return parseComposioJson(stdout, fallback) - } catch (error) { - const details = stderr.trim() || stdout.trim() - throw new Error(details || getErrorMessage(error, fallback)) - } -} - -async function readComposioUserData(): Promise { - try { - const raw = await readFile(COMPOSIO_USER_DATA_PATH, 'utf8') - const payload = asRecord(JSON.parse(raw)) - if (!payload) return null - return { - apiKey: readNonEmptyString(payload.api_key), - baseUrl: readNonEmptyString(payload.base_url), - webUrl: readNonEmptyString(payload.web_url), - orgId: readNonEmptyString(payload.org_id), - testUserId: readNonEmptyString(payload.test_user_id), - } - } catch { - return null - } -} - -function normalizeComposioConnection(value: unknown): ComposioConnectionSummary | null { - const record = asRecord(value) - if (!record) return null - const authConfig = asRecord(record.auth_config) - return { - id: readNonEmptyString(record.id), - wordId: readNonEmptyString(record.word_id), - alias: readNonEmptyString(record.alias), - status: readNonEmptyString(record.status), - authScheme: readNonEmptyString(record.authScheme || authConfig?.auth_scheme), - createdAt: readNonEmptyString(record.created_at), - updatedAt: readNonEmptyString(record.updated_at), - isComposioManaged: readBoolean(authConfig?.is_composio_managed), - isDisabled: readBoolean(record.is_disabled), - } -} - -function normalizeComposioToolkit(value: unknown, connectionsBySlug: Map): ComposioConnectorSummary | null { - const record = asRecord(value) - if (!record) return null - const slug = readNonEmptyString(record.slug) - if (!slug) return null - const connectionRows = connectionsBySlug.get(slug) ?? 
[] - return { - slug, - name: readNonEmptyString(record.name), - description: readNonEmptyString(record.description), - logoUrl: readNonEmptyString(record.logo || record.meta && asRecord(record.meta)?.logo), - latestVersion: readNonEmptyString(record.latest_version || record.latestVersion), - toolsCount: readNumber(record.tools_count), - triggersCount: readNumber(record.triggers_count), - isNoAuth: readBoolean(record.is_no_auth), - enabled: record.enabled !== false, - authModes: Array.isArray(record.auth_modes) ? record.auth_modes.map(readNonEmptyString).filter(Boolean) : [], - activeCount: connectionRows.filter((row) => row.status === 'ACTIVE' && !row.isDisabled).length, - totalConnections: connectionRows.length, - connectionStatuses: [...new Set(connectionRows.map((row) => row.status).filter(Boolean))], - } -} - -function normalizeComposioTool(value: unknown): ComposioToolSummary | null { - const record = asRecord(value) - if (!record) return null - const slug = readNonEmptyString(record.slug) - if (!slug) return null - return { - slug, - name: readNonEmptyString(record.name), - description: readNonEmptyString(record.description), - } -} - -async function readComposioConnectionsBySlug(): Promise> { - const payload = asRecord(await runComposioJson>(['connections', 'list'], 'Failed to list Composio connections')) - const bySlug = new Map() - for (const [slug, rawRows] of Object.entries(payload ?? {})) { - if (!Array.isArray(rawRows)) continue - const rows = rawRows.map(normalizeComposioConnection).filter((row): row is ComposioConnectionSummary => row !== null) - bySlug.set(slug, rows) - } - return bySlug -} - -async function readComposioStatus(): Promise { - const versionInvocation = buildComposioInvocation(['--version']) - const probe = versionInvocation - ? 
probeComposioInvocation(versionInvocation) - : { available: false, cliVersion: '', output: '' } - const available = probe.available - const cliVersion = probe.cliVersion - const userData = await readComposioUserData() - if (!available) { - return { - available: false, - authenticated: false, - cliVersion, - email: '', - defaultOrgName: '', - defaultOrgId: userData?.orgId ?? '', - webUrl: userData?.webUrl ?? '', - baseUrl: userData?.baseUrl ?? '', - testUserId: userData?.testUserId ?? '', - } - } - - try { - const payload = asRecord(await runComposioJson>(['whoami'], 'Failed to read Composio account status')) - return { - available: true, - authenticated: true, - cliVersion, - email: readNonEmptyString(payload?.email), - defaultOrgName: readNonEmptyString(payload?.default_org_name), - defaultOrgId: readNonEmptyString(payload?.default_org_id) || userData?.orgId || '', - webUrl: userData?.webUrl || 'https://dashboard.composio.dev/', - baseUrl: userData?.baseUrl || 'https://backend.composio.dev', - testUserId: readNonEmptyString(payload?.test_user_id) || userData?.testUserId || '', - } - } catch { - return { - available: true, - authenticated: false, - cliVersion, - email: '', - defaultOrgName: '', - defaultOrgId: userData?.orgId ?? '', - webUrl: userData?.webUrl || 'https://dashboard.composio.dev/', - baseUrl: userData?.baseUrl || 'https://backend.composio.dev', - testUserId: userData?.testUserId ?? 
'', - } - } -} - -async function listComposioConnectors(query: string, cursor: string | null = null, limit = 50): Promise { - const args = ['dev', 'toolkits', 'list', '--limit', String(COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX)] - const trimmedQuery = query.trim() - if (trimmedQuery) { - args.push('--query', trimmedQuery) - } - const [payload, connectionsBySlug] = await Promise.all([ - runComposioJson(args, 'Failed to list Composio toolkits'), - readComposioConnectionsBySlug(), - ]) - const allRows = payload - .map((item) => normalizeComposioToolkit(item, connectionsBySlug)) - .filter((row): row is ComposioConnectorSummary => row !== null) - const safeLimit = Number.isFinite(limit) ? Math.max(1, Math.min(COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX, Math.floor(limit))) : 50 - const safeCursor = parseComposioCursor(cursor, allRows.length) - return { - data: allRows.slice(safeCursor, safeCursor + safeLimit), - nextCursor: safeCursor + safeLimit < allRows.length ? String(safeCursor + safeLimit) : null, - total: allRows.length, - } -} - -function parseComposioCursor(cursor: string | null | undefined, maxLength: number): number { - const trimmed = cursor?.trim() ?? '' - const parsed = Number.parseInt(trimmed, 10) - if (!Number.isFinite(parsed) || Number.isNaN(parsed) || parsed <= 0) return 0 - if (parsed >= maxLength) return maxLength - return parsed -} - -function parseComposioLimit(rawLimit: string | null): number { - const parsed = Number.parseInt((rawLimit ?? 
'').trim(), 10) - if (!Number.isFinite(parsed) || Number.isNaN(parsed) || parsed <= 0) return 50 - return Math.max(1, Math.min(COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX, parsed)) -} - -async function readComposioConnectorDetail(slug: string): Promise { - const normalizedSlug = slug.trim() - if (!normalizedSlug) { - throw new Error('Missing Composio connector slug') - } - - const [infoPayload, toolsPayload, connectionsPayload, userData] = await Promise.all([ - runComposioJson>(['dev', 'toolkits', 'info', normalizedSlug], `Failed to load Composio toolkit ${normalizedSlug}`), - runComposioJson(['tools', 'list', normalizedSlug, '--limit', '10'], `Failed to list tools for ${normalizedSlug}`), - runComposioJson<{ toolkit?: string; items?: unknown[] }>(['link', normalizedSlug, '--list'], `Failed to list connections for ${normalizedSlug}`), - readComposioUserData(), - ]) - - const connections = Array.isArray(connectionsPayload.items) - ? connectionsPayload.items.map(normalizeComposioConnection).filter((row): row is ComposioConnectionSummary => row !== null) - : [] - const connector = normalizeComposioToolkit(infoPayload, new Map([[normalizedSlug, connections]])) - if (!connector) { - throw new Error(`Unknown Composio connector: ${normalizedSlug}`) - } - - return { - connector, - connections, - tools: Array.isArray(toolsPayload) - ? 
toolsPayload.map(normalizeComposioTool).filter((row): row is ComposioToolSummary => row !== null) - : [], - dashboardUrl: userData?.webUrl || 'https://dashboard.composio.dev/', - } -} - -async function startComposioLink(slug: string): Promise { - const normalizedSlug = slug.trim() - if (!normalizedSlug) { - throw new Error('Missing Composio connector slug') - } - const payload = asRecord(await runComposioJson>(['link', normalizedSlug, '--no-wait'], `Failed to start Composio link for ${normalizedSlug}`)) - return { - status: readNonEmptyString(payload?.status), - message: readNonEmptyString(payload?.message), - connectedAccountId: readNonEmptyString(payload?.connected_account_id), - redirectUrl: readNonEmptyString(payload?.redirect_url), - toolkit: readNonEmptyString(payload?.toolkit), - projectType: readNonEmptyString(payload?.project_type), - } -} - -async function startComposioLogin(): Promise { - const invocation = resolveComposioInvocation(['login', '--no-browser', '-y']) - if (!invocation) { - throw new Error('Composio CLI is not installed') - } - const proc = spawn(invocation.command, invocation.args, { - cwd: process.cwd(), - env: process.env, - detached: true, - stdio: ['ignore', 'pipe', 'pipe'], - windowsHide: true, - }) - proc.unref() - - let stdout = '' - let stderr = '' - proc.stdout.setEncoding('utf8') - proc.stderr.setEncoding('utf8') - proc.stderr.on('data', (chunk) => { stderr += chunk }) - - const loginUrl = await new Promise((resolveLoginUrl, reject) => { - const timeout = setTimeout(() => { - proc.kill('SIGTERM') - reject(new Error(stderr.trim() || stdout.trim() || 'Timed out waiting for Composio CLI login URL')) - }, 10_000) - const finish = (url: string) => { - clearTimeout(timeout) - proc.stdout.destroy() - proc.stderr.destroy() - resolveLoginUrl(url) - } - proc.once('error', (error) => { - clearTimeout(timeout) - reject(error) - }) - proc.once('close', (code) => { - clearTimeout(timeout) - reject(new Error(stderr.trim() || stdout.trim() || 
`Composio CLI login exited with code ${code ?? 0}`)) - }) - proc.stdout.on('data', (chunk) => { - stdout += chunk - const url = stdout.match(/https?:\/\/\S+/)?.[0] ?? '' - if (url) finish(url) - }) - }) - - const cliKey = loginUrl ? (new URL(loginUrl).searchParams.get('cliKey') ?? '') : '' - return { - status: 'started', - message: 'Composio CLI login URL created', - loginUrl, - cliKey, - expiresAt: '', - } -} - -async function installComposioCli(): Promise { - const command = 'bash' - const installScriptUrl = 'https://composio.dev/install' - const args = ['-lc', `curl -fsSL ${installScriptUrl} | bash`] - const invocation = getSpawnInvocation(command, args) - const env = { - ...process.env, - COMPOSIO_INSTALL_DIR: process.env.COMPOSIO_INSTALL_DIR?.trim() || join(homedir(), '.composio'), - } - const result = spawnSync(invocation.command, invocation.args, { - encoding: 'utf8', - env, - windowsHide: true, - }) - const output = `${result.stdout ?? ''}${result.stderr ?? ''}`.trim() - if (result.error || result.status !== 0) { - throw new Error(output || result.error?.message || 'Failed to install Composio CLI') - } - return { - ok: true, - command: `curl -fsSL ${installScriptUrl} | bash`, - output, - } -} - type SessionRecoveredFileChange = { path: string operation: 'add' | 'delete' | 'update' diff --git a/src/server/composioRoutesSupport.ts b/src/server/composioRoutesSupport.ts new file mode 100644 index 000000000..92410a7a7 --- /dev/null +++ b/src/server/composioRoutesSupport.ts @@ -0,0 +1,522 @@ +import { spawn, spawnSync } from 'node:child_process' +import { existsSync } from 'node:fs' +import { readFile } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' +import { getSpawnInvocation } from '../utils/commandInvocation.js' + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? 
(value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? value : '' +} + +function getErrorMessage(payload: unknown, fallback: string): string { + if (payload instanceof Error && payload.message.trim().length > 0) return payload.message + const record = asRecord(payload) + if (!record) return fallback + const error = record.error + if (typeof error === 'string' && error.length > 0) return error + const nestedError = asRecord(error) + if (nestedError && typeof nestedError.message === 'string' && nestedError.message.length > 0) return nestedError.message + return fallback +} + +function quoteShellTokenIfNeeded(value: string): string { + return /^[A-Za-z0-9_./:=@+-]+$/u.test(value) ? value : JSON.stringify(value) +} + +export type ComposioUserData = { + apiKey: string + baseUrl: string + webUrl: string + orgId: string + testUserId: string +} + +export type ComposioStatusResponse = { + available: boolean + authenticated: boolean + cliVersion: string + email: string + defaultOrgName: string + defaultOrgId: string + webUrl: string + baseUrl: string + testUserId: string +} + +export type ComposioConnectionSummary = { + id: string + wordId: string + alias: string + status: string + authScheme: string + createdAt: string + updatedAt: string + isComposioManaged: boolean + isDisabled: boolean +} + +export type ComposioConnectorSummary = { + slug: string + name: string + description: string + logoUrl: string + latestVersion: string + toolsCount: number + triggersCount: number + isNoAuth: boolean + enabled: boolean + authModes: string[] + activeCount: number + totalConnections: number + connectionStatuses: string[] +} + +export type ComposioToolSummary = { + slug: string + name: string + description: string +} + +export type ComposioConnectorDetail = { + connector: ComposioConnectorSummary + connections: ComposioConnectionSummary[] + tools: ComposioToolSummary[] + dashboardUrl: string 
+} + +export type ComposioLinkResult = { + status: string + message: string + connectedAccountId: string + redirectUrl: string + toolkit: string + projectType: string +} + +export type ComposioLoginResult = { + status: string + message: string + loginUrl: string + cliKey: string + expiresAt: string +} + +export type ComposioInstallResult = { + ok: boolean + command: string + output: string +} + +export type ComposioConnectorPage = { + data: ComposioConnectorSummary[] + nextCursor: string | null + total: number +} + +const COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX = 1000 + +function readBoolean(value: unknown): boolean { + return value === true +} + +function readNumber(value: unknown): number { + return typeof value === 'number' && Number.isFinite(value) ? value : 0 +} + +export type ComposioCliInvocation = { command: string; args: string[]; displayCommand: string } + +function buildComposioInvocation(args: string[]): ComposioCliInvocation | null { + const overrideCommand = process.env.CODEXUI_COMPOSIO_COMMAND?.trim() + if (overrideCommand) { + const invocation = getSpawnInvocation(overrideCommand, args) + return { + command: invocation.command, + args: invocation.args, + displayCommand: `${overrideCommand} ${args.map(quoteShellTokenIfNeeded).join(' ')}`.trim(), + } + } + return buildInstalledComposioInvocation(args) +} + +function buildInstalledComposioInvocation(args: string[]): ComposioCliInvocation | null { + const candidates = [ + join(homedir(), '.composio', 'composio'), + 'composio', + ] + for (const candidate of candidates) { + if ((candidate.includes('/') || candidate.includes('\\')) && !existsSync(candidate)) continue + const invocation = getSpawnInvocation(candidate, args) + return { + command: invocation.command, + args: invocation.args, + displayCommand: `${candidate} ${args.map(quoteShellTokenIfNeeded).join(' ')}`.trim(), + } + } + return null +} + +function probeComposioInvocation(invocation: ComposioCliInvocation): { available: boolean; cliVersion: string; 
output: string } { + const probe = spawnSync(invocation.command, invocation.args, { + encoding: 'utf8', + env: process.env, + windowsHide: true, + }) + const output = `${probe.stdout ?? ''}${probe.stderr ?? ''}`.trim() + return { + available: !probe.error && probe.status === 0, + cliVersion: probe.status === 0 ? (probe.stdout ?? '').trim() : '', + output, + } +} + +function resolveComposioInvocation(args: string[]): ComposioCliInvocation | null { + const invocation = buildComposioInvocation(args) + const versionInvocation = buildComposioInvocation(['--version']) + if (invocation && versionInvocation && probeComposioInvocation(versionInvocation).available) return invocation + return null +} + +function parseComposioJson(stdout: string, fallback: string): T { + const trimmed = stdout.trim() + if (!trimmed) { + throw new Error(fallback) + } + return JSON.parse(trimmed) as T +} + +async function runComposioJson(args: string[], fallback: string): Promise { + const invocation = resolveComposioInvocation(args) + if (!invocation) { + throw new Error('Composio CLI is not installed') + } + const child = spawn(invocation.command, invocation.args, { + env: process.env, + stdio: ['ignore', 'pipe', 'pipe'], + windowsHide: true, + }) + + let stdout = '' + let stderr = '' + + child.stdout.setEncoding('utf8') + child.stderr.setEncoding('utf8') + child.stdout.on('data', (chunk) => { stdout += chunk }) + child.stderr.on('data', (chunk) => { stderr += chunk }) + + const exitCode = await new Promise((resolveExit, reject) => { + child.once('error', reject) + child.once('close', (code) => resolveExit(code ?? 
0)) + }) + + if (exitCode !== 0) { + throw new Error(stderr.trim() || stdout.trim() || fallback) + } + + try { + return parseComposioJson(stdout, fallback) + } catch (error) { + const details = stderr.trim() || stdout.trim() + throw new Error(details || getErrorMessage(error, fallback)) + } +} + +async function readComposioUserData(): Promise { + try { + const raw = await readFile(COMPOSIO_USER_DATA_PATH, 'utf8') + const payload = asRecord(JSON.parse(raw)) + if (!payload) return null + return { + apiKey: readNonEmptyString(payload.api_key), + baseUrl: readNonEmptyString(payload.base_url), + webUrl: readNonEmptyString(payload.web_url), + orgId: readNonEmptyString(payload.org_id), + testUserId: readNonEmptyString(payload.test_user_id), + } + } catch { + return null + } +} + +function normalizeComposioConnection(value: unknown): ComposioConnectionSummary | null { + const record = asRecord(value) + if (!record) return null + const authConfig = asRecord(record.auth_config) + return { + id: readNonEmptyString(record.id), + wordId: readNonEmptyString(record.word_id), + alias: readNonEmptyString(record.alias), + status: readNonEmptyString(record.status), + authScheme: readNonEmptyString(record.authScheme || authConfig?.auth_scheme), + createdAt: readNonEmptyString(record.created_at), + updatedAt: readNonEmptyString(record.updated_at), + isComposioManaged: readBoolean(authConfig?.is_composio_managed), + isDisabled: readBoolean(record.is_disabled), + } +} + +function normalizeComposioToolkit(value: unknown, connectionsBySlug: Map): ComposioConnectorSummary | null { + const record = asRecord(value) + if (!record) return null + const slug = readNonEmptyString(record.slug) + if (!slug) return null + const connectionRows = connectionsBySlug.get(slug) ?? 
[] + return { + slug, + name: readNonEmptyString(record.name), + description: readNonEmptyString(record.description), + logoUrl: readNonEmptyString(record.logo || record.meta && asRecord(record.meta)?.logo), + latestVersion: readNonEmptyString(record.latest_version || record.latestVersion), + toolsCount: readNumber(record.tools_count), + triggersCount: readNumber(record.triggers_count), + isNoAuth: readBoolean(record.is_no_auth), + enabled: record.enabled !== false, + authModes: Array.isArray(record.auth_modes) ? record.auth_modes.map(readNonEmptyString).filter(Boolean) : [], + activeCount: connectionRows.filter((row) => row.status === 'ACTIVE' && !row.isDisabled).length, + totalConnections: connectionRows.length, + connectionStatuses: [...new Set(connectionRows.map((row) => row.status).filter(Boolean))], + } +} + +function normalizeComposioTool(value: unknown): ComposioToolSummary | null { + const record = asRecord(value) + if (!record) return null + const slug = readNonEmptyString(record.slug) + if (!slug) return null + return { + slug, + name: readNonEmptyString(record.name), + description: readNonEmptyString(record.description), + } +} + +async function readComposioConnectionsBySlug(): Promise> { + const payload = asRecord(await runComposioJson>(['connections', 'list'], 'Failed to list Composio connections')) + const bySlug = new Map() + for (const [slug, rawRows] of Object.entries(payload ?? {})) { + if (!Array.isArray(rawRows)) continue + const rows = rawRows.map(normalizeComposioConnection).filter((row): row is ComposioConnectionSummary => row !== null) + bySlug.set(slug, rows) + } + return bySlug +} + +export async function readComposioStatus(): Promise { + const versionInvocation = buildComposioInvocation(['--version']) + const probe = versionInvocation + ? 
probeComposioInvocation(versionInvocation) + : { available: false, cliVersion: '', output: '' } + const available = probe.available + const cliVersion = probe.cliVersion + const userData = await readComposioUserData() + if (!available) { + return { + available: false, + authenticated: false, + cliVersion, + email: '', + defaultOrgName: '', + defaultOrgId: userData?.orgId ?? '', + webUrl: userData?.webUrl ?? '', + baseUrl: userData?.baseUrl ?? '', + testUserId: userData?.testUserId ?? '', + } + } + + try { + const payload = asRecord(await runComposioJson>(['whoami'], 'Failed to read Composio account status')) + return { + available: true, + authenticated: true, + cliVersion, + email: readNonEmptyString(payload?.email), + defaultOrgName: readNonEmptyString(payload?.default_org_name), + defaultOrgId: readNonEmptyString(payload?.default_org_id) || userData?.orgId || '', + webUrl: userData?.webUrl || 'https://dashboard.composio.dev/', + baseUrl: userData?.baseUrl || 'https://backend.composio.dev', + testUserId: readNonEmptyString(payload?.test_user_id) || userData?.testUserId || '', + } + } catch { + return { + available: true, + authenticated: false, + cliVersion, + email: '', + defaultOrgName: '', + defaultOrgId: userData?.orgId ?? '', + webUrl: userData?.webUrl || 'https://dashboard.composio.dev/', + baseUrl: userData?.baseUrl || 'https://backend.composio.dev', + testUserId: userData?.testUserId ?? 
'', + } + } +} + +export async function listComposioConnectors(query: string, cursor: string | null = null, limit = 50): Promise { + const args = ['dev', 'toolkits', 'list', '--limit', String(COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX)] + const trimmedQuery = query.trim() + if (trimmedQuery) { + args.push('--query', trimmedQuery) + } + const [payload, connectionsBySlug] = await Promise.all([ + runComposioJson(args, 'Failed to list Composio toolkits'), + readComposioConnectionsBySlug(), + ]) + const allRows = payload + .map((item) => normalizeComposioToolkit(item, connectionsBySlug)) + .filter((row): row is ComposioConnectorSummary => row !== null) + const safeLimit = Number.isFinite(limit) ? Math.max(1, Math.min(COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX, Math.floor(limit))) : 50 + const safeCursor = parseComposioCursor(cursor, allRows.length) + return { + data: allRows.slice(safeCursor, safeCursor + safeLimit), + nextCursor: safeCursor + safeLimit < allRows.length ? String(safeCursor + safeLimit) : null, + total: allRows.length, + } +} + +function parseComposioCursor(cursor: string | null | undefined, maxLength: number): number { + const trimmed = cursor?.trim() ?? '' + const parsed = Number.parseInt(trimmed, 10) + if (!Number.isFinite(parsed) || Number.isNaN(parsed) || parsed <= 0) return 0 + if (parsed >= maxLength) return maxLength + return parsed +} + +export function parseComposioLimit(rawLimit: string | null): number { + const parsed = Number.parseInt((rawLimit ?? 
'').trim(), 10) + if (!Number.isFinite(parsed) || Number.isNaN(parsed) || parsed <= 0) return 50 + return Math.max(1, Math.min(COMPOSIO_CONNECTORS_PAGE_LIMIT_MAX, parsed)) +} + +export async function readComposioConnectorDetail(slug: string): Promise { + const normalizedSlug = slug.trim() + if (!normalizedSlug) { + throw new Error('Missing Composio connector slug') + } + + const [infoPayload, toolsPayload, connectionsPayload, userData] = await Promise.all([ + runComposioJson>(['dev', 'toolkits', 'info', normalizedSlug], `Failed to load Composio toolkit ${normalizedSlug}`), + runComposioJson(['tools', 'list', normalizedSlug, '--limit', '10'], `Failed to list tools for ${normalizedSlug}`), + runComposioJson<{ toolkit?: string; items?: unknown[] }>(['link', normalizedSlug, '--list'], `Failed to list connections for ${normalizedSlug}`), + readComposioUserData(), + ]) + + const connections = Array.isArray(connectionsPayload.items) + ? connectionsPayload.items.map(normalizeComposioConnection).filter((row): row is ComposioConnectionSummary => row !== null) + : [] + const connector = normalizeComposioToolkit(infoPayload, new Map([[normalizedSlug, connections]])) + if (!connector) { + throw new Error(`Unknown Composio connector: ${normalizedSlug}`) + } + + return { + connector, + connections, + tools: Array.isArray(toolsPayload) + ? 
toolsPayload.map(normalizeComposioTool).filter((row): row is ComposioToolSummary => row !== null) + : [], + dashboardUrl: userData?.webUrl || 'https://dashboard.composio.dev/', + } +} + +export async function startComposioLink(slug: string): Promise { + const normalizedSlug = slug.trim() + if (!normalizedSlug) { + throw new Error('Missing Composio connector slug') + } + const payload = asRecord(await runComposioJson>(['link', normalizedSlug, '--no-wait'], `Failed to start Composio link for ${normalizedSlug}`)) + return { + status: readNonEmptyString(payload?.status), + message: readNonEmptyString(payload?.message), + connectedAccountId: readNonEmptyString(payload?.connected_account_id), + redirectUrl: readNonEmptyString(payload?.redirect_url), + toolkit: readNonEmptyString(payload?.toolkit), + projectType: readNonEmptyString(payload?.project_type), + } +} + +export async function startComposioLogin(): Promise { + const invocation = resolveComposioInvocation(['login', '--no-browser', '-y']) + if (!invocation) { + throw new Error('Composio CLI is not installed') + } + const proc = spawn(invocation.command, invocation.args, { + cwd: process.cwd(), + env: process.env, + detached: true, + stdio: ['ignore', 'pipe', 'pipe'], + windowsHide: true, + }) + proc.unref() + + let stdout = '' + let stderr = '' + proc.stdout.setEncoding('utf8') + proc.stderr.setEncoding('utf8') + proc.stderr.on('data', (chunk) => { stderr += chunk }) + + const loginUrl = await new Promise((resolveLoginUrl, reject) => { + const timeout = setTimeout(() => { + proc.kill('SIGTERM') + reject(new Error(stderr.trim() || stdout.trim() || 'Timed out waiting for Composio CLI login URL')) + }, 10_000) + const finish = (url: string) => { + clearTimeout(timeout) + proc.stdout.destroy() + proc.stderr.destroy() + resolveLoginUrl(url) + } + proc.once('error', (error) => { + clearTimeout(timeout) + reject(error) + }) + proc.once('close', (code) => { + clearTimeout(timeout) + reject(new Error(stderr.trim() || 
stdout.trim() || `Composio CLI login exited with code ${code ?? 0}`)) + }) + proc.stdout.on('data', (chunk) => { + stdout += chunk + const url = stdout.match(/https?:\/\/\S+/)?.[0] ?? '' + if (url) finish(url) + }) + }) + + const cliKey = loginUrl ? (new URL(loginUrl).searchParams.get('cliKey') ?? '') : '' + return { + status: 'started', + message: 'Composio CLI login URL created', + loginUrl, + cliKey, + expiresAt: '', + } +} + +export async function installComposioCli(): Promise { + const command = 'bash' + const installScriptUrl = 'https://composio.dev/install' + const args = ['-lc', `curl -fsSL ${installScriptUrl} | bash`] + const invocation = getSpawnInvocation(command, args) + const env = { + ...process.env, + COMPOSIO_INSTALL_DIR: process.env.COMPOSIO_INSTALL_DIR?.trim() || join(homedir(), '.composio'), + } + const result = spawnSync(invocation.command, invocation.args, { + encoding: 'utf8', + env, + windowsHide: true, + }) + const output = `${result.stdout ?? ''}${result.stderr ?? 
''}`.trim() + if (result.error || result.status !== 0) { + throw new Error(output || result.error?.message || 'Failed to install Composio CLI') + } + return { + ok: true, + command: `curl -fsSL ${installScriptUrl} | bash`, + output, + } +} From 774fb99e7fb9bd456b45022bb19d6a2a9684df95 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:20:50 +0700 Subject: [PATCH 06/19] Extract terminal quick command discovery --- src/server/codexAppServerBridge.ts | 139 +------------------------- src/server/terminalQuickCommands.ts | 147 ++++++++++++++++++++++++++++ 2 files changed, 148 insertions(+), 138 deletions(-) create mode 100644 src/server/terminalQuickCommands.ts diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index a7bbd4da9..a0cceedf8 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -50,6 +50,7 @@ import { startComposioLink, startComposioLogin, } from './composioRoutesSupport.js' +import { listTerminalQuickCommands } from './terminalQuickCommands.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -677,144 +678,6 @@ export async function callRpcWithArchiveRecovery( } } -type TerminalQuickCommand = { - label: string - value: string - source: 'package' | 'script' | 'make' -} - -async function listTerminalQuickCommands(cwd: string): Promise { - const normalizedCwd = isAbsolute(cwd) ? 
cwd : resolve(cwd) - const info = await stat(normalizedCwd) - if (!info.isDirectory()) { - throw new Error('Terminal cwd is not a directory') - } - - const commands: TerminalQuickCommand[] = [] - const seen = new Set() - const addCommand = (command: TerminalQuickCommand) => { - if (!command.value || seen.has(command.value)) return - seen.add(command.value) - commands.push(command) - } - - await addPackageJsonCommands(normalizedCwd, addCommand) - await addMakefileCommands(normalizedCwd, addCommand) - await addRootScriptCommands(normalizedCwd, addCommand) - await addScriptsDirectoryCommands(normalizedCwd, addCommand) - return commands -} - -async function addPackageJsonCommands( - cwd: string, - addCommand: (command: TerminalQuickCommand) => void, -): Promise { - try { - const raw = await readFile(join(cwd, 'package.json'), 'utf8') - const parsed = JSON.parse(raw) as unknown - const record = asRecord(parsed) - const scripts = asRecord(record?.scripts) - if (!scripts) return - const packageManager = resolvePackageManager(cwd) - for (const scriptName of Object.keys(scripts)) { - if (typeof scripts[scriptName] !== 'string') continue - const value = formatPackageScriptCommand(packageManager, scriptName) - addCommand({ - label: value, - value, - source: 'package', - }) - } - } catch { - // A project without package.json simply has no package quick commands. - } -} - -async function addMakefileCommands( - cwd: string, - addCommand: (command: TerminalQuickCommand) => void, -): Promise { - const makefilePath = existsSync(join(cwd, 'Makefile')) - ? join(cwd, 'Makefile') - : existsSync(join(cwd, 'makefile')) - ? 
join(cwd, 'makefile') - : '' - if (!makefilePath) return - - try { - const raw = await readFile(makefilePath, 'utf8') - for (const line of raw.split(/\r?\n/)) { - const match = /^([A-Za-z0-9_.@%/+~-][A-Za-z0-9_.@%/+~-]*)\s*:(?![=])/.exec(line) - if (!match) continue - const target = match[1] - if (!target || target.startsWith('.')) continue - const value = `make ${quoteShellTokenIfNeeded(target)}` - addCommand({ - label: value, - value, - source: 'make', - }) - } - } catch { - // Ignore unreadable Makefiles for quick-command discovery. - } -} - -async function addRootScriptCommands( - cwd: string, - addCommand: (command: TerminalQuickCommand) => void, -): Promise { - await addScriptFileCommands(cwd, '.', addCommand) -} - -async function addScriptsDirectoryCommands( - cwd: string, - addCommand: (command: TerminalQuickCommand) => void, -): Promise { - await addScriptFileCommands(join(cwd, 'scripts'), './scripts', addCommand) -} - -async function addScriptFileCommands( - directory: string, - commandPrefix: string, - addCommand: (command: TerminalQuickCommand) => void, -): Promise { - try { - const entries = await readdir(directory, { withFileTypes: true }) - for (const entry of entries) { - if (!entry.isFile()) continue - if (!entry.name.endsWith('.sh') && !entry.name.endsWith('.cmd')) continue - const value = `${commandPrefix}/${quoteShellTokenIfNeeded(entry.name)}` - addCommand({ - label: value, - value, - source: 'script', - }) - } - } catch { - // A project without script files simply has no script-file quick commands. 
- } -} - -function resolvePackageManager(cwd: string): 'npm' | 'pnpm' | 'yarn' | 'bun' { - if (existsSync(join(cwd, 'pnpm-lock.yaml'))) return 'pnpm' - if (existsSync(join(cwd, 'yarn.lock'))) return 'yarn' - if (existsSync(join(cwd, 'bun.lock')) || existsSync(join(cwd, 'bun.lockb'))) return 'bun' - return 'npm' -} - -function formatPackageScriptCommand(packageManager: 'npm' | 'pnpm' | 'yarn' | 'bun', scriptName: string): string { - const quoted = quoteShellTokenIfNeeded(scriptName) - if (packageManager === 'npm') return `npm run ${quoted}` - if (packageManager === 'pnpm') return `pnpm run ${quoted}` - if (packageManager === 'bun') return `bun run ${quoted}` - return `yarn ${quoted}` -} - -function quoteShellTokenIfNeeded(value: string): string { - return /^[A-Za-z0-9_./:@-]+$/.test(value) ? value : `'${value.replace(/'/g, `'\\''`)}'` -} - type SessionRecoveredFileChange = { path: string operation: 'add' | 'delete' | 'update' diff --git a/src/server/terminalQuickCommands.ts b/src/server/terminalQuickCommands.ts new file mode 100644 index 000000000..547428bf1 --- /dev/null +++ b/src/server/terminalQuickCommands.ts @@ -0,0 +1,147 @@ +import { existsSync } from 'node:fs' +import { readFile, readdir, stat } from 'node:fs/promises' +import { isAbsolute, join, resolve } from 'node:path' + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +export type TerminalQuickCommand = { + label: string + value: string + source: 'package' | 'script' | 'make' +} + +export async function listTerminalQuickCommands(cwd: string): Promise { + const normalizedCwd = isAbsolute(cwd) ? 
cwd : resolve(cwd) + const info = await stat(normalizedCwd) + if (!info.isDirectory()) { + throw new Error('Terminal cwd is not a directory') + } + + const commands: TerminalQuickCommand[] = [] + const seen = new Set() + const addCommand = (command: TerminalQuickCommand) => { + if (!command.value || seen.has(command.value)) return + seen.add(command.value) + commands.push(command) + } + + await addPackageJsonCommands(normalizedCwd, addCommand) + await addMakefileCommands(normalizedCwd, addCommand) + await addRootScriptCommands(normalizedCwd, addCommand) + await addScriptsDirectoryCommands(normalizedCwd, addCommand) + return commands +} + +async function addPackageJsonCommands( + cwd: string, + addCommand: (command: TerminalQuickCommand) => void, +): Promise { + try { + const raw = await readFile(join(cwd, 'package.json'), 'utf8') + const parsed = JSON.parse(raw) as unknown + const record = asRecord(parsed) + const scripts = asRecord(record?.scripts) + if (!scripts) return + const packageManager = resolvePackageManager(cwd) + for (const scriptName of Object.keys(scripts)) { + if (typeof scripts[scriptName] !== 'string') continue + const value = formatPackageScriptCommand(packageManager, scriptName) + addCommand({ + label: value, + value, + source: 'package', + }) + } + } catch { + // A project without package.json simply has no package quick commands. + } +} + +async function addMakefileCommands( + cwd: string, + addCommand: (command: TerminalQuickCommand) => void, +): Promise { + const makefilePath = existsSync(join(cwd, 'Makefile')) + ? join(cwd, 'Makefile') + : existsSync(join(cwd, 'makefile')) + ? 
join(cwd, 'makefile') + : '' + if (!makefilePath) return + + try { + const raw = await readFile(makefilePath, 'utf8') + for (const line of raw.split(/\r?\n/)) { + const match = /^([A-Za-z0-9_.@%/+~-][A-Za-z0-9_.@%/+~-]*)\s*:(?![=])/.exec(line) + if (!match) continue + const target = match[1] + if (!target || target.startsWith('.')) continue + const value = `make ${quoteShellTokenIfNeeded(target)}` + addCommand({ + label: value, + value, + source: 'make', + }) + } + } catch { + // Ignore unreadable Makefiles for quick-command discovery. + } +} + +async function addRootScriptCommands( + cwd: string, + addCommand: (command: TerminalQuickCommand) => void, +): Promise { + await addScriptFileCommands(cwd, '.', addCommand) +} + +async function addScriptsDirectoryCommands( + cwd: string, + addCommand: (command: TerminalQuickCommand) => void, +): Promise { + await addScriptFileCommands(join(cwd, 'scripts'), './scripts', addCommand) +} + +async function addScriptFileCommands( + directory: string, + commandPrefix: string, + addCommand: (command: TerminalQuickCommand) => void, +): Promise { + try { + const entries = await readdir(directory, { withFileTypes: true }) + for (const entry of entries) { + if (!entry.isFile()) continue + if (!entry.name.endsWith('.sh') && !entry.name.endsWith('.cmd')) continue + const value = `${commandPrefix}/${quoteShellTokenIfNeeded(entry.name)}` + addCommand({ + label: value, + value, + source: 'script', + }) + } + } catch { + // A project without script files simply has no script-file quick commands. 
+ } +} + +function resolvePackageManager(cwd: string): 'npm' | 'pnpm' | 'yarn' | 'bun' { + if (existsSync(join(cwd, 'pnpm-lock.yaml'))) return 'pnpm' + if (existsSync(join(cwd, 'yarn.lock'))) return 'yarn' + if (existsSync(join(cwd, 'bun.lock')) || existsSync(join(cwd, 'bun.lockb'))) return 'bun' + return 'npm' +} + +function formatPackageScriptCommand(packageManager: 'npm' | 'pnpm' | 'yarn' | 'bun', scriptName: string): string { + const quoted = quoteShellTokenIfNeeded(scriptName) + if (packageManager === 'npm') return `npm run ${quoted}` + if (packageManager === 'pnpm') return `pnpm run ${quoted}` + if (packageManager === 'bun') return `bun run ${quoted}` + return `yarn ${quoted}` +} + +function quoteShellTokenIfNeeded(value: string): string { + return /^[A-Za-z0-9_./:@-]+$/.test(value) ? value : `'${value.replace(/'/g, `'\\''`)}'` +} From e82d82f7927d43b713ace5b74032f63e09ec255a Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:21:43 +0700 Subject: [PATCH 07/19] Extract session recovery helpers --- src/server/codexAppServerBridge.ts | 728 +--------------------------- src/server/sessionRecovery.ts | 738 +++++++++++++++++++++++++++++ 2 files changed, 745 insertions(+), 721 deletions(-) create mode 100644 src/server/sessionRecovery.ts diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index a0cceedf8..4ddf0bddf 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -51,6 +51,12 @@ import { startComposioLogin, } from './composioRoutesSupport.js' import { listTerminalQuickCommands } from './terminalQuickCommands.js' +import { + buildSessionFileChangeFallback, + collectFileChangesForTurns, + mergeSessionCommandsIntoTurns, + revertTurnFileChanges, +} from './sessionRecovery.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from 
'./threadInlinePayloads.js' @@ -678,726 +684,6 @@ export async function callRpcWithArchiveRecovery( } } -type SessionRecoveredFileChange = { - path: string - operation: 'add' | 'delete' | 'update' - movedToPath: string | null - diff: string - addedLineCount: number - removedLineCount: number -} - -type SessionRecoveredTurnFileChanges = { - turnId: string - turnIndex: number - fileChanges: SessionRecoveredFileChange[] -} - -function countRecoveredContentLines(value: string): number { - if (!value) return 0 - const normalized = value.replace(/\r\n/g, '\n') - const trimmed = normalized.endsWith('\n') ? normalized.slice(0, -1) : normalized - if (!trimmed) return 0 - return trimmed.split('\n').length -} - -function countRecoveredPatchLines(value: string): { addedLineCount: number; removedLineCount: number } { - let addedLineCount = 0 - let removedLineCount = 0 - - for (const line of value.replace(/\r\n/g, '\n').split('\n')) { - if (!line) continue - if (line.startsWith('+++') || line.startsWith('---') || line.startsWith('@@')) continue - if (line.startsWith('+')) { - addedLineCount += 1 - continue - } - if (line.startsWith('-')) { - removedLineCount += 1 - } - } - - return { addedLineCount, removedLineCount } -} - -function mergeRecoveredDiff(first: string, second: string): string { - if (!first) return second - if (!second || first === second) return first - return `${first}\n${second}`.trim() -} - -function mergeRecoveredFileChange(first: SessionRecoveredFileChange, second: SessionRecoveredFileChange): SessionRecoveredFileChange { - const operation = first.operation === 'add' || second.operation === 'add' - ? 'add' - : first.operation === 'delete' || second.operation === 'delete' - ? 'delete' - : 'update' - - return { - path: second.path || first.path, - operation, - movedToPath: second.movedToPath ?? first.movedToPath ?? 
null, - diff: mergeRecoveredDiff(first.diff, second.diff), - addedLineCount: first.addedLineCount + second.addedLineCount, - removedLineCount: first.removedLineCount + second.removedLineCount, - } -} - -function isApplyPatchSectionBoundary(value: string): boolean { - return value.startsWith('*** Update File: ') - || value.startsWith('*** Add File: ') - || value.startsWith('*** Delete File: ') - || value === '*** End Patch' -} - -function parseApplyPatchInput(input: string): SessionRecoveredFileChange[] { - const normalized = input.replace(/\r\n/g, '\n') - const lines = normalized.split('\n') - const changes: SessionRecoveredFileChange[] = [] - - for (let index = 0; index < lines.length; index += 1) { - const line = lines[index] ?? '' - - if (line.startsWith('*** Add File: ')) { - const path = line.slice('*** Add File: '.length).trim() - const contentLines: string[] = [] - for (index += 1; index < lines.length; index += 1) { - const nextLine = lines[index] ?? '' - if (isApplyPatchSectionBoundary(nextLine)) { - index -= 1 - break - } - contentLines.push(nextLine.startsWith('+') ? nextLine.slice(1) : nextLine) - } - const diff = contentLines.join('\n').trimEnd() - if (path) { - changes.push({ - path, - operation: 'add', - movedToPath: null, - diff, - addedLineCount: countRecoveredContentLines(diff), - removedLineCount: 0, - }) - } - continue - } - - if (line.startsWith('*** Delete File: ')) { - const path = line.slice('*** Delete File: '.length).trim() - if (path) { - changes.push({ - path, - operation: 'delete', - movedToPath: null, - diff: '', - addedLineCount: 0, - removedLineCount: 0, - }) - } - continue - } - - if (line.startsWith('*** Update File: ')) { - const path = line.slice('*** Update File: '.length).trim() - let movedToPath: string | null = null - const diffLines: string[] = [] - - for (index += 1; index < lines.length; index += 1) { - const nextLine = lines[index] ?? 
'' - if (nextLine.startsWith('*** Move to: ')) { - const moved = nextLine.slice('*** Move to: '.length).trim() - movedToPath = moved || null - continue - } - if (isApplyPatchSectionBoundary(nextLine)) { - index -= 1 - break - } - diffLines.push(nextLine) - } - - const diff = diffLines.join('\n').trimEnd() - const counts = countRecoveredPatchLines(diff) - if (path) { - changes.push({ - path, - operation: 'update', - movedToPath, - diff, - ...counts, - }) - } - } - } - - return changes -} - -function buildSessionFileChangeFallback(threadReadPayload: unknown, sessionLogRaw: string): SessionRecoveredTurnFileChanges[] { - const payload = asRecord(threadReadPayload) - const thread = asRecord(payload?.thread) - const turns = Array.isArray(thread?.turns) ? thread.turns : [] - const turnIndexById = new Map() - - for (let turnIndex = 0; turnIndex < turns.length; turnIndex += 1) { - const turnRecord = asRecord(turns[turnIndex]) - const turnId = readNonEmptyString(turnRecord?.id) - if (turnId) { - turnIndexById.set(turnId, turnIndex) - } - } - - const collectedByTurnId = new Map() - let currentTurnId = '' - - for (const line of sessionLogRaw.split('\n')) { - if (!line.trim()) continue - let row: Record | null = null - try { - row = JSON.parse(line) as Record - } catch { - continue - } - - if (row.type === 'turn_context') { - const payloadRecord = asRecord(row.payload) - currentTurnId = readNonEmptyString(payloadRecord?.turn_id) || currentTurnId - continue - } - - if (row.type !== 'response_item' || !currentTurnId || !turnIndexById.has(currentTurnId)) { - continue - } - - const payloadRecord = asRecord(row.payload) - if ( - payloadRecord?.type !== 'custom_tool_call' - || payloadRecord.name !== 'apply_patch' - || payloadRecord.status !== 'completed' - ) { - continue - } - - const input = readNonEmptyString(payloadRecord.input) - if (!input) continue - - const parsedChanges = parseApplyPatchInput(input) - if (parsedChanges.length === 0) continue - - const previous = 
collectedByTurnId.get(currentTurnId) ?? [] - previous.push(...parsedChanges) - collectedByTurnId.set(currentTurnId, previous) - } - - const recovered: SessionRecoveredTurnFileChanges[] = [] - for (const [turnId, fileChanges] of collectedByTurnId.entries()) { - const turnIndex = turnIndexById.get(turnId) - if (typeof turnIndex !== 'number' || fileChanges.length === 0) continue - - const mergedByPath = new Map() - for (const fileChange of fileChanges) { - const key = `${fileChange.path}\u0000${fileChange.movedToPath ?? ''}` - const previous = mergedByPath.get(key) - mergedByPath.set(key, previous ? mergeRecoveredFileChange(previous, fileChange) : { ...fileChange }) - } - - recovered.push({ - turnId, - turnIndex, - fileChanges: Array.from(mergedByPath.values()), - }) - } - - return recovered.sort((first, second) => first.turnIndex - second.turnIndex) -} - -type SessionRecoveredCommand = { - id: string - type: 'commandExecution' - command: string - cwd: string | null - status: 'completed' | 'failed' - aggregatedOutput: string - exitCode: number | null - durationMs: number | null -} - -function parseExecCommandOutput(output: string): { exitCode: number | null; wallTime: number | null; cleanOutput: string } { - let exitCode: number | null = null - let wallTime: number | null = null - const outputLines: string[] = [] - let pastHeader = false - - for (const line of output.split('\n')) { - if (!pastHeader) { - const exitMatch = line.match(/^Process exited with code (\d+)/) - if (exitMatch) { - exitCode = Number.parseInt(exitMatch[1]!, 10) - continue - } - const wallMatch = line.match(/^Wall time:\s+([\d.]+)\s+seconds/) - if (wallMatch) { - wallTime = Math.round(Number.parseFloat(wallMatch[1]!) 
* 1000) - continue - } - if (line.startsWith('Command:') || line.startsWith('Chunk ID:') || line.startsWith('Original token count:')) { - continue - } - if (line === 'Output:') { - pastHeader = true - continue - } - } - outputLines.push(line) - } - - return { exitCode, wallTime, cleanOutput: outputLines.join('\n').trimEnd() } -} - -type SessionRecoveredFileChangeItem = { - id: string - type: 'fileChange' - status: 'completed' - changes: Record[] -} - -type SessionItemSlot = { - type: 'agentMessage' | 'commandExecution' | 'fileChange' - command?: SessionRecoveredCommand - fileChange?: SessionRecoveredFileChangeItem -} - -function buildSessionItemOrder(sessionLogRaw: string, turnIds: Set): Map { - let currentTurnId = '' - const orderByTurnId = new Map() - const callIdToCommand = new Map() - - for (const line of sessionLogRaw.split('\n')) { - if (!line.trim()) continue - let row: Record | null = null - try { - row = JSON.parse(line) as Record - } catch { - continue - } - - if (row.type === 'turn_context') { - const p = asRecord(row.payload) - currentTurnId = readNonEmptyString(p?.turn_id) || currentTurnId - continue - } - if (row.type === 'event_msg') { - const p = asRecord(row.payload) - if (p?.type === 'task_started') { - currentTurnId = readNonEmptyString(p.turn_id) || currentTurnId - } - continue - } - - if (row.type !== 'response_item' || !currentTurnId || !turnIds.has(currentTurnId)) continue - const payload = asRecord(row.payload) - if (!payload) continue - - let slots = orderByTurnId.get(currentTurnId) - if (!slots) { - slots = [] - orderByTurnId.set(currentTurnId, slots) - } - - if (payload.type === 'message' && payload.role === 'assistant') { - slots.push({ type: 'agentMessage' }) - continue - } - - if (payload.type === 'function_call' && payload.name === 'exec_command') { - const callId = readNonEmptyString(payload.call_id) - if (!callId) continue - let cmd = '' - try { - const args = JSON.parse(payload.arguments as string) as Record - cmd = typeof args.cmd 
=== 'string' ? args.cmd : '' - } catch { /* empty */ } - const command: SessionRecoveredCommand = { - id: `session-cmd-${callId}`, - type: 'commandExecution', - command: cmd, - cwd: null, - status: 'completed', - aggregatedOutput: '', - exitCode: null, - durationMs: null, - } - callIdToCommand.set(callId, command) - slots.push({ type: 'commandExecution', command }) - continue - } - - if (payload.type === 'function_call_output') { - const callId = readNonEmptyString(payload.call_id) - if (!callId) continue - const existing = callIdToCommand.get(callId) - if (!existing) continue - const rawOutput = typeof payload.output === 'string' ? payload.output : '' - const parsed = parseExecCommandOutput(rawOutput) - existing.aggregatedOutput = parsed.cleanOutput - existing.exitCode = parsed.exitCode - existing.durationMs = parsed.wallTime - existing.status = parsed.exitCode === 0 || parsed.exitCode === null ? 'completed' : 'failed' - } - - if (payload.type === 'custom_tool_call' && payload.name === 'apply_patch' && payload.status === 'completed') { - const input = typeof payload.input === 'string' ? payload.input : '' - const callId = readNonEmptyString(payload.call_id) - if (!input || !callId) continue - const parsedChanges = parseApplyPatchInput(input) - if (parsedChanges.length === 0) continue - const fcItem: SessionRecoveredFileChangeItem = { - id: `session-fc-${callId}`, - type: 'fileChange', - status: 'completed', - changes: parsedChanges.map((fc) => ({ - ...fc, - kind: { type: fc.operation, ...(fc.movedToPath ? 
{ move_path: fc.movedToPath } : {}) }, - })), - } - slots.push({ type: 'fileChange', fileChange: fcItem }) - } - } - - return orderByTurnId -} - -function extractFilePathsFromCommand(cmd: string, cwd: string): string[] { - const paths: string[] = [] - const absPathPattern = /(?:^|\s|>>|>|<)(\/?(?:Users|home|tmp|var|etc|root)\/[^\s;|&><"']+)/g - let match: RegExpExecArray | null - while ((match = absPathPattern.exec(cmd)) !== null) { - const p = match[1]?.trim() - if (p && !p.endsWith('/') && !p.startsWith('-')) paths.push(p) - } - - const redirectPattern = /(?:>>?|cat\s*>\s*)([^\s;|&><"']+)/g - while ((match = redirectPattern.exec(cmd)) !== null) { - const p = match[1]?.trim() - if (p && !p.startsWith('-') && !p.startsWith('/dev/')) { - paths.push(isAbsolute(p) ? p : join(cwd, p)) - } - } - - return [...new Set(paths)] -} - -type CollectedTurnFileInfo = { - patchInputs: { callId: string; input: string }[] - commandFilePaths: string[] -} - -function collectFileChangesForTurns( - sessionLogRaw: string, - turnIdsToRevert: Set, - cwd: string, -): Map { - let currentTurnId = '' - const infoByTurnId = new Map() - - for (const line of sessionLogRaw.split('\n')) { - if (!line.trim()) continue - let row: Record | null = null - try { - row = JSON.parse(line) as Record - } catch { - continue - } - - if (row.type === 'turn_context') { - const p = asRecord(row.payload) - currentTurnId = readNonEmptyString(p?.turn_id) || currentTurnId - continue - } - if (row.type === 'event_msg') { - const p = asRecord(row.payload) - if (p?.type === 'task_started') { - currentTurnId = readNonEmptyString(p.turn_id) || currentTurnId - } - continue - } - - if (row.type !== 'response_item' || !currentTurnId || !turnIdsToRevert.has(currentTurnId)) continue - const payload = asRecord(row.payload) - if (!payload) continue - - let info = infoByTurnId.get(currentTurnId) - if (!info) { - info = { patchInputs: [], commandFilePaths: [] } - infoByTurnId.set(currentTurnId, info) - } - - if (payload.type === 
'custom_tool_call' && payload.name === 'apply_patch' && payload.status === 'completed') { - const input = typeof payload.input === 'string' ? payload.input : '' - const callId = readNonEmptyString(payload.call_id) - if (input && callId) { - info.patchInputs.push({ callId, input }) - } - } - - if (payload.type === 'function_call' && payload.name === 'exec_command') { - let cmd = '' - try { - const args = JSON.parse(payload.arguments as string) as Record - cmd = typeof args.cmd === 'string' ? args.cmd : '' - } catch { /* empty */ } - if (cmd) { - const extracted = extractFilePathsFromCommand(cmd, cwd) - for (const p of extracted) { - if (!info.commandFilePaths.includes(p)) info.commandFilePaths.push(p) - } - } - } - } - - return infoByTurnId -} - -function reverseV4aDiff(fileContent: string, diffText: string): string | null { - const fileLines = fileContent.split('\n') - const rawDiffLines = diffText.split('\n') - while (rawDiffLines.length > 0 && rawDiffLines[rawDiffLines.length - 1]?.trim() === '') rawDiffLines.pop() - const diffLines = rawDiffLines - const result = [...fileLines] - - type DiffEntry = { type: 'context' | 'add' | 'remove'; text: string } - const hunks: DiffEntry[][] = [] - let currentHunk: DiffEntry[] | null = null - - for (const dl of diffLines) { - if (dl.startsWith('@@')) { - if (currentHunk) hunks.push(currentHunk) - currentHunk = [] - continue - } - if (!currentHunk) continue - if (dl.startsWith('+')) { - currentHunk.push({ type: 'add', text: dl.slice(1) }) - } else if (dl.startsWith('-')) { - currentHunk.push({ type: 'remove', text: dl.slice(1) }) - } else if (dl.startsWith(' ')) { - currentHunk.push({ type: 'context', text: dl.slice(1) }) - } else { - currentHunk.push({ type: 'context', text: dl }) - } - } - if (currentHunk) hunks.push(currentHunk) - - for (let hi = hunks.length - 1; hi >= 0; hi--) { - const hunk = hunks[hi]! 
- const expectedSequence = hunk - .filter((e) => e.type === 'context' || e.type === 'add') - .map((e) => e.text) - - if (expectedSequence.length === 0) continue - - let seqStart = -1 - outer: for (let ri = result.length - expectedSequence.length; ri >= 0; ri--) { - for (let si = 0; si < expectedSequence.length; si++) { - if (result[ri + si] !== expectedSequence[si]) continue outer - } - seqStart = ri - break - } - - if (seqStart < 0) return null - - const newLines: string[] = [] - let seqIdx = 0 - for (const entry of hunk) { - if (entry.type === 'context') { - newLines.push(result[seqStart + seqIdx]!) - seqIdx++ - } else if (entry.type === 'add') { - seqIdx++ - } else if (entry.type === 'remove') { - newLines.push(entry.text) - } - } - - result.splice(seqStart, expectedSequence.length, ...newLines) - } - - return result.join('\n') -} - -async function revertTurnFileChanges( - cwd: string, - turnInfos: Map, -): Promise<{ reverted: number; errors: string[] }> { - if (turnInfos.size === 0) return { reverted: 0, errors: [] } - - let reverted = 0 - const errors: string[] = [] - - const allEntries = [...turnInfos.values()] - const allPatchInputs = allEntries.flatMap((info) => info.patchInputs).reverse() - const allCommandPaths = new Set(allEntries.flatMap((info) => info.commandFilePaths)) - - let isGitRepo = false - let gitRoot = '' - try { - gitRoot = await runCommandCapture('git', ['rev-parse', '--show-toplevel'], { cwd }) - isGitRepo = !!gitRoot - } catch { /* not a git repo */ } - - const trackedFiles = new Set() - if (isGitRepo) { - try { - const tracked = await runCommandCapture('git', ['ls-files', '--full-name'], { cwd: gitRoot }) - for (const f of tracked.split('\n')) { - if (f.trim()) trackedFiles.add(join(gitRoot, f.trim())) - } - } catch { /* empty */ } - } - - const patchRevertedPaths = new Set() - - for (const patch of allPatchInputs) { - const changes = parseApplyPatchInput(patch.input) - for (let ci = changes.length - 1; ci >= 0; ci--) { - const change = 
changes[ci]! - const filePath = isAbsolute(change.path) ? change.path : join(cwd, change.path) - - try { - if (change.operation === 'add') { - const fileStat = await stat(filePath).catch(() => null) - if (fileStat) { - await rm(filePath, { force: true }) - reverted++ - patchRevertedPaths.add(filePath) - } - } else if (change.operation === 'update' && change.diff) { - let reversed = false - try { - const currentContent = await readFile(filePath, 'utf8') - const newContent = reverseV4aDiff(currentContent, change.diff) - if (newContent !== null && newContent !== currentContent) { - const { writeFile } = await import('node:fs/promises') - await writeFile(filePath, newContent) - reverted++ - patchRevertedPaths.add(filePath) - reversed = true - } - } catch { /* file read/write failed */ } - - if (!reversed) { - const isTracked = trackedFiles.has(filePath) - if (isTracked && isGitRepo) { - const relativePath = filePath.startsWith(gitRoot + '/') ? filePath.slice(gitRoot.length + 1) : filePath - try { - await runCommand('git', ['checkout', 'HEAD', '--', relativePath], { cwd: gitRoot }) - reverted++ - patchRevertedPaths.add(filePath) - } catch { - errors.push(`Could not revert: ${filePath}`) - } - } else { - errors.push(`Could not reverse patch for untracked file: ${filePath}`) - } - } - } else if (change.operation === 'delete') { - const isTracked = trackedFiles.has(filePath) - if (isTracked && isGitRepo) { - const relativePath = filePath.startsWith(gitRoot + '/') ? filePath.slice(gitRoot.length + 1) : filePath - try { - await runCommand('git', ['checkout', 'HEAD', '--', relativePath], { cwd: gitRoot }) - reverted++ - patchRevertedPaths.add(filePath) - } catch { - errors.push(`Could not restore deleted file: ${filePath}`) - } - } - } - } catch (err) { - errors.push(`Failed to revert patch for ${filePath}: ${err instanceof Error ? 
err.message : String(err)}`) - } - } - } - - for (const filePath of allCommandPaths) { - if (patchRevertedPaths.has(filePath)) continue - const isTracked = trackedFiles.has(filePath) - if (isTracked && isGitRepo) { - const relativePath = filePath.startsWith(gitRoot + '/') ? filePath.slice(gitRoot.length + 1) : filePath - try { - await runCommand('git', ['checkout', 'HEAD', '--', relativePath], { cwd: gitRoot }) - reverted++ - } catch { - errors.push(`Could not restore command-modified file: ${filePath}`) - } - } - } - - return { reverted, errors } -} - -function mergeSessionCommandsIntoTurns(turns: unknown[], sessionLogRaw: string): unknown[] { - const turnIds = new Set() - for (const turn of turns) { - const turnRecord = asRecord(turn) - const turnId = readNonEmptyString(turnRecord?.id) - if (turnId) turnIds.add(turnId) - } - - if (turnIds.size === 0) return turns - - const orderByTurnId = buildSessionItemOrder(sessionLogRaw, turnIds) - if (orderByTurnId.size === 0) return turns - - return turns.map((turn) => { - const turnRecord = asRecord(turn) - if (!turnRecord) return turn - const turnId = readNonEmptyString(turnRecord.id) - if (!turnId) return turn - - const slots = orderByTurnId.get(turnId) - if (!slots || slots.length === 0) return turn - - const existingItems = Array.isArray(turnRecord.items) ? 
(turnRecord.items as Record[]) : [] - const alreadyHasRecoveredItems = existingItems.some((it) => it.type === 'commandExecution' || it.type === 'fileChange') - if (alreadyHasRecoveredItems) return turn - - const agentMessages = existingItems.filter((it) => it.type === 'agentMessage') - const nonAgentNonUserItems = existingItems.filter((it) => it.type !== 'agentMessage' && it.type !== 'userMessage') - const userMessages = existingItems.filter((it) => it.type === 'userMessage') - - let agentIdx = 0 - const interleaved: Record[] = [...userMessages] - - for (const slot of slots) { - if (slot.type === 'agentMessage') { - if (agentIdx < agentMessages.length) { - interleaved.push(agentMessages[agentIdx]!) - agentIdx++ - } - } else if (slot.type === 'commandExecution' && slot.command) { - interleaved.push(slot.command as unknown as Record) - } else if (slot.type === 'fileChange' && slot.fileChange) { - interleaved.push(slot.fileChange as unknown as Record) - } - } - - while (agentIdx < agentMessages.length) { - interleaved.push(agentMessages[agentIdx]!) 
- agentIdx++ - } - - interleaved.push(...nonAgentNonUserItems) - - return { - ...turnRecord, - items: interleaved, - } - }) -} - function isExactPhraseMatch(query: string, doc: ThreadSearchDocument): boolean { const q = query.trim().toLowerCase() if (!q) return false @@ -5165,7 +4451,7 @@ export function createCodexBridgeMiddleware(): CodexBridgeMiddleware { return } - const result = await revertTurnFileChanges(cwd, turnInfos) + const result = await revertTurnFileChanges(cwd, turnInfos, { runCommand, runCommandCapture }) setJson(res, 200, { ...result, message: `Reverted ${result.reverted} file change(s)` }) } catch (error) { setJson(res, 500, { error: getErrorMessage(error, 'Failed to revert file changes') }) diff --git a/src/server/sessionRecovery.ts b/src/server/sessionRecovery.ts new file mode 100644 index 000000000..d1256265a --- /dev/null +++ b/src/server/sessionRecovery.ts @@ -0,0 +1,738 @@ +import { readFile, rm, stat, writeFile } from 'node:fs/promises' +import { isAbsolute, join } from 'node:path' + +type SessionRecoveryCommandDeps = { + runCommand: (command: string, args: string[], options?: { cwd?: string; timeoutMs?: number }) => Promise + runCommandCapture: (command: string, args: string[], options?: { cwd?: string }) => Promise +} + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? 
value : '' +} + +export type SessionRecoveredFileChange = { + path: string + operation: 'add' | 'delete' | 'update' + movedToPath: string | null + diff: string + addedLineCount: number + removedLineCount: number +} + +export type SessionRecoveredTurnFileChanges = { + turnId: string + turnIndex: number + fileChanges: SessionRecoveredFileChange[] +} + +function countRecoveredContentLines(value: string): number { + if (!value) return 0 + const normalized = value.replace(/\r\n/g, '\n') + const trimmed = normalized.endsWith('\n') ? normalized.slice(0, -1) : normalized + if (!trimmed) return 0 + return trimmed.split('\n').length +} + +function countRecoveredPatchLines(value: string): { addedLineCount: number; removedLineCount: number } { + let addedLineCount = 0 + let removedLineCount = 0 + + for (const line of value.replace(/\r\n/g, '\n').split('\n')) { + if (!line) continue + if (line.startsWith('+++') || line.startsWith('---') || line.startsWith('@@')) continue + if (line.startsWith('+')) { + addedLineCount += 1 + continue + } + if (line.startsWith('-')) { + removedLineCount += 1 + } + } + + return { addedLineCount, removedLineCount } +} + +function mergeRecoveredDiff(first: string, second: string): string { + if (!first) return second + if (!second || first === second) return first + return `${first}\n${second}`.trim() +} + +function mergeRecoveredFileChange(first: SessionRecoveredFileChange, second: SessionRecoveredFileChange): SessionRecoveredFileChange { + const operation = first.operation === 'add' || second.operation === 'add' + ? 'add' + : first.operation === 'delete' || second.operation === 'delete' + ? 'delete' + : 'update' + + return { + path: second.path || first.path, + operation, + movedToPath: second.movedToPath ?? first.movedToPath ?? 
null, + diff: mergeRecoveredDiff(first.diff, second.diff), + addedLineCount: first.addedLineCount + second.addedLineCount, + removedLineCount: first.removedLineCount + second.removedLineCount, + } +} + +function isApplyPatchSectionBoundary(value: string): boolean { + return value.startsWith('*** Update File: ') + || value.startsWith('*** Add File: ') + || value.startsWith('*** Delete File: ') + || value === '*** End Patch' +} + +function parseApplyPatchInput(input: string): SessionRecoveredFileChange[] { + const normalized = input.replace(/\r\n/g, '\n') + const lines = normalized.split('\n') + const changes: SessionRecoveredFileChange[] = [] + + for (let index = 0; index < lines.length; index += 1) { + const line = lines[index] ?? '' + + if (line.startsWith('*** Add File: ')) { + const path = line.slice('*** Add File: '.length).trim() + const contentLines: string[] = [] + for (index += 1; index < lines.length; index += 1) { + const nextLine = lines[index] ?? '' + if (isApplyPatchSectionBoundary(nextLine)) { + index -= 1 + break + } + contentLines.push(nextLine.startsWith('+') ? nextLine.slice(1) : nextLine) + } + const diff = contentLines.join('\n').trimEnd() + if (path) { + changes.push({ + path, + operation: 'add', + movedToPath: null, + diff, + addedLineCount: countRecoveredContentLines(diff), + removedLineCount: 0, + }) + } + continue + } + + if (line.startsWith('*** Delete File: ')) { + const path = line.slice('*** Delete File: '.length).trim() + if (path) { + changes.push({ + path, + operation: 'delete', + movedToPath: null, + diff: '', + addedLineCount: 0, + removedLineCount: 0, + }) + } + continue + } + + if (line.startsWith('*** Update File: ')) { + const path = line.slice('*** Update File: '.length).trim() + let movedToPath: string | null = null + const diffLines: string[] = [] + + for (index += 1; index < lines.length; index += 1) { + const nextLine = lines[index] ?? 
'' + if (nextLine.startsWith('*** Move to: ')) { + const moved = nextLine.slice('*** Move to: '.length).trim() + movedToPath = moved || null + continue + } + if (isApplyPatchSectionBoundary(nextLine)) { + index -= 1 + break + } + diffLines.push(nextLine) + } + + const diff = diffLines.join('\n').trimEnd() + const counts = countRecoveredPatchLines(diff) + if (path) { + changes.push({ + path, + operation: 'update', + movedToPath, + diff, + ...counts, + }) + } + } + } + + return changes +} + +export function buildSessionFileChangeFallback(threadReadPayload: unknown, sessionLogRaw: string): SessionRecoveredTurnFileChanges[] { + const payload = asRecord(threadReadPayload) + const thread = asRecord(payload?.thread) + const turns = Array.isArray(thread?.turns) ? thread.turns : [] + const turnIndexById = new Map() + + for (let turnIndex = 0; turnIndex < turns.length; turnIndex += 1) { + const turnRecord = asRecord(turns[turnIndex]) + const turnId = readNonEmptyString(turnRecord?.id) + if (turnId) { + turnIndexById.set(turnId, turnIndex) + } + } + + const collectedByTurnId = new Map() + let currentTurnId = '' + + for (const line of sessionLogRaw.split('\n')) { + if (!line.trim()) continue + let row: Record | null = null + try { + row = JSON.parse(line) as Record + } catch { + continue + } + + if (row.type === 'turn_context') { + const payloadRecord = asRecord(row.payload) + currentTurnId = readNonEmptyString(payloadRecord?.turn_id) || currentTurnId + continue + } + + if (row.type !== 'response_item' || !currentTurnId || !turnIndexById.has(currentTurnId)) { + continue + } + + const payloadRecord = asRecord(row.payload) + if ( + payloadRecord?.type !== 'custom_tool_call' + || payloadRecord.name !== 'apply_patch' + || payloadRecord.status !== 'completed' + ) { + continue + } + + const input = readNonEmptyString(payloadRecord.input) + if (!input) continue + + const parsedChanges = parseApplyPatchInput(input) + if (parsedChanges.length === 0) continue + + const previous = 
collectedByTurnId.get(currentTurnId) ?? [] + previous.push(...parsedChanges) + collectedByTurnId.set(currentTurnId, previous) + } + + const recovered: SessionRecoveredTurnFileChanges[] = [] + for (const [turnId, fileChanges] of collectedByTurnId.entries()) { + const turnIndex = turnIndexById.get(turnId) + if (typeof turnIndex !== 'number' || fileChanges.length === 0) continue + + const mergedByPath = new Map() + for (const fileChange of fileChanges) { + const key = `${fileChange.path}\u0000${fileChange.movedToPath ?? ''}` + const previous = mergedByPath.get(key) + mergedByPath.set(key, previous ? mergeRecoveredFileChange(previous, fileChange) : { ...fileChange }) + } + + recovered.push({ + turnId, + turnIndex, + fileChanges: Array.from(mergedByPath.values()), + }) + } + + return recovered.sort((first, second) => first.turnIndex - second.turnIndex) +} + +type SessionRecoveredCommand = { + id: string + type: 'commandExecution' + command: string + cwd: string | null + status: 'completed' | 'failed' + aggregatedOutput: string + exitCode: number | null + durationMs: number | null +} + +function parseExecCommandOutput(output: string): { exitCode: number | null; wallTime: number | null; cleanOutput: string } { + let exitCode: number | null = null + let wallTime: number | null = null + const outputLines: string[] = [] + let pastHeader = false + + for (const line of output.split('\n')) { + if (!pastHeader) { + const exitMatch = line.match(/^Process exited with code (\d+)/) + if (exitMatch) { + exitCode = Number.parseInt(exitMatch[1]!, 10) + continue + } + const wallMatch = line.match(/^Wall time:\s+([\d.]+)\s+seconds/) + if (wallMatch) { + wallTime = Math.round(Number.parseFloat(wallMatch[1]!) 
* 1000) + continue + } + if (line.startsWith('Command:') || line.startsWith('Chunk ID:') || line.startsWith('Original token count:')) { + continue + } + if (line === 'Output:') { + pastHeader = true + continue + } + } + outputLines.push(line) + } + + return { exitCode, wallTime, cleanOutput: outputLines.join('\n').trimEnd() } +} + +export type SessionRecoveredFileChangeItem = { + id: string + type: 'fileChange' + status: 'completed' + changes: Record[] +} + +type SessionItemSlot = { + type: 'agentMessage' | 'commandExecution' | 'fileChange' + command?: SessionRecoveredCommand + fileChange?: SessionRecoveredFileChangeItem +} + +function buildSessionItemOrder(sessionLogRaw: string, turnIds: Set): Map { + let currentTurnId = '' + const orderByTurnId = new Map() + const callIdToCommand = new Map() + + for (const line of sessionLogRaw.split('\n')) { + if (!line.trim()) continue + let row: Record | null = null + try { + row = JSON.parse(line) as Record + } catch { + continue + } + + if (row.type === 'turn_context') { + const p = asRecord(row.payload) + currentTurnId = readNonEmptyString(p?.turn_id) || currentTurnId + continue + } + if (row.type === 'event_msg') { + const p = asRecord(row.payload) + if (p?.type === 'task_started') { + currentTurnId = readNonEmptyString(p.turn_id) || currentTurnId + } + continue + } + + if (row.type !== 'response_item' || !currentTurnId || !turnIds.has(currentTurnId)) continue + const payload = asRecord(row.payload) + if (!payload) continue + + let slots = orderByTurnId.get(currentTurnId) + if (!slots) { + slots = [] + orderByTurnId.set(currentTurnId, slots) + } + + if (payload.type === 'message' && payload.role === 'assistant') { + slots.push({ type: 'agentMessage' }) + continue + } + + if (payload.type === 'function_call' && payload.name === 'exec_command') { + const callId = readNonEmptyString(payload.call_id) + if (!callId) continue + let cmd = '' + try { + const args = JSON.parse(payload.arguments as string) as Record + cmd = typeof 
args.cmd === 'string' ? args.cmd : '' + } catch { /* empty */ } + const command: SessionRecoveredCommand = { + id: `session-cmd-${callId}`, + type: 'commandExecution', + command: cmd, + cwd: null, + status: 'completed', + aggregatedOutput: '', + exitCode: null, + durationMs: null, + } + callIdToCommand.set(callId, command) + slots.push({ type: 'commandExecution', command }) + continue + } + + if (payload.type === 'function_call_output') { + const callId = readNonEmptyString(payload.call_id) + if (!callId) continue + const existing = callIdToCommand.get(callId) + if (!existing) continue + const rawOutput = typeof payload.output === 'string' ? payload.output : '' + const parsed = parseExecCommandOutput(rawOutput) + existing.aggregatedOutput = parsed.cleanOutput + existing.exitCode = parsed.exitCode + existing.durationMs = parsed.wallTime + existing.status = parsed.exitCode === 0 || parsed.exitCode === null ? 'completed' : 'failed' + } + + if (payload.type === 'custom_tool_call' && payload.name === 'apply_patch' && payload.status === 'completed') { + const input = typeof payload.input === 'string' ? payload.input : '' + const callId = readNonEmptyString(payload.call_id) + if (!input || !callId) continue + const parsedChanges = parseApplyPatchInput(input) + if (parsedChanges.length === 0) continue + const fcItem: SessionRecoveredFileChangeItem = { + id: `session-fc-${callId}`, + type: 'fileChange', + status: 'completed', + changes: parsedChanges.map((fc) => ({ + ...fc, + kind: { type: fc.operation, ...(fc.movedToPath ? 
{ move_path: fc.movedToPath } : {}) }, + })), + } + slots.push({ type: 'fileChange', fileChange: fcItem }) + } + } + + return orderByTurnId +} + +function extractFilePathsFromCommand(cmd: string, cwd: string): string[] { + const paths: string[] = [] + const absPathPattern = /(?:^|\s|>>|>|<)(\/?(?:Users|home|tmp|var|etc|root)\/[^\s;|&><"']+)/g + let match: RegExpExecArray | null + while ((match = absPathPattern.exec(cmd)) !== null) { + const p = match[1]?.trim() + if (p && !p.endsWith('/') && !p.startsWith('-')) paths.push(p) + } + + const redirectPattern = /(?:>>?|cat\s*>\s*)([^\s;|&><"']+)/g + while ((match = redirectPattern.exec(cmd)) !== null) { + const p = match[1]?.trim() + if (p && !p.startsWith('-') && !p.startsWith('/dev/')) { + paths.push(isAbsolute(p) ? p : join(cwd, p)) + } + } + + return [...new Set(paths)] +} + +export type CollectedTurnFileInfo = { + patchInputs: { callId: string; input: string }[] + commandFilePaths: string[] +} + +export function collectFileChangesForTurns( + sessionLogRaw: string, + turnIdsToRevert: Set, + cwd: string, +): Map { + let currentTurnId = '' + const infoByTurnId = new Map() + + for (const line of sessionLogRaw.split('\n')) { + if (!line.trim()) continue + let row: Record | null = null + try { + row = JSON.parse(line) as Record + } catch { + continue + } + + if (row.type === 'turn_context') { + const p = asRecord(row.payload) + currentTurnId = readNonEmptyString(p?.turn_id) || currentTurnId + continue + } + if (row.type === 'event_msg') { + const p = asRecord(row.payload) + if (p?.type === 'task_started') { + currentTurnId = readNonEmptyString(p.turn_id) || currentTurnId + } + continue + } + + if (row.type !== 'response_item' || !currentTurnId || !turnIdsToRevert.has(currentTurnId)) continue + const payload = asRecord(row.payload) + if (!payload) continue + + let info = infoByTurnId.get(currentTurnId) + if (!info) { + info = { patchInputs: [], commandFilePaths: [] } + infoByTurnId.set(currentTurnId, info) + } + + if 
(payload.type === 'custom_tool_call' && payload.name === 'apply_patch' && payload.status === 'completed') { + const input = typeof payload.input === 'string' ? payload.input : '' + const callId = readNonEmptyString(payload.call_id) + if (input && callId) { + info.patchInputs.push({ callId, input }) + } + } + + if (payload.type === 'function_call' && payload.name === 'exec_command') { + let cmd = '' + try { + const args = JSON.parse(payload.arguments as string) as Record + cmd = typeof args.cmd === 'string' ? args.cmd : '' + } catch { /* empty */ } + if (cmd) { + const extracted = extractFilePathsFromCommand(cmd, cwd) + for (const p of extracted) { + if (!info.commandFilePaths.includes(p)) info.commandFilePaths.push(p) + } + } + } + } + + return infoByTurnId +} + +function reverseV4aDiff(fileContent: string, diffText: string): string | null { + const fileLines = fileContent.split('\n') + const rawDiffLines = diffText.split('\n') + while (rawDiffLines.length > 0 && rawDiffLines[rawDiffLines.length - 1]?.trim() === '') rawDiffLines.pop() + const diffLines = rawDiffLines + const result = [...fileLines] + + type DiffEntry = { type: 'context' | 'add' | 'remove'; text: string } + const hunks: DiffEntry[][] = [] + let currentHunk: DiffEntry[] | null = null + + for (const dl of diffLines) { + if (dl.startsWith('@@')) { + if (currentHunk) hunks.push(currentHunk) + currentHunk = [] + continue + } + if (!currentHunk) continue + if (dl.startsWith('+')) { + currentHunk.push({ type: 'add', text: dl.slice(1) }) + } else if (dl.startsWith('-')) { + currentHunk.push({ type: 'remove', text: dl.slice(1) }) + } else if (dl.startsWith(' ')) { + currentHunk.push({ type: 'context', text: dl.slice(1) }) + } else { + currentHunk.push({ type: 'context', text: dl }) + } + } + if (currentHunk) hunks.push(currentHunk) + + for (let hi = hunks.length - 1; hi >= 0; hi--) { + const hunk = hunks[hi]! 
+ const expectedSequence = hunk + .filter((e) => e.type === 'context' || e.type === 'add') + .map((e) => e.text) + + if (expectedSequence.length === 0) continue + + let seqStart = -1 + outer: for (let ri = result.length - expectedSequence.length; ri >= 0; ri--) { + for (let si = 0; si < expectedSequence.length; si++) { + if (result[ri + si] !== expectedSequence[si]) continue outer + } + seqStart = ri + break + } + + if (seqStart < 0) return null + + const newLines: string[] = [] + let seqIdx = 0 + for (const entry of hunk) { + if (entry.type === 'context') { + newLines.push(result[seqStart + seqIdx]!) + seqIdx++ + } else if (entry.type === 'add') { + seqIdx++ + } else if (entry.type === 'remove') { + newLines.push(entry.text) + } + } + + result.splice(seqStart, expectedSequence.length, ...newLines) + } + + return result.join('\n') +} + +export async function revertTurnFileChanges( + cwd: string, + turnInfos: Map, + deps: SessionRecoveryCommandDeps, +): Promise<{ reverted: number; errors: string[] }> { + if (turnInfos.size === 0) return { reverted: 0, errors: [] } + + let reverted = 0 + const errors: string[] = [] + + const allEntries = [...turnInfos.values()] + const allPatchInputs = allEntries.flatMap((info) => info.patchInputs).reverse() + const allCommandPaths = new Set(allEntries.flatMap((info) => info.commandFilePaths)) + + let isGitRepo = false + let gitRoot = '' + try { + gitRoot = await deps.runCommandCapture('git', ['rev-parse', '--show-toplevel'], { cwd }) + isGitRepo = !!gitRoot + } catch { /* not a git repo */ } + + const trackedFiles = new Set() + if (isGitRepo) { + try { + const tracked = await deps.runCommandCapture('git', ['ls-files', '--full-name'], { cwd: gitRoot }) + for (const f of tracked.split('\n')) { + if (f.trim()) trackedFiles.add(join(gitRoot, f.trim())) + } + } catch { /* empty */ } + } + + const patchRevertedPaths = new Set() + + for (const patch of allPatchInputs) { + const changes = parseApplyPatchInput(patch.input) + for (let ci = 
changes.length - 1; ci >= 0; ci--) { + const change = changes[ci]! + const filePath = isAbsolute(change.path) ? change.path : join(cwd, change.path) + + try { + if (change.operation === 'add') { + const fileStat = await stat(filePath).catch(() => null) + if (fileStat) { + await rm(filePath, { force: true }) + reverted++ + patchRevertedPaths.add(filePath) + } + } else if (change.operation === 'update' && change.diff) { + let reversed = false + try { + const currentContent = await readFile(filePath, 'utf8') + const newContent = reverseV4aDiff(currentContent, change.diff) + if (newContent !== null && newContent !== currentContent) { + const { writeFile } = await import('node:fs/promises') + await writeFile(filePath, newContent) + reverted++ + patchRevertedPaths.add(filePath) + reversed = true + } + } catch { /* file read/write failed */ } + + if (!reversed) { + const isTracked = trackedFiles.has(filePath) + if (isTracked && isGitRepo) { + const relativePath = filePath.startsWith(gitRoot + '/') ? filePath.slice(gitRoot.length + 1) : filePath + try { + await deps.runCommand('git', ['checkout', 'HEAD', '--', relativePath], { cwd: gitRoot }) + reverted++ + patchRevertedPaths.add(filePath) + } catch { + errors.push(`Could not revert: ${filePath}`) + } + } else { + errors.push(`Could not reverse patch for untracked file: ${filePath}`) + } + } + } else if (change.operation === 'delete') { + const isTracked = trackedFiles.has(filePath) + if (isTracked && isGitRepo) { + const relativePath = filePath.startsWith(gitRoot + '/') ? filePath.slice(gitRoot.length + 1) : filePath + try { + await deps.runCommand('git', ['checkout', 'HEAD', '--', relativePath], { cwd: gitRoot }) + reverted++ + patchRevertedPaths.add(filePath) + } catch { + errors.push(`Could not restore deleted file: ${filePath}`) + } + } + } + } catch (err) { + errors.push(`Failed to revert patch for ${filePath}: ${err instanceof Error ? 
err.message : String(err)}`) + } + } + } + + for (const filePath of allCommandPaths) { + if (patchRevertedPaths.has(filePath)) continue + const isTracked = trackedFiles.has(filePath) + if (isTracked && isGitRepo) { + const relativePath = filePath.startsWith(gitRoot + '/') ? filePath.slice(gitRoot.length + 1) : filePath + try { + await deps.runCommand('git', ['checkout', 'HEAD', '--', relativePath], { cwd: gitRoot }) + reverted++ + } catch { + errors.push(`Could not restore command-modified file: ${filePath}`) + } + } + } + + return { reverted, errors } +} + +export function mergeSessionCommandsIntoTurns(turns: unknown[], sessionLogRaw: string): unknown[] { + const turnIds = new Set() + for (const turn of turns) { + const turnRecord = asRecord(turn) + const turnId = readNonEmptyString(turnRecord?.id) + if (turnId) turnIds.add(turnId) + } + + if (turnIds.size === 0) return turns + + const orderByTurnId = buildSessionItemOrder(sessionLogRaw, turnIds) + if (orderByTurnId.size === 0) return turns + + return turns.map((turn) => { + const turnRecord = asRecord(turn) + if (!turnRecord) return turn + const turnId = readNonEmptyString(turnRecord.id) + if (!turnId) return turn + + const slots = orderByTurnId.get(turnId) + if (!slots || slots.length === 0) return turn + + const existingItems = Array.isArray(turnRecord.items) ? 
(turnRecord.items as Record[]) : [] + const alreadyHasRecoveredItems = existingItems.some((it) => it.type === 'commandExecution' || it.type === 'fileChange') + if (alreadyHasRecoveredItems) return turn + + const agentMessages = existingItems.filter((it) => it.type === 'agentMessage') + const nonAgentNonUserItems = existingItems.filter((it) => it.type !== 'agentMessage' && it.type !== 'userMessage') + const userMessages = existingItems.filter((it) => it.type === 'userMessage') + + let agentIdx = 0 + const interleaved: Record[] = [...userMessages] + + for (const slot of slots) { + if (slot.type === 'agentMessage') { + if (agentIdx < agentMessages.length) { + interleaved.push(agentMessages[agentIdx]!) + agentIdx++ + } + } else if (slot.type === 'commandExecution' && slot.command) { + interleaved.push(slot.command as unknown as Record) + } else if (slot.type === 'fileChange' && slot.fileChange) { + interleaved.push(slot.fileChange as unknown as Record) + } + } + + while (agentIdx < agentMessages.length) { + interleaved.push(agentMessages[agentIdx]!) 
+ agentIdx++ + } + + interleaved.push(...nonAgentNonUserItems) + + return { + ...turnRecord, + items: interleaved, + } + }) +} From 3ff108134225de47c630a4720349c194f771b4be Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:22:26 +0700 Subject: [PATCH 08/19] Extract thread automation storage --- src/server/codexAppServerBridge.ts | 419 +---------------------------- src/server/threadAutomations.ts | 418 ++++++++++++++++++++++++++++ 2 files changed, 431 insertions(+), 406 deletions(-) create mode 100644 src/server/threadAutomations.ts diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index 4ddf0bddf..9e30d63be 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -57,6 +57,19 @@ import { mergeSessionCommandsIntoTurns, revertTurnFileChanges, } from './sessionRecovery.js' +import { + deleteProjectCronAutomation, + deleteThreadHeartbeatAutomation, + listProjectCronAutomations, + listThreadHeartbeatAutomations, + readProjectCronAutomation, + readProjectCronAutomations, + readThreadHeartbeatAutomation, + readThreadHeartbeatAutomations, + writeProjectCronAutomation, + writeThreadHeartbeatAutomation, + type ThreadAutomationRecord, +} from './threadAutomations.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -1359,412 +1372,6 @@ function getCodexAutomationsDir(): string { return join(getCodexHomeDir(), 'automations') } -type ThreadAutomationStatus = 'ACTIVE' | 'PAUSED' - -type ThreadAutomationRecord = { - id: string - kind: 'heartbeat' | 'cron' - name: string - prompt: string - rrule: string - status: ThreadAutomationStatus - targetThreadId: string | null - cwds: string[] - extraTomlLines: string[] - createdAtMs: number | null - updatedAtMs: number | null - nextRunAtMs: number | null -} - -function 
readTomlString(value: string): string { - const trimmed = value.trim() - if ((trimmed.startsWith('"') && trimmed.endsWith('"')) || (trimmed.startsWith('\'') && trimmed.endsWith('\''))) { - try { - return JSON.parse(trimmed) - } catch { - return trimmed.slice(1, -1) - } - } - return trimmed -} - -function serializeTomlString(value: string): string { - return JSON.stringify(value) -} - -function parseTomlStringArray(value: string): string[] { - const trimmed = value.trim() - if (!trimmed.startsWith('[') || !trimmed.endsWith(']')) return [] - try { - const parsed = JSON.parse(trimmed) - return Array.isArray(parsed) - ? parsed.filter((item): item is string => typeof item === 'string' && item.trim().length > 0) - : [] - } catch { - return [] - } -} - -function serializeTomlStringArray(values: string[]): string { - return `[${values.map((value) => serializeTomlString(value)).join(', ')}]` -} - -function parseAutomationToml(raw: string): ThreadAutomationRecord | null { - const values: Record = {} - const extraTomlLines: string[] = [] - const knownKeys = new Set([ - 'version', - 'id', - 'kind', - 'name', - 'prompt', - 'status', - 'rrule', - 'target_thread_id', - 'cwds', - 'created_at', - 'updated_at', - ]) - let isInsideExtraTable = false - for (const line of raw.split(/\r?\n/u)) { - const trimmed = line.trim() - if (!trimmed || trimmed.startsWith('#')) continue - if (trimmed.startsWith('[') && trimmed.endsWith(']')) { - isInsideExtraTable = true - extraTomlLines.push(trimmed) - continue - } - if (isInsideExtraTable) { - extraTomlLines.push(trimmed) - continue - } - if (!trimmed.includes('=')) { - extraTomlLines.push(trimmed) - continue - } - const separatorIndex = trimmed.indexOf('=') - const key = trimmed.slice(0, separatorIndex).trim() - const value = trimmed.slice(separatorIndex + 1).trim() - if (!key) continue - if (knownKeys.has(key)) { - values[key] = value - } else { - extraTomlLines.push(trimmed) - } - } - - const id = readTomlString(values.id ?? 
'') - const kindValue = readTomlString(values.kind ?? (values.cwds ? 'cron' : 'heartbeat')) - const name = readTomlString(values.name ?? '') - const prompt = readTomlString(values.prompt ?? '') - const rrule = readTomlString(values.rrule ?? '') - const statusValue = readTomlString(values.status ?? 'ACTIVE') - const targetThreadId = readTomlString(values.target_thread_id ?? '') || null - const cwds = parseTomlStringArray(values.cwds ?? '') - const createdAtMs = Number.parseInt(values.created_at ?? '', 10) - const updatedAtMs = Number.parseInt(values.updated_at ?? '', 10) - - if (!id || !name || !prompt || !rrule) return null - if (kindValue !== 'heartbeat' && kindValue !== 'cron') return null - if (statusValue !== 'ACTIVE' && statusValue !== 'PAUSED') return null - - return { - id, - kind: kindValue, - name, - prompt, - rrule, - status: statusValue, - targetThreadId, - cwds, - extraTomlLines, - createdAtMs: Number.isFinite(createdAtMs) ? createdAtMs : null, - updatedAtMs: Number.isFinite(updatedAtMs) ? updatedAtMs : null, - nextRunAtMs: null, - } -} - -function serializeAutomationToml(record: ThreadAutomationRecord): string { - const lines = [ - 'version = 1', - `id = ${serializeTomlString(record.id)}`, - `kind = ${serializeTomlString(record.kind)}`, - `name = ${serializeTomlString(record.name)}`, - `prompt = ${serializeTomlString(record.prompt)}`, - `status = ${serializeTomlString(record.status)}`, - `rrule = ${serializeTomlString(record.rrule)}`, - ] - if (record.targetThreadId) { - lines.push(`target_thread_id = ${serializeTomlString(record.targetThreadId)}`) - } - if (record.cwds.length > 0) { - lines.push(`cwds = ${serializeTomlStringArray(record.cwds)}`) - } - lines.push( - `created_at = ${String(record.createdAtMs ?? Date.now())}`, - `updated_at = ${String(record.updatedAtMs ?? 
Date.now())}`, - ) - lines.push(...record.extraTomlLines) - return `${lines.join('\n')}\n` -} - -function slugifyAutomationId(threadId: string, name: string): string { - const preferred = name.trim().toLowerCase().replace(/[^a-z0-9]+/gu, '-').replace(/^-+|-+$/gu, '') - if (preferred) return preferred.slice(0, 48) - const fallback = threadId.trim().toLowerCase().replace(/[^a-z0-9]+/gu, '-').replace(/^-+|-+$/gu, '') - return `heartbeat-${fallback.slice(0, 24) || randomBytes(4).toString('hex')}` -} - -async function readAutomationRecordFromFile(filePath: string): Promise { - try { - return parseAutomationToml(await readFile(filePath, 'utf8')) - } catch { - return null - } -} - -async function listThreadHeartbeatAutomations(): Promise> { - const automationRoot = getCodexAutomationsDir() - const next: Record = {} - let entries - try { - entries = await readdir(automationRoot, { withFileTypes: true }) - } catch { - return next - } - - for (const entry of entries) { - if (!entry.isDirectory()) continue - const automation = await readAutomationRecordFromFile(join(automationRoot, entry.name, 'automation.toml')) - if (!automation || automation.kind !== 'heartbeat' || !automation.targetThreadId) continue - next[automation.targetThreadId] = [...(next[automation.targetThreadId] ?? []), automation] - } - - for (const automations of Object.values(next)) { - automations.sort((first, second) => { - const firstCreatedAt = first.createdAtMs ?? 0 - const secondCreatedAt = second.createdAtMs ?? 0 - if (firstCreatedAt !== secondCreatedAt) return firstCreatedAt - secondCreatedAt - return first.id.localeCompare(second.id) - }) - } - - return next -} - -async function readThreadHeartbeatAutomations(threadId: string): Promise { - const all = await listThreadHeartbeatAutomations() - return all[threadId] ?? 
[] -} - -async function readThreadHeartbeatAutomation(threadId: string, automationId = ''): Promise { - const automations = await readThreadHeartbeatAutomations(threadId) - if (automationId) return automations.find((automation) => automation.id === automationId) ?? null - return automations[0] ?? null -} - -function resolveUniqueAutomationId(existingIds: Set, threadId: string, name: string): string { - const baseId = slugifyAutomationId(threadId, name) - if (!existingIds.has(baseId)) return baseId - for (let index = 2; index < 1000; index += 1) { - const candidate = `${baseId}-${index}` - if (!existingIds.has(candidate)) return candidate - } - return `${baseId}-${randomBytes(4).toString('hex')}` -} - -async function writeThreadHeartbeatAutomation(input: { - threadId: string - id?: string - name: string - prompt: string - rrule: string - status: ThreadAutomationStatus -}): Promise { - const threadId = input.threadId.trim() - const name = input.name.trim() - const prompt = input.prompt.trim() - const rrule = input.rrule.trim() - if (!threadId || !name || !prompt || !rrule) { - throw new Error('threadId, name, prompt, and rrule are required') - } - - const automationRoot = getCodexAutomationsDir() - await mkdir(automationRoot, { recursive: true }) - const existing = input.id ? await readThreadHeartbeatAutomation(threadId, input.id.trim()) : null - const entries = await readdir(automationRoot, { withFileTypes: true }).catch(() => []) - const existingIds = new Set(entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name)) - const id = existing?.id ?? resolveUniqueAutomationId(existingIds, threadId, name) - const automationDir = join(automationRoot, id) - const now = Date.now() - const record: ThreadAutomationRecord = { - id, - kind: 'heartbeat', - name, - prompt, - rrule, - status: input.status, - targetThreadId: threadId, - cwds: [], - extraTomlLines: existing?.extraTomlLines ?? [], - createdAtMs: existing?.createdAtMs ?? 
now, - updatedAtMs: now, - nextRunAtMs: null, - } - - await mkdir(automationDir, { recursive: true }) - await writeFile(join(automationDir, 'automation.toml'), serializeAutomationToml(record), 'utf8') - const memoryPath = join(automationDir, 'memory.md') - try { - await stat(memoryPath) - } catch { - await writeFile(memoryPath, '', 'utf8') - } - return record -} - -async function deleteThreadHeartbeatAutomation(threadId: string, automationId = ''): Promise { - const normalizedThreadId = threadId.trim() - const normalizedAutomationId = automationId.trim() - if (normalizedAutomationId) { - const automation = await readThreadHeartbeatAutomation(normalizedThreadId, normalizedAutomationId) - if (!automation) return false - await rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }) - return true - } - - const automations = await readThreadHeartbeatAutomations(normalizedThreadId) - if (automations.length === 0) return false - await Promise.all(automations.map((automation) => rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }))) - return true -} - -async function listProjectCronAutomations(): Promise> { - const automationRoot = getCodexAutomationsDir() - const next: Record = {} - let entries - try { - entries = await readdir(automationRoot, { withFileTypes: true }) - } catch { - return next - } - - for (const entry of entries) { - if (!entry.isDirectory()) continue - const automation = await readAutomationRecordFromFile(join(automationRoot, entry.name, 'automation.toml')) - if (!automation || automation.kind !== 'cron' || automation.cwds.length === 0) continue - for (const cwd of automation.cwds) { - next[cwd] = [...(next[cwd] ?? []), automation] - } - } - - for (const automations of Object.values(next)) { - automations.sort((first, second) => { - const firstCreatedAt = first.createdAtMs ?? 0 - const secondCreatedAt = second.createdAtMs ?? 
0 - if (firstCreatedAt !== secondCreatedAt) return firstCreatedAt - secondCreatedAt - return first.id.localeCompare(second.id) - }) - } - - return next -} - -async function readProjectCronAutomations(projectName: string): Promise { - const all = await listProjectCronAutomations() - return all[projectName] ?? [] -} - -async function readProjectCronAutomation(projectName: string, automationId = ''): Promise { - const automations = await readProjectCronAutomations(projectName) - if (automationId) return automations.find((automation) => automation.id === automationId) ?? null - return automations[0] ?? null -} - -async function writeProjectCronAutomation(input: { - projectName: string - id?: string - name: string - prompt: string - rrule: string - status: ThreadAutomationStatus -}): Promise { - const projectName = input.projectName.trim() - const name = input.name.trim() - const prompt = input.prompt.trim() - const rrule = input.rrule.trim() - if (!projectName || !name || !prompt || !rrule) { - throw new Error('projectName, name, prompt, and rrule are required') - } - if (!isAbsoluteLikePath(projectName)) { - throw new Error('Project automation cwd must be an absolute path') - } - - const automationRoot = getCodexAutomationsDir() - await mkdir(automationRoot, { recursive: true }) - const existing = input.id ? await readProjectCronAutomation(projectName, input.id.trim()) : null - const entries = await readdir(automationRoot, { withFileTypes: true }).catch(() => []) - const existingIds = new Set(entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name)) - const id = existing?.id ?? resolveUniqueAutomationId(existingIds, projectName, name) - const automationDir = join(automationRoot, id) - const now = Date.now() - const record: ThreadAutomationRecord = { - id, - kind: 'cron', - name, - prompt, - rrule, - status: input.status, - targetThreadId: null, - cwds: Array.from(new Set([...(existing?.cwds ?? 
[]), projectName])), - extraTomlLines: existing?.extraTomlLines ?? [], - createdAtMs: existing?.createdAtMs ?? now, - updatedAtMs: now, - nextRunAtMs: null, - } - - await mkdir(automationDir, { recursive: true }) - await writeFile(join(automationDir, 'automation.toml'), serializeAutomationToml(record), 'utf8') - const memoryPath = join(automationDir, 'memory.md') - try { - await stat(memoryPath) - } catch { - await writeFile(memoryPath, '', 'utf8') - } - return record -} - -async function deleteProjectCronAutomation(projectName: string, automationId = ''): Promise { - const normalizedProjectName = projectName.trim() - const normalizedAutomationId = automationId.trim() - if (!normalizedProjectName || !isAbsoluteLikePath(normalizedProjectName)) return false - if (normalizedAutomationId) { - const automation = await readProjectCronAutomation(normalizedProjectName, normalizedAutomationId) - if (!automation) return false - const remainingCwds = automation.cwds.filter((cwd) => cwd !== normalizedProjectName) - if (remainingCwds.length > 0) { - const record = { ...automation, cwds: remainingCwds, updatedAtMs: Date.now() } - await writeFile(join(getCodexAutomationsDir(), automation.id, 'automation.toml'), serializeAutomationToml(record), 'utf8') - } else { - await rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }) - } - return true - } - - const automations = await readProjectCronAutomations(normalizedProjectName) - if (automations.length === 0) return false - await Promise.all(automations.map(async (automation) => { - const remainingCwds = automation.cwds.filter((cwd) => cwd !== normalizedProjectName) - if (remainingCwds.length > 0) { - const record = { ...automation, cwds: remainingCwds, updatedAtMs: Date.now() } - await writeFile(join(getCodexAutomationsDir(), automation.id, 'automation.toml'), serializeAutomationToml(record), 'utf8') - return - } - await rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }) 
- })) - return true -} - type ThreadTitleCache = { titles: Record; order: string[] } const MAX_THREAD_TITLES = 500 const EMPTY_THREAD_TITLE_CACHE: ThreadTitleCache = { titles: {}, order: [] } diff --git a/src/server/threadAutomations.ts b/src/server/threadAutomations.ts new file mode 100644 index 000000000..fb0c6bbf3 --- /dev/null +++ b/src/server/threadAutomations.ts @@ -0,0 +1,418 @@ +import { mkdir, readdir, readFile, rm, stat, writeFile } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' + +function getCodexHomeDir(): string { + const codexHome = process.env.CODEX_HOME?.trim() + return codexHome && codexHome.length > 0 ? codexHome : join(homedir(), '.codex') +} + +function getCodexAutomationsDir(): string { + return join(getCodexHomeDir(), 'automations') +} + +export type ThreadAutomationStatus = 'ACTIVE' | 'PAUSED' + +export type ThreadAutomationRecord = { + id: string + kind: 'heartbeat' | 'cron' + name: string + prompt: string + rrule: string + status: ThreadAutomationStatus + targetThreadId: string | null + cwds: string[] + extraTomlLines: string[] + createdAtMs: number | null + updatedAtMs: number | null + nextRunAtMs: number | null +} + +function readTomlString(value: string): string { + const trimmed = value.trim() + if ((trimmed.startsWith('"') && trimmed.endsWith('"')) || (trimmed.startsWith('\'') && trimmed.endsWith('\''))) { + try { + return JSON.parse(trimmed) + } catch { + return trimmed.slice(1, -1) + } + } + return trimmed +} + +function serializeTomlString(value: string): string { + return JSON.stringify(value) +} + +function parseTomlStringArray(value: string): string[] { + const trimmed = value.trim() + if (!trimmed.startsWith('[') || !trimmed.endsWith(']')) return [] + try { + const parsed = JSON.parse(trimmed) + return Array.isArray(parsed) + ? 
parsed.filter((item): item is string => typeof item === 'string' && item.trim().length > 0) + : [] + } catch { + return [] + } +} + +function serializeTomlStringArray(values: string[]): string { + return `[${values.map((value) => serializeTomlString(value)).join(', ')}]` +} + +function parseAutomationToml(raw: string): ThreadAutomationRecord | null { + const values: Record = {} + const extraTomlLines: string[] = [] + const knownKeys = new Set([ + 'version', + 'id', + 'kind', + 'name', + 'prompt', + 'status', + 'rrule', + 'target_thread_id', + 'cwds', + 'created_at', + 'updated_at', + ]) + let isInsideExtraTable = false + for (const line of raw.split(/\r?\n/u)) { + const trimmed = line.trim() + if (!trimmed || trimmed.startsWith('#')) continue + if (trimmed.startsWith('[') && trimmed.endsWith(']')) { + isInsideExtraTable = true + extraTomlLines.push(trimmed) + continue + } + if (isInsideExtraTable) { + extraTomlLines.push(trimmed) + continue + } + if (!trimmed.includes('=')) { + extraTomlLines.push(trimmed) + continue + } + const separatorIndex = trimmed.indexOf('=') + const key = trimmed.slice(0, separatorIndex).trim() + const value = trimmed.slice(separatorIndex + 1).trim() + if (!key) continue + if (knownKeys.has(key)) { + values[key] = value + } else { + extraTomlLines.push(trimmed) + } + } + + const id = readTomlString(values.id ?? '') + const kindValue = readTomlString(values.kind ?? (values.cwds ? 'cron' : 'heartbeat')) + const name = readTomlString(values.name ?? '') + const prompt = readTomlString(values.prompt ?? '') + const rrule = readTomlString(values.rrule ?? '') + const statusValue = readTomlString(values.status ?? 'ACTIVE') + const targetThreadId = readTomlString(values.target_thread_id ?? '') || null + const cwds = parseTomlStringArray(values.cwds ?? '') + const createdAtMs = Number.parseInt(values.created_at ?? '', 10) + const updatedAtMs = Number.parseInt(values.updated_at ?? 
'', 10) + + if (!id || !name || !prompt || !rrule) return null + if (kindValue !== 'heartbeat' && kindValue !== 'cron') return null + if (statusValue !== 'ACTIVE' && statusValue !== 'PAUSED') return null + + return { + id, + kind: kindValue, + name, + prompt, + rrule, + status: statusValue, + targetThreadId, + cwds, + extraTomlLines, + createdAtMs: Number.isFinite(createdAtMs) ? createdAtMs : null, + updatedAtMs: Number.isFinite(updatedAtMs) ? updatedAtMs : null, + nextRunAtMs: null, + } +} + +function serializeAutomationToml(record: ThreadAutomationRecord): string { + const lines = [ + 'version = 1', + `id = ${serializeTomlString(record.id)}`, + `kind = ${serializeTomlString(record.kind)}`, + `name = ${serializeTomlString(record.name)}`, + `prompt = ${serializeTomlString(record.prompt)}`, + `status = ${serializeTomlString(record.status)}`, + `rrule = ${serializeTomlString(record.rrule)}`, + ] + if (record.targetThreadId) { + lines.push(`target_thread_id = ${serializeTomlString(record.targetThreadId)}`) + } + if (record.cwds.length > 0) { + lines.push(`cwds = ${serializeTomlStringArray(record.cwds)}`) + } + lines.push( + `created_at = ${String(record.createdAtMs ?? Date.now())}`, + `updated_at = ${String(record.updatedAtMs ?? 
Date.now())}`, + ) + lines.push(...record.extraTomlLines) + return `${lines.join('\n')}\n` +} + +function slugifyAutomationId(threadId: string, name: string): string { + const preferred = name.trim().toLowerCase().replace(/[^a-z0-9]+/gu, '-').replace(/^-+|-+$/gu, '') + if (preferred) return preferred.slice(0, 48) + const fallback = threadId.trim().toLowerCase().replace(/[^a-z0-9]+/gu, '-').replace(/^-+|-+$/gu, '') + return `heartbeat-${fallback.slice(0, 24) || randomBytes(4).toString('hex')}` +} + +async function readAutomationRecordFromFile(filePath: string): Promise { + try { + return parseAutomationToml(await readFile(filePath, 'utf8')) + } catch { + return null + } +} + +export async function listThreadHeartbeatAutomations(): Promise> { + const automationRoot = getCodexAutomationsDir() + const next: Record = {} + let entries + try { + entries = await readdir(automationRoot, { withFileTypes: true }) + } catch { + return next + } + + for (const entry of entries) { + if (!entry.isDirectory()) continue + const automation = await readAutomationRecordFromFile(join(automationRoot, entry.name, 'automation.toml')) + if (!automation || automation.kind !== 'heartbeat' || !automation.targetThreadId) continue + next[automation.targetThreadId] = [...(next[automation.targetThreadId] ?? []), automation] + } + + for (const automations of Object.values(next)) { + automations.sort((first, second) => { + const firstCreatedAt = first.createdAtMs ?? 0 + const secondCreatedAt = second.createdAtMs ?? 0 + if (firstCreatedAt !== secondCreatedAt) return firstCreatedAt - secondCreatedAt + return first.id.localeCompare(second.id) + }) + } + + return next +} + +export async function readThreadHeartbeatAutomations(threadId: string): Promise { + const all = await listThreadHeartbeatAutomations() + return all[threadId] ?? 
[] +} + +export async function readThreadHeartbeatAutomation(threadId: string, automationId = ''): Promise { + const automations = await readThreadHeartbeatAutomations(threadId) + if (automationId) return automations.find((automation) => automation.id === automationId) ?? null + return automations[0] ?? null +} + +function resolveUniqueAutomationId(existingIds: Set, threadId: string, name: string): string { + const baseId = slugifyAutomationId(threadId, name) + if (!existingIds.has(baseId)) return baseId + for (let index = 2; index < 1000; index += 1) { + const candidate = `${baseId}-${index}` + if (!existingIds.has(candidate)) return candidate + } + return `${baseId}-${randomBytes(4).toString('hex')}` +} + +export async function writeThreadHeartbeatAutomation(input: { + threadId: string + id?: string + name: string + prompt: string + rrule: string + status: ThreadAutomationStatus +}): Promise { + const threadId = input.threadId.trim() + const name = input.name.trim() + const prompt = input.prompt.trim() + const rrule = input.rrule.trim() + if (!threadId || !name || !prompt || !rrule) { + throw new Error('threadId, name, prompt, and rrule are required') + } + + const automationRoot = getCodexAutomationsDir() + await mkdir(automationRoot, { recursive: true }) + const existing = input.id ? await readThreadHeartbeatAutomation(threadId, input.id.trim()) : null + const entries = await readdir(automationRoot, { withFileTypes: true }).catch(() => []) + const existingIds = new Set(entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name)) + const id = existing?.id ?? resolveUniqueAutomationId(existingIds, threadId, name) + const automationDir = join(automationRoot, id) + const now = Date.now() + const record: ThreadAutomationRecord = { + id, + kind: 'heartbeat', + name, + prompt, + rrule, + status: input.status, + targetThreadId: threadId, + cwds: [], + extraTomlLines: existing?.extraTomlLines ?? [], + createdAtMs: existing?.createdAtMs ?? 
now, + updatedAtMs: now, + nextRunAtMs: null, + } + + await mkdir(automationDir, { recursive: true }) + await writeFile(join(automationDir, 'automation.toml'), serializeAutomationToml(record), 'utf8') + const memoryPath = join(automationDir, 'memory.md') + try { + await stat(memoryPath) + } catch { + await writeFile(memoryPath, '', 'utf8') + } + return record +} + +export async function deleteThreadHeartbeatAutomation(threadId: string, automationId = ''): Promise { + const normalizedThreadId = threadId.trim() + const normalizedAutomationId = automationId.trim() + if (normalizedAutomationId) { + const automation = await readThreadHeartbeatAutomation(normalizedThreadId, normalizedAutomationId) + if (!automation) return false + await rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }) + return true + } + + const automations = await readThreadHeartbeatAutomations(normalizedThreadId) + if (automations.length === 0) return false + await Promise.all(automations.map((automation) => rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }))) + return true +} + +export async function listProjectCronAutomations(): Promise> { + const automationRoot = getCodexAutomationsDir() + const next: Record = {} + let entries + try { + entries = await readdir(automationRoot, { withFileTypes: true }) + } catch { + return next + } + + for (const entry of entries) { + if (!entry.isDirectory()) continue + const automation = await readAutomationRecordFromFile(join(automationRoot, entry.name, 'automation.toml')) + if (!automation || automation.kind !== 'cron' || automation.cwds.length === 0) continue + for (const cwd of automation.cwds) { + next[cwd] = [...(next[cwd] ?? []), automation] + } + } + + for (const automations of Object.values(next)) { + automations.sort((first, second) => { + const firstCreatedAt = first.createdAtMs ?? 0 + const secondCreatedAt = second.createdAtMs ?? 
0 + if (firstCreatedAt !== secondCreatedAt) return firstCreatedAt - secondCreatedAt + return first.id.localeCompare(second.id) + }) + } + + return next +} + +export async function readProjectCronAutomations(projectName: string): Promise { + const all = await listProjectCronAutomations() + return all[projectName] ?? [] +} + +export async function readProjectCronAutomation(projectName: string, automationId = ''): Promise { + const automations = await readProjectCronAutomations(projectName) + if (automationId) return automations.find((automation) => automation.id === automationId) ?? null + return automations[0] ?? null +} + +export async function writeProjectCronAutomation(input: { + projectName: string + id?: string + name: string + prompt: string + rrule: string + status: ThreadAutomationStatus +}): Promise { + const projectName = input.projectName.trim() + const name = input.name.trim() + const prompt = input.prompt.trim() + const rrule = input.rrule.trim() + if (!projectName || !name || !prompt || !rrule) { + throw new Error('projectName, name, prompt, and rrule are required') + } + if (!isAbsoluteLikePath(projectName)) { + throw new Error('Project automation cwd must be an absolute path') + } + + const automationRoot = getCodexAutomationsDir() + await mkdir(automationRoot, { recursive: true }) + const existing = input.id ? await readProjectCronAutomation(projectName, input.id.trim()) : null + const entries = await readdir(automationRoot, { withFileTypes: true }).catch(() => []) + const existingIds = new Set(entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name)) + const id = existing?.id ?? resolveUniqueAutomationId(existingIds, projectName, name) + const automationDir = join(automationRoot, id) + const now = Date.now() + const record: ThreadAutomationRecord = { + id, + kind: 'cron', + name, + prompt, + rrule, + status: input.status, + targetThreadId: null, + cwds: Array.from(new Set([...(existing?.cwds ?? 
[]), projectName])), + extraTomlLines: existing?.extraTomlLines ?? [], + createdAtMs: existing?.createdAtMs ?? now, + updatedAtMs: now, + nextRunAtMs: null, + } + + await mkdir(automationDir, { recursive: true }) + await writeFile(join(automationDir, 'automation.toml'), serializeAutomationToml(record), 'utf8') + const memoryPath = join(automationDir, 'memory.md') + try { + await stat(memoryPath) + } catch { + await writeFile(memoryPath, '', 'utf8') + } + return record +} + +export async function deleteProjectCronAutomation(projectName: string, automationId = ''): Promise { + const normalizedProjectName = projectName.trim() + const normalizedAutomationId = automationId.trim() + if (!normalizedProjectName || !isAbsoluteLikePath(normalizedProjectName)) return false + if (normalizedAutomationId) { + const automation = await readProjectCronAutomation(normalizedProjectName, normalizedAutomationId) + if (!automation) return false + const remainingCwds = automation.cwds.filter((cwd) => cwd !== normalizedProjectName) + if (remainingCwds.length > 0) { + const record = { ...automation, cwds: remainingCwds, updatedAtMs: Date.now() } + await writeFile(join(getCodexAutomationsDir(), automation.id, 'automation.toml'), serializeAutomationToml(record), 'utf8') + } else { + await rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: true }) + } + return true + } + + const automations = await readProjectCronAutomations(normalizedProjectName) + if (automations.length === 0) return false + await Promise.all(automations.map(async (automation) => { + const remainingCwds = automation.cwds.filter((cwd) => cwd !== normalizedProjectName) + if (remainingCwds.length > 0) { + const record = { ...automation, cwds: remainingCwds, updatedAtMs: Date.now() } + await writeFile(join(getCodexAutomationsDir(), automation.id, 'automation.toml'), serializeAutomationToml(record), 'utf8') + return + } + await rm(join(getCodexAutomationsDir(), automation.id), { recursive: true, force: 
true }) + })) + return true +} From 87f1d922c99a34d02ff64a975d56bd23c5b95516 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:23:34 +0700 Subject: [PATCH 09/19] Extract global state store helpers --- src/server/codexAppServerBridge.ts | 404 ++-------------------------- src/server/globalStateStore.ts | 418 +++++++++++++++++++++++++++++ 2 files changed, 435 insertions(+), 387 deletions(-) create mode 100644 src/server/globalStateStore.ts diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index 9e30d63be..b8c36c1ec 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -70,6 +70,23 @@ import { writeThreadHeartbeatAutomation, type ThreadAutomationRecord, } from './threadAutomations.js' +import { + normalizePinnedThreadIds, + normalizeStringArray, + normalizeStringRecord, + persistWorkspaceRoot, + readFirstLaunchPluginsCardDismissed, + readMergedThreadTitleCache, + readPinnedThreadIds, + readThreadTitleCache, + readWorkspaceRootsState, + removeFromThreadTitleCache, + updateThreadTitleCache, + updateWorkspaceRootsState, + writeFirstLaunchPluginsCardDismissed, + writePinnedThreadIds, + writeThreadTitleCache, +} from './globalStateStore.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -1114,50 +1131,6 @@ async function runCommandWithOutput(command: string, args: string[], options: { } -function normalizeStringArray(value: unknown): string[] { - if (!Array.isArray(value)) return [] - const normalized: string[] = [] - for (const item of value) { - if (typeof item === 'string' && item.length > 0 && !normalized.includes(item)) { - normalized.push(item) - } - } - return normalized -} - -function normalizeStringRecord(value: unknown): Record { - if (!value || typeof value !== 'object' || Array.isArray(value)) 
return {} - const next: Record = {} - for (const [key, item] of Object.entries(value as Record)) { - if (typeof key === 'string' && key.length > 0 && typeof item === 'string') { - next[key] = item - } - } - return next -} - -function normalizeRemoteProjects(value: unknown): WorkspaceRootsState['remoteProjects'] { - if (!Array.isArray(value)) return [] - const next: WorkspaceRootsState['remoteProjects'] = [] - const seen = new Set() - for (const item of value) { - const record = asRecord(item) - if (!record) continue - const id = typeof record.id === 'string' ? record.id.trim() : '' - if (!id || seen.has(id)) continue - seen.add(id) - next.push({ - id, - hostId: typeof record.hostId === 'string' ? record.hostId.trim() : '', - remotePath: typeof record.remotePath === 'string' ? record.remotePath.trim() : '', - label: typeof record.label === 'string' ? record.label.trim() : '', - }) - } - return next -} - - - function getCodexAuthPath(): string { return join(getCodexHomeDir(), 'auth.json') } @@ -1372,167 +1345,6 @@ function getCodexAutomationsDir(): string { return join(getCodexHomeDir(), 'automations') } -type ThreadTitleCache = { titles: Record; order: string[] } -const MAX_THREAD_TITLES = 500 -const EMPTY_THREAD_TITLE_CACHE: ThreadTitleCache = { titles: {}, order: [] } -const PINNED_THREAD_IDS_KEY = 'pinned-thread-ids' - -type SessionIndexThreadTitleCacheState = { - fileSignature: string | null - cache: ThreadTitleCache -} - -let sessionIndexThreadTitleCacheState: SessionIndexThreadTitleCacheState = { - fileSignature: null, - cache: EMPTY_THREAD_TITLE_CACHE, -} - -type TelegramBridgeConfigState = { - botToken: string - chatIds: number[] - allowedUserIds: Array -} - -function normalizeThreadTitleCache(value: unknown): ThreadTitleCache { - const record = asRecord(value) - if (!record) return EMPTY_THREAD_TITLE_CACHE - const rawTitles = asRecord(record.titles) - const titles: Record = {} - if (rawTitles) { - for (const [k, v] of Object.entries(rawTitles)) { - if 
(typeof v === 'string' && v.length > 0) titles[k] = v - } - } - const order = normalizeStringArray(record.order) - return { titles, order } -} - -function normalizePinnedThreadIds(value: unknown): string[] { - return normalizeStringArray(value) -} - -function updateThreadTitleCache(cache: ThreadTitleCache, id: string, title: string): ThreadTitleCache { - const titles = { ...cache.titles, [id]: title } - const order = [id, ...cache.order.filter((o) => o !== id)] - while (order.length > MAX_THREAD_TITLES) { - const removed = order.pop() - if (removed) delete titles[removed] - } - return { titles, order } -} - -function removeFromThreadTitleCache(cache: ThreadTitleCache, id: string): ThreadTitleCache { - const { [id]: _, ...titles } = cache.titles - return { titles, order: cache.order.filter((o) => o !== id) } -} - -type SessionIndexThreadTitle = { - id: string - title: string - updatedAtMs: number -} - -function normalizeSessionIndexThreadTitle(value: unknown): SessionIndexThreadTitle | null { - const record = asRecord(value) - if (!record) return null - - const id = typeof record.id === 'string' ? record.id.trim() : '' - const title = typeof record.thread_name === 'string' ? record.thread_name.trim() : '' - const updatedAtIso = typeof record.updated_at === 'string' ? record.updated_at.trim() : '' - const updatedAtMs = updatedAtIso ? Date.parse(updatedAtIso) : Number.NaN - - if (!id || !title) return null - return { - id, - title, - updatedAtMs: Number.isFinite(updatedAtMs) ? 
updatedAtMs : 0, - } -} - -function trimThreadTitleCache(cache: ThreadTitleCache): ThreadTitleCache { - const titles = { ...cache.titles } - const order = cache.order.filter((id) => { - if (!titles[id]) return false - return true - }).slice(0, MAX_THREAD_TITLES) - - for (const id of Object.keys(titles)) { - if (!order.includes(id)) { - delete titles[id] - } - } - - return { titles, order } -} - -function mergeThreadTitleCaches(base: ThreadTitleCache, overlay: ThreadTitleCache): ThreadTitleCache { - const titles = { ...base.titles, ...overlay.titles } - const order: string[] = [] - - for (const id of [...overlay.order, ...base.order]) { - if (!titles[id] || order.includes(id)) continue - order.push(id) - } - - for (const id of Object.keys(titles)) { - if (!order.includes(id)) { - order.push(id) - } - } - - return trimThreadTitleCache({ titles, order }) -} - -async function readThreadTitleCache(): Promise { - const statePath = getCodexGlobalStatePath() - try { - const raw = await readFile(statePath, 'utf8') - const payload = asRecord(JSON.parse(raw)) ?? {} - return normalizeThreadTitleCache(payload['thread-titles']) - } catch { - return EMPTY_THREAD_TITLE_CACHE - } -} - -async function writeThreadTitleCache(cache: ThreadTitleCache): Promise { - const statePath = getCodexGlobalStatePath() - let payload: Record = {} - try { - const raw = await readFile(statePath, 'utf8') - payload = asRecord(JSON.parse(raw)) ?? {} - } catch { - payload = {} - } - payload['thread-titles'] = cache - await writeFile(statePath, JSON.stringify(payload), 'utf8') -} - -async function readPinnedThreadIds(): Promise { - const statePath = getCodexGlobalStatePath() - try { - const raw = await readFile(statePath, 'utf8') - const payload = asRecord(JSON.parse(raw)) ?? 
{} - return normalizePinnedThreadIds(payload[PINNED_THREAD_IDS_KEY]) - } catch { - return [] - } -} - -async function writePinnedThreadIds(threadIds: string[]): Promise { - const statePath = getCodexGlobalStatePath() - let payload: Record = {} - try { - const raw = await readFile(statePath, 'utf8') - payload = asRecord(JSON.parse(raw)) ?? {} - } catch { - payload = {} - } - - payload[PINNED_THREAD_IDS_KEY] = normalizePinnedThreadIds(threadIds) - await writeFile(statePath, JSON.stringify(payload), 'utf8') -} - -const FIRST_LAUNCH_PLUGINS_CARD_DISMISSED_KEY = 'first-launch-plugins-card-dismissed' const THREAD_QUEUE_STATE_KEY = 'thread-queue-state' type StoredQueuedMessage = { @@ -1770,188 +1582,6 @@ function isTurnCompletedNotification(notification: { method: string; params: unk return notification.method === 'turn/completed' } -async function readFirstLaunchPluginsCardDismissed(): Promise { - const statePath = getCodexGlobalStatePath() - try { - const raw = await readFile(statePath, 'utf8') - const payload = asRecord(JSON.parse(raw)) ?? {} - return payload[FIRST_LAUNCH_PLUGINS_CARD_DISMISSED_KEY] === true - } catch { - return false - } -} - -async function writeFirstLaunchPluginsCardDismissed(dismissed: boolean): Promise { - const statePath = getCodexGlobalStatePath() - let payload: Record = {} - try { - const raw = await readFile(statePath, 'utf8') - payload = asRecord(JSON.parse(raw)) ?? 
{} - } catch { - payload = {} - } - payload[FIRST_LAUNCH_PLUGINS_CARD_DISMISSED_KEY] = dismissed === true - await writeFile(statePath, JSON.stringify(payload), 'utf8') -} - -function getSessionIndexFileSignature(stats: { mtimeMs: number; size: number }): string { - return `${String(stats.mtimeMs)}:${String(stats.size)}` -} - -async function parseThreadTitlesFromSessionIndex(sessionIndexPath: string): Promise { - const latestById = new Map() - const input = createReadStream(sessionIndexPath, { encoding: 'utf8' }) - const lines = createInterface({ - input, - crlfDelay: Infinity, - }) - - try { - for await (const line of lines) { - const trimmed = line.trim() - if (!trimmed) continue - - try { - const entry = normalizeSessionIndexThreadTitle(JSON.parse(trimmed) as unknown) - if (!entry) continue - - const previous = latestById.get(entry.id) - if (!previous || entry.updatedAtMs >= previous.updatedAtMs) { - latestById.set(entry.id, entry) - } - } catch { - // Skip malformed lines and keep scanning the rest of the index. 
- } - } - } finally { - lines.close() - input.close() - } - - const entries = Array.from(latestById.values()).sort((first, second) => second.updatedAtMs - first.updatedAtMs) - const titles: Record = {} - const order: string[] = [] - for (const entry of entries) { - titles[entry.id] = entry.title - order.push(entry.id) - } - - return trimThreadTitleCache({ titles, order }) -} - -async function readThreadTitlesFromSessionIndex(): Promise { - const sessionIndexPath = getCodexSessionIndexPath() - - try { - const stats = await stat(sessionIndexPath) - const fileSignature = getSessionIndexFileSignature(stats) - if (sessionIndexThreadTitleCacheState.fileSignature === fileSignature) { - return sessionIndexThreadTitleCacheState.cache - } - - const cache = await parseThreadTitlesFromSessionIndex(sessionIndexPath) - sessionIndexThreadTitleCacheState = { fileSignature, cache } - return cache - } catch { - sessionIndexThreadTitleCacheState = { - fileSignature: 'missing', - cache: EMPTY_THREAD_TITLE_CACHE, - } - return sessionIndexThreadTitleCacheState.cache - } -} - -async function readMergedThreadTitleCache(): Promise { - const [sessionIndexCache, persistedCache] = await Promise.all([ - readThreadTitlesFromSessionIndex(), - readThreadTitleCache(), - ]) - return mergeThreadTitleCaches(persistedCache, sessionIndexCache) -} - -async function readWorkspaceRootsState(): Promise { - const statePath = getCodexGlobalStatePath() - let payload: Record = {} - - try { - const raw = await readFile(statePath, 'utf8') - const parsed = JSON.parse(raw) as unknown - payload = asRecord(parsed) ?? 
{} - } catch { - payload = {} - } - - return { - order: normalizeStringArray(payload['electron-saved-workspace-roots']), - labels: normalizeStringRecord(payload['electron-workspace-root-labels']), - active: normalizeStringArray(payload['active-workspace-roots']), - projectOrder: normalizeStringArray(payload['project-order']), - remoteProjects: normalizeRemoteProjects(payload['remote-projects']), - } -} - -async function writeWorkspaceRootsState(nextState: WorkspaceRootsState): Promise { - const statePath = getCodexGlobalStatePath() - let payload: Record = {} - try { - const raw = await readFile(statePath, 'utf8') - payload = asRecord(JSON.parse(raw)) ?? {} - } catch { - payload = {} - } - - payload['electron-saved-workspace-roots'] = normalizeStringArray(nextState.order) - payload['electron-workspace-root-labels'] = normalizeStringRecord(nextState.labels) - payload['active-workspace-roots'] = normalizeStringArray(nextState.active) - payload['project-order'] = normalizeStringArray(nextState.projectOrder) - - await writeFile(statePath, JSON.stringify(payload), 'utf8') -} - -let workspaceRootsMutation: Promise = Promise.resolve() - -function queueWorkspaceRootsMutation(mutation: () => Promise): Promise { - const run = workspaceRootsMutation.catch(() => undefined).then(mutation) - workspaceRootsMutation = run.then( - () => undefined, - () => undefined, - ) - return run -} - -function prependUniqueString(value: string, items: string[]): string[] { - return [value, ...items.filter((item) => item !== value)] -} - -async function updateWorkspaceRootsState( - updater: (existingState: WorkspaceRootsState) => WorkspaceRootsState, -): Promise { - await queueWorkspaceRootsMutation(async () => { - const existingState = await readWorkspaceRootsState() - await writeWorkspaceRootsState(updater(existingState)) - }) -} - -async function persistWorkspaceRoot(workspaceRoot: string, label = ''): Promise { - const normalizedRoot = workspaceRoot.trim() - if (!normalizedRoot) return - - 
await updateWorkspaceRootsState((existingState) => { - const nextLabels = { ...existingState.labels } - const trimmedLabel = label.trim() - if (trimmedLabel.length > 0) { - nextLabels[normalizedRoot] = trimmedLabel - } - return { - order: prependUniqueString(normalizedRoot, existingState.order), - labels: nextLabels, - active: prependUniqueString(normalizedRoot, existingState.active), - projectOrder: prependUniqueString(normalizedRoot, existingState.projectOrder), - remoteProjects: existingState.remoteProjects, - } - }) -} - async function rollbackCreatedWorktree( gitRoot: string, worktreeCwd: string, diff --git a/src/server/globalStateStore.ts b/src/server/globalStateStore.ts new file mode 100644 index 000000000..ecb489554 --- /dev/null +++ b/src/server/globalStateStore.ts @@ -0,0 +1,418 @@ +import { createReadStream } from 'node:fs' +import { readFile, stat, writeFile } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' +import { createInterface } from 'node:readline' + +type WorkspaceRootsState = { + order: string[] + labels: Record + active: string[] + projectOrder: string[] + remoteProjects?: Array<{ id: string; hostId: string; remotePath: string; label?: string }> +} + +function getCodexHomeDir(): string { + const codexHome = process.env.CODEX_HOME?.trim() + return codexHome && codexHome.length > 0 ? codexHome : join(homedir(), '.codex') +} + +function getCodexGlobalStatePath(): string { + return join(getCodexHomeDir(), '.codex-global-state.json') +} + +function getCodexSessionIndexPath(): string { + return join(getCodexHomeDir(), 'session_index.jsonl') +} + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? 
(value as Record) + : null +} + +export function normalizeStringArray(value: unknown): string[] { + if (!Array.isArray(value)) return [] + const normalized: string[] = [] + for (const item of value) { + if (typeof item === 'string' && item.length > 0 && !normalized.includes(item)) { + normalized.push(item) + } + } + return normalized +} + +export function normalizeStringRecord(value: unknown): Record { + if (!value || typeof value !== 'object' || Array.isArray(value)) return {} + const next: Record = {} + for (const [key, item] of Object.entries(value as Record)) { + if (typeof key === 'string' && key.length > 0 && typeof item === 'string') { + next[key] = item + } + } + return next +} + +export function normalizeRemoteProjects(value: unknown): WorkspaceRootsState['remoteProjects'] { + if (!Array.isArray(value)) return [] + const next: WorkspaceRootsState['remoteProjects'] = [] + const seen = new Set() + for (const item of value) { + const record = asRecord(item) + if (!record) continue + const id = typeof record.id === 'string' ? record.id.trim() : '' + if (!id || seen.has(id)) continue + seen.add(id) + next.push({ + id, + hostId: typeof record.hostId === 'string' ? record.hostId.trim() : '', + remotePath: typeof record.remotePath === 'string' ? record.remotePath.trim() : '', + label: typeof record.label === 'string' ? 
record.label.trim() : '', + }) + } + return next +} + +export type ThreadTitleCache = { titles: Record; order: string[] } +const MAX_THREAD_TITLES = 500 +const EMPTY_THREAD_TITLE_CACHE: ThreadTitleCache = { titles: {}, order: [] } +const PINNED_THREAD_IDS_KEY = 'pinned-thread-ids' + +type SessionIndexThreadTitleCacheState = { + fileSignature: string | null + cache: ThreadTitleCache +} + +let sessionIndexThreadTitleCacheState: SessionIndexThreadTitleCacheState = { + fileSignature: null, + cache: EMPTY_THREAD_TITLE_CACHE, +} + +type TelegramBridgeConfigState = { + botToken: string + chatIds: number[] + allowedUserIds: Array +} + +function normalizeThreadTitleCache(value: unknown): ThreadTitleCache { + const record = asRecord(value) + if (!record) return EMPTY_THREAD_TITLE_CACHE + const rawTitles = asRecord(record.titles) + const titles: Record = {} + if (rawTitles) { + for (const [k, v] of Object.entries(rawTitles)) { + if (typeof v === 'string' && v.length > 0) titles[k] = v + } + } + const order = normalizeStringArray(record.order) + return { titles, order } +} + +export function normalizePinnedThreadIds(value: unknown): string[] { + return normalizeStringArray(value) +} + +export function updateThreadTitleCache(cache: ThreadTitleCache, id: string, title: string): ThreadTitleCache { + const titles = { ...cache.titles, [id]: title } + const order = [id, ...cache.order.filter((o) => o !== id)] + while (order.length > MAX_THREAD_TITLES) { + const removed = order.pop() + if (removed) delete titles[removed] + } + return { titles, order } +} + +export function removeFromThreadTitleCache(cache: ThreadTitleCache, id: string): ThreadTitleCache { + const { [id]: _, ...titles } = cache.titles + return { titles, order: cache.order.filter((o) => o !== id) } +} + +type SessionIndexThreadTitle = { + id: string + title: string + updatedAtMs: number +} + +function normalizeSessionIndexThreadTitle(value: unknown): SessionIndexThreadTitle | null { + const record = asRecord(value) + 
if (!record) return null + + const id = typeof record.id === 'string' ? record.id.trim() : '' + const title = typeof record.thread_name === 'string' ? record.thread_name.trim() : '' + const updatedAtIso = typeof record.updated_at === 'string' ? record.updated_at.trim() : '' + const updatedAtMs = updatedAtIso ? Date.parse(updatedAtIso) : Number.NaN + + if (!id || !title) return null + return { + id, + title, + updatedAtMs: Number.isFinite(updatedAtMs) ? updatedAtMs : 0, + } +} + +function trimThreadTitleCache(cache: ThreadTitleCache): ThreadTitleCache { + const titles = { ...cache.titles } + const order = cache.order.filter((id) => { + if (!titles[id]) return false + return true + }).slice(0, MAX_THREAD_TITLES) + + for (const id of Object.keys(titles)) { + if (!order.includes(id)) { + delete titles[id] + } + } + + return { titles, order } +} + +function mergeThreadTitleCaches(base: ThreadTitleCache, overlay: ThreadTitleCache): ThreadTitleCache { + const titles = { ...base.titles, ...overlay.titles } + const order: string[] = [] + + for (const id of [...overlay.order, ...base.order]) { + if (!titles[id] || order.includes(id)) continue + order.push(id) + } + + for (const id of Object.keys(titles)) { + if (!order.includes(id)) { + order.push(id) + } + } + + return trimThreadTitleCache({ titles, order }) +} + +export async function readThreadTitleCache(): Promise { + const statePath = getCodexGlobalStatePath() + try { + const raw = await readFile(statePath, 'utf8') + const payload = asRecord(JSON.parse(raw)) ?? {} + return normalizeThreadTitleCache(payload['thread-titles']) + } catch { + return EMPTY_THREAD_TITLE_CACHE + } +} + +export async function writeThreadTitleCache(cache: ThreadTitleCache): Promise { + const statePath = getCodexGlobalStatePath() + let payload: Record = {} + try { + const raw = await readFile(statePath, 'utf8') + payload = asRecord(JSON.parse(raw)) ?? 
{} + } catch { + payload = {} + } + payload['thread-titles'] = cache + await writeFile(statePath, JSON.stringify(payload), 'utf8') +} + +export async function readPinnedThreadIds(): Promise { + const statePath = getCodexGlobalStatePath() + try { + const raw = await readFile(statePath, 'utf8') + const payload = asRecord(JSON.parse(raw)) ?? {} + return normalizePinnedThreadIds(payload[PINNED_THREAD_IDS_KEY]) + } catch { + return [] + } +} + +export async function writePinnedThreadIds(threadIds: string[]): Promise { + const statePath = getCodexGlobalStatePath() + let payload: Record = {} + try { + const raw = await readFile(statePath, 'utf8') + payload = asRecord(JSON.parse(raw)) ?? {} + } catch { + payload = {} + } + + payload[PINNED_THREAD_IDS_KEY] = normalizePinnedThreadIds(threadIds) + await writeFile(statePath, JSON.stringify(payload), 'utf8') +} + +const FIRST_LAUNCH_PLUGINS_CARD_DISMISSED_KEY = 'first-launch-plugins-card-dismissed' + +export async function readFirstLaunchPluginsCardDismissed(): Promise { + const statePath = getCodexGlobalStatePath() + try { + const raw = await readFile(statePath, 'utf8') + const payload = asRecord(JSON.parse(raw)) ?? {} + return payload[FIRST_LAUNCH_PLUGINS_CARD_DISMISSED_KEY] === true + } catch { + return false + } +} + +export async function writeFirstLaunchPluginsCardDismissed(dismissed: boolean): Promise { + const statePath = getCodexGlobalStatePath() + let payload: Record = {} + try { + const raw = await readFile(statePath, 'utf8') + payload = asRecord(JSON.parse(raw)) ?? 
{} + } catch { + payload = {} + } + payload[FIRST_LAUNCH_PLUGINS_CARD_DISMISSED_KEY] = dismissed === true + await writeFile(statePath, JSON.stringify(payload), 'utf8') +} + +function getSessionIndexFileSignature(stats: { mtimeMs: number; size: number }): string { + return `${String(stats.mtimeMs)}:${String(stats.size)}` +} + +async function parseThreadTitlesFromSessionIndex(sessionIndexPath: string): Promise { + const latestById = new Map() + const input = createReadStream(sessionIndexPath, { encoding: 'utf8' }) + const lines = createInterface({ + input, + crlfDelay: Infinity, + }) + + try { + for await (const line of lines) { + const trimmed = line.trim() + if (!trimmed) continue + + try { + const entry = normalizeSessionIndexThreadTitle(JSON.parse(trimmed) as unknown) + if (!entry) continue + + const previous = latestById.get(entry.id) + if (!previous || entry.updatedAtMs >= previous.updatedAtMs) { + latestById.set(entry.id, entry) + } + } catch { + // Skip malformed lines and keep scanning the rest of the index. 
+ } + } + } finally { + lines.close() + input.close() + } + + const entries = Array.from(latestById.values()).sort((first, second) => second.updatedAtMs - first.updatedAtMs) + const titles: Record = {} + const order: string[] = [] + for (const entry of entries) { + titles[entry.id] = entry.title + order.push(entry.id) + } + + return trimThreadTitleCache({ titles, order }) +} + +async function readThreadTitlesFromSessionIndex(): Promise { + const sessionIndexPath = getCodexSessionIndexPath() + + try { + const stats = await stat(sessionIndexPath) + const fileSignature = getSessionIndexFileSignature(stats) + if (sessionIndexThreadTitleCacheState.fileSignature === fileSignature) { + return sessionIndexThreadTitleCacheState.cache + } + + const cache = await parseThreadTitlesFromSessionIndex(sessionIndexPath) + sessionIndexThreadTitleCacheState = { fileSignature, cache } + return cache + } catch { + sessionIndexThreadTitleCacheState = { + fileSignature: 'missing', + cache: EMPTY_THREAD_TITLE_CACHE, + } + return sessionIndexThreadTitleCacheState.cache + } +} + +export async function readMergedThreadTitleCache(): Promise { + const [sessionIndexCache, persistedCache] = await Promise.all([ + readThreadTitlesFromSessionIndex(), + readThreadTitleCache(), + ]) + return mergeThreadTitleCaches(persistedCache, sessionIndexCache) +} + +export async function readWorkspaceRootsState(): Promise { + const statePath = getCodexGlobalStatePath() + let payload: Record = {} + + try { + const raw = await readFile(statePath, 'utf8') + const parsed = JSON.parse(raw) as unknown + payload = asRecord(parsed) ?? 
{} + } catch { + payload = {} + } + + return { + order: normalizeStringArray(payload['electron-saved-workspace-roots']), + labels: normalizeStringRecord(payload['electron-workspace-root-labels']), + active: normalizeStringArray(payload['active-workspace-roots']), + projectOrder: normalizeStringArray(payload['project-order']), + remoteProjects: normalizeRemoteProjects(payload['remote-projects']), + } +} + +export async function writeWorkspaceRootsState(nextState: WorkspaceRootsState): Promise { + const statePath = getCodexGlobalStatePath() + let payload: Record = {} + try { + const raw = await readFile(statePath, 'utf8') + payload = asRecord(JSON.parse(raw)) ?? {} + } catch { + payload = {} + } + + payload['electron-saved-workspace-roots'] = normalizeStringArray(nextState.order) + payload['electron-workspace-root-labels'] = normalizeStringRecord(nextState.labels) + payload['active-workspace-roots'] = normalizeStringArray(nextState.active) + payload['project-order'] = normalizeStringArray(nextState.projectOrder) + + await writeFile(statePath, JSON.stringify(payload), 'utf8') +} + +let workspaceRootsMutation: Promise = Promise.resolve() + +function queueWorkspaceRootsMutation(mutation: () => Promise): Promise { + const run = workspaceRootsMutation.catch(() => undefined).then(mutation) + workspaceRootsMutation = run.then( + () => undefined, + () => undefined, + ) + return run +} + +function prependUniqueString(value: string, items: string[]): string[] { + return [value, ...items.filter((item) => item !== value)] +} + +export async function updateWorkspaceRootsState( + updater: (existingState: WorkspaceRootsState) => WorkspaceRootsState, +): Promise { + await queueWorkspaceRootsMutation(async () => { + const existingState = await readWorkspaceRootsState() + await writeWorkspaceRootsState(updater(existingState)) + }) +} + +export async function persistWorkspaceRoot(workspaceRoot: string, label = ''): Promise { + const normalizedRoot = workspaceRoot.trim() + if 
(!normalizedRoot) return + + await updateWorkspaceRootsState((existingState) => { + const nextLabels = { ...existingState.labels } + const trimmedLabel = label.trim() + if (trimmedLabel.length > 0) { + nextLabels[normalizedRoot] = trimmedLabel + } + return { + order: prependUniqueString(normalizedRoot, existingState.order), + labels: nextLabels, + active: prependUniqueString(normalizedRoot, existingState.active), + projectOrder: prependUniqueString(normalizedRoot, existingState.projectOrder), + remoteProjects: existingState.remoteProjects, + } + }) +} From 328de33de8184bde9abc5f25af79b7541896d3ca Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:26:45 +0700 Subject: [PATCH 10/19] Extract app server process and queue processor --- src/server/appServerProcess.ts | 703 +++++++++++++++++ src/server/authRefresh.ts | 165 ++++ src/server/backendQueueProcessor.ts | 507 +++++++++++++ src/server/codexAppServerBridge.ts | 1078 +-------------------------- 4 files changed, 1384 insertions(+), 1069 deletions(-) create mode 100644 src/server/appServerProcess.ts create mode 100644 src/server/authRefresh.ts create mode 100644 src/server/backendQueueProcessor.ts diff --git a/src/server/appServerProcess.ts b/src/server/appServerProcess.ts new file mode 100644 index 000000000..f1e0f2bb2 --- /dev/null +++ b/src/server/appServerProcess.ts @@ -0,0 +1,703 @@ +import { spawn, type ChildProcessWithoutNullStreams } from 'node:child_process' +import { readFileSync, writeFileSync } from 'node:fs' +import { homedir } from 'node:os' +import { join } from 'node:path' +import { buildAppServerArgs } from './appServerRuntimeConfig.js' +import { + FREE_MODE_STATE_FILE, + getFreeModeConfigArgs, + getFreeModeEnvVars, + shouldCreateDefaultFreeModeStateForMissingAuth, + type FreeModeState, +} from './freeMode.js' +import { refreshChatgptAuthTokensForExternalAuth } from './authRefresh.js' +import { resolveCodexCommand } from '../commandResolution.js' +import { getSpawnInvocation } from 
'../utils/commandInvocation.js' + +const THREAD_TURN_PAGE_READ_CACHE_TTL_MS = 30_000 + +type JsonRpcCall = { + jsonrpc: '2.0' + id: number + method: string + params?: unknown +} + +type JsonRpcResponse = { + id?: number + result?: unknown + error?: { code: number; message: string } + method?: string + params?: unknown +} + +type ServerRequestReply = { + result?: unknown + error?: { code?: number; message: string } +} + +type PendingServerRequest = { + id: number + method: string + params: unknown + receivedAtIso: string +} + +type ChatgptAuthTokensRefreshParams = { + reason?: string + previousAccountId?: string +} + +type ChatgptAuthTokensRefreshResponse = { + accessToken: string + chatgptAccountId: string + chatgptPlanType: string | null +} + +function getCodexHomeDir(): string { + const codexHome = process.env.CODEX_HOME?.trim() + return codexHome && codexHome.length > 0 ? codexHome : join(homedir(), '.codex') +} + +function getCodexAuthPath(): string { + return join(getCodexHomeDir(), 'auth.json') +} + +function readFreeModeStateSync(statePath: string): FreeModeState | null { + try { + const raw = readFileSync(statePath, 'utf8') + return JSON.parse(raw) as FreeModeState + } catch { + return null + } +} + +function hasUsableCodexAuthSync(): boolean { + try { + const raw = readFileSync(getCodexAuthPath(), 'utf8') + const auth = JSON.parse(raw) as { tokens?: { access_token?: string } } + return Boolean(auth.tokens?.access_token?.trim()) + } catch { + return false + } +} + +function ensureDefaultFreeModeStateForMissingAuthSync(statePath: string): FreeModeState | null { + const current = readFreeModeStateSync(statePath) + if (!shouldCreateDefaultFreeModeStateForMissingAuth(current, hasUsableCodexAuthSync())) { + return current + } + const next = { mode: 'openai', keyIndex: 0, updatedAt: new Date().toISOString() } satisfies FreeModeState + writeFileSync(statePath, JSON.stringify(next, null, 2), 'utf8') + return next +} + +function asRecord(value: unknown): Record | 
null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? value : '' +} + +function getErrorMessage(payload: unknown, fallback: string): string { + if (payload instanceof Error && payload.message.trim().length > 0) return payload.message + const record = asRecord(payload) + if (!record) return fallback + const error = record.error + if (typeof error === 'string' && error.length > 0) return error + const nestedError = asRecord(error) + if (nestedError && typeof nestedError.message === 'string' && nestedError.message.length > 0) return nestedError.message + return fallback +} + +const STREAM_EVENT_BUFFER_LIMIT = 400 + +export type StreamEventFrame = { + method: string + params: unknown + atIso: string +} + +type CapturedItem = { + id: string + type: string + turnId: string + data: Record + completed: boolean +} + +const MERGEABLE_ITEM_TYPES = new Set([ + 'commandExecution', + 'fileChange', +]) + +export class AppServerProcess { + private process: ChildProcessWithoutNullStreams | null = null + private initialized = false + private initializePromise: Promise | null = null + private readBuffer = '' + private nextId = 1 + private stopping = false + private readonly pending = new Map void; reject: (reason?: unknown) => void }>() + private readonly notificationListeners = new Set<(value: { method: string; params: unknown }) => void>() + private readonly pendingServerRequests = new Map() + private readonly appServerArgs = buildAppServerArgs() + private readonly streamEventsByThreadId = new Map() + private readonly lastThreadReadSnapshotByThreadId = new Map() + private readonly threadTurnPageReadCacheByThreadId = new Map() + private readonly threadTurnPageReadPromiseByThreadId = new Map>() + private readonly capturedItemsByThreadId = new Map>() + private readonly liveStateCache = new Map() + 
private chatgptAuthRefreshPromise: Promise | null = null + + + private getCodexCommand(): string { + const codexCommand = resolveCodexCommand() + if (!codexCommand) { + throw new Error('Codex CLI is not available. Install @openai/codex or set CODEXUI_CODEX_COMMAND.') + } + return codexCommand + } + + private buildAppServerConfig(): { args: string[]; env: Record } { + const args = [ + 'app-server', + '-c', 'approval_policy="never"', + '-c', 'sandbox_mode="danger-full-access"', + ] + let extraEnv: Record = {} + const serverPort = parseInt(process.env.CODEXUI_SERVER_PORT ?? '', 10) || undefined + const statePath = join(getCodexHomeDir(), FREE_MODE_STATE_FILE) + try { + const state = ensureDefaultFreeModeStateForMissingAuthSync(statePath) + if (state) { + args.push(...getFreeModeConfigArgs(state, serverPort)) + extraEnv = getFreeModeEnvVars(state) + } + } catch { + // No free-mode state or invalid — use defaults + } + return { args, env: extraEnv } + } + + private start(): void { + if (this.process) return + + this.stopping = false + const config = this.buildAppServerConfig() + const invocation = getSpawnInvocation(this.getCodexCommand(), config.args) + const spawnEnv = Object.keys(config.env).length > 0 + ? { ...process.env, ...config.env } + : undefined + const proc = spawn(invocation.command, invocation.args, { stdio: ['pipe', 'pipe', 'pipe'], ...(spawnEnv ? { env: spawnEnv } : {}) }) + this.process = proc + + proc.stdout.setEncoding('utf8') + proc.stdout.on('data', (chunk: string) => { + this.readBuffer += chunk + + let lineEnd = this.readBuffer.indexOf('\n') + while (lineEnd !== -1) { + const line = this.readBuffer.slice(0, lineEnd).trim() + this.readBuffer = this.readBuffer.slice(lineEnd + 1) + + if (line.length > 0) { + this.handleLine(line) + } + + lineEnd = this.readBuffer.indexOf('\n') + } + }) + + proc.stderr.setEncoding('utf8') + proc.stderr.on('data', () => { + // Keep stderr silent in dev middleware; JSON-RPC errors are forwarded via responses. 
+ }) + + proc.on('exit', () => { + if (this.process !== proc) { + return + } + + const failure = new Error(this.stopping ? 'codex app-server stopped' : 'codex app-server exited unexpectedly') + for (const request of this.pending.values()) { + request.reject(failure) + } + + this.pending.clear() + this.pendingServerRequests.clear() + this.process = null + this.initialized = false + this.initializePromise = null + this.readBuffer = '' + }) + } + + private sendLine(payload: Record): void { + if (!this.process) { + throw new Error('codex app-server is not running') + } + + this.process.stdin.write(`${JSON.stringify(payload)}\n`) + } + + private handleLine(line: string): void { + let message: JsonRpcResponse + try { + message = JSON.parse(line) as JsonRpcResponse + } catch { + return + } + + if (typeof message.id === 'number' && this.pending.has(message.id)) { + const pendingRequest = this.pending.get(message.id) + this.pending.delete(message.id) + + if (!pendingRequest) return + + if (message.error) { + pendingRequest.reject(new Error(message.error.message)) + } else { + pendingRequest.resolve(message.result) + } + return + } + + if (typeof message.method === 'string' && typeof message.id !== 'number') { + this.emitNotification({ + method: message.method, + params: message.params ?? null, + }) + return + } + + // Handle server-initiated JSON-RPC requests (approvals, dynamic tool calls, etc.). + if (typeof message.id === 'number' && typeof message.method === 'string') { + this.handleServerRequest(message.id, message.method, message.params ?? 
null) + } + } + + private emitNotification(notification: { method: string; params: unknown }): void { + this.recordStreamEvent(notification) + this.captureItemFromNotification(notification) + const nThreadId = this.extractThreadIdFromParams(notification.params) + if (nThreadId) { + this.invalidateLiveStateCache(nThreadId) + this.threadTurnPageReadCacheByThreadId.delete(nThreadId) + } + for (const listener of this.notificationListeners) { + listener(notification) + } + } + + private extractThreadIdFromParams(params: unknown): string { + const record = asRecord(params) + if (!record) return '' + const threadId = + (typeof record.threadId === 'string' ? record.threadId : '') || + (typeof record.thread_id === 'string' ? record.thread_id : '') || + (typeof record.conversationId === 'string' ? record.conversationId : '') || + (typeof record.conversation_id === 'string' ? record.conversation_id : '') + if (threadId) return threadId + const thread = asRecord(record.thread) + if (thread && typeof thread.id === 'string') return thread.id + const turn = asRecord(record.turn) + if (turn) { + const turnThreadId = + (typeof turn.threadId === 'string' ? turn.threadId : '') || + (typeof turn.thread_id === 'string' ? 
turn.thread_id : '') + if (turnThreadId) return turnThreadId + } + return '' + } + + private recordStreamEvent(notification: { method: string; params: unknown }): void { + const threadId = this.extractThreadIdFromParams(notification.params) + if (!threadId) return + const frame: StreamEventFrame = { + method: notification.method, + params: notification.params, + atIso: new Date().toISOString(), + } + let buffer = this.streamEventsByThreadId.get(threadId) + if (!buffer) { + buffer = [] + this.streamEventsByThreadId.set(threadId, buffer) + } + buffer.push(frame) + if (buffer.length > STREAM_EVENT_BUFFER_LIMIT) { + buffer.splice(0, buffer.length - STREAM_EVENT_BUFFER_LIMIT) + } + } + + getStreamEvents(threadId: string, limit: number): StreamEventFrame[] { + const buffer = this.streamEventsByThreadId.get(threadId) + if (!buffer || buffer.length === 0) return [] + return buffer.slice(-limit) + } + + storeThreadReadSnapshot(threadId: string, snapshot: unknown): void { + this.lastThreadReadSnapshotByThreadId.set(threadId, snapshot) + this.threadTurnPageReadCacheByThreadId.delete(threadId) + } + + getLastThreadReadSnapshot(threadId: string): unknown | null { + return this.lastThreadReadSnapshotByThreadId.get(threadId) ?? 
null + } + + async readThreadForTurnPage(threadId: string): Promise { + const now = Date.now() + const cached = this.threadTurnPageReadCacheByThreadId.get(threadId) + if (cached && cached.expiresAt > now) return cached.result + if (cached) this.threadTurnPageReadCacheByThreadId.delete(threadId) + + const pending = this.threadTurnPageReadPromiseByThreadId.get(threadId) + if (pending) return pending + + const promise = this.rpc('thread/read', { + threadId, + includeTurns: true, + }).then((result) => { + this.threadTurnPageReadCacheByThreadId.set(threadId, { + result, + expiresAt: Date.now() + THREAD_TURN_PAGE_READ_CACHE_TTL_MS, + }) + return result + }).finally(() => { + this.threadTurnPageReadPromiseByThreadId.delete(threadId) + }) + + this.threadTurnPageReadPromiseByThreadId.set(threadId, promise) + return promise + } + + cacheLiveState(threadId: string, data: unknown, turnCount: number, sessionSize: number): void { + this.liveStateCache.set(threadId, { data, turnCount, sessionSize }) + } + + getCachedLiveState(threadId: string, turnCount: number, sessionSize: number): unknown | null { + const cached = this.liveStateCache.get(threadId) + if (!cached) return null + if (cached.turnCount !== turnCount || cached.sessionSize !== sessionSize) return null + return cached.data + } + + invalidateLiveStateCache(threadId: string): void { + this.liveStateCache.delete(threadId) + } + + private captureItemFromNotification(notification: { method: string; params: unknown }): void { + if (notification.method !== 'item/started' && notification.method !== 'item/completed') return + + const params = asRecord(notification.params) + if (!params) return + const item = asRecord(params.item) + if (!item) return + const itemType = typeof item.type === 'string' ? item.type : '' + if (!MERGEABLE_ITEM_TYPES.has(itemType)) return + + const itemId = typeof item.id === 'string' ? 
item.id : '' + if (!itemId) return + + const threadId = this.extractThreadIdFromParams(params) + if (!threadId) return + + const turnId = + (typeof params.turnId === 'string' ? params.turnId : '') || + (typeof params.turn_id === 'string' ? params.turn_id : '') + if (!turnId) return + + let threadItems = this.capturedItemsByThreadId.get(threadId) + if (!threadItems) { + threadItems = new Map() + this.capturedItemsByThreadId.set(threadId, threadItems) + } + + const isCompleted = notification.method === 'item/completed' + const existing = threadItems.get(itemId) + + if (existing && existing.completed && !isCompleted) return + + threadItems.set(itemId, { + id: itemId, + type: itemType, + turnId, + data: item as Record, + completed: isCompleted, + }) + } + + mergeItemsIntoTurns(threadId: string, turns: unknown[]): unknown[] { + const capturedMap = this.capturedItemsByThreadId.get(threadId) + if (!capturedMap || capturedMap.size === 0) return turns + + const itemsByTurnId = new Map() + for (const captured of capturedMap.values()) { + let group = itemsByTurnId.get(captured.turnId) + if (!group) { + group = [] + itemsByTurnId.set(captured.turnId, group) + } + group.push(captured) + } + + return turns.map((turn) => { + const turnRecord = asRecord(turn) + if (!turnRecord) return turn + const turnId = typeof turnRecord.id === 'string' ? turnRecord.id : '' + if (!turnId) return turn + + const captured = itemsByTurnId.get(turnId) + if (!captured || captured.length === 0) return turn + + const existingItems = Array.isArray(turnRecord.items) ? (turnRecord.items as Record[]) : [] + const existingIds = new Set(existingItems.map((it) => (typeof it.id === 'string' ? 
it.id : '')).filter(Boolean)) + + const newItems = captured + .filter((c) => !existingIds.has(c.id)) + .map((c) => c.data) + + if (newItems.length === 0) return turn + + return { + ...turnRecord, + items: [...existingItems, ...newItems], + } + }) + } + + private sendServerRequestReply(requestId: number, reply: ServerRequestReply): void { + if (reply.error) { + this.sendLine({ + jsonrpc: '2.0', + id: requestId, + error: reply.error, + }) + return + } + + this.sendLine({ + jsonrpc: '2.0', + id: requestId, + result: reply.result ?? {}, + }) + } + + private resolvePendingServerRequest(requestId: number, reply: ServerRequestReply): void { + const pendingRequest = this.pendingServerRequests.get(requestId) + if (!pendingRequest) { + throw new Error(`No pending server request found for id ${String(requestId)}`) + } + this.pendingServerRequests.delete(requestId) + + this.sendServerRequestReply(requestId, reply) + const requestParams = asRecord(pendingRequest.params) + const threadId = + typeof requestParams?.threadId === 'string' && requestParams.threadId.length > 0 + ? requestParams.threadId + : '' + this.emitNotification({ + method: 'server/request/resolved', + params: { + id: requestId, + method: pendingRequest.method, + threadId, + mode: 'manual', + resolvedAtIso: new Date().toISOString(), + }, + }) + } + + private async refreshChatgptAuthTokens(params: ChatgptAuthTokensRefreshParams): Promise { + if (!this.chatgptAuthRefreshPromise) { + this.chatgptAuthRefreshPromise = refreshChatgptAuthTokensForExternalAuth(params).finally(() => { + this.chatgptAuthRefreshPromise = null + }) + } + return await this.chatgptAuthRefreshPromise + } + + private async handleChatgptAuthTokensRefreshRequest(requestId: number, params: unknown): Promise { + const requestParams = asRecord(params) + const previousAccountId = readNonEmptyString(requestParams?.previousAccountId ?? 
requestParams?.previous_account_id) + try { + const result = await this.refreshChatgptAuthTokens({ + reason: readNonEmptyString(requestParams?.reason) || undefined, + previousAccountId: previousAccountId || undefined, + }) + this.sendServerRequestReply(requestId, { result }) + this.emitNotification({ + method: 'server/request/resolved', + params: { + id: requestId, + method: 'account/chatgptAuthTokens/refresh', + mode: 'automatic', + resolvedAtIso: new Date().toISOString(), + }, + }) + } catch (error) { + this.sendServerRequestReply(requestId, { + error: { + code: -32001, + message: getErrorMessage(error, 'Failed to refresh ChatGPT auth tokens'), + }, + }) + } + } + + private handleServerRequest(requestId: number, method: string, params: unknown): void { + if (method === 'account/chatgptAuthTokens/refresh') { + void this.handleChatgptAuthTokensRefreshRequest(requestId, params) + return + } + + const pendingRequest: PendingServerRequest = { + id: requestId, + method, + params, + receivedAtIso: new Date().toISOString(), + } + this.pendingServerRequests.set(requestId, pendingRequest) + + this.emitNotification({ + method: 'server/request', + params: pendingRequest, + }) + } + + private async call(method: string, params: unknown): Promise { + this.start() + const id = this.nextId++ + + return new Promise((resolve, reject) => { + this.pending.set(id, { resolve, reject }) + + this.sendLine({ + jsonrpc: '2.0', + id, + method, + params, + } satisfies JsonRpcCall) + }) + } + + private async ensureInitialized(): Promise { + if (this.initialized) return + if (this.initializePromise) { + await this.initializePromise + return + } + + this.initializePromise = this.call('initialize', { + clientInfo: { + name: 'codex-web-local', + version: '0.1.0', + }, + capabilities: { + experimentalApi: true, + }, + }).then(() => { + this.sendLine({ + jsonrpc: '2.0', + method: 'initialized', + }) + this.initialized = true + }).finally(() => { + this.initializePromise = null + }) + + await 
this.initializePromise + } + + async rpc(method: string, params: unknown): Promise { + await this.ensureInitialized() + return this.call(method, params) + } + + onNotification(listener: (value: { method: string; params: unknown }) => void): () => void { + this.notificationListeners.add(listener) + return () => { + this.notificationListeners.delete(listener) + } + } + + async respondToServerRequest(payload: unknown): Promise { + await this.ensureInitialized() + + const body = asRecord(payload) + if (!body) { + throw new Error('Invalid response payload: expected object') + } + + const id = body.id + if (typeof id !== 'number' || !Number.isInteger(id)) { + throw new Error('Invalid response payload: "id" must be an integer') + } + + const rawError = asRecord(body.error) + if (rawError) { + const message = typeof rawError.message === 'string' && rawError.message.trim().length > 0 + ? rawError.message.trim() + : 'Server request rejected by client' + const code = typeof rawError.code === 'number' && Number.isFinite(rawError.code) + ? 
Math.trunc(rawError.code) + : -32000 + this.resolvePendingServerRequest(id, { error: { code, message } }) + return + } + + if (!('result' in body)) { + throw new Error('Invalid response payload: expected "result" or "error"') + } + + this.resolvePendingServerRequest(id, { result: body.result }) + } + + listPendingServerRequests(): PendingServerRequest[] { + return Array.from(this.pendingServerRequests.values()) + } + + dispose(): void { + if (!this.process) return + + const proc = this.process + this.stopping = true + this.process = null + this.initialized = false + this.initializePromise = null + this.readBuffer = '' + + const failure = new Error('codex app-server stopped') + for (const request of this.pending.values()) { + request.reject(failure) + } + this.pending.clear() + this.pendingServerRequests.clear() + + try { + proc.stdin.end() + } catch { + // ignore close errors on shutdown + } + + try { + proc.kill('SIGTERM') + } catch { + // ignore kill errors on shutdown + } + + const forceKillTimer = setTimeout(() => { + if (!proc.killed) { + try { + proc.kill('SIGKILL') + } catch { + // ignore kill errors on shutdown + } + } + }, 1500) + forceKillTimer.unref() + } +} diff --git a/src/server/authRefresh.ts b/src/server/authRefresh.ts new file mode 100644 index 000000000..bb88e0555 --- /dev/null +++ b/src/server/authRefresh.ts @@ -0,0 +1,165 @@ +import { readFile, writeFile } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' + +function getCodexHomeDir(): string { + const codexHome = process.env.CODEX_HOME?.trim() + return codexHome && codexHome.length > 0 ? codexHome : join(homedir(), '.codex') +} + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? 
value : '' +} + +function getCodexAuthPath(): string { + return join(getCodexHomeDir(), 'auth.json') +} + +type CodexAuth = { + auth_mode?: string + last_refresh?: number + tokens?: { + access_token?: string + refresh_token?: string + id_token?: string + account_id?: string + } +} + +const CODEX_CHATGPT_CLIENT_ID = 'app_EMoamEEZ73f0CkXaXp7hrann' +const DEFAULT_CODEX_REFRESH_TOKEN_URL = 'https://auth.openai.com/oauth/token' + +function decodeBase64UrlJson(value: string): Record | null { + try { + const padded = `${value}${'='.repeat((4 - (value.length % 4)) % 4)}` + const decoded = Buffer.from(padded.replace(/-/g, '+').replace(/_/g, '/'), 'base64').toString('utf8') + const parsed = JSON.parse(decoded) as unknown + return asRecord(parsed) + } catch { + return null + } +} + +function decodeJwtPayload(token: string | undefined): Record | null { + if (!token) return null + const parts = token.split('.') + if (parts.length < 2) return null + return decodeBase64UrlJson(parts[1] ?? '') +} + +function extractChatgptTokenMetadata(accessToken: string | undefined): { + chatgptAccountId: string | null + chatgptPlanType: string | null +} { + const payload = decodeJwtPayload(accessToken) + const auth = asRecord(payload?.['https://api.openai.com/auth']) + return { + chatgptAccountId: readNonEmptyString(auth?.chatgpt_account_id) || null, + chatgptPlanType: readNonEmptyString(auth?.chatgpt_plan_type) || null, + } +} + +function readTokenErrorMessage(payload: unknown, fallback: string): string { + const record = asRecord(payload) + const message = readNonEmptyString(record?.message) + if (message) return message + const error = record?.error + if (typeof error === 'string' && error.trim().length > 0) return error.trim() + const nestedError = asRecord(error) + return readNonEmptyString(nestedError?.message) + || readNonEmptyString(nestedError?.error_description) + || readNonEmptyString(record?.error_description) + || fallback +} + +function readTokenResponseString(payload: Record | 
null, ...keys: string[]): string | null { + if (!payload) return null + for (const key of keys) { + const value = readNonEmptyString(payload[key]) + if (value) return value + } + return null +} + +export async function refreshChatgptAuthTokensForExternalAuth( + params: ChatgptAuthTokensRefreshParams = {}, +): Promise { + const authPath = getCodexAuthPath() + const raw = await readFile(authPath, 'utf8') + const auth = JSON.parse(raw) as CodexAuth + const currentRefreshToken = auth.tokens?.refresh_token?.trim() ?? '' + if (!currentRefreshToken) { + throw new Error('No ChatGPT refresh token is available. Please sign in again.') + } + + const refreshUrl = process.env.CODEX_REFRESH_TOKEN_URL_OVERRIDE?.trim() || DEFAULT_CODEX_REFRESH_TOKEN_URL + const body = new URLSearchParams({ + grant_type: 'refresh_token', + refresh_token: currentRefreshToken, + client_id: CODEX_CHATGPT_CLIENT_ID, + }) + + const response = await fetch(refreshUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/x-www-form-urlencoded', + }, + body: body.toString(), + signal: AbortSignal.timeout(25_000), + }) + + const text = await response.text() + let payload: Record | null = null + try { + payload = asRecord(JSON.parse(text)) + } catch { + payload = null + } + + if (!response.ok) { + throw new Error(readTokenErrorMessage(payload, `ChatGPT token refresh failed with HTTP ${String(response.status)}`)) + } + + const accessToken = readTokenResponseString(payload, 'access_token', 'accessToken') + if (!accessToken) { + throw new Error('ChatGPT token refresh response did not include an access token.') + } + + const nextRefreshToken = readTokenResponseString(payload, 'refresh_token', 'refreshToken') ?? currentRefreshToken + const nextIdToken = readTokenResponseString(payload, 'id_token', 'idToken') ?? 
auth.tokens?.id_token + const metadata = extractChatgptTokenMetadata(accessToken) + const chatgptAccountId = + metadata.chatgptAccountId + || readTokenResponseString(payload, 'chatgpt_account_id', 'chatgptAccountId') + || readNonEmptyString(params.previousAccountId) + || readNonEmptyString(auth.tokens?.account_id) + if (!chatgptAccountId) { + throw new Error('ChatGPT token refresh response did not include account metadata.') + } + + const nextAuth: CodexAuth = { + ...auth, + auth_mode: auth.auth_mode || 'chatgpt', + last_refresh: Date.now(), + tokens: { + ...auth.tokens, + access_token: accessToken, + refresh_token: nextRefreshToken, + account_id: chatgptAccountId, + ...(nextIdToken ? { id_token: nextIdToken } : {}), + }, + } + await writeFile(authPath, JSON.stringify(nextAuth, null, 2), { encoding: 'utf8', mode: 0o600 }) + + return { + accessToken, + chatgptAccountId, + chatgptPlanType: metadata.chatgptPlanType, + } +} diff --git a/src/server/backendQueueProcessor.ts b/src/server/backendQueueProcessor.ts new file mode 100644 index 000000000..687e7b68e --- /dev/null +++ b/src/server/backendQueueProcessor.ts @@ -0,0 +1,507 @@ +import { randomBytes } from 'node:crypto' +import { readFile, writeFile } from 'node:fs/promises' +import { homedir } from 'node:os' +import { join } from 'node:path' +import type { CollaborationModeKind, ReasoningEffort } from '../types/codex.js' +import { normalizeStringArray } from './globalStateStore.js' +import { AppServerProcess } from './appServerProcess.js' +import type { ThreadAutomationRecord } from './threadAutomations.js' + +function getCodexHomeDir(): string { + const codexHome = process.env.CODEX_HOME?.trim() + return codexHome && codexHome.length > 0 ? 
codexHome : join(homedir(), '.codex') +} + +function getCodexGlobalStatePath(): string { + return join(getCodexHomeDir(), '.codex-global-state.json') +} + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? value : '' +} + +const THREAD_QUEUE_STATE_KEY = 'thread-queue-state' + +export type StoredQueuedMessage = { + id: string + text: string + imageUrls: string[] + skills: Array<{ name: string; path: string }> + fileAttachments: Array<{ label: string; path: string; fsPath: string }> + collaborationMode: 'default' | 'plan' +} + +export type ThreadQueueState = Record + +type BackendQueuedTurn = { + threadId: string + message: StoredQueuedMessage +} + +export type ThreadQueueStateUpdate = { + nextState: ThreadQueueState + result: T +} + +type ResolvedCollaborationModeSettings = { + model: string + reasoningEffort: ReasoningEffort | null +} + +function normalizeStoredQueuedMessage(value: unknown): StoredQueuedMessage | null { + const record = asRecord(value) + if (!record) return null + + const id = typeof record.id === 'string' ? record.id.trim() : '' + if (!id) return null + + const normalizeNamedPathItems = (items: unknown): Array<{ name: string; path: string }> => { + if (!Array.isArray(items)) return [] + return items.flatMap((item) => { + const itemRecord = asRecord(item) + if (!itemRecord) return [] + const name = typeof itemRecord.name === 'string' ? itemRecord.name.trim() : '' + const path = typeof itemRecord.path === 'string' ? itemRecord.path.trim() : '' + return name && path ? 
[{ name, path }] : []
    })
  }

  const normalizeFileAttachments = (items: unknown): Array<{ label: string; path: string; fsPath: string }> => {
    if (!Array.isArray(items)) return []
    return items.flatMap((item) => {
      const itemRecord = asRecord(item)
      if (!itemRecord) return []
      const label = typeof itemRecord.label === 'string' ? itemRecord.label.trim() : ''
      const path = typeof itemRecord.path === 'string' ? itemRecord.path.trim() : ''
      const fsPath = typeof itemRecord.fsPath === 'string' ? itemRecord.fsPath.trim() : ''
      return label && path && fsPath ? [{ label, path, fsPath }] : []
    })
  }

  return {
    id,
    text: typeof record.text === 'string' ? record.text : '',
    imageUrls: normalizeStringArray(record.imageUrls),
    skills: normalizeNamedPathItems(record.skills),
    fileAttachments: normalizeFileAttachments(record.fileAttachments),
    collaborationMode: record.collaborationMode === 'plan' ? 'plan' : 'default',
  }
}

/**
 * Validates an untrusted value as a ThreadQueueState. Thread ids are trimmed,
 * non-array queues are skipped, invalid messages are dropped, and threads with
 * empty queues are omitted entirely.
 */
export function normalizeThreadQueueState(value: unknown): ThreadQueueState {
  const record = asRecord(value)
  if (!record) return {}

  const state: ThreadQueueState = {}
  for (const [threadId, rawMessages] of Object.entries(record)) {
    const normalizedThreadId = threadId.trim()
    if (!normalizedThreadId || !Array.isArray(rawMessages)) continue
    const messages = rawMessages.flatMap((item) => {
      const message = normalizeStoredQueuedMessage(item)
      return message ? [message] : []
    })
    if (messages.length > 0) {
      state[normalizedThreadId] = messages
    }
  }
  return state
}

// Serializes queue mutations so concurrent read-modify-write cycles cannot race.
// NOTE(review): the Promise type argument was lost to markup stripping;
// Promise<unknown> is reconstructed from usage — confirm against upstream.
let threadQueueMutationChain: Promise<unknown> = Promise.resolve()

/** Reads the persisted queue state; any read/parse failure yields an empty state. */
export async function readThreadQueueState(): Promise<ThreadQueueState> {
  const statePath = getCodexGlobalStatePath()
  try {
    const raw = await readFile(statePath, 'utf8')
    const payload = asRecord(JSON.parse(raw)) ?? 
{}
    return normalizeThreadQueueState(payload[THREAD_QUEUE_STATE_KEY])
  } catch {
    return {}
  }
}

/**
 * Writes the queue state into the global-state file without taking the mutation
 * lock — callers must already be serialized behind `threadQueueMutationChain`.
 * Other keys in the file are preserved; an empty state removes the queue key.
 */
export async function writeThreadQueueStateUnlocked(nextState: ThreadQueueState): Promise<void> {
  const statePath = getCodexGlobalStatePath()
  let payload: Record<string, unknown> = {}
  try {
    const raw = await readFile(statePath, 'utf8')
    payload = asRecord(JSON.parse(raw)) ?? {}
  } catch {
    payload = {}
  }
  const normalized = normalizeThreadQueueState(nextState)
  if (Object.keys(normalized).length > 0) {
    payload[THREAD_QUEUE_STATE_KEY] = normalized
  } else {
    delete payload[THREAD_QUEUE_STATE_KEY]
  }
  await writeFile(statePath, JSON.stringify(payload), 'utf8')
}

/**
 * Runs a read-modify-write cycle against the queue state, serialized behind all
 * earlier updates via the mutation chain, and returns the update's result.
 * NOTE(review): the type parameters here were lost to markup stripping and are
 * reconstructed from call sites — confirm against upstream.
 */
async function withThreadQueueStateUpdate<T>(
  update: (state: ThreadQueueState) => ThreadQueueStateUpdate<T> | Promise<ThreadQueueStateUpdate<T>>,
): Promise<T> {
  const run = threadQueueMutationChain.then(async () => {
    const currentState = await readThreadQueueState()
    const { nextState, result } = await update(currentState)
    await writeThreadQueueStateUnlocked(nextState)
    return result
  })
  // Keep the chain alive even when this particular update rejects.
  threadQueueMutationChain = run.catch(() => {})
  return run
}

/** Replaces the entire persisted queue state (normalized) under the lock. */
export async function writeThreadQueueState(nextState: ThreadQueueState): Promise<void> {
  await withThreadQueueStateUpdate(() => ({
    nextState: normalizeThreadQueueState(nextState),
    result: undefined,
  }))
}

/** Appends a message to the end of `threadId`'s queue under the lock. */
export async function appendThreadQueuedMessage(threadId: string, message: StoredQueuedMessage): Promise<void> {
  const normalizedThreadId = threadId.trim()
  if (!normalizedThreadId) throw new Error('threadId is required')
  await withThreadQueueStateUpdate((state) => ({
    nextState: {
      ...state,
      [normalizedThreadId]: [...(state[normalizedThreadId] ?? []), message],
    },
    result: undefined,
  }))
}

/** Narrows an unknown value to a known ReasoningEffort, or '' when invalid. */
function normalizeReasoningEffort(value: unknown): ReasoningEffort | '' {
  const allowed: ReasoningEffort[] = ['none', 'minimal', 'low', 'medium', 'high', 'xhigh']
  return typeof value === 'string' && allowed.includes(value as ReasoningEffort)
    ? 
(value as ReasoningEffort) + : '' +} + +function normalizeCollaborationModeReasoningEffort(value: ReasoningEffort | '' | null | undefined): ReasoningEffort | null { + return value && value.length > 0 ? value : null +} + +function extractLocalImagePathFromUrl(value: string): string | null { + if (!value) return null + try { + const parsed = new URL(value, 'http://localhost') + if (parsed.pathname !== '/codex-local-image') return null + const path = parsed.searchParams.get('path')?.trim() ?? '' + return path.length > 0 ? path : null + } catch { + return null + } +} + +function buildTextWithAttachments(prompt: string, files: StoredQueuedMessage['fileAttachments']): string { + if (files.length === 0) return prompt + let prefix = '# Files mentioned by the user:\n' + for (const f of files) { + prefix += `\n## ${f.label}: ${f.path}\n` + } + return `${prefix}\n## My request for Codex:\n\n${prompt}\n` +} + +function escapeHeartbeatXmlText(value: string): string { + return value + .replace(/&/gu, '&') + .replace(//gu, '>') +} + +export function buildHeartbeatQueuedMessage(automation: ThreadAutomationRecord): StoredQueuedMessage { + return { + id: `automation-${automation.id}-${Date.now()}-${randomBytes(3).toString('hex')}`, + text: ` +${escapeHeartbeatXmlText(automation.id)} +${new Date().toISOString()} + +${escapeHeartbeatXmlText(automation.prompt)} + +`, + imageUrls: [], + skills: [], + fileAttachments: [], + collaborationMode: 'default', + } +} + +function fileNameFromPath(pathValue: string): string { + const normalized = pathValue.replace(/\\/g, '/') + const segments = normalized.split('/').filter(Boolean) + return segments.at(-1) ?? normalized +} + +function extractThreadIdFromNotificationParams(params: unknown): string { + const record = asRecord(params) + if (!record) return '' + const threadId = + (typeof record.threadId === 'string' ? record.threadId : '') || + (typeof record.thread_id === 'string' ? 
record.thread_id : '') || + (typeof record.conversationId === 'string' ? record.conversationId : '') || + (typeof record.conversation_id === 'string' ? record.conversation_id : '') + if (threadId) return threadId + const thread = asRecord(record.thread) + if (thread && typeof thread.id === 'string') return thread.id + const turn = asRecord(record.turn) + if (turn) { + const turnThreadId = + (typeof turn.threadId === 'string' ? turn.threadId : '') || + (typeof turn.thread_id === 'string' ? turn.thread_id : '') + if (turnThreadId) return turnThreadId + } + return '' +} + +function isTurnCompletedNotification(notification: { method: string; params: unknown }): boolean { + return notification.method === 'turn/completed' +} + +export class BackendQueueProcessor { + private readonly processingThreadIds = new Set() + private readonly queueDrainTimersByThreadId = new Map>() + private readonly queueDrainDueAtByThreadId = new Map() + private readonly unsubscribe: () => void + + constructor(private readonly appServer: AppServerProcess) { + this.unsubscribe = appServer.onNotification((notification) => { + if (!isTurnCompletedNotification(notification)) return + const threadId = extractThreadIdFromNotificationParams(notification.params) + if (!threadId) return + void this.processThreadQueue(threadId) + }) + void this.scheduleAllQueuedThreads(1000) + } + + dispose(): void { + this.unsubscribe() + for (const timer of this.queueDrainTimersByThreadId.values()) { + clearTimeout(timer) + } + this.queueDrainTimersByThreadId.clear() + this.queueDrainDueAtByThreadId.clear() + this.processingThreadIds.clear() + } + + async scheduleAllQueuedThreads(delayMs = 0): Promise { + try { + const state = await readThreadQueueState() + for (const threadId of Object.keys(state)) { + this.scheduleThreadQueueDrain(threadId, delayMs) + } + } catch { + // Queue recovery is best-effort; normal turn-completed events can still drain later. 
+ } + } + + scheduleThreadQueueDrain(threadId: string, delayMs = 5000): void { + if (!threadId) return + const normalizedDelayMs = Math.max(0, delayMs) + const nextDueAt = Date.now() + normalizedDelayMs + const existingDueAt = this.queueDrainDueAtByThreadId.get(threadId) + const existingTimer = this.queueDrainTimersByThreadId.get(threadId) + if (existingTimer) { + if (existingDueAt !== undefined && existingDueAt <= nextDueAt) return + clearTimeout(existingTimer) + this.queueDrainTimersByThreadId.delete(threadId) + this.queueDrainDueAtByThreadId.delete(threadId) + } + const timer = setTimeout(() => { + this.queueDrainTimersByThreadId.delete(threadId) + this.queueDrainDueAtByThreadId.delete(threadId) + void this.processThreadQueue(threadId) + }, normalizedDelayMs) + timer.unref?.() + this.queueDrainTimersByThreadId.set(threadId, timer) + this.queueDrainDueAtByThreadId.set(threadId, nextDueAt) + } + + async processThreadQueue(threadId: string): Promise { + if (this.processingThreadIds.has(threadId)) return + this.processingThreadIds.add(threadId) + try { + const canStart = await this.canStartQueuedTurn(threadId) + if (!canStart) { + if (await this.hasQueuedTurns(threadId)) { + this.scheduleThreadQueueDrain(threadId) + } + return + } + const next = await this.popNextQueuedTurn(threadId) + if (!next) return + try { + await this.startQueuedTurn(next) + if (await this.hasQueuedTurns(threadId)) { + this.scheduleThreadQueueDrain(threadId) + } + } catch { + await this.restoreQueuedTurn(next) + this.scheduleThreadQueueDrain(threadId) + } + } catch { + // Queue processing is best-effort. Keep the bridge alive if app-server is unavailable. 
+ this.scheduleThreadQueueDrain(threadId) + } finally { + this.processingThreadIds.delete(threadId) + } + } + + private async hasQueuedTurns(threadId: string): Promise { + const state = await readThreadQueueState() + const queue = state[threadId] + return Array.isArray(queue) && queue.length > 0 + } + + private async canStartQueuedTurn(threadId: string): Promise { + const response = asRecord(await this.appServer.rpc('thread/read', { threadId, includeTurns: true })) + const thread = asRecord(response?.thread) + if (!thread) return false + + const status = asRecord(thread.status) + const statusType = readNonEmptyString(status?.type) + if (statusType === 'inProgress' || statusType === 'running' || statusType === 'active') return false + + const turns = Array.isArray(thread.turns) ? thread.turns : [] + return !turns.some((turn) => readNonEmptyString(asRecord(turn)?.status) === 'inProgress') + } + + private async popNextQueuedTurn(threadId: string): Promise { + return withThreadQueueStateUpdate((state) => { + const queue = state[threadId] + if (!queue || queue.length === 0) { + return { nextState: state, result: null } + } + + const [message, ...rest] = queue + const nextState = { ...state } + if (rest.length > 0) { + nextState[threadId] = rest + } else { + delete nextState[threadId] + } + return { nextState, result: { threadId, message } } + }) + } + + private async restoreQueuedTurn(turn: BackendQueuedTurn): Promise { + await withThreadQueueStateUpdate((state) => { + const queue = state[turn.threadId] ?? 
[] + return { + nextState: { + ...state, + [turn.threadId]: [turn.message, ...queue], + }, + result: undefined, + } + }) + } + + private async resolveCollaborationModeSettings(mode: CollaborationModeKind): Promise { + let currentConfig: Record | null = null + try { + const configPayload = asRecord(await this.appServer.rpc('config/read', {})) + currentConfig = asRecord(configPayload?.config) + } catch { + currentConfig = null + } + + const configuredModel = readNonEmptyString(currentConfig?.model) + if (configuredModel) { + return { + model: configuredModel, + reasoningEffort: normalizeCollaborationModeReasoningEffort(normalizeReasoningEffort(currentConfig?.model_reasoning_effort)), + } + } + + try { + const modelsPayload = asRecord(await this.appServer.rpc('model/list', {})) + const models = Array.isArray(modelsPayload?.data) ? modelsPayload.data : [] + for (const row of models) { + const record = asRecord(row) + const candidate = readNonEmptyString(record?.id) || readNonEmptyString(record?.model) + if (candidate) { + return { + model: candidate, + reasoningEffort: normalizeCollaborationModeReasoningEffort(normalizeReasoningEffort(currentConfig?.model_reasoning_effort)), + } + } + } + } catch { + // Fall through to no collaboration-mode payload. + } + + throw new Error(`${mode === 'plan' ? 
'Plan' : 'Default'} mode requires an available model.`)
  }

  /**
   * Builds the `turn/start` params for a queued message: a text block that
   * references attached files, image inputs (local or remote), skill inputs,
   * and — when resolvable — a collaboration-mode payload.
   */
  private async buildQueuedTurnParams(turn: BackendQueuedTurn): Promise<Record<string, unknown>> {
    // Local-image URLs double as file attachments so the prompt can reference them.
    const localImageAttachments: StoredQueuedMessage['fileAttachments'] = []
    for (const imageUrl of turn.message.imageUrls) {
      const localImagePath = extractLocalImagePathFromUrl(imageUrl.trim())
      if (!localImagePath) continue
      localImageAttachments.push({
        label: fileNameFromPath(localImagePath),
        path: localImagePath,
        fsPath: localImagePath,
      })
    }

    // De-dupe attachments by filesystem path, keeping the first occurrence.
    const allFileAttachments = [...turn.message.fileAttachments, ...localImageAttachments]
    const dedupedFileAttachments = allFileAttachments.filter((entry, index) =>
      allFileAttachments.findIndex((candidate) => candidate.fsPath === entry.fsPath) === index)

    const input: Array<Record<string, unknown>> = [{
      type: 'text',
      text: buildTextWithAttachments(turn.message.text, dedupedFileAttachments),
    }]

    for (const imageUrl of turn.message.imageUrls) {
      const normalizedUrl = imageUrl.trim()
      if (!normalizedUrl) continue
      const localImagePath = extractLocalImagePathFromUrl(normalizedUrl)
      if (localImagePath) {
        input.push({ type: 'localImage', path: localImagePath })
      } else {
        // Send both key spellings for compatibility across app-server versions.
        input.push({ type: 'image', url: normalizedUrl, image_url: normalizedUrl })
      }
    }

    for (const skill of turn.message.skills) {
      input.push({ type: 'skill', name: skill.name, path: skill.path })
    }

    const params: Record<string, unknown> = {
      threadId: turn.threadId,
      input,
    }
    if (dedupedFileAttachments.length > 0) {
      params.attachments = dedupedFileAttachments.map((f) => ({ label: f.label, path: f.path, fsPath: f.fsPath }))
    }

    try {
      const settings = await this.resolveCollaborationModeSettings(turn.message.collaborationMode)
      params.collaborationMode = {
        mode: turn.message.collaborationMode,
        settings: {
          model: settings.model,
          reasoning_effort: settings.reasoningEffort,
          developer_instructions: null,
        },
      }
    } catch {
      // Older app-server versions still accept a plain turn/start without 
collaborationMode. + } + + return params + } + + private async startQueuedTurn(turn: BackendQueuedTurn): Promise { + await this.appServer.rpc('thread/resume', { threadId: turn.threadId }) + await this.appServer.rpc('turn/start', await this.buildQueuedTurnParams(turn)) + } +} diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index b8c36c1ec..26225425d 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -87,6 +87,15 @@ import { writePinnedThreadIds, writeThreadTitleCache, } from './globalStateStore.js' +import { AppServerProcess, type StreamEventFrame } from './appServerProcess.js' +import { + BackendQueueProcessor, + appendThreadQueuedMessage, + buildHeartbeatQueuedMessage, + normalizeThreadQueueState, + readThreadQueueState, + writeThreadQueueState, +} from './backendQueueProcessor.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -1345,243 +1354,6 @@ function getCodexAutomationsDir(): string { return join(getCodexHomeDir(), 'automations') } -const THREAD_QUEUE_STATE_KEY = 'thread-queue-state' - -type StoredQueuedMessage = { - id: string - text: string - imageUrls: string[] - skills: Array<{ name: string; path: string }> - fileAttachments: Array<{ label: string; path: string; fsPath: string }> - collaborationMode: 'default' | 'plan' -} - -type ThreadQueueState = Record - -type BackendQueuedTurn = { - threadId: string - message: StoredQueuedMessage -} - -type ThreadQueueStateUpdate = { - nextState: ThreadQueueState - result: T -} - -type ResolvedCollaborationModeSettings = { - model: string - reasoningEffort: ReasoningEffort | null -} - -function normalizeStoredQueuedMessage(value: unknown): StoredQueuedMessage | null { - const record = asRecord(value) - if (!record) return null - - const id = typeof 
record.id === 'string' ? record.id.trim() : '' - if (!id) return null - - const normalizeNamedPathItems = (items: unknown): Array<{ name: string; path: string }> => { - if (!Array.isArray(items)) return [] - return items.flatMap((item) => { - const itemRecord = asRecord(item) - if (!itemRecord) return [] - const name = typeof itemRecord.name === 'string' ? itemRecord.name.trim() : '' - const path = typeof itemRecord.path === 'string' ? itemRecord.path.trim() : '' - return name && path ? [{ name, path }] : [] - }) - } - - const normalizeFileAttachments = (items: unknown): Array<{ label: string; path: string; fsPath: string }> => { - if (!Array.isArray(items)) return [] - return items.flatMap((item) => { - const itemRecord = asRecord(item) - if (!itemRecord) return [] - const label = typeof itemRecord.label === 'string' ? itemRecord.label.trim() : '' - const path = typeof itemRecord.path === 'string' ? itemRecord.path.trim() : '' - const fsPath = typeof itemRecord.fsPath === 'string' ? itemRecord.fsPath.trim() : '' - return label && path && fsPath ? [{ label, path, fsPath }] : [] - }) - } - - return { - id, - text: typeof record.text === 'string' ? record.text : '', - imageUrls: normalizeStringArray(record.imageUrls), - skills: normalizeNamedPathItems(record.skills), - fileAttachments: normalizeFileAttachments(record.fileAttachments), - collaborationMode: record.collaborationMode === 'plan' ? 'plan' : 'default', - } -} - -function normalizeThreadQueueState(value: unknown): ThreadQueueState { - const record = asRecord(value) - if (!record) return {} - - const state: ThreadQueueState = {} - for (const [threadId, rawMessages] of Object.entries(record)) { - const normalizedThreadId = threadId.trim() - if (!normalizedThreadId || !Array.isArray(rawMessages)) continue - const messages = rawMessages.flatMap((item) => { - const message = normalizeStoredQueuedMessage(item) - return message ? 
[message] : [] - }) - if (messages.length > 0) { - state[normalizedThreadId] = messages - } - } - return state -} - -let threadQueueMutationChain: Promise = Promise.resolve() - -async function readThreadQueueState(): Promise { - const statePath = getCodexGlobalStatePath() - try { - const raw = await readFile(statePath, 'utf8') - const payload = asRecord(JSON.parse(raw)) ?? {} - return normalizeThreadQueueState(payload[THREAD_QUEUE_STATE_KEY]) - } catch { - return {} - } -} - -async function writeThreadQueueStateUnlocked(nextState: ThreadQueueState): Promise { - const statePath = getCodexGlobalStatePath() - let payload: Record = {} - try { - const raw = await readFile(statePath, 'utf8') - payload = asRecord(JSON.parse(raw)) ?? {} - } catch { - payload = {} - } - const normalized = normalizeThreadQueueState(nextState) - if (Object.keys(normalized).length > 0) { - payload[THREAD_QUEUE_STATE_KEY] = normalized - } else { - delete payload[THREAD_QUEUE_STATE_KEY] - } - await writeFile(statePath, JSON.stringify(payload), 'utf8') -} - -async function withThreadQueueStateUpdate( - update: (state: ThreadQueueState) => ThreadQueueStateUpdate | Promise>, -): Promise { - const run = threadQueueMutationChain.then(async () => { - const currentState = await readThreadQueueState() - const { nextState, result } = await update(currentState) - await writeThreadQueueStateUnlocked(nextState) - return result - }) - threadQueueMutationChain = run.catch(() => {}) - return run -} - -async function writeThreadQueueState(nextState: ThreadQueueState): Promise { - await withThreadQueueStateUpdate(() => ({ - nextState: normalizeThreadQueueState(nextState), - result: undefined, - })) -} - -async function appendThreadQueuedMessage(threadId: string, message: StoredQueuedMessage): Promise { - const normalizedThreadId = threadId.trim() - if (!normalizedThreadId) throw new Error('threadId is required') - await withThreadQueueStateUpdate((state) => ({ - nextState: { - ...state, - [normalizedThreadId]: 
[...(state[normalizedThreadId] ?? []), message], - }, - result: undefined, - })) -} - -function normalizeReasoningEffort(value: unknown): ReasoningEffort | '' { - const allowed: ReasoningEffort[] = ['none', 'minimal', 'low', 'medium', 'high', 'xhigh'] - return typeof value === 'string' && allowed.includes(value as ReasoningEffort) - ? (value as ReasoningEffort) - : '' -} - -function normalizeCollaborationModeReasoningEffort(value: ReasoningEffort | '' | null | undefined): ReasoningEffort | null { - return value && value.length > 0 ? value : null -} - -function extractLocalImagePathFromUrl(value: string): string | null { - if (!value) return null - try { - const parsed = new URL(value, 'http://localhost') - if (parsed.pathname !== '/codex-local-image') return null - const path = parsed.searchParams.get('path')?.trim() ?? '' - return path.length > 0 ? path : null - } catch { - return null - } -} - -function buildTextWithAttachments(prompt: string, files: StoredQueuedMessage['fileAttachments']): string { - if (files.length === 0) return prompt - let prefix = '# Files mentioned by the user:\n' - for (const f of files) { - prefix += `\n## ${f.label}: ${f.path}\n` - } - return `${prefix}\n## My request for Codex:\n\n${prompt}\n` -} - -function escapeHeartbeatXmlText(value: string): string { - return value - .replace(/&/gu, '&') - .replace(//gu, '>') -} - -function buildHeartbeatQueuedMessage(automation: ThreadAutomationRecord): StoredQueuedMessage { - return { - id: `automation-${automation.id}-${Date.now()}-${randomBytes(3).toString('hex')}`, - text: ` -${escapeHeartbeatXmlText(automation.id)} -${new Date().toISOString()} - -${escapeHeartbeatXmlText(automation.prompt)} - -`, - imageUrls: [], - skills: [], - fileAttachments: [], - collaborationMode: 'default', - } -} - -function fileNameFromPath(pathValue: string): string { - const normalized = pathValue.replace(/\\/g, '/') - const segments = normalized.split('/').filter(Boolean) - return segments.at(-1) ?? 
normalized -} - -function extractThreadIdFromNotificationParams(params: unknown): string { - const record = asRecord(params) - if (!record) return '' - const threadId = - (typeof record.threadId === 'string' ? record.threadId : '') || - (typeof record.thread_id === 'string' ? record.thread_id : '') || - (typeof record.conversationId === 'string' ? record.conversationId : '') || - (typeof record.conversation_id === 'string' ? record.conversation_id : '') - if (threadId) return threadId - const thread = asRecord(record.thread) - if (thread && typeof thread.id === 'string') return thread.id - const turn = asRecord(record.turn) - if (turn) { - const turnThreadId = - (typeof turn.threadId === 'string' ? turn.threadId : '') || - (typeof turn.thread_id === 'string' ? turn.thread_id : '') - if (turnThreadId) return turnThreadId - } - return '' -} - -function isTurnCompletedNotification(notification: { method: string; params: unknown }): boolean { - return notification.method === 'turn/completed' -} - async function rollbackCreatedWorktree( gitRoot: string, worktreeCwd: string, @@ -1886,838 +1658,6 @@ async function fetchConnectorLogo(rawUrl: string): Promise<{ contentType: string } } -const STREAM_EVENT_BUFFER_LIMIT = 400 - -type StreamEventFrame = { - method: string - params: unknown - atIso: string -} - -type CapturedItem = { - id: string - type: string - turnId: string - data: Record - completed: boolean -} - -const MERGEABLE_ITEM_TYPES = new Set([ - 'commandExecution', - 'fileChange', -]) - -class AppServerProcess { - private process: ChildProcessWithoutNullStreams | null = null - private initialized = false - private initializePromise: Promise | null = null - private readBuffer = '' - private nextId = 1 - private stopping = false - private readonly pending = new Map void; reject: (reason?: unknown) => void }>() - private readonly notificationListeners = new Set<(value: { method: string; params: unknown }) => void>() - private readonly pendingServerRequests = new Map() 
- private readonly appServerArgs = buildAppServerArgs() - private readonly streamEventsByThreadId = new Map() - private readonly lastThreadReadSnapshotByThreadId = new Map() - private readonly threadTurnPageReadCacheByThreadId = new Map() - private readonly threadTurnPageReadPromiseByThreadId = new Map>() - private readonly capturedItemsByThreadId = new Map>() - private readonly liveStateCache = new Map() - private chatgptAuthRefreshPromise: Promise | null = null - - - private getCodexCommand(): string { - const codexCommand = resolveCodexCommand() - if (!codexCommand) { - throw new Error('Codex CLI is not available. Install @openai/codex or set CODEXUI_CODEX_COMMAND.') - } - return codexCommand - } - - private buildAppServerConfig(): { args: string[]; env: Record } { - const args = [ - 'app-server', - '-c', 'approval_policy="never"', - '-c', 'sandbox_mode="danger-full-access"', - ] - let extraEnv: Record = {} - const serverPort = parseInt(process.env.CODEXUI_SERVER_PORT ?? '', 10) || undefined - const statePath = join(getCodexHomeDir(), FREE_MODE_STATE_FILE) - try { - const state = ensureDefaultFreeModeStateForMissingAuthSync(statePath) - if (state) { - args.push(...getFreeModeConfigArgs(state, serverPort)) - extraEnv = getFreeModeEnvVars(state) - } - } catch { - // No free-mode state or invalid — use defaults - } - return { args, env: extraEnv } - } - - private start(): void { - if (this.process) return - - this.stopping = false - const config = this.buildAppServerConfig() - const invocation = getSpawnInvocation(this.getCodexCommand(), config.args) - const spawnEnv = Object.keys(config.env).length > 0 - ? { ...process.env, ...config.env } - : undefined - const proc = spawn(invocation.command, invocation.args, { stdio: ['pipe', 'pipe', 'pipe'], ...(spawnEnv ? 
{ env: spawnEnv } : {}) }) - this.process = proc - - proc.stdout.setEncoding('utf8') - proc.stdout.on('data', (chunk: string) => { - this.readBuffer += chunk - - let lineEnd = this.readBuffer.indexOf('\n') - while (lineEnd !== -1) { - const line = this.readBuffer.slice(0, lineEnd).trim() - this.readBuffer = this.readBuffer.slice(lineEnd + 1) - - if (line.length > 0) { - this.handleLine(line) - } - - lineEnd = this.readBuffer.indexOf('\n') - } - }) - - proc.stderr.setEncoding('utf8') - proc.stderr.on('data', () => { - // Keep stderr silent in dev middleware; JSON-RPC errors are forwarded via responses. - }) - - proc.on('exit', () => { - if (this.process !== proc) { - return - } - - const failure = new Error(this.stopping ? 'codex app-server stopped' : 'codex app-server exited unexpectedly') - for (const request of this.pending.values()) { - request.reject(failure) - } - - this.pending.clear() - this.pendingServerRequests.clear() - this.process = null - this.initialized = false - this.initializePromise = null - this.readBuffer = '' - }) - } - - private sendLine(payload: Record): void { - if (!this.process) { - throw new Error('codex app-server is not running') - } - - this.process.stdin.write(`${JSON.stringify(payload)}\n`) - } - - private handleLine(line: string): void { - let message: JsonRpcResponse - try { - message = JSON.parse(line) as JsonRpcResponse - } catch { - return - } - - if (typeof message.id === 'number' && this.pending.has(message.id)) { - const pendingRequest = this.pending.get(message.id) - this.pending.delete(message.id) - - if (!pendingRequest) return - - if (message.error) { - pendingRequest.reject(new Error(message.error.message)) - } else { - pendingRequest.resolve(message.result) - } - return - } - - if (typeof message.method === 'string' && typeof message.id !== 'number') { - this.emitNotification({ - method: message.method, - params: message.params ?? 
null, - }) - return - } - - // Handle server-initiated JSON-RPC requests (approvals, dynamic tool calls, etc.). - if (typeof message.id === 'number' && typeof message.method === 'string') { - this.handleServerRequest(message.id, message.method, message.params ?? null) - } - } - - private emitNotification(notification: { method: string; params: unknown }): void { - this.recordStreamEvent(notification) - this.captureItemFromNotification(notification) - const nThreadId = this.extractThreadIdFromParams(notification.params) - if (nThreadId) { - this.invalidateLiveStateCache(nThreadId) - this.threadTurnPageReadCacheByThreadId.delete(nThreadId) - } - for (const listener of this.notificationListeners) { - listener(notification) - } - } - - private extractThreadIdFromParams(params: unknown): string { - const record = asRecord(params) - if (!record) return '' - const threadId = - (typeof record.threadId === 'string' ? record.threadId : '') || - (typeof record.thread_id === 'string' ? record.thread_id : '') || - (typeof record.conversationId === 'string' ? record.conversationId : '') || - (typeof record.conversation_id === 'string' ? record.conversation_id : '') - if (threadId) return threadId - const thread = asRecord(record.thread) - if (thread && typeof thread.id === 'string') return thread.id - const turn = asRecord(record.turn) - if (turn) { - const turnThreadId = - (typeof turn.threadId === 'string' ? turn.threadId : '') || - (typeof turn.thread_id === 'string' ? 
turn.thread_id : '') - if (turnThreadId) return turnThreadId - } - return '' - } - - private recordStreamEvent(notification: { method: string; params: unknown }): void { - const threadId = this.extractThreadIdFromParams(notification.params) - if (!threadId) return - const frame: StreamEventFrame = { - method: notification.method, - params: notification.params, - atIso: new Date().toISOString(), - } - let buffer = this.streamEventsByThreadId.get(threadId) - if (!buffer) { - buffer = [] - this.streamEventsByThreadId.set(threadId, buffer) - } - buffer.push(frame) - if (buffer.length > STREAM_EVENT_BUFFER_LIMIT) { - buffer.splice(0, buffer.length - STREAM_EVENT_BUFFER_LIMIT) - } - } - - getStreamEvents(threadId: string, limit: number): StreamEventFrame[] { - const buffer = this.streamEventsByThreadId.get(threadId) - if (!buffer || buffer.length === 0) return [] - return buffer.slice(-limit) - } - - storeThreadReadSnapshot(threadId: string, snapshot: unknown): void { - this.lastThreadReadSnapshotByThreadId.set(threadId, snapshot) - this.threadTurnPageReadCacheByThreadId.delete(threadId) - } - - getLastThreadReadSnapshot(threadId: string): unknown | null { - return this.lastThreadReadSnapshotByThreadId.get(threadId) ?? 
null - } - - async readThreadForTurnPage(threadId: string): Promise { - const now = Date.now() - const cached = this.threadTurnPageReadCacheByThreadId.get(threadId) - if (cached && cached.expiresAt > now) return cached.result - if (cached) this.threadTurnPageReadCacheByThreadId.delete(threadId) - - const pending = this.threadTurnPageReadPromiseByThreadId.get(threadId) - if (pending) return pending - - const promise = this.rpc('thread/read', { - threadId, - includeTurns: true, - }).then((result) => { - this.threadTurnPageReadCacheByThreadId.set(threadId, { - result, - expiresAt: Date.now() + THREAD_TURN_PAGE_READ_CACHE_TTL_MS, - }) - return result - }).finally(() => { - this.threadTurnPageReadPromiseByThreadId.delete(threadId) - }) - - this.threadTurnPageReadPromiseByThreadId.set(threadId, promise) - return promise - } - - cacheLiveState(threadId: string, data: unknown, turnCount: number, sessionSize: number): void { - this.liveStateCache.set(threadId, { data, turnCount, sessionSize }) - } - - getCachedLiveState(threadId: string, turnCount: number, sessionSize: number): unknown | null { - const cached = this.liveStateCache.get(threadId) - if (!cached) return null - if (cached.turnCount !== turnCount || cached.sessionSize !== sessionSize) return null - return cached.data - } - - invalidateLiveStateCache(threadId: string): void { - this.liveStateCache.delete(threadId) - } - - private captureItemFromNotification(notification: { method: string; params: unknown }): void { - if (notification.method !== 'item/started' && notification.method !== 'item/completed') return - - const params = asRecord(notification.params) - if (!params) return - const item = asRecord(params.item) - if (!item) return - const itemType = typeof item.type === 'string' ? item.type : '' - if (!MERGEABLE_ITEM_TYPES.has(itemType)) return - - const itemId = typeof item.id === 'string' ? 
item.id : '' - if (!itemId) return - - const threadId = this.extractThreadIdFromParams(params) - if (!threadId) return - - const turnId = - (typeof params.turnId === 'string' ? params.turnId : '') || - (typeof params.turn_id === 'string' ? params.turn_id : '') - if (!turnId) return - - let threadItems = this.capturedItemsByThreadId.get(threadId) - if (!threadItems) { - threadItems = new Map() - this.capturedItemsByThreadId.set(threadId, threadItems) - } - - const isCompleted = notification.method === 'item/completed' - const existing = threadItems.get(itemId) - - if (existing && existing.completed && !isCompleted) return - - threadItems.set(itemId, { - id: itemId, - type: itemType, - turnId, - data: item as Record, - completed: isCompleted, - }) - } - - mergeItemsIntoTurns(threadId: string, turns: unknown[]): unknown[] { - const capturedMap = this.capturedItemsByThreadId.get(threadId) - if (!capturedMap || capturedMap.size === 0) return turns - - const itemsByTurnId = new Map() - for (const captured of capturedMap.values()) { - let group = itemsByTurnId.get(captured.turnId) - if (!group) { - group = [] - itemsByTurnId.set(captured.turnId, group) - } - group.push(captured) - } - - return turns.map((turn) => { - const turnRecord = asRecord(turn) - if (!turnRecord) return turn - const turnId = typeof turnRecord.id === 'string' ? turnRecord.id : '' - if (!turnId) return turn - - const captured = itemsByTurnId.get(turnId) - if (!captured || captured.length === 0) return turn - - const existingItems = Array.isArray(turnRecord.items) ? (turnRecord.items as Record[]) : [] - const existingIds = new Set(existingItems.map((it) => (typeof it.id === 'string' ? 
it.id : '')).filter(Boolean)) - - const newItems = captured - .filter((c) => !existingIds.has(c.id)) - .map((c) => c.data) - - if (newItems.length === 0) return turn - - return { - ...turnRecord, - items: [...existingItems, ...newItems], - } - }) - } - - private sendServerRequestReply(requestId: number, reply: ServerRequestReply): void { - if (reply.error) { - this.sendLine({ - jsonrpc: '2.0', - id: requestId, - error: reply.error, - }) - return - } - - this.sendLine({ - jsonrpc: '2.0', - id: requestId, - result: reply.result ?? {}, - }) - } - - private resolvePendingServerRequest(requestId: number, reply: ServerRequestReply): void { - const pendingRequest = this.pendingServerRequests.get(requestId) - if (!pendingRequest) { - throw new Error(`No pending server request found for id ${String(requestId)}`) - } - this.pendingServerRequests.delete(requestId) - - this.sendServerRequestReply(requestId, reply) - const requestParams = asRecord(pendingRequest.params) - const threadId = - typeof requestParams?.threadId === 'string' && requestParams.threadId.length > 0 - ? requestParams.threadId - : '' - this.emitNotification({ - method: 'server/request/resolved', - params: { - id: requestId, - method: pendingRequest.method, - threadId, - mode: 'manual', - resolvedAtIso: new Date().toISOString(), - }, - }) - } - - private async refreshChatgptAuthTokens(params: ChatgptAuthTokensRefreshParams): Promise { - if (!this.chatgptAuthRefreshPromise) { - this.chatgptAuthRefreshPromise = refreshChatgptAuthTokensForExternalAuth(params).finally(() => { - this.chatgptAuthRefreshPromise = null - }) - } - return await this.chatgptAuthRefreshPromise - } - - private async handleChatgptAuthTokensRefreshRequest(requestId: number, params: unknown): Promise { - const requestParams = asRecord(params) - const previousAccountId = readNonEmptyString(requestParams?.previousAccountId ?? 
requestParams?.previous_account_id) - try { - const result = await this.refreshChatgptAuthTokens({ - reason: readNonEmptyString(requestParams?.reason) || undefined, - previousAccountId: previousAccountId || undefined, - }) - this.sendServerRequestReply(requestId, { result }) - this.emitNotification({ - method: 'server/request/resolved', - params: { - id: requestId, - method: 'account/chatgptAuthTokens/refresh', - mode: 'automatic', - resolvedAtIso: new Date().toISOString(), - }, - }) - } catch (error) { - this.sendServerRequestReply(requestId, { - error: { - code: -32001, - message: getErrorMessage(error, 'Failed to refresh ChatGPT auth tokens'), - }, - }) - } - } - - private handleServerRequest(requestId: number, method: string, params: unknown): void { - if (method === 'account/chatgptAuthTokens/refresh') { - void this.handleChatgptAuthTokensRefreshRequest(requestId, params) - return - } - - const pendingRequest: PendingServerRequest = { - id: requestId, - method, - params, - receivedAtIso: new Date().toISOString(), - } - this.pendingServerRequests.set(requestId, pendingRequest) - - this.emitNotification({ - method: 'server/request', - params: pendingRequest, - }) - } - - private async call(method: string, params: unknown): Promise { - this.start() - const id = this.nextId++ - - return new Promise((resolve, reject) => { - this.pending.set(id, { resolve, reject }) - - this.sendLine({ - jsonrpc: '2.0', - id, - method, - params, - } satisfies JsonRpcCall) - }) - } - - private async ensureInitialized(): Promise { - if (this.initialized) return - if (this.initializePromise) { - await this.initializePromise - return - } - - this.initializePromise = this.call('initialize', { - clientInfo: { - name: 'codex-web-local', - version: '0.1.0', - }, - capabilities: { - experimentalApi: true, - }, - }).then(() => { - this.sendLine({ - jsonrpc: '2.0', - method: 'initialized', - }) - this.initialized = true - }).finally(() => { - this.initializePromise = null - }) - - await 
this.initializePromise - } - - async rpc(method: string, params: unknown): Promise { - await this.ensureInitialized() - return this.call(method, params) - } - - onNotification(listener: (value: { method: string; params: unknown }) => void): () => void { - this.notificationListeners.add(listener) - return () => { - this.notificationListeners.delete(listener) - } - } - - async respondToServerRequest(payload: unknown): Promise { - await this.ensureInitialized() - - const body = asRecord(payload) - if (!body) { - throw new Error('Invalid response payload: expected object') - } - - const id = body.id - if (typeof id !== 'number' || !Number.isInteger(id)) { - throw new Error('Invalid response payload: "id" must be an integer') - } - - const rawError = asRecord(body.error) - if (rawError) { - const message = typeof rawError.message === 'string' && rawError.message.trim().length > 0 - ? rawError.message.trim() - : 'Server request rejected by client' - const code = typeof rawError.code === 'number' && Number.isFinite(rawError.code) - ? 
Math.trunc(rawError.code) - : -32000 - this.resolvePendingServerRequest(id, { error: { code, message } }) - return - } - - if (!('result' in body)) { - throw new Error('Invalid response payload: expected "result" or "error"') - } - - this.resolvePendingServerRequest(id, { result: body.result }) - } - - listPendingServerRequests(): PendingServerRequest[] { - return Array.from(this.pendingServerRequests.values()) - } - - dispose(): void { - if (!this.process) return - - const proc = this.process - this.stopping = true - this.process = null - this.initialized = false - this.initializePromise = null - this.readBuffer = '' - - const failure = new Error('codex app-server stopped') - for (const request of this.pending.values()) { - request.reject(failure) - } - this.pending.clear() - this.pendingServerRequests.clear() - - try { - proc.stdin.end() - } catch { - // ignore close errors on shutdown - } - - try { - proc.kill('SIGTERM') - } catch { - // ignore kill errors on shutdown - } - - const forceKillTimer = setTimeout(() => { - if (!proc.killed) { - try { - proc.kill('SIGKILL') - } catch { - // ignore kill errors on shutdown - } - } - }, 1500) - forceKillTimer.unref() - } -} - -export class BackendQueueProcessor { - private readonly processingThreadIds = new Set() - private readonly queueDrainTimersByThreadId = new Map>() - private readonly queueDrainDueAtByThreadId = new Map() - private readonly unsubscribe: () => void - - constructor(private readonly appServer: AppServerProcess) { - this.unsubscribe = appServer.onNotification((notification) => { - if (!isTurnCompletedNotification(notification)) return - const threadId = extractThreadIdFromNotificationParams(notification.params) - if (!threadId) return - void this.processThreadQueue(threadId) - }) - void this.scheduleAllQueuedThreads(1000) - } - - dispose(): void { - this.unsubscribe() - for (const timer of this.queueDrainTimersByThreadId.values()) { - clearTimeout(timer) - } - this.queueDrainTimersByThreadId.clear() - 
this.queueDrainDueAtByThreadId.clear() - this.processingThreadIds.clear() - } - - async scheduleAllQueuedThreads(delayMs = 0): Promise { - try { - const state = await readThreadQueueState() - for (const threadId of Object.keys(state)) { - this.scheduleThreadQueueDrain(threadId, delayMs) - } - } catch { - // Queue recovery is best-effort; normal turn-completed events can still drain later. - } - } - - scheduleThreadQueueDrain(threadId: string, delayMs = 5000): void { - if (!threadId) return - const normalizedDelayMs = Math.max(0, delayMs) - const nextDueAt = Date.now() + normalizedDelayMs - const existingDueAt = this.queueDrainDueAtByThreadId.get(threadId) - const existingTimer = this.queueDrainTimersByThreadId.get(threadId) - if (existingTimer) { - if (existingDueAt !== undefined && existingDueAt <= nextDueAt) return - clearTimeout(existingTimer) - this.queueDrainTimersByThreadId.delete(threadId) - this.queueDrainDueAtByThreadId.delete(threadId) - } - const timer = setTimeout(() => { - this.queueDrainTimersByThreadId.delete(threadId) - this.queueDrainDueAtByThreadId.delete(threadId) - void this.processThreadQueue(threadId) - }, normalizedDelayMs) - timer.unref?.() - this.queueDrainTimersByThreadId.set(threadId, timer) - this.queueDrainDueAtByThreadId.set(threadId, nextDueAt) - } - - async processThreadQueue(threadId: string): Promise { - if (this.processingThreadIds.has(threadId)) return - this.processingThreadIds.add(threadId) - try { - const canStart = await this.canStartQueuedTurn(threadId) - if (!canStart) { - if (await this.hasQueuedTurns(threadId)) { - this.scheduleThreadQueueDrain(threadId) - } - return - } - const next = await this.popNextQueuedTurn(threadId) - if (!next) return - try { - await this.startQueuedTurn(next) - if (await this.hasQueuedTurns(threadId)) { - this.scheduleThreadQueueDrain(threadId) - } - } catch { - await this.restoreQueuedTurn(next) - this.scheduleThreadQueueDrain(threadId) - } - } catch { - // Queue processing is best-effort. 
Keep the bridge alive if app-server is unavailable. - this.scheduleThreadQueueDrain(threadId) - } finally { - this.processingThreadIds.delete(threadId) - } - } - - private async hasQueuedTurns(threadId: string): Promise { - const state = await readThreadQueueState() - const queue = state[threadId] - return Array.isArray(queue) && queue.length > 0 - } - - private async canStartQueuedTurn(threadId: string): Promise { - const response = asRecord(await this.appServer.rpc('thread/read', { threadId, includeTurns: true })) - const thread = asRecord(response?.thread) - if (!thread) return false - - const status = asRecord(thread.status) - const statusType = readNonEmptyString(status?.type) - if (statusType === 'inProgress' || statusType === 'running' || statusType === 'active') return false - - const turns = Array.isArray(thread.turns) ? thread.turns : [] - return !turns.some((turn) => readNonEmptyString(asRecord(turn)?.status) === 'inProgress') - } - - private async popNextQueuedTurn(threadId: string): Promise { - return withThreadQueueStateUpdate((state) => { - const queue = state[threadId] - if (!queue || queue.length === 0) { - return { nextState: state, result: null } - } - - const [message, ...rest] = queue - const nextState = { ...state } - if (rest.length > 0) { - nextState[threadId] = rest - } else { - delete nextState[threadId] - } - return { nextState, result: { threadId, message } } - }) - } - - private async restoreQueuedTurn(turn: BackendQueuedTurn): Promise { - await withThreadQueueStateUpdate((state) => { - const queue = state[turn.threadId] ?? 
[] - return { - nextState: { - ...state, - [turn.threadId]: [turn.message, ...queue], - }, - result: undefined, - } - }) - } - - private async resolveCollaborationModeSettings(mode: CollaborationModeKind): Promise { - let currentConfig: Record | null = null - try { - const configPayload = asRecord(await this.appServer.rpc('config/read', {})) - currentConfig = asRecord(configPayload?.config) - } catch { - currentConfig = null - } - - const configuredModel = readNonEmptyString(currentConfig?.model) - if (configuredModel) { - return { - model: configuredModel, - reasoningEffort: normalizeCollaborationModeReasoningEffort(normalizeReasoningEffort(currentConfig?.model_reasoning_effort)), - } - } - - try { - const modelsPayload = asRecord(await this.appServer.rpc('model/list', {})) - const models = Array.isArray(modelsPayload?.data) ? modelsPayload.data : [] - for (const row of models) { - const record = asRecord(row) - const candidate = readNonEmptyString(record?.id) || readNonEmptyString(record?.model) - if (candidate) { - return { - model: candidate, - reasoningEffort: normalizeCollaborationModeReasoningEffort(normalizeReasoningEffort(currentConfig?.model_reasoning_effort)), - } - } - } - } catch { - // Fall through to no collaboration-mode payload. - } - - throw new Error(`${mode === 'plan' ? 
'Plan' : 'Default'} mode requires an available model.`) - } - - private async buildQueuedTurnParams(turn: BackendQueuedTurn): Promise> { - const localImageAttachments: StoredQueuedMessage['fileAttachments'] = [] - for (const imageUrl of turn.message.imageUrls) { - const localImagePath = extractLocalImagePathFromUrl(imageUrl.trim()) - if (!localImagePath) continue - localImageAttachments.push({ - label: fileNameFromPath(localImagePath), - path: localImagePath, - fsPath: localImagePath, - }) - } - - const allFileAttachments = [...turn.message.fileAttachments, ...localImageAttachments] - const dedupedFileAttachments = allFileAttachments.filter((entry, index) => - allFileAttachments.findIndex((candidate) => candidate.fsPath === entry.fsPath) === index) - - const input: Array> = [{ - type: 'text', - text: buildTextWithAttachments(turn.message.text, dedupedFileAttachments), - }] - - for (const imageUrl of turn.message.imageUrls) { - const normalizedUrl = imageUrl.trim() - if (!normalizedUrl) continue - const localImagePath = extractLocalImagePathFromUrl(normalizedUrl) - if (localImagePath) { - input.push({ type: 'localImage', path: localImagePath }) - } else { - input.push({ type: 'image', url: normalizedUrl, image_url: normalizedUrl }) - } - } - - for (const skill of turn.message.skills) { - input.push({ type: 'skill', name: skill.name, path: skill.path }) - } - - const params: Record = { - threadId: turn.threadId, - input, - } - if (dedupedFileAttachments.length > 0) { - params.attachments = dedupedFileAttachments.map((f) => ({ label: f.label, path: f.path, fsPath: f.fsPath })) - } - - try { - const settings = await this.resolveCollaborationModeSettings(turn.message.collaborationMode) - params.collaborationMode = { - mode: turn.message.collaborationMode, - settings: { - model: settings.model, - reasoning_effort: settings.reasoningEffort, - developer_instructions: null, - }, - } - } catch { - // Older app-server versions still accept a plain turn/start without 
collaborationMode. - } - - return params - } - - private async startQueuedTurn(turn: BackendQueuedTurn): Promise { - await this.appServer.rpc('thread/resume', { threadId: turn.threadId }) - await this.appServer.rpc('turn/start', await this.buildQueuedTurnParams(turn)) - } -} - class MethodCatalog { private methodCache: string[] | null = null private notificationCache: string[] | null = null From d4793c7b664dac30848b952aec01c44cce457d95 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:29:42 +0700 Subject: [PATCH 11/19] Extract bridge route support helpers --- src/server/apiPerfConfig.ts | 86 +++ src/server/codexAppServerBridge.ts | 885 +-------------------------- src/server/mediaProxyRoutes.ts | 255 ++++++++ src/server/methodCatalog.ts | 125 ++++ src/server/projectCreation.ts | 117 ++++ src/server/providerModelDiscovery.ts | 212 +++++++ src/server/threadSearchIndex.ts | 197 ++++++ 7 files changed, 999 insertions(+), 878 deletions(-) create mode 100644 src/server/apiPerfConfig.ts create mode 100644 src/server/mediaProxyRoutes.ts create mode 100644 src/server/methodCatalog.ts create mode 100644 src/server/projectCreation.ts create mode 100644 src/server/providerModelDiscovery.ts create mode 100644 src/server/threadSearchIndex.ts diff --git a/src/server/apiPerfConfig.ts b/src/server/apiPerfConfig.ts new file mode 100644 index 000000000..b93b4ab82 --- /dev/null +++ b/src/server/apiPerfConfig.ts @@ -0,0 +1,86 @@ +import { readFileSync } from 'node:fs' + +const API_PERF_LOGGING_ENV_KEY = 'CODEXUI_API_PERF_LOGGING' +export const API_PERF_MS_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_MS_THRESHOLD' +export const API_PERF_BODY_MB_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_BODY_MB_THRESHOLD' +const DEFAULT_API_PERF_MS_THRESHOLD = 300 +const DEFAULT_API_PERF_BODY_MB_THRESHOLD = 1 +const MB_DIVISOR = 1024 * 1024 +function readEnvValueFromFile(filePath: string, key: string): string | null { + try { + const content = readFileSync(filePath, 'utf8') + const escapedKey = 
key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') + const match = content.match(new RegExp(`^\\s*${escapedKey}\\s*=\\s*(.+)\\s*$`, 'm')) + if (!match) return null + const rawValue = match[1]?.trim() ?? '' + if (!rawValue) return null + if ((rawValue.startsWith('"') && rawValue.endsWith('"')) || (rawValue.startsWith('\'') && rawValue.endsWith('\''))) { + return rawValue.slice(1, -1).trim() + } + return rawValue + } catch { + return null + } +} + +function parseBooleanEnvFlag(value: string | null | undefined): boolean | null { + if (!value) return null + const normalized = value.trim().toLowerCase() + if (['1', 'true', 'yes', 'on'].includes(normalized)) return true + if (['0', 'false', 'no', 'off'].includes(normalized)) return false + return null +} + +function resolveApiPerfLoggingEnabled(): boolean { + const explicitValue = parseBooleanEnvFlag(process.env[API_PERF_LOGGING_ENV_KEY]) + if (explicitValue !== null) return explicitValue + + const fromEnvLocal = parseBooleanEnvFlag(readEnvValueFromFile('.env.local', API_PERF_LOGGING_ENV_KEY)) + if (fromEnvLocal !== null) return fromEnvLocal + + const fromEnv = parseBooleanEnvFlag(readEnvValueFromFile('.env', API_PERF_LOGGING_ENV_KEY)) + if (fromEnv !== null) return fromEnv + + return false +} + +export const API_PERF_LOGGING_ENABLED = resolveApiPerfLoggingEnabled() + +function parseNumberEnvFlag(value: string | null | undefined): number | null { + if (!value) return null + const parsed = Number.parseFloat(value.trim()) + if (!Number.isFinite(parsed)) return null + return parsed +} + +function resolveNumericEnvConfig(envKey: string, fallback: number): number { + const fromProcess = parseNumberEnvFlag(process.env[envKey]) + if (fromProcess !== null) return fromProcess + + const fromEnvLocal = parseNumberEnvFlag(readEnvValueFromFile('.env.local', envKey)) + if (fromEnvLocal !== null) return fromEnvLocal + + const fromEnv = parseNumberEnvFlag(readEnvValueFromFile('.env', envKey)) + if (fromEnv !== null) return fromEnv + + return 
fallback +} + +export const API_PERF_MS_THRESHOLD = resolveNumericEnvConfig(API_PERF_MS_THRESHOLD_ENV_KEY, DEFAULT_API_PERF_MS_THRESHOLD) +export const API_PERF_BODY_MB_THRESHOLD = resolveNumericEnvConfig(API_PERF_BODY_MB_THRESHOLD_ENV_KEY, DEFAULT_API_PERF_BODY_MB_THRESHOLD) + +export function getChunkByteLength(chunk: unknown, encoding?: BufferEncoding): number { + if (typeof chunk === 'string') { + return Buffer.byteLength(chunk, encoding) + } + if (chunk instanceof Uint8Array) { + return chunk.byteLength + } + if (ArrayBuffer.isView(chunk)) { + return chunk.byteLength + } + if (chunk instanceof ArrayBuffer) { + return chunk.byteLength + } + return 0 +} diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index 26225425d..db67aab8b 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -96,6 +96,12 @@ import { readThreadQueueState, writeThreadQueueState, } from './backendQueueProcessor.js' +import { MethodCatalog } from './methodCatalog.js' +import { fetchConnectorLogo, handleFileUpload, proxyTranscribe } from './mediaProxyRoutes.js' +import { fetchCustomEndpointDefaultModel, readProviderBackedModelIds } from './providerModelDiscovery.js' +import { cloneGithubRepositoryIntoBase, createProjectlessThreadDirectory } from './projectCreation.js' +import { API_PERF_BODY_MB_THRESHOLD, API_PERF_LOGGING_ENABLED, API_PERF_MS_THRESHOLD, getChunkByteLength } from './apiPerfConfig.js' +import { buildThreadSearchIndex, isExactPhraseMatch, listFilesWithRipgrep, scoreFileCandidate, type ThreadSearchIndex } from './threadSearchIndex.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -165,117 +171,12 @@ type ChatgptAuthTokensRefreshResponse = { chatgptPlanType: string | null } -type ThreadSearchDocument = { - id: string - title: 
string - preview: string - messageText: string - searchableText: string -} - -type ThreadSearchIndex = { - docsById: Map -} - -type ProviderModelsResponse = { - data: string[] - providerId: string - source: 'provider' -} - -const PROVIDER_MODELS_FETCH_TIMEOUT_MS = 5_000 - const THREAD_RESPONSE_TURN_LIMIT = 10 const THREAD_TURN_PAGE_READ_CACHE_TTL_MS = 30_000 const THREAD_METHODS_WITH_TURNS = new Set(['thread/read', 'thread/resume', 'thread/fork', 'thread/rollback']) const THREAD_SEARCH_FULL_TEXT_THREAD_LIMIT = 100 const PROJECTLESS_THREAD_DIRECTORY_MAX_ATTEMPTS = 100 const PROJECTLESS_THREAD_SLUG_MAX_LENGTH = 80 -const API_PERF_LOGGING_ENV_KEY = 'CODEXUI_API_PERF_LOGGING' -const API_PERF_MS_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_MS_THRESHOLD' -const API_PERF_BODY_MB_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_BODY_MB_THRESHOLD' -const DEFAULT_API_PERF_MS_THRESHOLD = 300 -const DEFAULT_API_PERF_BODY_MB_THRESHOLD = 1 -const MB_DIVISOR = 1024 * 1024 -function readEnvValueFromFile(filePath: string, key: string): string | null { - try { - const content = readFileSync(filePath, 'utf8') - const escapedKey = key.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') - const match = content.match(new RegExp(`^\\s*${escapedKey}\\s*=\\s*(.+)\\s*$`, 'm')) - if (!match) return null - const rawValue = match[1]?.trim() ?? 
'' - if (!rawValue) return null - if ((rawValue.startsWith('"') && rawValue.endsWith('"')) || (rawValue.startsWith('\'') && rawValue.endsWith('\''))) { - return rawValue.slice(1, -1).trim() - } - return rawValue - } catch { - return null - } -} - -function parseBooleanEnvFlag(value: string | null | undefined): boolean | null { - if (!value) return null - const normalized = value.trim().toLowerCase() - if (['1', 'true', 'yes', 'on'].includes(normalized)) return true - if (['0', 'false', 'no', 'off'].includes(normalized)) return false - return null -} - -function resolveApiPerfLoggingEnabled(): boolean { - const explicitValue = parseBooleanEnvFlag(process.env[API_PERF_LOGGING_ENV_KEY]) - if (explicitValue !== null) return explicitValue - - const fromEnvLocal = parseBooleanEnvFlag(readEnvValueFromFile('.env.local', API_PERF_LOGGING_ENV_KEY)) - if (fromEnvLocal !== null) return fromEnvLocal - - const fromEnv = parseBooleanEnvFlag(readEnvValueFromFile('.env', API_PERF_LOGGING_ENV_KEY)) - if (fromEnv !== null) return fromEnv - - return false -} - -const API_PERF_LOGGING_ENABLED = resolveApiPerfLoggingEnabled() - -function parseNumberEnvFlag(value: string | null | undefined): number | null { - if (!value) return null - const parsed = Number.parseFloat(value.trim()) - if (!Number.isFinite(parsed)) return null - return parsed -} - -function resolveNumericEnvConfig(envKey: string, fallback: number): number { - const fromProcess = parseNumberEnvFlag(process.env[envKey]) - if (fromProcess !== null) return fromProcess - - const fromEnvLocal = parseNumberEnvFlag(readEnvValueFromFile('.env.local', envKey)) - if (fromEnvLocal !== null) return fromEnvLocal - - const fromEnv = parseNumberEnvFlag(readEnvValueFromFile('.env', envKey)) - if (fromEnv !== null) return fromEnv - - return fallback -} - -const API_PERF_MS_THRESHOLD = resolveNumericEnvConfig(API_PERF_MS_THRESHOLD_ENV_KEY, DEFAULT_API_PERF_MS_THRESHOLD) -const API_PERF_BODY_MB_THRESHOLD = 
resolveNumericEnvConfig(API_PERF_BODY_MB_THRESHOLD_ENV_KEY, DEFAULT_API_PERF_BODY_MB_THRESHOLD) - -function getChunkByteLength(chunk: unknown, encoding?: BufferEncoding): number { - if (typeof chunk === 'string') { - return Buffer.byteLength(chunk, encoding) - } - if (chunk instanceof Uint8Array) { - return chunk.byteLength - } - if (ArrayBuffer.isView(chunk)) { - return chunk.byteLength - } - if (chunk instanceof ArrayBuffer) { - return chunk.byteLength - } - return 0 -} - function asRecord(value: unknown): Record | null { return value !== null && typeof value === 'object' && !Array.isArray(value) ? (value as Record) @@ -334,333 +235,6 @@ function isTimeoutError(payload: unknown): boolean { return payload instanceof Error && (payload.name === 'AbortError' || payload.name === 'TimeoutError') } -function formatProjectlessDateSegment(date = new Date()): string { - const month = String(date.getMonth() + 1).padStart(2, '0') - const day = String(date.getDate()).padStart(2, '0') - return `${date.getFullYear()}-${month}-${day}` -} - -function buildProjectlessPromptSlug(prompt: string | null): string { - const slug = prompt - ?.toLowerCase() - .match(/[a-z0-9]+/g) - ?.slice(0, 6) - .join('-') - .slice(0, PROJECTLESS_THREAD_SLUG_MAX_LENGTH) - return slug && slug.length > 0 ? 
slug : 'new-chat' -} - -async function ensureRealDirectory(path: string, label: string): Promise { - const info = await lstat(path) - if (info.isSymbolicLink() || !info.isDirectory()) { - throw new Error(`${label} must be a real directory`) - } -} - -async function createProjectlessThreadDirectory(prompt: string | null): Promise<{ cwd: string; outputDirectory: string; workspaceRoot: string }> { - const workspaceRoot = join(homedir(), 'Documents', 'Codex') - await mkdir(workspaceRoot, { recursive: true }) - await ensureRealDirectory(workspaceRoot, 'Projectless workspace root') - - const dateDir = join(workspaceRoot, formatProjectlessDateSegment()) - await mkdir(dateDir, { recursive: true }) - await ensureRealDirectory(dateDir, 'Projectless thread date directory') - - const slug = buildProjectlessPromptSlug(prompt) - for (let index = 0; index < PROJECTLESS_THREAD_DIRECTORY_MAX_ATTEMPTS; index += 1) { - const folderName = index === 0 ? slug : `${slug}-${index + 1}` - const cwd = join(dateDir, folderName) - try { - await mkdir(cwd, { recursive: false }) - return { cwd, outputDirectory: cwd, workspaceRoot } - } catch { - try { - await stat(cwd) - } catch { - throw new Error('Failed to create new chat folder') - } - } - } - - throw new Error('Unable to create a unique new chat folder') -} - -function normalizeGithubCloneUrl(rawUrl: string): { url: string; repoName: string } { - const trimmedUrl = rawUrl.trim() - if (!trimmedUrl) throw new Error('Missing GitHub repository URL') - - const sshMatch = trimmedUrl.match(/^git@github\.com:([A-Za-z0-9_.-]+)\/([A-Za-z0-9_.-]+?)(?:\.git)?$/u) - if (sshMatch) { - const repoName = sshMatch[2] - return { url: `git@github.com:${sshMatch[1]}/${repoName}.git`, repoName } - } - - let parsed: URL - try { - parsed = new URL(trimmedUrl) - } catch { - throw new Error('Enter a valid GitHub repository URL') - } - if (parsed.hostname.toLowerCase() !== 'github.com') { - throw new Error('Only github.com repository URLs are supported') - } - const 
segments = parsed.pathname.split('/').filter(Boolean) - if (segments.length < 2) { - throw new Error('Enter a GitHub repository URL with owner and repository name') - } - const owner = segments[0] - const repoName = segments[1].replace(/\.git$/iu, '') - if (!/^[A-Za-z0-9_.-]+$/u.test(owner) || !/^[A-Za-z0-9_.-]+$/u.test(repoName)) { - throw new Error('GitHub repository owner or name contains unsupported characters') - } - return { url: `https://github.com/${owner}/${repoName}.git`, repoName } -} - -async function cloneGithubRepositoryIntoBase(rawUrl: string, rawBasePath: string): Promise { - const basePath = rawBasePath.trim() - if (!basePath) throw new Error('Missing clone destination folder') - const normalizedBasePath = isAbsolute(basePath) ? basePath : resolve(basePath) - await ensureRealDirectory(normalizedBasePath, 'Clone destination folder') - - const { url, repoName } = normalizeGithubCloneUrl(rawUrl) - const targetPath = join(normalizedBasePath, repoName) - try { - await stat(targetPath) - throw new Error(`Destination already exists: ${targetPath}`) - } catch (error) { - if ((error as NodeJS.ErrnoException)?.code !== 'ENOENT') throw error - } - - try { - await runCommand('git', ['clone', url, targetPath], { cwd: normalizedBasePath, timeoutMs: 5 * 60_000 }) - } catch (error) { - await rm(targetPath, { recursive: true, force: true }).catch(() => undefined) - throw error - } - await persistWorkspaceRoot(targetPath, '') - return targetPath -} - -function normalizeHeaderValue(value: unknown): string | null { - if (typeof value === 'string') { - const trimmed = value.trim() - return trimmed.length > 0 ? 
trimmed : null - } - if (typeof value === 'number' || typeof value === 'boolean') { - return String(value) - } - return null -} - -function normalizeQueryParams(value: unknown): URLSearchParams { - const params = new URLSearchParams() - const record = asRecord(value) - if (!record) return params - - for (const [key, rawValue] of Object.entries(record)) { - const normalized = normalizeHeaderValue(rawValue) - if (!normalized) continue - params.set(key, normalized) - } - - return params -} - -function buildProviderModelsUrl(baseUrl: string, queryParams: unknown): URL { - const url = new URL(baseUrl) - url.pathname = url.pathname.endsWith('/') ? `${url.pathname}models` : `${url.pathname}/models` - const extraParams = normalizeQueryParams(queryParams) - for (const [key, value] of extraParams.entries()) { - url.searchParams.set(key, value) - } - return url -} - -function normalizeProviderModelsData(payload: unknown): string[] { - const record = asRecord(payload) - const rows = Array.isArray(record?.data) ? record.data : null - if (!rows) { - throw new Error('provider /models payload is missing a data array') - } - - const ids: string[] = [] - for (const row of rows) { - const entry = asRecord(row) - const candidate = readNonEmptyString(entry?.id) - if (!candidate || ids.includes(candidate)) continue - ids.push(candidate) - } - return ids -} - -async function fetchCustomEndpointDefaultModel(baseUrl: string, apiKey: string): Promise { - const normalizedBaseUrl = baseUrl.trim() - if (!normalizedBaseUrl) return '' - - try { - const modelsUrl = buildProviderModelsUrl(normalizedBaseUrl, null) - const headers: Record = apiKey ? { Authorization: `Bearer ${apiKey}` } : {} - const response = await fetch(modelsUrl, { headers, signal: AbortSignal.timeout(PROVIDER_MODELS_FETCH_TIMEOUT_MS) }) - if (!response.ok) return '' - const payload = await response.json() as unknown - const modelIds = normalizeProviderModelsData(payload) - return modelIds[0] ?? 
'' - } catch { - return '' - } -} - -async function readProviderBackedModelIds(appServer: AppServerProcess): Promise { - const configPayload = asRecord(await appServer.rpc('config/read', {})) - const config = asRecord(configPayload?.config) - const providerId = readNonEmptyString(config?.model_provider) - if (!providerId) { - return { data: [], providerId: '', source: 'provider' } - } - - const providers = asRecord(config?.model_providers) - const provider = asRecord(providers?.[providerId]) - if (!provider) { - logProviderModelDiscoveryWarning('configured provider is missing from model_providers', { providerId }) - return { data: [], providerId, source: 'provider' } - } - - const wireApi = readNonEmptyString(provider.wire_api) - if (wireApi !== 'responses') { - return { data: [], providerId, source: 'provider' } - } - - const baseUrl = readNonEmptyString(provider.base_url) - if (!baseUrl) { - logProviderModelDiscoveryWarning('responses provider is missing base_url', { providerId }) - return { data: [], providerId, source: 'provider' } - } - - const headers = new Headers() - const configuredHeaders = asRecord(provider.http_headers) - if (configuredHeaders) { - for (const [key, rawValue] of Object.entries(configuredHeaders)) { - const normalized = normalizeHeaderValue(rawValue) - if (!normalized) continue - headers.set(key, normalized) - } - } - - const bearerToken = readNonEmptyString(provider.experimental_bearer_token) - if (bearerToken && !headers.has('Authorization')) { - headers.set('Authorization', `Bearer ${bearerToken}`) - } - - const envKey = readNonEmptyString(provider.env_key) - const envHttpHeaders = asRecord(provider.env_http_headers) - if (envKey || envHttpHeaders) { - logProviderModelDiscoveryWarning('provider discovery skipped env-backed auth/header expansion', { - providerId, - hasEnvKey: Boolean(envKey), - hasEnvHttpHeaders: Boolean(envHttpHeaders), - }) - } - - let requestUrl: URL - try { - requestUrl = buildProviderModelsUrl(baseUrl, 
provider.query_params) - } catch (error) { - logProviderModelDiscoveryWarning('provider /models URL was invalid', { - providerId, - error: getErrorMessage(error, 'invalid url'), - }) - return { data: [], providerId, source: 'provider' } - } - - let response: Response - try { - response = await fetch(requestUrl, { - method: 'GET', - headers, - signal: AbortSignal.timeout(PROVIDER_MODELS_FETCH_TIMEOUT_MS), - }) - } catch (error) { - logProviderModelDiscoveryWarning('provider /models request failed', { - providerId, - error: isTimeoutError(error) ? `request timed out after ${PROVIDER_MODELS_FETCH_TIMEOUT_MS}ms` : getErrorMessage(error, 'network error'), - }) - return { data: [], providerId, source: 'provider' } - } - - let payload: unknown = null - try { - payload = await response.json() - } catch (error) { - logProviderModelDiscoveryWarning('provider /models response was not valid JSON', { - providerId, - status: response.status, - error: getErrorMessage(error, 'invalid json'), - }) - return { data: [], providerId, source: 'provider' } - } - - if (!response.ok) { - logProviderModelDiscoveryWarning('provider /models request returned non-2xx', { - providerId, - status: response.status, - statusText: response.statusText, - }) - return { data: [], providerId, source: 'provider' } - } - - try { - return { - data: normalizeProviderModelsData(payload), - providerId, - source: 'provider', - } - } catch (error) { - logProviderModelDiscoveryWarning('provider /models payload was invalid', { - providerId, - error: getErrorMessage(error, 'invalid payload'), - }) - return { data: [], providerId, source: 'provider' } - } -} - -function extractThreadMessageText(threadReadPayload: unknown): string { - const payload = asRecord(threadReadPayload) - const thread = asRecord(payload?.thread) - const turns = Array.isArray(thread?.turns) ? 
thread.turns : [] - const parts: string[] = [] - - for (const turn of turns) { - const turnRecord = asRecord(turn) - const items = Array.isArray(turnRecord?.items) ? turnRecord.items : [] - for (const item of items) { - const itemRecord = asRecord(item) - const type = typeof itemRecord?.type === 'string' ? itemRecord.type : '' - if (type === 'agentMessage' && typeof itemRecord?.text === 'string' && itemRecord.text.trim().length > 0) { - parts.push(itemRecord.text.trim()) - continue - } - if (type === 'userMessage') { - const content = Array.isArray(itemRecord?.content) ? itemRecord.content : [] - for (const block of content) { - const blockRecord = asRecord(block) - if (blockRecord?.type === 'text' && typeof blockRecord.text === 'string' && blockRecord.text.trim().length > 0) { - parts.push(blockRecord.text.trim()) - } - } - continue - } - if (type === 'commandExecution') { - const command = typeof itemRecord?.command === 'string' ? itemRecord.command.trim() : '' - const output = typeof itemRecord?.aggregatedOutput === 'string' ? itemRecord.aggregatedOutput.trim() : '' - if (command) parts.push(command) - if (output) parts.push(output) - } - } - } - - return parts.join('\n').trim() -} - function readNonEmptyString(value: unknown): string { return typeof value === 'string' && value.trim().length > 0 ? 
value : '' } @@ -723,62 +297,6 @@ export async function callRpcWithArchiveRecovery( } } -function isExactPhraseMatch(query: string, doc: ThreadSearchDocument): boolean { - const q = query.trim().toLowerCase() - if (!q) return false - return ( - doc.title.toLowerCase().includes(q) || - doc.preview.toLowerCase().includes(q) || - doc.messageText.toLowerCase().includes(q) - ) -} - -function scoreFileCandidate(path: string, query: string): number { - if (!query) return 0 - const lowerPath = path.toLowerCase() - const lowerQuery = query.toLowerCase() - const baseName = lowerPath.slice(lowerPath.lastIndexOf('/') + 1) - if (baseName === lowerQuery) return 0 - if (baseName.startsWith(lowerQuery)) return 1 - if (baseName.includes(lowerQuery)) return 2 - if (lowerPath.includes(`/${lowerQuery}`)) return 3 - if (lowerPath.includes(lowerQuery)) return 4 - return 10 -} - -async function listFilesWithRipgrep(cwd: string): Promise { - return await new Promise((resolve, reject) => { - const ripgrepCommand = resolveRipgrepCommand() - if (!ripgrepCommand) { - reject(new Error('ripgrep (rg) is not available')) - return - } - - const proc = spawn(ripgrepCommand, ['--files', '--hidden', '-g', '!.git', '-g', '!node_modules'], { - cwd, - env: process.env, - stdio: ['ignore', 'pipe', 'pipe'], - }) - let stdout = '' - let stderr = '' - proc.stdout.on('data', (chunk: Buffer) => { stdout += chunk.toString() }) - proc.stderr.on('data', (chunk: Buffer) => { stderr += chunk.toString() }) - proc.on('error', reject) - proc.on('close', (code) => { - if (code === 0) { - const rows = stdout - .split(/\r?\n/) - .map((line) => line.trim()) - .filter(Boolean) - resolve(rows) - return - } - const details = [stderr.trim(), stdout.trim()].filter(Boolean).join('\n') - reject(new Error(details || 'rg --files failed')) - }) - }) -} - function getCodexHomeDir(): string { const codexHome = process.env.CODEX_HOME?.trim() return codexHome && codexHome.length > 0 ? 
codexHome : join(homedir(), '.codex') @@ -1458,319 +976,6 @@ async function readRawBody(req: IncomingMessage): Promise { return Buffer.concat(chunks) } -function bufferIndexOf(buf: Buffer, needle: Buffer, start = 0): number { - for (let i = start; i <= buf.length - needle.length; i++) { - let match = true - for (let j = 0; j < needle.length; j++) { - if (buf[i + j] !== needle[j]) { match = false; break } - } - if (match) return i - } - return -1 -} - -function handleFileUpload(req: IncomingMessage, res: ServerResponse): void { - const chunks: Buffer[] = [] - req.on('data', (chunk: Buffer) => chunks.push(chunk)) - req.on('end', async () => { - try { - const body = Buffer.concat(chunks) - const contentType = req.headers['content-type'] ?? '' - const boundaryMatch = contentType.match(/boundary=(.+)/i) - if (!boundaryMatch) { setJson(res, 400, { error: 'Missing multipart boundary' }); return } - const boundary = boundaryMatch[1] - const boundaryBuf = Buffer.from(`--${boundary}`) - const parts: Buffer[] = [] - let searchStart = 0 - while (searchStart < body.length) { - const idx = body.indexOf(boundaryBuf, searchStart) - if (idx < 0) break - if (searchStart > 0) parts.push(body.subarray(searchStart, idx)) - searchStart = idx + boundaryBuf.length - if (body[searchStart] === 0x0d && body[searchStart + 1] === 0x0a) searchStart += 2 - } - let fileName = 'uploaded-file' - let fileData: Buffer | null = null - const headerSep = Buffer.from('\r\n\r\n') - for (const part of parts) { - const headerEnd = bufferIndexOf(part, headerSep) - if (headerEnd < 0) continue - const headers = part.subarray(0, headerEnd).toString('utf8') - const fnMatch = headers.match(/filename="([^"]+)"/i) - if (!fnMatch) continue - fileName = fnMatch[1].replace(/[/\\]/g, '_') - let end = part.length - if (end >= 2 && part[end - 2] === 0x0d && part[end - 1] === 0x0a) end -= 2 - fileData = part.subarray(headerEnd + 4, end) - break - } - if (!fileData) { setJson(res, 400, { error: 'No file in request' }); 
return } - const uploadDir = join(tmpdir(), 'codex-web-uploads') - await mkdir(uploadDir, { recursive: true }) - const destDir = await mkdtemp(join(uploadDir, 'f-')) - const destPath = join(destDir, fileName) - await writeFile(destPath, fileData) - setJson(res, 200, { path: destPath }) - } catch (err) { - setJson(res, 500, { error: getErrorMessage(err, 'Upload failed') }) - } - }) - req.on('error', (err: Error) => { - setJson(res, 500, { error: getErrorMessage(err, 'Upload stream error') }) - }) -} - -function httpPost( - url: string, - headers: Record, - body: Buffer, -): Promise<{ status: number; body: string }> { - const doRequest = url.startsWith('http://') ? httpRequest : httpsRequest - return new Promise((resolve, reject) => { - const req = doRequest(url, { method: 'POST', headers }, (res) => { - const chunks: Buffer[] = [] - res.on('data', (c: Buffer) => chunks.push(c)) - res.on('end', () => resolve({ status: res.statusCode ?? 500, body: Buffer.concat(chunks).toString('utf8') })) - res.on('error', reject) - }) - req.on('error', reject) - req.write(body) - req.end() - }) -} - -let curlImpersonateAvailable: boolean | null = null - -function curlImpersonatePost( - url: string, - headers: Record, - body: Buffer, -): Promise<{ status: number; body: string }> { - return new Promise((resolve, reject) => { - const args = ['-s', '-w', '\n%{http_code}', '-X', 'POST', url] - for (const [k, v] of Object.entries(headers)) { - if (k.toLowerCase() === 'content-length') continue - args.push('-H', `${k}: ${String(v)}`) - } - args.push('--data-binary', '@-') - const proc = spawn('curl-impersonate-chrome', args, { - env: { ...process.env, CURL_IMPERSONATE: 'chrome116' }, - stdio: ['pipe', 'pipe', 'pipe'], - }) - const chunks: Buffer[] = [] - proc.stdout.on('data', (c: Buffer) => chunks.push(c)) - proc.on('error', (e) => { - curlImpersonateAvailable = false - reject(e) - }) - proc.on('close', (code) => { - const raw = Buffer.concat(chunks).toString('utf8') - const lastNewline = 
raw.lastIndexOf('\n') - const statusStr = lastNewline >= 0 ? raw.slice(lastNewline + 1).trim() : '' - const responseBody = lastNewline >= 0 ? raw.slice(0, lastNewline) : raw - const status = parseInt(statusStr, 10) || (code === 0 ? 200 : 500) - curlImpersonateAvailable = true - resolve({ status, body: responseBody }) - }) - proc.stdin.write(body) - proc.stdin.end() - }) -} - -async function proxyTranscribe( - body: Buffer, - contentType: string, - authToken: string, - accountId?: string, -): Promise<{ status: number; body: string }> { - const chatgptHeaders: Record = { - 'Content-Type': contentType, - 'Content-Length': body.length, - Authorization: `Bearer ${authToken}`, - originator: 'Codex Desktop', - 'User-Agent': `Codex Desktop/0.1.0 (${process.platform}; ${process.arch})`, - } - if (accountId) chatgptHeaders['ChatGPT-Account-Id'] = accountId - - const postFn = curlImpersonateAvailable !== false ? curlImpersonatePost : httpPost - let result: { status: number; body: string } - try { - result = await postFn('https://chatgpt.com/backend-api/transcribe', chatgptHeaders, body) - } catch { - result = await httpPost('https://chatgpt.com/backend-api/transcribe', chatgptHeaders, body) - } - - if (result.status === 403 && result.body.includes('cf_chl')) { - if (curlImpersonateAvailable !== false && postFn !== curlImpersonatePost) { - try { - const ciResult = await curlImpersonatePost('https://chatgpt.com/backend-api/transcribe', chatgptHeaders, body) - if (ciResult.status !== 403) return ciResult - } catch {} - } - return { status: 503, body: JSON.stringify({ error: 'Transcription blocked by Cloudflare. Install curl-impersonate-chrome.' }) } - } - - return result -} - -function parseConnectorLogoUrl(rawUrl: string): { connectorId: string; theme: 'light' | 'dark' } | null { - const trimmed = rawUrl.trim() - if (!trimmed.startsWith('connectors://')) return null - const rest = trimmed.slice('connectors://'.length) - const connectorId = (rest.split(/[/?#]/u)[0] ?? 
'').trim() - if (!connectorId) return null - const query = rest.includes('?') ? rest.slice(rest.indexOf('?') + 1).split('#')[0] ?? '' : '' - const theme = new URLSearchParams(query).get('theme')?.toLowerCase() === 'dark' ? 'dark' : 'light' - return { connectorId, theme } -} - -async function fetchConnectorLogo(rawUrl: string): Promise<{ contentType: string; body: Buffer }> { - const parsed = parseConnectorLogoUrl(rawUrl) - if (!parsed) throw new Error('Unsupported connector logo URL') - const auth = await readCodexAuth() - if (!auth) throw new Error('No auth token available for connector logo') - - const endpoint = `https://chatgpt.com/backend-api/aip/connectors/${encodeURIComponent(parsed.connectorId)}/logo?theme=${parsed.theme}` - const response = await fetch(endpoint, { - headers: { - Authorization: `Bearer ${auth.accessToken}`, - originator: 'Codex Desktop', - 'User-Agent': `Codex Desktop/0.1.0 (${process.platform}; ${process.arch})`, - ...(auth.accountId ? { 'ChatGPT-Account-Id': auth.accountId } : {}), - }, - signal: AbortSignal.timeout(10_000), - }) - if (!response.ok) throw new Error(`Connector logo fetch failed (${response.status})`) - - const contentType = response.headers.get('content-type') ?? '' - if (contentType.includes('application/json')) { - const payload = asRecord(await response.json()) - const body = asRecord(payload?.body) - const base64 = readNonEmptyString(body?.base64) - const nestedContentType = readNonEmptyString(body?.contentType) ?? 
readNonEmptyString(body?.content_type) - if (!base64 || !nestedContentType) throw new Error('Connector logo response was missing image data') - return { contentType: nestedContentType, body: Buffer.from(base64, 'base64') } - } - - return { - contentType: contentType || 'image/png', - body: Buffer.from(await response.arrayBuffer()), - } -} - -class MethodCatalog { - private methodCache: string[] | null = null - private notificationCache: string[] | null = null - - private async runGenerateSchemaCommand(outDir: string): Promise { - await new Promise((resolve, reject) => { - const codexCommand = resolveCodexCommand() - if (!codexCommand) { - reject(new Error('Codex CLI is not available. Install @openai/codex or set CODEXUI_CODEX_COMMAND.')) - return - } - - const invocation = getSpawnInvocation(codexCommand, ['app-server', 'generate-json-schema', '--out', outDir]) - const process = spawn(invocation.command, invocation.args, { - stdio: ['ignore', 'ignore', 'pipe'], - }) - - let stderr = '' - - process.stderr.setEncoding('utf8') - process.stderr.on('data', (chunk: string) => { - stderr += chunk - }) - - process.on('error', reject) - process.on('exit', (code) => { - if (code === 0) { - resolve() - return - } - - reject(new Error(stderr.trim() || `generate-json-schema exited with code ${String(code)}`)) - }) - }) - } - - private extractMethodsFromClientRequest(payload: unknown): string[] { - const root = asRecord(payload) - const oneOf = Array.isArray(root?.oneOf) ? root.oneOf : [] - const methods = new Set() - - for (const entry of oneOf) { - const row = asRecord(entry) - const properties = asRecord(row?.properties) - const methodDef = asRecord(properties?.method) - const methodEnum = Array.isArray(methodDef?.enum) ? 
methodDef.enum : [] - - for (const item of methodEnum) { - if (typeof item === 'string' && item.length > 0) { - methods.add(item) - } - } - } - - return Array.from(methods).sort((a, b) => a.localeCompare(b)) - } - - private extractMethodsFromServerNotification(payload: unknown): string[] { - const root = asRecord(payload) - const oneOf = Array.isArray(root?.oneOf) ? root.oneOf : [] - const methods = new Set() - - for (const entry of oneOf) { - const row = asRecord(entry) - const properties = asRecord(row?.properties) - const methodDef = asRecord(properties?.method) - const methodEnum = Array.isArray(methodDef?.enum) ? methodDef.enum : [] - - for (const item of methodEnum) { - if (typeof item === 'string' && item.length > 0) { - methods.add(item) - } - } - } - - return Array.from(methods).sort((a, b) => a.localeCompare(b)) - } - - async listMethods(): Promise { - if (this.methodCache) { - return this.methodCache - } - - const outDir = await mkdtemp(join(tmpdir(), 'codex-web-local-schema-')) - await this.runGenerateSchemaCommand(outDir) - - const clientRequestPath = join(outDir, 'ClientRequest.json') - const raw = await readFile(clientRequestPath, 'utf8') - const parsed = JSON.parse(raw) as unknown - const methods = this.extractMethodsFromClientRequest(parsed) - - this.methodCache = methods - return methods - } - - async listNotificationMethods(): Promise { - if (this.notificationCache) { - return this.notificationCache - } - - const outDir = await mkdtemp(join(tmpdir(), 'codex-web-local-schema-')) - await this.runGenerateSchemaCommand(outDir) - - const serverNotificationPath = join(outDir, 'ServerNotification.json') - const raw = await readFile(serverNotificationPath, 'utf8') - const parsed = JSON.parse(raw) as unknown - const methods = this.extractMethodsFromServerNotification(parsed) - - this.notificationCache = methods - return methods - } -} - type CodexBridgeMiddleware = ((req: IncomingMessage, res: ServerResponse, next: () => void) => Promise) & { dispose: () 
=> void subscribeNotifications: (listener: (value: { method: string; params: unknown; atIso: string }) => void) => () => void @@ -1822,82 +1027,6 @@ function getSharedBridgeState(): SharedBridgeState { return created } -async function loadAllThreadsForSearch(appServer: AppServerProcess): Promise { - const threads: Array<{ id: string; title: string; preview: string }> = [] - let cursor: string | null = null - - do { - const response = asRecord(await appServer.rpc('thread/list', { - archived: false, - limit: 100, - sortKey: 'updated_at', - modelProviders: [], - cursor, - })) - const data = Array.isArray(response?.data) ? response.data : [] - for (const row of data) { - const record = asRecord(row) - const id = typeof record?.id === 'string' ? record.id : '' - if (!id) continue - const title = typeof record?.name === 'string' && record.name.trim().length > 0 - ? record.name.trim() - : (typeof record?.preview === 'string' && record.preview.trim().length > 0 ? record.preview.trim() : 'Untitled thread') - const preview = typeof record?.preview === 'string' ? record.preview : '' - threads.push({ id, title, preview }) - } - cursor = typeof response?.nextCursor === 'string' && response.nextCursor.length > 0 ? 
response.nextCursor : null - } while (cursor) - - const docs: ThreadSearchDocument[] = threads.map((thread) => { - const searchableText = [thread.title, thread.preview].filter(Boolean).join('\n') - return { - id: thread.id, - title: thread.title, - preview: thread.preview, - messageText: '', - searchableText, - } satisfies ThreadSearchDocument - }) - - const docsById = new Map(docs.map((doc) => [doc.id, doc])) - const fullTextThreads = threads.slice(0, THREAD_SEARCH_FULL_TEXT_THREAD_LIMIT) - const concurrency = 4 - for (let offset = 0; offset < fullTextThreads.length; offset += concurrency) { - const batch = fullTextThreads.slice(offset, offset + concurrency) - const loaded = await Promise.all(batch.map(async (thread) => { - try { - const readResponse = await appServer.rpc('thread/read', { - threadId: thread.id, - includeTurns: true, - }) - const messageText = extractThreadMessageText(readResponse) - const searchableText = [thread.title, thread.preview, messageText].filter(Boolean).join('\n') - return [thread.id, { - id: thread.id, - title: thread.title, - preview: thread.preview, - messageText, - searchableText, - } satisfies ThreadSearchDocument] as const - } catch { - return null - } - })) - for (const row of loaded) { - if (!row) continue - docsById.set(row[0], row[1]) - } - } - - return Array.from(docsById.values()) -} - -async function buildThreadSearchIndex(appServer: AppServerProcess): Promise { - const docs = await loadAllThreadsForSearch(appServer) - const docsById = new Map(docs.map((doc) => [doc.id, doc])) - return { docsById } -} - export function createCodexBridgeMiddleware(): CodexBridgeMiddleware { const { appServer, terminalManager, methodCatalog, telegramBridge, backendQueueProcessor } = getSharedBridgeState() let threadSearchIndex: ThreadSearchIndex | null = null @@ -3402,7 +2531,7 @@ export function createCodexBridgeMiddleware(): CodexBridgeMiddleware { const repoUrl = typeof payload?.url === 'string' ? 
payload.url.trim() : '' const basePath = typeof payload?.basePath === 'string' ? payload.basePath.trim() : '' try { - const clonedPath = await cloneGithubRepositoryIntoBase(repoUrl, basePath) + const clonedPath = await cloneGithubRepositoryIntoBase(repoUrl, basePath, { runCommand, persistWorkspaceRoot }) setJson(res, 200, { data: { path: clonedPath } }) } catch (error) { setJson(res, 400, { error: error instanceof Error ? error.message : 'Failed to clone GitHub repository' }) diff --git a/src/server/mediaProxyRoutes.ts b/src/server/mediaProxyRoutes.ts new file mode 100644 index 000000000..546327066 --- /dev/null +++ b/src/server/mediaProxyRoutes.ts @@ -0,0 +1,255 @@ +import { spawn } from 'node:child_process' +import { request as httpRequest } from 'node:http' +import type { IncomingMessage, ServerResponse } from 'node:http' +import { request as httpsRequest } from 'node:https' +import { readFile, mkdir, mkdtemp, writeFile } from 'node:fs/promises' +import { homedir, tmpdir } from 'node:os' +import { join } from 'node:path' + +function getCodexHomeDir(): string { + const codexHome = process.env.CODEX_HOME?.trim() + return codexHome && codexHome.length > 0 ? codexHome : join(homedir(), '.codex') +} + +function getCodexAuthPath(): string { + return join(getCodexHomeDir(), 'auth.json') +} + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? 
value : '' +} + +function getErrorMessage(payload: unknown, fallback: string): string { + if (payload instanceof Error && payload.message.trim().length > 0) return payload.message + const record = asRecord(payload) + if (!record) return fallback + const error = record.error + if (typeof error === 'string' && error.length > 0) return error + const nestedError = asRecord(error) + if (nestedError && typeof nestedError.message === 'string' && nestedError.message.length > 0) return nestedError.message + return fallback +} + +function setJson(res: ServerResponse, statusCode: number, payload: unknown): void { + res.statusCode = statusCode + res.setHeader('Content-Type', 'application/json; charset=utf-8') + res.end(JSON.stringify(payload)) +} + +async function readCodexAuth(): Promise<{ accessToken: string; accountId?: string } | null> { + try { + const raw = await readFile(getCodexAuthPath(), 'utf8') + const auth = JSON.parse(raw) as { tokens?: { access_token?: string; account_id?: string } } + const token = auth.tokens?.access_token + if (!token) return null + return { accessToken: token, accountId: auth.tokens?.account_id ?? undefined } + } catch { + return null + } +} + +function bufferIndexOf(buf: Buffer, needle: Buffer, start = 0): number { + for (let i = start; i <= buf.length - needle.length; i++) { + let match = true + for (let j = 0; j < needle.length; j++) { + if (buf[i + j] !== needle[j]) { match = false; break } + } + if (match) return i + } + return -1 +} + +export function handleFileUpload(req: IncomingMessage, res: ServerResponse): void { + const chunks: Buffer[] = [] + req.on('data', (chunk: Buffer) => chunks.push(chunk)) + req.on('end', async () => { + try { + const body = Buffer.concat(chunks) + const contentType = req.headers['content-type'] ?? 
'' + const boundaryMatch = contentType.match(/boundary=(.+)/i) + if (!boundaryMatch) { setJson(res, 400, { error: 'Missing multipart boundary' }); return } + const boundary = boundaryMatch[1] + const boundaryBuf = Buffer.from(`--${boundary}`) + const parts: Buffer[] = [] + let searchStart = 0 + while (searchStart < body.length) { + const idx = body.indexOf(boundaryBuf, searchStart) + if (idx < 0) break + if (searchStart > 0) parts.push(body.subarray(searchStart, idx)) + searchStart = idx + boundaryBuf.length + if (body[searchStart] === 0x0d && body[searchStart + 1] === 0x0a) searchStart += 2 + } + let fileName = 'uploaded-file' + let fileData: Buffer | null = null + const headerSep = Buffer.from('\r\n\r\n') + for (const part of parts) { + const headerEnd = bufferIndexOf(part, headerSep) + if (headerEnd < 0) continue + const headers = part.subarray(0, headerEnd).toString('utf8') + const fnMatch = headers.match(/filename="([^"]+)"/i) + if (!fnMatch) continue + fileName = fnMatch[1].replace(/[/\\]/g, '_') + let end = part.length + if (end >= 2 && part[end - 2] === 0x0d && part[end - 1] === 0x0a) end -= 2 + fileData = part.subarray(headerEnd + 4, end) + break + } + if (!fileData) { setJson(res, 400, { error: 'No file in request' }); return } + const uploadDir = join(tmpdir(), 'codex-web-uploads') + await mkdir(uploadDir, { recursive: true }) + const destDir = await mkdtemp(join(uploadDir, 'f-')) + const destPath = join(destDir, fileName) + await writeFile(destPath, fileData) + setJson(res, 200, { path: destPath }) + } catch (err) { + setJson(res, 500, { error: getErrorMessage(err, 'Upload failed') }) + } + }) + req.on('error', (err: Error) => { + setJson(res, 500, { error: getErrorMessage(err, 'Upload stream error') }) + }) +} + +function httpPost( + url: string, + headers: Record, + body: Buffer, +): Promise<{ status: number; body: string }> { + const doRequest = url.startsWith('http://') ? 
httpRequest : httpsRequest + return new Promise((resolve, reject) => { + const req = doRequest(url, { method: 'POST', headers }, (res) => { + const chunks: Buffer[] = [] + res.on('data', (c: Buffer) => chunks.push(c)) + res.on('end', () => resolve({ status: res.statusCode ?? 500, body: Buffer.concat(chunks).toString('utf8') })) + res.on('error', reject) + }) + req.on('error', reject) + req.write(body) + req.end() + }) +} + +let curlImpersonateAvailable: boolean | null = null + +function curlImpersonatePost( + url: string, + headers: Record, + body: Buffer, +): Promise<{ status: number; body: string }> { + return new Promise((resolve, reject) => { + const args = ['-s', '-w', '\n%{http_code}', '-X', 'POST', url] + for (const [k, v] of Object.entries(headers)) { + if (k.toLowerCase() === 'content-length') continue + args.push('-H', `${k}: ${String(v)}`) + } + args.push('--data-binary', '@-') + const proc = spawn('curl-impersonate-chrome', args, { + env: { ...process.env, CURL_IMPERSONATE: 'chrome116' }, + stdio: ['pipe', 'pipe', 'pipe'], + }) + const chunks: Buffer[] = [] + proc.stdout.on('data', (c: Buffer) => chunks.push(c)) + proc.on('error', (e) => { + curlImpersonateAvailable = false + reject(e) + }) + proc.on('close', (code) => { + const raw = Buffer.concat(chunks).toString('utf8') + const lastNewline = raw.lastIndexOf('\n') + const statusStr = lastNewline >= 0 ? raw.slice(lastNewline + 1).trim() : '' + const responseBody = lastNewline >= 0 ? raw.slice(0, lastNewline) : raw + const status = parseInt(statusStr, 10) || (code === 0 ? 
200 : 500) + curlImpersonateAvailable = true + resolve({ status, body: responseBody }) + }) + proc.stdin.write(body) + proc.stdin.end() + }) +} + +export async function proxyTranscribe( + body: Buffer, + contentType: string, + authToken: string, + accountId?: string, +): Promise<{ status: number; body: string }> { + const chatgptHeaders: Record = { + 'Content-Type': contentType, + 'Content-Length': body.length, + Authorization: `Bearer ${authToken}`, + originator: 'Codex Desktop', + 'User-Agent': `Codex Desktop/0.1.0 (${process.platform}; ${process.arch})`, + } + if (accountId) chatgptHeaders['ChatGPT-Account-Id'] = accountId + + const postFn = curlImpersonateAvailable !== false ? curlImpersonatePost : httpPost + let result: { status: number; body: string } + try { + result = await postFn('https://chatgpt.com/backend-api/transcribe', chatgptHeaders, body) + } catch { + result = await httpPost('https://chatgpt.com/backend-api/transcribe', chatgptHeaders, body) + } + + if (result.status === 403 && result.body.includes('cf_chl')) { + if (curlImpersonateAvailable !== false && postFn !== curlImpersonatePost) { + try { + const ciResult = await curlImpersonatePost('https://chatgpt.com/backend-api/transcribe', chatgptHeaders, body) + if (ciResult.status !== 403) return ciResult + } catch {} + } + return { status: 503, body: JSON.stringify({ error: 'Transcription blocked by Cloudflare. Install curl-impersonate-chrome.' }) } + } + + return result +} + +function parseConnectorLogoUrl(rawUrl: string): { connectorId: string; theme: 'light' | 'dark' } | null { + const trimmed = rawUrl.trim() + if (!trimmed.startsWith('connectors://')) return null + const rest = trimmed.slice('connectors://'.length) + const connectorId = (rest.split(/[/?#]/u)[0] ?? '').trim() + if (!connectorId) return null + const query = rest.includes('?') ? rest.slice(rest.indexOf('?') + 1).split('#')[0] ?? '' : '' + const theme = new URLSearchParams(query).get('theme')?.toLowerCase() === 'dark' ? 
'dark' : 'light' + return { connectorId, theme } +} + +export async function fetchConnectorLogo(rawUrl: string): Promise<{ contentType: string; body: Buffer }> { + const parsed = parseConnectorLogoUrl(rawUrl) + if (!parsed) throw new Error('Unsupported connector logo URL') + const auth = await readCodexAuth() + if (!auth) throw new Error('No auth token available for connector logo') + + const endpoint = `https://chatgpt.com/backend-api/aip/connectors/${encodeURIComponent(parsed.connectorId)}/logo?theme=${parsed.theme}` + const response = await fetch(endpoint, { + headers: { + Authorization: `Bearer ${auth.accessToken}`, + originator: 'Codex Desktop', + 'User-Agent': `Codex Desktop/0.1.0 (${process.platform}; ${process.arch})`, + ...(auth.accountId ? { 'ChatGPT-Account-Id': auth.accountId } : {}), + }, + signal: AbortSignal.timeout(10_000), + }) + if (!response.ok) throw new Error(`Connector logo fetch failed (${response.status})`) + + const contentType = response.headers.get('content-type') ?? '' + if (contentType.includes('application/json')) { + const payload = asRecord(await response.json()) + const body = asRecord(payload?.body) + const base64 = readNonEmptyString(body?.base64) + const nestedContentType = readNonEmptyString(body?.contentType) ?? 
readNonEmptyString(body?.content_type) + if (!base64 || !nestedContentType) throw new Error('Connector logo response was missing image data') + return { contentType: nestedContentType, body: Buffer.from(base64, 'base64') } + } + + return { + contentType: contentType || 'image/png', + body: Buffer.from(await response.arrayBuffer()), + } +} diff --git a/src/server/methodCatalog.ts b/src/server/methodCatalog.ts new file mode 100644 index 000000000..5dcc79ecc --- /dev/null +++ b/src/server/methodCatalog.ts @@ -0,0 +1,125 @@ +import { spawn } from 'node:child_process' +import { mkdtemp, readFile } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import { join } from 'node:path' +import { resolveCodexCommand } from '../commandResolution.js' +import { getSpawnInvocation } from '../utils/commandInvocation.js' + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +export class MethodCatalog { + private methodCache: string[] | null = null + private notificationCache: string[] | null = null + + private async runGenerateSchemaCommand(outDir: string): Promise { + await new Promise((resolve, reject) => { + const codexCommand = resolveCodexCommand() + if (!codexCommand) { + reject(new Error('Codex CLI is not available. 
Install @openai/codex or set CODEXUI_CODEX_COMMAND.')) + return + } + + const invocation = getSpawnInvocation(codexCommand, ['app-server', 'generate-json-schema', '--out', outDir]) + const process = spawn(invocation.command, invocation.args, { + stdio: ['ignore', 'ignore', 'pipe'], + }) + + let stderr = '' + + process.stderr.setEncoding('utf8') + process.stderr.on('data', (chunk: string) => { + stderr += chunk + }) + + process.on('error', reject) + process.on('exit', (code) => { + if (code === 0) { + resolve() + return + } + + reject(new Error(stderr.trim() || `generate-json-schema exited with code ${String(code)}`)) + }) + }) + } + + private extractMethodsFromClientRequest(payload: unknown): string[] { + const root = asRecord(payload) + const oneOf = Array.isArray(root?.oneOf) ? root.oneOf : [] + const methods = new Set() + + for (const entry of oneOf) { + const row = asRecord(entry) + const properties = asRecord(row?.properties) + const methodDef = asRecord(properties?.method) + const methodEnum = Array.isArray(methodDef?.enum) ? methodDef.enum : [] + + for (const item of methodEnum) { + if (typeof item === 'string' && item.length > 0) { + methods.add(item) + } + } + } + + return Array.from(methods).sort((a, b) => a.localeCompare(b)) + } + + private extractMethodsFromServerNotification(payload: unknown): string[] { + const root = asRecord(payload) + const oneOf = Array.isArray(root?.oneOf) ? root.oneOf : [] + const methods = new Set() + + for (const entry of oneOf) { + const row = asRecord(entry) + const properties = asRecord(row?.properties) + const methodDef = asRecord(properties?.method) + const methodEnum = Array.isArray(methodDef?.enum) ? 
methodDef.enum : [] + + for (const item of methodEnum) { + if (typeof item === 'string' && item.length > 0) { + methods.add(item) + } + } + } + + return Array.from(methods).sort((a, b) => a.localeCompare(b)) + } + + async listMethods(): Promise { + if (this.methodCache) { + return this.methodCache + } + + const outDir = await mkdtemp(join(tmpdir(), 'codex-web-local-schema-')) + await this.runGenerateSchemaCommand(outDir) + + const clientRequestPath = join(outDir, 'ClientRequest.json') + const raw = await readFile(clientRequestPath, 'utf8') + const parsed = JSON.parse(raw) as unknown + const methods = this.extractMethodsFromClientRequest(parsed) + + this.methodCache = methods + return methods + } + + async listNotificationMethods(): Promise { + if (this.notificationCache) { + return this.notificationCache + } + + const outDir = await mkdtemp(join(tmpdir(), 'codex-web-local-schema-')) + await this.runGenerateSchemaCommand(outDir) + + const serverNotificationPath = join(outDir, 'ServerNotification.json') + const raw = await readFile(serverNotificationPath, 'utf8') + const parsed = JSON.parse(raw) as unknown + const methods = this.extractMethodsFromServerNotification(parsed) + + this.notificationCache = methods + return methods + } +} diff --git a/src/server/projectCreation.ts b/src/server/projectCreation.ts new file mode 100644 index 000000000..785dfd129 --- /dev/null +++ b/src/server/projectCreation.ts @@ -0,0 +1,117 @@ +import { lstat, mkdir, rm, stat } from 'node:fs/promises' +import { homedir } from 'node:os' +import { isAbsolute, join, resolve } from 'node:path' + +const PROJECTLESS_THREAD_DIRECTORY_MAX_ATTEMPTS = 100 +const PROJECTLESS_THREAD_SLUG_MAX_LENGTH = 80 + +function formatProjectlessDateSegment(date = new Date()): string { + const month = String(date.getMonth() + 1).padStart(2, '0') + const day = String(date.getDate()).padStart(2, '0') + return `${date.getFullYear()}-${month}-${day}` +} + +function buildProjectlessPromptSlug(prompt: string | null): 
string { + const slug = prompt + ?.toLowerCase() + .match(/[a-z0-9]+/g) + ?.slice(0, 6) + .join('-') + .slice(0, PROJECTLESS_THREAD_SLUG_MAX_LENGTH) + return slug && slug.length > 0 ? slug : 'new-chat' +} + +async function ensureRealDirectory(path: string, label: string): Promise { + const info = await lstat(path) + if (info.isSymbolicLink() || !info.isDirectory()) { + throw new Error(`${label} must be a real directory`) + } +} + +export async function createProjectlessThreadDirectory(prompt: string | null): Promise<{ cwd: string; outputDirectory: string; workspaceRoot: string }> { + const workspaceRoot = join(homedir(), 'Documents', 'Codex') + await mkdir(workspaceRoot, { recursive: true }) + await ensureRealDirectory(workspaceRoot, 'Projectless workspace root') + + const dateDir = join(workspaceRoot, formatProjectlessDateSegment()) + await mkdir(dateDir, { recursive: true }) + await ensureRealDirectory(dateDir, 'Projectless thread date directory') + + const slug = buildProjectlessPromptSlug(prompt) + for (let index = 0; index < PROJECTLESS_THREAD_DIRECTORY_MAX_ATTEMPTS; index += 1) { + const folderName = index === 0 ? 
slug : `${slug}-${index + 1}` + const cwd = join(dateDir, folderName) + try { + await mkdir(cwd, { recursive: false }) + return { cwd, outputDirectory: cwd, workspaceRoot } + } catch { + try { + await stat(cwd) + } catch { + throw new Error('Failed to create new chat folder') + } + } + } + + throw new Error('Unable to create a unique new chat folder') +} + +function normalizeGithubCloneUrl(rawUrl: string): { url: string; repoName: string } { + const trimmedUrl = rawUrl.trim() + if (!trimmedUrl) throw new Error('Missing GitHub repository URL') + + const sshMatch = trimmedUrl.match(/^git@github\.com:([A-Za-z0-9_.-]+)\/([A-Za-z0-9_.-]+?)(?:\.git)?$/u) + if (sshMatch) { + const repoName = sshMatch[2] + return { url: `git@github.com:${sshMatch[1]}/${repoName}.git`, repoName } + } + + let parsed: URL + try { + parsed = new URL(trimmedUrl) + } catch { + throw new Error('Enter a valid GitHub repository URL') + } + if (parsed.hostname.toLowerCase() !== 'github.com') { + throw new Error('Only github.com repository URLs are supported') + } + const segments = parsed.pathname.split('/').filter(Boolean) + if (segments.length < 2) { + throw new Error('Enter a GitHub repository URL with owner and repository name') + } + const owner = segments[0] + const repoName = segments[1].replace(/\.git$/iu, '') + if (!/^[A-Za-z0-9_.-]+$/u.test(owner) || !/^[A-Za-z0-9_.-]+$/u.test(repoName)) { + throw new Error('GitHub repository owner or name contains unsupported characters') + } + return { url: `https://github.com/${owner}/${repoName}.git`, repoName } +} + +export async function cloneGithubRepositoryIntoBase( + rawUrl: string, + rawBasePath: string, + deps: { runCommand: (command: string, args: string[], options?: { cwd?: string; timeoutMs?: number }) => Promise; persistWorkspaceRoot: (workspaceRoot: string, label?: string) => Promise }, +): Promise { + const basePath = rawBasePath.trim() + if (!basePath) throw new Error('Missing clone destination folder') + const normalizedBasePath = 
isAbsolute(basePath) ? basePath : resolve(basePath) + await ensureRealDirectory(normalizedBasePath, 'Clone destination folder') + + const { url, repoName } = normalizeGithubCloneUrl(rawUrl) + const targetPath = join(normalizedBasePath, repoName) + try { + await stat(targetPath) + throw new Error(`Destination already exists: ${targetPath}`) + } catch (error) { + if ((error as NodeJS.ErrnoException)?.code !== 'ENOENT') throw error + } + + try { + await deps.runCommand('git', ['clone', url, targetPath], { cwd: normalizedBasePath, timeoutMs: 5 * 60_000 }) + } catch (error) { + await rm(targetPath, { recursive: true, force: true }).catch(() => undefined) + throw error + } + await deps.persistWorkspaceRoot(targetPath, '') + return targetPath +} diff --git a/src/server/providerModelDiscovery.ts b/src/server/providerModelDiscovery.ts new file mode 100644 index 000000000..0ba2f0efc --- /dev/null +++ b/src/server/providerModelDiscovery.ts @@ -0,0 +1,212 @@ +import type { AppServerProcess } from './appServerProcess.js' + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? 
value : '' +} + +function getErrorMessage(payload: unknown, fallback: string): string { + if (payload instanceof Error && payload.message.trim().length > 0) return payload.message + const record = asRecord(payload) + if (!record) return fallback + const error = record.error + if (typeof error === 'string' && error.length > 0) return error + const nestedError = asRecord(error) + if (nestedError && typeof nestedError.message === 'string' && nestedError.message.length > 0) return nestedError.message + return fallback +} + +export type ProviderModelsResponse = { + data: string[] + providerId: string + source: 'provider' +} + +const PROVIDER_MODELS_FETCH_TIMEOUT_MS = 5_000 + +function normalizeHeaderValue(value: unknown): string | null { + if (typeof value === 'string') { + const trimmed = value.trim() + return trimmed.length > 0 ? trimmed : null + } + if (typeof value === 'number' || typeof value === 'boolean') { + return String(value) + } + return null +} + +function normalizeQueryParams(value: unknown): URLSearchParams { + const params = new URLSearchParams() + const record = asRecord(value) + if (!record) return params + + for (const [key, rawValue] of Object.entries(record)) { + const normalized = normalizeHeaderValue(rawValue) + if (!normalized) continue + params.set(key, normalized) + } + + return params +} + +function buildProviderModelsUrl(baseUrl: string, queryParams: unknown): URL { + const url = new URL(baseUrl) + url.pathname = url.pathname.endsWith('/') ? `${url.pathname}models` : `${url.pathname}/models` + const extraParams = normalizeQueryParams(queryParams) + for (const [key, value] of extraParams.entries()) { + url.searchParams.set(key, value) + } + return url +} + +function normalizeProviderModelsData(payload: unknown): string[] { + const record = asRecord(payload) + const rows = Array.isArray(record?.data) ? 
record.data : null + if (!rows) { + throw new Error('provider /models payload is missing a data array') + } + + const ids: string[] = [] + for (const row of rows) { + const entry = asRecord(row) + const candidate = readNonEmptyString(entry?.id) + if (!candidate || ids.includes(candidate)) continue + ids.push(candidate) + } + return ids +} + +export async function fetchCustomEndpointDefaultModel(baseUrl: string, apiKey: string): Promise { + const normalizedBaseUrl = baseUrl.trim() + if (!normalizedBaseUrl) return '' + + try { + const modelsUrl = buildProviderModelsUrl(normalizedBaseUrl, null) + const headers: Record = apiKey ? { Authorization: `Bearer ${apiKey}` } : {} + const response = await fetch(modelsUrl, { headers, signal: AbortSignal.timeout(PROVIDER_MODELS_FETCH_TIMEOUT_MS) }) + if (!response.ok) return '' + const payload = await response.json() as unknown + const modelIds = normalizeProviderModelsData(payload) + return modelIds[0] ?? '' + } catch { + return '' + } +} + +export async function readProviderBackedModelIds(appServer: AppServerProcess): Promise { + const configPayload = asRecord(await appServer.rpc('config/read', {})) + const config = asRecord(configPayload?.config) + const providerId = readNonEmptyString(config?.model_provider) + if (!providerId) { + return { data: [], providerId: '', source: 'provider' } + } + + const providers = asRecord(config?.model_providers) + const provider = asRecord(providers?.[providerId]) + if (!provider) { + logProviderModelDiscoveryWarning('configured provider is missing from model_providers', { providerId }) + return { data: [], providerId, source: 'provider' } + } + + const wireApi = readNonEmptyString(provider.wire_api) + if (wireApi !== 'responses') { + return { data: [], providerId, source: 'provider' } + } + + const baseUrl = readNonEmptyString(provider.base_url) + if (!baseUrl) { + logProviderModelDiscoveryWarning('responses provider is missing base_url', { providerId }) + return { data: [], providerId, 
source: 'provider' } + } + + const headers = new Headers() + const configuredHeaders = asRecord(provider.http_headers) + if (configuredHeaders) { + for (const [key, rawValue] of Object.entries(configuredHeaders)) { + const normalized = normalizeHeaderValue(rawValue) + if (!normalized) continue + headers.set(key, normalized) + } + } + + const bearerToken = readNonEmptyString(provider.experimental_bearer_token) + if (bearerToken && !headers.has('Authorization')) { + headers.set('Authorization', `Bearer ${bearerToken}`) + } + + const envKey = readNonEmptyString(provider.env_key) + const envHttpHeaders = asRecord(provider.env_http_headers) + if (envKey || envHttpHeaders) { + logProviderModelDiscoveryWarning('provider discovery skipped env-backed auth/header expansion', { + providerId, + hasEnvKey: Boolean(envKey), + hasEnvHttpHeaders: Boolean(envHttpHeaders), + }) + } + + let requestUrl: URL + try { + requestUrl = buildProviderModelsUrl(baseUrl, provider.query_params) + } catch (error) { + logProviderModelDiscoveryWarning('provider /models URL was invalid', { + providerId, + error: getErrorMessage(error, 'invalid url'), + }) + return { data: [], providerId, source: 'provider' } + } + + let response: Response + try { + response = await fetch(requestUrl, { + method: 'GET', + headers, + signal: AbortSignal.timeout(PROVIDER_MODELS_FETCH_TIMEOUT_MS), + }) + } catch (error) { + logProviderModelDiscoveryWarning('provider /models request failed', { + providerId, + error: isTimeoutError(error) ? 
`request timed out after ${PROVIDER_MODELS_FETCH_TIMEOUT_MS}ms` : getErrorMessage(error, 'network error'), + }) + return { data: [], providerId, source: 'provider' } + } + + let payload: unknown = null + try { + payload = await response.json() + } catch (error) { + logProviderModelDiscoveryWarning('provider /models response was not valid JSON', { + providerId, + status: response.status, + error: getErrorMessage(error, 'invalid json'), + }) + return { data: [], providerId, source: 'provider' } + } + + if (!response.ok) { + logProviderModelDiscoveryWarning('provider /models request returned non-2xx', { + providerId, + status: response.status, + statusText: response.statusText, + }) + return { data: [], providerId, source: 'provider' } + } + + try { + return { + data: normalizeProviderModelsData(payload), + providerId, + source: 'provider', + } + } catch (error) { + logProviderModelDiscoveryWarning('provider /models payload was invalid', { + providerId, + error: getErrorMessage(error, 'invalid payload'), + }) + return { data: [], providerId, source: 'provider' } + } +} diff --git a/src/server/threadSearchIndex.ts b/src/server/threadSearchIndex.ts new file mode 100644 index 000000000..09d80d52e --- /dev/null +++ b/src/server/threadSearchIndex.ts @@ -0,0 +1,197 @@ +import { spawn } from 'node:child_process' +import type { AppServerProcess } from './appServerProcess.js' +import { resolveRipgrepCommand } from '../commandResolution.js' + +const THREAD_SEARCH_FULL_TEXT_THREAD_LIMIT = 100 + +function asRecord(value: unknown): Record | null { + return value !== null && typeof value === 'object' && !Array.isArray(value) + ? (value as Record) + : null +} + +function readNonEmptyString(value: unknown): string { + return typeof value === 'string' && value.trim().length > 0 ? 
value : '' +} + +export type ThreadSearchDocument = { + id: string + title: string + preview: string + messageText: string + searchableText: string +} + +export type ThreadSearchIndex = { + docsById: Map +} + +function extractThreadMessageText(threadReadPayload: unknown): string { + const payload = asRecord(threadReadPayload) + const thread = asRecord(payload?.thread) + const turns = Array.isArray(thread?.turns) ? thread.turns : [] + const parts: string[] = [] + + for (const turn of turns) { + const turnRecord = asRecord(turn) + const items = Array.isArray(turnRecord?.items) ? turnRecord.items : [] + for (const item of items) { + const itemRecord = asRecord(item) + const type = typeof itemRecord?.type === 'string' ? itemRecord.type : '' + if (type === 'agentMessage' && typeof itemRecord?.text === 'string' && itemRecord.text.trim().length > 0) { + parts.push(itemRecord.text.trim()) + continue + } + if (type === 'userMessage') { + const content = Array.isArray(itemRecord?.content) ? itemRecord.content : [] + for (const block of content) { + const blockRecord = asRecord(block) + if (blockRecord?.type === 'text' && typeof blockRecord.text === 'string' && blockRecord.text.trim().length > 0) { + parts.push(blockRecord.text.trim()) + } + } + continue + } + if (type === 'commandExecution') { + const command = typeof itemRecord?.command === 'string' ? itemRecord.command.trim() : '' + const output = typeof itemRecord?.aggregatedOutput === 'string' ? 
itemRecord.aggregatedOutput.trim() : '' + if (command) parts.push(command) + if (output) parts.push(output) + } + } + } + + return parts.join('\n').trim() +} + +export function isExactPhraseMatch(query: string, doc: ThreadSearchDocument): boolean { + const q = query.trim().toLowerCase() + if (!q) return false + return ( + doc.title.toLowerCase().includes(q) || + doc.preview.toLowerCase().includes(q) || + doc.messageText.toLowerCase().includes(q) + ) +} + +export function scoreFileCandidate(path: string, query: string): number { + if (!query) return 0 + const lowerPath = path.toLowerCase() + const lowerQuery = query.toLowerCase() + const baseName = lowerPath.slice(lowerPath.lastIndexOf('/') + 1) + if (baseName === lowerQuery) return 0 + if (baseName.startsWith(lowerQuery)) return 1 + if (baseName.includes(lowerQuery)) return 2 + if (lowerPath.includes(`/${lowerQuery}`)) return 3 + if (lowerPath.includes(lowerQuery)) return 4 + return 10 +} + +export async function listFilesWithRipgrep(cwd: string): Promise { + return await new Promise((resolve, reject) => { + const ripgrepCommand = resolveRipgrepCommand() + if (!ripgrepCommand) { + reject(new Error('ripgrep (rg) is not available')) + return + } + + const proc = spawn(ripgrepCommand, ['--files', '--hidden', '-g', '!.git', '-g', '!node_modules'], { + cwd, + env: process.env, + stdio: ['ignore', 'pipe', 'pipe'], + }) + let stdout = '' + let stderr = '' + proc.stdout.on('data', (chunk: Buffer) => { stdout += chunk.toString() }) + proc.stderr.on('data', (chunk: Buffer) => { stderr += chunk.toString() }) + proc.on('error', reject) + proc.on('close', (code) => { + if (code === 0) { + const rows = stdout + .split(/\r?\n/) + .map((line) => line.trim()) + .filter(Boolean) + resolve(rows) + return + } + const details = [stderr.trim(), stdout.trim()].filter(Boolean).join('\n') + reject(new Error(details || 'rg --files failed')) + }) + }) +} + +async function loadAllThreadsForSearch(appServer: AppServerProcess): Promise { + 
const threads: Array<{ id: string; title: string; preview: string }> = [] + let cursor: string | null = null + + do { + const response = asRecord(await appServer.rpc('thread/list', { + archived: false, + limit: 100, + sortKey: 'updated_at', + modelProviders: [], + cursor, + })) + const data = Array.isArray(response?.data) ? response.data : [] + for (const row of data) { + const record = asRecord(row) + const id = typeof record?.id === 'string' ? record.id : '' + if (!id) continue + const title = typeof record?.name === 'string' && record.name.trim().length > 0 + ? record.name.trim() + : (typeof record?.preview === 'string' && record.preview.trim().length > 0 ? record.preview.trim() : 'Untitled thread') + const preview = typeof record?.preview === 'string' ? record.preview : '' + threads.push({ id, title, preview }) + } + cursor = typeof response?.nextCursor === 'string' && response.nextCursor.length > 0 ? response.nextCursor : null + } while (cursor) + + const docs: ThreadSearchDocument[] = threads.map((thread) => { + const searchableText = [thread.title, thread.preview].filter(Boolean).join('\n') + return { + id: thread.id, + title: thread.title, + preview: thread.preview, + messageText: '', + searchableText, + } satisfies ThreadSearchDocument + }) + + const docsById = new Map(docs.map((doc) => [doc.id, doc])) + const fullTextThreads = threads.slice(0, THREAD_SEARCH_FULL_TEXT_THREAD_LIMIT) + const concurrency = 4 + for (let offset = 0; offset < fullTextThreads.length; offset += concurrency) { + const batch = fullTextThreads.slice(offset, offset + concurrency) + const loaded = await Promise.all(batch.map(async (thread) => { + try { + const readResponse = await appServer.rpc('thread/read', { + threadId: thread.id, + includeTurns: true, + }) + const messageText = extractThreadMessageText(readResponse) + const searchableText = [thread.title, thread.preview, messageText].filter(Boolean).join('\n') + return [thread.id, { + id: thread.id, + title: thread.title, + 
preview: thread.preview, + messageText, + searchableText, + } satisfies ThreadSearchDocument] as const + } catch { + return null + } + })) + for (const row of loaded) { + if (!row) continue + docsById.set(row[0], row[1]) + } + } + + return Array.from(docsById.values()) +} + +export async function buildThreadSearchIndex(appServer: AppServerProcess): Promise { + const docs = await loadAllThreadsForSearch(appServer) + const docsById = new Map(docs.map((doc) => [doc.id, doc])) + return { docsById } +} From 085b993594ecf91fc737686158b3827261620ae0 Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:34:30 +0700 Subject: [PATCH 12/19] Extract Vue scoped styles --- src/App.scoped.css | 1037 +++++++++++++++ src/App.vue | 1041 +-------------- .../content/ThreadConversation.scoped.css | 1149 ++++++++++++++++ src/components/content/ThreadConversation.vue | 1152 +---------------- src/server/apiPerfConfig.ts | 2 +- src/server/appServerProcess.ts | 3 +- src/server/authRefresh.ts | 11 + src/server/codexAppServerBridge.ts | 11 +- src/server/composioRoutesSupport.ts | 2 + src/server/providerModelDiscovery.ts | 10 +- src/server/threadAutomations.ts | 2 + 11 files changed, 2224 insertions(+), 2196 deletions(-) create mode 100644 src/App.scoped.css create mode 100644 src/components/content/ThreadConversation.scoped.css diff --git a/src/App.scoped.css b/src/App.scoped.css new file mode 100644 index 000000000..60bcf090b --- /dev/null +++ b/src/App.scoped.css @@ -0,0 +1,1037 @@ +@reference "tailwindcss"; + +.sidebar-root { + @apply h-full flex flex-col select-none; +} + +.sidebar-root input, +.sidebar-root textarea { + @apply select-text; +} + +.sidebar-scrollable { + @apply flex-1 min-h-0 overflow-y-auto py-4 px-2 flex flex-col gap-2; +} + +.content-root { + @apply h-full min-h-0 min-w-0 w-full flex flex-col overflow-y-hidden overflow-x-hidden bg-white; +} + +.content-root.is-virtual-keyboard-open { + height: var(--visual-viewport-height); + max-height: 
var(--visual-viewport-height); + transform: translateY(var(--visual-viewport-offset-top)); +} + +.sidebar-thread-controls-host { + @apply mt-1 -translate-y-px px-2 pb-1; +} + +.sidebar-search-toggle { + @apply h-6.75 w-6.75 rounded-md border border-transparent bg-transparent text-zinc-600 flex items-center justify-center transition hover:border-zinc-200 hover:bg-zinc-50; +} + +.sidebar-search-toggle[aria-pressed='true'] { + @apply border-zinc-300 bg-zinc-100 text-zinc-700; +} + +.sidebar-search-toggle-icon { + @apply w-4 h-4; +} + +.sidebar-search-bar { + @apply flex items-center gap-1.5 mx-2 px-2 py-1 rounded-md border border-zinc-200 bg-white transition-colors focus-within:border-zinc-400; +} + +.sidebar-search-bar-icon { + @apply w-3.5 h-3.5 text-zinc-400 shrink-0; +} + +.sidebar-search-input { + @apply flex-1 min-w-0 bg-transparent text-sm text-zinc-800 placeholder-zinc-400 outline-none border-none p-0; +} + +.sidebar-search-clear { + @apply w-4 h-4 rounded text-zinc-400 flex items-center justify-center transition hover:text-zinc-600; +} + +.sidebar-search-clear-icon { + @apply w-3.5 h-3.5; +} + +.sidebar-skills-link { + @apply mx-2 flex items-center gap-3 rounded-2xl border border-transparent bg-transparent px-3 py-2.5 text-left text-zinc-700 transition hover:bg-zinc-100 hover:text-zinc-950 cursor-pointer; +} + +.sidebar-skills-link.is-active { + @apply border-transparent bg-zinc-100 text-zinc-950; +} + +.sidebar-skills-link-icon { + @apply flex h-10 w-10 shrink-0 items-center justify-center rounded-2xl bg-emerald-600 text-white; +} + +.sidebar-automations-link-icon { + @apply bg-amber-500; +} + +.sidebar-skills-link-icon :deep(svg) { + @apply h-5 w-5; +} + +.sidebar-skills-link-copy { + @apply flex min-w-0 flex-col; +} + +.sidebar-skills-link-title { + @apply truncate text-sm font-semibold leading-5 tracking-[-0.01em]; +} + +.sidebar-skills-link-subtitle { + @apply truncate text-[11px] font-medium uppercase tracking-[0.18em] text-zinc-500; +} + 
+.sidebar-thread-controls-header-host { + @apply ml-1; +} + +.skills-route-header-icon { + @apply flex h-9 w-9 shrink-0 items-center justify-center rounded-2xl bg-emerald-600 text-white shadow-[0_16px_32px_-20px_rgba(5,150,105,0.9)]; +} + +.automations-route-header-icon { + @apply bg-amber-500 shadow-[0_16px_32px_-20px_rgba(245,158,11,0.9)]; +} + +.skills-route-header-icon :deep(svg) { + @apply h-4.5 w-4.5; +} + +:global(:root.dark) .sidebar-skills-link-title { + @apply text-zinc-50; +} + +:global(:root.dark) .sidebar-skills-link-subtitle { + @apply text-zinc-400; +} + +.content-body { + @apply flex-1 min-h-0 min-w-0 w-full flex flex-col gap-2 sm:gap-3 pt-1 pb-2 sm:pb-4 overflow-x-hidden; +} + +.content-root.is-virtual-keyboard-open .content-body { + padding-bottom: max(0.5rem, env(safe-area-inset-bottom)); +} + +.content-root.is-virtual-keyboard-open .content-grid { + gap: 0.5rem; +} + +.content-root.is-virtual-keyboard-open .content-thread { + min-height: 0; +} + +.content-root.is-virtual-keyboard-open .composer-with-queue { + gap: 0.375rem; + padding-bottom: max(0.25rem, env(safe-area-inset-bottom)); +} + +.content-root.is-virtual-keyboard-open .content-thread-terminal-panel { + min-height: 0; +} + +.content-root.is-virtual-keyboard-open .content-keyboard-spacer { + display: none; +} + + + +.content-error { + @apply m-0 rounded-lg border border-rose-200 bg-rose-50 px-3 py-2 text-sm text-rose-700; +} + +.content-grid { + @apply flex-1 min-h-0 flex flex-col gap-3; +} + +.content-grid-home { + @apply overflow-y-auto; +} + +.content-thread { + @apply flex-1 min-h-0; +} + +.composer-with-queue { + @apply w-full shrink-0 px-2 sm:px-6 flex flex-col gap-2; +} + +.content-thread-terminal-panel { + @apply w-full; +} + +.content-header-terminal-command { + @apply max-w-48; +} + +.content-header-terminal-command :deep(.composer-dropdown-trigger) { + @apply h-8 rounded-full border border-zinc-200 bg-white px-3 text-xs text-zinc-700 outline-none transition hover:bg-zinc-50 
focus:border-zinc-300; +} + +.content-header-terminal-command :deep(.composer-dropdown-prefix-icon) { + @apply h-4 w-4 text-zinc-500; +} + +.content-header-terminal-command.is-open :deep(.composer-dropdown-trigger) { + @apply border-zinc-300 bg-zinc-100 text-zinc-950; +} + +.content-header-terminal-command :deep(.composer-dropdown-menu-wrap) { + left: auto; + right: 0; +} + +.content-header-terminal-command :deep(.composer-dropdown-menu) { + width: min(18rem, calc(100vw - 1rem)); + min-width: min(14rem, calc(100vw - 1rem)); +} + +.content-header-terminal-command :deep(.composer-dropdown-option) { + @apply block truncate; +} + +.content-header-terminal-command :deep(.composer-dropdown-trigger) { + @apply rounded-full border border-zinc-200 bg-white px-2.5 py-1.5 text-xs text-zinc-700 transition hover:bg-zinc-50; +} + +.content-header-terminal-command :deep(.composer-dropdown-prefix-icon), +.content-header-branch-dropdown :deep(.composer-dropdown-prefix-icon) { + @apply h-4 w-4 text-zinc-600; +} + +.content-header-terminal-command :deep(.composer-dropdown-trigger), +.content-header-branch-dropdown :deep(.composer-dropdown-trigger) { + @apply gap-0.5; +} + +.content-header-branch-dropdown :deep(.composer-dropdown-trigger) { + @apply rounded-full border border-zinc-200 bg-white px-2.5 py-1.5 text-xs text-zinc-700 transition hover:bg-zinc-50; +} + +.content-header-branch-dropdown :deep(.composer-dropdown-value) { + @apply max-w-40 truncate; +} + +.content-header-branch-dropdown :deep(.composer-dropdown-menu-wrap) { + left: auto; + right: 0; +} + +.content-header-branch-dropdown.is-review-open :deep(.composer-dropdown-trigger) { + @apply border-zinc-900 bg-zinc-900 text-white hover:bg-zinc-800; +} + +.content-header-branch-dropdown.is-review-open :deep(.composer-dropdown-chevron) { + @apply text-white; +} + +.new-thread-empty { + @apply flex-1 min-h-0 flex flex-col items-center justify-center gap-0.5 px-3 sm:px-6; +} + +.new-thread-hero { + @apply m-0 text-2xl 
sm:text-[2.5rem] font-normal leading-[1.05] text-zinc-900; +} + +.new-thread-folder-dropdown { + @apply text-2xl sm:text-[2.5rem] text-zinc-500; +} + +.new-thread-folder-dropdown :deep(.composer-dropdown-trigger) { + @apply h-auto p-0 text-2xl sm:text-[2.5rem] leading-[1.05]; +} + +.new-thread-folder-dropdown :deep(.composer-dropdown-value) { + @apply leading-[1.05]; +} + +.new-thread-folder-dropdown :deep(.composer-dropdown-chevron) { + @apply h-4 w-4 sm:h-5 sm:w-5 mt-0; +} + +.new-thread-folder-selected { + @apply mt-2 mb-0 max-w-3xl text-center text-xs text-zinc-500 break-all; +} + +.new-thread-folder-actions { + @apply mt-3 flex w-full max-w-3xl flex-wrap items-center justify-center gap-2; +} + +.new-thread-launch-card { + @apply mt-4 w-full max-w-3xl rounded-[28px] border border-emerald-200 bg-[radial-gradient(circle_at_top_left,_rgba(16,185,129,0.2),_transparent_42%),linear-gradient(135deg,_#f4fff8,_#ffffff_58%)] px-5 py-5 text-left shadow-[0_18px_50px_-28px_rgba(5,150,105,0.45)]; +} + +.new-thread-launch-card-copy { + @apply flex flex-col gap-2; +} + +.new-thread-launch-card-topline { + @apply flex items-center gap-2; +} + +.new-thread-launch-card-badge { + @apply flex h-8 w-8 shrink-0 items-center justify-center rounded-2xl bg-emerald-700 text-white shadow-[0_12px_28px_-18px_rgba(5,150,105,0.9)]; +} + +.new-thread-launch-card-badge :deep(svg) { + @apply h-4 w-4; +} + +.new-thread-launch-card-eyebrow { + @apply m-0 text-[11px] font-semibold uppercase tracking-[0.24em] text-emerald-700; +} + +.new-thread-launch-card-title { + @apply m-0 text-xl font-semibold leading-tight text-zinc-950 sm:text-2xl; +} + +.new-thread-launch-card-text { + @apply m-0 max-w-2xl text-sm leading-6 text-zinc-700 sm:text-[15px]; +} + +.new-thread-launch-card-actions { + @apply mt-4 flex flex-wrap items-center gap-2; +} + +.new-thread-launch-card-pills { + @apply mt-1 flex flex-wrap gap-2; +} + +.new-thread-launch-card-pill { + @apply inline-flex items-center rounded-full border 
border-emerald-100 bg-white/80 px-2.5 py-1 text-[11px] font-semibold uppercase tracking-[0.12em] text-emerald-700; +} + +.new-thread-launch-card-button { + @apply inline-flex h-10 items-center justify-center rounded-full border border-zinc-200 bg-white px-4 text-sm font-medium text-zinc-700 transition hover:bg-zinc-50; +} + +.new-thread-launch-card-button-primary { + @apply border-emerald-700 bg-emerald-700 text-white hover:bg-emerald-600; +} + +:global(:root.dark) .new-thread-launch-card { + @apply border-emerald-900/80 bg-[radial-gradient(circle_at_top_left,_rgba(16,185,129,0.2),_transparent_38%),linear-gradient(135deg,_rgba(6,78,59,0.32),_rgba(24,24,27,0.96)_58%)] shadow-[0_24px_64px_-34px_rgba(16,185,129,0.35)]; +} + +:global(:root.dark) .new-thread-launch-card-eyebrow { + @apply text-emerald-300; +} + +:global(:root.dark) .new-thread-launch-card-badge { + @apply bg-emerald-500 text-white; +} + +:global(:root.dark) .new-thread-launch-card-title { + @apply text-zinc-50; +} + +:global(:root.dark) .new-thread-launch-card-text { + @apply text-zinc-300; +} + +:global(:root.dark) .new-thread-launch-card-pill { + @apply border-emerald-900 bg-zinc-900/70 text-emerald-300; +} + +:global(:root.dark) .new-thread-launch-card-button { + @apply border-zinc-700 bg-zinc-900 text-zinc-100 hover:bg-zinc-800; +} + +:global(:root.dark) .new-thread-launch-card-button-primary { + @apply border-emerald-600 bg-emerald-600 text-white hover:bg-emerald-500; +} + +.new-thread-folder-action { + @apply inline-flex h-9 items-center justify-center rounded-full border border-zinc-200 bg-white px-4 text-sm font-medium text-zinc-700 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.new-thread-folder-action-primary { + @apply border-zinc-900 bg-zinc-900 text-white hover:bg-zinc-800; +} + +.new-thread-open-folder-overlay { + @apply fixed inset-0 z-50 flex items-center justify-center bg-black/40 p-4; +} + +.new-thread-open-folder { + @apply flex w-full max-w-3xl 
max-h-[90vh] flex-col gap-2 overflow-y-auto rounded-2xl border border-zinc-200 bg-white px-4 py-4 text-left shadow-xl; +} + +.new-thread-project-modal { + @apply flex w-full max-w-xl max-h-[90vh] flex-col gap-3 overflow-y-auto rounded-2xl border border-zinc-200 bg-white px-4 py-4 text-left shadow-xl; +} + +.new-thread-open-folder-header { + @apply flex items-center justify-between gap-3; +} + +.new-thread-open-folder-title { + @apply m-0 text-sm font-semibold text-zinc-900; +} + +.new-thread-open-folder-close { + @apply border-0 bg-transparent p-0 text-sm text-zinc-500 transition hover:text-zinc-800; +} + +.new-thread-open-folder-label { + @apply m-0 text-xs font-medium uppercase tracking-wide text-zinc-500; +} + +.new-thread-open-folder-current { + @apply flex items-start gap-2; +} + +.new-thread-open-folder-path { + @apply min-w-0 flex-1 rounded-xl border border-zinc-200 bg-white px-3 py-2 font-mono text-xs text-zinc-700 outline-none transition focus:border-zinc-400; +} + +.new-thread-open-folder-actions { + @apply flex flex-wrap items-center gap-2; +} + +.new-thread-project-mode-tabs { + @apply grid grid-cols-2 rounded-xl border border-zinc-200 bg-zinc-50 p-1; +} + +.new-thread-project-mode-tab { + @apply inline-flex h-9 items-center justify-center rounded-lg border-0 bg-transparent px-3 text-sm font-medium text-zinc-600 transition hover:bg-white hover:text-zinc-900 disabled:cursor-default disabled:opacity-60; +} + +.new-thread-project-mode-tab.is-active { + @apply bg-white text-zinc-950 shadow-sm; +} + +.new-thread-project-field { + @apply flex flex-col gap-1.5; +} + +.new-thread-project-modal-actions { + @apply mt-1 flex flex-wrap justify-end gap-2; +} + +.new-thread-open-folder-toggle { + @apply inline-flex items-center gap-2 text-sm text-zinc-600; +} + +.new-thread-open-folder-toggle-input { + @apply relative h-4 w-4 shrink-0 appearance-none rounded-[4px] border border-zinc-300 bg-white outline-none transition; +} + 
+.new-thread-open-folder-toggle-input:focus-visible { + box-shadow: 0 0 0 3px rgb(228 228 231); +} + +.new-thread-open-folder-toggle-input:checked { + border-color: rgb(24 24 27); + background-color: rgb(255 255 255); +} + +.new-thread-open-folder-toggle-input::after { + content: ''; + position: absolute; + left: 4px; + top: 1px; + width: 4px; + height: 8px; + border-right: 2px solid rgb(24 24 27); + border-bottom: 2px solid rgb(24 24 27); + transform: rotate(45deg); + opacity: 0; +} + +.new-thread-open-folder-toggle-input:checked::after { + opacity: 1; +} + +.new-thread-open-folder-filter { + @apply w-full rounded-xl border border-zinc-200 bg-white px-3 py-2 text-sm text-zinc-900 outline-none transition focus:border-zinc-400; +} + +.new-thread-open-folder-create { + @apply flex flex-col gap-2; +} + +.new-thread-open-folder-create-composer { + @apply flex items-center gap-2; +} + +.new-thread-open-folder-create-input { + @apply w-full min-w-0 flex-1 rounded-xl border border-zinc-200 bg-white px-3 py-2 text-sm text-zinc-900 outline-none transition focus:border-zinc-400; +} + +.new-thread-open-folder-create-submit { + @apply shrink-0; +} + +.new-thread-folder-action[aria-pressed='true'] { + @apply border-zinc-900 bg-zinc-900 text-white hover:bg-zinc-800; +} + +.new-thread-open-folder-status { + @apply m-0 rounded-xl border border-zinc-200 bg-zinc-50 px-3 py-2 text-sm text-zinc-600; +} + +.new-thread-open-folder-error { + @apply m-0 rounded-xl border border-rose-200 bg-rose-50 px-3 py-2 text-sm text-rose-700; +} + +.new-thread-open-folder-error-actions { + @apply flex flex-wrap items-start gap-2; +} + +.new-thread-open-folder-list { + @apply m-0 flex max-h-72 list-none flex-col gap-1 overflow-y-auto p-0 pr-3; + scrollbar-gutter: stable; + scrollbar-color: rgb(161 161 170) rgb(244 244 245); + scrollbar-width: thin; +} + +.new-thread-open-folder-list::-webkit-scrollbar { + width: 10px; +} + +.new-thread-open-folder-list::-webkit-scrollbar-track { + background: rgb(244 
244 245); + border-radius: 9999px; +} + +.new-thread-open-folder-list::-webkit-scrollbar-thumb { + background: rgb(161 161 170); + border-radius: 9999px; + border: 2px solid rgb(244 244 245); +} + +.new-thread-open-folder-list::-webkit-scrollbar-thumb:hover { + background: rgb(113 113 122); +} + +.new-thread-open-folder-item { + @apply grid grid-cols-[minmax(0,1fr)_auto] items-center gap-1; +} + +.new-thread-open-folder-item-main { + @apply min-w-0 truncate rounded-xl border border-zinc-200 bg-zinc-50 px-2.5 py-1 text-left text-sm font-medium leading-5 text-zinc-900 transition hover:border-zinc-300 hover:bg-zinc-100; +} + +.new-thread-open-folder-item-main:disabled, +.new-thread-open-folder-item-open:disabled { + @apply cursor-default opacity-60; +} + +.new-thread-open-folder-item-name { + @apply block truncate; +} + +.new-thread-open-folder-item-open { + @apply inline-flex h-7 items-center justify-center rounded-xl border border-zinc-200 bg-white px-2.5 text-xs font-medium text-zinc-700 transition hover:bg-zinc-50; +} + +.new-thread-runtime-dropdown { + @apply mt-3; +} + +.new-thread-branch-select { + @apply mt-3 w-full max-w-3xl; +} + +.new-thread-branch-select-label { + @apply m-0 mb-1 text-xs font-medium uppercase tracking-wide text-zinc-500; +} + +.new-thread-branch-dropdown :deep(.composer-dropdown-trigger) { + @apply h-9 rounded-xl border border-zinc-200 bg-white px-3 text-sm text-zinc-700; +} + +.new-thread-branch-select-help { + @apply mt-1 mb-0 text-xs text-zinc-500; +} + +.new-thread-runtime-help { + @apply mt-2 mb-0 max-w-3xl text-center text-xs text-zinc-500; +} + +.worktree-init-status { + @apply mt-3 flex w-full max-w-xl flex-col gap-1 rounded-xl border px-3 py-2 text-sm; +} + +.worktree-init-status.is-running { + @apply border-zinc-300 bg-zinc-50 text-zinc-700; +} + +.worktree-init-status.is-error { + @apply border-rose-300 bg-rose-50 text-rose-800; +} + +.worktree-init-status-title { + @apply font-medium; +} + +.worktree-init-status-message { + 
@apply break-all; +} + +.sidebar-settings-area { + @apply shrink-0 bg-slate-100 pt-2 px-2 pb-2 border-t border-zinc-200; +} + +.sidebar-settings-button { + @apply flex items-center gap-2 w-full rounded-lg border-0 bg-transparent px-2 py-2 text-sm text-zinc-600 transition hover:bg-zinc-200 hover:text-zinc-900 cursor-pointer; +} + +.sidebar-settings-button-version { + @apply ml-auto min-w-0 truncate text-right text-xs; +} + +.sidebar-settings-icon { + @apply w-4.5 h-4.5; +} + +.sidebar-settings-panel { + @apply mb-1 max-h-[min(70vh,36rem)] overflow-y-auto rounded-lg border border-zinc-200 bg-white; +} + +.sidebar-settings-row { + @apply flex items-center justify-between w-full px-3 py-2.5 text-sm text-zinc-700 border-0 bg-transparent transition hover:bg-zinc-50 cursor-pointer; +} + +.sidebar-settings-row--select { + @apply cursor-default items-center gap-2; +} + +.sidebar-settings-language-dropdown { + @apply min-w-0 max-w-52; +} + +.sidebar-settings-language-dropdown :deep(.composer-dropdown-trigger) { + @apply h-auto rounded-md border border-zinc-200 bg-white px-2 py-1 text-xs text-zinc-700; +} + +.sidebar-settings-language-dropdown :deep(.composer-dropdown-value) { + @apply max-w-32; +} + +.sidebar-settings-row + .sidebar-settings-row { + @apply border-t border-zinc-100; +} + +.sidebar-settings-telegram-panel { + @apply border-t border-zinc-100 bg-zinc-50/70 px-3 py-3; +} + +.sidebar-settings-field { + @apply flex flex-col gap-1.5; +} + +.sidebar-settings-field + .sidebar-settings-field { + @apply mt-3; +} + +.sidebar-settings-field-label { + @apply text-xs font-medium text-zinc-700; +} + +.sidebar-settings-input, +.sidebar-settings-textarea { + @apply w-full rounded-md border border-zinc-200 bg-white px-2.5 py-2 text-sm text-zinc-800 outline-none transition focus:border-zinc-400 focus:ring-2 focus:ring-zinc-200; +} + +.sidebar-settings-textarea { + @apply min-h-20 resize-y font-mono text-xs; +} + +.sidebar-settings-field-help { + @apply mt-2 text-xs leading-5 
text-zinc-500; +} + +.sidebar-settings-telegram-error { + @apply mt-2 rounded-md bg-rose-50 px-2.5 py-2 text-xs text-rose-700; +} + +.sidebar-settings-telegram-actions { + @apply mt-3 flex items-center justify-end; +} + +.sidebar-settings-telegram-save { + @apply rounded-full border border-zinc-200 bg-white px-3 py-1.5 text-xs font-medium text-zinc-700 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.sidebar-settings-account-section { + @apply border-t border-zinc-100 bg-zinc-50/60 px-3 py-3; +} + +.sidebar-settings-account-header { + @apply mb-2 flex items-center justify-between gap-2; +} + +.sidebar-settings-account-header-main { + @apply flex items-center gap-2; +} + +.sidebar-settings-account-collapse { + @apply inline-flex h-5 w-5 items-center justify-center rounded border border-zinc-200 bg-white text-zinc-600 transition hover:bg-zinc-100; +} + +.sidebar-settings-account-collapse-icon { + @apply text-[11px] leading-none; +} + +.sidebar-settings-account-title { + @apply text-sm font-medium text-zinc-800; +} + +.sidebar-settings-account-count { + @apply rounded bg-zinc-200 px-1.5 py-0.5 text-[11px] text-zinc-600; +} + +.sidebar-settings-account-error { + @apply mb-2 rounded-md bg-rose-50 px-2 py-1.5 text-xs text-rose-700; +} + +.sidebar-settings-account-refresh { + @apply shrink-0 rounded-full border border-zinc-200 bg-white px-2.5 py-1 text-xs text-zinc-700 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.sidebar-settings-account-login { + @apply mb-2 flex items-center gap-2; +} + +.sidebar-settings-account-login-button { + @apply shrink-0 rounded-full border border-zinc-200 bg-white px-3 py-1 text-xs font-medium text-zinc-700 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.sidebar-settings-account-login-link { + @apply min-w-0 truncate text-xs text-blue-600 hover:text-blue-700 hover:underline; +} + +.sidebar-settings-account-empty { + @apply text-xs text-zinc-500; +} 
+ +.codex-login-modal-backdrop { + @apply fixed inset-0 z-[100] flex items-center justify-center bg-black/35 px-4; +} + +.codex-login-modal { + @apply flex w-full max-w-md flex-col gap-3 rounded-xl border border-zinc-200 bg-white p-4 shadow-2xl; +} + +.codex-login-modal-header { + @apply flex items-center justify-between gap-3; +} + +.codex-login-modal-title { + @apply text-base font-semibold text-zinc-900; +} + +.codex-login-modal-close { + @apply inline-flex h-7 w-7 items-center justify-center rounded-full border border-zinc-200 bg-white text-lg leading-none text-zinc-600 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.codex-login-modal-copy { + @apply text-sm leading-5 text-zinc-600; +} + +.codex-login-modal-link { + @apply min-w-0 truncate text-sm text-blue-600 hover:text-blue-700 hover:underline; +} + +.codex-login-modal-input { + @apply w-full rounded-lg border border-zinc-200 bg-white px-3 py-2 text-sm text-zinc-900 outline-none transition focus:border-zinc-400 disabled:cursor-default disabled:opacity-60; +} + +.codex-login-modal-error { + @apply rounded-md bg-rose-50 px-3 py-2 text-xs text-rose-700; +} + +.codex-login-modal-actions { + @apply flex items-center justify-end gap-2; +} + +.codex-login-modal-cancel, +.codex-login-modal-submit { + @apply rounded-full border border-zinc-200 bg-white px-3 py-1.5 text-sm text-zinc-700 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.codex-login-modal-submit { + @apply border-zinc-900 bg-zinc-900 text-white hover:bg-zinc-800; +} + +:global(:root.dark) .codex-login-modal { + @apply border-zinc-700 bg-zinc-900; +} + +:global(:root.dark) .codex-login-modal-title { + @apply text-zinc-100; +} + +:global(:root.dark) .codex-login-modal-close, +:global(:root.dark) .codex-login-modal-cancel { + @apply border-zinc-600 bg-zinc-800 text-zinc-200 hover:bg-zinc-700; +} + +:global(:root.dark) .codex-login-modal-copy { + @apply text-zinc-300; +} + +:global(:root.dark) 
.codex-login-modal-link { + @apply text-sky-300 hover:text-sky-200; +} + +:global(:root.dark) .codex-login-modal-input { + @apply border-zinc-600 bg-zinc-950 text-zinc-100 placeholder:text-zinc-500 focus:border-zinc-400; +} + +:global(:root.dark) .codex-login-modal-error { + @apply bg-rose-950/40 text-rose-200; +} + +:global(:root.dark) .codex-login-modal-submit { + @apply border-zinc-200 bg-zinc-100 text-zinc-900 hover:bg-white; +} + +.sidebar-settings-account-list { + @apply flex flex-col gap-2; +} + +.sidebar-settings-account-item { + @apply flex items-center gap-2 rounded-lg border border-zinc-200 bg-white px-2.5 py-2; +} + +.sidebar-settings-account-item.is-active { + @apply border-emerald-200 bg-emerald-50; +} + +.sidebar-settings-account-item.is-unavailable { + @apply border-rose-200 bg-rose-50; +} + +.sidebar-settings-account-main { + @apply min-w-0 flex-1; +} + +.sidebar-settings-account-actions { + @apply flex w-24 shrink-0 flex-col items-end gap-1.5; +} + +.sidebar-settings-account-email { + @apply truncate text-sm text-zinc-800; +} + +.sidebar-settings-account-meta { + @apply truncate text-[11px] text-zinc-500; +} + +.sidebar-settings-account-quota { + @apply truncate text-[11px] text-zinc-600; +} + +.sidebar-settings-account-id { + @apply mt-1 inline-flex max-w-full rounded-full bg-zinc-100 px-2 py-0.5 font-mono text-[11px] text-zinc-700; +} + +.sidebar-settings-account-item.is-active .sidebar-settings-account-id { + @apply bg-emerald-100 text-emerald-800; +} + +.sidebar-settings-account-item.is-unavailable .sidebar-settings-account-id { + @apply bg-rose-100 text-rose-800; +} + +.sidebar-settings-account-switch { + @apply min-w-[4.75rem] shrink-0 rounded-full border border-zinc-200 bg-white px-2.5 py-1 text-center text-xs text-zinc-700 transition hover:bg-zinc-50 disabled:cursor-default disabled:opacity-60; +} + +.sidebar-settings-account-remove { + @apply invisible shrink-0 rounded-full border border-amber-200 bg-white px-2 py-0.5 text-[10px] 
leading-4 text-zinc-500 opacity-0 pointer-events-none transition-colors hover:bg-amber-50 disabled:cursor-default disabled:opacity-60; +} + +.sidebar-settings-account-remove.is-visible { + @apply visible opacity-100 pointer-events-auto; +} + +.sidebar-settings-account-remove.is-confirming { + @apply border-amber-300 bg-amber-50 text-amber-700 font-medium; +} + +.sidebar-settings-label { + @apply text-left; +} + +.sidebar-settings-value { + @apply text-xs text-zinc-500 bg-zinc-100 rounded px-1.5 py-0.5; +} + + +.sidebar-settings-toggle { + @apply relative w-9 h-5 rounded-full bg-zinc-300 transition-colors shrink-0; +} + +.sidebar-settings-toggle::after { + content: ''; + @apply absolute top-0.5 left-0.5 w-4 h-4 rounded-full bg-white transition-transform shadow-sm; +} + +.sidebar-settings-toggle.is-on { + @apply bg-zinc-800; +} + +.sidebar-settings-toggle.is-on::after { + transform: translateX(16px); +} + +.sidebar-settings-row--input { + @apply flex flex-col gap-1 py-1.5; +} + +.sidebar-settings-error { + @apply text-xs text-red-600 bg-red-50 rounded px-2 py-1.5 break-words; +} + +.sidebar-settings-key-group { + @apply flex items-center gap-1.5 w-full; +} + +.sidebar-settings-key-input { + @apply flex-1 min-w-0 text-xs rounded border border-zinc-200 bg-white px-2 py-1 outline-none transition-colors placeholder:text-zinc-400; +} + +.sidebar-settings-key-input:focus { + @apply border-zinc-400; +} + +.sidebar-settings-key-save { + @apply shrink-0 rounded border border-zinc-200 bg-white px-2.5 py-1 text-xs text-zinc-700 transition-colors hover:bg-zinc-50 disabled:opacity-40 disabled:cursor-default; +} + +.sidebar-settings-key-masked { + @apply flex-1 min-w-0 text-xs text-zinc-500 font-mono truncate; +} + +.sidebar-settings-key-clear { + @apply shrink-0 w-6 h-6 flex items-center justify-center rounded-full border border-zinc-200 text-xs text-zinc-400 transition-colors hover:text-zinc-600 hover:border-zinc-300 disabled:opacity-40; +} + +.sidebar-settings-provider-select { 
+ @apply min-w-0 max-w-40 rounded-md border border-zinc-200 bg-white px-2 py-1 text-xs text-zinc-700 outline-none transition-colors cursor-pointer; +} + +.sidebar-settings-provider-select:focus { + @apply border-zinc-400 ring-2 ring-zinc-200; +} + +.sidebar-settings-segmented { + @apply inline-flex items-center rounded-md border border-zinc-200 bg-white p-0.5; +} + +.sidebar-settings-segmented-option { + @apply rounded px-2 py-1 text-xs text-zinc-600 transition-colors; +} + +.sidebar-settings-segmented-option.is-active { + @apply bg-zinc-800 text-white; +} + +.sidebar-settings-provider-info { + @apply flex items-center justify-between w-full; +} + +.sidebar-settings-provider-link { + @apply text-xs text-blue-600 hover:text-blue-700 underline shrink-0; +} + +:root.dark .sidebar-settings-provider-select { + @apply border-zinc-600 bg-zinc-800 text-zinc-200; +} + +:root.dark .sidebar-settings-provider-select:focus { + @apply border-zinc-500 ring-zinc-700; +} + +:root.dark .sidebar-settings-segmented { + @apply border-zinc-600 bg-zinc-800; +} + +:root.dark .sidebar-settings-segmented-option { + @apply text-zinc-300; +} + +:root.dark .sidebar-settings-segmented-option.is-active { + @apply bg-zinc-100 text-zinc-900; +} + +:root.dark .sidebar-settings-provider-link { + @apply text-blue-400 hover:text-blue-300; +} + +:root.dark .sidebar-settings-key-input { + @apply border-zinc-600 bg-zinc-800 text-zinc-200 placeholder:text-zinc-500; +} + +:root.dark .sidebar-settings-key-input:focus { + @apply border-zinc-500; +} + +:root.dark .sidebar-settings-key-save { + @apply border-zinc-600 bg-zinc-700 text-zinc-200 hover:bg-zinc-600; +} + +:root.dark .sidebar-settings-key-masked { + @apply text-zinc-400; +} + +:root.dark .sidebar-settings-key-clear { + @apply border-zinc-600 text-zinc-500 hover:text-zinc-300 hover:border-zinc-500; +} + +.settings-panel-enter-active, +.settings-panel-leave-active { + transition: all 150ms ease; +} + +.settings-panel-enter-from, 
+.settings-panel-leave-to { + opacity: 0; + transform: translateY(8px); +} + +.sidebar-settings-context-row { + @apply cursor-default; +} + +.sidebar-settings-context-value { + @apply text-xs font-semibold text-zinc-700 text-right; +} + +.sidebar-settings-context-value[data-state='ok'] { + @apply text-emerald-700; +} + +.sidebar-settings-context-value[data-state='warning'] { + @apply text-amber-700; +} + +.sidebar-settings-context-value[data-state='danger'] { + @apply text-rose-700; +} + +.sidebar-settings-context-meta { + @apply block text-[11px] font-normal text-zinc-500; +} + +.sidebar-settings-rate-limits { + @apply border-t border-zinc-200 px-2 pt-2; +} + +.sidebar-settings-build-label { + @apply border-t border-zinc-100 px-3 py-2 text-[11px] text-zinc-500; +} diff --git a/src/App.vue b/src/App.vue index 1d0038e58..1f3f1d59f 100644 --- a/src/App.vue +++ b/src/App.vue @@ -4370,1043 +4370,4 @@ async function loadWorktreeBranches(sourceCwd: string): Promise { } - + diff --git a/src/components/content/ThreadConversation.scoped.css b/src/components/content/ThreadConversation.scoped.css new file mode 100644 index 000000000..a2ecf874c --- /dev/null +++ b/src/components/content/ThreadConversation.scoped.css @@ -0,0 +1,1149 @@ +@reference "tailwindcss"; + +.conversation-root { + @apply relative h-full min-h-0 min-w-0 p-0 flex flex-col overflow-y-hidden overflow-x-hidden bg-transparent border-none rounded-none; +} + +.conversation-loading { + @apply m-0 px-6 text-sm text-slate-500; +} + +.conversation-empty { + @apply m-0 px-6 text-sm text-slate-500; +} + +.conversation-list { + @apply h-full min-h-0 list-none m-0 px-2 sm:px-6 py-0 overflow-y-auto overflow-x-visible flex flex-col gap-2 sm:gap-3; +} + +.conversation-load-more { + @apply flex justify-center py-3 m-0; +} + +.load-more-button { + @apply px-4 py-1.5 text-xs rounded-full border border-slate-300 dark:border-slate-600 + text-slate-500 dark:text-slate-400 bg-transparent + hover:bg-slate-100 
dark:hover:bg-slate-800 + disabled:opacity-40 disabled:cursor-not-allowed + transition-colors cursor-pointer; +} + +.conversation-item { + @apply m-0 w-full min-w-0 flex; +} + +.conversation-item-request { + @apply justify-center; +} + +.conversation-item-overlay { + @apply justify-center; +} + +.message-row { + @apply relative w-full min-w-0 max-w-[min(var(--chat-column-max,45rem),100%)] mx-auto flex; +} + +.message-row[data-role='user'] { + @apply justify-end; +} + +.message-row[data-role='assistant'], +.message-row[data-role='system'] { + @apply justify-start; +} + +.conversation-bottom-anchor { + @apply h-px; +} + +.jump-to-latest-button { + @apply absolute left-1/2 bottom-4 z-20 inline-flex h-11 w-11 -translate-x-1/2 items-center justify-center rounded-full border border-slate-300 bg-white/96 text-slate-700 shadow-lg shadow-slate-900/10 transition hover:-translate-x-1/2 hover:-translate-y-0.5 hover:bg-white hover:text-slate-900; +} + +.jump-to-latest-icon { + transform: rotate(180deg); +} + +.message-stack { + @apply flex flex-col w-full min-w-0; +} + +.request-card { + @apply w-full max-w-[min(var(--chat-column-max,45rem),100%)] rounded-xl border border-amber-300 bg-amber-50 px-4 py-3 flex flex-col gap-2; +} + +.request-title { + @apply m-0 text-sm leading-5 font-semibold text-amber-900; +} + +.request-meta { + @apply m-0 text-xs leading-4 text-amber-700; +} + +.request-reason { + @apply m-0 text-sm leading-5 text-amber-900 whitespace-pre-wrap break-words; + overflow-wrap: anywhere; +} + +.request-actions { + @apply flex flex-wrap gap-2; +} + +.request-button { + @apply rounded-md border border-amber-300 bg-white px-3 py-1.5 text-xs text-amber-900 hover:bg-amber-100 transition; +} + +.request-button-primary { + @apply border-amber-500 bg-amber-500 text-white hover:bg-amber-600; +} + +.request-user-input { + @apply flex flex-col gap-3; +} + +.request-question { + @apply flex flex-col gap-1; +} + +.request-question-title { + @apply m-0 text-sm leading-5 
font-medium text-amber-900; +} + +.request-question-text { + @apply m-0 text-xs leading-4 text-amber-800; +} + +.request-question-option-description { + @apply m-0 text-xs leading-4 text-amber-700; +} + +.request-link { + @apply inline-flex w-fit rounded-md border border-amber-300 bg-white px-3 py-1.5 text-xs text-amber-900 hover:bg-amber-100 transition; +} + +.request-select { + @apply h-8 rounded-md border border-amber-300 bg-white px-2 text-sm text-amber-900; +} + +.request-input { + @apply h-8 rounded-md border border-amber-300 bg-white px-2 text-sm text-amber-900 placeholder:text-amber-500; +} + +.request-checkbox-list { + @apply flex flex-col gap-1.5; +} + +.request-checkbox-row { + @apply flex items-center gap-2 text-sm text-amber-900; +} + +.live-overlay-inline { + @apply w-full max-w-[min(var(--chat-column-max,45rem),100%)] px-0 py-1 flex flex-col gap-1; +} + +.live-overlay-label { + @apply m-0 text-sm leading-5 font-medium text-zinc-600; +} + +.live-overlay-reasoning { + @apply m-0 text-sm leading-5 text-zinc-500 whitespace-pre-wrap break-words; + display: block; + max-height: calc(1.25rem * 5); + overflow: auto; + overflow-wrap: anywhere; + scrollbar-width: none; + mask-image: linear-gradient(to top, black 75%, transparent 100%); + -webkit-mask-image: linear-gradient(to top, black 75%, transparent 100%); +} + +.live-overlay-reasoning::-webkit-scrollbar { + display: none; +} + +.live-overlay-error { + @apply m-0 text-sm leading-5 text-rose-600 whitespace-pre-wrap; +} + +.message-body { + @apply flex flex-col min-w-0 max-w-full; + width: fit-content; +} + +.message-body[data-role='user'] { + @apply ml-auto items-end; + align-self: flex-end; +} + +.message-toolbar { + @apply mt-1 self-start flex items-center gap-1 opacity-[0.01] transition-opacity duration-200; +} + +.message-row:hover .message-toolbar { + @apply opacity-100; +} + +.message-copy-button { + @apply inline-flex items-center gap-0.5 rounded-full border border-slate-200 bg-white/90 px-1.25 
py-0.5 text-[9px] font-medium leading-none text-slate-500 transition hover:border-slate-300 hover:bg-white hover:text-slate-900; +} + +.message-fork-button { + @apply inline-flex items-center gap-0.5 px-0.5 py-0 text-[9px] font-medium leading-none text-slate-500 transition hover:text-slate-900; +} + + +.message-copy-button[data-copied='true'] { + @apply border-emerald-200 bg-emerald-50 text-emerald-700; +} + +.message-edit-button { + @apply inline-flex items-center gap-0.5 px-0.5 py-0 text-[9px] font-medium leading-none text-amber-600/70 transition hover:text-amber-700; +} + +.message-fork-icon, +.message-copy-icon, +.message-edit-icon { + @apply text-[10px]; +} + +.message-fork-label, +.message-copy-label, +.message-edit-label { + @apply leading-none; +} + +.message-image-list { + @apply list-none m-0 mb-2 p-0 flex flex-wrap gap-2; +} + +.message-image-list[data-role='user'] { + @apply ml-auto justify-end; +} + +.message-generated-image-list { + @apply gap-3; +} + +.message-image-item { + @apply m-0; +} + +.message-image-button { + @apply block rounded-xl overflow-hidden border border-slate-300 bg-white p-0 transition hover:border-slate-400; +} + +.message-image-preview { + @apply block w-16 h-16 object-cover; +} + +.message-generated-image-preview { + @apply w-auto h-auto max-w-[min(560px,85vw)] max-h-[min(460px,62vh)] object-contain bg-white; +} + +.message-file-attachments { + @apply mb-2 flex flex-wrap gap-1.5; +} + +.message-skill-attachments { + @apply mb-2 flex flex-wrap justify-end gap-1.5; +} + +.message-file-chip { + @apply inline-flex items-center gap-1 rounded-md border border-zinc-200 bg-zinc-50 px-2 py-0.5 text-xs text-zinc-700; +} + +.message-skill-chip { + @apply inline-flex max-w-full items-center gap-1.5 rounded-md border border-emerald-200 bg-emerald-50 px-2 py-0.5 text-xs text-emerald-800 no-underline transition hover:border-emerald-300 hover:bg-emerald-100 hover:text-emerald-900; +} + +.message-skill-chip-prefix { + @apply shrink-0 font-medium 
text-emerald-700; +} + +.message-skill-chip-name { + @apply min-w-0 max-w-48 truncate font-mono; +} + +.message-file-chip-icon { + @apply text-[10px] leading-none; +} + +.message-file-chip-name { + @apply truncate max-w-48 font-mono; +} + +.message-card { + @apply max-w-[min(var(--chat-card-max,76ch),100%)] px-0 py-0 bg-transparent border-none rounded-none; +} + +.message-text-flow { + @apply flex flex-col gap-2; +} + +.plan-card { + @apply flex max-w-[min(var(--chat-card-max,76ch),100%)] flex-col gap-3 rounded-2xl border border-sky-200 bg-sky-50 px-4 py-3 text-slate-900; +} + +.plan-card-header { + @apply flex items-center justify-between gap-3; +} + +.plan-card-title { + @apply m-0 text-sm font-semibold leading-5 text-sky-900; +} + +.plan-card-badge { + @apply inline-flex items-center rounded-full bg-sky-200 px-2 py-0.5 text-[11px] font-medium leading-4 text-sky-900; +} + +.plan-card-explanation { + @apply text-slate-700; +} + +.plan-card-markdown { + @apply flex flex-col gap-2; +} + +.plan-card-markdown :deep(.message-text), +.plan-card-markdown :deep(.message-heading), +.plan-card-markdown :deep(.message-blockquote), +.plan-card-markdown :deep(.message-list), +.plan-card-markdown :deep(.message-table-wrap), +.plan-card-markdown :deep(.message-code-block), +.plan-card-markdown :deep(.message-divider) { + @apply m-0; +} + +.plan-card-markdown :deep(.message-text) { + @apply text-sm leading-relaxed whitespace-pre-wrap text-slate-800; +} + +.plan-card-markdown :deep(.message-heading) { + @apply text-slate-900 tracking-tight; +} + +.plan-card-markdown :deep(.message-heading-h1) { + @apply text-2xl font-semibold leading-tight; +} + +.plan-card-markdown :deep(.message-heading-h2) { + @apply text-xl font-semibold leading-tight; +} + +.plan-card-markdown :deep(.message-heading-h3) { + @apply text-lg font-semibold leading-snug; +} + +.plan-card-markdown :deep(.message-heading-h4) { + @apply text-base font-semibold leading-snug; +} + +.plan-card-markdown 
:deep(.message-heading-h5) { + @apply text-sm font-semibold leading-snug uppercase tracking-[0.02em]; +} + +.plan-card-markdown :deep(.message-heading-h6) { + @apply text-xs font-semibold leading-snug uppercase tracking-[0.04em] text-slate-600; +} + +.plan-card-markdown :deep(.message-blockquote) { + @apply border-l-4 border-slate-300 pl-4 py-1 text-sm leading-relaxed whitespace-pre-wrap text-slate-700 bg-slate-50/70 rounded-r-lg; +} + +.plan-card-markdown :deep(.message-list) { + @apply pl-5 text-sm leading-relaxed text-slate-800 flex flex-col gap-1.5; +} + +.plan-card-markdown :deep(.message-list-unordered) { + @apply list-disc; +} + +.plan-card-markdown :deep(.message-list-ordered) { + @apply list-decimal; +} + +.plan-card-markdown :deep(.message-list-item) { + @apply pl-1; +} + +.plan-card-markdown :deep(.message-list-item-text) { + @apply whitespace-pre-wrap; +} + +.plan-card-markdown :deep(.message-list-item-paragraph + .message-list-item-paragraph) { + @apply mt-2; +} + +.plan-card-markdown :deep(.message-task-list) { + @apply list-none pl-0; +} + +.plan-card-markdown :deep(.message-task-item) { + @apply flex items-start gap-2; +} + +.plan-card-markdown :deep(.message-task-checkbox) { + @apply mt-0.5 text-sm leading-none text-slate-500 select-none; +} + +.plan-card-markdown :deep(.message-code-block) { + @apply overflow-hidden rounded-xl border border-slate-200 bg-slate-950/95 text-slate-100; +} + +.plan-card-markdown :deep(.message-code-language) { + @apply border-b border-slate-800 bg-slate-900/90 px-3 py-2 text-[11px] font-medium uppercase tracking-[0.08em] text-slate-400; +} + +.plan-card-markdown :deep(.message-code-pre) { + @apply m-0 overflow-x-auto px-3 py-3 text-[13px] leading-6; +} + +.plan-card-markdown :deep(.message-inline-code) { + @apply rounded-md bg-slate-200/80 px-1.5 py-0.5 font-mono text-[0.9em] text-slate-900; +} + +.plan-card-markdown :deep(.message-file-link) { + @apply text-sky-700 underline decoration-sky-300 underline-offset-2; +} + 
+.plan-card-markdown :deep(.message-table) { + @apply bg-white/90; +} + +.plan-step-list { + @apply m-0 flex list-none flex-col gap-2 p-0; +} + +.plan-step-item { + @apply flex items-start gap-2 rounded-xl border border-white/70 bg-white/80 px-3 py-2 text-sm leading-relaxed text-slate-800; +} + +.plan-step-item[data-status='completed'] { + @apply border-emerald-200 bg-emerald-50/80; +} + +.plan-step-item[data-status='inProgress'] { + @apply border-amber-200 bg-amber-50/80; +} + +.plan-step-status { + @apply mt-0.5 inline-flex h-5 w-5 shrink-0 items-center justify-center rounded-full bg-slate-200 text-xs font-semibold text-slate-700; +} + +.plan-step-status[data-status='completed'] { + @apply bg-emerald-200 text-emerald-900; +} + +.plan-step-status[data-status='inProgress'] { + @apply bg-amber-200 text-amber-900; +} + +.plan-step-text { + @apply min-w-0 flex-1; +} + +.plan-card-actions { + @apply mt-3 flex justify-end; +} + +.plan-card-implement-button { + @apply inline-flex items-center rounded-full border border-slate-300 bg-white px-3 py-1.5 text-xs font-medium text-slate-800 transition hover:border-slate-400 hover:bg-slate-50; +} + +.message-text { + @apply m-0 text-sm leading-relaxed whitespace-pre-wrap break-words text-slate-800; + overflow-wrap: anywhere; +} + +.message-heading { + @apply m-0 text-slate-900 tracking-tight; +} + +.message-heading-h1 { + @apply text-2xl font-semibold leading-tight; +} + +.message-heading-h2 { + @apply text-xl font-semibold leading-tight; +} + +.message-heading-h3 { + @apply text-lg font-semibold leading-snug; +} + +.message-heading-h4 { + @apply text-base font-semibold leading-snug; +} + +.message-heading-h5 { + @apply text-sm font-semibold leading-snug uppercase tracking-[0.02em]; +} + +.message-heading-h6 { + @apply text-xs font-semibold leading-snug uppercase tracking-[0.04em] text-slate-600; +} + +.message-blockquote { + @apply m-0 border-l-4 border-slate-300 pl-4 py-1 text-sm leading-relaxed whitespace-pre-wrap break-words 
text-slate-700 bg-slate-50/70 rounded-r-lg; + overflow-wrap: anywhere; +} + +.message-list { + @apply m-0 pl-5 text-sm leading-relaxed text-slate-800 flex flex-col gap-1.5; +} + +.message-list-unordered { + @apply list-disc; +} + +.message-list-ordered { + @apply list-decimal; +} + +.message-list-item { + @apply pl-1; +} + +.message-list-item-content { + @apply flex flex-col gap-1.5; +} + +.message-list-item-text { + @apply whitespace-pre-wrap break-words; + overflow-wrap: anywhere; +} + +.message-list-item-paragraph + .message-list-item-paragraph { + @apply mt-2; +} + +.message-task-list { + @apply list-none pl-0; +} + +.message-task-item { + @apply flex items-start gap-2; +} + +.message-task-checkbox { + @apply mt-0.5 text-sm leading-none text-slate-500 select-none; +} + +.message-table-wrap { + @apply w-full overflow-x-auto; +} + +.message-table { + @apply min-w-full border-separate border-spacing-0 overflow-hidden rounded-xl border border-slate-200 bg-white text-sm text-slate-800; +} + +.message-table-head-cell, +.message-table-cell { + @apply border-b border-l border-slate-200 px-3 py-2 align-top whitespace-pre-wrap break-words; + overflow-wrap: anywhere; +} + +.message-table-head-cell:first-child, +.message-table-cell:first-child { + @apply border-l-0; +} + +.message-table-head-cell { + @apply bg-slate-100 font-semibold text-slate-900; +} + +.message-table-body-row:last-child .message-table-cell { + @apply border-b-0; +} + +.message-bold-text { + @apply font-semibold text-slate-900; +} + +.message-italic-text { + @apply italic; +} + +.message-strikethrough-text { + @apply line-through text-slate-500; +} + +.message-markdown-image { + @apply w-auto h-auto max-w-[min(560px,85vw)] max-h-[min(460px,62vh)] object-contain bg-white; +} + +.message-inline-code { + @apply rounded-md border border-slate-200 bg-slate-100/60 px-1.5 py-0.5 text-[0.875em] leading-[1.4] text-slate-900 font-mono; +} + +.message-code-block { + @apply overflow-hidden rounded-xl border 
border-slate-200 bg-slate-950 text-slate-100; +} + +.message-code-language { + @apply border-b border-slate-800 px-3 py-2 text-[11px] font-mono uppercase tracking-[0.08em] text-slate-400; +} + +.message-code-pre { + @apply m-0 overflow-x-auto px-3 py-3 text-[13px] leading-relaxed font-mono whitespace-pre; +} + +.message-code-pre :deep(.hljs) { + @apply block bg-transparent p-0 text-inherit; +} + +.message-file-link { + @apply text-sm leading-relaxed text-[#0969da] no-underline hover:text-[#1f6feb] hover:underline underline-offset-2; +} + +.file-link-context-menu { + @apply fixed z-50 min-w-36 rounded-lg border border-zinc-200 bg-white p-1 shadow-xl; +} + +.file-link-context-menu-item { + @apply block w-full rounded-md px-2 py-1.5 text-left text-xs text-zinc-700 hover:bg-zinc-100; +} + +.message-divider { + @apply m-0 border-0 h-px bg-slate-300/80; +} + +.message-stack[data-role='user'] { + @apply items-end; +} + +.message-stack[data-role='assistant'], +.message-stack[data-role='system'] { + @apply items-start; +} + +.message-card[data-role='user'] { + @apply rounded-2xl bg-slate-200 px-4 py-3 max-w-[min(560px,100%)]; + width: fit-content; + margin-left: auto; + align-self: flex-end; +} + +.automation-message-label { + @apply mb-2 flex flex-wrap items-center gap-2 text-[11px] font-semibold uppercase tracking-[0.12em] text-slate-500; +} + +.automation-message-label code { + @apply rounded-full bg-white/70 px-2 py-0.5 text-[10px] normal-case tracking-normal text-slate-600; +} + +.message-card[data-role='assistant'], +.message-card[data-role='system'] { + @apply px-0 py-0 bg-transparent border-none rounded-none; +} + +:global(.dark) .message-file-chip { + @apply border-zinc-700 bg-zinc-900 text-zinc-200; +} + +:global(.dark) .message-skill-chip { + @apply border-emerald-800/70 bg-emerald-950/50 text-emerald-100; +} + +:global(.dark) .message-skill-chip-prefix { + @apply text-emerald-300; +} + +.conversation-item[data-message-type='worked'] .message-stack, 
+.conversation-item[data-message-type='worked'] .message-body, +.conversation-item[data-message-type='worked'] .message-card { + @apply w-full max-w-full; +} + +.worked-separator-wrap { + @apply w-full flex flex-col gap-0; +} + +.worked-separator { + @apply w-full flex items-center gap-3 bg-transparent border-none cursor-pointer p-0; +} + +.worked-chevron { + @apply text-[9px] text-zinc-400 transition-transform duration-200 flex-shrink-0; +} + +.worked-chevron-open { + transform: rotate(90deg); +} + +.worked-separator-line { + @apply h-px bg-zinc-300/80 flex-1; +} + +.worked-separator-text { + @apply m-0 text-sm leading-relaxed font-normal text-slate-800; +} + +.worked-details { + @apply flex flex-col gap-1.5 pt-2; +} + +.worked-cmd-item { + @apply flex flex-col; +} + +.image-modal-backdrop { + @apply fixed inset-0 z-50 bg-black/40 p-6 flex items-center justify-center; +} + +.image-modal-content { + @apply relative max-w-[min(92vw,1100px)] max-h-[92vh]; +} + +.image-modal-close { + @apply absolute top-2 right-2 z-10 w-10 h-10 rounded-full bg-white/90 text-slate-900 border border-slate-300 flex items-center justify-center; +} + +.image-modal-image { + @apply block max-w-full max-h-[90vh] rounded-2xl shadow-2xl bg-white; +} + +.icon-svg { + @apply w-5 h-5; +} + +.cmd-row { + @apply w-full flex items-center gap-2 px-3 py-1.5 rounded-lg border border-zinc-200 bg-zinc-50 cursor-pointer transition text-left hover:bg-zinc-100; +} + +.cmd-row.cmd-row-group { + @apply border-dashed border-zinc-300 bg-zinc-100/90 text-zinc-600; +} + +.cmd-row.cmd-compact { + gap: 0.375rem; + padding: 0.375rem 0.625rem; + border-radius: 0.625rem; +} + +.cmd-row.cmd-compact .cmd-chevron { + font-size: 9px; +} + +.cmd-row.cmd-compact .cmd-label { + font-size: 0.75rem; +} + +.cmd-row.cmd-compact .cmd-status { + max-width: 4.5rem; + font-size: 0.75rem; +} + +.cmd-row.cmd-expanded { + @apply rounded-b-none; +} + +.cmd-chevron { + @apply text-[10px] text-zinc-400 transition-transform duration-150 
flex-shrink-0; +} + +.cmd-chevron-open { + transform: rotate(90deg); +} + +.cmd-label { + @apply flex-1 min-w-0 truncate text-xs font-mono text-zinc-700; +} + +.cmd-group-label { + @apply flex-1 min-w-0 truncate text-xs font-medium text-zinc-600; +} + +.cmd-status { + @apply max-w-24 truncate text-right text-[11px] font-medium flex-shrink-0; +} + +.cmd-status-running .cmd-status { + @apply text-amber-600; +} + +.cmd-status-ok .cmd-status { + @apply text-emerald-600; +} + +.cmd-status-error .cmd-status { + @apply text-rose-600; +} + +.cmd-output-wrap { + @apply rounded-b-lg bg-zinc-900; + display: grid; + grid-template-rows: 0fr; + transition: grid-template-rows 300ms ease-out, border-color 300ms ease-out; + border: 1px solid transparent; + border-top: none; +} + +.cmd-output-wrap.cmd-output-visible { + grid-template-rows: 1fr; + border-color: #e4e4e7; +} + +.cmd-group-wrap { + display: grid; + grid-template-rows: 0fr; + transition: grid-template-rows 220ms ease-out; +} + +.cmd-group-wrap.cmd-group-visible { + grid-template-rows: 1fr; +} + +.cmd-group-inner { + @apply mb-1 flex min-h-0 flex-col gap-1 overflow-hidden pl-2; +} + +.cmd-output-inner { + overflow: hidden; + min-height: 0; +} + +.cmd-output { + @apply m-0 px-3 py-2 text-xs font-mono text-zinc-200 whitespace-pre-wrap break-words max-h-60 overflow-y-auto; +} + +.cmd-output.cmd-output-condensed { + max-height: 9rem; +} + +.file-change-summary-block { + @apply mt-3 flex flex-col gap-0; +} + +.file-change-summary-block-inline { + @apply mt-4; +} + +.file-change-summary-row { + @apply border-dashed; +} + +.file-change-summary-label { + @apply flex-1 min-w-0 truncate text-xs font-medium text-zinc-700; +} + +.file-change-summary-status { + @apply inline-flex max-w-28 items-center justify-end gap-1.5 text-right text-[11px] font-semibold text-zinc-500 flex-shrink-0; +} + +.file-change-panel-inner { + @apply mb-1 min-h-0 overflow-hidden pl-2; +} + +.file-change-list { + @apply m-0 flex list-none flex-col gap-0.5 
rounded-xl border border-zinc-200 bg-white/80 p-1.5; +} + +.file-change-item { + @apply flex flex-wrap items-center gap-1.5 rounded-lg px-2 py-1 text-sm text-zinc-700; +} + +.file-change-badge { + @apply inline-flex items-center rounded-full px-2.5 py-1 text-[11px] font-semibold uppercase tracking-[0.08em]; +} + +.file-change-badge[data-operation='add'] { + @apply bg-emerald-50 text-emerald-700; +} + +.file-change-badge[data-operation='update'] { + @apply bg-sky-50 text-sky-700; +} + +.file-change-badge[data-operation='delete'] { + @apply bg-rose-50 text-rose-700; +} + +.file-change-badge[data-operation='move'] { + @apply bg-amber-50 text-amber-700; +} + +.file-change-path { + @apply min-w-0 break-all font-mono text-[13px]; +} + +.file-change-path-button { + @apply min-w-0 border-0 bg-transparent p-0 text-left font-mono text-[13px] text-[#0969da] hover:text-[#1f6feb] hover:underline underline-offset-2; +} + +.file-change-arrow { + @apply text-zinc-400; +} + +.file-change-delta { + @apply ml-auto inline-flex items-center gap-1.5 rounded-full bg-zinc-100 px-2 py-1 text-[11px] font-semibold text-zinc-600; +} + +.file-change-signed-count { + @apply inline-flex items-center whitespace-nowrap; +} + +.file-change-signed-count[data-tone='add'] { + @apply text-emerald-600; +} + +.file-change-signed-count[data-tone='remove'] { + @apply text-rose-600; +} + +.diff-viewer-backdrop { + @apply fixed inset-0 z-50 bg-black/45 p-3 sm:p-6 flex items-center justify-center; +} + +.diff-viewer-shell { + @apply relative grid h-[min(88vh,920px)] w-[min(96vw,1320px)] grid-cols-1 overflow-hidden rounded-3xl border border-zinc-200 bg-white shadow-2xl lg:grid-cols-[320px_minmax(0,1fr)]; +} + +.diff-viewer-sidebar { + @apply flex min-h-0 flex-col border-b border-zinc-200 bg-zinc-50 lg:border-b-0 lg:border-r; +} + +.diff-viewer-sidebar-header { + @apply flex items-center justify-between gap-3 border-b border-zinc-200 px-4 py-4; +} + +.diff-viewer-sidebar-title { + @apply m-0 text-sm 
font-semibold text-zinc-900; +} + +.diff-viewer-sidebar-count { + @apply m-0 text-xs font-medium text-zinc-500; +} + +.diff-viewer-sidebar-list { + @apply flex min-h-0 flex-col gap-2 overflow-y-auto p-3; +} + +.diff-viewer-file-button { + @apply flex w-full flex-col items-start gap-2 rounded-2xl border border-transparent bg-transparent px-3 py-3 text-left transition hover:border-zinc-200 hover:bg-white; +} + +.diff-viewer-file-button[data-active='true'] { + @apply border-sky-200 bg-white shadow-sm; +} + +.diff-viewer-file-label { + @apply break-all font-mono text-[13px] text-zinc-700; +} + +.diff-viewer-file-delta { + @apply inline-flex items-center rounded-full bg-zinc-100 px-2.5 py-1 text-[11px] font-medium text-zinc-600; +} + +.diff-viewer-main { + @apply flex min-h-0 flex-col bg-white; +} + +.diff-viewer-toolbar { + @apply flex items-start justify-between gap-4 border-b border-zinc-200 px-5 py-4; +} + +.diff-viewer-toolbar-actions { + @apply flex items-center gap-2 shrink-0; +} + +.diff-viewer-title-wrap { + @apply min-w-0; +} + +.diff-viewer-title { + @apply m-0 break-all text-base font-semibold text-zinc-900; +} + +.diff-viewer-subtitle { + @apply mt-1 mb-0 text-sm text-zinc-500; +} + +.diff-viewer-close { + @apply static shrink-0 border-zinc-200 bg-zinc-100 text-zinc-700; +} + +.diff-viewer-mobile-files-button { + @apply inline-flex items-center rounded-full border border-zinc-200 bg-zinc-100 px-3 py-1.5 text-xs font-medium text-zinc-700; +} + +.diff-viewer-empty { + @apply flex min-h-0 flex-1 flex-col items-center justify-center px-6 text-center; +} + +.diff-viewer-empty-title { + @apply m-0 text-base font-semibold text-zinc-900; +} + +.diff-viewer-empty-text { + @apply mt-2 max-w-2xl text-sm leading-relaxed text-zinc-500; +} + +.diff-viewer-panel { + @apply flex min-h-0 flex-1 flex-col; +} + +.diff-viewer-meta { + @apply border-b border-zinc-200 bg-zinc-50 px-5 py-2; +} + +.diff-viewer-language { + @apply inline-flex items-center rounded-full bg-zinc-200 
px-2.5 py-1 text-[11px] font-semibold uppercase tracking-[0.08em] text-zinc-700; +} + +.diff-viewer-lines { + @apply min-h-0 flex-1 overflow-auto bg-zinc-950; +} + +.diff-viewer-line { + display: grid; + grid-template-columns: 4rem 4rem 2rem minmax(0, 1fr); + align-items: stretch; + min-width: fit-content; +} + +.diff-viewer-line-number { + @apply border-r border-zinc-800 px-3 py-1.5 text-right font-mono text-xs text-zinc-500 select-none; +} + +.diff-viewer-line-marker { + @apply border-r border-zinc-800 px-2 py-1.5 text-center font-mono text-xs text-zinc-500 select-none; +} + +.diff-viewer-line-code { + @apply block whitespace-pre px-3 py-1.5 font-mono text-[12px] leading-5 text-zinc-100; +} + +.diff-viewer-line[data-kind='meta'] { + @apply bg-zinc-900; +} + +.diff-viewer-line[data-kind='meta'] .diff-viewer-line-code, +.diff-viewer-line[data-kind='meta'] .diff-viewer-line-marker { + @apply text-sky-300; +} + +.diff-viewer-line[data-kind='hunk'] { + @apply bg-sky-950/40; +} + +.diff-viewer-line[data-kind='hunk'] .diff-viewer-line-code, +.diff-viewer-line[data-kind='hunk'] .diff-viewer-line-marker { + @apply text-sky-300; +} + +.diff-viewer-line[data-kind='add'] { + background: rgba(20, 83, 45, 0.38); +} + +.diff-viewer-line[data-kind='add'] .diff-viewer-line-marker, +.diff-viewer-line[data-kind='add'] .diff-viewer-line-code { + @apply text-emerald-200; +} + +.diff-viewer-line[data-kind='remove'] { + background: rgba(127, 29, 29, 0.32); +} + +.diff-viewer-line[data-kind='remove'] .diff-viewer-line-marker, +.diff-viewer-line[data-kind='remove'] .diff-viewer-line-code { + @apply text-rose-200; +} + +.diff-viewer-line[data-kind='context'] { + @apply bg-zinc-950; +} + +.diff-viewer-line[data-kind='context'] .diff-viewer-line-code { + @apply text-zinc-100; +} + +.diff-viewer-mobile-sheet-backdrop { + @apply absolute inset-0 z-20 bg-black/35 flex items-end; +} + +.diff-viewer-mobile-sheet { + @apply w-full max-h-[70vh] rounded-t-3xl bg-white shadow-2xl border-t 
border-zinc-200 flex flex-col overflow-hidden; +} + +.diff-viewer-mobile-sheet-handle { + @apply mx-auto mt-3 h-1.5 w-12 rounded-full bg-zinc-300; +} + +.diff-viewer-mobile-sheet-header { + @apply flex items-center justify-between gap-3 px-4 pt-3 pb-2 border-b border-zinc-200; +} + +.diff-viewer-mobile-sheet-list { + @apply flex min-h-0 flex-col gap-2 overflow-y-auto px-3 py-3; +} + +.diff-viewer-sheet-enter-active, +.diff-viewer-sheet-leave-active { + @apply transition-opacity duration-200; +} + +.diff-viewer-sheet-enter-active .diff-viewer-mobile-sheet, +.diff-viewer-sheet-leave-active .diff-viewer-mobile-sheet { + transition: transform 200ms ease; +} + +.diff-viewer-sheet-enter-from, +.diff-viewer-sheet-leave-to { + @apply opacity-0; +} + +.diff-viewer-sheet-enter-from .diff-viewer-mobile-sheet, +.diff-viewer-sheet-leave-to .diff-viewer-mobile-sheet { + transform: translateY(100%); +} + +@media (max-width: 767px) { + .diff-viewer-backdrop { + @apply p-0 items-stretch; + } + + .diff-viewer-shell { + @apply h-[100dvh] w-screen rounded-none border-0 shadow-none; + } + + .diff-viewer-main { + @apply min-w-0; + } + + .diff-viewer-toolbar { + @apply sticky top-0 z-10 bg-white px-3 py-3; + } + + .diff-viewer-title { + @apply text-sm leading-5; + } + + .diff-viewer-subtitle { + @apply text-xs; + } + + .diff-viewer-meta { + @apply px-3 py-2; + } + + .diff-viewer-language { + @apply text-[10px]; + } + + .diff-viewer-line { + grid-template-columns: 2.75rem 2.75rem 1.5rem minmax(0, 1fr); + } + + .diff-viewer-line-number { + @apply px-1.5 py-1 text-[10px]; + } + + .diff-viewer-line-marker { + @apply px-1 py-1 text-[10px]; + } + + .diff-viewer-line-code { + @apply px-2 py-1 text-[11px] leading-5; + } +} diff --git a/src/components/content/ThreadConversation.vue b/src/components/content/ThreadConversation.vue index 322122047..cd7435b14 100644 --- a/src/components/content/ThreadConversation.vue +++ b/src/components/content/ThreadConversation.vue @@ -4010,1154 +4010,4 @@ 
onBeforeUnmount(() => { }) - + diff --git a/src/server/apiPerfConfig.ts b/src/server/apiPerfConfig.ts index b93b4ab82..36c201484 100644 --- a/src/server/apiPerfConfig.ts +++ b/src/server/apiPerfConfig.ts @@ -5,7 +5,7 @@ export const API_PERF_MS_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_MS_THRESHOLD' export const API_PERF_BODY_MB_THRESHOLD_ENV_KEY = 'CODEXUI_API_PERF_BODY_MB_THRESHOLD' const DEFAULT_API_PERF_MS_THRESHOLD = 300 const DEFAULT_API_PERF_BODY_MB_THRESHOLD = 1 -const MB_DIVISOR = 1024 * 1024 +export const MB_DIVISOR = 1024 * 1024 function readEnvValueFromFile(filePath: string, key: string): string | null { try { const content = readFileSync(filePath, 'utf8') diff --git a/src/server/appServerProcess.ts b/src/server/appServerProcess.ts index f1e0f2bb2..cdf878d80 100644 --- a/src/server/appServerProcess.ts +++ b/src/server/appServerProcess.ts @@ -5,6 +5,7 @@ import { join } from 'node:path' import { buildAppServerArgs } from './appServerRuntimeConfig.js' import { FREE_MODE_STATE_FILE, + createDefaultOpenCodeZenFreeModeState, getFreeModeConfigArgs, getFreeModeEnvVars, shouldCreateDefaultFreeModeStateForMissingAuth, @@ -87,7 +88,7 @@ function ensureDefaultFreeModeStateForMissingAuthSync(statePath: string): FreeMo if (!shouldCreateDefaultFreeModeStateForMissingAuth(current, hasUsableCodexAuthSync())) { return current } - const next = { mode: 'openai', keyIndex: 0, updatedAt: new Date().toISOString() } satisfies FreeModeState + const next = createDefaultOpenCodeZenFreeModeState() writeFileSync(statePath, JSON.stringify(next, null, 2), 'utf8') return next } diff --git a/src/server/authRefresh.ts b/src/server/authRefresh.ts index bb88e0555..7fe739de5 100644 --- a/src/server/authRefresh.ts +++ b/src/server/authRefresh.ts @@ -32,6 +32,17 @@ type CodexAuth = { } } +export type ChatgptAuthTokensRefreshParams = { + reason?: string + previousAccountId?: string +} + +export type ChatgptAuthTokensRefreshResponse = { + accessToken: string + chatgptAccountId: string + 
chatgptPlanType: string | null +} + const CODEX_CHATGPT_CLIENT_ID = 'app_EMoamEEZ73f0CkXaXp7hrann' const DEFAULT_CODEX_REFRESH_TOKEN_URL = 'https://auth.openai.com/oauth/token' diff --git a/src/server/codexAppServerBridge.ts b/src/server/codexAppServerBridge.ts index db67aab8b..5daa864a7 100644 --- a/src/server/codexAppServerBridge.ts +++ b/src/server/codexAppServerBridge.ts @@ -96,11 +96,12 @@ import { readThreadQueueState, writeThreadQueueState, } from './backendQueueProcessor.js' +export { BackendQueueProcessor } from './backendQueueProcessor.js' import { MethodCatalog } from './methodCatalog.js' import { fetchConnectorLogo, handleFileUpload, proxyTranscribe } from './mediaProxyRoutes.js' -import { fetchCustomEndpointDefaultModel, readProviderBackedModelIds } from './providerModelDiscovery.js' +import { fetchCustomEndpointDefaultModel, normalizeProviderModelsData, readProviderBackedModelIds } from './providerModelDiscovery.js' import { cloneGithubRepositoryIntoBase, createProjectlessThreadDirectory } from './projectCreation.js' -import { API_PERF_BODY_MB_THRESHOLD, API_PERF_LOGGING_ENABLED, API_PERF_MS_THRESHOLD, getChunkByteLength } from './apiPerfConfig.js' +import { API_PERF_BODY_MB_THRESHOLD, API_PERF_LOGGING_ENABLED, API_PERF_MS_THRESHOLD, MB_DIVISOR, getChunkByteLength } from './apiPerfConfig.js' import { buildThreadSearchIndex, isExactPhraseMatch, listFilesWithRipgrep, scoreFileCandidate, type ThreadSearchIndex } from './threadSearchIndex.js' import { mergeSessionSkillInputsIntoThreadResult, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' export { mergeSessionSkillInputsIntoTurns, sanitizeThreadTurnsInlinePayloads } from './threadInlinePayloads.js' @@ -132,6 +133,12 @@ type RpcExecutor = { rpc: (method: string, params: unknown) => Promise } +type TelegramBridgeConfigState = { + botToken: string + chatIds: number[] + allowedUserIds: Array +} + type ServerRequestReply = { result?: unknown error?: { diff --git 
a/src/server/composioRoutesSupport.ts b/src/server/composioRoutesSupport.ts index 92410a7a7..28a538cd8 100644 --- a/src/server/composioRoutesSupport.ts +++ b/src/server/composioRoutesSupport.ts @@ -38,6 +38,8 @@ export type ComposioUserData = { testUserId: string } +const COMPOSIO_USER_DATA_PATH = join(homedir(), '.composio', 'user_data.json') + export type ComposioStatusResponse = { available: boolean authenticated: boolean diff --git a/src/server/providerModelDiscovery.ts b/src/server/providerModelDiscovery.ts index 0ba2f0efc..169de8bd8 100644 --- a/src/server/providerModelDiscovery.ts +++ b/src/server/providerModelDiscovery.ts @@ -64,7 +64,7 @@ function buildProviderModelsUrl(baseUrl: string, queryParams: unknown): URL { return url } -function normalizeProviderModelsData(payload: unknown): string[] { +export function normalizeProviderModelsData(payload: unknown): string[] { const record = asRecord(payload) const rows = Array.isArray(record?.data) ? record.data : null if (!rows) { @@ -81,6 +81,14 @@ function normalizeProviderModelsData(payload: unknown): string[] { return ids } +function logProviderModelDiscoveryWarning(message: string, details: Record): void { + console.warn('[codex-provider-models]', message, details) +} + +function isTimeoutError(payload: unknown): boolean { + return payload instanceof Error && (payload.name === 'AbortError' || payload.name === 'TimeoutError') +} + export async function fetchCustomEndpointDefaultModel(baseUrl: string, apiKey: string): Promise { const normalizedBaseUrl = baseUrl.trim() if (!normalizedBaseUrl) return '' diff --git a/src/server/threadAutomations.ts b/src/server/threadAutomations.ts index fb0c6bbf3..a19f7680e 100644 --- a/src/server/threadAutomations.ts +++ b/src/server/threadAutomations.ts @@ -1,6 +1,8 @@ +import { randomBytes } from 'node:crypto' import { mkdir, readdir, readFile, rm, stat, writeFile } from 'node:fs/promises' import { homedir } from 'node:os' import { join } from 'node:path' +import { 
isAbsoluteLikePath } from '../pathUtils.js' function getCodexHomeDir(): string { const codexHome = process.env.CODEX_HOME?.trim() From 84ad36a659a16e46615381abcb3b9917e6138e8f Mon Sep 17 00:00:00 2001 From: Igor Date: Tue, 12 May 2026 10:35:59 +0700 Subject: [PATCH 13/19] Extract large Vue template blocks --- src/App.template.html | 1065 ++++++++++++++++ src/App.vue | 1081 +---------------- .../content/ThreadConversation.template.html | 866 +++++++++++++ src/components/content/ThreadConversation.vue | 869 +------------ .../sidebar/SidebarThreadTree.scoped.css | 504 ++++++++ src/components/sidebar/SidebarThreadTree.vue | 507 +------- 6 files changed, 2451 insertions(+), 2441 deletions(-) create mode 100644 src/App.template.html create mode 100644 src/components/content/ThreadConversation.template.html create mode 100644 src/components/sidebar/SidebarThreadTree.scoped.css diff --git a/src/App.template.html b/src/App.template.html new file mode 100644 index 000000000..62c77c86d --- /dev/null +++ b/src/App.template.html @@ -0,0 +1,1065 @@ + +